VirtualBox

source: vbox/trunk/src/VBox/ValidationKit/bootsectors/bs3-cpu-basic-2-x0.c@106004

Last change on this file since 106004 was 104411, checked in by vboxsync, 8 months ago

ValidationKit: Test out of selector limits/non-canonical return targets for the near return testcase, bugref:10371

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 372.8 KB
1/* $Id: bs3-cpu-basic-2-x0.c 104411 2024-04-24 09:40:05Z vboxsync $ */
2/** @file
3 * BS3Kit - bs3-cpu-basic-2, C test driver code (16-bit).
4 */
5
6/*
7 * Copyright (C) 2007-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * The contents of this file may alternatively be used under the terms
26 * of the Common Development and Distribution License Version 1.0
27 * (CDDL), a copy of it is provided in the "COPYING.CDDL" file included
28 * in the VirtualBox distribution, in which case the provisions of the
29 * CDDL are applicable instead of those of the GPL.
30 *
31 * You may elect to license modified versions of this file under the
32 * terms and conditions of either the GPL or the CDDL or both.
33 *
34 * SPDX-License-Identifier: GPL-3.0-only OR CDDL-1.0
35 */
36
37
38/*********************************************************************************************************************************
39* Header Files *
40*********************************************************************************************************************************/
41#define BS3_USE_X0_TEXT_SEG
42#include <bs3kit.h>
43#include <iprt/asm.h>
44#include <iprt/asm-amd64-x86.h>
45#include <iprt/asm-mem.h>
46
47
48/*********************************************************************************************************************************
49* Defined Constants And Macros *
50*********************************************************************************************************************************/
51#undef CHECK_MEMBER
52#define CHECK_MEMBER(a_szName, a_szFmt, a_Actual, a_Expected) \
53 do \
54 { \
55 if ((a_Actual) == (a_Expected)) { /* likely */ } \
56 else bs3CpuBasic2_FailedF(a_szName "=" a_szFmt " expected " a_szFmt, (a_Actual), (a_Expected)); \
57 } while (0)
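/* Illustrative use (as seen in the compare helpers further down):
 *     CHECK_MEMBER("bXcpt", "%#04x", pTrapCtx->bXcpt, bXcpt);
 * A mismatch is reported via bs3CpuBasic2_FailedF() with both the actual and
 * the expected value rendered using a_szFmt. */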
58
59
60/** Indicating that we've got an operand size prefix and that it matters. */
61#define BS3CB2SIDTSGDT_F_OPSIZE UINT8_C(0x01)
62/** Worker requires 386 or later. */
63#define BS3CB2SIDTSGDT_F_386PLUS UINT8_C(0x02)
64
65
66/** @name MYOP_XXX - Values for FNBS3CPUBASIC2ACTSTCODE::fOp.
67 *
68 * These are flags, though a few pre-combined convenience values are included to keep things short.
69 *
70 * @{ */
71#define MYOP_LD 0x1 /**< The instruction loads. */
72#define MYOP_ST 0x2 /**< The instruction stores. */
73#define MYOP_EFL 0x4 /**< The instruction modifies EFLAGS. */
74#define MYOP_AC_GP 0x8 /**< The instruction may cause either \#AC or \#GP (FXSAVE). */
75
76#define MYOP_LD_ST 0x3 /**< Convenience: The instruction both loads and stores. */
77#define MYOP_LD_DIV 0x5 /**< Convenience: DIV instruction - loading and modifying flags. */
78/** @} */
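/* Note: the convenience values are plain ORs of the base flags, i.e.
 *       MYOP_LD_ST  == (MYOP_LD | MYOP_ST)  == 0x3 and
 *       MYOP_LD_DIV == (MYOP_LD | MYOP_EFL) == 0x5. */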
79
80
81/*********************************************************************************************************************************
82* Structures and Typedefs *
83*********************************************************************************************************************************/
84/** Near void pointer. */
85typedef void BS3_NEAR *NPVOID;
86
87typedef struct BS3CB2INVLDESCTYPE
88{
89 uint8_t u4Type;
90 uint8_t u1DescType;
91} BS3CB2INVLDESCTYPE;
92
93typedef struct BS3CB2SIDTSGDT
94{
95 const char *pszDesc;
96 FPFNBS3FAR fpfnWorker;
97 uint8_t cbInstr;
98 bool fSs;
99 uint8_t bMode;
100 uint8_t fFlags;
101} BS3CB2SIDTSGDT;
102
103
104typedef void BS3_CALL FNBS3CPUBASIC2ACSNIPPET(void);
105
106typedef struct FNBS3CPUBASIC2ACTSTCODE
107{
108 FNBS3CPUBASIC2ACSNIPPET BS3_FAR *pfn;
109 uint8_t fOp;
110 uint16_t cbMem;
111 uint8_t cbAlign;
112 uint8_t offFaultInstr; /**< For skipping fninit with the fld test. */
113} FNBS3CPUBASIC2ACTSTCODE;
114typedef FNBS3CPUBASIC2ACTSTCODE const *PCFNBS3CPUBASIC2ACTSTCODE;
115
116typedef struct BS3CPUBASIC2ACTTSTCMNMODE
117{
118 uint8_t bMode;
119 uint16_t cEntries;
120 PCFNBS3CPUBASIC2ACTSTCODE paEntries;
121} BS3CPUBASIC2PFTTSTCMNMODE;
122typedef BS3CPUBASIC2PFTTSTCMNMODE const *PCBS3CPUBASIC2PFTTSTCMNMODE;
123
124
125/*********************************************************************************************************************************
126* External Symbols *
127*********************************************************************************************************************************/
128extern FNBS3FAR bs3CpuBasic2_Int80;
129extern FNBS3FAR bs3CpuBasic2_Int81;
130extern FNBS3FAR bs3CpuBasic2_Int82;
131extern FNBS3FAR bs3CpuBasic2_Int83;
132
133extern FNBS3FAR bs3CpuBasic2_ud2;
134#define g_bs3CpuBasic2_ud2_FlatAddr BS3_DATA_NM(g_bs3CpuBasic2_ud2_FlatAddr)
135extern uint32_t g_bs3CpuBasic2_ud2_FlatAddr;
136
137extern FNBS3FAR bs3CpuBasic2_salc_ud2;
138extern FNBS3FAR bs3CpuBasic2_swapgs;
139
140extern FNBS3FAR bs3CpuBasic2_iret;
141extern FNBS3FAR bs3CpuBasic2_iret_opsize;
142extern FNBS3FAR bs3CpuBasic2_iret_rexw;
143
144extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c16;
145extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c32;
146extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c64;
147extern FNBS3FAR bs3CpuBasic2_sidt_ss_bx_ud2_c16;
148extern FNBS3FAR bs3CpuBasic2_sidt_ss_bx_ud2_c32;
149extern FNBS3FAR bs3CpuBasic2_sidt_rexw_bx_ud2_c64;
150extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c16;
151extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c32;
152extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c64;
153extern FNBS3FAR bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c16;
154extern FNBS3FAR bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c32;
155extern FNBS3FAR bs3CpuBasic2_sidt_opsize_rexw_bx_ud2_c64;
156
157extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c16;
158extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c32;
159extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c64;
160extern FNBS3FAR bs3CpuBasic2_sgdt_ss_bx_ud2_c16;
161extern FNBS3FAR bs3CpuBasic2_sgdt_ss_bx_ud2_c32;
162extern FNBS3FAR bs3CpuBasic2_sgdt_rexw_bx_ud2_c64;
163extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c16;
164extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c32;
165extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c64;
166extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c16;
167extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c32;
168extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_rexw_bx_ud2_c64;
169
170extern FNBS3FAR bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c16;
171extern FNBS3FAR bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c32;
172extern FNBS3FAR bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c64;
173extern FNBS3FAR bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c16;
174extern FNBS3FAR bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c32;
175extern FNBS3FAR bs3CpuBasic2_lidt_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64;
176extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c16;
177extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt32_es_di__lidt_es_si__ud2_c16;
178extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c32;
179extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c64;
180extern FNBS3FAR bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c16;
181extern FNBS3FAR bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c32;
182extern FNBS3FAR bs3CpuBasic2_lidt_opsize_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64;
183
184extern FNBS3FAR bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
185extern FNBS3FAR bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
186extern FNBS3FAR bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
187extern FNBS3FAR bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
188extern FNBS3FAR bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
189extern FNBS3FAR bs3CpuBasic2_lgdt_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
190extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
191extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
192extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
193extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
194extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
195extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
196
197
198/* bs3-cpu-basic-2-template.mac: */
199FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c16;
200FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c16;
201FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c16;
202FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c16;
203FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c16;
204FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fld_ds_bx__ud2_c16;
205FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c16;
206FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c16;
207FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fxsave_ds_bx__ud2_c16;
208
209FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c32;
210FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c32;
211FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c32;
212FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c32;
213FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c32;
214FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fld_ds_bx__ud2_c32;
215FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c32;
216FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c32;
217FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fxsave_ds_bx__ud2_c32;
218
219FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c64;
220FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c64;
221FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c64;
222FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c64;
223FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c64;
224FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fld_ds_bx__ud2_c64;
225FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c64;
226FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c64;
227FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fxsave_ds_bx__ud2_c64;
228
229
230/*********************************************************************************************************************************
231* Global Variables *
232*********************************************************************************************************************************/
233static const char BS3_FAR *g_pszTestMode = (const char *)1;
234static uint8_t g_bTestMode = 1;
235static bool g_f16BitSys = 1;
236
237
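/* The worker tables below list their entries in BS3CB2SIDTSGDT field order:
 *     { pszDesc, fpfnWorker, cbInstr, fSs, bMode, fFlags } */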
238/** SIDT test workers. */
239static BS3CB2SIDTSGDT const g_aSidtWorkers[] =
240{
241 { "sidt [bx]", bs3CpuBasic2_sidt_bx_ud2_c16, 3, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
242 { "sidt [ss:bx]", bs3CpuBasic2_sidt_ss_bx_ud2_c16, 4, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
243 { "o32 sidt [bx]", bs3CpuBasic2_sidt_opsize_bx_ud2_c16, 4, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
244 { "o32 sidt [ss:bx]", bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c16, 5, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
245 { "sidt [ebx]", bs3CpuBasic2_sidt_bx_ud2_c32, 3, false, BS3_MODE_CODE_32, 0 },
246 { "sidt [ss:ebx]", bs3CpuBasic2_sidt_ss_bx_ud2_c32, 4, true, BS3_MODE_CODE_32, 0 },
247 { "o16 sidt [ebx]", bs3CpuBasic2_sidt_opsize_bx_ud2_c32, 4, false, BS3_MODE_CODE_32, 0 },
248 { "o16 sidt [ss:ebx]", bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c32, 5, true, BS3_MODE_CODE_32, 0 },
249 { "sidt [rbx]", bs3CpuBasic2_sidt_bx_ud2_c64, 3, false, BS3_MODE_CODE_64, 0 },
250 { "o64 sidt [rbx]", bs3CpuBasic2_sidt_rexw_bx_ud2_c64, 4, false, BS3_MODE_CODE_64, 0 },
251 { "o32 sidt [rbx]", bs3CpuBasic2_sidt_opsize_bx_ud2_c64, 4, false, BS3_MODE_CODE_64, 0 },
252 { "o32 o64 sidt [rbx]", bs3CpuBasic2_sidt_opsize_rexw_bx_ud2_c64, 5, false, BS3_MODE_CODE_64, 0 },
253};
254
255/** SGDT test workers. */
256static BS3CB2SIDTSGDT const g_aSgdtWorkers[] =
257{
258 { "sgdt [bx]", bs3CpuBasic2_sgdt_bx_ud2_c16, 3, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
259 { "sgdt [ss:bx]", bs3CpuBasic2_sgdt_ss_bx_ud2_c16, 4, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
260 { "o32 sgdt [bx]", bs3CpuBasic2_sgdt_opsize_bx_ud2_c16, 4, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
261 { "o32 sgdt [ss:bx]", bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c16, 5, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
262 { "sgdt [ebx]", bs3CpuBasic2_sgdt_bx_ud2_c32, 3, false, BS3_MODE_CODE_32, 0 },
263 { "sgdt [ss:ebx]", bs3CpuBasic2_sgdt_ss_bx_ud2_c32, 4, true, BS3_MODE_CODE_32, 0 },
264 { "o16 sgdt [ebx]", bs3CpuBasic2_sgdt_opsize_bx_ud2_c32, 4, false, BS3_MODE_CODE_32, 0 },
265 { "o16 sgdt [ss:ebx]", bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c32, 5, true, BS3_MODE_CODE_32, 0 },
266 { "sgdt [rbx]", bs3CpuBasic2_sgdt_bx_ud2_c64, 3, false, BS3_MODE_CODE_64, 0 },
267 { "o64 sgdt [rbx]", bs3CpuBasic2_sgdt_rexw_bx_ud2_c64, 4, false, BS3_MODE_CODE_64, 0 },
268 { "o32 sgdt [rbx]", bs3CpuBasic2_sgdt_opsize_bx_ud2_c64, 4, false, BS3_MODE_CODE_64, 0 },
269 { "o32 o64 sgdt [rbx]", bs3CpuBasic2_sgdt_opsize_rexw_bx_ud2_c64, 5, false, BS3_MODE_CODE_64, 0 },
270};
271
272/** LIDT test workers. */
273static BS3CB2SIDTSGDT const g_aLidtWorkers[] =
274{
275 { "lidt [bx]", bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c16, 11, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
276 { "lidt [ss:bx]", bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c16, 12, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
277 { "o32 lidt [bx]", bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c16, 12, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
278 { "o32 lidt [bx]; sidt32", bs3CpuBasic2_lidt_opsize_bx__sidt32_es_di__lidt_es_si__ud2_c16, 27, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
279 { "o32 lidt [ss:bx]", bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c16, 13, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
280 { "lidt [ebx]", bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c32, 11, false, BS3_MODE_CODE_32, 0 },
281 { "lidt [ss:ebx]", bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c32, 12, true, BS3_MODE_CODE_32, 0 },
282 { "o16 lidt [ebx]", bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c32, 12, false, BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
283 { "o16 lidt [ss:ebx]", bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c32, 13, true, BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
284 { "lidt [rbx]", bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c64, 9, false, BS3_MODE_CODE_64, 0 },
285 { "o64 lidt [rbx]", bs3CpuBasic2_lidt_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64, 10, false, BS3_MODE_CODE_64, 0 },
286 { "o32 lidt [rbx]", bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c64, 10, false, BS3_MODE_CODE_64, 0 },
287 { "o32 o64 lidt [rbx]", bs3CpuBasic2_lidt_opsize_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64, 11, false, BS3_MODE_CODE_64, 0 },
288};
289
290/** LGDT test workers. */
291static BS3CB2SIDTSGDT const g_aLgdtWorkers[] =
292{
293 { "lgdt [bx]", bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c16, 11, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
294 { "lgdt [ss:bx]", bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16, 12, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
295 { "o32 lgdt [bx]", bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c16, 12, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
296 { "o32 lgdt [ss:bx]", bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16, 13, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
297 { "lgdt [ebx]", bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c32, 11, false, BS3_MODE_CODE_32, 0 },
298 { "lgdt [ss:ebx]", bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32, 12, true, BS3_MODE_CODE_32, 0 },
299 { "o16 lgdt [ebx]", bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c32, 12, false, BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
300 { "o16 lgdt [ss:ebx]", bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32, 13, true, BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
301 { "lgdt [rbx]", bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c64, 9, false, BS3_MODE_CODE_64, 0 },
302 { "o64 lgdt [rbx]", bs3CpuBasic2_lgdt_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64, 10, false, BS3_MODE_CODE_64, 0 },
303 { "o32 lgdt [rbx]", bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c64, 10, false, BS3_MODE_CODE_64, 0 },
304 { "o32 o64 lgdt [rbx]", bs3CpuBasic2_lgdt_opsize_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64, 11, false, BS3_MODE_CODE_64, 0 },
305};
306
307
308
309#if 0
310/** Table containing invalid CS selector types. */
311static const BS3CB2INVLDESCTYPE g_aInvalidCsTypes[] =
312{
313 { X86_SEL_TYPE_RO, 1 },
314 { X86_SEL_TYPE_RO_ACC, 1 },
315 { X86_SEL_TYPE_RW, 1 },
316 { X86_SEL_TYPE_RW_ACC, 1 },
317 { X86_SEL_TYPE_RO_DOWN, 1 },
318 { X86_SEL_TYPE_RO_DOWN_ACC, 1 },
319 { X86_SEL_TYPE_RW_DOWN, 1 },
320 { X86_SEL_TYPE_RW_DOWN_ACC, 1 },
321 { 0, 0 },
322 { 1, 0 },
323 { 2, 0 },
324 { 3, 0 },
325 { 4, 0 },
326 { 5, 0 },
327 { 6, 0 },
328 { 7, 0 },
329 { 8, 0 },
330 { 9, 0 },
331 { 10, 0 },
332 { 11, 0 },
333 { 12, 0 },
334 { 13, 0 },
335 { 14, 0 },
336 { 15, 0 },
337};
338
339/** Table containing invalid SS selector types. */
340static const BS3CB2INVLDESCTYPE g_aInvalidSsTypes[] =
341{
342 { X86_SEL_TYPE_EO, 1 },
343 { X86_SEL_TYPE_EO_ACC, 1 },
344 { X86_SEL_TYPE_ER, 1 },
345 { X86_SEL_TYPE_ER_ACC, 1 },
346 { X86_SEL_TYPE_EO_CONF, 1 },
347 { X86_SEL_TYPE_EO_CONF_ACC, 1 },
348 { X86_SEL_TYPE_ER_CONF, 1 },
349 { X86_SEL_TYPE_ER_CONF_ACC, 1 },
350 { 0, 0 },
351 { 1, 0 },
352 { 2, 0 },
353 { 3, 0 },
354 { 4, 0 },
355 { 5, 0 },
356 { 6, 0 },
357 { 7, 0 },
358 { 8, 0 },
359 { 9, 0 },
360 { 10, 0 },
361 { 11, 0 },
362 { 12, 0 },
363 { 13, 0 },
364 { 14, 0 },
365 { 15, 0 },
366};
367#endif
368
369
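/* The common test snippet tables below list their entries in FNBS3CPUBASIC2ACTSTCODE
 * field order: { pfn, fOp, cbMem, cbAlign [, offFaultInstr] }. */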
370static const FNBS3CPUBASIC2ACTSTCODE g_aCmn16[] =
371{
372 { bs3CpuBasic2_mov_ax_ds_bx__ud2_c16, MYOP_LD, 2, 2 },
373 { bs3CpuBasic2_mov_ds_bx_ax__ud2_c16, MYOP_ST, 2, 2 },
374 { bs3CpuBasic2_xchg_ds_bx_ax__ud2_c16, MYOP_LD_ST, 2, 2 },
375 { bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c16, MYOP_LD_ST | MYOP_EFL, 2, 2 },
376 { bs3CpuBasic2_div_ds_bx__ud2_c16, MYOP_LD_DIV, 2, 2 },
377 { bs3CpuBasic2_fninit_fld_ds_bx__ud2_c16, MYOP_LD, 10, 8, 2 /*fninit*/ },
378 { bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c16, MYOP_LD, 10, 8, 2 /*fninit*/ },
379 { bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c16, MYOP_ST, 10, 8, 4 /*fninit+fldz*/ },
380 { bs3CpuBasic2_fxsave_ds_bx__ud2_c16, MYOP_ST | MYOP_AC_GP, 512, 16 },
381};
382
383static const FNBS3CPUBASIC2ACTSTCODE g_aCmn32[] =
384{
385 { bs3CpuBasic2_mov_ax_ds_bx__ud2_c32, MYOP_LD, 4, 4 },
386 { bs3CpuBasic2_mov_ds_bx_ax__ud2_c32, MYOP_ST, 4, 4 },
387 { bs3CpuBasic2_xchg_ds_bx_ax__ud2_c32, MYOP_LD_ST, 4, 4 },
388 { bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c32, MYOP_LD_ST | MYOP_EFL, 4, 4 },
389 { bs3CpuBasic2_div_ds_bx__ud2_c32, MYOP_LD_DIV, 4, 4 },
390 { bs3CpuBasic2_fninit_fld_ds_bx__ud2_c32, MYOP_LD, 10, 8, 2 /*fninit*/ },
391 { bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c32, MYOP_LD, 10, 8, 2 /*fninit*/ },
392 { bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c32, MYOP_ST, 10, 8, 4 /*fninit+fldz*/ },
393 { bs3CpuBasic2_fxsave_ds_bx__ud2_c32, MYOP_ST | MYOP_AC_GP, 512, 16 },
394};
395
396static const FNBS3CPUBASIC2ACTSTCODE g_aCmn64[] =
397{
398 { bs3CpuBasic2_mov_ax_ds_bx__ud2_c64, MYOP_LD, 8, 8 },
399 { bs3CpuBasic2_mov_ds_bx_ax__ud2_c64, MYOP_ST, 8, 8 },
400 { bs3CpuBasic2_xchg_ds_bx_ax__ud2_c64, MYOP_LD_ST, 8, 8 },
401 { bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c64, MYOP_LD_ST | MYOP_EFL, 8, 8 },
402 { bs3CpuBasic2_div_ds_bx__ud2_c64, MYOP_LD_DIV, 8, 8 },
403 { bs3CpuBasic2_fninit_fld_ds_bx__ud2_c64, MYOP_LD, 10, 8, 2 /*fninit*/ },
404 { bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c64, MYOP_LD, 10, 8, 2 /*fninit*/ },
405 { bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c64, MYOP_ST, 10, 8, 4 /*fninit+fldz*/ },
406 { bs3CpuBasic2_fxsave_ds_bx__ud2_c64, MYOP_ST | MYOP_AC_GP, 512, 16 },
407};
408
409static const BS3CPUBASIC2PFTTSTCMNMODE g_aCmnModes[] =
410{
411 { BS3_MODE_CODE_16, RT_ELEMENTS(g_aCmn16), g_aCmn16 },
412 { BS3_MODE_CODE_V86, RT_ELEMENTS(g_aCmn16), g_aCmn16 },
413 { BS3_MODE_CODE_32, RT_ELEMENTS(g_aCmn32), g_aCmn32 },
414 { BS3_MODE_CODE_64, RT_ELEMENTS(g_aCmn64), g_aCmn64 },
415};
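/* The table above maps each code mode to the matching snippet table; note that
 * the 16-bit snippets double for v8086 code. */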
416
417
418/**
419 * Sets globals according to the mode.
420 *
421 * @param bTestMode The test mode.
422 */
423static void bs3CpuBasic2_SetGlobals(uint8_t bTestMode)
424{
425 g_bTestMode = bTestMode;
426 g_pszTestMode = Bs3GetModeName(bTestMode);
427 g_f16BitSys = BS3_MODE_IS_16BIT_SYS(bTestMode);
428 g_usBs3TestStep = 0;
429}
430
431
432uint32_t ASMGetESP(void);
433#pragma aux ASMGetESP = \
434 ".386" \
435 "mov ax, sp" \
436 "mov edx, esp" \
437 "shr edx, 16" \
438 value [ax dx] \
439 modify exact [ax dx];
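/* Open Watcom inline assembly: hands the full 32-bit ESP back to 16-bit code,
 * low word in AX (copied from SP) and high word in DX, per the 'value [ax dx]'
 * clause above. Illustrative use (hypothetical, not from this file):
 *     uint32_t const uCurEsp = ASMGetESP();
 */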
440
441
442/**
443 * Wrapper around Bs3TestFailedF that prefixes the error with g_usBs3TestStep
444 * and g_pszTestMode.
445 */
446static void bs3CpuBasic2_FailedF(const char *pszFormat, ...)
447{
448 va_list va;
449
450 char szTmp[168];
451 va_start(va, pszFormat);
452 Bs3StrPrintfV(szTmp, sizeof(szTmp), pszFormat, va);
453 va_end(va);
454
455 Bs3TestFailedF("%u - %s: %s", g_usBs3TestStep, g_pszTestMode, szTmp);
456}
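/* This is the reporter behind CHECK_MEMBER() above and the explicit failure
 * checks sprinkled throughout the test workers below. */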
457
458
459#if 0
460/**
461 * Compares trap stuff.
462 */
463static void bs3CpuBasic2_CompareIntCtx1(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint8_t bXcpt)
464{
465 uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
466 CHECK_MEMBER("bXcpt", "%#04x", pTrapCtx->bXcpt, bXcpt);
467 CHECK_MEMBER("bErrCd", "%#06RX64", pTrapCtx->uErrCd, 0);
468 Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, pStartCtx, 2 /*int xx*/, 0 /*cbSpAdjust*/, 0 /*fExtraEfl*/, g_pszTestMode, g_usBs3TestStep);
469 if (Bs3TestSubErrorCount() != cErrorsBefore)
470 {
471 Bs3TrapPrintFrame(pTrapCtx);
472#if 1
473 Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
474 Bs3TestPrintf("Halting in CompareTrapCtx1: bXcpt=%#x\n", bXcpt);
475 ASMHalt();
476#endif
477 }
478}
479#endif
480
481
482#if 0
483/**
484 * Compares trap stuff.
485 */
486static void bs3CpuBasic2_CompareTrapCtx2(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t cbIpAdjust,
487 uint8_t bXcpt, uint16_t uHandlerCs)
488{
489 uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
490 CHECK_MEMBER("bXcpt", "%#04x", pTrapCtx->bXcpt, bXcpt);
491 CHECK_MEMBER("bErrCd", "%#06RX64", pTrapCtx->uErrCd, 0);
492 CHECK_MEMBER("uHandlerCs", "%#06x", pTrapCtx->uHandlerCs, uHandlerCs);
493 Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, pStartCtx, cbIpAdjust, 0 /*cbSpAdjust*/, 0 /*fExtraEfl*/, g_pszTestMode, g_usBs3TestStep);
494 if (Bs3TestSubErrorCount() != cErrorsBefore)
495 {
496 Bs3TrapPrintFrame(pTrapCtx);
497#if 1
498 Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
499 Bs3TestPrintf("Halting in CompareTrapCtx2: bXcpt=%#x\n", bXcpt);
500 ASMHalt();
501#endif
502 }
503}
504#endif
505
506/**
507 * Compares a CPU trap.
508 */
509static void bs3CpuBasic2_CompareCpuTrapCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd,
510 uint8_t bXcpt, bool f486ResumeFlagHint, uint8_t cbIpAdjust)
511{
512 uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
513 uint32_t fExtraEfl;
514
515 CHECK_MEMBER("bXcpt", "%#04x", pTrapCtx->bXcpt, bXcpt);
516 CHECK_MEMBER("bErrCd", "%#06RX16", (uint16_t)pTrapCtx->uErrCd, (uint16_t)uErrCd); /* 486 only writes a word */
517
518 if ( g_f16BitSys
519 || bXcpt == X86_XCPT_DB /* hack (10980xe)... */
520 || ( !f486ResumeFlagHint
521 && (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) <= BS3CPU_80486 ) )
522 fExtraEfl = 0;
523 else
524 fExtraEfl = X86_EFL_RF;
525#if 0 /** @todo Running on an AMD Phenom II X6 1100T under AMD-V I'm not getting good X86_EFL_RF results. Enable this to get on with other work. */
526 fExtraEfl = pTrapCtx->Ctx.rflags.u32 & X86_EFL_RF;
527#endif
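    /* I.e. expect RF to be set in the reported EFLAGS unless this is a 16-bit
       system, a #DB, or a 486-or-older CPU without the resume-flag hint. */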
528 Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, pStartCtx, cbIpAdjust, 0 /*cbSpAdjust*/, fExtraEfl, g_pszTestMode, g_usBs3TestStep);
529 if (Bs3TestSubErrorCount() != cErrorsBefore)
530 {
531 Bs3TrapPrintFrame(pTrapCtx);
532#if 1
533 Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
534 Bs3TestPrintf("Halting: bXcpt=%#x uErrCd=%#x\n", bXcpt, uErrCd);
535 ASMHalt();
536#endif
537 }
538}
539
540
541/**
542 * Compares \#GP trap.
543 */
544static void bs3CpuBasic2_CompareGpCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd)
545{
546 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_GP, true /*f486ResumeFlagHint*/, 0 /*cbIpAdjust*/);
547}
548
549#if 0
550/**
551 * Compares \#NP trap.
552 */
553static void bs3CpuBasic2_CompareNpCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd)
554{
555 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_NP, true /*f486ResumeFlagHint*/, 0 /*cbIpAdjust*/);
556}
557#endif
558
559/**
560 * Compares \#SS trap.
561 */
562static void bs3CpuBasic2_CompareSsCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd, bool f486ResumeFlagHint)
563{
564 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_SS, f486ResumeFlagHint, 0 /*cbIpAdjust*/);
565}
566
567#if 0
568/**
569 * Compares \#TS trap.
570 */
571static void bs3CpuBasic2_CompareTsCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd)
572{
573 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_TS, false /*f486ResumeFlagHint*/, 0 /*cbIpAdjust*/);
574}
575#endif
576
577/**
578 * Compares \#PF trap.
579 */
580static void bs3CpuBasic2_ComparePfCtx(PCBS3TRAPFRAME pTrapCtx, PBS3REGCTX pStartCtx, uint16_t uErrCd,
581 uint64_t uCr2Expected, uint8_t cbIpAdjust)
582{
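    /* The expected CR2 value is temporarily planted in the incoming context so
       that the generic context comparison (which presumably covers CR2 too)
       verifies it; the original value is restored afterwards. */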
583 uint64_t const uCr2Saved = pStartCtx->cr2.u;
584 pStartCtx->cr2.u = uCr2Expected;
585 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_PF, true /*f486ResumeFlagHint*/, cbIpAdjust);
586 pStartCtx->cr2.u = uCr2Saved;
587}
588
589/**
590 * Compares \#UD trap.
591 */
592static void bs3CpuBasic2_CompareUdCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx)
593{
594 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, 0 /*no error code*/, X86_XCPT_UD,
595 true /*f486ResumeFlagHint*/, 0 /*cbIpAdjust*/);
596}
597
598/**
599 * Compares \#AC trap.
600 */
601static void bs3CpuBasic2_CompareAcCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint8_t cbIpAdjust)
602{
603 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, 0 /*always zero*/, X86_XCPT_AC, true /*f486ResumeFlagHint*/, cbIpAdjust);
604}
605
606/**
607 * Compares \#DB trap.
608 */
609static void bs3CpuBasic2_CompareDbCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint32_t fDr6Expect)
610{
611 uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
612 uint32_t const fDr6 = Bs3RegGetDr6();
613 fDr6Expect |= X86_DR6_RA1_MASK;
614 CHECK_MEMBER("dr6", "%#08RX32", fDr6, fDr6Expect);
615
616 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, 0 /*always zero*/, X86_XCPT_DB, false /*f486ResumeFlagHint?*/, 0 /*cbIpAdjust*/);
617
618 if (Bs3TestSubErrorCount() > cErrorsBefore)
619 {
620#if 0
621 Bs3TestPrintf("Halting\n");
622 ASMHalt();
623#endif
624 }
625}
626
627
628/**
629 * Checks that DR6 has the initial value, i.e. is unchanged when another exception
630 * was raised before a \#DB could occur.
631 */
632static void bs3CpuBasic2_CheckDr6InitVal(void)
633{
634 uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
635 uint32_t const fDr6 = Bs3RegGetDr6();
636 uint32_t const fDr6Expect = X86_DR6_INIT_VAL;
637 CHECK_MEMBER("dr6", "%#08RX32", fDr6, fDr6Expect);
638 if (Bs3TestSubErrorCount() > cErrorsBefore)
639 {
640 Bs3TestPrintf("Halting\n");
641 ASMHalt();
642 }
643}
644
645#if 0 /* convert me */
646static void bs3CpuBasic2_RaiseXcpt1Common(uint16_t const uSysR0Cs, uint16_t const uSysR0CsConf, uint16_t const uSysR0Ss,
647 PX86DESC const paIdt, unsigned const cIdteShift)
648{
649 BS3TRAPFRAME TrapCtx;
650 BS3REGCTX Ctx80;
651 BS3REGCTX Ctx81;
652 BS3REGCTX Ctx82;
653 BS3REGCTX Ctx83;
654 BS3REGCTX CtxTmp;
655 BS3REGCTX CtxTmp2;
656 PBS3REGCTX apCtx8x[4];
657 unsigned iCtx;
658 unsigned iRing;
659 unsigned iDpl;
660 unsigned iRpl;
661 unsigned i, j, k;
662 uint32_t uExpected;
663 bool const f486Plus = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486;
664# if TMPL_BITS == 16
665 bool const f386Plus = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386;
666 bool const f286 = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80286;
667# else
668 bool const f286 = false;
669 bool const f386Plus = true;
670 int rc;
671 uint8_t *pbIdtCopyAlloc;
672 PX86DESC pIdtCopy;
673 const unsigned cbIdte = 1 << (3 + cIdteShift);
674 RTCCUINTXREG uCr0Saved = ASMGetCR0();
675 RTGDTR GdtrSaved;
676# endif
677 RTIDTR IdtrSaved;
678 RTIDTR Idtr;
679
680 ASMGetIDTR(&IdtrSaved);
681# if TMPL_BITS != 16
682 ASMGetGDTR(&GdtrSaved);
683# endif
684
685 /* make sure they're allocated */
686 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
687 Bs3MemZero(&Ctx80, sizeof(Ctx80));
688 Bs3MemZero(&Ctx81, sizeof(Ctx81));
689 Bs3MemZero(&Ctx82, sizeof(Ctx82));
690 Bs3MemZero(&Ctx83, sizeof(Ctx83));
691 Bs3MemZero(&CtxTmp, sizeof(CtxTmp));
692 Bs3MemZero(&CtxTmp2, sizeof(CtxTmp2));
693
694 /* Context array. */
695 apCtx8x[0] = &Ctx80;
696 apCtx8x[1] = &Ctx81;
697 apCtx8x[2] = &Ctx82;
698 apCtx8x[3] = &Ctx83;
699
700# if TMPL_BITS != 16
701 /* Allocate memory for playing around with the IDT. */
702 pbIdtCopyAlloc = NULL;
703 if (BS3_MODE_IS_PAGED(g_bTestMode))
704 pbIdtCopyAlloc = Bs3MemAlloc(BS3MEMKIND_FLAT32, 12*_1K);
705# endif
706
707 /*
708 * IDT entries 80 thru 83 are assigned DPLs according to their number.
709 * (We'll be using more, but this'll do for now.)
710 */
711 paIdt[0x80 << cIdteShift].Gate.u2Dpl = 0;
712 paIdt[0x81 << cIdteShift].Gate.u2Dpl = 1;
713 paIdt[0x82 << cIdteShift].Gate.u2Dpl = 2;
714 paIdt[0x83 << cIdteShift].Gate.u2Dpl = 3;
715
716 Bs3RegCtxSave(&Ctx80);
717 Ctx80.rsp.u -= 0x300;
718 Ctx80.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int80);
719# if TMPL_BITS == 16
720 Ctx80.cs = BS3_MODE_IS_RM_OR_V86(g_bTestMode) ? BS3_SEL_TEXT16 : BS3_SEL_R0_CS16;
721# elif TMPL_BITS == 32
722 g_uBs3TrapEipHint = Ctx80.rip.u32;
723# endif
724 Bs3MemCpy(&Ctx81, &Ctx80, sizeof(Ctx80));
725 Ctx81.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int81);
726 Bs3MemCpy(&Ctx82, &Ctx80, sizeof(Ctx80));
727 Ctx82.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int82);
728 Bs3MemCpy(&Ctx83, &Ctx80, sizeof(Ctx80));
729 Ctx83.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int83);
730
731 /*
732 * Check that all the above gates work from ring-0.
733 */
734 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
735 {
736 g_usBs3TestStep = iCtx;
737# if TMPL_BITS == 32
738 g_uBs3TrapEipHint = apCtx8x[iCtx]->rip.u32;
739# endif
740 Bs3TrapSetJmpAndRestore(apCtx8x[iCtx], &TrapCtx);
741 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, apCtx8x[iCtx], 0x80+iCtx /*bXcpt*/);
742 }
743
744 /*
745 * Check that the gate DPL checks works.
746 * Check that the gate DPL checks work.
747 g_usBs3TestStep = 100;
748 for (iRing = 0; iRing <= 3; iRing++)
749 {
750 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
751 {
752 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
753 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
754# if TMPL_BITS == 32
755 g_uBs3TrapEipHint = CtxTmp.rip.u32;
756# endif
757 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
758 if (iCtx < iRing)
759 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
760 else
761 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
762 g_usBs3TestStep++;
763 }
764 }
765
766 /*
767 * Modify the gate CS value and run the handler at a different CPL.
768 * Throw RPL variations into the mix (completely ignored) together
769 * with gate presence.
770 * 1. CPL <= GATE.DPL
771 * 2. GATE.P
772 * 3. GATE.CS.DPL <= CPL (non-conforming segments)
773 */
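    /* Loop variable mapping for the nested loops below: i = DPL of the gate's
       CS descriptor, iRing = CPL we invoke from, iCtx = gate 80h+iCtx (whose
       gate DPL equals iCtx), j = RPL bits in the selector, k = gate present bit. */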
774 g_usBs3TestStep = 1000;
775 for (i = 0; i <= 3; i++)
776 {
777 for (iRing = 0; iRing <= 3; iRing++)
778 {
779 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
780 {
781# if TMPL_BITS == 32
782 g_uBs3TrapEipHint = apCtx8x[iCtx]->rip.u32;
783# endif
784 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
785 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
786
787 for (j = 0; j <= 3; j++)
788 {
789 uint16_t const uCs = (uSysR0Cs | j) + (i << BS3_SEL_RING_SHIFT);
790 for (k = 0; k < 2; k++)
791 {
792 g_usBs3TestStep++;
793 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x\n", g_usBs3TestStep, iCtx, iRing, i, uCs);*/
794 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
795 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = k;
796 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
797 /*Bs3TrapPrintFrame(&TrapCtx);*/
798 if (iCtx < iRing)
799 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
800 else if (k == 0)
801 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
802 else if (i > iRing)
803 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, uCs & X86_SEL_MASK_OFF_RPL);
804 else
805 {
806 uint16_t uExpectedCs = uCs & X86_SEL_MASK_OFF_RPL;
807 if (i <= iCtx && i <= iRing)
808 uExpectedCs |= i;
809 bs3CpuBasic2_CompareTrapCtx2(&TrapCtx, &CtxTmp, 2 /*int 8xh*/, 0x80 + iCtx /*bXcpt*/, uExpectedCs);
810 }
811 }
812 }
813
814 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
815 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 1;
816 }
817 }
818 }
819 BS3_ASSERT(g_usBs3TestStep < 1600);
820
821 /*
822 * Various CS and SS related faults
823 *
824 * We temporarily reconfigure gates 80 and 83 with new CS selectors, the
825 * latter having a CS.DPL of 2 for testing ring transitions and SS loading
826 * without making it impossible to handle faults.
827 */
828 g_usBs3TestStep = 1600;
829 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
830 Bs3GdteTestPage00.Gen.u1Present = 0;
831 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
832 paIdt[0x80 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_00;
833
834 /* CS.PRESENT = 0 */
835 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
836 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
837 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
838 bs3CpuBasic2_FailedF("selector was accessed");
839 g_usBs3TestStep++;
840
841 /* Check that GATE.DPL is checked before CS.PRESENT. */
842 for (iRing = 1; iRing < 4; iRing++)
843 {
844 Bs3MemCpy(&CtxTmp, &Ctx80, sizeof(CtxTmp));
845 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
846 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
847 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, (0x80 << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
848 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
849 bs3CpuBasic2_FailedF("selector was accessed");
850 g_usBs3TestStep++;
851 }
852
853 /* CS.DPL mismatch takes precedence over CS.PRESENT = 0. */
854 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
855 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
856 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
857 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
858 bs3CpuBasic2_FailedF("CS selector was accessed");
859 g_usBs3TestStep++;
860 for (iDpl = 1; iDpl < 4; iDpl++)
861 {
862 Bs3GdteTestPage00.Gen.u2Dpl = iDpl;
863 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
864 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
865 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
866 bs3CpuBasic2_FailedF("CS selector was accessed");
867 g_usBs3TestStep++;
868 }
869
870 /* 1608: Check all the invalid CS selector types alone. */
871 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
872 for (i = 0; i < RT_ELEMENTS(g_aInvalidCsTypes); i++)
873 {
874 Bs3GdteTestPage00.Gen.u4Type = g_aInvalidCsTypes[i].u4Type;
875 Bs3GdteTestPage00.Gen.u1DescType = g_aInvalidCsTypes[i].u1DescType;
876 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
877 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
878 if (Bs3GdteTestPage00.Gen.u4Type != g_aInvalidCsTypes[i].u4Type)
879 bs3CpuBasic2_FailedF("Invalid CS type %#x/%u -> %#x/%u\n",
880 g_aInvalidCsTypes[i].u4Type, g_aInvalidCsTypes[i].u1DescType,
881 Bs3GdteTestPage00.Gen.u4Type, Bs3GdteTestPage00.Gen.u1DescType);
882 g_usBs3TestStep++;
883
884 /* Incorrect CS.TYPE takes precedence over CS.PRESENT = 0. */
885 Bs3GdteTestPage00.Gen.u1Present = 0;
886 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
887 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
888 Bs3GdteTestPage00.Gen.u1Present = 1;
889 g_usBs3TestStep++;
890 }
891
892 /* Fix CS again. */
893 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
894
895 /* 1632: Test SS. */
896 if (!BS3_MODE_IS_64BIT_SYS(g_bTestMode))
897 {
898 uint16_t BS3_FAR *puTssSs2 = BS3_MODE_IS_16BIT_SYS(g_bTestMode) ? &Bs3Tss16.ss2 : &Bs3Tss32.ss2;
899 uint16_t const uSavedSs2 = *puTssSs2;
900 X86DESC const SavedGate83 = paIdt[0x83 << cIdteShift];
901
902 /* Make the handler execute in ring-2. */
903 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
904 Bs3GdteTestPage02.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
905 paIdt[0x83 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_02 | 2;
906
907 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
908 Bs3RegCtxConvertToRingX(&CtxTmp, 3); /* yeah, from 3 so SS:xSP is reloaded. */
909 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
910 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
911 if (!(Bs3GdteTestPage02.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
912 bs3CpuBasic2_FailedF("CS selector was not accessed");
913 g_usBs3TestStep++;
914
915 /* Create an SS.DPL=2 stack segment and check that SS2.RPL matters and
916 that we get #SS if the selector isn't present. */
917 i = 0; /* used for cycling thru invalid CS types */
918 for (k = 0; k < 10; k++)
919 {
920 /* k=0: present,
921 k=1: not-present,
922 k=2: present but very low limit,
923 k=3: not-present, low limit.
924 k=4: present, read-only.
925 k=5: not-present, read-only.
926 k=6: present, code-selector.
927 k=7: not-present, code-selector.
928 k=8: present, read-write / no access + system (=LDT).
929 k=9: not-present, read-write / no access + system (=LDT).
930 */
931 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Ss + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
932 Bs3GdteTestPage03.Gen.u1Present = !(k & 1);
933 if (k >= 8)
934 {
935 Bs3GdteTestPage03.Gen.u1DescType = 0; /* system */
936 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_RW; /* = LDT */
937 }
938 else if (k >= 6)
939 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_ER;
940 else if (k >= 4)
941 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_RO;
942 else if (k >= 2)
943 {
944 Bs3GdteTestPage03.Gen.u16LimitLow = 0x400;
945 Bs3GdteTestPage03.Gen.u4LimitHigh = 0;
946 Bs3GdteTestPage03.Gen.u1Granularity = 0;
947 }
948
949 for (iDpl = 0; iDpl < 4; iDpl++)
950 {
951 Bs3GdteTestPage03.Gen.u2Dpl = iDpl;
952
953 for (iRpl = 0; iRpl < 4; iRpl++)
954 {
955 *puTssSs2 = BS3_SEL_TEST_PAGE_03 | iRpl;
956 //Bs3TestPrintf("k=%u iDpl=%u iRpl=%u step=%u\n", k, iDpl, iRpl, g_usBs3TestStep);
957 Bs3GdteTestPage02.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
958 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
959 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
960 if (iRpl != 2 || iRpl != iDpl || k >= 4)
961 bs3CpuBasic2_CompareTsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03);
962 else if (k != 0)
963 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03,
964 k == 2 /*f486ResumeFlagHint*/);
965 else
966 {
967 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
968 if (TrapCtx.uHandlerSs != (BS3_SEL_TEST_PAGE_03 | 2))
969 bs3CpuBasic2_FailedF("uHandlerSs=%#x expected %#x\n", TrapCtx.uHandlerSs, BS3_SEL_TEST_PAGE_03 | 2);
970 }
971 if (!(Bs3GdteTestPage02.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
972 bs3CpuBasic2_FailedF("CS selector was not accessed");
973 if ( TrapCtx.bXcpt == 0x83
974 || (TrapCtx.bXcpt == X86_XCPT_SS && k == 2) )
975 {
976 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
977 bs3CpuBasic2_FailedF("SS selector was not accessed");
978 }
979 else if (Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
980 bs3CpuBasic2_FailedF("SS selector was accessed");
981 g_usBs3TestStep++;
982
983 /* +1: Modify the gate DPL to check that this is checked before SS.DPL and SS.PRESENT. */
984 paIdt[0x83 << cIdteShift].Gate.u2Dpl = 2;
985 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
986 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, (0x83 << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
987 paIdt[0x83 << cIdteShift].Gate.u2Dpl = 3;
988 g_usBs3TestStep++;
989
990 /* +2: Check that the CS.DPL check is done before the SS ones. Restoring the
991 ring-0 INT 83 context triggers the CS.DPL < CPL check. */
992 Bs3TrapSetJmpAndRestore(&Ctx83, &TrapCtx);
993 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx83, BS3_SEL_TEST_PAGE_02);
994 g_usBs3TestStep++;
995
996 /* +3: Now mark the CS selector not present and check that that also triggers before SS stuff. */
997 Bs3GdteTestPage02.Gen.u1Present = 0;
998 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
999 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_02);
1000 Bs3GdteTestPage02.Gen.u1Present = 1;
1001 g_usBs3TestStep++;
1002
1003 /* +4: Make the CS selector some invalid type and check it triggers before SS stuff. */
1004 Bs3GdteTestPage02.Gen.u4Type = g_aInvalidCsTypes[i].u4Type;
1005 Bs3GdteTestPage02.Gen.u1DescType = g_aInvalidCsTypes[i].u1DescType;
1006 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1007 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_02);
1008 Bs3GdteTestPage02.Gen.u4Type = X86_SEL_TYPE_ER_ACC;
1009 Bs3GdteTestPage02.Gen.u1DescType = 1;
1010 g_usBs3TestStep++;
1011
1012 /* +5: Now, make the CS selector limit too small and check that it triggers after SS trouble.
1013 The 286 had a simpler approach to these GP(0). */
1014 Bs3GdteTestPage02.Gen.u16LimitLow = 0;
1015 Bs3GdteTestPage02.Gen.u4LimitHigh = 0;
1016 Bs3GdteTestPage02.Gen.u1Granularity = 0;
1017 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1018 if (f286)
1019 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, 0 /*uErrCd*/);
1020 else if (iRpl != 2 || iRpl != iDpl || k >= 4)
1021 bs3CpuBasic2_CompareTsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03);
1022 else if (k != 0)
1023 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03, k == 2 /*f486ResumeFlagHint*/);
1024 else
1025 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, 0 /*uErrCd*/);
1026 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1027 g_usBs3TestStep++;
1028 }
1029 }
1030 }
1031
1032 /* Check all the invalid SS selector types alone. */
1033 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1034 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Ss + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1035 *puTssSs2 = BS3_SEL_TEST_PAGE_03 | 2;
1036 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1037 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
1038 g_usBs3TestStep++;
1039 for (i = 0; i < RT_ELEMENTS(g_aInvalidSsTypes); i++)
1040 {
1041 Bs3GdteTestPage03.Gen.u4Type = g_aInvalidSsTypes[i].u4Type;
1042 Bs3GdteTestPage03.Gen.u1DescType = g_aInvalidSsTypes[i].u1DescType;
1043 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1044 bs3CpuBasic2_CompareTsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03);
1045 if (Bs3GdteTestPage03.Gen.u4Type != g_aInvalidSsTypes[i].u4Type)
1046 bs3CpuBasic2_FailedF("Invalid SS type %#x/%u -> %#x/%u\n",
1047 g_aInvalidSsTypes[i].u4Type, g_aInvalidSsTypes[i].u1DescType,
1048 Bs3GdteTestPage03.Gen.u4Type, Bs3GdteTestPage03.Gen.u1DescType);
1049 g_usBs3TestStep++;
1050 }
1051
1052 /*
1053 * Continue the SS experiments with an expand-down segment. We'll use
1054 * the same setup as we already have, with gate 83h being DPL 3 and
1055 * having CS.DPL=2.
1056 *
1057 * Expand down segments are weird. The valid area is practically speaking
1058 * reversed. So, a 16-bit segment with a limit of 0x6000 will have valid
1059 * addresses from 0xffff thru 0x6001.
1060 *
1061 * So, with expand down segments we can more easily cut partially into the
1062 * pushing of the iret frame and trigger more interesting behavior than
1063 * with regular "expand up" segments where the whole pushing area is either
1064 * all fine or not fine.
1065 */
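    /* In other words: with granularity 0 the valid offsets of the expand-down
       SS are u16LimitLow+1 thru 0xffff, so the loops below step the limit down
       one byte at a time; while it still covers the bottom of the interrupt
       frame the pushes fault with #SS, and once it drops just below the frame
       the interrupt goes through and the dummy handler's UD2 is reached. */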
1066 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1067 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Ss + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1068 Bs3GdteTestPage03.Gen.u2Dpl = 2;
1069 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_RW_DOWN;
1070 *puTssSs2 = BS3_SEL_TEST_PAGE_03 | 2;
1071
1072 /* First test, limit = max --> no bytes accessible --> #GP */
1073 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1074 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03, true /*f486ResumeFlagHint*/);
1075
1076 /* Second test, limit = 0 --> all but byte zero accessible --> works */
1077 Bs3GdteTestPage03.Gen.u16LimitLow = 0;
1078 Bs3GdteTestPage03.Gen.u4LimitHigh = 0;
1079 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1080 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
1081
1082 /* Modify the gate handler to be a dummy that immediately does UD2
1083 and triggers #UD, then advance the limit down till we get the #UD. */
1084 Bs3GdteTestPage03.Gen.u1Granularity = 0;
1085
1086 Bs3MemCpy(&CtxTmp2, &CtxTmp, sizeof(CtxTmp2)); /* #UD result context */
1087 if (g_f16BitSys)
1088 {
1089 CtxTmp2.rip.u = g_bs3CpuBasic2_ud2_FlatAddr - BS3_ADDR_BS3TEXT16;
1090 Bs3Trap16SetGate(0x83, X86_SEL_TYPE_SYS_286_INT_GATE, 3, BS3_SEL_TEST_PAGE_02, CtxTmp2.rip.u16, 0 /*cParams*/);
1091 CtxTmp2.rsp.u = Bs3Tss16.sp2 - 2*5;
1092 }
1093 else
1094 {
1095 CtxTmp2.rip.u = g_bs3CpuBasic2_ud2_FlatAddr;
1096 Bs3Trap32SetGate(0x83, X86_SEL_TYPE_SYS_386_INT_GATE, 3, BS3_SEL_TEST_PAGE_02, CtxTmp2.rip.u32, 0 /*cParams*/);
1097 CtxTmp2.rsp.u = Bs3Tss32.esp2 - 4*5;
1098 }
1099 CtxTmp2.bMode = g_bTestMode; /* g_bBs3CurrentMode not changed by the UD2 handler. */
1100 CtxTmp2.cs = BS3_SEL_TEST_PAGE_02 | 2;
1101 CtxTmp2.ss = BS3_SEL_TEST_PAGE_03 | 2;
1102 CtxTmp2.bCpl = 2;
1103
1104 /* test run. */
1105 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1106 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxTmp2);
1107 g_usBs3TestStep++;
1108
1109 /* Real run. */
1110 i = (g_f16BitSys ? 2 : 4) * 6 + 1;
1111 while (i-- > 0)
1112 {
1113 Bs3GdteTestPage03.Gen.u16LimitLow = CtxTmp2.rsp.u16 + i - 1;
1114 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1115 if (i > 0)
1116 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03, true /*f486ResumeFlagHint*/);
1117 else
1118 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxTmp2);
1119 g_usBs3TestStep++;
1120 }
1121
1122 /* Do a run where we do the same-ring kind of access. */
1123 Bs3RegCtxConvertToRingX(&CtxTmp, 2);
1124 if (g_f16BitSys)
1125 {
1126 CtxTmp2.rsp.u32 = CtxTmp.rsp.u32 - 2*3;
1127 i = 2*3 - 1;
1128 }
1129 else
1130 {
1131 CtxTmp2.rsp.u32 = CtxTmp.rsp.u32 - 4*3;
1132 i = 4*3 - 1;
1133 }
1134 CtxTmp.ss = BS3_SEL_TEST_PAGE_03 | 2;
1135 CtxTmp2.ds = CtxTmp.ds;
1136 CtxTmp2.es = CtxTmp.es;
1137 CtxTmp2.fs = CtxTmp.fs;
1138 CtxTmp2.gs = CtxTmp.gs;
1139 while (i-- > 0)
1140 {
1141 Bs3GdteTestPage03.Gen.u16LimitLow = CtxTmp2.rsp.u16 + i - 1;
1142 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1143 if (i > 0)
1144 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, 0 /*BS3_SEL_TEST_PAGE_03*/, true /*f486ResumeFlagHint*/);
1145 else
1146 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxTmp2);
1147 g_usBs3TestStep++;
1148 }
1149
1150 *puTssSs2 = uSavedSs2;
1151 paIdt[0x83 << cIdteShift] = SavedGate83;
1152 }
1153 paIdt[0x80 << cIdteShift].Gate.u16Sel = uSysR0Cs;
1154 BS3_ASSERT(g_usBs3TestStep < 3000);
1155
1156 /*
1157 * Modify the gate CS value with a conforming segment.
1158 */
1159 g_usBs3TestStep = 3000;
1160 for (i = 0; i <= 3; i++) /* cs.dpl */
1161 {
1162 for (iRing = 0; iRing <= 3; iRing++)
1163 {
1164 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1165 {
1166 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1167 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1168# if TMPL_BITS == 32
1169 g_uBs3TrapEipHint = CtxTmp.rip.u32;
1170# endif
1171
1172 for (j = 0; j <= 3; j++) /* rpl */
1173 {
1174 uint16_t const uCs = (uSysR0CsConf | j) + (i << BS3_SEL_RING_SHIFT);
1175 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x\n", g_usBs3TestStep, iCtx, iRing, i, uCs);*/
1176 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
1177 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1178 //Bs3TestPrintf("%u/%u/%u/%u: cs=%04x hcs=%04x xcpt=%02x\n", i, iRing, iCtx, j, uCs, TrapCtx.uHandlerCs, TrapCtx.bXcpt);
1179 /*Bs3TrapPrintFrame(&TrapCtx);*/
1180 g_usBs3TestStep++;
1181 if (iCtx < iRing)
1182 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1183 else if (i > iRing)
1184 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, uCs & X86_SEL_MASK_OFF_RPL);
1185 else
1186 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
1187 }
1188 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
1189 }
1190 }
1191 }
1192 BS3_ASSERT(g_usBs3TestStep < 3500);
1193
1194 /*
1195 * The gates must be 64-bit in long mode.
1196 */
1197 if (cIdteShift != 0)
1198 {
1199 g_usBs3TestStep = 3500;
1200 for (i = 0; i <= 3; i++)
1201 {
1202 for (iRing = 0; iRing <= 3; iRing++)
1203 {
1204 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1205 {
1206 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1207 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1208
1209 for (j = 0; j < 2; j++)
1210 {
1211 static const uint16_t s_auCSes[2] = { BS3_SEL_R0_CS16, BS3_SEL_R0_CS32 };
1212 uint16_t uCs = (s_auCSes[j] | i) + (i << BS3_SEL_RING_SHIFT);
1213 g_usBs3TestStep++;
1214 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x\n", g_usBs3TestStep, iCtx, iRing, i, uCs);*/
1215 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
1216 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1217 /*Bs3TrapPrintFrame(&TrapCtx);*/
1218 if (iCtx < iRing)
1219 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1220 else
1221 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, uCs & X86_SEL_MASK_OFF_RPL);
1222 }
1223 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
1224 }
1225 }
1226 }
1227 BS3_ASSERT(g_usBs3TestStep < 4000);
1228 }
1229
1230 /*
1231 * IDT limit check. The 286 does not access X86DESCGATE::u16OffsetHigh.
1232 */
1233 g_usBs3TestStep = 5000;
1234 i = (0x80 << (cIdteShift + 3)) - 1;
1235 j = (0x82 << (cIdteShift + 3)) - (!f286 ? 1 : 3);
1236 k = (0x83 << (cIdteShift + 3)) - 1;
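    /* With 8-byte IDT entries (cIdteShift = 0) this scans IDT limits 0x3ff thru
       0x417; int 81h stops faulting once the limit reaches j, i.e. 0x40f (0x40d
       on the 286, which ignores the gate's high offset word). */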
1237 for (; i <= k; i++, g_usBs3TestStep++)
1238 {
1239 Idtr = IdtrSaved;
1240 Idtr.cbIdt = i;
1241 ASMSetIDTR(&Idtr);
1242 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1243 if (i < j)
1244 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx81, (0x81 << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1245 else
1246 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1247 }
1248 ASMSetIDTR(&IdtrSaved);
1249 BS3_ASSERT(g_usBs3TestStep < 5100);
1250
1251# if TMPL_BITS != 16 /* Only do the paging related stuff in 32-bit and 64-bit modes. */
1252
1253 /*
1254 * IDT page not present. Placing the IDT copy such that 0x80 is on the
1255 * first page and 0x81 is on the second page. We then proceed to move
1256 * it down byte by byte to check that any inaccessible byte means #PF.
1257 *
1258 * Note! We must reload the alternative IDTR for each run as any kind of
1259 * printing to the string (like error reporting) will cause a switch
1260 * to real mode and back, reloading the default IDTR.
1261 */
1262 g_usBs3TestStep = 5200;
1263 if (BS3_MODE_IS_PAGED(g_bTestMode) && pbIdtCopyAlloc)
1264 {
1265 uint32_t const uCr2Expected = Bs3SelPtrToFlat(pbIdtCopyAlloc) + _4K;
1266 for (j = 0; j < cbIdte; j++)
1267 {
1268 pIdtCopy = (PX86DESC)&pbIdtCopyAlloc[_4K - cbIdte * 0x81 - j];
1269 Bs3MemCpy(pIdtCopy, paIdt, cbIdte * 256);
1270
1271 Idtr.cbIdt = IdtrSaved.cbIdt;
1272 Idtr.pIdt = Bs3SelPtrToFlat(pIdtCopy);
1273
1274 ASMSetIDTR(&Idtr);
1275 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1276 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1277 g_usBs3TestStep++;
1278
1279 ASMSetIDTR(&Idtr);
1280 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1281 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1282 g_usBs3TestStep++;
1283
1284 rc = Bs3PagingProtect(uCr2Expected, _4K, 0 /*fSet*/, X86_PTE_P /*fClear*/);
1285 if (RT_SUCCESS(rc))
1286 {
1287 ASMSetIDTR(&Idtr);
1288 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1289 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1290 g_usBs3TestStep++;
1291
1292 ASMSetIDTR(&Idtr);
1293 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1294 if (f486Plus)
1295 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, 0 /*uErrCd*/, uCr2Expected);
1296 else
1297 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, X86_TRAP_PF_RW /*uErrCd*/, uCr2Expected + 4 - RT_MIN(j, 4));
1298 g_usBs3TestStep++;
1299
1300 Bs3PagingProtect(uCr2Expected, _4K, X86_PTE_P /*fSet*/, 0 /*fClear*/);
1301
1302 /* Check that the entry type is checked after the whole IDTE has been cleared for #PF. */
1303 pIdtCopy[0x80 << cIdteShift].Gate.u4Type = 0;
1304 rc = Bs3PagingProtect(uCr2Expected, _4K, 0 /*fSet*/, X86_PTE_P /*fClear*/);
1305 if (RT_SUCCESS(rc))
1306 {
1307 ASMSetIDTR(&Idtr);
1308 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1309 if (f486Plus)
1310 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, 0 /*uErrCd*/, uCr2Expected);
1311 else
1312 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, X86_TRAP_PF_RW /*uErrCd*/, uCr2Expected + 4 - RT_MIN(j, 4));
1313 g_usBs3TestStep++;
1314
1315 Bs3PagingProtect(uCr2Expected, _4K, X86_PTE_P /*fSet*/, 0 /*fClear*/);
1316 }
1317 }
1318 else
1319 Bs3TestPrintf("Bs3PagingProtectPtr: %d\n", i);
1320
1321 ASMSetIDTR(&IdtrSaved);
1322 }
1323 }
1324
1325 /*
1326 * The read/write and user/supervisor bits of the IDT PTEs are irrelevant.
1327 */
1328 g_usBs3TestStep = 5300;
1329 if (BS3_MODE_IS_PAGED(g_bTestMode) && pbIdtCopyAlloc)
1330 {
1331 Bs3MemCpy(pbIdtCopyAlloc, paIdt, cbIdte * 256);
1332 Idtr.cbIdt = IdtrSaved.cbIdt;
1333 Idtr.pIdt = Bs3SelPtrToFlat(pbIdtCopyAlloc);
1334
1335 ASMSetIDTR(&Idtr);
1336 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1337 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1338 g_usBs3TestStep++;
1339
1340 rc = Bs3PagingProtect(Idtr.pIdt, _4K, 0 /*fSet*/, X86_PTE_RW | X86_PTE_US /*fClear*/);
1341 if (RT_SUCCESS(rc))
1342 {
1343 ASMSetIDTR(&Idtr);
1344 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1345 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1346 g_usBs3TestStep++;
1347
1348 Bs3PagingProtect(Idtr.pIdt, _4K, X86_PTE_RW | X86_PTE_US /*fSet*/, 0 /*fClear*/);
1349 }
1350 ASMSetIDTR(&IdtrSaved);
1351 }
1352
1353 /*
1354 * Check that CS.u1Accessed is set to 1. Use test page selectors #0 and #3 together
1355 * with interrupt gates 80h and 83h, respectively.
1356 */
1357/** @todo Throw in SS.u1Accessed too. */
1358 g_usBs3TestStep = 5400;
1359 if (BS3_MODE_IS_PAGED(g_bTestMode) && pbIdtCopyAlloc)
1360 {
1361 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
1362 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1363 paIdt[0x80 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_00;
1364
1365 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Cs + (3 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1366 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1367 paIdt[0x83 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_03; /* rpl is ignored, so leave it as zero. */
1368
1369 /* Check that the CS.A bit is being set on a general basis and that
1370 the special CS values work with our generic handler code. */
1371 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1372 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1373 if (!(Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1374 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed", Bs3GdteTestPage00.Gen.u4Type);
1375 g_usBs3TestStep++;
1376
1377 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
1378 Bs3RegCtxConvertToRingX(&CtxTmp, 3);
1379 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1380 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83 /*bXcpt*/);
1381 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1382 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage03.Gen.u4Type);
1383 if (TrapCtx.uHandlerCs != (BS3_SEL_TEST_PAGE_03 | 3))
1384 bs3CpuBasic2_FailedF("uHandlerCs=%#x, expected %#x", TrapCtx.uHandlerCs, (BS3_SEL_TEST_PAGE_03 | 3));
1385 g_usBs3TestStep++;
1386
1387 /*
1388 * Now check that setting CS.u1Accessed to 1 does __NOT__ trigger a page
1389 * fault due to the RW bit being zero.
1390 * (We check both with and without the WP bit on 80486 and later.)
1391 */
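 /* Note: Setting the accessed bit means the CPU has to write to the descriptor in
    the GDT. Below, the GDT page holding the test selectors is made read-only and
    the test verifies that the bit still gets set without any #PF, both with
    CR0.WP set (486+ only) and with it clear. */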
1392 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486)
1393 ASMSetCR0(uCr0Saved | X86_CR0_WP);
1394
1395 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1396 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1397 rc = Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, 0 /*fSet*/, X86_PTE_RW /*fClear*/);
1398 if (RT_SUCCESS(rc))
1399 {
1400 /* ring-0 handler */
1401 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1402 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1403 if (!(Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1404 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage00.Gen.u4Type);
1405 g_usBs3TestStep++;
1406
1407 /* ring-3 handler */
1408 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
1409 Bs3RegCtxConvertToRingX(&CtxTmp, 3);
1410 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1411 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83 /*bXcpt*/);
1412 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1413 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage03.Gen.u4Type);
1414 g_usBs3TestStep++;
1415
1416 /* clear WP and repeat the above. */
1417 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486)
1418 ASMSetCR0(uCr0Saved & ~X86_CR0_WP);
1419 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED; /* (No need to RW the page - ring-0, WP=0.) */
1420 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED; /* (No need to RW the page - ring-0, WP=0.) */
1421
1422 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1423 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1424 if (!(Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1425 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage00.Gen.u4Type);
1426 g_usBs3TestStep++;
1427
1428 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1429 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83 /*bXcpt*/);
1430 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1431 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage03.Gen.u4Type);
1432 g_usBs3TestStep++;
1433
1434 Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, X86_PTE_RW /*fSet*/, 0 /*fClear*/);
1435 }
1436
1437 ASMSetCR0(uCr0Saved);
1438
1439 /*
1440 * While we're here, check that if the CS GDT entry is a non-present
1441 * page we do get a #PF with the right error code and CR2.
1442 */
1443 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED; /* Just for fun, really a pointless gesture. */
1444 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1445 rc = Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, 0 /*fSet*/, X86_PTE_P /*fClear*/);
1446 if (RT_SUCCESS(rc))
1447 {
1448 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1449 if (f486Plus)
1450 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx80, 0 /*uErrCd*/, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00);
1451 else
1452 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx80, X86_TRAP_PF_RW, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00 + 4);
1453 g_usBs3TestStep++;
1454
1455 /* Do it from ring-3 to check the ErrCd, which, it turns out, doesn't set X86_TRAP_PF_US. */
1456 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
1457 Bs3RegCtxConvertToRingX(&CtxTmp, 3);
1458 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1459
1460 if (f486Plus)
1461 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &CtxTmp, 0 /*uErrCd*/, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_03);
1462 else
1463 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &CtxTmp, X86_TRAP_PF_RW, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_03 + 4);
1464 g_usBs3TestStep++;
1465
1466 Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, X86_PTE_P /*fSet*/, 0 /*fClear*/);
1467 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
1468 bs3CpuBasic2_FailedF("u4Type=%#x, accessed! #1", Bs3GdteTestPage00.Gen.u4Type);
1469 if (Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
1470 bs3CpuBasic2_FailedF("u4Type=%#x, accessed! #2", Bs3GdteTestPage03.Gen.u4Type);
1471 }
1472
1473 /* restore */
1474 paIdt[0x80 << cIdteShift].Gate.u16Sel = uSysR0Cs;
1475 paIdt[0x83 << cIdteShift].Gate.u16Sel = uSysR0Cs;// + (3 << BS3_SEL_RING_SHIFT) + 3;
1476 }
1477
1478# endif /* 32 || 64*/
1479
1480 /*
1481 * Check broad EFLAGS effects.
1482 */
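 /* Two passes per context/ring combination: first with (nearly) all the
    user-settable status/control flags set, then with them cleared. The handler
    should observe the caller's flags unchanged, except for RF which is masked
    out of the comparison below since it isn't reported back. */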
1483 g_usBs3TestStep = 5600;
1484 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1485 {
1486 for (iRing = 0; iRing < 4; iRing++)
1487 {
1488 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1489 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1490
1491 /* all set */
1492 CtxTmp.rflags.u32 &= X86_EFL_VM | X86_EFL_1;
1493 CtxTmp.rflags.u32 |= X86_EFL_CF | X86_EFL_PF | X86_EFL_AF | X86_EFL_ZF | X86_EFL_SF /* | X86_EFL_TF */ /*| X86_EFL_IF*/
1494 | X86_EFL_DF | X86_EFL_OF | X86_EFL_IOPL /* | X86_EFL_NT*/;
1495 if (f486Plus)
1496 CtxTmp.rflags.u32 |= X86_EFL_AC;
1497 if (f486Plus && !g_f16BitSys)
1498 CtxTmp.rflags.u32 |= X86_EFL_RF;
1499 if (g_uBs3CpuDetected & BS3CPU_F_CPUID)
1500 CtxTmp.rflags.u32 |= X86_EFL_VIF | X86_EFL_VIP;
1501 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1502 CtxTmp.rflags.u32 &= ~X86_EFL_RF;
1503
1504 if (iCtx >= iRing)
1505 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
1506 else
1507 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1508 uExpected = CtxTmp.rflags.u32
1509 & ( X86_EFL_1 | X86_EFL_CF | X86_EFL_PF | X86_EFL_AF | X86_EFL_ZF | X86_EFL_SF | X86_EFL_DF
1510 | X86_EFL_OF | X86_EFL_IOPL | X86_EFL_NT | X86_EFL_VM | X86_EFL_AC | X86_EFL_VIF | X86_EFL_VIP
1511 | X86_EFL_ID /*| X86_EFL_TF*/ /*| X86_EFL_IF*/ /*| X86_EFL_RF*/ );
1512 if (TrapCtx.fHandlerRfl != uExpected)
1513 bs3CpuBasic2_FailedF("unexpected handler rflags value: %RX64 expected %RX32; CtxTmp.rflags=%RX64 Ctx.rflags=%RX64\n",
1514 TrapCtx.fHandlerRfl, uExpected, CtxTmp.rflags.u, TrapCtx.Ctx.rflags.u);
1515 g_usBs3TestStep++;
1516
1517 /* all cleared */
1518 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) < BS3CPU_80286)
1519 CtxTmp.rflags.u32 = apCtx8x[iCtx]->rflags.u32 & (X86_EFL_RA1_MASK | UINT16_C(0xf000));
1520 else
1521 CtxTmp.rflags.u32 = apCtx8x[iCtx]->rflags.u32 & (X86_EFL_VM | X86_EFL_RA1_MASK);
1522 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1523 if (iCtx >= iRing)
1524 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
1525 else
1526 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1527 uExpected = CtxTmp.rflags.u32;
1528 if (TrapCtx.fHandlerRfl != uExpected)
1529 bs3CpuBasic2_FailedF("unexpected handler rflags value: %RX64 expected %RX32; CtxTmp.rflags=%RX64 Ctx.rflags=%RX64\n",
1530 TrapCtx.fHandlerRfl, uExpected, CtxTmp.rflags.u, TrapCtx.Ctx.rflags.u);
1531 g_usBs3TestStep++;
1532 }
1533 }
1534
1535/** @todo CS.LIMIT / canonical(CS) */
1536
1537
1538 /*
1539 * Check invalid gate types.
1540 */
1541 g_usBs3TestStep = 32000;
1542 for (iRing = 0; iRing <= 3; iRing++)
1543 {
1544 static const uint16_t s_auCSes[] = { BS3_SEL_R0_CS16, BS3_SEL_R0_CS32, BS3_SEL_R0_CS64,
1545 BS3_SEL_TSS16, BS3_SEL_TSS32, BS3_SEL_TSS64, 0, BS3_SEL_SPARE_1f };
1546 static uint16_t const s_auInvlTypes64[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13,
1547 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
1548 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f };
1549 static uint16_t const s_auInvlTypes32[] = { 0, 1, 2, 3, 8, 9, 10, 11, 13,
1550 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
1551 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
1552 /*286:*/ 12, 14, 15 };
1553 uint16_t const * const pauInvTypes = cIdteShift != 0 ? s_auInvlTypes64 : s_auInvlTypes32;
1554 uint16_t const cInvTypes = cIdteShift != 0 ? RT_ELEMENTS(s_auInvlTypes64)
1555 : f386Plus ? RT_ELEMENTS(s_auInvlTypes32) - 3 : RT_ELEMENTS(s_auInvlTypes32);
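 /* Note: the tables above list only the gate types the CPU must reject; the
    valid IDTE types for the mode are deliberately left out. The three trailing
    286-only entries in the 32-bit table are sliced off again for 386+ via the
    cInvTypes calculation above. */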
1556
1557
1558 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1559 {
1560 unsigned iType;
1561
1562 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1563 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1564# if TMPL_BITS == 32
1565 g_uBs3TrapEipHint = CtxTmp.rip.u32;
1566# endif
1567 for (iType = 0; iType < cInvTypes; iType++)
1568 {
1569 uint8_t const bSavedType = paIdt[(0x80 + iCtx) << cIdteShift].Gate.u4Type;
1570 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1DescType = pauInvTypes[iType] >> 4;
1571 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u4Type = pauInvTypes[iType] & 0xf;
1572
1573 for (i = 0; i < 4; i++)
1574 {
1575 for (j = 0; j < RT_ELEMENTS(s_auCSes); j++)
1576 {
1577 uint16_t uCs = (unsigned)(s_auCSes[j] - BS3_SEL_R0_FIRST) < (unsigned)(4 << BS3_SEL_RING_SHIFT)
1578 ? (s_auCSes[j] | i) + (i << BS3_SEL_RING_SHIFT)
1579 : s_auCSes[j] | i;
1580 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x type=%#x\n", g_usBs3TestStep, iCtx, iRing, i, uCs, pauInvTypes[iType]);*/
1581 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
1582 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1583 g_usBs3TestStep++;
1584 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1585
1586 /* Mark it not-present to check that invalid type takes precedence. */
1587 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 0;
1588 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1589 g_usBs3TestStep++;
1590 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1591 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 1;
1592 }
1593 }
1594
1595 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
1596 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u4Type = bSavedType;
1597 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1DescType = 0;
1598 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 1;
1599 }
1600 }
1601 }
1602 BS3_ASSERT(g_usBs3TestStep < 62000U && g_usBs3TestStep > 32000U);
1603
1604
1605 /** @todo
1606 * - Run \#PF and \#GP (and others?) at CPLs other than zero.
1607 * - Quickly generate all faults.
1608 * - All the v8086 peculiarities.
1609 */
1610
1611# if TMPL_BITS != 16
1612 Bs3MemFree(pbIdtCopyAlloc, 12*_1K);
1613# endif
1614}
1615#endif /* convert me */
1616
1617
1618static void bs3CpuBasic2_RaiseXcpt11Worker(uint8_t bMode, uint8_t *pbBuf, unsigned cbCacheLine, bool fAm, bool fPf,
1619 RTCCUINTXREG uFlatBufPtr, BS3CPUBASIC2PFTTSTCMNMODE const BS3_FAR *pCmn)
1620{
1621 BS3TRAPFRAME TrapCtx;
1622 BS3REGCTX Ctx;
1623 BS3REGCTX CtxUdExpected;
1624 uint8_t const cRings = bMode == BS3_MODE_RM ? 1 : 4;
1625 uint8_t iRing;
1626 uint16_t iTest;
1627
1628 /* make sure they're allocated */
1629 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
1630 Bs3MemZero(&Ctx, sizeof(Ctx));
1631 Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
1632
1633 /*
1634 * Test all relevant rings.
1635 *
1636 * The memory operand is ds:xBX, so point it to pbBuf.
1637 * The test snippets mostly use xAX as operand, with the div
1638 * one also using xDX, so make sure they make some sense.
1639 */
1640 Bs3RegCtxSaveEx(&Ctx, bMode, 512);
1641
1642 Ctx.cr0.u32 &= ~(X86_CR0_MP | X86_CR0_EM | X86_CR0_TS); /* so fninit + fld works */
1643
1644 for (iRing = BS3_MODE_IS_V86(bMode) ? 3 : 0; iRing < cRings; iRing++)
1645 {
1646 uint32_t uEbx;
1647 uint8_t fAc;
1648
1649 if (!BS3_MODE_IS_RM_OR_V86(bMode))
1650 Bs3RegCtxConvertToRingX(&Ctx, iRing);
1651
1652 if (!fPf || BS3_MODE_IS_32BIT_CODE(bMode) || BS3_MODE_IS_64BIT_CODE(bMode))
1653 Bs3RegCtxSetGrpDsFromCurPtr(&Ctx, &Ctx.rbx, pbBuf);
1654 else
1655 {
1656 /* Bs3RegCtxSetGrpDsFromCurPtr barfs when trying to output a sel:off address for the aliased buffer. */
1657 Ctx.ds = BS3_FP_SEG(pbBuf);
1658 Ctx.rbx.u32 = BS3_FP_OFF(pbBuf);
1659 }
1660 uEbx = Ctx.rbx.u32;
1661
1662 Ctx.rax.u = (bMode & BS3_MODE_CODE_MASK) == BS3_MODE_CODE_64
1663 ? UINT64_C(0x80868028680386fe) : UINT32_C(0x65020686);
1664 Ctx.rdx.u = UINT32_C(0x00100100); /* careful with range due to div */
1665
1666 Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
1667
1668 /*
1669 * AC flag loop.
1670 */
1671 for (fAc = 0; fAc < 2; fAc++)
1672 {
1673 if (fAc)
1674 Ctx.rflags.u32 |= X86_EFL_AC;
1675 else
1676 Ctx.rflags.u32 &= ~X86_EFL_AC;
1677
1678 /*
1679 * Loop over the test snippets.
1680 */
1681 for (iTest = 0; iTest < pCmn->cEntries; iTest++)
1682 {
1683 uint8_t const fOp = pCmn->paEntries[iTest].fOp;
1684 uint16_t const cbMem = pCmn->paEntries[iTest].cbMem;
1685 uint8_t const cbAlign = pCmn->paEntries[iTest].cbAlign;
1686 uint16_t const cbMax = cbCacheLine + cbMem;
1687 uint16_t offMem;
1688 uint8_t BS3_FAR *poffUd = (uint8_t BS3_FAR *)Bs3SelLnkPtrToCurPtr(pCmn->paEntries[iTest].pfn);
1689 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pCmn->paEntries[iTest].pfn);
1690 CtxUdExpected.rip = Ctx.rip;
1691 CtxUdExpected.rip.u = Ctx.rip.u + poffUd[-1];
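 /* Note: the byte just before each test snippet (poffUd[-1]) appears to hold the
    offset of the UD2 within the snippet, which is how the expected RIP can be
    computed generically for every snippet. */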
1692 CtxUdExpected.cs = Ctx.cs;
1693 CtxUdExpected.rflags = Ctx.rflags;
1694 if (bMode == BS3_MODE_RM)
1695 CtxUdExpected.rflags.u32 &= ~X86_EFL_AC; /** @todo investigate. automatically cleared, or is it just our code? Observed with bs3-cpu-instr-3 too (10980xe), seems to be the CPU doing it. */
1696 CtxUdExpected.rdx = Ctx.rdx;
1697 CtxUdExpected.rax = Ctx.rax;
1698 if (fOp & MYOP_LD)
1699 {
1700 switch (cbMem)
1701 {
1702 case 2:
1703 CtxUdExpected.rax.u16 = 0x0101;
1704 break;
1705 case 4:
1706 CtxUdExpected.rax.u32 = UINT32_C(0x01010101);
1707 break;
1708 case 8:
1709 CtxUdExpected.rax.u64 = UINT64_C(0x0101010101010101);
1710 break;
1711 }
1712 }
1713
1714 /*
1715 * Buffer misalignment loop.
1716 * Note! We must make sure to cross a cache line here to make sure
1717 * to cover the split-lock scenario. (The buffer is cache
1718 * line aligned.)
1719 */
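 /* The expectations below reflect the #AC gating: an alignment fault requires
    CR0.AM=1 (fAm), EFLAGS.AC=1 (fAc), CPL=3 (iRing == 3) and a misaligned
    access; otherwise we expect either the #PF from the ring-0-only alias page
    (fPf in ring 3) or plain success ending at the UD2. Entries flagged
    MYOP_AC_GP get #GP rather than #AC when misaligned (first branch below). */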
1720 for (offMem = 0; offMem < cbMax; offMem++)
1721 {
1722 bool const fMisaligned = (offMem & (cbAlign - 1)) != 0;
1723 unsigned offBuf = cbMax + cbMem * 2;
1724 while (offBuf-- > 0)
1725 pbBuf[offBuf] = 1; /* byte-by-byte to make sure it doesn't trigger AC. */
1726
1727 CtxUdExpected.rbx.u32 = Ctx.rbx.u32 = uEbx + offMem; /* ASSUMES memory in first 4GB. */
1728 if (BS3_MODE_IS_16BIT_SYS(bMode))
1729 g_uBs3TrapEipHint = Ctx.rip.u32;
1730
1731 //Bs3TestPrintf("iRing=%d iTest=%d cs:rip=%04RX16:%08RX32 ds:rbx=%04RX16:%08RX32 ss:esp=%04RX16:%08RX32 bXcpt=%#x errcd=%#x fAm=%d fAc=%d ESP=%#RX32\n",
1732 // iRing, iTest, Ctx.cs, Ctx.rip.u32, Ctx.ds, Ctx.rbx.u32, Ctx.ss, Ctx.rsp.u32, TrapCtx.bXcpt, (unsigned)TrapCtx.uErrCd, fAm, fAc, ASMGetESP());
1733
1734 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1735
1736 if ( (pCmn->paEntries[iTest].fOp & MYOP_AC_GP)
1737 && fMisaligned
1738 && (!fAm || iRing != 3 || !fAc || (offMem & 3 /* 10980XE */) == 0) )
1739 {
1740 if (fAc && bMode == BS3_MODE_RM)
1741 TrapCtx.Ctx.rflags.u32 |= X86_EFL_AC;
1742 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
1743 }
1744 else if (fPf && iRing == 3 && (!fAm || !fAc || !fMisaligned)) /* #AC beats #PF */
1745 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx,
1746 X86_TRAP_PF_P | X86_TRAP_PF_US
1747 | (pCmn->paEntries[iTest].fOp & MYOP_ST ? X86_TRAP_PF_RW : 0),
1748 uFlatBufPtr + offMem + (cbMem > 64 ? cbMem - 1 /*FXSAVE*/ : 0),
1749 pCmn->paEntries[iTest].offFaultInstr);
1750 else if (!fAm || iRing != 3 || !fAc || !fMisaligned)
1751 {
1752 if (fOp & MYOP_EFL)
1753 {
1754 CtxUdExpected.rflags.u16 &= ~X86_EFL_STATUS_BITS;
1755 CtxUdExpected.rflags.u16 |= TrapCtx.Ctx.rflags.u16 & X86_EFL_STATUS_BITS;
1756 }
1757 if (fOp == MYOP_LD_DIV)
1758 {
1759 CtxUdExpected.rax = TrapCtx.Ctx.rax;
1760 CtxUdExpected.rdx = TrapCtx.Ctx.rdx;
1761 }
1762 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1763 }
1764 else
1765 bs3CpuBasic2_CompareAcCtx(&TrapCtx, &Ctx, pCmn->paEntries[iTest].offFaultInstr);
1766
1767 g_usBs3TestStep++;
1768 }
1769 }
1770 }
1771 }
1772}
1773
1774
1775/**
1776 * Entrypoint for \#AC tests.
1777 *
1778 * @returns 0 or BS3TESTDOMODE_SKIPPED.
1779 * @param bMode The CPU mode we're testing.
1780 *
1781 * @note When testing v8086 code, we'll be running in v8086 mode. So, careful
1782 * with control registers and such.
1783 */
1784BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_RaiseXcpt11)(uint8_t bMode)
1785{
1786 unsigned cbCacheLine = 128; /** @todo detect */
1787 uint8_t BS3_FAR *pbBufAlloc;
1788 uint8_t BS3_FAR *pbBuf;
1789 unsigned idxCmnModes;
1790 uint32_t fCr0;
1791
1792 /*
1793 * Skip if 386 or older.
1794 */
1795 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) < BS3CPU_80486)
1796 {
1797 Bs3TestSkipped("#AC test requires 486 or later");
1798 return BS3TESTDOMODE_SKIPPED;
1799 }
1800
1801 bs3CpuBasic2_SetGlobals(bMode);
1802
1803 /* Get us a 64-byte aligned buffer. */
1804 pbBufAlloc = pbBuf = Bs3MemAllocZ(BS3_MODE_IS_RM_OR_V86(bMode) ? BS3MEMKIND_REAL : BS3MEMKIND_TILED, X86_PAGE_SIZE * 2);
1805 if (!pbBufAlloc)
1806 return Bs3TestFailed("Failed to allocate 2 pages of real-mode memory");
1807 if (BS3_FP_OFF(pbBuf) & (X86_PAGE_SIZE - 1))
1808 pbBuf = &pbBufAlloc[X86_PAGE_SIZE - (BS3_FP_OFF(pbBuf) & X86_PAGE_OFFSET_MASK)];
1809 BS3_ASSERT(pbBuf - pbBufAlloc <= X86_PAGE_SIZE);
1810 //Bs3TestPrintf("pbBuf=%p\n", pbBuf);
1811
1812 /* Find the g_aCmnModes entry. */
1813 idxCmnModes = 0;
1814 while (g_aCmnModes[idxCmnModes].bMode != (bMode & BS3_MODE_CODE_MASK))
1815 idxCmnModes++;
1816 //Bs3TestPrintf("idxCmnModes=%d bMode=%#x\n", idxCmnModes, bMode);
1817
1818 /* First round is w/o alignment checks enabled. */
1819 //Bs3TestPrintf("round 1\n");
1820 fCr0 = Bs3RegGetCr0();
1821 BS3_ASSERT(!(fCr0 & X86_CR0_AM));
1822 Bs3RegSetCr0(fCr0 & ~X86_CR0_AM);
1823#if 1
1824 bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBuf, cbCacheLine, false /*fAm*/, false /*fPf*/, 0, &g_aCmnModes[idxCmnModes]);
1825#endif
1826
1827 /* The second round is with alignment checks enabled. */
1828#if 1
1829 //Bs3TestPrintf("round 2\n");
1830 Bs3RegSetCr0(Bs3RegGetCr0() | X86_CR0_AM);
1831 bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBuf, cbCacheLine, true /*fAm*/, false /*fPf*/, 0, &g_aCmnModes[idxCmnModes]);
1832#endif
1833
1834#if 1
1835 /* The third and fourth round access the buffer via a page alias that's not
1836 accessible from ring-3. The third round has ACs disabled and the fourth
1837 has them enabled. */
1838 if (BS3_MODE_IS_PAGED(bMode) && !BS3_MODE_IS_V86(bMode))
1839 {
1840 /* Alias the buffer as system memory so ring-3 access with AC+AM will cause #PF: */
1841 /** @todo the aliasing is not necessary any more... */
1842 int rc;
1843 RTCCUINTXREG uFlatBufPtr = Bs3SelPtrToFlat(pbBuf);
1844 uint64_t const uAliasPgPtr = bMode & BS3_MODE_CODE_64 ? UINT64_C(0x0000648680000000) : UINT32_C(0x80000000);
1845 rc = Bs3PagingAlias(uAliasPgPtr, uFlatBufPtr & ~(uint64_t)X86_PAGE_OFFSET_MASK, X86_PAGE_SIZE * 2,
1846 X86_PTE_P | X86_PTE_RW);
1847 if (RT_SUCCESS(rc))
1848 {
1849 /* We 'misalign' the segment base here to make sure it's the final
1850 address that gets alignment checked and not just the operand value. */
1851 RTCCUINTXREG uAliasBufPtr = (RTCCUINTXREG)uAliasPgPtr + (uFlatBufPtr & X86_PAGE_OFFSET_MASK);
1852 uint8_t BS3_FAR *pbBufAlias = BS3_FP_MAKE(BS3_SEL_SPARE_00 | 3, (uFlatBufPtr & X86_PAGE_OFFSET_MASK) + 1);
1853 Bs3SelSetup16BitData(&Bs3GdteSpare00, uAliasPgPtr - 1);
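 /* I.e. the segment base is (uAliasPgPtr - 1) and the offset is (page offset + 1),
    so the effective linear address is unchanged while the offset alone would
    suggest a different alignment - ensuring the alignment check is done on the
    final linear address and not just the operand value. */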
1854
1855 //Bs3TestPrintf("round 3 pbBufAlias=%p\n", pbBufAlias);
1856 Bs3RegSetCr0(Bs3RegGetCr0() & ~X86_CR0_AM);
1857 bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBufAlias, cbCacheLine, false /*fAm*/,
1858 true /*fPf*/, uAliasBufPtr, &g_aCmnModes[idxCmnModes]);
1859
1860 //Bs3TestPrintf("round 4\n");
1861 Bs3RegSetCr0(Bs3RegGetCr0() | X86_CR0_AM);
1862 bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBufAlias, cbCacheLine, true /*fAm*/,
1863 true /*fPf*/, uAliasBufPtr, &g_aCmnModes[idxCmnModes]);
1864
1865 Bs3PagingUnalias(uAliasPgPtr, X86_PAGE_SIZE * 2);
1866 }
1867 else
1868 Bs3TestFailedF("Bs3PagingAlias failed with %Rrc", rc);
1869 }
1870#endif
1871
1872 Bs3MemFree(pbBufAlloc, X86_PAGE_SIZE * 2);
1873 Bs3RegSetCr0(fCr0);
1874 return 0;
1875}
1876
1877
1878/**
1879 * Executes one round of SIDT and SGDT tests using one assembly worker.
1880 *
1881 * This is written with driving everything from the 16-bit or 32-bit worker in
1882 * mind, i.e. not assuming the test bitcount is the same as the current one.
1883 */
1884static void bs3CpuBasic2_sidt_sgdt_One(BS3CB2SIDTSGDT const BS3_FAR *pWorker, uint8_t bTestMode, uint8_t bRing,
1885 uint8_t const *pbExpected)
1886{
1887 BS3TRAPFRAME TrapCtx;
1888 BS3REGCTX Ctx;
1889 BS3REGCTX CtxUdExpected;
1890 BS3REGCTX TmpCtx;
1891 uint8_t const cbBuf = 8*2; /* test buffer area */
1892 uint8_t abBuf[8*2 + 8 + 8]; /* test buffer w/ misalignment test space and some extra guard. */
1893 uint8_t BS3_FAR *pbBuf = abBuf;
1894 uint8_t const cbIdtr = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 2+8 : 2+4;
1895 bool const f286 = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80286;
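 /* SIDT/SGDT store a 2-byte limit followed by the base: 8 base bytes in 64-bit
    code, otherwise 4 (hence cbIdtr above). On the 286 the top base byte is
    expected to read back as 0xff, which several of the checks below verify. */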
1896 uint8_t bFiller;
1897 int off;
1898 int off2;
1899 unsigned cb;
1900 uint8_t BS3_FAR *pbTest;
1901
1902 /* make sure they're allocated */
1903 Bs3MemZero(&Ctx, sizeof(Ctx));
1904 Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
1905 Bs3MemZero(&TmpCtx, sizeof(TmpCtx));
1906 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
1907 Bs3MemZero(&abBuf, sizeof(abBuf));
1908
1909 /* Create a context, give this routine some more stack space, point the context
1910 at our SIDT [xBX] + UD2 combo, and point DS:xBX at abBuf. */
1911 Bs3RegCtxSaveEx(&Ctx, bTestMode, 256 /*cbExtraStack*/);
1912 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBuf);
1913 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pWorker->fpfnWorker);
1914 if (BS3_MODE_IS_16BIT_SYS(bTestMode))
1915 g_uBs3TrapEipHint = Ctx.rip.u32;
1916 if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
1917 Bs3RegCtxConvertToRingX(&Ctx, bRing);
1918
1919 /* For successful SIDT attempts, we'll stop at the UD2. */
1920 Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
1921 CtxUdExpected.rip.u += pWorker->cbInstr;
1922
1923 /*
1924 * Check that it works at all and that only bytes we expect get written to.
1925 */
1926 /* First with zero buffer. */
1927 Bs3MemZero(abBuf, sizeof(abBuf));
1928 if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), 0))
1929 Bs3TestFailedF("ASMMemIsAllU8 or Bs3MemZero is busted: abBuf=%.*Rhxs\n", sizeof(abBuf), pbBuf);
1930 if (!ASMMemIsZero(abBuf, sizeof(abBuf)))
1931 Bs3TestFailedF("ASMMemIsZero or Bs3MemZero is busted: abBuf=%.*Rhxs\n", sizeof(abBuf), pbBuf);
1932 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1933 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1934 if (f286 && abBuf[cbIdtr - 1] != 0xff)
1935 Bs3TestFailedF("286: Top base byte isn't 0xff (#1): %#x\n", abBuf[cbIdtr - 1]);
1936 if (!ASMMemIsZero(&abBuf[cbIdtr], cbBuf - cbIdtr))
1937 Bs3TestFailedF("Unexpected buffer bytes set (#1): cbIdtr=%u abBuf=%.*Rhxs\n", cbIdtr, cbBuf, pbBuf);
1938 if (Bs3MemCmp(abBuf, pbExpected, cbIdtr) != 0)
1939 Bs3TestFailedF("Mismatch (%s,#1): expected %.*Rhxs, got %.*Rhxs\n", pWorker->pszDesc, cbIdtr, pbExpected, cbIdtr, abBuf);
1940 g_usBs3TestStep++;
1941
1942 /* Again with a buffer filled with a byte not occurring in the previous result. */
1943 bFiller = 0x55;
1944 while (Bs3MemChr(abBuf, bFiller, cbBuf) != NULL)
1945 bFiller++;
1946 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
1947 if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), bFiller))
1948 Bs3TestFailedF("ASMMemIsAllU8 or Bs3MemSet is busted: bFiller=%#x abBuf=%.*Rhxs\n", bFiller, sizeof(abBuf), pbBuf);
1949
1950 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1951 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1952 if (f286 && abBuf[cbIdtr - 1] != 0xff)
1953 Bs3TestFailedF("286: Top base byte isn't 0xff (#2): %#x\n", abBuf[cbIdtr - 1]);
1954 if (!ASMMemIsAllU8(&abBuf[cbIdtr], cbBuf - cbIdtr, bFiller))
1955 Bs3TestFailedF("Unexpected buffer bytes set (#2): cbIdtr=%u bFiller=%#x abBuf=%.*Rhxs\n", cbIdtr, bFiller, cbBuf, pbBuf);
1956 if (Bs3MemChr(abBuf, bFiller, cbIdtr) != NULL)
1957 Bs3TestFailedF("Not all bytes touched: cbIdtr=%u bFiller=%#x abBuf=%.*Rhxs\n", cbIdtr, bFiller, cbBuf, pbBuf);
1958 if (Bs3MemCmp(abBuf, pbExpected, cbIdtr) != 0)
1959 Bs3TestFailedF("Mismatch (%s,#2): expected %.*Rhxs, got %.*Rhxs\n", pWorker->pszDesc, cbIdtr, pbExpected, cbIdtr, abBuf);
1960 g_usBs3TestStep++;
1961
1962 /*
1963 * Slide the buffer along 8 bytes to cover misalignment.
1964 */
1965 for (off = 0; off < 8; off++)
1966 {
1967 pbBuf = &abBuf[off];
1968 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &abBuf[off]);
1969 CtxUdExpected.rbx.u = Ctx.rbx.u;
1970
1971 /* First with zero buffer. */
1972 Bs3MemZero(abBuf, sizeof(abBuf));
1973 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1974 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1975 if (off > 0 && !ASMMemIsZero(abBuf, off))
1976 Bs3TestFailedF("Unexpected buffer bytes set before (#3): cbIdtr=%u off=%u abBuf=%.*Rhxs\n",
1977 cbIdtr, off, off + cbBuf, abBuf);
1978 if (!ASMMemIsZero(&abBuf[off + cbIdtr], sizeof(abBuf) - cbIdtr - off))
1979 Bs3TestFailedF("Unexpected buffer bytes set after (#3): cbIdtr=%u off=%u abBuf=%.*Rhxs\n",
1980 cbIdtr, off, off + cbBuf, abBuf);
1981 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
1982 Bs3TestFailedF("286: Top base byte isn't 0xff (#3): %#x\n", abBuf[off + cbIdtr - 1]);
1983 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
1984 Bs3TestFailedF("Mismatch (#3): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
1985 g_usBs3TestStep++;
1986
1987 /* Again with a buffer filled with a byte not occurring in the previous result. */
1988 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
1989 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1990 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1991 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
1992 Bs3TestFailedF("Unexpected buffer bytes set before (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
1993 cbIdtr, off, bFiller, off + cbBuf, abBuf);
1994 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - cbIdtr - off, bFiller))
1995 Bs3TestFailedF("Unexpected buffer bytes set after (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
1996 cbIdtr, off, bFiller, off + cbBuf, abBuf);
1997 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
1998 Bs3TestFailedF("Not all bytes touched (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
1999 cbIdtr, off, bFiller, off + cbBuf, abBuf);
2000 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
2001 Bs3TestFailedF("286: Top base byte isn't 0xff (#4): %#x\n", abBuf[off + cbIdtr - 1]);
2002 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
2003 Bs3TestFailedF("Mismatch (#4): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
2004 g_usBs3TestStep++;
2005 }
2006 pbBuf = abBuf;
2007 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBuf);
2008 CtxUdExpected.rbx.u = Ctx.rbx.u;
2009
2010 /*
2011 * Play with the selector limit if the target mode supports limit checking.
2012 * We use BS3_SEL_TEST_PAGE_00 for this.
2013 */
2014 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
2015 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
2016 {
2017 uint16_t cbLimit;
2018 uint32_t uFlatBuf = Bs3SelPtrToFlat(abBuf);
2019 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
2020 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
2021 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatBuf;
2022 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatBuf >> 16);
2023 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatBuf >> 24);
2024
2025 if (pWorker->fSs)
2026 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
2027 else
2028 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2029
2030 /* Expand up (normal). */
2031 for (off = 0; off < 8; off++)
2032 {
2033 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2034 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
2035 {
2036 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2037 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
2038 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2039 if (off + cbIdtr <= cbLimit + 1)
2040 {
2041 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2042 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
2043 Bs3TestFailedF("Not all bytes touched (#5): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2044 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2045 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
2046 Bs3TestFailedF("Mismatch (#5): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
2047 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
2048 Bs3TestFailedF("286: Top base byte isn't 0xff (#5): %#x\n", abBuf[off + cbIdtr - 1]);
2049 }
2050 else
2051 {
2052 if (pWorker->fSs)
2053 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2054 else
2055 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2056 if (off + 2 <= cbLimit + 1)
2057 {
2058 if (Bs3MemChr(&abBuf[off], bFiller, 2) != NULL)
2059 Bs3TestFailedF("Limit bytes not touched (#6): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2060 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2061 if (Bs3MemCmp(&abBuf[off], pbExpected, 2) != 0)
2062 Bs3TestFailedF("Mismatch (#6): expected %.2Rhxs, got %.2Rhxs\n", pbExpected, &abBuf[off]);
2063 if (!ASMMemIsAllU8(&abBuf[off + 2], cbIdtr - 2, bFiller))
2064 Bs3TestFailedF("Base bytes touched on #GP (#6): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2065 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2066 }
2067 else if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), bFiller))
2068 Bs3TestFailedF("Bytes touched on #GP: cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2069 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2070 }
2071
2072 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
2073 Bs3TestFailedF("Leading bytes touched (#7): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2074 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2075 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - off - cbIdtr, bFiller))
2076 Bs3TestFailedF("Trailing bytes touched (#7): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2077 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2078
2079 g_usBs3TestStep++;
2080 }
2081 }
2082
2083 /* Expand down (weird). Inverted valid area compared to expand up,
2084 so a limit of zero gives us a valid range of 0001..0ffffh (instead of
2085 a segment with one valid byte at 0000h). Whereas a limit of 0fffeh
2086 means one valid byte at 0ffffh, and a limit of 0ffffh means none
2087 (because for a normal expand-up segment 0ffffh means all 64KB are
2088 accessible). */
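 /* E.g. with cbLimit = 5 the offsets 0..5 are invalid and 6..0ffffh are valid,
    which is why the success case in the loop below is "off > cbLimit". */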
2089 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
2090 for (off = 0; off < 8; off++)
2091 {
2092 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2093 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
2094 {
2095 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2096 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
2097 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2098
2099 if (off > cbLimit)
2100 {
2101 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2102 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
2103 Bs3TestFailedF("Not all bytes touched (#8): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2104 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2105 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
2106 Bs3TestFailedF("Mismatch (#8): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
2107 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
2108 Bs3TestFailedF("286: Top base byte isn't 0xff (#8): %#x\n", abBuf[off + cbIdtr - 1]);
2109 }
2110 else
2111 {
2112 if (pWorker->fSs)
2113 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2114 else
2115 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2116 if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), bFiller))
2117 Bs3TestFailedF("Bytes touched on #GP: cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2118 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2119 }
2120
2121 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
2122 Bs3TestFailedF("Leading bytes touched (#9): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2123 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2124 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - off - cbIdtr, bFiller))
2125 Bs3TestFailedF("Trailing bytes touched (#9): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2126 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2127
2128 g_usBs3TestStep++;
2129 }
2130 }
2131
2132 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBuf);
2133 CtxUdExpected.rbx.u = Ctx.rbx.u;
2134 CtxUdExpected.ss = Ctx.ss;
2135 CtxUdExpected.ds = Ctx.ds;
2136 }
2137
2138 /*
2139 * Play with the paging.
2140 */
2141 if ( BS3_MODE_IS_PAGED(bTestMode)
2142 && (!pWorker->fSs || bRing == 3) /* SS.DPL == CPL, we'll get some tiled ring-3 selector here. */
2143 && (pbTest = (uint8_t BS3_FAR *)Bs3MemGuardedTestPageAlloc(BS3MEMKIND_TILED)) != NULL)
2144 {
2145 RTCCUINTXREG uFlatTest = Bs3SelPtrToFlat(pbTest);
2146
2147 /*
2148 * Slide the buffer towards the trailing guard page. We'll observe the
2149 * first word being written entirely separately from the 2nd dword/qword.
2150 */
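 /* That is: when the descriptor store straddles into the guard page we expect a
    #PF, but the 2-byte limit may already have been written if both its bytes fit
    below the page boundary, while the base must not have been written at all -
    which is what the failure checks in the #PF branch below verify. */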
2151 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2152 {
2153 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller, cbIdtr * 2);
2154 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
2155 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2156 if (off + cbIdtr <= X86_PAGE_SIZE)
2157 {
2158 CtxUdExpected.rbx = Ctx.rbx;
2159 CtxUdExpected.ss = Ctx.ss;
2160 CtxUdExpected.ds = Ctx.ds;
2161 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2162 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2163 Bs3TestFailedF("Mismatch (#9): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2164 }
2165 else
2166 {
2167 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2168 uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2169 if ( off <= X86_PAGE_SIZE - 2
2170 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2171 Bs3TestFailedF("Mismatch (#10): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
2172 pbExpected, &pbTest[off], off);
2173 if ( off < X86_PAGE_SIZE - 2
2174 && !ASMMemIsAllU8(&pbTest[off + 2], X86_PAGE_SIZE - off - 2, bFiller))
2175 Bs3TestFailedF("Wrote partial base on #PF (#10): bFiller=%#x, got %.*Rhxs; off=%#x\n",
2176 bFiller, X86_PAGE_SIZE - off - 2, &pbTest[off + 2], off);
2177 if (off == X86_PAGE_SIZE - 1 && pbTest[off] != bFiller)
2178 Bs3TestFailedF("Wrote partial limit on #PF (#10): Expected %02x, got %02x\n", bFiller, pbTest[off]);
2179 }
2180 g_usBs3TestStep++;
2181 }
2182
2183 /*
2184 * Now, do it the other way around. It should look normal now since writing
2185 * the limit will #PF first and nothing should be written.
2186 */
2187 for (off = cbIdtr + 4; off >= -cbIdtr - 4; off--)
2188 {
2189 Bs3MemSet(pbTest, bFiller, 48);
2190 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
2191 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2192 if (off >= 0)
2193 {
2194 CtxUdExpected.rbx = Ctx.rbx;
2195 CtxUdExpected.ss = Ctx.ss;
2196 CtxUdExpected.ds = Ctx.ds;
2197 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2198 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2199 Bs3TestFailedF("Mismatch (#11): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2200 }
2201 else
2202 {
2203 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2204 uFlatTest + off, 0 /*cbIpAdjust*/);
2205 if ( -off < cbIdtr
2206 && !ASMMemIsAllU8(pbTest, cbIdtr + off, bFiller))
2207 Bs3TestFailedF("Wrote partial content on #PF (#12): bFiller=%#x, found %.*Rhxs; off=%d\n",
2208 bFiller, cbIdtr + off, pbTest, off);
2209 }
2210 if (!ASMMemIsAllU8(&pbTest[RT_MAX(cbIdtr + off, 0)], 16, bFiller))
2211 Bs3TestFailedF("Wrote beyond expected area (#13): bFiller=%#x, found %.16Rhxs; off=%d\n",
2212 bFiller, &pbTest[RT_MAX(cbIdtr + off, 0)], off);
2213 g_usBs3TestStep++;
2214 }
2215
2216 /*
2217 * Combine paging and segment limit and check ordering.
2218 * This is kind of interesting here since the instruction seems to
2219 * be doing two separate writes.
2220 */
2221 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
2222 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
2223 {
2224 uint16_t cbLimit;
2225
2226 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
2227 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
2228 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
2229 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
2230 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
2231
2232 if (pWorker->fSs)
2233 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
2234 else
2235 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2236
2237 /* Expand up (normal), approaching tail guard page. */
2238 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2239 {
2240 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2241 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
2242 {
2243 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2244 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller, cbIdtr * 2);
2245 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2246 if (off + cbIdtr <= cbLimit + 1)
2247 {
2248 /* No #GP, but maybe #PF. */
2249 if (off + cbIdtr <= X86_PAGE_SIZE)
2250 {
2251 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2252 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2253 Bs3TestFailedF("Mismatch (#14): expected %.*Rhxs, got %.*Rhxs\n",
2254 cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2255 }
2256 else
2257 {
2258 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2259 uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2260 if ( off <= X86_PAGE_SIZE - 2
2261 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2262 Bs3TestFailedF("Mismatch (#15): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
2263 pbExpected, &pbTest[off], off);
2264 cb = X86_PAGE_SIZE - off - 2;
2265 if ( off < X86_PAGE_SIZE - 2
2266 && !ASMMemIsAllU8(&pbTest[off + 2], cb, bFiller))
2267 Bs3TestFailedF("Wrote partial base on #PF (#15): bFiller=%#x, got %.*Rhxs; off=%#x\n",
2268 bFiller, cb, &pbTest[off + 2], off);
2269 if (off == X86_PAGE_SIZE - 1 && pbTest[off] != bFiller)
2270 Bs3TestFailedF("Wrote partial limit on #PF (#15): Expected %02x, got %02x\n", bFiller, pbTest[off]);
2271 }
2272 }
2273 else if (off + 2 <= cbLimit + 1)
2274 {
2275 /* Writing the [ig]dtr.limit word doesn't #GP here, but may #PF; if it doesn't #PF, the base write is what causes the #GP/#SS. */
2276 if (off <= X86_PAGE_SIZE - 2)
2277 {
2278 if (pWorker->fSs)
2279 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2280 else
2281 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2282 if (Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2283 Bs3TestFailedF("Mismatch (#16): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
2284 pbExpected, &pbTest[off], off);
2285 cb = X86_PAGE_SIZE - off - 2;
2286 if ( off < X86_PAGE_SIZE - 2
2287 && !ASMMemIsAllU8(&pbTest[off + 2], cb, bFiller))
2288 Bs3TestFailedF("Wrote partial base with limit (#16): bFiller=%#x, got %.*Rhxs; off=%#x\n",
2289 bFiller, cb, &pbTest[off + 2], off);
2290 }
2291 else
2292 {
2293 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2294 uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2295 if ( off < X86_PAGE_SIZE
2296 && !ASMMemIsAllU8(&pbTest[off], X86_PAGE_SIZE - off, bFiller))
2297 Bs3TestFailedF("Mismatch (#16): Partial limit write on #PF: bFiller=%#x, got %.*Rhxs\n",
2298 bFiller, X86_PAGE_SIZE - off, &pbTest[off]);
2299 }
2300 }
2301 else
2302 {
2303 /* #GP/#SS on limit. */
2304 if (pWorker->fSs)
2305 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2306 else
2307 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2308 if ( off < X86_PAGE_SIZE
2309 && !ASMMemIsAllU8(&pbTest[off], X86_PAGE_SIZE - off, bFiller))
2310 Bs3TestFailedF("Mismatch (#17): Partial write on #GP: bFiller=%#x, got %.*Rhxs\n",
2311 bFiller, X86_PAGE_SIZE - off, &pbTest[off]);
2312 }
2313
2314 cb = RT_MIN(cbIdtr * 2, off - (X86_PAGE_SIZE - cbIdtr*2));
2315 if (!ASMMemIsAllU8(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], cb, bFiller))
2316 Bs3TestFailedF("Leading bytes touched (#18): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
2317 cbIdtr, off, cbLimit, bFiller, cb, &pbTest[X86_PAGE_SIZE - cbIdtr * 2]);
2318
2319 g_usBs3TestStep++;
2320
2321 /* Set DS to 0 and check that we get #GP(0). */
2322 if (!pWorker->fSs)
2323 {
2324 Ctx.ds = 0;
2325 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2326 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2327 Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2328 g_usBs3TestStep++;
2329 }
2330 }
2331 }
2332
2333 /* Expand down. */
2334 pbTest -= X86_PAGE_SIZE; /* Note! we're backing up a page to simplify things */
2335 uFlatTest -= X86_PAGE_SIZE;
2336
2337 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
2338 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
2339 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
2340 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
2341
2342 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2343 {
2344 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2345 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
2346 {
2347 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2348 Bs3MemSet(&pbTest[X86_PAGE_SIZE], bFiller, cbIdtr * 2);
2349 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2350 if (cbLimit < off && off >= X86_PAGE_SIZE)
2351 {
2352 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2353 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2354 Bs3TestFailedF("Mismatch (#19): expected %.*Rhxs, got %.*Rhxs\n",
2355 cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2356 cb = X86_PAGE_SIZE + cbIdtr*2 - off;
2357 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], cb, bFiller))
2358 Bs3TestFailedF("Trailing bytes touched (#20): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
2359 cbIdtr, off, cbLimit, bFiller, cb, &pbTest[off + cbIdtr]);
2360 }
2361 else
2362 {
2363 if (cbLimit < off && off < X86_PAGE_SIZE)
2364 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2365 uFlatTest + off, 0 /*cbIpAdjust*/);
2366 else if (pWorker->fSs)
2367 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2368 else
2369 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2370 cb = cbIdtr*2;
2371 if (!ASMMemIsAllU8(&pbTest[X86_PAGE_SIZE], cb, bFiller))
2372 Bs3TestFailedF("Trailing bytes touched (#20): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
2373 cbIdtr, off, cbLimit, bFiller, cb, &pbTest[X86_PAGE_SIZE]);
2374 }
2375 g_usBs3TestStep++;
2376 }
2377 }
2378
2379 pbTest += X86_PAGE_SIZE;
2380 uFlatTest += X86_PAGE_SIZE;
2381 }
2382
2383 Bs3MemGuardedTestPageFree(pbTest);
2384 }
2385
2386 /*
2387 * Check non-canonical 64-bit space.
2388 */
2389 if ( BS3_MODE_IS_64BIT_CODE(bTestMode)
2390 && (pbTest = (uint8_t BS3_FAR *)Bs3PagingSetupCanonicalTraps()) != NULL)
2391 {
2392 /* Make our references relative to the gap. */
2393 pbTest += g_cbBs3PagingOneCanonicalTrap;
2394
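 /* On CPUs with 48-bit virtual addressing the non-canonical gap spans
    0x0000800000000000..0xffff7fffffffffff. We first probe with xBX just below
    the start of the gap (negative offsets are fine) and then just above its
    end, expecting #GP(0) as soon as any part of the store would land in the
    gap. */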
2395 /* Hit it from below. */
2396 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
2397 {
2398 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0x0000800000000000) + off;
2399 Bs3MemSet(&pbTest[-64], bFiller, 64*2);
2400 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2401 if (off + cbIdtr <= 0)
2402 {
2403 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2404 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2405 Bs3TestFailedF("Mismatch (#21): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2406 }
2407 else
2408 {
2409 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2410 if (off <= -2 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2411 Bs3TestFailedF("Mismatch (#21): expected limit %.2Rhxs, got %.2Rhxs\n", pbExpected, &pbTest[off]);
2412 off2 = off <= -2 ? 2 : 0;
2413 cb = cbIdtr - off2;
2414 if (!ASMMemIsAllU8(&pbTest[off + off2], cb, bFiller))
2415 Bs3TestFailedF("Mismatch (#21): touched base %.*Rhxs, got %.*Rhxs\n",
2416 cb, &pbExpected[off], cb, &pbTest[off + off2]);
2417 }
2418 if (!ASMMemIsAllU8(&pbTest[off - 16], 16, bFiller))
2419 Bs3TestFailedF("Leading bytes touched (#21): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off - 16]);
2420 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], 16, bFiller))
2421 Bs3TestFailedF("Trailing bytes touched (#21): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off + cbIdtr]);
2422 }
2423
2424 /* Hit it from above. */
2425 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
2426 {
2427 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0xffff800000000000) + off;
2428 Bs3MemSet(&pbTest[-64], bFiller, 64*2);
2429 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2430 if (off >= 0)
2431 {
2432 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2433 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2434 Bs3TestFailedF("Mismatch (#22): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2435 }
2436 else
2437 {
2438 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2439 if (!ASMMemIsAllU8(&pbTest[off], cbIdtr, bFiller))
2440 Bs3TestFailedF("Mismatch (#22): touched base %.*Rhxs, got %.*Rhxs\n",
2441 cbIdtr, &pbExpected[off], cbIdtr, &pbTest[off]);
2442 }
2443 if (!ASMMemIsAllU8(&pbTest[off - 16], 16, bFiller))
2444 Bs3TestFailedF("Leading bytes touched (#22): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off - 16]);
2445 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], 16, bFiller))
2446 Bs3TestFailedF("Trailing bytes touched (#22): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off + cbIdtr]);
2447 }
2448
2449 }
2450}
2451
2452
2453static void bs3CpuBasic2_sidt_sgdt_Common(uint8_t bTestMode, BS3CB2SIDTSGDT const BS3_FAR *paWorkers, unsigned cWorkers,
2454 uint8_t const *pbExpected)
2455{
2456 unsigned idx;
2457 unsigned bRing;
2458 unsigned iStep = 0;
2459
2460 /* Note! We skip the SS checks for ring-0 since we badly mess up SS in the
2461 test and don't want to bother with double faults. */
2462 for (bRing = 0; bRing <= 3; bRing++)
2463 {
2464 for (idx = 0; idx < cWorkers; idx++)
2465 if ( (paWorkers[idx].bMode & (bTestMode & BS3_MODE_CODE_MASK))
2466 && (!paWorkers[idx].fSs || bRing != 0 /** @todo || BS3_MODE_IS_64BIT_SYS(bTestMode)*/ ))
2467 {
2468 g_usBs3TestStep = iStep;
2469 bs3CpuBasic2_sidt_sgdt_One(&paWorkers[idx], bTestMode, bRing, pbExpected);
2470 iStep += 1000;
2471 }
2472 if (BS3_MODE_IS_RM_OR_V86(bTestMode))
2473 break;
2474 }
2475}
2476
2477
2478BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_sidt)(uint8_t bMode)
2479{
2480 union
2481 {
2482 RTIDTR Idtr;
2483 uint8_t ab[16];
2484 } Expected;
2485
2486 //if (bMode != BS3_MODE_LM64) return BS3TESTDOMODE_SKIPPED;
2487 bs3CpuBasic2_SetGlobals(bMode);
2488
2489 /*
2490 * Pass to common worker which is only compiled once per mode.
2491 */
2492 Bs3MemZero(&Expected, sizeof(Expected));
2493 ASMGetIDTR(&Expected.Idtr);
2494 bs3CpuBasic2_sidt_sgdt_Common(bMode, g_aSidtWorkers, RT_ELEMENTS(g_aSidtWorkers), Expected.ab);
2495
2496 /*
2497 * Re-initialize the IDT.
2498 */
2499 Bs3TrapReInit();
2500 return 0;
2501}
2502
2503
2504BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_sgdt)(uint8_t bMode)
2505{
2506 uint64_t const uOrgAddr = Bs3Lgdt_Gdt.uAddr;
2507 uint64_t uNew = 0;
2508 union
2509 {
2510 RTGDTR Gdtr;
2511 uint8_t ab[16];
2512 } Expected;
2513
2514 //if (bMode != BS3_MODE_LM64) return BS3TESTDOMODE_SKIPPED;
2515 bs3CpuBasic2_SetGlobals(bMode);
2516
2517 /*
2518 * If in paged mode, try pushing the GDT way up.
2519 */
2520 Bs3MemZero(&Expected, sizeof(Expected));
2521 ASMGetGDTR(&Expected.Gdtr);
2522 if (BS3_MODE_IS_PAGED(bMode))
2523 {
2524/** @todo loading non-canonical base addresses. */
2525 int rc;
2526 uNew = BS3_MODE_IS_64BIT_SYS(bMode) ? UINT64_C(0xffff80fedcb70000) : UINT64_C(0xc2d28000);
2527 uNew |= uOrgAddr & X86_PAGE_OFFSET_MASK;
2528 rc = Bs3PagingAlias(uNew, uOrgAddr, Bs3Lgdt_Gdt.cb, X86_PTE_P | X86_PTE_RW | X86_PTE_US | X86_PTE_D | X86_PTE_A);
2529 if (RT_SUCCESS(rc))
2530 {
2531 Bs3Lgdt_Gdt.uAddr = uNew;
2532 Bs3UtilSetFullGdtr(Bs3Lgdt_Gdt.cb, uNew);
2533 ASMGetGDTR(&Expected.Gdtr);
2534 if (BS3_MODE_IS_64BIT_SYS(bMode) && ARCH_BITS != 64)
2535 *(uint32_t *)&Expected.ab[6] = (uint32_t)(uNew >> 32);
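 /* When the system uses a 64-bit GDT base but this code is compiled 16/32-bit,
    ASMGetGDTR above only captures the low 32 bits of the base, so the high
    dword of the expected value has to be patched in by hand. */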
2536 }
2537 }
2538
2539 /*
2540 * Pass to common worker which is only compiled once per mode.
2541 */
2542 bs3CpuBasic2_sidt_sgdt_Common(bMode, g_aSgdtWorkers, RT_ELEMENTS(g_aSgdtWorkers), Expected.ab);
2543
2544 /*
2545 * Unalias the GDT.
2546 */
2547 if (uNew != 0)
2548 {
2549 Bs3Lgdt_Gdt.uAddr = uOrgAddr;
2550 Bs3UtilSetFullGdtr(Bs3Lgdt_Gdt.cb, uOrgAddr);
2551 Bs3PagingUnalias(uNew, Bs3Lgdt_Gdt.cb);
2552 }
2553
2554 /*
2555 * Re-initialize the IDT.
2556 */
2557 Bs3TrapReInit();
2558 return 0;
2559}
2560
2561
2562
2563/*
2564 * LIDT & LGDT
2565 */
2566
2567/**
2568 * Executes one round of LIDT and LGDT tests using one assembly worker.
2569 *
2570 * This is written with driving everything from the 16-bit or 32-bit worker in
2571 * mind, i.e. not assuming the test bitcount is the same as the current.
2572 */
2573static void bs3CpuBasic2_lidt_lgdt_One(BS3CB2SIDTSGDT const BS3_FAR *pWorker, uint8_t bTestMode, uint8_t bRing,
2574 uint8_t const *pbRestore, size_t cbRestore, uint8_t const *pbExpected)
2575{
2576 static const struct
2577 {
2578 bool fGP;
2579 uint16_t cbLimit;
2580 uint64_t u64Base;
2581 } s_aValues64[] =
2582 {
2583 { false, 0x0000, UINT64_C(0x0000000000000000) },
2584 { false, 0x0001, UINT64_C(0x0000000000000001) },
2585 { false, 0x0002, UINT64_C(0x0000000000000010) },
2586 { false, 0x0003, UINT64_C(0x0000000000000123) },
2587 { false, 0x0004, UINT64_C(0x0000000000001234) },
2588 { false, 0x0005, UINT64_C(0x0000000000012345) },
2589 { false, 0x0006, UINT64_C(0x0000000000123456) },
2590 { false, 0x0007, UINT64_C(0x0000000001234567) },
2591 { false, 0x0008, UINT64_C(0x0000000012345678) },
2592 { false, 0x0009, UINT64_C(0x0000000123456789) },
2593 { false, 0x000a, UINT64_C(0x000000123456789a) },
2594 { false, 0x000b, UINT64_C(0x00000123456789ab) },
2595 { false, 0x000c, UINT64_C(0x0000123456789abc) },
2596 { false, 0x001c, UINT64_C(0x00007ffffeefefef) },
2597 { false, 0xffff, UINT64_C(0x00007fffffffffff) },
2598 { true, 0xf3f1, UINT64_C(0x0000800000000000) },
2599 { true, 0x0000, UINT64_C(0x0000800000000000) },
2600 { true, 0x0000, UINT64_C(0x0000800000000333) },
2601 { true, 0x00f0, UINT64_C(0x0001000000000000) },
2602 { true, 0x0ff0, UINT64_C(0x0012000000000000) },
2603 { true, 0x0eff, UINT64_C(0x0123000000000000) },
2604 { true, 0xe0fe, UINT64_C(0x1234000000000000) },
2605 { true, 0x00ad, UINT64_C(0xffff300000000000) },
2606 { true, 0x0000, UINT64_C(0xffff7fffffffffff) },
2607 { true, 0x00f0, UINT64_C(0xffff7fffffffffff) },
2608 { false, 0x5678, UINT64_C(0xffff800000000000) },
2609 { false, 0x2969, UINT64_C(0xffffffffffeefefe) },
2610 { false, 0x1221, UINT64_C(0xffffffffffffffff) },
2611 { false, 0x1221, UINT64_C(0xffffffffffffffff) },
2612 };
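 /* The fGP entries are the non-canonical bases that LIDT/LGDT must reject with
    #GP(0) in 64-bit mode; the canonical ones (top 17 address bits all equal on
    48-bit implementations) must load fine. */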
2613 static const struct
2614 {
2615 uint16_t cbLimit;
2616 uint32_t u32Base;
2617 } s_aValues32[] =
2618 {
2619 { 0xdfdf, UINT32_C(0xefefefef) },
2620 { 0x0000, UINT32_C(0x00000000) },
2621 { 0x0001, UINT32_C(0x00000001) },
2622 { 0x0002, UINT32_C(0x00000012) },
2623 { 0x0003, UINT32_C(0x00000123) },
2624 { 0x0004, UINT32_C(0x00001234) },
2625 { 0x0005, UINT32_C(0x00012345) },
2626 { 0x0006, UINT32_C(0x00123456) },
2627 { 0x0007, UINT32_C(0x01234567) },
2628 { 0x0008, UINT32_C(0x12345678) },
2629 { 0x0009, UINT32_C(0x80204060) },
2630 { 0x000a, UINT32_C(0xddeeffaa) },
2631 { 0x000b, UINT32_C(0xfdecdbca) },
2632 { 0x000c, UINT32_C(0x6098456b) },
2633 { 0x000d, UINT32_C(0x98506099) },
2634 { 0x000e, UINT32_C(0x206950bc) },
2635 { 0x000f, UINT32_C(0x9740395d) },
2636 { 0x0334, UINT32_C(0x64a9455e) },
2637 { 0xb423, UINT32_C(0xd20b6eff) },
2638 { 0x4955, UINT32_C(0x85296d46) },
2639 { 0xffff, UINT32_C(0x07000039) },
2640 { 0xefe1, UINT32_C(0x0007fe00) },
2641 };
2642
2643 BS3TRAPFRAME TrapCtx;
2644 BS3REGCTX Ctx;
2645 BS3REGCTX CtxUdExpected;
2646 BS3REGCTX TmpCtx;
2647 uint8_t abBufLoad[40]; /* Test buffer w/ misalignment test space and some (cbIdtr) extra guard. */
2648 uint8_t abBufSave[32]; /* For saving the result after loading. */
2649 uint8_t abBufRestore[24]; /* For restoring sane value (same seg as abBufSave!). */
2650 uint8_t abExpectedFilled[32]; /* Same as pbExpected, except it's filled with bFiller2 instead of zeros. */
2651 uint8_t BS3_FAR *pbBufSave; /* Correctly aligned pointer into abBufSave. */
2652 uint8_t BS3_FAR *pbBufRestore; /* Correctly aligned pointer into abBufRestore. */
2653 uint8_t const cbIdtr = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 2+8 : 2+4;
2654 uint8_t const cbBaseLoaded = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 8
2655 : BS3_MODE_IS_16BIT_CODE(bTestMode) == !(pWorker->fFlags & BS3CB2SIDTSGDT_F_OPSIZE)
2656 ? 3 : 4;
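 /* cbBaseLoaded: how many base bytes the LIDT/LGDT actually loads - 8 in 64-bit
    code; with an effective 16-bit operand size (16-bit code without the operand
    size prefix, or 32-bit code with it) only 3, the classic 24-bit base;
    otherwise all 4. On the 286 the top base byte reads back as 0xff (see
    bTop16BitBase below). */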
2657 bool const f286 = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80286;
2658 uint8_t const bTop16BitBase = f286 ? 0xff : 0x00;
2659 uint8_t bFiller1; /* For filling abBufLoad. */
2660 uint8_t bFiller2; /* For filling abBufSave and expectations. */
2661 int off;
2662 uint8_t BS3_FAR *pbTest;
2663 unsigned i;
2664
2665 /* make sure they're allocated */
2666 Bs3MemZero(&Ctx, sizeof(Ctx));
2667 Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
2668 Bs3MemZero(&TmpCtx, sizeof(TmpCtx));
2669 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
2670 Bs3MemZero(abBufSave, sizeof(abBufSave));
2671 Bs3MemZero(abBufLoad, sizeof(abBufLoad));
2672 Bs3MemZero(abBufRestore, sizeof(abBufRestore));
2673
2674 /*
2675 * Create a context, giving this routine some more stack space.
2676 * - Point the context at our LIDT [xBX] + SIDT [xDI] + LIDT [xSI] + UD2 combo.
2677 * - Point DS/SS:xBX at abBufLoad.
2678 * - Point ES:xDI at abBufSave.
2679 * - Point ES:xSI at abBufRestore.
2680 */
2681 Bs3RegCtxSaveEx(&Ctx, bTestMode, 256 /*cbExtraStack*/);
2682 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pWorker->fpfnWorker);
2683 if (BS3_MODE_IS_16BIT_SYS(bTestMode))
2684 g_uBs3TrapEipHint = Ctx.rip.u32;
2685 Ctx.rflags.u16 &= ~X86_EFL_IF;
2686 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBufLoad);
2687
2688 pbBufSave = abBufSave;
2689 if ((BS3_FP_OFF(pbBufSave) + 2) & 7)
2690 pbBufSave += 8 - ((BS3_FP_OFF(pbBufSave) + 2) & 7);
2691 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rdi, &Ctx.es, pbBufSave);
2692
2693 pbBufRestore = abBufRestore;
2694 if ((BS3_FP_OFF(pbBufRestore) + 2) & 7)
2695 pbBufRestore += 8 - ((BS3_FP_OFF(pbBufRestore) + 2) & 7);
2696 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsi, &Ctx.es, pbBufRestore);
2697 Bs3MemCpy(pbBufRestore, pbRestore, cbRestore);
2698
2699 if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
2700 Bs3RegCtxConvertToRingX(&Ctx, bRing);
2701
2702 /* For successful LIDT/LGDT attempts, we'll stop at the UD2. */
2703 Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
2704 CtxUdExpected.rip.u += pWorker->cbInstr;
2705
2706 /*
2707 * Check that it works at all.
2708 */
2709 Bs3MemZero(abBufLoad, sizeof(abBufLoad));
2710 Bs3MemCpy(abBufLoad, pbBufRestore, cbIdtr);
2711 Bs3MemZero(abBufSave, sizeof(abBufSave));
2712 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2713 if (bRing != 0)
2714 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2715 else
2716 {
2717 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2718 if (Bs3MemCmp(pbBufSave, pbExpected, cbIdtr * 2) != 0)
2719 Bs3TestFailedF("Mismatch (%s, #1): expected %.*Rhxs, got %.*Rhxs\n",
2720 pWorker->pszDesc, cbIdtr*2, pbExpected, cbIdtr*2, pbBufSave);
2721 }
2722 g_usBs3TestStep++;
2723
2724 /* Determine two filler bytes that don't appear in the previous result or our expectations. */
2725 bFiller1 = ~0x55;
2726 while ( Bs3MemChr(pbBufSave, bFiller1, cbIdtr) != NULL
2727 || Bs3MemChr(pbRestore, bFiller1, cbRestore) != NULL
2728 || bFiller1 == 0xff)
2729 bFiller1++;
2730 bFiller2 = 0x33;
2731 while ( Bs3MemChr(pbBufSave, bFiller2, cbIdtr) != NULL
2732 || Bs3MemChr(pbRestore, bFiller2, cbRestore) != NULL
2733 || bFiller2 == 0xff
2734 || bFiller2 == bFiller1)
2735 bFiller2++;
2736 Bs3MemSet(abExpectedFilled, bFiller2, sizeof(abExpectedFilled));
2737 Bs3MemCpy(abExpectedFilled, pbExpected, cbIdtr);
2738
2739 /* Again with a buffer filled with a byte not occurring in the previous result. */
2740 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2741 Bs3MemCpy(abBufLoad, pbBufRestore, cbIdtr);
2742 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2743 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2744 if (bRing != 0)
2745 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2746 else
2747 {
2748 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2749 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2750 Bs3TestFailedF("Mismatch (%s, #2): expected %.*Rhxs, got %.*Rhxs\n",
2751 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2752 }
2753 g_usBs3TestStep++;
2754
2755 /*
2756 * Try loading a bunch of different limit+base values to check what happens,
2757 * especially what happens wrt the top part of the base in 16-bit mode.
2758 */
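    /* Layout reminder (illustrative sketch, matching the copies below): the memory
       operand of LIDT/LGDT is a 16-bit limit followed by the base -- effectively
           struct { uint16_t cbLimit; uint32_t uBase; }   6 bytes outside long mode
           struct { uint16_t cbLimit; uint64_t uBase; }  10 bytes in 64-bit code
       (packed, no padding), so the limit goes to abBufLoad[0..1] and the base
       starts at abBufLoad[2]. */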
2759 if (BS3_MODE_IS_64BIT_CODE(bTestMode))
2760 {
2761 for (i = 0; i < RT_ELEMENTS(s_aValues64); i++)
2762 {
2763 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2764 Bs3MemCpy(&abBufLoad[0], &s_aValues64[i].cbLimit, 2);
2765 Bs3MemCpy(&abBufLoad[2], &s_aValues64[i].u64Base, 8);
2766 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2767 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2768 if (bRing != 0 || s_aValues64[i].fGP)
2769 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2770 else
2771 {
2772 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2773 if ( Bs3MemCmp(&pbBufSave[0], &s_aValues64[i].cbLimit, 2) != 0
2774 || Bs3MemCmp(&pbBufSave[2], &s_aValues64[i].u64Base, 8) != 0
2775 || !ASMMemIsAllU8(&pbBufSave[10], cbIdtr, bFiller2))
2776 Bs3TestFailedF("Mismatch (%s, #2): expected %04RX16:%016RX64, fillers %#x %#x, got %.*Rhxs\n",
2777 pWorker->pszDesc, s_aValues64[i].cbLimit, s_aValues64[i].u64Base,
2778 bFiller1, bFiller2, cbIdtr*2, pbBufSave);
2779 }
2780 g_usBs3TestStep++;
2781 }
2782 }
2783 else
2784 {
2785 for (i = 0; i < RT_ELEMENTS(s_aValues32); i++)
2786 {
2787 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2788 Bs3MemCpy(&abBufLoad[0], &s_aValues32[i].cbLimit, 2);
2789 Bs3MemCpy(&abBufLoad[2], &s_aValues32[i].u32Base, cbBaseLoaded);
2790 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2791 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2792 if (bRing != 0)
2793 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2794 else
2795 {
2796 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2797 if ( Bs3MemCmp(&pbBufSave[0], &s_aValues32[i].cbLimit, 2) != 0
2798 || Bs3MemCmp(&pbBufSave[2], &s_aValues32[i].u32Base, cbBaseLoaded) != 0
2799 || ( cbBaseLoaded != 4
2800 && pbBufSave[2+3] != bTop16BitBase)
2801 || !ASMMemIsAllU8(&pbBufSave[8], cbIdtr, bFiller2))
2802 Bs3TestFailedF("Mismatch (%s,#3): loaded %04RX16:%08RX32, fillers %#x %#x%s, got %.*Rhxs\n",
2803 pWorker->pszDesc, s_aValues32[i].cbLimit, s_aValues32[i].u32Base, bFiller1, bFiller2,
2804 f286 ? ", 286" : "", cbIdtr*2, pbBufSave);
2805 }
2806 g_usBs3TestStep++;
2807 }
2808 }
2809
2810 /*
2811 * Slide the buffer along 8 bytes to cover misalignment.
2812 */
2813 for (off = 0; off < 8; off++)
2814 {
2815 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &abBufLoad[off]);
2816 CtxUdExpected.rbx.u = Ctx.rbx.u;
2817
2818 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2819 Bs3MemCpy(&abBufLoad[off], pbBufRestore, cbIdtr);
2820 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2821 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2822 if (bRing != 0)
2823 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2824 else
2825 {
2826 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2827 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2828 Bs3TestFailedF("Mismatch (%s, #4): expected %.*Rhxs, got %.*Rhxs\n",
2829 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2830 }
2831 g_usBs3TestStep++;
2832 }
2833 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBufLoad);
2834 CtxUdExpected.rbx.u = Ctx.rbx.u;
2835
2836 /*
2837 * Play with the selector limit if the target mode supports limit checking.
2838 * We use BS3_SEL_TEST_PAGE_00 for this.
2839 */
2840 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
2841 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
2842 {
2843 uint16_t cbLimit;
2844 uint32_t uFlatBuf = Bs3SelPtrToFlat(abBufLoad);
2845 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
2846 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
2847 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatBuf;
2848 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatBuf >> 16);
2849 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatBuf >> 24);
2850
2851 if (pWorker->fSs)
2852 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
2853 else
2854 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2855
2856 /* Expand up (normal). */
2857 for (off = 0; off < 8; off++)
2858 {
2859 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2860 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
2861 {
2862 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2863
2864 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2865 Bs3MemCpy(&abBufLoad[off], pbBufRestore, cbIdtr);
2866 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2867 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2868 if (bRing != 0)
2869 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2870 else if (off + cbIdtr <= cbLimit + 1)
2871 {
2872 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2873 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2874 Bs3TestFailedF("Mismatch (%s, #5): expected %.*Rhxs, got %.*Rhxs\n",
2875 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2876 }
2877 else if (pWorker->fSs)
2878 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2879 else
2880 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2881 g_usBs3TestStep++;
2882
2883 /* Again with zero limit and messed up base (should trigger triple fault if partially loaded). */
2884 abBufLoad[off] = abBufLoad[off + 1] = 0;
2885 abBufLoad[off + 2] |= 1;
2886 abBufLoad[off + cbIdtr - 2] ^= 0x5a;
2887 abBufLoad[off + cbIdtr - 1] ^= 0xa5;
2888 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2889 if (bRing != 0)
2890 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2891 else if (off + cbIdtr <= cbLimit + 1)
2892 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2893 else if (pWorker->fSs)
2894 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2895 else
2896 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2897 }
2898 }
2899
2900 /* Expand down (weird). Inverted valid area compared to expand up,
2901 so a limit of zero gives us a valid range of 0001..0ffffh (instead of
2902 a segment with one valid byte at 0000h). Whereas a limit of 0fffeh
2903 means one valid byte at 0ffffh, and a limit of 0ffffh means none
2904 (because in a normal expand up the 0ffffh means all 64KB are
2905 accessible). */
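        /* Worked example of the expand-down rule (16-bit data segment, D/B=0):
           valid offsets are cbLimit+1 .. 0xffff, so
               cbLimit = 0x0000 -> 0x0001..0xffff accessible,
               cbLimit = 0xfffe -> only 0xffff accessible,
               cbLimit = 0xffff -> nothing accessible.
           With the tiny off/cbIdtr values used here, the access is legal exactly
           when off > cbLimit, which is the condition checked in the loop below. */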
2906 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
2907 for (off = 0; off < 8; off++)
2908 {
2909 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2910 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
2911 {
2912 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2913
2914 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2915 Bs3MemCpy(&abBufLoad[off], pbBufRestore, cbIdtr);
2916 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2917 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2918 if (bRing != 0)
2919 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2920 else if (off > cbLimit)
2921 {
2922 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2923 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2924 Bs3TestFailedF("Mismatch (%s, #6): expected %.*Rhxs, got %.*Rhxs\n",
2925 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2926 }
2927 else if (pWorker->fSs)
2928 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2929 else
2930 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2931 g_usBs3TestStep++;
2932
2933 /* Again with zero limit and messed up base (should trigger triple fault if partially loaded). */
2934 abBufLoad[off] = abBufLoad[off + 1] = 0;
2935 abBufLoad[off + 2] |= 3;
2936 abBufLoad[off + cbIdtr - 2] ^= 0x55;
2937 abBufLoad[off + cbIdtr - 1] ^= 0xaa;
2938 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2939 if (bRing != 0)
2940 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2941 else if (off > cbLimit)
2942 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2943 else if (pWorker->fSs)
2944 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2945 else
2946 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2947 }
2948 }
2949
2950 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBufLoad);
2951 CtxUdExpected.rbx.u = Ctx.rbx.u;
2952 CtxUdExpected.ss = Ctx.ss;
2953 CtxUdExpected.ds = Ctx.ds;
2954 }
2955
2956 /*
2957 * Play with the paging.
2958 */
2959 if ( BS3_MODE_IS_PAGED(bTestMode)
2960 && (!pWorker->fSs || bRing == 3) /* SS.DPL == CPL, we'll get some tiled ring-3 selector here. */
2961 && (pbTest = (uint8_t BS3_FAR *)Bs3MemGuardedTestPageAlloc(BS3MEMKIND_TILED)) != NULL)
2962 {
2963 RTCCUINTXREG uFlatTest = Bs3SelPtrToFlat(pbTest);
2964
2965 /*
2966 * Slide the load buffer towards the trailing guard page.
2967 */
2968 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[X86_PAGE_SIZE]);
2969 CtxUdExpected.ss = Ctx.ss;
2970 CtxUdExpected.ds = Ctx.ds;
2971 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2972 {
2973 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller1, cbIdtr*2);
2974 if (off < X86_PAGE_SIZE)
2975 Bs3MemCpy(&pbTest[off], pbBufRestore, RT_MIN(X86_PAGE_SIZE - off, cbIdtr));
2976 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
2977 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2978 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2979 if (bRing != 0)
2980 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2981 else if (off + cbIdtr <= X86_PAGE_SIZE)
2982 {
2983 CtxUdExpected.rbx = Ctx.rbx;
2984 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2985 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr*2) != 0)
2986 Bs3TestFailedF("Mismatch (%s, #7): expected %.*Rhxs, got %.*Rhxs\n",
2987 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2988 }
2989 else
2990 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2991 g_usBs3TestStep++;
2992
2993 /* Again with zero limit and maybe messed up base as well (triple fault if buggy).
2994 The 386DX-40 here triple faults (or something) with off == 0xffe, nothing else. */
2995 if ( off < X86_PAGE_SIZE && off + cbIdtr > X86_PAGE_SIZE
2996 && ( off != X86_PAGE_SIZE - 2
2997 || (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) != BS3CPU_80386)
2998 )
2999 {
3000 pbTest[off] = 0;
3001 if (off + 1 < X86_PAGE_SIZE)
3002 pbTest[off + 1] = 0;
3003 if (off + 2 < X86_PAGE_SIZE)
3004 pbTest[off + 2] |= 7;
3005 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3006 if (bRing != 0)
3007 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3008 else
3009 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
3010 g_usBs3TestStep++;
3011 }
3012 }
3013
3014 /*
3015 * Now, do it the other way around. It should look normal now since reading
3016 * the limit will #PF first and nothing should be loaded.
3017 */
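    /* Illustrative example of the expectation below: with off = -2 the 16-bit
       limit word of the operand sits at pbTest[-2..-1], i.e. in the leading
       guard page, so the very first read #PFs with CR2 = uFlatTest + off and
       nothing of the descriptor-table register gets loaded. */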
3018 for (off = cbIdtr + 4; off >= -cbIdtr - 4; off--)
3019 {
3020 Bs3MemSet(pbTest, bFiller1, 48);
3021 if (off >= 0)
3022 Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
3023 else if (off + cbIdtr > 0)
3024 Bs3MemCpy(pbTest, &pbBufRestore[-off], cbIdtr + off);
3025 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
3026 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3027 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3028 if (bRing != 0)
3029 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3030 else if (off >= 0)
3031 {
3032 CtxUdExpected.rbx = Ctx.rbx;
3033 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3034 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr*2) != 0)
3035 Bs3TestFailedF("Mismatch (%s, #8): expected %.*Rhxs, got %.*Rhxs\n",
3036 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3037 }
3038 else
3039 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + off, 0 /*cbIpAdjust*/);
3040 g_usBs3TestStep++;
3041
3042 /* Again with messed up base as well (triple fault if buggy). */
3043 if (off < 0 && off > -cbIdtr)
3044 {
3045 if (off + 2 >= 0)
3046 pbTest[off + 2] |= 15;
3047 pbTest[off + cbIdtr - 1] ^= 0xaa;
3048 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3049 if (bRing != 0)
3050 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3051 else
3052 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + off, 0 /*cbIpAdjust*/);
3053 g_usBs3TestStep++;
3054 }
3055 }
3056
3057 /*
3058 * Combine paging and segment limit and check ordering.
3059 * This is kind of interesting here since the instruction seems to
3060 * actually be doing two separate reads, just like its S[IG]DT counterpart.
3061 *
3062 * Note! My 486DX4 does a DWORD limit read when the operand size is 32-bit,
3063 * that's what f486Weirdness deals with.
3064 */
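        /* A sketch (illustrative, not authoritative) of the ordering probed here:
           the limit word and the base are fetched as two separate reads, each
           subject to its own segment-limit and paging checks.  E.g. with
           cbLimit = 0x1000 and off = 0xfff the 2-byte limit read is within the
           segment limit but its second byte lands on the guard page, so a #PF
           (CR2 in the guard page) is expected rather than the #GP/#SS the full
           operand size would suggest.  On the 486 the limit read may be a DWORD,
           hence the f486Weirdness adjustment. */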
3065 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
3066 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
3067 {
3068 bool const f486Weirdness = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80486
3069 && BS3_MODE_IS_32BIT_CODE(bTestMode) == !(pWorker->fFlags & BS3CB2SIDTSGDT_F_OPSIZE);
3070 uint16_t cbLimit;
3071
3072 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
3073 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
3074 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
3075 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
3076 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
3077
3078 if (pWorker->fSs)
3079 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
3080 else
3081 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
3082
3083 /* Expand up (normal), approaching tail guard page. */
3084 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
3085 {
3086 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
3087 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
3088 {
3089 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
3090 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller1, cbIdtr * 2);
3091 if (off < X86_PAGE_SIZE)
3092 Bs3MemCpy(&pbTest[off], pbBufRestore, RT_MIN(cbIdtr, X86_PAGE_SIZE - off));
3093 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3094 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3095 if (bRing != 0)
3096 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3097 else if (off + cbIdtr <= cbLimit + 1)
3098 {
3099 /* No #GP, but maybe #PF. */
3100 if (off + cbIdtr <= X86_PAGE_SIZE)
3101 {
3102 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3103 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
3104 Bs3TestFailedF("Mismatch (%s, #9): expected %.*Rhxs, got %.*Rhxs\n",
3105 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3106 }
3107 else
3108 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
3109 }
3110 /* No #GP/#SS on limit, but instead #PF? */
3111 else if ( !f486Weirdness
3112 ? off < cbLimit && off >= 0xfff
3113 : off + 2 < cbLimit && off >= 0xffd)
3114 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
3115 /* #GP/#SS on limit or base. */
3116 else if (pWorker->fSs)
3117 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
3118 else
3119 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3120
3121 g_usBs3TestStep++;
3122
3123 /* Set DS to 0 and check that we get #GP(0). */
3124 if (!pWorker->fSs)
3125 {
3126 Ctx.ds = 0;
3127 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3128 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3129 Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
3130 g_usBs3TestStep++;
3131 }
3132 }
3133 }
3134
3135 /* Expand down. */
3136 pbTest -= X86_PAGE_SIZE; /* Note! we're backing up a page to simplify things */
3137 uFlatTest -= X86_PAGE_SIZE;
3138
3139 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
3140 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
3141 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
3142 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
3143
3144 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
3145 {
3146 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
3147 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
3148 {
3149 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
3150 Bs3MemSet(&pbTest[X86_PAGE_SIZE], bFiller1, cbIdtr * 2);
3151 if (off >= X86_PAGE_SIZE)
3152 Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
3153 else if (off > X86_PAGE_SIZE - cbIdtr)
3154 Bs3MemCpy(&pbTest[X86_PAGE_SIZE], &pbBufRestore[X86_PAGE_SIZE - off], cbIdtr - (X86_PAGE_SIZE - off));
3155 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3156 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3157 if (bRing != 0)
3158 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3159 else if (cbLimit < off && off >= X86_PAGE_SIZE)
3160 {
3161 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3162 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
3163 Bs3TestFailedF("Mismatch (%s, #10): expected %.*Rhxs, got %.*Rhxs\n",
3164 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3165 }
3166 else if (cbLimit < off && off < X86_PAGE_SIZE)
3167 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + off, 0 /*cbIpAdjust*/);
3168 else if (pWorker->fSs)
3169 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
3170 else
3171 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3172 g_usBs3TestStep++;
3173 }
3174 }
3175
3176 pbTest += X86_PAGE_SIZE;
3177 uFlatTest += X86_PAGE_SIZE;
3178 }
3179
3180 Bs3MemGuardedTestPageFree(pbTest);
3181 }
3182
3183 /*
3184 * Check non-canonical 64-bit space.
3185 */
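    /* Reference sketch (assuming 48-bit linear addressing): the non-canonical
       gap spans 0x0000800000000000..0xffff7fffffffffff, so the last canonical
       low address is 0x00007fffffffffff and the first canonical high address
       is 0xffff800000000000.  The two loops below park rBX just below and just
       above the gap and slide the operand across the boundary. */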
3186 if ( BS3_MODE_IS_64BIT_CODE(bTestMode)
3187 && (pbTest = (uint8_t BS3_FAR *)Bs3PagingSetupCanonicalTraps()) != NULL)
3188 {
3189 /* Make our references relative to the gap. */
3190 pbTest += g_cbBs3PagingOneCanonicalTrap;
3191
3192 /* Hit it from below. */
3193 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
3194 {
3195 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0x0000800000000000) + off;
3196 Bs3MemSet(&pbTest[-64], bFiller1, 64*2);
3197 Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
3198 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3199 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3200 if (off + cbIdtr > 0 || bRing != 0)
3201 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3202 else
3203 {
3204 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3205 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
3206 Bs3TestFailedF("Mismatch (%s, #11): expected %.*Rhxs, got %.*Rhxs\n",
3207 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3208 }
3209 }
3210
3211 /* Hit it from above. */
3212 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
3213 {
3214 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0xffff800000000000) + off;
3215 Bs3MemSet(&pbTest[-64], bFiller1, 64*2);
3216 Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
3217 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3218 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3219 if (off < 0 || bRing != 0)
3220 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3221 else
3222 {
3223 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3224 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
3225 Bs3TestFailedF("Mismatch (%s, #19): expected %.*Rhxs, got %.*Rhxs\n",
3226 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3227 }
3228 }
3229
3230 }
3231}
3232
3233
3234static void bs3CpuBasic2_lidt_lgdt_Common(uint8_t bTestMode, BS3CB2SIDTSGDT const BS3_FAR *paWorkers, unsigned cWorkers,
3235 void const *pvRestore, size_t cbRestore, uint8_t const *pbExpected)
3236{
3237 unsigned idx;
3238 unsigned bRing;
3239 unsigned iStep = 0;
3240
3241 /* Note! We skip the SS checks for ring-0 since we badly mess up SS in the
3242 test and don't want to bother with double faults. */
3243 for (bRing = BS3_MODE_IS_V86(bTestMode) ? 3 : 0; bRing <= 3; bRing++)
3244 {
3245 for (idx = 0; idx < cWorkers; idx++)
3246 if ( (paWorkers[idx].bMode & (bTestMode & BS3_MODE_CODE_MASK))
3247 && (!paWorkers[idx].fSs || bRing != 0 /** @todo || BS3_MODE_IS_64BIT_SYS(bTestMode)*/ )
3248 && ( !(paWorkers[idx].fFlags & BS3CB2SIDTSGDT_F_386PLUS)
3249 || ( bTestMode > BS3_MODE_PE16
3250 || ( bTestMode == BS3_MODE_PE16
3251 && (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386)) ) )
3252 {
3253 //Bs3TestPrintf("idx=%-2d fpfnWorker=%p fSs=%d cbInstr=%d\n",
3254 // idx, paWorkers[idx].fpfnWorker, paWorkers[idx].fSs, paWorkers[idx].cbInstr);
3255 g_usBs3TestStep = iStep;
3256 bs3CpuBasic2_lidt_lgdt_One(&paWorkers[idx], bTestMode, bRing, pvRestore, cbRestore, pbExpected);
3257 iStep += 1000;
3258 }
3259 if (BS3_MODE_IS_RM_SYS(bTestMode))
3260 break;
3261 }
3262}
3263
3264
3265BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_lidt)(uint8_t bMode)
3266{
3267 union
3268 {
3269 RTIDTR Idtr;
3270 uint8_t ab[32]; /* At least cbIdtr*2! */
3271 } Expected;
3272
3273 //if (bMode != BS3_MODE_LM64) return 0;
3274 bs3CpuBasic2_SetGlobals(bMode);
3275
3276 /*
3277 * Pass to common worker which is only compiled once per mode.
3278 */
3279 Bs3MemZero(&Expected, sizeof(Expected));
3280 ASMGetIDTR(&Expected.Idtr);
3281
3282 if (BS3_MODE_IS_RM_SYS(bMode))
3283 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3284 &Bs3Lidt_Ivt, sizeof(Bs3Lidt_Ivt), Expected.ab);
3285 else if (BS3_MODE_IS_16BIT_SYS(bMode))
3286 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3287 &Bs3Lidt_Idt16, sizeof(Bs3Lidt_Idt16), Expected.ab);
3288 else if (BS3_MODE_IS_32BIT_SYS(bMode))
3289 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3290 &Bs3Lidt_Idt32, sizeof(Bs3Lidt_Idt32), Expected.ab);
3291 else
3292 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3293 &Bs3Lidt_Idt64, sizeof(Bs3Lidt_Idt64), Expected.ab);
3294
3295 /*
3296 * Re-initialize the IDT.
3297 */
3298 Bs3TrapReInit();
3299 return 0;
3300}
3301
3302
3303BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_lgdt)(uint8_t bMode)
3304{
3305 union
3306 {
3307 RTGDTR Gdtr;
3308 uint8_t ab[32]; /* At least cbIdtr*2! */
3309 } Expected;
3310
3311 //if (!BS3_MODE_IS_64BIT_SYS(bMode)) return 0;
3312 bs3CpuBasic2_SetGlobals(bMode);
3313
3314 /*
3315 * Pass to common worker which is only compiled once per mode.
3316 */
3317 if (BS3_MODE_IS_RM_SYS(bMode))
3318 ASMSetGDTR((PRTGDTR)&Bs3LgdtDef_Gdt);
3319 Bs3MemZero(&Expected, sizeof(Expected));
3320 ASMGetGDTR(&Expected.Gdtr);
3321
3322 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLgdtWorkers, RT_ELEMENTS(g_aLgdtWorkers),
3323 &Bs3LgdtDef_Gdt, sizeof(Bs3LgdtDef_Gdt), Expected.ab);
3324
3325 /*
3326 * Re-initialize the IDT.
3327 */
3328 Bs3TrapReInit();
3329 return 0;
3330}
3331
3332typedef union IRETBUF
3333{
3334 uint64_t au64[6]; /* max req is 5 */
3335 uint32_t au32[12]; /* max req is 9 */
3336 uint16_t au16[24]; /* max req is 5 */
3337 uint8_t ab[48];
3338} IRETBUF;
3339typedef IRETBUF BS3_FAR *PIRETBUF;
3340
3341
3342static void iretbuf_SetupFrame(PIRETBUF pIretBuf, unsigned const cbPop,
3343 uint16_t uCS, uint64_t uPC, uint32_t fEfl, uint16_t uSS, uint64_t uSP)
3344{
3345 if (cbPop == 2)
3346 {
3347 pIretBuf->au16[0] = (uint16_t)uPC;
3348 pIretBuf->au16[1] = uCS;
3349 pIretBuf->au16[2] = (uint16_t)fEfl;
3350 pIretBuf->au16[3] = (uint16_t)uSP;
3351 pIretBuf->au16[4] = uSS;
3352 }
3353 else if (cbPop != 8)
3354 {
3355 pIretBuf->au32[0] = (uint32_t)uPC;
3356 pIretBuf->au16[1*2] = uCS;
3357 pIretBuf->au32[2] = (uint32_t)fEfl;
3358 pIretBuf->au32[3] = (uint32_t)uSP;
3359 pIretBuf->au16[4*2] = uSS;
3360 }
3361 else
3362 {
3363 pIretBuf->au64[0] = uPC;
3364 pIretBuf->au16[1*4] = uCS;
3365 pIretBuf->au64[2] = fEfl;
3366 pIretBuf->au64[3] = uSP;
3367 pIretBuf->au16[4*4] = uSS;
3368 }
3369}
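/* Illustrative only -- the three same-CPL frame images built above, lowest
   address (popped first) on top:

       cbPop=2 (iret)      cbPop=4 (iretd)      cbPop=8 (o64 iret)
       +0  IP              +0  EIP              +0   RIP
       +2  CS              +4  CS               +8   CS
       +4  FLAGS           +8  EFLAGS           +16  RFLAGS
       +6  SP              +12 ESP              +24  RSP
       +8  SS              +16 SS               +32  SS

   Only IP/CS/FLAGS are consumed for a same-CPL return outside 64-bit mode; the
   SS:SP pair is used for returns to an outer ring and always in 64-bit mode
   (cf. cbSameCplFrame in the worker below). */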
3370
3371
3372static void bs3CpuBasic2_iret_Worker(uint8_t bTestMode, FPFNBS3FAR pfnIret, unsigned const cbPop,
3373 PIRETBUF pIretBuf, const char BS3_FAR *pszDesc)
3374{
3375 BS3TRAPFRAME TrapCtx;
3376 BS3REGCTX Ctx;
3377 BS3REGCTX CtxUdExpected;
3378 BS3REGCTX TmpCtx;
3379 BS3REGCTX TmpCtxExpected;
3380 uint8_t abLowUd[8];
3381 uint8_t abLowIret[8];
3382 FPFNBS3FAR pfnUdLow = (FPFNBS3FAR)abLowUd;
3383 FPFNBS3FAR pfnIretLow = (FPFNBS3FAR)abLowIret;
3384 unsigned const cbSameCplFrame = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 5*cbPop : 3*cbPop;
3385 bool const fUseLowCode = cbPop == 2 && !BS3_MODE_IS_16BIT_CODE(bTestMode);
3386 int iRingDst;
3387 int iRingSrc;
3388 uint16_t uDplSs;
3389 uint16_t uRplCs;
3390 uint16_t uRplSs;
3391// int i;
3392 uint8_t BS3_FAR *pbTest;
3393
3394 NOREF(abLowUd);
3395#define IRETBUF_SET_SEL(a_idx, a_uValue) \
3396 do { *(uint16_t BS3_FAR *)&pIretBuf->ab[a_idx * cbPop] = (a_uValue); } while (0)
3397#define IRETBUF_SET_REG(a_idx, a_uValue) \
3398 do { uint8_t const BS3_FAR *pbTmp = &pIretBuf->ab[a_idx * cbPop]; \
3399 if (cbPop == 2) *(uint16_t BS3_FAR *)pbTmp = (uint16_t)(a_uValue); \
3400 else if (cbPop != 8) *(uint32_t BS3_FAR *)pbTmp = (uint32_t)(a_uValue); \
3401 else *(uint64_t BS3_FAR *)pbTmp = (a_uValue); \
3402 } while (0)
3403
3404 /* make sure they're allocated */
3405 Bs3MemZero(&Ctx, sizeof(Ctx));
3406 Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
3407 Bs3MemZero(&TmpCtx, sizeof(TmpCtx));
3408 Bs3MemZero(&TmpCtxExpected, sizeof(TmpCtxExpected));
3409 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
3410
3411 /*
3412 * When dealing with 16-bit irets in 32-bit or 64-bit mode, we must have
3413 * copies of both iret and ud in the first 64KB of memory. The stack is
3414 * below 64KB, so we'll just copy the instructions onto the stack.
3415 */
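    /* Clarifying note (not in the original comment): with cbPop == 2 the iret
       only pops a 16-bit IP, so in 32/64-bit code the return target must be
       addressable with a 16-bit offset from the flat code segment base, i.e.
       lie in the first 64KB -- which the on-stack copies below guarantee. */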
3416 Bs3MemCpy(abLowUd, bs3CpuBasic2_ud2, 4);
3417 Bs3MemCpy(abLowIret, pfnIret, 4);
3418
3419 /*
3420 * Create a context (stack is irrelevant, we'll mainly be using pIretBuf).
3421 * - Point the context at our iret instruction.
3422 * - Point SS:xSP at pIretBuf.
3423 */
3424 Bs3RegCtxSaveEx(&Ctx, bTestMode, 0);
3425 if (!fUseLowCode)
3426 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pfnIret);
3427 else
3428 Bs3RegCtxSetRipCsFromCurPtr(&Ctx, pfnIretLow);
3429 if (BS3_MODE_IS_16BIT_SYS(bTestMode))
3430 g_uBs3TrapEipHint = Ctx.rip.u32;
3431 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsp, &Ctx.ss, pIretBuf);
3432
3433 /*
3434 * The first success (UD) context keeps the same code bit-count as the iret.
3435 */
3436 Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
3437 if (!fUseLowCode)
3438 Bs3RegCtxSetRipCsFromLnkPtr(&CtxUdExpected, bs3CpuBasic2_ud2);
3439 else
3440 Bs3RegCtxSetRipCsFromCurPtr(&CtxUdExpected, pfnUdLow);
3441 CtxUdExpected.rsp.u += cbSameCplFrame;
3442
3443 /*
3444 * Check that it works at all.
3445 */
3446 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3447 CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3448
3449 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3450 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3451 g_usBs3TestStep++;
3452
3453 if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
3454 {
3455 /* Selectors are modified when switching rings, so we need to know
3456 what we're dealing with there. */
3457 if ( !BS3_SEL_IS_IN_R0_RANGE(Ctx.cs) || !BS3_SEL_IS_IN_R0_RANGE(Ctx.ss)
3458 || !BS3_SEL_IS_IN_R0_RANGE(Ctx.ds) || !BS3_SEL_IS_IN_R0_RANGE(Ctx.es))
3459 Bs3TestFailedF("Expected R0 CS, SS, DS and ES; not %#x, %#x, %#x and %#x\n", Ctx.cs, Ctx.ss, Ctx.ds, Ctx.es);
3460 if (Ctx.fs || Ctx.gs)
3461 Bs3TestFailed("Expected R0 FS and GS to be 0!\n");
3462
3463 /*
3464 * Test returning to outer rings if protected mode.
3465 */
3466 Bs3MemCpy(&TmpCtx, &Ctx, sizeof(TmpCtx));
3467 Bs3MemCpy(&TmpCtxExpected, &CtxUdExpected, sizeof(TmpCtxExpected));
3468 for (iRingDst = 3; iRingDst >= 0; iRingDst--)
3469 {
3470 Bs3RegCtxConvertToRingX(&TmpCtxExpected, iRingDst);
3471 TmpCtxExpected.ds = iRingDst ? 0 : TmpCtx.ds;
3472 TmpCtx.es = TmpCtxExpected.es;
3473 iretbuf_SetupFrame(pIretBuf, cbPop, TmpCtxExpected.cs, TmpCtxExpected.rip.u,
3474 TmpCtxExpected.rflags.u32, TmpCtxExpected.ss, TmpCtxExpected.rsp.u);
3475 Bs3TrapSetJmpAndRestore(&TmpCtx, &TrapCtx);
3476 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &TmpCtxExpected);
3477 g_usBs3TestStep++;
3478 }
3479
3480 /*
3481 * Check CS.RPL and SS.RPL.
3482 */
3483 for (iRingDst = 3; iRingDst >= 0; iRingDst--)
3484 {
3485 uint16_t const uDstSsR0 = (CtxUdExpected.ss & BS3_SEL_RING_SUB_MASK) + BS3_SEL_R0_FIRST;
3486 Bs3MemCpy(&TmpCtxExpected, &CtxUdExpected, sizeof(TmpCtxExpected));
3487 Bs3RegCtxConvertToRingX(&TmpCtxExpected, iRingDst);
3488 for (iRingSrc = 3; iRingSrc >= 0; iRingSrc--)
3489 {
3490 Bs3MemCpy(&TmpCtx, &Ctx, sizeof(TmpCtx));
3491 Bs3RegCtxConvertToRingX(&TmpCtx, iRingSrc);
3492 TmpCtx.es = TmpCtxExpected.es;
3493 TmpCtxExpected.ds = iRingDst != iRingSrc ? 0 : TmpCtx.ds;
3494 for (uRplCs = 0; uRplCs <= 3; uRplCs++)
3495 {
3496 uint16_t const uSrcEs = TmpCtx.es;
3497 uint16_t const uDstCs = (TmpCtxExpected.cs & X86_SEL_MASK_OFF_RPL) | uRplCs;
3498 //Bs3TestPrintf("dst=%d src=%d rplCS=%d\n", iRingDst, iRingSrc, uRplCs);
3499
3500 /* CS.RPL */
3501 iretbuf_SetupFrame(pIretBuf, cbPop, uDstCs, TmpCtxExpected.rip.u, TmpCtxExpected.rflags.u32,
3502 TmpCtxExpected.ss, TmpCtxExpected.rsp.u);
3503 Bs3TrapSetJmpAndRestore(&TmpCtx, &TrapCtx);
3504 if (uRplCs == iRingDst && iRingDst >= iRingSrc)
3505 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &TmpCtxExpected);
3506 else
3507 {
3508 if (iRingDst < iRingSrc)
3509 TmpCtx.es = 0;
3510 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &TmpCtx, uDstCs & X86_SEL_MASK_OFF_RPL);
3511 TmpCtx.es = uSrcEs;
3512 }
3513 g_usBs3TestStep++;
3514
3515 /* SS.RPL */
3516 if (iRingDst != iRingSrc || BS3_MODE_IS_64BIT_CODE(bTestMode))
3517 {
3518 uint16_t uSavedDstSs = TmpCtxExpected.ss;
3519 for (uRplSs = 0; uRplSs <= 3; uRplSs++)
3520 {
3521 /* SS.DPL (iRingDst == CS.DPL) */
3522 for (uDplSs = 0; uDplSs <= 3; uDplSs++)
3523 {
3524 uint16_t const uDstSs = ((uDplSs << BS3_SEL_RING_SHIFT) | uRplSs) + uDstSsR0;
3525 //Bs3TestPrintf("dst=%d src=%d rplCS=%d rplSS=%d dplSS=%d dst %04x:%08RX64 %08RX32 %04x:%08RX64\n",
3526 // iRingDst, iRingSrc, uRplCs, uRplSs, uDplSs, uDstCs, TmpCtxExpected.rip.u,
3527 // TmpCtxExpected.rflags.u32, uDstSs, TmpCtxExpected.rsp.u);
3528
3529 iretbuf_SetupFrame(pIretBuf, cbPop, uDstCs, TmpCtxExpected.rip.u,
3530 TmpCtxExpected.rflags.u32, uDstSs, TmpCtxExpected.rsp.u);
3531 Bs3TrapSetJmpAndRestore(&TmpCtx, &TrapCtx);
3532 if (uRplCs != iRingDst || iRingDst < iRingSrc)
3533 {
3534 if (iRingDst < iRingSrc)
3535 TmpCtx.es = 0;
3536 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &TmpCtx, uDstCs & X86_SEL_MASK_OFF_RPL);
3537 }
3538 else if (uRplSs != iRingDst || uDplSs != iRingDst)
3539 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &TmpCtx, uDstSs & X86_SEL_MASK_OFF_RPL);
3540 else
3541 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &TmpCtxExpected);
3542 TmpCtx.es = uSrcEs;
3543 g_usBs3TestStep++;
3544 }
3545 }
3546
3547 TmpCtxExpected.ss = uSavedDstSs;
3548 }
3549 }
3550 }
3551 }
3552 }
3553
3554 /*
3555 * Special 64-bit checks.
3556 */
3557 if (BS3_MODE_IS_64BIT_CODE(bTestMode))
3558 {
3559 /* The VM flag is completely ignored. */
3560 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3561 CtxUdExpected.rflags.u32 | X86_EFL_VM, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3562 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3563 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3564 g_usBs3TestStep++;
3565
3566 /* The NT flag can be loaded just fine. */
3567 CtxUdExpected.rflags.u32 |= X86_EFL_NT;
3568 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3569 CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3570 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3571 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3572 CtxUdExpected.rflags.u32 &= ~X86_EFL_NT;
3573 g_usBs3TestStep++;
3574
3575 /* However, we'll #GP(0) if it's already set (in RFLAGS) when executing IRET. */
3576 Ctx.rflags.u32 |= X86_EFL_NT;
3577 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3578 CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3579 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3580 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3581 g_usBs3TestStep++;
3582
3583 /* The NT flag #GP(0) should trump all other exceptions - pit it against #PF. */
3584 pbTest = (uint8_t BS3_FAR *)Bs3MemGuardedTestPageAlloc(BS3MEMKIND_TILED);
3585 if (pbTest != NULL)
3586 {
3587 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsp, &Ctx.ss, &pbTest[X86_PAGE_SIZE]);
3588 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3589 CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3590 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3591 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3592 g_usBs3TestStep++;
3593
3594 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsp, &Ctx.ss, pIretBuf);
3595 Bs3MemGuardedTestPageFree(pbTest);
3596 }
3597 Ctx.rflags.u32 &= ~X86_EFL_NT;
3598 }
3599}
3600
3601
3602BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_iret)(uint8_t bMode)
3603{
3604 struct
3605 {
3606 uint8_t abExtraStack[4096]; /**< we've got ~30KB of stack, so 4KB for the trap handlers++ is not a problem. */
3607 IRETBUF IRetBuf;
3608 uint8_t abGuard[32];
3609 } uBuf;
3610 size_t cbUnused;
3611
3612 //if (bMode != BS3_MODE_LM64) return BS3TESTDOMODE_SKIPPED;
3613 bs3CpuBasic2_SetGlobals(bMode);
3614
3615 /*
3616 * Primary instruction form.
3617 */
3618 Bs3MemSet(&uBuf, 0xaa, sizeof(uBuf));
3619 Bs3MemSet(uBuf.abGuard, 0x88, sizeof(uBuf.abGuard));
3620 if (BS3_MODE_IS_16BIT_CODE(bMode))
3621 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret, 2, &uBuf.IRetBuf, "iret");
3622 else if (BS3_MODE_IS_32BIT_CODE(bMode))
3623 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret, 4, &uBuf.IRetBuf, "iretd");
3624 else
3625 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_rexw, 8, &uBuf.IRetBuf, "o64 iret");
3626
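    /* Descriptive note: since the whole buffer was pre-filled with 0xaa, the
       first byte still equal to 0xaa marks the deepest point the test and its
       trap handlers pushed to; cbUnused is therefore the stack headroom left. */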
3627 BS3_ASSERT(ASMMemIsAllU8(uBuf.abGuard, sizeof(uBuf.abGuard), 0x88));
3628 cbUnused = (uintptr_t)ASMMemFirstMismatchingU8(uBuf.abExtraStack, sizeof(uBuf.abExtraStack) + sizeof(uBuf.IRetBuf), 0xaa)
3629 - (uintptr_t)uBuf.abExtraStack;
3630 if (cbUnused < 2048)
3631 Bs3TestFailedF("cbUnused=%u #%u\n", cbUnused, 1);
3632
3633 /*
3634 * Secondary variation: opsize prefixed.
3635 */
3636 Bs3MemSet(&uBuf, 0xaa, sizeof(uBuf));
3637 Bs3MemSet(uBuf.abGuard, 0x88, sizeof(uBuf.abGuard));
3638 if (BS3_MODE_IS_16BIT_CODE(bMode) && (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386)
3639 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_opsize, 4, &uBuf.IRetBuf, "o32 iret");
3640 else if (BS3_MODE_IS_32BIT_CODE(bMode))
3641 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_opsize, 2, &uBuf.IRetBuf, "o16 iret");
3642 else if (BS3_MODE_IS_64BIT_CODE(bMode))
3643 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret, 4, &uBuf.IRetBuf, "iretd");
3644 BS3_ASSERT(ASMMemIsAllU8(uBuf.abGuard, sizeof(uBuf.abGuard), 0x88));
3645 cbUnused = (uintptr_t)ASMMemFirstMismatchingU8(uBuf.abExtraStack, sizeof(uBuf.abExtraStack) + sizeof(uBuf.IRetBuf), 0xaa)
3646 - (uintptr_t)uBuf.abExtraStack;
3647 if (cbUnused < 2048)
3648 Bs3TestFailedF("cbUnused=%u #%u\n", cbUnused, 2);
3649
3650 /*
3651 * Third variation: 16-bit in 64-bit mode (truly unlikely)
3652 */
3653 if (BS3_MODE_IS_64BIT_CODE(bMode))
3654 {
3655 Bs3MemSet(&uBuf, 0xaa, sizeof(uBuf));
3656 Bs3MemSet(uBuf.abGuard, 0x88, sizeof(uBuf.abGuard));
3657 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_opsize, 2, &uBuf.IRetBuf, "o16 iret");
3658 BS3_ASSERT(ASMMemIsAllU8(uBuf.abGuard, sizeof(uBuf.abGuard), 0x88));
3659 cbUnused = (uintptr_t)ASMMemFirstMismatchingU8(uBuf.abExtraStack, sizeof(uBuf.abExtraStack) + sizeof(uBuf.IRetBuf), 0xaa)
3660 - (uintptr_t)uBuf.abExtraStack;
3661 if (cbUnused < 2048)
3662 Bs3TestFailedF("cbUnused=%u #%u\n", cbUnused, 3);
3663 }
3664
3665 return 0;
3666}
3667
3668
3669
3670/*********************************************************************************************************************************
3671* Non-far JMP & CALL Tests *
3672*********************************************************************************************************************************/
3673#define PROTO_ALL(a_Template) \
3674 FNBS3FAR a_Template ## _c16, \
3675 a_Template ## _c32, \
3676 a_Template ## _c64
3677PROTO_ALL(bs3CpuBasic2_jmp_jb__ud2);
3678PROTO_ALL(bs3CpuBasic2_jmp_jb_back__ud2);
3679PROTO_ALL(bs3CpuBasic2_jmp_jv__ud2);
3680PROTO_ALL(bs3CpuBasic2_jmp_jv_back__ud2);
3681PROTO_ALL(bs3CpuBasic2_jmp_ind_mem__ud2);
3682PROTO_ALL(bs3CpuBasic2_jmp_ind_xAX__ud2);
3683PROTO_ALL(bs3CpuBasic2_jmp_ind_xDI__ud2);
3684FNBS3FAR bs3CpuBasic2_jmp_ind_r9__ud2_c64;
3685PROTO_ALL(bs3CpuBasic2_call_jv__ud2);
3686PROTO_ALL(bs3CpuBasic2_call_jv_back__ud2);
3687PROTO_ALL(bs3CpuBasic2_call_ind_mem__ud2);
3688PROTO_ALL(bs3CpuBasic2_call_ind_xAX__ud2);
3689PROTO_ALL(bs3CpuBasic2_call_ind_xDI__ud2);
3690FNBS3FAR bs3CpuBasic2_call_ind_r9__ud2_c64;
3691
3692PROTO_ALL(bs3CpuBasic2_jmp_opsize_begin);
3693PROTO_ALL(bs3CpuBasic2_jmp_jb_opsize__ud2);
3694PROTO_ALL(bs3CpuBasic2_jmp_jb_opsize_back__ud2);
3695PROTO_ALL(bs3CpuBasic2_jmp_jv_opsize__ud2);
3696PROTO_ALL(bs3CpuBasic2_jmp_jv_opsize_back__ud2);
3697PROTO_ALL(bs3CpuBasic2_jmp_ind_mem_opsize__ud2);
3698FNBS3FAR bs3CpuBasic2_jmp_ind_mem_opsize__ud2__intel_c64;
3699PROTO_ALL(bs3CpuBasic2_jmp_ind_xAX_opsize__ud2);
3700PROTO_ALL(bs3CpuBasic2_call_jv_opsize__ud2);
3701PROTO_ALL(bs3CpuBasic2_call_jv_opsize_back__ud2);
3702PROTO_ALL(bs3CpuBasic2_call_ind_mem_opsize__ud2);
3703FNBS3FAR bs3CpuBasic2_call_ind_mem_opsize__ud2__intel_c64;
3704PROTO_ALL(bs3CpuBasic2_call_ind_xAX_opsize__ud2);
3705PROTO_ALL(bs3CpuBasic2_jmp_opsize_end);
3706#undef PROTO_ALL
3707
3708FNBS3FAR bs3CpuBasic2_jmptext16_start;
3709
3710FNBS3FAR bs3CpuBasic2_jmp_target_wrap_forward;
3711FNBS3FAR bs3CpuBasic2_jmp_jb_wrap_forward__ud2;
3712FNBS3FAR bs3CpuBasic2_jmp_jb_opsize_wrap_forward__ud2;
3713FNBS3FAR bs3CpuBasic2_jmp_jv16_wrap_forward__ud2;
3714FNBS3FAR bs3CpuBasic2_jmp_jv16_opsize_wrap_forward__ud2;
3715FNBS3FAR bs3CpuBasic2_call_jv16_wrap_forward__ud2;
3716FNBS3FAR bs3CpuBasic2_call_jv16_opsize_wrap_forward__ud2;
3717
3718FNBS3FAR bs3CpuBasic2_jmp_target_wrap_backward;
3719FNBS3FAR bs3CpuBasic2_jmp_jb_wrap_backward__ud2;
3720FNBS3FAR bs3CpuBasic2_jmp_jb_opsize_wrap_backward__ud2;
3721FNBS3FAR bs3CpuBasic2_jmp_jv16_wrap_backward__ud2;
3722FNBS3FAR bs3CpuBasic2_jmp_jv16_opsize_wrap_backward__ud2;
3723FNBS3FAR bs3CpuBasic2_call_jv16_wrap_backward__ud2;
3724FNBS3FAR bs3CpuBasic2_call_jv16_opsize_wrap_backward__ud2;
3725
3726
3727
3728/**
3729 * Entrypoint for non-far JMP & CALL tests.
3730 *
3731 * @returns 0 or BS3TESTDOMODE_SKIPPED.
3732 * @param bMode The CPU mode we're testing.
3733 *
3734 * @note When testing v8086 code, we'll be running in v8086 mode. So, careful
3735 * with control registers and such.
3736 */
3737BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_jmp_call)(uint8_t bMode)
3738{
3739 BS3TRAPFRAME TrapCtx;
3740 BS3REGCTX Ctx;
3741 BS3REGCTX CtxExpected;
3742 unsigned iTest;
3743 unsigned const cMaxRecompRuns = g_cBs3ThresholdNativeRecompiler ? g_cBs3ThresholdNativeRecompiler : 1;
3744 unsigned iRecompRun;
3745
3746 /* make sure they're allocated */
3747 Bs3MemZero(&Ctx, sizeof(Ctx));
3748 Bs3MemZero(&CtxExpected, sizeof(Ctx));
3749 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
3750
3751 bs3CpuBasic2_SetGlobals(bMode);
3752
3753 /*
3754 * Create a context.
3755 */
3756 Bs3RegCtxSaveEx(&Ctx, bMode, 768);
3757 Bs3MemCpy(&CtxExpected, &Ctx, sizeof(CtxExpected));
3758
3759 /*
3760 * 16-bit tests.
3761 *
3762 * When the opsize is 16-bit, relative jumps do 16-bit calculations and
3763 * modify IP. This means that it is not possible to trigger a segment
3764 * limit #GP(0) when the limit is set to 0xffff.
3765 */
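    /* Worked example (illustrative) of the wrap behaviour: with a 16-bit operand
       size a short jmp at IP 0xfffc with displacement +8 computes
       0xfffc + 2 + 8 = 0x10006, which is truncated to IP 0x0006.  With the limit
       at 0xffff every such IP stays within the limit, so only the opsize-prefixed
       (32-bit) wrap variants below are expected to #GP(0). */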
3766 if (BS3_MODE_IS_16BIT_CODE(bMode))
3767 {
3768 static struct
3769 {
3770 int8_t iWrap;
3771 bool fOpSizePfx;
3772 int8_t iGprIndirect;
3773 bool fCall;
3774 FPFNBS3FAR pfnTest;
3775 }
3776 const s_aTests[] =
3777 {
3778 { 0, false, -1, false, bs3CpuBasic2_jmp_jb__ud2_c16, },
3779 { 0, false, -1, false, bs3CpuBasic2_jmp_jb_back__ud2_c16, },
3780 { 0, true, -1, false, bs3CpuBasic2_jmp_jb_opsize__ud2_c16, },
3781 { 0, true, -1, false, bs3CpuBasic2_jmp_jb_opsize_back__ud2_c16, },
3782 { 0, false, -1, false, bs3CpuBasic2_jmp_jv__ud2_c16, },
3783 { 0, false, -1, false, bs3CpuBasic2_jmp_jv_back__ud2_c16, },
3784 { 0, true, -1, false, bs3CpuBasic2_jmp_jv_opsize__ud2_c16, },
3785 { 0, true, -1, false, bs3CpuBasic2_jmp_jv_opsize_back__ud2_c16, },
3786 { 0, false, -1, false, bs3CpuBasic2_jmp_ind_mem__ud2_c16, },
3787 { 0, true, -1, false, bs3CpuBasic2_jmp_ind_mem_opsize__ud2_c16, },
3788 { 0, false, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX__ud2_c16, },
3789 { 0, false, X86_GREG_xDI, false, bs3CpuBasic2_jmp_ind_xDI__ud2_c16, },
3790 { 0, true, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX_opsize__ud2_c16, },
3791 { 0, false, -1, true, bs3CpuBasic2_call_jv__ud2_c16, },
3792 { 0, false, -1, true, bs3CpuBasic2_call_jv_back__ud2_c16, },
3793 { 0, true, -1, true, bs3CpuBasic2_call_jv_opsize__ud2_c16, },
3794 { 0, true, -1, true, bs3CpuBasic2_call_jv_opsize_back__ud2_c16, },
3795 { 0, false, -1, true, bs3CpuBasic2_call_ind_mem__ud2_c16, },
3796 { 0, true, -1, true, bs3CpuBasic2_call_ind_mem_opsize__ud2_c16, },
3797 { 0, false, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX__ud2_c16, },
3798 { 0, false, X86_GREG_xDI, true, bs3CpuBasic2_call_ind_xDI__ud2_c16, },
3799 { 0, true, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX_opsize__ud2_c16, },
3800
3801 { -1, false, -1, false, bs3CpuBasic2_jmp_jb_wrap_backward__ud2, },
3802 { +1, false, -1, false, bs3CpuBasic2_jmp_jb_wrap_forward__ud2, },
3803 { -1, true, -1, false, bs3CpuBasic2_jmp_jb_opsize_wrap_backward__ud2, },
3804 { +1, true, -1, false, bs3CpuBasic2_jmp_jb_opsize_wrap_forward__ud2, },
3805
3806 { -1, false, -1, false, bs3CpuBasic2_jmp_jv16_wrap_backward__ud2, },
3807 { +1, false, -1, false, bs3CpuBasic2_jmp_jv16_wrap_forward__ud2, },
3808 { -1, true, -1, false, bs3CpuBasic2_jmp_jv16_opsize_wrap_backward__ud2, },
3809 { +1, true, -1, false, bs3CpuBasic2_jmp_jv16_opsize_wrap_forward__ud2, },
3810 { -1, false, -1, true, bs3CpuBasic2_call_jv16_wrap_backward__ud2, },
3811 { +1, false, -1, true, bs3CpuBasic2_call_jv16_wrap_forward__ud2, },
3812 { -1, true, -1, true, bs3CpuBasic2_call_jv16_opsize_wrap_backward__ud2, },
3813 { +1, true, -1, true, bs3CpuBasic2_call_jv16_opsize_wrap_forward__ud2, },
3814 };
3815
3816 if (!BS3_MODE_IS_RM_OR_V86(bMode))
3817 Bs3SelSetup16BitCode(&Bs3GdteSpare03, Bs3SelLnkPtrToFlat(bs3CpuBasic2_jmptext16_start), 0);
3818
3819 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
3820 {
3821 uint64_t uGprSaved;
3822 if (s_aTests[iTest].iWrap == 0)
3823 {
3824 uint8_t const BS3_FAR *fpbCode;
3825 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, s_aTests[iTest].pfnTest);
3826 fpbCode = (uint8_t const BS3_FAR *)BS3_FP_MAKE(Ctx.cs, Ctx.rip.u16);
3827 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
3828 }
3829 else
3830 {
3831 if (BS3_MODE_IS_RM_OR_V86(bMode))
3832 Ctx.cs = BS3_FP_SEG(s_aTests[iTest].pfnTest);
3833 else
3834 Ctx.cs = BS3_SEL_SPARE_03;
3835 Ctx.rip.u = BS3_FP_OFF(s_aTests[iTest].pfnTest);
3836 if (s_aTests[iTest].fOpSizePfx)
3837 CtxExpected.rip.u = Ctx.rip.u;
3838 else if (s_aTests[iTest].iWrap < 0)
3839 CtxExpected.rip.u = BS3_FP_OFF(bs3CpuBasic2_jmp_target_wrap_backward);
3840 else
3841 CtxExpected.rip.u = BS3_FP_OFF(bs3CpuBasic2_jmp_target_wrap_forward);
3842 }
3843 CtxExpected.cs = Ctx.cs;
3844 if (s_aTests[iTest].iGprIndirect >= 0)
3845 {
3846 uGprSaved = (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u;
3847 (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u
3848 = (&CtxExpected.rax)[s_aTests[iTest].iGprIndirect].u = CtxExpected.rip.u;
3849 }
3850 CtxExpected.rsp.u = Ctx.rsp.u;
3851 if (s_aTests[iTest].fCall && (s_aTests[iTest].iWrap == 0 || !s_aTests[iTest].fOpSizePfx))
3852 CtxExpected.rsp.u -= s_aTests[iTest].fOpSizePfx ? 4 : 2;
3853 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u);
3854
3855 for (iRecompRun = 0; iRecompRun < cMaxRecompRuns; iRecompRun++)
3856 {
3857 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3858 if (s_aTests[iTest].iWrap == 0 || !s_aTests[iTest].fOpSizePfx)
3859 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
3860 else
3861 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, 0);
3862 }
3863 g_usBs3TestStep++;
3864
3865 /* Again single stepping: */
3866 //Bs3TestPrintf("stepping...\n");
3867 Ctx.rflags.u16 |= X86_EFL_TF;
3868 CtxExpected.rflags.u16 = Ctx.rflags.u16;
3869 for (iRecompRun = 0; iRecompRun < cMaxRecompRuns; iRecompRun++)
3870 {
3871 Bs3RegSetDr6(0);
3872 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3873 if (s_aTests[iTest].iWrap == 0 || !s_aTests[iTest].fOpSizePfx)
3874 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
3875 else
3876 {
3877 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, 0);
3878 bs3CpuBasic2_CheckDr6InitVal();
3879 }
3880 }
3881 Ctx.rflags.u16 &= ~X86_EFL_TF;
3882 CtxExpected.rflags.u16 = Ctx.rflags.u16;
3883 g_usBs3TestStep++;
3884
3885 if (s_aTests[iTest].iGprIndirect >= 0)
3886 (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u = (&CtxExpected.rax)[s_aTests[iTest].iGprIndirect].u = uGprSaved;
3887 }
3888
3889 /* Limit the wraparound CS segment to exclude bs3CpuBasic2_jmp_target_wrap_backward
3890 and run the backward wrapping tests. */
3891 if (!BS3_MODE_IS_RM_OR_V86(bMode))
3892 {
3893 Bs3GdteSpare03.Gen.u16LimitLow = BS3_FP_OFF(bs3CpuBasic2_jmp_target_wrap_backward) - 1;
3894 CtxExpected.cs = Ctx.cs = BS3_SEL_SPARE_03;
3895 CtxExpected.rsp.u = Ctx.rsp.u;
3896 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
3897 if (s_aTests[iTest].iWrap < 0)
3898 {
3899 CtxExpected.rip.u = Ctx.rip.u = BS3_FP_OFF(s_aTests[iTest].pfnTest);
3900 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 v1\n", Ctx.cs, Ctx.rip.u);
3901 for (iRecompRun = 0; iRecompRun < cMaxRecompRuns; iRecompRun++)
3902 {
3903 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3904 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, 0);
3905 }
3906 g_usBs3TestStep++;
3907 }
3908
3909 /* Do another round where we put the limit in the middle of the UD2
3910 instruction we're jumping to: */
3911 Bs3GdteSpare03.Gen.u16LimitLow = BS3_FP_OFF(bs3CpuBasic2_jmp_target_wrap_backward);
3912 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
3913 if (s_aTests[iTest].iWrap < 0)
3914 {
3915 Ctx.rip.u = BS3_FP_OFF(s_aTests[iTest].pfnTest);
3916 if (s_aTests[iTest].fOpSizePfx)
3917 CtxExpected.rip.u = Ctx.rip.u;
3918 else
3919 CtxExpected.rip.u = BS3_FP_OFF(bs3CpuBasic2_jmp_target_wrap_backward);
3920 CtxExpected.rsp.u = Ctx.rsp.u;
3921 if (s_aTests[iTest].fCall && (s_aTests[iTest].iWrap == 0 || !s_aTests[iTest].fOpSizePfx))
3922 CtxExpected.rsp.u -= s_aTests[iTest].fOpSizePfx ? 4 : 2;
3923 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 v2\n", Ctx.cs, Ctx.rip.u);
3924 for (iRecompRun = 0; iRecompRun < cMaxRecompRuns; iRecompRun++)
3925 {
3926 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3927 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, 0);
3928 }
3929 g_usBs3TestStep++;
3930 }
3931 }
3932
3933 }
3934 /*
3935 * 32-bit & 64-bit tests.
3936 *
3937 * When the opsize prefix is applied here, IP is updated and bits 63:16
3938 * cleared. However, in 64-bit mode Intel ignores the opsize prefix,
3939 * whereas AMD doesn't and it works like you'd expect.
3940 */
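    /* Hedged sketch of the opsize-prefixed cases handled below: in 32-bit code an
       o16 near jmp/call truncates the target, e.g. 0x00012345 becomes IP 0x2345
       with bits 31:16 cleared.  In 64-bit code Intel ignores the o16 prefix (the
       full RIP is used) while AMD truncates like in 32-bit code, which is why
       there are separate "intel" _c64 rows and _c32 rows plus the fIgnPfx flag. */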
3941 else
3942 {
3943 static struct
3944 {
3945 uint8_t cBits;
3946 bool fOpSizePfx;
3947 bool fIgnPfx;
3948 int8_t iGprIndirect;
3949 bool fCall;
3950 FPFNBS3FAR pfnTest;
3951 }
3952 const s_aTests[] =
3953 {
3954 { 32, false, false, -1, false, bs3CpuBasic2_jmp_jb__ud2_c32, },
3955 { 32, false, false, -1, false, bs3CpuBasic2_jmp_jb_back__ud2_c32, },
3956 { 32, true, false, -1, false, bs3CpuBasic2_jmp_jb_opsize__ud2_c32, },
3957 { 32, true, false, -1, false, bs3CpuBasic2_jmp_jb_opsize_back__ud2_c32, },
3958 { 32, false, false, -1, false, bs3CpuBasic2_jmp_jv__ud2_c32, },
3959 { 32, false, false, -1, false, bs3CpuBasic2_jmp_jv_back__ud2_c32, },
3960 { 32, true, false, -1, false, bs3CpuBasic2_jmp_jv_opsize__ud2_c32, },
3961 { 32, true, false, -1, false, bs3CpuBasic2_jmp_jv_opsize_back__ud2_c32, },
3962 { 32, false, false, -1, false, bs3CpuBasic2_jmp_ind_mem__ud2_c32, },
3963 { 32, true, false, -1, false, bs3CpuBasic2_jmp_ind_mem_opsize__ud2_c32, },
3964 { 32, false, false, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX__ud2_c32, },
3965 { 32, false, false, X86_GREG_xDI, false, bs3CpuBasic2_jmp_ind_xDI__ud2_c32, },
3966 { 32, true, false, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX_opsize__ud2_c32, },
3967 { 32, false, false, -1, true, bs3CpuBasic2_call_jv__ud2_c32, },
3968 { 32, false, false, -1, true, bs3CpuBasic2_call_jv_back__ud2_c32, },
3969 { 32, true, false, -1, true, bs3CpuBasic2_call_jv_opsize__ud2_c32, },
3970 { 32, true, false, -1, true, bs3CpuBasic2_call_jv_opsize_back__ud2_c32, },
3971 { 32, false, false, -1, true, bs3CpuBasic2_call_ind_mem__ud2_c32, },
3972 { 32, true, false, -1, true, bs3CpuBasic2_call_ind_mem_opsize__ud2_c32, },
3973 { 32, false, false, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX__ud2_c32, },
3974 { 32, false, false, X86_GREG_xDI, true, bs3CpuBasic2_call_ind_xDI__ud2_c32, },
3975 { 32, true, false, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX_opsize__ud2_c32, },
3976 /* 64bit/Intel: Use the _c64 tests, which are written to ignore the o16 prefix. */
3977 { 64, false, true, -1, false, bs3CpuBasic2_jmp_jb__ud2_c64, },
3978 { 64, false, true, -1, false, bs3CpuBasic2_jmp_jb_back__ud2_c64, },
3979 { 64, true, true, -1, false, bs3CpuBasic2_jmp_jb_opsize__ud2_c64, },
3980 { 64, true, true, -1, false, bs3CpuBasic2_jmp_jb_opsize_back__ud2_c64, },
3981 { 64, false, true, -1, false, bs3CpuBasic2_jmp_jv__ud2_c64, },
3982 { 64, false, true, -1, false, bs3CpuBasic2_jmp_jv_back__ud2_c64, },
3983 { 64, true, true, -1, false, bs3CpuBasic2_jmp_jv_opsize__ud2_c64, },
3984 { 64, true, true, -1, false, bs3CpuBasic2_jmp_jv_opsize_back__ud2_c64, },
3985 { 64, false, true, -1, false, bs3CpuBasic2_jmp_ind_mem__ud2_c64, },
3986 { 64, true, true, -1, false, bs3CpuBasic2_jmp_ind_mem_opsize__ud2__intel_c64, },
3987 { 64, false, true, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX__ud2_c64, },
3988 { 64, false, true, X86_GREG_xDI, false, bs3CpuBasic2_jmp_ind_xDI__ud2_c64, },
3989 { 64, false, true, X86_GREG_x9, false, bs3CpuBasic2_jmp_ind_r9__ud2_c64, },
3990 { 64, true, true, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX_opsize__ud2_c64, }, /* no intel version needed */
3991 { 64, false, true, -1, true, bs3CpuBasic2_call_jv__ud2_c64, },
3992 { 64, false, true, -1, true, bs3CpuBasic2_call_jv_back__ud2_c64, },
3993 { 64, true, true, -1, true, bs3CpuBasic2_call_jv_opsize__ud2_c64, },
3994 { 64, true, true, -1, true, bs3CpuBasic2_call_jv_opsize_back__ud2_c64, },
3995 { 64, false, true, -1, true, bs3CpuBasic2_call_ind_mem__ud2_c64, },
3996 { 64, true, true, -1, true, bs3CpuBasic2_call_ind_mem_opsize__ud2__intel_c64,},
3997 { 64, false, true, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX__ud2_c64, },
3998 { 64, false, true, X86_GREG_xDI, true, bs3CpuBasic2_call_ind_xDI__ud2_c64, },
3999 { 64, false, true, X86_GREG_x9, true, bs3CpuBasic2_call_ind_r9__ud2_c64, },
4000 { 64, true, true, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX_opsize__ud2_c64, }, /* no intel version needed */
4001 /* 64bit/AMD: Use the _c32 tests. */
4002 { 64, false, false, -1, false, bs3CpuBasic2_jmp_jb__ud2_c32, },
4003 { 64, false, false, -1, false, bs3CpuBasic2_jmp_jb_back__ud2_c32, },
4004 { 64, true, false, -1, false, bs3CpuBasic2_jmp_jb_opsize__ud2_c32, },
4005 { 64, true, false, -1, false, bs3CpuBasic2_jmp_jb_opsize_back__ud2_c32, },
4006 { 64, false, false, -1, false, bs3CpuBasic2_jmp_jv__ud2_c32, },
4007 { 64, false, false, -1, false, bs3CpuBasic2_jmp_jv_back__ud2_c32, },
4008 { 64, true, false, -1, false, bs3CpuBasic2_jmp_jv_opsize__ud2_c32, },
4009 { 64, true, false, -1, false, bs3CpuBasic2_jmp_jv_opsize_back__ud2_c32, },
4010 { 64, false, false, -1, false, bs3CpuBasic2_jmp_ind_mem__ud2_c64, }, /* using c64 here */
4011 { 64, true, false, -1, false, bs3CpuBasic2_jmp_ind_mem_opsize__ud2_c64, }, /* ditto */
4012 { 64, false, false, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX__ud2_c64, }, /* ditto */
4013 { 64, false, false, X86_GREG_xDI, false, bs3CpuBasic2_jmp_ind_xDI__ud2_c64, }, /* ditto */
4014 { 64, false, false, X86_GREG_x9, false, bs3CpuBasic2_jmp_ind_r9__ud2_c64, }, /* ditto */
4015 { 64, true, false, X86_GREG_xAX, false, bs3CpuBasic2_jmp_ind_xAX_opsize__ud2_c64, }, /* ditto */
4016 { 64, false, false, -1, true, bs3CpuBasic2_call_jv__ud2_c32, }, /* using c32 again */
4017 { 64, false, false, -1, true, bs3CpuBasic2_call_jv_back__ud2_c32, },
4018 { 64, true, false, -1, true, bs3CpuBasic2_call_jv_opsize__ud2_c32, },
4019 { 64, true, false, -1, true, bs3CpuBasic2_call_jv_opsize_back__ud2_c32, },
4020 { 64, false, false, -1, true, bs3CpuBasic2_call_ind_mem__ud2_c64, }, /* using c64 here */
4021 { 64, true, false, -1, true, bs3CpuBasic2_call_ind_mem_opsize__ud2_c64, }, /* ditto */
4022 { 64, false, false, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX__ud2_c64, }, /* ditto */
4023 { 64, false, false, X86_GREG_xDI, true, bs3CpuBasic2_call_ind_xDI__ud2_c64, }, /* ditto */
4024 { 64, false, false, X86_GREG_x9, true, bs3CpuBasic2_call_ind_r9__ud2_c64, }, /* ditto */
4025 { 64, true, false, X86_GREG_xAX, true, bs3CpuBasic2_call_ind_xAX_opsize__ud2_c64, }, /* ditto */
4026 };
4027 uint8_t const cBits = BS3_MODE_IS_64BIT_CODE(bMode) ? 64 : 32;
4028 BS3CPUVENDOR const enmCpuVendor = Bs3GetCpuVendor();
4029 bool const fIgnPfx = cBits == 64 && enmCpuVendor == BS3CPUVENDOR_INTEL; /** @todo what does VIA do? */
4030
4031 /* Prepare a copy of the UD2 instructions in low memory for opsize prefixed tests. */
4032 uint16_t const offLow = BS3_FP_OFF(bs3CpuBasic2_jmp_opsize_begin_c32);
4033 uint16_t const cbLow = BS3_FP_OFF(bs3CpuBasic2_jmp_opsize_end_c64) - offLow;
4034 uint8_t BS3_FAR * const pbCode16 = BS3_MAKE_PROT_R0PTR_FROM_FLAT(BS3_ADDR_BS3TEXT16);
4035 uint8_t BS3_FAR * const pbLow = BS3_FP_MAKE(BS3_SEL_TILED_R0, 0);
4036 if (offLow < 0x600 || offLow + cbLow >= BS3_ADDR_STACK_R2)
4037 Bs3TestFailedF("Opsize overridden jumps are out of place: %#x LB %#x\n", offLow, cbLow);
4038 Bs3MemSet(&pbLow[offLow], 0xcc /*int3*/, cbLow);
4039 if (!fIgnPfx)
4040 {
4041 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4042 if (s_aTests[iTest].fOpSizePfx && s_aTests[iTest].cBits == cBits && s_aTests[iTest].fIgnPfx == fIgnPfx)
4043 {
4044 uint16_t const offFn = BS3_FP_OFF(s_aTests[iTest].pfnTest);
4045 uint16_t const offUd = offFn + (int16_t)(int8_t)pbCode16[offFn - 1];
4046 BS3_ASSERT(offUd - offLow + 1 < cbLow);
4047 pbCode16[offUd] = 0xf1; /* replace original ud2 with icebp */
4048 pbCode16[offUd + 1] = 0xf1;
4049 pbLow[offUd] = 0x0f; /* plant ud2 in low memory */
4050 pbLow[offUd + 1] = 0x0b;
4051 }
4052 }
4053
4054 /* Run the tests. */
4055 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4056 {
4057 if (s_aTests[iTest].cBits == cBits && s_aTests[iTest].fIgnPfx == fIgnPfx)
4058 {
4059 uint64_t uGprSaved;
4060 uint8_t const BS3_FAR *fpbCode = Bs3SelLnkPtrToCurPtr(s_aTests[iTest].pfnTest);
4061 Ctx.rip.u = Bs3SelLnkPtrToFlat(s_aTests[iTest].pfnTest);
4062 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4063 if (s_aTests[iTest].iGprIndirect >= 0)
4064 {
4065 uGprSaved = (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u;
4066 (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u
4067 = (&CtxExpected.rax)[s_aTests[iTest].iGprIndirect].u = CtxExpected.rip.u;
4068 }
4069 if (s_aTests[iTest].fOpSizePfx && !fIgnPfx)
4070 CtxExpected.rip.u &= UINT16_MAX;
4071 CtxExpected.rsp.u = Ctx.rsp.u;
4072 if (s_aTests[iTest].fCall)
4073 CtxExpected.rsp.u -= s_aTests[iTest].cBits == 64 ? 8
4074 : !s_aTests[iTest].fOpSizePfx ? 4 : 2;
4075
4076 //Bs3TestPrintf("cs:rip=%04RX16:%08RX64\n", Ctx.cs, Ctx.rip.u);
4077
4078 if (BS3_MODE_IS_16BIT_SYS(bMode))
4079 g_uBs3TrapEipHint = s_aTests[iTest].fOpSizePfx ? 0 : Ctx.rip.u32;
4080 for (iRecompRun = 0; iRecompRun < cMaxRecompRuns; iRecompRun++)
4081 {
4082 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4083 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4084 }
4085 g_usBs3TestStep++;
4086
4087 /* Again single stepping: */
4088 //Bs3TestPrintf("stepping...\n");
4089 Ctx.rflags.u16 |= X86_EFL_TF;
4090 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4091 for (iRecompRun = 0; iRecompRun < cMaxRecompRuns; iRecompRun++)
4092 {
4093 Bs3RegSetDr6(0);
4094 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4095 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4096 }
4097 Ctx.rflags.u16 &= ~X86_EFL_TF;
4098 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4099 g_usBs3TestStep++;
4100
4101 if (s_aTests[iTest].iGprIndirect >= 0)
4102 (&Ctx.rax)[s_aTests[iTest].iGprIndirect].u = (&CtxExpected.rax)[s_aTests[iTest].iGprIndirect].u = uGprSaved;
4103 }
4104 }
4105
4106 Bs3MemSet(&pbLow[offLow], 0xcc /*int3*/, cbLow);
4107 }
4108
4109 return 0;
4110}
4111
4112
4113/*********************************************************************************************************************************
4114* FAR JMP & FAR CALL Tests *
4115*********************************************************************************************************************************/
4116#define PROTO_ALL(a_Template) \
4117 FNBS3FAR a_Template ## _c16, \
4118 a_Template ## _c32, \
4119 a_Template ## _c64
4120PROTO_ALL(bs3CpuBasic2_far_jmp_call_opsize_begin);
4121
4122FNBS3FAR bs3CpuBasic2_jmpf_ptr_rm__ud2_c16;
4123PROTO_ALL(bs3CpuBasic2_jmpf_ptr_same_r0__ud2);
4124PROTO_ALL(bs3CpuBasic2_jmpf_ptr_same_r1__ud2);
4125PROTO_ALL(bs3CpuBasic2_jmpf_ptr_same_r2__ud2);
4126PROTO_ALL(bs3CpuBasic2_jmpf_ptr_same_r3__ud2);
4127PROTO_ALL(bs3CpuBasic2_jmpf_ptr_opsize_flipbit_r0__ud2);
4128PROTO_ALL(bs3CpuBasic2_jmpf_ptr_r0_cs64__ud2);
4129PROTO_ALL(bs3CpuBasic2_jmpf_ptr_r0_cs16l__ud2);
4130
4131FNBS3FAR bs3CpuBasic2_callf_ptr_rm__ud2_c16;
4132PROTO_ALL(bs3CpuBasic2_callf_ptr_same_r0__ud2);
4133PROTO_ALL(bs3CpuBasic2_callf_ptr_same_r1__ud2);
4134PROTO_ALL(bs3CpuBasic2_callf_ptr_same_r2__ud2);
4135PROTO_ALL(bs3CpuBasic2_callf_ptr_same_r3__ud2);
4136PROTO_ALL(bs3CpuBasic2_callf_ptr_opsize_flipbit_r0__ud2);
4137PROTO_ALL(bs3CpuBasic2_callf_ptr_r0_cs64__ud2);
4138PROTO_ALL(bs3CpuBasic2_callf_ptr_r0_cs16l__ud2);
4139
4140FNBS3FAR bs3CpuBasic2_jmpf_mem_rm__ud2_c16;
4141PROTO_ALL(bs3CpuBasic2_jmpf_mem_same_r0__ud2);
4142PROTO_ALL(bs3CpuBasic2_jmpf_mem_same_r1__ud2);
4143PROTO_ALL(bs3CpuBasic2_jmpf_mem_same_r2__ud2);
4144PROTO_ALL(bs3CpuBasic2_jmpf_mem_same_r3__ud2);
4145PROTO_ALL(bs3CpuBasic2_jmpf_mem_r0_cs16__ud2);
4146PROTO_ALL(bs3CpuBasic2_jmpf_mem_r0_cs32__ud2);
4147PROTO_ALL(bs3CpuBasic2_jmpf_mem_r0_cs64__ud2);
4148PROTO_ALL(bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2);
4149
4150FNBS3FAR bs3CpuBasic2_jmpf_mem_same_r0__ud2_intel_c64;
4151FNBS3FAR bs3CpuBasic2_jmpf_mem_same_r1__ud2_intel_c64;
4152FNBS3FAR bs3CpuBasic2_jmpf_mem_same_r2__ud2_intel_c64;
4153FNBS3FAR bs3CpuBasic2_jmpf_mem_same_r3__ud2_intel_c64;
4154FNBS3FAR bs3CpuBasic2_jmpf_mem_r0_cs16__ud2_intel_c64;
4155FNBS3FAR bs3CpuBasic2_jmpf_mem_r0_cs32__ud2_intel_c64;
4156FNBS3FAR bs3CpuBasic2_jmpf_mem_r0_cs64__ud2_intel_c64;
4157FNBS3FAR bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2_intel_c64;
4158
4159FNBS3FAR bs3CpuBasic2_callf_mem_rm__ud2_c16;
4160PROTO_ALL(bs3CpuBasic2_callf_mem_same_r0__ud2);
4161PROTO_ALL(bs3CpuBasic2_callf_mem_same_r1__ud2);
4162PROTO_ALL(bs3CpuBasic2_callf_mem_same_r2__ud2);
4163PROTO_ALL(bs3CpuBasic2_callf_mem_same_r3__ud2);
4164PROTO_ALL(bs3CpuBasic2_callf_mem_r0_cs16__ud2);
4165PROTO_ALL(bs3CpuBasic2_callf_mem_r0_cs32__ud2);
4166PROTO_ALL(bs3CpuBasic2_callf_mem_r0_cs64__ud2);
4167PROTO_ALL(bs3CpuBasic2_callf_mem_r0_cs16l__ud2);
4168
4169FNBS3FAR bs3CpuBasic2_callf_mem_same_r0__ud2_intel_c64;
4170FNBS3FAR bs3CpuBasic2_callf_mem_same_r1__ud2_intel_c64;
4171FNBS3FAR bs3CpuBasic2_callf_mem_same_r2__ud2_intel_c64;
4172FNBS3FAR bs3CpuBasic2_callf_mem_same_r3__ud2_intel_c64;
4173FNBS3FAR bs3CpuBasic2_callf_mem_r0_cs16__ud2_intel_c64;
4174FNBS3FAR bs3CpuBasic2_callf_mem_r0_cs32__ud2_intel_c64;
4175FNBS3FAR bs3CpuBasic2_callf_mem_r0_cs64__ud2_intel_c64;
4176FNBS3FAR bs3CpuBasic2_callf_mem_r0_cs16l__ud2_intel_c64;
4177
4178PROTO_ALL(bs3CpuBasic2_far_jmp_call_opsize_end);
4179#undef PROTO_ALL
4180
4181
4182
4183/**
4184 * Entrypoint for FAR JMP & FAR CALL tests.
4185 *
4186 * @returns 0 or BS3TESTDOMODE_SKIPPED.
4187 * @param bMode The CPU mode we're testing.
4188 *
4189 * @note When testing v8086 code, we'll be running in v8086 mode. So, be careful
4190 * with control registers and such.
4191 */
4192BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_far_jmp_call)(uint8_t bMode)
4193{
4194 BS3TRAPFRAME TrapCtx;
4195 BS3REGCTX Ctx;
4196 BS3REGCTX CtxExpected;
4197 unsigned iTest;
4198
4199 /* make sure they're allocated */
4200 Bs3MemZero(&Ctx, sizeof(Ctx));
4201 Bs3MemZero(&CtxExpected, sizeof(Ctx));
4202 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
4203
4204 bs3CpuBasic2_SetGlobals(bMode);
4205
4206 /*
4207 * Create a context.
4208 */
4209 Bs3RegCtxSaveEx(&Ctx, bMode, 768);
4210 Bs3MemCpy(&CtxExpected, &Ctx, sizeof(CtxExpected));
4211
4212 if (Ctx.rax.u8 == 0 || Ctx.rax.u8 == 0xff) /* for salc & the 64-bit detection */
4213 CtxExpected.rax.u8 = Ctx.rax.u8 = 0x42;
4214
4215 /*
4216 * Set up spare selectors.
4217 */
4218 Bs3GdteSpare00 = Bs3Gdte_CODE16;
4219 Bs3GdteSpare00.Gen.u1Long = 1;
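    /* With the L bit set, BS3_SEL_SPARE_00 acts as a 64-bit code segment (base 0)
       in long mode, but as the ordinary 16-bit CODE16 segment (base 0x10000)
       everywhere else; the *_cs16l_* test variants below rely on this. */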
4220
4221 /*
4222 * 16-bit tests.
4223 */
4224 if (BS3_MODE_IS_16BIT_CODE(bMode))
4225 {
4226 static struct
4227 {
4228 bool fRmOrV86;
4229 bool fCall;
4230 uint16_t uDstSel;
4231 uint8_t uDstBits;
4232 bool fOpSizePfx;
4233 FPFNBS3FAR pfnTest;
4234 }
4235 const s_aTests[] =
4236 {
4237 { true, false, BS3_SEL_TEXT16, 16, false, bs3CpuBasic2_jmpf_ptr_rm__ud2_c16, },
4238 { false, false, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_jmpf_ptr_same_r0__ud2_c16, },
4239 { false, false, BS3_SEL_R1_CS16 | 1, 16, false, bs3CpuBasic2_jmpf_ptr_same_r1__ud2_c16, },
4240 { false, false, BS3_SEL_R2_CS16 | 2, 16, false, bs3CpuBasic2_jmpf_ptr_same_r2__ud2_c16, },
4241 { false, false, BS3_SEL_R3_CS16 | 3, 16, false, bs3CpuBasic2_jmpf_ptr_same_r3__ud2_c16, },
4242 { false, false, BS3_SEL_R0_CS32, 32, true, bs3CpuBasic2_jmpf_ptr_opsize_flipbit_r0__ud2_c16, },
4243 { false, false, BS3_SEL_R0_CS64, 64, true, bs3CpuBasic2_jmpf_ptr_r0_cs64__ud2_c16, }, /* 16-bit CS, except in LM. */
4244 { false, false, BS3_SEL_SPARE_00, 64, false, bs3CpuBasic2_jmpf_ptr_r0_cs16l__ud2_c16, }, /* 16-bit CS, except in LM. */
4245
4246 { true, true, BS3_SEL_TEXT16, 16, false, bs3CpuBasic2_callf_ptr_rm__ud2_c16, },
4247 { false, true, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_callf_ptr_same_r0__ud2_c16, },
4248 { false, true, BS3_SEL_R1_CS16 | 1, 16, false, bs3CpuBasic2_callf_ptr_same_r1__ud2_c16, },
4249 { false, true, BS3_SEL_R2_CS16 | 2, 16, false, bs3CpuBasic2_callf_ptr_same_r2__ud2_c16, },
4250 { false, true, BS3_SEL_R3_CS16 | 3, 16, false, bs3CpuBasic2_callf_ptr_same_r3__ud2_c16, },
4251 { false, true, BS3_SEL_R0_CS32, 32, true, bs3CpuBasic2_callf_ptr_opsize_flipbit_r0__ud2_c16, },
4252 { false, true, BS3_SEL_R0_CS64, 64, true, bs3CpuBasic2_callf_ptr_r0_cs64__ud2_c16, }, /* 16-bit CS, except in LM. */
4253 { false, true, BS3_SEL_SPARE_00, 64, false, bs3CpuBasic2_callf_ptr_r0_cs16l__ud2_c16, }, /* 16-bit CS, except in LM. */
4254
4255 { true, false, BS3_SEL_TEXT16, 16, false, bs3CpuBasic2_jmpf_mem_rm__ud2_c16, },
4256 { false, false, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_jmpf_mem_same_r0__ud2_c16, },
4257 { false, false, BS3_SEL_R1_CS16 | 1, 16, false, bs3CpuBasic2_jmpf_mem_same_r1__ud2_c16, },
4258 { false, false, BS3_SEL_R2_CS16 | 2, 16, false, bs3CpuBasic2_jmpf_mem_same_r2__ud2_c16, },
4259 { false, false, BS3_SEL_R3_CS16 | 3, 16, false, bs3CpuBasic2_jmpf_mem_same_r3__ud2_c16, },
4260 { false, false, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_jmpf_mem_r0_cs16__ud2_c16, },
4261 { false, false, BS3_SEL_R0_CS32, 32, true, bs3CpuBasic2_jmpf_mem_r0_cs32__ud2_c16, },
4262 { false, false, BS3_SEL_R0_CS64, 64, true, bs3CpuBasic2_jmpf_mem_r0_cs64__ud2_c16, }, /* 16-bit CS, except in LM. */
4263 { false, false, BS3_SEL_SPARE_00, 64, false, bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2_c16, }, /* 16-bit CS, except in LM. */
4264
4265 { true, true, BS3_SEL_TEXT16, 16, false, bs3CpuBasic2_callf_mem_rm__ud2_c16, },
4266 { false, true, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_callf_mem_same_r0__ud2_c16, },
4267 { false, true, BS3_SEL_R1_CS16 | 1, 16, false, bs3CpuBasic2_callf_mem_same_r1__ud2_c16, },
4268 { false, true, BS3_SEL_R2_CS16 | 2, 16, false, bs3CpuBasic2_callf_mem_same_r2__ud2_c16, },
4269 { false, true, BS3_SEL_R3_CS16 | 3, 16, false, bs3CpuBasic2_callf_mem_same_r3__ud2_c16, },
4270 { false, true, BS3_SEL_R0_CS16, 16, false, bs3CpuBasic2_callf_mem_r0_cs16__ud2_c16, },
4271 { false, true, BS3_SEL_R0_CS32, 32, true, bs3CpuBasic2_callf_mem_r0_cs32__ud2_c16, },
4272 { false, true, BS3_SEL_R0_CS64, 64, true, bs3CpuBasic2_callf_mem_r0_cs64__ud2_c16, }, /* 16-bit CS, except in LM. */
4273 { false, true, BS3_SEL_SPARE_00, 64, false, bs3CpuBasic2_callf_mem_r0_cs16l__ud2_c16, }, /* 16-bit CS, except in LM. */
4274 };
4275 bool const fRmOrV86 = BS3_MODE_IS_RM_OR_V86(bMode);
4276
4277 /* Prepare a copy of the SALC & UD2 instructions in low memory for opsize
4278 prefixed tests jumping to BS3_SEL_SPARE_00 when in 64-bit mode, because
4279 it'll be a 64-bit CS then with base=0 instead of a CS16 with base=0x10000. */
4280 if (BS3_MODE_IS_64BIT_SYS(bMode))
4281 {
4282 uint16_t const offLow = BS3_FP_OFF(bs3CpuBasic2_far_jmp_call_opsize_begin_c16);
4283 uint16_t const cbLow = BS3_FP_OFF(bs3CpuBasic2_far_jmp_call_opsize_end_c16) - offLow;
4284 uint8_t BS3_FAR * const pbLow = BS3_FP_MAKE(BS3_SEL_TILED_R0, 0);
4285 uint8_t BS3_FAR * const pbCode16 = BS3_MAKE_PROT_R0PTR_FROM_FLAT(BS3_ADDR_BS3TEXT16);
4286 if (offLow < 0x600 || offLow + cbLow >= BS3_ADDR_STACK_R2)
4287 Bs3TestFailedF("Opsize overridden jumps/calls are out of place: %#x LB %#x\n", offLow, cbLow);
4288 Bs3MemSet(&pbLow[offLow], 0xcc /*int3*/, cbLow);
4289 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4290 if (s_aTests[iTest].uDstSel == BS3_SEL_SPARE_00 && s_aTests[iTest].uDstBits == 64)
4291 {
4292 uint16_t const offFn = BS3_FP_OFF(s_aTests[iTest].pfnTest);
4293 uint16_t const offUd = offFn + (int16_t)(int8_t)pbCode16[offFn - 1];
4294 BS3_ASSERT(offUd - offLow + 1 < cbLow);
4295 pbLow[offUd - 1] = 0xd6; /* plant salc + ud2 in low memory */
4296 pbLow[offUd] = 0x0f;
4297 pbLow[offUd + 1] = 0x0b;
4298 }
4299 }
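        /* Why the SALC byte: SALC (0d6h) is an invalid opcode in 64-bit mode, so
           when the destination really is a long mode code segment the #UD is
           raised on the SALC itself, one byte before the UD2, whereas in a
           16-bit code segment SALC executes and loads AL from CF before the UD2
           traps.  The test loop below encodes that expectation roughly like
           this (sketch):

           @code
                if (s_aTests[iTest].uDstBits == 64 && !fGp)
                {
                    if (BS3_MODE_IS_64BIT_SYS(bMode))
                        CtxExpected.rip.u -= 1;   // #UD reported at the SALC byte.
                    else
                        CtxExpected.rax.u8 = CtxExpected.rflags.u & X86_EFL_CF ? 0xff : 0x00;
                }
           @endcode
        */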
4300
4301 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4302 if (s_aTests[iTest].fRmOrV86 == fRmOrV86)
4303 {
4304 uint64_t const uSavedRsp = Ctx.rsp.u;
4305 bool const fGp = (s_aTests[iTest].uDstSel & X86_SEL_RPL) != 0;
4306 uint8_t const BS3_FAR *fpbCode;
4307
4308 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, s_aTests[iTest].pfnTest);
4309 fpbCode = (uint8_t const BS3_FAR *)BS3_FP_MAKE(Ctx.cs, Ctx.rip.u16);
4310 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4311 if ( s_aTests[iTest].uDstBits == 32
4312 || ( s_aTests[iTest].uDstBits == 64
4313 && !BS3_MODE_IS_16BIT_SYS(bMode)
4314 && s_aTests[iTest].uDstSel != BS3_SEL_SPARE_00))
4315 CtxExpected.rip.u += BS3_ADDR_BS3TEXT16;
4316 if (s_aTests[iTest].uDstSel == BS3_SEL_SPARE_00 && s_aTests[iTest].uDstBits == 64 && BS3_MODE_IS_64BIT_SYS(bMode))
4317 CtxExpected.rip.u &= UINT16_MAX;
4318 CtxExpected.cs = s_aTests[iTest].uDstSel;
4319 if (fGp)
4320 {
4321 CtxExpected.rip.u = Ctx.rip.u;
4322 CtxExpected.cs = Ctx.cs;
4323 }
4324 g_uBs3TrapEipHint = CtxExpected.rip.u32;
4325 CtxExpected.rsp.u = Ctx.rsp.u;
4326 if (s_aTests[iTest].fCall && !fGp)
4327 CtxExpected.rsp.u -= s_aTests[iTest].fOpSizePfx ? 8 : 4;
4328 if (s_aTests[iTest].uDstBits == 64 && !fGp)
4329 {
4330 if (BS3_MODE_IS_64BIT_SYS(bMode))
4331 CtxExpected.rip.u -= 1;
4332 else
4333 CtxExpected.rax.u8 = CtxExpected.rflags.u & X86_EFL_CF ? 0xff : 0x00;
4334 }
4335 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
4336 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4337 if (!fGp)
4338 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4339 else
4340 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4341 Ctx.rsp.u = uSavedRsp;
4342 g_usBs3TestStep++;
4343
4344 /* Again single stepping: */
4345 //Bs3TestPrintf("stepping...\n");
4346 Bs3RegSetDr6(X86_DR6_INIT_VAL);
4347 Ctx.rflags.u16 |= X86_EFL_TF;
4348 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4349 CtxExpected.rax.u = Ctx.rax.u;
4350 if (s_aTests[iTest].uDstBits == 64 && !fGp && !BS3_MODE_IS_64BIT_SYS(bMode))
4351 CtxExpected.rip.u -= 1;
4352 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4353 if (!fGp)
4354 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4355 else
4356 {
4357 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4358 bs3CpuBasic2_CheckDr6InitVal();
4359 }
4360 Ctx.rflags.u16 &= ~X86_EFL_TF;
4361 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4362 Ctx.rsp.u = uSavedRsp;
4363 g_usBs3TestStep++;
4364 }
4365 }
4366 /*
4367 * 32-bit tests.
4368 */
4369 else if (BS3_MODE_IS_32BIT_CODE(bMode))
4370 {
4371 static struct
4372 {
4373 bool fCall;
4374 uint16_t uDstSel;
4375 uint8_t uDstBits;
4376 bool fOpSizePfx;
4377 FPFNBS3FAR pfnTest;
4378 }
4379 const s_aTests[] =
4380 {
4381 { false, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_jmpf_ptr_same_r0__ud2_c32, },
4382 { false, BS3_SEL_R1_CS32 | 1, 32, false, bs3CpuBasic2_jmpf_ptr_same_r1__ud2_c32, },
4383 { false, BS3_SEL_R2_CS32 | 2, 32, false, bs3CpuBasic2_jmpf_ptr_same_r2__ud2_c32, },
4384 { false, BS3_SEL_R3_CS32 | 3, 32, false, bs3CpuBasic2_jmpf_ptr_same_r3__ud2_c32, },
4385 { false, BS3_SEL_R0_CS16, 16, true, bs3CpuBasic2_jmpf_ptr_opsize_flipbit_r0__ud2_c32, },
4386 { false, BS3_SEL_R0_CS64, 64, false, bs3CpuBasic2_jmpf_ptr_r0_cs64__ud2_c32, }, /* 16-bit CS, except in LM. */
4387 { false, BS3_SEL_SPARE_00, 64, true, bs3CpuBasic2_jmpf_ptr_r0_cs16l__ud2_c32, }, /* 16-bit CS, except in LM. */
4388
4389 { true, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_callf_ptr_same_r0__ud2_c32, },
4390 { true, BS3_SEL_R1_CS32 | 1, 32, false, bs3CpuBasic2_callf_ptr_same_r1__ud2_c32, },
4391 { true, BS3_SEL_R2_CS32 | 2, 32, false, bs3CpuBasic2_callf_ptr_same_r2__ud2_c32, },
4392 { true, BS3_SEL_R3_CS32 | 3, 32, false, bs3CpuBasic2_callf_ptr_same_r3__ud2_c32, },
4393 { true, BS3_SEL_R0_CS16, 16, true, bs3CpuBasic2_callf_ptr_opsize_flipbit_r0__ud2_c32, },
4394 { true, BS3_SEL_R0_CS64, 64, false, bs3CpuBasic2_callf_ptr_r0_cs64__ud2_c32, }, /* 16-bit CS, except in LM. */
4395 { true, BS3_SEL_SPARE_00, 64, true, bs3CpuBasic2_callf_ptr_r0_cs16l__ud2_c32, }, /* 16-bit CS, except in LM. */
4396
4397 { false, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_jmpf_mem_same_r0__ud2_c32, },
4398 { false, BS3_SEL_R1_CS32 | 1, 32, false, bs3CpuBasic2_jmpf_mem_same_r1__ud2_c32, },
4399 { false, BS3_SEL_R2_CS32 | 2, 32, false, bs3CpuBasic2_jmpf_mem_same_r2__ud2_c32, },
4400 { false, BS3_SEL_R3_CS32 | 3, 32, false, bs3CpuBasic2_jmpf_mem_same_r3__ud2_c32, },
4401 { false, BS3_SEL_R0_CS16, 16, true, bs3CpuBasic2_jmpf_mem_r0_cs16__ud2_c32, },
4402 { false, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_jmpf_mem_r0_cs32__ud2_c32, },
4403 { false, BS3_SEL_R0_CS64, 64, false, bs3CpuBasic2_jmpf_mem_r0_cs64__ud2_c32, }, /* 16-bit CS, except in LM. */
4404 { false, BS3_SEL_SPARE_00, 64, true, bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2_c32, }, /* 16-bit CS, except in LM. */
4405
4406 { true, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_callf_mem_same_r0__ud2_c32, },
4407 { true, BS3_SEL_R1_CS32 | 1, 32, false, bs3CpuBasic2_callf_mem_same_r1__ud2_c32, },
4408 { true, BS3_SEL_R2_CS32 | 2, 32, false, bs3CpuBasic2_callf_mem_same_r2__ud2_c32, },
4409 { true, BS3_SEL_R3_CS32 | 3, 32, false, bs3CpuBasic2_callf_mem_same_r3__ud2_c32, },
4410 { true, BS3_SEL_R0_CS16, 16, true, bs3CpuBasic2_callf_mem_r0_cs16__ud2_c32, },
4411 { true, BS3_SEL_R0_CS32, 32, false, bs3CpuBasic2_callf_mem_r0_cs32__ud2_c32, },
4412 { true, BS3_SEL_R0_CS64, 64, false, bs3CpuBasic2_callf_mem_r0_cs64__ud2_c32, }, /* 16-bit CS, except in LM. */
4413 { true, BS3_SEL_SPARE_00, 64, true, bs3CpuBasic2_callf_mem_r0_cs16l__ud2_c32, }, /* 16-bit CS, except in LM. */
4414 };
4415
4416 /* Prepare a copy of the SALC & UD2 instructions in low memory for opsize
4417 prefixed tests jumping to BS3_SEL_SPARE_00 when in 64-bit mode, because
4418 it'll be a 64-bit CS then with base=0 instead of a CS16 with base=0x10000. */
4419 if (BS3_MODE_IS_64BIT_SYS(bMode))
4420 {
4421 uint16_t const offLow = BS3_FP_OFF(bs3CpuBasic2_far_jmp_call_opsize_begin_c32);
4422 uint16_t const cbLow = BS3_FP_OFF(bs3CpuBasic2_far_jmp_call_opsize_end_c32) - offLow;
4423 uint8_t BS3_FAR * const pbLow = BS3_FP_MAKE(BS3_SEL_TILED_R0, 0);
4424 uint8_t BS3_FAR * const pbCode16 = BS3_MAKE_PROT_R0PTR_FROM_FLAT(BS3_ADDR_BS3TEXT16);
4425 if (offLow < 0x600 || offLow + cbLow >= BS3_ADDR_STACK_R2)
4426 Bs3TestFailedF("Opsize overridden jumps/calls are out of place: %#x LB %#x\n", offLow, cbLow);
4427 Bs3MemSet(&pbLow[offLow], 0xcc /*int3*/, cbLow);
4428 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4429 if (s_aTests[iTest].uDstSel == BS3_SEL_SPARE_00 && s_aTests[iTest].uDstBits == 64)
4430 {
4431 uint16_t const offFn = BS3_FP_OFF(s_aTests[iTest].pfnTest);
4432 uint16_t const offUd = offFn + (int16_t)(int8_t)pbCode16[offFn - 1];
4433 BS3_ASSERT(offUd - offLow + 1 < cbLow);
4434 pbLow[offUd - 1] = 0xd6; /* plant salc + ud2 in low memory */
4435 pbLow[offUd] = 0x0f;
4436 pbLow[offUd + 1] = 0x0b;
4437 }
4438 }
4439 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4440 {
4441 uint64_t const uSavedRsp = Ctx.rsp.u;
4442 bool const fGp = (s_aTests[iTest].uDstSel & X86_SEL_RPL) != 0;
4443 uint8_t const BS3_FAR *fpbCode = Bs3SelLnkPtrToCurPtr(s_aTests[iTest].pfnTest);
4444
4445 Ctx.rip.u = Bs3SelLnkPtrToFlat(s_aTests[iTest].pfnTest);
4446 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4447 if ( s_aTests[iTest].uDstBits == 16
4448 || ( s_aTests[iTest].uDstBits == 64
4449 && ( BS3_MODE_IS_16BIT_SYS(bMode))
4450 || s_aTests[iTest].uDstSel == BS3_SEL_SPARE_00))
4451 CtxExpected.rip.u &= UINT16_MAX;
4452 CtxExpected.cs = s_aTests[iTest].uDstSel;
4453 if (fGp)
4454 {
4455 CtxExpected.rip.u = Ctx.rip.u;
4456 CtxExpected.cs = Ctx.cs;
4457 }
4458 g_uBs3TrapEipHint = CtxExpected.rip.u32;
4459 CtxExpected.rsp.u = Ctx.rsp.u;
4460 if (s_aTests[iTest].fCall && !fGp)
4461 CtxExpected.rsp.u -= s_aTests[iTest].fOpSizePfx ? 4 : 8;
4462 if (s_aTests[iTest].uDstBits == 64 && !fGp)
4463 {
4464 if (BS3_MODE_IS_64BIT_SYS(bMode))
4465 CtxExpected.rip.u -= 1;
4466 else
4467 CtxExpected.rax.u8 = CtxExpected.rflags.u & X86_EFL_CF ? 0xff : 0x00;
4468 }
4469 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
4470 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4471 if (!fGp)
4472 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4473 else
4474 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4475 Ctx.rsp.u = uSavedRsp;
4476 g_usBs3TestStep++;
4477
4478 /* Again single stepping: */
4479 //Bs3TestPrintf("stepping...\n");
4480 Bs3RegSetDr6(X86_DR6_INIT_VAL);
4481 Ctx.rflags.u16 |= X86_EFL_TF;
4482 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4483 CtxExpected.rax.u = Ctx.rax.u;
4484 if (s_aTests[iTest].uDstBits == 64 && !fGp && !BS3_MODE_IS_64BIT_SYS(bMode))
4485 CtxExpected.rip.u -= 1;
4486 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4487 if (!fGp)
4488 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4489 else
4490 {
4491 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4492 bs3CpuBasic2_CheckDr6InitVal();
4493 }
4494 Ctx.rflags.u16 &= ~X86_EFL_TF;
4495 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4496 Ctx.rsp.u = uSavedRsp;
4497 g_usBs3TestStep++;
4498 }
4499 }
4500 /*
4501 * 64-bit tests.
4502 */
4503 else if (BS3_MODE_IS_64BIT_CODE(bMode))
4504 {
4505 static struct
4506 {
4507 bool fInvalid;
4508 bool fCall;
4509 uint16_t uDstSel;
4510 uint8_t uDstBits;
4511 uint8_t fOpSizePfx; /**< 0: none, 1: 066h, 2: REX.W, 3: 066h REX.W */
4512 int8_t fFix64OpSize;
4513 FPFNBS3FAR pfnTest;
4514 }
4515 const s_aTests[] =
4516 {
4517 /* invalid opcodes: */
4518 { true, false, BS3_SEL_R0_CS32, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_same_r0__ud2_c32, },
4519 { true, false, BS3_SEL_R1_CS32 | 1, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_same_r1__ud2_c32, },
4520 { true, false, BS3_SEL_R2_CS32 | 2, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_same_r2__ud2_c32, },
4521 { true, false, BS3_SEL_R3_CS32 | 3, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_same_r3__ud2_c32, },
4522 { true, false, BS3_SEL_R0_CS16, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_opsize_flipbit_r0__ud2_c32, },
4523 { true, false, BS3_SEL_R0_CS64, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_r0_cs64__ud2_c32, },
4524 { true, false, BS3_SEL_SPARE_00, 64, 0, -1, bs3CpuBasic2_jmpf_ptr_r0_cs16l__ud2_c32, },
4525
4526 { true, true, BS3_SEL_R0_CS32, 64, 0, -1, bs3CpuBasic2_callf_ptr_same_r0__ud2_c32, },
4527 { true, true, BS3_SEL_R1_CS32 | 1, 64, 0, -1, bs3CpuBasic2_callf_ptr_same_r1__ud2_c32, },
4528 { true, true, BS3_SEL_R2_CS32 | 2, 64, 0, -1, bs3CpuBasic2_callf_ptr_same_r2__ud2_c32, },
4529 { true, true, BS3_SEL_R3_CS32 | 3, 64, 0, -1, bs3CpuBasic2_callf_ptr_same_r3__ud2_c32, },
4530 { true, true, BS3_SEL_R0_CS16, 64, 0, -1, bs3CpuBasic2_callf_ptr_opsize_flipbit_r0__ud2_c32, },
4531 { true, true, BS3_SEL_R0_CS64, 64, 0, -1, bs3CpuBasic2_callf_ptr_r0_cs64__ud2_c32, },
4532 { true, true, BS3_SEL_SPARE_00, 64, 0, -1, bs3CpuBasic2_callf_ptr_r0_cs16l__ud2_c32, },
4533
4534 { false, false, BS3_SEL_R0_CS64, 64, 0, false, bs3CpuBasic2_jmpf_mem_same_r0__ud2_c64, },
4535 { false, false, BS3_SEL_R1_CS64 | 1, 64, 0, false, bs3CpuBasic2_jmpf_mem_same_r1__ud2_c64, },
4536 { false, false, BS3_SEL_R2_CS64 | 2, 64, 0, false, bs3CpuBasic2_jmpf_mem_same_r2__ud2_c64, },
4537 { false, false, BS3_SEL_R3_CS64 | 3, 64, 0, false, bs3CpuBasic2_jmpf_mem_same_r3__ud2_c64, },
4538 { false, false, BS3_SEL_R0_CS16, 16, 1, false, bs3CpuBasic2_jmpf_mem_r0_cs16__ud2_c64, },
4539 { false, false, BS3_SEL_R0_CS32, 32, 0, false, bs3CpuBasic2_jmpf_mem_r0_cs32__ud2_c64, },
4540 { false, false, BS3_SEL_R0_CS64, 64, 0, false, bs3CpuBasic2_jmpf_mem_r0_cs64__ud2_c64, }, /* 16-bit CS, except in LM. */
4541 { false, false, BS3_SEL_SPARE_00, 64, 0, false, bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2_c64, }, /* 16-bit CS, except in LM. */
4542
4543 { false, false, BS3_SEL_R0_CS64, 64, 2, true, bs3CpuBasic2_jmpf_mem_same_r0__ud2_intel_c64, },
4544 { false, false, BS3_SEL_R1_CS64 | 1, 64, 2, true, bs3CpuBasic2_jmpf_mem_same_r1__ud2_intel_c64, },
4545 { false, false, BS3_SEL_R2_CS64 | 2, 64, 0, true, bs3CpuBasic2_jmpf_mem_same_r2__ud2_intel_c64, },
4546 { false, false, BS3_SEL_R3_CS64 | 3, 64, 2, true, bs3CpuBasic2_jmpf_mem_same_r3__ud2_intel_c64, },
4547 { false, false, BS3_SEL_R0_CS16, 16, 1, true, bs3CpuBasic2_jmpf_mem_r0_cs16__ud2_intel_c64, },
4548 { false, false, BS3_SEL_R0_CS32, 32, 0, true, bs3CpuBasic2_jmpf_mem_r0_cs32__ud2_intel_c64, },
4549 { false, false, BS3_SEL_R0_CS64, 64, 2, true, bs3CpuBasic2_jmpf_mem_r0_cs64__ud2_intel_c64, }, /* 16-bit CS, except in LM. */
4550 { false, false, BS3_SEL_SPARE_00, 64, 0, true, bs3CpuBasic2_jmpf_mem_r0_cs16l__ud2_intel_c64, }, /* 16-bit CS, except in LM. */
4551
4552 { false, true, BS3_SEL_R0_CS64, 64, 2, false, bs3CpuBasic2_callf_mem_same_r0__ud2_c64, },
4553 { false, true, BS3_SEL_R1_CS64 | 1, 64, 2, false, bs3CpuBasic2_callf_mem_same_r1__ud2_c64, },
4554 { false, true, BS3_SEL_R2_CS64 | 2, 64, 0, false, bs3CpuBasic2_callf_mem_same_r2__ud2_c64, },
4555 { false, true, BS3_SEL_R3_CS64 | 3, 64, 2, false, bs3CpuBasic2_callf_mem_same_r3__ud2_c64, },
4556 { false, true, BS3_SEL_R0_CS16, 16, 1, false, bs3CpuBasic2_callf_mem_r0_cs16__ud2_c64, },
4557 { false, true, BS3_SEL_R0_CS32, 32, 2, false, bs3CpuBasic2_callf_mem_r0_cs32__ud2_c64, },
4558 { false, true, BS3_SEL_R0_CS64, 64, 0, false, bs3CpuBasic2_callf_mem_r0_cs64__ud2_c64, }, /* 16-bit CS, except in LM. */
4559 { false, true, BS3_SEL_SPARE_00, 64, 0, false, bs3CpuBasic2_callf_mem_r0_cs16l__ud2_c64, }, /* 16-bit CS, except in LM. */
4560
4561 { false, true, BS3_SEL_R0_CS64, 64, 2, true, bs3CpuBasic2_callf_mem_same_r0__ud2_intel_c64, },
4562 { false, true, BS3_SEL_R1_CS64 | 1, 64, 2, true, bs3CpuBasic2_callf_mem_same_r1__ud2_intel_c64, },
4563 { false, true, BS3_SEL_R2_CS64 | 2, 64, 0, true, bs3CpuBasic2_callf_mem_same_r2__ud2_intel_c64, },
4564 { false, true, BS3_SEL_R3_CS64 | 3, 64, 2, true, bs3CpuBasic2_callf_mem_same_r3__ud2_intel_c64, },
4565 { false, true, BS3_SEL_R0_CS16, 16, 1, true, bs3CpuBasic2_callf_mem_r0_cs16__ud2_intel_c64, },
4566 { false, true, BS3_SEL_R0_CS32, 32, 0, true, bs3CpuBasic2_callf_mem_r0_cs32__ud2_intel_c64, },
4567 { false, true, BS3_SEL_R0_CS64, 64, 2, true, bs3CpuBasic2_callf_mem_r0_cs64__ud2_intel_c64, }, /* 16-bit CS, except in LM. */
4568 { false, true, BS3_SEL_SPARE_00, 64, 0, true, bs3CpuBasic2_callf_mem_r0_cs16l__ud2_intel_c64, }, /* 16-bit CS, except in LM. */
4569 };
4570 BS3CPUVENDOR const enmCpuVendor = Bs3GetCpuVendor();
4571 bool const fFix64OpSize = enmCpuVendor == BS3CPUVENDOR_INTEL; /** @todo what does VIA do? */
4572
4573 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4574 {
4575 uint64_t const uSavedRsp = Ctx.rsp.u;
4576 bool const fUd = s_aTests[iTest].fInvalid;
4577 bool const fGp = (s_aTests[iTest].uDstSel & X86_SEL_RPL) != 0;
4578 uint8_t const BS3_FAR *fpbCode = Bs3SelLnkPtrToCurPtr(s_aTests[iTest].pfnTest);
4579
4580 if (s_aTests[iTest].fFix64OpSize != fFix64OpSize && s_aTests[iTest].fFix64OpSize >= 0)
4581 continue;
4582
4583 Ctx.rip.u = Bs3SelLnkPtrToFlat(s_aTests[iTest].pfnTest);
4584 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4585 CtxExpected.cs = s_aTests[iTest].uDstSel;
4586 if (s_aTests[iTest].uDstBits == 16)
4587 CtxExpected.rip.u &= UINT16_MAX;
4588 else if (s_aTests[iTest].uDstBits == 64 && fFix64OpSize && s_aTests[iTest].uDstSel != BS3_SEL_SPARE_00)
4589 CtxExpected.rip.u |= UINT64_C(0xfffff00000000000);
4590
4591 if (fGp || fUd)
4592 {
4593 CtxExpected.rip.u = Ctx.rip.u;
4594 CtxExpected.cs = Ctx.cs;
4595 }
4596 CtxExpected.rsp.u = Ctx.rsp.u;
4597 if (s_aTests[iTest].fCall && !fGp && !fUd)
4598 {
4599 CtxExpected.rsp.u -= s_aTests[iTest].fOpSizePfx == 0 ? 8
4600 : s_aTests[iTest].fOpSizePfx == 1 ? 4 : 16;
4601 //Bs3TestPrintf("cs:rsp=%04RX16:%04RX64 -> %04RX64 (fOpSizePfx=%d)\n", Ctx.ss, Ctx.rsp.u, CtxExpected.rsp.u, s_aTests[iTest].fOpSizePfx);
4602 }
4603 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
4604 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4605 if (!fGp || fUd)
4606 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4607 else
4608 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4609 Ctx.rsp.u = uSavedRsp;
4610 g_usBs3TestStep++;
4611
4612 /* Again single stepping: */
4613 //Bs3TestPrintf("stepping...\n");
4614 Bs3RegSetDr6(X86_DR6_INIT_VAL);
4615 Ctx.rflags.u16 |= X86_EFL_TF;
4616 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4617 CtxExpected.rax.u = Ctx.rax.u;
4618 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4619 if (fUd)
4620 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4621 else if (!fGp)
4622 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4623 else
4624 {
4625 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aTests[iTest].uDstSel & X86_TRAP_ERR_SEL_MASK);
4626 bs3CpuBasic2_CheckDr6InitVal();
4627 }
4628 Ctx.rflags.u16 &= ~X86_EFL_TF;
4629 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4630 Ctx.rsp.u = uSavedRsp;
4631 g_usBs3TestStep++;
4632 }
4633 }
4634 else
4635 Bs3TestFailed("wtf?");
4636
4637 return 0;
4638}
4639
4640
4641/*********************************************************************************************************************************
4642* Near RET *
4643*********************************************************************************************************************************/
4644#define PROTO_ALL(a_Template) \
4645 FNBS3FAR a_Template ## _c16, \
4646 a_Template ## _c32, \
4647 a_Template ## _c64
4648PROTO_ALL(bs3CpuBasic2_retn_opsize_begin);
4649PROTO_ALL(bs3CpuBasic2_retn__ud2);
4650PROTO_ALL(bs3CpuBasic2_retn_opsize__ud2);
4651PROTO_ALL(bs3CpuBasic2_retn_i24__ud2);
4652PROTO_ALL(bs3CpuBasic2_retn_i24_opsize__ud2);
4653PROTO_ALL(bs3CpuBasic2_retn_i760__ud2);
4654PROTO_ALL(bs3CpuBasic2_retn_i5193__ud2);
4655PROTO_ALL(bs3CpuBasic2_retn_i5193_opsize__ud2);
4656PROTO_ALL(bs3CpuBasic2_retn_i0__ud2);
4657PROTO_ALL(bs3CpuBasic2_retn_i0_opsize__ud2);
4658FNBS3FAR bs3CpuBasic2_retn_rexw__ud2_c64;
4659FNBS3FAR bs3CpuBasic2_retn_i24_rexw__ud2_c64;
4660FNBS3FAR bs3CpuBasic2_retn_i5193_rexw__ud2_c64;
4661FNBS3FAR bs3CpuBasic2_retn_opsize_rexw__ud2_c64;
4662FNBS3FAR bs3CpuBasic2_retn_rexw_opsize__ud2_c64;
4663FNBS3FAR bs3CpuBasic2_retn_i24_opsize_rexw__ud2_c64;
4664FNBS3FAR bs3CpuBasic2_retn_i24_rexw_opsize__ud2_c64;
4665FNBS3FAR bs3CpuBasic2_retn_i5193_opsize_rexw__ud2_c64;
4666FNBS3FAR bs3CpuBasic2_retn_i5193_rexw_opsize__ud2_c64;
4667PROTO_ALL(bs3CpuBasic2_retn_begin);
4668PROTO_ALL(bs3CpuBasic2_retn_end);
4669PROTO_ALL(bs3CpuBasic2_retn_opsize_end);
4670#undef PROTO_ALL
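/*
 * For reference, the stack pointer adjustment the near RET tests below expect
 * is roughly the following (sketch; the variable names here are illustrative):
 *
 * @code
 *      cbRetAddr = !fOpSizePfx ? cBitsCode / 8         // default operand size
 *                : cBitsCode == 16 ? 4 : 2;            // 66h flips 16 <-> 32/64
 *      rspNew    = rspOld + cbRetAddr + cbImm;         // imm16 released after the pop
 * @endcode
 *
 * On Intel CPUs the 66h prefix is ignored for near RET in 64-bit code (see the
 * fFix64OpSize handling below), in which case cbRetAddr stays 8.
 */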
4671
4672
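/**
 * Prepares the stack for a near return test.
 *
 * The stack slots around the pointer are filled with UINT32_MAX canaries and
 * the expected return address (pCtxExpected->rip) is stored at the current
 * stack location using the given width.
 *
 * @param   StkPtr          Pointer to the stack location the test RSP points at.
 * @param   pCtxExpected    The expected register context providing the return address.
 * @param   cbAddr          The width of the return address on the stack: 2, 4 or 8.
 */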
4673static void bs3CpuBasic2_retn_PrepStack(BS3PTRUNION StkPtr, PCBS3REGCTX pCtxExpected, uint8_t cbAddr)
4674{
4675 StkPtr.pu32[3] = UINT32_MAX;
4676 StkPtr.pu32[2] = UINT32_MAX;
4677 StkPtr.pu32[1] = UINT32_MAX;
4678 StkPtr.pu32[0] = UINT32_MAX;
4679 StkPtr.pu32[-1] = UINT32_MAX;
4680 StkPtr.pu32[-2] = UINT32_MAX;
4681 StkPtr.pu32[-3] = UINT32_MAX;
4682 StkPtr.pu32[-4] = UINT32_MAX;
4683 if (cbAddr == 2)
4684 StkPtr.pu16[0] = pCtxExpected->rip.u16;
4685 else if (cbAddr == 4)
4686 StkPtr.pu32[0] = pCtxExpected->rip.u32;
4687 else
4688 StkPtr.pu64[0] = pCtxExpected->rip.u64;
4689}
4690
4691
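/**
 * Prepares the stack for a near return test that is expected to fail.
 *
 * Like bs3CpuBasic2_retn_PrepStack, except that the stored return offset is
 * just beyond the code selector limit set up by the caller (2 and 4 byte
 * widths) or non-canonical (8 byte width), so the return should raise \#GP(0).
 *
 * @param   StkPtr          Pointer to the stack location the test RSP points at.
 * @param   pCtxExpected    The expected register context.
 * @param   cbAddr          The width of the return address on the stack: 2, 4 or 8.
 */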
4692static void bs3CpuBasic2_retn_PrepStackInvalid(BS3PTRUNION StkPtr, PCBS3REGCTX pCtxExpected, uint8_t cbAddr)
4693{
4694 StkPtr.pu32[3] = UINT32_MAX;
4695 StkPtr.pu32[2] = UINT32_MAX;
4696 StkPtr.pu32[1] = UINT32_MAX;
4697 StkPtr.pu32[0] = UINT32_MAX;
4698 StkPtr.pu32[-1] = UINT32_MAX;
4699 StkPtr.pu32[-2] = UINT32_MAX;
4700 StkPtr.pu32[-3] = UINT32_MAX;
4701 StkPtr.pu32[-4] = UINT32_MAX;
4702 if (cbAddr == 2)
4703 StkPtr.pu16[0] = BS3_FP_OFF(bs3CpuBasic2_retn_opsize_end_c16);
4704 else if (cbAddr == 4)
4705 StkPtr.pu32[0] = BS3_FP_OFF(bs3CpuBasic2_retn_end_c32) - BS3_FP_OFF(bs3CpuBasic2_retn_begin_c32);
4706 else
4707 StkPtr.pu64[0] = UINT64_C(0x0000800000000000);
4708}
4709
4710
4711/**
4712 * Entrypoint for NEAR RET tests.
4713 *
4714 * @returns 0 or BS3TESTDOMODE_SKIPPED.
4715 * @param bMode The CPU mode we're testing.
4716 */
4717BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_near_ret)(uint8_t bMode)
4718{
4719 BS3TRAPFRAME TrapCtx;
4720 BS3REGCTX Ctx;
4721 BS3REGCTX CtxExpected;
4722 unsigned iTest;
4723 BS3PTRUNION StkPtr;
4724 unsigned const cMaxRecompRuns = g_cBs3ThresholdNativeRecompiler ? g_cBs3ThresholdNativeRecompiler : 1;
4725 unsigned iRecompRun;
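    /* Each sub-test below is repeated cMaxRecompRuns times, presumably so that
       when running under IEM the native recompiler threshold is crossed and the
       recompiled code path gets exercised as well. */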
4726
4727 /* make sure they're allocated */
4728 Bs3MemZero(&Ctx, sizeof(Ctx));
4729 Bs3MemZero(&CtxExpected, sizeof(Ctx));
4730 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
4731
4732 bs3CpuBasic2_SetGlobals(bMode);
4733
4734 /*
4735 * Create a context.
4736 *
4737 * ASSUMES we're on the ring-0 stack in ring-0 and using less than 16KB.
4738 */
4739 Bs3RegCtxSaveEx(&Ctx, bMode, 1664);
4740 Ctx.rsp.u = BS3_ADDR_STACK - _16K;
4741 Bs3MemCpy(&CtxExpected, &Ctx, sizeof(CtxExpected));
4742
4743 StkPtr.pv = Bs3RegCtxGetRspSsAsCurPtr(&Ctx);
4744 //Bs3TestPrintf("Stack=%p rsp=%RX64\n", StkPtr.pv, Ctx.rsp.u);
4745
4746 /*
4747 * 16-bit tests.
4748 */
4749 if (BS3_MODE_IS_16BIT_CODE(bMode))
4750 {
4751 static struct
4752 {
4753 bool fOpSizePfx;
4754 uint16_t cbImm;
4755 FPFNBS3FAR pfnTest;
4756 }
4757 const s_aTests[] =
4758 {
4759 { false, 0, bs3CpuBasic2_retn__ud2_c16, },
4760 { true, 0, bs3CpuBasic2_retn_opsize__ud2_c16, },
4761 { false, 24, bs3CpuBasic2_retn_i24__ud2_c16, },
4762 { true, 24, bs3CpuBasic2_retn_i24_opsize__ud2_c16, },
4763 { false, 0, bs3CpuBasic2_retn_i0__ud2_c16, },
4764 { true, 0, bs3CpuBasic2_retn_i0_opsize__ud2_c16, },
4765 { false, 760, bs3CpuBasic2_retn_i760__ud2_c16, },
4766 { false, 5193, bs3CpuBasic2_retn_i5193__ud2_c16, },
4767 { true, 5193, bs3CpuBasic2_retn_i5193_opsize__ud2_c16, },
4768 };
4769
4770 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4771 {
4772 uint8_t const BS3_FAR *fpbCode;
4773
4774 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, s_aTests[iTest].pfnTest);
4775 fpbCode = (uint8_t const BS3_FAR *)BS3_FP_MAKE(Ctx.cs, Ctx.rip.u16);
4776 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4777 g_uBs3TrapEipHint = CtxExpected.rip.u32;
4778 CtxExpected.cs = Ctx.cs;
4779 if (!s_aTests[iTest].fOpSizePfx)
4780 CtxExpected.rsp.u = Ctx.rsp.u + s_aTests[iTest].cbImm + 2;
4781 else
4782 CtxExpected.rsp.u = Ctx.rsp.u + s_aTests[iTest].cbImm + 4;
4783 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
4784 //Bs3TestPrintf("ss:rsp=%04RX16:%04RX64\n", Ctx.ss, Ctx.rsp.u);
4785 for (iRecompRun = 0; iRecompRun < cMaxRecompRuns; iRecompRun++)
4786 {
4787 bs3CpuBasic2_retn_PrepStack(StkPtr, &CtxExpected, s_aTests[iTest].fOpSizePfx ? 4 : 2);
4788 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4789 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4790 }
4791 g_usBs3TestStep++;
4792
4793 /* Again single stepping: */
4794 //Bs3TestPrintf("stepping...\n");
4795 Ctx.rflags.u16 |= X86_EFL_TF;
4796 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4797 for (iRecompRun = 0; iRecompRun < cMaxRecompRuns; iRecompRun++)
4798 {
4799 Bs3RegSetDr6(X86_DR6_INIT_VAL);
4800 bs3CpuBasic2_retn_PrepStack(StkPtr, &CtxExpected, s_aTests[iTest].fOpSizePfx ? 4 : 2);
4801 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4802 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4803 }
4804 Ctx.rflags.u16 &= ~X86_EFL_TF;
4805 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4806 g_usBs3TestStep++;
4807
4808 /*
4809 * Test exceeding the selector limit by using a spare selector and
4810 * setting the limit just below the return target.
4811 */
4812 if (!BS3_MODE_IS_RM_OR_V86(bMode))
4813 {
4814 Bs3SelSetup16BitCode(&Bs3GdteSpare03, BS3_ADDR_BS3TEXT16, 0);
4815
4816 Ctx.cs = BS3_SEL_SPARE_03;
4817 CtxExpected.cs = Ctx.cs;
4818
4819 Bs3GdteSpare03.Gen.u16LimitLow = BS3_FP_OFF(bs3CpuBasic2_retn_opsize_end_c16) - 1;
4820
4821 Ctx.rip.u = BS3_FP_OFF(s_aTests[iTest].pfnTest);
4822 CtxExpected.rip.u = Ctx.rip.u;
4823 CtxExpected.rsp.u = Ctx.rsp.u;
4824 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 v2\n", Ctx.cs, Ctx.rip.u);
4825 for (iRecompRun = 0; iRecompRun < cMaxRecompRuns; iRecompRun++)
4826 {
4827 bs3CpuBasic2_retn_PrepStackInvalid(StkPtr, &CtxExpected, 2);
4828 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4829 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, 0);
4830 }
4831 g_usBs3TestStep++;
4832 }
4833 }
4834 }
4835 /*
4836 * 32-bit tests.
4837 */
4838 else if (BS3_MODE_IS_32BIT_CODE(bMode))
4839 {
4840 static struct
4841 {
4842 uint8_t cBits;
4843 bool fOpSizePfx;
4844 uint16_t cbImm;
4845 FPFNBS3FAR pfnTest;
4846 }
4847 const s_aTests[] =
4848 {
4849 { 32, false, 0, bs3CpuBasic2_retn__ud2_c32, },
4850 { 32, true, 0, bs3CpuBasic2_retn_opsize__ud2_c32, },
4851 { 32, false, 24, bs3CpuBasic2_retn_i24__ud2_c32, },
4852 { 32, true, 24, bs3CpuBasic2_retn_i24_opsize__ud2_c32, },
4853 { 32, false, 0, bs3CpuBasic2_retn_i0__ud2_c32, },
4854 { 32, true, 0, bs3CpuBasic2_retn_i0_opsize__ud2_c32, },
4855 { 32, false, 760, bs3CpuBasic2_retn_i760__ud2_c32, },
4856 { 32, false, 5193, bs3CpuBasic2_retn_i5193__ud2_c32, },
4857 { 32, true, 5193, bs3CpuBasic2_retn_i5193_opsize__ud2_c32, },
4858 };
4859
4860 /* Prepare a copy of the UD2 instructions in low memory for opsize prefixed tests. */
4861 uint16_t const offLow = BS3_FP_OFF(bs3CpuBasic2_retn_opsize_begin_c32);
4862 uint16_t const cbLow = BS3_FP_OFF(bs3CpuBasic2_retn_opsize_end_c32) - offLow;
4863 uint8_t BS3_FAR * const pbLow = BS3_FP_MAKE(BS3_SEL_TILED_R0, 0);
4864 uint8_t BS3_FAR * const pbCode16 = BS3_MAKE_PROT_R0PTR_FROM_FLAT(BS3_ADDR_BS3TEXT16);
4865 if (offLow < 0x600 || offLow + cbLow >= BS3_ADDR_STACK_R2)
4866 Bs3TestFailedF("Opsize overridden jumps/calls are out of place: %#x LB %#x\n", offLow, cbLow);
4867 Bs3MemSet(&pbLow[offLow], 0xcc /*int3*/, cbLow);
4868 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4869 if (s_aTests[iTest].fOpSizePfx)
4870 {
4871 uint16_t const offFn = BS3_FP_OFF(s_aTests[iTest].pfnTest);
4872 uint16_t const offUd = offFn + (int16_t)(int8_t)pbCode16[offFn - 1];
4873 BS3_ASSERT(offUd - offLow + 1 < cbLow);
4874 pbCode16[offUd] = 0xf1; /* replace original ud2 with icebp */
4875 pbCode16[offUd + 1] = 0xf1;
4876 pbLow[offUd] = 0x0f; /* plant ud2 in low memory */
4877 pbLow[offUd + 1] = 0x0b;
4878 }
4879
4880 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
4881 {
4882 uint8_t const BS3_FAR *fpbCode = Bs3SelLnkPtrToCurPtr(s_aTests[iTest].pfnTest);
4883
4884 Ctx.rip.u = Bs3SelLnkPtrToFlat(s_aTests[iTest].pfnTest);
4885 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
4886 CtxExpected.cs = Ctx.cs;
4887 if (!s_aTests[iTest].fOpSizePfx)
4888 CtxExpected.rsp.u = Ctx.rsp.u + s_aTests[iTest].cbImm + 4;
4889 else
4890 {
4891 CtxExpected.rsp.u = Ctx.rsp.u + s_aTests[iTest].cbImm + 2;
4892 CtxExpected.rip.u &= UINT16_MAX;
4893 }
4894 g_uBs3TrapEipHint = CtxExpected.rip.u32;
4895 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
4896 //Bs3TestPrintf("ss:rsp=%04RX16:%04RX64\n", Ctx.ss, Ctx.rsp.u);
4897 for (iRecompRun = 0; iRecompRun < cMaxRecompRuns; iRecompRun++)
4898 {
4899 bs3CpuBasic2_retn_PrepStack(StkPtr, &CtxExpected, s_aTests[iTest].fOpSizePfx ? 2 : 4);
4900 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4901 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
4902 }
4903 g_usBs3TestStep++;
4904
4905 /* Again single stepping: */
4906 //Bs3TestPrintf("stepping...\n");
4907 Ctx.rflags.u16 |= X86_EFL_TF;
4908 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4909 for (iRecompRun = 0; iRecompRun < cMaxRecompRuns; iRecompRun++)
4910 {
4911 Bs3RegSetDr6(X86_DR6_INIT_VAL);
4912 bs3CpuBasic2_retn_PrepStack(StkPtr, &CtxExpected, s_aTests[iTest].fOpSizePfx ? 2 : 4);
4913 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4914 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
4915 }
4916 Ctx.rflags.u16 &= ~X86_EFL_TF;
4917 CtxExpected.rflags.u16 = Ctx.rflags.u16;
4918 g_usBs3TestStep++;
4919
4920 if ( !BS3_MODE_IS_16BIT_SYS_NO_RM(bMode)
4921 && !s_aTests[iTest].fOpSizePfx)
4922 {
4923 /*
4924 * Test exceeding the selector limit by using a spare selector and
4925 * setting the limit just below the return target.
4926 */
4927 uint16_t CsOrig = Ctx.cs;
4928 uint32_t const uFlatCode = Bs3SelLnkPtrToFlat(bs3CpuBasic2_retn_begin_c32);
4929 //Bs3TestPrintf("uFlatCode=%04RX32\n", uFlatCode);
4930
4931 Bs3SelSetup32BitCode(&Bs3GdteSpare03, uFlatCode,
4932 BS3_FP_OFF(bs3CpuBasic2_retn_end_c32) - BS3_FP_OFF(bs3CpuBasic2_retn_begin_c32) - 1,
4933 0);
4934
4935 Ctx.cs = BS3_SEL_SPARE_03;
4936 CtxExpected.cs = Ctx.cs;
4937 Ctx.rip.u = BS3_FP_OFF(s_aTests[iTest].pfnTest) - BS3_FP_OFF(bs3CpuBasic2_retn_begin_c32);
4938 CtxExpected.rip.u = Ctx.rip.u;
4939 CtxExpected.rsp.u = Ctx.rsp.u;
4940 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 v2\n", Ctx.cs, Ctx.rip.u);
4941 for (iRecompRun = 0; iRecompRun < cMaxRecompRuns; iRecompRun++)
4942 {
4943 bs3CpuBasic2_retn_PrepStackInvalid(StkPtr, &CtxExpected, 4);
4944 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
4945 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, 0);
4946 }
4947
4948 Ctx.cs = CsOrig;
4949 g_usBs3TestStep++;
4950 }
4951 }
4952 }
4953 /*
4954 * 64-bit tests.
4955 */
4956 else if (BS3_MODE_IS_64BIT_CODE(bMode))
4957 {
4958 static struct
4959 {
4960 uint8_t cBits;
4961 bool fOpSizePfx;
4962 uint16_t cbImm;
4963 FPFNBS3FAR pfnTest;
4964 }
4965 const s_aTests[] =
4966 {
4967 { 32, false, 0, bs3CpuBasic2_retn__ud2_c64, },
4968 { 32, false, 0, bs3CpuBasic2_retn_rexw__ud2_c64, },
4969 { 32, true, 0, bs3CpuBasic2_retn_opsize__ud2_c64, },
4970 { 32, false, 0, bs3CpuBasic2_retn_opsize_rexw__ud2_c64, },
4971 { 32, true, 0, bs3CpuBasic2_retn_rexw_opsize__ud2_c64, },
4972 { 32, false, 24, bs3CpuBasic2_retn_i24__ud2_c64, },
4973 { 32, false, 24, bs3CpuBasic2_retn_i24_rexw__ud2_c64, },
4974 { 32, true, 24, bs3CpuBasic2_retn_i24_opsize__ud2_c64, },
4975 { 32, false, 24, bs3CpuBasic2_retn_i24_opsize_rexw__ud2_c64, },
4976 { 32, true, 24, bs3CpuBasic2_retn_i24_rexw_opsize__ud2_c64, },
4977 { 32, false, 0, bs3CpuBasic2_retn_i0__ud2_c64, },
4978 { 32, true, 0, bs3CpuBasic2_retn_i0_opsize__ud2_c64, },
4979 { 32, false, 760, bs3CpuBasic2_retn_i760__ud2_c64, },
4980 { 32, false, 5193, bs3CpuBasic2_retn_i5193__ud2_c64, },
4981 { 32, false, 5193, bs3CpuBasic2_retn_i5193_rexw__ud2_c64, },
4982 { 32, true, 5193, bs3CpuBasic2_retn_i5193_opsize__ud2_c64, },
4983 { 32, false, 5193, bs3CpuBasic2_retn_i5193_opsize_rexw__ud2_c64, },
4984 { 32, true, 5193, bs3CpuBasic2_retn_i5193_rexw_opsize__ud2_c64, },
4985 };
4986 BS3CPUVENDOR const enmCpuVendor = Bs3GetCpuVendor();
4987 bool const fFix64OpSize = enmCpuVendor == BS3CPUVENDOR_INTEL; /** @todo what does VIA do? */
4988
4989 /* Prepare a copy of the UD2 instructions in low memory for opsize prefixed
4990 tests, unless we're on Intel where the opsize prefix is ignored. In that case
4991 we only fill low memory with int3's so any non-Intel behaviour is still detected. */
4992 uint16_t const offLow = BS3_FP_OFF(bs3CpuBasic2_retn_opsize_begin_c64);
4993 uint16_t const cbLow = BS3_FP_OFF(bs3CpuBasic2_retn_opsize_end_c64) - offLow;
4994 uint8_t BS3_FAR * const pbLow = BS3_FP_MAKE(BS3_SEL_TILED_R0, 0);
4995 uint8_t BS3_FAR * const pbCode16 = BS3_MAKE_PROT_R0PTR_FROM_FLAT(BS3_ADDR_BS3TEXT16);
4996 if (offLow < 0x600 || offLow + cbLow >= BS3_ADDR_STACK_R2)
4997 Bs3TestFailedF("Opsize overridden jumps/calls are out of place: %#x LB %#x\n", offLow, cbLow);
4998 Bs3MemSet(&pbLow[offLow], 0xcc /*int3*/, cbLow);
4999 if (!fFix64OpSize)
5000 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
5001 if (s_aTests[iTest].fOpSizePfx)
5002 {
5003 uint16_t const offFn = BS3_FP_OFF(s_aTests[iTest].pfnTest);
5004 uint16_t const offUd = offFn + (int16_t)(int8_t)pbCode16[offFn - 1];
5005 BS3_ASSERT(offUd - offLow + 1 < cbLow);
5006 pbCode16[offUd] = 0xf1; /* replace original ud2 with icebp */
5007 pbCode16[offUd + 1] = 0xf1;
5008 pbLow[offUd] = 0x0f; /* plant ud2 in low memory */
5009 pbLow[offUd + 1] = 0x0b;
5010 }
5011
5012 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
5013 {
5014 uint8_t const BS3_FAR *fpbCode = Bs3SelLnkPtrToCurPtr(s_aTests[iTest].pfnTest);
5015
5016 Ctx.rip.u = Bs3SelLnkPtrToFlat(s_aTests[iTest].pfnTest);
5017 CtxExpected.rip.u = Ctx.rip.u + (int64_t)(int8_t)fpbCode[-1];
5018 CtxExpected.cs = Ctx.cs;
5019 if (!s_aTests[iTest].fOpSizePfx || fFix64OpSize)
5020 CtxExpected.rsp.u = Ctx.rsp.u + s_aTests[iTest].cbImm + 8;
5021 else
5022 {
5023 CtxExpected.rsp.u = Ctx.rsp.u + s_aTests[iTest].cbImm + 2;
5024 CtxExpected.rip.u &= UINT16_MAX;
5025 }
5026 g_uBs3TrapEipHint = CtxExpected.rip.u32;
5027 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
5028 //Bs3TestPrintf("ss:rsp=%04RX16:%04RX64\n", Ctx.ss, Ctx.rsp.u);
5029 for (iRecompRun = 0; iRecompRun < cMaxRecompRuns; iRecompRun++)
5030 {
5031 bs3CpuBasic2_retn_PrepStack(StkPtr, &CtxExpected, s_aTests[iTest].fOpSizePfx && !fFix64OpSize ? 2 : 8);
5032 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5033 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
5034 }
5035 g_usBs3TestStep++;
5036
5037 /* Again single stepping: */
5038 //Bs3TestPrintf("stepping...\n");
5039 Ctx.rflags.u16 |= X86_EFL_TF;
5040 CtxExpected.rflags.u16 = Ctx.rflags.u16;
5041 for (iRecompRun = 0; iRecompRun < cMaxRecompRuns; iRecompRun++)
5042 {
5043 Bs3RegSetDr6(X86_DR6_INIT_VAL);
5044 bs3CpuBasic2_retn_PrepStack(StkPtr, &CtxExpected, s_aTests[iTest].fOpSizePfx && !fFix64OpSize ? 2 : 8);
5045 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5046 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
5047 }
5048 Ctx.rflags.u16 &= ~X86_EFL_TF;
5049 CtxExpected.rflags.u16 = Ctx.rflags.u16;
5050 g_usBs3TestStep++;
5051
5052 /* Non-canonical return address (should \#GP(0)). */
5053 if (!s_aTests[iTest].fOpSizePfx || fFix64OpSize)
5054 {
5055 CtxExpected.rip.u = Ctx.rip.u;
5056 CtxExpected.cs = Ctx.cs;
5057 CtxExpected.rsp.u = Ctx.rsp.u;
5058 for (iRecompRun = 0; iRecompRun < cMaxRecompRuns; iRecompRun++)
5059 {
5060 bs3CpuBasic2_retn_PrepStackInvalid(StkPtr, &CtxExpected, 8);
5061 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5062 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, 0 /*uErrCd*/);
5063 }
5064 }
5065 g_usBs3TestStep++;
5066 }
5067 }
5068 else
5069 Bs3TestFailed("wtf?");
5070
5071 return 0;
5072}
5073
5074
5075/*********************************************************************************************************************************
5076* Far RET *
5077*********************************************************************************************************************************/
5078#define PROTO_ALL(a_Template) \
5079 FNBS3FAR a_Template ## _c16, \
5080 a_Template ## _c32, \
5081 a_Template ## _c64
5082PROTO_ALL(bs3CpuBasic2_retf);
5083PROTO_ALL(bs3CpuBasic2_retf_opsize);
5084FNBS3FAR bs3CpuBasic2_retf_rexw_c64;
5085FNBS3FAR bs3CpuBasic2_retf_rexw_opsize_c64;
5086FNBS3FAR bs3CpuBasic2_retf_opsize_rexw_c64;
5087PROTO_ALL(bs3CpuBasic2_retf_i32);
5088PROTO_ALL(bs3CpuBasic2_retf_i32_opsize);
5089FNBS3FAR bs3CpuBasic2_retf_i24_rexw_c64;
5090FNBS3FAR bs3CpuBasic2_retf_i24_rexw_opsize_c64;
5091FNBS3FAR bs3CpuBasic2_retf_i24_opsize_rexw_c64;
5092PROTO_ALL(bs3CpuBasic2_retf_i888);
5093#undef PROTO_ALL
5094
5095
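/**
 * Prepares the stack for a far return test.
 *
 * Fills 96 bytes around the stack pointer with 0xff canaries and builds a far
 * return frame using the given stack item width (2, 4 or 8 bytes):
 *
 * @verbatim
 *      StkPtr + 0*cbStkItem:           return offset (uRetRip)
 *      StkPtr + 1*cbStkItem:           return CS     (uRetCs)
 *      only when fWithStack, after cbImm parameter bytes:
 *      StkPtr + 2*cbStkItem + cbImm:   return RSP    (uRetRsp)
 *      StkPtr + 3*cbStkItem + cbImm:   return SS     (uRetSs)
 * @endverbatim
 *
 * @param   StkPtr      Pointer to the stack location the test RSP points at.
 * @param   cbStkItem   The width of a stack item: 2, 4 or 8 bytes.
 * @param   uRetCs      The return CS value to store.
 * @param   uRetRip     The return offset to store.
 * @param   fWithStack  Whether to also store the outer SS:RSP pair (privilege change).
 * @param   cbImm       Number of immediate (parameter) bytes for RETF imm16.
 * @param   uRetSs      The return SS value (only used when fWithStack).
 * @param   uRetRsp     The return RSP value (only used when fWithStack).
 */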
5096static void bs3CpuBasic2_retf_PrepStack(BS3PTRUNION StkPtr, uint8_t cbStkItem, RTSEL uRetCs, uint64_t uRetRip,
5097 bool fWithStack, uint16_t cbImm, RTSEL uRetSs, uint64_t uRetRsp)
5098{
5099 Bs3MemSet(&StkPtr.pu32[-4], 0xff, 96);
5100 if (cbStkItem == 2)
5101 {
5102 StkPtr.pu16[0] = (uint16_t)uRetRip;
5103 StkPtr.pu16[1] = uRetCs;
5104 if (fWithStack)
5105 {
5106 StkPtr.pb += cbImm;
5107 StkPtr.pu16[2] = (uint16_t)uRetRsp;
5108 StkPtr.pu16[3] = uRetSs;
5109 }
5110 }
5111 else if (cbStkItem == 4)
5112 {
5113 StkPtr.pu32[0] = (uint32_t)uRetRip;
5114 StkPtr.pu16[2] = uRetCs;
5115 if (fWithStack)
5116 {
5117 StkPtr.pb += cbImm;
5118 StkPtr.pu32[2] = (uint32_t)uRetRsp;
5119 StkPtr.pu16[6] = uRetSs;
5120 }
5121 }
5122 else
5123 {
5124 StkPtr.pu64[0] = uRetRip;
5125 StkPtr.pu16[4] = uRetCs;
5126 if (fWithStack)
5127 {
5128 StkPtr.pb += cbImm;
5129 StkPtr.pu64[2] = uRetRsp;
5130 StkPtr.pu16[12] = uRetSs;
5131 }
5132 }
5133}
5134
5135
5136/**
5137 * Entrypoint for FAR RET tests.
5138 *
5139 * @returns 0 or BS3TESTDOMODE_SKIPPED.
5140 * @param bMode The CPU mode we're testing.
5141 */
5142BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_far_ret)(uint8_t bMode)
5143{
5144 BS3TRAPFRAME TrapCtx;
5145 BS3REGCTX Ctx;
5146 BS3REGCTX Ctx2;
5147 BS3REGCTX CtxExpected;
5148 unsigned iTest;
5149 unsigned iSubTest;
5150 BS3PTRUNION StkPtr;
5151
5152#define LOW_UD_ADDR 0x0609
5153 uint8_t BS3_FAR * const pbLowUd = BS3_FP_MAKE(BS3_FP_SEG(&StkPtr), LOW_UD_ADDR);
5154#define LOW_SALC_UD_ADDR 0x0611
5155 uint8_t BS3_FAR * const pbLowSalcUd = BS3_FP_MAKE(BS3_FP_SEG(&StkPtr), LOW_SALC_UD_ADDR);
5156#define LOW_SWAPGS_ADDR 0x061d
5157 uint8_t BS3_FAR * const pbLowSwapGs = BS3_FP_MAKE(BS3_FP_SEG(&StkPtr), LOW_SWAPGS_ADDR);
5158#define BS3TEXT16_ADDR_HI (BS3_ADDR_BS3TEXT16 >> 16)
5159
5160 /* make sure they're allocated */
5161 Bs3MemZero(&Ctx, sizeof(Ctx));
5162 Bs3MemZero(&Ctx2, sizeof(Ctx2));
5163 Bs3MemZero(&CtxExpected, sizeof(CtxExpected));
5164 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
5165
5166 bs3CpuBasic2_SetGlobals(bMode);
5167
5168 //if (!BS3_MODE_IS_64BIT_SYS(bMode) && bMode != BS3_MODE_PP32_16) return 0xff;
5169 //if (bMode != BS3_MODE_PE32_16) return 0xff;
5170
5171 /*
5172 * When doing a retf with 16-bit effective operand size to 32-bit or 64-bit
5173 * code, we're restricted to a 16-bit address. So, we plant a UD
5174 * instruction below 64KB that we can target with flat 32/64 code segments.
5175 * (Putting it on the stack would be possible too, but we'd have to create
5176 * the sub-test tables dynamically, which isn't necessary.)
5177 */
5178 Bs3MemSet(&pbLowUd[-9], 0xcc, 32);
5179 Bs3MemSet(&pbLowSalcUd[-9], 0xcc, 32);
5180 Bs3MemSet(&pbLowSwapGs[-9], 0xcc, 32);
5181
5182 pbLowUd[0] = 0x0f; /* ud2 */
5183 pbLowUd[1] = 0x0b;
5184
5185 /* A variation to detect whether we're in 64-bit or 16-bit mode when
5186 executing the code. */
5187 pbLowSalcUd[0] = 0xd6; /* salc */
5188 pbLowSalcUd[1] = 0x0f; /* ud2 */
5189 pbLowSalcUd[2] = 0x0b;
5190
5191 /* A variation to check that we're not in 64-bit mode. */
5192 pbLowSwapGs[0] = 0x0f; /* swapgs */
5193 pbLowSwapGs[1] = 0x01;
5194 pbLowSwapGs[2] = 0xf8;
5195
5196 /*
5197 * Use separate stacks for all relevant CPU exceptions so we can put
5198 * garbage in unused RSP bits w/o needing to care about where a long mode
5199 * handler will end up when accessing the whole RSP. (Not an issue with
5200 * 16-bit and 32-bit protected mode kernels, as here the weird SS based
5201 * stack pointer handling is in effect and the exception handler code
5202 * will just continue using the same SS and same portion of RSP.)
5203 *
5204 * See r154660.
5205 */
5206 if (BS3_MODE_IS_64BIT_SYS(bMode))
5207 Bs3Trap64InitEx(true);
5208
5209 /*
5210 * Create some call gates and whatnot for the UD2 code using the spare selectors.
5211 */
5212 if (BS3_MODE_IS_64BIT_SYS(bMode))
5213 for (iTest = 0; iTest < 16; iTest++)
5214 Bs3SelSetupGate64(&Bs3GdteSpare00 + iTest * 2, iTest /*bType*/, 3 /*bDpl*/,
5215 BS3_SEL_R0_CS64, BS3_FP_OFF(bs3CpuBasic2_ud2) + BS3_ADDR_BS3TEXT16);
5216 else
5217 {
5218 for (iTest = 0; iTest < 16; iTest++)
5219 {
5220 Bs3SelSetupGate(&Bs3GdteSpare00 + iTest, iTest /*bType*/, 3 /*bDpl*/,
5221 BS3_SEL_R0_CS16, BS3_FP_OFF(bs3CpuBasic2_ud2), 0);
5222 Bs3SelSetupGate(&Bs3GdteSpare00 + iTest + 16, iTest /*bType*/, 3 /*bDpl*/,
5223 BS3_SEL_R0_CS32, BS3_FP_OFF(bs3CpuBasic2_ud2) + BS3_ADDR_BS3TEXT16, 0);
5224 }
5225 }
5226
5227 /*
5228 * Create a context.
5229 *
5230 * ASSUMES we're on the ring-0 stack in ring-0 and using less than 16KB.
5231 */
5232 Bs3RegCtxSaveEx(&Ctx, bMode, 1728);
5233 Ctx.rsp.u = BS3_ADDR_STACK - _16K;
5234 Bs3MemCpy(&CtxExpected, &Ctx, sizeof(CtxExpected));
5235
5236 StkPtr.pv = Bs3RegCtxGetRspSsAsCurPtr(&Ctx);
5237 //Bs3TestPrintf("Stack=%p rsp=%RX64\n", StkPtr.pv, Ctx.rsp.u);
5238
5239 /*
5240 * 16-bit tests.
5241 */
5242 if (BS3_MODE_IS_16BIT_CODE(bMode))
5243 {
5244 static struct
5245 {
5246 bool fOpSizePfx;
5247 uint16_t cbImm;
5248 FPFNBS3FAR pfnTest;
5249 } const s_aTests[] =
5250 {
5251 { false, 0, bs3CpuBasic2_retf_c16, },
5252 { true, 0, bs3CpuBasic2_retf_opsize_c16, },
5253 { false, 32, bs3CpuBasic2_retf_i32_c16, },
5254 { true, 32, bs3CpuBasic2_retf_i32_opsize_c16, },
5255 { false,888, bs3CpuBasic2_retf_i888_c16, },
5256 };
5257
5258 static struct
5259 {
5260 bool fRmOrV86;
5261 bool fInterPriv;
5262 int8_t iXcpt;
5263 RTSEL uStartSs;
5264 uint8_t cDstBits;
5265 RTSEL uDstCs;
5266 union /* must use a union here as the compiler won't accept a plain uint16_t and will mess up the fixups if uint32_t is used. */
5267 {
5268 uint32_t offDst;
5269 struct
5270 {
5271 NPVOID pv;
5272 uint16_t uHigh;
5273 } s;
5274 };
5275 RTSEL uDstSs;
5276 uint16_t uErrCd;
5277 } const s_aSubTests[] =
5278 { /* rm/v86, PriChg, Xcpt, uStartSs, => bits uDstCs offDst/pv uDstSs uErrCd */
5279 { true, false, -1, 0, 16, BS3_SEL_TEXT16, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, 0, 0 },
5280 { false, false, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_TEXT16 | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, 0 },
5281 { false, false, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R0_CS16 | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, 0 },
5282 { false, false, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16 | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, 0 },
5283 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, 0 },
5284 { false, true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, 0 },
5285 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS32 | 1, 0 },
5286 { false, true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS32 | 1, 0 },
5287 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, 0 },
5288 { false, true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, 0 },
5289 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS32 | 2, 0 },
5290 { false, true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS32 | 2, 0 },
5291 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS16 | 3, 0 },
5292 { false, true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS16 | 3, 0 },
5293 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS32 | 3, 0 },
5294 { false, true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS32 | 3, 0 },
5295 /* conforming stuff */
5296 { false, false, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R0_CS16_CNF | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, 0 },
5297 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, 0 },
5298 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 1, BS3_SEL_R0_SS16 },
5299 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R0_CS16_CNF | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, 0 },
5300 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R0_CS16_CNF | 3, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS16 | 3, 0 },
5301 { false, false, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16_CNF | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R1_CS16_CNF },
5302 { false, false, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16_CNF | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R1_CS16_CNF },
5303 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, 0 },
5304 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16_CNF | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, 0 },
5305 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16_CNF | 3, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS16 | 3, 0 },
5306 { false, false, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16_CNF | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R2_CS16_CNF },
5307 { false, false, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16_CNF | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R2_CS16_CNF },
5308 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16_CNF | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R2_CS16_CNF },
5309 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16_CNF | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R2_CS16_CNF },
5310 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16_CNF | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, 0 },
5311 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16_CNF | 3, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS16 | 3, 0 },
5312 { false, false, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16_CNF | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R3_CS16_CNF },
5313 { false, false, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16_CNF | 0, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R3_CS16_CNF },
5314 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16_CNF | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R3_CS16_CNF },
5315 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16_CNF | 1, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R3_CS16_CNF },
5316 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16_CNF | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R3_CS16_CNF },
5317 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16_CNF | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS16 | 2, BS3_SEL_R3_CS16_CNF },
5318 { false, true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16_CNF | 3, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R3_SS16 | 3, 0 },
5319 /* returning to 32-bit code: */
5320 { false, false, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32 | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 0, 0 },
5321 { false, false, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32 | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS16 | 0, 0 },
5322 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5323 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS16 | 1, 0 },
5324 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5325 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, 0 },
5326 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5327 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS16 | 2, 0 },
5328 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5329 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, 0 },
5330 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5331 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS16 | 3, 0 },
5332 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5333 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
5334 { false, false, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32 | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
5335 { false, false, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32 | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, 0 },
5336 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5337 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, 0 },
5338 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5339 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS16 | 1, 0 },
5340 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5341 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, 0 },
5342 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5343 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS16 | 2, 0 },
5344 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5345 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
5346 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5347 { false, true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS16 | 3, 0 },
5348 /* returning to 32-bit conforming code: */
5349 { false, false, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 0, 0 },
5350 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5351 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 1, BS3_SEL_R0_SS16 },
5352 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R0_SS16 },
5353 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 1, BS3_SEL_R3_SS16 },
5354 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, BS3_SEL_R3_SS16 },
5355 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5356 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32_CNF | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5357 { false, false, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R1_CS32_CNF },
5358 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5359 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 1, BS3_SEL_R0_SS16 },
5360 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R0_SS16 },
5361 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 1, BS3_SEL_R3_SS16 },
5362 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, BS3_SEL_R3_SS16 },
5363 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5364 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32_CNF | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5365 { false, false, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R2_CS32_CNF },
5366 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R2_CS32_CNF },
5367 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5368 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32_CNF | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5369 { false, false, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R3_CS32_CNF },
5370 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R3_CS32_CNF },
5371 { false, true, 42, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R3_CS32_CNF },
5372 { false, true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32_CNF | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5373 /* returning to 64-bit code or 16-bit when not in long mode: */
5374 { false, false, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R0_CS64 | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, 0 },
5375 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5376 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5377 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5378 { false, true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_DS64 | 1, BS3_SEL_R0_DS64 },
5379 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_DS64 | 1, 0 },
5380 { false, false, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64 | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
5381 { false, true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5382 { false, true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5383 { false, true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5384 { false, true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5385 { false, true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5386 { false, true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5387 { false, true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, BS3_SEL_R2_CS64 },
5388 { false, true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 3, BS3_SEL_R2_CS64 },
5389 { false, true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 3, BS3_SEL_R1_SS32 },
5390 { false, true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 2, BS3_SEL_R3_SS32 },
5391 /* returning to 64-bit code or 16-bit when not in long mode, conforming code variant: */
5392 { false, false, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R0_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, 0 },
5393 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R0_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5394 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R0_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5395 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R0_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5396
5397 { false, false, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R1_CS64_CNF },
5398 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5399 { false, true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 2, BS3_SEL_R1_SS16 },
5400 { false, true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 1, BS3_SEL_R2_SS16 },
5401 { false, true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R2_SS16 },
5402 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5403 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5404
5405 { false, false, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R2_CS64_CNF },
5406 { false, true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R2_CS64_CNF },
5407 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5408 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5409
5410 { false, false, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R3_CS64_CNF },
5411 { false, true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R3_CS64_CNF },
5412 { false, true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R3_CS64_CNF },
5413 { false, true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5414
5415 /* some additional #GP variations */ /** @todo test all possible exceptions! */
5416 { false, true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16 | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R3_CS16 },
5417 { false, true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_TSS32_DF | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_TSS32_DF },
5418 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_00 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_00 },
5419 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_01 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_01 },
5420 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_02 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_02 },
5421 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_03 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_03 },
5422 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_04 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_04 },
5423 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_05 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_05 },
5424 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_06 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_06 },
5425 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_07 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_07 },
5426 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_08 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_08 },
5427 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_09 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_09 },
5428 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_0a | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_0a },
5429 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_0b | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_0b },
5430 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_0c | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_0c },
5431 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_0d | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_0d },
5432 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_0e | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_0e },
5433 { false, true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_SPARE_0f | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_0f },
5434 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_10 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_10 },
5435 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_11 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_11 },
5436 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_12 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_12 },
5437 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_13 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_13 },
5438 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_14 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_14 },
5439 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_15 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_15 },
5440 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_16 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_16 },
5441 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_17 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_17 },
5442 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_18 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_18 },
5443 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_19 | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_19 },
5444 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_1a | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_1a },
5445 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_1b | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_1b },
5446 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_1c | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_1c },
5447 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_1d | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_1d },
5448 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_1e | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_1e },
5449 { false, true, 14, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_SPARE_1f | 0, { .offDst = 0 }, BS3_SEL_R0_SS16 | 0, BS3_SEL_SPARE_1f },
5450 };
5451
5452 bool const fRmOrV86 = BS3_MODE_IS_RM_OR_V86(bMode);
5453 BS3CPUVENDOR const enmCpuVendor = Bs3GetCpuVendor();
5454
5455 Bs3RegSetDr7(X86_DR7_INIT_VAL);
5456 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
5457 {
5458 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, s_aTests[iTest].pfnTest);
5459
5460 for (iSubTest = 0; iSubTest < RT_ELEMENTS(s_aSubTests); iSubTest++)
5461 {
5462 g_usBs3TestStep = (iTest << 12) | (iSubTest << 4);
5463 if ( s_aSubTests[iSubTest].fRmOrV86 == fRmOrV86
5464 && (s_aSubTests[iSubTest].offDst <= UINT16_MAX || s_aTests[iTest].fOpSizePfx))
5465 {
5466 uint16_t const cbFrmDisp = s_aSubTests[iSubTest].fInterPriv ? iSubTest % 7 : 0;
5467 uint16_t const cbStkItem = s_aTests[iTest].fOpSizePfx ? 4 : 2;
5468 uint16_t const cbFrame = (s_aSubTests[iSubTest].fInterPriv ? 4 : 2) * cbStkItem;
5469 uint32_t const uFlatDst = Bs3SelFar32ToFlat32(s_aSubTests[iSubTest].offDst, s_aSubTests[iSubTest].uDstCs)
5470 + (s_aSubTests[iSubTest].cDstBits == 64 && !BS3_MODE_IS_64BIT_SYS(bMode));
5471 RTSEL const uDstSs = s_aSubTests[iSubTest].uDstSs;
5472 uint64_t uDstRspExpect, uDstRspPush;
5473 uint16_t cErrors;
5474
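                /* Seed the starting SS:eSP (deliberately leaving junk in the high word unless it is the
                   flat ring-0 32-bit stack) and work out both the eSP value the prepared frame will hold
                   and the value we expect to find in eSP after the return. */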
5475 Ctx.ss = s_aSubTests[iSubTest].uStartSs;
5476 if (Ctx.ss != BS3_SEL_R0_SS32)
5477 Ctx.rsp.u32 |= UINT32_C(0xfffe0000);
5478 else
5479 Ctx.rsp.u32 &= UINT16_MAX;
5480 uDstRspExpect = uDstRspPush = Ctx.rsp.u + s_aTests[iTest].cbImm + cbFrame + cbFrmDisp;
5481 if (s_aSubTests[iSubTest].fInterPriv)
5482 {
5483 if (s_aTests[iTest].fOpSizePfx)
5484 uDstRspPush = (uDstRspPush & UINT16_MAX) | UINT32_C(0xacdc0000);
5485 if ( uDstSs == (BS3_SEL_R1_SS32 | 1)
5486 || uDstSs == (BS3_SEL_R2_SS32 | 2)
5487 || uDstSs == (BS3_SEL_R3_SS32 | 3)
5488 || (s_aSubTests[iSubTest].cDstBits == 64 && BS3_MODE_IS_64BIT_SYS(bMode)))
5489 {
5490 if (s_aTests[iTest].fOpSizePfx)
5491 uDstRspExpect = uDstRspPush;
5492 else
5493 uDstRspExpect &= UINT16_MAX;
5494 }
5495 }
5496
5497 CtxExpected.bCpl = Ctx.bCpl;
5498 CtxExpected.cs = Ctx.cs;
5499 CtxExpected.ss = Ctx.ss;
5500 CtxExpected.ds = Ctx.ds;
5501 CtxExpected.es = Ctx.es;
5502 CtxExpected.fs = Ctx.fs;
5503 CtxExpected.gs = Ctx.gs;
5504 CtxExpected.rip.u = Ctx.rip.u;
5505 CtxExpected.rsp.u = Ctx.rsp.u;
5506 CtxExpected.rax.u = Ctx.rax.u;
5507 if (s_aSubTests[iSubTest].iXcpt < 0)
5508 {
5509 CtxExpected.cs = s_aSubTests[iSubTest].uDstCs;
5510 CtxExpected.rip.u = s_aSubTests[iSubTest].offDst;
5511 if (s_aSubTests[iSubTest].cDstBits == 64 && !BS3_MODE_IS_64BIT_SYS(bMode))
5512 {
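                    /* Not in long mode, so the 64-bit target CS really executes as 16-bit code: the SALC
                       at LOW_SALC_UD_ADDR runs first (AL = 0xff if CF is set, 0 otherwise) and the UD2
                       right after it raises the fault, hence the RIP and AL adjustments. */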
5513 CtxExpected.rip.u += 1;
5514 CtxExpected.rax.au8[0] = CtxExpected.rflags.u16 & X86_EFL_CF ? 0xff : 0;
5515 }
5516 CtxExpected.ss = uDstSs;
5517 CtxExpected.rsp.u = uDstRspExpect;
5518 if (s_aSubTests[iSubTest].fInterPriv)
5519 {
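                        /* A return to an outer privilege level zeros DS/ES/FS/GS selectors that are not
                           accessible at the new CPL (DPL below CPL and not conforming code), so mirror
                           that in the expected context. */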
5520 uint16_t BS3_FAR *puSel = &CtxExpected.ds; /* ASSUME member order! */
5521 unsigned cSels = 4;
5522 CtxExpected.bCpl = CtxExpected.ss & X86_SEL_RPL;
5523 while (cSels-- > 0)
5524 {
5525 uint16_t uSel = *puSel;
5526 if ( (uSel & X86_SEL_MASK_OFF_RPL)
5527 && Bs3Gdt[uSel >> X86_SEL_SHIFT].Gen.u2Dpl < CtxExpected.bCpl
5528 && (Bs3Gdt[uSel >> X86_SEL_SHIFT].Gen.u4Type & (X86_SEL_TYPE_CODE | X86_SEL_TYPE_CONF))
5529 != (X86_SEL_TYPE_CODE | X86_SEL_TYPE_CONF))
5530 *puSel = 0;
5531 puSel++;
5532 }
5533 CtxExpected.rsp.u += s_aTests[iTest].cbImm; /* arguments are dropped from both stacks. */
5534 }
5535 }
5536 g_uBs3TrapEipHint = CtxExpected.rip.u32;
5537 //Bs3TestPrintf("cs:rip=%04RX16:%04RX64 -> %04RX16:%04RX64\n", Ctx.cs, Ctx.rip.u, CtxExpected.cs, CtxExpected.rip.u);
5538 //Bs3TestPrintf("ss:rsp=%04RX16:%04RX64 -> %04RX16:%04RX64 [pushed %#RX64]\n", Ctx.ss, Ctx.rsp.u, CtxExpected.ss, CtxExpected.rsp.u, uDstRspPush);
5539 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5540 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5541 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5542 //Bs3TestPrintf("%p: %04RX16 %04RX16 %04RX16 %04RX16\n", StkPtr.pu16, StkPtr.pu16[0], StkPtr.pu16[1], StkPtr.pu16[2], StkPtr.pu16[3]);
5543 //Bs3TestPrintf("%.48Rhxd\n", StkPtr.pu16);
5544 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5545 if (s_aSubTests[iSubTest].iXcpt < 0)
5546 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
5547 else
5548 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5549 g_usBs3TestStep++; /* 1 */
5550
5551            /* Bad hw bp: Set up DR0-3 but use invalid length encodings (non-byte) */
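            /* Execution breakpoints are only defined for byte-sized lengths, so the test expects none
               of these to fire (DR6 is checked against its init value below). */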
5552 //Bs3TestPrintf("hw bp: bad len\n");
5553 Bs3RegSetDr0(uFlatDst);
5554 Bs3RegSetDr1(uFlatDst);
5555 Bs3RegSetDr2(uFlatDst);
5556 Bs3RegSetDr3(uFlatDst);
5557 Bs3RegSetDr6(X86_DR6_INIT_VAL);
5558 Bs3RegSetDr7(X86_DR7_INIT_VAL
5559 | X86_DR7_RW(0, X86_DR7_RW_EO) | X86_DR7_LEN(1, X86_DR7_LEN_WORD) | X86_DR7_L_G(1)
5560 | X86_DR7_RW(2, X86_DR7_RW_EO) | X86_DR7_LEN(2, X86_DR7_LEN_DWORD) | X86_DR7_L_G(2)
5561 | ( BS3_MODE_IS_64BIT_SYS(bMode)
5562 ? X86_DR7_RW(3, X86_DR7_RW_EO) | X86_DR7_LEN(3, X86_DR7_LEN_QWORD) | X86_DR7_L_G(3) : 0) );
5563 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5564 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5565 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5566 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5567 Bs3RegSetDr7(X86_DR7_INIT_VAL);
5568 if (s_aSubTests[iSubTest].iXcpt < 0)
5569 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
5570 else
5571 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5572 bs3CpuBasic2_CheckDr6InitVal();
5573 g_usBs3TestStep++; /* 2 */
5574
5575            /* Bad hw bp: set up DR0-3 but don't enable them */
5576 //Bs3TestPrintf("hw bp: disabled\n");
5577 //Bs3RegSetDr0(uFlatDst);
5578 //Bs3RegSetDr1(uFlatDst);
5579 //Bs3RegSetDr2(uFlatDst);
5580 //Bs3RegSetDr3(uFlatDst);
5581 Bs3RegSetDr6(X86_DR6_INIT_VAL);
5582 Bs3RegSetDr7(X86_DR7_INIT_VAL);
5583 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5584 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5585 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5586 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5587 Bs3RegSetDr7(X86_DR7_INIT_VAL);
5588 if (s_aSubTests[iSubTest].iXcpt < 0)
5589 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
5590 else
5591 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5592 bs3CpuBasic2_CheckDr6InitVal();
5593 g_usBs3TestStep++; /* 3 */
5594
5595            /* Bad hw bp: Points at the 2nd byte of the UD2. The docs say it only works when pointing at the first byte. */
5596 //Bs3TestPrintf("hw bp: byte 2\n");
5597 Bs3RegSetDr0(uFlatDst + 1);
5598 Bs3RegSetDr1(uFlatDst + 1);
5599 //Bs3RegSetDr2(uFlatDst);
5600 //Bs3RegSetDr3(uFlatDst);
5601 Bs3RegSetDr6(X86_DR6_INIT_VAL);
5602 Bs3RegSetDr7(X86_DR7_INIT_VAL
5603 | X86_DR7_RW(0, X86_DR7_RW_EO) | X86_DR7_LEN(0, X86_DR7_LEN_BYTE) | X86_DR7_L_G(0)
5604 | X86_DR7_RW(1, X86_DR7_RW_EO) | X86_DR7_LEN(1, X86_DR7_LEN_BYTE) | X86_DR7_L_G(1));
5605 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5606 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5607 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5608 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5609 Bs3RegSetDr7(X86_DR7_INIT_VAL);
5610 if (s_aSubTests[iSubTest].iXcpt < 0)
5611 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
5612 else
5613 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5614 bs3CpuBasic2_CheckDr6InitVal();
5615 g_usBs3TestStep++; /* 4 */
5616
5617            /* Again with two correctly configured hardware breakpoints and a disabled one that just matches the address: */
5618 //Bs3TestPrintf("bp 1 + 3...\n");
5619 Bs3RegSetDr0(uFlatDst);
5620 Bs3RegSetDr1(uFlatDst);
5621 Bs3RegSetDr2(0);
5622 Bs3RegSetDr3(uFlatDst);
5623 Bs3RegSetDr6(X86_DR6_INIT_VAL);
5624 Bs3RegSetDr7(X86_DR7_INIT_VAL
5625 | X86_DR7_RW(1, X86_DR7_RW_EO) | X86_DR7_LEN(1, X86_DR7_LEN_BYTE) | X86_DR7_L_G(1)
5626 | X86_DR7_RW(3, X86_DR7_RW_EO) | X86_DR7_LEN(3, X86_DR7_LEN_BYTE) | X86_DR7_L_G(3) );
5627 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5628 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5629 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5630 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5631 Bs3RegSetDr7(X86_DR7_INIT_VAL);
5632 if (s_aSubTests[iSubTest].iXcpt < 0)
5633 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected,
5634 enmCpuVendor == BS3CPUVENDOR_AMD ? X86_DR6_B1 | X86_DR6_B3 /* 3990x */
5635 : X86_DR6_B0 | X86_DR6_B1 | X86_DR6_B3);
5636 else
5637 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5638 g_usBs3TestStep++; /* 5 */
5639
5640 /* Again with a single locally enabled breakpoint. */
5641 //Bs3TestPrintf("bp 0/l...\n");
5642 Bs3RegSetDr0(uFlatDst);
5643 Bs3RegSetDr1(0);
5644 Bs3RegSetDr2(0);
5645 Bs3RegSetDr3(0);
5646 Bs3RegSetDr6(X86_DR6_INIT_VAL | X86_DR6_B1 | X86_DR6_B2 | X86_DR6_B3 | X86_DR6_BS);
5647 Bs3RegSetDr7(X86_DR7_INIT_VAL
5648 | X86_DR7_RW(0, X86_DR7_RW_EO) | X86_DR7_LEN(0, X86_DR7_LEN_BYTE) | X86_DR7_L(0));
5649 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5650 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5651 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5652 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5653 Bs3RegSetDr7(X86_DR7_INIT_VAL);
5654 if (s_aSubTests[iSubTest].iXcpt < 0)
5655                    bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_B0 | X86_DR6_BS); /* preset B1-B3 cleared, B0 set by the hit, BS preserved */
5656 else
5657 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5658 g_usBs3TestStep++; /* 6 */
5659
5660            /* Again with a single globally enabled breakpoint and several other types of breakpoints
5661 configured but not enabled. */
5662 //Bs3TestPrintf("bp 2/g+...\n");
5663 cErrors = Bs3TestSubErrorCount();
5664 Bs3RegSetDr0(uFlatDst);
5665 Bs3RegSetDr1(uFlatDst);
5666 Bs3RegSetDr2(uFlatDst);
5667 Bs3RegSetDr3(uFlatDst);
5668 Bs3RegSetDr6(X86_DR6_INIT_VAL | X86_DR6_BS | X86_DR6_BD | X86_DR6_BT | X86_DR6_B2);
5669 Bs3RegSetDr7(X86_DR7_INIT_VAL
5670 | X86_DR7_RW(0, X86_DR7_RW_RW) | X86_DR7_LEN(0, X86_DR7_LEN_BYTE)
5671 | X86_DR7_RW(1, X86_DR7_RW_RW) | X86_DR7_LEN(1, X86_DR7_LEN_BYTE) | X86_DR7_L_G(1)
5672 | X86_DR7_RW(2, X86_DR7_RW_EO) | X86_DR7_LEN(2, X86_DR7_LEN_BYTE) | X86_DR7_G(2)
5673 | X86_DR7_RW(3, X86_DR7_RW_WO) | X86_DR7_LEN(3, X86_DR7_LEN_BYTE) | X86_DR7_G(3)
5674 );
5675 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5676 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5677 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5678 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5679 Bs3RegSetDr7(X86_DR7_INIT_VAL);
5680 if (s_aSubTests[iSubTest].iXcpt < 0)
5681 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_B2 | X86_DR6_BS | X86_DR6_BD | X86_DR6_BT);
5682 else
5683 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5684 g_usBs3TestStep++; /* 7 */
5685
5686 /* Now resume it with lots of execution breakpoints configured. */
5687 if (s_aSubTests[iSubTest].iXcpt < 0 && Bs3TestSubErrorCount() == cErrors)
5688 {
5689 Bs3MemCpy(&Ctx2, &TrapCtx.Ctx, sizeof(Ctx2));
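                    /* The resume flag (RF) suppresses instruction breakpoints for the next instruction
                       executed, so the UD2 is expected to run even though DR0-DR3 all point at it. */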
5690 Ctx2.rflags.u32 |= X86_EFL_RF;
5691 //Bs3TestPrintf("bp 3/g+rf %04RX16:%04RX64 efl=%RX32 ds=%04RX16...\n", Ctx2.cs, Ctx2.rip.u, Ctx2.rflags.u32, Ctx2.ds);
5692 Bs3RegSetDr6(X86_DR6_INIT_VAL);
5693 Bs3RegSetDr7(X86_DR7_INIT_VAL
5694 | X86_DR7_RW(0, X86_DR7_RW_EO) | X86_DR7_LEN(0, X86_DR7_LEN_BYTE)
5695 | X86_DR7_RW(1, X86_DR7_RW_EO) | X86_DR7_LEN(1, X86_DR7_LEN_BYTE) | X86_DR7_L_G(1)
5696 | X86_DR7_RW(2, X86_DR7_RW_EO) | X86_DR7_LEN(2, X86_DR7_LEN_BYTE) | X86_DR7_G(2)
5697 | X86_DR7_RW(3, X86_DR7_RW_EO) | X86_DR7_LEN(3, X86_DR7_LEN_BYTE) | X86_DR7_G(3)
5698 );
5699 Bs3TrapSetJmpAndRestore(&Ctx2, &TrapCtx);
5700 Bs3RegSetDr7(X86_DR7_INIT_VAL);
5701 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
5702 bs3CpuBasic2_CheckDr6InitVal();
5703 }
5704 g_usBs3TestStep++; /* 8 */
5705
5706 /* Now do single stepping: */
5707 //Bs3TestPrintf("stepping...\n");
5708 Bs3RegSetDr6(X86_DR6_INIT_VAL);
5709 Ctx.rflags.u16 |= X86_EFL_TF;
5710 CtxExpected.rflags.u16 = Ctx.rflags.u16;
5711 if (s_aSubTests[iSubTest].iXcpt < 0 && s_aSubTests[iSubTest].cDstBits == 64 && !BS3_MODE_IS_64BIT_SYS(bMode))
5712 {
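                    /* The TF single-step #DB is taken before the first instruction at the destination
                       executes, so the SALC has not run yet: undo the RIP/AL adjustments made above. */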
5713 CtxExpected.rip.u -= 1;
5714 CtxExpected.rax.u = Ctx.rax.u;
5715 }
5716 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5717 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5718 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5719 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5720 if (s_aSubTests[iSubTest].iXcpt < 0)
5721 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
5722 else
5723 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5724 Ctx.rflags.u16 &= ~X86_EFL_TF;
5725 CtxExpected.rflags.u16 = Ctx.rflags.u16;
5726 g_usBs3TestStep++; /* 9 */
5727
5728 /* Single step with B0-B3 set to check that they're not preserved
5729 and with BD & BT to check that they are (checked on Intel 6700K): */
5730 //Bs3TestPrintf("stepping b0-b3+bd+bt=1...\n");
5731 Bs3RegSetDr6(X86_DR6_INIT_VAL | X86_DR6_B_MASK | X86_DR6_BD | X86_DR6_BT);
5732 Ctx.rflags.u16 |= X86_EFL_TF;
5733 CtxExpected.rflags.u16 = Ctx.rflags.u16;
5734 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
5735 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
5736 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
5737 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
5738 if (s_aSubTests[iSubTest].iXcpt < 0)
5739 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS | X86_DR6_BD | X86_DR6_BT);
5740 else
5741 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
5742 Ctx.rflags.u16 &= ~X86_EFL_TF;
5743 CtxExpected.rflags.u16 = Ctx.rflags.u16;
5744 g_usBs3TestStep++; /* 10 */
5745
5746 }
5747 }
5748 }
5749 }
5750 /*
5751 * 32-bit tests.
5752 */
5753 else if (BS3_MODE_IS_32BIT_CODE(bMode))
5754 {
5755 static struct
5756 {
5757 bool fOpSizePfx;
5758 uint16_t cbImm;
5759 FPFNBS3FAR pfnTest;
5760 } const s_aTests[] =
5761 {
5762 { false, 0, bs3CpuBasic2_retf_c32, },
5763 { true, 0, bs3CpuBasic2_retf_opsize_c32, },
5764 { false, 32, bs3CpuBasic2_retf_i32_c32, },
5765 { true, 32, bs3CpuBasic2_retf_i32_opsize_c32, },
5766 { false,888, bs3CpuBasic2_retf_i888_c32, },
5767 };
5768
5769 static struct
5770 {
5771 bool fInterPriv;
5772 int8_t iXcpt;
5773 RTSEL uStartSs;
5774 uint8_t cDstBits;
5775 RTSEL uDstCs;
5776            union /* must use a union here: the compiler refuses to build this as a uint16_t and messes up the fixups if it is a plain uint32_t. */
5777 {
5778 uint32_t offDst;
5779 struct
5780 {
5781 NPVOID pv;
5782 uint16_t uHigh;
5783 } s;
5784 };
5785 RTSEL uDstSs;
5786 uint16_t uErrCd;
5787 } const s_aSubTests[] =
5788 { /* PriChg, Xcpt, uStartSs, => bits uDstCs offDst/pv uDstSs uErrCd */
5789 { false, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32 | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
5790 { false, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32 | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
5791 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5792 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5793 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5794 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5795 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5796 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5797 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5798 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5799 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5800 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5801 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5802 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5803 /* same with 32-bit wide target addresses: */
5804 { false, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32 | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, 0 },
5805 { false, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32 | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, 0 },
5806 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, 0 },
5807 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, 0 },
5808 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS16 | 1, 0 },
5809 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS16 | 1, 0 },
5810 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, 0 },
5811 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, 0 },
5812 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS16 | 2, 0 },
5813 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS16 | 2, 0 },
5814 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
5815 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
5816 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS16 | 3, 0 },
5817 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS16 | 3, 0 },
5818 /* conforming stuff */
5819 { false, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
5820 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5821 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 1, BS3_SEL_R0_SS32 },
5822 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5823 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5824 { false, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R1_CS32_CNF },
5825 { false, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R1_CS32_CNF },
5826 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5827 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5828 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5829 { false, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R2_CS32_CNF },
5830 { false, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R2_CS32_CNF },
5831 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R2_CS32_CNF },
5832 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R2_CS32_CNF },
5833 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5834 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32_CNF | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5835 { false, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R3_CS32_CNF },
5836 { false, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 0, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R3_CS32_CNF },
5837 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R3_CS32_CNF },
5838 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 1, { .offDst = LOW_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R3_CS32_CNF },
5839 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R2_SS32 | 2, BS3_SEL_R3_CS32_CNF },
5840 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 2, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 2, BS3_SEL_R3_CS32_CNF },
5841 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 3, { .offDst = LOW_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5842 /* returning to 16-bit code: */
5843 { false, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16 | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 0, 0 },
5844 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R1_SS32 | 1, 0 },
5845 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R1_SS16 | 1, 0 },
5846 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R2_SS32 | 2, 0 },
5847 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R2_SS16 | 2, 0 },
5848 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
5849 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS16 | 3, 0 },
5850 { false, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R0_CS16 | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS16 | 0, 0 },
5851 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R1_SS16 | 1, 0 },
5852 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R1_SS32 | 1, 0 },
5853 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R2_SS16 | 2, 0 },
5854 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R2_SS32 | 2, 0 },
5855 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS16 | 3, 0 },
5856 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
5857 /* returning to 16-bit conforming code: */
5858 { false, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 0, 0 },
5859 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R1_SS32 | 1, 0 },
5860 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 1, BS3_SEL_R0_SS32 },
5861 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R0_SS32 },
5862 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 1, BS3_SEL_R3_SS32 },
5863 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 3, BS3_SEL_R3_SS32 },
5864 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R2_SS16 | 2, 0 },
5865 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
5866 { false, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R1_CS16_CNF },
5867 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R1_SS16 | 1, 0 },
5868 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 1, BS3_SEL_R0_SS32 },
5869 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R0_SS32 },
5870 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 1, BS3_SEL_R3_SS32 },
5871 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 3, BS3_SEL_R3_SS32 },
5872 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R2_SS32 | 2, 0 },
5873 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
5874 { false, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R2_CS16_CNF },
5875 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R2_CS16_CNF },
5876 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R2_SS32 | 2, 0 },
5877 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS16 | 3, 0 },
5878 { false, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R3_CS16_CNF },
5879 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R3_CS16_CNF },
5880 { true, 42, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R2_SS32 | 2, BS3_SEL_R3_CS16_CNF },
5881 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_ud2, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
5882 /* returning to 64-bit code or 16-bit when not in long mode: */
5883 { false, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64 | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, 0 },
5884 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5885 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5886 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5887 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_DS64 | 1, BS3_SEL_R0_DS64 },
5888 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_DS64 | 1, 0 },
5889 { false, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R0_CS64 | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
5890 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5891 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
5892 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5893 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
5894 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5895 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
5896 { true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, BS3_SEL_R2_CS64 },
5897 { true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 3, BS3_SEL_R2_CS64 },
5898 { true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 3, BS3_SEL_R1_SS32 },
5899 { true, 14, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 2, BS3_SEL_R3_SS32 },
5900 /* returning to 64-bit code or 16-bit when not in long mode, conforming code variant: */
5901 { false, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, 0 },
5902 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5903 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5904 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5905
5906 { false, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R1_CS64_CNF },
5907 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
5908 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 2, BS3_SEL_R1_SS16 },
5909 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 1, BS3_SEL_R2_SS16 },
5910 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R2_SS16 },
5911 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5912 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5913
5914 { false, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R2_CS64_CNF },
5915 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R2_CS64_CNF },
5916 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
5917 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5918
5919 { false, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS16 | 0, BS3_SEL_R3_CS64_CNF },
5920 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, BS3_SEL_R3_CS64_CNF },
5921 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R3_CS64_CNF },
5922 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
5923
5924 /* some additional #GP variations */ /** @todo test all possible exceptions! */
5925 { true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16 | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R3_CS16 },
5926 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_00 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_00 },
5927 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_01 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_01 },
5928 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_02 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_02 },
5929 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_03 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_03 },
5930 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_04 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_04 },
5931 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_05 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_05 },
5932 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_06 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_06 },
5933 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_07 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_07 },
5934 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_08 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_08 },
5935 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_09 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_09 },
5936 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_0a | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0a },
5937 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_0b | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0b },
5938 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_0c | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0c },
5939 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_0d | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0d },
5940 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_0e | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0e },
5941 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_SPARE_0f | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0f },
5942 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_10 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_10 },
5943 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_11 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_11 },
5944 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_12 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_12 },
5945 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_13 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_13 },
5946 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_14 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_14 },
5947 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_15 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_15 },
5948 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_16 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_16 },
5949 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_17 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_17 },
5950 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_18 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_18 },
5951 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_19 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_19 },
5952 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_1a | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1a },
5953 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_1b | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1b },
5954 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_1c | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1c },
5955 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_1d | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1d },
5956 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_1e | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1e },
5957 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_SPARE_1f | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1f },
5958 };
5959
5960 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
5961 {
5962 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, s_aTests[iTest].pfnTest);
5963 //Bs3TestPrintf("-------------- #%u: cs:eip=%04RX16:%08RX64 imm=%u%s\n",
5964 // iTest, Ctx.cs, Ctx.rip.u, s_aTests[iTest].cbImm, s_aTests[iTest].fOpSizePfx ? " o16" : "");
5965
5966 for (iSubTest = 0; iSubTest < RT_ELEMENTS(s_aSubTests); iSubTest++)
5967 {
5968 g_usBs3TestStep = (iTest << 12) | (iSubTest << 1);
5969 if (!s_aTests[iTest].fOpSizePfx || s_aSubTests[iSubTest].offDst <= UINT16_MAX)
5970 {
5971 uint16_t const cbFrmDisp = s_aSubTests[iSubTest].fInterPriv ? iSubTest % 7 : 0;
5972 uint16_t const cbStkItem = s_aTests[iTest].fOpSizePfx ? 2 : 4;
5973 uint16_t const cbFrame = (s_aSubTests[iSubTest].fInterPriv ? 4 : 2) * cbStkItem;
5974 RTSEL const uDstSs = s_aSubTests[iSubTest].uDstSs;
5975 uint64_t uDstRspExpect, uDstRspPush;
5976 //Bs3TestPrintf(" #%u: %s %d %#04RX16 -> %u %#04RX16:%#04RX32 %#04RX16 %#RX16\n", iSubTest, s_aSubTests[iSubTest].fInterPriv ? "priv" : "same", s_aSubTests[iSubTest].iXcpt, s_aSubTests[iSubTest].uStartSs,
5977 // s_aSubTests[iSubTest].cDstBits, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst, s_aSubTests[iSubTest].uDstSs, s_aSubTests[iSubTest].uErrCd);
5978
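                /* Same stack seeding and expectation logic as in the 16-bit variant above, except that
                   here the operand size prefix means a 16-bit operation (the default being 32-bit). */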
5979 Ctx.ss = s_aSubTests[iSubTest].uStartSs;
5980 if (Ctx.ss != BS3_SEL_R0_SS32)
5981 Ctx.rsp.u32 |= UINT32_C(0xfffe0000);
5982 else
5983 Ctx.rsp.u32 &= UINT16_MAX;
5984 uDstRspExpect = uDstRspPush = Ctx.rsp.u + s_aTests[iTest].cbImm + cbFrame + cbFrmDisp;
5985 if (s_aSubTests[iSubTest].fInterPriv)
5986 {
5987 if (!s_aTests[iTest].fOpSizePfx)
5988 uDstRspPush = (uDstRspPush & UINT16_MAX) | UINT32_C(0xacdc0000);
5989 if ( uDstSs == (BS3_SEL_R1_SS32 | 1)
5990 || uDstSs == (BS3_SEL_R2_SS32 | 2)
5991 || uDstSs == (BS3_SEL_R3_SS32 | 3)
5992 || (s_aSubTests[iSubTest].cDstBits == 64 && BS3_MODE_IS_64BIT_SYS(bMode)))
5993 {
5994 if (!s_aTests[iTest].fOpSizePfx)
5995 uDstRspExpect = uDstRspPush;
5996 else
5997 uDstRspExpect &= UINT16_MAX;
5998 }
5999 }
6000
6001 CtxExpected.bCpl = Ctx.bCpl;
6002 CtxExpected.cs = Ctx.cs;
6003 CtxExpected.ss = Ctx.ss;
6004 CtxExpected.ds = Ctx.ds;
6005 CtxExpected.es = Ctx.es;
6006 CtxExpected.fs = Ctx.fs;
6007 CtxExpected.gs = Ctx.gs;
6008 CtxExpected.rip.u = Ctx.rip.u;
6009 CtxExpected.rsp.u = Ctx.rsp.u;
6010 CtxExpected.rax.u = Ctx.rax.u;
6011 if (s_aSubTests[iSubTest].iXcpt < 0)
6012 {
6013 CtxExpected.cs = s_aSubTests[iSubTest].uDstCs;
6014 CtxExpected.rip.u = s_aSubTests[iSubTest].offDst;
6015 if (s_aSubTests[iSubTest].cDstBits == 64 && !BS3_MODE_IS_64BIT_SYS(bMode))
6016 {
6017 CtxExpected.rip.u += 1;
6018 CtxExpected.rax.au8[0] = CtxExpected.rflags.u16 & X86_EFL_CF ? 0xff : 0;
6019 }
6020 CtxExpected.ss = uDstSs;
6021 CtxExpected.rsp.u = uDstRspExpect;
6022 if (s_aSubTests[iSubTest].fInterPriv)
6023 {
6024 uint16_t BS3_FAR *puSel = &CtxExpected.ds; /* ASSUME member order! */
6025 unsigned cSels = 4;
6026 CtxExpected.bCpl = CtxExpected.ss & X86_SEL_RPL;
6027 while (cSels-- > 0)
6028 {
6029 uint16_t uSel = *puSel;
6030 if ( (uSel & X86_SEL_MASK_OFF_RPL)
6031 && Bs3Gdt[uSel >> X86_SEL_SHIFT].Gen.u2Dpl < CtxExpected.bCpl
6032 && (Bs3Gdt[uSel >> X86_SEL_SHIFT].Gen.u4Type & (X86_SEL_TYPE_CODE | X86_SEL_TYPE_CONF))
6033 != (X86_SEL_TYPE_CODE | X86_SEL_TYPE_CONF))
6034 *puSel = 0;
6035 puSel++;
6036 }
6037 CtxExpected.rsp.u += s_aTests[iTest].cbImm; /* arguments are dropped from both stacks. */
6038 }
6039 }
6040 g_uBs3TrapEipHint = CtxExpected.rip.u32;
6041 //Bs3TestPrintf("ss:rsp=%04RX16:%04RX64 -> %04RX16:%04RX64 [pushed %#RX64]; %04RX16:%04RX64\n",Ctx.ss, Ctx.rsp.u,
6042 // CtxExpected.ss, CtxExpected.rsp.u, uDstRspPush, CtxExpected.cs, CtxExpected.rip.u);
6043 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
6044 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
6045 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
6046 //Bs3TestPrintf("%p: %04RX16 %04RX16 %04RX16 %04RX16\n", StkPtr.pu16, StkPtr.pu16[0], StkPtr.pu16[1], StkPtr.pu16[2], StkPtr.pu16[3]);
6047 //Bs3TestPrintf("%.48Rhxd\n", StkPtr.pu16);
6048 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
6049 if (s_aSubTests[iSubTest].iXcpt < 0)
6050 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
6051 else
6052 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
6053 g_usBs3TestStep++;
6054
6055 /* Again single stepping: */
6056 //Bs3TestPrintf("stepping...\n");
6057 Bs3RegSetDr6(X86_DR6_INIT_VAL);
6058 Ctx.rflags.u16 |= X86_EFL_TF;
6059 CtxExpected.rflags.u16 = Ctx.rflags.u16;
6060 if (s_aSubTests[iSubTest].iXcpt < 0 && s_aSubTests[iSubTest].cDstBits == 64 && !BS3_MODE_IS_64BIT_SYS(bMode))
6061 {
6062 CtxExpected.rip.u -= 1;
6063 CtxExpected.rax.u = Ctx.rax.u;
6064 }
6065 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
6066 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
6067 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
6068 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
6069 if (s_aSubTests[iSubTest].iXcpt < 0)
6070 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
6071 else
6072 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
6073 Ctx.rflags.u16 &= ~X86_EFL_TF;
6074 CtxExpected.rflags.u16 = Ctx.rflags.u16;
6075 g_usBs3TestStep++;
6076 }
6077 }
6078 }
6079 }
6080 /*
6081 * 64-bit tests.
6082 */
6083 else if (BS3_MODE_IS_64BIT_CODE(bMode))
6084 {
6085 static struct
6086 {
6087 uint8_t fOpSizePfx; /**< 0: none, 1: 066h, 2: REX.W; Effective op size prefix. */
6088 uint16_t cbImm;
6089 FPFNBS3FAR pfnTest;
6090 } const s_aTests[] =
6091 {
6092 { 0, 0, bs3CpuBasic2_retf_c64, },
6093 { 1, 0, bs3CpuBasic2_retf_opsize_c64, },
6094 { 0, 32, bs3CpuBasic2_retf_i32_c64, },
6095 { 1, 32, bs3CpuBasic2_retf_i32_opsize_c64, },
6096 { 2, 0, bs3CpuBasic2_retf_rexw_c64, },
6097 { 2, 0, bs3CpuBasic2_retf_opsize_rexw_c64, },
6098 { 1, 0, bs3CpuBasic2_retf_rexw_opsize_c64, },
6099 { 2, 24, bs3CpuBasic2_retf_i24_rexw_c64, },
6100 { 2, 24, bs3CpuBasic2_retf_i24_opsize_rexw_c64, },
6101 { 1, 24, bs3CpuBasic2_retf_i24_rexw_opsize_c64, },
6102 { 0,888, bs3CpuBasic2_retf_i888_c64, },
6103 };
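        /* Note on the effective prefix values above: REX.W overrides a preceding 66h prefix,
           while a REX prefix that isn't placed immediately before the opcode is ignored,
           so the trailing 66h wins in that case. */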
6104
6105 static struct
6106 {
6107 bool fInterPriv;
6108 int8_t iXcpt;
6109 RTSEL uStartSs;
6110 uint8_t cDstBits;
6111 RTSEL uDstCs;
6112            union /* must use a union here: the compiler won't compile this as a plain uint16_t and it messes up fixups for a uint32_t. */
6113 {
6114 uint32_t offDst;
6115 struct
6116 {
6117 NPVOID pv;
6118 uint16_t uHigh;
6119 } s;
6120 };
6121 RTSEL uDstSs;
6122 uint16_t uErrCd;
6123 } const s_aSubTests[] =
6124 { /* PriChg, Xcpt, uStartSs, => bits uDstCs offDst/pv uDstSs uErrCd */
6125 { false, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64 | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
6126 { false, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R0_CS64 | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
6127 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
6128 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
6129 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
6130 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64 | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
6131 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
6132 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
6133 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
6134 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64 | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
6135 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
6136 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
6137 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
6138 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64 | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
6139 /* same with 32-bit wide target addresses: */
6140 { false, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64 | 0, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, 0 },
6141 { false, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R0_CS64 | 0, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, 0 },
6142 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64 | 1, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, 0 },
6143 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64 | 1, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, 0 },
6144 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64 | 1, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS16 | 1, 0 },
6145 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R1_CS64 | 1, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS16 | 1, 0 },
6146 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64 | 2, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, 0 },
6147 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64 | 2, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, 0 },
6148 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64 | 2, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS16 | 2, 0 },
6149 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R2_CS64 | 2, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS16 | 2, 0 },
6150 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64 | 3, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
6151 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64 | 3, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
6152 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64 | 3, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS16 | 3, 0 },
6153 { true, -1, BS3_SEL_R0_SS16 | 0, 64, BS3_SEL_R3_CS64 | 3, { .s = {(NPVOID)bs3CpuBasic2_salc_ud2, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS16 | 3, 0 },
6154 /* conforming stuff */
6155 { false, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
6156 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
6157 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 1, BS3_SEL_R0_SS32 },
6158 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
6159 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R0_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
6160 { false, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R1_CS64_CNF },
6161 { false, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R1_CS64_CNF },
6162 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
6163 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
6164 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R1_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
6165 { false, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R2_CS64_CNF },
6166 { false, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R2_CS64_CNF },
6167 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R2_CS64_CNF },
6168 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R2_CS64_CNF },
6169 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
6170 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R2_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
6171 { false, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R3_CS64_CNF },
6172 { false, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 0, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R3_CS64_CNF },
6173 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R3_CS64_CNF },
6174 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 1, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R3_CS64_CNF },
6175 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R2_SS32 | 2, BS3_SEL_R3_CS64_CNF },
6176 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 2, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 2, BS3_SEL_R3_CS64_CNF },
6177 { true, -1, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_R3_CS64_CNF | 3, { .offDst = LOW_SALC_UD_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
6178 /* returning to 16-bit code: */
6179 { false, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16 | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R0_SS32 | 0, 0 },
6180 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R1_SS32 | 1, 0 },
6181 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R1_SS16 | 1, 0 },
6182 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R2_SS32 | 2, 0 },
6183 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R2_SS16 | 2, 0 },
6184 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
6185 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS16 | 3, 0 },
6186 { false, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R0_CS16 | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R0_SS16 | 0, 0 },
6187 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R1_SS16 | 1, 0 },
6188 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R1_CS16 | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R1_SS32 | 1, 0 },
6189 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R2_SS16 | 2, 0 },
6190 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R2_CS16 | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R2_SS32 | 2, 0 },
6191 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS16 | 3, 0 },
6192 { true, -1, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16 | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
6193 /* returning to 16-bit conforming code: */
6194 { false, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R0_SS32 | 0, 0 },
6195 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R1_SS32 | 1, 0 },
6196 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R0_SS32 | 1, BS3_SEL_R0_SS32 },
6197 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R0_SS32 },
6198 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS32 | 1, BS3_SEL_R3_SS32 },
6199 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS32 | 3, BS3_SEL_R3_SS32 },
6200 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R2_SS16 | 2, 0 },
6201 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R0_CS16_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
6202 { false, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R1_CS16_CNF },
6203 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R1_SS16 | 1, 0 },
6204 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R0_SS32 | 1, BS3_SEL_R0_SS32 },
6205 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R0_SS32 },
6206 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS32 | 1, BS3_SEL_R3_SS32 },
6207 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS32 | 3, BS3_SEL_R3_SS32 },
6208 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R2_SS32 | 2, 0 },
6209 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R1_CS16_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
6210 { false, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R2_CS16_CNF },
6211 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R2_CS16_CNF },
6212 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R2_SS32 | 2, 0 },
6213 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R2_CS16_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS16 | 3, 0 },
6214 { false, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R3_CS16_CNF },
6215 { true, 14, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R3_CS16_CNF },
6216 { true, 42, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R2_SS32 | 2, BS3_SEL_R3_CS16_CNF },
6217 { true, -1, BS3_SEL_R0_SS32 | 0, 16, BS3_SEL_R3_CS16_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, 0 } }, BS3_SEL_R3_SS32 | 3, 0 },
6218 /* returning to 32-bit code - narrow 16-bit target address: */
6219 { false, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32 | 0, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R0_SS32 | 0, 0 },
6220 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
6221 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
6222 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
6223 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
6224 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
6225 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
6226 { false, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32 | 0, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R0_SS16 | 0, 0 },
6227 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R1_SS16 | 1, 0 },
6228 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R1_SS32 | 1, 0 },
6229 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R2_SS16 | 2, 0 },
6230 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
6231 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R3_SS16 | 3, 0 },
6232 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
6233 /* returning to 32-bit code - wider 32-bit target address: */
6234 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, 0 },
6235 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS16 | 1, 0 },
6236 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, 0 },
6237 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS16 | 2, 0 },
6238 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
6239 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS16 | 3, 0 },
6240 { false, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R0_CS32 | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS16 | 0, 0 },
6241 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS16 | 1, 0 },
6242 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R1_CS32 | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, 0 },
6243 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS16 | 2, 0 },
6244 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R2_CS32 | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, 0 },
6245 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS16 | 3, 0 },
6246 { true, -1, BS3_SEL_R0_SS16 | 0, 32, BS3_SEL_R3_CS32 | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
6247 /* returning to 32-bit conforming code: */
6248 { false, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, 0 },
6249 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, 0 },
6250 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 1, BS3_SEL_R0_SS32 },
6251 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R0_SS32 },
6252 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 1, BS3_SEL_R3_SS32 },
6253 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, BS3_SEL_R3_SS32 },
6254 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS16 | 2, 0 },
6255 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R0_CS32_CNF | 3, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R3_SS32 | 3, 0 },
6256 { false, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R1_CS32_CNF },
6257 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS16 | 1, 0 },
6258 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 1, BS3_SEL_R0_SS32 },
6259 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R0_SS32 },
6260 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 1, BS3_SEL_R3_SS32 },
6261 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, BS3_SEL_R3_SS32 },
6262 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, 0 },
6263 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R1_CS32_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
6264 { false, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R2_CS32_CNF },
6265 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R2_CS32_CNF },
6266 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32_CNF | 2, { .offDst = LOW_SWAPGS_ADDR }, BS3_SEL_R2_SS32 | 2, 0 },
6267 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R2_CS32_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS16 | 3, 0 },
6268 { false, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 0, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R0_SS32 | 0, BS3_SEL_R3_CS32_CNF },
6269 { true, 14, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 1, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R1_SS32 | 1, BS3_SEL_R3_CS32_CNF },
6270 { true, 42, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 2, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R2_SS32 | 2, BS3_SEL_R3_CS32_CNF },
6271 { true, -1, BS3_SEL_R0_SS32 | 0, 32, BS3_SEL_R3_CS32_CNF | 3, { .s = {(NPVOID)bs3CpuBasic2_swapgs, BS3TEXT16_ADDR_HI } }, BS3_SEL_R3_SS32 | 3, 0 },
6272
6273 /* some additional #GP variations */ /** @todo test all possible exceptions! */
6274 { true, 14, BS3_SEL_R0_SS16 | 0, 16, BS3_SEL_R3_CS16 | 2, { .s = { (NPVOID)bs3CpuBasic2_ud2 } }, BS3_SEL_R2_SS16 | 2, BS3_SEL_R3_CS16 },
6275
6276 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_00 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_00 },
6277 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_02 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_02 },
6278 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_04 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_04 },
6279 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_06 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_06 },
6280 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_08 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_08 },
6281 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_0a | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0a },
6282 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_0c | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0c },
6283 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_0e | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_0e },
6284 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_10 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_10 },
6285 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_12 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_12 },
6286 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_14 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_14 },
6287 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_16 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_16 },
6288 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_18 | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_18 },
6289 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_1a | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1a },
6290 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_1c | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1c },
6291 { true, 14, BS3_SEL_R0_SS32 | 0, 64, BS3_SEL_SPARE_1e | 0, { .offDst = 0 }, BS3_SEL_R0_SS32 | 0, BS3_SEL_SPARE_1e },
6292 };
6293
6294 for (iTest = 0; iTest < RT_ELEMENTS(s_aTests); iTest++)
6295 {
6296 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, s_aTests[iTest].pfnTest);
6297 //Bs3TestPrintf("-------------- #%u: cs:eip=%04RX16:%08RX64 imm=%u%s\n", iTest, Ctx.cs, Ctx.rip.u, s_aTests[iTest].cbImm,
6298 // s_aTests[iTest].fOpSizePfx == 1 ? " o16" : s_aTests[iTest].fOpSizePfx == 2 ? " o64" : "");
6299
6300 for (iSubTest = 0; iSubTest < RT_ELEMENTS(s_aSubTests); iSubTest++)
6301 {
6302 g_usBs3TestStep = (iTest << 12) | (iSubTest << 1);
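                /* A 66h operand size prefix narrows the popped return offset to 16 bits,
                   so skip sub-tests whose target offset doesn't fit in that case. */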
6303 if (s_aTests[iTest].fOpSizePfx != 1 || s_aSubTests[iSubTest].offDst <= UINT16_MAX)
6304 {
6305 uint16_t const cbFrmDisp = s_aSubTests[iSubTest].fInterPriv ? iSubTest % 7 : 0;
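                    /* In 64-bit mode RETF pops 2/4/8 byte stack items for 66h/default/REX.W operand
                       sizes; an inter-privilege return also pops ss:rsp, i.e. 4 items instead of 2. */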
6306 uint16_t const cbStkItem = s_aTests[iTest].fOpSizePfx == 2 ? 8 : s_aTests[iTest].fOpSizePfx == 0 ? 4 : 2;
6307 uint16_t const cbFrame = (s_aSubTests[iSubTest].fInterPriv ? 4 : 2) * cbStkItem;
6308 RTSEL const uDstSs = s_aSubTests[iSubTest].uDstSs;
6309 uint64_t uDstRspExpect, uDstRspPush;
6310 //Bs3TestPrintf(" #%u: %s %d %#04RX16 -> %u %#04RX16:%#04RX32 %#04RX16 %#RX16\n", iSubTest, s_aSubTests[iSubTest].fInterPriv ? "priv" : "same", s_aSubTests[iSubTest].iXcpt, s_aSubTests[iSubTest].uStartSs,
6311 // s_aSubTests[iSubTest].cDstBits, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst, s_aSubTests[iSubTest].uDstSs, s_aSubTests[iSubTest].uErrCd);
6312
6313 Ctx.ss = s_aSubTests[iSubTest].uStartSs;
6314 uDstRspExpect = uDstRspPush = Ctx.rsp.u + s_aTests[iTest].cbImm + cbFrame + cbFrmDisp;
6315 if (s_aSubTests[iSubTest].fInterPriv)
6316 {
6317 if (s_aTests[iTest].fOpSizePfx != 1)
6318 {
6319 if (s_aTests[iTest].fOpSizePfx == 2)
6320 uDstRspPush |= UINT64_C(0xf00dfaceacdc0000);
6321 else
6322 uDstRspPush |= UINT32_C(0xacdc0000);
6323 if (s_aSubTests[iSubTest].cDstBits == 64)
6324 uDstRspExpect = uDstRspPush;
6325 else if (!BS3_SEL_IS_SS16(uDstSs))
6326 uDstRspExpect = (uint32_t)uDstRspPush;
6327 }
6328 }
6329
6330 CtxExpected.bCpl = Ctx.bCpl;
6331 CtxExpected.cs = Ctx.cs;
6332 CtxExpected.ss = Ctx.ss;
6333 CtxExpected.ds = Ctx.ds;
6334 CtxExpected.es = Ctx.es;
6335 CtxExpected.fs = Ctx.fs;
6336 CtxExpected.gs = Ctx.gs;
6337 CtxExpected.rip.u = Ctx.rip.u;
6338 CtxExpected.rsp.u = Ctx.rsp.u;
6339 CtxExpected.rax.u = Ctx.rax.u;
6340 if (s_aSubTests[iSubTest].iXcpt < 0)
6341 {
6342 CtxExpected.cs = s_aSubTests[iSubTest].uDstCs;
6343 CtxExpected.rip.u = s_aSubTests[iSubTest].offDst;
6344 if (s_aSubTests[iSubTest].cDstBits == 64 && !BS3_MODE_IS_64BIT_SYS(bMode))
6345 {
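                            /* As in the 16/32-bit path: if the system weren't in long mode the SALC would
                               execute (AL = CF ? 0xff : 0) and the #UD would come from the following UD2. */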
6346 CtxExpected.rip.u += 1;
6347 CtxExpected.rax.au8[0] = CtxExpected.rflags.u16 & X86_EFL_CF ? 0xff : 0;
6348 }
6349 CtxExpected.ss = uDstSs;
6350 CtxExpected.rsp.u = uDstRspExpect;
6351 if (s_aSubTests[iSubTest].fInterPriv)
6352 {
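                            /* Same rule as above: selectors with DPL below the new CPL are nulled on the
                               return to outer privilege unless they reference conforming code. */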
6353 uint16_t BS3_FAR *puSel = &CtxExpected.ds; /* ASSUME member order! */
6354 unsigned cSels = 4;
6355 CtxExpected.bCpl = CtxExpected.ss & X86_SEL_RPL;
6356 while (cSels-- > 0)
6357 {
6358 uint16_t uSel = *puSel;
6359 if ( (uSel & X86_SEL_MASK_OFF_RPL)
6360 && Bs3Gdt[uSel >> X86_SEL_SHIFT].Gen.u2Dpl < CtxExpected.bCpl
6361 && (Bs3Gdt[uSel >> X86_SEL_SHIFT].Gen.u4Type & (X86_SEL_TYPE_CODE | X86_SEL_TYPE_CONF))
6362 != (X86_SEL_TYPE_CODE | X86_SEL_TYPE_CONF))
6363 *puSel = 0;
6364 puSel++;
6365 }
6366 CtxExpected.rsp.u += s_aTests[iTest].cbImm; /* arguments are dropped from both stacks. */
6367 }
6368 }
6369 g_uBs3TrapEipHint = CtxExpected.rip.u32;
6370 //Bs3TestPrintf("ss:rsp=%04RX16:%04RX64 -> %04RX16:%04RX64 [pushed %#RX64]; %04RX16:%04RX64\n",Ctx.ss, Ctx.rsp.u,
6371 // CtxExpected.ss, CtxExpected.rsp.u, uDstRspPush, CtxExpected.cs, CtxExpected.rip.u);
6372 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
6373 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
6374 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
6375 //Bs3TestPrintf("%p: %04RX16 %04RX16 %04RX16 %04RX16\n", StkPtr.pu16, StkPtr.pu16[0], StkPtr.pu16[1], StkPtr.pu16[2], StkPtr.pu16[3]);
6376 //Bs3TestPrintf("%.48Rhxd\n", StkPtr.pu16);
6377 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
6378 if (s_aSubTests[iSubTest].iXcpt < 0)
6379 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
6380 else
6381 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
6382 g_usBs3TestStep++;
6383
6384 /* Again single stepping: */
6385 //Bs3TestPrintf("stepping...\n");
6386 Bs3RegSetDr6(X86_DR6_INIT_VAL);
6387 Ctx.rflags.u16 |= X86_EFL_TF;
6388 CtxExpected.rflags.u16 = Ctx.rflags.u16;
6389 if (s_aSubTests[iSubTest].iXcpt < 0 && s_aSubTests[iSubTest].cDstBits == 64 && !BS3_MODE_IS_64BIT_SYS(bMode))
6390 {
6391 CtxExpected.rip.u -= 1;
6392 CtxExpected.rax.u = Ctx.rax.u;
6393 }
6394 bs3CpuBasic2_retf_PrepStack(StkPtr, cbStkItem, s_aSubTests[iSubTest].uDstCs, s_aSubTests[iSubTest].offDst,
6395 s_aSubTests[iSubTest].fInterPriv, s_aTests[iTest].cbImm,
6396 s_aSubTests[iSubTest].uDstSs, uDstRspPush);
6397 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
6398 if (s_aSubTests[iSubTest].iXcpt < 0)
6399 bs3CpuBasic2_CompareDbCtx(&TrapCtx, &CtxExpected, X86_DR6_BS);
6400 else
6401 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, s_aSubTests[iSubTest].uErrCd);
6402 Ctx.rflags.u16 &= ~X86_EFL_TF;
6403 CtxExpected.rflags.u16 = Ctx.rflags.u16;
6404 g_usBs3TestStep++;
6405 }
6406 }
6407 }
6408 }
6409 else
6410        Bs3TestFailed("unexpected code mode");
6411
6412 if (BS3_MODE_IS_64BIT_SYS(bMode))
6413 Bs3TrapReInit();
6414 return 0;
6415}
6416
6417
6418
6419/*********************************************************************************************************************************
6420* Instruction Length *
6421*********************************************************************************************************************************/
6422
6423
6424static uint8_t bs3CpuBasic2_instr_len_Worker(uint8_t bMode, uint8_t BS3_FAR *pbCodeBuf)
6425{
6426 BS3TRAPFRAME TrapCtx;
6427 BS3REGCTX Ctx;
6428 BS3REGCTX CtxExpected;
6429 uint32_t uEipBase;
6430 unsigned cbInstr;
6431 unsigned off;
6432
6433 /* Make sure they're allocated and all zeroed. */
6434 Bs3MemZero(&Ctx, sizeof(Ctx));
6435 Bs3MemZero(&CtxExpected, sizeof(Ctx));
6436 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
6437
6438 /*
6439 * Create a context.
6440 *
6441     * ASSUMES we're in ring-0 on the ring-0 stack and using less than 16KB.
6442 */
6443 Bs3RegCtxSaveEx(&Ctx, bMode, 768);
6444 Bs3RegCtxSetRipCsFromCurPtr(&Ctx, (FPFNBS3FAR)pbCodeBuf);
6445 uEipBase = Ctx.rip.u32;
6446
6447 Bs3MemCpy(&CtxExpected, &Ctx, sizeof(CtxExpected));
6448
6449 /*
6450 * Simple stuff crossing the page.
6451 */
6452 for (off = X86_PAGE_SIZE - 32; off <= X86_PAGE_SIZE + 16; off++)
6453 {
6454 Ctx.rip.u32 = uEipBase + off;
6455 for (cbInstr = 0; cbInstr < 24; cbInstr++)
6456 {
6457 /*
6458 * Generate the instructions:
6459 * [es] nop
6460 * ud2
6461 */
6462 if (cbInstr > 0)
6463 {
6464 Bs3MemSet(&pbCodeBuf[off], 0x26 /* es */, cbInstr);
6465 pbCodeBuf[off + cbInstr - 1] = 0x90; /* nop */
6466 }
6467 pbCodeBuf[off + cbInstr + 0] = 0x0f; /* ud2 */
6468 pbCodeBuf[off + cbInstr + 1] = 0x0b;
6469
6470 /*
6471 * Test it.
6472 */
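            /* Instructions are architecturally limited to 15 bytes: up to that the prefixed NOP
               executes and the UD2 raises #UD; at 16 bytes or more the CPU raises #GP(0)
               without advancing RIP. */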
6473 if (cbInstr < 16)
6474 CtxExpected.rip.u32 = Ctx.rip.u32 + cbInstr;
6475 else
6476 CtxExpected.rip.u32 = Ctx.rip.u32;
6477 g_uBs3TrapEipHint = CtxExpected.rip.u32;
6478 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
6479 if (cbInstr < 16)
6480 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxExpected);
6481 else
6482 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxExpected, 0);
6483 }
6484 pbCodeBuf[off] = 0xf1; /* icebp */
6485 }
6486
6487 /*
6488 * Pit instruction length violations against the segment limit (#GP).
6489 */
6490 if (!BS3_MODE_IS_RM_OR_V86(bMode) && bMode != BS3_MODE_LM64)
6491 {
6492 /** @todo */
6493 }
6494
6495 /*
6496 * Pit instruction length violations against an invalid page (#PF).
6497 */
6498 if (BS3_MODE_IS_PAGED(bMode))
6499 {
6500 /** @todo */
6501 }
6502
6503 return 0;
6504}
6505
6506
6507/**
6508 * Entrypoint for the instruction length tests.
6509 *
6510 * @returns 0 or BS3TESTDOMODE_SKIPPED.
6511 * @param bMode The CPU mode we're testing.
6512 */
6513BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_instr_len)(uint8_t bMode)
6514{
6515 /*
6516 * Allocate three pages so we can straddle an instruction across the
6517     * boundary for testing special IEM cases, with the last page being
6518     * made inaccessible and useful for pitting #PF against #GP.
6519 */
6520 uint8_t BS3_FAR * const pbCodeBuf = (uint8_t BS3_FAR *)Bs3MemAlloc(BS3MEMKIND_REAL, X86_PAGE_SIZE * 3);
6521 //Bs3TestPrintf("pbCodeBuf=%p\n", pbCodeBuf);
6522 if (pbCodeBuf)
6523 {
6524 Bs3MemSet(pbCodeBuf, 0xf1, X86_PAGE_SIZE * 3);
6525 bs3CpuBasic2_SetGlobals(bMode);
6526
6527 if (!BS3_MODE_IS_PAGED(bMode))
6528 bs3CpuBasic2_instr_len_Worker(bMode, pbCodeBuf);
6529 else
6530 {
6531 uint32_t const uFlatLastPg = Bs3SelPtrToFlat(pbCodeBuf) + X86_PAGE_SIZE * 2;
6532 int rc = Bs3PagingProtect(uFlatLastPg, X86_PAGE_SIZE, 0, X86_PTE_P);
6533 if (RT_SUCCESS(rc))
6534 {
6535 bs3CpuBasic2_instr_len_Worker(bMode, pbCodeBuf);
6536 Bs3PagingProtect(uFlatLastPg, X86_PAGE_SIZE, X86_PTE_P, 0);
6537 }
6538 else
6539                Bs3TestFailed("Failed to mark the last code page not-present");
6540 }
6541
6542 Bs3MemFree(pbCodeBuf, X86_PAGE_SIZE * 3);
6543 }
6544 else
6545 Bs3TestFailed("Failed to allocate 3 code pages");
6546 return 0;
6547}
6548