VirtualBox

source: vbox/trunk/src/VBox/ValidationKit/bootsectors/bs3-cpu-basic-2-x0.c@ 96966

Last change on this file since 96966 was 96407, checked in by vboxsync, 2 years ago

scm copyright and license note update

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 167.0 KB
Line 
1/* $Id: bs3-cpu-basic-2-x0.c 96407 2022-08-22 17:43:14Z vboxsync $ */
2/** @file
3 * BS3Kit - bs3-cpu-basic-2, C test driver code (16-bit).
4 */
5
6/*
7 * Copyright (C) 2007-2022 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * The contents of this file may alternatively be used under the terms
26 * of the Common Development and Distribution License Version 1.0
27 * (CDDL), a copy of it is provided in the "COPYING.CDDL" file included
28 * in the VirtualBox distribution, in which case the provisions of the
29 * CDDL are applicable instead of those of the GPL.
30 *
31 * You may elect to license modified versions of this file under the
32 * terms and conditions of either the GPL or the CDDL or both.
33 *
34 * SPDX-License-Identifier: GPL-3.0-only OR CDDL-1.0
35 */
36
37
38/*********************************************************************************************************************************
39* Header Files *
40*********************************************************************************************************************************/
41#define BS3_USE_X0_TEXT_SEG
42#include <bs3kit.h>
43#include <iprt/asm.h>
44#include <iprt/asm-amd64-x86.h>
45
46
47/*********************************************************************************************************************************
48* Defined Constants And Macros *
49*********************************************************************************************************************************/
#undef CHECK_MEMBER
/** Compares an actual trap-frame member against the expected value and
 *  reports a failure via bs3CpuBasic2_FailedF() on mismatch; @a a_szFmt is
 *  used to format both the actual and the expected value. */
#define CHECK_MEMBER(a_szName, a_szFmt, a_Actual, a_Expected) \
    do \
    { \
        if ((a_Actual) == (a_Expected)) { /* likely */ } \
        else bs3CpuBasic2_FailedF(a_szName "=" a_szFmt " expected " a_szFmt, (a_Actual), (a_Expected)); \
    } while (0)
57
58
59/** Indicating that we've got operand size prefix and that it matters. */
60#define BS3CB2SIDTSGDT_F_OPSIZE UINT8_C(0x01)
61/** Worker requires 386 or later. */
62#define BS3CB2SIDTSGDT_F_386PLUS UINT8_C(0x02)
63
64
65/** @name MYOP_XXX - Values for FNBS3CPUBASIC2ACTSTCODE::fOp.
66 *
67 * These are flags, though we've precombined a few shortening things down.
68 *
69 * @{ */
70#define MYOP_LD 0x1 /**< The instruction loads. */
71#define MYOP_ST 0x2 /**< The instruction stores */
72#define MYOP_EFL 0x4 /**< The instruction modifies EFLAGS. */
73#define MYOP_AC_GP 0x8 /**< The instruction may cause either \#AC or \#GP (FXSAVE). */
74
75#define MYOP_LD_ST 0x3 /**< Convenience: The instruction both loads and stores. */
76#define MYOP_LD_DIV 0x5 /**< Convenience: DIV instruction - loading and modifying flags. */
77/** @} */
78
79
80/*********************************************************************************************************************************
81* Structures and Typedefs *
82*********************************************************************************************************************************/
/** Descriptor type/S-bit pair, used by the (currently disabled) tables of
 *  invalid CS and SS descriptor types further down. */
typedef struct BS3CB2INVLDESCTYPE
{
    uint8_t u4Type;     /**< The 4-bit descriptor type value (X86_SEL_TYPE_XXX when u1DescType is set). */
    uint8_t u1DescType; /**< The S bit: 1 = code/data descriptor, 0 = system descriptor. */
} BS3CB2INVLDESCTYPE;
88
/** Describes one SIDT/SGDT/LIDT/LGDT test worker (see g_aSidtWorkers,
 *  g_aSgdtWorkers, g_aLidtWorkers and g_aLgdtWorkers below). */
typedef struct BS3CB2SIDTSGDT
{
    const char *pszDesc; /**< Human readable description of the instruction (sequence). */
    FPFNBS3FAR fpfnWorker; /**< Far pointer to the assembly worker. */
    uint8_t cbInstr;     /**< Size of the (first) instruction in bytes. */
    bool fSs;            /**< Whether the memory operand uses an SS segment override. */
    uint8_t bMode;       /**< BS3_MODE_CODE_XXX mask of code modes the worker is for. */
    uint8_t fFlags;      /**< BS3CB2SIDTSGDT_F_XXX. */
} BS3CB2SIDTSGDT;
98
99
/** Test code snippet for the alignment-check (\#AC) tests (assembly, see
 *  bs3-cpu-basic-2-template.mac). */
typedef void BS3_CALL FNBS3CPUBASIC2ACSNIPPET(void);

/** One \#AC test code snippet together with the properties of its memory
 *  access, used to predict which exception (if any) it should raise. */
typedef struct FNBS3CPUBASIC2ACTSTCODE
{
    FNBS3CPUBASIC2ACSNIPPET BS3_FAR *pfn; /**< The snippet to execute. */
    uint8_t fOp;           /**< MYOP_XXX flags describing the access (load/store/eflags/...). */
    uint16_t cbMem;        /**< Size of the memory operand in bytes. */
    uint8_t cbAlign;       /**< Required alignment of the operand in bytes. */
    uint8_t offFaultInstr; /**< For skipping fninit with the fld test. */
} FNBS3CPUBASIC2ACTSTCODE;
/** Pointer to a const \#AC test code snippet entry. */
typedef FNBS3CPUBASIC2ACTSTCODE const *PCFNBS3CPUBASIC2ACTSTCODE;
111
/** Per-code-mode table of \#AC test snippets.
 * NOTE(review): the struct tag says ...ACTTST... while the typedef name says
 * ...PFTTST... - looks like a copy/paste leftover from the \#PF test; confirm
 * before renaming since the typedef is referenced below. */
typedef struct BS3CPUBASIC2ACTTSTCMNMODE
{
    uint8_t bMode;     /**< The BS3_MODE_CODE_XXX value this entry covers. */
    uint16_t cEntries; /**< Number of entries in paEntries. */
    PCFNBS3CPUBASIC2ACTSTCODE paEntries; /**< The snippet table for this mode. */
} BS3CPUBASIC2PFTTSTCMNMODE;
/** Pointer to a const per-mode \#AC test table. */
typedef BS3CPUBASIC2PFTTSTCMNMODE const *PCBS3CPUBASIC2PFTTSTCMNMODE;
119
120
121/*********************************************************************************************************************************
122* External Symbols *
123*********************************************************************************************************************************/
124extern FNBS3FAR bs3CpuBasic2_Int80;
125extern FNBS3FAR bs3CpuBasic2_Int81;
126extern FNBS3FAR bs3CpuBasic2_Int82;
127extern FNBS3FAR bs3CpuBasic2_Int83;
128
129extern FNBS3FAR bs3CpuBasic2_ud2;
130#define g_bs3CpuBasic2_ud2_FlatAddr BS3_DATA_NM(g_bs3CpuBasic2_ud2_FlatAddr)
131extern uint32_t g_bs3CpuBasic2_ud2_FlatAddr;
132
133extern FNBS3FAR bs3CpuBasic2_iret;
134extern FNBS3FAR bs3CpuBasic2_iret_opsize;
135extern FNBS3FAR bs3CpuBasic2_iret_rexw;
136
137extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c16;
138extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c32;
139extern FNBS3FAR bs3CpuBasic2_sidt_bx_ud2_c64;
140extern FNBS3FAR bs3CpuBasic2_sidt_ss_bx_ud2_c16;
141extern FNBS3FAR bs3CpuBasic2_sidt_ss_bx_ud2_c32;
142extern FNBS3FAR bs3CpuBasic2_sidt_rexw_bx_ud2_c64;
143extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c16;
144extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c32;
145extern FNBS3FAR bs3CpuBasic2_sidt_opsize_bx_ud2_c64;
146extern FNBS3FAR bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c16;
147extern FNBS3FAR bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c32;
148extern FNBS3FAR bs3CpuBasic2_sidt_opsize_rexw_bx_ud2_c64;
149
150extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c16;
151extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c32;
152extern FNBS3FAR bs3CpuBasic2_sgdt_bx_ud2_c64;
153extern FNBS3FAR bs3CpuBasic2_sgdt_ss_bx_ud2_c16;
154extern FNBS3FAR bs3CpuBasic2_sgdt_ss_bx_ud2_c32;
155extern FNBS3FAR bs3CpuBasic2_sgdt_rexw_bx_ud2_c64;
156extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c16;
157extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c32;
158extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_bx_ud2_c64;
159extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c16;
160extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c32;
161extern FNBS3FAR bs3CpuBasic2_sgdt_opsize_rexw_bx_ud2_c64;
162
163extern FNBS3FAR bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c16;
164extern FNBS3FAR bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c32;
165extern FNBS3FAR bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c64;
166extern FNBS3FAR bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c16;
167extern FNBS3FAR bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c32;
168extern FNBS3FAR bs3CpuBasic2_lidt_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64;
169extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c16;
170extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt32_es_di__lidt_es_si__ud2_c16;
171extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c32;
172extern FNBS3FAR bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c64;
173extern FNBS3FAR bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c16;
174extern FNBS3FAR bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c32;
175extern FNBS3FAR bs3CpuBasic2_lidt_opsize_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64;
176
177extern FNBS3FAR bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
178extern FNBS3FAR bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
179extern FNBS3FAR bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
180extern FNBS3FAR bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
181extern FNBS3FAR bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
182extern FNBS3FAR bs3CpuBasic2_lgdt_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
183extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
184extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
185extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
186extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16;
187extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32;
188extern FNBS3FAR bs3CpuBasic2_lgdt_opsize_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64;
189
190
191/* bs3-cpu-basic-2-template.mac: */
192FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c16;
193FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c16;
194FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c16;
195FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c16;
196FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c16;
197FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fld_ds_bx__ud2_c16;
198FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c16;
199FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c16;
200FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fxsave_ds_bx__ud2_c16;
201
202FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c32;
203FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c32;
204FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c32;
205FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c32;
206FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c32;
207FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fld_ds_bx__ud2_c32;
208FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c32;
209FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c32;
210FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fxsave_ds_bx__ud2_c32;
211
212FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c64;
213FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c64;
214FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c64;
215FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c64;
216FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c64;
217FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fld_ds_bx__ud2_c64;
218FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c64;
219FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c64;
220FNBS3CPUBASIC2ACSNIPPET bs3CpuBasic2_fxsave_ds_bx__ud2_c64;
221
222
223/*********************************************************************************************************************************
224* Global Variables *
225*********************************************************************************************************************************/
/** Name of the current test mode.  Initialized to a bogus non-NULL pointer,
 *  presumably so use before bs3CpuBasic2_SetGlobals() is noticeable - TODO confirm. */
static const char BS3_FAR *g_pszTestMode = (const char *)1;
/** The current test mode (BS3_MODE_XXX); bogus until bs3CpuBasic2_SetGlobals(). */
static uint8_t g_bTestMode = 1;
/** Set if the system is 16-bit; bogus until bs3CpuBasic2_SetGlobals(). */
static bool g_f16BitSys = 1;
229
230
/** SIDT test workers.
 *  Columns: pszDesc, fpfnWorker, cbInstr, fSs, bMode, fFlags. */
static BS3CB2SIDTSGDT const g_aSidtWorkers[] =
{
    { "sidt [bx]", bs3CpuBasic2_sidt_bx_ud2_c16, 3, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "sidt [ss:bx]", bs3CpuBasic2_sidt_ss_bx_ud2_c16, 4, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "o32 sidt [bx]", bs3CpuBasic2_sidt_opsize_bx_ud2_c16, 4, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
    { "o32 sidt [ss:bx]", bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c16, 5, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
    { "sidt [ebx]", bs3CpuBasic2_sidt_bx_ud2_c32, 3, false, BS3_MODE_CODE_32, 0 },
    { "sidt [ss:ebx]", bs3CpuBasic2_sidt_ss_bx_ud2_c32, 4, true, BS3_MODE_CODE_32, 0 },
    { "o16 sidt [ebx]", bs3CpuBasic2_sidt_opsize_bx_ud2_c32, 4, false, BS3_MODE_CODE_32, 0 },
    { "o16 sidt [ss:ebx]", bs3CpuBasic2_sidt_opsize_ss_bx_ud2_c32, 5, true, BS3_MODE_CODE_32, 0 },
    { "sidt [rbx]", bs3CpuBasic2_sidt_bx_ud2_c64, 3, false, BS3_MODE_CODE_64, 0 },
    { "o64 sidt [rbx]", bs3CpuBasic2_sidt_rexw_bx_ud2_c64, 4, false, BS3_MODE_CODE_64, 0 },
    { "o32 sidt [rbx]", bs3CpuBasic2_sidt_opsize_bx_ud2_c64, 4, false, BS3_MODE_CODE_64, 0 },
    { "o32 o64 sidt [rbx]", bs3CpuBasic2_sidt_opsize_rexw_bx_ud2_c64, 5, false, BS3_MODE_CODE_64, 0 },
};
247
/** SGDT test workers (parallels g_aSidtWorkers).
 *  Columns: pszDesc, fpfnWorker, cbInstr, fSs, bMode, fFlags. */
static BS3CB2SIDTSGDT const g_aSgdtWorkers[] =
{
    { "sgdt [bx]", bs3CpuBasic2_sgdt_bx_ud2_c16, 3, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "sgdt [ss:bx]", bs3CpuBasic2_sgdt_ss_bx_ud2_c16, 4, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "o32 sgdt [bx]", bs3CpuBasic2_sgdt_opsize_bx_ud2_c16, 4, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
    { "o32 sgdt [ss:bx]", bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c16, 5, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_386PLUS },
    { "sgdt [ebx]", bs3CpuBasic2_sgdt_bx_ud2_c32, 3, false, BS3_MODE_CODE_32, 0 },
    { "sgdt [ss:ebx]", bs3CpuBasic2_sgdt_ss_bx_ud2_c32, 4, true, BS3_MODE_CODE_32, 0 },
    { "o16 sgdt [ebx]", bs3CpuBasic2_sgdt_opsize_bx_ud2_c32, 4, false, BS3_MODE_CODE_32, 0 },
    { "o16 sgdt [ss:ebx]", bs3CpuBasic2_sgdt_opsize_ss_bx_ud2_c32, 5, true, BS3_MODE_CODE_32, 0 },
    { "sgdt [rbx]", bs3CpuBasic2_sgdt_bx_ud2_c64, 3, false, BS3_MODE_CODE_64, 0 },
    { "o64 sgdt [rbx]", bs3CpuBasic2_sgdt_rexw_bx_ud2_c64, 4, false, BS3_MODE_CODE_64, 0 },
    { "o32 sgdt [rbx]", bs3CpuBasic2_sgdt_opsize_bx_ud2_c64, 4, false, BS3_MODE_CODE_64, 0 },
    { "o32 o64 sgdt [rbx]", bs3CpuBasic2_sgdt_opsize_rexw_bx_ud2_c64, 5, false, BS3_MODE_CODE_64, 0 },
};
264
/** LIDT test workers.  Each worker loads an IDTR, stores it to es:di for
 *  inspection, then restores the original from es:si before the ud2.
 *  Columns: pszDesc, fpfnWorker, cbInstr, fSs, bMode, fFlags. */
static BS3CB2SIDTSGDT const g_aLidtWorkers[] =
{
    { "lidt [bx]", bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c16, 11, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "lidt [ss:bx]", bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c16, 12, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "o32 lidt [bx]", bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c16, 12, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
    { "o32 lidt [bx]; sidt32", bs3CpuBasic2_lidt_opsize_bx__sidt32_es_di__lidt_es_si__ud2_c16, 27, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
    { "o32 lidt [ss:bx]", bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c16, 13, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
    { "lidt [ebx]", bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c32, 11, false, BS3_MODE_CODE_32, 0 },
    { "lidt [ss:ebx]", bs3CpuBasic2_lidt_ss_bx__sidt_es_di__lidt_es_si__ud2_c32, 12, true, BS3_MODE_CODE_32, 0 },
    { "o16 lidt [ebx]", bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c32, 12, false, BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
    { "o16 lidt [ss:ebx]", bs3CpuBasic2_lidt_opsize_ss_bx__sidt_es_di__lidt_es_si__ud2_c32, 13, true, BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
    { "lidt [rbx]", bs3CpuBasic2_lidt_bx__sidt_es_di__lidt_es_si__ud2_c64, 9, false, BS3_MODE_CODE_64, 0 },
    { "o64 lidt [rbx]", bs3CpuBasic2_lidt_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64, 10, false, BS3_MODE_CODE_64, 0 },
    { "o32 lidt [rbx]", bs3CpuBasic2_lidt_opsize_bx__sidt_es_di__lidt_es_si__ud2_c64, 10, false, BS3_MODE_CODE_64, 0 },
    { "o32 o64 lidt [rbx]", bs3CpuBasic2_lidt_opsize_rexw_bx__sidt_es_di__lidt_es_si__ud2_c64, 11, false, BS3_MODE_CODE_64, 0 },
};
282
/** LGDT test workers (parallels g_aLidtWorkers, but for the GDTR).
 *  Columns: pszDesc, fpfnWorker, cbInstr, fSs, bMode, fFlags. */
static BS3CB2SIDTSGDT const g_aLgdtWorkers[] =
{
    { "lgdt [bx]", bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c16, 11, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "lgdt [ss:bx]", bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16, 12, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, 0 },
    { "o32 lgdt [bx]", bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c16, 12, false, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
    { "o32 lgdt [ss:bx]", bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c16, 13, true, BS3_MODE_CODE_16 | BS3_MODE_CODE_V86, BS3CB2SIDTSGDT_F_OPSIZE | BS3CB2SIDTSGDT_F_386PLUS },
    { "lgdt [ebx]", bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c32, 11, false, BS3_MODE_CODE_32, 0 },
    { "lgdt [ss:ebx]", bs3CpuBasic2_lgdt_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32, 12, true, BS3_MODE_CODE_32, 0 },
    { "o16 lgdt [ebx]", bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c32, 12, false, BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
    { "o16 lgdt [ss:ebx]", bs3CpuBasic2_lgdt_opsize_ss_bx__sgdt_es_di__lgdt_es_si__ud2_c32, 13, true, BS3_MODE_CODE_32, BS3CB2SIDTSGDT_F_OPSIZE },
    { "lgdt [rbx]", bs3CpuBasic2_lgdt_bx__sgdt_es_di__lgdt_es_si__ud2_c64, 9, false, BS3_MODE_CODE_64, 0 },
    { "o64 lgdt [rbx]", bs3CpuBasic2_lgdt_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64, 10, false, BS3_MODE_CODE_64, 0 },
    { "o32 lgdt [rbx]", bs3CpuBasic2_lgdt_opsize_bx__sgdt_es_di__lgdt_es_si__ud2_c64, 10, false, BS3_MODE_CODE_64, 0 },
    { "o32 o64 lgdt [rbx]", bs3CpuBasic2_lgdt_opsize_rexw_bx__sgdt_es_di__lgdt_es_si__ud2_c64, 11, false, BS3_MODE_CODE_64, 0 },
};
299
300
301
#if 0 /* NOTE(review): currently compiled out; kept for the disabled tests below. */
/** Table containing invalid CS selector types.
 *  First 8 entries: data-descriptor types (S=1) which are invalid for CS.
 *  Remaining entries: all 16 system-descriptor types (S=0). */
static const BS3CB2INVLDESCTYPE g_aInvalidCsTypes[] =
{
    { X86_SEL_TYPE_RO, 1 },
    { X86_SEL_TYPE_RO_ACC, 1 },
    { X86_SEL_TYPE_RW, 1 },
    { X86_SEL_TYPE_RW_ACC, 1 },
    { X86_SEL_TYPE_RO_DOWN, 1 },
    { X86_SEL_TYPE_RO_DOWN_ACC, 1 },
    { X86_SEL_TYPE_RW_DOWN, 1 },
    { X86_SEL_TYPE_RW_DOWN_ACC, 1 },
    { 0, 0 },
    { 1, 0 },
    { 2, 0 },
    { 3, 0 },
    { 4, 0 },
    { 5, 0 },
    { 6, 0 },
    { 7, 0 },
    { 8, 0 },
    { 9, 0 },
    { 10, 0 },
    { 11, 0 },
    { 12, 0 },
    { 13, 0 },
    { 14, 0 },
    { 15, 0 },
};

/** Table containing invalid SS selector types.
 *  First 8 entries: code-descriptor types (S=1) which are invalid for SS.
 *  Remaining entries: all 16 system-descriptor types (S=0). */
static const BS3CB2INVLDESCTYPE g_aInvalidSsTypes[] =
{
    { X86_SEL_TYPE_EO, 1 },
    { X86_SEL_TYPE_EO_ACC, 1 },
    { X86_SEL_TYPE_ER, 1 },
    { X86_SEL_TYPE_ER_ACC, 1 },
    { X86_SEL_TYPE_EO_CONF, 1 },
    { X86_SEL_TYPE_EO_CONF_ACC, 1 },
    { X86_SEL_TYPE_ER_CONF, 1 },
    { X86_SEL_TYPE_ER_CONF_ACC, 1 },
    { 0, 0 },
    { 1, 0 },
    { 2, 0 },
    { 3, 0 },
    { 4, 0 },
    { 5, 0 },
    { 6, 0 },
    { 7, 0 },
    { 8, 0 },
    { 9, 0 },
    { 10, 0 },
    { 11, 0 },
    { 12, 0 },
    { 13, 0 },
    { 14, 0 },
    { 15, 0 },
};
#endif
361
362
/** \#AC test snippets for 16-bit code.
 *  Columns: pfn, fOp, cbMem, cbAlign, offFaultInstr (optional). */
static const FNBS3CPUBASIC2ACTSTCODE g_aCmn16[] =
{
    { bs3CpuBasic2_mov_ax_ds_bx__ud2_c16, MYOP_LD, 2, 2 },
    { bs3CpuBasic2_mov_ds_bx_ax__ud2_c16, MYOP_ST, 2, 2 },
    { bs3CpuBasic2_xchg_ds_bx_ax__ud2_c16, MYOP_LD_ST, 2, 2 },
    { bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c16, MYOP_LD_ST | MYOP_EFL, 2, 2 },
    { bs3CpuBasic2_div_ds_bx__ud2_c16, MYOP_LD_DIV, 2, 2 },
    { bs3CpuBasic2_fninit_fld_ds_bx__ud2_c16, MYOP_LD, 10, 8, 2 /*fninit*/ },
    { bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c16, MYOP_LD, 10, 8, 2 /*fninit*/ },
    { bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c16, MYOP_ST, 10, 8, 4 /*fninit+fldz*/ },
    { bs3CpuBasic2_fxsave_ds_bx__ud2_c16, MYOP_ST | MYOP_AC_GP, 512, 16 },
};
375
/** \#AC test snippets for 32-bit code (parallels g_aCmn16, dword operands).
 *  Columns: pfn, fOp, cbMem, cbAlign, offFaultInstr (optional). */
static const FNBS3CPUBASIC2ACTSTCODE g_aCmn32[] =
{
    { bs3CpuBasic2_mov_ax_ds_bx__ud2_c32, MYOP_LD, 4, 4 },
    { bs3CpuBasic2_mov_ds_bx_ax__ud2_c32, MYOP_ST, 4, 4 },
    { bs3CpuBasic2_xchg_ds_bx_ax__ud2_c32, MYOP_LD_ST, 4, 4 },
    { bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c32, MYOP_LD_ST | MYOP_EFL, 4, 4 },
    { bs3CpuBasic2_div_ds_bx__ud2_c32, MYOP_LD_DIV, 4, 4 },
    { bs3CpuBasic2_fninit_fld_ds_bx__ud2_c32, MYOP_LD, 10, 8, 2 /*fninit*/ },
    { bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c32, MYOP_LD, 10, 8, 2 /*fninit*/ },
    { bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c32, MYOP_ST, 10, 8, 4 /*fninit+fldz*/ },
    { bs3CpuBasic2_fxsave_ds_bx__ud2_c32, MYOP_ST | MYOP_AC_GP, 512, 16 },
};
388
/** \#AC test snippets for 64-bit code (parallels g_aCmn16, qword operands).
 *  Columns: pfn, fOp, cbMem, cbAlign, offFaultInstr (optional). */
static const FNBS3CPUBASIC2ACTSTCODE g_aCmn64[] =
{
    { bs3CpuBasic2_mov_ax_ds_bx__ud2_c64, MYOP_LD, 8, 8 },
    { bs3CpuBasic2_mov_ds_bx_ax__ud2_c64, MYOP_ST, 8, 8 },
    { bs3CpuBasic2_xchg_ds_bx_ax__ud2_c64, MYOP_LD_ST, 8, 8 },
    { bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c64, MYOP_LD_ST | MYOP_EFL, 8, 8 },
    { bs3CpuBasic2_div_ds_bx__ud2_c64, MYOP_LD_DIV, 8, 8 },
    { bs3CpuBasic2_fninit_fld_ds_bx__ud2_c64, MYOP_LD, 10, 8, 2 /*fninit*/ },
    { bs3CpuBasic2_fninit_fbld_ds_bx__ud2_c64, MYOP_LD, 10, 8, 2 /*fninit*/ },
    { bs3CpuBasic2_fninit_fldz_fstp_ds_bx__ud2_c64, MYOP_ST, 10, 8, 4 /*fninit+fldz*/ },
    { bs3CpuBasic2_fxsave_ds_bx__ud2_c64, MYOP_ST | MYOP_AC_GP, 512, 16 },
};
401
/** Maps each BS3_MODE_CODE_XXX value to its \#AC snippet table above.
 *  Note that V8086 mode shares the 16-bit snippet table. */
static const BS3CPUBASIC2PFTTSTCMNMODE g_aCmnModes[] =
{
    { BS3_MODE_CODE_16, RT_ELEMENTS(g_aCmn16), g_aCmn16 },
    { BS3_MODE_CODE_V86, RT_ELEMENTS(g_aCmn16), g_aCmn16 },
    { BS3_MODE_CODE_32, RT_ELEMENTS(g_aCmn32), g_aCmn32 },
    { BS3_MODE_CODE_64, RT_ELEMENTS(g_aCmn64), g_aCmn64 },
};
409
410
411/**
412 * Sets globals according to the mode.
413 *
414 * @param bTestMode The test mode.
415 */
416static void bs3CpuBasic2_SetGlobals(uint8_t bTestMode)
417{
418 g_bTestMode = bTestMode;
419 g_pszTestMode = Bs3GetModeName(bTestMode);
420 g_f16BitSys = BS3_MODE_IS_16BIT_SYS(bTestMode);
421 g_usBs3TestStep = 0;
422}
423
424
/** Reads the current 32-bit stack pointer (ESP) - Open Watcom inline assembly.
 *  NOTE(review): result is returned in the ax:dx register pair (low:high word);
 *  the ".386" directive implies this requires a 386 or later - confirm callers
 *  guard accordingly. */
uint32_t ASMGetESP(void);
#pragma aux ASMGetESP = \
    ".386" \
    "mov ax, sp" \
    "mov edx, esp" \
    "shr edx, 16" \
    value [ax dx] \
    modify exact [ax dx];
433
434
435/**
436 * Wrapper around Bs3TestFailedF that prefixes the error with g_usBs3TestStep
437 * and g_pszTestMode.
438 */
439static void bs3CpuBasic2_FailedF(const char *pszFormat, ...)
440{
441 va_list va;
442
443 char szTmp[168];
444 va_start(va, pszFormat);
445 Bs3StrPrintfV(szTmp, sizeof(szTmp), pszFormat, va);
446 va_end(va);
447
448 Bs3TestFailedF("%u - %s: %s", g_usBs3TestStep, g_pszTestMode, szTmp);
449}
450
451
#if 0 /* NOTE(review): compiled out together with the converted tests below. */
/**
 * Compares trap stuff.
 *
 * Verifies a software interrupt trap frame: expected vector, zero error code,
 * and a register context advanced exactly by the 2-byte "int xx" instruction.
 * Dumps the frame and halts on mismatch so the state can be inspected.
 */
static void bs3CpuBasic2_CompareIntCtx1(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint8_t bXcpt)
{
    uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
    CHECK_MEMBER("bXcpt", "%#04x", pTrapCtx->bXcpt, bXcpt);
    CHECK_MEMBER("bErrCd", "%#06RX64", pTrapCtx->uErrCd, 0);
    Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, pStartCtx, 2 /*int xx*/, 0 /*cbSpAdjust*/, 0 /*fExtraEfl*/, g_pszTestMode, g_usBs3TestStep);
    if (Bs3TestSubErrorCount() != cErrorsBefore)
    {
        Bs3TrapPrintFrame(pTrapCtx);
#if 1
        Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
        Bs3TestPrintf("Halting in CompareTrapCtx1: bXcpt=%#x\n", bXcpt);
        ASMHalt();
#endif
    }
}
#endif
473
474
#if 0 /* NOTE(review): compiled out together with the converted tests below. */
/**
 * Compares trap stuff.
 *
 * Like bs3CpuBasic2_CompareIntCtx1, but additionally checks the handler CS
 * and takes a caller supplied instruction-length adjustment.
 */
static void bs3CpuBasic2_CompareTrapCtx2(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t cbIpAdjust,
                                         uint8_t bXcpt, uint16_t uHandlerCs)
{
    uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
    CHECK_MEMBER("bXcpt", "%#04x", pTrapCtx->bXcpt, bXcpt);
    CHECK_MEMBER("bErrCd", "%#06RX64", pTrapCtx->uErrCd, 0);
    CHECK_MEMBER("uHandlerCs", "%#06x", pTrapCtx->uHandlerCs, uHandlerCs);
    Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, pStartCtx, cbIpAdjust, 0 /*cbSpAdjust*/, 0 /*fExtraEfl*/, g_pszTestMode, g_usBs3TestStep);
    if (Bs3TestSubErrorCount() != cErrorsBefore)
    {
        Bs3TrapPrintFrame(pTrapCtx);
#if 1
        Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
        Bs3TestPrintf("Halting in CompareTrapCtx2: bXcpt=%#x\n", bXcpt);
        ASMHalt();
#endif
    }
}
#endif
498
/**
 * Compares a CPU trap.
 *
 * Central compare worker used by all the Compare*Ctx wrappers below.
 *
 * @param   pTrapCtx            The trap frame recorded by the trap handler.
 * @param   pStartCtx           The register context the test started from.
 * @param   uErrCd              The expected error code.
 * @param   bXcpt               The expected exception number (X86_XCPT_XXX).
 * @param   f486ResumeFlagHint  Hint for whether a 486 sets EFLAGS.RF here.
 * @param   cbIpAdjust          Expected [ER]IP advance relative to @a pStartCtx.
 */
static void bs3CpuBasic2_CompareCpuTrapCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd,
                                           uint8_t bXcpt, bool f486ResumeFlagHint, uint8_t cbIpAdjust)
{
    uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
    uint32_t fExtraEfl;

    CHECK_MEMBER("bXcpt", "%#04x", pTrapCtx->bXcpt, bXcpt);
    CHECK_MEMBER("bErrCd", "%#06RX16", (uint16_t)pTrapCtx->uErrCd, (uint16_t)uErrCd); /* 486 only writes a word */

    /* EFLAGS.RF is expected to be set, except on 16-bit systems and on
       pre-pentium CPUs when the caller hints it won't be. */
    if ( g_f16BitSys
        || ( !f486ResumeFlagHint
            && (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) <= BS3CPU_80486 ) )
        fExtraEfl = 0;
    else
        fExtraEfl = X86_EFL_RF;
#if 0 /** @todo Running on an AMD Phenom II X6 1100T under AMD-V I'm not getting good X86_EFL_RF results. Enable this to get on with other work. */
    fExtraEfl = pTrapCtx->Ctx.rflags.u32 & X86_EFL_RF;
#endif
    Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, pStartCtx, cbIpAdjust, 0 /*cbSpAdjust*/, fExtraEfl, g_pszTestMode, g_usBs3TestStep);
    if (Bs3TestSubErrorCount() != cErrorsBefore)
    {
        /* Dump the frame and halt so the failure can be inspected. */
        Bs3TrapPrintFrame(pTrapCtx);
#if 1
        Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
        Bs3TestPrintf("Halting: bXcpt=%#x uErrCd=%#x\n", bXcpt, uErrCd);
        ASMHalt();
#endif
    }
}
531
532
/**
 * Compares \#GP trap.
 *
 * Forwards to bs3CpuBasic2_CompareCpuTrapCtx with X86_XCPT_GP, expecting the
 * faulting instruction to restart (no IP adjust) and RF to be set.
 */
static void bs3CpuBasic2_CompareGpCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd)
{
    bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_GP, true /*f486ResumeFlagHint*/, 0 /*cbIpAdjust*/);
}
540
#if 0 /* NOTE(review): only used by the compiled-out tests below. */
/**
 * Compares \#NP trap.
 */
static void bs3CpuBasic2_CompareNpCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd)
{
    bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_NP, true /*f486ResumeFlagHint*/, 0 /*cbIpAdjust*/);
}
#endif
550
/**
 * Compares \#SS trap.
 *
 * Unlike the \#GP wrapper, the RF expectation is caller supplied since it
 * differs between the stack-fault scenarios.
 */
static void bs3CpuBasic2_CompareSsCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd, bool f486ResumeFlagHint)
{
    bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_SS, f486ResumeFlagHint, 0 /*cbIpAdjust*/);
}
558
#if 0 /* NOTE(review): only used by the compiled-out tests below. */
/**
 * Compares \#TS trap.
 */
static void bs3CpuBasic2_CompareTsCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint16_t uErrCd)
{
    bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_TS, false /*f486ResumeFlagHint*/, 0 /*cbIpAdjust*/);
}
#endif
568
569/**
570 * Compares \#PF trap.
571 */
572static void bs3CpuBasic2_ComparePfCtx(PCBS3TRAPFRAME pTrapCtx, PBS3REGCTX pStartCtx, uint16_t uErrCd,
573 uint64_t uCr2Expected, uint8_t cbIpAdjust)
574{
575 uint64_t const uCr2Saved = pStartCtx->cr2.u;
576 pStartCtx->cr2.u = uCr2Expected;
577 bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, uErrCd, X86_XCPT_PF, true /*f486ResumeFlagHint*/, cbIpAdjust);
578 pStartCtx->cr2.u = uCr2Saved;
579}
580
/**
 * Compares \#UD trap.
 *
 * \#UD has no error code, so zero is passed to the compare worker.
 */
static void bs3CpuBasic2_CompareUdCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx)
{
    bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, 0 /*no error code*/, X86_XCPT_UD,
                                   true /*f486ResumeFlagHint*/, 0 /*cbIpAdjust*/);
}
589
/**
 * Compares \#AC trap.
 *
 * The \#AC error code is always zero; @a cbIpAdjust lets snippets that fault
 * after a prolog (e.g. fninit) report the right instruction position.
 */
static void bs3CpuBasic2_CompareAcCtx(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint8_t cbIpAdjust)
{
    bs3CpuBasic2_CompareCpuTrapCtx(pTrapCtx, pStartCtx, 0 /*always zero*/, X86_XCPT_AC, true /*f486ResumeFlagHint*/, cbIpAdjust);
}
597
598
599#if 0 /* convert me */
600static void bs3CpuBasic2_RaiseXcpt1Common(uint16_t const uSysR0Cs, uint16_t const uSysR0CsConf, uint16_t const uSysR0Ss,
601 PX86DESC const paIdt, unsigned const cIdteShift)
602{
603 BS3TRAPFRAME TrapCtx;
604 BS3REGCTX Ctx80;
605 BS3REGCTX Ctx81;
606 BS3REGCTX Ctx82;
607 BS3REGCTX Ctx83;
608 BS3REGCTX CtxTmp;
609 BS3REGCTX CtxTmp2;
610 PBS3REGCTX apCtx8x[4];
611 unsigned iCtx;
612 unsigned iRing;
613 unsigned iDpl;
614 unsigned iRpl;
615 unsigned i, j, k;
616 uint32_t uExpected;
617 bool const f486Plus = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486;
618# if TMPL_BITS == 16
619 bool const f386Plus = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386;
620 bool const f286 = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80286;
621# else
622 bool const f286 = false;
623 bool const f386Plus = true;
624 int rc;
625 uint8_t *pbIdtCopyAlloc;
626 PX86DESC pIdtCopy;
627 const unsigned cbIdte = 1 << (3 + cIdteShift);
628 RTCCUINTXREG uCr0Saved = ASMGetCR0();
629 RTGDTR GdtrSaved;
630# endif
631 RTIDTR IdtrSaved;
632 RTIDTR Idtr;
633
634 ASMGetIDTR(&IdtrSaved);
635# if TMPL_BITS != 16
636 ASMGetGDTR(&GdtrSaved);
637# endif
638
639 /* make sure they're allocated */
640 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
641 Bs3MemZero(&Ctx80, sizeof(Ctx80));
642 Bs3MemZero(&Ctx81, sizeof(Ctx81));
643 Bs3MemZero(&Ctx82, sizeof(Ctx82));
644 Bs3MemZero(&Ctx83, sizeof(Ctx83));
645 Bs3MemZero(&CtxTmp, sizeof(CtxTmp));
646 Bs3MemZero(&CtxTmp2, sizeof(CtxTmp2));
647
648 /* Context array. */
649 apCtx8x[0] = &Ctx80;
650 apCtx8x[1] = &Ctx81;
651 apCtx8x[2] = &Ctx82;
652 apCtx8x[3] = &Ctx83;
653
654# if TMPL_BITS != 16
655 /* Allocate memory for playing around with the IDT. */
656 pbIdtCopyAlloc = NULL;
657 if (BS3_MODE_IS_PAGED(g_bTestMode))
658 pbIdtCopyAlloc = Bs3MemAlloc(BS3MEMKIND_FLAT32, 12*_1K);
659# endif
660
661 /*
662 * IDT entry 80 thru 83 are assigned DPLs according to the number.
663 * (We'll be useing more, but this'll do for now.)
664 */
665 paIdt[0x80 << cIdteShift].Gate.u2Dpl = 0;
666 paIdt[0x81 << cIdteShift].Gate.u2Dpl = 1;
667 paIdt[0x82 << cIdteShift].Gate.u2Dpl = 2;
668 paIdt[0x83 << cIdteShift].Gate.u2Dpl = 3;
669
670 Bs3RegCtxSave(&Ctx80);
671 Ctx80.rsp.u -= 0x300;
672 Ctx80.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int80);
673# if TMPL_BITS == 16
674 Ctx80.cs = BS3_MODE_IS_RM_OR_V86(g_bTestMode) ? BS3_SEL_TEXT16 : BS3_SEL_R0_CS16;
675# elif TMPL_BITS == 32
676 g_uBs3TrapEipHint = Ctx80.rip.u32;
677# endif
678 Bs3MemCpy(&Ctx81, &Ctx80, sizeof(Ctx80));
679 Ctx81.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int81);
680 Bs3MemCpy(&Ctx82, &Ctx80, sizeof(Ctx80));
681 Ctx82.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int82);
682 Bs3MemCpy(&Ctx83, &Ctx80, sizeof(Ctx80));
683 Ctx83.rip.u = (uintptr_t)BS3_FP_OFF(&bs3CpuBasic2_Int83);
684
685 /*
686 * Check that all the above gates work from ring-0.
687 */
688 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
689 {
690 g_usBs3TestStep = iCtx;
691# if TMPL_BITS == 32
692 g_uBs3TrapEipHint = apCtx8x[iCtx]->rip.u32;
693# endif
694 Bs3TrapSetJmpAndRestore(apCtx8x[iCtx], &TrapCtx);
695 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, apCtx8x[iCtx], 0x80+iCtx /*bXcpt*/);
696 }
697
698 /*
699 * Check that the gate DPL checks works.
700 */
701 g_usBs3TestStep = 100;
702 for (iRing = 0; iRing <= 3; iRing++)
703 {
704 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
705 {
706 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
707 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
708# if TMPL_BITS == 32
709 g_uBs3TrapEipHint = CtxTmp.rip.u32;
710# endif
711 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
712 if (iCtx < iRing)
713 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
714 else
715 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
716 g_usBs3TestStep++;
717 }
718 }
719
720 /*
721 * Modify the gate CS value and run the handler at a different CPL.
722 * Throw RPL variations into the mix (completely ignored) together
723 * with gate presence.
724 * 1. CPL <= GATE.DPL
725 * 2. GATE.P
726 * 3. GATE.CS.DPL <= CPL (non-conforming segments)
727 */
728 g_usBs3TestStep = 1000;
729 for (i = 0; i <= 3; i++)
730 {
731 for (iRing = 0; iRing <= 3; iRing++)
732 {
733 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
734 {
735# if TMPL_BITS == 32
736 g_uBs3TrapEipHint = apCtx8x[iCtx]->rip.u32;
737# endif
738 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
739 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
740
741 for (j = 0; j <= 3; j++)
742 {
743 uint16_t const uCs = (uSysR0Cs | j) + (i << BS3_SEL_RING_SHIFT);
744 for (k = 0; k < 2; k++)
745 {
746 g_usBs3TestStep++;
747 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x\n", g_usBs3TestStep, iCtx, iRing, i, uCs);*/
748 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
749 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = k;
750 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
751 /*Bs3TrapPrintFrame(&TrapCtx);*/
752 if (iCtx < iRing)
753 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
754 else if (k == 0)
755 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
756 else if (i > iRing)
757 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, uCs & X86_SEL_MASK_OFF_RPL);
758 else
759 {
760 uint16_t uExpectedCs = uCs & X86_SEL_MASK_OFF_RPL;
761 if (i <= iCtx && i <= iRing)
762 uExpectedCs |= i;
763 bs3CpuBasic2_CompareTrapCtx2(&TrapCtx, &CtxTmp, 2 /*int 8xh*/, 0x80 + iCtx /*bXcpt*/, uExpectedCs);
764 }
765 }
766 }
767
768 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
769 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 1;
770 }
771 }
772 }
773 BS3_ASSERT(g_usBs3TestStep < 1600);
774
775 /*
776 * Various CS and SS related faults
777 *
778 * We temporarily reconfigure gate 80 and 83 with new CS selectors, the
779 * latter have a CS.DPL of 2 for testing ring transitions and SS loading
780 * without making it impossible to handle faults.
781 */
782 g_usBs3TestStep = 1600;
783 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
784 Bs3GdteTestPage00.Gen.u1Present = 0;
785 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
786 paIdt[0x80 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_00;
787
788 /* CS.PRESENT = 0 */
789 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
790 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
791 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
792 bs3CpuBasic2_FailedF("selector was accessed");
793 g_usBs3TestStep++;
794
795 /* Check that GATE.DPL is checked before CS.PRESENT. */
796 for (iRing = 1; iRing < 4; iRing++)
797 {
798 Bs3MemCpy(&CtxTmp, &Ctx80, sizeof(CtxTmp));
799 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
800 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
801 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, (0x80 << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
802 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
803 bs3CpuBasic2_FailedF("selector was accessed");
804 g_usBs3TestStep++;
805 }
806
807 /* CS.DPL mismatch takes precedence over CS.PRESENT = 0. */
808 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
809 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
810 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
811 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
812 bs3CpuBasic2_FailedF("CS selector was accessed");
813 g_usBs3TestStep++;
814 for (iDpl = 1; iDpl < 4; iDpl++)
815 {
816 Bs3GdteTestPage00.Gen.u2Dpl = iDpl;
817 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
818 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
819 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
820 bs3CpuBasic2_FailedF("CS selector was accessed");
821 g_usBs3TestStep++;
822 }
823
824 /* 1608: Check all the invalid CS selector types alone. */
825 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
826 for (i = 0; i < RT_ELEMENTS(g_aInvalidCsTypes); i++)
827 {
828 Bs3GdteTestPage00.Gen.u4Type = g_aInvalidCsTypes[i].u4Type;
829 Bs3GdteTestPage00.Gen.u1DescType = g_aInvalidCsTypes[i].u1DescType;
830 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
831 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
832 if (Bs3GdteTestPage00.Gen.u4Type != g_aInvalidCsTypes[i].u4Type)
833 bs3CpuBasic2_FailedF("Invalid CS type %#x/%u -> %#x/%u\n",
834 g_aInvalidCsTypes[i].u4Type, g_aInvalidCsTypes[i].u1DescType,
835 Bs3GdteTestPage00.Gen.u4Type, Bs3GdteTestPage00.Gen.u1DescType);
836 g_usBs3TestStep++;
837
838 /* Incorrect CS.TYPE takes precedence over CS.PRESENT = 0. */
839 Bs3GdteTestPage00.Gen.u1Present = 0;
840 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
841 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx80, BS3_SEL_TEST_PAGE_00);
842 Bs3GdteTestPage00.Gen.u1Present = 1;
843 g_usBs3TestStep++;
844 }
845
846 /* Fix CS again. */
847 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
848
849 /* 1632: Test SS. */
850 if (!BS3_MODE_IS_64BIT_SYS(g_bTestMode))
851 {
852 uint16_t BS3_FAR *puTssSs2 = BS3_MODE_IS_16BIT_SYS(g_bTestMode) ? &Bs3Tss16.ss2 : &Bs3Tss32.ss2;
853 uint16_t const uSavedSs2 = *puTssSs2;
854 X86DESC const SavedGate83 = paIdt[0x83 << cIdteShift];
855
856 /* Make the handler execute in ring-2. */
857 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
858 Bs3GdteTestPage02.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
859 paIdt[0x83 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_02 | 2;
860
861 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
862 Bs3RegCtxConvertToRingX(&CtxTmp, 3); /* yeah, from 3 so SS:xSP is reloaded. */
863 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
864 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
865 if (!(Bs3GdteTestPage02.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
866 bs3CpuBasic2_FailedF("CS selector was not access");
867 g_usBs3TestStep++;
868
869 /* Create a SS.DPL=2 stack segment and check that SS2.RPL matters and
870 that we get #SS if the selector isn't present. */
871 i = 0; /* used for cycling thru invalid CS types */
872 for (k = 0; k < 10; k++)
873 {
874 /* k=0: present,
875 k=1: not-present,
876 k=2: present but very low limit,
877 k=3: not-present, low limit.
878 k=4: present, read-only.
879 k=5: not-present, read-only.
880 k=6: present, code-selector.
881 k=7: not-present, code-selector.
882 k=8: present, read-write / no access + system (=LDT).
883 k=9: not-present, read-write / no access + system (=LDT).
884 */
885 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Ss + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
886 Bs3GdteTestPage03.Gen.u1Present = !(k & 1);
887 if (k >= 8)
888 {
889 Bs3GdteTestPage03.Gen.u1DescType = 0; /* system */
890 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_RW; /* = LDT */
891 }
892 else if (k >= 6)
893 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_ER;
894 else if (k >= 4)
895 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_RO;
896 else if (k >= 2)
897 {
898 Bs3GdteTestPage03.Gen.u16LimitLow = 0x400;
899 Bs3GdteTestPage03.Gen.u4LimitHigh = 0;
900 Bs3GdteTestPage03.Gen.u1Granularity = 0;
901 }
902
903 for (iDpl = 0; iDpl < 4; iDpl++)
904 {
905 Bs3GdteTestPage03.Gen.u2Dpl = iDpl;
906
907 for (iRpl = 0; iRpl < 4; iRpl++)
908 {
909 *puTssSs2 = BS3_SEL_TEST_PAGE_03 | iRpl;
910 //Bs3TestPrintf("k=%u iDpl=%u iRpl=%u step=%u\n", k, iDpl, iRpl, g_usBs3TestStep);
911 Bs3GdteTestPage02.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
912 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
913 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
914 if (iRpl != 2 || iRpl != iDpl || k >= 4)
915 bs3CpuBasic2_CompareTsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03);
916 else if (k != 0)
917 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03,
918 k == 2 /*f486ResumeFlagHint*/);
919 else
920 {
921 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
922 if (TrapCtx.uHandlerSs != (BS3_SEL_TEST_PAGE_03 | 2))
923 bs3CpuBasic2_FailedF("uHandlerSs=%#x expected %#x\n", TrapCtx.uHandlerSs, BS3_SEL_TEST_PAGE_03 | 2);
924 }
925 if (!(Bs3GdteTestPage02.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
926 bs3CpuBasic2_FailedF("CS selector was not access");
927 if ( TrapCtx.bXcpt == 0x83
928 || (TrapCtx.bXcpt == X86_XCPT_SS && k == 2) )
929 {
930 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
931 bs3CpuBasic2_FailedF("SS selector was not accessed");
932 }
933 else if (Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
934 bs3CpuBasic2_FailedF("SS selector was accessed");
935 g_usBs3TestStep++;
936
937 /* +1: Modify the gate DPL to check that this is checked before SS.DPL and SS.PRESENT. */
938 paIdt[0x83 << cIdteShift].Gate.u2Dpl = 2;
939 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
940 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, (0x83 << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
941 paIdt[0x83 << cIdteShift].Gate.u2Dpl = 3;
942 g_usBs3TestStep++;
943
944 /* +2: Check the CS.DPL check is done before the SS ones. Restoring the
945 ring-0 INT 83 context triggers the CS.DPL < CPL check. */
946 Bs3TrapSetJmpAndRestore(&Ctx83, &TrapCtx);
947 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx83, BS3_SEL_TEST_PAGE_02);
948 g_usBs3TestStep++;
949
950 /* +3: Now mark the CS selector not present and check that that also triggers before SS stuff. */
951 Bs3GdteTestPage02.Gen.u1Present = 0;
952 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
953 bs3CpuBasic2_CompareNpCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_02);
954 Bs3GdteTestPage02.Gen.u1Present = 1;
955 g_usBs3TestStep++;
956
957 /* +4: Make the CS selector some invalid type and check it triggers before SS stuff. */
958 Bs3GdteTestPage02.Gen.u4Type = g_aInvalidCsTypes[i].u4Type;
959 Bs3GdteTestPage02.Gen.u1DescType = g_aInvalidCsTypes[i].u1DescType;
960 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
961 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_02);
962 Bs3GdteTestPage02.Gen.u4Type = X86_SEL_TYPE_ER_ACC;
963 Bs3GdteTestPage02.Gen.u1DescType = 1;
964 g_usBs3TestStep++;
965
966 /* +5: Now, make the CS selector limit too small and that it triggers after SS trouble.
967 The 286 had a simpler approach to these GP(0). */
968 Bs3GdteTestPage02.Gen.u16LimitLow = 0;
969 Bs3GdteTestPage02.Gen.u4LimitHigh = 0;
970 Bs3GdteTestPage02.Gen.u1Granularity = 0;
971 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
972 if (f286)
973 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, 0 /*uErrCd*/);
974 else if (iRpl != 2 || iRpl != iDpl || k >= 4)
975 bs3CpuBasic2_CompareTsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03);
976 else if (k != 0)
977 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03, k == 2 /*f486ResumeFlagHint*/);
978 else
979 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, 0 /*uErrCd*/);
980 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
981 g_usBs3TestStep++;
982 }
983 }
984 }
985
986 /* Check all the invalid SS selector types alone. */
987 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
988 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Ss + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
989 *puTssSs2 = BS3_SEL_TEST_PAGE_03 | 2;
990 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
991 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
992 g_usBs3TestStep++;
993 for (i = 0; i < RT_ELEMENTS(g_aInvalidSsTypes); i++)
994 {
995 Bs3GdteTestPage03.Gen.u4Type = g_aInvalidSsTypes[i].u4Type;
996 Bs3GdteTestPage03.Gen.u1DescType = g_aInvalidSsTypes[i].u1DescType;
997 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
998 bs3CpuBasic2_CompareTsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03);
999 if (Bs3GdteTestPage03.Gen.u4Type != g_aInvalidSsTypes[i].u4Type)
1000 bs3CpuBasic2_FailedF("Invalid SS type %#x/%u -> %#x/%u\n",
1001 g_aInvalidSsTypes[i].u4Type, g_aInvalidSsTypes[i].u1DescType,
1002 Bs3GdteTestPage03.Gen.u4Type, Bs3GdteTestPage03.Gen.u1DescType);
1003 g_usBs3TestStep++;
1004 }
1005
1006 /*
1007 * Continue the SS experiments with an expand down segment. We'll use
1008 * the same setup as we already have, with gate 83h being DPL 3 and
1009 * having CS.DPL=2.
1010 *
1011 * Expand down segments are weird. The valid area is practically speaking
1012 * reversed. So, a 16-bit segment with a limit of 0x6000 will have valid
1013 * addresses from 0xffff thru 0x6001.
1014 *
1015 * So, with expand down segments we can more easily cut partially into the
1016 * pushing of the iret frame and trigger more interesting behavior than
1017 * with regular "expand up" segments where the whole pushing area is either
1018 * all fine or not fine.
1019 */
1020 Bs3GdteTestPage02 = Bs3Gdt[(uSysR0Cs + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1021 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Ss + (2 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1022 Bs3GdteTestPage03.Gen.u2Dpl = 2;
1023 Bs3GdteTestPage03.Gen.u4Type = X86_SEL_TYPE_RW_DOWN;
1024 *puTssSs2 = BS3_SEL_TEST_PAGE_03 | 2;
1025
1026 /* First test, limit = max --> no bytes accessible --> #GP */
1027 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1028 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03, true /*f486ResumeFlagHint*/);
1029
1030 /* Second test, limit = 0 --> all but the zero byte accessible --> works */
1031 Bs3GdteTestPage03.Gen.u16LimitLow = 0;
1032 Bs3GdteTestPage03.Gen.u4LimitHigh = 0;
1033 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1034 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83);
1035
1036 /* Modify the gate handler to be a dummy that immediately does UD2
1037 and triggers #UD, then advance the limit down till we get the #UD. */
1038 Bs3GdteTestPage03.Gen.u1Granularity = 0;
1039
1040 Bs3MemCpy(&CtxTmp2, &CtxTmp, sizeof(CtxTmp2)); /* #UD result context */
1041 if (g_f16BitSys)
1042 {
1043 CtxTmp2.rip.u = g_bs3CpuBasic2_ud2_FlatAddr - BS3_ADDR_BS3TEXT16;
1044 Bs3Trap16SetGate(0x83, X86_SEL_TYPE_SYS_286_INT_GATE, 3, BS3_SEL_TEST_PAGE_02, CtxTmp2.rip.u16, 0 /*cParams*/);
1045 CtxTmp2.rsp.u = Bs3Tss16.sp2 - 2*5;
1046 }
1047 else
1048 {
1049 CtxTmp2.rip.u = g_bs3CpuBasic2_ud2_FlatAddr;
1050 Bs3Trap32SetGate(0x83, X86_SEL_TYPE_SYS_386_INT_GATE, 3, BS3_SEL_TEST_PAGE_02, CtxTmp2.rip.u32, 0 /*cParams*/);
1051 CtxTmp2.rsp.u = Bs3Tss32.esp2 - 4*5;
1052 }
1053 CtxTmp2.bMode = g_bTestMode; /* g_bBs3CurrentMode not changed by the UD2 handler. */
1054 CtxTmp2.cs = BS3_SEL_TEST_PAGE_02 | 2;
1055 CtxTmp2.ss = BS3_SEL_TEST_PAGE_03 | 2;
1056 CtxTmp2.bCpl = 2;
1057
1058 /* test run. */
1059 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1060 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxTmp2);
1061 g_usBs3TestStep++;
1062
1063 /* Real run. */
1064 i = (g_f16BitSys ? 2 : 4) * 6 + 1;
1065 while (i-- > 0)
1066 {
1067 Bs3GdteTestPage03.Gen.u16LimitLow = CtxTmp2.rsp.u16 + i - 1;
1068 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1069 if (i > 0)
1070 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, BS3_SEL_TEST_PAGE_03, true /*f486ResumeFlagHint*/);
1071 else
1072 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxTmp2);
1073 g_usBs3TestStep++;
1074 }
1075
1076 /* Do a run where we do the same-ring kind of access. */
1077 Bs3RegCtxConvertToRingX(&CtxTmp, 2);
1078 if (g_f16BitSys)
1079 {
1080 CtxTmp2.rsp.u32 = CtxTmp.rsp.u32 - 2*3;
1081 i = 2*3 - 1;
1082 }
1083 else
1084 {
1085 CtxTmp2.rsp.u32 = CtxTmp.rsp.u32 - 4*3;
1086 i = 4*3 - 1;
1087 }
1088 CtxTmp.ss = BS3_SEL_TEST_PAGE_03 | 2;
1089 CtxTmp2.ds = CtxTmp.ds;
1090 CtxTmp2.es = CtxTmp.es;
1091 CtxTmp2.fs = CtxTmp.fs;
1092 CtxTmp2.gs = CtxTmp.gs;
1093 while (i-- > 0)
1094 {
1095 Bs3GdteTestPage03.Gen.u16LimitLow = CtxTmp2.rsp.u16 + i - 1;
1096 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1097 if (i > 0)
1098 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &CtxTmp, 0 /*BS3_SEL_TEST_PAGE_03*/, true /*f486ResumeFlagHint*/);
1099 else
1100 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxTmp2);
1101 g_usBs3TestStep++;
1102 }
1103
1104 *puTssSs2 = uSavedSs2;
1105 paIdt[0x83 << cIdteShift] = SavedGate83;
1106 }
1107 paIdt[0x80 << cIdteShift].Gate.u16Sel = uSysR0Cs;
1108 BS3_ASSERT(g_usBs3TestStep < 3000);
1109
1110 /*
1111 * Modify the gate CS value with a conforming segment.
1112 */
1113 g_usBs3TestStep = 3000;
1114 for (i = 0; i <= 3; i++) /* cs.dpl */
1115 {
1116 for (iRing = 0; iRing <= 3; iRing++)
1117 {
1118 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1119 {
1120 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1121 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1122# if TMPL_BITS == 32
1123 g_uBs3TrapEipHint = CtxTmp.rip.u32;
1124# endif
1125
1126 for (j = 0; j <= 3; j++) /* rpl */
1127 {
1128 uint16_t const uCs = (uSysR0CsConf | j) + (i << BS3_SEL_RING_SHIFT);
1129 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x\n", g_usBs3TestStep, iCtx, iRing, i, uCs);*/
1130 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
1131 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1132 //Bs3TestPrintf("%u/%u/%u/%u: cs=%04x hcs=%04x xcpt=%02x\n", i, iRing, iCtx, j, uCs, TrapCtx.uHandlerCs, TrapCtx.bXcpt);
1133 /*Bs3TrapPrintFrame(&TrapCtx);*/
1134 g_usBs3TestStep++;
1135 if (iCtx < iRing)
1136 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1137 else if (i > iRing)
1138 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, uCs & X86_SEL_MASK_OFF_RPL);
1139 else
1140 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
1141 }
1142 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
1143 }
1144 }
1145 }
1146 BS3_ASSERT(g_usBs3TestStep < 3500);
1147
1148 /*
1149 * The gates must be 64-bit in long mode.
1150 */
1151 if (cIdteShift != 0)
1152 {
1153 g_usBs3TestStep = 3500;
1154 for (i = 0; i <= 3; i++)
1155 {
1156 for (iRing = 0; iRing <= 3; iRing++)
1157 {
1158 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1159 {
1160 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1161 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1162
1163 for (j = 0; j < 2; j++)
1164 {
1165 static const uint16_t s_auCSes[2] = { BS3_SEL_R0_CS16, BS3_SEL_R0_CS32 };
1166 uint16_t uCs = (s_auCSes[j] | i) + (i << BS3_SEL_RING_SHIFT);
1167 g_usBs3TestStep++;
1168 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x\n", g_usBs3TestStep, iCtx, iRing, i, uCs);*/
1169 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
1170 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1171 /*Bs3TrapPrintFrame(&TrapCtx);*/
1172 if (iCtx < iRing)
1173 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1174 else
1175 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, uCs & X86_SEL_MASK_OFF_RPL);
1176 }
1177 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
1178 }
1179 }
1180 }
1181 BS3_ASSERT(g_usBs3TestStep < 4000);
1182 }
1183
1184 /*
1185 * IDT limit check. The 286 does not access X86DESCGATE::u16OffsetHigh.
1186 */
1187 g_usBs3TestStep = 5000;
1188 i = (0x80 << (cIdteShift + 3)) - 1;
1189 j = (0x82 << (cIdteShift + 3)) - (!f286 ? 1 : 3);
1190 k = (0x83 << (cIdteShift + 3)) - 1;
1191 for (; i <= k; i++, g_usBs3TestStep++)
1192 {
1193 Idtr = IdtrSaved;
1194 Idtr.cbIdt = i;
1195 ASMSetIDTR(&Idtr);
1196 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1197 if (i < j)
1198 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx81, (0x81 << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1199 else
1200 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1201 }
1202 ASMSetIDTR(&IdtrSaved);
1203 BS3_ASSERT(g_usBs3TestStep < 5100);
1204
1205# if TMPL_BITS != 16 /* Only do the paging related stuff in 32-bit and 64-bit modes. */
1206
1207 /*
1208 * IDT page not present. Placing the IDT copy such that 0x80 is on the
1209 * first page and 0x81 is on the second page. We then proceed to move
1210 * it down byte by byte to check that any inaccessible byte means #PF.
1211 *
1212 * Note! We must reload the alternative IDTR for each run as any kind of
1213 * printing to the screen (like error reporting) will cause a switch
1214 * to real mode and back, reloading the default IDTR.
1215 */
1216 g_usBs3TestStep = 5200;
1217 if (BS3_MODE_IS_PAGED(g_bTestMode) && pbIdtCopyAlloc)
1218 {
1219 uint32_t const uCr2Expected = Bs3SelPtrToFlat(pbIdtCopyAlloc) + _4K;
1220 for (j = 0; j < cbIdte; j++)
1221 {
1222 pIdtCopy = (PX86DESC)&pbIdtCopyAlloc[_4K - cbIdte * 0x81 - j];
1223 Bs3MemCpy(pIdtCopy, paIdt, cbIdte * 256);
1224
1225 Idtr.cbIdt = IdtrSaved.cbIdt;
1226 Idtr.pIdt = Bs3SelPtrToFlat(pIdtCopy);
1227
1228 ASMSetIDTR(&Idtr);
1229 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1230 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1231 g_usBs3TestStep++;
1232
1233 ASMSetIDTR(&Idtr);
1234 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1235 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1236 g_usBs3TestStep++;
1237
1238 rc = Bs3PagingProtect(uCr2Expected, _4K, 0 /*fSet*/, X86_PTE_P /*fClear*/);
1239 if (RT_SUCCESS(rc))
1240 {
1241 ASMSetIDTR(&Idtr);
1242 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1243 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1244 g_usBs3TestStep++;
1245
1246 ASMSetIDTR(&Idtr);
1247 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1248 if (f486Plus)
1249 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, 0 /*uErrCd*/, uCr2Expected);
1250 else
1251 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, X86_TRAP_PF_RW /*uErrCd*/, uCr2Expected + 4 - RT_MIN(j, 4));
1252 g_usBs3TestStep++;
1253
1254 Bs3PagingProtect(uCr2Expected, _4K, X86_PTE_P /*fSet*/, 0 /*fClear*/);
1255
1256 /* Check if that the entry type is checked after the whole IDTE has been cleared for #PF. */
1257 pIdtCopy[0x80 << cIdteShift].Gate.u4Type = 0;
1258 rc = Bs3PagingProtect(uCr2Expected, _4K, 0 /*fSet*/, X86_PTE_P /*fClear*/);
1259 if (RT_SUCCESS(rc))
1260 {
1261 ASMSetIDTR(&Idtr);
1262 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1263 if (f486Plus)
1264 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, 0 /*uErrCd*/, uCr2Expected);
1265 else
1266 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx81, X86_TRAP_PF_RW /*uErrCd*/, uCr2Expected + 4 - RT_MIN(j, 4));
1267 g_usBs3TestStep++;
1268
1269 Bs3PagingProtect(uCr2Expected, _4K, X86_PTE_P /*fSet*/, 0 /*fClear*/);
1270 }
1271 }
1272 else
1273 Bs3TestPrintf("Bs3PagingProtectPtr: %d\n", i);
1274
1275 ASMSetIDTR(&IdtrSaved);
1276 }
1277 }
1278
1279 /*
1280 * The read/write and user/supervisor bits of the IDT PTEs are irrelevant.
1281 */
1282 g_usBs3TestStep = 5300;
1283 if (BS3_MODE_IS_PAGED(g_bTestMode) && pbIdtCopyAlloc)
1284 {
1285 Bs3MemCpy(pbIdtCopyAlloc, paIdt, cbIdte * 256);
1286 Idtr.cbIdt = IdtrSaved.cbIdt;
1287 Idtr.pIdt = Bs3SelPtrToFlat(pbIdtCopyAlloc);
1288
1289 ASMSetIDTR(&Idtr);
1290 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1291 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1292 g_usBs3TestStep++;
1293
1294 rc = Bs3PagingProtect(Idtr.pIdt, _4K, 0 /*fSet*/, X86_PTE_RW | X86_PTE_US /*fClear*/);
1295 if (RT_SUCCESS(rc))
1296 {
1297 ASMSetIDTR(&Idtr);
1298 Bs3TrapSetJmpAndRestore(&Ctx81, &TrapCtx);
1299 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx81, 0x81 /*bXcpt*/);
1300 g_usBs3TestStep++;
1301
1302 Bs3PagingProtect(Idtr.pIdt, _4K, X86_PTE_RW | X86_PTE_US /*fSet*/, 0 /*fClear*/);
1303 }
1304 ASMSetIDTR(&IdtrSaved);
1305 }
1306
1307 /*
1308 * Check that CS.u1Accessed is set to 1. Use the test page selector #0 and #3 together
1309 * with interrupt gates 80h and 83h, respectively.
1310 */
1311/** @todo Throw in SS.u1Accessed too. */
1312 g_usBs3TestStep = 5400;
1313 if (BS3_MODE_IS_PAGED(g_bTestMode) && pbIdtCopyAlloc)
1314 {
1315 Bs3GdteTestPage00 = Bs3Gdt[uSysR0Cs >> X86_SEL_SHIFT];
1316 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1317 paIdt[0x80 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_00;
1318
1319 Bs3GdteTestPage03 = Bs3Gdt[(uSysR0Cs + (3 << BS3_SEL_RING_SHIFT)) >> X86_SEL_SHIFT];
1320 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1321 paIdt[0x83 << cIdteShift].Gate.u16Sel = BS3_SEL_TEST_PAGE_03; /* rpl is ignored, so leave it as zero. */
1322
1323 /* Check that the CS.A bit is being set on a general basis and that
1324 the special CS values work with our generic handler code. */
1325 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1326 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1327 if (!(Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1328 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed", Bs3GdteTestPage00.Gen.u4Type);
1329 g_usBs3TestStep++;
1330
1331 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
1332 Bs3RegCtxConvertToRingX(&CtxTmp, 3);
1333 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1334 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83 /*bXcpt*/);
1335 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1336 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage00.Gen.u4Type);
1337 if (TrapCtx.uHandlerCs != (BS3_SEL_TEST_PAGE_03 | 3))
1338 bs3CpuBasic2_FailedF("uHandlerCs=%#x, expected %#x", TrapCtx.uHandlerCs, (BS3_SEL_TEST_PAGE_03 | 3));
1339 g_usBs3TestStep++;
1340
1341 /*
1342 * Now check that setting CS.u1Accessed to 1 does __NOT__ trigger a page
1343 * fault due to the RW bit being zero.
1344 * (We check both with and without the WP bit if 80486.)
1345 */
1346 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486)
1347 ASMSetCR0(uCr0Saved | X86_CR0_WP);
1348
1349 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1350 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1351 rc = Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, 0 /*fSet*/, X86_PTE_RW /*fClear*/);
1352 if (RT_SUCCESS(rc))
1353 {
1354 /* ring-0 handler */
1355 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1356 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1357 if (!(Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1358 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage00.Gen.u4Type);
1359 g_usBs3TestStep++;
1360
1361 /* ring-3 handler */
1362 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
1363 Bs3RegCtxConvertToRingX(&CtxTmp, 3);
1364 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1365 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83 /*bXcpt*/);
1366 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1367 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage00.Gen.u4Type);
1368 g_usBs3TestStep++;
1369
1370 /* clear WP and repeat the above. */
1371 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486)
1372 ASMSetCR0(uCr0Saved & ~X86_CR0_WP);
1373 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED; /* (No need to RW the page - ring-0, WP=0.) */
1374 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED; /* (No need to RW the page - ring-0, WP=0.) */
1375
1376 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1377 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &Ctx80, 0x80 /*bXcpt*/);
1378 if (!(Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1379 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!", Bs3GdteTestPage00.Gen.u4Type);
1380 g_usBs3TestStep++;
1381
1382 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1383 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x83 /*bXcpt*/);
1384 if (!(Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED))
1385 bs3CpuBasic2_FailedF("u4Type=%#x, not accessed!n", Bs3GdteTestPage03.Gen.u4Type);
1386 g_usBs3TestStep++;
1387
1388 Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, X86_PTE_RW /*fSet*/, 0 /*fClear*/);
1389 }
1390
1391 ASMSetCR0(uCr0Saved);
1392
1393 /*
1394 * While we're here, check that if the CS GDT entry is a non-present
1395 * page we do get a #PF with the right error code and CR2.
1396 */
1397 Bs3GdteTestPage00.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED; /* Just for fun, really a pointless gesture. */
1398 Bs3GdteTestPage03.Gen.u4Type &= ~X86_SEL_TYPE_ACCESSED;
1399 rc = Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, 0 /*fSet*/, X86_PTE_P /*fClear*/);
1400 if (RT_SUCCESS(rc))
1401 {
1402 Bs3TrapSetJmpAndRestore(&Ctx80, &TrapCtx);
1403 if (f486Plus)
1404 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx80, 0 /*uErrCd*/, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00);
1405 else
1406 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx80, X86_TRAP_PF_RW, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00 + 4);
1407 g_usBs3TestStep++;
1408
1409 /* Do it from ring-3 to check ErrCd, which doesn't set X86_TRAP_PF_US it turns out. */
1410 Bs3MemCpy(&CtxTmp, &Ctx83, sizeof(CtxTmp));
1411 Bs3RegCtxConvertToRingX(&CtxTmp, 3);
1412 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1413
1414 if (f486Plus)
1415 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &CtxTmp, 0 /*uErrCd*/, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_03);
1416 else
1417 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &CtxTmp, X86_TRAP_PF_RW, GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_03 + 4);
1418 g_usBs3TestStep++;
1419
1420 Bs3PagingProtect(GdtrSaved.pGdt + BS3_SEL_TEST_PAGE_00, 8, X86_PTE_P /*fSet*/, 0 /*fClear*/);
1421 if (Bs3GdteTestPage00.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
1422 bs3CpuBasic2_FailedF("u4Type=%#x, accessed! #1", Bs3GdteTestPage00.Gen.u4Type);
1423 if (Bs3GdteTestPage03.Gen.u4Type & X86_SEL_TYPE_ACCESSED)
1424 bs3CpuBasic2_FailedF("u4Type=%#x, accessed! #2", Bs3GdteTestPage03.Gen.u4Type);
1425 }
1426
1427 /* restore */
1428 paIdt[0x80 << cIdteShift].Gate.u16Sel = uSysR0Cs;
1429 paIdt[0x83 << cIdteShift].Gate.u16Sel = uSysR0Cs;// + (3 << BS3_SEL_RING_SHIFT) + 3;
1430 }
1431
1432# endif /* 32 || 64*/
1433
1434 /*
1435 * Check broad EFLAGS effects.
1436 */
1437 g_usBs3TestStep = 5600;
1438 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1439 {
1440 for (iRing = 0; iRing < 4; iRing++)
1441 {
1442 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1443 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1444
1445 /* all set */
1446 CtxTmp.rflags.u32 &= X86_EFL_VM | X86_EFL_1;
1447 CtxTmp.rflags.u32 |= X86_EFL_CF | X86_EFL_PF | X86_EFL_AF | X86_EFL_ZF | X86_EFL_SF /* | X86_EFL_TF */ /*| X86_EFL_IF*/
1448 | X86_EFL_DF | X86_EFL_OF | X86_EFL_IOPL /* | X86_EFL_NT*/;
1449 if (f486Plus)
1450 CtxTmp.rflags.u32 |= X86_EFL_AC;
1451 if (f486Plus && !g_f16BitSys)
1452 CtxTmp.rflags.u32 |= X86_EFL_RF;
1453 if (g_uBs3CpuDetected & BS3CPU_F_CPUID)
1454 CtxTmp.rflags.u32 |= X86_EFL_VIF | X86_EFL_VIP;
1455 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1456 CtxTmp.rflags.u32 &= ~X86_EFL_RF;
1457
1458 if (iCtx >= iRing)
1459 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
1460 else
1461 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1462 uExpected = CtxTmp.rflags.u32
1463 & ( X86_EFL_1 | X86_EFL_CF | X86_EFL_PF | X86_EFL_AF | X86_EFL_ZF | X86_EFL_SF | X86_EFL_DF
1464 | X86_EFL_OF | X86_EFL_IOPL | X86_EFL_NT | X86_EFL_VM | X86_EFL_AC | X86_EFL_VIF | X86_EFL_VIP
1465 | X86_EFL_ID /*| X86_EFL_TF*/ /*| X86_EFL_IF*/ /*| X86_EFL_RF*/ );
1466 if (TrapCtx.fHandlerRfl != uExpected)
1467 bs3CpuBasic2_FailedF("unexpected handler rflags value: %RX64 expected %RX32; CtxTmp.rflags=%RX64 Ctx.rflags=%RX64\n",
1468 TrapCtx.fHandlerRfl, uExpected, CtxTmp.rflags.u, TrapCtx.Ctx.rflags.u);
1469 g_usBs3TestStep++;
1470
1471 /* all cleared */
1472 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) < BS3CPU_80286)
1473 CtxTmp.rflags.u32 = apCtx8x[iCtx]->rflags.u32 & (X86_EFL_RA1_MASK | UINT16_C(0xf000));
1474 else
1475 CtxTmp.rflags.u32 = apCtx8x[iCtx]->rflags.u32 & (X86_EFL_VM | X86_EFL_RA1_MASK);
1476 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1477 if (iCtx >= iRing)
1478 bs3CpuBasic2_CompareIntCtx1(&TrapCtx, &CtxTmp, 0x80 + iCtx /*bXcpt*/);
1479 else
1480 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1481 uExpected = CtxTmp.rflags.u32;
1482 if (TrapCtx.fHandlerRfl != uExpected)
1483 bs3CpuBasic2_FailedF("unexpected handler rflags value: %RX64 expected %RX32; CtxTmp.rflags=%RX64 Ctx.rflags=%RX64\n",
1484 TrapCtx.fHandlerRfl, uExpected, CtxTmp.rflags.u, TrapCtx.Ctx.rflags.u);
1485 g_usBs3TestStep++;
1486 }
1487 }
1488
1489/** @todo CS.LIMIT / canonical(CS) */
1490
1491
1492 /*
1493 * Check invalid gate types.
1494 */
1495 g_usBs3TestStep = 32000;
1496 for (iRing = 0; iRing <= 3; iRing++)
1497 {
1498 static const uint16_t s_auCSes[] = { BS3_SEL_R0_CS16, BS3_SEL_R0_CS32, BS3_SEL_R0_CS64,
1499 BS3_SEL_TSS16, BS3_SEL_TSS32, BS3_SEL_TSS64, 0, BS3_SEL_SPARE_1f };
1500 static uint16_t const s_auInvlTypes64[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 13,
1501 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
1502 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f };
1503 static uint16_t const s_auInvlTypes32[] = { 0, 1, 2, 3, 8, 9, 10, 11, 13,
1504 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17,
1505 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f,
1506 /*286:*/ 12, 14, 15 };
1507 uint16_t const * const pauInvTypes = cIdteShift != 0 ? s_auInvlTypes64 : s_auInvlTypes32;
1508 uint16_t const cInvTypes = cIdteShift != 0 ? RT_ELEMENTS(s_auInvlTypes64)
1509 : f386Plus ? RT_ELEMENTS(s_auInvlTypes32) - 3 : RT_ELEMENTS(s_auInvlTypes32);
1510
1511
1512 for (iCtx = 0; iCtx < RT_ELEMENTS(apCtx8x); iCtx++)
1513 {
1514 unsigned iType;
1515
1516 Bs3MemCpy(&CtxTmp, apCtx8x[iCtx], sizeof(CtxTmp));
1517 Bs3RegCtxConvertToRingX(&CtxTmp, iRing);
1518# if TMPL_BITS == 32
1519 g_uBs3TrapEipHint = CtxTmp.rip.u32;
1520# endif
1521 for (iType = 0; iType < cInvTypes; iType++)
1522 {
1523 uint8_t const bSavedType = paIdt[(0x80 + iCtx) << cIdteShift].Gate.u4Type;
1524 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1DescType = pauInvTypes[iType] >> 4;
1525 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u4Type = pauInvTypes[iType] & 0xf;
1526
1527 for (i = 0; i < 4; i++)
1528 {
1529 for (j = 0; j < RT_ELEMENTS(s_auCSes); j++)
1530 {
1531 uint16_t uCs = (unsigned)(s_auCSes[j] - BS3_SEL_R0_FIRST) < (unsigned)(4 << BS3_SEL_RING_SHIFT)
1532 ? (s_auCSes[j] | i) + (i << BS3_SEL_RING_SHIFT)
1533 : s_auCSes[j] | i;
1534 /*Bs3TestPrintf("g_usBs3TestStep=%u iCtx=%u iRing=%u i=%u uCs=%04x type=%#x\n", g_usBs3TestStep, iCtx, iRing, i, uCs, pauInvTypes[iType]);*/
1535 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uCs;
1536 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1537 g_usBs3TestStep++;
1538 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1539
1540 /* Mark it not-present to check that invalid type takes precedence. */
1541 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 0;
1542 Bs3TrapSetJmpAndRestore(&CtxTmp, &TrapCtx);
1543 g_usBs3TestStep++;
1544 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &CtxTmp, ((0x80 + iCtx) << X86_TRAP_ERR_SEL_SHIFT) | X86_TRAP_ERR_IDT);
1545 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 1;
1546 }
1547 }
1548
1549 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u16Sel = uSysR0Cs;
1550 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u4Type = bSavedType;
1551 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1DescType = 0;
1552 paIdt[(0x80 + iCtx) << cIdteShift].Gate.u1Present = 1;
1553 }
1554 }
1555 }
1556 BS3_ASSERT(g_usBs3TestStep < 62000U && g_usBs3TestStep > 32000U);
1557
1558
1559 /** @todo
1560 * - Run \#PF and \#GP (and others?) at CPLs other than zero.
1561 * - Quickly generate all faults.
1562 * - All the peculiarities v8086.
1563 */
1564
1565# if TMPL_BITS != 16
1566 Bs3MemFree(pbIdtCopyAlloc, 12*_1K);
1567# endif
1568}
1569#endif /* convert me */
1570
1571
/**
 * Worker for bs3CpuBasic2_RaiseXcpt11 that runs one full \#AC test matrix.
 *
 * Iterates rings x EFLAGS.AC x test snippets x buffer misalignments, executes
 * each snippet via Bs3TrapSetJmpAndRestore and verifies the resulting trap
 * frame (\#UD on success path, \#GP, \#PF or \#AC on the fault paths).
 *
 * @param bMode         The CPU mode being tested.
 * @param pbBuf         The test buffer; the snippets' memory operand (ds:xBX)
 *                      points into this.  Presumably cache-line aligned by the
 *                      caller (see the misalignment loop comment below).
 * @param cbCacheLine   Cache line size used to size the misalignment sweep so
 *                      a cache line boundary is crossed (split-lock coverage).
 * @param fAm           Whether the caller has set CR0.AM, i.e. whether \#AC
 *                      can fire for misaligned ring-3 accesses with EFLAGS.AC.
 * @param fPf           Whether pbBuf is an alias mapping that ring-3 cannot
 *                      access, so ring-3 accesses are expected to \#PF.
 * @param uFlatBufPtr   Flat address of the buffer, used to check the \#PF
 *                      fault address when fPf is true (0 otherwise).
 * @param pCmn          The per-code-mode table of test snippets to run.
 */
static void bs3CpuBasic2_RaiseXcpt11Worker(uint8_t bMode, uint8_t *pbBuf, unsigned cbCacheLine, bool fAm, bool fPf,
                                           RTCCUINTXREG uFlatBufPtr, BS3CPUBASIC2PFTTSTCMNMODE const BS3_FAR *pCmn)
{
    BS3TRAPFRAME        TrapCtx;
    BS3REGCTX           Ctx;
    BS3REGCTX           CtxUdExpected;
    uint8_t const       cRings = bMode == BS3_MODE_RM ? 1 : 4; /* real mode has no rings to iterate. */
    uint8_t             iRing;
    uint16_t            iTest;

    /* make sure they're allocated  */
    Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
    Bs3MemZero(&Ctx, sizeof(Ctx));
    Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));

    /*
     * Test all relevant rings.
     *
     * The memory operand is ds:xBX, so point it to pbBuf.
     * The test snippets mostly use xAX as operand, with the div
     * one also using xDX, so make sure they make some sense.
     */
    Bs3RegCtxSaveEx(&Ctx, bMode, 512);

    Ctx.cr0.u32 &= ~(X86_CR0_MP | X86_CR0_EM | X86_CR0_TS); /* so fninit + fld works */

    /* In v8086 mode we can only run at CPL 3; otherwise start at ring-0. */
    for (iRing = BS3_MODE_IS_V86(bMode) ? 3 : 0; iRing < cRings; iRing++)
    {
        uint32_t    uEbx;
        uint8_t     fAc;

        if (!BS3_MODE_IS_RM_OR_V86(bMode))
            Bs3RegCtxConvertToRingX(&Ctx, iRing);

        if (!fPf || BS3_MODE_IS_32BIT_CODE(bMode) || BS3_MODE_IS_64BIT_CODE(bMode))
            Bs3RegCtxSetGrpDsFromCurPtr(&Ctx, &Ctx.rbx, pbBuf);
        else
        {
            /* Bs3RegCtxSetGrpDsFromCurPtr barfs when trying to output a sel:off address for the aliased buffer. */
            Ctx.ds      = BS3_FP_SEG(pbBuf);
            Ctx.rbx.u32 = BS3_FP_OFF(pbBuf);
        }
        uEbx = Ctx.rbx.u32; /* remember the base offset; the misalignment loop adds to it below. */

        /* Seed the snippet operands with recognizable values. */
        Ctx.rax.u = (bMode & BS3_MODE_CODE_MASK) == BS3_MODE_CODE_64
                  ? UINT64_C(0x80868028680386fe) : UINT32_C(0x65020686);
        Ctx.rdx.u = UINT32_C(0x00100100); /* careful with range due to div */

        Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));

        /*
         * AC flag loop.
         */
        for (fAc = 0; fAc < 2; fAc++)
        {
            if (fAc)
                Ctx.rflags.u32 |= X86_EFL_AC;
            else
                Ctx.rflags.u32 &= ~X86_EFL_AC;

            /*
             * Loop over the test snippets.
             */
            for (iTest = 0; iTest < pCmn->cEntries; iTest++)
            {
                uint8_t const    fOp      = pCmn->paEntries[iTest].fOp;
                uint16_t const   cbMem    = pCmn->paEntries[iTest].cbMem;
                uint8_t const    cbAlign  = pCmn->paEntries[iTest].cbAlign;
                uint16_t const   cbMax    = cbCacheLine + cbMem;
                uint16_t         offMem;
                uint8_t BS3_FAR *poffUd   = (uint8_t BS3_FAR *)Bs3SelLnkPtrToCurPtr(pCmn->paEntries[iTest].pfn);
                Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pCmn->paEntries[iTest].pfn);
                CtxUdExpected.rip    = Ctx.rip;
                /* The byte preceding the snippet holds its length, i.e. the offset of the trailing UD2. */
                CtxUdExpected.rip.u  = Ctx.rip.u + poffUd[-1];
                CtxUdExpected.cs     = Ctx.cs;
                CtxUdExpected.rflags = Ctx.rflags;
                if (bMode == BS3_MODE_RM)
                    CtxUdExpected.rflags.u32 &= ~X86_EFL_AC; /** @todo investigate. automatically cleared, or is it just our code? Observed with bs3-cpu-instr-3 too (10980xe), seems to be the CPU doing it. */
                CtxUdExpected.rdx    = Ctx.rdx;
                CtxUdExpected.rax    = Ctx.rax;
                if (fOp & MYOP_LD)
                {
                    /* Load snippets read from the buffer, which is filled with 0x01 bytes below. */
                    switch (cbMem)
                    {
                        case 2:
                            CtxUdExpected.rax.u16 = 0x0101;
                            break;
                        case 4:
                            CtxUdExpected.rax.u32 = UINT32_C(0x01010101);
                            break;
                        case 8:
                            CtxUdExpected.rax.u64 = UINT64_C(0x0101010101010101);
                            break;
                    }
                }

                /*
                 * Buffer misalignment loop.
                 * Note! We must make sure to cross a cache line here to make sure
                 *       to cover the split-lock scenario. (The buffer is cache
                 *       line aligned.)
                 */
                for (offMem = 0; offMem < cbMax; offMem++)
                {
                    bool const fMisaligned = (offMem & (cbAlign - 1)) != 0;
                    unsigned   offBuf      = cbMax + cbMem * 2;
                    while (offBuf-- > 0)
                        pbBuf[offBuf] = 1; /* byte-by-byte to make sure it doesn't trigger AC. */

                    CtxUdExpected.rbx.u32 = Ctx.rbx.u32 = uEbx + offMem; /* ASSUMES memory in first 4GB. */
                    if (BS3_MODE_IS_16BIT_SYS(bMode))
                        g_uBs3TrapEipHint = Ctx.rip.u32;

                    //Bs3TestPrintf("iRing=%d iTest=%d cs:rip=%04RX16:%08RX32 ds:rbx=%04RX16:%08RX32 ss:esp=%04RX16:%08RX32 bXcpt=%#x errcd=%#x fAm=%d fAc=%d ESP=%#RX32\n",
                    //              iRing, iTest, Ctx.cs, Ctx.rip.u32, Ctx.ds, Ctx.rbx.u32, Ctx.ss, Ctx.rsp.u32, TrapCtx.bXcpt, (unsigned)TrapCtx.uErrCd, fAm, fAc, ASMGetESP());

                    Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);

                    /* Snippets flagged MYOP_AC_GP raise #GP rather than #AC on misalignment
                       outside the ring-3+AM+AC combination (with a 10980XE quirk for offsets
                       that are 4-byte aligned). */
                    if (   (pCmn->paEntries[iTest].fOp & MYOP_AC_GP)
                        && fMisaligned
                        && (!fAm || iRing != 3 || !fAc || (offMem & 3 /* 10980XE */) == 0) )
                    {
                        if (fAc && bMode == BS3_MODE_RM)
                            TrapCtx.Ctx.rflags.u32 |= X86_EFL_AC;
                        bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
                    }
                    else if (fPf && iRing == 3 && (!fAm || !fAc || !fMisaligned)) /* #AC beats #PF */
                        bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx,
                                                  X86_TRAP_PF_P | X86_TRAP_PF_US
                                                  | (pCmn->paEntries[iTest].fOp & MYOP_ST ? X86_TRAP_PF_RW : 0),
                                                  uFlatBufPtr + offMem + (cbMem > 64 ? cbMem - 1 /*FXSAVE*/ : 0),
                                                  pCmn->paEntries[iTest].offFaultInstr);
                    else if (!fAm || iRing != 3 || !fAc || !fMisaligned)
                    {
                        /* Success path: snippet ran to its UD2.  Flags/registers clobbered by
                           the snippet are copied from the actual result before comparing. */
                        if (fOp & MYOP_EFL)
                        {
                            CtxUdExpected.rflags.u16 &= ~X86_EFL_STATUS_BITS;
                            CtxUdExpected.rflags.u16 |= TrapCtx.Ctx.rflags.u16 & X86_EFL_STATUS_BITS;
                        }
                        if (fOp == MYOP_LD_DIV)
                        {
                            CtxUdExpected.rax = TrapCtx.Ctx.rax;
                            CtxUdExpected.rdx = TrapCtx.Ctx.rdx;
                        }
                        bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
                    }
                    else
                        bs3CpuBasic2_CompareAcCtx(&TrapCtx, &Ctx, pCmn->paEntries[iTest].offFaultInstr);

                    g_usBs3TestStep++;
                }
            }
        }
    }
}
1727
1728
1729/**
1730 * Entrypoint for \#AC tests.
1731 *
1732 * @returns 0 or BS3TESTDOMODE_SKIPPED.
1733 * @param bMode The CPU mode we're testing.
1734 *
1735 * @note When testing v8086 code, we'll be running in v8086 mode. So, careful
1736 * with control registers and such.
1737 */
BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_RaiseXcpt11)(uint8_t bMode)
{
    unsigned            cbCacheLine = 128; /** @todo detect */
    uint8_t BS3_FAR    *pbBufAlloc;
    uint8_t BS3_FAR    *pbBuf;
    unsigned            idxCmnModes;
    uint32_t            fCr0;

    /*
     * Skip if 386 or older.
     */
    if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) < BS3CPU_80486)
    {
        Bs3TestSkipped("#AC test requires 486 or later");
        return BS3TESTDOMODE_SKIPPED;
    }

    bs3CpuBasic2_SetGlobals(bMode);

    /* Get us a page aligned buffer (allocate two pages and round up to the
       next page boundary when needed). */
    pbBufAlloc = pbBuf = Bs3MemAllocZ(BS3_MODE_IS_RM_OR_V86(bMode) ? BS3MEMKIND_REAL : BS3MEMKIND_TILED, X86_PAGE_SIZE * 2);
    if (!pbBufAlloc)
        return Bs3TestFailed("Failed to allocate 2 pages of real-mode memory");
    if (BS3_FP_OFF(pbBuf) & (X86_PAGE_SIZE - 1))
        pbBuf = &pbBufAlloc[X86_PAGE_SIZE - (BS3_FP_OFF(pbBuf) & X86_PAGE_OFFSET_MASK)];
    BS3_ASSERT(pbBuf - pbBufAlloc <= X86_PAGE_SIZE);
    //Bs3TestPrintf("pbBuf=%p\n", pbBuf);

    /* Find the g_aCmnModes entry for the current code mode (table is assumed
       to contain an entry for every BS3_MODE_CODE_XXX value). */
    idxCmnModes = 0;
    while (g_aCmnModes[idxCmnModes].bMode != (bMode & BS3_MODE_CODE_MASK))
        idxCmnModes++;
    //Bs3TestPrintf("idxCmnModes=%d bMode=%#x\n", idxCmnModes, bMode);

    /* First round is w/o alignment checks enabled (CR0.AM clear). */
    //Bs3TestPrintf("round 1\n");
    fCr0 = Bs3RegGetCr0();
    BS3_ASSERT(!(fCr0 & X86_CR0_AM));
    Bs3RegSetCr0(fCr0 & ~X86_CR0_AM);
#if 1
    bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBuf, cbCacheLine, false /*fAm*/, false /*fPf*/, 0, &g_aCmnModes[idxCmnModes]);
#endif

    /* The second round is with alignment checks enabled (CR0.AM set). */
#if 1
    //Bs3TestPrintf("round 2\n");
    Bs3RegSetCr0(Bs3RegGetCr0() | X86_CR0_AM);
    bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBuf, cbCacheLine, true /*fAm*/, false /*fPf*/, 0, &g_aCmnModes[idxCmnModes]);
#endif

#if 1
    /* The third and fourth round access the buffer via a page alias that's not
       accessible from ring-3.  The third round has ACs disabled and the fourth
       has them enabled. */
    if (BS3_MODE_IS_PAGED(bMode) && !BS3_MODE_IS_V86(bMode))
    {
        /* Alias the buffer as system memory so ring-3 access with AC+AM will cause #PF: */
        /** @todo the aliasing is not necessary any more... */
        int            rc;
        RTCCUINTXREG   uFlatBufPtr = Bs3SelPtrToFlat(pbBuf);
        uint64_t const uAliasPgPtr = bMode & BS3_MODE_CODE_64 ? UINT64_C(0x0000648680000000) : UINT32_C(0x80000000);
        rc = Bs3PagingAlias(uAliasPgPtr, uFlatBufPtr & ~(uint64_t)X86_PAGE_OFFSET_MASK, X86_PAGE_SIZE * 2,
                            X86_PTE_P | X86_PTE_RW); /* Note! no X86_PTE_US, so ring-3 faults. */
        if (RT_SUCCESS(rc))
        {
            /* We 'misalign' the segment base here to make sure it's the final
               address that gets alignment checked and not just the operand value.
               (Base is one byte below the alias page, offset compensates by +1.) */
            RTCCUINTXREG     uAliasBufPtr = (RTCCUINTXREG)uAliasPgPtr + (uFlatBufPtr & X86_PAGE_OFFSET_MASK);
            uint8_t BS3_FAR *pbBufAlias   = BS3_FP_MAKE(BS3_SEL_SPARE_00 | 3, (uFlatBufPtr & X86_PAGE_OFFSET_MASK) + 1);
            Bs3SelSetup16BitData(&Bs3GdteSpare00, uAliasPgPtr - 1);

            //Bs3TestPrintf("round 3 pbBufAlias=%p\n", pbBufAlias);
            Bs3RegSetCr0(Bs3RegGetCr0() & ~X86_CR0_AM);
            bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBufAlias, cbCacheLine, false /*fAm*/,
                                           true /*fPf*/, uAliasBufPtr, &g_aCmnModes[idxCmnModes]);

            //Bs3TestPrintf("round 4\n");
            Bs3RegSetCr0(Bs3RegGetCr0() | X86_CR0_AM);
            bs3CpuBasic2_RaiseXcpt11Worker(bMode, pbBufAlias, cbCacheLine, true /*fAm*/,
                                           true /*fPf*/, uAliasBufPtr, &g_aCmnModes[idxCmnModes]);

            Bs3PagingUnalias(uAliasPgPtr, X86_PAGE_SIZE * 2);
        }
        else
            Bs3TestFailedF("Bs3PagingAlias failed with %Rrc", rc);
    }
#endif

    /* Clean up: free the buffer and restore the original CR0 (AM flag). */
    Bs3MemFree(pbBufAlloc, X86_PAGE_SIZE * 2);
    Bs3RegSetCr0(fCr0);
    return 0;
}
1830
1831
1832/**
1833 * Executes one round of SIDT and SGDT tests using one assembly worker.
1834 *
1835 * This is written with driving everything from the 16-bit or 32-bit worker in
1836 * mind, i.e. not assuming the test bitcount is the same as the current.
1837 */
1838static void bs3CpuBasic2_sidt_sgdt_One(BS3CB2SIDTSGDT const BS3_FAR *pWorker, uint8_t bTestMode, uint8_t bRing,
1839 uint8_t const *pbExpected)
1840{
1841 BS3TRAPFRAME TrapCtx;
1842 BS3REGCTX Ctx;
1843 BS3REGCTX CtxUdExpected;
1844 BS3REGCTX TmpCtx;
1845 uint8_t const cbBuf = 8*2; /* test buffer area */
1846 uint8_t abBuf[8*2 + 8 + 8]; /* test buffer w/ misalignment test space and some extra guard. */
1847 uint8_t BS3_FAR *pbBuf = abBuf;
1848 uint8_t const cbIdtr = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 2+8 : 2+4;
1849 bool const f286 = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80286;
1850 uint8_t bFiller;
1851 int off;
1852 int off2;
1853 unsigned cb;
1854 uint8_t BS3_FAR *pbTest;
1855
1856 /* make sure they're allocated */
1857 Bs3MemZero(&Ctx, sizeof(Ctx));
1858 Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
1859 Bs3MemZero(&TmpCtx, sizeof(TmpCtx));
1860 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
1861 Bs3MemZero(&abBuf, sizeof(abBuf));
1862
1863 /* Create a context, give this routine some more stack space, point the context
1864 at our SIDT [xBX] + UD2 combo, and point DS:xBX at abBuf. */
1865 Bs3RegCtxSaveEx(&Ctx, bTestMode, 256 /*cbExtraStack*/);
1866 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBuf);
1867 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pWorker->fpfnWorker);
1868 if (BS3_MODE_IS_16BIT_SYS(bTestMode))
1869 g_uBs3TrapEipHint = Ctx.rip.u32;
1870 if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
1871 Bs3RegCtxConvertToRingX(&Ctx, bRing);
1872
1873 /* For successful SIDT attempts, we'll stop at the UD2. */
1874 Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
1875 CtxUdExpected.rip.u += pWorker->cbInstr;
1876
1877 /*
1878 * Check that it works at all and that only bytes we expect gets written to.
1879 */
1880 /* First with zero buffer. */
1881 Bs3MemZero(abBuf, sizeof(abBuf));
1882 if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), 0))
1883 Bs3TestFailedF("ASMMemIsAllU8 or Bs3MemZero is busted: abBuf=%.*Rhxs\n", sizeof(abBuf), pbBuf);
1884 if (!ASMMemIsZero(abBuf, sizeof(abBuf)))
1885 Bs3TestFailedF("ASMMemIsZero or Bs3MemZero is busted: abBuf=%.*Rhxs\n", sizeof(abBuf), pbBuf);
1886 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1887 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1888 if (f286 && abBuf[cbIdtr - 1] != 0xff)
1889 Bs3TestFailedF("286: Top base byte isn't 0xff (#1): %#x\n", abBuf[cbIdtr - 1]);
1890 if (!ASMMemIsZero(&abBuf[cbIdtr], cbBuf - cbIdtr))
1891 Bs3TestFailedF("Unexpected buffer bytes set (#1): cbIdtr=%u abBuf=%.*Rhxs\n", cbIdtr, cbBuf, pbBuf);
1892 if (Bs3MemCmp(abBuf, pbExpected, cbIdtr) != 0)
1893 Bs3TestFailedF("Mismatch (%s,#1): expected %.*Rhxs, got %.*Rhxs\n", pWorker->pszDesc, cbIdtr, pbExpected, cbIdtr, abBuf);
1894 g_usBs3TestStep++;
1895
1896 /* Again with a buffer filled with a byte not occuring in the previous result. */
1897 bFiller = 0x55;
1898 while (Bs3MemChr(abBuf, bFiller, cbBuf) != NULL)
1899 bFiller++;
1900 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
1901 if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), bFiller))
1902 Bs3TestFailedF("ASMMemIsAllU8 or Bs3MemSet is busted: bFiller=%#x abBuf=%.*Rhxs\n", bFiller, sizeof(abBuf), pbBuf);
1903
1904 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1905 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1906 if (f286 && abBuf[cbIdtr - 1] != 0xff)
1907 Bs3TestFailedF("286: Top base byte isn't 0xff (#2): %#x\n", abBuf[cbIdtr - 1]);
1908 if (!ASMMemIsAllU8(&abBuf[cbIdtr], cbBuf - cbIdtr, bFiller))
1909 Bs3TestFailedF("Unexpected buffer bytes set (#2): cbIdtr=%u bFiller=%#x abBuf=%.*Rhxs\n", cbIdtr, bFiller, cbBuf, pbBuf);
1910 if (Bs3MemChr(abBuf, bFiller, cbIdtr) != NULL)
1911 Bs3TestFailedF("Not all bytes touched: cbIdtr=%u bFiller=%#x abBuf=%.*Rhxs\n", cbIdtr, bFiller, cbBuf, pbBuf);
1912 if (Bs3MemCmp(abBuf, pbExpected, cbIdtr) != 0)
1913 Bs3TestFailedF("Mismatch (%s,#2): expected %.*Rhxs, got %.*Rhxs\n", pWorker->pszDesc, cbIdtr, pbExpected, cbIdtr, abBuf);
1914 g_usBs3TestStep++;
1915
1916 /*
1917 * Slide the buffer along 8 bytes to cover misalignment.
1918 */
1919 for (off = 0; off < 8; off++)
1920 {
1921 pbBuf = &abBuf[off];
1922 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &abBuf[off]);
1923 CtxUdExpected.rbx.u = Ctx.rbx.u;
1924
1925 /* First with zero buffer. */
1926 Bs3MemZero(abBuf, sizeof(abBuf));
1927 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1928 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1929 if (off > 0 && !ASMMemIsZero(abBuf, off))
1930 Bs3TestFailedF("Unexpected buffer bytes set before (#3): cbIdtr=%u off=%u abBuf=%.*Rhxs\n",
1931 cbIdtr, off, off + cbBuf, abBuf);
1932 if (!ASMMemIsZero(&abBuf[off + cbIdtr], sizeof(abBuf) - cbIdtr - off))
1933 Bs3TestFailedF("Unexpected buffer bytes set after (#3): cbIdtr=%u off=%u abBuf=%.*Rhxs\n",
1934 cbIdtr, off, off + cbBuf, abBuf);
1935 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
1936 Bs3TestFailedF("286: Top base byte isn't 0xff (#3): %#x\n", abBuf[off + cbIdtr - 1]);
1937 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
1938 Bs3TestFailedF("Mismatch (#3): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
1939 g_usBs3TestStep++;
1940
1941 /* Again with a buffer filled with a byte not occuring in the previous result. */
1942 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
1943 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1944 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1945 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
1946 Bs3TestFailedF("Unexpected buffer bytes set before (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
1947 cbIdtr, off, bFiller, off + cbBuf, abBuf);
1948 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - cbIdtr - off, bFiller))
1949 Bs3TestFailedF("Unexpected buffer bytes set after (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
1950 cbIdtr, off, bFiller, off + cbBuf, abBuf);
1951 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
1952 Bs3TestFailedF("Not all bytes touched (#4): cbIdtr=%u off=%u bFiller=%#x abBuf=%.*Rhxs\n",
1953 cbIdtr, off, bFiller, off + cbBuf, abBuf);
1954 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
1955 Bs3TestFailedF("286: Top base byte isn't 0xff (#4): %#x\n", abBuf[off + cbIdtr - 1]);
1956 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
1957 Bs3TestFailedF("Mismatch (#4): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
1958 g_usBs3TestStep++;
1959 }
1960 pbBuf = abBuf;
1961 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBuf);
1962 CtxUdExpected.rbx.u = Ctx.rbx.u;
1963
1964 /*
1965 * Play with the selector limit if the target mode supports limit checking
1966 * We use BS3_SEL_TEST_PAGE_00 for this
1967 */
1968 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
1969 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
1970 {
1971 uint16_t cbLimit;
1972 uint32_t uFlatBuf = Bs3SelPtrToFlat(abBuf);
1973 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
1974 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
1975 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatBuf;
1976 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatBuf >> 16);
1977 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatBuf >> 24);
1978
1979 if (pWorker->fSs)
1980 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
1981 else
1982 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
1983
1984 /* Expand up (normal). */
1985 for (off = 0; off < 8; off++)
1986 {
1987 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
1988 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
1989 {
1990 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
1991 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
1992 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1993 if (off + cbIdtr <= cbLimit + 1)
1994 {
1995 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
1996 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
1997 Bs3TestFailedF("Not all bytes touched (#5): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
1998 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
1999 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
2000 Bs3TestFailedF("Mismatch (#5): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
2001 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
2002 Bs3TestFailedF("286: Top base byte isn't 0xff (#5): %#x\n", abBuf[off + cbIdtr - 1]);
2003 }
2004 else
2005 {
2006 if (pWorker->fSs)
2007 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2008 else
2009 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2010 if (off + 2 <= cbLimit + 1)
2011 {
2012 if (Bs3MemChr(&abBuf[off], bFiller, 2) != NULL)
2013 Bs3TestFailedF("Limit bytes not touched (#6): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2014 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2015 if (Bs3MemCmp(&abBuf[off], pbExpected, 2) != 0)
2016 Bs3TestFailedF("Mismatch (#6): expected %.2Rhxs, got %.2Rhxs\n", pbExpected, &abBuf[off]);
2017 if (!ASMMemIsAllU8(&abBuf[off + 2], cbIdtr - 2, bFiller))
2018 Bs3TestFailedF("Base bytes touched on #GP (#6): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2019 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2020 }
2021 else if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), bFiller))
2022 Bs3TestFailedF("Bytes touched on #GP: cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2023 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2024 }
2025
2026 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
2027 Bs3TestFailedF("Leading bytes touched (#7): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2028 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2029 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - off - cbIdtr, bFiller))
2030 Bs3TestFailedF("Trailing bytes touched (#7): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2031 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2032
2033 g_usBs3TestStep++;
2034 }
2035 }
2036
2037 /* Expand down (weird). Inverted valid area compared to expand up,
2038 so a limit of zero give us a valid range for 0001..0ffffh (instead of
2039 a segment with one valid byte at 0000h). Whereas a limit of 0fffeh
2040 means one valid byte at 0ffffh, and a limit of 0ffffh means none
2041 (because in a normal expand up the 0ffffh means all 64KB are
2042 accessible). */
2043 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
2044 for (off = 0; off < 8; off++)
2045 {
2046 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2047 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
2048 {
2049 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2050 Bs3MemSet(abBuf, bFiller, sizeof(abBuf));
2051 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2052
2053 if (off > cbLimit)
2054 {
2055 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2056 if (Bs3MemChr(&abBuf[off], bFiller, cbIdtr) != NULL)
2057 Bs3TestFailedF("Not all bytes touched (#8): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2058 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2059 if (Bs3MemCmp(&abBuf[off], pbExpected, cbIdtr) != 0)
2060 Bs3TestFailedF("Mismatch (#8): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &abBuf[off]);
2061 if (f286 && abBuf[off + cbIdtr - 1] != 0xff)
2062 Bs3TestFailedF("286: Top base byte isn't 0xff (#8): %#x\n", abBuf[off + cbIdtr - 1]);
2063 }
2064 else
2065 {
2066 if (pWorker->fSs)
2067 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2068 else
2069 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2070 if (!ASMMemIsAllU8(abBuf, sizeof(abBuf), bFiller))
2071 Bs3TestFailedF("Bytes touched on #GP: cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2072 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2073 }
2074
2075 if (off > 0 && !ASMMemIsAllU8(abBuf, off, bFiller))
2076 Bs3TestFailedF("Leading bytes touched (#9): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2077 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2078 if (!ASMMemIsAllU8(&abBuf[off + cbIdtr], sizeof(abBuf) - off - cbIdtr, bFiller))
2079 Bs3TestFailedF("Trailing bytes touched (#9): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x abBuf=%.*Rhxs\n",
2080 cbIdtr, off, cbLimit, bFiller, off + cbBuf, abBuf);
2081
2082 g_usBs3TestStep++;
2083 }
2084 }
2085
2086 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBuf);
2087 CtxUdExpected.rbx.u = Ctx.rbx.u;
2088 CtxUdExpected.ss = Ctx.ss;
2089 CtxUdExpected.ds = Ctx.ds;
2090 }
2091
2092 /*
2093 * Play with the paging.
2094 */
2095 if ( BS3_MODE_IS_PAGED(bTestMode)
2096 && (!pWorker->fSs || bRing == 3) /* SS.DPL == CPL, we'll get some tiled ring-3 selector here. */
2097 && (pbTest = (uint8_t BS3_FAR *)Bs3MemGuardedTestPageAlloc(BS3MEMKIND_TILED)) != NULL)
2098 {
2099 RTCCUINTXREG uFlatTest = Bs3SelPtrToFlat(pbTest);
2100
2101 /*
2102 * Slide the buffer towards the trailing guard page. We'll observe the
2103 * first word being written entirely separately from the 2nd dword/qword.
2104 */
2105 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2106 {
2107 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller, cbIdtr * 2);
2108 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
2109 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2110 if (off + cbIdtr <= X86_PAGE_SIZE)
2111 {
2112 CtxUdExpected.rbx = Ctx.rbx;
2113 CtxUdExpected.ss = Ctx.ss;
2114 CtxUdExpected.ds = Ctx.ds;
2115 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2116 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2117 Bs3TestFailedF("Mismatch (#9): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2118 }
2119 else
2120 {
2121 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2122 uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2123 if ( off <= X86_PAGE_SIZE - 2
2124 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2125 Bs3TestFailedF("Mismatch (#10): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
2126 pbExpected, &pbTest[off], off);
2127 if ( off < X86_PAGE_SIZE - 2
2128 && !ASMMemIsAllU8(&pbTest[off + 2], X86_PAGE_SIZE - off - 2, bFiller))
2129 Bs3TestFailedF("Wrote partial base on #PF (#10): bFiller=%#x, got %.*Rhxs; off=%#x\n",
2130 bFiller, X86_PAGE_SIZE - off - 2, &pbTest[off + 2], off);
2131 if (off == X86_PAGE_SIZE - 1 && pbTest[off] != bFiller)
2132 Bs3TestFailedF("Wrote partial limit on #PF (#10): Expected %02x, got %02x\n", bFiller, pbTest[off]);
2133 }
2134 g_usBs3TestStep++;
2135 }
2136
2137 /*
2138 * Now, do it the other way around. It should look normal now since writing
2139 * the limit will #PF first and nothing should be written.
2140 */
2141 for (off = cbIdtr + 4; off >= -cbIdtr - 4; off--)
2142 {
2143 Bs3MemSet(pbTest, bFiller, 48);
2144 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
2145 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2146 if (off >= 0)
2147 {
2148 CtxUdExpected.rbx = Ctx.rbx;
2149 CtxUdExpected.ss = Ctx.ss;
2150 CtxUdExpected.ds = Ctx.ds;
2151 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2152 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2153 Bs3TestFailedF("Mismatch (#11): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2154 }
2155 else
2156 {
2157 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2158 uFlatTest + off, 0 /*cbIpAdjust*/);
2159 if ( -off < cbIdtr
2160 && !ASMMemIsAllU8(pbTest, cbIdtr + off, bFiller))
2161 Bs3TestFailedF("Wrote partial content on #PF (#12): bFiller=%#x, found %.*Rhxs; off=%d\n",
2162 bFiller, cbIdtr + off, pbTest, off);
2163 }
2164 if (!ASMMemIsAllU8(&pbTest[RT_MAX(cbIdtr + off, 0)], 16, bFiller))
2165 Bs3TestFailedF("Wrote beyond expected area (#13): bFiller=%#x, found %.16Rhxs; off=%d\n",
2166 bFiller, &pbTest[RT_MAX(cbIdtr + off, 0)], off);
2167 g_usBs3TestStep++;
2168 }
2169
2170 /*
2171 * Combine paging and segment limit and check ordering.
2172 * This is kind of interesting here since it the instruction seems to
2173 * be doing two separate writes.
2174 */
2175 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
2176 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
2177 {
2178 uint16_t cbLimit;
2179
2180 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
2181 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
2182 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
2183 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
2184 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
2185
2186 if (pWorker->fSs)
2187 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
2188 else
2189 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2190
2191 /* Expand up (normal), approaching tail guard page. */
2192 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2193 {
2194 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2195 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
2196 {
2197 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2198 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller, cbIdtr * 2);
2199 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2200 if (off + cbIdtr <= cbLimit + 1)
2201 {
2202 /* No #GP, but maybe #PF. */
2203 if (off + cbIdtr <= X86_PAGE_SIZE)
2204 {
2205 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2206 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2207 Bs3TestFailedF("Mismatch (#14): expected %.*Rhxs, got %.*Rhxs\n",
2208 cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2209 }
2210 else
2211 {
2212 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2213 uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2214 if ( off <= X86_PAGE_SIZE - 2
2215 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2216 Bs3TestFailedF("Mismatch (#15): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
2217 pbExpected, &pbTest[off], off);
2218 cb = X86_PAGE_SIZE - off - 2;
2219 if ( off < X86_PAGE_SIZE - 2
2220 && !ASMMemIsAllU8(&pbTest[off + 2], cb, bFiller))
2221 Bs3TestFailedF("Wrote partial base on #PF (#15): bFiller=%#x, got %.*Rhxs; off=%#x\n",
2222 bFiller, cb, &pbTest[off + 2], off);
2223 if (off == X86_PAGE_SIZE - 1 && pbTest[off] != bFiller)
2224 Bs3TestFailedF("Wrote partial limit on #PF (#15): Expected %02x, got %02x\n", bFiller, pbTest[off]);
2225 }
2226 }
2227 else if (off + 2 <= cbLimit + 1)
2228 {
2229 /* [ig]tr.limit writing does not cause #GP, but may cause #PG, if not writing the base causes #GP. */
2230 if (off <= X86_PAGE_SIZE - 2)
2231 {
2232 if (pWorker->fSs)
2233 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2234 else
2235 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2236 if (Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2237 Bs3TestFailedF("Mismatch (#16): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
2238 pbExpected, &pbTest[off], off);
2239 cb = X86_PAGE_SIZE - off - 2;
2240 if ( off < X86_PAGE_SIZE - 2
2241 && !ASMMemIsAllU8(&pbTest[off + 2], cb, bFiller))
2242 Bs3TestFailedF("Wrote partial base with limit (#16): bFiller=%#x, got %.*Rhxs; off=%#x\n",
2243 bFiller, cb, &pbTest[off + 2], off);
2244 }
2245 else
2246 {
2247 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2248 uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2249 if ( off < X86_PAGE_SIZE
2250 && !ASMMemIsAllU8(&pbTest[off], X86_PAGE_SIZE - off, bFiller))
2251 Bs3TestFailedF("Mismatch (#16): Partial limit write on #PF: bFiller=%#x, got %.*Rhxs\n",
2252 bFiller, X86_PAGE_SIZE - off, &pbTest[off]);
2253 }
2254 }
2255 else
2256 {
2257 /* #GP/#SS on limit. */
2258 if (pWorker->fSs)
2259 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2260 else
2261 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2262 if ( off < X86_PAGE_SIZE
2263 && !ASMMemIsAllU8(&pbTest[off], X86_PAGE_SIZE - off, bFiller))
2264 Bs3TestFailedF("Mismatch (#17): Partial write on #GP: bFiller=%#x, got %.*Rhxs\n",
2265 bFiller, X86_PAGE_SIZE - off, &pbTest[off]);
2266 }
2267
2268 cb = RT_MIN(cbIdtr * 2, off - (X86_PAGE_SIZE - cbIdtr*2));
2269 if (!ASMMemIsAllU8(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], cb, bFiller))
2270 Bs3TestFailedF("Leading bytes touched (#18): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
2271 cbIdtr, off, cbLimit, bFiller, cb, pbTest[X86_PAGE_SIZE - cbIdtr * 2]);
2272
2273 g_usBs3TestStep++;
2274
2275 /* Set DS to 0 and check that we get #GP(0). */
2276 if (!pWorker->fSs)
2277 {
2278 Ctx.ds = 0;
2279 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2280 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2281 Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2282 g_usBs3TestStep++;
2283 }
2284 }
2285 }
2286
2287 /* Expand down. */
2288 pbTest -= X86_PAGE_SIZE; /* Note! we're backing up a page to simplify things */
2289 uFlatTest -= X86_PAGE_SIZE;
2290
2291 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
2292 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
2293 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
2294 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
2295
2296 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2297 {
2298 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2299 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
2300 {
2301 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2302 Bs3MemSet(&pbTest[X86_PAGE_SIZE], bFiller, cbIdtr * 2);
2303 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2304 if (cbLimit < off && off >= X86_PAGE_SIZE)
2305 {
2306 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2307 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2308 Bs3TestFailedF("Mismatch (#19): expected %.*Rhxs, got %.*Rhxs\n",
2309 cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2310 cb = X86_PAGE_SIZE + cbIdtr*2 - off;
2311 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], cb, bFiller))
2312 Bs3TestFailedF("Trailing bytes touched (#20): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
2313 cbIdtr, off, cbLimit, bFiller, cb, pbTest[off + cbIdtr]);
2314 }
2315 else
2316 {
2317 if (cbLimit < off && off < X86_PAGE_SIZE)
2318 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
2319 uFlatTest + off, 0 /*cbIpAdjust*/);
2320 else if (pWorker->fSs)
2321 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2322 else
2323 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2324 cb = cbIdtr*2;
2325 if (!ASMMemIsAllU8(&pbTest[X86_PAGE_SIZE], cb, bFiller))
2326 Bs3TestFailedF("Trailing bytes touched (#20): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
2327 cbIdtr, off, cbLimit, bFiller, cb, pbTest[X86_PAGE_SIZE]);
2328 }
2329 g_usBs3TestStep++;
2330 }
2331 }
2332
2333 pbTest += X86_PAGE_SIZE;
2334 uFlatTest += X86_PAGE_SIZE;
2335 }
2336
2337 Bs3MemGuardedTestPageFree(pbTest);
2338 }
2339
2340 /*
2341 * Check non-canonical 64-bit space.
2342 */
2343 if ( BS3_MODE_IS_64BIT_CODE(bTestMode)
2344 && (pbTest = (uint8_t BS3_FAR *)Bs3PagingSetupCanonicalTraps()) != NULL)
2345 {
2346 /* Make our references relative to the gap. */
2347 pbTest += g_cbBs3PagingOneCanonicalTrap;
2348
2349 /* Hit it from below. */
2350 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
2351 {
2352 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0x0000800000000000) + off;
2353 Bs3MemSet(&pbTest[-64], bFiller, 64*2);
2354 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2355 if (off + cbIdtr <= 0)
2356 {
2357 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2358 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2359 Bs3TestFailedF("Mismatch (#21): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2360 }
2361 else
2362 {
2363 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2364 if (off <= -2 && Bs3MemCmp(&pbTest[off], pbExpected, 2) != 0)
2365 Bs3TestFailedF("Mismatch (#21): expected limit %.2Rhxs, got %.2Rhxs\n", pbExpected, &pbTest[off]);
2366 off2 = off <= -2 ? 2 : 0;
2367 cb = cbIdtr - off2;
2368 if (!ASMMemIsAllU8(&pbTest[off + off2], cb, bFiller))
2369 Bs3TestFailedF("Mismatch (#21): touched base %.*Rhxs, got %.*Rhxs\n",
2370 cb, &pbExpected[off], cb, &pbTest[off + off2]);
2371 }
2372 if (!ASMMemIsAllU8(&pbTest[off - 16], 16, bFiller))
2373 Bs3TestFailedF("Leading bytes touched (#21): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off]);
2374 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], 16, bFiller))
2375 Bs3TestFailedF("Trailing bytes touched (#21): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off + cbIdtr]);
2376 }
2377
2378 /* Hit it from above. */
2379 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
2380 {
2381 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0xffff800000000000) + off;
2382 Bs3MemSet(&pbTest[-64], bFiller, 64*2);
2383 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2384 if (off >= 0)
2385 {
2386 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2387 if (Bs3MemCmp(&pbTest[off], pbExpected, cbIdtr) != 0)
2388 Bs3TestFailedF("Mismatch (#22): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pbExpected, cbIdtr, &pbTest[off]);
2389 }
2390 else
2391 {
2392 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2393 if (!ASMMemIsAllU8(&pbTest[off], cbIdtr, bFiller))
2394 Bs3TestFailedF("Mismatch (#22): touched base %.*Rhxs, got %.*Rhxs\n",
2395 cbIdtr, &pbExpected[off], cbIdtr, &pbTest[off]);
2396 }
2397 if (!ASMMemIsAllU8(&pbTest[off - 16], 16, bFiller))
2398 Bs3TestFailedF("Leading bytes touched (#22): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off]);
2399 if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], 16, bFiller))
2400 Bs3TestFailedF("Trailing bytes touched (#22): bFiller=%#x, got %.16Rhxs\n", bFiller, &pbTest[off + cbIdtr]);
2401 }
2402
2403 }
2404}
2405
2406
2407static void bs3CpuBasic2_sidt_sgdt_Common(uint8_t bTestMode, BS3CB2SIDTSGDT const BS3_FAR *paWorkers, unsigned cWorkers,
2408 uint8_t const *pbExpected)
2409{
2410 unsigned idx;
2411 unsigned bRing;
2412 unsigned iStep = 0;
2413
2414 /* Note! We skip the SS checks for ring-0 since we badly mess up SS in the
2415 test and don't want to bother with double faults. */
2416 for (bRing = 0; bRing <= 3; bRing++)
2417 {
2418 for (idx = 0; idx < cWorkers; idx++)
2419 if ( (paWorkers[idx].bMode & (bTestMode & BS3_MODE_CODE_MASK))
2420 && (!paWorkers[idx].fSs || bRing != 0 /** @todo || BS3_MODE_IS_64BIT_SYS(bTestMode)*/ ))
2421 {
2422 g_usBs3TestStep = iStep;
2423 bs3CpuBasic2_sidt_sgdt_One(&paWorkers[idx], bTestMode, bRing, pbExpected);
2424 iStep += 1000;
2425 }
2426 if (BS3_MODE_IS_RM_OR_V86(bTestMode))
2427 break;
2428 }
2429}
2430
2431
2432BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_sidt)(uint8_t bMode)
2433{
2434 union
2435 {
2436 RTIDTR Idtr;
2437 uint8_t ab[16];
2438 } Expected;
2439
2440 //if (bMode != BS3_MODE_LM64) return BS3TESTDOMODE_SKIPPED;
2441 bs3CpuBasic2_SetGlobals(bMode);
2442
2443 /*
2444 * Pass to common worker which is only compiled once per mode.
2445 */
2446 Bs3MemZero(&Expected, sizeof(Expected));
2447 ASMGetIDTR(&Expected.Idtr);
2448 bs3CpuBasic2_sidt_sgdt_Common(bMode, g_aSidtWorkers, RT_ELEMENTS(g_aSidtWorkers), Expected.ab);
2449
2450 /*
2451 * Re-initialize the IDT.
2452 */
2453 Bs3TrapReInit();
2454 return 0;
2455}
2456
2457
2458BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_sgdt)(uint8_t bMode)
2459{
2460 uint64_t const uOrgAddr = Bs3Lgdt_Gdt.uAddr;
2461 uint64_t uNew = 0;
2462 union
2463 {
2464 RTGDTR Gdtr;
2465 uint8_t ab[16];
2466 } Expected;
2467
2468 //if (bMode != BS3_MODE_LM64) return BS3TESTDOMODE_SKIPPED;
2469 bs3CpuBasic2_SetGlobals(bMode);
2470
2471 /*
2472 * If paged mode, try push the GDT way up.
2473 */
2474 Bs3MemZero(&Expected, sizeof(Expected));
2475 ASMGetGDTR(&Expected.Gdtr);
2476 if (BS3_MODE_IS_PAGED(bMode))
2477 {
2478/** @todo loading non-canonical base addresses. */
2479 int rc;
2480 uNew = BS3_MODE_IS_64BIT_SYS(bMode) ? UINT64_C(0xffff80fedcb70000) : UINT64_C(0xc2d28000);
2481 uNew |= uOrgAddr & X86_PAGE_OFFSET_MASK;
2482 rc = Bs3PagingAlias(uNew, uOrgAddr, Bs3Lgdt_Gdt.cb, X86_PTE_P | X86_PTE_RW | X86_PTE_US | X86_PTE_D | X86_PTE_A);
2483 if (RT_SUCCESS(rc))
2484 {
2485 Bs3Lgdt_Gdt.uAddr = uNew;
2486 Bs3UtilSetFullGdtr(Bs3Lgdt_Gdt.cb, uNew);
2487 ASMGetGDTR(&Expected.Gdtr);
2488 if (BS3_MODE_IS_64BIT_SYS(bMode) && ARCH_BITS != 64)
2489 *(uint32_t *)&Expected.ab[6] = (uint32_t)(uNew >> 32);
2490 }
2491 }
2492
2493 /*
2494 * Pass to common worker which is only compiled once per mode.
2495 */
2496 bs3CpuBasic2_sidt_sgdt_Common(bMode, g_aSgdtWorkers, RT_ELEMENTS(g_aSgdtWorkers), Expected.ab);
2497
2498 /*
2499 * Unalias the GDT.
2500 */
2501 if (uNew != 0)
2502 {
2503 Bs3Lgdt_Gdt.uAddr = uOrgAddr;
2504 Bs3UtilSetFullGdtr(Bs3Lgdt_Gdt.cb, uOrgAddr);
2505 Bs3PagingUnalias(uNew, Bs3Lgdt_Gdt.cb);
2506 }
2507
2508 /*
2509 * Re-initialize the IDT.
2510 */
2511 Bs3TrapReInit();
2512 return 0;
2513}
2514
2515
2516
2517/*
2518 * LIDT & LGDT
2519 */
2520
2521/**
2522 * Executes one round of LIDT and LGDT tests using one assembly worker.
2523 *
2524 * This is written with driving everything from the 16-bit or 32-bit worker in
2525 * mind, i.e. not assuming the test bitcount is the same as the current.
2526 */
2527static void bs3CpuBasic2_lidt_lgdt_One(BS3CB2SIDTSGDT const BS3_FAR *pWorker, uint8_t bTestMode, uint8_t bRing,
2528 uint8_t const *pbRestore, size_t cbRestore, uint8_t const *pbExpected)
2529{
2530 static const struct
2531 {
2532 bool fGP;
2533 uint16_t cbLimit;
2534 uint64_t u64Base;
2535 } s_aValues64[] =
2536 {
2537 { false, 0x0000, UINT64_C(0x0000000000000000) },
2538 { false, 0x0001, UINT64_C(0x0000000000000001) },
2539 { false, 0x0002, UINT64_C(0x0000000000000010) },
2540 { false, 0x0003, UINT64_C(0x0000000000000123) },
2541 { false, 0x0004, UINT64_C(0x0000000000001234) },
2542 { false, 0x0005, UINT64_C(0x0000000000012345) },
2543 { false, 0x0006, UINT64_C(0x0000000000123456) },
2544 { false, 0x0007, UINT64_C(0x0000000001234567) },
2545 { false, 0x0008, UINT64_C(0x0000000012345678) },
2546 { false, 0x0009, UINT64_C(0x0000000123456789) },
2547 { false, 0x000a, UINT64_C(0x000000123456789a) },
2548 { false, 0x000b, UINT64_C(0x00000123456789ab) },
2549 { false, 0x000c, UINT64_C(0x0000123456789abc) },
2550 { false, 0x001c, UINT64_C(0x00007ffffeefefef) },
2551 { false, 0xffff, UINT64_C(0x00007fffffffffff) },
2552 { true, 0xf3f1, UINT64_C(0x0000800000000000) },
2553 { true, 0x0000, UINT64_C(0x0000800000000000) },
2554 { true, 0x0000, UINT64_C(0x0000800000000333) },
2555 { true, 0x00f0, UINT64_C(0x0001000000000000) },
2556 { true, 0x0ff0, UINT64_C(0x0012000000000000) },
2557 { true, 0x0eff, UINT64_C(0x0123000000000000) },
2558 { true, 0xe0fe, UINT64_C(0x1234000000000000) },
2559 { true, 0x00ad, UINT64_C(0xffff300000000000) },
2560 { true, 0x0000, UINT64_C(0xffff7fffffffffff) },
2561 { true, 0x00f0, UINT64_C(0xffff7fffffffffff) },
2562 { false, 0x5678, UINT64_C(0xffff800000000000) },
2563 { false, 0x2969, UINT64_C(0xffffffffffeefefe) },
2564 { false, 0x1221, UINT64_C(0xffffffffffffffff) },
2565 { false, 0x1221, UINT64_C(0xffffffffffffffff) },
2566 };
2567 static const struct
2568 {
2569 uint16_t cbLimit;
2570 uint32_t u32Base;
2571 } s_aValues32[] =
2572 {
2573 { 0xdfdf, UINT32_C(0xefefefef) },
2574 { 0x0000, UINT32_C(0x00000000) },
2575 { 0x0001, UINT32_C(0x00000001) },
2576 { 0x0002, UINT32_C(0x00000012) },
2577 { 0x0003, UINT32_C(0x00000123) },
2578 { 0x0004, UINT32_C(0x00001234) },
2579 { 0x0005, UINT32_C(0x00012345) },
2580 { 0x0006, UINT32_C(0x00123456) },
2581 { 0x0007, UINT32_C(0x01234567) },
2582 { 0x0008, UINT32_C(0x12345678) },
2583 { 0x0009, UINT32_C(0x80204060) },
2584 { 0x000a, UINT32_C(0xddeeffaa) },
2585 { 0x000b, UINT32_C(0xfdecdbca) },
2586 { 0x000c, UINT32_C(0x6098456b) },
2587 { 0x000d, UINT32_C(0x98506099) },
2588 { 0x000e, UINT32_C(0x206950bc) },
2589 { 0x000f, UINT32_C(0x9740395d) },
2590 { 0x0334, UINT32_C(0x64a9455e) },
2591 { 0xb423, UINT32_C(0xd20b6eff) },
2592 { 0x4955, UINT32_C(0x85296d46) },
2593 { 0xffff, UINT32_C(0x07000039) },
2594 { 0xefe1, UINT32_C(0x0007fe00) },
2595 };
2596
2597 BS3TRAPFRAME TrapCtx;
2598 BS3REGCTX Ctx;
2599 BS3REGCTX CtxUdExpected;
2600 BS3REGCTX TmpCtx;
2601 uint8_t abBufLoad[40]; /* Test buffer w/ misalignment test space and some (cbIdtr) extra guard. */
2602 uint8_t abBufSave[32]; /* For saving the result after loading. */
2603 uint8_t abBufRestore[24]; /* For restoring sane value (same seg as abBufSave!). */
2604 uint8_t abExpectedFilled[32]; /* Same as pbExpected, except it's filled with bFiller2 instead of zeros. */
2605 uint8_t BS3_FAR *pbBufSave; /* Correctly aligned pointer into abBufSave. */
2606 uint8_t BS3_FAR *pbBufRestore; /* Correctly aligned pointer into abBufRestore. */
2607 uint8_t const cbIdtr = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 2+8 : 2+4;
2608 uint8_t const cbBaseLoaded = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 8
2609 : BS3_MODE_IS_16BIT_CODE(bTestMode) == !(pWorker->fFlags & BS3CB2SIDTSGDT_F_OPSIZE)
2610 ? 3 : 4;
2611 bool const f286 = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80286;
2612 uint8_t const bTop16BitBase = f286 ? 0xff : 0x00;
2613 uint8_t bFiller1; /* For filling abBufLoad. */
2614 uint8_t bFiller2; /* For filling abBufSave and expectations. */
2615 int off;
2616 uint8_t BS3_FAR *pbTest;
2617 unsigned i;
2618
2619 /* make sure they're allocated */
2620 Bs3MemZero(&Ctx, sizeof(Ctx));
2621 Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
2622 Bs3MemZero(&TmpCtx, sizeof(TmpCtx));
2623 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
2624 Bs3MemZero(abBufSave, sizeof(abBufSave));
2625 Bs3MemZero(abBufLoad, sizeof(abBufLoad));
2626 Bs3MemZero(abBufRestore, sizeof(abBufRestore));
2627
2628 /*
2629 * Create a context, giving this routine some more stack space.
2630 * - Point the context at our LIDT [xBX] + SIDT [xDI] + LIDT [xSI] + UD2 combo.
2631 * - Point DS/SS:xBX at abBufLoad.
2632 * - Point ES:xDI at abBufSave.
2633 * - Point ES:xSI at abBufRestore.
2634 */
2635 Bs3RegCtxSaveEx(&Ctx, bTestMode, 256 /*cbExtraStack*/);
2636 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pWorker->fpfnWorker);
2637 if (BS3_MODE_IS_16BIT_SYS(bTestMode))
2638 g_uBs3TrapEipHint = Ctx.rip.u32;
2639 Ctx.rflags.u16 &= ~X86_EFL_IF;
2640 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBufLoad);
2641
2642 pbBufSave = abBufSave;
2643 if ((BS3_FP_OFF(pbBufSave) + 2) & 7)
2644 pbBufSave += 8 - ((BS3_FP_OFF(pbBufSave) + 2) & 7);
2645 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rdi, &Ctx.es, pbBufSave);
2646
2647 pbBufRestore = abBufRestore;
2648 if ((BS3_FP_OFF(pbBufRestore) + 2) & 7)
2649 pbBufRestore += 8 - ((BS3_FP_OFF(pbBufRestore) + 2) & 7);
2650 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsi, &Ctx.es, pbBufRestore);
2651 Bs3MemCpy(pbBufRestore, pbRestore, cbRestore);
2652
2653 if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
2654 Bs3RegCtxConvertToRingX(&Ctx, bRing);
2655
2656 /* For successful SIDT attempts, we'll stop at the UD2. */
2657 Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
2658 CtxUdExpected.rip.u += pWorker->cbInstr;
2659
2660 /*
2661 * Check that it works at all.
2662 */
2663 Bs3MemZero(abBufLoad, sizeof(abBufLoad));
2664 Bs3MemCpy(abBufLoad, pbBufRestore, cbIdtr);
2665 Bs3MemZero(abBufSave, sizeof(abBufSave));
2666 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2667 if (bRing != 0)
2668 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2669 else
2670 {
2671 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2672 if (Bs3MemCmp(pbBufSave, pbExpected, cbIdtr * 2) != 0)
2673 Bs3TestFailedF("Mismatch (%s, #1): expected %.*Rhxs, got %.*Rhxs\n",
2674 pWorker->pszDesc, cbIdtr*2, pbExpected, cbIdtr*2, pbBufSave);
2675 }
2676 g_usBs3TestStep++;
2677
2678 /* Determine two filler bytes that doesn't appear in the previous result or our expectations. */
2679 bFiller1 = ~0x55;
2680 while ( Bs3MemChr(pbBufSave, bFiller1, cbIdtr) != NULL
2681 || Bs3MemChr(pbRestore, bFiller1, cbRestore) != NULL
2682 || bFiller1 == 0xff)
2683 bFiller1++;
2684 bFiller2 = 0x33;
2685 while ( Bs3MemChr(pbBufSave, bFiller2, cbIdtr) != NULL
2686 || Bs3MemChr(pbRestore, bFiller2, cbRestore) != NULL
2687 || bFiller2 == 0xff
2688 || bFiller2 == bFiller1)
2689 bFiller2++;
2690 Bs3MemSet(abExpectedFilled, bFiller2, sizeof(abExpectedFilled));
2691 Bs3MemCpy(abExpectedFilled, pbExpected, cbIdtr);
2692
2693 /* Again with a buffer filled with a byte not occuring in the previous result. */
2694 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2695 Bs3MemCpy(abBufLoad, pbBufRestore, cbIdtr);
2696 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2697 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2698 if (bRing != 0)
2699 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2700 else
2701 {
2702 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2703 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2704 Bs3TestFailedF("Mismatch (%s, #2): expected %.*Rhxs, got %.*Rhxs\n",
2705 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2706 }
2707 g_usBs3TestStep++;
2708
2709 /*
2710 * Try loading a bunch of different limit+base value to check what happens,
2711 * especially what happens wrt the top part of the base in 16-bit mode.
2712 */
2713 if (BS3_MODE_IS_64BIT_CODE(bTestMode))
2714 {
2715 for (i = 0; i < RT_ELEMENTS(s_aValues64); i++)
2716 {
2717 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2718 Bs3MemCpy(&abBufLoad[0], &s_aValues64[i].cbLimit, 2);
2719 Bs3MemCpy(&abBufLoad[2], &s_aValues64[i].u64Base, 8);
2720 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2721 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2722 if (bRing != 0 || s_aValues64[i].fGP)
2723 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2724 else
2725 {
2726 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2727 if ( Bs3MemCmp(&pbBufSave[0], &s_aValues64[i].cbLimit, 2) != 0
2728 || Bs3MemCmp(&pbBufSave[2], &s_aValues64[i].u64Base, 8) != 0
2729 || !ASMMemIsAllU8(&pbBufSave[10], cbIdtr, bFiller2))
2730 Bs3TestFailedF("Mismatch (%s, #2): expected %04RX16:%016RX64, fillers %#x %#x, got %.*Rhxs\n",
2731 pWorker->pszDesc, s_aValues64[i].cbLimit, s_aValues64[i].u64Base,
2732 bFiller1, bFiller2, cbIdtr*2, pbBufSave);
2733 }
2734 g_usBs3TestStep++;
2735 }
2736 }
2737 else
2738 {
2739 for (i = 0; i < RT_ELEMENTS(s_aValues32); i++)
2740 {
2741 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2742 Bs3MemCpy(&abBufLoad[0], &s_aValues32[i].cbLimit, 2);
2743 Bs3MemCpy(&abBufLoad[2], &s_aValues32[i].u32Base, cbBaseLoaded);
2744 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2745 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2746 if (bRing != 0)
2747 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2748 else
2749 {
2750 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2751 if ( Bs3MemCmp(&pbBufSave[0], &s_aValues32[i].cbLimit, 2) != 0
2752 || Bs3MemCmp(&pbBufSave[2], &s_aValues32[i].u32Base, cbBaseLoaded) != 0
2753 || ( cbBaseLoaded != 4
2754 && pbBufSave[2+3] != bTop16BitBase)
2755 || !ASMMemIsAllU8(&pbBufSave[8], cbIdtr, bFiller2))
2756 Bs3TestFailedF("Mismatch (%s,#3): loaded %04RX16:%08RX32, fillers %#x %#x%s, got %.*Rhxs\n",
2757 pWorker->pszDesc, s_aValues32[i].cbLimit, s_aValues32[i].u32Base, bFiller1, bFiller2,
2758 f286 ? ", 286" : "", cbIdtr*2, pbBufSave);
2759 }
2760 g_usBs3TestStep++;
2761 }
2762 }
2763
2764 /*
2765 * Slide the buffer along 8 bytes to cover misalignment.
2766 */
2767 for (off = 0; off < 8; off++)
2768 {
2769 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &abBufLoad[off]);
2770 CtxUdExpected.rbx.u = Ctx.rbx.u;
2771
2772 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2773 Bs3MemCpy(&abBufLoad[off], pbBufRestore, cbIdtr);
2774 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2775 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2776 if (bRing != 0)
2777 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2778 else
2779 {
2780 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2781 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2782 Bs3TestFailedF("Mismatch (%s, #4): expected %.*Rhxs, got %.*Rhxs\n",
2783 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2784 }
2785 g_usBs3TestStep++;
2786 }
2787 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBufLoad);
2788 CtxUdExpected.rbx.u = Ctx.rbx.u;
2789
2790 /*
2791 * Play with the selector limit if the target mode supports limit checking
2792 * We use BS3_SEL_TEST_PAGE_00 for this
2793 */
2794 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
2795 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
2796 {
2797 uint16_t cbLimit;
2798 uint32_t uFlatBuf = Bs3SelPtrToFlat(abBufLoad);
2799 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
2800 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
2801 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatBuf;
2802 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatBuf >> 16);
2803 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatBuf >> 24);
2804
2805 if (pWorker->fSs)
2806 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
2807 else
2808 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
2809
2810 /* Expand up (normal). */
2811 for (off = 0; off < 8; off++)
2812 {
2813 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2814 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
2815 {
2816 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2817
2818 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2819 Bs3MemCpy(&abBufLoad[off], pbBufRestore, cbIdtr);
2820 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2821 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2822 if (bRing != 0)
2823 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2824 else if (off + cbIdtr <= cbLimit + 1)
2825 {
2826 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2827 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2828 Bs3TestFailedF("Mismatch (%s, #5): expected %.*Rhxs, got %.*Rhxs\n",
2829 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2830 }
2831 else if (pWorker->fSs)
2832 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2833 else
2834 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2835 g_usBs3TestStep++;
2836
2837 /* Again with zero limit and messed up base (should trigger tripple fault if partially loaded). */
2838 abBufLoad[off] = abBufLoad[off + 1] = 0;
2839 abBufLoad[off + 2] |= 1;
2840 abBufLoad[off + cbIdtr - 2] ^= 0x5a;
2841 abBufLoad[off + cbIdtr - 1] ^= 0xa5;
2842 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2843 if (bRing != 0)
2844 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2845 else if (off + cbIdtr <= cbLimit + 1)
2846 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2847 else if (pWorker->fSs)
2848 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2849 else
2850 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2851 }
2852 }
2853
2854 /* Expand down (weird). Inverted valid area compared to expand up,
2855 so a limit of zero give us a valid range for 0001..0ffffh (instead of
2856 a segment with one valid byte at 0000h). Whereas a limit of 0fffeh
2857 means one valid byte at 0ffffh, and a limit of 0ffffh means none
2858 (because in a normal expand up the 0ffffh means all 64KB are
2859 accessible). */
2860 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
2861 for (off = 0; off < 8; off++)
2862 {
2863 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
2864 for (cbLimit = 0; cbLimit < cbIdtr*2; cbLimit++)
2865 {
2866 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
2867
2868 Bs3MemSet(abBufLoad, bFiller1, sizeof(abBufLoad));
2869 Bs3MemCpy(&abBufLoad[off], pbBufRestore, cbIdtr);
2870 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2871 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2872 if (bRing != 0)
2873 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2874 else if (off > cbLimit)
2875 {
2876 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2877 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
2878 Bs3TestFailedF("Mismatch (%s, #6): expected %.*Rhxs, got %.*Rhxs\n",
2879 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2880 }
2881 else if (pWorker->fSs)
2882 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2883 else
2884 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2885 g_usBs3TestStep++;
2886
2887 /* Again with zero limit and messed up base (should trigger triple fault if partially loaded). */
2888 abBufLoad[off] = abBufLoad[off + 1] = 0;
2889 abBufLoad[off + 2] |= 3;
2890 abBufLoad[off + cbIdtr - 2] ^= 0x55;
2891 abBufLoad[off + cbIdtr - 1] ^= 0xaa;
2892 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2893 if (bRing != 0)
2894 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2895 else if (off > cbLimit)
2896 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2897 else if (pWorker->fSs)
2898 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
2899 else
2900 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2901 }
2902 }
2903
2904 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, abBufLoad);
2905 CtxUdExpected.rbx.u = Ctx.rbx.u;
2906 CtxUdExpected.ss = Ctx.ss;
2907 CtxUdExpected.ds = Ctx.ds;
2908 }
2909
2910 /*
2911 * Play with the paging.
2912 */
2913 if ( BS3_MODE_IS_PAGED(bTestMode)
2914 && (!pWorker->fSs || bRing == 3) /* SS.DPL == CPL, we'll get some tiled ring-3 selector here. */
2915 && (pbTest = (uint8_t BS3_FAR *)Bs3MemGuardedTestPageAlloc(BS3MEMKIND_TILED)) != NULL)
2916 {
2917 RTCCUINTXREG uFlatTest = Bs3SelPtrToFlat(pbTest);
2918
2919 /*
2920 * Slide the load buffer towards the trailing guard page.
2921 */
2922 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[X86_PAGE_SIZE]);
2923 CtxUdExpected.ss = Ctx.ss;
2924 CtxUdExpected.ds = Ctx.ds;
2925 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
2926 {
2927 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller1, cbIdtr*2);
2928 if (off < X86_PAGE_SIZE)
2929 Bs3MemCpy(&pbTest[off], pbBufRestore, RT_MIN(X86_PAGE_SIZE - off, cbIdtr));
2930 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
2931 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2932 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2933 if (bRing != 0)
2934 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2935 else if (off + cbIdtr <= X86_PAGE_SIZE)
2936 {
2937 CtxUdExpected.rbx = Ctx.rbx;
2938 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2939 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr*2) != 0)
2940 Bs3TestFailedF("Mismatch (%s, #7): expected %.*Rhxs, got %.*Rhxs\n",
2941 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2942 }
2943 else
2944 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2945 g_usBs3TestStep++;
2946
2947 /* Again with zero limit and maybe messed up base as well (triple fault if buggy).
2948 The 386DX-40 here triple faults (or something) with off == 0xffe, nothing else. */
2949 if ( off < X86_PAGE_SIZE && off + cbIdtr > X86_PAGE_SIZE
2950 && ( off != X86_PAGE_SIZE - 2
2951 || (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) != BS3CPU_80386)
2952 )
2953 {
2954 pbTest[off] = 0;
2955 if (off + 1 < X86_PAGE_SIZE)
2956 pbTest[off + 1] = 0;
2957 if (off + 2 < X86_PAGE_SIZE)
2958 pbTest[off + 2] |= 7;
2959 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2960 if (bRing != 0)
2961 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2962 else
2963 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
2964 g_usBs3TestStep++;
2965 }
2966 }
2967
2968 /*
2969 * Now, do it the other way around. It should look normal now since writing
2970 * the limit will #PF first and nothing should be written.
2971 */
2972 for (off = cbIdtr + 4; off >= -cbIdtr - 4; off--)
2973 {
2974 Bs3MemSet(pbTest, bFiller1, 48);
2975 if (off >= 0)
2976 Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
2977 else if (off + cbIdtr > 0)
2978 Bs3MemCpy(pbTest, &pbBufRestore[-off], cbIdtr + off);
2979 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rbx, pWorker->fSs ? &Ctx.ss : &Ctx.ds, &pbTest[off]);
2980 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
2981 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
2982 if (bRing != 0)
2983 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
2984 else if (off >= 0)
2985 {
2986 CtxUdExpected.rbx = Ctx.rbx;
2987 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
2988 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr*2) != 0)
2989 Bs3TestFailedF("Mismatch (%s, #8): expected %.*Rhxs, got %.*Rhxs\n",
2990 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
2991 }
2992 else
2993 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + off, 0 /*cbIpAdjust*/);
2994 g_usBs3TestStep++;
2995
2996 /* Again with messed up base as well (triple fault if buggy). */
2997 if (off < 0 && off > -cbIdtr)
2998 {
2999 if (off + 2 >= 0)
3000 pbTest[off + 2] |= 15;
3001 pbTest[off + cbIdtr - 1] ^= 0xaa;
3002 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3003 if (bRing != 0)
3004 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3005 else
3006 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + off, 0 /*cbIpAdjust*/);
3007 g_usBs3TestStep++;
3008 }
3009 }
3010
3011 /*
3012 * Combine paging and segment limit and check ordering.
3013 * This is kind of interesting here since it the instruction seems to
3014 * actually be doing two separate read, just like it's S[IG]DT counterpart.
3015 *
3016 * Note! My 486DX4 does a DWORD limit read when the operand size is 32-bit,
3017 * that's what f486Weirdness deals with.
3018 */
3019 if ( !BS3_MODE_IS_RM_OR_V86(bTestMode)
3020 && !BS3_MODE_IS_64BIT_CODE(bTestMode))
3021 {
3022 bool const f486Weirdness = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) == BS3CPU_80486
3023 && BS3_MODE_IS_32BIT_CODE(bTestMode) == !(pWorker->fFlags & BS3CB2SIDTSGDT_F_OPSIZE);
3024 uint16_t cbLimit;
3025
3026 Bs3GdteTestPage00 = Bs3Gdte_DATA16;
3027 Bs3GdteTestPage00.Gen.u2Dpl = bRing;
3028 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
3029 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
3030 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
3031
3032 if (pWorker->fSs)
3033 CtxUdExpected.ss = Ctx.ss = BS3_SEL_TEST_PAGE_00 | bRing;
3034 else
3035 CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
3036
3037 /* Expand up (normal), approaching tail guard page. */
3038 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
3039 {
3040 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
3041 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
3042 {
3043 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
3044 Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller1, cbIdtr * 2);
3045 if (off < X86_PAGE_SIZE)
3046 Bs3MemCpy(&pbTest[off], pbBufRestore, RT_MIN(cbIdtr, X86_PAGE_SIZE - off));
3047 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3048 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3049 if (bRing != 0)
3050 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3051 else if (off + cbIdtr <= cbLimit + 1)
3052 {
3053 /* No #GP, but maybe #PF. */
3054 if (off + cbIdtr <= X86_PAGE_SIZE)
3055 {
3056 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3057 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
3058 Bs3TestFailedF("Mismatch (%s, #9): expected %.*Rhxs, got %.*Rhxs\n",
3059 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3060 }
3061 else
3062 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
3063 }
3064 /* No #GP/#SS on limit, but instead #PF? */
3065 else if ( !f486Weirdness
3066 ? off < cbLimit && off >= 0xfff
3067 : off + 2 < cbLimit && off >= 0xffd)
3068 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + RT_MAX(off, X86_PAGE_SIZE), 0 /*cbIpAdjust*/);
3069 /* #GP/#SS on limit or base. */
3070 else if (pWorker->fSs)
3071 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
3072 else
3073 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3074
3075 g_usBs3TestStep++;
3076
3077 /* Set DS to 0 and check that we get #GP(0). */
3078 if (!pWorker->fSs)
3079 {
3080 Ctx.ds = 0;
3081 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3082 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3083 Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
3084 g_usBs3TestStep++;
3085 }
3086 }
3087 }
3088
3089 /* Expand down. */
3090 pbTest -= X86_PAGE_SIZE; /* Note! we're backing up a page to simplify things */
3091 uFlatTest -= X86_PAGE_SIZE;
3092
3093 Bs3GdteTestPage00.Gen.u4Type = X86_SEL_TYPE_RW_DOWN_ACC;
3094 Bs3GdteTestPage00.Gen.u16BaseLow = (uint16_t)uFlatTest;
3095 Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
3096 Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
3097
3098 for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
3099 {
3100 CtxUdExpected.rbx.u = Ctx.rbx.u = off;
3101 for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
3102 {
3103 Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
3104 Bs3MemSet(&pbTest[X86_PAGE_SIZE], bFiller1, cbIdtr * 2);
3105 if (off >= X86_PAGE_SIZE)
3106 Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
3107 else if (off > X86_PAGE_SIZE - cbIdtr)
3108 Bs3MemCpy(&pbTest[X86_PAGE_SIZE], &pbBufRestore[X86_PAGE_SIZE - off], cbIdtr - (X86_PAGE_SIZE - off));
3109 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3110 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3111 if (bRing != 0)
3112 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3113 else if (cbLimit < off && off >= X86_PAGE_SIZE)
3114 {
3115 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3116 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
3117 Bs3TestFailedF("Mismatch (%s, #10): expected %.*Rhxs, got %.*Rhxs\n",
3118 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3119 }
3120 else if (cbLimit < off && off < X86_PAGE_SIZE)
3121 bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, 0, uFlatTest + off, 0 /*cbIpAdjust*/);
3122 else if (pWorker->fSs)
3123 bs3CpuBasic2_CompareSsCtx(&TrapCtx, &Ctx, 0, false /*f486ResumeFlagHint*/);
3124 else
3125 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3126 g_usBs3TestStep++;
3127 }
3128 }
3129
3130 pbTest += X86_PAGE_SIZE;
3131 uFlatTest += X86_PAGE_SIZE;
3132 }
3133
3134 Bs3MemGuardedTestPageFree(pbTest);
3135 }
3136
3137 /*
3138 * Check non-canonical 64-bit space.
3139 */
3140 if ( BS3_MODE_IS_64BIT_CODE(bTestMode)
3141 && (pbTest = (uint8_t BS3_FAR *)Bs3PagingSetupCanonicalTraps()) != NULL)
3142 {
3143 /* Make our references relative to the gap. */
3144 pbTest += g_cbBs3PagingOneCanonicalTrap;
3145
3146 /* Hit it from below. */
3147 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
3148 {
3149 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0x0000800000000000) + off;
3150 Bs3MemSet(&pbTest[-64], bFiller1, 64*2);
3151 Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
3152 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3153 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3154 if (off + cbIdtr > 0 || bRing != 0)
3155 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3156 else
3157 {
3158 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3159 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
3160 Bs3TestFailedF("Mismatch (%s, #11): expected %.*Rhxs, got %.*Rhxs\n",
3161 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3162 }
3163 }
3164
3165 /* Hit it from above. */
3166 for (off = -cbIdtr - 8; off < cbIdtr + 8; off++)
3167 {
3168 Ctx.rbx.u = CtxUdExpected.rbx.u = UINT64_C(0xffff800000000000) + off;
3169 Bs3MemSet(&pbTest[-64], bFiller1, 64*2);
3170 Bs3MemCpy(&pbTest[off], pbBufRestore, cbIdtr);
3171 Bs3MemSet(abBufSave, bFiller2, sizeof(abBufSave));
3172 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3173 if (off < 0 || bRing != 0)
3174 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3175 else
3176 {
3177 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3178 if (Bs3MemCmp(pbBufSave, abExpectedFilled, cbIdtr * 2) != 0)
3179 Bs3TestFailedF("Mismatch (%s, #19): expected %.*Rhxs, got %.*Rhxs\n",
3180 pWorker->pszDesc, cbIdtr*2, abExpectedFilled, cbIdtr*2, pbBufSave);
3181 }
3182 }
3183
3184 }
3185}
3186
3187
3188static void bs3CpuBasic2_lidt_lgdt_Common(uint8_t bTestMode, BS3CB2SIDTSGDT const BS3_FAR *paWorkers, unsigned cWorkers,
3189 void const *pvRestore, size_t cbRestore, uint8_t const *pbExpected)
3190{
3191 unsigned idx;
3192 unsigned bRing;
3193 unsigned iStep = 0;
3194
3195 /* Note! We skip the SS checks for ring-0 since we badly mess up SS in the
3196 test and don't want to bother with double faults. */
3197 for (bRing = BS3_MODE_IS_V86(bTestMode) ? 3 : 0; bRing <= 3; bRing++)
3198 {
3199 for (idx = 0; idx < cWorkers; idx++)
3200 if ( (paWorkers[idx].bMode & (bTestMode & BS3_MODE_CODE_MASK))
3201 && (!paWorkers[idx].fSs || bRing != 0 /** @todo || BS3_MODE_IS_64BIT_SYS(bTestMode)*/ )
3202 && ( !(paWorkers[idx].fFlags & BS3CB2SIDTSGDT_F_386PLUS)
3203 || ( bTestMode > BS3_MODE_PE16
3204 || ( bTestMode == BS3_MODE_PE16
3205 && (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386)) ) )
3206 {
3207 //Bs3TestPrintf("idx=%-2d fpfnWorker=%p fSs=%d cbInstr=%d\n",
3208 // idx, paWorkers[idx].fpfnWorker, paWorkers[idx].fSs, paWorkers[idx].cbInstr);
3209 g_usBs3TestStep = iStep;
3210 bs3CpuBasic2_lidt_lgdt_One(&paWorkers[idx], bTestMode, bRing, pvRestore, cbRestore, pbExpected);
3211 iStep += 1000;
3212 }
3213 if (BS3_MODE_IS_RM_SYS(bTestMode))
3214 break;
3215 }
3216}
3217
3218
3219BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_lidt)(uint8_t bMode)
3220{
3221 union
3222 {
3223 RTIDTR Idtr;
3224 uint8_t ab[32]; /* At least cbIdtr*2! */
3225 } Expected;
3226
3227 //if (bMode != BS3_MODE_LM64) return 0;
3228 bs3CpuBasic2_SetGlobals(bMode);
3229
3230 /*
3231 * Pass to common worker which is only compiled once per mode.
3232 */
3233 Bs3MemZero(&Expected, sizeof(Expected));
3234 ASMGetIDTR(&Expected.Idtr);
3235
3236 if (BS3_MODE_IS_RM_SYS(bMode))
3237 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3238 &Bs3Lidt_Ivt, sizeof(Bs3Lidt_Ivt), Expected.ab);
3239 else if (BS3_MODE_IS_16BIT_SYS(bMode))
3240 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3241 &Bs3Lidt_Idt16, sizeof(Bs3Lidt_Idt16), Expected.ab);
3242 else if (BS3_MODE_IS_32BIT_SYS(bMode))
3243 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3244 &Bs3Lidt_Idt32, sizeof(Bs3Lidt_Idt32), Expected.ab);
3245 else
3246 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLidtWorkers, RT_ELEMENTS(g_aLidtWorkers),
3247 &Bs3Lidt_Idt64, sizeof(Bs3Lidt_Idt64), Expected.ab);
3248
3249 /*
3250 * Re-initialize the IDT.
3251 */
3252 Bs3TrapReInit();
3253 return 0;
3254}
3255
3256
3257BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_lgdt)(uint8_t bMode)
3258{
3259 union
3260 {
3261 RTGDTR Gdtr;
3262 uint8_t ab[32]; /* At least cbIdtr*2! */
3263 } Expected;
3264
3265 //if (!BS3_MODE_IS_64BIT_SYS(bMode)) return 0;
3266 bs3CpuBasic2_SetGlobals(bMode);
3267
3268 /*
3269 * Pass to common worker which is only compiled once per mode.
3270 */
3271 if (BS3_MODE_IS_RM_SYS(bMode))
3272 ASMSetGDTR((PRTGDTR)&Bs3LgdtDef_Gdt);
3273 Bs3MemZero(&Expected, sizeof(Expected));
3274 ASMGetGDTR(&Expected.Gdtr);
3275
3276 bs3CpuBasic2_lidt_lgdt_Common(bMode, g_aLgdtWorkers, RT_ELEMENTS(g_aLgdtWorkers),
3277 &Bs3LgdtDef_Gdt, sizeof(Bs3LgdtDef_Gdt), Expected.ab);
3278
3279 /*
3280 * Re-initialize the IDT.
3281 */
3282 Bs3TrapReInit();
3283 return 0;
3284}
3285
/** Buffer for constructing 16-, 32- or 64-bit IRET stack frames on.
 * Sized so even the largest (five 64-bit entries for a 64-bit inter-level
 * frame) fits with room to spare. */
typedef union IRETBUF
{
    uint64_t au64[6];  /* max req is 5 */
    uint32_t au32[12]; /* max req is 9 */
    uint16_t au16[24]; /* max req is 5 */
    uint8_t  ab[48];
} IRETBUF;
/** Pointer (far) to an IRET frame buffer. */
typedef IRETBUF BS3_FAR *PIRETBUF;
3294
3295
3296static void iretbuf_SetupFrame(PIRETBUF pIretBuf, unsigned const cbPop,
3297 uint16_t uCS, uint64_t uPC, uint32_t fEfl, uint16_t uSS, uint64_t uSP)
3298{
3299 if (cbPop == 2)
3300 {
3301 pIretBuf->au16[0] = (uint16_t)uPC;
3302 pIretBuf->au16[1] = uCS;
3303 pIretBuf->au16[2] = (uint16_t)fEfl;
3304 pIretBuf->au16[3] = (uint16_t)uSP;
3305 pIretBuf->au16[4] = uSS;
3306 }
3307 else if (cbPop != 8)
3308 {
3309 pIretBuf->au32[0] = (uint32_t)uPC;
3310 pIretBuf->au16[1*2] = uCS;
3311 pIretBuf->au32[2] = (uint32_t)fEfl;
3312 pIretBuf->au32[3] = (uint32_t)uSP;
3313 pIretBuf->au16[4*2] = uSS;
3314 }
3315 else
3316 {
3317 pIretBuf->au64[0] = uPC;
3318 pIretBuf->au16[1*4] = uCS;
3319 pIretBuf->au64[2] = fEfl;
3320 pIretBuf->au64[3] = uSP;
3321 pIretBuf->au16[4*4] = uSS;
3322 }
3323}
3324
3325
3326static void bs3CpuBasic2_iret_Worker(uint8_t bTestMode, FPFNBS3FAR pfnIret, unsigned const cbPop,
3327 PIRETBUF pIretBuf, const char BS3_FAR *pszDesc)
3328{
3329 BS3TRAPFRAME TrapCtx;
3330 BS3REGCTX Ctx;
3331 BS3REGCTX CtxUdExpected;
3332 BS3REGCTX TmpCtx;
3333 BS3REGCTX TmpCtxExpected;
3334 uint8_t abLowUd[8];
3335 uint8_t abLowIret[8];
3336 FPFNBS3FAR pfnUdLow = (FPFNBS3FAR)abLowUd;
3337 FPFNBS3FAR pfnIretLow = (FPFNBS3FAR)abLowIret;
3338 unsigned const cbSameCplFrame = BS3_MODE_IS_64BIT_CODE(bTestMode) ? 5*cbPop : 3*cbPop;
3339 bool const fUseLowCode = cbPop == 2 && !BS3_MODE_IS_16BIT_CODE(bTestMode);
3340 int iRingDst;
3341 int iRingSrc;
3342 uint16_t uDplSs;
3343 uint16_t uRplCs;
3344 uint16_t uRplSs;
3345// int i;
3346 uint8_t BS3_FAR *pbTest;
3347
3348 NOREF(abLowUd);
3349#define IRETBUF_SET_SEL(a_idx, a_uValue) \
3350 do { *(uint16_t)&pIretBuf->ab[a_idx * cbPop] = (a_uValue); } while (0)
3351#define IRETBUF_SET_REG(a_idx, a_uValue) \
3352 do { uint8_t const BS3_FAR *pbTmp = &pIretBuf->ab[a_idx * cbPop]; \
3353 if (cbPop == 2) *(uint16_t)pbTmp = (uint16_t)(a_uValue); \
3354 else if (cbPop != 8) *(uint32_t)pbTmp = (uint32_t)(a_uValue); \
3355 else *(uint64_t)pbTmp = (a_uValue); \
3356 } while (0)
3357
3358 /* make sure they're allocated */
3359 Bs3MemZero(&Ctx, sizeof(Ctx));
3360 Bs3MemZero(&CtxUdExpected, sizeof(CtxUdExpected));
3361 Bs3MemZero(&TmpCtx, sizeof(TmpCtx));
3362 Bs3MemZero(&TmpCtxExpected, sizeof(TmpCtxExpected));
3363 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
3364
3365 /*
3366 * When dealing with 16-bit irets in 32-bit or 64-bit mode, we must have
3367 * copies of both iret and ud in the first 64KB of memory. The stack is
3368 * below 64KB, so we'll just copy the instructions onto the stack.
3369 */
3370 Bs3MemCpy(abLowUd, bs3CpuBasic2_ud2, 4);
3371 Bs3MemCpy(abLowIret, pfnIret, 4);
3372
3373 /*
3374 * Create a context (stack is irrelevant, we'll mainly be using pIretBuf).
3375 * - Point the context at our iret instruction.
3376 * - Point SS:xSP at pIretBuf.
3377 */
3378 Bs3RegCtxSaveEx(&Ctx, bTestMode, 0);
3379 if (!fUseLowCode)
3380 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pfnIret);
3381 else
3382 Bs3RegCtxSetRipCsFromCurPtr(&Ctx, pfnIretLow);
3383 if (BS3_MODE_IS_16BIT_SYS(bTestMode))
3384 g_uBs3TrapEipHint = Ctx.rip.u32;
3385 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsp, &Ctx.ss, pIretBuf);
3386
3387 /*
3388 * The first success (UD) context keeps the same code bit-count as the iret.
3389 */
3390 Bs3MemCpy(&CtxUdExpected, &Ctx, sizeof(Ctx));
3391 if (!fUseLowCode)
3392 Bs3RegCtxSetRipCsFromLnkPtr(&CtxUdExpected, bs3CpuBasic2_ud2);
3393 else
3394 Bs3RegCtxSetRipCsFromCurPtr(&CtxUdExpected, pfnUdLow);
3395 CtxUdExpected.rsp.u += cbSameCplFrame;
3396
3397 /*
3398 * Check that it works at all.
3399 */
3400 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3401 CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3402
3403 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3404 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3405 g_usBs3TestStep++;
3406
3407 if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
3408 {
3409 /* Selectors are modified when switching rings, so we need to know
3410 what we're dealing with there. */
3411 if ( !BS3_SEL_IS_IN_R0_RANGE(Ctx.cs) || !BS3_SEL_IS_IN_R0_RANGE(Ctx.ss)
3412 || !BS3_SEL_IS_IN_R0_RANGE(Ctx.ds) || !BS3_SEL_IS_IN_R0_RANGE(Ctx.es))
3413 Bs3TestFailedF("Expected R0 CS, SS, DS and ES; not %#x, %#x, %#x and %#x\n", Ctx.cs, Ctx.ss, Ctx.ds, Ctx.es);
3414 if (Ctx.fs || Ctx.gs)
3415 Bs3TestFailed("Expected R0 FS and GS to be 0!\n");
3416
3417 /*
3418 * Test returning to outer rings if protected mode.
3419 */
3420 Bs3MemCpy(&TmpCtx, &Ctx, sizeof(TmpCtx));
3421 Bs3MemCpy(&TmpCtxExpected, &CtxUdExpected, sizeof(TmpCtxExpected));
3422 for (iRingDst = 3; iRingDst >= 0; iRingDst--)
3423 {
3424 Bs3RegCtxConvertToRingX(&TmpCtxExpected, iRingDst);
3425 TmpCtxExpected.ds = iRingDst ? 0 : TmpCtx.ds;
3426 TmpCtx.es = TmpCtxExpected.es;
3427 iretbuf_SetupFrame(pIretBuf, cbPop, TmpCtxExpected.cs, TmpCtxExpected.rip.u,
3428 TmpCtxExpected.rflags.u32, TmpCtxExpected.ss, TmpCtxExpected.rsp.u);
3429 Bs3TrapSetJmpAndRestore(&TmpCtx, &TrapCtx);
3430 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &TmpCtxExpected);
3431 g_usBs3TestStep++;
3432 }
3433
3434 /*
3435 * Check CS.RPL and SS.RPL.
3436 */
3437 for (iRingDst = 3; iRingDst >= 0; iRingDst--)
3438 {
3439 uint16_t const uDstSsR0 = (CtxUdExpected.ss & BS3_SEL_RING_SUB_MASK) + BS3_SEL_R0_FIRST;
3440 Bs3MemCpy(&TmpCtxExpected, &CtxUdExpected, sizeof(TmpCtxExpected));
3441 Bs3RegCtxConvertToRingX(&TmpCtxExpected, iRingDst);
3442 for (iRingSrc = 3; iRingSrc >= 0; iRingSrc--)
3443 {
3444 Bs3MemCpy(&TmpCtx, &Ctx, sizeof(TmpCtx));
3445 Bs3RegCtxConvertToRingX(&TmpCtx, iRingSrc);
3446 TmpCtx.es = TmpCtxExpected.es;
3447 TmpCtxExpected.ds = iRingDst != iRingSrc ? 0 : TmpCtx.ds;
3448 for (uRplCs = 0; uRplCs <= 3; uRplCs++)
3449 {
3450 uint16_t const uSrcEs = TmpCtx.es;
3451 uint16_t const uDstCs = (TmpCtxExpected.cs & X86_SEL_MASK_OFF_RPL) | uRplCs;
3452 //Bs3TestPrintf("dst=%d src=%d rplCS=%d\n", iRingDst, iRingSrc, uRplCs);
3453
3454 /* CS.RPL */
3455 iretbuf_SetupFrame(pIretBuf, cbPop, uDstCs, TmpCtxExpected.rip.u, TmpCtxExpected.rflags.u32,
3456 TmpCtxExpected.ss, TmpCtxExpected.rsp.u);
3457 Bs3TrapSetJmpAndRestore(&TmpCtx, &TrapCtx);
3458 if (uRplCs == iRingDst && iRingDst >= iRingSrc)
3459 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &TmpCtxExpected);
3460 else
3461 {
3462 if (iRingDst < iRingSrc)
3463 TmpCtx.es = 0;
3464 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &TmpCtx, uDstCs & X86_SEL_MASK_OFF_RPL);
3465 TmpCtx.es = uSrcEs;
3466 }
3467 g_usBs3TestStep++;
3468
3469 /* SS.RPL */
3470 if (iRingDst != iRingSrc || BS3_MODE_IS_64BIT_CODE(bTestMode))
3471 {
3472 uint16_t uSavedDstSs = TmpCtxExpected.ss;
3473 for (uRplSs = 0; uRplSs <= 3; uRplSs++)
3474 {
3475 /* SS.DPL (iRingDst == CS.DPL) */
3476 for (uDplSs = 0; uDplSs <= 3; uDplSs++)
3477 {
3478 uint16_t const uDstSs = ((uDplSs << BS3_SEL_RING_SHIFT) | uRplSs) + uDstSsR0;
3479 //Bs3TestPrintf("dst=%d src=%d rplCS=%d rplSS=%d dplSS=%d dst %04x:%08RX64 %08RX32 %04x:%08RX64\n",
3480 // iRingDst, iRingSrc, uRplCs, uRplSs, uDplSs, uDstCs, TmpCtxExpected.rip.u,
3481 // TmpCtxExpected.rflags.u32, uDstSs, TmpCtxExpected.rsp.u);
3482
3483 iretbuf_SetupFrame(pIretBuf, cbPop, uDstCs, TmpCtxExpected.rip.u,
3484 TmpCtxExpected.rflags.u32, uDstSs, TmpCtxExpected.rsp.u);
3485 Bs3TrapSetJmpAndRestore(&TmpCtx, &TrapCtx);
3486 if (uRplCs != iRingDst || iRingDst < iRingSrc)
3487 {
3488 if (iRingDst < iRingSrc)
3489 TmpCtx.es = 0;
3490 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &TmpCtx, uDstCs & X86_SEL_MASK_OFF_RPL);
3491 }
3492 else if (uRplSs != iRingDst || uDplSs != iRingDst)
3493 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &TmpCtx, uDstSs & X86_SEL_MASK_OFF_RPL);
3494 else
3495 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &TmpCtxExpected);
3496 TmpCtx.es = uSrcEs;
3497 g_usBs3TestStep++;
3498 }
3499 }
3500
3501 TmpCtxExpected.ss = uSavedDstSs;
3502 }
3503 }
3504 }
3505 }
3506 }
3507
3508 /*
3509 * Special 64-bit checks.
3510 */
3511 if (BS3_MODE_IS_64BIT_CODE(bTestMode))
3512 {
3513 /* The VM flag is completely ignored. */
3514 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3515 CtxUdExpected.rflags.u32 | X86_EFL_VM, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3516 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3517 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3518 g_usBs3TestStep++;
3519
3520 /* The NT flag can be loaded just fine. */
3521 CtxUdExpected.rflags.u32 |= X86_EFL_NT;
3522 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3523 CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3524 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3525 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
3526 CtxUdExpected.rflags.u32 &= ~X86_EFL_NT;
3527 g_usBs3TestStep++;
3528
3529 /* However, we'll #GP(0) if it's already set (in RFLAGS) when executing IRET. */
3530 Ctx.rflags.u32 |= X86_EFL_NT;
3531 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3532 CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3533 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3534 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3535 g_usBs3TestStep++;
3536
3537 /* The NT flag #GP(0) should trump all other exceptions - pit it against #PF. */
3538 pbTest = (uint8_t BS3_FAR *)Bs3MemGuardedTestPageAlloc(BS3MEMKIND_TILED);
3539 if (pbTest != NULL)
3540 {
3541 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsp, &Ctx.ss, &pbTest[X86_PAGE_SIZE]);
3542 iretbuf_SetupFrame(pIretBuf, cbPop, CtxUdExpected.cs, CtxUdExpected.rip.u,
3543 CtxUdExpected.rflags.u32, CtxUdExpected.ss, CtxUdExpected.rsp.u);
3544 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
3545 bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
3546 g_usBs3TestStep++;
3547
3548 Bs3RegCtxSetGrpSegFromCurPtr(&Ctx, &Ctx.rsp, &Ctx.ss, pIretBuf);
3549 Bs3MemGuardedTestPageFree(pbTest);
3550 }
3551 Ctx.rflags.u32 &= ~X86_EFL_NT;
3552 }
3553}
3554
3555
3556BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuBasic2_iret)(uint8_t bMode)
3557{
3558 struct
3559 {
3560 uint8_t abExtraStack[4096]; /**< we've got ~30KB of stack, so 4KB for the trap handlers++ is not a problem. */
3561 IRETBUF IRetBuf;
3562 uint8_t abGuard[32];
3563 } uBuf;
3564 size_t cbUnused;
3565
3566 //if (bMode != BS3_MODE_LM64) return BS3TESTDOMODE_SKIPPED;
3567 bs3CpuBasic2_SetGlobals(bMode);
3568
3569 /*
3570 * Primary instruction form.
3571 */
3572 Bs3MemSet(&uBuf, 0xaa, sizeof(uBuf));
3573 Bs3MemSet(uBuf.abGuard, 0x88, sizeof(uBuf.abGuard));
3574 if (BS3_MODE_IS_16BIT_CODE(bMode))
3575 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret, 2, &uBuf.IRetBuf, "iret");
3576 else if (BS3_MODE_IS_32BIT_CODE(bMode))
3577 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret, 4, &uBuf.IRetBuf, "iretd");
3578 else
3579 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_rexw, 8, &uBuf.IRetBuf, "o64 iret");
3580
3581 BS3_ASSERT(ASMMemIsAllU8(uBuf.abGuard, sizeof(uBuf.abGuard), 0x88));
3582 cbUnused = (uintptr_t)ASMMemFirstMismatchingU8(uBuf.abExtraStack, sizeof(uBuf.abExtraStack) + sizeof(uBuf.IRetBuf), 0xaa)
3583 - (uintptr_t)uBuf.abExtraStack;
3584 if (cbUnused < 2048)
3585 Bs3TestFailedF("cbUnused=%u #%u\n", cbUnused, 1);
3586
3587 /*
3588 * Secondary variation: opsize prefixed.
3589 */
3590 Bs3MemSet(&uBuf, 0xaa, sizeof(uBuf));
3591 Bs3MemSet(uBuf.abGuard, 0x88, sizeof(uBuf.abGuard));
3592 if (BS3_MODE_IS_16BIT_CODE(bMode) && (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386)
3593 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_opsize, 4, &uBuf.IRetBuf, "o32 iret");
3594 else if (BS3_MODE_IS_32BIT_CODE(bMode))
3595 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_opsize, 2, &uBuf.IRetBuf, "o16 iret");
3596 else if (BS3_MODE_IS_64BIT_CODE(bMode))
3597 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret, 4, &uBuf.IRetBuf, "iretd");
3598 BS3_ASSERT(ASMMemIsAllU8(uBuf.abGuard, sizeof(uBuf.abGuard), 0x88));
3599 cbUnused = (uintptr_t)ASMMemFirstMismatchingU8(uBuf.abExtraStack, sizeof(uBuf.abExtraStack) + sizeof(uBuf.IRetBuf), 0xaa)
3600 - (uintptr_t)uBuf.abExtraStack;
3601 if (cbUnused < 2048)
3602 Bs3TestFailedF("cbUnused=%u #%u\n", cbUnused, 2);
3603
3604 /*
3605 * Third variation: 16-bit in 64-bit mode (truly unlikely)
3606 */
3607 if (BS3_MODE_IS_64BIT_CODE(bMode))
3608 {
3609 Bs3MemSet(&uBuf, 0xaa, sizeof(uBuf));
3610 Bs3MemSet(uBuf.abGuard, 0x88, sizeof(uBuf.abGuard));
3611 bs3CpuBasic2_iret_Worker(bMode, bs3CpuBasic2_iret_opsize, 2, &uBuf.IRetBuf, "o16 iret");
3612 BS3_ASSERT(ASMMemIsAllU8(uBuf.abGuard, sizeof(uBuf.abGuard), 0x88));
3613 cbUnused = (uintptr_t)ASMMemFirstMismatchingU8(uBuf.abExtraStack, sizeof(uBuf.abExtraStack) + sizeof(uBuf.IRetBuf), 0xaa)
3614 - (uintptr_t)uBuf.abExtraStack;
3615 if (cbUnused < 2048)
3616 Bs3TestFailedF("cbUnused=%u #%u\n", cbUnused, 3);
3617 }
3618
3619 return 0;
3620}
3621
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette