source: vbox/trunk/src/VBox/ValidationKit/bootsectors/bs3-cpu-weird-1-x0.c@ 102778

Last change on this file was 102778, checked in by vboxsync, 10 months ago

ValKit/bs3-cpu-weird-1: Added a PUSH/POP xSP testcase with operand size override variations. bugref:10371

1/* $Id: bs3-cpu-weird-1-x0.c 102778 2024-01-06 01:43:05Z vboxsync $ */
2/** @file
3 * BS3Kit - bs3-cpu-weird-1, C test driver code (16-bit).
4 */
5
6/*
7 * Copyright (C) 2007-2023 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * The contents of this file may alternatively be used under the terms
26 * of the Common Development and Distribution License Version 1.0
27 * (CDDL), a copy of it is provided in the "COPYING.CDDL" file included
28 * in the VirtualBox distribution, in which case the provisions of the
29 * CDDL are applicable instead of those of the GPL.
30 *
31 * You may elect to license modified versions of this file under the
32 * terms and conditions of either the GPL or the CDDL or both.
33 *
34 * SPDX-License-Identifier: GPL-3.0-only OR CDDL-1.0
35 */
36
37
38/*********************************************************************************************************************************
39* Header Files *
40*********************************************************************************************************************************/
41#define BS3_USE_X0_TEXT_SEG
42#include <bs3kit.h>
43#include <bs3-cmn-memory.h>
44#include <iprt/asm.h>
45#include <iprt/asm-amd64-x86.h>
46
47
48/*********************************************************************************************************************************
49* Defined Constants And Macros *
50*********************************************************************************************************************************/
51#undef CHECK_MEMBER
52#define CHECK_MEMBER(a_szName, a_szFmt, a_Actual, a_Expected) \
53 do \
54 { \
55 if ((a_Actual) == (a_Expected)) { /* likely */ } \
56 else bs3CpuWeird1_FailedF(a_szName "=" a_szFmt " expected " a_szFmt, (a_Actual), (a_Expected)); \
57 } while (0)
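/* Typical use, as in the compare functions below:
       CHECK_MEMBER("bXcpt", "%#04x", pTrapCtx->bXcpt, bXcpt); */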
58
59
60/*********************************************************************************************************************************
61* External Symbols *
62*********************************************************************************************************************************/
63extern FNBS3FAR bs3CpuWeird1_InhibitedInt80_c16;
64extern FNBS3FAR bs3CpuWeird1_InhibitedInt80_c32;
65extern FNBS3FAR bs3CpuWeird1_InhibitedInt80_c64;
66extern FNBS3FAR bs3CpuWeird1_InhibitedInt80_int80_c16;
67extern FNBS3FAR bs3CpuWeird1_InhibitedInt80_int80_c32;
68extern FNBS3FAR bs3CpuWeird1_InhibitedInt80_int80_c64;
69
70extern FNBS3FAR bs3CpuWeird1_InhibitedInt3_c16;
71extern FNBS3FAR bs3CpuWeird1_InhibitedInt3_c32;
72extern FNBS3FAR bs3CpuWeird1_InhibitedInt3_c64;
73extern FNBS3FAR bs3CpuWeird1_InhibitedInt3_int3_c16;
74extern FNBS3FAR bs3CpuWeird1_InhibitedInt3_int3_c32;
75extern FNBS3FAR bs3CpuWeird1_InhibitedInt3_int3_c64;
76
77extern FNBS3FAR bs3CpuWeird1_InhibitedBp_c16;
78extern FNBS3FAR bs3CpuWeird1_InhibitedBp_c32;
79extern FNBS3FAR bs3CpuWeird1_InhibitedBp_c64;
80extern FNBS3FAR bs3CpuWeird1_InhibitedBp_int3_c16;
81extern FNBS3FAR bs3CpuWeird1_InhibitedBp_int3_c32;
82extern FNBS3FAR bs3CpuWeird1_InhibitedBp_int3_c64;
83
84
85/*********************************************************************************************************************************
86* Global Variables *
87*********************************************************************************************************************************/
88static const char BS3_FAR *g_pszTestMode = (const char *)1;
89static BS3CPUVENDOR g_enmCpuVendor = BS3CPUVENDOR_INTEL;
90static bool g_fVME = false;
91//static uint8_t g_bTestMode = 1;
92//static bool g_f16BitSys = 1;
93
94
95
96/**
97 * Sets globals according to the mode.
98 *
99 * @param bTestMode The test mode.
100 */
101static void bs3CpuWeird1_SetGlobals(uint8_t bTestMode)
102{
103// g_bTestMode = bTestMode;
104 g_pszTestMode = Bs3GetModeName(bTestMode);
105// g_f16BitSys = BS3_MODE_IS_16BIT_SYS(bTestMode);
106 g_usBs3TestStep = 0;
107 g_enmCpuVendor = Bs3GetCpuVendor();
108 g_fVME = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486
109 && (Bs3RegGetCr4() & X86_CR4_VME);
110}
111
112
113/**
114 * Wrapper around Bs3TestFailedF that prefixes the error with g_usBs3TestStep
115 * and g_pszTestMode.
116 */
117static void bs3CpuWeird1_FailedF(const char *pszFormat, ...)
118{
119 va_list va;
120
121 char szTmp[168];
122 va_start(va, pszFormat);
123 Bs3StrPrintfV(szTmp, sizeof(szTmp), pszFormat, va);
124 va_end(va);
125
126 Bs3TestFailedF("%u - %s: %s", g_usBs3TestStep, g_pszTestMode, szTmp);
127}
128
129
130/**
131 * Compares interrupt stuff.
132 */
133static void bs3CpuWeird1_CompareDbgInhibitRingXfer(PCBS3TRAPFRAME pTrapCtx, PCBS3REGCTX pStartCtx, uint8_t bXcpt,
134 int8_t cbPcAdjust, int8_t cbSpAdjust, uint32_t uDr6Expected,
135 uint8_t cbIretFrame, uint64_t uHandlerRsp)
136{
137 uint32_t uDr6 = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386 ? Bs3RegGetDr6() : X86_DR6_INIT_VAL;
138 uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
139 CHECK_MEMBER("bXcpt", "%#04x", pTrapCtx->bXcpt, bXcpt);
140 CHECK_MEMBER("bErrCd", "%#06RX64", pTrapCtx->uErrCd, 0);
141 CHECK_MEMBER("cbIretFrame", "%#04x", pTrapCtx->cbIretFrame, cbIretFrame);
142 CHECK_MEMBER("uHandlerRsp", "%#06RX64", pTrapCtx->uHandlerRsp, uHandlerRsp);
143 if (uDr6 != uDr6Expected)
144 bs3CpuWeird1_FailedF("dr6=%#010RX32 expected %#010RX32", uDr6, uDr6Expected);
145 Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, pStartCtx, cbPcAdjust, cbSpAdjust, 0 /*fExtraEfl*/, g_pszTestMode, g_usBs3TestStep);
146 if (Bs3TestSubErrorCount() != cErrorsBefore)
147 {
148 Bs3TrapPrintFrame(pTrapCtx);
149 Bs3TestPrintf("DR6=%#RX32; Handler: CS=%04RX16 SS:ESP=%04RX16:%08RX64 EFL=%RX64 cbIret=%#x\n",
150 uDr6, pTrapCtx->uHandlerCs, pTrapCtx->uHandlerSs, pTrapCtx->uHandlerRsp,
151 pTrapCtx->fHandlerRfl, pTrapCtx->cbIretFrame);
152#if 0
153 Bs3TestPrintf("Halting in CompareIntCtx: bXcpt=%#x\n", bXcpt);
154 ASMHalt();
155#endif
156 }
157}
158
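/**
 * Gets the handler (E)IP for the given vector, reading either the real-mode /
 * V8086 IVT at linear address zero or the 16-, 32- or 64-bit IDT of the
 * current system mode, so the expected handler entry point can be predicted.
 */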
159static uint64_t bs3CpuWeird1_GetTrapHandlerEIP(uint8_t bXcpt, uint8_t bMode, bool fV86)
160{
161 if ( BS3_MODE_IS_RM_SYS(bMode)
162 || (fV86 && BS3_MODE_IS_V86(bMode)))
163 {
164 PRTFAR16 paIvt = (PRTFAR16)Bs3XptrFlatToCurrent(0);
165 return paIvt[bXcpt].off;
166 }
167 if (BS3_MODE_IS_16BIT_SYS(bMode))
168 return Bs3Idt16[bXcpt].Gate.u16OffsetLow;
169 if (BS3_MODE_IS_32BIT_SYS(bMode))
170 return RT_MAKE_U32(Bs3Idt32[bXcpt].Gate.u16OffsetLow, Bs3Idt32[bXcpt].Gate.u16OffsetHigh);
171 return RT_MAKE_U64(RT_MAKE_U32(Bs3Idt64[bXcpt].Gate.u16OffsetLow, Bs3Idt64[bXcpt].Gate.u16OffsetHigh),
172 Bs3Idt64[bXcpt].Gate.u32OffsetTop);
173}
174
175
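/**
 * Worker for probing delayed #DB delivery across a ring transfer.
 *
 * The test code templates (see the external symbols above) load SS (a POP SS
 * or MOV SS) and immediately follow it with the ring transfer instruction
 * (INT 80h, INT3 or the single-byte 0CCh breakpoint), so any #DB raised on
 * the SS load falls into the interrupt inhibit window and is only delivered
 * after the transfer, which is what the test steps below poke at.
 */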
176static int bs3CpuWeird1_DbgInhibitRingXfer_Worker(uint8_t bTestMode, uint8_t bIntGate, uint8_t cbRingInstr, int8_t cbSpAdjust,
177 FPFNBS3FAR pfnTestCode, FPFNBS3FAR pfnTestLabel)
178{
179 BS3TRAPFRAME TrapCtx;
180 BS3TRAPFRAME TrapExpect;
181 BS3REGCTX Ctx;
182 uint8_t bSavedDpl;
183 uint8_t const offTestLabel = BS3_FP_OFF(pfnTestLabel) - BS3_FP_OFF(pfnTestCode);
184 //uint8_t const cbIretFrameSame = BS3_MODE_IS_RM_SYS(bTestMode) ? 6
185 // : BS3_MODE_IS_16BIT_SYS(bTestMode) ? 12
186 // : BS3_MODE_IS_64BIT_SYS(bTestMode) ? 40 : 12;
187 uint8_t cbIretFrameInt;
188 uint8_t cbIretFrameIntDb;
189 uint8_t const cbIretFrameSame = BS3_MODE_IS_16BIT_SYS(bTestMode) ? 6
190 : BS3_MODE_IS_32BIT_SYS(bTestMode) ? 12 : 40;
191 uint8_t const cbSpAdjSame = BS3_MODE_IS_64BIT_SYS(bTestMode) ? 48 : cbIretFrameSame;
192 uint8_t bVmeMethod = 0;
193 uint64_t uHandlerRspInt;
194 uint64_t uHandlerRspIntDb;
195 BS3_XPTR_AUTO(uint32_t, StackXptr);
196
197 /* make sure they're allocated */
198 Bs3MemZero(&Ctx, sizeof(Ctx));
199 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
200 Bs3MemZero(&TrapExpect, sizeof(TrapExpect));
201
202 /*
203 * Make INT xx accessible from DPL 3 and create a ring-3 context that we can work with.
204 */
205 bSavedDpl = Bs3TrapSetDpl(bIntGate, 3);
206
207 Bs3RegCtxSaveEx(&Ctx, bTestMode, 1024);
208 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, pfnTestCode);
209 if (BS3_MODE_IS_16BIT_SYS(bTestMode))
210 g_uBs3TrapEipHint = Ctx.rip.u32;
211 Ctx.rflags.u32 &= ~X86_EFL_RF;
212
213 /* Raw-mode enablers. */
214 Ctx.rflags.u32 |= X86_EFL_IF;
215 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486)
216 Ctx.cr0.u32 |= X86_CR0_WP;
217
218 /* We put the SS value on the stack so we can easily set breakpoints there. */
219 Ctx.rsp.u32 -= 8;
220 BS3_XPTR_SET_FLAT(uint32_t, StackXptr, Ctx.rsp.u32); /* ASSUMES SS.BASE == 0!! */
221
222 /* ring-3 */
223 if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
224 Bs3RegCtxConvertToRingX(&Ctx, 3);
225
226 /* V8086: Set IOPL to 3. */
227 if (BS3_MODE_IS_V86(bTestMode))
228 {
229 Ctx.rflags.u32 |= X86_EFL_IOPL;
230 if (g_fVME)
231 {
232 Bs3RegSetTr(BS3_SEL_TSS32_IRB);
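 /* With CR4.VME set, the interrupt redirection bitmap in the TSS selects how
 INT xx is delivered from V8086 mode: a set bit means regular protected
 mode IDT delivery (SDMv3b 20.3.3 method 4), a clear bit redirects the
 interrupt to the virtual 8086 IVT without leaving V8086 mode (method 5). */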
233#if 0
234 /* SDMv3b, 20.3.3 method 5: */
235 ASMBitClear(&Bs3SharedIntRedirBm, bIntGate);
236 bVmeMethod = 5;
237#else
238 /* SDMv3b, 20.3.3 method 4 (similar to non-VME): */
239 ASMBitSet(&Bs3SharedIntRedirBm, bIntGate);
240 bVmeMethod = 4;
241#endif
242 }
243 }
244
245 /*
246 * Test #0: Test run. Calc expected delayed #DB from it.
247 */
248 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386)
249 {
250 Bs3RegSetDr7(0);
251 Bs3RegSetDr6(X86_DR6_INIT_VAL);
252 }
253 *BS3_XPTR_GET(uint32_t, StackXptr) = Ctx.ss;
254 Bs3TrapSetJmpAndRestore(&Ctx, &TrapExpect);
255 if (TrapExpect.bXcpt != bIntGate)
256 {
257
258 Bs3TestFailedF("%u: bXcpt is %#x, expected %#x!\n", g_usBs3TestStep, TrapExpect.bXcpt, bIntGate);
259 Bs3TrapPrintFrame(&TrapExpect);
260 return 1;
261 }
262
263 cbIretFrameInt = TrapExpect.cbIretFrame;
264 cbIretFrameIntDb = cbIretFrameInt + cbIretFrameSame;
265 uHandlerRspInt = TrapExpect.uHandlerRsp;
266 uHandlerRspIntDb = uHandlerRspInt - cbSpAdjSame;
267
268 TrapExpect.Ctx.bCpl = 0;
269 TrapExpect.Ctx.cs = TrapExpect.uHandlerCs;
270 TrapExpect.Ctx.ss = TrapExpect.uHandlerSs;
271 TrapExpect.Ctx.rsp.u64 = TrapExpect.uHandlerRsp;
272 TrapExpect.Ctx.rflags.u64 = TrapExpect.fHandlerRfl;
273 if (BS3_MODE_IS_V86(bTestMode))
274 {
275 if (bVmeMethod >= 5)
276 {
277 TrapExpect.Ctx.rflags.u32 |= X86_EFL_VM;
278 TrapExpect.Ctx.bCpl = 3;
279 TrapExpect.Ctx.rip.u64 = bs3CpuWeird1_GetTrapHandlerEIP(bIntGate, bTestMode, true);
280 cbIretFrameIntDb = 36;
281 if (BS3_MODE_IS_16BIT_SYS(bTestMode))
282 uHandlerRspIntDb = Bs3Tss16.sp0 - cbIretFrameIntDb;
283 else
284 uHandlerRspIntDb = Bs3Tss32.esp0 - cbIretFrameIntDb;
285 }
286 else
287 {
288 TrapExpect.Ctx.ds = 0;
289 TrapExpect.Ctx.es = 0;
290 TrapExpect.Ctx.fs = 0;
291 TrapExpect.Ctx.gs = 0;
292 }
293 }
294
295 /*
296 * Test #1: Single stepping ring-3. Ignored except for V8086 w/ VME.
297 */
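 /* The single-step #DB belonging to the SS load lands in the inhibit window
 and, going by the expectations below, no #DB is seen at all with regular
 IDT delivery; only VME method 5 delivery (via the V86 IVT) produces the
 delayed #DB with DR6.BS set. */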
298 g_usBs3TestStep++;
299 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386)
300 {
301 Bs3RegSetDr7(0);
302 Bs3RegSetDr6(X86_DR6_INIT_VAL);
303 }
304 *BS3_XPTR_GET(uint32_t, StackXptr) = Ctx.ss;
305 Ctx.rflags.u32 |= X86_EFL_TF;
306
307 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
308 if ( !BS3_MODE_IS_V86(bTestMode)
309 || bVmeMethod < 5)
310 bs3CpuWeird1_CompareDbgInhibitRingXfer(&TrapCtx, &Ctx, bIntGate, offTestLabel + cbRingInstr, cbSpAdjust,
311 X86_DR6_INIT_VAL, cbIretFrameInt, uHandlerRspInt);
312 else
313 {
314 TrapExpect.Ctx.rflags.u32 |= X86_EFL_TF;
315 bs3CpuWeird1_CompareDbgInhibitRingXfer(&TrapCtx, &TrapExpect.Ctx, X86_XCPT_DB, offTestLabel, -2,
316 X86_DR6_INIT_VAL | X86_DR6_BS, cbIretFrameIntDb, uHandlerRspIntDb);
317 TrapExpect.Ctx.rflags.u32 &= ~X86_EFL_TF;
318 }
319
320 Ctx.rflags.u32 &= ~X86_EFL_TF;
321 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80386)
322 {
323 uint32_t uDr6Expect;
324
325 /*
326 * Test #2: Execution breakpoint on ring transition instruction.
327 * This hits on AMD-V (threadripper) but not on VT-x (skylake).
328 */
329 g_usBs3TestStep++;
330 Bs3RegSetDr0(Bs3SelRealModeCodeToFlat(pfnTestLabel));
331 Bs3RegSetDr7(X86_DR7_L0 | X86_DR7_G0 | X86_DR7_RW(0, X86_DR7_RW_EO) | X86_DR7_LEN(0, X86_DR7_LEN_BYTE));
332 Bs3RegSetDr6(X86_DR6_INIT_VAL);
333 *BS3_XPTR_GET(uint32_t, StackXptr) = Ctx.ss;
334
335 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
336 Bs3RegSetDr7(0);
337 if (g_enmCpuVendor == BS3CPUVENDOR_AMD || g_enmCpuVendor == BS3CPUVENDOR_HYGON)
338 bs3CpuWeird1_CompareDbgInhibitRingXfer(&TrapCtx, &Ctx, X86_XCPT_DB, offTestLabel, cbSpAdjust,
339 X86_DR6_INIT_VAL | X86_DR6_B0, cbIretFrameInt, uHandlerRspInt);
340 else
341 bs3CpuWeird1_CompareDbgInhibitRingXfer(&TrapCtx, &Ctx, bIntGate, offTestLabel + cbRingInstr, cbSpAdjust,
342 X86_DR6_INIT_VAL, cbIretFrameInt, uHandlerRspInt);
343
344 /*
345 * Test #3: Same as above, but with the LE and GE flags set.
346 */
347 g_usBs3TestStep++;
348 Bs3RegSetDr0(Bs3SelRealModeCodeToFlat(pfnTestLabel));
349 Bs3RegSetDr7(X86_DR7_L0 | X86_DR7_G0 | X86_DR7_RW(0, X86_DR7_RW_EO) | X86_DR7_LEN(0, X86_DR7_LEN_BYTE) | X86_DR7_LE | X86_DR7_GE);
350 Bs3RegSetDr6(X86_DR6_INIT_VAL);
351 *BS3_XPTR_GET(uint32_t, StackXptr) = Ctx.ss;
352
353 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
354 if (g_enmCpuVendor == BS3CPUVENDOR_AMD || g_enmCpuVendor == BS3CPUVENDOR_HYGON)
355 bs3CpuWeird1_CompareDbgInhibitRingXfer(&TrapCtx, &Ctx, X86_XCPT_DB, offTestLabel, cbSpAdjust,
356 X86_DR6_INIT_VAL | X86_DR6_B0, cbIretFrameInt, uHandlerRspInt);
357 else
358 bs3CpuWeird1_CompareDbgInhibitRingXfer(&TrapCtx, &Ctx, bIntGate, offTestLabel + cbRingInstr, cbSpAdjust,
359 X86_DR6_INIT_VAL, cbIretFrameInt, uHandlerRspInt);
360
361 /*
362 * Test #4: Execution breakpoint on pop ss / mov ss. Hits.
363 * Note! In real mode AMD-V updates the stack pointer, or something else is busted. Totally weird!
364 */
365 g_usBs3TestStep++;
366 Bs3RegSetDr0(Bs3SelRealModeCodeToFlat(pfnTestCode));
367 Bs3RegSetDr7(X86_DR7_L0 | X86_DR7_G0 | X86_DR7_RW(0, X86_DR7_RW_EO) | X86_DR7_LEN(0, X86_DR7_LEN_BYTE));
368 Bs3RegSetDr6(X86_DR6_INIT_VAL);
369 *BS3_XPTR_GET(uint32_t, StackXptr) = Ctx.ss;
370
371 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
372 bs3CpuWeird1_CompareDbgInhibitRingXfer(&TrapCtx, &Ctx, X86_XCPT_DB, 0, 0, X86_DR6_INIT_VAL | X86_DR6_B0,
373 cbIretFrameInt,
374 uHandlerRspInt - (BS3_MODE_IS_RM_SYS(bTestMode) ? 2 : 0) );
375
376 /*
377 * Test #5: Same as above, but with the LE and GE flags set.
378 */
379 g_usBs3TestStep++;
380 Bs3RegSetDr0(Bs3SelRealModeCodeToFlat(pfnTestCode));
381 Bs3RegSetDr7(X86_DR7_L0 | X86_DR7_G0 | X86_DR7_RW(0, X86_DR7_RW_EO) | X86_DR7_LEN(0, X86_DR7_LEN_BYTE) | X86_DR7_LE | X86_DR7_GE);
382 Bs3RegSetDr6(X86_DR6_INIT_VAL);
383 *BS3_XPTR_GET(uint32_t, StackXptr) = Ctx.ss;
384
385 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
386 bs3CpuWeird1_CompareDbgInhibitRingXfer(&TrapCtx, &Ctx, X86_XCPT_DB, 0, 0, X86_DR6_INIT_VAL | X86_DR6_B0,
387 cbIretFrameInt,
388 uHandlerRspInt - (BS3_MODE_IS_RM_SYS(bTestMode) ? 2 : 0) );
389
390 /*
391 * Test #6: Data breakpoint on SS load. The #DB is delivered after ring transition. Weird!
392 *
393 * Note! Intel loses the B0 status, probably for reasons similar to Pentium Pro erratum 3. A similar
394 * erratum is seen with virtually every microarchitecture since, e.g. skylake SKL009 & SKL111.
395 * Weirdly enough, they seem to get this right in real mode. Go figure.
396 */
397 g_usBs3TestStep++;
398 *BS3_XPTR_GET(uint32_t, StackXptr) = Ctx.ss;
399 Bs3RegSetDr0(BS3_XPTR_GET_FLAT(uint32_t, StackXptr));
400 Bs3RegSetDr7(X86_DR7_L0 | X86_DR7_G0 | X86_DR7_RW(0, X86_DR7_RW_RW) | X86_DR7_LEN(0, X86_DR7_LEN_WORD));
401 Bs3RegSetDr6(X86_DR6_INIT_VAL);
402
403 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
404 TrapExpect.Ctx.rip = TrapCtx.Ctx.rip; /// @todo fixme
405 Bs3RegSetDr7(0);
406 uDr6Expect = X86_DR6_INIT_VAL | X86_DR6_B0;
407 if (g_enmCpuVendor == BS3CPUVENDOR_INTEL && bTestMode != BS3_MODE_RM)
408 uDr6Expect = X86_DR6_INIT_VAL;
409 bs3CpuWeird1_CompareDbgInhibitRingXfer(&TrapCtx, &TrapExpect.Ctx, X86_XCPT_DB, 0, 0, uDr6Expect,
410 cbIretFrameSame, uHandlerRspIntDb);
411
412 /*
413 * Test #7: Same as above, but with the LE and GE flags set.
414 */
415 g_usBs3TestStep++;
416 *BS3_XPTR_GET(uint32_t, StackXptr) = Ctx.ss;
417 Bs3RegSetDr0(BS3_XPTR_GET_FLAT(uint32_t, StackXptr));
418 Bs3RegSetDr7(X86_DR7_L0 | X86_DR7_G0 | X86_DR7_RW(0, X86_DR7_RW_RW) | X86_DR7_LEN(0, X86_DR7_LEN_WORD) | X86_DR7_LE | X86_DR7_GE);
419 Bs3RegSetDr6(X86_DR6_INIT_VAL);
420
421 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
422 TrapExpect.Ctx.rip = TrapCtx.Ctx.rip; /// @todo fixme
423 Bs3RegSetDr7(0);
424 uDr6Expect = X86_DR6_INIT_VAL | X86_DR6_B0;
425 if (g_enmCpuVendor == BS3CPUVENDOR_INTEL && bTestMode != BS3_MODE_RM)
426 uDr6Expect = X86_DR6_INIT_VAL;
427 bs3CpuWeird1_CompareDbgInhibitRingXfer(&TrapCtx, &TrapExpect.Ctx, X86_XCPT_DB, 0, 0, uDr6Expect,
428 cbIretFrameSame, uHandlerRspIntDb);
429
430 if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
431 {
432 /*
433 * Test #8: Data breakpoint on SS GDT entry. Half weird!
434 * Note! Intel loses the B1 status, see test #6.
435 */
436 g_usBs3TestStep++;
437 *BS3_XPTR_GET(uint32_t, StackXptr) = (Ctx.ss & X86_SEL_RPL) | BS3_SEL_SPARE_00;
438 Bs3GdteSpare00 = Bs3Gdt[Ctx.ss / sizeof(Bs3Gdt[0])];
439
440 Bs3RegSetDr1(Bs3SelPtrToFlat(&Bs3GdteSpare00));
441 Bs3RegSetDr7(X86_DR7_L1 | X86_DR7_G1 | X86_DR7_RW(1, X86_DR7_RW_RW) | X86_DR7_LEN(1, X86_DR7_LEN_DWORD));
442 Bs3RegSetDr6(X86_DR6_INIT_VAL);
443
444 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
445 TrapExpect.Ctx.rip = TrapCtx.Ctx.rip; /// @todo fixme
446 Bs3RegSetDr7(0);
447 uDr6Expect = g_enmCpuVendor == BS3CPUVENDOR_INTEL ? X86_DR6_INIT_VAL : X86_DR6_INIT_VAL | X86_DR6_B1;
448 bs3CpuWeird1_CompareDbgInhibitRingXfer(&TrapCtx, &TrapExpect.Ctx, X86_XCPT_DB, 0, 0, uDr6Expect,
449 cbIretFrameSame, uHandlerRspIntDb);
450
451 /*
452 * Test #9: Same as above, but with the LE and GE flags set.
453 */
454 g_usBs3TestStep++;
455 *BS3_XPTR_GET(uint32_t, StackXptr) = (Ctx.ss & X86_SEL_RPL) | BS3_SEL_SPARE_00;
456 Bs3GdteSpare00 = Bs3Gdt[Ctx.ss / sizeof(Bs3Gdt[0])];
457
458 Bs3RegSetDr1(Bs3SelPtrToFlat(&Bs3GdteSpare00));
459 Bs3RegSetDr7(X86_DR7_L1 | X86_DR7_G1 | X86_DR7_RW(1, X86_DR7_RW_RW) | X86_DR7_LEN(1, X86_DR7_LEN_DWORD) | X86_DR7_LE | X86_DR7_GE);
460 Bs3RegSetDr6(X86_DR6_INIT_VAL);
461
462 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
463 TrapExpect.Ctx.rip = TrapCtx.Ctx.rip; /// @todo fixme
464 Bs3RegSetDr7(0);
465 uDr6Expect = g_enmCpuVendor == BS3CPUVENDOR_INTEL ? X86_DR6_INIT_VAL : X86_DR6_INIT_VAL | X86_DR6_B1;
466 bs3CpuWeird1_CompareDbgInhibitRingXfer(&TrapCtx, &TrapExpect.Ctx, X86_XCPT_DB, 0, 0, uDr6Expect,
467 cbIretFrameSame, uHandlerRspIntDb);
468 }
469
470 /*
471 * Cleanup.
472 */
473 Bs3RegSetDr0(0);
474 Bs3RegSetDr1(0);
475 Bs3RegSetDr2(0);
476 Bs3RegSetDr3(0);
477 Bs3RegSetDr6(X86_DR6_INIT_VAL);
478 Bs3RegSetDr7(0);
479 }
480 Bs3TrapSetDpl(bIntGate, bSavedDpl);
481 return 0;
482}
483
484
485BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuWeird1_DbgInhibitRingXfer)(uint8_t bMode)
486{
487 if (BS3_MODE_IS_V86(bMode))
488 switch (bMode)
489 {
490 /** @todo some busted stack stuff with the 16-bit guys. Also, if VME is
491 * enabled, we're probably not able to do any sensible testing. */
492 case BS3_MODE_PP16_V86:
493 case BS3_MODE_PE16_V86:
494 case BS3_MODE_PAE16_V86:
495 return BS3TESTDOMODE_SKIPPED;
496 }
497 //if (bMode != BS3_MODE_PE16_V86) return BS3TESTDOMODE_SKIPPED;
498 //if (bMode != BS3_MODE_PAEV86) return BS3TESTDOMODE_SKIPPED;
499
500 bs3CpuWeird1_SetGlobals(bMode);
501
502 /** @todo test sysenter and syscall too. */
503 /** @todo test INTO. */
504 /** @todo test all V8086 software INT delivery modes (currently only 4 and 1). */
505
506 /* Note! Both ICEBP and BOUND have been checked cursorily and found not to be affected. */
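 /* Worker arguments: the gate/vector number, the length of the ring transfer
 instruction (2 bytes for INT imm8, 1 byte for the 0CCh breakpoint), and the
 stack adjustment done by the preceding SS load (2/4 bytes for a 16/32-bit
 POP SS; the 64-bit variants adjust nothing, i.e. use MOV SS). */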
507 if (BS3_MODE_IS_16BIT_CODE(bMode))
508 {
509 bs3CpuWeird1_DbgInhibitRingXfer_Worker(bMode, 0x80, 2, 2, bs3CpuWeird1_InhibitedInt80_c16, bs3CpuWeird1_InhibitedInt80_int80_c16);
510 if (!BS3_MODE_IS_V86(bMode) || !g_fVME)
511 {
512 /** @todo explain why these GURU */
513 bs3CpuWeird1_DbgInhibitRingXfer_Worker(bMode, 0x03, 2, 2, bs3CpuWeird1_InhibitedInt3_c16, bs3CpuWeird1_InhibitedInt3_int3_c16);
514 bs3CpuWeird1_DbgInhibitRingXfer_Worker(bMode, 0x03, 1, 2, bs3CpuWeird1_InhibitedBp_c16, bs3CpuWeird1_InhibitedBp_int3_c16);
515 }
516 }
517 else if (BS3_MODE_IS_32BIT_CODE(bMode))
518 {
519 bs3CpuWeird1_DbgInhibitRingXfer_Worker(bMode, 0x80, 2, 4, bs3CpuWeird1_InhibitedInt80_c32, bs3CpuWeird1_InhibitedInt80_int80_c32);
520 bs3CpuWeird1_DbgInhibitRingXfer_Worker(bMode, 0x03, 2, 4, bs3CpuWeird1_InhibitedInt3_c32, bs3CpuWeird1_InhibitedInt3_int3_c32);
521 bs3CpuWeird1_DbgInhibitRingXfer_Worker(bMode, 0x03, 1, 4, bs3CpuWeird1_InhibitedBp_c32, bs3CpuWeird1_InhibitedBp_int3_c32);
522 }
523 else
524 {
525 bs3CpuWeird1_DbgInhibitRingXfer_Worker(bMode, 0x80, 2, 0, bs3CpuWeird1_InhibitedInt80_c64, bs3CpuWeird1_InhibitedInt80_int80_c64);
526 bs3CpuWeird1_DbgInhibitRingXfer_Worker(bMode, 0x03, 2, 0, bs3CpuWeird1_InhibitedInt3_c64, bs3CpuWeird1_InhibitedInt3_int3_c64);
527 bs3CpuWeird1_DbgInhibitRingXfer_Worker(bMode, 0x03, 1, 0, bs3CpuWeird1_InhibitedBp_c64, bs3CpuWeird1_InhibitedBp_int3_c64);
528 }
529
530 return 0;
531}
532
533
534/*********************************************************************************************************************************
535* IP / EIP Wrapping *
536*********************************************************************************************************************************/
537#define PROTO_ALL(a_Template) \
538 FNBS3FAR a_Template ## _c16, a_Template ## _c16_EndProc, \
539 a_Template ## _c32, a_Template ## _c32_EndProc, \
540 a_Template ## _c64, a_Template ## _c64_EndProc
541PROTO_ALL(bs3CpuWeird1_PcWrapBenign1);
542PROTO_ALL(bs3CpuWeird1_PcWrapBenign2);
543PROTO_ALL(bs3CpuWeird1_PcWrapCpuId);
544PROTO_ALL(bs3CpuWeird1_PcWrapIn80);
545PROTO_ALL(bs3CpuWeird1_PcWrapOut80);
546PROTO_ALL(bs3CpuWeird1_PcWrapSmsw);
547PROTO_ALL(bs3CpuWeird1_PcWrapRdCr0);
548PROTO_ALL(bs3CpuWeird1_PcWrapRdDr0);
549PROTO_ALL(bs3CpuWeird1_PcWrapWrDr0);
550#undef PROTO_ALL
551
552typedef enum { kPcWrapSetup_None, kPcWrapSetup_ZeroRax } PCWRAPSETUP;
553
554/**
555 * Compares pc wraparound result.
556 */
557static uint8_t bs3CpuWeird1_ComparePcWrap(PCBS3TRAPFRAME pTrapCtx, PCBS3TRAPFRAME pTrapExpect)
558{
559 uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
560 CHECK_MEMBER("bXcpt", "%#04x", pTrapCtx->bXcpt, pTrapExpect->bXcpt);
561 CHECK_MEMBER("bErrCd", "%#06RX64", pTrapCtx->uErrCd, pTrapExpect->uErrCd);
562 Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, &pTrapExpect->Ctx, 0 /*cbPcAdjust*/, 0 /*cbSpAdjust*/, 0 /*fExtraEfl*/,
563 g_pszTestMode, g_usBs3TestStep);
564 if (Bs3TestSubErrorCount() != cErrorsBefore)
565 {
566 Bs3TrapPrintFrame(pTrapCtx);
567 Bs3TestPrintf("CS=%04RX16 SS:ESP=%04RX16:%08RX64 EFL=%RX64 cbIret=%#x\n",
568 pTrapCtx->uHandlerCs, pTrapCtx->uHandlerSs, pTrapCtx->uHandlerRsp,
569 pTrapCtx->fHandlerRfl, pTrapCtx->cbIretFrame);
570#if 0
571 Bs3TestPrintf("Halting in ComparePcWrap: bXcpt=%#x\n", pTrapCtx->bXcpt);
572 ASMHalt();
573#endif
574 return 1;
575 }
576 return 0;
577}
578
579
580static uint8_t bs3CpuWeird1_PcWrapping_Worker16(uint8_t bMode, RTSEL SelCode, uint8_t BS3_FAR *pbHead,
581 uint8_t BS3_FAR *pbTail, uint8_t BS3_FAR *pbAfter,
582 void const BS3_FAR *pvTemplate, size_t cbTemplate, PCWRAPSETUP enmSetup)
583{
584 BS3TRAPFRAME TrapCtx;
585 BS3TRAPFRAME TrapExpect;
586 BS3REGCTX Ctx;
587 uint8_t bXcpt;
588
589 /* make sure they're allocated */
590 Bs3MemZero(&Ctx, sizeof(Ctx));
591 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
592 Bs3MemZero(&TrapExpect, sizeof(TrapExpect));
593
594 /*
595 * Create the expected result by first placing the code template
596 * at the start of the buffer and giving it a quick run.
597 *
598 * I cannot think of any instruction always causing #GP(0) right now, so
599 * we generate a ud2 and modify it instead.
600 */
601 Bs3MemCpy(pbHead, pvTemplate, cbTemplate);
602 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) <= BS3CPU_80286)
603 {
604 pbHead[cbTemplate] = 0xcc; /* int3 */
605 bXcpt = X86_XCPT_BP;
606 }
607 else
608 {
609 pbHead[cbTemplate] = 0x0f; /* ud2 */
610 pbHead[cbTemplate + 1] = 0x0b;
611 bXcpt = X86_XCPT_UD;
612 }
613
614 Bs3RegCtxSaveEx(&Ctx, bMode, 1024);
615
616 Ctx.cs = SelCode;
617 Ctx.rip.u = 0;
618 switch (enmSetup)
619 {
620 case kPcWrapSetup_None:
621 break;
622 case kPcWrapSetup_ZeroRax:
623 Ctx.rax.u = 0;
624 break;
625 }
626
627 /* V8086: Set IOPL to 3. */
628 if (BS3_MODE_IS_V86(bMode))
629 Ctx.rflags.u32 |= X86_EFL_IOPL;
630
631 Bs3TrapSetJmpAndRestore(&Ctx, &TrapExpect);
632 if (TrapExpect.bXcpt != bXcpt)
633 {
634
635 Bs3TestFailedF("%u: Setup: bXcpt is %#x, expected %#x!\n", g_usBs3TestStep, TrapExpect.bXcpt, bXcpt);
636 Bs3TrapPrintFrame(&TrapExpect);
637 return 1;
638 }
639
640 /*
641 * Adjust the contexts for the real test.
642 */
643 Ctx.cs = SelCode;
644 Ctx.rip.u = (uint32_t)_64K - cbTemplate;
645
646 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) <= BS3CPU_80286)
647 TrapExpect.Ctx.rip.u = 1;
648 else
649 {
650 if (BS3_MODE_IS_16BIT_SYS(bMode))
651 TrapExpect.Ctx.rip.u = 0;
652 else
653 TrapExpect.Ctx.rip.u = UINT32_C(0x10000);
654 TrapExpect.bXcpt = X86_XCPT_GP;
655 TrapExpect.uErrCd = 0;
656 }
657
658 /*
659 * Prepare the buffer for 16-bit wrap around.
660 */
661 Bs3MemSet(pbHead, 0xcc, 64); /* int3 */
662 if (bXcpt == X86_XCPT_UD)
663 {
664 pbHead[0] = 0x0f; /* ud2 */
665 pbHead[1] = 0x0b;
666 }
667 Bs3MemCpy(&pbTail[_4K - cbTemplate], pvTemplate, cbTemplate);
668 Bs3MemSet(pbAfter, 0xf1, 64); /* icebp / int1 */
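 /* Resulting layout, relative to the CS base:
 0x00000: int3 filler (ud2 first on 386+) - where execution lands if IP wraps,
 0x10000 - cbTemplate: the code template, ending exactly at the 64KB mark,
 0x10000: int1/icebp filler to catch execution running straight past 0xFFFF. */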
669
670 /*
671 * Do a test run.
672 */
673 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
674 if (!bs3CpuWeird1_ComparePcWrap(&TrapCtx, &TrapExpect))
675 {
676#if 0 /* needs more work */
677 /*
678 * Slide the instruction template across the boundary byte-by-byte and
679 * check that it triggers #GP on the initial instruction on 386+.
680 */
681 unsigned cbTail;
682 unsigned cbHead;
683 g_usBs3TestStep++;
684 for (cbTail = cbTemplate - 1, cbHead = 1; cbTail > 0; cbTail--, cbHead++, g_usBs3TestStep++)
685 {
686 pbTail[X86_PAGE_SIZE - cbTail - 1] = 0xcc;
687 Bs3MemCpy(&pbTail[X86_PAGE_SIZE - cbTail], pvTemplate, cbTail);
688 Bs3MemCpy(pbHead, &((uint8_t const *)pvTemplate)[cbTail], cbHead);
689 if (bXcpt == X86_XCPT_BP)
690 pbHead[cbHead] = 0xcc; /* int3 */
691 else
692 {
693 pbHead[cbHead] = 0x0f; /* ud2 */
694 pbHead[cbHead + 1] = 0x0b;
695 }
696
697 Ctx.rip.u = (uint32_t)_64K - cbTail;
698 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) <= BS3CPU_80286)
699 TrapExpect.Ctx.rip.u = cbHead + 1;
700 else
701 {
702 TrapExpect.Ctx.rip.u = Ctx.rip.u;
703 }
704
705 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
706 if (bs3CpuWeird1_ComparePcWrap(&TrapCtx, &TrapExpect))
707 return 1;
708 }
709#endif
710 }
711 return 0;
712}
713
714
715static uint8_t bs3CpuWeird1_PcWrapping_Worker32(uint8_t bMode, RTSEL SelCode, uint8_t BS3_FAR *pbPage1,
716 uint8_t BS3_FAR *pbPage2, uint32_t uFlatPage2,
717 void const BS3_FAR *pvTemplate, size_t cbTemplate, PCWRAPSETUP enmSetup)
718{
719 BS3TRAPFRAME TrapCtx;
720 BS3TRAPFRAME TrapExpect;
721 BS3REGCTX Ctx;
722 unsigned cbPage1;
723 unsigned cbPage2;
724
725 /* make sure they're allocated */
726 Bs3MemZero(&Ctx, sizeof(Ctx));
727 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
728 Bs3MemZero(&TrapExpect, sizeof(TrapExpect));
729
730 //Bs3TestPrintf("SelCode=%#x pbPage1=%p pbPage2=%p uFlatPage2=%RX32 pvTemplate=%p cbTemplate\n",
731 // SelCode, pbPage1, pbPage2, uFlatPage2, pvTemplate, cbTemplate);
732
733 /*
734 * Create the expected result by first placing the code template
735 * at the start of the buffer and giving it a quick run.
736 */
737 Bs3MemSet(pbPage1, 0xcc, _4K);
738 Bs3MemSet(pbPage2, 0xcc, _4K);
739 Bs3MemCpy(&pbPage1[_4K - cbTemplate], pvTemplate, cbTemplate);
740 pbPage2[0] = 0x0f; /* ud2 */
741 pbPage2[1] = 0x0b;
742
743 Bs3RegCtxSaveEx(&Ctx, bMode, 1024);
744
745 Ctx.cs = BS3_SEL_R0_CS32;
746 Ctx.rip.u = uFlatPage2 - cbTemplate;
747 switch (enmSetup)
748 {
749 case kPcWrapSetup_None:
750 break;
751 case kPcWrapSetup_ZeroRax:
752 Ctx.rax.u = 0;
753 break;
754 }
755
756 Bs3TrapSetJmpAndRestore(&Ctx, &TrapExpect);
757 if (TrapExpect.bXcpt != X86_XCPT_UD)
758 {
759
760 Bs3TestFailedF("%u: Setup: bXcpt is %#x, expected %#x!\n", g_usBs3TestStep, TrapExpect.bXcpt, X86_XCPT_UD);
761 Bs3TrapPrintFrame(&TrapExpect);
762 return 1;
763 }
764
765 /*
766 * The real test uses the special CS selector.
767 */
768 Ctx.cs = SelCode;
769 TrapExpect.Ctx.cs = SelCode;
770
771 /*
772 * Unlike 16-bit mode, the instruction may cross the wraparound boundary,
773 * so we test by advancing the template across byte-by-byte.
774 */
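 /* Each iteration places the first cbPage1 bytes of the template at the top
 of the segment (offsets just below 4G, which map into page 1 since the CS
 base is page 2) and the remaining cbPage2 bytes plus a ud2 at offset 0,
 verifying that EIP wraps from 0xFFFFFFFF to 0 and stops on the ud2. */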
775 for (cbPage1 = cbTemplate, cbPage2 = 0; cbPage1 > 0; cbPage1--, cbPage2++, g_usBs3TestStep++)
776 {
777 pbPage1[X86_PAGE_SIZE - cbPage1 - 1] = 0xcc;
778 Bs3MemCpy(&pbPage1[X86_PAGE_SIZE - cbPage1], pvTemplate, cbPage1);
779 Bs3MemCpy(pbPage2, &((uint8_t const *)pvTemplate)[cbPage1], cbPage2);
780 pbPage2[cbPage2] = 0x0f; /* ud2 */
781 pbPage2[cbPage2 + 1] = 0x0b;
782
783 Ctx.rip.u = UINT32_MAX - cbPage1 + 1;
784 TrapExpect.Ctx.rip.u = cbPage2;
785
786 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
787 if (bs3CpuWeird1_ComparePcWrap(&TrapCtx, &TrapExpect))
788 return 1;
789 }
790 return 0;
791}
792
793
794static uint8_t bs3CpuWeird1_PcWrapping_Worker64(uint8_t bMode, uint8_t BS3_FAR *pbBuf, uint32_t uFlatBuf,
795 void const BS3_FAR *pvTemplate, size_t cbTemplate, PCWRAPSETUP enmSetup)
796{
797 uint8_t BS3_FAR * const pbPage1 = pbBuf; /* mapped at 0, 4G and 8G */
798 uint8_t BS3_FAR * const pbPage2 = &pbBuf[X86_PAGE_SIZE]; /* mapped at -4K, 4G-4K and 8G-4K. */
799 BS3TRAPFRAME TrapCtx;
800 BS3TRAPFRAME TrapExpect;
801 BS3REGCTX Ctx;
802 unsigned cbStart;
803 unsigned cbEnd;
804
805 /* make sure they're allocated */
806 Bs3MemZero(&Ctx, sizeof(Ctx));
807 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
808 Bs3MemZero(&TrapExpect, sizeof(TrapExpect));
809
810 /*
811 * Create the expected result by first placing the code template
812 * at the start of the buffer and giving it a quick run.
813 */
814 Bs3MemCpy(pbPage1, pvTemplate, cbTemplate);
815 pbPage1[cbTemplate] = 0x0f; /* ud2 */
816 pbPage1[cbTemplate + 1] = 0x0b;
817
818 Bs3RegCtxSaveEx(&Ctx, bMode, 1024);
819
820 Ctx.rip.u = uFlatBuf;
821 switch (enmSetup)
822 {
823 case kPcWrapSetup_None:
824 break;
825 case kPcWrapSetup_ZeroRax:
826 Ctx.rax.u = 0;
827 break;
828 }
829
830 Bs3TrapSetJmpAndRestore(&Ctx, &TrapExpect);
831 if (TrapExpect.bXcpt != X86_XCPT_UD)
832 {
833
834 Bs3TestFailedF("%u: Setup: bXcpt is %#x, expected %#x!\n", g_usBs3TestStep, TrapExpect.bXcpt, X86_XCPT_UD);
835 Bs3TrapPrintFrame(&TrapExpect);
836 return 1;
837 }
838
839 /*
840 * Unlike 16-bit mode, the instruction may cross the wraparound boundary,
841 * so we test by advancing the template across byte-by-byte.
842 *
843 * Page #1 is mapped at address zero and Page #2 as the last one.
844 */
845 Bs3MemSet(pbBuf, 0xf1, X86_PAGE_SIZE * 2);
846 for (cbStart = cbTemplate, cbEnd = 0; cbStart > 0; cbStart--, cbEnd++)
847 {
848 pbPage2[X86_PAGE_SIZE - cbStart - 1] = 0xf1;
849 Bs3MemCpy(&pbPage2[X86_PAGE_SIZE - cbStart], pvTemplate, cbStart);
850 Bs3MemCpy(pbPage1, &((uint8_t const *)pvTemplate)[cbStart], cbEnd);
851 pbPage1[cbEnd] = 0x0f; /* ud2 */
852 pbPage1[cbEnd + 1] = 0x0b;
853
854 Ctx.rip.u = UINT64_MAX - cbStart + 1;
855 TrapExpect.Ctx.rip.u = cbEnd;
856
857 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
858 if (bs3CpuWeird1_ComparePcWrap(&TrapCtx, &TrapExpect))
859 return 1;
860 g_usBs3TestStep++;
861
862 /* Also check that crossing 4G isn't buggered up in our code by
863 32-bit and 16-bit mode support.*/
864 Ctx.rip.u = _4G - cbStart;
865 TrapExpect.Ctx.rip.u = _4G + cbEnd;
866 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
867 if (bs3CpuWeird1_ComparePcWrap(&TrapCtx, &TrapExpect))
868 return 1;
869 g_usBs3TestStep++;
870
871 Ctx.rip.u = _4G*2 - cbStart;
872 TrapExpect.Ctx.rip.u = _4G*2 + cbEnd;
873 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
874 if (bs3CpuWeird1_ComparePcWrap(&TrapCtx, &TrapExpect))
875 return 1;
876 g_usBs3TestStep += 2;
877 }
878 return 0;
879}
880
881
882
883BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuWeird1_PcWrapping)(uint8_t bMode)
884{
885 uint8_t bRet = 1;
886 size_t i;
887
888 bs3CpuWeird1_SetGlobals(bMode);
889
890 if (BS3_MODE_IS_16BIT_CODE(bMode))
891 {
892 /*
893 * For 16-bit testing, we need a 68 KB buffer.
894 *
895 * This is a little annoying to work with from 16-bit code, so we use
896 * separate pointers to each interesting bit of it.
897 */
898 /** @todo add api for doing this, so we don't need to include bs3-cmn-memory.h. */
899 uint8_t BS3_FAR *pbBuf = (uint8_t BS3_FAR *)Bs3SlabAllocEx(&g_Bs3Mem4KLow.Core, 17 /*cPages*/, 0 /*fFlags*/);
900 if (pbBuf != NULL)
901 {
902 uint32_t const uFlatBuf = Bs3SelPtrToFlat(pbBuf);
903 uint8_t BS3_FAR *pbTail = Bs3XptrFlatToCurrent(uFlatBuf + 0x0f000);
904 uint8_t BS3_FAR *pbAfter = Bs3XptrFlatToCurrent(uFlatBuf + UINT32_C(0x10000));
905 RTSEL SelCode;
906 uint32_t off;
907 static struct { FPFNBS3FAR pfnStart, pfnEnd; PCWRAPSETUP enmSetup; unsigned fNoV86 : 1; }
908 const s_aTemplates16[] =
909 {
910#define ENTRY16(a_Template, a_enmSetup, a_fNoV86) { a_Template ## _c16, a_Template ## _c16_EndProc, a_enmSetup, a_fNoV86 }
911 ENTRY16(bs3CpuWeird1_PcWrapBenign1, kPcWrapSetup_None, 0),
912 ENTRY16(bs3CpuWeird1_PcWrapBenign2, kPcWrapSetup_None, 0),
913 ENTRY16(bs3CpuWeird1_PcWrapCpuId, kPcWrapSetup_ZeroRax, 0),
914 ENTRY16(bs3CpuWeird1_PcWrapIn80, kPcWrapSetup_None, 0),
915 ENTRY16(bs3CpuWeird1_PcWrapOut80, kPcWrapSetup_None, 0),
916 ENTRY16(bs3CpuWeird1_PcWrapSmsw, kPcWrapSetup_None, 0),
917 ENTRY16(bs3CpuWeird1_PcWrapRdCr0, kPcWrapSetup_None, 1),
918 ENTRY16(bs3CpuWeird1_PcWrapRdDr0, kPcWrapSetup_None, 1),
919 ENTRY16(bs3CpuWeird1_PcWrapWrDr0, kPcWrapSetup_ZeroRax, 1),
920#undef ENTRY16
921 };
922
923 /* Fill the buffer with int1 instructions: */
924 for (off = 0; off < UINT32_C(0x11000); off += _4K)
925 {
926 uint8_t BS3_FAR *pbPage = Bs3XptrFlatToCurrent(uFlatBuf + off);
927 Bs3MemSet(pbPage, 0xf1, _4K);
928 }
929
930 /* Setup the CS for it. */
931 SelCode = (uint16_t)(uFlatBuf >> 4);
932 if (!BS3_MODE_IS_RM_OR_V86(bMode))
933 {
934 Bs3SelSetup16BitCode(&Bs3GdteSpare00, uFlatBuf, 0);
935 SelCode = BS3_SEL_SPARE_00;
936 }
937
938 /* Allow IN and OUT to port 80h from V8086 mode. */
939 if (BS3_MODE_IS_V86(bMode))
940 {
941 Bs3RegSetTr(BS3_SEL_TSS32_IOBP_IRB);
942 ASMBitClear(Bs3SharedIobp, 0x80);
943 }
944
945 for (i = 0; i < RT_ELEMENTS(s_aTemplates16); i++)
946 {
947 if (!s_aTemplates16[i].fNoV86 || !BS3_MODE_IS_V86(bMode))
948 bs3CpuWeird1_PcWrapping_Worker16(bMode, SelCode, pbBuf, pbTail, pbAfter, s_aTemplates16[i].pfnStart,
949 (uintptr_t)s_aTemplates16[i].pfnEnd - (uintptr_t)s_aTemplates16[i].pfnStart,
950 s_aTemplates16[i].enmSetup);
951 g_usBs3TestStep = i * 256;
952 }
953
954 if (BS3_MODE_IS_V86(bMode))
955 ASMBitSet(Bs3SharedIobp, 0x80);
956
957 Bs3SlabFree(&g_Bs3Mem4KLow.Core, uFlatBuf, 17);
958
959 bRet = 0;
960 }
961 else
962 Bs3TestFailed("Failed to allocate 17 pages (68KB)");
963 }
964 else
965 {
966 /*
967 * For 32-bit and 64-bit mode we just need two pages.
968 */
969 size_t const cbBuf = X86_PAGE_SIZE * 2;
970 uint8_t BS3_FAR *pbBuf = (uint8_t BS3_FAR *)Bs3MemAlloc(BS3MEMKIND_TILED, cbBuf);
971 if (pbBuf)
972 {
973 uint32_t const uFlatBuf = Bs3SelPtrToFlat(pbBuf);
974 Bs3MemSet(pbBuf, 0xf1, cbBuf);
975
976 /*
977 * For 32-bit we set up a CS that starts with the 2nd page and
978 * ends with the first.
979 */
980 if (BS3_MODE_IS_32BIT_CODE(bMode))
981 {
982 static struct { FPFNBS3FAR pfnStart, pfnEnd; PCWRAPSETUP enmSetup; } const s_aTemplates32[] =
983 {
984#define ENTRY32(a_Template, a_enmSetup) { a_Template ## _c32, a_Template ## _c32_EndProc, a_enmSetup }
985 ENTRY32(bs3CpuWeird1_PcWrapBenign1, kPcWrapSetup_None),
986 ENTRY32(bs3CpuWeird1_PcWrapBenign2, kPcWrapSetup_None),
987 ENTRY32(bs3CpuWeird1_PcWrapCpuId, kPcWrapSetup_ZeroRax),
988 ENTRY32(bs3CpuWeird1_PcWrapIn80, kPcWrapSetup_None),
989 ENTRY32(bs3CpuWeird1_PcWrapOut80, kPcWrapSetup_None),
990 ENTRY32(bs3CpuWeird1_PcWrapSmsw, kPcWrapSetup_None),
991 ENTRY32(bs3CpuWeird1_PcWrapRdCr0, kPcWrapSetup_None),
992 ENTRY32(bs3CpuWeird1_PcWrapRdDr0, kPcWrapSetup_None),
993 ENTRY32(bs3CpuWeird1_PcWrapWrDr0, kPcWrapSetup_ZeroRax),
994#undef ENTRY32
995 };
996
997 Bs3SelSetup32BitCode(&Bs3GdteSpare00, uFlatBuf + X86_PAGE_SIZE, UINT32_MAX, 0);
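 /* The special CS is based at the second page with a 4GB-1 limit, so offset 0
 addresses page 2 while offsets just below 4G wrap (modulo 2^32 linear
 address calculation) into page 1, i.e. the segment starts with the 2nd
 page and ends with the first, as noted above. */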
998
999 for (i = 0; i < RT_ELEMENTS(s_aTemplates32); i++)
1000 {
1001 //Bs3TestPrintf("pfnStart=%p pfnEnd=%p\n", s_aTemplates32[i].pfnStart, s_aTemplates32[i].pfnEnd);
1002 bs3CpuWeird1_PcWrapping_Worker32(bMode, BS3_SEL_SPARE_00, pbBuf, &pbBuf[X86_PAGE_SIZE],
1003 uFlatBuf + X86_PAGE_SIZE, Bs3SelLnkPtrToCurPtr(s_aTemplates32[i].pfnStart),
1004 (uintptr_t)s_aTemplates32[i].pfnEnd - (uintptr_t)s_aTemplates32[i].pfnStart,
1005 s_aTemplates32[i].enmSetup);
1006 g_usBs3TestStep = i * 256;
1007 }
1008
1009 bRet = 0;
1010 }
1011 /*
1012 * For 64-bit we have to alias the two buffer pages to the first and
1013 * last page in the address space. To test that the 32-bit 4G rollover
1014 * isn't incorrectly applied to LM64, we repeat this mapping for the 4G
1015 * and 8G boundaries too.
1016 *
1017 * This ASSUMES there is nothing important in page 0 when in LM64.
1018 */
1019 else
1020 {
1021 static const struct { uint64_t uDst; uint16_t off; } s_aMappings[] =
1022 {
1023 { UINT64_MAX - X86_PAGE_SIZE + 1, X86_PAGE_SIZE * 1 },
1024 { UINT64_C(0), X86_PAGE_SIZE * 0 },
1025#if 1 /* technically not required as we just repeat the same 4G address space in long mode: */
1026 { _4G - X86_PAGE_SIZE, X86_PAGE_SIZE * 1 },
1027 { _4G, X86_PAGE_SIZE * 0 },
1028 { _4G*2 - X86_PAGE_SIZE, X86_PAGE_SIZE * 1 },
1029 { _4G*2, X86_PAGE_SIZE * 0 },
1030#endif
1031 };
1032 int rc = VINF_SUCCESS;
1033 unsigned iMap;
1034 BS3_ASSERT(bMode == BS3_MODE_LM64);
1035 for (iMap = 0; iMap < RT_ELEMENTS(s_aMappings) && RT_SUCCESS(rc); iMap++)
1036 {
1037 rc = Bs3PagingAlias(s_aMappings[iMap].uDst, uFlatBuf + s_aMappings[iMap].off, X86_PAGE_SIZE,
1038 X86_PTE_P | X86_PTE_A | X86_PTE_D | X86_PTE_RW);
1039 if (RT_FAILURE(rc))
1040 Bs3TestFailedF("Bs3PagingAlias(%#RX64,...) failed: %d", s_aMappings[iMap].uDst, rc);
1041 }
1042
1043 if (RT_SUCCESS(rc))
1044 {
1045 static struct { FPFNBS3FAR pfnStart, pfnEnd; PCWRAPSETUP enmSetup; } const s_aTemplates64[] =
1046 {
1047#define ENTRY64(a_Template, a_enmSetup) { a_Template ## _c64, a_Template ## _c64_EndProc, a_enmSetup }
1048 ENTRY64(bs3CpuWeird1_PcWrapBenign1, kPcWrapSetup_None),
1049 ENTRY64(bs3CpuWeird1_PcWrapBenign2, kPcWrapSetup_None),
1050 ENTRY64(bs3CpuWeird1_PcWrapCpuId, kPcWrapSetup_ZeroRax),
1051 ENTRY64(bs3CpuWeird1_PcWrapIn80, kPcWrapSetup_None),
1052 ENTRY64(bs3CpuWeird1_PcWrapOut80, kPcWrapSetup_None),
1053 ENTRY64(bs3CpuWeird1_PcWrapSmsw, kPcWrapSetup_None),
1054 ENTRY64(bs3CpuWeird1_PcWrapRdCr0, kPcWrapSetup_None),
1055 ENTRY64(bs3CpuWeird1_PcWrapRdDr0, kPcWrapSetup_None),
1056 ENTRY64(bs3CpuWeird1_PcWrapWrDr0, kPcWrapSetup_ZeroRax),
1057#undef ENTRY64
1058 };
1059
1060 for (i = 0; i < RT_ELEMENTS(s_aTemplates64); i++)
1061 {
1062 bs3CpuWeird1_PcWrapping_Worker64(bMode, pbBuf, uFlatBuf,
1063 Bs3SelLnkPtrToCurPtr(s_aTemplates64[i].pfnStart),
1064 (uintptr_t)s_aTemplates64[i].pfnEnd
1065 - (uintptr_t)s_aTemplates64[i].pfnStart,
1066 s_aTemplates64[i].enmSetup);
1067 g_usBs3TestStep = i * 256;
1068 }
1069
1070 bRet = 0;
1071
1072 Bs3PagingUnalias(UINT64_C(0), X86_PAGE_SIZE);
1073 }
1074
1075 while (iMap-- > 0)
1076 Bs3PagingUnalias(s_aMappings[iMap].uDst, X86_PAGE_SIZE);
1077 }
1078 Bs3MemFree(pbBuf, cbBuf);
1079 }
1080 else
1081 Bs3TestFailed("Failed to allocate 2-3 pages for tests.");
1082 }
1083
1084 return bRet;
1085}
1086
1087
1088/*********************************************************************************************************************************
1089* PUSH / POP *
1090*********************************************************************************************************************************/
1091#define PROTO_ALL(a_Template) \
1092 FNBS3FAR a_Template ## _c16, \
1093 a_Template ## _c32, \
1094 a_Template ## _c64
1095PROTO_ALL(bs3CpuWeird1_Push_xSP_Ud2);
1096PROTO_ALL(bs3CpuWeird1_Push_opsize_xSP_Ud2);
1097PROTO_ALL(bs3CpuWeird1_Push_opsize_xBX_Ud2);
1098PROTO_ALL(bs3CpuWeird1_Pop_xSP_Ud2);
1099PROTO_ALL(bs3CpuWeird1_Pop_opsize_xSP_Ud2);
1100PROTO_ALL(bs3CpuWeird1_Pop_opsize_xBX_Ud2);
1101#undef PROTO_ALL
1102
1103
1104/**
1105 * Compares push/pop result.
1106 */
1107static uint8_t bs3CpuWeird1_ComparePushPop(PCBS3TRAPFRAME pTrapCtx, PCBS3TRAPFRAME pTrapExpect)
1108{
1109 uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
1110 CHECK_MEMBER("bXcpt", "%#04x", pTrapCtx->bXcpt, pTrapExpect->bXcpt);
1111 CHECK_MEMBER("bErrCd", "%#06RX64", pTrapCtx->uErrCd, pTrapExpect->uErrCd);
1112 Bs3TestCheckRegCtxEx(&pTrapCtx->Ctx, &pTrapExpect->Ctx, 0 /*cbPcAdjust*/, 0 /*cbSpAdjust*/, 0 /*fExtraEfl*/,
1113 g_pszTestMode, g_usBs3TestStep);
1114 if (Bs3TestSubErrorCount() != cErrorsBefore)
1115 {
1116 Bs3TrapPrintFrame(pTrapCtx);
1117 Bs3TestPrintf("CS=%04RX16 SS:ESP=%04RX16:%08RX64 EFL=%RX64 cbIret=%#x\n",
1118 pTrapCtx->uHandlerCs, pTrapCtx->uHandlerSs, pTrapCtx->uHandlerRsp,
1119 pTrapCtx->fHandlerRfl, pTrapCtx->cbIretFrame);
1120#if 0
1121 Bs3TestPrintf("Halting in ComparePushPop: bXcpt=%#x\n", pTrapCtx->bXcpt);
1122 ASMHalt();
1123#endif
1124 return 1;
1125 }
1126 return 0;
1127}
1128
1129
1130/** Initialize the stack around the SS:RSP with fixed values. */
1131static void bs3CpuWeird1_PushPopInitStack(BS3PTRUNION PtrStack)
1132{
1133 PtrStack.pu16[-8] = UINT16_C(0x1e0f);
1134 PtrStack.pu16[-7] = UINT16_C(0x3c2d);
1135 PtrStack.pu16[-6] = UINT16_C(0x5a4b);
1136 PtrStack.pu16[-5] = UINT16_C(0x7869);
1137 PtrStack.pu16[-4] = UINT16_C(0x9687);
1138 PtrStack.pu16[-3] = UINT16_C(0xb4a5);
1139 PtrStack.pu16[-2] = UINT16_C(0xd2c3);
1140 PtrStack.pu16[-1] = UINT16_C(0xf0e1);
1141 PtrStack.pu16[0] = UINT16_C(0xfdec);
1142 PtrStack.pu16[1] = UINT16_C(0xdbca);
1143 PtrStack.pu16[2] = UINT16_C(0xb9a8);
1144 PtrStack.pu16[3] = UINT16_C(0x9786);
1145 PtrStack.pu16[4] = UINT16_C(0x7564);
1146 PtrStack.pu16[5] = UINT16_C(0x5342);
1147 PtrStack.pu16[6] = UINT16_C(0x3120);
1148}
1149
1150
1151BS3_DECL_FAR(uint8_t) BS3_CMN_FAR_NM(bs3CpuWeird1_PushPop)(uint8_t bTestMode)
1152{
1153 static struct
1154 {
1155 FPFNBS3FAR pfnStart;
1156 uint8_t cBits;
1157 bool fPush; /**< true if push, false if pop. */
1158 int8_t cbAdjSp; /**< The SP adjustment value. */
1159 uint8_t idxReg; /**< The X86_GREG_xXX value of the register in question. */
1160 uint8_t offUd2; /**< The UD2 offset into the code. */
1161 } s_aTests[] =
1162 {
1163 { bs3CpuWeird1_Push_opsize_xBX_Ud2_c16, 16, true, -4, X86_GREG_xBX, 2 },
1164 { bs3CpuWeird1_Pop_opsize_xBX_Ud2_c16, 16, false, +4, X86_GREG_xBX, 2 },
1165 { bs3CpuWeird1_Push_xSP_Ud2_c16, 16, true, -2, X86_GREG_xSP, 1 },
1166 { bs3CpuWeird1_Push_opsize_xSP_Ud2_c16, 16, true, -4, X86_GREG_xSP, 2 },
1167 { bs3CpuWeird1_Pop_xSP_Ud2_c16, 16, false, +2, X86_GREG_xSP, 1 },
1168 { bs3CpuWeird1_Pop_opsize_xSP_Ud2_c16, 16, false, +4, X86_GREG_xSP, 2 },
1169
1170 { bs3CpuWeird1_Push_opsize_xBX_Ud2_c32, 32, true, -2, X86_GREG_xBX, 2 },
1171 { bs3CpuWeird1_Pop_opsize_xBX_Ud2_c32, 32, false, +2, X86_GREG_xBX, 2 },
1172 { bs3CpuWeird1_Push_xSP_Ud2_c32, 32, true, -4, X86_GREG_xSP, 1 },
1173 { bs3CpuWeird1_Push_opsize_xSP_Ud2_c32, 32, true, -2, X86_GREG_xSP, 2 },
1174 { bs3CpuWeird1_Pop_xSP_Ud2_c32, 32, false, +4, X86_GREG_xSP, 1 },
1175 { bs3CpuWeird1_Pop_opsize_xSP_Ud2_c32, 32, false, +2, X86_GREG_xSP, 2 },
1176
1177 { bs3CpuWeird1_Push_opsize_xBX_Ud2_c64, 64, true, -2, X86_GREG_xBX, 2 },
1178 { bs3CpuWeird1_Pop_opsize_xBX_Ud2_c64, 64, false, +2, X86_GREG_xBX, 2 },
1179 { bs3CpuWeird1_Push_xSP_Ud2_c64, 64, true, -8, X86_GREG_xSP, 1 },
1180 { bs3CpuWeird1_Push_opsize_xSP_Ud2_c64, 64, true, -2, X86_GREG_xSP, 2 },
1181 { bs3CpuWeird1_Pop_xSP_Ud2_c64, 64, false, +8, X86_GREG_xSP, 1 },
1182 { bs3CpuWeird1_Pop_opsize_xSP_Ud2_c64, 64, false, +2, X86_GREG_xSP, 2 },
1183 };
1184 BS3TRAPFRAME TrapCtx;
1185 BS3TRAPFRAME TrapExpect;
1186 BS3REGCTX Ctx;
1187 uint8_t const cTestBits = BS3_MODE_IS_16BIT_CODE(bTestMode) ? 16
1188 : BS3_MODE_IS_32BIT_CODE(bTestMode) ? 32 : 64;
1189 uint8_t BS3_FAR *pbAltStack = NULL;
1190 BS3PTRUNION PtrStack;
1191 unsigned i;
1192
1193 /* make sure they're allocated */
1194 Bs3MemZero(&Ctx, sizeof(Ctx));
1195 Bs3MemZero(&TrapCtx, sizeof(TrapCtx));
1196 Bs3MemZero(&TrapExpect, sizeof(TrapExpect));
1197
1198 bs3CpuWeird1_SetGlobals(bTestMode);
1199
1200 /* Construct a basic context. */
1201 Bs3RegCtxSaveEx(&Ctx, bTestMode, 1024);
1202 Ctx.rflags.u32 &= ~X86_EFL_RF;
1203 if (BS3_MODE_IS_64BIT_CODE(bTestMode))
1204 {
1205 Ctx.rbx.au32[1] ^= UINT32_C(0x12305c78);
1206 Ctx.rcx.au32[1] ^= UINT32_C(0x33447799);
1207 Ctx.rax.au32[1] ^= UINT32_C(0x9983658a);
1208 Ctx.r11.au32[1] ^= UINT32_C(0xbbeeffdd);
1209 Ctx.r12.au32[1] ^= UINT32_C(0x87272728);
1210 }
1211
1212 /* ring-3 if possible, since that'll enable automatic stack switching. */
1213 if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
1214 Bs3RegCtxConvertToRingX(&Ctx, 3);
1215
1216 /* Make PtrStack == SS:xSP from Ctx. */
1217 PtrStack.pv = Bs3RegCtxGetRspSsAsCurPtr(&Ctx);
1218
1219#if 1
1220 /* Use our own stack so we can observe the effect of ESP/RSP rolling across
1221 a 64KB boundary when just popping SP. */
1222 if (!BS3_MODE_IS_16BIT_CODE(bTestMode)) /** @todo extend this to 16-bit code as well (except RM ofc). */
1223 {
1224 uint32_t uFlatNextSeg;
1225 pbAltStack = (uint8_t BS3_FAR *)Bs3SlabAllocEx(&g_Bs3Mem4KUpperTiled.Core, 17 /*cPages*/, 0 /*fFlags*/);
1226 if (!pbAltStack)
1227 {
1228 Bs3TestFailed("Failed to allocate 68K for alternative stack!");
1229 return 1;
1230 }
1231
1232 /* Modify RSP to be one byte under the 64KB boundary. */
1233 uFlatNextSeg = (Bs3SelPtrToFlat(pbAltStack) + _64K) & ~UINT32_C(0xffff);
1234 Ctx.rsp.u = uFlatNextSeg - 1;
1235 //Bs3TestPrintf("uFlatNextSeg=%RX32 rsp=%RX64 ss=%RX16\n", uFlatNextSeg, Ctx.rsp.u, Ctx.ss);
1236
1237 /* Modify the PtrStack accordingly, using a spare selector for addressing it. */
1238 Bs3SelSetup16BitData(&Bs3GdteSpare00, uFlatNextSeg - _4K);
1239 PtrStack.pv = BS3_FP_MAKE(BS3_SEL_SPARE_00 | 3, _4K - 1);
1240 }
1241#endif
1242
1243 /*
1244 * Iterate the test snippets and run those relevant to the test context.
1245 */
1246 for (i = 0; i < RT_ELEMENTS(s_aTests); i++)
1247 {
1248 if (s_aTests[i].cBits == cTestBits)
1249 {
1250 PBS3REG const pReg = &(&Ctx.rax)[s_aTests[i].idxReg];
1251 unsigned iRep; /**< This is to trigger native recompilation. */
1252 BS3REG SavedReg;
1253 BS3REG SavedRsp;
1254
1255 /* Save context stuff we may change: */
1256 SavedReg.u = pReg->u;
1257 SavedRsp.u = Ctx.rsp.u;
1258
1259 /* Setup the test context. */
1260 Bs3RegCtxSetRipCsFromLnkPtr(&Ctx, s_aTests[i].pfnStart);
1261 if (BS3_MODE_IS_16BIT_SYS(bTestMode))
1262 g_uBs3TrapEipHint = Ctx.rip.u32;
1263
1264 if (BS3_MODE_IS_16BIT_CODE(bTestMode))
1265 Ctx.rsp.u32 |= UINT32_C(0x34560000); /* This part should be ignored, as the stack is also 16-bit. */
1266
1267 /* The basic expected trap context. */
1268 TrapExpect.bXcpt = X86_XCPT_UD;
1269 Bs3MemCpy(&TrapExpect.Ctx, &Ctx, sizeof(TrapExpect.Ctx));
1270 TrapExpect.Ctx.rsp.u += s_aTests[i].cbAdjSp;
1271 TrapExpect.Ctx.rip.u += s_aTests[i].offUd2;
1272 if (!BS3_MODE_IS_16BIT_SYS(bTestMode))
1273 TrapExpect.Ctx.rflags.u32 |= X86_EFL_RF;
1274
1275 g_usBs3TestStep = i;
1276
1277 if (s_aTests[i].cbAdjSp < 0)
1278 {
1279 /*
1280 * PUSH
1281 */
1282 RTUINT64U u64ExpectPushed;
1283 BS3PTRUNION PtrStack2;
1284 PtrStack2.pb = PtrStack.pb + s_aTests[i].cbAdjSp;
1285
1286 bs3CpuWeird1_PushPopInitStack(PtrStack);
1287 u64ExpectPushed.u = *PtrStack2.pu64;
1288 switch (s_aTests[i].cbAdjSp)
1289 {
1290 case -2: u64ExpectPushed.au16[0] = pReg->au16[0]; break;
1291 case -4: u64ExpectPushed.au32[0] = pReg->au32[0]; break;
1292 case -8: u64ExpectPushed.au64[0] = pReg->u; break;
1293 }
1294
1295 for (iRep = 0; iRep < 256; iRep++)
1296 {
1297 bs3CpuWeird1_PushPopInitStack(PtrStack);
1298 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1299 if (bs3CpuWeird1_ComparePushPop(&TrapCtx, &TrapExpect))
1300 break;
1301 if (*PtrStack2.pu64 != u64ExpectPushed.u)
1302 {
1303 Bs3TestFailedF("%u - Unexpected stack value after push: %RX64, expected %RX64",
1304 g_usBs3TestStep, *PtrStack2.pu64, u64ExpectPushed.u);
1305 break;
1306 }
1307 }
1308 }
1309 else
1310 {
1311 /*
1312 * POP.
1313 *
1314 * This is where it gets interesting. When popping a partial
1315 * SP and the upper part also changes, this is preserved. I.e.
1316 * the CPU first writes the updated RSP then the register or
1317 * register part that it popped.
1318 */
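 /* Rough model of the behaviour described above, for a 16-bit "pop sp" on a
 32-bit stack (illustration only):
 uValue = read16(SS:ESP);
 ESP += 2; (the full stack pointer update happens first)
 SP = uValue; (only the popped part is overwritten afterwards)
 so the upper half of ESP keeps the incremented value while the low word
 comes from the stack. */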
1319 PBS3REG const pExpectReg = &(&TrapExpect.Ctx.rax)[s_aTests[i].idxReg];
1320 RTUINT64U u64PopValue;
1321
1322 bs3CpuWeird1_PushPopInitStack(PtrStack);
1323 u64PopValue.u = *PtrStack.pu64;
1324 if (bTestMode != BS3_MODE_RM)
1325 {
1326 /* When in ring-3 we can put whatever we want on the stack, as the UD2 will cause a stack switch. */
1327 switch (s_aTests[i].cbAdjSp)
1328 {
1329 case 2: u64PopValue.au16[0] = ~pReg->au16[0] ^ UINT16_C(0xf394); break;
1330 case 4: u64PopValue.au32[0] = ~pReg->au32[0] ^ UINT32_C(0x9e501ab3); break;
1331 case 8: u64PopValue.au64[0] = ~pReg->u ^ UINT64_C(0xbf5fedd520fe9a45); break;
1332 }
1333 }
1334 else
1335 {
1336 /* In real mode we have to be a little more careful. */
1337 if (s_aTests[i].cbAdjSp == 2)
1338 u64PopValue.au16[0] = pReg->au16[0] - 382;
1339 else
1340 {
1341 u64PopValue.au16[0] = pReg->au16[0] - 258;
1342 u64PopValue.au16[1] = ~pReg->au16[1];
1343 }
1344 }
1345
1346 switch (s_aTests[i].cbAdjSp)
1347 {
1348 case 2:
1349 pExpectReg->au16[0] = u64PopValue.au16[0];
1350 break;
1351 case 4:
1352 pExpectReg->au32[0] = u64PopValue.au32[0];
1353 pExpectReg->au32[1] = 0;
1354 break;
1355 case 8:
1356 pExpectReg->u = u64PopValue.u;
1357 break;
1358 }
1359 //Bs3TestPrintf("iTest=%u/%d: %RX64 -> %RX64\n", i, s_aTests[i].cbAdjSp, pReg->u, pExpectReg->u);
1360
1361 for (iRep = 0; iRep < 256; iRep++)
1362 {
1363 bs3CpuWeird1_PushPopInitStack(PtrStack);
1364 *PtrStack.pu64 = u64PopValue.u;
1365 Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
1366 if (bs3CpuWeird1_ComparePushPop(&TrapCtx, &TrapExpect))
1367 break;
1368 }
1369 }
1370
1371 /* Restore context (except cs:rsp): */
1372 pReg->u = SavedReg.u;
1373 Ctx.rsp.u = SavedRsp.u;
1374 }
1375 }
1376
1377 if (pbAltStack)
1378 Bs3SlabFree(&g_Bs3Mem4KUpperTiled.Core, Bs3SelPtrToFlat(pbAltStack), 17);
1379
1380 return 0;
1381}
1382