VirtualBox

source: vbox/trunk/src/VBox/ValidationKit/bootsectors/bs3-cpu-basic-2-pf.c32@93115

Last change on this file since 93115 was 93115, checked in by vboxsync, 3 years ago

scm --update-copyright-year

1/* $Id: bs3-cpu-basic-2-pf.c32 93115 2022-01-01 11:31:46Z vboxsync $ */
2/** @file
3 * BS3Kit - bs3-cpu-basic-2, 32-bit C code for testing \#PF.
4 */
5
6/*
7 * Copyright (C) 2007-2022 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 *
17 * The contents of this file may alternatively be used under the terms
18 * of the Common Development and Distribution License Version 1.0
19 * (CDDL) only, as it comes in the "COPYING.CDDL" file of the
20 * VirtualBox OSE distribution, in which case the provisions of the
21 * CDDL are applicable instead of those of the GPL.
22 *
23 * You may elect to license modified versions of this file under the
24 * terms and conditions of either the GPL or the CDDL or both.
25 */
26
27
28/*********************************************************************************************************************************
29* Header Files *
30*********************************************************************************************************************************/
31#include <bs3kit.h>
32#include <iprt/asm-amd64-x86.h>
33
34
35/*********************************************************************************************************************************
36* Defined Constants And Macros *
37*********************************************************************************************************************************/
38#define CHECK_MEMBER(a_pszMode, a_szName, a_szFmt, a_Actual, a_Expected) \
39 do { \
40 if ((a_Actual) == (a_Expected)) { /* likely */ } \
41 else Bs3TestFailedF("%u - %s: " a_szName "=" a_szFmt " expected " a_szFmt, \
42 g_usBs3TestStep, (a_pszMode), (a_Actual), (a_Expected)); \
43 } while (0)
44
45#define BS3CPUBASIC2PF_HALT(pThis) \
46 do { \
47 Bs3TestPrintf("Halting: pteworker=%s store=%s accessor=%s\n", \
48 pThis->pszPteWorker, pThis->pszStore, pThis->pszAccessor); \
49 ASMHalt(); \
50 } while (0)
51
52
53/** @def BS3CPUBASIC2PF_FASTER
54 * This is useful for IEM execution. */
55#define BS3CPUBASIC2PF_FASTER
56
57
58/*********************************************************************************************************************************
59* Structures and Typedefs *
60*********************************************************************************************************************************/
61typedef void BS3_CALL FNBS3CPUBASIC2PFSNIPPET(void);
62
63typedef struct FNBS3CPUBASIC2PFTSTCODE
64{
65 FNBS3CPUBASIC2PFSNIPPET *pfn;
66 uint8_t offUd2;
67
68} FNBS3CPUBASIC2PFTSTCODE;
69typedef FNBS3CPUBASIC2PFTSTCODE const *PCFNBS3CPUBASIC2PFTSTCODE;
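/* Each FNBS3CPUBASIC2PFTSTCODE entry pairs a tiny code snippet ending in UD2 with offUd2,
   the offset of that UD2 from the snippet start; when an access completes without faulting,
   the snippet is expected to raise #UD with the PC advanced by offUd2. */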
70
71typedef struct BS3CPUBASIC2PFTTSTCMNMODE
72{
73 uint8_t bMode;
74 FNBS3CPUBASIC2PFTSTCODE MovLoad;
75 FNBS3CPUBASIC2PFTSTCODE MovStore;
76 FNBS3CPUBASIC2PFTSTCODE Xchg;
77 FNBS3CPUBASIC2PFTSTCODE CmpXchg;
78 FNBS3CPUBASIC2PFTSTCODE DivMem;
79} BS3CPUBASIC2PFTTSTCMNMODE;
80typedef BS3CPUBASIC2PFTTSTCMNMODE const *PCBS3CPUBASIC2PFTTSTCMNMODE;
81
82
83typedef struct BS3CPUBASIC2PFSTATE
84{
85 /** The mode we're currently testing. */
86 uint8_t bMode;
87 /** The size of a natural access. */
88 uint8_t cbAccess;
89 /** The common mode functions. */
90 PCBS3CPUBASIC2PFTTSTCMNMODE pCmnMode;
91 /** Address of the test area (alias). */
92 union
93 {
94 uint64_t u;
95 uint32_t u32;
96 uint16_t u16;
97 } uTestAddr;
98 /** Pointer to the original test area mapping. */
99 uint8_t *pbOrgTest;
100 /** The size of the test area (at least two pages). */
101 uint32_t cbTest;
102 /** cbTest expressed as a page count. */
103 uint16_t cTestPages;
104 /** The number of PTEs in the first page table, i.e. what we can
105 * safely access via PgInfo.u.Pae.pPte/PgInfo.u.Legacy.pPte. */
106 uint16_t cTest1stPtes;
107 /** The number of PDEs for cTestPages. */
108 uint16_t cTestPdes;
109 /** 16-bit data selector for uTestAddr.u32. */
110 uint16_t uSel16TestData;
111 /** 16-bit code selector for uTestAddr.u32. */
112 uint16_t uSel16TestCode;
113 /** The size of the PDE backup. */
114 uint16_t cbPdeBackup;
115 /** The size of the PTE backup. */
116 uint16_t cbPteBackup;
117 /** Test paging information for uTestAddr.u. */
118 BS3PAGINGINFO4ADDR PgInfo;
119
120 /** Set if we can use the INVLPG instruction. */
121 bool fUseInvlPg;
122 /** Physical addressing width. */
123 uint8_t cBitsPhysWidth;
124
125 /** Reflects CR0.WP. */
126 bool fWp;
127 /** Reflects EFER.NXE & CR4.PAE. */
128 bool fNxe;
129
130 const char *pszAccessor;
131 const char *pszPteWorker;
132 const char *pszStore;
133
134 /** Trap context frame. */
135 BS3TRAPFRAME TrapCtx;
136 /** Expected result context. */
137 BS3REGCTX ExpectCtx;
138
139 /** The PML4E backup. */
140 uint64_t u64Pml4eBackup;
141 /** The PDPTE backup. */
142 uint64_t u64PdpteBackup;
143 /** The PDE backup. */
144 uint64_t au64PdeBackup[16];
145 /** The PTE backup. */
146 union
147 {
148 uint32_t Legacy[X86_PG_ENTRIES];
149 uint64_t Pae[X86_PG_PAE_ENTRIES];
150 } PteBackup;
151
152} BS3CPUBASIC2PFSTATE;
153/** Pointer to state for the \#PF test. */
154typedef BS3CPUBASIC2PFSTATE *PBS3CPUBASIC2PFSTATE;
155
156
157/**
158 * Paging modification worker.
159 */
160typedef struct BS3CPUBASIC2PFMODPT
161{
162 const char *pszName;
163 uint32_t fPresent : 1;
164 uint32_t fUser : 1;
165 uint32_t fWriteable : 1;
166 uint32_t fNoExecute : 1;
167 uint32_t fReserved : 1;
168 uint32_t uModifyArg : 24;
169 void (*pfnModify)(PBS3CPUBASIC2PFSTATE pThis, unsigned iStore, struct BS3CPUBASIC2PFMODPT const *pEntry,
170 uint32_t fClearMask, uint32_t fSetMask);
171 bool (*pfnApplicable)(PBS3CPUBASIC2PFSTATE pThis, struct BS3CPUBASIC2PFMODPT const *pEntry);
172} BS3CPUBASIC2PFMODPT;
173typedef BS3CPUBASIC2PFMODPT const *PCBS3CPUBASIC2PFMODPT;
174
175/** Page level protection. Alternative is page directory or higher level. */
176#define BS3CB2PFACC_F_PAGE_LEVEL RT_BIT(0)
177/** Directly access the boobytrapped page, no edging on or off it. */
178#define BS3CB2PFACC_F_DIRECT RT_BIT(1)
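/* The accessors below use these flags to pick the offsets they exercise: with
   BS3CB2PFACC_F_DIRECT only the start of the booby-trapped 2nd page is touched, otherwise
   the access window slides across the page boundaries on either side of it.
   BS3CB2PFACC_F_PAGE_LEVEL indicates the protection sits in the PTE (2nd page only) rather
   than in a higher-level paging structure covering the whole test area. */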
179
180/**
181 * Memory accessor.
182 */
183typedef struct BS3CPUBASIC2PFACCESSOR
184{
185 /** Accessor name. */
186 const char *pszName;
187 /** The accessor. */
188 void (*pfnAccessor)(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pCtx, uint32_t fFlags, uint8_t bXcpt, uint8_t uPfErrCd);
189 /** The X86_TRAP_PF_XXX access flags this access sets. */
190 uint32_t fAccess;
191 /** The exception when things are fine. */
192 uint8_t bOkayXcpt;
193} BS3CPUBASIC2PFACCESSOR;
194typedef const BS3CPUBASIC2PFACCESSOR *PCBS3CPUBASIC2PFACCESSOR;
195
196
197/*********************************************************************************************************************************
198* Internal Functions *
199*********************************************************************************************************************************/
200FNBS3TESTDOMODE bs3CpuBasic2_RaiseXcpt0e_c32;
201
202/* bs3-cpu-basic-2-asm.asm: */
203void BS3_CALL bs3CpuBasic2_Store_mov_c32(void *pvDst, uint32_t uValue, uint32_t uOld);
204void BS3_CALL bs3CpuBasic2_Store_xchg_c32(void *pvDst, uint32_t uValue, uint32_t uOld);
205void BS3_CALL bs3CpuBasic2_Store_cmpxchg_c32(void *pvDst, uint32_t uValue, uint32_t uOld);
206
207
208/* bs3-cpu-basic-2-template.mac: */
209FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c16;
210FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c16;
211FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c16;
212FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c16;
213FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c16;
214
215FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c32;
216FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c32;
217FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c32;
218FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c32;
219FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c32;
220
221FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_mov_ax_ds_bx__ud2_c64;
222FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_mov_ds_bx_ax__ud2_c64;
223FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_xchg_ds_bx_ax__ud2_c64;
224FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c64;
225FNBS3CPUBASIC2PFSNIPPET bs3CpuBasic2_div_ds_bx__ud2_c64;
226
227
228/*********************************************************************************************************************************
229* Global Variables *
230*********************************************************************************************************************************/
231/** Page table entry store methods (assembly workers, see bs3-cpu-basic-2-asm.asm). */
232static const struct
233{
234 const char *pszName;
235 void (BS3_CALL *pfnStore)(void *pvDst, uint32_t uValue, uint32_t uOld);
236} g_aStoreMethods[] =
237{
238 { "mov", bs3CpuBasic2_Store_mov_c32 },
239 { "xchg", bs3CpuBasic2_Store_xchg_c32 },
240 { "cmpxchg", bs3CpuBasic2_Store_cmpxchg_c32 },
241};
242
243
244static const BS3CPUBASIC2PFTTSTCMNMODE g_aCmnModes[] =
245{
246 {
247 BS3_MODE_CODE_16,
248 { bs3CpuBasic2_mov_ax_ds_bx__ud2_c16, 2 },
249 { bs3CpuBasic2_mov_ds_bx_ax__ud2_c16, 2 },
250 { bs3CpuBasic2_xchg_ds_bx_ax__ud2_c16, 2 },
251 { bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c16, 3 },
252 { bs3CpuBasic2_div_ds_bx__ud2_c16, 2 },
253 },
254 {
255 BS3_MODE_CODE_32,
256 { bs3CpuBasic2_mov_ax_ds_bx__ud2_c32, 2 },
257 { bs3CpuBasic2_mov_ds_bx_ax__ud2_c32, 2 },
258 { bs3CpuBasic2_xchg_ds_bx_ax__ud2_c32, 2 },
259 { bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c32, 3 },
260 { bs3CpuBasic2_div_ds_bx__ud2_c32, 2 },
261 },
262 {
263 BS3_MODE_CODE_64,
264 { bs3CpuBasic2_mov_ax_ds_bx__ud2_c64, 2 + 1 },
265 { bs3CpuBasic2_mov_ds_bx_ax__ud2_c64, 2 + 1 },
266 { bs3CpuBasic2_xchg_ds_bx_ax__ud2_c64, 2 + 1 },
267 { bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c64, 3 + 1 },
268 { bs3CpuBasic2_div_ds_bx__ud2_c64, 2 + 1 },
269 },
270 {
271 BS3_MODE_CODE_V86,
272 { bs3CpuBasic2_mov_ax_ds_bx__ud2_c16, 2 },
273 { bs3CpuBasic2_mov_ds_bx_ax__ud2_c16, 2 },
274 { bs3CpuBasic2_xchg_ds_bx_ax__ud2_c16, 2 },
275 { bs3CpuBasic2_cmpxchg_ds_bx_cx__ud2_c16, 3 },
276 { bs3CpuBasic2_div_ds_bx__ud2_c16, 2 },
277 },
278};
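/* The offUd2 values above are simply the length of the access instruction: two bytes for the
   MOV/XCHG/DIV forms, three for CMPXCHG (0F-prefixed opcode), plus one for the REX prefix in
   64-bit code. */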
279
280
281/**
282 * Compares a CPU trap.
283 */
284static void bs3CpuBasic2Pf_CompareCtx(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pExpectCtx, int cbPcAdjust,
285 uint8_t bXcpt, unsigned uErrCd)
286{
287 const char *pszHint = "xxxx";
288 uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
289 uint32_t fExtraEfl;
290
291 CHECK_MEMBER(pszHint, "bXcpt", "%#04x", pThis->TrapCtx.bXcpt, bXcpt);
292 CHECK_MEMBER(pszHint, "uErrCd", "%#06RX16", (uint16_t)pThis->TrapCtx.uErrCd, (uint16_t)uErrCd); /* 486 only writes a word */
293
295 if (BS3_MODE_IS_16BIT_SYS(g_bBs3CurrentMode))
296 fExtraEfl = 0;
297 else
298 fExtraEfl = X86_EFL_RF;
299 Bs3TestCheckRegCtxEx(&pThis->TrapCtx.Ctx, pExpectCtx, cbPcAdjust, 0 /*cbSpAdjust*/, fExtraEfl, pszHint, g_usBs3TestStep);
300 if (Bs3TestSubErrorCount() != cErrorsBefore)
301 {
302 Bs3TrapPrintFrame(&pThis->TrapCtx);
303#if 1
304 Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
305 Bs3TestPrintf("Halting: bXcpt=%#x uErrCd=%#x\n", bXcpt, uErrCd);
306 BS3CPUBASIC2PF_HALT(pThis);
307#endif
308 }
309}
310
311
312/**
313 * Compares a CPU trap.
314 */
315static void bs3CpuBasic2Pf_CompareSimpleCtx(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pStartCtx, int offAddPC,
316 uint8_t bXcpt, unsigned uErrCd, uint64_t uCr2)
317{
318 const char *pszHint = "xxxx";
319 uint16_t const cErrorsBefore = Bs3TestSubErrorCount();
320 uint64_t const uSavedCr2 = pStartCtx->cr2.u;
321 uint32_t fExtraEfl;
322
323 CHECK_MEMBER(pszHint, "bXcpt", "%#04x", pThis->TrapCtx.bXcpt, bXcpt);
324 CHECK_MEMBER(pszHint, "uErrCd", "%#06RX16", (uint16_t)pThis->TrapCtx.uErrCd, (uint16_t)uErrCd); /* 486 only writes a word */
325
327 if (BS3_MODE_IS_16BIT_SYS(g_bBs3CurrentMode))
328 fExtraEfl = 0;
329 else
330 fExtraEfl = X86_EFL_RF;
331 pStartCtx->cr2.u = uCr2;
332 Bs3TestCheckRegCtxEx(&pThis->TrapCtx.Ctx, pStartCtx, offAddPC, 0 /*cbSpAdjust*/, fExtraEfl, pszHint, g_usBs3TestStep);
333 pStartCtx->cr2.u = uSavedCr2;
334 if (Bs3TestSubErrorCount() != cErrorsBefore)
335 {
336 Bs3TrapPrintFrame(&pThis->TrapCtx);
337#if 1
338 Bs3TestPrintf("Halting: g_uBs3CpuDetected=%#x\n", g_uBs3CpuDetected);
339 Bs3TestPrintf("Halting: bXcpt=%#x uErrCd=%#x\n", bXcpt, uErrCd);
340 BS3CPUBASIC2PF_HALT(pThis);
341#endif
342 }
343}
344
345
346/**
347 * Checks the trap context for a simple \#PF trap.
348 */
349static void bs3CpuBasic2Pf_CompareSimplePf(PBS3CPUBASIC2PFSTATE pThis, PCBS3REGCTX pStartCtx, int offAddPC,
350 unsigned uErrCd, uint64_t uCr2)
351{
352 bs3CpuBasic2Pf_CompareSimpleCtx(pThis, (PBS3REGCTX)pStartCtx, offAddPC, X86_XCPT_PF, uErrCd, uCr2);
353}
354
355/**
356 * Checks the trap context for a simple \#UD trap.
357 */
358static void bs3CpuBasic2Pf_CompareSimpleUd(PBS3CPUBASIC2PFSTATE pThis, PCBS3REGCTX pStartCtx, int offAddPC)
359{
360 bs3CpuBasic2Pf_CompareSimpleCtx(pThis, (PBS3REGCTX)pStartCtx, offAddPC, X86_XCPT_UD, 0, pStartCtx->cr2.u);
361}
362
363
364/**
365 * Flushes the TLB, making sure global pages are flushed as well.
366 */
367static void bs3CpuBasic2Pf_FlushAll(void)
368{
369 if ((g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486)
370 {
371 uint32_t uCr4 = ASMGetCR4();
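/* A plain CR3 reload does not evict global TLB entries, so when PGE (or PCIDE) is enabled
   we toggle those CR4 bits instead to force a full flush. */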
372 if (uCr4 & (X86_CR4_PGE | X86_CR4_PCIDE))
373 {
374 ASMSetCR4(uCr4 & ~(X86_CR4_PGE | X86_CR4_PCIDE));
375 ASMSetCR4(uCr4);
376 return;
377 }
378 }
379
380 ASMReloadCR3();
381}
382
383
384/**
385 * Restores all the paging entries from backup and flushes everything.
386 *
387 * @param pThis Test state data.
388 */
389static void bs3CpuBasic2Pf_RestoreFromBackups(PBS3CPUBASIC2PFSTATE pThis)
390{
391 Bs3MemCpy(pThis->PgInfo.u.Legacy.pPte, &pThis->PteBackup, pThis->cbPteBackup);
392 Bs3MemCpy(pThis->PgInfo.u.Legacy.pPde, pThis->au64PdeBackup, pThis->cbPdeBackup);
393 if (pThis->PgInfo.cEntries > 2)
394 pThis->PgInfo.u.Pae.pPdpe->u = pThis->u64PdpteBackup;
395 if (pThis->PgInfo.cEntries > 3)
396 pThis->PgInfo.u.Pae.pPml4e->u = pThis->u64Pml4eBackup;
397 bs3CpuBasic2Pf_FlushAll();
398}
399
400
401/** @name BS3CPUBASIC2PFACCESSOR::pfnAccessor Implementations
402 * @{ */
403
404static void bs3CpuBasic2Pf_DoExec(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pCtx, uint32_t fFlags, uint8_t bXcpt, uint8_t uPfErrCd)
405{
406 uint8_t *pbOrgTest = pThis->pbOrgTest;
407 unsigned offEnd = fFlags & BS3CB2PFACC_F_DIRECT ? X86_PAGE_SIZE + 1 : X86_PAGE_SIZE + 2;
408 unsigned off = fFlags & BS3CB2PFACC_F_DIRECT ? offEnd - 1 : X86_PAGE_SIZE - 5;
409
410 for (; off < offEnd; off++)
411 {
412 /* Emit a little bit of code (using the original allocation mapping) and point pCtx to it. */
413 pbOrgTest[off + 0] = X86_OP_PRF_SIZE_ADDR;
414 pbOrgTest[off + 1] = X86_OP_PRF_SIZE_OP;
415 pbOrgTest[off + 2] = 0x90; /* NOP */
416 pbOrgTest[off + 3] = 0x0f; /* UD2 */
417 pbOrgTest[off + 4] = 0x0b;
418 pbOrgTest[off + 5] = 0xeb; /* JMP $-4 */
419 pbOrgTest[off + 6] = 0xfc;
420 switch (pThis->bMode & BS3_MODE_CODE_MASK)
421 {
422 default:
423 pCtx->rip.u = pThis->uTestAddr.u + off;
424 break;
425 case BS3_MODE_CODE_16:
426 Bs3SelSetup16BitCode(&Bs3GdteSpare01, pThis->uTestAddr.u32, pCtx->bCpl);
427 pCtx->rip.u = off;
428 pCtx->cs = BS3_SEL_SPARE_01 | pCtx->bCpl;
429 break;
430 case BS3_MODE_CODE_V86:
431 /** @todo fix me. */
432 return;
433 }
434 //Bs3TestPrintf("cs:rip=%04x:%010RX64 iRing=%d\n", pCtx->cs, pCtx->rip.u, pCtx->bCpl);
435
436 Bs3TrapSetJmpAndRestore(pCtx, &pThis->TrapCtx);
437 //Bs3TestPrintf("off=%#06x bXcpt=%#x uErrCd=%#RX64\n", off, pThis->TrapCtx.bXcpt, pThis->TrapCtx.uErrCd);
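/* Expected outcome: #UD at the UD2 when no #PF is armed, or when page-level protection is
   used and the whole prefixed-NOP + UD2 sequence fits on the 1st page (off < X86_PAGE_SIZE - 4).
   If the snippet starts on the guarded page, #PF with CR2 = the snippet address. Otherwise the
   instruction fetch crosses onto the guarded page: CR2 is the page boundary, and RIP has
   advanced past the prefixed NOP when that NOP still completed on the 1st page. */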
438 if ( bXcpt != X86_XCPT_PF
439 || ((fFlags & BS3CB2PFACC_F_PAGE_LEVEL) && off < X86_PAGE_SIZE - 4))
440 bs3CpuBasic2Pf_CompareSimpleUd(pThis, pCtx, 3);
441 else if (!(fFlags & BS3CB2PFACC_F_PAGE_LEVEL) || off >= X86_PAGE_SIZE)
442 bs3CpuBasic2Pf_CompareSimplePf(pThis, pCtx, 0, uPfErrCd, pThis->uTestAddr.u + off);
443 else
444 bs3CpuBasic2Pf_CompareSimplePf(pThis, pCtx,
445 off + 3 == X86_PAGE_SIZE || off + 4 == X86_PAGE_SIZE
446 ? RT_MIN(X86_PAGE_SIZE, off + 3) - off : 0,
447 uPfErrCd, pThis->uTestAddr.u + RT_MIN(X86_PAGE_SIZE, off + 4));
448 }
449}
450
451
452static void bs3CpuBasic2Pf_SetCsEip(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pCtx, PCFNBS3CPUBASIC2PFTSTCODE pCode)
453{
454 switch (pThis->bMode & BS3_MODE_CODE_MASK)
455 {
456 default:
457 pCtx->rip.u = (uintptr_t)pCode->pfn;
458 break;
459
460 case BS3_MODE_CODE_16:
461 {
462 uint32_t uFar16 = Bs3SelFlatCodeToProtFar16((uintptr_t)pCode->pfn);
463 pCtx->rip.u = (uint16_t)uFar16;
464 pCtx->cs = (uint16_t)(uFar16 >> 16) | pCtx->bCpl;
465 pCtx->cs += (uint16_t)pCtx->bCpl << BS3_SEL_RING_SHIFT;
466 break;
467 }
468
469 case BS3_MODE_CODE_V86:
470 {
471 uint32_t uFar16 = Bs3SelFlatCodeToRealMode((uintptr_t)pCode->pfn);
472 pCtx->rip.u = (uint16_t)uFar16;
473 pCtx->cs = (uint16_t)(uFar16 >> 16);
474 break;
475 }
476 }
477}
478
479
480/**
481 * Test a simple load instruction around the edges of page two.
482 *
483 * @param pThis The test state data.
484 * @param pCtx The test context.
485 * @param fFlags BS3CB2PFACC_F_XXX.
486 * @param bXcpt X86_XCPT_PF if this can cause \#PFs, otherwise
487 * X86_XCPT_UD.
488 * @param uPfErrCd The error code for \#PFs.
489 */
490static void bs3CpuBasic2Pf_DoMovLoad(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pCtx, uint32_t fFlags, uint8_t bXcpt, uint8_t uPfErrCd)
491{
492 static uint64_t const s_uValue = UINT64_C(0x7c4d0114428d);
493 uint64_t uExpectRax;
494 unsigned i;
495
496 /*
497 * Adjust the incoming context and calculate our expectations.
498 */
499 bs3CpuBasic2Pf_SetCsEip(pThis, pCtx, &pThis->pCmnMode->MovLoad);
500 Bs3MemCpy(&pThis->ExpectCtx, pCtx, sizeof(pThis->ExpectCtx));
501 switch (pThis->bMode & BS3_MODE_CODE_MASK)
502 {
503 case BS3_MODE_CODE_16:
504 case BS3_MODE_CODE_V86:
505 uExpectRax = (uint16_t)s_uValue | (pCtx->rax.u & UINT64_C(0xffffffffffff0000));
506 break;
507 case BS3_MODE_CODE_32:
508 uExpectRax = (uint32_t)s_uValue | (pCtx->rax.u & UINT64_C(0xffffffff00000000));
509 break;
510 case BS3_MODE_CODE_64:
511 uExpectRax = s_uValue;
512 break;
513 }
514 if (uExpectRax == pCtx->rax.u)
515 pCtx->rax.u = ~pCtx->rax.u;
516
517 /*
518 * Make two approaches to the test page (the 2nd one):
519 * - i=0: Start on the 1st page and edge into the 2nd.
520 * - i=1: Start at the end of the 2nd page and edge off it and into the 3rd.
521 */
522 for (i = 0; i < 2; i++)
523 {
524 unsigned off = fFlags & BS3CB2PFACC_F_DIRECT ? X86_PAGE_SIZE : X86_PAGE_SIZE * (i + 1) - pThis->cbAccess;
525 unsigned offEnd = fFlags & BS3CB2PFACC_F_DIRECT ? off + 1 : X86_PAGE_SIZE * (i + 1) + (i == 0 ? 8 : 7);
526
527 for (; off < offEnd; off++)
528 {
529 *(uint64_t *)&pThis->pbOrgTest[off] = s_uValue;
530 if (BS3_MODE_IS_16BIT_CODE(pThis->bMode))
531 pThis->ExpectCtx.rbx.u = pCtx->rbx.u = off;
532 else
533 pThis->ExpectCtx.rbx.u = pCtx->rbx.u = pThis->uTestAddr.u + off;
534
535 Bs3TrapSetJmpAndRestore(pCtx, &pThis->TrapCtx);
536 //Bs3TestPrintf("off=%#06x bXcpt=%#x uErrCd=%#RX64\n", off, pThis->TrapCtx.bXcpt, pThis->TrapCtx.uErrCd);
537
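/* A #PF is only expected when page-level protection is armed and the read actually touches
   the guarded 2nd page, i.e. X86_PAGE_SIZE - cbAccess < off < 2 * X86_PAGE_SIZE; for an
   access straddling the boundary, CR2 reports the start of the 2nd page. */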
538 if ( bXcpt != X86_XCPT_PF
539 || ((fFlags & BS3CB2PFACC_F_PAGE_LEVEL) && off >= X86_PAGE_SIZE * 2)
540 || ((fFlags & BS3CB2PFACC_F_PAGE_LEVEL) && off <= X86_PAGE_SIZE - pThis->cbAccess) )
541 {
542 pThis->ExpectCtx.rax.u = uExpectRax;
543 bs3CpuBasic2Pf_CompareCtx(pThis, &pThis->ExpectCtx, pThis->pCmnMode->MovLoad.offUd2, X86_XCPT_UD, 0 /*uErrCd*/);
544 pThis->ExpectCtx.rax = pCtx->rax;
545 }
546 else
547 {
548 if (off < X86_PAGE_SIZE)
549 pThis->ExpectCtx.cr2.u = pThis->uTestAddr.u + X86_PAGE_SIZE;
550 else
551 pThis->ExpectCtx.cr2.u = pThis->uTestAddr.u + off;
552 bs3CpuBasic2Pf_CompareCtx(pThis, &pThis->ExpectCtx, 0 /*cbPcAdjust*/, bXcpt, uPfErrCd);
553 pThis->ExpectCtx.cr2 = pCtx->cr2;
554 }
555 }
556
557 if (fFlags & BS3CB2PFACC_F_DIRECT)
558 break;
559 }
560}
561
562
563/**
564 * Test a simple store instruction around the edges of page two.
565 *
566 * @param pThis The test state data.
567 * @param pCtx The test context.
568 * @param fFlags BS3CB2PFACC_F_XXX.
569 * @param bXcpt X86_XCPT_PF if this can cause \#PFs, otherwise
570 * X86_XCPT_UD.
571 * @param uPfErrCd The error code for \#PFs.
572 */
573static void bs3CpuBasic2Pf_DoMovStore(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pCtx, uint32_t fFlags,
574 uint8_t bXcpt, uint8_t uPfErrCd)
575{
576 static uint64_t const s_uValue = UINT64_C(0x3af45ead86a34a26);
577 static uint64_t const s_uValueFlipped = UINT64_C(0xc50ba152795cb5d9);
578 uint64_t const uRaxSaved = pCtx->rax.u;
579 uint64_t uExpectStored;
580 unsigned i;
581
582 /*
583 * Adjust the incoming context and calculate our expectations.
584 */
585 bs3CpuBasic2Pf_SetCsEip(pThis, pCtx, &pThis->pCmnMode->MovStore);
586 if ((pThis->bMode & BS3_MODE_CODE_MASK) != BS3_MODE_CODE_64)
587 pCtx->rax.u = (uint32_t)s_uValue; /* leave the upper part zero */
588 else
589 pCtx->rax.u = s_uValue;
590
591 Bs3MemCpy(&pThis->ExpectCtx, pCtx, sizeof(pThis->ExpectCtx));
592 switch (pThis->bMode & BS3_MODE_CODE_MASK)
593 {
594 case BS3_MODE_CODE_16:
595 case BS3_MODE_CODE_V86:
596 uExpectStored = (uint16_t)s_uValue | (s_uValueFlipped & UINT64_C(0xffffffffffff0000));
597 break;
598 case BS3_MODE_CODE_32:
599 uExpectStored = (uint32_t)s_uValue | (s_uValueFlipped & UINT64_C(0xffffffff00000000));
600 break;
601 case BS3_MODE_CODE_64:
602 uExpectStored = s_uValue;
603 break;
604 }
605
606 /*
607 * Make two approaches to the test page (the 2nd one):
608 * - i=0: Start on the 1st page and edge into the 2nd.
609 * - i=1: Start at the end of the 2nd page and edge off it and into the 3rd.
610 */
611 for (i = 0; i < 2; i++)
612 {
613 unsigned off = fFlags & BS3CB2PFACC_F_DIRECT ? X86_PAGE_SIZE : X86_PAGE_SIZE * (i + 1) - pThis->cbAccess;
614 unsigned offEnd = fFlags & BS3CB2PFACC_F_DIRECT ? off + 1 : X86_PAGE_SIZE * (i + 1) + (i == 0 ? 8 : 7);
615 for (; off < offEnd; off++)
616 {
617 *(uint64_t *)&pThis->pbOrgTest[off] = s_uValueFlipped;
618 if (BS3_MODE_IS_16BIT_CODE(pThis->bMode))
619 pThis->ExpectCtx.rbx.u = pCtx->rbx.u = off;
620 else
621 pThis->ExpectCtx.rbx.u = pCtx->rbx.u = pThis->uTestAddr.u + off;
622
623 Bs3TrapSetJmpAndRestore(pCtx, &pThis->TrapCtx);
624 //Bs3TestPrintf("off=%#06x bXcpt=%#x uErrCd=%#RX64\n", off, pThis->TrapCtx.bXcpt, pThis->TrapCtx.uErrCd);
625
626 if ( bXcpt != X86_XCPT_PF
627 || ((fFlags & BS3CB2PFACC_F_PAGE_LEVEL) && off >= X86_PAGE_SIZE * 2)
628 || ((fFlags & BS3CB2PFACC_F_PAGE_LEVEL) && off <= X86_PAGE_SIZE - pThis->cbAccess) )
629 {
630 bs3CpuBasic2Pf_CompareCtx(pThis, &pThis->ExpectCtx, pThis->pCmnMode->MovStore.offUd2, X86_XCPT_UD, 0 /*uErrCd*/);
631 if (*(uint64_t *)&pThis->pbOrgTest[off] != uExpectStored)
632 Bs3TestFailedF("%u - %s: Stored %#RX64, expected %#RX64",
633 g_usBs3TestStep, "xxxx", *(uint64_t *)&pThis->pbOrgTest[off], uExpectStored);
634 }
635 else
636 {
637 if (off < X86_PAGE_SIZE)
638 pThis->ExpectCtx.cr2.u = pThis->uTestAddr.u + X86_PAGE_SIZE;
639 else
640 pThis->ExpectCtx.cr2.u = pThis->uTestAddr.u + off;
641 bs3CpuBasic2Pf_CompareCtx(pThis, &pThis->ExpectCtx, 0 /*cbPcAdjust*/, bXcpt, uPfErrCd);
642 pThis->ExpectCtx.cr2 = pCtx->cr2;
643 if (*(uint64_t *)&pThis->pbOrgTest[off] != s_uValueFlipped)
644 Bs3TestFailedF("%u - %s: #PF'ed store modified memory: %#RX64, expected %#RX64",
645 g_usBs3TestStep, "xxxx", *(uint64_t *)&pThis->pbOrgTest[off], s_uValueFlipped);
646
647 }
648 }
649
650 if (fFlags & BS3CB2PFACC_F_DIRECT)
651 break;
652 }
653
654 pCtx->rax.u = uRaxSaved;
655}
656
657
658/**
659 * Test an xchg instruction around the edges of page two.
660 *
661 * @param pThis The test state data.
662 * @param pCtx The test context.
663 * @param fFlags BS3CB2PFACC_F_XXX.
664 * @param bXcpt X86_XCPT_PF if this can cause \#PFs, otherwise
665 * X86_XCPT_UD.
666 * @param uPfErrCd The error code for \#PFs.
667 */
668static void bs3CpuBasic2Pf_DoXchg(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pCtx, uint32_t fFlags, uint8_t bXcpt, uint8_t uPfErrCd)
669{
670 static uint64_t const s_uValue = UINT64_C(0xea58699648e2f32c);
671 static uint64_t const s_uValueFlipped = UINT64_C(0x15a79669b71d0cd3);
672 uint64_t const uRaxSaved = pCtx->rax.u;
673 uint64_t uRaxIn;
674 uint64_t uExpectedRax;
675 uint64_t uExpectStored;
676 unsigned i;
677
678 /*
679 * Adjust the incoming context and calculate our expectations.
680 */
681 bs3CpuBasic2Pf_SetCsEip(pThis, pCtx, &pThis->pCmnMode->Xchg);
682 if ((pThis->bMode & BS3_MODE_CODE_MASK) != BS3_MODE_CODE_64)
683 uRaxIn = (uint32_t)s_uValue; /* leave the upper part zero */
684 else
685 uRaxIn = s_uValue;
686
687 Bs3MemCpy(&pThis->ExpectCtx, pCtx, sizeof(pThis->ExpectCtx));
688 switch (pThis->bMode & BS3_MODE_CODE_MASK)
689 {
690 case BS3_MODE_CODE_16:
691 case BS3_MODE_CODE_V86:
692 uExpectedRax = (uint16_t)s_uValueFlipped | (uRaxIn & UINT64_C(0xffffffffffff0000));
693 uExpectStored = (uint16_t)s_uValue | (s_uValueFlipped & UINT64_C(0xffffffffffff0000));
694 break;
695 case BS3_MODE_CODE_32:
696 uExpectedRax = (uint32_t)s_uValueFlipped | (uRaxIn & UINT64_C(0xffffffff00000000));
697 uExpectStored = (uint32_t)s_uValue | (s_uValueFlipped & UINT64_C(0xffffffff00000000));
698 break;
699 case BS3_MODE_CODE_64:
700 uExpectedRax = s_uValueFlipped;
701 uExpectStored = s_uValue;
702 break;
703 }
704
705 /*
706 * Make two approaches to the test page (the 2nd one):
707 * - i=0: Start on the 1st page and edge into the 2nd.
708 * - i=1: Start at the end of the 2nd page and edge off it and into the 3rd.
709 */
710 for (i = 0; i < 2; i++)
711 {
712 unsigned off = fFlags & BS3CB2PFACC_F_DIRECT ? X86_PAGE_SIZE : X86_PAGE_SIZE * (i + 1) - pThis->cbAccess;
713 unsigned offEnd = fFlags & BS3CB2PFACC_F_DIRECT ? off + 1 : X86_PAGE_SIZE * (i + 1) + (i == 0 ? 8 : 7);
714 for (; off < offEnd; off++)
715 {
716 *(uint64_t *)&pThis->pbOrgTest[off] = s_uValueFlipped;
717 pCtx->rax.u = uRaxIn;
718 if (BS3_MODE_IS_16BIT_CODE(pThis->bMode))
719 pThis->ExpectCtx.rbx.u = pCtx->rbx.u = off;
720 else
721 pThis->ExpectCtx.rbx.u = pCtx->rbx.u = pThis->uTestAddr.u + off;
722
723 Bs3TrapSetJmpAndRestore(pCtx, &pThis->TrapCtx);
724 //Bs3TestPrintf("off=%#06x bXcpt=%#x uErrCd=%#RX64\n", off, pThis->TrapCtx.bXcpt, pThis->TrapCtx.uErrCd);
725
726 if ( bXcpt != X86_XCPT_PF
727 || ((fFlags & BS3CB2PFACC_F_PAGE_LEVEL) && off >= X86_PAGE_SIZE * 2)
728 || ((fFlags & BS3CB2PFACC_F_PAGE_LEVEL) && off <= X86_PAGE_SIZE - pThis->cbAccess) )
729 {
730 pThis->ExpectCtx.rax.u = uExpectedRax;
731 bs3CpuBasic2Pf_CompareCtx(pThis, &pThis->ExpectCtx, pThis->pCmnMode->Xchg.offUd2, X86_XCPT_UD, 0 /*uErrCd*/);
732 if (*(uint64_t *)&pThis->pbOrgTest[off] != uExpectStored)
733 Bs3TestFailedF("%u - %s: Stored %#RX64, expected %#RX64",
734 g_usBs3TestStep, "xxxx", *(uint64_t *)&pThis->pbOrgTest[off], uExpectStored);
735 }
736 else
737 {
738 pThis->ExpectCtx.rax.u = uRaxIn;
739 if (off < X86_PAGE_SIZE)
740 pThis->ExpectCtx.cr2.u = pThis->uTestAddr.u + X86_PAGE_SIZE;
741 else
742 pThis->ExpectCtx.cr2.u = pThis->uTestAddr.u + off;
743 bs3CpuBasic2Pf_CompareCtx(pThis, &pThis->ExpectCtx, 0 /*cbPcAdjust*/, bXcpt, uPfErrCd);
744 pThis->ExpectCtx.cr2 = pCtx->cr2;
745 if (*(uint64_t *)&pThis->pbOrgTest[off] != s_uValueFlipped)
746 Bs3TestFailedF("%u - %s: #PF'ed store modified memory: %#RX64, expected %#RX64",
747 g_usBs3TestStep, "xxxx", *(uint64_t *)&pThis->pbOrgTest[off], s_uValueFlipped);
748 }
749 }
750
751 if (fFlags & BS3CB2PFACC_F_DIRECT)
752 break;
753 }
754
755 pCtx->rax.u = uRaxSaved;
756}
757
758
759/**
760 * Test a cmpxchg instruction around the edges of page two.
761 *
762 * @param pThis The test state data.
763 * @param pCtx The test context.
764 * @param fFlags BS3CB2PFACC_F_XXX.
765 * @param bXcpt X86_XCPT_PF if this can cause \#PFs, otherwise
766 * X86_XCPT_UD.
767 * @param uPfErrCd The error code for \#PFs.
768 * @param fMissmatch Whether the compare should fail so that nothing is stored
769 * (@c true), or match and perform the store (@c false).
770 */
771static void bs3CpuBasic2Pf_DoCmpXchg(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pCtx, uint32_t fFlags,
772 uint8_t bXcpt, uint8_t uPfErrCd, bool fMissmatch)
773{
774 static uint64_t const s_uValue = UINT64_C(0xea58699648e2f32c);
775 static uint64_t const s_uValueFlipped = UINT64_C(0x15a79669b71d0cd3);
776 static uint64_t const s_uValueOther = UINT64_C(0x2171239bcb044c81);
777 uint64_t const uRaxSaved = pCtx->rax.u;
778 uint64_t const uRcxSaved = pCtx->rcx.u;
779 uint64_t uRaxIn;
780 uint64_t uExpectedRax;
781 uint32_t uExpectedFlags;
782 uint64_t uExpectStored;
783 unsigned i;
784
785 /*
786 * Adjust the incoming context and calculate our expectations.
787 * Hint: CMPXCHG [xBX],xCX ; xAX compare and update implicit, ZF set to !fMissmatch.
788 */
789 bs3CpuBasic2Pf_SetCsEip(pThis, pCtx, &pThis->pCmnMode->CmpXchg);
790 if ((pThis->bMode & BS3_MODE_CODE_MASK) != BS3_MODE_CODE_64)
791 {
792 uRaxIn = (uint32_t)(fMissmatch ? s_uValueOther : s_uValueFlipped); /* leave the upper part zero */
793 pCtx->rcx.u = (uint32_t)s_uValue; /* ditto */
794 }
795 else
796 {
797 uRaxIn = fMissmatch ? s_uValueOther : s_uValueFlipped;
798 pCtx->rcx.u = s_uValue;
799 }
800 if (fMissmatch)
801 pCtx->rflags.u32 |= X86_EFL_ZF;
802 else
803 pCtx->rflags.u32 &= ~X86_EFL_ZF;
804
805 Bs3MemCpy(&pThis->ExpectCtx, pCtx, sizeof(pThis->ExpectCtx));
806 uExpectedFlags = pCtx->rflags.u32 & ~(X86_EFL_CF | X86_EFL_PF | X86_EFL_AF | X86_EFL_SF | X86_EFL_OF | X86_EFL_ZF);
807 switch (pThis->bMode & BS3_MODE_CODE_MASK)
808 {
809 case BS3_MODE_CODE_16:
810 case BS3_MODE_CODE_V86:
811 uExpectedRax = (uint16_t)s_uValueFlipped | (uRaxIn & UINT64_C(0xffffffffffff0000));
812 uExpectStored = (uint16_t)s_uValue | (s_uValueFlipped & UINT64_C(0xffffffffffff0000));
813 uExpectedFlags |= !fMissmatch ? X86_EFL_ZF | X86_EFL_PF : X86_EFL_AF;
814 break;
815 case BS3_MODE_CODE_32:
816 uExpectedRax = (uint32_t)s_uValueFlipped | (uRaxIn & UINT64_C(0xffffffff00000000));
817 uExpectStored = (uint32_t)s_uValue | (s_uValueFlipped & UINT64_C(0xffffffff00000000));
818 uExpectedFlags |= !fMissmatch ? X86_EFL_ZF | X86_EFL_PF : X86_EFL_AF;
819 break;
820 case BS3_MODE_CODE_64:
821 uExpectedRax = s_uValueFlipped;
822 uExpectStored = s_uValue;
823 uExpectedFlags |= !fMissmatch ? X86_EFL_ZF | X86_EFL_PF : X86_EFL_AF;
824 break;
825 }
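/* Note: ZF reflects the compare outcome; the PF and AF bits above are just the arithmetic
   flag results of comparing these particular constants. */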
826 if (fMissmatch)
827 uExpectStored = s_uValueFlipped;
828
829 /*
830 * Make two approaches to the test page (the 2nd one):
831 * - i=0: Start on the 1st page and edge into the 2nd.
832 * - i=1: Start at the end of the 2nd page and edge off it and into the 3rd.
833 */
834 for (i = 0; i < 2; i++)
835 {
836 unsigned off = fFlags & BS3CB2PFACC_F_DIRECT ? X86_PAGE_SIZE : X86_PAGE_SIZE * (i + 1) - pThis->cbAccess;
837 unsigned offEnd = fFlags & BS3CB2PFACC_F_DIRECT ? off + 1 : X86_PAGE_SIZE * (i + 1) + (i == 0 ? 8 : 7);
838 for (; off < offEnd; off++)
839 {
840 *(uint64_t *)&pThis->pbOrgTest[off] = s_uValueFlipped;
841 pCtx->rax.u = uRaxIn;
842 if (BS3_MODE_IS_16BIT_CODE(pThis->bMode))
843 pThis->ExpectCtx.rbx.u = pCtx->rbx.u = off;
844 else
845 pThis->ExpectCtx.rbx.u = pCtx->rbx.u = pThis->uTestAddr.u + off;
846
847 Bs3TrapSetJmpAndRestore(pCtx, &pThis->TrapCtx);
848 //Bs3TestPrintf("off=%#06x bXcpt=%#x uErrCd=%#RX64\n", off, pThis->TrapCtx.bXcpt, pThis->TrapCtx.uErrCd);
849
850 if ( bXcpt != X86_XCPT_PF
851 || ((fFlags & BS3CB2PFACC_F_PAGE_LEVEL) && off >= X86_PAGE_SIZE * 2)
852 || ((fFlags & BS3CB2PFACC_F_PAGE_LEVEL) && off <= X86_PAGE_SIZE - pThis->cbAccess) )
853 {
854 pThis->ExpectCtx.rax.u = uExpectedRax;
855 pThis->ExpectCtx.rflags.u32 = uExpectedFlags;
856 bs3CpuBasic2Pf_CompareCtx(pThis, &pThis->ExpectCtx, pThis->pCmnMode->CmpXchg.offUd2, X86_XCPT_UD, 0 /*uErrCd*/);
857 if (*(uint64_t *)&pThis->pbOrgTest[off] != uExpectStored)
858 Bs3TestFailedF("%u - %s: Stored %#RX64, expected %#RX64",
859 g_usBs3TestStep, "xxxx", *(uint64_t *)&pThis->pbOrgTest[off], uExpectStored);
860 }
861 else
862 {
863 pThis->ExpectCtx.rax.u = uRaxIn;
864 pThis->ExpectCtx.rflags = pCtx->rflags;
865 if (off < X86_PAGE_SIZE)
866 pThis->ExpectCtx.cr2.u = pThis->uTestAddr.u + X86_PAGE_SIZE;
867 else
868 pThis->ExpectCtx.cr2.u = pThis->uTestAddr.u + off;
869 bs3CpuBasic2Pf_CompareCtx(pThis, &pThis->ExpectCtx, 0 /*cbPcAdjust*/, bXcpt, uPfErrCd);
870 pThis->ExpectCtx.cr2 = pCtx->cr2;
871 if (*(uint64_t *)&pThis->pbOrgTest[off] != s_uValueFlipped)
872 Bs3TestFailedF("%u - %s: #PF'ed store modified memory: %#RX64, expected %#RX64",
873 g_usBs3TestStep, "xxxx", *(uint64_t *)&pThis->pbOrgTest[off], s_uValueFlipped);
874 }
875 }
876
877 if (fFlags & BS3CB2PFACC_F_DIRECT)
878 break;
879 }
880
881 pCtx->rax.u = uRaxSaved;
882 pCtx->rcx.u = uRcxSaved;
883}
884
885
886static void bs3CpuBasic2Pf_DoCmpXchgMiss(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pCtx, uint32_t fFlags,
887 uint8_t bXcpt, uint8_t uPfErrCd)
888{
889 bs3CpuBasic2Pf_DoCmpXchg(pThis, pCtx, fFlags, bXcpt, uPfErrCd, true /*fMissmatch*/ );
890}
891
892
893static void bs3CpuBasic2Pf_DoCmpXchgMatch(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pCtx, uint32_t fFlags,
894 uint8_t bXcpt, uint8_t uPfErrCd)
895{
896 bs3CpuBasic2Pf_DoCmpXchg(pThis, pCtx, fFlags, bXcpt, uPfErrCd , false /*fMissmatch*/ );
897}
898
899
900/**
901 * @interface_method_impl{BS3CPUBASIC2PFACCESSOR,pfnAccessor,
902 * DIV [MEM=0] for checking the accessed bit}
903 */
904static void bs3CpuBasic2Pf_DoDivByZero(PBS3CPUBASIC2PFSTATE pThis, PBS3REGCTX pCtx, uint32_t fFlags,
905 uint8_t bXcpt, uint8_t uPfErrCd)
906{
907 static uint64_t const s_uFiller = UINT64_C(0x9856703711f4069e);
908 uint64_t uZeroAndFill;
909 unsigned i;
910
911 /*
912 * Adjust the incoming context and calculate our expectations.
913 */
914 bs3CpuBasic2Pf_SetCsEip(pThis, pCtx, &pThis->pCmnMode->DivMem);
915
916 Bs3MemCpy(&pThis->ExpectCtx, pCtx, sizeof(pThis->ExpectCtx));
917 switch (pThis->bMode & BS3_MODE_CODE_MASK)
918 {
919 case BS3_MODE_CODE_16:
920 case BS3_MODE_CODE_V86:
921 uZeroAndFill = s_uFiller & UINT64_C(0xffffffffffff0000);
922 break;
923 case BS3_MODE_CODE_32:
924 uZeroAndFill = s_uFiller & UINT64_C(0xffffffff00000000);
925 break;
926 case BS3_MODE_CODE_64:
927 uZeroAndFill = 0;
928 break;
929 }
930
931 /*
932 * Make two approaches to the test page (the 2nd one):
933 * - i=0: Start on the 1st page and edge into the 2nd.
934 * - i=1: Start at the end of the 2nd page and edge off it and into the 3rd.
935 */
936 for (i = 0; i < 2; i++)
937 {
938 unsigned off = fFlags & BS3CB2PFACC_F_DIRECT ? X86_PAGE_SIZE : X86_PAGE_SIZE * (i + 1) - pThis->cbAccess;
939 unsigned offEnd = fFlags & BS3CB2PFACC_F_DIRECT ? off + 1 : X86_PAGE_SIZE * (i + 1) + (i == 0 ? 8 : 7);
940 for (; off < offEnd; off++)
941 {
942 *(uint64_t *)&pThis->pbOrgTest[off] = uZeroAndFill;
943 if (BS3_MODE_IS_16BIT_CODE(pThis->bMode))
944 pThis->ExpectCtx.rbx.u = pCtx->rbx.u = off;
945 else
946 pThis->ExpectCtx.rbx.u = pCtx->rbx.u = pThis->uTestAddr.u + off;
947
948 Bs3TrapSetJmpAndRestore(pCtx, &pThis->TrapCtx);
949 //if (pThis->bMode == BS3_MODE_PP16_32) Bs3TestPrintf("off=%#06x bXcpt=%#x uErrCd=%#RX64\n", off, pThis->TrapCtx.bXcpt, pThis->TrapCtx.uErrCd);
950
951 if ( bXcpt != X86_XCPT_PF
952 || ((fFlags & BS3CB2PFACC_F_PAGE_LEVEL) && off >= X86_PAGE_SIZE * 2)
953 || ((fFlags & BS3CB2PFACC_F_PAGE_LEVEL) && off <= X86_PAGE_SIZE - pThis->cbAccess) )
954 {
955 bs3CpuBasic2Pf_CompareCtx(pThis, &pThis->ExpectCtx, 0 /*cbPcAdjust*/, X86_XCPT_DE, 0 /*uErrCd*/);
956 if (*(uint64_t *)&pThis->pbOrgTest[off] != uZeroAndFill)
957 Bs3TestFailedF("%u - %s: Modified source op: %#RX64, expected %#RX64",
958 g_usBs3TestStep, "xxxx", *(uint64_t *)&pThis->pbOrgTest[off], uZeroAndFill);
959 }
960 else
961 {
962 if (off < X86_PAGE_SIZE)
963 pThis->ExpectCtx.cr2.u = pThis->uTestAddr.u + X86_PAGE_SIZE;
964 else
965 pThis->ExpectCtx.cr2.u = pThis->uTestAddr.u + off;
966 bs3CpuBasic2Pf_CompareCtx(pThis, &pThis->ExpectCtx, 0 /*cbPcAdjust*/, bXcpt, uPfErrCd);
967 pThis->ExpectCtx.cr2 = pCtx->cr2;
968 if (*(uint64_t *)&pThis->pbOrgTest[off] != uZeroAndFill)
969 Bs3TestFailedF("%u - %s: Modified source op: %#RX64, expected %#RX64",
970 g_usBs3TestStep, "xxxx", *(uint64_t *)&pThis->pbOrgTest[off], uZeroAndFill);
971 }
972 }
973
974 if (fFlags & BS3CB2PFACC_F_DIRECT)
975 break;
976 }
977}
978
979
980static BS3CPUBASIC2PFACCESSOR const g_aAccessors[] =
981{
982 { "DoExec", bs3CpuBasic2Pf_DoExec, X86_TRAP_PF_ID, X86_XCPT_UD },
983 { "DoMovLoad", bs3CpuBasic2Pf_DoMovLoad, 0, X86_XCPT_UD },
984 { "DoMovStore", bs3CpuBasic2Pf_DoMovStore, X86_TRAP_PF_RW, X86_XCPT_UD },
985 { "DoXchg", bs3CpuBasic2Pf_DoXchg, X86_TRAP_PF_RW, X86_XCPT_UD },
986 { "DoCmpXchgMiss", bs3CpuBasic2Pf_DoCmpXchgMiss, X86_TRAP_PF_RW, X86_XCPT_UD },
987 { "DoCmpXhcgMatch", bs3CpuBasic2Pf_DoCmpXchgMatch, X86_TRAP_PF_RW, X86_XCPT_UD },
988 { "DoDivByZero", bs3CpuBasic2Pf_DoDivByZero, 0, X86_XCPT_DE },
989};
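/* Note: the mismatching CMPXCHG is also listed with X86_TRAP_PF_RW, since its memory operand
   is treated as a write for access-check purposes even when the compare fails. */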
990
991/** @} */
992
993
994/** @name BS3CPUBASIC2PFMODPT::pfnModify implementations.
995 * @{ */
996
997
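/* These workers rewrite the PTE of the 2nd (booby-trapped) test page from the backup,
   applying the caller's clear/set masks plus the worker-specific modification, using the
   selected g_aStoreMethods entry; 64-bit PAE entries are written as two 32-bit halves,
   touching only the half that actually changes where possible. */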
998static void bs3CpuBasic2Pf_ClearMask(PBS3CPUBASIC2PFSTATE pThis, unsigned iStore, PCBS3CPUBASIC2PFMODPT pEntry,
999 uint32_t fClearMask, uint32_t fSetMask)
1000{
1001 if (pThis->PgInfo.cbEntry == 4)
1002 {
1003 uint32_t const uOrg = pThis->PteBackup.Legacy[1];
1004 uint32_t uNew = ((uOrg & ~fClearMask) | fSetMask) & ~(uint32_t)pEntry->uModifyArg;
1005 uint32_t const uOld = pThis->PgInfo.u.Legacy.pPte[1].u;
1006 g_aStoreMethods[iStore].pfnStore(pThis->PgInfo.u.Legacy.pPte + 1, uNew, uOld);
1007 }
1008 else
1009 {
1010 uint64_t const uOrg = pThis->PteBackup.Pae[1];
1011 uint64_t uNew = ((uOrg & ~(uint64_t)fClearMask) | fSetMask) & ~(uint64_t)pEntry->uModifyArg;
1012 uint64_t const uOld = pThis->PgInfo.u.Pae.pPte[1].u;
1013
1014 g_aStoreMethods[iStore].pfnStore(&pThis->PgInfo.u.Pae.pPte[1].au32[0], (uint32_t)uNew, (uint32_t)uOld);
1015 if ((uint32_t)(uNew >> 32) != (uint32_t)(uOld >> 32))
1016 g_aStoreMethods[iStore].pfnStore(&pThis->PgInfo.u.Pae.pPte[1].au32[1],
1017 (uint32_t)(uNew >> 32), (uint32_t)(uOld >> 32));
1018 }
1019}
1020
1021static void bs3CpuBasic2Pf_SetBit(PBS3CPUBASIC2PFSTATE pThis, unsigned iStore, PCBS3CPUBASIC2PFMODPT pEntry,
1022 uint32_t fClearMask, uint32_t fSetMask)
1023{
1024 if (pThis->PgInfo.cbEntry == 4)
1025 {
1026 uint32_t const uOrg = pThis->PteBackup.Legacy[1];
1027 uint32_t uNew = (uOrg & ~fClearMask) | fSetMask | RT_BIT_32(pEntry->uModifyArg);
1028 uint32_t const uOld = pThis->PgInfo.u.Legacy.pPte[1].u;
1029 g_aStoreMethods[iStore].pfnStore(pThis->PgInfo.u.Legacy.pPte + 1, uNew, uOld);
1030 }
1031 else
1032 {
1033 uint64_t const uOrg = pThis->PteBackup.Pae[1];
1034 uint64_t uNew = ((uOrg & ~(uint64_t)fClearMask) | fSetMask) | RT_BIT_64(pEntry->uModifyArg);
1035 uint64_t const uOld = pThis->PgInfo.u.Pae.pPte[1].u;
1036
1037 if (pEntry->uModifyArg < 32 || (uint32_t)uNew != (uint32_t)uOld)
1038 g_aStoreMethods[iStore].pfnStore(&pThis->PgInfo.u.Pae.pPte[1].au32[0], (uint32_t)uNew, (uint32_t)uOld);
1039 if (pEntry->uModifyArg >= 32 || (uint32_t)(uNew >> 32) != (uint32_t)(uOld >> 32))
1040 g_aStoreMethods[iStore].pfnStore(&pThis->PgInfo.u.Pae.pPte[1].au32[1],
1041 (uint32_t)(uNew >> 32), (uint32_t)(uOld >> 32));
1042 }
1043}
1044
1045static void bs3CpuBasic2Pf_NoChange(PBS3CPUBASIC2PFSTATE pThis, unsigned iStore, PCBS3CPUBASIC2PFMODPT pEntry,
1046 uint32_t fClearMask, uint32_t fSetMask)
1047{
1048 if (pThis->PgInfo.cbEntry == 4)
1049 {
1050 uint32_t const uOrg = pThis->PteBackup.Legacy[1];
1051 uint32_t uNew = (uOrg & ~fClearMask) | fSetMask;
1052 uint32_t const uOld = pThis->PgInfo.u.Legacy.pPte[1].u;
1053 if (uNew != uOld)
1054 g_aStoreMethods[iStore].pfnStore(&pThis->PgInfo.u.Legacy.pPte[1], uNew, uOld);
1055 }
1056 else
1057 {
1058 uint64_t const uOrg = pThis->PteBackup.Pae[1];
1059 uint64_t uNew = (uOrg & ~(uint64_t)fClearMask) | fSetMask;
1060 uint64_t const uOld = pThis->PgInfo.u.Pae.pPte[1].u;
1061 if (uNew != uOld)
1062 {
1063 if ((uint32_t)uNew != (uint32_t)uOld)
1064 g_aStoreMethods[iStore].pfnStore(&pThis->PgInfo.u.Pae.pPte[1].au32[0], (uint32_t)uNew, (uint32_t)uOld);
1065 if ((uint32_t)(uNew >> 32) != (uint32_t)(uOld >> 32))
1066 g_aStoreMethods[iStore].pfnStore(&pThis->PgInfo.u.Pae.pPte[1].au32[1],
1067 (uint32_t)(uNew >> 32), (uint32_t)(uOld >> 32));
1068 }
1069 }
1070}
1071
1072/** @} */
1073
1074
1075/** @name BS3CPUBASIC2PFMODPT::pfnApplicable implementations.
1076 * @{ */
1077
1078static bool bs3CpuBasic2Pf_IsPteBitReserved(PBS3CPUBASIC2PFSTATE pThis, PCBS3CPUBASIC2PFMODPT pEntry)
1079{
1080 if (pThis->PgInfo.cbEntry == 8)
1081 {
1082 /* With 3-level PAE paging bits 52..63 are reserved (52..62 when NXE=1, bit 63 then being NX); with 4-level paging only bit 63, and only when NXE=0. */
1083 if (pThis->PgInfo.cEntries == 3)
1084 {
1085 if ((uint32_t)(pEntry->uModifyArg - 52U) < (uint32_t)(12 - pThis->fNxe))
1086 return true;
1087 }
1088 else if (pEntry->uModifyArg == 63 && !pThis->fNxe)
1089 return true;
1090
1091 /* Reserved physical address bits. */
1092 if (pEntry->uModifyArg < 52)
1093 {
1094 if ((uint32_t)pEntry->uModifyArg >= (uint32_t)pThis->cBitsPhysWidth)
1095 return true;
1096 }
1097 }
1098 return false;
1099}
1100
1101static bool bs3CpuBasic2Pf_IsPteBitSoftwareUsable(PBS3CPUBASIC2PFSTATE pThis, PCBS3CPUBASIC2PFMODPT pEntry)
1102{
1103 if (pThis->PgInfo.cbEntry == 8)
1104 {
1105 if (pThis->PgInfo.cEntries != 3)
1106 {
1107 if ((uint32_t)(pEntry->uModifyArg - 52U) < (uint32_t)11)
1108 return true;
1109 }
1110 }
1111 return false;
1112}
1113
1114
1115static bool bs3CpuBasic2Pf_IsNxe(PBS3CPUBASIC2PFSTATE pThis, PCBS3CPUBASIC2PFMODPT pEntry)
1116{
1117 return pThis->fNxe && pThis->PgInfo.cbEntry == 8;
1118}
1119
1120/** @} */
1121
1122
1123static const BS3CPUBASIC2PFMODPT g_aPteWorkers[] =
1124{
1125/* { pszName, P U W NX RSV ModifyArg pfnModify, pfnApplicable }, */
1126 { "org", 1, 1, 1, 0, 0, 0, bs3CpuBasic2Pf_NoChange, NULL },
1127 { "!US", 1, 0, 1, 0, 0, X86_PTE_US, bs3CpuBasic2Pf_ClearMask, NULL },
1128 { "!RW", 1, 1, 0, 0, 0, X86_PTE_RW, bs3CpuBasic2Pf_ClearMask, NULL },
1129 { "!RW+!US", 1, 0, 0, 0, 0, X86_PTE_RW | X86_PTE_US, bs3CpuBasic2Pf_ClearMask, NULL },
1130 { "!P", 0, 0, 0, 0, 0, X86_PTE_P, bs3CpuBasic2Pf_ClearMask, NULL },
1131 { "NX", 1, 1, 1, 1, 0, 63, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsNxe },
1132 { "RSVPH[32]", 0, 0, 0, 0, 1, 32, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1133 { "RSVPH[33]", 0, 0, 0, 0, 1, 33, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1134 { "RSVPH[34]", 0, 0, 0, 0, 1, 34, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1135 { "RSVPH[35]", 0, 0, 0, 0, 1, 35, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1136 { "RSVPH[36]", 0, 0, 0, 0, 1, 36, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1137 { "RSVPH[37]", 0, 0, 0, 0, 1, 37, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1138 { "RSVPH[38]", 0, 0, 0, 0, 1, 38, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1139 { "RSVPH[39]", 0, 0, 0, 0, 1, 39, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1140 { "RSVPH[40]", 0, 0, 0, 0, 1, 40, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1141 { "RSVPH[41]", 0, 0, 0, 0, 1, 41, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1142 { "RSVPH[42]", 0, 0, 0, 0, 1, 42, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1143 { "RSVPH[43]", 0, 0, 0, 0, 1, 43, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1144 { "RSVPH[44]", 0, 0, 0, 0, 1, 44, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1145 { "RSVPH[45]", 0, 0, 0, 0, 1, 45, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1146 { "RSVPH[46]", 0, 0, 0, 0, 1, 46, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1147 { "RSVPH[47]", 0, 0, 0, 0, 1, 47, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1148 { "RSVPH[48]", 0, 0, 0, 0, 1, 48, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1149 { "RSVPH[49]", 0, 0, 0, 0, 1, 49, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1150 { "RSVPH[50]", 0, 0, 0, 0, 1, 50, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1151 { "RSVPH[51]", 0, 0, 0, 0, 1, 51, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1152 { "RSV[52]", 0, 0, 0, 0, 1, 52, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1153 { "RSV[53]", 0, 0, 0, 0, 1, 53, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1154 { "RSV[54]", 0, 0, 0, 0, 1, 54, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1155 { "RSV[55]", 0, 0, 0, 0, 1, 55, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1156 { "RSV[56]", 0, 0, 0, 0, 1, 56, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1157 { "RSV[57]", 0, 0, 0, 0, 1, 57, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1158 { "RSV[58]", 0, 0, 0, 0, 1, 58, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1159 { "RSV[59]", 0, 0, 0, 0, 1, 59, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1160 { "RSV[60]", 0, 0, 0, 0, 1, 60, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1161 { "RSV[61]", 0, 0, 0, 0, 1, 61, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1162 { "RSV[62]", 0, 0, 0, 0, 1, 62, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1163 { "RSV[62]", 0, 0, 0, 0, 1, 62, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1164 { "RSV[63]", 0, 0, 0, 0, 1, 63, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitReserved },
1165 { "!RSV[52]", 1, 1, 1, 0, 0, 52, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
1166 { "!RSV[53]", 1, 1, 1, 0, 0, 53, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
1167 { "!RSV[54]", 1, 1, 1, 0, 0, 54, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
1168 { "!RSV[55]", 1, 1, 1, 0, 0, 55, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
1169 { "!RSV[56]", 1, 1, 1, 0, 0, 56, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
1170 { "!RSV[57]", 1, 1, 1, 0, 0, 57, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
1171 { "!RSV[58]", 1, 1, 1, 0, 0, 58, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
1172 { "!RSV[59]", 1, 1, 1, 0, 0, 59, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
1173 { "!RSV[60]", 1, 1, 1, 0, 0, 60, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
1174 { "!RSV[61]", 1, 1, 1, 0, 0, 61, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
1175 { "!RSV[62]", 1, 1, 1, 0, 0, 62, bs3CpuBasic2Pf_SetBit, bs3CpuBasic2Pf_IsPteBitSoftwareUsable },
1176
1177};
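/* The "org" entry leaves the mapping untouched; the "!US"/"!RW"/"!P" entries strip protection
   bits; the rest set a single high bit. Workers with a pfnApplicable callback only run when
   that bit is actually reserved (or NX is available) for the current paging mode. */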
1178
1179
1180/**
1181 * Worker for bs3CpuBasic2_RaiseXcpt0e_c32 that does the actual testing.
1182 *
1183 * Caller does all the cleaning up.
1184 *
1185 * @returns Error count.
1186 * @param pThis Test state data.
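 * @param fWp Whether CR0.WP is set.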
1187 * @param fNxe Whether NX is enabled.
1188 */
1189static uint8_t bs3CpuBasic2_RaiseXcpt0eWorker(PBS3CPUBASIC2PFSTATE register pThis, bool const fWp, bool const fNxe)
1190{
1191 unsigned iLevel;
1192 unsigned iRing;
1193 unsigned iStore;
1194 unsigned iAccessor;
1195 unsigned iOuter;
1196 unsigned cPml4Tests;
1197 unsigned cPdPtrTests;
1198 uint32_t const fPfIdMask = fNxe ? UINT32_MAX : ~X86_TRAP_PF_ID;
1199 BS3REGCTX aCtxts[4];
1200
1201 pThis->fWp = fWp;
1202 pThis->fNxe = fNxe;
1203
1204 /** @todo figure out V8086 testing. */
1205 if ((pThis->bMode & BS3_MODE_CODE_MASK) == BS3_MODE_CODE_V86)
1206 return BS3TESTDOMODE_SKIPPED;
1207
1208
1209 /* paranoia: Touch the various big stack structures to ensure the compiler has allocated stack for them. */
1210 for (iRing = 0; iRing < RT_ELEMENTS(aCtxts); iRing++)
1211 Bs3MemZero(&aCtxts[iRing], sizeof(aCtxts[iRing]));
1212
1213 /*
1214 * Set up a few contexts for testing this stuff.
1215 */
1216 Bs3RegCtxSaveEx(&aCtxts[0], pThis->bMode, 2048);
1217 for (iRing = 1; iRing < 4; iRing++)
1218 {
1219 aCtxts[iRing] = aCtxts[0];
1220 Bs3RegCtxConvertToRingX(&aCtxts[iRing], iRing);
1221 }
1222
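/* In 16-bit code the test area is reached through the dedicated 16-bit data selector
   (DS = uSel16TestData, based at the test area), so BX holds a segment offset; in 32/64-bit
   code EBX/RBX holds the flat address of the test area. */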
1223 if (!BS3_MODE_IS_16BIT_CODE(pThis->bMode))
1224 {
1225 for (iRing = 0; iRing < 4; iRing++)
1226 aCtxts[iRing].rbx.u = pThis->uTestAddr.u;
1227 }
1228 else
1229 {
1230 for (iRing = 0; iRing < 4; iRing++)
1231 {
1232 aCtxts[iRing].ds = pThis->uSel16TestData;
1233 aCtxts[iRing].rbx.u = 0;
1234 }
1235 }
1236
1237 /*
1238 * Check basic operation:
1239 */
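/* Passing X86_XCPT_UD / UINT8_MAX tells each accessor that no #PF is expected, so every
   access must run to completion on the unmodified mapping. */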
1240 for (iRing = 0; iRing < 4; iRing++)
1241 for (iAccessor = 0; iAccessor < RT_ELEMENTS(g_aAccessors); iAccessor++)
1242 g_aAccessors[iAccessor].pfnAccessor(pThis, &aCtxts[iRing], BS3CB2PFACC_F_PAGE_LEVEL, X86_XCPT_UD, UINT8_MAX);
1243
1244 /*
1245 * Some PTE checks. We only mess with the 2nd page.
1246 */
1247 for (iOuter = 0; iOuter < 2; iOuter++)
1248 {
1249 uint32_t const fAccessor = (iOuter == 0 ? BS3CB2PFACC_F_DIRECT : 0) | BS3CB2PFACC_F_PAGE_LEVEL;
1250 unsigned iPteWrk;
1251
1252 bs3CpuBasic2Pf_FlushAll();
1253 for (iPteWrk = 0; iPteWrk < RT_ELEMENTS(g_aPteWorkers); iPteWrk++)
1254 {
1255 BS3CPUBASIC2PFMODPT EffWrk;
1256 const BS3CPUBASIC2PFMODPT *pPteWrk = &g_aPteWorkers[iPteWrk];
1257 if (pPteWrk->pfnApplicable && !pPteWrk->pfnApplicable(pThis, pPteWrk))
1258 continue;
1259
1260 pThis->pszPteWorker = pPteWrk->pszName;
1261
1262 EffWrk = *pPteWrk;
1263
1264#if 1
1265 /*
1266 * Do the modification once, then test all different accesses
1267 * without flushing the TLB or anything in-between.
1268 */
1269 for (iStore = 0; iStore < RT_ELEMENTS(g_aStoreMethods); iStore++)
1270 {
1271 pThis->pszStore = g_aStoreMethods[iStore].pszName;
1272 pPteWrk->pfnModify(pThis, iStore, pPteWrk, 0, 0);
1273
1274 for (iRing = 0; iRing < 4; iRing++)
1275 {
1276 PBS3REGCTX const pCtx = &aCtxts[iRing];
1277 if ( EffWrk.fReserved
1278 || !EffWrk.fPresent
1279 || (!EffWrk.fUser && iRing == 3))
1280 {
1281 uint32_t const fPfBase = ( EffWrk.fReserved ? X86_TRAP_PF_P | X86_TRAP_PF_RSVD
1282 : EffWrk.fPresent ? X86_TRAP_PF_P : 0)
1283 | (iRing == 3 ? X86_TRAP_PF_US : 0);
1284 for (iAccessor = 0; iAccessor < RT_ELEMENTS(g_aAccessors); iAccessor++)
1285 {
1286 pThis->pszAccessor = g_aAccessors[iAccessor].pszName;
1287 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF,
1288 fPfBase | (g_aAccessors[iAccessor].fAccess & fPfIdMask));
1289 }
1290 }
1291 else
1292 {
1293 uint32_t const fPfBase = X86_TRAP_PF_P | (iRing == 3 ? X86_TRAP_PF_US : 0);
1294 for (iAccessor = 0; iAccessor < RT_ELEMENTS(g_aAccessors); iAccessor++)
1295 {
1296 pThis->pszAccessor = g_aAccessors[iAccessor].pszName;
1297 if ( ( (g_aAccessors[iAccessor].fAccess & X86_TRAP_PF_ID)
1298 && EffWrk.fNoExecute)
1299 || ( (g_aAccessors[iAccessor].fAccess & X86_TRAP_PF_RW)
1300 && !EffWrk.fWriteable
1301 && (fWp || iRing == 3)) )
1302 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF,
1303 fPfBase | (g_aAccessors[iAccessor].fAccess & fPfIdMask));
1304 else
1305 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_UD, UINT8_MAX);
1306 }
1307 }
1308 }
1309
1310 /* Reset the paging + full flush. */
1311 bs3CpuBasic2Pf_RestoreFromBackups(pThis);
1312 }
1313#endif
1314
1315#define CHECK_AD_BITS(a_fExpectedAD) \
1316 do { \
1317 uint32_t fActualAD = ( pThis->PgInfo.cbEntry == 8 \
1318 ? pThis->PgInfo.u.Pae.pPte[1].au32[0] : pThis->PgInfo.u.Legacy.pPte[1].au32[0]) \
1319 & (X86_PTE_A | X86_PTE_D); \
1320 if (fActualAD != (a_fExpectedAD)) \
1321 { \
1322 Bs3TestFailedF("%u - %s/%u: unexpected A/D bits: %#x, expected %#x\n", \
1323 g_usBs3TestStep, "xxxx", __LINE__, fActualAD, a_fExpectedAD); \
1324 BS3CPUBASIC2PF_HALT(pThis); \
1325 } \
1326 } while (0)
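/* Expectations for the A/D checks below: after a #PF the PTE's A/D bits must be unchanged;
   after a successful access A must be set, and D as well for write accesses. */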
1327
1328 /*
1329 * Again, but redoing everything for each accessor.
1330 */
1331 for (iStore = 0; iStore < RT_ELEMENTS(g_aStoreMethods); iStore++)
1332 {
1333 pThis->pszStore = g_aStoreMethods[iStore].pszName;
1334
1335 for (iRing = 0; iRing < 4; iRing++)
1336 {
1337 PBS3REGCTX const pCtx = &aCtxts[iRing];
1338
1339 if ( EffWrk.fReserved
1340 || !EffWrk.fPresent
1341 || (!EffWrk.fUser && iRing == 3))
1342 {
1343 uint32_t const fPfBase = ( EffWrk.fReserved ? X86_TRAP_PF_P | X86_TRAP_PF_RSVD
1344 : EffWrk.fPresent ? X86_TRAP_PF_P : 0)
1345 | (iRing == 3 ? X86_TRAP_PF_US : 0);
1346 for (iAccessor = 0; iAccessor < RT_ELEMENTS(g_aAccessors); iAccessor++)
1347 {
1348 pThis->pszAccessor = g_aAccessors[iAccessor].pszName;
1349
1350 pPteWrk->pfnModify(pThis, iStore, pPteWrk, 0, 0);
1351 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF,
1352 fPfBase | (g_aAccessors[iAccessor].fAccess & fPfIdMask));
1353 CHECK_AD_BITS(0);
1354 bs3CpuBasic2Pf_RestoreFromBackups(pThis);
1355
1356 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_A | X86_PTE_D, 0);
1357 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF,
1358 fPfBase | (g_aAccessors[iAccessor].fAccess & fPfIdMask));
1359 CHECK_AD_BITS(0);
1360 bs3CpuBasic2Pf_RestoreFromBackups(pThis);
1361 }
1362 }
1363 else
1364 {
1365 uint32_t const fPfBase = X86_TRAP_PF_P | (iRing == 3 ? X86_TRAP_PF_US : 0);
1366 for (iAccessor = 0; iAccessor < RT_ELEMENTS(g_aAccessors); iAccessor++)
1367 {
1368 pThis->pszAccessor = g_aAccessors[iAccessor].pszName;
1369 if ( ( (g_aAccessors[iAccessor].fAccess & X86_TRAP_PF_ID)
1370 && EffWrk.fNoExecute)
1371 || ( (g_aAccessors[iAccessor].fAccess & X86_TRAP_PF_RW)
1372 && !EffWrk.fWriteable
1373 && (fWp || iRing == 3)) )
1374 {
1375 uint32_t const fErrCd = fPfBase | (g_aAccessors[iAccessor].fAccess & fPfIdMask);
1376
1377 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_A | X86_PTE_D, 0);
1378 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF, fErrCd);
1379 CHECK_AD_BITS(0);
1380 bs3CpuBasic2Pf_RestoreFromBackups(pThis);
1381
1382 pPteWrk->pfnModify(pThis, iStore, pPteWrk, 0, X86_PTE_A | X86_PTE_D);
1383 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF, fErrCd);
1384 CHECK_AD_BITS(X86_PTE_A | X86_PTE_D);
1385 bs3CpuBasic2Pf_RestoreFromBackups(pThis);
1386
1387 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_A, X86_PTE_D);
1388 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF, fErrCd);
1389 CHECK_AD_BITS(X86_PTE_D);
1390 bs3CpuBasic2Pf_RestoreFromBackups(pThis);
1391
1392 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_D, X86_PTE_A);
1393 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF, fErrCd);
1394 CHECK_AD_BITS(X86_PTE_A);
1395 bs3CpuBasic2Pf_RestoreFromBackups(pThis);
1396 }
1397 else
1398 {
1399 uint32_t const fExpectedAD = (g_aAccessors[iAccessor].fAccess & X86_TRAP_PF_RW)
1400 ? X86_PTE_A | X86_PTE_D : X86_PTE_A;
1401
1402 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_A | X86_PTE_D, 0);
1403 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_UD, UINT8_MAX);
1404 CHECK_AD_BITS(fExpectedAD);
1405 bs3CpuBasic2Pf_RestoreFromBackups(pThis);
1406
1407 pPteWrk->pfnModify(pThis, iStore, pPteWrk, 0, X86_PTE_A | X86_PTE_D);
1408 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_UD, UINT8_MAX);
1409 CHECK_AD_BITS(X86_PTE_A | X86_PTE_D);
1410 bs3CpuBasic2Pf_RestoreFromBackups(pThis);
1411
1412 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_A, X86_PTE_D);
1413 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_UD, UINT8_MAX);
1414 CHECK_AD_BITS(fExpectedAD | X86_PTE_D);
1415 bs3CpuBasic2Pf_RestoreFromBackups(pThis);
1416
1417 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_D, X86_PTE_A);
1418 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_UD, UINT8_MAX);
1419 CHECK_AD_BITS(fExpectedAD | X86_PTE_A);
1420 bs3CpuBasic2Pf_RestoreFromBackups(pThis);
1421 }
1422 }
1423 }
1424 }
1425 }
1426
1427 /*
1428 * Again, but using invalidate page.
1429 */
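/* Same matrix as above, but with an explicit INVLPG of the guarded page after every PTE
   modification, so the access cannot be satisfied from a stale TLB entry. */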
1430 if (pThis->fUseInvlPg)
1431 {
1432 bs3CpuBasic2Pf_RestoreFromBackups(pThis);
1433
1434 for (iStore = 0; iStore < RT_ELEMENTS(g_aStoreMethods); iStore++)
1435 {
1436 pThis->pszStore = g_aStoreMethods[iStore].pszName;
1437
1438 for (iRing = 0; iRing < 4; iRing++)
1439 {
1440 PBS3REGCTX const pCtx = &aCtxts[iRing];
1441
1442 if ( EffWrk.fReserved
1443 || !EffWrk.fPresent
1444 || (!EffWrk.fUser && iRing == 3))
1445 {
1446 uint32_t const fPfBase = ( EffWrk.fReserved ? X86_TRAP_PF_P | X86_TRAP_PF_RSVD
1447 : EffWrk.fPresent ? X86_TRAP_PF_P : 0)
1448 | (iRing == 3 ? X86_TRAP_PF_US : 0);
1449 for (iAccessor = 0; iAccessor < RT_ELEMENTS(g_aAccessors); iAccessor++)
1450 {
1451 pThis->pszAccessor = g_aAccessors[iAccessor].pszName;
1452
1453 pPteWrk->pfnModify(pThis, iStore, pPteWrk, 0, 0);
1454 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1455 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF,
1456 fPfBase | (g_aAccessors[iAccessor].fAccess & fPfIdMask));
1457 CHECK_AD_BITS(0);
1458
1459 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_A | X86_PTE_D, 0);
1460 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1461 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF,
1462 fPfBase | (g_aAccessors[iAccessor].fAccess & fPfIdMask));
1463 CHECK_AD_BITS(0);
1464 }
1465 }
1466 else
1467 {
1468 uint32_t const fPfBase = X86_TRAP_PF_P | (iRing == 3 ? X86_TRAP_PF_US : 0);
1469 for (iAccessor = 0; iAccessor < RT_ELEMENTS(g_aAccessors); iAccessor++)
1470 {
1471 pThis->pszAccessor = g_aAccessors[iAccessor].pszName;
1472 if ( ( (g_aAccessors[iAccessor].fAccess & X86_TRAP_PF_ID)
1473 && EffWrk.fNoExecute)
1474 || ( (g_aAccessors[iAccessor].fAccess & X86_TRAP_PF_RW)
1475 && !EffWrk.fWriteable
1476 && (fWp || iRing == 3)) )
1477 {
1478 uint32_t const fErrCd = fPfBase | (g_aAccessors[iAccessor].fAccess & fPfIdMask);
1479
1480 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_A | X86_PTE_D, 0);
1481 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1482 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF, fErrCd);
1483 CHECK_AD_BITS(0);
1484
1485 pPteWrk->pfnModify(pThis, iStore, pPteWrk, 0, X86_PTE_A | X86_PTE_D);
1486 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1487 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF, fErrCd);
1488 CHECK_AD_BITS(X86_PTE_A | X86_PTE_D);
1489
1490 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_A, X86_PTE_D);
1491 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1492 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF, fErrCd);
1493 CHECK_AD_BITS(X86_PTE_D);
1494
1495 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_D, X86_PTE_A);
1496 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1497 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF, fErrCd);
1498 CHECK_AD_BITS(X86_PTE_A);
1499 }
1500 else
1501 {
1502 uint32_t const fExpectedAD = (g_aAccessors[iAccessor].fAccess & X86_TRAP_PF_RW)
1503 ? X86_PTE_A | X86_PTE_D : X86_PTE_A;
1504
1505 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_A | X86_PTE_D, 0);
1506 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1507 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_UD, UINT8_MAX);
1508 CHECK_AD_BITS(fExpectedAD);
1509
1510 pPteWrk->pfnModify(pThis, iStore, pPteWrk, 0, X86_PTE_A | X86_PTE_D);
1511 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1512 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_UD, UINT8_MAX);
1513 CHECK_AD_BITS(X86_PTE_A | X86_PTE_D);
1514
1515 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_A, X86_PTE_D);
1516 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1517 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_UD, UINT8_MAX);
1518 CHECK_AD_BITS(fExpectedAD | X86_PTE_D);
1519
1520 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_D, X86_PTE_A);
1521 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1522 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_UD, UINT8_MAX);
1523 CHECK_AD_BITS(fExpectedAD | X86_PTE_A);
1524 }
1525 }
1526 }
1527 }
1528 }
1529
1530 bs3CpuBasic2Pf_RestoreFromBackups(pThis);
1531 }
1532 }
1533 }
1534
1535
1536 /*
1537 * Do all 4 paging levels. We start out with full access to the page and
1538 * restrict it in various ways.
1539 *
1540 * (On the final level we only mess with the 2nd page for now.)
1541 */
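/* Decide how many entries to exercise at the upper levels; presumably two are only worthwhile where the walk really has them: both PML4 and PDPT for long-mode addresses at or above 512 GiB, and just the PDPT for 3-level PAE paging. */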
1542 cPdPtrTests = 1;
1543 cPml4Tests = 1;
1544 if (pThis->uTestAddr.u >= UINT64_C(0x8000000000))
1545 {
1546 cPml4Tests = 2;
1547 cPdPtrTests = 2;
1548 }
1549 else if (pThis->PgInfo.cEntries == 3)
1550 cPdPtrTests = 2;
1551
1552#if 0
1553 /* Loop 1: Accessor flags. */
1554 for (iOuter = 0; iOuter < 2; iOuter++)
1555 {
1556 uint32_t const fAccessor = (iOuter == 0 ? BS3CB2PFACC_F_DIRECT : 0) | BS3CB2PFACC_F_PAGE_LEVEL;
1557
1558 /* Loop 2: Paging store method. */
1559 for (iStore = 0; iStore < RT_ELEMENTS(g_aStoreMethods); iStore++)
1560 {
1561 unsigned iPml4Test;
1562 int8_t cReserved = 0;
1563 int8_t cNotPresent = 0;
1564 int8_t cNotWrite = 0;
1565 int8_t cNotUser = 0;
1566 int8_t cExecute = 0;
1567
1568 /* Loop 3: Page map level 4 */
1569 for (iPml4Test = 0; iPml4Test < cPml4Tests; iPml4Test++)
1570 {
1571 unsigned iPdPtrTest;
1572
1573 /* Loop 4: Page directory pointer table. */
1574 for (iPdPtrTest = 0; iPdPtrTest < cPdPtrTests; iPdPtrTest++)
1575 {
1576 unsigned iPdTest;
1577
1578 /* Loop 5: Page directory. */
1579 for (iPdTest = 0; iPdTest < 2; iPdTest++)
1580 {
1581 unsigned iPtTest;
1582
1583 /* Loop 6: Page table. */
1584 for (iPtTest = 0; iPtTest < 2; iPtTest++)
1585 {
1586 /* Loop 7: Accessor ring. */
1587 for (iRing = 0; iRing < 4; iRing++)
1588 {
1589 PBS3REGCTX const pCtx = &aCtxts[iRing];
1590
1591 if ( EffWrk.fReserved
1592 || !EffWrk.fPresent
1593 || (!EffWrk.fUser && iRing == 3))
1594 {
1595 uint32_t const fPfBase = ( EffWrk.fReserved ? X86_TRAP_PF_P | X86_TRAP_PF_RSVD
1596 : EffWrk.fPresent ? X86_TRAP_PF_P : 0)
1597 | (iRing == 3 ? X86_TRAP_PF_US : 0);
1598 for (iAccessor = 0; iAccessor < RT_ELEMENTS(g_aAccessors); iAccessor++)
1599 {
1600 pThis->pszAccessor = g_aAccessors[iAccessor].pszName;
1601
1602 pPteWrk->pfnModify(pThis, iStore, pPteWrk, 0, 0);
1603 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1604 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF,
1605 fPfBase | (g_aAccessors[iAccessor].fAccess & fPfIdMask));
1606 CHECK_AD_BITS(0);
1607
1608 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_A | X86_PTE_D, 0);
1609 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1610 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF,
1611 fPfBase | (g_aAccessors[iAccessor].fAccess & fPfIdMask));
1612 CHECK_AD_BITS(0);
1613 }
1614 }
1615 else
1616 {
1617 uint32_t const fPfBase = X86_TRAP_PF_P | (iRing == 3 ? X86_TRAP_PF_US : 0);
1618 for (iAccessor = 0; iAccessor < RT_ELEMENTS(g_aAccessors); iAccessor++)
1619 {
1620 pThis->pszAccessor = g_aAccessors[iAccessor].pszName;
1621 if ( ( (g_aAccessors[iAccessor].fAccess & X86_TRAP_PF_ID)
1622 && EffWrk.fNoExecute)
1623 || ( (g_aAccessors[iAccessor].fAccess & X86_TRAP_PF_RW)
1624 && !EffWrk.fWriteable
1625 && (fWp || iRing == 3)) )
1626 {
1627 uint32_t const fErrCd = fPfBase | (g_aAccessors[iAccessor].fAccess & fPfIdMask);
1628
1629 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_A | X86_PTE_D, 0);
1630 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1631 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF, fErrCd);
1632 CHECK_AD_BITS(0);
1633
1634 pPteWrk->pfnModify(pThis, iStore, pPteWrk, 0, X86_PTE_A | X86_PTE_D);
1635 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1636 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF, fErrCd);
1637 CHECK_AD_BITS(X86_PTE_A | X86_PTE_D);
1638
1639 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_A, X86_PTE_D);
1640 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1641 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF, fErrCd);
1642 CHECK_AD_BITS(X86_PTE_D);
1643
1644 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_D, X86_PTE_A);
1645 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1646 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_PF, fErrCd);
1647 CHECK_AD_BITS(X86_PTE_A);
1648 }
1649 else
1650 {
1651 uint32_t const fExpectedAD = (g_aAccessors[iAccessor].fAccess & X86_TRAP_PF_RW)
1652 ? X86_PTE_A | X86_PTE_D : X86_PTE_A;
1653
1654 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_A | X86_PTE_D, 0);
1655 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1656 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_UD, UINT8_MAX);
1657 CHECK_AD_BITS(fExpectedAD);
1658
1659 pPteWrk->pfnModify(pThis, iStore, pPteWrk, 0, X86_PTE_A | X86_PTE_D);
1660 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1661 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_UD, UINT8_MAX);
1662 CHECK_AD_BITS(X86_PTE_A | X86_PTE_D);
1663
1664 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_A, X86_PTE_D);
1665 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1666 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_UD, UINT8_MAX);
1667 CHECK_AD_BITS(fExpectedAD | X86_PTE_D);
1668
1669 pPteWrk->pfnModify(pThis, iStore, pPteWrk, X86_PTE_D, X86_PTE_A);
1670 ASMInvalidatePage(pThis->uTestAddr.u + X86_PAGE_SIZE);
1671 g_aAccessors[iAccessor].pfnAccessor(pThis, pCtx, fAccessor, X86_XCPT_UD, UINT8_MAX);
1672 CHECK_AD_BITS(fExpectedAD | X86_PTE_A);
1673 }
1674 }
1675 }
1676 }
1677
1678 }
1679 }
1680 }
1681 }
1682
1683 }
1684 }
1685#endif
1686
1687 /*
1688 * Check reserved bits on each paging level.
1689 */
1690
1691 /* Loop 1: Accessor flags (only direct for now). */
1692 for (iOuter = 0; iOuter < 1; iOuter++)
1693 {
1694 uint32_t const fAccessor = BS3CB2PFACC_F_DIRECT;
1695
1696 /* Loop 2: Paging store method. */
1697 for (iStore = 0; iStore < RT_ELEMENTS(g_aStoreMethods); iStore++)
1698 {
1699 /* Loop 3: Accessor ring. */
1700 for (iRing = 0; iRing < 4; iRing++)
1701 {
1702 /* Loop 4: Which level we mess up. */
1703 for (iLevel = 0; iLevel < pThis->PgInfo.cEntries; iLevel++)
1704 {
1705#if 0
1706 const BS3CPUBASIC2PFMODPT *pPteWrk = &g_aPteWorkers[iPteWrk];
1707 if (pThis->PgInfo.)
1708 {
1709 }
1710#endif
1711
1712
1713 }
1714 }
1715 }
1716 }
1717
1718
1719
1720 return 0;
1721}
1722
1723
1724BS3_DECL_CALLBACK(uint8_t) bs3CpuBasic2_RaiseXcpt0e_c32(uint8_t bMode)
1725{
1726 void *pvTestUnaligned;
1727 uint32_t cbTestUnaligned = _8M;
1728 uint8_t bRet = 1;
1729 int rc;
1730 BS3CPUBASIC2PFSTATE State;
1731
1732 /*
1733 * Initialize the state data.
1734 */
1735 Bs3MemZero(&State, sizeof(State));
1736 State.bMode = bMode;
1737 switch (bMode & BS3_MODE_CODE_MASK)
1738 {
1739 case BS3_MODE_CODE_16: State.cbAccess = sizeof(uint16_t); break;
1740 case BS3_MODE_CODE_V86: State.cbAccess = sizeof(uint16_t); break;
1741 case BS3_MODE_CODE_32: State.cbAccess = sizeof(uint32_t); break;
1742 case BS3_MODE_CODE_64: State.cbAccess = sizeof(uint64_t); break;
1743 }
1744 State.pCmnMode = &g_aCmnModes[0];
1745 while (State.pCmnMode->bMode != (bMode & BS3_MODE_CODE_MASK))
1746 State.pCmnMode++;
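/* INVLPG first appeared on the 80486, so only use it on 486 or newer CPUs. */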
1747 State.fUseInvlPg = (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486;
1748
1749 /* Figure physical addressing width. */
1750 State.cBitsPhysWidth = 32;
1751 if ( (g_uBs3CpuDetected & BS3CPU_F_CPUID)
1752 && (ASMCpuId_EDX(1) & (X86_CPUID_FEATURE_EDX_PSE36 | X86_CPUID_FEATURE_EDX_PAE)) )
1753 State.cBitsPhysWidth = 36;
1754
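/* If CPUID leaf 0x80000008 is available it reports the physical address width in EAX[7:0]; prefer that over the PSE36/PAE guess above. */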
1755 if ( (g_uBs3CpuDetected & BS3CPU_F_CPUID_EXT_LEAVES)
1756 && ASMCpuId_EAX(0x80000000) >= 0x80000008)
1757 {
1758 uint8_t cBits = (uint8_t)ASMCpuId_EAX(0x80000008);
1759 if (cBits >= 32 && cBits <= 52)
1760 State.cBitsPhysWidth = cBits;
1761 else
1762 Bs3TestPrintf("CPUID 0x80000008: Physical bitcount out of range: %u\n", cBits);
1763 }
1764 //Bs3TestPrintf("Physical bitcount: %u\n", State.cBitsPhysWidth);
1765
1766 /*
1767 * Allocate some memory we can play around with, then carve a size-aligned
1768 * chunk out of it so we may also get to play with 2/4MB pages.
1769 */
1770 cbTestUnaligned = _8M * 2;
1771 while ((pvTestUnaligned = Bs3MemAlloc(BS3MEMKIND_FLAT32, cbTestUnaligned)) == NULL)
1772 {
1773 cbTestUnaligned >>= 1;
1774 if (cbTestUnaligned <= _16K)
1775 {
1776 Bs3TestFailed("Failed to allocate memory to play around with\n");
1777 return 1;
1778 }
1779 }
1780
1781 /* Make the block size-aligned, halving it if the raw allocation is not naturally aligned. */
1782 if ((uintptr_t)pvTestUnaligned & (cbTestUnaligned - 1))
1783 {
1784 State.cbTest = cbTestUnaligned >> 1;
1785 State.pbOrgTest = (uint8_t *)(((uintptr_t)pvTestUnaligned + State.cbTest - 1) & ~(State.cbTest - 1));
1786 }
1787 else
1788 {
1789 State.pbOrgTest = pvTestUnaligned;
1790 State.cbTest = cbTestUnaligned;
1791 }
1792 State.cTestPages = State.cbTest >> X86_PAGE_SHIFT;
1793
1794 /*
1795 * Alias this memory far away from where our code and data lives.
1796 */
1797 if (bMode & BS3_MODE_CODE_64)
1798 State.uTestAddr.u = UINT64_C(0x0000648680000000);
1799 else
1800 State.uTestAddr.u = UINT32_C(0x80000000);
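/* Map a second, present + writable + user view of the test buffer at the chosen address; all paging-structure mangling happens on this alias so the mappings backing our code and data stay intact. */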
1801 rc = Bs3PagingAlias(State.uTestAddr.u, (uintptr_t)State.pbOrgTest, State.cbTest, X86_PTE_P | X86_PTE_RW | X86_PTE_US);
1802 if (RT_SUCCESS(rc))
1803 {
1804 rc = Bs3PagingQueryAddressInfo(State.uTestAddr.u, &State.PgInfo);
1805 if (RT_SUCCESS(rc))
1806 {
1807//if (bMode & BS3_MODE_CODE_64) ASMHalt();
1808 /* Set values that derive from the test memory size and paging info. */
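/* Legacy 32-bit paging has 1024 four-byte entries per table, PAE/long mode has 512 eight-byte ones, so PAGE_SIZE / ENTRIES yields the entry size used for the backup byte counts. */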
1809 if (State.PgInfo.cEntries == 2)
1810 {
1811 State.cTestPdes = (State.cTestPages + X86_PG_ENTRIES - 1) / X86_PG_ENTRIES;
1812 State.cTest1stPtes = RT_MIN(State.cTestPages, X86_PG_ENTRIES);
1813 State.cbPdeBackup = State.cTestPdes * (X86_PAGE_SIZE / X86_PG_ENTRIES);
1814 State.cbPteBackup = State.cTest1stPtes * (X86_PAGE_SIZE / X86_PG_ENTRIES);
1815 }
1816 else
1817 {
1818 State.cTestPdes = (State.cTestPages + X86_PG_PAE_ENTRIES - 1) / X86_PG_PAE_ENTRIES;
1819 State.cTest1stPtes = RT_MIN(State.cTestPages, X86_PG_PAE_ENTRIES);
1820 State.cbPdeBackup = State.cTestPdes * (X86_PAGE_SIZE / X86_PG_PAE_ENTRIES);
1821 State.cbPteBackup = State.cTest1stPtes * (X86_PAGE_SIZE / X86_PG_PAE_ENTRIES);
1822 }
1823#ifdef BS3CPUBASIC2PF_FASTER
1824 State.cbPteBackup = State.PgInfo.cbEntry * 4;
1825#endif
1826 if (State.cTestPdes <= RT_ELEMENTS(State.au64PdeBackup))
1827 {
1828 uint32_t cr0 = ASMGetCR0();
1829
1830 /* Back up the structures. */
1831 Bs3MemCpy(&State.PteBackup, State.PgInfo.u.Legacy.pPte, State.cbPteBackup);
1832 Bs3MemCpy(State.au64PdeBackup, State.PgInfo.u.Legacy.pPde, State.cbPdeBackup);
1833 if (State.PgInfo.cEntries > 2)
1834 State.u64PdpteBackup = State.PgInfo.u.Pae.pPdpe->u;
1835 if (State.PgInfo.cEntries > 3)
1836 State.u64Pml4eBackup = State.PgInfo.u.Pae.pPml4e->u;
1837
1838 /*
1839 * Setup a 16-bit selector for accessing the alias.
1840 */
1841 Bs3SelSetup16BitData(&Bs3GdteSpare00, State.uTestAddr.u32);
1842 State.uSel16TestData = BS3_SEL_SPARE_00 | 3;
1843
1844 /*
1845 * Do the testing.
1846 */
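/* First pass with CR0.WP clear, then (on 486+, where WP exists) with WP set; if the CPU has NX, repeat both passes with EFER.NXE enabled, treating no-execute as effective only when the paging entries are 8 bytes wide (PAE/long mode). */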
1847 ASMSetCR0(ASMGetCR0() & ~X86_CR0_WP);
1848 bRet = bs3CpuBasic2_RaiseXcpt0eWorker(&State, false /*fWp*/, false /*fNxe*/);
1849 if (bRet == 0 && (g_uBs3CpuDetected & BS3CPU_TYPE_MASK) >= BS3CPU_80486)
1850 {
1851 ASMSetCR0(ASMGetCR0() | X86_CR0_WP);
1852 bRet = bs3CpuBasic2_RaiseXcpt0eWorker(&State, true /*fWp*/, false /*fNxe*/);
1853 }
1854
1855 /* Do again with NX enabled. */
1856 if (bRet == 0 && (g_uBs3CpuDetected & BS3CPU_F_NX))
1857 {
1858 ASMWrMsr(MSR_K6_EFER, ASMRdMsr(MSR_K6_EFER) | MSR_K6_EFER_NXE);
1859 ASMSetCR0(ASMGetCR0() & ~X86_CR0_WP);
1860 bRet = bs3CpuBasic2_RaiseXcpt0eWorker(&State, false /*fWp*/, State.PgInfo.cbEntry == 8 /*fNxe*/);
1861 ASMSetCR0(ASMGetCR0() | X86_CR0_WP);
1862 bRet = bs3CpuBasic2_RaiseXcpt0eWorker(&State, true /*fWp*/, State.PgInfo.cbEntry == 8 /*fNxe*/);
1863 ASMWrMsr(MSR_K6_EFER, ASMRdMsr(MSR_K6_EFER) & ~MSR_K6_EFER_NXE);
1864 }
1865 bs3CpuBasic2Pf_RestoreFromBackups(&State);
1866 ASMSetCR0((ASMGetCR0() & ~X86_CR0_WP) | (cr0 & X86_CR0_WP));
1867 }
1868 else
1869 Bs3TestFailedF("cTestPdes=%u!\n", State.cTestPdes);
1870 }
1871 else
1872 Bs3TestFailedF("Bs3PagingQueryAddressInfo failed: %d\n", rc);
1873 Bs3PagingUnalias(State.uTestAddr.u, State.cbTest);
1874 }
1875 else
1876 Bs3TestFailedF("Bs3PagingAlias failed! rc=%d\n", rc);
1877 Bs3MemFree(pvTestUnaligned, cbTestUnaligned);
1878 return bRet;
1879}
1880