VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/CPUMAllMsrs.cpp@76148

Last change on this file since 76148 was 76147, checked in by vboxsync on 2018-12-11

VMM: Nested VMX: bugref:9180 Use VMX's allowed0 and allowed1 bits nomenclature, it's easier when parsing the spec despite 'val' and 'zap' being shorter and nicer in Log statements.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 255.0 KB
1/* $Id: CPUMAllMsrs.cpp 76147 2018-12-11 06:56:36Z vboxsync $ */
2/** @file
3 * CPUM - CPU MSR Registers.
4 */
5
6/*
7 * Copyright (C) 2013-2017 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*********************************************************************************************************************************
20* Header Files *
21*********************************************************************************************************************************/
22#define LOG_GROUP LOG_GROUP_CPUM
23#include <VBox/vmm/cpum.h>
24#include <VBox/vmm/apic.h>
25#include <VBox/vmm/hm.h>
26#include <VBox/vmm/hm_vmx.h>
27#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
28# include <VBox/vmm/iem.h>
29#endif
30#include <VBox/vmm/tm.h>
31#include <VBox/vmm/gim.h>
32#include "CPUMInternal.h"
33#include <VBox/vmm/vm.h>
34#include <VBox/err.h>
35
36
37/*********************************************************************************************************************************
38* Defined Constants And Macros *
39*********************************************************************************************************************************/
40/**
41 * Validates the CPUMMSRRANGE::offCpumCpu value and declares a local variable
42 * pointing to it.
43 *
44 * ASSUMES sizeof(a_Type) is a power of two and that the member is aligned
45 * correctly.
46 */
47#define CPUM_MSR_ASSERT_CPUMCPU_OFFSET_RETURN(a_pVCpu, a_pRange, a_Type, a_VarName) \
48 AssertMsgReturn( (a_pRange)->offCpumCpu >= 8 \
49 && (a_pRange)->offCpumCpu < sizeof(CPUMCPU) \
50 && !((a_pRange)->offCpumCpu & (RT_MIN(sizeof(a_Type), 8) - 1)) \
51 , ("offCpumCpu=%#x %s\n", (a_pRange)->offCpumCpu, (a_pRange)->szName), \
52 VERR_CPUM_MSR_BAD_CPUMCPU_OFFSET); \
53 a_Type *a_VarName = (a_Type *)((uintptr_t)&(a_pVCpu)->cpum.s + (a_pRange)->offCpumCpu)
54
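/*
 * A minimal sketch (not part of the file) of the range check the macro above
 * performs before declaring the typed pointer: the offset must skip the first
 * 8 bytes, stay inside CPUMCPU, and be naturally aligned for the member type.
 * The parameter names are hypothetical stand-ins; cbMember is assumed to be a
 * power of two, as the macro's doc comment already requires.
 */
static bool IsValidCpumCpuOffset(size_t offCpumCpu, size_t cbCpumCpu, size_t cbMember)
{
    size_t const cbAlign = cbMember < 8 ? cbMember : 8;     /* RT_MIN(sizeof(a_Type), 8) */
    return offCpumCpu >= 8                                  /* skips the leading area */
        && offCpumCpu < cbCpumCpu                           /* stays inside CPUMCPU */
        && (offCpumCpu & (cbAlign - 1)) == 0;               /* naturally aligned */
}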
55
56/*********************************************************************************************************************************
57* Structures and Typedefs *
58*********************************************************************************************************************************/
59
60/**
61 * Implements reading one or more MSRs.
62 *
63 * @returns VBox status code.
64 * @retval VINF_SUCCESS on success.
65 * @retval VINF_CPUM_R3_MSR_READ if the MSR read could not be serviced in the
66 * current context (raw-mode or ring-0).
67 * @retval VERR_CPUM_RAISE_GP_0 on failure (invalid MSR).
68 *
69 * @param pVCpu The cross context virtual CPU structure.
70 * @param idMsr The MSR we're reading.
71 * @param pRange The MSR range descriptor.
72 * @param puValue Where to return the value.
73 */
74typedef DECLCALLBACK(VBOXSTRICTRC) FNCPUMRDMSR(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue);
75/** Pointer to a RDMSR worker for a specific MSR or range of MSRs. */
76typedef FNCPUMRDMSR *PFNCPUMRDMSR;
77
78
79/**
80 * Implements writing one or more MSRs.
81 *
82 * @retval VINF_SUCCESS on success.
83 * @retval VINF_CPUM_R3_MSR_WRITE if the MSR write could not be serviced in the
84 * current context (raw-mode or ring-0).
85 * @retval VERR_CPUM_RAISE_GP_0 on failure.
86 *
87 * @param pVCpu The cross context virtual CPU structure.
88 * @param idMsr The MSR we're writing.
89 * @param pRange The MSR range descriptor.
90 * @param uValue The value to set, ignored bits masked.
91 * @param uRawValue The raw value with the ignored bits not masked.
92 */
93typedef DECLCALLBACK(VBOXSTRICTRC) FNCPUMWRMSR(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue);
94/** Pointer to a WRMSR worker for a specific MSR or range of MSRs. */
95typedef FNCPUMWRMSR *PFNCPUMWRMSR;
96
97
98
99/*
100 * Generic functions.
101 * Generic functions.
102 * Generic functions.
103 */
104
105
106/** @callback_method_impl{FNCPUMRDMSR} */
107static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_FixedValue(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
108{
109 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
110 *puValue = pRange->uValue;
111 return VINF_SUCCESS;
112}
113
114
115/** @callback_method_impl{FNCPUMWRMSR} */
116static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IgnoreWrite(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
117{
118 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
119 Log(("CPUM: Ignoring WRMSR %#x (%s), %#llx\n", idMsr, pRange->szName, uValue));
120 return VINF_SUCCESS;
121}
122
123
124/** @callback_method_impl{FNCPUMRDMSR} */
125static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_WriteOnly(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
126{
127 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(puValue);
128 return VERR_CPUM_RAISE_GP_0;
129}
130
131
132/** @callback_method_impl{FNCPUMWRMSR} */
133static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_ReadOnly(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
134{
135 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
136 Assert(pRange->fWrGpMask == UINT64_MAX);
137 return VERR_CPUM_RAISE_GP_0;
138}
139
140
141
142
143/*
144 * IA32
145 * IA32
146 * IA32
147 */
148
149/** @callback_method_impl{FNCPUMRDMSR} */
150static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32P5McAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
151{
152 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
153 *puValue = 0; /** @todo implement machine check injection. */
154 return VINF_SUCCESS;
155}
156
157
158/** @callback_method_impl{FNCPUMWRMSR} */
159static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32P5McAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
160{
161 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
162 /** @todo implement machine check injection. */
163 return VINF_SUCCESS;
164}
165
166
167/** @callback_method_impl{FNCPUMRDMSR} */
168static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32P5McType(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
169{
170 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
171 *puValue = 0; /** @todo implement machine check injection. */
172 return VINF_SUCCESS;
173}
174
175
176/** @callback_method_impl{FNCPUMWRMSR} */
177static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32P5McType(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
178{
179 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
180 /** @todo implement machine check injection. */
181 return VINF_SUCCESS;
182}
183
184
185/** @callback_method_impl{FNCPUMRDMSR} */
186static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32TimestampCounter(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
187{
188 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
189 *puValue = TMCpuTickGet(pVCpu);
190#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
191 *puValue = CPUMApplyNestedGuestTscOffset(pVCpu, *puValue);
192#endif
193 return VINF_SUCCESS;
194}
195
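/*
 * A minimal sketch (not from this file) of what a TSC-offset helper such as
 * CPUMApplyNestedGuestTscOffset conceptually does: both VT-x and AMD-V let the
 * outer hypervisor specify a signed offset that the CPU adds to the raw
 * counter while the nested guest runs.  The helper name and signature below
 * are hypothetical.
 */
static uint64_t ApplyTscOffsetSketch(uint64_t uRawTsc, int64_t iTscOffset)
{
    return uRawTsc + (uint64_t)iTscOffset;  /* wraps modulo 2^64, like the hardware */
}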
196
197/** @callback_method_impl{FNCPUMWRMSR} */
198static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32TimestampCounter(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
199{
200 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
201 TMCpuTickSet(pVCpu->CTX_SUFF(pVM), pVCpu, uValue);
202 return VINF_SUCCESS;
203}
204
205
206/** @callback_method_impl{FNCPUMRDMSR} */
207static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PlatformId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
208{
209 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
210 uint64_t uValue = pRange->uValue;
211 if (uValue & 0x1f00)
212 {
213 /* Max allowed bus ratio present. */
214 /** @todo Implement scaled BUS frequency. */
215 }
216
217 *puValue = uValue;
218 return VINF_SUCCESS;
219}
220
221
222/** @callback_method_impl{FNCPUMRDMSR} */
223static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ApicBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
224{
225 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
226 return APICGetBaseMsr(pVCpu, puValue);
227}
228
229
230/** @callback_method_impl{FNCPUMWRMSR} */
231static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32ApicBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
232{
233 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
234 return APICSetBaseMsr(pVCpu, uValue);
235}
236
237
238/**
239 * Get fixed IA32_FEATURE_CONTROL value for NEM and cpumMsrRd_Ia32FeatureControl.
240 *
241 * @returns Fixed IA32_FEATURE_CONTROL value.
242 * @param pVCpu The cross context per CPU structure.
243 */
244VMM_INT_DECL(uint64_t) CPUMGetGuestIa32FeatureControl(PVMCPU pVCpu)
245{
246 /* Always report the MSR lock bit as set, in order to prevent guests from modifying this MSR. */
247 uint64_t fFeatCtl = MSR_IA32_FEATURE_CONTROL_LOCK;
248
249 /* Report VMX features. */
250 if (pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fVmx)
251 fFeatCtl |= MSR_IA32_FEATURE_CONTROL_VMXON;
252
253 return fFeatCtl;
254}
255
256/** @callback_method_impl{FNCPUMRDMSR} */
257static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32FeatureControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
258{
259 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
260 *puValue = CPUMGetGuestIa32FeatureControl(pVCpu);
261 return VINF_SUCCESS;
262}
263
264
265/** @callback_method_impl{FNCPUMWRMSR} */
266static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32FeatureControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
267{
268 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
269 return VERR_CPUM_RAISE_GP_0;
270}
271
272
273/** @callback_method_impl{FNCPUMRDMSR} */
274static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32BiosSignId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
275{
276 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
277 /** @todo fake microcode update. */
278 *puValue = pRange->uValue;
279 return VINF_SUCCESS;
280}
281
282
283/** @callback_method_impl{FNCPUMWRMSR} */
284static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32BiosSignId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
285{
286 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
287 /* Normally, zero is written to Ia32BiosSignId before reading it in order
288 to select the signature instead of the BBL_CR_D3 behaviour. The GP mask
289 of the database entry should take care of most illegal writes for now, so
290 just ignore all writes atm. */
291 return VINF_SUCCESS;
292}
293
294
295/** @callback_method_impl{FNCPUMWRMSR} */
296static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32BiosUpdateTrigger(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
297{
298 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
299
300 /* Microcode updates cannot be loaded in VMX non-root mode. */
301 if (CPUMIsGuestInVmxNonRootMode(&pVCpu->cpum.s.Guest))
302 return VINF_SUCCESS;
303
304 /** @todo Fake bios update trigger better. The value is the address to an
305 * update package, I think. We should probably GP if it's invalid. */
306 return VINF_SUCCESS;
307}
308
309
310/**
311 * Get MSR_IA32_SMM_MONITOR_CTL value for IEM and cpumMsrRd_Ia32SmmMonitorCtl.
312 *
313 * @returns The MSR_IA32_SMM_MONITOR_CTL value.
314 * @param pVCpu The cross context per CPU structure.
315 */
316VMM_INT_DECL(uint64_t) CPUMGetGuestIa32SmmMonitorCtl(PVMCPU pVCpu)
317{
318 /* We do not support dual-monitor treatment for SMI and SMM. */
319 /** @todo SMM. */
320 RT_NOREF(pVCpu);
321 return 0;
322}
323
324
325/** @callback_method_impl{FNCPUMRDMSR} */
326static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SmmMonitorCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
327{
328 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
329 *puValue = CPUMGetGuestIa32SmmMonitorCtl(pVCpu);
330 return VINF_SUCCESS;
331}
332
333
334/** @callback_method_impl{FNCPUMWRMSR} */
335static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SmmMonitorCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
336{
337 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
338 /** @todo SMM. */
339 return VINF_SUCCESS;
340}
341
342
343/** @callback_method_impl{FNCPUMRDMSR} */
344static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PmcN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
345{
346 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
347 /** @todo check CPUID leaf 0ah. */
348 *puValue = 0;
349 return VINF_SUCCESS;
350}
351
352
353/** @callback_method_impl{FNCPUMWRMSR} */
354static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PmcN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
355{
356 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
357 /** @todo check CPUID leaf 0ah. */
358 return VINF_SUCCESS;
359}
360
361
362/** @callback_method_impl{FNCPUMRDMSR} */
363static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MonitorFilterLineSize(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
364{
365 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
366 /** @todo return 0x1000 if we try emulate mwait 100% correctly. */
367 *puValue = 0x40; /** @todo Change to CPU cache line size. */
368 return VINF_SUCCESS;
369}
370
371
372/** @callback_method_impl{FNCPUMWRMSR} */
373static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MonitorFilterLineSize(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
374{
375 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
376 /** @todo should remember writes, though it's supposedly something only a BIOS
377 * would write, so it's not extremely important. */
378 return VINF_SUCCESS;
379}
380
381/** @callback_method_impl{FNCPUMRDMSR} */
382static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MPerf(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
383{
384 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
385 /** @todo Read MPERF: Adjust against previously written MPERF value. Is TSC
386 * what we want? */
387 *puValue = TMCpuTickGet(pVCpu);
388#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
389 *puValue = CPUMApplyNestedGuestTscOffset(pVCpu, *puValue);
390#endif
391 return VINF_SUCCESS;
392}
393
394
395/** @callback_method_impl{FNCPUMWRMSR} */
396static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MPerf(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
397{
398 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
399 /** @todo Write MPERF: Calc adjustment. */
400 return VINF_SUCCESS;
401}
402
403
404/** @callback_method_impl{FNCPUMRDMSR} */
405static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32APerf(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
406{
407 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
408 /** @todo Read APERF: Adjust against previously written MPERF value. Is TSC
409 * what we want? */
410 *puValue = TMCpuTickGet(pVCpu);
411#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
412 *puValue = CPUMApplyNestedGuestTscOffset(pVCpu, *puValue);
413#endif
414 return VINF_SUCCESS;
415}
416
417
418/** @callback_method_impl{FNCPUMWRMSR} */
419static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32APerf(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
420{
421 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
422 /** @todo Write APERF: Calc adjustment. */
423 return VINF_SUCCESS;
424}
425
426
427/**
428 * Get fixed IA32_MTRR_CAP value for NEM and cpumMsrRd_Ia32MtrrCap.
429 *
430 * @returns Fixed IA32_MTRR_CAP value.
431 * @param pVCpu The cross context per CPU structure.
432 */
433VMM_INT_DECL(uint64_t) CPUMGetGuestIa32MtrrCap(PVMCPU pVCpu)
434{
435 RT_NOREF_PV(pVCpu);
436
437 /* This is currently a bit weird. :-) */
438 uint8_t const cVariableRangeRegs = 0;
439 bool const fSystemManagementRangeRegisters = false;
440 bool const fFixedRangeRegisters = false;
441 bool const fWriteCombiningType = false;
442 return cVariableRangeRegs
443 | (fFixedRangeRegisters ? RT_BIT_64(8) : 0)
444 | (fWriteCombiningType ? RT_BIT_64(10) : 0)
445 | (fSystemManagementRangeRegisters ? RT_BIT_64(11) : 0);
446}
447
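/*
 * A small decoding sketch (not part of the file): the IA32_MTRR_CAP layout
 * composed by CPUMGetGuestIa32MtrrCap above, read back field by field.  Bit
 * positions follow that composition: VCNT in bits 7:0, fixed-range support in
 * bit 8, write-combining in bit 10, SMRR in bit 11.  The struct and function
 * names are hypothetical.
 */
typedef struct MTRRCAPDECODED
{
    uint8_t cVariableRanges;    /* bits 7:0 - number of variable range register pairs */
    bool    fFixedSupported;    /* bit 8    - fixed range MTRRs supported */
    bool    fWriteCombining;    /* bit 10   - WC memory type supported */
    bool    fSmrrSupported;     /* bit 11   - SMRR interface supported */
} MTRRCAPDECODED;

static MTRRCAPDECODED DecodeMtrrCapSketch(uint64_t uMtrrCap)
{
    MTRRCAPDECODED Decoded;
    Decoded.cVariableRanges = (uint8_t)(uMtrrCap & 0xff);
    Decoded.fFixedSupported = (uMtrrCap & RT_BIT_64(8))  != 0;
    Decoded.fWriteCombining = (uMtrrCap & RT_BIT_64(10)) != 0;
    Decoded.fSmrrSupported  = (uMtrrCap & RT_BIT_64(11)) != 0;
    return Decoded;
}
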
448/** @callback_method_impl{FNCPUMRDMSR} */
449static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
450{
451 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
452 *puValue = CPUMGetGuestIa32MtrrCap(pVCpu);
453 return VINF_SUCCESS;
454}
455
456
457/** @callback_method_impl{FNCPUMRDMSR} */
458static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrPhysBaseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
459{
460 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
461 /** @todo Implement variable MTRR storage. */
462 Assert(pRange->uValue == (idMsr - 0x200) / 2);
463 *puValue = 0;
464 return VINF_SUCCESS;
465}
466
467
468/** @callback_method_impl{FNCPUMWRMSR} */
469static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MtrrPhysBaseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
470{
471 /*
472 * Validate the value.
473 */
474 Assert(pRange->uValue == (idMsr - 0x200) / 2);
475 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(uRawValue); RT_NOREF_PV(pRange);
476
477 uint8_t uType = uValue & 0xff;
478 if ((uType >= 7) || (uType == 2) || (uType == 3))
479 {
480 Log(("CPUM: Invalid type set writing MTRR PhysBase MSR %#x: %#llx (%#llx)\n", idMsr, uValue, uType));
481 return VERR_CPUM_RAISE_GP_0;
482 }
483
484 uint64_t fInvPhysMask = ~(RT_BIT_64(pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.cMaxPhysAddrWidth) - 1U);
485 if (fInvPhysMask & uValue)
486 {
487 Log(("CPUM: Invalid physical address bits set writing MTRR PhysBase MSR %#x: %#llx (%#llx)\n",
488 idMsr, uValue, uValue & fInvPhysMask));
489 return VERR_CPUM_RAISE_GP_0;
490 }
491
492 /*
493 * Store it.
494 */
495 /** @todo Implement variable MTRR storage. */
496 return VINF_SUCCESS;
497}
498
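/*
 * A minimal sketch (not part of the file) of the memory type check shared by
 * the MTRR write handlers above.  The valid MTRR type encodings are 0 (UC),
 * 1 (WC), 4 (WT), 5 (WP) and 6 (WB); types 2, 3 and anything above 6 raise
 * #GP, which is exactly what the (uType >= 7 || uType == 2 || uType == 3)
 * test rejects.  The helper name is hypothetical.
 */
static bool IsValidMtrrMemTypeSketch(uint8_t uType)
{
    return uType <= 6 && uType != 2 && uType != 3;
}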
499
500/** @callback_method_impl{FNCPUMRDMSR} */
501static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrPhysMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
502{
503 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
504 /** @todo Implement variable MTRR storage. */
505 Assert(pRange->uValue == (idMsr - 0x200) / 2);
506 *puValue = 0;
507 return VINF_SUCCESS;
508}
509
510
511/** @callback_method_impl{FNCPUMWRMSR} */
512static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MtrrPhysMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
513{
514 /*
515 * Validate the value.
516 */
517 Assert(pRange->uValue == (idMsr - 0x200) / 2);
518 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(uRawValue); RT_NOREF_PV(pRange);
519
520 uint64_t fInvPhysMask = ~(RT_BIT_64(pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.cMaxPhysAddrWidth) - 1U);
521 if (fInvPhysMask & uValue)
522 {
523 Log(("CPUM: Invalid physical address bits set writing MTRR PhysMask MSR %#x: %#llx (%#llx)\n",
524 idMsr, uValue, uValue & fInvPhysMask));
525 return VERR_CPUM_RAISE_GP_0;
526 }
527
528 /*
529 * Store it.
530 */
531 /** @todo Implement variable MTRR storage. */
532 return VINF_SUCCESS;
533}
534
535
536/** @callback_method_impl{FNCPUMRDMSR} */
537static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrFixed(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
538{
539 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
540 CPUM_MSR_ASSERT_CPUMCPU_OFFSET_RETURN(pVCpu, pRange, uint64_t, puFixedMtrr);
541 *puValue = *puFixedMtrr;
542 return VINF_SUCCESS;
543}
544
545
546/** @callback_method_impl{FNCPUMWRMSR} */
547static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MtrrFixed(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
548{
549 CPUM_MSR_ASSERT_CPUMCPU_OFFSET_RETURN(pVCpu, pRange, uint64_t, puFixedMtrr);
550 RT_NOREF_PV(idMsr); RT_NOREF_PV(uRawValue);
551
552 for (uint32_t cShift = 0; cShift < 63; cShift += 8)
553 {
554 uint8_t uType = (uint8_t)(uValue >> cShift);
555 if ((uType >= 7) || (uType == 2) || (uType == 3))
556 {
557 Log(("CPUM: Invalid MTRR type at %u:%u in fixed range (%#x/%s): %#llx (%#llx)\n",
558 cShift + 7, cShift, idMsr, pRange->szName, uValue, uType));
559 return VERR_CPUM_RAISE_GP_0;
560 }
561 }
562 *puFixedMtrr = uValue;
563 return VINF_SUCCESS;
564}
565
566
567/** @callback_method_impl{FNCPUMRDMSR} */
568static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrDefType(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
569{
570 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
571 *puValue = pVCpu->cpum.s.GuestMsrs.msr.MtrrDefType;
572 return VINF_SUCCESS;
573}
574
575
576/** @callback_method_impl{FNCPUMWRMSR} */
577static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MtrrDefType(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
578{
579 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
580
581 uint8_t uType = uValue & 0xff;
582 if ((uType >= 7) || (uType == 2) || (uType == 3))
583 {
584 Log(("CPUM: Invalid MTRR default type value on %s: %#llx (%#llx)\n", pRange->szName, uValue, uType));
585 return VERR_CPUM_RAISE_GP_0;
586 }
587
588 pVCpu->cpum.s.GuestMsrs.msr.MtrrDefType = uValue;
589 return VINF_SUCCESS;
590}
591
592
593/** @callback_method_impl{FNCPUMRDMSR} */
594static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32Pat(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
595{
596 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
597 *puValue = pVCpu->cpum.s.Guest.msrPAT;
598 return VINF_SUCCESS;
599}
600
601
602/** @callback_method_impl{FNCPUMWRMSR} */
603static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32Pat(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
604{
605 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
606 if (CPUMIsPatMsrValid(uValue))
607 {
608 pVCpu->cpum.s.Guest.msrPAT = uValue;
609 return VINF_SUCCESS;
610 }
611 return VERR_CPUM_RAISE_GP_0;
612}
613
614
615/** @callback_method_impl{FNCPUMRDMSR} */
616static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SysEnterCs(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
617{
618 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
619 *puValue = pVCpu->cpum.s.Guest.SysEnter.cs;
620 return VINF_SUCCESS;
621}
622
623
624/** @callback_method_impl{FNCPUMWRMSR} */
625static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SysEnterCs(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
626{
627 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
628
629 /* Note! We used to mask this by 0xffff, but it turns out real HW doesn't, and
630 there are generally 32 working bits backing this register. */
631 pVCpu->cpum.s.Guest.SysEnter.cs = uValue;
632 return VINF_SUCCESS;
633}
634
635
636/** @callback_method_impl{FNCPUMRDMSR} */
637static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SysEnterEsp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
638{
639 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
640 *puValue = pVCpu->cpum.s.Guest.SysEnter.esp;
641 return VINF_SUCCESS;
642}
643
644
645/** @callback_method_impl{FNCPUMWRMSR} */
646static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SysEnterEsp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
647{
648 if (X86_IS_CANONICAL(uValue))
649 {
650 pVCpu->cpum.s.Guest.SysEnter.esp = uValue;
651 return VINF_SUCCESS;
652 }
653 Log(("CPUM: IA32_SYSENTER_ESP not canonical! %#llx\n", uValue));
654 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
655 return VERR_CPUM_RAISE_GP_0;
656}
657
658
659/** @callback_method_impl{FNCPUMRDMSR} */
660static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SysEnterEip(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
661{
662 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
663 *puValue = pVCpu->cpum.s.Guest.SysEnter.eip;
664 return VINF_SUCCESS;
665}
666
667
668/** @callback_method_impl{FNCPUMWRMSR} */
669static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SysEnterEip(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
670{
671 if (X86_IS_CANONICAL(uValue))
672 {
673 pVCpu->cpum.s.Guest.SysEnter.eip = uValue;
674 return VINF_SUCCESS;
675 }
676 LogRel(("CPUM: IA32_SYSENTER_EIP not canonical! %#llx\n", uValue));
677 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
678 return VERR_CPUM_RAISE_GP_0;
679}
680
681
682/** @callback_method_impl{FNCPUMRDMSR} */
683static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McgCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
684{
685#if 0 /** @todo implement machine checks. */
686 *puValue = pRange->uValue & (RT_BIT_64(8) | 0);
687#else
688 *puValue = 0;
689#endif
690 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
691 return VINF_SUCCESS;
692}
693
694
695/** @callback_method_impl{FNCPUMRDMSR} */
696static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McgStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
697{
698 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
699 /** @todo implement machine checks. */
700 *puValue = 0;
701 return VINF_SUCCESS;
702}
703
704
705/** @callback_method_impl{FNCPUMWRMSR} */
706static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32McgStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
707{
708 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
709 /** @todo implement machine checks. */
710 return VINF_SUCCESS;
711}
712
713
714/** @callback_method_impl{FNCPUMRDMSR} */
715static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McgCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
716{
717 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
718 /** @todo implement machine checks. */
719 *puValue = 0;
720 return VINF_SUCCESS;
721}
722
723
724/** @callback_method_impl{FNCPUMWRMSR} */
725static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32McgCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
726{
727 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
728 /** @todo implement machine checks. */
729 return VINF_SUCCESS;
730}
731
732
733/** @callback_method_impl{FNCPUMRDMSR} */
734static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32DebugCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
735{
736 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
737 /** @todo implement IA32_DEBUGCTL. */
738 *puValue = 0;
739 return VINF_SUCCESS;
740}
741
742
743/** @callback_method_impl{FNCPUMWRMSR} */
744static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32DebugCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
745{
746 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
747 /** @todo implement IA32_DEBUGCTL. */
748 return VINF_SUCCESS;
749}
750
751
752/** @callback_method_impl{FNCPUMRDMSR} */
753static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SmrrPhysBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
754{
755 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
756 /** @todo implement intel SMM. */
757 *puValue = 0;
758 return VINF_SUCCESS;
759}
760
761
762/** @callback_method_impl{FNCPUMWRMSR} */
763static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SmrrPhysBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
764{
765 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
766 /** @todo implement intel SMM. */
767 return VERR_CPUM_RAISE_GP_0;
768}
769
770
771/** @callback_method_impl{FNCPUMRDMSR} */
772static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SmrrPhysMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
773{
774 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
775 /** @todo implement intel SMM. */
776 *puValue = 0;
777 return VINF_SUCCESS;
778}
779
780
781/** @callback_method_impl{FNCPUMWRMSR} */
782static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SmrrPhysMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
783{
784 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
785 /** @todo implement intel SMM. */
786 return VERR_CPUM_RAISE_GP_0;
787}
788
789
790/** @callback_method_impl{FNCPUMRDMSR} */
791static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PlatformDcaCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
792{
793 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
794 /** @todo implement intel direct cache access (DCA)?? */
795 *puValue = 0;
796 return VINF_SUCCESS;
797}
798
799
800/** @callback_method_impl{FNCPUMWRMSR} */
801static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PlatformDcaCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
802{
803 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
804 /** @todo implement intel direct cache access (DCA)?? */
805 return VINF_SUCCESS;
806}
807
808
809/** @callback_method_impl{FNCPUMRDMSR} */
810static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32CpuDcaCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
811{
812 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
813 /** @todo implement intel direct cache access (DCA)?? */
814 *puValue = 0;
815 return VINF_SUCCESS;
816}
817
818
819/** @callback_method_impl{FNCPUMRDMSR} */
820static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32Dca0Cap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
821{
822 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
823 /** @todo implement intel direct cache access (DCA)?? */
824 *puValue = 0;
825 return VINF_SUCCESS;
826}
827
828
829/** @callback_method_impl{FNCPUMWRMSR} */
830static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32Dca0Cap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
831{
832 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
833 /** @todo implement intel direct cache access (DCA)?? */
834 return VINF_SUCCESS;
835}
836
837
838/** @callback_method_impl{FNCPUMRDMSR} */
839static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfEvtSelN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
840{
841 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
842 /** @todo implement IA32_PERFEVTSEL0+. */
843 *puValue = 0;
844 return VINF_SUCCESS;
845}
846
847
848/** @callback_method_impl{FNCPUMWRMSR} */
849static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfEvtSelN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
850{
851 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
852 /** @todo implement IA32_PERFEVTSEL0+. */
853 return VINF_SUCCESS;
854}
855
856
857/** @callback_method_impl{FNCPUMRDMSR} */
858static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
859{
860 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
861 uint64_t uValue = pRange->uValue;
862
863 /* Always provide the max bus ratio for now. XNU expects it. */
864 uValue &= ~((UINT64_C(0x1f) << 40) | RT_BIT_64(46));
865
866 PVM pVM = pVCpu->CTX_SUFF(pVM);
867 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVM);
868 uint64_t uTscHz = TMCpuTicksPerSecond(pVM);
869 uint8_t uTscRatio = (uint8_t)((uTscHz + uScalableBusHz / 2) / uScalableBusHz);
870 if (uTscRatio > 0x1f)
871 uTscRatio = 0x1f;
872 uValue |= (uint64_t)uTscRatio << 40;
873
874 *puValue = uValue;
875 return VINF_SUCCESS;
876}
877
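/*
 * A worked sketch (not part of the file) of the bus-ratio arithmetic in
 * cpumMsrRd_Ia32PerfStatus above: the TSC-to-scalable-bus ratio is rounded,
 * clamped to the 5-bit field, and placed in bits 44:40.  For example, a
 * 2400 MHz TSC over a 100 MHz scalable bus yields a ratio of 24.  The helper
 * name is hypothetical.
 */
static uint64_t InsertMaxBusRatioSketch(uint64_t uPerfStatus, uint64_t uTscHz, uint64_t uScalableBusHz)
{
    uint64_t uRatio = (uTscHz + uScalableBusHz / 2) / uScalableBusHz;   /* rounded division */
    if (uRatio > 0x1f)
        uRatio = 0x1f;                                                  /* 5-bit field */
    uPerfStatus &= ~((UINT64_C(0x1f) << 40) | RT_BIT_64(46));           /* clear ratio field and bit 46 */
    return uPerfStatus | (uRatio << 40);
}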
878
879/** @callback_method_impl{FNCPUMWRMSR} */
880static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
881{
882 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
883 /* Pentium4 allows writing, but all bits are ignored. */
884 return VINF_SUCCESS;
885}
886
887
888/** @callback_method_impl{FNCPUMRDMSR} */
889static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
890{
891 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
892 /** @todo implement IA32_PERFCTL. */
893 *puValue = 0;
894 return VINF_SUCCESS;
895}
896
897
898/** @callback_method_impl{FNCPUMWRMSR} */
899static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
900{
901 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
902 /** @todo implement IA32_PERFCTL. */
903 return VINF_SUCCESS;
904}
905
906
907/** @callback_method_impl{FNCPUMRDMSR} */
908static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32FixedCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
909{
910 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
911 /** @todo implement IA32_FIXED_CTRn (fixed performance counters). */
912 *puValue = 0;
913 return VINF_SUCCESS;
914}
915
916
917/** @callback_method_impl{FNCPUMWRMSR} */
918static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32FixedCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
919{
920 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
921 /** @todo implement IA32_FIXED_CTRn (fixed performance counters). */
922 return VINF_SUCCESS;
923}
924
925
926/** @callback_method_impl{FNCPUMRDMSR} */
927static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfCapabilities(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
928{
929 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
930 /** @todo implement performance counters. */
931 *puValue = 0;
932 return VINF_SUCCESS;
933}
934
935
936/** @callback_method_impl{FNCPUMWRMSR} */
937static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfCapabilities(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
938{
939 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
940 /** @todo implement performance counters. */
941 return VINF_SUCCESS;
942}
943
944
945/** @callback_method_impl{FNCPUMRDMSR} */
946static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32FixedCtrCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
947{
948 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
949 /** @todo implement performance counters. */
950 *puValue = 0;
951 return VINF_SUCCESS;
952}
953
954
955/** @callback_method_impl{FNCPUMWRMSR} */
956static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32FixedCtrCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
957{
958 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
959 /** @todo implement performance counters. */
960 return VINF_SUCCESS;
961}
962
963
964/** @callback_method_impl{FNCPUMRDMSR} */
965static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfGlobalStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
966{
967 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
968 /** @todo implement performance counters. */
969 *puValue = 0;
970 return VINF_SUCCESS;
971}
972
973
974/** @callback_method_impl{FNCPUMWRMSR} */
975static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfGlobalStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
976{
977 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
978 /** @todo implement performance counters. */
979 return VINF_SUCCESS;
980}
981
982
983/** @callback_method_impl{FNCPUMRDMSR} */
984static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfGlobalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
985{
986 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
987 /** @todo implement performance counters. */
988 *puValue = 0;
989 return VINF_SUCCESS;
990}
991
992
993/** @callback_method_impl{FNCPUMWRMSR} */
994static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfGlobalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
995{
996 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
997 /** @todo implement performance counters. */
998 return VINF_SUCCESS;
999}
1000
1001
1002/** @callback_method_impl{FNCPUMRDMSR} */
1003static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfGlobalOvfCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1004{
1005 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1006 /** @todo implement performance counters. */
1007 *puValue = 0;
1008 return VINF_SUCCESS;
1009}
1010
1011
1012/** @callback_method_impl{FNCPUMWRMSR} */
1013static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfGlobalOvfCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1014{
1015 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1016 /** @todo implement performance counters. */
1017 return VINF_SUCCESS;
1018}
1019
1020
1021/** @callback_method_impl{FNCPUMRDMSR} */
1022static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PebsEnable(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1023{
1024 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1025 /** @todo implement performance counters. */
1026 *puValue = 0;
1027 return VINF_SUCCESS;
1028}
1029
1030
1031/** @callback_method_impl{FNCPUMWRMSR} */
1032static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PebsEnable(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1033{
1034 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1035 /** @todo implement performance counters. */
1036 return VINF_SUCCESS;
1037}
1038
1039
1040/** @callback_method_impl{FNCPUMRDMSR} */
1041static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ClockModulation(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1042{
1043 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1044 /** @todo implement IA32_CLOCK_MODULATION. */
1045 *puValue = 0;
1046 return VINF_SUCCESS;
1047}
1048
1049
1050/** @callback_method_impl{FNCPUMWRMSR} */
1051static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32ClockModulation(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1052{
1053 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1054 /** @todo implement IA32_CLOCK_MODULATION. */
1055 return VINF_SUCCESS;
1056}
1057
1058
1059/** @callback_method_impl{FNCPUMRDMSR} */
1060static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ThermInterrupt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1061{
1062 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1063 /** @todo implement IA32_THERM_INTERRUPT. */
1064 *puValue = 0;
1065 return VINF_SUCCESS;
1066}
1067
1068
1069/** @callback_method_impl{FNCPUMWRMSR} */
1070static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32ThermInterrupt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1071{
1072 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1073 /** @todo implement IA32_THERM_INTERRUPT. */
1074 return VINF_SUCCESS;
1075}
1076
1077
1078/** @callback_method_impl{FNCPUMRDMSR} */
1079static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ThermStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1080{
1081 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1082 /** @todo implement IA32_THERM_STATUS. */
1083 *puValue = 0;
1084 return VINF_SUCCESS;
1085}
1086
1087
1088/** @callback_method_impl{FNCPUMWRMSR} */
1089static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32ThermStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1090{
1091 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1092 /** @todo implement IA32_THERM_STATUS. */
1093 return VINF_SUCCESS;
1094}
1095
1096
1097/** @callback_method_impl{FNCPUMRDMSR} */
1098static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32Therm2Ctl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1099{
1100 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1101 /** @todo implement IA32_THERM2_CTL. */
1102 *puValue = 0;
1103 return VINF_SUCCESS;
1104}
1105
1106
1107/** @callback_method_impl{FNCPUMWRMSR} */
1108static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32Therm2Ctl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1109{
1110 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1111 /** @todo implement IA32_THERM2_CTL. */
1112 return VINF_SUCCESS;
1113}
1114
1115
1116/** @callback_method_impl{FNCPUMRDMSR} */
1117static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MiscEnable(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1118{
1119 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1120 *puValue = pVCpu->cpum.s.GuestMsrs.msr.MiscEnable;
1121 return VINF_SUCCESS;
1122}
1123
1124
1125/** @callback_method_impl{FNCPUMWRMSR} */
1126static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MiscEnable(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1127{
1128 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1129#ifdef LOG_ENABLED
1130 uint64_t const uOld = pVCpu->cpum.s.GuestMsrs.msr.MiscEnable;
1131#endif
1132
1133 /* Unsupported bits are generally ignored and stripped by the MSR range
1134 entry that got us here. So, we just need to preserve fixed bits. */
1135 pVCpu->cpum.s.GuestMsrs.msr.MiscEnable = uValue
1136 | MSR_IA32_MISC_ENABLE_PEBS_UNAVAIL
1137 | MSR_IA32_MISC_ENABLE_BTS_UNAVAIL;
1138
1139 Log(("CPUM: IA32_MISC_ENABLE; old=%#llx written=%#llx => %#llx\n",
1140 uOld, uValue, pVCpu->cpum.s.GuestMsrs.msr.MiscEnable));
1141
1142 /** @todo Wire IA32_MISC_ENABLE bit 22 to our NT 4 CPUID trick. */
1143 /** @todo Wire up MSR_IA32_MISC_ENABLE_XD_DISABLE. */
1144 return VINF_SUCCESS;
1145}
1146
1147
1148/** @callback_method_impl{FNCPUMRDMSR} */
1149static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McCtlStatusAddrMiscN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1150{
1151 RT_NOREF_PV(pVCpu); RT_NOREF_PV(pRange);
1152
1153 /** @todo Implement machine check exception injection. */
1154 switch (idMsr & 3)
1155 {
1156 case 0:
1157 case 1:
1158 *puValue = 0;
1159 break;
1160
1161 /* The ADDR and MISC registers aren't accessible since the
1162 corresponding STATUS bits are zero. */
1163 case 2:
1164 Log(("CPUM: Reading IA32_MCi_ADDR %#x -> #GP\n", idMsr));
1165 return VERR_CPUM_RAISE_GP_0;
1166 case 3:
1167 Log(("CPUM: Reading IA32_MCi_MISC %#x -> #GP\n", idMsr));
1168 return VERR_CPUM_RAISE_GP_0;
1169 }
1170 return VINF_SUCCESS;
1171}
1172
1173
1174/** @callback_method_impl{FNCPUMWRMSR} */
1175static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32McCtlStatusAddrMiscN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1176{
1177 RT_NOREF_PV(pVCpu); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1178 switch (idMsr & 3)
1179 {
1180 case 0:
1181 /* Ignore writes to the CTL register. */
1182 break;
1183
1184 case 1:
1185 /* According to specs, the STATUS register can only be written to
1186 with the value 0. VBoxCpuReport thinks differently for a
1187 Pentium M Dothan, but implementing according to specs now. */
1188 if (uValue != 0)
1189 {
1190 Log(("CPUM: Writing non-zero value (%#llx) to IA32_MCi_STATUS %#x -> #GP\n", uValue, idMsr));
1191 return VERR_CPUM_RAISE_GP_0;
1192 }
1193 break;
1194
1195 /* Specs state that ADDR and MISC can be cleared by writing zeros.
1196 Writing 1s will GP. Need to figure out how this relates to the
1197 ADDRV and MISCV status flags. If writing is independent of those
1198 bits, we need to know whether the CPU really implements them since
1199 that is exposed by writing 0 to them.
1200 Implementing the solution with the fewer GPs for now. */
1201 case 2:
1202 if (uValue != 0)
1203 {
1204 Log(("CPUM: Writing non-zero value (%#llx) to IA32_MCi_ADDR %#x -> #GP\n", uValue, idMsr));
1205 return VERR_CPUM_RAISE_GP_0;
1206 }
1207 break;
1208 case 3:
1209 if (uValue != 0)
1210 {
1211 Log(("CPUM: Writing non-zero value (%#llx) to IA32_MCi_MISC %#x -> #GP\n", uValue, idMsr));
1212 return VERR_CPUM_RAISE_GP_0;
1213 }
1214 break;
1215 }
1216 return VINF_SUCCESS;
1217}
1218
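/*
 * A decoding sketch (not part of the file) for the machine-check bank MSRs
 * handled above.  It assumes the architectural IA32_MC0_CTL base of 0x400,
 * which is not stated in this excerpt: each bank occupies four consecutive
 * MSRs (CTL, STATUS, ADDR, MISC), so (idMsr & 3) selects the register kind
 * exactly as the two handlers do, because the base is a multiple of four.
 */
typedef enum MCBANKREGKIND { kMcBankCtl = 0, kMcBankStatus, kMcBankAddr, kMcBankMisc } MCBANKREGKIND;

static void DecodeMcBankMsrSketch(uint32_t idMsr, uint32_t *piBank, MCBANKREGKIND *penmKind)
{
    uint32_t const offFromBase = idMsr - UINT32_C(0x400);   /* assumed IA32_MC0_CTL */
    *piBank   = offFromBase / 4;                            /* four MSRs per bank */
    *penmKind = (MCBANKREGKIND)(offFromBase & 3);           /* CTL/STATUS/ADDR/MISC */
}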
1219
1220/** @callback_method_impl{FNCPUMRDMSR} */
1221static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McNCtl2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1222{
1223 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1224 /** @todo Implement machine check exception injection. */
1225 *puValue = 0;
1226 return VINF_SUCCESS;
1227}
1228
1229
1230/** @callback_method_impl{FNCPUMWRMSR} */
1231static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32McNCtl2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1232{
1233 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1234 /** @todo Implement machine check exception injection. */
1235 return VINF_SUCCESS;
1236}
1237
1238
1239/** @callback_method_impl{FNCPUMRDMSR} */
1240static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32DsArea(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1241{
1242 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1243 /** @todo implement IA32_DS_AREA. */
1244 *puValue = 0;
1245 return VINF_SUCCESS;
1246}
1247
1248
1249/** @callback_method_impl{FNCPUMWRMSR} */
1250static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32DsArea(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1251{
1252 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1253 /** @todo implement IA32_DS_AREA. */
1254 return VINF_SUCCESS;
1255}
1256
1257
1258/** @callback_method_impl{FNCPUMRDMSR} */
1259static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32TscDeadline(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1260{
1261 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1262 /** @todo implement TSC deadline timer. */
1263 *puValue = 0;
1264 return VINF_SUCCESS;
1265}
1266
1267
1268/** @callback_method_impl{FNCPUMWRMSR} */
1269static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32TscDeadline(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1270{
1271 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1272 /** @todo implement TSC deadline timer. */
1273 return VINF_SUCCESS;
1274}
1275
1276
1277/** @callback_method_impl{FNCPUMRDMSR} */
1278static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32X2ApicN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1279{
1280 RT_NOREF_PV(pRange);
1281#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
1282 if ( CPUMIsGuestInVmxNonRootMode(&pVCpu->cpum.s.Guest)
1283 && CPUMIsGuestVmxProcCtls2Set(pVCpu, &pVCpu->cpum.s.Guest, VMX_PROC_CTLS2_VIRT_X2APIC_MODE))
1284 {
1285 VBOXSTRICTRC rcStrict = IEMExecVmxVirtApicAccessMsr(pVCpu, idMsr, puValue, false /* fWrite */);
1286 if (rcStrict == VINF_VMX_MODIFIES_BEHAVIOR)
1287 return VINF_SUCCESS;
1288 if (rcStrict == VERR_OUT_OF_RANGE)
1289 return VERR_CPUM_RAISE_GP_0;
1290 Assert(rcStrict == VINF_VMX_INTERCEPT_NOT_ACTIVE);
1291 }
1292#endif
1293 return APICReadMsr(pVCpu, idMsr, puValue);
1294}
1295
1296
1297/** @callback_method_impl{FNCPUMWRMSR} */
1298static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32X2ApicN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1299{
1300 RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1301#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
1302 if ( CPUMIsGuestInVmxNonRootMode(&pVCpu->cpum.s.Guest)
1303 && CPUMIsGuestVmxProcCtls2Set(pVCpu, &pVCpu->cpum.s.Guest, VMX_PROC_CTLS2_VIRT_X2APIC_MODE))
1304 {
1305 VBOXSTRICTRC rcStrict = IEMExecVmxVirtApicAccessMsr(pVCpu, idMsr, &uValue, true /* fWrite */);
1306 if (rcStrict == VINF_VMX_MODIFIES_BEHAVIOR)
1307 return VINF_SUCCESS;
1308 if (rcStrict == VERR_OUT_OF_RANGE)
1309 return VERR_CPUM_RAISE_GP_0;
1310 Assert(rcStrict == VINF_VMX_INTERCEPT_NOT_ACTIVE);
1311 }
1312#endif
1313 return APICWriteMsr(pVCpu, idMsr, uValue);
1314}
1315
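/*
 * A mapping sketch (not part of the file) for the x2APIC MSR range handled by
 * the two callbacks above.  It assumes the architectural x2APIC MSR base of
 * 0x800, which is not stated in this excerpt: each MSR corresponds to one
 * 16-byte aligned xAPIC MMIO register, so MSR 0x802 (APIC ID) maps to offset
 * 0x20 and MSR 0x80D (LDR) to offset 0xD0.  The helper name is hypothetical.
 */
static uint32_t X2ApicMsrToXApicRegOffsetSketch(uint32_t idMsr)
{
    return (idMsr - UINT32_C(0x800)) << 4;
}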
1316
1317/** @callback_method_impl{FNCPUMRDMSR} */
1318static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32DebugInterface(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1319{
1320 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1321 /** @todo IA32_DEBUG_INTERFACE (no docs) */
1322 *puValue = 0;
1323 return VINF_SUCCESS;
1324}
1325
1326
1327/** @callback_method_impl{FNCPUMWRMSR} */
1328static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32DebugInterface(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1329{
1330 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1331 /** @todo IA32_DEBUG_INTERFACE (no docs) */
1332 return VINF_SUCCESS;
1333}
1334
1335
1336/**
1337 * Gets IA32_VMX_BASIC for IEM and cpumMsrRd_Ia32VmxBasic.
1338 *
1339 * @returns IA32_VMX_BASIC value.
1340 * @param pVCpu The cross context per CPU structure.
1341 */
1342VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxBasic(PVMCPU pVCpu)
1343{
1344 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1345 uint64_t uVmxMsr;
1346 if (pGuestFeatures->fVmx)
1347 {
1348 uVmxMsr = RT_BF_MAKE(VMX_BF_BASIC_VMCS_ID, VMX_V_VMCS_REVISION_ID )
1349 | RT_BF_MAKE(VMX_BF_BASIC_VMCS_SIZE, VMX_V_VMCS_SIZE )
1350 | RT_BF_MAKE(VMX_BF_BASIC_PHYSADDR_WIDTH, !pGuestFeatures->fLongMode )
1351 | RT_BF_MAKE(VMX_BF_BASIC_DUAL_MON, 0 )
1352 | RT_BF_MAKE(VMX_BF_BASIC_VMCS_MEM_TYPE, VMX_BASIC_MEM_TYPE_WB )
1353 | RT_BF_MAKE(VMX_BF_BASIC_VMCS_INS_OUTS, pGuestFeatures->fVmxInsOutInfo)
1354 | RT_BF_MAKE(VMX_BF_BASIC_TRUE_CTLS, 0 );
1355 }
1356 else
1357 uVmxMsr = 0;
1358 return uVmxMsr;
1359}
1360
1361
1362/** @callback_method_impl{FNCPUMRDMSR} */
1363static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxBasic(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1364{
1365 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1366 *puValue = CPUMGetGuestIa32VmxBasic(pVCpu);
1367 return VINF_SUCCESS;
1368}
1369
1370
1371/**
1372 * Gets IA32_VMX_PINBASED_CTLS for IEM and cpumMsrRd_Ia32VmxPinbasedCtls.
1373 *
1374 * @returns IA32_VMX_PINBASED_CTLS value.
1375 * @param pVCpu The cross context per CPU structure.
1376 */
1377VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxPinbasedCtls(PVMCPU pVCpu)
1378{
1379 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1380 uint64_t uVmxMsr;
1381 if (pGuestFeatures->fVmx)
1382 {
1383 uint32_t const fFeatures = (pGuestFeatures->fVmxExtIntExit << VMX_BF_PIN_CTLS_EXT_INT_EXIT_SHIFT )
1384 | (pGuestFeatures->fVmxNmiExit << VMX_BF_PIN_CTLS_NMI_EXIT_SHIFT )
1385 | (pGuestFeatures->fVmxVirtNmi << VMX_BF_PIN_CTLS_VIRT_NMI_SHIFT )
1386 | (pGuestFeatures->fVmxPreemptTimer << VMX_BF_PIN_CTLS_PREEMPT_TIMER_SHIFT)
1387 | (pGuestFeatures->fVmxPostedInt << VMX_BF_PIN_CTLS_POSTED_INT_SHIFT );
1388 /* Set the default1 class bits. See Intel spec. A.3.1 "Pin-Based VM-Execution Controls". */
1389 uint32_t const fAllowed0 = VMX_PIN_CTLS_DEFAULT1;
1390 uint32_t const fAllowed1 = fFeatures | VMX_PIN_CTLS_DEFAULT1;
1391 AssertMsg((fAllowed0 & fAllowed1) == fAllowed0, ("fAllowed0=%#RX32 fAllowed1=%#RX32 fFeatures=%#RX32\n",
1392 fAllowed0, fAllowed1, fFeatures));
1393 uVmxMsr = RT_MAKE_U64(fAllowed0, fAllowed1);
1394 LogRel(("fVmxExtIntExit=%u fFeatures=%#RX32 uVmxMsr=%#RX64\n", !!pGuestFeatures->fVmxExtIntExit, fFeatures, uVmxMsr));
1395 }
1396 else
1397 uVmxMsr = 0;
1398 return uVmxMsr;
1399}
1400
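/*
 * A consumer-side sketch (not part of the file) of the allowed0/allowed1
 * packing used above: RT_MAKE_U64(fAllowed0, fAllowed1) puts the "must be 1"
 * mask in the low half and the "may be 1" mask in the high half.  A proposed
 * control value is acceptable only if it sets every allowed0 bit and no bit
 * outside allowed1.  The helper name is hypothetical.
 */
static bool IsVmxCtlsValueAcceptableSketch(uint64_t uCtlsMsr, uint32_t uProposedCtls)
{
    uint32_t const fAllowed0 = (uint32_t)uCtlsMsr;          /* low half:  bits that must be 1 */
    uint32_t const fAllowed1 = (uint32_t)(uCtlsMsr >> 32);  /* high half: bits that may be 1 */
    return (uProposedCtls & fAllowed0) == fAllowed0
        && (uProposedCtls & ~fAllowed1) == 0;
}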
1401
1402/** @callback_method_impl{FNCPUMRDMSR} */
1403static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxPinbasedCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1404{
1405 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1406 *puValue = CPUMGetGuestIa32VmxPinbasedCtls(pVCpu);
1407 return VINF_SUCCESS;
1408}
1409
1410
1411/**
1412 * Gets IA32_VMX_PROCBASED_CTLS for IEM and cpumMsrRd_Ia32VmxProcbasedCtls.
1413 *
1414 * @returns IA32_VMX_PROCBASED_CTLS value.
1415 * @param pVCpu The cross context per CPU structure.
1416 */
1417VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxProcbasedCtls(PVMCPU pVCpu)
1418{
1419 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1420 uint64_t uVmxMsr;
1421 if (pGuestFeatures->fVmx)
1422 {
1423 uint32_t const fFeatures = (pGuestFeatures->fVmxIntWindowExit << VMX_BF_PROC_CTLS_INT_WINDOW_EXIT_SHIFT )
1424 | (pGuestFeatures->fVmxTscOffsetting << VMX_BF_PROC_CTLS_USE_TSC_OFFSETTING_SHIFT)
1425 | (pGuestFeatures->fVmxHltExit << VMX_BF_PROC_CTLS_HLT_EXIT_SHIFT )
1426 | (pGuestFeatures->fVmxInvlpgExit << VMX_BF_PROC_CTLS_INVLPG_EXIT_SHIFT )
1427 | (pGuestFeatures->fVmxMwaitExit << VMX_BF_PROC_CTLS_MWAIT_EXIT_SHIFT )
1428 | (pGuestFeatures->fVmxRdpmcExit << VMX_BF_PROC_CTLS_RDPMC_EXIT_SHIFT )
1429 | (pGuestFeatures->fVmxRdtscExit << VMX_BF_PROC_CTLS_RDTSC_EXIT_SHIFT )
1430 | (pGuestFeatures->fVmxCr3LoadExit << VMX_BF_PROC_CTLS_CR3_LOAD_EXIT_SHIFT )
1431 | (pGuestFeatures->fVmxCr3StoreExit << VMX_BF_PROC_CTLS_CR3_STORE_EXIT_SHIFT )
1432 | (pGuestFeatures->fVmxCr8LoadExit << VMX_BF_PROC_CTLS_CR8_LOAD_EXIT_SHIFT )
1433 | (pGuestFeatures->fVmxCr8StoreExit << VMX_BF_PROC_CTLS_CR8_STORE_EXIT_SHIFT )
1434 | (pGuestFeatures->fVmxUseTprShadow << VMX_BF_PROC_CTLS_USE_TPR_SHADOW_SHIFT )
1435 | (pGuestFeatures->fVmxNmiWindowExit << VMX_BF_PROC_CTLS_NMI_WINDOW_EXIT_SHIFT )
1436 | (pGuestFeatures->fVmxMovDRxExit << VMX_BF_PROC_CTLS_MOV_DR_EXIT_SHIFT )
1437 | (pGuestFeatures->fVmxUncondIoExit << VMX_BF_PROC_CTLS_UNCOND_IO_EXIT_SHIFT )
1438 | (pGuestFeatures->fVmxUseIoBitmaps << VMX_BF_PROC_CTLS_USE_IO_BITMAPS_SHIFT )
1439 | (pGuestFeatures->fVmxMonitorTrapFlag << VMX_BF_PROC_CTLS_MONITOR_TRAP_FLAG_SHIFT )
1440 | (pGuestFeatures->fVmxUseMsrBitmaps << VMX_BF_PROC_CTLS_USE_MSR_BITMAPS_SHIFT )
1441 | (pGuestFeatures->fVmxMonitorExit << VMX_BF_PROC_CTLS_MONITOR_EXIT_SHIFT )
1442 | (pGuestFeatures->fVmxPauseExit << VMX_BF_PROC_CTLS_PAUSE_EXIT_SHIFT )
1443 | (pGuestFeatures->fVmxSecondaryExecCtls << VMX_BF_PROC_CTLS_USE_SECONDARY_CTLS_SHIFT);
1444 /* Set the default1 class bits. See Intel spec. A.3.2 "Primary Processor-Based VM-Execution Controls". */
1445 uint32_t const fAllowed0 = VMX_PROC_CTLS_DEFAULT1;
1446 uint32_t const fAllowed1 = fFeatures | VMX_PROC_CTLS_DEFAULT1;
1447 AssertMsg((fAllowed0 & fAllowed1) == fAllowed0, ("fAllowed0=%#RX32 fAllowed1=%#RX32 fFeatures=%#RX32\n", fAllowed0,
1448 fAllowed1, fFeatures));
1449 uVmxMsr = RT_MAKE_U64(fAllowed0, fAllowed1);
1450 }
1451 else
1452 uVmxMsr = 0;
1453 return uVmxMsr;
1454}
1455
1456
1457/** @callback_method_impl{FNCPUMRDMSR} */
1458static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxProcbasedCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1459{
1460 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1461 *puValue = CPUMGetGuestIa32VmxProcbasedCtls(pVCpu);
1462 return VINF_SUCCESS;
1463}
1464
1465
1466/**
1467 * Gets IA32_VMX_EXIT_CTLS for IEM and cpumMsrRd_Ia32VmxExitCtls.
1468 *
1469 * @returns IA32_VMX_EXIT_CTLS value.
1470 * @param pVCpu The cross context per CPU structure.
1471 */
1472VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxExitCtls(PVMCPU pVCpu)
1473{
1474 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1475 uint64_t uVmxMsr;
1476 if (pGuestFeatures->fVmx)
1477 {
1478 uint32_t const fFeatures = (pGuestFeatures->fVmxExitSaveDebugCtls << VMX_BF_EXIT_CTLS_SAVE_DEBUG_SHIFT )
1479 | (pGuestFeatures->fVmxHostAddrSpaceSize << VMX_BF_EXIT_CTLS_HOST_ADDR_SPACE_SIZE_SHIFT)
1480 | (pGuestFeatures->fVmxExitAckExtInt << VMX_BF_EXIT_CTLS_ACK_EXT_INT_SHIFT )
1481 | (pGuestFeatures->fVmxExitSavePatMsr << VMX_BF_EXIT_CTLS_SAVE_PAT_MSR_SHIFT )
1482 | (pGuestFeatures->fVmxExitLoadPatMsr << VMX_BF_EXIT_CTLS_LOAD_PAT_MSR_SHIFT )
1483 | (pGuestFeatures->fVmxExitSaveEferMsr << VMX_BF_EXIT_CTLS_SAVE_EFER_MSR_SHIFT )
1484 | (pGuestFeatures->fVmxExitLoadEferMsr << VMX_BF_EXIT_CTLS_LOAD_EFER_MSR_SHIFT )
1485 | (pGuestFeatures->fVmxSavePreemptTimer << VMX_BF_EXIT_CTLS_SAVE_PREEMPT_TIMER_SHIFT );
1486 /* Set the default1 class bits. See Intel spec. A.4 "VM-exit Controls". */
1487 uint32_t const fAllowed0 = VMX_EXIT_CTLS_DEFAULT1;
1488 uint32_t const fAllowed1 = fFeatures | VMX_EXIT_CTLS_DEFAULT1;
1489 AssertMsg((fAllowed0 & fAllowed1) == fAllowed0, ("fAllowed0=%#RX32 fAllowed1=%#RX32 fFeatures=%#RX32\n", fAllowed0,
1490 fAllowed1, fFeatures));
1491 uVmxMsr = RT_MAKE_U64(fAllowed0, fAllowed1);
1492 }
1493 else
1494 uVmxMsr = 0;
1495 return uVmxMsr;
1496}
1497
1498
1499/** @callback_method_impl{FNCPUMRDMSR} */
1500static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxExitCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1501{
1502 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1503 *puValue = CPUMGetGuestIa32VmxExitCtls(pVCpu);
1504 return VINF_SUCCESS;
1505}
1506
1507
1508/**
1509 * Gets IA32_VMX_ENTRY_CTLS for IEM and cpumMsrRd_Ia32VmxEntryCtls.
1510 *
1511 * @returns IA32_VMX_ENTRY_CTLS value.
1512 * @param pVCpu The cross context per CPU structure.
1513 */
1514VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxEntryCtls(PVMCPU pVCpu)
1515{
1516 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1517 uint64_t uVmxMsr;
1518 if (pGuestFeatures->fVmx)
1519 {
1520 uint32_t const fFeatures = (pGuestFeatures->fVmxEntryLoadDebugCtls << VMX_BF_ENTRY_CTLS_LOAD_DEBUG_SHIFT )
1521 | (pGuestFeatures->fVmxIa32eModeGuest << VMX_BF_ENTRY_CTLS_IA32E_MODE_GUEST_SHIFT)
1522 | (pGuestFeatures->fVmxEntryLoadEferMsr << VMX_BF_ENTRY_CTLS_LOAD_EFER_MSR_SHIFT )
1523 | (pGuestFeatures->fVmxEntryLoadPatMsr << VMX_BF_ENTRY_CTLS_LOAD_PAT_MSR_SHIFT );
1524 /* Set the default1 class bits. See Intel spec. A.5 "VM-entry Controls". */
1525 uint32_t const fAllowed0 = VMX_ENTRY_CTLS_DEFAULT1;
1526 uint32_t const fAllowed1 = fFeatures | VMX_ENTRY_CTLS_DEFAULT1;
1527 AssertMsg((fAllowed0 & fAllowed1) == fAllowed0, ("fAllowed0=%#RX32 fAllowed1=%#RX32 fFeatures=%#RX32\n", fAllowed0,
1528 fAllowed1, fFeatures));
1529 uVmxMsr = RT_MAKE_U64(fAllowed0, fAllowed1);
1530 }
1531 else
1532 uVmxMsr = 0;
1533 return uVmxMsr;
1534}
1535
1536
1537/** @callback_method_impl{FNCPUMRDMSR} */
1538static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxEntryCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1539{
1540 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1541 *puValue = CPUMGetGuestIa32VmxEntryCtls(pVCpu);
1542 return VINF_SUCCESS;
1543}
1544
1545
1546/**
1547 * Gets IA32_VMX_MISC for IEM and cpumMsrRd_Ia32VmxMisc.
1548 *
1549 * @returns IA32_VMX_MISC MSR.
1550 * @param pVCpu The cross context per CPU structure.
1551 */
1552VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxMisc(PVMCPU pVCpu)
1553{
1554 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1555 uint64_t uVmxMsr;
1556 if (pGuestFeatures->fVmx)
1557 {
1558 uint64_t uHostMsr;
1559 int rc = HMVmxGetHostMsr(pVCpu->CTX_SUFF(pVM), MSR_IA32_VMX_MISC, &uHostMsr);
1560 AssertMsgRC(rc, ("HMVmxGetHostMsr failed. rc=%Rrc\n", rc)); RT_NOREF_PV(rc);
1561 uint8_t const cMaxMsrs = RT_MIN(RT_BF_GET(uHostMsr, VMX_BF_MISC_MAX_MSRS), VMX_V_AUTOMSR_COUNT_MAX);
1562 uint8_t const fActivityState = RT_BF_GET(uHostMsr, VMX_BF_MISC_ACTIVITY_STATES) & VMX_V_GUEST_ACTIVITY_STATE_MASK;
1563 uVmxMsr = RT_BF_MAKE(VMX_BF_MISC_PREEMPT_TIMER_TSC, VMX_V_PREEMPT_TIMER_SHIFT )
1564 | RT_BF_MAKE(VMX_BF_MISC_EXIT_SAVE_EFER_LMA, pGuestFeatures->fVmxExitSaveEferLma )
1565 | RT_BF_MAKE(VMX_BF_MISC_ACTIVITY_STATES, fActivityState )
1566 | RT_BF_MAKE(VMX_BF_MISC_INTEL_PT, pGuestFeatures->fVmxIntelPt )
1567 | RT_BF_MAKE(VMX_BF_MISC_SMM_READ_SMBASE_MSR, 0 )
1568 | RT_BF_MAKE(VMX_BF_MISC_CR3_TARGET, VMX_V_CR3_TARGET_COUNT )
1569 | RT_BF_MAKE(VMX_BF_MISC_MAX_MSRS, cMaxMsrs )
1570 | RT_BF_MAKE(VMX_BF_MISC_VMXOFF_BLOCK_SMI, 0 )
1571 | RT_BF_MAKE(VMX_BF_MISC_VMWRITE_ALL, pGuestFeatures->fVmxVmwriteAll )
1572 | RT_BF_MAKE(VMX_BF_MISC_ENTRY_INJECT_SOFT_INT, pGuestFeatures->fVmxEntryInjectSoftInt)
1573 | RT_BF_MAKE(VMX_BF_MISC_MSEG_ID, VMX_V_MSEG_REV_ID );
1574 }
1575 else
1576 uVmxMsr = 0;
1577 return uVmxMsr;
1578}
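
/*
 * Illustrative sketch (hypothetical helper): decoding two of the IA32_VMX_MISC
 * fields packed above.  Per the Intel spec, bits 4:0 give the TSC-to-preemption-
 * timer rate as a shift count (the timer ticks once every 2^X TSC ticks) and
 * bits 27:25 hold N, where 512 * (N + 1) is the recommended maximum number of
 * MSR-load/store area entries.
 */
#if 0
static void exampleDecodeVmxMisc(uint64_t uVmxMisc)
{
    unsigned const cPreemptTimerShift = (unsigned)(uVmxMisc & 0x1f);                    /* bits 4:0   */
    unsigned const cRecommendedMsrs   = 512 * (((unsigned)(uVmxMisc >> 25) & 0x7) + 1); /* bits 27:25 */
    (void)cPreemptTimerShift; (void)cRecommendedMsrs;
}
#endif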
1579
1580
1581/** @callback_method_impl{FNCPUMRDMSR} */
1582static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxMisc(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1583{
1584 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1585 *puValue = CPUMGetGuestIa32VmxMisc(pVCpu);
1586 return VINF_SUCCESS;
1587}
1588
1589
1590/**
1591 * Gets IA32_VMX_CR0_FIXED0 for IEM and cpumMsrRd_Ia32VmxCr0Fixed0.
1592 *
1593 * @returns IA32_VMX_CR0_FIXED0 value.
1594 * @param pVCpu The cross context per CPU structure.
1595 */
1596VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxCr0Fixed0(PVMCPU pVCpu)
1597{
1598 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1599 if (pGuestFeatures->fVmx)
1600 {
1601 uint64_t const uVmxMsr = pGuestFeatures->fVmxUnrestrictedGuest ? VMX_V_CR0_FIXED0_UX : VMX_V_CR0_FIXED0;
1602 return uVmxMsr;
1603 }
1604 return 0;
1605}
1606
1607
1608/** @callback_method_impl{FNCPUMRDMSR} */
1609static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxCr0Fixed0(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1610{
1611 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1612 *puValue = CPUMGetGuestIa32VmxCr0Fixed0(pVCpu);
1613 return VINF_SUCCESS;
1614}
1615
1616
1617/**
1618 * Gets IA32_VMX_CR0_FIXED1 for IEM and cpumMsrRd_Ia32VmxCr0Fixed1.
1619 *
1620 * @returns IA32_VMX_CR0_FIXED1 MSR.
1621 * @param pVCpu The cross context per CPU structure.
1622 */
1623VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxCr0Fixed1(PVMCPU pVCpu)
1624{
1625 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1626 uint64_t uVmxMsr;
1627 if (pGuestFeatures->fVmx)
1628 {
1629 int rc = HMVmxGetHostMsr(pVCpu->CTX_SUFF(pVM), MSR_IA32_VMX_CR0_FIXED1, &uVmxMsr);
1630 AssertMsgRC(rc, ("HMVmxGetHostMsr failed. rc=%Rrc\n", rc)); RT_NOREF_PV(rc);
1631 uVmxMsr |= VMX_V_CR0_FIXED0; /* Make sure the CR0 MB1 bits are not clear. */
1632 }
1633 else
1634 uVmxMsr = 0;
1635 return uVmxMsr;
1636}
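
/*
 * Illustrative sketch (hypothetical helper): how the FIXED0/FIXED1 pair
 * returned by the two getters above constrains CR0 in VMX operation.  Bits
 * that are 1 in FIXED0 must be 1 in CR0, and bits that are 0 in FIXED1 must
 * be 0 in CR0.
 */
#if 0
static bool exampleIsCr0ValidForVmx(uint64_t uCr0, uint64_t uCr0Fixed0, uint64_t uCr0Fixed1)
{
    return (uCr0 & uCr0Fixed0) == uCr0Fixed0    /* all must-be-one bits are set */
        && (uCr0 & ~uCr0Fixed1) == 0;           /* no must-be-zero bit is set */
}
#endif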
1637
1638
1639/** @callback_method_impl{FNCPUMRDMSR} */
1640static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxCr0Fixed1(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1641{
1642 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1643 Assert(idMsr == MSR_IA32_VMX_CR0_FIXED1);
1644 *puValue = CPUMGetGuestIa32VmxCr0Fixed1(pVCpu);
1645 return VINF_SUCCESS;
1646}
1647
1648
1649/**
1650 * Gets IA32_VMX_CR4_FIXED0 for IEM and cpumMsrRd_Ia32VmxCr4Fixed0.
1651 *
1652 * @returns IA32_VMX_CR4_FIXED0 value.
1653 * @param pVCpu The cross context per CPU structure.
1654 */
1655VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxCr4Fixed0(PVMCPU pVCpu)
1656{
1657 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1658 uint64_t const uVmxMsr = pGuestFeatures->fVmx ? VMX_V_CR4_FIXED0 : 0;
1659 return uVmxMsr;
1660}
1661
1662
1663/** @callback_method_impl{FNCPUMRDMSR} */
1664static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxCr4Fixed0(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1665{
1666 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1667 *puValue = CPUMGetGuestIa32VmxCr4Fixed0(pVCpu);
1668 return VINF_SUCCESS;
1669}
1670
1671
1672/**
1673 * Gets IA32_VMX_CR4_FIXED1 for IEM and cpumMsrRd_Ia32VmxCr4Fixed1.
1674 *
1675 * @returns IA32_VMX_CR4_FIXED1 MSR.
1676 * @param pVCpu The cross context per CPU structure.
1677 */
1678VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxCr4Fixed1(PVMCPU pVCpu)
1679{
1680 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1681 uint64_t uVmxMsr;
1682 if (pGuestFeatures->fVmx)
1683 {
1684 int rc = HMVmxGetHostMsr(pVCpu->CTX_SUFF(pVM), MSR_IA32_VMX_CR4_FIXED1, &uVmxMsr);
1685 AssertMsgRC(rc, ("HMVmxGetHostMsr failed. rc=%Rrc\n", rc)); RT_NOREF_PV(rc);
1686 uVmxMsr |= VMX_V_CR4_FIXED0; /* Make sure the CR4 MB1 bits are not clear. */
1687 }
1688 else
1689 uVmxMsr = 0;
1690 return uVmxMsr;
1691}
1692
1693
1694/** @callback_method_impl{FNCPUMRDMSR} */
1695static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxCr4Fixed1(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1696{
1697 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1698 Assert(idMsr == MSR_IA32_VMX_CR4_FIXED1);
1699 *puValue = CPUMGetGuestIa32VmxCr4Fixed1(pVCpu);
1700 return VINF_SUCCESS;
1701}
1702
1703
1704/**
1705 * Gets IA32_VMX_VMCS_ENUM for IEM and cpumMsrRd_Ia32VmxVmcsEnum.
1706 *
1707 * @returns IA32_VMX_VMCS_ENUM value.
1708 * @param pVCpu The cross context per CPU structure.
1709 */
1710VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxVmcsEnum(PVMCPU pVCpu)
1711{
1712 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1713 uint64_t uVmxMsr;
1714 if (pGuestFeatures->fVmx)
1715 uVmxMsr = VMX_V_VMCS_MAX_INDEX << VMX_BF_VMCS_ENUM_HIGHEST_IDX_SHIFT;
1716 else
1717 uVmxMsr = 0;
1718 return uVmxMsr;
1719}
1720
1721
1722/** @callback_method_impl{FNCPUMRDMSR} */
1723static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxVmcsEnum(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1724{
1725 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1726 *puValue = CPUMGetGuestIa32VmxVmcsEnum(pVCpu);
1727 return VINF_SUCCESS;
1728}
1729
1730
1731/**
1732 * Gets MSR_IA32_VMX_PROCBASED_CTLS2 for IEM and cpumMsrRd_Ia32VmxProcBasedCtls2.
1733 *
1734 * @returns MSR_IA32_VMX_PROCBASED_CTLS2 value.
1735 * @param pVCpu The cross context per CPU structure.
1736 */
1737VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxProcbasedCtls2(PVMCPU pVCpu)
1738{
1739 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1740 uint64_t uVmxMsr;
1741 if ( pGuestFeatures->fVmx
1742 && pGuestFeatures->fVmxSecondaryExecCtls)
1743 {
1744 uint32_t const fFeatures = (pGuestFeatures->fVmxVirtApicAccess << VMX_BF_PROC_CTLS2_VIRT_APIC_ACCESS_SHIFT )
1745 | (pGuestFeatures->fVmxEpt << VMX_BF_PROC_CTLS2_EPT_SHIFT )
1746 | (pGuestFeatures->fVmxDescTableExit << VMX_BF_PROC_CTLS2_DESC_TABLE_EXIT_SHIFT )
1747 | (pGuestFeatures->fVmxRdtscp << VMX_BF_PROC_CTLS2_RDTSCP_SHIFT )
1748 | (pGuestFeatures->fVmxVirtX2ApicMode << VMX_BF_PROC_CTLS2_VIRT_X2APIC_MODE_SHIFT )
1749 | (pGuestFeatures->fVmxVpid << VMX_BF_PROC_CTLS2_VPID_SHIFT )
1750 | (pGuestFeatures->fVmxWbinvdExit << VMX_BF_PROC_CTLS2_WBINVD_EXIT_SHIFT )
1751 | (pGuestFeatures->fVmxUnrestrictedGuest << VMX_BF_PROC_CTLS2_UNRESTRICTED_GUEST_SHIFT)
1752 | (pGuestFeatures->fVmxApicRegVirt << VMX_BF_PROC_CTLS2_APIC_REG_VIRT_SHIFT )
1753 | (pGuestFeatures->fVmxVirtIntDelivery << VMX_BF_PROC_CTLS2_VIRT_INT_DELIVERY_SHIFT )
1754 | (pGuestFeatures->fVmxPauseLoopExit << VMX_BF_PROC_CTLS2_PAUSE_LOOP_EXIT_SHIFT )
1755 | (pGuestFeatures->fVmxRdrandExit << VMX_BF_PROC_CTLS2_RDRAND_EXIT_SHIFT )
1756 | (pGuestFeatures->fVmxInvpcid << VMX_BF_PROC_CTLS2_INVPCID_SHIFT )
1757 | (pGuestFeatures->fVmxVmFunc << VMX_BF_PROC_CTLS2_VMFUNC_SHIFT )
1758 | (pGuestFeatures->fVmxVmcsShadowing << VMX_BF_PROC_CTLS2_VMCS_SHADOWING_SHIFT )
1759 | (pGuestFeatures->fVmxRdseedExit << VMX_BF_PROC_CTLS2_RDSEED_EXIT_SHIFT )
1760 | (pGuestFeatures->fVmxPml << VMX_BF_PROC_CTLS2_PML_SHIFT )
1761 | (pGuestFeatures->fVmxEptXcptVe << VMX_BF_PROC_CTLS2_EPT_VE_SHIFT )
1762 | (pGuestFeatures->fVmxXsavesXrstors << VMX_BF_PROC_CTLS2_XSAVES_XRSTORS_SHIFT )
1763 | (pGuestFeatures->fVmxUseTscScaling << VMX_BF_PROC_CTLS2_TSC_SCALING_SHIFT );
1764 /* No default1 class bits. A.3.3 "Secondary Processor-Based VM-Execution Controls". */
1765 uint32_t const fAllowed0 = 0;
1766 uint32_t const fAllowed1 = fFeatures;
1767 uVmxMsr = RT_MAKE_U64(fAllowed0, fAllowed1);
1768 }
1769 else
1770 uVmxMsr = 0;
1771 return uVmxMsr;
1772}
1773
1774
1775/** @callback_method_impl{FNCPUMRDMSR} */
1776static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxProcBasedCtls2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1777{
1778 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1779 *puValue = CPUMGetGuestIa32VmxProcbasedCtls2(pVCpu);
1780 return VINF_SUCCESS;
1781}
1782
1783
1784/** @callback_method_impl{FNCPUMRDMSR} */
1785static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxEptVpidCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1786{
1787 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1788 *puValue = 0;
1789 return VINF_SUCCESS;
1790}
1791
1792
1793/** @callback_method_impl{FNCPUMRDMSR} */
1794static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxTruePinbasedCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1795{
1796 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1797 *puValue = 0;
1798 return VINF_SUCCESS;
1799}
1800
1801
1802/** @callback_method_impl{FNCPUMRDMSR} */
1803static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxTrueProcbasedCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1804{
1805 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1806 *puValue = 0;
1807 return VINF_SUCCESS;
1808}
1809
1810
1811/** @callback_method_impl{FNCPUMRDMSR} */
1812static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxTrueExitCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1813{
1814 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1815 *puValue = 0;
1816 return VINF_SUCCESS;
1817}
1818
1819
1820/** @callback_method_impl{FNCPUMRDMSR} */
1821static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxTrueEntryCtls(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1822{
1823 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1824 *puValue = 0;
1825 return VINF_SUCCESS;
1826}
1827
1828
1829/**
1830 * Gets IA32_VMX_VMFUNC for IEM and cpumMsrRd_Ia32VmxVmFunc.
1831 *
1832 * @returns IA32_VMX_VMFUNC value.
1833 * @param pVCpu The cross context per CPU structure.
1834 */
1835VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxVmFunc(PVMCPU pVCpu)
1836{
1837 PCCPUMFEATURES pGuestFeatures = &pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures;
1838 uint64_t uVmxMsr;
1839 if ( pGuestFeatures->fVmx
1840 && pGuestFeatures->fVmxVmFunc)
1841 uVmxMsr = RT_BF_MAKE(VMX_BF_VMFUNC_EPTP_SWITCHING, 1);
1842 else
1843 uVmxMsr = 0;
1844 return uVmxMsr;
1845}
1846
1847
1848/** @callback_method_impl{FNCPUMRDMSR} */
1849static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxVmFunc(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1850{
1851 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1852 *puValue = CPUMGetGuestIa32VmxVmFunc(pVCpu);
1853 return VINF_SUCCESS;
1854}
1855
1856
1857/** @callback_method_impl{FNCPUMRDMSR} */
1858static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SpecCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1859{
1860 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1861 *puValue = pVCpu->cpum.s.GuestMsrs.msr.SpecCtrl;
1862 return VINF_SUCCESS;
1863}
1864
1865
1866/** @callback_method_impl{FNCPUMWRMSR} */
1867static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SpecCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1868{
1869 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1870
1871 /* NB: The STIBP bit can be set even when only IBRS is present, regardless of whether STIBP is actually implemented. */
1872 if (uValue & ~(MSR_IA32_SPEC_CTRL_F_IBRS | MSR_IA32_SPEC_CTRL_F_STIBP))
1873 {
1874 Log(("CPUM: Invalid IA32_SPEC_CTRL bits (trying to write %#llx)\n", uValue));
1875 return VERR_CPUM_RAISE_GP_0;
1876 }
1877
1878 pVCpu->cpum.s.GuestMsrs.msr.SpecCtrl = uValue;
1879 return VINF_SUCCESS;
1880}
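
/*
 * Illustrative note (hypothetical constants mirroring the architectural bit
 * layout, not VBox definitions): IA32_SPEC_CTRL currently defines bit 0 as
 * IBRS and bit 1 as STIBP; any other bit takes the #GP path above.
 */
#if 0
#define EXAMPLE_SPEC_CTRL_IBRS   UINT64_C(0x1)   /* bit 0: indirect branch restricted speculation */
#define EXAMPLE_SPEC_CTRL_STIBP  UINT64_C(0x2)   /* bit 1: single thread indirect branch predictors */
static bool exampleIsSpecCtrlWriteValid(uint64_t uValue)
{
    return !(uValue & ~(EXAMPLE_SPEC_CTRL_IBRS | EXAMPLE_SPEC_CTRL_STIBP));
}
#endif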
1881
1882
1883/** @callback_method_impl{FNCPUMWRMSR} */
1884static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PredCmd(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1885{
1886 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1887 return VINF_SUCCESS;
1888}
1889
1890
1891/** @callback_method_impl{FNCPUMRDMSR} */
1892static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ArchCapabilities(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1893{
1894 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1895 *puValue = pVCpu->cpum.s.GuestMsrs.msr.ArchCaps;
1896 return VINF_SUCCESS;
1897}
1898
1899
1900
1901
1902
1903
1904
1905
1906
1907
1908
1909
1910/*
1911 * AMD64
1912 * AMD64
1913 * AMD64
1914 */
1915
1916
1917/** @callback_method_impl{FNCPUMRDMSR} */
1918static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64Efer(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1919{
1920 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1921 *puValue = pVCpu->cpum.s.Guest.msrEFER;
1922 return VINF_SUCCESS;
1923}
1924
1925
1926/** @callback_method_impl{FNCPUMWRMSR} */
1927static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64Efer(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1928{
1929 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1930 uint64_t uValidatedEfer;
1931 uint64_t const uOldEfer = pVCpu->cpum.s.Guest.msrEFER;
1932 int rc = CPUMIsGuestEferMsrWriteValid(pVCpu->CTX_SUFF(pVM), pVCpu->cpum.s.Guest.cr0, uOldEfer, uValue, &uValidatedEfer);
1933 if (RT_FAILURE(rc))
1934 return VERR_CPUM_RAISE_GP_0;
1935
1936 CPUMSetGuestEferMsrNoChecks(pVCpu, uOldEfer, uValidatedEfer);
1937 return VINF_SUCCESS;
1938}
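
/*
 * Illustrative sketch (hypothetical, not the actual CPUMIsGuestEferMsrWriteValid
 * logic): the kind of architectural checks an EFER write validator performs.
 * Bit positions assumed: SCE=0, LME=8, LMA=10, NXE=11, CR0.PG=31.
 */
#if 0
static bool exampleIsEferWriteValid(uint64_t uCr0, uint64_t uOldEfer, uint64_t uNewEfer)
{
    uint64_t const fKnownBits = UINT64_C(0x0d01);            /* SCE | LME | LMA | NXE */
    if (uNewEfer & ~fKnownBits)
        return false;                                        /* reserved bits set -> #GP */
    if (   (uCr0 & UINT64_C(0x80000000))                     /* paging enabled ... */
        && ((uOldEfer ^ uNewEfer) & UINT64_C(0x100)))        /* ... and LME toggled -> #GP */
        return false;
    return true;
}
#endif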
1939
1940
1941/** @callback_method_impl{FNCPUMRDMSR} */
1942static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64SyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1943{
1944 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1945 *puValue = pVCpu->cpum.s.Guest.msrSTAR;
1946 return VINF_SUCCESS;
1947}
1948
1949
1950/** @callback_method_impl{FNCPUMWRMSR} */
1951static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64SyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1952{
1953 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1954 pVCpu->cpum.s.Guest.msrSTAR = uValue;
1955 return VINF_SUCCESS;
1956}
1957
1958
1959/** @callback_method_impl{FNCPUMRDMSR} */
1960static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64LongSyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1961{
1962 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1963 *puValue = pVCpu->cpum.s.Guest.msrLSTAR;
1964 return VINF_SUCCESS;
1965}
1966
1967
1968/** @callback_method_impl{FNCPUMWRMSR} */
1969static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64LongSyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1970{
1971 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1972 if (!X86_IS_CANONICAL(uValue))
1973 {
1974 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
1975 return VERR_CPUM_RAISE_GP_0;
1976 }
1977 pVCpu->cpum.s.Guest.msrLSTAR = uValue;
1978 return VINF_SUCCESS;
1979}
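
/*
 * Illustrative sketch (hypothetical helper): what the canonical-address check
 * above amounts to on a CPU with 48-bit virtual addresses - bits 63:47 must
 * all be copies of bit 47.
 */
#if 0
static bool exampleIsCanonical48(uint64_t uAddr)
{
    uint64_t const uTopBits = uAddr >> 47;                   /* bits 63:47, 17 bits */
    return uTopBits == 0 || uTopBits == UINT64_C(0x1ffff);   /* all zero or all one */
}
#endif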
1980
1981
1982/** @callback_method_impl{FNCPUMRDMSR} */
1983static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64CompSyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1984{
1985 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1986 *puValue = pVCpu->cpum.s.Guest.msrCSTAR;
1987 return VINF_SUCCESS;
1988}
1989
1990
1991/** @callback_method_impl{FNCPUMWRMSR} */
1992static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64CompSyscallTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1993{
1994 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1995 if (!X86_IS_CANONICAL(uValue))
1996 {
1997 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
1998 return VERR_CPUM_RAISE_GP_0;
1999 }
2000 pVCpu->cpum.s.Guest.msrCSTAR = uValue;
2001 return VINF_SUCCESS;
2002}
2003
2004
2005/** @callback_method_impl{FNCPUMRDMSR} */
2006static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64SyscallFlagMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2007{
2008 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2009 *puValue = pVCpu->cpum.s.Guest.msrSFMASK;
2010 return VINF_SUCCESS;
2011}
2012
2013
2014/** @callback_method_impl{FNCPUMWRMSR} */
2015static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64SyscallFlagMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2016{
2017 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2018 pVCpu->cpum.s.Guest.msrSFMASK = uValue;
2019 return VINF_SUCCESS;
2020}
2021
2022
2023/** @callback_method_impl{FNCPUMRDMSR} */
2024static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64FsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2025{
2026 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2027 *puValue = pVCpu->cpum.s.Guest.fs.u64Base;
2028 return VINF_SUCCESS;
2029}
2030
2031
2032/** @callback_method_impl{FNCPUMWRMSR} */
2033static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64FsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2034{
2035 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2036 pVCpu->cpum.s.Guest.fs.u64Base = uValue;
2037 return VINF_SUCCESS;
2038}
2039
2040
2041/** @callback_method_impl{FNCPUMRDMSR} */
2042static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64GsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2043{
2044 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2045 *puValue = pVCpu->cpum.s.Guest.gs.u64Base;
2046 return VINF_SUCCESS;
2047}
2048
2049/** @callback_method_impl{FNCPUMWRMSR} */
2050static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64GsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2051{
2052 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2053 pVCpu->cpum.s.Guest.gs.u64Base = uValue;
2054 return VINF_SUCCESS;
2055}
2056
2057
2058
2059/** @callback_method_impl{FNCPUMRDMSR} */
2060static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64KernelGsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2061{
2062 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2063 *puValue = pVCpu->cpum.s.Guest.msrKERNELGSBASE;
2064 return VINF_SUCCESS;
2065}
2066
2067/** @callback_method_impl{FNCPUMWRMSR} */
2068static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64KernelGsBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2069{
2070 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2071 pVCpu->cpum.s.Guest.msrKERNELGSBASE = uValue;
2072 return VINF_SUCCESS;
2073}
2074
2075
2076/** @callback_method_impl{FNCPUMRDMSR} */
2077static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64TscAux(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2078{
2079 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2080 *puValue = pVCpu->cpum.s.GuestMsrs.msr.TscAux;
2081 return VINF_SUCCESS;
2082}
2083
2084/** @callback_method_impl{FNCPUMWRMSR} */
2085static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64TscAux(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2086{
2087 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2088 pVCpu->cpum.s.GuestMsrs.msr.TscAux = uValue;
2089 return VINF_SUCCESS;
2090}
2091
2092
2093/*
2094 * Intel specific
2095 * Intel specific
2096 * Intel specific
2097 */
2098
2099/** @callback_method_impl{FNCPUMRDMSR} */
2100static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelEblCrPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2101{
2102 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2103 /** @todo recalc clock frequency ratio? */
2104 *puValue = pRange->uValue;
2105 return VINF_SUCCESS;
2106}
2107
2108
2109/** @callback_method_impl{FNCPUMWRMSR} */
2110static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelEblCrPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2111{
2112 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2113 /** @todo Write EBL_CR_POWERON: Remember written bits. */
2114 return VINF_SUCCESS;
2115}
2116
2117
2118/** @callback_method_impl{FNCPUMRDMSR} */
2119static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7CoreThreadCount(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2120{
2121 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2122
2123 /* Note! According to cpuid_set_info in XNU (10.7.0), Westmere CPUs only
2124 have a 4-bit core count. */
2125 uint16_t cCores = pVCpu->CTX_SUFF(pVM)->cCpus;
2126 uint16_t cThreads = cCores; /** @todo hyper-threading. */
2127 *puValue = RT_MAKE_U32(cThreads, cCores);
2128 return VINF_SUCCESS;
2129}
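
/*
 * Illustrative sketch (hypothetical helper): unpacking the value built above,
 * which follows the usual MSR (0x35) layout of thread count in bits 15:0 and
 * core count in bits 31:16.
 */
#if 0
static void exampleDecodeCoreThreadCount(uint64_t uMsr, uint16_t *pcThreads, uint16_t *pcCores)
{
    *pcThreads = (uint16_t)(uMsr & 0xffff);                  /* bits 15:0  */
    *pcCores   = (uint16_t)((uMsr >> 16) & 0xffff);          /* bits 31:16 */
}
#endif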
2130
2131
2132/** @callback_method_impl{FNCPUMRDMSR} */
2133static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP4EbcHardPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2134{
2135 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2136 /** @todo P4 hard power on config */
2137 *puValue = pRange->uValue;
2138 return VINF_SUCCESS;
2139}
2140
2141
2142/** @callback_method_impl{FNCPUMWRMSR} */
2143static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelP4EbcHardPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2144{
2145 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2146 /** @todo P4 hard power on config */
2147 return VINF_SUCCESS;
2148}
2149
2150
2151/** @callback_method_impl{FNCPUMRDMSR} */
2152static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP4EbcSoftPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2153{
2154 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2155 /** @todo P4 soft power on config */
2156 *puValue = pRange->uValue;
2157 return VINF_SUCCESS;
2158}
2159
2160
2161/** @callback_method_impl{FNCPUMWRMSR} */
2162static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelP4EbcSoftPowerOn(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2163{
2164 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2165 /** @todo P4 soft power on config */
2166 return VINF_SUCCESS;
2167}
2168
2169
2170/** @callback_method_impl{FNCPUMRDMSR} */
2171static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP4EbcFrequencyId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2172{
2173 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2174
2175 uint64_t uValue;
2176 PVM pVM = pVCpu->CTX_SUFF(pVM);
2177 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVM);
2178 if (pVM->cpum.s.GuestFeatures.uModel >= 2)
2179 {
2180 if (uScalableBusHz <= CPUM_SBUSFREQ_100MHZ && pVM->cpum.s.GuestFeatures.uModel <= 2)
2181 {
2182 uScalableBusHz = CPUM_SBUSFREQ_100MHZ;
2183 uValue = 0;
2184 }
2185 else if (uScalableBusHz <= CPUM_SBUSFREQ_133MHZ)
2186 {
2187 uScalableBusHz = CPUM_SBUSFREQ_133MHZ;
2188 uValue = 1;
2189 }
2190 else if (uScalableBusHz <= CPUM_SBUSFREQ_167MHZ)
2191 {
2192 uScalableBusHz = CPUM_SBUSFREQ_167MHZ;
2193 uValue = 3;
2194 }
2195 else if (uScalableBusHz <= CPUM_SBUSFREQ_200MHZ)
2196 {
2197 uScalableBusHz = CPUM_SBUSFREQ_200MHZ;
2198 uValue = 2;
2199 }
2200 else if (uScalableBusHz <= CPUM_SBUSFREQ_267MHZ && pVM->cpum.s.GuestFeatures.uModel > 2)
2201 {
2202 uScalableBusHz = CPUM_SBUSFREQ_267MHZ;
2203 uValue = 0;
2204 }
2205 else
2206 {
2207 uScalableBusHz = CPUM_SBUSFREQ_333MHZ;
2208 uValue = 6;
2209 }
2210 uValue <<= 16;
2211
2212 uint64_t uTscHz = TMCpuTicksPerSecond(pVM);
2213 uint8_t uTscRatio = (uint8_t)((uTscHz + uScalableBusHz / 2) / uScalableBusHz);
2214 uValue |= (uint32_t)uTscRatio << 24;
2215
2216 uValue |= pRange->uValue & ~UINT64_C(0xff0f0000);
2217 }
2218 else
2219 {
2220 /* Probably more stuff here, but intel doesn't want to tell us. */
2221 uValue = pRange->uValue;
2222 uValue &= ~(RT_BIT_64(21) | RT_BIT_64(22) | RT_BIT_64(23)); /* 100 MHz is only documented value */
2223 }
2224
2225 *puValue = uValue;
2226 return VINF_SUCCESS;
2227}
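
/*
 * Illustrative sketch (hypothetical helper): the rounded TSC-to-bus ratio
 * computation used above and in the MSR_PLATFORM_INFO/MSR_FLEX_RATIO getters
 * below - an integer division with half-divisor rounding.  E.g. a 3.2 GHz TSC
 * over a 333 MHz scalable bus yields a ratio of 10.
 */
#if 0
static uint8_t exampleCalcTscRatio(uint64_t uTscHz, uint64_t uScalableBusHz)
{
    return (uint8_t)((uTscHz + uScalableBusHz / 2) / uScalableBusHz); /* round to nearest */
}
#endif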
2228
2229
2230/** @callback_method_impl{FNCPUMWRMSR} */
2231static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelP4EbcFrequencyId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2232{
2233 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2234 /** @todo P4 bus frequency config */
2235 return VINF_SUCCESS;
2236}
2237
2238
2239/** @callback_method_impl{FNCPUMRDMSR} */
2240static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP6FsbFrequency(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2241{
2242 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2243
2244 /* Convert the scalable bus frequency to the encoding in the intel manual (for core+). */
2245 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVCpu->CTX_SUFF(pVM));
2246 if (uScalableBusHz <= CPUM_SBUSFREQ_100MHZ)
2247 *puValue = 5;
2248 else if (uScalableBusHz <= CPUM_SBUSFREQ_133MHZ)
2249 *puValue = 1;
2250 else if (uScalableBusHz <= CPUM_SBUSFREQ_167MHZ)
2251 *puValue = 3;
2252 else if (uScalableBusHz <= CPUM_SBUSFREQ_200MHZ)
2253 *puValue = 2;
2254 else if (uScalableBusHz <= CPUM_SBUSFREQ_267MHZ)
2255 *puValue = 0;
2256 else if (uScalableBusHz <= CPUM_SBUSFREQ_333MHZ)
2257 *puValue = 4;
2258 else /*if (uScalableBusHz <= CPUM_SBUSFREQ_400MHZ)*/
2259 *puValue = 6;
2260
2261 *puValue |= pRange->uValue & ~UINT64_C(0x7);
2262
2263 return VINF_SUCCESS;
2264}
2265
2266
2267/** @callback_method_impl{FNCPUMRDMSR} */
2268static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelPlatformInfo(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2269{
2270 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2271
2272 /* Just indicate a fixed TSC, no turbo boost, no programmable anything. */
2273 PVM pVM = pVCpu->CTX_SUFF(pVM);
2274 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVM);
2275 uint64_t uTscHz = TMCpuTicksPerSecond(pVM);
2276 uint8_t uTscRatio = (uint8_t)((uTscHz + uScalableBusHz / 2) / uScalableBusHz);
2277 uint64_t uValue = ((uint32_t)uTscRatio << 8) /* TSC invariant frequency. */
2278 | ((uint64_t)uTscRatio << 40); /* The max turbo frequency. */
2279
2280 /* Ivy bridge has a minimum operating ratio as well. */
2281 if (true) /** @todo detect sandy bridge. */
2282 uValue |= (uint64_t)uTscRatio << 48;
2283
2284 *puValue = uValue;
2285 return VINF_SUCCESS;
2286}
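
/*
 * Illustrative sketch (hypothetical helper): pulling the ratio fields back out
 * of the MSR_PLATFORM_INFO value packed above (bits 15:8, 47:40 and 55:48, as
 * per the comments in the getter).
 */
#if 0
static void exampleDecodePlatformInfo(uint64_t uMsr)
{
    uint8_t const uNonTurboRatio     = (uint8_t)(uMsr >>  8);  /* bits 15:8  */
    uint8_t const uMaxTurboRatio     = (uint8_t)(uMsr >> 40);  /* bits 47:40 */
    uint8_t const uMinOperatingRatio = (uint8_t)(uMsr >> 48);  /* bits 55:48 */
    (void)uNonTurboRatio; (void)uMaxTurboRatio; (void)uMinOperatingRatio;
}
#endif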
2287
2288
2289/** @callback_method_impl{FNCPUMRDMSR} */
2290static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelFlexRatio(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2291{
2292 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2293
2294 uint64_t uValue = pRange->uValue & ~UINT64_C(0x1ff00);
2295
2296 PVM pVM = pVCpu->CTX_SUFF(pVM);
2297 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVM);
2298 uint64_t uTscHz = TMCpuTicksPerSecond(pVM);
2299 uint8_t uTscRatio = (uint8_t)((uTscHz + uScalableBusHz / 2) / uScalableBusHz);
2300 uValue |= (uint32_t)uTscRatio << 8;
2301
2302 *puValue = uValue;
2303 return VINF_SUCCESS;
2304}
2305
2306
2307/** @callback_method_impl{FNCPUMWRMSR} */
2308static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelFlexRatio(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2309{
2310 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2311 /** @todo implement writing MSR_FLEX_RATIO. */
2312 return VINF_SUCCESS;
2313}
2314
2315
2316/** @callback_method_impl{FNCPUMRDMSR} */
2317static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelPkgCStConfigControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2318{
2319 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2320 *puValue = pVCpu->cpum.s.GuestMsrs.msr.PkgCStateCfgCtrl;
2321 return VINF_SUCCESS;
2322}
2323
2324
2325/** @callback_method_impl{FNCPUMWRMSR} */
2326static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelPkgCStConfigControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2327{
2328 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2329
2330 if (pVCpu->cpum.s.GuestMsrs.msr.PkgCStateCfgCtrl & RT_BIT_64(15))
2331 {
2332 Log(("CPUM: WRMSR %#x (%s), %#llx: Write protected -> #GP\n", idMsr, pRange->szName, uValue));
2333 return VERR_CPUM_RAISE_GP_0;
2334 }
2335#if 0 /** @todo check what real (old) hardware does. */
2336 if ((uValue & 7) >= 5)
2337 {
2338 Log(("CPUM: WRMSR %#x (%s), %#llx: Invalid limit (%d) -> #GP\n", idMsr, pRange->szName, uValue, (uint32_t)(uValue & 7)));
2339 return VERR_CPUM_RAISE_GP_0;
2340 }
2341#endif
2342 pVCpu->cpum.s.GuestMsrs.msr.PkgCStateCfgCtrl = uValue;
2343 return VINF_SUCCESS;
2344}
2345
2346
2347/** @callback_method_impl{FNCPUMRDMSR} */
2348static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelPmgIoCaptureBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2349{
2350 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2351 /** @todo implement I/O mwait wakeup. */
2352 *puValue = 0;
2353 return VINF_SUCCESS;
2354}
2355
2356
2357/** @callback_method_impl{FNCPUMWRMSR} */
2358static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelPmgIoCaptureBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2359{
2360 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2361 /** @todo implement I/O mwait wakeup. */
2362 return VINF_SUCCESS;
2363}
2364
2365
2366/** @callback_method_impl{FNCPUMRDMSR} */
2367static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelLastBranchFromToN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2368{
2369 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2370 /** @todo implement last branch records. */
2371 *puValue = 0;
2372 return VINF_SUCCESS;
2373}
2374
2375
2376/** @callback_method_impl{FNCPUMWRMSR} */
2377static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelLastBranchFromToN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2378{
2379 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2380 /** @todo implement last branch records. */
2381 return VINF_SUCCESS;
2382}
2383
2384
2385/** @callback_method_impl{FNCPUMRDMSR} */
2386static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelLastBranchFromN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2387{
2388 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2389 /** @todo implement last branch records. */
2390 *puValue = 0;
2391 return VINF_SUCCESS;
2392}
2393
2394
2395/** @callback_method_impl{FNCPUMWRMSR} */
2396static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelLastBranchFromN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2397{
2398 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2399 /** @todo implement last branch records. */
2400 /** @todo Probing indicates that bit 63 is settable on SandyBridge, at least
2401 * if the rest of the bits are zero. Automatic sign extending?
2402 * Investigate! */
2403 if (!X86_IS_CANONICAL(uValue))
2404 {
2405 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
2406 return VERR_CPUM_RAISE_GP_0;
2407 }
2408 return VINF_SUCCESS;
2409}
2410
2411
2412/** @callback_method_impl{FNCPUMRDMSR} */
2413static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelLastBranchToN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2414{
2415 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2416 /** @todo implement last branch records. */
2417 *puValue = 0;
2418 return VINF_SUCCESS;
2419}
2420
2421
2422/** @callback_method_impl{FNCPUMWRMSR} */
2423static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelLastBranchToN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2424{
2425 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2426 /** @todo implement last branch records. */
2427 /** @todo Probing indicates that bit 63 is settable on SandyBridge, at least
2428 * if the rest of the bits are zero. Automatic sign extending?
2429 * Investigate! */
2430 if (!X86_IS_CANONICAL(uValue))
2431 {
2432 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
2433 return VERR_CPUM_RAISE_GP_0;
2434 }
2435 return VINF_SUCCESS;
2436}
2437
2438
2439/** @callback_method_impl{FNCPUMRDMSR} */
2440static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelLastBranchTos(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2441{
2442 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2443 /** @todo implement last branch records. */
2444 *puValue = 0;
2445 return VINF_SUCCESS;
2446}
2447
2448
2449/** @callback_method_impl{FNCPUMWRMSR} */
2450static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelLastBranchTos(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2451{
2452 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2453 /** @todo implement last branch records. */
2454 return VINF_SUCCESS;
2455}
2456
2457
2458/** @callback_method_impl{FNCPUMRDMSR} */
2459static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelBblCrCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2460{
2461 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2462 *puValue = pRange->uValue;
2463 return VINF_SUCCESS;
2464}
2465
2466
2467/** @callback_method_impl{FNCPUMWRMSR} */
2468static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelBblCrCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2469{
2470 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2471 return VINF_SUCCESS;
2472}
2473
2474
2475/** @callback_method_impl{FNCPUMRDMSR} */
2476static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelBblCrCtl3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2477{
2478 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2479 *puValue = pRange->uValue;
2480 return VINF_SUCCESS;
2481}
2482
2483
2484/** @callback_method_impl{FNCPUMWRMSR} */
2485static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelBblCrCtl3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2486{
2487 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2488 return VINF_SUCCESS;
2489}
2490
2491
2492/** @callback_method_impl{FNCPUMRDMSR} */
2493static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7TemperatureTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2494{
2495 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2496 *puValue = pRange->uValue;
2497 return VINF_SUCCESS;
2498}
2499
2500
2501/** @callback_method_impl{FNCPUMWRMSR} */
2502static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7TemperatureTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2503{
2504 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2505 return VINF_SUCCESS;
2506}
2507
2508
2509/** @callback_method_impl{FNCPUMRDMSR} */
2510static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7MsrOffCoreResponseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2511{
2512 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2513 /** @todo machine check. */
2514 *puValue = pRange->uValue;
2515 return VINF_SUCCESS;
2516}
2517
2518
2519/** @callback_method_impl{FNCPUMWRMSR} */
2520static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7MsrOffCoreResponseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2521{
2522 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2523 /** @todo machine check. */
2524 return VINF_SUCCESS;
2525}
2526
2527
2528/** @callback_method_impl{FNCPUMRDMSR} */
2529static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7MiscPwrMgmt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2530{
2531 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2532 *puValue = 0;
2533 return VINF_SUCCESS;
2534}
2535
2536
2537/** @callback_method_impl{FNCPUMWRMSR} */
2538static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7MiscPwrMgmt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2539{
2540 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2541 return VINF_SUCCESS;
2542}
2543
2544
2545/** @callback_method_impl{FNCPUMRDMSR} */
2546static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP6CrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2547{
2548 RT_NOREF_PV(idMsr);
2549 int rc = CPUMGetGuestCRx(pVCpu, pRange->uValue, puValue);
2550 AssertRC(rc);
2551 return VINF_SUCCESS;
2552}
2553
2554
2555/** @callback_method_impl{FNCPUMWRMSR} */
2556static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelP6CrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2557{
2558 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2559 /* This CRx interface differs from the MOV CRx, GReg interface in that
2560 #GP(0) isn't raised if unsupported bits are written to. Instead they
2561 are simply ignored and masked off. (Pentium M Dothan) */
2562 /** @todo Implement MSR_P6_CRx writing. Too much effort for very little, if
2563 * any, gain. */
2564 return VINF_SUCCESS;
2565}
2566
2567
2568/** @callback_method_impl{FNCPUMRDMSR} */
2569static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCpuId1FeatureMaskEcdx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2570{
2571 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2572 /** @todo implement CPUID masking. */
2573 *puValue = UINT64_MAX;
2574 return VINF_SUCCESS;
2575}
2576
2577
2578/** @callback_method_impl{FNCPUMWRMSR} */
2579static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCpuId1FeatureMaskEcdx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2580{
2581 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2582 /** @todo implement CPUID masking. */
2583 return VINF_SUCCESS;
2584}
2585
2586
2587/** @callback_method_impl{FNCPUMRDMSR} */
2588static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCpuId1FeatureMaskEax(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2589{
2590 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2591 /** @todo implement CPUID masking. */
2592 *puValue = 0;
2593 return VINF_SUCCESS;
2594}
2595
2596
2597/** @callback_method_impl{FNCPUMWRMSR} */
2598static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCpuId1FeatureMaskEax(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2599{
2600 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2601 /** @todo implement CPUID masking. */
2602 return VINF_SUCCESS;
2603}
2604
2605
2606
2607/** @callback_method_impl{FNCPUMRDMSR} */
2608static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCpuId80000001FeatureMaskEcdx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2609{
2610 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2611 /** @todo implement CPUID masking. */
2612 *puValue = UINT64_MAX;
2613 return VINF_SUCCESS;
2614}
2615
2616
2617/** @callback_method_impl{FNCPUMWRMSR} */
2618static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCpuId80000001FeatureMaskEcdx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2619{
2620 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2621 /** @todo implement CPUID masking. */
2622 return VINF_SUCCESS;
2623}
2624
2625
2626
2627/** @callback_method_impl{FNCPUMRDMSR} */
2628static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyAesNiCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2629{
2630 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2631 /** @todo implement AES-NI. */
2632 *puValue = 3; /* Bit 0 is lock bit, bit 1 disables AES-NI. That's what they say. */
2633 return VINF_SUCCESS;
2634}
2635
2636
2637/** @callback_method_impl{FNCPUMWRMSR} */
2638static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyAesNiCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2639{
2640 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2641 /** @todo implement AES-NI. */
2642 return VERR_CPUM_RAISE_GP_0;
2643}
2644
2645
2646/** @callback_method_impl{FNCPUMRDMSR} */
2647static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7TurboRatioLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2648{
2649 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2650 /** @todo implement intel C states. */
2651 *puValue = pRange->uValue;
2652 return VINF_SUCCESS;
2653}
2654
2655
2656/** @callback_method_impl{FNCPUMWRMSR} */
2657static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7TurboRatioLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2658{
2659 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2660 /** @todo implement intel C states. */
2661 return VINF_SUCCESS;
2662}
2663
2664
2665/** @callback_method_impl{FNCPUMRDMSR} */
2666static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7LbrSelect(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2667{
2668 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2669 /** @todo implement last-branch-records. */
2670 *puValue = 0;
2671 return VINF_SUCCESS;
2672}
2673
2674
2675/** @callback_method_impl{FNCPUMWRMSR} */
2676static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7LbrSelect(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2677{
2678 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2679 /** @todo implement last-branch-records. */
2680 return VINF_SUCCESS;
2681}
2682
2683
2684/** @callback_method_impl{FNCPUMRDMSR} */
2685static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyErrorControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2686{
2687 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2688 /** @todo implement memory error injection (MSR_ERROR_CONTROL). */
2689 *puValue = 0;
2690 return VINF_SUCCESS;
2691}
2692
2693
2694/** @callback_method_impl{FNCPUMWRMSR} */
2695static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyErrorControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2696{
2697 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2698 /** @todo implement memory error injection (MSR_ERROR_CONTROL). */
2699 return VINF_SUCCESS;
2700}
2701
2702
2703/** @callback_method_impl{FNCPUMRDMSR} */
2704static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7VirtualLegacyWireCap(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2705{
2706 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2707 /** @todo implement memory VLW? */
2708 *puValue = pRange->uValue;
2709 /* Note: A20M is known to be bit 1 as this was disclosed in spec update
2710 AAJ49/AAK51/????, which documents the inversion of this bit. The
2711 Sandy bridge CPU here has value 0x74, so it probably doesn't have a BIOS
2712 that corrects things. Some guesses at the other bits:
2713 bit 2 = INTR
2714 bit 4 = SMI
2715 bit 5 = INIT
2716 bit 6 = NMI */
2717 return VINF_SUCCESS;
2718}
2719
2720
2721/** @callback_method_impl{FNCPUMRDMSR} */
2722static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7PowerCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2723{
2724 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2725 /** @todo intel power management */
2726 *puValue = 0;
2727 return VINF_SUCCESS;
2728}
2729
2730
2731/** @callback_method_impl{FNCPUMWRMSR} */
2732static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7PowerCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2733{
2734 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2735 /** @todo intel power management */
2736 return VINF_SUCCESS;
2737}
2738
2739
2740/** @callback_method_impl{FNCPUMRDMSR} */
2741static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyPebsNumAlt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2742{
2743 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2744 /** @todo intel performance counters. */
2745 *puValue = 0;
2746 return VINF_SUCCESS;
2747}
2748
2749
2750/** @callback_method_impl{FNCPUMWRMSR} */
2751static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyPebsNumAlt(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2752{
2753 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2754 /** @todo intel performance counters. */
2755 return VINF_SUCCESS;
2756}
2757
2758
2759/** @callback_method_impl{FNCPUMRDMSR} */
2760static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7PebsLdLat(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2761{
2762 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2763 /** @todo intel performance counters. */
2764 *puValue = 0;
2765 return VINF_SUCCESS;
2766}
2767
2768
2769/** @callback_method_impl{FNCPUMWRMSR} */
2770static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7PebsLdLat(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2771{
2772 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2773 /** @todo intel performance counters. */
2774 return VINF_SUCCESS;
2775}
2776
2777
2778/** @callback_method_impl{FNCPUMRDMSR} */
2779static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7PkgCnResidencyN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2780{
2781 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2782 /** @todo intel power management. */
2783 *puValue = 0;
2784 return VINF_SUCCESS;
2785}
2786
2787
2788/** @callback_method_impl{FNCPUMRDMSR} */
2789static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7CoreCnResidencyN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2790{
2791 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2792 /** @todo intel power management. */
2793 *puValue = 0;
2794 return VINF_SUCCESS;
2795}
2796
2797
2798/** @callback_method_impl{FNCPUMRDMSR} */
2799static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyVrCurrentConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2800{
2801 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2802 /** @todo Figure out what MSR_VR_CURRENT_CONFIG & MSR_VR_MISC_CONFIG are. */
2803 *puValue = 0;
2804 return VINF_SUCCESS;
2805}
2806
2807
2808/** @callback_method_impl{FNCPUMWRMSR} */
2809static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyVrCurrentConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2810{
2811 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2812 /** @todo Figure out what MSR_VR_CURRENT_CONFIG & MSR_VR_MISC_CONFIG are. */
2813 return VINF_SUCCESS;
2814}
2815
2816
2817/** @callback_method_impl{FNCPUMRDMSR} */
2818static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyVrMiscConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2819{
2820 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2821 /** @todo Figure out what MSR_VR_CURRENT_CONFIG & MSR_VR_MISC_CONFIG are. */
2822 *puValue = 0;
2823 return VINF_SUCCESS;
2824}
2825
2826
2827/** @callback_method_impl{FNCPUMWRMSR} */
2828static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyVrMiscConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2829{
2830 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2831 /** @todo Figure out what MSR_VR_CURRENT_CONFIG & MSR_VR_MISC_CONFIG are. */
2832 return VINF_SUCCESS;
2833}
2834
2835
2836/** @callback_method_impl{FNCPUMRDMSR} */
2837static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyRaplPowerUnit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2838{
2839 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2840 /** @todo intel RAPL. */
2841 *puValue = pRange->uValue;
2842 return VINF_SUCCESS;
2843}
2844
2845
2846/** @callback_method_impl{FNCPUMWRMSR} */
2847static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyRaplPowerUnit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2848{
2849 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2850 /* Note! This is documented as read-only and, except for a Silvermont sample, has
2851 always been classified as read-only. This is just here to make it compile. */
2852 return VINF_SUCCESS;
2853}
2854
2855
2856/** @callback_method_impl{FNCPUMRDMSR} */
2857static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyPkgCnIrtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2858{
2859 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2860 /** @todo intel power management. */
2861 *puValue = 0;
2862 return VINF_SUCCESS;
2863}
2864
2865
2866/** @callback_method_impl{FNCPUMWRMSR} */
2867static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyPkgCnIrtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2868{
2869 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2870 /** @todo intel power management. */
2871 return VINF_SUCCESS;
2872}
2873
2874
2875/** @callback_method_impl{FNCPUMRDMSR} */
2876static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyPkgC2Residency(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2877{
2878 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2879 /** @todo intel power management. */
2880 *puValue = 0;
2881 return VINF_SUCCESS;
2882}
2883
2884
2885/** @callback_method_impl{FNCPUMWRMSR} */
2886static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyPkgC2Residency(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2887{
2888 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2889 /* Note! This is documented as read-only and, except for a Silvermont sample, has
2890 always been classified as read-only. This is just here to make it compile. */
2891 return VINF_SUCCESS;
2892}
2893
2894
2895/** @callback_method_impl{FNCPUMRDMSR} */
2896static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPkgPowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2897{
2898 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2899 /** @todo intel RAPL. */
2900 *puValue = 0;
2901 return VINF_SUCCESS;
2902}
2903
2904
2905/** @callback_method_impl{FNCPUMWRMSR} */
2906static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPkgPowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2907{
2908 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2909 /** @todo intel RAPL. */
2910 return VINF_SUCCESS;
2911}
2912
2913
2914/** @callback_method_impl{FNCPUMRDMSR} */
2915static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPkgEnergyStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2916{
2917 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2918 /** @todo intel power management. */
2919 *puValue = 0;
2920 return VINF_SUCCESS;
2921}
2922
2923
2924/** @callback_method_impl{FNCPUMRDMSR} */
2925static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPkgPerfStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2926{
2927 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2928 /** @todo intel power management. */
2929 *puValue = 0;
2930 return VINF_SUCCESS;
2931}
2932
2933
2934/** @callback_method_impl{FNCPUMRDMSR} */
2935static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPkgPowerInfo(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2936{
2937 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2938 /** @todo intel power management. */
2939 *puValue = 0;
2940 return VINF_SUCCESS;
2941}
2942
2943
2944/** @callback_method_impl{FNCPUMRDMSR} */
2945static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplDramPowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2946{
2947 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2948 /** @todo intel RAPL. */
2949 *puValue = 0;
2950 return VINF_SUCCESS;
2951}
2952
2953
2954/** @callback_method_impl{FNCPUMWRMSR} */
2955static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplDramPowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2956{
2957 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2958 /** @todo intel RAPL. */
2959 return VINF_SUCCESS;
2960}
2961
2962
2963/** @callback_method_impl{FNCPUMRDMSR} */
2964static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplDramEnergyStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2965{
2966 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2967 /** @todo intel power management. */
2968 *puValue = 0;
2969 return VINF_SUCCESS;
2970}
2971
2972
2973/** @callback_method_impl{FNCPUMRDMSR} */
2974static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplDramPerfStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2975{
2976 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2977 /** @todo intel power management. */
2978 *puValue = 0;
2979 return VINF_SUCCESS;
2980}
2981
2982
2983/** @callback_method_impl{FNCPUMRDMSR} */
2984static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplDramPowerInfo(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2985{
2986 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2987 /** @todo intel power management. */
2988 *puValue = 0;
2989 return VINF_SUCCESS;
2990}
2991
2992
2993/** @callback_method_impl{FNCPUMRDMSR} */
2994static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp0PowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2995{
2996 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2997 /** @todo intel RAPL. */
2998 *puValue = 0;
2999 return VINF_SUCCESS;
3000}
3001
3002
3003/** @callback_method_impl{FNCPUMWRMSR} */
3004static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPp0PowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3005{
3006 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3007 /** @todo intel RAPL. */
3008 return VINF_SUCCESS;
3009}
3010
3011
3012/** @callback_method_impl{FNCPUMRDMSR} */
3013static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp0EnergyStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3014{
3015 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3016 /** @todo intel power management. */
3017 *puValue = 0;
3018 return VINF_SUCCESS;
3019}
3020
3021
3022/** @callback_method_impl{FNCPUMRDMSR} */
3023static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp0Policy(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3024{
3025 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3026 /** @todo intel RAPL. */
3027 *puValue = 0;
3028 return VINF_SUCCESS;
3029}
3030
3031
3032/** @callback_method_impl{FNCPUMWRMSR} */
3033static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPp0Policy(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3034{
3035 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3036 /** @todo intel RAPL. */
3037 return VINF_SUCCESS;
3038}
3039
3040
3041/** @callback_method_impl{FNCPUMRDMSR} */
3042static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp0PerfStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3043{
3044 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3045 /** @todo intel power management. */
3046 *puValue = 0;
3047 return VINF_SUCCESS;
3048}
3049
3050
3051/** @callback_method_impl{FNCPUMRDMSR} */
3052static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp1PowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3053{
3054 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3055 /** @todo intel RAPL. */
3056 *puValue = 0;
3057 return VINF_SUCCESS;
3058}
3059
3060
3061/** @callback_method_impl{FNCPUMWRMSR} */
3062static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPp1PowerLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3063{
3064 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3065 /** @todo intel RAPL. */
3066 return VINF_SUCCESS;
3067}
3068
3069
3070/** @callback_method_impl{FNCPUMRDMSR} */
3071static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp1EnergyStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3072{
3073 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3074 /** @todo intel power management. */
3075 *puValue = 0;
3076 return VINF_SUCCESS;
3077}
3078
3079
3080/** @callback_method_impl{FNCPUMRDMSR} */
3081static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp1Policy(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3082{
3083 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3084 /** @todo intel RAPL. */
3085 *puValue = 0;
3086 return VINF_SUCCESS;
3087}
3088
3089
3090/** @callback_method_impl{FNCPUMWRMSR} */
3091static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPp1Policy(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3092{
3093 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3094 /** @todo intel RAPL. */
3095 return VINF_SUCCESS;
3096}
3097
3098
3099/** @callback_method_impl{FNCPUMRDMSR} */
3100static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyConfigTdpNominal(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3101{
3102 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3103 /** @todo intel power management. */
3104 *puValue = pRange->uValue;
3105 return VINF_SUCCESS;
3106}
3107
3108
3109/** @callback_method_impl{FNCPUMRDMSR} */
3110static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyConfigTdpLevel1(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3111{
3112 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3113 /** @todo intel power management. */
3114 *puValue = pRange->uValue;
3115 return VINF_SUCCESS;
3116}
3117
3118
3119/** @callback_method_impl{FNCPUMRDMSR} */
3120static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyConfigTdpLevel2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3121{
3122 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3123 /** @todo intel power management. */
3124 *puValue = pRange->uValue;
3125 return VINF_SUCCESS;
3126}
3127
3128
3129/** @callback_method_impl{FNCPUMRDMSR} */
3130static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyConfigTdpControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3131{
3132 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3133 /** @todo intel power management. */
3134 *puValue = 0;
3135 return VINF_SUCCESS;
3136}
3137
3138
3139/** @callback_method_impl{FNCPUMWRMSR} */
3140static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7IvyConfigTdpControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3141{
3142 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3143 /** @todo intel power management. */
3144 return VINF_SUCCESS;
3145}
3146
3147
3148/** @callback_method_impl{FNCPUMRDMSR} */
3149static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyTurboActivationRatio(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3150{
3151 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3152 /** @todo intel power management. */
3153 *puValue = 0;
3154 return VINF_SUCCESS;
3155}
3156
3157
3158/** @callback_method_impl{FNCPUMWRMSR} */
3159static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7IvyTurboActivationRatio(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3160{
3161 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3162 /** @todo intel power management. */
3163 return VINF_SUCCESS;
3164}
3165
3166
3167/** @callback_method_impl{FNCPUMRDMSR} */
3168static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfGlobalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3169{
3170 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3171 /** @todo uncore msrs. */
3172 *puValue = 0;
3173 return VINF_SUCCESS;
3174}
3175
3176
3177/** @callback_method_impl{FNCPUMWRMSR} */
3178static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfGlobalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3179{
3180 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3181 /** @todo uncore msrs. */
3182 return VINF_SUCCESS;
3183}
3184
3185
3186/** @callback_method_impl{FNCPUMRDMSR} */
3187static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfGlobalStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3188{
3189 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3190 /** @todo uncore msrs. */
3191 *puValue = 0;
3192 return VINF_SUCCESS;
3193}
3194
3195
3196/** @callback_method_impl{FNCPUMWRMSR} */
3197static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfGlobalStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3198{
3199 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3200 /** @todo uncore msrs. */
3201 return VINF_SUCCESS;
3202}
3203
3204
3205/** @callback_method_impl{FNCPUMRDMSR} */
3206static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfGlobalOvfCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3207{
3208 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3209 /** @todo uncore msrs. */
3210 *puValue = 0;
3211 return VINF_SUCCESS;
3212}
3213
3214
3215/** @callback_method_impl{FNCPUMWRMSR} */
3216static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfGlobalOvfCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3217{
3218 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3219 /** @todo uncore msrs. */
3220 return VINF_SUCCESS;
3221}
3222
3223
3224/** @callback_method_impl{FNCPUMRDMSR} */
3225static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfFixedCtrCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3226{
3227 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3228 /** @todo uncore msrs. */
3229 *puValue = 0;
3230 return VINF_SUCCESS;
3231}
3232
3233
3234/** @callback_method_impl{FNCPUMWRMSR} */
3235static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfFixedCtrCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3236{
3237 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3238 /** @todo uncore msrs. */
3239 return VINF_SUCCESS;
3240}
3241
3242
3243/** @callback_method_impl{FNCPUMRDMSR} */
3244static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfFixedCtr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3245{
3246 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3247 /** @todo uncore msrs. */
3248 *puValue = 0;
3249 return VINF_SUCCESS;
3250}
3251
3252
3253/** @callback_method_impl{FNCPUMWRMSR} */
3254static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfFixedCtr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3255{
3256 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3257 /** @todo uncore msrs. */
3258 return VINF_SUCCESS;
3259}
3260
3261
3262/** @callback_method_impl{FNCPUMRDMSR} */
3263static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncCBoxConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3264{
3265 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3266 /** @todo uncore msrs. */
3267 *puValue = 0;
3268 return VINF_SUCCESS;
3269}
3270
3271
3272/** @callback_method_impl{FNCPUMRDMSR} */
3273static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncArbPerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3274{
3275 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3276 /** @todo uncore msrs. */
3277 *puValue = 0;
3278 return VINF_SUCCESS;
3279}
3280
3281
3282/** @callback_method_impl{FNCPUMWRMSR} */
3283static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncArbPerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3284{
3285 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3286 /** @todo uncore msrs. */
3287 return VINF_SUCCESS;
3288}
3289
3290
3291/** @callback_method_impl{FNCPUMRDMSR} */
3292static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncArbPerfEvtSelN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3293{
3294 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3295 /** @todo uncore msrs. */
3296 *puValue = 0;
3297 return VINF_SUCCESS;
3298}
3299
3300
3301/** @callback_method_impl{FNCPUMWRMSR} */
3302static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncArbPerfEvtSelN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3303{
3304 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3305 /** @todo uncore msrs. */
3306 return VINF_SUCCESS;
3307}
3308
3309
3310/** @callback_method_impl{FNCPUMRDMSR} */
3311static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SmiCount(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3312{
3313 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3314
3315 /*
3316 * 31:0 is SMI count (read only), 63:32 reserved.
3317 * Since we don't do SMI, the count is always zero.
3318 */
3319 *puValue = 0;
3320 return VINF_SUCCESS;
3321}
3322
3323
3324/** @callback_method_impl{FNCPUMRDMSR} */
3325static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore2EmttmCrTablesN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3326{
3327 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3328 /** @todo Implement enhanced multi-threaded thermal monitoring? */
3329 *puValue = pRange->uValue;
3330 return VINF_SUCCESS;
3331}
3332
3333
3334/** @callback_method_impl{FNCPUMWRMSR} */
3335static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore2EmttmCrTablesN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3336{
3337 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3338 /** @todo Implement enhanced multi-threaded thermal monitoring? */
3339 return VINF_SUCCESS;
3340}
3341
3342
3343/** @callback_method_impl{FNCPUMRDMSR} */
3344static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore2SmmCStMiscInfo(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3345{
3346 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3347 /** @todo SMM & C-states? */
3348 *puValue = 0;
3349 return VINF_SUCCESS;
3350}
3351
3352
3353/** @callback_method_impl{FNCPUMWRMSR} */
3354static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore2SmmCStMiscInfo(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3355{
3356 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3357 /** @todo SMM & C-states? */
3358 return VINF_SUCCESS;
3359}
3360
3361
3362/** @callback_method_impl{FNCPUMRDMSR} */
3363static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore1ExtConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3364{
3365 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3366 /** @todo Core1&2 EXT_CONFIG (whatever that is)? */
3367 *puValue = 0;
3368 return VINF_SUCCESS;
3369}
3370
3371
3372/** @callback_method_impl{FNCPUMWRMSR} */
3373static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore1ExtConfig(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3374{
3375 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3376 /** @todo Core1&2 EXT_CONFIG (whatever that is)? */
3377 return VINF_SUCCESS;
3378}
3379
3380
3381/** @callback_method_impl{FNCPUMRDMSR} */
3382static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore1DtsCalControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3383{
3384 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3385 /** @todo Core1&2(?) DTS_CAL_CTRL (whatever that is)? */
3386 *puValue = 0;
3387 return VINF_SUCCESS;
3388}
3389
3390
3391/** @callback_method_impl{FNCPUMWRMSR} */
3392static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore1DtsCalControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3393{
3394 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3395 /** @todo Core1&2(?) DTS_CAL_CTRL (whatever that is)? */
3396 return VINF_SUCCESS;
3397}
3398
3399
3400/** @callback_method_impl{FNCPUMRDMSR} */
3401static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore2PeciControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3402{
3403 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3404 /** @todo Core2+ platform environment control interface control register? */
3405 *puValue = 0;
3406 return VINF_SUCCESS;
3407}
3408
3409
3410/** @callback_method_impl{FNCPUMWRMSR} */
3411static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore2PeciControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3412{
3413 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3414 /** @todo Core2+ platform environment control interface control register? */
3415 return VINF_SUCCESS;
3416}
3417
3418
3419/** @callback_method_impl{FNCPUMRDMSR} */
3420static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelAtSilvCoreC1Recidency(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3421{
3422 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3423 *puValue = 0;
3424 return VINF_SUCCESS;
3425}
3426
3427
3428/*
3429 * Multiple vendor P6 MSRs.
3430 * Multiple vendor P6 MSRs.
3431 * Multiple vendor P6 MSRs.
3432 *
3433 * These MSRs were introduced with the P6 but not elevated to architectural
3434 * MSRs, despite other vendors implementing them.
3435 */
3436
3437
3438/** @callback_method_impl{FNCPUMRDMSR} */
3439static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_P6LastBranchFromIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3440{
3441 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3442 /* AMD seems to record just RIP, while Intel claims to record RIP+CS.BASE
3443 if I read the docs correctly; hence the need for separate functions. */
3444 /** @todo implement last branch records. */
3445 *puValue = 0;
3446 return VINF_SUCCESS;
3447}
3448
3449
3450/** @callback_method_impl{FNCPUMRDMSR} */
3451static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_P6LastBranchToIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3452{
3453 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3454 /** @todo implement last branch records. */
3455 *puValue = 0;
3456 return VINF_SUCCESS;
3457}
3458
3459
3460/** @callback_method_impl{FNCPUMRDMSR} */
3461static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_P6LastIntFromIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3462{
3463 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3464 /** @todo implement last exception records. */
3465 *puValue = 0;
3466 return VINF_SUCCESS;
3467}
3468
3469
3470/** @callback_method_impl{FNCPUMWRMSR} */
3471static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_P6LastIntFromIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3472{
3473 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3474 /** @todo implement last exception records. */
3475 /* Note! On many CPUs, the high bit of the 0x000001dd register is always writable, even when the result is
3476 a non-canonical address. */
3477 return VINF_SUCCESS;
3478}
3479
3480
3481/** @callback_method_impl{FNCPUMRDMSR} */
3482static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_P6LastIntToIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3483{
3484 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3485 /** @todo implement last exception records. */
3486 *puValue = 0;
3487 return VINF_SUCCESS;
3488}
3489
3490
3491/** @callback_method_impl{FNCPUMWRMSR} */
3492static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_P6LastIntToIp(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3493{
3494 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3495 /** @todo implement last exception records. */
3496 return VINF_SUCCESS;
3497}
3498
3499
3500
3501/*
3502 * AMD specific
3503 * AMD specific
3504 * AMD specific
3505 */
3506
3507
3508/** @callback_method_impl{FNCPUMRDMSR} */
3509static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hTscRate(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3510{
3511 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3512 /** @todo Implement TscRateMsr */
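    /* The TSC ratio MSR appears to be a fixed-point value with the integer part in the
       high dword and the fraction in the low dword, so RT_MAKE_U64(0, 1) encodes 1.0. */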
3513 *puValue = RT_MAKE_U64(0, 1); /* 1.0 = reset value. */
3514 return VINF_SUCCESS;
3515}
3516
3517
3518/** @callback_method_impl{FNCPUMWRMSR} */
3519static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hTscRate(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3520{
3521 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3522 /** @todo Implement TscRateMsr */
3523 return VINF_SUCCESS;
3524}
3525
3526
3527/** @callback_method_impl{FNCPUMRDMSR} */
3528static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hLwpCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3529{
3530 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3531 /** @todo Implement AMD LWP? (Instructions: LWPINS, LWPVAL, LLWPCB, SLWPCB) */
3532 /* Note: Only listed in the BKDG for Family 15h. */
3533 *puValue = 0;
3534 return VINF_SUCCESS;
3535}
3536
3537
3538/** @callback_method_impl{FNCPUMWRMSR} */
3539static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hLwpCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3540{
3541 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3542 /** @todo Implement AMD LWP? (Instructions: LWPINS, LWPVAL, LLWPCB, SLWPCB) */
3543 return VINF_SUCCESS;
3544}
3545
3546
3547/** @callback_method_impl{FNCPUMRDMSR} */
3548static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hLwpCbAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3549{
3550 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3551 /** @todo Implement AMD LWP? (Instructions: LWPINS, LWPVAL, LLWPCB, SLWPCB) */
3552 /* Note: Only listed in the BKDG for Family 15h. */
3553 *puValue = 0;
3554 return VINF_SUCCESS;
3555}
3556
3557
3558/** @callback_method_impl{FNCPUMWRMSR} */
3559static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hLwpCbAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3560{
3561 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3562 /** @todo Implement AMD LWP? (Instructions: LWPINS, LWPVAL, LLWPCB, SLWPCB) */
3563 return VINF_SUCCESS;
3564}
3565
3566
3567/** @callback_method_impl{FNCPUMRDMSR} */
3568static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hMc4MiscN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3569{
3570 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3571 /** @todo machine check. */
3572 *puValue = 0;
3573 return VINF_SUCCESS;
3574}
3575
3576
3577/** @callback_method_impl{FNCPUMWRMSR} */
3578static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hMc4MiscN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3579{
3580 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3581 /** @todo machine check. */
3582 return VINF_SUCCESS;
3583}
3584
3585
3586/** @callback_method_impl{FNCPUMRDMSR} */
3587static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8PerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3588{
3589 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3590 /** @todo AMD performance events. */
3591 *puValue = 0;
3592 return VINF_SUCCESS;
3593}
3594
3595
3596/** @callback_method_impl{FNCPUMWRMSR} */
3597static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8PerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3598{
3599 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3600 /** @todo AMD performance events. */
3601 return VINF_SUCCESS;
3602}
3603
3604
3605/** @callback_method_impl{FNCPUMRDMSR} */
3606static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8PerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3607{
3608 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3609 /** @todo AMD performance events. */
3610 *puValue = 0;
3611 return VINF_SUCCESS;
3612}
3613
3614
3615/** @callback_method_impl{FNCPUMWRMSR} */
3616static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8PerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3617{
3618 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3619 /** @todo AMD performance events. */
3620 return VINF_SUCCESS;
3621}
3622
3623
3624/** @callback_method_impl{FNCPUMRDMSR} */
3625static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SysCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3626{
3627 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3628 /** @todo AMD SYS_CFG */
3629 *puValue = pRange->uValue;
3630 return VINF_SUCCESS;
3631}
3632
3633
3634/** @callback_method_impl{FNCPUMWRMSR} */
3635static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SysCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3636{
3637 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3638 /** @todo AMD SYS_CFG */
3639 return VINF_SUCCESS;
3640}
3641
3642
3643/** @callback_method_impl{FNCPUMRDMSR} */
3644static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8HwCr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3645{
3646 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3647 /** @todo AMD HW_CFG */
3648 *puValue = 0;
3649 return VINF_SUCCESS;
3650}
3651
3652
3653/** @callback_method_impl{FNCPUMWRMSR} */
3654static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8HwCr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3655{
3656 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3657 /** @todo AMD HW_CFG */
3658 return VINF_SUCCESS;
3659}
3660
3661
3662/** @callback_method_impl{FNCPUMRDMSR} */
3663static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8IorrBaseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3664{
3665 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3666 /** @todo AMD IorrMask/IorrBase */
3667 *puValue = 0;
3668 return VINF_SUCCESS;
3669}
3670
3671
3672/** @callback_method_impl{FNCPUMWRMSR} */
3673static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8IorrBaseN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3674{
3675 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3676 /** @todo AMD IorrMask/IorrBase */
3677 return VINF_SUCCESS;
3678}
3679
3680
3681/** @callback_method_impl{FNCPUMRDMSR} */
3682static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8IorrMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3683{
3684 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3685 /** @todo AMD IorrMask/IorrBase */
3686 *puValue = 0;
3687 return VINF_SUCCESS;
3688}
3689
3690
3691/** @callback_method_impl{FNCPUMWRMSR} */
3692static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8IorrMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3693{
3694 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3695 /** @todo AMD IorrMask/IorrBase */
3696 return VINF_SUCCESS;
3697}
3698
3699
3700/** @callback_method_impl{FNCPUMRDMSR} */
3701static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8TopOfMemN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3702{
3703 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3704 *puValue = 0;
3705 /** @todo return 4GB - RamHoleSize here for TOPMEM. Figure out what to return
3706 * for TOPMEM2. */
3707 //if (pRange->uValue == 0)
3708 // *puValue = _4G - RamHoleSize;
3709 return VINF_SUCCESS;
3710}
3711
3712
3713/** @callback_method_impl{FNCPUMWRMSR} */
3714static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8TopOfMemN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3715{
3716 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3717 /** @todo AMD TOPMEM and TOPMEM2/TOM2. */
3718 return VINF_SUCCESS;
3719}
3720
3721
3722/** @callback_method_impl{FNCPUMRDMSR} */
3723static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8NbCfg1(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3724{
3725 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3726 /** @todo AMD NB_CFG1 */
3727 *puValue = 0;
3728 return VINF_SUCCESS;
3729}
3730
3731
3732/** @callback_method_impl{FNCPUMWRMSR} */
3733static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8NbCfg1(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3734{
3735 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3736 /** @todo AMD NB_CFG1 */
3737 return VINF_SUCCESS;
3738}
3739
3740
3741/** @callback_method_impl{FNCPUMRDMSR} */
3742static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8McXcptRedir(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3743{
3744 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3745 /** @todo machine check. */
3746 *puValue = 0;
3747 return VINF_SUCCESS;
3748}
3749
3750
3751/** @callback_method_impl{FNCPUMWRMSR} */
3752static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8McXcptRedir(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3753{
3754 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3755 /** @todo machine check. */
3756 return VINF_SUCCESS;
3757}
3758
3759
3760/** @callback_method_impl{FNCPUMRDMSR} */
3761static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuNameN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3762{
3763 RT_NOREF_PV(idMsr);
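    /* The brand string comes from the extended CPUID leaves: pRange->uValue / 2 selects the
       leaf relative to 0x80000001, and the low bit picks the EAX:EBX or ECX:EDX half. */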
3764 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeaf(pVCpu->CTX_SUFF(pVM), pRange->uValue / 2 + 0x80000001);
3765 if (pLeaf)
3766 {
3767 if (!(pRange->uValue & 1))
3768 *puValue = RT_MAKE_U64(pLeaf->uEax, pLeaf->uEbx);
3769 else
3770 *puValue = RT_MAKE_U64(pLeaf->uEcx, pLeaf->uEdx);
3771 }
3772 else
3773 *puValue = 0;
3774 return VINF_SUCCESS;
3775}
3776
3777
3778/** @callback_method_impl{FNCPUMWRMSR} */
3779static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuNameN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3780{
3781 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3782 /** @todo Remember guest programmed CPU name. */
3783 return VINF_SUCCESS;
3784}
3785
3786
3787/** @callback_method_impl{FNCPUMRDMSR} */
3788static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8HwThermalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3789{
3790 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3791 /** @todo AMD HTC. */
3792 *puValue = pRange->uValue;
3793 return VINF_SUCCESS;
3794}
3795
3796
3797/** @callback_method_impl{FNCPUMWRMSR} */
3798static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8HwThermalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3799{
3800 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3801 /** @todo AMD HTC. */
3802 return VINF_SUCCESS;
3803}
3804
3805
3806/** @callback_method_impl{FNCPUMRDMSR} */
3807static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SwThermalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3808{
3809 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3810 /** @todo AMD STC. */
3811 *puValue = 0;
3812 return VINF_SUCCESS;
3813}
3814
3815
3816/** @callback_method_impl{FNCPUMWRMSR} */
3817static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SwThermalCtrl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3818{
3819 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3820 /** @todo AMD STC. */
3821 return VINF_SUCCESS;
3822}
3823
3824
3825/** @callback_method_impl{FNCPUMRDMSR} */
3826static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8FidVidControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3827{
3828 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3829 /** @todo AMD FIDVID_CTL. */
3830 *puValue = pRange->uValue;
3831 return VINF_SUCCESS;
3832}
3833
3834
3835/** @callback_method_impl{FNCPUMWRMSR} */
3836static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8FidVidControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3837{
3838 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3839 /** @todo AMD FIDVID_CTL. */
3840 return VINF_SUCCESS;
3841}
3842
3843
3844/** @callback_method_impl{FNCPUMRDMSR} */
3845static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8FidVidStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3846{
3847 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3848 /** @todo AMD FIDVID_STATUS. */
3849 *puValue = pRange->uValue;
3850 return VINF_SUCCESS;
3851}
3852
3853
3854/** @callback_method_impl{FNCPUMRDMSR} */
3855static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8McCtlMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3856{
3857 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3858 /** @todo AMD MC. */
3859 *puValue = 0;
3860 return VINF_SUCCESS;
3861}
3862
3863
3864/** @callback_method_impl{FNCPUMWRMSR} */
3865static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8McCtlMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3866{
3867 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3868 /** @todo AMD MC. */
3869 return VINF_SUCCESS;
3870}
3871
3872
3873/** @callback_method_impl{FNCPUMRDMSR} */
3874static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmiOnIoTrapN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3875{
3876 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3877 /** @todo AMD SMM/SMI and I/O trap. */
3878 *puValue = 0;
3879 return VINF_SUCCESS;
3880}
3881
3882
3883/** @callback_method_impl{FNCPUMWRMSR} */
3884static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmiOnIoTrapN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3885{
3886 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3887 /** @todo AMD SMM/SMI and I/O trap. */
3888 return VINF_SUCCESS;
3889}
3890
3891
3892/** @callback_method_impl{FNCPUMRDMSR} */
3893static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmiOnIoTrapCtlSts(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3894{
3895 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3896 /** @todo AMD SMM/SMI and I/O trap. */
3897 *puValue = 0;
3898 return VINF_SUCCESS;
3899}
3900
3901
3902/** @callback_method_impl{FNCPUMWRMSR} */
3903static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmiOnIoTrapCtlSts(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3904{
3905 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3906 /** @todo AMD SMM/SMI and I/O trap. */
3907 return VINF_SUCCESS;
3908}
3909
3910
3911/** @callback_method_impl{FNCPUMRDMSR} */
3912static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8IntPendingMessage(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3913{
3914 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3915 /** @todo Interrupt pending message. */
3916 *puValue = 0;
3917 return VINF_SUCCESS;
3918}
3919
3920
3921/** @callback_method_impl{FNCPUMWRMSR} */
3922static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8IntPendingMessage(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3923{
3924 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3925 /** @todo Interrupt pending message. */
3926 return VINF_SUCCESS;
3927}
3928
3929
3930/** @callback_method_impl{FNCPUMRDMSR} */
3931static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmiTriggerIoCycle(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3932{
3933 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3934 /** @todo AMD SMM/SMI and trigger I/O cycle. */
3935 *puValue = 0;
3936 return VINF_SUCCESS;
3937}
3938
3939
3940/** @callback_method_impl{FNCPUMWRMSR} */
3941static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmiTriggerIoCycle(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3942{
3943 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3944 /** @todo AMD SMM/SMI and trigger I/O cycle. */
3945 return VINF_SUCCESS;
3946}
3947
3948
3949/** @callback_method_impl{FNCPUMRDMSR} */
3950static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hMmioCfgBaseAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3951{
3952 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3953 /** @todo AMD MMIO Configuration base address. */
3954 *puValue = 0;
3955 return VINF_SUCCESS;
3956}
3957
3958
3959/** @callback_method_impl{FNCPUMWRMSR} */
3960static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hMmioCfgBaseAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3961{
3962 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3963 /** @todo AMD MMIO Configuration base address. */
3964 return VINF_SUCCESS;
3965}
3966
3967
3968/** @callback_method_impl{FNCPUMRDMSR} */
3969static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hTrapCtlMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3970{
3971 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3972 /** @todo AMD 0xc0010059. */
3973 *puValue = 0;
3974 return VINF_SUCCESS;
3975}
3976
3977
3978/** @callback_method_impl{FNCPUMWRMSR} */
3979static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hTrapCtlMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3980{
3981 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3982 /** @todo AMD 0xc0010059. */
3983 return VINF_SUCCESS;
3984}
3985
3986
3987/** @callback_method_impl{FNCPUMRDMSR} */
3988static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hPStateCurLimit(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3989{
3990 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3991 /** @todo AMD P-states. */
3992 *puValue = pRange->uValue;
3993 return VINF_SUCCESS;
3994}
3995
3996
3997/** @callback_method_impl{FNCPUMRDMSR} */
3998static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hPStateControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3999{
4000 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4001 /** @todo AMD P-states. */
4002 *puValue = pRange->uValue;
4003 return VINF_SUCCESS;
4004}
4005
4006
4007/** @callback_method_impl{FNCPUMWRMSR} */
4008static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hPStateControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4009{
4010 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4011 /** @todo AMD P-states. */
4012 return VINF_SUCCESS;
4013}
4014
4015
4016/** @callback_method_impl{FNCPUMRDMSR} */
4017static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hPStateStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4018{
4019 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4020 /** @todo AMD P-states. */
4021 *puValue = pRange->uValue;
4022 return VINF_SUCCESS;
4023}
4024
4025
4026/** @callback_method_impl{FNCPUMWRMSR} */
4027static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hPStateStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4028{
4029 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4030 /** @todo AMD P-states. */
4031 return VINF_SUCCESS;
4032}
4033
4034
4035/** @callback_method_impl{FNCPUMRDMSR} */
4036static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hPStateN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4037{
4038 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4039 /** @todo AMD P-states. */
4040 *puValue = pRange->uValue;
4041 return VINF_SUCCESS;
4042}
4043
4044
4045/** @callback_method_impl{FNCPUMWRMSR} */
4046static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hPStateN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4047{
4048 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4049 /** @todo AMD P-states. */
4050 return VINF_SUCCESS;
4051}
4052
4053
4054/** @callback_method_impl{FNCPUMRDMSR} */
4055static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hCofVidControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4056{
4057 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4058 /** @todo AMD P-states. */
4059 *puValue = pRange->uValue;
4060 return VINF_SUCCESS;
4061}
4062
4063
4064/** @callback_method_impl{FNCPUMWRMSR} */
4065static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hCofVidControl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4066{
4067 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4068 /** @todo AMD P-states. */
4069 return VINF_SUCCESS;
4070}
4071
4072
4073/** @callback_method_impl{FNCPUMRDMSR} */
4074static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hCofVidStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4075{
4076 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4077 /** @todo AMD P-states. */
4078 *puValue = pRange->uValue;
4079 return VINF_SUCCESS;
4080}
4081
4082
4083/** @callback_method_impl{FNCPUMWRMSR} */
4084static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hCofVidStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4085{
4086 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4087 /* Note! Writing 0 does not seem to #GP; unclear whether it affects the value... */
4088 /** @todo AMD P-states. */
4089 return VINF_SUCCESS;
4090}
4091
4092
4093/** @callback_method_impl{FNCPUMRDMSR} */
4094static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hCStateIoBaseAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4095{
4096 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4097 /** @todo AMD C-states. */
4098 *puValue = 0;
4099 return VINF_SUCCESS;
4100}
4101
4102
4103/** @callback_method_impl{FNCPUMWRMSR} */
4104static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hCStateIoBaseAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4105{
4106 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4107 /** @todo AMD C-states. */
4108 return VINF_SUCCESS;
4109}
4110
4111
4112/** @callback_method_impl{FNCPUMRDMSR} */
4113static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hCpuWatchdogTimer(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4114{
4115 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4116 /** @todo AMD machine checks. */
4117 *puValue = 0;
4118 return VINF_SUCCESS;
4119}
4120
4121
4122/** @callback_method_impl{FNCPUMWRMSR} */
4123static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hCpuWatchdogTimer(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4124{
4125 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4126 /** @todo AMD machine checks. */
4127 return VINF_SUCCESS;
4128}
4129
4130
4131/** @callback_method_impl{FNCPUMRDMSR} */
4132static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmmBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4133{
4134 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4135 /** @todo AMD SMM. */
4136 *puValue = 0;
4137 return VINF_SUCCESS;
4138}
4139
4140
4141/** @callback_method_impl{FNCPUMWRMSR} */
4142static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmmBase(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4143{
4144 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4145 /** @todo AMD SMM. */
4146 return VINF_SUCCESS;
4147}
4148
4149
4150/** @callback_method_impl{FNCPUMRDMSR} */
4151static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmmAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4152{
4153 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4154 /** @todo AMD SMM. */
4155 *puValue = 0;
4156 return VINF_SUCCESS;
4157}
4158
4159
4160/** @callback_method_impl{FNCPUMWRMSR} */
4161static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmmAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4162{
4163 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4164 /** @todo AMD SMM. */
4165 return VINF_SUCCESS;
4166}
4167
4168
4169
4170/** @callback_method_impl{FNCPUMRDMSR} */
4171static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmmMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4172{
4173 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4174 /** @todo AMD SMM. */
4175 *puValue = 0;
4176 return VINF_SUCCESS;
4177}
4178
4179
4180/** @callback_method_impl{FNCPUMWRMSR} */
4181static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmmMask(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4182{
4183 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4184 /** @todo AMD SMM. */
4185 return VINF_SUCCESS;
4186}
4187
4188
4189/** @callback_method_impl{FNCPUMRDMSR} */
4190static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8VmCr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4191{
4192 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4193 PVM pVM = pVCpu->CTX_SUFF(pVM);
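    /* When SVM is exposed to the guest, only the LOCK bit is reported as set; everything
       else reads as zero (see the write handler below for which bits #GP). */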
4194 if (pVM->cpum.s.GuestFeatures.fSvm)
4195 *puValue = MSR_K8_VM_CR_LOCK;
4196 else
4197 *puValue = 0;
4198 return VINF_SUCCESS;
4199}
4200
4201
4202/** @callback_method_impl{FNCPUMWRMSR} */
4203static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8VmCr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4204{
4205 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
4206 PVM pVM = pVCpu->CTX_SUFF(pVM);
4207 if (pVM->cpum.s.GuestFeatures.fSvm)
4208 {
4209 /* Silently ignore writes to the LOCK and SVM_DISABLE bits when the LOCK bit is set (see cpumMsrRd_AmdK8VmCr). */
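        /* Attempts to set the DPD, R_INIT or DIS_A20M bits raise #GP(0); other writes are accepted but not stored. */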
4210 if (uValue & (MSR_K8_VM_CR_DPD | MSR_K8_VM_CR_R_INIT | MSR_K8_VM_CR_DIS_A20M))
4211 return VERR_CPUM_RAISE_GP_0;
4212 return VINF_SUCCESS;
4213 }
4214 return VERR_CPUM_RAISE_GP_0;
4215}
4216
4217
4218/** @callback_method_impl{FNCPUMRDMSR} */
4219static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8IgnNe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4220{
4221 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4222 /** @todo AMD IGNNE\# control. */
4223 *puValue = 0;
4224 return VINF_SUCCESS;
4225}
4226
4227
4228/** @callback_method_impl{FNCPUMWRMSR} */
4229static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8IgnNe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4230{
4231 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4232 /** @todo AMD IGNNE\# control. */
4233 return VINF_SUCCESS;
4234}
4235
4236
4237/** @callback_method_impl{FNCPUMRDMSR} */
4238static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmmCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4239{
4240 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4241 /** @todo AMD SMM. */
4242 *puValue = 0;
4243 return VINF_SUCCESS;
4244}
4245
4246
4247/** @callback_method_impl{FNCPUMWRMSR} */
4248static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmmCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4249{
4250 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4251 /** @todo AMD SMM. */
4252 return VINF_SUCCESS;
4253}
4254
4255
4256/** @callback_method_impl{FNCPUMRDMSR} */
4257static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8VmHSavePa(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4258{
4259 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4260 *puValue = pVCpu->cpum.s.Guest.hwvirt.svm.uMsrHSavePa;
4261 return VINF_SUCCESS;
4262}
4263
4264
4265/** @callback_method_impl{FNCPUMWRMSR} */
4266static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8VmHSavePa(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4267{
4268 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
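    /* VM_HSAVE_PA must be a 4KB-aligned physical address, so a write with any of the low
       12 bits or any bit above the guest's maximum physical address width set raises #GP(0). */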
4269 if (uValue & UINT64_C(0xfff))
4270 {
4271 Log(("CPUM: Invalid setting of low 12 bits set writing host-state save area MSR %#x: %#llx\n", idMsr, uValue));
4272 return VERR_CPUM_RAISE_GP_0;
4273 }
4274
4275 uint64_t fInvPhysMask = ~(RT_BIT_64(pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.cMaxPhysAddrWidth) - 1U);
4276 if (fInvPhysMask & uValue)
4277 {
4278 Log(("CPUM: Invalid physical address bits set writing host-state save area MSR %#x: %#llx (%#llx)\n",
4279 idMsr, uValue, uValue & fInvPhysMask));
4280 return VERR_CPUM_RAISE_GP_0;
4281 }
4282
4283 pVCpu->cpum.s.Guest.hwvirt.svm.uMsrHSavePa = uValue;
4284 return VINF_SUCCESS;
4285}
4286
4287
4288/** @callback_method_impl{FNCPUMRDMSR} */
4289static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hVmLockKey(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4290{
4291 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4292 /** @todo AMD SVM. */
4293 *puValue = 0; /* RAZ */
4294 return VINF_SUCCESS;
4295}
4296
4297
4298/** @callback_method_impl{FNCPUMWRMSR} */
4299static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hVmLockKey(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4300{
4301 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4302 /** @todo AMD SVM. */
4303 return VINF_SUCCESS;
4304}
4305
4306
4307/** @callback_method_impl{FNCPUMRDMSR} */
4308static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hSmmLockKey(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4309{
4310 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4311 /** @todo AMD SMM. */
4312 *puValue = 0; /* RAZ */
4313 return VINF_SUCCESS;
4314}
4315
4316
4317/** @callback_method_impl{FNCPUMWRMSR} */
4318static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hSmmLockKey(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4319{
4320 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4321 /** @todo AMD SMM. */
4322 return VINF_SUCCESS;
4323}
4324
4325
4326/** @callback_method_impl{FNCPUMRDMSR} */
4327static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hLocalSmiStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4328{
4329 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4330 /** @todo AMD SMM/SMI. */
4331 *puValue = 0;
4332 return VINF_SUCCESS;
4333}
4334
4335
4336/** @callback_method_impl{FNCPUMWRMSR} */
4337static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hLocalSmiStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4338{
4339 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4340 /** @todo AMD SMM/SMI. */
4341 return VINF_SUCCESS;
4342}
4343
4344
4345/** @callback_method_impl{FNCPUMRDMSR} */
4346static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hOsVisWrkIdLength(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4347{
4348 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
4349 /** @todo AMD OS visible workaround. */
4350 *puValue = pRange->uValue;
4351 return VINF_SUCCESS;
4352}
4353
4354
4355/** @callback_method_impl{FNCPUMWRMSR} */
4356static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hOsVisWrkIdLength(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4357{
4358 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4359 /** @todo AMD OS visible workaround. */
4360 return VINF_SUCCESS;
4361}
4362
4363
4364/** @callback_method_impl{FNCPUMRDMSR} */
4365static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hOsVisWrkStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4366{
4367 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4368 /** @todo AMD OS visible workaround. */
4369 *puValue = 0;
4370 return VINF_SUCCESS;
4371}
4372
4373
4374/** @callback_method_impl{FNCPUMWRMSR} */
4375static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hOsVisWrkStatus(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4376{
4377 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4378 /** @todo AMD OS visible workaround. */
4379 return VINF_SUCCESS;
4380}
4381
4382
4383/** @callback_method_impl{FNCPUMRDMSR} */
4384static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam16hL2IPerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4385{
4386 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4387 /** @todo AMD L2I performance counters. */
4388 *puValue = 0;
4389 return VINF_SUCCESS;
4390}
4391
4392
4393/** @callback_method_impl{FNCPUMWRMSR} */
4394static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam16hL2IPerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4395{
4396 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4397 /** @todo AMD L2I performance counters. */
4398 return VINF_SUCCESS;
4399}
4400
4401
4402/** @callback_method_impl{FNCPUMRDMSR} */
4403static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam16hL2IPerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4404{
4405 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4406 /** @todo AMD L2I performance counters. */
4407 *puValue = 0;
4408 return VINF_SUCCESS;
4409}
4410
4411
4412/** @callback_method_impl{FNCPUMWRMSR} */
4413static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam16hL2IPerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4414{
4415 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4416 /** @todo AMD L2I performance counters. */
4417 return VINF_SUCCESS;
4418}
4419
4420
4421/** @callback_method_impl{FNCPUMRDMSR} */
4422static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hNorthbridgePerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4423{
4424 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4425 /** @todo AMD Northbridge performance counters. */
4426 *puValue = 0;
4427 return VINF_SUCCESS;
4428}
4429
4430
4431/** @callback_method_impl{FNCPUMWRMSR} */
4432static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hNorthbridgePerfCtlN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4433{
4434 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4435 /** @todo AMD Northbridge performance counters. */
4436 return VINF_SUCCESS;
4437}
4438
4439
4440/** @callback_method_impl{FNCPUMRDMSR} */
4441static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hNorthbridgePerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4442{
4443 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4444 /** @todo AMD Northbridge performance counters. */
4445 *puValue = 0;
4446 return VINF_SUCCESS;
4447}
4448
4449
4450/** @callback_method_impl{FNCPUMWRMSR} */
4451static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hNorthbridgePerfCtrN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4452{
4453 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4454 /** @todo AMD Northbridge performance counters. */
4455 return VINF_SUCCESS;
4456}
4457
4458
4459/** @callback_method_impl{FNCPUMRDMSR} */
4460static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7MicrocodeCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4461{
4462 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4463 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4464 * CPUs. Needs to be explored and K7 presence verified. */
4465 /** @todo Undocumented register only seen mentioned in fam15h erratum \#608. */
4466 *puValue = pRange->uValue;
4467 return VINF_SUCCESS;
4468}
4469
4470
4471/** @callback_method_impl{FNCPUMWRMSR} */
4472static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7MicrocodeCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4473{
4474 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4475 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4476 * CPUs. Needs to be explored and K7 presence verified. */
4477 /** @todo Undocumented register only seen mentioned in fam15h erratum \#608. */
4478 return VINF_SUCCESS;
4479}
4480
4481
4482/** @callback_method_impl{FNCPUMRDMSR} */
4483static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7ClusterIdMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4484{
4485 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4486 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4487 * CPUs. Needs to be explored and K7 presence verified. */
4488 /** @todo Undocumented register only seen mentioned in fam16h BKDG r3.00 when
4489 * describing EBL_CR_POWERON. */
4490 *puValue = pRange->uValue;
4491 return VINF_SUCCESS;
4492}
4493
4494
4495/** @callback_method_impl{FNCPUMWRMSR} */
4496static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7ClusterIdMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4497{
4498 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4499 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4500 * CPUs. Needs to be explored and K7 presence verified. */
4501 /** @todo Undocumented register only seen mentioned in fam16h BKDG r3.00 when
4502 * describing EBL_CR_POWERON. */
4503 return VINF_SUCCESS;
4504}
4505
4506
4507/** @callback_method_impl{FNCPUMRDMSR} */
4508static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuIdCtlStd07hEbax(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4509{
4510 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4511 bool fIgnored;
4512 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeafEx(pVCpu->CTX_SUFF(pVM), 0x00000007, 0, &fIgnored);
4513 if (pLeaf)
4514 *puValue = RT_MAKE_U64(pLeaf->uEbx, pLeaf->uEax);
4515 else
4516 *puValue = 0;
4517 return VINF_SUCCESS;
4518}
4519
4520
4521/** @callback_method_impl{FNCPUMWRMSR} */
4522static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuIdCtlStd07hEbax(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4523{
4524 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4525 /** @todo Changing CPUID leaf 7/0. */
4526 return VINF_SUCCESS;
4527}
4528
4529
4530/** @callback_method_impl{FNCPUMRDMSR} */
4531static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuIdCtlStd06hEcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4532{
4533 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4534 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeaf(pVCpu->CTX_SUFF(pVM), 0x00000006);
4535 if (pLeaf)
4536 *puValue = pLeaf->uEcx;
4537 else
4538 *puValue = 0;
4539 return VINF_SUCCESS;
4540}
4541
4542
4543/** @callback_method_impl{FNCPUMWRMSR} */
4544static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuIdCtlStd06hEcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4545{
4546 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4547 /** @todo Changing CPUID leaf 6. */
4548 return VINF_SUCCESS;
4549}
4550
4551
4552/** @callback_method_impl{FNCPUMRDMSR} */
4553static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuIdCtlStd01hEdcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4554{
4555 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4556 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeaf(pVCpu->CTX_SUFF(pVM), 0x00000001);
4557 if (pLeaf)
4558 *puValue = RT_MAKE_U64(pLeaf->uEdx, pLeaf->uEcx);
4559 else
4560 *puValue = 0;
4561 return VINF_SUCCESS;
4562}
4563
4564
4565/** @callback_method_impl{FNCPUMWRMSR} */
4566static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuIdCtlStd01hEdcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4567{
4568 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4569 /** @todo Changing CPUID leaf 0x00000001. */
4570 return VINF_SUCCESS;
4571}
4572
4573
4574/** @callback_method_impl{FNCPUMRDMSR} */
4575static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuIdCtlExt01hEdcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4576{
4577 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4578 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeaf(pVCpu->CTX_SUFF(pVM), 0x80000001);
4579 if (pLeaf)
4580 *puValue = RT_MAKE_U64(pLeaf->uEdx, pLeaf->uEcx);
4581 else
4582 *puValue = 0;
4583 return VINF_SUCCESS;
4584}
4585
4586
4587/** @callback_method_impl{FNCPUMWRMSR} */
4588static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuIdCtlExt01hEdcx(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4589{
4590 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4591 /** @todo Changing CPUID leaf 0x80000001. */
4592 return VINF_SUCCESS;
4593}
4594
4595
4596/** @callback_method_impl{FNCPUMRDMSR} */
4597static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8PatchLevel(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4598{
4599 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4600 /** @todo Fake AMD microcode patching. */
4601 *puValue = pRange->uValue;
4602 return VINF_SUCCESS;
4603}
4604
4605
4606/** @callback_method_impl{FNCPUMWRMSR} */
4607static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8PatchLoader(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4608{
4609 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4610 /** @todo Fake AMD microcode patching. */
4611 return VINF_SUCCESS;
4612}
4613
4614
4615/** @callback_method_impl{FNCPUMRDMSR} */
4616static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7DebugStatusMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4617{
4618 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4619 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4620 * CPUs. Needs to be explored and K7 presence verified. */
4621 /** @todo undocumented */
4622 *puValue = 0;
4623 return VINF_SUCCESS;
4624}
4625
4626
4627/** @callback_method_impl{FNCPUMWRMSR} */
4628static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7DebugStatusMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4629{
4630 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4631 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4632 * CPUs. Needs to be explored and K7 presence verified. */
4633 /** @todo undocumented */
4634 return VINF_SUCCESS;
4635}
4636
4637
4638/** @callback_method_impl{FNCPUMRDMSR} */
4639static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7BHTraceBaseMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4640{
4641 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4642 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4643 * CPUs. Needs to be explored and K7 presence verified. */
4644 /** @todo undocumented */
4645 *puValue = 0;
4646 return VINF_SUCCESS;
4647}
4648
4649
4650/** @callback_method_impl{FNCPUMWRMSR} */
4651static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7BHTraceBaseMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4652{
4653 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4654 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4655 * CPUs. Needs to be explored and K7 presence verified. */
4656 /** @todo undocumented */
4657 return VINF_SUCCESS;
4658}
4659
4660
4661/** @callback_method_impl{FNCPUMRDMSR} */
4662static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7BHTracePtrMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4663{
4664 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4665 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4666 * CPUs. Needs to be explored and K7 presence verified. */
4667 /** @todo undocumented */
4668 *puValue = 0;
4669 return VINF_SUCCESS;
4670}
4671
4672
4673/** @callback_method_impl{FNCPUMWRMSR} */
4674static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7BHTracePtrMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4675{
4676 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4677 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4678 * CPUs. Needs to be explored and K7 presence verified. */
4679 /** @todo undocumented */
4680 return VINF_SUCCESS;
4681}
4682
4683
4684/** @callback_method_impl{FNCPUMRDMSR} */
4685static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7BHTraceLimitMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4686{
4687 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4688 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4689 * CPUs. Needs to be explored and K7 presence verified. */
4690 /** @todo undocumented */
4691 *puValue = 0;
4692 return VINF_SUCCESS;
4693}
4694
4695
4696/** @callback_method_impl{FNCPUMWRMSR} */
4697static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7BHTraceLimitMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4698{
4699 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4700 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4701 * CPUs. Needs to be explored and K7 presence verified. */
4702 /** @todo undocumented */
4703 return VINF_SUCCESS;
4704}
4705
4706
4707/** @callback_method_impl{FNCPUMRDMSR} */
4708static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7HardwareDebugToolCfgMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4709{
4710 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4711 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4712 * CPUs. Needs to be explored and K7 presence verified. */
4713 /** @todo undocumented */
4714 *puValue = 0;
4715 return VINF_SUCCESS;
4716}
4717
4718
4719/** @callback_method_impl{FNCPUMWRMSR} */
4720static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7HardwareDebugToolCfgMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4721{
4722 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4723 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4724 * CPUs. Needs to be explored and K7 presence verified. */
4725 /** @todo undocumented */
4726 return VINF_SUCCESS;
4727}
4728
4729
4730/** @callback_method_impl{FNCPUMRDMSR} */
4731static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7FastFlushCountMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4732{
4733 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4734 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4735 * CPUs. Needs to be explored and K7 presence verified. */
4736 /** @todo undocumented */
4737 *puValue = 0;
4738 return VINF_SUCCESS;
4739}
4740
4741
4742/** @callback_method_impl{FNCPUMWRMSR} */
4743static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7FastFlushCountMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4744{
4745 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4746 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4747 * CPUs. Needs to be explored and K7 presence verified. */
4748 /** @todo undocumented */
4749 return VINF_SUCCESS;
4750}
4751
4752
4753/** @callback_method_impl{FNCPUMRDMSR} */
4754static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7NodeId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4755{
4756 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4757 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4758 * CPUs. Needs to be explored and K7 presence verified. */
4759 /** @todo AMD node ID and BIOS scratch. */
4760 *puValue = 0; /* nodeid = 0; nodes-per-cpu = 1 */
4761 return VINF_SUCCESS;
4762}
4763
4764
4765/** @callback_method_impl{FNCPUMWRMSR} */
4766static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7NodeId(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4767{
4768 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4769 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4770 * CPUs. Needs to be explored and K7 presence verified. */
4771 /** @todo AMD node ID and BIOS scratch. */
4772 return VINF_SUCCESS;
4773}
4774
4775
4776/** @callback_method_impl{FNCPUMRDMSR} */
4777static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7DrXAddrMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4778{
4779 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4780 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4781 * CPUs. Needs to be explored and K7 presence verified. */
4782 /** @todo AMD DRx address masking (range breakpoints). */
4783 *puValue = 0;
4784 return VINF_SUCCESS;
4785}
4786
4787
4788/** @callback_method_impl{FNCPUMWRMSR} */
4789static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7DrXAddrMaskN(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4790{
4791 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4792 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4793 * CPUs. Needs to be explored and K7 presence verified. */
4794 /** @todo AMD DRx address masking (range breakpoints). */
4795 return VINF_SUCCESS;
4796}
4797
4798
4799/** @callback_method_impl{FNCPUMRDMSR} */
4800static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7Dr0DataMatchMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4801{
4802 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4803 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4804 * CPUs. Needs to be explored and K7 presence verified. */
4805 /** @todo AMD undocumented debugging features. */
4806 *puValue = 0;
4807 return VINF_SUCCESS;
4808}
4809
4810
4811/** @callback_method_impl{FNCPUMWRMSR} */
4812static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7Dr0DataMatchMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4813{
4814 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4815 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4816 * CPUs. Needs to be explored and K7 presence verified. */
4817 /** @todo AMD undocumented debugging features. */
4818 return VINF_SUCCESS;
4819}
4820
4821
4822/** @callback_method_impl{FNCPUMRDMSR} */
4823static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7Dr0DataMaskMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4824{
4825 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4826 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4827 * CPUs. Needs to be explored and K7 presence verified. */
4828 /** @todo AMD undocumented debugging features. */
4829 *puValue = 0;
4830 return VINF_SUCCESS;
4831}
4832
4833
4834/** @callback_method_impl{FNCPUMWRMSR} */
4835static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7Dr0DataMaskMaybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4836{
4837 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4838 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4839 * CPUs. Needs to be explored and K7 presence verified. */
4840 /** @todo AMD undocumented debugging features. */
4841 return VINF_SUCCESS;
4842}
4843
4844
4845/** @callback_method_impl{FNCPUMRDMSR} */
4846static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7LoadStoreCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4847{
4848 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4849 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4850 * CPUs. Needs to be explored and K7 presence verified. */
4851 /** @todo AMD load-store config. */
4852 *puValue = 0;
4853 return VINF_SUCCESS;
4854}
4855
4856
4857/** @callback_method_impl{FNCPUMWRMSR} */
4858static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7LoadStoreCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4859{
4860 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4861 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4862 * CPUs. Needs to be explored and K7 presence verified. */
4863 /** @todo AMD load-store config. */
4864 return VINF_SUCCESS;
4865}
4866
4867
4868/** @callback_method_impl{FNCPUMRDMSR} */
4869static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7InstrCacheCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4870{
4871 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4872 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4873 * CPUs. Needs to be explored and K7 presence verified. */
4874 /** @todo AMD instruction cache config. */
4875 *puValue = 0;
4876 return VINF_SUCCESS;
4877}
4878
4879
4880/** @callback_method_impl{FNCPUMWRMSR} */
4881static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7InstrCacheCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4882{
4883 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4884 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4885 * CPUs. Needs to be explored and K7 presence verified. */
4886 /** @todo AMD instruction cache config. */
4887 return VINF_SUCCESS;
4888}
4889
4890
4891/** @callback_method_impl{FNCPUMRDMSR} */
4892static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7DataCacheCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4893{
4894 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4895 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4896 * CPUs. Needs to be explored and K7 presence verified. */
4897 /** @todo AMD data cache config. */
4898 *puValue = 0;
4899 return VINF_SUCCESS;
4900}
4901
4902
4903/** @callback_method_impl{FNCPUMWRMSR} */
4904static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7DataCacheCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4905{
4906 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4907 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4908 * CPUs. Needs to be explored and K7 presence verified. */
4909 /** @todo AMD data cache config. */
4910 return VINF_SUCCESS;
4911}
4912
4913
4914/** @callback_method_impl{FNCPUMRDMSR} */
4915static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7BusUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4916{
4917 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4918 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4919 * CPUs. Needs to be explored and K7 presence verified. */
4920 /** @todo AMD bus unit config. */
4921 *puValue = 0;
4922 return VINF_SUCCESS;
4923}
4924
4925
4926/** @callback_method_impl{FNCPUMWRMSR} */
4927static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7BusUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4928{
4929 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4930 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4931 * CPUs. Needs to be explored and K7 presence verified. */
4932 /** @todo AMD bus unit config. */
4933 return VINF_SUCCESS;
4934}
4935
4936
4937/** @callback_method_impl{FNCPUMRDMSR} */
4938static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7DebugCtl2Maybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4939{
4940 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4941 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4942 * CPUs. Needs to be explored and K7 presence verified. */
4943 /** @todo Undocumented AMD debug control register \#2. */
4944 *puValue = 0;
4945 return VINF_SUCCESS;
4946}
4947
4948
4949/** @callback_method_impl{FNCPUMWRMSR} */
4950static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7DebugCtl2Maybe(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4951{
4952 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4953 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4954 * CPUs. Needs to be explored and K7 presence verified. */
4955 /** @todo Undocumented AMD debug control register \#2. */
4956 return VINF_SUCCESS;
4957}
4958
4959
4960/** @callback_method_impl{FNCPUMRDMSR} */
4961static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hFpuCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4962{
4963 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4964 /** @todo AMD FPU config. */
4965 *puValue = 0;
4966 return VINF_SUCCESS;
4967}
4968
4969
4970/** @callback_method_impl{FNCPUMWRMSR} */
4971static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hFpuCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4972{
4973 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4974 /** @todo AMD FPU config. */
4975 return VINF_SUCCESS;
4976}
4977
4978
4979/** @callback_method_impl{FNCPUMRDMSR} */
4980static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hDecoderCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4981{
4982 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4983 /** @todo AMD decoder config. */
4984 *puValue = 0;
4985 return VINF_SUCCESS;
4986}
4987
4988
4989/** @callback_method_impl{FNCPUMWRMSR} */
4990static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hDecoderCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4991{
4992 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4993 /** @todo AMD decoder config. */
4994 return VINF_SUCCESS;
4995}
4996
4997
4998/** @callback_method_impl{FNCPUMRDMSR} */
4999static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hBusUnitCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5000{
5001 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5002 /* Note! Applies to both family 10h and 16h. */
5003 /** @todo AMD bus unit config. */
5004 *puValue = 0;
5005 return VINF_SUCCESS;
5006}
5007
5008
5009/** @callback_method_impl{FNCPUMWRMSR} */
5010static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hBusUnitCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5011{
5012 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5013 /* Note! Applies to both family 10h and 16h. */
5014 /** @todo AMD bus unit config. */
5015 return VINF_SUCCESS;
5016}
5017
5018
5019/** @callback_method_impl{FNCPUMRDMSR} */
5020static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hCombUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5021{
5022 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5023 /** @todo AMD unit config. */
5024 *puValue = 0;
5025 return VINF_SUCCESS;
5026}
5027
5028
5029/** @callback_method_impl{FNCPUMWRMSR} */
5030static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hCombUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5031{
5032 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5033 /** @todo AMD unit config. */
5034 return VINF_SUCCESS;
5035}
5036
5037
5038/** @callback_method_impl{FNCPUMRDMSR} */
5039static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hCombUnitCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5040{
5041 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5042 /** @todo AMD unit config 2. */
5043 *puValue = 0;
5044 return VINF_SUCCESS;
5045}
5046
5047
5048/** @callback_method_impl{FNCPUMWRMSR} */
5049static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hCombUnitCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5050{
5051 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5052 /** @todo AMD unit config 2. */
5053 return VINF_SUCCESS;
5054}
5055
5056
5057/** @callback_method_impl{FNCPUMRDMSR} */
5058static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hCombUnitCfg3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5059{
5060 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5061 /** @todo AMD combined unit config 3. */
5062 *puValue = 0;
5063 return VINF_SUCCESS;
5064}
5065
5066
5067/** @callback_method_impl{FNCPUMWRMSR} */
5068static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hCombUnitCfg3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5069{
5070 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5071 /** @todo AMD combined unit config 3. */
5072 return VINF_SUCCESS;
5073}
5074
5075
5076/** @callback_method_impl{FNCPUMRDMSR} */
5077static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hExecUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5078{
5079 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5080 /** @todo AMD execution unit config. */
5081 *puValue = 0;
5082 return VINF_SUCCESS;
5083}
5084
5085
5086/** @callback_method_impl{FNCPUMWRMSR} */
5087static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hExecUnitCfg(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5088{
5089 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5090 /** @todo AMD execution unit config. */
5091 return VINF_SUCCESS;
5092}
5093
5094
5095/** @callback_method_impl{FNCPUMRDMSR} */
5096static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hLoadStoreCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5097{
5098 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5099 /** @todo AMD load-store config 2. */
5100 *puValue = 0;
5101 return VINF_SUCCESS;
5102}
5103
5104
5105/** @callback_method_impl{FNCPUMWRMSR} */
5106static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hLoadStoreCfg2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5107{
5108 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5109 /** @todo AMD load-store config 2. */
5110 return VINF_SUCCESS;
5111}
5112
5113
5114/** @callback_method_impl{FNCPUMRDMSR} */
5115static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsFetchCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5116{
5117 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5118 /** @todo AMD IBS. */
5119 *puValue = 0;
5120 return VINF_SUCCESS;
5121}
5122
5123
5124/** @callback_method_impl{FNCPUMWRMSR} */
5125static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsFetchCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5126{
5127 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5128 /** @todo AMD IBS. */
5129 return VINF_SUCCESS;
5130}
5131
5132
5133/** @callback_method_impl{FNCPUMRDMSR} */
5134static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsFetchLinAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5135{
5136 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5137 /** @todo AMD IBS. */
5138 *puValue = 0;
5139 return VINF_SUCCESS;
5140}
5141
5142
5143/** @callback_method_impl{FNCPUMWRMSR} */
5144static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsFetchLinAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5145{
5146 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5147 /** @todo AMD IBS. */
5148 return VINF_SUCCESS;
5149}
5150
5151
5152/** @callback_method_impl{FNCPUMRDMSR} */
5153static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsFetchPhysAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5154{
5155 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5156 /** @todo AMD IBS. */
5157 *puValue = 0;
5158 return VINF_SUCCESS;
5159}
5160
5161
5162/** @callback_method_impl{FNCPUMWRMSR} */
5163static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsFetchPhysAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5164{
5165 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5166 /** @todo AMD IBS. */
5167 return VINF_SUCCESS;
5168}
5169
5170
5171/** @callback_method_impl{FNCPUMRDMSR} */
5172static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpExecCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5173{
5174 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5175 /** @todo AMD IBS. */
5176 *puValue = 0;
5177 return VINF_SUCCESS;
5178}
5179
5180
5181/** @callback_method_impl{FNCPUMWRMSR} */
5182static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpExecCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5183{
5184 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5185 /** @todo AMD IBS. */
5186 return VINF_SUCCESS;
5187}
5188
5189
5190/** @callback_method_impl{FNCPUMRDMSR} */
5191static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpRip(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5192{
5193 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5194 /** @todo AMD IBS. */
5195 *puValue = 0;
5196 return VINF_SUCCESS;
5197}
5198
5199
5200/** @callback_method_impl{FNCPUMWRMSR} */
5201static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpRip(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5202{
5203 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5204 /** @todo AMD IBS. */
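 /* Descriptive note (editorial): the IBS op RIP holds a linear address; non-canonical values are rejected with #GP(0) below. */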
5205 if (!X86_IS_CANONICAL(uValue))
5206 {
5207 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
5208 return VERR_CPUM_RAISE_GP_0;
5209 }
5210 return VINF_SUCCESS;
5211}
5212
5213
5214/** @callback_method_impl{FNCPUMRDMSR} */
5215static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpData(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5216{
5217 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5218 /** @todo AMD IBS. */
5219 *puValue = 0;
5220 return VINF_SUCCESS;
5221}
5222
5223
5224/** @callback_method_impl{FNCPUMWRMSR} */
5225static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpData(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5226{
5227 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5228 /** @todo AMD IBS. */
5229 return VINF_SUCCESS;
5230}
5231
5232
5233/** @callback_method_impl{FNCPUMRDMSR} */
5234static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpData2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5235{
5236 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5237 /** @todo AMD IBS. */
5238 *puValue = 0;
5239 return VINF_SUCCESS;
5240}
5241
5242
5243/** @callback_method_impl{FNCPUMWRMSR} */
5244static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpData2(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5245{
5246 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5247 /** @todo AMD IBS. */
5248 return VINF_SUCCESS;
5249}
5250
5251
5252/** @callback_method_impl{FNCPUMRDMSR} */
5253static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpData3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5254{
5255 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5256 /** @todo AMD IBS. */
5257 *puValue = 0;
5258 return VINF_SUCCESS;
5259}
5260
5261
5262/** @callback_method_impl{FNCPUMWRMSR} */
5263static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpData3(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5264{
5265 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5266 /** @todo AMD IBS. */
5267 return VINF_SUCCESS;
5268}
5269
5270
5271/** @callback_method_impl{FNCPUMRDMSR} */
5272static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsDcLinAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5273{
5274 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5275 /** @todo AMD IBS. */
5276 *puValue = 0;
5277 return VINF_SUCCESS;
5278}
5279
5280
5281/** @callback_method_impl{FNCPUMWRMSR} */
5282static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsDcLinAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5283{
5284 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5285 /** @todo AMD IBS. */
5286 if (!X86_IS_CANONICAL(uValue))
5287 {
5288 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
5289 return VERR_CPUM_RAISE_GP_0;
5290 }
5291 return VINF_SUCCESS;
5292}
5293
5294
5295/** @callback_method_impl{FNCPUMRDMSR} */
5296static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsDcPhysAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5297{
5298 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5299 /** @todo AMD IBS. */
5300 *puValue = 0;
5301 return VINF_SUCCESS;
5302}
5303
5304
5305/** @callback_method_impl{FNCPUMWRMSR} */
5306static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsDcPhysAddr(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5307{
5308 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5309 /** @todo AMD IBS. */
5310 return VINF_SUCCESS;
5311}
5312
5313
5314/** @callback_method_impl{FNCPUMRDMSR} */
5315static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5316{
5317 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5318 /** @todo AMD IBS. */
5319 *puValue = 0;
5320 return VINF_SUCCESS;
5321}
5322
5323
5324/** @callback_method_impl{FNCPUMWRMSR} */
5325static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsCtl(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5326{
5327 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5328 /** @todo AMD IBS. */
5329 return VINF_SUCCESS;
5330}
5331
5332
5333/** @callback_method_impl{FNCPUMRDMSR} */
5334static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam14hIbsBrTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5335{
5336 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5337 /** @todo AMD IBS. */
5338 *puValue = 0;
5339 return VINF_SUCCESS;
5340}
5341
5342
5343/** @callback_method_impl{FNCPUMWRMSR} */
5344static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam14hIbsBrTarget(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5345{
5346 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5347 /** @todo AMD IBS. */
5348 if (!X86_IS_CANONICAL(uValue))
5349 {
5350 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
5351 return VERR_CPUM_RAISE_GP_0;
5352 }
5353 return VINF_SUCCESS;
5354}
5355
5356
5357
5358/*
5359 * GIM MSRs.
5360 * GIM MSRs.
5361 * GIM MSRs.
5362 */
5363
5364
5365/** @callback_method_impl{FNCPUMRDMSR} */
5366static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Gim(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5367{
5368#if defined(VBOX_WITH_NESTED_HWVIRT_SVM) || defined(VBOX_WITH_NESTED_HWVIRT_VMX)
5369 /* Raise #GP(0) like a physical CPU would since the nested-hypervisor hasn't intercepted these MSRs. */
5370 if ( CPUMIsGuestInSvmNestedHwVirtMode(&pVCpu->cpum.s.Guest)
5371 || CPUMIsGuestInVmxNonRootMode(&pVCpu->cpum.s.Guest))
5372 return VERR_CPUM_RAISE_GP_0;
5373#endif
5374 return GIMReadMsr(pVCpu, idMsr, pRange, puValue);
5375}
5376
5377
5378/** @callback_method_impl{FNCPUMWRMSR} */
5379static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Gim(PVMCPU pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5380{
5381#if defined(VBOX_WITH_NESTED_HWVIRT_SVM) || defined(VBOX_WITH_NESTED_HWVIRT_VMX)
5382 /* Raise #GP(0) like a physical CPU would since the nested-hypervisor hasn't intercepted these MSRs. */
5383 if ( CPUMIsGuestInSvmNestedHwVirtMode(&pVCpu->cpum.s.Guest)
5384 || CPUMIsGuestInVmxNonRootMode(&pVCpu->cpum.s.Guest))
5385 return VERR_CPUM_RAISE_GP_0;
5386#endif
5387 return GIMWriteMsr(pVCpu, idMsr, pRange, uValue, uRawValue);
5388}
5389
5390
5391/**
5392 * MSR read function table.
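 * Note (editorial): indexed by the kCpumMsrRdFn_* values, so the entry order here must match that enumeration exactly.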
5393 */
5394static const PFNCPUMRDMSR g_aCpumRdMsrFns[kCpumMsrRdFn_End] =
5395{
5396 NULL, /* Invalid */
5397 cpumMsrRd_FixedValue,
5398 NULL, /* Alias */
5399 cpumMsrRd_WriteOnly,
5400 cpumMsrRd_Ia32P5McAddr,
5401 cpumMsrRd_Ia32P5McType,
5402 cpumMsrRd_Ia32TimestampCounter,
5403 cpumMsrRd_Ia32PlatformId,
5404 cpumMsrRd_Ia32ApicBase,
5405 cpumMsrRd_Ia32FeatureControl,
5406 cpumMsrRd_Ia32BiosSignId,
5407 cpumMsrRd_Ia32SmmMonitorCtl,
5408 cpumMsrRd_Ia32PmcN,
5409 cpumMsrRd_Ia32MonitorFilterLineSize,
5410 cpumMsrRd_Ia32MPerf,
5411 cpumMsrRd_Ia32APerf,
5412 cpumMsrRd_Ia32MtrrCap,
5413 cpumMsrRd_Ia32MtrrPhysBaseN,
5414 cpumMsrRd_Ia32MtrrPhysMaskN,
5415 cpumMsrRd_Ia32MtrrFixed,
5416 cpumMsrRd_Ia32MtrrDefType,
5417 cpumMsrRd_Ia32Pat,
5418 cpumMsrRd_Ia32SysEnterCs,
5419 cpumMsrRd_Ia32SysEnterEsp,
5420 cpumMsrRd_Ia32SysEnterEip,
5421 cpumMsrRd_Ia32McgCap,
5422 cpumMsrRd_Ia32McgStatus,
5423 cpumMsrRd_Ia32McgCtl,
5424 cpumMsrRd_Ia32DebugCtl,
5425 cpumMsrRd_Ia32SmrrPhysBase,
5426 cpumMsrRd_Ia32SmrrPhysMask,
5427 cpumMsrRd_Ia32PlatformDcaCap,
5428 cpumMsrRd_Ia32CpuDcaCap,
5429 cpumMsrRd_Ia32Dca0Cap,
5430 cpumMsrRd_Ia32PerfEvtSelN,
5431 cpumMsrRd_Ia32PerfStatus,
5432 cpumMsrRd_Ia32PerfCtl,
5433 cpumMsrRd_Ia32FixedCtrN,
5434 cpumMsrRd_Ia32PerfCapabilities,
5435 cpumMsrRd_Ia32FixedCtrCtrl,
5436 cpumMsrRd_Ia32PerfGlobalStatus,
5437 cpumMsrRd_Ia32PerfGlobalCtrl,
5438 cpumMsrRd_Ia32PerfGlobalOvfCtrl,
5439 cpumMsrRd_Ia32PebsEnable,
5440 cpumMsrRd_Ia32ClockModulation,
5441 cpumMsrRd_Ia32ThermInterrupt,
5442 cpumMsrRd_Ia32ThermStatus,
5443 cpumMsrRd_Ia32Therm2Ctl,
5444 cpumMsrRd_Ia32MiscEnable,
5445 cpumMsrRd_Ia32McCtlStatusAddrMiscN,
5446 cpumMsrRd_Ia32McNCtl2,
5447 cpumMsrRd_Ia32DsArea,
5448 cpumMsrRd_Ia32TscDeadline,
5449 cpumMsrRd_Ia32X2ApicN,
5450 cpumMsrRd_Ia32DebugInterface,
5451 cpumMsrRd_Ia32VmxBasic,
5452 cpumMsrRd_Ia32VmxPinbasedCtls,
5453 cpumMsrRd_Ia32VmxProcbasedCtls,
5454 cpumMsrRd_Ia32VmxExitCtls,
5455 cpumMsrRd_Ia32VmxEntryCtls,
5456 cpumMsrRd_Ia32VmxMisc,
5457 cpumMsrRd_Ia32VmxCr0Fixed0,
5458 cpumMsrRd_Ia32VmxCr0Fixed1,
5459 cpumMsrRd_Ia32VmxCr4Fixed0,
5460 cpumMsrRd_Ia32VmxCr4Fixed1,
5461 cpumMsrRd_Ia32VmxVmcsEnum,
5462 cpumMsrRd_Ia32VmxProcBasedCtls2,
5463 cpumMsrRd_Ia32VmxEptVpidCap,
5464 cpumMsrRd_Ia32VmxTruePinbasedCtls,
5465 cpumMsrRd_Ia32VmxTrueProcbasedCtls,
5466 cpumMsrRd_Ia32VmxTrueExitCtls,
5467 cpumMsrRd_Ia32VmxTrueEntryCtls,
5468 cpumMsrRd_Ia32VmxVmFunc,
5469 cpumMsrRd_Ia32SpecCtrl,
5470 cpumMsrRd_Ia32ArchCapabilities,
5471
5472 cpumMsrRd_Amd64Efer,
5473 cpumMsrRd_Amd64SyscallTarget,
5474 cpumMsrRd_Amd64LongSyscallTarget,
5475 cpumMsrRd_Amd64CompSyscallTarget,
5476 cpumMsrRd_Amd64SyscallFlagMask,
5477 cpumMsrRd_Amd64FsBase,
5478 cpumMsrRd_Amd64GsBase,
5479 cpumMsrRd_Amd64KernelGsBase,
5480 cpumMsrRd_Amd64TscAux,
5481
5482 cpumMsrRd_IntelEblCrPowerOn,
5483 cpumMsrRd_IntelI7CoreThreadCount,
5484 cpumMsrRd_IntelP4EbcHardPowerOn,
5485 cpumMsrRd_IntelP4EbcSoftPowerOn,
5486 cpumMsrRd_IntelP4EbcFrequencyId,
5487 cpumMsrRd_IntelP6FsbFrequency,
5488 cpumMsrRd_IntelPlatformInfo,
5489 cpumMsrRd_IntelFlexRatio,
5490 cpumMsrRd_IntelPkgCStConfigControl,
5491 cpumMsrRd_IntelPmgIoCaptureBase,
5492 cpumMsrRd_IntelLastBranchFromToN,
5493 cpumMsrRd_IntelLastBranchFromN,
5494 cpumMsrRd_IntelLastBranchToN,
5495 cpumMsrRd_IntelLastBranchTos,
5496 cpumMsrRd_IntelBblCrCtl,
5497 cpumMsrRd_IntelBblCrCtl3,
5498 cpumMsrRd_IntelI7TemperatureTarget,
5499 cpumMsrRd_IntelI7MsrOffCoreResponseN,
5500 cpumMsrRd_IntelI7MiscPwrMgmt,
5501 cpumMsrRd_IntelP6CrN,
5502 cpumMsrRd_IntelCpuId1FeatureMaskEcdx,
5503 cpumMsrRd_IntelCpuId1FeatureMaskEax,
5504 cpumMsrRd_IntelCpuId80000001FeatureMaskEcdx,
5505 cpumMsrRd_IntelI7SandyAesNiCtl,
5506 cpumMsrRd_IntelI7TurboRatioLimit,
5507 cpumMsrRd_IntelI7LbrSelect,
5508 cpumMsrRd_IntelI7SandyErrorControl,
5509 cpumMsrRd_IntelI7VirtualLegacyWireCap,
5510 cpumMsrRd_IntelI7PowerCtl,
5511 cpumMsrRd_IntelI7SandyPebsNumAlt,
5512 cpumMsrRd_IntelI7PebsLdLat,
5513 cpumMsrRd_IntelI7PkgCnResidencyN,
5514 cpumMsrRd_IntelI7CoreCnResidencyN,
5515 cpumMsrRd_IntelI7SandyVrCurrentConfig,
5516 cpumMsrRd_IntelI7SandyVrMiscConfig,
5517 cpumMsrRd_IntelI7SandyRaplPowerUnit,
5518 cpumMsrRd_IntelI7SandyPkgCnIrtlN,
5519 cpumMsrRd_IntelI7SandyPkgC2Residency,
5520 cpumMsrRd_IntelI7RaplPkgPowerLimit,
5521 cpumMsrRd_IntelI7RaplPkgEnergyStatus,
5522 cpumMsrRd_IntelI7RaplPkgPerfStatus,
5523 cpumMsrRd_IntelI7RaplPkgPowerInfo,
5524 cpumMsrRd_IntelI7RaplDramPowerLimit,
5525 cpumMsrRd_IntelI7RaplDramEnergyStatus,
5526 cpumMsrRd_IntelI7RaplDramPerfStatus,
5527 cpumMsrRd_IntelI7RaplDramPowerInfo,
5528 cpumMsrRd_IntelI7RaplPp0PowerLimit,
5529 cpumMsrRd_IntelI7RaplPp0EnergyStatus,
5530 cpumMsrRd_IntelI7RaplPp0Policy,
5531 cpumMsrRd_IntelI7RaplPp0PerfStatus,
5532 cpumMsrRd_IntelI7RaplPp1PowerLimit,
5533 cpumMsrRd_IntelI7RaplPp1EnergyStatus,
5534 cpumMsrRd_IntelI7RaplPp1Policy,
5535 cpumMsrRd_IntelI7IvyConfigTdpNominal,
5536 cpumMsrRd_IntelI7IvyConfigTdpLevel1,
5537 cpumMsrRd_IntelI7IvyConfigTdpLevel2,
5538 cpumMsrRd_IntelI7IvyConfigTdpControl,
5539 cpumMsrRd_IntelI7IvyTurboActivationRatio,
5540 cpumMsrRd_IntelI7UncPerfGlobalCtrl,
5541 cpumMsrRd_IntelI7UncPerfGlobalStatus,
5542 cpumMsrRd_IntelI7UncPerfGlobalOvfCtrl,
5543 cpumMsrRd_IntelI7UncPerfFixedCtrCtrl,
5544 cpumMsrRd_IntelI7UncPerfFixedCtr,
5545 cpumMsrRd_IntelI7UncCBoxConfig,
5546 cpumMsrRd_IntelI7UncArbPerfCtrN,
5547 cpumMsrRd_IntelI7UncArbPerfEvtSelN,
5548 cpumMsrRd_IntelI7SmiCount,
5549 cpumMsrRd_IntelCore2EmttmCrTablesN,
5550 cpumMsrRd_IntelCore2SmmCStMiscInfo,
5551 cpumMsrRd_IntelCore1ExtConfig,
5552 cpumMsrRd_IntelCore1DtsCalControl,
5553 cpumMsrRd_IntelCore2PeciControl,
5554 cpumMsrRd_IntelAtSilvCoreC1Recidency,
5555
5556 cpumMsrRd_P6LastBranchFromIp,
5557 cpumMsrRd_P6LastBranchToIp,
5558 cpumMsrRd_P6LastIntFromIp,
5559 cpumMsrRd_P6LastIntToIp,
5560
5561 cpumMsrRd_AmdFam15hTscRate,
5562 cpumMsrRd_AmdFam15hLwpCfg,
5563 cpumMsrRd_AmdFam15hLwpCbAddr,
5564 cpumMsrRd_AmdFam10hMc4MiscN,
5565 cpumMsrRd_AmdK8PerfCtlN,
5566 cpumMsrRd_AmdK8PerfCtrN,
5567 cpumMsrRd_AmdK8SysCfg,
5568 cpumMsrRd_AmdK8HwCr,
5569 cpumMsrRd_AmdK8IorrBaseN,
5570 cpumMsrRd_AmdK8IorrMaskN,
5571 cpumMsrRd_AmdK8TopOfMemN,
5572 cpumMsrRd_AmdK8NbCfg1,
5573 cpumMsrRd_AmdK8McXcptRedir,
5574 cpumMsrRd_AmdK8CpuNameN,
5575 cpumMsrRd_AmdK8HwThermalCtrl,
5576 cpumMsrRd_AmdK8SwThermalCtrl,
5577 cpumMsrRd_AmdK8FidVidControl,
5578 cpumMsrRd_AmdK8FidVidStatus,
5579 cpumMsrRd_AmdK8McCtlMaskN,
5580 cpumMsrRd_AmdK8SmiOnIoTrapN,
5581 cpumMsrRd_AmdK8SmiOnIoTrapCtlSts,
5582 cpumMsrRd_AmdK8IntPendingMessage,
5583 cpumMsrRd_AmdK8SmiTriggerIoCycle,
5584 cpumMsrRd_AmdFam10hMmioCfgBaseAddr,
5585 cpumMsrRd_AmdFam10hTrapCtlMaybe,
5586 cpumMsrRd_AmdFam10hPStateCurLimit,
5587 cpumMsrRd_AmdFam10hPStateControl,
5588 cpumMsrRd_AmdFam10hPStateStatus,
5589 cpumMsrRd_AmdFam10hPStateN,
5590 cpumMsrRd_AmdFam10hCofVidControl,
5591 cpumMsrRd_AmdFam10hCofVidStatus,
5592 cpumMsrRd_AmdFam10hCStateIoBaseAddr,
5593 cpumMsrRd_AmdFam10hCpuWatchdogTimer,
5594 cpumMsrRd_AmdK8SmmBase,
5595 cpumMsrRd_AmdK8SmmAddr,
5596 cpumMsrRd_AmdK8SmmMask,
5597 cpumMsrRd_AmdK8VmCr,
5598 cpumMsrRd_AmdK8IgnNe,
5599 cpumMsrRd_AmdK8SmmCtl,
5600 cpumMsrRd_AmdK8VmHSavePa,
5601 cpumMsrRd_AmdFam10hVmLockKey,
5602 cpumMsrRd_AmdFam10hSmmLockKey,
5603 cpumMsrRd_AmdFam10hLocalSmiStatus,
5604 cpumMsrRd_AmdFam10hOsVisWrkIdLength,
5605 cpumMsrRd_AmdFam10hOsVisWrkStatus,
5606 cpumMsrRd_AmdFam16hL2IPerfCtlN,
5607 cpumMsrRd_AmdFam16hL2IPerfCtrN,
5608 cpumMsrRd_AmdFam15hNorthbridgePerfCtlN,
5609 cpumMsrRd_AmdFam15hNorthbridgePerfCtrN,
5610 cpumMsrRd_AmdK7MicrocodeCtl,
5611 cpumMsrRd_AmdK7ClusterIdMaybe,
5612 cpumMsrRd_AmdK8CpuIdCtlStd07hEbax,
5613 cpumMsrRd_AmdK8CpuIdCtlStd06hEcx,
5614 cpumMsrRd_AmdK8CpuIdCtlStd01hEdcx,
5615 cpumMsrRd_AmdK8CpuIdCtlExt01hEdcx,
5616 cpumMsrRd_AmdK8PatchLevel,
5617 cpumMsrRd_AmdK7DebugStatusMaybe,
5618 cpumMsrRd_AmdK7BHTraceBaseMaybe,
5619 cpumMsrRd_AmdK7BHTracePtrMaybe,
5620 cpumMsrRd_AmdK7BHTraceLimitMaybe,
5621 cpumMsrRd_AmdK7HardwareDebugToolCfgMaybe,
5622 cpumMsrRd_AmdK7FastFlushCountMaybe,
5623 cpumMsrRd_AmdK7NodeId,
5624 cpumMsrRd_AmdK7DrXAddrMaskN,
5625 cpumMsrRd_AmdK7Dr0DataMatchMaybe,
5626 cpumMsrRd_AmdK7Dr0DataMaskMaybe,
5627 cpumMsrRd_AmdK7LoadStoreCfg,
5628 cpumMsrRd_AmdK7InstrCacheCfg,
5629 cpumMsrRd_AmdK7DataCacheCfg,
5630 cpumMsrRd_AmdK7BusUnitCfg,
5631 cpumMsrRd_AmdK7DebugCtl2Maybe,
5632 cpumMsrRd_AmdFam15hFpuCfg,
5633 cpumMsrRd_AmdFam15hDecoderCfg,
5634 cpumMsrRd_AmdFam10hBusUnitCfg2,
5635 cpumMsrRd_AmdFam15hCombUnitCfg,
5636 cpumMsrRd_AmdFam15hCombUnitCfg2,
5637 cpumMsrRd_AmdFam15hCombUnitCfg3,
5638 cpumMsrRd_AmdFam15hExecUnitCfg,
5639 cpumMsrRd_AmdFam15hLoadStoreCfg2,
5640 cpumMsrRd_AmdFam10hIbsFetchCtl,
5641 cpumMsrRd_AmdFam10hIbsFetchLinAddr,
5642 cpumMsrRd_AmdFam10hIbsFetchPhysAddr,
5643 cpumMsrRd_AmdFam10hIbsOpExecCtl,
5644 cpumMsrRd_AmdFam10hIbsOpRip,
5645 cpumMsrRd_AmdFam10hIbsOpData,
5646 cpumMsrRd_AmdFam10hIbsOpData2,
5647 cpumMsrRd_AmdFam10hIbsOpData3,
5648 cpumMsrRd_AmdFam10hIbsDcLinAddr,
5649 cpumMsrRd_AmdFam10hIbsDcPhysAddr,
5650 cpumMsrRd_AmdFam10hIbsCtl,
5651 cpumMsrRd_AmdFam14hIbsBrTarget,
5652
5653 cpumMsrRd_Gim
5654};
5655
5656
5657/**
5658 * MSR write function table.
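 * Note (editorial): indexed by the kCpumMsrWrFn_* values, so the entry order here must match that enumeration exactly.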
5659 */
5660static const PFNCPUMWRMSR g_aCpumWrMsrFns[kCpumMsrWrFn_End] =
5661{
5662 NULL, /* Invalid */
5663 cpumMsrWr_IgnoreWrite,
5664 cpumMsrWr_ReadOnly,
5665 NULL, /* Alias */
5666 cpumMsrWr_Ia32P5McAddr,
5667 cpumMsrWr_Ia32P5McType,
5668 cpumMsrWr_Ia32TimestampCounter,
5669 cpumMsrWr_Ia32ApicBase,
5670 cpumMsrWr_Ia32FeatureControl,
5671 cpumMsrWr_Ia32BiosSignId,
5672 cpumMsrWr_Ia32BiosUpdateTrigger,
5673 cpumMsrWr_Ia32SmmMonitorCtl,
5674 cpumMsrWr_Ia32PmcN,
5675 cpumMsrWr_Ia32MonitorFilterLineSize,
5676 cpumMsrWr_Ia32MPerf,
5677 cpumMsrWr_Ia32APerf,
5678 cpumMsrWr_Ia32MtrrPhysBaseN,
5679 cpumMsrWr_Ia32MtrrPhysMaskN,
5680 cpumMsrWr_Ia32MtrrFixed,
5681 cpumMsrWr_Ia32MtrrDefType,
5682 cpumMsrWr_Ia32Pat,
5683 cpumMsrWr_Ia32SysEnterCs,
5684 cpumMsrWr_Ia32SysEnterEsp,
5685 cpumMsrWr_Ia32SysEnterEip,
5686 cpumMsrWr_Ia32McgStatus,
5687 cpumMsrWr_Ia32McgCtl,
5688 cpumMsrWr_Ia32DebugCtl,
5689 cpumMsrWr_Ia32SmrrPhysBase,
5690 cpumMsrWr_Ia32SmrrPhysMask,
5691 cpumMsrWr_Ia32PlatformDcaCap,
5692 cpumMsrWr_Ia32Dca0Cap,
5693 cpumMsrWr_Ia32PerfEvtSelN,
5694 cpumMsrWr_Ia32PerfStatus,
5695 cpumMsrWr_Ia32PerfCtl,
5696 cpumMsrWr_Ia32FixedCtrN,
5697 cpumMsrWr_Ia32PerfCapabilities,
5698 cpumMsrWr_Ia32FixedCtrCtrl,
5699 cpumMsrWr_Ia32PerfGlobalStatus,
5700 cpumMsrWr_Ia32PerfGlobalCtrl,
5701 cpumMsrWr_Ia32PerfGlobalOvfCtrl,
5702 cpumMsrWr_Ia32PebsEnable,
5703 cpumMsrWr_Ia32ClockModulation,
5704 cpumMsrWr_Ia32ThermInterrupt,
5705 cpumMsrWr_Ia32ThermStatus,
5706 cpumMsrWr_Ia32Therm2Ctl,
5707 cpumMsrWr_Ia32MiscEnable,
5708 cpumMsrWr_Ia32McCtlStatusAddrMiscN,
5709 cpumMsrWr_Ia32McNCtl2,
5710 cpumMsrWr_Ia32DsArea,
5711 cpumMsrWr_Ia32TscDeadline,
5712 cpumMsrWr_Ia32X2ApicN,
5713 cpumMsrWr_Ia32DebugInterface,
5714 cpumMsrWr_Ia32SpecCtrl,
5715 cpumMsrWr_Ia32PredCmd,
5716
5717 cpumMsrWr_Amd64Efer,
5718 cpumMsrWr_Amd64SyscallTarget,
5719 cpumMsrWr_Amd64LongSyscallTarget,
5720 cpumMsrWr_Amd64CompSyscallTarget,
5721 cpumMsrWr_Amd64SyscallFlagMask,
5722 cpumMsrWr_Amd64FsBase,
5723 cpumMsrWr_Amd64GsBase,
5724 cpumMsrWr_Amd64KernelGsBase,
5725 cpumMsrWr_Amd64TscAux,
5726
5727 cpumMsrWr_IntelEblCrPowerOn,
5728 cpumMsrWr_IntelP4EbcHardPowerOn,
5729 cpumMsrWr_IntelP4EbcSoftPowerOn,
5730 cpumMsrWr_IntelP4EbcFrequencyId,
5731 cpumMsrWr_IntelFlexRatio,
5732 cpumMsrWr_IntelPkgCStConfigControl,
5733 cpumMsrWr_IntelPmgIoCaptureBase,
5734 cpumMsrWr_IntelLastBranchFromToN,
5735 cpumMsrWr_IntelLastBranchFromN,
5736 cpumMsrWr_IntelLastBranchToN,
5737 cpumMsrWr_IntelLastBranchTos,
5738 cpumMsrWr_IntelBblCrCtl,
5739 cpumMsrWr_IntelBblCrCtl3,
5740 cpumMsrWr_IntelI7TemperatureTarget,
5741 cpumMsrWr_IntelI7MsrOffCoreResponseN,
5742 cpumMsrWr_IntelI7MiscPwrMgmt,
5743 cpumMsrWr_IntelP6CrN,
5744 cpumMsrWr_IntelCpuId1FeatureMaskEcdx,
5745 cpumMsrWr_IntelCpuId1FeatureMaskEax,
5746 cpumMsrWr_IntelCpuId80000001FeatureMaskEcdx,
5747 cpumMsrWr_IntelI7SandyAesNiCtl,
5748 cpumMsrWr_IntelI7TurboRatioLimit,
5749 cpumMsrWr_IntelI7LbrSelect,
5750 cpumMsrWr_IntelI7SandyErrorControl,
5751 cpumMsrWr_IntelI7PowerCtl,
5752 cpumMsrWr_IntelI7SandyPebsNumAlt,
5753 cpumMsrWr_IntelI7PebsLdLat,
5754 cpumMsrWr_IntelI7SandyVrCurrentConfig,
5755 cpumMsrWr_IntelI7SandyVrMiscConfig,
5756 cpumMsrWr_IntelI7SandyRaplPowerUnit,
5757 cpumMsrWr_IntelI7SandyPkgCnIrtlN,
5758 cpumMsrWr_IntelI7SandyPkgC2Residency,
5759 cpumMsrWr_IntelI7RaplPkgPowerLimit,
5760 cpumMsrWr_IntelI7RaplDramPowerLimit,
5761 cpumMsrWr_IntelI7RaplPp0PowerLimit,
5762 cpumMsrWr_IntelI7RaplPp0Policy,
5763 cpumMsrWr_IntelI7RaplPp1PowerLimit,
5764 cpumMsrWr_IntelI7RaplPp1Policy,
5765 cpumMsrWr_IntelI7IvyConfigTdpControl,
5766 cpumMsrWr_IntelI7IvyTurboActivationRatio,
5767 cpumMsrWr_IntelI7UncPerfGlobalCtrl,
5768 cpumMsrWr_IntelI7UncPerfGlobalStatus,
5769 cpumMsrWr_IntelI7UncPerfGlobalOvfCtrl,
5770 cpumMsrWr_IntelI7UncPerfFixedCtrCtrl,
5771 cpumMsrWr_IntelI7UncPerfFixedCtr,
5772 cpumMsrWr_IntelI7UncArbPerfCtrN,
5773 cpumMsrWr_IntelI7UncArbPerfEvtSelN,
5774 cpumMsrWr_IntelCore2EmttmCrTablesN,
5775 cpumMsrWr_IntelCore2SmmCStMiscInfo,
5776 cpumMsrWr_IntelCore1ExtConfig,
5777 cpumMsrWr_IntelCore1DtsCalControl,
5778 cpumMsrWr_IntelCore2PeciControl,
5779
5780 cpumMsrWr_P6LastIntFromIp,
5781 cpumMsrWr_P6LastIntToIp,
5782
5783 cpumMsrWr_AmdFam15hTscRate,
5784 cpumMsrWr_AmdFam15hLwpCfg,
5785 cpumMsrWr_AmdFam15hLwpCbAddr,
5786 cpumMsrWr_AmdFam10hMc4MiscN,
5787 cpumMsrWr_AmdK8PerfCtlN,
5788 cpumMsrWr_AmdK8PerfCtrN,
5789 cpumMsrWr_AmdK8SysCfg,
5790 cpumMsrWr_AmdK8HwCr,
5791 cpumMsrWr_AmdK8IorrBaseN,
5792 cpumMsrWr_AmdK8IorrMaskN,
5793 cpumMsrWr_AmdK8TopOfMemN,
5794 cpumMsrWr_AmdK8NbCfg1,
5795 cpumMsrWr_AmdK8McXcptRedir,
5796 cpumMsrWr_AmdK8CpuNameN,
5797 cpumMsrWr_AmdK8HwThermalCtrl,
5798 cpumMsrWr_AmdK8SwThermalCtrl,
5799 cpumMsrWr_AmdK8FidVidControl,
5800 cpumMsrWr_AmdK8McCtlMaskN,
5801 cpumMsrWr_AmdK8SmiOnIoTrapN,
5802 cpumMsrWr_AmdK8SmiOnIoTrapCtlSts,
5803 cpumMsrWr_AmdK8IntPendingMessage,
5804 cpumMsrWr_AmdK8SmiTriggerIoCycle,
5805 cpumMsrWr_AmdFam10hMmioCfgBaseAddr,
5806 cpumMsrWr_AmdFam10hTrapCtlMaybe,
5807 cpumMsrWr_AmdFam10hPStateControl,
5808 cpumMsrWr_AmdFam10hPStateStatus,
5809 cpumMsrWr_AmdFam10hPStateN,
5810 cpumMsrWr_AmdFam10hCofVidControl,
5811 cpumMsrWr_AmdFam10hCofVidStatus,
5812 cpumMsrWr_AmdFam10hCStateIoBaseAddr,
5813 cpumMsrWr_AmdFam10hCpuWatchdogTimer,
5814 cpumMsrWr_AmdK8SmmBase,
5815 cpumMsrWr_AmdK8SmmAddr,
5816 cpumMsrWr_AmdK8SmmMask,
5817 cpumMsrWr_AmdK8VmCr,
5818 cpumMsrWr_AmdK8IgnNe,
5819 cpumMsrWr_AmdK8SmmCtl,
5820 cpumMsrWr_AmdK8VmHSavePa,
5821 cpumMsrWr_AmdFam10hVmLockKey,
5822 cpumMsrWr_AmdFam10hSmmLockKey,
5823 cpumMsrWr_AmdFam10hLocalSmiStatus,
5824 cpumMsrWr_AmdFam10hOsVisWrkIdLength,
5825 cpumMsrWr_AmdFam10hOsVisWrkStatus,
5826 cpumMsrWr_AmdFam16hL2IPerfCtlN,
5827 cpumMsrWr_AmdFam16hL2IPerfCtrN,
5828 cpumMsrWr_AmdFam15hNorthbridgePerfCtlN,
5829 cpumMsrWr_AmdFam15hNorthbridgePerfCtrN,
5830 cpumMsrWr_AmdK7MicrocodeCtl,
5831 cpumMsrWr_AmdK7ClusterIdMaybe,
5832 cpumMsrWr_AmdK8CpuIdCtlStd07hEbax,
5833 cpumMsrWr_AmdK8CpuIdCtlStd06hEcx,
5834 cpumMsrWr_AmdK8CpuIdCtlStd01hEdcx,
5835 cpumMsrWr_AmdK8CpuIdCtlExt01hEdcx,
5836 cpumMsrWr_AmdK8PatchLoader,
5837 cpumMsrWr_AmdK7DebugStatusMaybe,
5838 cpumMsrWr_AmdK7BHTraceBaseMaybe,
5839 cpumMsrWr_AmdK7BHTracePtrMaybe,
5840 cpumMsrWr_AmdK7BHTraceLimitMaybe,
5841 cpumMsrWr_AmdK7HardwareDebugToolCfgMaybe,
5842 cpumMsrWr_AmdK7FastFlushCountMaybe,
5843 cpumMsrWr_AmdK7NodeId,
5844 cpumMsrWr_AmdK7DrXAddrMaskN,
5845 cpumMsrWr_AmdK7Dr0DataMatchMaybe,
5846 cpumMsrWr_AmdK7Dr0DataMaskMaybe,
5847 cpumMsrWr_AmdK7LoadStoreCfg,
5848 cpumMsrWr_AmdK7InstrCacheCfg,
5849 cpumMsrWr_AmdK7DataCacheCfg,
5850 cpumMsrWr_AmdK7BusUnitCfg,
5851 cpumMsrWr_AmdK7DebugCtl2Maybe,
5852 cpumMsrWr_AmdFam15hFpuCfg,
5853 cpumMsrWr_AmdFam15hDecoderCfg,
5854 cpumMsrWr_AmdFam10hBusUnitCfg2,
5855 cpumMsrWr_AmdFam15hCombUnitCfg,
5856 cpumMsrWr_AmdFam15hCombUnitCfg2,
5857 cpumMsrWr_AmdFam15hCombUnitCfg3,
5858 cpumMsrWr_AmdFam15hExecUnitCfg,
5859 cpumMsrWr_AmdFam15hLoadStoreCfg2,
5860 cpumMsrWr_AmdFam10hIbsFetchCtl,
5861 cpumMsrWr_AmdFam10hIbsFetchLinAddr,
5862 cpumMsrWr_AmdFam10hIbsFetchPhysAddr,
5863 cpumMsrWr_AmdFam10hIbsOpExecCtl,
5864 cpumMsrWr_AmdFam10hIbsOpRip,
5865 cpumMsrWr_AmdFam10hIbsOpData,
5866 cpumMsrWr_AmdFam10hIbsOpData2,
5867 cpumMsrWr_AmdFam10hIbsOpData3,
5868 cpumMsrWr_AmdFam10hIbsDcLinAddr,
5869 cpumMsrWr_AmdFam10hIbsDcPhysAddr,
5870 cpumMsrWr_AmdFam10hIbsCtl,
5871 cpumMsrWr_AmdFam14hIbsBrTarget,
5872
5873 cpumMsrWr_Gim
5874};
5875
5876
5877/**
5878 * Looks up the range for the given MSR.
5879 *
5880 * @returns Pointer to the range if found, NULL if not.
5881 * @param pVM The cross context VM structure.
5882 * @param idMsr The MSR to look up.
5883 */
5884# ifndef IN_RING3
5885static
5886# endif
5887PCPUMMSRRANGE cpumLookupMsrRange(PVM pVM, uint32_t idMsr)
5888{
5889 /*
5890 * Binary lookup.
5891 */
5892 uint32_t cRanges = pVM->cpum.s.GuestInfo.cMsrRanges;
5893 if (!cRanges)
5894 return NULL;
5895 PCPUMMSRRANGE paRanges = pVM->cpum.s.GuestInfo.CTX_SUFF(paMsrRanges);
5896 for (;;)
5897 {
5898 uint32_t i = cRanges / 2;
5899 if (idMsr < paRanges[i].uFirst)
5900 {
5901 if (i == 0)
5902 break;
5903 cRanges = i;
5904 }
5905 else if (idMsr > paRanges[i].uLast)
5906 {
5907 i++;
5908 if (i >= cRanges)
5909 break;
5910 cRanges -= i;
5911 paRanges = &paRanges[i];
5912 }
5913 else
5914 {
5915 if (paRanges[i].enmRdFn == kCpumMsrRdFn_MsrAlias)
5916 return cpumLookupMsrRange(pVM, paRanges[i].uValue);
5917 return &paRanges[i];
5918 }
5919 }
5920
5921# ifdef VBOX_STRICT
5922 /*
5923 * Linear lookup to verify the above binary search.
5924 */
5925 uint32_t cLeft = pVM->cpum.s.GuestInfo.cMsrRanges;
5926 PCPUMMSRRANGE pCur = pVM->cpum.s.GuestInfo.CTX_SUFF(paMsrRanges);
5927 while (cLeft-- > 0)
5928 {
5929 if (idMsr >= pCur->uFirst && idMsr <= pCur->uLast)
5930 {
5931 AssertFailed();
5932 if (pCur->enmRdFn == kCpumMsrRdFn_MsrAlias)
5933 return cpumLookupMsrRange(pVM, pCur->uValue);
5934 return pCur;
5935 }
5936 pCur++;
5937 }
5938# endif
5939 return NULL;
5940}
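
/*
 * Editorial sketch, not part of the original source: the lookup above halves a
 * sorted array of inclusive [uFirst, uLast] ranges until idMsr either lands
 * inside an element or the view is exhausted, then follows MsrAlias entries.
 * The stand-alone fragment below (hypothetical EXAMPLEMSRRANGE type and
 * function name) shows just the halving scheme, without the alias handling or
 * the strict-mode cross-check.
 */
typedef struct EXAMPLEMSRRANGE { uint32_t uFirst, uLast; } EXAMPLEMSRRANGE;

static EXAMPLEMSRRANGE const *exampleLookupMsrRange(EXAMPLEMSRRANGE const *paRanges, uint32_t cRanges, uint32_t idMsr)
{
    while (cRanges > 0)
    {
        uint32_t i = cRanges / 2;
        if (idMsr < paRanges[i].uFirst)
            cRanges = i;                    /* Keep the lower half, excluding element i. */
        else if (idMsr > paRanges[i].uLast)
        {
            paRanges += i + 1;              /* Keep the upper half, excluding element i. */
            cRanges  -= i + 1;
        }
        else
            return &paRanges[i];            /* idMsr falls inside this range. */
    }
    return NULL;                            /* No range covers idMsr. */
}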
5941
5942
5943/**
5944 * Query a guest MSR.
5945 *
5946 * The caller is responsible for checking privilege if the call is the result of
5947 * a RDMSR instruction. We'll do the rest.
5948 *
5949 * @retval VINF_SUCCESS on success.
5950 * @retval VINF_CPUM_R3_MSR_READ if the MSR read could not be serviced in the
5951 * current context (raw-mode or ring-0).
5952 * @retval VERR_CPUM_RAISE_GP_0 on failure (invalid MSR), the caller is
5953 * expected to take the appropriate actions. @a *puValue is set to 0.
5954 * @param pVCpu The cross context virtual CPU structure.
5955 * @param idMsr The MSR.
5956 * @param puValue Where to return the value.
5957 *
5958 * @remarks This will always return the right values, even when we're in the
5959 * recompiler.
5960 */
5961VMMDECL(VBOXSTRICTRC) CPUMQueryGuestMsr(PVMCPU pVCpu, uint32_t idMsr, uint64_t *puValue)
5962{
5963 *puValue = 0;
5964
5965 VBOXSTRICTRC rcStrict;
5966 PVM pVM = pVCpu->CTX_SUFF(pVM);
5967 PCPUMMSRRANGE pRange = cpumLookupMsrRange(pVM, idMsr);
5968 if (pRange)
5969 {
5970 CPUMMSRRDFN enmRdFn = (CPUMMSRRDFN)pRange->enmRdFn;
5971 AssertReturn(enmRdFn > kCpumMsrRdFn_Invalid && enmRdFn < kCpumMsrRdFn_End, VERR_CPUM_IPE_1);
5972
5973 PFNCPUMRDMSR pfnRdMsr = g_aCpumRdMsrFns[enmRdFn];
5974 AssertReturn(pfnRdMsr, VERR_CPUM_IPE_2);
5975
5976 STAM_COUNTER_INC(&pRange->cReads);
5977 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrReads);
5978
5979 rcStrict = pfnRdMsr(pVCpu, idMsr, pRange, puValue);
5980 if (rcStrict == VINF_SUCCESS)
5981 Log2(("CPUM: RDMSR %#x (%s) -> %#llx\n", idMsr, pRange->szName, *puValue));
5982 else if (rcStrict == VERR_CPUM_RAISE_GP_0)
5983 {
5984 Log(("CPUM: RDMSR %#x (%s) -> #GP(0)\n", idMsr, pRange->szName));
5985 STAM_COUNTER_INC(&pRange->cGps);
5986 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrReadsRaiseGp);
5987 }
5988#ifndef IN_RING3
5989 else if (rcStrict == VINF_CPUM_R3_MSR_READ)
5990 Log(("CPUM: RDMSR %#x (%s) -> ring-3\n", idMsr, pRange->szName));
5991#endif
5992 else
5993 {
5994 Log(("CPUM: RDMSR %#x (%s) -> rcStrict=%Rrc\n", idMsr, pRange->szName, VBOXSTRICTRC_VAL(rcStrict)));
5995 AssertMsgStmt(RT_FAILURE_NP(rcStrict), ("%Rrc idMsr=%#x\n", VBOXSTRICTRC_VAL(rcStrict), idMsr),
5996 rcStrict = VERR_IPE_UNEXPECTED_INFO_STATUS);
5997 Assert(rcStrict != VERR_EM_INTERPRETER);
5998 }
5999 }
6000 else
6001 {
6002 Log(("CPUM: Unknown RDMSR %#x -> #GP(0)\n", idMsr));
6003 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrReads);
6004 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrReadsUnknown);
6005 rcStrict = VERR_CPUM_RAISE_GP_0;
6006 }
6007 return rcStrict;
6008}
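
/*
 * Editorial sketch, not part of the original source: how a hypothetical RDMSR
 * intercept handler might consume the status codes documented above.  Only the
 * CPUMQueryGuestMsr() contract is taken from this file; the wrapper and its
 * name are illustrative.
 */
static VBOXSTRICTRC exampleHandleRdMsr(PVMCPU pVCpu, uint32_t idMsr, uint64_t *puValue)
{
    VBOXSTRICTRC rcStrict = CPUMQueryGuestMsr(pVCpu, idMsr, puValue);
    if (rcStrict == VERR_CPUM_RAISE_GP_0)
    {
        /* Unknown or invalid MSR: *puValue is 0 and the caller is expected to
           inject #GP(0) into the guest instead of completing the RDMSR. */
    }
#ifndef IN_RING3
    else if (rcStrict == VINF_CPUM_R3_MSR_READ)
    {
        /* The read cannot be serviced in this context; forward it to ring-3. */
    }
#endif
    else
        Assert(rcStrict == VINF_SUCCESS || RT_FAILURE_NP(rcStrict));
    return rcStrict;                        /* *puValue is only meaningful on VINF_SUCCESS. */
}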
6009
6010
6011/**
6012 * Writes to a guest MSR.
6013 *
6014 * The caller is responsible for checking privilege if the call is the result of
6015 * a WRMSR instruction. We'll do the rest.
6016 *
6017 * @retval VINF_SUCCESS on success.
6018 * @retval VINF_CPUM_R3_MSR_WRITE if the MSR write could not be serviced in the
6019 * current context (raw-mode or ring-0).
6020 * @retval VERR_CPUM_RAISE_GP_0 on failure, the caller is expected to take the
6021 * appropriate actions.
6022 *
6023 * @param pVCpu The cross context virtual CPU structure.
6024 * @param idMsr The MSR id.
6025 * @param uValue The value to set.
6026 *
6027 * @remarks Everyone changing MSR values, including the recompiler, shall do it
6028 * by calling this method. This makes sure we have current values and
6029 * that we trigger all the right actions when something changes.
6030 *
6031 * For performance reasons, this actually isn't entirely true for some
6032 * MSRs when in HM mode. The code here and in HM must be aware of
6033 * this.
6034 */
6035VMMDECL(VBOXSTRICTRC) CPUMSetGuestMsr(PVMCPU pVCpu, uint32_t idMsr, uint64_t uValue)
6036{
6037 VBOXSTRICTRC rcStrict;
6038 PVM pVM = pVCpu->CTX_SUFF(pVM);
6039 PCPUMMSRRANGE pRange = cpumLookupMsrRange(pVM, idMsr);
6040 if (pRange)
6041 {
6042 STAM_COUNTER_INC(&pRange->cWrites);
6043 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWrites);
6044
6045 if (!(uValue & pRange->fWrGpMask))
6046 {
6047 CPUMMSRWRFN enmWrFn = (CPUMMSRWRFN)pRange->enmWrFn;
6048 AssertReturn(enmWrFn > kCpumMsrWrFn_Invalid && enmWrFn < kCpumMsrWrFn_End, VERR_CPUM_IPE_1);
6049
6050 PFNCPUMWRMSR pfnWrMsr = g_aCpumWrMsrFns[enmWrFn];
6051 AssertReturn(pfnWrMsr, VERR_CPUM_IPE_2);
6052
6053 uint64_t uValueAdjusted = uValue & ~pRange->fWrIgnMask;
6054 if (uValueAdjusted != uValue)
6055 {
6056 STAM_COUNTER_INC(&pRange->cIgnoredBits);
6057 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWritesToIgnoredBits);
6058 }
6059
6060 rcStrict = pfnWrMsr(pVCpu, idMsr, pRange, uValueAdjusted, uValue);
6061 if (rcStrict == VINF_SUCCESS)
6062 Log2(("CPUM: WRMSR %#x (%s), %#llx [%#llx]\n", idMsr, pRange->szName, uValueAdjusted, uValue));
6063 else if (rcStrict == VERR_CPUM_RAISE_GP_0)
6064 {
6065 Log(("CPUM: WRMSR %#x (%s), %#llx [%#llx] -> #GP(0)\n", idMsr, pRange->szName, uValueAdjusted, uValue));
6066 STAM_COUNTER_INC(&pRange->cGps);
6067 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWritesRaiseGp);
6068 }
6069#ifndef IN_RING3
6070 else if (rcStrict == VINF_CPUM_R3_MSR_WRITE)
6071 Log(("CPUM: WRMSR %#x (%s), %#llx [%#llx] -> ring-3\n", idMsr, pRange->szName, uValueAdjusted, uValue));
6072#endif
6073 else
6074 {
6075 Log(("CPUM: WRMSR %#x (%s), %#llx [%#llx] -> rcStrict=%Rrc\n",
6076 idMsr, pRange->szName, uValueAdjusted, uValue, VBOXSTRICTRC_VAL(rcStrict)));
6077 AssertMsgStmt(RT_FAILURE_NP(rcStrict), ("%Rrc idMsr=%#x\n", VBOXSTRICTRC_VAL(rcStrict), idMsr),
6078 rcStrict = VERR_IPE_UNEXPECTED_INFO_STATUS);
6079 Assert(rcStrict != VERR_EM_INTERPRETER);
6080 }
6081 }
6082 else
6083 {
6084 Log(("CPUM: WRMSR %#x (%s), %#llx -> #GP(0) - invalid bits %#llx\n",
6085 idMsr, pRange->szName, uValue, uValue & pRange->fWrGpMask));
6086 STAM_COUNTER_INC(&pRange->cGps);
6087 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWritesRaiseGp);
6088 rcStrict = VERR_CPUM_RAISE_GP_0;
6089 }
6090 }
6091 else
6092 {
6093 Log(("CPUM: Unknown WRMSR %#x, %#llx -> #GP(0)\n", idMsr, uValue));
6094 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWrites);
6095 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWritesUnknown);
6096 rcStrict = VERR_CPUM_RAISE_GP_0;
6097 }
6098 return rcStrict;
6099}
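
/*
 * Editorial sketch, not part of the original source: the two write masks used
 * above, illustrated with made-up mask values.  Bits set in fWrGpMask make the
 * whole write fault with #GP(0); bits set in fWrIgnMask are silently dropped
 * before the per-MSR write worker sees the value.
 */
static void exampleWrMsrMaskSemantics(void)
{
    uint64_t const fWrGpMask      = UINT64_C(0xffffffff00000000); /* Hypothetical: upper half reserved, #GP if set. */
    uint64_t const fWrIgnMask     = UINT64_C(0x00000000000000f0); /* Hypothetical: bits 7:4 ignored on write. */
    uint64_t const uValue         = UINT64_C(0x0000000012345678); /* Passes the #GP check: no fWrGpMask bit is set. */
    uint64_t const uValueAdjusted = uValue & ~fWrIgnMask;         /* 0x12345608 is what the write worker receives; */
                                                                  /* the unmodified uValue is passed along as well. */
    Assert(!(uValue & fWrGpMask));
    Assert(uValueAdjusted == UINT64_C(0x0000000012345608));
    RT_NOREF(fWrGpMask, fWrIgnMask, uValue, uValueAdjusted);
}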
6100
6101
6102#if defined(VBOX_STRICT) && defined(IN_RING3)
6103/**
6104 * Performs some checks on the static data related to MSRs.
6105 *
6106 * @returns VINF_SUCCESS on success, error on failure.
6107 */
6108int cpumR3MsrStrictInitChecks(void)
6109{
6110#define CPUM_ASSERT_RD_MSR_FN(a_Register) \
6111 AssertReturn(g_aCpumRdMsrFns[kCpumMsrRdFn_##a_Register] == cpumMsrRd_##a_Register, VERR_CPUM_IPE_2);
6112#define CPUM_ASSERT_WR_MSR_FN(a_Register) \
6113 AssertReturn(g_aCpumWrMsrFns[kCpumMsrWrFn_##a_Register] == cpumMsrWr_##a_Register, VERR_CPUM_IPE_2);
6114
6115 AssertReturn(g_aCpumRdMsrFns[kCpumMsrRdFn_Invalid] == NULL, VERR_CPUM_IPE_2);
6116 CPUM_ASSERT_RD_MSR_FN(FixedValue);
6117 CPUM_ASSERT_RD_MSR_FN(WriteOnly);
6118 CPUM_ASSERT_RD_MSR_FN(Ia32P5McAddr);
6119 CPUM_ASSERT_RD_MSR_FN(Ia32P5McType);
6120 CPUM_ASSERT_RD_MSR_FN(Ia32TimestampCounter);
6121 CPUM_ASSERT_RD_MSR_FN(Ia32PlatformId);
6122 CPUM_ASSERT_RD_MSR_FN(Ia32ApicBase);
6123 CPUM_ASSERT_RD_MSR_FN(Ia32FeatureControl);
6124 CPUM_ASSERT_RD_MSR_FN(Ia32BiosSignId);
6125 CPUM_ASSERT_RD_MSR_FN(Ia32SmmMonitorCtl);
6126 CPUM_ASSERT_RD_MSR_FN(Ia32PmcN);
6127 CPUM_ASSERT_RD_MSR_FN(Ia32MonitorFilterLineSize);
6128 CPUM_ASSERT_RD_MSR_FN(Ia32MPerf);
6129 CPUM_ASSERT_RD_MSR_FN(Ia32APerf);
6130 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrCap);
6131 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrPhysBaseN);
6132 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrPhysMaskN);
6133 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrFixed);
6134 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrDefType);
6135 CPUM_ASSERT_RD_MSR_FN(Ia32Pat);
6136 CPUM_ASSERT_RD_MSR_FN(Ia32SysEnterCs);
6137 CPUM_ASSERT_RD_MSR_FN(Ia32SysEnterEsp);
6138 CPUM_ASSERT_RD_MSR_FN(Ia32SysEnterEip);
6139 CPUM_ASSERT_RD_MSR_FN(Ia32McgCap);
6140 CPUM_ASSERT_RD_MSR_FN(Ia32McgStatus);
6141 CPUM_ASSERT_RD_MSR_FN(Ia32McgCtl);
6142 CPUM_ASSERT_RD_MSR_FN(Ia32DebugCtl);
6143 CPUM_ASSERT_RD_MSR_FN(Ia32SmrrPhysBase);
6144 CPUM_ASSERT_RD_MSR_FN(Ia32SmrrPhysMask);
6145 CPUM_ASSERT_RD_MSR_FN(Ia32PlatformDcaCap);
6146 CPUM_ASSERT_RD_MSR_FN(Ia32CpuDcaCap);
6147 CPUM_ASSERT_RD_MSR_FN(Ia32Dca0Cap);
6148 CPUM_ASSERT_RD_MSR_FN(Ia32PerfEvtSelN);
6149 CPUM_ASSERT_RD_MSR_FN(Ia32PerfStatus);
6150 CPUM_ASSERT_RD_MSR_FN(Ia32PerfCtl);
6151 CPUM_ASSERT_RD_MSR_FN(Ia32FixedCtrN);
6152 CPUM_ASSERT_RD_MSR_FN(Ia32PerfCapabilities);
6153 CPUM_ASSERT_RD_MSR_FN(Ia32FixedCtrCtrl);
6154 CPUM_ASSERT_RD_MSR_FN(Ia32PerfGlobalStatus);
6155 CPUM_ASSERT_RD_MSR_FN(Ia32PerfGlobalCtrl);
6156 CPUM_ASSERT_RD_MSR_FN(Ia32PerfGlobalOvfCtrl);
6157 CPUM_ASSERT_RD_MSR_FN(Ia32PebsEnable);
6158 CPUM_ASSERT_RD_MSR_FN(Ia32ClockModulation);
6159 CPUM_ASSERT_RD_MSR_FN(Ia32ThermInterrupt);
6160 CPUM_ASSERT_RD_MSR_FN(Ia32ThermStatus);
6161 CPUM_ASSERT_RD_MSR_FN(Ia32MiscEnable);
6162 CPUM_ASSERT_RD_MSR_FN(Ia32McCtlStatusAddrMiscN);
6163 CPUM_ASSERT_RD_MSR_FN(Ia32McNCtl2);
6164 CPUM_ASSERT_RD_MSR_FN(Ia32DsArea);
6165 CPUM_ASSERT_RD_MSR_FN(Ia32TscDeadline);
6166 CPUM_ASSERT_RD_MSR_FN(Ia32X2ApicN);
6167 CPUM_ASSERT_RD_MSR_FN(Ia32DebugInterface);
6168 CPUM_ASSERT_RD_MSR_FN(Ia32VmxBasic);
6169 CPUM_ASSERT_RD_MSR_FN(Ia32VmxPinbasedCtls);
6170 CPUM_ASSERT_RD_MSR_FN(Ia32VmxProcbasedCtls);
6171 CPUM_ASSERT_RD_MSR_FN(Ia32VmxExitCtls);
6172 CPUM_ASSERT_RD_MSR_FN(Ia32VmxEntryCtls);
6173 CPUM_ASSERT_RD_MSR_FN(Ia32VmxMisc);
6174 CPUM_ASSERT_RD_MSR_FN(Ia32VmxCr0Fixed0);
6175 CPUM_ASSERT_RD_MSR_FN(Ia32VmxCr0Fixed1);
6176 CPUM_ASSERT_RD_MSR_FN(Ia32VmxCr4Fixed0);
6177 CPUM_ASSERT_RD_MSR_FN(Ia32VmxCr4Fixed1);
6178 CPUM_ASSERT_RD_MSR_FN(Ia32VmxVmcsEnum);
6179 CPUM_ASSERT_RD_MSR_FN(Ia32VmxProcBasedCtls2);
6180 CPUM_ASSERT_RD_MSR_FN(Ia32VmxEptVpidCap);
6181 CPUM_ASSERT_RD_MSR_FN(Ia32VmxTruePinbasedCtls);
6182 CPUM_ASSERT_RD_MSR_FN(Ia32VmxTrueProcbasedCtls);
6183 CPUM_ASSERT_RD_MSR_FN(Ia32VmxTrueExitCtls);
6184 CPUM_ASSERT_RD_MSR_FN(Ia32VmxTrueEntryCtls);
6185 CPUM_ASSERT_RD_MSR_FN(Ia32VmxVmFunc);
6186 CPUM_ASSERT_RD_MSR_FN(Ia32SpecCtrl);
6187 CPUM_ASSERT_RD_MSR_FN(Ia32ArchCapabilities);
6188
6189 CPUM_ASSERT_RD_MSR_FN(Amd64Efer);
6190 CPUM_ASSERT_RD_MSR_FN(Amd64SyscallTarget);
6191 CPUM_ASSERT_RD_MSR_FN(Amd64LongSyscallTarget);
6192 CPUM_ASSERT_RD_MSR_FN(Amd64CompSyscallTarget);
6193 CPUM_ASSERT_RD_MSR_FN(Amd64SyscallFlagMask);
6194 CPUM_ASSERT_RD_MSR_FN(Amd64FsBase);
6195 CPUM_ASSERT_RD_MSR_FN(Amd64GsBase);
6196 CPUM_ASSERT_RD_MSR_FN(Amd64KernelGsBase);
6197 CPUM_ASSERT_RD_MSR_FN(Amd64TscAux);
6198
6199 CPUM_ASSERT_RD_MSR_FN(IntelEblCrPowerOn);
6200 CPUM_ASSERT_RD_MSR_FN(IntelI7CoreThreadCount);
6201 CPUM_ASSERT_RD_MSR_FN(IntelP4EbcHardPowerOn);
6202 CPUM_ASSERT_RD_MSR_FN(IntelP4EbcSoftPowerOn);
6203 CPUM_ASSERT_RD_MSR_FN(IntelP4EbcFrequencyId);
6204 CPUM_ASSERT_RD_MSR_FN(IntelP6FsbFrequency);
6205 CPUM_ASSERT_RD_MSR_FN(IntelPlatformInfo);
6206 CPUM_ASSERT_RD_MSR_FN(IntelFlexRatio);
6207 CPUM_ASSERT_RD_MSR_FN(IntelPkgCStConfigControl);
6208 CPUM_ASSERT_RD_MSR_FN(IntelPmgIoCaptureBase);
6209 CPUM_ASSERT_RD_MSR_FN(IntelLastBranchFromToN);
6210 CPUM_ASSERT_RD_MSR_FN(IntelLastBranchFromN);
6211 CPUM_ASSERT_RD_MSR_FN(IntelLastBranchToN);
6212 CPUM_ASSERT_RD_MSR_FN(IntelLastBranchTos);
6213 CPUM_ASSERT_RD_MSR_FN(IntelBblCrCtl);
6214 CPUM_ASSERT_RD_MSR_FN(IntelBblCrCtl3);
6215 CPUM_ASSERT_RD_MSR_FN(IntelI7TemperatureTarget);
6216 CPUM_ASSERT_RD_MSR_FN(IntelI7MsrOffCoreResponseN);
6217 CPUM_ASSERT_RD_MSR_FN(IntelI7MiscPwrMgmt);
6218 CPUM_ASSERT_RD_MSR_FN(IntelP6CrN);
6219 CPUM_ASSERT_RD_MSR_FN(IntelCpuId1FeatureMaskEcdx);
6220 CPUM_ASSERT_RD_MSR_FN(IntelCpuId1FeatureMaskEax);
6221 CPUM_ASSERT_RD_MSR_FN(IntelCpuId80000001FeatureMaskEcdx);
6222 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyAesNiCtl);
6223 CPUM_ASSERT_RD_MSR_FN(IntelI7TurboRatioLimit);
6224 CPUM_ASSERT_RD_MSR_FN(IntelI7LbrSelect);
6225 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyErrorControl);
6226 CPUM_ASSERT_RD_MSR_FN(IntelI7VirtualLegacyWireCap);
6227 CPUM_ASSERT_RD_MSR_FN(IntelI7PowerCtl);
6228 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyPebsNumAlt);
6229 CPUM_ASSERT_RD_MSR_FN(IntelI7PebsLdLat);
6230 CPUM_ASSERT_RD_MSR_FN(IntelI7PkgCnResidencyN);
6231 CPUM_ASSERT_RD_MSR_FN(IntelI7CoreCnResidencyN);
6232 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyVrCurrentConfig);
6233 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyVrMiscConfig);
6234 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyRaplPowerUnit);
6235 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyPkgCnIrtlN);
6236 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyPkgC2Residency);
6237 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPkgPowerLimit);
6238 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPkgEnergyStatus);
6239 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPkgPerfStatus);
6240 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPkgPowerInfo);
6241 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplDramPowerLimit);
6242 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplDramEnergyStatus);
6243 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplDramPerfStatus);
6244 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplDramPowerInfo);
6245 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp0PowerLimit);
6246 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp0EnergyStatus);
6247 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp0Policy);
6248 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp0PerfStatus);
6249 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp1PowerLimit);
6250 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp1EnergyStatus);
6251 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp1Policy);
6252 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyConfigTdpNominal);
6253 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyConfigTdpLevel1);
6254 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyConfigTdpLevel2);
6255 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyConfigTdpControl);
6256 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyTurboActivationRatio);
6257 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfGlobalCtrl);
6258 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfGlobalStatus);
6259 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfGlobalOvfCtrl);
6260 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfFixedCtrCtrl);
6261 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfFixedCtr);
6262 CPUM_ASSERT_RD_MSR_FN(IntelI7UncCBoxConfig);
6263 CPUM_ASSERT_RD_MSR_FN(IntelI7UncArbPerfCtrN);
6264 CPUM_ASSERT_RD_MSR_FN(IntelI7UncArbPerfEvtSelN);
6265 CPUM_ASSERT_RD_MSR_FN(IntelI7SmiCount);
6266 CPUM_ASSERT_RD_MSR_FN(IntelCore2EmttmCrTablesN);
6267 CPUM_ASSERT_RD_MSR_FN(IntelCore2SmmCStMiscInfo);
6268 CPUM_ASSERT_RD_MSR_FN(IntelCore1ExtConfig);
6269 CPUM_ASSERT_RD_MSR_FN(IntelCore1DtsCalControl);
6270 CPUM_ASSERT_RD_MSR_FN(IntelCore2PeciControl);
6271 CPUM_ASSERT_RD_MSR_FN(IntelAtSilvCoreC1Recidency);
6272
6273 CPUM_ASSERT_RD_MSR_FN(P6LastBranchFromIp);
6274 CPUM_ASSERT_RD_MSR_FN(P6LastBranchToIp);
6275 CPUM_ASSERT_RD_MSR_FN(P6LastIntFromIp);
6276 CPUM_ASSERT_RD_MSR_FN(P6LastIntToIp);
6277
6278 CPUM_ASSERT_RD_MSR_FN(AmdFam15hTscRate);
6279 CPUM_ASSERT_RD_MSR_FN(AmdFam15hLwpCfg);
6280 CPUM_ASSERT_RD_MSR_FN(AmdFam15hLwpCbAddr);
6281 CPUM_ASSERT_RD_MSR_FN(AmdFam10hMc4MiscN);
6282 CPUM_ASSERT_RD_MSR_FN(AmdK8PerfCtlN);
6283 CPUM_ASSERT_RD_MSR_FN(AmdK8PerfCtrN);
6284 CPUM_ASSERT_RD_MSR_FN(AmdK8SysCfg);
6285 CPUM_ASSERT_RD_MSR_FN(AmdK8HwCr);
6286 CPUM_ASSERT_RD_MSR_FN(AmdK8IorrBaseN);
6287 CPUM_ASSERT_RD_MSR_FN(AmdK8IorrMaskN);
6288 CPUM_ASSERT_RD_MSR_FN(AmdK8TopOfMemN);
6289 CPUM_ASSERT_RD_MSR_FN(AmdK8NbCfg1);
6290 CPUM_ASSERT_RD_MSR_FN(AmdK8McXcptRedir);
6291 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuNameN);
6292 CPUM_ASSERT_RD_MSR_FN(AmdK8HwThermalCtrl);
6293 CPUM_ASSERT_RD_MSR_FN(AmdK8SwThermalCtrl);
6294 CPUM_ASSERT_RD_MSR_FN(AmdK8FidVidControl);
6295 CPUM_ASSERT_RD_MSR_FN(AmdK8FidVidStatus);
6296 CPUM_ASSERT_RD_MSR_FN(AmdK8McCtlMaskN);
6297 CPUM_ASSERT_RD_MSR_FN(AmdK8SmiOnIoTrapN);
6298 CPUM_ASSERT_RD_MSR_FN(AmdK8SmiOnIoTrapCtlSts);
6299 CPUM_ASSERT_RD_MSR_FN(AmdK8IntPendingMessage);
6300 CPUM_ASSERT_RD_MSR_FN(AmdK8SmiTriggerIoCycle);
6301 CPUM_ASSERT_RD_MSR_FN(AmdFam10hMmioCfgBaseAddr);
6302 CPUM_ASSERT_RD_MSR_FN(AmdFam10hTrapCtlMaybe);
6303 CPUM_ASSERT_RD_MSR_FN(AmdFam10hPStateCurLimit);
6304 CPUM_ASSERT_RD_MSR_FN(AmdFam10hPStateControl);
6305 CPUM_ASSERT_RD_MSR_FN(AmdFam10hPStateStatus);
6306 CPUM_ASSERT_RD_MSR_FN(AmdFam10hPStateN);
6307 CPUM_ASSERT_RD_MSR_FN(AmdFam10hCofVidControl);
6308 CPUM_ASSERT_RD_MSR_FN(AmdFam10hCofVidStatus);
6309 CPUM_ASSERT_RD_MSR_FN(AmdFam10hCStateIoBaseAddr);
6310 CPUM_ASSERT_RD_MSR_FN(AmdFam10hCpuWatchdogTimer);
6311 CPUM_ASSERT_RD_MSR_FN(AmdK8SmmBase);
6312 CPUM_ASSERT_RD_MSR_FN(AmdK8SmmAddr);
6313 CPUM_ASSERT_RD_MSR_FN(AmdK8SmmMask);
6314 CPUM_ASSERT_RD_MSR_FN(AmdK8VmCr);
6315 CPUM_ASSERT_RD_MSR_FN(AmdK8IgnNe);
6316 CPUM_ASSERT_RD_MSR_FN(AmdK8SmmCtl);
6317 CPUM_ASSERT_RD_MSR_FN(AmdK8VmHSavePa);
6318 CPUM_ASSERT_RD_MSR_FN(AmdFam10hVmLockKey);
6319 CPUM_ASSERT_RD_MSR_FN(AmdFam10hSmmLockKey);
6320 CPUM_ASSERT_RD_MSR_FN(AmdFam10hLocalSmiStatus);
6321 CPUM_ASSERT_RD_MSR_FN(AmdFam10hOsVisWrkIdLength);
6322 CPUM_ASSERT_RD_MSR_FN(AmdFam10hOsVisWrkStatus);
6323 CPUM_ASSERT_RD_MSR_FN(AmdFam16hL2IPerfCtlN);
6324 CPUM_ASSERT_RD_MSR_FN(AmdFam16hL2IPerfCtrN);
6325 CPUM_ASSERT_RD_MSR_FN(AmdFam15hNorthbridgePerfCtlN);
6326 CPUM_ASSERT_RD_MSR_FN(AmdFam15hNorthbridgePerfCtrN);
6327 CPUM_ASSERT_RD_MSR_FN(AmdK7MicrocodeCtl);
6328 CPUM_ASSERT_RD_MSR_FN(AmdK7ClusterIdMaybe);
6329 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuIdCtlStd07hEbax);
6330 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuIdCtlStd06hEcx);
6331 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuIdCtlStd01hEdcx);
6332 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuIdCtlExt01hEdcx);
6333 CPUM_ASSERT_RD_MSR_FN(AmdK8PatchLevel);
6334 CPUM_ASSERT_RD_MSR_FN(AmdK7DebugStatusMaybe);
6335 CPUM_ASSERT_RD_MSR_FN(AmdK7BHTraceBaseMaybe);
6336 CPUM_ASSERT_RD_MSR_FN(AmdK7BHTracePtrMaybe);
6337 CPUM_ASSERT_RD_MSR_FN(AmdK7BHTraceLimitMaybe);
6338 CPUM_ASSERT_RD_MSR_FN(AmdK7HardwareDebugToolCfgMaybe);
6339 CPUM_ASSERT_RD_MSR_FN(AmdK7FastFlushCountMaybe);
6340 CPUM_ASSERT_RD_MSR_FN(AmdK7NodeId);
6341 CPUM_ASSERT_RD_MSR_FN(AmdK7DrXAddrMaskN);
6342 CPUM_ASSERT_RD_MSR_FN(AmdK7Dr0DataMatchMaybe);
6343 CPUM_ASSERT_RD_MSR_FN(AmdK7Dr0DataMaskMaybe);
6344 CPUM_ASSERT_RD_MSR_FN(AmdK7LoadStoreCfg);
6345 CPUM_ASSERT_RD_MSR_FN(AmdK7InstrCacheCfg);
6346 CPUM_ASSERT_RD_MSR_FN(AmdK7DataCacheCfg);
6347 CPUM_ASSERT_RD_MSR_FN(AmdK7BusUnitCfg);
6348 CPUM_ASSERT_RD_MSR_FN(AmdK7DebugCtl2Maybe);
6349 CPUM_ASSERT_RD_MSR_FN(AmdFam15hFpuCfg);
6350 CPUM_ASSERT_RD_MSR_FN(AmdFam15hDecoderCfg);
6351 CPUM_ASSERT_RD_MSR_FN(AmdFam10hBusUnitCfg2);
6352 CPUM_ASSERT_RD_MSR_FN(AmdFam15hCombUnitCfg);
6353 CPUM_ASSERT_RD_MSR_FN(AmdFam15hCombUnitCfg2);
6354 CPUM_ASSERT_RD_MSR_FN(AmdFam15hCombUnitCfg3);
6355 CPUM_ASSERT_RD_MSR_FN(AmdFam15hExecUnitCfg);
6356 CPUM_ASSERT_RD_MSR_FN(AmdFam15hLoadStoreCfg2);
6357 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsFetchCtl);
6358 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsFetchLinAddr);
6359 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsFetchPhysAddr);
6360 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpExecCtl);
6361 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpRip);
6362 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpData);
6363 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpData2);
6364 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpData3);
6365 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsDcLinAddr);
6366 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsDcPhysAddr);
6367 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsCtl);
6368 CPUM_ASSERT_RD_MSR_FN(AmdFam14hIbsBrTarget);
6369
6370 CPUM_ASSERT_RD_MSR_FN(Gim)
6371
6372 AssertReturn(g_aCpumWrMsrFns[kCpumMsrWrFn_Invalid] == NULL, VERR_CPUM_IPE_2);
6373 CPUM_ASSERT_WR_MSR_FN(Ia32P5McAddr);
6374 CPUM_ASSERT_WR_MSR_FN(Ia32P5McType);
6375 CPUM_ASSERT_WR_MSR_FN(Ia32TimestampCounter);
6376 CPUM_ASSERT_WR_MSR_FN(Ia32ApicBase);
6377 CPUM_ASSERT_WR_MSR_FN(Ia32FeatureControl);
6378 CPUM_ASSERT_WR_MSR_FN(Ia32BiosSignId);
6379 CPUM_ASSERT_WR_MSR_FN(Ia32BiosUpdateTrigger);
6380 CPUM_ASSERT_WR_MSR_FN(Ia32SmmMonitorCtl);
6381 CPUM_ASSERT_WR_MSR_FN(Ia32PmcN);
6382 CPUM_ASSERT_WR_MSR_FN(Ia32MonitorFilterLineSize);
6383 CPUM_ASSERT_WR_MSR_FN(Ia32MPerf);
6384 CPUM_ASSERT_WR_MSR_FN(Ia32APerf);
6385 CPUM_ASSERT_WR_MSR_FN(Ia32MtrrPhysBaseN);
6386 CPUM_ASSERT_WR_MSR_FN(Ia32MtrrPhysMaskN);
6387 CPUM_ASSERT_WR_MSR_FN(Ia32MtrrFixed);
6388 CPUM_ASSERT_WR_MSR_FN(Ia32MtrrDefType);
6389 CPUM_ASSERT_WR_MSR_FN(Ia32Pat);
6390 CPUM_ASSERT_WR_MSR_FN(Ia32SysEnterCs);
6391 CPUM_ASSERT_WR_MSR_FN(Ia32SysEnterEsp);
6392 CPUM_ASSERT_WR_MSR_FN(Ia32SysEnterEip);
6393 CPUM_ASSERT_WR_MSR_FN(Ia32McgStatus);
6394 CPUM_ASSERT_WR_MSR_FN(Ia32McgCtl);
6395 CPUM_ASSERT_WR_MSR_FN(Ia32DebugCtl);
6396 CPUM_ASSERT_WR_MSR_FN(Ia32SmrrPhysBase);
6397 CPUM_ASSERT_WR_MSR_FN(Ia32SmrrPhysMask);
6398 CPUM_ASSERT_WR_MSR_FN(Ia32PlatformDcaCap);
6399 CPUM_ASSERT_WR_MSR_FN(Ia32Dca0Cap);
6400 CPUM_ASSERT_WR_MSR_FN(Ia32PerfEvtSelN);
6401 CPUM_ASSERT_WR_MSR_FN(Ia32PerfStatus);
6402 CPUM_ASSERT_WR_MSR_FN(Ia32PerfCtl);
6403 CPUM_ASSERT_WR_MSR_FN(Ia32FixedCtrN);
6404 CPUM_ASSERT_WR_MSR_FN(Ia32PerfCapabilities);
6405 CPUM_ASSERT_WR_MSR_FN(Ia32FixedCtrCtrl);
6406 CPUM_ASSERT_WR_MSR_FN(Ia32PerfGlobalStatus);
6407 CPUM_ASSERT_WR_MSR_FN(Ia32PerfGlobalCtrl);
6408 CPUM_ASSERT_WR_MSR_FN(Ia32PerfGlobalOvfCtrl);
6409 CPUM_ASSERT_WR_MSR_FN(Ia32PebsEnable);
6410 CPUM_ASSERT_WR_MSR_FN(Ia32ClockModulation);
6411 CPUM_ASSERT_WR_MSR_FN(Ia32ThermInterrupt);
6412 CPUM_ASSERT_WR_MSR_FN(Ia32ThermStatus);
6413 CPUM_ASSERT_WR_MSR_FN(Ia32MiscEnable);
6414 CPUM_ASSERT_WR_MSR_FN(Ia32McCtlStatusAddrMiscN);
6415 CPUM_ASSERT_WR_MSR_FN(Ia32McNCtl2);
6416 CPUM_ASSERT_WR_MSR_FN(Ia32DsArea);
6417 CPUM_ASSERT_WR_MSR_FN(Ia32TscDeadline);
6418 CPUM_ASSERT_WR_MSR_FN(Ia32X2ApicN);
6419 CPUM_ASSERT_WR_MSR_FN(Ia32DebugInterface);
6420 CPUM_ASSERT_WR_MSR_FN(Ia32SpecCtrl);
6421 CPUM_ASSERT_WR_MSR_FN(Ia32PredCmd);
6422
6423 CPUM_ASSERT_WR_MSR_FN(Amd64Efer);
6424 CPUM_ASSERT_WR_MSR_FN(Amd64SyscallTarget);
6425 CPUM_ASSERT_WR_MSR_FN(Amd64LongSyscallTarget);
6426 CPUM_ASSERT_WR_MSR_FN(Amd64CompSyscallTarget);
6427 CPUM_ASSERT_WR_MSR_FN(Amd64SyscallFlagMask);
6428 CPUM_ASSERT_WR_MSR_FN(Amd64FsBase);
6429 CPUM_ASSERT_WR_MSR_FN(Amd64GsBase);
6430 CPUM_ASSERT_WR_MSR_FN(Amd64KernelGsBase);
6431 CPUM_ASSERT_WR_MSR_FN(Amd64TscAux);
6432
6433 CPUM_ASSERT_WR_MSR_FN(IntelEblCrPowerOn);
6434 CPUM_ASSERT_WR_MSR_FN(IntelP4EbcHardPowerOn);
6435 CPUM_ASSERT_WR_MSR_FN(IntelP4EbcSoftPowerOn);
6436 CPUM_ASSERT_WR_MSR_FN(IntelP4EbcFrequencyId);
6437 CPUM_ASSERT_WR_MSR_FN(IntelFlexRatio);
6438 CPUM_ASSERT_WR_MSR_FN(IntelPkgCStConfigControl);
6439 CPUM_ASSERT_WR_MSR_FN(IntelPmgIoCaptureBase);
6440 CPUM_ASSERT_WR_MSR_FN(IntelLastBranchFromToN);
6441 CPUM_ASSERT_WR_MSR_FN(IntelLastBranchFromN);
6442 CPUM_ASSERT_WR_MSR_FN(IntelLastBranchToN);
6443 CPUM_ASSERT_WR_MSR_FN(IntelLastBranchTos);
6444 CPUM_ASSERT_WR_MSR_FN(IntelBblCrCtl);
6445 CPUM_ASSERT_WR_MSR_FN(IntelBblCrCtl3);
6446 CPUM_ASSERT_WR_MSR_FN(IntelI7TemperatureTarget);
6447 CPUM_ASSERT_WR_MSR_FN(IntelI7MsrOffCoreResponseN);
6448 CPUM_ASSERT_WR_MSR_FN(IntelI7MiscPwrMgmt);
6449 CPUM_ASSERT_WR_MSR_FN(IntelP6CrN);
6450 CPUM_ASSERT_WR_MSR_FN(IntelCpuId1FeatureMaskEcdx);
6451 CPUM_ASSERT_WR_MSR_FN(IntelCpuId1FeatureMaskEax);
6452 CPUM_ASSERT_WR_MSR_FN(IntelCpuId80000001FeatureMaskEcdx);
6453 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyAesNiCtl);
6454 CPUM_ASSERT_WR_MSR_FN(IntelI7TurboRatioLimit);
6455 CPUM_ASSERT_WR_MSR_FN(IntelI7LbrSelect);
6456 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyErrorControl);
6457 CPUM_ASSERT_WR_MSR_FN(IntelI7PowerCtl);
6458 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyPebsNumAlt);
6459 CPUM_ASSERT_WR_MSR_FN(IntelI7PebsLdLat);
6460 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyVrCurrentConfig);
6461 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyVrMiscConfig);
6462 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyPkgCnIrtlN);
6463 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyPkgC2Residency);
6464 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPkgPowerLimit);
6465 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplDramPowerLimit);
6466 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPp0PowerLimit);
6467 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPp0Policy);
6468 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPp1PowerLimit);
6469 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPp1Policy);
6470 CPUM_ASSERT_WR_MSR_FN(IntelI7IvyConfigTdpControl);
6471 CPUM_ASSERT_WR_MSR_FN(IntelI7IvyTurboActivationRatio);
6472 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfGlobalCtrl);
6473 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfGlobalStatus);
6474 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfGlobalOvfCtrl);
6475 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfFixedCtrCtrl);
6476 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfFixedCtr);
6477 CPUM_ASSERT_WR_MSR_FN(IntelI7UncArbPerfCtrN);
6478 CPUM_ASSERT_WR_MSR_FN(IntelI7UncArbPerfEvtSelN);
6479 CPUM_ASSERT_WR_MSR_FN(IntelCore2EmttmCrTablesN);
6480 CPUM_ASSERT_WR_MSR_FN(IntelCore2SmmCStMiscInfo);
6481 CPUM_ASSERT_WR_MSR_FN(IntelCore1ExtConfig);
6482 CPUM_ASSERT_WR_MSR_FN(IntelCore1DtsCalControl);
6483 CPUM_ASSERT_WR_MSR_FN(IntelCore2PeciControl);
6484
6485 CPUM_ASSERT_WR_MSR_FN(P6LastIntFromIp);
6486 CPUM_ASSERT_WR_MSR_FN(P6LastIntToIp);
6487
6488 CPUM_ASSERT_WR_MSR_FN(AmdFam15hTscRate);
6489 CPUM_ASSERT_WR_MSR_FN(AmdFam15hLwpCfg);
6490 CPUM_ASSERT_WR_MSR_FN(AmdFam15hLwpCbAddr);
6491 CPUM_ASSERT_WR_MSR_FN(AmdFam10hMc4MiscN);
6492 CPUM_ASSERT_WR_MSR_FN(AmdK8PerfCtlN);
6493 CPUM_ASSERT_WR_MSR_FN(AmdK8PerfCtrN);
6494 CPUM_ASSERT_WR_MSR_FN(AmdK8SysCfg);
6495 CPUM_ASSERT_WR_MSR_FN(AmdK8HwCr);
6496 CPUM_ASSERT_WR_MSR_FN(AmdK8IorrBaseN);
6497 CPUM_ASSERT_WR_MSR_FN(AmdK8IorrMaskN);
6498 CPUM_ASSERT_WR_MSR_FN(AmdK8TopOfMemN);
6499 CPUM_ASSERT_WR_MSR_FN(AmdK8NbCfg1);
6500 CPUM_ASSERT_WR_MSR_FN(AmdK8McXcptRedir);
6501 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuNameN);
6502 CPUM_ASSERT_WR_MSR_FN(AmdK8HwThermalCtrl);
6503 CPUM_ASSERT_WR_MSR_FN(AmdK8SwThermalCtrl);
6504 CPUM_ASSERT_WR_MSR_FN(AmdK8FidVidControl);
6505 CPUM_ASSERT_WR_MSR_FN(AmdK8McCtlMaskN);
6506 CPUM_ASSERT_WR_MSR_FN(AmdK8SmiOnIoTrapN);
6507 CPUM_ASSERT_WR_MSR_FN(AmdK8SmiOnIoTrapCtlSts);
6508 CPUM_ASSERT_WR_MSR_FN(AmdK8IntPendingMessage);
6509 CPUM_ASSERT_WR_MSR_FN(AmdK8SmiTriggerIoCycle);
6510 CPUM_ASSERT_WR_MSR_FN(AmdFam10hMmioCfgBaseAddr);
6511 CPUM_ASSERT_WR_MSR_FN(AmdFam10hTrapCtlMaybe);
6512 CPUM_ASSERT_WR_MSR_FN(AmdFam10hPStateControl);
6513 CPUM_ASSERT_WR_MSR_FN(AmdFam10hPStateStatus);
6514 CPUM_ASSERT_WR_MSR_FN(AmdFam10hPStateN);
6515 CPUM_ASSERT_WR_MSR_FN(AmdFam10hCofVidControl);
6516 CPUM_ASSERT_WR_MSR_FN(AmdFam10hCofVidStatus);
6517 CPUM_ASSERT_WR_MSR_FN(AmdFam10hCStateIoBaseAddr);
6518 CPUM_ASSERT_WR_MSR_FN(AmdFam10hCpuWatchdogTimer);
6519 CPUM_ASSERT_WR_MSR_FN(AmdK8SmmBase);
6520 CPUM_ASSERT_WR_MSR_FN(AmdK8SmmAddr);
6521 CPUM_ASSERT_WR_MSR_FN(AmdK8SmmMask);
6522 CPUM_ASSERT_WR_MSR_FN(AmdK8VmCr);
6523 CPUM_ASSERT_WR_MSR_FN(AmdK8IgnNe);
6524 CPUM_ASSERT_WR_MSR_FN(AmdK8SmmCtl);
6525 CPUM_ASSERT_WR_MSR_FN(AmdK8VmHSavePa);
6526 CPUM_ASSERT_WR_MSR_FN(AmdFam10hVmLockKey);
6527 CPUM_ASSERT_WR_MSR_FN(AmdFam10hSmmLockKey);
6528 CPUM_ASSERT_WR_MSR_FN(AmdFam10hLocalSmiStatus);
6529 CPUM_ASSERT_WR_MSR_FN(AmdFam10hOsVisWrkIdLength);
6530 CPUM_ASSERT_WR_MSR_FN(AmdFam10hOsVisWrkStatus);
6531 CPUM_ASSERT_WR_MSR_FN(AmdFam16hL2IPerfCtlN);
6532 CPUM_ASSERT_WR_MSR_FN(AmdFam16hL2IPerfCtrN);
6533 CPUM_ASSERT_WR_MSR_FN(AmdFam15hNorthbridgePerfCtlN);
6534 CPUM_ASSERT_WR_MSR_FN(AmdFam15hNorthbridgePerfCtrN);
6535 CPUM_ASSERT_WR_MSR_FN(AmdK7MicrocodeCtl);
6536 CPUM_ASSERT_WR_MSR_FN(AmdK7ClusterIdMaybe);
6537 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuIdCtlStd07hEbax);
6538 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuIdCtlStd06hEcx);
6539 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuIdCtlStd01hEdcx);
6540 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuIdCtlExt01hEdcx);
6541 CPUM_ASSERT_WR_MSR_FN(AmdK8PatchLoader);
6542 CPUM_ASSERT_WR_MSR_FN(AmdK7DebugStatusMaybe);
6543 CPUM_ASSERT_WR_MSR_FN(AmdK7BHTraceBaseMaybe);
6544 CPUM_ASSERT_WR_MSR_FN(AmdK7BHTracePtrMaybe);
6545 CPUM_ASSERT_WR_MSR_FN(AmdK7BHTraceLimitMaybe);
6546 CPUM_ASSERT_WR_MSR_FN(AmdK7HardwareDebugToolCfgMaybe);
6547 CPUM_ASSERT_WR_MSR_FN(AmdK7FastFlushCountMaybe);
6548 CPUM_ASSERT_WR_MSR_FN(AmdK7NodeId);
6549 CPUM_ASSERT_WR_MSR_FN(AmdK7DrXAddrMaskN);
6550 CPUM_ASSERT_WR_MSR_FN(AmdK7Dr0DataMatchMaybe);
6551 CPUM_ASSERT_WR_MSR_FN(AmdK7Dr0DataMaskMaybe);
6552 CPUM_ASSERT_WR_MSR_FN(AmdK7LoadStoreCfg);
6553 CPUM_ASSERT_WR_MSR_FN(AmdK7InstrCacheCfg);
6554 CPUM_ASSERT_WR_MSR_FN(AmdK7DataCacheCfg);
6555 CPUM_ASSERT_WR_MSR_FN(AmdK7BusUnitCfg);
6556 CPUM_ASSERT_WR_MSR_FN(AmdK7DebugCtl2Maybe);
6557 CPUM_ASSERT_WR_MSR_FN(AmdFam15hFpuCfg);
6558 CPUM_ASSERT_WR_MSR_FN(AmdFam15hDecoderCfg);
6559 CPUM_ASSERT_WR_MSR_FN(AmdFam10hBusUnitCfg2);
6560 CPUM_ASSERT_WR_MSR_FN(AmdFam15hCombUnitCfg);
6561 CPUM_ASSERT_WR_MSR_FN(AmdFam15hCombUnitCfg2);
6562 CPUM_ASSERT_WR_MSR_FN(AmdFam15hCombUnitCfg3);
6563 CPUM_ASSERT_WR_MSR_FN(AmdFam15hExecUnitCfg);
6564 CPUM_ASSERT_WR_MSR_FN(AmdFam15hLoadStoreCfg2);
6565 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsFetchCtl);
6566 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsFetchLinAddr);
6567 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsFetchPhysAddr);
6568 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpExecCtl);
6569 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpRip);
6570 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpData);
6571 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpData2);
6572 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpData3);
6573 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsDcLinAddr);
6574 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsDcPhysAddr);
6575 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsCtl);
6576 CPUM_ASSERT_WR_MSR_FN(AmdFam14hIbsBrTarget);
6577
6578 CPUM_ASSERT_WR_MSR_FN(Gim);
6579
6580 return VINF_SUCCESS;
6581}
6582#endif /* VBOX_STRICT && IN_RING3 */
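
/*
 * Editorial aside, not part of the original source: the checks above verify at
 * init time that each kCpumMsrRdFn and kCpumMsrWrFn enum value indexes the
 * matching worker in the positional tables.  A hypothetical alternative would
 * be designated initializers (C99; not available in all C++ dialects used to
 * build this code), which tie index and worker together at the definition
 * site, as sketched and disabled below.
 */
#if 0 /* illustrative only */
static const PFNCPUMRDMSR g_aExampleRdMsrFns[kCpumMsrRdFn_End] =
{
    [kCpumMsrRdFn_Ia32Pat]              = cpumMsrRd_Ia32Pat,
    [kCpumMsrRdFn_Ia32TimestampCounter] = cpumMsrRd_Ia32TimestampCounter,
    /* ... one entry per kCpumMsrRdFn value ... */
};
#endif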
6583
6584
6585/**
6586 * Gets the scalable bus frequency.
6587 *
6588 * The bus frequency is used as a base in several MSRs that give the CPU and
6589 * other frequency ratios.
6590 *
6591 * @returns Scalable bus frequency in Hz. Will not return CPUM_SBUSFREQ_UNKNOWN.
6592 * @param pVM The cross context VM structure.
6593 */
6594VMMDECL(uint64_t) CPUMGetGuestScalableBusFrequency(PVM pVM)
6595{
6596 uint64_t uFreq = pVM->cpum.s.GuestInfo.uScalableBusFreq;
6597 if (uFreq == CPUM_SBUSFREQ_UNKNOWN)
6598 uFreq = CPUM_SBUSFREQ_100MHZ;
6599 return uFreq;
6600}
6601
6602
6603/**
6604 * Sets the guest EFER MSR without performing any additional checks.
6605 *
6606 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6607 * @param uOldEfer The previous EFER MSR value.
6608 * @param uValidEfer The new, validated EFER MSR value.
6609 *
6610 * @remarks One would normally call CPUMIsGuestEferMsrWriteValid() to validate the
6611 *          new value before calling this function to perform the EFER transition.
6612 */
6613VMMDECL(void) CPUMSetGuestEferMsrNoChecks(PVMCPU pVCpu, uint64_t uOldEfer, uint64_t uValidEfer)
6614{
6615 pVCpu->cpum.s.Guest.msrEFER = uValidEfer;
6616
6617 /* AMD64 Architecture Programmer's Manual: 15.15 TLB Control; flush the TLB
6618 if MSR_K6_EFER_NXE, MSR_K6_EFER_LME or MSR_K6_EFER_LMA are changed. */
6619 if ( (uOldEfer & (MSR_K6_EFER_NXE | MSR_K6_EFER_LME | MSR_K6_EFER_LMA))
6620 != (pVCpu->cpum.s.Guest.msrEFER & (MSR_K6_EFER_NXE | MSR_K6_EFER_LME | MSR_K6_EFER_LMA)))
6621 {
6622 /// @todo PGMFlushTLB(pVCpu, cr3, true /*fGlobal*/);
6623 HMFlushTLB(pVCpu);
6624
6625 /* Notify PGM about NXE changes. */
6626 if ( (uOldEfer & MSR_K6_EFER_NXE)
6627 != (pVCpu->cpum.s.Guest.msrEFER & MSR_K6_EFER_NXE))
6628 PGMNotifyNxeChanged(pVCpu, !(uOldEfer & MSR_K6_EFER_NXE));
6629 }
6630}
6631
6632
6633/**
6634 * Checks if a guest PAT MSR write is valid.
6635 *
6636 * @returns @c true if the PAT bit combination is valid, @c false otherwise.
6637 * @param uValue The PAT MSR value.
6638 */
6639VMMDECL(bool) CPUMIsPatMsrValid(uint64_t uValue)
6640{
6641 for (uint32_t cShift = 0; cShift < 63; cShift += 8)
6642 {
6643 /* Check all eight bits because the top 5 bits of each byte are reserved. */
6644 uint8_t uType = (uint8_t)(uValue >> cShift);
6645 if ((uType >= 8) || (uType == 2) || (uType == 3))
6646 {
6647 Log(("CPUM: Invalid PAT type at %u:%u in IA32_PAT: %#llx (%#llx)\n", cShift + 7, cShift, uValue, uType));
6648 return false;
6649 }
6650 }
6651 return true;
6652}
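
/*
 * Editorial sketch, not part of the original source: each byte of IA32_PAT
 * selects one memory type, and only 0 (UC), 1 (WC), 4 (WT), 5 (WP), 6 (WB)
 * and 7 (UC-) are defined; types 2 and 3 and any set reserved bit are what
 * the loop above rejects.  A few illustrative checks:
 */
static void exampleCheckPatValues(void)
{
    Assert( CPUMIsPatMsrValid(UINT64_C(0x0007040600070406)));  /* The architectural power-on PAT value. */
    Assert(!CPUMIsPatMsrValid(UINT64_C(0x0007040600070402)));  /* Type 2 in the low byte is reserved. */
    Assert(!CPUMIsPatMsrValid(UINT64_C(0x0007040600070486)));  /* Reserved bit 7 set in the low byte. */
}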
6653
6654
6655/**
6656 * Validates an EFER MSR write and provides the new, validated EFER MSR.
6657 *
6658 * @returns VBox status code.
6659 * @param pVM The cross context VM structure.
6660 * @param uCr0 The CR0 of the CPU corresponding to the EFER MSR.
6661 * @param uOldEfer Value of the previous EFER MSR on the CPU if any.
6662 * @param uNewEfer The new EFER MSR value being written.
6663 * @param puValidEfer Where to store the validated EFER (only updated if
6664 * this function returns VINF_SUCCESS).
6665 */
6666VMMDECL(int) CPUMIsGuestEferMsrWriteValid(PVM pVM, uint64_t uCr0, uint64_t uOldEfer, uint64_t uNewEfer, uint64_t *puValidEfer)
6667{
6668 /* #GP(0) If anything outside the allowed bits is set. */
6669 uint64_t fMask = CPUMGetGuestEferMsrValidMask(pVM);
6670 if (uNewEfer & ~fMask)
6671 {
6672        Log(("CPUM: Setting a disallowed EFER bit. uNewEfer=%#RX64 fAllowed=%#RX64 -> #GP(0)\n", uNewEfer, fMask));
6673 return VERR_CPUM_RAISE_GP_0;
6674 }
6675
6676 /* Check for illegal MSR_K6_EFER_LME transitions: not allowed to change LME if
6677 paging is enabled. (AMD Arch. Programmer's Manual Volume 2: Table 14-5) */
6678 if ( (uOldEfer & MSR_K6_EFER_LME) != (uNewEfer & MSR_K6_EFER_LME)
6679 && (uCr0 & X86_CR0_PG))
6680 {
6681 Log(("CPUM: Illegal MSR_K6_EFER_LME change: paging is enabled!!\n"));
6682 return VERR_CPUM_RAISE_GP_0;
6683 }
6684
6685 /* There are a few more: e.g. MSR_K6_EFER_LMSLE. */
6686 AssertMsg(!(uNewEfer & ~( MSR_K6_EFER_NXE
6687 | MSR_K6_EFER_LME
6688 | MSR_K6_EFER_LMA /* ignored anyway */
6689 | MSR_K6_EFER_SCE
6690 | MSR_K6_EFER_FFXSR
6691 | MSR_K6_EFER_SVME)),
6692 ("Unexpected value %#RX64\n", uNewEfer));
6693
6694 /* Ignore EFER.LMA, it's updated when setting CR0. */
6695 fMask &= ~MSR_K6_EFER_LMA;
6696
6697 *puValidEfer = (uOldEfer & ~fMask) | (uNewEfer & fMask);
6698 return VINF_SUCCESS;
6699}
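
/*
 * Editorial sketch, not part of the original source: the intended pairing of
 * the validation helper above with CPUMSetGuestEferMsrNoChecks(), as a
 * hypothetical WRMSR path might use it.  Only the two CPUM calls and the
 * status codes are taken from this file; the wrapper and its name are
 * illustrative.
 */
static VBOXSTRICTRC exampleWriteGuestEfer(PVMCPU pVCpu, uint64_t uNewEfer)
{
    uint64_t const uOldEfer   = pVCpu->cpum.s.Guest.msrEFER;
    uint64_t       uValidEfer = 0;
    int rc = CPUMIsGuestEferMsrWriteValid(pVCpu->CTX_SUFF(pVM), pVCpu->cpum.s.Guest.cr0, uOldEfer, uNewEfer, &uValidEfer);
    if (RT_FAILURE(rc))
        return VERR_CPUM_RAISE_GP_0;    /* Reserved bit set or an illegal LME change while paging is enabled. */
    CPUMSetGuestEferMsrNoChecks(pVCpu, uOldEfer, uValidEfer);
    return VINF_SUCCESS;
}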
6700
6701
6702/**
6703 * Gets the mask of valid EFER bits depending on supported guest-CPU features.
6704 *
6705 * @returns Mask of valid EFER bits.
6706 * @param pVM The cross context VM structure.
6707 *
6708 * @remarks EFER.LMA is included as part of the valid mask. It's not invalid but
6709 * rather a read-only bit.
6710 */
6711VMMDECL(uint64_t) CPUMGetGuestEferMsrValidMask(PVM pVM)
6712{
6713 uint32_t const fExtFeatures = pVM->cpum.s.aGuestCpuIdPatmExt[0].uEax >= 0x80000001
6714 ? pVM->cpum.s.aGuestCpuIdPatmExt[1].uEdx
6715 : 0;
6716 uint64_t fMask = 0;
6717 uint64_t const fIgnoreMask = MSR_K6_EFER_LMA;
6718
6719    /* Collect the bits the guest is allowed to change. (e.g. LMA is read-only) */
6720 if (fExtFeatures & X86_CPUID_EXT_FEATURE_EDX_NX)
6721 fMask |= MSR_K6_EFER_NXE;
6722 if (fExtFeatures & X86_CPUID_EXT_FEATURE_EDX_LONG_MODE)
6723 fMask |= MSR_K6_EFER_LME;
6724 if (fExtFeatures & X86_CPUID_EXT_FEATURE_EDX_SYSCALL)
6725 fMask |= MSR_K6_EFER_SCE;
6726 if (fExtFeatures & X86_CPUID_AMD_FEATURE_EDX_FFXSR)
6727 fMask |= MSR_K6_EFER_FFXSR;
6728 if (pVM->cpum.s.GuestFeatures.fSvm)
6729 fMask |= MSR_K6_EFER_SVME;
6730
6731 return (fIgnoreMask | fMask);
6732}
6733
6734
6735/**
6736 * Fast way for HM to access the MSR_K8_TSC_AUX register.
6737 *
6738 * @returns The register value.
6739 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6740 * @thread EMT(pVCpu)
6741 */
6742VMM_INT_DECL(uint64_t) CPUMGetGuestTscAux(PVMCPU pVCpu)
6743{
6744 Assert(!(pVCpu->cpum.s.Guest.fExtrn & CPUMCTX_EXTRN_TSC_AUX));
6745 return pVCpu->cpum.s.GuestMsrs.msr.TscAux;
6746}
6747
6748
6749/**
6750 * Fast way for HM to access the MSR_K8_TSC_AUX register.
6751 *
6752 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6753 * @param uValue The new value.
6754 * @thread EMT(pVCpu)
6755 */
6756VMM_INT_DECL(void) CPUMSetGuestTscAux(PVMCPU pVCpu, uint64_t uValue)
6757{
6758 pVCpu->cpum.s.Guest.fExtrn &= ~CPUMCTX_EXTRN_TSC_AUX;
6759 pVCpu->cpum.s.GuestMsrs.msr.TscAux = uValue;
6760}
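
/*
 * Editorial sketch, not part of the original source: the fast TSC_AUX
 * accessors above assume the value is not marked as externally held (the
 * getter asserts CPUMCTX_EXTRN_TSC_AUX is clear, the setter clears it).  A
 * trivial, purely illustrative round trip:
 */
static void exampleTscAuxRoundTrip(PVMCPU pVCpu)
{
    uint64_t const uSaved = CPUMGetGuestTscAux(pVCpu);
    CPUMSetGuestTscAux(pVCpu, UINT64_C(42));    /* Hypothetical new value, e.g. a per-VCPU id. */
    Assert(CPUMGetGuestTscAux(pVCpu) == 42);
    CPUMSetGuestTscAux(pVCpu, uSaved);          /* Restore the original value. */
}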
6761
6762
6763/**
6764 * Fast way for HM to access the IA32_SPEC_CTRL register.
6765 *
6766 * @returns The register value.
6767 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6768 * @thread EMT(pVCpu)
6769 */
6770VMM_INT_DECL(uint64_t) CPUMGetGuestSpecCtrl(PVMCPU pVCpu)
6771{
6772 return pVCpu->cpum.s.GuestMsrs.msr.SpecCtrl;
6773}
6774
6775
6776/**
6777 * Fast way for HM to access the IA32_SPEC_CTRL register.
6778 *
6779 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6780 * @param uValue The new value.
6781 * @thread EMT(pVCpu)
6782 */
6783VMM_INT_DECL(void) CPUMSetGuestSpecCtrl(PVMCPU pVCpu, uint64_t uValue)
6784{
6785 pVCpu->cpum.s.GuestMsrs.msr.SpecCtrl = uValue;
6786}
6787