VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/CPUMAllMsrs.cpp @ 107044

Last change on this file since 107044 was 106061, checked in by vboxsync, 2 months ago:
Copyright year updates by scm.

1/* $Id: CPUMAllMsrs.cpp 106061 2024-09-16 14:03:52Z vboxsync $ */
2/** @file
3 * CPUM - CPU MSR Registers.
4 */
5
6/*
7 * Copyright (C) 2013-2024 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28
29/*********************************************************************************************************************************
30* Header Files *
31*********************************************************************************************************************************/
32#define LOG_GROUP LOG_GROUP_CPUM
33#include <VBox/vmm/cpum.h>
34#include <VBox/vmm/apic.h>
35#include <VBox/vmm/hm.h>
36#include <VBox/vmm/hm_vmx.h>
37#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
38# include <VBox/vmm/iem.h>
39#endif
40#include <VBox/vmm/tm.h>
41#include <VBox/vmm/gim.h>
42#include "CPUMInternal.h"
43#include <VBox/vmm/vmcc.h>
44#include <VBox/err.h>
45
46
47/*********************************************************************************************************************************
48* Defined Constants And Macros *
49*********************************************************************************************************************************/
50/**
51 * Validates the CPUMMSRRANGE::offCpumCpu value and declares a local variable
52 * pointing to it.
53 *
54 * ASSUMES sizeof(a_Type) is a power of two and that the member is aligned
55 * correctly.
56 */
57#define CPUM_MSR_ASSERT_CPUMCPU_OFFSET_RETURN(a_pVCpu, a_pRange, a_Type, a_VarName) \
58 AssertMsgReturn( (a_pRange)->offCpumCpu >= 8 \
59 && (a_pRange)->offCpumCpu < sizeof(CPUMCPU) \
60 && !((a_pRange)->offCpumCpu & (RT_MIN(sizeof(a_Type), 8) - 1)) \
61 , ("offCpumCpu=%#x %s\n", (a_pRange)->offCpumCpu, (a_pRange)->szName), \
62 VERR_CPUM_MSR_BAD_CPUMCPU_OFFSET); \
63 a_Type *a_VarName = (a_Type *)((uintptr_t)&(a_pVCpu)->cpum.s + (a_pRange)->offCpumCpu)
64
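/* Usage sketch (mirroring cpumMsrRd_Ia32MtrrFixed further down): the macro both
   validates pRange->offCpumCpu and declares the typed pointer, so a typical
   worker body reduces to:

       CPUM_MSR_ASSERT_CPUMCPU_OFFSET_RETURN(pVCpu, pRange, uint64_t, puFixedMtrr);
       *puValue = *puFixedMtrr;
       return VINF_SUCCESS;
 */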
65
66/*********************************************************************************************************************************
67* Structures and Typedefs *
68*********************************************************************************************************************************/
69
70/**
71 * Implements reading one or more MSRs.
72 *
73 * @returns VBox status code.
74 * @retval VINF_SUCCESS on success.
75 * @retval VINF_CPUM_R3_MSR_READ if the MSR read could not be serviced in the
76 * current context (raw-mode or ring-0).
77 * @retval VERR_CPUM_RAISE_GP_0 on failure (invalid MSR).
78 *
79 * @param pVCpu The cross context virtual CPU structure.
80 * @param idMsr The MSR we're reading.
81 * @param pRange The MSR range descriptor.
82 * @param puValue Where to return the value.
83 */
84typedef DECLCALLBACKTYPE(VBOXSTRICTRC, FNCPUMRDMSR,(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue));
85/** Pointer to a RDMSR worker for a specific MSR or range of MSRs. */
86typedef FNCPUMRDMSR *PFNCPUMRDMSR;
87
88
89/**
90 * Implements writing one or more MSRs.
91 *
92 * @retval VINF_SUCCESS on success.
93 * @retval VINF_CPUM_R3_MSR_WRITE if the MSR write could not be serviced in the
94 * current context (raw-mode or ring-0).
95 * @retval VERR_CPUM_RAISE_GP_0 on failure.
96 *
97 * @param pVCpu The cross context virtual CPU structure.
98 * @param idMsr The MSR we're writing.
99 * @param pRange The MSR range descriptor.
100 * @param uValue The value to set, ignored bits masked.
101 * @param uRawValue The raw value with the ignored bits not masked.
102 */
103typedef DECLCALLBACKTYPE(VBOXSTRICTRC, FNCPUMWRMSR,(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange,
104 uint64_t uValue, uint64_t uRawValue));
105/** Pointer to a WRMSR worker for a specific MSR or range of MSRs. */
106typedef FNCPUMWRMSR *PFNCPUMWRMSR;
107
108
109
110/*
111 * Generic functions.
112 * Generic functions.
113 * Generic functions.
114 */
115
116
117/** @callback_method_impl{FNCPUMRDMSR} */
118static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_FixedValue(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
119{
120 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
121 *puValue = pRange->uValue;
122 return VINF_SUCCESS;
123}
124
125
126/** @callback_method_impl{FNCPUMWRMSR} */
127static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IgnoreWrite(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
128{
129 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
130 Log(("CPUM: Ignoring WRMSR %#x (%s), %#llx\n", idMsr, pRange->szName, uValue));
131 return VINF_SUCCESS;
132}
133
134
135/** @callback_method_impl{FNCPUMRDMSR} */
136static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_WriteOnly(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
137{
138 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(puValue);
139 return VERR_CPUM_RAISE_GP_0;
140}
141
142
143/** @callback_method_impl{FNCPUMWRMSR} */
144static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_ReadOnly(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
145{
146 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
147 Assert(pRange->fWrGpMask == UINT64_MAX);
148 return VERR_CPUM_RAISE_GP_0;
149}
150
151
152
153
154/*
155 * IA32
156 * IA32
157 * IA32
158 */
159
160/** @callback_method_impl{FNCPUMRDMSR} */
161static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32P5McAddr(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
162{
163 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
164 *puValue = 0; /** @todo implement machine check injection. */
165 return VINF_SUCCESS;
166}
167
168
169/** @callback_method_impl{FNCPUMWRMSR} */
170static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32P5McAddr(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
171{
172 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
173 /** @todo implement machine check injection. */
174 return VINF_SUCCESS;
175}
176
177
178/** @callback_method_impl{FNCPUMRDMSR} */
179static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32P5McType(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
180{
181 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
182 *puValue = 0; /** @todo implement machine check injection. */
183 return VINF_SUCCESS;
184}
185
186
187/** @callback_method_impl{FNCPUMWRMSR} */
188static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32P5McType(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
189{
190 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
191 /** @todo implement machine check injection. */
192 return VINF_SUCCESS;
193}
194
195
196/** @callback_method_impl{FNCPUMRDMSR} */
197static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32TimestampCounter(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
198{
199 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
200 *puValue = TMCpuTickGet(pVCpu);
201#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
202 *puValue = CPUMApplyNestedGuestTscOffset(pVCpu, *puValue);
203#endif
204 return VINF_SUCCESS;
205}
206
207
208/** @callback_method_impl{FNCPUMWRMSR} */
209static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32TimestampCounter(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
210{
211 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
212 TMCpuTickSet(pVCpu->CTX_SUFF(pVM), pVCpu, uValue);
213 return VINF_SUCCESS;
214}
215
216
217/** @callback_method_impl{FNCPUMRDMSR} */
218static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PlatformId(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
219{
220 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
221 uint64_t uValue = pRange->uValue;
222 if (uValue & 0x1f00)
223 {
224 /* Max allowed bus ratio present. */
225 /** @todo Implement scaled BUS frequency. */
226 }
227
228 *puValue = uValue;
229 return VINF_SUCCESS;
230}
231
232
233/** @callback_method_impl{FNCPUMRDMSR} */
234static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ApicBase(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
235{
236 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
237 return APICGetBaseMsr(pVCpu, puValue);
238}
239
240
241/** @callback_method_impl{FNCPUMWRMSR} */
242static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32ApicBase(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
243{
244 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
245 return APICSetBaseMsr(pVCpu, uValue);
246}
247
248
249/**
250 * Gets IA32_FEATURE_CONTROL value for IEM, NEM and cpumMsrRd_Ia32FeatureControl.
251 *
252 * @returns IA32_FEATURE_CONTROL value.
253 * @param pVCpu The cross context per CPU structure.
254 */
255VMM_INT_DECL(uint64_t) CPUMGetGuestIa32FeatCtrl(PCVMCPUCC pVCpu)
256{
257 uint64_t uFeatCtrlMsr = MSR_IA32_FEATURE_CONTROL_LOCK;
258 if (pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fVmx)
259 uFeatCtrlMsr |= MSR_IA32_FEATURE_CONTROL_VMXON;
260 return uFeatCtrlMsr;
261}
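/* Illustrative guest view: with VMX exposed the value above is the lock bit
   (bit 0) plus "VMXON outside SMX" (bit 2), i.e. 0x5, which is what a guest
   checks before attempting VMXON; without VMX only the lock bit (0x1) is
   reported.  As the MSR always reads as locked, the write handler below
   raises #GP(0) on any write. */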
262
263
264/** @callback_method_impl{FNCPUMRDMSR} */
265static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32FeatureControl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
266{
267 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
268 *puValue = CPUMGetGuestIa32FeatCtrl(pVCpu);
269 return VINF_SUCCESS;
270}
271
272
273/** @callback_method_impl{FNCPUMWRMSR} */
274static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32FeatureControl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
275{
276 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
277 return VERR_CPUM_RAISE_GP_0;
278}
279
280
281/** @callback_method_impl{FNCPUMRDMSR} */
282static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32BiosSignId(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
283{
284 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
285 /** @todo fake microcode update. */
286 *puValue = pRange->uValue;
287 return VINF_SUCCESS;
288}
289
290
291/** @callback_method_impl{FNCPUMWRMSR} */
292static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32BiosSignId(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
293{
294 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
295 /* Normally, zero is written to Ia32BiosSignId before reading it in order
296 to select the signature instead of the BBL_CR_D3 behaviour. The GP mask
297 of the database entry should take care of most illegal writes for now, so
298 just ignore all writes atm. */
299 return VINF_SUCCESS;
300}
301
302
303/** @callback_method_impl{FNCPUMWRMSR} */
304static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32BiosUpdateTrigger(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
305{
306 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
307
308 /* Microcode updates cannot be loaded in VMX non-root mode. */
309 if (CPUMIsGuestInVmxNonRootMode(&pVCpu->cpum.s.Guest))
310 return VINF_SUCCESS;
311
312 /** @todo Fake bios update trigger better. The value is the address to an
313 * update package, I think. We should probably GP if it's invalid. */
314 return VINF_SUCCESS;
315}
316
317
318/**
319 * Get MSR_IA32_SMM_MONITOR_CTL value for IEM and cpumMsrRd_Ia32SmmMonitorCtl.
320 *
321 * @returns The MSR_IA32_SMM_MONITOR_CTL value.
322 * @param pVCpu The cross context per CPU structure.
323 */
324VMM_INT_DECL(uint64_t) CPUMGetGuestIa32SmmMonitorCtl(PCVMCPUCC pVCpu)
325{
326 /* We do not support dual-monitor treatment for SMI and SMM. */
327 /** @todo SMM. */
328 RT_NOREF(pVCpu);
329 return 0;
330}
331
332
333/** @callback_method_impl{FNCPUMRDMSR} */
334static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SmmMonitorCtl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
335{
336 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
337 *puValue = CPUMGetGuestIa32SmmMonitorCtl(pVCpu);
338 return VINF_SUCCESS;
339}
340
341
342/** @callback_method_impl{FNCPUMWRMSR} */
343static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SmmMonitorCtl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
344{
345 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
346 /** @todo SMM. */
347 return VINF_SUCCESS;
348}
349
350
351/** @callback_method_impl{FNCPUMRDMSR} */
352static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PmcN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
353{
354 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
355 /** @todo check CPUID leaf 0ah. */
356 *puValue = 0;
357 return VINF_SUCCESS;
358}
359
360
361/** @callback_method_impl{FNCPUMWRMSR} */
362static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PmcN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
363{
364 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
365 /** @todo check CPUID leaf 0ah. */
366 return VINF_SUCCESS;
367}
368
369
370/** @callback_method_impl{FNCPUMRDMSR} */
371static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MonitorFilterLineSize(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
372{
373 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
374 /** @todo return 0x1000 if we try to emulate mwait 100% correctly. */
375 *puValue = 0x40; /** @todo Change to CPU cache line size. */
376 return VINF_SUCCESS;
377}
378
379
380/** @callback_method_impl{FNCPUMWRMSR} */
381static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MonitorFilterLineSize(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
382{
383 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
384 /** @todo should remember writes, though it's supposedly something only a BIOS
385 * would write, so it's not extremely important. */
386 return VINF_SUCCESS;
387}
388
389/** @callback_method_impl{FNCPUMRDMSR} */
390static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MPerf(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
391{
392 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
393 /** @todo Read MPERF: Adjust against previously written MPERF value. Is TSC
394 * what we want? */
395 *puValue = TMCpuTickGet(pVCpu);
396#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
397 *puValue = CPUMApplyNestedGuestTscOffset(pVCpu, *puValue);
398#endif
399 return VINF_SUCCESS;
400}
401
402
403/** @callback_method_impl{FNCPUMWRMSR} */
404static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MPerf(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
405{
406 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
407 /** @todo Write MPERF: Calc adjustment. */
408 return VINF_SUCCESS;
409}
410
411
412/** @callback_method_impl{FNCPUMRDMSR} */
413static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32APerf(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
414{
415 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
416 /** @todo Read APERF: Adjust against previously written APERF value. Is TSC
417 * what we want? */
418 *puValue = TMCpuTickGet(pVCpu);
419#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
420 *puValue = CPUMApplyNestedGuestTscOffset(pVCpu, *puValue);
421#endif
422 return VINF_SUCCESS;
423}
424
425
426/** @callback_method_impl{FNCPUMWRMSR} */
427static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32APerf(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
428{
429 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
430 /** @todo Write APERF: Calc adjustment. */
431 return VINF_SUCCESS;
432}
433
434
435/**
436 * Get fixed IA32_MTRR_CAP value for NEM and cpumMsrRd_Ia32MtrrCap.
437 *
438 * @returns Fixed IA32_MTRR_CAP value.
439 * @param pVCpu The cross context per CPU structure.
440 */
441VMM_INT_DECL(uint64_t) CPUMGetGuestIa32MtrrCap(PCVMCPUCC pVCpu)
442{
443 if (pVCpu->CTX_SUFF(pVM)->cpum.s.fMtrrRead)
444 return pVCpu->cpum.s.GuestMsrs.msr.MtrrCap;
445
446 /* This is currently a bit weird. :-) */
447 uint8_t const cVariableRangeRegs = 0;
448 bool const fSystemManagementRangeRegisters = false;
449 bool const fFixedRangeRegisters = false;
450 bool const fWriteCombiningType = false;
451 bool const fProcRsvdRangeRegisters = false;
452 return cVariableRangeRegs
453 | (fFixedRangeRegisters ? MSR_IA32_MTRR_CAP_FIX : 0)
454 | (fWriteCombiningType ? MSR_IA32_MTRR_CAP_WC : 0)
455 | (fSystemManagementRangeRegisters ? MSR_IA32_MTRR_CAP_SMRR : 0)
456 | (fProcRsvdRangeRegisters ? MSR_IA32_MTRR_CAP_PRMRR : 0);
457}
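/* Rough layout of the value composed above: VCNT occupies bits 7:0, FIX is
   bit 8, WC bit 10 and SMRR bit 11, so a CPU exposing e.g. 10 variable ranges
   plus fixed-range, WC and SMRR support would report 0xd0a.  With fMtrrRead
   clear everything is reported as absent, i.e. 0. */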
458
459/** @callback_method_impl{FNCPUMRDMSR} */
460static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrCap(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
461{
462 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
463 *puValue = CPUMGetGuestIa32MtrrCap(pVCpu);
464 return VINF_SUCCESS;
465}
466
467
468/** @callback_method_impl{FNCPUMRDMSR} */
469static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrPhysBaseN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
470{
471 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
472 Assert(pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fMtrr);
473 Assert(pRange->uValue == (idMsr - MSR_IA32_MTRR_PHYSBASE0) / 2);
474 if (pVCpu->CTX_SUFF(pVM)->cpum.s.fMtrrRead)
475 {
476 AssertLogRelMsgReturn(pRange->uValue < RT_ELEMENTS(pVCpu->cpum.s.GuestMsrs.msr.aMtrrVarMsrs),
477 ("MTRR MSR (%#RX32) out-of-bounds, must be <= %#RX32\n", idMsr, CPUMCTX_MAX_MTRRVAR_COUNT),
478 VERR_CPUM_RAISE_GP_0);
479 AssertLogRelMsgReturn(!(idMsr % 2),
480 ("MTRR MSR (%#RX32) invalid, must be at even offset\n", idMsr), VERR_CPUM_RAISE_GP_0);
481 *puValue = pVCpu->cpum.s.GuestMsrs.msr.aMtrrVarMsrs[pRange->uValue].MtrrPhysBase;
482 }
483 else
484 *puValue = 0;
485 return VINF_SUCCESS;
486}
487
488
489/** @callback_method_impl{FNCPUMWRMSR} */
490static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MtrrPhysBaseN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
491{
492 /*
493 * Validate the value.
494 */
495 Assert(pRange->uValue == (idMsr - MSR_IA32_MTRR_PHYSBASE0) / 2);
496 RT_NOREF_PV(uRawValue);
497 Assert(pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fMtrr);
498
499 uint8_t uType = uValue & 0xff;
500 if ((uType >= 7) || (uType == 2) || (uType == 3))
501 {
502 Log(("CPUM: Invalid type set writing MTRR PhysBase MSR %#x: %#llx (%#llx)\n", idMsr, uValue, uType));
503 return VERR_CPUM_RAISE_GP_0;
504 }
505
506 uint64_t fInvPhysMask = ~(RT_BIT_64(pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.cMaxPhysAddrWidth) - 1U);
507 if (fInvPhysMask & uValue)
508 {
509 Log(("CPUM: Invalid physical address bits set writing MTRR PhysBase MSR %#x: %#llx (%#llx)\n",
510 idMsr, uValue, uValue & fInvPhysMask));
511 return VERR_CPUM_RAISE_GP_0;
512 }
513
514 /*
515 * Store it.
516 */
517 if (pVCpu->CTX_SUFF(pVM)->cpum.s.fMtrrWrite)
518 {
519 AssertCompile(CPUMCTX_MAX_MTRRVAR_COUNT == RT_ELEMENTS(pVCpu->cpum.s.GuestMsrs.msr.aMtrrVarMsrs));
520 AssertLogRelMsgReturn(pRange->uValue < CPUMCTX_MAX_MTRRVAR_COUNT,
521 ("MTRR MSR (%#RX32) out-of-bounds, must be <= %#RX32\n", idMsr, CPUMCTX_MAX_MTRRVAR_COUNT),
522 VERR_CPUM_RAISE_GP_0);
523 AssertLogRelMsgReturn(!(idMsr % 2),
524 ("MTRR MSR (%#RX32) invalid, must be at even offset\n", idMsr), VERR_CPUM_RAISE_GP_0);
525 pVCpu->cpum.s.GuestMsrs.msr.aMtrrVarMsrs[pRange->uValue].MtrrPhysBase = uValue;
526 /** @todo Act on the potential memory type change. */
527 }
528 return VINF_SUCCESS;
529}
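/* Worked example of the validation above, assuming cMaxPhysAddrWidth is 36:
   fInvPhysMask = ~(RT_BIT_64(36) - 1) = 0xfffffff000000000, so writing
   0x00000000c0000006 (base at 3 GiB, type 6 = WB) is accepted, while a type
   byte of 2, 3 or >= 7, or any base bit at or above bit 36 such as
   0x0000001000000006, raises #GP(0). */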
530
531
532/** @callback_method_impl{FNCPUMRDMSR} */
533static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrPhysMaskN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
534{
535 RT_NOREF_PV(idMsr);
536 Assert(pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fMtrr);
537 Assert(pRange->uValue == (idMsr - MSR_IA32_MTRR_PHYSBASE0) / 2);
538 if (pVCpu->CTX_SUFF(pVM)->cpum.s.fMtrrRead)
539 {
540 AssertLogRelMsgReturn(pRange->uValue < RT_ELEMENTS(pVCpu->cpum.s.GuestMsrs.msr.aMtrrVarMsrs),
541 ("MTRR MSR (%#RX32) out-of-bounds, must be <= %#RX32\n", idMsr, CPUMCTX_MAX_MTRRVAR_COUNT),
542 VERR_CPUM_RAISE_GP_0);
543 AssertLogRelMsgReturn(idMsr % 2,
544 ("MTRR MSR (%#RX32) invalid, must be at odd offset\n", idMsr), VERR_CPUM_RAISE_GP_0);
545 *puValue = pVCpu->cpum.s.GuestMsrs.msr.aMtrrVarMsrs[pRange->uValue].MtrrPhysMask;
546 }
547 else
548 *puValue = 0;
549 return VINF_SUCCESS;
550}
551
552
553/** @callback_method_impl{FNCPUMWRMSR} */
554static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MtrrPhysMaskN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
555{
556 /*
557 * Validate the value.
558 */
559 Assert(pRange->uValue == (idMsr - MSR_IA32_MTRR_PHYSBASE0) / 2);
560 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(uRawValue); RT_NOREF_PV(pRange);
561 Assert(pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fMtrr);
562
563 uint64_t fInvPhysMask = ~(RT_BIT_64(pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.cMaxPhysAddrWidth) - 1U);
564 if (fInvPhysMask & uValue)
565 {
566 Log(("CPUM: Invalid physical address bits set writing MTRR PhysMask MSR %#x: %#llx (%#llx)\n",
567 idMsr, uValue, uValue & fInvPhysMask));
568 return VERR_CPUM_RAISE_GP_0;
569 }
570
571 /*
572 * Store it.
573 */
574 if (pVCpu->CTX_SUFF(pVM)->cpum.s.fMtrrWrite)
575 {
576 AssertLogRelMsgReturn(pRange->uValue < RT_ELEMENTS(pVCpu->cpum.s.GuestMsrs.msr.aMtrrVarMsrs),
577 ("MTRR MSR (%#RX32) out-of-bounds, must be <= %#RX32\n", idMsr, CPUMCTX_MAX_MTRRVAR_COUNT),
578 VERR_CPUM_RAISE_GP_0);
579 AssertLogRelMsgReturn(idMsr % 2,
580 ("MTRR MSR (%#RX32) invalid, must be at odd offset\n", idMsr), VERR_CPUM_RAISE_GP_0);
581 pVCpu->cpum.s.GuestMsrs.msr.aMtrrVarMsrs[pRange->uValue].MtrrPhysMask = uValue;
582 /** @todo Act on the potential memory type change. */
583 }
584 return VINF_SUCCESS;
585}
586
587
588/** @callback_method_impl{FNCPUMRDMSR} */
589static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrFixed(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
590{
591 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
592 CPUM_MSR_ASSERT_CPUMCPU_OFFSET_RETURN(pVCpu, pRange, uint64_t, puFixedMtrr);
593 Assert(pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fMtrr);
594 *puValue = *puFixedMtrr;
595 return VINF_SUCCESS;
596}
597
598
599/** @callback_method_impl{FNCPUMWRMSR} */
600static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MtrrFixed(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
601{
602 CPUM_MSR_ASSERT_CPUMCPU_OFFSET_RETURN(pVCpu, pRange, uint64_t, puFixedMtrr);
603 RT_NOREF_PV(idMsr); RT_NOREF_PV(uRawValue);
604 Assert(pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fMtrr);
605
606 for (uint32_t cShift = 0; cShift < 63; cShift += 8)
607 {
608 uint8_t uType = (uint8_t)(uValue >> cShift);
609 if ((uType >= 7) || (uType == 2) || (uType == 3))
610 {
611 Log(("CPUM: Invalid MTRR type at %u:%u in fixed range (%#x/%s): %#llx (%#llx)\n",
612 cShift + 7, cShift, idMsr, pRange->szName, uValue, uType));
613 return VERR_CPUM_RAISE_GP_0;
614 }
615 }
616 *puFixedMtrr = uValue;
617 return VINF_SUCCESS;
618}
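/* Example: each of the eight bytes of a fixed-range MTRR encodes one memory
   type, so writing 0x0606060606060606 marks all eight sub-ranges write-back,
   whereas any byte equal to 2, 3 or >= 7 makes the loop above raise #GP(0). */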
619
620
621/** @callback_method_impl{FNCPUMRDMSR} */
622static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MtrrDefType(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
623{
624 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
625 Assert(pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fMtrr);
626 *puValue = pVCpu->cpum.s.GuestMsrs.msr.MtrrDefType;
627 return VINF_SUCCESS;
628}
629
630
631/** @callback_method_impl{FNCPUMWRMSR} */
632static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MtrrDefType(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
633{
634 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
635 Assert(pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fMtrr);
636
637 uint8_t uType = uValue & MSR_IA32_MTRR_DEF_TYPE_DEF_MT_MASK;
638 if ((uType >= 7) || (uType == 2) || (uType == 3))
639 {
640 Log(("CPUM: Invalid MTRR default type value on %s: %#llx (%#llx)\n", pRange->szName, uValue, uType));
641 return VERR_CPUM_RAISE_GP_0;
642 }
643
644 pVCpu->cpum.s.GuestMsrs.msr.MtrrDefType = uValue;
645 return VINF_SUCCESS;
646}
647
648
649/** @callback_method_impl{FNCPUMRDMSR} */
650static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32Pat(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
651{
652 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
653 *puValue = pVCpu->cpum.s.Guest.msrPAT;
654 return VINF_SUCCESS;
655}
656
657
658/** @callback_method_impl{FNCPUMWRMSR} */
659static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32Pat(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
660{
661 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
662 if (CPUMIsPatMsrValid(uValue))
663 {
664 pVCpu->cpum.s.Guest.msrPAT = uValue;
665 return VINF_SUCCESS;
666 }
667 return VERR_CPUM_RAISE_GP_0;
668}
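/* Example: the validity check above accepts PAT values whose eight type bytes
   are all architecturally defined types (0, 1, 4, 5, 6, 7); the power-on
   default 0x0007040600070406 passes, while any byte of 2 or 3 causes the
   write to raise #GP(0). */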
669
670
671/** @callback_method_impl{FNCPUMRDMSR} */
672static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SysEnterCs(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
673{
674 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
675 *puValue = pVCpu->cpum.s.Guest.SysEnter.cs;
676 return VINF_SUCCESS;
677}
678
679
680/** @callback_method_impl{FNCPUMWRMSR} */
681static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SysEnterCs(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
682{
683 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
684
685 /* Note! We used to mask this by 0xffff, but turns out real HW doesn't and
686 there are generally 32-bit working bits backing this register. */
687 pVCpu->cpum.s.Guest.SysEnter.cs = uValue;
688 return VINF_SUCCESS;
689}
690
691
692/** @callback_method_impl{FNCPUMRDMSR} */
693static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SysEnterEsp(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
694{
695 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
696 *puValue = pVCpu->cpum.s.Guest.SysEnter.esp;
697 return VINF_SUCCESS;
698}
699
700
701/** @callback_method_impl{FNCPUMWRMSR} */
702static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SysEnterEsp(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
703{
704 if (X86_IS_CANONICAL(uValue))
705 {
706 pVCpu->cpum.s.Guest.SysEnter.esp = uValue;
707 return VINF_SUCCESS;
708 }
709 Log(("CPUM: IA32_SYSENTER_ESP not canonical! %#llx\n", uValue));
710 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
711 return VERR_CPUM_RAISE_GP_0;
712}
713
714
715/** @callback_method_impl{FNCPUMRDMSR} */
716static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SysEnterEip(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
717{
718 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
719 *puValue = pVCpu->cpum.s.Guest.SysEnter.eip;
720 return VINF_SUCCESS;
721}
722
723
724/** @callback_method_impl{FNCPUMWRMSR} */
725static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SysEnterEip(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
726{
727 if (X86_IS_CANONICAL(uValue))
728 {
729 pVCpu->cpum.s.Guest.SysEnter.eip = uValue;
730 return VINF_SUCCESS;
731 }
732 LogRel(("CPUM: IA32_SYSENTER_EIP not canonical! %#llx\n", uValue));
733 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
734 return VERR_CPUM_RAISE_GP_0;
735}
736
737
738/** @callback_method_impl{FNCPUMRDMSR} */
739static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McgCap(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
740{
741 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
742#if 0 /** @todo implement machine checks. */
743 *puValue = pRange->uValue & (RT_BIT_64(8) | 0);
744#else
745 *puValue = 0;
746#endif
747 return VINF_SUCCESS;
748}
749
750
751/** @callback_method_impl{FNCPUMRDMSR} */
752static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McgStatus(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
753{
754 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
755 /** @todo implement machine checks. */
756 *puValue = 0;
757 return VINF_SUCCESS;
758}
759
760
761/** @callback_method_impl{FNCPUMWRMSR} */
762static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32McgStatus(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
763{
764 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
765 /** @todo implement machine checks. */
766 return VINF_SUCCESS;
767}
768
769
770/** @callback_method_impl{FNCPUMRDMSR} */
771static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McgCtl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
772{
773 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
774 /** @todo implement machine checks. */
775 *puValue = 0;
776 return VINF_SUCCESS;
777}
778
779
780/** @callback_method_impl{FNCPUMWRMSR} */
781static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32McgCtl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
782{
783 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
784 /** @todo implement machine checks. */
785 return VINF_SUCCESS;
786}
787
788
789/** @callback_method_impl{FNCPUMRDMSR} */
790static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32DebugCtl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
791{
792 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
793 /** @todo implement IA32_DEBUGCTL. */
794 *puValue = 0;
795 return VINF_SUCCESS;
796}
797
798
799/** @callback_method_impl{FNCPUMWRMSR} */
800static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32DebugCtl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
801{
802 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
803 /** @todo implement IA32_DEBUGCTL. */
804 return VINF_SUCCESS;
805}
806
807
808/** @callback_method_impl{FNCPUMRDMSR} */
809static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SmrrPhysBase(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
810{
811 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
812 /** @todo implement intel SMM. */
813 *puValue = 0;
814 return VINF_SUCCESS;
815}
816
817
818/** @callback_method_impl{FNCPUMWRMSR} */
819static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SmrrPhysBase(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
820{
821 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
822 /** @todo implement intel SMM. */
823 return VERR_CPUM_RAISE_GP_0;
824}
825
826
827/** @callback_method_impl{FNCPUMRDMSR} */
828static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SmrrPhysMask(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
829{
830 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
831 /** @todo implement intel SMM. */
832 *puValue = 0;
833 return VINF_SUCCESS;
834}
835
836
837/** @callback_method_impl{FNCPUMWRMSR} */
838static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SmrrPhysMask(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
839{
840 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
841 /** @todo implement intel SMM. */
842 return VERR_CPUM_RAISE_GP_0;
843}
844
845
846/** @callback_method_impl{FNCPUMRDMSR} */
847static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PlatformDcaCap(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
848{
849 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
850 /** @todo implement intel direct cache access (DCA)?? */
851 *puValue = 0;
852 return VINF_SUCCESS;
853}
854
855
856/** @callback_method_impl{FNCPUMWRMSR} */
857static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PlatformDcaCap(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
858{
859 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
860 /** @todo implement intel direct cache access (DCA)?? */
861 return VINF_SUCCESS;
862}
863
864
865/** @callback_method_impl{FNCPUMRDMSR} */
866static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32CpuDcaCap(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
867{
868 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
869 /** @todo implement intel direct cache access (DCA)?? */
870 *puValue = 0;
871 return VINF_SUCCESS;
872}
873
874
875/** @callback_method_impl{FNCPUMRDMSR} */
876static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32Dca0Cap(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
877{
878 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
879 /** @todo implement intel direct cache access (DCA)?? */
880 *puValue = 0;
881 return VINF_SUCCESS;
882}
883
884
885/** @callback_method_impl{FNCPUMWRMSR} */
886static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32Dca0Cap(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
887{
888 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
889 /** @todo implement intel direct cache access (DCA)?? */
890 return VINF_SUCCESS;
891}
892
893
894/** @callback_method_impl{FNCPUMRDMSR} */
895static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfEvtSelN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
896{
897 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
898 /** @todo implement IA32_PERFEVTSEL0+. */
899 *puValue = 0;
900 return VINF_SUCCESS;
901}
902
903
904/** @callback_method_impl{FNCPUMWRMSR} */
905static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfEvtSelN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
906{
907 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
908 /** @todo implement IA32_PERFEVTSEL0+. */
909 return VINF_SUCCESS;
910}
911
912
913/** @callback_method_impl{FNCPUMRDMSR} */
914static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfStatus(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
915{
916 RT_NOREF_PV(idMsr);
917 uint64_t uValue = pRange->uValue;
918
919 /* Always provide the max bus ratio for now. XNU expects it. */
920 uValue &= ~((UINT64_C(0x1f) << 40) | RT_BIT_64(46));
921
922 PVMCC pVM = pVCpu->CTX_SUFF(pVM);
923 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVM);
924 uint64_t uTscHz = TMCpuTicksPerSecond(pVM);
925 uint8_t uTscRatio = (uint8_t)((uTscHz + uScalableBusHz / 2) / uScalableBusHz);
926 if (uTscRatio > 0x1f)
927 uTscRatio = 0x1f;
928 uValue |= (uint64_t)uTscRatio << 40;
929
930 *puValue = uValue;
931 return VINF_SUCCESS;
932}
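/* Worked example for the ratio computed above: with a 100 MHz scalable bus and
   a 2.6 GHz TSC the rounded ratio is (2600 + 50) / 100 = 26 = 0x1a, which ends
   up in bits 44:40 of the returned value; anything above 31 is clamped to 0x1f. */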
933
934
935/** @callback_method_impl{FNCPUMWRMSR} */
936static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfStatus(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
937{
938 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
939 /* Pentium4 allows writing, but all bits are ignored. */
940 return VINF_SUCCESS;
941}
942
943
944/** @callback_method_impl{FNCPUMRDMSR} */
945static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfCtl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
946{
947 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
948 /** @todo implement IA32_PERFCTL. */
949 *puValue = 0;
950 return VINF_SUCCESS;
951}
952
953
954/** @callback_method_impl{FNCPUMWRMSR} */
955static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfCtl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
956{
957 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
958 /** @todo implement IA32_PERFCTL. */
959 return VINF_SUCCESS;
960}
961
962
963/** @callback_method_impl{FNCPUMRDMSR} */
964static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32FixedCtrN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
965{
966 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
967 /** @todo implement IA32_FIXED_CTRn (fixed performance counters). */
968 *puValue = 0;
969 return VINF_SUCCESS;
970}
971
972
973/** @callback_method_impl{FNCPUMWRMSR} */
974static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32FixedCtrN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
975{
976 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
977 /** @todo implement IA32_FIXED_CTRn (fixed performance counters). */
978 return VINF_SUCCESS;
979}
980
981
982/** @callback_method_impl{FNCPUMRDMSR} */
983static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfCapabilities(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
984{
985 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
986 /** @todo implement performance counters. */
987 *puValue = 0;
988 return VINF_SUCCESS;
989}
990
991
992/** @callback_method_impl{FNCPUMWRMSR} */
993static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfCapabilities(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
994{
995 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
996 /** @todo implement performance counters. */
997 return VINF_SUCCESS;
998}
999
1000
1001/** @callback_method_impl{FNCPUMRDMSR} */
1002static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32FixedCtrCtrl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1003{
1004 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1005 /** @todo implement performance counters. */
1006 *puValue = 0;
1007 return VINF_SUCCESS;
1008}
1009
1010
1011/** @callback_method_impl{FNCPUMWRMSR} */
1012static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32FixedCtrCtrl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1013{
1014 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1015 /** @todo implement performance counters. */
1016 return VINF_SUCCESS;
1017}
1018
1019
1020/** @callback_method_impl{FNCPUMRDMSR} */
1021static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfGlobalStatus(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1022{
1023 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1024 /** @todo implement performance counters. */
1025 *puValue = 0;
1026 return VINF_SUCCESS;
1027}
1028
1029
1030/** @callback_method_impl{FNCPUMWRMSR} */
1031static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfGlobalStatus(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1032{
1033 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1034 /** @todo implement performance counters. */
1035 return VINF_SUCCESS;
1036}
1037
1038
1039/** @callback_method_impl{FNCPUMRDMSR} */
1040static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfGlobalCtrl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1041{
1042 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1043 /** @todo implement performance counters. */
1044 *puValue = 0;
1045 return VINF_SUCCESS;
1046}
1047
1048
1049/** @callback_method_impl{FNCPUMWRMSR} */
1050static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfGlobalCtrl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1051{
1052 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1053 /** @todo implement performance counters. */
1054 return VINF_SUCCESS;
1055}
1056
1057
1058/** @callback_method_impl{FNCPUMRDMSR} */
1059static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PerfGlobalOvfCtrl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1060{
1061 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1062 /** @todo implement performance counters. */
1063 *puValue = 0;
1064 return VINF_SUCCESS;
1065}
1066
1067
1068/** @callback_method_impl{FNCPUMWRMSR} */
1069static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PerfGlobalOvfCtrl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1070{
1071 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1072 /** @todo implement performance counters. */
1073 return VINF_SUCCESS;
1074}
1075
1076
1077/** @callback_method_impl{FNCPUMRDMSR} */
1078static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32PebsEnable(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1079{
1080 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1081 /** @todo implement performance counters. */
1082 *puValue = 0;
1083 return VINF_SUCCESS;
1084}
1085
1086
1087/** @callback_method_impl{FNCPUMWRMSR} */
1088static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PebsEnable(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1089{
1090 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1091 /** @todo implement performance counters. */
1092 return VINF_SUCCESS;
1093}
1094
1095
1096/** @callback_method_impl{FNCPUMRDMSR} */
1097static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ClockModulation(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1098{
1099 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1100 /** @todo implement IA32_CLOCK_MODULATION. */
1101 *puValue = 0;
1102 return VINF_SUCCESS;
1103}
1104
1105
1106/** @callback_method_impl{FNCPUMWRMSR} */
1107static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32ClockModulation(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1108{
1109 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1110 /** @todo implement IA32_CLOCK_MODULATION. */
1111 return VINF_SUCCESS;
1112}
1113
1114
1115/** @callback_method_impl{FNCPUMRDMSR} */
1116static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ThermInterrupt(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1117{
1118 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1119 /** @todo implement IA32_THERM_INTERRUPT. */
1120 *puValue = 0;
1121 return VINF_SUCCESS;
1122}
1123
1124
1125/** @callback_method_impl{FNCPUMWRMSR} */
1126static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32ThermInterrupt(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1127{
1128 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1129 /** @todo implement IA32_THERM_INTERRUPT. */
1130 return VINF_SUCCESS;
1131}
1132
1133
1134/** @callback_method_impl{FNCPUMRDMSR} */
1135static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ThermStatus(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1136{
1137 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1138 /** @todo implement IA32_THERM_STATUS. */
1139 *puValue = 0;
1140 return VINF_SUCCESS;
1141}
1142
1143
1144/** @callback_method_impl{FNCPUMWRMSR} */
1145static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32ThermStatus(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1146{
1147 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1148 /** @todo implement IA32_THERM_STATUS. */
1149 return VINF_SUCCESS;
1150}
1151
1152
1153/** @callback_method_impl{FNCPUMRDMSR} */
1154static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32Therm2Ctl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1155{
1156 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1157 /** @todo implement IA32_THERM2_CTL. */
1158 *puValue = 0;
1159 return VINF_SUCCESS;
1160}
1161
1162
1163/** @callback_method_impl{FNCPUMWRMSR} */
1164static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32Therm2Ctl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1165{
1166 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1167 /** @todo implement IA32_THERM2_CTL. */
1168 return VINF_SUCCESS;
1169}
1170
1171
1172/** @callback_method_impl{FNCPUMRDMSR} */
1173static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32MiscEnable(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1174{
1175 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1176 *puValue = pVCpu->cpum.s.GuestMsrs.msr.MiscEnable;
1177 return VINF_SUCCESS;
1178}
1179
1180
1181/** @callback_method_impl{FNCPUMWRMSR} */
1182static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32MiscEnable(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1183{
1184 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1185#ifdef LOG_ENABLED
1186 uint64_t const uOld = pVCpu->cpum.s.GuestMsrs.msr.MiscEnable;
1187#endif
1188
1189 /* Unsupported bits are generally ignored and stripped by the MSR range
1190 entry that got us here. So, we just need to preserve fixed bits. */
1191 pVCpu->cpum.s.GuestMsrs.msr.MiscEnable = uValue
1192 | MSR_IA32_MISC_ENABLE_PEBS_UNAVAIL
1193 | MSR_IA32_MISC_ENABLE_BTS_UNAVAIL;
1194
1195 Log(("CPUM: IA32_MISC_ENABLE; old=%#llx written=%#llx => %#llx\n",
1196 uOld, uValue, pVCpu->cpum.s.GuestMsrs.msr.MiscEnable));
1197
1198 /** @todo Wire IA32_MISC_ENABLE bit 22 to our NT 4 CPUID trick. */
1199 /** @todo Wire up MSR_IA32_MISC_ENABLE_XD_DISABLE. */
1200 return VINF_SUCCESS;
1201}
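/* Example of the bit preservation above, assuming the usual bit layout (BTS
   unavailable = bit 11, PEBS unavailable = bit 12): a guest writing just
   fast-strings enable (bit 0) ends up with 0x1801 stored, i.e. its own bits
   plus the two forced "unavailable" indicators. */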
1202
1203
1204/** @callback_method_impl{FNCPUMRDMSR} */
1205static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McCtlStatusAddrMiscN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1206{
1207 RT_NOREF_PV(pVCpu); RT_NOREF_PV(pRange);
1208
1209 /** @todo Implement machine check exception injection. */
1210 switch (idMsr & 3)
1211 {
1212 case 0:
1213 case 1:
1214 *puValue = 0;
1215 break;
1216
1217 /* The ADDR and MISC registers aren't accessible since the
1218 corresponding STATUS bits are zero. */
1219 case 2:
1220 Log(("CPUM: Reading IA32_MCi_ADDR %#x -> #GP\n", idMsr));
1221 return VERR_CPUM_RAISE_GP_0;
1222 case 3:
1223 Log(("CPUM: Reading IA32_MCi_MISC %#x -> #GP\n", idMsr));
1224 return VERR_CPUM_RAISE_GP_0;
1225 }
1226 return VINF_SUCCESS;
1227}
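/* Note on the (idMsr & 3) decoding used here: the machine-check bank MSRs are
   laid out in groups of four starting at IA32_MC0_CTL (0x400), so the low two
   bits select CTL (0), STATUS (1), ADDR (2) or MISC (3) within a bank. */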
1228
1229
1230/** @callback_method_impl{FNCPUMWRMSR} */
1231static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32McCtlStatusAddrMiscN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1232{
1233 RT_NOREF_PV(pVCpu); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1234 switch (idMsr & 3)
1235 {
1236 case 0:
1237 /* Ignore writes to the CTL register. */
1238 break;
1239
1240 case 1:
1241 /* According to specs, the STATUS register can only be written to
1242 with the value 0. VBoxCpuReport thinks differently for a
1243 Pentium M Dothan, but implementing according to specs now. */
1244 if (uValue != 0)
1245 {
1246 Log(("CPUM: Writing non-zero value (%#llx) to IA32_MCi_STATUS %#x -> #GP\n", uValue, idMsr));
1247 return VERR_CPUM_RAISE_GP_0;
1248 }
1249 break;
1250
1251 /* Specs state that ADDR and MISC can be cleared by writing zeros.
1252 Writing 1s will GP. Need to figure out how this relates to the
1253 ADDRV and MISCV status flags. If writing is independent of those
1254 bits, we need to know whether the CPU really implements them since
1255 that is exposed by writing 0 to them.
1256 Implementing the solution with the fewer GPs for now. */
1257 case 2:
1258 if (uValue != 0)
1259 {
1260 Log(("CPUM: Writing non-zero value (%#llx) to IA32_MCi_ADDR %#x -> #GP\n", uValue, idMsr));
1261 return VERR_CPUM_RAISE_GP_0;
1262 }
1263 break;
1264 case 3:
1265 if (uValue != 0)
1266 {
1267 Log(("CPUM: Writing non-zero value (%#llx) to IA32_MCi_MISC %#x -> #GP\n", uValue, idMsr));
1268 return VERR_CPUM_RAISE_GP_0;
1269 }
1270 break;
1271 }
1272 return VINF_SUCCESS;
1273}
1274
1275
1276/** @callback_method_impl{FNCPUMRDMSR} */
1277static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32McNCtl2(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1278{
1279 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1280 /** @todo Implement machine check exception injection. */
1281 *puValue = 0;
1282 return VINF_SUCCESS;
1283}
1284
1285
1286/** @callback_method_impl{FNCPUMWRMSR} */
1287static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32McNCtl2(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1288{
1289 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1290 /** @todo Implement machine check exception injection. */
1291 return VINF_SUCCESS;
1292}
1293
1294
1295/** @callback_method_impl{FNCPUMRDMSR} */
1296static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32DsArea(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1297{
1298 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1299 /** @todo implement IA32_DS_AREA. */
1300 *puValue = 0;
1301 return VINF_SUCCESS;
1302}
1303
1304
1305/** @callback_method_impl{FNCPUMWRMSR} */
1306static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32DsArea(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1307{
1308 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1309 /** @todo implement IA32_DS_AREA. */
1310 return VINF_SUCCESS;
1311}
1312
1313
1314/** @callback_method_impl{FNCPUMRDMSR} */
1315static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32TscDeadline(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1316{
1317 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1318 /** @todo implement TSC deadline timer. */
1319 *puValue = 0;
1320 return VINF_SUCCESS;
1321}
1322
1323
1324/** @callback_method_impl{FNCPUMWRMSR} */
1325static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32TscDeadline(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1326{
1327 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1328 /** @todo implement TSC deadline timer. */
1329 return VINF_SUCCESS;
1330}
1331
1332
1333/** @callback_method_impl{FNCPUMRDMSR} */
1334static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32X2ApicN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1335{
1336 RT_NOREF_PV(pRange);
1337#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
1338 if ( CPUMIsGuestInVmxNonRootMode(&pVCpu->cpum.s.Guest)
1339 && CPUMIsGuestVmxProcCtls2Set(&pVCpu->cpum.s.Guest, VMX_PROC_CTLS2_VIRT_X2APIC_MODE))
1340 {
1341 VBOXSTRICTRC rcStrict = IEMExecVmxVirtApicAccessMsr(pVCpu, idMsr, puValue, false /* fWrite */);
1342 if (rcStrict == VINF_VMX_MODIFIES_BEHAVIOR)
1343 return VINF_SUCCESS;
1344 if (rcStrict == VERR_OUT_OF_RANGE)
1345 return VERR_CPUM_RAISE_GP_0;
1346 Assert(rcStrict == VINF_VMX_INTERCEPT_NOT_ACTIVE);
1347 }
1348#endif
1349 return APICReadMsr(pVCpu, idMsr, puValue);
1350}
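/* Background for the x2APIC range handled here: MSRs 0x800..0x8ff map onto the
   APIC register space as (register offset >> 4), so e.g. MSR 0x802 is the APIC
   ID and MSR 0x80d the logical destination register.  In VMX non-root mode with
   virtual-x2APIC mode enabled, the access may instead be satisfied from the
   virtual-APIC page by IEM, as the code above shows. */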
1351
1352
1353/** @callback_method_impl{FNCPUMWRMSR} */
1354static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32X2ApicN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1355{
1356 RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1357#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
1358 if ( CPUMIsGuestInVmxNonRootMode(&pVCpu->cpum.s.Guest)
1359 && CPUMIsGuestVmxProcCtls2Set(&pVCpu->cpum.s.Guest, VMX_PROC_CTLS2_VIRT_X2APIC_MODE))
1360 {
1361 VBOXSTRICTRC rcStrict = IEMExecVmxVirtApicAccessMsr(pVCpu, idMsr, &uValue, true /* fWrite */);
1362 if (rcStrict == VINF_VMX_MODIFIES_BEHAVIOR)
1363 return VINF_SUCCESS;
1364 if (rcStrict == VERR_OUT_OF_RANGE)
1365 return VERR_CPUM_RAISE_GP_0;
1366 Assert(rcStrict == VINF_VMX_INTERCEPT_NOT_ACTIVE);
1367 }
1368#endif
1369 return APICWriteMsr(pVCpu, idMsr, uValue);
1370}
1371
1372
1373/** @callback_method_impl{FNCPUMRDMSR} */
1374static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32DebugInterface(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1375{
1376 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1377 /** @todo IA32_DEBUG_INTERFACE (no docs) */
1378 *puValue = 0;
1379 return VINF_SUCCESS;
1380}
1381
1382
1383/** @callback_method_impl{FNCPUMWRMSR} */
1384static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32DebugInterface(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1385{
1386 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1387 /** @todo IA32_DEBUG_INTERFACE (no docs) */
1388 return VINF_SUCCESS;
1389}
1390
1391
1392/** @callback_method_impl{FNCPUMRDMSR} */
1393static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxBasic(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1394{
1395 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1396 if (pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fVmx)
1397 *puValue = pVCpu->cpum.s.Guest.hwvirt.vmx.Msrs.u64Basic;
1398 else
1399 *puValue = 0;
1400 return VINF_SUCCESS;
1401}
1402
1403
1404/** @callback_method_impl{FNCPUMRDMSR} */
1405static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxPinbasedCtls(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1406{
1407 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1408 if (pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fVmx)
1409 *puValue = pVCpu->cpum.s.Guest.hwvirt.vmx.Msrs.PinCtls.u;
1410 else
1411 *puValue = 0;
1412 return VINF_SUCCESS;
1413}
1414
1415/** @callback_method_impl{FNCPUMRDMSR} */
1416static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxProcbasedCtls(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1417{
1418 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1419 if (pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fVmx)
1420 *puValue = pVCpu->cpum.s.Guest.hwvirt.vmx.Msrs.ProcCtls.u;
1421 else
1422 *puValue = 0;
1423 return VINF_SUCCESS;
1424}
1425
1426
1427/** @callback_method_impl{FNCPUMRDMSR} */
1428static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxExitCtls(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1429{
1430 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1431 if (pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fVmx)
1432 *puValue = pVCpu->cpum.s.Guest.hwvirt.vmx.Msrs.ExitCtls.u;
1433 else
1434 *puValue = 0;
1435 return VINF_SUCCESS;
1436}
1437
1438
1439/** @callback_method_impl{FNCPUMRDMSR} */
1440static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxEntryCtls(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1441{
1442 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1443 if (pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fVmx)
1444 *puValue = pVCpu->cpum.s.Guest.hwvirt.vmx.Msrs.EntryCtls.u;
1445 else
1446 *puValue = 0;
1447 return VINF_SUCCESS;
1448}
1449
1450
1451
1452/** @callback_method_impl{FNCPUMRDMSR} */
1453static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxMisc(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1454{
1455 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1456 if (pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fVmx)
1457 *puValue = pVCpu->cpum.s.Guest.hwvirt.vmx.Msrs.u64Misc;
1458 else
1459 *puValue = 0;
1460 return VINF_SUCCESS;
1461}
1462
1463
1464/** @callback_method_impl{FNCPUMRDMSR} */
1465static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxCr0Fixed0(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1466{
1467 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1468 if (pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fVmx)
1469 *puValue = pVCpu->cpum.s.Guest.hwvirt.vmx.Msrs.u64Cr0Fixed0;
1470 else
1471 *puValue = 0;
1472 return VINF_SUCCESS;
1473}
1474
1475
1476/** @callback_method_impl{FNCPUMRDMSR} */
1477static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxCr0Fixed1(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1478{
1479 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1480 if (pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fVmx)
1481 *puValue = pVCpu->cpum.s.Guest.hwvirt.vmx.Msrs.u64Cr0Fixed1;
1482 else
1483 *puValue = 0;
1484 return VINF_SUCCESS;
1485}
1486
1487
1488/** @callback_method_impl{FNCPUMRDMSR} */
1489static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxCr4Fixed0(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1490{
1491 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1492 if (pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fVmx)
1493 *puValue = pVCpu->cpum.s.Guest.hwvirt.vmx.Msrs.u64Cr4Fixed0;
1494 else
1495 *puValue = 0;
1496 return VINF_SUCCESS;
1497}
1498
1499
1500/** @callback_method_impl{FNCPUMRDMSR} */
1501static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxCr4Fixed1(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1502{
1503 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1504 if (pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fVmx)
1505 *puValue = pVCpu->cpum.s.Guest.hwvirt.vmx.Msrs.u64Cr4Fixed1;
1506 else
1507 *puValue = 0;
1508 return VINF_SUCCESS;
1509}
1510
1511
1512/** @callback_method_impl{FNCPUMRDMSR} */
1513static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxVmcsEnum(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1514{
1515 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1516 if (pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fVmx)
1517 *puValue = pVCpu->cpum.s.Guest.hwvirt.vmx.Msrs.u64VmcsEnum;
1518 else
1519 *puValue = 0;
1520 return VINF_SUCCESS;
1521}
1522
1523
1524/** @callback_method_impl{FNCPUMRDMSR} */
1525static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxProcBasedCtls2(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1526{
1527 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1528 if (pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fVmx)
1529 *puValue = pVCpu->cpum.s.Guest.hwvirt.vmx.Msrs.ProcCtls2.u;
1530 else
1531 *puValue = 0;
1532 return VINF_SUCCESS;
1533}
1534
1535
1536/**
1537 * Gets the fixed IA32_VMX_EPT_VPID_CAP value for PGM and cpumMsrRd_Ia32VmxEptVpidCap.
1538 *
1539 * @returns Fixed IA32_VMX_EPT_VPID_CAP value.
1540 * @param pVCpu The cross context virtual CPU structure.
1541 */
1542VMM_INT_DECL(uint64_t) CPUMGetGuestIa32VmxEptVpidCap(PCVMCPUCC pVCpu)
1543{
1544 if (pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fVmx)
1545 return pVCpu->cpum.s.Guest.hwvirt.vmx.Msrs.u64EptVpidCaps;
1546 return 0;
1547}
1548
1549
1550/** @callback_method_impl{FNCPUMRDMSR} */
1551static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxEptVpidCap(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1552{
1553 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1554 *puValue = CPUMGetGuestIa32VmxEptVpidCap(pVCpu);
1555 return VINF_SUCCESS;
1556}
1557
1558
1559/** @callback_method_impl{FNCPUMRDMSR} */
1560static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxTruePinbasedCtls(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1561{
1562 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1563 if (pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fVmx)
1564 *puValue = pVCpu->cpum.s.Guest.hwvirt.vmx.Msrs.TruePinCtls.u;
1565 else
1566 *puValue = 0;
1567 return VINF_SUCCESS;
1568}
1569
1570
1571/** @callback_method_impl{FNCPUMRDMSR} */
1572static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxTrueProcbasedCtls(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1573{
1574 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1575 if (pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fVmx)
1576 *puValue = pVCpu->cpum.s.Guest.hwvirt.vmx.Msrs.TrueProcCtls.u;
1577 else
1578 *puValue = 0;
1579 return VINF_SUCCESS;
1580}
1581
1582
1583/** @callback_method_impl{FNCPUMRDMSR} */
1584static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxTrueExitCtls(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1585{
1586 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1587 if (pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fVmx)
1588 *puValue = pVCpu->cpum.s.Guest.hwvirt.vmx.Msrs.TrueExitCtls.u;
1589 else
1590 *puValue = 0;
1591 return VINF_SUCCESS;
1592}
1593
1594
1595/** @callback_method_impl{FNCPUMRDMSR} */
1596static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxTrueEntryCtls(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1597{
1598 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1599 if (pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fVmx)
1600 *puValue = pVCpu->cpum.s.Guest.hwvirt.vmx.Msrs.TrueEntryCtls.u;
1601 else
1602 *puValue = 0;
1603 return VINF_SUCCESS;
1604}
1605
1606
1607/** @callback_method_impl{FNCPUMRDMSR} */
1608static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32VmxVmFunc(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1609{
1610 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1611 if (pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.fVmx)
1612 *puValue = pVCpu->cpum.s.Guest.hwvirt.vmx.Msrs.u64VmFunc;
1613 else
1614 *puValue = 0;
1615 return VINF_SUCCESS;
1616}
1617
1618
1619/** @callback_method_impl{FNCPUMRDMSR} */
1620static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32SpecCtrl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1621{
1622 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1623 *puValue = pVCpu->cpum.s.GuestMsrs.msr.SpecCtrl;
1624 return VINF_SUCCESS;
1625}
1626
1627
1628/** @callback_method_impl{FNCPUMWRMSR} */
1629static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32SpecCtrl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1630{
1631 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1632
1633 /* NB: The STIBP bit can be set even when only IBRS is present, regardless of whether STIBP is actually implemented. */
1634 if (uValue & ~(MSR_IA32_SPEC_CTRL_F_IBRS | MSR_IA32_SPEC_CTRL_F_STIBP))
1635 {
1636 Log(("CPUM: Invalid IA32_SPEC_CTRL bits (trying to write %#llx)\n", uValue));
1637 return VERR_CPUM_RAISE_GP_0;
1638 }
1639
1640 pVCpu->cpum.s.GuestMsrs.msr.SpecCtrl = uValue;
1641 return VINF_SUCCESS;
1642}
1643
1644
1645/** @callback_method_impl{FNCPUMWRMSR} */
1646static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32PredCmd(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1647{
1648 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1649 return VINF_SUCCESS;
1650}
1651
1652
1653/** @callback_method_impl{FNCPUMRDMSR} */
1654static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Ia32ArchCapabilities(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1655{
1656 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1657 *puValue = pVCpu->cpum.s.GuestMsrs.msr.ArchCaps;
1658 return VINF_SUCCESS;
1659}
1660
1661
1662/** @callback_method_impl{FNCPUMWRMSR} */
1663static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Ia32FlushCmd(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1664{
1665 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1666 if ((uValue & ~MSR_IA32_FLUSH_CMD_F_L1D) == 0)
1667 return VINF_SUCCESS;
1668 Log(("CPUM: Invalid MSR_IA32_FLUSH_CMD bits (trying to write %#llx)\n", uValue));
1669 return VERR_CPUM_RAISE_GP_0;
1670}
1671
1672
1673
1674/*
1675 * AMD64
1676 * AMD64
1677 * AMD64
1678 */
1679
1680
1681/** @callback_method_impl{FNCPUMRDMSR} */
1682static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64Efer(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1683{
1684 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1685 *puValue = pVCpu->cpum.s.Guest.msrEFER;
1686 return VINF_SUCCESS;
1687}
1688
1689
1690/** @callback_method_impl{FNCPUMWRMSR} */
1691static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64Efer(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1692{
1693 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1694 uint64_t uValidatedEfer;
1695 uint64_t const uOldEfer = pVCpu->cpum.s.Guest.msrEFER;
1696 int rc = CPUMIsGuestEferMsrWriteValid(pVCpu->CTX_SUFF(pVM), pVCpu->cpum.s.Guest.cr0, uOldEfer, uValue, &uValidatedEfer);
1697 if (RT_FAILURE(rc))
1698 return VERR_CPUM_RAISE_GP_0;
1699
1700 CPUMSetGuestEferMsrNoChecks(pVCpu, uOldEfer, uValidatedEfer);
1701 return VINF_SUCCESS;
1702}
1703
1704
1705/** @callback_method_impl{FNCPUMRDMSR} */
1706static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64SyscallTarget(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1707{
1708 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1709 *puValue = pVCpu->cpum.s.Guest.msrSTAR;
1710 return VINF_SUCCESS;
1711}
1712
1713
1714/** @callback_method_impl{FNCPUMWRMSR} */
1715static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64SyscallTarget(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1716{
1717 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1718 pVCpu->cpum.s.Guest.msrSTAR = uValue;
1719 return VINF_SUCCESS;
1720}
1721
1722
1723/** @callback_method_impl{FNCPUMRDMSR} */
1724static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64LongSyscallTarget(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1725{
1726 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1727 *puValue = pVCpu->cpum.s.Guest.msrLSTAR;
1728 return VINF_SUCCESS;
1729}
1730
1731
1732/** @callback_method_impl{FNCPUMWRMSR} */
1733static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64LongSyscallTarget(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1734{
1735 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1736 if (!X86_IS_CANONICAL(uValue))
1737 {
1738 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
1739 return VERR_CPUM_RAISE_GP_0;
1740 }
1741 pVCpu->cpum.s.Guest.msrLSTAR = uValue;
1742 return VINF_SUCCESS;
1743}
1744
1745
1746/** @callback_method_impl{FNCPUMRDMSR} */
1747static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64CompSyscallTarget(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1748{
1749 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1750 *puValue = pVCpu->cpum.s.Guest.msrCSTAR;
1751 return VINF_SUCCESS;
1752}
1753
1754
1755/** @callback_method_impl{FNCPUMWRMSR} */
1756static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64CompSyscallTarget(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1757{
1758 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1759 if (!X86_IS_CANONICAL(uValue))
1760 {
1761 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
1762 return VERR_CPUM_RAISE_GP_0;
1763 }
1764 pVCpu->cpum.s.Guest.msrCSTAR = uValue;
1765 return VINF_SUCCESS;
1766}
1767
1768
1769/** @callback_method_impl{FNCPUMRDMSR} */
1770static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64SyscallFlagMask(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1771{
1772 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1773 *puValue = pVCpu->cpum.s.Guest.msrSFMASK;
1774 return VINF_SUCCESS;
1775}
1776
1777
1778/** @callback_method_impl{FNCPUMWRMSR} */
1779static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64SyscallFlagMask(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1780{
1781 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1782 /* The high bits are ignored and read as zero; writing them does not raise #GP. See @bugref{10610}. */
1783 pVCpu->cpum.s.Guest.msrSFMASK = uValue & UINT32_MAX;
1784 return VINF_SUCCESS;
1785}
1786
1787
1788/** @callback_method_impl{FNCPUMRDMSR} */
1789static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64FsBase(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1790{
1791 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1792 *puValue = pVCpu->cpum.s.Guest.fs.u64Base;
1793 return VINF_SUCCESS;
1794}
1795
1796
1797/** @callback_method_impl{FNCPUMWRMSR} */
1798static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64FsBase(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1799{
1800 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1801 if (X86_IS_CANONICAL(uValue))
1802 {
1803 pVCpu->cpum.s.Guest.fs.u64Base = uValue;
1804 return VINF_SUCCESS;
1805 }
1806 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
1807 return VERR_CPUM_RAISE_GP_0;
1808}
1809
1810
1811/** @callback_method_impl{FNCPUMRDMSR} */
1812static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64GsBase(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1813{
1814 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1815 *puValue = pVCpu->cpum.s.Guest.gs.u64Base;
1816 return VINF_SUCCESS;
1817}
1818
1819/** @callback_method_impl{FNCPUMWRMSR} */
1820static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64GsBase(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1821{
1822 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1823 if (X86_IS_CANONICAL(uValue))
1824 {
1825 pVCpu->cpum.s.Guest.gs.u64Base = uValue;
1826 return VINF_SUCCESS;
1827 }
1828 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
1829 return VERR_CPUM_RAISE_GP_0;
1830}
1831
1832
1833
1834/** @callback_method_impl{FNCPUMRDMSR} */
1835static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64KernelGsBase(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1836{
1837 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1838 *puValue = pVCpu->cpum.s.Guest.msrKERNELGSBASE;
1839 return VINF_SUCCESS;
1840}
1841
1842/** @callback_method_impl{FNCPUMWRMSR} */
1843static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64KernelGsBase(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1844{
1845 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1846 if (X86_IS_CANONICAL(uValue))
1847 {
1848 pVCpu->cpum.s.Guest.msrKERNELGSBASE = uValue;
1849 return VINF_SUCCESS;
1850 }
1851 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
1852 return VERR_CPUM_RAISE_GP_0;
1853}
1854
1855
1856/** @callback_method_impl{FNCPUMRDMSR} */
1857static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Amd64TscAux(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1858{
1859 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1860 *puValue = pVCpu->cpum.s.GuestMsrs.msr.TscAux;
1861 return VINF_SUCCESS;
1862}
1863
1864/** @callback_method_impl{FNCPUMWRMSR} */
1865static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Amd64TscAux(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1866{
1867 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
1868 pVCpu->cpum.s.GuestMsrs.msr.TscAux = uValue;
1869 return VINF_SUCCESS;
1870}
1871
1872
1873/*
1874 * Intel specific
1875 * Intel specific
1876 * Intel specific
1877 */
1878
1879/** @callback_method_impl{FNCPUMRDMSR} */
1880static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelEblCrPowerOn(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1881{
1882 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
1883 /** @todo recalc clock frequency ratio? */
1884 *puValue = pRange->uValue;
1885 return VINF_SUCCESS;
1886}
1887
1888
1889/** @callback_method_impl{FNCPUMWRMSR} */
1890static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelEblCrPowerOn(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1891{
1892 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1893 /** @todo Write EBL_CR_POWERON: Remember written bits. */
1894 return VINF_SUCCESS;
1895}
1896
1897
1898/** @callback_method_impl{FNCPUMRDMSR} */
1899static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7CoreThreadCount(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1900{
1901 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
1902
1903 /* Note! According to cpuid_set_info in XNU (10.7.0), Westmere CPUs only
1904 have a 4-bit core count. */
1905 uint16_t cCores = pVCpu->CTX_SUFF(pVM)->cCpus;
1906 uint16_t cThreads = cCores; /** @todo hyper-threading. */
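    /* Presumably the thread count lands in the low 16 bits and the core count in
       bits 16..31, going by RT_MAKE_U32's (lo, hi) argument order. */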
1907 *puValue = RT_MAKE_U32(cThreads, cCores);
1908 return VINF_SUCCESS;
1909}
1910
1911
1912/** @callback_method_impl{FNCPUMRDMSR} */
1913static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP4EbcHardPowerOn(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1914{
1915 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
1916 /** @todo P4 hard power on config */
1917 *puValue = pRange->uValue;
1918 return VINF_SUCCESS;
1919}
1920
1921
1922/** @callback_method_impl{FNCPUMWRMSR} */
1923static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelP4EbcHardPowerOn(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1924{
1925 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1926 /** @todo P4 hard power on config */
1927 return VINF_SUCCESS;
1928}
1929
1930
1931/** @callback_method_impl{FNCPUMRDMSR} */
1932static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP4EbcSoftPowerOn(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1933{
1934 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
1935 /** @todo P4 soft power on config */
1936 *puValue = pRange->uValue;
1937 return VINF_SUCCESS;
1938}
1939
1940
1941/** @callback_method_impl{FNCPUMWRMSR} */
1942static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelP4EbcSoftPowerOn(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
1943{
1944 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
1945 /** @todo P4 soft power on config */
1946 return VINF_SUCCESS;
1947}
1948
1949
1950/** @callback_method_impl{FNCPUMRDMSR} */
1951static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP4EbcFrequencyId(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
1952{
1953 RT_NOREF_PV(idMsr);
1954
1955 uint64_t uValue;
1956 PVMCC pVM = pVCpu->CTX_SUFF(pVM);
1957 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVM);
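    /* For model >= 2 the code below encodes the bus frequency ID in bits 16..19 and
       the rounded TSC-to-bus ratio in bits 24..31, keeping all other bits from the
       configured default value (see the ~0xff0f0000 mask). */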
1958 if (pVM->cpum.s.GuestFeatures.uModel >= 2)
1959 {
1960 if (uScalableBusHz <= CPUM_SBUSFREQ_100MHZ && pVM->cpum.s.GuestFeatures.uModel <= 2)
1961 {
1962 uScalableBusHz = CPUM_SBUSFREQ_100MHZ;
1963 uValue = 0;
1964 }
1965 else if (uScalableBusHz <= CPUM_SBUSFREQ_133MHZ)
1966 {
1967 uScalableBusHz = CPUM_SBUSFREQ_133MHZ;
1968 uValue = 1;
1969 }
1970 else if (uScalableBusHz <= CPUM_SBUSFREQ_167MHZ)
1971 {
1972 uScalableBusHz = CPUM_SBUSFREQ_167MHZ;
1973 uValue = 3;
1974 }
1975 else if (uScalableBusHz <= CPUM_SBUSFREQ_200MHZ)
1976 {
1977 uScalableBusHz = CPUM_SBUSFREQ_200MHZ;
1978 uValue = 2;
1979 }
1980 else if (uScalableBusHz <= CPUM_SBUSFREQ_267MHZ && pVM->cpum.s.GuestFeatures.uModel > 2)
1981 {
1982 uScalableBusHz = CPUM_SBUSFREQ_267MHZ;
1983 uValue = 0;
1984 }
1985 else
1986 {
1987 uScalableBusHz = CPUM_SBUSFREQ_333MHZ;
1988 uValue = 6;
1989 }
1990 uValue <<= 16;
1991
1992 uint64_t uTscHz = TMCpuTicksPerSecond(pVM);
1993 uint8_t uTscRatio = (uint8_t)((uTscHz + uScalableBusHz / 2) / uScalableBusHz);
1994 uValue |= (uint32_t)uTscRatio << 24;
1995
1996 uValue |= pRange->uValue & ~UINT64_C(0xff0f0000);
1997 }
1998 else
1999 {
2000 /* Probably more stuff here, but intel doesn't want to tell us. */
2001 uValue = pRange->uValue;
2002 uValue &= ~(RT_BIT_64(21) | RT_BIT_64(22) | RT_BIT_64(23)); /* 100 MHz is the only documented value. */
2003 }
2004
2005 *puValue = uValue;
2006 return VINF_SUCCESS;
2007}
2008
2009
2010/** @callback_method_impl{FNCPUMWRMSR} */
2011static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelP4EbcFrequencyId(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2012{
2013 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2014 /** @todo P4 bus frequency config */
2015 return VINF_SUCCESS;
2016}
2017
2018
2019/** @callback_method_impl{FNCPUMRDMSR} */
2020static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP6FsbFrequency(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2021{
2022 RT_NOREF_PV(idMsr);
2023
2024 /* Convert the scalable bus frequency to the encoding in the intel manual (for core+). */
2025 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVCpu->CTX_SUFF(pVM));
2026 if (uScalableBusHz <= CPUM_SBUSFREQ_100MHZ)
2027 *puValue = 5;
2028 else if (uScalableBusHz <= CPUM_SBUSFREQ_133MHZ)
2029 *puValue = 1;
2030 else if (uScalableBusHz <= CPUM_SBUSFREQ_167MHZ)
2031 *puValue = 3;
2032 else if (uScalableBusHz <= CPUM_SBUSFREQ_200MHZ)
2033 *puValue = 2;
2034 else if (uScalableBusHz <= CPUM_SBUSFREQ_267MHZ)
2035 *puValue = 0;
2036 else if (uScalableBusHz <= CPUM_SBUSFREQ_333MHZ)
2037 *puValue = 4;
2038 else /*if (uScalableBusHz <= CPUM_SBUSFREQ_400MHZ)*/
2039 *puValue = 6;
2040
2041 *puValue |= pRange->uValue & ~UINT64_C(0x7);
2042
2043 return VINF_SUCCESS;
2044}
2045
2046
2047/** @callback_method_impl{FNCPUMRDMSR} */
2048static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelPlatformInfo(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2049{
2050 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2051
2052 /* Just indicate a fixed TSC, no turbo boost, no programmable anything. */
2053 PVMCC pVM = pVCpu->CTX_SUFF(pVM);
2054 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVM);
2055 uint64_t uTscHz = TMCpuTicksPerSecond(pVM);
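    /* Round to the nearest whole TSC-to-bus ratio rather than truncating. */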
2056 uint8_t uTscRatio = (uint8_t)((uTscHz + uScalableBusHz / 2) / uScalableBusHz);
2057 uint64_t uValue = ((uint32_t)uTscRatio << 8) /* TSC invariant frequency. */
2058 | ((uint64_t)uTscRatio << 40); /* The max turbo frequency. */
2059
2060 /* Ivy bridge has a minimum operating ratio as well. */
2061 if (true) /** @todo detect sandy bridge. */
2062 uValue |= (uint64_t)uTscRatio << 48;
2063
2064 *puValue = uValue;
2065 return VINF_SUCCESS;
2066}
2067
2068
2069/** @callback_method_impl{FNCPUMRDMSR} */
2070static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelFlexRatio(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2071{
2072 RT_NOREF_PV(idMsr);
2073
2074 uint64_t uValue = pRange->uValue & ~UINT64_C(0x1ff00);
2075
2076 PVMCC pVM = pVCpu->CTX_SUFF(pVM);
2077 uint64_t uScalableBusHz = CPUMGetGuestScalableBusFrequency(pVM);
2078 uint64_t uTscHz = TMCpuTicksPerSecond(pVM);
2079 uint8_t uTscRatio = (uint8_t)((uTscHz + uScalableBusHz / 2) / uScalableBusHz);
2080 uValue |= (uint32_t)uTscRatio << 8;
2081
2082 *puValue = uValue;
2083 return VINF_SUCCESS;
2084}
2085
2086
2087/** @callback_method_impl{FNCPUMWRMSR} */
2088static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelFlexRatio(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2089{
2090 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2091 /** @todo implement writing MSR_FLEX_RATIO. */
2092 return VINF_SUCCESS;
2093}
2094
2095
2096/** @callback_method_impl{FNCPUMRDMSR} */
2097static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelPkgCStConfigControl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2098{
2099 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2100 *puValue = pVCpu->cpum.s.GuestMsrs.msr.PkgCStateCfgCtrl;
2101 return VINF_SUCCESS;
2102}
2103
2104
2105/** @callback_method_impl{FNCPUMWRMSR} */
2106static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelPkgCStConfigControl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2107{
2108 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2109
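    /* Bit 15 appears to act as the configuration lock here: once set, any further
       write attempt is rejected with #GP. */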
2110 if (pVCpu->cpum.s.GuestMsrs.msr.PkgCStateCfgCtrl & RT_BIT_64(15))
2111 {
2112 Log(("CPUM: WRMSR %#x (%s), %#llx: Write protected -> #GP\n", idMsr, pRange->szName, uValue));
2113 return VERR_CPUM_RAISE_GP_0;
2114 }
2115#if 0 /** @todo check what real (old) hardware does. */
2116 if ((uValue & 7) >= 5)
2117 {
2118 Log(("CPUM: WRMSR %#x (%s), %#llx: Invalid limit (%d) -> #GP\n", idMsr, pRange->szName, uValue, (uint32_t)(uValue & 7)));
2119 return VERR_CPUM_RAISE_GP_0;
2120 }
2121#endif
2122 pVCpu->cpum.s.GuestMsrs.msr.PkgCStateCfgCtrl = uValue;
2123 return VINF_SUCCESS;
2124}
2125
2126
2127/** @callback_method_impl{FNCPUMRDMSR} */
2128static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelPmgIoCaptureBase(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2129{
2130 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2131 /** @todo implement I/O mwait wakeup. */
2132 *puValue = 0;
2133 return VINF_SUCCESS;
2134}
2135
2136
2137/** @callback_method_impl{FNCPUMWRMSR} */
2138static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelPmgIoCaptureBase(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2139{
2140 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2141 /** @todo implement I/O mwait wakeup. */
2142 return VINF_SUCCESS;
2143}
2144
2145
2146/** @callback_method_impl{FNCPUMRDMSR} */
2147static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelLastBranchFromToN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2148{
2149 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2150 /** @todo implement last branch records. */
2151 *puValue = 0;
2152 return VINF_SUCCESS;
2153}
2154
2155
2156/** @callback_method_impl{FNCPUMWRMSR} */
2157static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelLastBranchFromToN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2158{
2159 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2160 /** @todo implement last branch records. */
2161 return VINF_SUCCESS;
2162}
2163
2164
2165/** @callback_method_impl{FNCPUMRDMSR} */
2166static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelLastBranchFromN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2167{
2168 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2169 /** @todo implement last branch records. */
2170 *puValue = 0;
2171 return VINF_SUCCESS;
2172}
2173
2174
2175/** @callback_method_impl{FNCPUMWRMSR} */
2176static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelLastBranchFromN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2177{
2178 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
2179 /** @todo implement last branch records. */
2180 /** @todo Probing indicates that bit 63 is settable on SandyBridge, at least
2181 * if the rest of the bits are zero. Automatic sign extending?
2182 * Investigate! */
2183 if (!X86_IS_CANONICAL(uValue))
2184 {
2185 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
2186 return VERR_CPUM_RAISE_GP_0;
2187 }
2188 return VINF_SUCCESS;
2189}
2190
2191
2192/** @callback_method_impl{FNCPUMRDMSR} */
2193static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelLastBranchToN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2194{
2195 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2196 /** @todo implement last branch records. */
2197 *puValue = 0;
2198 return VINF_SUCCESS;
2199}
2200
2201
2202/** @callback_method_impl{FNCPUMWRMSR} */
2203static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelLastBranchToN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2204{
2205 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2206 /** @todo implement last branch records. */
2207 /** @todo Probing indicates that bit 63 is settable on SandyBridge, at least
2208 * if the rest of the bits are zero. Automatic sign extending?
2209 * Investigate! */
2210 if (!X86_IS_CANONICAL(uValue))
2211 {
2212 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
2213 return VERR_CPUM_RAISE_GP_0;
2214 }
2215 return VINF_SUCCESS;
2216}
2217
2218
2219/** @callback_method_impl{FNCPUMRDMSR} */
2220static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelLastBranchTos(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2221{
2222 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2223 /** @todo implement last branch records. */
2224 *puValue = 0;
2225 return VINF_SUCCESS;
2226}
2227
2228
2229/** @callback_method_impl{FNCPUMWRMSR} */
2230static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelLastBranchTos(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2231{
2232 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2233 /** @todo implement last branch records. */
2234 return VINF_SUCCESS;
2235}
2236
2237
2238/** @callback_method_impl{FNCPUMRDMSR} */
2239static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelBblCrCtl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2240{
2241 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
2242 *puValue = pRange->uValue;
2243 return VINF_SUCCESS;
2244}
2245
2246
2247/** @callback_method_impl{FNCPUMWRMSR} */
2248static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelBblCrCtl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2249{
2250 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2251 return VINF_SUCCESS;
2252}
2253
2254
2255/** @callback_method_impl{FNCPUMRDMSR} */
2256static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelBblCrCtl3(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2257{
2258 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
2259 *puValue = pRange->uValue;
2260 return VINF_SUCCESS;
2261}
2262
2263
2264/** @callback_method_impl{FNCPUMWRMSR} */
2265static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelBblCrCtl3(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2266{
2267 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2268 return VINF_SUCCESS;
2269}
2270
2271
2272/** @callback_method_impl{FNCPUMRDMSR} */
2273static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7TemperatureTarget(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2274{
2275 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
2276 *puValue = pRange->uValue;
2277 return VINF_SUCCESS;
2278}
2279
2280
2281/** @callback_method_impl{FNCPUMWRMSR} */
2282static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7TemperatureTarget(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2283{
2284 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2285 return VINF_SUCCESS;
2286}
2287
2288
2289/** @callback_method_impl{FNCPUMRDMSR} */
2290static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7MsrOffCoreResponseN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2291{
2292 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
2293 /** @todo machine check. */
2294 *puValue = pRange->uValue;
2295 return VINF_SUCCESS;
2296}
2297
2298
2299/** @callback_method_impl{FNCPUMWRMSR} */
2300static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7MsrOffCoreResponseN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2301{
2302 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2303 /** @todo machine check. */
2304 return VINF_SUCCESS;
2305}
2306
2307
2308/** @callback_method_impl{FNCPUMRDMSR} */
2309static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7MiscPwrMgmt(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2310{
2311 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2312 *puValue = 0;
2313 return VINF_SUCCESS;
2314}
2315
2316
2317/** @callback_method_impl{FNCPUMWRMSR} */
2318static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7MiscPwrMgmt(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2319{
2320 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2321 return VINF_SUCCESS;
2322}
2323
2324
2325/** @callback_method_impl{FNCPUMRDMSR} */
2326static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelP6CrN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2327{
2328 RT_NOREF_PV(idMsr);
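    /* For these MSR ranges the control register index to read is carried in
       pRange->uValue. */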
2329 int rc = CPUMGetGuestCRx(pVCpu, pRange->uValue, puValue);
2330 AssertRC(rc);
2331 return VINF_SUCCESS;
2332}
2333
2334
2335/** @callback_method_impl{FNCPUMWRMSR} */
2336static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelP6CrN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2337{
2338 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2339 /* This CRx interface differs from the MOV CRx, GReg interface in that
2340 #GP(0) isn't raised when unsupported bits are written. Instead they
2341 are simply ignored and masked off. (Pentium M Dothan) */
2342 /** @todo Implement MSR_P6_CRx writing. Too much effort for very little, if
2343 * any, gain. */
2344 return VINF_SUCCESS;
2345}
2346
2347
2348/** @callback_method_impl{FNCPUMRDMSR} */
2349static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCpuId1FeatureMaskEcdx(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2350{
2351 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2352 /** @todo implement CPUID masking. */
2353 *puValue = UINT64_MAX;
2354 return VINF_SUCCESS;
2355}
2356
2357
2358/** @callback_method_impl{FNCPUMWRMSR} */
2359static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCpuId1FeatureMaskEcdx(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2360{
2361 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2362 /** @todo implement CPUID masking. */
2363 return VINF_SUCCESS;
2364}
2365
2366
2367/** @callback_method_impl{FNCPUMRDMSR} */
2368static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCpuId1FeatureMaskEax(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2369{
2370 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2371 /** @todo implement CPUID masking. */
2372 *puValue = 0;
2373 return VINF_SUCCESS;
2374}
2375
2376
2377/** @callback_method_impl{FNCPUMWRMSR} */
2378static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCpuId1FeatureMaskEax(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2379{
2380 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2381 /** @todo implement CPUID masking. */
2382 return VINF_SUCCESS;
2383}
2384
2385
2386
2387/** @callback_method_impl{FNCPUMRDMSR} */
2388static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCpuId80000001FeatureMaskEcdx(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2389{
2390 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2391 /** @todo implement CPUID masking. */
2392 *puValue = UINT64_MAX;
2393 return VINF_SUCCESS;
2394}
2395
2396
2397/** @callback_method_impl{FNCPUMWRMSR} */
2398static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCpuId80000001FeatureMaskEcdx(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2399{
2400 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2401 /** @todo implement CPUID masking. */
2402 return VINF_SUCCESS;
2403}
2404
2405
2406
2407/** @callback_method_impl{FNCPUMRDMSR} */
2408static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyAesNiCtl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2409{
2410 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2411 /** @todo implement AES-NI. */
2412 *puValue = 3; /* Bit 0 is lock bit, bit 1 disables AES-NI. That's what they say. */
2413 return VINF_SUCCESS;
2414}
2415
2416
2417/** @callback_method_impl{FNCPUMWRMSR} */
2418static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyAesNiCtl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2419{
2420 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2421 /** @todo implement AES-NI. */
2422 return VERR_CPUM_RAISE_GP_0;
2423}
2424
2425
2426/** @callback_method_impl{FNCPUMRDMSR} */
2427static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7TurboRatioLimit(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2428{
2429 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
2430 /** @todo implement intel C states. */
2431 *puValue = pRange->uValue;
2432 return VINF_SUCCESS;
2433}
2434
2435
2436/** @callback_method_impl{FNCPUMWRMSR} */
2437static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7TurboRatioLimit(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2438{
2439 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2440 /** @todo implement intel C states. */
2441 return VINF_SUCCESS;
2442}
2443
2444
2445/** @callback_method_impl{FNCPUMRDMSR} */
2446static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7LbrSelect(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2447{
2448 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2449 /** @todo implement last-branch-records. */
2450 *puValue = 0;
2451 return VINF_SUCCESS;
2452}
2453
2454
2455/** @callback_method_impl{FNCPUMWRMSR} */
2456static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7LbrSelect(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2457{
2458 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2459 /** @todo implement last-branch-records. */
2460 return VINF_SUCCESS;
2461}
2462
2463
2464/** @callback_method_impl{FNCPUMRDMSR} */
2465static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyErrorControl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2466{
2467 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2468 /** @todo implement memory error injection (MSR_ERROR_CONTROL). */
2469 *puValue = 0;
2470 return VINF_SUCCESS;
2471}
2472
2473
2474/** @callback_method_impl{FNCPUMWRMSR} */
2475static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyErrorControl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2476{
2477 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2478 /** @todo implement memory error injection (MSR_ERROR_CONTROL). */
2479 return VINF_SUCCESS;
2480}
2481
2482
2483/** @callback_method_impl{FNCPUMRDMSR} */
2484static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7VirtualLegacyWireCap(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2485{
2486 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
2487 /** @todo implement memory VLW? */
2488 *puValue = pRange->uValue;
2489 /* Note: A20M is known to be bit 1 as this was disclosed in spec update
2490 AAJ49/AAK51/????, which documents the inversion of this bit. The
2491 Sandy bridge CPU here has value 0x74, so it probably doesn't have a BIOS
2492 that corrects things. Some guesses at the other bits:
2493 bit 2 = INTR
2494 bit 4 = SMI
2495 bit 5 = INIT
2496 bit 6 = NMI */
2497 return VINF_SUCCESS;
2498}
2499
2500
2501/** @callback_method_impl{FNCPUMRDMSR} */
2502static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7PowerCtl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2503{
2504 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2505 /** @todo intel power management */
2506 *puValue = 0;
2507 return VINF_SUCCESS;
2508}
2509
2510
2511/** @callback_method_impl{FNCPUMWRMSR} */
2512static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7PowerCtl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2513{
2514 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2515 /** @todo intel power management */
2516 return VINF_SUCCESS;
2517}
2518
2519
2520/** @callback_method_impl{FNCPUMRDMSR} */
2521static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyPebsNumAlt(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2522{
2523 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2524 /** @todo intel performance counters. */
2525 *puValue = 0;
2526 return VINF_SUCCESS;
2527}
2528
2529
2530/** @callback_method_impl{FNCPUMWRMSR} */
2531static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyPebsNumAlt(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2532{
2533 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2534 /** @todo intel performance counters. */
2535 return VINF_SUCCESS;
2536}
2537
2538
2539/** @callback_method_impl{FNCPUMRDMSR} */
2540static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7PebsLdLat(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2541{
2542 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2543 /** @todo intel performance counters. */
2544 *puValue = 0;
2545 return VINF_SUCCESS;
2546}
2547
2548
2549/** @callback_method_impl{FNCPUMWRMSR} */
2550static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7PebsLdLat(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2551{
2552 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2553 /** @todo intel performance counters. */
2554 return VINF_SUCCESS;
2555}
2556
2557
2558/** @callback_method_impl{FNCPUMRDMSR} */
2559static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7PkgCnResidencyN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2560{
2561 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2562 /** @todo intel power management. */
2563 *puValue = 0;
2564 return VINF_SUCCESS;
2565}
2566
2567
2568/** @callback_method_impl{FNCPUMRDMSR} */
2569static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7CoreCnResidencyN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2570{
2571 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2572 /** @todo intel power management. */
2573 *puValue = 0;
2574 return VINF_SUCCESS;
2575}
2576
2577
2578/** @callback_method_impl{FNCPUMRDMSR} */
2579static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyVrCurrentConfig(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2580{
2581 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2582 /** @todo Figure out what MSR_VR_CURRENT_CONFIG & MSR_VR_MISC_CONFIG are. */
2583 *puValue = 0;
2584 return VINF_SUCCESS;
2585}
2586
2587
2588/** @callback_method_impl{FNCPUMWRMSR} */
2589static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyVrCurrentConfig(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2590{
2591 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2592 /** @todo Figure out what MSR_VR_CURRENT_CONFIG & MSR_VR_MISC_CONFIG are. */
2593 return VINF_SUCCESS;
2594}
2595
2596
2597/** @callback_method_impl{FNCPUMRDMSR} */
2598static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyVrMiscConfig(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2599{
2600 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2601 /** @todo Figure out what MSR_VR_CURRENT_CONFIG & MSR_VR_MISC_CONFIG are. */
2602 *puValue = 0;
2603 return VINF_SUCCESS;
2604}
2605
2606
2607/** @callback_method_impl{FNCPUMWRMSR} */
2608static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyVrMiscConfig(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2609{
2610 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2611 /** @todo Figure out what MSR_VR_CURRENT_CONFIG & MSR_VR_MISC_CONFIG are. */
2612 return VINF_SUCCESS;
2613}
2614
2615
2616/** @callback_method_impl{FNCPUMRDMSR} */
2617static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyRaplPowerUnit(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2618{
2619 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
2620 /** @todo intel RAPL. */
2621 *puValue = pRange->uValue;
2622 return VINF_SUCCESS;
2623}
2624
2625
2626/** @callback_method_impl{FNCPUMWRMSR} */
2627static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyRaplPowerUnit(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2628{
2629 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2630 /* Note! This is documented as read-only and, except for a Silvermont sample,
2631 has always been classified as read-only. This is just here to make it compile. */
2632 return VINF_SUCCESS;
2633}
2634
2635
2636/** @callback_method_impl{FNCPUMRDMSR} */
2637static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyPkgCnIrtlN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2638{
2639 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2640 /** @todo intel power management. */
2641 *puValue = 0;
2642 return VINF_SUCCESS;
2643}
2644
2645
2646/** @callback_method_impl{FNCPUMWRMSR} */
2647static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyPkgCnIrtlN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2648{
2649 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2650 /** @todo intel power management. */
2651 return VINF_SUCCESS;
2652}
2653
2654
2655/** @callback_method_impl{FNCPUMRDMSR} */
2656static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SandyPkgC2Residency(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2657{
2658 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2659 /** @todo intel power management. */
2660 *puValue = 0;
2661 return VINF_SUCCESS;
2662}
2663
2664
2665/** @callback_method_impl{FNCPUMWRMSR} */
2666static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7SandyPkgC2Residency(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2667{
2668 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2669 /* Note! This is documented as read-only and, except for a Silvermont sample,
2670 has always been classified as read-only. This is just here to make it compile. */
2671 return VINF_SUCCESS;
2672}
2673
2674
2675/** @callback_method_impl{FNCPUMRDMSR} */
2676static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPkgPowerLimit(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2677{
2678 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2679 /** @todo intel RAPL. */
2680 *puValue = 0;
2681 return VINF_SUCCESS;
2682}
2683
2684
2685/** @callback_method_impl{FNCPUMWRMSR} */
2686static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPkgPowerLimit(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2687{
2688 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2689 /** @todo intel RAPL. */
2690 return VINF_SUCCESS;
2691}
2692
2693
2694/** @callback_method_impl{FNCPUMRDMSR} */
2695static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPkgEnergyStatus(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2696{
2697 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2698 /** @todo intel power management. */
2699 *puValue = 0;
2700 return VINF_SUCCESS;
2701}
2702
2703
2704/** @callback_method_impl{FNCPUMRDMSR} */
2705static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPkgPerfStatus(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2706{
2707 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2708 /** @todo intel power management. */
2709 *puValue = 0;
2710 return VINF_SUCCESS;
2711}
2712
2713
2714/** @callback_method_impl{FNCPUMRDMSR} */
2715static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPkgPowerInfo(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2716{
2717 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2718 /** @todo intel power management. */
2719 *puValue = 0;
2720 return VINF_SUCCESS;
2721}
2722
2723
2724/** @callback_method_impl{FNCPUMRDMSR} */
2725static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplDramPowerLimit(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2726{
2727 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2728 /** @todo intel RAPL. */
2729 *puValue = 0;
2730 return VINF_SUCCESS;
2731}
2732
2733
2734/** @callback_method_impl{FNCPUMWRMSR} */
2735static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplDramPowerLimit(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2736{
2737 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2738 /** @todo intel RAPL. */
2739 return VINF_SUCCESS;
2740}
2741
2742
2743/** @callback_method_impl{FNCPUMRDMSR} */
2744static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplDramEnergyStatus(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2745{
2746 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2747 /** @todo intel power management. */
2748 *puValue = 0;
2749 return VINF_SUCCESS;
2750}
2751
2752
2753/** @callback_method_impl{FNCPUMRDMSR} */
2754static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplDramPerfStatus(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2755{
2756 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2757 /** @todo intel power management. */
2758 *puValue = 0;
2759 return VINF_SUCCESS;
2760}
2761
2762
2763/** @callback_method_impl{FNCPUMRDMSR} */
2764static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplDramPowerInfo(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2765{
2766 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2767 /** @todo intel power management. */
2768 *puValue = 0;
2769 return VINF_SUCCESS;
2770}
2771
2772
2773/** @callback_method_impl{FNCPUMRDMSR} */
2774static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp0PowerLimit(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2775{
2776 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2777 /** @todo intel RAPL. */
2778 *puValue = 0;
2779 return VINF_SUCCESS;
2780}
2781
2782
2783/** @callback_method_impl{FNCPUMWRMSR} */
2784static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPp0PowerLimit(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2785{
2786 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2787 /** @todo intel RAPL. */
2788 return VINF_SUCCESS;
2789}
2790
2791
2792/** @callback_method_impl{FNCPUMRDMSR} */
2793static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp0EnergyStatus(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2794{
2795 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2796 /** @todo intel power management. */
2797 *puValue = 0;
2798 return VINF_SUCCESS;
2799}
2800
2801
2802/** @callback_method_impl{FNCPUMRDMSR} */
2803static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp0Policy(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2804{
2805 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2806 /** @todo intel RAPL. */
2807 *puValue = 0;
2808 return VINF_SUCCESS;
2809}
2810
2811
2812/** @callback_method_impl{FNCPUMWRMSR} */
2813static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPp0Policy(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2814{
2815 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2816 /** @todo intel RAPL. */
2817 return VINF_SUCCESS;
2818}
2819
2820
2821/** @callback_method_impl{FNCPUMRDMSR} */
2822static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp0PerfStatus(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2823{
2824 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2825 /** @todo intel power management. */
2826 *puValue = 0;
2827 return VINF_SUCCESS;
2828}
2829
2830
2831/** @callback_method_impl{FNCPUMRDMSR} */
2832static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp1PowerLimit(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2833{
2834 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2835 /** @todo intel RAPL. */
2836 *puValue = 0;
2837 return VINF_SUCCESS;
2838}
2839
2840
2841/** @callback_method_impl{FNCPUMWRMSR} */
2842static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPp1PowerLimit(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2843{
2844 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2845 /** @todo intel RAPL. */
2846 return VINF_SUCCESS;
2847}
2848
2849
2850/** @callback_method_impl{FNCPUMRDMSR} */
2851static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp1EnergyStatus(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2852{
2853 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2854 /** @todo intel power management. */
2855 *puValue = 0;
2856 return VINF_SUCCESS;
2857}
2858
2859
2860/** @callback_method_impl{FNCPUMRDMSR} */
2861static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7RaplPp1Policy(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2862{
2863 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2864 /** @todo intel RAPL. */
2865 *puValue = 0;
2866 return VINF_SUCCESS;
2867}
2868
2869
2870/** @callback_method_impl{FNCPUMWRMSR} */
2871static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7RaplPp1Policy(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2872{
2873 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2874 /** @todo intel RAPL. */
2875 return VINF_SUCCESS;
2876}
2877
2878
2879/** @callback_method_impl{FNCPUMRDMSR} */
2880static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyConfigTdpNominal(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2881{
2882 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
2883 /** @todo intel power management. */
2884 *puValue = pRange->uValue;
2885 return VINF_SUCCESS;
2886}
2887
2888
2889/** @callback_method_impl{FNCPUMRDMSR} */
2890static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyConfigTdpLevel1(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2891{
2892 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
2893 /** @todo intel power management. */
2894 *puValue = pRange->uValue;
2895 return VINF_SUCCESS;
2896}
2897
2898
2899/** @callback_method_impl{FNCPUMRDMSR} */
2900static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyConfigTdpLevel2(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2901{
2902 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
2903 /** @todo intel power management. */
2904 *puValue = pRange->uValue;
2905 return VINF_SUCCESS;
2906}
2907
2908
2909/** @callback_method_impl{FNCPUMRDMSR} */
2910static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyConfigTdpControl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2911{
2912 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2913 /** @todo intel power management. */
2914 *puValue = 0;
2915 return VINF_SUCCESS;
2916}
2917
2918
2919/** @callback_method_impl{FNCPUMWRMSR} */
2920static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7IvyConfigTdpControl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2921{
2922 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2923 /** @todo intel power management. */
2924 return VINF_SUCCESS;
2925}
2926
2927
2928/** @callback_method_impl{FNCPUMRDMSR} */
2929static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7IvyTurboActivationRatio(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2930{
2931 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2932 /** @todo intel power management. */
2933 *puValue = 0;
2934 return VINF_SUCCESS;
2935}
2936
2937
2938/** @callback_method_impl{FNCPUMWRMSR} */
2939static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7IvyTurboActivationRatio(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2940{
2941 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2942 /** @todo intel power management. */
2943 return VINF_SUCCESS;
2944}
2945
2946
2947/** @callback_method_impl{FNCPUMRDMSR} */
2948static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfGlobalCtrl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2949{
2950 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2951 /** @todo uncore msrs. */
2952 *puValue = 0;
2953 return VINF_SUCCESS;
2954}
2955
2956
2957/** @callback_method_impl{FNCPUMWRMSR} */
2958static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfGlobalCtrl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2959{
2960 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2961 /** @todo uncore msrs. */
2962 return VINF_SUCCESS;
2963}
2964
2965
2966/** @callback_method_impl{FNCPUMRDMSR} */
2967static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfGlobalStatus(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2968{
2969 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2970 /** @todo uncore msrs. */
2971 *puValue = 0;
2972 return VINF_SUCCESS;
2973}
2974
2975
2976/** @callback_method_impl{FNCPUMWRMSR} */
2977static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfGlobalStatus(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2978{
2979 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2980 /** @todo uncore msrs. */
2981 return VINF_SUCCESS;
2982}
2983
2984
2985/** @callback_method_impl{FNCPUMRDMSR} */
2986static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfGlobalOvfCtrl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
2987{
2988 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
2989 /** @todo uncore msrs. */
2990 *puValue = 0;
2991 return VINF_SUCCESS;
2992}
2993
2994
2995/** @callback_method_impl{FNCPUMWRMSR} */
2996static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfGlobalOvfCtrl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
2997{
2998 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
2999 /** @todo uncore msrs. */
3000 return VINF_SUCCESS;
3001}
3002
3003
3004/** @callback_method_impl{FNCPUMRDMSR} */
3005static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfFixedCtrCtrl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3006{
3007 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3008 /** @todo uncore msrs. */
3009 *puValue = 0;
3010 return VINF_SUCCESS;
3011}
3012
3013
3014/** @callback_method_impl{FNCPUMWRMSR} */
3015static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfFixedCtrCtrl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3016{
3017 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3018 /** @todo uncore msrs. */
3019 return VINF_SUCCESS;
3020}
3021
3022
3023/** @callback_method_impl{FNCPUMRDMSR} */
3024static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncPerfFixedCtr(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3025{
3026 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3027 /** @todo uncore msrs. */
3028 *puValue = 0;
3029 return VINF_SUCCESS;
3030}
3031
3032
3033/** @callback_method_impl{FNCPUMWRMSR} */
3034static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncPerfFixedCtr(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3035{
3036 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3037 /** @todo uncore msrs. */
3038 return VINF_SUCCESS;
3039}
3040
3041
3042/** @callback_method_impl{FNCPUMRDMSR} */
3043static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncCBoxConfig(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3044{
3045 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3046 /** @todo uncore msrs. */
3047 *puValue = 0;
3048 return VINF_SUCCESS;
3049}
3050
3051
3052/** @callback_method_impl{FNCPUMRDMSR} */
3053static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncArbPerfCtrN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3054{
3055 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3056 /** @todo uncore msrs. */
3057 *puValue = 0;
3058 return VINF_SUCCESS;
3059}
3060
3061
3062/** @callback_method_impl{FNCPUMWRMSR} */
3063static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncArbPerfCtrN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3064{
3065 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3066 /** @todo uncore msrs. */
3067 return VINF_SUCCESS;
3068}
3069
3070
3071/** @callback_method_impl{FNCPUMRDMSR} */
3072static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7UncArbPerfEvtSelN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3073{
3074 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3075 /** @todo uncore msrs. */
3076 *puValue = 0;
3077 return VINF_SUCCESS;
3078}
3079
3080
3081/** @callback_method_impl{FNCPUMWRMSR} */
3082static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelI7UncArbPerfEvtSelN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3083{
3084 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3085 /** @todo uncore msrs. */
3086 return VINF_SUCCESS;
3087}
3088
3089
3090/** @callback_method_impl{FNCPUMRDMSR} */
3091static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelI7SmiCount(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3092{
3093 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3094
3095 /*
3096 * 31:0 is SMI count (read only), 63:32 reserved.
3097 * Since we don't do SMI, the count is always zero.
3098 */
3099 *puValue = 0;
3100 return VINF_SUCCESS;
3101}
3102
3103
3104/** @callback_method_impl{FNCPUMRDMSR} */
3105static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore2EmttmCrTablesN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3106{
3107 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
3108     /** @todo implement enhanced multi-threaded thermal monitoring? */
3109 *puValue = pRange->uValue;
3110 return VINF_SUCCESS;
3111}
3112
3113
3114/** @callback_method_impl{FNCPUMWRMSR} */
3115static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore2EmttmCrTablesN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3116{
3117 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3118     /** @todo implement enhanced multi-threaded thermal monitoring? */
3119 return VINF_SUCCESS;
3120}
3121
3122
3123/** @callback_method_impl{FNCPUMRDMSR} */
3124static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore2SmmCStMiscInfo(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3125{
3126 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3127 /** @todo SMM & C-states? */
3128 *puValue = 0;
3129 return VINF_SUCCESS;
3130}
3131
3132
3133/** @callback_method_impl{FNCPUMWRMSR} */
3134static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore2SmmCStMiscInfo(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3135{
3136 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3137 /** @todo SMM & C-states? */
3138 return VINF_SUCCESS;
3139}
3140
3141
3142/** @callback_method_impl{FNCPUMRDMSR} */
3143static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore1ExtConfig(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3144{
3145 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3146 /** @todo Core1&2 EXT_CONFIG (whatever that is)? */
3147 *puValue = 0;
3148 return VINF_SUCCESS;
3149}
3150
3151
3152/** @callback_method_impl{FNCPUMWRMSR} */
3153static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore1ExtConfig(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3154{
3155 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3156 /** @todo Core1&2 EXT_CONFIG (whatever that is)? */
3157 return VINF_SUCCESS;
3158}
3159
3160
3161/** @callback_method_impl{FNCPUMRDMSR} */
3162static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore1DtsCalControl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3163{
3164 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3165 /** @todo Core1&2(?) DTS_CAL_CTRL (whatever that is)? */
3166 *puValue = 0;
3167 return VINF_SUCCESS;
3168}
3169
3170
3171/** @callback_method_impl{FNCPUMWRMSR} */
3172static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore1DtsCalControl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3173{
3174 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3175 /** @todo Core1&2(?) DTS_CAL_CTRL (whatever that is)? */
3176 return VINF_SUCCESS;
3177}
3178
3179
3180/** @callback_method_impl{FNCPUMRDMSR} */
3181static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelCore2PeciControl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3182{
3183 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3184 /** @todo Core2+ platform environment control interface control register? */
3185 *puValue = 0;
3186 return VINF_SUCCESS;
3187}
3188
3189
3190/** @callback_method_impl{FNCPUMWRMSR} */
3191static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_IntelCore2PeciControl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3192{
3193 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3194 /** @todo Core2+ platform environment control interface control register? */
3195 return VINF_SUCCESS;
3196}
3197
3198
3199/** @callback_method_impl{FNCPUMRDMSR} */
3200static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_IntelAtSilvCoreC1Recidency(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3201{
3202 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3203 *puValue = 0;
3204 return VINF_SUCCESS;
3205}
3206
3207
3208/*
3209 * Multiple vendor P6 MSRs.
3210 * Multiple vendor P6 MSRs.
3211 * Multiple vendor P6 MSRs.
3212 *
3213 * These MSRs were introduced with the P6 but not elevated to architectural
3214 * MSRs, despite other vendors implementing them.
3215 */
3216
3217
3218/** @callback_method_impl{FNCPUMRDMSR} */
3219static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_P6LastBranchFromIp(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3220{
3221 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3222     /* AMD seems to just record RIP, while Intel claims to record RIP+CS.BASE
3223        if I read the docs correctly, hence the need for separate functions. */
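         /* Hypothetical example: for a branch at RIP=0x1234 with CS.BASE=0x10000 in legacy mode,
            AMD would thus record 0x1234 while Intel would record the linear address 0x11234. */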
3224 /** @todo implement last branch records. */
3225 *puValue = 0;
3226 return VINF_SUCCESS;
3227}
3228
3229
3230/** @callback_method_impl{FNCPUMRDMSR} */
3231static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_P6LastBranchToIp(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3232{
3233 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3234 /** @todo implement last branch records. */
3235 *puValue = 0;
3236 return VINF_SUCCESS;
3237}
3238
3239
3240/** @callback_method_impl{FNCPUMRDMSR} */
3241static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_P6LastIntFromIp(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3242{
3243 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3244 /** @todo implement last exception records. */
3245 *puValue = 0;
3246 return VINF_SUCCESS;
3247}
3248
3249
3250/** @callback_method_impl{FNCPUMWRMSR} */
3251static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_P6LastIntFromIp(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3252{
3253 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3254 /** @todo implement last exception records. */
3255     /* Note! On many CPUs, the high bit of the 0x000001dd register is always writable, even when the result is
3256        a non-canonical address. */
3257 return VINF_SUCCESS;
3258}
3259
3260
3261/** @callback_method_impl{FNCPUMRDMSR} */
3262static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_P6LastIntToIp(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3263{
3264 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3265 /** @todo implement last exception records. */
3266 *puValue = 0;
3267 return VINF_SUCCESS;
3268}
3269
3270
3271/** @callback_method_impl{FNCPUMWRMSR} */
3272static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_P6LastIntToIp(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3273{
3274 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3275 /** @todo implement last exception records. */
3276 return VINF_SUCCESS;
3277}
3278
3279
3280
3281/*
3282 * AMD specific
3283 * AMD specific
3284 * AMD specific
3285 */
3286
3287
3288/** @callback_method_impl{FNCPUMRDMSR} */
3289static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hTscRate(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3290{
3291 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3292 /** @todo Implement TscRateMsr */
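         /* Presumably an 8.32 fixed-point ratio (bits 39:32 integer part, bits 31:0 fraction), so
            RT_MAKE_U64(0, 1) = 0x0000000100000000 encodes a ratio of 1.0. */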
3293 *puValue = RT_MAKE_U64(0, 1); /* 1.0 = reset value. */
3294 return VINF_SUCCESS;
3295}
3296
3297
3298/** @callback_method_impl{FNCPUMWRMSR} */
3299static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hTscRate(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3300{
3301 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3302 /** @todo Implement TscRateMsr */
3303 return VINF_SUCCESS;
3304}
3305
3306
3307/** @callback_method_impl{FNCPUMRDMSR} */
3308static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hLwpCfg(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3309{
3310 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3311 /** @todo Implement AMD LWP? (Instructions: LWPINS, LWPVAL, LLWPCB, SLWPCB) */
3312     /* Note: Only listed in the BKDG for Family 15h. */
3313 *puValue = 0;
3314 return VINF_SUCCESS;
3315}
3316
3317
3318/** @callback_method_impl{FNCPUMWRMSR} */
3319static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hLwpCfg(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3320{
3321 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3322 /** @todo Implement AMD LWP? (Instructions: LWPINS, LWPVAL, LLWPCB, SLWPCB) */
3323 return VINF_SUCCESS;
3324}
3325
3326
3327/** @callback_method_impl{FNCPUMRDMSR} */
3328static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hLwpCbAddr(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3329{
3330 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3331 /** @todo Implement AMD LWP? (Instructions: LWPINS, LWPVAL, LLWPCB, SLWPCB) */
3332     /* Note: Only listed in the BKDG for Family 15h. */
3333 *puValue = 0;
3334 return VINF_SUCCESS;
3335}
3336
3337
3338/** @callback_method_impl{FNCPUMWRMSR} */
3339static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hLwpCbAddr(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3340{
3341 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3342 /** @todo Implement AMD LWP? (Instructions: LWPINS, LWPVAL, LLWPCB, SLWPCB) */
3343 return VINF_SUCCESS;
3344}
3345
3346
3347/** @callback_method_impl{FNCPUMRDMSR} */
3348static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hMc4MiscN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3349{
3350 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3351 /** @todo machine check. */
3352 *puValue = 0;
3353 return VINF_SUCCESS;
3354}
3355
3356
3357/** @callback_method_impl{FNCPUMWRMSR} */
3358static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hMc4MiscN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3359{
3360 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3361 /** @todo machine check. */
3362 return VINF_SUCCESS;
3363}
3364
3365
3366/** @callback_method_impl{FNCPUMRDMSR} */
3367static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8PerfCtlN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3368{
3369 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3370 /** @todo AMD performance events. */
3371 *puValue = 0;
3372 return VINF_SUCCESS;
3373}
3374
3375
3376/** @callback_method_impl{FNCPUMWRMSR} */
3377static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8PerfCtlN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3378{
3379 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3380 /** @todo AMD performance events. */
3381 return VINF_SUCCESS;
3382}
3383
3384
3385/** @callback_method_impl{FNCPUMRDMSR} */
3386static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8PerfCtrN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3387{
3388 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3389 /** @todo AMD performance events. */
3390 *puValue = 0;
3391 return VINF_SUCCESS;
3392}
3393
3394
3395/** @callback_method_impl{FNCPUMWRMSR} */
3396static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8PerfCtrN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3397{
3398 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3399 /** @todo AMD performance events. */
3400 return VINF_SUCCESS;
3401}
3402
3403
3404/** @callback_method_impl{FNCPUMRDMSR} */
3405static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SysCfg(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3406{
3407 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
3408 /** @todo AMD SYS_CFG */
3409 *puValue = pRange->uValue;
3410 return VINF_SUCCESS;
3411}
3412
3413
3414/** @callback_method_impl{FNCPUMWRMSR} */
3415static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SysCfg(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3416{
3417 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3418 /** @todo AMD SYS_CFG */
3419 return VINF_SUCCESS;
3420}
3421
3422
3423/** @callback_method_impl{FNCPUMRDMSR} */
3424static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8HwCr(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3425{
3426 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3427 /** @todo AMD HW_CFG */
3428 *puValue = 0;
3429 return VINF_SUCCESS;
3430}
3431
3432
3433/** @callback_method_impl{FNCPUMWRMSR} */
3434static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8HwCr(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3435{
3436 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3437 /** @todo AMD HW_CFG */
3438 return VINF_SUCCESS;
3439}
3440
3441
3442/** @callback_method_impl{FNCPUMRDMSR} */
3443static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8IorrBaseN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3444{
3445 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3446 /** @todo AMD IorrMask/IorrBase */
3447 *puValue = 0;
3448 return VINF_SUCCESS;
3449}
3450
3451
3452/** @callback_method_impl{FNCPUMWRMSR} */
3453static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8IorrBaseN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3454{
3455 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3456 /** @todo AMD IorrMask/IorrBase */
3457 return VINF_SUCCESS;
3458}
3459
3460
3461/** @callback_method_impl{FNCPUMRDMSR} */
3462static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8IorrMaskN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3463{
3464 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3465 /** @todo AMD IorrMask/IorrBase */
3466 *puValue = 0;
3467 return VINF_SUCCESS;
3468}
3469
3470
3471/** @callback_method_impl{FNCPUMWRMSR} */
3472static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8IorrMaskN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3473{
3474 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3475 /** @todo AMD IorrMask/IorrBase */
3476 return VINF_SUCCESS;
3477}
3478
3479
3480/** @callback_method_impl{FNCPUMRDMSR} */
3481static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8TopOfMemN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3482{
3483 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3484 *puValue = 0;
3485 /** @todo return 4GB - RamHoleSize here for TOPMEM. Figure out what to return
3486 * for TOPMEM2. */
3487 //if (pRange->uValue == 0)
3488 // *puValue = _4G - RamHoleSize;
3489 return VINF_SUCCESS;
3490}
3491
3492
3493/** @callback_method_impl{FNCPUMWRMSR} */
3494static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8TopOfMemN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3495{
3496 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3497 /** @todo AMD TOPMEM and TOPMEM2/TOM2. */
3498 return VINF_SUCCESS;
3499}
3500
3501
3502/** @callback_method_impl{FNCPUMRDMSR} */
3503static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8NbCfg1(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3504{
3505 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3506 /** @todo AMD NB_CFG1 */
3507 *puValue = 0;
3508 return VINF_SUCCESS;
3509}
3510
3511
3512/** @callback_method_impl{FNCPUMWRMSR} */
3513static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8NbCfg1(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3514{
3515 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3516 /** @todo AMD NB_CFG1 */
3517 return VINF_SUCCESS;
3518}
3519
3520
3521/** @callback_method_impl{FNCPUMRDMSR} */
3522static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8McXcptRedir(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3523{
3524 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3525 /** @todo machine check. */
3526 *puValue = 0;
3527 return VINF_SUCCESS;
3528}
3529
3530
3531/** @callback_method_impl{FNCPUMWRMSR} */
3532static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8McXcptRedir(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3533{
3534 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3535 /** @todo machine check. */
3536 return VINF_SUCCESS;
3537}
3538
3539
3540/** @callback_method_impl{FNCPUMRDMSR} */
3541static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuNameN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3542{
3543 RT_NOREF_PV(idMsr);
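         /* The six CPU name MSRs mirror CPUID leaves 0x80000002..0x80000004; a guest could rebuild
            the 48-byte brand string with something like this (hypothetical guest-side sketch):
                uint64_t au64Name[6];
                for (unsigned i = 0; i < 6; i++)
                    au64Name[i] = ASMRdMsr(0xc0010030 + i);
            pRange->uValue presumably runs 2..7 here, so even values return the EAX/EBX half and
            odd values the ECX/EDX half of the corresponding leaf. */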
3544 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeaf(pVCpu->CTX_SUFF(pVM), pRange->uValue / 2 + 0x80000001);
3545 if (pLeaf)
3546 {
3547 if (!(pRange->uValue & 1))
3548 *puValue = RT_MAKE_U64(pLeaf->uEax, pLeaf->uEbx);
3549 else
3550 *puValue = RT_MAKE_U64(pLeaf->uEcx, pLeaf->uEdx);
3551 }
3552 else
3553 *puValue = 0;
3554 return VINF_SUCCESS;
3555}
3556
3557
3558/** @callback_method_impl{FNCPUMWRMSR} */
3559static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuNameN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3560{
3561 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3562 /** @todo Remember guest programmed CPU name. */
3563 return VINF_SUCCESS;
3564}
3565
3566
3567/** @callback_method_impl{FNCPUMRDMSR} */
3568static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8HwThermalCtrl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3569{
3570 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
3571 /** @todo AMD HTC. */
3572 *puValue = pRange->uValue;
3573 return VINF_SUCCESS;
3574}
3575
3576
3577/** @callback_method_impl{FNCPUMWRMSR} */
3578static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8HwThermalCtrl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3579{
3580 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3581 /** @todo AMD HTC. */
3582 return VINF_SUCCESS;
3583}
3584
3585
3586/** @callback_method_impl{FNCPUMRDMSR} */
3587static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SwThermalCtrl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3588{
3589 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3590 /** @todo AMD STC. */
3591 *puValue = 0;
3592 return VINF_SUCCESS;
3593}
3594
3595
3596/** @callback_method_impl{FNCPUMWRMSR} */
3597static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SwThermalCtrl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3598{
3599 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3600 /** @todo AMD STC. */
3601 return VINF_SUCCESS;
3602}
3603
3604
3605/** @callback_method_impl{FNCPUMRDMSR} */
3606static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8FidVidControl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3607{
3608 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
3609 /** @todo AMD FIDVID_CTL. */
3610 *puValue = pRange->uValue;
3611 return VINF_SUCCESS;
3612}
3613
3614
3615/** @callback_method_impl{FNCPUMWRMSR} */
3616static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8FidVidControl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3617{
3618 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3619 /** @todo AMD FIDVID_CTL. */
3620 return VINF_SUCCESS;
3621}
3622
3623
3624/** @callback_method_impl{FNCPUMRDMSR} */
3625static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8FidVidStatus(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3626{
3627 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
3628 /** @todo AMD FIDVID_STATUS. */
3629 *puValue = pRange->uValue;
3630 return VINF_SUCCESS;
3631}
3632
3633
3634/** @callback_method_impl{FNCPUMRDMSR} */
3635static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8McCtlMaskN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3636{
3637 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3638 /** @todo AMD MC. */
3639 *puValue = 0;
3640 return VINF_SUCCESS;
3641}
3642
3643
3644/** @callback_method_impl{FNCPUMWRMSR} */
3645static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8McCtlMaskN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3646{
3647 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3648 /** @todo AMD MC. */
3649 return VINF_SUCCESS;
3650}
3651
3652
3653/** @callback_method_impl{FNCPUMRDMSR} */
3654static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmiOnIoTrapN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3655{
3656 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3657 /** @todo AMD SMM/SMI and I/O trap. */
3658 *puValue = 0;
3659 return VINF_SUCCESS;
3660}
3661
3662
3663/** @callback_method_impl{FNCPUMWRMSR} */
3664static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmiOnIoTrapN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3665{
3666 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3667 /** @todo AMD SMM/SMI and I/O trap. */
3668 return VINF_SUCCESS;
3669}
3670
3671
3672/** @callback_method_impl{FNCPUMRDMSR} */
3673static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmiOnIoTrapCtlSts(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3674{
3675 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3676 /** @todo AMD SMM/SMI and I/O trap. */
3677 *puValue = 0;
3678 return VINF_SUCCESS;
3679}
3680
3681
3682/** @callback_method_impl{FNCPUMWRMSR} */
3683static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmiOnIoTrapCtlSts(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3684{
3685 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3686 /** @todo AMD SMM/SMI and I/O trap. */
3687 return VINF_SUCCESS;
3688}
3689
3690
3691/** @callback_method_impl{FNCPUMRDMSR} */
3692static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8IntPendingMessage(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3693{
3694 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3695 /** @todo Interrupt pending message. */
3696 *puValue = 0;
3697 return VINF_SUCCESS;
3698}
3699
3700
3701/** @callback_method_impl{FNCPUMWRMSR} */
3702static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8IntPendingMessage(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3703{
3704 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3705 /** @todo Interrupt pending message. */
3706 return VINF_SUCCESS;
3707}
3708
3709
3710/** @callback_method_impl{FNCPUMRDMSR} */
3711static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmiTriggerIoCycle(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3712{
3713 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3714 /** @todo AMD SMM/SMI and trigger I/O cycle. */
3715 *puValue = 0;
3716 return VINF_SUCCESS;
3717}
3718
3719
3720/** @callback_method_impl{FNCPUMWRMSR} */
3721static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmiTriggerIoCycle(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3722{
3723 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3724 /** @todo AMD SMM/SMI and trigger I/O cycle. */
3725 return VINF_SUCCESS;
3726}
3727
3728
3729/** @callback_method_impl{FNCPUMRDMSR} */
3730static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hMmioCfgBaseAddr(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3731{
3732 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3733 /** @todo AMD MMIO Configuration base address. */
3734 *puValue = 0;
3735 return VINF_SUCCESS;
3736}
3737
3738
3739/** @callback_method_impl{FNCPUMWRMSR} */
3740static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hMmioCfgBaseAddr(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3741{
3742 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3743 /** @todo AMD MMIO Configuration base address. */
3744 return VINF_SUCCESS;
3745}
3746
3747
3748/** @callback_method_impl{FNCPUMRDMSR} */
3749static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hTrapCtlMaybe(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3750{
3751 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3752 /** @todo AMD 0xc0010059. */
3753 *puValue = 0;
3754 return VINF_SUCCESS;
3755}
3756
3757
3758/** @callback_method_impl{FNCPUMWRMSR} */
3759static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hTrapCtlMaybe(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3760{
3761 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3762 /** @todo AMD 0xc0010059. */
3763 return VINF_SUCCESS;
3764}
3765
3766
3767/** @callback_method_impl{FNCPUMRDMSR} */
3768static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hPStateCurLimit(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3769{
3770 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
3771 /** @todo AMD P-states. */
3772 *puValue = pRange->uValue;
3773 return VINF_SUCCESS;
3774}
3775
3776
3777/** @callback_method_impl{FNCPUMRDMSR} */
3778static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hPStateControl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3779{
3780 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
3781 /** @todo AMD P-states. */
3782 *puValue = pRange->uValue;
3783 return VINF_SUCCESS;
3784}
3785
3786
3787/** @callback_method_impl{FNCPUMWRMSR} */
3788static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hPStateControl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3789{
3790 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3791 /** @todo AMD P-states. */
3792 return VINF_SUCCESS;
3793}
3794
3795
3796/** @callback_method_impl{FNCPUMRDMSR} */
3797static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hPStateStatus(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3798{
3799 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
3800 /** @todo AMD P-states. */
3801 *puValue = pRange->uValue;
3802 return VINF_SUCCESS;
3803}
3804
3805
3806/** @callback_method_impl{FNCPUMWRMSR} */
3807static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hPStateStatus(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3808{
3809 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3810 /** @todo AMD P-states. */
3811 return VINF_SUCCESS;
3812}
3813
3814
3815/** @callback_method_impl{FNCPUMRDMSR} */
3816static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hPStateN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3817{
3818 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
3819 /** @todo AMD P-states. */
3820 *puValue = pRange->uValue;
3821 return VINF_SUCCESS;
3822}
3823
3824
3825/** @callback_method_impl{FNCPUMWRMSR} */
3826static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hPStateN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3827{
3828 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3829 /** @todo AMD P-states. */
3830 return VINF_SUCCESS;
3831}
3832
3833
3834/** @callback_method_impl{FNCPUMRDMSR} */
3835static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hCofVidControl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3836{
3837 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
3838 /** @todo AMD P-states. */
3839 *puValue = pRange->uValue;
3840 return VINF_SUCCESS;
3841}
3842
3843
3844/** @callback_method_impl{FNCPUMWRMSR} */
3845static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hCofVidControl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3846{
3847 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3848 /** @todo AMD P-states. */
3849 return VINF_SUCCESS;
3850}
3851
3852
3853/** @callback_method_impl{FNCPUMRDMSR} */
3854static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hCofVidStatus(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3855{
3856 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
3857 /** @todo AMD P-states. */
3858 *puValue = pRange->uValue;
3859 return VINF_SUCCESS;
3860}
3861
3862
3863/** @callback_method_impl{FNCPUMWRMSR} */
3864static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hCofVidStatus(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3865{
3866 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3867     /* Note! Writing 0 does not seem to #GP; unsure whether it actually changes the value... */
3868 /** @todo AMD P-states. */
3869 return VINF_SUCCESS;
3870}
3871
3872
3873/** @callback_method_impl{FNCPUMRDMSR} */
3874static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hCStateIoBaseAddr(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3875{
3876 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3877 /** @todo AMD C-states. */
3878 *puValue = 0;
3879 return VINF_SUCCESS;
3880}
3881
3882
3883/** @callback_method_impl{FNCPUMWRMSR} */
3884static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hCStateIoBaseAddr(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3885{
3886 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3887 /** @todo AMD C-states. */
3888 return VINF_SUCCESS;
3889}
3890
3891
3892/** @callback_method_impl{FNCPUMRDMSR} */
3893static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hCpuWatchdogTimer(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3894{
3895 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3896 /** @todo AMD machine checks. */
3897 *puValue = 0;
3898 return VINF_SUCCESS;
3899}
3900
3901
3902/** @callback_method_impl{FNCPUMWRMSR} */
3903static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hCpuWatchdogTimer(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3904{
3905 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3906 /** @todo AMD machine checks. */
3907 return VINF_SUCCESS;
3908}
3909
3910
3911/** @callback_method_impl{FNCPUMRDMSR} */
3912static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmmBase(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3913{
3914 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3915 /** @todo AMD SMM. */
3916 *puValue = 0;
3917 return VINF_SUCCESS;
3918}
3919
3920
3921/** @callback_method_impl{FNCPUMWRMSR} */
3922static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmmBase(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3923{
3924 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3925 /** @todo AMD SMM. */
3926 return VINF_SUCCESS;
3927}
3928
3929
3930/** @callback_method_impl{FNCPUMRDMSR} */
3931static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmmAddr(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3932{
3933 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3934 /** @todo AMD SMM. */
3935 *puValue = 0;
3936 return VINF_SUCCESS;
3937}
3938
3939
3940/** @callback_method_impl{FNCPUMWRMSR} */
3941static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmmAddr(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3942{
3943 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3944 /** @todo AMD SMM. */
3945 return VINF_SUCCESS;
3946}
3947
3948
3949
3950/** @callback_method_impl{FNCPUMRDMSR} */
3951static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmmMask(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3952{
3953 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3954 /** @todo AMD SMM. */
3955 *puValue = 0;
3956 return VINF_SUCCESS;
3957}
3958
3959
3960/** @callback_method_impl{FNCPUMWRMSR} */
3961static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmmMask(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3962{
3963 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
3964 /** @todo AMD SMM. */
3965 return VINF_SUCCESS;
3966}
3967
3968
3969/** @callback_method_impl{FNCPUMRDMSR} */
3970static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8VmCr(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
3971{
3972 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
3973 PVM pVM = pVCpu->CTX_SUFF(pVM);
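         /* With SVM exposed to the guest, only the LOCK bit reads as set (SVMDIS clear, i.e. SVM
            not disabled); without SVM the MSR reads as all zeros. */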
3974 if (pVM->cpum.s.GuestFeatures.fSvm)
3975 *puValue = MSR_K8_VM_CR_LOCK;
3976 else
3977 *puValue = 0;
3978 return VINF_SUCCESS;
3979}
3980
3981
3982/** @callback_method_impl{FNCPUMWRMSR} */
3983static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8VmCr(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
3984{
3985 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
3986 PVM pVM = pVCpu->CTX_SUFF(pVM);
3987 if (pVM->cpum.s.GuestFeatures.fSvm)
3988 {
3989 /* Silently ignore writes to LOCK and SVM_DISABLE bit when the LOCK bit is set (see cpumMsrRd_AmdK8VmCr). */
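             /* Attempts to set any of the other defined bits (DPD, R_INIT, DIS_A20M) raise #GP. */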
3990 if (uValue & (MSR_K8_VM_CR_DPD | MSR_K8_VM_CR_R_INIT | MSR_K8_VM_CR_DIS_A20M))
3991 return VERR_CPUM_RAISE_GP_0;
3992 return VINF_SUCCESS;
3993 }
3994 return VERR_CPUM_RAISE_GP_0;
3995}
3996
3997
3998/** @callback_method_impl{FNCPUMRDMSR} */
3999static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8IgnNe(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4000{
4001 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4002 /** @todo AMD IGNNE\# control. */
4003 *puValue = 0;
4004 return VINF_SUCCESS;
4005}
4006
4007
4008/** @callback_method_impl{FNCPUMWRMSR} */
4009static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8IgnNe(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4010{
4011 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4012 /** @todo AMD IGNNE\# control. */
4013 return VINF_SUCCESS;
4014}
4015
4016
4017/** @callback_method_impl{FNCPUMRDMSR} */
4018static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8SmmCtl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4019{
4020 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4021 /** @todo AMD SMM. */
4022 *puValue = 0;
4023 return VINF_SUCCESS;
4024}
4025
4026
4027/** @callback_method_impl{FNCPUMWRMSR} */
4028static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8SmmCtl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4029{
4030 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4031 /** @todo AMD SMM. */
4032 return VINF_SUCCESS;
4033}
4034
4035
4036/** @callback_method_impl{FNCPUMRDMSR} */
4037static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8VmHSavePa(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4038{
4039 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4040 *puValue = pVCpu->cpum.s.Guest.hwvirt.svm.uMsrHSavePa;
4041 return VINF_SUCCESS;
4042}
4043
4044
4045/** @callback_method_impl{FNCPUMWRMSR} */
4046static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8VmHSavePa(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4047{
4048 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uRawValue);
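         /* The host-state save area must be 4KB aligned, i.e. the low 12 bits must be clear. */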
4049 if (uValue & UINT64_C(0xfff))
4050 {
4051         Log(("CPUM: Invalid low 12 bits set writing host-state save area MSR %#x: %#llx\n", idMsr, uValue));
4052 return VERR_CPUM_RAISE_GP_0;
4053 }
4054
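         /* The address must also fit within the guest's maximum physical address width. */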
4055 uint64_t fInvPhysMask = ~(RT_BIT_64(pVCpu->CTX_SUFF(pVM)->cpum.s.GuestFeatures.cMaxPhysAddrWidth) - 1U);
4056 if (fInvPhysMask & uValue)
4057 {
4058 Log(("CPUM: Invalid physical address bits set writing host-state save area MSR %#x: %#llx (%#llx)\n",
4059 idMsr, uValue, uValue & fInvPhysMask));
4060 return VERR_CPUM_RAISE_GP_0;
4061 }
4062
4063 pVCpu->cpum.s.Guest.hwvirt.svm.uMsrHSavePa = uValue;
4064 return VINF_SUCCESS;
4065}
4066
4067
4068/** @callback_method_impl{FNCPUMRDMSR} */
4069static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hVmLockKey(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4070{
4071 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4072 /** @todo AMD SVM. */
4073 *puValue = 0; /* RAZ */
4074 return VINF_SUCCESS;
4075}
4076
4077
4078/** @callback_method_impl{FNCPUMWRMSR} */
4079static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hVmLockKey(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4080{
4081 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4082 /** @todo AMD SVM. */
4083 return VINF_SUCCESS;
4084}
4085
4086
4087/** @callback_method_impl{FNCPUMRDMSR} */
4088static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hSmmLockKey(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4089{
4090 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4091 /** @todo AMD SMM. */
4092 *puValue = 0; /* RAZ */
4093 return VINF_SUCCESS;
4094}
4095
4096
4097/** @callback_method_impl{FNCPUMWRMSR} */
4098static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hSmmLockKey(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4099{
4100 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4101 /** @todo AMD SMM. */
4102 return VINF_SUCCESS;
4103}
4104
4105
4106/** @callback_method_impl{FNCPUMRDMSR} */
4107static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hLocalSmiStatus(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4108{
4109 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4110 /** @todo AMD SMM/SMI. */
4111 *puValue = 0;
4112 return VINF_SUCCESS;
4113}
4114
4115
4116/** @callback_method_impl{FNCPUMWRMSR} */
4117static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hLocalSmiStatus(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4118{
4119 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4120 /** @todo AMD SMM/SMI. */
4121 return VINF_SUCCESS;
4122}
4123
4124
4125/** @callback_method_impl{FNCPUMRDMSR} */
4126static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hOsVisWrkIdLength(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4127{
4128 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
4129 /** @todo AMD OS visible workaround. */
4130 *puValue = pRange->uValue;
4131 return VINF_SUCCESS;
4132}
4133
4134
4135/** @callback_method_impl{FNCPUMWRMSR} */
4136static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hOsVisWrkIdLength(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4137{
4138 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4139 /** @todo AMD OS visible workaround. */
4140 return VINF_SUCCESS;
4141}
4142
4143
4144/** @callback_method_impl{FNCPUMRDMSR} */
4145static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hOsVisWrkStatus(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4146{
4147 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4148 /** @todo AMD OS visible workaround. */
4149 *puValue = 0;
4150 return VINF_SUCCESS;
4151}
4152
4153
4154/** @callback_method_impl{FNCPUMWRMSR} */
4155static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hOsVisWrkStatus(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4156{
4157 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4158 /** @todo AMD OS visible workaround. */
4159 return VINF_SUCCESS;
4160}
4161
4162
4163/** @callback_method_impl{FNCPUMRDMSR} */
4164static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam16hL2IPerfCtlN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4165{
4166 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4167 /** @todo AMD L2I performance counters. */
4168 *puValue = 0;
4169 return VINF_SUCCESS;
4170}
4171
4172
4173/** @callback_method_impl{FNCPUMWRMSR} */
4174static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam16hL2IPerfCtlN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4175{
4176 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4177 /** @todo AMD L2I performance counters. */
4178 return VINF_SUCCESS;
4179}
4180
4181
4182/** @callback_method_impl{FNCPUMRDMSR} */
4183static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam16hL2IPerfCtrN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4184{
4185 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4186 /** @todo AMD L2I performance counters. */
4187 *puValue = 0;
4188 return VINF_SUCCESS;
4189}
4190
4191
4192/** @callback_method_impl{FNCPUMWRMSR} */
4193static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam16hL2IPerfCtrN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4194{
4195 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4196 /** @todo AMD L2I performance counters. */
4197 return VINF_SUCCESS;
4198}
4199
4200
4201/** @callback_method_impl{FNCPUMRDMSR} */
4202static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hNorthbridgePerfCtlN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4203{
4204 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4205 /** @todo AMD Northbridge performance counters. */
4206 *puValue = 0;
4207 return VINF_SUCCESS;
4208}
4209
4210
4211/** @callback_method_impl{FNCPUMWRMSR} */
4212static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hNorthbridgePerfCtlN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4213{
4214 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4215 /** @todo AMD Northbridge performance counters. */
4216 return VINF_SUCCESS;
4217}
4218
4219
4220/** @callback_method_impl{FNCPUMRDMSR} */
4221static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hNorthbridgePerfCtrN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4222{
4223 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4224 /** @todo AMD Northbridge performance counters. */
4225 *puValue = 0;
4226 return VINF_SUCCESS;
4227}
4228
4229
4230/** @callback_method_impl{FNCPUMWRMSR} */
4231static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hNorthbridgePerfCtrN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4232{
4233 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4234 /** @todo AMD Northbridge performance counters. */
4235 return VINF_SUCCESS;
4236}
4237
4238
4239/** @callback_method_impl{FNCPUMRDMSR} */
4240static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7MicrocodeCtl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4241{
4242 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
4243     /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4244      *        CPUs. Needs to be explored and K7 presence verified. */
4245 /** @todo Undocumented register only seen mentioned in fam15h erratum \#608. */
4246 *puValue = pRange->uValue;
4247 return VINF_SUCCESS;
4248}
4249
4250
4251/** @callback_method_impl{FNCPUMWRMSR} */
4252static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7MicrocodeCtl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4253{
4254 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4255     /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4256      *        CPUs. Needs to be explored and K7 presence verified. */
4257 /** @todo Undocumented register only seen mentioned in fam15h erratum \#608. */
4258 return VINF_SUCCESS;
4259}
4260
4261
4262/** @callback_method_impl{FNCPUMRDMSR} */
4263static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7ClusterIdMaybe(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4264{
4265 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
4266     /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4267      *        CPUs. Needs to be explored and K7 presence verified. */
4268 /** @todo Undocumented register only seen mentioned in fam16h BKDG r3.00 when
4269 * describing EBL_CR_POWERON. */
4270 *puValue = pRange->uValue;
4271 return VINF_SUCCESS;
4272}
4273
4274
4275/** @callback_method_impl{FNCPUMWRMSR} */
4276static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7ClusterIdMaybe(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4277{
4278 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4279     /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4280      *        CPUs. Needs to be explored and K7 presence verified. */
4281 /** @todo Undocumented register only seen mentioned in fam16h BKDG r3.00 when
4282 * describing EBL_CR_POWERON. */
4283 return VINF_SUCCESS;
4284}
4285
4286
4287/** @callback_method_impl{FNCPUMRDMSR} */
4288static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuIdCtlStd07hEbax(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4289{
4290 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
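         /* Returns EBX in the low dword and EAX in the high dword of CPUID leaf 7, sub-leaf 0;
            zero if the leaf is absent. */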
4291 bool fIgnored;
4292 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeafEx(pVCpu->CTX_SUFF(pVM), 0x00000007, 0, &fIgnored);
4293 if (pLeaf)
4294 *puValue = RT_MAKE_U64(pLeaf->uEbx, pLeaf->uEax);
4295 else
4296 *puValue = 0;
4297 return VINF_SUCCESS;
4298}
4299
4300
4301/** @callback_method_impl{FNCPUMWRMSR} */
4302static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuIdCtlStd07hEbax(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4303{
4304 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4305 /** @todo Changing CPUID leaf 7/0. */
4306 return VINF_SUCCESS;
4307}
4308
4309
4310/** @callback_method_impl{FNCPUMRDMSR} */
4311static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuIdCtlStd06hEcx(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4312{
4313 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4314 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeaf(pVCpu->CTX_SUFF(pVM), 0x00000006);
4315 if (pLeaf)
4316 *puValue = pLeaf->uEcx;
4317 else
4318 *puValue = 0;
4319 return VINF_SUCCESS;
4320}
4321
4322
4323/** @callback_method_impl{FNCPUMWRMSR} */
4324static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuIdCtlStd06hEcx(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4325{
4326 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4327 /** @todo Changing CPUID leaf 6. */
4328 return VINF_SUCCESS;
4329}
4330
4331
4332/** @callback_method_impl{FNCPUMRDMSR} */
4333static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuIdCtlStd01hEdcx(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4334{
4335 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4336 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeaf(pVCpu->CTX_SUFF(pVM), 0x00000001);
4337 if (pLeaf)
4338 *puValue = RT_MAKE_U64(pLeaf->uEdx, pLeaf->uEcx);
4339 else
4340 *puValue = 0;
4341 return VINF_SUCCESS;
4342}
4343
4344
4345/** @callback_method_impl{FNCPUMWRMSR} */
4346static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuIdCtlStd01hEdcx(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4347{
4348 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4349 /** @todo Changing CPUID leaf 0x00000001. */
4350 return VINF_SUCCESS;
4351}
4352
4353
4354/** @callback_method_impl{FNCPUMRDMSR} */
4355static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8CpuIdCtlExt01hEdcx(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4356{
4357 RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4358 PCPUMCPUIDLEAF pLeaf = cpumCpuIdGetLeaf(pVCpu->CTX_SUFF(pVM), 0x80000001);
4359 if (pLeaf)
4360 *puValue = RT_MAKE_U64(pLeaf->uEdx, pLeaf->uEcx);
4361 else
4362 *puValue = 0;
4363 return VINF_SUCCESS;
4364}
4365
4366
4367/** @callback_method_impl{FNCPUMWRMSR} */
4368static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8CpuIdCtlExt01hEdcx(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4369{
4370 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4371 /** @todo Changing CPUID leaf 0x80000001. */
4372 return VINF_SUCCESS;
4373}
4374
4375
4376/** @callback_method_impl{FNCPUMRDMSR} */
4377static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK8PatchLevel(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4378{
4379 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr);
4380 /** @todo Fake AMD microcode patching. */
4381 *puValue = pRange->uValue;
4382 return VINF_SUCCESS;
4383}
4384
4385
4386/** @callback_method_impl{FNCPUMWRMSR} */
4387static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK8PatchLoader(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4388{
4389 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4390 /** @todo Fake AMD microcode patching. */
4391 return VINF_SUCCESS;
4392}
4393
4394
4395/** @callback_method_impl{FNCPUMRDMSR} */
4396static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7DebugStatusMaybe(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4397{
4398 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4399 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4400 * CPUs. Needs to be explored and K7 presence verified. */
4401 /** @todo undocumented */
4402 *puValue = 0;
4403 return VINF_SUCCESS;
4404}
4405
4406
4407/** @callback_method_impl{FNCPUMWRMSR} */
4408static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7DebugStatusMaybe(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4409{
4410 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4411 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4412 * CPUs. Needs to be explored and K7 presence verified. */
4413 /** @todo undocumented */
4414 return VINF_SUCCESS;
4415}
4416
4417
4418/** @callback_method_impl{FNCPUMRDMSR} */
4419static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7BHTraceBaseMaybe(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4420{
4421 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4422 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4423 * CPUs. Needs to be explored and K7 presence verified. */
4424 /** @todo undocumented */
4425 *puValue = 0;
4426 return VINF_SUCCESS;
4427}
4428
4429
4430/** @callback_method_impl{FNCPUMWRMSR} */
4431static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7BHTraceBaseMaybe(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4432{
4433 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4434 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4435 * CPUs. Needs to be explored and K7 presence verified. */
4436 /** @todo undocumented */
4437 return VINF_SUCCESS;
4438}
4439
4440
4441/** @callback_method_impl{FNCPUMRDMSR} */
4442static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7BHTracePtrMaybe(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4443{
4444 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4445 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4446 * CPUs. Needs to be explored and K7 presence verified. */
4447 /** @todo undocumented */
4448 *puValue = 0;
4449 return VINF_SUCCESS;
4450}
4451
4452
4453/** @callback_method_impl{FNCPUMWRMSR} */
4454static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7BHTracePtrMaybe(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4455{
4456 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4457 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4458 * CPUs. Needs to be explored and K7 presence verified. */
4459 /** @todo undocumented */
4460 return VINF_SUCCESS;
4461}
4462
4463
4464/** @callback_method_impl{FNCPUMRDMSR} */
4465static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7BHTraceLimitMaybe(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4466{
4467 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4468 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4469 * CPUs. Needs to be explored and K7 presence verified. */
4470 /** @todo undocumented */
4471 *puValue = 0;
4472 return VINF_SUCCESS;
4473}
4474
4475
4476/** @callback_method_impl{FNCPUMWRMSR} */
4477static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7BHTraceLimitMaybe(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4478{
4479 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4480 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4481 * CPUs. Needs to be explored and K7 presence verified. */
4482 /** @todo undocumented */
4483 return VINF_SUCCESS;
4484}
4485
4486
4487/** @callback_method_impl{FNCPUMRDMSR} */
4488static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7HardwareDebugToolCfgMaybe(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4489{
4490 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4491 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4492 * CPUs. Needs to be explored and K7 presence verified. */
4493 /** @todo undocumented */
4494 *puValue = 0;
4495 return VINF_SUCCESS;
4496}
4497
4498
4499/** @callback_method_impl{FNCPUMWRMSR} */
4500static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7HardwareDebugToolCfgMaybe(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4501{
4502 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4503 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4504 * CPUs. Needs to be explored and K7 presence verified. */
4505 /** @todo undocumented */
4506 return VINF_SUCCESS;
4507}
4508
4509
4510/** @callback_method_impl{FNCPUMRDMSR} */
4511static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7FastFlushCountMaybe(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4512{
4513 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4514 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4515 * CPUs. Needs to be explored and K7 presence verified. */
4516 /** @todo undocumented */
4517 *puValue = 0;
4518 return VINF_SUCCESS;
4519}
4520
4521
4522/** @callback_method_impl{FNCPUMWRMSR} */
4523static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7FastFlushCountMaybe(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4524{
4525 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4526 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4527 * CPUs. Needs to be explored and K7 presence verified. */
4528 /** @todo undocumented */
4529 return VINF_SUCCESS;
4530}
4531
4532
4533/** @callback_method_impl{FNCPUMRDMSR} */
4534static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7NodeId(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4535{
4536 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4537 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4538 * CPUs. Needs to be explored and K7 presence verified. */
4539 /** @todo AMD node ID and bios scratch. */
4540 *puValue = 0; /* nodeid = 0; nodes-per-cpu = 1 */
4541 return VINF_SUCCESS;
4542}
4543
4544
4545/** @callback_method_impl{FNCPUMWRMSR} */
4546static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7NodeId(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4547{
4548 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4549 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4550 * CPUs. Needs to be explored and K7 presence verified. */
4551 /** @todo AMD node ID and bios scratch. */
4552 return VINF_SUCCESS;
4553}
4554
4555
4556/** @callback_method_impl{FNCPUMRDMSR} */
4557static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7DrXAddrMaskN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4558{
4559 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4560 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4561 * CPUs. Needs to be explored and K7 presence verified. */
4562 /** @todo AMD DRx address masking (range breakpoints). */
4563 *puValue = 0;
4564 return VINF_SUCCESS;
4565}
4566
4567
4568/** @callback_method_impl{FNCPUMWRMSR} */
4569static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7DrXAddrMaskN(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4570{
4571 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4572 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4573 * CPUs. Needs to be explored and K7 presence verified. */
4574 /** @todo AMD DRx address masking (range breakpoints). */
4575 return VINF_SUCCESS;
4576}
4577
4578
4579/** @callback_method_impl{FNCPUMRDMSR} */
4580static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7Dr0DataMatchMaybe(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4581{
4582 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4583 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4584 * CPUs. Needs to be explored and K7 presence verified. */
4585 /** @todo AMD undocumented debugging features. */
4586 *puValue = 0;
4587 return VINF_SUCCESS;
4588}
4589
4590
4591/** @callback_method_impl{FNCPUMWRMSR} */
4592static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7Dr0DataMatchMaybe(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4593{
4594 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4595 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4596 * CPUs. Needs to be explored and K7 presence verified. */
4597 /** @todo AMD undocumented debugging features. */
4598 return VINF_SUCCESS;
4599}
4600
4601
4602/** @callback_method_impl{FNCPUMRDMSR} */
4603static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7Dr0DataMaskMaybe(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4604{
4605 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4606 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4607 * CPUs. Needs to be explored and K7 presence verified. */
4608 /** @todo AMD undocumented debugging features. */
4609 *puValue = 0;
4610 return VINF_SUCCESS;
4611}
4612
4613
4614/** @callback_method_impl{FNCPUMWRMSR} */
4615static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7Dr0DataMaskMaybe(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4616{
4617 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4618 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4619 * CPUs. Needs to be explored and K7 presence verified. */
4620 /** @todo AMD undocumented debugging features. */
4621 return VINF_SUCCESS;
4622}
4623
4624
4625/** @callback_method_impl{FNCPUMRDMSR} */
4626static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7LoadStoreCfg(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4627{
4628 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4629 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4630 * CPUs. Needs to be explored and K7 presence verified. */
4631 /** @todo AMD load-store config. */
4632 *puValue = 0;
4633 return VINF_SUCCESS;
4634}
4635
4636
4637/** @callback_method_impl{FNCPUMWRMSR} */
4638static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7LoadStoreCfg(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4639{
4640 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4641 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4642 * CPUs. Needs to be explored and K7 presence verified. */
4643 /** @todo AMD load-store config. */
4644 return VINF_SUCCESS;
4645}
4646
4647
4648/** @callback_method_impl{FNCPUMRDMSR} */
4649static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7InstrCacheCfg(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4650{
4651 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4652 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4653 * CPUs. Needs to be explored and K7 presence verified. */
4654 /** @todo AMD instruction cache config. */
4655 *puValue = 0;
4656 return VINF_SUCCESS;
4657}
4658
4659
4660/** @callback_method_impl{FNCPUMWRMSR} */
4661static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7InstrCacheCfg(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4662{
4663 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4664 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4665 * CPUs. Needs to be explored and K7 presence verified. */
4666 /** @todo AMD instruction cache config. */
4667 return VINF_SUCCESS;
4668}
4669
4670
4671/** @callback_method_impl{FNCPUMRDMSR} */
4672static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7DataCacheCfg(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4673{
4674 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4675 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4676 * CPUs. Needs to be explored and K7 presence verified. */
4677 /** @todo AMD data cache config. */
4678 *puValue = 0;
4679 return VINF_SUCCESS;
4680}
4681
4682
4683/** @callback_method_impl{FNCPUMWRMSR} */
4684static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7DataCacheCfg(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4685{
4686 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4687 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4688 * CPUs. Needs to be explored and K7 presence verified. */
4689 /** @todo AMD data cache config. */
4690 return VINF_SUCCESS;
4691}
4692
4693
4694/** @callback_method_impl{FNCPUMRDMSR} */
4695static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7BusUnitCfg(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4696{
4697 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4698 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4699 * CPUs. Needs to be explored and K7 presence verified. */
4700 /** @todo AMD bus unit config. */
4701 *puValue = 0;
4702 return VINF_SUCCESS;
4703}
4704
4705
4706/** @callback_method_impl{FNCPUMWRMSR} */
4707static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7BusUnitCfg(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4708{
4709 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4710 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4711 * CPUs. Needs to be explored and K7 presence verified. */
4712 /** @todo AMD bus unit config. */
4713 return VINF_SUCCESS;
4714}
4715
4716
4717/** @callback_method_impl{FNCPUMRDMSR} */
4718static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdK7DebugCtl2Maybe(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4719{
4720 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4721 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4722 * CPUs. Needs to be explored and K7 presence verified. */
4723 /** @todo Undocumented AMD debug control register \#2. */
4724 *puValue = 0;
4725 return VINF_SUCCESS;
4726}
4727
4728
4729/** @callback_method_impl{FNCPUMWRMSR} */
4730static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdK7DebugCtl2Maybe(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4731{
4732 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4733 /** @todo Allegedly requires edi=0x9c5a203a when executing rdmsr/wrmsr on older
4734 * CPUs. Needs to be explored and K7 presence verified. */
4735 /** @todo Undocumented AMD debug control register \#2. */
4736 return VINF_SUCCESS;
4737}
4738
4739
4740/** @callback_method_impl{FNCPUMRDMSR} */
4741static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hFpuCfg(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4742{
4743 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4744 /** @todo AMD FPU config. */
4745 *puValue = 0;
4746 return VINF_SUCCESS;
4747}
4748
4749
4750/** @callback_method_impl{FNCPUMWRMSR} */
4751static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hFpuCfg(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4752{
4753 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4754 /** @todo AMD FPU config. */
4755 return VINF_SUCCESS;
4756}
4757
4758
4759/** @callback_method_impl{FNCPUMRDMSR} */
4760static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hDecoderCfg(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4761{
4762 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4763 /** @todo AMD decoder config. */
4764 *puValue = 0;
4765 return VINF_SUCCESS;
4766}
4767
4768
4769/** @callback_method_impl{FNCPUMWRMSR} */
4770static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hDecoderCfg(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4771{
4772 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4773 /** @todo AMD decoder config. */
4774 return VINF_SUCCESS;
4775}
4776
4777
4778/** @callback_method_impl{FNCPUMRDMSR} */
4779static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hBusUnitCfg2(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4780{
4781 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4782 /* Note! 10h and 16h */
4783 /** @todo AMD bus unit config. */
4784 *puValue = 0;
4785 return VINF_SUCCESS;
4786}
4787
4788
4789/** @callback_method_impl{FNCPUMWRMSR} */
4790static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hBusUnitCfg2(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4791{
4792 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4793 /* Note! 10h and 16h */
4794 /** @todo AMD bus unit config. */
4795 return VINF_SUCCESS;
4796}
4797
4798
4799/** @callback_method_impl{FNCPUMRDMSR} */
4800static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hCombUnitCfg(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4801{
4802 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4803 /** @todo AMD unit config. */
4804 *puValue = 0;
4805 return VINF_SUCCESS;
4806}
4807
4808
4809/** @callback_method_impl{FNCPUMWRMSR} */
4810static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hCombUnitCfg(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4811{
4812 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4813 /** @todo AMD unit config. */
4814 return VINF_SUCCESS;
4815}
4816
4817
4818/** @callback_method_impl{FNCPUMRDMSR} */
4819static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hCombUnitCfg2(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4820{
4821 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4822 /** @todo AMD unit config 2. */
4823 *puValue = 0;
4824 return VINF_SUCCESS;
4825}
4826
4827
4828/** @callback_method_impl{FNCPUMWRMSR} */
4829static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hCombUnitCfg2(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4830{
4831 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4832 /** @todo AMD unit config 2. */
4833 return VINF_SUCCESS;
4834}
4835
4836
4837/** @callback_method_impl{FNCPUMRDMSR} */
4838static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hCombUnitCfg3(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4839{
4840 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4841 /** @todo AMD combined unit config 3. */
4842 *puValue = 0;
4843 return VINF_SUCCESS;
4844}
4845
4846
4847/** @callback_method_impl{FNCPUMWRMSR} */
4848static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hCombUnitCfg3(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4849{
4850 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4851 /** @todo AMD combined unit config 3. */
4852 return VINF_SUCCESS;
4853}
4854
4855
4856/** @callback_method_impl{FNCPUMRDMSR} */
4857static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hExecUnitCfg(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4858{
4859 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4860 /** @todo AMD execution unit config. */
4861 *puValue = 0;
4862 return VINF_SUCCESS;
4863}
4864
4865
4866/** @callback_method_impl{FNCPUMWRMSR} */
4867static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hExecUnitCfg(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4868{
4869 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4870 /** @todo AMD execution unit config. */
4871 return VINF_SUCCESS;
4872}
4873
4874
4875/** @callback_method_impl{FNCPUMRDMSR} */
4876static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam15hLoadStoreCfg2(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4877{
4878 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4879 /** @todo AMD load-store config 2. */
4880 *puValue = 0;
4881 return VINF_SUCCESS;
4882}
4883
4884
4885/** @callback_method_impl{FNCPUMWRMSR} */
4886static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam15hLoadStoreCfg2(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4887{
4888 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4889 /** @todo AMD load-store config 2. */
4890 return VINF_SUCCESS;
4891}
4892
4893
4894/** @callback_method_impl{FNCPUMRDMSR} */
4895static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsFetchCtl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4896{
4897 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4898 /** @todo AMD IBS. */
4899 *puValue = 0;
4900 return VINF_SUCCESS;
4901}
4902
4903
4904/** @callback_method_impl{FNCPUMWRMSR} */
4905static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsFetchCtl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4906{
4907 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4908 /** @todo AMD IBS. */
4909 return VINF_SUCCESS;
4910}
4911
4912
4913/** @callback_method_impl{FNCPUMRDMSR} */
4914static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsFetchLinAddr(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4915{
4916 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4917 /** @todo AMD IBS. */
4918 *puValue = 0;
4919 return VINF_SUCCESS;
4920}
4921
4922
4923/** @callback_method_impl{FNCPUMWRMSR} */
4924static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsFetchLinAddr(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4925{
4926 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4927 /** @todo AMD IBS. */
4928 return VINF_SUCCESS;
4929}
4930
4931
4932/** @callback_method_impl{FNCPUMRDMSR} */
4933static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsFetchPhysAddr(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4934{
4935 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4936 /** @todo AMD IBS. */
4937 *puValue = 0;
4938 return VINF_SUCCESS;
4939}
4940
4941
4942/** @callback_method_impl{FNCPUMWRMSR} */
4943static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsFetchPhysAddr(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4944{
4945 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4946 /** @todo AMD IBS. */
4947 return VINF_SUCCESS;
4948}
4949
4950
4951/** @callback_method_impl{FNCPUMRDMSR} */
4952static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpExecCtl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4953{
4954 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4955 /** @todo AMD IBS. */
4956 *puValue = 0;
4957 return VINF_SUCCESS;
4958}
4959
4960
4961/** @callback_method_impl{FNCPUMWRMSR} */
4962static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpExecCtl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4963{
4964 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4965 /** @todo AMD IBS. */
4966 return VINF_SUCCESS;
4967}
4968
4969
4970/** @callback_method_impl{FNCPUMRDMSR} */
4971static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpRip(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4972{
4973 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4974 /** @todo AMD IBS. */
4975 *puValue = 0;
4976 return VINF_SUCCESS;
4977}
4978
4979
4980/** @callback_method_impl{FNCPUMWRMSR} */
4981static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpRip(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
4982{
4983 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
4984 /** @todo AMD IBS. */
4985 if (!X86_IS_CANONICAL(uValue))
4986 {
4987 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
4988 return VERR_CPUM_RAISE_GP_0;
4989 }
4990 return VINF_SUCCESS;
4991}
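/*
 * Editor's note on the X86_IS_CANONICAL() checks used by this and the other
 * IBS linear-address/RIP writers below: a 64-bit address is canonical when its
 * upper bits are a sign extension of the highest implemented linear-address
 * bit (for 48-bit implementations, bits 63:47 must all equal bit 47).  The
 * values below are illustrative only, not taken from this source:
 * 0x00007fffffffffff and 0xffff800000000000 pass the check, while
 * 0x0000800000000000 does not and would take the #GP(0) path above.
 */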
4992
4993
4994/** @callback_method_impl{FNCPUMRDMSR} */
4995static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpData(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
4996{
4997 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
4998 /** @todo AMD IBS. */
4999 *puValue = 0;
5000 return VINF_SUCCESS;
5001}
5002
5003
5004/** @callback_method_impl{FNCPUMWRMSR} */
5005static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpData(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5006{
5007 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5008 /** @todo AMD IBS. */
5009 return VINF_SUCCESS;
5010}
5011
5012
5013/** @callback_method_impl{FNCPUMRDMSR} */
5014static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpData2(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5015{
5016 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5017 /** @todo AMD IBS. */
5018 *puValue = 0;
5019 return VINF_SUCCESS;
5020}
5021
5022
5023/** @callback_method_impl{FNCPUMWRMSR} */
5024static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpData2(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5025{
5026 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5027 /** @todo AMD IBS. */
5028 return VINF_SUCCESS;
5029}
5030
5031
5032/** @callback_method_impl{FNCPUMRDMSR} */
5033static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsOpData3(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5034{
5035 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5036 /** @todo AMD IBS. */
5037 *puValue = 0;
5038 return VINF_SUCCESS;
5039}
5040
5041
5042/** @callback_method_impl{FNCPUMWRMSR} */
5043static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsOpData3(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5044{
5045 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5046 /** @todo AMD IBS. */
5047 return VINF_SUCCESS;
5048}
5049
5050
5051/** @callback_method_impl{FNCPUMRDMSR} */
5052static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsDcLinAddr(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5053{
5054 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5055 /** @todo AMD IBS. */
5056 *puValue = 0;
5057 return VINF_SUCCESS;
5058}
5059
5060
5061/** @callback_method_impl{FNCPUMWRMSR} */
5062static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsDcLinAddr(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5063{
5064 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5065 /** @todo AMD IBS. */
5066 if (!X86_IS_CANONICAL(uValue))
5067 {
5068 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
5069 return VERR_CPUM_RAISE_GP_0;
5070 }
5071 return VINF_SUCCESS;
5072}
5073
5074
5075/** @callback_method_impl{FNCPUMRDMSR} */
5076static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsDcPhysAddr(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5077{
5078 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5079 /** @todo AMD IBS. */
5080 *puValue = 0;
5081 return VINF_SUCCESS;
5082}
5083
5084
5085/** @callback_method_impl{FNCPUMWRMSR} */
5086static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsDcPhysAddr(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5087{
5088 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5089 /** @todo AMD IBS. */
5090 return VINF_SUCCESS;
5091}
5092
5093
5094/** @callback_method_impl{FNCPUMRDMSR} */
5095static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam10hIbsCtl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5096{
5097 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5098 /** @todo AMD IBS. */
5099 *puValue = 0;
5100 return VINF_SUCCESS;
5101}
5102
5103
5104/** @callback_method_impl{FNCPUMWRMSR} */
5105static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam10hIbsCtl(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5106{
5107 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5108 /** @todo AMD IBS. */
5109 return VINF_SUCCESS;
5110}
5111
5112
5113/** @callback_method_impl{FNCPUMRDMSR} */
5114static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_AmdFam14hIbsBrTarget(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5115{
5116 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange);
5117 /** @todo AMD IBS. */
5118 *puValue = 0;
5119 return VINF_SUCCESS;
5120}
5121
5122
5123/** @callback_method_impl{FNCPUMWRMSR} */
5124static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_AmdFam14hIbsBrTarget(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5125{
5126 RT_NOREF_PV(pVCpu); RT_NOREF_PV(idMsr); RT_NOREF_PV(pRange); RT_NOREF_PV(uValue); RT_NOREF_PV(uRawValue);
5127 /** @todo AMD IBS. */
5128 if (!X86_IS_CANONICAL(uValue))
5129 {
5130 Log(("CPUM: wrmsr %s(%#x), %#llx -> #GP - not canonical\n", pRange->szName, idMsr, uValue));
5131 return VERR_CPUM_RAISE_GP_0;
5132 }
5133 return VINF_SUCCESS;
5134}
5135
5136
5137
5138/*
5139 * GIM MSRs.
5140 * GIM MSRs.
5141 * GIM MSRs.
5142 */
5143
5144
5145/** @callback_method_impl{FNCPUMRDMSR} */
5146static DECLCALLBACK(VBOXSTRICTRC) cpumMsrRd_Gim(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t *puValue)
5147{
5148#if defined(VBOX_WITH_NESTED_HWVIRT_SVM) || defined(VBOX_WITH_NESTED_HWVIRT_VMX)
5149 /* Raise #GP(0) like a physical CPU would, since the nested-hypervisor hasn't intercepted these MSRs. */
5150 if ( CPUMIsGuestInSvmNestedHwVirtMode(&pVCpu->cpum.s.Guest)
5151 || CPUMIsGuestInVmxNonRootMode(&pVCpu->cpum.s.Guest))
5152 return VERR_CPUM_RAISE_GP_0;
5153#endif
5154 return GIMReadMsr(pVCpu, idMsr, pRange, puValue);
5155}
5156
5157
5158/** @callback_method_impl{FNCPUMWRMSR} */
5159static DECLCALLBACK(VBOXSTRICTRC) cpumMsrWr_Gim(PVMCPUCC pVCpu, uint32_t idMsr, PCCPUMMSRRANGE pRange, uint64_t uValue, uint64_t uRawValue)
5160{
5161#if defined(VBOX_WITH_NESTED_HWVIRT_SVM) || defined(VBOX_WITH_NESTED_HWVIRT_VMX)
5162 /* Raise #GP(0) like a physical CPU would, since the nested-hypervisor hasn't intercepted these MSRs. */
5163 if ( CPUMIsGuestInSvmNestedHwVirtMode(&pVCpu->cpum.s.Guest)
5164 || CPUMIsGuestInVmxNonRootMode(&pVCpu->cpum.s.Guest))
5165 return VERR_CPUM_RAISE_GP_0;
5166#endif
5167 return GIMWriteMsr(pVCpu, idMsr, pRange, uValue, uRawValue);
5168}
5169
5170
5171/**
5172 * MSR read function table.
5173 */
5174static const struct READMSRCLANG11WEIRDNOTHROW { PFNCPUMRDMSR pfnRdMsr; } g_aCpumRdMsrFns[kCpumMsrRdFn_End] =
5175{
5176 { NULL }, /* Invalid */
5177 { cpumMsrRd_FixedValue },
5178 { NULL }, /* Alias */
5179 { cpumMsrRd_WriteOnly },
5180 { cpumMsrRd_Ia32P5McAddr },
5181 { cpumMsrRd_Ia32P5McType },
5182 { cpumMsrRd_Ia32TimestampCounter },
5183 { cpumMsrRd_Ia32PlatformId },
5184 { cpumMsrRd_Ia32ApicBase },
5185 { cpumMsrRd_Ia32FeatureControl },
5186 { cpumMsrRd_Ia32BiosSignId },
5187 { cpumMsrRd_Ia32SmmMonitorCtl },
5188 { cpumMsrRd_Ia32PmcN },
5189 { cpumMsrRd_Ia32MonitorFilterLineSize },
5190 { cpumMsrRd_Ia32MPerf },
5191 { cpumMsrRd_Ia32APerf },
5192 { cpumMsrRd_Ia32MtrrCap },
5193 { cpumMsrRd_Ia32MtrrPhysBaseN },
5194 { cpumMsrRd_Ia32MtrrPhysMaskN },
5195 { cpumMsrRd_Ia32MtrrFixed },
5196 { cpumMsrRd_Ia32MtrrDefType },
5197 { cpumMsrRd_Ia32Pat },
5198 { cpumMsrRd_Ia32SysEnterCs },
5199 { cpumMsrRd_Ia32SysEnterEsp },
5200 { cpumMsrRd_Ia32SysEnterEip },
5201 { cpumMsrRd_Ia32McgCap },
5202 { cpumMsrRd_Ia32McgStatus },
5203 { cpumMsrRd_Ia32McgCtl },
5204 { cpumMsrRd_Ia32DebugCtl },
5205 { cpumMsrRd_Ia32SmrrPhysBase },
5206 { cpumMsrRd_Ia32SmrrPhysMask },
5207 { cpumMsrRd_Ia32PlatformDcaCap },
5208 { cpumMsrRd_Ia32CpuDcaCap },
5209 { cpumMsrRd_Ia32Dca0Cap },
5210 { cpumMsrRd_Ia32PerfEvtSelN },
5211 { cpumMsrRd_Ia32PerfStatus },
5212 { cpumMsrRd_Ia32PerfCtl },
5213 { cpumMsrRd_Ia32FixedCtrN },
5214 { cpumMsrRd_Ia32PerfCapabilities },
5215 { cpumMsrRd_Ia32FixedCtrCtrl },
5216 { cpumMsrRd_Ia32PerfGlobalStatus },
5217 { cpumMsrRd_Ia32PerfGlobalCtrl },
5218 { cpumMsrRd_Ia32PerfGlobalOvfCtrl },
5219 { cpumMsrRd_Ia32PebsEnable },
5220 { cpumMsrRd_Ia32ClockModulation },
5221 { cpumMsrRd_Ia32ThermInterrupt },
5222 { cpumMsrRd_Ia32ThermStatus },
5223 { cpumMsrRd_Ia32Therm2Ctl },
5224 { cpumMsrRd_Ia32MiscEnable },
5225 { cpumMsrRd_Ia32McCtlStatusAddrMiscN },
5226 { cpumMsrRd_Ia32McNCtl2 },
5227 { cpumMsrRd_Ia32DsArea },
5228 { cpumMsrRd_Ia32TscDeadline },
5229 { cpumMsrRd_Ia32X2ApicN },
5230 { cpumMsrRd_Ia32DebugInterface },
5231 { cpumMsrRd_Ia32VmxBasic },
5232 { cpumMsrRd_Ia32VmxPinbasedCtls },
5233 { cpumMsrRd_Ia32VmxProcbasedCtls },
5234 { cpumMsrRd_Ia32VmxExitCtls },
5235 { cpumMsrRd_Ia32VmxEntryCtls },
5236 { cpumMsrRd_Ia32VmxMisc },
5237 { cpumMsrRd_Ia32VmxCr0Fixed0 },
5238 { cpumMsrRd_Ia32VmxCr0Fixed1 },
5239 { cpumMsrRd_Ia32VmxCr4Fixed0 },
5240 { cpumMsrRd_Ia32VmxCr4Fixed1 },
5241 { cpumMsrRd_Ia32VmxVmcsEnum },
5242 { cpumMsrRd_Ia32VmxProcBasedCtls2 },
5243 { cpumMsrRd_Ia32VmxEptVpidCap },
5244 { cpumMsrRd_Ia32VmxTruePinbasedCtls },
5245 { cpumMsrRd_Ia32VmxTrueProcbasedCtls },
5246 { cpumMsrRd_Ia32VmxTrueExitCtls },
5247 { cpumMsrRd_Ia32VmxTrueEntryCtls },
5248 { cpumMsrRd_Ia32VmxVmFunc },
5249 { cpumMsrRd_Ia32SpecCtrl },
5250 { cpumMsrRd_Ia32ArchCapabilities },
5251
5252 { cpumMsrRd_Amd64Efer },
5253 { cpumMsrRd_Amd64SyscallTarget },
5254 { cpumMsrRd_Amd64LongSyscallTarget },
5255 { cpumMsrRd_Amd64CompSyscallTarget },
5256 { cpumMsrRd_Amd64SyscallFlagMask },
5257 { cpumMsrRd_Amd64FsBase },
5258 { cpumMsrRd_Amd64GsBase },
5259 { cpumMsrRd_Amd64KernelGsBase },
5260 { cpumMsrRd_Amd64TscAux },
5261
5262 { cpumMsrRd_IntelEblCrPowerOn },
5263 { cpumMsrRd_IntelI7CoreThreadCount },
5264 { cpumMsrRd_IntelP4EbcHardPowerOn },
5265 { cpumMsrRd_IntelP4EbcSoftPowerOn },
5266 { cpumMsrRd_IntelP4EbcFrequencyId },
5267 { cpumMsrRd_IntelP6FsbFrequency },
5268 { cpumMsrRd_IntelPlatformInfo },
5269 { cpumMsrRd_IntelFlexRatio },
5270 { cpumMsrRd_IntelPkgCStConfigControl },
5271 { cpumMsrRd_IntelPmgIoCaptureBase },
5272 { cpumMsrRd_IntelLastBranchFromToN },
5273 { cpumMsrRd_IntelLastBranchFromN },
5274 { cpumMsrRd_IntelLastBranchToN },
5275 { cpumMsrRd_IntelLastBranchTos },
5276 { cpumMsrRd_IntelBblCrCtl },
5277 { cpumMsrRd_IntelBblCrCtl3 },
5278 { cpumMsrRd_IntelI7TemperatureTarget },
5279 { cpumMsrRd_IntelI7MsrOffCoreResponseN },
5280 { cpumMsrRd_IntelI7MiscPwrMgmt },
5281 { cpumMsrRd_IntelP6CrN },
5282 { cpumMsrRd_IntelCpuId1FeatureMaskEcdx },
5283 { cpumMsrRd_IntelCpuId1FeatureMaskEax },
5284 { cpumMsrRd_IntelCpuId80000001FeatureMaskEcdx },
5285 { cpumMsrRd_IntelI7SandyAesNiCtl },
5286 { cpumMsrRd_IntelI7TurboRatioLimit },
5287 { cpumMsrRd_IntelI7LbrSelect },
5288 { cpumMsrRd_IntelI7SandyErrorControl },
5289 { cpumMsrRd_IntelI7VirtualLegacyWireCap },
5290 { cpumMsrRd_IntelI7PowerCtl },
5291 { cpumMsrRd_IntelI7SandyPebsNumAlt },
5292 { cpumMsrRd_IntelI7PebsLdLat },
5293 { cpumMsrRd_IntelI7PkgCnResidencyN },
5294 { cpumMsrRd_IntelI7CoreCnResidencyN },
5295 { cpumMsrRd_IntelI7SandyVrCurrentConfig },
5296 { cpumMsrRd_IntelI7SandyVrMiscConfig },
5297 { cpumMsrRd_IntelI7SandyRaplPowerUnit },
5298 { cpumMsrRd_IntelI7SandyPkgCnIrtlN },
5299 { cpumMsrRd_IntelI7SandyPkgC2Residency },
5300 { cpumMsrRd_IntelI7RaplPkgPowerLimit },
5301 { cpumMsrRd_IntelI7RaplPkgEnergyStatus },
5302 { cpumMsrRd_IntelI7RaplPkgPerfStatus },
5303 { cpumMsrRd_IntelI7RaplPkgPowerInfo },
5304 { cpumMsrRd_IntelI7RaplDramPowerLimit },
5305 { cpumMsrRd_IntelI7RaplDramEnergyStatus },
5306 { cpumMsrRd_IntelI7RaplDramPerfStatus },
5307 { cpumMsrRd_IntelI7RaplDramPowerInfo },
5308 { cpumMsrRd_IntelI7RaplPp0PowerLimit },
5309 { cpumMsrRd_IntelI7RaplPp0EnergyStatus },
5310 { cpumMsrRd_IntelI7RaplPp0Policy },
5311 { cpumMsrRd_IntelI7RaplPp0PerfStatus },
5312 { cpumMsrRd_IntelI7RaplPp1PowerLimit },
5313 { cpumMsrRd_IntelI7RaplPp1EnergyStatus },
5314 { cpumMsrRd_IntelI7RaplPp1Policy },
5315 { cpumMsrRd_IntelI7IvyConfigTdpNominal },
5316 { cpumMsrRd_IntelI7IvyConfigTdpLevel1 },
5317 { cpumMsrRd_IntelI7IvyConfigTdpLevel2 },
5318 { cpumMsrRd_IntelI7IvyConfigTdpControl },
5319 { cpumMsrRd_IntelI7IvyTurboActivationRatio },
5320 { cpumMsrRd_IntelI7UncPerfGlobalCtrl },
5321 { cpumMsrRd_IntelI7UncPerfGlobalStatus },
5322 { cpumMsrRd_IntelI7UncPerfGlobalOvfCtrl },
5323 { cpumMsrRd_IntelI7UncPerfFixedCtrCtrl },
5324 { cpumMsrRd_IntelI7UncPerfFixedCtr },
5325 { cpumMsrRd_IntelI7UncCBoxConfig },
5326 { cpumMsrRd_IntelI7UncArbPerfCtrN },
5327 { cpumMsrRd_IntelI7UncArbPerfEvtSelN },
5328 { cpumMsrRd_IntelI7SmiCount },
5329 { cpumMsrRd_IntelCore2EmttmCrTablesN },
5330 { cpumMsrRd_IntelCore2SmmCStMiscInfo },
5331 { cpumMsrRd_IntelCore1ExtConfig },
5332 { cpumMsrRd_IntelCore1DtsCalControl },
5333 { cpumMsrRd_IntelCore2PeciControl },
5334 { cpumMsrRd_IntelAtSilvCoreC1Recidency },
5335
5336 { cpumMsrRd_P6LastBranchFromIp },
5337 { cpumMsrRd_P6LastBranchToIp },
5338 { cpumMsrRd_P6LastIntFromIp },
5339 { cpumMsrRd_P6LastIntToIp },
5340
5341 { cpumMsrRd_AmdFam15hTscRate },
5342 { cpumMsrRd_AmdFam15hLwpCfg },
5343 { cpumMsrRd_AmdFam15hLwpCbAddr },
5344 { cpumMsrRd_AmdFam10hMc4MiscN },
5345 { cpumMsrRd_AmdK8PerfCtlN },
5346 { cpumMsrRd_AmdK8PerfCtrN },
5347 { cpumMsrRd_AmdK8SysCfg },
5348 { cpumMsrRd_AmdK8HwCr },
5349 { cpumMsrRd_AmdK8IorrBaseN },
5350 { cpumMsrRd_AmdK8IorrMaskN },
5351 { cpumMsrRd_AmdK8TopOfMemN },
5352 { cpumMsrRd_AmdK8NbCfg1 },
5353 { cpumMsrRd_AmdK8McXcptRedir },
5354 { cpumMsrRd_AmdK8CpuNameN },
5355 { cpumMsrRd_AmdK8HwThermalCtrl },
5356 { cpumMsrRd_AmdK8SwThermalCtrl },
5357 { cpumMsrRd_AmdK8FidVidControl },
5358 { cpumMsrRd_AmdK8FidVidStatus },
5359 { cpumMsrRd_AmdK8McCtlMaskN },
5360 { cpumMsrRd_AmdK8SmiOnIoTrapN },
5361 { cpumMsrRd_AmdK8SmiOnIoTrapCtlSts },
5362 { cpumMsrRd_AmdK8IntPendingMessage },
5363 { cpumMsrRd_AmdK8SmiTriggerIoCycle },
5364 { cpumMsrRd_AmdFam10hMmioCfgBaseAddr },
5365 { cpumMsrRd_AmdFam10hTrapCtlMaybe },
5366 { cpumMsrRd_AmdFam10hPStateCurLimit },
5367 { cpumMsrRd_AmdFam10hPStateControl },
5368 { cpumMsrRd_AmdFam10hPStateStatus },
5369 { cpumMsrRd_AmdFam10hPStateN },
5370 { cpumMsrRd_AmdFam10hCofVidControl },
5371 { cpumMsrRd_AmdFam10hCofVidStatus },
5372 { cpumMsrRd_AmdFam10hCStateIoBaseAddr },
5373 { cpumMsrRd_AmdFam10hCpuWatchdogTimer },
5374 { cpumMsrRd_AmdK8SmmBase },
5375 { cpumMsrRd_AmdK8SmmAddr },
5376 { cpumMsrRd_AmdK8SmmMask },
5377 { cpumMsrRd_AmdK8VmCr },
5378 { cpumMsrRd_AmdK8IgnNe },
5379 { cpumMsrRd_AmdK8SmmCtl },
5380 { cpumMsrRd_AmdK8VmHSavePa },
5381 { cpumMsrRd_AmdFam10hVmLockKey },
5382 { cpumMsrRd_AmdFam10hSmmLockKey },
5383 { cpumMsrRd_AmdFam10hLocalSmiStatus },
5384 { cpumMsrRd_AmdFam10hOsVisWrkIdLength },
5385 { cpumMsrRd_AmdFam10hOsVisWrkStatus },
5386 { cpumMsrRd_AmdFam16hL2IPerfCtlN },
5387 { cpumMsrRd_AmdFam16hL2IPerfCtrN },
5388 { cpumMsrRd_AmdFam15hNorthbridgePerfCtlN },
5389 { cpumMsrRd_AmdFam15hNorthbridgePerfCtrN },
5390 { cpumMsrRd_AmdK7MicrocodeCtl },
5391 { cpumMsrRd_AmdK7ClusterIdMaybe },
5392 { cpumMsrRd_AmdK8CpuIdCtlStd07hEbax },
5393 { cpumMsrRd_AmdK8CpuIdCtlStd06hEcx },
5394 { cpumMsrRd_AmdK8CpuIdCtlStd01hEdcx },
5395 { cpumMsrRd_AmdK8CpuIdCtlExt01hEdcx },
5396 { cpumMsrRd_AmdK8PatchLevel },
5397 { cpumMsrRd_AmdK7DebugStatusMaybe },
5398 { cpumMsrRd_AmdK7BHTraceBaseMaybe },
5399 { cpumMsrRd_AmdK7BHTracePtrMaybe },
5400 { cpumMsrRd_AmdK7BHTraceLimitMaybe },
5401 { cpumMsrRd_AmdK7HardwareDebugToolCfgMaybe },
5402 { cpumMsrRd_AmdK7FastFlushCountMaybe },
5403 { cpumMsrRd_AmdK7NodeId },
5404 { cpumMsrRd_AmdK7DrXAddrMaskN },
5405 { cpumMsrRd_AmdK7Dr0DataMatchMaybe },
5406 { cpumMsrRd_AmdK7Dr0DataMaskMaybe },
5407 { cpumMsrRd_AmdK7LoadStoreCfg },
5408 { cpumMsrRd_AmdK7InstrCacheCfg },
5409 { cpumMsrRd_AmdK7DataCacheCfg },
5410 { cpumMsrRd_AmdK7BusUnitCfg },
5411 { cpumMsrRd_AmdK7DebugCtl2Maybe },
5412 { cpumMsrRd_AmdFam15hFpuCfg },
5413 { cpumMsrRd_AmdFam15hDecoderCfg },
5414 { cpumMsrRd_AmdFam10hBusUnitCfg2 },
5415 { cpumMsrRd_AmdFam15hCombUnitCfg },
5416 { cpumMsrRd_AmdFam15hCombUnitCfg2 },
5417 { cpumMsrRd_AmdFam15hCombUnitCfg3 },
5418 { cpumMsrRd_AmdFam15hExecUnitCfg },
5419 { cpumMsrRd_AmdFam15hLoadStoreCfg2 },
5420 { cpumMsrRd_AmdFam10hIbsFetchCtl },
5421 { cpumMsrRd_AmdFam10hIbsFetchLinAddr },
5422 { cpumMsrRd_AmdFam10hIbsFetchPhysAddr },
5423 { cpumMsrRd_AmdFam10hIbsOpExecCtl },
5424 { cpumMsrRd_AmdFam10hIbsOpRip },
5425 { cpumMsrRd_AmdFam10hIbsOpData },
5426 { cpumMsrRd_AmdFam10hIbsOpData2 },
5427 { cpumMsrRd_AmdFam10hIbsOpData3 },
5428 { cpumMsrRd_AmdFam10hIbsDcLinAddr },
5429 { cpumMsrRd_AmdFam10hIbsDcPhysAddr },
5430 { cpumMsrRd_AmdFam10hIbsCtl },
5431 { cpumMsrRd_AmdFam14hIbsBrTarget },
5432
5433 { cpumMsrRd_Gim },
5434};
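/*
 * Reader's note (not part of the original source): g_aCpumRdMsrFns is indexed
 * directly by the CPUMMSRRDFN value stored in CPUMMSRRANGE::enmRdFn, so the
 * entries above must stay in exactly the enum order, from the Invalid slot up
 * to kCpumMsrRdFn_End.  A minimal dispatch sketch; the real path, with the
 * assertions and statistics, is CPUMQueryGuestMsr further down:
 *
 * @code
 *     PFNCPUMRDMSR pfnRdMsr  = g_aCpumRdMsrFns[pRange->enmRdFn].pfnRdMsr;
 *     VBOXSTRICTRC rcStrict  = pfnRdMsr(pVCpu, idMsr, pRange, &uValue);
 * @endcode
 */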
5435
5436
5437/**
5438 * MSR write function table.
5439 */
5440static const struct WRITEMSRCLANG11WEIRDNOTHROW { PFNCPUMWRMSR pfnWrMsr; } g_aCpumWrMsrFns[kCpumMsrWrFn_End] =
5441{
5442 { NULL }, /* Invalid */
5443 { cpumMsrWr_IgnoreWrite },
5444 { cpumMsrWr_ReadOnly },
5445 { NULL }, /* Alias */
5446 { cpumMsrWr_Ia32P5McAddr },
5447 { cpumMsrWr_Ia32P5McType },
5448 { cpumMsrWr_Ia32TimestampCounter },
5449 { cpumMsrWr_Ia32ApicBase },
5450 { cpumMsrWr_Ia32FeatureControl },
5451 { cpumMsrWr_Ia32BiosSignId },
5452 { cpumMsrWr_Ia32BiosUpdateTrigger },
5453 { cpumMsrWr_Ia32SmmMonitorCtl },
5454 { cpumMsrWr_Ia32PmcN },
5455 { cpumMsrWr_Ia32MonitorFilterLineSize },
5456 { cpumMsrWr_Ia32MPerf },
5457 { cpumMsrWr_Ia32APerf },
5458 { cpumMsrWr_Ia32MtrrPhysBaseN },
5459 { cpumMsrWr_Ia32MtrrPhysMaskN },
5460 { cpumMsrWr_Ia32MtrrFixed },
5461 { cpumMsrWr_Ia32MtrrDefType },
5462 { cpumMsrWr_Ia32Pat },
5463 { cpumMsrWr_Ia32SysEnterCs },
5464 { cpumMsrWr_Ia32SysEnterEsp },
5465 { cpumMsrWr_Ia32SysEnterEip },
5466 { cpumMsrWr_Ia32McgStatus },
5467 { cpumMsrWr_Ia32McgCtl },
5468 { cpumMsrWr_Ia32DebugCtl },
5469 { cpumMsrWr_Ia32SmrrPhysBase },
5470 { cpumMsrWr_Ia32SmrrPhysMask },
5471 { cpumMsrWr_Ia32PlatformDcaCap },
5472 { cpumMsrWr_Ia32Dca0Cap },
5473 { cpumMsrWr_Ia32PerfEvtSelN },
5474 { cpumMsrWr_Ia32PerfStatus },
5475 { cpumMsrWr_Ia32PerfCtl },
5476 { cpumMsrWr_Ia32FixedCtrN },
5477 { cpumMsrWr_Ia32PerfCapabilities },
5478 { cpumMsrWr_Ia32FixedCtrCtrl },
5479 { cpumMsrWr_Ia32PerfGlobalStatus },
5480 { cpumMsrWr_Ia32PerfGlobalCtrl },
5481 { cpumMsrWr_Ia32PerfGlobalOvfCtrl },
5482 { cpumMsrWr_Ia32PebsEnable },
5483 { cpumMsrWr_Ia32ClockModulation },
5484 { cpumMsrWr_Ia32ThermInterrupt },
5485 { cpumMsrWr_Ia32ThermStatus },
5486 { cpumMsrWr_Ia32Therm2Ctl },
5487 { cpumMsrWr_Ia32MiscEnable },
5488 { cpumMsrWr_Ia32McCtlStatusAddrMiscN },
5489 { cpumMsrWr_Ia32McNCtl2 },
5490 { cpumMsrWr_Ia32DsArea },
5491 { cpumMsrWr_Ia32TscDeadline },
5492 { cpumMsrWr_Ia32X2ApicN },
5493 { cpumMsrWr_Ia32DebugInterface },
5494 { cpumMsrWr_Ia32SpecCtrl },
5495 { cpumMsrWr_Ia32PredCmd },
5496 { cpumMsrWr_Ia32FlushCmd },
5497
5498 { cpumMsrWr_Amd64Efer },
5499 { cpumMsrWr_Amd64SyscallTarget },
5500 { cpumMsrWr_Amd64LongSyscallTarget },
5501 { cpumMsrWr_Amd64CompSyscallTarget },
5502 { cpumMsrWr_Amd64SyscallFlagMask },
5503 { cpumMsrWr_Amd64FsBase },
5504 { cpumMsrWr_Amd64GsBase },
5505 { cpumMsrWr_Amd64KernelGsBase },
5506 { cpumMsrWr_Amd64TscAux },
5507
5508 { cpumMsrWr_IntelEblCrPowerOn },
5509 { cpumMsrWr_IntelP4EbcHardPowerOn },
5510 { cpumMsrWr_IntelP4EbcSoftPowerOn },
5511 { cpumMsrWr_IntelP4EbcFrequencyId },
5512 { cpumMsrWr_IntelFlexRatio },
5513 { cpumMsrWr_IntelPkgCStConfigControl },
5514 { cpumMsrWr_IntelPmgIoCaptureBase },
5515 { cpumMsrWr_IntelLastBranchFromToN },
5516 { cpumMsrWr_IntelLastBranchFromN },
5517 { cpumMsrWr_IntelLastBranchToN },
5518 { cpumMsrWr_IntelLastBranchTos },
5519 { cpumMsrWr_IntelBblCrCtl },
5520 { cpumMsrWr_IntelBblCrCtl3 },
5521 { cpumMsrWr_IntelI7TemperatureTarget },
5522 { cpumMsrWr_IntelI7MsrOffCoreResponseN },
5523 { cpumMsrWr_IntelI7MiscPwrMgmt },
5524 { cpumMsrWr_IntelP6CrN },
5525 { cpumMsrWr_IntelCpuId1FeatureMaskEcdx },
5526 { cpumMsrWr_IntelCpuId1FeatureMaskEax },
5527 { cpumMsrWr_IntelCpuId80000001FeatureMaskEcdx },
5528 { cpumMsrWr_IntelI7SandyAesNiCtl },
5529 { cpumMsrWr_IntelI7TurboRatioLimit },
5530 { cpumMsrWr_IntelI7LbrSelect },
5531 { cpumMsrWr_IntelI7SandyErrorControl },
5532 { cpumMsrWr_IntelI7PowerCtl },
5533 { cpumMsrWr_IntelI7SandyPebsNumAlt },
5534 { cpumMsrWr_IntelI7PebsLdLat },
5535 { cpumMsrWr_IntelI7SandyVrCurrentConfig },
5536 { cpumMsrWr_IntelI7SandyVrMiscConfig },
5537 { cpumMsrWr_IntelI7SandyRaplPowerUnit },
5538 { cpumMsrWr_IntelI7SandyPkgCnIrtlN },
5539 { cpumMsrWr_IntelI7SandyPkgC2Residency },
5540 { cpumMsrWr_IntelI7RaplPkgPowerLimit },
5541 { cpumMsrWr_IntelI7RaplDramPowerLimit },
5542 { cpumMsrWr_IntelI7RaplPp0PowerLimit },
5543 { cpumMsrWr_IntelI7RaplPp0Policy },
5544 { cpumMsrWr_IntelI7RaplPp1PowerLimit },
5545 { cpumMsrWr_IntelI7RaplPp1Policy },
5546 { cpumMsrWr_IntelI7IvyConfigTdpControl },
5547 { cpumMsrWr_IntelI7IvyTurboActivationRatio },
5548 { cpumMsrWr_IntelI7UncPerfGlobalCtrl },
5549 { cpumMsrWr_IntelI7UncPerfGlobalStatus },
5550 { cpumMsrWr_IntelI7UncPerfGlobalOvfCtrl },
5551 { cpumMsrWr_IntelI7UncPerfFixedCtrCtrl },
5552 { cpumMsrWr_IntelI7UncPerfFixedCtr },
5553 { cpumMsrWr_IntelI7UncArbPerfCtrN },
5554 { cpumMsrWr_IntelI7UncArbPerfEvtSelN },
5555 { cpumMsrWr_IntelCore2EmttmCrTablesN },
5556 { cpumMsrWr_IntelCore2SmmCStMiscInfo },
5557 { cpumMsrWr_IntelCore1ExtConfig },
5558 { cpumMsrWr_IntelCore1DtsCalControl },
5559 { cpumMsrWr_IntelCore2PeciControl },
5560
5561 { cpumMsrWr_P6LastIntFromIp },
5562 { cpumMsrWr_P6LastIntToIp },
5563
5564 { cpumMsrWr_AmdFam15hTscRate },
5565 { cpumMsrWr_AmdFam15hLwpCfg },
5566 { cpumMsrWr_AmdFam15hLwpCbAddr },
5567 { cpumMsrWr_AmdFam10hMc4MiscN },
5568 { cpumMsrWr_AmdK8PerfCtlN },
5569 { cpumMsrWr_AmdK8PerfCtrN },
5570 { cpumMsrWr_AmdK8SysCfg },
5571 { cpumMsrWr_AmdK8HwCr },
5572 { cpumMsrWr_AmdK8IorrBaseN },
5573 { cpumMsrWr_AmdK8IorrMaskN },
5574 { cpumMsrWr_AmdK8TopOfMemN },
5575 { cpumMsrWr_AmdK8NbCfg1 },
5576 { cpumMsrWr_AmdK8McXcptRedir },
5577 { cpumMsrWr_AmdK8CpuNameN },
5578 { cpumMsrWr_AmdK8HwThermalCtrl },
5579 { cpumMsrWr_AmdK8SwThermalCtrl },
5580 { cpumMsrWr_AmdK8FidVidControl },
5581 { cpumMsrWr_AmdK8McCtlMaskN },
5582 { cpumMsrWr_AmdK8SmiOnIoTrapN },
5583 { cpumMsrWr_AmdK8SmiOnIoTrapCtlSts },
5584 { cpumMsrWr_AmdK8IntPendingMessage },
5585 { cpumMsrWr_AmdK8SmiTriggerIoCycle },
5586 { cpumMsrWr_AmdFam10hMmioCfgBaseAddr },
5587 { cpumMsrWr_AmdFam10hTrapCtlMaybe },
5588 { cpumMsrWr_AmdFam10hPStateControl },
5589 { cpumMsrWr_AmdFam10hPStateStatus },
5590 { cpumMsrWr_AmdFam10hPStateN },
5591 { cpumMsrWr_AmdFam10hCofVidControl },
5592 { cpumMsrWr_AmdFam10hCofVidStatus },
5593 { cpumMsrWr_AmdFam10hCStateIoBaseAddr },
5594 { cpumMsrWr_AmdFam10hCpuWatchdogTimer },
5595 { cpumMsrWr_AmdK8SmmBase },
5596 { cpumMsrWr_AmdK8SmmAddr },
5597 { cpumMsrWr_AmdK8SmmMask },
5598 { cpumMsrWr_AmdK8VmCr },
5599 { cpumMsrWr_AmdK8IgnNe },
5600 { cpumMsrWr_AmdK8SmmCtl },
5601 { cpumMsrWr_AmdK8VmHSavePa },
5602 { cpumMsrWr_AmdFam10hVmLockKey },
5603 { cpumMsrWr_AmdFam10hSmmLockKey },
5604 { cpumMsrWr_AmdFam10hLocalSmiStatus },
5605 { cpumMsrWr_AmdFam10hOsVisWrkIdLength },
5606 { cpumMsrWr_AmdFam10hOsVisWrkStatus },
5607 { cpumMsrWr_AmdFam16hL2IPerfCtlN },
5608 { cpumMsrWr_AmdFam16hL2IPerfCtrN },
5609 { cpumMsrWr_AmdFam15hNorthbridgePerfCtlN },
5610 { cpumMsrWr_AmdFam15hNorthbridgePerfCtrN },
5611 { cpumMsrWr_AmdK7MicrocodeCtl },
5612 { cpumMsrWr_AmdK7ClusterIdMaybe },
5613 { cpumMsrWr_AmdK8CpuIdCtlStd07hEbax },
5614 { cpumMsrWr_AmdK8CpuIdCtlStd06hEcx },
5615 { cpumMsrWr_AmdK8CpuIdCtlStd01hEdcx },
5616 { cpumMsrWr_AmdK8CpuIdCtlExt01hEdcx },
5617 { cpumMsrWr_AmdK8PatchLoader },
5618 { cpumMsrWr_AmdK7DebugStatusMaybe },
5619 { cpumMsrWr_AmdK7BHTraceBaseMaybe },
5620 { cpumMsrWr_AmdK7BHTracePtrMaybe },
5621 { cpumMsrWr_AmdK7BHTraceLimitMaybe },
5622 { cpumMsrWr_AmdK7HardwareDebugToolCfgMaybe },
5623 { cpumMsrWr_AmdK7FastFlushCountMaybe },
5624 { cpumMsrWr_AmdK7NodeId },
5625 { cpumMsrWr_AmdK7DrXAddrMaskN },
5626 { cpumMsrWr_AmdK7Dr0DataMatchMaybe },
5627 { cpumMsrWr_AmdK7Dr0DataMaskMaybe },
5628 { cpumMsrWr_AmdK7LoadStoreCfg },
5629 { cpumMsrWr_AmdK7InstrCacheCfg },
5630 { cpumMsrWr_AmdK7DataCacheCfg },
5631 { cpumMsrWr_AmdK7BusUnitCfg },
5632 { cpumMsrWr_AmdK7DebugCtl2Maybe },
5633 { cpumMsrWr_AmdFam15hFpuCfg },
5634 { cpumMsrWr_AmdFam15hDecoderCfg },
5635 { cpumMsrWr_AmdFam10hBusUnitCfg2 },
5636 { cpumMsrWr_AmdFam15hCombUnitCfg },
5637 { cpumMsrWr_AmdFam15hCombUnitCfg2 },
5638 { cpumMsrWr_AmdFam15hCombUnitCfg3 },
5639 { cpumMsrWr_AmdFam15hExecUnitCfg },
5640 { cpumMsrWr_AmdFam15hLoadStoreCfg2 },
5641 { cpumMsrWr_AmdFam10hIbsFetchCtl },
5642 { cpumMsrWr_AmdFam10hIbsFetchLinAddr },
5643 { cpumMsrWr_AmdFam10hIbsFetchPhysAddr },
5644 { cpumMsrWr_AmdFam10hIbsOpExecCtl },
5645 { cpumMsrWr_AmdFam10hIbsOpRip },
5646 { cpumMsrWr_AmdFam10hIbsOpData },
5647 { cpumMsrWr_AmdFam10hIbsOpData2 },
5648 { cpumMsrWr_AmdFam10hIbsOpData3 },
5649 { cpumMsrWr_AmdFam10hIbsDcLinAddr },
5650 { cpumMsrWr_AmdFam10hIbsDcPhysAddr },
5651 { cpumMsrWr_AmdFam10hIbsCtl },
5652 { cpumMsrWr_AmdFam14hIbsBrTarget },
5653
5654 { cpumMsrWr_Gim },
5655};
5656
5657
5658/**
5659 * Looks up the range for the given MSR.
5660 *
5661 * @returns Pointer to the range if found, NULL if not.
5662 * @param pVM The cross context VM structure.
5663 * @param idMsr The MSR to look up.
5664 */
5665# ifndef IN_RING3
5666static
5667# endif
5668PCPUMMSRRANGE cpumLookupMsrRange(PVM pVM, uint32_t idMsr)
5669{
5670 /*
5671 * Binary lookup.
5672 */
5673 uint32_t cRanges = RT_MIN(pVM->cpum.s.GuestInfo.cMsrRanges, RT_ELEMENTS(pVM->cpum.s.GuestInfo.aMsrRanges));
5674 if (!cRanges)
5675 return NULL;
5676 PCPUMMSRRANGE paRanges = pVM->cpum.s.GuestInfo.aMsrRanges;
5677 for (;;)
5678 {
5679 uint32_t i = cRanges / 2;
5680 if (idMsr < paRanges[i].uFirst)
5681 {
5682 if (i == 0)
5683 break;
5684 cRanges = i;
5685 }
5686 else if (idMsr > paRanges[i].uLast)
5687 {
5688 i++;
5689 if (i >= cRanges)
5690 break;
5691 cRanges -= i;
5692 paRanges = &paRanges[i];
5693 }
5694 else
5695 {
5696 if (paRanges[i].enmRdFn == kCpumMsrRdFn_MsrAlias)
5697 return cpumLookupMsrRange(pVM, paRanges[i].uValue);
5698 return &paRanges[i];
5699 }
5700 }
5701
5702# ifdef VBOX_STRICT
5703 /*
5704 * Linear lookup to verify the above binary search.
5705 */
5706 uint32_t cLeft = RT_MIN(pVM->cpum.s.GuestInfo.cMsrRanges, RT_ELEMENTS(pVM->cpum.s.GuestInfo.aMsrRanges));
5707 PCPUMMSRRANGE pCur = pVM->cpum.s.GuestInfo.aMsrRanges;
5708 while (cLeft-- > 0)
5709 {
5710 if (idMsr >= pCur->uFirst && idMsr <= pCur->uLast)
5711 {
5712 AssertFailed();
5713 if (pCur->enmRdFn == kCpumMsrRdFn_MsrAlias)
5714 return cpumLookupMsrRange(pVM, pCur->uValue);
5715 return pCur;
5716 }
5717 pCur++;
5718 }
5719# endif
5720 return NULL;
5721}
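/*
 * Illustrative use of the lookup (the MSR number is just an example, the EFER
 * MSR at 0xc0000080; nothing here is taken from the table setup code):
 *
 * @code
 *     PCPUMMSRRANGE pRange = cpumLookupMsrRange(pVM, 0xc0000080);
 *     if (pRange)
 *         Log(("MSR %#x -> range '%s' [%#x..%#x]\n", 0xc0000080, pRange->szName, pRange->uFirst, pRange->uLast));
 *     else
 *         Log(("MSR %#x -> no range, the caller raises #GP(0)\n", 0xc0000080));
 * @endcode
 *
 * Alias ranges are resolved recursively above, so a returned range is never an
 * alias entry.
 */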
5722
5723
5724/**
5725 * Query a guest MSR.
5726 *
5727 * The caller is responsible for checking privilege if the call is the result of
5728 * a RDMSR instruction. We'll do the rest.
5729 *
5730 * @retval VINF_SUCCESS on success.
5731 * @retval VINF_CPUM_R3_MSR_READ if the MSR read could not be serviced in the
5732 * current context (raw-mode or ring-0).
5733 * @retval VERR_CPUM_RAISE_GP_0 on failure (invalid MSR), the caller is
5734 * expected to take the appropriate actions. @a *puValue is set to 0.
5735 * @param pVCpu The cross context virtual CPU structure.
5736 * @param idMsr The MSR.
5737 * @param puValue Where to return the value.
5738 *
5739 * @remarks This will always return the right values, even when we're in the
5740 * recompiler.
5741 */
5742VMMDECL(VBOXSTRICTRC) CPUMQueryGuestMsr(PVMCPUCC pVCpu, uint32_t idMsr, uint64_t *puValue)
5743{
5744 *puValue = 0;
5745
5746 VBOXSTRICTRC rcStrict;
5747 PVM pVM = pVCpu->CTX_SUFF(pVM);
5748 PCPUMMSRRANGE pRange = cpumLookupMsrRange(pVM, idMsr);
5749 if (pRange)
5750 {
5751 CPUMMSRRDFN enmRdFn = (CPUMMSRRDFN)pRange->enmRdFn;
5752 AssertReturn(enmRdFn > kCpumMsrRdFn_Invalid && enmRdFn < kCpumMsrRdFn_End, VERR_CPUM_IPE_1);
5753
5754 PFNCPUMRDMSR pfnRdMsr = g_aCpumRdMsrFns[enmRdFn].pfnRdMsr;
5755 AssertReturn(pfnRdMsr, VERR_CPUM_IPE_2);
5756
5757 STAM_COUNTER_INC(&pRange->cReads);
5758 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrReads);
5759
5760 rcStrict = pfnRdMsr(pVCpu, idMsr, pRange, puValue);
5761 if (rcStrict == VINF_SUCCESS)
5762 Log2(("CPUM: RDMSR %#x (%s) -> %#llx\n", idMsr, pRange->szName, *puValue));
5763 else if (rcStrict == VERR_CPUM_RAISE_GP_0)
5764 {
5765 Log(("CPUM: RDMSR %#x (%s) -> #GP(0)\n", idMsr, pRange->szName));
5766 STAM_COUNTER_INC(&pRange->cGps);
5767 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrReadsRaiseGp);
5768 }
5769#ifndef IN_RING3
5770 else if (rcStrict == VINF_CPUM_R3_MSR_READ)
5771 Log(("CPUM: RDMSR %#x (%s) -> ring-3\n", idMsr, pRange->szName));
5772#endif
5773 else
5774 {
5775 Log(("CPUM: RDMSR %#x (%s) -> rcStrict=%Rrc\n", idMsr, pRange->szName, VBOXSTRICTRC_VAL(rcStrict)));
5776 AssertMsgStmt(RT_FAILURE_NP(rcStrict), ("%Rrc idMsr=%#x\n", VBOXSTRICTRC_VAL(rcStrict), idMsr),
5777 rcStrict = VERR_IPE_UNEXPECTED_INFO_STATUS);
5778 Assert(rcStrict != VERR_EM_INTERPRETER);
5779 }
5780 }
5781 else
5782 {
5783 Log(("CPUM: Unknown RDMSR %#x -> #GP(0)\n", idMsr));
5784 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrReads);
5785 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrReadsUnknown);
5786 rcStrict = VERR_CPUM_RAISE_GP_0;
5787 }
5788 return rcStrict;
5789}
5790
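/*
 * Illustrative sketch, not part of the build: how a hypothetical RDMSR-style
 * caller might consume the status codes documented above.  exampleHandleRdMsr()
 * is a made-up helper; real exception injection and ring-3 deferral are only
 * hinted at in the comments.
 */
#if 0
static VBOXSTRICTRC exampleHandleRdMsr(PVMCPUCC pVCpu, uint32_t idMsr, uint64_t *puValue)
{
    VBOXSTRICTRC rcStrict = CPUMQueryGuestMsr(pVCpu, idMsr, puValue);
    if (rcStrict == VINF_SUCCESS)
    {   /* *puValue holds the MSR value; continue emulation. */ }
    else if (rcStrict == VERR_CPUM_RAISE_GP_0)
    {   /* Inject #GP(0) into the guest (not shown). */ }
    else
    {   /* E.g. VINF_CPUM_R3_MSR_READ: redo the access in ring-3. */ }
    return rcStrict;
}
#endif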
5791
5792/**
5793 * Writes to a guest MSR.
5794 *
5795 * The caller is responsible for checking privilege if the call is the result of
5796 * a WRMSR instruction. We'll do the rest.
5797 *
5798 * @retval VINF_SUCCESS on success.
5799 * @retval VINF_CPUM_R3_MSR_WRITE if the MSR write could not be serviced in the
5800 * current context (raw-mode or ring-0).
5801 * @retval VERR_CPUM_RAISE_GP_0 on failure, the caller is expected to take the
5802 * appropriate actions.
5803 *
5804 * @param pVCpu The cross context virtual CPU structure.
5805 * @param idMsr The MSR id.
5806 * @param uValue The value to set.
5807 *
5808 * @remarks Everyone changing MSR values, including the recompiler, shall do it
5809 * by calling this method. This makes sure we have current values and
5810 * that we trigger all the right actions when something changes.
5811 *
5812 * For performance reasons, this actually isn't entirely true for some
5813 * MSRs when in HM mode. The code here and in HM must be aware of
5814 * this.
5815 */
5816VMMDECL(VBOXSTRICTRC) CPUMSetGuestMsr(PVMCPUCC pVCpu, uint32_t idMsr, uint64_t uValue)
5817{
5818 VBOXSTRICTRC rcStrict;
5819 PVM pVM = pVCpu->CTX_SUFF(pVM);
5820 PCPUMMSRRANGE pRange = cpumLookupMsrRange(pVM, idMsr);
5821 if (pRange)
5822 {
5823 STAM_COUNTER_INC(&pRange->cWrites);
5824 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWrites);
5825
5826 if (!(uValue & pRange->fWrGpMask))
5827 {
5828 CPUMMSRWRFN enmWrFn = (CPUMMSRWRFN)pRange->enmWrFn;
5829 AssertReturn(enmWrFn > kCpumMsrWrFn_Invalid && enmWrFn < kCpumMsrWrFn_End, VERR_CPUM_IPE_1);
5830
5831 PFNCPUMWRMSR pfnWrMsr = g_aCpumWrMsrFns[enmWrFn].pfnWrMsr;
5832 AssertReturn(pfnWrMsr, VERR_CPUM_IPE_2);
5833
5834 uint64_t uValueAdjusted = uValue & ~pRange->fWrIgnMask;
5835 if (uValueAdjusted != uValue)
5836 {
5837 STAM_COUNTER_INC(&pRange->cIgnoredBits);
5838 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWritesToIgnoredBits);
5839 }
5840
5841 rcStrict = pfnWrMsr(pVCpu, idMsr, pRange, uValueAdjusted, uValue);
5842 if (rcStrict == VINF_SUCCESS)
5843 Log2(("CPUM: WRMSR %#x (%s), %#llx [%#llx]\n", idMsr, pRange->szName, uValueAdjusted, uValue));
5844 else if (rcStrict == VERR_CPUM_RAISE_GP_0)
5845 {
5846 Log(("CPUM: WRMSR %#x (%s), %#llx [%#llx] -> #GP(0)\n", idMsr, pRange->szName, uValueAdjusted, uValue));
5847 STAM_COUNTER_INC(&pRange->cGps);
5848 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWritesRaiseGp);
5849 }
5850#ifndef IN_RING3
5851 else if (rcStrict == VINF_CPUM_R3_MSR_WRITE)
5852 Log(("CPUM: WRMSR %#x (%s), %#llx [%#llx] -> ring-3\n", idMsr, pRange->szName, uValueAdjusted, uValue));
5853#endif
5854 else
5855 {
5856 Log(("CPUM: WRMSR %#x (%s), %#llx [%#llx] -> rcStrict=%Rrc\n",
5857 idMsr, pRange->szName, uValueAdjusted, uValue, VBOXSTRICTRC_VAL(rcStrict)));
5858 AssertMsgStmt(RT_FAILURE_NP(rcStrict), ("%Rrc idMsr=%#x\n", VBOXSTRICTRC_VAL(rcStrict), idMsr),
5859 rcStrict = VERR_IPE_UNEXPECTED_INFO_STATUS);
5860 Assert(rcStrict != VERR_EM_INTERPRETER);
5861 }
5862 }
5863 else
5864 {
5865 Log(("CPUM: WRMSR %#x (%s), %#llx -> #GP(0) - invalid bits %#llx\n",
5866 idMsr, pRange->szName, uValue, uValue & pRange->fWrGpMask));
5867 STAM_COUNTER_INC(&pRange->cGps);
5868 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWritesRaiseGp);
5869 rcStrict = VERR_CPUM_RAISE_GP_0;
5870 }
5871 }
5872 else
5873 {
5874 Log(("CPUM: Unknown WRMSR %#x, %#llx -> #GP(0)\n", idMsr, uValue));
5875 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWrites);
5876 STAM_REL_COUNTER_INC(&pVM->cpum.s.cMsrWritesUnknown);
5877 rcStrict = VERR_CPUM_RAISE_GP_0;
5878 }
5879 return rcStrict;
5880}
5881
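/*
 * Illustrative sketch, not part of the build: the per-range write-mask handling
 * above in isolation.  The concrete mask and value constants are invented for
 * the example and do not describe any particular MSR.
 */
#if 0
static void exampleWriteMasks(void)
{
    uint64_t const fWrGpMask      = UINT64_C(0xffffffff00000000); /* Writing any of these bits raises #GP(0). */
    uint64_t const fWrIgnMask     = UINT64_C(0x00000000000000f0); /* These bits are silently dropped.          */
    uint64_t const uValue         = UINT64_C(0x00000000000000ff);
    bool const     fRaiseGp       = RT_BOOL(uValue & fWrGpMask);  /* false: no #GP-triggering bit is set.      */
    uint64_t const uValueAdjusted = uValue & ~fWrIgnMask;         /* 0x0f: bits 7:4 never reach the handler.   */
    NOREF(fRaiseGp); NOREF(uValueAdjusted);
}
#endif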
5882
5883#if defined(VBOX_STRICT) && defined(IN_RING3)
5884/**
5885 * Performs some checks on the static data related to MSRs.
5886 *
5887 * @returns VINF_SUCCESS on success, error on failure.
5888 */
5889int cpumR3MsrStrictInitChecks(void)
5890{
5891#define CPUM_ASSERT_RD_MSR_FN(a_Register) \
5892 AssertReturn(g_aCpumRdMsrFns[kCpumMsrRdFn_##a_Register].pfnRdMsr == cpumMsrRd_##a_Register, VERR_CPUM_IPE_2);
5893#define CPUM_ASSERT_WR_MSR_FN(a_Register) \
5894 AssertReturn(g_aCpumWrMsrFns[kCpumMsrWrFn_##a_Register].pfnWrMsr == cpumMsrWr_##a_Register, VERR_CPUM_IPE_2);
5895
5896 AssertReturn(g_aCpumRdMsrFns[kCpumMsrRdFn_Invalid].pfnRdMsr == NULL, VERR_CPUM_IPE_2);
5897 CPUM_ASSERT_RD_MSR_FN(FixedValue);
5898 CPUM_ASSERT_RD_MSR_FN(WriteOnly);
5899 CPUM_ASSERT_RD_MSR_FN(Ia32P5McAddr);
5900 CPUM_ASSERT_RD_MSR_FN(Ia32P5McType);
5901 CPUM_ASSERT_RD_MSR_FN(Ia32TimestampCounter);
5902 CPUM_ASSERT_RD_MSR_FN(Ia32PlatformId);
5903 CPUM_ASSERT_RD_MSR_FN(Ia32ApicBase);
5904 CPUM_ASSERT_RD_MSR_FN(Ia32FeatureControl);
5905 CPUM_ASSERT_RD_MSR_FN(Ia32BiosSignId);
5906 CPUM_ASSERT_RD_MSR_FN(Ia32SmmMonitorCtl);
5907 CPUM_ASSERT_RD_MSR_FN(Ia32PmcN);
5908 CPUM_ASSERT_RD_MSR_FN(Ia32MonitorFilterLineSize);
5909 CPUM_ASSERT_RD_MSR_FN(Ia32MPerf);
5910 CPUM_ASSERT_RD_MSR_FN(Ia32APerf);
5911 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrCap);
5912 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrPhysBaseN);
5913 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrPhysMaskN);
5914 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrFixed);
5915 CPUM_ASSERT_RD_MSR_FN(Ia32MtrrDefType);
5916 CPUM_ASSERT_RD_MSR_FN(Ia32Pat);
5917 CPUM_ASSERT_RD_MSR_FN(Ia32SysEnterCs);
5918 CPUM_ASSERT_RD_MSR_FN(Ia32SysEnterEsp);
5919 CPUM_ASSERT_RD_MSR_FN(Ia32SysEnterEip);
5920 CPUM_ASSERT_RD_MSR_FN(Ia32McgCap);
5921 CPUM_ASSERT_RD_MSR_FN(Ia32McgStatus);
5922 CPUM_ASSERT_RD_MSR_FN(Ia32McgCtl);
5923 CPUM_ASSERT_RD_MSR_FN(Ia32DebugCtl);
5924 CPUM_ASSERT_RD_MSR_FN(Ia32SmrrPhysBase);
5925 CPUM_ASSERT_RD_MSR_FN(Ia32SmrrPhysMask);
5926 CPUM_ASSERT_RD_MSR_FN(Ia32PlatformDcaCap);
5927 CPUM_ASSERT_RD_MSR_FN(Ia32CpuDcaCap);
5928 CPUM_ASSERT_RD_MSR_FN(Ia32Dca0Cap);
5929 CPUM_ASSERT_RD_MSR_FN(Ia32PerfEvtSelN);
5930 CPUM_ASSERT_RD_MSR_FN(Ia32PerfStatus);
5931 CPUM_ASSERT_RD_MSR_FN(Ia32PerfCtl);
5932 CPUM_ASSERT_RD_MSR_FN(Ia32FixedCtrN);
5933 CPUM_ASSERT_RD_MSR_FN(Ia32PerfCapabilities);
5934 CPUM_ASSERT_RD_MSR_FN(Ia32FixedCtrCtrl);
5935 CPUM_ASSERT_RD_MSR_FN(Ia32PerfGlobalStatus);
5936 CPUM_ASSERT_RD_MSR_FN(Ia32PerfGlobalCtrl);
5937 CPUM_ASSERT_RD_MSR_FN(Ia32PerfGlobalOvfCtrl);
5938 CPUM_ASSERT_RD_MSR_FN(Ia32PebsEnable);
5939 CPUM_ASSERT_RD_MSR_FN(Ia32ClockModulation);
5940 CPUM_ASSERT_RD_MSR_FN(Ia32ThermInterrupt);
5941 CPUM_ASSERT_RD_MSR_FN(Ia32ThermStatus);
5942 CPUM_ASSERT_RD_MSR_FN(Ia32MiscEnable);
5943 CPUM_ASSERT_RD_MSR_FN(Ia32McCtlStatusAddrMiscN);
5944 CPUM_ASSERT_RD_MSR_FN(Ia32McNCtl2);
5945 CPUM_ASSERT_RD_MSR_FN(Ia32DsArea);
5946 CPUM_ASSERT_RD_MSR_FN(Ia32TscDeadline);
5947 CPUM_ASSERT_RD_MSR_FN(Ia32X2ApicN);
5948 CPUM_ASSERT_RD_MSR_FN(Ia32DebugInterface);
5949 CPUM_ASSERT_RD_MSR_FN(Ia32VmxBasic);
5950 CPUM_ASSERT_RD_MSR_FN(Ia32VmxPinbasedCtls);
5951 CPUM_ASSERT_RD_MSR_FN(Ia32VmxProcbasedCtls);
5952 CPUM_ASSERT_RD_MSR_FN(Ia32VmxExitCtls);
5953 CPUM_ASSERT_RD_MSR_FN(Ia32VmxEntryCtls);
5954 CPUM_ASSERT_RD_MSR_FN(Ia32VmxMisc);
5955 CPUM_ASSERT_RD_MSR_FN(Ia32VmxCr0Fixed0);
5956 CPUM_ASSERT_RD_MSR_FN(Ia32VmxCr0Fixed1);
5957 CPUM_ASSERT_RD_MSR_FN(Ia32VmxCr4Fixed0);
5958 CPUM_ASSERT_RD_MSR_FN(Ia32VmxCr4Fixed1);
5959 CPUM_ASSERT_RD_MSR_FN(Ia32VmxVmcsEnum);
5960 CPUM_ASSERT_RD_MSR_FN(Ia32VmxProcBasedCtls2);
5961 CPUM_ASSERT_RD_MSR_FN(Ia32VmxEptVpidCap);
5962 CPUM_ASSERT_RD_MSR_FN(Ia32VmxTruePinbasedCtls);
5963 CPUM_ASSERT_RD_MSR_FN(Ia32VmxTrueProcbasedCtls);
5964 CPUM_ASSERT_RD_MSR_FN(Ia32VmxTrueExitCtls);
5965 CPUM_ASSERT_RD_MSR_FN(Ia32VmxTrueEntryCtls);
5966 CPUM_ASSERT_RD_MSR_FN(Ia32VmxVmFunc);
5967 CPUM_ASSERT_RD_MSR_FN(Ia32SpecCtrl);
5968 CPUM_ASSERT_RD_MSR_FN(Ia32ArchCapabilities);
5969
5970 CPUM_ASSERT_RD_MSR_FN(Amd64Efer);
5971 CPUM_ASSERT_RD_MSR_FN(Amd64SyscallTarget);
5972 CPUM_ASSERT_RD_MSR_FN(Amd64LongSyscallTarget);
5973 CPUM_ASSERT_RD_MSR_FN(Amd64CompSyscallTarget);
5974 CPUM_ASSERT_RD_MSR_FN(Amd64SyscallFlagMask);
5975 CPUM_ASSERT_RD_MSR_FN(Amd64FsBase);
5976 CPUM_ASSERT_RD_MSR_FN(Amd64GsBase);
5977 CPUM_ASSERT_RD_MSR_FN(Amd64KernelGsBase);
5978 CPUM_ASSERT_RD_MSR_FN(Amd64TscAux);
5979
5980 CPUM_ASSERT_RD_MSR_FN(IntelEblCrPowerOn);
5981 CPUM_ASSERT_RD_MSR_FN(IntelI7CoreThreadCount);
5982 CPUM_ASSERT_RD_MSR_FN(IntelP4EbcHardPowerOn);
5983 CPUM_ASSERT_RD_MSR_FN(IntelP4EbcSoftPowerOn);
5984 CPUM_ASSERT_RD_MSR_FN(IntelP4EbcFrequencyId);
5985 CPUM_ASSERT_RD_MSR_FN(IntelP6FsbFrequency);
5986 CPUM_ASSERT_RD_MSR_FN(IntelPlatformInfo);
5987 CPUM_ASSERT_RD_MSR_FN(IntelFlexRatio);
5988 CPUM_ASSERT_RD_MSR_FN(IntelPkgCStConfigControl);
5989 CPUM_ASSERT_RD_MSR_FN(IntelPmgIoCaptureBase);
5990 CPUM_ASSERT_RD_MSR_FN(IntelLastBranchFromToN);
5991 CPUM_ASSERT_RD_MSR_FN(IntelLastBranchFromN);
5992 CPUM_ASSERT_RD_MSR_FN(IntelLastBranchToN);
5993 CPUM_ASSERT_RD_MSR_FN(IntelLastBranchTos);
5994 CPUM_ASSERT_RD_MSR_FN(IntelBblCrCtl);
5995 CPUM_ASSERT_RD_MSR_FN(IntelBblCrCtl3);
5996 CPUM_ASSERT_RD_MSR_FN(IntelI7TemperatureTarget);
5997 CPUM_ASSERT_RD_MSR_FN(IntelI7MsrOffCoreResponseN);
5998 CPUM_ASSERT_RD_MSR_FN(IntelI7MiscPwrMgmt);
5999 CPUM_ASSERT_RD_MSR_FN(IntelP6CrN);
6000 CPUM_ASSERT_RD_MSR_FN(IntelCpuId1FeatureMaskEcdx);
6001 CPUM_ASSERT_RD_MSR_FN(IntelCpuId1FeatureMaskEax);
6002 CPUM_ASSERT_RD_MSR_FN(IntelCpuId80000001FeatureMaskEcdx);
6003 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyAesNiCtl);
6004 CPUM_ASSERT_RD_MSR_FN(IntelI7TurboRatioLimit);
6005 CPUM_ASSERT_RD_MSR_FN(IntelI7LbrSelect);
6006 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyErrorControl);
6007 CPUM_ASSERT_RD_MSR_FN(IntelI7VirtualLegacyWireCap);
6008 CPUM_ASSERT_RD_MSR_FN(IntelI7PowerCtl);
6009 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyPebsNumAlt);
6010 CPUM_ASSERT_RD_MSR_FN(IntelI7PebsLdLat);
6011 CPUM_ASSERT_RD_MSR_FN(IntelI7PkgCnResidencyN);
6012 CPUM_ASSERT_RD_MSR_FN(IntelI7CoreCnResidencyN);
6013 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyVrCurrentConfig);
6014 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyVrMiscConfig);
6015 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyRaplPowerUnit);
6016 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyPkgCnIrtlN);
6017 CPUM_ASSERT_RD_MSR_FN(IntelI7SandyPkgC2Residency);
6018 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPkgPowerLimit);
6019 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPkgEnergyStatus);
6020 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPkgPerfStatus);
6021 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPkgPowerInfo);
6022 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplDramPowerLimit);
6023 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplDramEnergyStatus);
6024 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplDramPerfStatus);
6025 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplDramPowerInfo);
6026 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp0PowerLimit);
6027 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp0EnergyStatus);
6028 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp0Policy);
6029 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp0PerfStatus);
6030 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp1PowerLimit);
6031 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp1EnergyStatus);
6032 CPUM_ASSERT_RD_MSR_FN(IntelI7RaplPp1Policy);
6033 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyConfigTdpNominal);
6034 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyConfigTdpLevel1);
6035 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyConfigTdpLevel2);
6036 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyConfigTdpControl);
6037 CPUM_ASSERT_RD_MSR_FN(IntelI7IvyTurboActivationRatio);
6038 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfGlobalCtrl);
6039 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfGlobalStatus);
6040 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfGlobalOvfCtrl);
6041 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfFixedCtrCtrl);
6042 CPUM_ASSERT_RD_MSR_FN(IntelI7UncPerfFixedCtr);
6043 CPUM_ASSERT_RD_MSR_FN(IntelI7UncCBoxConfig);
6044 CPUM_ASSERT_RD_MSR_FN(IntelI7UncArbPerfCtrN);
6045 CPUM_ASSERT_RD_MSR_FN(IntelI7UncArbPerfEvtSelN);
6046 CPUM_ASSERT_RD_MSR_FN(IntelI7SmiCount);
6047 CPUM_ASSERT_RD_MSR_FN(IntelCore2EmttmCrTablesN);
6048 CPUM_ASSERT_RD_MSR_FN(IntelCore2SmmCStMiscInfo);
6049 CPUM_ASSERT_RD_MSR_FN(IntelCore1ExtConfig);
6050 CPUM_ASSERT_RD_MSR_FN(IntelCore1DtsCalControl);
6051 CPUM_ASSERT_RD_MSR_FN(IntelCore2PeciControl);
6052 CPUM_ASSERT_RD_MSR_FN(IntelAtSilvCoreC1Recidency);
6053
6054 CPUM_ASSERT_RD_MSR_FN(P6LastBranchFromIp);
6055 CPUM_ASSERT_RD_MSR_FN(P6LastBranchToIp);
6056 CPUM_ASSERT_RD_MSR_FN(P6LastIntFromIp);
6057 CPUM_ASSERT_RD_MSR_FN(P6LastIntToIp);
6058
6059 CPUM_ASSERT_RD_MSR_FN(AmdFam15hTscRate);
6060 CPUM_ASSERT_RD_MSR_FN(AmdFam15hLwpCfg);
6061 CPUM_ASSERT_RD_MSR_FN(AmdFam15hLwpCbAddr);
6062 CPUM_ASSERT_RD_MSR_FN(AmdFam10hMc4MiscN);
6063 CPUM_ASSERT_RD_MSR_FN(AmdK8PerfCtlN);
6064 CPUM_ASSERT_RD_MSR_FN(AmdK8PerfCtrN);
6065 CPUM_ASSERT_RD_MSR_FN(AmdK8SysCfg);
6066 CPUM_ASSERT_RD_MSR_FN(AmdK8HwCr);
6067 CPUM_ASSERT_RD_MSR_FN(AmdK8IorrBaseN);
6068 CPUM_ASSERT_RD_MSR_FN(AmdK8IorrMaskN);
6069 CPUM_ASSERT_RD_MSR_FN(AmdK8TopOfMemN);
6070 CPUM_ASSERT_RD_MSR_FN(AmdK8NbCfg1);
6071 CPUM_ASSERT_RD_MSR_FN(AmdK8McXcptRedir);
6072 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuNameN);
6073 CPUM_ASSERT_RD_MSR_FN(AmdK8HwThermalCtrl);
6074 CPUM_ASSERT_RD_MSR_FN(AmdK8SwThermalCtrl);
6075 CPUM_ASSERT_RD_MSR_FN(AmdK8FidVidControl);
6076 CPUM_ASSERT_RD_MSR_FN(AmdK8FidVidStatus);
6077 CPUM_ASSERT_RD_MSR_FN(AmdK8McCtlMaskN);
6078 CPUM_ASSERT_RD_MSR_FN(AmdK8SmiOnIoTrapN);
6079 CPUM_ASSERT_RD_MSR_FN(AmdK8SmiOnIoTrapCtlSts);
6080 CPUM_ASSERT_RD_MSR_FN(AmdK8IntPendingMessage);
6081 CPUM_ASSERT_RD_MSR_FN(AmdK8SmiTriggerIoCycle);
6082 CPUM_ASSERT_RD_MSR_FN(AmdFam10hMmioCfgBaseAddr);
6083 CPUM_ASSERT_RD_MSR_FN(AmdFam10hTrapCtlMaybe);
6084 CPUM_ASSERT_RD_MSR_FN(AmdFam10hPStateCurLimit);
6085 CPUM_ASSERT_RD_MSR_FN(AmdFam10hPStateControl);
6086 CPUM_ASSERT_RD_MSR_FN(AmdFam10hPStateStatus);
6087 CPUM_ASSERT_RD_MSR_FN(AmdFam10hPStateN);
6088 CPUM_ASSERT_RD_MSR_FN(AmdFam10hCofVidControl);
6089 CPUM_ASSERT_RD_MSR_FN(AmdFam10hCofVidStatus);
6090 CPUM_ASSERT_RD_MSR_FN(AmdFam10hCStateIoBaseAddr);
6091 CPUM_ASSERT_RD_MSR_FN(AmdFam10hCpuWatchdogTimer);
6092 CPUM_ASSERT_RD_MSR_FN(AmdK8SmmBase);
6093 CPUM_ASSERT_RD_MSR_FN(AmdK8SmmAddr);
6094 CPUM_ASSERT_RD_MSR_FN(AmdK8SmmMask);
6095 CPUM_ASSERT_RD_MSR_FN(AmdK8VmCr);
6096 CPUM_ASSERT_RD_MSR_FN(AmdK8IgnNe);
6097 CPUM_ASSERT_RD_MSR_FN(AmdK8SmmCtl);
6098 CPUM_ASSERT_RD_MSR_FN(AmdK8VmHSavePa);
6099 CPUM_ASSERT_RD_MSR_FN(AmdFam10hVmLockKey);
6100 CPUM_ASSERT_RD_MSR_FN(AmdFam10hSmmLockKey);
6101 CPUM_ASSERT_RD_MSR_FN(AmdFam10hLocalSmiStatus);
6102 CPUM_ASSERT_RD_MSR_FN(AmdFam10hOsVisWrkIdLength);
6103 CPUM_ASSERT_RD_MSR_FN(AmdFam10hOsVisWrkStatus);
6104 CPUM_ASSERT_RD_MSR_FN(AmdFam16hL2IPerfCtlN);
6105 CPUM_ASSERT_RD_MSR_FN(AmdFam16hL2IPerfCtrN);
6106 CPUM_ASSERT_RD_MSR_FN(AmdFam15hNorthbridgePerfCtlN);
6107 CPUM_ASSERT_RD_MSR_FN(AmdFam15hNorthbridgePerfCtrN);
6108 CPUM_ASSERT_RD_MSR_FN(AmdK7MicrocodeCtl);
6109 CPUM_ASSERT_RD_MSR_FN(AmdK7ClusterIdMaybe);
6110 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuIdCtlStd07hEbax);
6111 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuIdCtlStd06hEcx);
6112 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuIdCtlStd01hEdcx);
6113 CPUM_ASSERT_RD_MSR_FN(AmdK8CpuIdCtlExt01hEdcx);
6114 CPUM_ASSERT_RD_MSR_FN(AmdK8PatchLevel);
6115 CPUM_ASSERT_RD_MSR_FN(AmdK7DebugStatusMaybe);
6116 CPUM_ASSERT_RD_MSR_FN(AmdK7BHTraceBaseMaybe);
6117 CPUM_ASSERT_RD_MSR_FN(AmdK7BHTracePtrMaybe);
6118 CPUM_ASSERT_RD_MSR_FN(AmdK7BHTraceLimitMaybe);
6119 CPUM_ASSERT_RD_MSR_FN(AmdK7HardwareDebugToolCfgMaybe);
6120 CPUM_ASSERT_RD_MSR_FN(AmdK7FastFlushCountMaybe);
6121 CPUM_ASSERT_RD_MSR_FN(AmdK7NodeId);
6122 CPUM_ASSERT_RD_MSR_FN(AmdK7DrXAddrMaskN);
6123 CPUM_ASSERT_RD_MSR_FN(AmdK7Dr0DataMatchMaybe);
6124 CPUM_ASSERT_RD_MSR_FN(AmdK7Dr0DataMaskMaybe);
6125 CPUM_ASSERT_RD_MSR_FN(AmdK7LoadStoreCfg);
6126 CPUM_ASSERT_RD_MSR_FN(AmdK7InstrCacheCfg);
6127 CPUM_ASSERT_RD_MSR_FN(AmdK7DataCacheCfg);
6128 CPUM_ASSERT_RD_MSR_FN(AmdK7BusUnitCfg);
6129 CPUM_ASSERT_RD_MSR_FN(AmdK7DebugCtl2Maybe);
6130 CPUM_ASSERT_RD_MSR_FN(AmdFam15hFpuCfg);
6131 CPUM_ASSERT_RD_MSR_FN(AmdFam15hDecoderCfg);
6132 CPUM_ASSERT_RD_MSR_FN(AmdFam10hBusUnitCfg2);
6133 CPUM_ASSERT_RD_MSR_FN(AmdFam15hCombUnitCfg);
6134 CPUM_ASSERT_RD_MSR_FN(AmdFam15hCombUnitCfg2);
6135 CPUM_ASSERT_RD_MSR_FN(AmdFam15hCombUnitCfg3);
6136 CPUM_ASSERT_RD_MSR_FN(AmdFam15hExecUnitCfg);
6137 CPUM_ASSERT_RD_MSR_FN(AmdFam15hLoadStoreCfg2);
6138 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsFetchCtl);
6139 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsFetchLinAddr);
6140 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsFetchPhysAddr);
6141 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpExecCtl);
6142 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpRip);
6143 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpData);
6144 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpData2);
6145 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsOpData3);
6146 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsDcLinAddr);
6147 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsDcPhysAddr);
6148 CPUM_ASSERT_RD_MSR_FN(AmdFam10hIbsCtl);
6149 CPUM_ASSERT_RD_MSR_FN(AmdFam14hIbsBrTarget);
6150
6151    CPUM_ASSERT_RD_MSR_FN(Gim);
6152
6153 AssertReturn(g_aCpumWrMsrFns[kCpumMsrWrFn_Invalid].pfnWrMsr == NULL, VERR_CPUM_IPE_2);
6154 CPUM_ASSERT_WR_MSR_FN(Ia32P5McAddr);
6155 CPUM_ASSERT_WR_MSR_FN(Ia32P5McType);
6156 CPUM_ASSERT_WR_MSR_FN(Ia32TimestampCounter);
6157 CPUM_ASSERT_WR_MSR_FN(Ia32ApicBase);
6158 CPUM_ASSERT_WR_MSR_FN(Ia32FeatureControl);
6159 CPUM_ASSERT_WR_MSR_FN(Ia32BiosSignId);
6160 CPUM_ASSERT_WR_MSR_FN(Ia32BiosUpdateTrigger);
6161 CPUM_ASSERT_WR_MSR_FN(Ia32SmmMonitorCtl);
6162 CPUM_ASSERT_WR_MSR_FN(Ia32PmcN);
6163 CPUM_ASSERT_WR_MSR_FN(Ia32MonitorFilterLineSize);
6164 CPUM_ASSERT_WR_MSR_FN(Ia32MPerf);
6165 CPUM_ASSERT_WR_MSR_FN(Ia32APerf);
6166 CPUM_ASSERT_WR_MSR_FN(Ia32MtrrPhysBaseN);
6167 CPUM_ASSERT_WR_MSR_FN(Ia32MtrrPhysMaskN);
6168 CPUM_ASSERT_WR_MSR_FN(Ia32MtrrFixed);
6169 CPUM_ASSERT_WR_MSR_FN(Ia32MtrrDefType);
6170 CPUM_ASSERT_WR_MSR_FN(Ia32Pat);
6171 CPUM_ASSERT_WR_MSR_FN(Ia32SysEnterCs);
6172 CPUM_ASSERT_WR_MSR_FN(Ia32SysEnterEsp);
6173 CPUM_ASSERT_WR_MSR_FN(Ia32SysEnterEip);
6174 CPUM_ASSERT_WR_MSR_FN(Ia32McgStatus);
6175 CPUM_ASSERT_WR_MSR_FN(Ia32McgCtl);
6176 CPUM_ASSERT_WR_MSR_FN(Ia32DebugCtl);
6177 CPUM_ASSERT_WR_MSR_FN(Ia32SmrrPhysBase);
6178 CPUM_ASSERT_WR_MSR_FN(Ia32SmrrPhysMask);
6179 CPUM_ASSERT_WR_MSR_FN(Ia32PlatformDcaCap);
6180 CPUM_ASSERT_WR_MSR_FN(Ia32Dca0Cap);
6181 CPUM_ASSERT_WR_MSR_FN(Ia32PerfEvtSelN);
6182 CPUM_ASSERT_WR_MSR_FN(Ia32PerfStatus);
6183 CPUM_ASSERT_WR_MSR_FN(Ia32PerfCtl);
6184 CPUM_ASSERT_WR_MSR_FN(Ia32FixedCtrN);
6185 CPUM_ASSERT_WR_MSR_FN(Ia32PerfCapabilities);
6186 CPUM_ASSERT_WR_MSR_FN(Ia32FixedCtrCtrl);
6187 CPUM_ASSERT_WR_MSR_FN(Ia32PerfGlobalStatus);
6188 CPUM_ASSERT_WR_MSR_FN(Ia32PerfGlobalCtrl);
6189 CPUM_ASSERT_WR_MSR_FN(Ia32PerfGlobalOvfCtrl);
6190 CPUM_ASSERT_WR_MSR_FN(Ia32PebsEnable);
6191 CPUM_ASSERT_WR_MSR_FN(Ia32ClockModulation);
6192 CPUM_ASSERT_WR_MSR_FN(Ia32ThermInterrupt);
6193 CPUM_ASSERT_WR_MSR_FN(Ia32ThermStatus);
6194 CPUM_ASSERT_WR_MSR_FN(Ia32MiscEnable);
6195 CPUM_ASSERT_WR_MSR_FN(Ia32McCtlStatusAddrMiscN);
6196 CPUM_ASSERT_WR_MSR_FN(Ia32McNCtl2);
6197 CPUM_ASSERT_WR_MSR_FN(Ia32DsArea);
6198 CPUM_ASSERT_WR_MSR_FN(Ia32TscDeadline);
6199 CPUM_ASSERT_WR_MSR_FN(Ia32X2ApicN);
6200 CPUM_ASSERT_WR_MSR_FN(Ia32DebugInterface);
6201 CPUM_ASSERT_WR_MSR_FN(Ia32SpecCtrl);
6202 CPUM_ASSERT_WR_MSR_FN(Ia32PredCmd);
6203 CPUM_ASSERT_WR_MSR_FN(Ia32FlushCmd);
6204
6205 CPUM_ASSERT_WR_MSR_FN(Amd64Efer);
6206 CPUM_ASSERT_WR_MSR_FN(Amd64SyscallTarget);
6207 CPUM_ASSERT_WR_MSR_FN(Amd64LongSyscallTarget);
6208 CPUM_ASSERT_WR_MSR_FN(Amd64CompSyscallTarget);
6209 CPUM_ASSERT_WR_MSR_FN(Amd64SyscallFlagMask);
6210 CPUM_ASSERT_WR_MSR_FN(Amd64FsBase);
6211 CPUM_ASSERT_WR_MSR_FN(Amd64GsBase);
6212 CPUM_ASSERT_WR_MSR_FN(Amd64KernelGsBase);
6213 CPUM_ASSERT_WR_MSR_FN(Amd64TscAux);
6214
6215 CPUM_ASSERT_WR_MSR_FN(IntelEblCrPowerOn);
6216 CPUM_ASSERT_WR_MSR_FN(IntelP4EbcHardPowerOn);
6217 CPUM_ASSERT_WR_MSR_FN(IntelP4EbcSoftPowerOn);
6218 CPUM_ASSERT_WR_MSR_FN(IntelP4EbcFrequencyId);
6219 CPUM_ASSERT_WR_MSR_FN(IntelFlexRatio);
6220 CPUM_ASSERT_WR_MSR_FN(IntelPkgCStConfigControl);
6221 CPUM_ASSERT_WR_MSR_FN(IntelPmgIoCaptureBase);
6222 CPUM_ASSERT_WR_MSR_FN(IntelLastBranchFromToN);
6223 CPUM_ASSERT_WR_MSR_FN(IntelLastBranchFromN);
6224 CPUM_ASSERT_WR_MSR_FN(IntelLastBranchToN);
6225 CPUM_ASSERT_WR_MSR_FN(IntelLastBranchTos);
6226 CPUM_ASSERT_WR_MSR_FN(IntelBblCrCtl);
6227 CPUM_ASSERT_WR_MSR_FN(IntelBblCrCtl3);
6228 CPUM_ASSERT_WR_MSR_FN(IntelI7TemperatureTarget);
6229 CPUM_ASSERT_WR_MSR_FN(IntelI7MsrOffCoreResponseN);
6230 CPUM_ASSERT_WR_MSR_FN(IntelI7MiscPwrMgmt);
6231 CPUM_ASSERT_WR_MSR_FN(IntelP6CrN);
6232 CPUM_ASSERT_WR_MSR_FN(IntelCpuId1FeatureMaskEcdx);
6233 CPUM_ASSERT_WR_MSR_FN(IntelCpuId1FeatureMaskEax);
6234 CPUM_ASSERT_WR_MSR_FN(IntelCpuId80000001FeatureMaskEcdx);
6235 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyAesNiCtl);
6236 CPUM_ASSERT_WR_MSR_FN(IntelI7TurboRatioLimit);
6237 CPUM_ASSERT_WR_MSR_FN(IntelI7LbrSelect);
6238 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyErrorControl);
6239 CPUM_ASSERT_WR_MSR_FN(IntelI7PowerCtl);
6240 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyPebsNumAlt);
6241 CPUM_ASSERT_WR_MSR_FN(IntelI7PebsLdLat);
6242 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyVrCurrentConfig);
6243 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyVrMiscConfig);
6244 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyPkgCnIrtlN);
6245 CPUM_ASSERT_WR_MSR_FN(IntelI7SandyPkgC2Residency);
6246 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPkgPowerLimit);
6247 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplDramPowerLimit);
6248 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPp0PowerLimit);
6249 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPp0Policy);
6250 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPp1PowerLimit);
6251 CPUM_ASSERT_WR_MSR_FN(IntelI7RaplPp1Policy);
6252 CPUM_ASSERT_WR_MSR_FN(IntelI7IvyConfigTdpControl);
6253 CPUM_ASSERT_WR_MSR_FN(IntelI7IvyTurboActivationRatio);
6254 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfGlobalCtrl);
6255 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfGlobalStatus);
6256 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfGlobalOvfCtrl);
6257 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfFixedCtrCtrl);
6258 CPUM_ASSERT_WR_MSR_FN(IntelI7UncPerfFixedCtr);
6259 CPUM_ASSERT_WR_MSR_FN(IntelI7UncArbPerfCtrN);
6260 CPUM_ASSERT_WR_MSR_FN(IntelI7UncArbPerfEvtSelN);
6261 CPUM_ASSERT_WR_MSR_FN(IntelCore2EmttmCrTablesN);
6262 CPUM_ASSERT_WR_MSR_FN(IntelCore2SmmCStMiscInfo);
6263 CPUM_ASSERT_WR_MSR_FN(IntelCore1ExtConfig);
6264 CPUM_ASSERT_WR_MSR_FN(IntelCore1DtsCalControl);
6265 CPUM_ASSERT_WR_MSR_FN(IntelCore2PeciControl);
6266
6267 CPUM_ASSERT_WR_MSR_FN(P6LastIntFromIp);
6268 CPUM_ASSERT_WR_MSR_FN(P6LastIntToIp);
6269
6270 CPUM_ASSERT_WR_MSR_FN(AmdFam15hTscRate);
6271 CPUM_ASSERT_WR_MSR_FN(AmdFam15hLwpCfg);
6272 CPUM_ASSERT_WR_MSR_FN(AmdFam15hLwpCbAddr);
6273 CPUM_ASSERT_WR_MSR_FN(AmdFam10hMc4MiscN);
6274 CPUM_ASSERT_WR_MSR_FN(AmdK8PerfCtlN);
6275 CPUM_ASSERT_WR_MSR_FN(AmdK8PerfCtrN);
6276 CPUM_ASSERT_WR_MSR_FN(AmdK8SysCfg);
6277 CPUM_ASSERT_WR_MSR_FN(AmdK8HwCr);
6278 CPUM_ASSERT_WR_MSR_FN(AmdK8IorrBaseN);
6279 CPUM_ASSERT_WR_MSR_FN(AmdK8IorrMaskN);
6280 CPUM_ASSERT_WR_MSR_FN(AmdK8TopOfMemN);
6281 CPUM_ASSERT_WR_MSR_FN(AmdK8NbCfg1);
6282 CPUM_ASSERT_WR_MSR_FN(AmdK8McXcptRedir);
6283 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuNameN);
6284 CPUM_ASSERT_WR_MSR_FN(AmdK8HwThermalCtrl);
6285 CPUM_ASSERT_WR_MSR_FN(AmdK8SwThermalCtrl);
6286 CPUM_ASSERT_WR_MSR_FN(AmdK8FidVidControl);
6287 CPUM_ASSERT_WR_MSR_FN(AmdK8McCtlMaskN);
6288 CPUM_ASSERT_WR_MSR_FN(AmdK8SmiOnIoTrapN);
6289 CPUM_ASSERT_WR_MSR_FN(AmdK8SmiOnIoTrapCtlSts);
6290 CPUM_ASSERT_WR_MSR_FN(AmdK8IntPendingMessage);
6291 CPUM_ASSERT_WR_MSR_FN(AmdK8SmiTriggerIoCycle);
6292 CPUM_ASSERT_WR_MSR_FN(AmdFam10hMmioCfgBaseAddr);
6293 CPUM_ASSERT_WR_MSR_FN(AmdFam10hTrapCtlMaybe);
6294 CPUM_ASSERT_WR_MSR_FN(AmdFam10hPStateControl);
6295 CPUM_ASSERT_WR_MSR_FN(AmdFam10hPStateStatus);
6296 CPUM_ASSERT_WR_MSR_FN(AmdFam10hPStateN);
6297 CPUM_ASSERT_WR_MSR_FN(AmdFam10hCofVidControl);
6298 CPUM_ASSERT_WR_MSR_FN(AmdFam10hCofVidStatus);
6299 CPUM_ASSERT_WR_MSR_FN(AmdFam10hCStateIoBaseAddr);
6300 CPUM_ASSERT_WR_MSR_FN(AmdFam10hCpuWatchdogTimer);
6301 CPUM_ASSERT_WR_MSR_FN(AmdK8SmmBase);
6302 CPUM_ASSERT_WR_MSR_FN(AmdK8SmmAddr);
6303 CPUM_ASSERT_WR_MSR_FN(AmdK8SmmMask);
6304 CPUM_ASSERT_WR_MSR_FN(AmdK8VmCr);
6305 CPUM_ASSERT_WR_MSR_FN(AmdK8IgnNe);
6306 CPUM_ASSERT_WR_MSR_FN(AmdK8SmmCtl);
6307 CPUM_ASSERT_WR_MSR_FN(AmdK8VmHSavePa);
6308 CPUM_ASSERT_WR_MSR_FN(AmdFam10hVmLockKey);
6309 CPUM_ASSERT_WR_MSR_FN(AmdFam10hSmmLockKey);
6310 CPUM_ASSERT_WR_MSR_FN(AmdFam10hLocalSmiStatus);
6311 CPUM_ASSERT_WR_MSR_FN(AmdFam10hOsVisWrkIdLength);
6312 CPUM_ASSERT_WR_MSR_FN(AmdFam10hOsVisWrkStatus);
6313 CPUM_ASSERT_WR_MSR_FN(AmdFam16hL2IPerfCtlN);
6314 CPUM_ASSERT_WR_MSR_FN(AmdFam16hL2IPerfCtrN);
6315 CPUM_ASSERT_WR_MSR_FN(AmdFam15hNorthbridgePerfCtlN);
6316 CPUM_ASSERT_WR_MSR_FN(AmdFam15hNorthbridgePerfCtrN);
6317 CPUM_ASSERT_WR_MSR_FN(AmdK7MicrocodeCtl);
6318 CPUM_ASSERT_WR_MSR_FN(AmdK7ClusterIdMaybe);
6319 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuIdCtlStd07hEbax);
6320 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuIdCtlStd06hEcx);
6321 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuIdCtlStd01hEdcx);
6322 CPUM_ASSERT_WR_MSR_FN(AmdK8CpuIdCtlExt01hEdcx);
6323 CPUM_ASSERT_WR_MSR_FN(AmdK8PatchLoader);
6324 CPUM_ASSERT_WR_MSR_FN(AmdK7DebugStatusMaybe);
6325 CPUM_ASSERT_WR_MSR_FN(AmdK7BHTraceBaseMaybe);
6326 CPUM_ASSERT_WR_MSR_FN(AmdK7BHTracePtrMaybe);
6327 CPUM_ASSERT_WR_MSR_FN(AmdK7BHTraceLimitMaybe);
6328 CPUM_ASSERT_WR_MSR_FN(AmdK7HardwareDebugToolCfgMaybe);
6329 CPUM_ASSERT_WR_MSR_FN(AmdK7FastFlushCountMaybe);
6330 CPUM_ASSERT_WR_MSR_FN(AmdK7NodeId);
6331 CPUM_ASSERT_WR_MSR_FN(AmdK7DrXAddrMaskN);
6332 CPUM_ASSERT_WR_MSR_FN(AmdK7Dr0DataMatchMaybe);
6333 CPUM_ASSERT_WR_MSR_FN(AmdK7Dr0DataMaskMaybe);
6334 CPUM_ASSERT_WR_MSR_FN(AmdK7LoadStoreCfg);
6335 CPUM_ASSERT_WR_MSR_FN(AmdK7InstrCacheCfg);
6336 CPUM_ASSERT_WR_MSR_FN(AmdK7DataCacheCfg);
6337 CPUM_ASSERT_WR_MSR_FN(AmdK7BusUnitCfg);
6338 CPUM_ASSERT_WR_MSR_FN(AmdK7DebugCtl2Maybe);
6339 CPUM_ASSERT_WR_MSR_FN(AmdFam15hFpuCfg);
6340 CPUM_ASSERT_WR_MSR_FN(AmdFam15hDecoderCfg);
6341 CPUM_ASSERT_WR_MSR_FN(AmdFam10hBusUnitCfg2);
6342 CPUM_ASSERT_WR_MSR_FN(AmdFam15hCombUnitCfg);
6343 CPUM_ASSERT_WR_MSR_FN(AmdFam15hCombUnitCfg2);
6344 CPUM_ASSERT_WR_MSR_FN(AmdFam15hCombUnitCfg3);
6345 CPUM_ASSERT_WR_MSR_FN(AmdFam15hExecUnitCfg);
6346 CPUM_ASSERT_WR_MSR_FN(AmdFam15hLoadStoreCfg2);
6347 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsFetchCtl);
6348 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsFetchLinAddr);
6349 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsFetchPhysAddr);
6350 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpExecCtl);
6351 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpRip);
6352 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpData);
6353 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpData2);
6354 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsOpData3);
6355 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsDcLinAddr);
6356 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsDcPhysAddr);
6357 CPUM_ASSERT_WR_MSR_FN(AmdFam10hIbsCtl);
6358 CPUM_ASSERT_WR_MSR_FN(AmdFam14hIbsBrTarget);
6359
6360 CPUM_ASSERT_WR_MSR_FN(Gim);
6361
6362 return VINF_SUCCESS;
6363}
6364#endif /* VBOX_STRICT && IN_RING3 */
6365
6366
6367/**
6368 * Gets the scalable bus frequency.
6369 *
6370 * The bus frequency is used as a base in several MSRs that give the CPU and
6371 * other frequency ratios.
6372 *
6373 * @returns Scalable bus frequency in Hz. Will not return CPUM_SBUSFREQ_UNKNOWN.
6374 * @param pVM The cross context VM structure.
6375 */
6376VMMDECL(uint64_t) CPUMGetGuestScalableBusFrequency(PVM pVM)
6377{
6378 uint64_t uFreq = pVM->cpum.s.GuestInfo.uScalableBusFreq;
6379 if (uFreq == CPUM_SBUSFREQ_UNKNOWN)
6380 uFreq = CPUM_SBUSFREQ_100MHZ;
6381 return uFreq;
6382}
6383
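/*
 * Illustrative sketch, not part of the build: turning a ratio-style MSR field
 * into an absolute frequency using the scalable bus frequency as the base.
 * The existence and width of the ratio field are assumptions made for the
 * example, not a statement about any specific MSR layout.
 */
#if 0
static uint64_t exampleRatioToHz(PVM pVM, uint8_t uRatio)
{
    return CPUMGetGuestScalableBusFrequency(pVM) * uRatio; /* E.g. 100 MHz * 34 = 3.4 GHz. */
}
#endif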
6384
6385/**
6386 * Sets the guest EFER MSR without performing any additional checks.
6387 *
6388 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6389 * @param uOldEfer The previous EFER MSR value.
6390 * @param uValidEfer The new, validated EFER MSR value.
6391 *
6392 * @remarks One would normally call CPUMIsGuestEferMsrWriteValid() to validate the new
6393 *          value before calling this function to perform the EFER transition.
6394 */
6395VMMDECL(void) CPUMSetGuestEferMsrNoChecks(PVMCPUCC pVCpu, uint64_t uOldEfer, uint64_t uValidEfer)
6396{
6397 pVCpu->cpum.s.Guest.msrEFER = uValidEfer;
6398
6399 /* AMD64 Architecture Programmer's Manual: 15.15 TLB Control; flush the TLB
6400 if MSR_K6_EFER_NXE, MSR_K6_EFER_LME or MSR_K6_EFER_LMA are changed. */
6401 if ( (uOldEfer & (MSR_K6_EFER_NXE | MSR_K6_EFER_LME | MSR_K6_EFER_LMA))
6402 != (pVCpu->cpum.s.Guest.msrEFER & (MSR_K6_EFER_NXE | MSR_K6_EFER_LME | MSR_K6_EFER_LMA)))
6403 {
6404 /// @todo PGMFlushTLB(pVCpu, cr3, true /*fGlobal*/);
6405 HMFlushTlb(pVCpu);
6406
6407 /* Notify PGM about NXE changes. */
6408 if ( (uOldEfer & MSR_K6_EFER_NXE)
6409 != (pVCpu->cpum.s.Guest.msrEFER & MSR_K6_EFER_NXE))
6410 PGMNotifyNxeChanged(pVCpu, !(uOldEfer & MSR_K6_EFER_NXE));
6411 }
6412}
6413
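/*
 * Illustrative sketch, not part of the build: the validate-then-apply flow
 * mentioned in the remark above, written as a hypothetical WRMSR-side helper.
 * It assumes the usual CPUMGetGuestCR0()/CPUMGetGuestEFER() getters for the
 * current register values.
 */
#if 0
static VBOXSTRICTRC exampleWriteEfer(PVM pVM, PVMCPUCC pVCpu, uint64_t uNewEfer)
{
    uint64_t const uOldEfer   = CPUMGetGuestEFER(pVCpu);
    uint64_t       uValidEfer = 0;
    int rc = CPUMIsGuestEferMsrWriteValid(pVM, CPUMGetGuestCR0(pVCpu), uOldEfer, uNewEfer, &uValidEfer);
    if (RT_FAILURE(rc))
        return VERR_CPUM_RAISE_GP_0;            /* Reserved bit set or illegal LME transition. */
    CPUMSetGuestEferMsrNoChecks(pVCpu, uOldEfer, uValidEfer);
    return VINF_SUCCESS;
}
#endif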
6414
6415/**
6416 * Checks if a guest PAT MSR write is valid.
6417 *
6418 * @returns @c true if the PAT bit combination is valid, @c false otherwise.
6419 * @param uValue The PAT MSR value.
6420 */
6421VMMDECL(bool) CPUMIsPatMsrValid(uint64_t uValue)
6422{
6423 for (uint32_t cShift = 0; cShift < 63; cShift += 8)
6424 {
6425 /* Check all eight bits because the top 5 bits of each byte are reserved. */
6426 uint8_t uType = (uint8_t)(uValue >> cShift);
6427 if ((uType >= 8) || (uType == 2) || (uType == 3))
6428 {
6429            Log(("CPUM: Invalid PAT type at %u:%u in IA32_PAT: %#llx (%#x)\n", cShift + 7, cShift, uValue, uType));
6430 return false;
6431 }
6432 }
6433 return true;
6434}
6435
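/*
 * Illustrative sketch, not part of the build: per-byte PAT type checking in
 * action.  Each of the eight type fields must be one of the encodings 0 (UC),
 * 1 (WC), 4 (WT), 5 (WP), 6 (WB) or 7 (UC-), so the architectural power-on
 * value passes while anything using the reserved encodings 2, 3 or >= 8 fails.
 */
#if 0
static void examplePatChecks(void)
{
    Assert( CPUMIsPatMsrValid(UINT64_C(0x0007040600070406))); /* Power-on default: WB, WT, UC-, UC repeated. */
    Assert(!CPUMIsPatMsrValid(UINT64_C(0x0007040600070402))); /* Type 2 in the lowest byte is reserved.      */
    Assert(!CPUMIsPatMsrValid(UINT64_C(0x0807040600070406))); /* Type 8 in the highest byte is reserved.     */
}
#endif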
6436
6437/**
6438 * Validates an EFER MSR write and provides the new, validated EFER MSR.
6439 *
6440 * @returns VBox status code.
6441 * @param pVM The cross context VM structure.
6442 * @param uCr0 The CR0 of the CPU corresponding to the EFER MSR.
6443 * @param uOldEfer Value of the previous EFER MSR on the CPU if any.
6444 * @param uNewEfer The new EFER MSR value being written.
6445 * @param puValidEfer Where to store the validated EFER (only updated if
6446 * this function returns VINF_SUCCESS).
6447 */
6448VMMDECL(int) CPUMIsGuestEferMsrWriteValid(PVM pVM, uint64_t uCr0, uint64_t uOldEfer, uint64_t uNewEfer, uint64_t *puValidEfer)
6449{
6450    /* #GP(0) if anything outside the allowed bits is set. */
6451 uint64_t fMask = CPUMGetGuestEferMsrValidMask(pVM);
6452 if (uNewEfer & ~fMask)
6453 {
6454        Log(("CPUM: Setting disallowed EFER bit. uNewEfer=%#RX64 fAllowed=%#RX64 -> #GP(0)\n", uNewEfer, fMask));
6455 return VERR_CPUM_RAISE_GP_0;
6456 }
6457
6458 /* Check for illegal MSR_K6_EFER_LME transitions: not allowed to change LME if
6459 paging is enabled. (AMD Arch. Programmer's Manual Volume 2: Table 14-5) */
6460 if ( (uOldEfer & MSR_K6_EFER_LME) != (uNewEfer & MSR_K6_EFER_LME)
6461 && (uCr0 & X86_CR0_PG))
6462 {
6463 Log(("CPUM: Illegal MSR_K6_EFER_LME change: paging is enabled!!\n"));
6464 return VERR_CPUM_RAISE_GP_0;
6465 }
6466
6467 /* There are a few more: e.g. MSR_K6_EFER_LMSLE. */
6468 AssertMsg(!(uNewEfer & ~( MSR_K6_EFER_NXE
6469 | MSR_K6_EFER_LME
6470 | MSR_K6_EFER_LMA /* ignored anyway */
6471 | MSR_K6_EFER_SCE
6472 | MSR_K6_EFER_FFXSR
6473 | MSR_K6_EFER_SVME)),
6474 ("Unexpected value %#RX64\n", uNewEfer));
6475
6476    /* Ignore EFER.LMA; it's updated when setting CR0. */
6477 fMask &= ~MSR_K6_EFER_LMA;
6478
6479 *puValidEfer = (uOldEfer & ~fMask) | (uNewEfer & fMask);
6480 return VINF_SUCCESS;
6481}
6482
6483
6484/**
6485 * Gets the mask of valid EFER bits depending on supported guest-CPU features.
6486 *
6487 * @returns Mask of valid EFER bits.
6488 * @param pVM The cross context VM structure.
6489 *
6490 * @remarks EFER.LMA is included as part of the valid mask even though the guest
6491 *          cannot change it; it is a read-only bit rather than an invalid one.
6492 */
6493VMMDECL(uint64_t) CPUMGetGuestEferMsrValidMask(PVM pVM)
6494{
6495 uint32_t const fExtFeatures = pVM->cpum.s.aGuestCpuIdPatmExt[0].uEax >= 0x80000001
6496 ? pVM->cpum.s.aGuestCpuIdPatmExt[1].uEdx
6497 : 0;
6498 uint64_t fMask = 0;
6499 uint64_t const fIgnoreMask = MSR_K6_EFER_LMA;
6500
6501    /* Collect the bits the guest is allowed to change; read-only bits like LMA are covered by fIgnoreMask. */
6502 if (fExtFeatures & X86_CPUID_EXT_FEATURE_EDX_NX)
6503 fMask |= MSR_K6_EFER_NXE;
6504 if (fExtFeatures & X86_CPUID_EXT_FEATURE_EDX_LONG_MODE)
6505 fMask |= MSR_K6_EFER_LME;
6506 if (fExtFeatures & X86_CPUID_EXT_FEATURE_EDX_SYSCALL)
6507 fMask |= MSR_K6_EFER_SCE;
6508 if (fExtFeatures & X86_CPUID_AMD_FEATURE_EDX_FFXSR)
6509 fMask |= MSR_K6_EFER_FFXSR;
6510 if (pVM->cpum.s.GuestFeatures.fSvm)
6511 fMask |= MSR_K6_EFER_SVME;
6512
6513 return (fIgnoreMask | fMask);
6514}
6515
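/*
 * Illustrative sketch, not part of the build: a worked example of the mask
 * composition above for a guest CPU profile that advertises NX, long mode and
 * SYSCALL but neither FFXSR nor SVM.
 */
#if 0
static void exampleEferValidMask(void)
{
    uint64_t const fExpected = MSR_K6_EFER_LMA  /* Read-only, always part of the mask. */
                             | MSR_K6_EFER_NXE  /* CPUID ext feature: NX.              */
                             | MSR_K6_EFER_LME  /* CPUID ext feature: long mode.       */
                             | MSR_K6_EFER_SCE; /* CPUID ext feature: SYSCALL/SYSRET.  */
    NOREF(fExpected);
}
#endif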
6516
6517/**
6518 * Fast way for HM to access the MSR_K8_TSC_AUX register.
6519 *
6520 * @returns The register value.
6521 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6522 * @thread EMT(pVCpu)
6523 */
6524VMM_INT_DECL(uint64_t) CPUMGetGuestTscAux(PVMCPUCC pVCpu)
6525{
6526 Assert(!(pVCpu->cpum.s.Guest.fExtrn & CPUMCTX_EXTRN_TSC_AUX));
6527 return pVCpu->cpum.s.GuestMsrs.msr.TscAux;
6528}
6529
6530
6531/**
6532 * Fast way for HM to access the MSR_K8_TSC_AUX register.
6533 *
6534 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6535 * @param uValue The new value.
6536 * @thread EMT(pVCpu)
6537 */
6538VMM_INT_DECL(void) CPUMSetGuestTscAux(PVMCPUCC pVCpu, uint64_t uValue)
6539{
6540 pVCpu->cpum.s.Guest.fExtrn &= ~CPUMCTX_EXTRN_TSC_AUX;
6541 pVCpu->cpum.s.GuestMsrs.msr.TscAux = uValue;
6542}
6543
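/*
 * Illustrative sketch, not part of the build: the CPUMCTX_EXTRN_TSC_AUX
 * contract shared by the two accessors above.  The setter marks the value as
 * present in the guest context, which is exactly what the getter's assertion
 * relies on; a caller that has not written the value must import it from the
 * hardware context first (not shown).
 */
#if 0
static void exampleTscAuxRoundTrip(PVMCPUCC pVCpu)
{
    CPUMSetGuestTscAux(pVCpu, UINT64_C(0) /* hypothetical IA32_TSC_AUX value */);
    uint64_t const uTscAux = CPUMGetGuestTscAux(pVCpu); /* Safe: the EXTRN flag was just cleared. */
    NOREF(uTscAux);
}
#endif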
6544
6545/**
6546 * Fast way for HM to access the IA32_SPEC_CTRL register.
6547 *
6548 * @returns The register value.
6549 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6550 * @thread EMT(pVCpu)
6551 */
6552VMM_INT_DECL(uint64_t) CPUMGetGuestSpecCtrl(PVMCPUCC pVCpu)
6553{
6554 return pVCpu->cpum.s.GuestMsrs.msr.SpecCtrl;
6555}
6556
6557
6558/**
6559 * Fast way for HM to access the IA32_SPEC_CTRL register.
6560 *
6561 * @param pVCpu The cross context virtual CPU structure of the calling EMT.
6562 * @param uValue The new value.
6563 * @thread EMT(pVCpu)
6564 */
6565VMM_INT_DECL(void) CPUMSetGuestSpecCtrl(PVMCPUCC pVCpu, uint64_t uValue)
6566{
6567 pVCpu->cpum.s.GuestMsrs.msr.SpecCtrl = uValue;
6568}
6569