Lines Matching refs:ASMRdMsr

1313 pHostMsr->u64Value = ASMRdMsr(pHostMsr->u32Msr);
1427 pHostMsr->u64Value = ASMRdMsr(pHostMsr->u32Msr);
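
Hits 1313 and 1427 populate the VMX auto-load/store MSR area: before VM-entry, each
entry's u64Value is refreshed from the live MSR so the CPU can reload correct host
values on VM-exit. A minimal sketch of the pattern, assuming the entry layout the
VMX spec mandates for the MSR-load/store areas (MSR index, reserved dword, 64-bit
value); the helper name snapshotHostMsrs is hypothetical:

    #include <stdint.h>

    uint64_t ASMRdMsr(uint32_t uMsr);   /* IPRT wrapper around RDMSR. */

    /* One entry of the VM-exit MSR-load area: index, reserved, value. */
    typedef struct VMXAUTOMSR
    {
        uint32_t u32Msr;
        uint32_t u32Reserved;
        uint64_t u64Value;
    } VMXAUTOMSR;

    /* Hypothetical helper: refresh the host value of every entry. */
    static void snapshotHostMsrs(VMXAUTOMSR *paHostMsrs, uint32_t cMsrs)
    {
        for (uint32_t i = 0; i < cMsrs; i++)
        {
            VMXAUTOMSR *pHostMsr = &paHostMsrs[i];
            pHostMsr->u64Value = ASMRdMsr(pHostMsr->u32Msr);
        }
    }
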
1452 pVCpu->hm.s.vmx.u64HostLStarMsr        = ASMRdMsr(MSR_K8_LSTAR);
1453 pVCpu->hm.s.vmx.u64HostStarMsr         = ASMRdMsr(MSR_K6_STAR);
1454 pVCpu->hm.s.vmx.u64HostSFMaskMsr       = ASMRdMsr(MSR_K8_SF_MASK);
1455 pVCpu->hm.s.vmx.u64HostKernelGSBaseMsr = ASMRdMsr(MSR_K8_KERNEL_GS_BASE);
1502 pMixedCtx->msrLSTAR        = ASMRdMsr(MSR_K8_LSTAR);
1503 pMixedCtx->msrSTAR         = ASMRdMsr(MSR_K6_STAR);
1504 pMixedCtx->msrSFMASK       = ASMRdMsr(MSR_K8_SF_MASK);
1505 pMixedCtx->msrKERNELGSBASE = ASMRdMsr(MSR_K8_KERNEL_GS_BASE);
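
Hits 1452-1455 and 1502-1505 are the two halves of lazy MSR switching: the host's
syscall-related MSRs are cached before the guest gets to run with its own values,
and after the guest has run the current contents are read back into the guest CPU
context. A combined sketch; the struct layouts and helper names are stand-ins for
pVCpu->hm.s.vmx.* and pMixedCtx->*, while the MSR indices are the architectural
values:

    #include <stdint.h>

    uint64_t ASMRdMsr(uint32_t uMsr);

    #define MSR_K6_STAR            UINT32_C(0xc0000081)  /* SYSCALL CS/SS selectors.  */
    #define MSR_K8_LSTAR           UINT32_C(0xc0000082)  /* 64-bit SYSCALL entry RIP. */
    #define MSR_K8_SF_MASK         UINT32_C(0xc0000084)  /* SYSCALL RFLAGS mask.      */
    #define MSR_K8_KERNEL_GS_BASE  UINT32_C(0xc0000102)  /* SWAPGS shadow GS base.    */

    typedef struct HOSTMSRCACHE
    {
        uint64_t u64HostLStarMsr, u64HostStarMsr;
        uint64_t u64HostSFMaskMsr, u64HostKernelGSBaseMsr;
    } HOSTMSRCACHE;

    typedef struct GUESTCTX
    {
        uint64_t msrLSTAR, msrSTAR, msrSFMASK, msrKERNELGSBASE;
    } GUESTCTX;

    /* Before handing the MSRs to the guest: remember the host values. */
    static void lazySaveHostMsrs(HOSTMSRCACHE *pCache)
    {
        pCache->u64HostLStarMsr        = ASMRdMsr(MSR_K8_LSTAR);
        pCache->u64HostStarMsr         = ASMRdMsr(MSR_K6_STAR);
        pCache->u64HostSFMaskMsr       = ASMRdMsr(MSR_K8_SF_MASK);
        pCache->u64HostKernelGSBaseMsr = ASMRdMsr(MSR_K8_KERNEL_GS_BASE);
    }

    /* After the guest ran: pull its current values into the context. */
    static void lazySaveGuestMsrs(GUESTCTX *pMixedCtx)
    {
        pMixedCtx->msrLSTAR        = ASMRdMsr(MSR_K8_LSTAR);
        pMixedCtx->msrSTAR         = ASMRdMsr(MSR_K6_STAR);
        pMixedCtx->msrSFMASK       = ASMRdMsr(MSR_K8_SF_MASK);
        pMixedCtx->msrKERNELGSBASE = ASMRdMsr(MSR_K8_KERNEL_GS_BASE);
    }
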
1535 Assert(ASMRdMsr(uMsr) == pVCpu->hm.s.vmx.u64Host##a_HostMsr##Msr); \
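
Hit 1535 is the body of an assertion macro: '##' token-pasting splices the MSR's
short name into the cache field, so one macro can check any of the u64Host*Msr
fields against the live MSR. A sketch of the idiom (the macro and parameter names
here are hypothetical; the field naming follows the hit):

    #include <assert.h>
    #include <stdint.h>

    uint64_t ASMRdMsr(uint32_t uMsr);

    /* Hypothetical debug check: cached host value must match the live MSR. */
    #define ASSERT_HOST_MSR_CACHED(pCache, uMsr, a_HostMsr) \
        assert(ASMRdMsr(uMsr) == (pCache)->u64Host##a_HostMsr##Msr)

    /* ASSERT_HOST_MSR_CACHED(pCache, MSR_K8_LSTAR, LStar) expands to:
       assert(ASMRdMsr(MSR_K8_LSTAR) == (pCache)->u64HostLStarMsr);    */
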
1646 uint64_t u64HostEferMsr = ASMRdMsr(MSR_K6_EFER);
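
Hit 1646 takes a one-off snapshot of the host's EFER, the kind of read used when
deciding whether guest and host EFER differ enough that the MSR must be swapped
explicitly across VM-entry/VM-exit. A hedged sketch of such a predicate (the
helper and the exact set of decision bits are assumptions; the EFER index and bit
values are architectural):

    #include <stdbool.h>
    #include <stdint.h>

    uint64_t ASMRdMsr(uint32_t uMsr);

    #define MSR_K6_EFER      UINT32_C(0xc0000080)
    #define MSR_K6_EFER_SCE  UINT64_C(0x0001)  /* SYSCALL enable (bit 0).     */
    #define MSR_K6_EFER_LMA  UINT64_C(0x0400)  /* Long mode active (bit 10).  */
    #define MSR_K6_EFER_NXE  UINT64_C(0x0800)  /* No-execute enable (bit 11). */

    /* Hypothetical: swap EFER only if the guest disagrees with the host in
       bits that VM-entry/VM-exit controls cannot reconcile by themselves. */
    static bool shouldSwapEferMsr(uint64_t u64GuestEfer)
    {
        uint64_t u64HostEferMsr = ASMRdMsr(MSR_K6_EFER);
        uint64_t fDiff = u64HostEferMsr ^ u64GuestEfer;
        return (fDiff & (MSR_K6_EFER_NXE | MSR_K6_EFER_SCE | MSR_K6_EFER_LMA)) != 0;
    }
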
1681 uint64_t u64Msr = ASMRdMsr(pHostMsr->u32Msr);
1682 AssertMsgReturnVoid(pHostMsr->u64Value == u64Msr, ("u32Msr=%#RX32 VMCS Value=%#RX64 ASMRdMsr=%#RX64 cMsrs=%u\n",
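
Hits 1681-1682 form a debug cross-check over the same auto-load/store area: each
entry's recorded value is compared with a fresh ASMRdMsr read, and a mismatch
triggers AssertMsgReturnVoid with the MSR index, the area value, the live value,
and the entry count. A sketch of that loop, with fprintf standing in for the
VirtualBox assertion machinery:

    #include <inttypes.h>
    #include <stdint.h>
    #include <stdio.h>

    uint64_t ASMRdMsr(uint32_t uMsr);

    typedef struct VMXAUTOMSR { uint32_t u32Msr, u32Reserved; uint64_t u64Value; } VMXAUTOMSR;

    static void checkAutoLoadStoreMsrs(const VMXAUTOMSR *paHostMsrs, uint32_t cMsrs)
    {
        for (uint32_t i = 0; i < cMsrs; i++)
        {
            const VMXAUTOMSR *pHostMsr = &paHostMsrs[i];
            uint64_t u64Msr = ASMRdMsr(pHostMsr->u32Msr);
            if (pHostMsr->u64Value != u64Msr)   /* Stand-in for AssertMsgReturnVoid. */
            {
                fprintf(stderr, "u32Msr=%#" PRIx32 " area value=%#" PRIx64
                                " ASMRdMsr=%#" PRIx64 " cMsrs=%u\n",
                        pHostMsr->u32Msr, pHostMsr->u64Value, u64Msr, cMsrs);
                return;
            }
        }
    }
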
3124 uint64_t u64FSBase = ASMRdMsr(MSR_K8_FS_BASE);
3125 uint64_t u64GSBase = ASMRdMsr(MSR_K8_GS_BASE);
3174 rc = VMXWriteVmcs64(VMX_VMCS_HOST_SYSENTER_ESP, ASMRdMsr(MSR_IA32_SYSENTER_ESP));
3176 rc = VMXWriteVmcs64(VMX_VMCS_HOST_SYSENTER_EIP, ASMRdMsr(MSR_IA32_SYSENTER_EIP));
3189 rc = VMXWriteVmcs64(VMX_VMCS_HOST_SYSENTER_ESP, ASMRdMsr(MSR_IA32_SYSENTER_ESP));
3191 rc = VMXWriteVmcs64(VMX_VMCS_HOST_SYSENTER_EIP, ASMRdMsr(MSR_IA32_SYSENTER_EIP));
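
Hits 3124-3125 and 3174-3191 export MSR-backed host state into VMCS host-state
fields: the FS/GS base addresses (which on x86-64 live in MSRs rather than in the
segment descriptors) and the SYSENTER ESP/EIP pair. A sketch of the pattern; the
field encodings follow the Intel SDM's host-state table, and VMXWriteVmcs64 is
declared with the shape its use above implies (the real one returns a VBox status
code):

    #include <stdint.h>

    uint64_t ASMRdMsr(uint32_t uMsr);
    int      VMXWriteVmcs64(uint32_t uFieldEnc, uint64_t u64Value);  /* VMWRITE wrapper. */

    #define MSR_IA32_SYSENTER_ESP  0x175
    #define MSR_IA32_SYSENTER_EIP  0x176
    #define MSR_K8_FS_BASE         UINT32_C(0xc0000100)
    #define MSR_K8_GS_BASE         UINT32_C(0xc0000101)

    /* Natural-width VMCS host-state field encodings (Intel SDM, Appendix B). */
    #define VMX_VMCS_HOST_FS_BASE       0x6c06
    #define VMX_VMCS_HOST_GS_BASE       0x6c08
    #define VMX_VMCS_HOST_SYSENTER_ESP  0x6c10
    #define VMX_VMCS_HOST_SYSENTER_EIP  0x6c12

    /* Hypothetical helper: copy the MSR-backed host state into the VMCS. */
    static int exportHostMsrState(void)
    {
        int rc;
        rc = VMXWriteVmcs64(VMX_VMCS_HOST_FS_BASE,      ASMRdMsr(MSR_K8_FS_BASE));
        if (rc != 0) return rc;
        rc = VMXWriteVmcs64(VMX_VMCS_HOST_GS_BASE,      ASMRdMsr(MSR_K8_GS_BASE));
        if (rc != 0) return rc;
        rc = VMXWriteVmcs64(VMX_VMCS_HOST_SYSENTER_ESP, ASMRdMsr(MSR_IA32_SYSENTER_ESP));
        if (rc != 0) return rc;
        return VMXWriteVmcs64(VMX_VMCS_HOST_SYSENTER_EIP, ASMRdMsr(MSR_IA32_SYSENTER_EIP));
    }
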
5151 Log4(("MSR_K6_EFER            = %#RX64\n", ASMRdMsr(MSR_K6_EFER)));
5152 Log4(("MSR_K8_CSTAR           = %#RX64\n", ASMRdMsr(MSR_K8_CSTAR)));
5153 Log4(("MSR_K8_LSTAR           = %#RX64\n", ASMRdMsr(MSR_K8_LSTAR)));
5154 Log4(("MSR_K6_STAR            = %#RX64\n", ASMRdMsr(MSR_K6_STAR)));
5155 Log4(("MSR_K8_SF_MASK         = %#RX64\n", ASMRdMsr(MSR_K8_SF_MASK)));
5156 Log4(("MSR_K8_KERNEL_GS_BASE  = %#RX64\n", ASMRdMsr(MSR_K8_KERNEL_GS_BASE)));
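
Hits 5151-5156 are diagnostics only: on a failure path the current EFER and
syscall MSR values are dumped at log level 4. For reference, a reconstruction of
what an ASMRdMsr-style wrapper typically looks like on x86-64 with GCC inline
assembly (the real IPRT implementation lives in include/iprt/asm-amd64-x86.h;
this is an assumption-level sketch, not a copy):

    #include <stdint.h>

    /* RDMSR takes the MSR index in ECX and returns the value in EDX:EAX. */
    static inline uint64_t ASMRdMsr(uint32_t uRegister)
    {
        uint32_t uLow, uHigh;
        __asm__ __volatile__("rdmsr" : "=a" (uLow), "=d" (uHigh) : "c" (uRegister));
        return ((uint64_t)uHigh << 32) | uLow;
    }
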