Lines matching refs:cMsrs

1227  * @param   cMsrs       The number of MSRs.
1229 DECLINLINE(int) hmR0VmxSetAutoLoadStoreMsrCount(PVMCPU pVCpu, uint32_t cMsrs)
1233 if (RT_UNLIKELY(cMsrs > cMaxSupportedMsrs))
1235 LogRel(("CPU auto-load/store MSR count in VMCS exceeded cMsrs=%u Supported=%u.\n", cMsrs, cMaxSupportedMsrs));
1241 int rc = VMXWriteVmcs32(VMX_VMCS32_CTRL_ENTRY_MSR_LOAD_COUNT, cMsrs); AssertRCReturn(rc, rc);
1242 rc = VMXWriteVmcs32(VMX_VMCS32_CTRL_EXIT_MSR_STORE_COUNT, cMsrs); AssertRCReturn(rc, rc);
1245 rc = VMXWriteVmcs32(VMX_VMCS32_CTRL_EXIT_MSR_LOAD_COUNT, cMsrs); AssertRCReturn(rc, rc);
1248 pVCpu->hm.s.vmx.cMsrs = cMsrs;
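Context for the hits at 1227-1248: this is the helper that publishes a single logical count to all three VMCS count fields and caches it on the VCPU. Below is a minimal reconstruction from the matched lines; the cMaxSupportedMsrs derivation (IA32_VMX_MISC bits 27:25, per the Intel SDM: 512 * (N + 1) entries per area), the cached MISC MSR field name and the exact error status are assumptions not visible in the listing.

    DECLINLINE(int) hmR0VmxSetAutoLoadStoreMsrCount(PVMCPU pVCpu, uint32_t cMsrs)
    {
        /* Assumption: IA32_VMX_MISC bits 27:25 hold N; the CPU supports storing
           512 * (N + 1) MSRs per area. The field holding the cached MISC value
           may be named differently in this source revision. */
        uint64_t const uVmxMisc          = pVCpu->CTX_SUFF(pVM)->hm.s.vmx.Msrs.u64Misc;
        uint32_t const cMaxSupportedMsrs = 512 * (uint32_t)(((uVmxMisc >> 25) & 7) + 1);
        if (RT_UNLIKELY(cMsrs > cMaxSupportedMsrs))
        {
            LogRel(("CPU auto-load/store MSR count in VMCS exceeded cMsrs=%u Supported=%u.\n", cMsrs, cMaxSupportedMsrs));
            return VERR_HM_UNSUPPORTED_CPU_FEATURE_COMBO;   /* assumed status code */
        }

        /* One logical count, three VMCS fields: guest MSRs loaded on VM-entry and
           stored on VM-exit, plus host MSRs loaded on VM-exit. They must match. */
        int rc = VMXWriteVmcs32(VMX_VMCS32_CTRL_ENTRY_MSR_LOAD_COUNT, cMsrs);   AssertRCReturn(rc, rc);
        rc     = VMXWriteVmcs32(VMX_VMCS32_CTRL_EXIT_MSR_STORE_COUNT, cMsrs);   AssertRCReturn(rc, rc);
        rc     = VMXWriteVmcs32(VMX_VMCS32_CTRL_EXIT_MSR_LOAD_COUNT,  cMsrs);   AssertRCReturn(rc, rc);

        /* Cache the count so later code can consult it without a VMCS read. */
        pVCpu->hm.s.vmx.cMsrs = cMsrs;
        return VINF_SUCCESS;
    }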
1270 uint32_t cMsrs = pVCpu->hm.s.vmx.cMsrs;
1272 for (i = 0; i < cMsrs; i++)
1280 if (i == cMsrs)
1282 ++cMsrs;
1283 int rc = hmR0VmxSetAutoLoadStoreMsrCount(pVCpu, cMsrs);
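The hits at 1270-1283 are the add path: linear-scan the guest area for the MSR and grow the shared count only when it is absent. A sketch under those assumptions; the function name, its signature, and the paired host-entry update are inferred rather than shown in the listing.

    static int hmR0VmxAddAutoLoadStoreMsr(PVMCPU pVCpu, uint32_t uMsr, uint64_t uGuestMsrValue)
    {
        PVMXAUTOMSR pGuestMsr = (PVMXAUTOMSR)pVCpu->hm.s.vmx.pvGuestMsr;   /* assumed field name */
        uint32_t    cMsrs     = pVCpu->hm.s.vmx.cMsrs;
        uint32_t    i;

        /* Reuse the existing slot if the MSR is already in the area. */
        for (i = 0; i < cMsrs; i++)
            if (pGuestMsr[i].u32Msr == uMsr)
                break;

        if (i == cMsrs)     /* Not found: append a slot and republish all three counts. */
        {
            ++cMsrs;
            int rc = hmR0VmxSetAutoLoadStoreMsrCount(pVCpu, cMsrs);
            AssertRCReturn(rc, rc);

            /* Keep the host area paired: same MSR index at the same slot (assumption,
               implied by the removal path at 1347/1353 below). */
            PVMXAUTOMSR pHostMsr = (PVMXAUTOMSR)pVCpu->hm.s.vmx.pvHostMsr;
            pHostMsr[i].u32Msr      = uMsr;
            pHostMsr[i].u32Reserved = 0;
            pHostMsr[i].u64Value    = ASMRdMsr(uMsr);
        }

        pGuestMsr[i].u32Msr      = uMsr;
        pGuestMsr[i].u32Reserved = 0;
        pGuestMsr[i].u64Value    = uGuestMsrValue;
        return VINF_SUCCESS;
    }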
1332 uint32_t cMsrs = pVCpu->hm.s.vmx.cMsrs;
1333 for (uint32_t i = 0; i < cMsrs; i++)
1339 if (i == cMsrs - 1)
1341 --cMsrs;
1347 pLastGuestMsr += cMsrs - 1;
1353 pLastHostMsr += cMsrs - 1;
1356 --cMsrs;
1363 if (cMsrs != pVCpu->hm.s.vmx.cMsrs)
1365 int rc = hmR0VmxSetAutoLoadStoreMsrCount(pVCpu, cMsrs);
1372 Log4(("Removed MSR %#RX32 new cMsrs=%u\n", uMsr, pVCpu->hm.s.vmx.cMsrs));
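The hits at 1332-1372 are the removal path. Rather than shifting the whole array, the last guest/host pair is swapped into the vacated slot (or the count is simply decremented when the match is already last), and the VMCS counts are republished only if something was actually removed. A sketch; the listing walks with pointer arithmetic (pLastGuestMsr/pLastHostMsr), while indexing is used here for brevity, and the function name and return codes are assumptions.

    static int hmR0VmxRemoveAutoLoadStoreMsr(PVMCPU pVCpu, uint32_t uMsr)
    {
        PVMXAUTOMSR pGuestMsr = (PVMXAUTOMSR)pVCpu->hm.s.vmx.pvGuestMsr;
        PVMXAUTOMSR pHostMsr  = (PVMXAUTOMSR)pVCpu->hm.s.vmx.pvHostMsr;
        uint32_t    cMsrs     = pVCpu->hm.s.vmx.cMsrs;

        for (uint32_t i = 0; i < cMsrs; i++)
        {
            if (pGuestMsr[i].u32Msr != uMsr)
                continue;

            if (i == cMsrs - 1)
                --cMsrs;                            /* Last entry: just shrink the count. */
            else
            {
                /* Move the last guest/host pair into the hole, then shrink. */
                pGuestMsr[i] = pGuestMsr[cMsrs - 1];
                pHostMsr[i]  = pHostMsr[cMsrs - 1];
                --cMsrs;
            }
            break;
        }

        /* Republish the three VMCS counts only when an entry was actually removed. */
        if (cMsrs != pVCpu->hm.s.vmx.cMsrs)
        {
            int rc = hmR0VmxSetAutoLoadStoreMsrCount(pVCpu, cMsrs);
            AssertRCReturn(rc, rc);
            Log4(("Removed MSR %#RX32 new cMsrs=%u\n", uMsr, pVCpu->hm.s.vmx.cMsrs));
            return VINF_SUCCESS;
        }
        return VERR_NOT_FOUND;                      /* assumed status for "not in the area" */
    }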
1391 uint32_t cMsrs = pVCpu->hm.s.vmx.cMsrs;
1393 for (uint32_t i = 0; i < cMsrs; i++, pGuestMsr++)
1414 uint32_t cMsrs = pVCpu->hm.s.vmx.cMsrs;
1416 for (uint32_t i = 0; i < cMsrs; i++, pHostMsr++, pGuestMsr++)
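The loop headers at 1391-1416 walk the guest and host areas in lock-step; the second one is what refreshes the host-side values before re-entering the guest. A sketch of that refresh, assuming (inferred function name) each host slot mirrors its guest twin's MSR index and takes the live host value via ASMRdMsr so the CPU can restore it on VM-exit:

    static void hmR0VmxUpdateAutoLoadStoreHostMsrs(PVMCPU pVCpu)
    {
        PVMXAUTOMSR pHostMsr  = (PVMXAUTOMSR)pVCpu->hm.s.vmx.pvHostMsr;
        PVMXAUTOMSR pGuestMsr = (PVMXAUTOMSR)pVCpu->hm.s.vmx.pvGuestMsr;
        uint32_t    cMsrs     = pVCpu->hm.s.vmx.cMsrs;
        for (uint32_t i = 0; i < cMsrs; i++, pHostMsr++, pGuestMsr++)
        {
            pHostMsr->u32Msr   = pGuestMsr->u32Msr;          /* Same MSR index at the same slot. */
            pHostMsr->u64Value = ASMRdMsr(pHostMsr->u32Msr); /* Current host value, loaded back on VM-exit. */
        }
    }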
1663 uint32_t cMsrs;
1664 int rc = VMXReadVmcs32(VMX_VMCS32_CTRL_ENTRY_MSR_LOAD_COUNT, &cMsrs); AssertRC(rc);
1665 Assert(cMsrs == pVCpu->hm.s.vmx.cMsrs);
1667 rc = VMXReadVmcs32(VMX_VMCS32_CTRL_EXIT_MSR_STORE_COUNT, &cMsrs); AssertRC(rc);
1668 Assert(cMsrs == pVCpu->hm.s.vmx.cMsrs);
1670 rc = VMXReadVmcs32(VMX_VMCS32_CTRL_EXIT_MSR_LOAD_COUNT, &cMsrs); AssertRC(rc);
1671 Assert(cMsrs == pVCpu->hm.s.vmx.cMsrs);
1675 for (uint32_t i = 0; i < cMsrs; i++, pHostMsr++, pGuestMsr++)
1678 AssertMsgReturnVoid(pHostMsr->u32Msr == pGuestMsr->u32Msr, ("HostMsr=%#RX32 GuestMsr=%#RX32 cMsrs=%u\n", pHostMsr->u32Msr,
1679 pGuestMsr->u32Msr, cMsrs));
1682 AssertMsgReturnVoid(pHostMsr->u64Value == u64Msr, ("u32Msr=%#RX32 VMCS Value=%#RX64 ASMRdMsr=%#RX64 cMsrs=%u\n",
1683 pHostMsr->u32Msr, pHostMsr->u64Value, u64Msr, cMsrs));
1699 AssertMsgReturnVoid(enmRead == VMXMSREXIT_PASSTHRU_READ, ("u32Msr=%#RX32 cMsrs=%u No passthru read!\n",
1700 pGuestMsr->u32Msr, cMsrs));
1701 AssertMsgReturnVoid(enmWrite == VMXMSREXIT_PASSTHRU_WRITE, ("u32Msr=%#RX32 cMsrs=%u No passthru write!\n",
1702 pGuestMsr->u32Msr, cMsrs));
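The hits at 1663-1702 belong to a debug-build cross-check: the three VMCS counts must all equal the cached pVCpu->hm.s.vmx.cMsrs, each host slot must name the same MSR as its guest twin and hold the live host value, and every MSR in the area must be pass-through in the MSR bitmap (otherwise an intercepted access and the auto-loaded value could disagree). A condensed sketch; the wrapper and permission-query names are assumptions, while the assertion messages and VMXMSREXIT_* constants come from the fragments themselves.

    static void hmR0VmxCheckAutoLoadStoreMsrs(PVMCPU pVCpu)
    {
        /* All three VMCS count fields must agree with the cached count. */
        uint32_t cMsrs;
        int rc = VMXReadVmcs32(VMX_VMCS32_CTRL_ENTRY_MSR_LOAD_COUNT, &cMsrs);   AssertRC(rc);
        Assert(cMsrs == pVCpu->hm.s.vmx.cMsrs);
        rc = VMXReadVmcs32(VMX_VMCS32_CTRL_EXIT_MSR_STORE_COUNT, &cMsrs);       AssertRC(rc);
        Assert(cMsrs == pVCpu->hm.s.vmx.cMsrs);
        rc = VMXReadVmcs32(VMX_VMCS32_CTRL_EXIT_MSR_LOAD_COUNT, &cMsrs);        AssertRC(rc);
        Assert(cMsrs == pVCpu->hm.s.vmx.cMsrs);

        PVMXAUTOMSR pHostMsr  = (PVMXAUTOMSR)pVCpu->hm.s.vmx.pvHostMsr;
        PVMXAUTOMSR pGuestMsr = (PVMXAUTOMSR)pVCpu->hm.s.vmx.pvGuestMsr;
        for (uint32_t i = 0; i < cMsrs; i++, pHostMsr++, pGuestMsr++)
        {
            /* Paired areas: slot i must name the same MSR on both sides. */
            AssertMsgReturnVoid(pHostMsr->u32Msr == pGuestMsr->u32Msr,
                                ("HostMsr=%#RX32 GuestMsr=%#RX32 cMsrs=%u\n", pHostMsr->u32Msr,
                                 pGuestMsr->u32Msr, cMsrs));

            /* The host slot must hold the value the CPU would read right now. */
            uint64_t u64Msr = ASMRdMsr(pHostMsr->u32Msr);
            AssertMsgReturnVoid(pHostMsr->u64Value == u64Msr,
                                ("u32Msr=%#RX32 VMCS Value=%#RX64 ASMRdMsr=%#RX64 cMsrs=%u\n",
                                 pHostMsr->u32Msr, pHostMsr->u64Value, u64Msr, cMsrs));

            /* Auto-loaded/stored MSRs must not also cause MSR exits
               (hmR0VmxGetMsrPermission is an assumed helper name). */
            VMXMSREXITREAD  enmRead;
            VMXMSREXITWRITE enmWrite;
            hmR0VmxGetMsrPermission(pVCpu, pGuestMsr->u32Msr, &enmRead, &enmWrite);
            AssertMsgReturnVoid(enmRead == VMXMSREXIT_PASSTHRU_READ,
                                ("u32Msr=%#RX32 cMsrs=%u No passthru read!\n",  pGuestMsr->u32Msr, cMsrs));
            AssertMsgReturnVoid(enmWrite == VMXMSREXIT_PASSTHRU_WRITE,
                                ("u32Msr=%#RX32 cMsrs=%u No passthru write!\n", pGuestMsr->u32Msr, cMsrs));
        }
    }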
4779 for (uint32_t i = 0; i < pVCpu->hm.s.vmx.cMsrs; i++, pMsr++)
4832 Log4(("Load[%RU32]: MSR[--]: u32Msr=%#RX32 u64Value=%#RX64 cMsrs=%u\n", pVCpu->idCpu, MSR_K6_EFER,
4833 pMixedCtx->msrEFER, pVCpu->hm.s.vmx.cMsrs));
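The hits at 4779-4833 come from the guest-MSR load path. The Log4 at 4832 suggests that when EFER has to travel through the auto-load/store area (rather than a dedicated VM-entry load-EFER control), it is appended with the same add helper. A hedged fragment, reusing the add routine sketched earlier; the surrounding condition for taking this path is an assumption.

    /* Assumption: this branch is taken when EFER must go through the shared area;
       hmR0VmxAddAutoLoadStoreMsr is the inferred add helper sketched above. */
    int rc = hmR0VmxAddAutoLoadStoreMsr(pVCpu, MSR_K6_EFER, pMixedCtx->msrEFER);
    AssertRCReturn(rc, rc);
    Log4(("Load[%RU32]: MSR[--]: u32Msr=%#RX32 u64Value=%#RX64 cMsrs=%u\n", pVCpu->idCpu, MSR_K6_EFER,
          pMixedCtx->msrEFER, pVCpu->hm.s.vmx.cMsrs));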
6323 uint32_t cMsrs = pVCpu->hm.s.vmx.cMsrs;
6324 Log4(("hmR0VmxSaveGuestAutoLoadStoreMsrs: cMsrs=%u\n", cMsrs));
6325 for (uint32_t i = 0; i < cMsrs; i++, pMsr++)
6339 AssertMsgFailed(("Unexpected MSR in auto-load/store area. uMsr=%#RX32 cMsrs=%u\n", pMsr->u32Msr, cMsrs));
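The hits at 6323-6339 are the inverse direction: after a VM-exit the CPU has stored the guest's current values into the area, and this loop copies them back into the guest CPU context. Which MSRs appear in the switch is an assumption (the usual 64-bit syscall set); the function name comes from the Log4 string at 6324, and the default-case assert is taken verbatim from the listing.

    static void hmR0VmxSaveGuestAutoLoadStoreMsrs(PVMCPU pVCpu, PCPUMCTX pMixedCtx)   /* assumed signature */
    {
        PVMXAUTOMSR pMsr  = (PVMXAUTOMSR)pVCpu->hm.s.vmx.pvGuestMsr;
        uint32_t    cMsrs = pVCpu->hm.s.vmx.cMsrs;
        Log4(("hmR0VmxSaveGuestAutoLoadStoreMsrs: cMsrs=%u\n", cMsrs));
        for (uint32_t i = 0; i < cMsrs; i++, pMsr++)
        {
            switch (pMsr->u32Msr)   /* Assumed MSR set; anything else is a bug. */
            {
                case MSR_K8_LSTAR:          pMixedCtx->msrLSTAR        = pMsr->u64Value;    break;
                case MSR_K6_STAR:           pMixedCtx->msrSTAR         = pMsr->u64Value;    break;
                case MSR_K8_SF_MASK:        pMixedCtx->msrSFMASK       = pMsr->u64Value;    break;
                case MSR_K8_KERNEL_GS_BASE: pMixedCtx->msrKERNELGSBASE = pMsr->u64Value;    break;
                default:
                    AssertMsgFailed(("Unexpected MSR in auto-load/store area. uMsr=%#RX32 cMsrs=%u\n", pMsr->u32Msr, cMsrs));
                    break;
            }
        }
    }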
8709 && pVCpu->hm.s.vmx.cMsrs > 0)
8795 Assert(!pVCpu->hm.s.vmx.cMsrs || pVCpu->hm.s.vmx.fUpdatedHostMsrs);