Lines Matching refs:cpum

38 #include <VBox/vmm/cpum.h>
560 pVM->aCpus[i].cpum.s.fUseFlags |= CPUM_USE_FFXSR_LEAKY;
581 AssertCompileMemberAlignment(VM, cpum.s, 32);
582 AssertCompile(sizeof(pVM->cpum.s) <= sizeof(pVM->cpum.padding));
586 AssertCompileMemberAlignment(VM, cpum, 64);
588 AssertCompileMemberAlignment(VMCPU, cpum.s, 64);
589 AssertCompileMemberSizeAlignment(VM, aCpus[0].cpum.s, 64);
600 pVM->cpum.s.offCPUMCPU0 = RT_OFFSETOF(VM, aCpus[0].cpum) - RT_OFFSETOF(VM, cpum);
601 Assert((uintptr_t)&pVM->cpum + pVM->cpum.s.offCPUMCPU0 == (uintptr_t)&pVM->aCpus[0].cpum);
609 pVCpu->cpum.s.offCPUM = RT_OFFSETOF(VM, aCpus[i].cpum) - RT_OFFSETOF(VM, cpum);
610 Assert((uintptr_t)&pVCpu->cpum - pVCpu->cpum.s.offCPUM == (uintptr_t)&pVM->cpum);
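The asserts at lines 601 and 610 document the offset relationship recorded at lines 600 and 609: from the shared CPUM data one can reach the first per-VCPU CPUMCPU instance, and from any CPUMCPU one can reach back to the shared CPUM. A minimal sketch of that arithmetic (not part of the listing; the PCPUM/PCPUMCPU casts are illustrative only):

    /* Sketch only: how the offsets set up at lines 600/609 are intended to be used. */
    PCPUMCPU pCpumCpu0 = (PCPUMCPU)((uintptr_t)&pVM->cpum   + pVM->cpum.s.offCPUMCPU0); /* VM side  -> aCpus[0] side */
    PCPUM    pCpum     = (PCPUM)   ((uintptr_t)&pVCpu->cpum - pVCpu->cpum.s.offCPUM);   /* VCPU side -> VM side      */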
627 rc = cpumR3CpuIdExplodeFeatures(paLeaves, cLeaves, &pVM->cpum.s.HostFeatures);
630 pVM->cpum.s.GuestFeatures.enmCpuVendor = pVM->cpum.s.HostFeatures.enmCpuVendor;
635 if (!pVM->cpum.s.HostFeatures.fFxSaveRstor)
637 if (!pVM->cpum.s.HostFeatures.fMmx)
639 if (!pVM->cpum.s.HostFeatures.fTsc)
645 pVM->cpum.s.CR4.AndMask = X86_CR4_OSXMMEEXCPT | X86_CR4_PVI | X86_CR4_VME;
646 pVM->cpum.s.CR4.OrMask = X86_CR4_OSFXSR;
652 if ( pVM->cpum.s.HostFeatures.fXSaveRstor
653 && pVM->cpum.s.HostFeatures.fOpSysXSaveRstor)
660 pVM->cpum.s.fXStateHostMask = fXStateHostMask;
663 LogRel(("CPUM: fXStateHostMask=%#llx; initial: %#llx\n", pVM->cpum.s.fXStateHostMask, fXStateHostMask));
668 uint32_t cbMaxXState = pVM->cpum.s.HostFeatures.cbMaxExtendedState;
681 pVCpu->cpum.s.Guest.pXStateR3 = (PX86XSAVEAREA)pbXStates;
682 pVCpu->cpum.s.Guest.pXStateR0 = MMHyperR3ToR0(pVM, pbXStates);
683 pVCpu->cpum.s.Guest.pXStateRC = MMHyperR3ToR0(pVM, pbXStates);
686 pVCpu->cpum.s.Host.pXStateR3 = (PX86XSAVEAREA)pbXStates;
687 pVCpu->cpum.s.Host.pXStateR0 = MMHyperR3ToR0(pVM, pbXStates);
688 pVCpu->cpum.s.Host.pXStateRC = MMHyperR3ToR0(pVM, pbXStates);
691 pVCpu->cpum.s.Hyper.pXStateR3 = (PX86XSAVEAREA)pbXStates;
692 pVCpu->cpum.s.Hyper.pXStateR0 = MMHyperR3ToR0(pVM, pbXStates);
693 pVCpu->cpum.s.Hyper.pXStateRC = MMHyperR3ToR0(pVM, pbXStates);
696 pVCpu->cpum.s.Host.fXStateMask = fXStateHostMask;
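Lines 668 through 696 belong to the per-VCPU extended-state (XSAVE area) setup: each vCPU gets three save areas (Guest, Host, Hyper), and each area is published as a ring-3, ring-0 and raw-mode address. A hedged sketch of that pattern, assuming a single hyper-heap allocation of 3 * cbMaxXState bytes per vCPU (the allocation call and the pbXStates advance are assumptions, not shown in the listing):

    /* Sketch only; cbMaxXState and pbXStates mirror names in the listing,
       the MMHyperAlloc call, alignment and tag are assumptions. */
    uint32_t cbMaxXState = pVM->cpum.s.HostFeatures.cbMaxExtendedState;
    uint8_t *pbXStates   = NULL;
    int rc = MMHyperAlloc(pVM, (size_t)cbMaxXState * 3 * pVM->cCpus, PAGE_SIZE, MM_TAG_CPUM_CTX, (void **)&pbXStates);
    if (RT_SUCCESS(rc))
        for (VMCPUID i = 0; i < pVM->cCpus; i++)
        {
            PVMCPU pVCpu = &pVM->aCpus[i];

            pVCpu->cpum.s.Guest.pXStateR3 = (PX86XSAVEAREA)pbXStates;      /* ring-3 address */
            pVCpu->cpum.s.Guest.pXStateR0 = MMHyperR3ToR0(pVM, pbXStates); /* ring-0 address */
            pbXStates += cbMaxXState;                                      /* next save area */
            /* ... the Host and Hyper contexts are set up the same way,
               and the RC addresses are refreshed later (see lines 762-764). */
        }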
706 rc = SSMR3RegisterInternal(pVM, "cpum", 1, CPUM_SAVED_STATE_VERSION, sizeof(CPUM),
716 DBGFR3InfoRegisterInternal(pVM, "cpum", "Displays all the cpu states.", &cpumR3InfoAll);
756 pVM->cpum.s.GuestInfo.paMsrRangesRC = MMHyperR3ToRC(pVM, pVM->cpum.s.GuestInfo.paMsrRangesR3);
757 pVM->cpum.s.GuestInfo.paCpuIdLeavesRC = MMHyperR3ToRC(pVM, pVM->cpum.s.GuestInfo.paCpuIdLeavesR3);
762 pVCpu->cpum.s.Guest.pXStateRC = MMHyperR3ToRC(pVM, pVCpu->cpum.s.Guest.pXStateR3);
763 pVCpu->cpum.s.Host.pXStateRC = MMHyperR3ToRC(pVM, pVCpu->cpum.s.Host.pXStateR3);
764 pVCpu->cpum.s.Hyper.pXStateRC = MMHyperR3ToRC(pVM, pVCpu->cpum.s.Hyper.pXStateR3); /** @todo remove me */
790 Assert( (pVM->cpum.s.aGuestCpuIdPatmStd[4].uEax & UINT32_C(0xffffc000)) == 0
791 || pVM->cpum.s.aGuestCpuIdPatmStd[0].uEax < 0x4);
792 pVM->cpum.s.aGuestCpuIdPatmStd[4].uEax &= UINT32_C(0x00003fff);
813 memset(pVCpu->cpum.s.aMagic, 0, sizeof(pVCpu->cpum.s.aMagic));
814 pVCpu->cpum.s.uMagic = 0;
837 PCPUMCTX pCtx = &pVCpu->cpum.s.Guest;
842 uint32_t fUseFlags = pVCpu->cpum.s.fUseFlags & ~CPUM_USED_FPU_SINCE_REM;
848 pVCpu->cpum.s.fUseFlags = fUseFlags;
920 if (pVM->cpum.s.HostFeatures.cbMaxExtendedState >= RT_OFFSETOF(X86XSAVEAREA, Hdr))
942 pVCpu->cpum.s.GuestMsrs.msr.MiscEnable = MSR_IA32_MISC_ENABLE_BTS_UNAVAIL
944 | (pVM->cpum.s.GuestFeatures.fMonitorMWait ? MSR_IA32_MISC_ENABLE_MONITOR : 0)
948 pRange->fWrGpMask &= ~pVCpu->cpum.s.GuestMsrs.msr.MiscEnable;
963 pVCpu->cpum.s.GuestMsrs.msr.PkgCStateCfgCtrl = 1 /*C1*/ | RT_BIT_32(25) | RT_BIT_32(26) | RT_BIT_32(27) | RT_BIT_32(28);
987 PCPUMCTX pCtx = &pVM->aCpus[i].cpum.s.Guest;
990 strcpy((char *)pVM->aCpus[i].cpum.s.aMagic, "CPUMCPU Magic");
991 pVM->aCpus[i].cpum.s.uMagic = UINT64_C(0xDEADBEEFDEADBEEF);
1031 SSMR3PutStructEx(pSSM, &pVCpu->cpum.s.Hyper.pXStateR3->x87, sizeof(*pVCpu->cpum.s.Hyper.pXStateR3),
1033 SSMR3PutStructEx(pSSM, &pVCpu->cpum.s.Hyper, sizeof(pVCpu->cpum.s.Hyper),
1038 SSMR3PutU32(pSSM, sizeof(pVM->aCpus[0].cpum.s.GuestMsrs.msr));
1043 SSMR3PutStructEx(pSSM, &pVCpu->cpum.s.Guest.pXStateR3->x87, sizeof(*pVCpu->cpum.s.Guest.pXStateR3),
1045 SSMR3PutStructEx(pSSM, &pVCpu->cpum.s.Guest, sizeof(pVCpu->cpum.s.Guest),
1047 SSMR3PutU32(pSSM, pVCpu->cpum.s.fUseFlags);
1048 SSMR3PutU32(pSSM, pVCpu->cpum.s.fChanged);
1049 AssertCompileSizeAlignment(pVCpu->cpum.s.GuestMsrs.msr, sizeof(uint64_t));
1050 SSMR3PutMem(pSSM, &pVCpu->cpum.s.GuestMsrs, sizeof(pVCpu->cpum.s.GuestMsrs.msr));
1064 pVM->cpum.s.fPendingRestore = true;
1123 uint64_t uCR3 = pVCpu->cpum.s.Hyper.cr3;
1124 uint64_t uRSP = pVCpu->cpum.s.Hyper.rsp; /* see VMMR3Relocate(). */
1126 SSMR3GetStructEx(pSSM, &pVCpu->cpum.s.Hyper.pXStateR3->x87, sizeof(pVCpu->cpum.s.Hyper.pXStateR3->x87),
1128 SSMR3GetStructEx(pSSM, &pVCpu->cpum.s.Hyper, sizeof(pVCpu->cpum.s.Hyper),
1130 pVCpu->cpum.s.Hyper.cr3 = uCR3;
1131 pVCpu->cpum.s.Hyper.rsp = uRSP;
1159 SSMR3GetStructEx(pSSM, &pVCpu->cpum.s.Guest.pXStateR3->x87, sizeof(pVCpu->cpum.s.Guest.pXStateR3->x87),
1161 SSMR3GetStructEx(pSSM, &pVCpu->cpum.s.Guest, sizeof(pVCpu->cpum.s.Guest),
1163 SSMR3GetU32(pSSM, &pVCpu->cpum.s.fUseFlags);
1164 SSMR3GetU32(pSSM, &pVCpu->cpum.s.fChanged);
1166 SSMR3GetMem(pSSM, &pVCpu->cpum.s.GuestMsrs.au64[0], cbMsrs);
1169 SSMR3GetMem(pSSM, &pVCpu->cpum.s.GuestMsrs.au64[0], 2 * sizeof(uint64_t)); /* Restore two MSRs. */
1175 pVCpu->cpum.s.Guest.dr[6] &= ~(X86_DR6_RAZ_MASK | X86_DR6_MBZ_MASK);
1176 pVCpu->cpum.s.Guest.dr[6] |= X86_DR6_RA1_MASK;
1177 pVCpu->cpum.s.Guest.dr[7] &= ~(X86_DR7_RAZ_MASK | X86_DR7_MBZ_MASK);
1178 pVCpu->cpum.s.Guest.dr[7] |= X86_DR7_RA1_MASK;
1190 && !(pVCpu->cpum.s.fChanged & CPUM_CHANGED_HIDDEN_SEL_REGS_INVALID));
1191 PCPUMSELREG paSelReg = CPUMCTX_FIRST_SREG(&pVCpu->cpum.s.Guest);
1200 pVCpu->cpum.s.Guest.ldtr.fFlags = CPUMSELREG_FLAGS_VALID;
1201 pVCpu->cpum.s.Guest.ldtr.ValidSel = pVCpu->cpum.s.Guest.ldtr.Sel;
1214 pVCpu->cpum.s.Guest.ldtr.fFlags = CPUMSELREG_FLAGS_VALID;
1215 pVCpu->cpum.s.Guest.ldtr.ValidSel = pVCpu->cpum.s.Guest.ldtr.Sel;
1217 pVCpu->cpum.s.Guest.tr.fFlags = CPUMSELREG_FLAGS_VALID;
1218 pVCpu->cpum.s.Guest.tr.ValidSel = pVCpu->cpum.s.Guest.tr.Sel;
1226 pVM->aCpus[iCpu].cpum.s.fChanged &= CPUM_CHANGED_HIDDEN_SEL_REGS_INVALID;
1234 AssertLogRelReturn(!(pVCpu->cpum.s.Guest.es.fFlags & ~CPUMSELREG_FLAGS_VALID_MASK), VERR_SSM_UNEXPECTED_DATA);
1235 AssertLogRelReturn(!(pVCpu->cpum.s.Guest.cs.fFlags & ~CPUMSELREG_FLAGS_VALID_MASK), VERR_SSM_UNEXPECTED_DATA);
1236 AssertLogRelReturn(!(pVCpu->cpum.s.Guest.ss.fFlags & ~CPUMSELREG_FLAGS_VALID_MASK), VERR_SSM_UNEXPECTED_DATA);
1237 AssertLogRelReturn(!(pVCpu->cpum.s.Guest.ds.fFlags & ~CPUMSELREG_FLAGS_VALID_MASK), VERR_SSM_UNEXPECTED_DATA);
1238 AssertLogRelReturn(!(pVCpu->cpum.s.Guest.fs.fFlags & ~CPUMSELREG_FLAGS_VALID_MASK), VERR_SSM_UNEXPECTED_DATA);
1239 AssertLogRelReturn(!(pVCpu->cpum.s.Guest.gs.fFlags & ~CPUMSELREG_FLAGS_VALID_MASK), VERR_SSM_UNEXPECTED_DATA);
1243 pVM->cpum.s.fPendingRestore = false;
1262 if (cElements > RT_ELEMENTS(pVM->cpum.s.aGuestCpuIdPatmStd))
1264 SSMR3GetMem(pSSM, &pVM->cpum.s.aGuestCpuIdPatmStd[0], cElements*sizeof(pVM->cpum.s.aGuestCpuIdPatmStd[0]));
1267 if (cElements != RT_ELEMENTS(pVM->cpum.s.aGuestCpuIdPatmExt))
1269 SSMR3GetMem(pSSM, &pVM->cpum.s.aGuestCpuIdPatmExt[0], sizeof(pVM->cpum.s.aGuestCpuIdPatmExt));
1272 if (cElements != RT_ELEMENTS(pVM->cpum.s.aGuestCpuIdPatmCentaur))
1274 SSMR3GetMem(pSSM, &pVM->cpum.s.aGuestCpuIdPatmCentaur[0], sizeof(pVM->cpum.s.aGuestCpuIdPatmCentaur));
1276 SSMR3GetMem(pSSM, &pVM->cpum.s.GuestInfo.DefCpuId, sizeof(pVM->cpum.s.GuestInfo.DefCpuId));
1328 AssertRelease(!(pVM->cpum.s.aGuestCpuIdPatmStd[1].uEcx &
1374 if (pVM->cpum.s.fPendingRestore)
1386 PGMNotifyNxeChanged(pVCpu, RT_BOOL(pVCpu->cpum.s.Guest.msrEFER & MSR_K6_EFER_NXE));
1389 PDMApicGetBase(pVCpu, &pVCpu->cpum.s.Guest.msrApicBase);
1393 pVCpu->cpum.s.fUseFlags |= CPUM_USE_SUPPORTS_LONGMODE;
1407 return pVM->cpum.s.fPendingRestore;
1679 * Display all cpu states and any other cpum info.
1755 PCPUMCTX pCtx = &pVCpu->cpum.s.Guest;
1799 cpumR3InfoOne(pVM, &pVCpu->cpum.s.Hyper, CPUMCTX2CORE(&pVCpu->cpum.s.Hyper), pHlp, enmType, ".");
1800 pHlp->pfnPrintf(pHlp, "CR4OrMask=%#x CR4AndMask=%#x\n", pVM->cpum.s.CR4.OrMask, pVM->cpum.s.CR4.AndMask);
1822 PCPUMHOSTCTX pCtx = &pVM->aCpus[0].cpum.s.Host;
2115 pVM->cpum.s.CR4.OrMask &= fAnd;
2116 pVM->cpum.s.CR4.OrMask |= fOr;
2133 Assert(!pVCpu->cpum.s.fRawEntered);
2134 Assert(!pVCpu->cpum.s.fRemEntered);
2144 uint32_t fFlags = pVCpu->cpum.s.fChanged;
2145 pVCpu->cpum.s.fChanged = 0;
2148 if (pVCpu->cpum.s.fUseFlags & CPUM_USED_FPU_SINCE_REM)
2151 pVCpu->cpum.s.fUseFlags &= ~CPUM_USED_FPU_SINCE_REM;
2154 pVCpu->cpum.s.fRemEntered = true;
2168 Assert(!pVCpu->cpum.s.fRawEntered);
2169 Assert(pVCpu->cpum.s.fRemEntered);
2171 pVCpu->cpum.s.fRemEntered = false;
2193 PDMApicGetBase(pVCpu, &pVCpu->cpum.s.Guest.msrApicBase);
2194 Log(("CPUMR3InitCompleted pVM=%p APIC base[%u]=%RX64\n", pVM, (unsigned)i, pVCpu->cpum.s.Guest.msrApicBase));
2198 pVCpu->cpum.s.fUseFlags |= CPUM_USE_SUPPORTS_LONGMODE;