Lines Matching refs:tm

23 #include <VBox/vmm/tm.h>
51 PVM pVM = RT_FROM_MEMBER(pData, VM, CTX_SUFF(tm.s.VirtualGetRawData));
72 PVM pVM = RT_FROM_MEMBER(pData, VM, CTX_SUFF(tm.s.VirtualGetRawData));
133 ASMAtomicWritePtr((void * volatile *)&CTX_SUFF(pVM->tm.s.pfnVirtualGetRaw), (void *)(uintptr_t)pfnWorker);
143 PVM pVM = RT_FROM_MEMBER(pData, VM, CTX_SUFF(tm.s.VirtualGetRawData));
157 uint64_t u64 = CTXALLSUFF(pVM->tm.s.pfnVirtualGetRaw)(&CTXALLSUFF(pVM->tm.s.VirtualGetRawData));
159 uint32_t cPrevSteps = pVM->tm.s.CTX_SUFF(VirtualGetRawData).c1nsSteps;
160 uint64_t u64 = pVM->tm.s.CTX_SUFF(pfnVirtualGetRaw)(&pVM->tm.s.CTX_SUFF(VirtualGetRawData));
161 if (cPrevSteps != pVM->tm.s.CTX_SUFF(VirtualGetRawData).c1nsSteps)
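
Lines 159-161 read the raw nanosecond timestamp through a per-context function pointer and compare the data block's c1nsSteps counter before and after the call; that is how the caller notices the worker had to fall back to 1ns monotonicity steps. A minimal stand-alone sketch of that before/after-counter pattern follows; the struct, worker and names are invented for illustration and are not the VMM types.

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical stand-in for the per-context raw-timestamp data block. */
    typedef struct RAWDATA
    {
        uint64_t u64Prev;     /* last value returned */
        uint32_t c1nsSteps;   /* bumped whenever a 1ns monotonicity step was needed */
    } RAWDATA;

    /* Hypothetical worker: returns a monotonic reading, stepping by 1ns if the
       underlying source appears to have gone backwards. */
    static uint64_t getRawNanoTS(RAWDATA *pData, uint64_t u64Source)
    {
        if (u64Source <= pData->u64Prev)
        {
            u64Source = pData->u64Prev + 1;   /* 1ns step to stay monotonic */
            pData->c1nsSteps++;
        }
        pData->u64Prev = u64Source;
        return u64Source;
    }

    int main(void)
    {
        RAWDATA Data = { 0, 0 };
        uint64_t (*pfnGetRaw)(RAWDATA *, uint64_t) = getRawNanoTS;

        getRawNanoTS(&Data, 1000);                  /* prime the previous value */

        uint32_t cPrevSteps = Data.c1nsSteps;       /* snapshot before the call */
        uint64_t u64 = pfnGetRaw(&Data, 990);       /* source went backwards... */
        if (cPrevSteps != Data.c1nsSteps)           /* ...so a step was applied */
            printf("1ns step applied, now %llu\n", (unsigned long long)u64);
        return 0;
    }
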
182 u64 -= pVM->tm.s.u64VirtualWarpDriveStart;
183 u64 *= pVM->tm.s.u32VirtualWarpDrivePercentage;
185 u64 += pVM->tm.s.u64VirtualWarpDriveStart;
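
Lines 182-185 are the warp-drive scaling of the virtual clock: subtract the time at which the warp drive was engaged, scale the elapsed time by the percentage (presumably with a divide by 100 on the unmatched line 184), then add the start time back so the clock stays continuous at the switch-over. A self-contained sketch of that arithmetic with invented names:

    #include <stdint.h>
    #include <stdio.h>

    /* Scale the time elapsed since the warp-drive start point by a percentage,
       then re-add the start point so the clock stays continuous at the switch. */
    static uint64_t warpTime(uint64_t u64Raw, uint64_t u64WarpStart, uint32_t u32WarpPct)
    {
        uint64_t u64 = u64Raw;
        u64 -= u64WarpStart;     /* time elapsed since warp drive was engaged */
        u64 *= u32WarpPct;       /* scale it: 200 = double speed, 50 = half speed */
        u64 /= 100;
        u64 += u64WarpStart;     /* translate back to the original time base */
        return u64;
    }

    int main(void)
    {
        /* 1s of raw time after the warp start, at 200%, reads as 2s of virtual time. */
        printf("%llu\n", (unsigned long long)warpTime(2000000000, 1000000000, 200));
        return 0;
    }
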
192 u64 -= pVM->tm.s.u64VirtualOffset;
205 if (RT_LIKELY(!pVM->tm.s.fVirtualWarpDrive))
206 return tmVirtualGetRawNanoTS(pVM) - pVM->tm.s.u64VirtualOffset;
217 if (RT_LIKELY(pVM->tm.s.cVirtualTicking))
219 STAM_COUNTER_INC(&pVM->tm.s.StatVirtualGet);
227 PVMCPU pVCpuDst = &pVM->aCpus[pVM->tm.s.idTimerCpu];
229 && !pVM->tm.s.fRunningQueues
230 && ( pVM->tm.s.CTX_SUFF(paTimerQueues)[TMCLOCK_VIRTUAL].u64Expire <= u64
231 || ( pVM->tm.s.fVirtualSyncTicking
232 && pVM->tm.s.CTX_SUFF(paTimerQueues)[TMCLOCK_VIRTUAL_SYNC].u64Expire <= u64 - pVM->tm.s.offVirtualSync
235 && !pVM->tm.s.fRunningQueues
238 STAM_COUNTER_INC(&pVM->tm.s.StatVirtualGetSetFF);
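
Lines 217-238 show the pattern used by the virtual-clock getter: after reading the time it peeks at the head expiry of the VIRTUAL and VIRTUAL_SYNC timer queues and, if something is due and the queues are not already being run, raises the timer force-flag on the CPU that drives the timers. A simplified sketch of that check, with hypothetical types standing in for the queues and the force-flag:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical stand-ins for the timer queues and the per-VM timer state. */
    typedef struct TIMERQUEUE { uint64_t u64Expire; } TIMERQUEUE;
    typedef struct TIMERSTATE
    {
        TIMERQUEUE Virtual;          /* TMCLOCK_VIRTUAL queue head expiry */
        TIMERQUEUE VirtualSync;      /* TMCLOCK_VIRTUAL_SYNC queue head expiry */
        uint64_t   offVirtualSync;   /* current lag of the sync clock behind virtual */
        bool       fVirtualSyncTicking;
        bool       fRunningQueues;   /* set while the timer thread is already at work */
        bool       fTimerFF;         /* stand-in for the per-VCPU timer force-flag */
    } TIMERSTATE;

    /* Raise the timer force-flag if a queue head has expired and nobody is
       already running the queues. */
    static void checkForExpiredTimers(TIMERSTATE *pState, uint64_t u64Now)
    {
        if (   !pState->fTimerFF
            && !pState->fRunningQueues
            && (   pState->Virtual.u64Expire <= u64Now
                || (   pState->fVirtualSyncTicking
                    && pState->VirtualSync.u64Expire <= u64Now - pState->offVirtualSync)))
            pState->fTimerFF = true;
    }

    int main(void)
    {
        TIMERSTATE State = { { 150 }, { 500 }, 20, true, false, false };
        checkForExpiredTimers(&State, 200);       /* virtual queue head (150) is due */
        printf("timer FF: %d\n", State.fTimerFF);
        return 0;
    }
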
251 u64 = pVM->tm.s.u64Virtual;
299 if (RT_UNLIKELY(pVM->tm.s.fVirtualWarpDrive))
300 return ASMMultU64ByU32DivByU32(cVirtTicksToDeadline, 100, pVM->tm.s.u32VirtualWarpDrivePercentage);
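
Line 300 converts a number of virtual ticks back to unwarped time by multiplying by 100 and dividing by the warp percentage; ASMMultU64ByU32DivByU32 performs the 64x32/32 multiply-divide without losing the high bits of the product. A sketch of the same operation using a 128-bit intermediate (GCC/Clang's `unsigned __int128`; the helper name is invented):

    #include <stdint.h>
    #include <stdio.h>

    /* 64x32/32 multiply-divide with a 128-bit intermediate so the product
       cannot overflow (same contract as the helper called on line 300). */
    static uint64_t mulDivU64ByU32(uint64_t u64A, uint32_t u32Mul, uint32_t u32Div)
    {
        return (uint64_t)(((unsigned __int128)u64A * u32Mul) / u32Div);
    }

    int main(void)
    {
        /* With a 200% warp drive, 2 virtual seconds pass in 1 second of real time. */
        uint64_t cVirtTicksToDeadline = UINT64_C(2000000000);
        uint32_t uWarpPct             = 200;
        printf("%llu\n", (unsigned long long)mulDivU64ByU32(cVirtTicksToDeadline, 100, uWarpPct));
        return 0;
    }
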
318 STAM_COUNTER_INC(&pVM->tm.s.StatVirtualSyncGetLocked);
326 const uint64_t u64Prev = pVM->tm.s.u64VirtualSyncCatchUpPrev;
330 uint64_t u64Sub = ASMMultU64ByU32DivByU32(u64Delta, pVM->tm.s.u32VirtualSyncCatchUpPercentage, 100);
331 if (off > u64Sub + pVM->tm.s.offVirtualSyncGivenUp)
334 Log4(("TM: %'RU64/-%'8RU64: sub %RU32 [vsghcul]\n", u64 - off, off - pVM->tm.s.offVirtualSyncGivenUp, u64Sub));
339 STAM_PROFILE_ADV_STOP(&pVM->tm.s.StatVirtualSyncCatchup, c);
340 off = pVM->tm.s.offVirtualSyncGivenUp;
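
Lines 326-340 are the catch-up step of the virtual sync clock: the lag (off) is reduced by a percentage of the virtual time that has passed since the previous catch-up sample, and once the lag would drop to the given-up floor (offVirtualSyncGivenUp) the catch-up mode is switched off. A stand-alone sketch of that arithmetic with invented names and no locking:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    typedef struct CATCHUPSTATE
    {
        uint64_t offVirtualSync;           /* current lag behind the virtual clock */
        uint64_t offVirtualSyncGivenUp;    /* lag we have given up ever catching up */
        uint64_t u64CatchUpPrev;           /* virtual time of the previous sample */
        uint32_t u32CatchUpPct;            /* extra speed, e.g. 25 = run at 125% */
        bool     fCatchUp;
    } CATCHUPSTATE;

    /* Reduce the lag by u32CatchUpPct percent of the elapsed virtual time,
       stopping the catch-up once only the given-up portion of the lag remains. */
    static uint64_t catchUpStep(CATCHUPSTATE *pState, uint64_t u64Now)
    {
        uint64_t off      = pState->offVirtualSync;
        uint64_t u64Delta = u64Now - pState->u64CatchUpPrev;
        uint64_t u64Sub   = u64Delta * pState->u32CatchUpPct / 100;
        if (off > u64Sub + pState->offVirtualSyncGivenUp)
            off -= u64Sub;                          /* still behind: keep catching up */
        else
        {
            off = pState->offVirtualSyncGivenUp;    /* caught up to the floor */
            pState->fCatchUp = false;
        }
        pState->u64CatchUpPrev = u64Now;
        pState->offVirtualSync = off;
        return u64Now - off;                        /* the virtual sync timestamp */
    }

    int main(void)
    {
        CATCHUPSTATE State = { 1000, 0, 0, 25, true };
        /* 2000ns of virtual time at 125% recovers 500ns of the 1000ns lag. */
        printf("sync=%llu lag=%llu\n",
               (unsigned long long)catchUpStep(&State, 2000),
               (unsigned long long)State.offVirtualSync);
        return 0;
    }
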
360 uint64_t u64Last = ASMAtomicUoReadU64(&pVM->tm.s.u64VirtualSync);
364 STAM_COUNTER_INC(&pVM->tm.s.StatVirtualSyncGetAdjLast);
367 uint64_t u64Expire = ASMAtomicReadU64(&pVM->tm.s.CTX_SUFF(paTimerQueues)[TMCLOCK_VIRTUAL_SYNC].u64Expire);
370 ASMAtomicWriteU64(&pVM->tm.s.u64VirtualSync, u64);
372 ASMAtomicWriteU64(&pVM->tm.s.offVirtualSync, off);
374 ASMAtomicWriteBool(&pVM->tm.s.fVirtualSyncCatchUp, false);
376 ASMAtomicWriteU64(&pVM->tm.s.u64VirtualSyncCatchUpPrev, u64);
380 if (pVM->tm.s.fVirtualSyncCatchUp)
382 pVM->tm.s.u32VirtualSyncCatchUpPercentage + 100);
385 PDMCritSectLeave(&pVM->tm.s.VirtualSyncLock);
390 ASMAtomicWriteU64(&pVM->tm.s.u64VirtualSync, u64);
391 ASMAtomicWriteBool(&pVM->tm.s.fVirtualSyncTicking, false);
394 PVMCPU pVCpuDst = &pVM->aCpus[pVM->tm.s.idTimerCpu];
397 Log4(("TM: %'RU64/-%'8RU64: exp tmr=>ff [vsghcul]\n", u64, pVM->tm.s.offVirtualSync - pVM->tm.s.offVirtualSyncGivenUp));
398 PDMCritSectLeave(&pVM->tm.s.VirtualSyncLock);
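
Lines 360-398 show the two clamps applied before the virtual sync timestamp is published: it must never move backwards past the last published value (u64Last, counted by StatVirtualSyncGetAdjLast), and it must never run past the head expiry of the VIRTUAL_SYNC queue; when it hits the expiry the clock is frozen there, ticking is stopped and the timer force-flag is raised. A sketch of those two clamps with invented types, leaving out the critical section and atomics:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    typedef struct SYNCCLOCK
    {
        uint64_t u64VirtualSync;       /* last published virtual sync timestamp */
        uint64_t u64Expire;            /* head expiry of the VIRTUAL_SYNC queue */
        bool     fTicking;
        bool     fTimerFF;             /* stand-in for the timer force-flag */
    } SYNCCLOCK;

    /* Publish a new virtual sync timestamp, clamped so it never goes backwards
       and never runs past the next timer expiry. */
    static uint64_t publishVirtualSync(SYNCCLOCK *pClock, uint64_t u64)
    {
        uint64_t u64Last = pClock->u64VirtualSync;
        if (u64 <= u64Last)
            u64 = u64Last + 1;                 /* never backwards, never the same */

        if (u64 < pClock->u64Expire)
            pClock->u64VirtualSync = u64;      /* plain advance */
        else
        {
            u64 = pClock->u64Expire;           /* freeze at the expired timer */
            pClock->u64VirtualSync = u64;
            pClock->fTicking = false;
            pClock->fTimerFF = true;           /* somebody has to run the queue */
        }
        return u64;
    }

    int main(void)
    {
        SYNCCLOCK Clock = { 100, 150, true, false };
        printf("%llu\n", (unsigned long long)publishVirtualSync(&Clock, 90));   /* -> 101 */
        printf("%llu ff=%d\n", (unsigned long long)publishVirtualSync(&Clock, 200),
               Clock.fTimerFF);                                                 /* -> 150, ff=1 */
        return 0;
    }
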
408 STAM_COUNTER_INC(&pVM->tm.s.StatVirtualSyncGetSetFF);
409 STAM_COUNTER_INC(&pVM->tm.s.StatVirtualSyncGetExpired);
411 STAM_COUNTER_INC(&pVM->tm.s.StatVirtualSyncGetLocked);
434 if (!pVM->tm.s.fVirtualSyncTicking)
436 u64 = ASMAtomicUoReadU64(&pVM->tm.s.u64VirtualSync);
437 PDMCritSectLeave(&pVM->tm.s.VirtualSyncLock);
440 STAM_COUNTER_INC(&pVM->tm.s.StatVirtualSyncGetLocked);
449 uint64_t off = ASMAtomicUoReadU64(&pVM->tm.s.offVirtualSync);
450 if (ASMAtomicUoReadBool(&pVM->tm.s.fVirtualSyncCatchUp))
460 uint64_t u64Last = ASMAtomicUoReadU64(&pVM->tm.s.u64VirtualSync);
464 STAM_COUNTER_INC(&pVM->tm.s.StatVirtualSyncGetAdjLast);
467 uint64_t u64Expire = ASMAtomicReadU64(&pVM->tm.s.CTX_SUFF(paTimerQueues)[TMCLOCK_VIRTUAL_SYNC].u64Expire);
470 ASMAtomicWriteU64(&pVM->tm.s.u64VirtualSync, u64);
471 PDMCritSectLeave(&pVM->tm.s.VirtualSyncLock);
478 ASMAtomicWriteU64(&pVM->tm.s.u64VirtualSync, u64);
479 ASMAtomicWriteBool(&pVM->tm.s.fVirtualSyncTicking, false);
482 PVMCPU pVCpuDst = &pVM->aCpus[pVM->tm.s.idTimerCpu];
485 Log4(("TM: %'RU64/-%'8RU64: exp tmr=>ff [vsgl]\n", u64, pVM->tm.s.offVirtualSync - pVM->tm.s.offVirtualSyncGivenUp));
486 PDMCritSectLeave(&pVM->tm.s.VirtualSyncLock);
496 STAM_COUNTER_INC(&pVM->tm.s.StatVirtualSyncGetSetFF);
497 STAM_COUNTER_INC(&pVM->tm.s.StatVirtualSyncGetExpired);
499 STAM_COUNTER_INC(&pVM->tm.s.StatVirtualSyncGetLocked);
519 STAM_COUNTER_INC(&pVM->tm.s.StatVirtualSyncGet);
522 if (!pVM->tm.s.fVirtualSyncTicking)
526 u64 = pVM->tm.s.u64VirtualSync;
534 Assert(pVM->tm.s.cVirtualTicking);
538 PVMCPU pVCpuDst = &pVM->aCpus[pVM->tm.s.idTimerCpu];
540 && pVM->tm.s.CTX_SUFF(paTimerQueues)[TMCLOCK_VIRTUAL].u64Expire <= u64)
550 STAM_COUNTER_INC(&pVM->tm.s.StatVirtualSyncGetSetFF);
561 if (PDMCritSectTryEnter(&pVM->tm.s.VirtualSyncLock) == VINF_SUCCESS)
569 if (ASMAtomicUoReadBool(&pVM->tm.s.fVirtualSyncTicking))
571 if (!ASMAtomicUoReadBool(&pVM->tm.s.fVirtualSyncCatchUp))
573 off = ASMAtomicReadU64(&pVM->tm.s.offVirtualSync);
574 if (RT_LIKELY( ASMAtomicUoReadBool(&pVM->tm.s.fVirtualSyncTicking)
575 && !ASMAtomicUoReadBool(&pVM->tm.s.fVirtualSyncCatchUp)
576 && off == ASMAtomicReadU64(&pVM->tm.s.offVirtualSync)))
579 uint64_t const u64Expire = ASMAtomicReadU64(&pVM->tm.s.CTX_SUFF(paTimerQueues)[TMCLOCK_VIRTUAL_SYNC].u64Expire);
584 STAM_COUNTER_INC(&pVM->tm.s.StatVirtualSyncGetLockless);
594 off = ASMAtomicReadU64(&pVM->tm.s.u64VirtualSync);
595 if (RT_LIKELY(!ASMAtomicReadBool(&pVM->tm.s.fVirtualSyncTicking)))
599 STAM_COUNTER_INC(&pVM->tm.s.StatVirtualSyncGetLockless);
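
Lines 569-599 are the lockless fast path: a field is read, then the flags and the field are read again, and the value is only used when nothing changed in between; otherwise the code falls back to taking the lock (the PDMCritSectTryEnter on lines 561 and 629). A sketch of that optimistic double-read, assuming C11 atomics rather than the ASMAtomic* wrappers and invented names:

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    typedef struct SYNCVIEW
    {
        _Atomic uint64_t offVirtualSync;    /* lag behind the virtual clock */
        _Atomic bool     fTicking;
        _Atomic bool     fCatchUp;
    } SYNCVIEW;

    /* Lockless attempt: read the offset, then re-check that the clock is still
       ticking, not in catch-up, and that the offset did not change meanwhile.
       Returns true and the timestamp on success; false means "take the lock". */
    static bool tryGetVirtualSyncLockless(SYNCVIEW *pView, uint64_t u64Virtual, uint64_t *pu64)
    {
        if (   atomic_load(&pView->fTicking)
            && !atomic_load(&pView->fCatchUp))
        {
            uint64_t off = atomic_load(&pView->offVirtualSync);
            if (   atomic_load(&pView->fTicking)
                && !atomic_load(&pView->fCatchUp)
                && off == atomic_load(&pView->offVirtualSync))
            {
                *pu64 = u64Virtual - off;   /* consistent snapshot, no lock taken */
                return true;
            }
        }
        return false;                        /* racing with catch-up/stop: lock it */
    }

    int main(void)
    {
        SYNCVIEW View;
        atomic_init(&View.offVirtualSync, 250);
        atomic_init(&View.fTicking, true);
        atomic_init(&View.fCatchUp, false);

        uint64_t u64;
        if (tryGetVirtualSyncLockless(&View, 10000, &u64))
            printf("lockless: %llu\n", (unsigned long long)u64);
        return 0;
    }
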
629 int rcLock = PDMCritSectTryEnter(&pVM->tm.s.VirtualSyncLock);
634 if (!ASMAtomicReadBool(&pVM->tm.s.fVirtualSyncTicking))
636 off = ASMAtomicReadU64(&pVM->tm.s.u64VirtualSync);
637 if ( ASMAtomicReadBool(&pVM->tm.s.fVirtualSyncTicking)
647 off = ASMAtomicReadU64(&pVM->tm.s.offVirtualSync);
648 if (ASMAtomicReadBool(&pVM->tm.s.fVirtualSyncCatchUp))
651 uint64_t const u64Prev = ASMAtomicReadU64(&pVM->tm.s.u64VirtualSyncCatchUpPrev);
652 uint64_t const offGivenUp = ASMAtomicReadU64(&pVM->tm.s.offVirtualSyncGivenUp);
653 uint32_t const u32Pct = ASMAtomicReadU32(&pVM->tm.s.u32VirtualSyncCatchUpPercentage);
654 if ( ( u64Prev == ASMAtomicReadU64(&pVM->tm.s.u64VirtualSyncCatchUpPrev)
655 && offGivenUp == ASMAtomicReadU64(&pVM->tm.s.offVirtualSyncGivenUp)
656 && u32Pct == ASMAtomicReadU32(&pVM->tm.s.u32VirtualSyncCatchUpPercentage)
657 && ASMAtomicReadBool(&pVM->tm.s.fVirtualSyncCatchUp))
667 Log4(("TM: %'RU64/-%'8RU64: sub %RU32 [NoLock]\n", u64 - off, pVM->tm.s.offVirtualSync - offGivenUp, u64Sub));
672 STAM_PROFILE_ADV_STOP(&pVM->tm.s.StatVirtualSyncCatchup, c);
682 if ( ASMAtomicUoReadBool(&pVM->tm.s.fVirtualSyncTicking)
683 && ASMAtomicReadBool(&pVM->tm.s.fVirtualSyncCatchUp))
689 else if ( off == ASMAtomicReadU64(&pVM->tm.s.offVirtualSync)
690 && !ASMAtomicReadBool(&pVM->tm.s.fVirtualSyncCatchUp))
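
Lines 651-657 extend the same idea to several fields at once: the previous catch-up timestamp, the given-up offset and the percentage are each read, then all of them plus the catch-up flag are re-read, and the catch-up step is only applied if every value matched. A sketch of that multi-field snapshot check, again with C11 atomics and invented names:

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    typedef struct CATCHUPVIEW
    {
        _Atomic uint64_t u64Prev;       /* virtual time of the previous catch-up sample */
        _Atomic uint64_t offGivenUp;    /* lag that has been given up on */
        _Atomic uint32_t u32Pct;        /* catch-up percentage */
        _Atomic bool     fCatchUp;
    } CATCHUPVIEW;

    /* Snapshot several related fields without a lock: read each once, then
       re-read all of them (and the mode flag) to confirm nothing changed. */
    static bool trySnapshotCatchUp(CATCHUPVIEW *pView,
                                   uint64_t *pu64Prev, uint64_t *poffGivenUp, uint32_t *pu32Pct)
    {
        uint64_t const u64Prev    = atomic_load(&pView->u64Prev);
        uint64_t const offGivenUp = atomic_load(&pView->offGivenUp);
        uint32_t const u32Pct     = atomic_load(&pView->u32Pct);
        if (   u64Prev    == atomic_load(&pView->u64Prev)
            && offGivenUp == atomic_load(&pView->offGivenUp)
            && u32Pct     == atomic_load(&pView->u32Pct)
            && atomic_load(&pView->fCatchUp))
        {
            *pu64Prev    = u64Prev;
            *poffGivenUp = offGivenUp;
            *pu32Pct     = u32Pct;
            return true;            /* consistent: safe to compute the catch-up step */
        }
        return false;               /* something moved: retry or fall back to the lock */
    }

    int main(void)
    {
        CATCHUPVIEW View;
        atomic_init(&View.u64Prev, 5000);
        atomic_init(&View.offGivenUp, 100);
        atomic_init(&View.u32Pct, 25);
        atomic_init(&View.fCatchUp, true);

        uint64_t u64Prev, offGivenUp; uint32_t u32Pct;
        if (trySnapshotCatchUp(&View, &u64Prev, &offGivenUp, &u32Pct))
            printf("prev=%llu givenUp=%llu pct=%u\n",
                   (unsigned long long)u64Prev, (unsigned long long)offGivenUp, u32Pct);
        return 0;
    }
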
696 STAM_COUNTER_INC(&pVM->tm.s.StatVirtualSyncGetELoop);
705 uint64_t u64Expire = ASMAtomicReadU64(&pVM->tm.s.CTX_SUFF(paTimerQueues)[TMCLOCK_VIRTUAL_SYNC].u64Expire);
708 PVMCPU pVCpuDst = &pVM->aCpus[pVM->tm.s.idTimerCpu];
720 STAM_COUNTER_INC(&pVM->tm.s.StatVirtualSyncGetSetFF);
721 Log4(("TM: %'RU64/-%'8RU64: exp tmr=>ff [NoLock]\n", u64, pVM->tm.s.offVirtualSync - pVM->tm.s.offVirtualSyncGivenUp));
724 Log4(("TM: %'RU64/-%'8RU64: exp tmr [NoLock]\n", u64, pVM->tm.s.offVirtualSync - pVM->tm.s.offVirtualSyncGivenUp));
727 STAM_COUNTER_INC(&pVM->tm.s.StatVirtualSyncGetExpired);
732 if (ASMAtomicReadBool(&pVM->tm.s.fVirtualSyncCatchUp))
734 ASMAtomicReadU32(&pVM->tm.s.u32VirtualSyncCatchUpPercentage) + 100);
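
Lines 382 and 734 both end a multiply-divide call with `u32VirtualSyncCatchUpPercentage + 100`: while catching up, the sync clock runs at (100 + pct)% of the virtual clock, so a deadline expressed in sync-clock ticks is scaled by 100/(100 + pct) to get the virtual ticks until it is hit. A small worked sketch (the surrounding call is not part of the matched lines; the 128-bit helper is the same invented one as above):

    #include <stdint.h>
    #include <stdio.h>

    /* 64x32/32 multiply-divide, as used to rescale deadlines (128-bit intermediate). */
    static uint64_t mulDivU64ByU32(uint64_t u64A, uint32_t u32Mul, uint32_t u32Div)
    {
        return (uint64_t)(((unsigned __int128)u64A * u32Mul) / u32Div);
    }

    int main(void)
    {
        /* At 25% catch-up the sync clock runs at 125%, so a deadline that is
           1,000,000 sync ticks away is only 800,000 virtual ticks away. */
        uint32_t u32CatchUpPct = 25;
        printf("%llu\n", (unsigned long long)mulDivU64ByU32(1000000, 100, u32CatchUpPct + 100));
        return 0;
    }
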
832 return pVM->tm.s.offVirtualSync - pVM->tm.s.offVirtualSyncGivenUp;
844 if (pVM->tm.s.fVirtualSyncCatchUp)
845 return pVM->tm.s.u32VirtualSyncCatchUpPercentage;
871 uint32_t c = ASMAtomicDecU32(&pVM->tm.s.cVirtualTicking);
875 STAM_COUNTER_INC(&pVM->tm.s.StatVirtualPause);
876 pVM->tm.s.u64Virtual = tmVirtualGetRaw(pVM);
877 ASMAtomicWriteBool(&pVM->tm.s.fVirtualSyncTicking, false);
891 uint32_t c = ASMAtomicIncU32(&pVM->tm.s.cVirtualTicking);
895 STAM_COUNTER_INC(&pVM->tm.s.StatVirtualResume);
896 pVM->tm.s.u64VirtualRawPrev = 0;
897 pVM->tm.s.u64VirtualWarpDriveStart = tmVirtualGetRawNanoTS(pVM);
898 pVM->tm.s.u64VirtualOffset = pVM->tm.s.u64VirtualWarpDriveStart - pVM->tm.s.u64Virtual;
899 ASMAtomicWriteBool(&pVM->tm.s.fVirtualSyncTicking, true);
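
Lines 871-899 are the pause/resume bookkeeping: pausing decrements a ticking count and, on the last decrement, freezes the current virtual time; resuming increments it and, on the first increment, recomputes the offset between the raw nanosecond source and the frozen virtual time so the clock continues exactly where it stopped. A sketch of that offset bookkeeping with invented names and without the EMT/atomics plumbing:

    #include <stdint.h>
    #include <stdio.h>

    typedef struct VIRTCLOCK
    {
        uint32_t cTicking;          /* nested start/stop count */
        uint64_t u64Offset;         /* raw nanots minus virtual time */
        uint64_t u64Frozen;         /* virtual time captured when last paused */
    } VIRTCLOCK;

    static uint64_t clockGet(VIRTCLOCK *pClock, uint64_t u64RawNanoTS)
    {
        return pClock->cTicking ? u64RawNanoTS - pClock->u64Offset : pClock->u64Frozen;
    }

    static void clockPause(VIRTCLOCK *pClock, uint64_t u64RawNanoTS)
    {
        if (--pClock->cTicking == 0)
            pClock->u64Frozen = u64RawNanoTS - pClock->u64Offset;   /* freeze here */
    }

    static void clockResume(VIRTCLOCK *pClock, uint64_t u64RawNanoTS)
    {
        if (++pClock->cTicking == 1)
            pClock->u64Offset = u64RawNanoTS - pClock->u64Frozen;   /* continue where we stopped */
    }

    int main(void)
    {
        VIRTCLOCK Clock = { 1, 0, 0 };
        clockPause(&Clock, 1000);                    /* virtual time frozen at 1000 */
        clockResume(&Clock, 5000);                   /* 4000ns of host time skipped */
        printf("%llu\n", (unsigned long long)clockGet(&Clock, 6000));   /* -> 2000 */
        return 0;
    }
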