Lines Matching defs:iCpu

140     uint32_t            iCpu     = RTMpCpuIdToSetIndex(idCpu);
143     RTR0TESTR0_CHECK_MSG(iCpu < RT_ELEMENTS(g_aOmniLatency), ("iCpu=%d idCpu=%u\n", iCpu, idCpu));
144     if (iCpu < RT_ELEMENTS(g_aOmniLatency))
146         uint32_t iSample = g_aOmniLatency[iCpu].cSamples;
147         if (iSample < RT_ELEMENTS(g_aOmniLatency[iCpu].aSamples))
149             g_aOmniLatency[iCpu].aSamples[iSample].uTsc = ASMReadTSC();
150             g_aOmniLatency[iCpu].aSamples[iSample].uNanoTs = RTTimeSystemNanoTS();
151             g_aOmniLatency[iCpu].cSamples = iSample + 1;
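
The fragment at lines 140-151 is an omni (all-CPU) timer callback that records one
TSC/nanosecond timestamp pair per tick into a per-CPU sample array indexed by the
CPU's set index.  A minimal self-contained sketch of the same pattern follows; the
LATSAMPLES type, the sample count of 16 and the callback name are illustrative
assumptions, not the testcase's actual definitions.

    /* Sketch only: names and sizes are assumptions, see the note above. */
    #include <iprt/asm-amd64-x86.h>     /* ASMReadTSC */
    #include <iprt/mp.h>                /* RTMpCpuId, RTMpCpuIdToSetIndex */
    #include <iprt/time.h>              /* RTTimeSystemNanoTS */
    #include <iprt/timer.h>             /* PRTTIMER, timer callback signature */

    typedef struct LATSAMPLES
    {
        uint32_t cSamples;              /* Next free slot for this CPU. */
        struct
        {
            uint64_t uTsc;              /* Raw time stamp counter value. */
            uint64_t uNanoTs;           /* System nanosecond timestamp. */
        } aSamples[16];
    } LATSAMPLES;

    static LATSAMPLES g_aSamples[RTCPUSET_MAX_CPUS];

    /* Omni timer callback: each CPU appends to its own slot, so no locking is
       needed, since a CPU only ever touches its own array entry. */
    static DECLCALLBACK(void) sampleLatencyCallback(PRTTIMER pTimer, void *pvUser, uint64_t iTick)
    {
        uint32_t iCpu = RTMpCpuIdToSetIndex(RTMpCpuId());
        if (iCpu < RT_ELEMENTS(g_aSamples))
        {
            uint32_t iSample = g_aSamples[iCpu].cSamples;
            if (iSample < RT_ELEMENTS(g_aSamples[iCpu].aSamples))
            {
                g_aSamples[iCpu].aSamples[iSample].uTsc    = ASMReadTSC();
                g_aSamples[iCpu].aSamples[iSample].uNanoTs = RTTimeSystemNanoTS();
                g_aSamples[iCpu].cSamples = iSample + 1;
            }
        }
        RT_NOREF(pTimer, pvUser, iTick);
    }
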
169     uint32_t iCpu = RTMpCpuIdToSetIndex(idCpu);
172     RTR0TESTR0_CHECK_MSG(iCpu < RTCPUSET_MAX_CPUS, ("iCpu=%d idCpu=%u\n", iCpu, idCpu));
173     if (iCpu < RTCPUSET_MAX_CPUS)
175         uint32_t iCountedTick = ASMAtomicIncU32(&paStates[iCpu].cTicks);
177                              ("iCountedTick=%u iTick=%u iCpu=%d idCpu=%u\n", iCountedTick, iTick, iCpu, idCpu));
178         paStates[iCpu].u64Last = RTTimeSystemNanoTS();
179         if (!paStates[iCpu].u64Start)
181             paStates[iCpu].u64Start = paStates[iCpu].u64Last;
182             RTR0TESTR0_CHECK_MSG(iCountedTick == 1, ("iCountedTick=%u iCpu=%d idCpu=%u\n", iCountedTick, iCpu, idCpu));
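
Lines 169-182 are the bookkeeping side of the per-CPU tick counting tests: the
counter is bumped atomically, the latest and first nanosecond timestamps are
stored, and the first counted tick is cross-checked against iTick.  A sketch of
that pattern; the PERCPUSTATE type and callback name are invented here for
illustration, and the testcase's RTR0TESTR0_CHECK_MSG assertions are left out.

    /* Sketch only: names are assumptions, assertions omitted. */
    #include <iprt/asm.h>               /* ASMAtomicIncU32 */
    #include <iprt/mp.h>                /* RTMpCpuId, RTMpCpuIdToSetIndex */
    #include <iprt/time.h>              /* RTTimeSystemNanoTS */
    #include <iprt/timer.h>             /* PRTTIMER */

    typedef struct PERCPUSTATE
    {
        uint32_t volatile cTicks;       /* Callbacks seen on this CPU. */
        uint64_t          u64Start;     /* Nano TS of the first callback. */
        uint64_t          u64Last;      /* Nano TS of the latest callback. */
    } PERCPUSTATE;

    /* pvUser points at an array with one entry per CPU set index.  The count
       is incremented atomically so the controlling thread can read it while
       the timer is still active. */
    static DECLCALLBACK(void) countTicksCallback(PRTTIMER pTimer, void *pvUser, uint64_t iTick)
    {
        PERCPUSTATE *paStates = (PERCPUSTATE *)pvUser;
        uint32_t     iCpu     = RTMpCpuIdToSetIndex(RTMpCpuId());
        if (iCpu < RTCPUSET_MAX_CPUS)
        {
            ASMAtomicIncU32(&paStates[iCpu].cTicks);
            paStates[iCpu].u64Last = RTTimeSystemNanoTS();
            if (!paStates[iCpu].u64Start)
                paStates[iCpu].u64Start = paStates[iCpu].u64Last;
        }
        RT_NOREF(pTimer, iTick);
    }
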
564     for (uint32_t iCpu = 0; iCpu < RTCPUSET_MAX_CPUS; iCpu++)
565         if (RTCpuSetIsMemberByIndex(&OnlineSet, iCpu))
570             State.u.Specific.idCpu = RTMpCpuIdFromSetIndex(iCpu);
574             fFlags |= RTTIMER_FLAGS_CPU(iCpu);
592                 ("cShots=%u iCpu=%u i=%u iCurCpu=%u cNsElapsed=%'llu\n",
593                  State.cShots, iCpu, i, RTMpCpuIdToSetIndex(RTMpCpuId()), cNsElapsed ));
595             RTR0TESTR0_CHECK_MSG_BREAK(!State.u.Specific.fFailed, ("iCpu=%u i=%u\n", iCpu, i));
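
Lines 564-595 iterate over every possible CPU set index, skip offline CPUs, and
create a timer bound to the current CPU via RTTIMER_FLAGS_CPU(iCpu).  Below is a
sketch of that create/start/stop/destroy cycle under assumed parameters (helper
name, 1 ms interval, 32 ms run, simple error propagation); the State/fFlags
plumbing of the real testcase is not reproduced.

    /* Sketch only: helper name, interval, run time and error handling are
       assumptions for illustration. */
    #include <iprt/cpuset.h>            /* RTCPUSET, RTCpuSetIsMemberByIndex */
    #include <iprt/err.h>               /* RT_SUCCESS, VINF_SUCCESS */
    #include <iprt/mp.h>                /* RTMpGetOnlineSet */
    #include <iprt/thread.h>            /* RTThreadSleep */
    #include <iprt/timer.h>             /* RTTimerCreateEx, RTTIMER_FLAGS_CPU */

    static int runOneTimerPerCpu(PFNRTTIMER pfnCallback, void *pvUser)
    {
        RTCPUSET OnlineSet;
        RTMpGetOnlineSet(&OnlineSet);

        for (uint32_t iCpu = 0; iCpu < RTCPUSET_MAX_CPUS; iCpu++)
            if (RTCpuSetIsMemberByIndex(&OnlineSet, iCpu))
            {
                /* RTTIMER_FLAGS_CPU(iCpu) requests a timer that only fires on
                   the CPU with this set index; creation can fail on hosts that
                   cannot bind timers to a specific CPU. */
                PRTTIMER pTimer = NULL;
                int rc = RTTimerCreateEx(&pTimer, UINT64_C(1000000) /* 1 ms */,
                                         RTTIMER_FLAGS_CPU(iCpu), pfnCallback, pvUser);
                if (RT_FAILURE(rc))
                    return rc;

                rc = RTTimerStart(pTimer, 0 /* fire as soon as possible */);
                if (RT_SUCCESS(rc))
                {
                    RTThreadSleep(32);  /* let it tick a few times */
                    RTTimerStop(pTimer);
                }
                RTTimerDestroy(pTimer);
                if (RT_FAILURE(rc))
                    return rc;
            }
        return VINF_SUCCESS;
    }

RTTIMER_FLAGS_CPU(iCpu) is the set index combined with RTTIMER_FLAGS_CPU_SPECIFIC,
which is what ties the timer to that one CPU; the near-identical loop at lines
729-762 below repeats the same pattern for another test variation.
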
729     for (uint32_t iCpu = 0; iCpu < RTCPUSET_MAX_CPUS; iCpu++)
730         if (RTCpuSetIsMemberByIndex(&OnlineSet, iCpu))
736             State.u.Specific.idCpu = RTMpCpuIdFromSetIndex(iCpu);
740             fFlags |= RTTIMER_FLAGS_CPU(iCpu);
759                 ("cShots=%u iCpu=%u i=%u iCurCpu=%u cNsElapsed=%'llu\n",
760                  State.cShots, iCpu, i, RTMpCpuIdToSetIndex(RTMpCpuId()), cNsElapsed));
762             RTR0TESTR0_CHECK_MSG_BREAK(!State.u.Specific.fFailed, ("iCpu=%u i=%u\n", iCpu, i));

799     for (uint32_t iCpu = 0; iCpu < RTCPUSET_MAX_CPUS; iCpu++)
801         paStates[iCpu].u64Start = 0;
802         paStates[iCpu].u64Last = 0;
803         ASMAtomicWriteU32(&paStates[iCpu].cTicks, 0);

821     for (uint32_t iCpu = 0; iCpu < RTCPUSET_MAX_CPUS; iCpu++)
823         if (paStates[iCpu].u64Start)
825             if (paStates[iCpu].u64Start < u64MinStart)
826                 u64MinStart = paStates[iCpu].u64Start;
827             if (paStates[iCpu].u64Last > u64MaxStop)
828                 u64MaxStop = paStates[iCpu].u64Last;

840     for (uint32_t iCpu = 0; iCpu < RTCPUSET_MAX_CPUS; iCpu++)
841         if (paStates[iCpu].cTicks)
843             RTR0TESTR0_CHECK_MSG(RTCpuSetIsMemberByIndex(&OnlineSet, iCpu), ("%d\n", iCpu));
844             RTR0TESTR0_CHECK_MSG(paStates[iCpu].cTicks <= cMaxTicks && paStates[iCpu].cTicks >= cMinTicks,
845                                  ("min=%u, ticks=%u, avg=%u max=%u, iCpu=%u, interval=%'u, elapsed=%'llu/%'llu\n",
846                                   cMinTicks, paStates[iCpu].cTicks, cAvgTicks, cMaxTicks, iCpu,
850             RTR0TESTR0_CHECK_MSG(!RTCpuSetIsMemberByIndex(&OnlineSet, iCpu), ("%d\n", iCpu));
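
Lines 799-850 reset the per-CPU state, derive the measurement window from the
earliest start and latest stop timestamps, and then require that every CPU which
ticked is online and falls within the cMinTicks..cMaxTicks bounds, while a CPU
with no ticks must not be online.  The sketch below reuses the PERCPUSTATE type
from the earlier sketch; the 25% tolerance is an assumption, since the excerpt
does not show how cMinTicks/cMaxTicks are derived.

    /* Sketch only: reuses PERCPUSTATE from above; the bounds are assumptions. */
    #include <iprt/cpuset.h>            /* RTCpuSetIsMemberByIndex, PCRTCPUSET */

    static bool checkPerCpuTickCounts(PERCPUSTATE const *paStates, PCRTCPUSET pOnlineSet,
                                      uint64_t cNsInterval)
    {
        /* The measurement window runs from the earliest start to the latest
           stop over all CPUs that actually ran the callback. */
        uint64_t u64MinStart = UINT64_MAX;
        uint64_t u64MaxStop  = 0;
        for (uint32_t iCpu = 0; iCpu < RTCPUSET_MAX_CPUS; iCpu++)
            if (paStates[iCpu].u64Start)
            {
                if (paStates[iCpu].u64Start < u64MinStart)
                    u64MinStart = paStates[iCpu].u64Start;
                if (paStates[iCpu].u64Last > u64MaxStop)
                    u64MaxStop = paStates[iCpu].u64Last;
            }
        if (u64MaxStop <= u64MinStart)
            return false;               /* no CPU recorded anything */

        /* Expected ticks per CPU for that window, with 25% slack either way. */
        uint32_t cAvgTicks = (uint32_t)((u64MaxStop - u64MinStart) / cNsInterval) + 1;
        uint32_t cMinTicks = cAvgTicks - cAvgTicks / 4;
        uint32_t cMaxTicks = cAvgTicks + cAvgTicks / 4;

        /* Only online CPUs may have ticked, and each of those must land
           within the calculated bounds. */
        bool fOk = true;
        for (uint32_t iCpu = 0; iCpu < RTCPUSET_MAX_CPUS; iCpu++)
            if (paStates[iCpu].cTicks)
                fOk = fOk
                   && RTCpuSetIsMemberByIndex(pOnlineSet, iCpu)
                   && paStates[iCpu].cTicks >= cMinTicks
                   && paStates[iCpu].cTicks <= cMaxTicks;
            else
                fOk = fOk && !RTCpuSetIsMemberByIndex(pOnlineSet, iCpu);
        return fOk;
    }
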
900     for (uint32_t iCpu = 0; iCpu < RT_ELEMENTS(g_aOmniLatency); iCpu++)
902         uint32_t cSamples = g_aOmniLatency[iCpu].cSamples;
908             int64_t cNsDelta = g_aOmniLatency[iCpu].aSamples[iSample - 1].uNanoTs
909                              - g_aOmniLatency[iCpu].aSamples[iSample].uNanoTs;
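
Lines 900-909 post-process the latency samples by differencing the nanosecond
timestamps of consecutive samples on each CPU.  A sketch of that aggregation,
reusing the LATSAMPLES type from the first sketch; the helper name and the
min/max summary are illustrative, and since the surrounding loop is not shown in
the excerpt (which subtracts the newer sample from the older one), this sketch
simply takes current minus previous.

    /* Sketch only: reuses LATSAMPLES from above; names and reporting are
       assumptions. */
    #include <iprt/log.h>               /* RTLogPrintf */

    static void summarizeOmniLatency(LATSAMPLES const *paSamples, uint32_t cCpus)
    {
        for (uint32_t iCpu = 0; iCpu < cCpus; iCpu++)
        {
            uint32_t cSamples = paSamples[iCpu].cSamples;
            if (cSamples < 2)
                continue;               /* nothing to difference on this CPU */

            int64_t cNsMin = INT64_MAX;
            int64_t cNsMax = 0;
            for (uint32_t iSample = 1; iSample < cSamples; iSample++)
            {
                /* Gap between this tick and the previous one on the same CPU. */
                int64_t cNsDelta = (int64_t)(  paSamples[iCpu].aSamples[iSample].uNanoTs
                                             - paSamples[iCpu].aSamples[iSample - 1].uNanoTs);
                if (cNsDelta < cNsMin)
                    cNsMin = cNsDelta;
                if (cNsDelta > cNsMax)
                    cNsMax = cNsDelta;
            }
            RTLogPrintf("CPU #%u: %u samples, min gap %RI64 ns, max gap %RI64 ns\n",
                        iCpu, cSamples, cNsMin, cNsMax);
        }
    }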