Lines Matching refs:pVCpu

61 static int      emR3RawForcedActions(PVM pVM, PVMCPU pVCpu, PCPUMCTX pCtx);
62 DECLINLINE(int) emR3RawExecuteInstruction(PVM pVM, PVMCPU pVCpu, const char *pszPrefix, int rcGC = VINF_SUCCESS);
63 static int emR3RawGuestTrap(PVM pVM, PVMCPU pVCpu);
64 static int emR3RawPatchTrap(PVM pVM, PVMCPU pVCpu, PCPUMCTX pCtx, int gcret);
65 static int emR3RawPrivileged(PVM pVM, PVMCPU pVCpu);
66 static int emR3RawExecuteIOInstruction(PVM pVM, PVMCPU pVCpu);
67 static int emR3RawRingSwitch(PVM pVM, PVMCPU pVCpu);
83 static void emR3RecordCli(PVM pVM, PVMCPU pVCpu, RTGCPTR GCPtrInstr)
87 pRec = (PCLISTAT)RTAvlGCPtrGet(&pVCpu->em.s.pCliStatTree, GCPtrInstr);
101 bool fRc = RTAvlGCPtrInsert(&pVCpu->em.s.pCliStatTree, &pRec->Core);
105 STAM_COUNTER_INC(&pVCpu->em.s.StatTotalClis);
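
The emR3RecordCli lines above (83-105) look the instruction address up in an AVL tree keyed by guest pointer, allocate a record the first time a given cli site is seen, and bump both the per-site and the total counters. A minimal standalone sketch of that bookkeeping pattern, using a plain linked list in place of IPRT's RTAvlGCPtr tree; every name below is an illustrative stand-in, not a VirtualBox API:

/* Count executions of "cli" per guest instruction address. */
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

typedef uint64_t GUESTPTR;              /* stand-in for RTGCPTR */

typedef struct CLIREC
{
    GUESTPTR       GCPtrInstr;          /* key: guest address of the cli */
    uint64_t       cHits;               /* how often it was executed */
    struct CLIREC *pNext;
} CLIREC;

static CLIREC  *g_pCliHead;             /* stand-in for pCliStatTree */
static uint64_t g_cTotalClis;           /* stand-in for StatTotalClis */

static void recordCli(GUESTPTR GCPtrInstr)
{
    CLIREC *pRec;
    for (pRec = g_pCliHead; pRec; pRec = pRec->pNext)   /* lookup, like RTAvlGCPtrGet */
        if (pRec->GCPtrInstr == GCPtrInstr)
            break;
    if (!pRec)                                          /* first hit: insert, like RTAvlGCPtrInsert */
    {
        pRec = (CLIREC *)calloc(1, sizeof(*pRec));
        if (!pRec)
            return;
        pRec->GCPtrInstr = GCPtrInstr;
        pRec->pNext = g_pCliHead;
        g_pCliHead = pRec;
    }
    pRec->cHits++;                                      /* per-site counter */
    g_cTotalClis++;                                     /* STAM_COUNTER_INC analogue */
}

int main(void)
{
    recordCli(0x80001000);
    recordCli(0x80001000);
    recordCli(0x80002040);
    for (CLIREC *pRec = g_pCliHead; pRec; pRec = pRec->pNext)
        printf("cli at %#llx hit %llu time(s)\n",
               (unsigned long long)pRec->GCPtrInstr, (unsigned long long)pRec->cHits);
    printf("total clis: %llu\n", (unsigned long long)g_cTotalClis);
    return 0;
}
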
119 * @param pVCpu Pointer to the VMCPU.
121 int emR3RawResumeHyper(PVM pVM, PVMCPU pVCpu)
124 PCPUMCTX pCtx = pVCpu->em.s.pCtx;
125 Assert(pVCpu->em.s.enmState == EMSTATE_DEBUG_HYPER);
131 CPUMRawEnter(pVCpu);
132 CPUMSetHyperEFlags(pVCpu, CPUMGetHyperEFlags(pVCpu) | X86_EFL_RF);
133 rc = VMMR3ResumeHyper(pVM, pVCpu);
135 rc = CPUMRawLeave(pVCpu, rc);
136 VMCPU_FF_CLEAR(pVCpu, VMCPU_FF_RESUME_GUEST_MASK);
141 rc = emR3HighPriorityPostForcedActions(pVM, pVCpu, rc);
142 rc = emR3RawHandleRC(pVM, pVCpu, pCtx, rc);
143 rc = emR3RawUpdateForceFlag(pVM, pVCpu, pCtx, rc);
153 * @param pVCpu Pointer to the VMCPU.
155 int emR3RawStep(PVM pVM, PVMCPU pVCpu)
157 Assert( pVCpu->em.s.enmState == EMSTATE_DEBUG_HYPER
158 || pVCpu->em.s.enmState == EMSTATE_DEBUG_GUEST_RAW
159 || pVCpu->em.s.enmState == EMSTATE_DEBUG_GUEST_REM);
161 PCPUMCTX pCtx = pVCpu->em.s.pCtx;
162 bool fGuest = pVCpu->em.s.enmState != EMSTATE_DEBUG_HYPER;
164 Log(("emR3RawStep: cs:eip=%RTsel:%RGr efl=%RGr\n", fGuest ? CPUMGetGuestCS(pVCpu) : CPUMGetHyperCS(pVCpu),
165 fGuest ? CPUMGetGuestEIP(pVCpu) : CPUMGetHyperEIP(pVCpu), fGuest ? CPUMGetGuestEFlags(pVCpu) : CPUMGetHyperEFlags(pVCpu)));
173 || VMCPU_FF_IS_PENDING(pVCpu, VMCPU_FF_HIGH_PRIORITY_PRE_RAW_MASK))
175 rc = emR3RawForcedActions(pVM, pVCpu, pCtx);
176 VBOXVMM_EM_FF_RAW_RET(pVCpu, rc);
184 CPUMSetGuestEFlags(pVCpu, CPUMGetGuestEFlags(pVCpu) | X86_EFL_TF | X86_EFL_RF);
187 CPUMSetHyperEFlags(pVCpu, CPUMGetHyperEFlags(pVCpu) | X86_EFL_TF | X86_EFL_RF);
193 CPUMRawEnter(pVCpu);
196 if (pVCpu->em.s.enmState == EMSTATE_DEBUG_HYPER)
197 rc = VMMR3ResumeHyper(pVM, pVCpu);
199 rc = VMMR3RawRunGC(pVM, pVCpu);
201 Log(("emR3RawStep: cs:eip=%RTsel:%RGr efl=%RGr - GC rc %Rrc\n", fGuest ? CPUMGetGuestCS(pVCpu) : CPUMGetHyperCS(pVCpu),
202 fGuest ? CPUMGetGuestEIP(pVCpu) : CPUMGetHyperEIP(pVCpu), fGuest ? CPUMGetGuestEFlags(pVCpu) : CPUMGetHyperEFlags(pVCpu), rc));
206 rc = CPUMRawLeave(pVCpu, rc);
207 VMCPU_FF_CLEAR(pVCpu, VMCPU_FF_RESUME_GUEST_MASK);
214 CPUMSetGuestEFlags(pVCpu, CPUMGetGuestEFlags(pVCpu) & ~X86_EFL_TF);
216 CPUMSetHyperEFlags(pVCpu, CPUMGetHyperEFlags(pVCpu) & ~X86_EFL_TF);
221 rc = emR3HighPriorityPostForcedActions(pVM, pVCpu, rc);
222 rc = emR3RawHandleRC(pVM, pVCpu, pCtx, rc);
223 rc = emR3RawUpdateForceFlag(pVM, pVCpu, pCtx, rc);
231 int emR3SingleStepExecRaw(PVM pVM, PVMCPU pVCpu, uint32_t cIterations)
234 EMSTATE enmOldState = pVCpu->em.s.enmState;
235 pVCpu->em.s.enmState = EMSTATE_DEBUG_GUEST_RAW;
240 DBGFR3PrgStep(pVCpu);
241 DBGFR3_DISAS_INSTR_CUR_LOG(pVCpu, "RSS");
242 rc = emR3RawStep(pVM, pVCpu);
248 CPUMSetGuestEFlags(pVCpu, CPUMGetGuestEFlags(pVCpu) & ~X86_EFL_TF);
249 pVCpu->em.s.enmState = enmOldState;
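
The emR3SingleStepExecRaw lines above (231-249) drive single stepping: save the current EM state, switch to the debug state, run one instruction per iteration with the trap flag forced, then clear X86_EFL_TF and restore the old state on the way out. A condensed standalone sketch of that save/step/restore shape, with stand-in types instead of PVMCPU and CPUMCTX:

/* Single-step a fake CPU a fixed number of times, restoring its state afterwards. */
#include <stdint.h>
#include <stdio.h>

#define EFL_TF  0x100u                          /* x86 trap flag */

typedef enum { STATE_RAW, STATE_DEBUG_RAW } EMUSTATE;

typedef struct
{
    uint32_t eip;
    uint32_t eflags;
    EMUSTATE enmState;
} FAKECPU;

/* Pretend to execute exactly one guest instruction; 0 means success. */
static int stepOne(FAKECPU *pCpu)
{
    pCpu->eip += 1;
    return 0;
}

static int singleStepExec(FAKECPU *pCpu, uint32_t cIterations)
{
    EMUSTATE enmOldState = pCpu->enmState;      /* save, like enmOldState in the source */
    pCpu->enmState = STATE_DEBUG_RAW;

    int rc = 0;
    for (uint32_t i = 0; i < cIterations && rc == 0; i++)
    {
        pCpu->eflags |= EFL_TF;                 /* arm single stepping for this instruction */
        printf("step %u: eip=%#x\n", (unsigned)i, (unsigned)pCpu->eip);
        rc = stepOne(pCpu);
    }

    pCpu->eflags &= ~EFL_TF;                    /* always disarm the trap flag again */
    pCpu->enmState = enmOldState;               /* and restore the previous state */
    return rc;
}

int main(void)
{
    FAKECPU Cpu = { 0x1000, 0x2, STATE_RAW };
    int rc = singleStepExec(&Cpu, 4);
    printf("done: rc=%d eip=%#x eflags=%#x state=%d\n",
           rc, (unsigned)Cpu.eip, (unsigned)Cpu.eflags, (int)Cpu.enmState);
    return rc;
}
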
262 * @param pVCpu Pointer to the VMCPU.
268 static int emR3RawExecuteInstructionWorker(PVM pVM, PVMCPU pVCpu, int rcGC, const char *pszPrefix)
270 static int emR3RawExecuteInstructionWorker(PVM pVM, PVMCPU pVCpu, int rcGC)
273 PCPUMCTX pCtx = pVCpu->em.s.pCtx;
283 DBGFR3_DISAS_INSTR_CUR_LOG(pVCpu, pszPrefix);
307 uNewEip, pCtx->eflags.Bits.u1IF, pVCpu->em.s.pPatmGCState->uVMFlags));
317 return emR3RawExecuteInstruction(pVM, pVCpu, "PATCHIR");
322 return emR3RawExecuteInstruction(pVM, pVCpu, "PATCHIRET");
331 uNewEip, pCtx->eflags.Bits.u1IF, pVCpu->em.s.pPatmGCState->uVMFlags));
333 return emR3RawExecuteInstruction(pVM, pVCpu, "PATCHIR");
340 uNewEip, pCtx->eflags.Bits.u1IF, pVCpu->em.s.pPatmGCState->uVMFlags));
348 return emR3RawExecuteInstruction(pVM, pVCpu, "PATCHIR");
372 STAM_PROFILE_START(&pVCpu->em.s.StatIEMEmu, a);
373 rc = VBOXSTRICTRC_TODO(IEMExecOne(pVCpu));
374 STAM_PROFILE_STOP(&pVCpu->em.s.StatIEMEmu, a);
389 STAM_PROFILE_START(&pVCpu->em.s.StatREMEmu, b);
397 if (pVM->em.s.idLastRemCpu != pVCpu->idCpu)
398 CPUMSetChangedFlags(pVCpu, CPUM_CHANGED_ALL);
399 pVM->em.s.idLastRemCpu = pVCpu->idCpu;
401 rc = REMR3EmulateInstruction(pVM, pVCpu);
403 STAM_PROFILE_STOP(&pVCpu->em.s.StatREMEmu, b);
418 * @param pVCpu Pointer to the VMCPU.
423 DECLINLINE(int) emR3RawExecuteInstruction(PVM pVM, PVMCPU pVCpu, const char *pszPrefix, int rcGC)
426 return emR3RawExecuteInstructionWorker(pVM, pVCpu, rcGC, pszPrefix);
428 return emR3RawExecuteInstructionWorker(pVM, pVCpu, rcGC);
437 * @param pVCpu Pointer to the VMCPU.
439 static int emR3RawExecuteIOInstruction(PVM pVM, PVMCPU pVCpu)
442 STAM_PROFILE_START(&pVCpu->em.s.StatIOEmu, a);
445 VBOXSTRICTRC rcStrict = IEMExecOne(pVCpu);
447 STAM_COUNTER_INC(&pVCpu->em.s.CTX_SUFF(pStats)->StatIoIem);
448 STAM_PROFILE_STOP(&pVCpu->em.s.StatIOEmu, a);
452 PCPUMCTX pCtx = pVCpu->em.s.pCtx;
454 STAM_PROFILE_START(&pVCpu->em.s.StatIOEmu, a);
460 int rc = CPUMR3DisasmInstrCPU(pVM, pVCpu, pCtx, pCtx->rip, &Cpu, "IO EMU");
471 STAM_COUNTER_INC(&pVCpu->em.s.CTX_SUFF(pStats)->StatIn);
472 rcStrict = IOMInterpretIN(pVM, pVCpu, CPUMCTX2CORE(pCtx), &Cpu);
478 STAM_COUNTER_INC(&pVCpu->em.s.CTX_SUFF(pStats)->StatOut);
479 rcStrict = IOMInterpretOUT(pVM, pVCpu, CPUMCTX2CORE(pCtx), &Cpu);
491 STAM_COUNTER_INC(&pVCpu->em.s.CTX_SUFF(pStats)->StatIn);
492 rcStrict = IOMInterpretINS(pVM, pVCpu, CPUMCTX2CORE(pCtx), &Cpu);
499 STAM_COUNTER_INC(&pVCpu->em.s.CTX_SUFF(pStats)->StatOut);
500 rcStrict = IOMInterpretOUTS(pVM, pVCpu, CPUMCTX2CORE(pCtx), &Cpu);
513 STAM_PROFILE_STOP(&pVCpu->em.s.StatIOEmu, a);
519 STAM_PROFILE_STOP(&pVCpu->em.s.StatIOEmu, a);
520 rcStrict = emR3RawGuestTrap(pVM, pVCpu);
527 STAM_PROFILE_STOP(&pVCpu->em.s.StatIOEmu, a);
532 STAM_PROFILE_STOP(&pVCpu->em.s.StatIOEmu, a);
533 return emR3RawExecuteInstruction(pVM, pVCpu, "IO: ");
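
The emR3RawExecuteIOInstruction lines above (439-533) show two strategies for a faulting I/O instruction: hand the whole instruction to the generic one-instruction interpreter (IEMExecOne), or disassemble it and route IN/OUT/INS/OUTS to the IOM port handlers, with full instruction emulation as the final fallback. A standalone sketch of the latter dispatch shape; opcode names, handlers and return codes are illustrative stand-ins, not VirtualBox APIs:

/* Route a disassembled I/O opcode to the matching handler, else fall back. */
#include <stdio.h>

enum OPCODE { OP_IN, OP_OUT, OP_INSB, OP_OUTSB, OP_OTHER };

static int interpretPortIn(enum OPCODE op)  { printf("port input  (op=%d)\n", (int)op); return 0; }
static int interpretPortOut(enum OPCODE op) { printf("port output (op=%d)\n", (int)op); return 0; }
static int emulateFully(enum OPCODE op)     { printf("full emulation (op=%d)\n", (int)op); return 0; }

static int executeIoInstruction(enum OPCODE op)
{
    switch (op)                           /* dispatch on the disassembled opcode */
    {
        case OP_IN:
        case OP_INSB:
            return interpretPortIn(op);   /* IOMInterpretIN / IOMInterpretINS analogue */
        case OP_OUT:
        case OP_OUTSB:
            return interpretPortOut(op);  /* IOMInterpretOUT / IOMInterpretOUTS analogue */
        default:
            return emulateFully(op);      /* emR3RawExecuteInstruction("IO: ") analogue */
    }
}

int main(void)
{
    int rc = 0;
    rc |= executeIoInstruction(OP_IN);
    rc |= executeIoInstruction(OP_OUTSB);
    rc |= executeIoInstruction(OP_OTHER);
    return rc;
}
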
543 * @param pVCpu Pointer to the VMCPU.
545 static int emR3RawGuestTrap(PVM pVM, PVMCPU pVCpu)
547 PCPUMCTX pCtx = pVCpu->em.s.pCtx;
556 int rc = TRPMQueryTrapAll(pVCpu, &u8TrapNo, &enmType, &uErrorCode, &uCR2, NULL /* pu8InstrLen */);
572 uint32_t uCpl = CPUMGetGuestCPL(pVCpu);
577 return emR3RawPatchTrap(pVM, pVCpu, pCtx, rc);
596 rc = EMR3CheckRawForcedActions(pVM, pVCpu);
602 rc = TRPMForwardTrap(pVCpu, CPUMCTX2CORE(pCtx), u8TrapNo, uErrorCode, enmError, TRPM_TRAP, -1);
605 TRPMResetTrap(pVCpu);
632 rc = CPUMR3DisasmInstrCPU(pVM, pVCpu, pCtx, pCtx->rip, &cpu, "Guest Trap (#UD): ");
637 CPUMGetGuestCpuId(pVCpu, 1, 0, &u32Dummy, &u32Dummy, &u32ExtFeatures, &u32Features);
640 rc = TRPMResetTrap(pVCpu);
643 rc = VBOXSTRICTRC_TODO(EMInterpretInstructionDisasState(pVCpu, &cpu, CPUMCTX2CORE(pCtx), 0, EMCODETYPE_SUPERVISOR));
646 return emR3RawExecuteInstruction(pVM, pVCpu, "Monitor: ");
658 rc = CPUMR3DisasmInstrCPU(pVM, pVCpu, pCtx, pCtx->rip, &cpu, "Guest Trap: ");
666 rc = TRPMResetTrap(pVCpu);
668 return emR3RawExecuteInstruction(pVM, pVCpu, "IO Guest Trap: ");
674 DBGFR3_DISAS_INSTR_CUR_LOG(pVCpu, "Guest trap");
679 int rc2 = PGMGstGetPage(pVCpu, uCR2, &fFlags, &GCPhys);
704 * @param pVCpu Pointer to the VMCPU.
706 static int emR3RawRingSwitch(PVM pVM, PVMCPU pVCpu)
710 PCPUMCTX pCtx = pVCpu->em.s.pCtx;
715 rc = CPUMR3DisasmInstrCPU(pVM, pVCpu, pCtx, pCtx->rip, &Cpu, "RSWITCH: ");
723 CPUMGetGuestCodeBits(pVCpu) == 32 ? PATMFL_CODE32 : 0);
726 DBGFR3_DISAS_INSTR_CUR_LOG(pVCpu, "Patched sysenter instruction");
736 STAM_COUNTER_INC(&pVCpu->em.s.CTX_SUFF(pStats)->StatSysEnter);
739 STAM_COUNTER_INC(&pVCpu->em.s.CTX_SUFF(pStats)->StatSysExit);
742 STAM_COUNTER_INC(&pVCpu->em.s.CTX_SUFF(pStats)->StatSysCall);
745 STAM_COUNTER_INC(&pVCpu->em.s.CTX_SUFF(pStats)->StatSysRet);
754 return emR3RawExecuteInstruction(pVM, pVCpu, "RSWITCH: ");
763 * @param pVCpu Pointer to the VMCPU.
767 static int emR3RawPatchTrap(PVM pVM, PVMCPU pVCpu, PCPUMCTX pCtx, int gcret)
792 rc = TRPMQueryTrapAll(pVCpu, &u8TrapNo, &enmType, &uErrorCode, &uCR2, NULL /* pu8InstrLen */);
799 TRPMResetTrap(pVCpu);
810 DBGFR3_DISAS_INSTR_CUR_LOG(pVCpu, "Patch code");
813 rc = CPUMR3DisasmInstrCPU(pVM, pVCpu, pCtx, pCtx->eip, &Cpu, "Patch code: ");
820 rc = PGMPhysSimpleReadGCPtr(pVCpu, &eip, pCtx->esp, 4);
821 rc |= PGMPhysSimpleReadGCPtr(pVCpu, &selCS, pCtx->esp+4, 4);
822 rc |= PGMPhysSimpleReadGCPtr(pVCpu, &uEFlags, pCtx->esp+8, 4);
830 rc |= PGMPhysSimpleReadGCPtr(pVCpu, &esp, pCtx->esp + 12, 4);
831 rc |= PGMPhysSimpleReadGCPtr(pVCpu, &selSS, pCtx->esp + 16, 4);
836 rc = PGMPhysSimpleReadGCPtr(pVCpu, &selES, pCtx->esp + 20, 4);
837 rc |= PGMPhysSimpleReadGCPtr(pVCpu, &selDS, pCtx->esp + 24, 4);
838 rc |= PGMPhysSimpleReadGCPtr(pVCpu, &selFS, pCtx->esp + 28, 4);
839 rc |= PGMPhysSimpleReadGCPtr(pVCpu, &selGS, pCtx->esp + 32, 4);
868 if (!(pVCpu->em.s.pPatmGCState->uVMFlags & X86_EFL_IF))
890 return emR3RawExecuteInstruction(pVM, pVCpu, "PATCHIR");
902 uNewEip, pCtx->eflags.Bits.u1IF, pVCpu->em.s.pPatmGCState->uVMFlags));
905 return emR3RawExecuteInstruction(pVM, pVCpu, "PATCHEMUL: ");
911 if (!(pVCpu->em.s.pPatmGCState->uVMFlags & X86_EFL_IF))
922 return emR3RawExecuteInstruction(pVM, pVCpu, "PATCHIR");
947 * @param pVCpu Pointer to the VMCPU.
949 static int emR3RawPrivileged(PVM pVM, PVMCPU pVCpu)
951 PCPUMCTX pCtx = pVCpu->em.s.pCtx;
973 CPUMGetGuestCodeBits(pVCpu) == 32 ? PATMFL_CODE32 : 0);
979 DBGFR3_DISAS_INSTR_CUR_LOG(pVCpu, "Patched privileged instruction");
989 DBGFR3_DISAS_INSTR_CUR_LOG(pVCpu, "Privileged instr");
999 rc = CPUMR3DisasmInstrCPU(pVM, pVCpu, pCtx, pCtx->rip, &Cpu, "PRIV: ");
1003 PEMSTATS pStats = pVCpu->em.s.CTX_SUFF(pStats);
1014 emR3RecordCli(pVM, pVCpu, pCtx->rip);
1080 && CPUMGetGuestCodeBits(pVCpu) == 32)
1082 STAM_PROFILE_START(&pVCpu->em.s.StatPrivEmu, a);
1089 STAM_PROFILE_STOP(&pVCpu->em.s.StatPrivEmu, a);
1094 EMSetInhibitInterruptsPC(pVCpu, pCtx->rip + Cpu.cbInstr);
1097 STAM_PROFILE_STOP(&pVCpu->em.s.StatPrivEmu, a);
1129 DBGFR3_DISAS_INSTR_CUR_LOG(pVCpu, "Privileged instr");
1133 rc = VBOXSTRICTRC_TODO(EMInterpretInstructionDisasState(pVCpu, &Cpu, CPUMCTX2CORE(pCtx), 0, EMCODETYPE_SUPERVISOR));
1136 STAM_PROFILE_STOP(&pVCpu->em.s.StatPrivEmu, a);
1174 STAM_PROFILE_STOP(&pVCpu->em.s.StatPrivEmu, a);
1179 return emR3RawPatchTrap(pVM, pVCpu, pCtx, VINF_PATM_PATCH_TRAP_GP);
1181 return emR3RawExecuteInstruction(pVM, pVCpu, "PRIV");
1199 * @param pVCpu Pointer to the VMCPU.
1203 int emR3RawUpdateForceFlag(PVM pVM, PVMCPU pVCpu, PCPUMCTX pCtx, int rc)
1216 pVCpu->em.s.fForceRAW = true;
1219 pVCpu->em.s.fForceRAW = false;
1230 * @param pVCpu Pointer to the VMCPU.
1232 VMMR3_INT_DECL(int) EMR3CheckRawForcedActions(PVM pVM, PVMCPU pVCpu)
1234 int rc = emR3RawForcedActions(pVM, pVCpu, pVCpu->em.s.pCtx);
1235 VBOXVMM_EM_FF_RAW_RET(pVCpu, rc);
1248 * @param pVCpu Pointer to the VMCPU.
1251 static int emR3RawForcedActions(PVM pVM, PVMCPU pVCpu, PCPUMCTX pCtx)
1257 VBOXVMM_EM_FF_RAW(pVCpu, pVM->fGlobalForcedActions, pVCpu->fLocalForcedActions);
1262 if (VMCPU_FF_IS_PENDING(pVCpu, VMCPU_FF_SELM_SYNC_GDT | VMCPU_FF_SELM_SYNC_LDT))
1264 VBOXSTRICTRC rcStrict = SELMR3UpdateFromCPUM(pVM, pVCpu);
1276 if (VMCPU_FF_IS_PENDING(pVCpu, VMCPU_FF_TRPM_SYNC_IDT))
1278 if ( VMCPU_FF_IS_PENDING(pVCpu, VMCPU_FF_PGM_SYNC_CR3)
1282 int rc = PGMSyncCR3(pVCpu, pCtx->cr0, pCtx->cr3, pCtx->cr4, VMCPU_FF_IS_SET(pVCpu, VMCPU_FF_PGM_SYNC_CR3));
1287 int rc = TRPMR3SyncIDT(pVM, pVCpu);
1295 if (VMCPU_FF_IS_PENDING(pVCpu, VMCPU_FF_SELM_SYNC_TSS))
1297 int rc = SELMR3SyncTSS(pVM, pVCpu);
1305 if (VMCPU_FF_IS_PENDING(pVCpu, VMCPU_FF_PGM_SYNC_CR3 | VMCPU_FF_PGM_SYNC_CR3_NON_GLOBAL))
1307 Assert(pVCpu->em.s.enmState != EMSTATE_WAIT_SIPI);
1308 int rc = PGMSyncCR3(pVCpu, pCtx->cr0, pCtx->cr3, pCtx->cr4, VMCPU_FF_IS_SET(pVCpu, VMCPU_FF_PGM_SYNC_CR3));
1312 Assert(!VMCPU_FF_IS_PENDING(pVCpu, VMCPU_FF_SELM_SYNC_GDT | VMCPU_FF_SELM_SYNC_LDT));
1316 rc = PGMPrefetchPage(pVCpu, SELMToFlat(pVM, DISSELREG_CS, CPUMCTX2CORE(pCtx), pCtx->rip));
1318 rc = PGMPrefetchPage(pVCpu, SELMToFlat(pVM, DISSELREG_SS, CPUMCTX2CORE(pCtx), pCtx->rsp));
1326 rc = PGMSyncCR3(pVCpu, pCtx->cr0, pCtx->cr3, pCtx->cr4, VMCPU_FF_IS_SET(pVCpu, VMCPU_FF_PGM_SYNC_CR3));
1331 Assert(!VMCPU_FF_IS_PENDING(pVCpu, VMCPU_FF_SELM_SYNC_GDT | VMCPU_FF_SELM_SYNC_LDT));
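
The emR3RawForcedActions lines above (1251-1331) service pending per-VCPU work before guest code is re-entered: descriptor-table sync, IDT sync, TSS sync and CR3/page-table sync are each guarded by a force flag and handled in a fixed order, and any failure is passed straight back to the caller. A standalone sketch of that check-and-service pattern; the flag names and handlers are illustrative, not VirtualBox's:

/* Service pending "forced action" bits in priority order, stopping on error. */
#include <stdint.h>
#include <stdio.h>

#define FF_SYNC_GDT_LDT  (1u << 0)
#define FF_SYNC_IDT      (1u << 1)
#define FF_SYNC_TSS      (1u << 2)
#define FF_SYNC_CR3      (1u << 3)

typedef struct { uint32_t fForcedActions; } VCPUSTATE;

static int syncDescriptorTables(VCPUSTATE *p) { puts("sync GDT/LDT"); p->fForcedActions &= ~FF_SYNC_GDT_LDT; return 0; }
static int syncIdt(VCPUSTATE *p)              { puts("sync IDT");     p->fForcedActions &= ~FF_SYNC_IDT;     return 0; }
static int syncTss(VCPUSTATE *p)              { puts("sync TSS");     p->fForcedActions &= ~FF_SYNC_TSS;     return 0; }
static int syncCr3(VCPUSTATE *p)              { puts("sync CR3");     p->fForcedActions &= ~FF_SYNC_CR3;     return 0; }

/* Handle every pending item; bail out on the first failure so the caller
 * can propagate the status, as the real code does with rc. */
static int serviceForcedActions(VCPUSTATE *p)
{
    int rc = 0;
    if (!rc && (p->fForcedActions & FF_SYNC_GDT_LDT)) rc = syncDescriptorTables(p);
    if (!rc && (p->fForcedActions & FF_SYNC_IDT))     rc = syncIdt(p);
    if (!rc && (p->fForcedActions & FF_SYNC_TSS))     rc = syncTss(p);
    if (!rc && (p->fForcedActions & FF_SYNC_CR3))     rc = syncCr3(p);
    return rc;
}

int main(void)
{
    VCPUSTATE Cpu = { FF_SYNC_IDT | FF_SYNC_CR3 };
    int rc = serviceForcedActions(&Cpu);
    printf("rc=%d still pending=%#x\n", rc, (unsigned)Cpu.fForcedActions);
    return rc;
}
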
1368 * @param pVCpu Pointer to the VMCPU.
1372 int emR3RawExecute(PVM pVM, PVMCPU pVCpu, bool *pfFFDone)
1374 STAM_REL_PROFILE_ADV_START(&pVCpu->em.s.StatRAWTotal, a);
1377 PCPUMCTX pCtx = pVCpu->em.s.pCtx;
1379 pVCpu->em.s.fForceRAW = false;
1391 STAM_PROFILE_ADV_START(&pVCpu->em.s.StatRAWEntry, b);
1398 Assert(REMR3QueryPendingInterrupt(pVM, pVCpu) == REM_NO_PENDING_IRQ);
1405 if ( !VMCPU_FF_IS_PENDING(pVCpu, VMCPU_FF_PGM_SYNC_CR3 | VMCPU_FF_PGM_SYNC_CR3_NON_GLOBAL)
1418 || VMCPU_FF_IS_PENDING(pVCpu, VMCPU_FF_HIGH_PRIORITY_PRE_RAW_MASK))
1420 rc = emR3RawForcedActions(pVM, pVCpu, pCtx);
1421 VBOXVMM_EM_FF_RAW_RET(pVCpu, rc);
1431 rc = CPUMRawEnter(pVCpu);
1434 STAM_PROFILE_ADV_STOP(&pVCpu->em.s.StatRAWEntry, b);
1445 STAM_PROFILE_ADV_SUSPEND(&pVCpu->em.s.StatRAWEntry, b);
1447 STAM_PROFILE_ADV_RESUME(&pVCpu->em.s.StatRAWEntry, b);
1449 || VMCPU_FF_IS_PENDING(pVCpu, VMCPU_FF_HIGH_PRIORITY_PRE_RAW_MASK))
1451 rc = emR3RawForcedActions(pVM, pVCpu, pCtx);
1452 VBOXVMM_EM_FF_RAW_RET(pVCpu, rc);
1455 rc = CPUMRawLeave(pVCpu, rc);
1470 pCtx->cs.Sel, pCtx->eip, pCtx->ss.Sel, pCtx->esp, CPUMRawGetEFlags(pVCpu), !!(pGCState->uVMFlags & X86_EFL_IF), pCtx->eflags.Bits.u1IF,
1485 STAM_PROFILE_ADV_STOP(&pVCpu->em.s.StatRAWEntry, b);
1486 if (RT_LIKELY(emR3IsExecutionAllowed(pVM, pVCpu)))
1488 STAM_PROFILE_START(&pVCpu->em.s.StatRAWExec, c);
1489 VBOXVMM_EM_RAW_RUN_PRE(pVCpu, pCtx);
1490 rc = VMMR3RawRunGC(pVM, pVCpu);
1491 VBOXVMM_EM_RAW_RUN_RET(pVCpu, pCtx, rc);
1492 STAM_PROFILE_STOP(&pVCpu->em.s.StatRAWExec, c);
1497 STAM_REL_PROFILE_ADV_START(&pVCpu->em.s.StatCapped, u);
1499 STAM_REL_PROFILE_ADV_STOP(&pVCpu->em.s.StatCapped, u);
1502 STAM_PROFILE_ADV_START(&pVCpu->em.s.StatRAWTail, d);
1505 (pCtx->ss.Sel & X86_SEL_RPL), pCtx->eip, pCtx->esp, CPUMRawGetEFlags(pVCpu),
1515 rc = CPUMRawLeave(pVCpu, rc);
1516 VMCPU_FF_CLEAR(pVCpu, VMCPU_FF_RESUME_GUEST_MASK);
1518 || VMCPU_FF_IS_PENDING(pVCpu, VMCPU_FF_HIGH_PRIORITY_POST_MASK))
1519 rc = emR3HighPriorityPostForcedActions(pVM, pVCpu, rc);
1525 if ( !VMCPU_FF_IS_PENDING(pVCpu, VMCPU_FF_SELM_SYNC_TSS | VMCPU_FF_SELM_SYNC_GDT) /* GDT implies TSS at the moment. */
1543 LogIt(NULL, 0, LOG_GROUP_PATM, ("Patch code interrupted at %RRv for reason %Rrc\n", (RTRCPTR)CPUMGetGuestEIP(pVCpu), rc));
1549 if ( !VMCPU_FF_IS_PENDING(pVCpu, VMCPU_FF_PGM_SYNC_CR3 | VMCPU_FF_PGM_SYNC_CR3_NON_GLOBAL)
1563 STAM_PROFILE_ADV_STOP(&pVCpu->em.s.StatRAWTail, d);
1566 rc = emR3RawHandleRC(pVM, pVCpu, pCtx, rc);
1569 rc = emR3RawUpdateForceFlag(pVM, pVCpu, pCtx, rc);
1572 STAM_PROFILE_ADV_STOP(&pVCpu->em.s.StatRAWTail, d);
1581 TMTimerPollVoid(pVM, pVCpu);
1583 STAM_PROFILE_ADV_STOP(&pVCpu->em.s.StatRAWTail, d);
1585 || VMCPU_FF_IS_PENDING(pVCpu, ~VMCPU_FF_HIGH_PRIORITY_PRE_RAW_MASK))
1589 STAM_REL_PROFILE_ADV_SUSPEND(&pVCpu->em.s.StatRAWTotal, a);
1590 rc = emR3ForcedActions(pVM, pVCpu, rc);
1591 VBOXVMM_EM_FF_ALL_RET(pVCpu, rc);
1592 STAM_REL_PROFILE_ADV_RESUME(&pVCpu->em.s.StatRAWTotal, a);
1596 rc = emR3RawUpdateForceFlag(pVM, pVCpu, pCtx, rc);
1612 STAM_REL_PROFILE_ADV_STOP(&pVCpu->em.s.StatRAWTotal, a);
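
Taken together, the emR3RawExecute lines above (1372-1612) outline the raw-mode run loop: service pre-entry forced actions, enter the guest context (CPUMRawEnter), run guest code (VMMR3RawRunGC), leave the guest context again, then process the returned status and post-run actions, spinning until either the status code or a pending action hands control back to the outer EM state machine. A standalone skeleton of that loop shape, with stand-ins for every VirtualBox call:

/* Minimal run-loop skeleton: prepare, enter, run, leave, decide, repeat. */
#include <stdbool.h>
#include <stdio.h>

#define RC_SUCCESS        0
#define RC_EXIT_TO_OUTER  1     /* e.g. "switch to another execution engine" */

typedef struct
{
    unsigned fForcedActions;    /* pending work bits */
    unsigned cInstructions;     /* fake progress counter */
} RUNSTATE;

static int  servicePendingWork(RUNSTATE *p) { p->fForcedActions = 0; return RC_SUCCESS; }
static void rawEnter(RUNSTATE *p)           { (void)p; /* load the guest view   */ }
static void rawLeave(RUNSTATE *p)           { (void)p; /* restore the host view */ }

static int runGuestChunk(RUNSTATE *p)
{
    p->cInstructions += 100;    /* pretend to execute a burst of guest code */
    return p->cInstructions >= 300 ? RC_EXIT_TO_OUTER : RC_SUCCESS;
}

static int rawExecute(RUNSTATE *p, bool *pfFFDone)
{
    int rc = RC_SUCCESS;
    for (;;)
    {
        if (p->fForcedActions)          /* pre-entry forced actions */
        {
            rc = servicePendingWork(p);
            if (rc != RC_SUCCESS)
                break;
        }
        rawEnter(p);                    /* CPUMRawEnter analogue */
        rc = runGuestChunk(p);          /* VMMR3RawRunGC analogue */
        rawLeave(p);                    /* CPUMRawLeave analogue */

        if (rc != RC_SUCCESS)           /* status says: back to the outer loop */
            break;
        if (p->fForcedActions)          /* post-run actions can force an exit too */
        {
            *pfFFDone = true;
            break;
        }
    }
    return rc;
}

int main(void)
{
    RUNSTATE State = { 0, 0 };
    bool fFFDone = false;
    int rc = rawExecute(&State, &fFFDone);
    printf("rc=%d instructions=%u fFFDone=%d\n", rc, State.cInstructions, (int)fFFDone);
    return 0;
}
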