Lines Matching refs:pVCpu (cross-reference hits for the pVCpu symbol in VirtualBox's VMCPU macros, include/VBox/vmm/vm.h). The leading number on each line is its line number in that header; gaps in the numbering are non-matching lines omitted by the search.

259 #define VMCPU_GET_STATE(pVCpu)              ( (pVCpu)->enmState )
261 #define VMCPU_SET_STATE(pVCpu, enmNewState) \
262     ASMAtomicWriteU32((uint32_t volatile *)&(pVCpu)->enmState, (enmNewState))
264 #define VMCPU_CMPXCHG_STATE(pVCpu, enmNewState, enmOldState) \
265     ASMAtomicCmpXchgU32((uint32_t volatile *)&(pVCpu)->enmState, (enmNewState), (enmOldState))
268 # define VMCPU_ASSERT_STATE(pVCpu, enmExpectedState) \
270         VMCPUSTATE enmState = VMCPU_GET_STATE(pVCpu); \
273         enmState, enmExpectedState, (pVCpu)->idCpu)); \
276 # define VMCPU_ASSERT_STATE(pVCpu, enmExpectedState) do { } while (0)
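The three state accessors differ in their guarantees: VMCPU_GET_STATE is a plain read, VMCPU_SET_STATE an atomic store, and VMCPU_CMPXCHG_STATE an atomic compare-and-exchange that returns whether the swap happened, which is what makes racing state transitions detectable. A minimal sketch of how they combine, assuming the usual VMCPUSTATE_STARTED and VMCPUSTATE_STARTED_EXEC values (the surrounding code is illustrative, not from the listing):

    /* Unconditional transition, e.g. during orderly startup on the EMT: */
    VMCPU_SET_STATE(pVCpu, VMCPUSTATE_STARTED);

    /* Conditional transition that tolerates racing threads: true only if
     * enmState still held VMCPUSTATE_STARTED at the instant
     * VMCPUSTATE_STARTED_EXEC was swapped in. */
    if (VMCPU_CMPXCHG_STATE(pVCpu, VMCPUSTATE_STARTED_EXEC, VMCPUSTATE_STARTED))
    {
        /* ... execute guest code ... */
        VMCPU_SET_STATE(pVCpu, VMCPUSTATE_STARTED);
    }

    /* Strict builds: document and enforce the expected state at a checkpoint. */
    VMCPU_ASSERT_STATE(pVCpu, VMCPUSTATE_STARTED);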
528 * @param pVCpu Pointer to the VMCPU.
531 #define VMCPU_FF_SET(pVCpu, fFlag) ASMAtomicOrU32(&(pVCpu)->fLocalForcedActions, (fFlag))
551 * @param pVCpu Pointer to the VMCPU.
554 #define VMCPU_FF_CLEAR(pVCpu, fFlag) ASMAtomicAndU32(&(pVCpu)->fLocalForcedActions, ~(fFlag))
567 * @param pVCpu Pointer to the VMCPU.
570 #define VMCPU_FF_IS_SET(pVCpu, fFlag) (((pVCpu)->fLocalForcedActions & (fFlag)) == (fFlag))
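VMCPU_FF_SET and VMCPU_FF_CLEAR update the per-CPU fLocalForcedActions mask atomically (ASMAtomicOrU32/ASMAtomicAndU32), while VMCPU_FF_IS_SET is a plain read that is true only when every bit of fFlag is set, so it is intended for a single flag or a complete mask. A sketch of the usual producer/consumer pattern, using the real VMCPU_FF_TIMER flag:

    /* Producer (any thread): flag that this vCPU has timer work pending. */
    VMCPU_FF_SET(pVCpu, VMCPU_FF_TIMER);

    /* Consumer (typically the EMT loop): service and acknowledge it. */
    if (VMCPU_FF_IS_SET(pVCpu, VMCPU_FF_TIMER))
    {
        /* ... run the pending timer work ... */
        VMCPU_FF_CLEAR(pVCpu, VMCPU_FF_TIMER);
    }

Note the test-then-clear window here: another thread can set the flag again between the check and the clear, which is exactly what the next macro closes.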
595 * @param pVCpu Pointer to the VMCPU.
598 #define VMCPU_FF_TEST_AND_CLEAR(pVCpu, iBit) (ASMAtomicBitTestAndClear(&(pVCpu)->fLocalForcedActions, iBit##_BIT))
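VMCPU_FF_TEST_AND_CLEAR clears one bit and returns its previous value in a single atomic step. Note the token pasting, iBit##_BIT: the argument must be a flag name with a companion *_BIT bit-index constant, not an arbitrary mask. A sketch, assuming VMCPU_FF_TIMER has such a VMCPU_FF_TIMER_BIT companion:

    /* Fetch-and-clear atomically; the macro expands to use
     * VMCPU_FF_TIMER_BIT, the bit index, not the bit mask. */
    if (VMCPU_FF_TEST_AND_CLEAR(pVCpu, VMCPU_FF_TIMER))
    {
        /* The flag was pending and is already cleared, so no request can
         * be observed in a half-acknowledged state. */
    }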
603 * @param pVCpu Pointer to the VMCPU.
606 #define VMCPU_FF_IS_PENDING(pVCpu, fFlags) RT_BOOL((pVCpu)->fLocalForcedActions & (fFlags))
622 * @param pVCpu Pointer to the VMCPU.
626 #define VMCPU_FF_IS_PENDING_EXCEPT(pVCpu, fFlags, fExcpt) ( ((pVCpu)->fLocalForcedActions & (fFlags)) && !((pVCpu)->fLocalForcedActions & (fExcpt)) )
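In contrast to VMCPU_FF_IS_SET, VMCPU_FF_IS_PENDING is true if any bit of fFlags is set, and VMCPU_FF_IS_PENDING_EXCEPT additionally requires that no bit of fExcpt is set. A sketch using VirtualBox's interrupt and request flag names:

    /* Is at least one interrupt source pending? */
    if (VMCPU_FF_IS_PENDING(pVCpu, VMCPU_FF_INTERRUPT_APIC | VMCPU_FF_INTERRUPT_PIC))
    {
        /* ... an interrupt wants delivering ... */
    }

    /* Interrupt pending, but only if no cross-thread request has to be
     * serviced first: */
    if (VMCPU_FF_IS_PENDING_EXCEPT(pVCpu,
                                   VMCPU_FF_INTERRUPT_APIC | VMCPU_FF_INTERRUPT_PIC,
                                   VMCPU_FF_REQUEST))
    {
        /* ... safe to dispatch the interrupt now ... */
    }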
645 # define VMCPU_IS_EMT(pVCpu) true
647 # define VMCPU_IS_EMT(pVCpu) ((pVCpu) && ((pVCpu) == VMMGetCpu((pVCpu)->CTX_SUFF(pVM))))
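The two VMCPU_IS_EMT definitions reflect execution context: in the first there is only one thread, so the check collapses to true; otherwise the calling thread is resolved via VMMGetCpu and compared against pVCpu, with NULL tolerated. Typical use is routing work by thread, sketched here with a hypothetical worker:

    int rc;
    if (VMCPU_IS_EMT(pVCpu))
        rc = doWorkOnEmt(pVCpu);        /* hypothetical EMT-only worker */
    else
        rc = VERR_VM_THREAD_NOT_EMT;    /* real callers would queue a
                                         * request to the EMT instead */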
668 # define VMCPU_ASSERT_EMT(pVCpu) Assert(VMCPU_IS_EMT(pVCpu))
670 # define VMCPU_ASSERT_EMT(pVCpu) AssertMsg(VMCPU_IS_EMT(pVCpu), \
672         RTThreadNativeSelf(), (pVCpu) ? (pVCpu)->hNativeThreadR0 : 0, \
673         (pVCpu) ? (pVCpu)->idCpu : 0))
675 # define VMCPU_ASSERT_EMT(pVCpu) AssertMsg(VMCPU_IS_EMT(pVCpu), \
677         RTThreadNativeSelf(), (pVCpu)->hNativeThread, (pVCpu)->idCpu))
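The three VMCPU_ASSERT_EMT variants (release, ring-0, ring-3) differ only in how much they can report on failure: the ring-0 form tolerates a NULL pVCpu and prints hNativeThreadR0, the ring-3 form prints hNativeThread. Usage is one line at the top of any function with an EMT-only contract; a sketch:

    /* Hypothetical helper that must only run on this vCPU's EMT. */
    static void vcpuLocalUpdate(PVMCPU pVCpu)
    {
        VMCPU_ASSERT_EMT(pVCpu);   /* strict builds report both thread ids */

        /* EMT-only members of pVCpu may now be touched without locking,
         * since only this thread ever writes them. */
    }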
698 # define VMCPU_ASSERT_EMT_RETURN(pVCpu, rc) AssertReturn(VMCPU_IS_EMT(pVCpu), (rc))
700 # define VMCPU_ASSERT_EMT_RETURN(pVCpu, rc) AssertReturn(VMCPU_IS_EMT(pVCpu), (rc))
702 # define VMCPU_ASSERT_EMT_RETURN(pVCpu, rc) \
703     AssertMsgReturn(VMCPU_IS_EMT(pVCpu), \
705         RTThreadNativeSelf(), (pVCpu)->hNativeThread, (pVCpu)->idCpu), \
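VMCPU_ASSERT_EMT_RETURN is the guarded form for functions reachable from foreign threads: strict builds assert, but every build also bails out with rc instead of continuing on the wrong thread. A sketch, with a hypothetical API name:

    VMMR3DECL(int) SomeVCpuOperation(PVMCPU pVCpu)
    {
        VMCPU_ASSERT_EMT_RETURN(pVCpu, VERR_VM_THREAD_NOT_EMT);

        /* ... EMT-only work ... */
        return VINF_SUCCESS;
    }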
714 # define VMCPU_ASSERT_EMT_OR_GURU(pVCpu) Assert( VMCPU_IS_EMT(pVCpu) \
715             || pVCpu->CTX_SUFF(pVM)->enmVMState == VMSTATE_GURU_MEDITATION \
716             || pVCpu->CTX_SUFF(pVM)->enmVMState == VMSTATE_GURU_MEDITATION_LS )
718 # define VMCPU_ASSERT_EMT_OR_GURU(pVCpu) \
719     AssertMsg( VMCPU_IS_EMT(pVCpu) \
720                || pVCpu->CTX_SUFF(pVM)->enmVMState == VMSTATE_GURU_MEDITATION \
721                || pVCpu->CTX_SUFF(pVM)->enmVMState == VMSTATE_GURU_MEDITATION_LS, \
723         RTThreadNativeSelf(), (pVCpu)->hNativeThread, (pVCpu)->idCpu))
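VMCPU_ASSERT_EMT_OR_GURU relaxes the EMT requirement once the VM sits in guru meditation (including the live-save variant, _LS): the VM is wedged at that point and diagnostic code legitimately inspects vCPU state from other threads, such as a debugger thread. A sketch (the function is hypothetical):

    /* Dump helper runnable on the EMT, or on another thread after a
     * guru meditation. */
    static void vcpuDumpState(PVMCPU pVCpu)
    {
        VMCPU_ASSERT_EMT_OR_GURU(pVCpu);
        /* Read-only inspection of pVCpu is acceptable in either case. */
    }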
731 # define VMCPU_ASSERT_EMT_OR_NOT_RUNNING(pVCpu) \
732     Assert( VMCPU_IS_EMT(pVCpu) \
733             || pVCpu->CTX_SUFF(pVM)->enmVMState == VMSTATE_RUNNING \
734             || pVCpu->CTX_SUFF(pVM)->enmVMState == VMSTATE_RUNNING_LS \
735             || pVCpu->CTX_SUFF(pVM)->enmVMState == VMSTATE_RUNNING_FT )
737 # define VMCPU_ASSERT_EMT_OR_NOT_RUNNING(pVCpu) \
738     AssertMsg( VMCPU_IS_EMT(pVCpu) \
739                || pVCpu->CTX_SUFF(pVM)->enmVMState == VMSTATE_RUNNING \
740                || pVCpu->CTX_SUFF(pVM)->enmVMState == VMSTATE_RUNNING_LS \
741                || pVCpu->CTX_SUFF(pVM)->enmVMState == VMSTATE_RUNNING_FT, \
743         RTThreadNativeSelf(), (pVCpu)->hNativeThread, (pVCpu)->idCpu))
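VMCPU_ASSERT_EMT_OR_NOT_RUNNING follows the same shape but keys the escape hatch off the global VM state instead; as listed, the condition accepts the three RUNNING variants (plain, live-save _LS, fault-tolerant _FT) alongside the EMT check. Usage is the same one-liner:

    VMCPU_ASSERT_EMT_OR_NOT_RUNNING(pVCpu);   /* at the top of the function */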
803 #define VMCPU_ASSERT_VALID_EXT_RETURN(pVCpu, rc) \
804     AssertMsgReturn( RT_VALID_ALIGNED_PTR(pVCpu, 64) \
805                      && RT_VALID_ALIGNED_PTR((pVCpu)->CTX_SUFF(pVM), PAGE_SIZE) \
806                      && (unsigned)(pVCpu)->CTX_SUFF(pVM)->enmVMState < (unsigned)VMSTATE_DESTROYING, \
807                      ("pVCpu=%p pVM=%p state %s\n", (pVCpu), RT_VALID_ALIGNED_PTR(pVCpu, 64) ? (pVCpu)->CTX_SUFF(pVM) : NULL, \
808                       RT_VALID_ALIGNED_PTR(pVCpu, 64) && RT_VALID_ALIGNED_PTR((pVCpu)->CTX_SUFF(pVM), PAGE_SIZE) \
809                       ? VMGetStateName((pVCpu)->pVMR3->enmVMState) : ""), \
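VMCPU_ASSERT_VALID_EXT_RETURN is the entry gate for "external" calls that may arrive with a stale handle during teardown: it checks the 64-byte alignment of pVCpu, the page alignment of the owning VM pointer, and that the VM state is still below VMSTATE_DESTROYING, otherwise returning rc (the trailing (rc) argument of AssertMsgReturn falls on a non-matching line and is therefore not shown above). A sketch of a typical entry sequence, with a hypothetical API name:

    VMMR3DECL(int) SomeExternalQuery(PVMCPU pVCpu, uint32_t *puValue)
    {
        /* Validate the handle before dereferencing anything else. */
        VMCPU_ASSERT_VALID_EXT_RETURN(pVCpu, VERR_INVALID_VM_HANDLE);
        AssertPtrReturn(puValue, VERR_INVALID_POINTER);

        *puValue = 0;   /* ... produce the real answer here ... */
        return VINF_SUCCESS;
    }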