Lines Matching defs:u64Val
1642 uint64_t u64Val;
1643 int rc = VMXReadVmcs64(VMX_VMCS64_HOST_FIELD_EFER_FULL, &u64Val);
1647 AssertMsgReturnVoid(u64HostEferMsr == u64Val, ("u64HostEferMsr=%#RX64 u64Val=%#RX64\n", u64HostEferMsr, u64Val));
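
The three matches above (source lines 1642-1647) form a consistency assertion: the 64-bit host EFER field is read back out of the VMCS and compared against the EFER value cached when the host state was set up. A minimal sketch of the same idea, assuming the IPRT ASMRdMsr() helper and the MSR_K6_EFER constant used elsewhere in this code base:

    /* Sketch only, not the original function body. */
    uint64_t const u64HostEferMsr = ASMRdMsr(MSR_K6_EFER);   /* current host EFER */
    uint64_t       u64Val         = 0;
    int rc = VMXReadVmcs64(VMX_VMCS64_HOST_FIELD_EFER_FULL, &u64Val);
    AssertRC(rc);
    AssertMsg(u64HostEferMsr == u64Val,
              ("Host EFER mismatch: MSR=%#RX64 VMCS=%#RX64\n", u64HostEferMsr, u64Val));
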
5010 uint64_t u64Val;
5052 rc = VMXReadVmcs64(VMX_VMCS64_CTRL_EPTP_FULL, &u64Val); AssertRC(rc);
5053 Log4(("VMX_VMCS64_CTRL_EPTP_FULL %#RX64\n", u64Val));
5056 rc = VMXReadVmcsGstN(VMX_VMCS_GUEST_RIP, &u64Val); AssertRC(rc);
5057 Log4(("Old Guest Rip %#RX64 New %#RX64\n", pCtx->rip, u64Val));
5058 rc = VMXReadVmcsGstN(VMX_VMCS_GUEST_RSP, &u64Val); AssertRC(rc);
5059 Log4(("Old Guest Rsp %#RX64 New %#RX64\n", pCtx->rsp, u64Val));
5480 * @param u64Val 16, 32 or 64-bit value.
5482 VMMR0DECL(int) VMXWriteVmcs64Ex(PVMCPU pVCpu, uint32_t idxField, uint64_t u64Val)
5519 rc = VMXWriteVmcs32(idxField, u64Val);
5520 rc |= VMXWriteVmcs32(idxField + 1, (uint32_t)(u64Val >> 32));
5545 if (!(u64Val >> 32))
5548 rc = VMXWriteVmcs32(idxField, (uint32_t)u64Val);
5554 rc = VMXWriteCachedVmcsEx(pVCpu, idxField, u64Val);
5561 AssertMsgFailed(("VMXWriteVmcs64Ex: Invalid field %#RX32 (pVCpu=%p u64Val=%#RX64)\n", idxField, pVCpu, u64Val));
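
Source lines 5519-5520 show the core trick of VMXWriteVmcs64Ex: when a 64-bit wide VMCS field cannot be written in one go, the value is split into two 32-bit VMWRITEs, the low dword at idxField and the high dword at the companion field idxField + 1. Lines 5545-5554 handle natural-width guest fields: if the upper half is zero a single 32-bit write suffices, otherwise the write is deferred to the VMCS write cache (line 5554). A self-contained sketch of the split write, with a hypothetical helper name:

    /* Illustrative sketch only; vmxWriteVmcs64Split is a made-up name, not the VirtualBox API. */
    static int vmxWriteVmcs64Split(uint32_t idxField, uint64_t u64Val)
    {
        int rc = VMXWriteVmcs32(idxField,     (uint32_t)u64Val);          /* low dword  */
        rc    |= VMXWriteVmcs32(idxField + 1, (uint32_t)(u64Val >> 32));  /* high dword */
        AssertRC(rc);
        return rc;
    }
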
5577 * @param u64Val 16, 32 or 64-bit value.
5579 VMMR0DECL(int) VMXWriteCachedVmcsEx(PVMCPU pVCpu, uint32_t idxField, uint64_t u64Val)
5592 pCache->Write.aFieldVal[i] = u64Val;
5598 pCache->Write.aFieldVal[pCache->Write.cValidEntries] = u64Val;
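
Source lines 5592 and 5598 are the two outcomes of the cached-write path: either an entry already queued for idxField gets its value updated, or a new entry is appended at index cValidEntries, so the pending writes can be flushed to the VMCS later in one batch. A simplified sketch of that update-or-append logic (the structure below is an illustration, not the real VMCS cache layout):

    typedef struct VMCSWRITECACHE
    {
        uint32_t cValidEntries;
        uint32_t aField[64];
        uint64_t aFieldVal[64];
    } VMCSWRITECACHE;

    static void vmcsCacheWrite(VMCSWRITECACHE *pCache, uint32_t idxField, uint64_t u64Val)
    {
        /* Update the entry if this field is already queued... */
        for (uint32_t i = 0; i < pCache->cValidEntries; i++)
            if (pCache->aField[i] == idxField)
            {
                pCache->aFieldVal[i] = u64Val;
                return;
            }
        /* ...otherwise append a new entry. */
        Assert(pCache->cValidEntries < RT_ELEMENTS(pCache->aField));
        pCache->aField[pCache->cValidEntries]    = idxField;
        pCache->aFieldVal[pCache->cValidEntries] = u64Val;
        pCache->cValidEntries++;
    }
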
6060 uint64_t u64Val = 0;
6061 rc = VMXReadVmcsGstN(VMX_VMCS_GUEST_RIP, &u64Val);
6064 pMixedCtx->rip = u64Val;
6087 uint64_t u64Val = 0;
6088 rc = VMXReadVmcsGstN(VMX_VMCS_GUEST_RSP, &u64Val);
6091 pMixedCtx->rsp = u64Val;
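
Source lines 6060-6091 show the import pattern repeated throughout this region: a guest field is read from the VMCS into a local u64Val with a natural-width read, the return code is checked, and only then is the value committed to the shared guest context (pMixedCtx). A minimal sketch, with a hypothetical helper name:

    /* Sketch only; the helper name is made up for illustration. */
    static int vmxImportGuestRip(PCPUMCTX pMixedCtx)
    {
        uint64_t u64Val = 0;
        int rc = VMXReadVmcsGstN(VMX_VMCS_GUEST_RIP, &u64Val);
        AssertRCReturn(rc, rc);
        pMixedCtx->rip = u64Val;   /* commit only after a successful read */
        return VINF_SUCCESS;
    }
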
6246 uint64_t u64Val = 0;
6249 rc = VMXReadVmcsGstN(VMX_VMCS_GUEST_SYSENTER_EIP, &u64Val); AssertRCReturn(rc, rc);
6250 pMixedCtx->SysEnter.eip = u64Val;
6255 rc = VMXReadVmcsGstN(VMX_VMCS_GUEST_SYSENTER_ESP, &u64Val); AssertRCReturn(rc, rc);
6256 pMixedCtx->SysEnter.esp = u64Val;
6385 uint64_t u64Val = 0;
6386 rc = VMXReadVmcsGstN(VMX_VMCS_GUEST_CR3, &u64Val);
6387 if (pMixedCtx->cr3 != u64Val)
6389 CPUMSetGuestCR3(pVCpu, u64Val);
6392 PGMUpdateCR3(pVCpu, u64Val);
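
Source lines 6385-6392 add a change check on top of that pattern for CR3: the value read from the VMCS is only pushed into CPUM and PGM when it differs from the cached context, so paging state is not needlessly refreshed on every VM-exit. Roughly (lines that do not reference u64Val are omitted from the matches, so the original has additional code between these lines):

    uint64_t u64Val = 0;
    int rc = VMXReadVmcsGstN(VMX_VMCS_GUEST_CR3, &u64Val);
    AssertRCReturn(rc, rc);
    if (pMixedCtx->cr3 != u64Val)          /* only act on an actual change */
    {
        CPUMSetGuestCR3(pVCpu, u64Val);    /* update the cached guest context */
        PGMUpdateCR3(pVCpu, u64Val);       /* let PGM resync its view of the guest page tables */
    }
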
6487 uint64_t u64Val = 0;
6488 rc = VMXReadVmcsGstNByIdxVal(idxBase, &u64Val);
6490 pSelReg->u64Base = u64Val;
6611 uint64_t u64Val = 0;
6615 rc = VMXReadVmcsGstN(VMX_VMCS_GUEST_GDTR_BASE, &u64Val); AssertRCReturn(rc, rc);
6617 pMixedCtx->gdtr.pGdt = u64Val;
6625 rc = VMXReadVmcsGstN(VMX_VMCS_GUEST_IDTR_BASE, &u64Val); AssertRCReturn(rc, rc);
6627 pMixedCtx->idtr.pIdt = u64Val;
9472 uint64_t u64Val;
9473 rc = VMXReadVmcs64(VMX_VMCS64_GUEST_DEBUGCTL_FULL, &u64Val);
9476 && (u64Val & 0xfffffe3c)) /* Bits 31:9, bits 5:2 MBZ. */
9480 uint64_t u64DebugCtlMsr = u64Val;
9496 rc = VMXReadVmcs64(VMX_VMCS_GUEST_RIP, &u64Val);
9502 HMVMX_CHECK_BREAK(!(u64Val & UINT64_C(0xffffffff00000000)), VMX_IGS_LONGMODE_RIP_INVALID);
9510 rc = VMXReadVmcs64(VMX_VMCS_GUEST_RFLAGS, &u64Val);
9512 HMVMX_CHECK_BREAK(!(u64Val & UINT64_C(0xffffffffffc08028)), /* Bit 63:22, Bit 15, 5, 3 MBZ. */
9514 HMVMX_CHECK_BREAK((u64Val & X86_EFL_RA1_MASK), VMX_IGS_RFLAGS_RESERVED1); /* Bit 1 MB1. */
9515 u32Eflags = u64Val;
9570 rc = VMXReadVmcs64(VMX_VMCS_HOST_SYSENTER_ESP, &u64Val);
9572 HMVMX_CHECK_BREAK(X86_IS_CANONICAL(u64Val), VMX_IGS_SYSENTER_ESP_NOT_CANONICAL);
9574 rc = VMXReadVmcs64(VMX_VMCS_HOST_SYSENTER_EIP, &u64Val);
9576 HMVMX_CHECK_BREAK(X86_IS_CANONICAL(u64Val), VMX_IGS_SYSENTER_EIP_NOT_CANONICAL);
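
Source lines 9570-9576 (and 9861-9867 further down) verify that addresses loaded from the VMCS are canonical. For the 48-bit linear addresses these checks concern, canonical means bits 63:47 are all copies of bit 47, i.e. the address equals the sign extension of its low 48 bits. A minimal predicate expressing the same test the X86_IS_CANONICAL macro performs here:

    /* Sketch of a canonical-address test for 48-bit linear addresses. */
    static inline bool isCanonical48(uint64_t uAddr)
    {
        return (uint64_t)((int64_t)(uAddr << 16) >> 16) == uAddr;
    }
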
9585 rc = VMXReadVmcs64(VMX_VMCS64_GUEST_PERF_GLOBAL_CTRL_FULL, &u64Val);
9587 HMVMX_CHECK_BREAK(!(u64Val & UINT64_C(0xfffffff8fffffffc)),
9596 rc = VMXReadVmcs64(VMX_VMCS64_GUEST_PAT_FULL, &u64Val);
9598 HMVMX_CHECK_BREAK(!(u64Val & UINT64_C(0x707070707070707)), VMX_IGS_PAT_MSR_RESERVED);
9601 uint8_t u8Val = (u64Val & 0xff);
9611 u64Val >>= 8;
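
Source lines 9601 and 9611 belong to a loop over the eight PAT entries: each entry occupies one byte, so the loop inspects the low byte of u64Val and then shifts right by 8 to move to the next entry. The architecturally valid PAT memory types are 0 (UC), 1 (WC), 4 (WT), 5 (WP), 6 (WB) and 7 (UC-); 2 and 3 are reserved. A sketch of such a walk (the exact checks around the two matched lines are not shown in this listing):

    for (unsigned iEntry = 0; iEntry < 8; iEntry++)
    {
        uint8_t const u8Val = (uint8_t)(u64Val & 0xff);   /* current PAT entry */
        bool    const fValidType =    u8Val == 0 || u8Val == 1 || u8Val == 4
                                   || u8Val == 5 || u8Val == 6 || u8Val == 7;
        if (!fValidType)
            break;                                        /* flag an invalid PAT memory type */
        u64Val >>= 8;                                     /* next entry */
    }
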
9621 rc = VMXReadVmcs64(VMX_VMCS64_GUEST_EFER_FULL, &u64Val);
9623 HMVMX_CHECK_BREAK(!(u64Val & UINT64_C(0xfffffffffffff2fe)),
9625 HMVMX_CHECK_BREAK(RT_BOOL(u64Val & MSR_K6_EFER_LMA) == RT_BOOL(pVCpu->hm.s.vmx.u32EntryCtls & VMX_VMCS_CTRL_ENTRY_IA32E_MODE_GUEST),
9629 || RT_BOOL(u64Val & MSR_K6_EFER_LMA) == RT_BOOL(u64Val & MSR_K6_EFER_LME),
9861 rc = VMXReadVmcs64(VMX_VMCS_GUEST_GDTR_BASE, &u64Val);
9863 HMVMX_CHECK_BREAK(X86_IS_CANONICAL(u64Val), VMX_IGS_GDTR_BASE_NOT_CANONICAL);
9865 rc = VMXReadVmcs64(VMX_VMCS_GUEST_IDTR_BASE, &u64Val);
9867 HMVMX_CHECK_BREAK(X86_IS_CANONICAL(u64Val), VMX_IGS_IDTR_BASE_NOT_CANONICAL);
9949 rc = VMXReadVmcs64(VMX_VMCS_GUEST_PENDING_DEBUG_EXCEPTIONS, &u64Val);
9952 HMVMX_CHECK_BREAK(!(u64Val & UINT64_C(0xffffffffffffaff0)), VMX_IGS_LONGMODE_PENDING_DEBUG_RESERVED);
9953 u32Val = u64Val; /* For pending debug exceptions checks below. */
9960 HMVMX_CHECK_BREAK(!(u64Val & 0xffffaff0), VMX_IGS_PENDING_DEBUG_RESERVED);
9982 rc = VMXReadVmcs64(VMX_VMCS64_GUEST_VMCS_LINK_PTR_FULL, &u64Val);
9984 if (u64Val != UINT64_C(0xffffffffffffffff))
9986 HMVMX_CHECK_BREAK(!(u64Val & 0xfff), VMX_IGS_VMCS_LINK_PTR_RESERVED);
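
Source lines 9982-9986 check the VMCS link pointer: the all-ones value means no linked/shadow VMCS is in use; any other value must at least be 4 KiB aligned, which is what masking the low 12 bits verifies. As a short sketch of the same condition:

    if (u64Val != UINT64_C(0xffffffffffffffff))   /* all-ones = no linked VMCS */
        Assert(!(u64Val & 0xfff));                /* otherwise the pointer must be 4 KiB aligned */
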
9999 rc = VMXReadVmcs64(VMX_VMCS64_GUEST_PDPTE0_FULL, &u64Val);
10001 HMVMX_CHECK_BREAK(!(u64Val & X86_PDPE_PAE_MBZ_MASK), VMX_IGS_PAE_PDPTE_RESERVED);
10003 rc = VMXReadVmcs64(VMX_VMCS64_GUEST_PDPTE1_FULL, &u64Val);
10005 HMVMX_CHECK_BREAK(!(u64Val & X86_PDPE_PAE_MBZ_MASK), VMX_IGS_PAE_PDPTE_RESERVED);
10007 rc = VMXReadVmcs64(VMX_VMCS64_GUEST_PDPTE2_FULL, &u64Val);
10009 HMVMX_CHECK_BREAK(!(u64Val & X86_PDPE_PAE_MBZ_MASK), VMX_IGS_PAE_PDPTE_RESERVED);
10011 rc = VMXReadVmcs64(VMX_VMCS64_GUEST_PDPTE3_FULL, &u64Val);
10013 HMVMX_CHECK_BREAK(!(u64Val & X86_PDPE_PAE_MBZ_MASK), VMX_IGS_PAE_PDPTE_RESERVED);
10691 uint64_t u64Val;
10716 rc = VMXReadVmcs64(VMX_VMCS64_CTRL_EPTP_FULL, &u64Val); AssertRC(rc);
10717 Log4(("VMX_VMCS64_CTRL_EPTP_FULL %#RX64\n", u64Val));