/* PGMAllShw.h revision 2fe20148504a5410c92013227801f655898d83aa */
/* $Id$ */
/** @file
* VBox - Page Manager, Shadow Paging Template - All context code.
*/
/*
* Copyright (C) 2006-2007 Sun Microsystems, Inc.
*
* This file is part of VirtualBox Open Source Edition (OSE), as
* available from http://www.virtualbox.org. This file is free software;
* you can redistribute it and/or modify it under the terms of the GNU
* General Public License (GPL) as published by the Free Software
* Foundation, in version 2 as it comes in the "COPYING" file of the
* VirtualBox OSE distribution. VirtualBox OSE is distributed in the
* hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
*
* Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa
* Clara, CA 95054 USA or visit http://www.sun.com if you need
* additional information or have any questions.
*/
/*******************************************************************************
* Defined Constants And Macros *
*******************************************************************************/
/*
 * Map the mode-neutral SHW_* shadow-paging macros onto the concrete X86_*
 * definitions matching the shadow mode this template is instantiated for
 * (selected by PGM_SHW_TYPE at inclusion time).
 */
#if PGM_SHW_TYPE == PGM_TYPE_32BIT
/* Legacy 32-bit shadow paging. */
# define SHW_PDE_PG_MASK X86_PDE_PG_MASK
# define SHW_PD_SHIFT X86_PD_SHIFT
# define SHW_PD_MASK X86_PD_MASK
# define SHW_TOTAL_PD_ENTRIES X86_PG_ENTRIES
# define SHW_PTE_PG_MASK X86_PTE_PG_MASK
# define SHW_PT_SHIFT X86_PT_SHIFT
# define SHW_PT_MASK X86_PT_MASK
# define SHW_POOL_ROOT_IDX PGMPOOL_IDX_PD
#else
/* PAE and AMD64 shadow paging share the 64-bit (PAE) entry layout. */
/* NOTE(review): PSHWPTE/PSHWPDE are only defined in this branch, and the
 * 32-bit branch above defines SHW_TOTAL_PD_ENTRIES while this one does not.
 * Presumably the corresponding definitions were elided from this excerpt —
 * confirm against the full file before relying on either macro in shared code. */
# define PSHWPTE PX86PTEPAE
# define PSHWPDE PX86PDEPAE
# define SHW_PDE_PG_MASK X86_PDE_PAE_PG_MASK
# define SHW_PD_SHIFT X86_PD_PAE_SHIFT
# define SHW_PD_MASK X86_PD_PAE_MASK
# define SHW_PTE_PG_MASK X86_PTE_PAE_PG_MASK
# define SHW_PT_SHIFT X86_PT_PAE_SHIFT
# define SHW_PT_MASK X86_PT_PAE_MASK
#if PGM_SHW_TYPE == PGM_TYPE_AMD64
/* AMD64 long mode: full 4-level hierarchy, 512-entry PDPT. */
# define SHW_PDPT_SHIFT X86_PDPT_SHIFT
# define SHW_PDPT_MASK X86_PDPT_MASK_AMD64
# define SHW_PDPE_PG_MASK X86_PDPE_PG_MASK
# define SHW_POOL_ROOT_IDX PGMPOOL_IDX_PAE_PD
#else /* 32 bits PAE mode */
/* 32-bit PAE: 4-entry PDPT (hence the narrower index mask). */
# define SHW_PDPT_SHIFT X86_PDPT_SHIFT
# define SHW_PDPT_MASK X86_PDPT_MASK_PAE
# define SHW_PDPE_PG_MASK X86_PDPE_PG_MASK
# define SHW_POOL_ROOT_IDX PGMPOOL_IDX_PAE_PD
#endif
#endif
/*******************************************************************************
* Internal Functions *
*******************************************************************************/
/* Forward declaration so GetPage (below) could call ModifyPage if needed. */
PGM_SHW_DECL(int, ModifyPage)(PVM pVM, RTGCUINTPTR GCPtr, size_t cbPages, uint64_t fFlags, uint64_t fMask);
/**
* Gets effective page information (from the VMM page directory).
*
* @returns VBox status.
* @param pVM VM Handle.
* @param GCPtr Guest Context virtual address of the page.
* @param pfFlags Where to store the flags. These are X86_PTE_*.
* @param pHCPhys Where to store the HC physical address of the page.
* This is page aligned.
* @remark You should use PGMMapGetPage() for pages in a mapping.
*/
{
#if PGM_SHW_TYPE == PGM_TYPE_NESTED
return VERR_PAGE_TABLE_NOT_PRESENT;
#else /* PGM_SHW_TYPE != PGM_TYPE_NESTED */
/*
* Get the PDE.
*/
# if PGM_SHW_TYPE == PGM_TYPE_AMD64
/* PML4 */
return VERR_PAGE_TABLE_NOT_PRESENT;
/* PDPT */
if (VBOX_FAILURE(rc))
return rc;
return VERR_PAGE_TABLE_NOT_PRESENT;
/* PD */
if (VBOX_FAILURE(rc))
return rc;
/* Merge accessed, write, user and no-execute bits into the PDE. */
# else /* PGM_TYPE_32BIT */
# endif
return VERR_PAGE_TABLE_NOT_PRESENT;
/*
* Get PT entry.
*/
if (!(Pde.u & PGM_PDFLAGS_MAPPING))
{
if (VBOX_FAILURE(rc))
return rc;
}
else /* mapping: */
{
# if PGM_SHW_TYPE == PGM_TYPE_AMD64
AssertFailed(); /* can't happen */
# else
# if PGM_SHW_TYPE == PGM_TYPE_32BIT
# else /* PAE */
# endif
# endif
}
return VERR_PAGE_NOT_PRESENT;
/*
* Store the results.
* RW and US flags depend on the entire page translation hierarchy - except for
* legacy PAE which has a simplified PDPE.
*/
if (pfFlags)
{
# if PGM_WITH_NX(PGM_SHW_TYPE)
/* The NX bit is determined by a bitwise OR between the PT and PD */
if (fNoExecuteBitValid)
# endif
}
if (pHCPhys)
return VINF_SUCCESS;
#endif /* PGM_SHW_TYPE != PGM_TYPE_NESTED */
}
/**
 * Modify page flags for a range of pages in the shadow context.
 *
 * The existing flags are ANDed with the fMask and ORed with the fFlags.
 *
 * @returns VBox status code.
 * @param pVM VM handle.
 * @param GCPtr Virtual address of the first page in the range. Page aligned!
 * @param cb Size (in bytes) of the range to apply the modification to. Page aligned!
 * @param fFlags The OR mask - page flags X86_PTE_*, excluding the page mask of course.
 * @param fMask The AND mask - page flags X86_PTE_*.
 * Be extremely CAREFUL with ~'ing values because they can be 32-bit!
 * @remark You must use PGMMapModifyPage() for pages in a mapping.
 */
PGM_SHW_DECL(int, ModifyPage)(PVM pVM, RTGCUINTPTR GCPtr, size_t cb, uint64_t fFlags, uint64_t fMask)
{
# if PGM_SHW_TYPE == PGM_TYPE_NESTED
/* Nested paging: no software-managed shadow tables to modify. */
return VERR_PAGE_TABLE_NOT_PRESENT;
# else /* PGM_SHW_TYPE != PGM_TYPE_NESTED */
int rc;
/*
 * Walk page tables and pages till we're done.
 */
/* NOTE(review): the walk bodies appear to have been elided from this excerpt —
 * iPTE and cb updates, and the conditions guarding the bare returns below, are
 * not visible here. Confirm against the complete revision before editing. */
for (;;)
{
/*
 * Get the PDE.
 */
# if PGM_SHW_TYPE == PGM_TYPE_AMD64
/* PML4 */
return VERR_PAGE_TABLE_NOT_PRESENT;
/* PDPT */
if (VBOX_FAILURE(rc))
return rc;
return VERR_PAGE_TABLE_NOT_PRESENT;
/* PD */
if (VBOX_FAILURE(rc))
return rc;
# else /* PGM_TYPE_32BIT */
# endif
return VERR_PAGE_TABLE_NOT_PRESENT;
/*
 * Map the page table.
 */
if (VBOX_FAILURE(rc))
return rc;
{
{
}
/* next page */
if (!cb)
return VINF_SUCCESS;
iPTE++;
}
}
# endif /* PGM_SHW_TYPE != PGM_TYPE_NESTED */
}