Lines Matching defs:entry

66 static void x86pte_zero(htable_t *dest, uint_t entry, uint_t count);
114 static x86pte_t x86pte_cas(htable_t *ht, uint_t entry, x86pte_t old,
1086 * Unlink an entry for a table at vaddr and level out of the existing table
1092 uint_t entry = htable_va2entry(vaddr, higher);
1100 found = x86pte_cas(higher, entry, expect, 0);
1114 * When a top level VLP page table entry changes, we must issue
1130 * Link an entry for a new table at vaddr and level into the existing table
1136 uint_t entry = htable_va2entry(vaddr, higher);
1146 found = x86pte_cas(higher, entry, 0, newptp);
1151 * When any top level VLP page table entry changes, we must issue
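
The link and unlink paths above both go through x86pte_cas() so that a racing
update is detected instead of being silently overwritten. A minimal user-space
sketch of that pattern with C11 atomics; the names are hypothetical stand-ins,
not the illumos kernel interfaces:

#include <assert.h>
#include <stdatomic.h>
#include <stdint.h>

typedef uint64_t pte_t;

/* Link a new lower-level table into an empty slot of the higher table. */
static void
link_ptp(_Atomic pte_t *slot, pte_t newptp)
{
    pte_t expect = 0;

    /* The slot must currently be 0; anything else means corruption. */
    if (!atomic_compare_exchange_strong(slot, &expect, newptp))
        assert(!"slot was not empty");
}

/* Unlink a table: the slot must still hold exactly what we expect. */
static void
unlink_ptp(_Atomic pte_t *slot, pte_t expect)
{
    pte_t found = expect;

    if (!atomic_compare_exchange_strong(slot, &found, 0))
        assert(!"unexpected PTE value");
}

int
main(void)
{
    _Atomic pte_t slot = 0;

    link_ptp(&slot, 0x1000 | 0x1);      /* hypothetical PFN plus present bit */
    unlink_ptp(&slot, 0x1000 | 0x1);
    return (0);
}
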
1322 * Acquires a hold on a known htable (from a locked hment entry).
1592 * Walk through a given htable looking for the first valid entry. This
1597 * If the routine finds a valid entry in the htable (at or beyond the
1600 * caller's responsibility to determine which. If no valid entry is
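
A small sketch of the "first valid entry at or beyond a given index" scan
described above, over a plain array standing in for a page table; the valid
bit and table size are assumptions, not the kernel routine:

#include <stdint.h>
#include <stdio.h>

#define PTES_PER_TABLE  512
#define PT_VALID        0x1     /* assumed: low bit marks a valid entry */

typedef uint64_t pte_t;

/*
 * Return the index of the first valid entry at or beyond 'start', or -1
 * if the rest of the table is empty.  The caller decides whether the
 * entry maps a page or a lower-level table.
 */
static int
first_valid_entry(const pte_t *table, unsigned start)
{
    for (unsigned e = start; e < PTES_PER_TABLE; e++) {
        if (table[e] & PT_VALID)
            return ((int)e);
    }
    return (-1);
}

int
main(void)
{
    pte_t table[PTES_PER_TABLE] = { 0 };

    table[7] = 0x2000 | PT_VALID;
    printf("first valid at or after 3: %d\n", first_valid_entry(table, 3));
    return (0);
}
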
1734 * Find lowest table with any entry for given address.
1770 * Find the htable and page table entry index of the given virtual address
1773 * entry, and has a hold on the htable.
1779 uint_t *entry,
1794 if (entry != NULL)
1795 *entry = e;
1804 * Find the htable and page table entry index of the given virtual address.
1807 * entry, and has a hold on the htable.
1810 htable_getpage(struct hat *hat, uintptr_t vaddr, uint_t *entry)
1820 if (entry)
1821 *entry = e;
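
Both lookup routines return the table and pass the entry index back through an
out parameter the caller may leave NULL. A hedged sketch of that calling
convention with hypothetical types, not htable_getpte()/htable_getpage()
themselves:

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

#define PTES_PER_TABLE  512
#define LEVEL_SHIFT(l)  (12 + 9 * (l))  /* assumed 64-bit mode shifts */

typedef uint64_t pte_t;

typedef struct table {
    int   t_level;
    pte_t t_ptes[PTES_PER_TABLE];
} table_t;

/*
 * Find the table mapping 'va' and, when asked, the index of the entry
 * within it.  A real lookup would walk down from the top level; this
 * sketch has a single table.
 */
static table_t *
lookup_page(table_t *t, uint64_t va, unsigned *entry)
{
    unsigned e = (va >> LEVEL_SHIFT(t->t_level)) & (PTES_PER_TABLE - 1);

    if ((t->t_ptes[e] & 0x1) == 0)      /* not present */
        return (NULL);
    if (entry != NULL)
        *entry = e;
    return (t);
}

int
main(void)
{
    static table_t t = { .t_level = 0 };
    unsigned e;

    t.t_ptes[5] = 0x3000 | 0x1;
    if (lookup_page(&t, 5UL << 12, &e) != NULL)
        printf("found entry %u\n", e);
    return (0);
}
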
1869 htable_e2va(htable_t *ht, uint_t entry)
1874 ASSERT(entry < HTABLE_NUM_PTES(ht));
1875 va = ht->ht_vaddr + ((uintptr_t)entry << LEVEL_SHIFT(l));
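
The computation at 1875 scales the entry index by the level's mapping size and
adds the table's base address. A standalone illustration, assuming the usual
64-bit shifts of 12 + 9*level:

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

#define LEVEL_SHIFT(l)  (12 + 9 * (l))  /* 12, 21, 30, 39 */

/* Virtual address covered by entry 'e' of a table whose range starts at 'base'. */
static uint64_t
entry_to_va(uint64_t base, unsigned e, int level)
{
    return (base + ((uint64_t)e << LEVEL_SHIFT(level)));
}

int
main(void)
{
    /* Entry 3 of a level-1 table: three 2MB steps past the base, 0x40600000. */
    printf("%#" PRIx64 "\n", entry_to_va(0x40000000, 3, 1));
    return (0);
}
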
2065 * Atomic retrieval of a pagetable entry
2068 x86pte_get(htable_t *ht, uint_t entry)
2076 ASSERT(entry < mmu.ptes_per_table);
2077 ptep = x86pte_access_pagetable(ht, entry);
2084 * Atomic unconditional set of a page table entry; it returns the previous
2094 x86pte_set(htable_t *ht, uint_t entry, x86pte_t new, void *ptr)
2102 uintptr_t addr = htable_e2va(ht, entry);
2108 ptep = x86pte_access_pagetable(ht, entry);
2172 * Atomic compare and swap of a page table entry. No TLB invalidates are done.
2178 x86pte_cas(htable_t *ht, uint_t entry, x86pte_t old, x86pte_t new)
2193 ma = pa_to_ma(PT_INDEX_PHYSADDR(pfn_to_pa(ht->ht_pfn), entry));
2204 ht->ht_hat->hat_user_ptable), entry));
2217 ptep = x86pte_access_pagetable(ht, entry);
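
Since x86pte_cas() performs no TLB invalidation and hands back the value it
found, a caller can retry until the swap lands. A sketch of that
read-modify-retry pattern with C11 atomics; the bit name is an assumption:

#include <stdatomic.h>
#include <stdint.h>

typedef uint64_t pte_t;

#define PT_WRITABLE     0x2     /* assumed: bit 1 is the writable bit */

/*
 * Clear the writable bit of a live PTE without losing concurrent updates
 * to other bits: re-read and retry until the CAS succeeds.  TLB work is
 * left to the caller, as with x86pte_cas().
 */
static pte_t
pte_clear_writable(_Atomic pte_t *ptep)
{
    pte_t old = atomic_load(ptep);

    while (!atomic_compare_exchange_weak(ptep, &old,
        old & ~(pte_t)PT_WRITABLE))
        ;       /* 'old' now holds the value actually found; try again */
    return (old);
}

int
main(void)
{
    _Atomic pte_t pte = 0x1000 | PT_WRITABLE | 0x1;

    (void) pte_clear_writable(&pte);
    return (0);
}
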
2226 * Invalidate a page table entry as long as it currently maps something that
2236 uint_t entry,
2251 ptep = x86pte_access_pagetable(ht, entry);
2266 ma = pa_to_ma(PT_INDEX_PHYSADDR(pfn_to_pa(ht->ht_pfn), entry));
2289 hat_tlb_inval(ht->ht_hat, htable_e2va(ht, entry));
2298 * Change a page table entry if it currently matches the value in expect.
2303 uint_t entry,
2314 ptep = x86pte_access_pagetable(ht, entry);
2319 hat_tlb_inval(ht->ht_hat, htable_e2va(ht, entry));
2324 * TLB entry before the TLB shootdown finished.
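
In both routines above the ordering matters: the PTE is changed in memory
first, and only then is the TLB flushed for the address that entry covers,
because other CPUs may keep using a stale translation until the shootdown
completes. A user-space model with the flush reduced to a stub:

#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

#define LEVEL_SHIFT(l)  (12 + 9 * (l))  /* assumed 64-bit mode shifts */

typedef uint64_t pte_t;

/* Stand-in for hat_tlb_inval(); a real kernel would IPI the other CPUs. */
static void
tlb_inval(uint64_t va)
{
    printf("flush TLB for %#llx\n", (unsigned long long)va);
}

/*
 * Clear an entry, then flush the address it used to map.  Flushing first
 * would let another CPU refill its TLB from the still-live PTE.
 */
static pte_t
pte_inval(_Atomic pte_t *ptep, uint64_t table_base, unsigned entry, int level)
{
    pte_t old = atomic_exchange(ptep, 0);

    tlb_inval(table_base + ((uint64_t)entry << LEVEL_SHIFT(level)));
    return (old);
}

int
main(void)
{
    _Atomic pte_t pte = 0x5000 | 0x1;

    (void) pte_inval(&pte, 0, 5, 0);
    return (0);
}
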
2352 x86pte_copy(htable_t *src, htable_t *dest, uint_t entry, uint_t count)
2369 dst_va = (caddr_t)x86pte_access_pagetable(dest, entry);
2372 PT_INDEX_PTR(hat_kpm_pfn2va(src->ht_pfn), entry);
2379 src_va = (caddr_t)PT_INDEX_PTR(PWIN_VA(x), entry);
2405 x86pte_copy(htable_t *src, htable_t *dest, uint_t entry, uint_t count)
2411 src_va = (caddr_t)x86pte_access_pagetable(src, entry);
2418 set_pteval(pfn_to_pa(dest->ht_pfn), entry,
2422 htable_e2va(dest, entry) < HYPERVISOR_VIRT_END)
2425 entry, dest->ht_level, pte);
2429 ++entry;
2440 x86pte_zero(htable_t *dest, uint_t entry, uint_t count)
2467 dst_va = (caddr_t)PT_INDEX_PTR(PWIN_VA(x), entry);
2470 dst_va = (caddr_t)x86pte_access_pagetable(dest, entry);
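
Setting aside the mapping windows and hypervisor paths, x86pte_zero() and
x86pte_copy() both operate on 'count' consecutive entries starting at 'entry'.
A plain-memory sketch of that shape, with hypothetical helpers and no PWIN or
kpm handling:

#include <stdint.h>
#include <string.h>

#define PTES_PER_TABLE  512

typedef uint64_t pte_t;

/* Zero 'count' entries of a table starting at index 'entry'. */
static void
pte_zero(pte_t *table, unsigned entry, unsigned count)
{
    memset(&table[entry], 0, count * sizeof (pte_t));
}

/* Copy 'count' entries from one table to another at the same index. */
static void
pte_copy(const pte_t *src, pte_t *dst, unsigned entry, unsigned count)
{
    memcpy(&dst[entry], &src[entry], count * sizeof (pte_t));
}

int
main(void)
{
    pte_t a[PTES_PER_TABLE] = { 0 }, b[PTES_PER_TABLE] = { 0 };

    a[10] = 0x7000 | 0x1;
    pte_copy(a, b, 10, 4);
    pte_zero(a, 10, 4);
    return (0);
}
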