/*
 * GPT entries are raw 64-bit PTEs; the uint64_t underlying type is
 * implied by the atomic_cas_64() use at the end of this listing.
 */
typedef uint64_t vmm_gpt_entry_t;
/* Per-level PTE pointers cached by a GPT iterator (vgi_ prefix). */
vmm_gpt_entry_t *vgi_entries[MAX_GPT_LEVEL];
/* PTE pointer for the iterator entry currently being visited (vgie_ prefix). */
vmm_gpt_entry_t *vgie_ptep;
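/*
 * A minimal usage sketch for the iterator fields above, assuming struct
 * names vmm_gpt_iter_t / vmm_gpt_iter_entry_t and entry points
 * vmm_gpt_iter_init() / vmm_gpt_iter_next(), none of which appear in this
 * listing: count the mapped pages in [addr, addr + len).
 */
static uint_t
sketch_count_mapped(vmm_gpt_t *gpt, uint64_t addr, uint64_t len)
{
	vmm_gpt_iter_t iter;
	vmm_gpt_iter_entry_t entry;
	uint_t count = 0;

	vmm_gpt_iter_init(&iter, gpt, addr, addr + len);
	while (vmm_gpt_iter_next(&iter, &entry)) {
		pfn_t pfn;
		uint_t prot;

		/* vgie_ptep is the PTE pointer for the current GPA. */
		if (entry.vgie_ptep != NULL &&
		    vmm_gpte_is_mapped(entry.vgie_ptep, &pfn, &prot))
			count++;
	}
	return (count);
}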
/*
 * Walk the GPT for a GPA, filling the caller's per-level array of PTE
 * pointers (final parameter assumed: a level_t walk depth; it is cut off
 * in this listing).
 */
uint64_t vmm_gpt_walk(vmm_gpt_t *, uint64_t, vmm_gpt_entry_t **,
    level_t);
/* Install a mapping for pfn at the given PTE, with prot and attributes. */
bool vmm_gpt_map_at(vmm_gpt_t *, vmm_gpt_entry_t *, pfn_t, uint_t, uint8_t);
/* If the PTE maps a page, report its PFN and protection. */
bool vmm_gpte_is_mapped(const vmm_gpt_entry_t *, pfn_t *, uint_t *);
/* Set or clear the accessed bit; presumably returns the prior state. */
bool vmm_gpte_reset_accessed(vmm_gpt_entry_t *, bool);
/* Set or clear the dirty bit; presumably returns the prior state. */
bool vmm_gpte_reset_dirty(vmm_gpt_entry_t *, bool);
/* Non-destructively test the accessed bit. */
bool vmm_gpte_query_accessed(const vmm_gpt_entry_t *);
/* Non-destructively test the dirty bit. */
bool vmm_gpte_query_dirty(const vmm_gpt_entry_t *);
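/*
 * Sketch: harvesting dirty state through the accessors above.  Given a PTE
 * pointer (e.g. from a GPT walk), test-and-clear the dirty bit and record
 * it in a caller-provided bitmap.  This helper and its names are
 * hypothetical, not from the listing.
 */
static void
sketch_harvest_dirty(vmm_gpt_entry_t *ptep, uint8_t *bitmap, size_t idx)
{
	/*
	 * vmm_gpte_reset_dirty(ptep, false) clears the dirty bit and
	 * (presumably) returns its prior state, so a page is reported at
	 * most once per harvest pass.
	 */
	if (ptep != NULL && vmm_gpte_reset_dirty(ptep, false))
		bitmap[idx / 8] |= (uint8_t)(1 << (idx % 8));
}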
/* The page of PTEs backing a GPT node (vgn_ prefix); see its setup below. */
vmm_gpt_entry_t *vgn_entries;
/* Declared separately: entries[] holds PTE pointers, pte holds a value. */
vmm_gpt_entry_t *entries[MAX_GPT_LEVEL];
vmm_gpt_entry_t pte;
/* Snapshot the PTE through the iterator entry's pointer. */
const vmm_gpt_entry_t pte = *entry.vgie_ptep;
/* Definition sites of the vmm_gpte_*() accessors; bool per the prototypes. */
bool
vmm_gpte_is_mapped(const vmm_gpt_entry_t *ptep, pfn_t *pfnp, uint_t *protp)
bool
vmm_gpte_reset_accessed(vmm_gpt_entry_t *ptep, bool on)
bool
vmm_gpte_reset_dirty(vmm_gpt_entry_t *ptep, bool on)
bool
vmm_gpte_query_accessed(const vmm_gpt_entry_t *ptep)
bool
vmm_gpte_query_dirty(const vmm_gpt_entry_t *ptep)
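/*
 * For illustration only: a hypothetical flat implementation of the dirty
 * accessor, assuming the legacy x86 PTE layout where the accessed bit is
 * bit 5 and the dirty bit is bit 6.  The real functions presumably
 * dispatch through per-backend PTE ops (EPT and RVI encode these bits
 * differently).
 */
#define	SKETCH_PT_REF	(1ULL << 5)	/* accessed */
#define	SKETCH_PT_MOD	(1ULL << 6)	/* dirty */

static bool
sketch_gpte_reset_dirty(vmm_gpt_entry_t *ptep, bool on)
{
	vmm_gpt_entry_t old, new;

	/* Atomically flip the bit, preserving the rest of the PTE. */
	do {
		old = *ptep;
		new = on ? (old | SKETCH_PT_MOD) : (old & ~SKETCH_PT_MOD);
	} while (atomic_cas_64(ptep, old, new) != old);

	/* Report whether the page was dirty before the update. */
	return ((old & SKETCH_PT_MOD) != 0);
}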
/* Expose the node's freshly allocated page as its array of PTEs. */
node->vgn_entries = (vmm_gpt_entry_t *)page;
/* Compute the index of a PTE within the table page containing it. */
vmm_gpt_ptep_index(const vmm_gpt_entry_t *ptep)
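/*
 * Sketch of that index computation, assuming PTE table pages are
 * page-sized and naturally aligned: take the PTE's offset within its page
 * and divide by the entry size.  (PAGEOFFSET is from <sys/param.h>; the
 * function name here is a hypothetical stand-in.)
 */
static uint_t
sketch_ptep_index(const vmm_gpt_entry_t *ptep)
{
	return ((uint_t)(((uintptr_t)ptep & PAGEOFFSET) /
	    sizeof (vmm_gpt_entry_t)));
}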
/*
 * Walk definition (final parameter assumed to be a level_t walk depth; it
 * is cut off in this listing).
 */
vmm_gpt_walk(vmm_gpt_t *gpt, uint64_t gpa, vmm_gpt_entry_t **entries,
    level_t depth)
/* The walk starts from the root node's page of PTEs. */
vmm_gpt_entry_t *current_entries = gpt->vgpt_root->vgn_entries;
/* Snapshot the PTE recorded at the current level before acting on it. */
const vmm_gpt_entry_t pte = *entries[lvl];
/* Follow the PTE's PFN to the next-level table via the kernel kpm segment. */
current_entries = (vmm_gpt_entry_t *)hat_kpm_pfn2va(pfn);
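/*
 * Sketch of the level loop those fragments come from, assuming the legacy
 * 4-level x86 layout (9 index bits per level, present bit 0, PFN in bits
 * 51:12) and LEVEL4 == 0 at the root.  The SKETCH_* helpers are stand-ins
 * for the real per-backend PTE operations.
 */
#define	SKETCH_INDEX(gpa, lvl)	(((gpa) >> (39 - 9 * (lvl))) & 0x1ffULL)
#define	SKETCH_PRESENT(pte)	(((pte) & 1ULL) != 0)
#define	SKETCH_PFN(pte)		(((pte) >> 12) & ((1ULL << 40) - 1))

	level_t lvl;
	for (lvl = LEVEL4; ; lvl++) {
		/* Record the PTE pointer for this level, then inspect it. */
		entries[lvl] = &current_entries[SKETCH_INDEX(gpa, lvl)];
		const vmm_gpt_entry_t pte = *entries[lvl];

		if (lvl == depth || !SKETCH_PRESENT(pte))
			break;
		/* Descend to the next-level table's page of PTEs. */
		current_entries =
		    (vmm_gpt_entry_t *)hat_kpm_pfn2va(SKETCH_PFN(pte));
	}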
/*
 * Advance an in-progress walk (final parameter assumed: a level_t walk
 * depth, as with vmm_gpt_walk(); it is cut off in this listing).
 */
vmm_gpt_walk_advance(vmm_gpt_t *gpt, uint64_t gpa, vmm_gpt_entry_t **entries,
    level_t depth)
/* Reuse the PTE pointer recorded for this level by the prior walk. */
vmm_gpt_entry_t *ptep = entries[lvl];
vmm_gpt_entry_t pte = *entries[lvl];
/* Locate the next-level table through the kernel's kpm mapping. */
vmm_gpt_entry_t *next_table =
    (vmm_gpt_entry_t *)hat_kpm_pfn2va(pfn);
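/*
 * Sketch of one advance step assembled from the fragments above, under
 * the assumption that entries[] still holds valid pointers for the levels
 * above lvl: reload the cached pointer for the next level from the table
 * the current PTE points at (SKETCH_* stand-ins from the walk sketch).
 */
	vmm_gpt_entry_t pte = *entries[lvl];
	if (SKETCH_PRESENT(pte)) {
		vmm_gpt_entry_t *next_table =
		    (vmm_gpt_entry_t *)hat_kpm_pfn2va(SKETCH_PFN(pte));
		entries[lvl + 1] = &next_table[SKETCH_INDEX(gpa, lvl + 1)];
	} else {
		entries[lvl + 1] = NULL;
	}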
vmm_gpt_map_at(vmm_gpt_t *gpt, vmm_gpt_entry_t *ptep, pfn_t pfn, uint_t prot,
    uint8_t attr)
/* Have the PTE backend build the leaf entry for this pfn/prot/attr. */
const vmm_gpt_entry_t pte = vmm_gpti_map_page(pfn, prot, attr);
/* Publish the new PTE only if the slot is still empty. */
const vmm_gpt_entry_t old_pte = atomic_cas_64(ptep, 0, pte);
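/*
 * Sketch of the race handling the CAS implies (beyond what the listing
 * shows): a zero old_pte means the empty slot was claimed; anything else
 * means another thread installed a mapping first.
 */
	if (old_pte != 0) {
		/* Lost the race; only an identical prior mapping is benign. */
		return (old_pte == pte);
	}
	return (true);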