L3_PAGE_SIZE
pmap_invalidate_range(pmap, va, va + L3_PAGE_SIZE - 1);
va_last = va + L3_PAGE_SIZE - PAGE_SIZE;
va_last = va + L3_PAGE_SIZE - PAGE_SIZE;
end - start >= L3_PAGE_SIZE) {
start += L3_PAGE_SIZE;
kernel_vm_end = VM_MIN_KERNEL_ADDRESS + nkpt * L3_PAGE_SIZE;
va_next = (sva + L3_PAGE_SIZE) & ~L3_PAGE_MASK;
va_next = (addr + L3_PAGE_SIZE) & ~L3_PAGE_MASK;
addr + L3_PAGE_SIZE > end_addr)
L3_PAGE_SIZE / PAGE_SIZE);
pa = (newpde & (PG_PS_FRAME | PG_A | PG_V)) + L3_PAGE_SIZE - PAGE_SIZE;
if (pmap_remove_ptes(pmap, va, va + L3_PAGE_SIZE, l3e,
for (mt = m; mt < &m[L3_PAGE_SIZE / PAGE_SIZE]; mt++)
pmap->pm_stats.wired_count += L3_PAGE_SIZE / PAGE_SIZE;
pmap_resident_count_inc(pmap, L3_PAGE_SIZE / PAGE_SIZE);
if ((va & L3_PAGE_MASK) == 0 && va + L3_PAGE_SIZE <= end &&
m = vm_radix_iter_jump(&pages, L3_PAGE_SIZE / PAGE_SIZE);
addr < (VM_MIN_KERNEL_ADDRESS + nkpt * L3_PAGE_SIZE))
addr = roundup2(addr, L3_PAGE_SIZE);
kernel_vm_end = (kernel_vm_end + L3_PAGE_SIZE) & ~L3_PAGE_MASK;
kernel_vm_end = (kernel_vm_end + L3_PAGE_SIZE) & ~L3_PAGE_MASK;
pagesizes[1] = L3_PAGE_SIZE;
pv_npg = howmany(vm_phys_segs[vm_phys_nsegs - 1].end, L3_PAGE_SIZE);
pa < ptepa + size; pa += L3_PAGE_SIZE) {
addr += L3_PAGE_SIZE;
pmap_resident_count_inc(pmap, L3_PAGE_SIZE / PAGE_SIZE);
addr += L3_PAGE_SIZE;
eva = sva + L3_PAGE_SIZE;
va_next = (sva + L3_PAGE_SIZE) & ~L3_PAGE_MASK;
if (sva + L3_PAGE_SIZE == va_next && eva >= va_next) {
pmap->pm_stats.wired_count -= (L3_PAGE_SIZE / PAGE_SIZE);
pmap_resident_count_dec(pmap, L3_PAGE_SIZE / PAGE_SIZE);
eva = sva + L3_PAGE_SIZE;
va_next = (sva + L3_PAGE_SIZE) & ~L3_PAGE_MASK;
if (sva + L3_PAGE_SIZE == va_next && eva >= va_next) {
for (mt = m; mt < &m[L3_PAGE_SIZE / PAGE_SIZE]; mt++)
pmap_resident_count_dec(pmap, L3_PAGE_SIZE / PAGE_SIZE);
for (mt = m; mt < &m[L3_PAGE_SIZE / PAGE_SIZE]; mt++)
va_next = (sva + L3_PAGE_SIZE) & ~L3_PAGE_MASK;
if (sva + L3_PAGE_SIZE == va_next && eva >= va_next) {
pmap->pm_stats.wired_count -= L3_PAGE_SIZE / PAGE_SIZE;
if (size < L3_PAGE_SIZE)
if (size - ((L3_PAGE_SIZE - superpage_offset) & L3_PAGE_MASK) < L3_PAGE_SIZE ||
newpde += L3_PAGE_SIZE;
tmpva = trunc_2mpage(tmpva) + L3_PAGE_SIZE;
tmpva += L3_PAGE_SIZE;
pa_end = pa_start + L3_PAGE_SIZE;
pa_end += L3_PAGE_SIZE;
pa_end = pa_start + L3_PAGE_SIZE;
tmpva = trunc_2mpage(tmpva) + L3_PAGE_SIZE;
for (va = start; va < end; va += L3_PAGE_SIZE) {
pa = vm_phys_early_alloc(domain, L3_PAGE_SIZE);
#define L3_PAGE_MASK (L3_PAGE_SIZE-1)
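
Taken together, the matches above reduce to two recurring idioms: rounding a virtual address up to the next L3 boundary with the L3_PAGE_MASK trick, and converting one superpage into its base-page count for the resident/wired accounting. The following standalone sketch (illustrative values, not the kernel code itself) demonstrates both; it assumes L3_PAGE_SIZE is the power-of-two 2MB superpage size implied by trunc_2mpage() and pagesizes[1].

#include <stdio.h>

#define	PAGE_SIZE	4096UL			/* base page */
#define	L3_PAGE_SIZE	(1UL << 21)		/* assumed 2MB L3 superpage */
#define	L3_PAGE_MASK	(L3_PAGE_SIZE - 1)

int
main(void)
{
	unsigned long sva = 0x1fff000UL, eva = 0x4200000UL, va_next;

	for (; sva < eva; sva = va_next) {
		/* Next L3 boundary strictly above sva: the va_next idiom. */
		va_next = (sva + L3_PAGE_SIZE) & ~L3_PAGE_MASK;
		if (va_next < sva || va_next > eva)
			va_next = eva;
		if ((sva & L3_PAGE_MASK) == 0 &&
		    sva + L3_PAGE_SIZE == va_next)
			/* A fully covered L3 entry spans 512 base pages. */
			printf("superpage at %#lx: %lu pages\n", sva,
			    L3_PAGE_SIZE / PAGE_SIZE);
		else
			printf("partial range %#lx..%#lx\n", sva, va_next);
	}
	return (0);
}

Note the difference between the two rounding forms seen in the matches: `(x + L3_PAGE_SIZE) & ~L3_PAGE_MASK` always advances past x, which is what the range loops want, whereas `roundup2(addr, L3_PAGE_SIZE)` returns addr unchanged when it is already aligned, which is what the address-hint code wants.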