#ifndef _ARM_PMAP_H_
#define _ARM_PMAP_H_
#ifdef _KERNEL
#include <arm/cpuconf.h>
#include <arm/pte.h>
#ifndef _LOCORE
#include <arm/cpufunc.h>
#endif
/*
 * Sizing of the two-level L2 descriptor-table bookkeeping:
 * each l2_dtable "bucket" covers L2_BUCKET_SIZE (1 << 4 = 16) L1 slots,
 * and L2_SIZE buckets cover the full 32-bit address space
 * ((32 - L1_S_SHIFT) bits of L1 index, minus the 4 bits handled
 * within one bucket).
 */
#define L2_BUCKET_LOG2 4
#define L2_BUCKET_SIZE (1 << L2_BUCKET_LOG2)
#define L2_LOG2 ((32 - L1_S_SHIFT) - L2_BUCKET_LOG2)
#define L2_SIZE (1 << L2_LOG2)
#ifndef _LOCORE
struct l1_ttable;
struct l2_dtable;
/*
 * Per-pmap cache/TLB flush state.  Two byte-sized flags each for the
 * cache and the TLB are overlaid with 16-bit and 32-bit views so that
 * related flags can be tested or cleared in a single access
 * (cs_all covers everything at once).
 * NOTE(review): the _id/_d accessor names suggest byte [0] is the
 * I-side and byte [1] the D-side flag -- confirm against pmap.c.
 */
union pmap_cache_state {
struct {
union {
u_int8_t csu_cache_b[2];
u_int16_t csu_cache;
} cs_cache_u;
union {
u_int8_t csu_tlb_b[2];
u_int16_t csu_tlb;
} cs_tlb_u;
} cs_s;
u_int32_t cs_all;
};
/* Shorthand accessors for the overlaid fields above. */
#define cs_cache_id cs_s.cs_cache_u.csu_cache_b[0]
#define cs_cache_d cs_s.cs_cache_u.csu_cache_b[1]
#define cs_cache cs_s.cs_cache_u.csu_cache
#define cs_tlb_id cs_s.cs_tlb_u.csu_tlb_b[0]
#define cs_tlb_d cs_s.cs_tlb_u.csu_tlb_b[1]
#define cs_tlb cs_s.cs_tlb_u.csu_tlb
/* cs_all value with every cache/TLB flag set. */
#define PMAP_CACHE_STATE_ALL 0xffffffffu
/*
 * The physical map: one per address space.  Holds the L1 translation
 * table, the sparse array of L2 descriptor tables, and bookkeeping.
 */
struct pmap {
u_int8_t pm_domain; /* ARM domain number used by this pmap */
int pm_remove_all; /* presumably set while tearing the pmap down -- confirm in pmap.c */
struct l1_ttable *pm_l1; /* L1 translation table */
union pmap_cache_state pm_cstate; /* pending cache/TLB flush state */
u_int pm_refs; /* reference count */
struct l2_dtable *pm_l2[L2_SIZE]; /* sparse L2 descriptor tables, one per bucket */
struct pmap_statistics pm_stats; /* resident/wired page counts */
};
typedef struct pmap *pmap_t;
/* Strip the sub-page bits from a physical address. */
#define PMAP_PA_MASK ~((paddr_t)PAGE_MASK)
/* Mapping attribute flags (presumably OR'ed into the low bits of the
 * physical address handed to pmap_enter() -- confirm against callers). */
#define PMAP_NOCACHE 0x1
#define PMAP_DEVICE 0x2
/*
 * Physical/virtual address pair, used by the bootstrap-time mapping
 * helpers declared below.
 */
typedef struct pv_addr {
SLIST_ENTRY(pv_addr) pv_list;
paddr_t pv_pa;
vaddr_t pv_va;
} pv_addr_t;
/* Owner selector passed to the L?_?_PROT() helpers below. */
#define PTE_KERNEL 0
#define PTE_USER 1
/* Cacheability selector for the mapping helpers. */
#define PTE_NOCACHE 0
#define PTE_CACHE 1
#define PTE_PAGETABLE 2
/* Per-page attribute bits kept in vm_page_md.pvh_attrs. */
#define PVF_MOD 0x01 /* page has been modified */
#define PVF_REF 0x02 /* page has been referenced */
#define PVF_WIRED 0x04
#define PVF_WRITE 0x08
#define PVF_EXEC 0x10
/* The kernel's own pmap, statically allocated. */
extern struct pmap kernel_pmap_store;
#define pmap_kernel() (&kernel_pmap_store)
#define pmap_resident_count(pmap) ((pmap)->pm_stats.resident_count)
#define pmap_wired_count(pmap) ((pmap)->pm_stats.wired_count)
/* Modified/referenced tests read the PVF_* bits on the page. */
#define pmap_is_modified(pg) \
(((pg)->mdpage.pvh_attrs & PVF_MOD) != 0)
#define pmap_is_referenced(pg) \
(((pg)->mdpage.pvh_attrs & PVF_REF) != 0)
/* MI hooks that need no work on this port. */
#define pmap_deactivate(p) do { } while (0)
#define pmap_init_percpu() do { } while (0)
#define pmap_unuse_final(p) do { } while (0)
#define pmap_remove_holes(vm) do { } while (0)
#define PMAP_CHECK_COPYIN 1
#define PMAP_GROWKERNEL
/* MD pmap interface; implemented in the ARM pmap code. */
void pmap_bootstrap(pd_entry_t *, vaddr_t, vaddr_t);
int pmap_get_pde_pte(pmap_t, vaddr_t, pd_entry_t **, pt_entry_t **);
int pmap_get_pde(pmap_t, vaddr_t, pd_entry_t **);
void pmap_set_pcb_pagedir(pmap_t, struct pcb *);
void pmap_postinit(void);
void vector_page_setprot(int);
void pmap_kenter_cache(vaddr_t va, paddr_t pa, vm_prot_t prot, int cacheable);
/* Bootstrap-time helpers for building the initial kernel page tables. */
void pmap_map_section(vaddr_t, vaddr_t, paddr_t, int, int);
void pmap_map_entry(vaddr_t, vaddr_t, paddr_t, int, int);
vsize_t pmap_map_chunk(vaddr_t, vaddr_t, paddr_t, vsize_t, int, int);
void pmap_link_l2pt(vaddr_t, vaddr_t, pv_addr_t *);
/* Current top of kernel virtual address space with page tables. */
extern vaddr_t pmap_curmaxkvaddr;
/*
 * Return a pointer to the kernel PTE mapping "va", or NULL when no
 * L2 page table covers that address.
 */
static __inline pt_entry_t *
vtopte(vaddr_t va)
{
	pd_entry_t *pde;
	pt_entry_t *pte;

	return (pmap_get_pde_pte(pmap_kernel(), va, &pde, &pte) != FALSE) ?
	    pte : NULL;
}
/* Nonzero when PTE writes must be cleaned out of the data cache. */
extern int pmap_needs_pte_sync;
#define PMAP_NEEDS_PTE_SYNC pmap_needs_pte_sync
/*
 * Make a freshly written PTE visible to the hardware table walker.
 * When PMAP_NEEDS_PTE_SYNC is set, the PTE's cache line is written
 * back to memory, and -- if an external (L2) cache is present -- the
 * corresponding line of that cache is cleaned too, which requires the
 * physical address of the PTE.
 * (Fix: dropped the stray ';' after the inner if-block that left a
 * useless empty statement in the expansion.)
 */
#define PTE_SYNC(pte)							\
do {									\
	cpu_drain_writebuf();						\
	if (PMAP_NEEDS_PTE_SYNC) {					\
		paddr_t pa;						\
		cpu_dcache_wb_range((vaddr_t)(pte), sizeof(pt_entry_t));\
		if (cpu_sdcache_enabled()) {				\
			(void)pmap_extract(pmap_kernel(), (vaddr_t)(pte), &pa); \
			cpu_sdcache_wb_range((vaddr_t)(pte), (paddr_t)(pa), \
			    sizeof(pt_entry_t));			\
		}							\
		cpu_drain_writebuf();					\
	}								\
} while (0)
/*
 * As PTE_SYNC(), but for a run of "cnt" consecutive PTEs starting at
 * "pte".  The byte length is now spelled (cnt) * sizeof(pt_entry_t),
 * consistent with PTE_SYNC(), instead of the magic "(cnt) << 2" that
 * silently assumed 4-byte descriptors.
 * (Also dropped the stray ';' after the inner if-block.)
 * NOTE(review): "pa" is the physical address of the first PTE only;
 * a range crossing a page boundary would hand a wrong physical range
 * to cpu_sdcache_wb_range() -- confirm callers stay within one page.
 */
#define PTE_SYNC_RANGE(pte, cnt)					\
do {									\
	cpu_drain_writebuf();						\
	if (PMAP_NEEDS_PTE_SYNC) {					\
		paddr_t pa;						\
		cpu_dcache_wb_range((vaddr_t)(pte),			\
		    (cnt) * sizeof(pt_entry_t));			\
		if (cpu_sdcache_enabled()) {				\
			(void)pmap_extract(pmap_kernel(), (vaddr_t)(pte), &pa); \
			cpu_sdcache_wb_range((vaddr_t)(pte), (paddr_t)(pa), \
			    (cnt) * sizeof(pt_entry_t));		\
		}							\
		cpu_drain_writebuf();					\
	}								\
} while (0)
/* Classify an L1 descriptor value by its type field. */
#define l1pte_valid(pde) (((pde) & L1_TYPE_MASK) != L1_TYPE_INV)
#define l1pte_section_p(pde) (((pde) & L1_TYPE_MASK) == L1_TYPE_S)
#define l1pte_page_p(pde) (((pde) & L1_TYPE_MASK) == L1_TYPE_C)
#define l1pte_fpage_p(pde) (((pde) & L1_TYPE_MASK) == L1_TYPE_F)
/* Index, validity and frame-extraction helpers for L2 descriptors. */
#define l2pte_index(v) (((v) & L2_ADDR_BITS) >> L2_S_SHIFT)
#define l2pte_valid(pte) (((pte) & L2_TYPE_MASK) != L2_TYPE_INV)
#define l2pte_pa(pte) ((pte) & L2_S_FRAME)
/* Same tests, taking a pointer to the descriptor instead of a value. */
#define pmap_pde_v(pde) l1pte_valid(*(pde))
#define pmap_pde_section(pde) l1pte_section_p(*(pde))
#define pmap_pde_page(pde) l1pte_page_p(*(pde))
#define pmap_pde_fpage(pde) l1pte_fpage_p(*(pde))
void pmap_pte_init_armv7(void);
#endif
/* The kernel claims ARM domain 15; PMAP_DOMAINS is the number of
 * domains available to pmaps -- confirm allocation scheme in pmap.c. */
#define PMAP_DOMAINS 15
#define PMAP_DOMAIN_KERNEL 15
/*
 * ARMv7 access-permission (AP), cacheability and descriptor-type bit
 * values for L1 sections, L2 large pages and L2 small pages.
 * UR/UW = user read-only / user+kernel read-write,
 * KR/KW = kernel read-only / kernel read-write.
 */
#define L1_S_PROT_UR_v7 (L1_S_V7_AP(AP_V7_KRUR))
#define L1_S_PROT_UW_v7 (L1_S_V7_AP(AP_KRWURW))
#define L1_S_PROT_KR_v7 (L1_S_V7_AP(AP_V7_KR))
#define L1_S_PROT_KW_v7 (L1_S_V7_AP(AP_KRW))
#define L1_S_PROT_MASK_v7 (L1_S_V7_AP(0x07))
#define L1_S_CACHE_MASK_v7 (L1_S_B|L1_S_C|L1_S_V7_TEX_MASK)
#define L1_S_COHERENT_v7 (L1_S_C)
#define L2_L_PROT_UR_v7 (L2_V7_AP(AP_V7_KRUR))
#define L2_L_PROT_UW_v7 (L2_V7_AP(AP_KRWURW))
#define L2_L_PROT_KR_v7 (L2_V7_AP(AP_V7_KR))
#define L2_L_PROT_KW_v7 (L2_V7_AP(AP_KRW))
#define L2_L_PROT_MASK_v7 (L2_V7_AP(0x07) | L2_V7_L_XN)
#define L2_L_CACHE_MASK_v7 (L2_B|L2_C|L2_V7_L_TEX_MASK)
#define L2_L_COHERENT_v7 (L2_C)
#define L2_S_PROT_UR_v7 (L2_V7_AP(AP_V7_KRUR))
#define L2_S_PROT_UW_v7 (L2_V7_AP(AP_KRWURW))
#define L2_S_PROT_KR_v7 (L2_V7_AP(AP_V7_KR))
#define L2_S_PROT_KW_v7 (L2_V7_AP(AP_KRW))
#define L2_S_PROT_MASK_v7 (L2_V7_AP(0x07) | L2_V7_S_XN)
#define L2_S_CACHE_MASK_v7 (L2_B|L2_C|L2_V7_S_TEX_MASK)
#define L2_S_COHERENT_v7 (L2_C)
/* Base (type) bits of each descriptor flavor. */
#define L1_S_PROTO_v7 (L1_TYPE_S)
#define L1_C_PROTO_v7 (L1_TYPE_C)
#define L2_L_PROTO (L2_TYPE_L)
#define L2_S_PROTO_v7 (L2_TYPE_S)
/*
 * Only ARMv7 is supported here, so the generic names map directly to
 * the _v7 variants above.
 */
#define L1_S_PROT_UR L1_S_PROT_UR_v7
#define L1_S_PROT_UW L1_S_PROT_UW_v7
#define L1_S_PROT_KR L1_S_PROT_KR_v7
#define L1_S_PROT_KW L1_S_PROT_KW_v7
#define L1_S_PROT_MASK L1_S_PROT_MASK_v7
#define L2_L_PROT_UR L2_L_PROT_UR_v7
#define L2_L_PROT_UW L2_L_PROT_UW_v7
#define L2_L_PROT_KR L2_L_PROT_KR_v7
#define L2_L_PROT_KW L2_L_PROT_KW_v7
#define L2_L_PROT_MASK L2_L_PROT_MASK_v7
#define L2_S_PROT_UR L2_S_PROT_UR_v7
#define L2_S_PROT_UW L2_S_PROT_UW_v7
#define L2_S_PROT_KR L2_S_PROT_KR_v7
#define L2_S_PROT_KW L2_S_PROT_KW_v7
#define L2_S_PROT_MASK L2_S_PROT_MASK_v7
#define L1_S_CACHE_MASK L1_S_CACHE_MASK_v7
#define L2_L_CACHE_MASK L2_L_CACHE_MASK_v7
#define L2_S_CACHE_MASK L2_S_CACHE_MASK_v7
#define L1_S_COHERENT L1_S_COHERENT_v7
#define L2_L_COHERENT L2_L_COHERENT_v7
#define L2_S_COHERENT L2_S_COHERENT_v7
#define L1_S_PROTO L1_S_PROTO_v7
#define L1_C_PROTO L1_C_PROTO_v7
#define L2_S_PROTO L2_S_PROTO_v7
#ifndef _LOCORE
/*
 * Compute the AP/XN bits of an L1 section descriptor for the given
 * owner (PTE_KERNEL or PTE_USER) and protection.
 */
static __inline pt_entry_t
L1_S_PROT(int ku, vm_prot_t pr)
{
	int writable = (pr & PROT_WRITE) != 0;
	pt_entry_t bits;

	if (ku == PTE_USER)
		bits = writable ? L1_S_PROT_UW : L1_S_PROT_UR;
	else
		bits = writable ? L1_S_PROT_KW : L1_S_PROT_KR;
	/* Non-executable mappings get the execute-never bit. */
	if (!(pr & PROT_EXEC))
		bits |= L1_S_V7_XN;
	return bits;
}
/*
 * Compute the AP/XN bits of an L2 large-page descriptor for the given
 * owner (PTE_KERNEL or PTE_USER) and protection.
 */
static __inline pt_entry_t
L2_L_PROT(int ku, vm_prot_t pr)
{
	int writable = (pr & PROT_WRITE) != 0;
	pt_entry_t bits;

	if (ku == PTE_USER)
		bits = writable ? L2_L_PROT_UW : L2_L_PROT_UR;
	else
		bits = writable ? L2_L_PROT_KW : L2_L_PROT_KR;
	/* Non-executable mappings get the execute-never bit. */
	if (!(pr & PROT_EXEC))
		bits |= L2_V7_L_XN;
	return bits;
}
/*
 * Compute the AP/XN bits of an L2 small-page descriptor for the given
 * owner (PTE_KERNEL or PTE_USER) and protection.
 */
static __inline pt_entry_t
L2_S_PROT(int ku, vm_prot_t pr)
{
	int writable = (pr & PROT_WRITE) != 0;
	pt_entry_t bits;

	if (ku == PTE_USER)
		bits = writable ? L2_S_PROT_UW : L2_S_PROT_UR;
	else
		bits = writable ? L2_S_PROT_KW : L2_S_PROT_KR;
	/* Non-executable mappings get the execute-never bit. */
	if (!(pr & PROT_EXEC))
		bits |= L2_V7_S_XN;
	return bits;
}
/*
 * A PTE is writable when the v7 AP[2] bit (read-only when set) is
 * clear.  The pmap argument is unused but retained for callers.
 */
static __inline int
l2pte_is_writeable(pt_entry_t pte, struct pmap *pm)
{
	return !(pte & L2_V7_AP(0x4));
}
#endif
/* True when (va, pa, size) is aligned and large enough to be mapped
 * with an L1 section / L2 large page respectively. */
#define L1_S_MAPPABLE_P(va, pa, size) \
((((va) | (pa)) & L1_S_OFFSET) == 0 && (size) >= L1_S_SIZE)
#define L2_L_MAPPABLE_P(va, pa, size) \
((((va) | (pa)) & L2_L_OFFSET) == 0 && (size) >= L2_L_SIZE)
#endif
#ifndef _LOCORE
/* MD per-page data: the PV entry list and the PVF_* attribute bits. */
struct vm_page_md {
struct pv_entry *pvh_list;
int pvh_attrs;
};
/* Initialize a page's MD data when the vm_page is set up. */
#define VM_MDPAGE_INIT(pg) \
do { \
(pg)->mdpage.pvh_list = NULL; \
(pg)->mdpage.pvh_attrs = 0; \
} while (0)
#endif
#endif