p4dval_t
static inline void __p4d_populate(p4d_t *p4dp, phys_addr_t pudp, p4dval_t prot)
p4dval_t p4dval = P4D_TYPE_TABLE | P4D_TABLE_AF;
static inline void __p4d_populate(p4d_t *p4dp, phys_addr_t pudp, p4dval_t prot)
/*
 * Hardware p4d (4th-level page-table) descriptor bit definitions.
 * Bits [1:0] encode the descriptor type; the remaining bits below are
 * attribute bits in a table descriptor.
 */
#define P4D_TYPE_TABLE (_AT(p4dval_t, 3) << 0)	/* type == 3: next-level table */
#define P4D_TYPE_MASK (_AT(p4dval_t, 3) << 0)	/* mask for the type field, bits [1:0] */
#define P4D_TYPE_SECT (_AT(p4dval_t, 1) << 0)	/* type == 1: section/block mapping */
#define P4D_SECT_RDONLY (_AT(p4dval_t, 1) << 7) /* AP[2] */
#define P4D_TABLE_AF (_AT(p4dval_t, 1) << 10) /* Ignored if no FEAT_HAFT */
#define P4D_TABLE_PXN (_AT(p4dval_t, 1) << 59)	/* PXNTable: privileged execute-never */
#define P4D_TABLE_UXN (_AT(p4dval_t, 1) << 60)	/* UXNTable: unprivileged execute-never */
/* Opaque wrapper so a p4d entry cannot be manipulated as a bare integer. */
typedef struct { p4dval_t p4d; } p4d_t;
p4dval_t p4dval = P4D_TYPE_TABLE | P4D_TABLE_UXN | P4D_TABLE_AF;
p4d = (p4dval_t *)rip_rel_ptr(level4_kernel_pgt);
p4dval_t *p4d;
p4dval_t val = native_p4d_val(p4d);
static inline p4d_t __p4d(p4dval_t val)
p4dval_t ret = PVOP_ALT_CALLEE1(p4dval_t, pv_ops, mmu.make_p4d, val,
static inline p4dval_t p4d_val(p4d_t p4d)
return PVOP_ALT_CALLEE1(p4dval_t, pv_ops, mmu.p4d_val, p4d.p4d,
/* p4d entry type: struct wrapper around the raw p4dval_t for type safety. */
typedef struct { p4dval_t p4d; } p4d_t;
static inline p4dval_t native_p4d_val(p4d_t p4d)
static inline p4dval_t native_p4d_val(p4d_t p4d)
static inline p4dval_t p4d_pfn_mask(p4d_t p4d)
static inline p4dval_t p4d_flags_mask(p4d_t p4d)
static inline p4dval_t p4d_flags(p4d_t p4d)
p4d_p = (p4dval_t *)((pgd & PTE_PFN_MASK) + __START_KERNEL_map - phys_base);
p4d_p = (p4dval_t *)early_dynamic_pgts[next_early_pgt++];
*p4d_p = (p4dval_t)pud_p - __START_KERNEL_map + phys_base + _KERNPG_TABLE;
p4dval_t p4d, *p4d_p;
p4dval_t p4d_val = __pa_nodebug(kasan_early_shadow_pud) | _KERNPG_TABLE;
/* Build a p4d_t entry from a raw value (Xen variant of make_p4d). */
p4d_t xen_make_p4d(p4dval_t p4d);
__visible p4dval_t xen_p4d_val(p4d_t p4d)
__visible p4d_t xen_make_p4d(p4dval_t p4d)
/* Extract the raw p4dval_t from a p4d entry (Xen variant of p4d_val). */
p4dval_t xen_p4d_val(p4d_t p4d);
/* Export sizeof(p4dval_t) to the tracing infrastructure for event decoding. */
TRACE_DEFINE_SIZEOF(p4dval_t);
__field(p4dval_t, p4dval)
(int)sizeof(p4dval_t) * 2, (unsigned long long)pgd_val(native_make_pgd(__entry->p4dval)),
(int)sizeof(p4dval_t) * 2, (unsigned long long)__entry->p4dval)