node_start_pfn
NODE_DATA(node)->node_start_pfn = start_pfn;
NODE_DATA(node)->node_start_pfn = start_pfn;
NODE_DATA(nid)->node_start_pfn = start_pfn;
if (pfn >= node_start_pfn(nid) && pfn <= node_end_pfn(nid))
NODE_DATA(nid)->node_start_pfn = start_pfn;
NODE_DATA(nid)->node_start_pfn = start_pfn;
p->node_start_pfn = start_pfn;
NODE_DATA(nid)->node_start_pfn = start_pfn;
unsigned long node_start_pfn;
/*
 * node_start_pfn(nid) - first page frame number of node @nid.
 *
 * Reads pglist_data::node_start_pfn via NODE_DATA(); the node's PFN
 * range then runs from node_start_pfn(nid) up to node_start_pfn(nid) +
 * node_spanned_pages (exclusive). Argument is evaluated once.
 */
#define node_start_pfn(nid) (NODE_DATA(nid)->node_start_pfn)
return pgdat->node_start_pfn + pgdat->node_spanned_pages;
VMCOREINFO_OFFSET(pglist_data, node_start_pfn);
pfn = pgdat->node_start_pfn;
unsigned long node_start_pfn = 0, node_end_pfn = 0;
node_start_pfn = zone->zone_start_pfn;
if (zone->zone_start_pfn < node_start_pfn)
node_start_pfn = zone->zone_start_pfn;
pgdat->node_start_pfn = node_start_pfn;
pgdat->node_spanned_pages = node_end_pfn - node_start_pfn;
if (!pgdat->node_spanned_pages || start_pfn < pgdat->node_start_pfn)
pgdat->node_start_pfn = start_pfn;
pgdat->node_spanned_pages = max(start_pfn + nr_pages, old_end_pfn) - pgdat->node_start_pfn;
unsigned long node_start_pfn,
*zone_start_pfn = clamp(node_start_pfn, zone_low, zone_high);
if (*zone_end_pfn < node_start_pfn || *zone_start_pfn > node_end_pfn)
*zone_start_pfn = max(*zone_start_pfn, node_start_pfn);
unsigned long node_start_pfn,
node_start_pfn,
pgdat->node_start_pfn = 0;
start = pgdat->node_start_pfn & ~(MAX_ORDER_NR_PAGES - 1);
offset = pgdat->node_start_pfn - start;
if (page_to_pfn(mem_map) != pgdat->node_start_pfn)
pgdat->node_start_pfn = start_pfn;
BUG_ON(pgdat->first_deferred_pfn < pgdat->node_start_pfn);
index = pfn - round_down(node_start_pfn(page_to_nid(page)),
if (!IS_ALIGNED(node_start_pfn(nid), MAX_ORDER_NR_PAGES) ||
start_pfn = node_start_pfn(nid);
if (pfn < pgdat->node_start_pfn || pfn >= pgdat_end_pfn(pgdat))
if (pfn < pgdat->node_start_pfn || pfn >= pgdat_end_pfn(pgdat))
range->start = PFN_PHYS(node_start_pfn(target_node));