__GFP_DMA32
gfp & ~(__GFP_DMA | __GFP_DMA32 | __GFP_HIGHMEM));
return __GFP_DMA32;
page = alloc_page(GFP_KERNEL_ACCOUNT | __GFP_DMA32);
page = alloc_page(GFP_KERNEL|__GFP_DMA32);
GFP_KERNEL | __GFP_DMA32 | __GFP_ZERO,
BUG_ON(mapping_gfp_constraint(mapping, __GFP_DMA32) &&
mapping_set_gfp_mask(obj->filp->f_mapping, GFP_KERNEL | __GFP_DMA32);
gfp |= __GFP_DMA32;
GEM_BUG_ON(gfp & __GFP_DMA32 && next_pfn >= 0x00100000UL);
mask |= __GFP_DMA32;
mask |= __GFP_DMA32;
omap_dmm->dummy_page = alloc_page(GFP_KERNEL | __GFP_DMA32);
mapping_set_gfp_mask(mapping, GFP_USER | __GFP_DMA32);
gfp &= ~(__GFP_DMA | __GFP_DMA32 | __GFP_HIGHMEM | __GFP_COMP);
if (WARN_ON_ONCE(gfp & (__GFP_COMP | __GFP_DMA | __GFP_DMA32 |
q->gfp_flags = __GFP_DMA32;
q->gfp_flags = __GFP_DMA32;
solo_enc->vidq.gfp_flags = __GFP_DMA32 | __GFP_KSWAPD_RECLAIM;
solo_dev->vidq.gfp_flags = __GFP_DMA32 | __GFP_KSWAPD_RECLAIM;
dev->vidq.gfp_flags = __GFP_DMA32 | __GFP_KSWAPD_RECLAIM;
src_vq->gfp_flags = __GFP_DMA32;
dst_vq->gfp_flags = __GFP_DMA32;
mask &= ~(__GFP_DMA32|__GFP_HIGHMEM);
/* Mask of all zone-selection GFP modifier bits (DMA, HIGHMEM, DMA32, MOVABLE);
 * ANDing a gfp mask with this isolates which memory zone was requested. */
#define GFP_ZONEMASK (__GFP_DMA|__GFP_HIGHMEM|__GFP_DMA32|__GFP_MOVABLE)
/* Convenience alias: allocate from the DMA32 zone (addressable below 4 GiB). */
#define GFP_DMA32 __GFP_DMA32
flag &= ~(__GFP_DMA | __GFP_DMA32 | __GFP_HIGHMEM);
if (WARN_ON_ONCE(gfp & (__GFP_DMA | __GFP_DMA32 | __GFP_HIGHMEM)))
else if (gfp & __GFP_DMA32)
else if (IS_ENABLED(CONFIG_ZONE_DMA32) && (gfp_mask & __GFP_DMA32))
gfp |= __GFP_DMA32;
!(gfp & (__GFP_DMA32 | __GFP_DMA)))
gfp |= __GFP_DMA32;
gfp = (gfp & ~__GFP_DMA32) | __GFP_DMA;
/* NOTE(review): flags that must never reach the slab allocator — the DMA32 and
 * HIGHMEM zone modifiers plus any bit outside __GFP_BITS_MASK. Inferred from
 * the name ("BUG"); confirm against the slab allocator's gfp sanity checks. */
#define GFP_SLAB_BUG_MASK (__GFP_DMA32|__GFP_HIGHMEM|~__GFP_BITS_MASK)
/* Mask of all zone-selection GFP modifier bits (DMA, HIGHMEM, DMA32, MOVABLE). */
#define GFP_ZONEMASK (__GFP_DMA|__GFP_HIGHMEM|__GFP_DMA32|__GFP_MOVABLE)
/* Convenience alias: allocate from the DMA32 zone (addressable below 4 GiB). */
#define GFP_DMA32 __GFP_DMA32