__GFP_DMA
gfp & ~(__GFP_DMA | __GFP_DMA32 | __GFP_HIGHMEM));
return __GFP_DMA;
flag &= ~(__GFP_DMA | __GFP_HIGHMEM);
gfp &= ~(__GFP_DMA | __GFP_DMA32 | __GFP_HIGHMEM | __GFP_COMP);
if (WARN_ON_ONCE(gfp & (__GFP_COMP | __GFP_DMA | __GFP_DMA32 |
as->pd = iommu_alloc_pages_sz(GFP_KERNEL | __GFP_DMA, SMMU_SIZE_PD);
pt = iommu_alloc_pages_sz(gfp | __GFP_DMA, SMMU_SIZE_PT);
uap->dmatx.buf = kmalloc(PL011_DMA_BUFFER_SIZE, GFP_KERNEL | __GFP_DMA);
/*
 * Combined mask of the zone-modifier gfp flags (__GFP_DMA, __GFP_HIGHMEM,
 * __GFP_DMA32, __GFP_MOVABLE). Presumably used to strip or inspect the
 * zone-selection bits of a gfp_t — NOTE(review): confirm against the
 * enclosing gfp header; only this line is visible here.
 */
#define GFP_ZONEMASK (__GFP_DMA|__GFP_HIGHMEM|__GFP_DMA32|__GFP_MOVABLE)
/* Public alias: GFP_DMA is exactly the __GFP_DMA zone-modifier bit. */
#define GFP_DMA __GFP_DMA
(IS_ENABLED(CONFIG_ZONE_DMA) ? __GFP_DMA : 0) | \
if (IS_ENABLED(CONFIG_ZONE_DMA) && (flags & __GFP_DMA))
flag &= ~(__GFP_DMA | __GFP_DMA32 | __GFP_HIGHMEM);
if (WARN_ON_ONCE(gfp & (__GFP_DMA | __GFP_DMA32 | __GFP_HIGHMEM)))
if (gfp & __GFP_DMA)
if (IS_ENABLED(CONFIG_ZONE_DMA) && (gfp_mask & __GFP_DMA))
gfp |= __GFP_DMA;
!(gfp & (__GFP_DMA32 | __GFP_DMA)))
!(gfp & __GFP_DMA))
gfp = (gfp & ~__GFP_DMA32) | __GFP_DMA;
gfp = GFP_ATOMIC|__GFP_DMA;
((gfp_mask & __GFP_DMA) && !has_managed_dma()))
#define OBJCGS_CLEAR_MASK (__GFP_DMA | __GFP_RECLAIMABLE | \
/*
 * Combined mask of the zone-modifier gfp flags (__GFP_DMA, __GFP_HIGHMEM,
 * __GFP_DMA32, __GFP_MOVABLE). Presumably used to strip or inspect the
 * zone-selection bits of a gfp_t — NOTE(review): confirm against the
 * enclosing gfp header; only this line is visible here.
 */
#define GFP_ZONEMASK (__GFP_DMA|__GFP_HIGHMEM|__GFP_DMA32|__GFP_MOVABLE)
/* Public alias: GFP_DMA is exactly the __GFP_DMA zone-modifier bit. */
#define GFP_DMA __GFP_DMA