dma_readl — call sites across the kernel's DMA engine drivers (drivers/dma/)
while (dma_readl(atdma, CHSR) & atchan->mask)
while (dma_readl(atdma, CHSR) & atdma->all_chan_mask)
while (dma_readl(atdma, EBCISR))
atdma->save_imr = dma_readl(atdma, EBCIMR);
while (dma_readl(atdma, EBCISR))
dma_readl(atdma, EBCIMR),
dma_readl(atdma, CHSR));
return !!(dma_readl(atdma, CHSR) & atchan->mask);
imr = dma_readl(atdma, EBCIMR);
status = dma_readl(atdma, EBCISR);
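
The call sites above (atdma, atchan, CHSR, EBCISR, EBCIMR) match the Atmel AHB DMA controller driver, drivers/dma/at_hdmac.c. There dma_readl is a thin MMIO wrapper that token-pastes the register name onto an AT_DMA_ prefix. A minimal sketch of that pattern follows; the offsets are my reading of the driver's register map and should be treated as illustrative:

#include <linux/io.h>

/* Illustrative offsets, per the AT_DMA_* map in at_hdmac. */
#define AT_DMA_EBCIMR	0x20	/* error/buffer/chained-xfer irq mask */
#define AT_DMA_EBCISR	0x24	/* same events, status (read to clear) */
#define AT_DMA_CHSR	0x30	/* channel handler status */

struct at_dma {
	void __iomem	*regs;		/* ioremapped controller base */
	u32		all_chan_mask;	/* one bit per implemented channel */
	u32		save_imr;	/* EBCIMR snapshot for suspend */
	/* ... */
};

/* Paste the register name onto the prefix, then do the MMIO read. */
#define dma_readl(atdma, name) \
	__raw_readl((atdma)->regs + AT_DMA_##name)

With that expansion, the bare while (dma_readl(atdma, EBCISR)) loops read the status register until no latched bits remain; EBCISR appears to be clear-on-read, so this drains stale interrupt status before the controller is (re)enabled.
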
while (dma_readl(dw, CFG) & DW_CFG_DMA_EN)
if (dma_readl(dw, CH_EN) & dwc->mask) {
BUG_ON(dma_readl(to_dw_dma(chan->device), CH_EN) & dwc->mask);
dw_params = dma_readl(dw, DW_PARAMS);
pdata->block_size = dma_readl(dw, MAX_BLK_SIZE);
while (dma_readl(dw, CH_EN) & dwc->mask)
if (dma_readl(dw, CH_EN) & dwc->mask) {
if (dma_readl(dw, CH_EN) & dwc->mask) {
status_xfer = dma_readl(dw, RAW.XFER);
status_xfer = dma_readl(dw, RAW.XFER);
status_err = dma_readl(dw, RAW.ERROR);
status = dma_readl(dw, STATUS_INT);
status = dma_readl(dw, STATUS_INT);
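
The dw/dwc group above comes from the Synopsys DesignWare driver (drivers/dma/dw/). It takes a different approach: instead of pasting an offset constant, dma_readl names a field of a struct dw_dma_regs overlay, which is why the compound name RAW.XFER works; RAW is a nested struct of per-event status registers. A sketch of that layout, heavily trimmed and with field order only approximate:

#include <linux/io.h>

/* Each register is 32 data bits padded to a 64-bit slot. */
#define DW_REG(name)	u32 name; u32 __pad_##name

struct dw_dma_irq_regs {
	DW_REG(XFER);
	DW_REG(BLOCK);
	DW_REG(SRC_TRAN);
	DW_REG(DST_TRAN);
	DW_REG(ERROR);
};

struct dw_dma_regs {
	/* ... per-channel register blocks precede these ... */
	struct dw_dma_irq_regs	RAW;	/* raw (unmasked) event status */
	struct dw_dma_irq_regs	STATUS;	/* status after masking */
	struct dw_dma_irq_regs	MASK;
	struct dw_dma_irq_regs	CLEAR;
	DW_REG(STATUS_INT);		/* combined interrupt status */
	/* ... */
	DW_REG(CFG);
	DW_REG(CH_EN);
	/* ... */
};

struct dw_dma {
	void __iomem	*regs;		/* base, viewed as struct dw_dma_regs */
	/* ... */
};

static inline struct dw_dma_regs __iomem *__dw_regs(struct dw_dma *dw)
{
	return dw->regs;
}

/* The struct member access supplies the offset; readl does the MMIO. */
#define dma_readl(dw, name) \
	readl(&(__dw_regs(dw)->name))

So dma_readl(dw, RAW.XFER) expands to readl(&(__dw_regs(dw)->RAW.XFER)), and the compiler computes the offset from the struct layout rather than from a hand-maintained table of constants.
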
u32 status = dma_readl(idma64, STATUS_INT);
status_xfer = dma_readl(idma64, RAW(XFER));
status_err = dma_readl(idma64, RAW(ERROR));
} while (dma_readl(idma64, CFG) & IDMA64_CFG_DMA_EN && --count);
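
The idma64 lines are from drivers/dma/idma64.c (Intel integrated DMA, a DesignWare derivative). It returns to token pasting but routes through a helper, and RAW(XFER) and friends are themselves macros that combine a bank base with a per-event offset. The trailing do/while poll on IDMA64_CFG_DMA_EN is the usual bounded "disable, then wait with a retry budget" shutdown idiom. A sketch, with every offset a placeholder rather than the real map:

#include <linux/io.h>

struct idma64 {
	void __iomem	*regs;
	/* ... */
};

static inline u32 idma64_readl(struct idma64 *idma64, int offset)
{
	return readl(idma64->regs + offset);
}

/* dma_readl(idma64, CFG) -> idma64_readl(idma64, IDMA64_CFG), etc. */
#define dma_readl(idma64, reg)	idma64_readl(idma64, IDMA64_##reg)

/* Placeholder offsets, not the hardware's real values. */
#define IDMA64_XFER		0x00	/* event offset within a bank */
#define IDMA64_ERROR		0x20
#define IDMA64_STATUS_INT	0x360	/* combined interrupt status */
#define IDMA64_CFG		0x398

/* Bank selectors: RAW(XFER) pastes to IDMA64_RAW(XFER) = base + event. */
#define IDMA64_RAW(x)		(0x2c0 + IDMA64_##x)
#define IDMA64_STATUS(x)	(0x2e8 + IDMA64_##x)
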
val = dma_readl(od, OWL_DMA_IDLE_STAT);
irq_pd = dma_readl(od, OWL_DMA_IRQ_PD0);
pending = dma_readl(od, OWL_DMA_IRQ_PD0);
dma_readl(od, OWL_DMA_IRQ_PD0);
global_irq_pending = dma_readl(od, OWL_DMA_IRQ_PD0);
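
The od group is drivers/dma/owl-dma.c (Actions Semi Owl SoCs), which skips the macro tricks entirely: dma_readl is an ordinary function taking a raw byte offset, so call sites pass constants such as OWL_DMA_IRQ_PD0 directly. A sketch of the accessor pair (the offset value is assumed, not the real one):

#include <linux/io.h>

#define OWL_DMA_IRQ_PD0		0x10	/* assumed: IRQ pending, group 0 */

struct owl_dma {
	void __iomem	*base;
	/* ... */
};

static void dma_writel(struct owl_dma *od, u32 reg, u32 data)
{
	writel(data, od->base + reg);
}

static u32 dma_readl(struct owl_dma *od, u32 reg)
{
	return readl(od->base + reg);
}

Note the one call above whose result is discarded, dma_readl(od, OWL_DMA_IRQ_PD0);. That looks like a read-back: after writing the pending register to acknowledge interrupts, reading it once forces the write to post before the handler returns.
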
val = dma_readl(pd, CTL2);
val = dma_readl(pd, CTL0);
val = dma_readl(pd, CTL3);
val = dma_readl(pd, CTL0);
val = dma_readl(pd, CTL3);
val = dma_readl(pd, STS0);
val = dma_readl(pd, STS2);
sts0 = dma_readl(pd, STS0);
sts2 = dma_readl(pd, STS2);
pd->regs.dma_ctl0 = dma_readl(pd, CTL0);
pd->regs.dma_ctl1 = dma_readl(pd, CTL1);
pd->regs.dma_ctl2 = dma_readl(pd, CTL2);
pd->regs.dma_ctl3 = dma_readl(pd, CTL3);
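
The pd group is drivers/dma/pch_dma.c (Intel EG20T PCH). It is back to a pasted-name macro, this time over a membase pointer, and the four consecutive CTL0..CTL3 reads above are the suspend path snapshotting the control registers into a saved-regs struct so resume can restore them. A sketch with assumed offsets:

#include <linux/io.h>

/* Assumed offsets. */
#define PCH_DMA_CTL0	0x00
#define PCH_DMA_CTL1	0x04
#define PCH_DMA_CTL2	0x08
#define PCH_DMA_CTL3	0x0c
#define PCH_DMA_STS0	0x10
#define PCH_DMA_STS2	0x18

struct pch_dma_regs {
	u32	dma_ctl0;
	u32	dma_ctl1;
	u32	dma_ctl2;
	u32	dma_ctl3;
	/* ... */
};

struct pch_dma {
	void __iomem		*membase;
	struct pch_dma_regs	regs;	/* snapshot for suspend/resume */
	/* ... */
};

#define dma_readl(pd, name) \
	readl((pd)->membase + PCH_DMA_##name)
#define dma_writel(pd, name, val) \
	writel((val), (pd)->membase + PCH_DMA_##name)
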
mcr = dma_readl(ddev, MCR);
dma_readl(ddev, MCR));
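
The final pair (ddev, MCR) looks like drivers/dma/txx9dmac.c (Toshiba TX49xx SoCs), the most involved case: the same controller ships with 32-bit and 64-bit register layouts, so dma_readl has to dispatch at run time between two overlay structs instead of being a single accessor. The sketch below captures that shape only; the structs are heavily trimmed, the have_64bit_regs flag is my invention standing in for however the driver really distinguishes the layouts, and __raw_readq assumes a 64-bit capable architecture:

#include <linux/io.h>

struct txx9dmac_regs {		/* 64-bit layout, trimmed */
	u64	MCR;		/* master control register */
	/* ... */
};

struct txx9dmac_regs32 {	/* 32-bit layout, trimmed */
	u32	MCR;
	/* ... */
};

struct txx9dmac_dev {
	void __iomem	*regs;
	bool		have_64bit_regs;	/* hypothetical flag */
	/* ... */
};

#define dma64_readl(ddev, name) \
	__raw_readq(&((struct txx9dmac_regs __iomem *)(ddev)->regs)->name)
#define dma32_readl(ddev, name) \
	__raw_readl(&((struct txx9dmac_regs32 __iomem *)(ddev)->regs)->name)

/* Dispatch on the register width the hardware actually has. */
#define dma_readl(ddev, name) \
	((ddev)->have_64bit_regs ? \
		(u32)dma64_readl(ddev, name) : dma32_readl(ddev, name))
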