/*
 * NOTE(review): This is NOT a compilable translation unit. It appears to be a
 * set of extracted match lines (e.g. grep output for "xdma_chan") taken from
 * the Xilinx XDMA DMA-engine driver (presumably drivers/dma/xilinx/xdma.c in
 * the Linux kernel -- TODO confirm against the original tree). Every function
 * below is truncated mid-body, so no code is altered here; comments only map
 * each fragment to its likely enclosing definition. Recover the full source
 * file before attempting any functional review or edit.
 */
xdma_chan
/* Likely from a dma_filter_fn: resolve the wrapper and match the transfer
 * direction requested by the client against the channel's direction. */
struct xdma_chan *xdma_chan = to_xdma_chan(chan);
return chan_info->dir == xdma_chan->dir;
/* Likely fields of struct xdma_device: the H2C (host-to-card) and C2H
 * (card-to-host) channel arrays -- TODO confirm. */
struct xdma_chan *h2c_chans;
struct xdma_chan *c2h_chans;
/* Upcast helper: recover the driver channel from the generic dma_chan
 * embedded via virt-dma (vchan.chan). Body shown without braces here. */
static inline struct xdma_chan *to_xdma_chan(struct dma_chan *chan)
return container_of(chan, struct xdma_chan, vchan.chan);
/* Truncated prototypes/signatures of internal helpers: per-channel init,
 * descriptor allocation (cyclic or not), and transfer start/stop. */
static int xdma_channel_init(struct xdma_chan *chan)
xdma_alloc_desc(struct xdma_chan *chan, u32 desc_num, bool cyclic)
static int xdma_xfer_start(struct xdma_chan *xchan)
static int xdma_xfer_stop(struct xdma_chan *xchan)
/* Local declarations from some probe/iteration helper -- context not visible. */
struct xdma_chan **chans, *xchan;
/* Likely .device_issue_pending: under the vchan lock, move pending
 * descriptors to the issued list and kick the hardware if any were queued. */
struct xdma_chan *xdma_chan = to_xdma_chan(chan);
spin_lock_irqsave(&xdma_chan->vchan.lock, flags);
if (vchan_issue_pending(&xdma_chan->vchan))
xdma_xfer_start(xdma_chan);
spin_unlock_irqrestore(&xdma_chan->vchan.lock, flags);
/* Likely .device_terminate_all: stop the hardware, then under the lock mark
 * the channel idle, record the stop request, and move all queued descriptors
 * to the terminated list for later reclamation by device_synchronize. */
struct xdma_chan *xdma_chan = to_xdma_chan(chan);
xdma_xfer_stop(xdma_chan);
spin_lock_irqsave(&xdma_chan->vchan.lock, flags);
xdma_chan->busy = false;
xdma_chan->stop_requested = true;
vd = vchan_next_desc(&xdma_chan->vchan);
vchan_get_all_descriptors(&xdma_chan->vchan, &head);
list_splice_tail(&head, &xdma_chan->vchan.desc_terminated);
spin_unlock_irqrestore(&xdma_chan->vchan.lock, flags);
/* Likely .device_synchronize: read channel status via regmap, wait (bounded
 * at 1 s) for the final interrupt, then let virt-dma free terminated work. */
struct xdma_chan *xdma_chan = to_xdma_chan(chan);
struct xdma_device *xdev = xdma_chan->xdev_hdl;
regmap_read(xdev->rmap, xdma_chan->base + XDMA_CHAN_STATUS, &st);
wait_for_completion_timeout(&xdma_chan->last_interrupt, msecs_to_jiffies(1000));
vchan_synchronize(&xdma_chan->vchan);
/* Likely .device_prep_slave_sg: allocate a non-cyclic descriptor chain and
 * pick the device address from the slave config by direction. */
struct xdma_chan *xdma_chan = to_xdma_chan(chan);
sw_desc = xdma_alloc_desc(xdma_chan, desc_num, false);
dev_addr = xdma_chan->cfg.dst_addr;
dev_addr = xdma_chan->cfg.src_addr;
tx_desc = vchan_tx_prep(&xdma_chan->vchan, &sw_desc->vdesc, flags);
/* Likely .device_prep_dma_cyclic: same shape as slave_sg but with a cyclic
 * descriptor ring sized by the period count. */
struct xdma_chan *xdma_chan = to_xdma_chan(chan);
struct xdma_device *xdev = xdma_chan->xdev_hdl;
sw_desc = xdma_alloc_desc(xdma_chan, periods, true);
dev_addr = xdma_chan->cfg.dst_addr;
dev_addr = xdma_chan->cfg.src_addr;
tx_desc = vchan_tx_prep(&xdma_chan->vchan, &sw_desc->vdesc, flags);
/* Stray local from another helper (interleaved prep? -- cannot tell). */
struct xdma_chan *xchan = to_xdma_chan(chan);
/* Likely .device_config: cache the client's dma_slave_config wholesale. */
struct xdma_chan *xdma_chan = to_xdma_chan(chan);
memcpy(&xdma_chan->cfg, cfg, sizeof(*cfg));
/* Likely .device_free_chan_resources: release virt-dma bookkeeping and tear
 * down the per-channel descriptor DMA pool (NULLed to prevent reuse). */
struct xdma_chan *xdma_chan = to_xdma_chan(chan);
vchan_free_chan_resources(&xdma_chan->vchan);
dma_pool_destroy(xdma_chan->desc_pool);
xdma_chan->desc_pool = NULL;
/* Likely .device_alloc_chan_resources: create the descriptor-block DMA pool;
 * the error branch is cut off here. */
struct xdma_chan *xdma_chan = to_xdma_chan(chan);
struct xdma_device *xdev = xdma_chan->xdev_hdl;
xdma_chan->desc_pool = dma_pool_create(dma_chan_name(chan), dev, XDMA_DESC_BLOCK_SIZE,
if (!xdma_chan->desc_pool) {
/* Likely .device_tx_status: look up the in-flight descriptor for the cookie
 * under the vchan lock (presumably to report residue -- not visible). */
struct xdma_chan *xdma_chan = to_xdma_chan(chan);
spin_lock_irqsave(&xdma_chan->vchan.lock, flags);
vd = vchan_find_desc(&xdma_chan->vchan, cookie);
spin_unlock_irqrestore(&xdma_chan->vchan.lock, flags);
/* Likely the per-channel IRQ handler: dev_id is the channel pointer. */
struct xdma_chan *xchan = dev_id;
struct xdma_chan *chan;