xhci_trb_virt_to_dma
xhci_trb_virt_to_dma(ring->enq_seg,
req->trb_dma = xhci_trb_virt_to_dma(ring->enq_seg, ring->enqueue);
deq = xhci_trb_virt_to_dma(dbc->ring_evt->deq_seg,
xhci_trb_virt_to_dma(dbc->ring_evt->deq_seg,
deq = xhci_trb_virt_to_dma(dbc->ring_evt->deq_seg,
xhci_trb_virt_to_dma((d)->ring_out->enq_seg, (d)->ring_out->enqueue)
xhci_trb_virt_to_dma((d)->ring_in->enq_seg, (d)->ring_in->enqueue)
dma = xhci_trb_virt_to_dma(ring->enq_seg, ring->enqueue);
dma = xhci_trb_virt_to_dma(ring->deq_seg, ring->dequeue);
ep0_ctx->deq = cpu_to_le64(xhci_trb_virt_to_dma(ep_ring->enq_seg,
deq = xhci_trb_virt_to_dma(ir->event_ring->deq_seg,
addr = xhci_trb_virt_to_dma(ep->ring->deq_seg, ep->ring->dequeue);
(unsigned long long)xhci_trb_virt_to_dma(
if (xhci_trb_virt_to_dma(ep->queued_deq_seg,
cmd_dequeue_dma = xhci_trb_virt_to_dma(xhci->cmd_ring->deq_seg,
(unsigned long long)xhci_trb_virt_to_dma(td->start_seg, td->start_trb),
(unsigned long long)xhci_trb_virt_to_dma(td->end_seg, td->end_trb));
(unsigned long long) xhci_trb_virt_to_dma(
xhci_trb_virt_to_dma(ir->event_ring->deq_seg,
deq = xhci_trb_virt_to_dma(ir->event_ring->deq_seg,
xhci_trb_virt_to_dma(ring->enq_seg, ring->enqueue));
crcr = xhci_trb_virt_to_dma(new_seg, new_deq);
if (!hw_dequeue_found && xhci_trb_virt_to_dma(new_seg, new_deq)
addr = xhci_trb_virt_to_dma(new_seg, new_deq);
__entry->enq = xhci_trb_virt_to_dma(ring->enq_seg, ring->enqueue);
__entry->deq = xhci_trb_virt_to_dma(ring->deq_seg, ring->dequeue);
(unsigned long long) xhci_trb_virt_to_dma(
deq_dma = xhci_trb_virt_to_dma(xhci->cmd_ring->deq_seg, xhci->cmd_ring->dequeue);
/**
 * xhci_trb_virt_to_dma - map a TRB's kernel virtual address to its DMA address
 * @seg: ring segment that contains (or is expected to contain) @trb
 * @trb: the Transfer Request Block whose bus address is wanted
 *
 * Callers throughout the driver pass matched (segment, TRB) pairs such as
 * (ring->enq_seg, ring->enqueue) and (ring->deq_seg, ring->dequeue), and use
 * the result to program hardware registers/contexts (e.g. CRCR, ep deq
 * pointers) and for trace/debug output.
 *
 * Return: the DMA (bus) address of @trb within @seg.
 * NOTE(review): implementation not visible in this chunk — presumably returns
 * 0 or an invalid address if @trb does not lie inside @seg; confirm against
 * the definition before relying on the error behavior.
 */
dma_addr_t xhci_trb_virt_to_dma(struct xhci_segment *seg, union xhci_trb *trb);