hw_to_dev — helper that resolves an irdma_hw handle to the struct device * expected by the kernel DMA mapping API. Call sites:
dma_sync_single_for_device(hw_to_dev(qp->dev->hw), buf->mem.pa, buf->mem.size, DMA_BIDIRECTIONAL);
dma_sync_single_for_device(hw_to_dev(rsrc->dev->hw), buf->mem.pa, buf->mem.size, DMA_BIDIRECTIONAL);
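The two dma_sync_single_for_device() call sites above follow the standard streaming-DMA handoff: the CPU finishes writing into a buffer, then returns ownership to the device before the hardware is told to read it. A minimal sketch of that pattern (example_post_to_hw and the example_dma_buf type are illustrative, modeled on the buf->mem fields in the call sites, not taken from the driver):

#include <linux/dma-mapping.h>
#include <linux/string.h>

/* Mirrors the buf->mem style seen above: one streaming mapping
 * described by a virtual address, a bus address, and a size.
 */
struct example_dma_buf {
	void *va;	/* kernel virtual address */
	dma_addr_t pa;	/* bus address from dma_map_single() */
	size_t size;
};

static void example_post_to_hw(struct device *dev,
			       struct example_dma_buf *buf,
			       const void *payload, size_t len)
{
	memcpy(buf->va, payload, len);		/* CPU writes the buffer */
	dma_sync_single_for_device(dev, buf->pa, buf->size,
				   DMA_BIDIRECTIONAL);
	/* only now may the hardware be told to read from buf->pa */
}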
buf->mem.pa = dma_map_single(hw_to_dev(dev->hw), buf->mem.va, buf->mem.size, DMA_BIDIRECTIONAL);
if (dma_mapping_error(hw_to_dev(dev->hw), buf->mem.pa)) {
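dma_map_single() can fail, which is why the call site above immediately tests the returned address with dma_mapping_error(); an address that fails the test must never be handed to the hardware. A hedged sketch of that map-and-check step (example_map_buf and its parameters are hypothetical):

#include <linux/dma-mapping.h>
#include <linux/errno.h>

static int example_map_buf(struct device *dev, void *va, size_t size,
			   dma_addr_t *pa_out)
{
	dma_addr_t pa;

	pa = dma_map_single(dev, va, size, DMA_BIDIRECTIONAL);
	if (dma_mapping_error(dev, pa))
		return -ENOMEM;	/* pa is invalid and must not be used */

	*pa_out = pa;
	return 0;
}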
dma_sync_single_for_cpu(hw_to_dev(dev->hw), buf->mem.pa, buf->mem.size, DMA_BIDIRECTIONAL);
dma_sync_single_for_cpu(hw_to_dev(dev->hw), buf->mem.pa, buf->mem.size, DMA_BIDIRECTIONAL);
dma_sync_single_for_cpu(hw_to_dev(rsrc->dev->hw), buf->mem.pa, buf->mem.size, DMA_BIDIRECTIONAL);
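The dma_sync_single_for_cpu() call sites are the mirror image of the device syncs: before the CPU reads data the device may have written, ownership of the streaming mapping has to be pulled back to the CPU. A small illustrative sketch (example_read_word is hypothetical):

#include <linux/dma-mapping.h>
#include <linux/types.h>

static u32 example_read_word(struct device *dev, const void *va,
			     dma_addr_t pa, size_t size)
{
	/* Reclaim the buffer for the CPU so device writes are visible. */
	dma_sync_single_for_cpu(dev, pa, size, DMA_BIDIRECTIONAL);
	return *(const u32 *)va;
}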
pg_dma[i] = dma_map_page(hw_to_dev(hw), vm_page, 0, PAGE_SIZE, DMA_BIDIRECTIONAL);
if (dma_mapping_error(hw_to_dev(hw), pg_dma[i]))
dma_unmap_page(hw_to_dev(hw), pg_dma[i], PAGE_SIZE, DMA_BIDIRECTIONAL);
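The dma_map_page()/dma_unmap_page() call sites belong to a per-page mapping loop: each page is mapped, each result is checked, and on failure every page mapped so far has to be unwound. A sketch of that loop, assuming the surrounding driver code supplies the page array (pg_dma matches the name above; pages and npages are hypothetical):

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/mm.h>

static int example_map_pages(struct device *dev, struct page **pages,
			     dma_addr_t *pg_dma, int npages)
{
	int i;

	for (i = 0; i < npages; i++) {
		pg_dma[i] = dma_map_page(dev, pages[i], 0, PAGE_SIZE,
					 DMA_BIDIRECTIONAL);
		if (dma_mapping_error(dev, pg_dma[i]))
			goto unwind;
	}
	return 0;

unwind:
	/* Undo the mappings that succeeded before the failure. */
	while (--i >= 0)
		dma_unmap_page(dev, pg_dma[i], PAGE_SIZE, DMA_BIDIRECTIONAL);
	return -ENOMEM;
}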
Prototype (every caller feeds the result straight to the DMA API, so the return type is struct device *, not void *):
struct device *hw_to_dev(struct irdma_hw *hw);
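One plausible implementation, assuming struct irdma_hw is embedded in a per-PCI-function container that also tracks the struct pci_dev (the container type and field names here are assumptions, not confirmed from the driver):

#include <linux/container_of.h>	/* container_of(); via linux/kernel.h on older trees */
#include <linux/pci.h>

/* Assumed container layout; struct irdma_hw itself comes from the
 * driver's headers.
 */
struct example_pci_f {
	struct irdma_hw hw;
	struct pci_dev *pcidev;
};

struct device *hw_to_dev(struct irdma_hw *hw)
{
	struct example_pci_f *rf = container_of(hw, struct example_pci_f, hw);

	return &rf->pcidev->dev;	/* the struct device the DMA API expects */
}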