#ifdef XGE_DEBUG_FP
#include "xgehal-ring.h"
#endif
/*
 * __hal_ring_rxd_priv - Locate the HAL-private area of a receive descriptor.
 * @ring: Ring the descriptor belongs to.
 * @dtrh: Descriptor handle.
 *
 * The private area is recovered from the descriptor's host_control field.
 * In 5-buffer mode on 64-bit platforms host_control is too small to hold a
 * pointer plus bookkeeping, so it instead packs a memblock index (bits 16 and
 * up) and a per-memblock descriptor index (low 16 bits) that are used to
 * compute the address from the ring mempool. In every other configuration
 * host_control stores the private-area pointer directly.
 *
 * Returns the xge_hal_ring_rxd_priv_t for the descriptor; asserts (debug
 * builds) that its DMA bookkeeping fields are mutually consistent.
 */
__HAL_STATIC_RING __HAL_INLINE_RING xge_hal_ring_rxd_priv_t*
__hal_ring_rxd_priv(xge_hal_ring_t *ring, xge_hal_dtr_h dtrh)
{
/* Treat the descriptor as 1-buffer layout; host_control is at the same
 * offset for the layouts handled by the fallback path below. */
xge_hal_ring_rxd_1_t *rxdp = (xge_hal_ring_rxd_1_t *)dtrh;
xge_hal_ring_rxd_priv_t *rxd_priv;
xge_assert(rxdp);
#if defined(XGE_HAL_USE_5B_MODE)
xge_assert(ring);
if (ring->buffer_mode == XGE_HAL_RING_QUEUE_BUFFER_MODE_5) {
	xge_hal_ring_rxd_5_t *rxdp_5 = (xge_hal_ring_rxd_5_t *)dtrh;
#if defined (XGE_OS_PLATFORM_64BIT)
	/* host_control packs {memblock index, descriptor index}. */
	int memblock_idx = rxdp_5->host_control >> 16;
	int i = rxdp_5->host_control & 0xFFFF;
	rxd_priv = (xge_hal_ring_rxd_priv_t *)
		((char*)ring->mempool->memblocks_priv_arr[memblock_idx] + ring->rxd_priv_size * i);
#else
	/* 32-bit platform: the pointer fits in host_control directly. */
	rxd_priv = (xge_hal_ring_rxd_priv_t *)rxdp_5->host_control;
#endif
} else
#endif
{
	/* 1- and 3-buffer modes: host_control holds the pointer. */
	rxd_priv = (xge_hal_ring_rxd_priv_t *)
			(ulong_t)rxdp->host_control;
}
xge_assert(rxd_priv);
xge_assert(rxd_priv->dma_object);
/* Sanity: the cached DMA handle/address must match the owning object. */
xge_assert(rxd_priv->dma_object->handle == rxd_priv->dma_handle);
xge_assert(rxd_priv->dma_object->addr + rxd_priv->dma_offset ==
					rxd_priv->dma_addr);
return rxd_priv;
}
/*
 * __hal_ring_block_memblock_idx - Read the memblock index stored in a
 * ring block. The index lives as a u64 at a fixed offset inside the
 * block itself.
 */
__HAL_STATIC_RING __HAL_INLINE_RING int
__hal_ring_block_memblock_idx(xge_hal_ring_block_t *block)
{
	u64 *slot = (u64 *)(void *)((char *)block +
	    XGE_HAL_RING_MEMBLOCK_IDX_OFFSET);

	return (int)*slot;
}
/*
 * __hal_ring_block_memblock_idx_set - Store @memblock_idx into the
 * fixed-offset u64 slot inside the ring block.
 */
__HAL_STATIC_RING __HAL_INLINE_RING void
__hal_ring_block_memblock_idx_set(xge_hal_ring_block_t*block, int memblock_idx)
{
	u64 *slot = (u64 *)(void *)((char *)block +
	    XGE_HAL_RING_MEMBLOCK_IDX_OFFSET);

	*slot = memblock_idx;
}
/*
 * __hal_ring_block_next_pointer - Read the DMA address of the next ring
 * block from the fixed-offset u64 slot inside @block.
 */
__HAL_STATIC_RING __HAL_INLINE_RING dma_addr_t
__hal_ring_block_next_pointer(xge_hal_ring_block_t *block)
{
	u64 *slot = (u64 *)(void *)((char *)block +
	    XGE_HAL_RING_NEXT_BLOCK_POINTER_OFFSET);

	return (dma_addr_t)*slot;
}
/*
 * __hal_ring_block_next_pointer_set - Link @block to its successor by
 * writing @dma_next into the fixed-offset u64 slot inside the block.
 */
__HAL_STATIC_RING __HAL_INLINE_RING void
__hal_ring_block_next_pointer_set(xge_hal_ring_block_t *block,
		dma_addr_t dma_next)
{
	u64 *slot = (u64 *)(void *)((char *)block +
	    XGE_HAL_RING_NEXT_BLOCK_POINTER_OFFSET);

	*slot = dma_next;
}
/*
 * xge_hal_ring_dtr_private - Return the ULD-private space of a descriptor.
 * The caller's private area begins immediately after the HAL-private
 * xge_hal_ring_rxd_priv_t structure.
 */
__HAL_STATIC_RING __HAL_INLINE_RING void*
xge_hal_ring_dtr_private(xge_hal_channel_h channelh, xge_hal_dtr_h dtrh)
{
	xge_hal_ring_rxd_priv_t *hal_priv;

	hal_priv = __hal_ring_rxd_priv((xge_hal_ring_t *) channelh, dtrh);
	return (char *)hal_priv + sizeof(xge_hal_ring_rxd_priv_t);
}
/*
 * xge_hal_ring_dtr_reserve - Reserve one receive descriptor from the ring.
 * @channelh: Channel (ring) handle.
 * @dtrh: On success, set to the reserved descriptor handle.
 *
 * Allocates a descriptor from the channel's reserve pool, serialized by the
 * channel reserve lock when multi-context reservation is configured
 * (XGE_HAL_RX_MULTI_RESERVE / _IRQ). On success both control words of the
 * descriptor are cleared for re-use.
 *
 * Returns XGE_HAL_OK on success, otherwise the status propagated from
 * __hal_channel_dtr_alloc().
 */
__HAL_STATIC_RING __HAL_INLINE_RING xge_hal_status_e
xge_hal_ring_dtr_reserve(xge_hal_channel_h channelh, xge_hal_dtr_h *dtrh)
{
xge_hal_status_e status;
#if defined(XGE_HAL_RX_MULTI_RESERVE_IRQ)
unsigned long flags;
#endif
#if defined(XGE_HAL_RX_MULTI_RESERVE)
xge_os_spin_lock(&((xge_hal_channel_t*)channelh)->reserve_lock);
#elif defined(XGE_HAL_RX_MULTI_RESERVE_IRQ)
xge_os_spin_lock_irq(&((xge_hal_channel_t*)channelh)->reserve_lock,
flags);
#endif
status = __hal_channel_dtr_alloc(channelh, dtrh);
#if defined(XGE_HAL_RX_MULTI_RESERVE)
xge_os_spin_unlock(&((xge_hal_channel_t*)channelh)->reserve_lock);
#elif defined(XGE_HAL_RX_MULTI_RESERVE_IRQ)
xge_os_spin_unlock_irq(&((xge_hal_channel_t*)channelh)->reserve_lock,
flags);
#endif
if (status == XGE_HAL_OK) {
xge_hal_ring_rxd_1_t *rxdp = (xge_hal_ring_rxd_1_t *)*dtrh;
/* Start from a clean descriptor; stale control bits could be
 * misread as completion state. */
rxdp->control_1 = rxdp->control_2 = 0;
#if defined(XGE_OS_MEMORY_CHECK)
/* Debug accounting: mark the descriptor as owned by the ULD. */
__hal_ring_rxd_priv((xge_hal_ring_t *) channelh, rxdp)->allocated = 1;
#endif
}
return status;
}
/*
 * xge_hal_ring_dtr_info_get - Extract extended receive information from a
 * completed descriptor into @ext_info: checksum results, frame type and
 * protocol, VLAN tag, and RTH (receive traffic hashing) results.
 */
__HAL_STATIC_RING __HAL_INLINE_RING void
xge_hal_ring_dtr_info_get(xge_hal_channel_h channelh, xge_hal_dtr_h dtrh,
		xge_hal_dtr_info_t *ext_info)
{
	xge_hal_ring_rxd_1_t *rxd = (xge_hal_ring_rxd_1_t *)dtrh;

	/* Fields decoded from control word 1. */
	ext_info->l3_cksum = XGE_HAL_RXD_GET_L3_CKSUM(rxd->control_1);
	ext_info->l4_cksum = XGE_HAL_RXD_GET_L4_CKSUM(rxd->control_1);
	ext_info->frame = XGE_HAL_RXD_GET_FRAME_TYPE(rxd->control_1);
	ext_info->proto = XGE_HAL_RXD_GET_FRAME_PROTO(rxd->control_1);
	ext_info->rth_it_hit = XGE_HAL_RXD_GET_RTH_IT_HIT(rxd->control_1);
	ext_info->rth_spdm_hit =
	    XGE_HAL_RXD_GET_RTH_SPDM_HIT(rxd->control_1);
	ext_info->rth_hash_type =
	    XGE_HAL_RXD_GET_RTH_HASH_TYPE(rxd->control_1);

	/* Fields decoded from control word 2. */
	ext_info->vlan = XGE_HAL_RXD_GET_VLAN_TAG(rxd->control_2);
	ext_info->rth_value = XGE_HAL_RXD_1_GET_RTH_VALUE(rxd->control_2);
}
/*
 * xge_hal_ring_dtr_info_nb_get - Same as xge_hal_ring_dtr_info_get() but
 * for the non-1-buffer descriptor variant: here the RTH value is taken
 * from buffer0_ptr (where the device deposits it in this mode —
 * NOTE(review): inferred from the field reuse; confirm against the
 * descriptor layout documentation) rather than from control word 2.
 */
__HAL_STATIC_RING __HAL_INLINE_RING void
xge_hal_ring_dtr_info_nb_get(xge_hal_channel_h channelh, xge_hal_dtr_h dtrh,
		xge_hal_dtr_info_t *ext_info)
{
	xge_hal_ring_rxd_1_t *rxd = (xge_hal_ring_rxd_1_t *)dtrh;

	/* Fields decoded from control word 1. */
	ext_info->l3_cksum = XGE_HAL_RXD_GET_L3_CKSUM(rxd->control_1);
	ext_info->l4_cksum = XGE_HAL_RXD_GET_L4_CKSUM(rxd->control_1);
	ext_info->frame = XGE_HAL_RXD_GET_FRAME_TYPE(rxd->control_1);
	ext_info->proto = XGE_HAL_RXD_GET_FRAME_PROTO(rxd->control_1);
	ext_info->rth_it_hit = XGE_HAL_RXD_GET_RTH_IT_HIT(rxd->control_1);
	ext_info->rth_spdm_hit =
	    XGE_HAL_RXD_GET_RTH_SPDM_HIT(rxd->control_1);
	ext_info->rth_hash_type =
	    XGE_HAL_RXD_GET_RTH_HASH_TYPE(rxd->control_1);

	/* VLAN tag still comes from control word 2. */
	ext_info->vlan = XGE_HAL_RXD_GET_VLAN_TAG(rxd->control_2);

	/* RTH value is carried in buffer0_ptr in this mode. */
	ext_info->rth_value = (u32)rxd->buffer0_ptr;
}
/*
 * xge_hal_ring_dtr_1b_set - Prepare a 1-buffer receive descriptor.
 * @dtrh: Descriptor handle.
 * @dma_pointer: DMA address of the single receive buffer.
 * @size: Buffer size in bytes.
 *
 * Stores the buffer address and encodes its size into control word 2
 * (clearing the old size field first).
 */
__HAL_STATIC_RING __HAL_INLINE_RING void
xge_hal_ring_dtr_1b_set(xge_hal_dtr_h dtrh, dma_addr_t dma_pointer, int size)
{
	xge_hal_ring_rxd_1_t *rxdp = (xge_hal_ring_rxd_1_t *)dtrh;
	rxdp->buffer0_ptr = dma_pointer;
	rxdp->control_2 &= (~XGE_HAL_RXD_1_MASK_BUFFER0_SIZE);
	rxdp->control_2 |= XGE_HAL_RXD_1_SET_BUFFER0_SIZE(size);
	/* FIX: control_2 and buffer0_ptr are 64-bit integers; passing them
	 * for "%p" is undefined behavior (and truncates on LP64 varargs
	 * mismatch). Print them with the 64-bit hex format used elsewhere
	 * in this file. */
	xge_debug_ring(XGE_TRACE,
	    "xge_hal_ring_dtr_1b_set: rxdp %p control_2 0x"XGE_OS_LLXFMT
	    " buffer0_ptr 0x"XGE_OS_LLXFMT,
	    (xge_hal_ring_rxd_1_t *)dtrh,
	    (unsigned long long)rxdp->control_2,
	    (unsigned long long)rxdp->buffer0_ptr);
}
/*
 * xge_hal_ring_dtr_1b_get - Retrieve the buffer address and received
 * packet length from a completed 1-buffer descriptor, and add the length
 * to the channel's poll-bytes accounting.
 */
__HAL_STATIC_RING __HAL_INLINE_RING void
xge_hal_ring_dtr_1b_get(xge_hal_channel_h channelh, xge_hal_dtr_h dtrh,
		dma_addr_t *dma_pointer, int *pkt_length)
{
	xge_hal_ring_rxd_1_t *rxd = (xge_hal_ring_rxd_1_t *)dtrh;
	int length = XGE_HAL_RXD_1_GET_BUFFER0_SIZE(rxd->control_2);

	*pkt_length = length;
	*dma_pointer = rxd->buffer0_ptr;

	((xge_hal_channel_t *)channelh)->poll_bytes += length;
}
/*
 * xge_hal_ring_dtr_3b_set - Prepare a 3-buffer receive descriptor: store
 * the three buffer DMA addresses and encode each buffer size into control
 * word 2, replacing any previous size bits.
 */
__HAL_STATIC_RING __HAL_INLINE_RING void
xge_hal_ring_dtr_3b_set(xge_hal_dtr_h dtrh, dma_addr_t dma_pointers[],
		int sizes[])
{
	xge_hal_ring_rxd_3_t *rxd = (xge_hal_ring_rxd_3_t *)dtrh;

	rxd->buffer0_ptr = dma_pointers[0];
	rxd->control_2 = (rxd->control_2 & ~XGE_HAL_RXD_3_MASK_BUFFER0_SIZE) |
	    XGE_HAL_RXD_3_SET_BUFFER0_SIZE(sizes[0]);

	rxd->buffer1_ptr = dma_pointers[1];
	rxd->control_2 = (rxd->control_2 & ~XGE_HAL_RXD_3_MASK_BUFFER1_SIZE) |
	    XGE_HAL_RXD_3_SET_BUFFER1_SIZE(sizes[1]);

	rxd->buffer2_ptr = dma_pointers[2];
	rxd->control_2 = (rxd->control_2 & ~XGE_HAL_RXD_3_MASK_BUFFER2_SIZE) |
	    XGE_HAL_RXD_3_SET_BUFFER2_SIZE(sizes[2]);
}
/*
 * xge_hal_ring_dtr_3b_get - Retrieve the three buffer addresses and sizes
 * from a completed 3-buffer descriptor, accumulating the total received
 * byte count into the channel's poll-bytes accounting.
 */
__HAL_STATIC_RING __HAL_INLINE_RING void
xge_hal_ring_dtr_3b_get(xge_hal_channel_h channelh, xge_hal_dtr_h dtrh,
		dma_addr_t dma_pointers[], int sizes[])
{
	xge_hal_ring_rxd_3_t *rxd = (xge_hal_ring_rxd_3_t *)dtrh;
	int total = 0;

	dma_pointers[0] = rxd->buffer0_ptr;
	sizes[0] = XGE_HAL_RXD_3_GET_BUFFER0_SIZE(rxd->control_2);
	total += sizes[0];

	dma_pointers[1] = rxd->buffer1_ptr;
	sizes[1] = XGE_HAL_RXD_3_GET_BUFFER1_SIZE(rxd->control_2);
	total += sizes[1];

	dma_pointers[2] = rxd->buffer2_ptr;
	sizes[2] = XGE_HAL_RXD_3_GET_BUFFER2_SIZE(rxd->control_2);
	total += sizes[2];

	((xge_hal_channel_t *)channelh)->poll_bytes += total;
}
/*
 * xge_hal_ring_dtr_5b_set - Prepare a 5-buffer receive descriptor: store
 * the five buffer DMA addresses and encode the buffer sizes — buffers 0-2
 * into control word 2, buffers 3-4 into control word 3 — replacing any
 * previous size bits.
 */
__HAL_STATIC_RING __HAL_INLINE_RING void
xge_hal_ring_dtr_5b_set(xge_hal_dtr_h dtrh, dma_addr_t dma_pointers[],
		int sizes[])
{
	xge_hal_ring_rxd_5_t *rxd = (xge_hal_ring_rxd_5_t *)dtrh;

	rxd->buffer0_ptr = dma_pointers[0];
	rxd->control_2 = (rxd->control_2 & ~XGE_HAL_RXD_5_MASK_BUFFER0_SIZE) |
	    XGE_HAL_RXD_5_SET_BUFFER0_SIZE(sizes[0]);

	rxd->buffer1_ptr = dma_pointers[1];
	rxd->control_2 = (rxd->control_2 & ~XGE_HAL_RXD_5_MASK_BUFFER1_SIZE) |
	    XGE_HAL_RXD_5_SET_BUFFER1_SIZE(sizes[1]);

	rxd->buffer2_ptr = dma_pointers[2];
	rxd->control_2 = (rxd->control_2 & ~XGE_HAL_RXD_5_MASK_BUFFER2_SIZE) |
	    XGE_HAL_RXD_5_SET_BUFFER2_SIZE(sizes[2]);

	rxd->buffer3_ptr = dma_pointers[3];
	rxd->control_3 = (rxd->control_3 & ~XGE_HAL_RXD_5_MASK_BUFFER3_SIZE) |
	    XGE_HAL_RXD_5_SET_BUFFER3_SIZE(sizes[3]);

	rxd->buffer4_ptr = dma_pointers[4];
	rxd->control_3 = (rxd->control_3 & ~XGE_HAL_RXD_5_MASK_BUFFER4_SIZE) |
	    XGE_HAL_RXD_5_SET_BUFFER4_SIZE(sizes[4]);
}
/*
 * xge_hal_ring_dtr_5b_get - Retrieve the five buffer addresses and sizes
 * from a completed 5-buffer descriptor (sizes 0-2 from control word 2,
 * sizes 3-4 from control word 3), accumulating the total received byte
 * count into the channel's poll-bytes accounting.
 */
__HAL_STATIC_RING __HAL_INLINE_RING void
xge_hal_ring_dtr_5b_get(xge_hal_channel_h channelh, xge_hal_dtr_h dtrh,
		dma_addr_t dma_pointers[], int sizes[])
{
	xge_hal_ring_rxd_5_t *rxd = (xge_hal_ring_rxd_5_t *)dtrh;
	int total = 0;

	dma_pointers[0] = rxd->buffer0_ptr;
	sizes[0] = XGE_HAL_RXD_5_GET_BUFFER0_SIZE(rxd->control_2);
	total += sizes[0];

	dma_pointers[1] = rxd->buffer1_ptr;
	sizes[1] = XGE_HAL_RXD_5_GET_BUFFER1_SIZE(rxd->control_2);
	total += sizes[1];

	dma_pointers[2] = rxd->buffer2_ptr;
	sizes[2] = XGE_HAL_RXD_5_GET_BUFFER2_SIZE(rxd->control_2);
	total += sizes[2];

	dma_pointers[3] = rxd->buffer3_ptr;
	sizes[3] = XGE_HAL_RXD_5_GET_BUFFER3_SIZE(rxd->control_3);
	total += sizes[3];

	dma_pointers[4] = rxd->buffer4_ptr;
	sizes[4] = XGE_HAL_RXD_5_GET_BUFFER4_SIZE(rxd->control_3);
	total += sizes[4];

	((xge_hal_channel_t *)channelh)->poll_bytes += total;
}
/*
 * xge_hal_ring_dtr_pre_post - First phase of posting a receive descriptor.
 * @channelh: Channel (ring) handle.
 * @dtrh: Descriptor to post.
 *
 * Marks the descriptor not-completed and enqueues it into the channel's
 * work array under the post lock (when XGE_HAL_RX_MULTI_POST / _IRQ is
 * configured). The descriptor is handed to the device later by
 * xge_hal_ring_dtr_post_post().
 *
 * FIX: removed the `priv`/`ring` declarations that were conditionally
 * compiled under XGE_OS_DMA_REQUIRES_SYNC && XGE_HAL_DMA_DTR_STREAMING —
 * neither variable is referenced anywhere in this function, so they only
 * produced unused-variable warnings under that configuration.
 */
__HAL_STATIC_RING __HAL_INLINE_RING void
xge_hal_ring_dtr_pre_post(xge_hal_channel_h channelh, xge_hal_dtr_h dtrh)
{
	xge_hal_ring_rxd_1_t *rxdp = (xge_hal_ring_rxd_1_t *)dtrh;
#if defined(XGE_HAL_RX_MULTI_POST_IRQ)
	unsigned long flags;
#endif
	rxdp->control_2 |= XGE_HAL_RXD_NOT_COMPLETED;
#ifdef XGE_DEBUG_ASSERT
	/* Stamp an "unused" t_code so completion can assert the device
	 * really overwrote it. */
	XGE_HAL_RXD_SET_T_CODE(rxdp->control_1, XGE_HAL_RXD_T_CODE_UNUSED_C);
#endif
	xge_debug_ring(XGE_TRACE, "xge_hal_ring_dtr_pre_post: rxd 0x"XGE_OS_LLXFMT" posted %d post_qid %d",
			(unsigned long long)(ulong_t)dtrh,
			((xge_hal_ring_t *)channelh)->channel.post_index,
			((xge_hal_ring_t *)channelh)->channel.post_qid);
#if defined(XGE_HAL_RX_MULTI_POST)
	xge_os_spin_lock(&((xge_hal_channel_t*)channelh)->post_lock);
#elif defined(XGE_HAL_RX_MULTI_POST_IRQ)
	xge_os_spin_lock_irq(&((xge_hal_channel_t*)channelh)->post_lock,
	flags);
#endif
#if defined(XGE_DEBUG_ASSERT) && defined(XGE_HAL_RING_ENFORCE_ORDER)
	{
	/* Debug-only ordering check: unless the descriptor sits on a 4KB
	 * boundary, it must directly follow the previously posted one. */
	xge_hal_channel_t *channel = (xge_hal_channel_t *)channelh;
	if (channel->post_index != 0) {
		xge_hal_dtr_h prev_dtrh;
		xge_hal_ring_rxd_priv_t *rxdp_priv;
		rxdp_priv = __hal_ring_rxd_priv((xge_hal_ring_t*)channel, rxdp);
		prev_dtrh = channel->work_arr[channel->post_index - 1];
		if (prev_dtrh != NULL &&
		    (rxdp_priv->dma_offset & (~0xFFF)) !=
		    rxdp_priv->dma_offset) {
			xge_assert((char *)prev_dtrh +
			    ((xge_hal_ring_t*)channel)->rxd_size == dtrh);
		}
	}
	}
#endif
	__hal_channel_dtr_post(channelh, dtrh);
#if defined(XGE_HAL_RX_MULTI_POST)
	xge_os_spin_unlock(&((xge_hal_channel_t*)channelh)->post_lock);
#elif defined(XGE_HAL_RX_MULTI_POST_IRQ)
	xge_os_spin_unlock_irq(&((xge_hal_channel_t*)channelh)->post_lock,
	flags);
#endif
}
/*
 * xge_hal_ring_dtr_post_post - Second phase of posting: hand the
 * descriptor to the device.
 * @channelh: Channel (ring) handle.
 * @dtrh: Descriptor previously passed to xge_hal_ring_dtr_pre_post().
 *
 * Sets the POSTED_4_XFRAME ownership bit, syncs the descriptor to the
 * device on platforms that require explicit DMA synchronization, and
 * decrements the channel usage count.
 */
__HAL_STATIC_RING __HAL_INLINE_RING void
xge_hal_ring_dtr_post_post(xge_hal_channel_h channelh, xge_hal_dtr_h dtrh)
{
	xge_hal_ring_rxd_1_t *rxdp = (xge_hal_ring_rxd_1_t *)dtrh;
	xge_hal_ring_t *ring = (xge_hal_ring_t *)channelh;
#if defined(XGE_OS_DMA_REQUIRES_SYNC) && defined(XGE_HAL_DMA_DTR_STREAMING)
	xge_hal_ring_rxd_priv_t *priv;
#endif
	rxdp->control_1 |= XGE_HAL_RXD_POSTED_4_XFRAME;
#if defined(XGE_OS_DMA_REQUIRES_SYNC) && defined(XGE_HAL_DMA_DTR_STREAMING)
	priv = __hal_ring_rxd_priv(ring, rxdp);
	xge_os_dma_sync(ring->channel.pdev,
	                priv->dma_handle, priv->dma_addr,
	                priv->dma_offset, ring->rxd_size,
	                XGE_OS_DMA_DIR_TODEVICE);
#endif
	/* FIX: control_1 is a 64-bit value; printing it via "%p" is
	 * undefined behavior. Use the 64-bit hex format used elsewhere
	 * in this file. */
	xge_debug_ring(XGE_TRACE,
	    "xge_hal_ring_dtr_post_post: rxdp %p control_1 0x"XGE_OS_LLXFMT,
	    (xge_hal_ring_rxd_1_t *)dtrh,
	    (unsigned long long)rxdp->control_1);
	if (ring->channel.usage_cnt > 0)
		ring->channel.usage_cnt--;
}
/*
 * xge_hal_ring_dtr_post_post_wmb - Same as xge_hal_ring_dtr_post_post()
 * but issues a write memory barrier before setting the ownership bit, so
 * all prior descriptor writes are visible to the device before it sees
 * POSTED_4_XFRAME.
 * @channelh: Channel (ring) handle.
 * @dtrh: Descriptor previously passed to xge_hal_ring_dtr_pre_post().
 */
__HAL_STATIC_RING __HAL_INLINE_RING void
xge_hal_ring_dtr_post_post_wmb(xge_hal_channel_h channelh, xge_hal_dtr_h dtrh)
{
	xge_hal_ring_rxd_1_t *rxdp = (xge_hal_ring_rxd_1_t *)dtrh;
	xge_hal_ring_t *ring = (xge_hal_ring_t *)channelh;
#if defined(XGE_OS_DMA_REQUIRES_SYNC) && defined(XGE_HAL_DMA_DTR_STREAMING)
	xge_hal_ring_rxd_priv_t *priv;
#endif
	/* Order all descriptor-field writes before the ownership flip. */
	xge_os_wmb();
	rxdp->control_1 |= XGE_HAL_RXD_POSTED_4_XFRAME;
#if defined(XGE_OS_DMA_REQUIRES_SYNC) && defined(XGE_HAL_DMA_DTR_STREAMING)
	priv = __hal_ring_rxd_priv(ring, rxdp);
	xge_os_dma_sync(ring->channel.pdev,
	                priv->dma_handle, priv->dma_addr,
	                priv->dma_offset, ring->rxd_size,
	                XGE_OS_DMA_DIR_TODEVICE);
#endif
	if (ring->channel.usage_cnt > 0)
		ring->channel.usage_cnt--;
	/* FIX: control_1 is a 64-bit value; printing it via "%p" is
	 * undefined behavior. Use the 64-bit hex format used elsewhere
	 * in this file. */
	xge_debug_ring(XGE_TRACE,
	    "xge_hal_ring_dtr_post_post_wmb: rxdp %p control_1 0x"
	    XGE_OS_LLXFMT" rxds_with_host %d",
	    (xge_hal_ring_rxd_1_t *)dtrh,
	    (unsigned long long)rxdp->control_1, ring->channel.usage_cnt);
}
/*
 * xge_hal_ring_dtr_post - Post a receive descriptor to the device.
 * @channelh: Channel (ring) handle.
 * @dtrh: Descriptor to post.
 *
 * Convenience wrapper that runs both posting phases back-to-back:
 * enqueue into the channel (pre_post) and hand ownership to the device
 * (post_post).
 */
__HAL_STATIC_RING __HAL_INLINE_RING void
xge_hal_ring_dtr_post(xge_hal_channel_h channelh, xge_hal_dtr_h dtrh)
{
xge_hal_ring_dtr_pre_post(channelh, dtrh);
xge_hal_ring_dtr_post_post(channelh, dtrh);
}
/*
 * xge_hal_ring_dtr_next_completed - Fetch the next completed descriptor.
 * @channelh: Channel (ring) handle.
 * @dtrh: Out: the completed descriptor, or 0 if none.
 * @t_code: Out: transfer code reported by the device for the descriptor.
 *
 * Peeks at the head of the work array; a descriptor is complete once the
 * device has cleared both the NOT_COMPLETED bit (control word 2) and the
 * POSTED_4_XFRAME ownership bit (control word 1). On platforms requiring
 * explicit DMA sync, the first 24 bytes of the descriptor are synced from
 * the device first (presumably the control-word portion of the RxD —
 * confirm against the descriptor layout).
 *
 * Returns XGE_HAL_OK with *dtrh/*t_code set on completion;
 * XGE_HAL_COMPLETIONS_REMAIN when indicate_max_pkts is exceeded in this
 * poll pass (non-IRQ-polling builds); otherwise
 * XGE_HAL_INF_NO_MORE_COMPLETED_DESCRIPTORS.
 */
__HAL_STATIC_RING __HAL_INLINE_RING xge_hal_status_e
xge_hal_ring_dtr_next_completed(xge_hal_channel_h channelh, xge_hal_dtr_h *dtrh,
		u8 *t_code)
{
xge_hal_ring_rxd_1_t *rxdp;
xge_hal_ring_t *ring = (xge_hal_ring_t *)channelh;
#if defined(XGE_OS_DMA_REQUIRES_SYNC) && defined(XGE_HAL_DMA_DTR_STREAMING)
xge_hal_ring_rxd_priv_t *priv;
#endif
__hal_channel_dtr_try_complete(ring, dtrh);
rxdp = (xge_hal_ring_rxd_1_t *)*dtrh;
if (rxdp == NULL) {
return XGE_HAL_INF_NO_MORE_COMPLETED_DESCRIPTORS;
}
#if defined(XGE_OS_DMA_REQUIRES_SYNC) && defined(XGE_HAL_DMA_DTR_STREAMING)
/* Pull the device's updates to the descriptor control words into
 * host-visible memory before inspecting them. */
priv = __hal_ring_rxd_priv(ring, rxdp);
xge_os_dma_sync(ring->channel.pdev,
              priv->dma_handle, priv->dma_addr,
	      priv->dma_offset, 24,
	      XGE_OS_DMA_DIR_FROMDEVICE);
#endif
/* Both bits cleared by the device => descriptor completed. */
if (!(rxdp->control_2 & XGE_HAL_RXD_NOT_COMPLETED) &&
!(rxdp->control_1 & XGE_HAL_RXD_POSTED_4_XFRAME)) {
#ifndef XGE_HAL_IRQ_POLLING
/* Bound the number of packets indicated per poll pass. */
if (++ring->cmpl_cnt > ring->indicate_max_pkts) {
/* reset it. since we don't want to return
 * garbage to the ULD */
*dtrh = 0;
return XGE_HAL_COMPLETIONS_REMAIN;
}
#endif
#ifdef XGE_DEBUG_ASSERT
#if defined(XGE_HAL_USE_5B_MODE)
#if !defined(XGE_OS_PLATFORM_64BIT)
if (ring->buffer_mode == XGE_HAL_RING_QUEUE_BUFFER_MODE_5) {
xge_assert(((xge_hal_ring_rxd_5_t *)
rxdp)->host_control!=0);
}
#endif
#else
xge_assert(rxdp->host_control!=0);
#endif
#endif
__hal_channel_dtr_complete(ring);
*t_code = (u8)XGE_HAL_RXD_GET_T_CODE(rxdp->control_1);
/* The "unused" t_code stamped at pre-post time must have been
 * overwritten by the device. */
xge_assert(*t_code != XGE_HAL_RXD_T_CODE_UNUSED_C);
xge_debug_ring(XGE_TRACE,
"compl_index %d post_qid %d t_code %d rxd 0x"XGE_OS_LLXFMT,
((xge_hal_channel_t*)ring)->compl_index,
((xge_hal_channel_t*)ring)->post_qid, *t_code,
(unsigned long long)(ulong_t)rxdp);
/* Track how many RxDs are currently with the host. */
ring->channel.usage_cnt++;
if (ring->channel.stats.usage_max < ring->channel.usage_cnt)
ring->channel.stats.usage_max = ring->channel.usage_cnt;
return XGE_HAL_OK;
}
*dtrh = 0;
return XGE_HAL_INF_NO_MORE_COMPLETED_DESCRIPTORS;
}
/*
 * xge_hal_ring_dtr_free - Return a descriptor to the channel free pool.
 * @channelh: Channel (ring) handle.
 * @dtrh: Descriptor to free.
 *
 * Serialized by the channel free lock when multi-context freeing is
 * configured (XGE_HAL_RX_MULTI_FREE / _IRQ). Under XGE_OS_MEMORY_CHECK,
 * also clears the debug "allocated" flag set at reserve time.
 */
__HAL_STATIC_RING __HAL_INLINE_RING void
xge_hal_ring_dtr_free(xge_hal_channel_h channelh, xge_hal_dtr_h dtrh)
{
#if defined(XGE_HAL_RX_MULTI_FREE_IRQ)
unsigned long flags;
#endif
#if defined(XGE_HAL_RX_MULTI_FREE)
xge_os_spin_lock(&((xge_hal_channel_t*)channelh)->free_lock);
#elif defined(XGE_HAL_RX_MULTI_FREE_IRQ)
xge_os_spin_lock_irq(&((xge_hal_channel_t*)channelh)->free_lock,
flags);
#endif
__hal_channel_dtr_free(channelh, dtrh);
#if defined(XGE_OS_MEMORY_CHECK)
__hal_ring_rxd_priv((xge_hal_ring_t * ) channelh, dtrh)->allocated = 0;
#endif
#if defined(XGE_HAL_RX_MULTI_FREE)
xge_os_spin_unlock(&((xge_hal_channel_t*)channelh)->free_lock);
#elif defined(XGE_HAL_RX_MULTI_FREE_IRQ)
xge_os_spin_unlock_irq(&((xge_hal_channel_t*)channelh)->free_lock,
flags);
#endif
}
/*
 * xge_hal_ring_is_next_dtr_completed - Non-destructively check whether the
 * next descriptor in the ring has completed.
 * @channelh: Channel (ring) handle.
 *
 * Same completion test as xge_hal_ring_dtr_next_completed() — both the
 * NOT_COMPLETED bit (control word 2) and the POSTED_4_XFRAME ownership bit
 * (control word 1) must be cleared by the device — but does not consume
 * the descriptor, update statistics, or return it to the caller.
 *
 * Returns XGE_HAL_OK if the next descriptor is complete, otherwise
 * XGE_HAL_INF_NO_MORE_COMPLETED_DESCRIPTORS.
 */
__HAL_STATIC_RING __HAL_INLINE_RING xge_hal_status_e
xge_hal_ring_is_next_dtr_completed(xge_hal_channel_h channelh)
{
xge_hal_ring_rxd_1_t *rxdp;
xge_hal_ring_t *ring = (xge_hal_ring_t *)channelh;
xge_hal_dtr_h dtrh;
__hal_channel_dtr_try_complete(ring, &dtrh);
rxdp = (xge_hal_ring_rxd_1_t *)dtrh;
if (rxdp == NULL) {
return XGE_HAL_INF_NO_MORE_COMPLETED_DESCRIPTORS;
}
if (!(rxdp->control_2 & XGE_HAL_RXD_NOT_COMPLETED) &&
!(rxdp->control_1 & XGE_HAL_RXD_POSTED_4_XFRAME)) {
#ifdef XGE_DEBUG_ASSERT
#if defined(XGE_HAL_USE_5B_MODE)
#if !defined(XGE_OS_PLATFORM_64BIT)
if (ring->buffer_mode == XGE_HAL_RING_QUEUE_BUFFER_MODE_5) {
xge_assert(((xge_hal_ring_rxd_5_t *)
rxdp)->host_control!=0);
}
#endif
#else
xge_assert(rxdp->host_control!=0);
#endif
#endif
return XGE_HAL_OK;
}
return XGE_HAL_INF_NO_MORE_COMPLETED_DESCRIPTORS;
}