TX_DESC_NUM
/*
 * Advance a TX ring index by one, wrapping at TX_DESC_NUM.
 * Uses a bitwise AND for the wrap, so TX_DESC_NUM MUST be a power of two
 * (otherwise the mask (TX_DESC_NUM-1) does not behave like a modulus).
 */
#define TX_NEXT(N) (((N) + 1) & (TX_DESC_NUM-1))
struct sk_buff *tx_skb[TX_DESC_NUM];
dma_addr_t tx_phys[TX_DESC_NUM];
return (head - tail) % TX_DESC_NUM;
if (unlikely(netif_queue_stopped(ndev)) && (count < (TX_DESC_NUM - 1)))
if (count == (TX_DESC_NUM - 1)) {
TX_DESC_NUM * sizeof(struct tx_desc),
for (i = 0; i < TX_DESC_NUM; i++)
dma_free_coherent(d, TX_DESC_NUM * sizeof(struct tx_desc),
priv->tx_coalesce_frames = TX_DESC_NUM * 3 / 4;
priv->tx_bq.count = TX_DESC_NUM;
priv->tx_rq.count = TX_DESC_NUM;
TX_DESC_NUM * sizeof(struct sg_desc),
TX_DESC_NUM * sizeof(struct sg_desc),
struct sk_buff *tx_skb[TX_DESC_NUM];
hix5hd2_set_desc_depth(priv, RX_DESC_NUM, TX_DESC_NUM);
num = CIRC_CNT(end, start, TX_DESC_NUM);
pos = dma_ring_incr(pos, TX_DESC_NUM);
pos = dma_ring_incr(pos, TX_DESC_NUM);
for (i = 0; i < TX_DESC_NUM; i++) {
for (i = 0; i < TX_DESC_NUM; i++) {
return CIRC_SPACE(priv->tx_head, priv->tx_tail, TX_DESC_NUM);
TX_DESC_NUM, &priv->tx_base,
priv->tx_buf_base = kmalloc_array(priv->tx_buf_size, TX_DESC_NUM,
TX_REG_DESC_SIZE * TX_DESC_NUM,
dma_addr_t tx_mapping[TX_DESC_NUM];
unsigned int tx_len[TX_DESC_NUM];
struct sk_buff *tx_skb[TX_DESC_NUM];
/*
 * Bitmask used to wrap TX ring indices in place of a modulo operation.
 * Only valid while TX_DESC_NUM is a power of two — NOTE(review): confirm
 * the TX_DESC_NUM definition (not visible here) keeps that property.
 */
#define TX_DESC_NUM_MASK (TX_DESC_NUM - 1)
struct spl2sw_skb_info tx_temp_skb_info[TX_DESC_NUM];
(TX_DESC_NUM + MAC_GUARD_DESC_NUM));
desc_size = (TX_DESC_NUM + MAC_GUARD_DESC_NUM) * sizeof(struct spl2sw_mac_desc);
comm->rx_desc[0] = &comm->tx_desc[TX_DESC_NUM + MAC_GUARD_DESC_NUM];
for (i = 0; i < TX_DESC_NUM; i++)
for (i = 0; i < TX_DESC_NUM; i++) {
if (tx_pos == (TX_DESC_NUM - 1))
tx_pos = ((tx_pos + 1) == TX_DESC_NUM) ? 0 : tx_pos + 1;
tx_done_pos = ((tx_done_pos + 1) == TX_DESC_NUM) ? 0 : tx_done_pos + 1;
writel(comm->desc_dma + sizeof(struct spl2sw_mac_desc) * TX_DESC_NUM,
writel(comm->desc_dma + sizeof(struct spl2sw_mac_desc) * (TX_DESC_NUM +
writel(comm->desc_dma + sizeof(struct spl2sw_mac_desc) * (TX_DESC_NUM +