ET_TX_NDESC
if ((tbd->tbd_used + ET_NSEG_SPARE) > ET_TX_NDESC) {
for (i = 0; i < ET_TX_NDESC; ++i) {
CSR_WRITE_4(sc, ET_TX_RING_CNT, ET_TX_NDESC - 1);
maxsegs = ET_TX_NDESC - tbd->tbd_used;
KKASSERT(tx_ring->tr_ready_index < ET_TX_NDESC);
idx = (first_idx + i) % ET_TX_NDESC;
KKASSERT(tx_ring->tr_ready_index < ET_TX_NDESC);
if (++tx_ring->tr_ready_index == ET_TX_NDESC) {
KKASSERT(tbd->tbd_used <= ET_TX_NDESC);
KKASSERT(tbd->tbd_start_index < ET_TX_NDESC);
if (++tbd->tbd_start_index == ET_TX_NDESC) {
if (tbd->tbd_used + ET_NSEG_SPARE <= ET_TX_NDESC)
ifq_init_maxlen(&ifp->if_snd, ET_TX_NDESC);
et_dma_mbuf_destroy(sc, ET_TX_NDESC, rx_done);
for (i = 0; i < ET_TX_NDESC; ++i) {
/*
 * Total size in bytes of the TX descriptor ring:
 * ET_TX_NDESC hardware descriptors of struct et_txdesc.
 * NOTE(review): assumes the ring is a single contiguous array of
 * et_txdesc entries — confirm against the DMA allocation site.
 */
#define ET_TX_RING_SIZE (ET_TX_NDESC * sizeof(struct et_txdesc))
struct et_txbuf tbd_buf[ET_TX_NDESC];