Lines Matching defs:tcb

322-338 (ASCII lifecycle diagram in the block comment, matched only in fragments:
        a tcb starts on the free list, moves to the work list when allocated to
        carry a frame and its mblk from MAC, and is recycled back to the free
        list, the mblk freed, once hardware indicates the entry was transmitted)
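
The free list named in that diagram behaves as a stack of tcb pointers indexed
by itrq_tcb_free. A minimal sketch of the allocation side, assuming the count
is protected by a per-queue mutex (the itrq_tcb_lock name and the clearing of
the vacated slot are assumptions):

/*
 * Sketch only: pop one tcb off the free list, or return NULL if none
 * remain so the caller can back off.
 */
i40e_tx_control_block_t *
i40e_tcb_alloc_sketch(i40e_trqpair_t *itrq)
{
	i40e_tx_control_block_t *tcb = NULL;

	mutex_enter(&itrq->itrq_tcb_lock);		/* lock name assumed */
	if (itrq->itrq_tcb_free > 0) {
		itrq->itrq_tcb_free--;
		tcb = itrq->itrq_tcb_free_list[itrq->itrq_tcb_free];
		itrq->itrq_tcb_free_list[itrq->itrq_tcb_free] = NULL;
	}
	mutex_exit(&itrq->itrq_tcb_lock);

	return (tcb);
}

The transmit path at 2154-2155 below checks for exactly that NULL case.
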
836 i40e_tx_control_block_t *tcb = itrq->itrq_tcb_area;
838 for (i = 0; i < itrq->itrq_tx_free_list_size; i++, tcb++) {
839 i40e_free_dma_buffer(&tcb->tcb_dma);
840 if (tcb->tcb_dma_handle != NULL) {
841 ddi_dma_free_handle(&tcb->tcb_dma_handle);
842 tcb->tcb_dma_handle = NULL;
876 i40e_tx_control_block_t *tcb;
928 i40e_error(i40e, "failed to allocate a %d entry tcb area for "
934 * For each tcb, allocate DMA memory.
938 tcb = itrq->itrq_tcb_area;
939 for (i = 0; i < itrq->itrq_tx_free_list_size; i++, tcb++) {
940 VERIFY(tcb != NULL);
949 &tcb->tcb_dma_handle);
954 tcb->tcb_dma_handle = NULL;
958 if (i40e_alloc_dma_buffer(i40e, &tcb->tcb_dma,
967 itrq->itrq_tcb_free_list[i] = tcb;
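
Pulling the fragments at 938-967 together: each tcb gets its own DMA handle
and a pre-allocated buffer, and is then parked on the free list. A rough sketch
of the loop body follows; the DMA attribute structure, the i40e_dip devinfo
field, and the cleanup label are assumptions, and i40e_alloc_dma_buffer() is
only referenced in a comment because its full argument list is not visible
above:

tcb = itrq->itrq_tcb_area;
for (i = 0; i < itrq->itrq_tx_free_list_size; i++, tcb++) {
	int ret;

	VERIFY(tcb != NULL);

	/* One handle per tcb, so a later bind/unbind never blocks a neighbour. */
	ret = ddi_dma_alloc_handle(i40e->i40e_dip, &dma_attrs,	/* attrs assumed */
	    DDI_DMA_DONTWAIT, NULL, &tcb->tcb_dma_handle);
	if (ret != DDI_SUCCESS) {
		tcb->tcb_dma_handle = NULL;
		goto cleanup;					/* label assumed */
	}

	/*
	 * tcb->tcb_dma is then populated by i40e_alloc_dma_buffer();
	 * its remaining arguments are not shown in this listing.
	 */

	itrq->itrq_tcb_free_list[i] = tcb;
}
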
1884 i40e_tcb_free(i40e_trqpair_t *itrq, i40e_tx_control_block_t *tcb)
1886 ASSERT(tcb != NULL);
1890 itrq->itrq_tcb_free_list[itrq->itrq_tcb_free] = tcb;
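
The free side at 1884-1890 is the mirror image of the allocation sketch above:
push the tcb back onto the list and bump the count. Again a sketch, with the
lock name assumed:

void
i40e_tcb_free_sketch(i40e_trqpair_t *itrq, i40e_tx_control_block_t *tcb)
{
	ASSERT(tcb != NULL);

	mutex_enter(&itrq->itrq_tcb_lock);		/* lock name assumed */
	itrq->itrq_tcb_free_list[itrq->itrq_tcb_free] = tcb;
	itrq->itrq_tcb_free++;
	mutex_exit(&itrq->itrq_tcb_lock);
}
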
1921 i40e_tcb_reset(i40e_tx_control_block_t *tcb)
1923 switch (tcb->tcb_type) {
1925 tcb->tcb_dma.dmab_len = 0;
1928 (void) ddi_dma_unbind_handle(tcb->tcb_dma_handle);
1932 panic("trying to free tcb %p with bad type none", (void *)tcb);
1934 panic("unknown i40e tcb type: %d", tcb->tcb_type);
1937 tcb->tcb_type = I40E_TX_NONE;
1938 freemsg(tcb->tcb_mp);
1939 tcb->tcb_mp = NULL;
1940 tcb->tcb_next = NULL;
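
Assembled into one piece, the reset at 1921-1940 dispatches on how the frame
was attached to the tcb before detaching the mblk. A sketch; the I40E_TX_DMA
case label is inferred from the ddi_dma_unbind_handle() call above and is an
assumption:

switch (tcb->tcb_type) {
case I40E_TX_COPY:
	/* Frame bytes were copied into the pre-bound buffer; just rewind it. */
	tcb->tcb_dma.dmab_len = 0;
	break;
case I40E_TX_DMA:
	/* The handle was bound directly to the mblk; drop that binding. */
	(void) ddi_dma_unbind_handle(tcb->tcb_dma_handle);
	break;
case I40E_TX_NONE:
	panic("trying to free tcb %p with bad type none", (void *)tcb);
default:
	panic("unknown i40e tcb type: %d", tcb->tcb_type);
}

tcb->tcb_type = I40E_TX_NONE;
freemsg(tcb->tcb_mp);
tcb->tcb_mp = NULL;
tcb->tcb_next = NULL;
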
1969 i40e_tx_control_block_t *tcb;
1971 tcb = itrq->itrq_tcb_work_list[index];
1972 VERIFY(tcb != NULL);
1974 i40e_tcb_reset(tcb);
1975 i40e_tcb_free(itrq, tcb);
2036 i40e_tx_control_block_t *tcb;
2038 tcb = itrq->itrq_tcb_work_list[toclean];
2040 ASSERT(tcb != NULL);
2041 tcb->tcb_next = tcbhead;
2042 tcbhead = tcb;
2068 * Now clean up the tcb.
2071 i40e_tx_control_block_t *tcb = tcbhead;
2073 tcbhead = tcb->tcb_next;
2074 i40e_tcb_reset(tcb);
2075 i40e_tcb_free(itrq, tcb);
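
The recycle path at 2036-2075 is two-phased: completed tcbs are unlinked from
the work list and chained through tcb_next while the descriptor accounting is
updated, and only afterwards is the chain walked to reset and free each one. A
sketch of both phases; the itrq_desc_head starting point, the wbhead completion
index, the ring-size wrap, and the clearing of the work-list slot are
assumptions:

/*
 * Sketch: recycle every tcb whose descriptor index lies in
 * [itrq_desc_head, wbhead).  How wbhead (the completion point) is
 * discovered is not shown in the listing and is left to the caller.
 */
static void
i40e_tx_recycle_sketch(i40e_trqpair_t *itrq, uint32_t wbhead)
{
	i40e_tx_control_block_t *tcbhead = NULL;
	uint32_t toclean = itrq->itrq_desc_head;	/* field name assumed */

	/* Phase 1: unlink completed tcbs from the work list, chain them up. */
	while (toclean != wbhead) {
		i40e_tx_control_block_t *tcb = itrq->itrq_tcb_work_list[toclean];

		ASSERT(tcb != NULL);
		itrq->itrq_tcb_work_list[toclean] = NULL;	/* clearing assumed */
		tcb->tcb_next = tcbhead;
		tcbhead = tcb;
		toclean = (toclean + 1) % itrq->itrq_tx_ring_size; /* wrap assumed */
	}
	itrq->itrq_desc_head = toclean;

	/* Phase 2: now clean up the tcbs, outside the descriptor bookkeeping. */
	while (tcbhead != NULL) {
		i40e_tx_control_block_t *tcb = tcbhead;

		tcbhead = tcb->tcb_next;
		i40e_tcb_reset(tcb);
		i40e_tcb_free(itrq, tcb);
	}
}
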
2101 i40e_tx_control_block_t *tcb;
2154 tcb = i40e_tcb_alloc(itrq);
2155 if (tcb == NULL) {
2166 ASSERT(tcb->tcb_dma.dmab_len == 0);
2167 ASSERT(tcb->tcb_dma.dmab_size >= mpsize);
2170 void *coff = tcb->tcb_dma.dmab_address + tcb->tcb_dma.dmab_len;
2173 tcb->tcb_dma.dmab_len += clen;
2175 ASSERT(tcb->tcb_dma.dmab_len == mpsize);
2181 tcb->tcb_mp = mp;
2182 tcb->tcb_type = I40E_TX_COPY;
2183 I40E_DMA_SYNC(&tcb->tcb_dma, DDI_DMA_SYNC_FORDEV);
2200 itrq->itrq_tcb_work_list[itrq->itrq_desc_tail] = tcb;
2218 CPU_TO_LE64((uintptr_t)tcb->tcb_dma.dmab_dma_address);
2222 ((uint64_t)tcb->tcb_dma.dmab_len << I40E_TXD_QW1_TX_BUF_SZ_SHIFT)));
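
The copy transmit path at 2101-2222 stages the entire frame into the tcb's
pre-allocated buffer before writing a single descriptor. A sketch of the copy
loop implied by the fragments; driving it with msgsize() and MBLKL() is an
assumption about structure, not a verbatim reconstruction:

/* After i40e_tcb_alloc() has succeeded (2154), copy the whole chain in. */
size_t mpsize = msgsize(mp);
mblk_t *nmp;

ASSERT(tcb->tcb_dma.dmab_len == 0);
ASSERT(tcb->tcb_dma.dmab_size >= mpsize);

for (nmp = mp; nmp != NULL; nmp = nmp->b_cont) {
	size_t clen = MBLKL(nmp);
	void *coff = tcb->tcb_dma.dmab_address + tcb->tcb_dma.dmab_len;

	bcopy(nmp->b_rptr, coff, clen);
	tcb->tcb_dma.dmab_len += clen;
}
ASSERT(tcb->tcb_dma.dmab_len == mpsize);

/* The tcb now owns the mblk; it is freed later by i40e_tcb_reset(). */
tcb->tcb_mp = mp;
tcb->tcb_type = I40E_TX_COPY;
I40E_DMA_SYNC(&tcb->tcb_dma, DDI_DMA_SYNC_FORDEV);

The descriptor is then filled from dmab_dma_address and dmab_len (2218-2222),
and the tcb is recorded in itrq_tcb_work_list at the tail slot (2200) so the
recycle path above can find it once the hardware finishes with it.
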
2253 * need to signal MAC. If there are allocated tcb's, return them now.
2257 if (tcb != NULL) {
2258 tcb->tcb_mp = NULL;
2259 i40e_tcb_reset(tcb);
2260 i40e_tcb_free(itrq, tcb);