Lines Matching defs:i40e

365 #error	"unknown architecture for i40e"
551 i40e_alloc_dma_buffer(i40e_t *i40e, i40e_dma_buffer_t *dmap,
569 ret = ddi_dma_alloc_handle(i40e->i40e_dip, attrsp, DDI_DMA_DONTWAIT,
572 i40e_error(i40e, "failed to allocate dma handle for I/O "
585 i40e_error(i40e, "failed to allocate %ld bytes of DMA for I/O "
606 i40e_error(i40e, "failed to allocate %ld bytes of DMA for I/O "
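
The matches at 551-606 trace i40e_alloc_dma_buffer(): allocate a DMA handle against the supplied attributes, allocate backing memory for it, and bind the buffer so the hardware sees a single cookie. A minimal sketch of that DDI sequence, assuming hypothetical i40e_dma_buffer_t field names and a simplified parameter list rather than the driver's exact code:

    /*
     * Sketch of the alloc-handle / mem-alloc / bind sequence implied by the
     * matches at 551-606.  The parameter list and the i40e_dma_buffer_t
     * field names (dmab_dma_handle, dmab_acc_handle, dmab_address,
     * dmab_dma_address, dmab_size) are assumptions; on failure the caller
     * is expected to tear down partial allocations via the matching free
     * path.
     */
    static boolean_t
    i40e_alloc_dma_buffer_sketch(i40e_t *i40e, i40e_dma_buffer_t *dmap,
        ddi_dma_attr_t *attrsp, ddi_device_acc_attr_t *accp, boolean_t stream,
        boolean_t zero, size_t size)
    {
        int ret;
        uint_t flags = stream ? DDI_DMA_STREAMING : DDI_DMA_CONSISTENT;
        size_t len;
        ddi_dma_cookie_t cookie;
        uint_t ncookies;

        ret = ddi_dma_alloc_handle(i40e->i40e_dip, attrsp, DDI_DMA_DONTWAIT,
            NULL, &dmap->dmab_dma_handle);
        if (ret != DDI_SUCCESS) {
            i40e_error(i40e, "failed to allocate dma handle for I/O "
                "buffers: %d", ret);
            return (B_FALSE);
        }

        ret = ddi_dma_mem_alloc(dmap->dmab_dma_handle, size, accp, flags,
            DDI_DMA_DONTWAIT, NULL, &dmap->dmab_address, &len,
            &dmap->dmab_acc_handle);
        if (ret != DDI_SUCCESS) {
            i40e_error(i40e, "failed to allocate %ld bytes of DMA for I/O "
                "buffers", size);
            return (B_FALSE);
        }

        if (zero)
            bzero(dmap->dmab_address, len);

        ret = ddi_dma_addr_bind_handle(dmap->dmab_dma_handle, NULL,
            dmap->dmab_address, len, DDI_DMA_RDWR | flags, DDI_DMA_DONTWAIT,
            NULL, &cookie, &ncookies);
        if (ret != DDI_DMA_MAPPED) {
            i40e_error(i40e, "failed to bind %ld bytes of DMA for I/O "
                "buffers: %d", size, ret);
            return (B_FALSE);
        }

        ASSERT3U(ncookies, ==, 1);
        dmap->dmab_dma_address = cookie.dmac_laddress;
        dmap->dmab_size = len;
        return (B_TRUE);
    }
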
653 i40e_alloc_rx_data(i40e_t *i40e, i40e_trqpair_t *itrq)
661 rxd->rxd_i40e = i40e;
663 rxd->rxd_ring_size = i40e->i40e_rx_ring_size;
664 rxd->rxd_free_list_size = i40e->i40e_rx_ring_size;
671 i40e_error(i40e, "failed to allocate rx work list for a ring "
680 i40e_error(i40e, "failed to allocate a %d entry rx free list "
688 i40e_error(i40e, "failed to allocate a %d entry rcb area for "
717 i40e_t *i40e = rxd->rxd_i40e;
723 mutex_enter(&i40e->i40e_rx_pending_lock);
749 atomic_inc_32(&i40e->i40e_rx_pending);
752 mutex_exit(&i40e->i40e_rx_pending_lock);
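
The matches at 717-752, read together with the recycle-path matches at 1148-1161 further down, show the loaned-buffer accounting: when rx data is torn down while receive buffers are still held by the stack, i40e_rx_pending is bumped under i40e_rx_pending_lock and the final free is deferred until the recycle callback decrements the counter and broadcasts i40e_rx_pending_cv. A hedged sketch of the teardown half; rxd_rcb_pending is an assumed name for the outstanding-buffer count:

    /*
     * Sketch of deferring rx teardown while buffers are loaned upstream,
     * based on the matches at 717-752.  rxd_rcb_pending is an assumed name
     * for the count of rx control blocks still held by the stack.
     */
    static void
    i40e_free_rx_data_sketch(i40e_rx_data_t *rxd)
    {
        i40e_t *i40e = rxd->rxd_i40e;

        mutex_enter(&i40e->i40e_rx_pending_lock);
        if (rxd->rxd_rcb_pending != 0) {
            /*
             * Buffers are still out with the stack; remember that this
             * ring's data must be reclaimed later and let the recycle
             * path (see the matches at 1148-1161) drop the count and
             * wake anyone waiting on i40e_rx_pending_cv.
             */
            atomic_inc_32(&i40e->i40e_rx_pending);
            mutex_exit(&i40e->i40e_rx_pending_lock);
            return;
        }
        mutex_exit(&i40e->i40e_rx_pending_lock);

        /* Safe to free the descriptor ring and control block area here. */
    }
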
765 i40e_t *i40e = rxd->rxd_i40e;
772 if (i40e_alloc_dma_buffer(i40e, &rxd->rxd_desc_area,
773 &i40e->i40e_static_dma_attr, &i40e->i40e_desc_acc_attr, B_FALSE,
775 i40e_error(i40e, "failed to allocate DMA resources "
786 dmasz = i40e->i40e_rx_buf_size;
799 if (i40e_alloc_dma_buffer(i40e, dmap,
800 &i40e->i40e_static_dma_attr, &i40e->i40e_buf_acc_attr,
802 i40e_error(i40e, "failed to allocate rx dma buffer");
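
Lines 765-802 are i40e_alloc_rx_dma(): one DMA-consistent area for the descriptor ring (described by i40e_desc_acc_attr) and then one buffer of i40e_rx_buf_size bytes per receive control block (described by i40e_buf_acc_attr). A rough sketch of that shape, reusing the allocation sketch above; the descriptor type name and the rcb layout are assumptions:

    /*
     * Sketch of i40e_alloc_rx_dma() as implied by the matches at 765-802.
     * i40e_rx_desc_t, rxd_rcb_area, and the rcb_dma field are assumed
     * names; the streaming/zeroing arguments follow the sketch above, not
     * necessarily the driver.
     */
    static boolean_t
    i40e_alloc_rx_dma_sketch(i40e_rx_data_t *rxd)
    {
        i40e_t *i40e = rxd->rxd_i40e;
        size_t dmasz;
        uint32_t i, count;

        /* Descriptor ring: DMA-consistent, zeroed before use. */
        dmasz = sizeof (i40e_rx_desc_t) * rxd->rxd_ring_size;
        if (i40e_alloc_dma_buffer_sketch(i40e, &rxd->rxd_desc_area,
            &i40e->i40e_static_dma_attr, &i40e->i40e_desc_acc_attr, B_FALSE,
            B_TRUE, dmasz) == B_FALSE) {
            i40e_error(i40e, "failed to allocate DMA resources "
                "for rx descriptor ring");
            return (B_FALSE);
        }

        /* One receive buffer per control block on the work and free lists. */
        dmasz = i40e->i40e_rx_buf_size;
        count = rxd->rxd_ring_size + rxd->rxd_free_list_size;
        for (i = 0; i < count; i++) {
            i40e_rx_control_block_t *rcb = &rxd->rxd_rcb_area[i];

            if (i40e_alloc_dma_buffer_sketch(i40e, &rcb->rcb_dma,
                &i40e->i40e_static_dma_attr, &i40e->i40e_buf_acc_attr,
                B_TRUE, B_FALSE, dmasz) == B_FALSE) {
                i40e_error(i40e, "failed to allocate rx dma buffer");
                return (B_FALSE);
            }
        }

        return (B_TRUE);
    }
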
877 i40e_t *i40e = itrq->itrq_i40e;
879 itrq->itrq_tx_ring_size = i40e->i40e_tx_ring_size;
880 itrq->itrq_tx_free_list_size = i40e->i40e_tx_ring_size +
881 (i40e->i40e_tx_ring_size >> 1);
890 if (i40e_alloc_dma_buffer(i40e, &itrq->itrq_desc_area,
891 &i40e->i40e_static_dma_attr, &i40e->i40e_desc_acc_attr,
893 i40e_error(i40e, "failed to allocate DMA resources for tx "
908 i40e_error(i40e, "failed to allocate a %d entry tx work list "
916 i40e_error(i40e, "failed to allocate a %d entry tx free list "
928 i40e_error(i40e, "failed to allocate a %d entry tcb area for "
936 dmasz = i40e->i40e_tx_buf_size;
947 ret = ddi_dma_alloc_handle(i40e->i40e_dip,
948 &i40e->i40e_txbind_dma_attr, DDI_DMA_DONTWAIT, NULL,
951 i40e_error(i40e, "failed to allocate DMA handle for tx "
958 if (i40e_alloc_dma_buffer(i40e, &tcb->tcb_dma,
959 &i40e->i40e_static_dma_attr, &i40e->i40e_buf_acc_attr,
961 i40e_error(i40e, "failed to allocate %ld bytes of "
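
Lines 877-961 are the transmit analogue: the free list is sized at 1.5x the descriptor ring (i40e_tx_ring_size + (i40e_tx_ring_size >> 1)), and each tx control block gets both a DMA bind handle built from i40e_txbind_dma_attr (for mapping mblks directly) and a pre-allocated copy buffer of i40e_tx_buf_size bytes. A sketch of the per-tcb setup; the tcb field names are assumptions:

    /*
     * Sketch of the per-tx-control-block setup implied by the matches at
     * 936-961: a DMA bind handle for zero-copy binds plus a bounce buffer
     * for frames that are instead copied.  tcb_dma_handle and tcb_dma are
     * assumed field names.
     */
    static boolean_t
    i40e_alloc_tcb_sketch(i40e_t *i40e, i40e_tx_control_block_t *tcb)
    {
        int ret;
        size_t dmasz = i40e->i40e_tx_buf_size;

        /* Handle used to bind caller mblks with i40e_txbind_dma_attr. */
        ret = ddi_dma_alloc_handle(i40e->i40e_dip,
            &i40e->i40e_txbind_dma_attr, DDI_DMA_DONTWAIT, NULL,
            &tcb->tcb_dma_handle);
        if (ret != DDI_SUCCESS) {
            i40e_error(i40e, "failed to allocate DMA handle for tx "
                "data binding: %d", ret);
            return (B_FALSE);
        }

        /* Pre-allocated copy buffer for frames below the bind threshold. */
        if (i40e_alloc_dma_buffer_sketch(i40e, &tcb->tcb_dma,
            &i40e->i40e_static_dma_attr, &i40e->i40e_buf_acc_attr,
            B_TRUE, B_FALSE, dmasz) == B_FALSE) {
            i40e_error(i40e, "failed to allocate %ld bytes of "
                "DMA for tx copy buffer", dmasz);
            return (B_FALSE);
        }

        return (B_TRUE);
    }
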
980 * Free all memory associated with all of the rings on this i40e instance. Note,
984 i40e_free_ring_mem(i40e_t *i40e, boolean_t failed_init)
988 for (i = 0; i < i40e->i40e_num_trqpairs; i++) {
989 i40e_rx_data_t *rxd = i40e->i40e_trqpairs[i].itrq_rxdata;
1002 mutex_enter(&i40e->i40e_rx_pending_lock);
1006 i40e->i40e_trqpairs[i].itrq_rxdata = NULL;
1008 mutex_exit(&i40e->i40e_rx_pending_lock);
1010 i40e_free_tx_dma(&i40e->i40e_trqpairs[i]);
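
The matches at 980-1010 cover i40e_free_ring_mem(), which walks every trqpair, detaches the rx data under i40e_rx_pending_lock so the recycle path cannot race with teardown, and then frees the tx DMA. A hedged reconstruction of that loop; the rx free helper is the stand-in sketched earlier:

    /*
     * Sketch of i40e_free_ring_mem() from the matches at 984-1010.
     * failed_init would control whether loaned-buffer accounting is
     * honored during a normal stop versus an aborted attach; it is left
     * unused in this sketch.
     */
    static void
    i40e_free_ring_mem_sketch(i40e_t *i40e, boolean_t failed_init)
    {
        uint_t i;

        for (i = 0; i < i40e->i40e_num_trqpairs; i++) {
            i40e_rx_data_t *rxd = i40e->i40e_trqpairs[i].itrq_rxdata;

            /*
             * Detach the rx data under the pending lock so the recycle
             * path cannot race with teardown, then free it.
             */
            mutex_enter(&i40e->i40e_rx_pending_lock);
            i40e->i40e_trqpairs[i].itrq_rxdata = NULL;
            mutex_exit(&i40e->i40e_rx_pending_lock);

            if (rxd != NULL)
                i40e_free_rx_data_sketch(rxd);

            i40e_free_tx_dma(&i40e->i40e_trqpairs[i]);
        }
    }
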
1015 * Allocate all of the resources associated with all of the rings on this i40e
1021 i40e_alloc_ring_mem(i40e_t *i40e)
1025 for (i = 0; i < i40e->i40e_num_trqpairs; i++) {
1026 if (i40e_alloc_rx_data(i40e, &i40e->i40e_trqpairs[i]) ==
1030 if (i40e_alloc_rx_dma(i40e->i40e_trqpairs[i].itrq_rxdata) ==
1034 if (i40e_alloc_tx_dma(&i40e->i40e_trqpairs[i]) == B_FALSE)
1041 i40e_free_ring_mem(i40e, B_TRUE);
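
Lines 1015-1041 are i40e_alloc_ring_mem(): rx bookkeeping, rx DMA, and tx DMA are allocated per trqpair, and any failure unwinds through i40e_free_ring_mem(i40e, B_TRUE). A sketch using the functions named in the matches:

    /*
     * Sketch of i40e_alloc_ring_mem() per the matches at 1021-1041:
     * allocate rx bookkeeping, rx DMA, and tx DMA for every trqpair,
     * unwinding with i40e_free_ring_mem() on the first failure.
     */
    static boolean_t
    i40e_alloc_ring_mem_sketch(i40e_t *i40e)
    {
        uint_t i;

        for (i = 0; i < i40e->i40e_num_trqpairs; i++) {
            if (i40e_alloc_rx_data(i40e, &i40e->i40e_trqpairs[i]) ==
                B_FALSE)
                goto unwind;

            if (i40e_alloc_rx_dma(i40e->i40e_trqpairs[i].itrq_rxdata) ==
                B_FALSE)
                goto unwind;

            if (i40e_alloc_tx_dma(&i40e->i40e_trqpairs[i]) == B_FALSE)
                goto unwind;
        }

        return (B_TRUE);

    unwind:
        i40e_free_ring_mem(i40e, B_TRUE);
        return (B_FALSE);
    }
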
1047 * Because every instance of i40e may have different support for FMA
1052 i40e_init_dma_attrs(i40e_t *i40e, boolean_t fma)
1054 bcopy(&i40e_g_static_dma_attr, &i40e->i40e_static_dma_attr,
1056 bcopy(&i40e_g_txbind_dma_attr, &i40e->i40e_txbind_dma_attr,
1058 bcopy(&i40e_g_desc_acc_attr, &i40e->i40e_desc_acc_attr,
1060 bcopy(&i40e_g_buf_acc_attr, &i40e->i40e_buf_acc_attr,
1064 i40e->i40e_static_dma_attr.dma_attr_flags |= DDI_DMA_FLAGERR;
1065 i40e->i40e_txbind_dma_attr.dma_attr_flags |= DDI_DMA_FLAGERR;
1067 i40e->i40e_static_dma_attr.dma_attr_flags &= ~DDI_DMA_FLAGERR;
1068 i40e->i40e_txbind_dma_attr.dma_attr_flags &= ~DDI_DMA_FLAGERR;
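
Lines 1047-1068 show i40e_init_dma_attrs(): the global attribute templates are copied into the per-instance copies, and DDI_DMA_FLAGERR is set or cleared in the DMA attributes depending on whether DMA FMA is enabled for this instance. A sketch (the bcopy sizes are the obvious structure sizes, not copied from the driver):

    /*
     * Sketch of i40e_init_dma_attrs() per the matches at 1052-1068: copy
     * the global templates, then toggle DDI_DMA_FLAGERR based on FMA
     * support.
     */
    static void
    i40e_init_dma_attrs_sketch(i40e_t *i40e, boolean_t fma)
    {
        bcopy(&i40e_g_static_dma_attr, &i40e->i40e_static_dma_attr,
            sizeof (ddi_dma_attr_t));
        bcopy(&i40e_g_txbind_dma_attr, &i40e->i40e_txbind_dma_attr,
            sizeof (ddi_dma_attr_t));
        bcopy(&i40e_g_desc_acc_attr, &i40e->i40e_desc_acc_attr,
            sizeof (ddi_device_acc_attr_t));
        bcopy(&i40e_g_buf_acc_attr, &i40e->i40e_buf_acc_attr,
            sizeof (ddi_device_acc_attr_t));

        if (fma == B_TRUE) {
            i40e->i40e_static_dma_attr.dma_attr_flags |= DDI_DMA_FLAGERR;
            i40e->i40e_txbind_dma_attr.dma_attr_flags |= DDI_DMA_FLAGERR;
        } else {
            i40e->i40e_static_dma_attr.dma_attr_flags &= ~DDI_DMA_FLAGERR;
            i40e->i40e_txbind_dma_attr.dma_attr_flags &= ~DDI_DMA_FLAGERR;
        }
    }
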
1113 i40e_t *i40e;
1118 i40e = rxd->rxd_i40e;
1148 mutex_enter(&i40e->i40e_rx_pending_lock);
1150 atomic_dec_32(&i40e->i40e_rx_pending);
1158 cv_broadcast(&i40e->i40e_rx_pending_cv);
1161 mutex_exit(&i40e->i40e_rx_pending_lock);
1170 i40e_t *i40e = rxd->rxd_i40e;
1200 ddi_fm_service_impact(i40e->i40e_dip, DDI_SERVICE_DEGRADED);
1201 atomic_or_32(&i40e->i40e_state, I40E_ERROR);
1227 i40e_t *i40e = rxd->rxd_i40e;
1237 ddi_fm_service_impact(i40e->i40e_dip, DDI_SERVICE_DEGRADED);
1238 atomic_or_32(&i40e->i40e_state, I40E_ERROR);
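
Lines 1113-1161 are the rx buffer recycle callback: when the last loaned buffer comes back for a ring whose data was already detached, i40e_rx_pending is decremented and i40e_rx_pending_cv is broadcast so teardown can finish, while the FMA matches at 1200-1238 show the companion error pattern of degrading the service and latching I40E_ERROR. A sketch of the recycle half; rxd_rcb_pending and rxd_shutdown are assumed names:

    /*
     * Sketch of the rx recycle callback per the matches at 1113-1161.
     * rxd_rcb_pending (buffers still loaned to the stack) and rxd_shutdown
     * (ring data already detached) are assumed names.
     */
    static void
    i40e_rx_recycle_sketch(i40e_rx_data_t *rxd)
    {
        i40e_t *i40e = rxd->rxd_i40e;
        uint32_t ref;

        mutex_enter(&i40e->i40e_rx_pending_lock);
        ref = atomic_dec_32_nv(&rxd->rxd_rcb_pending);
        if (ref == 0 && rxd->rxd_shutdown) {
            /* Last buffer back on a dead ring: let teardown finish. */
            atomic_dec_32(&i40e->i40e_rx_pending);
            cv_broadcast(&i40e->i40e_rx_pending_cv);
        }
        mutex_exit(&i40e->i40e_rx_pending_lock);
    }
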
1368 i40e_t *i40e;
1380 i40e = itrq->itrq_i40e;
1381 hw = &i40e->i40e_hw_space;
1383 if (!(i40e->i40e_state & I40E_STARTED) ||
1384 (i40e->i40e_state & I40E_OVERTEMP) ||
1385 (i40e->i40e_state & I40E_SUSPENDED) ||
1386 (i40e->i40e_state & I40E_ERROR))
1397 ddi_fm_service_impact(i40e->i40e_dip, DDI_SERVICE_DEGRADED);
1398 atomic_or_32(&i40e->i40e_state, I40E_ERROR);
1478 if (plen >= i40e->i40e_rx_dma_min)
1484 if (i40e->i40e_rx_hcksum_enable)
1519 if (rx_frames > i40e->i40e_rx_limit_per_intr) {
1534 ddi_fm_service_impact(i40e->i40e_dip, DDI_SERVICE_DEGRADED);
1535 atomic_or_32(&i40e->i40e_state, I40E_ERROR);
1540 ddi_acc_handle_t rh = i40e->i40e_osdep_space.ios_reg_handle;
1546 ddi_fm_service_impact(i40e->i40e_dip,
1548 atomic_or_32(&i40e->i40e_state, I40E_ERROR);
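
Lines 1368-1548 are the rx ring scan: nothing is processed unless the instance is STARTED and not OVERTEMP, SUSPENDED, or ERROR; frames at or above i40e_rx_dma_min are loaned rather than copied; hardware checksum results are consumed only when i40e_rx_hcksum_enable is set; and the loop stops after i40e_rx_limit_per_intr frames so one interrupt cannot monopolize the CPU. A sketch of the gating and the per-interrupt limit, with descriptor parsing omitted:

    /*
     * Sketch of the rx scan's gating and per-interrupt frame limit, based
     * on the matches at 1383-1519.  i40e_rx_one_sketch() is a stand-in for
     * the descriptor parsing and mblk construction the real loop does.
     */
    static mblk_t *i40e_rx_one_sketch(i40e_trqpair_t *);    /* stand-in */

    static mblk_t *
    i40e_ring_rx_sketch(i40e_trqpair_t *itrq)
    {
        i40e_t *i40e = itrq->itrq_i40e;
        mblk_t *mp_head = NULL, **mp_tail = &mp_head;
        uint_t rx_frames = 0;
        mblk_t *mp;

        /* Nothing to do unless the instance is up and healthy. */
        if (!(i40e->i40e_state & I40E_STARTED) ||
            (i40e->i40e_state & I40E_OVERTEMP) ||
            (i40e->i40e_state & I40E_SUSPENDED) ||
            (i40e->i40e_state & I40E_ERROR))
            return (NULL);

        while ((mp = i40e_rx_one_sketch(itrq)) != NULL) {
            *mp_tail = mp;
            mp_tail = &mp->b_next;

            /* Bound the work done in a single interrupt. */
            if (++rx_frames > i40e->i40e_rx_limit_per_intr)
                break;
        }

        return (mp_head);
    }
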
1776 i40e_tx_context(i40e_t *i40e, i40e_trqpair_t *itrq, mblk_t *mp,
1786 if (i40e->i40e_tx_hcksum_enable != B_TRUE)
1934 panic("unknown i40e tcb type: %d", tcb->tcb_type);
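
Line 1786 shows that i40e_tx_context() is a no-op when transmit checksum offload is administratively disabled; otherwise it has to read the offload flags the stack attached to the mblk and translate them into descriptor context bits (the real function returns a negative value for malformed packets, per the check at 2127). A hedged sketch of that early-out using the MAC framework's mac_hcksum_get():

    /*
     * Sketch of the checksum-offload early-out in i40e_tx_context(), per
     * the match at 1786.  Only the flags are consumed here; the real
     * function also validates headers and can return a negative value.
     */
    static int
    i40e_tx_context_sketch(i40e_t *i40e, mblk_t *mp, uint32_t *flagsp)
    {
        uint32_t start, stuff, end, value, chkflags;

        /* Offload disabled by the administrator: nothing to fill in. */
        if (i40e->i40e_tx_hcksum_enable != B_TRUE)
            return (0);

        mac_hcksum_get(mp, &start, &stuff, &end, &value, &chkflags);
        if (chkflags == 0)
            return (0);

        /* The caller would turn these into descriptor context bits. */
        *flagsp = chkflags;
        return (1);
    }
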
1997 i40e_t *i40e = itrq->itrq_i40e;
2005 mac_tx_ring_update(i40e->i40e_mac_hdl,
2025 ddi_fm_service_impact(i40e->i40e_dip, DDI_SERVICE_DEGRADED);
2026 atomic_or_32(&i40e->i40e_state, I40E_ERROR);
2058 itrq->itrq_desc_free > i40e->i40e_tx_block_thresh) {
2061 mac_tx_ring_update(i40e->i40e_mac_hdl, itrq->itrq_mactxring);
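
Lines 1997-2061 are the tx recycle path: after completed descriptors are reclaimed, a ring that had been blocked is reopened with mac_tx_ring_update() once itrq_desc_free rises above i40e_tx_block_thresh. A sketch of that unblock step; itrq_tx_blocked and itrq_tx_lock are assumed names:

    /*
     * Sketch of the tx-recycle unblock step per the matches at 2058-2061.
     * itrq_tx_blocked is an assumed flag; itrq_tx_lock is an assumed mutex
     * guarding it and itrq_desc_free.
     */
    static void
    i40e_tx_unblock_sketch(i40e_trqpair_t *itrq)
    {
        i40e_t *i40e = itrq->itrq_i40e;
        boolean_t notify = B_FALSE;

        mutex_enter(&itrq->itrq_tx_lock);
        if (itrq->itrq_tx_blocked &&
            itrq->itrq_desc_free > i40e->i40e_tx_block_thresh) {
            itrq->itrq_tx_blocked = B_FALSE;
            notify = B_TRUE;
        }
        mutex_exit(&itrq->itrq_tx_lock);

        /* Tell MAC the ring can accept transmits again. */
        if (notify)
            mac_tx_ring_update(i40e->i40e_mac_hdl, itrq->itrq_mactxring);
    }
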
2107 i40e_t *i40e = itrq->itrq_i40e;
2108 i40e_hw_t *hw = &i40e->i40e_hw_space;
2113 if (!(i40e->i40e_state & I40E_STARTED) ||
2114 (i40e->i40e_state & I40E_OVERTEMP) ||
2115 (i40e->i40e_state & I40E_SUSPENDED) ||
2116 (i40e->i40e_state & I40E_ERROR) ||
2117 (i40e->i40e_link_state != LINK_STATE_UP)) {
2127 if (i40e_tx_context(i40e, itrq, mp, &tctx) < 0) {
2186 if (itrq->itrq_desc_free < i40e->i40e_tx_block_thresh) {
2231 if (i40e_check_acc_handle(i40e->i40e_osdep_space.ios_reg_handle) !=
2238 ddi_fm_service_impact(i40e->i40e_dip, DDI_SERVICE_DEGRADED);
2239 atomic_or_32(&i40e->i40e_state, I40E_ERROR);
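
Lines 2107-2239 are i40e_ring_tx(): frames are dropped when the instance is not started, is over temperature, suspended, or errored, or the link is down; if fewer than i40e_tx_block_thresh descriptors remain, the mblk is handed back so MAC blocks the ring; and register-access faults (the i40e_check_acc_handle() test at 2231) degrade the service and latch I40E_ERROR. A sketch of the entry gate and the blocking return, with locking and descriptor construction elided:

    /*
     * Sketch of i40e_ring_tx()'s entry gate and descriptor-shortage
     * return, per the matches at 2113-2186.  Returning mp (rather than
     * NULL) tells MAC the ring is blocked, while freeing the mblk and
     * returning NULL drops the frame.  Locking is elided.
     */
    static mblk_t *
    i40e_ring_tx_sketch(void *arg, mblk_t *mp)
    {
        i40e_trqpair_t *itrq = arg;
        i40e_t *i40e = itrq->itrq_i40e;

        /* Drop frames when the device can't transmit at all. */
        if (!(i40e->i40e_state & I40E_STARTED) ||
            (i40e->i40e_state & I40E_OVERTEMP) ||
            (i40e->i40e_state & I40E_SUSPENDED) ||
            (i40e->i40e_state & I40E_ERROR) ||
            (i40e->i40e_link_state != LINK_STATE_UP)) {
            freemsg(mp);
            return (NULL);
        }

        /* Too few free descriptors: hand the mblk back so MAC blocks. */
        if (itrq->itrq_desc_free < i40e->i40e_tx_block_thresh) {
            itrq->itrq_tx_blocked = B_TRUE;    /* assumed flag, see above */
            return (mp);
        }

        /* Descriptor and context setup would follow here. */
        return (NULL);
    }
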