Lines Matching defs:bgep

38 static void bge_refill(bge_t *bgep, buff_ring_t *brp, sw_rbd_t *srbdp);
57 bge_refill(bge_t *bgep, buff_ring_t *brp, sw_rbd_t *srbdp)
65 bge_mbx_put(bgep, brp->chip_mbx_reg, slot);
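
The first three matches are bge_refill(), which returns a consumed receive buffer to its buffer ring and then publishes the new producer index to the chip mailbox with bge_mbx_put(). A minimal sketch of that shape follows; only the signature, brp->chip_mbx_reg, and the bge_mbx_put() call are confirmed by the listing, while the ring-index bookkeeping (rf_next, NEXT(), desc.nslots) is assumed.

static void
bge_refill(bge_t *bgep, buff_ring_t *brp, sw_rbd_t *srbdp)
{
	uint64_t slot;

	/*
	 * Assumed bookkeeping: srbdp identifies the buffer being
	 * recycled; advance the ring's software fill index past it.
	 */
	slot = NEXT(brp->rf_next, brp->desc.nslots);
	brp->rf_next = slot;

	/* Confirmed by the listing: hand the new index to the chip. */
	bge_mbx_put(bgep, brp->chip_mbx_reg, slot);
}
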
68 static mblk_t *bge_receive_packet(bge_t *bgep, bge_rbd_t *hw_rbd_p,
73 bge_receive_packet(bge_t *bgep, bge_rbd_t *hw_rbd_p, recv_ring_t *rrp)
92 BGE_PKTDUMP((bgep, &hw_rbd, NULL, "bad ring flags!"));
96 brp = &bgep->buff[BGE_JUMBO_BUFF_RING];
101 brp = &bgep->buff[BGE_MINI_BUFF_RING];
106 brp = &bgep->buff[BGE_STD_BUFF_RING];
112 BGE_PKTDUMP((bgep, &hw_rbd, NULL, "bad ring index!"));
119 BGE_PKTDUMP((bgep, &hw_rbd, srbdp, "bad ring token"));
125 BGE_PKTDUMP((bgep, &hw_rbd, srbdp, "unterminated packet"));
131 BGE_PKTDUMP((bgep, &hw_rbd, srbdp, "errored packet"));
141 if (bgep->asf_enabled && (hw_rbd.flags & RBD_FLAG_VLAN_TAG))
142 maxsize = bgep->chipid.ethmax_size + ETHERFCSL;
150 maxsize = bgep->chipid.ethmax_size + VLAN_TAGSZ + ETHERFCSL;
153 BGE_PKTDUMP((bgep, &hw_rbd, srbdp, "oversize packet"));
158 if (bgep->asf_enabled && (hw_rbd.flags & RBD_FLAG_VLAN_TAG))
165 BGE_PKTDUMP((bgep, &hw_rbd, srbdp, "undersize packet"));
177 if (bgep->asf_enabled && (hw_rbd.flags & RBD_FLAG_VLAN_TAG)) {
195 if (bge_check_dma_handle(bgep, srbdp->pbuf.dma_hdl) != DDI_FM_OK) {
196 bgep->bge_dma_error = B_TRUE;
197 bgep->bge_chip_state = BGE_CHIP_ERROR;
201 if (bgep->asf_enabled && (hw_rbd.flags & RBD_FLAG_VLAN_TAG)) {
222 if (bgep->asf_enabled && (hw_rbd.flags & RBD_FLAG_VLAN_TAG)) {
235 BGE_PKTDUMP((bgep, &hw_rbd, srbdp, "stuttered packet?"));
253 bge_refill(bgep, brp, srbdp);
262 bgep->bge_chip_state = BGE_CHIP_ERROR;
263 bge_fm_ereport(bgep, DDI_FM_DEVICE_INVAL_STATE);
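
The large middle cluster of matches traces bge_receive_packet(), the per-descriptor path: select the jumbo, mini, or standard buffer ring from the descriptor flags, validate index/token/termination/error state (each failure is dumped via BGE_PKTDUMP()), enforce a maximum frame size that accounts for a VLAN tag left in place when ASF firmware is active, check the payload DMA handle, and recycle the slot through bge_refill(). The outline below is a hedged reconstruction: identifiers visible in the listing are used as-is, while the locals, the ring-selection flag names, and the mblk handling are assumptions.

/* Structural sketch of bge_receive_packet(); not the exact driver body. */
static mblk_t *
bge_receive_packet(bge_t *bgep, bge_rbd_t *hw_rbd_p, recv_ring_t *rrp)
{
	bge_rbd_t hw_rbd = *hw_rbd_p;
	buff_ring_t *brp;
	sw_rbd_t *srbdp;
	mblk_t *mp = NULL;
	size_t maxsize;

	/* Ring selection from descriptor flags (flag names assumed). */
	if (hw_rbd.flags & RBD_FLAG_JUMBO_RING)
		brp = &bgep->buff[BGE_JUMBO_BUFF_RING];
	else if (hw_rbd.flags & RBD_FLAG_MINI_RING)
		brp = &bgep->buff[BGE_MINI_BUFF_RING];
	else
		brp = &bgep->buff[BGE_STD_BUFF_RING];
	srbdp = &brp->sw_rbds[hw_rbd.index];	/* member names assumed */

	/*
	 * Index, token, termination and error checks elided; each
	 * failure path calls BGE_PKTDUMP() and skips ahead to the refill.
	 */

	/* Size limit: with ASF enabled the VLAN tag may stay in the frame. */
	if (bgep->asf_enabled && (hw_rbd.flags & RBD_FLAG_VLAN_TAG))
		maxsize = bgep->chipid.ethmax_size + ETHERFCSL;
	else
		maxsize = bgep->chipid.ethmax_size + VLAN_TAGSZ + ETHERFCSL;
	/* ... compare the received length against maxsize (elided) ... */

	/* ... allocate mp and copy/attach the payload (elided) ... */

	if (bge_check_dma_handle(bgep, srbdp->pbuf.dma_hdl) != DDI_FM_OK) {
		bgep->bge_dma_error = B_TRUE;
		bgep->bge_chip_state = BGE_CHIP_ERROR;
	}

	bge_refill(bgep, brp, srbdp);	/* always recycle the slot */
	return (mp);
}
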
278 static mblk_t *bge_receive_ring(bge_t *bgep, recv_ring_t *rrp);
284 bge_receive_ring(bge_t *bgep, recv_ring_t *rrp)
301 bgep->bge_chip_state = BGE_CHIP_ERROR;
302 bge_fm_ereport(bgep, DDI_FM_DEVICE_INVAL_STATE);
305 if (bge_check_dma_handle(bgep, rrp->desc.dma_hdl) != DDI_FM_OK) {
307 bge_mbx_put(bgep, rrp->chip_mbx_reg, rrp->rx_next);
308 bgep->bge_dma_error = B_TRUE;
309 bgep->bge_chip_state = BGE_CHIP_ERROR;
320 if ((mp = bge_receive_packet(bgep, &hw_rbd_p[slot], rrp))
329 bge_mbx_put(bgep, rrp->chip_mbx_reg, rrp->rx_next);
330 if (bge_check_acc_handle(bgep, bgep->io_handle) != DDI_FM_OK)
331 bgep->bge_chip_state = BGE_CHIP_ERROR;
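
bge_receive_ring() then drives that per-packet routine over a whole return ring. The matches show its fault paths (an invalid-state ereport, and a descriptor DMA fault that resyncs the consumer mailbox and sets bge_dma_error) and its normal path: walk the outstanding slots calling bge_receive_packet(), chain the returned mblks, write the consumer index back with bge_mbx_put(), and verify the register access handle. A sketch under the same caveats; the lock, the producer-index pointer, DMA_VPTR() and NEXT() are assumed names.

static mblk_t *
bge_receive_ring(bge_t *bgep, recv_ring_t *rrp)
{
	bge_rbd_t *hw_rbd_p;
	mblk_t *head = NULL, **tail = &head;
	mblk_t *mp;
	uint64_t slot;

	ASSERT(mutex_owned(rrp->rx_lock));	/* lock name assumed */

	/*
	 * Sanity check of the chip's producer index elided; a bogus
	 * value raises DDI_FM_DEVICE_INVAL_STATE and sets BGE_CHIP_ERROR.
	 */

	/* Descriptor DMA fault: resync the mailbox and flag the error. */
	if (bge_check_dma_handle(bgep, rrp->desc.dma_hdl) != DDI_FM_OK) {
		rrp->rx_next = *rrp->prod_index_p;	/* member assumed */
		bge_mbx_put(bgep, rrp->chip_mbx_reg, rrp->rx_next);
		bgep->bge_dma_error = B_TRUE;
		bgep->bge_chip_state = BGE_CHIP_ERROR;
		return (NULL);
	}

	hw_rbd_p = DMA_VPTR(rrp->desc);		/* macro assumed */
	for (slot = rrp->rx_next; slot != *rrp->prod_index_p;
	    slot = NEXT(slot, rrp->desc.nslots)) {
		if ((mp = bge_receive_packet(bgep, &hw_rbd_p[slot], rrp))
		    != NULL) {
			*tail = mp;
			tail = &mp->b_next;
		}
	}

	rrp->rx_next = slot;
	bge_mbx_put(bgep, rrp->chip_mbx_reg, rrp->rx_next);
	if (bge_check_acc_handle(bgep, bgep->io_handle) != DDI_FM_OK)
		bgep->bge_chip_state = BGE_CHIP_ERROR;

	return (head);
}
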
354 bge_t *bgep = rrp->bgep;
370 bgep->bge_chip_state = BGE_CHIP_ERROR;
371 bge_fm_ereport(bgep, DDI_FM_DEVICE_INVAL_STATE);
375 if (bge_check_dma_handle(bgep, rrp->desc.dma_hdl) != DDI_FM_OK) {
377 bge_mbx_put(bgep, rrp->chip_mbx_reg, rrp->rx_next);
378 bgep->bge_dma_error = B_TRUE;
379 bgep->bge_chip_state = BGE_CHIP_ERROR;
391 if ((mp = bge_receive_packet(bgep, &hw_rbd_p[slot], rrp))
400 bge_mbx_put(bgep, rrp->chip_mbx_reg, rrp->rx_next);
401 if (bge_check_acc_handle(bgep, bgep->io_handle) != DDI_FM_OK)
402 bgep->bge_chip_state = BGE_CHIP_ERROR;
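
The second, nearly identical block is the polled receive entry point, which obtains the softstate through the ring's back-pointer (bge_t *bgep = rrp->bgep) instead of taking it as an argument; apart from that, and presumably a byte budget imposed by the caller, it mirrors bge_receive_ring(). The snippet below only illustrates the back-pointer pattern; the name bge_poll_ring and the bytes_to_pickup parameter are assumptions.

/* Assumed signature; only rrp->bgep is confirmed by the listing. */
mblk_t *
bge_poll_ring(void *arg, int bytes_to_pickup)
{
	recv_ring_t *rrp = arg;
	bge_t *bgep = rrp->bgep;	/* back-pointer from the listing */

	/*
	 * Same DMA-handle check, slot walk and mailbox update as in
	 * bge_receive_ring(), bounded by bytes_to_pickup (elided).
	 */
	return (NULL);
}
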
410 void bge_receive(bge_t *bgep, bge_status_t *bsp);
414 bge_receive(bge_t *bgep, bge_status_t *bsp)
420 for (index = 0; index < bgep->chipid.rx_rings; index++) {
424 rrp = &bgep->recv[index];
437 mp = bge_receive_ring(bgep, rrp);
441 mac_rx_ring(bgep->mh, rrp->ring_handle, mp,
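
Finally, bge_receive() fans the work out across the configured receive rings: for each of bgep->chipid.rx_rings rings it runs bge_receive_ring() and, if packets came back, hands the chain to the MAC layer with mac_rx_ring(). A sketch with the ring-gating test, the locking, and the final mac_rx_ring() argument (cut off in the listing) assumed.

void
bge_receive(bge_t *bgep, bge_status_t *bsp)
{
	recv_ring_t *rrp;
	uint64_t index;
	mblk_t *mp;

	for (index = 0; index < bgep->chipid.rx_rings; index++) {
		rrp = &bgep->recv[index];

		/*
		 * Assumed gating: the real driver consults the status
		 * block (bsp) and skips rings with no new work or rings
		 * currently serviced by MAC polling.
		 */
		mutex_enter(rrp->rx_lock);		/* lock assumed */
		mp = bge_receive_ring(bgep, rrp);
		mutex_exit(rrp->rx_lock);

		if (mp != NULL)
			mac_rx_ring(bgep->mh, rrp->ring_handle, mp,
			    rrp->ring_gen_num);	/* 4th argument assumed */
	}
}
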