Lines Matching defs:wvp

65 static void hci1394_compile_ixl_init(hci1394_comp_ixl_vars_t *wvp,
68 static void hci1394_compile_ixl_endup(hci1394_comp_ixl_vars_t *wvp);
69 static void hci1394_parse_ixl(hci1394_comp_ixl_vars_t *wvp,
71 static void hci1394_finalize_all_xfer_desc(hci1394_comp_ixl_vars_t *wvp);
72 static void hci1394_finalize_cur_xfer_desc(hci1394_comp_ixl_vars_t *wvp);
73 static void hci1394_bld_recv_pkt_desc(hci1394_comp_ixl_vars_t *wvp);
74 static void hci1394_bld_recv_buf_ppb_desc(hci1394_comp_ixl_vars_t *wvp);
75 static void hci1394_bld_recv_buf_fill_desc(hci1394_comp_ixl_vars_t *wvp);
76 static void hci1394_bld_xmit_pkt_desc(hci1394_comp_ixl_vars_t *wvp);
77 static void hci1394_bld_xmit_buf_desc(hci1394_comp_ixl_vars_t *wvp);
78 static void hci1394_bld_xmit_hdronly_nopkt_desc(hci1394_comp_ixl_vars_t *wvp);
79 static int hci1394_bld_dma_mem_desc_blk(hci1394_comp_ixl_vars_t *wvp,
81 static void hci1394_set_xmit_pkt_hdr(hci1394_comp_ixl_vars_t *wvp);
82 static void hci1394_set_xmit_skip_mode(hci1394_comp_ixl_vars_t *wvp);
83 static void hci1394_set_xmit_storevalue_desc(hci1394_comp_ixl_vars_t *wvp);
84 static int hci1394_set_next_xfer_buf(hci1394_comp_ixl_vars_t *wvp,
86 static int hci1394_flush_end_desc_check(hci1394_comp_ixl_vars_t *wvp,
88 static int hci1394_flush_hci_cache(hci1394_comp_ixl_vars_t *wvp);
89 static uint32_t hci1394_alloc_storevalue_dma_mem(hci1394_comp_ixl_vars_t *wvp);
90 static hci1394_xfer_ctl_t *hci1394_alloc_xfer_ctl(hci1394_comp_ixl_vars_t *wvp,
92 static void *hci1394_alloc_dma_mem(hci1394_comp_ixl_vars_t *wvp,
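
The declarations above cover the whole compiler: an init/endup pair, a parse pass, a finalize pass, and the per-opcode descriptor builders and allocators underneath them. A minimal sketch of how an entry point might drive them, assuming the conventional hci1394_compile_ixl() wrapper (not among the matches) and parameter types taken from the driver headers; the truncated argument lists above mean the exact signatures here are assumptions:

/* Sketch only: entry-point name and parameter types are assumptions. */
static void
hci1394_compile_ixl_sketch(hci1394_state_t *soft_statep,
    hci1394_iso_ctxt_t *ctxtp, ixl1394_command_t *ixlp, int *resultp)
{
    hci1394_comp_ixl_vars_t wv;    /* working vars live on this stack frame */

    /* seed the working variables from the context and its defaults */
    hci1394_compile_ixl_init(&wv, soft_statep, ctxtp, ixlp);

    /* pass 1: walk the IXL program, staging descriptor blocks */
    hci1394_parse_ixl(&wv, ixlp);

    /* pass 2: resolve branch/skip addresses across the built blocks */
    if (wv.dma_bld_error == 0)
        hci1394_finalize_all_xfer_desc(&wv);

    /* hand results to the context, or clean up the partial build */
    hci1394_compile_ixl_endup(&wv);

    *resultp = wv.dma_bld_error;
}
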
170 hci1394_compile_ixl_init(hci1394_comp_ixl_vars_t *wvp,
178 wvp->soft_statep = soft_statep;
179 wvp->ctxtp = ctxtp;
198 wvp->xcs_firstp = NULL;
199 wvp->xcs_currentp = NULL;
201 wvp->dma_firstp = NULL;
202 wvp->dma_currentp = NULL;
203 wvp->dma_bld_error = 0;
205 wvp->ixl_io_mode = ctxtp->ctxt_flags;
206 wvp->ixl_cur_cmdp = NULL;
207 wvp->ixl_cur_xfer_stp = NULL;
208 wvp->ixl_cur_labelp = NULL;
210 wvp->ixl_xfer_st_cnt = 0; /* count of xfer start commands found */
211 wvp->xfer_state = XFER_NONE; /* none, pkt, buf, skip, hdronly */
212 wvp->xfer_hci_flush = 0; /* updateable - xfer, jump, set */
213 wvp->xfer_pktlen = 0;
214 wvp->xfer_bufcnt = 0;
215 wvp->descriptors = 0;
218 wvp->ixl_setsyncwait_cnt = 0;
221 wvp->ixl_settagsync_cmdp = NULL;
222 wvp->ixl_setskipmode_cmdp = NULL;
223 wvp->default_skipmode = ctxtp->default_skipmode; /* nxt,self,stop,jmp */
224 wvp->default_skiplabelp = ctxtp->default_skiplabelp;
225 wvp->default_skipxferp = NULL;
226 wvp->skipmode = ctxtp->default_skipmode;
227 wvp->skiplabelp = NULL;
228 wvp->skipxferp = NULL;
229 wvp->default_tag = ctxtp->default_tag;
230 wvp->default_sync = ctxtp->default_sync;
231 wvp->storevalue_bufp = hci1394_alloc_storevalue_dma_mem(wvp);
232 wvp->storevalue_data = 0;
233 wvp->xmit_pkthdr1 = 0;
234 wvp->xmit_pkthdr2 = 0;
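
Taken together, the initializations above give a fair picture of what the working-variables struct carries while a program is compiled. A hypothetical, simplified regrouping for orientation only: field names come from the matches, pointer member types are placeholders, and the real hci1394_comp_ixl_vars_t in the driver headers has more members (notably the descriptor_block[], xfer_bufp[] and xfer_size[] staging arrays used by the build routines further down).

typedef struct hci1394_comp_ixl_vars_sketch_s {
    /* driver soft state and the isoch context being compiled for */
    void        *soft_statep;
    void        *ctxtp;

    /* result chains handed over to the context at endup time */
    void        *xcs_firstp, *xcs_currentp;    /* xfer_ctl chain */
    void        *dma_firstp, *dma_currentp;    /* DMA buffer chain */
    int         dma_bld_error;                 /* first error seen */

    /* parse position and context I/O mode */
    uint32_t    ixl_io_mode;        /* recv/xmit, buffer-fill, ... */
    void        *ixl_cur_cmdp;
    void        *ixl_cur_xfer_stp;
    void        *ixl_cur_labelp;
    uint32_t    ixl_xfer_st_cnt;    /* xfer start commands found */

    /* per-transfer accumulators, cleared after each descriptor block */
    uint32_t    xfer_state;         /* none, pkt, buf, skip, hdronly */
    uint32_t    xfer_hci_flush;     /* updateable - xfer, jump, set */
    uint32_t    xfer_pktlen;
    uint32_t    xfer_bufcnt;
    uint32_t    descriptors;

    /* pending SET commands and tag/sync/skip defaults */
    uint32_t    ixl_setsyncwait_cnt;
    void        *ixl_settagsync_cmdp;
    void        *ixl_setskipmode_cmdp;
    int         default_skipmode, skipmode;    /* nxt,self,stop,jmp */
    void        *default_skiplabelp, *skiplabelp;
    void        *default_skipxferp, *skipxferp;
    uint32_t    default_tag, default_sync;

    /* store-value scratch buffer and transmit header quadlets */
    uint32_t    storevalue_bufp, storevalue_data;
    uint32_t    xmit_pkthdr1, xmit_pkthdr2;

    /* plus the descriptor_block[], xfer_bufp[] and xfer_size[]
     * staging arrays referenced by the matches below */
} hci1394_comp_ixl_vars_sketch_t;
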
248 hci1394_compile_ixl_endup(hci1394_comp_ixl_vars_t *wvp)
258 if ((wvp->dma_bld_error == 0) && (wvp->ixl_xfer_st_cnt == 0)) {
263 wvp->dma_bld_error = IXL1394_ENO_DATA_PKTS;
267 if (wvp->dma_bld_error == 0) {
268 err = hci1394_ixl_find_next_exec_xfer(wvp->ctxtp->ixl_firstp,
277 wvp->dma_bld_error = IXL1394_ENO_DATA_PKTS;
282 dma_nextp = wvp->ctxtp->dma_firstp;
288 wvp->dma_bld_error = IXL1394_EINTERNAL_ERROR;
304 if (wvp->dma_bld_error != 0) {
305 wvp->ctxtp->xcs_firstp = (void *)wvp->xcs_firstp;
306 wvp->ctxtp->dma_firstp = wvp->dma_firstp;
307 hci1394_ixl_cleanup(wvp->soft_statep, wvp->ctxtp);
317 wvp->ctxtp->default_skipxferp = wvp->default_skipxferp;
318 wvp->ctxtp->dma_mem_execp = 0;
324 wvp->ctxtp->dma_mem_execp = (uint32_t)((hci1394_xfer_ctl_t *)
326 wvp->ctxtp->xcs_firstp = (void *)wvp->xcs_firstp;
327 wvp->ctxtp->dma_firstp = wvp->dma_firstp;
328 wvp->ctxtp->dma_last_time = 0;
329 wvp->ctxtp->ixl_exec_depth = 0;
330 wvp->ctxtp->ixl_execp = NULL;
348 hci1394_parse_ixl(hci1394_comp_ixl_vars_t *wvp, ixl1394_command_t *ixlp)
361 while ((ixlnextp != NULL) && (wvp->dma_bld_error == 0)) {
364 wvp->ixl_cur_cmdp = ixlcurp = ixlnextp;
374 if ((((wvp->ixl_io_mode & HCI1394_ISO_CTXT_RECV) != 0) &&
376 (((wvp->ixl_io_mode & HCI1394_ISO_CTXT_RECV) == 0) &&
387 wvp->dma_bld_error = IXL1394_EBAD_IXL_OPCODE;
393 wvp->ixl_io_mode, tnf_opaque, ixl_opcode,
396 wvp->dma_bld_error = IXL1394_EWRONG_XR_CMD_MODE;
407 hci1394_finalize_cur_xfer_desc(wvp);
409 if (wvp->dma_bld_error != 0) {
440 wvp->xfer_state = XFER_BUF;
443 wvp->ixl_cur_xfer_stp = ixlcurp;
449 if ((wvp->ixl_io_mode & HCI1394_ISO_CTXT_BFFILL) == 0) {
468 wvp->dma_bld_error =
478 if (hci1394_set_next_xfer_buf(wvp,
482 /* wvp->dma_bld_error is set by above call */
495 if ((wvp->ixl_io_mode & HCI1394_ISO_CTXT_BFFILL) != 0) {
501 wvp->dma_bld_error = IXL1394_EWRONG_XR_CMD_MODE;
507 wvp->xfer_state = XFER_PKT;
508 wvp->ixl_cur_xfer_stp = ixlcurp;
514 if (hci1394_set_next_xfer_buf(wvp,
518 /* wvp->dma_bld_error is set by above call */
531 if ((wvp->ixl_io_mode & HCI1394_ISO_CTXT_BFFILL) != 0) {
537 wvp->dma_bld_error = IXL1394_EWRONG_XR_CMD_MODE;
542 if (wvp->xfer_state != XFER_PKT) {
548 wvp->dma_bld_error = IXL1394_EMISPLACED_RECV;
557 wvp->ixl_cur_xfer_stp;
563 ixlcurp->compiler_resv = wvp->xfer_bufcnt;
569 if (hci1394_set_next_xfer_buf(wvp,
573 /* wvp->dma_bld_error is set by above call */
582 wvp->xfer_hci_flush |= UPDATEABLE_XFER;
602 wvp->xfer_state = XFER_BUF;
605 wvp->ixl_cur_xfer_stp = ixlcurp;
622 wvp->dma_bld_error = IXL1394_EPKTSIZE_RATIO;
627 if (hci1394_set_next_xfer_buf(wvp,
631 /* wvp->dma_bld_error is set by above call */
645 wvp->xfer_state = XFER_PKT;
646 wvp->ixl_cur_xfer_stp = ixlcurp;
652 if (hci1394_set_next_xfer_buf(wvp,
656 /* wvp->dma_bld_error is set by above call */
670 wvp->xfer_state = XFER_PKT;
671 wvp->ixl_cur_xfer_stp = ixlcurp;
683 wvp->dma_bld_error = IXL1394_EPKT_HDR_MISSING;
691 if (hci1394_set_next_xfer_buf(wvp,
695 /* wvp->dma_bld_error is set by above call */
708 if (wvp->xfer_state != XFER_PKT) {
714 wvp->dma_bld_error = IXL1394_EMISPLACED_SEND;
723 wvp->ixl_cur_xfer_stp;
729 ixlcurp->compiler_resv = wvp->xfer_bufcnt;
735 if (hci1394_set_next_xfer_buf(wvp,
739 /* wvp->dma_bld_error is set by above call */
748 wvp->xfer_hci_flush |= UPDATEABLE_XFER;
755 wvp->xfer_state = XMIT_HDRONLY;
758 wvp->ixl_cur_xfer_stp = ixlcurp;
763 wvp->xfer_state = XMIT_NOPKT;
766 wvp->ixl_cur_xfer_stp = ixlcurp;
789 wvp->dma_bld_error = IXL1394_EJUMP_NOT_TO_LABEL;
800 wvp->ixl_cur_labelp = ixlcurp;
803 wvp->xfer_hci_flush |= INITIATING_LBL;
822 if (wvp->ixl_setskipmode_cmdp != NULL) {
829 wvp->dma_bld_error = IXL1394_EDUPLICATE_SET_CMD;
834 wvp->ixl_setskipmode_cmdp = (ixl1394_set_skipmode_t *)
837 if ((wvp->ixl_setskipmode_cmdp->skipmode !=
839 (wvp->ixl_setskipmode_cmdp->skipmode !=
841 (wvp->ixl_setskipmode_cmdp->skipmode !=
843 (wvp->ixl_setskipmode_cmdp->skipmode !=
850 wvp->ixl_setskipmode_cmdp->skipmode);
852 wvp->dma_bld_error = IXL1394_EBAD_SKIPMODE;
860 if ((wvp->ixl_setskipmode_cmdp->skipmode ==
862 ((wvp->ixl_setskipmode_cmdp->label == NULL) ||
863 (wvp->ixl_setskipmode_cmdp->label->ixl_opcode !=
871 wvp->ixl_setskipmode_cmdp->label);
873 wvp->dma_bld_error = IXL1394_EJUMP_NOT_TO_LABEL;
881 wvp->xfer_hci_flush |= UPDATEABLE_SET;
891 if (wvp->ixl_settagsync_cmdp != NULL) {
898 wvp->dma_bld_error = IXL1394_EDUPLICATE_SET_CMD;
903 wvp->ixl_settagsync_cmdp =
911 wvp->xfer_hci_flush |= UPDATEABLE_SET;
921 wvp->ixl_setsyncwait_cnt++;
931 wvp->dma_bld_error = IXL1394_EBAD_IXL_OPCODE;
937 wvp->ixl_cur_cmdp = NULL;
938 if (wvp->dma_bld_error == 0) {
939 hci1394_finalize_cur_xfer_desc(wvp);
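
The hci1394_parse_ixl() matches trace out the shape of the main loop: walk the IXL list, stop at the first recorded error, reject opcodes whose transfer direction disagrees with the context's I/O mode, collect state into wvp per opcode, and close out the pending descriptor block when a command ends the current transfer. A condensed skeleton under those assumptions; opcode_matches_io_mode() and ends_current_xfer() are hypothetical stand-ins for the opcode-flag tests the driver performs inline, and the per-opcode switch body is elided.

/* hypothetical helpers standing in for inline opcode-flag tests */
static boolean_t opcode_matches_io_mode(uint16_t op, uint32_t io_mode);
static boolean_t ends_current_xfer(uint16_t op);

static void
hci1394_parse_ixl_sketch(hci1394_comp_ixl_vars_t *wvp, ixl1394_command_t *ixlp)
{
    ixl1394_command_t *ixlcurp;
    ixl1394_command_t *ixlnextp = ixlp;
    uint16_t ixlopcode;

    while ((ixlnextp != NULL) && (wvp->dma_bld_error == 0)) {
        /* advance to the next IXL command */
        wvp->ixl_cur_cmdp = ixlcurp = ixlnextp;
        ixlnextp = ixlcurp->next_ixlp;
        ixlopcode = ixlcurp->ixl_opcode;

        /* recv opcodes in a xmit context (and vice versa) fail */
        if (!opcode_matches_io_mode(ixlopcode, wvp->ixl_io_mode)) {
            wvp->dma_bld_error = IXL1394_EWRONG_XR_CMD_MODE;
            continue;
        }

        /* commands that end a transfer emit the pending block first */
        if (ends_current_xfer(ixlopcode)) {
            hci1394_finalize_cur_xfer_desc(wvp);
            if (wvp->dma_bld_error != 0)
                continue;
        }

        switch (ixlopcode) {
        /* cases for the XFER, SET, LABEL and JUMP opcodes elided;
         * they collect state into wvp as shown in the matches above */
        default:
            wvp->dma_bld_error = IXL1394_EBAD_IXL_OPCODE;
            break;
        }
    }

    /* emit whatever was still being collected when the list ended */
    wvp->ixl_cur_cmdp = NULL;
    if (wvp->dma_bld_error == 0)
        hci1394_finalize_cur_xfer_desc(wvp);
}
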
959 hci1394_finalize_all_xfer_desc(hci1394_comp_ixl_vars_t *wvp)
987 if (((wvp->ixl_io_mode & HCI1394_ISO_CTXT_RECV) == 0) &&
988 (wvp->ctxtp->default_skipmode == IXL1394_SKIP_TO_LABEL)) {
990 err = hci1394_ixl_find_next_exec_xfer(wvp->default_skiplabelp,
991 NULL, &wvp->default_skipxferp);
997 skipixl_cmdp, wvp->default_skiplabelp);
1001 wvp->dma_bld_error = IXL1394_ENO_DATA_PKTS;
1007 ixlnextp = wvp->ctxtp->ixl_firstp;
1010 while ((ixlnextp != NULL) && (wvp->dma_bld_error == 0)) {
1062 wvp->dma_bld_error = IXL1394_ENO_DATA_PKTS;
1145 wvp->ixl_setskipmode_cmdp = xferctl_curp->skipmodep;
1146 hci1394_set_xmit_skip_mode(wvp);
1153 if ((wvp->skipmode == IXL1394_SKIP_TO_LABEL) &&
1154 (wvp->skipxferp != NULL)) {
1156 wvp->skipxferp->compiler_privatep;
1166 switch (wvp->skipmode) {
1239 * currently collected into the working vars struct (wvp). After some
1244 hci1394_finalize_cur_xfer_desc(hci1394_comp_ixl_vars_t *wvp)
1253 if (wvp->ixl_cur_cmdp != NULL) {
1254 ixlopcode = wvp->ixl_cur_cmdp->ixl_opcode;
1263 if (wvp->xfer_state == XFER_NONE) {
1272 (wvp->ixl_cur_cmdp == NULL) ||
1273 (wvp->ixl_cur_cmdp->next_ixlp == NULL)) {
1274 if ((wvp->ixl_settagsync_cmdp != NULL) ||
1275 (wvp->ixl_setskipmode_cmdp != NULL) ||
1276 (wvp->ixl_setsyncwait_cnt != 0)) {
1278 wvp->dma_bld_error = IXL1394_EUNAPPLIED_SET_CMD;
1286 wvp->ixl_cur_cmdp);
1297 wvp->dma_bld_error = IXL1394_EUPDATE_DISALLOWED;
1302 tnf_opaque, ixl_commandp, wvp->ixl_cur_cmdp);
1320 wvp->ixl_xfer_st_cnt++;
1327 if ((wvp->ixl_cur_xfer_stp->ixl_opcode & IXL1394_OPF_UPDATE) != 0) {
1328 wvp->xfer_hci_flush |= UPDATEABLE_XFER;
1335 wvp->xfer_hci_flush |= UPDATEABLE_JUMP;
1350 if (wvp->xfer_hci_flush != 0) {
1351 if (((wvp->ixl_cur_xfer_stp->ixl_opcode &
1352 IXL1394_OPTY_XFER_PKT_ST) != 0) || ((wvp->xfer_hci_flush &
1356 if (hci1394_flush_hci_cache(wvp) != DDI_SUCCESS) {
1361 /* wvp->dma_bld_error is set by above call */
1371 switch (wvp->xfer_state) {
1374 if ((wvp->ixl_io_mode & HCI1394_ISO_CTXT_RECV) != 0) {
1375 hci1394_bld_recv_pkt_desc(wvp);
1377 hci1394_bld_xmit_pkt_desc(wvp);
1382 if ((wvp->ixl_io_mode & HCI1394_ISO_CTXT_RECV) != 0) {
1383 if ((wvp->ixl_io_mode & HCI1394_ISO_CTXT_BFFILL) != 0) {
1384 hci1394_bld_recv_buf_fill_desc(wvp);
1386 hci1394_bld_recv_buf_ppb_desc(wvp);
1389 hci1394_bld_xmit_buf_desc(wvp);
1395 hci1394_bld_xmit_hdronly_nopkt_desc(wvp);
1403 ixl_commandp, wvp->ixl_cur_cmdp);
1404 wvp->dma_bld_error = IXL1394_EINTERNAL_ERROR;
1408 if (wvp->dma_bld_error != 0) {
1412 /* wvp->dma_bld_error is set by above call */
1421 wvp->ixl_cur_cmdp->compiler_privatep =
1422 (void *)wvp->ixl_cur_xfer_stp;
1426 if (wvp->ixl_cur_labelp != NULL) {
1428 (wvp->ixl_cur_xfer_stp->compiler_privatep))->ctl_flags |=
1430 wvp->ixl_cur_labelp = NULL;
1437 if (wvp->ixl_setskipmode_cmdp != NULL) {
1439 (wvp->ixl_cur_xfer_stp->compiler_privatep))->skipmodep =
1440 wvp->ixl_setskipmode_cmdp;
1444 wvp->ixl_cur_xfer_stp = NULL;
1447 wvp->ixl_settagsync_cmdp = NULL;
1448 wvp->ixl_setskipmode_cmdp = NULL;
1449 wvp->ixl_setsyncwait_cnt = 0;
1452 wvp->descriptors = 0;
1455 wvp->xfer_pktlen = 0;
1456 wvp->xfer_bufcnt = 0;
1459 wvp->xfer_hci_flush = 0;
1462 wvp->xfer_state = XFER_NONE;
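
At build time, hci1394_finalize_cur_xfer_desc() dispatches on the collected xfer_state and the context's I/O mode to choose a descriptor builder; the matches above make the mapping fairly clear. A sketch of that dispatch, with the case labels reconstructed from the xfer_state assignments earlier in the parse routine:

    switch (wvp->xfer_state) {
    case XFER_PKT:
        /* packet transfers, receive or transmit */
        if ((wvp->ixl_io_mode & HCI1394_ISO_CTXT_RECV) != 0)
            hci1394_bld_recv_pkt_desc(wvp);
        else
            hci1394_bld_xmit_pkt_desc(wvp);
        break;

    case XFER_BUF:
        /* buffer transfers: buffer-fill vs packet-per-buffer receive */
        if ((wvp->ixl_io_mode & HCI1394_ISO_CTXT_RECV) != 0) {
            if ((wvp->ixl_io_mode & HCI1394_ISO_CTXT_BFFILL) != 0)
                hci1394_bld_recv_buf_fill_desc(wvp);
            else
                hci1394_bld_recv_buf_ppb_desc(wvp);
        } else {
            hci1394_bld_xmit_buf_desc(wvp);
        }
        break;

    case XMIT_HDRONLY:
    case XMIT_NOPKT:
        /* header-only and skip-cycle transmit descriptors */
        hci1394_bld_xmit_hdronly_nopkt_desc(wvp);
        break;

    default:
        /* no other state should reach the build step */
        wvp->dma_bld_error = IXL1394_EINTERNAL_ERROR;
        break;
    }
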
1473 hci1394_bld_recv_pkt_desc(hci1394_comp_ixl_vars_t *wvp)
1489 if ((wvp->descriptors + wvp->xfer_bufcnt) > HCI1394_DESC_MAX_Z) {
1491 wvp->dma_bld_error = IXL1394_EFRAGMENT_OFLO;
1496 wvp->ixl_cur_xfer_stp, tnf_int, frag_count,
1497 wvp->descriptors + wvp->xfer_bufcnt);
1504 if ((xctlp = hci1394_alloc_xfer_ctl(wvp, 1)) == NULL) {
1506 wvp->dma_bld_error = IXL1394_EMEM_ALLOC_FAIL;
1511 ixl_commandp, wvp->ixl_cur_xfer_stp);
1521 wvp->ixl_cur_xfer_stp->compiler_privatep = (void *)xctlp;
1527 if (wvp->ixl_setsyncwait_cnt > 0) {
1528 wvp->ixl_setsyncwait_cnt = 1;
1535 for (ii = 0; ii < wvp->xfer_bufcnt; ii++) {
1536 wv_descp = &wvp->descriptor_block[wvp->descriptors];
1538 if (ii == (wvp->xfer_bufcnt - 1)) {
1540 DESC_INTR_DSABL, wait_for_sync, wvp->xfer_size[ii]);
1543 wvp->xfer_size[ii]);
1545 wv_descp->data_addr = wvp->xfer_bufp[ii];
1547 wv_descp->status = (wvp->xfer_size[ii] <<
1549 wvp->descriptors++;
1553 if (hci1394_bld_dma_mem_desc_blk(wvp, &dma_descp, &dma_desc_bound) !=
1558 /* wvp->dma_bld_error is set by above function call */
1568 dma_descp + (wvp->xfer_bufcnt - 1) * sizeof (hci1394_desc_t);
1569 xctlp->dma[0].dma_buf = &wvp->dma_currentp->mem;
1581 hci1394_bld_recv_buf_ppb_desc(hci1394_comp_ixl_vars_t *wvp)
1596 local_ixl_cur_xfer_stp = (ixl1394_xfer_buf_t *)wvp->ixl_cur_xfer_stp;
1603 if ((xctlp = hci1394_alloc_xfer_ctl(wvp, pktcnt)) == NULL) {
1605 wvp->dma_bld_error = IXL1394_EMEM_ALLOC_FAIL;
1610 ixl_commandp, wvp->ixl_cur_xfer_stp);
1626 if (wvp->ixl_setsyncwait_cnt > 0) {
1627 wvp->ixl_setsyncwait_cnt = 1;
1635 wv_descp = &wvp->descriptor_block[wvp->descriptors];
1642 wvp->descriptors++;
1647 wvp->ixl_cur_xfer_stp, tnf_int, pkt_count, pktcnt, tnf_int,
1659 if (hci1394_flush_end_desc_check(wvp, ii) !=
1669 /* wvp->dma_bld_error is set by above call */
1675 if (hci1394_bld_dma_mem_desc_blk(wvp, &dma_descp,
1681 /* wvp->dma_bld_error is set by above call */
1691 xctlp->dma[ii].dma_buf = &wvp->dma_currentp->mem;
1694 wvp->descriptor_block[wvp->descriptors - 1].data_addr +=
1707 hci1394_bld_recv_buf_fill_desc(hci1394_comp_ixl_vars_t *wvp)
1718 local_ixl_cur_xfer_stp = (ixl1394_xfer_buf_t *)wvp->ixl_cur_xfer_stp;
1722 if ((xctlp = hci1394_alloc_xfer_ctl(wvp, 1)) == NULL) {
1724 wvp->dma_bld_error = IXL1394_EMEM_ALLOC_FAIL;
1729 ixl_commandp, wvp->ixl_cur_xfer_stp);
1745 if (wvp->ixl_setsyncwait_cnt > 0) {
1746 wvp->ixl_setsyncwait_cnt = 1;
1756 HCI1394_INIT_IR_BF_IMORE(&wvp->descriptor_block[wvp->descriptors],
1759 wvp->descriptor_block[wvp->descriptors].data_addr =
1761 wvp->descriptor_block[wvp->descriptors].branch = 0;
1762 wvp->descriptor_block[wvp->descriptors].status =
1765 wvp->descriptors++;
1768 if (hci1394_flush_end_desc_check(wvp, 0) != DDI_SUCCESS) {
1772 /* wvp->dma_bld_error is set by above call */
1777 if (hci1394_bld_dma_mem_desc_blk(wvp, &dma_descp, &dma_desc_bound)
1782 /* wvp->dma_bld_error is set by above call */
1792 xctlp->dma[0].dma_buf = &wvp->dma_currentp->mem;
1803 hci1394_bld_xmit_pkt_desc(hci1394_comp_ixl_vars_t *wvp)
1820 if ((wvp->descriptors + 2 + wvp->xfer_bufcnt) > HCI1394_DESC_MAX_Z) {
1822 wvp->dma_bld_error = IXL1394_EFRAGMENT_OFLO;
1827 wvp->ixl_cur_xfer_stp, tnf_int, frag_count,
1828 wvp->descriptors + 2 + wvp->xfer_bufcnt);
1835 if (wvp->xfer_pktlen > 0xFFFF) {
1837 wvp->dma_bld_error = IXL1394_EPKTSIZE_MAX_OFLO;
1842 wvp->ixl_cur_xfer_stp, tnf_int, total_pktlen,
1843 wvp->xfer_pktlen);
1850 if ((xctlp = hci1394_alloc_xfer_ctl(wvp, 1)) == NULL) {
1852 wvp->dma_bld_error = IXL1394_EMEM_ALLOC_FAIL;
1857 ixl_commandp, wvp->ixl_cur_cmdp);
1867 wvp->ixl_cur_xfer_stp->compiler_privatep = (void *)xctlp;
1870 hci1394_set_xmit_pkt_hdr(wvp);
1877 (&wvp->descriptor_block[wvp->descriptors]);
1883 wv_omi_descp->q1 = wvp->xmit_pkthdr1;
1884 wv_omi_descp->q2 = wvp->xmit_pkthdr2;
1888 wvp->descriptors += 2;
1894 for (ii = 0; ii < wvp->xfer_bufcnt; ii++) {
1895 wv_descp = &wvp->descriptor_block[wvp->descriptors];
1897 if (ii == (wvp->xfer_bufcnt - 1)) {
1899 DESC_INTR_DSABL, wvp->xfer_size[ii]);
1901 HCI1394_INIT_IT_OMORE(wv_descp, wvp->xfer_size[ii]);
1903 wv_descp->data_addr = wvp->xfer_bufp[ii];
1906 wvp->descriptors++;
1910 if (hci1394_bld_dma_mem_desc_blk(wvp, &dma_descp, &dma_desc_bound) !=
1915 /* wvp->dma_bld_error is set by above call */
1925 dma_descp + (wvp->xfer_bufcnt + 1) * sizeof (hci1394_desc_t);
1926 xctlp->dma[0].dma_buf = &wvp->dma_currentp->mem;
1937 hci1394_bld_xmit_buf_desc(hci1394_comp_ixl_vars_t *wvp)
1952 local_ixl_cur_xfer_stp = (ixl1394_xfer_buf_t *)wvp->ixl_cur_xfer_stp;
1959 if ((xctlp = hci1394_alloc_xfer_ctl(wvp, pktcnt)) == NULL) {
1961 wvp->dma_bld_error = IXL1394_EMEM_ALLOC_FAIL;
1966 ixl_commandp, wvp->ixl_cur_cmdp);
1979 wvp->xfer_pktlen = pktsize;
1980 hci1394_set_xmit_pkt_hdr(wvp);
1987 &wvp->descriptor_block[wvp->descriptors];
1994 wv_omi_descp->q1 = wvp->xmit_pkthdr1;
1995 wv_omi_descp->q2 = wvp->xmit_pkthdr2;
1999 wvp->descriptors += 2;
2002 wv_descp = &wvp->descriptor_block[wvp->descriptors];
2008 wvp->descriptors++;
2019 if (hci1394_flush_end_desc_check(wvp, ii) !=
2025 /* wvp->dma_bld_error is set by above call */
2031 if (hci1394_bld_dma_mem_desc_blk(wvp, &dma_descp,
2036 /* wvp->dma_bld_error is set by above call */
2047 xctlp->dma[ii].dma_buf = &wvp->dma_currentp->mem;
2050 wvp->descriptor_block[wvp->descriptors - 1].data_addr +=
2069 hci1394_bld_xmit_hdronly_nopkt_desc(hci1394_comp_ixl_vars_t *wvp)
2083 repcnt = ((ixl1394_xmit_special_t *)wvp->ixl_cur_xfer_stp)->count;
2089 if ((xctlp = hci1394_alloc_xfer_ctl(wvp, repcnt)) == NULL) {
2091 wvp->dma_bld_error = IXL1394_EMEM_ALLOC_FAIL;
2096 ixl_commandp, wvp->ixl_cur_cmdp);
2106 wvp->ixl_cur_xfer_stp->compiler_privatep = (void *)xctlp;
2112 hci1394_set_xmit_storevalue_desc(wvp);
2118 if ((wvp->ixl_cur_xfer_stp->ixl_opcode & ~IXL1394_OPF_UPDATE) ==
2122 hci1394_set_xmit_pkt_hdr(wvp);
2129 &wvp->descriptor_block[wvp->descriptors];
2137 wv_oli_descp->q1 = wvp->xmit_pkthdr1;
2138 wv_oli_descp->q2 = wvp->xmit_pkthdr2;
2141 wvp->descriptors += 2;
2147 wv_ol_descp = &wvp->descriptor_block[wvp->descriptors];
2153 wvp->descriptors++;
2165 if (hci1394_flush_end_desc_check(wvp, ii) !=
2171 /* wvp->dma_bld_error is set by above call */
2177 if (hci1394_bld_dma_mem_desc_blk(wvp, &dma_descp,
2183 /* wvp->dma_bld_error is set by above call */
2193 xctlp->dma[ii].dma_buf = &wvp->dma_currentp->mem;
2204 hci1394_bld_dma_mem_desc_blk(hci1394_comp_ixl_vars_t *wvp, caddr_t *dma_descpp,
2213 if (wvp->descriptors == 0) {
2215 wvp->dma_bld_error = IXL1394_EINTERNAL_ERROR;
2226 *dma_descpp = (caddr_t)hci1394_alloc_dma_mem(wvp, wvp->descriptors *
2231 wvp->dma_bld_error = IXL1394_EMEM_ALLOC_FAIL;
2241 ddi_rep_put32(wvp->dma_currentp->mem.bi_handle,
2242 (uint_t *)wvp->descriptor_block, (uint_t *)*dma_descpp,
2243 wvp->descriptors * (sizeof (hci1394_desc_t) >> 2),
2246 bcopy(wvp->descriptor_block, *dma_descpp,
2247 wvp->descriptors * sizeof (hci1394_desc_t));
2253 *dma_desc_bound = (dma_bound & ~DESC_Z_MASK) | wvp->descriptors;
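
hci1394_bld_dma_mem_desc_blk() commits the block staged in wvp->descriptor_block[] to DMA memory and tells the caller where it landed; the last matched line shows the descriptor count being folded into the low bits of the returned bound address, which is how OpenHCI branch addresses carry the Z (block size) field. A compressed sketch assuming DESC_Z_MASK covers those low bits and that allocation failure is detected by a NULL return; the listing also shows a ddi_rep_put32() path (presumably the access-handle variant of the copy), omitted here in favor of the bcopy() call that appears in full.

static int
hci1394_bld_dma_mem_desc_blk_sketch(hci1394_comp_ixl_vars_t *wvp,
    caddr_t *dma_descpp, uint32_t *dma_desc_bound)
{
    uint32_t dma_bound;

    /* a transfer command that staged no descriptors is a compiler bug */
    if (wvp->descriptors == 0) {
        wvp->dma_bld_error = IXL1394_EINTERNAL_ERROR;
        return (DDI_FAILURE);
    }

    /* carve space for the whole block out of the driver's DMA memory */
    *dma_descpp = (caddr_t)hci1394_alloc_dma_mem(wvp,
        wvp->descriptors * sizeof (hci1394_desc_t), &dma_bound);
    if (*dma_descpp == NULL) {
        wvp->dma_bld_error = IXL1394_EMEM_ALLOC_FAIL;
        return (DDI_FAILURE);
    }

    /* copy the staged descriptors into the bound DMA memory */
    bcopy(wvp->descriptor_block, *dma_descpp,
        wvp->descriptors * sizeof (hci1394_desc_t));

    /* bound address with the block's Z (descriptor count) in the low bits */
    *dma_desc_bound = (dma_bound & ~DESC_Z_MASK) | wvp->descriptors;

    return (DDI_SUCCESS);
}
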
2266 hci1394_set_xmit_pkt_hdr(hci1394_comp_ixl_vars_t *wvp)
2279 if (wvp->ixl_settagsync_cmdp == NULL) {
2280 tag = wvp->default_tag;
2281 sync = wvp->default_sync;
2283 tag = wvp->ixl_settagsync_cmdp->tag;
2284 sync = wvp->ixl_settagsync_cmdp->sync;
2285 wvp->ixl_settagsync_cmdp = NULL;
2295 wvp->xmit_pkthdr1 = (wvp->ctxtp->isospd << DESC_PKT_SPD_SHIFT) |
2296 (tag << DESC_PKT_TAG_SHIFT) | (wvp->ctxtp->isochan <<
2300 wvp->xmit_pkthdr2 = wvp->xfer_pktlen << DESC_PKT_DATALEN_SHIFT;
2314 hci1394_set_xmit_skip_mode(hci1394_comp_ixl_vars_t *wvp)
2321 if (wvp->ixl_setskipmode_cmdp == NULL) {
2322 wvp->skipmode = wvp->default_skipmode;
2323 wvp->skiplabelp = wvp->default_skiplabelp;
2324 wvp->skipxferp = wvp->default_skipxferp;
2326 wvp->skipmode = wvp->ixl_setskipmode_cmdp->skipmode;
2327 wvp->skiplabelp = wvp->ixl_setskipmode_cmdp->label;
2328 wvp->skipxferp = NULL;
2329 if (wvp->skipmode == IXL1394_SKIP_TO_LABEL) {
2330 err = hci1394_ixl_find_next_exec_xfer(wvp->skiplabelp,
2331 NULL, &wvp->skipxferp);
2338 wvp->ixl_setskipmode_cmdp);
2339 wvp->skipxferp = NULL;
2340 wvp->dma_bld_error = IXL1394_ENO_DATA_PKTS;
2343 wvp->ixl_setskipmode_cmdp->compiler_privatep =
2344 (void *)wvp->skipxferp;
2357 hci1394_set_xmit_storevalue_desc(hci1394_comp_ixl_vars_t *wvp)
2362 wvp->descriptors++;
2364 HCI1394_INIT_IT_STORE(&wvp->descriptor_block[wvp->descriptors - 1],
2365 wvp->storevalue_data);
2366 wvp->descriptor_block[wvp->descriptors - 1].data_addr =
2367 wvp->storevalue_bufp;
2368 wvp->descriptor_block[wvp->descriptors - 1].branch = 0;
2369 wvp->descriptor_block[wvp->descriptors - 1].status = 0;
2377 * This routine adds the data buffer to the current wvp list.
2378 * Returns DDI_SUCCESS or DDI_FAILURE. If DDI_FAILURE, wvp->dma_bld_error
2382 hci1394_set_next_xfer_buf(hci1394_comp_ixl_vars_t *wvp, uint32_t bufp,
2391 wvp->dma_bld_error = IXL1394_ENULL_BUFFER_ADDR;
2399 wvp->xfer_bufcnt++;
2402 if (wvp->xfer_bufcnt > HCI1394_DESC_MAX_Z) {
2404 wvp->dma_bld_error = IXL1394_EFRAGMENT_OFLO;
2409 wvp->xfer_bufcnt);
2416 wvp->xfer_bufp[wvp->xfer_bufcnt - 1] = bufp;
2417 wvp->xfer_size[wvp->xfer_bufcnt - 1] = size;
2420 wvp->xfer_pktlen += size;
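
hci1394_set_next_xfer_buf() is the small accumulator the packet and buffer opcodes feed: each buffer fragment is appended to per-transfer arrays in wvp until the descriptor block is emitted. A sketch assembled from the matched lines; the null-address guard and the exact type of the size parameter are inferred, since those lines are truncated in the listing.

static int
hci1394_set_next_xfer_buf_sketch(hci1394_comp_ixl_vars_t *wvp, uint32_t bufp,
    uint16_t size)    /* size type assumed; truncated in the listing */
{
    /* a null buffer address can never be mapped into a descriptor */
    if (bufp == 0) {
        wvp->dma_bld_error = IXL1394_ENULL_BUFFER_ADDR;
        return (DDI_FAILURE);
    }

    /* one more fragment for the descriptor block being collected */
    wvp->xfer_bufcnt++;
    if (wvp->xfer_bufcnt > HCI1394_DESC_MAX_Z) {
        /* more fragments than fit in one OpenHCI descriptor block */
        wvp->dma_bld_error = IXL1394_EFRAGMENT_OFLO;
        return (DDI_FAILURE);
    }

    /* remember the fragment; the build routines turn these into descriptors */
    wvp->xfer_bufp[wvp->xfer_bufcnt - 1] = bufp;
    wvp->xfer_size[wvp->xfer_bufcnt - 1] = size;

    /* running packet length, later checked against the 0xFFFF maximum */
    wvp->xfer_pktlen += size;

    return (DDI_SUCCESS);
}
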
2436 * Returns DDI_SUCCESS or DDI_FAILURE. If DDI_FAILURE, wvp->dma_bld_error
2440 hci1394_flush_end_desc_check(hci1394_comp_ixl_vars_t *wvp, uint32_t count)
2446 ((wvp->xfer_hci_flush & (UPDATEABLE_XFER | UPDATEABLE_SET |
2449 if (wvp->xfer_hci_flush & UPDATEABLE_JUMP) {
2450 if (hci1394_flush_hci_cache(wvp) != DDI_SUCCESS) {
2456 /* wvp->dma_bld_error is set by above call */
2521 * Returns DDI_SUCCESS or DDI_FAILURE. If DDI_FAILURE, wvp->dma_bld_error
2525 hci1394_flush_hci_cache(hci1394_comp_ixl_vars_t *wvp)
2532 if (hci1394_alloc_dma_mem(wvp, sizeof (hci1394_desc_t), &dma_bound) ==
2535 wvp->dma_bld_error = IXL1394_EMEM_ALLOC_FAIL;
2560 hci1394_alloc_storevalue_dma_mem(hci1394_comp_ixl_vars_t *wvp)
2567 if (hci1394_alloc_dma_mem(wvp, sizeof (hci1394_desc_t),
2570 wvp->dma_bld_error = IXL1394_EMEM_ALLOC_FAIL;
2575 tnf_opaque, ixl_commandp, wvp->ixl_cur_cmdp);
2594 hci1394_alloc_xfer_ctl(hci1394_comp_ixl_vars_t *wvp, uint32_t dmacnt)
2635 if (wvp->xcs_firstp == NULL) {
2636 wvp->xcs_firstp = wvp->xcs_currentp = xcsp;
2638 wvp->xcs_currentp->ctl_nextp = xcsp;
2639 wvp->xcs_currentp = xcsp;
2654 hci1394_alloc_dma_mem(hci1394_comp_ixl_vars_t *wvp, uint32_t size,
2670 if ((wvp->dma_currentp == NULL) ||
2671 (size > (wvp->dma_currentp->mem.bi_cookie.dmac_size -
2672 wvp->dma_currentp->used))) {
2693 if (wvp->dma_currentp != NULL) {
2695 memp = &wvp->dma_currentp->mem;
2697 dma_new->offset = wvp->dma_currentp->offset +
2707 wvp->dma_currentp->mem_handle;
2708 wvp->dma_currentp->mem_handle = NULL;
2722 ret = hci1394_buf_alloc(&wvp->soft_statep->drvinfo,
2769 if (wvp->dma_currentp != NULL) {
2770 wvp->dma_currentp->dma_nextp = dma_new;
2771 wvp->dma_currentp = dma_new;
2774 wvp->dma_currentp = wvp->dma_firstp = dma_new;
2779 dma_mem_ret = wvp->dma_currentp->mem.bi_kaddr +
2780 wvp->dma_currentp->offset + wvp->dma_currentp->used;
2781 *dma_bound = wvp->dma_currentp->mem.bi_cookie.dmac_address +
2782 wvp->dma_currentp->used;
2783 wvp->dma_currentp->used += size;
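
Underneath everything, hci1394_alloc_dma_mem() is a bump allocator over a growing chain of DMA buffers: requests are carved out of the current buffer, and a new buffer is allocated and linked onto dma_firstp/dma_currentp when the request no longer fits. A structural sketch under those assumptions; grow_dma_chain() is a hypothetical stand-in for the inline hci1394_buf_alloc() call, binding bookkeeping, and list linking visible in the matches above.

/* hypothetical helper: allocates and binds a new buffer via
 * hci1394_buf_alloc(), links it onto wvp->dma_firstp/dma_currentp,
 * and makes it current; returns B_FALSE on allocation failure */
static boolean_t grow_dma_chain(hci1394_comp_ixl_vars_t *wvp);

static void *
hci1394_alloc_dma_mem_sketch(hci1394_comp_ixl_vars_t *wvp, uint32_t size,
    uint32_t *dma_bound)
{
    void *dma_mem_ret;

    /*
     * Start a new DMA buffer if none exists yet, or if the request
     * does not fit in what remains of the current one.
     */
    if ((wvp->dma_currentp == NULL) ||
        (size > (wvp->dma_currentp->mem.bi_cookie.dmac_size -
        wvp->dma_currentp->used))) {
        if (!grow_dma_chain(wvp))
            return (NULL);
    }

    /* bump-allocate from the current buffer ... */
    dma_mem_ret = wvp->dma_currentp->mem.bi_kaddr +
        wvp->dma_currentp->offset + wvp->dma_currentp->used;

    /* ... and report the matching device-visible (bound) address */
    *dma_bound = wvp->dma_currentp->mem.bi_cookie.dmac_address +
        wvp->dma_currentp->used;

    wvp->dma_currentp->used += size;

    return (dma_mem_ret);
}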