Lines Matching defs:event_idp

1348 struct rdma_cm_id *root_idp, *event_idp, *idp;
1360 event_idp = idp = (struct rdma_cm_id *)inp;
1411 event_idp = ibcma_create_new_id(idp);
1412 if (event_idp == NULL) {
1417 event_idp->device = sol_cma_acquire_device(ntohll(
1419 event_idp->port_num = sidr_req->sreq_hca_port;
1420 (event_idp->route).num_paths = 0;
1422 event_chanp = (sol_cma_chan_t *)event_idp;
1429 &(event_idp->route.addr.dst_addr), &info.src_port);
1433 &(event_idp->route.addr.src_addr), &info.src_port);
1448 root_idp, event_idp, event_chanp->chan_session_id);
1455 root_idp, event_idp,
1459 rdma_destroy_id(event_idp);
1466 (void *)event_idp, where);
1509 cma_generate_event(event_idp, event, evt_status, NULL, ud_paramp);

1525 struct rdma_cm_id *root_idp, *event_idp;
1616 event_idp = ibcma_create_new_id(idp);
1617 if (event_idp == NULL) {
1630 event_idp->device = sol_cma_acquire_device(ntohll(
1632 event_idp->port_num = reqp->req_prim_hca_port;
1633 (event_idp->route).num_paths = reqp->req_alt_hca_port ? 2 : 1;
1634 event_idp->route.path_rec = kmem_zalloc(
1635 sizeof (struct ib_sa_path_rec) * ((event_idp->route).num_paths),
1640 &(event_idp->route.path_rec[0]), base_lid);
1641 (event_idp->route.path_rec[0]).mtu = (uint8_t)rtr_data.req_path_mtu;
1647 &(event_idp->route.path_rec[1]), base_lid);
1648 (event_idp->route.path_rec[1]).mtu =
1652 *event_id_ptr = event_idp;
1654 event_chanp = (sol_cma_chan_t *)event_idp;
1668 &(event_idp->route.addr.dst_addr), &info.src_port);
1672 &(event_idp->route.addr.src_addr), &info.src_port);
1685 root_idp, event_idp, event_chanp->chan_session_id);
1693 root_idp, event_idp,
1700 rdma_destroy_id(event_idp);
1707 avl_insert(&root_chanp->chan_req_avl_tree, (void *)event_idp, where);

1748 struct rdma_cm_id *event_idp, *root_idp;
1782 event_idp = cma_get_acpt_idp(root_idp, eventp->cm_channel);
1784 if (event_idp == NULL) {
1790 *event_id_ptr = event_idp;
1791 event_chanp = (sol_cma_chan_t *)event_idp;

1817 struct rdma_cm_id *root_idp, *event_idp;
1840 event_idp = cma_get_acpt_idp(root_idp, eventp->cm_channel);
1841 event_chanp = (sol_cma_chan_t *)event_idp;
1842 if (event_idp == NULL) {
1850 avl_remove(&root_chanp->chan_acpt_avl_tree, event_idp);
1858 *event_id_ptr = event_idp;

1867 struct rdma_cm_id *root_idp, *event_idp;
1911 event_idp = cma_get_acpt_idp(root_idp,
1913 if (event_idp == NULL) {
1922 event_chanp = (sol_cma_chan_t *)event_idp;
1923 *event_id_ptr = event_idp;
1926 event_idp);
1954 event_idp = cma_get_req_idp(root_idp,
1956 if (event_idp == NULL) {
1965 event_chanp = (sol_cma_chan_t *)event_idp;
1967 *event_id_ptr = event_idp;
1969 event_idp);

2011 struct rdma_cm_id *idp, *event_idp;
2021 idp = event_idp = (struct rdma_cm_id *)inp;
2045 status = ibcma_handle_req(idp, &event_idp, eventp, paramp,
2079 status = ibcma_handle_est(idp, &event_idp, eventp, paramp,
2088 status = ibcma_handle_closed(idp, &event_idp, eventp,
2096 status = ibcma_handle_failed(idp, &event_idp, eventp, paramp,
2109 if (event_idp) {
2110 cma_generate_event(event_idp, event, event_status,
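
The matches above appear to come from the Solaris (sol_ofs) RDMA CM compatibility layer and span several handlers; the dispatch block at source lines 2045-2096 names them (ibcma_handle_req, ibcma_handle_est, ibcma_handle_closed, ibcma_handle_failed), and the SIDR/REQ paths additionally call ibcma_create_new_id, sol_cma_acquire_device, avl_insert and cma_generate_event. The recurring pattern on the connect-request path is: create a child rdma_cm_id from the listening (root) id, fill in its device, port and route, file the child in the root channel's chan_req_avl_tree, and deliver the event on the child; rdma_destroy_id tears the child down on the error paths. The later handlers instead look the child up (cma_get_acpt_idp / cma_get_req_idp) and, on close, remove it from chan_acpt_avl_tree. The sketch below is a minimal user-space illustration of the request path only, under stated assumptions: every demo_* type and helper is a stand-in invented for this example (not the driver's API), a singly linked list stands in for the AVL tree, and using the session id as the lookup key is an assumption made for illustration.

/*
 * Minimal sketch only.  The real handlers use kernel facilities
 * (sol_cma_chan_t, avl_tree_t, kmem_zalloc, ...); everything named
 * demo_* below is a stand-in invented for this illustration.
 */
#include <stdio.h>
#include <stdlib.h>

struct rdma_cm_id {			/* stand-in for the OFED id */
	void		*device;
	unsigned char	port_num;
};

typedef struct demo_chan {		/* stand-in for sol_cma_chan_t */
	struct rdma_cm_id chan_id;	/* first member, so id pointers and
					 * channel pointers cast back and
					 * forth as in the listing */
	unsigned int	chan_session_id;
	struct demo_chan *chan_req_next; /* list standing in for
					  * chan_req_avl_tree */
} demo_chan_t;

/* stand-in for ibcma_create_new_id(): allocate a child id for the event */
static struct rdma_cm_id *
demo_create_new_id(struct rdma_cm_id *root_idp)
{
	demo_chan_t *chanp = calloc(1, sizeof (*chanp));

	if (chanp == NULL)
		return (NULL);
	/* inherit the HCA handle (the driver resolves it with
	 * sol_cma_acquire_device()) */
	chanp->chan_id.device = root_idp->device;
	return (&chanp->chan_id);
}

/* stand-in for rdma_destroy_id() on the error path */
static void
demo_destroy_id(struct rdma_cm_id *idp)
{
	free((demo_chan_t *)idp);
}

/*
 * Request-path pattern (compare source lines 1616-1707): create a child id,
 * fill in its port, remember it in the root channel's request list keyed by
 * session id (an assumption; the driver uses an AVL tree), then report the
 * event on the child id.
 */
static int
demo_handle_req(struct rdma_cm_id *root_idp, demo_chan_t *root_chanp,
    unsigned int session_id, unsigned char hca_port)
{
	struct rdma_cm_id	*event_idp;
	demo_chan_t		*event_chanp, *c;

	event_idp = demo_create_new_id(root_idp);
	if (event_idp == NULL)
		return (-1);
	event_idp->port_num = hca_port;

	event_chanp = (demo_chan_t *)event_idp;
	event_chanp->chan_session_id = session_id;

	/* duplicate session: drop the new id, as the driver does with
	 * rdma_destroy_id() on this path */
	for (c = root_chanp->chan_req_next; c != NULL; c = c->chan_req_next) {
		if (c->chan_session_id == session_id) {
			demo_destroy_id(event_idp);
			return (-1);
		}
	}
	event_chanp->chan_req_next = root_chanp->chan_req_next;
	root_chanp->chan_req_next = event_chanp;

	/* stand-in for cma_generate_event(): deliver the event on the child */
	(void) printf("CONNECT_REQUEST on id %p, port %u, session %u\n",
	    (void *)event_idp, (unsigned)event_idp->port_num, session_id);
	return (0);
}

int
main(void)
{
	demo_chan_t root_chan = { .chan_id = { .port_num = 1 } };

	if (demo_handle_req(&root_chan.chan_id, &root_chan, 42, 1) == 0)
		demo_destroy_id(&root_chan.chan_req_next->chan_id);
	return (0);
}

The per-request child id follows the usual rdma_cm listen model: the root id keeps listening while each incoming connection is reported on its own id, which is why every handler in the listing resolves an event_idp before calling cma_generate_event.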