Lines matching refs:iod (cross-reference hits for the LS request context, struct nvmet_fc_ls_iod, in the NVMe FC target driver)
103 struct nvmet_fc_ls_iod *iod; member
180 return (iodptr - iodptr->tgtport->iod); in nvmet_fc_iodnum()
265 struct nvmet_fc_ls_iod *iod);
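
The three hits above are the declaration side of the LS context pool: the target port holds a fixed array of nvmet_fc_ls_iod contexts (line 103), nvmet_fc_iodnum() recovers a context's index by pointer arithmetic against that array (line 180), and line 265 is the prototype pattern the LS handlers share, taking a tgtport and an iod. A minimal sketch of how those pieces relate; the tgtport structure is trimmed to the members that appear elsewhere in this listing:

    /* sketch only: fields not visible in this listing are omitted */
    struct nvmet_fc_tgtport_sketch {
        struct nvmet_fc_ls_iod  *iod;          /* array of NVMET_LS_CTX_COUNT contexts (line 103) */
        struct list_head        ls_rcv_list;   /* free contexts, ready to receive an LS */
        struct list_head        ls_busylist;   /* contexts currently being handled (line 619) */
        struct device           *dev;          /* used for DMA mapping of each rspbuf */
    };

    /* line 180: a context's number is simply its offset into tgtport->iod */
    static inline int nvmet_fc_iodnum(struct nvmet_fc_ls_iod *iodptr)
    {
        return (iodptr - iodptr->tgtport->iod);
    }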
546 struct nvmet_fc_ls_iod *iod; in nvmet_fc_alloc_ls_iodlist() local
549 iod = kcalloc(NVMET_LS_CTX_COUNT, sizeof(struct nvmet_fc_ls_iod), in nvmet_fc_alloc_ls_iodlist()
551 if (!iod) in nvmet_fc_alloc_ls_iodlist()
554 tgtport->iod = iod; in nvmet_fc_alloc_ls_iodlist()
556 for (i = 0; i < NVMET_LS_CTX_COUNT; iod++, i++) { in nvmet_fc_alloc_ls_iodlist()
557 INIT_WORK(&iod->work, nvmet_fc_handle_ls_rqst_work); in nvmet_fc_alloc_ls_iodlist()
558 iod->tgtport = tgtport; in nvmet_fc_alloc_ls_iodlist()
559 list_add_tail(&iod->ls_rcv_list, &tgtport->ls_rcv_list); in nvmet_fc_alloc_ls_iodlist()
561 iod->rqstbuf = kzalloc(sizeof(union nvmefc_ls_requests) + in nvmet_fc_alloc_ls_iodlist()
564 if (!iod->rqstbuf) in nvmet_fc_alloc_ls_iodlist()
567 iod->rspbuf = (union nvmefc_ls_responses *)&iod->rqstbuf[1]; in nvmet_fc_alloc_ls_iodlist()
569 iod->rspdma = fc_dma_map_single(tgtport->dev, iod->rspbuf, in nvmet_fc_alloc_ls_iodlist()
570 sizeof(*iod->rspbuf), in nvmet_fc_alloc_ls_iodlist()
572 if (fc_dma_mapping_error(tgtport->dev, iod->rspdma)) in nvmet_fc_alloc_ls_iodlist()
579 kfree(iod->rqstbuf); in nvmet_fc_alloc_ls_iodlist()
580 list_del(&iod->ls_rcv_list); in nvmet_fc_alloc_ls_iodlist()
581 for (iod--, i--; i >= 0; iod--, i--) { in nvmet_fc_alloc_ls_iodlist()
582 fc_dma_unmap_single(tgtport->dev, iod->rspdma, in nvmet_fc_alloc_ls_iodlist()
583 sizeof(*iod->rspbuf), DMA_TO_DEVICE); in nvmet_fc_alloc_ls_iodlist()
584 kfree(iod->rqstbuf); in nvmet_fc_alloc_ls_iodlist()
585 list_del(&iod->ls_rcv_list); in nvmet_fc_alloc_ls_iodlist()
588 kfree(iod); in nvmet_fc_alloc_ls_iodlist()
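
Pulling the fragments from lines 546-588 together: nvmet_fc_alloc_ls_iodlist() allocates the whole context array up front, links every context onto the free list, and gives each one a combined request/response buffer whose response half is DMA-mapped toward the device. The sketch below reconstructs the surrounding control flow; the error codes and the final free of the array base are filled-in assumptions, not lines from the listing:

    static int nvmet_fc_alloc_ls_iodlist(struct nvmet_fc_tgtport *tgtport)
    {
        struct nvmet_fc_ls_iod *iod;
        int i;

        /* one contiguous array of NVMET_LS_CTX_COUNT contexts */
        iod = kcalloc(NVMET_LS_CTX_COUNT, sizeof(struct nvmet_fc_ls_iod),
                      GFP_KERNEL);
        if (!iod)
            return -ENOMEM;

        tgtport->iod = iod;

        for (i = 0; i < NVMET_LS_CTX_COUNT; iod++, i++) {
            INIT_WORK(&iod->work, nvmet_fc_handle_ls_rqst_work);
            iod->tgtport = tgtport;
            list_add_tail(&iod->ls_rcv_list, &tgtport->ls_rcv_list);

            /* one allocation carries the request buffer and, immediately
             * after it, the response buffer */
            iod->rqstbuf = kzalloc(sizeof(union nvmefc_ls_requests) +
                                   sizeof(union nvmefc_ls_responses),
                                   GFP_KERNEL);
            if (!iod->rqstbuf)
                goto out_fail;

            iod->rspbuf = (union nvmefc_ls_responses *)&iod->rqstbuf[1];

            /* only the response half is DMA-mapped; requests are memcpy'd in */
            iod->rspdma = fc_dma_map_single(tgtport->dev, iod->rspbuf,
                                            sizeof(*iod->rspbuf),
                                            DMA_TO_DEVICE);
            if (fc_dma_mapping_error(tgtport->dev, iod->rspdma))
                goto out_fail;
        }

        return 0;

    out_fail:
        /* unwind: the partially initialized context first, then walk
         * backwards over the fully initialized ones */
        kfree(iod->rqstbuf);
        list_del(&iod->ls_rcv_list);
        for (iod--, i--; i >= 0; iod--, i--) {
            fc_dma_unmap_single(tgtport->dev, iod->rspdma,
                                sizeof(*iod->rspbuf), DMA_TO_DEVICE);
            kfree(iod->rqstbuf);
            list_del(&iod->ls_rcv_list);
        }

        kfree(tgtport->iod);    /* free the context array via its base pointer */

        return -EFAULT;
    }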
596 struct nvmet_fc_ls_iod *iod = tgtport->iod; in nvmet_fc_free_ls_iodlist() local
599 for (i = 0; i < NVMET_LS_CTX_COUNT; iod++, i++) { in nvmet_fc_free_ls_iodlist()
601 iod->rspdma, sizeof(*iod->rspbuf), in nvmet_fc_free_ls_iodlist()
603 kfree(iod->rqstbuf); in nvmet_fc_free_ls_iodlist()
604 list_del(&iod->ls_rcv_list); in nvmet_fc_free_ls_iodlist()
606 kfree(tgtport->iod); in nvmet_fc_free_ls_iodlist()
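
The teardown path (lines 596-606) mirrors the allocation: walk the array, unmap and free each context's buffer, unlink it from whichever list it is on, then free the array itself. A sketch along the lines of those fragments:

    static void nvmet_fc_free_ls_iodlist(struct nvmet_fc_tgtport *tgtport)
    {
        struct nvmet_fc_ls_iod *iod = tgtport->iod;
        int i;

        for (i = 0; i < NVMET_LS_CTX_COUNT; iod++, i++) {
            fc_dma_unmap_single(tgtport->dev,
                                iod->rspdma, sizeof(*iod->rspbuf),
                                DMA_TO_DEVICE);
            kfree(iod->rqstbuf);       /* also covers rspbuf, same allocation */
            list_del(&iod->ls_rcv_list);
        }
        kfree(tgtport->iod);
    }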
612 struct nvmet_fc_ls_iod *iod; in nvmet_fc_alloc_ls_iod() local
616 iod = list_first_entry_or_null(&tgtport->ls_rcv_list, in nvmet_fc_alloc_ls_iod()
618 if (iod) in nvmet_fc_alloc_ls_iod()
619 list_move_tail(&iod->ls_rcv_list, &tgtport->ls_busylist); in nvmet_fc_alloc_ls_iod()
621 return iod; in nvmet_fc_alloc_ls_iod()
627 struct nvmet_fc_ls_iod *iod) in nvmet_fc_free_ls_iod() argument
632 list_move(&iod->ls_rcv_list, &tgtport->ls_rcv_list); in nvmet_fc_free_ls_iod()
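
Per-request context management (lines 612-632) is a simple free-list/busy-list scheme: take the first free context and park it on the busy list, then move it back once the response has gone out. Sketch; the spinlock shown is an assumption (the driver serializes the list moves with a tgtport lock, which does not appear in this listing because those lines do not mention iod):

    static struct nvmet_fc_ls_iod *
    nvmet_fc_alloc_ls_iod(struct nvmet_fc_tgtport *tgtport)
    {
        struct nvmet_fc_ls_iod *iod;
        unsigned long flags;

        spin_lock_irqsave(&tgtport->lock, flags);      /* assumed lock */
        iod = list_first_entry_or_null(&tgtport->ls_rcv_list,
                                       struct nvmet_fc_ls_iod, ls_rcv_list);
        if (iod)
            list_move_tail(&iod->ls_rcv_list, &tgtport->ls_busylist);
        spin_unlock_irqrestore(&tgtport->lock, flags);

        return iod;                                    /* NULL if all contexts busy */
    }

    static void
    nvmet_fc_free_ls_iod(struct nvmet_fc_tgtport *tgtport,
                         struct nvmet_fc_ls_iod *iod)
    {
        unsigned long flags;

        spin_lock_irqsave(&tgtport->lock, flags);      /* assumed lock */
        list_move(&iod->ls_rcv_list, &tgtport->ls_rcv_list);
        spin_unlock_irqrestore(&tgtport->lock, flags);
    }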
1660 struct nvmet_fc_ls_iod *iod) in nvmet_fc_ls_create_association() argument
1662 struct fcnvme_ls_cr_assoc_rqst *rqst = &iod->rqstbuf->rq_cr_assoc; in nvmet_fc_ls_create_association()
1663 struct fcnvme_ls_cr_assoc_acc *acc = &iod->rspbuf->rsp_cr_assoc; in nvmet_fc_ls_create_association()
1677 if (iod->rqstdatalen < FCNVME_LSDESC_CRA_RQST_MINLEN) in nvmet_fc_ls_create_association()
1695 iod->assoc = nvmet_fc_alloc_target_assoc( in nvmet_fc_ls_create_association()
1696 tgtport, iod->hosthandle); in nvmet_fc_ls_create_association()
1697 if (!iod->assoc) in nvmet_fc_ls_create_association()
1700 queue = nvmet_fc_alloc_target_queue(iod->assoc, 0, in nvmet_fc_ls_create_association()
1704 nvmet_fc_tgt_a_put(iod->assoc); in nvmet_fc_ls_create_association()
1713 iod->lsrsp->rsplen = nvme_fc_format_rjt(acc, in nvmet_fc_ls_create_association()
1726 tgtport->fc_target_port.port_num, iod->assoc->a_id); in nvmet_fc_ls_create_association()
1730 iod->lsrsp->rsplen = sizeof(*acc); in nvmet_fc_ls_create_association()
1741 cpu_to_be64(nvmet_fc_makeconnid(iod->assoc, 0)); in nvmet_fc_ls_create_association()
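
Create Association (lines 1660-1741) is the first of the three LS handlers. It validates the request length against FCNVME_LSDESC_CRA_RQST_MINLEN, creates a new association plus its admin queue (queue 0), and either formats an LS reject or an LS accept whose association and connection IDs both come from nvmet_fc_makeconnid(assoc, 0). A condensed sketch; the full descriptor validation, the accept header formatting, and the error codes are elided or assumed (the driver uses its own VERR_* values rather than errno):

    static void
    nvmet_fc_ls_create_association(struct nvmet_fc_tgtport *tgtport,
                                   struct nvmet_fc_ls_iod *iod)
    {
        struct fcnvme_ls_cr_assoc_rqst *rqst = &iod->rqstbuf->rq_cr_assoc;
        struct fcnvme_ls_cr_assoc_acc *acc = &iod->rspbuf->rsp_cr_assoc;
        struct nvmet_fc_tgt_queue *queue;
        int ret = 0;

        /* payload must at least cover the Create Association descriptor */
        if (iod->rqstdatalen < FCNVME_LSDESC_CRA_RQST_MINLEN)
            ret = -EINVAL;
        /* ... further descriptor tag/length checks elided ... */

        if (!ret) {
            /* new association, plus the admin queue (qid 0) */
            iod->assoc = nvmet_fc_alloc_target_assoc(tgtport, iod->hosthandle);
            if (!iod->assoc) {
                ret = -ENOMEM;
            } else {
                queue = nvmet_fc_alloc_target_queue(iod->assoc, 0,
                            be16_to_cpu(rqst->assoc_cmd.sqsize));
                if (!queue) {
                    nvmet_fc_tgt_a_put(iod->assoc);
                    ret = -ENOMEM;
                }
            }
        }

        if (ret) {
            /* tell the host why the association was not created */
            iod->lsrsp->rsplen = nvme_fc_format_rjt(acc, sizeof(*acc),
                            rqst->w0.ls_cmd, FCNVME_RJT_RC_LOGIC,
                            FCNVME_RJT_EXP_NONE, 0);
            return;
        }

        /* success: log "{port_num:a_id} Association created" (line 1726)
         * and format the accept; both IDs derive from the new association */
        iod->lsrsp->rsplen = sizeof(*acc);
        acc->associd.association_id =
                cpu_to_be64(nvmet_fc_makeconnid(iod->assoc, 0));
        acc->connectid.connection_id = acc->associd.association_id;
        /* ... remaining accept header/descriptor fields elided ... */
    }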
1751 struct nvmet_fc_ls_iod *iod) in nvmet_fc_ls_create_connection() argument
1753 struct fcnvme_ls_cr_conn_rqst *rqst = &iod->rqstbuf->rq_cr_conn; in nvmet_fc_ls_create_connection()
1754 struct fcnvme_ls_cr_conn_acc *acc = &iod->rspbuf->rsp_cr_conn; in nvmet_fc_ls_create_connection()
1760 if (iod->rqstdatalen < sizeof(struct fcnvme_ls_cr_conn_rqst)) in nvmet_fc_ls_create_connection()
1786 iod->assoc = nvmet_fc_find_target_assoc(tgtport, in nvmet_fc_ls_create_connection()
1788 if (!iod->assoc) in nvmet_fc_ls_create_connection()
1791 queue = nvmet_fc_alloc_target_queue(iod->assoc, in nvmet_fc_ls_create_connection()
1798 nvmet_fc_tgt_a_put(iod->assoc); in nvmet_fc_ls_create_connection()
1806 iod->lsrsp->rsplen = nvme_fc_format_rjt(acc, in nvmet_fc_ls_create_connection()
1821 iod->lsrsp->rsplen = sizeof(*acc); in nvmet_fc_ls_create_connection()
1831 cpu_to_be64(nvmet_fc_makeconnid(iod->assoc, in nvmet_fc_ls_create_connection()
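
Create IO Connection (lines 1751-1831) follows the same pattern, except that it must look up an existing association by the association_id carried in the request, and the new queue's id comes from the request's connect_cmd descriptor. Condensed sketch, with validation and accept formatting elided:

    static void
    nvmet_fc_ls_create_connection(struct nvmet_fc_tgtport *tgtport,
                                  struct nvmet_fc_ls_iod *iod)
    {
        struct fcnvme_ls_cr_conn_rqst *rqst = &iod->rqstbuf->rq_cr_conn;
        struct fcnvme_ls_cr_conn_acc *acc = &iod->rspbuf->rsp_cr_conn;
        struct nvmet_fc_tgt_queue *queue;
        int ret = 0;

        if (iod->rqstdatalen < sizeof(struct fcnvme_ls_cr_conn_rqst))
            ret = -EINVAL;
        /* ... descriptor checks elided ... */

        if (!ret) {
            /* the connection must belong to an existing association */
            iod->assoc = nvmet_fc_find_target_assoc(tgtport,
                            be64_to_cpu(rqst->associd.association_id));
            if (!iod->assoc) {
                ret = -ENOENT;
            } else {
                queue = nvmet_fc_alloc_target_queue(iod->assoc,
                            be16_to_cpu(rqst->connect_cmd.qid),
                            be16_to_cpu(rqst->connect_cmd.sqsize));
                if (!queue) {
                    nvmet_fc_tgt_a_put(iod->assoc);   /* drop the lookup ref */
                    ret = -ENOMEM;
                }
            }
        }

        if (ret) {
            iod->lsrsp->rsplen = nvme_fc_format_rjt(acc, sizeof(*acc),
                            rqst->w0.ls_cmd,
                            (ret == -ENOENT) ? FCNVME_RJT_RC_INV_ASSOC :
                                               FCNVME_RJT_RC_LOGIC,
                            FCNVME_RJT_EXP_NONE, 0);
            return;
        }

        /* success: the accept carries the new connection id */
        iod->lsrsp->rsplen = sizeof(*acc);
        acc->connectid.connection_id =
                cpu_to_be64(nvmet_fc_makeconnid(iod->assoc,
                            be16_to_cpu(rqst->connect_cmd.qid)));
        /* ... remaining accept fields elided ... */
    }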
1841 struct nvmet_fc_ls_iod *iod) in nvmet_fc_ls_disconnect() argument
1844 &iod->rqstbuf->rq_dis_assoc; in nvmet_fc_ls_disconnect()
1846 &iod->rspbuf->rsp_dis_assoc; in nvmet_fc_ls_disconnect()
1854 ret = nvmefc_vldt_lsreq_discon_assoc(iod->rqstdatalen, rqst); in nvmet_fc_ls_disconnect()
1859 iod->assoc = assoc; in nvmet_fc_ls_disconnect()
1868 iod->lsrsp->rsplen = nvme_fc_format_rjt(acc, in nvmet_fc_ls_disconnect()
1879 iod->lsrsp->rsplen = sizeof(*acc); in nvmet_fc_ls_disconnect()
1897 assoc->rcv_disconn = iod; in nvmet_fc_ls_disconnect()
1907 sizeof(*iod->rspbuf), in nvmet_fc_ls_disconnect()
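
Disconnect Association (lines 1841-1907) is the one handler that may not answer immediately, which is why it returns a send/don't-send indication to the dispatcher (see line 1990). After validating with nvmefc_vldt_lsreq_discon_assoc() and locating the association, it parks the iod in assoc->rcv_disconn (line 1897) so the accept can be sent once teardown has progressed far enough; if an earlier Disconnect LS was already parked there, the older one is answered right away with a reject built in its own rspbuf (line 1907). A condensed sketch; the locking, reject reason codes, and teardown scheduling are reconstructed assumptions:

    static bool
    nvmet_fc_ls_disconnect(struct nvmet_fc_tgtport *tgtport,
                           struct nvmet_fc_ls_iod *iod)
    {
        struct fcnvme_ls_disconnect_assoc_rqst *rqst = &iod->rqstbuf->rq_dis_assoc;
        struct fcnvme_ls_disconnect_assoc_acc *acc = &iod->rspbuf->rsp_dis_assoc;
        struct nvmet_fc_tgt_assoc *assoc = NULL;
        struct nvmet_fc_ls_iod *oldls = NULL;
        unsigned long flags;
        int ret;

        ret = nvmefc_vldt_lsreq_discon_assoc(iod->rqstdatalen, rqst);
        if (!ret) {
            assoc = nvmet_fc_find_target_assoc(tgtport,
                        be64_to_cpu(rqst->associd.association_id));
            iod->assoc = assoc;
        }

        if (ret || !assoc) {
            iod->lsrsp->rsplen = nvme_fc_format_rjt(acc, sizeof(*acc),
                            rqst->w0.ls_cmd,
                            (!assoc) ? FCNVME_RJT_RC_INV_ASSOC :
                                       FCNVME_RJT_RC_LOGIC,
                            FCNVME_RJT_EXP_NONE, 0);
            return true;            /* reject goes out immediately */
        }

        /* pre-format the accept, but hold it until teardown allows it */
        iod->lsrsp->rsplen = sizeof(*acc);
        /* ... accept header formatting elided ... */

        spin_lock_irqsave(&tgtport->lock, flags);      /* assumed lock */
        oldls = assoc->rcv_disconn;
        assoc->rcv_disconn = iod;                      /* line 1897 */
        spin_unlock_irqrestore(&tgtport->lock, flags);

        if (oldls) {
            /* a Disconnect LS was already pending: answer the older one now,
             * overwriting its prepared accept with a reject (line 1907) */
            oldls->lsrsp->rsplen = nvme_fc_format_rjt(oldls->rspbuf,
                            sizeof(*iod->rspbuf), rqst->w0.ls_cmd,
                            FCNVME_RJT_RC_UNAB, FCNVME_RJT_EXP_NONE, 0);
            nvmet_fc_xmt_ls_rsp(tgtport, oldls);
        }

        /* ... kick off association teardown and drop the lookup reference ... */

        return false;               /* dispatcher must not send the rsp now */
    }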
1932 struct nvmet_fc_ls_iod *iod = lsrsp->nvme_fc_private; in nvmet_fc_xmt_ls_rsp_done() local
1933 struct nvmet_fc_tgtport *tgtport = iod->tgtport; in nvmet_fc_xmt_ls_rsp_done()
1935 fc_dma_sync_single_for_cpu(tgtport->dev, iod->rspdma, in nvmet_fc_xmt_ls_rsp_done()
1936 sizeof(*iod->rspbuf), DMA_TO_DEVICE); in nvmet_fc_xmt_ls_rsp_done()
1937 nvmet_fc_free_ls_iod(tgtport, iod); in nvmet_fc_xmt_ls_rsp_done()
1943 struct nvmet_fc_ls_iod *iod) in nvmet_fc_xmt_ls_rsp() argument
1947 fc_dma_sync_single_for_device(tgtport->dev, iod->rspdma, in nvmet_fc_xmt_ls_rsp()
1948 sizeof(*iod->rspbuf), DMA_TO_DEVICE); in nvmet_fc_xmt_ls_rsp()
1950 ret = tgtport->ops->xmt_ls_rsp(&tgtport->fc_target_port, iod->lsrsp); in nvmet_fc_xmt_ls_rsp()
1952 nvmet_fc_xmt_ls_rsp_done(iod->lsrsp); in nvmet_fc_xmt_ls_rsp()
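
Response transmission (lines 1932-1952) is a pair: nvmet_fc_xmt_ls_rsp() syncs the DMA-mapped response buffer toward the device and hands lsrsp to the LLDD's xmt_ls_rsp op, and nvmet_fc_xmt_ls_rsp_done() (also invoked directly when the op fails) syncs back for the CPU and returns the iod to the free list. Sketch; the target-port reference drop at the end of the done path is an assumption based on the driver's refcounting, not on a line in this listing:

    static void nvmet_fc_xmt_ls_rsp_done(struct nvmefc_ls_rsp *lsrsp)
    {
        struct nvmet_fc_ls_iod *iod = lsrsp->nvme_fc_private;
        struct nvmet_fc_tgtport *tgtport = iod->tgtport;

        fc_dma_sync_single_for_cpu(tgtport->dev, iod->rspdma,
                                   sizeof(*iod->rspbuf), DMA_TO_DEVICE);
        nvmet_fc_free_ls_iod(tgtport, iod);    /* context back on the free list */
        nvmet_fc_tgtport_put(tgtport);         /* assumed: drop ref taken at receive */
    }

    static void nvmet_fc_xmt_ls_rsp(struct nvmet_fc_tgtport *tgtport,
                                    struct nvmet_fc_ls_iod *iod)
    {
        int ret;

        fc_dma_sync_single_for_device(tgtport->dev, iod->rspdma,
                                      sizeof(*iod->rspbuf), DMA_TO_DEVICE);

        ret = tgtport->ops->xmt_ls_rsp(&tgtport->fc_target_port, iod->lsrsp);
        if (ret)
            /* LLDD refused the response: complete it locally */
            nvmet_fc_xmt_ls_rsp_done(iod->lsrsp);
    }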
1960 struct nvmet_fc_ls_iod *iod) in nvmet_fc_handle_ls_rqst() argument
1962 struct fcnvme_ls_rqst_w0 *w0 = &iod->rqstbuf->rq_cr_assoc.w0; in nvmet_fc_handle_ls_rqst()
1965 iod->lsrsp->nvme_fc_private = iod; in nvmet_fc_handle_ls_rqst()
1966 iod->lsrsp->rspbuf = iod->rspbuf; in nvmet_fc_handle_ls_rqst()
1967 iod->lsrsp->rspdma = iod->rspdma; in nvmet_fc_handle_ls_rqst()
1968 iod->lsrsp->done = nvmet_fc_xmt_ls_rsp_done; in nvmet_fc_handle_ls_rqst()
1970 iod->lsrsp->rsplen = 0; in nvmet_fc_handle_ls_rqst()
1972 iod->assoc = NULL; in nvmet_fc_handle_ls_rqst()
1982 nvmet_fc_ls_create_association(tgtport, iod); in nvmet_fc_handle_ls_rqst()
1986 nvmet_fc_ls_create_connection(tgtport, iod); in nvmet_fc_handle_ls_rqst()
1990 sendrsp = nvmet_fc_ls_disconnect(tgtport, iod); in nvmet_fc_handle_ls_rqst()
1993 iod->lsrsp->rsplen = nvme_fc_format_rjt(iod->rspbuf, in nvmet_fc_handle_ls_rqst()
1994 sizeof(*iod->rspbuf), w0->ls_cmd, in nvmet_fc_handle_ls_rqst()
1999 nvmet_fc_xmt_ls_rsp(tgtport, iod); in nvmet_fc_handle_ls_rqst()
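
The dispatcher (lines 1960-1999) wires the iod into the lsrsp it will eventually hand back to the LLDD, then switches on the LS command word. Only the Disconnect handler can defer the response, which is what the sendrsp result at line 1990 captures. Sketch:

    static void nvmet_fc_handle_ls_rqst(struct nvmet_fc_tgtport *tgtport,
                                        struct nvmet_fc_ls_iod *iod)
    {
        /* w0 sits at the same offset in every LS request, so any union
         * member can be used to read the command code */
        struct fcnvme_ls_rqst_w0 *w0 = &iod->rqstbuf->rq_cr_assoc.w0;
        bool sendrsp = true;

        iod->lsrsp->nvme_fc_private = iod;
        iod->lsrsp->rspbuf = iod->rspbuf;
        iod->lsrsp->rspdma = iod->rspdma;
        iod->lsrsp->done = nvmet_fc_xmt_ls_rsp_done;
        iod->lsrsp->rsplen = 0;           /* handlers set the real length */

        iod->assoc = NULL;

        switch (w0->ls_cmd) {
        case FCNVME_LS_CREATE_ASSOCIATION:
            nvmet_fc_ls_create_association(tgtport, iod);
            break;
        case FCNVME_LS_CREATE_CONNECTION:
            nvmet_fc_ls_create_connection(tgtport, iod);
            break;
        case FCNVME_LS_DISCONNECT_ASSOC:
            /* may park the iod on the association and answer later */
            sendrsp = nvmet_fc_ls_disconnect(tgtport, iod);
            break;
        default:
            iod->lsrsp->rsplen = nvme_fc_format_rjt(iod->rspbuf,
                            sizeof(*iod->rspbuf), w0->ls_cmd,
                            FCNVME_RJT_RC_INVAL, FCNVME_RJT_EXP_NONE, 0);
        }

        if (sendrsp)
            nvmet_fc_xmt_ls_rsp(tgtport, iod);
    }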
2008 struct nvmet_fc_ls_iod *iod = in nvmet_fc_handle_ls_rqst_work() local
2010 struct nvmet_fc_tgtport *tgtport = iod->tgtport; in nvmet_fc_handle_ls_rqst_work()
2012 nvmet_fc_handle_ls_rqst(tgtport, iod); in nvmet_fc_handle_ls_rqst_work()
2041 struct nvmet_fc_ls_iod *iod; in nvmet_fc_rcv_ls_req() local
2061 iod = nvmet_fc_alloc_ls_iod(tgtport); in nvmet_fc_rcv_ls_req()
2062 if (!iod) { in nvmet_fc_rcv_ls_req()
2071 iod->lsrsp = lsrsp; in nvmet_fc_rcv_ls_req()
2072 iod->fcpreq = NULL; in nvmet_fc_rcv_ls_req()
2073 memcpy(iod->rqstbuf, lsreqbuf, lsreqbuf_len); in nvmet_fc_rcv_ls_req()
2074 iod->rqstdatalen = lsreqbuf_len; in nvmet_fc_rcv_ls_req()
2075 iod->hosthandle = hosthandle; in nvmet_fc_rcv_ls_req()
2077 queue_work(nvmet_wq, &iod->work); in nvmet_fc_rcv_ls_req()
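
Finally, the receive entry point (lines 2008-2077): the LLDD calls nvmet_fc_rcv_ls_req() with the raw LS payload; the driver grabs a free iod, copies the payload into the context's request buffer, records the lsrsp and hosthandle, and defers the actual handling to nvmet_wq via the work item set up at allocation time. Sketch; the length check, reference counting, and error codes are assumptions filled in around the listed fragments:

    static void nvmet_fc_handle_ls_rqst_work(struct work_struct *work)
    {
        struct nvmet_fc_ls_iod *iod =
            container_of(work, struct nvmet_fc_ls_iod, work);
        struct nvmet_fc_tgtport *tgtport = iod->tgtport;

        nvmet_fc_handle_ls_rqst(tgtport, iod);
    }

    int nvmet_fc_rcv_ls_req(struct nvmet_fc_target_port *target_port,
                            void *hosthandle,
                            struct nvmefc_ls_rsp *lsrsp,
                            void *lsreqbuf, u32 lsreqbuf_len)
    {
        struct nvmet_fc_tgtport *tgtport = targetport_to_tgtport(target_port);
        struct nvmet_fc_ls_iod *iod;

        /* the request must fit the preallocated request buffer */
        if (lsreqbuf_len > sizeof(union nvmefc_ls_requests))
            return -E2BIG;

        /* assumed: hold a tgtport reference for the lifetime of the LS */
        if (!nvmet_fc_tgtport_get(tgtport))
            return -ESHUTDOWN;

        iod = nvmet_fc_alloc_ls_iod(tgtport);
        if (!iod) {
            nvmet_fc_tgtport_put(tgtport);
            return -ENOENT;        /* all NVMET_LS_CTX_COUNT contexts busy */
        }

        iod->lsrsp = lsrsp;
        iod->fcpreq = NULL;        /* not an FCP exchange */
        memcpy(iod->rqstbuf, lsreqbuf, lsreqbuf_len);
        iod->rqstdatalen = lsreqbuf_len;
        iod->hosthandle = hosthandle;

        queue_work(nvmet_wq, &iod->work);

        return 0;
    }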