/openbmc/linux/drivers/infiniband/hw/erdma/

erdma_qp.c
    208  const struct ib_send_wr *send_wr, u16 wqe_idx,    in fill_inline_data() argument
    220  while (i < send_wr->num_sge) {    in fill_inline_data()
    221  bytes += send_wr->sg_list[i].length;    in fill_inline_data()
    225  remain_size = send_wr->sg_list[i].length;    in fill_inline_data()
    232  (void *)(uintptr_t)send_wr->sg_list[i].addr +    in fill_inline_data()
    254  static int fill_sgl(struct erdma_qp *qp, const struct ib_send_wr *send_wr,    in fill_sgl() argument
    261  if (send_wr->num_sge > qp->dev->attrs.max_send_sge)    in fill_sgl()
    267  while (i < send_wr->num_sge) {    in fill_sgl()
    273  bytes += send_wr->sg_list[i].length;    in fill_sgl()
    274  memcpy(sgl + sgl_offset, &send_wr->sg_list[i],    in fill_sgl()
    [all …]

erdma_verbs.h
    353  int erdma_post_send(struct ib_qp *ibqp, const struct ib_send_wr *send_wr,

/openbmc/linux/drivers/infiniband/ulp/isert/

ib_isert.c
    786  struct ib_send_wr send_wr;    in isert_login_post_send() local
    794  send_wr.next = NULL;    in isert_login_post_send()
    795  send_wr.wr_cqe = &tx_desc->tx_cqe;    in isert_login_post_send()
    796  send_wr.sg_list = tx_desc->tx_sg;    in isert_login_post_send()
    797  send_wr.num_sge = tx_desc->num_sge;    in isert_login_post_send()
    798  send_wr.opcode = IB_WR_SEND;    in isert_login_post_send()
    799  send_wr.send_flags = IB_SEND_SIGNALED;    in isert_login_post_send()
    801  ret = ib_post_send(isert_conn->qp, &send_wr, NULL);    in isert_login_post_send()
    867  struct ib_send_wr *send_wr)    in isert_init_send_wr() argument
    872  send_wr->wr_cqe = &tx_desc->tx_cqe;    in isert_init_send_wr()
    [all …]

ib_isert.h
    132  struct ib_send_wr send_wr;    member

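The isert_login_post_send() hits above show the common kernel verbs pattern for a one-off send: fill a stack-allocated struct ib_send_wr, point it at a prepared SGE, and hand it to ib_post_send(). Below is a minimal sketch of that pattern, not code from the drivers indexed here; the QP, completion callback, DMA address and lkey (my_qp, my_done, my_dma_addr, my_lkey) are placeholder names assumed to be set up elsewhere.

#include <linux/kernel.h>
#include <rdma/ib_verbs.h>

/* Completion context: ib_cqe.done is invoked from the CQ handler. */
struct my_tx_desc {
        struct ib_cqe cqe;
        struct ib_sge sge;
};

static void my_done(struct ib_cq *cq, struct ib_wc *wc)
{
        if (wc->status != IB_WC_SUCCESS)
                pr_err("send failed: %s\n", ib_wc_status_msg(wc->status));
}

/* Post one signaled SEND carrying a single, already DMA-mapped buffer. */
static int my_post_send(struct ib_qp *my_qp, struct my_tx_desc *tx,
                        u64 my_dma_addr, u32 len, u32 my_lkey)
{
        struct ib_send_wr wr = {};

        tx->cqe.done   = my_done;
        tx->sge.addr   = my_dma_addr;   /* DMA address, e.g. from ib_dma_map_single() */
        tx->sge.length = len;
        tx->sge.lkey   = my_lkey;       /* local key of the registered memory */

        wr.next       = NULL;
        wr.wr_cqe     = &tx->cqe;       /* completion routed to my_done() */
        wr.sg_list    = &tx->sge;
        wr.num_sge    = 1;
        wr.opcode     = IB_WR_SEND;
        wr.send_flags = IB_SEND_SIGNALED;   /* request a completion */

        return ib_post_send(my_qp, &wr, NULL);
}
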
/openbmc/linux/drivers/infiniband/core/

mad.c
    62   struct ib_ud_wr *wr = &mad_send_wr->send_wr;    in create_mad_addr_info()
    607  struct ib_ud_wr *send_wr = &mad_send_wr->send_wr;    in handle_outgoing_dr_smp() local
    616  port_num = send_wr->port_num;    in handle_outgoing_dr_smp()
    689  send_wr->wr.wr_cqe, drslid,    in handle_outgoing_dr_smp()
    690  send_wr->pkey_index,    in handle_outgoing_dr_smp()
    691  send_wr->port_num, &mad_wc);    in handle_outgoing_dr_smp()
    750  local->mad_send_wr->send_wr.pkey_index = out_mad_pkey_index;    in handle_outgoing_dr_smp()
    788  static int alloc_send_rmpp_list(struct ib_mad_send_wr_private *send_wr,    in alloc_send_rmpp_list() argument
    791  struct ib_mad_send_buf *send_buf = &send_wr->send_buf;    in alloc_send_rmpp_list()
    799  pad = send_wr->pad;    in alloc_send_rmpp_list()
    [all …]

cma_trace.h
    186  __field(u32, send_wr)
    198  __entry->send_wr = qp_init_attr->cap.max_send_wr;
    218  rdma_show_qp_type(__entry->qp_type), __entry->send_wr,

mad_priv.h
    128  struct ib_ud_wr send_wr;    member

agent.c
    129  mad_send_wr->send_wr.port_num = port_num;    in agent_send_response()

/openbmc/linux/drivers/infiniband/ulp/iser/

iscsi_iser.h
    144  #define ISER_GET_MAX_XMIT_CMDS(send_wr) ((send_wr \    argument
    247  struct ib_send_wr send_wr;    member

iser_memory.c
    281  wr->wr.next = &tx_desc->send_wr;    in iser_reg_sig_mr()
    328  wr->wr.next = &tx_desc->send_wr;    in iser_fast_reg_mr()

iser_verbs.c
    859  struct ib_send_wr *wr = &tx_desc->send_wr;    in iser_post_send()

/openbmc/linux/fs/smb/client/

smbdirect.c
    685  struct ib_send_wr send_wr;    in smbd_post_send_negotiate_req() local
    724  send_wr.next = NULL;    in smbd_post_send_negotiate_req()
    725  send_wr.wr_cqe = &request->cqe;    in smbd_post_send_negotiate_req()
    726  send_wr.sg_list = request->sge;    in smbd_post_send_negotiate_req()
    727  send_wr.num_sge = request->num_sge;    in smbd_post_send_negotiate_req()
    728  send_wr.opcode = IB_WR_SEND;    in smbd_post_send_negotiate_req()
    729  send_wr.send_flags = IB_SEND_SIGNALED;    in smbd_post_send_negotiate_req()
    736  rc = ib_post_send(info->id->qp, &send_wr, NULL);    in smbd_post_send_negotiate_req()
    795  struct ib_send_wr send_wr;    in smbd_post_send() local
    811  send_wr.next = NULL;    in smbd_post_send()
    [all …]

/openbmc/linux/net/sunrpc/xprtrdma/

frwr_ops.c
    386  struct ib_send_wr *post_wr, *send_wr = &req->rl_wr;    in frwr_send() local
    393  post_wr = send_wr;    in frwr_send()
    408  send_wr->send_flags |= IB_SEND_SIGNALED;    in frwr_send()
    412  send_wr->send_flags &= ~IB_SEND_SIGNALED;    in frwr_send()

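The frwr_send() hits show selective signaling: IB_SEND_SIGNALED is set on only some WRs, so most sends complete silently and the send queue is reaped in batches. A hedged sketch of that policy is below; the ring structure, counter and batch size (my_send_ring, SIGNAL_BATCH) are illustrative names, not taken from xprtrdma.

#include <rdma/ib_verbs.h>

#define SIGNAL_BATCH 16         /* request a completion every 16 sends */

struct my_send_ring {
        struct ib_qp *qp;
        unsigned int since_signal;      /* sends posted since the last signaled WR */
};

/*
 * Post a pre-built WR, asking for a completion only once per batch.
 * Completions arrive in order, so a signaled completion implicitly
 * retires all older unsignaled WRs on the same send queue; the queue
 * must therefore be sized to hold a full batch.
 */
static int my_post_batched(struct my_send_ring *ring, struct ib_send_wr *wr)
{
        if (++ring->since_signal >= SIGNAL_BATCH) {
                wr->send_flags |= IB_SEND_SIGNALED;
                ring->since_signal = 0;
        } else {
                wr->send_flags &= ~IB_SEND_SIGNALED;
        }

        return ib_post_send(ring->qp, wr, NULL);
}
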
/openbmc/linux/drivers/nvme/target/

rdma.c
    60   struct ib_send_wr send_wr;    member
    425  r->send_wr.wr_cqe = &r->send_cqe;    in nvmet_rdma_alloc_rsp()
    426  r->send_wr.sg_list = &r->send_sge;    in nvmet_rdma_alloc_rsp()
    427  r->send_wr.num_sge = 1;    in nvmet_rdma_alloc_rsp()
    428  r->send_wr.send_flags = IB_SEND_SIGNALED;    in nvmet_rdma_alloc_rsp()
    716  rsp->send_wr.opcode = IB_WR_SEND_WITH_INV;    in nvmet_rdma_queue_response()
    717  rsp->send_wr.ex.invalidate_rkey = rsp->invalidate_rkey;    in nvmet_rdma_queue_response()
    719  rsp->send_wr.opcode = IB_WR_SEND;    in nvmet_rdma_queue_response()
    728  cm_id->port_num, NULL, &rsp->send_wr);    in nvmet_rdma_queue_response()
    730  first_wr = &rsp->send_wr;    in nvmet_rdma_queue_response()
    [all …]

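nvmet_rdma_queue_response() picks IB_WR_SEND_WITH_INV when it has a remote rkey to retire and plain IB_WR_SEND otherwise, with the rkey carried in the ex.invalidate_rkey union member. The sketch below illustrates that opcode selection under the assumption (made for this sketch, not stated by the driver) that an rkey of 0 means "nothing to invalidate".

#include <rdma/ib_verbs.h>

/*
 * Choose between SEND and SEND-with-Invalidate for a response WR.
 * If the peer registered a memory region for this exchange and allows
 * remote invalidation, piggy-back the invalidate on the send so the
 * peer is spared a separate local invalidation.
 */
static void my_prepare_response(struct ib_send_wr *wr, u32 invalidate_rkey)
{
        if (invalidate_rkey) {
                wr->opcode = IB_WR_SEND_WITH_INV;
                wr->ex.invalidate_rkey = invalidate_rkey;
        } else {
                wr->opcode = IB_WR_SEND;
        }
}
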
/openbmc/qemu/migration/

rdma.c
    1710  struct ibv_send_wr send_wr = {    in qemu_rdma_post_send_control() local
    1737  ret = ibv_post_send(rdma->qp, &send_wr, &bad_wr);    in qemu_rdma_post_send_control()
    1999  struct ibv_send_wr send_wr = { 0 };    in qemu_rdma_write_one() local
    2153  send_wr.wr.rdma.rkey = block->remote_keys[chunk];    in qemu_rdma_write_one()
    2155  send_wr.wr.rdma.rkey = block->remote_rkey;    in qemu_rdma_write_one()
    2171  send_wr.wr_id = qemu_rdma_make_wrid(RDMA_WRID_RDMA_WRITE,    in qemu_rdma_write_one()
    2174  send_wr.opcode = IBV_WR_RDMA_WRITE;    in qemu_rdma_write_one()
    2175  send_wr.send_flags = IBV_SEND_SIGNALED;    in qemu_rdma_write_one()
    2176  send_wr.sg_list = &sge;    in qemu_rdma_write_one()
    2177  send_wr.num_sge = 1;    in qemu_rdma_write_one()
    [all …]

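QEMU's migration code uses the userspace libibverbs API, where the same pattern appears with struct ibv_send_wr, wr_id-based completion tracking, and the wr.rdma sub-struct carrying the remote address and rkey. Below is a minimal userspace sketch of posting an RDMA WRITE; the QP, MR and remote address/rkey parameters are assumed to come from an already established connection and are not QEMU's own names.

#include <stdint.h>
#include <infiniband/verbs.h>

/*
 * Post one signaled RDMA WRITE from a locally registered buffer to a
 * remote buffer the peer advertised (remote_addr/rkey exchanged out of
 * band). Returns 0 on success, an errno-style value on failure.
 */
static int my_post_rdma_write(struct ibv_qp *qp, struct ibv_mr *mr,
                              void *local_buf, uint32_t len,
                              uint64_t remote_addr, uint32_t rkey,
                              uint64_t wr_id)
{
        struct ibv_sge sge = {
                .addr   = (uintptr_t)local_buf,
                .length = len,
                .lkey   = mr->lkey,
        };
        struct ibv_send_wr wr = {
                .wr_id      = wr_id,            /* returned later in ibv_wc.wr_id */
                .sg_list    = &sge,
                .num_sge    = 1,
                .opcode     = IBV_WR_RDMA_WRITE,
                .send_flags = IBV_SEND_SIGNALED,
                .wr.rdma.remote_addr = remote_addr,
                .wr.rdma.rkey        = rkey,
        };
        struct ibv_send_wr *bad_wr = NULL;

        return ibv_post_send(qp, &wr, &bad_wr);
}
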
/openbmc/linux/drivers/infiniband/hw/mlx5/

wr.c
    416  static int set_sig_data_segment(const struct ib_send_wr *send_wr,    in set_sig_data_segment() argument
    557  static int set_pi_umr_wr(const struct ib_send_wr *send_wr,    in set_pi_umr_wr() argument
    561  const struct ib_reg_wr *wr = reg_wr(send_wr);    in set_pi_umr_wr()
    569  if (unlikely(send_wr->num_sge != 0) ||    in set_pi_umr_wr()
    599  ret = set_sig_data_segment(send_wr, wr->mr, sig_attrs, qp, seg, size,    in set_pi_umr_wr()

/openbmc/linux/drivers/infiniband/hw/bnxt_re/

ib_verbs.h
    216  int bnxt_re_post_send(struct ib_qp *qp, const struct ib_send_wr *send_wr,

/openbmc/linux/drivers/infiniband/ulp/srpt/

ib_srpt.c
    2875  struct ib_send_wr send_wr, *first_wr = &send_wr;    in srpt_queue_response() local
    2938  send_wr.next = NULL;    in srpt_queue_response()
    2939  send_wr.wr_cqe = &ioctx->ioctx.cqe;    in srpt_queue_response()
    2940  send_wr.sg_list = &sge;    in srpt_queue_response()
    2941  send_wr.num_sge = 1;    in srpt_queue_response()
    2942  send_wr.opcode = IB_WR_SEND;    in srpt_queue_response()
    2943  send_wr.send_flags = IB_SEND_SIGNALED;    in srpt_queue_response()

/openbmc/linux/include/rdma/

ib_verbs.h
    2350  int (*post_send)(struct ib_qp *qp, const struct ib_send_wr *send_wr,
    3835  * @send_wr: A list of work requests to post on the send queue.
    3845  const struct ib_send_wr *send_wr,    in ib_post_send()
    3850  return qp->device->ops.post_send(qp, send_wr, bad_send_wr ? : &dummy);
    3843  ib_post_send(struct ib_qp *qp, const struct ib_send_wr *send_wr, const struct ib_send_wr **bad_send_wr)    ib_post_send() argument

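ib_post_send() takes a singly linked list of WRs and reports, through the optional bad_send_wr out-parameter, the first WR the provider could not post; the dummy in the inline wrapper above only shields drivers from a NULL pointer. A hedged sketch of posting a two-WR chain and using bad_wr for error handling follows; the WR contents are assumed to be filled in elsewhere.

#include <linux/printk.h>
#include <rdma/ib_verbs.h>

/*
 * Post two already-initialized WRs as one chain. On failure, bad_wr
 * points at the first WR that was rejected: everything before it was
 * accepted, everything from it onward was not.
 */
static int my_post_chain(struct ib_qp *qp, struct ib_send_wr *first,
                         struct ib_send_wr *second)
{
        const struct ib_send_wr *bad_wr;
        int ret;

        first->next = second;
        second->next = NULL;

        ret = ib_post_send(qp, first, &bad_wr);
        if (ret)
                pr_err("post_send failed at %s WR: %d\n",
                       bad_wr == first ? "first" : "second", ret);
        return ret;
}
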
/openbmc/linux/include/uapi/rdma/

ib_user_verbs.h
    843  struct ib_uverbs_send_wr send_wr[];    member

/openbmc/linux/drivers/infiniband/hw/hns/

hns_roce_hw_v2.c
    3370  struct ib_send_wr *send_wr;    in free_mr_post_send_lp_wqe() local
    3373  send_wr = &rdma_wr.wr;    in free_mr_post_send_lp_wqe()
    3374  send_wr->opcode = IB_WR_RDMA_WRITE;    in free_mr_post_send_lp_wqe()
    3376  ret = hns_roce_v2_post_send(&hr_qp->ibqp, send_wr, &bad_wr);    in free_mr_post_send_lp_wqe()

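As in the hns hit above, kernel RDMA READ/WRITE requests use struct ib_rdma_wr, which embeds the generic ib_send_wr as .wr: the embedded member is what gets posted, and providers recover the outer struct with the rdma_wr() container_of helper. A hedged kernel-side sketch of that embedding is below; the QP, CQE, SGE and remote address/key parameters are placeholders assumed to be prepared elsewhere.

#include <rdma/ib_verbs.h>

/*
 * Post one signaled RDMA WRITE on a kernel QP. struct ib_rdma_wr wraps
 * ib_send_wr and adds remote_addr/rkey; only &wr.wr is handed to
 * ib_post_send(), and the provider gets the outer struct back via
 * rdma_wr(&wr.wr).
 */
static int my_kernel_rdma_write(struct ib_qp *qp, struct ib_cqe *cqe,
                                struct ib_sge *sge, u64 remote_addr, u32 rkey)
{
        struct ib_rdma_wr wr = {};

        wr.wr.wr_cqe     = cqe;
        wr.wr.sg_list    = sge;
        wr.wr.num_sge    = 1;
        wr.wr.opcode     = IB_WR_RDMA_WRITE;
        wr.wr.send_flags = IB_SEND_SIGNALED;

        wr.remote_addr = remote_addr;   /* peer buffer address */
        wr.rkey        = rkey;          /* peer's remote key for that buffer */

        return ib_post_send(qp, &wr.wr, NULL);
}
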