
Searched refs: to_mdev (Results 1 – 25 of 42) sorted by relevance

/openbmc/linux/drivers/infiniband/hw/mthca/
mthca_provider.c
59 struct mthca_dev *mdev = to_mdev(ibdev); in mthca_query_device()
139 err = mthca_MAD_IFC(to_mdev(ibdev), 1, 1, in mthca_query_port()
233 err = mthca_MAD_IFC(to_mdev(ibdev), 1, 1, in mthca_query_pkey()
262 err = mthca_MAD_IFC(to_mdev(ibdev), 1, 1, in mthca_query_gid()
273 err = mthca_MAD_IFC(to_mdev(ibdev), 1, 1, in mthca_query_gid()
294 if (!(to_mdev(ibdev)->active)) in mthca_alloc_ucontext()
298 if (mthca_is_memfree(to_mdev(ibdev))) in mthca_alloc_ucontext()
360 mthca_pd_free(to_mdev(ibdev), pd); in mthca_alloc_pd()
561 mthca_unmap_user_db(to_mdev(qp->device), in mthca_destroy_qp()
629 mthca_free_cq(to_mdev(ibdev), cq); in mthca_create_cq()
[all …]
mthca_mad.c
122 mthca_update_rate(to_mdev(ibdev), port_num); in smp_snoop()
123 update_sm_ah(to_mdev(ibdev), port_num, in smp_snoop()
157 mutex_lock(&to_mdev(dev)->cap_mask_mutex); in node_desc_override()
160 mutex_unlock(&to_mdev(dev)->cap_mask_mutex); in node_desc_override()
211 forward_trap(to_mdev(ibdev), port_num, in); in mthca_process_mad()
251 err = mthca_MAD_IFC(to_mdev(ibdev), mad_flags & IB_MAD_IGNORE_MKEY, in mthca_process_mad()
257 mthca_err(to_mdev(ibdev), "MAD_IFC returned %d\n", err); in mthca_process_mad()
mthca_cq.c
335 if (!mthca_is_memfree(to_mdev(cq->ibcq.device)) && in mthca_cq_resize_copy_cqes()
657 struct mthca_dev *dev = to_mdev(ibcq->device); in mthca_poll_cq()
730 mthca_write64(dbhi, 0xffffffff, to_mdev(cq->device)->kar + MTHCA_CQ_DOORBELL, in mthca_tavor_arm_cq()
731 MTHCA_GET_DOORBELL_LOCK(&to_mdev(cq->device)->doorbell_lock)); in mthca_tavor_arm_cq()
762 to_mdev(ibcq->device)->kar + MTHCA_CQ_DOORBELL, in mthca_arbel_arm_cq()
763 MTHCA_GET_DOORBELL_LOCK(&to_mdev(ibcq->device)->doorbell_lock)); in mthca_arbel_arm_cq()
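Every mthca hit above uses to_mdev() to turn the core struct ib_device * (reached directly or through a CQ/QP) back into the driver-private struct mthca_dev. A minimal sketch of the helper, assuming the usual container_of() layout; the real definition lives in the driver's private headers, and the member name ib_dev is an assumption here:

/* Sketch, not verbatim driver source: recover the mthca_dev that
 * embeds the ib_device registered with the IB core. */
static inline struct mthca_dev *to_mdev(struct ib_device *ibdev)
{
	return container_of(ibdev, struct mthca_dev, ib_dev);
}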
/openbmc/linux/drivers/infiniband/hw/mlx5/
ib_virt.c
53 struct mlx5_ib_dev *dev = to_mdev(device); in mlx5_ib_get_vf_config()
95 struct mlx5_ib_dev *dev = to_mdev(device); in mlx5_ib_set_vf_link_state()
129 dev = to_mdev(device); in mlx5_ib_get_vf_stats()
154 struct mlx5_ib_dev *dev = to_mdev(device); in set_vf_node_guid()
178 struct mlx5_ib_dev *dev = to_mdev(device); in set_vf_port_guid()
214 struct mlx5_ib_dev *dev = to_mdev(device); in mlx5_ib_get_vf_guid()
gsi.c
91 struct mlx5_ib_dev *dev = to_mdev(pd->device); in mlx5_ib_create_gsi()
170 struct mlx5_ib_dev *dev = to_mdev(mqp->ibqp.device); in mlx5_ib_destroy_gsi()
222 struct mlx5_ib_dev *dev = to_mdev(qp->device); in modify_to_rts()
262 struct mlx5_ib_dev *dev = to_mdev(device); in setup_qp()
323 struct mlx5_ib_dev *dev = to_mdev(qp->device); in mlx5_ib_gsi_modify_qp()
363 struct mlx5_ib_dev *dev = to_mdev(gsi->rx_qp->device); in mlx5_ib_add_outstanding_wr()
413 struct mlx5_ib_dev *dev = to_mdev(gsi->rx_qp->device); in get_tx_qp()
counters.c
187 mlx5_fc_destroy(to_mdev(counters->device)->mdev, in mlx5_ib_destroy_counters()
258 struct mlx5_ib_dev *dev = to_mdev(ibdev); in mlx5_ib_alloc_hw_device_stats()
267 struct mlx5_ib_dev *dev = to_mdev(ibdev); in mlx5_ib_alloc_hw_port_stats()
368 struct mlx5_ib_dev *dev = to_mdev(ibdev); in do_get_hw_stats()
430 struct mlx5_ib_dev *dev = to_mdev(ibdev); in do_get_op_stat()
461 struct mlx5_ib_dev *dev = to_mdev(ibdev); in do_get_op_stats()
483 struct mlx5_ib_dev *dev = to_mdev(ibdev); in mlx5_ib_get_hw_stats()
544 struct mlx5_ib_dev *dev = to_mdev(qp->device); in mlx5_ib_counter_bind_qp()
844 struct mlx5_ib_dev *dev = to_mdev(ibdev); in read_flow_counters()
917 to_mdev(ibcounters->device)->mdev, false); in mlx5_ib_flow_counters_set_data()
[all …]
macsec.c
220 struct mlx5_ib_dev *dev = to_mdev(attr->device); in mlx5r_add_gid_macsec_operations()
263 ret = set_roce_addr(to_mdev(physical_gid->device), in mlx5r_add_gid_macsec_operations()
290 set_roce_addr(to_mdev(physical_gid->device), physical_gid->port_num, in mlx5r_add_gid_macsec_operations()
304 struct mlx5_ib_dev *dev = to_mdev(attr->device); in mlx5r_del_gid_macsec_operations()
346 set_roce_addr(to_mdev(physical_gid->device), in mlx5r_del_gid_macsec_operations()
mad.c
252 struct mlx5_ib_dev *dev = to_mdev(ibdev); in mlx5_ib_process_mad()
293 err = mlx5_MAD_IFC(to_mdev(ibdev), mad_flags & IB_MAD_IGNORE_MKEY, in mlx5_ib_process_mad()
352 err = mlx5_MAD_IFC(to_mdev(ibdev), 1, 1, 1, NULL, NULL, in_mad, in mlx5_query_mad_ifc_smp_attr_node_info()
491 err = mlx5_MAD_IFC(to_mdev(ibdev), 1, 1, port, NULL, NULL, in_mad, in mlx5_query_mad_ifc_pkey()
520 err = mlx5_MAD_IFC(to_mdev(ibdev), 1, 1, port, NULL, NULL, in_mad, in mlx5_query_mad_ifc_gids()
531 err = mlx5_MAD_IFC(to_mdev(ibdev), 1, 1, port, NULL, NULL, in_mad, in mlx5_query_mad_ifc_gids()
547 struct mlx5_ib_dev *dev = to_mdev(ibdev); in mlx5_query_mad_ifc_port()
srq.c
47 struct mlx5_ib_dev *dev = to_mdev(pd->device); in create_srq_user()
195 struct mlx5_ib_dev *dev = to_mdev(ib_srq->device); in mlx5_ib_create_srq()
327 struct mlx5_ib_dev *dev = to_mdev(ibsrq->device); in mlx5_ib_modify_srq()
352 struct mlx5_ib_dev *dev = to_mdev(ibsrq->device); in mlx5_ib_query_srq()
376 struct mlx5_ib_dev *dev = to_mdev(srq->device); in mlx5_ib_destroy_srq()
411 struct mlx5_ib_dev *dev = to_mdev(ibsrq->device); in mlx5_ib_post_srq_recv()
qos.c
17 struct mlx5_ib_dev *dev = to_mdev(device); in pp_is_supported()
47 dev = to_mdev(c->ibucontext.device); in UVERBS_HANDLER()
mr.c
62 struct mlx5_ib_dev *dev = to_mdev(pd->device); in set_mkc_access_pd_addr_fields()
1065 struct mlx5_ib_dev *dev = to_mdev(pd->device); in mlx5_ib_get_dma_mr()
1157 struct mlx5_ib_dev *dev = to_mdev(pd->device); in alloc_cacheable_mr()
1208 struct mlx5_ib_dev *dev = to_mdev(pd->device); in reg_create()
1295 struct mlx5_ib_dev *dev = to_mdev(pd->device); in mlx5_ib_get_dm_mr()
1390 struct mlx5_ib_dev *dev = to_mdev(pd->device); in create_real_mr()
1434 struct mlx5_ib_dev *dev = to_mdev(pd->device); in create_user_odp_mr()
1492 struct mlx5_ib_dev *dev = to_mdev(pd->device); in mlx5_ib_reg_user_mr()
1534 struct mlx5_ib_dev *dev = to_mdev(pd->device); in mlx5_ib_reg_user_mr_dmabuf()
1773 struct mlx5_ib_dev *dev = to_mdev(device); in mlx5_alloc_priv_descs()
[all …]
main.c
108 struct mlx5_ib_dev *dev = to_mdev(device); in mlx5_ib_port_link_layer()
251 struct mlx5_ib_dev *ibdev = to_mdev(device); in mlx5_ib_get_netdev()
475 struct mlx5_ib_dev *dev = to_mdev(device); in mlx5_query_port_roce()
660 if (mlx5_use_mad_ifc(to_mdev(ibdev))) in mlx5_get_vport_access_method()
703 struct mlx5_ib_dev *dev = to_mdev(ibdev); in mlx5_query_system_image_guid()
735 struct mlx5_ib_dev *dev = to_mdev(ibdev); in mlx5_query_max_pkeys()
756 struct mlx5_ib_dev *dev = to_mdev(ibdev); in mlx5_query_vendor_id()
822 struct mlx5_ib_dev *dev = to_mdev(ibdev); in mlx5_ib_query_device()
1221 struct mlx5_ib_dev *dev = to_mdev(ibdev); in translate_active_width()
1306 struct mlx5_ib_dev *dev = to_mdev(ibdev); in mlx5_query_hca_port()
[all …]
dm.c
167 struct mlx5_ib_dev *dev = to_mdev(dm->base.ibdm.device); in mlx5_ib_dm_memic_free()
209 struct mlx5_ib_dev *dev = to_mdev(uobj->context->device); in UVERBS_HANDLER()
278 struct mlx5_dm *dm_db = &to_mdev(ctx->device)->dm; in handle_alloc_dm_memic()
355 struct mlx5_core_dev *dev = to_mdev(ctx->device)->mdev; in handle_alloc_dm_sw_icm()
432 mlx5_ib_dbg(to_mdev(ibdev), "alloc_dm req: dm_type=%d user_length=0x%llx log_alignment=%d\n", in mlx5_ib_alloc_dm()
470 struct mlx5_core_dev *dev = to_mdev(dm->base.ibdm.device)->mdev; in mlx5_dm_icm_dealloc()
restrack.c
153 struct mlx5_ib_dev *dev = to_mdev(ibcq->device); in fill_res_cq_entry_raw()
161 struct mlx5_ib_dev *dev = to_mdev(ibqp->device); in fill_res_qp_entry_raw()
cq.c
51 struct mlx5_ib_dev *dev = to_mdev(cq->ibcq.device); in mlx5_ib_cq_event()
170 struct mlx5_ib_dev *dev = to_mdev(qp->ibqp.device); in handle_responder()
451 struct mlx5_ib_dev *dev = to_mdev(cq->ibcq.device); in mlx5_poll_one()
584 struct mlx5_ib_dev *dev = to_mdev(cq->ibcq.device); in poll_soft_wc()
611 struct mlx5_ib_dev *dev = to_mdev(cq->ibcq.device); in mlx5_ib_poll_cq()
646 struct mlx5_core_dev *mdev = to_mdev(ibcq->device)->mdev; in mlx5_ib_arm_cq()
950 struct mlx5_ib_dev *dev = to_mdev(ibdev); in mlx5_ib_create_cq()
1051 struct mlx5_ib_dev *dev = to_mdev(cq->device); in mlx5_ib_destroy_cq()
1134 struct mlx5_ib_dev *dev = to_mdev(cq->device); in mlx5_ib_modify_cq()
1209 struct mlx5_ib_dev *dev = to_mdev(cq->ibcq.device); in copy_resize_cqes()
[all …]
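In the mlx5 files the same name resolves to the mlx5 flavor of the cast. Note the chained to_mdev(...)->mdev in counters.c, dm.c and cq.c, which steps from the IB wrapper down to the underlying struct mlx5_core_dev. A hedged sketch of both pieces (field names assumed, not copied from mlx5_ib.h):

/* Sketch only: the mlx5 variant of the container_of() cast, plus the
 * embedded core-device pointer that the ->mdev chains above reach. */
struct mlx5_ib_dev {
	struct ib_device      ib_dev; /* embedded core IB device (assumed) */
	struct mlx5_core_dev *mdev;   /* low-level HCA handle (assumed) */
	/* ... */
};

static inline struct mlx5_ib_dev *to_mdev(struct ib_device *ibdev)
{
	return container_of(ibdev, struct mlx5_ib_dev, ib_dev);
}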
/openbmc/linux/drivers/infiniband/hw/mlx4/
mr.c
67 err = mlx4_mr_alloc(to_mdev(pd->device)->dev, to_mpd(pd)->pdn, 0, in mlx4_ib_get_dma_mr()
72 err = mlx4_mr_enable(to_mdev(pd->device)->dev, &mr->mmr); in mlx4_ib_get_dma_mr()
82 (void) mlx4_mr_free(to_mdev(pd->device)->dev, &mr->mmr); in mlx4_ib_get_dma_mr()
410 struct mlx4_ib_dev *dev = to_mdev(pd->device); in mlx4_ib_reg_user_mr()
447 (void) mlx4_mr_free(to_mdev(pd->device)->dev, &mr->mmr); in mlx4_ib_reg_user_mr()
463 struct mlx4_ib_dev *dev = to_mdev(mr->device); in mlx4_ib_rereg_user_mr()
603 ret = mlx4_mr_free(to_mdev(ibmr->device)->dev, &mr->mmr); in mlx4_ib_dereg_mr()
615 struct mlx4_ib_dev *dev = to_mdev(ibmw->device); in mlx4_ib_alloc_mw()
640 mlx4_mw_free(to_mdev(ibmw->device)->dev, &mw->mmw); in mlx4_ib_dealloc_mw()
647 struct mlx4_ib_dev *dev = to_mdev(pd->device); in mlx4_ib_alloc_mr()
main.c
129 struct mlx4_ib_dev *ibdev = to_mdev(device); in mlx4_ib_get_netdev()
439 struct mlx4_ib_dev *dev = to_mdev(ibdev); in mlx4_ib_query_device()
755 struct mlx4_ib_dev *mdev = to_mdev(ibdev); in eth_link_query_port()
839 struct mlx4_ib_dev *dev = to_mdev(ibdev); in __mlx4_ib_query_gid()
907 if (mlx4_is_slave(to_mdev(ibdev)->dev)) { in mlx4_ib_query_sl2vl()
921 if (mlx4_is_mfunc(to_mdev(ibdev)->dev)) in mlx4_ib_query_sl2vl()
1009 if (mlx4_is_slave(to_mdev(ibdev)->dev)) in mlx4_ib_modify_device()
1097 struct mlx4_ib_dev *dev = to_mdev(ibdev); in mlx4_ib_alloc_ucontext()
2099 struct mlx4_ib_dev *dev = to_mdev(ibdev); in mlx4_ib_alloc_hw_device_stats()
2112 struct mlx4_ib_dev *dev = to_mdev(ibdev); in mlx4_ib_alloc_hw_port_stats()
[all …]
srq.c
76 struct mlx4_ib_dev *dev = to_mdev(ib_srq->device); in mlx4_ib_create_srq()
227 struct mlx4_ib_dev *dev = to_mdev(ibsrq->device); in mlx4_ib_modify_srq()
252 struct mlx4_ib_dev *dev = to_mdev(ibsrq->device); in mlx4_ib_query_srq()
270 struct mlx4_ib_dev *dev = to_mdev(srq->device); in mlx4_ib_destroy_srq()
317 struct mlx4_ib_dev *mdev = to_mdev(ibsrq->device); in mlx4_ib_post_srq_recv()
cm.c
156 struct rb_root *sl_id_map = &to_mdev(ibdev)->sriov.sl_id_map; in id_map_find_by_sl_id()
201 struct rb_root *sl_id_map = &to_mdev(ibdev)->sriov.sl_id_map; in sl_id_map_add()
236 struct mlx4_ib_sriov *sriov = &to_mdev(ibdev)->sriov; in id_map_alloc()
245 ent->dev = to_mdev(ibdev); in id_map_alloc()
268 struct mlx4_ib_sriov *sriov = &to_mdev(ibdev)->sriov; in id_map_get()
284 struct mlx4_ib_sriov *sriov = &to_mdev(ibdev)->sriov; in schedule_delayed()
429 struct mlx4_ib_sriov *sriov = &to_mdev(ibdev)->sriov; in mlx4_ib_demux_cm_handler()
cq.c
96 struct mlx4_ib_dev *dev = to_mdev(cq->device); in mlx4_ib_modify_cq()
180 struct mlx4_ib_dev *dev = to_mdev(ibdev); in mlx4_ib_create_cq()
381 struct mlx4_ib_dev *dev = to_mdev(ibcq->device); in mlx4_ib_resize_cq()
479 struct mlx4_ib_dev *dev = to_mdev(cq->device); in mlx4_ib_destroy_cq()
695 struct mlx4_ib_dev *dev = to_mdev(cq->ibcq.device); in mlx4_ib_poll_one()
715 mqp = __mlx4_qp_lookup(to_mdev(cq->ibcq.device)->dev, in mlx4_ib_poll_one()
727 msrq = mlx4_srq_lookup(to_mdev(cq->ibcq.device)->dev, in mlx4_ib_poll_one()
837 if (mlx4_is_mfunc(to_mdev(cq->ibcq.device)->dev)) { in mlx4_ib_poll_one()
883 struct mlx4_ib_dev *mdev = to_mdev(cq->ibcq.device); in mlx4_ib_poll_cq()
909 to_mdev(ibcq->device)->uar_map, in mlx4_ib_arm_cq()
[all …]
mad.c
230 struct mlx4_ib_dev *dev = to_mdev(ibdev); in smp_snoop()
435 struct mlx4_ib_dev *dev = to_mdev(ibdev); in mlx4_ib_find_real_gid()
686 struct mlx4_ib_dev *dev = to_mdev(ibdev); in mlx4_ib_demux_mad()
833 forward_trap(to_mdev(ibdev), port_num, in_mad); in ib_process_mad()
866 err = mlx4_MAD_IFC(to_mdev(ibdev), in ib_process_mad()
877 if (!mlx4_is_slave(to_mdev(ibdev)->dev)) in ib_process_mad()
942 struct mlx4_ib_dev *dev = to_mdev(ibdev); in iboe_process_mad()
989 struct mlx4_ib_dev *dev = to_mdev(ibdev); in mlx4_ib_process_mad()
1295 struct mlx4_ib_dev *dev = to_mdev(ctx->ib_dev); in mlx4_ib_tunnel_comp_handler()
1306 struct mlx4_ib_dev *dev = to_mdev(ctx->ib_dev); in mlx4_ib_wire_comp_handler()
[all …]
qp.c
864 struct mlx4_ib_dev *dev = to_mdev(pd->device); in create_rq()
995 struct mlx4_ib_dev *dev = to_mdev(pd->device); in create_qp_common()
1626 struct mlx4_ib_dev *dev = to_mdev(device); in mlx4_ib_create_qp()
2139 dev = to_mdev(ibwq->device); in __mlx4_ib_modify_qp()
2149 dev = to_mdev(ibqp->device); in __mlx4_ib_modify_qp()
3018 struct mlx4_ib_dev *ibdev = to_mdev(ib_dev); in build_mlx_header()
3045 if (mlx4_is_mfunc(to_mdev(ib_dev)->dev)) { in build_mlx_header()
3097 if (mlx4_is_mfunc(to_mdev(ib_dev)->dev)) { in build_mlx_header()
3105 &(to_mdev(ib_dev) in build_mlx_header()
3111 to_mdev(ib_dev) in build_mlx_header()
[all …]
ah.c
46 struct mlx4_dev *dev = to_mdev(ib_ah->device)->dev; in create_ib_ah()
79 struct mlx4_ib_dev *ibdev = to_mdev(ib_ah->device); in create_iboe_ah()
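mlx4 repeats the pattern: hits such as to_mdev(pd->device)->dev in mr.c step through the wrapper to the low-level struct mlx4_dev, and to_mpd() is the sibling cast for protection domains. A sketch under the same assumptions (layout and field names are illustrative, not verbatim mlx4_ib.h):

/* Sketch (assumed layout): mlx4's variant, with the ->dev member the
 * to_mdev(...)->dev hits above dereference. */
struct mlx4_ib_dev {
	struct ib_device  ib_dev; /* embedded core IB device (assumed) */
	struct mlx4_dev  *dev;    /* mlx4 core device handle (assumed) */
	/* ... */
};

static inline struct mlx4_ib_dev *to_mdev(struct ib_device *ibdev)
{
	return container_of(ibdev, struct mlx4_ib_dev, ib_dev);
}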
/openbmc/linux/drivers/dma/
altera-msgdma.c
201 #define to_mdev(chan) container_of(chan, struct msgdma_device, dmachan) macro
306 struct msgdma_device *mdev = to_mdev(tx->chan); in msgdma_tx_submit()
335 struct msgdma_device *mdev = to_mdev(dchan); in msgdma_prep_memcpy()
393 struct msgdma_device *mdev = to_mdev(dchan); in msgdma_prep_slave_sg()
464 struct msgdma_device *mdev = to_mdev(dchan); in msgdma_dma_config()
571 struct msgdma_device *mdev = to_mdev(chan); in msgdma_issue_pending()
638 struct msgdma_device *mdev = to_mdev(dchan); in msgdma_free_chan_resources()
655 struct msgdma_device *mdev = to_mdev(dchan); in msgdma_alloc_chan_resources()
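Unlike the InfiniBand drivers, altera-msgdma defines to_mdev as a file-local macro, shown verbatim at line 201 above. A self-contained userspace illustration of the container_of() idiom it relies on (stand-in types and fields, not the driver's real layout; the kernel's container_of comes from <linux/container_of.h>):

#include <stddef.h>
#include <stdio.h>

/* Userspace rewrite of the kernel macro, for demonstration only. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct dma_chan { int id; };            /* stand-in for the real struct */
struct msgdma_device {
	int irq;                        /* stand-in field */
	struct dma_chan dmachan;        /* embedded, as in the driver */
};

#define to_mdev(chan) container_of(chan, struct msgdma_device, dmachan)

int main(void)
{
	struct msgdma_device mdev = { .irq = 42 };
	struct dma_chan *chan = &mdev.dmachan; /* what the DMA core hands back */

	/* Walk from the embedded channel back out to its container. */
	printf("irq = %d\n", to_mdev(chan)->irq); /* prints: irq = 42 */
	return 0;
}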
/openbmc/linux/drivers/most/
most_usb.c
124 #define to_mdev(d) container_of(d, struct most_dev, iface) macro
238 struct most_dev *mdev = to_mdev(iface); in hdm_poison_channel()
343 struct most_dev *mdev = to_mdev(mbo->ifp); in hdm_write_completion()
395 struct most_dev *mdev = to_mdev(mbo->ifp); in hdm_read_completion()
460 struct most_dev *mdev = to_mdev(iface); in hdm_enqueue()
537 struct most_dev *mdev = to_mdev(mbo->ifp); in hdm_dma_alloc()
545 struct most_dev *mdev = to_mdev(mbo->ifp); in hdm_dma_free()
571 struct most_dev *mdev = to_mdev(iface); in hdm_configure_channel()
651 struct most_dev *mdev = to_mdev(iface); in hdm_request_netinfo()
