
Searched refs:mvdev (Results 1 – 9 of 9) sorted by relevance

/openbmc/linux/drivers/vdpa/mlx5/core/
resources.c
233 if (!mvdev->cvq.iotlb) in init_ctrl_vq()
237 vringh_set_iotlb(&mvdev->cvq.vring, mvdev->cvq.iotlb, &mvdev->cvq.iommu_lock); in init_ctrl_vq()
244 vhost_iotlb_free(mvdev->cvq.iotlb); in cleanup_ctrl_vq()
259 mutex_init(&mvdev->mr.mkey_mtx); in mlx5_vdpa_alloc_resources()
266 err = create_uctx(mvdev, &res->uid); in mlx5_vdpa_alloc_resources()
287 err = init_ctrl_vq(mvdev); in mlx5_vdpa_alloc_resources()
300 destroy_uctx(mvdev, res->uid); in mlx5_vdpa_alloc_resources()
304 mutex_destroy(&mvdev->mr.mkey_mtx); in mlx5_vdpa_alloc_resources()
315 cleanup_ctrl_vq(mvdev); in mlx5_vdpa_free_resources()
319 destroy_uctx(mvdev, res->uid); in mlx5_vdpa_free_resources()
[all …]
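
The resources.c hits show the control virtqueue's software IOTLB being set up and torn down. Below is a minimal sketch of that pairing reconstructed from the lines above; only the field accesses (mvdev->cvq.iotlb, mvdev->cvq.vring, mvdev->cvq.iommu_lock) come from the hits, while the vhost_iotlb_alloc() arguments and the function names are assumptions.

/*
 * Illustrative sketch, not the driver's code. struct mlx5_vdpa_dev and
 * its cvq fields come from the driver-internal mlx5_vdpa.h.
 */
#include <linux/vhost_iotlb.h>
#include <linux/vringh.h>

static int init_ctrl_vq_sketch(struct mlx5_vdpa_dev *mvdev)
{
	/* Allocate the software IOTLB used to translate CVQ buffer addresses
	 * (limit/flags of 0 are an assumption here). */
	mvdev->cvq.iotlb = vhost_iotlb_alloc(0, 0);
	if (!mvdev->cvq.iotlb)
		return -ENOMEM;

	/* Attach the IOTLB and its lock to the control virtqueue's vringh. */
	vringh_set_iotlb(&mvdev->cvq.vring, mvdev->cvq.iotlb,
			 &mvdev->cvq.iommu_lock);
	return 0;
}

static void cleanup_ctrl_vq_sketch(struct mlx5_vdpa_dev *mvdev)
{
	vhost_iotlb_free(mvdev->cvq.iotlb);
}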
mr.c
212 fill_indir(mvdev, mr, in); in create_indirect_key()
299 destroy_direct_mr(mvdev, mr); in unmap_direct_mr()
345 unmap_direct_mr(mvdev, dmr); in add_direct_chain()
414 unmap_direct_mr(mvdev, dmr); in create_user_mr()
487 unmap_direct_mr(mvdev, dmr); in destroy_user_mr()
497 prune_iotlb(mvdev); in _mlx5_vdpa_destroy_cvq_mr()
511 destroy_user_mr(mvdev, mr); in _mlx5_vdpa_destroy_dvq_mr()
513 destroy_dma_mr(mvdev, mr); in _mlx5_vdpa_destroy_dvq_mr()
532 mlx5_vdpa_destroy_mr_asid(mvdev, mvdev->group2asid[MLX5_VDPA_CVQ_GROUP]); in mlx5_vdpa_destroy_mr()
533 mlx5_vdpa_destroy_mr_asid(mvdev, mvdev->group2asid[MLX5_VDPA_DATAVQ_GROUP]); in mlx5_vdpa_destroy_mr()
[all …]
mlx5_vdpa.h
71 struct mlx5_vdpa_dev *mvdev; member
103 int mlx5_vdpa_create_tis(struct mlx5_vdpa_dev *mvdev, void *in, u32 *tisn);
104 void mlx5_vdpa_destroy_tis(struct mlx5_vdpa_dev *mvdev, u32 tisn);
107 void mlx5_vdpa_destroy_rqt(struct mlx5_vdpa_dev *mvdev, u32 rqtn);
108 int mlx5_vdpa_create_tir(struct mlx5_vdpa_dev *mvdev, void *in, u32 *tirn);
109 void mlx5_vdpa_destroy_tir(struct mlx5_vdpa_dev *mvdev, u32 tirn);
112 int mlx5_vdpa_alloc_resources(struct mlx5_vdpa_dev *mvdev);
113 void mlx5_vdpa_free_resources(struct mlx5_vdpa_dev *mvdev);
114 int mlx5_vdpa_create_mkey(struct mlx5_vdpa_dev *mvdev, u32 *mkey, u32 *in,
116 int mlx5_vdpa_destroy_mkey(struct mlx5_vdpa_dev *mvdev, u32 mkey);
[all …]
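
The mlx5_vdpa.h hits list the create/destroy helpers the core exports around mvdev. A hedged sketch of how a caller balances them follows; the wrapper names, the tis_in argument, and the error-unwind ordering are illustrative, and only the prototypes are taken from the header above.

/* Hypothetical caller pairing the alloc/free and create/destroy helpers. */
static int setup_sketch(struct mlx5_vdpa_dev *mvdev, void *tis_in)
{
	u32 tisn;
	int err;

	/* Per-device resources (uid, control VQ, mr mutex per resources.c). */
	err = mlx5_vdpa_alloc_resources(mvdev);
	if (err)
		return err;

	err = mlx5_vdpa_create_tis(mvdev, tis_in, &tisn);
	if (err)
		goto err_tis;

	return 0;

err_tis:
	mlx5_vdpa_free_resources(mvdev);
	return err;
}

static void teardown_sketch(struct mlx5_vdpa_dev *mvdev, u32 tisn)
{
	mlx5_vdpa_destroy_tis(mvdev, tisn);
	mlx5_vdpa_free_resources(mvdev);
}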
/openbmc/linux/drivers/vdpa/mlx5/net/
mlx5_vnet.c
263 struct mlx5_vdpa_dev *mvdev = &ndev->mvdev; in create_tis() local
2023 mvdev = wqent->mvdev; in mlx5_cvq_kick_handler()
2093 if (!mvdev->wq || !mvdev->cvq.ready) in mlx5_vdpa_kick_vq()
2361 print_features(mvdev, ndev->mvdev.mlx_features, false); in mlx5_vdpa_get_device_features()
2427 mvdev->max_idx = mvdev->max_vqs; in update_cvq_info()
2475 mvdev = wqent->mvdev; in update_carrier()
2496 wqent->mvdev = &ndev->mvdev; in queue_link_work()
2608 print_status(mvdev, ndev->mvdev.status, false); in mlx5_vdpa_get_status()
3421 mvdev = &ndev->mvdev; in mlx5_vdpa_dev_add()
3507 ndev->cvq_ent.mvdev = mvdev; in mlx5_vdpa_dev_add()
[all …]
debug.c
102 node->ucast_counter.mdev = ndev->mvdev.mdev; in mlx5_vdpa_add_rx_counters()
103 node->mcast_counter.mdev = ndev->mvdev.mdev; in mlx5_vdpa_add_rx_counters()
142 mdev = ndev->mvdev.mdev; in mlx5_vdpa_add_debugfs()
143 ndev->debugfs = debugfs_create_dir(dev_name(&ndev->mvdev.vdev.dev), in mlx5_vdpa_add_debugfs()
mlx5_vnet.h
10 container_of(__mvdev, struct mlx5_vdpa_net, mvdev)
44 struct mlx5_vdpa_dev mvdev; member
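
The two mlx5_vnet.h hits document the embedding pattern: struct mlx5_vdpa_net carries a struct mlx5_vdpa_dev member named mvdev, and a container_of() wrapper (its macro name is not visible in the hit) maps from the core device back to the net device. A minimal sketch of that pattern with hypothetical names:

#include <linux/container_of.h>

/* Illustrative layout; only the embedded mvdev member is from the hits. */
struct mlx5_vdpa_net_sketch {
	struct mlx5_vdpa_dev mvdev;	/* embedded core device */
	/* ... net-specific state ... */
};

/* Recover the containing net device from a core-device pointer. */
static struct mlx5_vdpa_net_sketch *to_net_sketch(struct mlx5_vdpa_dev *mvdev)
{
	return container_of(mvdev, struct mlx5_vdpa_net_sketch, mvdev);
}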
/openbmc/linux/drivers/vfio/pci/mlx5/
main.c
392 struct mlx5vf_pci_core_device *mvdev = migf->mvdev; in mlx5vf_precopy_ioctl() local
557 migf->mvdev = mvdev; in mlx5vf_pci_save_device_data()
966 migf->mvdev = mvdev; in mlx5vf_pci_resume_device_data()
1012 if (mvdev->resuming_migf) { in mlx5vf_disable_fds()
1018 if (mvdev->saving_migf) { in mlx5vf_disable_fds()
1024 mvdev->saving_migf = NULL; in mlx5vf_disable_fds()
1032 u32 cur = mvdev->mig_state; in mlx5vf_pci_step_device_state_locked()
1224 if (!mvdev->migrate_cap) in mlx5vf_pci_aer_reset_done()
1255 if (mvdev->migrate_cap) in mlx5vf_pci_open_device()
1334 if (IS_ERR(mvdev)) in mlx5vf_pci_probe()
[all …]
cmd.c
46 if (mvdev->mdev_detach) in mlx5vf_cmd_suspend_vhca()
78 if (mvdev->mdev_detach) in mlx5vf_cmd_resume_vhca()
195 mlx5_sriov_blocking_notifier_unregister(mvdev->mdev, mvdev->vf_id, in mlx5vf_cmd_remove_migratable()
196 &mvdev->nb); in mlx5vf_cmd_remove_migratable()
211 if (!mvdev->mdev) in mlx5vf_cmd_set_migratable()
218 if (mvdev->vf_id < 0) in mlx5vf_cmd_set_migratable()
221 ret = mlx5vf_is_migratable(mvdev->mdev, mvdev->vf_id + 1); in mlx5vf_cmd_set_migratable()
225 if (mlx5vf_cmd_get_vhca_id(mvdev->mdev, mvdev->vf_id + 1, in mlx5vf_cmd_set_migratable()
230 if (!mvdev->cb_wq) in mlx5vf_cmd_set_migratable()
236 ret = mlx5_sriov_blocking_notifier_register(mvdev->mdev, mvdev->vf_id, in mlx5vf_cmd_set_migratable()
[all …]
cmd.h
16 #define MLX5VF_PRE_COPY_SUPP(mvdev) \ argument
17 ((mvdev)->core_device.vdev.migration_flags & VFIO_MIGRATION_PRE_COPY)
102 struct mlx5vf_pci_core_device *mvdev; member
187 int mlx5vf_cmd_resume_vhca(struct mlx5vf_pci_core_device *mvdev, u16 op_mod);
190 void mlx5vf_cmd_set_migratable(struct mlx5vf_pci_core_device *mvdev,
193 void mlx5vf_cmd_remove_migratable(struct mlx5vf_pci_core_device *mvdev);
194 void mlx5vf_cmd_close_migratable(struct mlx5vf_pci_core_device *mvdev);
195 int mlx5vf_cmd_save_vhca_state(struct mlx5vf_pci_core_device *mvdev,
199 int mlx5vf_cmd_load_vhca_state(struct mlx5vf_pci_core_device *mvdev,
217 void mlx5vf_state_mutex_unlock(struct mlx5vf_pci_core_device *mvdev);
[all …]
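
The cmd.h hits expose the migration helpers that main.c drives: a pre-copy capability macro plus set/close/remove lifecycle calls and the save/resume/load VHCA commands (several signatures are truncated in the listing). A minimal sketch using only the fully shown prototypes; the wrapper names and the call ordering are illustrative, while the macro expansion is taken verbatim from the hit.

/* Reports whether the device advertised VFIO_MIGRATION_PRE_COPY support. */
static bool supports_precopy_sketch(struct mlx5vf_pci_core_device *mvdev)
{
	return MLX5VF_PRE_COPY_SUPP(mvdev);
}

/* Hypothetical teardown path balancing mlx5vf_cmd_set_migratable(). */
static void teardown_migration_sketch(struct mlx5vf_pci_core_device *mvdev)
{
	/* Quiesce any in-flight migration state, then drop the notifier and
	 * workqueue that set_migratable registered (ordering is an assumption). */
	mlx5vf_cmd_close_migratable(mvdev);
	mlx5vf_cmd_remove_migratable(mvdev);
}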