Lines matching refs: num_ent

80 	u32 num_ent; member
96 	u32 num_ent; member
354 struct mlx5_vdpa_virtqueue *mvq, u32 num_ent) in qp_prepare() argument
381 MLX5_SET(qpc, qpc, log_rq_size, ilog2(num_ent)); in qp_prepare()
387 static int rq_buf_alloc(struct mlx5_vdpa_net *ndev, struct mlx5_vdpa_qp *vqp, u32 num_ent) in rq_buf_alloc() argument
390 num_ent * sizeof(struct mlx5_wqe_data_seg), &vqp->frag_buf, in rq_buf_alloc()
411 err = rq_buf_alloc(ndev, vqp, mvq->num_ent); in qp_create()
427 qp_prepare(ndev, vqp->fw, in, mvq, mvq->num_ent); in qp_create()
445 rx_post(vqp, mvq->num_ent); in qp_create()
517 if (num > mvq->num_ent / 2) { in mlx5_vdpa_cq_comp()
535 static int cq_create(struct mlx5_vdpa_net *ndev, u16 idx, u32 num_ent) in cq_create() argument
557 err = cq_frag_buf_alloc(ndev, &vcq->buf, num_ent); in cq_create()
586 MLX5_SET(cqc, cqc, log_cq_size, ilog2(num_ent)); in cq_create()
596 vcq->cqe = num_ent; in cq_create()
690 (*umemp)->size = p_a * mvq->num_ent + p_b; in set_umem_size()
908 MLX5_SET(virtio_q, vq_ctx, queue_size, mvq->num_ent); in create_virtqueue()
1268 for (i = 0; i < irqp->num_ent; i++) { in alloc_vector()
1292 for (i = 0; i < irqp->num_ent; i++) in dealloc_vector()
1305 if (!mvq->num_ent) in setup_vq()
1311 err = cq_create(ndev, idx, mvq->num_ent); in setup_vq()
2148 mvq->num_ent = num; in mlx5_vdpa_set_vq_num()
2627 ri->num_ent = mvq->num_ent; in save_channel_info()
2672 mvq->num_ent = ri->num_ent; in restore_channels_info()
2981 for (i = ndev->irqp.num_ent - 1; i >= 0; i--) { in free_irqs()
3350 ndev->irqp.num_ent++; in allocate_irqs()
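
Read together, these references show num_ent acting as the virtqueue depth: it sizes the RQ and CQ hardware contexts through ilog2() (log_rq_size, log_cq_size), is written as the virtio queue_size, and scales the umem allocation as p_a * num_ent + p_b in set_umem_size(). The standalone C sketch below only illustrates those size computations; the struct and helper names are illustrative stand-ins, not the driver's actual API (the real code uses mlx5_vdpa_virtqueue, MLX5_SET() and the kernel's ilog2()).

#include <stdint.h>
#include <stdio.h>

/* Illustrative stand-in for the sizes derived from num_ent. */
struct vq_sizes {
	uint32_t log_rq_size;   /* QP context: log2 of the receive queue depth */
	uint32_t log_cq_size;   /* CQ context: log2 of the completion queue depth */
	uint32_t queue_size;    /* virtio_q context: the raw entry count */
	uint64_t umem_bytes;    /* p_a * num_ent + p_b, as in set_umem_size() */
};

/* Integer log2 for a power-of-two entry count (mirrors the kernel's ilog2()). */
static uint32_t ilog2_u32(uint32_t v)
{
	uint32_t r = 0;

	while (v >>= 1)
		r++;
	return r;
}

static struct vq_sizes compute_vq_sizes(uint32_t num_ent, uint64_t p_a, uint64_t p_b)
{
	struct vq_sizes s = {
		.log_rq_size = ilog2_u32(num_ent),
		.log_cq_size = ilog2_u32(num_ent),
		.queue_size  = num_ent,
		.umem_bytes  = p_a * num_ent + p_b,
	};
	return s;
}

int main(void)
{
	/* Example: a 256-entry virtqueue with hypothetical umem parameters. */
	struct vq_sizes s = compute_vq_sizes(256, 4096, 64);

	printf("log_rq_size=%u log_cq_size=%u queue_size=%u umem=%llu\n",
	       s.log_rq_size, s.log_cq_size, s.queue_size,
	       (unsigned long long)s.umem_bytes);
	return 0;
}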