Searched refs:MLX5_SEND_WQE_BB (Results 1 – 15 of 15) sorted by relevance
/openbmc/linux/drivers/net/ethernet/mellanox/mlx5/core/en_accel/

ktls_utils.h
    54   (DIV_ROUND_UP(sizeof(struct mlx5e_set_tls_static_params_wqe), MLX5_SEND_WQE_BB))
    57   (DIV_ROUND_UP(sizeof(struct mlx5e_set_tls_progress_params_wqe), MLX5_SEND_WQE_BB))
    60   (DIV_ROUND_UP(sizeof(struct mlx5e_get_tls_progress_params_wqe), MLX5_SEND_WQE_BB))

ktls_tx.c
    15   (DIV_ROUND_UP(sizeof(struct mlx5e_dump_wqe), MLX5_SEND_WQE_BB))
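Every match in this group turns a WQE struct's byte size into the number of 64-byte send basic blocks (WQEBBs) it occupies. A minimal userspace sketch of that idiom, using a hypothetical struct in place of the kernel's TLS-params WQEs:

#include <stdio.h>

#define MLX5_SEND_WQE_BB 64
#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

/* hypothetical stand-in for mlx5e_set_tls_static_params_wqe */
struct fake_tls_params_wqe {
	unsigned char ctrl[16];		/* control segment */
	unsigned char umr_ctrl[16];	/* UMR control segment */
	unsigned char params[48];	/* static params */
};

#define FAKE_TLS_WQEBBS \
	(DIV_ROUND_UP(sizeof(struct fake_tls_params_wqe), MLX5_SEND_WQE_BB))

int main(void)
{
	/* 80 bytes round up to 2 WQEBBs; the tail of the second is padding */
	printf("%zu bytes -> %zu WQEBBs\n",
	       sizeof(struct fake_tls_params_wqe), (size_t)FAKE_TLS_WQEBBS);
	return 0;
}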
/openbmc/linux/drivers/net/ethernet/mellanox/mlx5/core/lib/

aso.h
    11   (DIV_ROUND_UP(sizeof(struct mlx5_aso_wqe), MLX5_SEND_WQE_BB))
    13   (DIV_ROUND_UP(sizeof(struct mlx5_aso_wqe_data), MLX5_SEND_WQE_BB))

aso.c
    280  MLX5_SET(wq, wq, log_wq_stride, ilog2(MLX5_SEND_WQE_BB));  in mlx5_aso_create_sq()
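The aso.c site (like wq.c line 105 and params.c line 1051 below) describes the SQ stride to the hardware as a log2, so the value actually written for 64-byte WQEBBs is 6. A sketch with a stand-in for the kernel's ilog2(); MLX5_SET() itself just packs the field into the WQ context blob:

#include <stdio.h>

#define MLX5_SEND_WQE_BB 64

/* stand-in for the kernel's ilog2(): position of the highest set bit */
static unsigned int ilog2_u32(unsigned int v)
{
	unsigned int log = 0;

	while (v >>= 1)
		log++;
	return log;
}

int main(void)
{
	unsigned int log_wq_stride = ilog2_u32(MLX5_SEND_WQE_BB);

	/* device computes a WQE address as wq_base + (index << log_wq_stride) */
	printf("log_wq_stride = %u (stride = %u bytes)\n",
	       log_wq_stride, 1u << log_wq_stride);
	return 0;
}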
/openbmc/linux/include/linux/mlx5/

qp.h
    41   #define MLX5_SIG_WQE_SIZE (MLX5_SEND_WQE_BB * 8)
    156  MLX5_SEND_WQE_BB = 64,  (enumerator)
    159  #define MLX5_SEND_WQEBB_NUM_DS (MLX5_SEND_WQE_BB / MLX5_SEND_WQE_DS)
    165  #define MLX5_SEND_WQE_MAX_SIZE (MLX5_SEND_WQE_MAX_WQEBBS * MLX5_SEND_WQE_BB)
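These qp.h lines are the root definitions everything else in this listing builds on. A self-checking sketch of the derived values; note that MLX5_SEND_WQE_DS = 16 and MLX5_SEND_WQE_MAX_WQEBBS = 16 are the companion definitions from the same header, restated here by assumption rather than quoted by the search:

#include <assert.h>

#define MLX5_SEND_WQE_BB	 64
#define MLX5_SEND_WQE_DS	 16	/* one 16-byte data segment (assumed) */
#define MLX5_SEND_WQE_MAX_WQEBBS 16	/* assumed companion value */
#define MLX5_SIG_WQE_SIZE	 (MLX5_SEND_WQE_BB * 8)
#define MLX5_SEND_WQEBB_NUM_DS	 (MLX5_SEND_WQE_BB / MLX5_SEND_WQE_DS)
#define MLX5_SEND_WQE_MAX_SIZE	 (MLX5_SEND_WQE_MAX_WQEBBS * MLX5_SEND_WQE_BB)

static_assert(MLX5_SEND_WQEBB_NUM_DS == 4,
	      "each WQEBB holds four 16-byte data segments");
static_assert(MLX5_SIG_WQE_SIZE == 512,
	      "signature WQEs span eight WQEBBs");
static_assert(MLX5_SEND_WQE_MAX_SIZE == 1024,
	      "a send WQE is at most 16 WQEBBs, i.e. 1 KiB");

int main(void) { return 0; }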
/openbmc/linux/drivers/net/ethernet/mellanox/mlx5/core/

wq.c
    105  u8 log_sq_stride = ilog2(MLX5_SEND_WQE_BB);  in mlx5_wq_qp_create()
    134  u16 sq_strides_offset = rq_byte_size / MLX5_SEND_WQE_BB;  in mlx5_wq_qp_create()

en.h
    154  (DIV_ROUND_UP(MLX5E_KLM_UMR_WQE_SZ(klm_entries), MLX5_SEND_WQE_BB))
    166  MLX5E_KLM_ENTRIES_PER_WQE(MLX5_SEND_WQE_BB * mlx5e_get_max_sq_aligned_wqebbs(mdev))
    208  MLX5_CAP_GEN(mdev, max_wqe_sz_sq) / MLX5_SEND_WQE_BB);  in mlx5e_get_max_sq_wqebbs()
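en.h line 208 converts the firmware capability max_wqe_sz_sq, reported in bytes, into a WQEBB count, and wq.c line 134 uses the same division to place the SQ after the RQ inside a shared QP buffer, measured in 64-byte strides. A sketch of both conversions with made-up input values:

#include <stdio.h>

#define MLX5_SEND_WQE_BB 64

int main(void)
{
	unsigned int max_wqe_sz_sq = 1024;	/* hypothetical HCA cap, bytes */
	unsigned int rq_byte_size = 16384;	/* hypothetical RQ size, bytes */

	/* byte-sized capability -> WQEBB count */
	unsigned int max_sq_wqebbs = max_wqe_sz_sq / MLX5_SEND_WQE_BB;
	/* SQ start offset within the QP buffer, in 64-byte strides */
	unsigned int sq_strides_offset = rq_byte_size / MLX5_SEND_WQE_BB;

	printf("max WQE = %u WQEBBs, SQ begins at stride %u\n",
	       max_sq_wqebbs, sq_strides_offset);
	return 0;
}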
/openbmc/linux/drivers/net/ethernet/mellanox/mlx5/core/en/

params.c
    109   max_wqe_size = mlx5e_get_max_sq_aligned_wqebbs(mdev) * MLX5_SEND_WQE_BB;  in mlx5e_mpwrq_log_wqe_sz()
    161   MLX5_SEND_WQE_BB);  in mlx5e_mpwrq_umr_wqebbs()
    175   MLX5_SEND_WQE_BB / mlx5e_mpwrq_umr_entry_size(umr_mode));  in mlx5e_mpwrq_mtts_per_wqe()
    1051  MLX5_SET(wq, wq, log_wq_stride, ilog2(MLX5_SEND_WQE_BB));  in mlx5e_build_sq_param_common()
    1230  useful_space = PAGE_SIZE - mlx5e_get_max_sq_wqebbs(mdev) + MLX5_SEND_WQE_BB;  in mlx5e_build_icosq_log_wq_sz()
    1231  total_pages = DIV_ROUND_UP(wqebbs * MLX5_SEND_WQE_BB, useful_space);  in mlx5e_build_icosq_log_wq_sz()
    1232  wqebbs = total_pages * (PAGE_SIZE / MLX5_SEND_WQE_BB);  in mlx5e_build_icosq_log_wq_sz()

txrx.h
    467   WARN_ON_ONCE(PAGE_SIZE / MLX5_SEND_WQE_BB < (u16)mlx5e_get_max_sq_wqebbs(mdev));  in mlx5e_stop_room_for_wqe()

reporter_tx.c
    353   sq_stride = MLX5_SEND_WQE_BB;  in mlx5e_tx_reporter_diagnose_generic_txqsq()
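The three lines at params.c 1230-1232 size the ICOSQ so that UMR WQEs, which are padded with NOPs rather than allowed to cross a page boundary, still fit: each page is only guaranteed to carry PAGE_SIZE minus the worst-case padding gap of useful bytes. A sketch of that estimate under assumed values (4 KiB pages, a 4-WQEBB max WQE, a queue that must hold 300 WQEBBs):

#include <stdio.h>

#define MLX5_SEND_WQE_BB 64
#define PAGE_SIZE 4096			/* assumption for this sketch */
#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

int main(void)
{
	unsigned int max_wqe_bytes = 4 * MLX5_SEND_WQE_BB; /* assumption */
	unsigned int wqebbs = 300;	/* WQEBBs the queue must hold */
	unsigned int useful_space, total_pages;

	/* guaranteed useful bytes per page after worst-case NOP padding */
	useful_space = PAGE_SIZE - max_wqe_bytes + MLX5_SEND_WQE_BB;
	/* pages needed if every page wastes the worst-case gap */
	total_pages = DIV_ROUND_UP(wqebbs * MLX5_SEND_WQE_BB, useful_space);
	/* round the queue up to whole pages' worth of WQEBBs */
	wqebbs = total_pages * (PAGE_SIZE / MLX5_SEND_WQE_BB);

	printf("useful %u B/page, %u pages, queue = %u WQEBBs\n",
	       useful_space, total_pages, wqebbs);
	return 0;
}

The txrx.h WARN_ON_ONCE at line 467 guards the same invariant from the other side: a page must be able to hold at least one maximum-size WQE.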
/openbmc/linux/drivers/infiniband/hw/mlx5/

wr.h
    41    return fragment_end + MLX5_SEND_WQE_BB;  in get_sq_edge()

mem.c
    132   MLX5_SEND_WQE_BB);  in post_send_nop()

wr.c
    683   *size = ALIGN(*size, MLX5_SEND_WQE_BB >> 4);  in set_reg_wr()
    774   qp->sq.cur_post += DIV_ROUND_UP(size * 16, MLX5_SEND_WQE_BB);  in mlx5r_finish_wqe()
    780   seg = PTR_ALIGN(seg, MLX5_SEND_WQE_BB);  in mlx5r_finish_wqe()

qp.c
    166   min_t(size_t, buflen - bytes_copied, MLX5_SEND_WQE_BB);  in mlx5_ib_read_kernel_wqe_sq()
    473   wq_size = max_t(int, wq_size, MLX5_SEND_WQE_BB);  in set_rq_size()
    562   ALIGN(max_t(int, inl_size, size), MLX5_SEND_WQE_BB) < MLX5_SIG_WQE_SIZE)  in calc_send_wqe()
    565   return ALIGN(max_t(int, inl_size, size), MLX5_SEND_WQE_BB);  in calc_send_wqe()
    616   qp->sq.wqe_cnt = wq_size / MLX5_SEND_WQE_BB;  in calc_sq_size()
    619   attr->cap.max_send_wr, wqe_size, MLX5_SEND_WQE_BB,  in calc_sq_size()
    624   qp->sq.wqe_shift = ilog2(MLX5_SEND_WQE_BB);  in calc_sq_size()
    994   qp->sq.wqe_shift = ilog2(MLX5_SEND_WQE_BB);  in _create_user_qp()
    1144  MLX5_SEND_WQE_BB;  in _create_kernel_qp()
    1147  ilog2(MLX5_SEND_WQE_BB),  in _create_kernel_qp()
    [all …]
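In the InfiniBand driver a WQE's size is tracked in 16-byte data segments (DS), so mlx5r_finish_wqe() (wr.c line 774) advances the producer index by DIV_ROUND_UP(size * 16, MLX5_SEND_WQE_BB) basic blocks and then PTR_ALIGN()s the write pointer to the start of the next WQEBB; dr_send.c line 477 below is the identical idiom. A sketch of the counter arithmetic with assumed values:

#include <stdio.h>

#define MLX5_SEND_WQE_BB 64
#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

int main(void)
{
	unsigned int size = 5;		/* hypothetical WQE: five 16-byte DS */
	unsigned int cur_post = 42;	/* hypothetical producer index, in WQEBBs */

	/* 5 DS = 80 bytes -> the WQE occupies 2 WQEBBs */
	cur_post += DIV_ROUND_UP(size * 16, MLX5_SEND_WQE_BB);

	printf("cur_post advanced to %u\n", cur_post);
	return 0;
}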
/openbmc/linux/drivers/net/ethernet/mellanox/mlx5/core/steering/

dr_send.c
    477   dr_qp->sq.pc += DIV_ROUND_UP(size * 16, MLX5_SEND_WQE_BB);  in dr_rdma_segments()