MLX5_SEND_WQE_BB
max_wqe_size = ALIGN(max_wqe_size, MLX5_SEND_WQE_BB);
return (max_wqe_bb_units * MLX5_SEND_WQE_BB) / max_wqe_size;
MLX5_SEND_WQE_BB;
ilog2(MLX5_SEND_WQE_BB),
MLX5_SET(wq, wq, log_wq_stride, ilog2(MLX5_SEND_WQE_BB));
min_t(size_t, buflen - bytes_copied, MLX5_SEND_WQE_BB);
wq_size = max_t(int, wq_size, MLX5_SEND_WQE_BB);
ALIGN(max_t(int, inl_size, size), MLX5_SEND_WQE_BB) < MLX5_SIG_WQE_SIZE)
return ALIGN(max_t(int, inl_size, size), MLX5_SEND_WQE_BB);
qp->sq.wqe_cnt = wq_size / MLX5_SEND_WQE_BB;
attr->cap.max_send_wr, wqe_size, MLX5_SEND_WQE_BB,
qp->sq.wqe_shift = ilog2(MLX5_SEND_WQE_BB);
qp->sq.wqe_shift = ilog2(MLX5_SEND_WQE_BB);
*size = ALIGN(*size, MLX5_SEND_WQE_BB >> 4);
qp->sq.cur_post += DIV_ROUND_UP(size * 16, MLX5_SEND_WQE_BB);
seg = PTR_ALIGN(seg, MLX5_SEND_WQE_BB);
return fragment_end + MLX5_SEND_WQE_BB;
MLX5_CAP_GEN(mdev, max_wqe_sz_sq) / MLX5_SEND_WQE_BB);
max_wqe_size = mlx5e_get_max_sq_aligned_wqebbs(mdev) * MLX5_SEND_WQE_BB;
useful_space = PAGE_SIZE - mlx5e_get_max_sq_wqebbs(mdev) + MLX5_SEND_WQE_BB;
total_pages = DIV_ROUND_UP(wqebbs * MLX5_SEND_WQE_BB, useful_space);
wqebbs = total_pages * (PAGE_SIZE / MLX5_SEND_WQE_BB);
MLX5_SEND_WQE_BB);
MLX5_SEND_WQE_BB / mlx5e_mpwrq_umr_entry_size(umr_mode));
MLX5_SET(wq, wq, log_wq_stride, ilog2(MLX5_SEND_WQE_BB));
u32 sq_stride = MLX5_SEND_WQE_BB;
(DIV_ROUND_UP(MLX5E_KSM_UMR_WQE_SZ(ksm_entries), MLX5_SEND_WQE_BB))
WARN_ON_ONCE(PAGE_SIZE / MLX5_SEND_WQE_BB < (u16)mlx5e_get_max_sq_wqebbs(mdev));
MLX5E_KSM_ENTRIES_PER_WQE(MLX5_SEND_WQE_BB * mlx5e_get_max_sq_aligned_wqebbs(mdev))
(DIV_ROUND_UP(sizeof(struct mlx5e_dump_wqe), MLX5_SEND_WQE_BB))
(DIV_ROUND_UP(sizeof(struct mlx5e_set_tls_static_params_wqe), MLX5_SEND_WQE_BB))
(DIV_ROUND_UP(sizeof(struct mlx5e_set_tls_progress_params_wqe), MLX5_SEND_WQE_BB))
(DIV_ROUND_UP(sizeof(struct mlx5e_get_tls_progress_params_wqe), MLX5_SEND_WQE_BB))
MLX5_SET(wq, wq, log_wq_stride, ilog2(MLX5_SEND_WQE_BB));
(DIV_ROUND_UP(sizeof(struct mlx5_aso_wqe), MLX5_SEND_WQE_BB))
(DIV_ROUND_UP(sizeof(struct mlx5_aso_wqe_data), MLX5_SEND_WQE_BB))
send_attr.len = MLX5_SEND_WQE_BB * 2 - sizeof(struct mlx5hws_wqe_ctrl_seg);
MLX5_SEND_WQE_BB - sizeof(struct mlx5hws_wqe_ctrl_seg));
memcpy(wqe_data, p, MLX5_SEND_WQE_BB);
MLX5_SET(wq, wq, log_wq_stride, ilog2(MLX5_SEND_WQE_BB));
*len = MLX5_SEND_WQE_BB;
dr_qp->sq.pc += DIV_ROUND_UP(size * 16, MLX5_SEND_WQE_BB);
MLX5_SET(wq, wq, log_wq_stride, ilog2(MLX5_SEND_WQE_BB));
u8 log_sq_stride = ilog2(MLX5_SEND_WQE_BB);
u16 sq_strides_offset = rq_byte_size / MLX5_SEND_WQE_BB;
/* Number of data segments (DS units) that fit in one send-WQE basic block.
 * NOTE(review): presumably 64 / 16 = 4 with the usual mlx5 constants — the
 * definitions of MLX5_SEND_WQE_BB / MLX5_SEND_WQE_DS are not in this view.
 */
#define MLX5_SEND_WQEBB_NUM_DS (MLX5_SEND_WQE_BB / MLX5_SEND_WQE_DS)
/* Upper bound on a single send WQE in bytes: the maximum number of basic
 * blocks a WQE may span times the basic-block size.
 */
#define MLX5_SEND_WQE_MAX_SIZE (MLX5_SEND_WQE_MAX_WQEBBS * MLX5_SEND_WQE_BB)
/* Size in bytes reserved for a signature (integrity) WQE: eight send-WQE
 * basic blocks. Used as an alignment/size ceiling by the signature path
 * (see the MLX5_SIG_WQE_SIZE comparison in the SQ sizing code).
 */
#define MLX5_SIG_WQE_SIZE (MLX5_SEND_WQE_BB * 8)