Searched refs:nr_io_queues (Results 1 – 11 of 11) sorted by relevance

/openbmc/linux/drivers/nvme/target/
loop.c
318 unsigned int nr_io_queues; in nvme_loop_init_io_queues() local
321 nr_io_queues = min(opts->nr_io_queues, num_online_cpus()); in nvme_loop_init_io_queues()
322 ret = nvme_set_queue_count(&ctrl->ctrl, &nr_io_queues); in nvme_loop_init_io_queues()
323 if (ret || !nr_io_queues) in nvme_loop_init_io_queues()
326 dev_info(ctrl->ctrl.device, "creating %d I/O queues.\n", nr_io_queues); in nvme_loop_init_io_queues()
328 for (i = 1; i <= nr_io_queues; i++) { in nvme_loop_init_io_queues()
575 ctrl->queues = kcalloc(opts->nr_io_queues + 1, sizeof(*ctrl->queues), in nvme_loop_create_ctrl()
593 if (opts->nr_io_queues) { in nvme_loop_create_ctrl()
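The loop.c lines above show the shape shared by all the fabrics transports in these results: cap the requested count at the number of online CPUs, let nvme_set_queue_count() negotiate it down, then bring up queues 1..nr_io_queues (queue 0 being the admin queue). A sketch of that flow; the per-queue connect call and the error unwinding are not in the listing and are assumptions:

static int nvme_loop_init_io_queues(struct nvme_loop_ctrl *ctrl)
{
	struct nvmf_ctrl_options *opts = ctrl->ctrl.opts;
	unsigned int nr_io_queues;
	int ret, i;

	/* Never ask for more I/O queues than there are online CPUs. */
	nr_io_queues = min(opts->nr_io_queues, num_online_cpus());

	/* The controller may grant fewer queues than requested. */
	ret = nvme_set_queue_count(&ctrl->ctrl, &nr_io_queues);
	if (ret || !nr_io_queues)
		return ret;

	dev_info(ctrl->ctrl.device, "creating %d I/O queues.\n", nr_io_queues);

	/* Queue 0 is the admin queue; I/O queues start at index 1.
	 * The connect call and unwind path below are assumed, not shown above. */
	for (i = 1; i <= nr_io_queues; i++) {
		ret = nvmf_connect_io_queue(&ctrl->ctrl, i);
		if (ret)
			goto out_destroy_queues;
		ctrl->ctrl.queue_count++;
	}
	return 0;

out_destroy_queues:
	nvme_loop_destroy_io_queues(ctrl);
	return ret;
}
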
/openbmc/linux/drivers/nvme/host/
fabrics.c
668 opts->nr_io_queues = num_online_cpus(); in nvmf_parse_options()
766 opts->nr_io_queues = min_t(unsigned int, in nvmf_parse_options()
969 opts->nr_io_queues = 0; in nvmf_parse_options()
999 void nvmf_set_io_queues(struct nvmf_ctrl_options *opts, u32 nr_io_queues, in nvmf_set_io_queues() argument
1002 if (opts->nr_write_queues && opts->nr_io_queues < nr_io_queues) { in nvmf_set_io_queues()
1008 io_queues[HCTX_TYPE_READ] = opts->nr_io_queues; in nvmf_set_io_queues()
1009 nr_io_queues -= io_queues[HCTX_TYPE_READ]; in nvmf_set_io_queues()
1011 min(opts->nr_write_queues, nr_io_queues); in nvmf_set_io_queues()
1012 nr_io_queues -= io_queues[HCTX_TYPE_DEFAULT]; in nvmf_set_io_queues()
1020 min(opts->nr_io_queues, nr_io_queues); in nvmf_set_io_queues()
[all …]
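Taken together, the nvmf_set_io_queues() lines above (1002–1020) split a granted queue count across the blk-mq hardware context types. A sketch of that split, reconstructed from the fragments shown; the io_queues[HCTX_MAX_TYPES] parameter and the poll branch are not visible in the listing and are assumptions:

void nvmf_set_io_queues(struct nvmf_ctrl_options *opts, u32 nr_io_queues,
			u32 io_queues[HCTX_MAX_TYPES])	/* array parameter assumed */
{
	if (opts->nr_write_queues && opts->nr_io_queues < nr_io_queues) {
		/* Separate read and write queues: reads get their own set,
		 * writes take what is left, capped at nr_write_queues. */
		io_queues[HCTX_TYPE_READ] = opts->nr_io_queues;
		nr_io_queues -= io_queues[HCTX_TYPE_READ];
		io_queues[HCTX_TYPE_DEFAULT] =
			min(opts->nr_write_queues, nr_io_queues);
		nr_io_queues -= io_queues[HCTX_TYPE_DEFAULT];
	} else {
		/* Reads and writes share the default queue set. */
		io_queues[HCTX_TYPE_DEFAULT] =
			min(opts->nr_io_queues, nr_io_queues);
		nr_io_queues -= io_queues[HCTX_TYPE_DEFAULT];
	}

	/* Dedicated poll queues only if any capacity remains (assumed branch). */
	if (opts->nr_poll_queues && nr_io_queues)
		io_queues[HCTX_TYPE_POLL] =
			min(opts->nr_poll_queues, nr_io_queues);
}
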
fabrics.h
116 unsigned int nr_io_queues; member
201 return min(opts->nr_io_queues, num_online_cpus()) + in nvmf_nr_io_queues()
218 void nvmf_set_io_queues(struct nvmf_ctrl_options *opts, u32 nr_io_queues,
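Line 201 shows only the first term of nvmf_nr_io_queues(); the rest of the expression is truncated in the listing. A plausible reconstruction, with the write and poll terms marked as assumptions:

static inline unsigned int nvmf_nr_io_queues(struct nvmf_ctrl_options *opts)
{
	/* Each class is capped at the online CPU count; the write and poll
	 * terms below are assumed, as the listing truncates the expression. */
	return min(opts->nr_io_queues, num_online_cpus()) +
	       min(opts->nr_write_queues, num_online_cpus()) +
	       min(opts->nr_poll_queues, num_online_cpus());
}
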
pci.c
1472 static int nvme_cmb_qdepth(struct nvme_dev *dev, int nr_io_queues, in nvme_cmb_qdepth() argument
1479 if (q_size_aligned * nr_io_queues > dev->cmb_size) { in nvme_cmb_qdepth()
1480 u64 mem_per_q = div_u64(dev->cmb_size, nr_io_queues); in nvme_cmb_qdepth()
1694 static unsigned long db_bar_size(struct nvme_dev *dev, unsigned nr_io_queues) in db_bar_size() argument
1696 return NVME_REG_DBS + ((nr_io_queues + 1) * 8 * dev->db_stride); in db_bar_size()
2215 static int nvme_setup_irqs(struct nvme_dev *dev, unsigned int nr_io_queues) in nvme_setup_irqs() argument
2230 poll_queues = min(dev->nr_poll_queues, nr_io_queues - 1); in nvme_setup_irqs()
2247 irq_queues += (nr_io_queues - poll_queues); in nvme_setup_irqs()
2269 unsigned int nr_io_queues; in nvme_setup_io_queues() local
2280 nr_io_queues = dev->nr_allocated_queues - 1; in nvme_setup_io_queues()
[all …]
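db_bar_size() (lines 1694–1696) computes how large the doorbell region must be for the admin queue plus nr_io_queues I/O queues: each queue pair has a submission-queue tail and a completion-queue head doorbell, 4 bytes each, scaled by the controller's doorbell stride. A worked example, assuming NVME_REG_DBS is 0x1000 and a stride of 1:

static unsigned long db_bar_size(struct nvme_dev *dev, unsigned nr_io_queues)
{
	/* (admin + I/O queues) * 2 doorbells * 4 bytes * stride */
	return NVME_REG_DBS + ((nr_io_queues + 1) * 8 * dev->db_stride);
}

/*
 * Example (assumed values): db_stride == 1 and 32 I/O queues:
 *   0x1000 + (32 + 1) * 8 * 1 = 4096 + 264 = 4360 bytes of BAR space.
 */
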
fc.c
2899 unsigned int nr_io_queues; in nvme_fc_create_io_queues() local
2902 nr_io_queues = min(min(opts->nr_io_queues, num_online_cpus()), in nvme_fc_create_io_queues()
2904 ret = nvme_set_queue_count(&ctrl->ctrl, &nr_io_queues); in nvme_fc_create_io_queues()
2911 ctrl->ctrl.queue_count = nr_io_queues + 1; in nvme_fc_create_io_queues()
2912 if (!nr_io_queues) in nvme_fc_create_io_queues()
2953 unsigned int nr_io_queues; in nvme_fc_recreate_io_queues() local
2956 nr_io_queues = min(min(opts->nr_io_queues, num_online_cpus()), in nvme_fc_recreate_io_queues()
2958 ret = nvme_set_queue_count(&ctrl->ctrl, &nr_io_queues); in nvme_fc_recreate_io_queues()
2965 if (!nr_io_queues && prior_ioq_cnt) { in nvme_fc_recreate_io_queues()
2972 ctrl->ctrl.queue_count = nr_io_queues + 1; in nvme_fc_recreate_io_queues()
[all …]
rdma.c
719 unsigned int nr_io_queues; in nvme_rdma_alloc_io_queues() local
722 nr_io_queues = nvmf_nr_io_queues(opts); in nvme_rdma_alloc_io_queues()
723 ret = nvme_set_queue_count(&ctrl->ctrl, &nr_io_queues); in nvme_rdma_alloc_io_queues()
727 if (nr_io_queues == 0) { in nvme_rdma_alloc_io_queues()
733 ctrl->ctrl.queue_count = nr_io_queues + 1; in nvme_rdma_alloc_io_queues()
735 "creating %d I/O queues.\n", nr_io_queues); in nvme_rdma_alloc_io_queues()
737 nvmf_set_io_queues(opts, nr_io_queues, ctrl->io_queues); in nvme_rdma_alloc_io_queues()
2270 ctrl->ctrl.queue_count = opts->nr_io_queues + opts->nr_write_queues + in nvme_rdma_create_ctrl()
apple.c
983 unsigned int nr_io_queues = 1; in apple_nvme_reset_work() local
1133 nr_io_queues = 1; in apple_nvme_reset_work()
1134 ret = nvme_set_queue_count(&anv->ctrl, &nr_io_queues); in apple_nvme_reset_work()
1137 if (nr_io_queues != 1) { in apple_nvme_reset_work()
1142 anv->ctrl.queue_count = nr_io_queues + 1; in apple_nvme_reset_work()
tcp.c
1812 unsigned int nr_io_queues; in nvme_tcp_alloc_io_queues() local
1815 nr_io_queues = nvmf_nr_io_queues(ctrl->opts); in nvme_tcp_alloc_io_queues()
1816 ret = nvme_set_queue_count(ctrl, &nr_io_queues); in nvme_tcp_alloc_io_queues()
1820 if (nr_io_queues == 0) { in nvme_tcp_alloc_io_queues()
1826 ctrl->queue_count = nr_io_queues + 1; in nvme_tcp_alloc_io_queues()
1828 "creating %d I/O queues.\n", nr_io_queues); in nvme_tcp_alloc_io_queues()
1830 nvmf_set_io_queues(ctrl->opts, nr_io_queues, in nvme_tcp_alloc_io_queues()
2529 ctrl->ctrl.queue_count = opts->nr_io_queues + opts->nr_write_queues + in nvme_tcp_create_ctrl()
auth.c
57 return ctrl->opts->nr_io_queues + ctrl->opts->nr_write_queues + in ctrl_max_dhchaps()
core.c
1552 int status, nr_io_queues; in nvme_set_queue_count() local
1574 nr_io_queues = min(result & 0xffff, result >> 16) + 1; in nvme_set_queue_count()
1575 *count = min(*count, nr_io_queues); in nvme_set_queue_count()
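core.c lines 1552–1575 belong to nvme_set_queue_count(), the negotiation helper every driver above calls: it asks the controller for a number of queues and clamps the caller's request to what was actually granted (the Set Features result carries the granted submission queues in bits 0–15 and completion queues in bits 16–31, both zero-based). A sketch of that flow; the Set Features call and the degraded-controller handling are not in the listing and are filled in as assumptions:

int nvme_set_queue_count(struct nvme_ctrl *ctrl, int *count)
{
	/* Both fields are zero-based: request (*count - 1) SQs and CQs. */
	u32 q_count = (*count - 1) | ((*count - 1) << 16);
	u32 result;
	int status, nr_io_queues;

	status = nvme_set_features(ctrl, NVME_FEAT_NUM_QUEUES, q_count,
				   NULL, 0, &result);
	if (status < 0)
		return status;

	if (status > 0) {
		/* A degraded controller may refuse; keep only the admin queue. */
		*count = 0;
	} else {
		/* Grant is min(SQs allocated, CQs allocated), made 1-based. */
		nr_io_queues = min(result & 0xffff, result >> 16) + 1;
		*count = min(*count, nr_io_queues);
	}
	return 0;
}
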
/openbmc/u-boot/drivers/nvme/
nvme.c
556 int nr_io_queues; in nvme_setup_io_queues() local
559 nr_io_queues = 1; in nvme_setup_io_queues()
560 result = nvme_set_queue_count(dev, nr_io_queues); in nvme_setup_io_queues()
564 dev->max_qid = nr_io_queues; in nvme_setup_io_queues()
567 nvme_free_queues(dev, nr_io_queues + 1); in nvme_setup_io_queues()
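The U-Boot driver keeps the setup minimal: it requests exactly one I/O queue and records it as max_qid. A sketch of the surrounding function; nvme_create_io_queues() and the exact error handling are not in the listing and are assumptions:

static int nvme_setup_io_queues(struct nvme_dev *dev)
{
	int nr_io_queues;
	int result;

	/* U-Boot drives a single I/O queue besides the admin queue. */
	nr_io_queues = 1;
	result = nvme_set_queue_count(dev, nr_io_queues);
	if (result <= 0)
		return result;

	dev->max_qid = nr_io_queues;

	/* Release any queues above the granted count, then (re)create the
	 * I/O queue (create helper name assumed). */
	nvme_free_queues(dev, nr_io_queues + 1);
	nvme_create_io_queues(dev);

	return 0;
}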