/openbmc/linux/block/
  bsg-lib.c
    49  job = blk_mq_rq_to_pdu(rq);  in bsg_transport_sg_io_fn()
    207  struct bsg_job *job = blk_mq_rq_to_pdu(rq);  in bsg_complete()
    234  struct bsg_job *job = blk_mq_rq_to_pdu(req);  in bsg_prepare_job()
    291  ret = bset->job_fn(blk_mq_rq_to_pdu(req));  in bsg_queue_rq()
    304  struct bsg_job *job = blk_mq_rq_to_pdu(req);  in bsg_init_rq()
    315  struct bsg_job *job = blk_mq_rq_to_pdu(req);  in bsg_exit_rq()

/openbmc/linux/include/scsi/
  scsi_tcq.h
    39  return blk_mq_rq_to_pdu(req);  in scsi_host_find_tag()

/openbmc/linux/drivers/nvme/host/
  apple.c
    337  struct apple_nvme_iod *iod = blk_mq_rq_to_pdu(req);  in apple_nvme_iod_list()
    345  struct apple_nvme_iod *iod = blk_mq_rq_to_pdu(req);  in apple_nvme_free_prps()
    360  struct apple_nvme_iod *iod = blk_mq_rq_to_pdu(req);  in apple_nvme_unmap_data()
    397  struct apple_nvme_iod *iod = blk_mq_rq_to_pdu(req);  in apple_nvme_setup_prps()
    492  struct apple_nvme_iod *iod = blk_mq_rq_to_pdu(req);  in apple_nvme_setup_prp_simple()
    511  struct apple_nvme_iod *iod = blk_mq_rq_to_pdu(req);  in apple_nvme_map_data()
    551  struct apple_nvme_iod *iod = blk_mq_rq_to_pdu(req);  in apple_nvme_unmap_rq()
    739  struct apple_nvme_iod *iod = blk_mq_rq_to_pdu(req);  in apple_nvme_queue_rq()
    788  struct apple_nvme_iod *iod = blk_mq_rq_to_pdu(req);  in apple_nvme_init_request()
    878  struct apple_nvme_iod *iod = blk_mq_rq_to_pdu(req);  in apple_nvme_timeout()

  rdma.c
    288  struct nvme_rdma_request *req = blk_mq_rq_to_pdu(rq);  in nvme_rdma_exit_request()
    298  struct nvme_rdma_request *req = blk_mq_rq_to_pdu(rq);  in nvme_rdma_init_request()
    1213  struct nvme_rdma_request *req = blk_mq_rq_to_pdu(rq);  in nvme_rdma_dma_unmap_req()
    1230  struct nvme_rdma_request *req = blk_mq_rq_to_pdu(rq);  in nvme_rdma_unmap_data()
    1460  struct nvme_rdma_request *req = blk_mq_rq_to_pdu(rq);  in nvme_rdma_dma_map_req()
    1520  struct nvme_rdma_request *req = blk_mq_rq_to_pdu(rq);  in nvme_rdma_map_data()
    1696  req = blk_mq_rq_to_pdu(rq);  in nvme_rdma_process_nvme_rsp()
    1936  struct nvme_rdma_request *req = blk_mq_rq_to_pdu(rq);  in nvme_rdma_complete_timed_out()
    1945  struct nvme_rdma_request *req = blk_mq_rq_to_pdu(rq);  in nvme_rdma_timeout()
    1984  struct nvme_rdma_request *req = blk_mq_rq_to_pdu(rq);  in nvme_rdma_queue_rq()
    [all …]

  tcp.c
    469  struct nvme_tcp_request *req = blk_mq_rq_to_pdu(rq);  in nvme_tcp_exit_request()
    479  struct nvme_tcp_request *req = blk_mq_rq_to_pdu(rq);  in nvme_tcp_init_request()
    560  req = blk_mq_rq_to_pdu(rq);  in nvme_tcp_process_nvme_cqe()
    676  req = blk_mq_rq_to_pdu(rq);  in nvme_tcp_handle_r2t()
    774  struct nvme_tcp_request *req = blk_mq_rq_to_pdu(rq);  in nvme_tcp_recv_data()
    861  struct nvme_tcp_request *req = blk_mq_rq_to_pdu(rq);  in nvme_tcp_recv_ddgst()
    874  struct nvme_tcp_request *req = blk_mq_rq_to_pdu(rq);  in nvme_tcp_recv_ddgst()
    2264  struct nvme_tcp_request *req = blk_mq_rq_to_pdu(rq);  in nvme_tcp_complete_timed_out()
    2273  struct nvme_tcp_request *req = blk_mq_rq_to_pdu(rq);  in nvme_tcp_timeout()
    2313  struct nvme_tcp_request *req = blk_mq_rq_to_pdu(rq);  in nvme_tcp_map_data()
    [all …]

  pci.c
    424  struct nvme_iod *iod = blk_mq_rq_to_pdu(req);  in nvme_pci_init_request()
    528  struct nvme_iod *iod = blk_mq_rq_to_pdu(req);  in nvme_free_prps()
    543  struct nvme_iod *iod = blk_mq_rq_to_pdu(req);  in nvme_unmap_data()
    583  struct nvme_iod *iod = blk_mq_rq_to_pdu(req);  in nvme_pci_setup_prps()
    689  struct nvme_iod *iod = blk_mq_rq_to_pdu(req);  in nvme_pci_setup_sgls()
    735  struct nvme_iod *iod = blk_mq_rq_to_pdu(req);  in nvme_setup_prp_simple()
    756  struct nvme_iod *iod = blk_mq_rq_to_pdu(req);  in nvme_setup_sgl_simple()
    773  struct nvme_iod *iod = blk_mq_rq_to_pdu(req);  in nvme_map_data()
    829  struct nvme_iod *iod = blk_mq_rq_to_pdu(req);  in nvme_map_metadata()
    841  struct nvme_iod *iod = blk_mq_rq_to_pdu(req);  in nvme_prep_rq()
    [all …]

  fc.c
    1836  struct nvme_fc_fcp_op *op = blk_mq_rq_to_pdu(rq);  in nvme_fc_exit_request()
    2146  struct nvme_fcp_op_w_sgl *op = blk_mq_rq_to_pdu(rq);  in nvme_fc_init_request()
    2458  struct nvme_fc_fcp_op *op = blk_mq_rq_to_pdu(req);  in nvme_fc_terminate_exchange()
    2571  struct nvme_fc_fcp_op *op = blk_mq_rq_to_pdu(rq);  in nvme_fc_timeout()
    2795  struct nvme_fc_fcp_op *op = blk_mq_rq_to_pdu(rq);  in nvme_fc_queue_rq()
    2852  struct nvme_fc_fcp_op *op = blk_mq_rq_to_pdu(rq);  in nvme_fc_complete_rq()

/openbmc/linux/drivers/scsi/
  scsi_lib.c
    227  scmd = blk_mq_rq_to_pdu(req);  in scsi_execute_cmd()
    535  struct scsi_cmnd *cmd = blk_mq_rq_to_pdu(req);  in scsi_end_request()
    1127  struct scsi_cmnd *cmd = blk_mq_rq_to_pdu(rq);  in scsi_initialize_rq()
    1156  scsi_mq_uninit_cmd(blk_mq_rq_to_pdu(rq));  in scsi_cleanup_rq()
    1179  struct scsi_cmnd *cmd = blk_mq_rq_to_pdu(req);  in scsi_setup_scsi_cmnd()
    1428  struct scsi_cmnd *cmd = blk_mq_rq_to_pdu(rq);  in scsi_complete()
    1551  struct scsi_cmnd *cmd = blk_mq_rq_to_pdu(req);  in scsi_prepare_cmd()
    1693  struct scsi_cmnd *cmd = blk_mq_rq_to_pdu(req);  in scsi_mq_set_rq_budget_token()
    1700  struct scsi_cmnd *cmd = blk_mq_rq_to_pdu(req);  in scsi_mq_get_rq_budget_token()
    1712  struct scsi_cmnd *cmd = blk_mq_rq_to_pdu(req);  in scsi_queue_rq()
    [all …]

  scsi_debugfs.c
    37  struct scsi_cmnd *cmd = blk_mq_rq_to_pdu(rq), *cmd2;  in scsi_show_rq()

  hosts.c
    589  struct scsi_cmnd *cmd = blk_mq_rq_to_pdu(rq);  in scsi_host_check_in_flight()
    683  struct scsi_cmnd *scmd = blk_mq_rq_to_pdu(rq);  in complete_all_cmds_iter()
    719  struct scsi_cmnd *sc = blk_mq_rq_to_pdu(req);  in __scsi_host_busy_iter_fn()

  scsi_ioctl.c
    351  struct scsi_cmnd *scmd = blk_mq_rq_to_pdu(rq);  in scsi_fill_sghdr_rq()
    375  struct scsi_cmnd *scmd = blk_mq_rq_to_pdu(rq);  in scsi_complete_sghdr_rq()
    445  scmd = blk_mq_rq_to_pdu(rq);  in sg_io()
    541  scmd = blk_mq_rq_to_pdu(rq);  in sg_scsi_ioctl()

  scsi_bsg.c
    34  scmd = blk_mq_rq_to_pdu(rq);  in scsi_bsg_sg_io_fn()

/openbmc/linux/drivers/nvme/target/
  loop.c
    76  struct nvme_loop_iod *iod = blk_mq_rq_to_pdu(req);  in nvme_loop_complete_rq()
    137  struct nvme_loop_iod *iod = blk_mq_rq_to_pdu(req);  in nvme_loop_queue_rq()
    208  struct nvme_loop_iod *iod = blk_mq_rq_to_pdu(req);  in nvme_loop_init_request()
    212  return nvme_loop_init_iod(ctrl, blk_mq_rq_to_pdu(req),  in nvme_loop_init_request()

/openbmc/linux/drivers/md/
  dm-rq.c
    124  return blk_mq_rq_to_pdu(rq);  in tio_from_request()
    461  struct dm_rq_target_io *tio = blk_mq_rq_to_pdu(rq);  in dm_mq_init_request()
    481  struct dm_rq_target_io *tio = blk_mq_rq_to_pdu(rq);  in dm_mq_queue_rq()

/openbmc/linux/drivers/mmc/core/
  queue.h
    27  return blk_mq_rq_to_pdu(rq);  in req_to_mmc_queue_req()

/openbmc/linux/drivers/block/
  virtio_blk.c
    337  struct virtblk_req *vbr = blk_mq_rq_to_pdu(req);  in virtblk_request_done()
    434  struct virtblk_req *vbr = blk_mq_rq_to_pdu(req);  in virtio_queue_rq()
    471  struct virtblk_req *vbr = blk_mq_rq_to_pdu(req);  in virtblk_prep_rq_batch()
    488  struct virtblk_req *vbr = blk_mq_rq_to_pdu(req);  in virtblk_add_req_batch()
    575  vbr = blk_mq_rq_to_pdu(req);  in virtblk_submit_zone_report()
    874  vbr = blk_mq_rq_to_pdu(req);  in virtblk_get_id()
    1253  virtblk_unmap_data(req, blk_mq_rq_to_pdu(req));  in virtblk_complete_batch()

  nbd.c
    364  struct nbd_cmd *cmd = blk_mq_rq_to_pdu(req);  in nbd_complete_rq()
    428  struct nbd_cmd *cmd = blk_mq_rq_to_pdu(req);  in nbd_xmit_timeout()
    786  cmd = blk_mq_rq_to_pdu(req);  in nbd_handle_reply()
    923  struct nbd_cmd *cmd = blk_mq_rq_to_pdu(req);  in nbd_clear_req()
    1095  struct nbd_cmd *cmd = blk_mq_rq_to_pdu(bd->rq);  in nbd_queue_rq()
    1788  struct nbd_cmd *cmd = blk_mq_rq_to_pdu(rq);  in nbd_init_request()

  ublk_drv.c
    646  struct ublk_rq_data *data = blk_mq_rq_to_pdu(req);  in ublk_init_req_ref()
    656  struct ublk_rq_data *data = blk_mq_rq_to_pdu(req);  in ublk_get_req_ref()
    668  struct ublk_rq_data *data = blk_mq_rq_to_pdu(req);  in ublk_put_req_ref()
    1277  struct ublk_rq_data *data = blk_mq_rq_to_pdu(rq);  in ublk_queue_cmd()

/openbmc/linux/drivers/mtd/ubi/
  block.c
    182  struct ubiblock_pdu *pdu = blk_mq_rq_to_pdu(req);  in ubiblock_read()
    317  struct ubiblock_pdu *pdu = blk_mq_rq_to_pdu(req);  in ubiblock_init_request()

/openbmc/linux/drivers/block/mtip32xx/
  mtip32xx.c
    155  return blk_mq_rq_to_pdu(blk_mq_tag_to_rq(dd->tags.tags[0], tag));  in mtip_cmd_from_tag()
    999  int_cmd = blk_mq_rq_to_pdu(rq);  in mtip_exec_internal_command()
    2431  struct mtip_cmd *cmd = blk_mq_rq_to_pdu(rq);  in mtip_softirq_done_fn()
    2446  struct mtip_cmd *cmd = blk_mq_rq_to_pdu(req);  in mtip_abort_cmd()
    3253  struct mtip_cmd *cmd = blk_mq_rq_to_pdu(rq);  in mtip_check_unal_depth()
    3277  struct mtip_cmd *cmd = blk_mq_rq_to_pdu(rq);  in mtip_issue_reserved_cmd()
    3315  struct mtip_cmd *cmd = blk_mq_rq_to_pdu(rq);  in mtip_queue_rq()
    3336  struct mtip_cmd *cmd = blk_mq_rq_to_pdu(rq);  in mtip_free_cmd()
    3349  struct mtip_cmd *cmd = blk_mq_rq_to_pdu(rq);  in mtip_init_cmd()
    3365  struct mtip_cmd *cmd = blk_mq_rq_to_pdu(req);  in mtip_cmd_timeout()

/openbmc/linux/drivers/s390/block/
  scm_blk.c
    257  error = blk_mq_rq_to_pdu(scmrq->request[i]);  in scm_request_finish()
    420  blk_status_t *error = blk_mq_rq_to_pdu(req);  in scm_blk_request_done()

  dasd_fba.c
    352  blk_mq_rq_to_pdu(req));  in dasd_fba_build_cp_discard()
    487  blk_mq_rq_to_pdu(req));  in dasd_fba_build_cp_regular()

/openbmc/linux/drivers/block/null_blk/
  main.c
    875  end_cmd(blk_mq_rq_to_pdu(rq));  in null_complete_rq()
    1566  struct nullb_cmd *cmd = blk_mq_rq_to_pdu(rq);  in should_timeout_request()
    1574  struct nullb_cmd *cmd = blk_mq_rq_to_pdu(rq);  in should_requeue_request()
    1676  cmd = blk_mq_rq_to_pdu(req);  in null_poll()
    1691  struct nullb_cmd *cmd = blk_mq_rq_to_pdu(rq);  in null_timeout_rq()
    1725  struct nullb_cmd *cmd = blk_mq_rq_to_pdu(rq);  in null_queue_rq()

/openbmc/linux/drivers/target/
  target_core_pscsi.c
    958  scmd = blk_mq_rq_to_pdu(req);  in pscsi_execute_cmd()
    1009  struct scsi_cmnd *scmd = blk_mq_rq_to_pdu(req);  in pscsi_req_done()

/openbmc/linux/drivers/block/aoe/
  aoedev.c
    171  req = blk_mq_rq_to_pdu(rq);  in aoe_failip()
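
All of the call sites above rely on the same blk-mq convention: the driver declares cmd_size in its blk_mq_tag_set, blk-mq allocates that many bytes of driver-private PDU space directly behind each struct request it creates, and blk_mq_rq_to_pdu() returns a pointer to that space. The sketch below is a minimal, hypothetical illustration of the pattern; the foo_* identifiers and struct foo_cmd are invented for this example and do not appear in the tree.

/*
 * Minimal sketch (hypothetical foo_* names): reserve per-request PDU space
 * via cmd_size, then recover it with blk_mq_rq_to_pdu().
 */
#include <linux/blk-mq.h>

struct foo_cmd {			/* driver-private per-request data (the PDU) */
	blk_status_t status;
};

static int foo_init_request(struct blk_mq_tag_set *set, struct request *rq,
			    unsigned int hctx_idx, unsigned int numa_node)
{
	struct foo_cmd *cmd = blk_mq_rq_to_pdu(rq);	/* one-time PDU setup */

	cmd->status = BLK_STS_OK;
	return 0;
}

static blk_status_t foo_queue_rq(struct blk_mq_hw_ctx *hctx,
				 const struct blk_mq_queue_data *bd)
{
	struct request *rq = bd->rq;
	struct foo_cmd *cmd = blk_mq_rq_to_pdu(rq);	/* same PDU at I/O time */

	blk_mq_start_request(rq);
	/* ... issue the request to the hardware, tracking state in cmd ... */
	blk_mq_end_request(rq, cmd->status);
	return BLK_STS_OK;
}

static const struct blk_mq_ops foo_mq_ops = {
	.queue_rq	= foo_queue_rq,
	.init_request	= foo_init_request,
};

/*
 * The driver's blk_mq_tag_set would set .ops = &foo_mq_ops and
 * .cmd_size = sizeof(struct foo_cmd). blk_mq_rq_to_pdu() is effectively
 * "return rq + 1", so no extra allocation or lookup is needed per request.
 */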