
Searched refs:sg (Results 151 – 175 of 964) sorted by relevance


/openbmc/linux/drivers/media/pci/cx88/
cx88-core.c
75 struct scatterlist *sg; in cx88_risc_field() local
88 sg = sglist; in cx88_risc_field()
91 offset -= sg_dma_len(sg); in cx88_risc_field()
92 sg = sg_next(sg); in cx88_risc_field()
108 (sg_dma_len(sg) - offset)); in cx88_risc_field()
110 todo -= (sg_dma_len(sg) - offset); in cx88_risc_field()
112 sg = sg_next(sg); in cx88_risc_field()
113 while (todo > sg_dma_len(sg)) { in cx88_risc_field()
115 sg_dma_len(sg)); in cx88_risc_field()
117 todo -= sg_dma_len(sg); in cx88_risc_field()
[all …]
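
Note: the cx88-core.c excerpt above (and the bttv-risc.c entry further down this page) walks an already DMA-mapped scatterlist with sg_dma_len()/sg_next(), first skipping a byte offset and then consuming the remaining bytes entry by entry. A minimal, hedged sketch of that skip loop follows; it is not the driver's code and the helper name is made up.

#include <linux/scatterlist.h>

/* Hypothetical helper, not from cx88: advance to the entry that holds
 * byte *offset of the mapped buffer, reducing *offset to an offset
 * within the returned entry. */
static struct scatterlist *sg_skip_offset(struct scatterlist *sg,
					  unsigned int *offset)
{
	while (*offset && *offset >= sg_dma_len(sg)) {
		*offset -= sg_dma_len(sg);	/* consume the whole entry */
		sg = sg_next(sg);
	}
	return sg;
}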
/openbmc/linux/drivers/virtio/
virtio_balloon.c
147 struct scatterlist sg; in tell_host() local
153 virtqueue_add_outbuf(vq, &sg, 1, vb, GFP_KERNEL); in tell_host()
162 struct scatterlist *sg, unsigned int nents) in virtballoon_free_page_report() argument
376 struct scatterlist sg; in stats_handle_request() local
385 virtqueue_add_outbuf(vq, &sg, 1, vb, GFP_KERNEL); in stats_handle_request()
539 struct scatterlist sg; in init_vqs() local
550 err = virtqueue_add_outbuf(vb->stats_vq, &sg, 1, vb, in init_vqs()
584 struct scatterlist sg; in send_cmd_id_start() local
603 struct scatterlist sg; in send_cmd_id_stop() local
622 struct scatterlist sg; in get_free_page_and_send() local
[all …]
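
Note: every virtio_balloon.c hit above has the same shape: a one-entry scatterlist on the stack describing a buffer, posted to the host with virtqueue_add_outbuf(). A minimal sketch of that pattern, with placeholder names (post_one_outbuf, buf, len and token are not from the driver):

#include <linux/scatterlist.h>
#include <linux/virtio.h>

/* Hedged sketch: describe a virtually contiguous buffer with a single
 * scatterlist entry and queue it as an out-buffer; 'token' is what
 * virtqueue_get_buf() later returns once the host has consumed it. */
static int post_one_outbuf(struct virtqueue *vq, void *buf,
			   unsigned int len, void *token)
{
	struct scatterlist sg;

	sg_init_one(&sg, buf, len);
	return virtqueue_add_outbuf(vq, &sg, 1, token, GFP_KERNEL);
}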
/openbmc/linux/block/
blk-merge.c
448 if (!*sg) in blk_next_sg()
458 sg_unmark_end(*sg); in blk_next_sg()
459 return sg_next(*sg); in blk_next_sg()
464 struct scatterlist **sg) in blk_bvec_map_sg() argument
486 *sg = blk_next_sg(sg, sglist); in blk_bvec_map_sg()
487 sg_set_page(*sg, page, len, offset); in blk_bvec_map_sg()
500 *sg = blk_next_sg(sg, sglist); in __blk_bvec_map_sg()
513 if (!*sg) in __blk_segment_map_sg_merge()
522 (*sg)->length += nbytes; in __blk_segment_map_sg_merge()
529 struct scatterlist **sg) in __blk_bios_map_sg() argument
[all …]
/openbmc/linux/tools/virtio/
vringh_test.c
330 struct scatterlist sg[4]; in parallel_test() local
355 sg_init_table(sg, num_sg = 3); in parallel_test()
356 sg_set_buf(&sg[0], (void *)dbuf, 1); in parallel_test()
357 sg_set_buf(&sg[1], (void *)dbuf + 1, 2); in parallel_test()
358 sg_set_buf(&sg[2], (void *)dbuf + 3, 1); in parallel_test()
361 sg_init_table(sg, num_sg = 2); in parallel_test()
362 sg_set_buf(&sg[0], (void *)dbuf, 1); in parallel_test()
366 sg_init_table(sg, num_sg = 1); in parallel_test()
367 sg_set_buf(&sg[0], (void *)dbuf, 4); in parallel_test()
370 sg_init_table(sg, num_sg = 4); in parallel_test()
[all …]
/openbmc/linux/drivers/infiniband/hw/mlx4/
mr.c
193 struct scatterlist *sg; in mlx4_ib_umem_write_mtt() local
203 for_each_sgtable_dma_sg(&umem->sgt_append.sgt, sg, i) { in mlx4_ib_umem_write_mtt()
204 if (cur_start_addr + len == sg_dma_address(sg)) { in mlx4_ib_umem_write_mtt()
206 len += sg_dma_len(sg); in mlx4_ib_umem_write_mtt()
222 cur_start_addr = sg_dma_address(sg); in mlx4_ib_umem_write_mtt()
223 len = sg_dma_len(sg); in mlx4_ib_umem_write_mtt()
267 struct scatterlist *sg; in mlx4_ib_umem_calc_optimal_mtt_size() local
282 current_block_start = sg_dma_address(sg); in mlx4_ib_umem_calc_optimal_mtt_size()
310 next_block_start = sg_dma_address(sg); in mlx4_ib_umem_calc_optimal_mtt_size()
335 current_block_len = sg_dma_len(sg); in mlx4_ib_umem_calc_optimal_mtt_size()
[all …]
/openbmc/linux/drivers/gpu/drm/
drm_prime.c
815 struct sg_table *sg; in drm_prime_pages_to_sg() local
819 sg = kmalloc(sizeof(struct sg_table), GFP_KERNEL); in drm_prime_pages_to_sg()
820 if (!sg) in drm_prime_pages_to_sg()
831 kfree(sg); in drm_prime_pages_to_sg()
832 sg = ERR_PTR(err); in drm_prime_pages_to_sg()
834 return sg; in drm_prime_pages_to_sg()
851 struct scatterlist *sg; in drm_prime_get_contiguous_size() local
855 for_each_sgtable_dma_sg(sgt, sg, i) { in drm_prime_get_contiguous_size()
856 unsigned int len = sg_dma_len(sg); in drm_prime_get_contiguous_size()
860 if (sg_dma_address(sg) != expected) in drm_prime_get_contiguous_size()
[all …]
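
Note: drm_prime_get_contiguous_size() in the excerpt above walks the DMA-mapped table and stops at the first gap in bus addresses. A hedged reconstruction of that logic (simplified, not the exact upstream body):

#include <linux/scatterlist.h>

/* Sketch: count how many bytes at the start of a DMA-mapped sg_table
 * are contiguous in bus-address space. */
static size_t sgt_contiguous_size(struct sg_table *sgt)
{
	dma_addr_t expected = sg_dma_address(sgt->sgl);
	struct scatterlist *sg;
	size_t size = 0;
	int i;

	for_each_sgtable_dma_sg(sgt, sg, i) {
		unsigned int len = sg_dma_len(sg);

		if (!len)
			break;
		if (sg_dma_address(sg) != expected)
			break;			/* first hole ends the run */
		expected += len;
		size += len;
	}
	return size;
}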
/openbmc/linux/drivers/target/
target_core_iblock.c
463 buf = kmap(sg_page(sg)) + sg->offset; in iblock_execute_zero_out()
471 kunmap(sg_page(sg)); in iblock_execute_zero_out()
493 struct scatterlist *sg; in iblock_execute_write_same() local
510 sg = &cmd->t_data_sg[0]; in iblock_execute_write_same()
540 while (bio_add_page(bio, sg_page(sg), sg->length, sg->offset) in iblock_execute_write_same()
541 != sg->length) { in iblock_execute_write_same()
553 sectors -= sg->length >> SECTOR_SHIFT; in iblock_execute_write_same()
732 struct scatterlist *sg; in iblock_execute_rw() local
789 for_each_sg(sgl, sg, sgl_nents, i) { in iblock_execute_rw()
795 while (bio_add_page(bio, sg_page(sg), sg->length, sg->offset) in iblock_execute_rw()
[all …]
target_core_rd.c
67 struct scatterlist *sg; in rd_release_sgl_table() local
71 sg = sg_table[i].sg_table; in rd_release_sgl_table()
75 pg = sg_page(&sg[j]); in rd_release_sgl_table()
81 kfree(sg); in rd_release_sgl_table()
119 struct scatterlist *sg; in rd_allocate_sgl_table() local
134 sg = kmalloc_array(sg_per_table + chain_entry, sizeof(*sg), in rd_allocate_sgl_table()
136 if (!sg) in rd_allocate_sgl_table()
143 max_sg_per_table + 1, sg); in rd_allocate_sgl_table()
146 sg_table[i].sg_table = sg; in rd_allocate_sgl_table()
159 sg_assign_page(&sg[j], pg); in rd_allocate_sgl_table()
[all …]
/openbmc/linux/drivers/mmc/core/
queue.c
168 struct scatterlist *sg; in mmc_alloc_sg() local
170 sg = kmalloc_array(sg_len, sizeof(*sg), gfp); in mmc_alloc_sg()
171 if (sg) in mmc_alloc_sg()
172 sg_init_table(sg, sg_len); in mmc_alloc_sg()
174 return sg; in mmc_alloc_sg()
211 mq_rq->sg = mmc_alloc_sg(mmc_get_max_segments(host), GFP_KERNEL); in mmc_mq_init_request()
212 if (!mq_rq->sg) in mmc_mq_init_request()
223 kfree(mq_rq->sg); in mmc_mq_exit_request()
224 mq_rq->sg = NULL; in mmc_mq_exit_request()
523 return blk_rq_map_sg(mq->queue, req, mqrq->sg); in mmc_queue_map_sg()
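
Note: mmc_alloc_sg() in the excerpt above is the usual way to allocate a flat scatterlist: kmalloc_array() for the entries, then sg_init_table() to zero them and set the end marker. A self-contained restatement as a sketch (hypothetical helper name, not the exact file):

#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Sketch: allocate and initialise sg_len scatterlist entries. */
static struct scatterlist *alloc_flat_sg(unsigned int sg_len, gfp_t gfp)
{
	struct scatterlist *sg;

	sg = kmalloc_array(sg_len, sizeof(*sg), gfp);
	if (sg)
		sg_init_table(sg, sg_len);	/* zero + mark last entry */
	return sg;
}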
/openbmc/linux/drivers/gpu/drm/rockchip/
rockchip_drm_gem.c
455 struct sg_table *sg, in rockchip_gem_iommu_map_sg() argument
458 rk_obj->sgt = sg; in rockchip_gem_iommu_map_sg()
465 struct sg_table *sg, in rockchip_gem_dma_map_sg() argument
468 int err = dma_map_sgtable(drm->dev, sg, DMA_BIDIRECTIONAL, 0); in rockchip_gem_dma_map_sg()
472 if (drm_prime_get_contiguous_size(sg) < attach->dmabuf->size) { in rockchip_gem_dma_map_sg()
474 dma_unmap_sgtable(drm->dev, sg, DMA_BIDIRECTIONAL, 0); in rockchip_gem_dma_map_sg()
478 rk_obj->dma_addr = sg_dma_address(sg->sgl); in rockchip_gem_dma_map_sg()
479 rk_obj->sgt = sg; in rockchip_gem_dma_map_sg()
486 struct sg_table *sg) in rockchip_gem_prime_import_sg_table() argument
497 ret = rockchip_gem_iommu_map_sg(drm, attach, sg, rk_obj); in rockchip_gem_prime_import_sg_table()
[all …]
/openbmc/linux/drivers/dma/qcom/
qcom_adm.c
236 u32 remainder = sg_dma_len(sg); in adm_process_fc_descriptors()
238 u32 mem_addr = sg_dma_address(sg); in adm_process_fc_descriptors()
280 if (sg_is_last(sg)) in adm_process_fc_descriptors()
283 if (box_desc && sg_is_last(sg)) in adm_process_fc_descriptors()
299 struct scatterlist *sg, in adm_process_non_fc_descriptors() argument
303 u32 remainder = sg_dma_len(sg); in adm_process_non_fc_descriptors()
330 if (sg_is_last(sg)) in adm_process_non_fc_descriptors()
356 struct scatterlist *sg; in adm_prep_slave_sg() local
393 for_each_sg(sgl, sg, sg_len, i) { in adm_prep_slave_sg()
397 if (sg_dma_len(sg) % burst) in adm_prep_slave_sg()
[all …]
/openbmc/linux/net/rds/
rdma.c
272 sg = kmalloc_array(nents, sizeof(*sg), GFP_KERNEL); in __rds_rdma_map()
273 if (!sg) { in __rds_rdma_map()
278 sg_init_table(sg, nents); in __rds_rdma_map()
301 kfree(sg); in __rds_rdma_map()
331 kfree(sg); in __rds_rdma_map()
768 struct scatterlist *sg; in rds_cmsg_rdma_args() local
771 sg_set_page(sg, pages[j], in rds_cmsg_rdma_args()
775 sg_dma_len(sg) = sg->length; in rds_cmsg_rdma_args()
777 sg->offset, sg->length, iov->addr, iov->bytes); in rds_cmsg_rdma_args()
779 iov->addr += sg->length; in rds_cmsg_rdma_args()
[all …]
ib_frmr.c
134 ret = ib_map_mr_sg_zbva(frmr->mr, ibmr->sg, ibmr->sg_dma_len, in rds_ib_post_reg_frmr()
190 struct scatterlist *sg, unsigned int sg_len) in rds_ib_map_frmr() argument
203 ibmr->sg = sg; in rds_ib_map_frmr()
208 ibmr->sg_dma_len = ib_dma_map_sg(dev, ibmr->sg, ibmr->sg_len, in rds_ib_map_frmr()
221 unsigned int dma_len = sg_dma_len(&ibmr->sg[i]); in rds_ib_map_frmr()
222 u64 dma_addr = sg_dma_address(&ibmr->sg[i]); in rds_ib_map_frmr()
260 ib_dma_unmap_sg(rds_ibdev->dev, ibmr->sg, ibmr->sg_len, in rds_ib_map_frmr()
403 struct scatterlist *sg, in rds_ib_reg_frmr() argument
426 ret = rds_ib_map_frmr(rds_ibdev, ibmr->pool, ibmr, sg, nents); in rds_ib_reg_frmr()
ib.h
330 struct scatterlist *sg; in rds_ib_dma_sync_sg_for_cpu() local
333 for_each_sg(sglist, sg, sg_dma_len, i) { in rds_ib_dma_sync_sg_for_cpu()
334 ib_dma_sync_single_for_cpu(dev, sg_dma_address(sg), in rds_ib_dma_sync_sg_for_cpu()
335 sg_dma_len(sg), direction); in rds_ib_dma_sync_sg_for_cpu()
345 struct scatterlist *sg; in rds_ib_dma_sync_sg_for_device() local
348 for_each_sg(sglist, sg, sg_dma_len, i) { in rds_ib_dma_sync_sg_for_device()
349 ib_dma_sync_single_for_device(dev, sg_dma_address(sg), in rds_ib_dma_sync_sg_for_device()
350 sg_dma_len(sg), direction); in rds_ib_dma_sync_sg_for_device()
429 unsigned int hdr_off, unsigned int sg, unsigned int off);
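
Note: the two inline helpers in the ib.h excerpt above both iterate a DMA-mapped list with for_each_sg() and sync each segment individually. A hedged sketch of the CPU-direction variant (placeholder name; the real header also carries the matching device-direction twin):

#include <linux/scatterlist.h>
#include <rdma/ib_verbs.h>

/* Sketch: hand ownership of every mapped segment back to the CPU. */
static void sync_sg_for_cpu(struct ib_device *dev,
			    struct scatterlist *sglist, unsigned int nents,
			    enum dma_data_direction dir)
{
	struct scatterlist *sg;
	unsigned int i;

	for_each_sg(sglist, sg, nents, i)
		ib_dma_sync_single_for_cpu(dev, sg_dma_address(sg),
					   sg_dma_len(sg), dir);
}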
/openbmc/qemu/include/sysemu/
dma.h
38 ScatterGatherEntry *sg; member
295 QEMUSGList *sg, uint64_t offset, uint32_t align,
299 QEMUSGList *sg, uint64_t offset, uint32_t align,
302 QEMUSGList *sg, uint64_t offset, uint32_t align,
305 QEMUSGList *sg, MemTxAttrs attrs);
307 QEMUSGList *sg, MemTxAttrs attrs);
310 QEMUSGList *sg, enum BlockAcctType type);
/openbmc/linux/drivers/crypto/caam/
caamrng.c
120 struct scatterlist sg[1]; in caam_rng_fill_async() local
124 sg_init_table(sg, ARRAY_SIZE(sg)); in caam_rng_fill_async()
125 nents = kfifo_dma_in_prepare(&ctx->fifo, sg, ARRAY_SIZE(sg), in caam_rng_fill_async()
130 len = caam_rng_read_one(ctx->jrdev, sg_virt(&sg[0]), in caam_rng_fill_async()
131 sg[0].length, in caam_rng_fill_async()
/openbmc/linux/drivers/gpu/drm/mediatek/
mtk_drm_gem.c
108 if (mtk_gem->sg) in mtk_drm_gem_free_object()
109 drm_prime_gem_destroy(obj, mtk_gem->sg); in mtk_drm_gem_free_object()
216 struct dma_buf_attachment *attach, struct sg_table *sg) in mtk_gem_prime_import_sg_table() argument
221 if (drm_prime_get_contiguous_size(sg) < attach->dmabuf->size) { in mtk_gem_prime_import_sg_table()
230 mtk_gem->dma_addr = sg_dma_address(sg->sgl); in mtk_gem_prime_import_sg_table()
231 mtk_gem->sg = sg; in mtk_gem_prime_import_sg_table()
/openbmc/linux/drivers/crypto/
img-hash.c
96 struct scatterlist *sg; member
219 if (ctx->sg) in img_hash_dma_callback()
236 sg, in img_hash_xmit_dma()
370 if (!ctx->sg) in img_hash_dma_task()
373 addr = sg_virt(ctx->sg); in img_hash_dma_task()
408 ctx->sg = sg_next(ctx->sg); in img_hash_dma_task()
410 len = ctx->sg->length; in img_hash_dma_task()
417 if (tbc >= ctx->sg->length) { in img_hash_dma_task()
418 ctx->sg = sg_next(ctx->sg); in img_hash_dma_task()
430 ctx->sg = sg_next(ctx->sg); in img_hash_dma_task()
[all …]
/openbmc/linux/drivers/scsi/ibmvscsi_tgt/
libsrp.c
166 struct scatterlist *sg = NULL; in srp_direct_data() local
171 sg = cmd->se_cmd.t_data_sg; in srp_direct_data()
172 nsg = dma_map_sg(iue->target->dev, sg, cmd->se_cmd.t_data_nents, in srp_direct_data()
184 err = rdma_io(cmd, sg, nsg, md, 1, dir, len); in srp_direct_data()
187 dma_unmap_sg(iue->target->dev, sg, nsg, DMA_BIDIRECTIONAL); in srp_direct_data()
199 struct scatterlist dummy, *sg = NULL; in srp_indirect_data() local
206 sg = cmd->se_cmd.t_data_sg; in srp_indirect_data()
243 nsg = dma_map_sg(iue->target->dev, sg, cmd->se_cmd.t_data_nents, in srp_indirect_data()
256 err = rdma_io(cmd, sg, nsg, md, nmd, dir, len); in srp_indirect_data()
259 dma_unmap_sg(iue->target->dev, sg, nsg, DMA_BIDIRECTIONAL); in srp_indirect_data()
/openbmc/linux/drivers/crypto/stm32/
stm32-hash.c
162 struct scatterlist *sg; member
342 if ((rctx->sg->length == 0) && !sg_is_last(rctx->sg)) { in stm32_hash_append_sg()
343 rctx->sg = sg_next(rctx->sg); in stm32_hash_append_sg()
358 rctx->sg = sg_next(rctx->sg); in stm32_hash_append_sg()
359 if (rctx->sg) in stm32_hash_append_sg()
676 sg[0] = *tsg; in stm32_hash_dma_send()
677 len = sg->length; in stm32_hash_dma_send()
679 if (sg_is_last(sg) || (bufcnt + sg[0].length) >= rctx->total) { in stm32_hash_dma_send()
690 sg->length = len; in stm32_hash_dma_send()
694 sg->length = ALIGN(sg->length, in stm32_hash_dma_send()
[all …]
/openbmc/linux/sound/soc/sh/
siu_pcm.c
112 struct scatterlist sg; in siu_pcm_wr_set() local
115 sg_init_table(&sg, 1); in siu_pcm_wr_set()
116 sg_set_page(&sg, pfn_to_page(PFN_DOWN(buff)), in siu_pcm_wr_set()
118 sg_dma_len(&sg) = size; in siu_pcm_wr_set()
119 sg_dma_address(&sg) = buff; in siu_pcm_wr_set()
122 &sg, 1, DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT | DMA_CTRL_ACK); in siu_pcm_wr_set()
160 struct scatterlist sg; in siu_pcm_rd_set() local
165 sg_init_table(&sg, 1); in siu_pcm_rd_set()
166 sg_set_page(&sg, pfn_to_page(PFN_DOWN(buff)), in siu_pcm_rd_set()
168 sg_dma_len(&sg) = size; in siu_pcm_rd_set()
[all …]
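
Note: siu_pcm_wr_set()/siu_pcm_rd_set() above build a one-entry scatterlist around a physically contiguous audio buffer, fill the DMA fields by hand (the buffer is already a bus address, so no dma_map_sg() call), and hand it to dmaengine. A hedged sketch of the transmit side, with placeholder names:

#include <linux/dmaengine.h>
#include <linux/mm.h>
#include <linux/scatterlist.h>

/* Sketch: wrap one coherent buffer at bus address 'buff' in a
 * single-entry scatterlist and prepare a mem-to-device descriptor. */
static struct dma_async_tx_descriptor *
prep_one_period(struct dma_chan *chan, dma_addr_t buff, size_t size)
{
	struct scatterlist sg;

	sg_init_table(&sg, 1);
	sg_set_page(&sg, pfn_to_page(PFN_DOWN(buff)), size,
		    offset_in_page(buff));
	sg_dma_len(&sg) = size;		/* filled directly, as in the driver */
	sg_dma_address(&sg) = buff;	/* buffer is already DMA-addressable */

	return dmaengine_prep_slave_sg(chan, &sg, 1, DMA_MEM_TO_DEV,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
}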
/openbmc/linux/arch/powerpc/platforms/powernv/
opal.c
1143 struct opal_sg_list *sg, *first = NULL; in opal_vmalloc_to_sg_list() local
1146 sg = kzalloc(PAGE_SIZE, GFP_KERNEL); in opal_vmalloc_to_sg_list()
1147 if (!sg) in opal_vmalloc_to_sg_list()
1150 first = sg; in opal_vmalloc_to_sg_list()
1167 sg->length = cpu_to_be64( in opal_vmalloc_to_sg_list()
1170 sg->next = cpu_to_be64(__pa(next)); in opal_vmalloc_to_sg_list()
1171 sg = next; in opal_vmalloc_to_sg_list()
1190 while (sg) { in opal_free_sg_list()
1193 kfree(sg); in opal_free_sg_list()
1196 sg = __va(next); in opal_free_sg_list()
[all …]
/openbmc/linux/drivers/media/pci/bt8xx/
bttv-risc.c
42 struct scatterlist *sg; in bttv_risc_packed() local
68 sg = sglist; in bttv_risc_packed()
74 offset -= sg_dma_len(sg); in bttv_risc_packed()
75 sg = sg_next(sg); in bttv_risc_packed()
87 (sg_dma_len(sg)-offset)); in bttv_risc_packed()
89 todo -= (sg_dma_len(sg)-offset); in bttv_risc_packed()
91 sg = sg_next(sg); in bttv_risc_packed()
92 while (todo > sg_dma_len(sg)) { in bttv_risc_packed()
94 sg_dma_len(sg)); in bttv_risc_packed()
96 todo -= sg_dma_len(sg); in bttv_risc_packed()
[all …]
/openbmc/linux/crypto/
gcm.c
69 struct scatterlist sg; member
79 struct scatterlist sg; member
104 struct scatterlist sg[1]; in crypto_gcm_setkey() member
128 skcipher_request_set_crypt(&data->req, data->sg, data->sg, in crypto_gcm_setkey()
156 struct scatterlist *sg; in crypto_gcm_init_common() local
165 if (sg != pctx->src + 1) in crypto_gcm_init_common()
166 sg_chain(pctx->src, 2, sg); in crypto_gcm_init_common()
172 if (sg != pctx->dst + 1) in crypto_gcm_init_common()
727 struct scatterlist *sg; in crypto_rfc4106_crypt() local
739 if (sg != rctx->src + 1) in crypto_rfc4106_crypt()
[all …]
essiv.c
61 struct scatterlist sg[4]; member
208 struct scatterlist *sg; in essiv_aead_crypt() local
219 sg_init_table(rctx->sg, 4); in essiv_aead_crypt()
232 sg_set_buf(rctx->sg, rctx->assoc, ssize); in essiv_aead_crypt()
234 sg_set_page(rctx->sg, sg_page(req->src), ssize, in essiv_aead_crypt()
238 sg_set_buf(rctx->sg + 1, iv, ivsize); in essiv_aead_crypt()
239 sg = scatterwalk_ffwd(rctx->sg + 2, req->src, req->assoclen); in essiv_aead_crypt()
240 if (sg != rctx->sg + 2) in essiv_aead_crypt()
241 sg_chain(rctx->sg, 3, sg); in essiv_aead_crypt()
243 src = rctx->sg; in essiv_aead_crypt()
