Lines matching refs: buf, from drivers/media/common/videobuf2/videobuf2-dma-sg.c (the videobuf2 scatter/gather DMA memory allocator). Each entry gives the source line number, the matching line, and the function it appears in.
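Every fragment below touches the allocator's private per-buffer state. For orientation, here is a reconstruction of that struct from the fields the listing references; field order and the comments are assumptions, and the sketches further down likewise assume the usual headers (linux/mm.h, linux/scatterlist.h, linux/dma-buf.h, media/videobuf2-memops.h, media/videobuf2-dma-sg.h):

struct vb2_dma_sg_buf {
        struct device                   *dev;
        void                            *vaddr;     /* lazy kernel mapping */
        struct page                     **pages;
        struct frame_vector             *vec;       /* USERPTR page pinning */
        int                             offset;     /* sub-page start offset */
        enum dma_data_direction         dma_dir;
        struct sg_table                 sg_table;
        struct sg_table                 *dma_sgt;   /* &sg_table, or the
                                                       imported table */
        size_t                          size;
        unsigned int                    num_pages;
        refcount_t                      refcount;
        struct vb2_vmarea_handler       handler;    /* mmap refcounting */
        struct vb2_buffer               *vb;
        struct dma_buf_attachment       *db_attach; /* DMABUF import */
};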
60 static int vb2_dma_sg_alloc_compacted(struct vb2_dma_sg_buf *buf, in vb2_dma_sg_alloc_compacted() argument
64 unsigned long size = buf->size; in vb2_dma_sg_alloc_compacted()
85 __free_page(buf->pages[last_page]); in vb2_dma_sg_alloc_compacted()
93 buf->pages[last_page++] = &pages[i]; in vb2_dma_sg_alloc_compacted()
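Lines 60-93 belong to the page "compaction" helper: rather than allocating num_pages single pages, it grabs the largest physically contiguous block it can get and falls back to smaller orders under memory pressure, so the resulting scatterlist has few large chunks. A sketch of that loop, consistent with the fragments above; the GFP flags and the order stepping are assumptions where the listing does not show them:

static int vb2_dma_sg_alloc_compacted(struct vb2_dma_sg_buf *buf,
                                      gfp_t gfp_flags)
{
        unsigned int last_page = 0;
        unsigned long size = buf->size;

        while (size > 0) {
                struct page *pages = NULL;
                int order = get_order(size);
                int i;

                /* Don't allocate past the remaining size. */
                if ((PAGE_SIZE << order) > size)
                        order--;

                while (!pages) {
                        pages = alloc_pages(GFP_KERNEL | __GFP_ZERO |
                                            __GFP_NOWARN | gfp_flags, order);
                        if (pages)
                                break;
                        if (order == 0) {
                                /* Out of memory: undo what we have so far. */
                                while (last_page--)
                                        __free_page(buf->pages[last_page]);
                                return -ENOMEM;
                        }
                        /* Retry with smaller contiguous chunks. */
                        order--;
                }

                /*
                 * Split the order-N block into 2^N individually refcounted
                 * order-0 pages; the cleanup paths free them one at a time
                 * with __free_page() (see lines 85 and 194).
                 */
                split_page(pages, order);
                for (i = 0; i < (1 << order); i++)
                        buf->pages[last_page++] = &pages[i];

                size -= PAGE_SIZE << order;
        }
        return 0;
}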
104 struct vb2_dma_sg_buf *buf; in vb2_dma_sg_alloc() local
112 buf = kzalloc(sizeof *buf, GFP_KERNEL); in vb2_dma_sg_alloc()
113 if (!buf) in vb2_dma_sg_alloc()
116 buf->vaddr = NULL; in vb2_dma_sg_alloc()
117 buf->dma_dir = vb->vb2_queue->dma_dir; in vb2_dma_sg_alloc()
118 buf->offset = 0; in vb2_dma_sg_alloc()
119 buf->size = size; in vb2_dma_sg_alloc()
121 buf->num_pages = size >> PAGE_SHIFT; in vb2_dma_sg_alloc()
122 buf->dma_sgt = &buf->sg_table; in vb2_dma_sg_alloc()
129 buf->pages = kvcalloc(buf->num_pages, sizeof(struct page *), GFP_KERNEL); in vb2_dma_sg_alloc()
130 if (!buf->pages) in vb2_dma_sg_alloc()
133 ret = vb2_dma_sg_alloc_compacted(buf, vb->vb2_queue->gfp_flags); in vb2_dma_sg_alloc()
137 ret = sg_alloc_table_from_pages(buf->dma_sgt, buf->pages, in vb2_dma_sg_alloc()
138 buf->num_pages, 0, size, GFP_KERNEL); in vb2_dma_sg_alloc()
143 buf->dev = get_device(dev); in vb2_dma_sg_alloc()
145 sgt = &buf->sg_table; in vb2_dma_sg_alloc()
150 if (dma_map_sgtable(buf->dev, sgt, buf->dma_dir, in vb2_dma_sg_alloc()
154 buf->handler.refcount = &buf->refcount; in vb2_dma_sg_alloc()
155 buf->handler.put = vb2_dma_sg_put; in vb2_dma_sg_alloc()
156 buf->handler.arg = buf; in vb2_dma_sg_alloc()
157 buf->vb = vb; in vb2_dma_sg_alloc()
159 refcount_set(&buf->refcount, 1); in vb2_dma_sg_alloc()
162 __func__, buf->num_pages); in vb2_dma_sg_alloc()
163 return buf; in vb2_dma_sg_alloc()
166 put_device(buf->dev); in vb2_dma_sg_alloc()
167 sg_free_table(buf->dma_sgt); in vb2_dma_sg_alloc()
169 num_pages = buf->num_pages; in vb2_dma_sg_alloc()
171 __free_page(buf->pages[num_pages]); in vb2_dma_sg_alloc()
173 kvfree(buf->pages); in vb2_dma_sg_alloc()
175 kfree(buf); in vb2_dma_sg_alloc()
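Lines 104-175 are the MMAP allocation path. Read in order, they show the whole setup: allocate the bookkeeping struct, gather the pages, wrap them in an sg_table, DMA-map the table once for the buffer's lifetime (cache syncs deferred to prepare/finish via DMA_ATTR_SKIP_CPU_SYNC), and install the refcounted handler used later by mmap. A condensed sketch; only the numbered fragments above are verbatim, the goto labels and guards in between are assumptions:

static void *vb2_dma_sg_alloc(struct vb2_buffer *vb, struct device *dev,
                              unsigned long size)
{
        struct vb2_dma_sg_buf *buf;
        int ret, num_pages;

        if (WARN_ON(!dev))
                return ERR_PTR(-EINVAL);

        buf = kzalloc(sizeof(*buf), GFP_KERNEL);
        if (!buf)
                return ERR_PTR(-ENOMEM);

        buf->vaddr = NULL;
        buf->dma_dir = vb->vb2_queue->dma_dir;
        buf->offset = 0;
        buf->size = size;
        /* size is already page aligned */
        buf->num_pages = size >> PAGE_SHIFT;
        buf->dma_sgt = &buf->sg_table;

        buf->pages = kvcalloc(buf->num_pages, sizeof(struct page *),
                              GFP_KERNEL);
        if (!buf->pages)
                goto fail_pages_array_alloc;

        ret = vb2_dma_sg_alloc_compacted(buf, vb->vb2_queue->gfp_flags);
        if (ret)
                goto fail_pages_alloc;

        ret = sg_alloc_table_from_pages(buf->dma_sgt, buf->pages,
                                        buf->num_pages, 0, size, GFP_KERNEL);
        if (ret)
                goto fail_table_alloc;

        buf->dev = get_device(dev);

        /* Map once at alloc time; the CPU sync is skipped here because
         * prepare()/finish() do cache maintenance per queue/dequeue. */
        if (dma_map_sgtable(buf->dev, &buf->sg_table, buf->dma_dir,
                            DMA_ATTR_SKIP_CPU_SYNC))
                goto fail_map;

        buf->handler.refcount = &buf->refcount;
        buf->handler.put = vb2_dma_sg_put;
        buf->handler.arg = buf;
        buf->vb = vb;

        refcount_set(&buf->refcount, 1);
        return buf;

fail_map:
        put_device(buf->dev);
        sg_free_table(buf->dma_sgt);
fail_table_alloc:
        num_pages = buf->num_pages;
        while (num_pages--)
                __free_page(buf->pages[num_pages]);
fail_pages_alloc:
        kvfree(buf->pages);
fail_pages_array_alloc:
        kfree(buf);
        return ERR_PTR(-ENOMEM);
}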
181 struct vb2_dma_sg_buf *buf = buf_priv; in vb2_dma_sg_put() local
182 struct sg_table *sgt = &buf->sg_table; in vb2_dma_sg_put()
183 int i = buf->num_pages; in vb2_dma_sg_put()
185 if (refcount_dec_and_test(&buf->refcount)) { in vb2_dma_sg_put()
187 buf->num_pages); in vb2_dma_sg_put()
188 dma_unmap_sgtable(buf->dev, sgt, buf->dma_dir, in vb2_dma_sg_put()
190 if (buf->vaddr) in vb2_dma_sg_put()
191 vm_unmap_ram(buf->vaddr, buf->num_pages); in vb2_dma_sg_put()
192 sg_free_table(buf->dma_sgt); in vb2_dma_sg_put()
194 __free_page(buf->pages[i]); in vb2_dma_sg_put()
195 kvfree(buf->pages); in vb2_dma_sg_put()
196 put_device(buf->dev); in vb2_dma_sg_put()
197 kfree(buf); in vb2_dma_sg_put()
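Lines 181-197 are the matching release path: once refcount_dec_and_test() says this was the last reference, the alloc path is unwound in reverse. The unmap attribute is cut off at line 188, but line 292 in the userptr variant suggests DMA_ATTR_SKIP_CPU_SYNC here as well. Sketch:

static void vb2_dma_sg_put(void *buf_priv)
{
        struct vb2_dma_sg_buf *buf = buf_priv;
        struct sg_table *sgt = &buf->sg_table;
        int i = buf->num_pages;

        if (refcount_dec_and_test(&buf->refcount)) {
                dma_unmap_sgtable(buf->dev, sgt, buf->dma_dir,
                                  DMA_ATTR_SKIP_CPU_SYNC);
                if (buf->vaddr)
                        vm_unmap_ram(buf->vaddr, buf->num_pages);
                sg_free_table(buf->dma_sgt);
                while (--i >= 0)
                        __free_page(buf->pages[i]);
                kvfree(buf->pages);
                put_device(buf->dev);
                kfree(buf);
        }
}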
203 struct vb2_dma_sg_buf *buf = buf_priv; in vb2_dma_sg_prepare() local
204 struct sg_table *sgt = buf->dma_sgt; in vb2_dma_sg_prepare()
206 if (buf->vb->skip_cache_sync_on_prepare) in vb2_dma_sg_prepare()
209 dma_sync_sgtable_for_device(buf->dev, sgt, buf->dma_dir); in vb2_dma_sg_prepare()
214 struct vb2_dma_sg_buf *buf = buf_priv; in vb2_dma_sg_finish() local
215 struct sg_table *sgt = buf->dma_sgt; in vb2_dma_sg_finish()
217 if (buf->vb->skip_cache_sync_on_finish) in vb2_dma_sg_finish()
220 dma_sync_sgtable_for_cpu(buf->dev, sgt, buf->dma_dir); in vb2_dma_sg_finish()
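Lines 203-220 pair up: because the buffer stays DMA-mapped for its whole lifetime, prepare() and finish() only do cache maintenance around each hardware transfer, and both return early when the buffer opted out of syncs. Sketch:

static void vb2_dma_sg_prepare(void *buf_priv)
{
        struct vb2_dma_sg_buf *buf = buf_priv;

        if (buf->vb->skip_cache_sync_on_prepare)
                return;

        /* Flush CPU caches before the device touches the buffer. */
        dma_sync_sgtable_for_device(buf->dev, buf->dma_sgt, buf->dma_dir);
}

static void vb2_dma_sg_finish(void *buf_priv)
{
        struct vb2_dma_sg_buf *buf = buf_priv;

        if (buf->vb->skip_cache_sync_on_finish)
                return;

        /* Make the device's writes visible to the CPU again. */
        dma_sync_sgtable_for_cpu(buf->dev, buf->dma_sgt, buf->dma_dir);
}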
226 struct vb2_dma_sg_buf *buf; in vb2_dma_sg_get_userptr() local
233 buf = kzalloc(sizeof *buf, GFP_KERNEL); in vb2_dma_sg_get_userptr()
234 if (!buf) in vb2_dma_sg_get_userptr()
237 buf->vaddr = NULL; in vb2_dma_sg_get_userptr()
238 buf->dev = dev; in vb2_dma_sg_get_userptr()
239 buf->dma_dir = vb->vb2_queue->dma_dir; in vb2_dma_sg_get_userptr()
240 buf->offset = vaddr & ~PAGE_MASK; in vb2_dma_sg_get_userptr()
241 buf->size = size; in vb2_dma_sg_get_userptr()
242 buf->dma_sgt = &buf->sg_table; in vb2_dma_sg_get_userptr()
243 buf->vb = vb; in vb2_dma_sg_get_userptr()
245 buf->dma_dir == DMA_FROM_DEVICE || in vb2_dma_sg_get_userptr()
246 buf->dma_dir == DMA_BIDIRECTIONAL); in vb2_dma_sg_get_userptr()
249 buf->vec = vec; in vb2_dma_sg_get_userptr()
251 buf->pages = frame_vector_pages(vec); in vb2_dma_sg_get_userptr()
252 if (IS_ERR(buf->pages)) in vb2_dma_sg_get_userptr()
254 buf->num_pages = frame_vector_count(vec); in vb2_dma_sg_get_userptr()
256 if (sg_alloc_table_from_pages(buf->dma_sgt, buf->pages, in vb2_dma_sg_get_userptr()
257 buf->num_pages, buf->offset, size, 0)) in vb2_dma_sg_get_userptr()
260 sgt = &buf->sg_table; in vb2_dma_sg_get_userptr()
265 if (dma_map_sgtable(buf->dev, sgt, buf->dma_dir, in vb2_dma_sg_get_userptr()
269 return buf; in vb2_dma_sg_get_userptr()
272 sg_free_table(&buf->sg_table); in vb2_dma_sg_get_userptr()
276 kfree(buf); in vb2_dma_sg_get_userptr()
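Lines 226-276 are the USERPTR path: pin the user's pages with a frame vector, then build and map an sg_table over them. buf->offset preserves the sub-page offset of an unaligned user address, and write access is requested whenever the device will write into the memory. Condensed sketch, with the error labels assumed:

static void *vb2_dma_sg_get_userptr(struct vb2_buffer *vb, struct device *dev,
                                    unsigned long vaddr, unsigned long size)
{
        struct vb2_dma_sg_buf *buf;
        struct frame_vector *vec;

        buf = kzalloc(sizeof(*buf), GFP_KERNEL);
        if (!buf)
                return ERR_PTR(-ENOMEM);

        buf->dev = dev;
        buf->dma_dir = vb->vb2_queue->dma_dir;
        buf->offset = vaddr & ~PAGE_MASK;   /* sub-page start of the data */
        buf->size = size;
        buf->dma_sgt = &buf->sg_table;
        buf->vb = vb;

        /* Pin the pages; writable iff the device writes into them. */
        vec = vb2_create_framevec(vaddr, size,
                                  buf->dma_dir == DMA_FROM_DEVICE ||
                                  buf->dma_dir == DMA_BIDIRECTIONAL);
        if (IS_ERR(vec))
                goto fail_pfnvec;
        buf->vec = vec;

        buf->pages = frame_vector_pages(vec);
        if (IS_ERR(buf->pages))
                goto fail_sgtable;
        buf->num_pages = frame_vector_count(vec);

        /* The offset makes the first sg entry start mid-page if needed. */
        if (sg_alloc_table_from_pages(buf->dma_sgt, buf->pages,
                                      buf->num_pages, buf->offset, size, 0))
                goto fail_sgtable;

        if (dma_map_sgtable(buf->dev, &buf->sg_table, buf->dma_dir,
                            DMA_ATTR_SKIP_CPU_SYNC))
                goto fail_map;

        return buf;

fail_map:
        sg_free_table(&buf->sg_table);
fail_sgtable:
        vb2_destroy_framevec(vec);
fail_pfnvec:
        kfree(buf);
        return ERR_PTR(-ENOMEM);
}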
286 struct vb2_dma_sg_buf *buf = buf_priv; in vb2_dma_sg_put_userptr() local
287 struct sg_table *sgt = &buf->sg_table; in vb2_dma_sg_put_userptr()
288 int i = buf->num_pages; in vb2_dma_sg_put_userptr()
291 __func__, buf->num_pages); in vb2_dma_sg_put_userptr()
292 dma_unmap_sgtable(buf->dev, sgt, buf->dma_dir, DMA_ATTR_SKIP_CPU_SYNC); in vb2_dma_sg_put_userptr()
293 if (buf->vaddr) in vb2_dma_sg_put_userptr()
294 vm_unmap_ram(buf->vaddr, buf->num_pages); in vb2_dma_sg_put_userptr()
295 sg_free_table(buf->dma_sgt); in vb2_dma_sg_put_userptr()
296 if (buf->dma_dir == DMA_FROM_DEVICE || in vb2_dma_sg_put_userptr()
297 buf->dma_dir == DMA_BIDIRECTIONAL) in vb2_dma_sg_put_userptr()
299 set_page_dirty_lock(buf->pages[i]); in vb2_dma_sg_put_userptr()
300 vb2_destroy_framevec(buf->vec); in vb2_dma_sg_put_userptr()
301 kfree(buf); in vb2_dma_sg_put_userptr()
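Lines 286-301 release a userptr buffer. The notable step is set_page_dirty_lock(): pages the device may have written must be marked dirty before unpinning, otherwise reclaim could silently drop the captured data. Sketch:

static void vb2_dma_sg_put_userptr(void *buf_priv)
{
        struct vb2_dma_sg_buf *buf = buf_priv;
        struct sg_table *sgt = &buf->sg_table;
        int i = buf->num_pages;

        dma_unmap_sgtable(buf->dev, sgt, buf->dma_dir,
                          DMA_ATTR_SKIP_CPU_SYNC);
        if (buf->vaddr)
                vm_unmap_ram(buf->vaddr, buf->num_pages);
        sg_free_table(buf->dma_sgt);
        if (buf->dma_dir == DMA_FROM_DEVICE ||
            buf->dma_dir == DMA_BIDIRECTIONAL)
                while (--i >= 0)
                        set_page_dirty_lock(buf->pages[i]);
        vb2_destroy_framevec(buf->vec);
        kfree(buf);
}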
306 struct vb2_dma_sg_buf *buf = buf_priv; in vb2_dma_sg_vaddr() local
310 BUG_ON(!buf); in vb2_dma_sg_vaddr()
312 if (!buf->vaddr) { in vb2_dma_sg_vaddr()
313 if (buf->db_attach) { in vb2_dma_sg_vaddr()
314 ret = dma_buf_vmap_unlocked(buf->db_attach->dmabuf, &map); in vb2_dma_sg_vaddr()
315 buf->vaddr = ret ? NULL : map.vaddr; in vb2_dma_sg_vaddr()
317 buf->vaddr = vm_map_ram(buf->pages, buf->num_pages, -1); in vb2_dma_sg_vaddr()
322 return buf->vaddr ? buf->vaddr + buf->offset : NULL; in vb2_dma_sg_vaddr()
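Lines 306-322 produce a kernel virtual address lazily, on first use: an imported dmabuf is mapped through its exporter with dma_buf_vmap_unlocked(), while the allocator's own pages go through vm_map_ram(). The sub-page offset is added back so unaligned USERPTR buffers return the address the caller expects. Sketch:

static void *vb2_dma_sg_vaddr(struct vb2_buffer *vb, void *buf_priv)
{
        struct vb2_dma_sg_buf *buf = buf_priv;
        struct iosys_map map;
        int ret;

        BUG_ON(!buf);

        if (!buf->vaddr) {
                if (buf->db_attach) {
                        /* Imported dmabuf: ask the exporter for a mapping. */
                        ret = dma_buf_vmap_unlocked(buf->db_attach->dmabuf,
                                                    &map);
                        buf->vaddr = ret ? NULL : map.vaddr;
                } else {
                        /* Our own pages: map them into vmalloc space. */
                        buf->vaddr = vm_map_ram(buf->pages,
                                                buf->num_pages, -1);
                }
        }

        /* Add the sub-page offset so unaligned USERPTR buffers work. */
        return buf->vaddr ? buf->vaddr + buf->offset : NULL;
}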
327 struct vb2_dma_sg_buf *buf = buf_priv; in vb2_dma_sg_num_users() local
329 return refcount_read(&buf->refcount); in vb2_dma_sg_num_users()
334 struct vb2_dma_sg_buf *buf = buf_priv; in vb2_dma_sg_mmap() local
337 if (!buf) { in vb2_dma_sg_mmap()
342 err = vm_map_pages(vma, buf->pages, buf->num_pages); in vb2_dma_sg_mmap()
351 vma->vm_private_data = &buf->handler; in vb2_dma_sg_mmap()
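Lines 334-351 implement mmap() to userspace: vm_map_pages() inserts the whole page array into the VMA, and vm_private_data is pointed at the handler wired up at lines 154-156 so the VMA's open/close callbacks keep the buffer refcount in step with userspace mappings. The vm_ops table itself is not visible in the fragments; vb2_common_vm_ops from videobuf2-memops is the natural fit and is assumed in this sketch:

static int vb2_dma_sg_mmap(void *buf_priv, struct vm_area_struct *vma)
{
        struct vb2_dma_sg_buf *buf = buf_priv;
        int err;

        if (!buf) {
                pr_err("No memory to map\n");
                return -EINVAL;
        }

        /* Insert all pages into the VMA in one call. */
        err = vm_map_pages(vma, buf->pages, buf->num_pages);
        if (err) {
                pr_err("Remapping memory, error: %d\n", err);
                return err;
        }

        /* Tie the VMA's lifetime to the buffer refcount through the
         * handler set up at allocation time. */
        vma->vm_private_data = &buf->handler;
        vma->vm_ops = &vb2_common_vm_ops;   /* assumed ops table */
        vma->vm_ops->open(vma);

        return 0;
}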
375 struct vb2_dma_sg_buf *buf = dbuf->priv; in vb2_dma_sg_dmabuf_ops_attach() local
386 ret = sg_alloc_table(sgt, buf->dma_sgt->orig_nents, GFP_KERNEL); in vb2_dma_sg_dmabuf_ops_attach()
392 rd = buf->dma_sgt->sgl; in vb2_dma_sg_dmabuf_ops_attach()
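Lines 375-392 are the exporter-side attach callback. One sg_table cannot be DMA-mapped for several importing devices at once, so each attachment gets its own copy of the scatterlist entries. Sketch; the per-attachment struct, called vb2_dma_sg_attachment here, is not visible in the fragments and is assumed:

struct vb2_dma_sg_attachment {
        struct sg_table sgt;
        enum dma_data_direction dma_dir;
};

static int vb2_dma_sg_dmabuf_ops_attach(struct dma_buf *dbuf,
                                        struct dma_buf_attachment *dbuf_attach)
{
        struct vb2_dma_sg_buf *buf = dbuf->priv;
        struct vb2_dma_sg_attachment *attach;
        struct scatterlist *rd, *wr;
        struct sg_table *sgt;
        unsigned int i;
        int ret;

        attach = kzalloc(sizeof(*attach), GFP_KERNEL);
        if (!attach)
                return -ENOMEM;

        sgt = &attach->sgt;
        ret = sg_alloc_table(sgt, buf->dma_sgt->orig_nents, GFP_KERNEL);
        if (ret) {
                kfree(attach);
                return -ENOMEM;
        }

        /* Duplicate the page/offset/length entries one by one. */
        rd = buf->dma_sgt->sgl;
        wr = sgt->sgl;
        for (i = 0; i < sgt->orig_nents; ++i) {
                sg_set_page(wr, sg_page(rd), rd->length, rd->offset);
                rd = sg_next(rd);
                wr = sg_next(wr);
        }

        attach->dma_dir = DMA_NONE;   /* nothing mapped yet */
        dbuf_attach->priv = attach;
        return 0;
}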
469 struct vb2_dma_sg_buf *buf = dbuf->priv; in vb2_dma_sg_dmabuf_ops_begin_cpu_access() local
470 struct sg_table *sgt = buf->dma_sgt; in vb2_dma_sg_dmabuf_ops_begin_cpu_access()
472 dma_sync_sg_for_cpu(buf->dev, sgt->sgl, sgt->nents, buf->dma_dir); in vb2_dma_sg_dmabuf_ops_begin_cpu_access()
480 struct vb2_dma_sg_buf *buf = dbuf->priv; in vb2_dma_sg_dmabuf_ops_end_cpu_access() local
481 struct sg_table *sgt = buf->dma_sgt; in vb2_dma_sg_dmabuf_ops_end_cpu_access()
483 dma_sync_sg_for_device(buf->dev, sgt->sgl, sgt->nents, buf->dma_dir); in vb2_dma_sg_dmabuf_ops_end_cpu_access()
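Lines 469-483 are the begin/end CPU-access hooks of the exported dma-buf, bracketing CPU reads and writes with the matching cache syncs. A hypothetical importer would reach them through the generic dma-buf API; read_buffer_cpu and consume below are made-up names:

static int read_buffer_cpu(struct dma_buf *dbuf,
                           void (*consume)(void *vaddr, size_t len))
{
        struct iosys_map map;
        int ret;

        /* Invalidate caches so the CPU sees the device's writes. */
        ret = dma_buf_begin_cpu_access(dbuf, DMA_FROM_DEVICE);
        if (ret)
                return ret;

        ret = dma_buf_vmap_unlocked(dbuf, &map);
        if (!ret) {
                consume(map.vaddr, dbuf->size);
                dma_buf_vunmap_unlocked(dbuf, &map);
        }

        dma_buf_end_cpu_access(dbuf, DMA_FROM_DEVICE);
        return ret;
}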
490 struct vb2_dma_sg_buf *buf; in vb2_dma_sg_dmabuf_ops_vmap() local
493 buf = dbuf->priv; in vb2_dma_sg_dmabuf_ops_vmap()
494 vaddr = vb2_dma_sg_vaddr(buf->vb, buf); in vb2_dma_sg_dmabuf_ops_vmap()
525 struct vb2_dma_sg_buf *buf = buf_priv; in vb2_dma_sg_get_dmabuf() local
530 exp_info.size = buf->size; in vb2_dma_sg_get_dmabuf()
532 exp_info.priv = buf; in vb2_dma_sg_get_dmabuf()
534 if (WARN_ON(!buf->dma_sgt)) in vb2_dma_sg_get_dmabuf()
542 refcount_inc(&buf->refcount); in vb2_dma_sg_get_dmabuf()
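Lines 525-542 export the buffer as a dma-buf. The refcount_inc() at line 542 is the key point: the dma-buf holds its own reference to the vb2 buffer, released through vb2_dma_sg_put() when the dma-buf is destroyed. Sketch; the ops table name is assumed:

static struct dma_buf *vb2_dma_sg_get_dmabuf(struct vb2_buffer *vb,
                                             void *buf_priv,
                                             unsigned long flags)
{
        struct vb2_dma_sg_buf *buf = buf_priv;
        struct dma_buf *dbuf;
        DEFINE_DMA_BUF_EXPORT_INFO(exp_info);

        exp_info.ops = &vb2_dma_sg_dmabuf_ops;   /* assumed name */
        exp_info.size = buf->size;
        exp_info.flags = flags;
        exp_info.priv = buf;

        if (WARN_ON(!buf->dma_sgt))
                return NULL;

        dbuf = dma_buf_export(&exp_info);
        if (IS_ERR(dbuf))
                return NULL;

        /* The dma-buf keeps its own reference on the vb2 buffer. */
        refcount_inc(&buf->refcount);
        return dbuf;
}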
553 struct vb2_dma_sg_buf *buf = mem_priv; in vb2_dma_sg_map_dmabuf() local
556 if (WARN_ON(!buf->db_attach)) { in vb2_dma_sg_map_dmabuf()
561 if (WARN_ON(buf->dma_sgt)) { in vb2_dma_sg_map_dmabuf()
567 sgt = dma_buf_map_attachment_unlocked(buf->db_attach, buf->dma_dir); in vb2_dma_sg_map_dmabuf()
573 buf->dma_sgt = sgt; in vb2_dma_sg_map_dmabuf()
574 buf->vaddr = NULL; in vb2_dma_sg_map_dmabuf()
581 struct vb2_dma_sg_buf *buf = mem_priv; in vb2_dma_sg_unmap_dmabuf() local
582 struct sg_table *sgt = buf->dma_sgt; in vb2_dma_sg_unmap_dmabuf()
583 struct iosys_map map = IOSYS_MAP_INIT_VADDR(buf->vaddr); in vb2_dma_sg_unmap_dmabuf()
585 if (WARN_ON(!buf->db_attach)) { in vb2_dma_sg_unmap_dmabuf()
595 if (buf->vaddr) { in vb2_dma_sg_unmap_dmabuf()
596 dma_buf_vunmap_unlocked(buf->db_attach->dmabuf, &map); in vb2_dma_sg_unmap_dmabuf()
597 buf->vaddr = NULL; in vb2_dma_sg_unmap_dmabuf()
599 dma_buf_unmap_attachment_unlocked(buf->db_attach, sgt, buf->dma_dir); in vb2_dma_sg_unmap_dmabuf()
601 buf->dma_sgt = NULL; in vb2_dma_sg_unmap_dmabuf()
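Lines 553-601 are the importer-side pin/unpin pair: map_dmabuf fetches the exporter's scatterlist through the attachment and caches it in buf->dma_sgt; unmap_dmabuf drops any kernel mapping first, returns the scatterlist, and clears the pointer so a stale table cannot be reused. Condensed sketch, with the error messages elided:

static int vb2_dma_sg_map_dmabuf(void *mem_priv)
{
        struct vb2_dma_sg_buf *buf = mem_priv;
        struct sg_table *sgt;

        if (WARN_ON(!buf->db_attach))   /* not attached */
                return -EINVAL;
        if (WARN_ON(buf->dma_sgt))      /* already pinned */
                return 0;

        /* Get the scatterlist for this buffer from the exporter. */
        sgt = dma_buf_map_attachment_unlocked(buf->db_attach, buf->dma_dir);
        if (IS_ERR(sgt))
                return -EINVAL;

        buf->dma_sgt = sgt;
        buf->vaddr = NULL;
        return 0;
}

static void vb2_dma_sg_unmap_dmabuf(void *mem_priv)
{
        struct vb2_dma_sg_buf *buf = mem_priv;
        struct sg_table *sgt = buf->dma_sgt;
        struct iosys_map map = IOSYS_MAP_INIT_VADDR(buf->vaddr);

        if (WARN_ON(!buf->db_attach) || WARN_ON(!sgt))
                return;

        if (buf->vaddr) {
                dma_buf_vunmap_unlocked(buf->db_attach->dmabuf, &map);
                buf->vaddr = NULL;
        }
        dma_buf_unmap_attachment_unlocked(buf->db_attach, sgt, buf->dma_dir);

        buf->dma_sgt = NULL;
}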
606 struct vb2_dma_sg_buf *buf = mem_priv; in vb2_dma_sg_detach_dmabuf() local
609 if (WARN_ON(buf->dma_sgt)) in vb2_dma_sg_detach_dmabuf()
610 vb2_dma_sg_unmap_dmabuf(buf); in vb2_dma_sg_detach_dmabuf()
613 dma_buf_detach(buf->db_attach->dmabuf, buf->db_attach); in vb2_dma_sg_detach_dmabuf()
614 kfree(buf); in vb2_dma_sg_detach_dmabuf()
620 struct vb2_dma_sg_buf *buf; in vb2_dma_sg_attach_dmabuf() local
629 buf = kzalloc(sizeof(*buf), GFP_KERNEL); in vb2_dma_sg_attach_dmabuf()
630 if (!buf) in vb2_dma_sg_attach_dmabuf()
633 buf->dev = dev; in vb2_dma_sg_attach_dmabuf()
635 dba = dma_buf_attach(dbuf, buf->dev); in vb2_dma_sg_attach_dmabuf()
638 kfree(buf); in vb2_dma_sg_attach_dmabuf()
642 buf->dma_dir = vb->vb2_queue->dma_dir; in vb2_dma_sg_attach_dmabuf()
643 buf->size = size; in vb2_dma_sg_attach_dmabuf()
644 buf->db_attach = dba; in vb2_dma_sg_attach_dmabuf()
645 buf->vb = vb; in vb2_dma_sg_attach_dmabuf()
647 return buf; in vb2_dma_sg_attach_dmabuf()
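Lines 606-647 do the attach/detach bookkeeping around that pair: attach only creates the dma_buf_attachment and records queue parameters, deferring any mapping to map_dmabuf, while detach defensively unmaps a still-pinned buffer before dropping the attachment. Sketch; the dbuf->size sanity check is an assumption:

static void *vb2_dma_sg_attach_dmabuf(struct vb2_buffer *vb,
                                      struct device *dev,
                                      struct dma_buf *dbuf,
                                      unsigned long size)
{
        struct vb2_dma_sg_buf *buf;
        struct dma_buf_attachment *dba;

        if (dbuf->size < size)   /* assumed sanity check */
                return ERR_PTR(-EFAULT);

        buf = kzalloc(sizeof(*buf), GFP_KERNEL);
        if (!buf)
                return ERR_PTR(-ENOMEM);

        buf->dev = dev;
        /* Create the attachment for this device; no mapping yet. */
        dba = dma_buf_attach(dbuf, buf->dev);
        if (IS_ERR(dba)) {
                kfree(buf);
                return dba;
        }

        buf->dma_dir = vb->vb2_queue->dma_dir;
        buf->size = size;
        buf->db_attach = dba;
        buf->vb = vb;
        return buf;
}

static void vb2_dma_sg_detach_dmabuf(void *mem_priv)
{
        struct vb2_dma_sg_buf *buf = mem_priv;

        /* vb2 should never detach a still-mapped buffer. */
        if (WARN_ON(buf->dma_sgt))
                vb2_dma_sg_unmap_dmabuf(buf);

        dma_buf_detach(buf->db_attach->dmabuf, buf->db_attach);
        kfree(buf);
}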
652 struct vb2_dma_sg_buf *buf = buf_priv; in vb2_dma_sg_cookie() local
654 return buf->dma_sgt; in vb2_dma_sg_cookie()
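Line 654 reveals what this allocator's "cookie" is: the buffer's sg_table. A driver sitting on top of videobuf2-dma-sg typically fetches it with vb2_dma_sg_plane_desc() and walks the DMA-mapped chunks; my_driver_start_dma below is a hypothetical example:

#include <linux/scatterlist.h>
#include <media/videobuf2-dma-sg.h>

static void my_driver_start_dma(struct vb2_buffer *vb)
{
        struct sg_table *sgt = vb2_dma_sg_plane_desc(vb, 0);
        struct scatterlist *sg;
        unsigned int i;

        /* Walk the DMA-mapped (possibly IOMMU-coalesced) chunks. */
        for_each_sgtable_dma_sg(sgt, sg, i) {
                dma_addr_t addr = sg_dma_address(sg);

                /* A real driver would program addr and sg_dma_len(sg)
                 * into its DMA descriptor chain here. */
                pr_debug("chunk %u: bus addr %pad, %u bytes\n",
                         i, &addr, sg_dma_len(sg));
        }
}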