Lines matching full:buffer in drivers/dma-buf/heaps/cma_heap.c (Linux kernel CMA dma-buf heap)
53 struct cma_heap_buffer *buffer = dmabuf->priv; in cma_heap_attach() local
61 ret = sg_alloc_table_from_pages(&a->table, buffer->pages, in cma_heap_attach()
62 buffer->pagecount, 0, in cma_heap_attach()
63 buffer->pagecount << PAGE_SHIFT, in cma_heap_attach()
76 mutex_lock(&buffer->lock); in cma_heap_attach()
77 list_add(&a->list, &buffer->attachments); in cma_heap_attach()
78 mutex_unlock(&buffer->lock); in cma_heap_attach()
86 struct cma_heap_buffer *buffer = dmabuf->priv; in cma_heap_detach() local
89 mutex_lock(&buffer->lock); in cma_heap_detach()
91 mutex_unlock(&buffer->lock); in cma_heap_detach()
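The hits above sit in the attach/detach pair. Below is a minimal sketch of both callbacks, assuming the mainline cma_heap.c layout; the type definitions and any lines that do not appear in the match listing are filled in from that pattern and may differ between kernel versions.

#include <linux/cma.h>
#include <linux/dma-buf.h>
#include <linux/dma-heap.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Types as in mainline cma_heap.c, reproduced here for context. */
struct cma_heap_buffer {
	struct cma_heap *heap;
	struct list_head attachments;
	struct mutex lock;
	unsigned long len;
	struct page *cma_pages;
	struct page **pages;
	pgoff_t pagecount;
	int vmap_cnt;
	void *vaddr;
};

struct dma_heap_attachment {
	struct device *dev;
	struct sg_table table;
	struct list_head list;
	bool mapped;
};

static int cma_heap_attach(struct dma_buf *dmabuf,
			   struct dma_buf_attachment *attachment)
{
	struct cma_heap_buffer *buffer = dmabuf->priv;
	struct dma_heap_attachment *a;
	int ret;

	a = kzalloc(sizeof(*a), GFP_KERNEL);
	if (!a)
		return -ENOMEM;

	/* Build a scatterlist covering every page of the buffer. */
	ret = sg_alloc_table_from_pages(&a->table, buffer->pages,
					buffer->pagecount, 0,
					buffer->pagecount << PAGE_SHIFT,
					GFP_KERNEL);
	if (ret) {
		kfree(a);
		return ret;
	}

	a->dev = attachment->dev;
	INIT_LIST_HEAD(&a->list);
	a->mapped = false;
	attachment->priv = a;

	/* buffer->lock serializes the per-buffer attachment list. */
	mutex_lock(&buffer->lock);
	list_add(&a->list, &buffer->attachments);
	mutex_unlock(&buffer->lock);

	return 0;
}

static void cma_heap_detach(struct dma_buf *dmabuf,
			    struct dma_buf_attachment *attachment)
{
	struct cma_heap_buffer *buffer = dmabuf->priv;
	struct dma_heap_attachment *a = attachment->priv;

	mutex_lock(&buffer->lock);
	list_del(&a->list);
	mutex_unlock(&buffer->lock);

	sg_free_table(&a->table);
	kfree(a);
}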
124 struct cma_heap_buffer *buffer = dmabuf->priv; in cma_heap_dma_buf_begin_cpu_access() local
127 mutex_lock(&buffer->lock); in cma_heap_dma_buf_begin_cpu_access()
129 if (buffer->vmap_cnt) in cma_heap_dma_buf_begin_cpu_access()
130 invalidate_kernel_vmap_range(buffer->vaddr, buffer->len); in cma_heap_dma_buf_begin_cpu_access()
132 list_for_each_entry(a, &buffer->attachments, list) { in cma_heap_dma_buf_begin_cpu_access()
137 mutex_unlock(&buffer->lock); in cma_heap_dma_buf_begin_cpu_access()
145 struct cma_heap_buffer *buffer = dmabuf->priv; in cma_heap_dma_buf_end_cpu_access() local
148 mutex_lock(&buffer->lock); in cma_heap_dma_buf_end_cpu_access()
150 if (buffer->vmap_cnt) in cma_heap_dma_buf_end_cpu_access()
151 flush_kernel_vmap_range(buffer->vaddr, buffer->len); in cma_heap_dma_buf_end_cpu_access()
153 list_for_each_entry(a, &buffer->attachments, list) { in cma_heap_dma_buf_end_cpu_access()
158 mutex_unlock(&buffer->lock); in cma_heap_dma_buf_end_cpu_access()
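The begin/end CPU-access hits form a symmetric pair: invalidate the kernel vmap alias and sync each mapped attachment for the CPU on the way in, flush and sync for the device on the way out. A sketch under the same mainline-layout assumption (the a->mapped check and the dma_sync_sgtable_* calls are filled in, not part of the match listing):

static int cma_heap_dma_buf_begin_cpu_access(struct dma_buf *dmabuf,
					     enum dma_data_direction direction)
{
	struct cma_heap_buffer *buffer = dmabuf->priv;
	struct dma_heap_attachment *a;

	mutex_lock(&buffer->lock);

	/* A live vmap() alias must be invalidated before the CPU
	 * reads data a device may have written.
	 */
	if (buffer->vmap_cnt)
		invalidate_kernel_vmap_range(buffer->vaddr, buffer->len);

	list_for_each_entry(a, &buffer->attachments, list) {
		if (!a->mapped)
			continue;
		dma_sync_sgtable_for_cpu(a->dev, &a->table, direction);
	}
	mutex_unlock(&buffer->lock);

	return 0;
}

static int cma_heap_dma_buf_end_cpu_access(struct dma_buf *dmabuf,
					   enum dma_data_direction direction)
{
	struct cma_heap_buffer *buffer = dmabuf->priv;
	struct dma_heap_attachment *a;

	mutex_lock(&buffer->lock);

	/* Flush the vmap() alias so the device sees CPU writes. */
	if (buffer->vmap_cnt)
		flush_kernel_vmap_range(buffer->vaddr, buffer->len);

	list_for_each_entry(a, &buffer->attachments, list) {
		if (!a->mapped)
			continue;
		dma_sync_sgtable_for_device(a->dev, &a->table, direction);
	}
	mutex_unlock(&buffer->lock);

	return 0;
}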
166 struct cma_heap_buffer *buffer = vma->vm_private_data; in cma_heap_vm_fault() local
168 if (vmf->pgoff >= buffer->pagecount) in cma_heap_vm_fault()
171 vmf->page = buffer->pages[vmf->pgoff]; in cma_heap_vm_fault()
183 struct cma_heap_buffer *buffer = dmabuf->priv; in cma_heap_mmap() local
189 vma->vm_private_data = buffer; in cma_heap_mmap()
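The fault handler and mmap hits show how userspace mappings are backed page-by-page from buffer->pages. A sketch, again assuming mainline cma_heap.c (the VM_SHARED check, the get_page() call, and the dma_heap_vm_ops name are filled in and may differ by version):

static vm_fault_t cma_heap_vm_fault(struct vm_fault *vmf)
{
	struct vm_area_struct *vma = vmf->vma;
	struct cma_heap_buffer *buffer = vma->vm_private_data;

	/* Reject faults past the end of the buffer. */
	if (vmf->pgoff >= buffer->pagecount)
		return VM_FAULT_SIGBUS;

	/* Hand back the pre-allocated CMA page for this offset. */
	vmf->page = buffer->pages[vmf->pgoff];
	get_page(vmf->page);

	return 0;
}

static const struct vm_operations_struct dma_heap_vm_ops = {
	.fault = cma_heap_vm_fault,
};

static int cma_heap_mmap(struct dma_buf *dmabuf, struct vm_area_struct *vma)
{
	struct cma_heap_buffer *buffer = dmabuf->priv;

	/* dma-buf mappings must be shared, never private COW. */
	if ((vma->vm_flags & (VM_SHARED | VM_MAYSHARE)) == 0)
		return -EINVAL;

	vma->vm_ops = &dma_heap_vm_ops;
	vma->vm_private_data = buffer;

	return 0;
}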
194 static void *cma_heap_do_vmap(struct cma_heap_buffer *buffer) in cma_heap_do_vmap() argument
198 vaddr = vmap(buffer->pages, buffer->pagecount, VM_MAP, PAGE_KERNEL); in cma_heap_do_vmap()
207 struct cma_heap_buffer *buffer = dmabuf->priv; in cma_heap_vmap() local
211 mutex_lock(&buffer->lock); in cma_heap_vmap()
212 if (buffer->vmap_cnt) { in cma_heap_vmap()
213 buffer->vmap_cnt++; in cma_heap_vmap()
214 iosys_map_set_vaddr(map, buffer->vaddr); in cma_heap_vmap()
218 vaddr = cma_heap_do_vmap(buffer); in cma_heap_vmap()
223 buffer->vaddr = vaddr; in cma_heap_vmap()
224 buffer->vmap_cnt++; in cma_heap_vmap()
225 iosys_map_set_vaddr(map, buffer->vaddr); in cma_heap_vmap()
227 mutex_unlock(&buffer->lock); in cma_heap_vmap()
234 struct cma_heap_buffer *buffer = dmabuf->priv; in cma_heap_vunmap() local
236 mutex_lock(&buffer->lock); in cma_heap_vunmap()
237 if (!--buffer->vmap_cnt) { in cma_heap_vunmap()
238 vunmap(buffer->vaddr); in cma_heap_vunmap()
239 buffer->vaddr = NULL; in cma_heap_vunmap()
241 mutex_unlock(&buffer->lock); in cma_heap_vunmap()
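The vmap hits implement a refcounted kernel mapping: the first cma_heap_vmap() call builds the mapping with vmap(), later callers just bump buffer->vmap_cnt, and cma_heap_vunmap() tears the mapping down when the count reaches zero. A sketch assuming the mainline iosys_map-based signatures; the error paths and the final iosys_map_clear() are filled in:

static void *cma_heap_do_vmap(struct cma_heap_buffer *buffer)
{
	void *vaddr;

	vaddr = vmap(buffer->pages, buffer->pagecount, VM_MAP, PAGE_KERNEL);
	if (!vaddr)
		return ERR_PTR(-ENOMEM);

	return vaddr;
}

static int cma_heap_vmap(struct dma_buf *dmabuf, struct iosys_map *map)
{
	struct cma_heap_buffer *buffer = dmabuf->priv;
	void *vaddr;
	int ret = 0;

	mutex_lock(&buffer->lock);
	if (buffer->vmap_cnt) {
		/* Reuse the existing mapping; just bump the refcount. */
		buffer->vmap_cnt++;
		iosys_map_set_vaddr(map, buffer->vaddr);
		goto out;
	}

	vaddr = cma_heap_do_vmap(buffer);
	if (IS_ERR(vaddr)) {
		ret = PTR_ERR(vaddr);
		goto out;
	}
	buffer->vaddr = vaddr;
	buffer->vmap_cnt++;
	iosys_map_set_vaddr(map, buffer->vaddr);
out:
	mutex_unlock(&buffer->lock);

	return ret;
}

static void cma_heap_vunmap(struct dma_buf *dmabuf, struct iosys_map *map)
{
	struct cma_heap_buffer *buffer = dmabuf->priv;

	mutex_lock(&buffer->lock);
	/* Tear down the mapping only when the last user drops it. */
	if (!--buffer->vmap_cnt) {
		vunmap(buffer->vaddr);
		buffer->vaddr = NULL;
	}
	mutex_unlock(&buffer->lock);

	iosys_map_clear(map);
}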
247 struct cma_heap_buffer *buffer = dmabuf->priv; in cma_heap_dma_buf_release() local
248 struct cma_heap *cma_heap = buffer->heap; in cma_heap_dma_buf_release()
250 if (buffer->vmap_cnt > 0) { in cma_heap_dma_buf_release()
251 WARN(1, "%s: buffer still mapped in the kernel\n", __func__); in cma_heap_dma_buf_release()
252 vunmap(buffer->vaddr); in cma_heap_dma_buf_release()
253 buffer->vaddr = NULL; in cma_heap_dma_buf_release()
257 kfree(buffer->pages); in cma_heap_dma_buf_release()
259 cma_release(cma_heap->cma, buffer->cma_pages, buffer->pagecount); in cma_heap_dma_buf_release()
260 kfree(buffer); in cma_heap_dma_buf_release()
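Release warns if a vmap reference leaked, then frees the page-pointer array and returns the contiguous range to CMA. A short sketch; the ordering of the frees follows the match listing:

static void cma_heap_dma_buf_release(struct dma_buf *dmabuf)
{
	struct cma_heap_buffer *buffer = dmabuf->priv;
	struct cma_heap *cma_heap = buffer->heap;

	if (buffer->vmap_cnt > 0) {
		/* A leaked vmap reference; warn and clean up anyway. */
		WARN(1, "%s: buffer still mapped in the kernel\n", __func__);
		vunmap(buffer->vaddr);
		buffer->vaddr = NULL;
	}

	/* Free the page-pointer array, then the CMA area itself. */
	kfree(buffer->pages);
	cma_release(cma_heap->cma, buffer->cma_pages, buffer->pagecount);
	kfree(buffer);
}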
282 struct cma_heap_buffer *buffer; in cma_heap_allocate() local
292 buffer = kzalloc(sizeof(*buffer), GFP_KERNEL); in cma_heap_allocate()
293 if (!buffer) in cma_heap_allocate()
296 INIT_LIST_HEAD(&buffer->attachments); in cma_heap_allocate()
297 mutex_init(&buffer->lock); in cma_heap_allocate()
298 buffer->len = size; in cma_heap_allocate()
330 buffer->pages = kmalloc_array(pagecount, sizeof(*buffer->pages), GFP_KERNEL); in cma_heap_allocate()
331 if (!buffer->pages) { in cma_heap_allocate()
337 buffer->pages[pg] = &cma_pages[pg]; in cma_heap_allocate()
339 buffer->cma_pages = cma_pages; in cma_heap_allocate()
340 buffer->heap = cma_heap; in cma_heap_allocate()
341 buffer->pagecount = pagecount; in cma_heap_allocate()
346 exp_info.size = buffer->len; in cma_heap_allocate()
348 exp_info.priv = buffer; in cma_heap_allocate()
357 kfree(buffer->pages); in cma_heap_allocate()
361 kfree(buffer); in cma_heap_allocate()
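The allocate hits cover buffer setup: a zeroed cma_heap_buffer, a kmalloc_array() of page pointers aliasing the single contiguous CMA allocation, and a dma_buf export whose priv points back at the buffer. A sketch of the whole function, assuming mainline cma_heap.c; the alignment clamp, zeroing, export-info fields, and error-path label names are filled in and may differ by kernel version:

static struct dma_buf *cma_heap_allocate(struct dma_heap *heap,
					 unsigned long len,
					 unsigned long fd_flags,
					 unsigned long heap_flags)
{
	struct cma_heap *cma_heap = dma_heap_get_drvdata(heap);
	struct cma_heap_buffer *buffer;
	DEFINE_DMA_BUF_EXPORT_INFO(exp_info);
	size_t size = PAGE_ALIGN(len);
	pgoff_t pagecount = size >> PAGE_SHIFT;
	unsigned long align = get_order(size);
	struct page *cma_pages;
	struct dma_buf *dmabuf;
	int ret = -ENOMEM;
	pgoff_t pg;

	if (align > CONFIG_CMA_ALIGNMENT)
		align = CONFIG_CMA_ALIGNMENT;

	buffer = kzalloc(sizeof(*buffer), GFP_KERNEL);
	if (!buffer)
		return ERR_PTR(-ENOMEM);

	INIT_LIST_HEAD(&buffer->attachments);
	mutex_init(&buffer->lock);
	buffer->len = size;

	/* One contiguous allocation backs the whole buffer. */
	cma_pages = cma_alloc(cma_heap->cma, pagecount, align, false);
	if (!cma_pages)
		goto free_buffer;

	/* Zero the buffer before exposing it (elided: mainline clears
	 * highmem pages one at a time via kmap).
	 */
	if (!PageHighMem(cma_pages))
		memset(page_address(cma_pages), 0, size);

	buffer->pages = kmalloc_array(pagecount, sizeof(*buffer->pages),
				      GFP_KERNEL);
	if (!buffer->pages) {
		ret = -ENOMEM;
		goto free_cma;
	}

	/* Expose the contiguous run as an array of struct page pointers. */
	for (pg = 0; pg < pagecount; pg++)
		buffer->pages[pg] = &cma_pages[pg];

	buffer->cma_pages = cma_pages;
	buffer->heap = cma_heap;
	buffer->pagecount = pagecount;

	/* Export as a dma-buf with this heap's ops; priv -> buffer. */
	exp_info.exp_name = dma_heap_get_name(heap);
	exp_info.ops = &cma_heap_buf_ops;
	exp_info.size = buffer->len;
	exp_info.flags = fd_flags;
	exp_info.priv = buffer;
	dmabuf = dma_buf_export(&exp_info);
	if (IS_ERR(dmabuf)) {
		ret = PTR_ERR(dmabuf);
		goto free_pages;
	}
	return dmabuf;

free_pages:
	kfree(buffer->pages);
free_cma:
	cma_release(cma_heap->cma, cma_pages, pagecount);
free_buffer:
	kfree(buffer);

	return ERR_PTR(ret);
}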