Lines matching refs:dev in kernel/dma/mapping.c

36 static void dmam_release(struct device *dev, void *res)  in dmam_release()  argument
40 dma_free_attrs(dev, this->size, this->vaddr, this->dma_handle, in dmam_release()
44 static int dmam_match(struct device *dev, void *res, void *match_data) in dmam_match() argument
65 void dmam_free_coherent(struct device *dev, size_t size, void *vaddr, in dmam_free_coherent() argument
70 WARN_ON(devres_destroy(dev, dmam_release, dmam_match, &match_data)); in dmam_free_coherent()
71 dma_free_coherent(dev, size, vaddr, dma_handle); in dmam_free_coherent()
89 void *dmam_alloc_attrs(struct device *dev, size_t size, dma_addr_t *dma_handle, in dmam_alloc_attrs() argument
99 vaddr = dma_alloc_attrs(dev, size, dma_handle, gfp, attrs); in dmam_alloc_attrs()
110 devres_add(dev, dr); in dmam_alloc_attrs()
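
The dmam_* entry points above are the devres-managed wrappers: whatever a driver allocates through them is released by dmam_release() when the driver unbinds. A minimal usage sketch (not taken from the file above; the probe function and buffer size are made up):

    #include <linux/dma-mapping.h>
    #include <linux/platform_device.h>

    static int my_probe(struct platform_device *pdev)
    {
            dma_addr_t dma_handle;
            void *vaddr;

            /* Managed allocation: freed automatically on driver detach. */
            vaddr = dmam_alloc_coherent(&pdev->dev, 4096, &dma_handle,
                                        GFP_KERNEL);
            if (!vaddr)
                    return -ENOMEM;

            /* dma_handle is the device-visible address, vaddr the CPU view. */
            return 0;
    }
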
116 static bool dma_go_direct(struct device *dev, dma_addr_t mask, in dma_go_direct() argument
122 if (dev->dma_ops_bypass) in dma_go_direct()
123 return min_not_zero(mask, dev->bus_dma_limit) >= in dma_go_direct()
124 dma_direct_get_required_mask(dev); in dma_go_direct()
135 static inline bool dma_alloc_direct(struct device *dev, in dma_alloc_direct() argument
138 return dma_go_direct(dev, dev->coherent_dma_mask, ops); in dma_alloc_direct()
141 static inline bool dma_map_direct(struct device *dev, in dma_map_direct() argument
144 return dma_go_direct(dev, *dev->dma_mask, ops); in dma_map_direct()
147 dma_addr_t dma_map_page_attrs(struct device *dev, struct page *page, in dma_map_page_attrs() argument
151 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_map_page_attrs()
156 if (WARN_ON_ONCE(!dev->dma_mask)) in dma_map_page_attrs()
159 if (dma_map_direct(dev, ops) || in dma_map_page_attrs()
160 arch_dma_map_page_direct(dev, page_to_phys(page) + offset + size)) in dma_map_page_attrs()
161 addr = dma_direct_map_page(dev, page, offset, size, dir, attrs); in dma_map_page_attrs()
163 addr = ops->map_page(dev, page, offset, size, dir, attrs); in dma_map_page_attrs()
165 debug_dma_map_page(dev, page, offset, size, dir, addr, attrs); in dma_map_page_attrs()
171 void dma_unmap_page_attrs(struct device *dev, dma_addr_t addr, size_t size, in dma_unmap_page_attrs() argument
174 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_unmap_page_attrs()
177 if (dma_map_direct(dev, ops) || in dma_unmap_page_attrs()
178 arch_dma_unmap_page_direct(dev, addr + size)) in dma_unmap_page_attrs()
179 dma_direct_unmap_page(dev, addr, size, dir, attrs); in dma_unmap_page_attrs()
181 ops->unmap_page(dev, addr, size, dir, attrs); in dma_unmap_page_attrs()
182 debug_dma_unmap_page(dev, addr, size, dir); in dma_unmap_page_attrs()
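
dma_map_page_attrs()/dma_unmap_page_attrs() implement the streaming mapping path. From the caller's side it looks roughly like this sketch (device, page and direction are placeholders):

    #include <linux/dma-mapping.h>

    static int my_map_one_page(struct device *dev, struct page *page)
    {
            dma_addr_t addr;

            addr = dma_map_page(dev, page, 0, PAGE_SIZE, DMA_TO_DEVICE);
            if (dma_mapping_error(dev, addr))
                    return -ENOMEM;

            /* ... program the hardware with "addr" and run the transfer ... */

            dma_unmap_page(dev, addr, PAGE_SIZE, DMA_TO_DEVICE);
            return 0;
    }
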
186 static int __dma_map_sg_attrs(struct device *dev, struct scatterlist *sg, in __dma_map_sg_attrs() argument
189 const struct dma_map_ops *ops = get_dma_ops(dev); in __dma_map_sg_attrs()
194 if (WARN_ON_ONCE(!dev->dma_mask)) in __dma_map_sg_attrs()
197 if (dma_map_direct(dev, ops) || in __dma_map_sg_attrs()
198 arch_dma_map_sg_direct(dev, sg, nents)) in __dma_map_sg_attrs()
199 ents = dma_direct_map_sg(dev, sg, nents, dir, attrs); in __dma_map_sg_attrs()
201 ents = ops->map_sg(dev, sg, nents, dir, attrs); in __dma_map_sg_attrs()
205 debug_dma_map_sg(dev, sg, nents, ents, dir, attrs); in __dma_map_sg_attrs()
231 unsigned int dma_map_sg_attrs(struct device *dev, struct scatterlist *sg, in dma_map_sg_attrs() argument
236 ret = __dma_map_sg_attrs(dev, sg, nents, dir, attrs); in dma_map_sg_attrs()
270 int dma_map_sgtable(struct device *dev, struct sg_table *sgt, in dma_map_sgtable() argument
275 nents = __dma_map_sg_attrs(dev, sgt->sgl, sgt->orig_nents, dir, attrs); in dma_map_sgtable()
283 void dma_unmap_sg_attrs(struct device *dev, struct scatterlist *sg, in dma_unmap_sg_attrs() argument
287 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_unmap_sg_attrs()
290 debug_dma_unmap_sg(dev, sg, nents, dir); in dma_unmap_sg_attrs()
291 if (dma_map_direct(dev, ops) || in dma_unmap_sg_attrs()
292 arch_dma_unmap_sg_direct(dev, sg, nents)) in dma_unmap_sg_attrs()
293 dma_direct_unmap_sg(dev, sg, nents, dir, attrs); in dma_unmap_sg_attrs()
295 ops->unmap_sg(dev, sg, nents, dir, attrs); in dma_unmap_sg_attrs()
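
dma_map_sgtable() is the error-returning wrapper around __dma_map_sg_attrs() seen above; it stores the mapped entry count in sgt->nents. A sketch, assuming the sg_table was already populated (e.g. with sg_alloc_table_from_pages()):

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    static int my_map_table(struct device *dev, struct sg_table *sgt)
    {
            struct scatterlist *sg;
            int ret, i;

            ret = dma_map_sgtable(dev, sgt, DMA_BIDIRECTIONAL, 0);
            if (ret)
                    return ret;     /* negative errno, nothing mapped */

            /* sgt->nents now holds the (possibly merged) DMA segments. */
            for_each_sgtable_dma_sg(sgt, sg, i)
                    pr_debug("seg %d: %pad + %u\n", i,
                             &sg_dma_address(sg), sg_dma_len(sg));

            dma_unmap_sgtable(dev, sgt, DMA_BIDIRECTIONAL, 0);
            return 0;
    }
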
299 dma_addr_t dma_map_resource(struct device *dev, phys_addr_t phys_addr, in dma_map_resource() argument
302 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_map_resource()
307 if (WARN_ON_ONCE(!dev->dma_mask)) in dma_map_resource()
310 if (dma_map_direct(dev, ops)) in dma_map_resource()
311 addr = dma_direct_map_resource(dev, phys_addr, size, dir, attrs); in dma_map_resource()
313 addr = ops->map_resource(dev, phys_addr, size, dir, attrs); in dma_map_resource()
315 debug_dma_map_resource(dev, phys_addr, size, dir, addr, attrs); in dma_map_resource()
320 void dma_unmap_resource(struct device *dev, dma_addr_t addr, size_t size, in dma_unmap_resource() argument
323 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_unmap_resource()
326 if (!dma_map_direct(dev, ops) && ops->unmap_resource) in dma_unmap_resource()
327 ops->unmap_resource(dev, addr, size, dir, attrs); in dma_unmap_resource()
328 debug_dma_unmap_resource(dev, addr, size, dir); in dma_unmap_resource()
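
dma_map_resource() covers physical addresses with no struct page behind them, typically an MMIO FIFO handed to a DMA engine. Sketch only; the address and size are placeholders:

    #include <linux/dma-mapping.h>
    #include <linux/sizes.h>

    static dma_addr_t my_map_fifo(struct device *dma_dev, phys_addr_t fifo_phys)
    {
            dma_addr_t addr;

            addr = dma_map_resource(dma_dev, fifo_phys, SZ_4K,
                                    DMA_BIDIRECTIONAL, 0);
            if (dma_mapping_error(dma_dev, addr))
                    return DMA_MAPPING_ERROR;

            /* Undo later with dma_unmap_resource(dma_dev, addr, SZ_4K,
             * DMA_BIDIRECTIONAL, 0). */
            return addr;
    }
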
332 void dma_sync_single_for_cpu(struct device *dev, dma_addr_t addr, size_t size, in dma_sync_single_for_cpu() argument
335 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_sync_single_for_cpu()
338 if (dma_map_direct(dev, ops)) in dma_sync_single_for_cpu()
339 dma_direct_sync_single_for_cpu(dev, addr, size, dir); in dma_sync_single_for_cpu()
341 ops->sync_single_for_cpu(dev, addr, size, dir); in dma_sync_single_for_cpu()
342 debug_dma_sync_single_for_cpu(dev, addr, size, dir); in dma_sync_single_for_cpu()
346 void dma_sync_single_for_device(struct device *dev, dma_addr_t addr, in dma_sync_single_for_device() argument
349 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_sync_single_for_device()
352 if (dma_map_direct(dev, ops)) in dma_sync_single_for_device()
353 dma_direct_sync_single_for_device(dev, addr, size, dir); in dma_sync_single_for_device()
355 ops->sync_single_for_device(dev, addr, size, dir); in dma_sync_single_for_device()
356 debug_dma_sync_single_for_device(dev, addr, size, dir); in dma_sync_single_for_device()
360 void dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, in dma_sync_sg_for_cpu() argument
363 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_sync_sg_for_cpu()
366 if (dma_map_direct(dev, ops)) in dma_sync_sg_for_cpu()
367 dma_direct_sync_sg_for_cpu(dev, sg, nelems, dir); in dma_sync_sg_for_cpu()
369 ops->sync_sg_for_cpu(dev, sg, nelems, dir); in dma_sync_sg_for_cpu()
370 debug_dma_sync_sg_for_cpu(dev, sg, nelems, dir); in dma_sync_sg_for_cpu()
374 void dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg, in dma_sync_sg_for_device() argument
377 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_sync_sg_for_device()
380 if (dma_map_direct(dev, ops)) in dma_sync_sg_for_device()
381 dma_direct_sync_sg_for_device(dev, sg, nelems, dir); in dma_sync_sg_for_device()
383 ops->sync_sg_for_device(dev, sg, nelems, dir); in dma_sync_sg_for_device()
384 debug_dma_sync_sg_for_device(dev, sg, nelems, dir); in dma_sync_sg_for_device()
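
The four sync helpers implement the ownership handshake for streaming buffers: the CPU may only touch the memory between a _for_cpu call and the following _for_device call. An illustrative sketch with invented names and lengths:

    #include <linux/dma-mapping.h>

    static void my_inspect_rx(struct device *dev, dma_addr_t addr,
                              void *cpu_buf, size_t len)
    {
            /* Take the buffer back from the device and read what it wrote. */
            dma_sync_single_for_cpu(dev, addr, len, DMA_FROM_DEVICE);
            print_hex_dump_debug("rx: ", DUMP_PREFIX_OFFSET, 16, 1,
                                 cpu_buf, len, false);

            /* Hand it back to the device for the next transfer. */
            dma_sync_single_for_device(dev, addr, len, DMA_FROM_DEVICE);
    }
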
399 int dma_get_sgtable_attrs(struct device *dev, struct sg_table *sgt, in dma_get_sgtable_attrs() argument
403 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_get_sgtable_attrs()
405 if (dma_alloc_direct(dev, ops)) in dma_get_sgtable_attrs()
406 return dma_direct_get_sgtable(dev, sgt, cpu_addr, dma_addr, in dma_get_sgtable_attrs()
410 return ops->get_sgtable(dev, sgt, cpu_addr, dma_addr, size, attrs); in dma_get_sgtable_attrs()
419 pgprot_t dma_pgprot(struct device *dev, pgprot_t prot, unsigned long attrs) in dma_pgprot() argument
421 if (dev_is_dma_coherent(dev)) in dma_pgprot()
438 bool dma_can_mmap(struct device *dev) in dma_can_mmap() argument
440 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_can_mmap()
442 if (dma_alloc_direct(dev, ops)) in dma_can_mmap()
443 return dma_direct_can_mmap(dev); in dma_can_mmap()
461 int dma_mmap_attrs(struct device *dev, struct vm_area_struct *vma, in dma_mmap_attrs() argument
465 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_mmap_attrs()
467 if (dma_alloc_direct(dev, ops)) in dma_mmap_attrs()
468 return dma_direct_mmap(dev, vma, cpu_addr, dma_addr, size, in dma_mmap_attrs()
472 return ops->mmap(dev, vma, cpu_addr, dma_addr, size, attrs); in dma_mmap_attrs()
476 u64 dma_get_required_mask(struct device *dev) in dma_get_required_mask() argument
478 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_get_required_mask()
480 if (dma_alloc_direct(dev, ops)) in dma_get_required_mask()
481 return dma_direct_get_required_mask(dev); in dma_get_required_mask()
483 return ops->get_required_mask(dev); in dma_get_required_mask()
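
dma_can_mmap()/dma_mmap_attrs() let a driver export a coherent buffer to user space from its .mmap file operation. A hypothetical sketch; struct mydev and its fields are assumptions, with the buffer allocated earlier via dma_alloc_coherent():

    #include <linux/dma-mapping.h>
    #include <linux/fs.h>
    #include <linux/mm.h>

    struct mydev {
            struct device *dev;
            void *cpu_addr;
            dma_addr_t dma_handle;
            size_t size;
    };

    static int mydev_mmap(struct file *file, struct vm_area_struct *vma)
    {
            struct mydev *md = file->private_data;

            if (!dma_can_mmap(md->dev))
                    return -ENXIO;

            /* dma_mmap_coherent() is dma_mmap_attrs() with attrs == 0. */
            return dma_mmap_coherent(md->dev, vma, md->cpu_addr,
                                     md->dma_handle, md->size);
    }
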
497 void *dma_alloc_attrs(struct device *dev, size_t size, dma_addr_t *dma_handle, in dma_alloc_attrs() argument
500 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_alloc_attrs()
503 WARN_ON_ONCE(!dev->coherent_dma_mask); in dma_alloc_attrs()
513 if (dma_alloc_from_dev_coherent(dev, size, dma_handle, &cpu_addr)) in dma_alloc_attrs()
519 if (dma_alloc_direct(dev, ops)) in dma_alloc_attrs()
520 cpu_addr = dma_direct_alloc(dev, size, dma_handle, flag, attrs); in dma_alloc_attrs()
522 cpu_addr = ops->alloc(dev, size, dma_handle, flag, attrs); in dma_alloc_attrs()
526 debug_dma_alloc_coherent(dev, size, *dma_handle, cpu_addr, attrs); in dma_alloc_attrs()
531 void dma_free_attrs(struct device *dev, size_t size, void *cpu_addr, in dma_free_attrs() argument
534 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_free_attrs()
536 if (dma_release_from_dev_coherent(dev, get_order(size), cpu_addr)) in dma_free_attrs()
550 debug_dma_free_coherent(dev, size, cpu_addr, dma_handle); in dma_free_attrs()
551 if (dma_alloc_direct(dev, ops)) in dma_free_attrs()
552 dma_direct_free(dev, size, cpu_addr, dma_handle, attrs); in dma_free_attrs()
554 ops->free(dev, size, cpu_addr, dma_handle, attrs); in dma_free_attrs()
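
dma_alloc_attrs()/dma_free_attrs() are the attribute-taking core behind dma_alloc_coherent(). A sketch with one explicit attribute; whether DMA_ATTR_WRITE_COMBINE has any effect is platform dependent, and the size is arbitrary:

    #include <linux/dma-mapping.h>
    #include <linux/sizes.h>

    static void *my_alloc_ring(struct device *dev, dma_addr_t *dma)
    {
            void *ring;

            ring = dma_alloc_attrs(dev, SZ_64K, dma, GFP_KERNEL,
                                   DMA_ATTR_WRITE_COMBINE);
            if (!ring)
                    return NULL;

            /* Pair with dma_free_attrs(dev, SZ_64K, ring, *dma,
             * DMA_ATTR_WRITE_COMBINE) on teardown. */
            return ring;
    }
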
558 static struct page *__dma_alloc_pages(struct device *dev, size_t size, in __dma_alloc_pages() argument
561 const struct dma_map_ops *ops = get_dma_ops(dev); in __dma_alloc_pages()
563 if (WARN_ON_ONCE(!dev->coherent_dma_mask)) in __dma_alloc_pages()
571 if (dma_alloc_direct(dev, ops)) in __dma_alloc_pages()
572 return dma_direct_alloc_pages(dev, size, dma_handle, dir, gfp); in __dma_alloc_pages()
575 return ops->alloc_pages(dev, size, dma_handle, dir, gfp); in __dma_alloc_pages()
578 struct page *dma_alloc_pages(struct device *dev, size_t size, in dma_alloc_pages() argument
581 struct page *page = __dma_alloc_pages(dev, size, dma_handle, dir, gfp); in dma_alloc_pages()
584 debug_dma_map_page(dev, page, 0, size, dir, *dma_handle, 0); in dma_alloc_pages()
589 static void __dma_free_pages(struct device *dev, size_t size, struct page *page, in __dma_free_pages() argument
592 const struct dma_map_ops *ops = get_dma_ops(dev); in __dma_free_pages()
595 if (dma_alloc_direct(dev, ops)) in __dma_free_pages()
596 dma_direct_free_pages(dev, size, page, dma_handle, dir); in __dma_free_pages()
598 ops->free_pages(dev, size, page, dma_handle, dir); in __dma_free_pages()
601 void dma_free_pages(struct device *dev, size_t size, struct page *page, in dma_free_pages() argument
604 debug_dma_unmap_page(dev, dma_handle, size, dir); in dma_free_pages()
605 __dma_free_pages(dev, size, page, dma_handle, dir); in dma_free_pages()
609 int dma_mmap_pages(struct device *dev, struct vm_area_struct *vma, in dma_mmap_pages() argument
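
dma_alloc_pages() returns ordinary pages plus a device address; the memory may be non-coherent, so ownership transfers go through the sync helpers shown earlier. Sketch with an invented size and direction:

    #include <linux/dma-mapping.h>
    #include <linux/sizes.h>

    static struct page *my_alloc_dma_pages(struct device *dev, dma_addr_t *dma)
    {
            struct page *page;

            page = dma_alloc_pages(dev, SZ_16K, dma, DMA_FROM_DEVICE,
                                   GFP_KERNEL);
            if (!page)
                    return NULL;

            /* Release with dma_free_pages(dev, SZ_16K, page, *dma,
             * DMA_FROM_DEVICE). */
            return page;
    }
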
622 static struct sg_table *alloc_single_sgt(struct device *dev, size_t size, in alloc_single_sgt() argument
633 page = __dma_alloc_pages(dev, size, &sgt->sgl->dma_address, dir, gfp); in alloc_single_sgt()
646 struct sg_table *dma_alloc_noncontiguous(struct device *dev, size_t size, in dma_alloc_noncontiguous() argument
649 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_alloc_noncontiguous()
658 sgt = ops->alloc_noncontiguous(dev, size, dir, gfp, attrs); in dma_alloc_noncontiguous()
660 sgt = alloc_single_sgt(dev, size, dir, gfp); in dma_alloc_noncontiguous()
664 debug_dma_map_sg(dev, sgt->sgl, sgt->orig_nents, 1, dir, attrs); in dma_alloc_noncontiguous()
670 static void free_single_sgt(struct device *dev, size_t size, in free_single_sgt() argument
673 __dma_free_pages(dev, size, sg_page(sgt->sgl), sgt->sgl->dma_address, in free_single_sgt()
679 void dma_free_noncontiguous(struct device *dev, size_t size, in dma_free_noncontiguous() argument
682 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_free_noncontiguous()
684 debug_dma_unmap_sg(dev, sgt->sgl, sgt->orig_nents, dir); in dma_free_noncontiguous()
686 ops->free_noncontiguous(dev, size, sgt, dir); in dma_free_noncontiguous()
688 free_single_sgt(dev, size, sgt, dir); in dma_free_noncontiguous()
692 void *dma_vmap_noncontiguous(struct device *dev, size_t size, in dma_vmap_noncontiguous() argument
695 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_vmap_noncontiguous()
704 void dma_vunmap_noncontiguous(struct device *dev, void *vaddr) in dma_vunmap_noncontiguous() argument
706 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_vunmap_noncontiguous()
713 int dma_mmap_noncontiguous(struct device *dev, struct vm_area_struct *vma, in dma_mmap_noncontiguous() argument
716 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_mmap_noncontiguous()
726 return dma_mmap_pages(dev, vma, size, sg_page(sgt->sgl)); in dma_mmap_noncontiguous()
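
The noncontiguous API hands back an sg_table that is already mapped for the device; dma_vmap_noncontiguous() provides a contiguous CPU view on demand. Sketch only; size and direction are placeholders:

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>
    #include <linux/sizes.h>

    static int my_use_noncontig(struct device *dev)
    {
            struct sg_table *sgt;
            void *vaddr;

            sgt = dma_alloc_noncontiguous(dev, SZ_1M, DMA_BIDIRECTIONAL,
                                          GFP_KERNEL, 0);
            if (!sgt)
                    return -ENOMEM;

            vaddr = dma_vmap_noncontiguous(dev, SZ_1M, sgt);
            if (!vaddr) {
                    dma_free_noncontiguous(dev, SZ_1M, sgt, DMA_BIDIRECTIONAL);
                    return -ENOMEM;
            }

            /* ... CPU works through vaddr, the device through sgt->sgl ... */

            dma_vunmap_noncontiguous(dev, vaddr);
            dma_free_noncontiguous(dev, SZ_1M, sgt, DMA_BIDIRECTIONAL);
            return 0;
    }
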
730 static int dma_supported(struct device *dev, u64 mask) in dma_supported() argument
732 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_supported()
739 return dma_direct_supported(dev, mask); in dma_supported()
742 return ops->dma_supported(dev, mask); in dma_supported()
745 bool dma_pci_p2pdma_supported(struct device *dev) in dma_pci_p2pdma_supported() argument
747 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_pci_p2pdma_supported()
763 int dma_set_mask(struct device *dev, u64 mask) in dma_set_mask() argument
771 if (!dev->dma_mask || !dma_supported(dev, mask)) in dma_set_mask()
774 arch_dma_set_mask(dev, mask); in dma_set_mask()
775 *dev->dma_mask = mask; in dma_set_mask()
780 int dma_set_coherent_mask(struct device *dev, u64 mask) in dma_set_coherent_mask() argument
788 if (!dma_supported(dev, mask)) in dma_set_coherent_mask()
791 dev->coherent_dma_mask = mask; in dma_set_coherent_mask()
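
dma_set_mask() and dma_set_coherent_mask() are usually called together at probe time through dma_set_mask_and_coherent(). Sketch; the 64-then-32-bit fallback is a common convention, not a requirement:

    #include <linux/dma-mapping.h>

    static int my_set_masks(struct device *dev)
    {
            int ret;

            ret = dma_set_mask_and_coherent(dev, DMA_BIT_MASK(64));
            if (ret)
                    ret = dma_set_mask_and_coherent(dev, DMA_BIT_MASK(32));

            return ret;     /* 0 on success, -EIO if neither mask works */
    }
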
796 size_t dma_max_mapping_size(struct device *dev) in dma_max_mapping_size() argument
798 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_max_mapping_size()
801 if (dma_map_direct(dev, ops)) in dma_max_mapping_size()
802 size = dma_direct_max_mapping_size(dev); in dma_max_mapping_size()
804 size = ops->max_mapping_size(dev); in dma_max_mapping_size()
810 size_t dma_opt_mapping_size(struct device *dev) in dma_opt_mapping_size() argument
812 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_opt_mapping_size()
818 return min(dma_max_mapping_size(dev), size); in dma_opt_mapping_size()
822 bool dma_need_sync(struct device *dev, dma_addr_t dma_addr) in dma_need_sync() argument
824 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_need_sync()
826 if (dma_map_direct(dev, ops)) in dma_need_sync()
827 return dma_direct_need_sync(dev, dma_addr); in dma_need_sync()
832 unsigned long dma_get_merge_boundary(struct device *dev) in dma_get_merge_boundary() argument
834 const struct dma_map_ops *ops = get_dma_ops(dev); in dma_get_merge_boundary()
839 return ops->get_merge_boundary(dev); in dma_get_merge_boundary()
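
The remaining helpers report mapping limits, which block and network drivers consult when sizing requests. A small sketch; the reported values depend entirely on the platform, IOMMU and swiotlb configuration:

    #include <linux/device.h>
    #include <linux/dma-mapping.h>

    static void my_report_limits(struct device *dev)
    {
            dev_info(dev, "max mapping %zu, preferred %zu, merge boundary %#lx\n",
                     dma_max_mapping_size(dev),
                     dma_opt_mapping_size(dev),
                     dma_get_merge_boundary(dev));
    }
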