
Searched refs:iova (Results 1 – 25 of 304) sorted by relevance


/openbmc/linux/drivers/media/platform/nvidia/tegra-vde/
iommu.c
24 struct iova *iova; in tegra_vde_iommu_map() local
34 if (!iova) in tegra_vde_iommu_map()
37 addr = iova_dma_addr(&vde->iova, iova); in tegra_vde_iommu_map()
42 __free_iova(&vde->iova, iova); in tegra_vde_iommu_map()
46 *iovap = iova; in tegra_vde_iommu_map()
55 dma_addr_t addr = iova_dma_addr(&vde->iova, iova); in tegra_vde_iommu_unmap()
58 __free_iova(&vde->iova, iova); in tegra_vde_iommu_unmap()
64 struct iova *iova; in tegra_vde_iommu_init() local
103 iova = reserve_iova(&vde->iova, 0x60000000 >> shift, in tegra_vde_iommu_init()
105 if (!iova) { in tegra_vde_iommu_init()
[all …]
dmabuf-cache.c
28 struct iova *iova; member
39 tegra_vde_iommu_unmap(entry->vde, entry->iova); in tegra_vde_release_entry()
73 struct iova *iova; in tegra_vde_dmabuf_cache_map() local
91 *addrp = iova_dma_addr(&vde->iova, entry->iova); in tegra_vde_dmabuf_cache_map()
125 err = tegra_vde_iommu_map(vde, sgt, &iova, dmabuf->size); in tegra_vde_dmabuf_cache_map()
129 *addrp = iova_dma_addr(&vde->iova, iova); in tegra_vde_dmabuf_cache_map()
132 iova = NULL; in tegra_vde_dmabuf_cache_map()
139 entry->iova = iova; in tegra_vde_dmabuf_cache_map()
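The tegra-vde hits above show the generic <linux/iova.h> allocate/convert/free pattern. A minimal sketch of that flow, assuming the stock alloc_iova()/iova_dma_addr()/__free_iova() API; the function name, parameters, and error handling here are illustrative, not the driver's actual code:

#include <linux/iova.h>

/* Sketch: allocate npages of IOVA space below limit_pfn and return the
 * bus address; the struct iova is handed back so the caller can later
 * release the range with __free_iova(iovad, *iovap). */
static dma_addr_t sketch_map(struct iova_domain *iovad, unsigned long npages,
			     unsigned long limit_pfn, struct iova **iovap)
{
	struct iova *iova;

	iova = alloc_iova(iovad, npages, limit_pfn, true); /* size-aligned */
	if (!iova)
		return 0;

	*iovap = iova;
	return iova_dma_addr(iovad, iova); /* pfn_lo << iova_shift(iovad) */
}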
/openbmc/linux/drivers/iommu/
iova.c
248 static void free_iova_mem(struct iova *iova) in free_iova_mem() argument
345 struct iova *iova = to_iova(node); in private_find_iova() local
375 struct iova *iova; in find_iova() local
414 struct iova *iova; in free_iova() local
510 struct iova *iova, *tmp; in put_iova_domain() local
524 struct iova *iova = to_iova(node); in __is_range_overlap() local
534 struct iova *iova; in alloc_and_init_iova() local
549 struct iova *iova; in __insert_new_range() local
552 if (iova) in __insert_new_range()
559 __adjust_overlap_range(struct iova *iova, in __adjust_overlap_range() argument
[all …]
tegra-gart.c
62 iova < gart->iovmm_end; \
63 iova += GART_PAGE_SIZE)
73 unsigned long iova) in gart_read_pte() argument
85 unsigned long iova; in do_gart_setup() local
87 for_each_gart_pte(gart, iova) in do_gart_setup()
197 unsigned long iova) in __gart_iommu_unmap() argument
204 gart_set_pte(gart, iova, 0); in __gart_iommu_unmap()
226 dma_addr_t iova) in gart_iommu_iova_to_phys() argument
235 pte = gart_read_pte(gart, iova); in gart_iommu_iova_to_phys()
290 unsigned long iova; in tegra_gart_suspend() local
[all …]
io-pgtable-arm-v7s.c
565 iova += pgsize; in arm_v7s_map_pages()
754 iova += pgsize; in arm_v7s_unmap_pages()
970 iova = 0; in arm_v7s_do_selftests()
973 if (ops->map_pages(ops, iova, iova, size, 1, in arm_v7s_do_selftests()
980 if (!ops->map_pages(ops, iova, iova + size, size, 1, in arm_v7s_do_selftests()
985 if (ops->iova_to_phys(ops, iova + 42) != (iova + 42)) in arm_v7s_do_selftests()
988 iova += SZ_16M; in arm_v7s_do_selftests()
1012 iova = 0; in arm_v7s_do_selftests()
1023 if (ops->map_pages(ops, iova, iova, size, 1, IOMMU_WRITE, in arm_v7s_do_selftests()
1027 if (ops->iova_to_phys(ops, iova + 42) != (iova + 42)) in arm_v7s_do_selftests()
[all …]
sun50i-iommu.c
301 unsigned long iova) in sun50i_iommu_zap_iova() argument
318 unsigned long iova) in sun50i_iommu_zap_ptw_cache() argument
340 sun50i_iommu_zap_iova(iommu, iova); in sun50i_iommu_zap_range()
406 unsigned long iova, size_t size) in sun50i_iommu_iotlb_sync_map() argument
612 &iova, &page_phys, &paddr, prot); in sun50i_iommu_map()
649 dma_addr_t iova) in sun50i_iommu_iova_to_phys() argument
667 sun50i_iova_get_page_offset(iova); in sun50i_iommu_iova_to_phys()
866 phys_addr_t iova; in sun50i_iommu_handle_pt_irq() local
883 return iova; in sun50i_iommu_handle_pt_irq()
889 phys_addr_t iova; in sun50i_iommu_handle_perm_irq() local
[all …]
io-pgtable-dart.c
124 unsigned long iova, phys_addr_t paddr, in dart_init_pte() argument
196 int tbl = dart_get_table(data, iova); in dart_get_l2()
202 ptep += dart_get_l1_index(data, iova); in dart_get_l2()
257 tbl = dart_get_table(data, iova); in dart_map_pages()
260 ptep += dart_get_l1_index(data, iova); in dart_map_pages()
310 ptep = dart_get_l2(data, iova); in dart_unmap_pages()
332 iova + i * pgsize, pgsize); in dart_unmap_pages()
342 unsigned long iova) in dart_iova_to_phys() argument
347 ptep = dart_get_l2(data, iova); in dart_iova_to_phys()
353 ptep += dart_get_l2_index(data, iova); in dart_iova_to_phys()
[all …]
exynos-iommu.c
97 #define section_offs(iova) (iova & (SECT_SIZE - 1)) argument
99 #define lpage_offs(iova) (iova & (LPAGE_SIZE - 1)) argument
101 #define spage_offs(iova) (iova & (SPAGE_SIZE - 1)) argument
108 return iova >> SECT_ORDER; in lv1ent_offset()
459 iova += SPAGE_SIZE; in __sysmmu_tlb_invalidate_entry()
669 sysmmu_iova_t iova) in sysmmu_tlb_invalidate_flpdcache() argument
1128 iova); in lv1set_section()
1135 iova); in lv1set_section()
1255 __func__, ret, size, iova); in exynos_iommu_map()
1312 ent = page_entry(ent, iova); in exynos_iommu_unmap()
[all …]
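The exynos-iommu macros above split an IOVA into a level-1 table index plus an in-section offset. A sketch with the usual SysMMU geometry (1 MiB sections, so SECT_ORDER = 20); treat the concrete sizes as assumptions:

#define SECT_ORDER		20
#define SECT_SIZE		(1UL << SECT_ORDER)	/* 1 MiB section */

#define section_offs(iova)	((iova) & (SECT_SIZE - 1))	/* offset in section */
#define lv1ent_offset(iova)	((iova) >> SECT_ORDER)		/* level-1 index */

/* Example: iova 0x40123456 -> lv1 index 0x401, section offset 0x23456 */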
/openbmc/linux/include/linux/
iova.h
18 struct iova { struct
37 struct iova anchor; /* rbtree lookup anchor */
43 static inline unsigned long iova_size(struct iova *iova) in iova_size() argument
45 return iova->pfn_hi - iova->pfn_lo + 1; in iova_size()
60 return iova & iova_mask(iovad); in iova_offset()
68 static inline dma_addr_t iova_dma_addr(struct iova_domain *iovad, struct iova *iova) in iova_dma_addr() argument
70 return (dma_addr_t)iova->pfn_lo << iova_shift(iovad); in iova_dma_addr()
75 return iova >> iova_shift(iovad); in iova_pfn()
85 void __free_iova(struct iova_domain *iovad, struct iova *iova);
114 static inline void __free_iova(struct iova_domain *iovad, struct iova *iova) in __free_iova() argument
[all …]
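The iova.h helpers above are pure pfn arithmetic over the domain granule. A worked example, assuming a 4 KiB granule (so iova_shift(iovad) == 12) and a range spanning pfn_lo = 0x100 to pfn_hi = 0x10f; values are hypothetical, chosen for illustration:

static void iova_arith_example(struct iova_domain *iovad, struct iova *iova)
{
	unsigned long npages = iova_size(iova);        /* 0x10f - 0x100 + 1 = 16 */
	dma_addr_t addr = iova_dma_addr(iovad, iova);  /* 0x100 << 12 = 0x100000 */
	unsigned long pfn = iova_pfn(iovad, 0x100fff); /* 0x100fff >> 12 = 0x100 */
}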
/openbmc/linux/drivers/fpga/
dfl-afu-dma-region.c
125 u64 iova, u64 size) in dma_region_check_iova() argument
127 if (!size && region->iova != iova) in dma_region_check_iova()
130 return (region->iova <= iova) && in dma_region_check_iova()
131 (region->length + region->iova >= iova + size); in dma_region_check_iova()
150 (unsigned long long)region->iova); in afu_dma_region_add()
164 if (region->iova < this->iova) in afu_dma_region_add()
166 else if (region->iova > this->iova) in afu_dma_region_add()
217 if (region->iova) in afu_dma_region_destroy()
262 if (iova < region->iova) in afu_dma_region_find()
264 else if (iova > region->iova) in afu_dma_region_find()
[all …]
/openbmc/linux/include/trace/events/
iommu.h
83 TP_ARGS(iova, paddr, size),
86 __field(u64, iova)
92 __entry->iova = iova;
98 __entry->iova, __entry->iova + __entry->size, __entry->paddr,
110 __field(u64, iova)
116 __entry->iova = iova;
122 __entry->iova, __entry->iova + __entry->size,
131 TP_ARGS(dev, iova, flags),
136 __field(u64, iova)
143 __entry->iova = iova;
[all …]
/openbmc/linux/drivers/staging/media/ipu3/
ipu3-dmamap.c
102 struct iova *iova; in imgu_dmamap_alloc() local
109 if (!iova) in imgu_dmamap_alloc()
146 __free_iova(&imgu->iova_domain, iova); in imgu_dmamap_alloc()
153 struct iova *iova; in imgu_dmamap_unmap() local
155 iova = find_iova(&imgu->iova_domain, in imgu_dmamap_unmap()
157 if (WARN_ON(!iova)) in imgu_dmamap_unmap()
163 __free_iova(&imgu->iova_domain, iova); in imgu_dmamap_unmap()
189 struct iova *iova; in imgu_dmamap_map_sg() local
209 if (!iova) in imgu_dmamap_map_sg()
213 iova->pfn_lo, iova->pfn_hi); in imgu_dmamap_map_sg()
[all …]
ipu3-mmu.c
157 iova >>= IPU3_PAGE_SHIFT; in address_to_pte_idx()
160 *l2pt_idx = iova & IPU3_L2PT_MASK; in address_to_pte_idx()
162 iova >>= IPU3_L2PT_SHIFT; in address_to_pte_idx()
165 *l1pt_idx = iova & IPU3_L1PT_MASK; in address_to_pte_idx()
264 iova, &paddr, size); in imgu_mmu_map()
269 iova, &paddr, size); in imgu_mmu_map()
278 iova += IPU3_PAGE_SIZE; in imgu_mmu_map()
333 imgu_mmu_unmap(info, iova, mapped); in imgu_mmu_map_sg()
392 iova, size); in imgu_mmu_unmap()
408 iova, unmapped_page); in imgu_mmu_unmap()
[all …]
/openbmc/linux/drivers/vfio/
iova_bitmap.c
36 unsigned long iova; member
112 unsigned long iova; member
128 unsigned long iova) in iova_bitmap_offset_to_index()
198 mapped->iova = iova_bitmap_mapped_iova(bitmap); in iova_bitmap_get()
254 bitmap->iova = iova; in iova_bitmap_alloc()
256 mapped->iova = iova; in iova_bitmap_alloc()
329 if (iova + remaining - 1 > max_iova) in iova_bitmap_mapped_length()
330 remaining -= ((iova + remaining - 1) - max_iova); in iova_bitmap_mapped_length()
403 unsigned long iova, size_t length) in iova_bitmap_set() argument
406 unsigned long cur_bit = ((iova - mapped->iova) >> in iova_bitmap_set()
[all …]
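iova_bitmap_set() above turns an IOVA into a bit index relative to the currently mapped window, one bit per page. A sketch of just that arithmetic; window_base and pgshift are stand-ins for the real struct fields:

static unsigned long iova_to_bit(unsigned long iova, unsigned long window_base,
				 unsigned int pgshift)
{
	/* Pages are numbered from the start of the mapped window. */
	return (iova - window_base) >> pgshift;
}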
vfio_iommu_type1.c
331 if (iova < vpfn->iova) in vfio_find_vpfn()
333 else if (iova > vpfn->iova) in vfio_find_vpfn()
352 if (new->iova < vpfn->iova) in vfio_link_pfn()
376 vpfn->iova = iova; in vfio_add_to_pfn_list()
996 entry->iova = *iova; in unmap_unpin_fast()
1040 dma_addr_t iova = dma->iova, end = dma->iova + dma->size; in vfio_unmap_unpin() local
1220 if (dma->iova < iova) in vfio_iova_dirty_bitmap()
1460 dma_addr_t iova = dma->iova; in vfio_pin_map_dma() local
1636 dma->iova = iova; in vfio_dma_do_map()
1700 iova = dma->iova; in vfio_iommu_replay()
[all …]
/openbmc/linux/drivers/vdpa/vdpa_user/
iova_domain.c
109 while (iova <= last) { in vduse_domain_map_bounce_page()
118 iova += PAGE_SIZE; in vduse_domain_map_bounce_page()
129 while (iova <= last) { in vduse_domain_unmap_bounce_page()
132 iova += PAGE_SIZE; in vduse_domain_unmap_bounce_page()
185 iova += sz; in vduse_domain_bounce()
386 if (!iova) in vduse_domain_map_page()
401 return iova; in vduse_domain_map_page()
433 if (!iova || !orig) in vduse_domain_alloc_coherent()
437 if (vduse_iotlb_add_range(domain, (u64)iova, (u64)iova + size - 1, in vduse_domain_alloc_coherent()
445 *dma_addr = iova; in vduse_domain_alloc_coherent()
[all …]
/openbmc/linux/drivers/infiniband/sw/rxe/
rxe_mr.c
35 if (iova < mr->ibmr.iova || in mr_check_range()
36 iova + length > mr->ibmr.iova + mr->ibmr.length) { in mr_check_range()
77 return (iova >> mr->page_shift) - (mr->ibmr.iova >> mr->page_shift); in rxe_mr_iova_to_index()
82 return iova & (mr_page_size(mr) - 1); in rxe_mr_iova_to_page_offset()
345 u64 iova; in copy_data() local
397 iova = sge->addr + offset; in copy_data()
461 iova += bytes; in rxe_flush_pmem_iova()
485 page_offset = iova & (PAGE_SIZE - 1); in rxe_mr_do_atomic_op()
486 page = ib_virt_dma_to_page(iova); in rxe_mr_do_atomic_op()
543 page = ib_virt_dma_to_page(iova); in rxe_mr_do_atomic_write()
[all …]
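mr_check_range() above rejects any access that does not fall entirely inside the registered memory region. The equivalent predicate in isolation, with plain u64 parameters standing in for the struct rxe_mr fields (overflow handling omitted in this sketch):

static bool mr_range_ok(u64 iova, u64 length, u64 mr_iova, u64 mr_length)
{
	/* [iova, iova + length) must lie within [mr_iova, mr_iova + mr_length) */
	return iova >= mr_iova && iova + length <= mr_iova + mr_length;
}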
/openbmc/linux/drivers/gpu/drm/msm/
msm_iommu.c
37 unsigned long iova, phys_addr_t paddr, in calc_pgsize() argument
43 unsigned long addr_merge = paddr | iova; in calc_pgsize()
73 if ((iova ^ paddr) & (pgsize_next - 1)) in calc_pgsize()
100 pgsize = calc_pgsize(pagetable, iova, iova, size, &count); in msm_iommu_pagetable_unmap()
106 iova += unmapped; in msm_iommu_pagetable_unmap()
121 u64 addr = iova; in msm_iommu_pagetable_map()
145 msm_iommu_pagetable_unmap(mmu, iova, addr - iova); in msm_iommu_pagetable_map()
368 if (iova & BIT_ULL(48)) in msm_iommu_map()
369 iova |= GENMASK_ULL(63, 49); in msm_iommu_map()
381 if (iova & BIT_ULL(48)) in msm_iommu_unmap()
[all …]
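msm_iommu_map()/msm_iommu_unmap() above sign-extend bit 48 so that upper-half (TTBR1) addresses become canonical 64-bit values. A sketch of that canonicalization, assuming a 49-bit IOVA space as the snippet implies (the helper name is hypothetical):

static u64 msm_canonicalize_iova(u64 iova)
{
	if (iova & BIT_ULL(48))
		iova |= GENMASK_ULL(63, 49);	/* fill bits 63..49 with ones */
	return iova;
}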
/openbmc/linux/drivers/iommu/amd/
io_pgtable_v2.c
143 pte = &pgd[PM_LEVEL_INDEX(level, iova)]; in v2_alloc_pte()
144 iova = PAGE_SIZE_ALIGN(iova, PAGE_SIZE); in v2_alloc_pte()
175 pte = &pte[PM_LEVEL_INDEX(level, iova)]; in v2_alloc_pte()
216 pte = &pte[PM_LEVEL_INDEX(level - 1, iova)]; in fetch_pte()
245 unsigned long o_iova = iova; in iommu_v2_map_pages()
260 iova, map_size, gfp, &updated); in iommu_v2_map_pages()
269 iova += map_size; in iommu_v2_map_pages()
289 unsigned long iova, in iommu_v2_unmap_pages() argument
310 iova = (iova & ~(unmap_size - 1)) + unmap_size; in iommu_v2_unmap_pages()
323 pte = fetch_pte(pgtable, iova, &pte_pgsize); in iommu_v2_iova_to_phys()
[all …]
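v2_alloc_pte() and fetch_pte() above index one page-table level at a time via PM_LEVEL_INDEX(). A sketch of that per-level extraction, assuming the x86-style layout of 9 index bits per level above a 12-bit page offset; the macro names and constants here are illustrative approximations of the AMD driver's:

/* Assumed layout: 9 translation bits per level, level 0 at the bottom. */
#define LEVEL_SHIFT(level)		(12 + 9 * (level))
#define LEVEL_INDEX(level, iova)	(((iova) >> LEVEL_SHIFT(level)) & 0x1ffULL)

/* Example: iova 0x7000 -> level-0 index (0x7000 >> 12) & 0x1ff = 0x7 */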
/openbmc/qemu/hw/vfio/
common.c
296 hwaddr iova = iotlb->iova + giommu->iommu_offset; in vfio_iommu_map_notify() local
301 iova, iova + iotlb->addr_mask); in vfio_iommu_map_notify()
547 hwaddr iova; in vfio_get_section_iova_range() local
558 *out_iova = iova; in vfio_get_section_iova_range()
570 hwaddr iova, end; in vfio_listener_region_add() local
732 hwaddr iova, end; in vfio_listener_region_del() local
889 if (*min > iova) { in vfio_dirty_tracking_update()
890 *min = iova; in vfio_dirty_tracking_update()
1113 report->iova = iova; in vfio_device_dma_logging_report()
1205 hwaddr iova = iotlb->iova + giommu->iommu_offset; in vfio_iommu_map_dirty_notify() local
[all …]
/openbmc/linux/tools/testing/selftests/iommu/
iommufd_fail_nth.c
234 __u64 iova; in TEST_FAIL_NTH() local
304 __u64 iova; in TEST_FAIL_NTH() local
341 __u64 iova; in TEST_FAIL_NTH() local
386 __u64 iova; in TEST_FAIL_NTH() local
413 .access_rw = { .iova = iova, in TEST_FAIL_NTH()
444 .access_rw = { .iova = iova, in TEST_FAIL_NTH()
465 __u64 iova; in TEST_FAIL_NTH() local
493 .access_pages = { .iova = iova, in TEST_FAIL_NTH()
521 __u64 iova; in TEST_FAIL_NTH() local
552 .access_pages = { .iova = iova, in TEST_FAIL_NTH()
[all …]
/openbmc/linux/drivers/iommu/iommufd/
selftest.c
48 u64 *iova) in iommufd_test_syz_conv_iova() argument
84 *iova = iommufd_test_syz_conv_iova(&ioas->iopt, iova); in iommufd_test_syz_conv_iova_id()
218 iova += MOCK_IO_PAGE_SIZE; in mock_domain_map_pages()
264 iova += MOCK_IO_PAGE_SIZE; in mock_domain_unmap_pages()
272 dma_addr_t iova) in mock_domain_iova_to_phys() argument
558 iova += MOCK_IO_PAGE_SIZE; in iommufd_test_md_check_pa()
614 unsigned long iova; member
646 if (iova > item->iova + item->length - 1 || in iommufd_test_access_unmap()
885 uptr - (iova - ALIGN_DOWN(iova, PAGE_SIZE)), pages, in iommufd_test_access_pages()
897 item->iova = iova; in iommufd_test_access_pages()
[all …]
/openbmc/qemu/hw/arm/
smmu-common.c
42 b += extract64(key->iova, 0, 32); in smmu_iotlb_key_hash()
43 c += extract64(key->iova, 32, 32); in smmu_iotlb_key_hash()
55 return (k1->asid == k2->asid) && (k1->iova == k2->iova) && in smmu_iotlb_key_equal()
63 SMMUIOTLBKey key = {.asid = asid, .vmid = vmid, .iova = iova, in smmu_get_iotlb_key()
162 return ((info->iova & ~entry->addr_mask) == entry->iova) || in smmu_hash_remove_by_asid_vmid_iova()
163 ((entry->iova & ~info->mask) == info->iova); in smmu_hash_remove_by_asid_vmid_iova()
185 .asid = asid, .iova = iova, in smmu_iotlb_inv_iova()
308 SMMUTransTableInfo *tt = select_tt(cfg, iova); in smmu_ptw_64_s1()
374 tlbe->entry.iova = iova & ~mask; in smmu_ptw_64_s1()
495 tlbe->entry.iova = ipa & ~mask; in smmu_ptw_64_s2()
[all …]
/openbmc/linux/drivers/gpu/drm/etnaviv/
etnaviv_mmu.c
17 unsigned long iova, size_t size) in etnaviv_context_unmap() argument
24 iova, size, pgsize); in etnaviv_context_unmap()
34 iova += unmapped_page; in etnaviv_context_unmap()
43 unsigned long orig_iova = iova; in etnaviv_context_map()
60 iova += pgsize; in etnaviv_context_map()
75 unsigned int da = iova; in etnaviv_iommu_map()
100 etnaviv_context_unmap(context, iova, da - iova); in etnaviv_iommu_map()
108 unsigned int da = iova; in etnaviv_iommu_unmap()
293 u32 iova; in etnaviv_iommu_map_gem() local
297 mapping->iova = iova; in etnaviv_iommu_map_gem()
[all …]
/openbmc/qemu/util/
iova-tree.c
62 if (m1->iova > m2->iova + m2->size) { in iova_tree_compare()
66 if (m1->iova + m1->size < m2->iova) { in iova_tree_compare()
118 const DMAMap *iova_tree_find_address(const IOVATree *tree, hwaddr iova) in iova_tree_find_address() argument
120 const DMAMap map = { .iova = iova, .size = 0 }; in iova_tree_find_address()
135 if (map->iova + map->size < map->iova || map->perm == IOMMU_NONE) { in iova_tree_insert()
200 if (this && this->iova + this->size < args->iova_begin) { in iova_tree_alloc_map_in_hole()
204 hole_start = MAX(prev ? prev->iova + prev->size + 1 : 0, args->iova_begin); in iova_tree_alloc_map_in_hole()
205 hole_last = this ? this->iova : HWADDR_MAX; in iova_tree_alloc_map_in_hole()
274 map->iova = args.iova_result; in iova_tree_alloc_map()
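iova_tree_compare() above treats a DMAMap as the inclusive range [iova, iova + size], so two maps compare equal exactly when they overlap. The same predicate in isolation (QEMU's actual comparator takes GLib gconstpointer arguments; plain integers are used here for clarity):

#include <stdint.h>

static int map_compare(uint64_t a_lo, uint64_t a_size,
                       uint64_t b_lo, uint64_t b_size)
{
    if (a_lo > b_lo + b_size) {
        return 1;       /* a lies entirely after b */
    }
    if (a_lo + a_size < b_lo) {
        return -1;      /* a lies entirely before b */
    }
    return 0;           /* inclusive ranges overlap */
}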
