Lines matching refs:vma in mm/pagewalk.c

154 if ((!walk->vma && (pmd_leaf(*pmd) || !pmd_present(*pmd))) || in walk_pmd_range()
159 if (walk->vma) in walk_pmd_range()
160 split_huge_pmd(walk->vma, pmd, addr); in walk_pmd_range()
208 if ((!walk->vma && (pud_leaf(*pud) || !pud_present(*pud))) || in walk_pud_range()
213 if (walk->vma) in walk_pud_range()
214 split_huge_pud(walk->vma, pud, addr); in walk_pud_range()
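The walk_pmd_range()/walk_pud_range() references above show the same two-step decision at both levels: when walk->vma is NULL (a kernel or "no-vma" walk), leaf or non-present entries are simply skipped; when a VMA is present and a lower-level callback still needs to run, the huge entry is split first, unless the pmd_entry/pud_entry callback already handled it and set ACTION_CONTINUE. Below is a minimal sketch, not taken from pagewalk.c, of a pmd_entry callback that takes that path; the counter passed via walk->private is hypothetical and page-table locking is elided.

#include <linux/pagewalk.h>
#include <linux/huge_mm.h>

/*
 * Sketch only: count transparent huge PMDs and tell the core walker not to
 * split them. "thp_count" in walk->private is a made-up example payload,
 * and ptl handling is omitted for brevity.
 */
static int demo_pmd_entry(pmd_t *pmd, unsigned long addr,
			  unsigned long next, struct mm_walk *walk)
{
	unsigned long *thp_count = walk->private;

	if (pmd_trans_huge(*pmd)) {
		(*thp_count)++;
		/* Handled here: skip split_huge_pmd() and the PTE level. */
		walk->action = ACTION_CONTINUE;
	}
	return 0;
}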
312 struct vm_area_struct *vma = walk->vma; in walk_hugetlb_range() local
313 struct hstate *h = hstate_vma(vma); in walk_hugetlb_range()
321 hugetlb_vma_lock_read(vma); in walk_hugetlb_range()
324 pte = hugetlb_walk(vma, addr & hmask, sz); in walk_hugetlb_range()
332 hugetlb_vma_unlock_read(vma); in walk_hugetlb_range()
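For a hugetlb VMA, walk_hugetlb_range() takes the per-VMA hugetlb lock for reading, steps through the range at huge_page_size() granularity, and looks up each entry with hugetlb_walk() before invoking ops->hugetlb_entry. A hedged sketch of such a callback follows; it only uses the addr/next span, so no pte dereference or extra locking is shown, and the "pages" counter is hypothetical.

/* Sketch of a hugetlb_entry callback with the signature walk_hugetlb_range() expects. */
static int demo_hugetlb_entry(pte_t *pte, unsigned long hmask,
			      unsigned long addr, unsigned long next,
			      struct mm_walk *walk)
{
	unsigned long *pages = walk->private;	/* made-up counter */

	*pages += (next - addr) >> PAGE_SHIFT;	/* span covered by this hugetlb slot */
	return 0;
}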
355 struct vm_area_struct *vma = walk->vma; in walk_page_test() local
369 if (vma->vm_flags & VM_PFNMAP) { in walk_page_test()
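walk_page_test() gives ops->test_walk the first say on whether a VMA is walked at all; without a test_walk callback, VM_PFNMAP mappings are reported as holes and skipped, since their page tables do not describe ordinary pages. A sketch of a test_walk callback, assuming the documented return convention (0 = walk the VMA, 1 = skip it, negative = abort the walk):

/* Sketch: only walk anonymous VMAs, silently skip everything else. */
static int demo_test_walk(unsigned long start, unsigned long end,
			  struct mm_walk *walk)
{
	return vma_is_anonymous(walk->vma) ? 0 : 1;
}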
382 struct vm_area_struct *vma = walk->vma; in __walk_page_range() local
391 if (is_vm_hugetlb_page(vma)) { in __walk_page_range()
412 static inline void process_vma_walk_lock(struct vm_area_struct *vma, in process_vma_walk_lock() argument
418 vma_start_write(vma); in process_vma_walk_lock()
421 vma_assert_write_locked(vma); in process_vma_walk_lock()
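process_vma_walk_lock() implements the ops->walk_lock contract: with PGWALK_WRLOCK the walker write-locks each VMA via vma_start_write(), with PGWALK_WRLOCK_VERIFY it only asserts the caller already did via vma_assert_write_locked(), and with PGWALK_RDLOCK it relies on the mmap read lock alone. A read-only walker reusing the pmd_entry sketch above would declare its ops roughly like this (the ops name is a placeholder):

static const struct mm_walk_ops demo_walk_ops = {
	.pmd_entry	= demo_pmd_entry,
	.walk_lock	= PGWALK_RDLOCK,	/* no per-VMA write lock needed */
};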
476 struct vm_area_struct *vma; in walk_page_range() local
491 vma = find_vma(walk.mm, start); in walk_page_range()
493 if (!vma) { /* after the last vma */ in walk_page_range()
494 walk.vma = NULL; in walk_page_range()
498 } else if (start < vma->vm_start) { /* outside vma */ in walk_page_range()
499 walk.vma = NULL; in walk_page_range()
500 next = min(end, vma->vm_start); in walk_page_range()
504 process_vma_walk_lock(vma, ops->walk_lock); in walk_page_range()
505 walk.vma = vma; in walk_page_range()
506 next = min(end, vma->vm_end); in walk_page_range()
507 vma = find_vma(mm, vma->vm_end); in walk_page_range()
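walk_page_range() itself iterates VMAs with find_vma(), setting walk.vma to NULL for the gaps before and between VMAs so the hole/no-vma handling applies, and locking each VMA it visits according to ops->walk_lock. The caller must already hold the mmap lock in the matching mode; a minimal, hypothetical use of the sketched ops:

/* Sketch: walk [start, end) of an address space under the mmap read lock. */
static int demo_count_thps(struct mm_struct *mm, unsigned long start,
			   unsigned long end, unsigned long *thp_count)
{
	int err;

	*thp_count = 0;
	mmap_read_lock(mm);
	err = walk_page_range(mm, start, end, &demo_walk_ops, thp_count);
	mmap_read_unlock(mm);
	return err;
}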
564 int walk_page_range_vma(struct vm_area_struct *vma, unsigned long start, in walk_page_range_vma() argument
570 .mm = vma->vm_mm, in walk_page_range_vma()
571 .vma = vma, in walk_page_range_vma()
577 if (start < vma->vm_start || end > vma->vm_end) in walk_page_range_vma()
581 process_vma_walk_lock(vma, ops->walk_lock); in walk_page_range_vma()
585 int walk_page_vma(struct vm_area_struct *vma, const struct mm_walk_ops *ops, in walk_page_vma() argument
590 .mm = vma->vm_mm, in walk_page_vma()
591 .vma = vma, in walk_page_vma()
599 process_vma_walk_lock(vma, ops->walk_lock); in walk_page_vma()
600 return __walk_page_range(vma->vm_start, vma->vm_end, &walk); in walk_page_vma()
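walk_page_range_vma() and walk_page_vma() are the single-VMA entry points: the former walks an explicit [start, end) that must lie inside the VMA, the latter simply covers vma->vm_start to vma->vm_end; both apply ops->walk_lock via process_vma_walk_lock() before calling __walk_page_range(). A hypothetical caller, reusing the ops sketched above:

/* Sketch: walk one VMA the caller already looked up under the mmap lock. */
static int demo_walk_one_vma(struct vm_area_struct *vma, unsigned long *thp_count)
{
	*thp_count = 0;
	return walk_page_vma(vma, &demo_walk_ops, thp_count);
}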
641 struct vm_area_struct *vma; in walk_page_mapping() local
647 vma_interval_tree_foreach(vma, &mapping->i_mmap, first_index, in walk_page_mapping()
650 vba = vma->vm_pgoff; in walk_page_mapping()
651 vea = vba + vma_pages(vma); in walk_page_mapping()
657 start_addr = ((cba - vba) << PAGE_SHIFT) + vma->vm_start; in walk_page_mapping()
658 end_addr = ((cea - vba) << PAGE_SHIFT) + vma->vm_start; in walk_page_mapping()
662 walk.vma = vma; in walk_page_mapping()
663 walk.mm = vma->vm_mm; in walk_page_mapping()
665 err = walk_page_test(vma->vm_start, vma->vm_end, &walk); in walk_page_mapping()
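walk_page_mapping() works from the file side: it iterates every VMA in the address_space's i_mmap interval tree that overlaps the requested page-offset range, converts the overlapping pgoff window (cba/cea against the VMA's vba/vea above) into virtual addresses, and then runs the same per-VMA test and walk. The caller serializes against the tree with i_mmap_rwsem; a hedged sketch, again reusing the example ops:

/*
 * Sketch: walk all user mappings of file pages [first_index, first_index + nr).
 * The i_mmap_rwsem is held for read here, per walk_page_mapping()'s locking
 * requirement; the helper name and counter are made up.
 */
static int demo_walk_file_pages(struct address_space *mapping,
				pgoff_t first_index, pgoff_t nr,
				unsigned long *thp_count)
{
	int err;

	*thp_count = 0;
	i_mmap_lock_read(mapping);
	err = walk_page_mapping(mapping, first_index, nr, &demo_walk_ops, thp_count);
	i_mmap_unlock_read(mapping);
	return err;
}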