Lines matching refs: haddr (mm/huge_memory.c)

645 unsigned long haddr = vmf->address & HPAGE_PMD_MASK; in __do_huge_pmd_anonymous_page() local
694 folio_add_new_anon_rmap(folio, vma, haddr); in __do_huge_pmd_anonymous_page()
697 set_pmd_at(vma->vm_mm, haddr, vmf->pmd, entry); in __do_huge_pmd_anonymous_page()
754 struct vm_area_struct *vma, unsigned long haddr, pmd_t *pmd, in set_huge_zero_page() argument
763 set_pmd_at(mm, haddr, pmd, entry); in set_huge_zero_page()
772 unsigned long haddr = vmf->address & HPAGE_PMD_MASK; in do_huge_pmd_anonymous_page() local
774 if (!transhuge_vma_suitable(vma, haddr)) in do_huge_pmd_anonymous_page()
809 haddr, vmf->pmd, zero_page); in do_huge_pmd_anonymous_page()
820 folio = vma_alloc_folio(gfp, HPAGE_PMD_ORDER, vma, haddr, true); in do_huge_pmd_anonymous_page()
1288 unsigned long haddr = vmf->address & HPAGE_PMD_MASK; in do_huge_pmd_wp_page() local
1354 if (pmdp_set_access_flags(vma, haddr, vmf->pmd, entry, 1)) in do_huge_pmd_wp_page()
1487 unsigned long haddr = vmf->address & HPAGE_PMD_MASK; in do_huge_pmd_numa_page() local
1510 page = vm_normal_page_pmd(vma, haddr, pmd); in do_huge_pmd_numa_page()
1525 target_nid = numa_migrate_prep(page, vma, haddr, page_nid, in do_huge_pmd_numa_page()
1556 set_pmd_at(vma->vm_mm, haddr, vmf->pmd, pmd); in do_huge_pmd_numa_page()
1981 unsigned long haddr) in __split_huge_pud_locked() argument
1983 VM_BUG_ON(haddr & ~HPAGE_PUD_MASK); in __split_huge_pud_locked()
1984 VM_BUG_ON_VMA(vma->vm_start > haddr, vma); in __split_huge_pud_locked()
1985 VM_BUG_ON_VMA(vma->vm_end < haddr + HPAGE_PUD_SIZE, vma); in __split_huge_pud_locked()
1990 pudp_huge_clear_flush(vma, haddr, pud); in __split_huge_pud_locked()
2015 unsigned long haddr, pmd_t *pmd) in __split_huge_zero_page_pmd() argument
2032 old_pmd = pmdp_huge_clear_flush(vma, haddr, pmd); in __split_huge_zero_page_pmd()
2037 pte = pte_offset_map(&_pmd, haddr); in __split_huge_zero_page_pmd()
2039 for (i = 0, addr = haddr; i < HPAGE_PMD_NR; i++, addr += PAGE_SIZE) { in __split_huge_zero_page_pmd()
2056 unsigned long haddr, bool freeze) in __split_huge_pmd_locked() argument
2068 VM_BUG_ON(haddr & ~HPAGE_PMD_MASK); in __split_huge_pmd_locked()
2069 VM_BUG_ON_VMA(vma->vm_start > haddr, vma); in __split_huge_pmd_locked()
2070 VM_BUG_ON_VMA(vma->vm_end < haddr + HPAGE_PMD_SIZE, vma); in __split_huge_pmd_locked()
2077 old_pmd = pmdp_huge_clear_flush(vma, haddr, pmd); in __split_huge_pmd_locked()
2114 return __split_huge_zero_page_pmd(vma, haddr, pmd); in __split_huge_pmd_locked()
2155 old_pmd = pmdp_invalidate(vma, haddr, pmd); in __split_huge_pmd_locked()
2197 pte = pte_offset_map(&_pmd, haddr); in __split_huge_pmd_locked()
2199 for (i = 0, addr = haddr; i < HPAGE_PMD_NR; i++, addr += PAGE_SIZE) { in __split_huge_pmd_locked()
3265 unsigned long haddr = address & HPAGE_PMD_MASK; in remove_migration_pmd() local
3293 page_add_anon_rmap(new, vma, haddr, rmap_flags); in remove_migration_pmd()
3298 set_pmd_at(mm, haddr, pvmw->pmd, pmde); in remove_migration_pmd()
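
Nearly every call site above derives haddr by masking a fault or migration address down to a huge-page boundary (HPAGE_PMD_MASK for PMD-sized pages, HPAGE_PUD_MASK in the PUD split path), and the split paths then walk the subpages from haddr in PAGE_SIZE steps. The user-space sketch below illustrates only that arithmetic; the constants assume x86-64 defaults (4 KiB base pages, 2 MiB PMD and 1 GiB PUD huge pages) and are stand-ins, not the kernel's actual definitions, which come from the architecture's page-table layout.

#include <stdio.h>

#define PAGE_SIZE      (1UL << 12)                  /* 4 KiB base page (assumed) */
#define HPAGE_PMD_SIZE (1UL << 21)                  /* 2 MiB PMD-sized huge page (assumed) */
#define HPAGE_PMD_MASK (~(HPAGE_PMD_SIZE - 1))
#define HPAGE_PMD_NR   (HPAGE_PMD_SIZE / PAGE_SIZE) /* 512 subpages per PMD huge page */
#define HPAGE_PUD_SIZE (1UL << 30)                  /* 1 GiB PUD-sized huge page (assumed) */
#define HPAGE_PUD_MASK (~(HPAGE_PUD_SIZE - 1))

int main(void)
{
	unsigned long address = 0x7f1234567890UL;	/* hypothetical fault address */

	/* Same masking as "haddr = vmf->address & HPAGE_PMD_MASK" in the listing. */
	unsigned long haddr = address & HPAGE_PMD_MASK;

	printf("fault address  : %#lx\n", address);
	printf("PMD-aligned    : %#lx\n", haddr);
	printf("PUD-aligned    : %#lx\n", address & HPAGE_PUD_MASK);

	/*
	 * The split paths (__split_huge_zero_page_pmd, __split_huge_pmd_locked)
	 * walk the subpages of the huge page starting at haddr in PAGE_SIZE
	 * steps, as in "for (i = 0, addr = haddr; i < HPAGE_PMD_NR; ...)".
	 */
	unsigned long nr = 0;
	for (unsigned long addr = haddr; addr < haddr + HPAGE_PMD_SIZE; addr += PAGE_SIZE)
		nr++;
	printf("subpages walked: %lu (HPAGE_PMD_NR = %lu)\n", nr, HPAGE_PMD_NR);

	return 0;
}

The masking matters because a PMD-level entry maps an entire aligned HPAGE_PMD_SIZE region: set_pmd_at() and the rmap calls in the listing are passed haddr, the start of that region, rather than the exact address that faulted.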