Searched refs:pmdp_get (Results 1 – 9 of 9) sorted by relevance
107 if (!pmd_leaf(pmdp_get(pmd))) in pmd_clear_huge()
115 pte_t *pte = (pte_t *)pmd_page_vaddr(pmdp_get(pmd)); in pmd_free_pte_page()
132 VM_BUG_ON(pmd_trans_huge(pmdp_get(pmdp))); in pmdp_collapse_flush()
58 pmd_t val = pmdp_get(pmd); in pageattr_pmd_entry()
111 if (pmd_leaf(pmdp_get(pmdp))) { in __split_linear_mapping_pmd()
113 unsigned long pfn = _pmd_pfn(pmdp_get(pmdp)); in __split_linear_mapping_pmd()
114 pgprot_t prot = __pgprot(pmd_val(pmdp_get(pmdp)) & ~_PAGE_PFN_MASK); in __split_linear_mapping_pmd()
448 if (!pmd_present(pmdp_get(pmd))) in kernel_page_present()
450 if (pmd_leaf(pmdp_get(pmd))) in kernel_page_present()
165 if (!pmd_present(pmdp_get(pmd_k))) { in vmalloc_fault()
169 if (pmd_leaf(pmdp_get(pmd_k))) in vmalloc_fault()
34 if (pmd_none(pmdp_get(pmd))) { in kasan_populate_pte()
66 if (pmd_none(pmdp_get(pmdp)) && IS_ALIGNED(vaddr, PMD_SIZE) && in kasan_populate_pmd()
116 if (!pmd_present(pmdp_get(pmd))) in huge_pte_offset()
309 if (pmd_trans_huge(pmdp_get(pmd))) { in damon_mkold_pmd_entry()
311 pmde = pmdp_get(pmd); in damon_mkold_pmd_entry()
449 if (pmd_trans_huge(pmdp_get(pmd))) { in damon_young_pmd_entry()
453 pmde = pmdp_get(pmd); in damon_young_pmd_entry()
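The two DAMON page-table-walk callbacks above share one pattern: peek at the PMD with pmdp_get(), and if it looks like a transparent huge page, take the PMD lock and re-read the entry before acting on it, since it can change under the walker. A minimal sketch of that pattern (the callback name and the mark_pmd_accessed() helper are hypothetical; the locking and predicate calls are the regular kernel ones):

    #include <linux/pagewalk.h>  /* struct mm_walk */
    #include <linux/mm.h>        /* pmd_lock() */

    /* Hypothetical pmd_entry callback mirroring the lock-and-re-read pattern above. */
    static int my_pmd_entry(pmd_t *pmd, unsigned long addr,
                            unsigned long next, struct mm_walk *walk)
    {
            spinlock_t *ptl;
            pmd_t pmde;

            if (!pmd_trans_huge(pmdp_get(pmd)))     /* unlocked peek */
                    return 0;

            ptl = pmd_lock(walk->mm, pmd);          /* serialize against split/clear */
            pmde = pmdp_get(pmd);                   /* re-read under the lock */
            if (pmd_present(pmde) && pmd_trans_huge(pmde))
                    mark_pmd_accessed(pmd, walk->vma, addr);  /* hypothetical helper */
            spin_unlock(ptl);
            return 0;
    }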
57 struct folio *folio = damon_get_folio(pmd_pfn(pmdp_get(pmd))); in damon_pmdp_mkold()
97 *accessed = pmd_young(pmdp_get(pvmw.pmd)) || in __damon_pa_young()
288 #ifndef pmdp_get
289 static inline pmd_t pmdp_get(pmd_t *pmdp) in pmdp_get() function
511 return pmdp_get(pmdp); in pmdp_get_lockless()
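This last group is the generic fallback in the common pgtable header (include/linux/pgtable.h in mainline): architectures that do not override pmdp_get() get a plain READ_ONCE() of the entry, and pmdp_get_lockless() in turn falls back to pmdp_get() on configurations that can read a PMD atomically. A rough sketch of that fallback (mainline may differ in surrounding detail):

    #ifndef pmdp_get
    static inline pmd_t pmdp_get(pmd_t *pmdp)
    {
            return READ_ONCE(*pmdp);        /* single, tear-free read of the entry */
    }
    #endif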