/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2009 Chen Liqin <liqin.chen@sunplusct.com>
 * Copyright (C) 2012 Regents of the University of California
 */

#ifndef _ASM_RISCV_PGALLOC_H
#define _ASM_RISCV_PGALLOC_H

#include <linux/mm.h>
#include <asm/tlb.h>

#ifdef CONFIG_MMU
#define __HAVE_ARCH_PUD_ALLOC_ONE
#define __HAVE_ARCH_PUD_FREE
#include <asm-generic/pgalloc.h>

static inline void pmd_populate_kernel(struct mm_struct *mm,
	pmd_t *pmd, pte_t *pte)
{
	unsigned long pfn = virt_to_pfn(pte);

	set_pmd(pmd, __pmd((pfn << _PAGE_PFN_SHIFT) | _PAGE_TABLE));
}

static inline void pmd_populate(struct mm_struct *mm,
	pmd_t *pmd, pgtable_t pte)
{
	unsigned long pfn = virt_to_pfn(page_address(pte));

	set_pmd(pmd, __pmd((pfn << _PAGE_PFN_SHIFT) | _PAGE_TABLE));
}

#ifndef __PAGETABLE_PMD_FOLDED
static inline void pud_populate(struct mm_struct *mm, pud_t *pud, pmd_t *pmd)
{
	unsigned long pfn = virt_to_pfn(pmd);

	set_pud(pud, __pud((pfn << _PAGE_PFN_SHIFT) | _PAGE_TABLE));
}

static inline void p4d_populate(struct mm_struct *mm, p4d_t *p4d, pud_t *pud)
{
	if (pgtable_l4_enabled) {
		unsigned long pfn = virt_to_pfn(pud);

		set_p4d(p4d, __p4d((pfn << _PAGE_PFN_SHIFT) | _PAGE_TABLE));
	}
}

static inline void p4d_populate_safe(struct mm_struct *mm, p4d_t *p4d,
				     pud_t *pud)
{
	if (pgtable_l4_enabled) {
		unsigned long pfn = virt_to_pfn(pud);

		set_p4d_safe(p4d,
			     __p4d((pfn << _PAGE_PFN_SHIFT) | _PAGE_TABLE));
	}
}

static inline void pgd_populate(struct mm_struct *mm, pgd_t *pgd, p4d_t *p4d)
{
	if (pgtable_l5_enabled) {
		unsigned long pfn = virt_to_pfn(p4d);

		set_pgd(pgd, __pgd((pfn << _PAGE_PFN_SHIFT) | _PAGE_TABLE));
	}
}

static inline void pgd_populate_safe(struct mm_struct *mm, pgd_t *pgd,
				     p4d_t *p4d)
{
	if (pgtable_l5_enabled) {
		unsigned long pfn = virt_to_pfn(p4d);

		set_pgd_safe(pgd,
			     __pgd((pfn << _PAGE_PFN_SHIFT) | _PAGE_TABLE));
	}
}

#define pud_alloc_one pud_alloc_one
static inline pud_t *pud_alloc_one(struct mm_struct *mm, unsigned long addr)
{
	if (pgtable_l4_enabled)
		return __pud_alloc_one(mm, addr);

	return NULL;
}

#define pud_free pud_free
static inline void pud_free(struct mm_struct *mm, pud_t *pud)
{
	if (pgtable_l4_enabled)
		__pud_free(mm, pud);
}

#define __pud_free_tlb(tlb, pud, addr)	pud_free((tlb)->mm, pud)

#define p4d_alloc_one p4d_alloc_one
static inline p4d_t *p4d_alloc_one(struct mm_struct *mm, unsigned long addr)
{
	if (pgtable_l5_enabled) {
		gfp_t gfp = GFP_PGTABLE_USER;

		if (mm == &init_mm)
			gfp = GFP_PGTABLE_KERNEL;
		return (p4d_t *)get_zeroed_page(gfp);
	}

	return NULL;
}

static inline void __p4d_free(struct mm_struct *mm, p4d_t *p4d)
{
	BUG_ON((unsigned long)p4d & (PAGE_SIZE-1));
	free_page((unsigned long)p4d);
}

#define p4d_free p4d_free
static inline void p4d_free(struct mm_struct *mm, p4d_t *p4d)
{
	if (pgtable_l5_enabled)
		__p4d_free(mm, p4d);
}

#define __p4d_free_tlb(tlb, p4d, addr)	p4d_free((tlb)->mm, p4d)
#endif /* __PAGETABLE_PMD_FOLDED */

static inline pgd_t *pgd_alloc(struct mm_struct *mm)
{
	pgd_t *pgd;

	pgd = (pgd_t *)__get_free_page(GFP_KERNEL);
	if (likely(pgd != NULL)) {
		memset(pgd, 0, USER_PTRS_PER_PGD * sizeof(pgd_t));
		/* Copy kernel mappings */
		memcpy(pgd + USER_PTRS_PER_PGD,
			init_mm.pgd + USER_PTRS_PER_PGD,
			(PTRS_PER_PGD - USER_PTRS_PER_PGD) * sizeof(pgd_t));
	}
	return pgd;
}

#ifndef __PAGETABLE_PMD_FOLDED

#define __pmd_free_tlb(tlb, pmd, addr)	pmd_free((tlb)->mm, pmd)

#endif /* __PAGETABLE_PMD_FOLDED */

#define __pte_free_tlb(tlb, pte, buf)	\
do {					\
	pgtable_pte_page_dtor(pte);	\
	tlb_remove_page((tlb), pte);	\
} while (0)
#endif /* CONFIG_MMU */

#endif /* _ASM_RISCV_PGALLOC_H */