// SPDX-License-Identifier: GPL-2.0
// Copyright (C) 2019 Andes Technology Corporation

#include <linux/pfn.h>
#include <linux/init_task.h>
#include <linux/kasan.h>
#include <linux/kernel.h>
#include <linux/memblock.h>
#include <asm/tlbflush.h>
#include <asm/pgtable.h>
#include <asm/fixmap.h>

extern pgd_t early_pg_dir[PTRS_PER_PGD];

/*
 * Map the whole KASAN shadow region to the single zero shadow page so that
 * every access is treated as valid until kasan_init() installs real shadow
 * memory. All shadow addresses share the same early PMD/PTE tables.
 */
asmlinkage void __init kasan_early_init(void)
{
	uintptr_t i;
	pgd_t *pgd = early_pg_dir + pgd_index(KASAN_SHADOW_START);

	/* Point every early shadow PTE at the shared zero shadow page. */
	for (i = 0; i < PTRS_PER_PTE; ++i)
		set_pte(kasan_early_shadow_pte + i,
			mk_pte(virt_to_page(kasan_early_shadow_page),
			       PAGE_KERNEL));

	/* Point every early shadow PMD entry at that single PTE table. */
	for (i = 0; i < PTRS_PER_PMD; ++i)
		set_pmd(kasan_early_shadow_pmd + i,
			pfn_pmd(PFN_DOWN(__pa((uintptr_t)kasan_early_shadow_pte)),
				__pgprot(_PAGE_TABLE)));

	/* Cover the whole shadow region in the early page table. */
	for (i = KASAN_SHADOW_START; i < KASAN_SHADOW_END;
	     i += PGDIR_SIZE, ++pgd)
		set_pgd(pgd,
			pfn_pgd(PFN_DOWN(__pa((uintptr_t)kasan_early_shadow_pmd)),
				__pgprot(_PAGE_TABLE)));

	/* Do the same for swapper_pg_dir. */
	pgd = pgd_offset_k(KASAN_SHADOW_START);

	for (i = KASAN_SHADOW_START; i < KASAN_SHADOW_END;
	     i += PGDIR_SIZE, ++pgd)
		set_pgd(pgd,
			pfn_pgd(PFN_DOWN(__pa((uintptr_t)kasan_early_shadow_pmd)),
				__pgprot(_PAGE_TABLE)));

	flush_tlb_all();
}

/*
 * Back the shadow range [start, end) with freshly allocated pages and wire
 * up dedicated PMD/PTE tables for it, replacing the early zero-page mapping.
 */
static void __init populate(void *start, void *end)
{
	unsigned long i;
	unsigned long vaddr = (unsigned long)start & PAGE_MASK;
	unsigned long vend = PAGE_ALIGN((unsigned long)end);
	unsigned long n_pages = (vend - vaddr) / PAGE_SIZE;
	unsigned long n_pmds = DIV_ROUND_UP(n_pages, PTRS_PER_PTE);
	pgd_t *pgd = pgd_offset_k(vaddr);
	pmd_t *pmd = memblock_alloc(n_pmds * sizeof(pmd_t), PAGE_SIZE);
	pte_t *pte = memblock_alloc(n_pages * sizeof(pte_t), PAGE_SIZE);

	/* Allocate a backing page for each shadow page in the range. */
	for (i = 0; i < n_pages; i++) {
		phys_addr_t phys = memblock_phys_alloc(PAGE_SIZE, PAGE_SIZE);

		set_pte(pte + i, pfn_pte(PHYS_PFN(phys), PAGE_KERNEL));
	}

	/* Hook the new PMD tables into the kernel page table. */
	for (i = 0; i < n_pmds; ++pgd, i += PTRS_PER_PMD)
		set_pgd(pgd, pfn_pgd(PFN_DOWN(__pa((uintptr_t)(pmd + i))),
				     __pgprot(_PAGE_TABLE)));

	/* Hook the new PTE tables into the PMD tables. */
	for (i = 0; i < n_pages; ++pmd, i += PTRS_PER_PTE)
		set_pmd(pmd, pfn_pmd(PFN_DOWN(__pa((uintptr_t)(pte + i))),
				     __pgprot(_PAGE_TABLE)));

	flush_tlb_all();
	memset(start, 0, end - start);
}

void __init kasan_init(void)
{
	struct memblock_region *reg;
	unsigned long i;

	/* Everything below VMALLOC_END keeps the early zero shadow mapping. */
	kasan_populate_early_shadow((void *)KASAN_SHADOW_START,
				    (void *)kasan_mem_to_shadow((void *)VMALLOC_END));

	/* Allocate real shadow memory for every region of physical memory. */
	for_each_memblock(memory, reg) {
		void *start = (void *)__va(reg->base);
		void *end = (void *)__va(reg->base + reg->size);

		if (start >= end)
			break;

		populate(kasan_mem_to_shadow(start),
			 kasan_mem_to_shadow(end));
	}

	/* Remap the zero shadow page read-only to catch stray writes to it. */
	for (i = 0; i < PTRS_PER_PTE; i++)
		set_pte(&kasan_early_shadow_pte[i],
			mk_pte(virt_to_page(kasan_early_shadow_page),
			       __pgprot(_PAGE_PRESENT | _PAGE_READ |
					_PAGE_ACCESSED)));

	memset(kasan_early_shadow_page, 0, PAGE_SIZE);

	/* Shadow memory is fully set up; enable KASAN error reporting. */
	init_task.kasan_depth = 0;
}