1bb1520d5SAlexander Gordeev // SPDX-License-Identifier: GPL-2.0 2bb1520d5SAlexander Gordeev #include <linux/sched/task.h> 3bb1520d5SAlexander Gordeev #include <linux/pgtable.h> 4*557b1970SVasily Gorbik #include <linux/kasan.h> 5bb1520d5SAlexander Gordeev #include <asm/pgalloc.h> 6bb1520d5SAlexander Gordeev #include <asm/facility.h> 7bb1520d5SAlexander Gordeev #include <asm/sections.h> 88c37cb7dSVasily Gorbik #include <asm/physmem_info.h> 98e9205d2SAlexander Gordeev #include <asm/maccess.h> 102154e0b3SAlexander Gordeev #include <asm/abs_lowcore.h> 11bb1520d5SAlexander Gordeev #include "decompressor.h" 12bb1520d5SAlexander Gordeev #include "boot.h" 13bb1520d5SAlexander Gordeev 14f913a660SVasily Gorbik unsigned long __bootdata_preserved(s390_invalid_asce); 15f913a660SVasily Gorbik 16bb1520d5SAlexander Gordeev #define init_mm (*(struct mm_struct *)vmlinux.init_mm_off) 17bb1520d5SAlexander Gordeev #define swapper_pg_dir vmlinux.swapper_pg_dir_off 18bb1520d5SAlexander Gordeev #define invalid_pg_dir vmlinux.invalid_pg_dir_off 19bb1520d5SAlexander Gordeev 20*557b1970SVasily Gorbik enum populate_mode { 21*557b1970SVasily Gorbik POPULATE_NONE, 22*557b1970SVasily Gorbik POPULATE_ONE2ONE, 23*557b1970SVasily Gorbik POPULATE_ABS_LOWCORE, 24*557b1970SVasily Gorbik #ifdef CONFIG_KASAN 25*557b1970SVasily Gorbik POPULATE_KASAN_MAP_SHADOW, 26*557b1970SVasily Gorbik POPULATE_KASAN_ZERO_SHADOW, 27*557b1970SVasily Gorbik POPULATE_KASAN_SHALLOW 28*557b1970SVasily Gorbik #endif 29*557b1970SVasily Gorbik }; 30*557b1970SVasily Gorbik 31*557b1970SVasily Gorbik static void pgtable_populate(unsigned long addr, unsigned long end, enum populate_mode mode); 32*557b1970SVasily Gorbik 33*557b1970SVasily Gorbik #ifdef CONFIG_KASAN 34*557b1970SVasily Gorbik 35*557b1970SVasily Gorbik #define kasan_early_shadow_page vmlinux.kasan_early_shadow_page_off 36*557b1970SVasily Gorbik #define kasan_early_shadow_pte ((pte_t *)vmlinux.kasan_early_shadow_pte_off) 37*557b1970SVasily Gorbik #define 
kasan_early_shadow_pmd ((pmd_t *)vmlinux.kasan_early_shadow_pmd_off) 38*557b1970SVasily Gorbik #define kasan_early_shadow_pud ((pud_t *)vmlinux.kasan_early_shadow_pud_off) 39*557b1970SVasily Gorbik #define kasan_early_shadow_p4d ((p4d_t *)vmlinux.kasan_early_shadow_p4d_off) 40*557b1970SVasily Gorbik #define __sha(x) ((unsigned long)kasan_mem_to_shadow((void *)x)) 41*557b1970SVasily Gorbik 42*557b1970SVasily Gorbik static pte_t pte_z; 43*557b1970SVasily Gorbik 44*557b1970SVasily Gorbik static void kasan_populate_shadow(void) 45*557b1970SVasily Gorbik { 46*557b1970SVasily Gorbik pmd_t pmd_z = __pmd(__pa(kasan_early_shadow_pte) | _SEGMENT_ENTRY); 47*557b1970SVasily Gorbik pud_t pud_z = __pud(__pa(kasan_early_shadow_pmd) | _REGION3_ENTRY); 48*557b1970SVasily Gorbik p4d_t p4d_z = __p4d(__pa(kasan_early_shadow_pud) | _REGION2_ENTRY); 49*557b1970SVasily Gorbik unsigned long untracked_end; 50*557b1970SVasily Gorbik unsigned long start, end; 51*557b1970SVasily Gorbik int i; 52*557b1970SVasily Gorbik 53*557b1970SVasily Gorbik pte_z = __pte(__pa(kasan_early_shadow_page) | pgprot_val(PAGE_KERNEL_RO)); 54*557b1970SVasily Gorbik if (!machine.has_nx) 55*557b1970SVasily Gorbik pte_z = clear_pte_bit(pte_z, __pgprot(_PAGE_NOEXEC)); 56*557b1970SVasily Gorbik crst_table_init((unsigned long *)kasan_early_shadow_p4d, p4d_val(p4d_z)); 57*557b1970SVasily Gorbik crst_table_init((unsigned long *)kasan_early_shadow_pud, pud_val(pud_z)); 58*557b1970SVasily Gorbik crst_table_init((unsigned long *)kasan_early_shadow_pmd, pmd_val(pmd_z)); 59*557b1970SVasily Gorbik memset64((u64 *)kasan_early_shadow_pte, pte_val(pte_z), PTRS_PER_PTE); 60*557b1970SVasily Gorbik 61*557b1970SVasily Gorbik /* 62*557b1970SVasily Gorbik * Current memory layout: 63*557b1970SVasily Gorbik * +- 0 -------------+ +- shadow start -+ 64*557b1970SVasily Gorbik * |1:1 ident mapping| /|1/8 of ident map| 65*557b1970SVasily Gorbik * | | / | | 66*557b1970SVasily Gorbik * +-end of ident map+ / +----------------+ 67*557b1970SVasily 
Gorbik * | ... gap ... | / | kasan | 68*557b1970SVasily Gorbik * | | / | zero page | 69*557b1970SVasily Gorbik * +- vmalloc area -+ / | mapping | 70*557b1970SVasily Gorbik * | vmalloc_size | / | (untracked) | 71*557b1970SVasily Gorbik * +- modules vaddr -+ / +----------------+ 72*557b1970SVasily Gorbik * | 2Gb |/ | unmapped | allocated per module 73*557b1970SVasily Gorbik * +- shadow start -+ +----------------+ 74*557b1970SVasily Gorbik * | 1/8 addr space | | zero pg mapping| (untracked) 75*557b1970SVasily Gorbik * +- shadow end ----+---------+- shadow end ---+ 76*557b1970SVasily Gorbik * 77*557b1970SVasily Gorbik * Current memory layout (KASAN_VMALLOC): 78*557b1970SVasily Gorbik * +- 0 -------------+ +- shadow start -+ 79*557b1970SVasily Gorbik * |1:1 ident mapping| /|1/8 of ident map| 80*557b1970SVasily Gorbik * | | / | | 81*557b1970SVasily Gorbik * +-end of ident map+ / +----------------+ 82*557b1970SVasily Gorbik * | ... gap ... | / | kasan zero page| (untracked) 83*557b1970SVasily Gorbik * | | / | mapping | 84*557b1970SVasily Gorbik * +- vmalloc area -+ / +----------------+ 85*557b1970SVasily Gorbik * | vmalloc_size | / |shallow populate| 86*557b1970SVasily Gorbik * +- modules vaddr -+ / +----------------+ 87*557b1970SVasily Gorbik * | 2Gb |/ |shallow populate| 88*557b1970SVasily Gorbik * +- shadow start -+ +----------------+ 89*557b1970SVasily Gorbik * | 1/8 addr space | | zero pg mapping| (untracked) 90*557b1970SVasily Gorbik * +- shadow end ----+---------+- shadow end ---+ 91*557b1970SVasily Gorbik */ 92*557b1970SVasily Gorbik 93*557b1970SVasily Gorbik for_each_physmem_usable_range(i, &start, &end) 94*557b1970SVasily Gorbik pgtable_populate(__sha(start), __sha(end), POPULATE_KASAN_MAP_SHADOW); 95*557b1970SVasily Gorbik if (IS_ENABLED(CONFIG_KASAN_VMALLOC)) { 96*557b1970SVasily Gorbik untracked_end = VMALLOC_START; 97*557b1970SVasily Gorbik /* shallowly populate kasan shadow for vmalloc and modules */ 98*557b1970SVasily Gorbik 
pgtable_populate(__sha(VMALLOC_START), __sha(MODULES_END), POPULATE_KASAN_SHALLOW); 99*557b1970SVasily Gorbik } else { 100*557b1970SVasily Gorbik untracked_end = MODULES_VADDR; 101*557b1970SVasily Gorbik } 102*557b1970SVasily Gorbik /* populate kasan shadow for untracked memory */ 103*557b1970SVasily Gorbik pgtable_populate(__sha(ident_map_size), __sha(untracked_end), POPULATE_KASAN_ZERO_SHADOW); 104*557b1970SVasily Gorbik pgtable_populate(__sha(MODULES_END), __sha(_REGION1_SIZE), POPULATE_KASAN_ZERO_SHADOW); 105*557b1970SVasily Gorbik } 106*557b1970SVasily Gorbik 107*557b1970SVasily Gorbik static bool kasan_pgd_populate_zero_shadow(pgd_t *pgd, unsigned long addr, 108*557b1970SVasily Gorbik unsigned long end, enum populate_mode mode) 109*557b1970SVasily Gorbik { 110*557b1970SVasily Gorbik if (mode == POPULATE_KASAN_ZERO_SHADOW && 111*557b1970SVasily Gorbik IS_ALIGNED(addr, PGDIR_SIZE) && end - addr >= PGDIR_SIZE) { 112*557b1970SVasily Gorbik pgd_populate(&init_mm, pgd, kasan_early_shadow_p4d); 113*557b1970SVasily Gorbik return true; 114*557b1970SVasily Gorbik } 115*557b1970SVasily Gorbik return false; 116*557b1970SVasily Gorbik } 117*557b1970SVasily Gorbik 118*557b1970SVasily Gorbik static bool kasan_p4d_populate_zero_shadow(p4d_t *p4d, unsigned long addr, 119*557b1970SVasily Gorbik unsigned long end, enum populate_mode mode) 120*557b1970SVasily Gorbik { 121*557b1970SVasily Gorbik if (mode == POPULATE_KASAN_ZERO_SHADOW && 122*557b1970SVasily Gorbik IS_ALIGNED(addr, P4D_SIZE) && end - addr >= P4D_SIZE) { 123*557b1970SVasily Gorbik p4d_populate(&init_mm, p4d, kasan_early_shadow_pud); 124*557b1970SVasily Gorbik return true; 125*557b1970SVasily Gorbik } 126*557b1970SVasily Gorbik return false; 127*557b1970SVasily Gorbik } 128*557b1970SVasily Gorbik 129*557b1970SVasily Gorbik static bool kasan_pud_populate_zero_shadow(pud_t *pud, unsigned long addr, 130*557b1970SVasily Gorbik unsigned long end, enum populate_mode mode) 131*557b1970SVasily Gorbik { 132*557b1970SVasily 
Gorbik if (mode == POPULATE_KASAN_ZERO_SHADOW && 133*557b1970SVasily Gorbik IS_ALIGNED(addr, PUD_SIZE) && end - addr >= PUD_SIZE) { 134*557b1970SVasily Gorbik pud_populate(&init_mm, pud, kasan_early_shadow_pmd); 135*557b1970SVasily Gorbik return true; 136*557b1970SVasily Gorbik } 137*557b1970SVasily Gorbik return false; 138*557b1970SVasily Gorbik } 139*557b1970SVasily Gorbik 140*557b1970SVasily Gorbik static bool kasan_pmd_populate_zero_shadow(pmd_t *pmd, unsigned long addr, 141*557b1970SVasily Gorbik unsigned long end, enum populate_mode mode) 142*557b1970SVasily Gorbik { 143*557b1970SVasily Gorbik if (mode == POPULATE_KASAN_ZERO_SHADOW && 144*557b1970SVasily Gorbik IS_ALIGNED(addr, PMD_SIZE) && end - addr >= PMD_SIZE) { 145*557b1970SVasily Gorbik pmd_populate(&init_mm, pmd, kasan_early_shadow_pte); 146*557b1970SVasily Gorbik return true; 147*557b1970SVasily Gorbik } 148*557b1970SVasily Gorbik return false; 149*557b1970SVasily Gorbik } 150*557b1970SVasily Gorbik 151*557b1970SVasily Gorbik static bool kasan_pte_populate_zero_shadow(pte_t *pte, enum populate_mode mode) 152*557b1970SVasily Gorbik { 153*557b1970SVasily Gorbik pte_t entry; 154*557b1970SVasily Gorbik 155*557b1970SVasily Gorbik if (mode == POPULATE_KASAN_ZERO_SHADOW) { 156*557b1970SVasily Gorbik set_pte(pte, pte_z); 157*557b1970SVasily Gorbik return true; 158*557b1970SVasily Gorbik } 159*557b1970SVasily Gorbik return false; 160*557b1970SVasily Gorbik } 161*557b1970SVasily Gorbik #else 162*557b1970SVasily Gorbik 163*557b1970SVasily Gorbik static inline void kasan_populate_shadow(void) {} 164*557b1970SVasily Gorbik 165*557b1970SVasily Gorbik static inline bool kasan_pgd_populate_zero_shadow(pgd_t *pgd, unsigned long addr, 166*557b1970SVasily Gorbik unsigned long end, enum populate_mode mode) 167*557b1970SVasily Gorbik { 168*557b1970SVasily Gorbik return false; 169*557b1970SVasily Gorbik } 170*557b1970SVasily Gorbik 171*557b1970SVasily Gorbik static inline bool kasan_p4d_populate_zero_shadow(p4d_t *p4d, 
unsigned long addr, 172*557b1970SVasily Gorbik unsigned long end, enum populate_mode mode) 173*557b1970SVasily Gorbik { 174*557b1970SVasily Gorbik return false; 175*557b1970SVasily Gorbik } 176*557b1970SVasily Gorbik 177*557b1970SVasily Gorbik static inline bool kasan_pud_populate_zero_shadow(pud_t *pud, unsigned long addr, 178*557b1970SVasily Gorbik unsigned long end, enum populate_mode mode) 179*557b1970SVasily Gorbik { 180*557b1970SVasily Gorbik return false; 181*557b1970SVasily Gorbik } 182*557b1970SVasily Gorbik 183*557b1970SVasily Gorbik static inline bool kasan_pmd_populate_zero_shadow(pmd_t *pmd, unsigned long addr, 184*557b1970SVasily Gorbik unsigned long end, enum populate_mode mode) 185*557b1970SVasily Gorbik { 186*557b1970SVasily Gorbik return false; 187*557b1970SVasily Gorbik } 188*557b1970SVasily Gorbik 189*557b1970SVasily Gorbik static bool kasan_pte_populate_zero_shadow(pte_t *pte, enum populate_mode mode) 190*557b1970SVasily Gorbik { 191*557b1970SVasily Gorbik return false; 192*557b1970SVasily Gorbik } 193*557b1970SVasily Gorbik 194*557b1970SVasily Gorbik #endif 195*557b1970SVasily Gorbik 1968e9205d2SAlexander Gordeev /* 1978e9205d2SAlexander Gordeev * Mimic virt_to_kpte() in lack of init_mm symbol. Skip pmd NULL check though. 
1988e9205d2SAlexander Gordeev */ 1998e9205d2SAlexander Gordeev static inline pte_t *__virt_to_kpte(unsigned long va) 2008e9205d2SAlexander Gordeev { 2018e9205d2SAlexander Gordeev return pte_offset_kernel(pmd_offset(pud_offset(p4d_offset(pgd_offset_k(va), va), va), va), va); 2028e9205d2SAlexander Gordeev } 2038e9205d2SAlexander Gordeev 204bb1520d5SAlexander Gordeev static void *boot_crst_alloc(unsigned long val) 205bb1520d5SAlexander Gordeev { 206f913a660SVasily Gorbik unsigned long size = PAGE_SIZE << CRST_ALLOC_ORDER; 207bb1520d5SAlexander Gordeev unsigned long *table; 208bb1520d5SAlexander Gordeev 209f913a660SVasily Gorbik table = (unsigned long *)physmem_alloc_top_down(RR_VMEM, size, size); 210bb1520d5SAlexander Gordeev crst_table_init(table, val); 211bb1520d5SAlexander Gordeev return table; 212bb1520d5SAlexander Gordeev } 213bb1520d5SAlexander Gordeev 214bb1520d5SAlexander Gordeev static pte_t *boot_pte_alloc(void) 215bb1520d5SAlexander Gordeev { 216*557b1970SVasily Gorbik static void *pte_leftover; 217bb1520d5SAlexander Gordeev pte_t *pte; 218bb1520d5SAlexander Gordeev 219*557b1970SVasily Gorbik /* 220*557b1970SVasily Gorbik * handling pte_leftovers this way helps to avoid memory fragmentation 221*557b1970SVasily Gorbik * during POPULATE_KASAN_MAP_SHADOW when EDAT is off 222*557b1970SVasily Gorbik */ 223*557b1970SVasily Gorbik if (!pte_leftover) { 224*557b1970SVasily Gorbik pte_leftover = (void *)physmem_alloc_top_down(RR_VMEM, PAGE_SIZE, PAGE_SIZE); 225*557b1970SVasily Gorbik pte = pte_leftover + _PAGE_TABLE_SIZE; 226*557b1970SVasily Gorbik } else { 227*557b1970SVasily Gorbik pte = pte_leftover; 228*557b1970SVasily Gorbik pte_leftover = NULL; 229*557b1970SVasily Gorbik } 230*557b1970SVasily Gorbik 231bb1520d5SAlexander Gordeev memset64((u64 *)pte, _PAGE_INVALID, PTRS_PER_PTE); 232bb1520d5SAlexander Gordeev return pte; 233bb1520d5SAlexander Gordeev } 234bb1520d5SAlexander Gordeev 235*557b1970SVasily Gorbik static unsigned long _pa(unsigned long addr, unsigned 
long size, enum populate_mode mode) 236e0e0a87bSAlexander Gordeev { 237e0e0a87bSAlexander Gordeev switch (mode) { 2388e9205d2SAlexander Gordeev case POPULATE_NONE: 2398e9205d2SAlexander Gordeev return -1; 240e0e0a87bSAlexander Gordeev case POPULATE_ONE2ONE: 241e0e0a87bSAlexander Gordeev return addr; 2422154e0b3SAlexander Gordeev case POPULATE_ABS_LOWCORE: 2432154e0b3SAlexander Gordeev return __abs_lowcore_pa(addr); 244*557b1970SVasily Gorbik #ifdef CONFIG_KASAN 245*557b1970SVasily Gorbik case POPULATE_KASAN_MAP_SHADOW: 246*557b1970SVasily Gorbik addr = physmem_alloc_top_down(RR_VMEM, size, size); 247*557b1970SVasily Gorbik memset((void *)addr, 0, size); 248*557b1970SVasily Gorbik return addr; 249*557b1970SVasily Gorbik #endif 250e0e0a87bSAlexander Gordeev default: 251e0e0a87bSAlexander Gordeev return -1; 252e0e0a87bSAlexander Gordeev } 253e0e0a87bSAlexander Gordeev } 254e0e0a87bSAlexander Gordeev 255bb1520d5SAlexander Gordeev static bool can_large_pud(pud_t *pu_dir, unsigned long addr, unsigned long end) 256bb1520d5SAlexander Gordeev { 257bb1520d5SAlexander Gordeev return machine.has_edat2 && 258bb1520d5SAlexander Gordeev IS_ALIGNED(addr, PUD_SIZE) && (end - addr) >= PUD_SIZE; 259bb1520d5SAlexander Gordeev } 260bb1520d5SAlexander Gordeev 261bb1520d5SAlexander Gordeev static bool can_large_pmd(pmd_t *pm_dir, unsigned long addr, unsigned long end) 262bb1520d5SAlexander Gordeev { 263bb1520d5SAlexander Gordeev return machine.has_edat1 && 264bb1520d5SAlexander Gordeev IS_ALIGNED(addr, PMD_SIZE) && (end - addr) >= PMD_SIZE; 265bb1520d5SAlexander Gordeev } 266bb1520d5SAlexander Gordeev 267e0e0a87bSAlexander Gordeev static void pgtable_pte_populate(pmd_t *pmd, unsigned long addr, unsigned long end, 268e0e0a87bSAlexander Gordeev enum populate_mode mode) 269bb1520d5SAlexander Gordeev { 270bb1520d5SAlexander Gordeev pte_t *pte, entry; 271bb1520d5SAlexander Gordeev 272bb1520d5SAlexander Gordeev pte = pte_offset_kernel(pmd, addr); 273bb1520d5SAlexander Gordeev for (; addr < 
end; addr += PAGE_SIZE, pte++) { 274bb1520d5SAlexander Gordeev if (pte_none(*pte)) { 275*557b1970SVasily Gorbik if (kasan_pte_populate_zero_shadow(pte, mode)) 276*557b1970SVasily Gorbik continue; 277*557b1970SVasily Gorbik entry = __pte(_pa(addr, PAGE_SIZE, mode)); 278bb1520d5SAlexander Gordeev entry = set_pte_bit(entry, PAGE_KERNEL_EXEC); 279bb1520d5SAlexander Gordeev set_pte(pte, entry); 280bb1520d5SAlexander Gordeev } 281bb1520d5SAlexander Gordeev } 282bb1520d5SAlexander Gordeev } 283bb1520d5SAlexander Gordeev 284e0e0a87bSAlexander Gordeev static void pgtable_pmd_populate(pud_t *pud, unsigned long addr, unsigned long end, 285e0e0a87bSAlexander Gordeev enum populate_mode mode) 286bb1520d5SAlexander Gordeev { 287bb1520d5SAlexander Gordeev unsigned long next; 288bb1520d5SAlexander Gordeev pmd_t *pmd, entry; 289bb1520d5SAlexander Gordeev pte_t *pte; 290bb1520d5SAlexander Gordeev 291bb1520d5SAlexander Gordeev pmd = pmd_offset(pud, addr); 292bb1520d5SAlexander Gordeev for (; addr < end; addr = next, pmd++) { 293bb1520d5SAlexander Gordeev next = pmd_addr_end(addr, end); 294bb1520d5SAlexander Gordeev if (pmd_none(*pmd)) { 295*557b1970SVasily Gorbik if (kasan_pmd_populate_zero_shadow(pmd, addr, next, mode)) 296*557b1970SVasily Gorbik continue; 297bb1520d5SAlexander Gordeev if (can_large_pmd(pmd, addr, next)) { 298*557b1970SVasily Gorbik entry = __pmd(_pa(addr, _SEGMENT_SIZE, mode)); 299bb1520d5SAlexander Gordeev entry = set_pmd_bit(entry, SEGMENT_KERNEL_EXEC); 300bb1520d5SAlexander Gordeev set_pmd(pmd, entry); 301bb1520d5SAlexander Gordeev continue; 302bb1520d5SAlexander Gordeev } 303bb1520d5SAlexander Gordeev pte = boot_pte_alloc(); 304bb1520d5SAlexander Gordeev pmd_populate(&init_mm, pmd, pte); 305bb1520d5SAlexander Gordeev } else if (pmd_large(*pmd)) { 306bb1520d5SAlexander Gordeev continue; 307bb1520d5SAlexander Gordeev } 308e0e0a87bSAlexander Gordeev pgtable_pte_populate(pmd, addr, next, mode); 309bb1520d5SAlexander Gordeev } 310bb1520d5SAlexander Gordeev } 

/*
 * Fill the region-3 table entries of [addr, end), using large (1GB)
 * entries where possible and descending to segment tables otherwise.
 */
static void pgtable_pud_populate(p4d_t *p4d, unsigned long addr, unsigned long end,
				 enum populate_mode mode)
{
	unsigned long next;
	pud_t *pud, entry;
	pmd_t *pmd;

	pud = pud_offset(p4d, addr);
	for (; addr < end; addr = next, pud++) {
		next = pud_addr_end(addr, end);
		if (pud_none(*pud)) {
			/* whole-PUD zero shadow short-circuit (KASAN only) */
			if (kasan_pud_populate_zero_shadow(pud, addr, next, mode))
				continue;
			if (can_large_pud(pud, addr, next)) {
				entry = __pud(_pa(addr, _REGION3_SIZE, mode));
				entry = set_pud_bit(entry, REGION3_KERNEL_EXEC);
				set_pud(pud, entry);
				continue;
			}
			pmd = boot_crst_alloc(_SEGMENT_ENTRY_EMPTY);
			pud_populate(&init_mm, pud, pmd);
		} else if (pud_large(*pud)) {
			/* already mapped by a large entry */
			continue;
		}
		pgtable_pmd_populate(pud, addr, next, mode);
	}
}

/* Fill the region-2 table entries of [addr, end), descending to PUD level. */
static void pgtable_p4d_populate(pgd_t *pgd, unsigned long addr, unsigned long end,
				 enum populate_mode mode)
{
	unsigned long next;
	p4d_t *p4d;
	pud_t *pud;

	p4d = p4d_offset(pgd, addr);
	for (; addr < end; addr = next, p4d++) {
		next = p4d_addr_end(addr, end);
		if (p4d_none(*p4d)) {
			/* whole-P4D zero shadow short-circuit (KASAN only) */
			if (kasan_p4d_populate_zero_shadow(p4d, addr, next, mode))
				continue;
			pud = boot_crst_alloc(_REGION3_ENTRY_EMPTY);
			p4d_populate(&init_mm, p4d, pud);
		}
		pgtable_pud_populate(p4d, addr, next, mode);
	}
}

/*
 * Top-level page table walk: populate [addr, end) in init_mm according
 * to @mode, creating intermediate tables on demand.
 */
static void pgtable_populate(unsigned long addr, unsigned long end, enum populate_mode mode)
{
	unsigned long next;
	pgd_t *pgd;
	p4d_t *p4d;

	pgd = pgd_offset(&init_mm, addr);
	for (; addr < end; addr = next, pgd++) {
		next = pgd_addr_end(addr, end);
		if (pgd_none(*pgd)) {
			if (kasan_pgd_populate_zero_shadow(pgd, addr, next, mode))
				continue;
			p4d = boot_crst_alloc(_REGION2_ENTRY_EMPTY);
			pgd_populate(&init_mm, pgd, p4d);
		}
#ifdef CONFIG_KASAN
		/* shallow populate stops after the top-level tables exist */
		if (mode == POPULATE_KASAN_SHALLOW)
			continue;
#endif
		pgtable_p4d_populate(pgd, addr, next, mode);
	}
}

/*
 * Create the boot-time kernel page tables (identity mapping, absolute
 * lowcore, memcpy-real area, KASAN shadow) and activate the kernel ASCE
 * in control registers 1, 7 and 13.
 */
void setup_vmem(unsigned long asce_limit)
{
	unsigned long start, end;
	unsigned long asce_type;
	unsigned long asce_bits;
	int i;

	/* 4-level vs 3-level paging, depending on the requested address space size */
	if (asce_limit == _REGION1_SIZE) {
		asce_type = _REGION2_ENTRY_EMPTY;
		asce_bits = _ASCE_TYPE_REGION2 | _ASCE_TABLE_LENGTH;
	} else {
		asce_type = _REGION3_ENTRY_EMPTY;
		asce_bits = _ASCE_TYPE_REGION3 | _ASCE_TABLE_LENGTH;
	}
	s390_invalid_asce = invalid_pg_dir | _ASCE_TYPE_REGION3 | _ASCE_TABLE_LENGTH;

	crst_table_init((unsigned long *)swapper_pg_dir, asce_type);
	crst_table_init((unsigned long *)invalid_pg_dir, _REGION3_ENTRY_EMPTY);

	/*
	 * To allow prefixing the lowcore must be mapped with 4KB pages.
	 * To prevent creation of a large page at address 0 first map
	 * the lowcore and create the identity mapping only afterwards.
	 */
	pgtable_populate(0, sizeof(struct lowcore), POPULATE_ONE2ONE);
	for_each_physmem_usable_range(i, &start, &end)
		pgtable_populate(start, end, POPULATE_ONE2ONE);
	pgtable_populate(__abs_lowcore, __abs_lowcore + sizeof(struct lowcore),
			 POPULATE_ABS_LOWCORE);
	/* tables only for the memcpy-real page; its PTE is patched at runtime */
	pgtable_populate(__memcpy_real_area, __memcpy_real_area + PAGE_SIZE,
			 POPULATE_NONE);
	memcpy_real_ptep = __virt_to_kpte(__memcpy_real_area);

	kasan_populate_shadow();

	S390_lowcore.kernel_asce = swapper_pg_dir | asce_bits;
	S390_lowcore.user_asce = s390_invalid_asce;

	/* load primary (cr1), secondary (cr7) and home (cr13) space ASCEs */
	__ctl_load(S390_lowcore.kernel_asce, 1, 1);
	__ctl_load(S390_lowcore.user_asce, 7, 7);
	__ctl_load(S390_lowcore.kernel_asce, 13, 13);

	init_mm.context.asce = S390_lowcore.kernel_asce;
}