1bb1520d5SAlexander Gordeev // SPDX-License-Identifier: GPL-2.0
2bb1520d5SAlexander Gordeev #include <linux/sched/task.h>
3bb1520d5SAlexander Gordeev #include <linux/pgtable.h>
4557b1970SVasily Gorbik #include <linux/kasan.h>
5bb1520d5SAlexander Gordeev #include <asm/pgalloc.h>
6bb1520d5SAlexander Gordeev #include <asm/facility.h>
7bb1520d5SAlexander Gordeev #include <asm/sections.h>
88c37cb7dSVasily Gorbik #include <asm/physmem_info.h>
98e9205d2SAlexander Gordeev #include <asm/maccess.h>
102154e0b3SAlexander Gordeev #include <asm/abs_lowcore.h>
11bb1520d5SAlexander Gordeev #include "decompressor.h"
12bb1520d5SAlexander Gordeev #include "boot.h"
13bb1520d5SAlexander Gordeev
14f913a660SVasily Gorbik unsigned long __bootdata_preserved(s390_invalid_asce);
15f913a660SVasily Gorbik
1681e84796SHeiko Carstens #ifdef CONFIG_PROC_FS
1781e84796SHeiko Carstens atomic_long_t __bootdata_preserved(direct_pages_count[PG_DIRECT_MAP_MAX]);
1881e84796SHeiko Carstens #endif
1981e84796SHeiko Carstens
20bb1520d5SAlexander Gordeev #define init_mm (*(struct mm_struct *)vmlinux.init_mm_off)
21bb1520d5SAlexander Gordeev #define swapper_pg_dir vmlinux.swapper_pg_dir_off
22bb1520d5SAlexander Gordeev #define invalid_pg_dir vmlinux.invalid_pg_dir_off
23bb1520d5SAlexander Gordeev
/*
 * Selects how pgtable_populate() obtains the physical backing for a
 * virtual range; translated to a physical address by _pa().
 */
enum populate_mode {
	POPULATE_NONE,			/* build page tables only; _pa() yields -1 */
	POPULATE_DIRECT,		/* identity mapping: pa == va */
	POPULATE_ABS_LOWCORE,		/* translate via __abs_lowcore_pa() */
#ifdef CONFIG_KASAN
	POPULATE_KASAN_MAP_SHADOW,	/* back shadow with fresh zeroed memory */
	POPULATE_KASAN_ZERO_SHADOW,	/* reuse shared read-only zero shadow tables */
	POPULATE_KASAN_SHALLOW		/* populate the top (pgd) level only */
#endif
};
34557b1970SVasily Gorbik
35557b1970SVasily Gorbik static void pgtable_populate(unsigned long addr, unsigned long end, enum populate_mode mode);
36557b1970SVasily Gorbik
37557b1970SVasily Gorbik #ifdef CONFIG_KASAN
38557b1970SVasily Gorbik
39557b1970SVasily Gorbik #define kasan_early_shadow_page vmlinux.kasan_early_shadow_page_off
40557b1970SVasily Gorbik #define kasan_early_shadow_pte ((pte_t *)vmlinux.kasan_early_shadow_pte_off)
41557b1970SVasily Gorbik #define kasan_early_shadow_pmd ((pmd_t *)vmlinux.kasan_early_shadow_pmd_off)
42557b1970SVasily Gorbik #define kasan_early_shadow_pud ((pud_t *)vmlinux.kasan_early_shadow_pud_off)
43557b1970SVasily Gorbik #define kasan_early_shadow_p4d ((p4d_t *)vmlinux.kasan_early_shadow_p4d_off)
44557b1970SVasily Gorbik #define __sha(x) ((unsigned long)kasan_mem_to_shadow((void *)x))
45557b1970SVasily Gorbik
46557b1970SVasily Gorbik static pte_t pte_z;
47557b1970SVasily Gorbik
/* Populate the kasan shadow corresponding to the [start, end) address range. */
static inline void kasan_populate(unsigned long start, unsigned long end, enum populate_mode mode)
{
	unsigned long sha_start = PAGE_ALIGN_DOWN(__sha(start));
	unsigned long sha_end = PAGE_ALIGN(__sha(end));

	pgtable_populate(sha_start, sha_end, mode);
}
543e826100SAlexander Gordeev
/*
 * Build the complete kasan shadow mapping for the early boot stage:
 * initialize the shared zero-shadow page table hierarchy, map real shadow
 * for all usable physical memory, and cover everything else with the
 * shared (untracked) zero shadow.
 */
static void kasan_populate_shadow(void)
{
	/* Shared table entries chain down to the shared zero shadow page. */
	pmd_t pmd_z = __pmd(__pa(kasan_early_shadow_pte) | _SEGMENT_ENTRY);
	pud_t pud_z = __pud(__pa(kasan_early_shadow_pmd) | _REGION3_ENTRY);
	p4d_t p4d_z = __p4d(__pa(kasan_early_shadow_pud) | _REGION2_ENTRY);
	unsigned long memgap_start = 0;
	unsigned long untracked_end;
	unsigned long start, end;
	int i;

	/* Zero shadow page is mapped read-only; drop NOEXEC if not supported. */
	pte_z = __pte(__pa(kasan_early_shadow_page) | pgprot_val(PAGE_KERNEL_RO));
	if (!machine.has_nx)
		pte_z = clear_pte_bit(pte_z, __pgprot(_PAGE_NOEXEC));
	/* Pre-fill each shared table level so it points at the level below. */
	crst_table_init((unsigned long *)kasan_early_shadow_p4d, p4d_val(p4d_z));
	crst_table_init((unsigned long *)kasan_early_shadow_pud, pud_val(pud_z));
	crst_table_init((unsigned long *)kasan_early_shadow_pmd, pmd_val(pmd_z));
	memset64((u64 *)kasan_early_shadow_pte, pte_val(pte_z), PTRS_PER_PTE);

	/*
	 * Current memory layout:
	 * +- 0 -------------+	   +- shadow start -+
	 * |1:1 ident mapping|	  /|1/8 of ident map|
	 * |		     |	 / |		    |
	 * +-end of ident map+	/  +----------------+
	 * | ... gap ...     | /   | kasan	    |
	 * |		     |/    | zero page	    |
	 * +- vmalloc area  -+	   | mapping	    |
	 * | vmalloc_size    |	   | (untracked)    |
	 * +- modules vaddr -+	   +----------------+
	 * | 2Gb	     |	  /|	unmapped    | allocated per module
	 * +- shadow start  -+	   +----------------+
	 * | 1/8 addr space  |	   | zero pg mapping| (untracked)
	 * +- shadow end ----+---------+- shadow end ---+
	 *
	 * Current memory layout (KASAN_VMALLOC):
	 * +- 0 -------------+	   +- shadow start -+
	 * |1:1 ident mapping|	  /|1/8 of ident map|
	 * |		     |	 / |		    |
	 * +-end of ident map+	/  +----------------+
	 * | ... gap ...     | /   | kasan zero page| (untracked)
	 * |		     |/    | mapping	    |
	 * +- vmalloc area  -+	   +----------------+
	 * | vmalloc_size    |	   |shallow populate|
	 * +- modules vaddr -+	   +----------------+
	 * | 2Gb	     |	  /|shallow populate|
	 * +- shadow start  -+	   +----------------+
	 * | 1/8 addr space  |	   | zero pg mapping| (untracked)
	 * +- shadow end ----+---------+- shadow end ---+
	 */

	/*
	 * Real shadow for usable memory; gaps between diag260 ranges get the
	 * shared zero shadow so every shadow address stays mapped.
	 */
	for_each_physmem_usable_range(i, &start, &end) {
		kasan_populate(start, end, POPULATE_KASAN_MAP_SHADOW);
		if (memgap_start && physmem_info.info_source == MEM_DETECT_DIAG260)
			kasan_populate(memgap_start, start, POPULATE_KASAN_ZERO_SHADOW);
		memgap_start = end;
	}
	if (IS_ENABLED(CONFIG_KASAN_VMALLOC)) {
		untracked_end = VMALLOC_START;
		/* shallowly populate kasan shadow for vmalloc and modules */
		kasan_populate(VMALLOC_START, MODULES_END, POPULATE_KASAN_SHALLOW);
	} else {
		untracked_end = MODULES_VADDR;
	}
	/* populate kasan shadow for untracked memory */
	kasan_populate(ident_map_size, untracked_end, POPULATE_KASAN_ZERO_SHADOW);
	kasan_populate(MODULES_END, _REGION1_SIZE, POPULATE_KASAN_ZERO_SHADOW);
}
122557b1970SVasily Gorbik
kasan_pgd_populate_zero_shadow(pgd_t * pgd,unsigned long addr,unsigned long end,enum populate_mode mode)123557b1970SVasily Gorbik static bool kasan_pgd_populate_zero_shadow(pgd_t *pgd, unsigned long addr,
124557b1970SVasily Gorbik unsigned long end, enum populate_mode mode)
125557b1970SVasily Gorbik {
126557b1970SVasily Gorbik if (mode == POPULATE_KASAN_ZERO_SHADOW &&
127557b1970SVasily Gorbik IS_ALIGNED(addr, PGDIR_SIZE) && end - addr >= PGDIR_SIZE) {
128557b1970SVasily Gorbik pgd_populate(&init_mm, pgd, kasan_early_shadow_p4d);
129557b1970SVasily Gorbik return true;
130557b1970SVasily Gorbik }
131557b1970SVasily Gorbik return false;
132557b1970SVasily Gorbik }
133557b1970SVasily Gorbik
kasan_p4d_populate_zero_shadow(p4d_t * p4d,unsigned long addr,unsigned long end,enum populate_mode mode)134557b1970SVasily Gorbik static bool kasan_p4d_populate_zero_shadow(p4d_t *p4d, unsigned long addr,
135557b1970SVasily Gorbik unsigned long end, enum populate_mode mode)
136557b1970SVasily Gorbik {
137557b1970SVasily Gorbik if (mode == POPULATE_KASAN_ZERO_SHADOW &&
138557b1970SVasily Gorbik IS_ALIGNED(addr, P4D_SIZE) && end - addr >= P4D_SIZE) {
139557b1970SVasily Gorbik p4d_populate(&init_mm, p4d, kasan_early_shadow_pud);
140557b1970SVasily Gorbik return true;
141557b1970SVasily Gorbik }
142557b1970SVasily Gorbik return false;
143557b1970SVasily Gorbik }
144557b1970SVasily Gorbik
kasan_pud_populate_zero_shadow(pud_t * pud,unsigned long addr,unsigned long end,enum populate_mode mode)145557b1970SVasily Gorbik static bool kasan_pud_populate_zero_shadow(pud_t *pud, unsigned long addr,
146557b1970SVasily Gorbik unsigned long end, enum populate_mode mode)
147557b1970SVasily Gorbik {
148557b1970SVasily Gorbik if (mode == POPULATE_KASAN_ZERO_SHADOW &&
149557b1970SVasily Gorbik IS_ALIGNED(addr, PUD_SIZE) && end - addr >= PUD_SIZE) {
150557b1970SVasily Gorbik pud_populate(&init_mm, pud, kasan_early_shadow_pmd);
151557b1970SVasily Gorbik return true;
152557b1970SVasily Gorbik }
153557b1970SVasily Gorbik return false;
154557b1970SVasily Gorbik }
155557b1970SVasily Gorbik
kasan_pmd_populate_zero_shadow(pmd_t * pmd,unsigned long addr,unsigned long end,enum populate_mode mode)156557b1970SVasily Gorbik static bool kasan_pmd_populate_zero_shadow(pmd_t *pmd, unsigned long addr,
157557b1970SVasily Gorbik unsigned long end, enum populate_mode mode)
158557b1970SVasily Gorbik {
159557b1970SVasily Gorbik if (mode == POPULATE_KASAN_ZERO_SHADOW &&
160557b1970SVasily Gorbik IS_ALIGNED(addr, PMD_SIZE) && end - addr >= PMD_SIZE) {
161557b1970SVasily Gorbik pmd_populate(&init_mm, pmd, kasan_early_shadow_pte);
162557b1970SVasily Gorbik return true;
163557b1970SVasily Gorbik }
164557b1970SVasily Gorbik return false;
165557b1970SVasily Gorbik }
166557b1970SVasily Gorbik
kasan_pte_populate_zero_shadow(pte_t * pte,enum populate_mode mode)167557b1970SVasily Gorbik static bool kasan_pte_populate_zero_shadow(pte_t *pte, enum populate_mode mode)
168557b1970SVasily Gorbik {
169557b1970SVasily Gorbik pte_t entry;
170557b1970SVasily Gorbik
171557b1970SVasily Gorbik if (mode == POPULATE_KASAN_ZERO_SHADOW) {
172557b1970SVasily Gorbik set_pte(pte, pte_z);
173557b1970SVasily Gorbik return true;
174557b1970SVasily Gorbik }
175557b1970SVasily Gorbik return false;
176557b1970SVasily Gorbik }
177557b1970SVasily Gorbik #else
178557b1970SVasily Gorbik
kasan_populate_shadow(void)179557b1970SVasily Gorbik static inline void kasan_populate_shadow(void) {}
180557b1970SVasily Gorbik
/* !CONFIG_KASAN stub: never short-circuits; caller always populates normally. */
static inline bool kasan_pgd_populate_zero_shadow(pgd_t *pgd, unsigned long addr,
						  unsigned long end, enum populate_mode mode)
{
	return false;
}
186557b1970SVasily Gorbik
/* !CONFIG_KASAN stub: never short-circuits; caller always populates normally. */
static inline bool kasan_p4d_populate_zero_shadow(p4d_t *p4d, unsigned long addr,
						  unsigned long end, enum populate_mode mode)
{
	return false;
}
192557b1970SVasily Gorbik
/* !CONFIG_KASAN stub: never short-circuits; caller always populates normally. */
static inline bool kasan_pud_populate_zero_shadow(pud_t *pud, unsigned long addr,
						  unsigned long end, enum populate_mode mode)
{
	return false;
}
198557b1970SVasily Gorbik
/* !CONFIG_KASAN stub: never short-circuits; caller always populates normally. */
static inline bool kasan_pmd_populate_zero_shadow(pmd_t *pmd, unsigned long addr,
						  unsigned long end, enum populate_mode mode)
{
	return false;
}
204557b1970SVasily Gorbik
kasan_pte_populate_zero_shadow(pte_t * pte,enum populate_mode mode)205557b1970SVasily Gorbik static bool kasan_pte_populate_zero_shadow(pte_t *pte, enum populate_mode mode)
206557b1970SVasily Gorbik {
207557b1970SVasily Gorbik return false;
208557b1970SVasily Gorbik }
209557b1970SVasily Gorbik
210557b1970SVasily Gorbik #endif
211557b1970SVasily Gorbik
2128e9205d2SAlexander Gordeev /*
2138e9205d2SAlexander Gordeev * Mimic virt_to_kpte() in lack of init_mm symbol. Skip pmd NULL check though.
2148e9205d2SAlexander Gordeev */
__virt_to_kpte(unsigned long va)2158e9205d2SAlexander Gordeev static inline pte_t *__virt_to_kpte(unsigned long va)
2168e9205d2SAlexander Gordeev {
2178e9205d2SAlexander Gordeev return pte_offset_kernel(pmd_offset(pud_offset(p4d_offset(pgd_offset_k(va), va), va), va), va);
2188e9205d2SAlexander Gordeev }
2198e9205d2SAlexander Gordeev
boot_crst_alloc(unsigned long val)220bb1520d5SAlexander Gordeev static void *boot_crst_alloc(unsigned long val)
221bb1520d5SAlexander Gordeev {
222f913a660SVasily Gorbik unsigned long size = PAGE_SIZE << CRST_ALLOC_ORDER;
223bb1520d5SAlexander Gordeev unsigned long *table;
224bb1520d5SAlexander Gordeev
225f913a660SVasily Gorbik table = (unsigned long *)physmem_alloc_top_down(RR_VMEM, size, size);
226bb1520d5SAlexander Gordeev crst_table_init(table, val);
227bb1520d5SAlexander Gordeev return table;
228bb1520d5SAlexander Gordeev }
229bb1520d5SAlexander Gordeev
boot_pte_alloc(void)230bb1520d5SAlexander Gordeev static pte_t *boot_pte_alloc(void)
231bb1520d5SAlexander Gordeev {
232557b1970SVasily Gorbik static void *pte_leftover;
233bb1520d5SAlexander Gordeev pte_t *pte;
234bb1520d5SAlexander Gordeev
235557b1970SVasily Gorbik /*
236557b1970SVasily Gorbik * handling pte_leftovers this way helps to avoid memory fragmentation
237557b1970SVasily Gorbik * during POPULATE_KASAN_MAP_SHADOW when EDAT is off
238557b1970SVasily Gorbik */
239557b1970SVasily Gorbik if (!pte_leftover) {
240557b1970SVasily Gorbik pte_leftover = (void *)physmem_alloc_top_down(RR_VMEM, PAGE_SIZE, PAGE_SIZE);
241557b1970SVasily Gorbik pte = pte_leftover + _PAGE_TABLE_SIZE;
242557b1970SVasily Gorbik } else {
243557b1970SVasily Gorbik pte = pte_leftover;
244557b1970SVasily Gorbik pte_leftover = NULL;
245557b1970SVasily Gorbik }
246557b1970SVasily Gorbik
247bb1520d5SAlexander Gordeev memset64((u64 *)pte, _PAGE_INVALID, PTRS_PER_PTE);
248bb1520d5SAlexander Gordeev return pte;
249bb1520d5SAlexander Gordeev }
250bb1520d5SAlexander Gordeev
/*
 * Translate virtual address @addr to the physical address that should back
 * it, depending on @mode. @size is used only by POPULATE_KASAN_MAP_SHADOW
 * to size (and align) the fresh shadow allocation. Returns -1 when the mode
 * provides no backing (POPULATE_NONE or an unknown mode).
 */
static unsigned long _pa(unsigned long addr, unsigned long size, enum populate_mode mode)
{
	switch (mode) {
	case POPULATE_NONE:
		return -1;
	case POPULATE_DIRECT:
		/* identity mapping */
		return addr;
	case POPULATE_ABS_LOWCORE:
		return __abs_lowcore_pa(addr);
#ifdef CONFIG_KASAN
	case POPULATE_KASAN_MAP_SHADOW:
		/* back the shadow with freshly allocated, zeroed memory */
		addr = physmem_alloc_top_down(RR_VMEM, size, size);
		memset((void *)addr, 0, size);
		return addr;
#endif
	default:
		return -1;
	}
}
270e0e0a87bSAlexander Gordeev
can_large_pud(pud_t * pu_dir,unsigned long addr,unsigned long end)271bb1520d5SAlexander Gordeev static bool can_large_pud(pud_t *pu_dir, unsigned long addr, unsigned long end)
272bb1520d5SAlexander Gordeev {
273bb1520d5SAlexander Gordeev return machine.has_edat2 &&
274bb1520d5SAlexander Gordeev IS_ALIGNED(addr, PUD_SIZE) && (end - addr) >= PUD_SIZE;
275bb1520d5SAlexander Gordeev }
276bb1520d5SAlexander Gordeev
can_large_pmd(pmd_t * pm_dir,unsigned long addr,unsigned long end)277bb1520d5SAlexander Gordeev static bool can_large_pmd(pmd_t *pm_dir, unsigned long addr, unsigned long end)
278bb1520d5SAlexander Gordeev {
279bb1520d5SAlexander Gordeev return machine.has_edat1 &&
280bb1520d5SAlexander Gordeev IS_ALIGNED(addr, PMD_SIZE) && (end - addr) >= PMD_SIZE;
281bb1520d5SAlexander Gordeev }
282bb1520d5SAlexander Gordeev
/*
 * Populate all not-yet-mapped ptes of @pmd covering [addr, end) according
 * to @mode, and account 4K direct-map pages when identity mapping.
 */
static void pgtable_pte_populate(pmd_t *pmd, unsigned long addr, unsigned long end,
				 enum populate_mode mode)
{
	unsigned long pages = 0;
	pte_t *ptep, entry;

	for (ptep = pte_offset_kernel(pmd, addr); addr < end; addr += PAGE_SIZE, ptep++) {
		/* Leave already populated entries untouched. */
		if (!pte_none(*ptep))
			continue;
		if (kasan_pte_populate_zero_shadow(ptep, mode))
			continue;
		entry = __pte(_pa(addr, PAGE_SIZE, mode));
		entry = set_pte_bit(entry, PAGE_KERNEL);
		if (!machine.has_nx)
			entry = clear_pte_bit(entry, __pgprot(_PAGE_NOEXEC));
		set_pte(ptep, entry);
		pages++;
	}
	if (mode == POPULATE_DIRECT)
		update_page_count(PG_DIRECT_MAP_4K, pages);
}
305bb1520d5SAlexander Gordeev
/*
 * Populate the pmd level of @pud for [addr, end): use a large (1MB) segment
 * entry where possible, otherwise allocate a pte table and descend.
 * Accounts 1M direct-map pages when identity mapping.
 */
static void pgtable_pmd_populate(pud_t *pud, unsigned long addr, unsigned long end,
				 enum populate_mode mode)
{
	unsigned long next, pages = 0;
	pmd_t *pmd, entry;
	pte_t *pte;

	pmd = pmd_offset(pud, addr);
	for (; addr < end; addr = next, pmd++) {
		next = pmd_addr_end(addr, end);
		if (pmd_none(*pmd)) {
			if (kasan_pmd_populate_zero_shadow(pmd, addr, next, mode))
				continue;
			if (can_large_pmd(pmd, addr, next)) {
				entry = __pmd(_pa(addr, _SEGMENT_SIZE, mode));
				entry = set_pmd_bit(entry, SEGMENT_KERNEL);
				if (!machine.has_nx)
					entry = clear_pmd_bit(entry, __pgprot(_SEGMENT_ENTRY_NOEXEC));
				set_pmd(pmd, entry);
				pages++;
				continue;
			}
			pte = boot_pte_alloc();
			pmd_populate(&init_mm, pmd, pte);
		} else if (pmd_large(*pmd)) {
			/* Existing large mapping already covers this range. */
			continue;
		}
		pgtable_pte_populate(pmd, addr, next, mode);
	}
	if (mode == POPULATE_DIRECT)
		update_page_count(PG_DIRECT_MAP_1M, pages);
}
338bb1520d5SAlexander Gordeev
/*
 * Populate the pud level of @p4d for [addr, end): use a large (2GB) region
 * entry where possible, otherwise allocate a pmd table and descend.
 * Accounts 2G direct-map pages when identity mapping.
 */
static void pgtable_pud_populate(p4d_t *p4d, unsigned long addr, unsigned long end,
				 enum populate_mode mode)
{
	unsigned long next, pages = 0;
	pud_t *pud, entry;
	pmd_t *pmd;

	pud = pud_offset(p4d, addr);
	for (; addr < end; addr = next, pud++) {
		next = pud_addr_end(addr, end);
		if (pud_none(*pud)) {
			if (kasan_pud_populate_zero_shadow(pud, addr, next, mode))
				continue;
			if (can_large_pud(pud, addr, next)) {
				entry = __pud(_pa(addr, _REGION3_SIZE, mode));
				entry = set_pud_bit(entry, REGION3_KERNEL);
				if (!machine.has_nx)
					entry = clear_pud_bit(entry, __pgprot(_REGION_ENTRY_NOEXEC));
				set_pud(pud, entry);
				pages++;
				continue;
			}
			pmd = boot_crst_alloc(_SEGMENT_ENTRY_EMPTY);
			pud_populate(&init_mm, pud, pmd);
		} else if (pud_leaf(*pud)) {
			/* Existing large mapping already covers this range. */
			continue;
		}
		pgtable_pmd_populate(pud, addr, next, mode);
	}
	if (mode == POPULATE_DIRECT)
		update_page_count(PG_DIRECT_MAP_2G, pages);
}
371bb1520d5SAlexander Gordeev
/*
 * Populate the p4d level of @pgd for [addr, end): allocate pud tables as
 * needed (or hook up the shared kasan zero shadow) and descend.
 */
static void pgtable_p4d_populate(pgd_t *pgd, unsigned long addr, unsigned long end,
				 enum populate_mode mode)
{
	p4d_t *p4d = p4d_offset(pgd, addr);
	unsigned long next;

	while (addr < end) {
		next = p4d_addr_end(addr, end);
		if (p4d_none(*p4d)) {
			if (kasan_p4d_populate_zero_shadow(p4d, addr, next, mode))
				goto advance;
			p4d_populate(&init_mm, p4d, boot_crst_alloc(_REGION3_ENTRY_EMPTY));
		}
		pgtable_pud_populate(p4d, addr, next, mode);
advance:
		addr = next;
		p4d++;
	}
}
391bb1520d5SAlexander Gordeev
/*
 * Top-level page table walker: populate the virtual range [addr, end) in
 * init_mm according to @mode, allocating intermediate tables as needed.
 * POPULATE_KASAN_SHALLOW stops after the pgd level.
 */
static void pgtable_populate(unsigned long addr, unsigned long end, enum populate_mode mode)
{
	unsigned long next;
	pgd_t *pgd;
	p4d_t *p4d;

	pgd = pgd_offset(&init_mm, addr);
	for (; addr < end; addr = next, pgd++) {
		next = pgd_addr_end(addr, end);
		if (pgd_none(*pgd)) {
			if (kasan_pgd_populate_zero_shadow(pgd, addr, next, mode))
				continue;
			p4d = boot_crst_alloc(_REGION2_ENTRY_EMPTY);
			pgd_populate(&init_mm, pgd, p4d);
		}
#ifdef CONFIG_KASAN
		/* shallow mode only ensures top-level entries exist */
		if (mode == POPULATE_KASAN_SHALLOW)
			continue;
#endif
		pgtable_p4d_populate(pgd, addr, next, mode);
	}
}
414bb1520d5SAlexander Gordeev
/*
 * Build the boot-time kernel address space: initialize swapper_pg_dir and
 * invalid_pg_dir, create the identity mapping of all usable memory plus the
 * abs_lowcore and memcpy_real areas, populate the kasan shadow and finally
 * load the resulting ASCEs into control registers 1, 7 and 13.
 * @asce_limit selects a 3- or 4-level top table (_REGION1_SIZE => 4 levels).
 */
void setup_vmem(unsigned long asce_limit)
{
	unsigned long start, end;
	unsigned long asce_type;
	unsigned long asce_bits;
	int i;

	if (asce_limit == _REGION1_SIZE) {
		asce_type = _REGION2_ENTRY_EMPTY;
		asce_bits = _ASCE_TYPE_REGION2 | _ASCE_TABLE_LENGTH;
	} else {
		asce_type = _REGION3_ENTRY_EMPTY;
		asce_bits = _ASCE_TYPE_REGION3 | _ASCE_TABLE_LENGTH;
	}
	s390_invalid_asce = invalid_pg_dir | _ASCE_TYPE_REGION3 | _ASCE_TABLE_LENGTH;

	crst_table_init((unsigned long *)swapper_pg_dir, asce_type);
	crst_table_init((unsigned long *)invalid_pg_dir, _REGION3_ENTRY_EMPTY);

	/*
	 * To allow prefixing the lowcore must be mapped with 4KB pages.
	 * To prevent creation of a large page at address 0 first map
	 * the lowcore and create the identity mapping only afterwards.
	 */
	pgtable_populate(0, sizeof(struct lowcore), POPULATE_DIRECT);
	for_each_physmem_usable_range(i, &start, &end)
		pgtable_populate(start, end, POPULATE_DIRECT);
	pgtable_populate(__abs_lowcore, __abs_lowcore + sizeof(struct lowcore),
			 POPULATE_ABS_LOWCORE);
	/* page tables only; memcpy_real installs its own pte at runtime */
	pgtable_populate(__memcpy_real_area, __memcpy_real_area + PAGE_SIZE,
			 POPULATE_NONE);
	memcpy_real_ptep = __virt_to_kpte(__memcpy_real_area);

	kasan_populate_shadow();

	S390_lowcore.kernel_asce = swapper_pg_dir | asce_bits;
	S390_lowcore.user_asce = s390_invalid_asce;

	/* cr1: primary, cr7: secondary, cr13: home space ASCE */
	__ctl_load(S390_lowcore.kernel_asce, 1, 1);
	__ctl_load(S390_lowcore.user_asce, 7, 7);
	__ctl_load(S390_lowcore.kernel_asce, 13, 13);

	init_mm.context.asce = S390_lowcore.kernel_asce;
}
459