/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_KASAN_H
#define __ASM_KASAN_H

#include <asm/pgtable.h>

#ifdef CONFIG_KASAN

#define KASAN_SHADOW_SCALE_SHIFT 3
#define KASAN_SHADOW_SIZE \
	(_AC(1, UL) << (_REGION1_SHIFT - KASAN_SHADOW_SCALE_SHIFT))
#define KASAN_SHADOW_OFFSET	_AC(CONFIG_KASAN_SHADOW_OFFSET, UL)
#define KASAN_SHADOW_START	KASAN_SHADOW_OFFSET
#define KASAN_SHADOW_END	(KASAN_SHADOW_START + KASAN_SHADOW_SIZE)

extern void kasan_early_init(void);
extern void kasan_copy_shadow_mapping(void);
extern void kasan_free_early_identity(void);

/*
 * Estimate the amount of memory kasan needs; this amount is reserved
 * at the very end of available physical memory. The estimate accounts
 * for the shadow memory (1/8 of available physical memory) plus the
 * page tables needed to map both the whole memory and the shadow
 * memory region (1 + 1/8). To keep the page table estimate simple,
 * double the combined pte size.
 *
 * The physmem parameter must already be adjusted if not all of
 * physical memory is going to be used (e.g. due to the "mem=" option).
 */
static inline unsigned long kasan_estimate_memory_needs(unsigned long physmem)
{
	unsigned long kasan_needs;
	unsigned long pages;

	/* for shadow memory */
	kasan_needs = round_up(physmem / 8, PAGE_SIZE);
	/* for paging structures */
	pages = DIV_ROUND_UP(physmem + kasan_needs, PAGE_SIZE);
	kasan_needs += DIV_ROUND_UP(pages, _PAGE_ENTRIES) * _PAGE_TABLE_SIZE * 2;

	return kasan_needs;
}
#else
static inline void kasan_early_init(void) { }
static inline void kasan_copy_shadow_mapping(void) { }
static inline void kasan_free_early_identity(void) { }
static inline unsigned long kasan_estimate_memory_needs(unsigned long physmem) { return 0; }
#endif /* CONFIG_KASAN */

#endif /* __ASM_KASAN_H */
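
/*
 * Worked example (illustrative only, not part of the header proper):
 * assuming 4 KiB pages and 2 KiB pte tables holding 256 eight-byte
 * entries, as on s390, kasan_estimate_memory_needs() comes out roughly
 * as follows for 4 GiB of physical memory:
 *
 *   shadow memory = 4 GiB / 8                         = 512 MiB
 *   pages         = (4 GiB + 512 MiB) / 4 KiB         = 1179648
 *   page tables   = (1179648 / 256) * 2 KiB * 2       = 18 MiB
 *   total                                             ~ 530 MiB
 *
 * The concrete constants (PAGE_SIZE, _PAGE_ENTRIES, _PAGE_TABLE_SIZE)
 * come from the architecture headers; the numbers above are a sketch
 * under the assumptions stated, not guaranteed values.
 */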