// SPDX-License-Identifier: GPL-2.0
#include <linux/io.h>
#include <linux/slab.h>
#include <linux/memblock.h>
#include <linux/cc_platform.h>
#include <linux/pgtable.h>

#include <asm/set_memory.h>
#include <asm/realmode.h>
#include <asm/tlbflush.h>
#include <asm/crash.h>
#include <asm/sev.h>

/* Header of the copied real-mode blob; set via set_real_mode_mem(). */
struct real_mode_header *real_mode_header;
/* Points into the trampoline header so CR4 features can be updated later. */
u32 *trampoline_cr4_features;

/* Hold the pgd entry used on booting additional CPUs */
pgd_t trampoline_pgd_entry;

/*
 * Reserve low memory for the real-mode AP boot trampoline.  Runs very
 * early (before slab is up, hence the WARN_ON) because sub-1M memory
 * must be claimed before anything else grabs it.
 */
void __init reserve_real_mode(void)
{
	phys_addr_t mem;
	size_t size = real_mode_size_needed();

	if (!size)
		return;

	WARN_ON(slab_is_available());

	/* Has to be under 1M so we can execute real-mode AP code. */
	mem = memblock_phys_alloc_range(size, PAGE_SIZE, 0, 1<<20);
	if (!mem)
		pr_info("No sub-1M memory is available for the trampoline\n");
	else
		set_real_mode_mem(mem);

	/*
	 * Unconditionally reserve the entire first 1M, see comment in
	 * setup_arch().
	 */
	memblock_reserve(0, SZ_1M);
}

/*
 * Adjust the trampoline header for AMD memory encryption:
 * - SME active on the host: flag it so the trampoline can handle it.
 * - SEV-ES guest (encrypted register state): switch the AP entry point
 *   and set up the AP jump table; failure here is fatal since APs could
 *   never be brought up.
 */
static void sme_sev_setup_real_mode(struct trampoline_header *th)
{
#ifdef CONFIG_AMD_MEM_ENCRYPT
	if (cc_platform_has(CC_ATTR_HOST_MEM_ENCRYPT))
		th->flags |= TH_FLAGS_SME_ACTIVE;

	if (cc_platform_has(CC_ATTR_GUEST_STATE_ENCRYPT)) {
		/*
		 * Skip the call to verify_cpu() in secondary_startup_64 as it
		 * will cause #VC exceptions when the AP can't handle them yet.
		 */
		th->start = (u64) secondary_startup_64_no_verify;

		if (sev_es_setup_ap_jump_table(real_mode_header))
			panic("Failed to get/update SEV-ES AP Jump Table");
	}
#endif
}

/*
 * Copy the real-mode blob into the reserved low-memory area, apply its
 * 16-bit segment and 32-bit linear relocations, and fill in the
 * trampoline header (and, on 64-bit, the trampoline page table).
 */
static void __init setup_real_mode(void)
{
	u16 real_mode_seg;
	const u32 *rel;
	u32 count;
	unsigned char *base;
	unsigned long phys_base;
	struct trampoline_header *trampoline_header;
	size_t size = PAGE_ALIGN(real_mode_blob_end - real_mode_blob);
#ifdef CONFIG_X86_64
	u64 *trampoline_pgd;
	u64 efer;
#endif

	base = (unsigned char *)real_mode_header;

	/*
	 * If SME is active, the trampoline area will need to be in
	 * decrypted memory in order to bring up other processors
	 * successfully. This is not needed for SEV.
	 */
	if (cc_platform_has(CC_ATTR_HOST_MEM_ENCRYPT))
		set_memory_decrypted((unsigned long)base, size >> PAGE_SHIFT);

	memcpy(base, real_mode_blob, size);

	phys_base = __pa(base);
	/* Real-mode segment = physical base >> 4 (16-byte paragraphs). */
	real_mode_seg = phys_base >> 4;

	rel = (u32 *) real_mode_relocs;

	/* 16-bit segment relocations. */
	count = *rel++;
	while (count--) {
		u16 *seg = (u16 *) (base + *rel++);
		*seg = real_mode_seg;
	}

	/* 32-bit linear relocations. */
	count = *rel++;
	while (count--) {
		u32 *ptr = (u32 *) (base + *rel++);
		*ptr += phys_base;
	}

	/* Must be performed *after* relocation. */
	trampoline_header = (struct trampoline_header *)
		__va(real_mode_header->trampoline_header);

#ifdef CONFIG_X86_32
	trampoline_header->start = __pa_symbol(startup_32_smp);
	trampoline_header->gdt_limit = __BOOT_DS + 7;
	trampoline_header->gdt_base = __pa_symbol(boot_gdt);
#else
	/*
	 * Some AMD processors will #GP(0) if EFER.LMA is set in WRMSR
	 * so we need to mask it out.
	 */
	rdmsrl(MSR_EFER, efer);
	trampoline_header->efer = efer & ~EFER_LMA;

	trampoline_header->start = (u64) secondary_startup_64;
	trampoline_cr4_features = &trampoline_header->cr4;
	*trampoline_cr4_features = mmu_cr4_features;

	trampoline_header->flags = 0;

	/*
	 * Trampoline page table: entry 0 for the low identity mapping,
	 * entry 511 copied from init_top_pgt (kernel-half mapping).
	 */
	trampoline_pgd = (u64 *) __va(real_mode_header->trampoline_pgd);
	trampoline_pgd[0] = trampoline_pgd_entry.pgd;
	trampoline_pgd[511] = init_top_pgt[511].pgd;
#endif

	sme_sev_setup_real_mode(trampoline_header);
}

/*
 * reserve_real_mode() gets called very early, to guarantee the
 * availability of low memory. This is before the proper kernel page
 * tables are set up, so we cannot set page permissions in that
 * function. Also trampoline code will be executed by APs so we
 * need to mark it executable at do_pre_smp_initcalls() at least,
 * thus run it as an early_initcall().
 */
static void __init set_real_mode_permissions(void)
{
	unsigned char *base = (unsigned char *) real_mode_header;
	size_t size = PAGE_ALIGN(real_mode_blob_end - real_mode_blob);

	/* Read-only region: from the start of the blob up to ro_end. */
	size_t ro_size =
		PAGE_ALIGN(real_mode_header->ro_end) -
		__pa(base);

	/* Executable region: from text_start up to ro_end. */
	size_t text_size =
		PAGE_ALIGN(real_mode_header->ro_end) -
		real_mode_header->text_start;

	unsigned long text_start =
		(unsigned long) __va(real_mode_header->text_start);

	set_memory_nx((unsigned long) base, size >> PAGE_SHIFT);
	set_memory_ro((unsigned long) base, ro_size >> PAGE_SHIFT);
	set_memory_x((unsigned long) text_start, text_size >> PAGE_SHIFT);
}

/*
 * Finish trampoline setup once page-table manipulation is possible.
 * Panics if reserve_real_mode() could not allocate low memory, since
 * APs cannot be booted without the trampoline.
 */
static int __init init_real_mode(void)
{
	if (!real_mode_header)
		panic("Real mode trampoline was not allocated");

	setup_real_mode();
	set_real_mode_permissions();

	return 0;
}
early_initcall(init_real_mode);