/*
 * linux/arch/arm/kernel/head-nommu.S
 *
 * Copyright (C) 1994-2002 Russell King
 * Copyright (C) 2003-2006 Hyok S. Choi
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * Common kernel startup code (non-paged MM)
 *
 */
#include <linux/linkage.h>
#include <linux/init.h>
#include <linux/errno.h>

#include <asm/assembler.h>
#include <asm/ptrace.h>
#include <asm/asm-offsets.h>
#include <asm/memory.h>
#include <asm/cp15.h>
#include <asm/thread_info.h>
#include <asm/v7m.h>
#include <asm/mpu.h>
#include <asm/page.h>

/*
 * Kernel startup entry point.
 * ---------------------------
 *
 * This is normally called from the decompressor code.  The requirements
 * are: MMU = off, D-cache = off, I-cache = dont care, r0 = 0,
 * r1 = machine nr.
 *
 * See linux/arch/arm/tools/mach-types for the complete list of machine
 * numbers for r1.
 *
 */

	__HEAD

#ifdef CONFIG_CPU_THUMBONLY
	.thumb
ENTRY(stext)
#else
	.arm
ENTRY(stext)

 THUMB(	badr	r9, 1f		)	@ Kernel is always entered in ARM.
 THUMB(	bx	r9		)	@ If this is a Thumb-2 kernel,
 THUMB(	.thumb			)	@ switch to Thumb now.
 THUMB(1:			)
#endif

	setmode	PSR_F_BIT | PSR_I_BIT | SVC_MODE, r9 @ ensure svc mode
						@ and irqs disabled
	/*
	 * Obtain the CPU ID in r9 for __lookup_processor_type.  With no
	 * CP15 (and no V7M SCB) the ID must come from Kconfig.
	 */
#if defined(CONFIG_CPU_CP15)
	mrc	p15, 0, r9, c0, c0		@ get processor id
#elif defined(CONFIG_CPU_V7M)
	ldr	r9, =BASEADDR_V7M_SCB
	ldr	r9, [r9, V7M_SCB_CPUID]
#else
	ldr	r9, =CONFIG_PROCESSOR_ID
#endif
	bl	__lookup_processor_type		@ r5=procinfo r9=cpuid
	movs	r10, r5				@ invalid processor (r5=0)?
	beq	__error_p			@ yes, error 'p'

#ifdef CONFIG_ARM_MPU
	bl	__setup_mpu
#endif

	/*
	 * Call the per-CPU initialisation function found through the
	 * procinfo table.  PROCINFO_INITFUNC is stored as an offset
	 * relative to the procinfo entry (r10), hence the add.  The init
	 * function returns to 1: below with the new SCTLR value in r0
	 * (see __after_proc_init).
	 */
	badr	lr, 1f				@ return (PIC) address
	ldr	r12, [r10, #PROCINFO_INITFUNC]
	add	r12, r12, r10
	ret	r12
1:	ldr	lr, =__mmap_switched		@ __after_proc_init returns there
	b	__after_proc_init
ENDPROC(stext)

#ifdef CONFIG_SMP
	.text
ENTRY(secondary_startup)
	/*
	 * Common entry point for secondary CPUs.
	 *
	 * Ensure that we're in SVC mode, and IRQs are disabled.  Lookup
	 * the processor type - there is no need to check the machine type
	 * as it has already been validated by the primary processor.
	 */
	setmode	PSR_F_BIT | PSR_I_BIT | SVC_MODE, r9
#ifndef CONFIG_CPU_CP15
	ldr	r9, =CONFIG_PROCESSOR_ID
#else
	mrc	p15, 0, r9, c0, c0		@ get processor id
#endif
	bl	__lookup_processor_type		@ r5=procinfo r9=cpuid
	movs	r10, r5				@ invalid processor?
	beq	__error_p			@ yes, error 'p'

	ldr	r7, __secondary_data		@ r7 = &secondary_data, kept
						@ live across the calls below

#ifdef CONFIG_ARM_MPU
	bl	__secondary_setup_mpu		@ Initialize the MPU
#endif

	/* Same PIC procinfo init-function call sequence as stext above */
	badr	lr, 1f				@ return (PIC) address
	ldr	r12, [r10, #PROCINFO_INITFUNC]
	add	r12, r12, r10
	ret	r12
1:	bl	__after_proc_init
	ldr	sp, [r7, #12]			@ set up the stack pointer
						@ (offset 12 into secondary_data;
						@ presumably ->stack - confirm
						@ against asm-offsets)
	mov	fp, #0
	b	secondary_start_kernel
ENDPROC(secondary_startup)

	/* Literal pool entry holding the address of secondary_data */
	.type	__secondary_data, %object
__secondary_data:
	.long	secondary_data
#endif /* CONFIG_SMP */

/*
 * Set the Control Register and Read the process ID.
 *
 * Entered with the CPU init function's result in r0 and the final
 * destination (e.g. __mmap_switched) in lr.  Applies the Kconfig-selected
 * alignment/cache/branch-predictor overrides before writing the value back.
 */
	.text
__after_proc_init:
#ifdef CONFIG_CPU_CP15
	/*
	 * CP15 system control register value returned in r0 from
	 * the CPU init function.
	 */
#if defined(CONFIG_ALIGNMENT_TRAP) && __LINUX_ARM_ARCH__ < 6
	orr	r0, r0, #CR_A			@ pre-v6: trap unaligned accesses
#else
	bic	r0, r0, #CR_A			@ v6+: allow unaligned accesses
#endif
#ifdef CONFIG_CPU_DCACHE_DISABLE
	bic	r0, r0, #CR_C
#endif
#ifdef CONFIG_CPU_BPREDICT_DISABLE
	bic	r0, r0, #CR_Z
#endif
#ifdef CONFIG_CPU_ICACHE_DISABLE
	bic	r0, r0, #CR_I
#endif
	mcr	p15, 0, r0, c1, c0, 0		@ write control reg
#elif defined (CONFIG_CPU_V7M)
	/* For V7M systems we want to modify the CCR similarly to the SCTLR */
#ifdef CONFIG_CPU_DCACHE_DISABLE
	bic	r0, r0, #V7M_SCB_CCR_DC
#endif
#ifdef CONFIG_CPU_BPREDICT_DISABLE
	bic	r0, r0, #V7M_SCB_CCR_BP
#endif
#ifdef CONFIG_CPU_ICACHE_DISABLE
	bic	r0, r0, #V7M_SCB_CCR_IC
#endif
	/* Build the CCR address without a literal-pool load */
	movw	r3, #:lower16:(BASEADDR_V7M_SCB + V7M_SCB_CCR)
	movt	r3, #:upper16:(BASEADDR_V7M_SCB + V7M_SCB_CCR)
	str	r0, [r3]
#endif /* CONFIG_CPU_CP15 elif CONFIG_CPU_V7M */
	ret	lr
ENDPROC(__after_proc_init)
	.ltorg

#ifdef CONFIG_ARM_MPU


#ifndef CONFIG_CPU_V7M
/* Set which MPU region should be programmed */
.macro	set_region_nr tmp, rgnr, unused
	mov	\tmp, \rgnr			@ Use static region numbers
	mcr	p15, 0, \tmp, c6, c2, 0		@ Write RGNR
.endm

/* Setup a single MPU region, either D or I side (D-side for unified) */
.macro	setup_region bar, acr, sr, side = PMSAv7_DATA_SIDE, unused
	mcr	p15, 0, \bar, c6, c1, (0 + \side)	@ I/DRBAR
	mcr	p15, 0, \acr, c6, c1, (4 + \side)	@ I/DRACR
	mcr	p15, 0, \sr, c6, c1, (2 + \side)	@ I/DRSR
.endm
#else
/* V7M: regions are programmed through memory-mapped SCB registers at \base */
.macro	set_region_nr tmp, rgnr, base
	mov	\tmp, \rgnr
	str	\tmp, [\base, #PMSAv7_RNR]
.endm

.macro	setup_region bar, acr, sr, unused, base
	lsl	\acr, \acr, #16			@ RASR packs ACR:SR as hi:lo
	orr	\acr, \acr, \sr
	str	\bar, [\base, #PMSAv7_RBAR]
	str	\acr, [\base, #PMSAv7_RASR]
.endm

#endif
/*
 * Setup the MPU and initial MPU Regions.
We create the following regions: 199 * Region 0: Use this for probing the MPU details, so leave disabled. 200 * Region 1: Background region - covers the whole of RAM as strongly ordered 201 * Region 2: Normal, Shared, cacheable for RAM. From PHYS_OFFSET, size from r6 202 * Region 3: Normal, shared, inaccessible from PL0 to protect the vectors page 203 * 204 * r6: Value to be written to DRSR (and IRSR if required) for PMSAv7_RAM_REGION 205*/ 206 __HEAD 207 208ENTRY(__setup_mpu) 209 210 /* Probe for v7 PMSA compliance */ 211M_CLASS(movw r12, #:lower16:BASEADDR_V7M_SCB) 212M_CLASS(movt r12, #:upper16:BASEADDR_V7M_SCB) 213 214AR_CLASS(mrc p15, 0, r0, c0, c1, 4) @ Read ID_MMFR0 215M_CLASS(ldr r0, [r12, 0x50]) 216 and r0, r0, #(MMFR0_PMSA) @ PMSA field 217 teq r0, #(MMFR0_PMSAv7) @ PMSA v7 218 beq __setup_pmsa_v7 219 220 ret lr 221ENDPROC(__setup_mpu) 222 223ENTRY(__setup_pmsa_v7) 224 /* Calculate the size of a region covering just the kernel */ 225 ldr r5, =PLAT_PHYS_OFFSET @ Region start: PHYS_OFFSET 226 ldr r6, =(_end) @ Cover whole kernel 227 sub r6, r6, r5 @ Minimum size of region to map 228 clz r6, r6 @ Region size must be 2^N... 229 rsb r6, r6, #31 @ ...so round up region size 230 lsl r6, r6, #PMSAv7_RSR_SZ @ Put size in right field 231 orr r6, r6, #(1 << PMSAv7_RSR_EN) @ Set region enabled bit 232 233 /* Determine whether the D/I-side memory map is unified. 
We set the 234 * flags here and continue to use them for the rest of this function */ 235AR_CLASS(mrc p15, 0, r0, c0, c0, 4) @ MPUIR 236M_CLASS(ldr r0, [r12, #MPU_TYPE]) 237 ands r5, r0, #MPUIR_DREGION_SZMASK @ 0 size d region => No MPU 238 bxeq lr 239 tst r0, #MPUIR_nU @ MPUIR_nU = 0 for unified 240 241 /* Setup second region first to free up r6 */ 242 set_region_nr r0, #PMSAv7_RAM_REGION, r12 243 isb 244 /* Full access from PL0, PL1, shared for CONFIG_SMP, cacheable */ 245 ldr r0, =PLAT_PHYS_OFFSET @ RAM starts at PHYS_OFFSET 246 ldr r5,=(PMSAv7_AP_PL1RW_PL0RW | PMSAv7_RGN_NORMAL) 247 248 setup_region r0, r5, r6, PMSAv7_DATA_SIDE, r12 @ PHYS_OFFSET, shared, enabled 249 beq 1f @ Memory-map not unified 250 setup_region r0, r5, r6, PMSAv7_INSTR_SIDE, r12 @ PHYS_OFFSET, shared, enabled 2511: isb 252 253 /* First/background region */ 254 set_region_nr r0, #PMSAv7_BG_REGION, r12 255 isb 256 /* Execute Never, strongly ordered, inaccessible to PL0, rw PL1 */ 257 mov r0, #0 @ BG region starts at 0x0 258 ldr r5,=(PMSAv7_ACR_XN | PMSAv7_RGN_STRONGLY_ORDERED | PMSAv7_AP_PL1RW_PL0NA) 259 mov r6, #PMSAv7_RSR_ALL_MEM @ 4GB region, enabled 260 261 setup_region r0, r5, r6, PMSAv7_DATA_SIDE, r12 @ 0x0, BG region, enabled 262 beq 2f @ Memory-map not unified 263 setup_region r0, r5, r6, PMSAv7_INSTR_SIDE r12 @ 0x0, BG region, enabled 2642: isb 265 266#ifdef CONFIG_XIP_KERNEL 267 set_region_nr r0, #PMSAv7_ROM_REGION, r12 268 isb 269 270 ldr r5,=(PMSAv7_AP_PL1RO_PL0NA | PMSAv7_RGN_NORMAL) 271 272 ldr r0, =CONFIG_XIP_PHYS_ADDR @ ROM start 273 ldr r6, =(_exiprom) @ ROM end 274 sub r6, r6, r0 @ Minimum size of region to map 275 clz r6, r6 @ Region size must be 2^N... 
276 rsb r6, r6, #31 @ ...so round up region size 277 lsl r6, r6, #PMSAv7_RSR_SZ @ Put size in right field 278 orr r6, r6, #(1 << PMSAv7_RSR_EN) @ Set region enabled bit 279 280 setup_region r0, r5, r6, PMSAv7_DATA_SIDE, r12 @ XIP_PHYS_ADDR, shared, enabled 281 beq 3f @ Memory-map not unified 282 setup_region r0, r5, r6, PMSAv7_INSTR_SIDE, r12 @ XIP_PHYS_ADDR, shared, enabled 2833: isb 284#endif 285 286 /* Enable the MPU */ 287AR_CLASS(mrc p15, 0, r0, c1, c0, 0) @ Read SCTLR 288AR_CLASS(bic r0, r0, #CR_BR) @ Disable the 'default mem-map' 289AR_CLASS(orr r0, r0, #CR_M) @ Set SCTRL.M (MPU on) 290AR_CLASS(mcr p15, 0, r0, c1, c0, 0) @ Enable MPU 291 292M_CLASS(ldr r0, [r12, #MPU_CTRL]) 293M_CLASS(bic r0, #MPU_CTRL_PRIVDEFENA) 294M_CLASS(orr r0, #MPU_CTRL_ENABLE) 295M_CLASS(str r0, [r12, #MPU_CTRL]) 296 isb 297 298 ret lr 299ENDPROC(__setup_pmsa_v7) 300 301#ifdef CONFIG_SMP 302/* 303 * r6: pointer at mpu_rgn_info 304 */ 305 306 .text 307ENTRY(__secondary_setup_mpu) 308 /* Use MPU region info supplied by __cpu_up */ 309 ldr r6, [r7] @ get secondary_data.mpu_rgn_info 310 311 /* Probe for v7 PMSA compliance */ 312 mrc p15, 0, r0, c0, c1, 4 @ Read ID_MMFR0 313 and r0, r0, #(MMFR0_PMSA) @ PMSA field 314 teq r0, #(MMFR0_PMSAv7) @ PMSA v7 315 beq __secondary_setup_pmsa_v7 316 b __error_p 317ENDPROC(__secondary_setup_mpu) 318 319/* 320 * r6: pointer at mpu_rgn_info 321 */ 322ENTRY(__secondary_setup_pmsa_v7) 323 /* Determine whether the D/I-side memory map is unified. 
We set the 324 * flags here and continue to use them for the rest of this function */ 325 mrc p15, 0, r0, c0, c0, 4 @ MPUIR 326 ands r5, r0, #MPUIR_DREGION_SZMASK @ 0 size d region => No MPU 327 beq __error_p 328 329 ldr r4, [r6, #MPU_RNG_INFO_USED] 330 mov r5, #MPU_RNG_SIZE 331 add r3, r6, #MPU_RNG_INFO_RNGS 332 mla r3, r4, r5, r3 333 3341: 335 tst r0, #MPUIR_nU @ MPUIR_nU = 0 for unified 336 sub r3, r3, #MPU_RNG_SIZE 337 sub r4, r4, #1 338 339 set_region_nr r0, r4 340 isb 341 342 ldr r0, [r3, #MPU_RGN_DRBAR] 343 ldr r6, [r3, #MPU_RGN_DRSR] 344 ldr r5, [r3, #MPU_RGN_DRACR] 345 346 setup_region r0, r5, r6, PMSAv7_DATA_SIDE 347 beq 2f 348 setup_region r0, r5, r6, PMSAv7_INSTR_SIDE 3492: isb 350 351 mrc p15, 0, r0, c0, c0, 4 @ Reevaluate the MPUIR 352 cmp r4, #0 353 bgt 1b 354 355 /* Enable the MPU */ 356 mrc p15, 0, r0, c1, c0, 0 @ Read SCTLR 357 bic r0, r0, #CR_BR @ Disable the 'default mem-map' 358 orr r0, r0, #CR_M @ Set SCTRL.M (MPU on) 359 mcr p15, 0, r0, c1, c0, 0 @ Enable MPU 360 isb 361 362 ret lr 363ENDPROC(__secondary_setup_pmsa_v7) 364 365#endif /* CONFIG_SMP */ 366#endif /* CONFIG_ARM_MPU */ 367#include "head-common.S" 368