xref: /openbmc/u-boot/arch/arm/cpu/armv8/start.S (revision 90101386)
/*
 * (C) Copyright 2013
 * David Feng <fenghua@phytium.com.cn>
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

#include <asm-offsets.h>
#include <config.h>
#include <linux/linkage.h>
#include <asm/macro.h>
#include <asm/armv8/mmu.h>

/*************************************************************************
 *
 * Startup Code (reset vector)
 *
 *************************************************************************/

.globl	_start
_start:
	b	reset

#ifdef CONFIG_ENABLE_ARM_SOC_BOOT0_HOOK
/*
 * Various SoCs need something special and SoC-specific up front in
 * order to boot, so allow them to set that in their boot0.h file and
 * then use it here.
 */
#include <asm/arch/boot0.h>
ARM_SOC_BOOT0_HOOK
#endif

	.align 3

.globl	_TEXT_BASE
_TEXT_BASE:
	.quad	CONFIG_SYS_TEXT_BASE

/*
 * These offsets are computed from symbols defined in the linker script.
 */
.globl	_end_ofs
_end_ofs:
	.quad	_end - _start

.globl	_bss_start_ofs
_bss_start_ofs:
	.quad	__bss_start - _start

.globl	_bss_end_ofs
_bss_end_ofs:
	.quad	__bss_end - _start

reset:
	/* Allow the board to save important registers */
	b	save_boot_params
.globl	save_boot_params_ret
save_boot_params_ret:

#ifdef CONFIG_SYS_RESET_SCTRL
	bl	reset_sctrl
#endif
	/*
	 * We may enter here at EL3, EL2 or EL1. Initial state:
	 * little-endian, MMU disabled, I- and D-caches disabled.
	 */
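	/*
	 * switch_el (asm/macro.h) reads CurrentEL and branches to the
	 * label that matches the exception level we entered at.
	 */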
	adr	x0, vectors
	switch_el x1, 3f, 2f, 1f
3:	msr	vbar_el3, x0
	mrs	x0, scr_el3
	orr	x0, x0, #0xf			/* SCR_EL3.NS|IRQ|FIQ|EA */
	msr	scr_el3, x0
	msr	cptr_el3, xzr			/* Enable FP/SIMD */
#ifdef COUNTER_FREQUENCY
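	/*
	 * CNTFRQ_EL0 is writable only at the highest implemented
	 * exception level, so it is programmed here on the EL3 path.
	 */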
	ldr	x0, =COUNTER_FREQUENCY
	msr	cntfrq_el0, x0			/* Initialize CNTFRQ */
#endif
	b	0f
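	/*
	 * 0x33ff sets the RES1 bits of CPTR_EL2 and leaves TFP (bit 10)
	 * clear, so FP/SIMD accesses are not trapped to EL2.
	 */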
2:	msr	vbar_el2, x0
	mov	x0, #0x33ff
	msr	cptr_el2, x0			/* Enable FP/SIMD */
	b	0f
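	/*
	 * CPACR_EL1.FPEN (bits 21:20) = 0b11 disables trapping of
	 * FP/SIMD instructions at EL0 and EL1.
	 */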
1:	msr	vbar_el1, x0
	mov	x0, #3 << 20
	msr	cpacr_el1, x0			/* Enable FP/SIMD */
0:

	/* Apply ARM core specific errata */
	bl	apply_core_errata

	/*
	 * Cache/BPB/TLB invalidation:
	 * the I-cache is invalidated before being enabled in icache_enable(),
	 * the TLBs are invalidated before the MMU is enabled in dcache_enable(),
	 * and the D-cache is invalidated before being enabled in dcache_enable().
	 */

	/* Processor-specific initialization */
	bl	lowlevel_init

#if CONFIG_IS_ENABLED(ARMV8_SPIN_TABLE)
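	/*
	 * branch_if_master (asm/macro.h) inspects the MPIDR_EL1
	 * affinity fields and branches if this is the primary core.
	 */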
	branch_if_master x0, x1, master_cpu
	b	spin_table_secondary_jump
	/* never returns */
#elif defined(CONFIG_ARMV8_MULTIENTRY)
	branch_if_master x0, x1, master_cpu

	/*
	 * Slave CPUs
	 */
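	/*
	 * Spin-table protocol: sleep until an event, then re-read the
	 * release address; a non-zero value is the entry point that the
	 * primary core (or the OS) has published for this CPU.
	 */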
slave_cpu:
	wfe
	ldr	x1, =CPU_RELEASE_ADDR
	ldr	x0, [x1]
	cbz	x0, slave_cpu
	br	x0			/* branch to the given address */
#endif /* CONFIG_ARMV8_MULTIENTRY */
master_cpu:
	bl	_main

#ifdef CONFIG_SYS_RESET_SCTRL
reset_sctrl:
	switch_el x1, 3f, 2f, 1f
3:
	mrs	x0, sctlr_el3
	b	0f
2:
	mrs	x0, sctlr_el2
	b	0f
1:
	mrs	x0, sctlr_el1

0:
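	/*
	 * The 0xfdfffffa mask clears SCTLR_ELx.M (bit 0, MMU enable),
	 * .C (bit 2, D-cache enable) and .EE (bit 25, exception
	 * endianness), putting the core into a known state.
	 */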
	ldr	x1, =0xfdfffffa
	and	x0, x0, x1

	switch_el x1, 6f, 5f, 4f
6:
	msr	sctlr_el3, x0
	b	7f
5:
	msr	sctlr_el2, x0
	b	7f
4:
	msr	sctlr_el1, x0

7:
	dsb	sy
	isb
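	/*
	 * Tail call: __asm_invalidate_tlb_all returns directly to our
	 * caller via lr, so the ret below is never reached.
	 */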
	b	__asm_invalidate_tlb_all
	ret
#endif

/*-----------------------------------------------------------------------*/

WEAK(apply_core_errata)

	mov	x29, lr			/* Save LR */
	/* For now, we support Cortex-A57 specific errata only */

	/* Check if we are running on a Cortex-A57 core */
	branch_if_a57_core x0, apply_a57_core_errata
0:
	mov	lr, x29			/* Restore LR */
	ret

apply_a57_core_errata:

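	/*
	 * cpuactlr_el1 is an IMPLEMENTATION DEFINED Cortex-A57 register;
	 * S3_1_c15_c2_0 is its raw system-register encoding, which
	 * assembles even when the assembler does not know the name.
	 */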
#ifdef CONFIG_ARM_ERRATA_828024
	mrs	x0, S3_1_c15_c2_0	/* cpuactlr_el1 */
	/* Disable non-allocate hint of w-b-n-a memory type */
	orr	x0, x0, #1 << 49
	/* Disable write streaming no L1-allocate threshold */
	orr	x0, x0, #3 << 25
	/* Disable write streaming no-allocate threshold */
	orr	x0, x0, #3 << 27
	msr	S3_1_c15_c2_0, x0	/* cpuactlr_el1 */
#endif

#ifdef CONFIG_ARM_ERRATA_826974
	mrs	x0, S3_1_c15_c2_0	/* cpuactlr_el1 */
	/* Disable speculative load execution ahead of a DMB */
	orr	x0, x0, #1 << 59
	msr	S3_1_c15_c2_0, x0	/* cpuactlr_el1 */
#endif

#ifdef CONFIG_ARM_ERRATA_833471
	mrs	x0, S3_1_c15_c2_0	/* cpuactlr_el1 */
	/*
	 * FPSCR write flush.
	 * Note that in some cases where a flush is unnecessary this
	 * could impact performance.
	 */
	orr	x0, x0, #1 << 38
	msr	S3_1_c15_c2_0, x0	/* cpuactlr_el1 */
#endif

#ifdef CONFIG_ARM_ERRATA_829520
	mrs	x0, S3_1_c15_c2_0	/* cpuactlr_el1 */
	/*
	 * Setting the Disable Indirect Predictor bit prevents this
	 * erratum from occurring.
	 * Note that in some cases this could impact performance.
	 */
	orr	x0, x0, #1 << 4
	msr	S3_1_c15_c2_0, x0	/* cpuactlr_el1 */
#endif

#ifdef CONFIG_ARM_ERRATA_833069
	mrs	x0, S3_1_c15_c2_0	/* cpuactlr_el1 */
	/* Clear the "Enable invalidates of BTB" bit */
	bic	x0, x0, #1
	msr	S3_1_c15_c2_0, x0	/* cpuactlr_el1 */
#endif
	b	0b
ENDPROC(apply_core_errata)

/*-----------------------------------------------------------------------*/

WEAK(lowlevel_init)
	mov	x29, lr			/* Save LR */

#if defined(CONFIG_GICV2) || defined(CONFIG_GICV3)
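	/*
	 * The primary core initializes the secure GIC distributor once;
	 * every core then sets up its own CPU interface (GICv2) or
	 * redistributor (GICv3).
	 */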
	branch_if_slave x0, 1f
	ldr	x0, =GICD_BASE
	bl	gic_init_secure
1:
#if defined(CONFIG_GICV3)
	ldr	x0, =GICR_BASE
	bl	gic_init_secure_percpu
#elif defined(CONFIG_GICV2)
	ldr	x0, =GICD_BASE
	ldr	x1, =GICC_BASE
	bl	gic_init_secure_percpu
#endif
#endif

#ifdef CONFIG_ARMV8_MULTIENTRY
	branch_if_master x0, x1, 2f

	/*
	 * Slaves should wait for the master to clear the spin table.
	 * This sync prevents slaves from observing a stale spin-table
	 * value and jumping to the wrong place.
	 */
#if defined(CONFIG_GICV2) || defined(CONFIG_GICV3)
#ifdef CONFIG_GICV2
	ldr	x0, =GICC_BASE
#endif
	bl	gic_wait_for_interrupt
#endif

	/*
	 * All slaves will enter EL2 and optionally EL1.
	 */
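	/*
	 * armv8_switch_to_el2 does not return to the bl below: it drops
	 * the core to EL2 and erets to the entry point passed in x3,
	 * with x4 selecting the AArch64 execution state.
	 */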
	adr	x3, lowlevel_in_el2
	ldr	x4, =ES_TO_AARCH64
	bl	armv8_switch_to_el2

lowlevel_in_el2:
#ifdef CONFIG_ARMV8_SWITCH_TO_EL1
	adr	x3, lowlevel_in_el1
	ldr	x4, =ES_TO_AARCH64
	bl	armv8_switch_to_el1

lowlevel_in_el1:
#endif

#endif /* CONFIG_ARMV8_MULTIENTRY */

2:
	mov	lr, x29			/* Restore LR */
	ret
ENDPROC(lowlevel_init)

WEAK(smp_kick_all_cpus)
	/* Kick secondary CPUs awake with an SGI 0 interrupt */
#if defined(CONFIG_GICV2) || defined(CONFIG_GICV3)
	ldr	x0, =GICD_BASE
	b	gic_kick_secondary_cpus
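	/*
	 * Tail call: gic_kick_secondary_cpus issues the SGI through the
	 * distributor and returns to our caller via lr, so the ret
	 * below only runs when no GIC is configured.
	 */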
#endif
	ret
ENDPROC(smp_kick_all_cpus)

/*-----------------------------------------------------------------------*/

ENTRY(c_runtime_cpu_setup)
	/* Point VBAR_ELx at the relocated vector table */
	adr	x0, vectors
	switch_el x1, 3f, 2f, 1f
3:	msr	vbar_el3, x0
	b	0f
2:	msr	vbar_el2, x0
	b	0f
1:	msr	vbar_el1, x0
0:

	ret
ENDPROC(c_runtime_cpu_setup)

WEAK(save_boot_params)
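	/*
	 * Weak default: boards that need to preserve registers handed
	 * over by the boot ROM override this routine; any override must
	 * end by branching back to save_boot_params_ret.
	 */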
	b	save_boot_params_ret	/* back to my caller */
ENDPROC(save_boot_params)