/*
 * include/asm-arm/macro.h
 *
 * Copyright (C) 2009 Jean-Christophe PLAGNIOL-VILLARD <plagnioj@jcrosoft.com>
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

#ifndef __ASM_ARM_MACRO_H__
#define __ASM_ARM_MACRO_H__
#ifdef __ASSEMBLY__

/*
 * These macros provide a convenient way to write 8, 16 and 32 bit data
 * to any address.
 * Registers r4 and r5 are used; any data held in these registers is
 * overwritten by the macros.
 * The macros are valid for any ARM architecture.  They do not implement
 * any memory barriers, so use them with caution when the caches are
 * enabled or on a multi-core system.  See the usage example after the
 * macro definitions below.
 */

.macro	write32, addr, data
	ldr	r4, =\addr
	ldr	r5, =\data
	str	r5, [r4]
.endm

.macro	write16, addr, data
	ldr	r4, =\addr
	ldr	r5, =\data
	strh	r5, [r4]
.endm

.macro	write8, addr, data
	ldr	r4, =\addr
	ldr	r5, =\data
	strb	r5, [r4]
.endm
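
/*
 * Example (illustrative only; the addresses and values are made up and do
 * not refer to real hardware registers):
 *
 *	write32	0x0c090228, 0x00000001	@ store a 32-bit value
 *	write16	0x0c09022c, 0x00ff	@ store a 16-bit value
 *	write8	0x0c090230, 0x01	@ store an 8-bit value
 *
 * Remember that r4 and r5 are clobbered by each invocation.
 */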

/*
 * This macro generates a loop that can be used for delays in the code.
 * Register r4 is used; any data in this register is overwritten by the
 * macro.
 * The macro is valid for any ARM architecture.  The actual time spent in
 * the loop will vary from CPU to CPU though.
 */

.macro	wait_timer, time
	ldr	r4, =\time
1:
	nop
	subs	r4, r4, #1
	bcs	1b
.endm
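
/*
 * Example (illustrative; the iteration count is arbitrary and the real
 * delay depends on the core clock and pipeline):
 *
 *	wait_timer	0x20000		@ busy-wait for ~0x20000 iterations
 */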

#ifdef CONFIG_ARM64
/*
 * Register aliases.
 */
lr	.req	x30

/*
 * Branch according to exception level
 */
.macro	switch_el, xreg, el3_label, el2_label, el1_label
	mrs	\xreg, CurrentEL
	cmp	\xreg, 0xc
	b.eq	\el3_label
	cmp	\xreg, 0x8
	b.eq	\el2_label
	cmp	\xreg, 0x4
	b.eq	\el1_label
.endm
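
/*
 * Example (labels are illustrative): dispatch on the current exception
 * level, then fall through to common code.
 *
 *	switch_el x1, 3f, 2f, 1f
 * 3:	...			// EL3-only setup
 *	b	0f
 * 2:	...			// EL2-only setup
 *	b	0f
 * 1:	...			// EL1-only setup
 * 0:	...			// common code
 */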

/*
 * Branch if current processor is a Cortex-A57 core.
 */
.macro	branch_if_a57_core, xreg, a57_label
	mrs	\xreg, midr_el1
	lsr	\xreg, \xreg, #4
	and	\xreg, \xreg, #0x00000FFF
	cmp	\xreg, #0xD07		/* Cortex-A57 MPCore processor. */
	b.eq	\a57_label
.endm

/*
 * Branch if current processor is a Cortex-A53 core.
 */
.macro	branch_if_a53_core, xreg, a53_label
	mrs	\xreg, midr_el1
	lsr	\xreg, \xreg, #4
	and	\xreg, \xreg, #0x00000FFF
	cmp	\xreg, #0xD03		/* Cortex-A53 MPCore processor. */
	b.eq	\a53_label
.endm
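
/*
 * Example (illustrative; the labels are hypothetical): select a
 * core-specific code path based on the MIDR_EL1 part number.
 *
 *	branch_if_a57_core x0, a57_errata_setup
 *	branch_if_a53_core x0, a53_errata_setup
 *	b	generic_setup
 */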

/*
 * Branch if the current processor is a slave; the processor whose
 * affinity fields are all zero is treated as the master.
 */
.macro	branch_if_slave, xreg, slave_label
#ifdef CONFIG_ARMV8_MULTIENTRY
	/* NOTE: MPIDR handling will be erroneous on multi-cluster machines */
	mrs	\xreg, mpidr_el1
	tst	\xreg, #0xff		/* Test Affinity 0 */
	b.ne	\slave_label
	lsr	\xreg, \xreg, #8
	tst	\xreg, #0xff		/* Test Affinity 1 */
	b.ne	\slave_label
	lsr	\xreg, \xreg, #8
	tst	\xreg, #0xff		/* Test Affinity 2 */
	b.ne	\slave_label
	lsr	\xreg, \xreg, #16
	tst	\xreg, #0xff		/* Test Affinity 3 */
	b.ne	\slave_label
#endif
.endm

/*
 * Branch if the current processor is the master; the processor whose
 * affinity fields are all zero is treated as the master.
 */
.macro	branch_if_master, xreg1, xreg2, master_label
#ifdef CONFIG_ARMV8_MULTIENTRY
	/* NOTE: MPIDR handling will be erroneous on multi-cluster machines */
	mrs	\xreg1, mpidr_el1
	lsr	\xreg2, \xreg1, #32
	lsl	\xreg1, \xreg1, #40
	lsr	\xreg1, \xreg1, #40
	orr	\xreg1, \xreg1, \xreg2
	cbz	\xreg1, \master_label
#else
	b	\master_label
#endif
.endm
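
/*
 * Example (illustrative; the labels are hypothetical): park the secondary
 * (slave) cores and let only the master continue with the boot:
 *
 *	branch_if_slave	 x0, slave_wait_loop
 *	branch_if_master x0, x1, master_boot
 */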

.macro armv8_switch_to_el2_m, xreg1
	/* 64bit EL2 | HCE | SMD | RES1 (Bits[5:4]) | Non-secure EL0/EL1 */
	mov	\xreg1, #0x5b1
	msr	scr_el3, \xreg1
	msr	cptr_el3, xzr		/* Disable coprocessor traps to EL3 */
	mov	\xreg1, #0x33ff
	msr	cptr_el2, \xreg1	/* Disable coprocessor traps to EL2 */

	/* Initialize Generic Timers */
	msr	cntvoff_el2, xzr

	/* Initialize SCTLR_EL2
	 *
	 * setting RES1 bits (29,28,23,22,18,16,11,5,4) to 1
	 * and RES0 bits (31,30,27,26,24,21,20,17,15-13,10-6) +
	 * EE,WXN,I,SA,C,A,M to 0
	 */
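	/* The value assembled below is 0x30C50830: only the RES1 bits listed
	 * above are set; MMU, caches and alignment checks remain disabled. */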
	mov	\xreg1, #0x0830
	movk	\xreg1, #0x30C5, lsl #16
	msr	sctlr_el2, \xreg1

	/* Return to the EL2_SP2 mode from EL3 */
	mov	\xreg1, sp
	msr	sp_el2, \xreg1		/* Migrate SP */
	mrs	\xreg1, vbar_el3
	msr	vbar_el2, \xreg1	/* Migrate VBAR */
	mov	\xreg1, #0x3c9
	msr	spsr_el3, \xreg1	/* EL2_SP2 | D | A | I | F */
	msr	elr_el3, lr
	eret
.endm
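
/*
 * Usage sketch (illustrative): this macro must run at EL3 and "returns"
 * via eret to the address held in lr, now executing at EL2 with SP_EL2 in
 * use.  A caller would typically guard it with switch_el, e.g.:
 *
 *	switch_el x0, 1f, 0f, 0f	// only act when running at EL3
 * 0:	ret
 * 1:	armv8_switch_to_el2_m x0
 */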

.macro armv8_switch_to_el1_m, xreg1, xreg2
	/* Initialize Generic Timers */
	mrs	\xreg1, cnthctl_el2
	orr	\xreg1, \xreg1, #0x3	/* Enable EL1 access to timers */
	msr	cnthctl_el2, \xreg1
	msr	cntvoff_el2, xzr

	/* Initialize MPID/MPIDR registers */
	mrs	\xreg1, midr_el1
	mrs	\xreg2, mpidr_el1
	msr	vpidr_el2, \xreg1
	msr	vmpidr_el2, \xreg2

	/* Disable coprocessor traps */
	mov	\xreg1, #0x33ff
	msr	cptr_el2, \xreg1	/* Disable coprocessor traps to EL2 */
	msr	hstr_el2, xzr		/* Disable coprocessor traps to EL2 */
	mov	\xreg1, #3 << 20
	msr	cpacr_el1, \xreg1	/* Enable FP/SIMD at EL1 */

	/* Initialize HCR_EL2 */
	mov	\xreg1, #(1 << 31)		/* 64bit EL1 */
	orr	\xreg1, \xreg1, #(1 << 29)	/* Disable HVC */
	msr	hcr_el2, \xreg1

	/* SCTLR_EL1 initialization
	 *
	 * setting RES1 bits (29,28,23,22,20,11) to 1
	 * and RES0 bits (31,30,27,21,17,13,10,6) +
	 * UCI,EE,E0E,WXN,nTWE,nTWI,UCT,DZE,I,UMA,SED,ITD,
	 * CP15BEN,SA0,SA,C,A,M to 0
	 */
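	/* The value assembled below is 0x30D00800: only the RES1 bits listed
	 * above are set; MMU, caches and alignment checks remain disabled. */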
	mov	\xreg1, #0x0800
	movk	\xreg1, #0x30d0, lsl #16
	msr	sctlr_el1, \xreg1

	/* Return to the EL1_SP1 mode from EL2 */
	mov	\xreg1, sp
	msr	sp_el1, \xreg1		/* Migrate SP */
	mrs	\xreg1, vbar_el2
	msr	vbar_el1, \xreg1	/* Migrate VBAR */
	mov	\xreg1, #0x3c5
	msr	spsr_el2, \xreg1	/* EL1_SP1 | D | A | I | F */
	msr	elr_el2, lr
	eret
.endm
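
/*
 * Usage sketch (illustrative): this macro must run at EL2 and "returns"
 * via eret to the address held in lr, now executing at EL1 with SP_EL1 in
 * use.  A caller would typically guard it with switch_el, e.g.:
 *
 *	switch_el x0, 0f, 1f, 0f	// only act when running at EL2
 * 0:	ret
 * 1:	armv8_switch_to_el1_m x0, x1
 */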

#if defined(CONFIG_GICV3)
.macro gic_wait_for_interrupt_m xreg1
0 :	wfi
	mrs     \xreg1, ICC_IAR1_EL1
	msr     ICC_EOIR1_EL1, \xreg1
	cbnz    \xreg1, 0b
.endm
#elif defined(CONFIG_GICV2)
.macro gic_wait_for_interrupt_m xreg1, wreg2
0 :	wfi
	ldr     \wreg2, [\xreg1, GICC_AIAR]
	str     \wreg2, [\xreg1, GICC_AEOIR]
	and	\wreg2, \wreg2, #0x3ff
	cbnz    \wreg2, 0b
.endm
#endif
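
/*
 * Usage sketch (illustrative): park a secondary core in wfi until it is
 * woken by an interrupt (e.g. an SGI from the master).  For the GICv2
 * variant the first register must already hold the GIC CPU interface
 * base address:
 *
 *	gic_wait_for_interrupt_m x0		// GICv3
 *	gic_wait_for_interrupt_m x0, w1		// GICv2
 */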

#endif /* CONFIG_ARM64 */

#endif /* __ASSEMBLY__ */
#endif /* __ASM_ARM_MACRO_H__ */