xref: /openbmc/linux/arch/arm64/include/asm/assembler.h (revision 3e26a691)
1 /*
2  * Based on arch/arm/include/asm/assembler.h
3  *
4  * Copyright (C) 1996-2000 Russell King
5  * Copyright (C) 2012 ARM Ltd.
6  *
7  * This program is free software; you can redistribute it and/or modify
8  * it under the terms of the GNU General Public License version 2 as
9  * published by the Free Software Foundation.
10  *
11  * This program is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14  * GNU General Public License for more details.
15  *
16  * You should have received a copy of the GNU General Public License
17  * along with this program.  If not, see <http://www.gnu.org/licenses/>.
18  */
19 #ifndef __ASSEMBLY__
20 #error "Only include this from assembly code"
21 #endif
22 
23 #ifndef __ASM_ASSEMBLER_H
24 #define __ASM_ASSEMBLER_H
25 
26 #include <asm/ptrace.h>
27 #include <asm/thread_info.h>
28 
29 /*
30  * Stack pushing/popping (register pairs only). Equivalent to store decrement
31  * before, load increment after.
32  */
// push: store a register pair with sp pre-decremented by 16 (keeps sp
// 16-byte aligned, as required for AArch64 stack accesses).
33 	.macro	push, xreg1, xreg2
34 	stp	\xreg1, \xreg2, [sp, #-16]!	// sp -= 16; [sp] = xreg1, [sp+8] = xreg2
35 	.endm
36 
// pop: load a register pair, then post-increment sp by 16 (inverse of push).
37 	.macro	pop, xreg1, xreg2
38 	ldp	\xreg1, \xreg2, [sp], #16	// xreg1 = [sp], xreg2 = [sp+8]; sp += 16
39 	.endm
40 
41 /*
42  * Enable and disable interrupts.
43  */
// disable_irq: mask IRQs by setting PSTATE.I (bit value 2 of the DAIF set mask).
44 	.macro	disable_irq
45 	msr	daifset, #2			// set the I bit: IRQs masked
46 	.endm
47 
// enable_irq: unmask IRQs by clearing PSTATE.I.
48 	.macro	enable_irq
49 	msr	daifclr, #2			// clear the I bit: IRQs enabled
50 	.endm
51 
52 /*
53  * Enable and disable debug exceptions.
54  */
// disable_dbg: mask debug exceptions by setting PSTATE.D (bit value 8).
55 	.macro	disable_dbg
56 	msr	daifset, #8			// set the D bit: debug exceptions masked
57 	.endm
58 
// enable_dbg: unmask debug exceptions by clearing PSTATE.D.
59 	.macro	enable_dbg
60 	msr	daifclr, #8			// clear the D bit: debug exceptions enabled
61 	.endm
62 
// disable_step_tsk: if the task's thread flags (\flgs) have TIF_SINGLESTEP
// set, clear the software-step enable bit (bit 0, MDSCR_EL1.SS); otherwise
// do nothing. \tmp is clobbered.
63 	.macro	disable_step_tsk, flgs, tmp
64 	tbz	\flgs, #TIF_SINGLESTEP, 9990f	// not single-stepping: nothing to do
65 	mrs	\tmp, mdscr_el1
66 	bic	\tmp, \tmp, #1			// clear bit 0 (MDSCR_EL1.SS)
67 	msr	mdscr_el1, \tmp
68 	isb	// Synchronise with enable_dbg
69 9990:
70 	.endm
71 
// enable_step_tsk: if the task's thread flags (\flgs) have TIF_SINGLESTEP
// set, set the software-step enable bit (bit 0, MDSCR_EL1.SS). Debug
// exceptions are masked (disable_dbg) before MDSCR_EL1 is written and are
// left masked here — NOTE(review): the caller is expected to re-enable
// them (e.g. via enable_dbg) when ready to take the step exception.
// \tmp is clobbered.
72 	.macro	enable_step_tsk, flgs, tmp
73 	tbz	\flgs, #TIF_SINGLESTEP, 9990f	// not single-stepping: nothing to do
74 	disable_dbg				// mask debug exceptions while updating MDSCR_EL1
75 	mrs	\tmp, mdscr_el1
76 	orr	\tmp, \tmp, #1			// set bit 0 (MDSCR_EL1.SS)
77 	msr	mdscr_el1, \tmp
78 9990:
79 	.endm
80 
81 /*
82  * Enable both debug exceptions and interrupts. This is likely to be
83  * faster than two daifclr operations, since writes to this register
84  * are self-synchronising.
85  */
// enable_dbg_and_irq: clear PSTATE.D and PSTATE.I in a single write
// (cheaper than separate enable_dbg + enable_irq, per the comment above).
86 	.macro	enable_dbg_and_irq
87 	msr	daifclr, #(8 | 2)		// clear D and I bits together
88 	.endm
89 
90 /*
91  * SMP data memory barrier
92  */
// smp_dmb: emit a data memory barrier with the supplied ordering/domain
// option (\opt, e.g. ish) chosen by the caller.
93 	.macro	smp_dmb, opt
94 	dmb	\opt
95 	.endm
96 
97 /*
98  * Emit an entry into the exception table
99  */
// _asm_extable: append one exception-table entry to the __ex_table
// section: the faulting address (\from) and its fixup address (\to),
// each stored as a 32-bit offset relative to the entry itself.
100 	.macro		_asm_extable, from, to
101 	.pushsection	__ex_table, "a"		// allocatable section, away from .text
102 	.align		3			// 8-byte align each entry
103 	.long		(\from - .), (\to - .)	// PC-relative offsets, not absolute addresses
104 	.popsection
105 	.endm
106 
/*
 * USER(l, x): emit instruction(s) x at local label 9999 and register an
 * exception-table entry (via _asm_extable) so that a fault raised by x
 * transfers control to the fixup label l.
 */
107 #define USER(l, x...)				\
108 9999:	x;					\
109 	_asm_extable	9999b, l
110 
111 /*
112  * Register aliases.
113  */
114 lr	.req	x30		// link register alias: x30 holds the return address
115 
116 /*
117  * Vector entry
118  */
// ventry: emit one exception-vector slot — align to 2^7 = 128 bytes (the
// architectural stride between vector entries) and branch to the handler.
119 	 .macro	ventry	label
120 	.align	7			// 128-byte alignment for this vector slot
121 	b	\label
122 	.endm
123 
124 /*
125  * Select code when configured for BE.
126  */
/* CPU_BE(code...): emit code only when building for big-endian. */
127 #ifdef CONFIG_CPU_BIG_ENDIAN
128 #define CPU_BE(code...) code
129 #else
130 #define CPU_BE(code...)
131 #endif
132 
133 /*
134  * Select code when configured for LE.
135  */
/* CPU_LE(code...): emit code only when building for little-endian. */
136 #ifdef CONFIG_CPU_BIG_ENDIAN
137 #define CPU_LE(code...)
138 #else
139 #define CPU_LE(code...) code
140 #endif
141 
142 /*
143  * Define a macro that constructs a 64-bit value by concatenating two
144  * 32-bit registers. Note that on big endian systems the order of the
145  * registers is swapped.
146  */
// regs_to_64: \rd = \lbits | (\hbits << 32). The #ifdef swaps the
// parameter names, not the instruction, so call sites can pass the two
// 32-bit halves in the same positional order on both endiannesses while
// the correct half lands in the high word.
147 #ifndef CONFIG_CPU_BIG_ENDIAN
148 	.macro	regs_to_64, rd, lbits, hbits
149 #else
150 	.macro	regs_to_64, rd, hbits, lbits
151 #endif
152 	orr	\rd, \lbits, \hbits, lsl #32	// combine: low half | high half << 32
153 	.endm
154 
155 /*
156  * Pseudo-ops for PC-relative adr/ldr/str <reg>, <symbol> where
157  * <symbol> is within the range +/- 4 GB of the PC.
158  */
159 	/*
160 	 * @dst: destination register (64 bit wide)
161 	 * @sym: name of the symbol
162 	 * @tmp: optional scratch register to be used if <dst> == sp, which
163 	 *       is not allowed in an adrp instruction
164 	 */
	// adr_l: load the address of \sym into \dst using a PC-relative
	// adrp (page) + add (:lo12: page offset) pair, reaching +/- 4 GB.
	// \tmp is needed when \dst == sp, since adrp cannot target sp.
165 	.macro	adr_l, dst, sym, tmp=
166 	.ifb	\tmp
167 	adrp	\dst, \sym			// dst = page address of sym
168 	add	\dst, \dst, :lo12:\sym		// add low 12 bits of sym's offset
169 	.else
170 	adrp	\tmp, \sym			// compute in tmp when dst cannot take adrp
171 	add	\dst, \tmp, :lo12:\sym
172 	.endif
173 	.endm
174 
175 	/*
176 	 * @dst: destination register (32 or 64 bit wide)
177 	 * @sym: name of the symbol
178 	 * @tmp: optional 64-bit scratch register to be used if <dst> is a
179 	 *       32-bit wide register, in which case it cannot be used to hold
180 	 *       the address
181 	 */
	// ldr_l: load the value at \sym into \dst via adrp + :lo12:-offset
	// ldr. \tmp (64-bit) is required when \dst is a 32-bit register,
	// because a 32-bit register cannot hold the 64-bit address.
182 	.macro	ldr_l, dst, sym, tmp=
183 	.ifb	\tmp
184 	adrp	\dst, \sym			// dst = page address of sym
185 	ldr	\dst, [\dst, :lo12:\sym]	// dst = *sym (address reuse is fine: dst is 64-bit)
186 	.else
187 	adrp	\tmp, \sym			// address goes in tmp instead
188 	ldr	\dst, [\tmp, :lo12:\sym]
189 	.endif
190 	.endm
191 
192 	/*
193 	 * @src: source register (32 or 64 bit wide)
194 	 * @sym: name of the symbol
195 	 * @tmp: mandatory 64-bit scratch register to calculate the address
196 	 *       while <src> needs to be preserved.
197 	 */
	// str_l: store \src to \sym. \tmp is mandatory (unlike adr_l/ldr_l)
	// because \src must be preserved, so the address needs its own register.
198 	.macro	str_l, src, sym, tmp
199 	adrp	\tmp, \sym			// tmp = page address of sym
200 	str	\src, [\tmp, :lo12:\sym]	// *sym = src
201 	.endm
202 
203 	/*
204 	 * @sym: The name of the per-cpu variable
205 	 * @reg: Result of per_cpu(sym, smp_processor_id())
206 	 * @tmp: scratch register
207 	 */
	// this_cpu_ptr: \reg = &\sym + TPIDR_EL1, i.e. the address of this
	// CPU's instance of the per-cpu variable (TPIDR_EL1 holds the
	// per-cpu offset). \tmp is clobbered.
208 	.macro this_cpu_ptr, sym, reg, tmp
209 	adr_l	\reg, \sym			// reg = base address of the per-cpu variable
210 	mrs	\tmp, tpidr_el1			// tmp = this CPU's per-cpu offset
211 	add	\reg, \reg, \tmp		// reg = per-cpu address for this CPU
212 	.endm
213 
214 /*
215  * Annotate a function as position independent, i.e., safe to be called before
216  * the kernel virtual mapping is activated.
217  */
/*
 * ENDPIPROC(x): close function x (via ENDPROC) and additionally export a
 * global __pi_##x alias with the same value and size, marking the function
 * as safe to call position-independently before the kernel mapping is up.
 */
218 #define ENDPIPROC(x)			\
219 	.globl	__pi_##x;		\
220 	.type 	__pi_##x, %function;	\
221 	.set	__pi_##x, x;		\
222 	.size	__pi_##x, . - x;	\
223 	ENDPROC(x)
224 
225 	/*
226 	 * Emit a 64-bit absolute little endian symbol reference in a way that
227 	 * ensures that it will be resolved at build time, even when building a
228 	 * PIE binary. This requires cooperation from the linker script, which
229 	 * must emit the lo32/hi32 halves individually.
230 	 */
	// le64sym: emit the 64-bit value of \sym as two 32-bit words in
	// little-endian order, using the linker-script-provided \sym_lo32 /
	// \sym_hi32 halves so the reference resolves at build time even in
	// a PIE build (see comment above).
231 	.macro	le64sym, sym
232 	.long	\sym\()_lo32			// low 32 bits first (little-endian)
233 	.long	\sym\()_hi32			// then high 32 bits
234 	.endm
235 
236 #endif	/* __ASM_ASSEMBLER_H */
237