xref: /openbmc/linux/arch/arm64/include/asm/assembler.h (revision a8fe58ce)
1 /*
2  * Based on arch/arm/include/asm/assembler.h
3  *
4  * Copyright (C) 1996-2000 Russell King
5  * Copyright (C) 2012 ARM Ltd.
6  *
7  * This program is free software; you can redistribute it and/or modify
8  * it under the terms of the GNU General Public License version 2 as
9  * published by the Free Software Foundation.
10  *
11  * This program is distributed in the hope that it will be useful,
12  * but WITHOUT ANY WARRANTY; without even the implied warranty of
13  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14  * GNU General Public License for more details.
15  *
16  * You should have received a copy of the GNU General Public License
17  * along with this program.  If not, see <http://www.gnu.org/licenses/>.
18  */
19 #ifndef __ASSEMBLY__
20 #error "Only include this from assembly code"
21 #endif
22 
23 #ifndef __ASM_ASSEMBLER_H
24 #define __ASM_ASSEMBLER_H
25 
26 #include <asm/ptrace.h>
27 #include <asm/thread_info.h>
28 
29 /*
30  * Stack pushing/popping (register pairs only). Equivalent to store decrement
31  * before, load increment after. The stack pointer moves by 16 bytes per
32  * pair, which keeps sp 16-byte aligned as AArch64 requires for sp-based
33  * memory accesses.
32  */
33 	.macro	push, xreg1, xreg2
34 	stp	\xreg1, \xreg2, [sp, #-16]!	// pre-index: sp -= 16, then store pair
35 	.endm
36 
37 	.macro	pop, xreg1, xreg2
38 	ldp	\xreg1, \xreg2, [sp], #16	// post-index: load pair, then sp += 16
39 	.endm
40 
41 /*
42  * Enable and disable interrupts. These operate on the PSTATE.I bit only
43  * (bit 1 of the DAIFSet/DAIFClr immediate); the other DAIF bits are
44  * left untouched.
43  */
44 	.macro	disable_irq
45 	msr	daifset, #2		// set PSTATE.I: mask IRQs
46 	.endm
47 
48 	.macro	enable_irq
49 	msr	daifclr, #2		// clear PSTATE.I: unmask IRQs
50 	.endm
51 
52 /*
53  * Enable and disable debug exceptions. These operate on the PSTATE.D bit
54  * only (bit 3 of the DAIFSet/DAIFClr immediate).
54  */
55 	.macro	disable_dbg
56 	msr	daifset, #8		// set PSTATE.D: mask debug exceptions
57 	.endm
58 
59 	.macro	enable_dbg
60 	msr	daifclr, #8		// clear PSTATE.D: unmask debug exceptions
61 	.endm
62 
	/*
	 * Turn off hardware single-step for the current task.
	 * @flgs: register holding the task's thread_info flags
	 * @tmp:  scratch register
	 * No-op unless the task has TIF_SINGLESTEP set.
	 */
63 	.macro	disable_step_tsk, flgs, tmp
64 	tbz	\flgs, #TIF_SINGLESTEP, 9990f	// not single-stepping: skip
65 	mrs	\tmp, mdscr_el1
66 	bic	\tmp, \tmp, #1			// clear MDSCR_EL1.SS (bit 0)
67 	msr	mdscr_el1, \tmp
68 	isb	// Synchronise with enable_dbg
69 9990:
70 	.endm
71 
	/*
	 * Turn on hardware single-step for the current task.
	 * @flgs: register holding the task's thread_info flags
	 * @tmp:  scratch register
	 * No-op unless the task has TIF_SINGLESTEP set. Debug exceptions are
	 * masked first so the step cannot fire before we return to userspace;
	 * no isb here — the eret/context switch path synchronises the write.
	 * NOTE(review): the missing isb relative to disable_step_tsk appears
	 * intentional, but confirm against the callers.
	 */
72 	.macro	enable_step_tsk, flgs, tmp
73 	tbz	\flgs, #TIF_SINGLESTEP, 9990f	// not single-stepping: skip
74 	disable_dbg				// mask debug exceptions while changing SS
75 	mrs	\tmp, mdscr_el1
76 	orr	\tmp, \tmp, #1			// set MDSCR_EL1.SS (bit 0)
77 	msr	mdscr_el1, \tmp
78 9990:
79 	.endm
80 
81 /*
82  * Enable both debug exceptions and interrupts. This is likely to be
83  * faster than two daifclr operations, since writes to this register
84  * are self-synchronising.
85  */
86 	.macro	enable_dbg_and_irq
87 	msr	daifclr, #(8 | 2)	// clear PSTATE.D and PSTATE.I in one write
88 	.endm
89 
90 /*
91  * SMP data memory barrier.
92  * @opt: barrier domain/type option passed straight to dmb (e.g. ish).
93  */
93 	.macro	smp_dmb, opt
94 	dmb	\opt
95 	.endm
96 
/*
 * USER(l, x...): emit instruction(s) x that may fault on a user access,
 * and record an __ex_table entry pairing the instruction address (9999b)
 * with the fixup label l, so the fault handler can branch to l instead
 * of oopsing. Entries are two 64-bit addresses, hence .align 3.
 * (Comment kept outside the #define: '//' inside the \-continuations
 * would swallow the line-continuation backslash.)
 */
97 #define USER(l, x...)				\
98 9999:	x;					\
99 	.section __ex_table,"a";		\
100 	.align	3;				\
101 	.quad	9999b,l;			\
102 	.previous
103 
104 /*
105  * Register aliases.
106  */
107 lr	.req	x30		// link register (x30); alias for readability
108 
109 /*
110  * Vector entry: each exception vector slot is 128 bytes, so align to
111  * 2^7 and emit a branch to the real handler.
111  */
112 	 .macro	ventry	label
113 	.align	7		// power-of-two align: 2^7 = 128-byte slot
114 	b	\label
115 	.endm
116 
117 /*
118  * Select code when configured for BE: CPU_BE(code...) expands to its
119  * argument on big-endian builds and to nothing otherwise.
119  */
120 #ifdef CONFIG_CPU_BIG_ENDIAN
121 #define CPU_BE(code...) code
122 #else
123 #define CPU_BE(code...)
124 #endif
125 
126 /*
127  * Select code when configured for LE: CPU_LE(code...) expands to its
128  * argument on little-endian builds and to nothing otherwise.
128  */
129 #ifdef CONFIG_CPU_BIG_ENDIAN
130 #define CPU_LE(code...)
131 #else
132 #define CPU_LE(code...) code
133 #endif
134 
135 /*
136  * Define a macro that constructs a 64-bit value by concatenating two
137  * 32-bit registers. Note that on big endian systems the order of the
138  * registers is swapped. The parameter order is swapped (not the orr
139  * operands), so callers always pass the registers in memory order and
140  * the macro picks which one holds the low/high half for this endianness.
139  */
140 #ifndef CONFIG_CPU_BIG_ENDIAN
141 	.macro	regs_to_64, rd, lbits, hbits
142 #else
143 	.macro	regs_to_64, rd, hbits, lbits
144 #endif
145 	orr	\rd, \lbits, \hbits, lsl #32	// rd = lbits | (hbits << 32)
146 	.endm
147 
148 /*
149  * Pseudo-ops for PC-relative adr/ldr/str <reg>, <symbol> where
150  * <symbol> is within the range +/- 4 GB of the PC (adrp page + lo12
151  * offset), unlike plain adr which only reaches +/- 1 MB.
151  */
152 	/*
153 	 * @dst: destination register (64 bit wide)
154 	 * @sym: name of the symbol
155 	 * @tmp: optional scratch register to be used if <dst> == sp, which
156 	 *       is not allowed in an adrp instruction
157 	 */
158 	.macro	adr_l, dst, sym, tmp=
159 	.ifb	\tmp
160 	adrp	\dst, \sym			// dst = 4 KB page of sym
161 	add	\dst, \dst, :lo12:\sym		// add low 12 bits of sym
162 	.else
163 	adrp	\tmp, \sym			// compute page in tmp (dst may be sp)
164 	add	\dst, \tmp, :lo12:\sym
165 	.endif
166 	.endm
167 
168 	/*
169 	 * Load the value of <sym> (not its address) PC-relatively.
170 	 * @dst: destination register (32 or 64 bit wide)
171 	 * @sym: name of the symbol
172 	 * @tmp: optional 64-bit scratch register to be used if <dst> is a
173 	 *       32-bit wide register, in which case it cannot be used to hold
174 	 *       the address
175 	 */
175 	.macro	ldr_l, dst, sym, tmp=
176 	.ifb	\tmp
177 	adrp	\dst, \sym			// dst = 4 KB page of sym
178 	ldr	\dst, [\dst, :lo12:\sym]	// dst = *sym
179 	.else
180 	adrp	\tmp, \sym			// address goes via 64-bit tmp
181 	ldr	\dst, [\tmp, :lo12:\sym]
182 	.endif
183 	.endm
184 
185 	/*
186 	 * Store <src> to <sym> PC-relatively.
187 	 * @src: source register (32 or 64 bit wide)
188 	 * @sym: name of the symbol
189 	 * @tmp: mandatory 64-bit scratch register to calculate the address
190 	 *       while <src> needs to be preserved.
191 	 */
191 	.macro	str_l, src, sym, tmp
192 	adrp	\tmp, \sym			// tmp = 4 KB page of sym
193 	str	\src, [\tmp, :lo12:\sym]	// *sym = src
194 	.endm
195 
196 	/*
197 	 * Compute the address of this CPU's instance of a per-cpu variable.
198 	 * @sym: The name of the per-cpu variable
199 	 * @reg: Result of per_cpu(sym, smp_processor_id())
200 	 * @tmp: scratch register
201 	 * tpidr_el1 is read as this CPU's per-cpu offset — the kernel's
202 	 * usual use of that register; confirm against the setup code.
201 	 */
201 	.macro this_cpu_ptr, sym, reg, tmp
202 	adr_l	\reg, \sym			// reg = &sym (per-cpu base copy)
203 	mrs	\tmp, tpidr_el1			// tmp = this CPU's offset
204 	add	\reg, \reg, \tmp
205 	.endm
206 
207 /*
208  * Annotate a function as position independent, i.e., safe to be called before
209  * the kernel virtual mapping is activated. This emits a __pi_<x> alias —
210  * same address and size as <x> — alongside the normal ENDPROC marking, so
211  * early/PI callers can reference the function by its __pi_ name.
210  */
211 #define ENDPIPROC(x)			\
212 	.globl	__pi_##x;		\
213 	.type 	__pi_##x, %function;	\
214 	.set	__pi_##x, x;		\
215 	.size	__pi_##x, . - x;	\
216 	ENDPROC(x)
217 
218 #endif	/* __ASM_ASSEMBLER_H */
219