/*
 * include/asm-xtensa/asmmacro.h
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2005 Tensilica Inc.
 */

#ifndef _XTENSA_ASMMACRO_H
#define _XTENSA_ASMMACRO_H

#include <asm-generic/export.h>
#include <asm/core.h>

/*
 * Some little helpers for loops. Use zero-overhead loops
 * where applicable and if supported by the processor.
 *
 * __loopi ar, at, size, incr
 *	   ar	register initialized with the start address
 *	   at	scratch register used by macro
 *	   size	size immediate value
 *	   incr	increment
 *
 * __loops ar, as, at, incr_log2[, mask_log2][, cond][, ncond]
 *	   ar	register initialized with the start address
 *	   as	register initialized with the size
 *	   at	scratch register used by macro
 *	   incr_log2	increment [in log2]
 *	   mask_log2	mask [in log2]
 *	   cond		true condition (used in loop'cond')
 *	   ncond	false condition (used in b'ncond')
 *
 * __loopt ar, as, at, incr_log2
 *	   ar	register initialized with the start address
 *	   as	register initialized with the end address
 *	   at	scratch register used by macro
 *	   incr_log2	increment [in log2]
 *
 * __loop  as
 *	   restart loop. 'as' register must not have been modified!
 *
 * __endla ar, as, incr
 *	   ar	start address (modified)
 *	   as	scratch register used by __loops/__loopi macros or
 *		end address used by __loopt macro
 *	   incr	increment
 */

/*
 * loop for given size as immediate
 */

	.macro	__loopi ar, at, size, incr

#if XCHAL_HAVE_LOOPS
		movi	\at, ((\size + \incr - 1) / (\incr))
		loop	\at, 99f
#else
		addi	\at, \ar, \size
98:
#endif

	.endm
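
/*
 * A minimal usage sketch (hypothetical registers: a2 = word-aligned
 * 64-byte buffer, a4 pre-set to the fill value, a3 = scratch):
 *
 *	__loopi	a2, a3, 64, 4
 *	s32i	a4, a2, 0
 *	__endla	a2, a3, 4
 */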

/*
 * loop for given size in register
 */

	.macro	__loops	ar, as, at, incr_log2, mask_log2, cond, ncond

#if XCHAL_HAVE_LOOPS
		.ifgt \incr_log2 - 1
			addi	\at, \as, (1 << \incr_log2) - 1
			.ifnc \mask_log2,
				extui	\at, \at, \incr_log2, \mask_log2
			.else
				srli	\at, \at, \incr_log2
			.endif
		.endif
		loop\cond	\at, 99f
#else
		.ifnc \mask_log2,
			extui	\at, \as, \incr_log2, \mask_log2
		.else
			.ifnc \ncond,
				srli	\at, \as, \incr_log2
			.endif
		.endif
		.ifnc \ncond,
			b\ncond	\at, 99f
		.endif
		.ifnc \mask_log2,
			slli	\at, \at, \incr_log2
			add	\at, \ar, \at
		.else
			add	\at, \ar, \as
		.endif
#endif
98:

	.endm
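
/*
 * A hedged sketch (hypothetical registers): visit a4 bytes starting at
 * a2 one word at a time, using a3 as the scratch/end register:
 *
 *	__loops	a2, a4, a3, 2
 *	l32i	a5, a2, 0
 *	__endla	a2, a3, 4
 */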

/*
 * loop from ar to as
 */

	.macro	__loopt	ar, as, at, incr_log2

#if XCHAL_HAVE_LOOPS
		sub	\at, \as, \ar
		.ifgt	\incr_log2 - 1
			addi	\at, \at, (1 << \incr_log2) - 1
			srli	\at, \at, \incr_log2
		.endif
		loop	\at, 99f
#else
98:
#endif

	.endm
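
/*
 * A hedged sketch (hypothetical registers): walk words from a2 up to
 * the end address in a3, with a4 as scratch:
 *
 *	__loopt	a2, a3, a4, 2
 *	l32i	a5, a2, 0
 *	__endla	a2, a3, 4
 */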

/*
 * restart loop. registers must be unchanged
 */

	.macro	__loop	as

#if XCHAL_HAVE_LOOPS
		loop	\as, 99f
#else
98:
#endif

	.endm

/*
 * end of loop with no increment of the address.
 */

	.macro	__endl	ar, as
#if !XCHAL_HAVE_LOOPS
		bltu	\ar, \as, 98b
#endif
99:
	.endm

/*
 * end of loop with increment of the address.
 */

	.macro	__endla	ar, as, incr
		addi	\ar, \ar, \incr
		__endl	\ar \as
	.endm

/* Load or store instructions that may cause exceptions use the EX macro. */

#define EX(handler)				\
	.section __ex_table, "a";		\
	.word	97f, handler;			\
	.previous				\
97:
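
/*
 * A minimal sketch (hypothetical fixup label): tag a user-space load
 * so that a fault at 97: is redirected to the fixup code at 10f:
 *
 * EX(10f)	l32i	a2, a3, 0
 */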

/*
 * Extract unaligned word that is split between two registers w0 and w1
 * into r regardless of machine endianness. SAR must be loaded with the
 * starting bit of the word (see __ssa8).
 */

	.macro __src_b	r, w0, w1
#ifdef __XTENSA_EB__
		src	\r, \w0, \w1
#else
		src	\r, \w1, \w0
#endif
	.endm

/*
 * Load 2 lowest address bits of r into SAR for __src_b to extract unaligned
 * word starting at r from two registers loaded from consecutive aligned
 * addresses covering r regardless of machine endianness.
 *
 *      r   0   1   2   3
 * LE SAR   0   8  16  24
 * BE SAR  32  24  16   8
 */

	.macro __ssa8	r
#ifdef __XTENSA_EB__
		ssa8b	\r
#else
		ssa8l	\r
#endif
	.endm
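
/*
 * A hedged sketch of an unaligned 32-bit load from a2 (hypothetical
 * registers; a4/a5 hold the two aligned words covering the address):
 *
 *	__ssa8	a2			SAR <- shift for a2's low bits
 *	srli	a3, a2, 2
 *	slli	a3, a3, 2		a3 <- a2 rounded down to a word
 *	l32i	a4, a3, 0
 *	l32i	a5, a3, 4
 *	__src_b	a6, a4, a5		a6 <- unaligned word at a2
 */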

/*
 * do_nsau cnt, val, tmp, a
 * Compute the NSAU of 'val' (number of leading zero bits; 32 for 0)
 * into 'cnt'. Uses the nsau instruction when the NSA option is
 * configured; otherwise narrows the search by halfword and byte and
 * finishes with a lookup in the externally defined __nsau_data table.
 */

	.macro	do_nsau cnt, val, tmp, a
#if XCHAL_HAVE_NSA
	nsau	\cnt, \val
#else
	mov	\a, \val
	movi	\cnt, 0
	extui	\tmp, \a, 16, 16
	bnez	\tmp, 0f
	movi	\cnt, 16
	slli	\a, \a, 16
0:
	extui	\tmp, \a, 24, 8
	bnez	\tmp, 1f
	addi	\cnt, \cnt, 8
	slli	\a, \a, 8
1:
	movi	\tmp, __nsau_data
	extui	\a, \a, 24, 8
	add	\tmp, \tmp, \a
	l8ui	\tmp, \tmp, 0
	add	\cnt, \cnt, \tmp
#endif /* !XCHAL_HAVE_NSA */
	.endm

/*
 * do_abs dst, src, tmp
 * dst = |src|. Uses the abs instruction when available; otherwise
 * selects between src and its negation with movgez.
 */

	.macro	do_abs dst, src, tmp
#if XCHAL_HAVE_ABS
	abs	\dst, \src
#else
	neg	\tmp, \src
	movgez	\tmp, \src, \src
	mov	\dst, \tmp
#endif
	.endm

#if defined(__XTENSA_WINDOWED_ABI__)

/* Assembly instructions for windowed kernel ABI. */
#define KABI_W
/* Assembly instructions for call0 kernel ABI (will be ignored). */
#define KABI_C0 #

#define XTENSA_FRAME_SIZE_RESERVE	16
#define XTENSA_SPILL_STACK_RESERVE	32

#define abi_entry(frame_size) \
	entry sp, (XTENSA_FRAME_SIZE_RESERVE + \
		   (((frame_size) + XTENSA_STACK_ALIGNMENT - 1) & \
		    -XTENSA_STACK_ALIGNMENT))
#define abi_entry_default abi_entry(0)

#define abi_ret(frame_size) retw
#define abi_ret_default retw
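
/*
 * For example, assuming XTENSA_STACK_ALIGNMENT is 16, abi_entry(4)
 * rounds the 4-byte frame up to 16, adds the 16-byte reserve and
 * emits 'entry sp, 32'.
 */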

	/* direct call */
#define abi_call call4
	/* indirect call */
#define abi_callx callx4
	/* outgoing call argument registers */
#define abi_arg0 a6
#define abi_arg1 a7
#define abi_arg2 a8
#define abi_arg3 a9
#define abi_arg4 a10
#define abi_arg5 a11
	/* return value */
#define abi_rv a6
	/* registers preserved across call */
#define abi_saved0 a2
#define abi_saved1 a3

	/* none of the above */
#define abi_tmp0 a4
#define abi_tmp1 a5

#elif defined(__XTENSA_CALL0_ABI__)

/* Assembly instructions for windowed kernel ABI (will be ignored). */
#define KABI_W #
/* Assembly instructions for call0 kernel ABI. */
#define KABI_C0

#define XTENSA_SPILL_STACK_RESERVE	0

#define abi_entry(frame_size) __abi_entry (frame_size)

	.macro	__abi_entry frame_size
	.ifgt \frame_size
	addi sp, sp, -(((\frame_size) + XTENSA_STACK_ALIGNMENT - 1) & \
		       -XTENSA_STACK_ALIGNMENT)
	.endif
	.endm

#define abi_entry_default

#define abi_ret(frame_size) __abi_ret (frame_size)

	.macro	__abi_ret frame_size
	.ifgt \frame_size
	addi sp, sp, (((\frame_size) + XTENSA_STACK_ALIGNMENT - 1) & \
		      -XTENSA_STACK_ALIGNMENT)
	.endif
	ret
	.endm
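
/*
 * Here abi_entry(4) expands to 'addi sp, sp, -16' (again assuming a
 * 16-byte XTENSA_STACK_ALIGNMENT), and abi_ret(4) restores sp before
 * the ret.
 */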

#define abi_ret_default ret

	/* direct call */
#define abi_call call0
	/* indirect call */
#define abi_callx callx0
	/* outgoing call argument registers */
#define abi_arg0 a2
#define abi_arg1 a3
#define abi_arg2 a4
#define abi_arg3 a5
#define abi_arg4 a6
#define abi_arg5 a7
	/* return value */
#define abi_rv a2
	/* registers preserved across call */
#define abi_saved0 a12
#define abi_saved1 a13

	/* none of the above */
#define abi_tmp0 a8
#define abi_tmp1 a9

#else
#error Unsupported Xtensa ABI
#endif
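
/*
 * A hedged, ABI-neutral calling sketch (hypothetical callee name);
 * the final mov propagates the callee's result to our own return
 * register (a no-op under the call0 ABI):
 *
 *	abi_entry_default
 *	movi	abi_arg0, 1
 *	abi_call	some_c_function
 *	mov	a2, abi_rv
 *	abi_ret_default
 */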

#if defined(USER_SUPPORT_WINDOWED)
/* Assembly instructions for windowed user ABI. */
#define UABI_W
/* Assembly instructions for call0 user ABI (will be ignored). */
#define UABI_C0 #
#else
/* Assembly instructions for windowed user ABI (will be ignored). */
#define UABI_W #
/* Assembly instructions for call0 user ABI. */
#define UABI_C0
#endif
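
/*
 * The KABI_W/KABI_C0 and UABI_W/UABI_C0 prefixes let one source line
 * assemble only for the matching ABI: the variant defined as '#' turns
 * the rest of the line into an assembler comment. A hedged example
 * (hypothetical instruction), assembled only for the windowed kernel ABI:
 *
 * KABI_W	addi	a1, a1, -16
 */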

#define __XTENSA_HANDLER	.section ".exception.text", "ax"

#endif /* _XTENSA_ASMMACRO_H */