/*
 *  arch/arm/include/asm/assembler.h
 *
 *  Copyright (C) 1996-2000 Russell King
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 *  This file contains arm architecture specific defines
 *  for the different processors.
 *
 *  Do not include any C declarations in this file - it is included by
 *  assembler source.
 */
#ifndef __ASM_ASSEMBLER_H__
#define __ASM_ASSEMBLER_H__

#ifndef __ASSEMBLY__
#error "Only include this from assembly code"
#endif

#include <asm/ptrace.h>
#include <asm/domain.h>
#include <asm/opcodes-virt.h>

#define IOMEM(x)	(x)

/*
 * Endian independent macros for shifting bytes within registers.
 */
#ifndef __ARMEB__
#define lspull          lsr
#define lspush          lsl
#define get_byte_0      lsl #0
#define get_byte_1	lsr #8
#define get_byte_2	lsr #16
#define get_byte_3	lsr #24
#define put_byte_0      lsl #0
#define put_byte_1	lsl #8
#define put_byte_2	lsl #16
#define put_byte_3	lsl #24
#else
#define lspull          lsl
#define lspush          lsr
#define get_byte_0	lsr #24
#define get_byte_1	lsr #16
#define get_byte_2	lsr #8
#define get_byte_3      lsl #0
#define put_byte_0	lsl #24
#define put_byte_1	lsl #16
#define put_byte_2	lsl #8
#define put_byte_3      lsl #0
#endif
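
/*
 * Illustrative example (not part of the original file): moving byte 1 of
 * r1 into byte 3 of r0 in an endian-independent way.  get_byte_n shifts
 * byte n of a word down to bits 7:0, put_byte_n shifts a byte in bits
 * 7:0 up into byte position n, and lspull/lspush are the matching shifts
 * used when reassembling words from unaligned accesses.
 *
 *	mov	r2, r1, get_byte_1	@ byte 1 of r1 down to bits 7:0
 *	and	r2, r2, #0xff		@ isolate it
 *	orr	r0, r0, r2, put_byte_3	@ merge it in as byte 3 of r0
 */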

/* Select code for any configuration running in BE8 mode */
#ifdef CONFIG_CPU_ENDIAN_BE8
#define ARM_BE8(code...) code
#else
#define ARM_BE8(code...)
#endif
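
/*
 * Illustrative example (not part of the original file): byte-swapping a
 * little-endian value when the kernel itself runs big-endian (BE8); the
 * rev simply disappears on little-endian builds.
 *
 *	ldr	r0, [r1]		@ little-endian data
 * ARM_BE8(rev	r0, r0	)		@ emitted only for BE8 kernels
 */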

/*
 * Data preload for architectures that support it
 */
#if __LINUX_ARM_ARCH__ >= 5
#define PLD(code...)	code
#else
#define PLD(code...)
#endif
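
/*
 * Illustrative example (not part of the original file): prefetching ahead
 * of a copy loop; the pld is dropped entirely on pre-v5 CPUs.
 *
 *	PLD(	pld	[r1, #32]	)
 *	ldmia	r1!, {r3 - r6}
 *	stmia	r0!, {r3 - r6}
 */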

/*
 * This can be used to enable code that cacheline-aligns the destination
 * pointer when bulk writing to memory.  Experiments on StrongARM and
 * XScale didn't show this to be worthwhile when the cache is not set to
 * write-allocate (this would need further testing on XScale when WA is
 * used).
 *
 * On Feroceon there is much to gain, however, regardless of cache mode.
 */
#ifdef CONFIG_CPU_FEROCEON
#define CALGN(code...) code
#else
#define CALGN(code...)
#endif

/*
 * Enable and disable interrupts
 */
#if __LINUX_ARM_ARCH__ >= 6
	.macro	disable_irq_notrace
	cpsid	i
	.endm

	.macro	enable_irq_notrace
	cpsie	i
	.endm
#else
	.macro	disable_irq_notrace
	msr	cpsr_c, #PSR_I_BIT | SVC_MODE
	.endm

	.macro	enable_irq_notrace
	msr	cpsr_c, #SVC_MODE
	.endm
#endif

	.macro asm_trace_hardirqs_off
#if defined(CONFIG_TRACE_IRQFLAGS)
	stmdb   sp!, {r0-r3, ip, lr}
	bl	trace_hardirqs_off
	ldmia	sp!, {r0-r3, ip, lr}
#endif
	.endm

	.macro asm_trace_hardirqs_on_cond, cond
#if defined(CONFIG_TRACE_IRQFLAGS)
	/*
	 * Strictly, the registers should only be pushed and popped when
	 * \cond passes, but after the bl the flags have almost certainly
	 * been clobbered, so push and pop them unconditionally.
	 */
	stmdb   sp!, {r0-r3, ip, lr}
	bl\cond	trace_hardirqs_on
	ldmia	sp!, {r0-r3, ip, lr}
#endif
	.endm

	.macro asm_trace_hardirqs_on
	asm_trace_hardirqs_on_cond al
	.endm

	.macro disable_irq
	disable_irq_notrace
	asm_trace_hardirqs_off
	.endm

	.macro enable_irq
	asm_trace_hardirqs_on
	enable_irq_notrace
	.endm
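
/*
 * Illustrative example (not part of the original file): a short critical
 * section.  The *_notrace variants above do the same thing without
 * calling into the IRQ-flags tracing code (CONFIG_TRACE_IRQFLAGS), for
 * low-level paths where that call is unsafe.
 *
 *	disable_irq
 *	...			@ code that must not be interrupted
 *	enable_irq
 */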
/*
 * Save the current IRQ state and disable IRQs.  Note that this macro
 * assumes FIQs are enabled, and that the processor is in SVC mode.
 */
	.macro	save_and_disable_irqs, oldcpsr
#ifdef CONFIG_CPU_V7M
	mrs	\oldcpsr, primask
#else
	mrs	\oldcpsr, cpsr
#endif
	disable_irq
	.endm

	.macro	save_and_disable_irqs_notrace, oldcpsr
	mrs	\oldcpsr, cpsr
	disable_irq_notrace
	.endm

/*
 * Restore interrupt state previously stored in a register.  We don't
 * guarantee that this will preserve the flags.
 */
	.macro	restore_irqs_notrace, oldcpsr
#ifdef CONFIG_CPU_V7M
	msr	primask, \oldcpsr
#else
	msr	cpsr_c, \oldcpsr
#endif
	.endm

	.macro restore_irqs, oldcpsr
	tst	\oldcpsr, #PSR_I_BIT
	asm_trace_hardirqs_on_cond eq
	restore_irqs_notrace \oldcpsr
	.endm
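
/*
 * Illustrative example (not part of the original file): a critical
 * section that must also work when the caller already has IRQs disabled,
 * so the previous state is saved and restored rather than assumed.
 *
 *	save_and_disable_irqs r9
 *	...			@ IRQs masked here
 *	restore_irqs r9		@ re-enables only if they were enabled
 */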

/*
 * Get current thread_info.
 */
	.macro	get_thread_info, rd
 ARM(	mov	\rd, sp, lsr #13	)
 THUMB(	mov	\rd, sp			)
 THUMB(	lsr	\rd, \rd, #13		)
	mov	\rd, \rd, lsl #13
	.endm
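
/*
 * Note (an assumption based on the 8K THREAD_SIZE this kernel uses): the
 * shifts above simply clear the low 13 bits of sp, rounding it down to
 * the 8KiB-aligned base of the kernel stack, which is where thread_info
 * lives.  Typical use:
 *
 *	get_thread_info	r7		@ r7 = current thread_info
 *	ldr	r8, [r7, #TI_FLAGS]	@ e.g. pick up the thread flags
 */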

#define USER(x...)				\
9999:	x;					\
	.pushsection __ex_table,"a";		\
	.align	3;				\
	.long	9999b,9001f;			\
	.popsection
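
/*
 * Illustrative example (not part of the original file): a user-space
 * access whose fault handler is the local label 9001, reached via the
 * exception table entry emitted above.
 *
 * USER(	ldrt	r0, [r1]	)	@ may fault
 *	...
 * 9001:	mov	r0, #0			@ fault fixup path
 */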

#ifdef CONFIG_SMP
#define ALT_SMP(instr...)					\
9998:	instr
/*
 * Note: if you get assembler errors from ALT_UP() when building with
 * CONFIG_THUMB2_KERNEL, you almost certainly need to use
 * ALT_SMP( W(instr) ... )
 */
#define ALT_UP(instr...)					\
	.pushsection ".alt.smp.init", "a"			;\
	.long	9998b						;\
9997:	instr							;\
	.if . - 9997b != 4					;\
		.error "ALT_UP() content must assemble to exactly 4 bytes";\
	.endif							;\
	.popsection
#define ALT_UP_B(label)					\
	.equ	up_b_offset, label - 9998b			;\
	.pushsection ".alt.smp.init", "a"			;\
	.long	9998b						;\
	W(b)	. + up_b_offset					;\
	.popsection
#else
#define ALT_SMP(instr...)
#define ALT_UP(instr...) instr
#define ALT_UP_B(label) b label
#endif
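
/*
 * Illustrative example (not part of the original file): an SMP-only
 * barrier.  When an SMP kernel finds itself running on a uniprocessor
 * machine, the .alt.smp.init table is used to patch the dmb into a nop.
 *
 *	ALT_SMP(dmb	ish)
 *	ALT_UP(nop)
 */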

/*
 * Instruction barrier
 */
	.macro	instr_sync
#if __LINUX_ARM_ARCH__ >= 7
	isb
#elif __LINUX_ARM_ARCH__ == 6
	mcr	p15, 0, r0, c7, c5, 4
#endif
	.endm
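
/*
 * Illustrative example (not part of the original file): making sure a
 * CP15 change is seen by subsequent instructions.
 *
 *	mcr	p15, 0, r0, c1, c0, 0	@ write SCTLR
 *	instr_sync			@ ISB (or the CP15 equivalent on v6)
 */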

/*
 * SMP data memory barrier
 */
	.macro	smp_dmb mode
#ifdef CONFIG_SMP
#if __LINUX_ARM_ARCH__ >= 7
	.ifeqs "\mode","arm"
	ALT_SMP(dmb	ish)
	.else
	ALT_SMP(W(dmb)	ish)
	.endif
#elif __LINUX_ARM_ARCH__ == 6
	ALT_SMP(mcr	p15, 0, r0, c7, c10, 5)	@ dmb
#else
#error Incompatible SMP platform
#endif
	.ifeqs "\mode","arm"
	ALT_UP(nop)
	.else
	ALT_UP(W(nop))
	.endif
#endif
	.endm
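
/*
 * Illustrative example (not part of the original file): ordering a data
 * update before releasing a lock word, in ARM-only (non-Thumb) code:
 *
 *	str	r1, [r0]		@ publish the data
 *	smp_dmb	arm			@ order it before the release store
 *	str	r2, [r3]		@ store to the lock word
 */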

#if defined(CONFIG_CPU_V7M)
	/*
	 * setmode is used to ensure we are in SVC mode during boot. For
	 * v7-M this is done in __v7m_setup, so setmode can be empty here.
	 */
	.macro	setmode, mode, reg
	.endm
#elif defined(CONFIG_THUMB2_KERNEL)
	.macro	setmode, mode, reg
	mov	\reg, #\mode
	msr	cpsr_c, \reg
	.endm
#else
	.macro	setmode, mode, reg
	msr	cpsr_c, #\mode
	.endm
#endif
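
/*
 * Illustrative example (not part of the original file): forcing SVC mode
 * with IRQs and FIQs masked early during boot, using r9 as the scratch
 * register (only the Thumb-2 variant actually needs it):
 *
 *	setmode	PSR_F_BIT | PSR_I_BIT | SVC_MODE, r9
 */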

/*
 * Helper macro to enter SVC mode cleanly and mask interrupts. reg is
 * a scratch register for the macro to overwrite.
 *
 * This macro is intended for forcing the CPU into SVC mode at boot time.
 * You cannot return to the original mode.
 */
.macro safe_svcmode_maskall reg:req
#if __LINUX_ARM_ARCH__ >= 6
	mrs	\reg , cpsr
	eor	\reg, \reg, #HYP_MODE
	tst	\reg, #MODE_MASK
	bic	\reg , \reg , #MODE_MASK
	orr	\reg , \reg , #PSR_I_BIT | PSR_F_BIT | SVC_MODE
THUMB(	orr	\reg , \reg , #PSR_T_BIT	)
	bne	1f
	orr	\reg, \reg, #PSR_A_BIT
	adr	lr, BSYM(2f)
	msr	spsr_cxsf, \reg
	__MSR_ELR_HYP(14)
	__ERET
1:	msr	cpsr_c, \reg
2:
#else
/*
 * workaround for possibly broken pre-v6 hardware
 * (akita, Sharp Zaurus C-1000, PXA270-based)
 */
	setmode	PSR_F_BIT | PSR_I_BIT | SVC_MODE, \reg
#endif
.endm
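
/*
 * Illustrative example (not part of the original file): early boot code
 * dropping from whatever mode the boot loader left the CPU in (possibly
 * HYP) into SVC with interrupts masked:
 *
 *	safe_svcmode_maskall r9
 */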

/*
 * STRT/LDRT access macros with ARM and Thumb-2 variants
 */
#ifdef CONFIG_THUMB2_KERNEL

	.macro	usraccoff, instr, reg, ptr, inc, off, cond, abort, t=TUSER()
9999:
	.if	\inc == 1
	\instr\cond\()b\()\t\().w \reg, [\ptr, #\off]
	.elseif	\inc == 4
	\instr\cond\()\t\().w \reg, [\ptr, #\off]
	.else
	.error	"Unsupported inc macro argument"
	.endif

	.pushsection __ex_table,"a"
	.align	3
	.long	9999b, \abort
	.popsection
	.endm

	.macro	usracc, instr, reg, ptr, inc, cond, rept, abort
	@ explicit IT instruction needed because of the label
	@ introduced by the USER macro
	.ifnc	\cond,al
	.if	\rept == 1
	itt	\cond
	.elseif	\rept == 2
	ittt	\cond
	.else
	.error	"Unsupported rept macro argument"
	.endif
	.endif

	@ Slightly optimised to avoid incrementing the pointer twice
	usraccoff \instr, \reg, \ptr, \inc, 0, \cond, \abort
	.if	\rept == 2
	usraccoff \instr, \reg, \ptr, \inc, \inc, \cond, \abort
	.endif

	add\cond \ptr, #\rept * \inc
	.endm

#else	/* !CONFIG_THUMB2_KERNEL */

	.macro	usracc, instr, reg, ptr, inc, cond, rept, abort, t=TUSER()
	.rept	\rept
9999:
	.if	\inc == 1
	\instr\cond\()b\()\t \reg, [\ptr], #\inc
	.elseif	\inc == 4
	\instr\cond\()\t \reg, [\ptr], #\inc
	.else
	.error	"Unsupported inc macro argument"
	.endif

	.pushsection __ex_table,"a"
	.align	3
	.long	9999b, \abort
	.popsection
	.endr
	.endm

#endif	/* CONFIG_THUMB2_KERNEL */

	.macro	strusr, reg, ptr, inc, cond=al, rept=1, abort=9001f
	usracc	str, \reg, \ptr, \inc, \cond, \rept, \abort
	.endm

	.macro	ldrusr, reg, ptr, inc, cond=al, rept=1, abort=9001f
	usracc	ldr, \reg, \ptr, \inc, \cond, \rept, \abort
	.endm
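
/*
 * Illustrative example (not part of the original file): clearing one user
 * word with an unprivileged store, branching to the default local fixup
 * label if the access faults:
 *
 *	mov	r3, #0
 *	strusr	r3, r0, 4		@ strt r3, [r0], #4; fault -> 9001f
 *	...
 * 9001:	...			@ fault fixup code
 */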

/* Utility macro for declaring string literals */
	.macro	string name:req, string
	.type \name , #object
\name:
	.asciz "\string"
	.size \name , . - \name
	.endm
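
/*
 * Illustrative example (not part of the original file):
 *
 *	string	cpu_elf_name, "v7"
 *
 * emits an .asciz "v7" labelled cpu_elf_name, with ELF type and size
 * information attached so the symbol shows up properly in object dumps.
 */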

	.macro check_uaccess, addr:req, size:req, limit:req, tmp:req, bad:req
#ifndef CONFIG_CPU_USE_DOMAINS
	adds	\tmp, \addr, #\size - 1
	sbcccs	\tmp, \tmp, \limit
	bcs	\bad
#endif
	.endm
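
/*
 * Illustrative sketch (not part of the original file) of how a
 * __get_user-style handler might use this, assuming the user address is
 * in r0 and the current segment limit has been passed in r1:
 *
 *	check_uaccess r0, 4, r1, r2, __get_user_bad
 *	...				@ fall through: address is OK
 */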

#endif /* __ASM_ASSEMBLER_H__ */