/*
 *  arch/arm/include/asm/assembler.h
 *
 *  Copyright (C) 1996-2000 Russell King
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 *  This file contains arm architecture specific defines
 *  for the different processors.
 *
 *  Do not include any C declarations in this file - it is included by
 *  assembler source.
 */
#ifndef __ASM_ASSEMBLER_H__
#define __ASM_ASSEMBLER_H__

#ifndef __ASSEMBLY__
#error "Only include this from assembly code"
#endif

#include <asm/ptrace.h>
#include <asm/domain.h>
#include <asm/opcodes-virt.h>
#include <asm/asm-offsets.h>
#include <asm/page.h>
#include <asm/thread_info.h>

#define IOMEM(x)	(x)

/*
 * Endian independent macros for shifting bytes within registers.
 */
#ifndef __ARMEB__
#define lspull          lsr
#define lspush          lsl
#define get_byte_0      lsl #0
#define get_byte_1	lsr #8
#define get_byte_2	lsr #16
#define get_byte_3	lsr #24
#define put_byte_0      lsl #0
#define put_byte_1	lsl #8
#define put_byte_2	lsl #16
#define put_byte_3	lsl #24
#else
#define lspull          lsl
#define lspush          lsr
#define get_byte_0	lsr #24
#define get_byte_1	lsr #16
#define get_byte_2	lsr #8
#define get_byte_3      lsl #0
#define put_byte_0	lsl #24
#define put_byte_1	lsl #16
#define put_byte_2	lsl #8
#define put_byte_3      lsl #0
#endif
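
/*
 * Illustrative use (a sketch, not part of the original header): extract
 * byte 1 of the word in r2 into the low byte of r3, independent of
 * endianness, using a shifted operand.
 *
 *	mov	r3, r2, get_byte_1
 *	and	r3, r3, #255
 */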

/* Select code for any configuration running in BE8 mode */
#ifdef CONFIG_CPU_ENDIAN_BE8
#define ARM_BE8(code...) code
#else
#define ARM_BE8(code...)
#endif

/*
 * Data preload for architectures that support it
 */
#if __LINUX_ARM_ARCH__ >= 5
#define PLD(code...)	code
#else
#define PLD(code...)
#endif
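
/*
 * Illustrative use (a sketch modelled on the kernel's copy loops, not part
 * of this header): prefetch the source ahead of a bulk copy; compiles away
 * on pre-v5 builds.
 *
 *	PLD(	pld	[r1, #0]	)
 */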

/*
 * This can be used to enable code to cacheline align the destination
 * pointer when bulk writing to memory.  Experiments on StrongARM and
 * XScale didn't show this to be a worthwhile thing to do when the cache
 * is not set to write-allocate (this would need further testing on
 * XScale when WA is used).
 *
 * On Feroceon there is much to gain however, regardless of cache mode.
 */
#ifdef CONFIG_CPU_FEROCEON
#define CALGN(code...) code
#else
#define CALGN(code...)
#endif
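
/*
 * Illustrative use (a sketch modelled on the memcpy/copy templates, not
 * part of this header): only emitted on Feroceon, otherwise compiled away.
 *
 *	CALGN(	ands	ip, r0, #31	)
 *	CALGN(	rsb	ip, ip, #32	)
 */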

/*
 * Enable and disable interrupts
 */
#if __LINUX_ARM_ARCH__ >= 6
	.macro	disable_irq_notrace
	cpsid	i
	.endm

	.macro	enable_irq_notrace
	cpsie	i
	.endm
#else
	.macro	disable_irq_notrace
	msr	cpsr_c, #PSR_I_BIT | SVC_MODE
	.endm

	.macro	enable_irq_notrace
	msr	cpsr_c, #SVC_MODE
	.endm
#endif

	.macro asm_trace_hardirqs_off, save=1
#if defined(CONFIG_TRACE_IRQFLAGS)
	.if \save
	stmdb   sp!, {r0-r3, ip, lr}
	.endif
	bl	trace_hardirqs_off
	.if \save
	ldmia	sp!, {r0-r3, ip, lr}
	.endif
#endif
	.endm

	.macro asm_trace_hardirqs_on, cond=al, save=1
#if defined(CONFIG_TRACE_IRQFLAGS)
	/*
	 * Ideally the registers would be pushed and popped conditionally,
	 * but after the bl the flags are clobbered anyway.
	 */
	.if \save
	stmdb   sp!, {r0-r3, ip, lr}
	.endif
	bl\cond	trace_hardirqs_on
	.if \save
	ldmia	sp!, {r0-r3, ip, lr}
	.endif
#endif
	.endm

	.macro disable_irq, save=1
	disable_irq_notrace
	asm_trace_hardirqs_off \save
	.endm

	.macro enable_irq
	asm_trace_hardirqs_on
	enable_irq_notrace
	.endm
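
/*
 * Illustrative use (a sketch, not taken from this header): typical entry
 * code pattern; the *_notrace variants skip the lockdep/irq-tracing hooks.
 *
 *	disable_irq			@ also calls trace_hardirqs_off
 *	...critical section...
 *	enable_irq
 */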
/*
 * Save the current IRQ state and disable IRQs.  Note that this macro
 * assumes FIQs are enabled, and that the processor is in SVC mode.
 */
	.macro	save_and_disable_irqs, oldcpsr
#ifdef CONFIG_CPU_V7M
	mrs	\oldcpsr, primask
#else
	mrs	\oldcpsr, cpsr
#endif
	disable_irq
	.endm

	.macro	save_and_disable_irqs_notrace, oldcpsr
	mrs	\oldcpsr, cpsr
	disable_irq_notrace
	.endm

/*
 * Restore interrupt state previously stored in a register.  We don't
 * guarantee that this will preserve the flags.
 */
	.macro	restore_irqs_notrace, oldcpsr
#ifdef CONFIG_CPU_V7M
	msr	primask, \oldcpsr
#else
	msr	cpsr_c, \oldcpsr
#endif
	.endm

	.macro restore_irqs, oldcpsr
	tst	\oldcpsr, #PSR_I_BIT
	asm_trace_hardirqs_on cond=eq
	restore_irqs_notrace \oldcpsr
	.endm
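
/*
 * Illustrative pairing (a sketch, not from this header): save the IRQ
 * state in a scratch register across a critical region, then restore it.
 *
 *	save_and_disable_irqs	r9
 *	...critical section...
 *	restore_irqs		r9
 */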

/*
 * Assembly version of "adr rd, BSYM(sym)".  This should only be used to
 * reference local symbols in the same assembly file which are to be
 * resolved by the assembler.  Other usage is undefined.
 */
	.irp	c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
	.macro	badr\c, rd, sym
#ifdef CONFIG_THUMB2_KERNEL
	adr\c	\rd, \sym + 1
#else
	adr\c	\rd, \sym
#endif
	.endm
	.endr
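
/*
 * Illustrative use (a sketch; safe_svcmode_maskall below does the same
 * with "badr lr, 2f"): take the address of a local label so the Thumb bit
 * is set correctly on Thumb-2 kernels.
 *
 *	badr	lr, 1f		@ "return" to the local label below
 *	...
 * 1:
 */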

/*
 * Get current thread_info.
 */
	.macro	get_thread_info, rd
 ARM(	mov	\rd, sp, lsr #THREAD_SIZE_ORDER + PAGE_SHIFT	)
 THUMB(	mov	\rd, sp			)
 THUMB(	lsr	\rd, \rd, #THREAD_SIZE_ORDER + PAGE_SHIFT	)
	mov	\rd, \rd, lsl #THREAD_SIZE_ORDER + PAGE_SHIFT
	.endm
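
/*
 * Illustrative use (a sketch): thread_info lives at the bottom of the
 * kernel stack, so it is recovered by rounding sp down to the stack size.
 * TI_FLAGS is the usual asm-offsets constant.
 *
 *	get_thread_info	r9		@ r9 = current thread_info
 *	ldr	r0, [r9, #TI_FLAGS]
 */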

/*
 * Increment/decrement the preempt count.
 */
#ifdef CONFIG_PREEMPT_COUNT
	.macro	inc_preempt_count, ti, tmp
	ldr	\tmp, [\ti, #TI_PREEMPT]	@ get preempt count
	add	\tmp, \tmp, #1			@ increment it
	str	\tmp, [\ti, #TI_PREEMPT]
	.endm

	.macro	dec_preempt_count, ti, tmp
	ldr	\tmp, [\ti, #TI_PREEMPT]	@ get preempt count
	sub	\tmp, \tmp, #1			@ decrement it
	str	\tmp, [\ti, #TI_PREEMPT]
	.endm

	.macro	dec_preempt_count_ti, ti, tmp
	get_thread_info \ti
	dec_preempt_count \ti, \tmp
	.endm
#else
	.macro	inc_preempt_count, ti, tmp
	.endm

	.macro	dec_preempt_count, ti, tmp
	.endm

	.macro	dec_preempt_count_ti, ti, tmp
	.endm
#endif
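
/*
 * Illustrative use (a sketch): bump the preempt count around a region that
 * must not be preempted; compiles to nothing without CONFIG_PREEMPT_COUNT.
 *
 *	get_thread_info	r9
 *	inc_preempt_count r9, r3
 *	...
 *	dec_preempt_count r9, r3
 */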

#define USER(x...)				\
9999:	x;					\
	.pushsection __ex_table,"a";		\
	.align	3;				\
	.long	9999b,9001f;			\
	.popsection
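
/*
 * Illustrative use (a sketch): wrap a user-space access so that a fault
 * branches to a local "9001:" label, which the caller must provide; the
 * fixup body below is hypothetical.
 *
 * USER(	ldrt	r0, [r1]	)	@ may fault
 *	...
 * 9001:	mov	r0, #-EFAULT		@ hypothetical fixup path
 */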

#ifdef CONFIG_SMP
#define ALT_SMP(instr...)					\
9998:	instr
/*
 * Note: if you get assembler errors from ALT_UP() when building with
 * CONFIG_THUMB2_KERNEL, you almost certainly need to use
 * ALT_SMP( W(instr) ... )
 */
#define ALT_UP(instr...)					\
	.pushsection ".alt.smp.init", "a"			;\
	.long	9998b						;\
9997:	instr							;\
	.if . - 9997b == 2					;\
		nop						;\
	.endif							;\
	.if . - 9997b != 4					;\
		.error "ALT_UP() content must assemble to exactly 4 bytes";\
	.endif							;\
	.popsection
#define ALT_UP_B(label)					\
	.equ	up_b_offset, label - 9998b			;\
	.pushsection ".alt.smp.init", "a"			;\
	.long	9998b						;\
	W(b)	. + up_b_offset					;\
	.popsection
#else
#define ALT_SMP(instr...)
#define ALT_UP(instr...) instr
#define ALT_UP_B(label) b label
#endif
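
/*
 * Illustrative pairing (a sketch; the smp_dmb macro below is a real user):
 * emit the SMP instruction inline and record a UP replacement in
 * .alt.smp.init, to be patched in when the kernel finds itself on a
 * single CPU.
 *
 *	ALT_SMP(dmb	ish)
 *	ALT_UP(nop)
 */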

/*
 * Instruction barrier
 */
	.macro	instr_sync
#if __LINUX_ARM_ARCH__ >= 7
	isb
#elif __LINUX_ARM_ARCH__ == 6
	mcr	p15, 0, r0, c7, c5, 4
#endif
	.endm

/*
 * SMP data memory barrier
 */
	.macro	smp_dmb mode
#ifdef CONFIG_SMP
#if __LINUX_ARM_ARCH__ >= 7
	.ifeqs "\mode","arm"
	ALT_SMP(dmb	ish)
	.else
	ALT_SMP(W(dmb)	ish)
	.endif
#elif __LINUX_ARM_ARCH__ == 6
	ALT_SMP(mcr	p15, 0, r0, c7, c10, 5)	@ dmb
#else
#error Incompatible SMP platform
#endif
	.ifeqs "\mode","arm"
	ALT_UP(nop)
	.else
	ALT_UP(W(nop))
	.endif
#endif
	.endm
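
/*
 * Illustrative use (a sketch): pass "arm" when the surrounding code is
 * always assembled as ARM; otherwise the wide (W) encodings are used so
 * the barrier is 4 bytes under Thumb-2 as well.
 *
 *	smp_dmb	arm
 */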

#if defined(CONFIG_CPU_V7M)
	/*
	 * setmode is used to assert that the CPU is in SVC mode during boot.
	 * For v7-M this is done in __v7m_setup, so setmode can be empty here.
	 */
	.macro	setmode, mode, reg
	.endm
#elif defined(CONFIG_THUMB2_KERNEL)
	.macro	setmode, mode, reg
	mov	\reg, #\mode
	msr	cpsr_c, \reg
	.endm
#else
	.macro	setmode, mode, reg
	msr	cpsr_c, #\mode
	.endm
#endif
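
/*
 * Illustrative use (the safe_svcmode_maskall fallback below does exactly
 * this): force SVC mode with IRQs and FIQs masked during early boot.
 *
 *	setmode	PSR_F_BIT | PSR_I_BIT | SVC_MODE, r9
 */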

/*
 * Helper macro to enter SVC mode cleanly and mask interrupts. reg is
 * a scratch register for the macro to overwrite.
 *
 * This macro is intended for forcing the CPU into SVC mode at boot time.
 * You cannot return to the original mode.
 */
.macro safe_svcmode_maskall reg:req
#if __LINUX_ARM_ARCH__ >= 6 && !defined(CONFIG_CPU_V7M)
	mrs	\reg , cpsr
	eor	\reg, \reg, #HYP_MODE
	tst	\reg, #MODE_MASK
	bic	\reg , \reg , #MODE_MASK
	orr	\reg , \reg , #PSR_I_BIT | PSR_F_BIT | SVC_MODE
THUMB(	orr	\reg , \reg , #PSR_T_BIT	)
	bne	1f
	orr	\reg, \reg, #PSR_A_BIT
	badr	lr, 2f
	msr	spsr_cxsf, \reg
	__MSR_ELR_HYP(14)
	__ERET
1:	msr	cpsr_c, \reg
2:
#else
/*
 * workaround for possibly broken pre-v6 hardware
 * (akita, Sharp Zaurus C-1000, PXA270-based)
 */
	setmode	PSR_F_BIT | PSR_I_BIT | SVC_MODE, \reg
#endif
.endm
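
/*
 * Illustrative use (a sketch modelled on the boot code): drop from HYP to
 * SVC (if entered in HYP) with interrupts masked, clobbering one register.
 *
 *	safe_svcmode_maskall r9
 */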

/*
 * STRT/LDRT access macros with ARM and Thumb-2 variants
 */
#ifdef CONFIG_THUMB2_KERNEL

	.macro	usraccoff, instr, reg, ptr, inc, off, cond, abort, t=TUSER()
9999:
	.if	\inc == 1
	\instr\cond\()b\()\t\().w \reg, [\ptr, #\off]
	.elseif	\inc == 4
	\instr\cond\()\t\().w \reg, [\ptr, #\off]
	.else
	.error	"Unsupported inc macro argument"
	.endif

	.pushsection __ex_table,"a"
	.align	3
	.long	9999b, \abort
	.popsection
	.endm

	.macro	usracc, instr, reg, ptr, inc, cond, rept, abort
	@ explicit IT instruction needed because of the label
	@ introduced by the USER macro
	.ifnc	\cond,al
	.if	\rept == 1
	itt	\cond
	.elseif	\rept == 2
	ittt	\cond
	.else
	.error	"Unsupported rept macro argument"
	.endif
	.endif

	@ Slightly optimised to avoid incrementing the pointer twice
	usraccoff \instr, \reg, \ptr, \inc, 0, \cond, \abort
	.if	\rept == 2
	usraccoff \instr, \reg, \ptr, \inc, \inc, \cond, \abort
	.endif

	add\cond \ptr, #\rept * \inc
	.endm

#else	/* !CONFIG_THUMB2_KERNEL */

	.macro	usracc, instr, reg, ptr, inc, cond, rept, abort, t=TUSER()
	.rept	\rept
9999:
	.if	\inc == 1
	\instr\cond\()b\()\t \reg, [\ptr], #\inc
	.elseif	\inc == 4
	\instr\cond\()\t \reg, [\ptr], #\inc
	.else
	.error	"Unsupported inc macro argument"
	.endif

	.pushsection __ex_table,"a"
	.align	3
	.long	9999b, \abort
	.popsection
	.endr
	.endm

#endif	/* CONFIG_THUMB2_KERNEL */

	.macro	strusr, reg, ptr, inc, cond=al, rept=1, abort=9001f
	usracc	str, \reg, \ptr, \inc, \cond, \rept, \abort
	.endm

	.macro	ldrusr, reg, ptr, inc, cond=al, rept=1, abort=9001f
	usracc	ldr, \reg, \ptr, \inc, \cond, \rept, \abort
	.endm
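
/*
 * Illustrative use (a sketch modelled on the uaccess copy loops): access a
 * word with user privileges; a fault jumps to the label passed via abort=
 * (9001f by default, which the caller must define). .Lcopy_abort is a
 * hypothetical fixup label.
 *
 *	ldrusr	r3, r1, 4			@ r3 = *r1++, user access
 *	strusr	r3, r0, 4, abort=.Lcopy_abort
 */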

/* Utility macro for declaring string literals */
	.macro	string name:req, string
	.type \name , #object
\name:
	.asciz "\string"
	.size \name , . - \name
	.endm
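
/*
 * Illustrative use (a sketch): emit a typed, sized, NUL-terminated string.
 *
 *	string	cpu_elf_name, "v7"
 */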

	.macro check_uaccess, addr:req, size:req, limit:req, tmp:req, bad:req
#ifndef CONFIG_CPU_USE_DOMAINS
	adds	\tmp, \addr, #\size - 1
	sbcccs	\tmp, \tmp, \limit
	bcs	\bad
#endif
	.endm
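
/*
 * Illustrative use (a sketch modelled on the __get_user helpers): branch
 * to the fault handler if the 4-byte access at r0 is not below the limit
 * held in r1, using r2 as scratch.
 *
 *	check_uaccess r0, 4, r1, r2, __get_user_bad
 */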

	.irp	c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
	.macro	ret\c, reg
#if __LINUX_ARM_ARCH__ < 6
	mov\c	pc, \reg
#else
	.ifeqs	"\reg", "lr"
	bx\c	\reg
	.else
	mov\c	pc, \reg
	.endif
#endif
	.endm
	.endr
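
/*
 * Illustrative use (a sketch): return from a leaf routine, unconditionally
 * or only when the condition holds.
 *
 *	ret	lr
 *	reteq	lr
 */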

	.macro	ret.w, reg
	ret	\reg
#ifdef CONFIG_THUMB2_KERNEL
	nop
#endif
	.endm

#endif /* __ASM_ASSEMBLER_H__ */