/*
 *  arch/arm/include/asm/assembler.h
 *
 *  Copyright (C) 1996-2000 Russell King
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 *  This file contains arm architecture specific defines
 *  for the different processors.
 *
 *  Do not include any C declarations in this file - it is included by
 *  assembler source.
 */
#ifndef __ASM_ASSEMBLER_H__
#define __ASM_ASSEMBLER_H__

#ifndef __ASSEMBLY__
#error "Only include this from assembly code"
#endif

#include <asm/ptrace.h>
#include <asm/domain.h>
#include <asm/opcodes-virt.h>
#include <asm/asm-offsets.h>
#include <asm/page.h>
#include <asm/thread_info.h>

#define IOMEM(x)	(x)

/*
 * Endian independent macros for shifting bytes within registers.
 */
#ifndef __ARMEB__
#define lspull          lsr
#define lspush          lsl
#define get_byte_0      lsl #0
#define get_byte_1	lsr #8
#define get_byte_2	lsr #16
#define get_byte_3	lsr #24
#define put_byte_0      lsl #0
#define put_byte_1	lsl #8
#define put_byte_2	lsl #16
#define put_byte_3	lsl #24
#else
#define lspull          lsl
#define lspush          lsr
#define get_byte_0	lsr #24
#define get_byte_1	lsr #16
#define get_byte_2	lsr #8
#define get_byte_3      lsl #0
#define put_byte_0	lsl #24
#define put_byte_1	lsl #16
#define put_byte_2	lsl #8
#define put_byte_3      lsl #0
#endif
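
/*
 * Illustrative sketch (not part of the original header): get_byte_N expands
 * to the shift that moves the byte at memory offset N of a loaded word into
 * bits [7:0], regardless of endianness.  Register choices are arbitrary:
 *
 *	ldr	r1, [r0]		@ load a word
 *	mov	r2, r1, get_byte_1	@ byte at offset 1 now in bits [7:0]
 *	and	r2, r2, #255		@ isolate it
 */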

/* Select code for any configuration running in BE8 mode */
#ifdef CONFIG_CPU_ENDIAN_BE8
#define ARM_BE8(code...) code
#else
#define ARM_BE8(code...)
#endif
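
/*
 * Illustrative sketch (not part of the original header): code that loads a
 * value stored little-endian in memory byte-swaps it only when the kernel
 * itself runs big-endian (BE8), e.g.
 *
 *	ldr	r0, [r1]		@ word stored little-endian
 * ARM_BE8(rev	r0, r0)			@ swap only on a BE8 kernel
 */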

/*
 * Data preload for architectures that support it
 */
#if __LINUX_ARM_ARCH__ >= 5
#define PLD(code...)	code
#else
#define PLD(code...)
#endif
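
/*
 * Illustrative sketch (not part of the original header): a copy loop emits a
 * prefetch only on ARMv5+, where the pld instruction exists:
 *
 *	PLD(	pld	[r1, #32]	)	@ prefetch one cache line ahead
 */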

/*
 * This can be used to enable code to cacheline align the destination
 * pointer when bulk writing to memory.  Experiments on StrongARM and
 * XScale didn't show this to be worthwhile when the cache is not set to
 * write-allocate (this would need further testing on XScale when WA
 * is used).
 *
 * On Feroceon there is much to gain however, regardless of cache mode.
 */
#ifdef CONFIG_CPU_FEROCEON
#define CALGN(code...) code
#else
#define CALGN(code...)
#endif
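
/*
 * Illustrative sketch (not part of the original header): a memcpy-style
 * routine might compute how many bytes remain until the destination reaches
 * a cache line boundary and handle them separately, but only on Feroceon
 * (r0 is assumed to hold the destination pointer here):
 *
 *	CALGN(	ands	ip, r0, #31	)	@ offset into the cache line
 *	CALGN(	rsb	ip, ip, #32	)	@ bytes needed to reach alignment
 */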

/*
 * Enable and disable interrupts
 */
#if __LINUX_ARM_ARCH__ >= 6
	.macro	disable_irq_notrace
	cpsid	i
	.endm

	.macro	enable_irq_notrace
	cpsie	i
	.endm
#else
	.macro	disable_irq_notrace
	msr	cpsr_c, #PSR_I_BIT | SVC_MODE
	.endm

	.macro	enable_irq_notrace
	msr	cpsr_c, #SVC_MODE
	.endm
#endif

	.macro asm_trace_hardirqs_off
#if defined(CONFIG_TRACE_IRQFLAGS)
	stmdb   sp!, {r0-r3, ip, lr}
	bl	trace_hardirqs_off
	ldmia	sp!, {r0-r3, ip, lr}
#endif
	.endm

	.macro asm_trace_hardirqs_on_cond, cond
#if defined(CONFIG_TRACE_IRQFLAGS)
	/*
	 * Strictly, the registers should be pushed and popped conditionally,
	 * but after the bl the flags are certainly clobbered anyway.
	 */
	stmdb   sp!, {r0-r3, ip, lr}
	bl\cond	trace_hardirqs_on
	ldmia	sp!, {r0-r3, ip, lr}
#endif
	.endm

	.macro asm_trace_hardirqs_on
	asm_trace_hardirqs_on_cond al
	.endm

	.macro disable_irq
	disable_irq_notrace
	asm_trace_hardirqs_off
	.endm

	.macro enable_irq
	asm_trace_hardirqs_on
	enable_irq_notrace
	.endm
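
/*
 * Illustrative sketch (not part of the original header): SVC-mode assembly
 * that must not be interrupted brackets the critical section with the traced
 * variants:
 *
 *	disable_irq
 *	@ ... touch per-CPU state ...
 *	enable_irq
 */
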
/*
 * Save the current IRQ state and disable IRQs.  Note that this macro
 * assumes FIQs are enabled, and that the processor is in SVC mode.
 */
	.macro	save_and_disable_irqs, oldcpsr
#ifdef CONFIG_CPU_V7M
	mrs	\oldcpsr, primask
#else
	mrs	\oldcpsr, cpsr
#endif
	disable_irq
	.endm

	.macro	save_and_disable_irqs_notrace, oldcpsr
	mrs	\oldcpsr, cpsr
	disable_irq_notrace
	.endm

/*
 * Restore interrupt state previously stored in a register.  We don't
 * guarantee that this will preserve the flags.
 */
	.macro	restore_irqs_notrace, oldcpsr
#ifdef CONFIG_CPU_V7M
	msr	primask, \oldcpsr
#else
	msr	cpsr_c, \oldcpsr
#endif
	.endm

	.macro restore_irqs, oldcpsr
	tst	\oldcpsr, #PSR_I_BIT
	asm_trace_hardirqs_on_cond eq
	restore_irqs_notrace \oldcpsr
	.endm
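
/*
 * Illustrative sketch (not part of the original header): cache maintenance
 * code saves the IRQ state around a sequence that must run with IRQs masked,
 * using any free register:
 *
 *	save_and_disable_irqs r9
 *	@ ... operations that must not be interrupted ...
 *	restore_irqs r9
 */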

/*
 * Get current thread_info.
 */
	.macro	get_thread_info, rd
 ARM(	mov	\rd, sp, lsr #THREAD_SIZE_ORDER + PAGE_SHIFT	)
 THUMB(	mov	\rd, sp			)
 THUMB(	lsr	\rd, \rd, #THREAD_SIZE_ORDER + PAGE_SHIFT	)
	mov	\rd, \rd, lsl #THREAD_SIZE_ORDER + PAGE_SHIFT
	.endm
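
/*
 * Illustrative sketch (not part of the original header): entry code obtains
 * the current thread_info by rounding sp down to the stack base and then
 * reads a field via an asm-offsets constant:
 *
 *	get_thread_info	r7
 *	ldr	r8, [r7, #TI_FLAGS]	@ e.g. check for pending work
 */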

/*
 * Increment/decrement the preempt count.
 */
#ifdef CONFIG_PREEMPT_COUNT
	.macro	inc_preempt_count, ti, tmp
	ldr	\tmp, [\ti, #TI_PREEMPT]	@ get preempt count
	add	\tmp, \tmp, #1			@ increment it
	str	\tmp, [\ti, #TI_PREEMPT]
	.endm

	.macro	dec_preempt_count, ti, tmp
	ldr	\tmp, [\ti, #TI_PREEMPT]	@ get preempt count
	sub	\tmp, \tmp, #1			@ decrement it
	str	\tmp, [\ti, #TI_PREEMPT]
	.endm

	.macro	dec_preempt_count_ti, ti, tmp
	get_thread_info \ti
	dec_preempt_count \ti, \tmp
	.endm
#else
	.macro	inc_preempt_count, ti, tmp
	.endm

	.macro	dec_preempt_count, ti, tmp
	.endm

	.macro	dec_preempt_count_ti, ti, tmp
	.endm
#endif
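
/*
 * Illustrative sketch (not part of the original header): low-level code such
 * as the VFP support enters a non-preemptible region like this (register
 * choices here are arbitrary):
 *
 *	get_thread_info	r10
 *	inc_preempt_count r10, r4
 *	@ ... use per-CPU hardware state ...
 *	dec_preempt_count r10, r4
 */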

#define USER(x...)				\
9999:	x;					\
	.pushsection __ex_table,"a";		\
	.align	3;				\
	.long	9999b,9001f;			\
	.popsection
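
/*
 * Illustrative sketch (not part of the original header): USER() registers an
 * exception table entry for one possibly-faulting user access; the enclosing
 * code must provide a local fixup label 9001:
 *
 * USER(	ldrt	r3, [r1]	)	@ may fault on the user pointer in r1
 *	@ ...
 * 9001:	@ fixup: return -EFAULT or similar
 */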

#ifdef CONFIG_SMP
#define ALT_SMP(instr...)					\
9998:	instr
/*
 * Note: if you get assembler errors from ALT_UP() when building with
 * CONFIG_THUMB2_KERNEL, you almost certainly need to use
 * ALT_SMP( W(instr) ... )
 */
#define ALT_UP(instr...)					\
	.pushsection ".alt.smp.init", "a"			;\
	.long	9998b						;\
9997:	instr							;\
	.if . - 9997b == 2					;\
		nop						;\
	.endif							;\
	.if . - 9997b != 4					;\
		.error "ALT_UP() content must assemble to exactly 4 bytes";\
	.endif							;\
	.popsection
#define ALT_UP_B(label)					\
	.equ	up_b_offset, label - 9998b			;\
	.pushsection ".alt.smp.init", "a"			;\
	.long	9998b						;\
	W(b)	. + up_b_offset					;\
	.popsection
#else
#define ALT_SMP(instr...)
#define ALT_UP(instr...) instr
#define ALT_UP_B(label) b label
#endif
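
/*
 * Illustrative sketch (not part of the original header): each ALT_SMP() is
 * paired with an ALT_UP() giving the instruction that boot code patches in
 * when the kernel finds itself on a uniprocessor machine, as the smp_dmb
 * macro below does:
 *
 *	ALT_SMP(dmb	ish)		@ needed when running SMP
 *	ALT_UP(nop)			@ patched in on UP
 */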

/*
 * Instruction barrier
 */
	.macro	instr_sync
#if __LINUX_ARM_ARCH__ >= 7
	isb
#elif __LINUX_ARM_ARCH__ == 6
	mcr	p15, 0, r0, c7, c5, 4
#endif
	.endm
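
/*
 * Illustrative sketch (not part of the original header): boot code issues
 * instr_sync after writing a CP15 control register so the change takes
 * effect before the next instruction, e.g.
 *
 *	mcr	p15, 0, r0, c1, c0, 0	@ write SCTLR
 *	instr_sync
 */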

/*
 * SMP data memory barrier
 */
	.macro	smp_dmb mode
#ifdef CONFIG_SMP
#if __LINUX_ARM_ARCH__ >= 7
	.ifeqs "\mode","arm"
	ALT_SMP(dmb	ish)
	.else
	ALT_SMP(W(dmb)	ish)
	.endif
#elif __LINUX_ARM_ARCH__ == 6
	ALT_SMP(mcr	p15, 0, r0, c7, c10, 5)	@ dmb
#else
#error Incompatible SMP platform
#endif
	.ifeqs "\mode","arm"
	ALT_UP(nop)
	.else
	ALT_UP(W(nop))
	.endif
#endif
	.endm
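
/*
 * Illustrative sketch (not part of the original header): read-modify-write
 * sequences that must be ordered on SMP bracket the exclusive-access loop
 * with smp_dmb; the optional mode argument ("arm") selects the narrow ARM
 * encoding where the caller requires it:
 *
 *	smp_dmb
 *	@ ... ldrex/strex retry loop ...
 *	smp_dmb
 */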

#if defined(CONFIG_CPU_V7M)
	/*
	 * setmode is used to assert that the CPU is in SVC mode during boot.
	 * For v7-M this is done in __v7m_setup, so setmode can be empty here.
	 */
	.macro	setmode, mode, reg
	.endm
#elif defined(CONFIG_THUMB2_KERNEL)
	.macro	setmode, mode, reg
	mov	\reg, #\mode
	msr	cpsr_c, \reg
	.endm
#else
	.macro	setmode, mode, reg
	msr	cpsr_c, #\mode
	.endm
#endif

/*
 * Helper macro to enter SVC mode cleanly and mask interrupts. reg is
 * a scratch register for the macro to overwrite.
 *
 * This macro is intended for forcing the CPU into SVC mode at boot time.
 * You cannot return to the original mode.
 */
.macro safe_svcmode_maskall reg:req
#if __LINUX_ARM_ARCH__ >= 6 && !defined(CONFIG_CPU_V7M)
	mrs	\reg , cpsr
	eor	\reg, \reg, #HYP_MODE
	tst	\reg, #MODE_MASK
	bic	\reg , \reg , #MODE_MASK
	orr	\reg , \reg , #PSR_I_BIT | PSR_F_BIT | SVC_MODE
THUMB(	orr	\reg , \reg , #PSR_T_BIT	)
	bne	1f
	orr	\reg, \reg, #PSR_A_BIT
	adr	lr, BSYM(2f)
	msr	spsr_cxsf, \reg
	__MSR_ELR_HYP(14)
	__ERET
1:	msr	cpsr_c, \reg
2:
#else
/*
 * workaround for possibly broken pre-v6 hardware
 * (akita, Sharp Zaurus C-1000, PXA270-based)
 */
	setmode	PSR_F_BIT | PSR_I_BIT | SVC_MODE, \reg
#endif
.endm
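
/*
 * Illustrative sketch (not part of the original header): the boot path calls
 * this once, very early, with any register it is free to clobber:
 *
 *	safe_svcmode_maskall r9		@ drop from HYP (if entered there) to SVC
 */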

/*
 * STRT/LDRT access macros with ARM and Thumb-2 variants
 */
#ifdef CONFIG_THUMB2_KERNEL

	.macro	usraccoff, instr, reg, ptr, inc, off, cond, abort, t=TUSER()
9999:
	.if	\inc == 1
	\instr\cond\()b\()\t\().w \reg, [\ptr, #\off]
	.elseif	\inc == 4
	\instr\cond\()\t\().w \reg, [\ptr, #\off]
	.else
	.error	"Unsupported inc macro argument"
	.endif

	.pushsection __ex_table,"a"
	.align	3
	.long	9999b, \abort
	.popsection
	.endm

	.macro	usracc, instr, reg, ptr, inc, cond, rept, abort
	@ explicit IT instruction needed because of the label
	@ introduced by the USER macro
	.ifnc	\cond,al
	.if	\rept == 1
	itt	\cond
	.elseif	\rept == 2
	ittt	\cond
	.else
	.error	"Unsupported rept macro argument"
	.endif
	.endif

	@ Slightly optimised to avoid incrementing the pointer twice
	usraccoff \instr, \reg, \ptr, \inc, 0, \cond, \abort
	.if	\rept == 2
	usraccoff \instr, \reg, \ptr, \inc, \inc, \cond, \abort
	.endif

	add\cond \ptr, #\rept * \inc
	.endm

#else	/* !CONFIG_THUMB2_KERNEL */

	.macro	usracc, instr, reg, ptr, inc, cond, rept, abort, t=TUSER()
	.rept	\rept
9999:
	.if	\inc == 1
	\instr\cond\()b\()\t \reg, [\ptr], #\inc
	.elseif	\inc == 4
	\instr\cond\()\t \reg, [\ptr], #\inc
	.else
	.error	"Unsupported inc macro argument"
	.endif

	.pushsection __ex_table,"a"
	.align	3
	.long	9999b, \abort
	.popsection
	.endr
	.endm

#endif	/* CONFIG_THUMB2_KERNEL */

	.macro	strusr, reg, ptr, inc, cond=al, rept=1, abort=9001f
	usracc	str, \reg, \ptr, \inc, \cond, \rept, \abort
	.endm

	.macro	ldrusr, reg, ptr, inc, cond=al, rept=1, abort=9001f
	usracc	ldr, \reg, \ptr, \inc, \cond, \rept, \abort
	.endm
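
/*
 * Illustrative sketch (not part of the original header): uaccess routines
 * emit an unprivileged post-indexed access plus its exception table entry in
 * one line; the default abort target is the local label 9001:
 *
 *	ldrusr	r3, r1, 4		@ r3 = *user_ptr++, faults go to 9001f
 *	@ ...
 * 9001:	@ fixup code
 */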

/* Utility macro for declaring string literals */
	.macro	string name:req, string
	.type \name , #object
\name:
	.asciz "\string"
	.size \name , . - \name
	.endm
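
/*
 * Illustrative sketch (not part of the original header): processor support
 * files declare their name strings with this macro, e.g.
 *
 *	string	cpu_elf_name, "v7"
 */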

	.macro check_uaccess, addr:req, size:req, limit:req, tmp:req, bad:req
#ifndef CONFIG_CPU_USE_DOMAINS
	adds	\tmp, \addr, #\size - 1
	sbcccs	\tmp, \tmp, \limit
	bcs	\bad
#endif
	.endm
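
/*
 * Illustrative sketch (not part of the original header): __get_user-style
 * helpers validate the user pointer against the address limit before the
 * access; the register and label choices below assume the caller passes the
 * limit in r1 and are not prescriptive:
 *
 *	check_uaccess r0, 4, r1, r2, __get_user_bad
 */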

	.irp	c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
	.macro	ret\c, reg
#if __LINUX_ARM_ARCH__ < 6
	mov\c	pc, \reg
#else
	.ifeqs	"\reg", "lr"
	bx\c	\reg
	.else
	mov\c	pc, \reg
	.endif
#endif
	.endm
	.endr

	.macro	ret.w, reg
	ret	\reg
#ifdef CONFIG_THUMB2_KERNEL
	nop
#endif
	.endm
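
/*
 * Illustrative sketch (not part of the original header): function returns use
 * these macros so the same source assembles to an interworking return on
 * ARMv6+ and to a plain "mov pc" on older CPUs:
 *
 *	ret	lr			@ bx lr on >= ARMv6, mov pc, lr otherwise
 *	retne	lr			@ conditional variant from the .irp list
 */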

#endif /* __ASM_ASSEMBLER_H__ */