1d2912cb1SThomas Gleixner /* SPDX-License-Identifier: GPL-2.0-only */ 24baa9922SRussell King /* 34baa9922SRussell King * arch/arm/include/asm/assembler.h 44baa9922SRussell King * 54baa9922SRussell King * Copyright (C) 1996-2000 Russell King 64baa9922SRussell King * 74baa9922SRussell King * This file contains arm architecture specific defines 84baa9922SRussell King * for the different processors. 94baa9922SRussell King * 104baa9922SRussell King * Do not include any C declarations in this file - it is included by 114baa9922SRussell King * assembler source. 124baa9922SRussell King */ 132bc58a6fSMagnus Damm #ifndef __ASM_ASSEMBLER_H__ 142bc58a6fSMagnus Damm #define __ASM_ASSEMBLER_H__ 152bc58a6fSMagnus Damm 164baa9922SRussell King #ifndef __ASSEMBLY__ 174baa9922SRussell King #error "Only include this from assembly code" 184baa9922SRussell King #endif 194baa9922SRussell King 204baa9922SRussell King #include <asm/ptrace.h> 2180c59dafSDave Martin #include <asm/opcodes-virt.h> 220b1f68e8SCatalin Marinas #include <asm/asm-offsets.h> 239a2b51b6SAndrey Ryabinin #include <asm/page.h> 249a2b51b6SAndrey Ryabinin #include <asm/thread_info.h> 25747ffc2fSRussell King #include <asm/uaccess-asm.h> 264baa9922SRussell King 276f6f6a70SRob Herring #define IOMEM(x) (x) 286f6f6a70SRob Herring 294baa9922SRussell King /* 304baa9922SRussell King * Endian independent macros for shifting bytes within registers. 
314baa9922SRussell King */ 324baa9922SRussell King #ifndef __ARMEB__ 33d98b90eaSVictor Kamensky #define lspull lsr 34d98b90eaSVictor Kamensky #define lspush lsl 354baa9922SRussell King #define get_byte_0 lsl #0 364baa9922SRussell King #define get_byte_1 lsr #8 374baa9922SRussell King #define get_byte_2 lsr #16 384baa9922SRussell King #define get_byte_3 lsr #24 394baa9922SRussell King #define put_byte_0 lsl #0 404baa9922SRussell King #define put_byte_1 lsl #8 414baa9922SRussell King #define put_byte_2 lsl #16 424baa9922SRussell King #define put_byte_3 lsl #24 434baa9922SRussell King #else 44d98b90eaSVictor Kamensky #define lspull lsl 45d98b90eaSVictor Kamensky #define lspush lsr 464baa9922SRussell King #define get_byte_0 lsr #24 474baa9922SRussell King #define get_byte_1 lsr #16 484baa9922SRussell King #define get_byte_2 lsr #8 494baa9922SRussell King #define get_byte_3 lsl #0 504baa9922SRussell King #define put_byte_0 lsl #24 514baa9922SRussell King #define put_byte_1 lsl #16 524baa9922SRussell King #define put_byte_2 lsl #8 534baa9922SRussell King #define put_byte_3 lsl #0 544baa9922SRussell King #endif 554baa9922SRussell King 56457c2403SBen Dooks /* Select code for any configuration running in BE8 mode */ 57457c2403SBen Dooks #ifdef CONFIG_CPU_ENDIAN_BE8 58457c2403SBen Dooks #define ARM_BE8(code...) code 59457c2403SBen Dooks #else 60457c2403SBen Dooks #define ARM_BE8(code...) 61457c2403SBen Dooks #endif 62457c2403SBen Dooks 634baa9922SRussell King /* 644baa9922SRussell King * Data preload for architectures that support it 654baa9922SRussell King */ 664baa9922SRussell King #if __LINUX_ARM_ARCH__ >= 5 674baa9922SRussell King #define PLD(code...) code 684baa9922SRussell King #else 694baa9922SRussell King #define PLD(code...) 704baa9922SRussell King #endif 714baa9922SRussell King 724baa9922SRussell King /* 734baa9922SRussell King * This can be used to enable code to cacheline align the destination 744baa9922SRussell King * pointer when bulk writing to memory. 
Experiments on StrongARM and 754baa9922SRussell King * XScale didn't show this a worthwhile thing to do when the cache is not 764baa9922SRussell King * set to write-allocate (this would need further testing on XScale when WA 774baa9922SRussell King * is used). 784baa9922SRussell King * 794baa9922SRussell King * On Feroceon there is much to gain however, regardless of cache mode. 804baa9922SRussell King */ 814baa9922SRussell King #ifdef CONFIG_CPU_FEROCEON 824baa9922SRussell King #define CALGN(code...) code 834baa9922SRussell King #else 844baa9922SRussell King #define CALGN(code...) 854baa9922SRussell King #endif 864baa9922SRussell King 87ffa47aa6SArnd Bergmann #define IMM12_MASK 0xfff 88ffa47aa6SArnd Bergmann 89d4664b6cSArd Biesheuvel /* the frame pointer used for stack unwinding */ 90d4664b6cSArd Biesheuvel ARM( fpreg .req r11 ) 91d4664b6cSArd Biesheuvel THUMB( fpreg .req r7 ) 92d4664b6cSArd Biesheuvel 934baa9922SRussell King /* 944baa9922SRussell King * Enable and disable interrupts 954baa9922SRussell King */ 964baa9922SRussell King #if __LINUX_ARM_ARCH__ >= 6 970d928b0bSUwe Kleine-König .macro disable_irq_notrace 984baa9922SRussell King cpsid i 994baa9922SRussell King .endm 1004baa9922SRussell King 1010d928b0bSUwe Kleine-König .macro enable_irq_notrace 1024baa9922SRussell King cpsie i 1034baa9922SRussell King .endm 1044baa9922SRussell King #else 1050d928b0bSUwe Kleine-König .macro disable_irq_notrace 1064baa9922SRussell King msr cpsr_c, #PSR_I_BIT | SVC_MODE 1074baa9922SRussell King .endm 1084baa9922SRussell King 1090d928b0bSUwe Kleine-König .macro enable_irq_notrace 1104baa9922SRussell King msr cpsr_c, #SVC_MODE 1114baa9922SRussell King .endm 1124baa9922SRussell King #endif 1134baa9922SRussell King 114b9baf5c8SRussell King (Oracle) #if __LINUX_ARM_ARCH__ < 7 115b9baf5c8SRussell King (Oracle) .macro dsb, args 116b9baf5c8SRussell King (Oracle) mcr p15, 0, r0, c7, c10, 4 117b9baf5c8SRussell King (Oracle) .endm 118b9baf5c8SRussell King (Oracle) 119b9baf5c8SRussell 
King (Oracle) .macro isb, args 12033970b03SRussell King (Oracle) mcr p15, 0, r0, c7, c5, 4 121b9baf5c8SRussell King (Oracle) .endm 122b9baf5c8SRussell King (Oracle) #endif 123b9baf5c8SRussell King (Oracle) 1243302caddSRussell King .macro asm_trace_hardirqs_off, save=1 1250d928b0bSUwe Kleine-König #if defined(CONFIG_TRACE_IRQFLAGS) 1263302caddSRussell King .if \save 1270d928b0bSUwe Kleine-König stmdb sp!, {r0-r3, ip, lr} 1283302caddSRussell King .endif 1290d928b0bSUwe Kleine-König bl trace_hardirqs_off 1303302caddSRussell King .if \save 1310d928b0bSUwe Kleine-König ldmia sp!, {r0-r3, ip, lr} 1323302caddSRussell King .endif 1330d928b0bSUwe Kleine-König #endif 1340d928b0bSUwe Kleine-König .endm 1350d928b0bSUwe Kleine-König 1363302caddSRussell King .macro asm_trace_hardirqs_on, cond=al, save=1 1370d928b0bSUwe Kleine-König #if defined(CONFIG_TRACE_IRQFLAGS) 1380d928b0bSUwe Kleine-König /* 1390d928b0bSUwe Kleine-König * actually the registers should be pushed and pop'd conditionally, but 1400d928b0bSUwe Kleine-König * after bl the flags are certainly clobbered 1410d928b0bSUwe Kleine-König */ 1423302caddSRussell King .if \save 1430d928b0bSUwe Kleine-König stmdb sp!, {r0-r3, ip, lr} 1443302caddSRussell King .endif 1450d928b0bSUwe Kleine-König bl\cond trace_hardirqs_on 1463302caddSRussell King .if \save 1470d928b0bSUwe Kleine-König ldmia sp!, {r0-r3, ip, lr} 1483302caddSRussell King .endif 1490d928b0bSUwe Kleine-König #endif 1500d928b0bSUwe Kleine-König .endm 1510d928b0bSUwe Kleine-König 1523302caddSRussell King .macro disable_irq, save=1 1530d928b0bSUwe Kleine-König disable_irq_notrace 1543302caddSRussell King asm_trace_hardirqs_off \save 1550d928b0bSUwe Kleine-König .endm 1560d928b0bSUwe Kleine-König 1570d928b0bSUwe Kleine-König .macro enable_irq 1580d928b0bSUwe Kleine-König asm_trace_hardirqs_on 1590d928b0bSUwe Kleine-König enable_irq_notrace 1600d928b0bSUwe Kleine-König .endm 1614baa9922SRussell King /* 1624baa9922SRussell King * Save the current IRQ state and disable 
IRQs. Note that this macro 1634baa9922SRussell King * assumes FIQs are enabled, and that the processor is in SVC mode. 1644baa9922SRussell King */ 1654baa9922SRussell King .macro save_and_disable_irqs, oldcpsr 16655bdd694SCatalin Marinas #ifdef CONFIG_CPU_V7M 16755bdd694SCatalin Marinas mrs \oldcpsr, primask 16855bdd694SCatalin Marinas #else 1694baa9922SRussell King mrs \oldcpsr, cpsr 17055bdd694SCatalin Marinas #endif 1714baa9922SRussell King disable_irq 1724baa9922SRussell King .endm 1734baa9922SRussell King 1748e43a905SRabin Vincent .macro save_and_disable_irqs_notrace, oldcpsr 175b2bf482aSVladimir Murzin #ifdef CONFIG_CPU_V7M 176b2bf482aSVladimir Murzin mrs \oldcpsr, primask 177b2bf482aSVladimir Murzin #else 1788e43a905SRabin Vincent mrs \oldcpsr, cpsr 179b2bf482aSVladimir Murzin #endif 1808e43a905SRabin Vincent disable_irq_notrace 1818e43a905SRabin Vincent .endm 1828e43a905SRabin Vincent 1834baa9922SRussell King /* 1844baa9922SRussell King * Restore interrupt state previously stored in a register. We don't 1854baa9922SRussell King * guarantee that this will preserve the flags. 1864baa9922SRussell King */ 1870d928b0bSUwe Kleine-König .macro restore_irqs_notrace, oldcpsr 18855bdd694SCatalin Marinas #ifdef CONFIG_CPU_V7M 18955bdd694SCatalin Marinas msr primask, \oldcpsr 19055bdd694SCatalin Marinas #else 1914baa9922SRussell King msr cpsr_c, \oldcpsr 19255bdd694SCatalin Marinas #endif 1934baa9922SRussell King .endm 1944baa9922SRussell King 1950d928b0bSUwe Kleine-König .macro restore_irqs, oldcpsr 1960d928b0bSUwe Kleine-König tst \oldcpsr, #PSR_I_BIT 19701e09a28SRussell King asm_trace_hardirqs_on cond=eq 1980d928b0bSUwe Kleine-König restore_irqs_notrace \oldcpsr 1990d928b0bSUwe Kleine-König .endm 2000d928b0bSUwe Kleine-König 20139ad04ccSCatalin Marinas /* 20214327c66SRussell King * Assembly version of "adr rd, BSYM(sym)". 
This should only be used to 20314327c66SRussell King * reference local symbols in the same assembly file which are to be 20414327c66SRussell King * resolved by the assembler. Other usage is undefined. 20514327c66SRussell King */ 20614327c66SRussell King .irp c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo 20714327c66SRussell King .macro badr\c, rd, sym 20814327c66SRussell King #ifdef CONFIG_THUMB2_KERNEL 20914327c66SRussell King adr\c \rd, \sym + 1 21014327c66SRussell King #else 21114327c66SRussell King adr\c \rd, \sym 21214327c66SRussell King #endif 21314327c66SRussell King .endm 21414327c66SRussell King .endr 21514327c66SRussell King 21614327c66SRussell King /* 21739ad04ccSCatalin Marinas * Get current thread_info. 21839ad04ccSCatalin Marinas */ 21939ad04ccSCatalin Marinas .macro get_thread_info, rd 22018ed1c01SArd Biesheuvel /* thread_info is the first member of struct task_struct */ 22118ed1c01SArd Biesheuvel get_current \rd 22239ad04ccSCatalin Marinas .endm 22339ad04ccSCatalin Marinas 2240b1f68e8SCatalin Marinas /* 2250b1f68e8SCatalin Marinas * Increment/decrement the preempt count. 
2260b1f68e8SCatalin Marinas */ 2270b1f68e8SCatalin Marinas #ifdef CONFIG_PREEMPT_COUNT 2280b1f68e8SCatalin Marinas .macro inc_preempt_count, ti, tmp 2290b1f68e8SCatalin Marinas ldr \tmp, [\ti, #TI_PREEMPT] @ get preempt count 2300b1f68e8SCatalin Marinas add \tmp, \tmp, #1 @ increment it 2310b1f68e8SCatalin Marinas str \tmp, [\ti, #TI_PREEMPT] 2320b1f68e8SCatalin Marinas .endm 2330b1f68e8SCatalin Marinas 2340b1f68e8SCatalin Marinas .macro dec_preempt_count, ti, tmp 2350b1f68e8SCatalin Marinas ldr \tmp, [\ti, #TI_PREEMPT] @ get preempt count 2360b1f68e8SCatalin Marinas sub \tmp, \tmp, #1 @ decrement it 2370b1f68e8SCatalin Marinas str \tmp, [\ti, #TI_PREEMPT] 2380b1f68e8SCatalin Marinas .endm 2390b1f68e8SCatalin Marinas #else 2400b1f68e8SCatalin Marinas .macro inc_preempt_count, ti, tmp 2410b1f68e8SCatalin Marinas .endm 2420b1f68e8SCatalin Marinas 2430b1f68e8SCatalin Marinas .macro dec_preempt_count, ti, tmp 2440b1f68e8SCatalin Marinas .endm 2450b1f68e8SCatalin Marinas #endif 2460b1f68e8SCatalin Marinas 247f441882aSVincent Whitchurch #define USERL(l, x...) \ 2484baa9922SRussell King 9999: x; \ 2494260415fSRussell King .pushsection __ex_table,"a"; \ 2504baa9922SRussell King .align 3; \ 251f441882aSVincent Whitchurch .long 9999b,l; \ 2524260415fSRussell King .popsection 253bac4e960SRussell King 254f441882aSVincent Whitchurch #define USER(x...) USERL(9001f, x) 255f441882aSVincent Whitchurch 256f00ec48fSRussell King #ifdef CONFIG_SMP 257f00ec48fSRussell King #define ALT_SMP(instr...) \ 258f00ec48fSRussell King 9998: instr 259ed3768a8SDave Martin /* 260ed3768a8SDave Martin * Note: if you get assembler errors from ALT_UP() when building with 261ed3768a8SDave Martin * CONFIG_THUMB2_KERNEL, you almost certainly need to use 262ed3768a8SDave Martin * ALT_SMP( W(instr) ... ) 263ed3768a8SDave Martin */ 264f00ec48fSRussell King #define ALT_UP(instr...) 
\ 265f00ec48fSRussell King .pushsection ".alt.smp.init", "a" ;\ 2669f80ccdaSArd Biesheuvel .align 2 ;\ 267450abd38SArd Biesheuvel .long 9998b - . ;\ 268ed3768a8SDave Martin 9997: instr ;\ 26989c6bc58SRussell King .if . - 9997b == 2 ;\ 27089c6bc58SRussell King nop ;\ 27189c6bc58SRussell King .endif ;\ 272ed3768a8SDave Martin .if . - 9997b != 4 ;\ 273ed3768a8SDave Martin .error "ALT_UP() content must assemble to exactly 4 bytes";\ 274ed3768a8SDave Martin .endif ;\ 275f00ec48fSRussell King .popsection 276f00ec48fSRussell King #define ALT_UP_B(label) \ 277f00ec48fSRussell King .pushsection ".alt.smp.init", "a" ;\ 2789f80ccdaSArd Biesheuvel .align 2 ;\ 279450abd38SArd Biesheuvel .long 9998b - . ;\ 280a780e485SJian Cai W(b) . + (label - 9998b) ;\ 281f00ec48fSRussell King .popsection 282f00ec48fSRussell King #else 283f00ec48fSRussell King #define ALT_SMP(instr...) 284f00ec48fSRussell King #define ALT_UP(instr...) instr 285f00ec48fSRussell King #define ALT_UP_B(label) b label 286f00ec48fSRussell King #endif 287f00ec48fSRussell King 288bac4e960SRussell King /* 2897b9896c3SArd Biesheuvel * this_cpu_offset - load the per-CPU offset of this CPU into 2907b9896c3SArd Biesheuvel * register 'rd' 2917b9896c3SArd Biesheuvel */ 2927b9896c3SArd Biesheuvel .macro this_cpu_offset, rd:req 2937b9896c3SArd Biesheuvel #ifdef CONFIG_SMP 2947b9896c3SArd Biesheuvel ALT_SMP(mrc p15, 0, \rd, c13, c0, 4) 2957b9896c3SArd Biesheuvel #ifdef CONFIG_CPU_V6 2967b9896c3SArd Biesheuvel ALT_UP_B(.L1_\@) 2977b9896c3SArd Biesheuvel .L0_\@: 2987b9896c3SArd Biesheuvel .subsection 1 2997b9896c3SArd Biesheuvel .L1_\@: ldr_va \rd, __per_cpu_offset 3007b9896c3SArd Biesheuvel b .L0_\@ 3017b9896c3SArd Biesheuvel .previous 3027b9896c3SArd Biesheuvel #endif 3037b9896c3SArd Biesheuvel #else 3047b9896c3SArd Biesheuvel mov \rd, #0 3057b9896c3SArd Biesheuvel #endif 3067b9896c3SArd Biesheuvel .endm 3077b9896c3SArd Biesheuvel 3087b9896c3SArd Biesheuvel /* 3099c46929eSArd Biesheuvel * set_current - store the task pointer of 
this CPU's current task 3109c46929eSArd Biesheuvel */ 3119c46929eSArd Biesheuvel .macro set_current, rn:req, tmp:req 3129c46929eSArd Biesheuvel #if defined(CONFIG_CURRENT_POINTER_IN_TPIDRURO) || defined(CONFIG_SMP) 3139c46929eSArd Biesheuvel 9998: mcr p15, 0, \rn, c13, c0, 3 @ set TPIDRURO register 3149c46929eSArd Biesheuvel #ifdef CONFIG_CPU_V6 3159c46929eSArd Biesheuvel ALT_UP_B(.L0_\@) 3169c46929eSArd Biesheuvel .subsection 1 3179c46929eSArd Biesheuvel .L0_\@: str_va \rn, __current, \tmp 3189c46929eSArd Biesheuvel b .L1_\@ 3199c46929eSArd Biesheuvel .previous 3209c46929eSArd Biesheuvel .L1_\@: 3219c46929eSArd Biesheuvel #endif 3229c46929eSArd Biesheuvel #else 3239c46929eSArd Biesheuvel str_va \rn, __current, \tmp 3249c46929eSArd Biesheuvel #endif 3259c46929eSArd Biesheuvel .endm 3269c46929eSArd Biesheuvel 3279c46929eSArd Biesheuvel /* 3289c46929eSArd Biesheuvel * get_current - load the task pointer of this CPU's current task 3299c46929eSArd Biesheuvel */ 3309c46929eSArd Biesheuvel .macro get_current, rd:req 3319c46929eSArd Biesheuvel #if defined(CONFIG_CURRENT_POINTER_IN_TPIDRURO) || defined(CONFIG_SMP) 3329c46929eSArd Biesheuvel 9998: mrc p15, 0, \rd, c13, c0, 3 @ get TPIDRURO register 3339c46929eSArd Biesheuvel #ifdef CONFIG_CPU_V6 3349c46929eSArd Biesheuvel ALT_UP_B(.L0_\@) 3359c46929eSArd Biesheuvel .subsection 1 3369c46929eSArd Biesheuvel .L0_\@: ldr_va \rd, __current 3379c46929eSArd Biesheuvel b .L1_\@ 3389c46929eSArd Biesheuvel .previous 3399c46929eSArd Biesheuvel .L1_\@: 3409c46929eSArd Biesheuvel #endif 3419c46929eSArd Biesheuvel #else 3429c46929eSArd Biesheuvel ldr_va \rd, __current 3439c46929eSArd Biesheuvel #endif 3449c46929eSArd Biesheuvel .endm 3459c46929eSArd Biesheuvel 3469c46929eSArd Biesheuvel /* 3479c46929eSArd Biesheuvel * reload_current - reload the task pointer of this CPU's current task 3489c46929eSArd Biesheuvel * into the TLS register 3499c46929eSArd Biesheuvel */ 3509c46929eSArd Biesheuvel .macro reload_current, t1:req, t2:req 
3519c46929eSArd Biesheuvel #if defined(CONFIG_CURRENT_POINTER_IN_TPIDRURO) || defined(CONFIG_SMP) 3529c46929eSArd Biesheuvel #ifdef CONFIG_CPU_V6 3539c46929eSArd Biesheuvel ALT_SMP(nop) 3549c46929eSArd Biesheuvel ALT_UP_B(.L0_\@) 3559c46929eSArd Biesheuvel #endif 3569c46929eSArd Biesheuvel ldr_this_cpu \t1, __entry_task, \t1, \t2 3579c46929eSArd Biesheuvel mcr p15, 0, \t1, c13, c0, 3 @ store in TPIDRURO 3589c46929eSArd Biesheuvel .L0_\@: 3599c46929eSArd Biesheuvel #endif 3609c46929eSArd Biesheuvel .endm 3619c46929eSArd Biesheuvel 3629c46929eSArd Biesheuvel /* 363d675d0bcSWill Deacon * Instruction barrier 364d675d0bcSWill Deacon */ 365d675d0bcSWill Deacon .macro instr_sync 366d675d0bcSWill Deacon #if __LINUX_ARM_ARCH__ >= 7 367d675d0bcSWill Deacon isb 368d675d0bcSWill Deacon #elif __LINUX_ARM_ARCH__ == 6 369d675d0bcSWill Deacon mcr p15, 0, r0, c7, c5, 4 370d675d0bcSWill Deacon #endif 371d675d0bcSWill Deacon .endm 372d675d0bcSWill Deacon 373d675d0bcSWill Deacon /* 374bac4e960SRussell King * SMP data memory barrier 375bac4e960SRussell King */ 376ed3768a8SDave Martin .macro smp_dmb mode 377bac4e960SRussell King #ifdef CONFIG_SMP 378bac4e960SRussell King #if __LINUX_ARM_ARCH__ >= 7 379ed3768a8SDave Martin .ifeqs "\mode","arm" 3803ea12806SWill Deacon ALT_SMP(dmb ish) 381ed3768a8SDave Martin .else 3823ea12806SWill Deacon ALT_SMP(W(dmb) ish) 383ed3768a8SDave Martin .endif 384bac4e960SRussell King #elif __LINUX_ARM_ARCH__ == 6 385f00ec48fSRussell King ALT_SMP(mcr p15, 0, r0, c7, c10, 5) @ dmb 386f00ec48fSRussell King #else 387f00ec48fSRussell King #error Incompatible SMP platform 388bac4e960SRussell King #endif 389ed3768a8SDave Martin .ifeqs "\mode","arm" 390f00ec48fSRussell King ALT_UP(nop) 391ed3768a8SDave Martin .else 392ed3768a8SDave Martin ALT_UP(W(nop)) 393ed3768a8SDave Martin .endif 394bac4e960SRussell King #endif 395bac4e960SRussell King .endm 396b86040a5SCatalin Marinas 397*dda5f312SMark Rutland /* 398*dda5f312SMark Rutland * Raw SMP data memory barrier 
399*dda5f312SMark Rutland */ 400*dda5f312SMark Rutland .macro __smp_dmb mode 401*dda5f312SMark Rutland #if __LINUX_ARM_ARCH__ >= 7 402*dda5f312SMark Rutland .ifeqs "\mode","arm" 403*dda5f312SMark Rutland dmb ish 404*dda5f312SMark Rutland .else 405*dda5f312SMark Rutland W(dmb) ish 406*dda5f312SMark Rutland .endif 407*dda5f312SMark Rutland #elif __LINUX_ARM_ARCH__ == 6 408*dda5f312SMark Rutland mcr p15, 0, r0, c7, c10, 5 @ dmb 409*dda5f312SMark Rutland #else 410*dda5f312SMark Rutland .error "Incompatible SMP platform" 411*dda5f312SMark Rutland #endif 412*dda5f312SMark Rutland .endm 413*dda5f312SMark Rutland 41455bdd694SCatalin Marinas #if defined(CONFIG_CPU_V7M) 41555bdd694SCatalin Marinas /* 41655bdd694SCatalin Marinas * setmode is used to assert to be in svc mode during boot. For v7-M 41755bdd694SCatalin Marinas * this is done in __v7m_setup, so setmode can be empty here. 41855bdd694SCatalin Marinas */ 41955bdd694SCatalin Marinas .macro setmode, mode, reg 42055bdd694SCatalin Marinas .endm 42155bdd694SCatalin Marinas #elif defined(CONFIG_THUMB2_KERNEL) 422b86040a5SCatalin Marinas .macro setmode, mode, reg 423b86040a5SCatalin Marinas mov \reg, #\mode 424b86040a5SCatalin Marinas msr cpsr_c, \reg 425b86040a5SCatalin Marinas .endm 426b86040a5SCatalin Marinas #else 427b86040a5SCatalin Marinas .macro setmode, mode, reg 428b86040a5SCatalin Marinas msr cpsr_c, #\mode 429b86040a5SCatalin Marinas .endm 430b86040a5SCatalin Marinas #endif 4318b592783SCatalin Marinas 4328b592783SCatalin Marinas /* 43380c59dafSDave Martin * Helper macro to enter SVC mode cleanly and mask interrupts. reg is 43480c59dafSDave Martin * a scratch register for the macro to overwrite. 43580c59dafSDave Martin * 43680c59dafSDave Martin * This macro is intended for forcing the CPU into SVC mode at boot time. 43780c59dafSDave Martin * you cannot return to the original mode. 
43880c59dafSDave Martin */ 43980c59dafSDave Martin .macro safe_svcmode_maskall reg:req 4400e0779daSLorenzo Pieralisi #if __LINUX_ARM_ARCH__ >= 6 && !defined(CONFIG_CPU_V7M) 44180c59dafSDave Martin mrs \reg , cpsr 4428e9c24a2SRussell King eor \reg, \reg, #HYP_MODE 4438e9c24a2SRussell King tst \reg, #MODE_MASK 44480c59dafSDave Martin bic \reg , \reg , #MODE_MASK 4458e9c24a2SRussell King orr \reg , \reg , #PSR_I_BIT | PSR_F_BIT | SVC_MODE 44680c59dafSDave Martin THUMB( orr \reg , \reg , #PSR_T_BIT ) 44780c59dafSDave Martin bne 1f 4482a552d5eSMarc Zyngier orr \reg, \reg, #PSR_A_BIT 44914327c66SRussell King badr lr, 2f 4502a552d5eSMarc Zyngier msr spsr_cxsf, \reg 45180c59dafSDave Martin __MSR_ELR_HYP(14) 45280c59dafSDave Martin __ERET 4532a552d5eSMarc Zyngier 1: msr cpsr_c, \reg 45480c59dafSDave Martin 2: 4551ecec696SDave Martin #else 4561ecec696SDave Martin /* 4571ecec696SDave Martin * workaround for possibly broken pre-v6 hardware 4581ecec696SDave Martin * (akita, Sharp Zaurus C-1000, PXA270-based) 4591ecec696SDave Martin */ 4601ecec696SDave Martin setmode PSR_F_BIT | PSR_I_BIT | SVC_MODE, \reg 4611ecec696SDave Martin #endif 46280c59dafSDave Martin .endm 46380c59dafSDave Martin 46480c59dafSDave Martin /* 4658b592783SCatalin Marinas * STRT/LDRT access macros with ARM and Thumb-2 variants 4668b592783SCatalin Marinas */ 4678b592783SCatalin Marinas #ifdef CONFIG_THUMB2_KERNEL 4688b592783SCatalin Marinas 4694e7682d0SCatalin Marinas .macro usraccoff, instr, reg, ptr, inc, off, cond, abort, t=TUSER() 4708b592783SCatalin Marinas 9999: 4718b592783SCatalin Marinas .if \inc == 1 472c001899aSStefan Agner \instr\()b\t\cond\().w \reg, [\ptr, #\off] 4738b592783SCatalin Marinas .elseif \inc == 4 474c001899aSStefan Agner \instr\t\cond\().w \reg, [\ptr, #\off] 4758b592783SCatalin Marinas .else 4768b592783SCatalin Marinas .error "Unsupported inc macro argument" 4778b592783SCatalin Marinas .endif 4788b592783SCatalin Marinas 4794260415fSRussell King .pushsection __ex_table,"a" 
4808b592783SCatalin Marinas .align 3 4818b592783SCatalin Marinas .long 9999b, \abort 4824260415fSRussell King .popsection 4838b592783SCatalin Marinas .endm 4848b592783SCatalin Marinas 4858b592783SCatalin Marinas .macro usracc, instr, reg, ptr, inc, cond, rept, abort 4868b592783SCatalin Marinas @ explicit IT instruction needed because of the label 4878b592783SCatalin Marinas @ introduced by the USER macro 4888b592783SCatalin Marinas .ifnc \cond,al 4898b592783SCatalin Marinas .if \rept == 1 4908b592783SCatalin Marinas itt \cond 4918b592783SCatalin Marinas .elseif \rept == 2 4928b592783SCatalin Marinas ittt \cond 4938b592783SCatalin Marinas .else 4948b592783SCatalin Marinas .error "Unsupported rept macro argument" 4958b592783SCatalin Marinas .endif 4968b592783SCatalin Marinas .endif 4978b592783SCatalin Marinas 4988b592783SCatalin Marinas @ Slightly optimised to avoid incrementing the pointer twice 4998b592783SCatalin Marinas usraccoff \instr, \reg, \ptr, \inc, 0, \cond, \abort 5008b592783SCatalin Marinas .if \rept == 2 5011142b71dSWill Deacon usraccoff \instr, \reg, \ptr, \inc, \inc, \cond, \abort 5028b592783SCatalin Marinas .endif 5038b592783SCatalin Marinas 5048b592783SCatalin Marinas add\cond \ptr, #\rept * \inc 5058b592783SCatalin Marinas .endm 5068b592783SCatalin Marinas 5078b592783SCatalin Marinas #else /* !CONFIG_THUMB2_KERNEL */ 5088b592783SCatalin Marinas 5094e7682d0SCatalin Marinas .macro usracc, instr, reg, ptr, inc, cond, rept, abort, t=TUSER() 5108b592783SCatalin Marinas .rept \rept 5118b592783SCatalin Marinas 9999: 5128b592783SCatalin Marinas .if \inc == 1 513c001899aSStefan Agner \instr\()b\t\cond \reg, [\ptr], #\inc 5148b592783SCatalin Marinas .elseif \inc == 4 515c001899aSStefan Agner \instr\t\cond \reg, [\ptr], #\inc 5168b592783SCatalin Marinas .else 5178b592783SCatalin Marinas .error "Unsupported inc macro argument" 5188b592783SCatalin Marinas .endif 5198b592783SCatalin Marinas 5204260415fSRussell King .pushsection __ex_table,"a" 5218b592783SCatalin 
Marinas .align 3 5228b592783SCatalin Marinas .long 9999b, \abort 5234260415fSRussell King .popsection 5248b592783SCatalin Marinas .endr 5258b592783SCatalin Marinas .endm 5268b592783SCatalin Marinas 5278b592783SCatalin Marinas #endif /* CONFIG_THUMB2_KERNEL */ 5288b592783SCatalin Marinas 5298b592783SCatalin Marinas .macro strusr, reg, ptr, inc, cond=al, rept=1, abort=9001f 5308b592783SCatalin Marinas usracc str, \reg, \ptr, \inc, \cond, \rept, \abort 5318b592783SCatalin Marinas .endm 5328b592783SCatalin Marinas 5338b592783SCatalin Marinas .macro ldrusr, reg, ptr, inc, cond=al, rept=1, abort=9001f 5348b592783SCatalin Marinas usracc ldr, \reg, \ptr, \inc, \cond, \rept, \abort 5358b592783SCatalin Marinas .endm 5368f51965eSDave Martin 5378f51965eSDave Martin /* Utility macro for declaring string literals */ 5388f51965eSDave Martin .macro string name:req, string 5398f51965eSDave Martin .type \name , #object 5408f51965eSDave Martin \name: 5418f51965eSDave Martin .asciz "\string" 5428f51965eSDave Martin .size \name , . 
- \name 5438f51965eSDave Martin .endm 5448f51965eSDave Martin 5456ebbf2ceSRussell King .irp c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo 5466ebbf2ceSRussell King .macro ret\c, reg 5476ebbf2ceSRussell King #if __LINUX_ARM_ARCH__ < 6 5486ebbf2ceSRussell King mov\c pc, \reg 5496ebbf2ceSRussell King #else 5506ebbf2ceSRussell King .ifeqs "\reg", "lr" 5516ebbf2ceSRussell King bx\c \reg 5526ebbf2ceSRussell King .else 5536ebbf2ceSRussell King mov\c pc, \reg 5546ebbf2ceSRussell King .endif 5556ebbf2ceSRussell King #endif 5566ebbf2ceSRussell King .endm 5576ebbf2ceSRussell King .endr 5586ebbf2ceSRussell King 5596ebbf2ceSRussell King .macro ret.w, reg 5606ebbf2ceSRussell King ret \reg 5616ebbf2ceSRussell King #ifdef CONFIG_THUMB2_KERNEL 5626ebbf2ceSRussell King nop 5636ebbf2ceSRussell King #endif 5646ebbf2ceSRussell King .endm 5656ebbf2ceSRussell King 5668bafae20SRussell King .macro bug, msg, line 5678bafae20SRussell King #ifdef CONFIG_THUMB2_KERNEL 5688bafae20SRussell King 1: .inst 0xde02 5698bafae20SRussell King #else 5708bafae20SRussell King 1: .inst 0xe7f001f2 5718bafae20SRussell King #endif 5728bafae20SRussell King #ifdef CONFIG_DEBUG_BUGVERBOSE 5738bafae20SRussell King .pushsection .rodata.str, "aMS", %progbits, 1 5748bafae20SRussell King 2: .asciz "\msg" 5758bafae20SRussell King .popsection 5768bafae20SRussell King .pushsection __bug_table, "aw" 5778bafae20SRussell King .align 2 5788bafae20SRussell King .word 1b, 2b 5798bafae20SRussell King .hword \line 5808bafae20SRussell King .popsection 5818bafae20SRussell King #endif 5828bafae20SRussell King .endm 5838bafae20SRussell King 5840d73c3f8SMasami Hiramatsu #ifdef CONFIG_KPROBES 5850d73c3f8SMasami Hiramatsu #define _ASM_NOKPROBE(entry) \ 5860d73c3f8SMasami Hiramatsu .pushsection "_kprobe_blacklist", "aw" ; \ 5870d73c3f8SMasami Hiramatsu .balign 4 ; \ 5880d73c3f8SMasami Hiramatsu .long entry; \ 5890d73c3f8SMasami Hiramatsu .popsection 5900d73c3f8SMasami Hiramatsu #else 5910d73c3f8SMasami Hiramatsu #define 
_ASM_NOKPROBE(entry) 5920d73c3f8SMasami Hiramatsu #endif 5930d73c3f8SMasami Hiramatsu 5940b167463SArd Biesheuvel .macro __adldst_l, op, reg, sym, tmp, c 5950b167463SArd Biesheuvel .if __LINUX_ARM_ARCH__ < 7 5960b167463SArd Biesheuvel ldr\c \tmp, .La\@ 5970b167463SArd Biesheuvel .subsection 1 5980b167463SArd Biesheuvel .align 2 5990b167463SArd Biesheuvel .La\@: .long \sym - .Lpc\@ 6000b167463SArd Biesheuvel .previous 6010b167463SArd Biesheuvel .else 6020b167463SArd Biesheuvel .ifnb \c 6030b167463SArd Biesheuvel THUMB( ittt \c ) 6040b167463SArd Biesheuvel .endif 6050b167463SArd Biesheuvel movw\c \tmp, #:lower16:\sym - .Lpc\@ 6060b167463SArd Biesheuvel movt\c \tmp, #:upper16:\sym - .Lpc\@ 6070b167463SArd Biesheuvel .endif 6080b167463SArd Biesheuvel 6090b167463SArd Biesheuvel #ifndef CONFIG_THUMB2_KERNEL 6100b167463SArd Biesheuvel .set .Lpc\@, . + 8 // PC bias 6110b167463SArd Biesheuvel .ifc \op, add 6120b167463SArd Biesheuvel add\c \reg, \tmp, pc 6130b167463SArd Biesheuvel .else 6140b167463SArd Biesheuvel \op\c \reg, [pc, \tmp] 6150b167463SArd Biesheuvel .endif 6160b167463SArd Biesheuvel #else 6170b167463SArd Biesheuvel .Lb\@: add\c \tmp, \tmp, pc 6180b167463SArd Biesheuvel /* 6190b167463SArd Biesheuvel * In Thumb-2 builds, the PC bias depends on whether we are currently 6200b167463SArd Biesheuvel * emitting into a .arm or a .thumb section. The size of the add opcode 6210b167463SArd Biesheuvel * above will be 2 bytes when emitting in Thumb mode and 4 bytes when 6220b167463SArd Biesheuvel * emitting in ARM mode, so let's use this to account for the bias. 6230b167463SArd Biesheuvel */ 6240b167463SArd Biesheuvel .set .Lpc\@, . + (. 
- .Lb\@) 6250b167463SArd Biesheuvel 6260b167463SArd Biesheuvel .ifnc \op, add 6270b167463SArd Biesheuvel \op\c \reg, [\tmp] 6280b167463SArd Biesheuvel .endif 6290b167463SArd Biesheuvel #endif 6300b167463SArd Biesheuvel .endm 6310b167463SArd Biesheuvel 6320b167463SArd Biesheuvel /* 6330b167463SArd Biesheuvel * mov_l - move a constant value or [relocated] address into a register 6340b167463SArd Biesheuvel */ 6354e918ab1SArd Biesheuvel .macro mov_l, dst:req, imm:req, cond 6360b167463SArd Biesheuvel .if __LINUX_ARM_ARCH__ < 7 6374e918ab1SArd Biesheuvel ldr\cond \dst, =\imm 6380b167463SArd Biesheuvel .else 6394e918ab1SArd Biesheuvel movw\cond \dst, #:lower16:\imm 6404e918ab1SArd Biesheuvel movt\cond \dst, #:upper16:\imm 6410b167463SArd Biesheuvel .endif 6420b167463SArd Biesheuvel .endm 6430b167463SArd Biesheuvel 6440b167463SArd Biesheuvel /* 6450b167463SArd Biesheuvel * adr_l - adr pseudo-op with unlimited range 6460b167463SArd Biesheuvel * 6470b167463SArd Biesheuvel * @dst: destination register 6480b167463SArd Biesheuvel * @sym: name of the symbol 6490b167463SArd Biesheuvel * @cond: conditional opcode suffix 6500b167463SArd Biesheuvel */ 6510b167463SArd Biesheuvel .macro adr_l, dst:req, sym:req, cond 6520b167463SArd Biesheuvel __adldst_l add, \dst, \sym, \dst, \cond 6530b167463SArd Biesheuvel .endm 6540b167463SArd Biesheuvel 6550b167463SArd Biesheuvel /* 6560b167463SArd Biesheuvel * ldr_l - ldr <literal> pseudo-op with unlimited range 6570b167463SArd Biesheuvel * 6580b167463SArd Biesheuvel * @dst: destination register 6590b167463SArd Biesheuvel * @sym: name of the symbol 6600b167463SArd Biesheuvel * @cond: conditional opcode suffix 6610b167463SArd Biesheuvel */ 6620b167463SArd Biesheuvel .macro ldr_l, dst:req, sym:req, cond 6630b167463SArd Biesheuvel __adldst_l ldr, \dst, \sym, \dst, \cond 6640b167463SArd Biesheuvel .endm 6650b167463SArd Biesheuvel 6660b167463SArd Biesheuvel /* 6670b167463SArd Biesheuvel * str_l - str <literal> pseudo-op with unlimited range 
6680b167463SArd Biesheuvel * 6690b167463SArd Biesheuvel * @src: source register 6700b167463SArd Biesheuvel * @sym: name of the symbol 6710b167463SArd Biesheuvel * @tmp: mandatory scratch register 6720b167463SArd Biesheuvel * @cond: conditional opcode suffix 6730b167463SArd Biesheuvel */ 6740b167463SArd Biesheuvel .macro str_l, src:req, sym:req, tmp:req, cond 6750b167463SArd Biesheuvel __adldst_l str, \src, \sym, \tmp, \cond 6760b167463SArd Biesheuvel .endm 6770b167463SArd Biesheuvel 67850807460SArd Biesheuvel .macro __ldst_va, op, reg, tmp, sym, cond, offset 6794e918ab1SArd Biesheuvel #if __LINUX_ARM_ARCH__ >= 7 || \ 680d6905849SArd Biesheuvel !defined(CONFIG_ARM_HAS_GROUP_RELOCS) || \ 681d6905849SArd Biesheuvel (defined(MODULE) && defined(CONFIG_ARM_MODULE_PLTS)) 6824e918ab1SArd Biesheuvel mov_l \tmp, \sym, \cond 6834e918ab1SArd Biesheuvel #else 6844e918ab1SArd Biesheuvel /* 6854e918ab1SArd Biesheuvel * Avoid a literal load, by emitting a sequence of ADD/LDR instructions 6864e918ab1SArd Biesheuvel * with the appropriate relocations. The combined sequence has a range 6874e918ab1SArd Biesheuvel * of -/+ 256 MiB, which should be sufficient for the core kernel and 6884e918ab1SArd Biesheuvel * for modules loaded into the module region. 
6894e918ab1SArd Biesheuvel */ 6904e918ab1SArd Biesheuvel .globl \sym 6914e918ab1SArd Biesheuvel .reloc .L0_\@, R_ARM_ALU_PC_G0_NC, \sym 6924e918ab1SArd Biesheuvel .reloc .L1_\@, R_ARM_ALU_PC_G1_NC, \sym 6934e918ab1SArd Biesheuvel .reloc .L2_\@, R_ARM_LDR_PC_G2, \sym 69450807460SArd Biesheuvel .L0_\@: sub\cond \tmp, pc, #8 - \offset 69550807460SArd Biesheuvel .L1_\@: sub\cond \tmp, \tmp, #4 - \offset 69650807460SArd Biesheuvel .L2_\@: 6974e918ab1SArd Biesheuvel #endif 69850807460SArd Biesheuvel \op\cond \reg, [\tmp, #\offset] 6994e918ab1SArd Biesheuvel .endm 7004e918ab1SArd Biesheuvel 7014e918ab1SArd Biesheuvel /* 7024e918ab1SArd Biesheuvel * ldr_va - load a 32-bit word from the virtual address of \sym 7034e918ab1SArd Biesheuvel */ 70450807460SArd Biesheuvel .macro ldr_va, rd:req, sym:req, cond, tmp, offset=0 705952f0331SArd Biesheuvel .ifnb \tmp 70650807460SArd Biesheuvel __ldst_va ldr, \rd, \tmp, \sym, \cond, \offset 707952f0331SArd Biesheuvel .else 70850807460SArd Biesheuvel __ldst_va ldr, \rd, \rd, \sym, \cond, \offset 709952f0331SArd Biesheuvel .endif 7104e918ab1SArd Biesheuvel .endm 7114e918ab1SArd Biesheuvel 7124e918ab1SArd Biesheuvel /* 7134e918ab1SArd Biesheuvel * str_va - store a 32-bit word to the virtual address of \sym 7144e918ab1SArd Biesheuvel */ 7154e918ab1SArd Biesheuvel .macro str_va, rn:req, sym:req, tmp:req, cond 71650807460SArd Biesheuvel __ldst_va str, \rn, \tmp, \sym, \cond, 0 7174e918ab1SArd Biesheuvel .endm 7184e918ab1SArd Biesheuvel 7196468e898SArd Biesheuvel /* 7207b9896c3SArd Biesheuvel * ldr_this_cpu_armv6 - Load a 32-bit word from the per-CPU variable 'sym', 7217b9896c3SArd Biesheuvel * without using a temp register. Supported in ARM mode 7227b9896c3SArd Biesheuvel * only. 
 */
	.macro		ldr_this_cpu_armv6, rd:req, sym:req
	this_cpu_offset	\rd
	.globl		\sym
	.reloc		.L0_\@, R_ARM_ALU_PC_G0_NC, \sym
	.reloc		.L1_\@, R_ARM_ALU_PC_G1_NC, \sym
	.reloc		.L2_\@, R_ARM_LDR_PC_G2, \sym
	/*
	 * \rd = per-CPU offset + PC. The SUB/SUB/LDR immediates below are
	 * placeholder addends that the group relocations above rewrite into
	 * the PC-relative address of \sym, so the final LDR loads this
	 * CPU's copy of the variable without needing a second register.
	 */
	add		\rd, \rd, pc
.L0_\@: sub		\rd, \rd, #4
.L1_\@: sub		\rd, \rd, #0
.L2_\@: ldr		\rd, [\rd, #4]
	.endm

/*
 * ldr_this_cpu - Load a 32-bit word from the per-CPU variable 'sym'
 *		  into register 'rd', which may be the stack pointer,
 *		  using 't1' and 't2' as general temp registers. These
 *		  are permitted to overlap with 'rd' if != sp
 */
	.macro		ldr_this_cpu, rd:req, sym:req, t1:req, t2:req
#ifndef CONFIG_SMP
	/* UP: only one copy of the variable exists, load it directly */
	ldr_va		\rd, \sym, tmp=\t1
#elif __LINUX_ARM_ARCH__ >= 7 || \
      !defined(CONFIG_ARM_HAS_GROUP_RELOCS) || \
      (defined(MODULE) && defined(CONFIG_ARM_MODULE_PLTS))
	/* \t1 = this CPU's offset, \t2 = address of \sym */
	this_cpu_offset	\t1
	mov_l		\t2, \sym
	ldr		\rd, [\t1, \t2]
#else
	/* group-relocation sequence that needs no temp register */
	ldr_this_cpu_armv6 \rd, \sym
#endif
	.endm

/*
 * rev_l - byte-swap a 32-bit value
 *
 * @val: source/destination register
 * @tmp: scratch register (only written on CPUs prior to ARMv6; see
 *       .Lrev_l_uses_tmp below)
 */
	.macro		rev_l, val:req, tmp:req
	.if		__LINUX_ARM_ARCH__ < 6
	/* no REV instruction: classic 4-instruction EOR/BIC/ROR swap */
	eor		\tmp, \val, \val, ror #16
	bic		\tmp, \tmp, #0x00ff0000
	mov		\val, \val, ror #8
	eor		\val, \val, \tmp, lsr #8
	.else
	rev		\val, \val
	.endif
	.endm

/*
 * Assembly-time flag matching the condition inside rev_l, so callers can
 * tell whether \tmp is actually clobbered on the configured architecture.
 */
	.if		__LINUX_ARM_ARCH__ < 6
	.set		.Lrev_l_uses_tmp, 1
	.else
	.set		.Lrev_l_uses_tmp, 0
	.endif

/*
 * bl_r - branch and link to register
 *
 * @dst: target to branch to
 * @c: conditional opcode suffix
 */
	.macro		bl_r, dst:req, c
	.if		__LINUX_ARM_ARCH__ < 6
	/*
	 * No BLX available: set up the return address by hand. Reading PC
	 * yields '.' + 8 in ARM mode, i.e. the address of the instruction
	 * following the MOV to PC — exactly where \dst should return to.
	 */
	mov\c		lr, pc
	mov\c		pc, \dst
	.else
	blx\c		\dst
	.endif
	.endm

#endif /* __ASM_ASSEMBLER_H__ */