1d2912cb1SThomas Gleixner /* SPDX-License-Identifier: GPL-2.0-only */ 24baa9922SRussell King /* 34baa9922SRussell King * arch/arm/include/asm/assembler.h 44baa9922SRussell King * 54baa9922SRussell King * Copyright (C) 1996-2000 Russell King 64baa9922SRussell King * 74baa9922SRussell King * This file contains arm architecture specific defines 84baa9922SRussell King * for the different processors. 94baa9922SRussell King * 104baa9922SRussell King * Do not include any C declarations in this file - it is included by 114baa9922SRussell King * assembler source. 124baa9922SRussell King */ 132bc58a6fSMagnus Damm #ifndef __ASM_ASSEMBLER_H__ 142bc58a6fSMagnus Damm #define __ASM_ASSEMBLER_H__ 152bc58a6fSMagnus Damm 164baa9922SRussell King #ifndef __ASSEMBLY__ 174baa9922SRussell King #error "Only include this from assembly code" 184baa9922SRussell King #endif 194baa9922SRussell King 204baa9922SRussell King #include <asm/ptrace.h> 2180c59dafSDave Martin #include <asm/opcodes-virt.h> 220b1f68e8SCatalin Marinas #include <asm/asm-offsets.h> 239a2b51b6SAndrey Ryabinin #include <asm/page.h> 249a2b51b6SAndrey Ryabinin #include <asm/thread_info.h> 25747ffc2fSRussell King #include <asm/uaccess-asm.h> 264baa9922SRussell King 276f6f6a70SRob Herring #define IOMEM(x) (x) 286f6f6a70SRob Herring 294baa9922SRussell King /* 304baa9922SRussell King * Endian independent macros for shifting bytes within registers. 
314baa9922SRussell King */ 324baa9922SRussell King #ifndef __ARMEB__ 33d98b90eaSVictor Kamensky #define lspull lsr 34d98b90eaSVictor Kamensky #define lspush lsl 354baa9922SRussell King #define get_byte_0 lsl #0 364baa9922SRussell King #define get_byte_1 lsr #8 374baa9922SRussell King #define get_byte_2 lsr #16 384baa9922SRussell King #define get_byte_3 lsr #24 394baa9922SRussell King #define put_byte_0 lsl #0 404baa9922SRussell King #define put_byte_1 lsl #8 414baa9922SRussell King #define put_byte_2 lsl #16 424baa9922SRussell King #define put_byte_3 lsl #24 434baa9922SRussell King #else 44d98b90eaSVictor Kamensky #define lspull lsl 45d98b90eaSVictor Kamensky #define lspush lsr 464baa9922SRussell King #define get_byte_0 lsr #24 474baa9922SRussell King #define get_byte_1 lsr #16 484baa9922SRussell King #define get_byte_2 lsr #8 494baa9922SRussell King #define get_byte_3 lsl #0 504baa9922SRussell King #define put_byte_0 lsl #24 514baa9922SRussell King #define put_byte_1 lsl #16 524baa9922SRussell King #define put_byte_2 lsl #8 534baa9922SRussell King #define put_byte_3 lsl #0 544baa9922SRussell King #endif 554baa9922SRussell King 56457c2403SBen Dooks /* Select code for any configuration running in BE8 mode */ 57457c2403SBen Dooks #ifdef CONFIG_CPU_ENDIAN_BE8 58457c2403SBen Dooks #define ARM_BE8(code...) code 59457c2403SBen Dooks #else 60457c2403SBen Dooks #define ARM_BE8(code...) 61457c2403SBen Dooks #endif 62457c2403SBen Dooks 634baa9922SRussell King /* 644baa9922SRussell King * Data preload for architectures that support it 654baa9922SRussell King */ 664baa9922SRussell King #if __LINUX_ARM_ARCH__ >= 5 674baa9922SRussell King #define PLD(code...) code 684baa9922SRussell King #else 694baa9922SRussell King #define PLD(code...) 704baa9922SRussell King #endif 714baa9922SRussell King 724baa9922SRussell King /* 734baa9922SRussell King * This can be used to enable code to cacheline align the destination 744baa9922SRussell King * pointer when bulk writing to memory. 
Experiments on StrongARM and 754baa9922SRussell King * XScale didn't show this a worthwhile thing to do when the cache is not 764baa9922SRussell King * set to write-allocate (this would need further testing on XScale when WA 774baa9922SRussell King * is used). 784baa9922SRussell King * 794baa9922SRussell King * On Feroceon there is much to gain however, regardless of cache mode. 804baa9922SRussell King */ 814baa9922SRussell King #ifdef CONFIG_CPU_FEROCEON 824baa9922SRussell King #define CALGN(code...) code 834baa9922SRussell King #else 844baa9922SRussell King #define CALGN(code...) 854baa9922SRussell King #endif 864baa9922SRussell King 87ffa47aa6SArnd Bergmann #define IMM12_MASK 0xfff 88ffa47aa6SArnd Bergmann 894baa9922SRussell King /* 904baa9922SRussell King * Enable and disable interrupts 914baa9922SRussell King */ 924baa9922SRussell King #if __LINUX_ARM_ARCH__ >= 6 930d928b0bSUwe Kleine-König .macro disable_irq_notrace 944baa9922SRussell King cpsid i 954baa9922SRussell King .endm 964baa9922SRussell King 970d928b0bSUwe Kleine-König .macro enable_irq_notrace 984baa9922SRussell King cpsie i 994baa9922SRussell King .endm 1004baa9922SRussell King #else 1010d928b0bSUwe Kleine-König .macro disable_irq_notrace 1024baa9922SRussell King msr cpsr_c, #PSR_I_BIT | SVC_MODE 1034baa9922SRussell King .endm 1044baa9922SRussell King 1050d928b0bSUwe Kleine-König .macro enable_irq_notrace 1064baa9922SRussell King msr cpsr_c, #SVC_MODE 1074baa9922SRussell King .endm 1084baa9922SRussell King #endif 1094baa9922SRussell King 110*b9baf5c8SRussell King (Oracle) #if __LINUX_ARM_ARCH__ < 7 111*b9baf5c8SRussell King (Oracle) .macro dsb, args 112*b9baf5c8SRussell King (Oracle) mcr p15, 0, r0, c7, c10, 4 113*b9baf5c8SRussell King (Oracle) .endm 114*b9baf5c8SRussell King (Oracle) 115*b9baf5c8SRussell King (Oracle) .macro isb, args 116*b9baf5c8SRussell King (Oracle) mcr p15, 0, r0, c7, c5, 4 117*b9baf5c8SRussell King (Oracle) .endm 118*b9baf5c8SRussell King (Oracle) #endif 119*b9baf5c8SRussell 
King (Oracle) 1203302caddSRussell King .macro asm_trace_hardirqs_off, save=1 1210d928b0bSUwe Kleine-König #if defined(CONFIG_TRACE_IRQFLAGS) 1223302caddSRussell King .if \save 1230d928b0bSUwe Kleine-König stmdb sp!, {r0-r3, ip, lr} 1243302caddSRussell King .endif 1250d928b0bSUwe Kleine-König bl trace_hardirqs_off 1263302caddSRussell King .if \save 1270d928b0bSUwe Kleine-König ldmia sp!, {r0-r3, ip, lr} 1283302caddSRussell King .endif 1290d928b0bSUwe Kleine-König #endif 1300d928b0bSUwe Kleine-König .endm 1310d928b0bSUwe Kleine-König 1323302caddSRussell King .macro asm_trace_hardirqs_on, cond=al, save=1 1330d928b0bSUwe Kleine-König #if defined(CONFIG_TRACE_IRQFLAGS) 1340d928b0bSUwe Kleine-König /* 1350d928b0bSUwe Kleine-König * actually the registers should be pushed and pop'd conditionally, but 1360d928b0bSUwe Kleine-König * after bl the flags are certainly clobbered 1370d928b0bSUwe Kleine-König */ 1383302caddSRussell King .if \save 1390d928b0bSUwe Kleine-König stmdb sp!, {r0-r3, ip, lr} 1403302caddSRussell King .endif 1410d928b0bSUwe Kleine-König bl\cond trace_hardirqs_on 1423302caddSRussell King .if \save 1430d928b0bSUwe Kleine-König ldmia sp!, {r0-r3, ip, lr} 1443302caddSRussell King .endif 1450d928b0bSUwe Kleine-König #endif 1460d928b0bSUwe Kleine-König .endm 1470d928b0bSUwe Kleine-König 1483302caddSRussell King .macro disable_irq, save=1 1490d928b0bSUwe Kleine-König disable_irq_notrace 1503302caddSRussell King asm_trace_hardirqs_off \save 1510d928b0bSUwe Kleine-König .endm 1520d928b0bSUwe Kleine-König 1530d928b0bSUwe Kleine-König .macro enable_irq 1540d928b0bSUwe Kleine-König asm_trace_hardirqs_on 1550d928b0bSUwe Kleine-König enable_irq_notrace 1560d928b0bSUwe Kleine-König .endm 1574baa9922SRussell King /* 1584baa9922SRussell King * Save the current IRQ state and disable IRQs. Note that this macro 1594baa9922SRussell King * assumes FIQs are enabled, and that the processor is in SVC mode. 
1604baa9922SRussell King */ 1614baa9922SRussell King .macro save_and_disable_irqs, oldcpsr 16255bdd694SCatalin Marinas #ifdef CONFIG_CPU_V7M 16355bdd694SCatalin Marinas mrs \oldcpsr, primask 16455bdd694SCatalin Marinas #else 1654baa9922SRussell King mrs \oldcpsr, cpsr 16655bdd694SCatalin Marinas #endif 1674baa9922SRussell King disable_irq 1684baa9922SRussell King .endm 1694baa9922SRussell King 1708e43a905SRabin Vincent .macro save_and_disable_irqs_notrace, oldcpsr 171b2bf482aSVladimir Murzin #ifdef CONFIG_CPU_V7M 172b2bf482aSVladimir Murzin mrs \oldcpsr, primask 173b2bf482aSVladimir Murzin #else 1748e43a905SRabin Vincent mrs \oldcpsr, cpsr 175b2bf482aSVladimir Murzin #endif 1768e43a905SRabin Vincent disable_irq_notrace 1778e43a905SRabin Vincent .endm 1788e43a905SRabin Vincent 1794baa9922SRussell King /* 1804baa9922SRussell King * Restore interrupt state previously stored in a register. We don't 1814baa9922SRussell King * guarantee that this will preserve the flags. 1824baa9922SRussell King */ 1830d928b0bSUwe Kleine-König .macro restore_irqs_notrace, oldcpsr 18455bdd694SCatalin Marinas #ifdef CONFIG_CPU_V7M 18555bdd694SCatalin Marinas msr primask, \oldcpsr 18655bdd694SCatalin Marinas #else 1874baa9922SRussell King msr cpsr_c, \oldcpsr 18855bdd694SCatalin Marinas #endif 1894baa9922SRussell King .endm 1904baa9922SRussell King 1910d928b0bSUwe Kleine-König .macro restore_irqs, oldcpsr 1920d928b0bSUwe Kleine-König tst \oldcpsr, #PSR_I_BIT 19301e09a28SRussell King asm_trace_hardirqs_on cond=eq 1940d928b0bSUwe Kleine-König restore_irqs_notrace \oldcpsr 1950d928b0bSUwe Kleine-König .endm 1960d928b0bSUwe Kleine-König 19739ad04ccSCatalin Marinas /* 19814327c66SRussell King * Assembly version of "adr rd, BSYM(sym)". This should only be used to 19914327c66SRussell King * reference local symbols in the same assembly file which are to be 20014327c66SRussell King * resolved by the assembler. Other usage is undefined. 
20114327c66SRussell King */ 20214327c66SRussell King .irp c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo 20314327c66SRussell King .macro badr\c, rd, sym 20414327c66SRussell King #ifdef CONFIG_THUMB2_KERNEL 20514327c66SRussell King adr\c \rd, \sym + 1 20614327c66SRussell King #else 20714327c66SRussell King adr\c \rd, \sym 20814327c66SRussell King #endif 20914327c66SRussell King .endm 21014327c66SRussell King .endr 21114327c66SRussell King 21250596b75SArd Biesheuvel .macro get_current, rd 21350596b75SArd Biesheuvel #ifdef CONFIG_CURRENT_POINTER_IN_TPIDRURO 21450596b75SArd Biesheuvel mrc p15, 0, \rd, c13, c0, 3 @ get TPIDRURO register 21550596b75SArd Biesheuvel #else 21650596b75SArd Biesheuvel get_thread_info \rd 21750596b75SArd Biesheuvel ldr \rd, [\rd, #TI_TASK] 21850596b75SArd Biesheuvel #endif 21950596b75SArd Biesheuvel .endm 22050596b75SArd Biesheuvel 22150596b75SArd Biesheuvel .macro set_current, rn 22250596b75SArd Biesheuvel #ifdef CONFIG_CURRENT_POINTER_IN_TPIDRURO 22350596b75SArd Biesheuvel mcr p15, 0, \rn, c13, c0, 3 @ set TPIDRURO register 22450596b75SArd Biesheuvel #endif 22550596b75SArd Biesheuvel .endm 22650596b75SArd Biesheuvel 22750596b75SArd Biesheuvel .macro reload_current, t1:req, t2:req 22850596b75SArd Biesheuvel #ifdef CONFIG_CURRENT_POINTER_IN_TPIDRURO 22950596b75SArd Biesheuvel adr_l \t1, __entry_task @ get __entry_task base address 23050596b75SArd Biesheuvel mrc p15, 0, \t2, c13, c0, 4 @ get per-CPU offset 23150596b75SArd Biesheuvel ldr \t1, [\t1, \t2] @ load variable 23250596b75SArd Biesheuvel mcr p15, 0, \t1, c13, c0, 3 @ store in TPIDRURO 23350596b75SArd Biesheuvel #endif 23450596b75SArd Biesheuvel .endm 23550596b75SArd Biesheuvel 23614327c66SRussell King /* 23739ad04ccSCatalin Marinas * Get current thread_info. 
23839ad04ccSCatalin Marinas */ 23939ad04ccSCatalin Marinas .macro get_thread_info, rd 24018ed1c01SArd Biesheuvel #ifdef CONFIG_THREAD_INFO_IN_TASK 24118ed1c01SArd Biesheuvel /* thread_info is the first member of struct task_struct */ 24218ed1c01SArd Biesheuvel get_current \rd 24318ed1c01SArd Biesheuvel #else 2449a2b51b6SAndrey Ryabinin ARM( mov \rd, sp, lsr #THREAD_SIZE_ORDER + PAGE_SHIFT ) 24539ad04ccSCatalin Marinas THUMB( mov \rd, sp ) 2469a2b51b6SAndrey Ryabinin THUMB( lsr \rd, \rd, #THREAD_SIZE_ORDER + PAGE_SHIFT ) 2479a2b51b6SAndrey Ryabinin mov \rd, \rd, lsl #THREAD_SIZE_ORDER + PAGE_SHIFT 24818ed1c01SArd Biesheuvel #endif 24939ad04ccSCatalin Marinas .endm 25039ad04ccSCatalin Marinas 2510b1f68e8SCatalin Marinas /* 2520b1f68e8SCatalin Marinas * Increment/decrement the preempt count. 2530b1f68e8SCatalin Marinas */ 2540b1f68e8SCatalin Marinas #ifdef CONFIG_PREEMPT_COUNT 2550b1f68e8SCatalin Marinas .macro inc_preempt_count, ti, tmp 2560b1f68e8SCatalin Marinas ldr \tmp, [\ti, #TI_PREEMPT] @ get preempt count 2570b1f68e8SCatalin Marinas add \tmp, \tmp, #1 @ increment it 2580b1f68e8SCatalin Marinas str \tmp, [\ti, #TI_PREEMPT] 2590b1f68e8SCatalin Marinas .endm 2600b1f68e8SCatalin Marinas 2610b1f68e8SCatalin Marinas .macro dec_preempt_count, ti, tmp 2620b1f68e8SCatalin Marinas ldr \tmp, [\ti, #TI_PREEMPT] @ get preempt count 2630b1f68e8SCatalin Marinas sub \tmp, \tmp, #1 @ decrement it 2640b1f68e8SCatalin Marinas str \tmp, [\ti, #TI_PREEMPT] 2650b1f68e8SCatalin Marinas .endm 2660b1f68e8SCatalin Marinas 2670b1f68e8SCatalin Marinas .macro dec_preempt_count_ti, ti, tmp 2680b1f68e8SCatalin Marinas get_thread_info \ti 2690b1f68e8SCatalin Marinas dec_preempt_count \ti, \tmp 2700b1f68e8SCatalin Marinas .endm 2710b1f68e8SCatalin Marinas #else 2720b1f68e8SCatalin Marinas .macro inc_preempt_count, ti, tmp 2730b1f68e8SCatalin Marinas .endm 2740b1f68e8SCatalin Marinas 2750b1f68e8SCatalin Marinas .macro dec_preempt_count, ti, tmp 2760b1f68e8SCatalin Marinas .endm 
2770b1f68e8SCatalin Marinas 2780b1f68e8SCatalin Marinas .macro dec_preempt_count_ti, ti, tmp 2790b1f68e8SCatalin Marinas .endm 2800b1f68e8SCatalin Marinas #endif 2810b1f68e8SCatalin Marinas 282f441882aSVincent Whitchurch #define USERL(l, x...) \ 2834baa9922SRussell King 9999: x; \ 2844260415fSRussell King .pushsection __ex_table,"a"; \ 2854baa9922SRussell King .align 3; \ 286f441882aSVincent Whitchurch .long 9999b,l; \ 2874260415fSRussell King .popsection 288bac4e960SRussell King 289f441882aSVincent Whitchurch #define USER(x...) USERL(9001f, x) 290f441882aSVincent Whitchurch 291f00ec48fSRussell King #ifdef CONFIG_SMP 292f00ec48fSRussell King #define ALT_SMP(instr...) \ 293f00ec48fSRussell King 9998: instr 294ed3768a8SDave Martin /* 295ed3768a8SDave Martin * Note: if you get assembler errors from ALT_UP() when building with 296ed3768a8SDave Martin * CONFIG_THUMB2_KERNEL, you almost certainly need to use 297ed3768a8SDave Martin * ALT_SMP( W(instr) ... ) 298ed3768a8SDave Martin */ 299f00ec48fSRussell King #define ALT_UP(instr...) \ 300f00ec48fSRussell King .pushsection ".alt.smp.init", "a" ;\ 301450abd38SArd Biesheuvel .long 9998b - . ;\ 302ed3768a8SDave Martin 9997: instr ;\ 30389c6bc58SRussell King .if . - 9997b == 2 ;\ 30489c6bc58SRussell King nop ;\ 30589c6bc58SRussell King .endif ;\ 306ed3768a8SDave Martin .if . - 9997b != 4 ;\ 307ed3768a8SDave Martin .error "ALT_UP() content must assemble to exactly 4 bytes";\ 308ed3768a8SDave Martin .endif ;\ 309f00ec48fSRussell King .popsection 310f00ec48fSRussell King #define ALT_UP_B(label) \ 311f00ec48fSRussell King .pushsection ".alt.smp.init", "a" ;\ 312450abd38SArd Biesheuvel .long 9998b - . ;\ 313a780e485SJian Cai W(b) . + (label - 9998b) ;\ 314f00ec48fSRussell King .popsection 315f00ec48fSRussell King #else 316f00ec48fSRussell King #define ALT_SMP(instr...) 317f00ec48fSRussell King #define ALT_UP(instr...) 
instr 318f00ec48fSRussell King #define ALT_UP_B(label) b label 319f00ec48fSRussell King #endif 320f00ec48fSRussell King 321bac4e960SRussell King /* 322d675d0bcSWill Deacon * Instruction barrier 323d675d0bcSWill Deacon */ 324d675d0bcSWill Deacon .macro instr_sync 325d675d0bcSWill Deacon #if __LINUX_ARM_ARCH__ >= 7 326d675d0bcSWill Deacon isb 327d675d0bcSWill Deacon #elif __LINUX_ARM_ARCH__ == 6 328d675d0bcSWill Deacon mcr p15, 0, r0, c7, c5, 4 329d675d0bcSWill Deacon #endif 330d675d0bcSWill Deacon .endm 331d675d0bcSWill Deacon 332d675d0bcSWill Deacon /* 333bac4e960SRussell King * SMP data memory barrier 334bac4e960SRussell King */ 335ed3768a8SDave Martin .macro smp_dmb mode 336bac4e960SRussell King #ifdef CONFIG_SMP 337bac4e960SRussell King #if __LINUX_ARM_ARCH__ >= 7 338ed3768a8SDave Martin .ifeqs "\mode","arm" 3393ea12806SWill Deacon ALT_SMP(dmb ish) 340ed3768a8SDave Martin .else 3413ea12806SWill Deacon ALT_SMP(W(dmb) ish) 342ed3768a8SDave Martin .endif 343bac4e960SRussell King #elif __LINUX_ARM_ARCH__ == 6 344f00ec48fSRussell King ALT_SMP(mcr p15, 0, r0, c7, c10, 5) @ dmb 345f00ec48fSRussell King #else 346f00ec48fSRussell King #error Incompatible SMP platform 347bac4e960SRussell King #endif 348ed3768a8SDave Martin .ifeqs "\mode","arm" 349f00ec48fSRussell King ALT_UP(nop) 350ed3768a8SDave Martin .else 351ed3768a8SDave Martin ALT_UP(W(nop)) 352ed3768a8SDave Martin .endif 353bac4e960SRussell King #endif 354bac4e960SRussell King .endm 355b86040a5SCatalin Marinas 35655bdd694SCatalin Marinas #if defined(CONFIG_CPU_V7M) 35755bdd694SCatalin Marinas /* 35855bdd694SCatalin Marinas * setmode is used to assert to be in svc mode during boot. For v7-M 35955bdd694SCatalin Marinas * this is done in __v7m_setup, so setmode can be empty here. 
36055bdd694SCatalin Marinas */ 36155bdd694SCatalin Marinas .macro setmode, mode, reg 36255bdd694SCatalin Marinas .endm 36355bdd694SCatalin Marinas #elif defined(CONFIG_THUMB2_KERNEL) 364b86040a5SCatalin Marinas .macro setmode, mode, reg 365b86040a5SCatalin Marinas mov \reg, #\mode 366b86040a5SCatalin Marinas msr cpsr_c, \reg 367b86040a5SCatalin Marinas .endm 368b86040a5SCatalin Marinas #else 369b86040a5SCatalin Marinas .macro setmode, mode, reg 370b86040a5SCatalin Marinas msr cpsr_c, #\mode 371b86040a5SCatalin Marinas .endm 372b86040a5SCatalin Marinas #endif 3738b592783SCatalin Marinas 3748b592783SCatalin Marinas /* 37580c59dafSDave Martin * Helper macro to enter SVC mode cleanly and mask interrupts. reg is 37680c59dafSDave Martin * a scratch register for the macro to overwrite. 37780c59dafSDave Martin * 37880c59dafSDave Martin * This macro is intended for forcing the CPU into SVC mode at boot time. 37980c59dafSDave Martin * you cannot return to the original mode. 38080c59dafSDave Martin */ 38180c59dafSDave Martin .macro safe_svcmode_maskall reg:req 3820e0779daSLorenzo Pieralisi #if __LINUX_ARM_ARCH__ >= 6 && !defined(CONFIG_CPU_V7M) 38380c59dafSDave Martin mrs \reg , cpsr 3848e9c24a2SRussell King eor \reg, \reg, #HYP_MODE 3858e9c24a2SRussell King tst \reg, #MODE_MASK 38680c59dafSDave Martin bic \reg , \reg , #MODE_MASK 3878e9c24a2SRussell King orr \reg , \reg , #PSR_I_BIT | PSR_F_BIT | SVC_MODE 38880c59dafSDave Martin THUMB( orr \reg , \reg , #PSR_T_BIT ) 38980c59dafSDave Martin bne 1f 3902a552d5eSMarc Zyngier orr \reg, \reg, #PSR_A_BIT 39114327c66SRussell King badr lr, 2f 3922a552d5eSMarc Zyngier msr spsr_cxsf, \reg 39380c59dafSDave Martin __MSR_ELR_HYP(14) 39480c59dafSDave Martin __ERET 3952a552d5eSMarc Zyngier 1: msr cpsr_c, \reg 39680c59dafSDave Martin 2: 3971ecec696SDave Martin #else 3981ecec696SDave Martin /* 3991ecec696SDave Martin * workaround for possibly broken pre-v6 hardware 4001ecec696SDave Martin * (akita, Sharp Zaurus C-1000, PXA270-based) 
4011ecec696SDave Martin */ 4021ecec696SDave Martin setmode PSR_F_BIT | PSR_I_BIT | SVC_MODE, \reg 4031ecec696SDave Martin #endif 40480c59dafSDave Martin .endm 40580c59dafSDave Martin 40680c59dafSDave Martin /* 4078b592783SCatalin Marinas * STRT/LDRT access macros with ARM and Thumb-2 variants 4088b592783SCatalin Marinas */ 4098b592783SCatalin Marinas #ifdef CONFIG_THUMB2_KERNEL 4108b592783SCatalin Marinas 4114e7682d0SCatalin Marinas .macro usraccoff, instr, reg, ptr, inc, off, cond, abort, t=TUSER() 4128b592783SCatalin Marinas 9999: 4138b592783SCatalin Marinas .if \inc == 1 414c001899aSStefan Agner \instr\()b\t\cond\().w \reg, [\ptr, #\off] 4158b592783SCatalin Marinas .elseif \inc == 4 416c001899aSStefan Agner \instr\t\cond\().w \reg, [\ptr, #\off] 4178b592783SCatalin Marinas .else 4188b592783SCatalin Marinas .error "Unsupported inc macro argument" 4198b592783SCatalin Marinas .endif 4208b592783SCatalin Marinas 4214260415fSRussell King .pushsection __ex_table,"a" 4228b592783SCatalin Marinas .align 3 4238b592783SCatalin Marinas .long 9999b, \abort 4244260415fSRussell King .popsection 4258b592783SCatalin Marinas .endm 4268b592783SCatalin Marinas 4278b592783SCatalin Marinas .macro usracc, instr, reg, ptr, inc, cond, rept, abort 4288b592783SCatalin Marinas @ explicit IT instruction needed because of the label 4298b592783SCatalin Marinas @ introduced by the USER macro 4308b592783SCatalin Marinas .ifnc \cond,al 4318b592783SCatalin Marinas .if \rept == 1 4328b592783SCatalin Marinas itt \cond 4338b592783SCatalin Marinas .elseif \rept == 2 4348b592783SCatalin Marinas ittt \cond 4358b592783SCatalin Marinas .else 4368b592783SCatalin Marinas .error "Unsupported rept macro argument" 4378b592783SCatalin Marinas .endif 4388b592783SCatalin Marinas .endif 4398b592783SCatalin Marinas 4408b592783SCatalin Marinas @ Slightly optimised to avoid incrementing the pointer twice 4418b592783SCatalin Marinas usraccoff \instr, \reg, \ptr, \inc, 0, \cond, \abort 4428b592783SCatalin Marinas .if 
\rept == 2 4431142b71dSWill Deacon usraccoff \instr, \reg, \ptr, \inc, \inc, \cond, \abort 4448b592783SCatalin Marinas .endif 4458b592783SCatalin Marinas 4468b592783SCatalin Marinas add\cond \ptr, #\rept * \inc 4478b592783SCatalin Marinas .endm 4488b592783SCatalin Marinas 4498b592783SCatalin Marinas #else /* !CONFIG_THUMB2_KERNEL */ 4508b592783SCatalin Marinas 4514e7682d0SCatalin Marinas .macro usracc, instr, reg, ptr, inc, cond, rept, abort, t=TUSER() 4528b592783SCatalin Marinas .rept \rept 4538b592783SCatalin Marinas 9999: 4548b592783SCatalin Marinas .if \inc == 1 455c001899aSStefan Agner \instr\()b\t\cond \reg, [\ptr], #\inc 4568b592783SCatalin Marinas .elseif \inc == 4 457c001899aSStefan Agner \instr\t\cond \reg, [\ptr], #\inc 4588b592783SCatalin Marinas .else 4598b592783SCatalin Marinas .error "Unsupported inc macro argument" 4608b592783SCatalin Marinas .endif 4618b592783SCatalin Marinas 4624260415fSRussell King .pushsection __ex_table,"a" 4638b592783SCatalin Marinas .align 3 4648b592783SCatalin Marinas .long 9999b, \abort 4654260415fSRussell King .popsection 4668b592783SCatalin Marinas .endr 4678b592783SCatalin Marinas .endm 4688b592783SCatalin Marinas 4698b592783SCatalin Marinas #endif /* CONFIG_THUMB2_KERNEL */ 4708b592783SCatalin Marinas 4718b592783SCatalin Marinas .macro strusr, reg, ptr, inc, cond=al, rept=1, abort=9001f 4728b592783SCatalin Marinas usracc str, \reg, \ptr, \inc, \cond, \rept, \abort 4738b592783SCatalin Marinas .endm 4748b592783SCatalin Marinas 4758b592783SCatalin Marinas .macro ldrusr, reg, ptr, inc, cond=al, rept=1, abort=9001f 4768b592783SCatalin Marinas usracc ldr, \reg, \ptr, \inc, \cond, \rept, \abort 4778b592783SCatalin Marinas .endm 4788f51965eSDave Martin 4798f51965eSDave Martin /* Utility macro for declaring string literals */ 4808f51965eSDave Martin .macro string name:req, string 4818f51965eSDave Martin .type \name , #object 4828f51965eSDave Martin \name: 4838f51965eSDave Martin .asciz "\string" 4848f51965eSDave Martin .size 
\name , . - \name 4858f51965eSDave Martin .endm 4868f51965eSDave Martin 4876ebbf2ceSRussell King .irp c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo 4886ebbf2ceSRussell King .macro ret\c, reg 4896ebbf2ceSRussell King #if __LINUX_ARM_ARCH__ < 6 4906ebbf2ceSRussell King mov\c pc, \reg 4916ebbf2ceSRussell King #else 4926ebbf2ceSRussell King .ifeqs "\reg", "lr" 4936ebbf2ceSRussell King bx\c \reg 4946ebbf2ceSRussell King .else 4956ebbf2ceSRussell King mov\c pc, \reg 4966ebbf2ceSRussell King .endif 4976ebbf2ceSRussell King #endif 4986ebbf2ceSRussell King .endm 4996ebbf2ceSRussell King .endr 5006ebbf2ceSRussell King 5016ebbf2ceSRussell King .macro ret.w, reg 5026ebbf2ceSRussell King ret \reg 5036ebbf2ceSRussell King #ifdef CONFIG_THUMB2_KERNEL 5046ebbf2ceSRussell King nop 5056ebbf2ceSRussell King #endif 5066ebbf2ceSRussell King .endm 5076ebbf2ceSRussell King 5088bafae20SRussell King .macro bug, msg, line 5098bafae20SRussell King #ifdef CONFIG_THUMB2_KERNEL 5108bafae20SRussell King 1: .inst 0xde02 5118bafae20SRussell King #else 5128bafae20SRussell King 1: .inst 0xe7f001f2 5138bafae20SRussell King #endif 5148bafae20SRussell King #ifdef CONFIG_DEBUG_BUGVERBOSE 5158bafae20SRussell King .pushsection .rodata.str, "aMS", %progbits, 1 5168bafae20SRussell King 2: .asciz "\msg" 5178bafae20SRussell King .popsection 5188bafae20SRussell King .pushsection __bug_table, "aw" 5198bafae20SRussell King .align 2 5208bafae20SRussell King .word 1b, 2b 5218bafae20SRussell King .hword \line 5228bafae20SRussell King .popsection 5238bafae20SRussell King #endif 5248bafae20SRussell King .endm 5258bafae20SRussell King 5260d73c3f8SMasami Hiramatsu #ifdef CONFIG_KPROBES 5270d73c3f8SMasami Hiramatsu #define _ASM_NOKPROBE(entry) \ 5280d73c3f8SMasami Hiramatsu .pushsection "_kprobe_blacklist", "aw" ; \ 5290d73c3f8SMasami Hiramatsu .balign 4 ; \ 5300d73c3f8SMasami Hiramatsu .long entry; \ 5310d73c3f8SMasami Hiramatsu .popsection 5320d73c3f8SMasami Hiramatsu #else 5330d73c3f8SMasami Hiramatsu #define 
_ASM_NOKPROBE(entry) 5340d73c3f8SMasami Hiramatsu #endif 5350d73c3f8SMasami Hiramatsu 5360b167463SArd Biesheuvel .macro __adldst_l, op, reg, sym, tmp, c 5370b167463SArd Biesheuvel .if __LINUX_ARM_ARCH__ < 7 5380b167463SArd Biesheuvel ldr\c \tmp, .La\@ 5390b167463SArd Biesheuvel .subsection 1 5400b167463SArd Biesheuvel .align 2 5410b167463SArd Biesheuvel .La\@: .long \sym - .Lpc\@ 5420b167463SArd Biesheuvel .previous 5430b167463SArd Biesheuvel .else 5440b167463SArd Biesheuvel .ifnb \c 5450b167463SArd Biesheuvel THUMB( ittt \c ) 5460b167463SArd Biesheuvel .endif 5470b167463SArd Biesheuvel movw\c \tmp, #:lower16:\sym - .Lpc\@ 5480b167463SArd Biesheuvel movt\c \tmp, #:upper16:\sym - .Lpc\@ 5490b167463SArd Biesheuvel .endif 5500b167463SArd Biesheuvel 5510b167463SArd Biesheuvel #ifndef CONFIG_THUMB2_KERNEL 5520b167463SArd Biesheuvel .set .Lpc\@, . + 8 // PC bias 5530b167463SArd Biesheuvel .ifc \op, add 5540b167463SArd Biesheuvel add\c \reg, \tmp, pc 5550b167463SArd Biesheuvel .else 5560b167463SArd Biesheuvel \op\c \reg, [pc, \tmp] 5570b167463SArd Biesheuvel .endif 5580b167463SArd Biesheuvel #else 5590b167463SArd Biesheuvel .Lb\@: add\c \tmp, \tmp, pc 5600b167463SArd Biesheuvel /* 5610b167463SArd Biesheuvel * In Thumb-2 builds, the PC bias depends on whether we are currently 5620b167463SArd Biesheuvel * emitting into a .arm or a .thumb section. The size of the add opcode 5630b167463SArd Biesheuvel * above will be 2 bytes when emitting in Thumb mode and 4 bytes when 5640b167463SArd Biesheuvel * emitting in ARM mode, so let's use this to account for the bias. 5650b167463SArd Biesheuvel */ 5660b167463SArd Biesheuvel .set .Lpc\@, . + (. 
- .Lb\@) 5670b167463SArd Biesheuvel 5680b167463SArd Biesheuvel .ifnc \op, add 5690b167463SArd Biesheuvel \op\c \reg, [\tmp] 5700b167463SArd Biesheuvel .endif 5710b167463SArd Biesheuvel #endif 5720b167463SArd Biesheuvel .endm 5730b167463SArd Biesheuvel 5740b167463SArd Biesheuvel /* 5750b167463SArd Biesheuvel * mov_l - move a constant value or [relocated] address into a register 5760b167463SArd Biesheuvel */ 5770b167463SArd Biesheuvel .macro mov_l, dst:req, imm:req 5780b167463SArd Biesheuvel .if __LINUX_ARM_ARCH__ < 7 5790b167463SArd Biesheuvel ldr \dst, =\imm 5800b167463SArd Biesheuvel .else 5810b167463SArd Biesheuvel movw \dst, #:lower16:\imm 5820b167463SArd Biesheuvel movt \dst, #:upper16:\imm 5830b167463SArd Biesheuvel .endif 5840b167463SArd Biesheuvel .endm 5850b167463SArd Biesheuvel 5860b167463SArd Biesheuvel /* 5870b167463SArd Biesheuvel * adr_l - adr pseudo-op with unlimited range 5880b167463SArd Biesheuvel * 5890b167463SArd Biesheuvel * @dst: destination register 5900b167463SArd Biesheuvel * @sym: name of the symbol 5910b167463SArd Biesheuvel * @cond: conditional opcode suffix 5920b167463SArd Biesheuvel */ 5930b167463SArd Biesheuvel .macro adr_l, dst:req, sym:req, cond 5940b167463SArd Biesheuvel __adldst_l add, \dst, \sym, \dst, \cond 5950b167463SArd Biesheuvel .endm 5960b167463SArd Biesheuvel 5970b167463SArd Biesheuvel /* 5980b167463SArd Biesheuvel * ldr_l - ldr <literal> pseudo-op with unlimited range 5990b167463SArd Biesheuvel * 6000b167463SArd Biesheuvel * @dst: destination register 6010b167463SArd Biesheuvel * @sym: name of the symbol 6020b167463SArd Biesheuvel * @cond: conditional opcode suffix 6030b167463SArd Biesheuvel */ 6040b167463SArd Biesheuvel .macro ldr_l, dst:req, sym:req, cond 6050b167463SArd Biesheuvel __adldst_l ldr, \dst, \sym, \dst, \cond 6060b167463SArd Biesheuvel .endm 6070b167463SArd Biesheuvel 6080b167463SArd Biesheuvel /* 6090b167463SArd Biesheuvel * str_l - str <literal> pseudo-op with unlimited range 6100b167463SArd Biesheuvel * 
6110b167463SArd Biesheuvel * @src: source register 6120b167463SArd Biesheuvel * @sym: name of the symbol 6130b167463SArd Biesheuvel * @tmp: mandatory scratch register 6140b167463SArd Biesheuvel * @cond: conditional opcode suffix 6150b167463SArd Biesheuvel */ 6160b167463SArd Biesheuvel .macro str_l, src:req, sym:req, tmp:req, cond 6170b167463SArd Biesheuvel __adldst_l str, \src, \sym, \tmp, \cond 6180b167463SArd Biesheuvel .endm 6190b167463SArd Biesheuvel 6206468e898SArd Biesheuvel /* 6216468e898SArd Biesheuvel * rev_l - byte-swap a 32-bit value 6226468e898SArd Biesheuvel * 6236468e898SArd Biesheuvel * @val: source/destination register 6246468e898SArd Biesheuvel * @tmp: scratch register 6256468e898SArd Biesheuvel */ 6266468e898SArd Biesheuvel .macro rev_l, val:req, tmp:req 6276468e898SArd Biesheuvel .if __LINUX_ARM_ARCH__ < 6 6286468e898SArd Biesheuvel eor \tmp, \val, \val, ror #16 6296468e898SArd Biesheuvel bic \tmp, \tmp, #0x00ff0000 6306468e898SArd Biesheuvel mov \val, \val, ror #8 6316468e898SArd Biesheuvel eor \val, \val, \tmp, lsr #8 6326468e898SArd Biesheuvel .else 6336468e898SArd Biesheuvel rev \val, \val 6346468e898SArd Biesheuvel .endif 6356468e898SArd Biesheuvel .endm 6366468e898SArd Biesheuvel 6372bc58a6fSMagnus Damm #endif /* __ASM_ASSEMBLER_H__ */ 638