/*
 * arch/arm/include/asm/assembler.h
 *
 * Copyright (C) 1996-2000 Russell King
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This file contains arm architecture specific defines
 * for the different processors.
 *
 * Do not include any C declarations in this file - it is included by
 * assembler source.
 */
#ifndef __ASM_ASSEMBLER_H__
#define __ASM_ASSEMBLER_H__

#ifndef __ASSEMBLY__
#error "Only include this from assembly code"
#endif

#include <asm/ptrace.h>
#include <asm/domain.h>
#include <asm/opcodes-virt.h>

#define IOMEM(x)	(x)

/*
 * Endian independent macros for shifting bytes within registers.
 */
#ifndef __ARMEB__
#define pull		lsr
#define push		lsl
#define get_byte_0	lsl #0
#define get_byte_1	lsr #8
#define get_byte_2	lsr #16
#define get_byte_3	lsr #24
#define put_byte_0	lsl #0
#define put_byte_1	lsl #8
#define put_byte_2	lsl #16
#define put_byte_3	lsl #24
#else
#define pull		lsl
#define push		lsr
#define get_byte_0	lsr #24
#define get_byte_1	lsr #16
#define get_byte_2	lsr #8
#define get_byte_3	lsl #0
#define put_byte_0	lsl #24
#define put_byte_1	lsl #16
#define put_byte_2	lsl #8
#define put_byte_3	lsl #0
#endif

/*
 * Data preload for architectures that support it
 */
#if __LINUX_ARM_ARCH__ >= 5
#define PLD(code...)	code
#else
#define PLD(code...)
#endif

/*
 * This can be used to enable code to cacheline align the destination
 * pointer when bulk writing to memory.  Experiments on StrongARM and
 * XScale didn't show this a worthwhile thing to do when the cache is not
 * set to write-allocate (this would need further testing on XScale when WA
 * is used).
 *
 * On Feroceon there is much to gain however, regardless of cache mode.
 */
#ifdef CONFIG_CPU_FEROCEON
#define CALGN(code...) code
#else
#define CALGN(code...)
#endif

/*
 * Enable and disable interrupts.  On >= v6 use CPS; older cores must
 * write the CPSR control field directly.
 */
#if __LINUX_ARM_ARCH__ >= 6
	.macro	disable_irq_notrace
	cpsid	i
	.endm

	.macro	enable_irq_notrace
	cpsie	i
	.endm
#else
	.macro	disable_irq_notrace
	msr	cpsr_c, #PSR_I_BIT | SVC_MODE
	.endm

	.macro	enable_irq_notrace
	msr	cpsr_c, #SVC_MODE
	.endm
#endif

	.macro asm_trace_hardirqs_off
#if defined(CONFIG_TRACE_IRQFLAGS)
	stmdb   sp!, {r0-r3, ip, lr}
	bl	trace_hardirqs_off
	ldmia	sp!, {r0-r3, ip, lr}
#endif
	.endm

	.macro asm_trace_hardirqs_on_cond, cond
#if defined(CONFIG_TRACE_IRQFLAGS)
	/*
	 * actually the registers should be pushed and pop'd conditionally, but
	 * after bl the flags are certainly clobbered
	 */
	stmdb   sp!, {r0-r3, ip, lr}
	bl\cond	trace_hardirqs_on
	ldmia	sp!, {r0-r3, ip, lr}
#endif
	.endm

	.macro asm_trace_hardirqs_on
	asm_trace_hardirqs_on_cond al
	.endm

	.macro disable_irq
	disable_irq_notrace
	asm_trace_hardirqs_off
	.endm

	.macro enable_irq
	asm_trace_hardirqs_on
	enable_irq_notrace
	.endm
/*
 * Save the current IRQ state and disable IRQs.  Note that this macro
 * assumes FIQs are enabled, and that the processor is in SVC mode.
 */
	.macro	save_and_disable_irqs, oldcpsr
#ifdef CONFIG_CPU_V7M
	mrs	\oldcpsr, primask
#else
	mrs	\oldcpsr, cpsr
#endif
	disable_irq
	.endm

	.macro	save_and_disable_irqs_notrace, oldcpsr
	mrs	\oldcpsr, cpsr
	disable_irq_notrace
	.endm

/*
 * Restore interrupt state previously stored in a register.  We don't
 * guarantee that this will preserve the flags.
 */
	.macro	restore_irqs_notrace, oldcpsr
#ifdef CONFIG_CPU_V7M
	msr	primask, \oldcpsr
#else
	msr	cpsr_c, \oldcpsr
#endif
	.endm

	.macro restore_irqs, oldcpsr
	tst	\oldcpsr, #PSR_I_BIT
	asm_trace_hardirqs_on_cond eq
	restore_irqs_notrace \oldcpsr
	.endm

/*
 * Mark a user-space access; faults on the 9999: instruction are fixed
 * up at the caller-supplied 9001: label via the __ex_table section.
 */
#define USER(x...)				\
9999:	x;					\
	.pushsection __ex_table,"a";		\
	.align	3;				\
	.long	9999b,9001f;			\
	.popsection

#ifdef CONFIG_SMP
#define ALT_SMP(instr...)					\
9998:	instr
/*
 * Note: if you get assembler errors from ALT_UP() when building with
 * CONFIG_THUMB2_KERNEL, you almost certainly need to use
 * ALT_SMP( W(instr) ... )
 */
#define ALT_UP(instr...)					\
	.pushsection ".alt.smp.init", "a"			;\
	.long	9998b						;\
9997:	instr							;\
	.if . - 9997b != 4					;\
		.error "ALT_UP() content must assemble to exactly 4 bytes";\
	.endif							;\
	.popsection
#define ALT_UP_B(label)					\
	.equ	up_b_offset, label - 9998b			;\
	.pushsection ".alt.smp.init", "a"			;\
	.long	9998b						;\
	W(b)	. + up_b_offset					;\
	.popsection
#else
#define ALT_SMP(instr...)
#define ALT_UP(instr...) instr
#define ALT_UP_B(label) b label
#endif

/*
 * Instruction barrier
 */
	.macro	instr_sync
#if __LINUX_ARM_ARCH__ >= 7
	isb
#elif __LINUX_ARM_ARCH__ == 6
	mcr	p15, 0, r0, c7, c5, 4
#endif
	.endm

/*
 * SMP data memory barrier
 */
	.macro	smp_dmb mode
#ifdef CONFIG_SMP
#if __LINUX_ARM_ARCH__ >= 7
	.ifeqs "\mode","arm"
	ALT_SMP(dmb	ish)
	.else
	ALT_SMP(W(dmb)	ish)
	.endif
#elif __LINUX_ARM_ARCH__ == 6
	ALT_SMP(mcr	p15, 0, r0, c7, c10, 5)	@ dmb
#else
#error Incompatible SMP platform
#endif
	.ifeqs "\mode","arm"
	ALT_UP(nop)
	.else
	ALT_UP(W(nop))
	.endif
#endif
	.endm

#if defined(CONFIG_CPU_V7M)
	/*
	 * setmode is used to assert to be in svc mode during boot. For v7-M
	 * this is done in __v7m_setup, so setmode can be empty here.
	 */
	.macro	setmode, mode, reg
	.endm
#elif defined(CONFIG_THUMB2_KERNEL)
	.macro	setmode, mode, reg
	mov	\reg, #\mode
	msr	cpsr_c, \reg
	.endm
#else
	.macro	setmode, mode, reg
	msr	cpsr_c, #\mode
	.endm
#endif

/*
 * Helper macro to enter SVC mode cleanly and mask interrupts. reg is
 * a scratch register for the macro to overwrite.
 *
 * This macro is intended for forcing the CPU into SVC mode at boot time.
 * you cannot return to the original mode.
 */
.macro safe_svcmode_maskall reg:req
#if __LINUX_ARM_ARCH__ >= 6
	mrs	\reg , cpsr
	eor	\reg, \reg, #HYP_MODE
	tst	\reg, #MODE_MASK
	bic	\reg , \reg , #MODE_MASK
	orr	\reg , \reg , #PSR_I_BIT | PSR_F_BIT | SVC_MODE
THUMB(	orr	\reg , \reg , #PSR_T_BIT	)
	bne	1f
	@ Entered in HYP mode: drop to SVC via exception return
	orr	\reg, \reg, #PSR_A_BIT
	adr	lr, BSYM(2f)
	msr	spsr_cxsf, \reg
	__MSR_ELR_HYP(14)
	__ERET
1:	msr	cpsr_c, \reg
2:
#else
/*
 * workaround for possibly broken pre-v6 hardware
 * (akita, Sharp Zaurus C-1000, PXA270-based)
 */
	setmode	PSR_F_BIT | PSR_I_BIT | SVC_MODE, \reg
#endif
.endm

/*
 * STRT/LDRT access macros with ARM and Thumb-2 variants
 */
#ifdef CONFIG_THUMB2_KERNEL

	.macro	usraccoff, instr, reg, ptr, inc, off, cond, abort, t=TUSER()
9999:
	.if	\inc == 1
	\instr\cond\()b\()\t\().w \reg, [\ptr, #\off]
	.elseif	\inc == 4
	\instr\cond\()\t\().w \reg, [\ptr, #\off]
	.else
	.error	"Unsupported inc macro argument"
	.endif

	.pushsection __ex_table,"a"
	.align	3
	.long	9999b, \abort
	.popsection
	.endm

	.macro	usracc, instr, reg, ptr, inc, cond, rept, abort
	@ explicit IT instruction needed because of the label
	@ introduced by the USER macro
	.ifnc	\cond,al
	.if	\rept == 1
	itt	\cond
	.elseif	\rept == 2
	ittt	\cond
	.else
	.error	"Unsupported rept macro argument"
	.endif
	.endif

	@ Slightly optimised to avoid incrementing the pointer twice
	usraccoff \instr, \reg, \ptr, \inc, 0, \cond, \abort
	.if	\rept == 2
	usraccoff \instr, \reg, \ptr, \inc, \inc, \cond, \abort
	.endif

	add\cond \ptr, #\rept * \inc
	.endm

#else	/* !CONFIG_THUMB2_KERNEL */

	.macro	usracc, instr, reg, ptr, inc, cond, rept, abort, t=TUSER()
	.rept	\rept
9999:
	.if	\inc == 1
	\instr\cond\()b\()\t \reg, [\ptr], #\inc
	.elseif	\inc == 4
	\instr\cond\()\t \reg, [\ptr], #\inc
	.else
	.error	"Unsupported inc macro argument"
	.endif

	.pushsection __ex_table,"a"
	.align	3
	.long	9999b, \abort
	.popsection
	.endr
	.endm

#endif	/* CONFIG_THUMB2_KERNEL */

	.macro	strusr, reg, ptr, inc, cond=al, rept=1, abort=9001f
	usracc	str, \reg, \ptr, \inc, \cond, \rept, \abort
	.endm

	.macro	ldrusr, reg, ptr, inc, cond=al, rept=1, abort=9001f
	usracc	ldr, \reg, \ptr, \inc, \cond, \rept, \abort
	.endm

/* Utility macro for declaring string literals */
	.macro	string name:req, string
	.type \name , #object
\name:
	.asciz "\string"
	.size \name , . - \name
	.endm

	.macro check_uaccess, addr:req, size:req, limit:req, tmp:req, bad:req
#ifndef CONFIG_CPU_USE_DOMAINS
	@ branch to \bad if [addr, addr+size) is above \limit (carry-chain check)
	adds	\tmp, \addr, #\size - 1
	sbcccs	\tmp, \tmp, \limit
	bcs	\bad
#endif
	.endm

#endif /* __ASM_ASSEMBLER_H__ */