/*
 * arch/arm/include/asm/assembler.h
 *
 * Copyright (C) 1996-2000 Russell King
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This file contains arm architecture specific defines
 * for the different processors.
 *
 * Do not include any C declarations in this file - it is included by
 * assembler source.
 */
#ifndef __ASM_ASSEMBLER_H__
#define __ASM_ASSEMBLER_H__

#ifndef __ASSEMBLY__
#error "Only include this from assembly code"
#endif

#include <asm/ptrace.h>
#include <asm/domain.h>
#include <asm/opcodes-virt.h>

#define IOMEM(x)	(x)

/*
 * Endian independent macros for shifting bytes within registers.
 *
 * "pull"/"push" name the shift direction that moves a byte towards the
 * least/most significant end respectively; get_byte_N/put_byte_N extract
 * or place byte N of a word regardless of the configured endianness.
 */
#ifndef __ARMEB__
#define pull            lsr
#define push            lsl
#define get_byte_0      lsl #0
#define get_byte_1	lsr #8
#define get_byte_2	lsr #16
#define get_byte_3	lsr #24
#define put_byte_0	lsl #0
#define put_byte_1	lsl #8
#define put_byte_2	lsl #16
#define put_byte_3	lsl #24
#else
#define pull            lsl
#define push            lsr
#define get_byte_0	lsr #24
#define get_byte_1	lsr #16
#define get_byte_2	lsr #8
#define get_byte_3      lsl #0
#define put_byte_0	lsl #24
#define put_byte_1	lsl #16
#define put_byte_2	lsl #8
#define put_byte_3	lsl #0
#endif

/*
 * Data preload for architectures that support it.  On pre-v5 cores the
 * PLD() wrapper expands to nothing, so callers may use it unconditionally.
 */
#if __LINUX_ARM_ARCH__ >= 5
#define PLD(code...)	code
#else
#define PLD(code...)
#endif

/*
 * This can be used to enable code to cacheline align the destination
 * pointer when bulk writing to memory.  Experiments on StrongARM and
 * XScale didn't show this a worthwhile thing to do when the cache is not
 * set to write-allocate (this would need further testing on XScale when WA
 * is used).
 *
 * On Feroceon there is much to gain however, regardless of cache mode.
 */
#ifdef CONFIG_CPU_FEROCEON
#define CALGN(code...) code
#else
#define CALGN(code...)
#endif

/*
 * Enable and disable interrupts.  ARMv6+ has the CPS instruction; older
 * architectures write the CPSR control field directly, which assumes the
 * CPU is in SVC mode with FIQs enabled (the whole control field is
 * rewritten, so any other mode/FIQ state would be lost).
 */
#if __LINUX_ARM_ARCH__ >= 6
	.macro	disable_irq_notrace
	cpsid	i
	.endm

	.macro	enable_irq_notrace
	cpsie	i
	.endm
#else
	.macro	disable_irq_notrace
	msr	cpsr_c, #PSR_I_BIT | SVC_MODE
	.endm

	.macro	enable_irq_notrace
	msr	cpsr_c, #SVC_MODE
	.endm
#endif

/*
 * Call trace_hardirqs_off for the irqflags tracer, preserving the
 * caller-clobbered registers around the C call.  No-op unless
 * CONFIG_TRACE_IRQFLAGS is enabled.
 */
	.macro asm_trace_hardirqs_off
#if defined(CONFIG_TRACE_IRQFLAGS)
	stmdb   sp!, {r0-r3, ip, lr}
	bl	trace_hardirqs_off
	ldmia	sp!, {r0-r3, ip, lr}
#endif
	.endm

	.macro asm_trace_hardirqs_on_cond, cond
#if defined(CONFIG_TRACE_IRQFLAGS)
	/*
	 * actually the registers should be pushed and pop'd conditionally, but
	 * after bl the flags are certainly clobbered
	 */
	stmdb   sp!, {r0-r3, ip, lr}
	bl\cond	trace_hardirqs_on
	ldmia	sp!, {r0-r3, ip, lr}
#endif
	.endm

	.macro asm_trace_hardirqs_on
	asm_trace_hardirqs_on_cond al
	.endm

/* disable_irq/enable_irq: as the _notrace variants, plus irqflags tracing */
	.macro disable_irq
	disable_irq_notrace
	asm_trace_hardirqs_off
	.endm

	.macro enable_irq
	asm_trace_hardirqs_on
	enable_irq_notrace
	.endm
/*
 * Save the current IRQ state and disable IRQs.  Note that this macro
 * assumes FIQs are enabled, and that the processor is in SVC mode.
 */
	.macro	save_and_disable_irqs, oldcpsr
	mrs	\oldcpsr, cpsr
	disable_irq
	.endm

	.macro	save_and_disable_irqs_notrace, oldcpsr
	mrs	\oldcpsr, cpsr
	disable_irq_notrace
	.endm

/*
 * Restore interrupt state previously stored in a register.  We don't
 * guarantee that this will preserve the flags.
 */
	.macro	restore_irqs_notrace, oldcpsr
	msr	cpsr_c, \oldcpsr
	.endm

	.macro restore_irqs, oldcpsr
	tst	\oldcpsr, #PSR_I_BIT
	@ trace hardirqs-on only if IRQs were enabled in \oldcpsr (I bit clear)
	asm_trace_hardirqs_on_cond eq
	restore_irqs_notrace \oldcpsr
	.endm

/*
 * Mark an instruction as a user-space access: record its address in
 * __ex_table with a fixup at local label 9001, which the user of this
 * macro must define.
 */
#define USER(x...)				\
9999:	x;					\
	.pushsection __ex_table,"a";	\
	.align	3;				\
	.long	9999b,9001f;			\
	.popsection

/*
 * ALT_SMP/ALT_UP: boot-time SMP-alternatives patching.  On an SMP kernel
 * the ALT_SMP() instruction is emitted inline at label 9998 and the UP
 * replacement is recorded in the .alt.smp.init section (address + new
 * instruction) for patching if the kernel ends up running on a UP system.
 */
#ifdef CONFIG_SMP
#define ALT_SMP(instr...)					\
9998:	instr
/*
 * Note: if you get assembler errors from ALT_UP() when building with
 * CONFIG_THUMB2_KERNEL, you almost certainly need to use
 * ALT_SMP( W(instr) ... )
 */
#define ALT_UP(instr...)					\
	.pushsection ".alt.smp.init", "a"			;\
	.long	9998b						;\
9997:	instr						;\
	.if . - 9997b != 4					;\
		.error "ALT_UP() content must assemble to exactly 4 bytes";\
	.endif							;\
	.popsection
#define ALT_UP_B(label)					\
	.equ	up_b_offset, label - 9998b			;\
	.pushsection ".alt.smp.init", "a"			;\
	.long	9998b						;\
	W(b)	. + up_b_offset				;\
	.popsection
#else
#define ALT_SMP(instr...)
#define ALT_UP(instr...) instr
#define ALT_UP_B(label) b label
#endif

/*
 * Instruction barrier: ISB on ARMv7+, the equivalent CP15 operation on
 * ARMv6, nothing on older architectures.
 */
	.macro	instr_sync
#if __LINUX_ARM_ARCH__ >= 7
	isb
#elif __LINUX_ARM_ARCH__ == 6
	mcr	p15, 0, r0, c7, c5, 4
#endif
	.endm

/*
 * SMP data memory barrier.  \mode selects the "arm" or Thumb (wide)
 * encoding so the ALT_UP() replacement stays exactly 4 bytes; patched to
 * a nop on UP via the SMP-alternatives mechanism above.
 */
	.macro	smp_dmb mode
#ifdef CONFIG_SMP
#if __LINUX_ARM_ARCH__ >= 7
	.ifeqs "\mode","arm"
	ALT_SMP(dmb)
	.else
	ALT_SMP(W(dmb))
	.endif
#elif __LINUX_ARM_ARCH__ == 6
	ALT_SMP(mcr	p15, 0, r0, c7, c10, 5)	@ dmb
#else
#error Incompatible SMP platform
#endif
	.ifeqs "\mode","arm"
	ALT_UP(nop)
	.else
	ALT_UP(W(nop))
	.endif
#endif
	.endm

/*
 * Switch processor mode.  Thumb-2 MSR cannot take an immediate operand,
 * so it needs \reg as a scratch register; the ARM variant uses the
 * immediate form directly.
 */
#ifdef CONFIG_THUMB2_KERNEL
	.macro	setmode, mode, reg
	mov	\reg, #\mode
	msr	cpsr_c, \reg
	.endm
#else
	.macro	setmode, mode, reg
	msr	cpsr_c, #\mode
	.endm
#endif

/*
 * Helper macro to enter SVC mode cleanly and mask interrupts. reg is
 * a scratch register for the macro to overwrite.
 *
 * This macro is intended for forcing the CPU into SVC mode at boot time.
 * you cannot return to the original mode.
 *
 * Beware, it also clobbers LR.
 */
.macro safe_svcmode_maskall reg:req
	mrs	\reg , cpsr
	mov	lr , \reg
	and	lr , lr , #MODE_MASK
	cmp	lr , #HYP_MODE			@ currently in HYP mode?
	@ build target CPSR: A/I/F masked, SVC mode (plus Thumb bit on T2)
	orr	\reg , \reg , #PSR_A_BIT | PSR_I_BIT | PSR_F_BIT
	bic	\reg , \reg , #MODE_MASK
	orr	\reg , \reg , #SVC_MODE
THUMB(	orr	\reg , \reg , #PSR_T_BIT	)
	msr	spsr_cxsf, \reg
	adr	lr, BSYM(2f)			@ resume address for both paths
	bne	1f				@ not HYP: plain exception return
	__MSR_ELR_HYP(14)			@ HYP: set ELR_hyp = lr ...
	__ERET					@ ... and exception-return to SVC
1:	movs	pc, lr				@ return, copying SPSR to CPSR
2:
.endm

/*
 * STRT/LDRT access macros with ARM and Thumb-2 variants.
 *
 * \t defaults to TUSER(), which adds the 't' (unprivileged/user) suffix
 * to the load/store; each access site is recorded in __ex_table so a
 * fault branches to \abort.
 */
#ifdef CONFIG_THUMB2_KERNEL

	/* One user access at [\ptr, #\off]; \inc selects byte (1) or word (4).
	 * .w forces the 32-bit encoding so the __ex_table entry is exact. */
	.macro	usraccoff, instr, reg, ptr, inc, off, cond, abort, t=TUSER()
9999:
	.if	\inc == 1
	\instr\cond\()b\()\t\().w \reg, [\ptr, #\off]
	.elseif	\inc == 4
	\instr\cond\()\t\().w \reg, [\ptr, #\off]
	.else
	.error	"Unsupported inc macro argument"
	.endif

	.pushsection __ex_table,"a"
	.align	3
	.long	9999b, \abort
	.popsection
	.endm

	.macro	usracc, instr, reg, ptr, inc, cond, rept, abort
	@ explicit IT instruction needed because of the label
	@ introduced by the USER macro
	.ifnc	\cond,al
	.if	\rept == 1
	itt	\cond
	.elseif	\rept == 2
	ittt	\cond
	.else
	.error	"Unsupported rept macro argument"
	.endif
	.endif

	@ Slightly optimised to avoid incrementing the pointer twice
	usraccoff \instr, \reg, \ptr, \inc, 0, \cond, \abort
	.if	\rept == 2
	usraccoff \instr, \reg, \ptr, \inc, \inc, \cond, \abort
	.endif

	add\cond \ptr, #\rept * \inc
	.endm

#else	/* !CONFIG_THUMB2_KERNEL */

	/* ARM variant: post-indexed addressing advances \ptr per access,
	 * so no separate add or usraccoff helper is needed. */
	.macro	usracc, instr, reg, ptr, inc, cond, rept, abort, t=TUSER()
	.rept	\rept
9999:
	.if	\inc == 1
	\instr\cond\()b\()\t \reg, [\ptr], #\inc
	.elseif	\inc == 4
	\instr\cond\()\t \reg, [\ptr], #\inc
	.else
	.error	"Unsupported inc macro argument"
	.endif

	.pushsection __ex_table,"a"
	.align	3
	.long	9999b, \abort
	.popsection
	.endr
	.endm

#endif	/* CONFIG_THUMB2_KERNEL */

	/* Store/load to user space; faults branch to \abort (default 9001f,
	 * which the caller must define). */
	.macro	strusr, reg, ptr, inc, cond=al, rept=1, abort=9001f
	usracc	str, \reg, \ptr, \inc, \cond, \rept, \abort
	.endm

	.macro	ldrusr, reg, ptr, inc, cond=al, rept=1, abort=9001f
	usracc	ldr, \reg, \ptr, \inc, \cond, \rept, \abort
	.endm

/* Utility macro for declaring string literals */
	.macro	string name:req, string
	.type \name , #object
\name:
	.asciz "\string"
	.size \name , . - \name
	.endm

#endif /* __ASM_ASSEMBLER_H__ */