/*
 * include/asm-arm/macro.h
 *
 * Copyright (C) 2009 Jean-Christophe PLAGNIOL-VILLARD <plagnioj@jcrosoft.com>
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

#ifndef __ASM_ARM_MACRO_H__
#define __ASM_ARM_MACRO_H__
#ifdef __ASSEMBLY__

/*
 * These macros provide a convenient way to write 8, 16 and 32 bit data
 * to any address.
 * Registers r4 and r5 are used, any data in these registers is
 * overwritten by the macros.
 * The macros are valid for any ARM architecture, they do not implement
 * any memory barriers so caution is recommended when using these when the
 * caches are enabled or on a multi-core system.
 */

.macro	write32, addr, data
	ldr	r4, =\addr
	ldr	r5, =\data
	str	r5, [r4]
.endm

.macro	write16, addr, data
	ldr	r4, =\addr
	ldr	r5, =\data	/* "=" literal loads only exist for ldr */
	strh	r5, [r4]
.endm

.macro	write8, addr, data
	ldr	r4, =\addr
	ldr	r5, =\data	/* "=" literal loads only exist for ldr */
	strb	r5, [r4]
.endm

/*
 * This macro generates a loop that can be used for delays in the code.
 * Register r4 is used, any data in this register is overwritten by the
 * macro.
 * The macro is valid for any ARM architecture. The actual time spent in the
 * loop will vary from CPU to CPU though.
 */

.macro	wait_timer, time
	ldr	r4, =\time
1:
	nop
	subs	r4, r4, #1
	bcs	1b
.endm

#ifdef CONFIG_ARM64
/*
 * Register aliases.
 */
lr	.req	x30

/*
 * Branch according to exception level
 */
.macro	switch_el, xreg, el3_label, el2_label, el1_label
	mrs	\xreg, CurrentEL
	cmp	\xreg, 0xc
	b.eq	\el3_label
	cmp	\xreg, 0x8
	b.eq	\el2_label
	cmp	\xreg, 0x4
	b.eq	\el1_label
.endm

/*
 * Branch if current processor is a Cortex-A57 core.
 */
.macro	branch_if_a57_core, xreg, a57_label
	mrs	\xreg, midr_el1
	lsr	\xreg, \xreg, #4
	and	\xreg, \xreg, #0x00000FFF
	cmp	\xreg, #0xD07		/* Cortex-A57 MPCore processor. */
	b.eq	\a57_label
.endm

/*
 * Branch if current processor is a Cortex-A53 core.
 */
.macro	branch_if_a53_core, xreg, a53_label
	mrs	\xreg, midr_el1
	lsr	\xreg, \xreg, #4
	and	\xreg, \xreg, #0x00000FFF
	cmp	\xreg, #0xD03		/* Cortex-A53 MPCore processor. */
	b.eq	\a53_label
.endm

/*
 * Branch if current processor is a slave,
 * choose processor with all zero affinity value as the master.
 */
.macro	branch_if_slave, xreg, slave_label
#ifdef CONFIG_ARMV8_MULTIENTRY
	/* NOTE: MPIDR handling will be erroneous on multi-cluster machines */
	mrs	\xreg, mpidr_el1
	tst	\xreg, #0xff		/* Test Affinity 0 */
	b.ne	\slave_label
	lsr	\xreg, \xreg, #8
	tst	\xreg, #0xff		/* Test Affinity 1 */
	b.ne	\slave_label
	lsr	\xreg, \xreg, #8
	tst	\xreg, #0xff		/* Test Affinity 2 */
	b.ne	\slave_label
	lsr	\xreg, \xreg, #16
	tst	\xreg, #0xff		/* Test Affinity 3 */
	b.ne	\slave_label
#endif
.endm

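/*
 * Usage sketch for the detection macros above. Illustrative only, not part
 * of this header: the labels, register choices and surrounding code are
 * hypothetical. Note that branch_if_slave expands to nothing unless
 * CONFIG_ARMV8_MULTIENTRY is set.
 *
 *	switch_el x1, 3f, 2f, 1f	// dispatch on the current EL
 * 3:	branch_if_a57_core x1, a57_setup // run A57-specific init if matched
 *	branch_if_slave x0, slave_cpu	// park everything but the master
 *	...				// master continues here
 */
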
/*
 * Branch if current processor is a master,
 * choose processor with all zero affinity value as the master.
 */
.macro	branch_if_master, xreg1, xreg2, master_label
#ifdef CONFIG_ARMV8_MULTIENTRY
	/* NOTE: MPIDR handling will be erroneous on multi-cluster machines */
	mrs	\xreg1, mpidr_el1
	lsr	\xreg2, \xreg1, #32
	lsl	\xreg1, \xreg1, #40
	lsr	\xreg1, \xreg1, #40
	orr	\xreg1, \xreg1, \xreg2
	cbz	\xreg1, \master_label
#else
	b	\master_label
#endif
.endm

.macro armv8_switch_to_el2_m, xreg1
	/* 64bit EL2 | HCE | SMD | RES1 (Bits[5:4]) | Non-secure EL0/EL1 */
	mov	\xreg1, #0x5b1
	msr	scr_el3, \xreg1
	msr	cptr_el3, xzr		/* Disable coprocessor traps to EL3 */
	mov	\xreg1, #0x33ff
	msr	cptr_el2, \xreg1	/* Disable coprocessor traps to EL2 */

	/* Initialize SCTLR_EL2
	 *
	 * setting RES1 bits (29,28,23,22,18,16,11,5,4) to 1
	 * and RES0 bits (31,30,27,26,24,21,20,17,15-13,10-6) +
	 * EE,WXN,I,SA,C,A,M to 0
	 */
	mov	\xreg1, #0x0830
	movk	\xreg1, #0x30C5, lsl #16
	msr	sctlr_el2, \xreg1

	/* Return to the EL2_SP2 mode from EL3 */
	mov	\xreg1, sp
	msr	sp_el2, \xreg1		/* Migrate SP */
	mrs	\xreg1, vbar_el3
	msr	vbar_el2, \xreg1	/* Migrate VBAR */
	mov	\xreg1, #0x3c9
	msr	spsr_el3, \xreg1	/* EL2_SP2 | D | A | I | F */
	msr	elr_el3, lr
	eret
.endm

.macro armv8_switch_to_el1_m, xreg1, xreg2
	/* Initialize Generic Timers */
	mrs	\xreg1, cnthctl_el2
	orr	\xreg1, \xreg1, #0x3	/* Enable EL1 access to timers */
	msr	cnthctl_el2, \xreg1
	msr	cntvoff_el2, xzr

	/* Initialize MPID/MPIDR registers */
	mrs	\xreg1, midr_el1
	mrs	\xreg2, mpidr_el1
	msr	vpidr_el2, \xreg1
	msr	vmpidr_el2, \xreg2

	/* Disable coprocessor traps */
	mov	\xreg1, #0x33ff
	msr	cptr_el2, \xreg1	/* Disable coprocessor traps to EL2 */
	msr	hstr_el2, xzr		/* Disable CP15 traps to EL2 */
	mov	\xreg1, #3 << 20
	msr	cpacr_el1, \xreg1	/* Enable FP/SIMD at EL1 */

	/* Initialize HCR_EL2 */
	mov	\xreg1, #(1 << 31)		/* 64bit EL1 */
	orr	\xreg1, \xreg1, #(1 << 29)	/* Disable HVC */
	msr	hcr_el2, \xreg1

	/* SCTLR_EL1 initialization
	 *
	 * setting RES1 bits (29,28,23,22,20,11) to 1
	 * and RES0 bits (31,30,27,21,17,13,10,6) +
	 * UCI,EE,E0E,WXN,nTWE,nTWI,UCT,DZE,I,UMA,SED,ITD,
	 * CP15BEN,SA0,SA,C,A,M to 0
	 */
	mov	\xreg1, #0x0800
	movk	\xreg1, #0x30d0, lsl #16
	msr	sctlr_el1, \xreg1

	/* Return to the EL1_SP1 mode from EL2 */
	mov	\xreg1, sp
	msr	sp_el1, \xreg1		/* Migrate SP */
	mrs	\xreg1, vbar_el2
	msr	vbar_el1, \xreg1	/* Migrate VBAR */
	mov	\xreg1, #0x3c5
	msr	spsr_el2, \xreg1	/* EL1_SP1 | D | A | I | F */
	msr	elr_el2, lr
	eret
.endm

#if defined(CONFIG_GICV3)
.macro gic_wait_for_interrupt_m xreg1
0:	wfi
	mrs	\xreg1, ICC_IAR1_EL1
	msr	ICC_EOIR1_EL1, \xreg1
	cbnz	\xreg1, 0b
.endm
#elif defined(CONFIG_GICV2)
.macro gic_wait_for_interrupt_m xreg1, wreg2
0:	wfi
	ldr	\wreg2, [\xreg1, GICC_AIAR]
	str	\wreg2, [\xreg1, GICC_AEOIR]
	and	\wreg2, \wreg2, #0x3ff
	cbnz	\wreg2, 0b
.endm
#endif

#endif /* CONFIG_ARM64 */

#endif /* __ASSEMBLY__ */
#endif /* __ASM_ARM_MACRO_H__ */
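
/*
 * Usage sketch for the EL-switch and GIC macros above. Illustrative only,
 * not part of this header: GICC_BASE, the labels and the surrounding code
 * are hypothetical, and the GICv2 form of gic_wait_for_interrupt_m is
 * shown. armv8_switch_to_el2_m erets to whatever lr holds, in EL2.
 *
 *	branch_if_master x0, x1, master_cpu
 * slave_cpu:					// secondaries sleep on the GIC
 *	ldr	x0, =GICC_BASE			// board-specific GIC CPU
 *	gic_wait_for_interrupt_m x0, w1		// interface base address
 *	...
 * master_cpu:
 *	adr	lr, in_el2			// resume point after eret
 *	armv8_switch_to_el2_m x0		// clobbers x0
 * in_el2:
 *	...
 */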