1 /* 2 * Macros for accessing system registers with older binutils. 3 * 4 * Copyright (C) 2014 ARM Ltd. 5 * Author: Catalin Marinas <catalin.marinas@arm.com> 6 * 7 * This program is free software: you can redistribute it and/or modify 8 * it under the terms of the GNU General Public License version 2 as 9 * published by the Free Software Foundation. 10 * 11 * This program is distributed in the hope that it will be useful, 12 * but WITHOUT ANY WARRANTY; without even the implied warranty of 13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 * GNU General Public License for more details. 15 * 16 * You should have received a copy of the GNU General Public License 17 * along with this program. If not, see <http://www.gnu.org/licenses/>. 18 */ 19 20 #ifndef __ASM_SYSREG_H 21 #define __ASM_SYSREG_H 22 23 #include <linux/stringify.h> 24 25 #include <asm/opcodes.h> 26 27 /* 28 * ARMv8 ARM reserves the following encoding for system registers: 29 * (Ref: ARMv8 ARM, Section: "System instruction class encoding overview", 30 * C5.2, version:ARM DDI 0487A.f) 31 * [20-19] : Op0 32 * [18-16] : Op1 33 * [15-12] : CRn 34 * [11-8] : CRm 35 * [7-5] : Op2 36 */ 37 #define sys_reg(op0, op1, crn, crm, op2) \ 38 ((((op0)&3)<<19)|((op1)<<16)|((crn)<<12)|((crm)<<8)|((op2)<<5)) 39 40 #define SYS_MIDR_EL1 sys_reg(3, 0, 0, 0, 0) 41 #define SYS_MPIDR_EL1 sys_reg(3, 0, 0, 0, 5) 42 #define SYS_REVIDR_EL1 sys_reg(3, 0, 0, 0, 6) 43 44 #define SYS_ID_PFR0_EL1 sys_reg(3, 0, 0, 1, 0) 45 #define SYS_ID_PFR1_EL1 sys_reg(3, 0, 0, 1, 1) 46 #define SYS_ID_DFR0_EL1 sys_reg(3, 0, 0, 1, 2) 47 #define SYS_ID_MMFR0_EL1 sys_reg(3, 0, 0, 1, 4) 48 #define SYS_ID_MMFR1_EL1 sys_reg(3, 0, 0, 1, 5) 49 #define SYS_ID_MMFR2_EL1 sys_reg(3, 0, 0, 1, 6) 50 #define SYS_ID_MMFR3_EL1 sys_reg(3, 0, 0, 1, 7) 51 52 #define SYS_ID_ISAR0_EL1 sys_reg(3, 0, 0, 2, 0) 53 #define SYS_ID_ISAR1_EL1 sys_reg(3, 0, 0, 2, 1) 54 #define SYS_ID_ISAR2_EL1 sys_reg(3, 0, 0, 2, 2) 55 #define SYS_ID_ISAR3_EL1 sys_reg(3, 0, 0, 2, 3) 
#define SYS_ID_ISAR4_EL1		sys_reg(3, 0, 0, 2, 4)
#define SYS_ID_ISAR5_EL1		sys_reg(3, 0, 0, 2, 5)
#define SYS_ID_MMFR4_EL1		sys_reg(3, 0, 0, 2, 6)

#define SYS_MVFR0_EL1			sys_reg(3, 0, 0, 3, 0)
#define SYS_MVFR1_EL1			sys_reg(3, 0, 0, 3, 1)
#define SYS_MVFR2_EL1			sys_reg(3, 0, 0, 3, 2)

#define SYS_ID_AA64PFR0_EL1		sys_reg(3, 0, 0, 4, 0)
#define SYS_ID_AA64PFR1_EL1		sys_reg(3, 0, 0, 4, 1)

#define SYS_ID_AA64DFR0_EL1		sys_reg(3, 0, 0, 5, 0)
#define SYS_ID_AA64DFR1_EL1		sys_reg(3, 0, 0, 5, 1)

#define SYS_ID_AA64ISAR0_EL1		sys_reg(3, 0, 0, 6, 0)
#define SYS_ID_AA64ISAR1_EL1		sys_reg(3, 0, 0, 6, 1)

#define SYS_ID_AA64MMFR0_EL1		sys_reg(3, 0, 0, 7, 0)
#define SYS_ID_AA64MMFR1_EL1		sys_reg(3, 0, 0, 7, 1)

#define SYS_CNTFRQ_EL0			sys_reg(3, 3, 14, 0, 0)
#define SYS_CTR_EL0			sys_reg(3, 3, 0, 0, 1)
#define SYS_DCZID_EL0			sys_reg(3, 3, 0, 0, 7)

#define REG_PSTATE_PAN_IMM		sys_reg(0, 0, 4, 0, 4)

/*
 * Build an "MSR PAN, #imm" instruction word; the single-bit immediate
 * lands in bit 8.  The argument is fully parenthesised so expressions
 * such as SET_PSTATE_PAN(a == b) expand correctly.
 */
#define SET_PSTATE_PAN(x) __inst_arm(0xd5000000 | REG_PSTATE_PAN_IMM |	\
				     ((!!(x)) << 8) | 0x1f)

/* SCTLR_EL1 */
#define SCTLR_EL1_CP15BEN	(0x1 << 5)
#define SCTLR_EL1_SED		(0x1 << 8)
#define SCTLR_EL1_SPAN		(0x1 << 23)


/* id_aa64isar0 */
#define ID_AA64ISAR0_RDM_SHIFT		28
#define ID_AA64ISAR0_ATOMICS_SHIFT	20
#define ID_AA64ISAR0_CRC32_SHIFT	16
#define ID_AA64ISAR0_SHA2_SHIFT		12
#define ID_AA64ISAR0_SHA1_SHIFT		8
#define ID_AA64ISAR0_AES_SHIFT		4

/* id_aa64pfr0 */
#define ID_AA64PFR0_GIC_SHIFT		24
#define ID_AA64PFR0_ASIMD_SHIFT		20
#define ID_AA64PFR0_FP_SHIFT		16
#define ID_AA64PFR0_EL3_SHIFT		12
#define ID_AA64PFR0_EL2_SHIFT		8
#define ID_AA64PFR0_EL1_SHIFT		4
#define ID_AA64PFR0_EL0_SHIFT		0

#define ID_AA64PFR0_FP_NI		0xf
#define ID_AA64PFR0_FP_SUPPORTED	0x0
#define ID_AA64PFR0_ASIMD_NI		0xf
#define ID_AA64PFR0_ASIMD_SUPPORTED	0x0
#define ID_AA64PFR0_EL1_64BIT_ONLY	0x1
#define ID_AA64PFR0_EL0_64BIT_ONLY	0x1

/* id_aa64mmfr0 */
#define ID_AA64MMFR0_TGRAN4_SHIFT 28 117 #define ID_AA64MMFR0_TGRAN64_SHIFT 24 118 #define ID_AA64MMFR0_TGRAN16_SHIFT 20 119 #define ID_AA64MMFR0_BIGENDEL0_SHIFT 16 120 #define ID_AA64MMFR0_SNSMEM_SHIFT 12 121 #define ID_AA64MMFR0_BIGENDEL_SHIFT 8 122 #define ID_AA64MMFR0_ASID_SHIFT 4 123 #define ID_AA64MMFR0_PARANGE_SHIFT 0 124 125 #define ID_AA64MMFR0_TGRAN4_NI 0xf 126 #define ID_AA64MMFR0_TGRAN4_SUPPORTED 0x0 127 #define ID_AA64MMFR0_TGRAN64_NI 0xf 128 #define ID_AA64MMFR0_TGRAN64_SUPPORTED 0x0 129 #define ID_AA64MMFR0_TGRAN16_NI 0x0 130 #define ID_AA64MMFR0_TGRAN16_SUPPORTED 0x1 131 132 /* id_aa64mmfr1 */ 133 #define ID_AA64MMFR1_PAN_SHIFT 20 134 #define ID_AA64MMFR1_LOR_SHIFT 16 135 #define ID_AA64MMFR1_HPD_SHIFT 12 136 #define ID_AA64MMFR1_VHE_SHIFT 8 137 #define ID_AA64MMFR1_VMIDBITS_SHIFT 4 138 #define ID_AA64MMFR1_HADBS_SHIFT 0 139 140 /* id_aa64dfr0 */ 141 #define ID_AA64DFR0_CTX_CMPS_SHIFT 28 142 #define ID_AA64DFR0_WRPS_SHIFT 20 143 #define ID_AA64DFR0_BRPS_SHIFT 12 144 #define ID_AA64DFR0_PMUVER_SHIFT 8 145 #define ID_AA64DFR0_TRACEVER_SHIFT 4 146 #define ID_AA64DFR0_DEBUGVER_SHIFT 0 147 148 #define ID_ISAR5_RDM_SHIFT 24 149 #define ID_ISAR5_CRC32_SHIFT 16 150 #define ID_ISAR5_SHA2_SHIFT 12 151 #define ID_ISAR5_SHA1_SHIFT 8 152 #define ID_ISAR5_AES_SHIFT 4 153 #define ID_ISAR5_SEVL_SHIFT 0 154 155 #define MVFR0_FPROUND_SHIFT 28 156 #define MVFR0_FPSHVEC_SHIFT 24 157 #define MVFR0_FPSQRT_SHIFT 20 158 #define MVFR0_FPDIVIDE_SHIFT 16 159 #define MVFR0_FPTRAP_SHIFT 12 160 #define MVFR0_FPDP_SHIFT 8 161 #define MVFR0_FPSP_SHIFT 4 162 #define MVFR0_SIMD_SHIFT 0 163 164 #define MVFR1_SIMDFMAC_SHIFT 28 165 #define MVFR1_FPHP_SHIFT 24 166 #define MVFR1_SIMDHP_SHIFT 20 167 #define MVFR1_SIMDSP_SHIFT 16 168 #define MVFR1_SIMDINT_SHIFT 12 169 #define MVFR1_SIMDLS_SHIFT 8 170 #define MVFR1_FPDNAN_SHIFT 4 171 #define MVFR1_FPFTZ_SHIFT 0 172 173 174 #define ID_AA64MMFR0_TGRAN4_SHIFT 28 175 #define ID_AA64MMFR0_TGRAN64_SHIFT 24 176 #define ID_AA64MMFR0_TGRAN16_SHIFT 
20 177 178 #define ID_AA64MMFR0_TGRAN4_NI 0xf 179 #define ID_AA64MMFR0_TGRAN4_SUPPORTED 0x0 180 #define ID_AA64MMFR0_TGRAN64_NI 0xf 181 #define ID_AA64MMFR0_TGRAN64_SUPPORTED 0x0 182 #define ID_AA64MMFR0_TGRAN16_NI 0x0 183 #define ID_AA64MMFR0_TGRAN16_SUPPORTED 0x1 184 185 #if defined(CONFIG_ARM64_4K_PAGES) 186 #define ID_AA64MMFR0_TGRAN_SHIFT ID_AA64MMFR0_TGRAN4_SHIFT 187 #define ID_AA64MMFR0_TGRAN_SUPPORTED ID_AA64MMFR0_TGRAN4_SUPPORTED 188 #elif defined(CONFIG_ARM64_16K_PAGES) 189 #define ID_AA64MMFR0_TGRAN_SHIFT ID_AA64MMFR0_TGRAN16_SHIFT 190 #define ID_AA64MMFR0_TGRAN_SUPPORTED ID_AA64MMFR0_TGRAN16_SUPPORTED 191 #elif defined(CONFIG_ARM64_64K_PAGES) 192 #define ID_AA64MMFR0_TGRAN_SHIFT ID_AA64MMFR0_TGRAN64_SHIFT 193 #define ID_AA64MMFR0_TGRAN_SUPPORTED ID_AA64MMFR0_TGRAN64_SUPPORTED 194 #endif 195 196 #ifdef __ASSEMBLY__ 197 198 .irp num,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30 199 .equ __reg_num_x\num, \num 200 .endr 201 .equ __reg_num_xzr, 31 202 203 .macro mrs_s, rt, sreg 204 .inst 0xd5200000|(\sreg)|(__reg_num_\rt) 205 .endm 206 207 .macro msr_s, sreg, rt 208 .inst 0xd5000000|(\sreg)|(__reg_num_\rt) 209 .endm 210 211 #else 212 213 #include <linux/types.h> 214 215 asm( 216 " .irp num,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30\n" 217 " .equ __reg_num_x\\num, \\num\n" 218 " .endr\n" 219 " .equ __reg_num_xzr, 31\n" 220 "\n" 221 " .macro mrs_s, rt, sreg\n" 222 " .inst 0xd5200000|(\\sreg)|(__reg_num_\\rt)\n" 223 " .endm\n" 224 "\n" 225 " .macro msr_s, sreg, rt\n" 226 " .inst 0xd5000000|(\\sreg)|(__reg_num_\\rt)\n" 227 " .endm\n" 228 ); 229 230 static inline void config_sctlr_el1(u32 clear, u32 set) 231 { 232 u32 val; 233 234 asm volatile("mrs %0, sctlr_el1" : "=r" (val)); 235 val &= ~clear; 236 val |= set; 237 asm volatile("msr sctlr_el1, %0" : : "r" (val)); 238 } 239 240 /* 241 * Unlike read_cpuid, calls to read_sysreg are never expected to be 242 * optimized away or replaced with 
synthetic values. 243 */ 244 #define read_sysreg(r) ({ \ 245 u64 __val; \ 246 asm volatile("mrs %0, " __stringify(r) : "=r" (__val)); \ 247 __val; \ 248 }) 249 250 #define write_sysreg(v, r) do { \ 251 u64 __val = (u64)v; \ 252 asm volatile("msr " __stringify(r) ", %0" \ 253 : : "r" (__val)); \ 254 } while (0) 255 256 #endif 257 258 #endif /* __ASM_SYSREG_H */ 259