// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) 2020 Western Digital Corporation or its affiliates.
 */
#include <linux/kernel.h>
#include <linux/init.h>
#include <linux/mm.h>
#include <linux/module.h>
#include <linux/irq.h>

#include <asm/processor.h>
#include <asm/ptrace.h>
#include <asm/csr.h>

#define INSN_MATCH_LB		0x3
#define INSN_MASK_LB		0x707f
#define INSN_MATCH_LH		0x1003
#define INSN_MASK_LH		0x707f
#define INSN_MATCH_LW		0x2003
#define INSN_MASK_LW		0x707f
#define INSN_MATCH_LD		0x3003
#define INSN_MASK_LD		0x707f
#define INSN_MATCH_LBU		0x4003
#define INSN_MASK_LBU		0x707f
#define INSN_MATCH_LHU		0x5003
#define INSN_MASK_LHU		0x707f
#define INSN_MATCH_LWU		0x6003
#define INSN_MASK_LWU		0x707f
#define INSN_MATCH_SB		0x23
#define INSN_MASK_SB		0x707f
#define INSN_MATCH_SH		0x1023
#define INSN_MASK_SH		0x707f
#define INSN_MATCH_SW		0x2023
#define INSN_MASK_SW		0x707f
#define INSN_MATCH_SD		0x3023
#define INSN_MASK_SD		0x707f

#define INSN_MATCH_FLW		0x2007
#define INSN_MASK_FLW		0x707f
#define INSN_MATCH_FLD		0x3007
#define INSN_MASK_FLD		0x707f
#define INSN_MATCH_FLQ		0x4007
#define INSN_MASK_FLQ		0x707f
#define INSN_MATCH_FSW		0x2027
#define INSN_MASK_FSW		0x707f
#define INSN_MATCH_FSD		0x3027
#define INSN_MASK_FSD		0x707f
#define INSN_MATCH_FSQ		0x4027
#define INSN_MASK_FSQ		0x707f

#define INSN_MATCH_C_LD		0x6000
#define INSN_MASK_C_LD		0xe003
#define INSN_MATCH_C_SD		0xe000
#define INSN_MASK_C_SD		0xe003
#define INSN_MATCH_C_LW		0x4000
#define INSN_MASK_C_LW		0xe003
#define INSN_MATCH_C_SW		0xc000
#define INSN_MASK_C_SW		0xe003
#define INSN_MATCH_C_LDSP	0x6002
#define INSN_MASK_C_LDSP	0xe003
#define INSN_MATCH_C_SDSP	0xe002
#define INSN_MASK_C_SDSP	0xe003
#define INSN_MATCH_C_LWSP	0x4002
#define INSN_MASK_C_LWSP	0xe003
#define INSN_MATCH_C_SWSP	0xc002
#define INSN_MASK_C_SWSP	0xe003

#define INSN_MATCH_C_FLD	0x2000
#define INSN_MASK_C_FLD		0xe003
#define INSN_MATCH_C_FLW	0x6000
#define INSN_MASK_C_FLW		0xe003
#define INSN_MATCH_C_FSD	0xa000
#define INSN_MASK_C_FSD		0xe003
#define INSN_MATCH_C_FSW	0xe000
#define INSN_MASK_C_FSW		0xe003
#define INSN_MATCH_C_FLDSP	0x2002
#define INSN_MASK_C_FLDSP	0xe003
#define INSN_MATCH_C_FSDSP	0xa002
#define INSN_MASK_C_FSDSP	0xe003
#define INSN_MATCH_C_FLWSP	0x6002
#define INSN_MASK_C_FLWSP	0xe003
#define INSN_MATCH_C_FSWSP	0xe002
#define INSN_MASK_C_FSWSP	0xe003

#define INSN_LEN(insn)		((((insn) & 0x3) < 0x3) ? 2 : 4)

#if defined(CONFIG_64BIT)
#define LOG_REGBYTES		3
#define XLEN			64
#else
#define LOG_REGBYTES		2
#define XLEN			32
#endif
#define REGBYTES		(1 << LOG_REGBYTES)
#define XLEN_MINUS_16		((XLEN) - 16)

#define SH_RD			7
#define SH_RS1			15
#define SH_RS2			20
#define SH_RS2C			2

#define RV_X(x, s, n)		(((x) >> (s)) & ((1 << (n)) - 1))
#define RVC_LW_IMM(x)		((RV_X(x, 6, 1) << 2) | \
				 (RV_X(x, 10, 3) << 3) | \
				 (RV_X(x, 5, 1) << 6))
#define RVC_LD_IMM(x)		((RV_X(x, 10, 3) << 3) | \
				 (RV_X(x, 5, 2) << 6))
#define RVC_LWSP_IMM(x)		((RV_X(x, 4, 3) << 2) | \
				 (RV_X(x, 12, 1) << 5) | \
				 (RV_X(x, 2, 2) << 6))
#define RVC_LDSP_IMM(x)		((RV_X(x, 5, 2) << 3) | \
				 (RV_X(x, 12, 1) << 5) | \
				 (RV_X(x, 2, 3) << 6))
#define RVC_SWSP_IMM(x)		((RV_X(x, 9, 4) << 2) | \
				 (RV_X(x, 7, 2) << 6))
#define RVC_SDSP_IMM(x)		((RV_X(x, 10, 3) << 3) | \
				 (RV_X(x, 7, 3) << 6))
#define RVC_RS1S(insn)		(8 + RV_X(insn, SH_RD, 3))
#define RVC_RS2S(insn)		(8 + RV_X(insn, SH_RS2C, 3))
#define RVC_RS2(insn)		RV_X(insn, SH_RS2C, 5)

#define SHIFT_RIGHT(x, y)		\
	((y) < 0 ? ((x) << -(y)) : ((x) >> (y)))

#define REG_MASK			\
	((1 << (5 + LOG_REGBYTES)) - (1 << LOG_REGBYTES))

#define REG_OFFSET(insn, pos)		\
	(SHIFT_RIGHT((insn), (pos) - LOG_REGBYTES) & REG_MASK)

#define REG_PTR(insn, pos, regs)	\
	(ulong *)((ulong)(regs) + REG_OFFSET(insn, pos))

#define GET_RM(insn)		(((insn) >> 12) & 7)

#define GET_RS1(insn, regs)	(*REG_PTR(insn, SH_RS1, regs))
#define GET_RS2(insn, regs)	(*REG_PTR(insn, SH_RS2, regs))
#define GET_RS1S(insn, regs)	(*REG_PTR(RVC_RS1S(insn), 0, regs))
#define GET_RS2S(insn, regs)	(*REG_PTR(RVC_RS2S(insn), 0, regs))
#define GET_RS2C(insn, regs)	(*REG_PTR(insn, SH_RS2C, regs))
#define GET_SP(regs)		(*REG_PTR(2, 0, regs))
#define SET_RD(insn, regs, val)	(*REG_PTR(insn, SH_RD, regs) = (val))
#define IMM_I(insn)		((s32)(insn) >> 20)
#define IMM_S(insn)		(((s32)(insn) >> 25 << 5) | \
				 (s32)(((insn) >> 7) & 0x1f))
#define MASK_FUNCT3		0x7000
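
/*
 * Worked example for the accessors above (illustrative only, assuming the
 * RISC-V struct pt_regs layout: epc in the first slot, then x1-x31 as
 * consecutive ulongs, so register xN sits at byte offset N * REGBYTES).
 * REG_OFFSET() extracts a 5-bit register field already scaled by REGBYTES:
 * for "lw a5, 0(a0)" the rd field (bits [11:7]) holds 15, so
 * REG_OFFSET(insn, SH_RD) is 15 * REGBYTES and SET_RD() writes the slot
 * backing a5. Likewise GET_SP() reads slot 2 (sp), and a zero register
 * number harmlessly lands on the epc slot, which the handlers overwrite
 * before returning.
 */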

#define GET_PRECISION(insn)	(((insn) >> 25) & 3)
#define GET_RM(insn)		(((insn) >> 12) & 7)
#define PRECISION_S		0
#define PRECISION_D		1

#define STR(x)			XSTR(x)
#define XSTR(x)			#x

#define DECLARE_UNPRIVILEGED_LOAD_FUNCTION(type, insn)			\
static inline type load_##type(const type *addr)			\
{									\
	type val;							\
	asm (#insn " %0, %1"						\
	: "=&r" (val) : "m" (*addr));					\
	return val;							\
}

#define DECLARE_UNPRIVILEGED_STORE_FUNCTION(type, insn)			\
static inline void store_##type(type *addr, type val)			\
{									\
	asm volatile (#insn " %0, %1\n"					\
	: : "r" (val), "m" (*addr));					\
}

DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u8, lbu)
DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u16, lhu)
DECLARE_UNPRIVILEGED_LOAD_FUNCTION(s8, lb)
DECLARE_UNPRIVILEGED_LOAD_FUNCTION(s16, lh)
DECLARE_UNPRIVILEGED_LOAD_FUNCTION(s32, lw)
DECLARE_UNPRIVILEGED_STORE_FUNCTION(u8, sb)
DECLARE_UNPRIVILEGED_STORE_FUNCTION(u16, sh)
DECLARE_UNPRIVILEGED_STORE_FUNCTION(u32, sw)
#if defined(CONFIG_64BIT)
DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u32, lwu)
DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u64, ld)
DECLARE_UNPRIVILEGED_STORE_FUNCTION(u64, sd)
DECLARE_UNPRIVILEGED_LOAD_FUNCTION(ulong, ld)
#else
DECLARE_UNPRIVILEGED_LOAD_FUNCTION(u32, lw)
DECLARE_UNPRIVILEGED_LOAD_FUNCTION(ulong, lw)

static inline u64 load_u64(const u64 *addr)
{
	return load_u32((u32 *)addr)
		+ ((u64)load_u32((u32 *)addr + 1) << 32);
}

static inline void store_u64(u64 *addr, u64 val)
{
	store_u32((u32 *)addr, val);
	store_u32((u32 *)addr + 1, val >> 32);
}
#endif

static inline ulong get_insn(ulong mepc)
{
	register ulong __mepc asm ("a2") = mepc;
	ulong val, rvc_mask = 3, tmp;

	asm ("and %[tmp], %[addr], 2\n"
		"bnez %[tmp], 1f\n"
#if defined(CONFIG_64BIT)
		STR(LWU) " %[insn], (%[addr])\n"
#else
		STR(LW) " %[insn], (%[addr])\n"
#endif
		"and %[tmp], %[insn], %[rvc_mask]\n"
		"beq %[tmp], %[rvc_mask], 2f\n"
		"sll %[insn], %[insn], %[xlen_minus_16]\n"
		"srl %[insn], %[insn], %[xlen_minus_16]\n"
		"j 2f\n"
		"1:\n"
		"lhu %[insn], (%[addr])\n"
		"and %[tmp], %[insn], %[rvc_mask]\n"
		"bne %[tmp], %[rvc_mask], 2f\n"
		"lhu %[tmp], 2(%[addr])\n"
		"sll %[tmp], %[tmp], 16\n"
		"add %[insn], %[insn], %[tmp]\n"
		"2:"
	: [insn] "=&r" (val), [tmp] "=&r" (tmp)
	: [addr] "r" (__mepc), [rvc_mask] "r" (rvc_mask),
	  [xlen_minus_16] "i" (XLEN_MINUS_16));

	return val;
}
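
/*
 * Note on the fetch above (descriptive only): when mepc is only 2-byte
 * aligned, the instruction is read as one or two naturally aligned
 * halfwords; when mepc is 4-byte aligned, a single 32-bit read is used and
 * the upper halfword is shifted back out if the low two bits indicate a
 * 16-bit (compressed) parcel. INSN_LEN() applies the same low-two-bit
 * test: a compressed encoding such as 0x4388 ("c.lw a0, 0(a5)") has
 * (insn & 0x3) != 0x3, so INSN_LEN() is 2, while every full-size encoding
 * ends in 0b11 and yields 4.
 */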

union reg_data {
	u8 data_bytes[8];
	ulong data_ulong;
	u64 data_u64;
};

int handle_misaligned_load(struct pt_regs *regs)
{
	union reg_data val;
	unsigned long epc = regs->epc;
	unsigned long insn = get_insn(epc);
	unsigned long addr = csr_read(mtval);
	int i, fp = 0, shift = 0, len = 0;

	regs->epc = 0;

	if ((insn & INSN_MASK_LW) == INSN_MATCH_LW) {
		len = 4;
		shift = 8 * (sizeof(unsigned long) - len);
#if defined(CONFIG_64BIT)
	} else if ((insn & INSN_MASK_LD) == INSN_MATCH_LD) {
		len = 8;
		shift = 8 * (sizeof(unsigned long) - len);
	} else if ((insn & INSN_MASK_LWU) == INSN_MATCH_LWU) {
		len = 4;
#endif
	} else if ((insn & INSN_MASK_FLD) == INSN_MATCH_FLD) {
		fp = 1;
		len = 8;
	} else if ((insn & INSN_MASK_FLW) == INSN_MATCH_FLW) {
		fp = 1;
		len = 4;
	} else if ((insn & INSN_MASK_LH) == INSN_MATCH_LH) {
		len = 2;
		shift = 8 * (sizeof(unsigned long) - len);
	} else if ((insn & INSN_MASK_LHU) == INSN_MATCH_LHU) {
		len = 2;
#if defined(CONFIG_64BIT)
	} else if ((insn & INSN_MASK_C_LD) == INSN_MATCH_C_LD) {
		len = 8;
		shift = 8 * (sizeof(unsigned long) - len);
		insn = RVC_RS2S(insn) << SH_RD;
	} else if ((insn & INSN_MASK_C_LDSP) == INSN_MATCH_C_LDSP &&
		   ((insn >> SH_RD) & 0x1f)) {
		len = 8;
		shift = 8 * (sizeof(unsigned long) - len);
#endif
	} else if ((insn & INSN_MASK_C_LW) == INSN_MATCH_C_LW) {
		len = 4;
		shift = 8 * (sizeof(unsigned long) - len);
		insn = RVC_RS2S(insn) << SH_RD;
	} else if ((insn & INSN_MASK_C_LWSP) == INSN_MATCH_C_LWSP &&
		   ((insn >> SH_RD) & 0x1f)) {
		len = 4;
		shift = 8 * (sizeof(unsigned long) - len);
	} else if ((insn & INSN_MASK_C_FLD) == INSN_MATCH_C_FLD) {
		fp = 1;
		len = 8;
		insn = RVC_RS2S(insn) << SH_RD;
	} else if ((insn & INSN_MASK_C_FLDSP) == INSN_MATCH_C_FLDSP) {
		fp = 1;
		len = 8;
#if defined(CONFIG_32BIT)
	} else if ((insn & INSN_MASK_C_FLW) == INSN_MATCH_C_FLW) {
		fp = 1;
		len = 4;
		insn = RVC_RS2S(insn) << SH_RD;
	} else if ((insn & INSN_MASK_C_FLWSP) == INSN_MATCH_C_FLWSP) {
		fp = 1;
		len = 4;
#endif
	} else {
		regs->epc = epc;
		return -1;
	}

	val.data_u64 = 0;
	for (i = 0; i < len; i++)
		val.data_bytes[i] = load_u8((void *)(addr + i));

	if (fp)
		return -1;
	SET_RD(insn, regs, val.data_ulong << shift >> shift);

	regs->epc = epc + INSN_LEN(insn);

	return 0;
}
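
/*
 * Notes on the decode above (descriptive only): for compressed loads such
 * as c.lw/c.ld the destination register lives in bits [4:2], the same
 * field RVC_RS2S() extracts, so the handler rewrites insn as
 * "RVC_RS2S(insn) << SH_RD" to move that register number into the
 * standard rd position and reuse SET_RD() for both 16-bit and 32-bit
 * encodings. The byte-wise copy through load_u8() means the emulation
 * itself only ever issues single-byte, naturally aligned accesses. FP
 * loads are recognized but not emulated here; the handler bails out with
 * -1 rather than touch the FP register file.
 */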

int handle_misaligned_store(struct pt_regs *regs)
{
	union reg_data val;
	unsigned long epc = regs->epc;
	unsigned long insn = get_insn(epc);
	unsigned long addr = csr_read(mtval);
	int i, len = 0;

	regs->epc = 0;

	val.data_ulong = GET_RS2(insn, regs);

	if ((insn & INSN_MASK_SW) == INSN_MATCH_SW) {
		len = 4;
#if defined(CONFIG_64BIT)
	} else if ((insn & INSN_MASK_SD) == INSN_MATCH_SD) {
		len = 8;
#endif
	} else if ((insn & INSN_MASK_SH) == INSN_MATCH_SH) {
		len = 2;
#if defined(CONFIG_64BIT)
	} else if ((insn & INSN_MASK_C_SD) == INSN_MATCH_C_SD) {
		len = 8;
		val.data_ulong = GET_RS2S(insn, regs);
	} else if ((insn & INSN_MASK_C_SDSP) == INSN_MATCH_C_SDSP &&
		   ((insn >> SH_RD) & 0x1f)) {
		len = 8;
		val.data_ulong = GET_RS2C(insn, regs);
#endif
	} else if ((insn & INSN_MASK_C_SW) == INSN_MATCH_C_SW) {
		len = 4;
		val.data_ulong = GET_RS2S(insn, regs);
	} else if ((insn & INSN_MASK_C_SWSP) == INSN_MATCH_C_SWSP &&
		   ((insn >> SH_RD) & 0x1f)) {
		len = 4;
		val.data_ulong = GET_RS2C(insn, regs);
	} else {
		regs->epc = epc;
		return -1;
	}

	for (i = 0; i < len; i++)
		store_u8((void *)(addr + i), val.data_bytes[i]);

	regs->epc = epc + INSN_LEN(insn);

	return 0;
}