// SPDX-License-Identifier: GPL-2.0
// Copyright (C) 2018 Hangzhou C-SKY Microsystems co.,ltd.

#include <linux/ptrace.h>
#include <linux/uaccess.h>
#include <abi/reg_ops.h>

#define MTCR_MASK	0xFC00FFE0
#define MFCR_MASK	0xFC00FFE0
#define MTCR_DIST	0xC0006420
#define MFCR_DIST	0xC0006020

void __init init_fpu(void)
{
	/* Clear the FPU control register, cr<1, 2> (fcr). */
	mtcr("cr<1, 2>", 0);
}

/*
 * fpu_libc_helper() helps libc execute:
 *  - mfcr %a, cr<1, 2>
 *  - mfcr %a, cr<2, 2>
 *  - mtcr %a, cr<1, 2>
 *  - mtcr %a, cr<2, 2>
 */
int fpu_libc_helper(struct pt_regs *regs)
{
	int fault;
	unsigned long instrptr, regx = 0;
	unsigned long index = 0, tmp = 0;
	unsigned long tinstr = 0;
	u16 instr_hi, instr_low;

	instrptr = instruction_pointer(regs);
	if (instrptr & 1)
		return 0;

	fault = __get_user(instr_low, (u16 *)instrptr);
	if (fault)
		return 0;

	fault = __get_user(instr_hi, (u16 *)(instrptr + 2));
	if (fault)
		return 0;

	/* The halfword at pc forms the upper 16 bits of the instruction word. */
	tinstr = instr_hi | ((unsigned long)instr_low << 16);

	/* Only control registers in bank 2 (cr<x, 2>) are handled. */
	if (((tinstr >> 21) & 0x1F) != 2)
		return 0;

	if ((tinstr & MTCR_MASK) == MTCR_DIST) {
		index = (tinstr >> 16) & 0x1F;
		if (index > 13)
			return 0;

		tmp = tinstr & 0x1F;
		if (tmp > 2)
			return 0;

		regx = *(&regs->a0 + index);

		if (tmp == 1)
			mtcr("cr<1, 2>", regx);
		else if (tmp == 2)
			mtcr("cr<2, 2>", regx);
		else
			return 0;

		regs->pc += 4;
		return 1;
	}

	if ((tinstr & MFCR_MASK) == MFCR_DIST) {
		index = tinstr & 0x1F;
		if (index > 13)
			return 0;

		tmp = ((tinstr >> 16) & 0x1F);
		if (tmp > 2)
			return 0;

		if (tmp == 1)
			regx = mfcr("cr<1, 2>");
		else if (tmp == 2)
			regx = mfcr("cr<2, 2>");
		else
			return 0;

		*(&regs->a0 + index) = regx;

		regs->pc += 4;
		return 1;
	}

	return 0;
}

/* Translate the FPU exception status (fesr) into a signal/si_code pair. */
void fpu_fpe(struct pt_regs *regs)
{
	int sig, code;
	unsigned int fesr;

	fesr = mfcr("cr<2, 2>");

	sig = SIGFPE;
	code = FPE_FLTUNK;

	if (fesr & FPE_ILLE) {
		sig = SIGILL;
		code = ILL_ILLOPC;
	} else if (fesr & FPE_IDC) {
		sig = SIGILL;
		code = ILL_ILLOPN;
	} else if (fesr & FPE_FEC) {
		sig = SIGFPE;
		if (fesr & FPE_IOC)
			code = FPE_FLTINV;
		else if (fesr & FPE_DZC)
			code = FPE_FLTDIV;
		else if (fesr & FPE_UFC)
			code = FPE_FLTUND;
		else if (fesr & FPE_OFC)
			code = FPE_FLTOVF;
		else if (fesr & FPE_IXC)
			code = FPE_FLTRES;
	}

	force_sig_fault(sig, code, (void __user *)regs->pc);
}

#define FMFVR_FPU_REGS(vrx, vry)	\
	"fmfvrl %0, "#vrx"\n"		\
	"fmfvrh %1, "#vrx"\n"		\
	"fmfvrl %2, "#vry"\n"		\
	"fmfvrh %3, "#vry"\n"

#define FMTVR_FPU_REGS(vrx, vry)	\
	"fmtvrl "#vrx", %0\n"		\
	"fmtvrh "#vrx", %1\n"		\
	"fmtvrl "#vry", %2\n"		\
	"fmtvrh "#vry", %3\n"

#define STW_FPU_REGS(a, b, c, d)	\
	"stw %0, (%4, "#a")\n"		\
	"stw %1, (%4, "#b")\n"		\
	"stw %2, (%4, "#c")\n"		\
	"stw %3, (%4, "#d")\n"

#define LDW_FPU_REGS(a, b, c, d)	\
	"ldw %0, (%4, "#a")\n"		\
	"ldw %1, (%4, "#b")\n"		\
	"ldw %2, (%4, "#c")\n"		\
	"ldw %3, (%4, "#d")\n"

void save_to_user_fp(struct user_fp *user_fp)
{
	unsigned long flg;
	unsigned long tmp1, tmp2;
	unsigned long *fpregs;

	local_irq_save(flg);

	tmp1 = mfcr("cr<1, 2>");
	tmp2 = mfcr("cr<2, 2>");

	user_fp->fcr = tmp1;
	user_fp->fesr = tmp2;

	fpregs = &user_fp->vr[0];
#ifdef CONFIG_CPU_HAS_FPUV2
#ifdef CONFIG_CPU_HAS_VDSP
	asm volatile(
"vstmu.32 vr0-vr3, (%0)\n" 173 "vstmu.32 vr4-vr7, (%0)\n" 174 "vstmu.32 vr8-vr11, (%0)\n" 175 "vstmu.32 vr12-vr15, (%0)\n" 176 "fstmu.64 vr16-vr31, (%0)\n" 177 : "+a"(fpregs) 178 ::"memory"); 179 #else 180 asm volatile( 181 "fstmu.64 vr0-vr31, (%0)\n" 182 : "+a"(fpregs) 183 ::"memory"); 184 #endif 185 #else 186 { 187 unsigned long tmp3, tmp4; 188 189 asm volatile( 190 FMFVR_FPU_REGS(vr0, vr1) 191 STW_FPU_REGS(0, 4, 16, 20) 192 FMFVR_FPU_REGS(vr2, vr3) 193 STW_FPU_REGS(32, 36, 48, 52) 194 FMFVR_FPU_REGS(vr4, vr5) 195 STW_FPU_REGS(64, 68, 80, 84) 196 FMFVR_FPU_REGS(vr6, vr7) 197 STW_FPU_REGS(96, 100, 112, 116) 198 "addi %4, 128\n" 199 FMFVR_FPU_REGS(vr8, vr9) 200 STW_FPU_REGS(0, 4, 16, 20) 201 FMFVR_FPU_REGS(vr10, vr11) 202 STW_FPU_REGS(32, 36, 48, 52) 203 FMFVR_FPU_REGS(vr12, vr13) 204 STW_FPU_REGS(64, 68, 80, 84) 205 FMFVR_FPU_REGS(vr14, vr15) 206 STW_FPU_REGS(96, 100, 112, 116) 207 : "=a"(tmp1), "=a"(tmp2), "=a"(tmp3), 208 "=a"(tmp4), "+a"(fpregs) 209 ::"memory"); 210 } 211 #endif 212 213 local_irq_restore(flg); 214 } 215 216 void restore_from_user_fp(struct user_fp *user_fp) 217 { 218 unsigned long flg; 219 unsigned long tmp1, tmp2; 220 unsigned long *fpregs; 221 222 local_irq_save(flg); 223 224 tmp1 = user_fp->fcr; 225 tmp2 = user_fp->fesr; 226 227 mtcr("cr<1, 2>", tmp1); 228 mtcr("cr<2, 2>", tmp2); 229 230 fpregs = &user_fp->vr[0]; 231 #ifdef CONFIG_CPU_HAS_FPUV2 232 #ifdef CONFIG_CPU_HAS_VDSP 233 asm volatile( 234 "vldmu.32 vr0-vr3, (%0)\n" 235 "vldmu.32 vr4-vr7, (%0)\n" 236 "vldmu.32 vr8-vr11, (%0)\n" 237 "vldmu.32 vr12-vr15, (%0)\n" 238 "fldmu.64 vr16-vr31, (%0)\n" 239 : "+a"(fpregs) 240 ::"memory"); 241 #else 242 asm volatile( 243 "fldmu.64 vr0-vr31, (%0)\n" 244 : "+a"(fpregs) 245 ::"memory"); 246 #endif 247 #else 248 { 249 unsigned long tmp3, tmp4; 250 251 asm volatile( 252 LDW_FPU_REGS(0, 4, 16, 20) 253 FMTVR_FPU_REGS(vr0, vr1) 254 LDW_FPU_REGS(32, 36, 48, 52) 255 FMTVR_FPU_REGS(vr2, vr3) 256 LDW_FPU_REGS(64, 68, 80, 84) 257 FMTVR_FPU_REGS(vr4, vr5) 258 LDW_FPU_REGS(96, 100, 112, 116) 259 FMTVR_FPU_REGS(vr6, vr7) 260 "addi %4, 128\n" 261 LDW_FPU_REGS(0, 4, 16, 20) 262 FMTVR_FPU_REGS(vr8, vr9) 263 LDW_FPU_REGS(32, 36, 48, 52) 264 FMTVR_FPU_REGS(vr10, vr11) 265 LDW_FPU_REGS(64, 68, 80, 84) 266 FMTVR_FPU_REGS(vr12, vr13) 267 LDW_FPU_REGS(96, 100, 112, 116) 268 FMTVR_FPU_REGS(vr14, vr15) 269 : "=a"(tmp1), "=a"(tmp2), "=a"(tmp3), 270 "=a"(tmp4), "+a"(fpregs) 271 ::"memory"); 272 } 273 #endif 274 local_irq_restore(flg); 275 } 276