/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 04, 07 Ralf Baechle <ralf@linux-mips.org>
 * Copyright (C) MIPS Technologies, Inc.
 * written by Ralf Baechle <ralf@linux-mips.org>
 */
#ifndef _ASM_HAZARDS_H
#define _ASM_HAZARDS_H

/*
 * ASMMACRO(name, code) defines a named pipeline-hazard barrier sequence.
 *
 * In assembler sources it is a plain gas macro.  In C sources the same
 * gas macro is emitted once at file scope via asm() -- so other inline
 * assembly in the same translation unit can expand it -- and a static
 * inline function of the same name is provided so C code can invoke the
 * barrier directly.
 */
#ifdef __ASSEMBLY__
#define ASMMACRO(name, code...) .macro name; code; .endm
#else

#include <asm/cpu-features.h>

#define ASMMACRO(name, code...)						\
__asm__(".macro " #name "; " #code "; .endm");				\
									\
static inline void name(void)						\
{									\
	__asm__ __volatile__ (#name);					\
}

/*
 * MIPS R2 instruction hazard barrier. Needs to be called as a subroutine.
 */
extern void mips_ihb(void);

#endif

/*
 * sll $0, $0, 1 is the SSNOP encoding: a no-op that superscalar
 * implementations must issue alone, forcing single-issue for one cycle.
 */
ASMMACRO(_ssnop,
	 sll	$0, $0, 1
	)

/*
 * sll $0, $0, 3 is the EHB (execution hazard barrier) encoding defined
 * by MIPS R2; on pre-R2 CPUs it decodes as a harmless nop.
 */
ASMMACRO(_ehb,
	 sll	$0, $0, 3
	)

/*
 * TLB hazards
 */
#if defined(CONFIG_CPU_MIPSR2) && !defined(CONFIG_CPU_CAVIUM_OCTEON)

/*
 * MIPSR2 defines ehb for hazard avoidance
 */

ASMMACRO(mtc0_tlbw_hazard,
	 _ehb
	)
ASMMACRO(tlbw_use_hazard,
	 _ehb
	)
ASMMACRO(tlb_probe_hazard,
	 _ehb
	)
ASMMACRO(irq_enable_hazard,
	 _ehb
	)
ASMMACRO(irq_disable_hazard,
	 _ehb
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ehb
	)
/*
 * gcc has a tradition of miscompiling the previous construct using the
 * address of a label as argument to inline assembler.  Gas otoh has the
 * annoying difference between la and dla which are only usable for 32-bit
 * resp. 64-bit code, so can't be used without conditional compilation.
 * The alternative is switching the assembler to 64-bit code which happens
 * to work right even for 32-bit code ...
 */
#define instruction_hazard()						\
do {									\
	unsigned long tmp;						\
									\
	/* jr.hb to the next instruction clears the hazard barrier */	\
	__asm__ __volatile__(						\
	"	.set	mips64r2				\n"	\
	"	dla	%0, 1f					\n"	\
	"	jr.hb	%0					\n"	\
	"	.set	mips0					\n"	\
	"1:							\n"	\
	: "=r" (tmp));							\
} while (0)

#elif defined(CONFIG_CPU_MIPSR1) && !defined(CONFIG_MACH_ALCHEMY)

/*
 * These are slightly complicated by the fact that we guarantee R1 kernels to
 * run fine on R2 processors.  The trailing _ehb is a nop on R1 but closes
 * the hazard window on R2.
 */
ASMMACRO(mtc0_tlbw_hazard,
	 _ssnop; _ssnop; _ehb
	)
ASMMACRO(tlbw_use_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(tlb_probe_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(irq_enable_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(irq_disable_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
/*
 * gcc has a tradition of miscompiling the previous construct using the
 * address of a label as argument to inline assembler.  Gas otoh has the
 * annoying difference between la and dla which are only usable for 32-bit
 * resp. 64-bit code, so can't be used without conditional compilation.
 * The alternative is switching the assembler to 64-bit code which happens
 * to work right even for 32-bit code ...
 */
#define __instruction_hazard()						\
do {									\
	unsigned long tmp;						\
									\
	__asm__ __volatile__(						\
	"	.set	mips64r2				\n"	\
	"	dla	%0, 1f					\n"	\
	"	jr.hb	%0					\n"	\
	"	.set	mips0					\n"	\
	"1:							\n"	\
	: "=r" (tmp));							\
} while (0)

/* Only R2 processors have instruction hazards needing jr.hb. */
#define instruction_hazard()						\
do {									\
	if (cpu_has_mips_r2)						\
		__instruction_hazard();					\
} while (0)

#elif defined(CONFIG_CPU_R10000) || defined(CONFIG_CPU_CAVIUM_OCTEON) || \
      defined(CONFIG_CPU_R5500) || defined(CONFIG_MACH_ALCHEMY)

/*
 * R10000 rocks - all hazards handled in hardware, so this becomes a nobrainer.
 */

ASMMACRO(mtc0_tlbw_hazard,
	)
ASMMACRO(tlbw_use_hazard,
	)
ASMMACRO(tlb_probe_hazard,
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#elif defined(CONFIG_CPU_RM9000)

/*
 * RM9000 hazards. When the JTLB is updated by tlbwi or tlbwr, a subsequent
 * use of the JTLB for instructions should not occur for 4 cpu cycles and use
 * for data translations should not occur for 3 cpu cycles.
 */

ASMMACRO(mtc0_tlbw_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(tlbw_use_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(tlb_probe_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#elif defined(CONFIG_CPU_SB1)

/*
 * Mostly like R4000 for historic reasons
 */
ASMMACRO(mtc0_tlbw_hazard,
	)
ASMMACRO(tlbw_use_hazard,
	)
ASMMACRO(tlb_probe_hazard,
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	 _ssnop; _ssnop; _ssnop
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#else

/*
 * Finally the catchall case for all other processors including R4000, R4400,
 * R4600, R4700, R5000, RM7000, NEC VR41xx etc.
 *
 * The taken branch will result in a two cycle penalty for the two killed
 * instructions on R4000 / R4400. Other processors only have a single cycle
 * hazard so this is nice trick to have an optimal code for a range of
 * processors.
 */
ASMMACRO(mtc0_tlbw_hazard,
	 nop; nop
	)
ASMMACRO(tlbw_use_hazard,
	 nop; nop; nop
	)
ASMMACRO(tlb_probe_hazard,
	 nop; nop; nop
	)
ASMMACRO(irq_enable_hazard,
	 _ssnop; _ssnop; _ssnop;
	)
ASMMACRO(irq_disable_hazard,
	 nop; nop; nop
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ssnop; _ssnop; _ssnop;
	)
#define instruction_hazard() do { } while (0)

#endif


/* FPU hazards */

#if defined(CONFIG_CPU_SB1)
/*
 * NOTE(review): the never-taken branch-likely (bnezl $0) appears to be
 * used here to force a pipeline flush after enabling the FPU -- confirm
 * against SB1 core documentation.
 */
ASMMACRO(enable_fpu_hazard,
	 .set	push;
	 .set	mips64;
	 .set	noreorder;
	 _ssnop;
	 bnezl	$0, .+4;
	 _ssnop;
	 .set	pop
	)
ASMMACRO(disable_fpu_hazard,
	)

#elif defined(CONFIG_CPU_MIPSR2)
ASMMACRO(enable_fpu_hazard,
	 _ehb
	)
ASMMACRO(disable_fpu_hazard,
	 _ehb
	)
#else
ASMMACRO(enable_fpu_hazard,
	 nop; nop; nop; nop
	)
/* _ehb is a plain nop on pre-R2 CPUs, so this is safe everywhere. */
ASMMACRO(disable_fpu_hazard,
	 _ehb
	)
#endif

#endif /* _ASM_HAZARDS_H */