/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 2004 Ralf Baechle
 * Copyright (C) 2004 Maciej W. Rozycki
 */
#ifndef __ASM_CPU_FEATURES_H
#define __ASM_CPU_FEATURES_H

#include <asm/cpu.h>
#include <asm/cpu-info.h>
#include <asm/isa-rev.h>
#include <cpu-feature-overrides.h>

#define __ase(ase)	(cpu_data[0].ases & (ase))
#define __isa(isa)	(cpu_data[0].isa_level & (isa))
#define __opt(opt)	(cpu_data[0].options & (opt))

/*
 * Check if MIPS_ISA_REV is >= isa *and* an option or ASE is detected during
 * boot (typically by cpu_probe()).
 *
 * Note that these should only be used in cases where a kernel built for an
 * older ISA *cannot* run on a CPU which supports the feature in question. For
 * example this may be used for features introduced with MIPSr6, since a kernel
 * built for an older ISA cannot run on a MIPSr6 CPU. This should not be used
 * for MIPSr2 features however, since a MIPSr1 or earlier kernel might run on a
 * MIPSr2 CPU.
 */
#define __isa_ge_and_ase(isa, ase)	((MIPS_ISA_REV >= (isa)) && __ase(ase))
#define __isa_ge_and_opt(isa, opt)	((MIPS_ISA_REV >= (isa)) && __opt(opt))

/*
 * Check if MIPS_ISA_REV is >= isa *or* an option or ASE is detected during
 * boot (typically by cpu_probe()).
 *
 * These are for use with features that are optional up until a particular ISA
 * revision & then become required.
 */
#define __isa_ge_or_ase(isa, ase)	((MIPS_ISA_REV >= (isa)) || __ase(ase))
#define __isa_ge_or_opt(isa, opt)	((MIPS_ISA_REV >= (isa)) || __opt(opt))

/*
 * Check if MIPS_ISA_REV is < isa *and* an option or ASE is detected during
 * boot (typically by cpu_probe()).
 *
 * These are for use with features that are optional up until a particular ISA
 * revision & are then removed - ie. no longer present in any CPU implementing
 * the given ISA revision.
 */
#define __isa_lt_and_ase(isa, ase)	((MIPS_ISA_REV < (isa)) && __ase(ase))
#define __isa_lt_and_opt(isa, opt)	((MIPS_ISA_REV < (isa)) && __opt(opt))

/*
 * Similarly allow for ISA level checks that take into account knowledge of the
 * ISA targeted by the kernel build, provided by MIPS_ISA_REV.
 */
#define __isa_ge_and_flag(isa, flag)	((MIPS_ISA_REV >= (isa)) && __isa(flag))
#define __isa_ge_or_flag(isa, flag)	((MIPS_ISA_REV >= (isa)) || __isa(flag))
#define __isa_lt_and_flag(isa, flag)	((MIPS_ISA_REV < (isa)) && __isa(flag))
#define __isa_range(ge, lt) \
	((MIPS_ISA_REV >= (ge)) && (MIPS_ISA_REV < (lt)))
#define __isa_range_or_flag(ge, lt, flag) \
	(__isa_range(ge, lt) || ((MIPS_ISA_REV < (lt)) && __isa(flag)))
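/*
 * Illustrative examples only - the feature and option names below are
 * hypothetical and not defined by this header.  A MIPSr6-only feature would
 * typically use a "ge_and" helper, a feature that became mandatory in r6 a
 * "ge_or" helper, and a feature removed in r6 an "lt_and" helper:
 *
 *	#define cpu_has_r6_only_widget	__isa_ge_and_opt(6, MIPS_CPU_WIDGET)
 *	#define cpu_has_widget		__isa_ge_or_opt(6, MIPS_CPU_WIDGET)
 *	#define cpu_has_legacy_widget	__isa_lt_and_opt(6, MIPS_CPU_WIDGET)
 */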
/*
 * SMP assumption: Options of CPU 0 are a superset of all processors.
 * This is true for all known MIPS systems.
 */
#ifndef cpu_has_tlb
#define cpu_has_tlb		__opt(MIPS_CPU_TLB)
#endif
#ifndef cpu_has_ftlb
#define cpu_has_ftlb		__opt(MIPS_CPU_FTLB)
#endif
#ifndef cpu_has_tlbinv
#define cpu_has_tlbinv		__opt(MIPS_CPU_TLBINV)
#endif
#ifndef cpu_has_segments
#define cpu_has_segments	__opt(MIPS_CPU_SEGMENTS)
#endif
#ifndef cpu_has_eva
#define cpu_has_eva		__opt(MIPS_CPU_EVA)
#endif
#ifndef cpu_has_htw
#define cpu_has_htw		__opt(MIPS_CPU_HTW)
#endif
#ifndef cpu_has_ldpte
#define cpu_has_ldpte		__opt(MIPS_CPU_LDPTE)
#endif
#ifndef cpu_has_rixiex
#define cpu_has_rixiex		__isa_ge_or_opt(6, MIPS_CPU_RIXIEX)
#endif
#ifndef cpu_has_maar
#define cpu_has_maar		__opt(MIPS_CPU_MAAR)
#endif
#ifndef cpu_has_rw_llb
#define cpu_has_rw_llb		__isa_ge_or_opt(6, MIPS_CPU_RW_LLB)
#endif

/*
 * For the moment we don't consider R6000 and R8000 so we can assume that
 * anything that doesn't support R4000-style exceptions and interrupts is
 * R3000-like.  Users should still treat these two macro definitions as
 * opaque.
 */
#ifndef cpu_has_3kex
#define cpu_has_3kex		(!cpu_has_4kex)
#endif
#ifndef cpu_has_4kex
#define cpu_has_4kex		__isa_ge_or_opt(1, MIPS_CPU_4KEX)
#endif
#ifndef cpu_has_3k_cache
#define cpu_has_3k_cache	__isa_lt_and_opt(1, MIPS_CPU_3K_CACHE)
#endif
#define cpu_has_6k_cache	0
#define cpu_has_8k_cache	0
#ifndef cpu_has_4k_cache
#define cpu_has_4k_cache	__isa_ge_or_opt(1, MIPS_CPU_4K_CACHE)
#endif
#ifndef cpu_has_tx39_cache
#define cpu_has_tx39_cache	__opt(MIPS_CPU_TX39_CACHE)
#endif
#ifndef cpu_has_octeon_cache
#define cpu_has_octeon_cache	0
#endif
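/*
 * Illustrative example only: a platform can pin any of the defaults above to
 * a compile-time constant from its <cpu-feature-overrides.h>.  A board header
 * (the values shown here are hypothetical) might contain
 *
 *	#define cpu_has_tlb	1
 *	#define cpu_has_watch	0
 *
 * which lets the compiler drop the runtime cpu_data[] tests and any code
 * guarded by them.
 */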
/* Don't override `cpu_has_fpu' to 1 or the "nofpu" option won't work. */
#ifndef cpu_has_fpu
# ifdef CONFIG_MIPS_FP_SUPPORT
#  define cpu_has_fpu		(current_cpu_data.options & MIPS_CPU_FPU)
#  define raw_cpu_has_fpu	(raw_current_cpu_data.options & MIPS_CPU_FPU)
# else
#  define cpu_has_fpu		0
#  define raw_cpu_has_fpu	0
# endif
#else
# define raw_cpu_has_fpu	cpu_has_fpu
#endif
#ifndef cpu_has_32fpr
#define cpu_has_32fpr		__isa_ge_or_opt(1, MIPS_CPU_32FPR)
#endif
#ifndef cpu_has_counter
#define cpu_has_counter		__opt(MIPS_CPU_COUNTER)
#endif
#ifndef cpu_has_watch
#define cpu_has_watch		__opt(MIPS_CPU_WATCH)
#endif
#ifndef cpu_has_divec
#define cpu_has_divec		__isa_ge_or_opt(1, MIPS_CPU_DIVEC)
#endif
#ifndef cpu_has_vce
#define cpu_has_vce		__opt(MIPS_CPU_VCE)
#endif
#ifndef cpu_has_cache_cdex_p
#define cpu_has_cache_cdex_p	__opt(MIPS_CPU_CACHE_CDEX_P)
#endif
#ifndef cpu_has_cache_cdex_s
#define cpu_has_cache_cdex_s	__opt(MIPS_CPU_CACHE_CDEX_S)
#endif
#ifndef cpu_has_prefetch
#define cpu_has_prefetch	__isa_ge_or_opt(1, MIPS_CPU_PREFETCH)
#endif
#ifndef cpu_has_mcheck
#define cpu_has_mcheck		__isa_ge_or_opt(1, MIPS_CPU_MCHECK)
#endif
#ifndef cpu_has_ejtag
#define cpu_has_ejtag		__opt(MIPS_CPU_EJTAG)
#endif
#ifndef cpu_has_llsc
#define cpu_has_llsc		__isa_ge_or_opt(1, MIPS_CPU_LLSC)
#endif
#ifndef cpu_has_bp_ghist
#define cpu_has_bp_ghist	__opt(MIPS_CPU_BP_GHIST)
#endif
#ifndef kernel_uses_llsc
#define kernel_uses_llsc	cpu_has_llsc
#endif
#ifndef cpu_has_guestctl0ext
#define cpu_has_guestctl0ext	__opt(MIPS_CPU_GUESTCTL0EXT)
#endif
#ifndef cpu_has_guestctl1
#define cpu_has_guestctl1	__opt(MIPS_CPU_GUESTCTL1)
#endif
#ifndef cpu_has_guestctl2
#define cpu_has_guestctl2	__opt(MIPS_CPU_GUESTCTL2)
#endif
#ifndef cpu_has_guestid
#define cpu_has_guestid		__opt(MIPS_CPU_GUESTID)
#endif
#ifndef cpu_has_drg
#define cpu_has_drg		__opt(MIPS_CPU_DRG)
#endif
#ifndef cpu_has_mips16
#define cpu_has_mips16		__isa_lt_and_ase(6, MIPS_ASE_MIPS16)
#endif
#ifndef cpu_has_mips16e2
#define cpu_has_mips16e2	__isa_lt_and_ase(6, MIPS_ASE_MIPS16E2)
#endif
#ifndef cpu_has_mdmx
#define cpu_has_mdmx		__isa_lt_and_ase(6, MIPS_ASE_MDMX)
#endif
#ifndef cpu_has_mips3d
#define cpu_has_mips3d		__isa_lt_and_ase(6, MIPS_ASE_MIPS3D)
#endif
#ifndef cpu_has_smartmips
#define cpu_has_smartmips	__isa_lt_and_ase(6, MIPS_ASE_SMARTMIPS)
#endif

#ifndef cpu_has_rixi
#define cpu_has_rixi		__isa_ge_or_opt(6, MIPS_CPU_RIXI)
#endif

#ifndef cpu_has_mmips
# if defined(__mips_micromips)
#  define cpu_has_mmips		1
# elif defined(CONFIG_SYS_SUPPORTS_MICROMIPS)
#  define cpu_has_mmips		__opt(MIPS_CPU_MICROMIPS)
# else
#  define cpu_has_mmips		0
# endif
#endif

#ifndef cpu_has_lpa
#define cpu_has_lpa		__opt(MIPS_CPU_LPA)
#endif
#ifndef cpu_has_mvh
#define cpu_has_mvh		__opt(MIPS_CPU_MVH)
#endif
#ifndef cpu_has_xpa
#define cpu_has_xpa		(cpu_has_lpa && cpu_has_mvh)
#endif
#ifndef cpu_has_vtag_icache
#define cpu_has_vtag_icache	(cpu_data[0].icache.flags & MIPS_CACHE_VTAG)
#endif
#ifndef cpu_has_dc_aliases
#define cpu_has_dc_aliases	(cpu_data[0].dcache.flags & MIPS_CACHE_ALIASES)
#endif
#ifndef cpu_has_ic_fills_f_dc
#define cpu_has_ic_fills_f_dc	(cpu_data[0].icache.flags & MIPS_CACHE_IC_F_DC)
#endif
#ifndef cpu_has_pindexed_dcache
#define cpu_has_pindexed_dcache	(cpu_data[0].dcache.flags & MIPS_CACHE_PINDEX)
#endif
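/*
 * Illustrative only: because cpu_has_fpu (defined above) collapses to the
 * constant 0 when CONFIG_MIPS_FP_SUPPORT is disabled, callers can keep FPU
 * handling in plain C and let the compiler discard it, e.g.
 *
 *	if (cpu_has_fpu)
 *		own_fpu(1);
 *
 * (own_fpu() is the helper used elsewhere in arch/mips; it is shown here
 * purely as an example.)
 */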
/*
 * I-Cache snoops remote store.  This only matters on SMP.  Some
 * multiprocessors such as the R10000 have I-Caches that snoop local stores;
 * the embedded ones don't.  For maintaining I-cache coherency this means we
 * need to flush the D-cache all the way back to wherever the I-cache does
 * refills from, so the I-cache has a chance to see the new data at all.  Then
 * we have to flush the I-cache also.
 * Note we may have been rescheduled and may no longer be running on the CPU
 * that did the store so we can't optimize this into only doing the flush on
 * the local CPU.
 */
#ifndef cpu_icache_snoops_remote_store
#ifdef CONFIG_SMP
#define cpu_icache_snoops_remote_store	(cpu_data[0].icache.flags & MIPS_IC_SNOOPS_REMOTE)
#else
#define cpu_icache_snoops_remote_store	1
#endif
#endif

#ifndef cpu_has_mips_1
# define cpu_has_mips_1		(MIPS_ISA_REV < 6)
#endif
#ifndef cpu_has_mips_2
# define cpu_has_mips_2		__isa_lt_and_flag(6, MIPS_CPU_ISA_II)
#endif
#ifndef cpu_has_mips_3
# define cpu_has_mips_3		__isa_lt_and_flag(6, MIPS_CPU_ISA_III)
#endif
#ifndef cpu_has_mips_4
# define cpu_has_mips_4		__isa_lt_and_flag(6, MIPS_CPU_ISA_IV)
#endif
#ifndef cpu_has_mips_5
# define cpu_has_mips_5		__isa_lt_and_flag(6, MIPS_CPU_ISA_V)
#endif
#ifndef cpu_has_mips32r1
# define cpu_has_mips32r1	__isa_range_or_flag(1, 6, MIPS_CPU_ISA_M32R1)
#endif
#ifndef cpu_has_mips32r2
# define cpu_has_mips32r2	__isa_range_or_flag(2, 6, MIPS_CPU_ISA_M32R2)
#endif
#ifndef cpu_has_mips32r5
# define cpu_has_mips32r5	__isa_range_or_flag(5, 6, MIPS_CPU_ISA_M32R5)
#endif
#ifndef cpu_has_mips32r6
# define cpu_has_mips32r6	__isa_ge_or_flag(6, MIPS_CPU_ISA_M32R6)
#endif
#ifndef cpu_has_mips64r1
# define cpu_has_mips64r1	(cpu_has_64bits && \
				 __isa_range_or_flag(1, 6, MIPS_CPU_ISA_M64R1))
#endif
#ifndef cpu_has_mips64r2
# define cpu_has_mips64r2	(cpu_has_64bits && \
				 __isa_range_or_flag(2, 6, MIPS_CPU_ISA_M64R2))
#endif
#ifndef cpu_has_mips64r5
# define cpu_has_mips64r5	(cpu_has_64bits && \
				 __isa_range_or_flag(5, 6, MIPS_CPU_ISA_M64R5))
#endif
#ifndef cpu_has_mips64r6
# define cpu_has_mips64r6	__isa_ge_and_flag(6, MIPS_CPU_ISA_M64R6)
#endif
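/*
 * Worked example, for illustration only: with a kernel built for MIPSr2,
 * MIPS_ISA_REV is 2, so cpu_has_mips32r2 above expands to
 *
 *	__isa_range(2, 6) || ((2 < 6) && __isa(MIPS_CPU_ISA_M32R2))
 *
 * where __isa_range(2, 6) is (2 >= 2) && (2 < 6), i.e. constant 1, so the
 * whole check folds to 1 at compile time.  For a kernel built for MIPSr6 the
 * same expression folds to 0, and for a pre-r2 build it falls back to the
 * runtime isa_level test.
 */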
/*
 * Shortcuts ...
 */
#define cpu_has_mips_2_3_4_5	(cpu_has_mips_2 | cpu_has_mips_3_4_5)
#define cpu_has_mips_3_4_5	(cpu_has_mips_3 | cpu_has_mips_4_5)
#define cpu_has_mips_4_5	(cpu_has_mips_4 | cpu_has_mips_5)

#define cpu_has_mips_2_3_4_5_r	(cpu_has_mips_2 | cpu_has_mips_3_4_5_r)
#define cpu_has_mips_3_4_5_r	(cpu_has_mips_3 | cpu_has_mips_4_5_r)
#define cpu_has_mips_4_5_r	(cpu_has_mips_4 | cpu_has_mips_5_r)
#define cpu_has_mips_5_r	(cpu_has_mips_5 | cpu_has_mips_r)

#define cpu_has_mips_3_4_5_64_r2_r6 \
				(cpu_has_mips_3 | cpu_has_mips_4_5_64_r2_r6)
#define cpu_has_mips_4_5_64_r2_r6 \
				(cpu_has_mips_4_5 | cpu_has_mips64r1 | \
				 cpu_has_mips_r2 | cpu_has_mips_r5 | \
				 cpu_has_mips_r6)

#define cpu_has_mips32	(cpu_has_mips32r1 | cpu_has_mips32r2 | \
			 cpu_has_mips32r5 | cpu_has_mips32r6)
#define cpu_has_mips64	(cpu_has_mips64r1 | cpu_has_mips64r2 | \
			 cpu_has_mips64r5 | cpu_has_mips64r6)
#define cpu_has_mips_r1	(cpu_has_mips32r1 | cpu_has_mips64r1)
#define cpu_has_mips_r2	(cpu_has_mips32r2 | cpu_has_mips64r2)
#define cpu_has_mips_r5	(cpu_has_mips32r5 | cpu_has_mips64r5)
#define cpu_has_mips_r6	(cpu_has_mips32r6 | cpu_has_mips64r6)
#define cpu_has_mips_r	(cpu_has_mips32r1 | cpu_has_mips32r2 | \
			 cpu_has_mips32r5 | cpu_has_mips32r6 | \
			 cpu_has_mips64r1 | cpu_has_mips64r2 | \
			 cpu_has_mips64r5 | cpu_has_mips64r6)

/* MIPSR2 - MIPSR6 have a lot of similarities */
#define cpu_has_mips_r2_r6	(cpu_has_mips_r2 | cpu_has_mips_r5 | \
				 cpu_has_mips_r6)

/*
 * cpu_has_mips_r2_exec_hazard - return if IHB is required on current processor
 *
 * Returns non-zero value if the current processor implementation requires
 * an IHB instruction to deal with an instruction hazard as per MIPS R2
 * architecture specification, zero otherwise.
 */
#ifndef cpu_has_mips_r2_exec_hazard
#define cpu_has_mips_r2_exec_hazard					\
({									\
	int __res;							\
									\
	switch (current_cpu_type()) {					\
	case CPU_M14KC:							\
	case CPU_74K:							\
	case CPU_1074K:							\
	case CPU_PROAPTIV:						\
	case CPU_P5600:							\
	case CPU_M5150:							\
	case CPU_QEMU_GENERIC:						\
	case CPU_CAVIUM_OCTEON:						\
	case CPU_CAVIUM_OCTEON_PLUS:					\
	case CPU_CAVIUM_OCTEON2:					\
	case CPU_CAVIUM_OCTEON3:					\
		__res = 0;						\
		break;							\
									\
	default:							\
		__res = 1;						\
	}								\
									\
	__res;								\
})
#endif

/*
 * MIPS32, MIPS64, VR5500, IDT32332, IDT32334 and maybe a few other
 * pre-MIPS32/MIPS64 processors have CLO, CLZ.  The IDT RC64574 is 64-bit and
 * has CLO and CLZ but not DCLO nor DCLZ.  For 64-bit kernels
 * cpu_has_clo_clz also indicates the availability of DCLO and DCLZ.
 */
#ifndef cpu_has_clo_clz
#define cpu_has_clo_clz	cpu_has_mips_r
#endif
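/*
 * Illustrative only: code that synthesises instruction sequences at runtime
 * (such as TLB handler generation) can use the check above to decide whether
 * an "ehb" barrier has to be emitted after a hazard-creating instruction; a
 * sketch using the existing micro-assembler helper might look like
 *
 *	if (cpu_has_mips_r2_exec_hazard)
 *		uasm_i_ehb(&p);
 *
 * (uasm_i_ehb() and the output buffer pointer p are shown purely as an
 * example, not as part of this header.)
 */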
/*
 * MIPS32 R2, MIPS64 R2, Loongson 3A and Octeon have WSBH.
 * MIPS64 R2, Loongson 3A and Octeon have WSBH, DSBH and DSHD.
 * This indicates the availability of WSBH and in case of 64 bit CPUs also
 * DSBH and DSHD.
 */
#ifndef cpu_has_wsbh
#define cpu_has_wsbh		cpu_has_mips_r2
#endif

#ifndef cpu_has_dsp
#define cpu_has_dsp		__ase(MIPS_ASE_DSP)
#endif

#ifndef cpu_has_dsp2
#define cpu_has_dsp2		__ase(MIPS_ASE_DSP2P)
#endif

#ifndef cpu_has_dsp3
#define cpu_has_dsp3		__ase(MIPS_ASE_DSP3)
#endif

#ifndef cpu_has_loongson_mmi
#define cpu_has_loongson_mmi	__ase(MIPS_ASE_LOONGSON_MMI)
#endif

#ifndef cpu_has_loongson_cam
#define cpu_has_loongson_cam	__ase(MIPS_ASE_LOONGSON_CAM)
#endif

#ifndef cpu_has_loongson_ext
#define cpu_has_loongson_ext	__ase(MIPS_ASE_LOONGSON_EXT)
#endif

#ifndef cpu_has_loongson_ext2
#define cpu_has_loongson_ext2	__ase(MIPS_ASE_LOONGSON_EXT2)
#endif

#ifndef cpu_has_mipsmt
#define cpu_has_mipsmt		__isa_lt_and_ase(6, MIPS_ASE_MIPSMT)
#endif

#ifndef cpu_has_vp
#define cpu_has_vp		__isa_ge_and_opt(6, MIPS_CPU_VP)
#endif

#ifndef cpu_has_userlocal
#define cpu_has_userlocal	__isa_ge_or_opt(6, MIPS_CPU_ULRI)
#endif

#ifdef CONFIG_32BIT
# ifndef cpu_has_nofpuex
#  define cpu_has_nofpuex	__isa_lt_and_opt(1, MIPS_CPU_NOFPUEX)
# endif
# ifndef cpu_has_64bits
#  define cpu_has_64bits	(cpu_data[0].isa_level & MIPS_CPU_ISA_64BIT)
# endif
# ifndef cpu_has_64bit_zero_reg
#  define cpu_has_64bit_zero_reg (cpu_data[0].isa_level & MIPS_CPU_ISA_64BIT)
# endif
# ifndef cpu_has_64bit_gp_regs
#  define cpu_has_64bit_gp_regs	0
# endif
# ifndef cpu_vmbits
#  define cpu_vmbits 31
# endif
#endif

#ifdef CONFIG_64BIT
# ifndef cpu_has_nofpuex
#  define cpu_has_nofpuex	0
# endif
# ifndef cpu_has_64bits
#  define cpu_has_64bits	1
# endif
# ifndef cpu_has_64bit_zero_reg
#  define cpu_has_64bit_zero_reg 1
# endif
# ifndef cpu_has_64bit_gp_regs
#  define cpu_has_64bit_gp_regs	1
# endif
# ifndef cpu_vmbits
#  define cpu_vmbits cpu_data[0].vmbits
#  define __NEED_VMBITS_PROBE
# endif
#endif

#if defined(CONFIG_CPU_MIPSR2_IRQ_VI) && !defined(cpu_has_vint)
# define cpu_has_vint		__opt(MIPS_CPU_VINT)
#elif !defined(cpu_has_vint)
# define cpu_has_vint		0
#endif

#if defined(CONFIG_CPU_MIPSR2_IRQ_EI) && !defined(cpu_has_veic)
# define cpu_has_veic		__opt(MIPS_CPU_VEIC)
#elif !defined(cpu_has_veic)
# define cpu_has_veic		0
#endif

#ifndef cpu_has_inclusive_pcaches
#define cpu_has_inclusive_pcaches	__opt(MIPS_CPU_INCLUSIVE_CACHES)
#endif

#ifndef cpu_dcache_line_size
#define cpu_dcache_line_size()	cpu_data[0].dcache.linesz
#endif
#ifndef cpu_icache_line_size
#define cpu_icache_line_size()	cpu_data[0].icache.linesz
#endif
#ifndef cpu_scache_line_size
#define cpu_scache_line_size()	cpu_data[0].scache.linesz
#endif
#ifndef cpu_tcache_line_size
#define cpu_tcache_line_size()	cpu_data[0].tcache.linesz
#endif

#ifndef cpu_hwrena_impl_bits
#define cpu_hwrena_impl_bits	0
#endif

#ifndef cpu_has_perf_cntr_intr_bit
#define cpu_has_perf_cntr_intr_bit	__opt(MIPS_CPU_PCI)
#endif

#ifndef cpu_has_vz
#define cpu_has_vz		__ase(MIPS_ASE_VZ)
#endif

#if defined(CONFIG_CPU_HAS_MSA) && !defined(cpu_has_msa)
# define cpu_has_msa		__ase(MIPS_ASE_MSA)
#elif !defined(cpu_has_msa)
# define cpu_has_msa		0
#endif

#ifndef cpu_has_ufr
# define cpu_has_ufr		__opt(MIPS_CPU_UFR)
#endif
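/*
 * Illustrative only: cache maintenance code typically steps through a range
 * by the line size probed via the accessors above, along the lines of
 *
 *	unsigned long lsize = cpu_dcache_line_size();
 *	unsigned long addr;
 *
 *	for (addr = start; addr < end; addr += lsize)
 *		flush_dcache_line(addr);
 *
 * (flush_dcache_line() is a stand-in name for whichever per-line primitive
 * the real implementation uses.)
 */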
#ifndef cpu_has_fre
# define cpu_has_fre		__opt(MIPS_CPU_FRE)
#endif

#ifndef cpu_has_cdmm
# define cpu_has_cdmm		__opt(MIPS_CPU_CDMM)
#endif

#ifndef cpu_has_small_pages
# define cpu_has_small_pages	__opt(MIPS_CPU_SP)
#endif

#ifndef cpu_has_nan_legacy
#define cpu_has_nan_legacy	__isa_lt_and_opt(6, MIPS_CPU_NAN_LEGACY)
#endif
#ifndef cpu_has_nan_2008
#define cpu_has_nan_2008	__isa_ge_or_opt(6, MIPS_CPU_NAN_2008)
#endif

#ifndef cpu_has_ebase_wg
# define cpu_has_ebase_wg	__opt(MIPS_CPU_EBASE_WG)
#endif

#ifndef cpu_has_badinstr
# define cpu_has_badinstr	__isa_ge_or_opt(6, MIPS_CPU_BADINSTR)
#endif

#ifndef cpu_has_badinstrp
# define cpu_has_badinstrp	__isa_ge_or_opt(6, MIPS_CPU_BADINSTRP)
#endif

#ifndef cpu_has_contextconfig
# define cpu_has_contextconfig	__opt(MIPS_CPU_CTXTC)
#endif

#ifndef cpu_has_perf
# define cpu_has_perf		__opt(MIPS_CPU_PERF)
#endif

#ifndef cpu_has_mac2008_only
# define cpu_has_mac2008_only	__opt(MIPS_CPU_MAC_2008_ONLY)
#endif

#ifdef CONFIG_SMP
/*
 * Some systems share FTLB RAMs between threads within a core (siblings in
 * kernel parlance).  This means that FTLB entries may become invalid at almost
 * any point when an entry is evicted due to a sibling thread writing an entry
 * to the shared FTLB RAM.
 *
 * This is only relevant to SMP systems, and the only systems that exhibit this
 * property implement MIPSr6 or higher so we constrain support for this to
 * kernels that will run on such systems.
 */
# ifndef cpu_has_shared_ftlb_ram
#  define cpu_has_shared_ftlb_ram \
	__isa_ge_and_opt(6, MIPS_CPU_SHARED_FTLB_RAM)
# endif

/*
 * Some systems take this a step further & share FTLB entries between siblings.
 * This is implemented as TLB writes happening as usual, but if an entry
 * written by a sibling exists in the shared FTLB for a translation which would
 * otherwise cause a TLB refill exception then the CPU will use the entry
 * written by its sibling rather than triggering a refill & writing a matching
 * TLB entry for itself.
 *
 * This is naturally only valid if a TLB entry is known to be suitable for use
 * on all siblings in a CPU, and so it only takes effect when MMIDs are in use
 * rather than ASIDs or when a TLB entry is marked global.
 */
# ifndef cpu_has_shared_ftlb_entries
#  define cpu_has_shared_ftlb_entries \
	__isa_ge_and_opt(6, MIPS_CPU_SHARED_FTLB_ENTRIES)
# endif
#endif /* SMP */

#ifndef cpu_has_shared_ftlb_ram
# define cpu_has_shared_ftlb_ram 0
#endif
#ifndef cpu_has_shared_ftlb_entries
# define cpu_has_shared_ftlb_entries 0
#endif

#ifdef CONFIG_MIPS_MT_SMP
# define cpu_has_mipsmt_pertccounters \
	__isa_lt_and_opt(6, MIPS_CPU_MT_PER_TC_PERF_COUNTERS)
#else
# define cpu_has_mipsmt_pertccounters 0
#endif /* CONFIG_MIPS_MT_SMP */

/*
 * We only enable MMID support for configurations which natively support 64 bit
 * atomics because getting good performance from the allocator relies upon
 * efficient atomic64_*() functions.
 */
#ifndef cpu_has_mmid
# ifdef CONFIG_GENERIC_ATOMIC64
#  define cpu_has_mmid		0
# else
#  define cpu_has_mmid		__isa_ge_and_opt(6, MIPS_CPU_MMID)
# endif
#endif

#ifndef cpu_has_mm_sysad
# define cpu_has_mm_sysad	__opt(MIPS_CPU_MM_SYSAD)
#endif

#ifndef cpu_has_mm_full
# define cpu_has_mm_full	__opt(MIPS_CPU_MM_FULL)
#endif

/*
 * Guest capabilities
 */
#ifndef cpu_guest_has_conf1
#define cpu_guest_has_conf1	(cpu_data[0].guest.conf & (1 << 1))
#endif
#ifndef cpu_guest_has_conf2
#define cpu_guest_has_conf2	(cpu_data[0].guest.conf & (1 << 2))
#endif
#ifndef cpu_guest_has_conf3
#define cpu_guest_has_conf3	(cpu_data[0].guest.conf & (1 << 3))
#endif
#ifndef cpu_guest_has_conf4
#define cpu_guest_has_conf4	(cpu_data[0].guest.conf & (1 << 4))
#endif
#ifndef cpu_guest_has_conf5
#define cpu_guest_has_conf5	(cpu_data[0].guest.conf & (1 << 5))
#endif
#ifndef cpu_guest_has_conf6
#define cpu_guest_has_conf6	(cpu_data[0].guest.conf & (1 << 6))
#endif
#ifndef cpu_guest_has_conf7
#define cpu_guest_has_conf7	(cpu_data[0].guest.conf & (1 << 7))
#endif
#ifndef cpu_guest_has_fpu
#define cpu_guest_has_fpu	(cpu_data[0].guest.options & MIPS_CPU_FPU)
#endif
#ifndef cpu_guest_has_watch
#define cpu_guest_has_watch	(cpu_data[0].guest.options & MIPS_CPU_WATCH)
#endif
#ifndef cpu_guest_has_contextconfig
#define cpu_guest_has_contextconfig (cpu_data[0].guest.options & MIPS_CPU_CTXTC)
#endif
#ifndef cpu_guest_has_segments
#define cpu_guest_has_segments	(cpu_data[0].guest.options & MIPS_CPU_SEGMENTS)
#endif
#ifndef cpu_guest_has_badinstr
#define cpu_guest_has_badinstr	(cpu_data[0].guest.options & MIPS_CPU_BADINSTR)
#endif
#ifndef cpu_guest_has_badinstrp
#define cpu_guest_has_badinstrp	(cpu_data[0].guest.options & MIPS_CPU_BADINSTRP)
#endif
#ifndef cpu_guest_has_htw
#define cpu_guest_has_htw	(cpu_data[0].guest.options & MIPS_CPU_HTW)
#endif
#ifndef cpu_guest_has_ldpte
#define cpu_guest_has_ldpte	(cpu_data[0].guest.options & MIPS_CPU_LDPTE)
#endif
#ifndef cpu_guest_has_mvh
#define cpu_guest_has_mvh	(cpu_data[0].guest.options & MIPS_CPU_MVH)
#endif
#ifndef cpu_guest_has_msa
#define cpu_guest_has_msa	(cpu_data[0].guest.ases & MIPS_ASE_MSA)
#endif
#ifndef cpu_guest_has_kscr
#define cpu_guest_has_kscr(n)	(cpu_data[0].guest.kscratch_mask & (1u << (n)))
#endif
#ifndef cpu_guest_has_rw_llb
#define cpu_guest_has_rw_llb	(cpu_has_mips_r6 || (cpu_data[0].guest.options & MIPS_CPU_RW_LLB))
#endif
#ifndef cpu_guest_has_perf
#define cpu_guest_has_perf	(cpu_data[0].guest.options & MIPS_CPU_PERF)
#endif
#ifndef cpu_guest_has_maar
#define cpu_guest_has_maar	(cpu_data[0].guest.options & MIPS_CPU_MAAR)
#endif
#ifndef cpu_guest_has_userlocal
#define cpu_guest_has_userlocal	(cpu_data[0].guest.options & MIPS_CPU_ULRI)
#endif
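/*
 * Illustrative only: unlike the other guest checks, cpu_guest_has_kscr()
 * takes the KScratch register number to test, so virtualization code can
 * probe each register individually, e.g.
 *
 *	if (cpu_guest_has_kscr(2))
 *		... save/restore the guest's KScratch2 register ...
 */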
/*
 * Guest dynamic capabilities
 */
#ifndef cpu_guest_has_dyn_fpu
#define cpu_guest_has_dyn_fpu	(cpu_data[0].guest.options_dyn & MIPS_CPU_FPU)
#endif
#ifndef cpu_guest_has_dyn_watch
#define cpu_guest_has_dyn_watch	(cpu_data[0].guest.options_dyn & MIPS_CPU_WATCH)
#endif
#ifndef cpu_guest_has_dyn_contextconfig
#define cpu_guest_has_dyn_contextconfig (cpu_data[0].guest.options_dyn & MIPS_CPU_CTXTC)
#endif
#ifndef cpu_guest_has_dyn_perf
#define cpu_guest_has_dyn_perf	(cpu_data[0].guest.options_dyn & MIPS_CPU_PERF)
#endif
#ifndef cpu_guest_has_dyn_msa
#define cpu_guest_has_dyn_msa	(cpu_data[0].guest.ases_dyn & MIPS_ASE_MSA)
#endif
#ifndef cpu_guest_has_dyn_maar
#define cpu_guest_has_dyn_maar	(cpu_data[0].guest.options_dyn & MIPS_CPU_MAAR)
#endif

#endif /* __ASM_CPU_FEATURES_H */