/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * This file contains low-level functions for performing various
 * types of TLB invalidations on various processors with no hash
 * table.
 *
 * This file implements the following functions for all no-hash
 * processors. Some aren't implemented for some variants. Some
 * are inline in tlbflush.h
 *
 *	- tlbil_va
 *	- tlbil_pid
 *	- tlbil_all
 *	- tlbivax_bcast
 *
 * Code mostly moved over from misc_32.S
 *
 * Copyright (C) 1995-1996 Gary Thomas (gdt@linuxppc.org)
 *
 * Partially rewritten by Cort Dougan (cort@cs.nmt.edu)
 * Paul Mackerras, Kumar Gala and Benjamin Herrenschmidt.
 */

#include <asm/reg.h>
#include <asm/page.h>
#include <asm/cputable.h>
#include <asm/mmu.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>
#include <asm/processor.h>
#include <asm/bug.h>
#include <asm/asm-compat.h>
#include <asm/feature-fixups.h>

#if defined(CONFIG_40x)

/*
 * 40x implementation needs only tlbil_va
 */
_GLOBAL(__tlbil_va)
	/* We run the search with interrupts disabled because we have to change
	 * the PID and we don't want to be preempted while that happens.
	 */
	mfmsr	r5
	mfspr	r6,SPRN_PID
	wrteei	0
	mtspr	SPRN_PID,r4
	tlbsx.	r3, 0, r3
	mtspr	SPRN_PID,r6
	wrtee	r5
	bne	1f
	sync
	/* There are only 64 TLB entries, so r3 < 64, which means bit 25 is
	 * clear. Since 25 is the V bit in the TLB_TAG, loading this value
	 * will invalidate the TLB entry. */
	tlbwe	r3, r3, TLB_TAG
	isync
1:	blr

#elif defined(CONFIG_PPC_8xx)

/*
 * Nothing to do for 8xx, everything is inline
 */

#elif defined(CONFIG_44x)	/* Includes 47x */

/*
 * 440 implementation uses tlbsx/we for tlbil_va and a full sweep
 * of the TLB for everything else.
 */
_GLOBAL(__tlbil_va)
	mfspr	r5,SPRN_MMUCR
	mfmsr	r10

	/*
	 * We write 16 bits of STID since 47x supports that much; we
	 * should never be passed out-of-bounds values on 440 (hopefully).
	 */
	rlwimi	r5,r4,0,16,31

	/* We have to run the search with interrupts disabled, otherwise
	 * an interrupt which causes a TLB miss can clobber the MMUCR
	 * between the mtspr and the tlbsx.
	 *
	 * Critical and Machine Check interrupts take care of saving
	 * and restoring MMUCR, so only normal interrupts have to be
	 * taken care of.
	 */
	wrteei	0
	mtspr	SPRN_MMUCR,r5
	tlbsx.	r6,0,r3
	bne	10f
	sync
#ifndef CONFIG_PPC_47x
	/* On 440 there are only 64 TLB entries, so the entry index returned
	 * in r6 is < 64, which means bit 22 is clear. Since 22 is the V bit
	 * in the TLB_PAGEID, loading this value will invalidate the TLB entry.
	 */
	tlbwe	r6,r6,PPC44x_TLB_PAGEID
#else
	oris	r7,r6,0x8000	/* specify way explicitly */
	clrrwi	r4,r3,12	/* get an EPN for the hashing with V = 0 */
	ori	r4,r4,PPC47x_TLBE_SIZE
	tlbwe	r4,r7,0		/* write it */
#endif /* !CONFIG_PPC_47x */
	isync
10:	wrtee	r10
	blr

_GLOBAL(_tlbil_all)
_GLOBAL(_tlbil_pid)
#ifndef CONFIG_PPC_47x
	li	r3,0
	sync

	/* Load high watermark */
	lis	r4,tlb_44x_hwater@ha
	lwz	r5,tlb_44x_hwater@l(r4)

1:	tlbwe	r3,r3,PPC44x_TLB_PAGEID
	addi	r3,r3,1
	cmpw	0,r3,r5
	ble	1b

	isync
	blr
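
	/*
	 * Illustrative only: a rough C-level sketch of the 440 sweep above,
	 * assuming tlb_44x_hwater holds the highest TLB entry index that
	 * software may use (entries above it are bolted):
	 *
	 *	for (i = 0; i <= tlb_44x_hwater; i++)
	 *		tlbwe(i, i, PPC44x_TLB_PAGEID);
	 *
	 * The word written is the entry index itself (< 64), so the V bit
	 * (bit 22) ends up clear and the entry is invalidated.
	 */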
#else
	/* 476 variant. There's no simple way to do this, so hopefully we
	 * can limit the number of such full invalidates
	 */
	mfmsr	r11		/* Interrupts off */
	wrteei	0
	li	r3,-1		/* Current set */
	lis	r10,tlb_47x_boltmap@h
	ori	r10,r10,tlb_47x_boltmap@l
	lis	r7,0x8000	/* Specify way explicitly */

	b	9f		/* For each set */

1:	li	r9,4		/* Number of ways */
	li	r4,0		/* Current way */
	li	r6,0		/* Default entry value 0 */
	andi.	r0,r8,1		/* Check if way 0 is bolted */
	mtctr	r9		/* Load way counter */
	bne-	3f		/* Bolted, skip loading it */

2:	/* For each way */
	or	r5,r3,r4	/* Make way|index for tlbre */
	rlwimi	r5,r5,16,8,15	/* Copy index into position */
	tlbre	r6,r5,0		/* Read entry */
3:	addis	r4,r4,0x2000	/* Next way */
	andi.	r0,r6,PPC47x_TLB0_VALID /* Valid entry ? */
	beq	4f		/* Nope, skip it */
	rlwimi	r7,r5,0,1,2	/* Insert way number */
	rlwinm	r6,r6,0,21,19	/* Clear V */
	tlbwe	r6,r7,0		/* Write it */
4:	bdnz	2b		/* Loop for each way */
	srwi	r8,r8,1		/* Next boltmap bit */
9:	cmpwi	cr1,r3,255	/* Last set done ? */
	addi	r3,r3,1		/* Next set */
	beq	cr1,1f		/* End of loop */
	andi.	r0,r3,0x1f	/* Need to load a new boltmap word ? */
	bne	1b		/* No, loop */
	lwz	r8,0(r10)	/* Load boltmap entry */
	addi	r10,r10,4	/* Next word */
	b	1b		/* Then loop */
1:	isync			/* Sync shadows */
	wrtee	r11
	blr
#endif /* !CONFIG_PPC_47x */

#ifdef CONFIG_PPC_47x

/*
 * _tlbivax_bcast is only on 47x. We don't bother doing a runtime
 * check, though; it will blow up soon enough if we mistakenly try
 * to use it on a 440.
 */
_GLOBAL(_tlbivax_bcast)
	mfspr	r5,SPRN_MMUCR
	mfmsr	r10
	rlwimi	r5,r4,0,16,31
	wrteei	0
	mtspr	SPRN_MMUCR,r5
	isync
	PPC_TLBIVAX(0, R3)
	isync
	eieio
	tlbsync
BEGIN_FTR_SECTION
	b	1f
END_FTR_SECTION_IFSET(CPU_FTR_476_DD2)
	sync
	wrtee	r10
	blr
/*
 * DD2 HW could hang if an instruction fetch happens before the msync
 * completes. Touch enough instruction cache lines to ensure cache hits.
 */
1:	mflr	r9
	bcl	20,31,$+4
2:	mflr	r6
	li	r7,32
	PPC_ICBT(0,R6,R7)		/* touch next cache line */
	add	r6,r6,r7
	PPC_ICBT(0,R6,R7)		/* touch next cache line */
	add	r6,r6,r7
	PPC_ICBT(0,R6,R7)		/* touch next cache line */
	sync
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	mtlr	r9
	wrtee	r10
	blr
#endif /* CONFIG_PPC_47x */

#elif defined(CONFIG_FSL_BOOKE)
/*
 * FSL BookE implementations.
 *
 * Since the feature sections use _SECTION_ELSE, we need to have
 * the larger code path before the _SECTION_ELSE.
 */

/*
 * Flush MMU TLB on the local processor
 */
_GLOBAL(_tlbil_all)
BEGIN_MMU_FTR_SECTION
	li	r3,(MMUCSR0_TLBFI)@l
	mtspr	SPRN_MMUCSR0, r3
1:
	mfspr	r3,SPRN_MMUCSR0
	andi.	r3,r3,MMUCSR0_TLBFI@l
	bne	1b
MMU_FTR_SECTION_ELSE
	PPC_TLBILX_ALL(0,R0)
ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_TLBILX)
	msync
	isync
	blr
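
/*
 * Illustrative only: a rough C-level view of the MMUCSR0 path above.
 * Writing TLBFI requests a flash invalidate; the poll loop relies on
 * hardware clearing the bit again once the invalidate has completed:
 *
 *	mtspr(SPRN_MMUCSR0, MMUCSR0_TLBFI);
 *	while (mfspr(SPRN_MMUCSR0) & MMUCSR0_TLBFI)
 *		;
 */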

_GLOBAL(_tlbil_pid)
BEGIN_MMU_FTR_SECTION
	slwi	r3,r3,16
	mfmsr	r10
	wrteei	0
	mfspr	r4,SPRN_MAS6	/* save MAS6 */
	mtspr	SPRN_MAS6,r3
	PPC_TLBILX_PID(0,R0)
	mtspr	SPRN_MAS6,r4	/* restore MAS6 */
	wrtee	r10
MMU_FTR_SECTION_ELSE
	li	r3,(MMUCSR0_TLBFI)@l
	mtspr	SPRN_MMUCSR0, r3
1:
	mfspr	r3,SPRN_MMUCSR0
	andi.	r3,r3,MMUCSR0_TLBFI@l
	bne	1b
ALT_MMU_FTR_SECTION_END_IFSET(MMU_FTR_USE_TLBILX)
	msync
	isync
	blr

/*
 * Flush MMU TLB for a particular address, but only on the local processor
 * (no broadcast)
 */
_GLOBAL(__tlbil_va)
	mfmsr	r10
	wrteei	0
	slwi	r4,r4,16
	ori	r4,r4,(MAS6_ISIZE(BOOK3E_PAGESZ_4K))@l
	mtspr	SPRN_MAS6,r4		/* assume AS=0 for now */
BEGIN_MMU_FTR_SECTION
	tlbsx	0,r3
	mfspr	r4,SPRN_MAS1		/* check valid */
	andis.	r3,r4,MAS1_VALID@h
	beq	1f
	rlwinm	r4,r4,0,1,31
	mtspr	SPRN_MAS1,r4
	tlbwe
MMU_FTR_SECTION_ELSE
	PPC_TLBILX_VA(0,R3)
ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_TLBILX)
	msync
	isync
1:	wrtee	r10
	blr
#elif defined(CONFIG_PPC_BOOK3E)
/*
 * New Book3E (>= 2.06) implementation
 *
 * Note: We may be able to get away without the interrupt masking stuff
 * if we save/restore MAS6 on exceptions that might modify it
 */
_GLOBAL(_tlbil_pid)
	slwi	r4,r3,MAS6_SPID_SHIFT
	mfmsr	r10
	wrteei	0
	mtspr	SPRN_MAS6,r4
	PPC_TLBILX_PID(0,R0)
	wrtee	r10
	msync
	isync
	blr

_GLOBAL(_tlbil_pid_noind)
	slwi	r4,r3,MAS6_SPID_SHIFT
	mfmsr	r10
	ori	r4,r4,MAS6_SIND
	wrteei	0
	mtspr	SPRN_MAS6,r4
	PPC_TLBILX_PID(0,R0)
	wrtee	r10
	msync
	isync
	blr

_GLOBAL(_tlbil_all)
	PPC_TLBILX_ALL(0,R0)
	msync
	isync
	blr

_GLOBAL(_tlbil_va)
	mfmsr	r10
	wrteei	0
	cmpwi	cr0,r6,0
	slwi	r4,r4,MAS6_SPID_SHIFT
	rlwimi	r4,r5,MAS6_ISIZE_SHIFT,MAS6_ISIZE_MASK
	beq	1f
	rlwimi	r4,r6,MAS6_SIND_SHIFT,MAS6_SIND
1:	mtspr	SPRN_MAS6,r4		/* assume AS=0 for now */
	PPC_TLBILX_VA(0,R3)
	msync
	isync
	wrtee	r10
	blr

_GLOBAL(_tlbivax_bcast)
	mfmsr	r10
	wrteei	0
	cmpwi	cr0,r6,0
	slwi	r4,r4,MAS6_SPID_SHIFT
	rlwimi	r4,r5,MAS6_ISIZE_SHIFT,MAS6_ISIZE_MASK
	beq	1f
	rlwimi	r4,r6,MAS6_SIND_SHIFT,MAS6_SIND
1:	mtspr	SPRN_MAS6,r4		/* assume AS=0 for now */
	PPC_TLBIVAX(0,R3)
	eieio
	tlbsync
	sync
	wrtee	r10
	blr
#else
#error Unsupported processor type!
#endif

#if defined(CONFIG_PPC_FSL_BOOK3E)
/*
 * extern void loadcam_entry(unsigned int index)
 *
 * Load the TLBCAM[index] entry into the L2 CAM MMU.
 * Must preserve r7, r8, r9, r10, r11, r12
 */
_GLOBAL(loadcam_entry)
	mflr	r5
	LOAD_REG_ADDR_PIC(r4, TLBCAM)
	mtlr	r5
	mulli	r5,r3,TLBCAM_SIZE
	add	r3,r5,r4
	lwz	r4,TLBCAM_MAS0(r3)
	mtspr	SPRN_MAS0,r4
	lwz	r4,TLBCAM_MAS1(r3)
	mtspr	SPRN_MAS1,r4
	PPC_LL	r4,TLBCAM_MAS2(r3)
	mtspr	SPRN_MAS2,r4
	lwz	r4,TLBCAM_MAS3(r3)
	mtspr	SPRN_MAS3,r4
BEGIN_MMU_FTR_SECTION
	lwz	r4,TLBCAM_MAS7(r3)
	mtspr	SPRN_MAS7,r4
END_MMU_FTR_SECTION_IFSET(MMU_FTR_BIG_PHYS)
	isync
	tlbwe
	isync
	blr

/*
 * Load multiple TLB entries at once, using an alternate-space
 * trampoline so that we don't have to care about whether the same
 * TLB entry maps us before and after.
 *
 * r3 = first entry to write
 * r4 = number of entries to write
 * r5 = temporary tlb entry (0 means no switch to AS1)
 */
_GLOBAL(loadcam_multi)
	mflr	r8
	/* Don't switch to AS=1 if already there */
	mfmsr	r11
	andi.	r11,r11,MSR_IS
	bne	10f
	mr.	r12, r5
	beq	10f

	/*
	 * Set up temporary TLB entry that is the same as what we're
	 * running from, but in AS=1.
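	 *
	 * Note: the "bcl 20,31,$+4; mflr" pair below is the usual
	 * link-stack-friendly idiom for reading the current instruction
	 * address. tlbsx then locates the TLB entry translating our saved
	 * return address (r8), and a copy of that entry is written into
	 * slot r5 with MAS1[TS] set so the same mapping exists in AS=1.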
	 */
	bcl	20,31,$+4
1:	mflr	r6
	tlbsx	0,r8
	mfspr	r6,SPRN_MAS1
	ori	r6,r6,MAS1_TS
	mtspr	SPRN_MAS1,r6
	mfspr	r6,SPRN_MAS0
	rlwimi	r6,r5,MAS0_ESEL_SHIFT,MAS0_ESEL_MASK
	mr	r7,r5
	mtspr	SPRN_MAS0,r6
	isync
	tlbwe
	isync

	/* Switch to AS=1 */
	mfmsr	r6
	ori	r6,r6,MSR_IS|MSR_DS
	mtmsr	r6
	isync

10:
	mr	r9,r3
	add	r10,r3,r4
2:	bl	loadcam_entry
	addi	r9,r9,1
	cmpw	r9,r10
	mr	r3,r9
	blt	2b

	/* Don't return to AS=0 if we were in AS=1 at function start */
	andi.	r11,r11,MSR_IS
	bne	3f
	cmpwi	r12, 0
	beq	3f

	/* Return to AS=0 and clear the temporary entry */
	mfmsr	r6
	rlwinm.	r6,r6,0,~(MSR_IS|MSR_DS)
	mtmsr	r6
	isync

	li	r6,0
	mtspr	SPRN_MAS1,r6
	rlwinm	r6,r7,MAS0_ESEL_SHIFT,MAS0_ESEL_MASK
	oris	r6,r6,MAS0_TLBSEL(1)@h
	mtspr	SPRN_MAS0,r6
	isync
	tlbwe
	isync

3:
	mtlr	r8
	blr
#endif
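
/*
 * Illustrative only: based on the register arguments documented above
 * (r3 = index / first entry, r4 = count, r5 = temporary entry), the C-side
 * callers are expected to look roughly like
 *
 *	loadcam_entry(i);		- reload TLBCAM[i] into the CAM
 *	loadcam_multi(0, num, tmp);	- load entries 0..num-1, using entry
 *					  "tmp" for the temporary AS=1 mapping
 *					  (tmp == 0 means: don't switch to AS=1)
 *
 * The names i, num and tmp are placeholders for this sketch only.
 */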