/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * This file contains low-level cache management functions
 * used for sleep and CPU speed changes on Apple machines.
 * (In fact the only thing that is Apple-specific is that we assume
 * that we can read from ROM at physical address 0xfff00000.)
 *
 * Copyright (C) 2004 Paul Mackerras (paulus@samba.org) and
 *                    Benjamin Herrenschmidt (benh@kernel.crashing.org)
 */

#include <asm/processor.h>
#include <asm/ppc_asm.h>
#include <asm/cputable.h>
#include <asm/feature-fixups.h>

/*
 * Flush and disable all data caches (dL1, L2, L3). This is used
 * when going to sleep, when doing a PMU based cpufreq transition,
 * or when "offlining" a CPU on SMP machines. This code is over
 * paranoid, but I've had enough issues with various CPU revs and
 * bugs that I decided it was worth being over cautious
 */

/*
 * void flush_disable_caches(void)
 *
 * Entry point: dispatches to the per-family implementation based on
 * CPU feature bits patched in at boot (feature-fixup sections).
 *   - 745x (7450 family)        -> flush_disable_745x
 *   - CPUs with an L2CR (G3/750,
 *     7400/7410)                -> flush_disable_75x
 *   - anything else             -> __flush_disable_L1 (L1-only, elsewhere)
 * On non-BOOK3S_32 builds this is a no-op.
 */
_GLOBAL(flush_disable_caches)
#ifndef CONFIG_PPC_BOOK3S_32
	blr
#else
BEGIN_FTR_SECTION
	b	flush_disable_745x
END_FTR_SECTION_IFSET(CPU_FTR_SPEC7450)
BEGIN_FTR_SECTION
	b	flush_disable_75x
END_FTR_SECTION_IFSET(CPU_FTR_L2CR)
	b	__flush_disable_L1

/* This is the code for G3 and 74[01]0 */
/*
 * Register use in this function:
 *   r10 = saved LR
 *   r11 = saved MSR (restored at the end to re-enable EE/DR)
 *   r8  = saved HID0 (so HID0[DPM] can be restored at the end)
 *   r3-r5, r0 = scratch
 */
flush_disable_75x:
	mflr	r10

	/* Turn off EE and DR in MSR: no interrupts and no data
	 * translation while we mess with the caches.
	 */
	mfmsr	r11
	rlwinm	r0,r11,0,~MSR_EE
	rlwinm	r0,r0,0,~MSR_DR
	sync
	mtmsr	r0
	isync

	/* Stop DST streams (AltiVec data-stream prefetch) */
BEGIN_FTR_SECTION
	PPC_DSSALL
	sync
END_FTR_SECTION_IFSET(CPU_FTR_ALTIVEC)

	/* Stop DPM (dynamic power management) while flushing */
	mfspr	r8,SPRN_HID0		/* Save SPRN_HID0 in r8 */
	rlwinm	r4,r8,0,12,10		/* Turn off HID0[DPM] (bit 11) */
	sync
	mtspr	SPRN_HID0,r4		/* Disable DPM */
	sync

	/* Disp-flush L1. We have a weird problem here that I never
	 * totally figured out. On 750FX, using the ROM for the flush
	 * results in a non-working flush. We use that workaround for
	 * now until I finally understand what's going on. --BenH
	 */

	/* ROM base by default */
	lis	r4,0xfff0
	mfpvr	r3
	srwi	r3,r3,16		/* PVR version field */
	cmplwi	cr0,r3,0x7000		/* 0x7000 = 750FX */
	bne+	1f
	/* RAM base on 750FX */
	li	r4,0
	/* NOTE(review): the li below overwrites the base address chosen
	 * above (ROM vs RAM), so the displacement-flush loop always reads
	 * from 0x4000 upward regardless of the PVR check — verify whether
	 * the counter was meant to go in a different register (e.g. r5).
	 */
1:	li	r4,0x4000
	mtctr	r4
1:	lwz	r0,0(r4)		/* touch one line per iteration */
	addi	r4,r4,32
	bdnz	1b
	sync
	isync

	/* Disable / invalidate / enable L1 data: drop DCE/ICE, then pulse
	 * the DCI/ICFI invalidate bits while re-enabling.
	 */
	mfspr	r3,SPRN_HID0
	rlwinm	r3,r3,0,~(HID0_DCE | HID0_ICE)
	mtspr	SPRN_HID0,r3
	sync
	isync
	ori	r3,r3,(HID0_DCE|HID0_DCI|HID0_ICE|HID0_ICFI)
	sync
	isync
	mtspr	SPRN_HID0,r3
	xori	r3,r3,(HID0_DCI|HID0_ICFI)	/* clear the invalidate pulses */
	mtspr	SPRN_HID0,r3
	sync

	/* Get the current enable bit of the L2CR into r4 */
	mfspr	r5,SPRN_L2CR
	/* Set to data-only (pre-745x bit) */
	oris	r3,r5,L2CR_L2DO@h
	b	2f
	/* When disabling L2, code must be in L1: the 2f/3f/1b branch dance
	 * below walks execution through this 32-byte-aligned block once so
	 * it is resident in the i-cache before the mtspr takes effect.
	 */
	.balign 32
1:	mtspr	SPRN_L2CR,r3
3:	sync
	isync
	b	1f
2:	b	3f
3:	sync
	isync
	b	1b
1:	/* disp-flush L2. The interesting thing here is that the L2 can be
	 * up to 2Mb ... so using the ROM, we'll end up wrapping back to memory
	 * but that is probably fine. We disp-flush over 4Mb to be safe
	 */
	lis	r4,2			/* 0x20000 lines * 32B = 4MB */
	mtctr	r4
	lis	r4,0xfff0		/* ROM base */
	mtctr	r4
1:	lwz	r0,0(r4)
	addi	r4,r4,32
	bdnz	1b
	sync
	isync
	lis	r4,2
	mtctr	r4
	lis	r4,0xfff0
1:	dcbf	0,r4			/* now force the lines out */
	addi	r4,r4,32
	bdnz	1b
	sync
	isync

	/* now disable L2 */
	rlwinm	r5,r5,0,~L2CR_L2E
	b	2f
	/* When disabling L2, code must be in L1 (same i-cache dance) */
	.balign 32
1:	mtspr	SPRN_L2CR,r5
3:	sync
	isync
	b	1f
2:	b	3f
3:	sync
	isync
	b	1b
1:	sync
	isync
	/* Invalidate L2. This is pre-745x, we clear the L2I bit ourselves */
	oris	r4,r5,L2CR_L2I@h
	mtspr	SPRN_L2CR,r4
	sync
	isync

	/* Wait for the invalidation to complete (poll the low status bit,
	 * presumably L2IP "invalidate in progress" — per-CPU manual).
	 */
1:	mfspr	r3,SPRN_L2CR
	rlwinm.	r0,r3,0,31,31
	bne	1b

	/* Clear L2I */
	xoris	r4,r4,L2CR_L2I@h
	sync
	mtspr	SPRN_L2CR,r4
	sync

	/* now disable the L1 data cache */
	mfspr	r0,SPRN_HID0
	rlwinm	r0,r0,0,~(HID0_DCE|HID0_ICE)
	mtspr	SPRN_HID0,r0
	sync
	isync

	/* Restore HID0[DPM] to whatever it was before */
	sync
	mfspr	r0,SPRN_HID0
	rlwimi	r0,r8,0,11,11		/* Turn back HID0[DPM] */
	mtspr	SPRN_HID0,r0
	sync

	/* restore DR and EE */
	sync
	mtmsr	r11
	isync

	mtlr	r10
	blr
_ASM_NOKPROBE_SYMBOL(flush_disable_75x)

/* This code is for 745x processors */
/*
 * Register use in this function:
 *   r11 = saved MSR (restored at the end)
 *   r6  = LDSTCR working copy (way-lock bits live in its low byte)
 *   r3-r5, r0 = scratch
 */
flush_disable_745x:
	/* Turn off EE and DR in MSR */
	mfmsr	r11
	rlwinm	r0,r11,0,~MSR_EE
	rlwinm	r0,r0,0,~MSR_DR
	sync
	mtmsr	r0
	isync

	/* Stop prefetch streams */
	PPC_DSSALL
	sync

	/* Disable L2 prefetching (clear low bits of MSSCR0) */
	mfspr	r0,SPRN_MSSCR0
	rlwinm	r0,r0,0,0,29
	mtspr	SPRN_MSSCR0,r0
	sync
	isync
	lis	r4,0
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4

	/* Due to a bug with the HW flush on some CPU revs, we occasionally
	 * experience data corruption. I'm adding a displacement flush along
	 * with a dcbf loop over a few Mb to "help". The problem isn't totally
	 * fixed by this in theory, but at least, in practice, I couldn't reproduce
	 * it even with a big hammer...
	 */

	lis	r4,0x0002		/* 0x20000 lines * 32B = 4MB touched */
	mtctr	r4
	li	r4,0
1:
	lwz	r0,0(r4)
	addi	r4,r4,32		/* Go to start of next cache line */
	bdnz	1b
	isync

	/* Now, flush the first 4MB of memory */
	lis	r4,0x0002
	mtctr	r4
	li	r4,0
	sync
1:
	dcbf	0,r4
	addi	r4,r4,32		/* Go to start of next cache line */
	bdnz	1b

	/* Flush and disable the L1 data cache: lock all ways but one via
	 * LDSTCR, displacement-flush that way from ROM, then rotate the
	 * unlocked way until all 8 ways have been flushed.
	 */
	mfspr	r6,SPRN_LDSTCR
	lis	r3,0xfff0		/* read from ROM for displacement flush */
	li	r4,0xfe			/* start with only way 0 unlocked */
	li	r5,128			/* 128 lines in each way */
1:	mtctr	r5
	rlwimi	r6,r4,0,24,31		/* insert way-lock mask into LDSTCR */
	mtspr	SPRN_LDSTCR,r6
	sync
	isync
2:	lwz	r0,0(r3)		/* touch each cache line */
	addi	r3,r3,32
	bdnz	2b
	rlwinm	r4,r4,1,24,30		/* move on to the next way */
	ori	r4,r4,1			/* keep previously-flushed ways locked */
	cmpwi	r4,0xff			/* all done? */
	bne	1b
	/* now unlock the L1 data cache */
	li	r4,0
	rlwimi	r6,r4,0,24,31
	sync
	mtspr	SPRN_LDSTCR,r6
	sync
	isync

	/* Flush the L2 cache using the hardware assist */
	mfspr	r3,SPRN_L2CR
	cmpwi	r3,0			/* check if it is enabled first */
	bge	4f			/* L2E is the sign bit: >= 0 means disabled */
	oris	r0,r3,(L2CR_L2IO_745x|L2CR_L2DO_745x)@h
	b	2f
	/* When disabling/locking L2, code must be in L1: the branch dance
	 * pulls this aligned block into the i-cache before it runs.
	 */
	.balign 32
1:	mtspr	SPRN_L2CR,r0		/* lock the L2 cache */
3:	sync
	isync
	b	1f
2:	b	3f
3:	sync
	isync
	b	1b
1:	sync
	isync
	ori	r0,r3,L2CR_L2HWF_745x
	sync
	mtspr	SPRN_L2CR,r0		/* set the hardware flush bit */
3:	mfspr	r0,SPRN_L2CR		/* wait for it to go to 0 */
	andi.	r0,r0,L2CR_L2HWF_745x
	bne	3b
	sync
	rlwinm	r3,r3,0,~L2CR_L2E
	b	2f
	/* When disabling L2, code must be in L1 */
	.balign 32
1:	mtspr	SPRN_L2CR,r3		/* disable the L2 cache */
3:	sync
	isync
	b	1f
2:	b	3f
3:	sync
	isync
	b	1b
1:	sync
	isync
	oris	r4,r3,L2CR_L2I@h	/* global invalidate ... */
	mtspr	SPRN_L2CR,r4
	sync
	isync
1:	mfspr	r4,SPRN_L2CR		/* ... and wait for L2I to self-clear */
	andis.	r0,r4,L2CR_L2I@h
	bne	1b
	sync

BEGIN_FTR_SECTION
	/* Flush the L3 cache using the hardware assist */
4:	mfspr	r3,SPRN_L3CR
	cmpwi	r3,0			/* check if it is enabled */
	bge	6f			/* L3E is the sign bit: >= 0 means disabled */
	oris	r0,r3,L3CR_L3IO@h
	ori	r0,r0,L3CR_L3DO
	sync
	mtspr	SPRN_L3CR,r0		/* lock the L3 cache */
	sync
	isync
	ori	r0,r0,L3CR_L3HWF
	sync
	mtspr	SPRN_L3CR,r0		/* set the hardware flush bit */
5:	mfspr	r0,SPRN_L3CR		/* wait for it to go to zero */
	andi.	r0,r0,L3CR_L3HWF
	bne	5b
	rlwinm	r3,r3,0,~L3CR_L3E
	sync
	mtspr	SPRN_L3CR,r3		/* disable the L3 cache */
	sync
	ori	r4,r3,L3CR_L3I		/* global invalidate ... */
	mtspr	SPRN_L3CR,r4
1:	mfspr	r4,SPRN_L3CR		/* ... and wait for L3I to self-clear */
	andi.	r0,r4,L3CR_L3I
	bne	1b
	sync
END_FTR_SECTION_IFSET(CPU_FTR_L3CR)

6:	mfspr	r0,SPRN_HID0		/* now disable the L1 data cache */
	rlwinm	r0,r0,0,~HID0_DCE
	mtspr	SPRN_HID0,r0
	sync
	isync
	mtmsr	r11			/* restore DR and EE */
	isync
	blr
_ASM_NOKPROBE_SYMBOL(flush_disable_745x)
#endif /* CONFIG_PPC_BOOK3S_32 */