/*
 * This file contains low level CPU setup functions.
 * Copyright (C) 2003 Benjamin Herrenschmidt (benh@kernel.crashing.org)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 *
 */

#include <asm/processor.h>
#include <asm/page.h>
#include <asm/cputable.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>
#include <asm/cache.h>
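
/* The __setup_cpu_* entry points below are reached through the CPU
 * table (presumably via the cpu_setup hook of struct cpu_spec from
 * <asm/cputable.h>; the exact hook is not spelled out in this file).
 * What the code itself relies on is that r3 carries an offset that is
 * added to kernel symbols (see __init_fpu_registers), that r5 points
 * at the current cpu_spec (setup_750cx and setup_745x_specifics update
 * CPU_SPEC_FEATURES(r5)), and that r4 is free to hold the saved LR
 * across the "bl" helper calls, which clobber the link register.
 */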
_GLOBAL(__setup_cpu_603)
	b	setup_common_caches
_GLOBAL(__setup_cpu_604)
	mflr	r4
	bl	setup_common_caches
	bl	setup_604_hid0
	mtlr	r4
	blr
_GLOBAL(__setup_cpu_750)
	mflr	r4
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	mtlr	r4
	blr
_GLOBAL(__setup_cpu_750cx)
	mflr	r4
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	bl	setup_750cx
	mtlr	r4
	blr
_GLOBAL(__setup_cpu_750fx)
	mflr	r4
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	bl	setup_750fx
	mtlr	r4
	blr
_GLOBAL(__setup_cpu_7400)
	mflr	r4
	bl	__init_fpu_registers
	bl	setup_7400_workarounds
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	mtlr	r4
	blr
_GLOBAL(__setup_cpu_7410)
	mflr	r4
	bl	__init_fpu_registers
	bl	setup_7410_workarounds
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	li	r3,0
	mtspr	SPRN_L2CR2,r3
	mtlr	r4
	blr
_GLOBAL(__setup_cpu_745x)
	mflr	r4
	bl	setup_common_caches
	bl	setup_745x_specifics
	mtlr	r4
	blr

/* Enable caches for 603s, 604, 750 & 7400 */
setup_common_caches:
	mfspr	r11,SPRN_HID0
	andi.	r0,r11,HID0_DCE
	ori	r11,r11,HID0_ICE|HID0_DCE
	ori	r8,r11,HID0_ICFI
	bne	1f			/* don't invalidate the D-cache */
	ori	r8,r8,HID0_DCI		/* unless it wasn't enabled */
1:	sync
	mtspr	SPRN_HID0,r8		/* enable and invalidate caches */
	sync
	mtspr	SPRN_HID0,r11		/* enable caches */
	sync
	isync
	blr

/* 604, 604e, 604ev, ...
 * Enable superscalar execution & branch history table
 */
setup_604_hid0:
	mfspr	r11,SPRN_HID0
	ori	r11,r11,HID0_SIED|HID0_BHTE
	ori	r8,r11,HID0_BTCD
	sync
	mtspr	SPRN_HID0,r8	/* flush branch target address cache */
	sync			/* on 604e/604r */
	mtspr	SPRN_HID0,r11
	sync
	isync
	blr

/* 7400 <= rev 2.7 and 7410 rev = 1.0 suffer from some
 * errata we work around here.
 * Moto MPC710CE.pdf describes them, those are errata
 * #3, #4 and #5.
 * Note that we assume the firmware didn't choose to
 * apply other workarounds (there are other ones documented
 * in the .pdf). It appears that Apple firmware only works
 * around #3 and with the same fix we use. We may want to
 * check if the CPU is using 60x bus mode, in which case
 * the workaround for errata #4 is useless. Also, we may
 * want to explicitly clear HID0_NOPDST as this is not
 * needed once we have applied workaround #5 (though it's
 * not set by Apple's firmware at least).
 */
setup_7400_workarounds:
	mfpvr	r3
	rlwinm	r3,r3,0,20,31
	cmpwi	0,r3,0x0207
	ble	1f
	blr
setup_7410_workarounds:
	mfpvr	r3
	rlwinm	r3,r3,0,20,31
	cmpwi	0,r3,0x0100
	bnelr
1:
	mfspr	r11,SPRN_MSSSR0
	/* Errata #3: Set L1OPQ_SIZE to 0x10 */
	rlwinm	r11,r11,0,9,6
	oris	r11,r11,0x0100
	/* Errata #4: Set L2MQ_SIZE to 1 (check for MPX mode first ?) */
	oris	r11,r11,0x0002
	/* Errata #5: Set DRLT_SIZE to 0x01 */
	rlwinm	r11,r11,0,5,2
	oris	r11,r11,0x0800
	sync
	mtspr	SPRN_MSSSR0,r11
	sync
	isync
	blr

/* 740/750/7400/7410
 * Enable Store Gathering (SGE), Address Broadcast (ABE),
 * Branch History Table (BHTE), Branch Target ICache (BTIC),
 * Dynamic Power Management (DPM) and speculative accesses
 * (by clearing SPD). Turn Instruction cache throttling (ICTC) off.
 */
setup_750_7400_hid0:
	mfspr	r11,SPRN_HID0
	ori	r11,r11,HID0_SGE | HID0_ABE | HID0_BHTE | HID0_BTIC
	oris	r11,r11,HID0_DPM@h
BEGIN_FTR_SECTION
	xori	r11,r11,HID0_BTIC
END_FTR_SECTION_IFSET(CPU_FTR_NO_BTIC)
BEGIN_FTR_SECTION
	xoris	r11,r11,HID0_DPM@h	/* disable dynamic power mgmt */
END_FTR_SECTION_IFSET(CPU_FTR_NO_DPM)
	li	r3,HID0_SPD
	andc	r11,r11,r3		/* clear SPD: enable speculative */
	li	r3,0
	mtspr	SPRN_ICTC,r3		/* Instruction Cache Throttling off */
	isync
	mtspr	SPRN_HID0,r11
	sync
	isync
	blr

/* 750cx specific
 * Looks like we have to disable NAP feature for some PLL settings...
 * (waiting for confirmation)
 */
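/* How the check below works, assuming HID1[0:3] is the 750CX PLL_CFG
 * field: the rlwinm rotates HID1 left by 4 and keeps bits 28-31, so
 * the original top nibble lands in the low bits of r10; that value is
 * then compared against the (presumed problematic) PLL configurations
 * 7, 9 and 11, and the cror chain folds the three "eq" results into
 * cr0 so a single bnelr can return early when none of them matched.
 */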
setup_750cx:
	mfspr	r10, SPRN_HID1
	rlwinm	r10,r10,4,28,31
	cmpwi	cr0,r10,7
	cmpwi	cr1,r10,9
	cmpwi	cr2,r10,11
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr2+eq
	bnelr
	lwz	r6,CPU_SPEC_FEATURES(r5)
	li	r7,CPU_FTR_CAN_NAP
	andc	r6,r6,r7
	stw	r6,CPU_SPEC_FEATURES(r5)
	blr

/* 750fx specific
 */
setup_750fx:
	blr

/* MPC 745x
 * Enable Store Gathering (SGE), Branch Folding (FOLD),
 * Branch History Table (BHTE), Branch Target ICache (BTIC),
 * Link Register Stack (LRSTK), Dynamic Power Management (DPM)
 * and speculative accesses (by clearing SPD).
 * Ensure our data cache instructions really operate.
 * Timebase has to be running or we wouldn't have made it here,
 * just ensure we don't disable it.
 * Clear Instruction cache throttling (ICTC).
 * Enable L2 HW prefetch.
 */
setup_745x_specifics:
	/* We check for the presence of an L3 cache setup by
	 * the firmware. If any, we disable NAP capability as
	 * it's known to be bogus on rev 2.1 and earlier
	 */
BEGIN_FTR_SECTION
	mfspr	r11,SPRN_L3CR
	andis.	r11,r11,L3CR_L3E@h
	beq	1f
END_FTR_SECTION_IFSET(CPU_FTR_L3CR)
	lwz	r6,CPU_SPEC_FEATURES(r5)
	andi.	r0,r6,CPU_FTR_L3_DISABLE_NAP
	beq	1f
	li	r7,CPU_FTR_CAN_NAP
	andc	r6,r6,r7
	stw	r6,CPU_SPEC_FEATURES(r5)
1:
	mfspr	r11,SPRN_HID0

	/* All of the bits we have to set.....
	 */
	ori	r11,r11,HID0_SGE | HID0_FOLD | HID0_BHTE
	ori	r11,r11,HID0_LRSTK | HID0_BTIC
	oris	r11,r11,HID0_DPM@h
BEGIN_FTR_SECTION
	xori	r11,r11,HID0_BTIC
END_FTR_SECTION_IFSET(CPU_FTR_NO_BTIC)
BEGIN_FTR_SECTION
	xoris	r11,r11,HID0_DPM@h	/* disable dynamic power mgmt */
END_FTR_SECTION_IFSET(CPU_FTR_NO_DPM)

	/* All of the bits we have to clear....
	 */
	li	r3,HID0_SPD | HID0_NOPDST | HID0_NOPTI
	andc	r11,r11,r3		/* clear SPD, NOPDST and NOPTI */
	li	r3,0

	mtspr	SPRN_ICTC,r3		/* Instruction Cache Throttling off */
	isync
	mtspr	SPRN_HID0,r11
	sync
	isync

	/* Enable L2 HW prefetch, if L2 is enabled
	 */
	mfspr	r3,SPRN_L2CR
	andis.	r3,r3,L2CR_L2E@h
	beqlr
	mfspr	r3,SPRN_MSSCR0
	ori	r3,r3,3
	sync
	mtspr	SPRN_MSSCR0,r3
	sync
	isync
	blr

/*
 * Initialize the FPU registers. This is needed to work around an
 * erratum in some 750 CPUs where using a not-yet-initialized FPU
 * register after power-on reset may hang the CPU.
 */
_GLOBAL(__init_fpu_registers)
	mfmsr	r10
	ori	r11,r10,MSR_FP
	mtmsr	r11
	isync
	addis	r9,r3,empty_zero_page@ha
	addi	r9,r9,empty_zero_page@l
	REST_32FPRS(0,r9)
	sync
	mtmsr	r10
	isync
	blr


/* Definitions for the table used to save CPU states */
#define CS_HID0		0
#define CS_HID1		4
#define CS_HID2		8
#define CS_MSSCR0	12
#define CS_MSSSR0	16
#define CS_ICTRL	20
#define CS_LDSTCR	24
#define CS_LDSTDB	28
#define CS_SIZE		32
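
/* Viewed from C, the save area described by the offsets above would
 * look roughly like the struct below (illustrative only, not a type
 * that exists in the kernel); each CPU family only fills in the
 * fields it actually has:
 *
 *	struct cpu_state_save {
 *		u32 hid0;	CS_HID0   (0)
 *		u32 hid1;	CS_HID1   (4)
 *		u32 hid2;	CS_HID2   (8)
 *		u32 msscr0;	CS_MSSCR0 (12)
 *		u32 msssr0;	CS_MSSSR0 (16)
 *		u32 ictrl;	CS_ICTRL  (20)
 *		u32 ldstcr;	CS_LDSTCR (24)
 *		u32 ldstdb;	CS_LDSTDB (28)
 *	};			CS_SIZE == 32
 */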
	.data
	.balign	L1_CACHE_BYTES
cpu_state_storage:
	.space	CS_SIZE
	.balign	L1_CACHE_BYTES,0
	.text

/* Called in normal context to back up CPU 0 state. This
 * does not include cache settings. This function is also
 * called for machine sleep. This does not include the MMU
 * setup, BATs, etc... but rather the "special" registers
 * like HID0, HID1, MSSCR0, etc...
 */
_GLOBAL(__save_cpu_setup)
	/* Some CR fields are volatile, we back them all up */
	mfcr	r7

	/* Get storage ptr */
	lis	r5,cpu_state_storage@h
	ori	r5,r5,cpu_state_storage@l

	/* Save HID0 (common to all CONFIG_6xx cpus) */
	mfspr	r3,SPRN_HID0
	stw	r3,CS_HID0(r5)

	/* Now deal with CPU type dependent registers */
	mfspr	r3,SPRN_PVR
	srwi	r3,r3,16
	cmplwi	cr0,r3,0x8000	/* 7450 */
	cmplwi	cr1,r3,0x000c	/* 7400 */
	cmplwi	cr2,r3,0x800c	/* 7410 */
	cmplwi	cr3,r3,0x8001	/* 7455 */
	cmplwi	cr4,r3,0x8002	/* 7457 */
	cmplwi	cr5,r3,0x8003	/* 7447A */
	cmplwi	cr6,r3,0x7000	/* 750FX */
	cmplwi	cr7,r3,0x8004	/* 7448 */
	/* cr1 is 7400 || 7410 */
	cror	4*cr1+eq,4*cr1+eq,4*cr2+eq
	/* cr0 is 74xx */
	cror	4*cr0+eq,4*cr0+eq,4*cr3+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr4+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr5+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr7+eq
	bne	1f
	/* Backup 74xx specific regs */
	mfspr	r4,SPRN_MSSCR0
	stw	r4,CS_MSSCR0(r5)
	mfspr	r4,SPRN_MSSSR0
	stw	r4,CS_MSSSR0(r5)
	beq	cr1,1f
	/* Backup 745x specific registers */
	mfspr	r4,SPRN_HID1
	stw	r4,CS_HID1(r5)
	mfspr	r4,SPRN_ICTRL
	stw	r4,CS_ICTRL(r5)
	mfspr	r4,SPRN_LDSTCR
	stw	r4,CS_LDSTCR(r5)
	mfspr	r4,SPRN_LDSTDB
	stw	r4,CS_LDSTDB(r5)
1:
	bne	cr6,1f
	/* Backup 750FX specific registers */
	mfspr	r4,SPRN_HID1
	stw	r4,CS_HID1(r5)
	/* If rev 2.x, backup HID2 */
	mfspr	r3,SPRN_PVR
	andi.	r3,r3,0xff00
	cmpwi	cr0,r3,0x0200
	bne	1f
	mfspr	r4,SPRN_HID2
	stw	r4,CS_HID2(r5)
1:
	mtcr	r7
	blr

/* Called with no MMU context (typically MSR:IR/DR off) to
 * restore CPU state as backed up by the previous
 * function. This does not include cache settings.
 */
_GLOBAL(__restore_cpu_setup)
	/* Some CR fields are volatile, we back them all up */
	mfcr	r7

	/* Get storage ptr */
	lis	r5,(cpu_state_storage-KERNELBASE)@h
	ori	r5,r5,cpu_state_storage@l

	/* Restore HID0 */
	lwz	r3,CS_HID0(r5)
	sync
	isync
	mtspr	SPRN_HID0,r3
	sync
	isync

	/* Now deal with CPU type dependent registers */
	mfspr	r3,SPRN_PVR
	srwi	r3,r3,16
	cmplwi	cr0,r3,0x8000	/* 7450 */
	cmplwi	cr1,r3,0x000c	/* 7400 */
	cmplwi	cr2,r3,0x800c	/* 7410 */
	cmplwi	cr3,r3,0x8001	/* 7455 */
	cmplwi	cr4,r3,0x8002	/* 7457 */
	cmplwi	cr5,r3,0x8003	/* 7447A */
	cmplwi	cr6,r3,0x7000	/* 750FX */
	cmplwi	cr7,r3,0x8004	/* 7448 */
	/* cr1 is 7400 || 7410 */
	cror	4*cr1+eq,4*cr1+eq,4*cr2+eq
	/* cr0 is 74xx */
	cror	4*cr0+eq,4*cr0+eq,4*cr3+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr4+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr5+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr7+eq
	bne	2f
	/* Restore 74xx specific regs */
	lwz	r4,CS_MSSCR0(r5)
	sync
	mtspr	SPRN_MSSCR0,r4
	sync
	isync
	lwz	r4,CS_MSSSR0(r5)
	sync
	mtspr	SPRN_MSSSR0,r4
	sync
	isync
	bne	cr2,1f
	/* Clear 7410 L2CR2 */
	li	r4,0
	mtspr	SPRN_L2CR2,r4
1:	beq	cr1,2f
	/* Restore 745x specific registers */
	lwz	r4,CS_HID1(r5)
	sync
	mtspr	SPRN_HID1,r4
	isync
	sync
	lwz	r4,CS_ICTRL(r5)
	sync
	mtspr	SPRN_ICTRL,r4
	isync
	sync
	lwz	r4,CS_LDSTCR(r5)
	sync
	mtspr	SPRN_LDSTCR,r4
	isync
	sync
	lwz	r4,CS_LDSTDB(r5)
	sync
	mtspr	SPRN_LDSTDB,r4
	isync
	sync
2:	bne	cr6,1f
	/* Restore 750FX specific registers:
	 * restore HID2 on rev 2.x, and restore the PLL config,
	 * switching temporarily to PLL 0, on all revs
	 */
	/* If rev 2.x, restore HID2 with low voltage bit cleared */
	mfspr	r3,SPRN_PVR
	andi.	r3,r3,0xff00
	cmpwi	cr0,r3,0x0200
	bne	4f
	lwz	r4,CS_HID2(r5)
	rlwinm	r4,r4,0,19,17
	mtspr	SPRN_HID2,r4
	sync
4:
	lwz	r4,CS_HID1(r5)
	rlwinm	r5,r4,0,16,14
	mtspr	SPRN_HID1,r5
	/* Wait for PLL to stabilize */
	mftbl	r5
3:	mftbl	r6
	sub	r6,r6,r5
	cmplwi	cr0,r6,10000
	ble	3b
	/* Setup final PLL */
	mtspr	SPRN_HID1,r4
1:
	mtcr	r7
	blr