/*
 * Copyright 2004, 2007, 2011 Freescale Semiconductor.
 * Srikanth Srinivasan <srikanth.srinivaan@freescale.com>
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

/* U-Boot - Startup Code for 86xx PowerPC based Embedded Boards
 *
 *
 * The processor starts at 0xfff00100 and the code is executed
 * from flash. The code is linked to run at a different address
 * in memory, but that is fine as long as we don't jump around
 * before relocating. board_init lies at a fairly high address,
 * and once the cpu has jumped there, everything is ok.
 */
#include <asm-offsets.h>
#include <config.h>
#include <mpc86xx.h>
#include <version.h>

#include <ppc_asm.tmpl>
#include <ppc_defs.h>

#include <asm/cache.h>
#include <asm/mmu.h>
#include <asm/u-boot.h>

/*
 * Need MSR_DR | MSR_IR enabled to access I/O (printf) in exceptions
 */

/*
 * Set up GOT: Global Offset Table
 *
 * Use r12 to access the GOT
 */
	START_GOT
	GOT_ENTRY(_GOT2_TABLE_)
	GOT_ENTRY(_FIXUP_TABLE_)

	GOT_ENTRY(_start)
	GOT_ENTRY(_start_of_vectors)
	GOT_ENTRY(_end_of_vectors)
	GOT_ENTRY(transfer_to_handler)

	GOT_ENTRY(__init_end)
	GOT_ENTRY(__bss_end)
	GOT_ENTRY(__bss_start)
	END_GOT

/*
 * r3 - 1st arg to board_init(): IMMP pointer
 * r4 - 2nd arg to board_init(): boot flag
 */
	.text
	.long	0x27051956		/* U-Boot Magic Number */
	.globl	version_string
version_string:
	.ascii U_BOOT_VERSION_STRING, "\0"

	. = EXC_OFF_SYS_RESET
	.globl	_start
_start:
	b	boot_cold

	/* the boot code is located below the exception table */

	.globl	_start_of_vectors
_start_of_vectors:

/* Machine check */
	STD_EXCEPTION(0x200, MachineCheck, MachineCheckException)

/* Data Storage exception. */
	STD_EXCEPTION(0x300, DataStorage, UnknownException)

/* Instruction Storage exception. */
	STD_EXCEPTION(0x400, InstStorage, UnknownException)

/* External Interrupt exception. */
	STD_EXCEPTION(0x500, ExtInterrupt, external_interrupt)

/* Alignment exception. */
	. = 0x600
Alignment:
	EXCEPTION_PROLOG(SRR0, SRR1)
	mfspr	r4,DAR
	stw	r4,_DAR(r21)
	mfspr	r5,DSISR
	stw	r5,_DSISR(r21)
	addi	r3,r1,STACK_FRAME_OVERHEAD
	EXC_XFER_TEMPLATE(Alignment, AlignmentException, MSR_KERNEL, COPY_EE)

/* Program check exception */
	. = 0x700
ProgramCheck:
	EXCEPTION_PROLOG(SRR0, SRR1)
	addi	r3,r1,STACK_FRAME_OVERHEAD
	EXC_XFER_TEMPLATE(ProgramCheck, ProgramCheckException,
		MSR_KERNEL, COPY_EE)

	STD_EXCEPTION(0x800, FPUnavailable, UnknownException)

	/* I guess we could implement decrementer, and may have
	 * to someday for timekeeping.
	 */
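	/*
	 * Note: each STD_EXCEPTION() entry in this table (see ppc_asm.tmpl)
	 * emits a short prolog at the given vector offset and hands control
	 * to the named C handler via transfer_to_handler below.
	 */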
	STD_EXCEPTION(0x900, Decrementer, timer_interrupt)
	STD_EXCEPTION(0xa00, Trap_0a, UnknownException)
	STD_EXCEPTION(0xb00, Trap_0b, UnknownException)
	STD_EXCEPTION(0xc00, SystemCall, UnknownException)
	STD_EXCEPTION(0xd00, SingleStep, UnknownException)
	STD_EXCEPTION(0xe00, Trap_0e, UnknownException)
	STD_EXCEPTION(0xf00, Trap_0f, UnknownException)
	STD_EXCEPTION(0x1000, SoftEmu, SoftEmuException)
	STD_EXCEPTION(0x1100, InstructionTLBMiss, UnknownException)
	STD_EXCEPTION(0x1200, DataTLBMiss, UnknownException)
	STD_EXCEPTION(0x1300, InstructionTLBError, UnknownException)
	STD_EXCEPTION(0x1400, DataTLBError, UnknownException)
	STD_EXCEPTION(0x1500, Reserved5, UnknownException)
	STD_EXCEPTION(0x1600, Reserved6, UnknownException)
	STD_EXCEPTION(0x1700, Reserved7, UnknownException)
	STD_EXCEPTION(0x1800, Reserved8, UnknownException)
	STD_EXCEPTION(0x1900, Reserved9, UnknownException)
	STD_EXCEPTION(0x1a00, ReservedA, UnknownException)
	STD_EXCEPTION(0x1b00, ReservedB, UnknownException)
	STD_EXCEPTION(0x1c00, DataBreakpoint, UnknownException)
	STD_EXCEPTION(0x1d00, InstructionBreakpoint, UnknownException)
	STD_EXCEPTION(0x1e00, PeripheralBreakpoint, UnknownException)
	STD_EXCEPTION(0x1f00, DevPortBreakpoint, UnknownException)

	.globl	_end_of_vectors
_end_of_vectors:

	. = 0x2000

boot_cold:
	/*
	 * NOTE: Only Cpu 0 will ever come here.  Other cores go to an
	 * address specified by the BPTR
	 */
1:
#ifdef CONFIG_SYS_RAMBOOT
	/* disable everything */
	li	r0, 0
	mtspr	HID0, r0
	sync
	mtmsr	0
#endif

	/* Invalidate BATs */
	bl	invalidate_bats
	sync
	/* Invalidate all of TLB before MMU turn on */
	bl	clear_tlbs
	sync

#ifdef CONFIG_SYS_L2
	/* init the L2 cache */
	lis	r3, L2_INIT@h
	ori	r3, r3, L2_INIT@l
	mtspr	l2cr, r3
	/* invalidate the L2 cache */
	bl	l2cache_invalidate
	sync
#endif

	/*
	 * Calculate absolute address in FLASH and jump there
	 *------------------------------------------------------*/
	lis	r3, CONFIG_SYS_MONITOR_BASE_EARLY@h
	ori	r3, r3, CONFIG_SYS_MONITOR_BASE_EARLY@l
	addi	r3, r3, in_flash - _start + EXC_OFF_SYS_RESET
	mtlr	r3
	blr

in_flash:
	/* let the C-code set up the rest */
	/*						*/
	/* Be careful to keep code relocatable ! */
	/*------------------------------------------------------*/
	/* perform low-level init */

	/* enable extended addressing */
	bl	enable_ext_addr

	/* setup the bats */
	bl	early_bats

	/*
	 * Cache must be enabled here for stack-in-cache trick.
	 * This means we need to enable the BATS.
	 * Cache should be turned on after BATs, since by default
	 * everything is write-through.
	 */
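	/*
	 * Translation is switched on with an rfi rather than mtmsr: SRR0/SRR1
	 * are loaded with the continuation address and the new MSR value, and
	 * the rfi below jumps to addr_trans_enabled with MSR[IR,DR] set.
	 */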
	/* enable address translation */
	mfmsr	r5
	ori	r5, r5, (MSR_IR | MSR_DR)
	lis	r3,addr_trans_enabled@h
	ori	r3, r3, addr_trans_enabled@l
	mtspr	SPRN_SRR0,r3
	mtspr	SPRN_SRR1,r5
	rfi

addr_trans_enabled:
	/* enable and invalidate the data cache */
/*	bl	l1dcache_enable */
	bl	dcache_enable
	sync

#if 1
	bl	icache_enable
#endif

#ifdef CONFIG_SYS_INIT_RAM_LOCK
	bl	lock_ram_in_cache
	sync
#endif

#if (CONFIG_SYS_CCSRBAR_DEFAULT != CONFIG_SYS_CCSRBAR)
	bl	setup_ccsrbar
#endif

	/* set up the stack pointer in our newly created
	 * cache-ram (r1) */
	lis	r1, (CONFIG_SYS_INIT_RAM_ADDR + CONFIG_SYS_GBL_DATA_OFFSET)@h
	ori	r1, r1, (CONFIG_SYS_INIT_RAM_ADDR + CONFIG_SYS_GBL_DATA_OFFSET)@l

	li	r0, 0		/* Make room for stack frame header and	*/
	stwu	r0, -4(r1)	/* clear final stack frame so that	*/
	stwu	r0, -4(r1)	/* stack backtraces terminate cleanly	*/

	GET_GOT			/* initialize GOT access	*/

	/* run low-level CPU init code (from Flash) */
	bl	cpu_init_f
	sync

#ifdef RUN_DIAG

	/* Load PX_AUX register address in r4 */
	lis	r4, PIXIS_BASE@h
	ori	r4, r4, 0x6
	/* Load contents of PX_AUX in r3 bits 24 to 31*/
	lbz	r3, 0(r4)

	/* Mask and obtain the bit in r3 */
	rlwinm.	r3, r3, 0, 24, 24
	/* If not zero, jump and continue with u-boot */
	bne	diag_done

	/* Load back contents of PX_AUX in r3 bits 24 to 31 */
	lbz	r3, 0(r4)
	/* Set the MSB of the register value */
	ori	r3, r3, 0x80
	/* Write value in r3 back to PX_AUX */
	stb	r3, 0(r4)

	/* Get the address to jump to in r3*/
	lis	r3, CONFIG_SYS_DIAG_ADDR@h
	ori	r3, r3, CONFIG_SYS_DIAG_ADDR@l

	/* Load the LR with the branch address */
	mtlr	r3

	/* Branch to diagnostic */
	blr

diag_done:
#endif

/*	bl	l2cache_enable */

	/* run 1st part of board init code (from Flash) */
	li	r3, 0		/* clear boot_flag for calling board_init_f */
	bl	board_init_f
	sync

	/* NOTREACHED - board_init_f() does not return */

	.globl	invalidate_bats
invalidate_bats:

	li	r0, 0
	/* invalidate BATs */
	mtspr	IBAT0U, r0
	mtspr	IBAT1U, r0
	mtspr	IBAT2U, r0
	mtspr	IBAT3U, r0
	mtspr	IBAT4U, r0
	mtspr	IBAT5U, r0
	mtspr	IBAT6U, r0
	mtspr	IBAT7U, r0

	isync
	mtspr	DBAT0U, r0
	mtspr	DBAT1U, r0
	mtspr	DBAT2U, r0
	mtspr	DBAT3U, r0
	mtspr	DBAT4U, r0
	mtspr	DBAT5U, r0
	mtspr	DBAT6U, r0
	mtspr	DBAT7U, r0

	isync
	sync
	blr

#define CONFIG_BAT_PAIR(n) \
	lis	r4, CONFIG_SYS_IBAT##n##L@h;		\
	ori	r4, r4, CONFIG_SYS_IBAT##n##L@l;	\
	lis	r3, CONFIG_SYS_IBAT##n##U@h;		\
	ori	r3, r3, CONFIG_SYS_IBAT##n##U@l;	\
	mtspr	IBAT##n##L, r4;				\
	mtspr	IBAT##n##U, r3;				\
	lis	r4, CONFIG_SYS_DBAT##n##L@h;		\
	ori	r4, r4, CONFIG_SYS_DBAT##n##L@l;	\
	lis	r3, CONFIG_SYS_DBAT##n##U@h;		\
	ori	r3, r3, CONFIG_SYS_DBAT##n##U@l;	\
	mtspr	DBAT##n##L, r4;				\
	mtspr	DBAT##n##U, r3;

/*
 * setup_bats:
 *
 * Set up the final BAT registers now that setup is done.
 *
 * Assumes that:
 *	1) Address translation is enabled upon entry
 *	2) The boot rom is still accessible via 1:1 translation
 */
	.globl	setup_bats
setup_bats:
	mflr	r5
	sync
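
	/*
	 * The caller's return address is kept in r5 across the two MSR/rfi
	 * transitions below; the final rfi returns through it with address
	 * translation re-enabled.
	 */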
	/*
	 * When we disable address translation, we will get 1:1 (VA==PA)
	 * translation.  The only place we know for sure is safe for that is
	 * the bootrom where we originally started out.  Pop back into there.
	 */
	lis	r4, CONFIG_SYS_MONITOR_BASE_EARLY@h
	ori	r4, r4, CONFIG_SYS_MONITOR_BASE_EARLY@l
	addi	r4, r4, trans_disabled - _start + EXC_OFF_SYS_RESET

	/* disable address translation */
	mfmsr	r3
	rlwinm	r3, r3, 0, 28, 25
	mtspr	SRR0, r4
	mtspr	SRR1, r3
	rfi

trans_disabled:
#if defined(CONFIG_SYS_DBAT0U) && defined(CONFIG_SYS_DBAT0L) \
	&& defined(CONFIG_SYS_IBAT0U) && defined(CONFIG_SYS_IBAT0L)
	CONFIG_BAT_PAIR(0)
#endif
	CONFIG_BAT_PAIR(1)
	CONFIG_BAT_PAIR(2)
	CONFIG_BAT_PAIR(3)
	CONFIG_BAT_PAIR(4)
	CONFIG_BAT_PAIR(5)
	CONFIG_BAT_PAIR(6)
	CONFIG_BAT_PAIR(7)

	sync
	isync

	/* Turn translation back on and return */
	mfmsr	r3
	ori	r3, r3, (MSR_IR | MSR_DR)
	mtspr	SPRN_SRR0,r5
	mtspr	SPRN_SRR1,r3
	rfi

/*
 * early_bats:
 *
 * Set up bats needed early on - this is usually the BAT for the
 * stack-in-cache, the Flash, and CCSR space
 */
	.globl	early_bats
early_bats:
	/* IBAT 3 */
	lis	r4, CONFIG_SYS_IBAT3L@h
	ori	r4, r4, CONFIG_SYS_IBAT3L@l
	lis	r3, CONFIG_SYS_IBAT3U@h
	ori	r3, r3, CONFIG_SYS_IBAT3U@l
	mtspr	IBAT3L, r4
	mtspr	IBAT3U, r3
	isync

	/* DBAT 3 */
	lis	r4, CONFIG_SYS_DBAT3L@h
	ori	r4, r4, CONFIG_SYS_DBAT3L@l
	lis	r3, CONFIG_SYS_DBAT3U@h
	ori	r3, r3, CONFIG_SYS_DBAT3U@l
	mtspr	DBAT3L, r4
	mtspr	DBAT3U, r3
	isync

	/* IBAT 5 */
	lis	r4, CONFIG_SYS_IBAT5L@h
	ori	r4, r4, CONFIG_SYS_IBAT5L@l
	lis	r3, CONFIG_SYS_IBAT5U@h
	ori	r3, r3, CONFIG_SYS_IBAT5U@l
	mtspr	IBAT5L, r4
	mtspr	IBAT5U, r3
	isync

	/* DBAT 5 */
	lis	r4, CONFIG_SYS_DBAT5L@h
	ori	r4, r4, CONFIG_SYS_DBAT5L@l
	lis	r3, CONFIG_SYS_DBAT5U@h
	ori	r3, r3, CONFIG_SYS_DBAT5U@l
	mtspr	DBAT5L, r4
	mtspr	DBAT5U, r3
	isync

	/* IBAT 6 */
	lis	r4, CONFIG_SYS_IBAT6L_EARLY@h
	ori	r4, r4, CONFIG_SYS_IBAT6L_EARLY@l
	lis	r3, CONFIG_SYS_IBAT6U_EARLY@h
	ori	r3, r3, CONFIG_SYS_IBAT6U_EARLY@l
	mtspr	IBAT6L, r4
	mtspr	IBAT6U, r3
	isync

	/* DBAT 6 */
	lis	r4, CONFIG_SYS_DBAT6L_EARLY@h
	ori	r4, r4, CONFIG_SYS_DBAT6L_EARLY@l
	lis	r3, CONFIG_SYS_DBAT6U_EARLY@h
	ori	r3, r3, CONFIG_SYS_DBAT6U_EARLY@l
	mtspr	DBAT6L, r4
	mtspr	DBAT6U, r3
	isync

#if (CONFIG_SYS_CCSRBAR_DEFAULT != CONFIG_SYS_CCSRBAR)
	/* IBAT 7 */
	lis	r4, CONFIG_SYS_CCSR_DEFAULT_IBATL@h
	ori	r4, r4, CONFIG_SYS_CCSR_DEFAULT_IBATL@l
	lis	r3, CONFIG_SYS_CCSR_DEFAULT_IBATU@h
	ori	r3, r3, CONFIG_SYS_CCSR_DEFAULT_IBATU@l
	mtspr	IBAT7L, r4
	mtspr	IBAT7U, r3
	isync

	/* DBAT 7 */
	lis	r4, CONFIG_SYS_CCSR_DEFAULT_DBATL@h
	ori	r4, r4, CONFIG_SYS_CCSR_DEFAULT_DBATL@l
	lis	r3, CONFIG_SYS_CCSR_DEFAULT_DBATU@h
	ori	r3, r3, CONFIG_SYS_CCSR_DEFAULT_DBATU@l
	mtspr	DBAT7L, r4
	mtspr	DBAT7U, r3
	isync
#endif
	blr

	.globl	clear_tlbs
clear_tlbs:
	addis	r3, 0, 0x0000
	addis	r5, 0, 0x4
	isync
tlblp:
	tlbie	r3
	sync
	addi	r3, r3, 0x1000
	cmp	0, 0, r3, r5
	blt	tlblp
	blr
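
/*
 * disable_addr_trans:
 *
 * Clear MSR[IR] and MSR[DR] and return to the caller (via rfi) with
 * address translation disabled.  Returns immediately if translation
 * is already off.
 */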
	.globl	disable_addr_trans
disable_addr_trans:
	/* disable address translation */
	mflr	r4
	mfmsr	r3
	andi.	r0, r3, (MSR_IR | MSR_DR)
	beqlr
	andc	r3, r3, r0
	mtspr	SRR0, r4
	mtspr	SRR1, r3
	rfi

/*
 * This code finishes saving the registers to the exception frame
 * and jumps to the appropriate handler for the exception.
 * Register r21 is pointer into trap frame, r1 has new stack pointer.
 */
	.globl	transfer_to_handler
transfer_to_handler:
	stw	r22,_NIP(r21)
	lis	r22,MSR_POW@h
	andc	r23,r23,r22
	stw	r23,_MSR(r21)
	SAVE_GPR(7, r21)
	SAVE_4GPRS(8, r21)
	SAVE_8GPRS(12, r21)
	SAVE_8GPRS(24, r21)
	mflr	r23
	andi.	r24,r23,0x3f00		/* get vector offset */
	stw	r24,TRAP(r21)
	li	r22,0
	stw	r22,RESULT(r21)
	mtspr	SPRG2,r22		/* r1 is now kernel sp */
	lwz	r24,0(r23)		/* virtual address of handler */
	lwz	r23,4(r23)		/* where to go when done */
	mtspr	SRR0,r24
	mtspr	SRR1,r20
	mtlr	r23
	SYNC
	rfi				/* jump to handler, enable MMU */

int_return:
	mfmsr	r28		/* Disable interrupts */
	li	r4,0
	ori	r4,r4,MSR_EE
	andc	r28,r28,r4
	SYNC			/* Some chip revs need this... */
	mtmsr	r28
	SYNC
	lwz	r2,_CTR(r1)
	lwz	r0,_LINK(r1)
	mtctr	r2
	mtlr	r0
	lwz	r2,_XER(r1)
	lwz	r0,_CCR(r1)
	mtspr	XER,r2
	mtcrf	0xFF,r0
	REST_10GPRS(3, r1)
	REST_10GPRS(13, r1)
	REST_8GPRS(23, r1)
	REST_GPR(31, r1)
	lwz	r2,_NIP(r1)	/* Restore environment */
	lwz	r0,_MSR(r1)
	mtspr	SRR0,r2
	mtspr	SRR1,r0
	lwz	r0,GPR0(r1)
	lwz	r2,GPR2(r1)
	lwz	r1,GPR1(r1)
	SYNC
	rfi

	.globl	dc_read
dc_read:
	blr

	.globl	get_pvr
get_pvr:
	mfspr	r3, PVR
	blr

	.globl	get_svr
get_svr:
	mfspr	r3, SVR
	blr


/*
 * Function:	in8
 * Description:	Input 8 bits
 */
	.globl	in8
in8:
	lbz	r3,0x0000(r3)
	blr

/*
 * Function:	out8
 * Description:	Output 8 bits
 */
	.globl	out8
out8:
	stb	r4,0x0000(r3)
	blr

/*
 * Function:	out16
 * Description:	Output 16 bits
 */
	.globl	out16
out16:
	sth	r4,0x0000(r3)
	blr

/*
 * Function:	out16r
 * Description:	Byte reverse and output 16 bits
 */
	.globl	out16r
out16r:
	sthbrx	r4,r0,r3
	blr

/*
 * Function:	out32
 * Description:	Output 32 bits
 */
	.globl	out32
out32:
	stw	r4,0x0000(r3)
	blr

/*
 * Function:	out32r
 * Description:	Byte reverse and output 32 bits
 */
	.globl	out32r
out32r:
	stwbrx	r4,r0,r3
	blr

/*
 * Function:	in16
 * Description:	Input 16 bits
 */
	.globl	in16
in16:
	lhz	r3,0x0000(r3)
	blr

/*
 * Function:	in16r
 * Description:	Input 16 bits and byte reverse
 */
	.globl	in16r
in16r:
	lhbrx	r3,r0,r3
	blr

/*
 * Function:	in32
 * Description:	Input 32 bits
 */
	.globl	in32
in32:
	lwz	r3,0x0000(r3)
	blr

/*
 * Function:	in32r
 * Description:	Input 32 bits and byte reverse
 */
	.globl	in32r
in32r:
	lwbrx	r3,r0,r3
	blr
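
/*
 * Note: the in*()/out*() accessors above are plain loads and stores with
 * no sync/eieio; any ordering required against other memory or I/O
 * accesses is the caller's responsibility.
 */
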
/*
 * void relocate_code (addr_sp, gd, addr_moni)
 *
 * This "function" does not return, instead it continues in RAM
 * after relocating the monitor code.
 *
 * The registers below describe the copy loop set up on entry
 * (the incoming arguments are r3 = addr_sp, r4 = gd, r5 = addr_moni):
 *
 *	r3 = dest
 *	r4 = src
 *	r5 = length in bytes
 *	r6 = cachelinesize
 */
	.globl	relocate_code
relocate_code:

	mr	r1, r3		/* Set new stack pointer		*/
	mr	r9, r4		/* Save copy of Global Data pointer	*/
	mr	r10, r5		/* Save copy of Destination Address	*/

	GET_GOT
	mr	r3, r5				/* Destination Address	*/
	lis	r4, CONFIG_SYS_MONITOR_BASE@h	/* Source      Address	*/
	ori	r4, r4, CONFIG_SYS_MONITOR_BASE@l
	lwz	r5, GOT(__init_end)
	sub	r5, r5, r4
	li	r6, CONFIG_SYS_CACHELINE_SIZE	/* Cache Line Size	*/

	/*
	 * Fix GOT pointer:
	 *
	 * New GOT-PTR = (old GOT-PTR - CONFIG_SYS_MONITOR_BASE) + Destination Address
	 *
	 * Offset:
	 */
	sub	r15, r10, r4

	/* First our own GOT */
	add	r12, r12, r15
	/* then the one used by the C code */
	add	r30, r30, r15

	/*
	 * Now relocate code
	 */
	cmplw	cr1,r3,r4
	addi	r0,r5,3
	srwi.	r0,r0,2
	beq	cr1,4f		/* In place copy is not necessary	*/
	beq	7f		/* Protect against 0 count		*/
	mtctr	r0
	bge	cr1,2f

	la	r8,-4(r4)
	la	r7,-4(r3)
1:	lwzu	r0,4(r8)
	stwu	r0,4(r7)
	bdnz	1b
	b	4f

2:	slwi	r0,r0,2
	add	r8,r4,r0
	add	r7,r3,r0
3:	lwzu	r0,-4(r8)
	stwu	r0,-4(r7)
	bdnz	3b

/*
 * Now flush the cache: note that we must start from a cache aligned
 * address. Otherwise we might miss one cache line.
 */
4:	cmpwi	r6,0
	add	r5,r3,r5
	beq	7f		/* Always flush prefetch queue in any case */
	subi	r0,r6,1
	andc	r3,r3,r0
	mr	r4,r3
5:	dcbst	0,r4
	add	r4,r4,r6
	cmplw	r4,r5
	blt	5b
	sync			/* Wait for all dcbst to complete on bus */
	mr	r4,r3
6:	icbi	0,r4
	add	r4,r4,r6
	cmplw	r4,r5
	blt	6b
7:	sync			/* Wait for all icbi to complete on bus	*/
	isync

/*
 * We are done. Do not return, instead branch to second part of board
 * initialization, now running from RAM.
 */
	addi	r0, r10, in_ram - _start + EXC_OFF_SYS_RESET
	mtlr	r0
	blr

in_ram:
	/*
	 * Relocation Function, r12 points to got2+0x8000
	 *
	 * Adjust got2 pointers, no need to check for 0, this code
	 * already puts a few entries in the table.
	 */
	li	r0,__got2_entries@sectoff@l
	la	r3,GOT(_GOT2_TABLE_)
	lwz	r11,GOT(_GOT2_TABLE_)
	mtctr	r0
	sub	r11,r3,r11
	addi	r3,r3,-4
1:	lwzu	r0,4(r3)
	cmpwi	r0,0
	beq-	2f
	add	r0,r0,r11
	stw	r0,0(r3)
2:	bdnz	1b

	/*
	 * Now adjust the fixups and the pointers to the fixups
	 * in case we need to move ourselves again.
	 */
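	/*
	 * Each _FIXUP_TABLE_ entry is a pointer to a word holding an absolute
	 * address.  The loop below adds the relocation offset (r11) to the
	 * table entry itself and, when that word is non-zero, to the word it
	 * points to as well.
	 */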
	li	r0,__fixup_entries@sectoff@l
	lwz	r3,GOT(_FIXUP_TABLE_)
	cmpwi	r0,0
	mtctr	r0
	addi	r3,r3,-4
	beq	4f
3:	lwzu	r4,4(r3)
	lwzux	r0,r4,r11
	cmpwi	r0,0
	add	r0,r0,r11
	stw	r4,0(r3)
	beq-	5f
	stw	r0,0(r4)
5:	bdnz	3b
4:
/* clear_bss: */
	/*
	 * Now clear BSS segment
	 */
	lwz	r3,GOT(__bss_start)
	lwz	r4,GOT(__bss_end)

	cmplw	0, r3, r4
	beq	6f

	li	r0, 0
5:
	stw	r0, 0(r3)
	addi	r3, r3, 4
	cmplw	0, r3, r4
	bne	5b
6:
	mr	r3, r9		/* Init Data pointer		*/
	mr	r4, r10		/* Destination Address		*/
	bl	board_init_r

	/* not reached - end relocate_code */
/*-----------------------------------------------------------------------*/

	/*
	 * Copy exception vector code to low memory
	 *
	 * r3: dest_addr
	 * r7: source address, r8: end address, r9: target address
	 */
	.globl	trap_init
trap_init:
	mflr	r4		/* save link register		*/
	GET_GOT
	lwz	r7, GOT(_start)
	lwz	r8, GOT(_end_of_vectors)

	li	r9, 0x100	/* reset vector always at 0x100 */

	cmplw	0, r7, r8
	bgelr			/* return if r7>=r8 - just in case */
1:
	lwz	r0, 0(r7)
	stw	r0, 0(r9)
	addi	r7, r7, 4
	addi	r9, r9, 4
	cmplw	0, r7, r8
	bne	1b

	/*
	 * relocate `hdlr' and `int_return' entries
	 */
	li	r7, .L_MachineCheck - _start + EXC_OFF_SYS_RESET
	li	r8, Alignment - _start + EXC_OFF_SYS_RESET
2:
	bl	trap_reloc
	addi	r7, r7, 0x100		/* next exception vector	*/
	cmplw	0, r7, r8
	blt	2b

	li	r7, .L_Alignment - _start + EXC_OFF_SYS_RESET
	bl	trap_reloc

	li	r7, .L_ProgramCheck - _start + EXC_OFF_SYS_RESET
	bl	trap_reloc

	li	r7, .L_FPUnavailable - _start + EXC_OFF_SYS_RESET
	li	r8, SystemCall - _start + EXC_OFF_SYS_RESET
3:
	bl	trap_reloc
	addi	r7, r7, 0x100		/* next exception vector	*/
	cmplw	0, r7, r8
	blt	3b

	li	r7, .L_SingleStep - _start + EXC_OFF_SYS_RESET
	li	r8, _end_of_vectors - _start + EXC_OFF_SYS_RESET
4:
	bl	trap_reloc
	addi	r7, r7, 0x100		/* next exception vector	*/
	cmplw	0, r7, r8
	blt	4b

	/* enable exceptions from RAM vectors */
	mfmsr	r7
	li	r8,MSR_IP
	andc	r7,r7,r8
	ori	r7,r7,MSR_ME		/* Enable Machine Check */
	mtmsr	r7

	mtlr	r4			/* restore link register	*/
	blr

.globl enable_ext_addr
enable_ext_addr:
	mfspr	r0, HID0
	lis	r0, (HID0_HIGH_BAT_EN | HID0_XBSEN | HID0_XAEN)@h
	ori	r0, r0, (HID0_HIGH_BAT_EN | HID0_XBSEN | HID0_XAEN)@l
	mtspr	HID0, r0
	sync
	isync
	blr

#if (CONFIG_SYS_CCSRBAR_DEFAULT != CONFIG_SYS_CCSRBAR)
.globl setup_ccsrbar
setup_ccsrbar:
	/* Special sequence needed to update CCSRBAR itself */
	lis	r4, CONFIG_SYS_CCSRBAR_DEFAULT@h
	ori	r4, r4, CONFIG_SYS_CCSRBAR_DEFAULT@l

	lis	r5, CONFIG_SYS_CCSRBAR_PHYS_LOW@h
	ori	r5, r5, CONFIG_SYS_CCSRBAR_PHYS_LOW@l
	srwi	r5,r5,12
	li	r6, CONFIG_SYS_CCSRBAR_PHYS_HIGH@l
	rlwimi	r5,r6,20,8,11
	stw	r5, 0(r4)		/* Store physical value of CCSR */
	isync

	lis	r5, CONFIG_SYS_TEXT_BASE@h
	ori	r5,r5,CONFIG_SYS_TEXT_BASE@l
	lwz	r5, 0(r5)
	isync

	/* Use VA of CCSR to do read */
	lis	r3, CONFIG_SYS_CCSRBAR@h
	lwz	r5, CONFIG_SYS_CCSRBAR@l(r3)
	isync

	blr
#endif

#ifdef CONFIG_SYS_INIT_RAM_LOCK
lock_ram_in_cache:
	/* Allocate Initial RAM in data cache.
	 */
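	/*
	 * dcbz establishes and zero-fills cache lines covering the init-RAM
	 * range without touching backing store; the HID0 write further down
	 * (presumably HID0[DLOCK], 0x1000) then locks the data cache so the
	 * lines stay resident and can be used as early stack/global data.
	 */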
	lis	r3, (CONFIG_SYS_INIT_RAM_ADDR & ~31)@h
	ori	r3, r3, (CONFIG_SYS_INIT_RAM_ADDR & ~31)@l
	li	r4, ((CONFIG_SYS_INIT_RAM_SIZE & ~31) + \
			(CONFIG_SYS_INIT_RAM_ADDR & 31) + 31) / 32
	mtctr	r4
1:
	dcbz	r0, r3
	addi	r3, r3, 32
	bdnz	1b
#if 1
/* Lock the data cache */
	mfspr	r0, HID0
	ori	r0, r0, 0x1000
	sync
	mtspr	HID0, r0
	sync
	blr
#endif
#if 0
	/* Lock the first way of the data cache */
	mfspr	r0, LDSTCR
	ori	r0, r0, 0x0080
#if defined(CONFIG_ALTIVEC)
	dssall
#endif
	sync
	mtspr	LDSTCR, r0
	sync
	isync
	blr
#endif

.globl unlock_ram_in_cache
unlock_ram_in_cache:
	/* invalidate the INIT_RAM section */
	lis	r3, (CONFIG_SYS_INIT_RAM_ADDR & ~31)@h
	ori	r3, r3, (CONFIG_SYS_INIT_RAM_ADDR & ~31)@l
	li	r4, ((CONFIG_SYS_INIT_RAM_SIZE & ~31) + \
			(CONFIG_SYS_INIT_RAM_ADDR & 31) + 31) / 32
	mtctr	r4
1:	icbi	r0, r3
	addi	r3, r3, 32
	bdnz	1b
	sync			/* Wait for all icbi to complete on bus	*/
	isync
#if 1
/* Unlock the data cache and invalidate it */
	mfspr	r0, HID0
	li	r3,0x1000
	andc	r0,r0,r3
	li	r3,0x0400
	or	r0,r0,r3
	sync
	mtspr	HID0, r0
	sync
	blr
#endif
#if 0
	/* Unlock the first way of the data cache */
	mfspr	r0, LDSTCR
	li	r3,0x0080
	andc	r0,r0,r3
#ifdef CONFIG_ALTIVEC
	dssall
#endif
	sync
	mtspr	LDSTCR, r0
	sync
	isync
	li	r3,0x0400
	or	r0,r0,r3
	sync
	mtspr	HID0, r0
	sync
	blr
#endif
#endif