Lines Matching +full:bd +full:- +full:address

1 // SPDX-License-Identifier: GPL-2.0+
4 * Copyright 2014-2015 Freescale Semiconductor, Inc.
22 #include <fsl-mc/fsl_mc.h>
86 /* For IFC Region #1, only the first 4MB is cache-enabled */
93 CONFIG_SYS_FSL_IFC_SIZE1 - CONFIG_SYS_FSL_IFC_SIZE1_1,
114 CONFIG_SYS_FLASH_BASE - CONFIG_SYS_FSL_IFC_BASE2,
227 /* For QBMAN portal, only the first 64MB is cache-enabled */
235 CONFIG_SYS_FSL_QBMAN_SIZE - CONFIG_SYS_FSL_QBMAN_SIZE_1,
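The two comments above describe the same pattern for the IFC and QBMan windows: one physical region appears twice in the MMU map, a cache-enabled head (the *_SIZE_1 part) and a non-cacheable tail whose size is the difference shown. A minimal standalone sketch of that split; the sizes are made-up stand-ins for the CONFIG_SYS_FSL_* values, not the real board constants.

#include <stdio.h>

/* Stand-ins for CONFIG_SYS_FSL_QBMAN_SIZE / _SIZE_1 (illustrative only). */
#define REGION_SIZE   0x20000000ULL   /* whole window              */
#define REGION_SIZE_1 0x04000000ULL   /* first 64MB, cache-enabled */

int main(void)
{
	unsigned long long cached = REGION_SIZE_1;
	unsigned long long uncached = REGION_SIZE - REGION_SIZE_1;

	/* Two map entries: the cached head, then the non-cacheable tail
	 * starting right behind it. */
	printf("cached entry:   offset 0x0, size 0x%llx\n", cached);
	printf("uncached entry: offset 0x%llx, size 0x%llx\n", cached, uncached);
	return 0;
}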
368 svr = gur_in32(&gur->svr); in cpu_name()
394 * The base address of SRAM is CONFIG_SYS_FSL_OCRAM_BASE. We use three
395 * levels of translation tables here to cover 40-bit address space.
396 * We use 4KB granule size, with 40 bits physical address, T0SZ=24
397 * Addresses above EARLY_PGTABLE_SIZE (0x5000) are free for other purposes.
407 gd->arch.tlb_addr = CONFIG_SYS_FSL_OCRAM_BASE; in early_mmu_setup()
409 gd->arch.tlb_addr = CONFIG_SYS_DDR_SDRAM_BASE; in early_mmu_setup()
410 gd->arch.tlb_fillptr = gd->arch.tlb_addr; in early_mmu_setup()
411 gd->arch.tlb_size = EARLY_PGTABLE_SIZE; in early_mmu_setup()
417 set_ttbr_tcr_mair(el, gd->arch.tlb_addr, in early_mmu_setup()
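The early_mmu_setup() comment above places the early page tables in OCRAM (or DDR for the SPL case), sized at EARLY_PGTABLE_SIZE with a 4KB granule and T0SZ=24. A small sketch of the arithmetic behind those numbers; the constants simply restate the comment and are not taken from the real headers.

#include <stdio.h>

#define T0SZ               24
#define PAGE_SIZE          4096ULL
#define EARLY_PGTABLE_SIZE 0x5000ULL   /* as quoted in the comment above */

int main(void)
{
	/* With TCR.T0SZ = 24 the TTBR0 region spans 2^(64-24) = 2^40 bytes,
	 * i.e. the 40-bit address space the comment mentions. */
	unsigned long long va_range = 1ULL << (64 - T0SZ);

	/* A 0x5000-byte budget holds five 4KB table pages for the early,
	 * mostly block-mapped translation tables. */
	unsigned long long table_pages = EARLY_PGTABLE_SIZE / PAGE_SIZE;

	printf("input address range: 0x%llx bytes\n", va_range);
	printf("early table pages:   %llu\n", table_pages);
	return 0;
}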
432 svr = gur_in32(&gur->svr); in fix_pcie_mmu_map()
474 * Put the MMU table in secure memory if gd->arch.secure_ram is valid.
475 * OCRAM will not be used for this purpose, so gd->arch.secure_ram can't be 0.
479 u64 tlb_addr_save = gd->arch.tlb_addr; in final_mmu_setup()
489 for (index = 0; index < ARRAY_SIZE(final_map) - 2; index++) { in final_mmu_setup()
491 * Find the entry for DDR mapping and update the address and in final_mmu_setup()
492 * size. Zero-sized mapping will be skipped when creating MMU in final_mmu_setup()
497 final_map[index].virt = gd->bd->bi_dram[0].start; in final_mmu_setup()
498 final_map[index].phys = gd->bd->bi_dram[0].start; in final_mmu_setup()
499 final_map[index].size = gd->bd->bi_dram[0].size; in final_mmu_setup()
504 final_map[index].virt = gd->bd->bi_dram[1].start; in final_mmu_setup()
505 final_map[index].phys = gd->bd->bi_dram[1].start; in final_mmu_setup()
506 final_map[index].size = gd->bd->bi_dram[1].size; in final_mmu_setup()
515 final_map[index].virt = gd->bd->bi_dram[2].start; in final_mmu_setup()
516 final_map[index].phys = gd->bd->bi_dram[2].start; in final_mmu_setup()
517 final_map[index].size = gd->bd->bi_dram[2].size; in final_mmu_setup()
529 if (gd->arch.secure_ram & MEM_RESERVE_SECURE_MAINTAINED) { in final_mmu_setup()
532 * Only use gd->arch.secure_ram if the address is in final_mmu_setup()
536 index = ARRAY_SIZE(final_map) - 2; in final_mmu_setup()
537 gd->arch.tlb_addr = gd->arch.secure_ram & ~0xfff; in final_mmu_setup()
538 final_map[index].virt = gd->arch.secure_ram & ~0x3; in final_mmu_setup()
542 gd->arch.secure_ram |= MEM_RESERVE_SECURE_SECURED; in final_mmu_setup()
543 tlb_addr_save = gd->arch.tlb_addr; in final_mmu_setup()
546 tlb_addr_save = gd->arch.tlb_allocated; in final_mmu_setup()
547 gd->arch.tlb_addr = tlb_addr_save; in final_mmu_setup()
553 gd->arch.tlb_fillptr = tlb_addr_save; in final_mmu_setup()
559 gd->arch.tlb_addr = gd->arch.tlb_fillptr; in final_mmu_setup()
560 gd->arch.tlb_emerg = gd->arch.tlb_addr; in final_mmu_setup()
562 gd->arch.tlb_addr = tlb_addr_save; in final_mmu_setup()
569 set_ttbr_tcr_mair(el, gd->arch.tlb_addr, get_tcr(el, NULL, NULL), in final_mmu_setup()
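In the final_mmu_setup() lines above, gd->arch.secure_ram packs status flags into the low bits of the secure-region address, which is why the table base is taken with & ~0xfff (4KB-aligned) and the mapped address with & ~0x3 (flags stripped). A minimal sketch of that packing; the two flag values are assumptions chosen to fit in the masked bits, not the real header definitions.

#include <stdio.h>

/* Assumed flag encodings living in the low bits (illustrative only). */
#define MEM_RESERVE_SECURE_MAINTAINED 0x1ULL
#define MEM_RESERVE_SECURE_SECURED    0x2ULL

int main(void)
{
	/* Pretend the secure region base is already tracked ("maintained"). */
	unsigned long long secure_ram =
		0xfff0000000ULL | MEM_RESERVE_SECURE_MAINTAINED;

	if (secure_ram & MEM_RESERVE_SECURE_MAINTAINED) {
		unsigned long long tlb_addr = secure_ram & ~0xfffULL; /* table base  */
		unsigned long long map_addr = secure_ram & ~0x3ULL;   /* map address */

		/* Mark the region as now holding the (secured) MMU tables. */
		secure_ram |= MEM_RESERVE_SECURE_SECURED;

		printf("tlb_addr: 0x%llx\n", tlb_addr);
		printf("map addr: 0x%llx\n", map_addr);
		printf("flags:    0x%llx\n", secure_ram & 0x3ULL);
	}
	return 0;
}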
583 * This function is called before U-Boot relocates itself to speed up in arch_cpu_init()
750 porsr1 = in_be32(&gur->porsr1); in get_boot_src()
836 type = gur_in32(&gur->tp_ityp[idx]); in initiator_type()
852 cluster = gur_in32(&gur->tp_cluster[i].lower); in cpu_pos_mask()
873 cluster = gur_in32(&gur->tp_cluster[i].lower); in cpu_mask()
906 cluster = gur_in32(&gur->tp_cluster[i].lower); in fsl_qoriq_core_to_cluster()
917 return -1; /* cannot identify the cluster */ in fsl_qoriq_core_to_cluster()
930 cluster = gur_in32(&gur->tp_cluster[i].lower); in fsl_qoriq_core_to_type()
942 return -1; /* cannot identify the cluster */ in fsl_qoriq_core_to_type()
950 return gur_in32(&gur->svr); in get_svr()
961 u32 type, rcw, svr = gur_in32(&gur->svr); in print_cpuinfo()
974 printf("CPU%d(%s):%-4s MHz ", core, in print_cpuinfo()
982 printf("\n Bus: %-4s MHz ", in print_cpuinfo()
984 printf("DDR: %-4s MT/s", strmhz(buf, sysinfo.freq_ddrbus)); in print_cpuinfo()
986 printf(" FMAN: %-4s MHz", strmhz(buf, sysinfo.freq_fman[0])); in print_cpuinfo()
990 printf(" DP-DDR: %-4s MT/s", in print_cpuinfo()
1001 for (i = 0; i < ARRAY_SIZE(gur->rcwsr); i++) { in print_cpuinfo()
1002 rcw = gur_in32(&gur->rcwsr[i]); in print_cpuinfo()
1244 ram_top = ram_size - ram_top; in board_reserve_ram_top()
1245 /* The start address of MC reserved memory needs to be aligned. */ in board_reserve_ram_top()
1246 ram_top &= ~(CONFIG_SYS_MC_RSV_MEM_ALIGN - 1); in board_reserve_ram_top()
1249 return ram_size - ram_top; in board_reserve_ram_top()
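board_reserve_ram_top() above computes the MC carve-out from the top of DDR: the prospective top of usable RAM is rounded down to CONFIG_SYS_MC_RSV_MEM_ALIGN, and the difference from the total size is what actually gets reserved. A standalone worked example of that round-down, with a made-up alignment and sizes.

#include <stdio.h>

#define MC_RSV_MEM_ALIGN 0x20000000ULL   /* example: 512MB alignment */

/* Same shape as board_reserve_ram_top(): given the DDR size and the
 * requested reservation, return the bytes actually reserved on top. */
static unsigned long long reserve_ram_top(unsigned long long ram_size,
					  unsigned long long wanted)
{
	unsigned long long ram_top = ram_size - wanted;

	/* Align the start of the reserved region downwards. */
	ram_top &= ~(MC_RSV_MEM_ALIGN - 1);

	return ram_size - ram_top;   /* may exceed 'wanted' after alignment */
}

int main(void)
{
	/* 4GB of DDR, 384MB requested: 0xE8000000 rounds down to
	 * 0xE0000000, so 0x20000000 (512MB) ends up reserved. */
	printf("reserved: 0x%llx\n",
	       reserve_ram_top(0x100000000ULL, 0x18000000ULL));
	return 0;
}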
1263 * U-Boot doesn't relocate itself into higher address. Should DDR be in get_effective_memsize()
1267 if (gd->ram_size > CONFIG_MAX_MEM_MAPPED) { in get_effective_memsize()
1269 rem = gd->ram_size - ea_size; in get_effective_memsize()
1271 ea_size = gd->ram_size; in get_effective_memsize()
1277 ea_size -= CONFIG_SYS_MEM_RESERVE_SECURE; in get_effective_memsize()
1285 ea_size -= board_reserve_ram_top(ea_size); in get_effective_memsize()
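The get_effective_memsize() fragments above clamp the memory U-Boot itself uses to CONFIG_MAX_MEM_MAPPED, then subtract the secure reservation and whatever board_reserve_ram_top() claims. A simplified standalone sketch of that sequence; the constants are placeholders for the board-specific CONFIG_* values, and the conditions around the secure carve-out are reduced to a single size check.

#include <stdio.h>

#define MAX_MEM_MAPPED     0x80000000ULL   /* example: low 2GB region */
#define MEM_RESERVE_SECURE 0x04000000ULL   /* example: 64MB secure    */

/* Simplified shape of get_effective_memsize(). */
static unsigned long long effective_memsize(unsigned long long ram_size,
					    unsigned long long rsv_top)
{
	unsigned long long ea_size;

	/* U-Boot stays in the low mapping, so clamp what it uses. */
	ea_size = ram_size > MAX_MEM_MAPPED ? MAX_MEM_MAPPED : ram_size;

	/* Carve the secure reservation out of the usable block
	 * (simplified: the real code checks where it can live first). */
	if (ea_size > MEM_RESERVE_SECURE)
		ea_size -= MEM_RESERVE_SECURE;

	/* Subtract the top-of-RAM reservation (e.g. for the MC). */
	ea_size -= rsv_top;

	return ea_size;
}

int main(void)
{
	/* 8GB of DDR, 512MB reserved at the top. */
	printf("effective: 0x%llx\n",
	       effective_memsize(0x200000000ULL, 0x20000000ULL));
	return 0;
}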
1300 regs.regs[1] = -1; in tfa_get_dram_size()
1319 return -EINVAL; in tfa_dram_init_banksize()
1327 ret = -EINVAL; in tfa_dram_init_banksize()
1333 gd->bd->bi_dram[i].start = regs.regs[1]; in tfa_dram_init_banksize()
1334 gd->bd->bi_dram[i].size = regs.regs[2]; in tfa_dram_init_banksize()
1336 dram_size -= gd->bd->bi_dram[i].size; in tfa_dram_init_banksize()
1347 if (gd->bd->bi_dram[2].size >= in tfa_dram_init_banksize()
1348 board_reserve_ram_top(gd->bd->bi_dram[2].size)) { in tfa_dram_init_banksize()
1349 gd->arch.resv_ram = gd->bd->bi_dram[2].start + in tfa_dram_init_banksize()
1350 gd->bd->bi_dram[2].size - in tfa_dram_init_banksize()
1351 board_reserve_ram_top(gd->bd->bi_dram[2].size); in tfa_dram_init_banksize()
1355 if (gd->bd->bi_dram[1].size >= in tfa_dram_init_banksize()
1356 board_reserve_ram_top(gd->bd->bi_dram[1].size)) { in tfa_dram_init_banksize()
1357 gd->arch.resv_ram = gd->bd->bi_dram[1].start + in tfa_dram_init_banksize()
1358 gd->bd->bi_dram[1].size - in tfa_dram_init_banksize()
1359 board_reserve_ram_top(gd->bd->bi_dram[1].size); in tfa_dram_init_banksize()
1360 } else if (gd->bd->bi_dram[0].size > in tfa_dram_init_banksize()
1361 board_reserve_ram_top(gd->bd->bi_dram[0].size)) { in tfa_dram_init_banksize()
1362 gd->arch.resv_ram = gd->bd->bi_dram[0].start + in tfa_dram_init_banksize()
1363 gd->bd->bi_dram[0].size - in tfa_dram_init_banksize()
1364 board_reserve_ram_top(gd->bd->bi_dram[0].size); in tfa_dram_init_banksize()
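The chain of checks above (bank [2], then [1], then [0]) places gd->arch.resv_ram at the top of the highest DRAM bank large enough to hold the board_reserve_ram_top() amount. A small sketch of that selection over a plain array of banks, using a fixed reservation size instead of the callback.

#include <stdio.h>

struct bank {
	unsigned long long start;
	unsigned long long size;
};

/* Return the reserved-region base, or 0 if no bank can hold it.
 * Highest bank wins, mirroring the [2] -> [1] -> [0] order above. */
static unsigned long long place_resv_ram(const struct bank *b, int nbanks,
					 unsigned long long rsv)
{
	for (int i = nbanks - 1; i >= 0; i--) {
		if (b[i].size >= rsv)
			return b[i].start + b[i].size - rsv;
	}
	return 0;
}

int main(void)
{
	struct bank banks[] = {
		{ 0x0080000000ULL, 0x7ff00000ULL },   /* bank 0, ~2GB */
		{ 0x2080000000ULL, 0x180000000ULL },  /* bank 1, 6GB  */
	};

	printf("resv_ram = 0x%llx\n",
	       place_resv_ram(banks, 2, 0x20000000ULL /* 512MB */));
	return 0;
}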
1384 * gd->ram_size has the total size of DDR memory, less reserved secure in dram_init_banksize()
1386 * no hole is created with DDR configuration. gd->arch.secure_ram tracks in dram_init_banksize()
1387 * the location of secure memory. gd->arch.resv_ram tracks the location in dram_init_banksize()
1388 * of reserved memory for Management Complex (MC). Because gd->ram_size in dram_init_banksize()
1390 * gd->arch.secure_ram should be done to avoid running it repeatedly. in dram_init_banksize()
1394 if (gd->arch.secure_ram & MEM_RESERVE_SECURE_MAINTAINED) { in dram_init_banksize()
1401 gd->bd->bi_dram[0].start = CONFIG_SYS_SDRAM_BASE; in dram_init_banksize()
1402 if (gd->ram_size > CONFIG_SYS_DDR_BLOCK1_SIZE) { in dram_init_banksize()
1403 gd->bd->bi_dram[0].size = CONFIG_SYS_DDR_BLOCK1_SIZE; in dram_init_banksize()
1404 gd->bd->bi_dram[1].start = CONFIG_SYS_DDR_BLOCK2_BASE; in dram_init_banksize()
1405 gd->bd->bi_dram[1].size = gd->ram_size - in dram_init_banksize()
1408 if (gd->bd->bi_dram[1].size > CONFIG_SYS_DDR_BLOCK2_SIZE) { in dram_init_banksize()
1409 gd->bd->bi_dram[2].start = CONFIG_SYS_DDR_BLOCK3_BASE; in dram_init_banksize()
1410 gd->bd->bi_dram[2].size = gd->bd->bi_dram[1].size - in dram_init_banksize()
1412 gd->bd->bi_dram[1].size = CONFIG_SYS_DDR_BLOCK2_SIZE; in dram_init_banksize()
1416 gd->bd->bi_dram[0].size = gd->ram_size; in dram_init_banksize()
1419 if (gd->bd->bi_dram[0].size > in dram_init_banksize()
1421 gd->bd->bi_dram[0].size -= in dram_init_banksize()
1423 gd->arch.secure_ram = gd->bd->bi_dram[0].start + in dram_init_banksize()
1424 gd->bd->bi_dram[0].size; in dram_init_banksize()
1425 gd->arch.secure_ram |= MEM_RESERVE_SECURE_MAINTAINED; in dram_init_banksize()
1426 gd->ram_size -= CONFIG_SYS_MEM_RESERVE_SECURE; in dram_init_banksize()
1433 if (gd->bd->bi_dram[2].size >= in dram_init_banksize()
1434 board_reserve_ram_top(gd->bd->bi_dram[2].size)) { in dram_init_banksize()
1435 gd->arch.resv_ram = gd->bd->bi_dram[2].start + in dram_init_banksize()
1436 gd->bd->bi_dram[2].size - in dram_init_banksize()
1437 board_reserve_ram_top(gd->bd->bi_dram[2].size); in dram_init_banksize()
1441 if (gd->bd->bi_dram[1].size >= in dram_init_banksize()
1442 board_reserve_ram_top(gd->bd->bi_dram[1].size)) { in dram_init_banksize()
1443 gd->arch.resv_ram = gd->bd->bi_dram[1].start + in dram_init_banksize()
1444 gd->bd->bi_dram[1].size - in dram_init_banksize()
1445 board_reserve_ram_top(gd->bd->bi_dram[1].size); in dram_init_banksize()
1446 } else if (gd->bd->bi_dram[0].size > in dram_init_banksize()
1447 board_reserve_ram_top(gd->bd->bi_dram[0].size)) { in dram_init_banksize()
1448 gd->arch.resv_ram = gd->bd->bi_dram[0].start + in dram_init_banksize()
1449 gd->bd->bi_dram[0].size - in dram_init_banksize()
1450 board_reserve_ram_top(gd->bd->bi_dram[0].size); in dram_init_banksize()
1460 /* initialize DP-DDR here */ in dram_init_banksize()
1461 puts("DP-DDR: "); in dram_init_banksize()
1463 * The DDR controller uses 0 as the base address for binding. in dram_init_banksize()
1472 gd->bd->bi_dram[2].start = CONFIG_SYS_DP_DDR_BASE; in dram_init_banksize()
1473 gd->bd->bi_dram[2].size = dp_ddr_size; in dram_init_banksize()
1481 debug("%s is called. gd->ram_size is reduced to %lu\n", in dram_init_banksize()
1482 __func__, (ulong)gd->ram_size); in dram_init_banksize()
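dram_init_banksize() above distributes gd->ram_size over up to three bi_dram banks: block 1 up to CONFIG_SYS_DDR_BLOCK1_SIZE at CONFIG_SYS_SDRAM_BASE, the overflow into block 2 at CONFIG_SYS_DDR_BLOCK2_BASE, and anything beyond CONFIG_SYS_DDR_BLOCK2_SIZE into block 3. A standalone sketch of that split with placeholder base/size constants in place of the board headers, ignoring the secure and MC reservations handled afterwards.

#include <stdio.h>

#define SDRAM_BASE  0x0080000000ULL
#define BLOCK1_SIZE 0x0080000000ULL   /* example: 2GB below the hole */
#define BLOCK2_BASE 0x2080000000ULL
#define BLOCK2_SIZE 0x1000000000ULL   /* example: 64GB window        */
#define BLOCK3_BASE 0x6000000000ULL

struct bank {
	unsigned long long start;
	unsigned long long size;
};

/* Split total DDR into banks the same way the code above does;
 * returns how many banks were populated. */
static int split_banks(unsigned long long ram_size, struct bank *b)
{
	if (ram_size <= BLOCK1_SIZE) {
		b[0].start = SDRAM_BASE;
		b[0].size = ram_size;
		return 1;
	}

	b[0].start = SDRAM_BASE;
	b[0].size = BLOCK1_SIZE;
	b[1].start = BLOCK2_BASE;
	b[1].size = ram_size - BLOCK1_SIZE;

	if (b[1].size <= BLOCK2_SIZE)
		return 2;

	/* Block 2 is full; the remainder spills into block 3. */
	b[2].start = BLOCK3_BASE;
	b[2].size = b[1].size - BLOCK2_SIZE;
	b[1].size = BLOCK2_SIZE;
	return 3;
}

int main(void)
{
	struct bank b[3];
	int n = split_banks(0x200000000ULL /* 8GB */, b);

	for (int i = 0; i < n; i++)
		printf("bank %d: 0x%llx + 0x%llx\n", i, b[i].start, b[i].size);
	return 0;
}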
1503 continue; /* skip DP-DDR */ in efi_add_known_memory()
1505 ram_start = gd->bd->bi_dram[i].start; in efi_add_known_memory()
1506 ram_size = gd->bd->bi_dram[i].size; in efi_add_known_memory()
1508 if (gd->arch.resv_ram >= ram_start && in efi_add_known_memory()
1509 gd->arch.resv_ram < ram_start + ram_size) in efi_add_known_memory()
1510 ram_size = gd->arch.resv_ram - ram_start; in efi_add_known_memory()
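When building the EFI memory map above, a bank that contains gd->arch.resv_ram is reported only up to that address, so the MC reservation never shows up as usable memory. A tiny sketch of that trim.

#include <stdio.h>

/* Clip a bank's reported size so it stops at a reserved base address
 * that falls inside the bank; otherwise leave it unchanged. */
static unsigned long long usable_size(unsigned long long start,
				      unsigned long long size,
				      unsigned long long resv)
{
	if (resv >= start && resv < start + size)
		return resv - start;
	return size;
}

int main(void)
{
	/* 2GB bank with a reservation starting 512MB below its end. */
	printf("0x%llx\n",
	       usable_size(0x80000000ULL, 0x80000000ULL, 0xE0000000ULL));
	return 0;
}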
1523 * to avoid speculative access. To relocate U-Boot to DDR, "normal memory"
1530 if (!gd->arch.tlb_addr) in update_early_mmu_table()
1533 if (gd->ram_size <= CONFIG_SYS_FSL_DRAM_SIZE1) { in update_early_mmu_table()
1536 gd->ram_size, in update_early_mmu_table()
1553 if (gd->ram_size - CONFIG_SYS_DDR_BLOCK1_SIZE > in update_early_mmu_table()
1564 gd->ram_size - in update_early_mmu_table()
1565 CONFIG_SYS_DDR_BLOCK1_SIZE - in update_early_mmu_table()
1576 gd->ram_size - in update_early_mmu_table()
1591 /* This will break-before-make MMU for DDR */ in dram_init()