/*
 * Copyright 2008-2011 Freescale Semiconductor, Inc.
 *
 * (C) Copyright 2000
 * Wolfgang Denk, DENX Software Engineering, wd@denx.de.
 *
 * SPDX-License-Identifier: GPL-2.0+
 */

#include <common.h>
#include <asm/processor.h>
#include <asm/mmu.h>
#ifdef CONFIG_ADDR_MAP
#include <addr_map.h>
#endif

DECLARE_GLOBAL_DATA_PTR;

/* Flash-invalidate every entry in TLB0 (bit 0x4) or TLB1 (bit 0x2) via MMUCSR0 */
void invalidate_tlb(u8 tlb)
{
	if (tlb == 0)
		mtspr(MMUCSR0, 0x4);
	if (tlb == 1)
		mtspr(MMUCSR0, 0x2);
}

/* Program the boot-time TLB entries supplied by the board's tlb_table[] */
__weak void init_tlbs(void)
{
	int i;

	for (i = 0; i < num_tlb_entries; i++) {
		write_tlb(tlb_table[i].mas0,
			  tlb_table[i].mas1,
			  tlb_table[i].mas2,
			  tlb_table[i].mas3,
			  tlb_table[i].mas7);
	}

	return;
}

#if !defined(CONFIG_NAND_SPL) && \
	(!defined(CONFIG_SPL_BUILD) || !defined(CONFIG_SPL_INIT_MINIMAL))
void read_tlbcam_entry(int idx, u32 *valid, u32 *tsize, unsigned long *epn,
		       phys_addr_t *rpn)
{
	u32 _mas1;

	mtspr(MAS0, FSL_BOOKE_MAS0(1, idx, 0));
	asm volatile("tlbre;isync");
	_mas1 = mfspr(MAS1);

	*valid = (_mas1 & MAS1_VALID);
	*tsize = (_mas1 >> 7) & 0x1f;
	*epn = mfspr(MAS2) & MAS2_EPN;
	*rpn = mfspr(MAS3) & MAS3_RPN;
#ifdef CONFIG_ENABLE_36BIT_PHYS
	*rpn |= ((u64)mfspr(MAS7)) << 32;
#endif
}

void print_tlbcam(void)
{
	int i;
	unsigned int num_cam = mfspr(SPRN_TLB1CFG) & 0xfff;

	/* walk all the entries */
	printf("TLBCAM entries\n");
	for (i = 0; i < num_cam; i++) {
		unsigned long epn;
		u32 tsize, valid;
		phys_addr_t rpn;

		read_tlbcam_entry(i, &valid, &tsize, &epn, &rpn);
		printf("entry %02d: V: %d EPN 0x%08x RPN 0x%08llx size:",
		       i, (valid == 0) ? 0 : 1, (unsigned int)epn,
		       (unsigned long long)rpn);
		print_size(TSIZE_TO_BYTES(tsize), "\n");
	}
}

/* Track which TLB1 (CAM) entries are in use: one bit per entry */
static inline void use_tlb_cam(u8 idx)
{
	int i = idx / 32;
	int bit = idx % 32;

	gd->arch.used_tlb_cams[i] |= (1 << bit);
}

static inline void free_tlb_cam(u8 idx)
{
	int i = idx / 32;
	int bit = idx % 32;

	gd->arch.used_tlb_cams[i] &= ~(1 << bit);
}

/* Rebuild the used-entry bitmap from the valid bits already set in hardware */
void init_used_tlb_cams(void)
{
	int i;
	unsigned int num_cam = mfspr(SPRN_TLB1CFG) & 0xfff;

	for (i = 0; i < ((CONFIG_SYS_NUM_TLBCAMS+31)/32); i++)
		gd->arch.used_tlb_cams[i] = 0;

	/* walk all the entries */
	for (i = 0; i < num_cam; i++) {
		mtspr(MAS0, FSL_BOOKE_MAS0(1, i, 0));
		asm volatile("tlbre;isync");
		if (mfspr(MAS1) & MAS1_VALID)
			use_tlb_cam(i);
	}
}

/* Return the index of an unused TLB1 (CAM) entry, or -1 if all are taken */
int find_free_tlbcam(void)
{
	int i;
	u32 idx;

	for (i = 0; i < ((CONFIG_SYS_NUM_TLBCAMS+31)/32); i++) {
		idx = ffz(gd->arch.used_tlb_cams[i]);

		if (idx != 32)
			break;
	}

	idx += i * 32;

	if (idx >= CONFIG_SYS_NUM_TLBCAMS)
		return -1;

	return idx;
}
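
/*
 * Illustrative sketch, not part of the original file: board code typically
 * pairs find_free_tlbcam() with set_tlb() below to map an I/O window into
 * TLB1.  The base address, page size and WIMG/permission choices here are
 * assumptions for the example only.  The argument order of set_tlb() is:
 * tlb, epn, rpn, perms, wimge, ts, esel, tsize, iprot.
 *
 *	int esel = find_free_tlbcam();
 *
 *	if (esel >= 0)
 *		set_tlb(1, 0xe0000000, 0xe0000000,
 *			MAS3_SW | MAS3_SR, MAS2_I | MAS2_G,
 *			0, esel, BOOKE_PAGESZ_64M, 1);
 */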

/*
 * Write one TLB entry.  For TLB1 (tlb == 1) the chosen esel slot is also
 * marked as used in the CAM bitmap.
 */
void set_tlb(u8 tlb, u32 epn, u64 rpn,
	     u8 perms, u8 wimge,
	     u8 ts, u8 esel, u8 tsize, u8 iprot)
{
	u32 _mas0, _mas1, _mas2, _mas3, _mas7;

	if (tlb == 1)
		use_tlb_cam(esel);

	if ((mfspr(SPRN_MMUCFG) & MMUCFG_MAVN) == MMUCFG_MAVN_V1 &&
	    tsize & 1) {
		printf("%s: bad tsize %d on entry %d at 0x%08x\n",
		       __func__, tsize, tlb, epn);
		return;
	}

	_mas0 = FSL_BOOKE_MAS0(tlb, esel, 0);
	_mas1 = FSL_BOOKE_MAS1(1, iprot, 0, ts, tsize);
	_mas2 = FSL_BOOKE_MAS2(epn, wimge);
	_mas3 = FSL_BOOKE_MAS3(rpn, 0, perms);
	_mas7 = FSL_BOOKE_MAS7(rpn);

	write_tlb(_mas0, _mas1, _mas2, _mas3, _mas7);

#ifdef CONFIG_ADDR_MAP
	if ((tlb == 1) && (gd->flags & GD_FLG_RELOC))
		addrmap_set_entry(epn, rpn, TSIZE_TO_BYTES(tsize), esel);
#endif
}

void disable_tlb(u8 esel)
{
	u32 _mas0, _mas1, _mas2, _mas3;

	free_tlb_cam(esel);

	_mas0 = FSL_BOOKE_MAS0(1, esel, 0);
	_mas1 = 0;
	_mas2 = 0;
	_mas3 = 0;

	mtspr(MAS0, _mas0);
	mtspr(MAS1, _mas1);
	mtspr(MAS2, _mas2);
	mtspr(MAS3, _mas3);
#ifdef CONFIG_ENABLE_36BIT_PHYS
	mtspr(MAS7, 0);
#endif
	asm volatile("isync;msync;tlbwe;isync");

#ifdef CONFIG_ADDR_MAP
	if (gd->flags & GD_FLG_RELOC)
		addrmap_set_entry(0, 0, 0, esel);
#endif
}

static void tlbsx(const volatile unsigned *addr)
{
	__asm__ __volatile__ ("tlbsx 0,%0" : : "r" (addr), "m" (*addr));
}

/* return -1 if we didn't find anything */
int find_tlb_idx(void *addr, u8 tlbsel)
{
	u32 _mas0, _mas1;

	/* zero out Search PID, AS */
	mtspr(MAS6, 0);

	tlbsx(addr);

	_mas0 = mfspr(MAS0);
	_mas1 = mfspr(MAS1);

	/* we found something, and it's in the TLB we expect */
	if ((MAS1_VALID & _mas1) &&
	    (MAS0_TLBSEL(tlbsel) == (_mas0 & MAS0_TLBSEL_MSK))) {
		return ((_mas0 & MAS0_ESEL_MSK) >> 16);
	}

	return -1;
}

#ifdef CONFIG_ADDR_MAP
void init_addr_map(void)
{
	int i;
	unsigned int num_cam = mfspr(SPRN_TLB1CFG) & 0xfff;

	/* walk all the entries */
	for (i = 0; i < num_cam; i++) {
		unsigned long epn;
		u32 tsize, valid;
		phys_addr_t rpn;

		read_tlbcam_entry(i, &valid, &tsize, &epn, &rpn);
		if (valid & MAS1_VALID)
			addrmap_set_entry(epn, rpn, TSIZE_TO_BYTES(tsize), i);
	}

	return;
}
#endif

/*
 * Map [v_addr, v_addr + size) with up to 8 TLB1 entries and return the
 * amount that could not be mapped.
 */
uint64_t tlb_map_range(ulong v_addr, phys_addr_t p_addr, uint64_t size,
		       enum tlb_map_type map_type)
{
	int i;
	unsigned int tlb_size;
	unsigned int wimge;
	unsigned int perm;
	unsigned int max_cam, tsize_mask;

	if (map_type == TLB_MAP_RAM) {
		perm = MAS3_SX|MAS3_SW|MAS3_SR;
		wimge = MAS2_M;
#ifdef CONFIG_SYS_PPC_DDR_WIMGE
		wimge = CONFIG_SYS_PPC_DDR_WIMGE;
#endif
	} else {
		perm = MAS3_SW|MAS3_SR;
		wimge = MAS2_I|MAS2_G;
	}

	if ((mfspr(SPRN_MMUCFG) & MMUCFG_MAVN) == MMUCFG_MAVN_V1) {
		/* Convert (4^max) kB to (2^max) bytes */
		max_cam = ((mfspr(SPRN_TLB1CFG) >> 16) & 0xf) * 2 + 10;
		tsize_mask = ~1U;
	} else {
		/* Convert (2^max) kB to (2^max) bytes */
		max_cam = __ilog2(mfspr(SPRN_TLB1PS)) + 10;
		tsize_mask = ~0U;
	}

	for (i = 0; size && i < 8; i++) {
		int tlb_index = find_free_tlbcam();
		u32 camsize = __ilog2_u64(size) & tsize_mask;
		u32 align = __ilog2(v_addr) & tsize_mask;

		if (tlb_index == -1)
			break;

		if (align == -2)
			align = max_cam;	/* a v_addr of 0 is maximally aligned */
		if (camsize > align)
			camsize = align;

		if (camsize > max_cam)
			camsize = max_cam;

		tlb_size = camsize - 10;

		set_tlb(1, v_addr, p_addr, perm, wimge,
			0, tlb_index, tlb_size, 1);

		size -= 1ULL << camsize;
		v_addr += 1UL << camsize;
		p_addr += 1UL << camsize;
	}

	return size;
}
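
/*
 * Worked example, added for illustration: on an MMU v1 (e500v2-class) core
 * the masking above forces even tsize values, i.e. power-of-4 page sizes.
 * Assuming RAM starts at virtual/physical address 0 and enough CAM entries
 * are free, mapping 768 MB proceeds as:
 *
 *	pass 1: camsize = ilog2(768M) & ~1 = 28  ->  256 MB entry at 0
 *	pass 2: camsize = ilog2(512M) & ~1 = 28  ->  256 MB entry at 256 MB
 *	pass 3: camsize = ilog2(256M) & ~1 = 28  ->  256 MB entry at 512 MB
 *
 * so three 256 MB entries are consumed and 0 is returned.  An MMU v2 core,
 * which also allows odd tsize values, would cover the same range with one
 * 512 MB entry plus one 256 MB entry.
 */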

unsigned int setup_ddr_tlbs_phys(phys_addr_t p_addr,
				 unsigned int memsize_in_meg)
{
	unsigned int ram_tlb_address = (unsigned int)CONFIG_SYS_DDR_SDRAM_BASE;
	u64 memsize = (u64)memsize_in_meg << 20;

	memsize = min(memsize, CONFIG_MAX_MEM_MAPPED);
	memsize = tlb_map_range(ram_tlb_address, p_addr, memsize, TLB_MAP_RAM);

	if (memsize)
		print_size(memsize, " left unmapped\n");

	return memsize_in_meg;
}

unsigned int setup_ddr_tlbs(unsigned int memsize_in_meg)
{
	return
		setup_ddr_tlbs_phys(CONFIG_SYS_DDR_SDRAM_BASE, memsize_in_meg);
}

/* Invalidate the DDR TLBs for the requested size */
void clear_ddr_tlbs_phys(phys_addr_t p_addr, unsigned int memsize_in_meg)
{
	u32 vstart = CONFIG_SYS_DDR_SDRAM_BASE;
	unsigned long epn;
	u32 tsize, valid, ptr;
	phys_addr_t rpn = 0;
	int ddr_esel;
	u64 memsize = (u64)memsize_in_meg << 20;

	ptr = vstart;

	while (ptr < (vstart + memsize)) {
		ddr_esel = find_tlb_idx((void *)ptr, 1);
		if (ddr_esel != -1) {
			read_tlbcam_entry(ddr_esel, &valid, &tsize, &epn, &rpn);
			disable_tlb(ddr_esel);
		}
		ptr += TSIZE_TO_BYTES(tsize);
	}
}

void clear_ddr_tlbs(unsigned int memsize_in_meg)
{
	clear_ddr_tlbs_phys(CONFIG_SYS_DDR_SDRAM_BASE, memsize_in_meg);
}

#endif /* not SPL */
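
/*
 * Usage sketch, added for illustration only: a board's DRAM init code
 * typically sizes DDR (for example with fsl_ddr_sdram()) and then maps it
 * with setup_ddr_tlbs(), which takes and returns a size in MB:
 *
 *	phys_size_t dram_size = fsl_ddr_sdram();
 *
 *	dram_size = setup_ddr_tlbs(dram_size / 0x100000);
 *	dram_size *= 0x100000;
 *
 * clear_ddr_tlbs() removes those mappings again, for example when DDR has
 * to be re-initialized.
 */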