/* arch/sparc64/kernel/ktlb.S: Kernel mapping TLB miss handling.
 *
 * Copyright (C) 1995, 1997, 2005, 2008 David S. Miller <davem@davemloft.net>
 * Copyright (C) 1996 Eddie C. Dost (ecd@brainaid.de)
 * Copyright (C) 1996 Miguel de Icaza (miguel@nuclecu.unam.mx)
 * Copyright (C) 1996,98,99 Jakub Jelinek (jj@sunsite.mff.cuni.cz)
 */

#include <asm/head.h>
#include <asm/asi.h>
#include <asm/page.h>
#include <asm/pgtable.h>
#include <asm/tsb.h>

	.text
	.align	32

kvmap_itlb:
	/* %g6: TAG TARGET */
	mov		TLB_TAG_ACCESS, %g4
	ldxa		[%g4] ASI_IMMU, %g4

	/* sun4v_itlb_miss branches here with the missing virtual
	 * address already loaded into %g4
	 */
kvmap_itlb_4v:

	/* Catch kernel NULL pointer calls.  */
	sethi		%hi(PAGE_SIZE), %g5
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_itlb_longpath
	 nop

	KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_itlb_load)

kvmap_itlb_tsb_miss:
	sethi		%hi(LOW_OBP_ADDRESS), %g5
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_itlb_vmalloc_addr
	 mov		0x1, %g5
	sllx		%g5, 32, %g5
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_itlb_obp
	 nop

kvmap_itlb_vmalloc_addr:
	KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_itlb_longpath)

	TSB_LOCK_TAG(%g1, %g2, %g7)

	/* Load and check PTE.  */
	ldxa		[%g5] ASI_PHYS_USE_EC, %g5
	mov		1, %g7
	sllx		%g7, TSB_TAG_INVALID_BIT, %g7
	brgez,a,pn	%g5, kvmap_itlb_longpath
	 TSB_STORE(%g1, %g7)

	TSB_WRITE(%g1, %g5, %g6)

	/* fallthrough to TLB load */

kvmap_itlb_load:

661:	stxa		%g5, [%g0] ASI_ITLB_DATA_IN
	retry
	.section	.sun4v_2insn_patch, "ax"
	.word		661b
	nop
	nop
	.previous

	/* For sun4v the ASI_ITLB_DATA_IN store and the retry
	 * instruction get nop'd out and we get here to branch
	 * to the sun4v TLB load code.  The registers are set up
	 * as follows:
	 *
	 * %g4: vaddr
	 * %g5: PTE
	 * %g6: TAG
	 *
	 * The sun4v TLB load wants the PTE in %g3 so we fix that
	 * up here.
	 */
	ba,pt		%xcc, sun4v_itlb_load
	 mov		%g5, %g3

kvmap_itlb_longpath:

661:	rdpr	%pstate, %g5
	wrpr	%g5, PSTATE_AG | PSTATE_MG, %pstate
	.section .sun4v_2insn_patch, "ax"
	.word	661b
	SET_GL(1)
	nop
	.previous

	rdpr	%tpc, %g5
	ba,pt	%xcc, sparc64_realfault_common
	 mov	FAULT_CODE_ITLB, %g4

kvmap_itlb_obp:
	OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, kvmap_itlb_longpath)

	TSB_LOCK_TAG(%g1, %g2, %g7)

	TSB_WRITE(%g1, %g5, %g6)

	ba,pt		%xcc, kvmap_itlb_load
	 nop

kvmap_dtlb_obp:
	OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, kvmap_dtlb_longpath)

	TSB_LOCK_TAG(%g1, %g2, %g7)

	TSB_WRITE(%g1, %g5, %g6)

	ba,pt		%xcc, kvmap_dtlb_load
	 nop

	.align		32
kvmap_dtlb_tsb4m_load:
	TSB_LOCK_TAG(%g1, %g2, %g7)
	TSB_WRITE(%g1, %g5, %g6)
	ba,pt		%xcc, kvmap_dtlb_load
	 nop

kvmap_dtlb:
	/* %g6: TAG TARGET */
	mov		TLB_TAG_ACCESS, %g4
	ldxa		[%g4] ASI_DMMU, %g4

	/* sun4v_dtlb_miss branches here with the missing virtual
	 * address already loaded into %g4
	 */
kvmap_dtlb_4v:
	brgez,pn	%g4, kvmap_dtlb_nonlinear
	 nop

#ifdef CONFIG_DEBUG_PAGEALLOC
	/* Index through the base page size TSB even for linear
	 * mappings when using page allocation debugging.
	 */
	KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)
#else
	/* Correct TAG_TARGET is already in %g6, check 4MB TSB.  */
	KERN_TSB4M_LOOKUP_TL1(%g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)
#endif
	/* TSB entry address left in %g1, lookup linear PTE.
	 * Must preserve %g1 and %g6 (TAG).
	 */
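	/* What follows computes the linear-mapping PTE directly from
	 * the virtual address.  In rough C it amounts to the sketch
	 * below (illustrative local names only; the sllx/srlx shift
	 * counts in the patched groups are rewritten at boot via
	 * .page_offset_shift_patch to match the runtime PAGE_OFFSET):
	 *
	 *	paddr = vaddr - PAGE_OFFSET;
	 *	if (paddr >> MAX_PHYS_ADDRESS_BITS)
	 *		goto kvmap_dtlb_longpath;	// PFN out of range
	 *	if (!test_bit(paddr >> ILOG2_4MB, sparc64_valid_addr_bitmap))
	 *		goto kvmap_dtlb_longpath;	// no memory in this 4MB chunk
	 *	i    = paddr >> ILOG2_256MB;
	 *	mask = kpte_linear_bitmap[i / 32];	// 2 bits per 256MB chunk
	 *	pte  = vaddr ^ kern_linear_pte_xor[(mask >> ((i % 32) * 2)) & 3];
	 */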
kvmap_dtlb_tsb4m_miss:
	/* Clear the PAGE_OFFSET top virtual bits, shift
	 * down to get PFN, and make sure PFN is in range.
	 */
661:	sllx		%g4, 0, %g5
	.section	.page_offset_shift_patch, "ax"
	.word		661b
	.previous

	/* Check to see if we know about valid memory at the 4MB
	 * chunk this physical address will reside within.
	 */
661:	srlx		%g5, MAX_PHYS_ADDRESS_BITS, %g2
	.section	.page_offset_shift_patch, "ax"
	.word		661b
	.previous

	brnz,pn		%g2, kvmap_dtlb_longpath
	 nop

	/* This unconditional branch and delay-slot nop get patched
	 * by the sethi sequence once the bitmap is properly set up.
	 */
	.globl		valid_addr_bitmap_insn
valid_addr_bitmap_insn:
	ba,pt		%xcc, 2f
	 nop
	.subsection	2
	.globl		valid_addr_bitmap_patch
valid_addr_bitmap_patch:
	sethi		%hi(sparc64_valid_addr_bitmap), %g7
	or		%g7, %lo(sparc64_valid_addr_bitmap), %g7
	.previous

661:	srlx		%g5, ILOG2_4MB, %g2
	.section	.page_offset_shift_patch, "ax"
	.word		661b
	.previous

	srlx		%g2, 6, %g5
	and		%g2, 63, %g2
	sllx		%g5, 3, %g5
	ldx		[%g7 + %g5], %g5
	mov		1, %g7
	sllx		%g7, %g2, %g7
	andcc		%g5, %g7, %g0
	be,pn		%xcc, kvmap_dtlb_longpath

2:	sethi		%hi(kpte_linear_bitmap), %g2

	/* Get the 256MB physical address index. */
661:	sllx		%g4, 0, %g5
	.section	.page_offset_shift_patch, "ax"
	.word		661b
	.previous

	or		%g2, %lo(kpte_linear_bitmap), %g2

661:	srlx		%g5, ILOG2_256MB, %g5
	.section	.page_offset_shift_patch, "ax"
	.word		661b
	.previous

	and		%g5, (32 - 1), %g7

	/* Divide by 32 to get the offset into the bitmask.  */
	srlx		%g5, 5, %g5
	add		%g7, %g7, %g7
	sllx		%g5, 3, %g5

	/* kern_linear_pte_xor[(mask >> shift) & 3] */
	ldx		[%g2 + %g5], %g2
	srlx		%g2, %g7, %g7
	sethi		%hi(kern_linear_pte_xor), %g5
	and		%g7, 3, %g7
	or		%g5, %lo(kern_linear_pte_xor), %g5
	sllx		%g7, 3, %g7
	ldx		[%g5 + %g7], %g2

	.globl		kvmap_linear_patch
kvmap_linear_patch:
	ba,pt		%xcc, kvmap_dtlb_tsb4m_load
	 xor		%g2, %g4, %g5

kvmap_dtlb_vmalloc_addr:
	KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_dtlb_longpath)

	TSB_LOCK_TAG(%g1, %g2, %g7)

	/* Load and check PTE.  */
	ldxa		[%g5] ASI_PHYS_USE_EC, %g5
	mov		1, %g7
	sllx		%g7, TSB_TAG_INVALID_BIT, %g7
	brgez,a,pn	%g5, kvmap_dtlb_longpath
	 TSB_STORE(%g1, %g7)

	TSB_WRITE(%g1, %g5, %g6)

	/* fallthrough to TLB load */

kvmap_dtlb_load:

661:	stxa		%g5, [%g0] ASI_DTLB_DATA_IN	! Reload TLB
	retry
	.section	.sun4v_2insn_patch, "ax"
	.word		661b
	nop
	nop
	.previous

	/* For sun4v the ASI_DTLB_DATA_IN store and the retry
	 * instruction get nop'd out and we get here to branch
	 * to the sun4v TLB load code.  The registers are set up
	 * as follows:
	 *
	 * %g4: vaddr
	 * %g5: PTE
	 * %g6: TAG
	 *
	 * The sun4v TLB load wants the PTE in %g3 so we fix that
	 * up here.
	 */
	ba,pt		%xcc, sun4v_dtlb_load
	 mov		%g5, %g3

#ifdef CONFIG_SPARSEMEM_VMEMMAP
kvmap_vmemmap:
	sub		%g4, %g5, %g5
	srlx		%g5, 22, %g5
	sethi		%hi(vmemmap_table), %g1
	sllx		%g5, 3, %g5
	or		%g1, %lo(vmemmap_table), %g1
	ba,pt		%xcc, kvmap_dtlb_load
	 ldx		[%g1 + %g5], %g5
#endif
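	/* In rough C, the CONFIG_SPARSEMEM_VMEMMAP lookup above is
	 * the one-liner below (a sketch only: the 4MB granule is
	 * implied by the srlx by 22, the 8-byte entry size by the
	 * sllx by 3, and %g5 holds the vmemmap base on entry from
	 * kvmap_dtlb_nonlinear):
	 *
	 *	pte = vmemmap_table[(vaddr - VMEMMAP_BASE) >> 22];
	 */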
kvmap_dtlb_nonlinear:
	/* Catch kernel NULL pointer derefs.  */
	sethi		%hi(PAGE_SIZE), %g5
	cmp		%g4, %g5
	bleu,pn		%xcc, kvmap_dtlb_longpath
	 nop

#ifdef CONFIG_SPARSEMEM_VMEMMAP
	/* Do not use the TSB for vmemmap.  */
	mov		(VMEMMAP_BASE >> 40), %g5
	sllx		%g5, 40, %g5
	cmp		%g4, %g5
	bgeu,pn		%xcc, kvmap_vmemmap
	 nop
#endif

	KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)

kvmap_dtlb_tsbmiss:
	sethi		%hi(MODULES_VADDR), %g5
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_dtlb_longpath
	 mov		(VMALLOC_END >> 40), %g5
	sllx		%g5, 40, %g5
	cmp		%g4, %g5
	bgeu,pn		%xcc, kvmap_dtlb_longpath
	 nop

kvmap_check_obp:
	sethi		%hi(LOW_OBP_ADDRESS), %g5
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_dtlb_vmalloc_addr
	 mov		0x1, %g5
	sllx		%g5, 32, %g5
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_dtlb_obp
	 nop
	ba,pt		%xcc, kvmap_dtlb_vmalloc_addr
	 nop

kvmap_dtlb_longpath:

661:	rdpr	%pstate, %g5
	wrpr	%g5, PSTATE_AG | PSTATE_MG, %pstate
	.section .sun4v_2insn_patch, "ax"
	.word	661b
	SET_GL(1)
	ldxa	[%g0] ASI_SCRATCHPAD, %g5
	.previous

	rdpr	%tl, %g3
	cmp	%g3, 1

661:	mov	TLB_TAG_ACCESS, %g4
	ldxa	[%g4] ASI_DMMU, %g5
	.section .sun4v_2insn_patch, "ax"
	.word	661b
	ldx	[%g5 + HV_FAULT_D_ADDR_OFFSET], %g5
	nop
	.previous

	be,pt	%xcc, sparc64_realfault_common
	 mov	FAULT_CODE_DTLB, %g4
	ba,pt	%xcc, winfix_trampoline
	 nop
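	/* Each .sun4v_2insn_patch entry above pairs the address of a
	 * 661: label with two replacement instructions.  Early boot
	 * code walks that table on sun4v and overwrites the two
	 * instructions at the recorded address (with I-cache flushes
	 * in between).  Roughly, in C (a sketch of the table layout,
	 * not necessarily the exact kernel declaration):
	 *
	 *	struct sun4v_2insn_patch_entry {
	 *		unsigned int	addr;		// address of the 661: label
	 *		unsigned int	insns[2];	// replacement instructions
	 *	};
	 *
	 *	for (p = start; p < end; p++) {
	 *		((unsigned int *)(unsigned long)p->addr)[0] = p->insns[0];
	 *		((unsigned int *)(unsigned long)p->addr)[1] = p->insns[1];
	 *	}
	 */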