/* spitfire.h: SpitFire/BlackBird/Cheetah inline MMU operations.
 *
 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
 */

#ifndef _SPARC64_SPITFIRE_H
#define _SPARC64_SPITFIRE_H

#ifdef CONFIG_SPARC64

#include <asm/asi.h>

/* The following register addresses are accessible via ASI_DMMU
 * and ASI_IMMU, that is, there is a distinct and unique copy of
 * each of these registers for each TLB.
 */
#define TSB_TAG_TARGET		0x0000000000000000 /* All chips			*/
#define TLB_SFSR		0x0000000000000018 /* All chips			*/
#define TSB_REG			0x0000000000000028 /* All chips			*/
#define TLB_TAG_ACCESS		0x0000000000000030 /* All chips			*/
#define VIRT_WATCHPOINT		0x0000000000000038 /* All chips			*/
#define PHYS_WATCHPOINT		0x0000000000000040 /* All chips			*/
#define TSB_EXTENSION_P		0x0000000000000048 /* Ultra-III and later	*/
#define TSB_EXTENSION_S		0x0000000000000050 /* Ultra-III and later, D-TLB only */
#define TSB_EXTENSION_N		0x0000000000000058 /* Ultra-III and later	*/
#define TLB_TAG_ACCESS_EXT	0x0000000000000060 /* Ultra-III+ and later	*/

/* These registers only exist as one entity, and are accessed
 * via ASI_DMMU only.
 */
#define PRIMARY_CONTEXT		0x0000000000000008
#define SECONDARY_CONTEXT	0x0000000000000010
#define DMMU_SFAR		0x0000000000000020
#define VIRT_WATCHPOINT		0x0000000000000038
#define PHYS_WATCHPOINT		0x0000000000000040

#define SPITFIRE_HIGHEST_LOCKED_TLBENT	(64 - 1)
#define CHEETAH_HIGHEST_LOCKED_TLBENT	(16 - 1)

#define L1DCACHE_SIZE		0x4000

#define SUN4V_CHIP_INVALID	0x00
#define SUN4V_CHIP_NIAGARA1	0x01
#define SUN4V_CHIP_NIAGARA2	0x02
#define SUN4V_CHIP_NIAGARA3	0x03
#define SUN4V_CHIP_UNKNOWN	0xff

#ifndef __ASSEMBLY__

enum ultra_tlb_layout {
	spitfire = 0,
	cheetah = 1,
	cheetah_plus = 2,
	hypervisor = 3,
};

extern enum ultra_tlb_layout tlb_type;

extern int sun4v_chip_type;

extern int cheetah_pcache_forced_on;
extern void cheetah_enable_pcache(void);

#define sparc64_highest_locked_tlbent()		\
	(tlb_type == spitfire ?			\
	 SPITFIRE_HIGHEST_LOCKED_TLBENT :	\
	 CHEETAH_HIGHEST_LOCKED_TLBENT)

extern int num_kernel_image_mappings;

/* The data cache is write-through, so this just invalidates the
 * specified line.
 */
static inline void spitfire_put_dcache_tag(unsigned long addr, unsigned long tag)
{
	__asm__ __volatile__("stxa	%0, [%1] %2\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (tag), "r" (addr), "i" (ASI_DCACHE_TAG));
}

/* The instruction cache lines are flushed with this, but note that
 * this does not flush the pipeline.  It is possible for a line to
 * be flushed while stale instructions are still in the pipeline;
 * a flush instruction (to any address) is sufficient to handle
 * this issue after the line is invalidated.
 */
static inline void spitfire_put_icache_tag(unsigned long addr, unsigned long tag)
{
	__asm__ __volatile__("stxa	%0, [%1] %2\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (tag), "r" (addr), "i" (ASI_IC_TAG));
}

static inline unsigned long spitfire_get_dtlb_data(int entry)
{
	unsigned long data;

	__asm__ __volatile__("ldxa	[%1] %2, %0"
			     : "=r" (data)
			     : "r" (entry << 3), "i" (ASI_DTLB_DATA_ACCESS));

	/* Clear TTE diag bits. */
	data &= ~0x0003fe0000000000UL;

	return data;
}

static inline unsigned long spitfire_get_dtlb_tag(int entry)
{
	unsigned long tag;

	__asm__ __volatile__("ldxa	[%1] %2, %0"
			     : "=r" (tag)
			     : "r" (entry << 3), "i" (ASI_DTLB_TAG_READ));
	return tag;
}

static inline void spitfire_put_dtlb_data(int entry, unsigned long data)
{
	__asm__ __volatile__("stxa	%0, [%1] %2\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (data), "r" (entry << 3),
			       "i" (ASI_DTLB_DATA_ACCESS));
}

static inline unsigned long spitfire_get_itlb_data(int entry)
{
	unsigned long data;

	__asm__ __volatile__("ldxa	[%1] %2, %0"
			     : "=r" (data)
			     : "r" (entry << 3), "i" (ASI_ITLB_DATA_ACCESS));

	/* Clear TTE diag bits. */
	data &= ~0x0003fe0000000000UL;

	return data;
}

static inline unsigned long spitfire_get_itlb_tag(int entry)
{
	unsigned long tag;

	__asm__ __volatile__("ldxa	[%1] %2, %0"
			     : "=r" (tag)
			     : "r" (entry << 3), "i" (ASI_ITLB_TAG_READ));
	return tag;
}

static inline void spitfire_put_itlb_data(int entry, unsigned long data)
{
	__asm__ __volatile__("stxa	%0, [%1] %2\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (data), "r" (entry << 3),
			       "i" (ASI_ITLB_DATA_ACCESS));
}

static inline void spitfire_flush_dtlb_nucleus_page(unsigned long page)
{
	__asm__ __volatile__("stxa	%%g0, [%0] %1\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (page | 0x20), "i" (ASI_DMMU_DEMAP));
}

static inline void spitfire_flush_itlb_nucleus_page(unsigned long page)
{
	__asm__ __volatile__("stxa	%%g0, [%0] %1\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (page | 0x20), "i" (ASI_IMMU_DEMAP));
}
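/* Example (hypothetical): the accessors above index the 64-entry Spitfire
 * TLBs directly through the diagnostic ASIs, with the entry number encoded
 * as (entry << 3).  The helper below is an illustrative sketch of how a
 * debug path might snapshot the D-TLB; the function name and the
 * caller-provided arrays (each of at least SPITFIRE_HIGHEST_LOCKED_TLBENT + 1
 * elements) are illustrative only and not an established kernel API.  The
 * diagnostic ASI accesses are privileged, so a caller would typically be
 * trap or error-handling code running with interrupts disabled.
 */
static inline void spitfire_snapshot_dtlb_example(unsigned long *tags,
						  unsigned long *datas)
{
	int entry;

	for (entry = 0; entry <= SPITFIRE_HIGHEST_LOCKED_TLBENT; entry++) {
		/* Read the tag and (diag-bit-cleared) data for each entry. */
		tags[entry]  = spitfire_get_dtlb_tag(entry);
		datas[entry] = spitfire_get_dtlb_data(entry);
	}
}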
/* Cheetah has "all non-locked" tlb flushes. */
static inline void cheetah_flush_dtlb_all(void)
{
	__asm__ __volatile__("stxa	%%g0, [%0] %1\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (0x80), "i" (ASI_DMMU_DEMAP));
}

static inline void cheetah_flush_itlb_all(void)
{
	__asm__ __volatile__("stxa	%%g0, [%0] %1\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (0x80), "i" (ASI_IMMU_DEMAP));
}

/* Cheetah has a 4-TLB layout, so direct access is a bit different.
 * The first two TLBs are fully associative, hold 16 entries, and are
 * used only for locked and >8K sized translations.  One exists for
 * data accesses and one for instruction accesses.
 *
 * The third TLB is for data accesses to 8K non-locked translations, is
 * 2-way associative, and holds 512 entries.  The fourth TLB is for
 * instruction accesses to 8K non-locked translations, is 2-way
 * associative, and holds 128 entries.
 *
 * Cheetah has some bug where bogus data can be returned from
 * ASI_{D,I}TLB_DATA_ACCESS loads; doing the load twice fixes
 * the problem for me.
 *		-DaveM
 */
static inline unsigned long cheetah_get_ldtlb_data(int entry)
{
	unsigned long data;

	__asm__ __volatile__("ldxa	[%1] %2, %%g0\n\t"
			     "ldxa	[%1] %2, %0"
			     : "=r" (data)
			     : "r" ((0 << 16) | (entry << 3)),
			       "i" (ASI_DTLB_DATA_ACCESS));

	return data;
}

static inline unsigned long cheetah_get_litlb_data(int entry)
{
	unsigned long data;

	__asm__ __volatile__("ldxa	[%1] %2, %%g0\n\t"
			     "ldxa	[%1] %2, %0"
			     : "=r" (data)
			     : "r" ((0 << 16) | (entry << 3)),
			       "i" (ASI_ITLB_DATA_ACCESS));

	return data;
}

static inline unsigned long cheetah_get_ldtlb_tag(int entry)
{
	unsigned long tag;

	__asm__ __volatile__("ldxa	[%1] %2, %0"
			     : "=r" (tag)
			     : "r" ((0 << 16) | (entry << 3)),
			       "i" (ASI_DTLB_TAG_READ));

	return tag;
}

static inline unsigned long cheetah_get_litlb_tag(int entry)
{
	unsigned long tag;

	__asm__ __volatile__("ldxa	[%1] %2, %0"
			     : "=r" (tag)
			     : "r" ((0 << 16) | (entry << 3)),
			       "i" (ASI_ITLB_TAG_READ));

	return tag;
}

static inline void cheetah_put_ldtlb_data(int entry, unsigned long data)
{
	__asm__ __volatile__("stxa	%0, [%1] %2\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (data),
			       "r" ((0 << 16) | (entry << 3)),
			       "i" (ASI_DTLB_DATA_ACCESS));
}

static inline void cheetah_put_litlb_data(int entry, unsigned long data)
{
	__asm__ __volatile__("stxa	%0, [%1] %2\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (data),
			       "r" ((0 << 16) | (entry << 3)),
			       "i" (ASI_ITLB_DATA_ACCESS));
}

static inline unsigned long cheetah_get_dtlb_data(int entry, int tlb)
{
	unsigned long data;

	__asm__ __volatile__("ldxa	[%1] %2, %%g0\n\t"
			     "ldxa	[%1] %2, %0"
			     : "=r" (data)
			     : "r" ((tlb << 16) | (entry << 3)), "i" (ASI_DTLB_DATA_ACCESS));

	return data;
}

static inline unsigned long cheetah_get_dtlb_tag(int entry, int tlb)
{
	unsigned long tag;

	__asm__ __volatile__("ldxa	[%1] %2, %0"
			     : "=r" (tag)
			     : "r" ((tlb << 16) | (entry << 3)), "i" (ASI_DTLB_TAG_READ));
	return tag;
}

static inline void cheetah_put_dtlb_data(int entry, unsigned long data, int tlb)
{
	__asm__ __volatile__("stxa	%0, [%1] %2\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (data),
			       "r" ((tlb << 16) | (entry << 3)),
			       "i" (ASI_DTLB_DATA_ACCESS));
}
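/* Example (hypothetical): on Cheetah the diagnostic ASI address encodes both
 * the TLB selector and the entry number as (tlb << 16) | (entry << 3).  The
 * sketch below shows how the parameterized accessors above might be used to
 * snapshot one D-TLB.  Per the layout comment above, selector 0 is the
 * 16-entry fully-associative (locked/large-page) D-TLB; the selector and
 * entry count for the 512-entry 2-way D-TLB should be taken from the chip
 * documentation.  The helper name and arrays are illustrative only.
 */
static inline void cheetah_snapshot_dtlb_example(int tlb, int num_entries,
						 unsigned long *tags,
						 unsigned long *datas)
{
	int entry;

	for (entry = 0; entry < num_entries; entry++) {
		/* Each read goes through the double-load workaround above. */
		tags[entry]  = cheetah_get_dtlb_tag(entry, tlb);
		datas[entry] = cheetah_get_dtlb_data(entry, tlb);
	}
}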
static inline unsigned long cheetah_get_itlb_data(int entry)
{
	unsigned long data;

	__asm__ __volatile__("ldxa	[%1] %2, %%g0\n\t"
			     "ldxa	[%1] %2, %0"
			     : "=r" (data)
			     : "r" ((2 << 16) | (entry << 3)),
			       "i" (ASI_ITLB_DATA_ACCESS));

	return data;
}

static inline unsigned long cheetah_get_itlb_tag(int entry)
{
	unsigned long tag;

	__asm__ __volatile__("ldxa	[%1] %2, %0"
			     : "=r" (tag)
			     : "r" ((2 << 16) | (entry << 3)), "i" (ASI_ITLB_TAG_READ));
	return tag;
}

static inline void cheetah_put_itlb_data(int entry, unsigned long data)
{
	__asm__ __volatile__("stxa	%0, [%1] %2\n\t"
			     "membar	#Sync"
			     : /* No outputs */
			     : "r" (data), "r" ((2 << 16) | (entry << 3)),
			       "i" (ASI_ITLB_DATA_ACCESS));
}

#endif /* !(__ASSEMBLY__) */
#endif /* CONFIG_SPARC64 */
#endif /* !(_SPARC64_SPITFIRE_H) */