/*
 * (C) Copyright 2010
 * Texas Instruments, <www.ti.com>
 * Aneesh V <aneesh@ti.com>
 *
 * See file CREDITS for list of people who contributed to this
 * project.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 */
#include <linux/types.h>
#include <common.h>
#include <asm/armv7.h>
#include <asm/utils.h>

#define ARMV7_DCACHE_INVAL_ALL		1
#define ARMV7_DCACHE_CLEAN_INVAL_ALL	2
#define ARMV7_DCACHE_INVAL_RANGE	3
#define ARMV7_DCACHE_CLEAN_INVAL_RANGE	4

#ifndef CONFIG_SYS_DCACHE_OFF
/*
 * Write the level and type you want to the Cache Size Selection Register
 * (CSSELR) to get size details from the Cache Size ID Register (CCSIDR).
 */
static void set_csselr(u32 level, u32 type)
{
	u32 csselr = level << 1 | type;

	/* Write to Cache Size Selection Register (CSSELR) */
	asm volatile ("mcr p15, 2, %0, c0, c0, 0" : : "r" (csselr));
}

static u32 get_ccsidr(void)
{
	u32 ccsidr;

	/* Read current CP15 Cache Size ID Register */
	asm volatile ("mrc p15, 1, %0, c0, c0, 0" : "=r" (ccsidr));
	return ccsidr;
}

static u32 get_clidr(void)
{
	u32 clidr;

	/* Read current CP15 Cache Level ID Register */
	asm volatile ("mrc p15, 1, %0, c0, c0, 1" : "=r" (clidr));
	return clidr;
}
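
/*
 * Operand format for the set/way cache maintenance operations used
 * below (DCISW / DCCISW), per the ARMv7 ARM:
 *	bits [31:32-A]	- way index, A = log2(number of ways) rounded up
 *	bits [L+S-1:L]	- set index, L = log2(line length in bytes)
 *	bits [3:1]	- cache level, 0-based (L1 == 0)
 */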
static void v7_inval_dcache_level_setway(u32 level, u32 num_sets,
					 u32 num_ways, u32 way_shift,
					 u32 log2_line_len)
{
	int way, set;
	u32 setway;

	/*
	 * For optimal assembly code:
	 *	a. count down
	 *	b. have bigger loop inside
	 */
	for (way = num_ways - 1; way >= 0; way--) {
		for (set = num_sets - 1; set >= 0; set--) {
			/* cast before shifting: way can reach bit 31 */
			setway = (level << 1) | (set << log2_line_len) |
				 ((u32)way << way_shift);
			/* Invalidate data/unified cache line by set/way */
			asm volatile ("mcr p15, 0, %0, c7, c6, 2"
					: : "r" (setway));
		}
	}
	/* DMB to make sure the operation is complete */
	CP15DMB;
}

static void v7_clean_inval_dcache_level_setway(u32 level, u32 num_sets,
					       u32 num_ways, u32 way_shift,
					       u32 log2_line_len)
{
	int way, set;
	u32 setway;

	/*
	 * For optimal assembly code:
	 *	a. count down
	 *	b. have bigger loop inside
	 */
	for (way = num_ways - 1; way >= 0; way--) {
		for (set = num_sets - 1; set >= 0; set--) {
			/* cast before shifting: way can reach bit 31 */
			setway = (level << 1) | (set << log2_line_len) |
				 ((u32)way << way_shift);
			/*
			 * Clean & Invalidate data/unified
			 * cache line by set/way
			 */
			asm volatile ("mcr p15, 0, %0, c7, c14, 2"
					: : "r" (setway));
		}
	}
	/* DMB to make sure the operation is complete */
	CP15DMB;
}
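
/*
 * CCSIDR field encoding, decoded below:
 *	bits [2:0]	- LineSize: log2(line length in words) - 2
 *	bits [12:3]	- Associativity: number of ways - 1
 *	bits [27:13]	- NumSets: number of sets - 1
 */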
static void v7_maint_dcache_level_setway(u32 level, u32 operation)
{
	u32 ccsidr;
	u32 num_sets, num_ways, log2_line_len, log2_num_ways;
	u32 way_shift;

	set_csselr(level, ARMV7_CSSELR_IND_DATA_UNIFIED);

	ccsidr = get_ccsidr();

	log2_line_len = ((ccsidr & CCSIDR_LINE_SIZE_MASK) >>
				CCSIDR_LINE_SIZE_OFFSET) + 2;
	/* Converting from words to bytes */
	log2_line_len += 2;

	num_ways = ((ccsidr & CCSIDR_ASSOCIATIVITY_MASK) >>
			CCSIDR_ASSOCIATIVITY_OFFSET) + 1;
	num_sets = ((ccsidr & CCSIDR_NUM_SETS_MASK) >>
			CCSIDR_NUM_SETS_OFFSET) + 1;
	/*
	 * According to the ARMv7 ARM, the number of sets and the number
	 * of ways need not be powers of 2, so round up when taking the log.
	 */
	log2_num_ways = log_2_n_round_up(num_ways);

	way_shift = (32 - log2_num_ways);
	if (operation == ARMV7_DCACHE_INVAL_ALL) {
		v7_inval_dcache_level_setway(level, num_sets, num_ways,
					     way_shift, log2_line_len);
	} else if (operation == ARMV7_DCACHE_CLEAN_INVAL_ALL) {
		v7_clean_inval_dcache_level_setway(level, num_sets, num_ways,
						   way_shift, log2_line_len);
	}
}
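
/*
 * CLIDR holds a 3-bit cache-type field for each of the seven possible
 * cache levels (Ctype1 in bits [2:0], Ctype2 in bits [5:3], ...):
 *	0b000 - no cache		0b001 - instruction cache only
 *	0b010 - data cache only		0b011 - separate I and D caches
 *	0b100 - unified cache
 * Only levels that hold data (data-only, separate I and D, unified)
 * need d-cache maintenance.
 */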
static void v7_maint_dcache_all(u32 operation)
{
	u32 level, cache_type, level_start_bit = 0;
	u32 clidr = get_clidr();

	for (level = 0; level < 7; level++) {
		cache_type = (clidr >> level_start_bit) & 0x7;
		if ((cache_type == ARMV7_CLIDR_CTYPE_DATA_ONLY) ||
		    (cache_type == ARMV7_CLIDR_CTYPE_INSTRUCTION_DATA) ||
		    (cache_type == ARMV7_CLIDR_CTYPE_UNIFIED))
			v7_maint_dcache_level_setway(level, operation);
		level_start_bit += 3;
	}
}

static void v7_dcache_clean_inval_range(u32 start, u32 stop, u32 line_len)
{
	u32 mva;

	/* Align start to cache line boundary */
	start &= ~(line_len - 1);
	for (mva = start; mva < stop; mva += line_len) {
		/* DCCIMVAC - Clean & Invalidate data cache by MVA to PoC */
		asm volatile ("mcr p15, 0, %0, c7, c14, 1" : : "r" (mva));
	}
}

static void v7_dcache_inval_range(u32 start, u32 stop, u32 line_len)
{
	u32 mva;

	/*
	 * If the start address is not aligned to a cache line, flush the
	 * first line to prevent affecting somebody else's buffer
	 */
	if (start & (line_len - 1)) {
		v7_dcache_clean_inval_range(start, start + 1, line_len);
		/* move to next cache line */
		start = (start + line_len - 1) & ~(line_len - 1);
	}

	/*
	 * If the stop address is not aligned to a cache line, flush the
	 * last line to prevent affecting somebody else's buffer
	 */
	if (stop & (line_len - 1)) {
		v7_dcache_clean_inval_range(stop, stop + 1, line_len);
		/* align to the beginning of this cache line */
		stop &= ~(line_len - 1);
	}

	for (mva = start; mva < stop; mva += line_len) {
		/* DCIMVAC - Invalidate data cache by MVA to PoC */
		asm volatile ("mcr p15, 0, %0, c7, c6, 1" : : "r" (mva));
	}
}

static void v7_dcache_maint_range(u32 start, u32 stop, u32 range_op)
{
	u32 line_len, ccsidr;

	ccsidr = get_ccsidr();
	line_len = ((ccsidr & CCSIDR_LINE_SIZE_MASK) >>
			CCSIDR_LINE_SIZE_OFFSET) + 2;
	/* Converting from words to bytes */
	line_len += 2;
	/* Converting from log2(linelen) to linelen */
	line_len = 1 << line_len;

	switch (range_op) {
	case ARMV7_DCACHE_CLEAN_INVAL_RANGE:
		v7_dcache_clean_inval_range(start, stop, line_len);
		break;
	case ARMV7_DCACHE_INVAL_RANGE:
		v7_dcache_inval_range(start, stop, line_len);
		break;
	}

	/* DMB to make sure the operation is complete */
	CP15DMB;
}

/* Invalidate TLB */
static void v7_inval_tlb(void)
{
	/* Invalidate entire unified TLB */
	asm volatile ("mcr p15, 0, %0, c8, c7, 0" : : "r" (0));
	/* Invalidate entire data TLB */
	asm volatile ("mcr p15, 0, %0, c8, c6, 0" : : "r" (0));
	/* Invalidate entire instruction TLB */
	asm volatile ("mcr p15, 0, %0, c8, c5, 0" : : "r" (0));
	/* Full system DSB - make sure that the invalidation is complete */
	CP15DSB;
	/* Full system ISB - make sure the instruction stream sees it */
	CP15ISB;
}
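
/*
 * The entry points below implement U-Boot's cache maintenance API.
 * Each one maintains the inner (CP15) caches first and then calls the
 * matching outer cache hook, which is a no-op unless the platform
 * overrides it (see the weak stubs at the end of this file).
 */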
void invalidate_dcache_all(void)
{
	v7_maint_dcache_all(ARMV7_DCACHE_INVAL_ALL);

	v7_outer_cache_inval_all();
}

/*
 * Performs a clean & invalidate of the entire data cache
 * at all levels
 */
void flush_dcache_all(void)
{
	v7_maint_dcache_all(ARMV7_DCACHE_CLEAN_INVAL_ALL);

	v7_outer_cache_flush_all();
}

/*
 * Invalidates the range in all levels of D-cache/unified cache used:
 * Affects the range [start, stop - 1]
 */
void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
	v7_dcache_maint_range(start, stop, ARMV7_DCACHE_INVAL_RANGE);

	v7_outer_cache_inval_range(start, stop);
}

/*
 * Flush range (clean & invalidate) from all levels of D-cache/unified
 * cache used:
 * Affects the range [start, stop - 1]
 */
void flush_dcache_range(unsigned long start, unsigned long stop)
{
	v7_dcache_maint_range(start, stop, ARMV7_DCACHE_CLEAN_INVAL_RANGE);

	v7_outer_cache_flush_range(start, stop);
}

void arm_init_before_mmu(void)
{
	v7_outer_cache_enable();
	invalidate_dcache_all();
	v7_inval_tlb();
}

/*
 * Flush range from all levels of d-cache/unified-cache used:
 * Affects the range [start, start + size - 1]
 */
void flush_cache(unsigned long start, unsigned long size)
{
	flush_dcache_range(start, start + size);
}
#else /* #ifndef CONFIG_SYS_DCACHE_OFF */
void invalidate_dcache_all(void)
{
}

void flush_dcache_all(void)
{
}

void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
}

void flush_dcache_range(unsigned long start, unsigned long stop)
{
}

void arm_init_before_mmu(void)
{
}

void flush_cache(unsigned long start, unsigned long size)
{
}
#endif /* #ifndef CONFIG_SYS_DCACHE_OFF */

#ifndef CONFIG_SYS_ICACHE_OFF
/* Invalidate entire I-cache and branch predictor array */
void invalidate_icache_all(void)
{
	/*
	 * Invalidate all instruction caches to PoU.
	 * Also flushes branch target cache.
	 */
	asm volatile ("mcr p15, 0, %0, c7, c5, 0" : : "r" (0));

	/* Invalidate entire branch predictor array */
	asm volatile ("mcr p15, 0, %0, c7, c5, 6" : : "r" (0));

	/* Full system DSB - make sure that the invalidation is complete */
	CP15DSB;

	/* ISB - make sure the instruction stream sees it */
	CP15ISB;
}
#else
void invalidate_icache_all(void)
{
}
#endif
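
/*
 * Stub implementations for the outer cache operations. Platforms with
 * an outer cache (e.g. a PL310 L2 cache controller) override these
 * weak aliases with real implementations.
 */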
void __v7_outer_cache_enable(void)
{
}
void v7_outer_cache_enable(void)
	__attribute__((weak, alias("__v7_outer_cache_enable")));

void __v7_outer_cache_disable(void)
{
}
void v7_outer_cache_disable(void)
	__attribute__((weak, alias("__v7_outer_cache_disable")));

void __v7_outer_cache_flush_all(void)
{
}
void v7_outer_cache_flush_all(void)
	__attribute__((weak, alias("__v7_outer_cache_flush_all")));

void __v7_outer_cache_inval_all(void)
{
}
void v7_outer_cache_inval_all(void)
	__attribute__((weak, alias("__v7_outer_cache_inval_all")));

void __v7_outer_cache_flush_range(u32 start, u32 end)
{
}
void v7_outer_cache_flush_range(u32 start, u32 end)
	__attribute__((weak, alias("__v7_outer_cache_flush_range")));

void __v7_outer_cache_inval_range(u32 start, u32 end)
{
}
void v7_outer_cache_inval_range(u32 start, u32 end)
	__attribute__((weak, alias("__v7_outer_cache_inval_range")));