/*
 * (C) Copyright 2010
 * Texas Instruments, <www.ti.com>
 * Aneesh V <aneesh@ti.com>
 *
 * SPDX-License-Identifier: GPL-2.0+
 */

/*
 * ARMv7 data/instruction cache maintenance.
 *
 * Range operations act on the D-cache by MVA to the Point of Coherency
 * (PoC) via CP15 c7 operations; whole-cache operations are implemented
 * in assembly (cache_v7_asm.S). Outer (e.g. L2/PL310) cache hooks are
 * provided as overridable __weak stubs at the bottom of this file.
 */
#include <linux/types.h>
#include <common.h>
#include <asm/armv7.h>
#include <asm/utils.h>

/* Operation selectors for v7_dcache_maint_range() */
#define ARMV7_DCACHE_INVAL_RANGE 1
#define ARMV7_DCACHE_CLEAN_INVAL_RANGE 2

#ifndef CONFIG_SYS_DCACHE_OFF

/* Asm functions from cache_v7_asm.S */
void v7_flush_dcache_all(void);
void v7_invalidate_dcache_all(void);

/*
 * Read the CP15 Cache Size ID Register (CCSIDR) for the cache level
 * currently selected in CSSELR. The line-size field is used below to
 * derive the cache line length in bytes.
 */
static u32 get_ccsidr(void)
{
	u32 ccsidr;

	/* Read current CP15 Cache Size ID Register */
	asm volatile ("mrc p15, 1, %0, c0, c0, 0" : "=r" (ccsidr));
	return ccsidr;
}

/*
 * Clean & invalidate the D-cache over [start, stop) by MVA to PoC.
 *
 * @start:    start address of the range (rounded DOWN to a line boundary;
 *            cleaning extra bytes of a shared line is harmless since the
 *            data is written back, not discarded)
 * @stop:     end address of the range (exclusive)
 * @line_len: cache line length in bytes (must be a power of two)
 */
static void v7_dcache_clean_inval_range(u32 start, u32 stop, u32 line_len)
{
	u32 mva;

	/* Align start to cache line boundary */
	start &= ~(line_len - 1);
	for (mva = start; mva < stop; mva = mva + line_len) {
		/* DCCIMVAC - Clean & Invalidate data cache by MVA to PoC */
		asm volatile ("mcr p15, 0, %0, c7, c14, 1" : : "r" (mva));
	}
}

/*
 * Invalidate (without cleaning) the D-cache over [start, stop) by MVA
 * to PoC.
 *
 * Unlike the clean+invalidate variant, a misaligned range is rejected
 * outright (check_cache_range() warns and returns 0): silently widening
 * a pure invalidate to line boundaries would discard dirty data that
 * belongs to adjacent buffers sharing the boundary lines.
 *
 * @start:    start address of the range (must be line-aligned)
 * @stop:     end address of the range, exclusive (must be line-aligned)
 * @line_len: cache line length in bytes
 */
static void v7_dcache_inval_range(u32 start, u32 stop, u32 line_len)
{
	u32 mva;

	if (!check_cache_range(start, stop))
		return;

	for (mva = start; mva < stop; mva = mva + line_len) {
		/* DCIMVAC - Invalidate data cache by MVA to PoC */
		asm volatile ("mcr p15, 0, %0, c7, c6, 1" : : "r" (mva));
	}
}

/*
 * Perform the D-cache maintenance operation selected by range_op
 * (ARMV7_DCACHE_INVAL_RANGE or ARMV7_DCACHE_CLEAN_INVAL_RANGE) over
 * [start, stop), then issue a DSB so the maintenance is guaranteed
 * complete before any subsequent memory access.
 *
 * The line length is derived from CCSIDR: the LineSize field holds
 * log2(words per line) - 2, so adding 2 yields log2(words), adding 2
 * more converts to log2(bytes), and the shift produces bytes.
 */
static void v7_dcache_maint_range(u32 start, u32 stop, u32 range_op)
{
	u32 line_len, ccsidr;

	ccsidr = get_ccsidr();
	line_len = ((ccsidr & CCSIDR_LINE_SIZE_MASK) >>
			CCSIDR_LINE_SIZE_OFFSET) + 2;
	/* Converting from words to bytes */
	line_len += 2;
	/* converting from log2(linelen) to linelen */
	line_len = 1 << line_len;

	switch (range_op) {
	case ARMV7_DCACHE_CLEAN_INVAL_RANGE:
		v7_dcache_clean_inval_range(start, stop, line_len);
		break;
	case ARMV7_DCACHE_INVAL_RANGE:
		v7_dcache_inval_range(start, stop, line_len);
		break;
	}

	/* DSB to make sure the operation is complete */
	dsb();
}

/* Invalidate TLB */
static void v7_inval_tlb(void)
{
	/* Invalidate entire unified TLB */
	asm volatile ("mcr p15, 0, %0, c8, c7, 0" : : "r" (0));
	/* Invalidate entire data TLB */
	asm volatile ("mcr p15, 0, %0, c8, c6, 0" : : "r" (0));
	/* Invalidate entire instruction TLB */
	asm volatile ("mcr p15, 0, %0, c8, c5, 0" : : "r" (0));
	/* Full system DSB - make sure that the invalidation is complete */
	dsb();
	/* Full system ISB - make sure the instruction stream sees it */
	isb();
}

/*
 * Invalidate the entire data cache: inner (architected, via asm helper)
 * first, then the outer cache if one is present.
 */
void invalidate_dcache_all(void)
{
	v7_invalidate_dcache_all();

	v7_outer_cache_inval_all();
}

/*
 * Performs a clean & invalidation of the entire data cache
 * at all levels
 */
void flush_dcache_all(void)
{
	v7_flush_dcache_all();

	v7_outer_cache_flush_all();
}

/*
 * Invalidates range in all levels of D-cache/unified cache used:
 * Affects the range [start, stop - 1]
 *
 * check_cache_range() is called here only for its diagnostic warning on
 * misaligned ranges; v7_dcache_inval_range() re-checks and enforces the
 * alignment requirement itself.
 */
void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
	check_cache_range(start, stop);

	v7_dcache_maint_range(start, stop, ARMV7_DCACHE_INVAL_RANGE);

	v7_outer_cache_inval_range(start, stop);
}

/*
 * Flush range(clean & invalidate) from all levels of D-cache/unified
 * cache used:
 * Affects the range [start, stop - 1]
 */
void flush_dcache_range(unsigned long start, unsigned long stop)
{
	check_cache_range(start, stop);

	v7_dcache_maint_range(start, stop, ARMV7_DCACHE_CLEAN_INVAL_RANGE);

	v7_outer_cache_flush_range(start, stop);
}

/*
 * Prepare caches/TLB before the MMU is (re)enabled: enable the outer
 * cache, invalidate all D-cache contents, and invalidate the TLBs so no
 * stale translations survive into the new mapping.
 */
void arm_init_before_mmu(void)
{
	v7_outer_cache_enable();
	invalidate_dcache_all();
	v7_inval_tlb();
}

/*
 * Flush a freshly written page-table range to memory (so the MMU table
 * walker, which may not snoop the D-cache, sees it) and invalidate the
 * TLBs so the new entries take effect.
 */
void mmu_page_table_flush(unsigned long start, unsigned long stop)
{
	flush_dcache_range(start, stop);
	v7_inval_tlb();
}
#else /* #ifndef CONFIG_SYS_DCACHE_OFF */
/* No-op stubs when the D-cache is configured off */
void invalidate_dcache_all(void)
{
}

void flush_dcache_all(void)
{
}

void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
}

void flush_dcache_range(unsigned long start, unsigned long stop)
{
}

void arm_init_before_mmu(void)
{
}

void mmu_page_table_flush(unsigned long start, unsigned long stop)
{
}

void arm_init_domains(void)
{
}
#endif /* #ifndef CONFIG_SYS_DCACHE_OFF */

#ifndef CONFIG_SYS_ICACHE_OFF
/* Invalidate entire I-cache and branch predictor array */
void invalidate_icache_all(void)
{
	/*
	 * Invalidate all instruction caches to PoU.
	 * Also flushes branch target cache.
	 */
	asm volatile ("mcr p15, 0, %0, c7, c5, 0" : : "r" (0));

	/* Invalidate entire branch predictor array */
	asm volatile ("mcr p15, 0, %0, c7, c5, 6" : : "r" (0));

	/* Full system DSB - make sure that the invalidation is complete */
	dsb();

	/* ISB - make sure the instruction stream sees it */
	isb();
}
#else
/* No-op stub when the I-cache is configured off */
void invalidate_icache_all(void)
{
}
#endif

/*
 * Stub implementations for outer cache operations.
 * Boards with an outer cache (e.g. PL310) override these.
 */
__weak void v7_outer_cache_enable(void) {}
__weak void v7_outer_cache_disable(void) {}
__weak void v7_outer_cache_flush_all(void) {}
__weak void v7_outer_cache_inval_all(void) {}
__weak void v7_outer_cache_flush_range(u32 start, u32 end) {}
__weak void v7_outer_cache_inval_range(u32 start, u32 end) {}