xref: /openbmc/u-boot/arch/arm/cpu/armv7/cache_v7.c (revision c09d29057ab0b04db0857d319c6bff74de31b9c3)
/*
 * (C) Copyright 2010
 * Texas Instruments, <www.ti.com>
 * Aneesh V <aneesh@ti.com>
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */
#include <linux/types.h>
#include <common.h>
#include <asm/armv7.h>
#include <asm/utils.h>

#define ARMV7_DCACHE_INVAL_ALL		1
#define ARMV7_DCACHE_CLEAN_INVAL_ALL	2
#define ARMV7_DCACHE_INVAL_RANGE	3
#define ARMV7_DCACHE_CLEAN_INVAL_RANGE	4

#ifndef CONFIG_SYS_DCACHE_OFF

/* Asm functions from cache_v7_asm.S */
void v7_flush_dcache_all(void);

/*
 * Check that both ends of an address range are cache-line aligned.
 * On a misaligned range only a debug warning is emitted; callers
 * currently go ahead with the maintenance operation regardless.
 */
static int check_cache_range(unsigned long start, unsigned long stop)
{
	int ok = 1;

	if (start & (CONFIG_SYS_CACHELINE_SIZE - 1))
		ok = 0;

	if (stop & (CONFIG_SYS_CACHELINE_SIZE - 1))
		ok = 0;

	if (!ok)
		debug("CACHE: Misaligned operation at range [%08lx, %08lx]\n",
		      start, stop);

	return ok;
}

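/*
 * For example, assuming CONFIG_SYS_CACHELINE_SIZE is 64: the range
 * [0x80000040, 0x80000080) passes (both ends are multiples of 64),
 * while [0x80000041, 0x80000080) triggers the warning above.
 */
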
/*
 * Write the level and type you want to the Cache Size Selection Register
 * (CSSELR) so that size details can then be read back from the Current
 * Cache Size ID Register (CCSIDR). Note that "level" is 0-based here
 * (0 selects L1), which matches the CSSELR Level field encoding of
 * "cache level minus 1".
 */
static void set_csselr(u32 level, u32 type)
{
	u32 csselr = level << 1 | type;

	/* Write to Cache Size Selection Register (CSSELR) */
	asm volatile ("mcr p15, 2, %0, c0, c0, 0" : : "r" (csselr));
}

static u32 get_ccsidr(void)
{
	u32 ccsidr;

	/* Read current CP15 Cache Size ID Register */
	asm volatile ("mrc p15, 1, %0, c0, c0, 0" : "=r" (ccsidr));
	return ccsidr;
}

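/*
 * CCSIDR field layout (ARMv7 ARM):
 *	bits [2:0]   LineSize:      log2(words per line) - 2
 *	bits [12:3]  Associativity: number of ways - 1
 *	bits [27:13] NumSets:       number of sets - 1
 * For example, a 32 KB 4-way cache with 64-byte lines reads back
 * LineSize = 2 (16 words), Associativity = 3 and NumSets = 127
 * (32 KB / (4 ways * 64 B) = 128 sets).
 */
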
static u32 get_clidr(void)
{
	u32 clidr;

	/* Read current CP15 Cache Level ID Register */
	asm volatile ("mrc p15, 1, %0, c0, c0, 1" : "=r" (clidr));
	return clidr;
}

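/*
 * Operand format for set/way cache maintenance operations such as
 * DCISW, per the ARMv7 ARM:
 *	bit  [0]	reserved, should be zero
 *	bits [3:1]	Level: cache level - 1
 *	bits above [L]	Set:   set index, starting at bit L,
 *			       L = log2(line length in bytes)
 *	bits [31:32-A]	Way:   way index, A = log2(num_ways) rounded up
 */
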
static void v7_inval_dcache_level_setway(u32 level, u32 num_sets,
					 u32 num_ways, u32 way_shift,
					 u32 log2_line_len)
{
	int way, set;
	u32 setway;

	/*
	 * For optimal assembly code:
	 *	a. count down (comparison against zero is cheap)
	 *	b. have the bigger loop inside (fewer outer-loop iterations)
	 */
	for (way = num_ways - 1; way >= 0 ; way--) {
		for (set = num_sets - 1; set >= 0; set--) {
			setway = (level << 1) | (set << log2_line_len) |
				 (way << way_shift);
			/* Invalidate data/unified cache line by set/way */
			asm volatile ("mcr p15, 0, %0, c7, c6, 2"
					: : "r" (setway));
		}
	}
	/* DSB to make sure the operation is complete */
	DSB;
}

/*
 * Note: only the invalidate path is implemented here. A full clean &
 * invalidate of all levels goes through the assembly v7_flush_dcache_all()
 * instead (see flush_dcache_all() below), so the operation argument is
 * currently unused.
 */
static void v7_maint_dcache_level_setway(u32 level, u32 operation)
{
	u32 ccsidr;
	u32 num_sets, num_ways, log2_line_len, log2_num_ways;
	u32 way_shift;

	set_csselr(level, ARMV7_CSSELR_IND_DATA_UNIFIED);

	ccsidr = get_ccsidr();

	log2_line_len = ((ccsidr & CCSIDR_LINE_SIZE_MASK) >>
				CCSIDR_LINE_SIZE_OFFSET) + 2;
	/* Converting from words to bytes */
	log2_line_len += 2;

	num_ways  = ((ccsidr & CCSIDR_ASSOCIATIVITY_MASK) >>
			CCSIDR_ASSOCIATIVITY_OFFSET) + 1;
	num_sets  = ((ccsidr & CCSIDR_NUM_SETS_MASK) >>
			CCSIDR_NUM_SETS_OFFSET) + 1;
	/*
	 * According to the ARMv7 ARM, the number of sets and the number
	 * of ways need not be a power of 2
	 */
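	/*
	 * e.g. with 4 ways, log_2_n_round_up(4) = 2 and way_shift = 30,
	 * so the way index occupies bits [31:30] of the set/way operand
	 */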
	log2_num_ways = log_2_n_round_up(num_ways);

	way_shift = (32 - log2_num_ways);
	v7_inval_dcache_level_setway(level, num_sets, num_ways,
				     way_shift, log2_line_len);
}

static void v7_maint_dcache_all(u32 operation)
{
	u32 level, cache_type, level_start_bit = 0;
	u32 clidr = get_clidr();

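	/*
	 * CLIDR holds a 3-bit cache-type field per level (L1 at bits [2:0]):
	 *	0 = no cache,       1 = instruction only, 2 = data only,
	 *	3 = separate I & D, 4 = unified
	 * Maintain every level that has a data or unified cache.
	 */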
	for (level = 0; level < 7; level++) {
		cache_type = (clidr >> level_start_bit) & 0x7;
		if ((cache_type == ARMV7_CLIDR_CTYPE_DATA_ONLY) ||
		    (cache_type == ARMV7_CLIDR_CTYPE_INSTRUCTION_DATA) ||
		    (cache_type == ARMV7_CLIDR_CTYPE_UNIFIED))
			v7_maint_dcache_level_setway(level, operation);
		level_start_bit += 3;
	}
}

static void v7_dcache_clean_inval_range(u32 start, u32 stop, u32 line_len)
{
	u32 mva;

	/*
	 * Align start to a cache-line boundary. Rounding down is safe for
	 * clean & invalidate: any unrelated data sharing the first line is
	 * written back, not discarded.
	 */
	start &= ~(line_len - 1);
	for (mva = start; mva < stop; mva = mva + line_len) {
		/* DCCIMVAC - Clean & Invalidate data cache by MVA to PoC */
		asm volatile ("mcr p15, 0, %0, c7, c14, 1" : : "r" (mva));
	}
}

static void v7_dcache_inval_range(u32 start, u32 stop, u32 line_len)
{
	u32 mva;

	/*
	 * If the start address is not cache-line aligned, do not invalidate
	 * the first cache-line: it may hold unrelated (possibly dirty) data
	 * outside the range, which a plain invalidate would destroy.
	 */
	if (start & (line_len - 1)) {
		printf("ERROR: %s - start address is not aligned - 0x%08x\n",
			__func__, start);
		/* move to the next cache line */
		start = (start + line_len - 1) & ~(line_len - 1);
	}

	/*
	 * Likewise, if the stop address is not cache-line aligned, do not
	 * invalidate the last cache-line, for the same reason.
	 */
	if (stop & (line_len - 1)) {
		printf("ERROR: %s - stop address is not aligned - 0x%08x\n",
			__func__, stop);
		/* align to the beginning of this cache line */
		stop &= ~(line_len - 1);
	}

	for (mva = start; mva < stop; mva = mva + line_len) {
		/* DCIMVAC - Invalidate data cache by MVA to PoC */
		asm volatile ("mcr p15, 0, %0, c7, c6, 1" : : "r" (mva));
	}
}

static void v7_dcache_maint_range(u32 start, u32 stop, u32 range_op)
{
	u32 line_len, ccsidr;

	ccsidr = get_ccsidr();
	line_len = ((ccsidr & CCSIDR_LINE_SIZE_MASK) >>
			CCSIDR_LINE_SIZE_OFFSET) + 2;
	/* Converting from words to bytes */
	line_len += 2;
	/* Converting from log2(linelen) to linelen */
	line_len = 1 << line_len;
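	/* e.g. a LineSize field of 2 gives 1 << (2 + 2 + 2) = 64-byte lines */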

	switch (range_op) {
	case ARMV7_DCACHE_CLEAN_INVAL_RANGE:
		v7_dcache_clean_inval_range(start, stop, line_len);
		break;
	case ARMV7_DCACHE_INVAL_RANGE:
		v7_dcache_inval_range(start, stop, line_len);
		break;
	}

	/* DSB to make sure the operation is complete */
	DSB;
}

/* Invalidate TLB */
static void v7_inval_tlb(void)
{
	/* Invalidate entire unified TLB */
	asm volatile ("mcr p15, 0, %0, c8, c7, 0" : : "r" (0));
	/* Invalidate entire data TLB */
	asm volatile ("mcr p15, 0, %0, c8, c6, 0" : : "r" (0));
	/* Invalidate entire instruction TLB */
	asm volatile ("mcr p15, 0, %0, c8, c5, 0" : : "r" (0));
	/* Full system DSB - make sure that the invalidation is complete */
	DSB;
	/* Full system ISB - make sure the instruction stream sees it */
	ISB;
}

void invalidate_dcache_all(void)
{
	v7_maint_dcache_all(ARMV7_DCACHE_INVAL_ALL);

	v7_outer_cache_inval_all();
}

/*
 * Perform a clean & invalidate of the entire data cache at all levels.
 * The inner caches are handled by the assembly routine
 * v7_flush_dcache_all() from cache_v7_asm.S.
 */
void flush_dcache_all(void)
{
	v7_flush_dcache_all();

	v7_outer_cache_flush_all();
}

/*
 * Invalidate a range in all levels of D-cache/unified cache used.
 * Affects the range [start, stop - 1].
 */
void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
	check_cache_range(start, stop);

	v7_dcache_maint_range(start, stop, ARMV7_DCACHE_INVAL_RANGE);

	v7_outer_cache_inval_range(start, stop);
}

/*
 * Flush (clean & invalidate) a range in all levels of D-cache/unified
 * cache used.
 * Affects the range [start, stop - 1].
 */
void flush_dcache_range(unsigned long start, unsigned long stop)
{
	check_cache_range(start, stop);

	v7_dcache_maint_range(start, stop, ARMV7_DCACHE_CLEAN_INVAL_RANGE);

	v7_outer_cache_flush_range(start, stop);
}
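
/*
 * A typical use of these range operations is DMA buffer maintenance.
 * A minimal, purely illustrative sketch (buf, len and the dma_* helpers
 * are hypothetical, not part of this file):
 *
 *	flush_dcache_range((ulong)buf, (ulong)buf + len);
 *	dma_start_to_device(buf, len);		// device reads what the CPU wrote
 *
 *	dma_start_from_device(buf, len);	// device writes into buf
 *	dma_wait_done();
 *	invalidate_dcache_range((ulong)buf, (ulong)buf + len);	// drop stale lines
 *
 * Both addresses should be cache-line aligned, as checked by
 * check_cache_range() above.
 */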

/*
 * Prepare for (re)programming the MMU: make sure no stale data-cache
 * contents or TLB entries survive from before.
 */
void arm_init_before_mmu(void)
{
	v7_outer_cache_enable();
	invalidate_dcache_all();
	v7_inval_tlb();
}

/*
 * Flush a freshly written page-table range to memory and drop old TLB
 * entries, so the table walker (which may read from memory rather than
 * the cache) sees the new entries.
 */
void mmu_page_table_flush(unsigned long start, unsigned long stop)
{
	flush_dcache_range(start, stop);
	v7_inval_tlb();
}
#else /* #ifndef CONFIG_SYS_DCACHE_OFF */
void invalidate_dcache_all(void)
{
}

void flush_dcache_all(void)
{
}

void arm_init_before_mmu(void)
{
}

void mmu_page_table_flush(unsigned long start, unsigned long stop)
{
}

void arm_init_domains(void)
{
}
#endif /* #ifndef CONFIG_SYS_DCACHE_OFF */

#ifndef CONFIG_SYS_ICACHE_OFF
/* Invalidate entire I-cache and branch predictor array */
void invalidate_icache_all(void)
{
	/*
	 * Invalidate all instruction caches to PoU.
	 * Also flushes branch target cache.
	 */
	asm volatile ("mcr p15, 0, %0, c7, c5, 0" : : "r" (0));

	/* Invalidate entire branch predictor array */
	asm volatile ("mcr p15, 0, %0, c7, c5, 6" : : "r" (0));

	/* Full system DSB - make sure that the invalidation is complete */
	DSB;

	/* ISB - make sure the instruction stream sees it */
	ISB;
}
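
/*
 * Note: after writing code to memory (loading an image, relocation,
 * etc.), the usual ARMv7 sequence is to first clean the affected
 * D-cache range (see flush_dcache_range() above) and only then
 * invalidate the I-cache, so that instruction fetches see the new code.
 */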
#else
void invalidate_icache_all(void)
{
}
#endif

/*
 * Stub implementations for the outer (L2) cache operations. They are
 * declared __weak, so an SoC-specific outer-cache driver can override
 * them at link time.
 */
__weak void v7_outer_cache_enable(void) {}
__weak void v7_outer_cache_disable(void) {}
__weak void v7_outer_cache_flush_all(void) {}
__weak void v7_outer_cache_inval_all(void) {}
__weak void v7_outer_cache_flush_range(u32 start, u32 end) {}
__weak void v7_outer_cache_inval_range(u32 start, u32 end) {}
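
/*
 * A minimal, purely illustrative override sketch (register names and
 * offsets are hypothetical; a real driver, e.g. U-Boot's PL310 support,
 * must follow the controller's manual):
 *
 *	void v7_outer_cache_flush_all(void)
 *	{
 *		writel(WAY_MASK, L2_BASE + L2_CLEAN_INV_WAY);
 *		while (readl(L2_BASE + L2_CLEAN_INV_WAY) & WAY_MASK)
 *			;	// wait until the controller is done
 *	}
 */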