xref: /openbmc/u-boot/arch/arm/cpu/armv7/cache_v7.c (revision 397b5697ad242408979a00dda14138aa1439f52b)
1 /*
2  * (C) Copyright 2010
3  * Texas Instruments, <www.ti.com>
4  * Aneesh V <aneesh@ti.com>
5  *
6  * SPDX-License-Identifier:	GPL-2.0+
7  */
8 #include <linux/types.h>
9 #include <common.h>
10 #include <asm/armv7.h>
11 #include <asm/utils.h>
12 
13 #define ARMV7_DCACHE_INVAL_RANGE	1
14 #define ARMV7_DCACHE_CLEAN_INVAL_RANGE	2
15 
16 #ifndef CONFIG_SYS_DCACHE_OFF
17 
18 /* Asm functions from cache_v7_asm.S */
19 void v7_flush_dcache_all(void);
20 void v7_invalidate_dcache_all(void);
21 
22 static u32 get_ccsidr(void)
23 {
24 	u32 ccsidr;
25 
26 	/* Read current CP15 Cache Size ID Register */
27 	asm volatile ("mrc p15, 1, %0, c0, c0, 0" : "=r" (ccsidr));
28 	return ccsidr;
29 }
30 
31 static void v7_dcache_clean_inval_range(u32 start, u32 stop, u32 line_len)
32 {
33 	u32 mva;
34 
35 	/* Align start to cache line boundary */
36 	start &= ~(line_len - 1);
37 	for (mva = start; mva < stop; mva = mva + line_len) {
38 		/* DCCIMVAC - Clean & Invalidate data cache by MVA to PoC */
39 		asm volatile ("mcr p15, 0, %0, c7, c14, 1" : : "r" (mva));
40 	}
41 }
42 
43 static void v7_dcache_inval_range(u32 start, u32 stop, u32 line_len)
44 {
45 	u32 mva;
46 
47 	/*
48 	 * If start address is not aligned to cache-line do not
49 	 * invalidate the first cache-line
50 	 */
51 	if (start & (line_len - 1)) {
52 		printf("ERROR: %s - start address is not aligned - 0x%08x\n",
53 			__func__, start);
54 		/* move to next cache line */
55 		start = (start + line_len - 1) & ~(line_len - 1);
56 	}
57 
58 	/*
59 	 * If stop address is not aligned to cache-line do not
60 	 * invalidate the last cache-line
61 	 */
62 	if (stop & (line_len - 1)) {
63 		printf("ERROR: %s - stop address is not aligned - 0x%08x\n",
64 			__func__, stop);
65 		/* align to the beginning of this cache line */
66 		stop &= ~(line_len - 1);
67 	}
68 
69 	for (mva = start; mva < stop; mva = mva + line_len) {
70 		/* DCIMVAC - Invalidate data cache by MVA to PoC */
71 		asm volatile ("mcr p15, 0, %0, c7, c6, 1" : : "r" (mva));
72 	}
73 }
74 
75 static void v7_dcache_maint_range(u32 start, u32 stop, u32 range_op)
76 {
77 	u32 line_len, ccsidr;
78 
79 	ccsidr = get_ccsidr();
80 	line_len = ((ccsidr & CCSIDR_LINE_SIZE_MASK) >>
81 			CCSIDR_LINE_SIZE_OFFSET) + 2;
82 	/* Converting from words to bytes */
83 	line_len += 2;
84 	/* converting from log2(linelen) to linelen */
85 	line_len = 1 << line_len;
86 
87 	switch (range_op) {
88 	case ARMV7_DCACHE_CLEAN_INVAL_RANGE:
89 		v7_dcache_clean_inval_range(start, stop, line_len);
90 		break;
91 	case ARMV7_DCACHE_INVAL_RANGE:
92 		v7_dcache_inval_range(start, stop, line_len);
93 		break;
94 	}
95 
96 	/* DSB to make sure the operation is complete */
97 	DSB;
98 }
99 
/*
 * Invalidate all TLBs (unified, data and instruction), then use
 * DSB + ISB so both the data side and the instruction stream observe
 * the invalidation before execution continues.
 */
static void v7_inval_tlb(void)
{
	/* Invalidate entire unified TLB (TLBIALL) */
	asm volatile ("mcr p15, 0, %0, c8, c7, 0" : : "r" (0));
	/* Invalidate entire data TLB (DTLBIALL) */
	asm volatile ("mcr p15, 0, %0, c8, c6, 0" : : "r" (0));
	/* Invalidate entire instruction TLB (ITLBIALL) */
	asm volatile ("mcr p15, 0, %0, c8, c5, 0" : : "r" (0));
	/* Full system DSB - make sure that the invalidation is complete */
	DSB;
	/* Full system ISB - make sure the instruction stream sees it */
	ISB;
}
114 
/*
 * Invalidate (discard without write-back) the entire D-cache at every
 * architected level, then the outer (L2/PL310-style) cache if a board
 * provides one via the v7_outer_cache_* hooks.
 */
void invalidate_dcache_all(void)
{
	v7_invalidate_dcache_all();

	v7_outer_cache_inval_all();
}
121 
/*
 * Performs a clean & invalidation of the entire data cache
 * at all levels, including any board-provided outer cache.
 */
void flush_dcache_all(void)
{
	v7_flush_dcache_all();

	v7_outer_cache_flush_all();
}
132 
/*
 * Invalidates range in all levels of D-cache/unified cache used:
 * Affects the range [start, stop - 1].  Inner caches first, then the
 * outer cache hook.  start/stop should be cache-line aligned;
 * check_cache_range() and v7_dcache_inval_range() report violations.
 */
void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
	check_cache_range(start, stop);

	v7_dcache_maint_range(start, stop, ARMV7_DCACHE_INVAL_RANGE);

	v7_outer_cache_inval_range(start, stop);
}
145 
/*
 * Flush range (clean & invalidate) from all levels of D-cache/unified
 * cache used:
 * Affects the range [start, stop - 1].  Inner caches are maintained
 * first, then the board's outer cache hook.
 */
void flush_dcache_range(unsigned long start, unsigned long stop)
{
	check_cache_range(start, stop);

	v7_dcache_maint_range(start, stop, ARMV7_DCACHE_CLEAN_INVAL_RANGE);

	v7_outer_cache_flush_range(start, stop);
}
159 
/*
 * Prepare caches/TLBs for MMU enable: bring up the outer cache hook,
 * discard any stale D-cache contents, then invalidate all TLBs so the
 * new page tables take effect cleanly.  Order is deliberate.
 */
void arm_init_before_mmu(void)
{
	v7_outer_cache_enable();
	invalidate_dcache_all();
	v7_inval_tlb();
}
166 
/*
 * Push modified page-table entries in [start, stop) out to memory so
 * the MMU table walker sees them, then invalidate the TLBs so stale
 * translations are dropped.
 */
void mmu_page_table_flush(unsigned long start, unsigned long stop)
{
	flush_dcache_range(start, stop);
	v7_inval_tlb();
}
172 #else /* #ifndef CONFIG_SYS_DCACHE_OFF */
/* No-op stubs: D-cache support compiled out (CONFIG_SYS_DCACHE_OFF) */
void invalidate_dcache_all(void)
{
}

void flush_dcache_all(void)
{
}

void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
}

void flush_dcache_range(unsigned long start, unsigned long stop)
{
}

void arm_init_before_mmu(void)
{
}

void mmu_page_table_flush(unsigned long start, unsigned long stop)
{
}

void arm_init_domains(void)
{
}
200 #endif /* #ifndef CONFIG_SYS_DCACHE_OFF */
201 
202 #ifndef CONFIG_SYS_ICACHE_OFF
/* Invalidate entire I-cache and branch predictor array */
void invalidate_icache_all(void)
{
	/*
	 * ICIALLU: invalidate all instruction caches to PoU.
	 * Also flushes branch target cache.
	 */
	asm volatile ("mcr p15, 0, %0, c7, c5, 0" : : "r" (0));

	/* BPIALL: invalidate entire branch predictor array */
	asm volatile ("mcr p15, 0, %0, c7, c5, 6" : : "r" (0));

	/* Full system DSB - make sure that the invalidation is complete */
	DSB;

	/* ISB - make sure the instruction stream sees it */
	ISB;
}
221 #else
/* No-op stub: I-cache support compiled out (CONFIG_SYS_ICACHE_OFF) */
void invalidate_icache_all(void)
{
}
225 #endif
226 
/*
 * Stub implementations for outer cache operations.  Boards with an
 * outer cache controller (e.g. PL310 L2) override these __weak
 * definitions with real maintenance routines.
 */
__weak void v7_outer_cache_enable(void) {}
__weak void v7_outer_cache_disable(void) {}
__weak void v7_outer_cache_flush_all(void) {}
__weak void v7_outer_cache_inval_all(void) {}
__weak void v7_outer_cache_flush_range(u32 start, u32 end) {}
__weak void v7_outer_cache_inval_range(u32 start, u32 end) {}
234