xref: /openbmc/u-boot/arch/arm/cpu/armv7/cache_v7.c (revision 63e22517)
1 // SPDX-License-Identifier: GPL-2.0+
2 /*
3  * (C) Copyright 2010
4  * Texas Instruments, <www.ti.com>
5  * Aneesh V <aneesh@ti.com>
6  */
7 #include <linux/types.h>
8 #include <common.h>
9 #include <asm/armv7.h>
10 #include <asm/utils.h>
11 
12 #define ARMV7_DCACHE_INVAL_RANGE	1
13 #define ARMV7_DCACHE_CLEAN_INVAL_RANGE	2
14 
15 #ifndef CONFIG_SYS_DCACHE_OFF
16 
17 /* Asm functions from cache_v7_asm.S */
18 void v7_flush_dcache_all(void);
19 void v7_invalidate_dcache_all(void);
20 
21 static u32 get_ccsidr(void)
22 {
23 	u32 ccsidr;
24 
25 	/* Read current CP15 Cache Size ID Register */
26 	asm volatile ("mrc p15, 1, %0, c0, c0, 0" : "=r" (ccsidr));
27 	return ccsidr;
28 }
29 
30 static void v7_dcache_clean_inval_range(u32 start, u32 stop, u32 line_len)
31 {
32 	u32 mva;
33 
34 	/* Align start to cache line boundary */
35 	start &= ~(line_len - 1);
36 	for (mva = start; mva < stop; mva = mva + line_len) {
37 		/* DCCIMVAC - Clean & Invalidate data cache by MVA to PoC */
38 		asm volatile ("mcr p15, 0, %0, c7, c14, 1" : : "r" (mva));
39 	}
40 }
41 
42 static void v7_dcache_inval_range(u32 start, u32 stop, u32 line_len)
43 {
44 	u32 mva;
45 
46 	if (!check_cache_range(start, stop))
47 		return;
48 
49 	for (mva = start; mva < stop; mva = mva + line_len) {
50 		/* DCIMVAC - Invalidate data cache by MVA to PoC */
51 		asm volatile ("mcr p15, 0, %0, c7, c6, 1" : : "r" (mva));
52 	}
53 }
54 
55 static void v7_dcache_maint_range(u32 start, u32 stop, u32 range_op)
56 {
57 	u32 line_len, ccsidr;
58 
59 	ccsidr = get_ccsidr();
60 	line_len = ((ccsidr & CCSIDR_LINE_SIZE_MASK) >>
61 			CCSIDR_LINE_SIZE_OFFSET) + 2;
62 	/* Converting from words to bytes */
63 	line_len += 2;
64 	/* converting from log2(linelen) to linelen */
65 	line_len = 1 << line_len;
66 
67 	switch (range_op) {
68 	case ARMV7_DCACHE_CLEAN_INVAL_RANGE:
69 		v7_dcache_clean_inval_range(start, stop, line_len);
70 		break;
71 	case ARMV7_DCACHE_INVAL_RANGE:
72 		v7_dcache_inval_range(start, stop, line_len);
73 		break;
74 	}
75 
76 	/* DSB to make sure the operation is complete */
77 	dsb();
78 }
79 
/*
 * Invalidate the entire TLB (unified, data and instruction), then
 * barrier so later translations cannot use stale entries.
 */
static void v7_inval_tlb(void)
{
	/* Invalidate entire unified TLB (TLBIALL) */
	asm volatile ("mcr p15, 0, %0, c8, c7, 0" : : "r" (0));
	/* Invalidate entire data TLB (DTLBIALL) */
	asm volatile ("mcr p15, 0, %0, c8, c6, 0" : : "r" (0));
	/* Invalidate entire instruction TLB (ITLBIALL) */
	asm volatile ("mcr p15, 0, %0, c8, c5, 0" : : "r" (0));
	/* Full system DSB - make sure that the invalidation is complete */
	dsb();
	/* Full system ISB - make sure the instruction stream sees it */
	isb();
}
94 
/*
 * Invalidate the entire D-cache hierarchy: the ARMv7 architected caches
 * (asm helper in cache_v7_asm.S) followed by any outer cache, via the
 * board-overridable hook.  Dirty lines are discarded, not written back.
 */
void invalidate_dcache_all(void)
{
	v7_invalidate_dcache_all();

	v7_outer_cache_inval_all();
}
101 
/*
 * Performs a clean & invalidation of the entire data cache
 * at all levels: the ARMv7 architected caches (asm helper in
 * cache_v7_asm.S), then any outer cache via the board hook.
 */
void flush_dcache_all(void)
{
	v7_flush_dcache_all();

	v7_outer_cache_flush_all();
}
112 
/*
 * Invalidates range in all levels of D-cache/unified cache used:
 * Affects the range [start, stop - 1].  The range should be cache-line
 * aligned; check_cache_range() is called here for its diagnostics and
 * the inner maintenance path re-checks before operating.
 */
void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
	check_cache_range(start, stop);

	v7_dcache_maint_range(start, stop, ARMV7_DCACHE_INVAL_RANGE);

	v7_outer_cache_inval_range(start, stop);
}
125 
/*
 * Flush range(clean & invalidate) from all levels of D-cache/unified
 * cache used:
 * Affects the range [start, stop - 1].  Inner caches first, then the
 * outer cache, so data reaches memory (or the outer cache sees it).
 */
void flush_dcache_range(unsigned long start, unsigned long stop)
{
	check_cache_range(start, stop);

	v7_dcache_maint_range(start, stop, ARMV7_DCACHE_CLEAN_INVAL_RANGE);

	v7_outer_cache_flush_range(start, stop);
}
139 
/*
 * Prepare the CPU for MMU enablement: bring up the outer cache, then
 * invalidate all D-caches and the TLB so no stale lines or translations
 * survive into the new mapping.
 */
void arm_init_before_mmu(void)
{
	v7_outer_cache_enable();
	invalidate_dcache_all();
	v7_inval_tlb();
}
146 
/*
 * Push updated page-table entries in [start, stop - 1] out to memory
 * where the MMU walker can see them, then invalidate the TLB so the
 * new entries take effect.
 */
void mmu_page_table_flush(unsigned long start, unsigned long stop)
{
	flush_dcache_range(start, stop);
	v7_inval_tlb();
}
152 #else /* #ifndef CONFIG_SYS_DCACHE_OFF */
/*
 * D-cache maintenance is compiled out (CONFIG_SYS_DCACHE_OFF):
 * provide empty stubs so callers link unchanged.
 */
void invalidate_dcache_all(void)
{
}

void flush_dcache_all(void)
{
}

void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
}

void flush_dcache_range(unsigned long start, unsigned long stop)
{
}

void arm_init_before_mmu(void)
{
}

void mmu_page_table_flush(unsigned long start, unsigned long stop)
{
}

void arm_init_domains(void)
{
}
180 #endif /* #ifndef CONFIG_SYS_DCACHE_OFF */
181 
182 #ifndef CONFIG_SYS_ICACHE_OFF
/* Invalidate entire I-cache and branch predictor array */
void invalidate_icache_all(void)
{
	/*
	 * ICIALLU: invalidate all instruction caches to PoU.
	 * Also flushes branch target cache.
	 */
	asm volatile ("mcr p15, 0, %0, c7, c5, 0" : : "r" (0));

	/* BPIALL: invalidate entire branch predictor array */
	asm volatile ("mcr p15, 0, %0, c7, c5, 6" : : "r" (0));

	/* Full system DSB - make sure that the invalidation is complete */
	dsb();

	/* ISB - make sure the instruction stream sees it */
	isb();
}
201 #else
void invalidate_icache_all(void)
{
	/* No-op: I-cache maintenance is compiled out (CONFIG_SYS_ICACHE_OFF) */
}
205 #endif
206 
/*
 * Stub implementations for outer (e.g. L2) cache operations.
 * Boards with an outer cache controller override these __weak symbols.
 */
__weak void v7_outer_cache_enable(void) {}
__weak void v7_outer_cache_disable(void) {}
__weak void v7_outer_cache_flush_all(void) {}
__weak void v7_outer_cache_inval_all(void) {}
__weak void v7_outer_cache_flush_range(u32 start, u32 end) {}
__weak void v7_outer_cache_inval_range(u32 start, u32 end) {}
214