xref: /openbmc/u-boot/arch/nds32/lib/cache.c (revision a22bbfda)
/*
 * Copyright (C) 2012 Andes Technology Corporation
 * Shawn Lin, Andes Technology Corporation <nobuhiro@andestech.com>
 * Macpaul Lin, Andes Technology Corporation <macpaul@andestech.com>
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

#include <common.h>

#if (!defined(CONFIG_SYS_ICACHE_OFF) || !defined(CONFIG_SYS_DCACHE_OFF))
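/*
 * Helpers that decode the L1 cache geometry from the ICM_CFG/DCM_CFG
 * configuration registers: number of sets, number of ways and line size
 * in bytes, using the field encodings this file relies on.
 */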
static inline unsigned long CACHE_SET(enum cache_t cache)
{
	if (cache == ICACHE)
		return 64 << ((GET_ICM_CFG() & ICM_CFG_MSK_ISET)
			>> ICM_CFG_OFF_ISET);
	else
		return 64 << ((GET_DCM_CFG() & DCM_CFG_MSK_DSET)
			>> DCM_CFG_OFF_DSET);
}

static inline unsigned long CACHE_WAY(enum cache_t cache)
{
	if (cache == ICACHE)
		return 1 + ((GET_ICM_CFG() & ICM_CFG_MSK_IWAY)
			>> ICM_CFG_OFF_IWAY);
	else
		return 1 + ((GET_DCM_CFG() & DCM_CFG_MSK_DWAY)
			>> DCM_CFG_OFF_DWAY);
}

static inline unsigned long CACHE_LINE_SIZE(enum cache_t cache)
{
	if (cache == ICACHE)
		return 8 << (((GET_ICM_CFG() & ICM_CFG_MSK_ISZ)
			>> ICM_CFG_OFF_ISZ) - 1);
	else
		return 8 << (((GET_DCM_CFG() & DCM_CFG_MSK_DSZ)
			>> DCM_CFG_OFF_DSZ) - 1);
}
#endif

#ifndef CONFIG_SYS_ICACHE_OFF
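/*
 * Invalidate the entire L1 I-cache by index: start at the top of the
 * cache (line size * ways * sets) and walk down one line at a time,
 * issuing CCTL L1I_IX_INVAL for each index.  The loop is unrolled by four.
 */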
void invalidate_icache_all(void)
{
	unsigned long end, line_size;

	line_size = CACHE_LINE_SIZE(ICACHE);
	end = line_size * CACHE_WAY(ICACHE) * CACHE_SET(ICACHE);
	do {
		end -= line_size;
		__asm__ volatile ("\n\tcctl %0, L1I_IX_INVAL" : : "r" (end));

		end -= line_size;
		__asm__ volatile ("\n\tcctl %0, L1I_IX_INVAL" : : "r" (end));

		end -= line_size;
		__asm__ volatile ("\n\tcctl %0, L1I_IX_INVAL" : : "r" (end));

		end -= line_size;
		__asm__ volatile ("\n\tcctl %0, L1I_IX_INVAL" : : "r" (end));
	} while (end > 0);
}

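/*
 * Invalidate the L1 I-cache lines covering [start, end) by virtual
 * address, one cache line per CCTL L1I_VA_INVAL operation.
 */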
void invalidate_icache_range(unsigned long start, unsigned long end)
{
	unsigned long line_size;

	line_size = CACHE_LINE_SIZE(ICACHE);
	while (end > start) {
		asm volatile (
			"\n\tcctl %0, L1I_VA_INVAL"
			:
			: "r" (start)
		);
		start += line_size;
	}
}

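/* Set the I-cache enable bit (bit 0) in the cache control register $mr8. */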
void icache_enable(void)
{
	asm volatile (
		"mfsr	$p0, $mr8\n\t"
		"ori	$p0, $p0, 0x01\n\t"
		"mtsr	$p0, $mr8\n\t"
		"isb\n\t"
	);
}

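/* Clear the I-cache enable bit (bit 0) in $mr8. */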
void icache_disable(void)
{
	asm volatile (
		"mfsr	$p0, $mr8\n\t"
		"li	$p1, ~0x01\n\t"
		"and	$p0, $p0, $p1\n\t"
		"mtsr	$p0, $mr8\n\t"
		"isb\n\t"
	);
}

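/* Return a non-zero value if the I-cache enable bit in $mr8 is set. */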
int icache_status(void)
{
	int ret;

	asm volatile (
		"mfsr	$p0, $mr8\n\t"
		"andi	%0, $p0, 0x01\n\t"
		: "=r" (ret)
		:
		: "memory"
	);

	return ret;
}

#else
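/* I-cache support is compiled out: provide empty stubs. */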
void invalidate_icache_all(void)
{
}

void invalidate_icache_range(unsigned long start, unsigned long end)
{
}

void icache_enable(void)
{
}

void icache_disable(void)
{
}

int icache_status(void)
{
	return 0;
}

#endif

#ifndef CONFIG_SYS_DCACHE_OFF
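/*
 * Write back and invalidate the entire L1 D-cache by index, walking down
 * from the top of the cache one line at a time and issuing CCTL L1D_IX_WB
 * followed by L1D_IX_INVAL for each index (loop unrolled by four).
 */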
void dcache_wbinval_all(void)
{
	unsigned long end, line_size;

	line_size = CACHE_LINE_SIZE(DCACHE);
	end = line_size * CACHE_WAY(DCACHE) * CACHE_SET(DCACHE);
	do {
		end -= line_size;
		__asm__ volatile ("\n\tcctl %0, L1D_IX_WB" : : "r" (end));
		__asm__ volatile ("\n\tcctl %0, L1D_IX_INVAL" : : "r" (end));

		end -= line_size;
		__asm__ volatile ("\n\tcctl %0, L1D_IX_WB" : : "r" (end));
		__asm__ volatile ("\n\tcctl %0, L1D_IX_INVAL" : : "r" (end));

		end -= line_size;
		__asm__ volatile ("\n\tcctl %0, L1D_IX_WB" : : "r" (end));
		__asm__ volatile ("\n\tcctl %0, L1D_IX_INVAL" : : "r" (end));

		end -= line_size;
		__asm__ volatile ("\n\tcctl %0, L1D_IX_WB" : : "r" (end));
		__asm__ volatile ("\n\tcctl %0, L1D_IX_INVAL" : : "r" (end));
	} while (end > 0);
}

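/*
 * Write back and invalidate the L1 D-cache lines covering [start, end)
 * by virtual address.
 */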
void flush_dcache_range(unsigned long start, unsigned long end)
{
	unsigned long line_size;

	line_size = CACHE_LINE_SIZE(DCACHE);
	while (end > start) {
		asm volatile (
			"\n\tcctl %0, L1D_VA_WB"
			"\n\tcctl %0, L1D_VA_INVAL" : : "r" (start)
		);
		start += line_size;
	}
}

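/*
 * Invalidate (without write back) the L1 D-cache lines covering
 * [start, end); any dirty data in the range is discarded.
 */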
void invalidate_dcache_range(unsigned long start, unsigned long end)
{
	unsigned long line_size;

	line_size = CACHE_LINE_SIZE(DCACHE);
	while (end > start) {
		asm volatile (
			"\n\tcctl %0, L1D_VA_INVAL" : : "r" (start)
		);
		start += line_size;
	}
}

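/* Set the D-cache enable bit (bit 1) in the cache control register $mr8. */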
void dcache_enable(void)
{
	asm volatile (
		"mfsr	$p0, $mr8\n\t"
		"ori	$p0, $p0, 0x02\n\t"
		"mtsr	$p0, $mr8\n\t"
		"isb\n\t"
	);
}

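/* Clear the D-cache enable bit (bit 1) in $mr8. */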
void dcache_disable(void)
{
	asm volatile (
		"mfsr	$p0, $mr8\n\t"
		"li	$p1, ~0x02\n\t"
		"and	$p0, $p0, $p1\n\t"
		"mtsr	$p0, $mr8\n\t"
		"isb\n\t"
	);
}

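/* Return a non-zero value if the D-cache enable bit in $mr8 is set. */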
int dcache_status(void)
{
	int ret;

	asm volatile (
		"mfsr	$p0, $mr8\n\t"
		"andi	%0, $p0, 0x02\n\t"
		: "=r" (ret)
		:
		: "memory"
	);

	return ret;
}

#else
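/* D-cache support is compiled out: provide empty stubs. */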
void dcache_wbinval_all(void)
{
}

void flush_dcache_range(unsigned long start, unsigned long end)
{
}

void invalidate_dcache_range(unsigned long start, unsigned long end)
{
}

void dcache_enable(void)
{
}

void dcache_disable(void)
{
}

int dcache_status(void)
{
	return 0;
}

#endif

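/* Write back and invalidate the whole L1 D-cache. */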
void flush_dcache_all(void)
{
	dcache_wbinval_all();
}

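/* Flush the whole D-cache and invalidate the whole I-cache. */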
void cache_flush(void)
{
	flush_dcache_all();
	invalidate_icache_all();
}

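/*
 * Flush (write back) the D-cache and invalidate the I-cache for the
 * [addr, addr + size) range, typically before executing freshly loaded code.
 */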
void flush_cache(unsigned long addr, unsigned long size)
{
	flush_dcache_range(addr, addr + size);
	invalidate_icache_range(addr, addr + size);
}