// SPDX-License-Identifier: GPL-2.0+
/*
 * Copyright (C) 2012-2014 Panasonic Corporation
 * Copyright (C) 2015-2016 Socionext Inc.
 *   Author: Masahiro Yamada <yamada.masahiro@socionext.com>
 */

#include <common.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <asm/armv7.h>
#include <asm/processor.h>

#include "cache-uniphier.h"

/* control registers */
#define UNIPHIER_SSCC		0x500c0000	/* Control Register */
#define    UNIPHIER_SSCC_BST			(0x1 << 20)	/* UCWG burst read */
#define    UNIPHIER_SSCC_ACT			(0x1 << 19)	/* Inst-Data separate */
#define    UNIPHIER_SSCC_WTG			(0x1 << 18)	/* WT gathering on */
#define    UNIPHIER_SSCC_PRD			(0x1 << 17)	/* enable pre-fetch */
#define    UNIPHIER_SSCC_ON			(0x1 <<  0)	/* enable cache */
#define UNIPHIER_SSCLPDAWCR	0x500c0030	/* Unified/Data Active Way Control */
#define UNIPHIER_SSCLPIAWCR	0x500c0034	/* Instruction Active Way Control */

/* revision registers */
#define UNIPHIER_SSCID		0x503c0100	/* ID Register */

/* operation registers */
#define UNIPHIER_SSCOPE		0x506c0244	/* Cache Operation Primitive Entry */
#define    UNIPHIER_SSCOPE_CM_INV		0x0	/* invalidate */
#define    UNIPHIER_SSCOPE_CM_CLEAN		0x1	/* clean */
#define    UNIPHIER_SSCOPE_CM_FLUSH		0x2	/* flush */
#define    UNIPHIER_SSCOPE_CM_SYNC		0x8	/* sync (drain bufs) */
#define    UNIPHIER_SSCOPE_CM_FLUSH_PREFETCH	0x9	/* flush p-fetch buf */
#define UNIPHIER_SSCOQM		0x506c0248	/* Cache Operation Queue Mode */
#define    UNIPHIER_SSCOQM_TID_MASK		(0x3 << 21)
#define    UNIPHIER_SSCOQM_TID_LRU_DATA		(0x0 << 21)
#define    UNIPHIER_SSCOQM_TID_LRU_INST		(0x1 << 21)
#define    UNIPHIER_SSCOQM_TID_WAY		(0x2 << 21)
#define    UNIPHIER_SSCOQM_S_MASK		(0x3 << 17)
#define    UNIPHIER_SSCOQM_S_RANGE		(0x0 << 17)
#define    UNIPHIER_SSCOQM_S_ALL		(0x1 << 17)
#define    UNIPHIER_SSCOQM_S_WAY		(0x2 << 17)
#define    UNIPHIER_SSCOQM_CE			(0x1 << 15)	/* notify completion */
#define    UNIPHIER_SSCOQM_CW			(0x1 << 14)
#define    UNIPHIER_SSCOQM_CM_MASK		(0x7)
#define    UNIPHIER_SSCOQM_CM_INV		0x0	/* invalidate */
#define    UNIPHIER_SSCOQM_CM_CLEAN		0x1	/* clean */
#define    UNIPHIER_SSCOQM_CM_FLUSH		0x2	/* flush */
#define    UNIPHIER_SSCOQM_CM_PREFETCH		0x3	/* prefetch to cache */
#define    UNIPHIER_SSCOQM_CM_PREFETCH_BUF	0x4	/* prefetch to pf-buf */
#define    UNIPHIER_SSCOQM_CM_TOUCH		0x5	/* touch */
#define    UNIPHIER_SSCOQM_CM_TOUCH_ZERO	0x6	/* touch to zero */
#define    UNIPHIER_SSCOQM_CM_TOUCH_DIRTY	0x7	/* touch with dirty */
#define UNIPHIER_SSCOQAD	0x506c024c	/* Cache Operation Queue Address */
#define UNIPHIER_SSCOQSZ	0x506c0250	/* Cache Operation Queue Size */
#define UNIPHIER_SSCOQMASK	0x506c0254	/* Cache Operation Queue Address Mask */
#define UNIPHIER_SSCOQWN	0x506c0258	/* Cache Operation Queue Way Number */
#define UNIPHIER_SSCOPPQSEF	0x506c025c	/* Cache Operation Queue Set Complete */
#define    UNIPHIER_SSCOPPQSEF_FE		(0x1 << 1)
#define    UNIPHIER_SSCOPPQSEF_OE		(0x1 << 0)
#define UNIPHIER_SSCOLPQS	0x506c0260	/* Cache Operation Queue Status */
#define    UNIPHIER_SSCOLPQS_EF			(0x1 << 2)
#define    UNIPHIER_SSCOLPQS_EST		(0x1 << 1)
#define    UNIPHIER_SSCOLPQS_QST		(0x1 << 0)

#define UNIPHIER_SSC_LINE_SIZE		128
#define UNIPHIER_SSC_RANGE_OP_MAX_SIZE	(0x00400000 - (UNIPHIER_SSC_LINE_SIZE))

#define UNIPHIER_SSCOQAD_IS_NEEDED(op) \
		((op & UNIPHIER_SSCOQM_S_MASK) == UNIPHIER_SSCOQM_S_RANGE)
#define UNIPHIER_SSCOQWM_IS_NEEDED(op) \
		(((op & UNIPHIER_SSCOQM_S_MASK) == UNIPHIER_SSCOQM_S_WAY) || \
		 ((op & UNIPHIER_SSCOQM_TID_MASK) == UNIPHIER_SSCOQM_TID_WAY))

/* uniphier_cache_sync - perform a sync point for a particular cache level */
static void uniphier_cache_sync(void)
{
	/* drain internal buffers */
	writel(UNIPHIER_SSCOPE_CM_SYNC, UNIPHIER_SSCOPE);
	/* need a read back to confirm */
	readl(UNIPHIER_SSCOPE);
}

/**
 * uniphier_cache_maint_common - run a queue operation
 *
 * @start: start address of range operation (don't care for "all" operation)
 * @size: data size of range operation (don't care for "all" operation)
 * @ways: target ways (don't care for operations other than prefetch, touch,
 *	  and way invalidation)
 * @operation: flags to specify the desired cache operation
 */
static void uniphier_cache_maint_common(u32 start, u32 size, u32 ways,
					u32 operation)
{
	/* clear the complete notification flag */
	writel(UNIPHIER_SSCOLPQS_EF, UNIPHIER_SSCOLPQS);

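	/* retry the setup while the queue-set error flags (FE/OE) are raised */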
	do {
		/* set cache operation */
		writel(UNIPHIER_SSCOQM_CE | operation, UNIPHIER_SSCOQM);

		/* set address range if needed */
		if (likely(UNIPHIER_SSCOQAD_IS_NEEDED(operation))) {
			writel(start, UNIPHIER_SSCOQAD);
			writel(size, UNIPHIER_SSCOQSZ);
		}

		/* set target ways if needed */
		if (unlikely(UNIPHIER_SSCOQWM_IS_NEEDED(operation)))
			writel(ways, UNIPHIER_SSCOQWN);
	} while (unlikely(readl(UNIPHIER_SSCOPPQSEF) &
			  (UNIPHIER_SSCOPPQSEF_FE | UNIPHIER_SSCOPPQSEF_OE)));

	/* wait until the operation is completed */
	while (likely(readl(UNIPHIER_SSCOLPQS) != UNIPHIER_SSCOLPQS_EF))
		cpu_relax();
}

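/* uniphier_cache_maint_all - run a queue operation against the entire cache */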
static void uniphier_cache_maint_all(u32 operation)
{
	uniphier_cache_maint_common(0, 0, 0, UNIPHIER_SSCOQM_S_ALL | operation);

	uniphier_cache_sync();
}

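/*
 * uniphier_cache_maint_range - run a queue operation against an address range
 *
 * The range is expanded to cache-line boundaries and processed in chunks of
 * at most UNIPHIER_SSC_RANGE_OP_MAX_SIZE; a range covering (almost) the whole
 * 32-bit address space falls back to an "all" operation.
 */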
static void uniphier_cache_maint_range(u32 start, u32 end, u32 ways,
				       u32 operation)
{
	u32 size;

	/*
	 * If the start address is not aligned,
	 * perform a cache operation for the first cache-line
	 */
	start = start & ~(UNIPHIER_SSC_LINE_SIZE - 1);

	size = end - start;

	if (unlikely(size >= (u32)(-UNIPHIER_SSC_LINE_SIZE))) {
		/* this means cache operation for all range */
		uniphier_cache_maint_all(operation);
		return;
	}

	/*
	 * If the end address is not aligned,
	 * perform a cache operation for the last cache-line
	 */
	size = ALIGN(size, UNIPHIER_SSC_LINE_SIZE);

	while (size) {
		u32 chunk_size = min_t(u32, size, UNIPHIER_SSC_RANGE_OP_MAX_SIZE);

		uniphier_cache_maint_common(start, chunk_size, ways,
					    UNIPHIER_SSCOQM_S_RANGE | operation);

		start += chunk_size;
		size -= chunk_size;
	}

	uniphier_cache_sync();
}

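/*
 * The following helpers run way-targeted range operations (prefetch, touch,
 * touch-zero) against the given ways.
 */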
void uniphier_cache_prefetch_range(u32 start, u32 end, u32 ways)
{
	uniphier_cache_maint_range(start, end, ways,
				   UNIPHIER_SSCOQM_TID_WAY |
				   UNIPHIER_SSCOQM_CM_PREFETCH);
}

void uniphier_cache_touch_range(u32 start, u32 end, u32 ways)
{
	uniphier_cache_maint_range(start, end, ways,
				   UNIPHIER_SSCOQM_TID_WAY |
				   UNIPHIER_SSCOQM_CM_TOUCH);
}

void uniphier_cache_touch_zero_range(u32 start, u32 end, u32 ways)
{
	uniphier_cache_maint_range(start, end, ways,
				   UNIPHIER_SSCOQM_TID_WAY |
				   UNIPHIER_SSCOQM_CM_TOUCH_ZERO);
}

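/* uniphier_cache_inv_way - invalidate the given ways of the outer cache */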
void uniphier_cache_inv_way(u32 ways)
{
	uniphier_cache_maint_common(0, 0, ways,
				    UNIPHIER_SSCOQM_S_WAY |
				    UNIPHIER_SSCOQM_CM_INV);
}

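/*
 * uniphier_cache_set_active_ways - set the active way mask for the given CPU
 *
 * The per-CPU way control registers are located at a revision-dependent
 * offset from the control register base.
 */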
void uniphier_cache_set_active_ways(int cpu, u32 active_ways)
{
	void __iomem *base = (void __iomem *)UNIPHIER_SSCC + 0xc00;

	switch (readl(UNIPHIER_SSCID)) { /* revision */
	case 0x12:	/* LD4 */
	case 0x16:	/* sld8 */
		base = (void __iomem *)UNIPHIER_SSCC + 0x840;
		break;
	default:
		base = (void __iomem *)UNIPHIER_SSCC + 0xc00;
		break;
	}

	writel(active_ways, base + 4 * cpu);
}

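/* uniphier_cache_endisable - enable or disable the outer cache */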
static void uniphier_cache_endisable(int enable)
{
	u32 tmp;

	tmp = readl(UNIPHIER_SSCC);
	if (enable)
		tmp |= UNIPHIER_SSCC_ON;
	else
		tmp &= ~UNIPHIER_SSCC_ON;
	writel(tmp, UNIPHIER_SSCC);
}

void uniphier_cache_enable(void)
{
	uniphier_cache_endisable(1);
}

void uniphier_cache_disable(void)
{
	uniphier_cache_endisable(0);
}

#ifdef CONFIG_CACHE_UNIPHIER
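/* v7 outer cache hooks (declared in <asm/armv7.h>) */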
void v7_outer_cache_flush_all(void)
{
	uniphier_cache_maint_all(UNIPHIER_SSCOQM_CM_FLUSH);
}

void v7_outer_cache_inval_all(void)
{
	uniphier_cache_maint_all(UNIPHIER_SSCOQM_CM_INV);
}

void v7_outer_cache_flush_range(u32 start, u32 end)
{
	uniphier_cache_maint_range(start, end, 0, UNIPHIER_SSCOQM_CM_FLUSH);
}

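/*
 * Unaligned head/tail lines are cleaned and invalidated (flushed) so that
 * valid data sharing those lines is not lost; only the line-aligned middle
 * part of the range is invalidated.
 */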
void v7_outer_cache_inval_range(u32 start, u32 end)
{
	if (start & (UNIPHIER_SSC_LINE_SIZE - 1)) {
		start &= ~(UNIPHIER_SSC_LINE_SIZE - 1);
		uniphier_cache_maint_range(start, start + UNIPHIER_SSC_LINE_SIZE,
					   0, UNIPHIER_SSCOQM_CM_FLUSH);
		start += UNIPHIER_SSC_LINE_SIZE;
	}

	if (start >= end) {
		uniphier_cache_sync();
		return;
	}

	if (end & (UNIPHIER_SSC_LINE_SIZE - 1)) {
		end &= ~(UNIPHIER_SSC_LINE_SIZE - 1);
		uniphier_cache_maint_range(end, end + UNIPHIER_SSC_LINE_SIZE,
					   0, UNIPHIER_SSCOQM_CM_FLUSH);
	}

	if (start >= end) {
		uniphier_cache_sync();
		return;
	}

	uniphier_cache_maint_range(start, end, 0, UNIPHIER_SSCOQM_CM_INV);
}

void v7_outer_cache_enable(void)
{
	uniphier_cache_set_active_ways(0, U32_MAX);	/* activate all ways */
	uniphier_cache_enable();
}

void v7_outer_cache_disable(void)
{
	uniphier_cache_disable();
}
#endif

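/* enable_caches - U-Boot hook; enable the data cache */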
void enable_caches(void)
{
	dcache_enable();
}