xref: /openbmc/u-boot/arch/mips/lib/cache_init.S (revision fcf2fba4)
/*
 *  Cache-handling routines for MIPS CPUs
 *
 *  Copyright (c) 2003	Wolfgang Denk <wd@denx.de>
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

#include <asm-offsets.h>
#include <config.h>
#include <asm/asm.h>
#include <asm/regdef.h>
#include <asm/mipsregs.h>
#include <asm/addrspace.h>
#include <asm/cacheops.h>
#include <asm/cm.h>

#ifndef CONFIG_SYS_MIPS_CACHE_MODE
#define CONFIG_SYS_MIPS_CACHE_MODE CONF_CM_CACHABLE_NONCOHERENT
#endif

#define INDEX_BASE	CKSEG0

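/*
 * f_fill64 stores \val to 64 bytes starting at \offset(\dst): eight
 * LONG_S stores when LONGSIZE == 8, sixteen when LONGSIZE == 4.
 */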
	.macro	f_fill64 dst, offset, val
	LONG_S	\val, (\offset +  0 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  1 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  2 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  3 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  4 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  5 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  6 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  7 * LONGSIZE)(\dst)
#if LONGSIZE == 4
	LONG_S	\val, (\offset +  8 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset +  9 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 10 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 11 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 12 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 13 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 14 * LONGSIZE)(\dst)
	LONG_S	\val, (\offset + 15 * LONGSIZE)(\dst)
#endif
	.endm

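/*
 * cache_loop applies cache operation \op to each line in [\curr, \end),
 * advancing \curr by \line_sz per iteration.
 */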
	.macro cache_loop	curr, end, line_sz, op
10:	cache		\op, 0(\curr)
	PTR_ADDU	\curr, \curr, \line_sz
	bne		\curr, \end, 10b
	.endm

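/*
 * l1_info decodes a primary cache's geometry from Config1. With \off set to
 * MIPS_CONF1_IA_SHF or MIPS_CONF1_DA_SHF it reads the I- or D-cache fields:
 * line size = 2 << L bytes (L == 0 means no cache), ways = A + 1 and
 * sets per way = 32 << ((S + 1) & 0x7), giving \sz = line size * ways * sets.
 * Illustrative example (hypothetical field values): L=4, A=3, S=2 gives
 * 32-byte lines, 4 ways and 256 sets, i.e. a 32KB cache.
 */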
	.macro	l1_info		sz, line_sz, off
	.set	push
	.set	noat

	mfc0	$1, CP0_CONFIG, 1

	/* detect line size */
	srl	\line_sz, $1, \off + MIPS_CONF1_DL_SHF - MIPS_CONF1_DA_SHF
	andi	\line_sz, \line_sz, (MIPS_CONF1_DL >> MIPS_CONF1_DL_SHF)
	move	\sz, zero
	beqz	\line_sz, 10f
	li	\sz, 2
	sllv	\line_sz, \sz, \line_sz

	/* detect associativity */
	srl	\sz, $1, \off + MIPS_CONF1_DA_SHF - MIPS_CONF1_DA_SHF
	andi	\sz, \sz, (MIPS_CONF1_DA >> MIPS_CONF1_DA_SHF)
	addiu	\sz, \sz, 1

	/* sz *= line_sz */
	mul	\sz, \sz, \line_sz

	/* detect log32(sets) */
	srl	$1, $1, \off + MIPS_CONF1_DS_SHF - MIPS_CONF1_DA_SHF
	andi	$1, $1, (MIPS_CONF1_DS >> MIPS_CONF1_DS_SHF)
	addiu	$1, $1, 1
	andi	$1, $1, 0x7

	/* sz <<= log32(sets) */
	sllv	\sz, \sz, $1

	/* sz *= 32 */
	li	$1, 32
	mul	\sz, \sz, $1
10:
	.set	pop
	.endm
/*
 * mips_cache_reset - low level initialisation of the primary caches
 *
 * This routine initialises the primary caches to ensure that they have good
 * parity.  It must be called by the ROM before any cached locations are used
 * to prevent the possibility of data with bad parity being written to memory.
 *
 * To initialise the instruction cache it is essential that a source of data
 * with good parity is available. This routine will initialise an area of
 * memory starting at location zero to be used as a source of parity.
 *
 * Note that this function does not follow the standard calling convention &
 * may clobber typically callee-saved registers.
 *
 * RETURNS: N/A
 *
 */
#define R_RETURN	s0
#define R_IC_SIZE	s1
#define R_IC_LINE	s2
#define R_DC_SIZE	s3
#define R_DC_LINE	s4
#define R_L2_SIZE	s5
#define R_L2_LINE	s6
#define R_L2_BYPASSED	s7
#define R_L2_L2C	t8
LEAF(mips_cache_reset)
	move	R_RETURN, ra

#ifdef CONFIG_MIPS_L2_CACHE
	/*
	 * For there to be an L2 present, Config2 must be present. If it isn't
	 * then we proceed knowing there's no L2 cache.
	 */
	move	R_L2_SIZE, zero
	move	R_L2_LINE, zero
	move	R_L2_BYPASSED, zero
	move	R_L2_L2C, zero
	mfc0	t0, CP0_CONFIG, 1
	bgez	t0, l2_probe_done

	/*
	 * From MIPSr6 onwards the L2 cache configuration might not be reported
	 * by Config2. The Config5.L2C bit indicates whether this is the case,
	 * and if it is then we need knowledge of where else to look. For cores
	 * from Imagination Technologies this is a CM GCR.
	 */
# if __mips_isa_rev >= 6
	/* Check that Config5 exists */
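	/* Each Config register's M bit (bit 31) indicates the next one exists */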
	mfc0	t0, CP0_CONFIG, 2
	bgez	t0, l2_probe_cop0
	mfc0	t0, CP0_CONFIG, 3
	bgez	t0, l2_probe_cop0
	mfc0	t0, CP0_CONFIG, 4
	bgez	t0, l2_probe_cop0

	/* Check Config5.L2C is set */
	mfc0	t0, CP0_CONFIG, 5
	and	R_L2_L2C, t0, MIPS_CONF5_L2C
	beqz	R_L2_L2C, l2_probe_cop0

	/* Config5.L2C is set */
#  ifdef CONFIG_MIPS_CM
	/* The CM will provide L2 configuration */
	PTR_LI	t0, CKSEG1ADDR(CONFIG_MIPS_CM_BASE)
	lw	t1, GCR_L2_CONFIG(t0)
	bgez	t1, l2_probe_done

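	/*
	 * Decode the GCR L2 configuration: line size = 2 << LINESZ bytes
	 * (0 means no L2), ways = ASSOC + 1, and the total size computed
	 * below is line size * ways * (64 << SETSZ).
	 */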
	ext	R_L2_LINE, t1, \
		GCR_L2_CONFIG_LINESZ_SHIFT, GCR_L2_CONFIG_LINESZ_BITS
	beqz	R_L2_LINE, l2_probe_done
	li	t2, 2
	sllv	R_L2_LINE, t2, R_L2_LINE

	ext	t2, t1, GCR_L2_CONFIG_ASSOC_SHIFT, GCR_L2_CONFIG_ASSOC_BITS
	addiu	t2, t2, 1
	mul	R_L2_SIZE, R_L2_LINE, t2

	ext	t2, t1, GCR_L2_CONFIG_SETSZ_SHIFT, GCR_L2_CONFIG_SETSZ_BITS
	sllv	R_L2_SIZE, R_L2_SIZE, t2
	li	t2, 64
	mul	R_L2_SIZE, R_L2_SIZE, t2

	/* Bypass the L2 cache so that we can init the L1s early */
	or	t1, t1, GCR_L2_CONFIG_BYPASS
	sw	t1, GCR_L2_CONFIG(t0)
	sync
	li	R_L2_BYPASSED, 1

	/* Zero the L2 tag registers */
	sw	zero, GCR_L2_TAG_ADDR(t0)
	sw	zero, GCR_L2_TAG_ADDR_UPPER(t0)
	sw	zero, GCR_L2_TAG_STATE(t0)
	sw	zero, GCR_L2_TAG_STATE_UPPER(t0)
	sw	zero, GCR_L2_DATA(t0)
	sw	zero, GCR_L2_DATA_UPPER(t0)
	sync
#  else
	/* We don't know how to retrieve L2 configuration on this system */
#  endif
	b	l2_probe_done
# endif

	/*
	 * For pre-r6 systems, or r6 systems with Config5.L2C==0, probe the L2
	 * cache configuration from the cop0 Config2 register.
	 */
l2_probe_cop0:
	mfc0	t0, CP0_CONFIG, 2

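	/*
	 * Decode Config2: line size = 2 << SL bytes (SL == 0 means no L2),
	 * ways = SA + 1, total size = line size * ways * (64 << SS).
	 */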
	srl	R_L2_LINE, t0, MIPS_CONF2_SL_SHF
	andi	R_L2_LINE, R_L2_LINE, MIPS_CONF2_SL >> MIPS_CONF2_SL_SHF
	beqz	R_L2_LINE, l2_probe_done
	li	t1, 2
	sllv	R_L2_LINE, t1, R_L2_LINE

	srl	t1, t0, MIPS_CONF2_SA_SHF
	andi	t1, t1, MIPS_CONF2_SA >> MIPS_CONF2_SA_SHF
	addiu	t1, t1, 1
	mul	R_L2_SIZE, R_L2_LINE, t1

	srl	t1, t0, MIPS_CONF2_SS_SHF
	andi	t1, t1, MIPS_CONF2_SS >> MIPS_CONF2_SS_SHF
	sllv	R_L2_SIZE, R_L2_SIZE, t1
	li	t1, 64
	mul	R_L2_SIZE, R_L2_SIZE, t1

	/* Attempt to bypass the L2 so that we can init the L1s early */
	or	t0, t0, MIPS_CONF2_L2B
	mtc0	t0, CP0_CONFIG, 2
	ehb
	mfc0	t0, CP0_CONFIG, 2
	and	R_L2_BYPASSED, t0, MIPS_CONF2_L2B

	/* Zero the L2 tag registers */
	mtc0	zero, CP0_TAGLO, 4
	ehb
l2_probe_done:
#endif

#ifndef CONFIG_SYS_CACHE_SIZE_AUTO
	li	R_IC_SIZE, CONFIG_SYS_ICACHE_SIZE
	li	R_IC_LINE, CONFIG_SYS_ICACHE_LINE_SIZE
#else
	l1_info	R_IC_SIZE, R_IC_LINE, MIPS_CONF1_IA_SHF
#endif

#ifndef CONFIG_SYS_CACHE_SIZE_AUTO
	li	R_DC_SIZE, CONFIG_SYS_DCACHE_SIZE
	li	R_DC_LINE, CONFIG_SYS_DCACHE_LINE_SIZE
#else
	l1_info	R_DC_SIZE, R_DC_LINE, MIPS_CONF1_DA_SHF
#endif

#ifdef CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD

	/* Determine the largest L1 cache size */
#ifndef CONFIG_SYS_CACHE_SIZE_AUTO
#if CONFIG_SYS_ICACHE_SIZE > CONFIG_SYS_DCACHE_SIZE
	li	v0, CONFIG_SYS_ICACHE_SIZE
#else
	li	v0, CONFIG_SYS_DCACHE_SIZE
#endif
#else
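	/* v0 = max(R_IC_SIZE, R_DC_SIZE), selected via conditional move */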
	move	v0, R_IC_SIZE
	sltu	t1, R_IC_SIZE, R_DC_SIZE
	movn	v0, R_DC_SIZE, t1
#endif
	/*
	 * Now clear that much memory starting from physical address zero,
	 * written through uncached KSEG1, to provide a source of data with
	 * good parity.
	 */
	PTR_LI		a0, CKSEG1
	PTR_ADDU	a1, a0, v0
2:	PTR_ADDIU	a0, 64
	f_fill64	a0, -64, zero
	bne		a0, a1, 2b

#endif /* CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD */

#ifdef CONFIG_MIPS_L2_CACHE
	/*
	 * If the L2 is bypassed, init the L1 first so that we can execute the
	 * rest of the cache initialisation using the L1 instruction cache.
	 */
	bnez		R_L2_BYPASSED, l1_init

l2_init:
	PTR_LI		t0, INDEX_BASE
	PTR_ADDU	t1, t0, R_L2_SIZE
1:	cache		INDEX_STORE_TAG_SD, 0(t0)
	PTR_ADDU	t0, t0, R_L2_LINE
	bne		t0, t1, 1b

	/*
	 * If the L2 was bypassed then we already initialised the L1s before
	 * the L2, so we are now done.
	 */
	bnez		R_L2_BYPASSED, l2_unbypass
#endif

	/*
	 * The TagLo registers used depend upon the CPU implementation, but the
	 * architecture requires that it is safe for software to write to both
	 * TagLo selects 0 & 2 covering supported cases.
	 */
l1_init:
	mtc0		zero, CP0_TAGLO
	mtc0		zero, CP0_TAGLO, 2
	ehb

	/*
	 * The caches are probably in an indeterminate state, so we force good
	 * parity into them by doing an invalidate for each line. If
	 * CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD is set then we'll proceed to
	 * perform a load/fill & a further invalidate for each line, assuming
	 * that the bottom of RAM (having just been cleared) will generate good
	 * parity for the cache.
	 */

	/*
	 * Initialize the I-cache first,
	 */
	blez		R_IC_SIZE, 1f
	PTR_LI		t0, INDEX_BASE
	PTR_ADDU	t1, t0, R_IC_SIZE
	/* clear tag to invalidate */
	cache_loop	t0, t1, R_IC_LINE, INDEX_STORE_TAG_I
#ifdef CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD
	/* fill once, so data field parity is correct */
	PTR_LI		t0, INDEX_BASE
	cache_loop	t0, t1, R_IC_LINE, FILL
	/* invalidate again - prudent but not strictly necessary */
	PTR_LI		t0, INDEX_BASE
	cache_loop	t0, t1, R_IC_LINE, INDEX_STORE_TAG_I
#endif

	/* Enable use of the I-cache by setting Config.K0 */
	sync
	mfc0		t0, CP0_CONFIG
	li		t1, CONFIG_SYS_MIPS_CACHE_MODE
#if __mips_isa_rev >= 2
	ins		t0, t1, 0, 3
#else
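	/* Pre-r2: clear the CCA field with ori/xori, then OR in the new mode */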
	ori		t0, t0, CONF_CM_CMASK
	xori		t0, t0, CONF_CM_CMASK
	or		t0, t0, t1
#endif
	mtc0		t0, CP0_CONFIG

	/*
	 * then initialize D-cache.
	 */
1:	blez		R_DC_SIZE, 3f
	PTR_LI		t0, INDEX_BASE
	PTR_ADDU	t1, t0, R_DC_SIZE
	/* clear all tags */
	cache_loop	t0, t1, R_DC_LINE, INDEX_STORE_TAG_D
#ifdef CONFIG_SYS_MIPS_CACHE_INIT_RAM_LOAD
	/* load from each line (in cached space) */
	PTR_LI		t0, INDEX_BASE
2:	LONG_L		zero, 0(t0)
	PTR_ADDU	t0, R_DC_LINE
	bne		t0, t1, 2b
	/* clear all tags */
	PTR_LI		t0, INDEX_BASE
	cache_loop	t0, t1, R_DC_LINE, INDEX_STORE_TAG_D
#endif
3:

#ifdef CONFIG_MIPS_L2_CACHE
	/* If the L2 isn't bypassed then we're done */
	beqz		R_L2_BYPASSED, return

	/* The L2 is bypassed - go initialise it */
	b		l2_init

l2_unbypass:
# if __mips_isa_rev >= 6
	beqz		R_L2_L2C, 1f

	li		t0, CKSEG1ADDR(CONFIG_MIPS_CM_BASE)
	lw		t1, GCR_L2_CONFIG(t0)
	xor		t1, t1, GCR_L2_CONFIG_BYPASS
	sw		t1, GCR_L2_CONFIG(t0)
	sync
	ehb
	b		2f
# endif
1:	mfc0		t0, CP0_CONFIG, 2
	xor		t0, t0, MIPS_CONF2_L2B
	mtc0		t0, CP0_CONFIG, 2
	ehb

2:
# ifdef CONFIG_MIPS_CM
	/* Config3 must exist for a CM to be present */
	mfc0		t0, CP0_CONFIG, 1
	bgez		t0, 2f
	mfc0		t0, CP0_CONFIG, 2
	bgez		t0, 2f

	/* Check Config3.CMGCR to determine CM presence */
	mfc0		t0, CP0_CONFIG, 3
	and		t0, t0, MIPS_CONF3_CMGCR
	beqz		t0, 2f

	/* Change Config.K0 to a coherent CCA */
	mfc0		t0, CP0_CONFIG
	li		t1, CONF_CM_CACHABLE_COW
#if __mips_isa_rev >= 2
	ins		t0, t1, 0, 3
#else
	ori		t0, t0, CONF_CM_CMASK
	xori		t0, t0, CONF_CM_CMASK
	or		t0, t0, t1
#endif
	mtc0		t0, CP0_CONFIG

	/*
	 * Join the coherent domain such that the caches of this core are kept
	 * coherent with those of other cores.
	 */
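	/* CM3 onwards uses a coherence enable bit; earlier CMs use a domain mask */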
	PTR_LI		t0, CKSEG1ADDR(CONFIG_MIPS_CM_BASE)
	lw		t1, GCR_REV(t0)
	li		t2, GCR_REV_CM3
	li		t3, GCR_Cx_COHERENCE_EN
	bge		t1, t2, 1f
	li		t3, GCR_Cx_COHERENCE_DOM_EN
1:	sw		t3, GCR_Cx_COHERENCE(t0)
	ehb
2:
# endif
#endif

return:
	/* Ensure all cache operations complete before returning */
	sync
	jr	ra
	END(mips_cache_reset)

/*
 * dcache_status - get cache status
 *
 * RETURNS: 0 - cache disabled; 1 - cache enabled
 *
 */
LEAF(dcache_status)
	mfc0	t0, CP0_CONFIG
	li	t1, CONF_CM_UNCACHED
	andi	t0, t0, CONF_CM_CMASK
	move	v0, zero
	beq	t0, t1, 2f
	li	v0, 1
2:	jr	ra
	END(dcache_status)

/*
 * dcache_disable - disable cache
 *
 * RETURNS: N/A
 *
 */
LEAF(dcache_disable)
	mfc0	t0, CP0_CONFIG
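	/* Clear the low 3 Config.K0 CCA bits, then select the uncached mode */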
	li	t1, -8
	and	t0, t0, t1
	ori	t0, t0, CONF_CM_UNCACHED
	mtc0	t0, CP0_CONFIG
	jr	ra
	END(dcache_disable)

/*
 * dcache_enable - enable cache
 *
 * RETURNS: N/A
 *
 */
LEAF(dcache_enable)
	mfc0	t0, CP0_CONFIG
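	/* Clear the current Config.K0 CCA via ori/xori, then set the configured mode */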
	ori	t0, CONF_CM_CMASK
	xori	t0, CONF_CM_CMASK
	ori	t0, CONFIG_SYS_MIPS_CACHE_MODE
	mtc0	t0, CP0_CONFIG
	jr	ra
	END(dcache_enable)