/*
 * This file contains low-level cache management functions
 * used for sleep and CPU speed changes on Apple machines.
 * (In fact the only thing that is Apple-specific is that we assume
 * that we can read from ROM at physical address 0xfff00000.)
 *
 *    Copyright (C) 2004 Paul Mackerras (paulus@samba.org) and
 *                       Benjamin Herrenschmidt (benh@kernel.crashing.org)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 *
 */

#include <asm/processor.h>
#include <asm/ppc_asm.h>
#include <asm/cputable.h>
#include <asm/feature-fixups.h>

/*
 * Flush and disable all data caches (dL1, L2, L3). This is used
 * when going to sleep, when doing a PMU based cpufreq transition,
 * or when "offlining" a CPU on SMP machines. This code is overly
 * paranoid, but I've had enough issues with various CPU revs and
 * bugs that I decided it was worth being over-cautious.
 */
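/*
 * A rough sketch of the C-side view (the exact declaration site is an
 * assumption, not taken from this file): callers simply do
 *
 *	extern void flush_disable_caches(void);
 *	...
 *	flush_disable_caches();
 *
 * i.e. no arguments, no return value, and all data caches are left
 * disabled on return.
 */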

_GLOBAL(flush_disable_caches)
#ifndef CONFIG_6xx
	blr
#else
BEGIN_FTR_SECTION
	b	flush_disable_745x
END_FTR_SECTION_IFSET(CPU_FTR_SPEC7450)
BEGIN_FTR_SECTION
	b	flush_disable_75x
END_FTR_SECTION_IFSET(CPU_FTR_L2CR)
	b	__flush_disable_L1
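	/* Dispatch note: 745x-family CPUs take the flush_disable_745x
	 * path, other 6xx parts with an L2CR (750/G3, 7400/7410) take
	 * flush_disable_75x, and anything else only has the L1 to flush.
	 */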

/* This is the code for G3 and 74[01]0 */
flush_disable_75x:
	mflr	r10

	/* Turn off EE and DR in MSR */
	mfmsr	r11
	rlwinm	r0,r11,0,~MSR_EE
	rlwinm	r0,r0,0,~MSR_DR
	sync
	mtmsr	r0
	isync

	/* Stop DST streams */
BEGIN_FTR_SECTION
	DSSALL
	sync
END_FTR_SECTION_IFSET(CPU_FTR_ALTIVEC)

	/* Stop DPM */
	mfspr	r8,SPRN_HID0		/* Save SPRN_HID0 in r8 */
	rlwinm	r4,r8,0,12,10		/* Turn off HID0[DPM] */
	sync
	mtspr	SPRN_HID0,r4		/* Disable DPM */
	sync
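	/* The rlwinm above uses a wrap-around mask (MB=12, ME=10), which
	 * keeps every bit except bit 11. Bit 11 of HID0 is DPM (dynamic
	 * power management, HID0_DPM = 0x00100000), so the net effect is
	 * r4 = r8 with only the DPM bit cleared.
	 */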

	/* Disp-flush L1. We have a weird problem here that I never
	 * totally figured out. On 750FX, using the ROM for the flush
	 * results in a non-working flush. We use that workaround for
	 * now until I finally understand what's going on. --BenH
	 */
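	/* For reference: a PVR with 0x7000 in its upper half is a 750FX.
	 * The displacement loop below runs 0x4000 (16384) iterations at a
	 * 32-byte stride, i.e. 512KB of loads, several times the size of
	 * the 32KB L1 data cache it is meant to displace.
	 */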

	/* ROM base by default */
	lis	r4,0xfff0
	mfpvr	r3
	srwi	r3,r3,16
	cmplwi	cr0,r3,0x7000
	bne+	1f
	/* RAM base on 750FX */
	li	r4,0
1:	li	r4,0x4000
	mtctr	r4
1:	lwz	r0,0(r4)
	addi	r4,r4,32
	bdnz	1b
	sync
	isync

	/* Disable / invalidate / enable L1 data */
	mfspr	r3,SPRN_HID0
	rlwinm	r3,r3,0,~(HID0_DCE | HID0_ICE)
	mtspr	SPRN_HID0,r3
	sync
	isync
	ori	r3,r3,(HID0_DCE|HID0_DCI|HID0_ICE|HID0_ICFI)
	sync
	isync
	mtspr	SPRN_HID0,r3
	xori	r3,r3,(HID0_DCI|HID0_ICFI)
	mtspr	SPRN_HID0,r3
	sync

	/* Get the current L2CR value into r5 */
	mfspr	r5,SPRN_L2CR
	/* Set to data-only (pre-745x bit) */
	oris	r3,r5,L2CR_L2DO@h
	b	2f
	/* When disabling L2, code must be in L1 */
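	/* The branch dance below is that trick: the .balign keeps the next
	 * eight instructions in one 32-byte line, "b 2f" first executes the
	 * tail of that line (b 3f / sync / isync), which pulls the whole
	 * line into the instruction cache, and only then does "b 1b" land
	 * on the mtspr, so no instruction fetch has to go through the L2
	 * while it is being reconfigured.
	 */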
	.balign 32
1:	mtspr	SPRN_L2CR,r3
3:	sync
	isync
	b	1f
2:	b	3f
3:	sync
	isync
	b	1b
1:	/* disp-flush L2. The interesting thing here is that the L2 can be
	 * up to 2MB ... so using the ROM, we'll end up wrapping back to memory,
	 * but that is probably fine. We disp-flush over 4MB to be safe.
	 */
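	/* lis r4,2 puts 0x20000 in CTR: 0x20000 lines x 32 bytes = 4MB,
	 * covered twice below, first with loads and then with dcbf.
	 */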
	lis	r4,2
	mtctr	r4
	lis	r4,0xfff0
1:	lwz	r0,0(r4)
	addi	r4,r4,32
	bdnz	1b
	sync
	isync
	lis	r4,2
	mtctr	r4
	lis	r4,0xfff0
1:	dcbf	0,r4
	addi	r4,r4,32
	bdnz	1b
	sync
	isync

	/* now disable L2 */
	rlwinm	r5,r5,0,~L2CR_L2E
	b	2f
	/* When disabling L2, code must be in L1 */
	.balign 32
1:	mtspr	SPRN_L2CR,r5
3:	sync
	isync
	b	1f
2:	b	3f
3:	sync
	isync
	b	1b
1:	sync
	isync
	/* Invalidate L2. This is pre-745x, we clear the L2I bit ourselves */
	oris	r4,r5,L2CR_L2I@h
	mtspr	SPRN_L2CR,r4
	sync
	isync

	/* Wait for the invalidation to complete */
1:	mfspr	r3,SPRN_L2CR
	rlwinm.	r0,r3,0,31,31
	bne	1b
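	/* The rlwinm. above isolates L2CR bit 31 (the least significant
	 * bit), which on these pre-745x parts is L2IP, "L2 invalidate in
	 * progress"; we spin until the hardware clears it.
	 */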

	/* Clear L2I */
	xoris	r4,r4,L2CR_L2I@h
	sync
	mtspr	SPRN_L2CR,r4
	sync

	/* now disable the L1 data cache */
	mfspr	r0,SPRN_HID0
	rlwinm	r0,r0,0,~(HID0_DCE|HID0_ICE)
	mtspr	SPRN_HID0,r0
	sync
	isync

	/* Restore HID0[DPM] to whatever it was before */
	sync
	mfspr	r0,SPRN_HID0
	rlwimi	r0,r8,0,11,11		/* Turn back HID0[DPM] */
	mtspr	SPRN_HID0,r0
	sync
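	/* rlwimi with MB=ME=11 copies only bit 11 (HID0[DPM]) from the
	 * saved value in r8 into the current HID0 image, leaving every
	 * other bit untouched.
	 */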

	/* restore DR and EE */
	sync
	mtmsr	r11
	isync

	mtlr	r10
	blr

/* This code is for 745x processors */
flush_disable_745x:
	/* Turn off EE and DR in MSR */
	mfmsr	r11
	rlwinm	r0,r11,0,~MSR_EE
	rlwinm	r0,r0,0,~MSR_DR
	sync
	mtmsr	r0
	isync

	/* Stop prefetch streams */
	DSSALL
	sync

	/* Disable L2 prefetching */
	mfspr	r0,SPRN_MSSCR0
	rlwinm	r0,r0,0,0,29
	mtspr	SPRN_MSSCR0,r0
	sync
	isync
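	/* rlwinm with mask 0..29 keeps MSSCR0 bits 0-29 and clears the two
	 * low-order bits, which per the comment above are the L2 prefetch
	 * enables on the 745x family.
	 */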
	lis	r4,0
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4
	dcbf	0,r4

	/* Due to a bug with the HW flush on some CPU revs, we occasionally
	 * experience data corruption. I'm adding a displacement flush along
	 * with a dcbf loop over a few MB to "help". In theory the problem
	 * isn't totally fixed by this, but in practice I couldn't reproduce
	 * it even with a big hammer...
	 */
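	/* Both loops below load 0x20000 into CTR via lis r4,0x0002:
	 * 0x20000 iterations at a 32-byte stride covers 4MB, first with
	 * loads and then with dcbf.
	 */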

	lis	r4,0x0002
	mtctr	r4
	li	r4,0
1:
	lwz	r0,0(r4)
	addi	r4,r4,32		/* Go to start of next cache line */
	bdnz	1b
	isync

	/* Now, flush the first 4MB of memory */
	lis	r4,0x0002
	mtctr	r4
	li	r4,0
	sync
1:
	dcbf	0,r4
	addi	r4,r4,32		/* Go to start of next cache line */
	bdnz	1b

	/* Flush and disable the L1 data cache */
	mfspr	r6,SPRN_LDSTCR
	lis	r3,0xfff0	/* read from ROM for displacement flush */
	li	r4,0xfe		/* start with only way 0 unlocked */
	li	r5,128		/* 128 lines in each way */
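	/* This walks the 32KB, 8-way L1 one way at a time: a set bit in
	 * LDSTCR's low byte locks that way, so 0xfe leaves only way 0
	 * usable; 128 lines x 32 bytes = 4KB of ROM reads then displace
	 * that way, and the rlwinm/ori pair moves the single unlocked way
	 * from 0 through 7 (0xfe, 0xfd, 0xfb, ... 0x7f).
	 */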
1:	mtctr	r5
	rlwimi	r6,r4,0,24,31
	mtspr	SPRN_LDSTCR,r6
	sync
	isync
2:	lwz	r0,0(r3)	/* touch each cache line */
	addi	r3,r3,32
	bdnz	2b
	rlwinm	r4,r4,1,24,30	/* move on to the next way */
	ori	r4,r4,1
	cmpwi	r4,0xff		/* all done? */
	bne	1b
	/* now unlock the L1 data cache */
	li	r4,0
	rlwimi	r6,r4,0,24,31
	sync
	mtspr	SPRN_LDSTCR,r6
	sync
	isync

	/* Flush the L2 cache using the hardware assist */
	mfspr	r3,SPRN_L2CR
	cmpwi	r3,0		/* check if it is enabled first */
	bge	4f
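	/* L2CR_L2E is the most significant bit of L2CR, so the signed
	 * compare against zero above is a cheap way to test it: bge skips
	 * ahead when the L2 is not enabled.
	 */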
	oris	r0,r3,(L2CR_L2IO_745x|L2CR_L2DO_745x)@h
	b	2f
	/* When disabling/locking L2, code must be in L1 */
	.balign 32
1:	mtspr	SPRN_L2CR,r0	/* lock the L2 cache */
3:	sync
	isync
	b	1f
2:	b	3f
3:	sync
	isync
	b	1b
1:	sync
	isync
	ori	r0,r3,L2CR_L2HWF_745x
	sync
	mtspr	SPRN_L2CR,r0	/* set the hardware flush bit */
3:	mfspr	r0,SPRN_L2CR	/* wait for it to go to 0 */
	andi.	r0,r0,L2CR_L2HWF_745x
	bne	3b
	sync
	rlwinm	r3,r3,0,~L2CR_L2E
	b	2f
	/* When disabling L2, code must be in L1 */
	.balign 32
1:	mtspr	SPRN_L2CR,r3	/* disable the L2 cache */
3:	sync
	isync
	b	1f
2:	b	3f
3:	sync
	isync
	b	1b
1:	sync
	isync
	oris	r4,r3,L2CR_L2I@h
	mtspr	SPRN_L2CR,r4
	sync
	isync
1:	mfspr	r4,SPRN_L2CR
	andis.	r0,r4,L2CR_L2I@h
	bne	1b
	sync

BEGIN_FTR_SECTION
	/* Flush the L3 cache using the hardware assist */
4:	mfspr	r3,SPRN_L3CR
	cmpwi	r3,0		/* check if it is enabled */
	bge	6f
	oris	r0,r3,L3CR_L3IO@h
	ori	r0,r0,L3CR_L3DO
	sync
	mtspr	SPRN_L3CR,r0	/* lock the L3 cache */
	sync
	isync
	ori	r0,r0,L3CR_L3HWF
	sync
	mtspr	SPRN_L3CR,r0	/* set the hardware flush bit */
5:	mfspr	r0,SPRN_L3CR	/* wait for it to go to zero */
	andi.	r0,r0,L3CR_L3HWF
	bne	5b
	rlwinm	r3,r3,0,~L3CR_L3E
	sync
	mtspr	SPRN_L3CR,r3	/* disable the L3 cache */
	sync
	ori	r4,r3,L3CR_L3I
	mtspr	SPRN_L3CR,r4
1:	mfspr	r4,SPRN_L3CR
	andi.	r0,r4,L3CR_L3I
	bne	1b
	sync
END_FTR_SECTION_IFSET(CPU_FTR_L3CR)

6:	mfspr	r0,SPRN_HID0	/* now disable the L1 data cache */
	rlwinm	r0,r0,0,~HID0_DCE
	mtspr	SPRN_HID0,r0
	sync
	isync
	mtmsr	r11		/* restore DR and EE */
	isync
	blr
#endif	/* CONFIG_6xx */