/*
 * This file contains low level CPU setup functions.
 *    Copyright (C) 2003 Benjamin Herrenschmidt (benh@kernel.crashing.org)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 *
 */

#include <asm/processor.h>
#include <asm/page.h>
#include <asm/cputable.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>
#include <asm/cache.h>
#include <asm/mmu.h>
#include <asm/feature-fixups.h>

_GLOBAL(__setup_cpu_603)
	mflr	r5
BEGIN_MMU_FTR_SECTION
	li	r10,0
	mtspr	SPRN_SPRG_603_LRU,r10		/* init SW LRU tracking */
END_MMU_FTR_SECTION_IFSET(MMU_FTR_NEED_DTLB_SW_LRU)
	lis	r10, (swapper_pg_dir - PAGE_OFFSET)@h
	ori	r10, r10, (swapper_pg_dir - PAGE_OFFSET)@l
	mtspr	SPRN_SPRG_PGDIR, r10

BEGIN_FTR_SECTION
	bl	__init_fpu_registers
END_FTR_SECTION_IFCLR(CPU_FTR_FPU_UNAVAILABLE)
	bl	setup_common_caches
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_604)
	mflr	r5
	bl	setup_common_caches
	bl	setup_604_hid0
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_750)
	mflr	r5
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_750cx)
	mflr	r5
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	bl	setup_750cx
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_750fx)
	mflr	r5
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	bl	setup_750fx
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_7400)
	mflr	r5
	bl	__init_fpu_registers
	bl	setup_7400_workarounds
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_7410)
	mflr	r5
	bl	__init_fpu_registers
	bl	setup_7410_workarounds
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	li	r3,0
	mtspr	SPRN_L2CR2,r3
	mtlr	r5
	blr
_GLOBAL(__setup_cpu_745x)
	mflr	r5
	bl	setup_common_caches
	bl	setup_745x_specifics
	mtlr	r5
	blr

/* Enable caches for 603's, 604, 750 & 7400 */
setup_common_caches:
	mfspr	r11,SPRN_HID0
	andi.	r0,r11,HID0_DCE
	ori	r11,r11,HID0_ICE|HID0_DCE
	ori	r8,r11,HID0_ICFI
	bne	1f			/* don't invalidate the D-cache */
	ori	r8,r8,HID0_DCI		/* unless it wasn't enabled */
1:	sync
	mtspr	SPRN_HID0,r8		/* enable and invalidate caches */
	sync
	mtspr	SPRN_HID0,r11		/* enable caches */
	sync
	isync
	blr
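
/* The sequence above as a minimal C sketch (illustration only; the
 * mfspr()/mtspr() accessors stand in for the instructions above):
 *
 *	u32 hid0 = mfspr(SPRN_HID0);
 *	bool dcache_was_on = hid0 & HID0_DCE;
 *	u32 on  = hid0 | HID0_ICE | HID0_DCE;
 *	u32 inv = on | HID0_ICFI;	// always flash-invalidate the I-cache
 *	if (!dcache_was_on)
 *		inv |= HID0_DCI;	// invalidate D-cache only if it was off
 *	mtspr(SPRN_HID0, inv);		// enable and invalidate
 *	mtspr(SPRN_HID0, on);		// leave the caches enabled
 */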

/* 604, 604e, 604ev, ...
 * Enable superscalar execution & branch history table
 */
setup_604_hid0:
	mfspr	r11,SPRN_HID0
	ori	r11,r11,HID0_SIED|HID0_BHTE
	ori	r8,r11,HID0_BTCD
	sync
	mtspr	SPRN_HID0,r8	/* flush branch target address cache */
	sync			/* on 604e/604r */
	mtspr	SPRN_HID0,r11
	sync
	isync
	blr

/* 7400 <= rev 2.7 and 7410 rev = 1.0 suffer from some
 * errata that we work around here.
 * Moto MPC710CE.pdf describes them; they are errata
 * #3, #4 and #5.
 * Note that we assume the firmware didn't choose to
 * apply other workarounds (there are other ones documented
 * in the .pdf). It appears that Apple firmware only works
 * around #3, and with the same fix we use. We may want to
 * check if the CPU is using 60x bus mode, in which case
 * the workaround for errata #4 is useless. Also, we may
 * want to explicitly clear HID0_NOPDST as this is not
 * needed once we have applied workaround #5 (though it's
 * not set by Apple's firmware, at least).
 */
setup_7400_workarounds:
	mfpvr	r3
	rlwinm	r3,r3,0,20,31
	cmpwi	0,r3,0x0207
	ble	1f
	blr
setup_7410_workarounds:
	mfpvr	r3
	rlwinm	r3,r3,0,20,31
	cmpwi	0,r3,0x0100
	bnelr
1:
	mfspr	r11,SPRN_MSSSR0
	/* Errata #3: Set L1OPQ_SIZE to 0x10 */
	rlwinm	r11,r11,0,9,6
	oris	r11,r11,0x0100
	/* Errata #4: Set L2MQ_SIZE to 1 (check for MPX mode first ?) */
	oris	r11,r11,0x0002
	/* Errata #5: Set DRLT_SIZE to 0x01 */
	rlwinm	r11,r11,0,5,2
	oris	r11,r11,0x0800
	sync
	mtspr	SPRN_MSSSR0,r11
	sync
	isync
	blr
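
/* The MSSSR0 update above in C, as a sketch (the masks are derived from
 * the rlwinm/oris encodings above, not taken from a header; verify
 * against the MPC7400/7410 manual before relying on them):
 *
 *	u32 v = mfspr(SPRN_MSSSR0);
 *	v = (v & ~0x01800000u) | 0x01000000u;	// #3: L1OPQ_SIZE = 0b10
 *	v |= 0x00020000u;			// #4: L2MQ_SIZE  = 1
 *	v = (v & ~0x18000000u) | 0x08000000u;	// #5: DRLT_SIZE  = 0b01
 *	mtspr(SPRN_MSSSR0, v);
 */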

/* 740/750/7400/7410
 * Enable Store Gathering (SGE), Address Broadcast (ABE),
 * Branch History Table (BHTE), Branch Target ICache (BTIC)
 * and Dynamic Power Management (DPM); clear the Speculative
 * access disable (SPD) and Instruction Cache Throttling (ICTC).
 */
setup_750_7400_hid0:
	mfspr	r11,SPRN_HID0
	ori	r11,r11,HID0_SGE | HID0_ABE | HID0_BHTE | HID0_BTIC
	oris	r11,r11,HID0_DPM@h
BEGIN_FTR_SECTION
	xori	r11,r11,HID0_BTIC
END_FTR_SECTION_IFSET(CPU_FTR_NO_BTIC)
BEGIN_FTR_SECTION
	xoris	r11,r11,HID0_DPM@h	/* disable dynamic power mgmt */
END_FTR_SECTION_IFSET(CPU_FTR_NO_DPM)
	li	r3,HID0_SPD
	andc	r11,r11,r3		/* clear SPD: enable speculative */
	li	r3,0
	mtspr	SPRN_ICTC,r3		/* Instruction Cache Throttling off */
	isync
	mtspr	SPRN_HID0,r11
	sync
	isync
	blr
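
/* Equivalent logic of the feature sections above, sketched in C. The
 * xori/xoris work because the bits were unconditionally set just before,
 * so XOR-ing flips them back off when the feature section is active:
 *
 *	u32 hid0 = mfspr(SPRN_HID0);
 *	hid0 |= HID0_SGE | HID0_ABE | HID0_BHTE | HID0_BTIC | HID0_DPM;
 *	if (cpu_has_feature(CPU_FTR_NO_BTIC))
 *		hid0 &= ~HID0_BTIC;
 *	if (cpu_has_feature(CPU_FTR_NO_DPM))
 *		hid0 &= ~HID0_DPM;
 *	hid0 &= ~HID0_SPD;		// allow speculative accesses
 *	mtspr(SPRN_ICTC, 0);		// throttling off
 *	mtspr(SPRN_HID0, hid0);
 */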
188
189/* 750cx specific
190 * Looks like we have to disable NAP feature for some PLL settings...
191 * (waiting for confirmation)
192 */
193setup_750cx:
194	mfspr	r10, SPRN_HID1
195	rlwinm	r10,r10,4,28,31
196	cmpwi	cr0,r10,7
197	cmpwi	cr1,r10,9
198	cmpwi	cr2,r10,11
199	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq
200	cror	4*cr0+eq,4*cr0+eq,4*cr2+eq
201	bnelr
202	lwz	r6,CPU_SPEC_FEATURES(r4)
203	li	r7,CPU_FTR_CAN_NAP
204	andc	r6,r6,r7
205	stw	r6,CPU_SPEC_FEATURES(r4)
206	blr
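
/* In C, roughly (r4 holds the struct cpu_spec pointer on entry, per the
 * __setup_cpu_* calling convention; "spec" below is that pointer):
 *
 *	unsigned int pll = mfspr(SPRN_HID1) >> 28;	// PLL config nibble
 *	if (pll == 7 || pll == 9 || pll == 11)
 *		spec->cpu_features &= ~CPU_FTR_CAN_NAP;
 */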

/* 750fx specific
 */
setup_750fx:
	blr

/* MPC 745x
 * Enable Store Gathering (SGE), Branch Folding (FOLD),
 * Branch History Table (BHTE), Branch Target ICache (BTIC)
 * and Dynamic Power Management (DPM); clear the Speculative
 * access disable (SPD) and Instruction Cache Throttling (ICTC);
 * clear NOPDST/NOPTI so our data cache instructions really operate.
 * Timebase has to be running or we wouldn't have made it here,
 * just ensure we don't disable it.
 * Enable L2 HW prefetch.
 */
setup_745x_specifics:
	/* We check for the presence of an L3 cache setup by
	 * the firmware. If one is present, we disable the NAP
	 * capability as it's known to be bogus on rev 2.1 and earlier
	 */
BEGIN_FTR_SECTION
	mfspr	r11,SPRN_L3CR
	andis.	r11,r11,L3CR_L3E@h
	beq	1f
END_FTR_SECTION_IFSET(CPU_FTR_L3CR)
	lwz	r6,CPU_SPEC_FEATURES(r4)
	andis.	r0,r6,CPU_FTR_L3_DISABLE_NAP@h
	beq	1f
	li	r7,CPU_FTR_CAN_NAP
	andc	r6,r6,r7
	stw	r6,CPU_SPEC_FEATURES(r4)
1:
	mfspr	r11,SPRN_HID0

	/* All of the bits we have to set.....
	 */
	ori	r11,r11,HID0_SGE | HID0_FOLD | HID0_BHTE
	ori	r11,r11,HID0_LRSTK | HID0_BTIC
	oris	r11,r11,HID0_DPM@h
BEGIN_MMU_FTR_SECTION
	oris	r11,r11,HID0_HIGH_BAT@h
END_MMU_FTR_SECTION_IFSET(MMU_FTR_USE_HIGH_BATS)
BEGIN_FTR_SECTION
	xori	r11,r11,HID0_BTIC
END_FTR_SECTION_IFSET(CPU_FTR_NO_BTIC)
BEGIN_FTR_SECTION
	xoris	r11,r11,HID0_DPM@h	/* disable dynamic power mgmt */
END_FTR_SECTION_IFSET(CPU_FTR_NO_DPM)

	/* All of the bits we have to clear....
	 */
	li	r3,HID0_SPD | HID0_NOPDST | HID0_NOPTI
	andc	r11,r11,r3		/* clear SPD: enable speculative */
	li	r3,0

	mtspr	SPRN_ICTC,r3		/* Instruction Cache Throttling off */
	isync
	mtspr	SPRN_HID0,r11
	sync
	isync

	/* Enable L2 HW prefetch, if L2 is enabled
	 */
	mfspr	r3,SPRN_L2CR
	andis.	r3,r3,L2CR_L2E@h
	beqlr
	mfspr	r3,SPRN_MSSCR0
	ori	r3,r3,3
	sync
	mtspr	SPRN_MSSCR0,r3
	sync
	isync
	blr
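
/* The NAP-disable and L2 prefetch logic above, as a C sketch (the L3CR
 * read is additionally guarded by a CPU_FTR_L3CR feature section in the
 * asm, and the low two MSSCR0 bits come from the "ori r3,r3,3" above;
 * their symbolic names are not spelled out here):
 *
 *	if ((mfspr(SPRN_L3CR) & L3CR_L3E) &&
 *	    (spec->cpu_features & CPU_FTR_L3_DISABLE_NAP))
 *		spec->cpu_features &= ~CPU_FTR_CAN_NAP;
 *	...
 *	if (mfspr(SPRN_L2CR) & L2CR_L2E)
 *		mtspr(SPRN_MSSCR0, mfspr(SPRN_MSSCR0) | 3);
 */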

/*
 * Initialize the FPU registers. This is needed to work around an erratum
 * in some 750 CPUs where using a not-yet-initialized FPU register after
 * power-on reset may hang the CPU.
 */
_GLOBAL(__init_fpu_registers)
	mfmsr	r10
	ori	r11,r10,MSR_FP
	mtmsr	r11
	isync
	addis	r9,r3,empty_zero_page@ha
	addi	r9,r9,empty_zero_page@l
	REST_32FPRS(0,r9)
	sync
	mtmsr	r10
	isync
	blr
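
/* What happens above, in outline: temporarily set MSR[FP], load all 32
 * FPRs with zeros from empty_zero_page (REST_32FPRS expands to a run of
 * lfd loads, one per register), then restore the original MSR. A
 * hypothetical C rendering, with load_fpr() standing in for lfd:
 *
 *	unsigned long msr = mfmsr();
 *	mtmsr(msr | MSR_FP);
 *	for (int i = 0; i < 32; i++)
 *		load_fpr(i, empty_zero_page + 8 * i);
 *	mtmsr(msr);
 */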

/* Definitions for the table used to save CPU state */
#define CS_HID0		0
#define CS_HID1		4
#define CS_HID2		8
#define CS_MSSCR0	12
#define CS_MSSSR0	16
#define CS_ICTRL	20
#define CS_LDSTCR	24
#define CS_LDSTDB	28
#define CS_SIZE		32
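
/* Laid out as a struct, the save area would look like this (a sketch;
 * the code uses the raw offsets above, not a struct):
 *
 *	struct cpu_state_save {
 *		u32 hid0;	// CS_HID0   =  0
 *		u32 hid1;	// CS_HID1   =  4
 *		u32 hid2;	// CS_HID2   =  8
 *		u32 msscr0;	// CS_MSSCR0 = 12
 *		u32 msssr0;	// CS_MSSSR0 = 16
 *		u32 ictrl;	// CS_ICTRL  = 20
 *		u32 ldstcr;	// CS_LDSTCR = 24
 *		u32 ldstdb;	// CS_LDSTDB = 28
 *	};			// sizeof == CS_SIZE == 32
 */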

	.data
	.balign	L1_CACHE_BYTES
cpu_state_storage:
	.space	CS_SIZE
	.balign	L1_CACHE_BYTES,0
	.text

/* Called in normal context to back up CPU 0 state. This
 * function is also called for machine sleep. It does not
 * include cache settings or the MMU setup, BATs, etc...
 * but rather the "special" registers like HID0, HID1,
 * MSSCR0, etc...
 */
_GLOBAL(__save_cpu_setup)
	/* Some CR fields are volatile, we back them all up */
	mfcr	r7

	/* Get storage ptr */
	lis	r5,cpu_state_storage@h
	ori	r5,r5,cpu_state_storage@l

	/* Save HID0 (common to all CONFIG_PPC_BOOK3S_32 cpus) */
	mfspr	r3,SPRN_HID0
	stw	r3,CS_HID0(r5)

	/* Now deal with CPU type dependent registers */
	mfspr	r3,SPRN_PVR
	srwi	r3,r3,16
	cmplwi	cr0,r3,0x8000	/* 7450 */
	cmplwi	cr1,r3,0x000c	/* 7400 */
	cmplwi	cr2,r3,0x800c	/* 7410 */
	cmplwi	cr3,r3,0x8001	/* 7455 */
	cmplwi	cr4,r3,0x8002	/* 7457 */
	cmplwi	cr5,r3,0x8003	/* 7447A */
	cmplwi	cr6,r3,0x7000	/* 750FX */
	cmplwi	cr7,r3,0x8004	/* 7448 */
	/* cr1 is 7400 || 7410 */
	cror	4*cr1+eq,4*cr1+eq,4*cr2+eq
	/* cr0 is 74xx */
	cror	4*cr0+eq,4*cr0+eq,4*cr3+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr4+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr5+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr7+eq
	bne	1f
	/* Backup 74xx specific regs */
	mfspr	r4,SPRN_MSSCR0
	stw	r4,CS_MSSCR0(r5)
	mfspr	r4,SPRN_MSSSR0
	stw	r4,CS_MSSSR0(r5)
	beq	cr1,1f
	/* Backup 745x specific registers */
	mfspr	r4,SPRN_HID1
	stw	r4,CS_HID1(r5)
	mfspr	r4,SPRN_ICTRL
	stw	r4,CS_ICTRL(r5)
	mfspr	r4,SPRN_LDSTCR
	stw	r4,CS_LDSTCR(r5)
	mfspr	r4,SPRN_LDSTDB
	stw	r4,CS_LDSTDB(r5)
1:
	bne	cr6,1f
	/* Backup 750FX specific registers */
	mfspr	r4,SPRN_HID1
	stw	r4,CS_HID1(r5)
	/* If rev 2.x, backup HID2 */
	mfspr	r3,SPRN_PVR
	andi.	r3,r3,0xff00
	cmpwi	cr0,r3,0x0200
	bne	1f
	mfspr	r4,SPRN_HID2
	stw	r4,CS_HID2(r5)
1:
	mtcr	r7
	blr
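
/* The PVR dispatch above, sketched in C: the cror chains simply OR the
 * individual EQ bits together so a single branch can cover a family.
 *
 *	u32 pvr = mfspr(SPRN_PVR) >> 16;
 *	bool is_7400_7410 = (pvr == 0x000c || pvr == 0x800c);
 *	bool is_74xx = is_7400_7410 || pvr == 0x8000 || pvr == 0x8001 ||
 *		       pvr == 0x8002 || pvr == 0x8003 || pvr == 0x8004;
 *	bool is_750fx = (pvr == 0x7000);
 */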

/* Called with no MMU context (typically MSR:IR/DR off) to
 * restore CPU state as backed up by the previous
 * function. This does not include cache settings
 */
_GLOBAL(__restore_cpu_setup)
	/* Some CR fields are volatile, we back them all up */
	mfcr	r7

	/* Get storage ptr (physical address; this assumes the low
	 * 16 bits of KERNELBASE are zero, so the @l half needs no
	 * adjustment)
	 */
	lis	r5,(cpu_state_storage-KERNELBASE)@h
	ori	r5,r5,cpu_state_storage@l

	/* Restore HID0 */
	lwz	r3,CS_HID0(r5)
	sync
	isync
	mtspr	SPRN_HID0,r3
	sync
	isync

	/* Now deal with CPU type dependent registers */
	mfspr	r3,SPRN_PVR
	srwi	r3,r3,16
	cmplwi	cr0,r3,0x8000	/* 7450 */
	cmplwi	cr1,r3,0x000c	/* 7400 */
	cmplwi	cr2,r3,0x800c	/* 7410 */
	cmplwi	cr3,r3,0x8001	/* 7455 */
	cmplwi	cr4,r3,0x8002	/* 7457 */
	cmplwi	cr5,r3,0x8003	/* 7447A */
	cmplwi	cr6,r3,0x7000	/* 750FX */
	cmplwi	cr7,r3,0x8004	/* 7448 */
	/* cr1 is 7400 || 7410 */
	cror	4*cr1+eq,4*cr1+eq,4*cr2+eq
	/* cr0 is 74xx */
	cror	4*cr0+eq,4*cr0+eq,4*cr3+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr4+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr5+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr7+eq
	bne	2f
	/* Restore 74xx specific regs */
	lwz	r4,CS_MSSCR0(r5)
	sync
	mtspr	SPRN_MSSCR0,r4
	sync
	isync
	lwz	r4,CS_MSSSR0(r5)
	sync
	mtspr	SPRN_MSSSR0,r4
	sync
	isync
	bne	cr2,1f
	/* Clear 7410 L2CR2 */
	li	r4,0
	mtspr	SPRN_L2CR2,r4
1:	beq	cr1,2f
	/* Restore 745x specific registers */
	lwz	r4,CS_HID1(r5)
	sync
	mtspr	SPRN_HID1,r4
	isync
	sync
	lwz	r4,CS_ICTRL(r5)
	sync
	mtspr	SPRN_ICTRL,r4
	isync
	sync
	lwz	r4,CS_LDSTCR(r5)
	sync
	mtspr	SPRN_LDSTCR,r4
	isync
	sync
	lwz	r4,CS_LDSTDB(r5)
	sync
	mtspr	SPRN_LDSTDB,r4
	isync
	sync
2:	bne	cr6,1f
	/* Restore 750FX specific registers: restore HID2 on
	 * rev 2.x, then restore the PLL config and switch to
	 * PLL 0 on all
	 */
	/* If rev 2.x, restore HID2 with low voltage bit cleared */
	mfspr	r3,SPRN_PVR
	andi.	r3,r3,0xff00
	cmpwi	cr0,r3,0x0200
	bne	4f
	lwz	r4,CS_HID2(r5)
	rlwinm	r4,r4,0,19,17
	mtspr	SPRN_HID2,r4
	sync
4:
	lwz	r4,CS_HID1(r5)
	rlwinm	r5,r4,0,16,14
	mtspr	SPRN_HID1,r5
	/* Wait for PLL to stabilize */
	mftbl	r5
3:	mftbl	r6
	sub	r6,r6,r5
	cmplwi	cr0,r6,10000
	ble	3b
	/* Setup final PLL */
	mtspr	SPRN_HID1,r4
1:
	mtcr	r7
	blr
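
/* The PLL switch above, in outline (a C sketch; mftb() stands in for
 * the mftbl reads, and the 0x00010000 mask is derived from the
 * "rlwinm r5,r4,0,16,14" above, which clears the PLL-select bit):
 *
 *	u32 hid1 = saved_hid1;
 *	mtspr(SPRN_HID1, hid1 & ~0x00010000u);	// switch to PLL 0 first
 *	u32 start = mftb();
 *	while (mftb() - start <= 10000)
 *		;				// wait for PLL to stabilize
 *	mtspr(SPRN_HID1, hid1);			// then the final PLL config
 */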