xref: /openbmc/linux/arch/powerpc/include/asm/reg_8xx.h (revision de2bdb3d)
/*
 * Contains register definitions common to PowerPC 8xx CPUs.
 */
4 #ifndef _ASM_POWERPC_REG_8xx_H
5 #define _ASM_POWERPC_REG_8xx_H
6 
7 #include <asm/mmu-8xx.h>
8 
/* Cache control on the MPC8xx is provided through some additional
 * special purpose registers.  SPR numbers are decimal, as encoded in
 * the mfspr/mtspr instructions.
 */
#define SPRN_IC_CST	560	/* Instruction cache control/status */
#define SPRN_IC_ADR	561	/* Address needed for some commands */
#define SPRN_IC_DAT	562	/* Read-only data register */
#define SPRN_DC_CST	568	/* Data cache control/status */
#define SPRN_DC_ADR	569	/* Address needed for some commands */
#define SPRN_DC_DAT	570	/* Read-only data register */
/* Misc Debug */
#define SPRN_DPDR	630	/* Development port data register */
/* Debug access to the ITLB/DTLB CAM and RAM entries
 * (NOTE(review): MI_* = instruction MMU, MD_* = data MMU, matching the
 * MI_/MD_ naming used by the mtspr() override below -- confirm against
 * the MPC8xx reference manual).
 */
#define SPRN_MI_CAM	816
#define SPRN_MI_RAM0	817
#define SPRN_MI_RAM1	818
#define SPRN_MD_CAM	824
#define SPRN_MD_RAM0	825
#define SPRN_MD_RAM1	826
/* Special MSR manipulation registers: writing these SPRs atomically
 * updates the MSR bits noted below, without a full mfmsr/mtmsr sequence.
 */
#define SPRN_EIE	80	/* External interrupt enable (EE=1, RI=1) */
#define SPRN_EID	81	/* External interrupt disable (EE=0, RI=1) */
/* Commands, written to IC_CST/DC_CST.  Only the first few (IDC_*) are
 * available to the instruction cache; the DC_* commands are data-cache
 * only.  The command is encoded in the top bits of the register value.
 */
#define	IDC_ENABLE	0x02000000	/* Cache enable */
#define IDC_DISABLE	0x04000000	/* Cache disable */
#define IDC_LDLCK	0x06000000	/* Load and lock */
#define IDC_UNLINE	0x08000000	/* Unlock line */
#define IDC_UNALL	0x0a000000	/* Unlock all */
#define IDC_INVALL	0x0c000000	/* Invalidate all */

#define DC_FLINE	0x0e000000	/* Flush data cache line */
#define DC_SFWT		0x01000000	/* Set forced writethrough mode */
#define DC_CFWT		0x03000000	/* Clear forced writethrough mode */
#define DC_SLES		0x05000000	/* Set little endian swap mode */
#define DC_CLES		0x07000000	/* Clear little endian swap mode */
/* Status bits, read back from IC_CST/DC_CST.
 */
#define IDC_ENABLED	0x80000000	/* Cache is enabled */
#define IDC_CERR1	0x00200000	/* Cache error 1 */
#define IDC_CERR2	0x00100000	/* Cache error 2 */
#define IDC_CERR3	0x00080000	/* Cache error 3 */

#define DC_DFWT		0x40000000	/* Data cache is forced write through */
#define DC_LES		0x20000000	/* Caches are little endian mode */
57 #ifdef CONFIG_8xx_CPU6
/*
 * Write SPR @rn with the MPC8xx "CPU6" erratum workaround: before the
 * mtspr, perform a dummy store/load of the SPR-specific magic address
 * @rn_addr to scratch memory, as the mtspr() override below does for
 * each affected SPR.
 *
 * Fix: the asm stores to _tmp_cpu6 ("stw %0,%1") and overwrites the
 * register holding _reg_cpu6 ("lwz %0,%1"), so both must be declared
 * as outputs ("=m" / "+r"); modifying operands listed only as inputs
 * is undefined behavior under GCC's extended-asm rules and lets the
 * compiler assume _reg_cpu6's register is still live afterwards.
 * Operand numbering (%0 reg, %1 mem, %2 value) is unchanged.
 */
#define do_mtspr_cpu6(rn, rn_addr, v)	\
	do {								\
		int _reg_cpu6 = rn_addr, _tmp_cpu6;			\
		asm volatile("stw %0, %1;"				\
			     "lwz %0, %1;"				\
			     "mtspr " __stringify(rn) ",%2" :		\
			     "+r" (_reg_cpu6), "=m" (_tmp_cpu6) :	\
			     "r" ((unsigned long)(v))			\
			     : "memory");				\
	} while (0)
68 
/* Plain SPR write (no erratum workaround): a single mtspr of @v into
 * SPR @rn.  Used by mtspr() below for SPRs not affected by CPU6.
 */
#define do_mtspr(rn, v)	asm volatile("mtspr " __stringify(rn) ",%0" :	\
				     : "r" ((unsigned long)(v))		\
				     : "memory")
/* Override of mtspr() for CPU6-erratum parts: SPRs affected by the
 * erratum are written via do_mtspr_cpu6() with their per-SPR magic
 * address; everything else falls through to a plain do_mtspr().
 * Since @rn is a compile-time constant at every call site, the
 * compiler folds this chain down to the single matching branch
 * (NOTE(review): the magic address table should be checked against
 * the MPC8xx reference manual's CPU6 erratum documentation).
 */
#define mtspr(rn, v) \
	do {								\
		if (rn == SPRN_IMMR)					\
			do_mtspr_cpu6(rn, 0x3d30, v);			\
		else if (rn == SPRN_IC_CST)				\
			do_mtspr_cpu6(rn, 0x2110, v);			\
		else if (rn == SPRN_IC_ADR)				\
			do_mtspr_cpu6(rn, 0x2310, v);			\
		else if (rn == SPRN_IC_DAT)				\
			do_mtspr_cpu6(rn, 0x2510, v);			\
		else if (rn == SPRN_DC_CST)				\
			do_mtspr_cpu6(rn, 0x3110, v);			\
		else if (rn == SPRN_DC_ADR)				\
			do_mtspr_cpu6(rn, 0x3310, v);			\
		else if (rn == SPRN_DC_DAT)				\
			do_mtspr_cpu6(rn, 0x3510, v);			\
		else if (rn == SPRN_MI_CTR)				\
			do_mtspr_cpu6(rn, 0x2180, v);			\
		else if (rn == SPRN_MI_AP)				\
			do_mtspr_cpu6(rn, 0x2580, v);			\
		else if (rn == SPRN_MI_EPN)				\
			do_mtspr_cpu6(rn, 0x2780, v);			\
		else if (rn == SPRN_MI_TWC)				\
			do_mtspr_cpu6(rn, 0x2b80, v);			\
		else if (rn == SPRN_MI_RPN)				\
			do_mtspr_cpu6(rn, 0x2d80, v);			\
		else if (rn == SPRN_MI_CAM)				\
			do_mtspr_cpu6(rn, 0x2190, v);			\
		else if (rn == SPRN_MI_RAM0)				\
			do_mtspr_cpu6(rn, 0x2390, v);			\
		else if (rn == SPRN_MI_RAM1)				\
			do_mtspr_cpu6(rn, 0x2590, v);			\
		else if (rn == SPRN_MD_CTR)				\
			do_mtspr_cpu6(rn, 0x3180, v);			\
		else if (rn == SPRN_M_CASID)				\
			do_mtspr_cpu6(rn, 0x3380, v);			\
		else if (rn == SPRN_MD_AP)				\
			do_mtspr_cpu6(rn, 0x3580, v);			\
		else if (rn == SPRN_MD_EPN)				\
			do_mtspr_cpu6(rn, 0x3780, v);			\
		else if (rn == SPRN_M_TWB)				\
			do_mtspr_cpu6(rn, 0x3980, v);			\
		else if (rn == SPRN_MD_TWC)				\
			do_mtspr_cpu6(rn, 0x3b80, v);			\
		else if (rn == SPRN_MD_RPN)				\
			do_mtspr_cpu6(rn, 0x3d80, v);			\
		else if (rn == SPRN_M_TW)				\
			do_mtspr_cpu6(rn, 0x3f80, v);			\
		else if (rn == SPRN_MD_CAM)				\
			do_mtspr_cpu6(rn, 0x3190, v);			\
		else if (rn == SPRN_MD_RAM0)				\
			do_mtspr_cpu6(rn, 0x3390, v);			\
		else if (rn == SPRN_MD_RAM1)				\
			do_mtspr_cpu6(rn, 0x3590, v);			\
		else if (rn == SPRN_DEC)				\
			do_mtspr_cpu6(rn, 0x2c00, v);			\
		else if (rn == SPRN_TBWL)				\
			do_mtspr_cpu6(rn, 0x3880, v);			\
		else if (rn == SPRN_TBWU)				\
			do_mtspr_cpu6(rn, 0x3a80, v);			\
		else if (rn == SPRN_DPDR)				\
			do_mtspr_cpu6(rn, 0x2d30, v);			\
		else							\
			do_mtspr(rn, v);				\
	} while (0)
137 #endif
138 
139 #endif /* _ASM_POWERPC_REG_8xx_H */
140