xref: /openbmc/linux/arch/powerpc/include/asm/reg_8xx.h (revision 68198dca)
1 /* SPDX-License-Identifier: GPL-2.0 */
2 /*
3  * Contains register definitions common to PowerPC 8xx CPUs.
4  */
5 #ifndef _ASM_POWERPC_REG_8xx_H
6 #define _ASM_POWERPC_REG_8xx_H
7 
8 #include <asm/mmu.h>
9 
10 /* Cache control on the MPC8xx is provided through some additional
11  * special purpose registers.
12  */
13 #define SPRN_IC_CST	560	/* Instruction cache control/status */
14 #define SPRN_IC_ADR	561	/* Address needed for some commands */
15 #define SPRN_IC_DAT	562	/* Read-only data register */
16 #define SPRN_DC_CST	568	/* Data cache control/status */
17 #define SPRN_DC_ADR	569	/* Address needed for some commands */
18 #define SPRN_DC_DAT	570	/* Read-only data register */
19 
20 /* Misc Debug */
/* NOTE(review): per the MPC8xx reference manual these SPRs give debug
 * access to the development port (DPDR) and to the instruction/data MMU
 * TLB CAM/RAM arrays — confirm details against the manual before use.
 */
21 #define SPRN_DPDR	630	/* Development port data register */
22 #define SPRN_MI_CAM	816	/* IMMU TLB CAM entry read */
23 #define SPRN_MI_RAM0	817	/* IMMU TLB RAM entry read, word 0 */
24 #define SPRN_MI_RAM1	818	/* IMMU TLB RAM entry read, word 1 */
25 #define SPRN_MD_CAM	824	/* DMMU TLB CAM entry read */
26 #define SPRN_MD_RAM0	825	/* DMMU TLB RAM entry read, word 0 */
27 #define SPRN_MD_RAM1	826	/* DMMU TLB RAM entry read, word 1 */
28 
29 /* Special MSR manipulation registers */
30 #define SPRN_EIE	80	/* External interrupt enable (EE=1, RI=1) */
31 #define SPRN_EID	81	/* External interrupt disable (EE=0, RI=1) */
32 #define SPRN_NRI	82	/* Non recoverable interrupt (EE=0, RI=0) */
33 
34 /* Debug registers */
/* NOTE(review): comparator/counter/control SPRs of the 8xx development
 * support unit; exact semantics are documented in the MPC8xx reference
 * manual — the per-line comments below are derived from the names.
 */
35 #define SPRN_CMPA	144	/* Comparator A */
36 #define SPRN_COUNTA	150	/* Counter A */
37 #define SPRN_CMPE	152	/* Comparator E */
38 #define SPRN_CMPF	153	/* Comparator F */
39 #define SPRN_LCTRL1	156	/* Load/store support control 1 */
40 #define SPRN_LCTRL2	157	/* Load/store support control 2 */
41 #define SPRN_ICTRL	158	/* Instruction support control */
42 #define SPRN_BAR	159	/* Breakpoint address register */
43 
44 /* Commands.  Only the first few are available to the instruction cache.
45 */
/* NOTE(review): these command values occupy the high-order bits and are
 * presumably written to IC_CST/DC_CST (the control/status SPRs defined
 * above) to trigger cache operations — confirm against the MPC8xx
 * reference manual.
 */
46 #define	IDC_ENABLE	0x02000000	/* Cache enable */
47 #define IDC_DISABLE	0x04000000	/* Cache disable */
48 #define IDC_LDLCK	0x06000000	/* Load and lock */
49 #define IDC_UNLINE	0x08000000	/* Unlock line */
50 #define IDC_UNALL	0x0a000000	/* Unlock all */
51 #define IDC_INVALL	0x0c000000	/* Invalidate all */
52 
53 #define DC_FLINE	0x0e000000	/* Flush data cache line */
54 #define DC_SFWT		0x01000000	/* Set forced writethrough mode */
55 #define DC_CFWT		0x03000000	/* Clear forced writethrough mode */
56 #define DC_SLES		0x05000000	/* Set little endian swap mode */
57 #define DC_CLES		0x07000000	/* Clear little endian swap mode */
58 
59 /* Status.
60 */
/* Status bits read back from the IC_CST/DC_CST registers. */
61 #define IDC_ENABLED	0x80000000	/* Cache is enabled */
62 #define IDC_CERR1	0x00200000	/* Cache error 1 */
63 #define IDC_CERR2	0x00100000	/* Cache error 2 */
64 #define IDC_CERR3	0x00080000	/* Cache error 3 */
65 
66 #define DC_DFWT		0x40000000	/* Data cache is forced write through */
67 #define DC_LES		0x20000000	/* Caches are little endian mode */
68 
69 #ifdef CONFIG_8xx_CPU6
/*
 * CPU6 silicon errata workaround: on affected 8xx parts, each mtspr to
 * certain SPRs must be immediately preceded by a dummy store/load of a
 * per-SPR magic value (rn_addr below).  The mtspr() macro maps each
 * affected SPR to its magic value and falls back to a plain mtspr for
 * everything else.  When rn is a compile-time constant the if/else
 * chain folds down to a single branch.
 *
 * NOTE(review): in the asm below, _tmp_cpu6 is written by the "stw" yet
 * is passed as an "m" *input* operand (and %0 is modified by the
 * "lwz"); by current GCC extended-asm rules these should be output
 * operands.  Left byte-identical here — historical code that relies on
 * the "memory" clobber and volatile to stay intact; confirm before
 * changing.
 */
70 #define do_mtspr_cpu6(rn, rn_addr, v)	\
71 	do {								\
72 		int _reg_cpu6 = rn_addr, _tmp_cpu6;		\
73 		asm volatile("stw %0, %1;"				\
74 			     "lwz %0, %1;"				\
75 			     "mtspr " __stringify(rn) ",%2" :		\
76 			     : "r" (_reg_cpu6), "m"(_tmp_cpu6),		\
77 			       "r" ((unsigned long)(v))			\
78 			     : "memory");				\
79 	} while (0)
80 
/* Plain SPR write, for registers not affected by the CPU6 errata. */
81 #define do_mtspr(rn, v)	asm volatile("mtspr " __stringify(rn) ",%0" :	\
82 				     : "r" ((unsigned long)(v))		\
83 				     : "memory")
/*
 * mtspr(rn, v) - write v to SPR rn, inserting the CPU6 errata dummy
 * access for the SPRs listed below.  The second argument of each
 * do_mtspr_cpu6() call is that SPR's magic address for the dummy
 * store/load sequence.
 */
84 #define mtspr(rn, v) \
85 	do {								\
86 		if (rn == SPRN_IMMR)					\
87 			do_mtspr_cpu6(rn, 0x3d30, v);			\
88 		else if (rn == SPRN_IC_CST)				\
89 			do_mtspr_cpu6(rn, 0x2110, v);			\
90 		else if (rn == SPRN_IC_ADR)				\
91 			do_mtspr_cpu6(rn, 0x2310, v);			\
92 		else if (rn == SPRN_IC_DAT)				\
93 			do_mtspr_cpu6(rn, 0x2510, v);			\
94 		else if (rn == SPRN_DC_CST)				\
95 			do_mtspr_cpu6(rn, 0x3110, v);			\
96 		else if (rn == SPRN_DC_ADR)				\
97 			do_mtspr_cpu6(rn, 0x3310, v);			\
98 		else if (rn == SPRN_DC_DAT)				\
99 			do_mtspr_cpu6(rn, 0x3510, v);			\
100 		else if (rn == SPRN_MI_CTR)				\
101 			do_mtspr_cpu6(rn, 0x2180, v);			\
102 		else if (rn == SPRN_MI_AP)				\
103 			do_mtspr_cpu6(rn, 0x2580, v);			\
104 		else if (rn == SPRN_MI_EPN)				\
105 			do_mtspr_cpu6(rn, 0x2780, v);			\
106 		else if (rn == SPRN_MI_TWC)				\
107 			do_mtspr_cpu6(rn, 0x2b80, v);			\
108 		else if (rn == SPRN_MI_RPN)				\
109 			do_mtspr_cpu6(rn, 0x2d80, v);			\
110 		else if (rn == SPRN_MI_CAM)				\
111 			do_mtspr_cpu6(rn, 0x2190, v);			\
112 		else if (rn == SPRN_MI_RAM0)				\
113 			do_mtspr_cpu6(rn, 0x2390, v);			\
114 		else if (rn == SPRN_MI_RAM1)				\
115 			do_mtspr_cpu6(rn, 0x2590, v);			\
116 		else if (rn == SPRN_MD_CTR)				\
117 			do_mtspr_cpu6(rn, 0x3180, v);			\
118 		else if (rn == SPRN_M_CASID)				\
119 			do_mtspr_cpu6(rn, 0x3380, v);			\
120 		else if (rn == SPRN_MD_AP)				\
121 			do_mtspr_cpu6(rn, 0x3580, v);			\
122 		else if (rn == SPRN_MD_EPN)				\
123 			do_mtspr_cpu6(rn, 0x3780, v);			\
124 		else if (rn == SPRN_M_TWB)				\
125 			do_mtspr_cpu6(rn, 0x3980, v);			\
126 		else if (rn == SPRN_MD_TWC)				\
127 			do_mtspr_cpu6(rn, 0x3b80, v);			\
128 		else if (rn == SPRN_MD_RPN)				\
129 			do_mtspr_cpu6(rn, 0x3d80, v);			\
130 		else if (rn == SPRN_M_TW)				\
131 			do_mtspr_cpu6(rn, 0x3f80, v);			\
132 		else if (rn == SPRN_MD_CAM)				\
133 			do_mtspr_cpu6(rn, 0x3190, v);			\
134 		else if (rn == SPRN_MD_RAM0)				\
135 			do_mtspr_cpu6(rn, 0x3390, v);			\
136 		else if (rn == SPRN_MD_RAM1)				\
137 			do_mtspr_cpu6(rn, 0x3590, v);			\
138 		else if (rn == SPRN_DEC)				\
139 			do_mtspr_cpu6(rn, 0x2c00, v);			\
140 		else if (rn == SPRN_TBWL)				\
141 			do_mtspr_cpu6(rn, 0x3880, v);			\
142 		else if (rn == SPRN_TBWU)				\
143 			do_mtspr_cpu6(rn, 0x3a80, v);			\
144 		else if (rn == SPRN_DPDR)				\
145 			do_mtspr_cpu6(rn, 0x2d30, v);			\
146 		else							\
147 			do_mtspr(rn, v);				\
148 	} while (0)
149 #endif
150 
151 #endif /* _ASM_POWERPC_REG_8xx_H */
152