/*
 * arch/xtensa/include/asm/initialize_mmu.h
 *
 * Initializes MMU:
 *
 *      For the new V3 MMU we remap the TLB from virtual == physical
 *      to the standard Linux mapping used in earlier MMUs.
 *
 *      For this MMU we also support a new configuration register that
 *      specifies how the S32C1I instruction operates with the cache
 *      controller.
 *
 * This file is subject to the terms and conditions of the GNU General
 * Public License.  See the file "COPYING" in the main directory of
 * this archive for more details.
 *
 * Copyright (C) 2008 - 2012 Tensilica, Inc.
 *
 *   Marc Gauthier <marc@tensilica.com>
 *   Pete Delaney <piet@tensilica.com>
 */

#ifndef _XTENSA_INITIALIZE_MMU_H
#define _XTENSA_INITIALIZE_MMU_H

#include <asm/pgtable.h>
#include <asm/vectors.h>

#if XCHAL_HAVE_PTP_MMU
#define CA_BYPASS	(_PAGE_CA_BYPASS | _PAGE_HW_WRITE | _PAGE_HW_EXEC)
#define CA_WRITEBACK	(_PAGE_CA_WB     | _PAGE_HW_WRITE | _PAGE_HW_EXEC)
#else
#define CA_WRITEBACK	(0x4)
#endif

#ifndef XCHAL_SPANNING_WAY
#define XCHAL_SPANNING_WAY 0
#endif

#ifdef __ASSEMBLY__

#define XTENSA_HWVERSION_RC_2009_0 230000

	.macro	initialize_mmu

#if XCHAL_HAVE_S32C1I && (XCHAL_HW_MIN_VERSION >= XTENSA_HWVERSION_RC_2009_0)
/*
 * We have the Atomic Operation Control (ATOMCTL) register; initialize it.
 * For details see Documentation/xtensa/atomctl.txt
 */
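/*
 * ATOMCTL selects, per cache attribute (writeback, writethrough, bypass),
 * whether S32C1I is handled internally by the core/coherence controller,
 * turned into an RCW (read-compare-write) bus transaction, or raises an
 * exception; the exact bit layout is described in the document above.
 */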
#if XCHAL_DCACHE_IS_COHERENT
	movi	a3, 0x25	/* For SMP/MX -- internal for writeback,
				 * RCW otherwise
				 */
#else
	movi	a3, 0x29	/* non-MX -- most cores use standard memory
				 * controllers, which usually can't use RCW
				 */
#endif
	wsr	a3, atomctl
#endif  /* XCHAL_HAVE_S32C1I &&
	 * (XCHAL_HW_MIN_VERSION >= XTENSA_HWVERSION_RC_2009_0)
	 */

#if defined(CONFIG_MMU) && XCHAL_HAVE_PTP_MMU && XCHAL_HAVE_SPANNING_WAY
/*
 * Have MMU v3
 */

#if !XCHAL_HAVE_VECBASE
# error "MMU v3 requires reloc vectors"
#endif

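/*
 * At this point the spanning TLB way still holds the hardware's
 * virtual == physical mapping.  The steps below move execution onto a
 * temporary mapping at 0x40000000, tear the identity mapping down,
 * install the standard Linux kernel mappings (0xd0000000 and the KIO
 * windows), and finally jump to the kernel's linked virtual address.
 */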
	movi	a1, 0
	_call0	1f
	_j	2f

	.align	4
1:	movi	a2, 0x10000000
	movi	a3, 0x18000000
	add	a2, a2, a0
9:	bgeu	a2, a3, 9b	/* PC is out of the expected range */
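	/*
	 * a0 holds the return address from the _call0 above, i.e. the
	 * current physical PC.  The branch spins forever unless
	 * a0 + 0x10000000 is (unsigned) below 0x18000000, trapping early
	 * when the code executes from an unexpected physical range.
	 */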

	/* Step 1: invalidate mapping at 0x40000000..0x5FFFFFFF. */

	movi	a2, 0x40000000 | XCHAL_SPANNING_WAY
	idtlb	a2
	iitlb	a2
	isync

	/* Step 2: map 0x40000000..0x47FFFFFF to paddr containing this code
	 * and jump to the new mapping.
	 */

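	/*
	 * For wdtlb/witlb the address register selects the entry to write
	 * (virtual address | way index) and the data register supplies the
	 * translation (physical address | cache attribute).  a7 names the
	 * 0x40000000 entry in the way just below the spanning way (way 5
	 * when the spanning way is the usual way 6), and a3 is our own
	 * 128MB-aligned physical address with bypass attributes.
	 */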
	srli	a3, a0, 27
	slli	a3, a3, 27
	addi	a3, a3, CA_BYPASS
	addi	a7, a2, -1
	wdtlb	a3, a7
	witlb	a3, a7
	isync

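	/*
	 * Rebase the PC onto the temporary window: a4 is the PC's offset
	 * within its 128MB region, and a5 is a2 - 6, which equals
	 * 0x40000000 in the usual configuration where the spanning way is
	 * way 6.  The jx then continues execution through the new mapping.
	 */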
	slli	a4, a0, 5
	srli	a4, a4, 5
	addi	a5, a2, -6
	add	a4, a4, a5
	jx	a4

	/* Step 3: unmap everything other than the current area.
	 *	   Start at 0x60000000, wrap around, and end with 0x20000000.
	 */
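	/*
	 * Walk the address space in 0x20000000 (512MB) strides, dropping
	 * the spanning-way entry for each region, and stop once the walk
	 * wraps back to the 0x40000000 entry that step 1 already removed.
	 */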
2:	movi	a4, 0x20000000
	add	a5, a2, a4
3:	idtlb	a5
	iitlb	a5
	add	a5, a5, a4
	bne	a5, a2, 3b

	/* Step 4: Set up the MMU with the old V2 mappings. */
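	/*
	 * ITLBCFG/DTLBCFG control the page sizes of the configurable TLB
	 * ways; the value 0x01000000 is understood to select the smaller
	 * (256MB) page size for way 6, which the KIO mappings below rely
	 * on.  See the Xtensa MMU documentation for the exact field layout.
	 */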
	movi	a6, 0x01000000
	wsr	a6, ITLBCFG
	wsr	a6, DTLBCFG
	isync

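	/* Way 5: map the 128MB kernel regions at 0xd0000000 (cached) and
	 * 0xd8000000 (bypass) to physical address 0.
	 */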
	movi	a5, 0xd0000005
	movi	a4, CA_WRITEBACK
	wdtlb	a4, a5
	witlb	a4, a5

	movi	a5, 0xd8000005
	movi	a4, CA_BYPASS
	wdtlb	a4, a5
	witlb	a4, a5

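	/* Way 6: map the cached and bypass KIO windows to the default I/O
	 * physical address.
	 */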
	movi	a5, XCHAL_KIO_CACHED_VADDR + 6
	movi	a4, XCHAL_KIO_DEFAULT_PADDR + CA_WRITEBACK
	wdtlb	a4, a5
	witlb	a4, a5

	movi	a5, XCHAL_KIO_BYPASS_VADDR + 6
	movi	a4, XCHAL_KIO_DEFAULT_PADDR + CA_BYPASS
	wdtlb	a4, a5
	witlb	a4, a5

	isync

	/* Jump to self, using MMU v2 mappings. */
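	/* The address loaded for 1f is its link-time (virtual) address, so
	 * the jx below lands on the newly installed kernel mapping.
	 */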
	movi	a4, 1f
	jx	a4

1:
	/* Step 5: remove temporary mapping. */
	idtlb	a7
	iitlb	a7
	isync

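	/* PTEVADDR holds the virtual base of the page table used for TLB
	 * refill; start it out as zero.
	 */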
	movi	a0, 0
	wsr	a0, ptevaddr
	rsync

#endif /* defined(CONFIG_MMU) && XCHAL_HAVE_PTP_MMU &&
	  XCHAL_HAVE_SPANNING_WAY */

#if !defined(CONFIG_MMU) && XCHAL_HAVE_TLBS
	/* Enable data and instruction cache in the DEFAULT_MEMORY region
	 * if the processor has DTLB and ITLB.
	 */

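	/* Walk the spanning way across the default memory region in
	 * 0x20000000 steps: read each DTLB/ITLB entry back, clear its
	 * attribute bits and rewrite it with the writeback attribute.
	 */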
	movi	a5, PLATFORM_DEFAULT_MEM_START | XCHAL_SPANNING_WAY
	movi	a6, ~_PAGE_ATTRIB_MASK
	movi	a7, CA_WRITEBACK
	movi	a8, 0x20000000
	movi	a9, PLATFORM_DEFAULT_MEM_SIZE
	j	2f
1:
	sub	a9, a9, a8
2:
	rdtlb1	a3, a5
	ritlb1	a4, a5
	and	a3, a3, a6
	and	a4, a4, a6
	or	a3, a3, a7
	or	a4, a4, a7
	wdtlb	a3, a5
	witlb	a4, a5
	add	a5, a5, a8
	bltu	a8, a9, 1b

#endif

	.endm

#endif /*__ASSEMBLY__*/

#endif /* _XTENSA_INITIALIZE_MMU_H */