xref: /openbmc/u-boot/arch/arm/cpu/armv8/cache_v8.c (revision 0b304a24)
/*
 * (C) Copyright 2013
 * David Feng <fenghua@phytium.com.cn>
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

#include <common.h>
#include <asm/system.h>
#include <asm/armv8/mmu.h>

DECLARE_GLOBAL_DATA_PTR;
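
/*
 * gd is U-Boot's global data pointer; gd->arch.tlb_addr, used below, is
 * the memory set aside for the page table during early board init.
 */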

#ifndef CONFIG_SYS_DCACHE_OFF

/* Write a single block ("section") descriptor into the page table. */
void set_pgtable_section(u64 *page_table, u64 index, u64 section,
			 u64 memory_type)
{
	u64 value;

	value = section | PMD_TYPE_SECT | PMD_SECT_AF;
	value |= PMD_ATTRINDX(memory_type);
	page_table[index] = value;
}
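
/*
 * Illustrative only: with the usual AArch64 encodings from asm/armv8/mmu.h
 * (PMD_TYPE_SECT marking a block descriptor in bits [1:0], PMD_SECT_AF
 * setting the Access Flag, PMD_ATTRINDX() selecting a MAIR attribute
 * slot), a call such as
 *
 *	set_pgtable_section(page_table, 1, 1UL << SECTION_SHIFT, MT_NORMAL);
 *
 * installs a descriptor whose output address is the block base, whose
 * Access Flag is set so the first access does not fault, and whose
 * AttrIndx points at the "normal memory" entry programmed into MAIR_ELx.
 */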

/* To activate the MMU we need to set up virtual memory */
static void mmu_setup(void)
{
	int i, j, el;
	bd_t *bd = gd->bd;
	u64 *page_table = (u64 *)gd->arch.tlb_addr;

	/* Default: map the whole address space 1:1 as Device-nGnRnE memory */
	for (i = 0; i < (PGTABLE_SIZE >> 3); i++) {
		set_pgtable_section(page_table, i, i << SECTION_SHIFT,
				    MT_DEVICE_NGNRNE);
	}
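
	/*
	 * PGTABLE_SIZE >> 3 is the number of 64-bit entries; entry i covers
	 * the 2^SECTION_SHIFT bytes starting at i << SECTION_SHIFT.  With
	 * the 64 KB-granule configuration this port uses, SECTION_SHIFT
	 * works out to 29, so, for example, entry 1 maps the 512 MB block
	 * at 0x20000000 onto itself.
	 */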

	/* Remap RAM 1:1 as normal (cacheable) memory, overriding the default */
	for (i = 0; i < CONFIG_NR_DRAM_BANKS; i++) {
		ulong start = bd->bi_dram[i].start;
		ulong end = bd->bi_dram[i].start + bd->bi_dram[i].size;
		for (j = start >> SECTION_SHIFT;
		     j < end >> SECTION_SHIFT; j++) {
			set_pgtable_section(page_table, j, j << SECTION_SHIFT,
					    MT_NORMAL);
		}
	}

	/* Point TTBR0 at the table and set up TCR/MAIR for the current EL */
	el = current_el();
	if (el == 1) {
		set_ttbr_tcr_mair(el, gd->arch.tlb_addr,
				  TCR_FLAGS | TCR_EL1_IPS_BITS,
				  MEMORY_ATTRIBUTES);
	} else if (el == 2) {
		set_ttbr_tcr_mair(el, gd->arch.tlb_addr,
				  TCR_FLAGS | TCR_EL2_IPS_BITS,
				  MEMORY_ATTRIBUTES);
	} else {
		set_ttbr_tcr_mair(el, gd->arch.tlb_addr,
				  TCR_FLAGS | TCR_EL3_IPS_BITS,
				  MEMORY_ATTRIBUTES);
	}
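
	/*
	 * The three calls above differ only in the physical-address-size
	 * field of TCR; set_ttbr_tcr_mair() writes the EL-specific
	 * TTBR0_ELx, TCR_ELx and MAIR_ELx registers.
	 */
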
	/* Enable the MMU */
	set_sctlr(get_sctlr() | CR_M);
}

/*
 * Performs an invalidation of the entire data cache at all levels
 */
void invalidate_dcache_all(void)
{
	__asm_invalidate_dcache_all();
}

void __weak flush_l3_cache(void)
{
}
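
/*
 * Set/way maintenance only reaches the caches the core itself reports;
 * platforms with an external L3/system cache are expected to override
 * this weak hook with their own flush sequence.
 */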

/*
 * Performs a clean & invalidate of the entire data cache at all levels
 */
void flush_dcache_all(void)
{
	__asm_flush_dcache_all();
	flush_l3_cache();
}

/*
 * Invalidates range in all levels of D-cache/unified cache.
 * Note: this revision reuses the clean & invalidate helper, so dirty
 * lines in the range are written back to memory rather than discarded.
 */
void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
	__asm_flush_dcache_range(start, stop);
}

/*
 * Flush range (clean & invalidate) from all levels of D-cache/unified cache
 */
void flush_dcache_range(unsigned long start, unsigned long stop)
{
	__asm_flush_dcache_range(start, stop);
}

void dcache_enable(void)
{
	/*
	 * With the MMU off, AArch64 treats data accesses as device memory,
	 * so the D-cache has no effect until translation is set up.
	 * Invalidate first so no stale lines are hit once caching starts.
	 */
	if (!(get_sctlr() & CR_M)) {
		invalidate_dcache_all();
		__asm_invalidate_tlb_all();
		mmu_setup();
	}

	set_sctlr(get_sctlr() | CR_C);
}

void dcache_disable(void)
{
	uint32_t sctlr;

	sctlr = get_sctlr();

	/* If the cache isn't enabled there is nothing to do */
	if (!(sctlr & CR_C))
		return;

	/*
	 * Turn off the cache and MMU first so no new lines are allocated,
	 * then clean & invalidate so dirty data reaches memory.
	 */
	set_sctlr(sctlr & ~(CR_C|CR_M));

	flush_dcache_all();
	__asm_invalidate_tlb_all();
}

int dcache_status(void)
{
	return (get_sctlr() & CR_C) != 0;
}

#else	/* CONFIG_SYS_DCACHE_OFF */

void invalidate_dcache_all(void)
{
}

void flush_dcache_all(void)
{
}

void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
}

void flush_dcache_range(unsigned long start, unsigned long stop)
{
}

void dcache_enable(void)
{
}

void dcache_disable(void)
{
}

int dcache_status(void)
{
	return 0;
}

#endif	/* CONFIG_SYS_DCACHE_OFF */

#ifndef CONFIG_SYS_ICACHE_OFF

void icache_enable(void)
{
	/* Invalidate first so no stale instructions are fetched */
	__asm_invalidate_icache_all();
	set_sctlr(get_sctlr() | CR_I);
}

void icache_disable(void)
{
	set_sctlr(get_sctlr() & ~CR_I);
}

int icache_status(void)
{
	return (get_sctlr() & CR_I) != 0;
}

void invalidate_icache_all(void)
{
	__asm_invalidate_icache_all();
}

#else	/* CONFIG_SYS_ICACHE_OFF */

void icache_enable(void)
{
}

void icache_disable(void)
{
}

int icache_status(void)
{
	return 0;
}

void invalidate_icache_all(void)
{
}

#endif	/* CONFIG_SYS_ICACHE_OFF */

/*
 * Enable the I-cache and D-cache; whether each is actually enabled
 * depends on CONFIG_SYS_ICACHE_OFF and CONFIG_SYS_DCACHE_OFF
 */
void __weak enable_caches(void)
{
	icache_enable();
	dcache_enable();
}
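
/*
 * Boards may override this weak default, e.g. (hypothetical sketch) to
 * keep the D-cache off while drivers are not yet cache-safe:
 *
 *	void enable_caches(void)
 *	{
 *		icache_enable();
 *	}
 */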

/*
 * Flush range from all levels of d-cache/unified-cache
 */
void flush_cache(unsigned long start, unsigned long size)
{
	flush_dcache_range(start, start + size);
}
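
/*
 * Typical use (illustrative only): make a freshly written buffer visible
 * to a bus master before a transfer; start_dma() here is a hypothetical
 * driver call, not part of this file:
 *
 *	memcpy(buf, data, len);
 *	flush_cache((unsigned long)buf, len);
 *	start_dma(buf, len);
 */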