/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 *  linux/arch/arm/mm/proc-arm1022.S: MMU functions for ARM1022E
 *
 *  Copyright (C) 2000 ARM Limited
 *  Copyright (C) 2000 Deep Blue Solutions Ltd.
 *  hacked for non-paged-MM by Hyok S. Choi, 2003.
 *
 * This is the low level assembler for performing cache and TLB
 * functions on the ARM1022E.
 */
#include <linux/linkage.h>
#include <linux/init.h>
#include <linux/pgtable.h>
#include <asm/assembler.h>
#include <asm/asm-offsets.h>
#include <asm/hwcap.h>
#include <asm/pgtable-hwdef.h>
#include <asm/ptrace.h>

#include "proc-macros.S"

/*
 * This is the maximum size of an area which will be invalidated
 * using the single invalidate entry instructions.  Anything larger
 * than this, and we go for the whole cache.
 *
 * This value should be chosen such that we choose the cheapest
 * alternative.
 */
#define MAX_AREA_SIZE	32768

/*
 * The size of one data cache line.
 */
#define CACHE_DLINESIZE	32

/*
 * The number of data cache segments.
 */
#define CACHE_DSEGMENTS	16

/*
 * The number of lines in a cache segment.
 */
#define CACHE_DENTRIES	64

/*
 * This is the size at which it becomes more efficient to
 * clean the whole cache, rather than using the individual
 * cache line maintenance instructions.
 */
#define CACHE_DLIMIT	32768

	.text
/*
 * cpu_arm1022_proc_init()
 */
ENTRY(cpu_arm1022_proc_init)
	ret	lr

/*
 * cpu_arm1022_proc_fin()
 */
ENTRY(cpu_arm1022_proc_fin)
	mrc	p15, 0, r0, c1, c0, 0		@ ctrl register
	bic	r0, r0, #0x1000 		@ ...i............
	bic	r0, r0, #0x000e 		@ ............wca.
	mcr	p15, 0, r0, c1, c0, 0		@ disable caches
	ret	lr

/*
 * cpu_arm1022_reset(loc)
 *
 * Perform a soft reset of the system.	Put the CPU into the
 * same state as it would be if it had been reset, and branch
 * to what would be the reset vector.
 *
 * loc: location to jump to for soft reset
 */
	.align	5
	.pushsection	.idmap.text, "ax"
ENTRY(cpu_arm1022_reset)
	mov	ip, #0
	mcr	p15, 0, ip, c7, c7, 0		@ invalidate I,D caches
	mcr	p15, 0, ip, c7, c10, 4		@ drain WB
#ifdef CONFIG_MMU
	mcr	p15, 0, ip, c8, c7, 0		@ invalidate I & D TLBs
#endif
	mrc	p15, 0, ip, c1, c0, 0		@ ctrl register
	bic	ip, ip, #0x000f 		@ ............wcam
	bic	ip, ip, #0x1100 		@ ...i...s........
	mcr	p15, 0, ip, c1, c0, 0		@ ctrl register
	ret	r0
ENDPROC(cpu_arm1022_reset)
	.popsection

/*
 * cpu_arm1022_do_idle()
 */
	.align	5
ENTRY(cpu_arm1022_do_idle)
	mcr	p15, 0, r0, c7, c0, 4		@ Wait for interrupt
	ret	lr

/* ================================= CACHE ================================ */

	.align	5

/*
 *	flush_icache_all()
 *
 *	Unconditionally clean and invalidate the entire icache.
 */
ENTRY(arm1022_flush_icache_all)
#ifndef CONFIG_CPU_ICACHE_DISABLE
	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0		@ invalidate I cache
#endif
	ret	lr
ENDPROC(arm1022_flush_icache_all)

/*
 *	flush_user_cache_all()
 *
 *	Invalidate all cache entries in a particular address
 *	space.
 */
ENTRY(arm1022_flush_user_cache_all)
	/* FALLTHROUGH */
/*
 *	flush_kern_cache_all()
 *
 *	Clean and invalidate the entire cache.
 */
ENTRY(arm1022_flush_kern_cache_all)
	mov	r2, #VM_EXEC
	mov	ip, #0
__flush_whole_cache:
#ifndef CONFIG_CPU_DCACHE_DISABLE
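	@ r3 encodes the target line for the index-based clean+invalidate:
	@ bits [31:26] select the entry within a segment, bits [9:5] the segment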
	mov	r1, #(CACHE_DSEGMENTS - 1) << 5	@ 16 segments
1:	orr	r3, r1, #(CACHE_DENTRIES - 1) << 26 @ 64 entries
2:	mcr	p15, 0, r3, c7, c14, 2		@ clean+invalidate D index
	subs	r3, r3, #1 << 26
	bcs	2b				@ entries 63 to 0
	subs	r1, r1, #1 << 5
	bcs	1b				@ segments 15 to 0
#endif
	tst	r2, #VM_EXEC
#ifndef CONFIG_CPU_ICACHE_DISABLE
	mcrne	p15, 0, ip, c7, c5, 0		@ invalidate I cache
#endif
	mcrne	p15, 0, ip, c7, c10, 4		@ drain WB
	ret	lr

/*
 *	flush_user_cache_range(start, end, flags)
 *
 *	Invalidate a range of cache entries in the specified
 *	address space.
 *
 *	- start	- start address (inclusive)
 *	- end	- end address (exclusive)
 *	- flags	- vm_flags for this space
 */
ENTRY(arm1022_flush_user_cache_range)
	mov	ip, #0
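	@ ranges larger than CACHE_DLIMIT are cheaper to handle as a full flush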
	sub	r3, r1, r0			@ calculate total size
	cmp	r3, #CACHE_DLIMIT
	bhs	__flush_whole_cache

#ifndef CONFIG_CPU_DCACHE_DISABLE
1:	mcr	p15, 0, r0, c7, c14, 1		@ clean+invalidate D entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
#endif
	tst	r2, #VM_EXEC
#ifndef CONFIG_CPU_ICACHE_DISABLE
	mcrne	p15, 0, ip, c7, c5, 0		@ invalidate I cache
#endif
	mcrne	p15, 0, ip, c7, c10, 4		@ drain WB
	ret	lr

/*
 *	coherent_kern_range(start, end)
 *
 *	Ensure coherency between the Icache and the Dcache in the
 *	region described by start.  If you have non-snooping
 *	Harvard caches, you need to implement this function.
 *
 *	- start	- virtual start address
 *	- end	- virtual end address
 */
ENTRY(arm1022_coherent_kern_range)
	/* FALLTHROUGH */

/*
 *	coherent_user_range(start, end)
 *
 *	Ensure coherency between the Icache and the Dcache in the
 *	region described by start.  If you have non-snooping
 *	Harvard caches, you need to implement this function.
 *
 *	- start	- virtual start address
 *	- end	- virtual end address
 */
ENTRY(arm1022_coherent_user_range)
	mov	ip, #0
	bic	r0, r0, #CACHE_DLINESIZE - 1
1:
#ifndef CONFIG_CPU_DCACHE_DISABLE
	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
#endif
#ifndef CONFIG_CPU_ICACHE_DISABLE
	mcr	p15, 0, r0, c7, c5, 1		@ invalidate I entry
#endif
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	mcr	p15, 0, ip, c7, c10, 4		@ drain WB
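	@ the coherent_user_range interface expects 0 in r0 on success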
	mov	r0, #0
	ret	lr

/*
 *	flush_kern_dcache_area(void *addr, size_t size)
 *
 *	Ensure no D cache aliasing occurs, either with itself or
 *	the I cache
 *
 *	- addr	- kernel address
 *	- size	- region size
 */
ENTRY(arm1022_flush_kern_dcache_area)
	mov	ip, #0
#ifndef CONFIG_CPU_DCACHE_DISABLE
	add	r1, r0, r1
1:	mcr	p15, 0, r0, c7, c14, 1		@ clean+invalidate D entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
#endif
	mcr	p15, 0, ip, c7, c10, 4		@ drain WB
	ret	lr

/*
 *	dma_inv_range(start, end)
 *
 *	Invalidate (discard) the specified virtual address range.
 *	May not write back any entries.  If 'start' or 'end'
 *	are not cache line aligned, those lines must be written
 *	back.
 *
 *	- start	- virtual start address
 *	- end	- virtual end address
 *
 * (same as v4wb)
 */
arm1022_dma_inv_range:
	mov	ip, #0
#ifndef CONFIG_CPU_DCACHE_DISABLE
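	@ clean any partially covered line at either end so that dirty data
	@ outside the range is written back before the invalidate below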
	tst	r0, #CACHE_DLINESIZE - 1
	bic	r0, r0, #CACHE_DLINESIZE - 1
	mcrne	p15, 0, r0, c7, c10, 1		@ clean D entry
	tst	r1, #CACHE_DLINESIZE - 1
	mcrne	p15, 0, r1, c7, c10, 1		@ clean D entry
1:	mcr	p15, 0, r0, c7, c6, 1		@ invalidate D entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
#endif
	mcr	p15, 0, ip, c7, c10, 4		@ drain WB
	ret	lr

/*
 *	dma_clean_range(start, end)
 *
 *	Clean the specified virtual address range.
 *
 *	- start	- virtual start address
 *	- end	- virtual end address
 *
 * (same as v4wb)
 */
arm1022_dma_clean_range:
	mov	ip, #0
#ifndef CONFIG_CPU_DCACHE_DISABLE
	bic	r0, r0, #CACHE_DLINESIZE - 1
1:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
#endif
	mcr	p15, 0, ip, c7, c10, 4		@ drain WB
	ret	lr

/*
 *	dma_flush_range(start, end)
 *
 *	Clean and invalidate the specified virtual address range.
 *
 *	- start	- virtual start address
 *	- end	- virtual end address
 */
ENTRY(arm1022_dma_flush_range)
	mov	ip, #0
#ifndef CONFIG_CPU_DCACHE_DISABLE
	bic	r0, r0, #CACHE_DLINESIZE - 1
1:	mcr	p15, 0, r0, c7, c14, 1		@ clean+invalidate D entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
#endif
	mcr	p15, 0, ip, c7, c10, 4		@ drain WB
	ret	lr

/*
 *	dma_map_area(start, size, dir)
 *	- start	- kernel virtual start address
 *	- size	- size of region
 *	- dir	- DMA direction
 */
ENTRY(arm1022_dma_map_area)
	add	r1, r1, r0
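	@ r2 is the DMA direction: clean only for DMA_TO_DEVICE, invalidate
	@ for DMA_FROM_DEVICE, clean+invalidate for DMA_BIDIRECTIONAL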
	cmp	r2, #DMA_TO_DEVICE
	beq	arm1022_dma_clean_range
	bcs	arm1022_dma_inv_range
	b	arm1022_dma_flush_range
ENDPROC(arm1022_dma_map_area)

/*
 *	dma_unmap_area(start, size, dir)
 *	- start	- kernel virtual start address
 *	- size	- size of region
 *	- dir	- DMA direction
 */
ENTRY(arm1022_dma_unmap_area)
	ret	lr
ENDPROC(arm1022_dma_unmap_area)

	.globl	arm1022_flush_kern_cache_louis
	.equ	arm1022_flush_kern_cache_louis, arm1022_flush_kern_cache_all

	@ define struct cpu_cache_fns (see <asm/cacheflush.h> and proc-macros.S)
	define_cache_functions arm1022

	.align	5
ENTRY(cpu_arm1022_dcache_clean_area)
#ifndef CONFIG_CPU_DCACHE_DISABLE
	mov	ip, #0
1:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	add	r0, r0, #CACHE_DLINESIZE
	subs	r1, r1, #CACHE_DLINESIZE
	bhi	1b
#endif
	ret	lr

/* =============================== PageTable ============================== */

/*
 * cpu_arm1022_switch_mm(pgd)
 *
 * Set the translation base pointer to be as described by pgd.
 *
 * pgd: new page tables
 */
	.align	5
ENTRY(cpu_arm1022_switch_mm)
#ifdef CONFIG_MMU
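	@ the ARM1022E caches are virtually indexed and tagged, so the D cache
	@ must be cleaned and invalidated before the page table base changes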
#ifndef CONFIG_CPU_DCACHE_DISABLE
	mov	r1, #(CACHE_DSEGMENTS - 1) << 5	@ 16 segments
1:	orr	r3, r1, #(CACHE_DENTRIES - 1) << 26 @ 64 entries
2:	mcr	p15, 0, r3, c7, c14, 2		@ clean+invalidate D index
	subs	r3, r3, #1 << 26
	bcs	2b				@ entries 63 to 0
	subs	r1, r1, #1 << 5
	bcs	1b				@ segments 15 to 0
#endif
	mov	r1, #0
#ifndef CONFIG_CPU_ICACHE_DISABLE
	mcr	p15, 0, r1, c7, c5, 0		@ invalidate I cache
#endif
	mcr	p15, 0, r1, c7, c10, 4		@ drain WB
	mcr	p15, 0, r0, c2, c0, 0		@ load page table pointer
	mcr	p15, 0, r1, c8, c7, 0		@ invalidate I & D TLBs
#endif
	ret	lr

/*
 * cpu_arm1022_set_pte_ext(ptep, pte, ext)
 *
 * Set a PTE and flush it out
 */
	.align	5
ENTRY(cpu_arm1022_set_pte_ext)
#ifdef CONFIG_MMU
	armv3_set_pte_ext
	mov	r0, r0
#ifndef CONFIG_CPU_DCACHE_DISABLE
	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
#endif
#endif /* CONFIG_MMU */
	ret	lr

	.type	__arm1022_setup, #function
__arm1022_setup:
	mov	r0, #0
	mcr	p15, 0, r0, c7, c7		@ invalidate I,D caches on v4
	mcr	p15, 0, r0, c7, c10, 4		@ drain write buffer on v4
#ifdef CONFIG_MMU
	mcr	p15, 0, r0, c8, c7		@ invalidate I,D TLBs on v4
#endif
	adr	r5, arm1022_crval
	ldmia	r5, {r5, r6}
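	@ r5 now holds the crval clear mask and r6 the set mask (see
	@ arm1022_crval below and the crval macro in proc-macros.S)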
	mrc	p15, 0, r0, c1, c0		@ get control register v4
	bic	r0, r0, r5
	orr	r0, r0, r6
#ifdef CONFIG_CPU_CACHE_ROUND_ROBIN
	orr	r0, r0, #0x4000 		@ .R..............
#endif
	ret	lr
	.size	__arm1022_setup, . - __arm1022_setup

	/*
	 *  R
	 * .RVI ZFRS BLDP WCAM
	 * .011 1001 ..11 0101
	 *
	 */
	.type	arm1022_crval, #object
arm1022_crval:
	crval	clear=0x00007f3f, mmuset=0x00003935, ucset=0x00001930

	__INITDATA
	@ define struct processor (see <asm/proc-fns.h> and proc-macros.S)
	define_processor_functions arm1022, dabort=v4t_early_abort, pabort=legacy_pabort

	.section ".rodata"

	string	cpu_arch_name, "armv5te"
	string	cpu_elf_name, "v5"
	string	cpu_arm1022_name, "ARM1022"

	.align

	.section ".proc.info.init", "a"

	.type	__arm1022_proc_info,#object
__arm1022_proc_info:
	.long	0x4105a220			@ ARM 1022E (v5TE)
	.long	0xff0ffff0
	.long   PMD_TYPE_SECT | \
		PMD_BIT4 | \
		PMD_SECT_AP_WRITE | \
		PMD_SECT_AP_READ
	.long   PMD_TYPE_SECT | \
		PMD_BIT4 | \
		PMD_SECT_AP_WRITE | \
		PMD_SECT_AP_READ
	initfn	__arm1022_setup, __arm1022_proc_info
	.long	cpu_arch_name
	.long	cpu_elf_name
	.long	HWCAP_SWP | HWCAP_HALF | HWCAP_THUMB | HWCAP_EDSP
	.long	cpu_arm1022_name
	.long	arm1022_processor_functions
	.long	v4wbi_tlb_fns
	.long	v4wb_user_fns
	.long	arm1022_cache_fns
	.size	__arm1022_proc_info, . - __arm1022_proc_info
