xref: /openbmc/u-boot/arch/arm/lib/cache.c (revision ae485b54)
// SPDX-License-Identifier: GPL-2.0+
/*
 * (C) Copyright 2002
 * Wolfgang Denk, DENX Software Engineering, wd@denx.de.
 */

/* for now: just dummy functions to satisfy the linker */

#include <common.h>
#include <malloc.h>

/*
 * Flush range from all levels of d-cache/unified-cache.
 * Affects the range [start, start + size - 1].
 */
__weak void flush_cache(unsigned long start, unsigned long size)
{
	flush_dcache_range(start, start + size);
}
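
/*
 * Typical call pattern (illustrative only, not code from this file): before
 * handing a buffer to a DMA master, a caller flushes the exact byte range it
 * wrote, rounded up to whole cache lines so that platform implementations
 * built around check_cache_range() do not warn:
 *
 *	flush_cache((unsigned long)buf,
 *		    roundup(len, CONFIG_SYS_CACHELINE_SIZE));
 *
 * buf and len are hypothetical driver variables; roundup() is the usual
 * helper from <linux/kernel.h>.
 */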

/*
 * Default implementation: flush the entire address space with a single
 * range flush.
 */
__weak void flush_dcache_all(void)
{
	flush_cache(0, ~0);
}

/*
 * Default implementation of enable_caches();
 * the real implementation should be provided by platform code.
 */
__weak void enable_caches(void)
{
	puts("WARNING: Caches not enabled\n");
}
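
/*
 * Sketch of a typical platform override (an illustrative assumption, not code
 * from this file): most ARM boards simply turn both caches on here, e.g.
 *
 *	void enable_caches(void)
 *	{
 *		icache_enable();
 *		dcache_enable();
 *	}
 *
 * icache_enable() and dcache_enable() are the standard U-Boot cache hooks.
 */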

__weak void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
	/* An empty stub; the real implementation should be in platform code */
}

__weak void flush_dcache_range(unsigned long start, unsigned long stop)
{
	/* An empty stub; the real implementation should be in platform code */
}

/*
 * Check that both ends of a cache maintenance range are aligned to the
 * cache line size; warn (in non-SPL builds) and return 0 on misalignment.
 */
int check_cache_range(unsigned long start, unsigned long stop)
{
	int ok = 1;

	if (start & (CONFIG_SYS_CACHELINE_SIZE - 1))
		ok = 0;

	if (stop & (CONFIG_SYS_CACHELINE_SIZE - 1))
		ok = 0;

	if (!ok) {
		warn_non_spl("CACHE: Misaligned operation at range [%08lx, %08lx]\n",
			     start, stop);
	}

	return ok;
}
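
/*
 * Minimal sketch of how a platform override of flush_dcache_range() might use
 * check_cache_range(). This is an illustrative assumption, not code from this
 * file: the mcr below is the ARMv7 DCCIMVAC (clean and invalidate by MVA to
 * PoC) operation, and real ports typically live in SoC-specific files.
 */
#if 0	/* illustrative sketch only */
void flush_dcache_range(unsigned long start, unsigned long stop)
{
	unsigned long mva;

	if (!check_cache_range(start, stop))
		return;

	for (mva = start; mva < stop; mva += CONFIG_SYS_CACHELINE_SIZE)
		asm volatile("mcr p15, 0, %0, c7, c14, 1"
			     : : "r" (mva) : "memory");

	dsb();	/* complete the maintenance before DMA is kicked off */
}
#endif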

#ifdef CONFIG_SYS_NONCACHED_MEMORY
/*
 * Reserve CONFIG_SYS_NONCACHED_MEMORY bytes of address space (rounded up to
 * whole MMU sections) below the malloc() area and map the region uncached.
 */
static unsigned long noncached_start;
static unsigned long noncached_end;
static unsigned long noncached_next;

void noncached_init(void)
{
	phys_addr_t start, end;
	size_t size;

	end = ALIGN(mem_malloc_start, MMU_SECTION_SIZE) - MMU_SECTION_SIZE;
	size = ALIGN(CONFIG_SYS_NONCACHED_MEMORY, MMU_SECTION_SIZE);
	start = end - size;

	debug("mapping memory %pa-%pa non-cached\n", &start, &end);

	noncached_start = start;
	noncached_end = end;
	noncached_next = start;

#ifndef CONFIG_SYS_DCACHE_OFF
	mmu_set_region_dcache_behaviour(noncached_start, size, DCACHE_OFF);
#endif
}
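
/*
 * Worked example (illustrative numbers only, assuming the default 1 MiB
 * MMU_SECTION_SIZE and CONFIG_SYS_NONCACHED_MEMORY = 1 MiB): with
 * mem_malloc_start at 0x9ff20000, end rounds up to 0xa0000000 and then drops
 * one section to 0x9ff00000, size becomes 0x100000, so the non-cached window
 * is [0x9fe00000, 0x9ff00000) and is remapped with DCACHE_OFF.
 */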

/*
 * Carve size bytes, aligned to align, out of the non-cached window set up by
 * noncached_init(). Returns the address of the allocation, or 0 when the
 * window is exhausted; allocations are never freed.
 */
phys_addr_t noncached_alloc(size_t size, size_t align)
{
	phys_addr_t next = ALIGN(noncached_next, align);

	if (next >= noncached_end || (noncached_end - next) < size)
		return 0;

	debug("allocated %zu bytes of uncached memory @%pa\n", size, &next);
	noncached_next = next + size;

	return next;
}
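
/*
 * Usage sketch (illustrative, not part of this file): a driver needing a
 * coherent DMA descriptor ring could take it from the non-cached window,
 * assuming noncached_init() already ran during boot:
 *
 *	descs = noncached_alloc(ring_size, ARCH_DMA_MINALIGN);
 *	if (!descs)
 *		return -ENOMEM;
 *
 * descs and ring_size are hypothetical driver variables; ARCH_DMA_MINALIGN is
 * the usual DMA alignment constant in U-Boot.
 */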
#endif /* CONFIG_SYS_NONCACHED_MEMORY */

#if CONFIG_IS_ENABLED(SYS_THUMB_BUILD)
/*
 * Invalidate the entire L2 cache via the implementation-defined cp15 (c15)
 * operation issued in the inline assembly below.
 */
void invalidate_l2_cache(void)
{
	unsigned int val = 0;

	asm volatile("mcr p15, 1, %0, c15, c11, 0 @ invl l2 cache"
		: : "r" (val) : "cc");
	isb();
}
#endif
119