xref: /openbmc/u-boot/arch/arm/lib/cache.c (revision d9b23e26)
/*
 * (C) Copyright 2002
 * Wolfgang Denk, DENX Software Engineering, wd@denx.de.
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

/*
 * Weak dummy functions to satisfy the linker; platforms override these
 * with real implementations.
 */

#include <common.h>
#include <malloc.h>

/*
 * Flush range from all levels of d-cache/unified-cache.
 * Affects the range [start, start + size - 1].
 */
__weak void flush_cache(unsigned long start, unsigned long size)
{
	flush_dcache_range(start, start + size);
}
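
/*
 * Usage sketch, not taken from this file: a caller that has just written a
 * buffer destined for DMA flushes it through all cache levels before the
 * transfer starts.  The helper name and parameters are hypothetical.
 */
static inline void example_flush_dma_buffer(void *buf, size_t len)
{
	flush_cache((unsigned long)buf, len);
}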

/*
 * Default implementation:
 * flush the entire address space with a single range flush.
 */
__weak void flush_dcache_all(void)
{
	flush_cache(0, ~0);
}

/*
 * Default implementation of enable_caches()
 * Real implementation should be in platform code
 */
__weak void enable_caches(void)
{
	puts("WARNING: Caches not enabled\n");
}
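
/*
 * Sketch of a typical platform override (an illustration, not code from this
 * file): boards usually provide a non-weak enable_caches() that simply turns
 * both caches on with the standard helpers.  Shown as a comment here because
 * a second definition in this translation unit would collide with the weak
 * stub above.
 *
 *	void enable_caches(void)
 *	{
 *		icache_enable();
 *		dcache_enable();
 *	}
 */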

__weak void invalidate_dcache_range(unsigned long start, unsigned long stop)
{
	/* An empty stub; the real implementation should be in platform code */
}

__weak void flush_dcache_range(unsigned long start, unsigned long stop)
{
	/* An empty stub; the real implementation should be in platform code */
}

/*
 * Check that both ends of a range are aligned to the d-cache line size.
 * Returns 1 if they are, otherwise warns (in non-SPL builds) and returns 0.
 */
int check_cache_range(unsigned long start, unsigned long stop)
{
	int ok = 1;

	if (start & (CONFIG_SYS_CACHELINE_SIZE - 1))
		ok = 0;

	if (stop & (CONFIG_SYS_CACHELINE_SIZE - 1))
		ok = 0;

	if (!ok) {
		warn_non_spl("CACHE: Misaligned operation at range [%08lx, %08lx]\n",
			     start, stop);
	}

	return ok;
}
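
/*
 * Illustrative helper, an assumption rather than upstream code: round an
 * arbitrary buffer out to cache-line boundaries so a flush passes the
 * alignment check above.  Note that widening the range can also flush
 * unrelated data sharing the first or last cache line.
 */
static inline void example_flush_buffer_aligned(void *buf, size_t len)
{
	unsigned long start, stop;

	start = (unsigned long)buf & ~(CONFIG_SYS_CACHELINE_SIZE - 1);
	stop = ALIGN((unsigned long)buf + len, CONFIG_SYS_CACHELINE_SIZE);

	if (check_cache_range(start, stop))
		flush_dcache_range(start, stop);
}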

#ifdef CONFIG_SYS_NONCACHED_MEMORY
/*
 * Reserve CONFIG_SYS_NONCACHED_MEMORY bytes of address space (rounded up to
 * whole MMU sections) just below the malloc() area; noncached_init() maps
 * the region uncached.
 */
static unsigned long noncached_start;
static unsigned long noncached_end;
static unsigned long noncached_next;

void noncached_init(void)
{
	phys_addr_t start, end;
	size_t size;

	end = ALIGN(mem_malloc_start, MMU_SECTION_SIZE) - MMU_SECTION_SIZE;
	size = ALIGN(CONFIG_SYS_NONCACHED_MEMORY, MMU_SECTION_SIZE);
	start = end - size;

	debug("mapping memory %pa-%pa non-cached\n", &start, &end);

	noncached_start = start;
	noncached_end = end;
	noncached_next = start;

#ifndef CONFIG_SYS_DCACHE_OFF
	mmu_set_region_dcache_behaviour(noncached_start, size, DCACHE_OFF);
#endif
}

/*
 * Carve the next chunk out of the uncached region.  This is a simple bump
 * allocator: there is no way to free, and 0 is returned once the remaining
 * space cannot satisfy the request.
 */
phys_addr_t noncached_alloc(size_t size, size_t align)
{
	phys_addr_t next = ALIGN(noncached_next, align);

	if (next >= noncached_end || (noncached_end - next) < size)
		return 0;

	debug("allocated %zu bytes of uncached memory @%pa\n", size, &next);
	noncached_next = next + size;

	return next;
}
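
/*
 * Usage sketch, an assumption rather than code from this file: a DMA-capable
 * driver would typically carve its descriptor ring out of this region so the
 * descriptors never need explicit flushes or invalidates.  The structure and
 * helper below are hypothetical.
 */
struct example_dma_desc {
	u32 ctrl;
	u32 buf_addr;
};

static inline phys_addr_t example_alloc_desc_ring(unsigned int count)
{
	/* noncached_alloc() returns 0 once the carve-out is exhausted. */
	return noncached_alloc(count * sizeof(struct example_dma_desc),
			       CONFIG_SYS_CACHELINE_SIZE);
}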
#endif /* CONFIG_SYS_NONCACHED_MEMORY */

#if CONFIG_IS_ENABLED(SYS_THUMB_BUILD)
/* Invalidate the entire L2 cache with a single CP15 operation. */
void invalidate_l2_cache(void)
{
	unsigned int val = 0;

	asm volatile("mcr p15, 1, %0, c15, c11, 0 @ invl l2 cache"
		: : "r" (val) : "cc");
	isb();
}
#endif