xref: /openbmc/u-boot/arch/sh/cpu/sh4/cache.c (revision 4611d5ba)
/*
 * (C) Copyright 2007
 * Nobuhiro Iwamatsu <iwamatsu@nigauri.org>
 *
 * SPDX-License-Identifier:	GPL-2.0+
 */

#include <common.h>
#include <command.h>
#include <asm/processor.h>
#include <asm/io.h>

/*
 * Jump to the P2 area.
 * When handling the TLB or the caches, we need to run from the P2
 * (uncached, untranslated) area; OR-ing 0x20000000 into a P1 address
 * gives its P2 alias.
 */
#define jump_to_P2()			\
  do {					\
    unsigned long __dummy;		\
    __asm__ __volatile__(		\
		"mov.l	1f, %0\n\t"	\
		"or	%1, %0\n\t"	\
		"jmp	@%0\n\t"	\
		" nop\n\t"		\
		".balign 4\n"		\
		"1:	.long 2f\n"	\
		"2:"			\
		: "=&r" (__dummy)	\
		: "r" (0x20000000));	\
  } while (0)

/*
 * Return to the P1 (cached) area.
 */
#define back_to_P1()					\
  do {							\
    unsigned long __dummy;				\
    __asm__ __volatile__(				\
		"nop;nop;nop;nop;nop;nop;nop\n\t"	\
		"mov.l	1f, %0\n\t"			\
		"jmp	@%0\n\t"			\
		" nop\n\t"				\
		".balign 4\n"				\
		"1:	.long 2f\n"			\
		"2:"					\
		: "=&r" (__dummy));			\
  } while (0)

/* Bits in an operand-cache address-array entry */
#define CACHE_VALID       1	/* V bit: entry holds valid data */
#define CACHE_UPDATED     2	/* U bit: entry is dirty (write-back pending) */

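/*
 * Write back every dirty line in the operand cache by walking the OC
 * address array: writing an entry back with its U bit cleared, when the
 * U bit was previously set, makes the hardware flush that line to memory.
 */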
static inline void cache_wback_all(void)
{
	unsigned long addr, data, i, j;

	jump_to_P2();
	for (i = 0; i < CACHE_OC_NUM_ENTRIES; i++) {
		for (j = 0; j < CACHE_OC_NUM_WAYS; j++) {
			addr = CACHE_OC_ADDRESS_ARRAY | (j << CACHE_OC_WAY_SHIFT)
				| (i << CACHE_OC_ENTRY_SHIFT);
			data = inl(addr);
			if (data & CACHE_UPDATED) {
				data &= ~CACHE_UPDATED;
				outl(data, addr);
			}
		}
	}
	back_to_P1();
}


/* Commands accepted by cache_control() */
#define CACHE_ENABLE      0
#define CACHE_DISABLE     1

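/*
 * Enable or disable the caches via the CCR register.  Any dirty data is
 * written back first, so disabling or re-initialising the cache cannot
 * lose pending stores.
 */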
int cache_control(unsigned int cmd)
{
	unsigned long ccr;

	jump_to_P2();
	ccr = inl(CCR);

	if (ccr & CCR_CACHE_ENABLE)
		cache_wback_all();

	if (cmd == CACHE_DISABLE)
		outl(CCR_CACHE_STOP, CCR);
	else
		outl(CCR_CACHE_INIT, CCR);
	back_to_P1();

	return 0;
}

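/*
 * Write back (but keep) all data-cache lines that intersect
 * [start, end) using the "ocbwb" instruction.
 */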
void flush_dcache_range(unsigned long start, unsigned long end)
{
	u32 v;

	start &= ~(L1_CACHE_BYTES - 1);
	for (v = start; v < end; v += L1_CACHE_BYTES) {
		asm volatile ("ocbwb     %0" :	/* no output */
			      : "m" (__m(v)));
	}
}

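/*
 * Invalidate (discard) all data-cache lines that intersect [start, end)
 * using the "ocbi" instruction.  Dirty data in those lines is lost, so
 * callers should pass cache-line-aligned ranges to avoid dropping
 * unrelated data that shares the first or last line.
 */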
void invalidate_dcache_range(unsigned long start, unsigned long end)
{
	u32 v;

	start &= ~(L1_CACHE_BYTES - 1);
	for (v = start; v < end; v += L1_CACHE_BYTES) {
		asm volatile ("ocbi     %0" :	/* no output */
			      : "m" (__m(v)));
	}
}
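
/*
 * Usage sketch (illustration only, not part of this file): a driver that
 * hands a buffer to a bus-mastering device would typically write the
 * buffer back before the device reads it, and invalidate it before the
 * CPU reads what the device wrote.  The buffer, size and helper names
 * below are hypothetical.
 *
 *	unsigned long buf = (unsigned long)dma_buf;
 *
 *	flush_dcache_range(buf, buf + DMA_BUF_SIZE);      // CPU -> device
 *	start_dma(dma_buf, DMA_BUF_SIZE);                 // hypothetical helper
 *	wait_dma_done();                                  // hypothetical helper
 *	invalidate_dcache_range(buf, buf + DMA_BUF_SIZE); // device -> CPU
 */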