xref: /openbmc/linux/arch/csky/mm/cachev1.c (revision 2a598d0b)
// SPDX-License-Identifier: GPL-2.0
// Copyright (C) 2018 Hangzhou C-SKY Microsystems co.,ltd.

#include <linux/spinlock.h>
#include <asm/cache.h>
#include <abi/reg_ops.h>

/*
 * Command bits for L1 cache operations, written to cr17 (and, for L2,
 * cr24): cache select, invalidate, clear (write back) and single-line
 * mode (CACHE_OMS), which acts on the address loaded into cr22 by
 * cache_op_line() below.  CACHE_ITS and CACHE_LICF are unused here.
 */
#define INS_CACHE		(1 << 0)
#define DATA_CACHE		(1 << 1)
#define CACHE_INV		(1 << 4)
#define CACHE_CLR		(1 << 5)
#define CACHE_OMS		(1 << 6)
#define CACHE_ITS		(1 << 7)
#define CACHE_LICF		(1 << 31)

/*
 * Field layout of the cr22 register for L2 cache operations; these
 * shifts are not referenced in this file.
 */
#define CR22_LEVEL_SHIFT	(1)
#define CR22_SET_SHIFT		(7)
#define CR22_WAY_SHIFT		(30)
#define CR22_WAY_SHIFT_L2	(29)

static DEFINE_SPINLOCK(cache_lock);

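/*
 * Issue a single-line cache operation: the line address goes into cr22
 * and the command bits into cr17.
 */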
static inline void cache_op_line(unsigned long i, unsigned int val)
{
	mtcr("cr22", i);
	mtcr("cr17", val);
}

#define CCR2_L2E (1 << 3)
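/*
 * Operate on the whole L1 cache.  When the l2 argument is set and the L2
 * cache is enabled (CCR2_L2E in ccr2), the same command is also issued
 * through cr24 for the L2 cache.
 */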
static void cache_op_all(unsigned int value, unsigned int l2)
{
	mtcr("cr17", value | CACHE_CLR);
	mb();

	if (l2 && (mfcr_ccr2() & CCR2_L2E)) {
		mtcr("cr24", value | CACHE_CLR);
		mb();
	}
}

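/*
 * Operate line by line over [start, end).  Ranges spanning at least a
 * page, or lying outside the lowmem direct mapping, fall back to a
 * whole-cache operation instead.
 */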
static void cache_op_range(
	unsigned int start,
	unsigned int end,
	unsigned int value,
	unsigned int l2)
{
	unsigned long i, flags;
	unsigned int val = value | CACHE_CLR | CACHE_OMS;
	bool l2_sync;

	if (unlikely((end - start) >= PAGE_SIZE) ||
	    unlikely(start < PAGE_OFFSET) ||
	    unlikely(start >= PAGE_OFFSET + LOWMEM_LIMIT)) {
		cache_op_all(value, l2);
		return;
	}

	if ((mfcr_ccr2() & CCR2_L2E) && l2)
		l2_sync = 1;
	else
		l2_sync = 0;

	spin_lock_irqsave(&cache_lock, flags);

	i = start & ~(L1_CACHE_BYTES - 1);
	for (; i < end; i += L1_CACHE_BYTES) {
		cache_op_line(i, val);
		if (l2_sync) {
			mb();
			mtcr("cr24", val);
		}
	}
	spin_unlock_irqrestore(&cache_lock, flags);

	mb();
}

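/*
 * Write back (clear) a single data cache line.  The idly4 ahead of the
 * line operation is presumably required by this CPU/cache version; note
 * that, unlike cache_op_range(), CACHE_OMS is not set here.
 */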
void dcache_wb_line(unsigned long start)
{
	asm volatile("idly4\n":::"memory");
	cache_op_line(start, DATA_CACHE|CACHE_CLR);
	mb();
}

void icache_inv_range(unsigned long start, unsigned long end)
{
	cache_op_range(start, end, INS_CACHE|CACHE_INV, 0);
}

void icache_inv_all(void)
{
	cache_op_all(INS_CACHE|CACHE_INV, 0);
}

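/*
 * Same as icache_inv_all(); the ignored argument lets this double as a
 * cross-call callback (e.g. for on_each_cpu()).
 */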
void local_icache_inv_all(void *priv)
{
	cache_op_all(INS_CACHE|CACHE_INV, 0);
}

void dcache_wb_range(unsigned long start, unsigned long end)
{
	cache_op_range(start, end, DATA_CACHE|CACHE_CLR, 0);
}
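
/*
 * Usage sketch (illustrative only): a caller patching kernel text would
 * typically pair the two range helpers, writing the new instructions out
 * of the data cache and then invalidating the stale instruction cache
 * lines:
 *
 *	memcpy(addr, insn, len);
 *	dcache_wb_range((unsigned long)addr, (unsigned long)addr + len);
 *	icache_inv_range((unsigned long)addr, (unsigned long)addr + len);
 *
 * addr, insn and len are hypothetical caller-side variables.
 */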

void dcache_wbinv_all(void)
{
	cache_op_all(DATA_CACHE|CACHE_CLR|CACHE_INV, 0);
}

void cache_wbinv_range(unsigned long start, unsigned long end)
{
	cache_op_range(start, end, INS_CACHE|DATA_CACHE|CACHE_CLR|CACHE_INV, 0);
}
EXPORT_SYMBOL(cache_wbinv_range);

void cache_wbinv_all(void)
{
	cache_op_all(INS_CACHE|DATA_CACHE|CACHE_CLR|CACHE_INV, 0);
}

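/*
 * DMA cache maintenance.  On this cache version all three helpers below
 * issue the same clear + invalidate operation (L2 included when enabled);
 * the distinct names match the ownership transitions a DMA mapping layer
 * performs, e.g. (illustrative only):
 *
 *	dma_wb_range(vaddr, vaddr + size);	// before the device reads the buffer
 *	dma_inv_range(vaddr, vaddr + size);	// after the device has written to it
 *
 * vaddr and size are hypothetical caller-side variables.
 */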
void dma_wbinv_range(unsigned long start, unsigned long end)
{
	cache_op_range(start, end, DATA_CACHE|CACHE_CLR|CACHE_INV, 1);
}

void dma_inv_range(unsigned long start, unsigned long end)
{
	cache_op_range(start, end, DATA_CACHE|CACHE_CLR|CACHE_INV, 1);
}

void dma_wb_range(unsigned long start, unsigned long end)
{
	cache_op_range(start, end, DATA_CACHE|CACHE_CLR|CACHE_INV, 1);
}