xref: /openbmc/linux/arch/powerpc/include/asm/cache.h (revision 82e6fdd6)
1 /* SPDX-License-Identifier: GPL-2.0 */
2 #ifndef _ASM_POWERPC_CACHE_H
3 #define _ASM_POWERPC_CACHE_H
4 
5 #ifdef __KERNEL__
6 
7 
/*
 * Bytes per L1 cache line: L1_CACHE_BYTES == (1 << L1_CACHE_SHIFT).
 * MAX_COPY_PREFETCH bounds how far ahead copy loops may prefetch.
 */
#if defined(CONFIG_PPC_8xx) || defined(CONFIG_403GCX)
#define L1_CACHE_SHIFT		4	/* 16-byte cache lines */
#define MAX_COPY_PREFETCH	1
#elif defined(CONFIG_PPC_E500MC)
#define L1_CACHE_SHIFT		6	/* 64-byte cache lines */
#define MAX_COPY_PREFETCH	4
#elif defined(CONFIG_PPC32)
#define MAX_COPY_PREFETCH	4
#if defined(CONFIG_PPC_47x)
#define L1_CACHE_SHIFT		7	/* 128-byte cache lines */
#else
#define L1_CACHE_SHIFT		5	/* 32-byte cache lines */
#endif
#else /* CONFIG_PPC64 */
#define L1_CACHE_SHIFT		7	/* 128-byte cache lines */
#define IFETCH_ALIGN_SHIFT	4 /* POWER8,9 */
#endif

#define	L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)

#define	SMP_CACHE_BYTES		L1_CACHE_BYTES

/*
 * NOTE(review): IFETCH_ALIGN_SHIFT is only defined in the CONFIG_PPC64
 * branch above, so IFETCH_ALIGN_BYTES expands to an undefined symbol on
 * 32-bit — presumably it is only ever used by 64-bit code; verify callers.
 */
#define IFETCH_ALIGN_BYTES	(1 << IFETCH_ALIGN_SHIFT)
32 
#if defined(__powerpc64__) && !defined(__ASSEMBLY__)

/* Geometry of a single cache level. */
struct ppc_cache_info {
	u32 size;		/* total capacity */
	u32 line_size;
	u32 block_size;	/* L1 only */
	u32 log_block_size;	/* presumably log2(block_size) — TODO confirm */
	u32 blocks_per_page;
	u32 sets;
	u32 assoc;		/* associativity */
};

/* Per-level cache description for the 64-bit cache hierarchy. */
struct ppc64_caches {
	struct ppc_cache_info l1d;	/* L1 data */
	struct ppc_cache_info l1i;	/* L1 instruction */
	struct ppc_cache_info l2;
	struct ppc_cache_info l3;
};

/* Single global instance; the definition lives outside this header. */
extern struct ppc64_caches ppc64_caches;
#endif /* __powerpc64__ && ! __ASSEMBLY__ */
54 
#if defined(__ASSEMBLY__)
/*
 * For a snooping icache, we still need a dummy icbi to purge all the
 * prefetched instructions from the ifetch buffers. We also need a sync
 * before the icbi to order the actual stores to memory that might
 * have modified instructions with the icbi.
 *
 * Expects the target address in r3 (see the icbi operand below).
 */
#define PURGE_PREFETCHED_INS	\
	sync;			\
	icbi	0,r3;		\
	sync;			\
	isync
67 
#else
/* Tag for data placed in the dedicated ".data..read_mostly" section. */
#define __read_mostly __attribute__((__section__(".data..read_mostly")))

#ifdef CONFIG_6xx
/* 6xx cores: accessors for the L2/L3 cache control registers
 * (implemented elsewhere). */
extern long _get_L2CR(void);
extern long _get_L3CR(void);
extern void _set_L2CR(unsigned long);
extern void _set_L3CR(unsigned long);
#else
/* No L2CR/L3CR on other cores: reads yield 0, writes are no-ops. */
#define _get_L2CR()	0L
#define _get_L3CR()	0L
#define _set_L2CR(val)	do { } while(0)
#define _set_L3CR(val)	do { } while(0)
#endif
82 
83 static inline void dcbz(void *addr)
84 {
85 	__asm__ __volatile__ ("dcbz 0, %0" : : "r"(addr) : "memory");
86 }
87 
88 static inline void dcbi(void *addr)
89 {
90 	__asm__ __volatile__ ("dcbi 0, %0" : : "r"(addr) : "memory");
91 }
92 
93 static inline void dcbf(void *addr)
94 {
95 	__asm__ __volatile__ ("dcbf 0, %0" : : "r"(addr) : "memory");
96 }
97 
98 static inline void dcbst(void *addr)
99 {
100 	__asm__ __volatile__ ("dcbst 0, %0" : : "r"(addr) : "memory");
101 }
102 #endif /* !__ASSEMBLY__ */
103 #endif /* __KERNEL__ */
104 #endif /* _ASM_POWERPC_CACHE_H */
105