xref: /openbmc/linux/arch/powerpc/include/asm/cache.h (revision 65e01f38)
1 #ifndef _ASM_POWERPC_CACHE_H
2 #define _ASM_POWERPC_CACHE_H
3 
4 #ifdef __KERNEL__
5 
6 
/* bytes per L1 cache line */
#if defined(CONFIG_8xx) || defined(CONFIG_403GCX)
/* 8xx and 403GCX: 16-byte cache lines */
#define L1_CACHE_SHIFT		4
#define MAX_COPY_PREFETCH	1
#elif defined(CONFIG_PPC_E500MC)
/* e500mc: 64-byte cache lines */
#define L1_CACHE_SHIFT		6
#define MAX_COPY_PREFETCH	4
#elif defined(CONFIG_PPC32)
#define MAX_COPY_PREFETCH	4
#if defined(CONFIG_PPC_47x)
/* 47x: 128-byte cache lines */
#define L1_CACHE_SHIFT		7
#else
/* remaining 32-bit cores: 32-byte cache lines */
#define L1_CACHE_SHIFT		5
#endif
#else /* CONFIG_PPC64 */
#define L1_CACHE_SHIFT		7
#define IFETCH_ALIGN_SHIFT	4 /* POWER8,9 */
#endif

#define	L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)

#define	SMP_CACHE_BYTES		L1_CACHE_BYTES

/*
 * NOTE(review): IFETCH_ALIGN_SHIFT is only defined in the CONFIG_PPC64
 * branch above, so IFETCH_ALIGN_BYTES can only be expanded in 64-bit
 * builds — confirm no 32-bit code uses it.
 */
#define IFETCH_ALIGN_BYTES	(1 << IFETCH_ALIGN_SHIFT)
31 
32 #if defined(__powerpc64__) && !defined(__ASSEMBLY__)
33 
/*
 * Geometry of one cache level, as discovered at boot.
 * All sizes are in bytes (presumably — field names only; confirm
 * against the code that populates ppc64_caches).
 */
struct ppc_cache_info {
	u32 size;		/* total cache capacity */
	u32 line_size;		/* coherency line size */
	u32 block_size;	/* L1 only */
	u32 log_block_size;	/* log2(block_size) */
	u32 blocks_per_page;	/* blocks covering one page */
	u32 sets;		/* number of sets (associativity groups) */
};
42 
/* Per-level cache information for the boot CPU (64-bit only). */
struct ppc64_caches {
	struct ppc_cache_info l1d;	/* L1 data cache */
	struct ppc_cache_info l1i;	/* L1 instruction cache */
	struct ppc_cache_info l2;
	struct ppc_cache_info l3;
};

/* Filled in elsewhere during early boot; read-only thereafter — TODO confirm. */
extern struct ppc64_caches ppc64_caches;
51 #endif /* __powerpc64__ && ! __ASSEMBLY__ */
52 
#if defined(__ASSEMBLY__)
/*
 * For a snooping icache, we still need a dummy icbi to purge all the
 * prefetched instructions from the ifetch buffers. We also need a sync
 * before the icbi to order the actual stores to memory that might
 * have modified instructions with the icbi.
 *
 * Expects the target address in r3 (icbi 0,r3).
 */
#define PURGE_PREFETCHED_INS	\
	sync;			\
	icbi	0,r3;		\
	sync;			\
	isync
65 
#else
/* Place mostly-read data in its own section to avoid false sharing. */
#define __read_mostly __attribute__((__section__(".data..read_mostly")))

#ifdef CONFIG_6xx
/* 6xx: real accessors for the L2/L3 cache control registers (defined in asm). */
extern long _get_L2CR(void);
extern long _get_L3CR(void);
extern void _set_L2CR(unsigned long);
extern void _set_L3CR(unsigned long);
#else
/* Other cores have no L2CR/L3CR; provide no-op stubs so callers compile. */
#define _get_L2CR()	0L
#define _get_L3CR()	0L
#define _set_L2CR(val)	do { } while(0)
#define _set_L3CR(val)	do { } while(0)
#endif
80 
/* Data Cache Block Zero: zero the cache block containing @addr. */
static inline void dcbz(void *addr)
{
	__asm__ __volatile__ ("dcbz 0, %0" : : "r"(addr) : "memory");
}
85 
/* Data Cache Block Invalidate: discard (without writeback) the block containing @addr. */
static inline void dcbi(void *addr)
{
	__asm__ __volatile__ ("dcbi 0, %0" : : "r"(addr) : "memory");
}
90 
/* Data Cache Block Flush: write back and invalidate the block containing @addr. */
static inline void dcbf(void *addr)
{
	__asm__ __volatile__ ("dcbf 0, %0" : : "r"(addr) : "memory");
}
95 
/* Data Cache Block Store: write back (keep valid) the block containing @addr. */
static inline void dcbst(void *addr)
{
	__asm__ __volatile__ ("dcbst 0, %0" : : "r"(addr) : "memory");
}
100 #endif /* !__ASSEMBLY__ */
101 #endif /* __KERNEL__ */
102 #endif /* _ASM_POWERPC_CACHE_H */
103