/* xref: /openbmc/linux/arch/powerpc/include/asm/cache.h (revision 95e9fd10) */
#ifndef _ASM_POWERPC_CACHE_H
#define _ASM_POWERPC_CACHE_H

#ifdef __KERNEL__

/*
 * Bytes per L1 cache line, selected at compile time from the platform
 * config.  L1_CACHE_SHIFT is log2 of the line size; L1_CACHE_BYTES is
 * derived from it below.  MAX_COPY_PREFETCH bounds how many lines the
 * copy routines prefetch ahead (only defined for the 32-bit branches
 * here; NOTE(review): ppc64 presumably gets it elsewhere — confirm).
 */
#if defined(CONFIG_8xx) || defined(CONFIG_403GCX)
/* 8xx / 403GCX: 16-byte lines */
#define L1_CACHE_SHIFT		4
#define MAX_COPY_PREFETCH	1
#elif defined(CONFIG_PPC_E500MC)
/* e500mc: 64-byte lines */
#define L1_CACHE_SHIFT		6
#define MAX_COPY_PREFETCH	4
#elif defined(CONFIG_PPC32)
#define MAX_COPY_PREFETCH	4
#if defined(CONFIG_PPC_47x)
/* 476: 128-byte lines */
#define L1_CACHE_SHIFT		7
#else
/* remaining 32-bit parts: 32-byte lines */
#define L1_CACHE_SHIFT		5
#endif
#else /* CONFIG_PPC64 */
/* 64-bit: 128-byte lines */
#define L1_CACHE_SHIFT		7
#endif

#define	L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)

/* SMP cacheline-alignment granularity is the L1 line size. */
#define	SMP_CACHE_BYTES		L1_CACHE_BYTES

#if defined(__powerpc64__) && !defined(__ASSEMBLY__)
/*
 * Runtime-discovered L1 cache geometry on ppc64, one instance of which
 * (ppc64_caches, below) is defined elsewhere in the kernel.
 * NOTE(review): field layout looks like it may be referenced by offset
 * from assembly — confirm before reordering members.
 */
struct ppc64_caches {
	u32	dsize;			/* L1 d-cache size */
	u32	dline_size;		/* L1 d-cache line size	*/
	u32	log_dline_size;		/* log2(dline_size) */
	u32	dlines_per_page;	/* d-cache lines per page */
	u32	isize;			/* L1 i-cache size */
	u32	iline_size;		/* L1 i-cache line size	*/
	u32	log_iline_size;		/* log2(iline_size) */
	u32	ilines_per_page;	/* i-cache lines per page */
};

extern struct ppc64_caches ppc64_caches;
#endif /* __powerpc64__ && ! __ASSEMBLY__ */

44 #if !defined(__ASSEMBLY__)
45 
46 #define __read_mostly __attribute__((__section__(".data..read_mostly")))
47 
48 #ifdef CONFIG_6xx
49 extern long _get_L2CR(void);
50 extern long _get_L3CR(void);
51 extern void _set_L2CR(unsigned long);
52 extern void _set_L3CR(unsigned long);
53 #else
54 #define _get_L2CR()	0L
55 #define _get_L3CR()	0L
56 #define _set_L2CR(val)	do { } while(0)
57 #define _set_L3CR(val)	do { } while(0)
58 #endif
59 
60 extern void cacheable_memzero(void *p, unsigned int nb);
61 extern void *cacheable_memcpy(void *, const void *, unsigned int);
62 
63 #endif /* !__ASSEMBLY__ */
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_CACHE_H */