/*
 * arch/powerpc/include/asm/cache.h
 */
#ifndef __ARCH_PPC_CACHE_H
#define __ARCH_PPC_CACHE_H

#include <asm/processor.h>

/* bytes per L1 cache line */
#if defined(CONFIG_8xx)
#define L1_CACHE_SHIFT	4
#elif defined(CONFIG_PPC64BRIDGE)
#define L1_CACHE_SHIFT	7
#elif defined(CONFIG_E500MC)
#define L1_CACHE_SHIFT	6
#else
#define L1_CACHE_SHIFT	5
#endif

#define L1_CACHE_BYTES          (1 << L1_CACHE_SHIFT)
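
/*
 * Worked example (illustrative only): with CONFIG_E500MC the shift above
 * is 6, so L1_CACHE_BYTES = (1 << 6) = 64 bytes per line; the generic
 * default shift of 5 gives 32-byte lines, PPC64BRIDGE gets 128-byte
 * lines, and the 8xx uses 16-byte lines.
 */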

/*
 * Use the L1 data cache line size value for the minimum DMA buffer alignment
 * on PowerPC.
 */
#define ARCH_DMA_MINALIGN	L1_CACHE_BYTES
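
/*
 * Usage sketch (illustrative, not part of this interface): a driver that
 * shares a buffer with a DMA engine can align it to the line size so that
 * cache maintenance on the buffer never touches neighbouring data, e.g.
 *
 *	static u8 rx_buf[512] __attribute__((__aligned__(ARCH_DMA_MINALIGN)));
 *
 * (rx_buf and its size are hypothetical.)
 */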

/*
 * For compatibility, CONFIG_SYS_CACHELINE_SIZE is also supported; it
 * defaults to L1_CACHE_BYTES when the board does not set it.
 */
#ifndef CONFIG_SYS_CACHELINE_SIZE
#define CONFIG_SYS_CACHELINE_SIZE	L1_CACHE_BYTES
#endif

#define L1_CACHE_ALIGN(x)       (((x)+(L1_CACHE_BYTES-1))&~(L1_CACHE_BYTES-1))
#define L1_CACHE_PAGES		8
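
/*
 * Worked example (illustrative): with 32-byte lines, L1_CACHE_ALIGN(100)
 * rounds up to the next line boundary: (100 + 31) & ~31 = 128.
 */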

#define SMP_CACHE_BYTES L1_CACHE_BYTES

#ifdef MODULE
#define __cacheline_aligned __attribute__((__aligned__(L1_CACHE_BYTES)))
#else
#define __cacheline_aligned					\
  __attribute__((__aligned__(L1_CACHE_BYTES),			\
		 __section__(".data.cacheline_aligned")))
#endif
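
/*
 * Usage sketch (illustrative; the variable and type names are hypothetical):
 * placing frequently written data on its own cache line so it does not
 * share a line with unrelated variables:
 *
 *	static struct ring_state ring __cacheline_aligned;
 */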

#if defined(__KERNEL__) && !defined(__ASSEMBLY__)
extern void flush_dcache_range(unsigned long start, unsigned long stop);
extern void clean_dcache_range(unsigned long start, unsigned long stop);
extern void invalidate_dcache_range(unsigned long start, unsigned long stop);
extern void flush_dcache(void);
extern void invalidate_dcache(void);
extern void invalidate_icache(void);
#ifdef CONFIG_SYS_INIT_RAM_LOCK
extern void unlock_ram_in_cache(void);
#endif /* CONFIG_SYS_INIT_RAM_LOCK */
#endif /* __KERNEL__ && !__ASSEMBLY__ */
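
/*
 * Typical call pattern (a sketch, not a requirement of this header):
 * write back dirty lines before a device reads a buffer, and discard
 * stale lines before the CPU reads data a device has written:
 *
 *	flush_dcache_range((unsigned long)tx_buf,
 *			   (unsigned long)tx_buf + sizeof(tx_buf));
 *	... start the DMA transmit ...
 *
 *	invalidate_dcache_range((unsigned long)rx_buf,
 *				(unsigned long)rx_buf + sizeof(rx_buf));
 *	... the CPU may now read rx_buf ...
 *
 * tx_buf and rx_buf are hypothetical, cache-line-aligned buffers.
 */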

#if defined(__KERNEL__) && !defined(__ASSEMBLY__)
int l2cache_init(void);
void enable_cpc(void);
void disable_cpc_sram(void);
#endif
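
/*
 * Illustrative ordering (a sketch only, not mandated by this header):
 * platform init code on parts with a CoreNet platform cache typically
 * brings up the core's L2 first and then turns on the CPC:
 *
 *	l2cache_init();
 *	enable_cpc();
 */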

/* PReP registers for the L2 cache */
#define CACHECRBA       0x80000823      /* Cache configuration register address */
#define L2CACHE_MASK	0x03	/* Mask for 2 L2 Cache bits */
#define L2CACHE_512KB	0x00	/* 512KB */
#define L2CACHE_256KB	0x01	/* 256KB */
#define L2CACHE_1MB	0x02	/* 1MB */
#define L2CACHE_NONE	0x03	/* NONE */
#define L2CACHE_PARITY  0x08    /* Mask for L2 Cache Parity Protected bit */
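
/*
 * Decoding sketch (illustrative; the in8() I/O accessor is assumed to come
 * from <asm/io.h>): the low two bits of the cache configuration register
 * select the L2 size, and L2CACHE_PARITY reports parity protection:
 *
 *	u8 ccr = in8(CACHECRBA);
 *	switch (ccr & L2CACHE_MASK) {
 *	case L2CACHE_512KB: ...
 *	case L2CACHE_256KB: ...
 *	case L2CACHE_1MB:   ...
 *	case L2CACHE_NONE:  ...
 *	}
 *	if (ccr & L2CACHE_PARITY)
 *		... L2 is parity protected ...
 */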

#ifdef CONFIG_8xx
/* Cache control on the MPC8xx is provided through some additional
 * special purpose registers.
 */
#define IC_CST		560	/* Instruction cache control/status */
#define IC_ADR		561	/* Address needed for some commands */
#define IC_DAT		562	/* Read-only data register */
#define DC_CST		568	/* Data cache control/status */
#define DC_ADR		569	/* Address needed for some commands */
#define DC_DAT		570	/* Read-only data register */

/* Commands.  Only the first few are available to the instruction cache.
 */
#define IDC_ENABLE	0x02000000	/* Cache enable */
#define IDC_DISABLE	0x04000000	/* Cache disable */
#define IDC_LDLCK	0x06000000	/* Load and lock */
#define IDC_UNLINE	0x08000000	/* Unlock line */
#define IDC_UNALL	0x0a000000	/* Unlock all */
#define IDC_INVALL	0x0c000000	/* Invalidate all */

#define DC_FLINE	0x0e000000	/* Flush data cache line */
#define DC_SFWT		0x01000000	/* Set forced writethrough mode */
#define DC_CFWT		0x03000000	/* Clear forced writethrough mode */
#define DC_SLES		0x05000000	/* Set little endian swap mode */
#define DC_CLES		0x07000000	/* Clear little endian swap mode */
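
/*
 * Illustrative sketch (assuming the mtspr() helper from <asm/processor.h>,
 * included above): a command is issued by writing its value to the
 * control/status SPR, e.g. invalidating the whole instruction cache or
 * enabling the data cache:
 *
 *	mtspr(IC_CST, IDC_INVALL);
 *	mtspr(DC_CST, IDC_ENABLE);
 */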

/* Status. */
#define IDC_ENABLED	0x80000000	/* Cache is enabled */
#define IDC_CERR1	0x00200000	/* Cache error 1 */
#define IDC_CERR2	0x00100000	/* Cache error 2 */
#define IDC_CERR3	0x00080000	/* Cache error 3 */

#define DC_DFWT		0x40000000	/* Data cache is forced write through */
#define DC_LES		0x20000000	/* Caches are little endian mode */
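
/*
 * Illustrative sketch (assuming the mfspr() helper from <asm/processor.h>):
 * the same SPR reads back status, e.g. to check whether the instruction
 * cache is currently enabled:
 *
 *	if (mfspr(IC_CST) & IDC_ENABLED)
 *		... instruction cache is on ...
 */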
#endif /* CONFIG_8xx */

#endif /* __ARCH_PPC_CACHE_H */