#ifndef _ASM_POWERPC_CACHE_H
#define _ASM_POWERPC_CACHE_H

#ifdef __KERNEL__

/* bytes per L1 cache line */
#if defined(CONFIG_8xx) || defined(CONFIG_403GCX)
#define L1_CACHE_SHIFT		4
#define MAX_COPY_PREFETCH	1
#elif defined(CONFIG_PPC_E500MC)
#define L1_CACHE_SHIFT		6
#define MAX_COPY_PREFETCH	4
#elif defined(CONFIG_PPC32)
#define MAX_COPY_PREFETCH	4
#if defined(CONFIG_PPC_47x)
#define L1_CACHE_SHIFT		7
#else
#define L1_CACHE_SHIFT		5
#endif
#else /* CONFIG_PPC64 */
#define L1_CACHE_SHIFT		7
#endif

#define L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)

#define SMP_CACHE_BYTES		L1_CACHE_BYTES

#if defined(__powerpc64__) && !defined(__ASSEMBLY__)
struct ppc64_caches {
	u32	dsize;			/* L1 d-cache size */
	u32	dline_size;		/* L1 d-cache line size */
	u32	log_dline_size;
	u32	dlines_per_page;
	u32	isize;			/* L1 i-cache size */
	u32	iline_size;		/* L1 i-cache line size */
	u32	log_iline_size;
	u32	ilines_per_page;
};

extern struct ppc64_caches ppc64_caches;
#endif /* __powerpc64__ && !__ASSEMBLY__ */

#if !defined(__ASSEMBLY__)

#define __read_mostly __attribute__((__section__(".data..read_mostly")))

#ifdef CONFIG_6xx
extern long _get_L2CR(void);
extern long _get_L3CR(void);
extern void _set_L2CR(unsigned long);
extern void _set_L3CR(unsigned long);
#else
#define _get_L2CR()	0L
#define _get_L3CR()	0L
#define _set_L2CR(val)	do { } while (0)
#define _set_L3CR(val)	do { } while (0)
#endif

extern void cacheable_memzero(void *p, unsigned int nb);
extern void *cacheable_memcpy(void *dst, const void *src, unsigned int nb);

#endif /* !__ASSEMBLY__ */
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_CACHE_H */
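
/*
 * Usage sketch (illustrative only, not part of the header above): how a
 * consumer of these definitions typically uses them.  The identifiers
 * `hypothetical_boot_flag', `struct hypothetical_cpu_stat' and
 * `hypothetical_read_l2cr' are made-up names for this example;
 * __read_mostly, L1_CACHE_BYTES and _get_L2CR() are the real symbols
 * declared in the header.
 */
#include <asm/cache.h>

/*
 * Written once during init, read on hot paths afterwards: placing it in
 * .data..read_mostly groups it with other rarely-written data, so it does
 * not share a cache line with write-hot variables.
 */
static int hypothetical_boot_flag __read_mostly;

/*
 * Align each element to a full L1 line so two CPUs updating adjacent
 * array slots do not bounce the same cache line back and forth
 * (false sharing).
 */
struct hypothetical_cpu_stat {
	unsigned long	packets;
	unsigned long	bytes;
} __attribute__((__aligned__(L1_CACHE_BYTES)));

/*
 * On CONFIG_6xx the L2CR accessors are real out-of-line functions; on
 * every other platform the macro fallbacks above turn the read into 0L
 * and the write into a no-op, so callers need no #ifdef of their own.
 */
static inline unsigned long hypothetical_read_l2cr(void)
{
	return _get_L2CR();
}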