/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2017 Chen Liqin <liqin.chen@sunplusct.com>
 * Copyright (C) 2012 Regents of the University of California
 */

#ifndef _ASM_RISCV_CACHE_H
#define _ASM_RISCV_CACHE_H

#define L1_CACHE_SHIFT		6

#define L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)

#ifdef CONFIG_RISCV_DMA_NONCOHERENT
#define ARCH_DMA_MINALIGN	L1_CACHE_BYTES
#define ARCH_KMALLOC_MINALIGN	(8)
#endif

/*
 * RISC-V requires the stack pointer to be 16-byte aligned, so ensure that
 * the flat loader aligns it accordingly.
 */
#ifndef CONFIG_MMU
#define ARCH_SLAB_MINALIGN	16
#endif

#ifndef __ASSEMBLY__

#ifdef CONFIG_RISCV_DMA_NONCOHERENT
extern int dma_cache_alignment;
#define dma_get_cache_alignment dma_get_cache_alignment
static inline int dma_get_cache_alignment(void)
{
	return dma_cache_alignment;
}
#endif

#endif	/* __ASSEMBLY__ */

#endif /* _ASM_RISCV_CACHE_H */
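
/*
 * Usage sketch (not part of this header; the driver, device and variable
 * names below are hypothetical): on DMA-noncoherent RISC-V systems,
 * kmalloc() may return buffers aligned to only ARCH_KMALLOC_MINALIGN (8),
 * so a driver that wants a streaming-DMA buffer padded out to a whole
 * cache line can query the boot-time value via dma_get_cache_alignment()
 * and round its allocation size up to it:
 *
 *	size_t len = ALIGN(payload_len, dma_get_cache_alignment());
 *	void *buf = kmalloc(len, GFP_KERNEL);
 *	dma_addr_t handle;
 *
 *	if (!buf)
 *		return -ENOMEM;
 *	handle = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
 *
 * Buffers smaller or less aligned than a cache line still work; the DMA
 * core is expected to bounce them, at some performance cost.
 */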