/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2017 Chen Liqin <liqin.chen@sunplusct.com>
 * Copyright (C) 2012 Regents of the University of California
 */

#ifndef _ASM_RISCV_CACHE_H
#define _ASM_RISCV_CACHE_H

/* L1 cache lines are 64 bytes (1 << 6). */
#define L1_CACHE_SHIFT		6

#define L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)

/*
 * On platforms with non-coherent DMA, align kmalloc() buffers to the cache
 * line size so that cache maintenance on a DMA buffer cannot clobber an
 * adjacent allocation sharing the same line.
 */
#ifdef CONFIG_RISCV_DMA_NONCOHERENT
#define ARCH_DMA_MINALIGN	L1_CACHE_BYTES
#endif

/*
 * RISC-V requires the stack pointer to be 16-byte aligned, so ensure that
 * the flat loader aligns it accordingly.
 */
#ifndef CONFIG_MMU
#define ARCH_SLAB_MINALIGN	16
#endif

#endif /* _ASM_RISCV_CACHE_H */