xref: /openbmc/linux/arch/riscv/include/asm/cache.h (revision 6bd33e1ece528f67646db33bf97406b747dafda0)
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2017 Chen Liqin <liqin.chen@sunplusct.com>
 * Copyright (C) 2012 Regents of the University of California
 */

#ifndef _ASM_RISCV_CACHE_H
#define _ASM_RISCV_CACHE_H

#define L1_CACHE_SHIFT		6

#define L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)

/*
 * RISC-V requires the stack pointer to be 16-byte aligned, so ensure that
 * the flat loader aligns it accordingly.
 */
#ifndef CONFIG_MMU
#define ARCH_SLAB_MINALIGN	16
#endif

#endif /* _ASM_RISCV_CACHE_H */
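
Usage note: L1_CACHE_BYTES is consumed by the generic cache-alignment helpers in include/linux/cache.h (for example ____cacheline_aligned). The snippet below is a minimal, standalone sketch of that pattern, not kernel code; the macro and struct names are illustrative assumptions only.

/*
 * Standalone sketch (illustrative, not kernel code): how a cache-line-size
 * constant like L1_CACHE_BYTES is typically used to keep a hot field on its
 * own cache line and avoid false sharing.
 */
#include <stdio.h>

#define L1_CACHE_SHIFT	6
#define L1_CACHE_BYTES	(1 << L1_CACHE_SHIFT)	/* 64-byte lines, matching the header above */

/* Pad/align an object to a cache-line boundary (GNU C attribute). */
#define cacheline_aligned	__attribute__((__aligned__(L1_CACHE_BYTES)))

struct counter {
	unsigned long hits;		/* frequently written field */
} cacheline_aligned;			/* sizeof(struct counter) rounds up to 64 */

int main(void)
{
	struct counter c = { 0 };

	c.hits++;
	printf("line size %d, sizeof(struct counter) = %zu\n",
	       L1_CACHE_BYTES, sizeof(c));
	return 0;
}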