/*
 * Copyright (C) 2003 Microtronix Datacom Ltd.
 * Copyright (C) 2000-2002 Greg Ungerer <gerg@snapgear.com>
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 */

#ifndef _ASM_NIOS2_CACHEFLUSH_H
#define _ASM_NIOS2_CACHEFLUSH_H

#include <linux/mm_types.h>

/*
 * This flag is used to indicate that the page pointed to by a pte is clean
 * and does not require cleaning before returning it to the user.
 */
#define PG_dcache_clean PG_arch_1

struct mm_struct;
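
/*
 * Flush user-space mappings from the caches: everything, all mappings of
 * an mm (also on fork via flush_cache_dup_mm()), a vma address range, or a
 * single user page.  See Documentation/core-api/cachetlb.rst for the
 * generic contract these hooks follow.
 */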
extern void flush_cache_all(void);
extern void flush_cache_mm(struct mm_struct *mm);
extern void flush_cache_dup_mm(struct mm_struct *mm);
extern void flush_cache_range(struct vm_area_struct *vma, unsigned long start,
	unsigned long end);
extern void flush_cache_page(struct vm_area_struct *vma, unsigned long vmaddr,
	unsigned long pfn);
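
/*
 * flush_dcache_page()/flush_dcache_folio() keep the D-cache coherent with
 * user mappings when the kernel writes to a page-cache page;
 * ARCH_IMPLEMENTS_FLUSH_DCACHE_PAGE advertises that this port provides a
 * real implementation rather than the generic no-op.
 */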
#define ARCH_IMPLEMENTS_FLUSH_DCACHE_PAGE 1
void flush_dcache_page(struct page *page);
void flush_dcache_folio(struct folio *folio);
#define flush_dcache_folio flush_dcache_folio
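
/*
 * I-cache/D-cache synchronization: flush_icache_range() is called after
 * the kernel writes instructions into memory (e.g. module loading),
 * flush_icache_pages() for user pages that may be mapped executable.
 */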
extern void flush_icache_range(unsigned long start, unsigned long end);
void flush_icache_pages(struct vm_area_struct *vma, struct page *page,
		unsigned int nr);
#define flush_icache_pages flush_icache_pages
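
/*
 * vmap()/vunmap() ranges only need D-cache maintenance here, so both hooks
 * are implemented with flush_dcache_range() over the affected addresses.
 */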
#define flush_cache_vmap(start, end)		flush_dcache_range(start, end)
#define flush_cache_vunmap(start, end)		flush_dcache_range(start, end)
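
/*
 * copy_to_user_page()/copy_from_user_page() are used by access_process_vm()
 * (ptrace and friends) to copy data into/out of another process's page
 * while keeping the caches coherent with the user mapping.
 */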
extern void copy_to_user_page(struct vm_area_struct *vma, struct page *page,
				unsigned long user_vaddr,
				void *dst, void *src, int len);
extern void copy_from_user_page(struct vm_area_struct *vma, struct page *page,
				unsigned long user_vaddr,
				void *dst, void *src, int len);
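
/*
 * Low-level D-cache maintenance over a kernel virtual address range; by
 * the usual naming convention flush_dcache_range() writes dirty lines back
 * and invalidates them, while invalidate_dcache_range() discards lines
 * without writing them back.
 */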
extern void flush_dcache_range(unsigned long start, unsigned long end);
extern void invalidate_dcache_range(unsigned long start, unsigned long end);
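
/*
 * Take the address_space i_pages lock around walks of a page's user
 * mappings (e.g. from the flush_dcache_folio() path); the _irqsave/
 * _irqrestore variants are for callers that must preserve IRQ flags.
 */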
#define flush_dcache_mmap_lock(mapping)		xa_lock_irq(&mapping->i_pages)
#define flush_dcache_mmap_unlock(mapping)	xa_unlock_irq(&mapping->i_pages)
#define flush_dcache_mmap_lock_irqsave(mapping, flags)		\
		xa_lock_irqsave(&mapping->i_pages, flags)
#define flush_dcache_mmap_unlock_irqrestore(mapping, flags)	\
		xa_unlock_irqrestore(&mapping->i_pages, flags)

#endif /* _ASM_NIOS2_CACHEFLUSH_H */