#ifndef _M68K_PAGE_NO_H
#define _M68K_PAGE_NO_H

#ifndef __ASSEMBLY__

extern unsigned long memory_start;
extern unsigned long memory_end;

#define get_user_page(vaddr)		__get_free_page(GFP_KERNEL)
#define free_user_page(page, addr)	free_page(addr)

#define clear_page(page)	memset((page), 0, PAGE_SIZE)
#define copy_page(to,from)	memcpy((to), (from), PAGE_SIZE)

#define clear_user_page(page, vaddr, pg)	clear_page(page)
#define copy_user_page(to, from, vaddr, pg)	copy_page(to, from)

#define __alloc_zeroed_user_highpage(movableflags, vma, vaddr) \
	alloc_page_vma(GFP_HIGHUSER | __GFP_ZERO | movableflags, vma, vaddr)
#define __HAVE_ARCH_ALLOC_ZEROED_USER_HIGHPAGE

#define __pa(vaddr)		((unsigned long)(vaddr))
#define __va(paddr)		((void *)(paddr))

#define virt_to_pfn(kaddr)	(__pa(kaddr) >> PAGE_SHIFT)
#define pfn_to_virt(pfn)	__va((pfn) << PAGE_SHIFT)

#define virt_to_page(addr)	(mem_map + (((unsigned long)(addr) - PAGE_OFFSET) >> PAGE_SHIFT))
#define page_to_virt(page)	__va(((((page) - mem_map) << PAGE_SHIFT) + PAGE_OFFSET))

#define pfn_to_page(pfn)	virt_to_page(pfn_to_virt(pfn))
#define page_to_pfn(page)	virt_to_pfn(page_to_virt(page))
#define pfn_valid(pfn)		((pfn) < max_mapnr)

#define virt_addr_valid(kaddr)	(((void *)(kaddr) >= (void *)PAGE_OFFSET) && \
				((void *)(kaddr) < (void *)memory_end))

#endif /* __ASSEMBLY__ */

#endif /* _M68K_PAGE_NO_H */