#ifndef _ASM_DMA_MAPPING_H
#define _ASM_DMA_MAPPING_H

#include <asm/scatterlist.h>
#include <asm/cache.h>
#include <asm-generic/dma-coherent.h>

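/*
 * Consistent and non-consistent DMA allocations.
 *
 * dma_alloc_coherent() returns memory that stays coherent between the
 * CPU and the device for the lifetime of the mapping;
 * dma_alloc_noncoherent() may return non-coherent memory that must be
 * synchronized explicitly with dma_cache_sync() (declared below).  Both
 * return the kernel virtual address and store the matching bus address
 * in *dma_handle.
 *
 * Illustrative use only (error handling and sizes are caller specific):
 *
 *	dma_addr_t handle;
 *	void *buf = dma_alloc_coherent(dev, size, &handle, GFP_KERNEL);
 *	if (!buf)
 *		return -ENOMEM;
 *	...
 *	dma_free_coherent(dev, size, buf, handle);
 */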
void *dma_alloc_noncoherent(struct device *dev, size_t size,
			   dma_addr_t *dma_handle, gfp_t flag);

void dma_free_noncoherent(struct device *dev, size_t size,
			 void *vaddr, dma_addr_t dma_handle);

void *dma_alloc_coherent(struct device *dev, size_t size,
			   dma_addr_t *dma_handle, gfp_t flag);

void dma_free_coherent(struct device *dev, size_t size,
			 void *vaddr, dma_addr_t dma_handle);

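/*
 * Streaming DMA mappings.
 *
 * dma_map_single()/dma_map_page() map an existing buffer for a DMA
 * transfer in the given direction and return the bus address to program
 * into the device; dma_map_sg() does the same for a scatterlist and
 * returns the number of entries actually mapped.  Every mapping must be
 * released with the matching unmap call once the transfer has finished.
 *
 * Sketch of a single-buffer transfer (illustrative only):
 *
 *	dma_addr_t busaddr = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
 *	if (dma_mapping_error(dev, busaddr))
 *		return -EIO;
 *	... start the transfer and wait for completion ...
 *	dma_unmap_single(dev, busaddr, len, DMA_TO_DEVICE);
 */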
extern dma_addr_t dma_map_single(struct device *dev, void *ptr, size_t size,
	enum dma_data_direction direction);
extern void dma_unmap_single(struct device *dev, dma_addr_t dma_addr,
	size_t size, enum dma_data_direction direction);
extern int dma_map_sg(struct device *dev, struct scatterlist *sg, int nents,
	enum dma_data_direction direction);
extern dma_addr_t dma_map_page(struct device *dev, struct page *page,
	unsigned long offset, size_t size, enum dma_data_direction direction);

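/*
 * On this platform a page mapping is torn down exactly like a single
 * mapping, so dma_unmap_page() simply forwards to dma_unmap_single().
 */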
static inline void dma_unmap_page(struct device *dev, dma_addr_t dma_address,
	size_t size, enum dma_data_direction direction)
{
	dma_unmap_single(dev, dma_address, size, direction);
}

extern void dma_unmap_sg(struct device *dev, struct scatterlist *sg,
	int nhwentries, enum dma_data_direction direction);
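/*
 * Synchronization for streaming mappings.
 *
 * While a buffer is mapped it is owned by the device.  If a mapping is
 * reused without unmapping it, call dma_sync_*_for_cpu() before the CPU
 * reads data written by the device, and dma_sync_*_for_device() after
 * the CPU has written data the device is about to read.  The *_range_*
 * variants operate on a sub-range of a single mapping.
 */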
extern void dma_sync_single_for_cpu(struct device *dev, dma_addr_t dma_handle,
	size_t size, enum dma_data_direction direction);
extern void dma_sync_single_for_device(struct device *dev,
	dma_addr_t dma_handle, size_t size, enum dma_data_direction direction);
extern void dma_sync_single_range_for_cpu(struct device *dev,
	dma_addr_t dma_handle, unsigned long offset, size_t size,
	enum dma_data_direction direction);
extern void dma_sync_single_range_for_device(struct device *dev,
	dma_addr_t dma_handle, unsigned long offset, size_t size,
	enum dma_data_direction direction);
extern void dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg,
	int nelems, enum dma_data_direction direction);
extern void dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg,
	int nelems, enum dma_data_direction direction);
extern int dma_mapping_error(struct device *dev, dma_addr_t dma_addr);
extern int dma_supported(struct device *dev, u64 mask);

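/*
 * dma_set_mask() records the addressing capability of the device after
 * checking it against what the platform supports; it fails with -EIO if
 * dev->dma_mask has not been set up or the mask cannot be satisfied.
 *
 * Illustrative only (DMA_BIT_MASK() comes from <linux/dma-mapping.h>):
 *
 *	if (dma_set_mask(dev, DMA_BIT_MASK(32)))
 *		dev_warn(dev, "no suitable DMA available\n");
 */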
static inline int
dma_set_mask(struct device *dev, u64 mask)
{
	if (!dev->dma_mask || !dma_supported(dev, mask))
		return -EIO;

	*dev->dma_mask = mask;

	return 0;
}

static inline int
dma_get_cache_alignment(void)
{
	/* XXX Largest cache line size on any MIPS CPU */
	return 128;
}

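/*
 * Support for non-coherent memory.
 *
 * dma_is_consistent() reports whether memory at dma_addr is seen
 * coherently by both CPU and device.  Memory obtained from
 * dma_alloc_noncoherent() must be flushed or invalidated with
 * dma_cache_sync() before ownership is handed to the other party.
 */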
extern int dma_is_consistent(struct device *dev, dma_addr_t dma_addr);

extern void dma_cache_sync(struct device *dev, void *vaddr, size_t size,
	       enum dma_data_direction direction);

#endif /* _ASM_DMA_MAPPING_H */