/* dma-mapping.h — x86 DMA mapping interface */
  1. #ifndef _ASM_DMA_MAPPING_H_
  2. #define _ASM_DMA_MAPPING_H_
  3. /*
  4. * IOMMU interface. See Documentation/DMA-mapping.txt and DMA-API.txt for
  5. * documentation.
  6. */
  7. #include <linux/scatterlist.h>
  8. #include <asm/io.h>
  9. #include <asm/swiotlb.h>
  10. extern dma_addr_t bad_dma_address;
  11. extern int iommu_merge;
  12. extern struct device fallback_dev;
  13. extern int panic_on_overflow;
  14. extern int force_iommu;
  15. struct dma_mapping_ops {
  16. int (*mapping_error)(dma_addr_t dma_addr);
  17. void* (*alloc_coherent)(struct device *dev, size_t size,
  18. dma_addr_t *dma_handle, gfp_t gfp);
  19. void (*free_coherent)(struct device *dev, size_t size,
  20. void *vaddr, dma_addr_t dma_handle);
  21. dma_addr_t (*map_single)(struct device *hwdev, phys_addr_t ptr,
  22. size_t size, int direction);
  23. /* like map_single, but doesn't check the device mask */
  24. dma_addr_t (*map_simple)(struct device *hwdev, phys_addr_t ptr,
  25. size_t size, int direction);
  26. void (*unmap_single)(struct device *dev, dma_addr_t addr,
  27. size_t size, int direction);
  28. void (*sync_single_for_cpu)(struct device *hwdev,
  29. dma_addr_t dma_handle, size_t size,
  30. int direction);
  31. void (*sync_single_for_device)(struct device *hwdev,
  32. dma_addr_t dma_handle, size_t size,
  33. int direction);
  34. void (*sync_single_range_for_cpu)(struct device *hwdev,
  35. dma_addr_t dma_handle, unsigned long offset,
  36. size_t size, int direction);
  37. void (*sync_single_range_for_device)(struct device *hwdev,
  38. dma_addr_t dma_handle, unsigned long offset,
  39. size_t size, int direction);
  40. void (*sync_sg_for_cpu)(struct device *hwdev,
  41. struct scatterlist *sg, int nelems,
  42. int direction);
  43. void (*sync_sg_for_device)(struct device *hwdev,
  44. struct scatterlist *sg, int nelems,
  45. int direction);
  46. int (*map_sg)(struct device *hwdev, struct scatterlist *sg,
  47. int nents, int direction);
  48. void (*unmap_sg)(struct device *hwdev,
  49. struct scatterlist *sg, int nents,
  50. int direction);
  51. int (*dma_supported)(struct device *hwdev, u64 mask);
  52. int is_phys;
  53. };
  54. extern const struct dma_mapping_ops *dma_ops;
  55. static inline int dma_mapping_error(dma_addr_t dma_addr)
  56. {
  57. if (dma_ops->mapping_error)
  58. return dma_ops->mapping_error(dma_addr);
  59. return (dma_addr == bad_dma_address);
  60. }
  61. #define dma_alloc_noncoherent(d, s, h, f) dma_alloc_coherent(d, s, h, f)
  62. #define dma_free_noncoherent(d, s, v, h) dma_free_coherent(d, s, v, h)
  63. void *dma_alloc_coherent(struct device *dev, size_t size,
  64. dma_addr_t *dma_handle, gfp_t flag);
  65. void dma_free_coherent(struct device *dev, size_t size,
  66. void *vaddr, dma_addr_t dma_handle);
  67. extern int dma_supported(struct device *hwdev, u64 mask);
  68. extern int dma_set_mask(struct device *dev, u64 mask);
  69. static inline dma_addr_t
  70. dma_map_single(struct device *hwdev, void *ptr, size_t size,
  71. int direction)
  72. {
  73. BUG_ON(!valid_dma_direction(direction));
  74. return dma_ops->map_single(hwdev, virt_to_phys(ptr), size, direction);
  75. }
  76. static inline void
  77. dma_unmap_single(struct device *dev, dma_addr_t addr, size_t size,
  78. int direction)
  79. {
  80. BUG_ON(!valid_dma_direction(direction));
  81. if (dma_ops->unmap_single)
  82. dma_ops->unmap_single(dev, addr, size, direction);
  83. }
  84. static inline int
  85. dma_map_sg(struct device *hwdev, struct scatterlist *sg,
  86. int nents, int direction)
  87. {
  88. BUG_ON(!valid_dma_direction(direction));
  89. return dma_ops->map_sg(hwdev, sg, nents, direction);
  90. }
  91. static inline void
  92. dma_unmap_sg(struct device *hwdev, struct scatterlist *sg, int nents,
  93. int direction)
  94. {
  95. BUG_ON(!valid_dma_direction(direction));
  96. if (dma_ops->unmap_sg)
  97. dma_ops->unmap_sg(hwdev, sg, nents, direction);
  98. }
  99. static inline void
  100. dma_sync_single_for_cpu(struct device *hwdev, dma_addr_t dma_handle,
  101. size_t size, int direction)
  102. {
  103. BUG_ON(!valid_dma_direction(direction));
  104. if (dma_ops->sync_single_for_cpu)
  105. dma_ops->sync_single_for_cpu(hwdev, dma_handle, size,
  106. direction);
  107. flush_write_buffers();
  108. }
  109. static inline void
  110. dma_sync_single_for_device(struct device *hwdev, dma_addr_t dma_handle,
  111. size_t size, int direction)
  112. {
  113. BUG_ON(!valid_dma_direction(direction));
  114. if (dma_ops->sync_single_for_device)
  115. dma_ops->sync_single_for_device(hwdev, dma_handle, size,
  116. direction);
  117. flush_write_buffers();
  118. }
  119. static inline void
  120. dma_sync_single_range_for_cpu(struct device *hwdev, dma_addr_t dma_handle,
  121. unsigned long offset, size_t size, int direction)
  122. {
  123. BUG_ON(!valid_dma_direction(direction));
  124. if (dma_ops->sync_single_range_for_cpu)
  125. dma_ops->sync_single_range_for_cpu(hwdev, dma_handle, offset,
  126. size, direction);
  127. flush_write_buffers();
  128. }
  129. static inline void
  130. dma_sync_single_range_for_device(struct device *hwdev, dma_addr_t dma_handle,
  131. unsigned long offset, size_t size,
  132. int direction)
  133. {
  134. BUG_ON(!valid_dma_direction(direction));
  135. if (dma_ops->sync_single_range_for_device)
  136. dma_ops->sync_single_range_for_device(hwdev, dma_handle,
  137. offset, size, direction);
  138. flush_write_buffers();
  139. }
  140. static inline void
  141. dma_sync_sg_for_cpu(struct device *hwdev, struct scatterlist *sg,
  142. int nelems, int direction)
  143. {
  144. BUG_ON(!valid_dma_direction(direction));
  145. if (dma_ops->sync_sg_for_cpu)
  146. dma_ops->sync_sg_for_cpu(hwdev, sg, nelems, direction);
  147. flush_write_buffers();
  148. }
  149. static inline void
  150. dma_sync_sg_for_device(struct device *hwdev, struct scatterlist *sg,
  151. int nelems, int direction)
  152. {
  153. BUG_ON(!valid_dma_direction(direction));
  154. if (dma_ops->sync_sg_for_device)
  155. dma_ops->sync_sg_for_device(hwdev, sg, nelems, direction);
  156. flush_write_buffers();
  157. }
  158. static inline dma_addr_t dma_map_page(struct device *dev, struct page *page,
  159. size_t offset, size_t size,
  160. int direction)
  161. {
  162. BUG_ON(!valid_dma_direction(direction));
  163. return dma_ops->map_single(dev, page_to_phys(page)+offset,
  164. size, direction);
  165. }
  166. static inline void dma_unmap_page(struct device *dev, dma_addr_t addr,
  167. size_t size, int direction)
  168. {
  169. dma_unmap_single(dev, addr, size, direction);
  170. }
  171. static inline void
  172. dma_cache_sync(struct device *dev, void *vaddr, size_t size,
  173. enum dma_data_direction dir)
  174. {
  175. flush_write_buffers();
  176. }
  177. static inline int dma_get_cache_alignment(void)
  178. {
  179. /* no easy way to get cache size on all x86, so return the
  180. * maximum possible, to be safe */
  181. return boot_cpu_data.x86_clflush_size;
  182. }
  183. #define dma_is_consistent(d, h) (1)
  184. #ifdef CONFIG_X86_32
  185. # define ARCH_HAS_DMA_DECLARE_COHERENT_MEMORY
  186. struct dma_coherent_mem {
  187. void *virt_base;
  188. u32 device_base;
  189. int size;
  190. int flags;
  191. unsigned long *bitmap;
  192. };
  193. extern int
  194. dma_declare_coherent_memory(struct device *dev, dma_addr_t bus_addr,
  195. dma_addr_t device_addr, size_t size, int flags);
  196. extern void
  197. dma_release_declared_memory(struct device *dev);
  198. extern void *
  199. dma_mark_declared_memory_occupied(struct device *dev,
  200. dma_addr_t device_addr, size_t size);
  201. #endif /* CONFIG_X86_32 */
  202. #endif