/* arch/x86 dma-mapping.h — DMA/IOMMU mapping interface (cleaned of extraction artifacts) */
  1. #ifndef _ASM_DMA_MAPPING_H_
  2. #define _ASM_DMA_MAPPING_H_
  3. /*
  4. * IOMMU interface. See Documentation/DMA-mapping.txt and DMA-API.txt for
  5. * documentation.
  6. */
  7. #include <linux/scatterlist.h>
  8. #include <asm/io.h>
  9. #include <asm/swiotlb.h>
  10. extern dma_addr_t bad_dma_address;
  11. struct dma_mapping_ops {
  12. int (*mapping_error)(dma_addr_t dma_addr);
  13. void* (*alloc_coherent)(struct device *dev, size_t size,
  14. dma_addr_t *dma_handle, gfp_t gfp);
  15. void (*free_coherent)(struct device *dev, size_t size,
  16. void *vaddr, dma_addr_t dma_handle);
  17. dma_addr_t (*map_single)(struct device *hwdev, phys_addr_t ptr,
  18. size_t size, int direction);
  19. /* like map_single, but doesn't check the device mask */
  20. dma_addr_t (*map_simple)(struct device *hwdev, phys_addr_t ptr,
  21. size_t size, int direction);
  22. void (*unmap_single)(struct device *dev, dma_addr_t addr,
  23. size_t size, int direction);
  24. void (*sync_single_for_cpu)(struct device *hwdev,
  25. dma_addr_t dma_handle, size_t size,
  26. int direction);
  27. void (*sync_single_for_device)(struct device *hwdev,
  28. dma_addr_t dma_handle, size_t size,
  29. int direction);
  30. void (*sync_single_range_for_cpu)(struct device *hwdev,
  31. dma_addr_t dma_handle, unsigned long offset,
  32. size_t size, int direction);
  33. void (*sync_single_range_for_device)(struct device *hwdev,
  34. dma_addr_t dma_handle, unsigned long offset,
  35. size_t size, int direction);
  36. void (*sync_sg_for_cpu)(struct device *hwdev,
  37. struct scatterlist *sg, int nelems,
  38. int direction);
  39. void (*sync_sg_for_device)(struct device *hwdev,
  40. struct scatterlist *sg, int nelems,
  41. int direction);
  42. int (*map_sg)(struct device *hwdev, struct scatterlist *sg,
  43. int nents, int direction);
  44. void (*unmap_sg)(struct device *hwdev,
  45. struct scatterlist *sg, int nents,
  46. int direction);
  47. int (*dma_supported)(struct device *hwdev, u64 mask);
  48. int is_phys;
  49. };
  50. extern const struct dma_mapping_ops *dma_ops;
  51. #define dma_alloc_noncoherent(d, s, h, f) dma_alloc_coherent(d, s, h, f)
  52. #define dma_free_noncoherent(d, s, v, h) dma_free_coherent(d, s, v, h)
  53. void *dma_alloc_coherent(struct device *dev, size_t size,
  54. dma_addr_t *dma_handle, gfp_t flag);
  55. void dma_free_coherent(struct device *dev, size_t size,
  56. void *vaddr, dma_addr_t dma_handle);
  57. extern int dma_supported(struct device *hwdev, u64 mask);
  58. extern int dma_set_mask(struct device *dev, u64 mask);
  59. #ifdef CONFIG_X86_32
  60. # include "dma-mapping_32.h"
  61. #else
  62. # include "dma-mapping_64.h"
  63. #endif
  64. static inline dma_addr_t
  65. dma_map_single(struct device *hwdev, void *ptr, size_t size,
  66. int direction)
  67. {
  68. BUG_ON(!valid_dma_direction(direction));
  69. return dma_ops->map_single(hwdev, virt_to_phys(ptr), size, direction);
  70. }
  71. static inline void
  72. dma_unmap_single(struct device *dev, dma_addr_t addr, size_t size,
  73. int direction)
  74. {
  75. BUG_ON(!valid_dma_direction(direction));
  76. if (dma_ops->unmap_single)
  77. dma_ops->unmap_single(dev, addr, size, direction);
  78. }
  79. static inline int
  80. dma_map_sg(struct device *hwdev, struct scatterlist *sg,
  81. int nents, int direction)
  82. {
  83. BUG_ON(!valid_dma_direction(direction));
  84. return dma_ops->map_sg(hwdev, sg, nents, direction);
  85. }
  86. static inline void
  87. dma_unmap_sg(struct device *hwdev, struct scatterlist *sg, int nents,
  88. int direction)
  89. {
  90. BUG_ON(!valid_dma_direction(direction));
  91. if (dma_ops->unmap_sg)
  92. dma_ops->unmap_sg(hwdev, sg, nents, direction);
  93. }
  94. static inline void
  95. dma_sync_single_for_cpu(struct device *hwdev, dma_addr_t dma_handle,
  96. size_t size, int direction)
  97. {
  98. BUG_ON(!valid_dma_direction(direction));
  99. if (dma_ops->sync_single_for_cpu)
  100. dma_ops->sync_single_for_cpu(hwdev, dma_handle, size,
  101. direction);
  102. flush_write_buffers();
  103. }
  104. static inline void
  105. dma_sync_single_for_device(struct device *hwdev, dma_addr_t dma_handle,
  106. size_t size, int direction)
  107. {
  108. BUG_ON(!valid_dma_direction(direction));
  109. if (dma_ops->sync_single_for_device)
  110. dma_ops->sync_single_for_device(hwdev, dma_handle, size,
  111. direction);
  112. flush_write_buffers();
  113. }
  114. static inline void
  115. dma_sync_single_range_for_cpu(struct device *hwdev, dma_addr_t dma_handle,
  116. unsigned long offset, size_t size, int direction)
  117. {
  118. BUG_ON(!valid_dma_direction(direction));
  119. if (dma_ops->sync_single_range_for_cpu)
  120. dma_ops->sync_single_range_for_cpu(hwdev, dma_handle, offset,
  121. size, direction);
  122. flush_write_buffers();
  123. }
  124. static inline void
  125. dma_sync_single_range_for_device(struct device *hwdev, dma_addr_t dma_handle,
  126. unsigned long offset, size_t size,
  127. int direction)
  128. {
  129. BUG_ON(!valid_dma_direction(direction));
  130. if (dma_ops->sync_single_range_for_device)
  131. dma_ops->sync_single_range_for_device(hwdev, dma_handle,
  132. offset, size, direction);
  133. flush_write_buffers();
  134. }
  135. static inline void
  136. dma_sync_sg_for_cpu(struct device *hwdev, struct scatterlist *sg,
  137. int nelems, int direction)
  138. {
  139. BUG_ON(!valid_dma_direction(direction));
  140. if (dma_ops->sync_sg_for_cpu)
  141. dma_ops->sync_sg_for_cpu(hwdev, sg, nelems, direction);
  142. flush_write_buffers();
  143. }
  144. static inline void
  145. dma_sync_sg_for_device(struct device *hwdev, struct scatterlist *sg,
  146. int nelems, int direction)
  147. {
  148. BUG_ON(!valid_dma_direction(direction));
  149. if (dma_ops->sync_sg_for_device)
  150. dma_ops->sync_sg_for_device(hwdev, sg, nelems, direction);
  151. flush_write_buffers();
  152. }
  153. static inline dma_addr_t dma_map_page(struct device *dev, struct page *page,
  154. size_t offset, size_t size,
  155. int direction)
  156. {
  157. BUG_ON(!valid_dma_direction(direction));
  158. return dma_ops->map_single(dev, page_to_phys(page)+offset,
  159. size, direction);
  160. }
  161. static inline void dma_unmap_page(struct device *dev, dma_addr_t addr,
  162. size_t size, int direction)
  163. {
  164. dma_unmap_single(dev, addr, size, direction);
  165. }
  166. static inline void
  167. dma_cache_sync(struct device *dev, void *vaddr, size_t size,
  168. enum dma_data_direction dir)
  169. {
  170. flush_write_buffers();
  171. }
  172. #endif