/*
 * include/asm-x86_64/dma-mapping.h
 */
#ifndef _X8664_DMA_MAPPING_H
#define _X8664_DMA_MAPPING_H 1

/*
 * IOMMU interface. See Documentation/DMA-mapping.txt and DMA-API.txt for
 * documentation.
 */
#include <linux/config.h>

#include <asm/scatterlist.h>
#include <asm/io.h>
#include <asm/swiotlb.h>
  11. struct dma_mapping_ops {
  12. int (*mapping_error)(dma_addr_t dma_addr);
  13. void* (*alloc_coherent)(struct device *dev, size_t size,
  14. dma_addr_t *dma_handle, gfp_t gfp);
  15. void (*free_coherent)(struct device *dev, size_t size,
  16. void *vaddr, dma_addr_t dma_handle);
  17. dma_addr_t (*map_single)(struct device *hwdev, void *ptr,
  18. size_t size, int direction);
  19. /* like map_single, but doesn't check the device mask */
  20. dma_addr_t (*map_simple)(struct device *hwdev, char *ptr,
  21. size_t size, int direction);
  22. void (*unmap_single)(struct device *dev, dma_addr_t addr,
  23. size_t size, int direction);
  24. void (*sync_single_for_cpu)(struct device *hwdev,
  25. dma_addr_t dma_handle, size_t size,
  26. int direction);
  27. void (*sync_single_for_device)(struct device *hwdev,
  28. dma_addr_t dma_handle, size_t size,
  29. int direction);
  30. void (*sync_single_range_for_cpu)(struct device *hwdev,
  31. dma_addr_t dma_handle, unsigned long offset,
  32. size_t size, int direction);
  33. void (*sync_single_range_for_device)(struct device *hwdev,
  34. dma_addr_t dma_handle, unsigned long offset,
  35. size_t size, int direction);
  36. void (*sync_sg_for_cpu)(struct device *hwdev,
  37. struct scatterlist *sg, int nelems,
  38. int direction);
  39. void (*sync_sg_for_device)(struct device *hwdev,
  40. struct scatterlist *sg, int nelems,
  41. int direction);
  42. int (*map_sg)(struct device *hwdev, struct scatterlist *sg,
  43. int nents, int direction);
  44. void (*unmap_sg)(struct device *hwdev,
  45. struct scatterlist *sg, int nents,
  46. int direction);
  47. int (*dma_supported)(struct device *hwdev, u64 mask);
  48. int is_phys;
  49. };
  50. extern dma_addr_t bad_dma_address;
  51. extern struct dma_mapping_ops* dma_ops;
  52. extern int iommu_merge;
  53. static inline int dma_mapping_error(dma_addr_t dma_addr)
  54. {
  55. if (dma_ops->mapping_error)
  56. return dma_ops->mapping_error(dma_addr);
  57. return (dma_addr == bad_dma_address);
  58. }
  59. extern void *dma_alloc_coherent(struct device *dev, size_t size,
  60. dma_addr_t *dma_handle, gfp_t gfp);
  61. extern void dma_free_coherent(struct device *dev, size_t size, void *vaddr,
  62. dma_addr_t dma_handle);
  63. static inline dma_addr_t
  64. dma_map_single(struct device *hwdev, void *ptr, size_t size,
  65. int direction)
  66. {
  67. return dma_ops->map_single(hwdev, ptr, size, direction);
  68. }
  69. static inline void
  70. dma_unmap_single(struct device *dev, dma_addr_t addr,size_t size,
  71. int direction)
  72. {
  73. dma_ops->unmap_single(dev, addr, size, direction);
  74. }
  75. #define dma_map_page(dev,page,offset,size,dir) \
  76. dma_map_single((dev), page_address(page)+(offset), (size), (dir))
  77. #define dma_unmap_page dma_unmap_single
  78. static inline void
  79. dma_sync_single_for_cpu(struct device *hwdev, dma_addr_t dma_handle,
  80. size_t size, int direction)
  81. {
  82. if (dma_ops->sync_single_for_cpu)
  83. dma_ops->sync_single_for_cpu(hwdev, dma_handle, size,
  84. direction);
  85. flush_write_buffers();
  86. }
  87. static inline void
  88. dma_sync_single_for_device(struct device *hwdev, dma_addr_t dma_handle,
  89. size_t size, int direction)
  90. {
  91. if (dma_ops->sync_single_for_device)
  92. dma_ops->sync_single_for_device(hwdev, dma_handle, size,
  93. direction);
  94. flush_write_buffers();
  95. }
  96. static inline void
  97. dma_sync_single_range_for_cpu(struct device *hwdev, dma_addr_t dma_handle,
  98. unsigned long offset, size_t size, int direction)
  99. {
  100. if (dma_ops->sync_single_range_for_cpu) {
  101. dma_ops->sync_single_range_for_cpu(hwdev, dma_handle, offset, size, direction);
  102. }
  103. flush_write_buffers();
  104. }
  105. static inline void
  106. dma_sync_single_range_for_device(struct device *hwdev, dma_addr_t dma_handle,
  107. unsigned long offset, size_t size, int direction)
  108. {
  109. if (dma_ops->sync_single_range_for_device)
  110. dma_ops->sync_single_range_for_device(hwdev, dma_handle,
  111. offset, size, direction);
  112. flush_write_buffers();
  113. }
  114. static inline void
  115. dma_sync_sg_for_cpu(struct device *hwdev, struct scatterlist *sg,
  116. int nelems, int direction)
  117. {
  118. if (dma_ops->sync_sg_for_cpu)
  119. dma_ops->sync_sg_for_cpu(hwdev, sg, nelems, direction);
  120. flush_write_buffers();
  121. }
  122. static inline void
  123. dma_sync_sg_for_device(struct device *hwdev, struct scatterlist *sg,
  124. int nelems, int direction)
  125. {
  126. if (dma_ops->sync_sg_for_device) {
  127. dma_ops->sync_sg_for_device(hwdev, sg, nelems, direction);
  128. }
  129. flush_write_buffers();
  130. }
  131. static inline int
  132. dma_map_sg(struct device *hwdev, struct scatterlist *sg, int nents, int direction)
  133. {
  134. return dma_ops->map_sg(hwdev, sg, nents, direction);
  135. }
  136. static inline void
  137. dma_unmap_sg(struct device *hwdev, struct scatterlist *sg, int nents,
  138. int direction)
  139. {
  140. dma_ops->unmap_sg(hwdev, sg, nents, direction);
  141. }
  142. extern int dma_supported(struct device *hwdev, u64 mask);
  143. /* same for gart, swiotlb, and nommu */
  144. static inline int dma_get_cache_alignment(void)
  145. {
  146. return boot_cpu_data.x86_clflush_size;
  147. }
  148. #define dma_is_consistent(h) 1
  149. extern int dma_set_mask(struct device *dev, u64 mask);
  150. static inline void
  151. dma_cache_sync(void *vaddr, size_t size, enum dma_data_direction dir)
  152. {
  153. flush_write_buffers();
  154. }
  155. extern struct device fallback_dev;
  156. extern int panic_on_overflow;
  157. #endif /* _X8664_DMA_MAPPING_H */