  1. /*
  2. * Copyright 2004-2009 Analog Devices Inc.
  3. *
  4. * Licensed under the GPL-2 or later.
  5. */
#ifndef _BLACKFIN_DMA_MAPPING_H
#define _BLACKFIN_DMA_MAPPING_H

#include <asm/cacheflush.h>

struct scatterlist;

/* Coherent DMA buffer allocation/free; implemented out of line in arch code. */
void *dma_alloc_coherent(struct device *dev, size_t size,
			 dma_addr_t *dma_handle, gfp_t gfp);
void dma_free_coherent(struct device *dev, size_t size, void *vaddr,
		       dma_addr_t dma_handle);

/*
 * Now for the API extensions over the pci_ one
 */
/* Noncoherent allocations are just aliases of the coherent ones here. */
#define dma_alloc_noncoherent(d, s, h, f) dma_alloc_coherent(d, s, h, f)
#define dma_free_noncoherent(d, s, v, h) dma_free_coherent(d, s, v, h)
/* dma_supported() unconditionally reports success: no mask restriction. */
#define dma_supported(d, m) (1)
  20. static inline int
  21. dma_set_mask(struct device *dev, u64 dma_mask)
  22. {
  23. if (!dev->dma_mask || !dma_supported(dev, dma_mask))
  24. return -EIO;
  25. *dev->dma_mask = dma_mask;
  26. return 0;
  27. }
  28. static inline int
  29. dma_mapping_error(struct device *dev, dma_addr_t dma_addr)
  30. {
  31. return 0;
  32. }
/* Out-of-line cache maintenance helper, defined in arch DMA code. */
extern void
__dma_sync(dma_addr_t addr, size_t size, enum dma_data_direction dir);

/*
 * Perform the cache maintenance required before a DMA transfer over
 * [addr, addr + size), inlined for compile-time-constant directions.
 * DMA_TO_DEVICE needs only a writeback; the other directions use the
 * combined flush-and-invalidate (see comment below).
 */
static inline void
__dma_sync_inline(dma_addr_t addr, size_t size, enum dma_data_direction dir)
{
	switch (dir) {
	case DMA_NONE:
		BUG();
		/* fall through after BUG(), which is expected not to return */
	case DMA_TO_DEVICE:		/* writeback only */
		flush_dcache_range(addr, addr + size);
		break;
	case DMA_FROM_DEVICE:		/* invalidate only */
	case DMA_BIDIRECTIONAL:	/* flush and invalidate */
		/* Blackfin has no dedicated invalidate (it includes a flush) */
		invalidate_dcache_range(addr, addr + size);
		break;
	}
}
  51. static inline void
  52. _dma_sync(dma_addr_t addr, size_t size, enum dma_data_direction dir)
  53. {
  54. if (__builtin_constant_p(dir))
  55. __dma_sync_inline(addr, size, dir);
  56. else
  57. __dma_sync(addr, size, dir);
  58. }
  59. static inline dma_addr_t
  60. dma_map_single(struct device *dev, void *ptr, size_t size,
  61. enum dma_data_direction dir)
  62. {
  63. _dma_sync((dma_addr_t)ptr, size, dir);
  64. return (dma_addr_t) ptr;
  65. }
  66. static inline dma_addr_t
  67. dma_map_page(struct device *dev, struct page *page,
  68. unsigned long offset, size_t size,
  69. enum dma_data_direction dir)
  70. {
  71. return dma_map_single(dev, page_address(page) + offset, size, dir);
  72. }
  73. static inline void
  74. dma_unmap_single(struct device *dev, dma_addr_t dma_addr, size_t size,
  75. enum dma_data_direction dir)
  76. {
  77. BUG_ON(!valid_dma_direction(dir));
  78. }
  79. static inline void
  80. dma_unmap_page(struct device *dev, dma_addr_t dma_addr, size_t size,
  81. enum dma_data_direction dir)
  82. {
  83. dma_unmap_single(dev, dma_addr, size, dir);
  84. }
  85. extern int dma_map_sg(struct device *dev, struct scatterlist *sg, int nents,
  86. enum dma_data_direction dir);
  87. static inline void
  88. dma_unmap_sg(struct device *dev, struct scatterlist *sg,
  89. int nhwentries, enum dma_data_direction dir)
  90. {
  91. BUG_ON(!valid_dma_direction(dir));
  92. }
  93. static inline void
  94. dma_sync_single_range_for_cpu(struct device *dev, dma_addr_t handle,
  95. unsigned long offset, size_t size,
  96. enum dma_data_direction dir)
  97. {
  98. BUG_ON(!valid_dma_direction(dir));
  99. }
  100. static inline void
  101. dma_sync_single_range_for_device(struct device *dev, dma_addr_t handle,
  102. unsigned long offset, size_t size,
  103. enum dma_data_direction dir)
  104. {
  105. _dma_sync(handle + offset, size, dir);
  106. }
  107. static inline void
  108. dma_sync_single_for_cpu(struct device *dev, dma_addr_t handle, size_t size,
  109. enum dma_data_direction dir)
  110. {
  111. dma_sync_single_range_for_cpu(dev, handle, 0, size, dir);
  112. }
  113. static inline void
  114. dma_sync_single_for_device(struct device *dev, dma_addr_t handle, size_t size,
  115. enum dma_data_direction dir)
  116. {
  117. dma_sync_single_range_for_device(dev, handle, 0, size, dir);
  118. }
  119. static inline void
  120. dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, int nents,
  121. enum dma_data_direction dir)
  122. {
  123. BUG_ON(!valid_dma_direction(dir));
  124. }
  125. extern void
  126. dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg,
  127. int nents, enum dma_data_direction dir);
  128. static inline void
  129. dma_cache_sync(struct device *dev, void *vaddr, size_t size,
  130. enum dma_data_direction dir)
  131. {
  132. _dma_sync((dma_addr_t)vaddr, size, dir);
  133. }
  134. #endif /* _BLACKFIN_DMA_MAPPING_H */