dma-mapping.h

/*
 * Copyright 2004-2009 Analog Devices Inc.
 *
 * Licensed under the GPL-2 or later.
 */
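
/*
 * Blackfin has no IOMMU and DMA is not cache-coherent: bus addresses
 * are identical to kernel virtual addresses, so streaming mappings
 * reduce to explicit data-cache writeback/invalidate operations.
 */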
#ifndef _BLACKFIN_DMA_MAPPING_H
#define _BLACKFIN_DMA_MAPPING_H

#include <asm/cacheflush.h>
struct scatterlist;
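
/*
 * Coherent allocations are implemented out of line; the returned
 * buffer can be shared with the device without the sync helpers
 * below (on Blackfin it is typically carved from an uncached region).
 */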
void *dma_alloc_coherent(struct device *dev, size_t size,
                         dma_addr_t *dma_handle, gfp_t gfp);
void dma_free_coherent(struct device *dev, size_t size, void *vaddr,
                       dma_addr_t dma_handle);

/*
 * Now for the API extensions over the pci_ one
 */
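/*
 * All of system memory is reachable by DMA and the data-cache line
 * is 32 bytes, hence the trivial definitions that follow.
 */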
#define dma_alloc_noncoherent(d, s, h, f) dma_alloc_coherent(d, s, h, f)
#define dma_free_noncoherent(d, s, v, h) dma_free_coherent(d, s, v, h)
#define dma_supported(d, m)         (1)
#define dma_get_cache_alignment()   (32)
#define dma_is_consistent(d, h)     (1)

static inline int
dma_set_mask(struct device *dev, u64 dma_mask)
{
        if (!dev->dma_mask || !dma_supported(dev, dma_mask))
                return -EIO;

        *dev->dma_mask = dma_mask;

        return 0;
}

static inline int
dma_mapping_error(struct device *dev, dma_addr_t dma_addr)
{
        /* Mappings never fail on this architecture. */
        return 0;
}
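
/*
 * Cache maintenance helpers: _dma_sync() uses the inline version when
 * the direction is a compile-time constant so the switch folds away,
 * and falls back to the out-of-line __dma_sync() otherwise.  DMA_NONE
 * hits BUG(), which does not return, so its fallthrough is harmless.
 */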
extern void
__dma_sync(dma_addr_t addr, size_t size, enum dma_data_direction dir);

static inline void
__dma_sync_inline(dma_addr_t addr, size_t size, enum dma_data_direction dir)
{
        switch (dir) {
        case DMA_NONE:
                BUG();
        case DMA_TO_DEVICE:             /* writeback only */
                flush_dcache_range(addr, addr + size);
                break;
        case DMA_FROM_DEVICE:           /* invalidate only */
        case DMA_BIDIRECTIONAL:         /* flush and invalidate */
                /* Blackfin has no dedicated invalidate (it includes a flush) */
                invalidate_dcache_range(addr, addr + size);
                break;
        }
}

static inline void
_dma_sync(dma_addr_t addr, size_t size, enum dma_data_direction dir)
{
        if (__builtin_constant_p(dir))
                __dma_sync_inline(addr, size, dir);
        else
                __dma_sync(addr, size, dir);
}
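
/*
 * Streaming mappings: with a flat address space the bus address is
 * just the kernel pointer, so mapping is a cache sync and unmapping
 * has no work to do beyond validating the direction.
 */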
static inline dma_addr_t
dma_map_single(struct device *dev, void *ptr, size_t size,
               enum dma_data_direction dir)
{
        _dma_sync((dma_addr_t)ptr, size, dir);
        return (dma_addr_t)ptr;
}

static inline dma_addr_t
dma_map_page(struct device *dev, struct page *page,
             unsigned long offset, size_t size,
             enum dma_data_direction dir)
{
        return dma_map_single(dev, page_address(page) + offset, size, dir);
}

static inline void
dma_unmap_single(struct device *dev, dma_addr_t dma_addr, size_t size,
                 enum dma_data_direction dir)
{
        BUG_ON(!valid_dma_direction(dir));
}

static inline void
dma_unmap_page(struct device *dev, dma_addr_t dma_addr, size_t size,
               enum dma_data_direction dir)
{
        dma_unmap_single(dev, dma_addr, size, dir);
}
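
/*
 * Scatterlist mapping has to walk the list and sync each entry, so it
 * is implemented out of line; unmapping again only validates the
 * direction.
 */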
extern int dma_map_sg(struct device *dev, struct scatterlist *sg, int nents,
                      enum dma_data_direction dir);

static inline void
dma_unmap_sg(struct device *dev, struct scatterlist *sg,
             int nhwentries, enum dma_data_direction dir)
{
        BUG_ON(!valid_dma_direction(dir));
}
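
/*
 * Sync operations: only the *_for_device variants perform cache
 * maintenance; the *_for_cpu variants merely validate the direction.
 */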
static inline void
dma_sync_single_range_for_cpu(struct device *dev, dma_addr_t handle,
                              unsigned long offset, size_t size,
                              enum dma_data_direction dir)
{
        BUG_ON(!valid_dma_direction(dir));
}

static inline void
dma_sync_single_range_for_device(struct device *dev, dma_addr_t handle,
                                 unsigned long offset, size_t size,
                                 enum dma_data_direction dir)
{
        _dma_sync(handle + offset, size, dir);
}

static inline void
dma_sync_single_for_cpu(struct device *dev, dma_addr_t handle, size_t size,
                        enum dma_data_direction dir)
{
        dma_sync_single_range_for_cpu(dev, handle, 0, size, dir);
}

static inline void
dma_sync_single_for_device(struct device *dev, dma_addr_t handle, size_t size,
                           enum dma_data_direction dir)
{
        dma_sync_single_range_for_device(dev, handle, 0, size, dir);
}

static inline void
dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, int nents,
                    enum dma_data_direction dir)
{
        BUG_ON(!valid_dma_direction(dir));
}

extern void
dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg,
                       int nents, enum dma_data_direction dir);

static inline void
dma_cache_sync(struct device *dev, void *vaddr, size_t size,
               enum dma_data_direction dir)
{
        _dma_sync((dma_addr_t)vaddr, size, dir);
}

#endif                          /* _BLACKFIN_DMA_MAPPING_H */
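
/*
 * A minimal usage sketch for a streaming transmit, assuming a driver
 * with a device pointer `dev` and a kmalloc'd buffer `buf` of `len`
 * bytes (the names are hypothetical, not from this header):
 *
 *      dma_addr_t handle;
 *
 *      handle = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
 *      if (dma_mapping_error(dev, handle))
 *              return -EIO;
 *      ... point the DMA engine at `handle` and start the transfer ...
 *      dma_unmap_single(dev, handle, len, DMA_TO_DEVICE);
 *
 * On this port the map is a cache writeback and the unmap is a no-op,
 * but drivers should still pair the calls for portability.
 */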