dma-mapping.h

/*
 * include/asm-xtensa/dma-mapping.h
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003 - 2005 Tensilica Inc.
 */
#ifndef _XTENSA_DMA_MAPPING_H
#define _XTENSA_DMA_MAPPING_H

#include <asm/scatterlist.h>
#include <asm/cache.h>
#include <asm/io.h>
#include <linux/mm.h>
/*
 * DMA-consistent mapping functions.
 */

extern void *consistent_alloc(int, size_t, dma_addr_t, unsigned long);
extern void consistent_free(void *, size_t, dma_addr_t);
extern void consistent_sync(void *, size_t, int);

/* The non-coherent variants simply fall back to the coherent allocator. */
#define dma_alloc_noncoherent(d, s, h, f) dma_alloc_coherent(d, s, h, f)
#define dma_free_noncoherent(d, s, v, h) dma_free_coherent(d, s, v, h)

void *dma_alloc_coherent(struct device *dev, size_t size,
                         dma_addr_t *dma_handle, gfp_t flag);

void dma_free_coherent(struct device *dev, size_t size,
                       void *vaddr, dma_addr_t dma_handle);
static inline dma_addr_t
dma_map_single(struct device *dev, void *ptr, size_t size,
               enum dma_data_direction direction)
{
        BUG_ON(direction == DMA_NONE);

        /* Make the CPU caches consistent for the transfer, then hand out
         * the bus address, which on this port is just the physical address. */
        consistent_sync(ptr, size, direction);
        return virt_to_phys(ptr);
}

static inline void
dma_unmap_single(struct device *dev, dma_addr_t dma_addr, size_t size,
                 enum dma_data_direction direction)
{
        /* Nothing to tear down; the mapping was just the physical address. */
        BUG_ON(direction == DMA_NONE);
}

static inline int
dma_map_sg(struct device *dev, struct scatterlist *sg, int nents,
           enum dma_data_direction direction)
{
        int i;

        BUG_ON(direction == DMA_NONE);

        for (i = 0; i < nents; i++, sg++) {
                BUG_ON(!sg->page);

                sg->dma_address = page_to_phys(sg->page) + sg->offset;
                consistent_sync(page_address(sg->page) + sg->offset,
                                sg->length, direction);
        }

        return nents;
}
static inline dma_addr_t
dma_map_page(struct device *dev, struct page *page, unsigned long offset,
             size_t size, enum dma_data_direction direction)
{
        BUG_ON(direction == DMA_NONE);
        return (dma_addr_t)(page_to_pfn(page)) * PAGE_SIZE + offset;
}

static inline void
dma_unmap_page(struct device *dev, dma_addr_t dma_address, size_t size,
               enum dma_data_direction direction)
{
        BUG_ON(direction == DMA_NONE);
}

static inline void
dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nhwentries,
             enum dma_data_direction direction)
{
        BUG_ON(direction == DMA_NONE);
}
static inline void
dma_sync_single_for_cpu(struct device *dev, dma_addr_t dma_handle, size_t size,
                        enum dma_data_direction direction)
{
        consistent_sync((void *)bus_to_virt(dma_handle), size, direction);
}

static inline void
dma_sync_single_for_device(struct device *dev, dma_addr_t dma_handle, size_t size,
                           enum dma_data_direction direction)
{
        consistent_sync((void *)bus_to_virt(dma_handle), size, direction);
}

static inline void
dma_sync_single_range_for_cpu(struct device *dev, dma_addr_t dma_handle,
                              unsigned long offset, size_t size,
                              enum dma_data_direction direction)
{
        consistent_sync((void *)bus_to_virt(dma_handle) + offset, size, direction);
}

static inline void
dma_sync_single_range_for_device(struct device *dev, dma_addr_t dma_handle,
                                 unsigned long offset, size_t size,
                                 enum dma_data_direction direction)
{
        consistent_sync((void *)bus_to_virt(dma_handle) + offset, size, direction);
}
static inline void
dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, int nelems,
                    enum dma_data_direction dir)
{
        int i;

        for (i = 0; i < nelems; i++, sg++)
                consistent_sync(page_address(sg->page) + sg->offset,
                                sg->length, dir);
}

static inline void
dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg, int nelems,
                       enum dma_data_direction dir)
{
        int i;

        for (i = 0; i < nelems; i++, sg++)
                consistent_sync(page_address(sg->page) + sg->offset,
                                sg->length, dir);
}
static inline int
dma_mapping_error(dma_addr_t dma_addr)
{
        /* Mappings cannot fail on this port. */
        return 0;
}

static inline int
dma_supported(struct device *dev, u64 mask)
{
        return 1;
}

static inline int
dma_set_mask(struct device *dev, u64 mask)
{
        if (!dev->dma_mask || !dma_supported(dev, mask))
                return -EIO;

        *dev->dma_mask = mask;

        return 0;
}

static inline int
dma_get_cache_alignment(void)
{
        return L1_CACHE_BYTES;
}

#define dma_is_consistent(d)    (1)

static inline void
dma_cache_sync(void *vaddr, size_t size,
               enum dma_data_direction direction)
{
        consistent_sync(vaddr, size, direction);
}

#endif /* _XTENSA_DMA_MAPPING_H */
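
For orientation, here is a minimal sketch of how a driver might use this interface: dma_alloc_coherent() for a long-lived, always-consistent buffer, and dma_map_single()/dma_unmap_single() for a one-off streaming transfer. Only the dma_* calls come from the header above; the device, MY_RING_SIZE, my_setup_ring(), my_send() and my_start_dma() names are hypothetical placeholders, and real hardware would program its own registers and wait for completion.

/* Hypothetical usage sketch -- not part of the original header. */
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/gfp.h>

#define MY_RING_SIZE 4096                       /* arbitrary example size */

/* Placeholder for a device-specific "start DMA" register write. */
static void my_start_dma(dma_addr_t bus, size_t len)
{
        /* program the (imaginary) device with the bus address and length */
}

/* Coherent buffer: CPU and device may both touch it at any time,
 * with no explicit sync calls for the lifetime of the buffer. */
static int my_setup_ring(struct device *dev, void **ring, dma_addr_t *ring_bus)
{
        *ring = dma_alloc_coherent(dev, MY_RING_SIZE, ring_bus, GFP_KERNEL);
        if (!*ring)
                return -ENOMEM;
        return 0;               /* *ring_bus is what the device is programmed with */
}

/* Streaming mapping: map just around a single transfer to the device. */
static int my_send(struct device *dev, void *data, size_t len)
{
        dma_addr_t bus = dma_map_single(dev, data, len, DMA_TO_DEVICE);

        if (dma_mapping_error(bus))
                return -EIO;

        my_start_dma(bus, len);
        /* ... wait for the transfer to complete ... */
        dma_unmap_single(dev, bus, len, DMA_TO_DEVICE);

        return 0;
}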