@@ -123,7 +123,12 @@ static inline void dma_sync_single_range_for_cpu(struct device *dev,
 						 size_t size,
 						 enum dma_data_direction dir)
 {
-	dma_sync_single_for_cpu(dev, addr + offset, size, dir);
+	const struct dma_map_ops *ops = get_dma_ops(dev);
+
+	BUG_ON(!valid_dma_direction(dir));
+	if (ops->sync_single_for_cpu)
+		ops->sync_single_for_cpu(dev, addr + offset, size, dir);
+	debug_dma_sync_single_range_for_cpu(dev, addr, offset, size, dir);
 }
 
 static inline void dma_sync_single_range_for_device(struct device *dev,
@@ -132,7 +137,12 @@ static inline void dma_sync_single_range_for_device(struct device *dev,
 						    size_t size,
 						    enum dma_data_direction dir)
 {
-	dma_sync_single_for_device(dev, addr + offset, size, dir);
+	const struct dma_map_ops *ops = get_dma_ops(dev);
+
+	BUG_ON(!valid_dma_direction(dir));
+	if (ops->sync_single_for_device)
+		ops->sync_single_for_device(dev, addr + offset, size, dir);
+	debug_dma_sync_single_range_for_device(dev, addr, offset, size, dir);
 }
 
 static inline void