@@ -21,13 +21,6 @@
  * default the offset is PCI_DRAM_OFFSET.
  */
 
-unsigned long get_dma_direct_offset(struct device *dev)
-{
-	if (dev)
-		return (unsigned long)dev->archdata.dma_data;
-
-	return PCI_DRAM_OFFSET;
-}
 
 void *dma_direct_alloc_coherent(struct device *dev, size_t size,
 				dma_addr_t *dma_handle, gfp_t flag)
@@ -37,7 +30,7 @@ void *dma_direct_alloc_coherent(struct device *dev, size_t size,
 	ret = __dma_alloc_coherent(dev, size, dma_handle, flag);
 	if (ret == NULL)
 		return NULL;
-	*dma_handle += get_dma_direct_offset(dev);
+	*dma_handle += get_dma_offset(dev);
 	return ret;
 #else
 	struct page *page;
@@ -51,7 +44,7 @@ void *dma_direct_alloc_coherent(struct device *dev, size_t size,
 		return NULL;
 	ret = page_address(page);
 	memset(ret, 0, size);
-	*dma_handle = virt_to_abs(ret) + get_dma_direct_offset(dev);
+	*dma_handle = virt_to_abs(ret) + get_dma_offset(dev);
 
 	return ret;
 #endif
@@ -75,7 +68,7 @@ static int dma_direct_map_sg(struct device *dev, struct scatterlist *sgl,
 	int i;
 
 	for_each_sg(sgl, sg, nents, i) {
-		sg->dma_address = sg_phys(sg) + get_dma_direct_offset(dev);
+		sg->dma_address = sg_phys(sg) + get_dma_offset(dev);
 		sg->dma_length = sg->length;
 		__dma_sync_page(sg_page(sg), sg->offset, sg->length, direction);
 	}
@@ -110,7 +103,7 @@ static inline dma_addr_t dma_direct_map_page(struct device *dev,
 {
 	BUG_ON(dir == DMA_NONE);
 	__dma_sync_page(page, offset, size, dir);
-	return page_to_phys(page) + offset + get_dma_direct_offset(dev);
+	return page_to_phys(page) + offset + get_dma_offset(dev);
 }
 
 static inline void dma_direct_unmap_page(struct device *dev,
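For reference, the call sites above now obtain the per-device DMA offset through get_dma_offset(dev) instead of the get_dma_direct_offset() helper deleted from dma.c. A minimal sketch of what that replacement inline presumably looks like, assuming it is defined in asm/dma-mapping.h and keeps the semantics of the removed function (per-device offset taken from archdata.dma_data, PCI_DRAM_OFFSET as the fallback); the exact return type and location are assumptions, not confirmed by this hunk:

/*
 * Sketch only: mirrors the removed get_dma_direct_offset().
 * Return the per-device offset stashed in archdata.dma_data when a
 * device is supplied, otherwise fall back to PCI_DRAM_OFFSET.
 */
static inline dma_addr_t get_dma_offset(struct device *dev)
{
	if (dev)
		return (dma_addr_t)(unsigned long)dev->archdata.dma_data;

	return PCI_DRAM_OFFSET;
}

Keeping the helper as a static inline in the header lets every caller in the diff above stay a plain addition of the offset, with no extra function call on the hot mapping paths.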