@@ -208,6 +208,7 @@ __dma_alloc(struct device *dev, size_t size, dma_addr_t *handle, gfp_t gfp,
 		unsigned long kaddr = (unsigned long)page_address(page);
 		memset(page_address(page), 0, size);
 		dmac_flush_range(kaddr, kaddr + size);
+		outer_flush_range(__pa(kaddr), __pa(kaddr) + size);
 	}
 
 	/*
@@ -485,15 +486,20 @@ void consistent_sync(void *vaddr, size_t size, int direction)
 	unsigned long start = (unsigned long)vaddr;
 	unsigned long end = start + size;
 
+	BUG_ON(!virt_addr_valid(start) || !virt_addr_valid(end));
+
 	switch (direction) {
 	case DMA_FROM_DEVICE:		/* invalidate only */
 		dmac_inv_range(start, end);
+		outer_inv_range(__pa(start), __pa(end));
 		break;
 	case DMA_TO_DEVICE:		/* writeback only */
 		dmac_clean_range(start, end);
+		outer_clean_range(__pa(start), __pa(end));
 		break;
 	case DMA_BIDIRECTIONAL:		/* writeback and invalidate */
 		dmac_flush_range(start, end);
+		outer_flush_range(__pa(start), __pa(end));
 		break;
 	default:
 		BUG();
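
The outer_*_range() calls added above take physical addresses, hence the __pa() conversions: an outer cache such as the ARM L2x0 sits beyond the MMU and is maintained by physical address, while the existing dmac_*_range() operations work on the kernel's virtual mapping. As a rough illustration only (not part of this patch; the exact header contents are assumed here rather than quoted), the outer helpers can be backed by a set of optional function pointers that an L2 cache driver registers at boot, so systems without an outer cache pay only a NULL pointer test:

/* Sketch of the outer-cache hooks assumed by the calls above. */
struct outer_cache_fns {
	void (*inv_range)(unsigned long start, unsigned long end);
	void (*clean_range)(unsigned long start, unsigned long end);
	void (*flush_range)(unsigned long start, unsigned long end);
};

extern struct outer_cache_fns outer_cache;	/* filled in by the L2 cache driver */

/* All three take physical addresses, matching the __pa() users above. */
static inline void outer_inv_range(unsigned long start, unsigned long end)
{
	if (outer_cache.inv_range)		/* no outer cache: no-op */
		outer_cache.inv_range(start, end);
}

static inline void outer_clean_range(unsigned long start, unsigned long end)
{
	if (outer_cache.clean_range)
		outer_cache.clean_range(start, end);
}

static inline void outer_flush_range(unsigned long start, unsigned long end)
{
	if (outer_cache.flush_range)
		outer_cache.flush_range(start, end);
}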