@@ -573,23 +573,22 @@ static int dma_xfer(struct fsmc_nand_data *host, void *buffer, int len,
 	dma_dev = chan->device;
 	dma_addr = dma_map_single(dma_dev->dev, buffer, len, direction);
 
+	flags |= DMA_COMPL_SKIP_SRC_UNMAP | DMA_COMPL_SKIP_DEST_UNMAP;
+
 	if (direction == DMA_TO_DEVICE) {
 		dma_src = dma_addr;
 		dma_dst = host->data_pa;
-		flags |= DMA_COMPL_SRC_UNMAP_SINGLE | DMA_COMPL_SKIP_DEST_UNMAP;
 	} else {
 		dma_src = host->data_pa;
 		dma_dst = dma_addr;
-		flags |= DMA_COMPL_DEST_UNMAP_SINGLE | DMA_COMPL_SKIP_SRC_UNMAP;
 	}
 
 	tx = dma_dev->device_prep_dma_memcpy(chan, dma_dst, dma_src,
 			len, flags);
-
 	if (!tx) {
 		dev_err(host->dev, "device_prep_dma_memcpy error\n");
-		dma_unmap_single(dma_dev->dev, dma_addr, len, direction);
-		return -EIO;
+		ret = -EIO;
+		goto unmap_dma;
 	}
 
 	tx->callback = dma_complete;
@@ -599,7 +598,7 @@ static int dma_xfer(struct fsmc_nand_data *host, void *buffer, int len,
 	ret = dma_submit_error(cookie);
 	if (ret) {
 		dev_err(host->dev, "dma_submit_error %d\n", cookie);
-		return ret;
+		goto unmap_dma;
 	}
 
 	dma_async_issue_pending(chan);
@@ -610,10 +609,17 @@ static int dma_xfer(struct fsmc_nand_data *host, void *buffer, int len,
 	if (ret <= 0) {
 		chan->device->device_control(chan, DMA_TERMINATE_ALL, 0);
 		dev_err(host->dev, "wait_for_completion_timeout\n");
-		return ret ? ret : -ETIMEDOUT;
+		if (!ret)
+			ret = -ETIMEDOUT;
+		goto unmap_dma;
 	}
 
-	return 0;
+	ret = 0;
+
+unmap_dma:
+	dma_unmap_single(dma_dev->dev, dma_addr, len, direction);
+
+	return ret;
 }
 
 /*