@@ -545,13 +545,12 @@ static void ep93xx_spi_pio_transfer(struct ep93xx_spi *espi)
  * in case of failure.
  */
 static struct dma_async_tx_descriptor *
-ep93xx_spi_dma_prepare(struct ep93xx_spi *espi, enum dma_data_direction dir)
+ep93xx_spi_dma_prepare(struct ep93xx_spi *espi, enum dma_transfer_direction dir)
 {
 	struct spi_transfer *t = espi->current_msg->state;
 	struct dma_async_tx_descriptor *txd;
 	enum dma_slave_buswidth buswidth;
 	struct dma_slave_config conf;
-	enum dma_transfer_direction slave_dirn;
 	struct scatterlist *sg;
 	struct sg_table *sgt;
 	struct dma_chan *chan;
@@ -567,14 +566,13 @@ ep93xx_spi_dma_prepare(struct ep93xx_spi *espi, enum dma_data_direction dir)
 	memset(&conf, 0, sizeof(conf));
 	conf.direction = dir;
 
-	if (dir == DMA_FROM_DEVICE) {
+	if (dir == DMA_DEV_TO_MEM) {
 		chan = espi->dma_rx;
 		buf = t->rx_buf;
 		sgt = &espi->rx_sgt;
 
 		conf.src_addr = espi->sspdr_phys;
 		conf.src_addr_width = buswidth;
-		slave_dirn = DMA_DEV_TO_MEM;
 	} else {
 		chan = espi->dma_tx;
 		buf = t->tx_buf;
@@ -582,7 +580,6 @@ ep93xx_spi_dma_prepare(struct ep93xx_spi *espi, enum dma_data_direction dir)
 
 		conf.dst_addr = espi->sspdr_phys;
 		conf.dst_addr_width = buswidth;
-		slave_dirn = DMA_MEM_TO_DEV;
 	}
 
 	ret = dmaengine_slave_config(chan, &conf);
@@ -633,8 +630,7 @@ ep93xx_spi_dma_prepare(struct ep93xx_spi *espi, enum dma_data_direction dir)
 	if (!nents)
 		return ERR_PTR(-ENOMEM);
 
-	txd = dmaengine_prep_slave_sg(chan, sgt->sgl, nents,
-				      slave_dirn, DMA_CTRL_ACK);
+	txd = dmaengine_prep_slave_sg(chan, sgt->sgl, nents, dir, DMA_CTRL_ACK);
 	if (!txd) {
 		dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir);
 		return ERR_PTR(-ENOMEM);
@@ -651,12 +647,12 @@ ep93xx_spi_dma_prepare(struct ep93xx_spi *espi, enum dma_data_direction dir)
  * unmapped.
  */
 static void ep93xx_spi_dma_finish(struct ep93xx_spi *espi,
-				  enum dma_data_direction dir)
+				  enum dma_transfer_direction dir)
 {
 	struct dma_chan *chan;
 	struct sg_table *sgt;
 
-	if (dir == DMA_FROM_DEVICE) {
+	if (dir == DMA_DEV_TO_MEM) {
 		chan = espi->dma_rx;
 		sgt = &espi->rx_sgt;
 	} else {
@@ -677,16 +673,16 @@ static void ep93xx_spi_dma_transfer(struct ep93xx_spi *espi)
 	struct spi_message *msg = espi->current_msg;
 	struct dma_async_tx_descriptor *rxd, *txd;
 
-	rxd = ep93xx_spi_dma_prepare(espi, DMA_FROM_DEVICE);
+	rxd = ep93xx_spi_dma_prepare(espi, DMA_DEV_TO_MEM);
 	if (IS_ERR(rxd)) {
 		dev_err(&espi->pdev->dev, "DMA RX failed: %ld\n", PTR_ERR(rxd));
 		msg->status = PTR_ERR(rxd);
 		return;
 	}
 
-	txd = ep93xx_spi_dma_prepare(espi, DMA_TO_DEVICE);
+	txd = ep93xx_spi_dma_prepare(espi, DMA_MEM_TO_DEV);
 	if (IS_ERR(txd)) {
-		ep93xx_spi_dma_finish(espi, DMA_FROM_DEVICE);
+		ep93xx_spi_dma_finish(espi, DMA_DEV_TO_MEM);
 		dev_err(&espi->pdev->dev, "DMA TX failed: %ld\n", PTR_ERR(rxd));
 		msg->status = PTR_ERR(txd);
 		return;
@@ -705,8 +701,8 @@ static void ep93xx_spi_dma_transfer(struct ep93xx_spi *espi)
 
 	wait_for_completion(&espi->wait);
 
-	ep93xx_spi_dma_finish(espi, DMA_MEM_TO_DEV);
-	ep93xx_spi_dma_finish(espi, DMA_DEV_TO_MEM);
+	ep93xx_spi_dma_finish(espi, DMA_MEM_TO_DEV);
+	ep93xx_spi_dma_finish(espi, DMA_DEV_TO_MEM);
 }
 
 /**
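
The hunks above replace the DMA-mapping directions (DMA_TO_DEVICE/DMA_FROM_DEVICE from enum dma_data_direction) with the dmaengine slave directions (DMA_MEM_TO_DEV/DMA_DEV_TO_MEM from enum dma_transfer_direction), which is what dma_slave_config.direction and dmaengine_prep_slave_sg() take, and drop the intermediate slave_dirn variable. Note that dma_map_sg()/dma_unmap_sg() still expect an enum dma_data_direction; a minimal sketch of a hypothetical helper (not part of this patch) that maps the slave direction back to a mapping direction for those calls:

	/* Hypothetical helper, assuming only the two slave directions are used. */
	static inline enum dma_data_direction
	ep93xx_dma_data_dir(enum dma_transfer_direction dir)
	{
		/* DMA_DEV_TO_MEM is a read from the device into memory. */
		return dir == DMA_DEV_TO_MEM ? DMA_FROM_DEVICE : DMA_TO_DEVICE;
	}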