@@ -352,21 +352,21 @@ static int map_dma_buffers(struct driver_data *drv_data)
 	} else
 		drv_data->tx_map_len = drv_data->len;
 
-	/* Stream map the rx buffer */
-	drv_data->rx_dma = dma_map_single(dev, drv_data->rx,
-					drv_data->rx_map_len,
-					DMA_FROM_DEVICE);
-	if (dma_mapping_error(dev, drv_data->rx_dma))
-		return 0;
-
-	/* Stream map the tx buffer */
+	/* Stream map the tx buffer. Always do DMA_TO_DEVICE first
+	 * so we flush the cache *before* invalidating it, in case
+	 * the tx and rx buffers overlap.
+	 */
 	drv_data->tx_dma = dma_map_single(dev, drv_data->tx,
-					drv_data->tx_map_len,
-					DMA_TO_DEVICE);
+					drv_data->tx_map_len, DMA_TO_DEVICE);
+	if (dma_mapping_error(dev, drv_data->tx_dma))
+		return 0;
 
-	if (dma_mapping_error(dev, drv_data->tx_dma)) {
-		dma_unmap_single(dev, drv_data->rx_dma,
+	/* Stream map the rx buffer */
+	drv_data->rx_dma = dma_map_single(dev, drv_data->rx,
 				drv_data->rx_map_len, DMA_FROM_DEVICE);
+	if (dma_mapping_error(dev, drv_data->rx_dma)) {
+		dma_unmap_single(dev, drv_data->tx_dma,
+				drv_data->tx_map_len, DMA_TO_DEVICE);
 		return 0;
 	}
 
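For reference, the pattern the new comment describes boils down to the sketch below. It is illustrative only, not the driver code: the map_overlapping_buffers() helper, its parameter names and the shared "len" are assumptions made for the example, while dma_map_single(), dma_mapping_error() and dma_unmap_single() are the standard streaming-DMA calls used in the hunk above.

	#include <linux/dma-mapping.h>
	#include <linux/errno.h>

	/*
	 * Illustrative sketch: stream-map a tx and an rx buffer that may
	 * overlap.  Mapping tx first (DMA_TO_DEVICE) writes any dirty
	 * cache lines back to memory; mapping rx afterwards
	 * (DMA_FROM_DEVICE) may invalidate those lines, which is only
	 * safe once they have been flushed.  If the rx mapping fails,
	 * the tx mapping is undone so nothing is left mapped.
	 */
	static int map_overlapping_buffers(struct device *dev,
					   void *tx, dma_addr_t *tx_dma,
					   void *rx, dma_addr_t *rx_dma,
					   size_t len)
	{
		*tx_dma = dma_map_single(dev, tx, len, DMA_TO_DEVICE);
		if (dma_mapping_error(dev, *tx_dma))
			return -ENOMEM;

		*rx_dma = dma_map_single(dev, rx, len, DMA_FROM_DEVICE);
		if (dma_mapping_error(dev, *rx_dma)) {
			dma_unmap_single(dev, *tx_dma, len, DMA_TO_DEVICE);
			return -ENOMEM;
		}

		return 0;
	}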