@@ -2505,6 +2505,10 @@ static dma_cookie_t pl330_tx_submit(struct dma_async_tx_descriptor *tx)
 	/* Assign cookies to all nodes */
 	while (!list_empty(&last->node)) {
 		desc = list_entry(last->node.next, struct dma_pl330_desc, node);
+		if (pch->cyclic) {
+			desc->txd.callback = last->txd.callback;
+			desc->txd.callback_param = last->txd.callback_param;
+		}
 
 		dma_cookie_assign(&desc->txd);
 
@@ -2688,45 +2692,82 @@ static struct dma_async_tx_descriptor *pl330_prep_dma_cyclic(
 		size_t period_len, enum dma_transfer_direction direction,
 		unsigned long flags, void *context)
 {
-	struct dma_pl330_desc *desc;
+	struct dma_pl330_desc *desc = NULL, *first = NULL;
 	struct dma_pl330_chan *pch = to_pchan(chan);
+	struct dma_pl330_dmac *pdmac = pch->dmac;
+	unsigned int i;
 	dma_addr_t dst;
 	dma_addr_t src;
 
-	desc = pl330_get_desc(pch);
-	if (!desc) {
-		dev_err(pch->dmac->pif.dev, "%s:%d Unable to fetch desc\n",
-			__func__, __LINE__);
+	if (len % period_len != 0)
 		return NULL;
-	}
 
-	switch (direction) {
-	case DMA_MEM_TO_DEV:
-		desc->rqcfg.src_inc = 1;
-		desc->rqcfg.dst_inc = 0;
-		desc->req.rqtype = MEMTODEV;
-		src = dma_addr;
-		dst = pch->fifo_addr;
-		break;
-	case DMA_DEV_TO_MEM:
-		desc->rqcfg.src_inc = 0;
-		desc->rqcfg.dst_inc = 1;
-		desc->req.rqtype = DEVTOMEM;
-		src = pch->fifo_addr;
-		dst = dma_addr;
-		break;
-	default:
+	if (!is_slave_direction(direction)) {
 		dev_err(pch->dmac->pif.dev, "%s:%d Invalid dma direction\n",
 			__func__, __LINE__);
 		return NULL;
 	}
 
-	desc->rqcfg.brst_size = pch->burst_sz;
-	desc->rqcfg.brst_len = 1;
+	for (i = 0; i < len / period_len; i++) {
+		desc = pl330_get_desc(pch);
+		if (!desc) {
+			dev_err(pch->dmac->pif.dev, "%s:%d Unable to fetch desc\n",
+				__func__, __LINE__);
 
-	pch->cyclic = true;
+			if (!first)
+				return NULL;
+
+			spin_lock_irqsave(&pdmac->pool_lock, flags);
+
+			while (!list_empty(&first->node)) {
+				desc = list_entry(first->node.next,
+						struct dma_pl330_desc, node);
+				list_move_tail(&desc->node, &pdmac->desc_pool);
+			}
+
+			list_move_tail(&first->node, &pdmac->desc_pool);
 
-	fill_px(&desc->px, dst, src, period_len);
+			spin_unlock_irqrestore(&pdmac->pool_lock, flags);
+
+			return NULL;
+		}
+
+		switch (direction) {
+		case DMA_MEM_TO_DEV:
+			desc->rqcfg.src_inc = 1;
+			desc->rqcfg.dst_inc = 0;
+			desc->req.rqtype = MEMTODEV;
+			src = dma_addr;
+			dst = pch->fifo_addr;
+			break;
+		case DMA_DEV_TO_MEM:
+			desc->rqcfg.src_inc = 0;
+			desc->rqcfg.dst_inc = 1;
+			desc->req.rqtype = DEVTOMEM;
+			src = pch->fifo_addr;
+			dst = dma_addr;
+			break;
+		default:
+			break;
+		}
+
+		desc->rqcfg.brst_size = pch->burst_sz;
+		desc->rqcfg.brst_len = 1;
+		fill_px(&desc->px, dst, src, period_len);
+
+		if (!first)
+			first = desc;
+		else
+			list_add_tail(&desc->node, &first->node);
+
+		dma_addr += period_len;
+	}
+
+	if (!desc)
+		return NULL;
+
+	pch->cyclic = true;
+	desc->txd.flags = flags;
 
 	return &desc->txd;
 }