@@ -128,7 +128,6 @@ static inline u8 iwl_tfd_get_num_tbs(struct iwl_tfd *tfd)
 static void iwlagn_unmap_tfd(struct iwl_priv *priv, struct iwl_cmd_meta *meta,
			     struct iwl_tfd *tfd)
 {
-	struct pci_dev *dev = priv->pci_dev;
	int i;
	int num_tbs;
 
@@ -143,15 +142,15 @@ static void iwlagn_unmap_tfd(struct iwl_priv *priv, struct iwl_cmd_meta *meta,
 
	/* Unmap tx_cmd */
	if (num_tbs)
-		pci_unmap_single(dev,
+		dma_unmap_single(priv->bus.dev,
				 dma_unmap_addr(meta, mapping),
				 dma_unmap_len(meta, len),
-				 PCI_DMA_BIDIRECTIONAL);
+				 DMA_BIDIRECTIONAL);
 
	/* Unmap chunks, if any. */
	for (i = 1; i < num_tbs; i++)
-		pci_unmap_single(dev, iwl_tfd_tb_get_addr(tfd, i),
-				 iwl_tfd_tb_get_len(tfd, i), PCI_DMA_TODEVICE);
+		dma_unmap_single(priv->bus.dev, iwl_tfd_tb_get_addr(tfd, i),
+				 iwl_tfd_tb_get_len(tfd, i), DMA_TO_DEVICE);
 }
 
 /**
@@ -310,10 +309,10 @@ void iwl_cmd_queue_unmap(struct iwl_priv *priv)
		i = get_cmd_index(q, q->read_ptr);
 
		if (txq->meta[i].flags & CMD_MAPPED) {
-			pci_unmap_single(priv->pci_dev,
+			dma_unmap_single(priv->bus.dev,
					 dma_unmap_addr(&txq->meta[i], mapping),
					 dma_unmap_len(&txq->meta[i], len),
-					 PCI_DMA_BIDIRECTIONAL);
+					 DMA_BIDIRECTIONAL);
			txq->meta[i].flags = 0;
		}
 
@@ -456,7 +455,7 @@ static int iwl_tx_queue_alloc(struct iwl_priv *priv,
	txq->tfds = dma_alloc_coherent(dev, tfd_sz, &txq->q.dma_addr,
				       GFP_KERNEL);
	if (!txq->tfds) {
-		IWL_ERR(priv, "pci_alloc_consistent(%zd) failed\n", tfd_sz);
+		IWL_ERR(priv, "dma_alloc_coherent(%zd) failed\n", tfd_sz);
		goto error;
	}
	txq->q.id = id;
@@ -677,9 +676,9 @@ int iwl_enqueue_hcmd(struct iwl_priv *priv, struct iwl_host_cmd *cmd)
			le16_to_cpu(out_cmd->hdr.sequence), cmd_size,
			q->write_ptr, idx, priv->cmd_queue);
 
-	phys_addr = pci_map_single(priv->pci_dev, &out_cmd->hdr,
-				   copy_size, PCI_DMA_BIDIRECTIONAL);
-	if (unlikely(pci_dma_mapping_error(priv->pci_dev, phys_addr))) {
+	phys_addr = dma_map_single(priv->bus.dev, &out_cmd->hdr, copy_size,
+				   DMA_BIDIRECTIONAL);
+	if (unlikely(dma_mapping_error(priv->bus.dev, phys_addr))) {
		idx = -ENOMEM;
		goto out;
	}
@@ -699,9 +698,9 @@ int iwl_enqueue_hcmd(struct iwl_priv *priv, struct iwl_host_cmd *cmd)
			continue;
		if (!(cmd->dataflags[i] & IWL_HCMD_DFL_NOCOPY))
			continue;
-		phys_addr = pci_map_single(priv->pci_dev, (void *)cmd->data[i],
-					   cmd->len[i], PCI_DMA_TODEVICE);
-		if (pci_dma_mapping_error(priv->pci_dev, phys_addr)) {
+		phys_addr = dma_map_single(priv->bus.dev, (void *)cmd->data[i],
+					   cmd->len[i], DMA_TO_DEVICE);
+		if (dma_mapping_error(priv->bus.dev, phys_addr)) {
			iwlagn_unmap_tfd(priv, out_meta,
					 &txq->tfds[q->write_ptr]);
			idx = -ENOMEM;
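
For reference, a minimal sketch of the bus-agnostic streaming-DMA pattern these hunks switch to (dma_map_single / dma_mapping_error / dma_unmap_single against a plain struct device). The function name and the dev/buf/len parameters below are illustrative stand-ins, not names from the driver; in the patched code the device pointer is priv->bus.dev.

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>

/* Map a driver-owned buffer for a device read, then release the mapping.
 * "dev" stands in for whatever struct device the bus layer provides
 * (priv->bus.dev after this conversion); "buf" and "len" are placeholders. */
static int example_map_for_device(struct device *dev, void *buf, size_t len)
{
	dma_addr_t phys;

	phys = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, phys))
		return -ENOMEM;	/* same error path the patched hunks take */

	/* ... hand "phys" to the hardware and wait for completion ... */

	dma_unmap_single(dev, phys, len, DMA_TO_DEVICE);
	return 0;
}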