Lines matching refs:fifo (Linux vxge driver, vxge-main.c)

96 static inline void VXGE_COMPLETE_VPATH_TX(struct vxge_fifo *fifo)
108 if (__netif_tx_trylock(fifo->txq)) {
109 vxge_hw_vpath_poll_tx(fifo->handle, &skb_ptr,
111 __netif_tx_unlock(fifo->txq);
126 VXGE_COMPLETE_VPATH_TX(&vdev->vpaths[i].fifo);
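
The fragments at 96-126 come from the tx-completion helper. Below is a minimal reconstruction sketch, assuming the scratch array, its NR_SKB_COMPLETED batch size, and the outer retry loop (the listing only shows the trylock/poll/unlock core):

static inline void VXGE_COMPLETE_VPATH_TX(struct vxge_fifo *fifo)
{
#define NR_SKB_COMPLETED 128			/* assumed batch size */
	struct sk_buff *completed[NR_SKB_COMPLETED];
	struct sk_buff **skb_ptr, **temp;
	int more;

	do {
		more = 0;
		skb_ptr = completed;

		/* Opportunistic: if another CPU already holds the tx queue
		 * lock, skip this pass instead of spinning in irq context. */
		if (__netif_tx_trylock(fifo->txq)) {
			vxge_hw_vpath_poll_tx(fifo->handle, &skb_ptr,
					      NR_SKB_COMPLETED, &more);
			__netif_tx_unlock(fifo->txq);
		}

		/* Free the skbs handed back by the HW layer, outside the lock. */
		for (temp = completed; temp != skb_ptr; temp++)
			dev_kfree_skb_irq(*temp);
	} while (more);
}
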
545 struct vxge_fifo *fifo = (struct vxge_fifo *)userdata;
565 "tcode = 0x%x", fifo->ndev->name, __func__,
571 fifo->ndev->name, __func__, __LINE__,
574 fifo->stats.tx_errors++;
577 "error t_code %01x", fifo->ndev->name,
583 pci_unmap_single(fifo->pdev, txd_priv->dma_buffers[i++],
587 pci_unmap_page(fifo->pdev,
596 u64_stats_update_begin(&fifo->stats.syncp);
597 fifo->stats.tx_frms++;
598 fifo->stats.tx_bytes += skb->len;
599 u64_stats_update_end(&fifo->stats.syncp);
609 if (pkt_cnt > fifo->indicate_max_pkts)
616 if (netif_tx_queue_stopped(fifo->txq))
617 netif_tx_wake_queue(fifo->txq);
621 fifo->ndev->name, __func__, __LINE__);
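
Lines 596-599 bump the per-fifo counters inside a u64_stats_sync section so 64-bit counters read consistently on 32-bit hosts. A self-contained sketch of the writer side plus a matching reader (the struct is trimmed down and the reader helper fifo_read_tx is hypothetical, not a driver function):

#include <linux/u64_stats_sync.h>

struct vxge_fifo_stats {
	struct u64_stats_sync syncp;
	u64 tx_frms;
	u64 tx_bytes;
};

/* Writer side, as in the completion path above. */
static void fifo_account_tx(struct vxge_fifo_stats *st, unsigned int len)
{
	u64_stats_update_begin(&st->syncp);
	st->tx_frms++;
	st->tx_bytes += len;
	u64_stats_update_end(&st->syncp);
}

/* Reader side, e.g. from ndo_get_stats64: retry until no writer
 * ran concurrently with the snapshot. */
static void fifo_read_tx(struct vxge_fifo_stats *st, u64 *frms, u64 *bytes)
{
	unsigned int start;

	do {
		start = u64_stats_fetch_begin(&st->syncp);
		*frms = st->tx_frms;
		*bytes = st->tx_bytes;
	} while (u64_stats_fetch_retry(&st->syncp, start));
}
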
803 struct vxge_fifo *fifo = NULL;
858 fifo = &vdev->vpaths[vpath_no].fifo;
859 fifo_hw = fifo->handle;
861 if (netif_tx_queue_stopped(fifo->txq))
868 fifo->stats.txd_not_free++;
876 netif_tx_stop_queue(fifo->txq);
882 fifo->stats.txd_out_of_desc++;
898 dma_pointer = pci_map_single(fifo->pdev, skb->data, first_frg_len,
901 if (unlikely(pci_dma_mapping_error(fifo->pdev, dma_pointer))) {
903 fifo->stats.pci_map_fail++;
927 dma_pointer = (u64)skb_frag_dma_map(&fifo->pdev->dev, frag,
931 if (unlikely(dma_mapping_error(&fifo->pdev->dev, dma_pointer)))
978 pci_unmap_single(fifo->pdev, txdl_priv->dma_buffers[j++],
982 pci_unmap_page(fifo->pdev, txdl_priv->dma_buffers[j],
989 netif_tx_stop_queue(fifo->txq);
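
Lines 898-989 are vxge_xmit's two-phase DMA mapping with a full unwind on failure. A compressed sketch of that pattern, assuming the hypothetical helper name map_skb_for_tx and a caller-supplied dma_buffers array (the real code also fills the TxDL descriptor in the same loop):

static int map_skb_for_tx(struct vxge_fifo *fifo, struct sk_buff *skb,
			  u64 *dma_buffers)
{
	int i = 0, j = 0;
	u64 dma_pointer;
	skb_frag_t *frag;

	/* Map the linear part first. */
	dma_pointer = pci_map_single(fifo->pdev, skb->data,
				     skb_headlen(skb), PCI_DMA_TODEVICE);
	if (unlikely(pci_dma_mapping_error(fifo->pdev, dma_pointer))) {
		fifo->stats.pci_map_fail++;
		return -ENOMEM;
	}
	dma_buffers[j++] = dma_pointer;

	/* Then one mapping per page fragment. */
	for (i = 0; i < skb_shinfo(skb)->nr_frags; i++) {
		frag = &skb_shinfo(skb)->frags[i];
		dma_pointer = (u64)skb_frag_dma_map(&fifo->pdev->dev, frag, 0,
						    skb_frag_size(frag),
						    DMA_TO_DEVICE);
		if (unlikely(dma_mapping_error(&fifo->pdev->dev, dma_pointer)))
			goto unwind;
		dma_buffers[j++] = dma_pointer;
	}
	return 0;

unwind:
	/* Fragment i failed: unmap the head and fragments 0..i-1. */
	pci_unmap_single(fifo->pdev, dma_buffers[0], skb_headlen(skb),
			 PCI_DMA_TODEVICE);
	for (j = 1; j <= i; j++)
		pci_unmap_page(fifo->pdev, dma_buffers[j],
			       skb_frag_size(&skb_shinfo(skb)->frags[j - 1]),
			       PCI_DMA_TODEVICE);
	return -ENOMEM;
}
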
1032 struct vxge_fifo *fifo = (struct vxge_fifo *)userdata;
1049 pci_unmap_single(fifo->pdev, txd_priv->dma_buffers[i++],
1053 pci_unmap_page(fifo->pdev, txd_priv->dma_buffers[i++],
1571 if (netif_tx_queue_stopped(vpath->fifo.txq))
1572 netif_tx_wake_queue(vpath->fifo.txq);
1594 struct __vxge_hw_fifo *hw_fifo = vdev->vpaths[i].fifo.handle;
2055 attr.fifo_attr.userdata = &vpath->fifo;
2068 vpath->fifo.handle =
2072 vpath->fifo.tx_steering_type =
2074 vpath->fifo.ndev = vdev->ndev;
2075 vpath->fifo.pdev = vdev->pdev;
2077 vpath->fifo.txq =
2080 vpath->fifo.txq =
2082 vpath->fifo.indicate_max_pkts =
2084 vpath->fifo.tx_vector_no = 0;
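
Lines 2077-2080 bind each fifo to a netdev tx queue during vpath open. A sketch of that branch, assuming i is the vpath index: with tx steering enabled every vpath drives its own queue, otherwise all fifos share queue 0.

	if (vdev->config.tx_steering_type)
		vpath->fifo.txq = netdev_get_tx_queue(vdev->ndev, i);
	else
		vpath->fifo.txq = netdev_get_tx_queue(vdev->ndev, 0);
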
2111 * @fifo: pointer to transmit fifo structure
2116 static void adaptive_coalesce_tx_interrupts(struct vxge_fifo *fifo)
2118 fifo->interrupt_count++;
2119 if (jiffies > fifo->jiffies + HZ / 100) {
2120 struct __vxge_hw_fifo *hw_fifo = fifo->handle;
2122 fifo->jiffies = jiffies;
2123 if (fifo->interrupt_count > VXGE_T1A_MAX_TX_INTERRUPT_COUNT &&
2131 fifo->interrupt_count = 0;
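
The fragments at 2116-2131 outline the adaptive tx interrupt moderation: roughly every 10 ms (HZ / 100 jiffies) the number of tx interrupts seen in the window decides whether to stretch or reset the fifo's restriction timer. A reconstruction sketch; the rtimer branch between the shown lines is an assumption (VXGE_TTI_RTIMER_ADAPT_VAL and vxge_hw_vpath_dynamic_tti_rtimer_set are taken from the driver's TTI code) and should be checked against the source:

static void adaptive_coalesce_tx_interrupts(struct vxge_fifo *fifo)
{
	fifo->interrupt_count++;
	if (jiffies > fifo->jiffies + HZ / 100) {
		struct __vxge_hw_fifo *hw_fifo = fifo->handle;

		fifo->jiffies = jiffies;
		if (fifo->interrupt_count > VXGE_T1A_MAX_TX_INTERRUPT_COUNT &&
		    hw_fifo->rtimer != VXGE_TTI_RTIMER_ADAPT_VAL) {
			/* Rate too high and timer not yet stretched: stretch it. */
			hw_fifo->rtimer = VXGE_TTI_RTIMER_ADAPT_VAL;
			vxge_hw_vpath_dynamic_tti_rtimer_set(hw_fifo);
		} else if (hw_fifo->rtimer != 0) {
			/* Otherwise make sure the timer is back at zero. */
			hw_fifo->rtimer = 0;
			vxge_hw_vpath_dynamic_tti_rtimer_set(hw_fifo);
		}
		fifo->interrupt_count = 0;
	}
}
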
2225 struct vxge_fifo *fifo = (struct vxge_fifo *)dev_id;
2227 adaptive_coalesce_tx_interrupts(fifo);
2229 vxge_hw_channel_msix_mask((struct __vxge_hw_channel *)fifo->handle,
2230 fifo->tx_vector_no);
2232 vxge_hw_channel_msix_clear((struct __vxge_hw_channel *)fifo->handle,
2233 fifo->tx_vector_no);
2235 VXGE_COMPLETE_VPATH_TX(fifo);
2237 vxge_hw_channel_msix_unmask((struct __vxge_hw_channel *)fifo->handle,
2238 fifo->tx_vector_no);
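
Lines 2225-2238 are the per-fifo MSI-X tx handler. Assembled into a sketch (only the IRQ_HANDLED return is assumed), it shows the mask -> clear -> reap -> unmask discipline around the completion pass:

static irqreturn_t vxge_tx_msix_handle(int irq, void *dev_id)
{
	struct vxge_fifo *fifo = (struct vxge_fifo *)dev_id;

	adaptive_coalesce_tx_interrupts(fifo);

	/* Keep the vector quiet while completions are reaped. */
	vxge_hw_channel_msix_mask((struct __vxge_hw_channel *)fifo->handle,
				  fifo->tx_vector_no);
	vxge_hw_channel_msix_clear((struct __vxge_hw_channel *)fifo->handle,
				   fifo->tx_vector_no);

	VXGE_COMPLETE_VPATH_TX(fifo);

	vxge_hw_channel_msix_unmask((struct __vxge_hw_channel *)fifo->handle,
				    fifo->tx_vector_no);

	return IRQ_HANDLED;
}
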
2332 /* Initialize the fifo vector */
2398 /* If fifo or ring are not enabled, the MSIX vector for
2404 vpath->fifo.tx_vector_no = (vpath->device_id *
2492 &vdev->vpaths[vp_idx].fifo);
2494 &vdev->vpaths[vp_idx].fifo;
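
Lines 2492-2494 hand the per-vpath fifo to request_irq as the dev_id cookie; it is the same pointer the handler at 2225 casts back out of dev_id. A sketch, with the vector/name bookkeeping fields assumed from context:

	ret = request_irq(vdev->entries[intr_cnt].vector,  /* MSI-X vector */
			  vxge_tx_msix_handle, 0,
			  vdev->desc[intr_cnt],            /* irq name */
			  &vdev->vpaths[vp_idx].fifo);     /* dev_id: this fifo */
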
2583 vxge_hw_vpath_tti_ci_set(vdev->vpaths[0].fifo.handle);
2652 netif_tx_stop_queue(vpath->fifo.txq);
3130 struct vxge_fifo_stats *txstats = &vdev->vpaths[k].fifo.stats;
3608 netif_tx_stop_queue(vpath->fifo.txq);
3622 /* 1. If user enters 0 for some fifo, give equal priority to all */
3750 /* Configure Tx fifo's */
3751 device_config->vp_config[i].fifo.enable =
3753 device_config->vp_config[i].fifo.max_frags =
3755 device_config->vp_config[i].fifo.memblock_size =
3758 txdl_size = device_config->vp_config[i].fifo.max_frags *
3762 device_config->vp_config[i].fifo.fifo_blocks =
3765 device_config->vp_config[i].fifo.intr =
4006 config.vp_config[i].fifo.max_frags);
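
Lines 3750-3765 size the tx side: one TxDL (transmit descriptor list) needs max_frags descriptors, and the number of fifo memory blocks follows from how many TxDLs fit into one memblock. A sketch of the arithmetic, assuming the driver's constants (MAX_SKB_FRAGS, VXGE_HW_MIN_FIFO_MEMBLOCK_SIZE, VXGE_DEF_FIFO_LENGTH, VXGE_HW_FIFO_QUEUE_INTR_DISABLE) for the parts the listing elides:

	struct vxge_hw_vp_config *vp = &device_config->vp_config[i];
	int txdl_size, txdl_per_memblock;

	vp->fifo.enable = VXGE_HW_FIFO_ENABLE;
	vp->fifo.max_frags = MAX_SKB_FRAGS + 1;	/* fragments + linear part */
	vp->fifo.memblock_size = VXGE_HW_MIN_FIFO_MEMBLOCK_SIZE;

	/* One descriptor per fragment makes up a TxDL. */
	txdl_size = vp->fifo.max_frags * sizeof(struct vxge_hw_fifo_txd);
	txdl_per_memblock = vp->fifo.memblock_size / txdl_size;

	/* Enough memblocks for VXGE_DEF_FIFO_LENGTH TxDLs, rounded up. */
	vp->fifo.fifo_blocks =
		((VXGE_DEF_FIFO_LENGTH - 1) / txdl_per_memblock) + 1;

	vp->fifo.intr = VXGE_HW_FIFO_QUEUE_INTR_DISABLE;
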