Searched refs:tx_ring (Results 26 - 50 of 197) sorted by relevance

/drivers/net/ethernet/packetengines/
yellowfin.c 310 struct yellowfin_desc *tx_ring; member in struct:yellowfin_private
441 np->tx_ring = ring_space;
514 pci_free_consistent(pdev, TX_TOTAL_SIZE, np->tx_ring, np->tx_ring_dma);
700 pr_warn(" Tx ring %p: ", yp->tx_ring);
704 yp->tx_ring[i].result_status);
762 yp->tx_ring[i].dbdma_cmd = cpu_to_le32(CMD_STOP);
763 yp->tx_ring[i].branch_addr = cpu_to_le32(yp->tx_ring_dma +
767 yp->tx_ring[--i].dbdma_cmd = cpu_to_le32(CMD_STOP | BRANCH_ALWAYS);
775 yp->tx_ring[j].dbdma_cmd = cpu_to_le32(CMD_STOP);
776 yp->tx_ring[
[all...]
hamachi.c 485 struct hamachi_desc *tx_ring; member in struct:hamachi_private
651 hmp->tx_ring = ring_space;
780 pci_free_consistent(pdev, TX_TOTAL_SIZE, hmp->tx_ring,
1002 if (hmp->tx_ring[entry].status_n_length & cpu_to_le32(DescOwn))
1008 leXX_to_cpu(hmp->tx_ring[entry].addr),
1013 hmp->tx_ring[entry].status_n_length = 0;
1015 hmp->tx_ring[TX_RING_SIZE-1].status_n_length |=
1063 printk(KERN_DEBUG" Tx ring %p: ", hmp->tx_ring);
1066 le32_to_cpu(hmp->tx_ring[i].status_n_length));
1091 hmp->tx_ring[
[all...]
/drivers/net/ethernet/intel/igbvf/
ethtool.c 207 struct igbvf_ring *tx_ring = adapter->tx_ring; local
213 ring->tx_pending = tx_ring->count;
235 if ((new_tx_count == adapter->tx_ring->count) &&
245 adapter->tx_ring->count = new_tx_count;
263 if (new_tx_count != adapter->tx_ring->count) {
264 memcpy(temp_ring, adapter->tx_ring, sizeof(struct igbvf_ring));
271 igbvf_free_tx_resources(adapter->tx_ring);
273 memcpy(adapter->tx_ring, temp_ring, sizeof(struct igbvf_ring));
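The ethtool.c hits above (lines 207-273) trace the usual set_ringparam resize flow: copy the live ring struct into a temporary, set the new descriptor count, allocate resources for the temporary, free the old ring, then copy the temporary back so a failed allocation leaves the original ring untouched. A minimal userspace sketch of that flow follows; the demo_* names are invented and calloc()/free() merely stand in for the driver's DMA allocators.

/* Hedged sketch of the resize-with-temporary pattern seen above. */
#include <stdlib.h>
#include <string.h>

struct demo_ring {
        unsigned int count;   /* number of descriptors */
        void *desc;           /* descriptor memory (drivers use DMA-coherent memory) */
};

static int demo_setup_tx_resources(struct demo_ring *ring)
{
        ring->desc = calloc(ring->count, 16);   /* stand-in for the real allocator */
        return ring->desc ? 0 : -1;
}

static void demo_free_tx_resources(struct demo_ring *ring)
{
        free(ring->desc);
        ring->desc = NULL;
}

static int demo_resize_tx_ring(struct demo_ring *ring, unsigned int new_count)
{
        struct demo_ring temp;

        if (new_count == ring->count)
                return 0;                       /* nothing to do */

        memcpy(&temp, ring, sizeof(temp));      /* work on a copy of the live ring */
        temp.count = new_count;
        if (demo_setup_tx_resources(&temp))
                return -1;                      /* old ring is still intact */

        demo_free_tx_resources(ring);           /* drop the old descriptors */
        memcpy(ring, &temp, sizeof(*ring));     /* publish the resized ring */
        return 0;
}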
/drivers/net/ethernet/oki-semi/pch_gbe/
pch_gbe_ethtool.c 270 struct pch_gbe_tx_ring *txdr = adapter->tx_ring;
303 tx_old = adapter->tx_ring;
316 adapter->tx_ring = txdr;
332 err = pch_gbe_setup_tx_resources(adapter, adapter->tx_ring);
339 adapter->tx_ring = tx_old;
341 pch_gbe_free_tx_resources(adapter, adapter->tx_ring);
345 adapter->tx_ring = txdr;
352 adapter->tx_ring = txdr;
362 adapter->tx_ring = tx_old;
pch_gbe_param.c 459 struct pch_gbe_tx_ring *tx_ring = adapter->tx_ring; local
460 tx_ring->count = TxDescriptors;
461 pch_gbe_validate_option(&tx_ring->count, &opt, adapter);
462 tx_ring->count = roundup(tx_ring->count,
/drivers/net/ethernet/qlogic/qlcnic/
qlcnic_ethtool.c 525 struct qlcnic_host_tx_ring *tx_ring; local
553 tx_ring = &adapter->tx_ring[ring];
554 regs_buff[i++] = le32_to_cpu(*(tx_ring->hw_consumer));
555 regs_buff[i++] = tx_ring->sw_consumer;
556 regs_buff[i++] = readl(tx_ring->crb_cmd_producer);
557 regs_buff[i++] = tx_ring->producer;
558 if (tx_ring->crb_intr_mask)
559 regs_buff[i++] = readl(tx_ring->crb_intr_mask);
686 u8 rx_ring, u8 tx_ring)
685 qlcnic_validate_ring_count(struct qlcnic_adapter *adapter, u8 rx_ring, u8 tx_ring) argument
1294 struct qlcnic_host_tx_ring *tx_ring; local
1316 struct qlcnic_host_tx_ring *tx_ring; local
1333 struct qlcnic_host_tx_ring *tx_ring; local
[all...]
qlcnic_main.c 124 inline void qlcnic_update_cmd_producer(struct qlcnic_host_tx_ring *tx_ring) argument
126 writel(tx_ring->producer, tx_ring->crb_cmd_producer);
1711 struct qlcnic_host_tx_ring *tx_ring; local
1781 tx_ring = &adapter->tx_ring[ring];
1782 snprintf(tx_ring->name, sizeof(tx_ring->name),
1784 err = request_irq(tx_ring->irq, handler, flags,
1785 tx_ring
1799 struct qlcnic_host_tx_ring *tx_ring; local
2370 struct qlcnic_host_tx_ring *tx_ring; local
2387 struct qlcnic_host_tx_ring *tx_ring; local
2984 dump_tx_ring_desc(struct qlcnic_host_tx_ring *tx_ring) argument
3004 struct qlcnic_host_tx_ring *tx_ring; local
3182 struct qlcnic_host_tx_ring *tx_ring = data; local
3194 struct qlcnic_host_tx_ring *tx_ring; local
[all...]
/drivers/net/ethernet/intel/e1000e/
ethtool.c 696 adapter->tx_ring->count = new_tx_count;
731 memcpy(temp_tx, adapter->tx_ring, size);
747 e1000e_free_tx_resources(adapter->tx_ring);
748 memcpy(adapter->tx_ring, temp_tx, size);
1114 struct e1000_ring *tx_ring = &adapter->test_tx_ring; local
1120 if (tx_ring->desc && tx_ring->buffer_info) {
1121 for (i = 0; i < tx_ring->count; i++) {
1122 buffer_info = &tx_ring->buffer_info[i];
1147 if (tx_ring
1166 struct e1000_ring *tx_ring = &adapter->test_tx_ring; local
1607 struct e1000_ring *tx_ring = &adapter->test_tx_ring; local
[all...]
/drivers/net/vmxnet3/
vmxnet3_drv.c 336 BUG_ON(tq->buf_info[eop_idx].sop_idx != tq->tx_ring.next2comp);
337 BUG_ON(VMXNET3_TXDESC_GET_EOP(&(tq->tx_ring.base[eop_idx].txd)) != 1);
343 VMXNET3_INC_RING_IDX_ONLY(eop_idx, tq->tx_ring.size);
345 while (tq->tx_ring.next2comp != eop_idx) {
346 vmxnet3_unmap_tx_buf(tq->buf_info + tq->tx_ring.next2comp,
354 vmxnet3_cmd_ring_adv_next2comp(&tq->tx_ring);
383 vmxnet3_cmd_ring_desc_avail(&tq->tx_ring) >
400 while (tq->tx_ring.next2comp != tq->tx_ring.next2fill) {
403 tbi = tq->buf_info + tq->tx_ring
[all...]
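The vmxnet3_drv.c hits above revolve around the tx_ring's next2fill/next2comp indices: completion advances next2comp (with wraparound) until it passes the end-of-packet slot, and free-descriptor accounting keeps one slot of slack between producer and consumer. A small standalone sketch of that index arithmetic, with invented demo_* names and the per-slot unmap work omitted:

/* Hedged sketch of the next2fill/next2comp ring bookkeeping. */
struct demo_cmd_ring {
        unsigned int size;        /* number of descriptors in the ring */
        unsigned int next2fill;   /* producer: next slot the driver fills */
        unsigned int next2comp;   /* consumer: next slot awaiting completion */
};

static void demo_ring_adv(unsigned int *idx, unsigned int size)
{
        if (++*idx == size)       /* wrap around at the end of the ring */
                *idx = 0;
}

/* Free slots, keeping one gap so next2fill never catches next2comp. */
static unsigned int demo_ring_desc_avail(const struct demo_cmd_ring *ring)
{
        return (ring->next2comp > ring->next2fill ? 0 : ring->size) +
               ring->next2comp - ring->next2fill - 1;
}

/* Reclaim completed descriptors up to and including eop_idx. */
static void demo_complete_up_to(struct demo_cmd_ring *ring, unsigned int eop_idx)
{
        demo_ring_adv(&eop_idx, ring->size);     /* stop one past the EOP slot */
        while (ring->next2comp != eop_idx) {
                /* a real driver would unmap this slot's buffer here */
                demo_ring_adv(&ring->next2comp, ring->size);
        }
}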
/drivers/net/ethernet/qlogic/netxen/
netxen_nic_init.c 136 struct nx_host_tx_ring *tx_ring = adapter->tx_ring; local
139 cmd_buf = tx_ring->cmd_buf_arr;
140 for (i = 0; i < tx_ring->num_desc; i++) {
169 struct nx_host_tx_ring *tx_ring; local
185 if (adapter->tx_ring == NULL)
188 tx_ring = adapter->tx_ring;
189 vfree(tx_ring->cmd_buf_arr);
190 kfree(tx_ring);
199 struct nx_host_tx_ring *tx_ring; local
1765 struct nx_host_tx_ring *tx_ring = adapter->tx_ring; local
[all...]
/drivers/net/ethernet/amd/
ariadne.c 88 volatile struct TDRE *tx_ring[TX_RING_SIZE]; member in struct:ariadne_private
100 struct TDRE tx_ring[TX_RING_SIZE]; member in struct:lancedata
130 volatile struct TDRE *t = &lancedata->tx_ring[i];
138 priv->tx_ring[i] = &lancedata->tx_ring[i];
141 i, &lancedata->tx_ring[i], lancedata->tx_buff[i]);
310 int status = lowb(priv->tx_ring[entry]->TMD1);
315 priv->tx_ring[entry]->TMD1 &= 0xff00;
319 int err_status = priv->tx_ring[entry]->TMD3;
456 lance->RDP = swloww(ARIADNE_RAM + offsetof(struct lancedata, tx_ring));
[all...]
lance.c 232 u32 tx_ring; member in struct:lance_init_block
238 struct lance_tx_head tx_ring[TX_RING_SIZE]; member in struct:lance_private
576 lp->init_block.tx_ring = ((u32)isa_virt_to_bus(lp->tx_ring) & 0xffffff) | TX_RING_LEN_BITS;
789 (u32) isa_virt_to_bus(lp->tx_ring),
890 lp->tx_ring[i].base = 0;
899 lp->init_block.tx_ring = ((u32)isa_virt_to_bus(lp->tx_ring) & 0xffffff) | TX_RING_LEN_BITS;
939 lp->tx_ring[i].base, -lp->tx_ring[
[all...]
pcnet32.c 241 __le32 tx_ring; member in struct:pcnet32_init_block
263 struct pcnet32_tx_head *tx_ring; member in struct:pcnet32_private
507 lp->tx_ring, lp->tx_ring_dma_addr);
512 lp->tx_ring = new_tx_ring;
922 lp->tx_ring[x].length = cpu_to_le16(-skb->len);
923 lp->tx_ring[x].misc = 0;
948 lp->tx_ring[x].base = cpu_to_le32(lp->tx_dma_addr[x]);
950 lp->tx_ring[x].status = cpu_to_le16(status);
1254 int status = (short)le16_to_cpu(lp->tx_ring[entry].status);
1259 lp->tx_ring[entr
[all...]
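One detail worth flagging in the pcnet32.c hits: line 922 stores the buffer length as cpu_to_le16(-skb->len) because LANCE/PCnet descriptors carry the byte count (BCNT) as a two's-complement negative value. A tiny runnable illustration of that encoding (the demo_* name is made up):

#include <stdint.h>
#include <stdio.h>

/* LANCE/PCnet BCNT holds the buffer length as a two's-complement value. */
static uint16_t demo_encode_bcnt(uint16_t len)
{
        return (uint16_t)(-len);
}

int main(void)
{
        printf("len 64   -> BCNT 0x%04x\n", demo_encode_bcnt(64));    /* 0xffc0 */
        printf("len 1514 -> BCNT 0x%04x\n", demo_encode_bcnt(1514));  /* 0xfa16 */
        return 0;
}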
/drivers/net/ethernet/amd/xgbe/
xgbe-main.c 139 struct xgbe_ring *tx_ring, *rx_ring; local
151 tx_ring = devm_kcalloc(pdata->dev, pdata->tx_ring_count,
153 if (!tx_ring)
169 spin_lock_init(&tx_ring->lock);
170 channel->tx_ring = tx_ring++;
180 channel->tx_ring, channel->rx_ring);
/drivers/net/ethernet/
fealnx.c 378 struct fealnx_desc *tx_ring; member in struct:netdev_private
574 np->tx_ring = ring_space;
674 pci_free_consistent(pdev, TX_TOTAL_SIZE, np->tx_ring, np->tx_ring_dma);
694 pci_free_consistent(pdev, TX_TOTAL_SIZE, np->tx_ring,
1157 iowrite32(np->tx_ring_dma + ((char*)np->cur_tx - (char*)np->tx_ring),
1216 printk(KERN_DEBUG " Tx ring %p: ", np->tx_ring);
1218 printk(KERN_CONT " %4.4x", np->tx_ring[i].status);
1280 np->cur_tx = &np->tx_ring[0];
1281 np->cur_tx_copy = &np->tx_ring[0];
1286 np->tx_ring[
[all...]
/drivers/net/ethernet/intel/ixgbe/
ixgbe_lib.c 74 adapter->tx_ring[i]->reg_idx = reg_idx;
101 adapter->tx_ring[i]->reg_idx = reg_idx;
182 adapter->tx_ring[offset + i]->reg_idx = tx_idx;
184 adapter->tx_ring[offset + i]->dcb_tc = tc;
245 adapter->tx_ring[i]->reg_idx = reg_idx;
251 adapter->tx_ring[i]->reg_idx = reg_idx;
272 adapter->tx_ring[i]->reg_idx = i;
292 adapter->tx_ring[0]->reg_idx = 0;
898 adapter->tx_ring[txr_idx] = ring;
973 adapter->tx_ring[rin
1200 ixgbe_tx_ctxtdesc(struct ixgbe_ring *tx_ring, u32 vlan_macip_lens, u32 fcoe_sof_eof, u32 type_tucmd, u32 mss_l4len_idx) argument
[all...]
/drivers/infiniband/hw/mlx4/
mad.c 528 tun_mad = (struct mlx4_rcv_tunnel_mad *) (tun_qp->tx_ring[tun_tx_ix].buf.addr);
529 if (tun_qp->tx_ring[tun_tx_ix].ah)
530 ib_destroy_ah(tun_qp->tx_ring[tun_tx_ix].ah);
531 tun_qp->tx_ring[tun_tx_ix].ah = ah;
533 tun_qp->tx_ring[tun_tx_ix].buf.map,
575 tun_qp->tx_ring[tun_tx_ix].buf.map,
579 list.addr = tun_qp->tx_ring[tun_tx_ix].buf.map;
1211 sqp_mad = (struct mlx4_mad_snd_buf *) (sqp->tx_ring[wire_tx_ix].buf.addr);
1212 if (sqp->tx_ring[wire_tx_ix].ah)
1213 ib_destroy_ah(sqp->tx_ring[wire_tx_i
[all...]
/drivers/net/ethernet/intel/e1000/
e1000_param.c 281 struct e1000_tx_ring *tx_ring = adapter->tx_ring; local
298 tx_ring->count = TxDescriptors[bd];
299 e1000_validate_option(&tx_ring->count, &opt, adapter);
300 tx_ring->count = ALIGN(tx_ring->count,
303 tx_ring->count = opt.def;
306 tx_ring[i].count = tx_ring->count;
/drivers/net/irda/
au1k_ir.c 155 volatile struct ring_dest *tx_ring[NUM_IR_DESC]; member in struct:au1k_private
260 aup->tx_ring[i] = (volatile struct ring_dest *)
315 ptxd = aup->tx_ring[i];
425 ptxd = aup->tx_ring[aup->tx_tail];
433 ptxd = aup->tx_ring[aup->tx_tail];
683 ptxd = aup->tx_ring[aup->tx_head];
864 aup->tx_ring[i]->addr_0 = (u8)(pDB->dma_addr & 0xff);
865 aup->tx_ring[i]->addr_1 = (u8)((pDB->dma_addr >> 8) & 0xff);
866 aup->tx_ring[i]->addr_2 = (u8)((pDB->dma_addr >> 16) & 0xff);
867 aup->tx_ring[
[all...]
/drivers/net/ethernet/atheros/atl1e/
atl1e_param.c 206 adapter->tx_ring.count = (u16) val & 0xFFFC;
208 adapter->tx_ring.count = (u16)opt.def;
/drivers/net/ethernet/samsung/sxgbe/
sxgbe_main.c 371 * @tx_ring: ring to be intialised
376 struct sxgbe_tx_queue *tx_ring, int tx_rsize)
379 if (!tx_ring) {
385 tx_ring->dma_tx = dma_zalloc_coherent(dev,
387 &tx_ring->dma_tx_phy, GFP_KERNEL);
388 if (!tx_ring->dma_tx)
392 tx_ring->tx_skbuff_dma = devm_kcalloc(dev, tx_rsize,
394 if (!tx_ring->tx_skbuff_dma)
397 tx_ring->tx_skbuff = devm_kcalloc(dev, tx_rsize,
400 if (!tx_ring
375 init_tx_ring(struct device *dev, u8 queue_no, struct sxgbe_tx_queue *tx_ring, int tx_rsize) argument
522 free_tx_ring(struct device *dev, struct sxgbe_tx_queue *tx_ring, int tx_rsize) argument
800 struct sxgbe_tx_queue *tx_ring = priv->txq[queue_num]; local
[all...]
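The sxgbe_main.c init_tx_ring() hits above show the usual split between descriptor memory and software bookkeeping: one coherent DMA block that the hardware walks, plus devm-managed arrays holding the skb pointer and mapped address for every slot. A hedged kernel-style sketch of that allocation step, with an invented demo_ descriptor layout and error handling cut to the minimum:

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/skbuff.h>

struct demo_tx_desc { __le32 des0, des1, des2, des3; };  /* invented layout */

struct demo_tx_queue {
        struct demo_tx_desc *dma_tx;   /* CPU view of the descriptor ring */
        dma_addr_t dma_tx_phy;         /* bus address programmed into the MAC */
        struct sk_buff **tx_skbuff;    /* skb owning each slot */
        dma_addr_t *tx_skbuff_dma;     /* mapped buffer address per slot */
};

static int demo_init_tx_ring(struct device *dev, struct demo_tx_queue *q,
                             int tx_rsize)
{
        /* coherent, pre-zeroed block for the hardware descriptors */
        q->dma_tx = dma_zalloc_coherent(dev, tx_rsize * sizeof(*q->dma_tx),
                                        &q->dma_tx_phy, GFP_KERNEL);
        if (!q->dma_tx)
                return -ENOMEM;

        /* devm-managed per-descriptor software state */
        q->tx_skbuff_dma = devm_kcalloc(dev, tx_rsize, sizeof(dma_addr_t),
                                        GFP_KERNEL);
        q->tx_skbuff = devm_kcalloc(dev, tx_rsize, sizeof(struct sk_buff *),
                                    GFP_KERNEL);
        if (!q->tx_skbuff_dma || !q->tx_skbuff) {
                dma_free_coherent(dev, tx_rsize * sizeof(*q->dma_tx),
                                  q->dma_tx, q->dma_tx_phy);
                return -ENOMEM;
        }
        return 0;
}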
/drivers/net/ethernet/intel/i40evf/
i40e_txrx.h 284 void i40evf_clean_tx_ring(struct i40e_ring *tx_ring);
286 int i40evf_setup_tx_descriptors(struct i40e_ring *tx_ring);
288 void i40evf_free_tx_resources(struct i40e_ring *tx_ring);
/drivers/net/ethernet/qlogic/qlge/
qlge_dbg.c 1644 DUMP_QDEV_FIELD(qdev, "%p", tx_ring);
1669 void ql_dump_tx_ring(struct tx_ring *tx_ring) argument
1671 if (tx_ring == NULL)
1673 pr_err("===================== Dumping tx_ring %d ===============\n",
1674 tx_ring->wq_id);
1675 pr_err("tx_ring->base = %p\n", tx_ring->wq_base);
1676 pr_err("tx_ring->base_dma = 0x%llx\n",
1677 (unsigned long long) tx_ring
[all...]
/drivers/net/ethernet/smsc/
smsc9420.c 62 struct smsc9420_dma_desc *tx_ring; member in struct:smsc9420_pdata
548 BUG_ON(!pd->tx_ring);
563 pd->tx_ring[i].status = 0;
564 pd->tx_ring[i].length = 0;
565 pd->tx_ring[i].buffer1 = 0;
566 pd->tx_ring[i].buffer2 = 0;
951 status = pd->tx_ring[index].status;
952 length = pd->tx_ring[index].length;
970 pd->tx_ring[index].buffer1 = 0;
990 BUG_ON(pd->tx_ring[inde
[all...]
/drivers/net/ethernet/dlink/
sundance.c 372 struct netdev_desc *tx_ring; member in struct:netdev_private
562 np->tx_ring = (struct netdev_desc *)ring_space;
704 np->tx_ring, np->tx_ring_dma);
990 (unsigned long long)(np->tx_ring_dma + i*sizeof(*np->tx_ring)),
991 le32_to_cpu(np->tx_ring[i].next_desc),
992 le32_to_cpu(np->tx_ring[i].status),
993 (le32_to_cpu(np->tx_ring[i].status) >> 2) & 0xff,
994 le32_to_cpu(np->tx_ring[i].frag[0].addr),
995 le32_to_cpu(np->tx_ring[i].frag[0].length));
1068 np->tx_ring[
[all...]

Completed in 417 milliseconds
