Searched refs:TX_RING_SIZE (Results 26 - 47 of 47) sorted by relevance


/drivers/net/ethernet/via/
via-rhine.c
77 #define TX_RING_SIZE 16 macro
164 the list. The ring sizes are set at compile time by RX/TX_RING_SIZE.
435 struct sk_buff *tx_skbuff[TX_RING_SIZE];
436 dma_addr_t tx_skbuff_dma[TX_RING_SIZE];
439 unsigned char *tx_buf[TX_RING_SIZE];
1086 TX_RING_SIZE * sizeof(struct tx_desc),
1094 PKT_BUF_SZ * TX_RING_SIZE,
1099 TX_RING_SIZE * sizeof(struct tx_desc),
1119 TX_RING_SIZE * sizeof(struct tx_desc),
1124 pci_free_consistent(rp->pdev, PKT_BUF_SZ * TX_RING_SIZE,
[all...]
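The via-rhine hits above (lines 1086-1124) size one coherent DMA block for the descriptor ring, TX_RING_SIZE * sizeof(struct tx_desc), and a second block of per-slot bounce buffers, PKT_BUF_SZ * TX_RING_SIZE. A minimal user-space sketch of that sizing arithmetic, with malloc standing in for the driver's pci_alloc_consistent and the tx_desc layout and PKT_BUF_SZ value assumed for illustration:

    #include <stdio.h>
    #include <stdlib.h>
    #include <stdint.h>

    #define TX_RING_SIZE 16          /* matches the via-rhine define above */
    #define PKT_BUF_SZ   1536        /* assumed bounce-buffer size         */

    /* Hypothetical stand-in for the driver's hardware descriptor layout. */
    struct tx_desc {
        uint32_t tx_status;
        uint32_t desc_length;
        uint32_t addr;               /* DMA address of the packet data     */
        uint32_t next_desc;          /* DMA address of the next descriptor */
    };

    int main(void)
    {
        /* One contiguous block per ring; the real driver gets these from
         * pci_alloc_consistent() so the NIC can DMA to and from them.    */
        size_t ring_bytes = TX_RING_SIZE * sizeof(struct tx_desc);
        size_t bufs_bytes = (size_t)PKT_BUF_SZ * TX_RING_SIZE;

        struct tx_desc *ring = calloc(1, ring_bytes);
        unsigned char  *bufs = calloc(1, bufs_bytes);
        if (!ring || !bufs)
            return 1;

        printf("descriptor ring: %zu bytes, bounce buffers: %zu bytes\n",
               ring_bytes, bufs_bytes);

        /* slot i's bounce buffer starts at bufs + i * PKT_BUF_SZ */
        free(bufs);
        free(ring);
        return 0;
    }
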
/drivers/net/ethernet/
fealnx.c
56 // #define TX_RING_SIZE 16
58 #define TX_RING_SIZE 6 macro
60 #define TX_TOTAL_SIZE TX_RING_SIZE*sizeof(struct fealnx_desc)
1220 for (i = 0; i < TX_RING_SIZE; i++)
1286 np->free_tx_count = TX_RING_SIZE;
1288 for (i = 0; i < TX_RING_SIZE; i++) {
1391 np->free_tx_count = TX_RING_SIZE;
1393 for (i = 0; i < TX_RING_SIZE; i++) {
1409 np->tx_ring[TX_RING_SIZE - 1].next_desc = np->tx_ring_dma;
1410 np->tx_ring[TX_RING_SIZE
[all...]
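fealnx initializes a next pointer in every descriptor (the loops at 1288 and 1393) and line 1409 closes the ring by pointing the last descriptor back at the DMA base. A small sketch of the same chaining, using array indices as a stand-in for DMA addresses (names hypothetical):

    #include <stdio.h>

    #define TX_RING_SIZE 6                  /* matches the fealnx define */

    /* Hypothetical software model of a chained descriptor. */
    struct desc {
        int next;                           /* index of the next descriptor
                                               (a DMA address in the driver) */
    };

    int main(void)
    {
        struct desc tx_ring[TX_RING_SIZE];
        int i;

        /* Each descriptor points at the one after it ... */
        for (i = 0; i < TX_RING_SIZE; i++)
            tx_ring[i].next = i + 1;

        /* ... and the last one wraps back to the start, closing the ring. */
        tx_ring[TX_RING_SIZE - 1].next = 0;

        for (i = 0; i < TX_RING_SIZE; i++)
            printf("desc %d -> desc %d\n", i, tx_ring[i].next);
        return 0;
    }
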
/drivers/net/ethernet/pasemi/
pasemi_mac.c
516 ring->size = TX_RING_SIZE;
518 TX_RING_SIZE, GFP_KERNEL);
523 if (pasemi_dma_alloc_ring(&ring->chan, TX_RING_SIZE))
529 val |= PAS_DMA_TXCHAN_BASEU_SIZ(TX_RING_SIZE >> 3);
571 limit += TX_RING_SIZE;
574 info = &txring->ring_info[(i+1) & (TX_RING_SIZE-1)];
579 (TX_RING_SIZE-1)].dma;
890 ring_limit += TX_RING_SIZE;
938 txring->next_to_clean = i & (TX_RING_SIZE-1);
1574 txring->next_to_fill = (fill + nfrags + 1) & (TX_RING_SIZE
[all...]
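pasemi_mac indexes its ring with a bitwise AND against TX_RING_SIZE-1 (lines 574, 938, 1574), which is only equivalent to the modulo form when the size is a power of two. A brief sketch of that equivalence, with a free-running counter:

    #include <assert.h>
    #include <stdio.h>

    #define TX_RING_SIZE 64                 /* must be a power of two      */
    #define TX_RING_MASK (TX_RING_SIZE - 1)

    int main(void)
    {
        /* A free-running counter wraps naturally; the mask picks the slot.
         * For a power-of-two size, (i & MASK) == (i % TX_RING_SIZE).     */
        for (unsigned int i = 0; i < 3 * TX_RING_SIZE; i++) {
            unsigned int slot = i & TX_RING_MASK;
            assert(slot == i % TX_RING_SIZE);
            if (i % TX_RING_SIZE == 0)
                printf("counter %u -> slot %u\n", i, slot);
        }
        return 0;
    }
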
/drivers/net/ethernet/smsc/
epic100.c
53 #define TX_RING_SIZE 256 macro
56 #define TX_TOTAL_SIZE TX_RING_SIZE*sizeof(struct epic_tx_desc)
260 struct sk_buff* tx_skbuff[TX_RING_SIZE];
824 outl(ep->tx_ring_dma + (ep->dirty_tx%TX_RING_SIZE)*
950 for (i = 0; i < TX_RING_SIZE; i++) {
975 entry = ep->cur_tx % TX_RING_SIZE;
1046 int entry = dirty_tx % TX_RING_SIZE;
1068 if (cur_tx - dirty_tx > TX_RING_SIZE) {
1072 dirty_tx += TX_RING_SIZE;
1339 for (i = 0; i < TX_RING_SIZE;
[all...]
smsc9420.h
25 #define TX_RING_SIZE (32) macro
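epic100 keeps two free-running counters: cur_tx counts descriptors handed to the NIC and dirty_tx counts descriptors reclaimed, so entry = cur_tx % TX_RING_SIZE picks the slot (lines 975, 1046) and cur_tx - dirty_tx is the number in flight (line 1068). A minimal model of that bookkeeping, with the reclaim step faked for illustration:

    #include <stdbool.h>
    #include <stdio.h>

    #define TX_RING_SIZE 256             /* matches the epic100 define */

    static unsigned int cur_tx;          /* descriptors handed to the NIC  */
    static unsigned int dirty_tx;        /* descriptors reclaimed after TX */

    static bool ring_full(void)
    {
        /* Unsigned subtraction stays correct even after the counters wrap. */
        return cur_tx - dirty_tx >= TX_RING_SIZE;
    }

    int main(void)
    {
        for (int pkt = 0; pkt < 300; pkt++) {
            if (ring_full()) {
                /* a real driver would stop the queue here */
                dirty_tx++;              /* pretend the NIC finished one */
            }
            unsigned int entry = cur_tx % TX_RING_SIZE;
            /* ... fill tx_ring[entry] ... */
            (void)entry;
            cur_tx++;
        }
        printf("in flight: %u of %d\n", cur_tx - dirty_tx, TX_RING_SIZE);
        return 0;
    }
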
/drivers/net/ethernet/freescale/
fec.c
153 #define TX_RING_SIZE 16 /* Must be power of two */ macro
156 #if (((RX_RING_SIZE + TX_RING_SIZE) * 8) > PAGE_SIZE)
212 unsigned char *tx_bounce[TX_RING_SIZE];
213 struct sk_buff* tx_skbuff[TX_RING_SIZE];
1200 for (i = 0; i < TX_RING_SIZE; i++)
1231 for (i = 0; i < TX_RING_SIZE; i++) {
1475 for (i = 0; i < TX_RING_SIZE; i++) {
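fec.c requires TX_RING_SIZE to be a power of two (line 153) and fails the build when the RX and TX descriptors together outgrow one page (the #if on line 156). A hedged equivalent using C11 _Static_assert, with the RX ring size, page size, and 8-byte descriptor size assumed here:

    #include <stdio.h>

    #define PAGE_SIZE    4096            /* assumption: 4 KiB pages        */
    #define RX_RING_SIZE 128             /* assumed RX ring size           */
    #define TX_RING_SIZE 16              /* must be a power of two         */
    #define DESC_SIZE    8               /* bytes per buffer descriptor    */

    /* Fail the build if the size is not a power of two. */
    _Static_assert((TX_RING_SIZE & (TX_RING_SIZE - 1)) == 0,
                   "TX_RING_SIZE must be a power of two");

    /* Fail the build if both rings no longer fit in a single page. */
    _Static_assert((RX_RING_SIZE + TX_RING_SIZE) * DESC_SIZE <= PAGE_SIZE,
                   "descriptor rings must fit in one page");

    int main(void)
    {
        printf("rings use %d of %d bytes\n",
               (RX_RING_SIZE + TX_RING_SIZE) * DESC_SIZE, PAGE_SIZE);
        return 0;
    }
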
/drivers/net/ethernet/adaptec/
starfire.c
132 #define TX_RING_SIZE 32 macro
229 RX/TX_RING_SIZE pay close attention to page sizes and the ring-empty warning
552 struct tx_ring_info tx_info[TX_RING_SIZE];
933 tx_ring_size = ((sizeof(starfire_tx_desc) * TX_RING_SIZE + QUEUE_ALIGN - 1) / QUEUE_ALIGN) * QUEUE_ALIGN;
1208 for (i = 0; i < TX_RING_SIZE; i++)
1224 if ((np->cur_tx - np->dirty_tx) + skb_num_frags(skb) * 2 > TX_RING_SIZE) {
1236 entry = np->cur_tx % TX_RING_SIZE;
1244 if (entry >= TX_RING_SIZE - skb_num_frags(skb)) {
1277 np->tx_info[entry].used_slots = TX_RING_SIZE - entry;
1285 /* scavenge the tx descriptors twice per TX_RING_SIZE */
[all...]
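starfire refuses to queue a packet unless the ring can hold every fragment with slack (line 1224) and separately handles fragments that would cross the wrap point (line 1244). A sketch of just the space check; the factor of two and the counter names come from the hits above, the rest is hypothetical:

    #include <stdbool.h>
    #include <stdio.h>

    #define TX_RING_SIZE 32              /* matches the starfire define */

    static unsigned int cur_tx, dirty_tx;    /* free-running ring counters */

    /* Space check modeled on starfire line 1224: demand room for twice the
     * fragment count (the factor of two is taken from the hit above).    */
    static bool has_room(unsigned int nfrags)
    {
        return (cur_tx - dirty_tx) + nfrags * 2 <= TX_RING_SIZE;
    }

    int main(void)
    {
        unsigned int nfrags = 3;         /* hypothetical fragmented packet */

        if (!has_room(nfrags)) {
            printf("ring full: stop the queue\n");
            return 0;
        }
        printf("queue packet starting at slot %u\n", cur_tx % TX_RING_SIZE);
        cur_tx += nfrags;                /* this sketch uses one slot per fragment */
        return 0;
    }
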
/drivers/net/ethernet/3com/
3c59x.c
38 #define TX_RING_SIZE 16 macro
603 struct sk_buff* tx_skbuff[TX_RING_SIZE];
1220 + sizeof(struct boom_tx_desc) * TX_RING_SIZE,
1486 + sizeof(struct boom_tx_desc) * TX_RING_SIZE,
1704 for (i = 0; i < TX_RING_SIZE; i++)
1941 iowrite32(vp->tx_ring_dma + (vp->dirty_tx % TX_RING_SIZE) * sizeof(struct boom_tx_desc),
1943 if (vp->cur_tx - vp->dirty_tx < TX_RING_SIZE)
2130 int entry = vp->cur_tx % TX_RING_SIZE;
2131 struct boom_tx_desc *prev_entry = &vp->tx_ring[(vp->cur_tx-1) % TX_RING_SIZE];
2149 if (vp->cur_tx - vp->dirty_tx >= TX_RING_SIZE) {
[all...]
/drivers/net/ethernet/renesas/
sh_eth.c
643 for (i = 0; i < TX_RING_SIZE; i++) {
660 int tx_ringsize = sizeof(*txdesc) * TX_RING_SIZE;
702 for (i = 0; i < TX_RING_SIZE; i++) {
744 mdp->tx_skbuff = kmalloc(sizeof(*mdp->tx_skbuff) * TX_RING_SIZE,
767 tx_ringsize = sizeof(struct sh_eth_txdesc) * TX_RING_SIZE;
883 entry = mdp->dirty_tx % TX_RING_SIZE;
896 if (entry >= TX_RING_SIZE - 1)
1476 for (i = 0; i < TX_RING_SIZE; i++) {
1499 if ((mdp->cur_tx - mdp->dirty_tx) >= (TX_RING_SIZE - 4)) {
1510 entry = mdp->cur_tx % TX_RING_SIZE;
[all...]
sh_eth.h
28 #define TX_RING_SIZE 64 /* Tx ring size */ macro
/drivers/net/ethernet/amd/
a2065.c
71 #define TX_RING_SIZE (1 << LANCE_LOG_TX_BUFFERS) macro
74 #define TX_RING_MOD_MASK (TX_RING_SIZE - 1)
96 struct lance_tx_desc btx_ring[TX_RING_SIZE];
99 char tx_buf[TX_RING_SIZE][TX_BUFF_SIZE];
sunlance.c
181 #define TX_RING_SIZE (1 << (LANCE_LOG_TX_BUFFERS)) macro
182 #define TX_RING_MOD_MASK (TX_RING_SIZE - 1)
228 struct lance_tx_desc btx_ring[TX_RING_SIZE];
230 u8 tx_buf [TX_RING_SIZE][TX_BUFF_SIZE];
345 for (i = 0; i < TX_RING_SIZE; i++) {
401 for (i = 0; i < TX_RING_SIZE; i++) {
7990.c
104 for (t=0; t < TX_RING_SIZE; t++) { \
pcnet32.c
171 #define TX_RING_SIZE (1 << (PCNET32_LOG_TX_BUFFERS)) macro
1757 lp->tx_ring_size = TX_RING_SIZE; /* default tx ring size */
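The LANCE-family drivers above (a2065, sunlance, pcnet32) derive TX_RING_SIZE from a log2 constant and pair it with TX_RING_MOD_MASK for wrap arithmetic. A short sketch of the derivation; the remark about the hardware consuming the log2 value is an assumption about LANCE-style init blocks:

    #include <stdio.h>

    #define LANCE_LOG_TX_BUFFERS 4               /* hypothetical: 2^4 entries */
    #define TX_RING_SIZE     (1 << LANCE_LOG_TX_BUFFERS)
    #define TX_RING_MOD_MASK (TX_RING_SIZE - 1)

    int main(void)
    {
        /* Deriving the size from its log2 guarantees a power of two, so
         * the mask form of the wrap is always valid ...                 */
        unsigned int entry = 0;
        for (int i = 0; i < 40; i++)
            entry = (entry + 1) & TX_RING_MOD_MASK;

        /* ... and the log2 itself is what LANCE-style hardware expects
         * when the ring length is programmed (an assumption here).      */
        printf("ring of %d entries, log2 field = %d, ended at slot %u\n",
               TX_RING_SIZE, LANCE_LOG_TX_BUFFERS, entry);
        return 0;
    }
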
/drivers/net/ethernet/natsemi/
natsemi.c
102 #define TX_RING_SIZE 16 macro
174 the list. The ring sizes are set at compile time by RX/TX_RING_SIZE.
547 struct sk_buff *tx_skbuff[TX_RING_SIZE];
548 dma_addr_t tx_dma[TX_RING_SIZE];
1868 for (i = 0; i < TX_RING_SIZE; i++) {
1919 sizeof(struct netdev_desc) * (RX_RING_SIZE+TX_RING_SIZE),
1971 for (i = 0; i < TX_RING_SIZE; i++) {
1975 *((i+1)%TX_RING_SIZE+RX_RING_SIZE));
2007 for (i = 0; i < TX_RING_SIZE; i++) {
2049 sizeof(struct netdev_desc) * (RX_RING_SIZE+TX_RING_SIZE),
[all...]
/drivers/net/ethernet/i825xx/
82596.c
284 #define TX_RING_SIZE 64 macro
331 struct tx_cmd tx_cmds[TX_RING_SIZE];
332 struct i596_tbd tbds[TX_RING_SIZE];
369 static int max_cmd_backlog = TX_RING_SIZE-1;
1103 if (++lp->next_tx_cmd == TX_RING_SIZE)
lib82596.c
272 #define TX_RING_SIZE 32 macro
311 struct tx_cmd tx_cmds[TX_RING_SIZE] __attribute__((aligned(32)));
312 struct i596_tbd tbds[TX_RING_SIZE] __attribute__((aligned(32)));
363 static int max_cmd_backlog = TX_RING_SIZE-1;
1001 if (++lp->next_tx_cmd == TX_RING_SIZE)
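Both i825xx drivers advance their command index with a compare-and-reset wrap (82596.c line 1103, lib82596.c line 1001) and cap the queue depth at max_cmd_backlog = TX_RING_SIZE - 1. A minimal model with hypothetical backlog tracking:

    #include <stdio.h>

    #define TX_RING_SIZE 32              /* matches the lib82596 define */

    static int max_cmd_backlog = TX_RING_SIZE - 1;   /* one slot held back */
    static int next_tx_cmd;              /* index of the next command slot */
    static int cmd_backlog;              /* commands currently queued      */

    static int queue_cmd(void)
    {
        if (cmd_backlog >= max_cmd_backlog)
            return -1;                   /* caller must wait or drop       */

        /* ... fill tx_cmds[next_tx_cmd] here ... */

        /* Compare-and-reset wrap, as in 82596.c line 1103. */
        if (++next_tx_cmd == TX_RING_SIZE)
            next_tx_cmd = 0;
        cmd_backlog++;
        return 0;
    }

    int main(void)
    {
        int queued = 0;
        while (queue_cmd() == 0)
            queued++;
        printf("queued %d commands before hitting the backlog cap\n", queued);
        return 0;
    }
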
/drivers/net/ethernet/sun/
sunqe.c
589 txbuf = &qbufs->tx_buf[entry & (TX_RING_SIZE - 1)][0];
591 qebuf_offset(tx_buf, (entry & (TX_RING_SIZE - 1)));
sungem.c
667 last &= (TX_RING_SIZE - 1);
994 if (!(entry & ((TX_RING_SIZE>>1)-1)))
1594 for (i = 0; i < TX_RING_SIZE; i++) {
1603 int ent = i & (TX_RING_SIZE - 1);
1657 for (i = 0; i < TX_RING_SIZE; i++) {
sunbmac.c
204 for (i = 0; i < TX_RING_SIZE; i++) {
251 for (i = 0; i < TX_RING_SIZE; i++)
sunhme.c
144 for (i = 0; i < TX_RING_SIZE; i+=4) {
1216 for (i = 0; i < TX_RING_SIZE; i++) {
1285 for (i = 0; i < TX_RING_SIZE; i++)
1617 HMD(("tx rsize=%d oreg[%08x], ", (int)TX_RING_SIZE,
1619 hme_write32(hp, etxregs + ETX_RSIZE, (TX_RING_SIZE >> ETX_RSIZE_SHIFT) - 1);
1938 last &= (TX_RING_SIZE - 1);
/drivers/net/ethernet/alteon/
acenic.h
448 #define TX_RING_SIZE (MAX_TX_RING_ENTRIES * sizeof(struct tx_desc)) macro
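acenic.h is the odd one out in this list: its TX_RING_SIZE is a byte count, MAX_TX_RING_ENTRIES * sizeof(struct tx_desc), rather than an entry count like the other drivers above. A tiny sketch of the distinction, with the entry count and descriptor layout assumed:

    #include <stdio.h>

    #define MAX_TX_RING_ENTRIES 256      /* hypothetical entry count */

    struct tx_desc { unsigned long long addr; unsigned int len, flags; };

    /* Here TX_RING_SIZE is a size in bytes, suitable for passing straight
     * to an allocator, not a number of ring slots.                       */
    #define TX_RING_SIZE (MAX_TX_RING_ENTRIES * sizeof(struct tx_desc))

    int main(void)
    {
        printf("%d entries of %zu bytes -> TX_RING_SIZE = %zu bytes\n",
               MAX_TX_RING_ENTRIES, sizeof(struct tx_desc), TX_RING_SIZE);
        return 0;
    }
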

Completed in 480 milliseconds
