Lines matching refs: lp

180 struct arcnet_local *lp = netdev_priv(dev);
189 spin_lock_irqsave(&lp->lock, flags);
191 lp->hw.copy_from_card(dev, bufnum, 0, buf, 512);
193 spin_unlock_irqrestore(&lp->lock, flags);
243 struct arcnet_local *lp = netdev_priv(dev);
246 lp->buf_queue[lp->first_free_buf++] = bufnum;
247 lp->first_free_buf %= 5;
252 for (i = lp->next_buf; i != lp->first_free_buf; i = (i+1) % 5)
253 BUGMSG2(D_DURING, "#%d ", lp->buf_queue[i]);
265 struct arcnet_local *lp = netdev_priv(dev);
268 if (!atomic_dec_and_test(&lp->buf_lock)) {
271 lp->buf_lock.counter);
274 if (lp->next_buf >= 5)
275 lp->next_buf -= 5;
277 if (lp->next_buf == lp->first_free_buf)
280 buf = lp->buf_queue[lp->next_buf++];
281 lp->next_buf %= 5;
288 for (i = lp->next_buf; i != lp->first_free_buf; i = (i+1) % 5)
289 BUGMSG2(D_DURING, "#%d ", lp->buf_queue[i]);
293 atomic_inc(&lp->buf_lock);
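
Taken together, the references at lines 243-293 describe a five-slot circular queue of free card buffers: release_arcbuf() pushes a freed buffer number at first_free_buf, get_arcbuf() pops from next_buf, and both indices wrap modulo 5; the atomic buf_lock (lines 268, 293) only guards against re-entering get_arcbuf() and is omitted below. A minimal userspace model of just the ring logic, with the field names taken from the listing and everything else (buffer count, the demo in main) assumed for illustration:

    #include <stdio.h>

    #define NUM_BUFS 5              /* the listing wraps every index modulo 5 */

    struct buf_ring {
        int buf_queue[NUM_BUFS];    /* circular queue of free buffer numbers  */
        int next_buf;               /* next free buffer to hand out           */
        int first_free_buf;         /* where the next released buffer goes    */
    };

    /* mirrors release_arcbuf(): push a freed buffer onto the ring */
    static void release_buf(struct buf_ring *r, int bufnum)
    {
        r->buf_queue[r->first_free_buf++] = bufnum;
        r->first_free_buf %= NUM_BUFS;
    }

    /* mirrors get_arcbuf(): pop a buffer, or -1 when the ring is empty */
    static int get_buf(struct buf_ring *r)
    {
        int buf = -1;

        if (r->next_buf >= NUM_BUFS)
            r->next_buf -= NUM_BUFS;

        if (r->next_buf != r->first_free_buf) {     /* queue not empty */
            buf = r->buf_queue[r->next_buf++];
            r->next_buf %= NUM_BUFS;
        }
        return buf;
    }

    int main(void)
    {
        struct buf_ring r = { .next_buf = 0, .first_free_buf = 0 };
        int i;

        for (i = 0; i < 4; i++)     /* assume four card buffers start out free */
            release_buf(&r, i);

        printf("got %d\n", get_buf(&r));    /* 0 */
        printf("got %d\n", get_buf(&r));    /* 1 */
        release_buf(&r, 0);                 /* buffer 0 comes back */
        printf("got %d\n", get_buf(&r));    /* 2 */
        return 0;
    }

A five-slot array holding at most four buffers lets the code distinguish an empty queue (next_buf == first_free_buf) from a full one without a separate element count.
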
352 struct arcnet_local *lp = netdev_priv(dev);
353 spin_lock_init(&lp->lock);
369 struct arcnet_local *lp = netdev_priv(dev);
374 if (!try_module_get(lp->hw.owner))
402 memset(lp->default_proto, 0, sizeof(lp->default_proto));
407 lp->default_proto[0] = count;
413 atomic_set(&lp->buf_lock, 1);
415 lp->next_buf = lp->first_free_buf = 0;
420 lp->cur_tx = lp->next_tx = -1;
421 lp->cur_rx = -1;
423 lp->rfc1201.sequence = 1;
426 if (lp->hw.open)
427 lp->hw.open(dev);
451 lp->intmask = NORXflag | RECONflag;
452 AINTMASK(lp->intmask);
460 module_put(lp->hw.owner);
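
Lines 369-460 are the open path. In order: the low-level driver module is pinned (line 374), the per-peer protocol map is cleared and slot 0, the broadcast address, is seeded with a protocol index chosen by a loop not shown in this listing (lines 402-407), the buffer bookkeeping is reset (lines 413-415), transmit and receive state is marked idle (lines 420-421), the RFC 1201 fragment sequence starts at 1 (line 423), the hardware open hook runs (lines 426-427), and the receive and reconfiguration interrupts are unmasked (lines 451-452). A simplified stand-alone model of the state this leaves behind, with field names from the listing and the struct layout itself assumed:

    #include <stdio.h>
    #include <string.h>

    /* stand-in for the fields of struct arcnet_local touched at open time */
    struct arcnet_state {
        unsigned char default_proto[256];   /* per-destination protocol map  */
        int buf_lock;                       /* 1 = buffer queue free to use  */
        int next_buf, first_free_buf;       /* free-buffer ring empty        */
        int cur_tx, next_tx;                /* -1 = no transmit in flight    */
        int cur_rx;                         /* -1 = no receive buffer armed  */
        int rfc1201_sequence;               /* RFC 1201 sequence number seed */
    };

    static void open_state_init(struct arcnet_state *s, unsigned char bcast_proto)
    {
        memset(s->default_proto, 0, sizeof(s->default_proto));
        s->default_proto[0] = bcast_proto;  /* broadcast entry (line 407) */
        s->buf_lock = 1;
        s->next_buf = s->first_free_buf = 0;
        s->cur_tx = s->next_tx = -1;
        s->cur_rx = -1;
        s->rfc1201_sequence = 1;
    }

    int main(void)
    {
        struct arcnet_state s;

        open_state_init(&s, 1 /* hypothetical protocol index */);
        printf("cur_tx=%d next_tx=%d cur_rx=%d seq=%d\n",
               s.cur_tx, s.next_tx, s.cur_rx, s.rfc1201_sequence);
        return 0;
    }
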
468 struct arcnet_local *lp = netdev_priv(dev);
479 lp->hw.close(dev);
480 module_put(lp->hw.owner);
489 const struct arcnet_local *lp = netdev_priv(dev);
528 proto_num = lp->default_proto[_daddr];
550 struct arcnet_local *lp = netdev_priv(dev);
586 proto = arc_proto_map[lp->default_proto[daddr]];
598 struct arcnet_local *lp = netdev_priv(dev);
608 ASTATUS(), lp->cur_tx, lp->next_tx, skb->len,skb->protocol);
628 spin_lock_irqsave(&lp->lock, flags);
630 if(lp->next_tx == -1)
644 lp->outgoing.proto = proto;
645 lp->outgoing.skb = skb;
646 lp->outgoing.pkt = pkt;
658 lp->next_tx = txbuf;
669 lp->intmask |= TXFREEflag|EXCNAKflag;
670 AINTMASK(lp->intmask);
673 spin_unlock_irqrestore(&lp->lock, flags);
687 struct arcnet_local *lp = netdev_priv(dev);
690 ASTATUS(), lp->intmask, lp->next_tx, lp->cur_tx);
692 if (lp->cur_tx != -1 || lp->next_tx == -1)
695 BUGLVL(D_TX) arcnet_dump_packet(dev, lp->next_tx, "go_tx", 0);
697 lp->cur_tx = lp->next_tx;
698 lp->next_tx = -1;
701 ACOMMAND(TXcmd | (lp->cur_tx << 3));
704 lp->lasttrans_dest = lp->lastload_dest;
705 lp->lastload_dest = 0;
706 lp->excnak_pending = 0;
707 lp->intmask |= TXFREEflag|EXCNAKflag;
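
Lines 598-707 show a two-stage transmit pipeline: arcnet_send_packet() stages a freshly loaded buffer into next_tx under lp->lock and unmasks TXFREE/EXCNAK (lines 628-670), and go_tx() later promotes next_tx to cur_tx and issues the TXcmd to the controller (lines 692-707). A small userspace model of that hand-off, with the register write replaced by a printf and the locking, interrupt masking and buffer loading omitted:

    #include <stdio.h>

    struct tx_state {
        int cur_tx;     /* buffer currently on the wire, -1 if none        */
        int next_tx;    /* buffer staged for the next transmit, -1 if none */
    };

    /* stage a loaded buffer, as arcnet_send_packet() does around line 658 */
    static void stage_tx(struct tx_state *t, int txbuf)
    {
        if (t->next_tx != -1) {             /* a packet is already staged */
            printf("busy, refusing to stage buffer %d\n", txbuf);
            return;
        }
        t->next_tx = txbuf;
    }

    /* promote the staged buffer, as go_tx() does around lines 692-701 */
    static void go_tx(struct tx_state *t)
    {
        /* nothing to do if a transmit is in flight or nothing is staged */
        if (t->cur_tx != -1 || t->next_tx == -1)
            return;

        t->cur_tx = t->next_tx;
        t->next_tx = -1;
        printf("ACOMMAND(TXcmd | (%d << 3))\n", t->cur_tx);  /* register write stand-in */
    }

    int main(void)
    {
        struct tx_state t = { .cur_tx = -1, .next_tx = -1 };

        stage_tx(&t, 2);
        go_tx(&t);          /* starts buffer 2 */
        stage_tx(&t, 3);
        go_tx(&t);          /* no-op: buffer 2 is still in flight */
        t.cur_tx = -1;      /* pretend the TXFREE interrupt retired it */
        go_tx(&t);          /* now starts buffer 3 */
        return 0;
    }

Keeping one buffer in flight and one staged lets the interrupt handler start the next packet immediately instead of waiting for the network stack.
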
717 struct arcnet_local *lp = netdev_priv(dev);
721 spin_lock_irqsave(&lp->lock, flags);
727 lp->timed_out = 1;
728 ACOMMAND(NOTXcmd | (lp->cur_tx << 3));
734 lp->intmask |= TXFREEflag|EXCNAKflag;
735 AINTMASK(lp->intmask);
737 spin_unlock_irqrestore(&lp->lock, flags);
739 if (time_after(jiffies, lp->last_timeout + 10*HZ)) {
741 msg, status, lp->intmask, lp->lasttrans_dest);
742 lp->last_timeout = jiffies;
745 if (lp->cur_tx == -1)
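
The timeout handler at lines 717-745 aborts the stuck transmit with NOTXcmd, sets timed_out so the interrupt handler cleans up, re-enables the TXFREE/EXCNAK interrupts, and rate-limits its warning to one per ten seconds using last_timeout. That rate-limit pattern in stand-alone form, with jiffies and HZ mocked (the real time_after() macro also does type checking):

    #include <stdio.h>

    #define HZ 100                      /* assumed tick rate for the demo  */

    static unsigned long jiffies;       /* mock of the kernel tick counter */
    static unsigned long last_timeout;

    /* simplified time_after(): the signed subtraction keeps it correct
       across jiffies wraparound, as in the kernel macro */
    static int time_after(unsigned long a, unsigned long b)
    {
        return (long)(b - a) < 0;
    }

    static void report_timeout(const char *msg)
    {
        if (time_after(jiffies, last_timeout + 10 * HZ)) {
            printf("transmit timed out%s\n", msg);
            last_timeout = jiffies;
        }
    }

    int main(void)
    {
        jiffies = 5000;  report_timeout("");   /* printed                    */
        jiffies = 5200;  report_timeout("");   /* suppressed: under 10 s gap */
        jiffies = 6100;  report_timeout("");   /* printed again              */
        return 0;
    }
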
758 struct arcnet_local *lp;
766 lp = netdev_priv(dev);
767 BUG_ON(!lp);
769 spin_lock(&lp->lock);
779 spin_unlock(&lp->lock);
784 ASTATUS(), lp->intmask);
819 if (status & lp->intmask & NORXflag) {
820 recbuf = lp->cur_rx;
824 lp->cur_rx = get_arcbuf(dev);
825 if (lp->cur_rx != -1) {
827 lp->cur_rx);
828 ACOMMAND(RXcmd | (lp->cur_rx << 3) | RXbcasts);
838 lp->excnak_pending = 1;
841 lp->intmask &= ~(EXCNAKflag);
847 if ((status & lp->intmask & TXFREEflag) || lp->timed_out) {
848 lp->intmask &= ~(TXFREEflag|EXCNAKflag);
852 if (lp->cur_tx != -1 && !lp->timed_out) {
854 if (lp->lasttrans_dest != 0) {
858 status, lp->lasttrans_dest);
865 status, lp->lasttrans_dest);
869 if (lp->outgoing.proto &&
870 lp->outgoing.proto->ack_tx) {
874 else if(lp->excnak_pending)
879 lp->outgoing.proto
883 if (lp->cur_tx != -1)
884 release_arcbuf(dev, lp->cur_tx);
886 lp->cur_tx = -1;
887 lp->timed_out = 0;
894 if (lp->outgoing.proto && lp->outgoing.proto->continue_tx) {
897 if (lp->outgoing.proto->continue_tx(dev, txbuf)) {
899 dev->stats.tx_bytes += lp->outgoing.skb->len;
900 if(!lp->outgoing.proto->ack_tx)
902 dev_kfree_skb_irq(lp->outgoing.skb);
903 lp->outgoing.proto = NULL;
906 lp->next_tx = txbuf;
910 if (lp->cur_tx == -1)
922 if (status & lp->intmask & RECONflag) {
933 if (!lp->first_recon || !lp->last_recon ||
934 time_after(jiffies, lp->last_recon + HZ * 10)) {
935 if (lp->network_down)
937 lp->first_recon = lp->last_recon = jiffies;
938 lp->num_recons = lp->network_down = 0;
942 lp->last_recon = jiffies;
943 lp->num_recons++;
946 lp->num_recons,
947 (lp->last_recon - lp->first_recon) / HZ,
948 lp->network_down);
956 if (!lp->network_down &&
957 (lp->last_recon - lp->first_recon) <= HZ * 60 &&
958 lp->num_recons >= RECON_THRESHOLD) {
959 lp->network_down = 1;
961 } else if (!lp->network_down &&
962 lp->last_recon - lp->first_recon > HZ * 60) {
964 lp->first_recon = lp->last_recon;
965 lp->num_recons = 1;
968 } else if (lp->network_down &&
969 time_after(jiffies, lp->last_recon + HZ * 10)) {
970 if (lp->network_down)
972 lp->first_recon = lp->last_recon = 0;
973 lp->num_recons = lp->network_down = 0;
991 AINTMASK(lp->intmask);
993 spin_unlock(&lp->lock);
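
Lines 922-973 count network reconfigurations to detect a recon storm: the first recon (or one arriving more than ten seconds after the previous) opens a new counting window; RECON_THRESHOLD recons within sixty seconds mark the network down; a window longer than sixty seconds is restarted; and ten quiet seconds after the last recon bring the network back up. A stand-alone model of that state machine, with field names from the listing, the threshold value assumed, and the driver's log messages reduced to printf:

    #include <stdio.h>

    #define HZ              100     /* assumed tick rate for the demo     */
    #define RECON_THRESHOLD 30      /* assumed value of the driver macro  */

    static unsigned long jiffies;   /* mock of the kernel tick counter    */

    struct recon_state {
        unsigned long first_recon, last_recon;  /* current counting window */
        int num_recons;                         /* recons seen in window   */
        int network_down;
    };

    /* wraparound-safe comparison, as the kernel time_after() macro does */
    static int time_after(unsigned long a, unsigned long b)
    {
        return (long)(b - a) < 0;
    }

    /* called for every RECON interrupt, mirroring lines 933-965 */
    static void saw_recon(struct recon_state *r)
    {
        if (!r->first_recon || !r->last_recon ||
            time_after(jiffies, r->last_recon + HZ * 10)) {
            /* first recon, or >10 s since the previous one: new window */
            r->first_recon = r->last_recon = jiffies;
            r->num_recons = r->network_down = 0;
            return;
        }

        r->last_recon = jiffies;
        r->num_recons++;

        if (!r->network_down &&
            r->last_recon - r->first_recon <= HZ * 60 &&
            r->num_recons >= RECON_THRESHOLD) {
            r->network_down = 1;                /* storm: too many, too fast */
            printf("network down at %lu\n", jiffies);
        } else if (!r->network_down &&
                   r->last_recon - r->first_recon > HZ * 60) {
            r->first_recon = r->last_recon;     /* slow trickle: restart window */
            r->num_recons = 1;
        }
    }

    /* called on interrupts with no RECON pending, mirroring lines 968-973 */
    static void no_recon(struct recon_state *r)
    {
        if (r->network_down && time_after(jiffies, r->last_recon + HZ * 10)) {
            printf("network up again at %lu\n", jiffies);
            r->first_recon = r->last_recon = 0;
            r->num_recons = r->network_down = 0;
        }
    }

    int main(void)
    {
        struct recon_state r = { 0 };
        int i;

        for (i = 0; i <= RECON_THRESHOLD; i++) {    /* a burst of recons     */
            jiffies += HZ;                          /* one per second        */
            saw_recon(&r);                          /* trips the storm check */
        }
        jiffies += HZ * 11;                         /* eleven quiet seconds  */
        no_recon(&r);                               /* network comes back up */
        return 0;
    }
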
1004 struct arcnet_local *lp = netdev_priv(dev);
1011 lp->hw.copy_from_card(dev, bufnum, 0, &pkt, ARC_HDR_SIZE);
1022 lp->hw.copy_from_card(dev, bufnum, ofs, soft, sizeof(pkt.soft));
1025 lp->hw.copy_from_card(dev, bufnum, ofs, soft, length);
1039 *oldp = arc_proto_map[lp->default_proto[pkt.hard.source]],
1051 lp->default_proto[0] = soft->proto;
1054 lp->default_proto[pkt.hard.source] = soft->proto;
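
Lines 1004-1054 are the receive side: the fixed-size hard header is copied out of the card buffer first (line 1011), then the soft header from its offset (lines 1022-1025), and the protocol byte of incoming packets is recorded in default_proto, both under the sender's address and in slot 0, so replies and broadcasts go out in whatever encapsulation that peer last used. The per-peer bookkeeping on its own, with the table size and field names from the listing and the protocol id in the demo assumed:

    #include <stdio.h>

    /* one slot per 8-bit ARCnet station address; slot 0 doubles as the
       broadcast / default entry */
    static unsigned char default_proto[256];

    /* mirrors lines 1051-1054: remember which protocol each peer last used */
    static void note_rx_proto(unsigned char source, unsigned char proto)
    {
        default_proto[0] = proto;       /* broadcast / fallback entry */
        default_proto[source] = proto;  /* per-peer entry             */
    }

    /* mirrors the transmit-side lookups at lines 528 and 586 */
    static unsigned char tx_proto_for(unsigned char daddr)
    {
        return default_proto[daddr];
    }

    int main(void)
    {
        note_rx_proto(7, 0xd4);     /* node 7 spoke RFC 1201 IP (id assumed) */
        printf("reply to node 7 with protocol 0x%02x\n", tx_proto_for(7));
        return 0;
    }
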
1073 struct arcnet_local *lp = netdev_priv(dev);
1077 lp->default_proto[daddr]);
1088 struct arcnet_local *lp = netdev_priv(dev);
1099 lp->hw.copy_to_card(dev, bufnum, 0, &newpkt, ARC_HDR_SIZE);