Lines matching refs:card in the mISDN NETJET driver (netjet.c)

106 _set_debug(struct tiger_hw *card)
108 card->isac.dch.debug = debug;
109 card->bc[0].bch.debug = debug;
110 card->bc[1].bch.debug = debug;
117 struct tiger_hw *card;
122 list_for_each_entry(card, &Cards, list)
123 _set_debug(card);
136 nj_disable_hwirq(struct tiger_hw *card)
138 outb(0, card->base + NJ_IRQMASK0);
139 outb(0, card->base + NJ_IRQMASK1);
146 struct tiger_hw *card = p;
149 card->auxd &= 0xfc;
150 card->auxd |= (offset >> 4) & 3;
151 outb(card->auxd, card->base + NJ_AUXDATA);
152 ret = inb(card->base + NJ_ISAC_OFF + ((offset & 0x0f) << 2));
159 struct tiger_hw *card = p;
161 card->auxd &= 0xfc;
162 card->auxd |= (offset >> 4) & 3;
163 outb(card->auxd, card->base + NJ_AUXDATA);
164 outb(value, card->base + NJ_ISAC_OFF + ((offset & 0x0f) << 2));
170 struct tiger_hw *card = p;
172 card->auxd &= 0xfc;
173 outb(card->auxd, card->base + NJ_AUXDATA);
174 insb(card->base + NJ_ISAC_OFF, data, size);
180 struct tiger_hw *card = p;
182 card->auxd &= 0xfc;
183 outb(card->auxd, card->base + NJ_AUXDATA);
184 outsb(card->base + NJ_ISAC_OFF, data, size);
190 struct tiger_hw *card = bc->bch.hw;
193 pr_debug("%s: B%1d fill %02x len %d idx %d/%d\n", card->name,
194 bc->bch.nr, fill, cnt, idx, card->send.idx);
201 val = card->send.start[idx];
204 card->send.start[idx++] = val;
205 if (idx >= card->send.size)
213 struct tiger_hw *card = bc->bch.hw;
215 pr_debug("%s: B%1d protocol %x-->%x\n", card->name,
221 fill_mem(bc, 0, card->send.size, 0xff);
224 if ((card->bc[0].bch.state == ISDN_P_NONE) &&
225 (card->bc[1].bch.state == ISDN_P_NONE)) {
226 card->dmactrl = 0;
227 outb(card->dmactrl, card->base + NJ_DMACTRL);
228 outb(0, card->base + NJ_IRQMASK0);
240 bc->free = card->send.size / 2;
244 if (!card->dmactrl) {
245 card->dmactrl = 1;
246 outb(card->dmactrl, card->base + NJ_DMACTRL);
247 outb(0x0f, card->base + NJ_IRQMASK0);
254 bc->free = card->send.size / 2;
260 if (!card->dmactrl) {
261 card->dmactrl = 1;
262 outb(card->dmactrl, card->base + NJ_DMACTRL);
263 outb(0x0f, card->base + NJ_IRQMASK0);
267 pr_info("%s: %s protocol %x not handled\n", card->name,
271 card->send.dmacur = inl(card->base + NJ_DMA_READ_ADR);
272 card->recv.dmacur = inl(card->base + NJ_DMA_WRITE_ADR);
273 card->send.idx = (card->send.dmacur - card->send.dmastart) >> 2;
274 card->recv.idx = (card->recv.dmacur - card->recv.dmastart) >> 2;
276 card->name, __func__,
277 inb(card->base + NJ_DMACTRL),
278 inb(card->base + NJ_IRQMASK0),
279 inb(card->base + NJ_IRQSTAT0),
280 card->send.idx,
281 card->recv.idx);
286 nj_reset(struct tiger_hw *card)
288 outb(0xff, card->base + NJ_CTRL); /* Reset On */
293 if (card->typ == NETJET_S_TJ320) /* TJ320 */
294 card->ctrlreg = 0x40; /* Reset Off and status read clear */
296 card->ctrlreg = 0x00; /* Reset Off and status read clear */
297 outb(card->ctrlreg, card->base + NJ_CTRL);
301 card->auxd = 0;
302 card->dmactrl = 0;
303 outb(~NJ_ISACIRQ, card->base + NJ_AUXCTRL);
304 outb(NJ_ISACIRQ, card->base + NJ_IRQMASK1);
305 outb(card->auxd, card->base + NJ_AUXDATA);
309 inittiger(struct tiger_hw *card)
313 card->dma_p = pci_alloc_consistent(card->pdev, NJ_DMA_SIZE,
314 &card->dma);
315 if (!card->dma_p) {
316 pr_info("%s: No DMA memory\n", card->name);
319 if ((u64)card->dma > 0xffffffff) {
320 pr_info("%s: DMA outside 32 bit\n", card->name);
324 card->bc[i].hsbuf = kmalloc(NJ_DMA_TXSIZE, GFP_ATOMIC);
325 if (!card->bc[i].hsbuf) {
326 pr_info("%s: no B%d send buffer\n", card->name, i + 1);
329 card->bc[i].hrbuf = kmalloc(NJ_DMA_RXSIZE, GFP_ATOMIC);
330 if (!card->bc[i].hrbuf) {
331 pr_info("%s: no B%d recv buffer\n", card->name, i + 1);
335 memset(card->dma_p, 0xff, NJ_DMA_SIZE);
337 card->send.start = card->dma_p;
338 card->send.dmastart = (u32)card->dma;
339 card->send.dmaend = card->send.dmastart +
341 card->send.dmairq = card->send.dmastart +
343 card->send.size = NJ_DMA_TXSIZE;
347 " size %zu u32\n", card->name,
348 card->send.dmastart, card->send.dmairq,
349 card->send.dmaend, card->send.start, card->send.size);
351 outl(card->send.dmastart, card->base + NJ_DMA_READ_START);
352 outl(card->send.dmairq, card->base + NJ_DMA_READ_IRQ);
353 outl(card->send.dmaend, card->base + NJ_DMA_READ_END);
355 card->recv.start = card->dma_p + (NJ_DMA_SIZE / 2);
356 card->recv.dmastart = (u32)card->dma + (NJ_DMA_SIZE / 2);
357 card->recv.dmaend = card->recv.dmastart +
359 card->recv.dmairq = card->recv.dmastart +
361 card->recv.size = NJ_DMA_RXSIZE;
365 " size %zu u32\n", card->name,
366 card->recv.dmastart, card->recv.dmairq,
367 card->recv.dmaend, card->recv.start, card->recv.size);
369 outl(card->recv.dmastart, card->base + NJ_DMA_WRITE_START);
370 outl(card->recv.dmairq, card->base + NJ_DMA_WRITE_IRQ);
371 outl(card->recv.dmaend, card->base + NJ_DMA_WRITE_END);
378 struct tiger_hw *card = bc->bch.hw;
385 pr_info("%s: B%1d overrun at idx %d\n", card->name,
393 card->name, bc->bch.nr);
400 pr_debug("%s: B%1d overrun %d\n", card->name,
410 val = card->recv.start[idx++];
413 if (idx >= card->recv.size)
426 card->name, bc->bch.nr);
429 card->name, bc->bch.nr);
432 card->name, bc->bch.nr, bc->bch.maxlen);
438 snprintf(card->log, LOG_SIZE, "B%1d-recv %s %d ",
439 bc->bch.nr, card->name, stat);
440 print_hex_dump_bytes(card->log, DUMP_PREFIX_OFFSET,
453 card->name, bc->bch.nr);
463 recv_tiger(struct tiger_hw *card, u8 irq_stat)
466 int cnt = card->recv.size / 2;
469 card->last_is0 &= ~NJ_IRQM0_WR_MASK;
470 card->last_is0 |= (irq_stat & NJ_IRQM0_WR_MASK);
475 idx = card->recv.size - 1;
477 if (test_bit(FLG_ACTIVE, &card->bc[0].bch.Flags))
478 read_dma(&card->bc[0], idx, cnt);
479 if (test_bit(FLG_ACTIVE, &card->bc[1].bch.Flags))
480 read_dma(&card->bc[1], idx, cnt);
485 resync(struct tiger_ch *bc, struct tiger_hw *card)
487 card->send.dmacur = inl(card->base | NJ_DMA_READ_ADR);
488 card->send.idx = (card->send.dmacur - card->send.dmastart) >> 2;
489 if (bc->free > card->send.size / 2)
490 bc->free = card->send.size / 2;
495 if (card->send.idx < ((card->send.size / 2) - 1))
496 bc->idx = (card->recv.size / 2) - 1;
498 bc->idx = card->recv.size - 1;
500 pr_debug("%s: %s B%1d free %d idx %d/%d\n", card->name,
501 __func__, bc->bch.nr, bc->free, bc->idx, card->send.idx);
509 struct tiger_hw *card = bc->bch.hw;
516 pr_debug("%s: %s B%1d %d state %x idx %d/%d\n", card->name,
518 bc->idx, card->send.idx);
520 resync(bc, card);
523 pr_debug("%s: B%1d hdlc encoded %d flags\n", card->name,
529 if (bc->idx >= card->send.size)
531 v = card->send.start[bc->idx];
534 card->send.start[bc->idx++] = v;
537 snprintf(card->log, LOG_SIZE, "B%1d-send %s %d ",
538 bc->bch.nr, card->name, count);
539 print_hex_dump_bytes(card->log, DUMP_PREFIX_OFFSET, p, count);
546 struct tiger_hw *card = bc->bch.hw;
556 pr_debug("%s: %s B%1d %d/%d/%d/%d state %x idx %d/%d\n", card->name,
558 bc->bch.tx_skb->len, bc->txstate, bc->idx, card->send.idx);
560 resync(bc, card);
565 pr_debug("%s: B%1d hdlc encoded %d in %d\n", card->name,
578 if (bc->idx >= card->send.size)
580 v = card->send.start[bc->idx];
583 card->send.start[bc->idx++] = v;
586 snprintf(card->log, LOG_SIZE, "B%1d-send %s %d ",
587 bc->bch.nr, card->name, count);
588 print_hex_dump_bytes(card->log, DUMP_PREFIX_OFFSET, p, count);
616 send_tiger_bc(struct tiger_hw *card, struct tiger_ch *bc)
620 bc->free += card->send.size / 2;
621 if (bc->free >= card->send.size) {
623 pr_info("%s: B%1d TX underrun state %x\n", card->name,
627 bc->free = card->send.size;
635 pr_debug("%s: B%1d TX no data free %d idx %d/%d\n", card->name,
636 bc->bch.nr, bc->free, bc->idx, card->send.idx);
639 if (bc->free == card->send.size)
646 send_tiger(struct tiger_hw *card, u8 irq_stat)
651 if ((irq_stat & card->last_is0) & NJ_IRQM0_RD_MASK) {
653 card->name, irq_stat, card->last_is0);
656 card->last_is0 &= ~NJ_IRQM0_RD_MASK;
657 card->last_is0 |= (irq_stat & NJ_IRQM0_RD_MASK);
660 if (test_bit(FLG_ACTIVE, &card->bc[i].bch.Flags))
661 send_tiger_bc(card, &card->bc[i]);
668 struct tiger_hw *card = dev_id;
671 spin_lock(&card->lock);
672 s0val = inb(card->base | NJ_IRQSTAT0);
673 s1val = inb(card->base | NJ_IRQSTAT1);
676 spin_unlock(&card->lock);
679 pr_debug("%s: IRQSTAT0 %02x IRQSTAT1 %02x\n", card->name, s0val, s1val);
680 card->irqcnt++;
682 val = ReadISAC_nj(card, ISAC_ISTA);
684 mISDNisac_irq(&card->isac, val);
689 outb(s0val, card->base | NJ_IRQSTAT0);
694 card->recv.dmacur = inl(card->base | NJ_DMA_WRITE_ADR);
695 card->recv.idx = (card->recv.dmacur - card->recv.dmastart) >> 2;
696 if (card->recv.dmacur < card->recv.dmairq)
701 card->send.dmacur = inl(card->base | NJ_DMA_READ_ADR);
702 card->send.idx = (card->send.dmacur - card->send.dmastart) >> 2;
703 if (card->send.dmacur < card->send.dmairq)
708 pr_debug("%s: DMA Status %02x/%02x/%02x %d/%d\n", card->name,
709 s1val, s0val, card->last_is0,
710 card->recv.idx, card->send.idx);
712 if (s0val != card->last_is0) {
714 (card->last_is0 & NJ_IRQM0_RD_MASK))
716 send_tiger(card, s0val);
718 (card->last_is0 & NJ_IRQM0_WR_MASK))
720 recv_tiger(card, s0val);
723 spin_unlock(&card->lock);
733 struct tiger_hw *card = bch->hw;
740 spin_lock_irqsave(&card->lock, flags);
746 spin_unlock_irqrestore(&card->lock, flags);
750 spin_unlock_irqrestore(&card->lock, flags);
753 spin_lock_irqsave(&card->lock, flags);
758 spin_unlock_irqrestore(&card->lock, flags);
764 spin_lock_irqsave(&card->lock, flags);
767 spin_unlock_irqrestore(&card->lock, flags);
782 struct tiger_hw *card = bc->bch.hw;
791 pr_info("%s: %s unknown Op %x\n", card->name, __func__, cq->op);
803 struct tiger_hw *card = bch->hw;
807 pr_debug("%s: %s cmd:%x %p\n", card->name, __func__, cmd, arg);
812 spin_lock_irqsave(&card->lock, flags);
817 spin_unlock_irqrestore(&card->lock, flags);
828 pr_info("%s: %s unknown prim(%x)\n", card->name, __func__, cmd);
834 channel_ctrl(struct tiger_hw *card, struct mISDN_ctrl_req *cq)
848 ret = card->isac.ctrl(&card->isac, HW_TESTLOOP, cq->channel);
851 pr_info("%s: %s unknown Op %x\n", card->name, __func__, cq->op);
859 open_bchannel(struct tiger_hw *card, struct channel_req *rq)
867 bch = &card->bc[rq->adr.channel - 1].bch;
884 struct tiger_hw *card = dch->hw;
888 pr_debug("%s: %s cmd:%x %p\n", card->name, __func__, cmd, arg);
893 err = card->isac.open(&card->isac, rq);
895 err = open_bchannel(card, rq);
899 pr_info("%s: cannot get module\n", card->name);
902 pr_debug("%s: dev(%d) close from %p\n", card->name, dch->dev.id,
907 err = channel_ctrl(card, arg);
911 card->name, __func__, cmd);
918 nj_init_card(struct tiger_hw *card)
923 spin_lock_irqsave(&card->lock, flags);
924 nj_disable_hwirq(card);
925 spin_unlock_irqrestore(&card->lock, flags);
927 card->irq = card->pdev->irq;
928 if (request_irq(card->irq, nj_irq, IRQF_SHARED, card->name, card)) {
930 card->name, card->irq);
931 card->irq = -1;
935 spin_lock_irqsave(&card->lock, flags);
936 nj_reset(card);
937 ret = card->isac.init(&card->isac);
940 ret = inittiger(card);
943 mode_tiger(&card->bc[0], ISDN_P_NONE);
944 mode_tiger(&card->bc[1], ISDN_P_NONE);
946 spin_unlock_irqrestore(&card->lock, flags);
952 nj_release(struct tiger_hw *card)
957 if (card->base_s) {
958 spin_lock_irqsave(&card->lock, flags);
959 nj_disable_hwirq(card);
960 mode_tiger(&card->bc[0], ISDN_P_NONE);
961 mode_tiger(&card->bc[1], ISDN_P_NONE);
962 card->isac.release(&card->isac);
963 spin_unlock_irqrestore(&card->lock, flags);
964 release_region(card->base, card->base_s);
965 card->base_s = 0;
967 if (card->irq > 0)
968 free_irq(card->irq, card);
969 if (card->isac.dch.dev.dev.class)
970 mISDN_unregister_device(&card->isac.dch.dev);
973 mISDN_freebchannel(&card->bc[i].bch);
974 kfree(card->bc[i].hsbuf);
975 kfree(card->bc[i].hrbuf);
977 if (card->dma_p)
978 pci_free_consistent(card->pdev, NJ_DMA_SIZE,
979 card->dma_p, card->dma);
981 list_del(&card->list);
983 pci_clear_master(card->pdev);
984 pci_disable_device(card->pdev);
985 pci_set_drvdata(card->pdev, NULL);
986 kfree(card);
991 nj_setup(struct tiger_hw *card)
993 card->base = pci_resource_start(card->pdev, 0);
994 card->base_s = pci_resource_len(card->pdev, 0);
995 if (!request_region(card->base, card->base_s, card->name)) {
997 card->name, card->base,
998 (u32)(card->base + card->base_s - 1));
999 card->base_s = 0;
1002 ASSIGN_FUNC(nj, ISAC, card->isac);
1008 setup_instance(struct tiger_hw *card)
1013 snprintf(card->name, MISDN_MAX_IDLEN - 1, "netjet.%d", nj_cnt + 1);
1015 list_add_tail(&card->list, &Cards);
1018 _set_debug(card);
1019 card->isac.name = card->name;
1020 spin_lock_init(&card->lock);
1021 card->isac.hwlock = &card->lock;
1022 mISDNisac_init(&card->isac, card);
1024 card->isac.dch.dev.Bprotocols = (1 << (ISDN_P_B_RAW & ISDN_P_B_MASK)) |
1026 card->isac.dch.dev.D.ctrl = nj_dctrl;
1028 card->bc[i].bch.nr = i + 1;
1029 set_channelmap(i + 1, card->isac.dch.dev.channelmap);
1030 mISDN_initbchannel(&card->bc[i].bch, MAX_DATA_MEM);
1031 card->bc[i].bch.hw = card;
1032 card->bc[i].bch.ch.send = nj_l2l1B;
1033 card->bc[i].bch.ch.ctrl = nj_bctrl;
1034 card->bc[i].bch.ch.nr = i + 1;
1035 list_add(&card->bc[i].bch.ch.list,
1036 &card->isac.dch.dev.bchannels);
1037 card->bc[i].bch.hw = card;
1039 err = nj_setup(card);
1042 err = mISDN_register_device(&card->isac.dch.dev, &card->pdev->dev,
1043 card->name);
1046 err = nj_init_card(card);
1053 nj_release(card);
1062 struct tiger_hw *card;
1082 card = kzalloc(sizeof(struct tiger_hw), GFP_ATOMIC);
1083 if (!card) {
1088 card->pdev = pdev;
1092 kfree(card);
1107 card->typ = NETJET_S_TJ320;
1109 card->typ = NETJET_S_TJ300;
1111 card->base = pci_resource_start(pdev, 0);
1112 card->irq = pdev->irq;
1113 pci_set_drvdata(pdev, card);
1114 err = setup_instance(card);
1124 struct tiger_hw *card = pci_get_drvdata(pdev);
1126 if (card)
1127 nj_release(card);