Lines Matching refs:lp

63 u32 temac_ior(struct temac_local *lp, int offset)
65 return in_be32((u32 *)(lp->regs + offset));
68 void temac_iow(struct temac_local *lp, int offset, u32 value)
70 out_be32((u32 *)(lp->regs + offset), value);
73 int temac_indirect_busywait(struct temac_local *lp)
77 while (!(temac_ior(lp, XTE_RDY0_OFFSET) & XTE_RDY0_HARD_ACS_RDY_MASK)) {
90 * lp->indirect_mutex must be held when calling this function
92 u32 temac_indirect_in32(struct temac_local *lp, int reg)
96 if (temac_indirect_busywait(lp))
98 temac_iow(lp, XTE_CTL0_OFFSET, reg);
99 if (temac_indirect_busywait(lp))
101 val = temac_ior(lp, XTE_LSW0_OFFSET);
109 * lp->indirect_mutex must be held when calling this function
111 void temac_indirect_out32(struct temac_local *lp, int reg, u32 value)
113 if (temac_indirect_busywait(lp))
115 temac_iow(lp, XTE_LSW0_OFFSET, value);
116 temac_iow(lp, XTE_CTL0_OFFSET, CNTLREG_WRITE_ENABLE_MASK | reg);
117 temac_indirect_busywait(lp);
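
The comments at lines 90 and 109 require lp->indirect_mutex to be held around
these two helpers; callers such as lines 322-333 and 493-502 take the mutex, do
their indirect accesses, and release it. A minimal sketch of that pattern,
reusing the driver's XTE_AFM_OFFSET/XTE_AFM_EPPRM_MASK seen at lines 394-395
(the wrapper function itself is hypothetical):

    /* Hypothetical helper: read-modify-write of an indirectly addressed
     * TEMAC register with lp->indirect_mutex held, as the comments at
     * lines 90 and 109 require. */
    static void temac_example_set_promisc(struct temac_local *lp)
    {
            u32 val;

            mutex_lock(&lp->indirect_mutex);
            val = temac_indirect_in32(lp, XTE_AFM_OFFSET);
            temac_indirect_out32(lp, XTE_AFM_OFFSET, val | XTE_AFM_EPPRM_MASK);
            mutex_unlock(&lp->indirect_mutex);
    }
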
125 static u32 temac_dma_in32(struct temac_local *lp, int reg)
127 return in_be32((u32 *)(lp->sdma_regs + (reg << 2)));
135 static void temac_dma_out32(struct temac_local *lp, int reg, u32 value)
137 out_be32((u32 *)(lp->sdma_regs + (reg << 2)), value);
149 static u32 temac_dma_dcr_in(struct temac_local *lp, int reg)
151 return dcr_read(lp->sdma_dcrs, reg);
157 static void temac_dma_dcr_out(struct temac_local *lp, int reg, u32 value)
159 dcr_write(lp->sdma_dcrs, reg, value);
166 static int temac_dcr_setup(struct temac_local *lp, struct platform_device *op,
175 lp->sdma_dcrs = dcr_map(np, dcrs, dcr_resource_len(np, 0));
176 lp->dma_in = temac_dma_dcr_in;
177 lp->dma_out = temac_dma_dcr_out;
191 static int temac_dcr_setup(struct temac_local *lp, struct platform_device *op,
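
temac_dcr_setup() (lines 166-177, with the stub at line 191 for builds without
DCR support) decides how the SDMA registers are reached: on DCR-capable targets
the accessors become dcr_read()/dcr_write() wrappers, otherwise the probe code
at lines 1070-1077 maps the register block and uses plain MMIO. Either way, the
rest of the driver only ever goes through the lp->dma_in/lp->dma_out pointers.
A condensed sketch of that selection (devicetree lookups and error handling
omitted):

    /* Condensed from lines 1070-1077: try DCR first, fall back to MMIO.
     * Whichever pair is installed, later code just calls the pointers. */
    if (temac_dcr_setup(lp, op, np)) {      /* non-zero: no DCR resource */
            lp->sdma_regs = of_iomap(np, 0);
            if (lp->sdma_regs) {
                    lp->dma_in  = temac_dma_in32;
                    lp->dma_out = temac_dma_out32;
            }
    }

    /* example use, as at line 208: */
    lp->dma_out(lp, DMA_CONTROL_REG, DMA_CONTROL_RST);
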
204 struct temac_local *lp = netdev_priv(ndev);
208 lp->dma_out(lp, DMA_CONTROL_REG, DMA_CONTROL_RST);
211 if (!lp->rx_skb[i])
214 dma_unmap_single(ndev->dev.parent, lp->rx_bd_v[i].phys,
216 dev_kfree_skb(lp->rx_skb[i]);
219 if (lp->rx_bd_v)
221 sizeof(*lp->rx_bd_v) * RX_BD_NUM,
222 lp->rx_bd_v, lp->rx_bd_p);
223 if (lp->tx_bd_v)
225 sizeof(*lp->tx_bd_v) * TX_BD_NUM,
226 lp->tx_bd_v, lp->tx_bd_p);
227 if (lp->rx_skb)
228 kfree(lp->rx_skb);
236 struct temac_local *lp = netdev_priv(ndev);
240 lp->rx_skb = kcalloc(RX_BD_NUM, sizeof(*lp->rx_skb), GFP_KERNEL);
241 if (!lp->rx_skb)
246 lp->tx_bd_v = dma_zalloc_coherent(ndev->dev.parent,
247 sizeof(*lp->tx_bd_v) * TX_BD_NUM,
248 &lp->tx_bd_p, GFP_KERNEL);
249 if (!lp->tx_bd_v)
252 lp->rx_bd_v = dma_zalloc_coherent(ndev->dev.parent,
253 sizeof(*lp->rx_bd_v) * RX_BD_NUM,
254 &lp->rx_bd_p, GFP_KERNEL);
255 if (!lp->rx_bd_v)
259 lp->tx_bd_v[i].next = lp->tx_bd_p +
260 sizeof(*lp->tx_bd_v) * ((i + 1) % TX_BD_NUM);
264 lp->rx_bd_v[i].next = lp->rx_bd_p +
265 sizeof(*lp->rx_bd_v) * ((i + 1) % RX_BD_NUM);
272 lp->rx_skb[i] = skb;
274 lp->rx_bd_v[i].phys = dma_map_single(ndev->dev.parent,
278 lp->rx_bd_v[i].len = XTE_MAX_JUMBO_FRAME_SIZE;
279 lp->rx_bd_v[i].app0 = STS_CTRL_APP0_IRQONEND;
282 lp->dma_out(lp, TX_CHNL_CTRL, 0x10220400 |
288 lp->dma_out(lp, RX_CHNL_CTRL, 0xff070000 |
295 lp->dma_out(lp, RX_CURDESC_PTR, lp->rx_bd_p);
296 lp->dma_out(lp, RX_TAILDESC_PTR,
297 lp->rx_bd_p + (sizeof(*lp->rx_bd_v) * (RX_BD_NUM - 1)));
298 lp->dma_out(lp, TX_CURDESC_PTR, lp->tx_bd_p);
301 lp->tx_bd_ci = 0;
302 lp->tx_bd_next = 0;
303 lp->tx_bd_tail = 0;
304 lp->rx_bd_ci = 0;
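
Lines 259-265 chain every buffer descriptor to the physical address of its
successor, the last entry wrapping back to the first, so the SDMA engine sees
each ring as circular; lines 301-304 then reset the software head/tail indices.
A small self-contained demonstration of the same chaining arithmetic (plain C
with a stand-in descriptor type; the real descriptor layout and ring sizes
differ):

    #include <inttypes.h>
    #include <stdint.h>
    #include <stdio.h>

    #define NUM_BD 4                        /* stand-in for TX_BD_NUM / RX_BD_NUM */

    struct example_bd {                     /* stand-in, not the driver's layout */
            uint32_t next;                  /* physical address of next descriptor */
            uint32_t phys;
            uint32_t len;
            uint32_t app0;
    };

    int main(void)
    {
            struct example_bd bd[NUM_BD] = { { 0 } };
            uint32_t bd_phys = 0x10000000;  /* pretend DMA address of bd[0] */
            int i;

            /* same arithmetic as lines 259-265: entry i points at entry i+1,
             * and the last entry wraps back to entry 0 */
            for (i = 0; i < NUM_BD; i++)
                    bd[i].next = bd_phys + sizeof(*bd) * ((i + 1) % NUM_BD);

            for (i = 0; i < NUM_BD; i++)
                    printf("bd[%d].next = 0x%08" PRIx32 "\n", i, bd[i].next);
            return 0;
    }
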
319 struct temac_local *lp = netdev_priv(ndev);
322 mutex_lock(&lp->indirect_mutex);
323 temac_indirect_out32(lp, XTE_UAW0_OFFSET,
330 temac_indirect_out32(lp, XTE_UAW1_OFFSET,
333 mutex_unlock(&lp->indirect_mutex);
358 struct temac_local *lp = netdev_priv(ndev);
362 mutex_lock(&lp->indirect_mutex);
372 temac_indirect_out32(lp, XTE_AFM_OFFSET, XTE_AFM_EPPRM_MASK);
385 temac_indirect_out32(lp, XTE_MAW0_OFFSET,
389 temac_indirect_out32(lp, XTE_MAW1_OFFSET,
394 val = temac_indirect_in32(lp, XTE_AFM_OFFSET);
395 temac_indirect_out32(lp, XTE_AFM_OFFSET,
397 temac_indirect_out32(lp, XTE_MAW0_OFFSET, 0);
398 temac_indirect_out32(lp, XTE_MAW1_OFFSET, 0);
401 mutex_unlock(&lp->indirect_mutex);
489 struct temac_local *lp = netdev_priv(ndev);
493 mutex_lock(&lp->indirect_mutex);
495 reg = temac_indirect_in32(lp, tp->reg) & ~tp->m_or;
498 temac_indirect_out32(lp, tp->reg, reg);
501 lp->options |= options;
502 mutex_unlock(&lp->indirect_mutex);
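
temac_setoptions() (lines 489-502) is table driven: for each option entry it
reads the indirect register, clears the entry's mask, sets the mask again only
if that option was requested, and writes the result back, all under
indirect_mutex; lines 578 and 584 use it to drop and later restore TX/RX enable
across a reset. A sketch of the loop; only tp->reg and tp->m_or appear in the
hits, so the opt field and the termination test below are inferred, not quoted:

    /* Inferred shape of the option table and of the read-modify-write
     * loop at lines 493-502. */
    struct temac_option {
            u32 opt;        /* XTE_OPTION_* bit controlled by this entry (assumed) */
            u32 reg;        /* indirect register to update */
            u32 m_or;       /* bits to set (option on) or clear (option off) */
    };

    mutex_lock(&lp->indirect_mutex);
    while (tp->opt) {                       /* assumed sentinel-terminated table */
            u32 reg = temac_indirect_in32(lp, tp->reg) & ~tp->m_or;

            if (options & tp->opt)
                    reg |= tp->m_or;
            temac_indirect_out32(lp, tp->reg, reg);
            tp++;
    }
    lp->options |= options;
    mutex_unlock(&lp->indirect_mutex);
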
510 struct temac_local *lp = netdev_priv(ndev);
521 mutex_lock(&lp->indirect_mutex);
523 temac_indirect_out32(lp, XTE_RXC1_OFFSET, XTE_RXC1_RXRST_MASK);
525 while (temac_indirect_in32(lp, XTE_RXC1_OFFSET) & XTE_RXC1_RXRST_MASK) {
535 temac_indirect_out32(lp, XTE_TXC_OFFSET, XTE_TXC_TXRST_MASK);
537 while (temac_indirect_in32(lp, XTE_TXC_OFFSET) & XTE_TXC_TXRST_MASK) {
547 val = temac_indirect_in32(lp, XTE_RXC1_OFFSET);
548 temac_indirect_out32(lp, XTE_RXC1_OFFSET, val & ~XTE_RXC1_RXEN_MASK);
551 lp->dma_out(lp, DMA_CONTROL_REG, DMA_CONTROL_RST);
553 while (lp->dma_in(lp, DMA_CONTROL_REG) & DMA_CONTROL_RST) {
561 lp->dma_out(lp, DMA_CONTROL_REG, DMA_TAIL_ENABLE);
568 temac_indirect_out32(lp, XTE_RXC0_OFFSET, 0);
569 temac_indirect_out32(lp, XTE_RXC1_OFFSET, 0);
570 temac_indirect_out32(lp, XTE_TXC_OFFSET, 0);
571 temac_indirect_out32(lp, XTE_FCC_OFFSET, XTE_FCC_RXFLO_MASK);
573 mutex_unlock(&lp->indirect_mutex);
578 lp->options & ~(XTE_OPTION_TXEN | XTE_OPTION_RXEN));
584 if (temac_setoptions(ndev, lp->options))
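
temac_device_reset() (lines 510-584) resets the receiver, the transmitter and
the DMA engine in turn, polling each time until the hardware clears its reset
bit, then reprograms the control registers and re-applies lp->options. The wait
loops at lines 525, 537 and 553 all share the same bounded-poll shape; a sketch
of the RX one (the delay and timeout handling shown here are illustrative, the
driver's actual limits are in the elided lines):

    /* Illustrative bounded poll for the RX reset requested at line 523;
     * the real timeout value and error path are not in the hits. */
    int timeout = 1000;

    temac_indirect_out32(lp, XTE_RXC1_OFFSET, XTE_RXC1_RXRST_MASK);
    while (temac_indirect_in32(lp, XTE_RXC1_OFFSET) & XTE_RXC1_RXRST_MASK) {
            udelay(1);
            if (--timeout == 0) {
                    dev_err(&ndev->dev, "RX reset timed out\n");
                    break;
            }
    }
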
593 struct temac_local *lp = netdev_priv(ndev);
594 struct phy_device *phy = lp->phy_dev;
601 mutex_lock(&lp->indirect_mutex);
602 if (lp->last_link != link_state) {
603 mii_speed = temac_indirect_in32(lp, XTE_EMCFG_OFFSET);
613 temac_indirect_out32(lp, XTE_EMCFG_OFFSET, mii_speed);
614 lp->last_link = link_state;
617 mutex_unlock(&lp->indirect_mutex);
622 struct temac_local *lp = netdev_priv(ndev);
626 cur_p = &lp->tx_bd_v[lp->tx_bd_ci];
643 lp->tx_bd_ci++;
644 if (lp->tx_bd_ci >= TX_BD_NUM)
645 lp->tx_bd_ci = 0;
647 cur_p = &lp->tx_bd_v[lp->tx_bd_ci];
654 static inline int temac_check_tx_bd_space(struct temac_local *lp, int num_frag)
659 tail = lp->tx_bd_tail;
660 cur_p = &lp->tx_bd_v[tail];
670 cur_p = &lp->tx_bd_v[tail];
679 struct temac_local *lp = netdev_priv(ndev);
688 start_p = lp->tx_bd_p + sizeof(*lp->tx_bd_v) * lp->tx_bd_tail;
689 cur_p = &lp->tx_bd_v[lp->tx_bd_tail];
691 if (temac_check_tx_bd_space(lp, num_frag)) {
716 lp->tx_bd_tail++;
717 if (lp->tx_bd_tail >= TX_BD_NUM)
718 lp->tx_bd_tail = 0;
720 cur_p = &lp->tx_bd_v[lp->tx_bd_tail];
730 tail_p = lp->tx_bd_p + sizeof(*lp->tx_bd_v) * lp->tx_bd_tail;
731 lp->tx_bd_tail++;
732 if (lp->tx_bd_tail >= TX_BD_NUM)
733 lp->tx_bd_tail = 0;
738 lp->dma_out(lp, TX_TAILDESC_PTR, tail_p); /* DMA start */
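
temac_start_xmit() (lines 679-738) writes one descriptor per skb fragment
starting at lp->tx_bd_tail, advances the tail with the same wrap-at-TX_BD_NUM
test after each descriptor, and finally kicks the engine by writing the
physical address of the last descriptor to TX_TAILDESC_PTR. Reduced to just the
ring bookkeeping (checksum offload, the fragment loop and DMA mapping omitted):

    /* Skeleton of lines 688-738: locate the current descriptor, fill it,
     * note its physical address, advance tx_bd_tail with wraparound, then
     * start DMA via the tail pointer register. */
    cur_p = &lp->tx_bd_v[lp->tx_bd_tail];
    /* ... fill cur_p->phys / cur_p->len / cur_p->app0 for the skb ... */

    tail_p = lp->tx_bd_p + sizeof(*lp->tx_bd_v) * lp->tx_bd_tail;
    lp->tx_bd_tail++;
    if (lp->tx_bd_tail >= TX_BD_NUM)
            lp->tx_bd_tail = 0;

    lp->dma_out(lp, TX_TAILDESC_PTR, tail_p);       /* DMA start */
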
746 struct temac_local *lp = netdev_priv(ndev);
754 spin_lock_irqsave(&lp->rx_lock, flags);
756 tail_p = lp->rx_bd_p + sizeof(*lp->rx_bd_v) * lp->rx_bd_ci;
757 cur_p = &lp->rx_bd_v[lp->rx_bd_ci];
762 skb = lp->rx_skb[lp->rx_bd_ci];
773 if (((lp->temac_features & TEMAC_FEATURE_RX_CSUM) != 0) &&
790 spin_unlock_irqrestore(&lp->rx_lock, flags);
799 lp->rx_skb[lp->rx_bd_ci] = new_skb;
801 lp->rx_bd_ci++;
802 if (lp->rx_bd_ci >= RX_BD_NUM)
803 lp->rx_bd_ci = 0;
805 cur_p = &lp->rx_bd_v[lp->rx_bd_ci];
808 lp->dma_out(lp, RX_TAILDESC_PTR, tail_p);
810 spin_unlock_irqrestore(&lp->rx_lock, flags);
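
ll_temac_recv() (lines 746-810) runs entirely under lp->rx_lock with interrupts
saved, since it is called from the RX interrupt handler at line 841: each
completed descriptor's skb is handed to the stack, a fresh skb is mapped and
installed in its place, rx_bd_ci advances with wraparound, and the recycled
descriptors are returned to the engine through RX_TAILDESC_PTR. A skeleton of
one pass (status checks, checksum handling and the allocation-failure exit at
line 790 omitted; skb, new_skb, cur_p, tail_p and flags come from the elided
parts of the function):

    /* One pass of the loop at lines 756-808; handing the skb to the
     * stack and allocating/mapping new_skb are elided. */
    spin_lock_irqsave(&lp->rx_lock, flags);

    tail_p = lp->rx_bd_p + sizeof(*lp->rx_bd_v) * lp->rx_bd_ci;
    cur_p  = &lp->rx_bd_v[lp->rx_bd_ci];

    skb = lp->rx_skb[lp->rx_bd_ci];
    /* ... pass skb up the stack, allocate and dma_map_single() new_skb ... */
    lp->rx_skb[lp->rx_bd_ci] = new_skb;

    lp->rx_bd_ci++;
    if (lp->rx_bd_ci >= RX_BD_NUM)
            lp->rx_bd_ci = 0;

    lp->dma_out(lp, RX_TAILDESC_PTR, tail_p);
    spin_unlock_irqrestore(&lp->rx_lock, flags);
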
816 struct temac_local *lp = netdev_priv(ndev);
819 status = lp->dma_in(lp, TX_IRQ_REG);
820 lp->dma_out(lp, TX_IRQ_REG, status);
823 temac_start_xmit_done(lp->ndev);
833 struct temac_local *lp = netdev_priv(ndev);
837 status = lp->dma_in(lp, RX_IRQ_REG);
838 lp->dma_out(lp, RX_IRQ_REG, status);
841 ll_temac_recv(lp->ndev);
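
Both interrupt handlers (lines 816-841) have the same shape: read the DMA IRQ
status register through lp->dma_in(), acknowledge it by writing the value
straight back through lp->dma_out(), then run the TX-completion or receive
routine when a relevant bit is set. A sketch of the TX side (the completion bit
mask tested, and any error reporting, live in the elided lines, so the mask
below is a placeholder):

    /* Sketch of the TX interrupt handler around lines 816-823; the
     * function name and TX_IRQ_COMPLETION_MASK are placeholders. */
    static irqreturn_t ll_temac_tx_irq_sketch(int irq, void *_ndev)
    {
            struct net_device *ndev = _ndev;
            struct temac_local *lp = netdev_priv(ndev);
            unsigned int status;

            status = lp->dma_in(lp, TX_IRQ_REG);
            lp->dma_out(lp, TX_IRQ_REG, status);    /* ack by writing back */

            if (status & TX_IRQ_COMPLETION_MASK)    /* placeholder bit mask */
                    temac_start_xmit_done(lp->ndev);

            return IRQ_HANDLED;
    }
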
848 struct temac_local *lp = netdev_priv(ndev);
853 if (lp->phy_node) {
854 lp->phy_dev = of_phy_connect(lp->ndev, lp->phy_node,
856 if (!lp->phy_dev) {
857 dev_err(lp->dev, "of_phy_connect() failed\n");
861 phy_start(lp->phy_dev);
866 rc = request_irq(lp->tx_irq, ll_temac_tx_irq, 0, ndev->name, ndev);
869 rc = request_irq(lp->rx_irq, ll_temac_rx_irq, 0, ndev->name, ndev);
876 free_irq(lp->tx_irq, ndev);
878 if (lp->phy_dev)
879 phy_disconnect(lp->phy_dev);
880 lp->phy_dev = NULL;
881 dev_err(lp->dev, "request_irq() failed\n");
887 struct temac_local *lp = netdev_priv(ndev);
891 free_irq(lp->tx_irq, ndev);
892 free_irq(lp->rx_irq, ndev);
894 if (lp->phy_dev)
895 phy_disconnect(lp->phy_dev);
896 lp->phy_dev = NULL;
907 struct temac_local *lp = netdev_priv(ndev);
909 disable_irq(lp->tx_irq);
910 disable_irq(lp->rx_irq);
912 ll_temac_rx_irq(lp->rx_irq, ndev);
913 ll_temac_tx_irq(lp->tx_irq, ndev);
915 enable_irq(lp->tx_irq);
916 enable_irq(lp->rx_irq);
922 struct temac_local *lp = netdev_priv(ndev);
927 if (!lp->phy_dev)
930 return phy_mii_ioctl(lp->phy_dev, rq, cmd);
952 struct temac_local *lp = netdev_priv(ndev);
956 len += sprintf(buf + len, "%.8x%s", lp->dma_in(lp, i),
977 struct temac_local *lp = netdev_priv(ndev);
978 return phy_ethtool_gset(lp->phy_dev, cmd);
983 struct temac_local *lp = netdev_priv(ndev);
984 return phy_ethtool_sset(lp->phy_dev, cmd);
989 struct temac_local *lp = netdev_priv(ndev);
990 return phy_start_aneg(lp->phy_dev);
1004 struct temac_local *lp;
1011 ndev = alloc_etherdev(sizeof(*lp));
1036 lp = netdev_priv(ndev);
1037 lp->ndev = ndev;
1038 lp->dev = &op->dev;
1039 lp->options = XTE_OPTION_DEFAULTS;
1040 spin_lock_init(&lp->rx_lock);
1041 mutex_init(&lp->indirect_mutex);
1044 lp->regs = of_iomap(op->dev.of_node, 0);
1045 if (!lp->regs) {
1051 lp->temac_features = 0;
1054 lp->temac_features |= TEMAC_FEATURE_TX_CSUM;
1060 lp->temac_features |= TEMAC_FEATURE_RX_CSUM;
1070 if (temac_dcr_setup(lp, op, np)) {
1073 lp->sdma_regs = of_iomap(np, 0);
1074 if (lp->sdma_regs) {
1075 lp->dma_in = temac_dma_in32;
1076 lp->dma_out = temac_dma_out32;
1077 dev_dbg(&op->dev, "MEM base: %p\n", lp->sdma_regs);
1085 lp->rx_irq = irq_of_parse_and_map(np, 0);
1086 lp->tx_irq = irq_of_parse_and_map(np, 1);
1090 if (!lp->rx_irq || !lp->tx_irq) {
1106 rc = temac_mdio_setup(lp, op->dev.of_node);
1110 lp->phy_node = of_parse_phandle(op->dev.of_node, "phy-handle", 0);
1111 if (lp->phy_node)
1112 dev_dbg(lp->dev, "using PHY node %s (%p)\n", np->full_name, np);
1115 rc = sysfs_create_group(&lp->dev->kobj, &temac_attr_group);
1117 dev_err(lp->dev, "Error creating sysfs files\n");
1121 rc = register_netdev(lp->ndev);
1123 dev_err(lp->dev, "register_netdev() error (%i)\n", rc);
1130 sysfs_remove_group(&lp->dev->kobj, &temac_attr_group);
1132 if (lp->sdma_regs)
1133 iounmap(lp->sdma_regs);
1135 iounmap(lp->regs);
1145 struct temac_local *lp = netdev_priv(ndev);
1147 temac_mdio_teardown(lp);
1149 sysfs_remove_group(&lp->dev->kobj, &temac_attr_group);
1150 of_node_put(lp->phy_node);
1151 lp->phy_node = NULL;
1152 iounmap(lp->regs);
1153 if (lp->sdma_regs)
1154 iounmap(lp->sdma_regs);