Lines Matching refs:tdev

301 struct mmp_tdma_device *tdev = dev_id;
306 struct mmp_tdma_chan *tdmac = tdev->tdmac[i];
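The two matches at 301 and 306 sit in the driver's interrupt handler, where the controller pointer passed as dev_id is walked to dispatch per-channel interrupts. A minimal sketch of that pattern; the TDMA_CHANNEL_NUM bound and the per-channel helper mmp_tdma_chan_handler() are assumptions, not lines from the listing:

	static irqreturn_t mmp_tdma_int_handler(int irq, void *dev_id)
	{
		struct mmp_tdma_device *tdev = dev_id;		/* line 301 */
		int handled = 0;
		int i;

		/* let every channel owned by this controller check and
		 * clear its own interrupt status */
		for (i = 0; i < TDMA_CHANNEL_NUM; i++) {	/* bound assumed */
			struct mmp_tdma_chan *tdmac = tdev->tdmac[i];	/* line 306 */

			if (tdmac && mmp_tdma_chan_handler(irq, tdmac) == IRQ_HANDLED)
				handled++;
		}

		return handled ? IRQ_HANDLED : IRQ_NONE;
	}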
513 struct mmp_tdma_device *tdev = platform_get_drvdata(pdev);
515 dma_async_device_unregister(&tdev->device);
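Lines 513 and 515 are the teardown path. Assuming the usual platform-driver shape (the function name mmp_tdma_remove does not appear in the listing), it amounts to:

	static int mmp_tdma_remove(struct platform_device *pdev)
	{
		struct mmp_tdma_device *tdev = platform_get_drvdata(pdev);	/* line 513 */

		dma_async_device_unregister(&tdev->device);			/* line 515 */
		return 0;
	}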
519 static int mmp_tdma_chan_init(struct mmp_tdma_device *tdev,
526 dev_err(tdev->dev, "too many channels for device!\n");
531 tdmac = devm_kzalloc(tdev->dev, sizeof(*tdmac), GFP_KERNEL);
533 dev_err(tdev->dev, "no free memory for DMA channels!\n");
538 tdmac->dev = tdev->dev;
539 tdmac->chan.device = &tdev->device;
542 tdmac->reg_base = tdev->base + idx * 4;
545 tdev->tdmac[tdmac->idx] = tdmac;
550 &tdev->device.channels);
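The matches from 519 through 550 are all inside mmp_tdma_chan_init(), which allocates one channel, ties it to the controller, and queues it on the dmaengine channel list. The sketch below assembles those fragments; the parameter names come from the call at line 656, but their types, the TDMA_CHANNEL_NUM bound, and the idx/type/pool assignments are assumptions, and any per-channel irq or tasklet setup is omitted.

	static int mmp_tdma_chan_init(struct mmp_tdma_device *tdev,
				      int idx, int irq, int type,
				      struct gen_pool *pool)
	{
		struct mmp_tdma_chan *tdmac;

		if (idx >= TDMA_CHANNEL_NUM) {				/* bound assumed */
			dev_err(tdev->dev, "too many channels for device!\n");	/* line 526 */
			return -EINVAL;
		}

		/* one descriptor per channel, freed automatically with the device */
		tdmac = devm_kzalloc(tdev->dev, sizeof(*tdmac), GFP_KERNEL);	/* line 531 */
		if (!tdmac) {
			dev_err(tdev->dev, "no free memory for DMA channels!\n");	/* line 533 */
			return -ENOMEM;
		}

		tdmac->dev	   = tdev->dev;			/* line 538 */
		tdmac->chan.device = &tdev->device;		/* line 539 */
		tdmac->idx	   = idx;			/* assumed */
		tdmac->type	   = type;			/* assumed */
		tdmac->reg_base	   = tdev->base + idx * 4;	/* line 542 */
		tdmac->pool	   = pool;			/* assumed */
		tdev->tdmac[tdmac->idx] = tdmac;		/* line 545 */

		/* make the channel visible to the dmaengine core */
		list_add_tail(&tdmac->chan.device_node,
			      &tdev->device.channels);		/* line 550 */
		return 0;
	}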
577 struct mmp_tdma_device *tdev = ofdma->of_dma_data;
578 dma_cap_mask_t mask = tdev->device.cap_mask;
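Lines 577–578 belong to the OF translation callback that line 685 hands to the device tree layer. A sketch under the assumptions that a single DT cell selects the channel and that the filter cookie and filter function (mmp_tdma_filter_param, mmp_tdma_filter_fn) exist; neither name appears in the listing.

	/* assumed filter cookie: identifies the owning controller and channel */
	struct mmp_tdma_filter_param {
		struct mmp_tdma_device *tdev;
		unsigned int chan_id;
	};

	static struct dma_chan *mmp_tdma_xlate(struct of_phandle_args *dma_spec,
					       struct of_dma *ofdma)
	{
		struct mmp_tdma_device *tdev = ofdma->of_dma_data;	/* line 577 */
		dma_cap_mask_t mask = tdev->device.cap_mask;		/* line 578 */
		struct mmp_tdma_filter_param param;

		if (dma_spec->args_count != 1)				/* one-cell binding assumed */
			return NULL;

		param.tdev = tdev;
		param.chan_id = dma_spec->args[0];

		/* let the dmaengine core pick a channel, limited to this
		 * controller's capability mask and matched by the filter */
		return dma_request_channel(mask, mmp_tdma_filter_fn, &param);
	}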
604 struct mmp_tdma_device *tdev;
618 tdev = devm_kzalloc(&pdev->dev, sizeof(*tdev), GFP_KERNEL);
619 if (!tdev)
622 tdev->dev = &pdev->dev;
630 tdev->base = devm_ioremap_resource(&pdev->dev, iores);
631 if (IS_ERR(tdev->base))
632 return PTR_ERR(tdev->base);
634 INIT_LIST_HEAD(&tdev->device.channels);
648 mmp_tdma_int_handler, 0, "tdma", tdev);
656 ret = mmp_tdma_chan_init(tdev, i, irq, type, pool);
661 dma_cap_set(DMA_SLAVE, tdev->device.cap_mask);
662 dma_cap_set(DMA_CYCLIC, tdev->device.cap_mask);
663 tdev->device.dev = &pdev->dev;
664 tdev->device.device_alloc_chan_resources =
666 tdev->device.device_free_chan_resources =
668 tdev->device.device_prep_dma_cyclic = mmp_tdma_prep_dma_cyclic;
669 tdev->device.device_tx_status = mmp_tdma_tx_status;
670 tdev->device.device_issue_pending = mmp_tdma_issue_pending;
671 tdev->device.device_control = mmp_tdma_control;
672 tdev->device.copy_align = TDMA_ALIGNMENT;
675 platform_set_drvdata(pdev, tdev);
677 ret = dma_async_device_register(&tdev->device);
679 dev_err(tdev->device.dev, "unable to register\n");
685 mmp_tdma_xlate, tdev);
687 dev_err(tdev->device.dev,
689 dma_async_device_unregister(&tdev->device);
693 dev_info(tdev->device.dev, "initialized\n");
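The remaining matches (604–693) are in the probe path, and the cluster from 675 onward is its tail: publish the driver data, register with the dmaengine core, then expose the controller to device tree consumers. A sketch of that tail; the of_dma_controller_register() call is inferred from the mmp_tdma_xlate reference at line 685, and the second error message is a placeholder because the listing truncates it.

	platform_set_drvdata(pdev, tdev);			/* line 675 */

	ret = dma_async_device_register(&tdev->device);		/* line 677 */
	if (ret) {
		dev_err(tdev->device.dev, "unable to register\n");	/* line 679 */
		return ret;
	}

	if (pdev->dev.of_node) {
		ret = of_dma_controller_register(pdev->dev.of_node,
						 mmp_tdma_xlate, tdev);	/* line 685 */
		if (ret) {
			dev_err(tdev->device.dev,
				"failed to register controller\n");	/* wording assumed */
			dma_async_device_unregister(&tdev->device);	/* line 689 */
			return ret;
		}
	}

	dev_info(tdev->device.dev, "initialized\n");		/* line 693 */
	return 0;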