Cross-reference listing: uses of the identifier "txd" in the kernel source tree.

/drivers/dma/

sa11x0-dma.c
   157: static void sa11x0_dma_start_desc(struct sa11x0_dma_phy *p, struct sa11x0_dma_desc *txd)  (argument)
   159: list_del(&txd->node);
   160: p->txd_load = txd;
   163: dev_vdbg(p->dev->slave.dev, "pchan %u: txd %p[%x]: starting: DDAR:%x\n",
   164:         p->num, txd, txd->tx.cookie, txd->ddar);
   170: struct sa11x0_dma_desc *txd = p->txd_load;  (local)
   176: if (!txd)
   185: if (p->sg_load == txd ...
   229: struct sa11x0_dma_desc *txd = p->txd_done;  (local)
   305: struct sa11x0_dma_desc *txd = sa11x0_dma_next_desc(c);  (local)
   335: struct sa11x0_dma_desc *txd, *txn;  (local)
   421: struct sa11x0_dma_desc *txd, *txn;  (local)
   475: struct sa11x0_dma_desc *txd;  (local)
   568: struct sa11x0_dma_desc *txd = to_sa11x0_dma_tx(tx);  (local)
   591: struct sa11x0_dma_desc *txd;  (local)
   729: struct sa11x0_dma_desc *txd, *txn;  (local)
  1037: struct sa11x0_dma_desc *txd = NULL;  (local)
   ... (further matches not shown)

amba-pl08x.c
   188: pl08x_start_txd(struct pl08x_dma_chan *plchan,  (argument)
   189:                 struct pl08x_txd *txd)
   193: struct pl08x_lli *lli = &txd->llis_va[0];
   196: plchan->at = txd;
   206: ... txd->ccfg);
   212: writel(txd->ccfg, phychan->base + PL080_CH_CONFIG);
   309: struct pl08x_txd *txd;  (local)
   315: txd = plchan->at;
   321: if (ch && txd) {
   328: struct pl08x_lli *llis_va = txd->llis_va;
   329: dma_addr_t llis_bus = txd ...
   490: struct pl08x_txd *txd;  (member of struct pl08x_lli_build_data)
   567: pl08x_fill_llis_for_desc(struct pl08x_driver_data *pl08x, struct pl08x_txd *txd)  (argument)
   802: pl08x_free_txd(struct pl08x_driver_data *pl08x, struct pl08x_txd *txd)  (argument)
   851: prep_phy_channel(struct pl08x_dma_chan *plchan, struct pl08x_txd *txd)  (argument)
   922: struct pl08x_txd *txd = to_pl08x_txd(tx);  (local)
  1185: pl08x_prep_channel_resources(struct pl08x_dma_chan *plchan, struct pl08x_txd *txd)  (argument)
  1244: struct pl08x_txd *txd = kzalloc(sizeof(*txd), GFP_NOWAIT);  (local)
  1269: struct pl08x_txd *txd;  (local)
  1320: struct pl08x_txd *txd;  (local)
  1490: pl08x_unmap_buffers(struct pl08x_txd *txd)  (argument)
  1522: struct pl08x_txd *txd;  (local)
   ... (further matches not shown)

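A note on the step these hits trace: starting a transfer in these dmaengine drivers amounts to programming the channel with the first descriptor's bus address (or its config word) and letting the controller walk the hardware linked list on its own; pl08x writes txd->ccfg to PL080_CH_CONFIG above, and at_hdmac/dw_dmac below write first->txd.phys to their DSCR/LLP registers. A minimal sketch of that step, where foo_chan, foo_desc and the FOO_CH_LLP offset are hypothetical and only writel() and the txd.phys field come from the kernel:

    #include <linux/io.h>
    #include <linux/dmaengine.h>

    struct foo_desc {
        struct dma_async_tx_descriptor txd;    /* txd.phys holds the bus address
                                                * of the first hw descriptor */
    };

    struct foo_chan {
        void __iomem *base;                    /* channel register window */
    };

    #define FOO_CH_LLP 0x08                    /* hypothetical register offset */

    static void foo_start_first(struct foo_chan *fc, struct foo_desc *first)
    {
        /* point the controller at the head of the descriptor chain; it
         * fetches and follows the linked list by itself from here */
        writel(first->txd.phys, fc->base + FOO_CH_LLP);
    }
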
txx9dmac.c
   142: txd_to_txx9dmac_desc(struct dma_async_tx_descriptor *txd)  (argument)
   144: return container_of(txd, struct txx9dmac_desc, txd);
   202: dma_async_tx_descriptor_init(&desc->txd, &dc->chan);
   203: desc->txd.tx_submit = txx9dmac_tx_submit;
   204: /* txd.flags will be overwritten in prep funcs */
   205: desc->txd.flags = DMA_CTRL_ACK;
   206: desc->txd.phys = dma_map_single(chan2parent(&dc->chan), &desc->hwdesc, ...
   219: if (async_tx_test_ack(&desc->txd)) {
   252: child->txd ...
   408: struct dma_async_tx_descriptor *txd = &desc->txd;  (local)
   ... (further matches not shown)

timb_dma.c
    76: struct dma_async_tx_descriptor txd;  (member of struct timb_dma_desc)
   178: struct timb_dma_chan *td_chan = container_of(td_desc->txd.chan, ...
   243: iowrite32(td_desc->txd.phys, td_chan->membase + ...
   253: iowrite32(td_desc->txd.phys, td_chan->membase + ...
   267: struct dma_async_tx_descriptor *txd;  (local)
   276: txd = &td_desc->txd;
   279: ... txd->cookie);
   288: dma_cookie_complete(txd);
   291: callback = txd ...
   344: td_tx_submit(struct dma_async_tx_descriptor *txd)  (argument)
   ... (further matches not shown)

ep93xx_dma.c
   102: * @txd: dmaengine API descriptor
   111: struct dma_async_tx_descriptor txd;  (member of struct ep93xx_dma_desc)
   238: d->txd.callback = desc->txd.callback;
   239: d->txd.callback_param = desc->txd.callback_param;
   281: * If txd.cookie is set it means that we are back in the first
   284: return !desc->txd.cookie;
   404: ... desc->txd.cookie, desc->src_addr, desc->dst_addr, ...
   604: if (async_tx_test_ack(&desc->txd)) {
   ... (further matches not shown)

at_hdmac.c
    95: dma_async_tx_descriptor_init(&desc->txd, chan);
    96: /* txd.flags will be overwritten in prep functions */
    97: desc->txd.flags = DMA_CTRL_ACK;
    98: desc->txd.tx_submit = atc_tx_submit;
    99: desc->txd.phys = phys;
   120: if (async_tx_test_ack(&desc->txd)) {
   187: (*prev)->lli.dscr = desc->txd.phys;
   228: channel_writel(atchan, DSCR, first->txd.phys);
   243: struct dma_async_tx_descriptor *txd = &desc->txd;  (local)
   439: struct dma_async_tx_descriptor *txd = &first->txd;  (local)
   ... (further matches not shown)

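The at_hdmac.c hits at lines 95-99 (and txx9dmac.c at 202-206 above) show the allocation-time setup each descriptor gets: initialize the embedded txd against its channel, install the driver's tx_submit hook, pre-set DMA_CTRL_ACK (later overwritten by the prep functions), and record the hardware descriptor's bus address in txd.phys. A hedged sketch of that sequence; struct foo_desc and foo_tx_submit are hypothetical stand-ins:

    #include <linux/errno.h>
    #include <linux/slab.h>
    #include <linux/dmaengine.h>

    struct foo_desc {
        struct dma_async_tx_descriptor txd;    /* embedded generic descriptor */
    };

    static dma_cookie_t foo_tx_submit(struct dma_async_tx_descriptor *tx)
    {
        return -EBUSY;    /* stub; a real driver queues tx and returns a cookie */
    }

    static struct foo_desc *foo_alloc_desc(struct dma_chan *chan, dma_addr_t phys)
    {
        struct foo_desc *desc = kzalloc(sizeof(*desc), GFP_KERNEL);

        if (!desc)
            return NULL;
        dma_async_tx_descriptor_init(&desc->txd, chan);
        desc->txd.tx_submit = foo_tx_submit;
        desc->txd.flags = DMA_CTRL_ACK;    /* overwritten in prep funcs */
        desc->txd.phys = phys;             /* bus address of the hw descriptor */
        return desc;
    }
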
dw_dmac_regs.h
   242: struct dma_async_tx_descriptor txd;  (member of struct dw_desc)
   247: txd_to_dw_desc(struct dma_async_tx_descriptor *txd)  (argument)
   249: return container_of(txd, struct dw_desc, txd);

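dw_dmac_regs.h above, like txx9dmac.c, pch_dma.c, intel_mid_dma_regs.h and at_hdmac_regs.h elsewhere in this listing, embeds struct dma_async_tx_descriptor in a driver-private descriptor and recovers the outer type with container_of(). A minimal sketch of the idiom; the foo names are hypothetical:

    #include <linux/kernel.h>
    #include <linux/dmaengine.h>

    struct foo_desc {
        u32 hw_ctl;                            /* driver/hardware-specific state */
        struct dma_async_tx_descriptor txd;    /* generic dmaengine descriptor */
    };

    /* dmaengine hands back &foo_desc->txd; container_of() walks back from
     * the member to the enclosing driver descriptor */
    static inline struct foo_desc *txd_to_foo_desc(struct dma_async_tx_descriptor *txd)
    {
        return container_of(txd, struct foo_desc, txd);
    }
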
pch_dma.c
    96: struct dma_async_tx_descriptor txd;  (member of struct pch_dma_desc)
   149: struct pch_dma_desc *to_pd_desc(struct dma_async_tx_descriptor *txd)  (argument)
   151: return container_of(txd, struct pch_dma_desc, txd);
   354: channel_writel(pd_chan, NEXT, desc->txd.phys);
   362: struct dma_async_tx_descriptor *txd = &desc->txd;  (local)
   363: dma_async_tx_callback callback = txd->callback;
   364: void *param = txd->callback_param;
   404: bad_desc->txd ...
   420: pd_tx_submit(struct dma_async_tx_descriptor *txd)  (argument)
   ... (further matches not shown)

dw_dmac.c
   107: if (async_tx_test_ack(&desc->txd)) {
   128: ... child->txd.phys, sizeof(child->lli),
   131: ... desc->txd.phys, sizeof(desc->lli),
   216: channel_writel(dwc, LLP, first->txd.phys);
   231: struct dma_async_tx_descriptor *txd = &desc->txd;  (local)
   235: dev_vdbg(chan2dev(&dwc->chan), "descriptor %u complete\n", txd->cookie);
   238: dma_cookie_complete(txd);
   240: callback = txd->callback;
   241: param = txd ...
   ... (further matches not shown)

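Lines 231-241 above trace the completion sequence that timb_dma.c, pch_dma.c and intel_mid_dma.c also follow: snapshot the callback and its argument from the txd, mark the cookie complete, then invoke the callback (typically after dropping the channel lock). A reduced sketch; dma_cookie_complete() is the helper from the private drivers/dma/dmaengine.h header these files include:

    #include <linux/dmaengine.h>
    #include "dmaengine.h"    /* private header, for dma_cookie_complete() */

    static void foo_descriptor_complete(struct dma_async_tx_descriptor *txd)
    {
        dma_async_tx_callback callback = txd->callback;
        void *param = txd->callback_param;

        dma_cookie_complete(txd);    /* records txd->cookie as done for tx_status */
        if (callback)
            callback(param);         /* called without the channel lock held */
    }
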
intel_mid_dma.c
   212: if (async_tx_test_ack(&desc->txd)) {
   288: struct dma_async_tx_descriptor *txd = &desc->txd;  (local)
   293: dma_cookie_complete(txd);
   294: callback_txd = txd->callback;
   295: param_txd = txd->callback_param;
   703: return &desc->txd;
   729: struct dma_async_tx_descriptor *txd = NULL;  (local)
   747: txd = intel_mid_dma_prep_memcpy(chan, ...
   752: return txd;
   ... (further matches not shown)

intel_mid_dma_regs.h
   249: struct dma_async_tx_descriptor txd;  (member of struct intel_mid_dma_desc)
   284: to_intel_mid_dma_desc  (argument)
   285:         (struct dma_async_tx_descriptor *txd)
   287: return container_of(txd, struct intel_mid_dma_desc, txd);

at_hdmac_regs.h
   163: * @txd: support for the async_tx api
   173: struct dma_async_tx_descriptor txd;  (member of struct at_desc)
   179: txd_to_at_desc(struct dma_async_tx_descriptor *txd)  (argument)
   181: return container_of(txd, struct at_desc, txd);

fsldma.c
   854: struct dma_async_tx_descriptor *txd = &desc->async_tx;  (local)
   861: if (txd->callback) {
   865: txd->callback(txd->callback_param);
   869: dma_run_dependencies(txd);
   872: if (!(txd->flags & DMA_COMPL_SKIP_DEST_UNMAP)) {
   873: if (txd->flags & DMA_COMPL_DEST_UNMAP_SINGLE)
   880: if (!(txd->flags & DMA_COMPL_SKIP_SRC_UNMAP)) {
   881: if (txd->flags & DMA_COMPL_SRC_UNMAP_SINGLE)
   890: dma_pool_free(chan->desc_pool, desc, txd ...
   ... (further matches not shown)

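The fsldma.c hits at 872-881 implement the unmap convention of this dmaengine generation: unless the submitter set the DMA_COMPL_SKIP_*_UNMAP flags, the driver unmaps source and destination itself on completion, choosing dma_unmap_single() or dma_unmap_page() based on the *_UNMAP_SINGLE flags. A sketch, assuming dev, src, dst and len are taken from the completed descriptor:

    #include <linux/dmaengine.h>
    #include <linux/dma-mapping.h>

    static void foo_unmap(struct device *dev, struct dma_async_tx_descriptor *txd,
                          dma_addr_t src, dma_addr_t dst, size_t len)
    {
        if (!(txd->flags & DMA_COMPL_SKIP_DEST_UNMAP)) {
            if (txd->flags & DMA_COMPL_DEST_UNMAP_SINGLE)
                dma_unmap_single(dev, dst, len, DMA_FROM_DEVICE);
            else
                dma_unmap_page(dev, dst, len, DMA_FROM_DEVICE);
        }
        if (!(txd->flags & DMA_COMPL_SKIP_SRC_UNMAP)) {
            if (txd->flags & DMA_COMPL_SRC_UNMAP_SINGLE)
                dma_unmap_single(dev, src, len, DMA_TO_DEVICE);
            else
                dma_unmap_page(dev, src, len, DMA_TO_DEVICE);
        }
    }
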
/drivers/media/dvb/mantis/

mantis_i2c.c
    87: u32 txd = 0, stat, trials;  (local)
    94: txd = (msg->addr << 25) | (msg->buf[i] << 8) ...
   100: txd &= ~MANTIS_I2C_STOP;
   103: mmwrite(txd, MANTIS_I2CDATA_CTL);
   131: u32 stat, data, txd;  (local)
   154: txd = msgs[i].addr << 25 | (0x1 << 24) ...
   158: mmwrite(txd, MANTIS_I2CDATA_CTL);

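From the shifts at lines 94 and 154, the Mantis controller's data/control word carries the chip address from bit 25 upward and the data byte in bits 15:8; control flags such as MANTIS_I2C_STOP are OR-ed in or masked off (line 100) separately. A purely illustrative composition with example values:

    /* illustrative values only; the real code also sets start/stop/read flags */
    u32 addr = 0x50;                           /* example 7-bit chip address */
    u32 byte = 0xAB;                           /* example data byte */
    u32 txd  = (addr << 25) | (byte << 8);     /* == 0xA000AB00 */
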
/drivers/video/

mx3fb.c
   268: struct dma_async_tx_descriptor *txd;  (member of struct mx3fb_info)
   332: if (mx3_fbi->txd)
   334: ... to_tx_desc(mx3_fbi->txd), to_tx_desc(mx3_fbi->txd)->sg);
   336: dev_dbg(mx3fb->dev, "mx3fbi %p, txd = NULL\n", mx3_fbi);
   340: mx3_fbi->txd = dmaengine_prep_slave_sg(dma_chan, ...
   342: if (!mx3_fbi->txd) {
   348: mx3_fbi->txd->callback_param = mx3_fbi->txd;
   349: mx3_fbi->txd ...
  1035: struct dma_async_tx_descriptor *txd;  (local)
   ... (further matches not shown)

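mx3fb.c (line 340 above) and mx3_camera.c (line 289 below) drive the same slave-DMA sequence: prep a scatterlist descriptor, hook up the callback (both pass the txd itself as callback_param), submit, then issue pending. A hedged sketch under those assumptions; foo_dma_done and the DMA_MEM_TO_DEV direction are illustrative:

    #include <linux/errno.h>
    #include <linux/dmaengine.h>
    #include <linux/scatterlist.h>

    static void foo_dma_done(void *param)
    {
        /* param is the txd itself, per the mx3 drivers' convention */
    }

    static int foo_queue_sg(struct dma_chan *chan, struct scatterlist *sg,
                            unsigned int sg_len)
    {
        struct dma_async_tx_descriptor *txd;

        txd = dmaengine_prep_slave_sg(chan, sg, sg_len, DMA_MEM_TO_DEV,
                                      DMA_PREP_INTERRUPT);
        if (!txd)
            return -EIO;
        txd->callback = foo_dma_done;
        txd->callback_param = txd;
        dmaengine_submit(txd);
        dma_async_issue_pending(chan);    /* transfer starts here, not at submit */
        return 0;
    }
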
/drivers/net/fddi/skfp/h/

hwmtm.h
   140: SMbuf *txd_tx_pipe ;    /* points to first mb in the txd ring */
   141: SMbuf *txd_tx_tail ;    /* points to last mb in the txd ring */
   142: int queued_txd_mb ;     /* number of SMT MBufs in txd ring */
   200: * u_long HWM_GET_TX_PHYS(txd)
   206: * para txd pointer to the TxD
   210: #define HWM_GET_TX_PHYS(txd) (u_long)AIX_REVERSE((txd)->txd_tbadr)
   214: * int HWM_GET_TX_LEN(txd)
   226: #define HWM_GET_TX_LEN(txd) ((int)AIX_REVERSE((txd) ...
   ... (further matches not shown)

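The two accessor macros above read fields of the hardware TxD through AIX_REVERSE(), which converts the descriptor's on-card byte order to host order (a byte-swap on big-endian platforms, presumably an identity otherwise). Usage sketch, assuming txd points at a hardware TxD managed by the skfp HWM layer:

    u_long phys = HWM_GET_TX_PHYS(txd);    /* bus address of the Tx buffer */
    int len = HWM_GET_TX_LEN(txd);         /* length field of the TxD */
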
/drivers/media/video/

mx3_camera.c
    76: struct dma_async_tx_descriptor *txd;  (member of struct mx3_camera_buffer)
   152: struct dma_chan *chan = desc->txd.chan;
   157: ... desc->txd.cookie, mx3_cam->active ? sg_dma_address(&mx3_cam->active->sg) : 0);
   266: struct dma_async_tx_descriptor *txd;  (local)
   289: txd = dmaengine_prep_slave_sg( ...
   292: if (!txd)
   295: txd->callback_param = txd;
   296: txd->callback = mx3_cam_dma_done;
   299: buf->txd ...
   370: struct dma_async_tx_descriptor *txd = buf->txd;  (local)
   ... (further matches not shown)

/drivers/dma/ioat/

dma_v2.c
   116: async_tx_ack(&desc->txd);
   117: ioat2_set_chainaddr(ioat, desc->txd.phys);
   148: tx = &desc->txd;
   216: ioat2_set_chainaddr(ioat, desc->txd.phys);
   444: dma_async_tx_descriptor_init(&desc->txd, chan);
   445: desc->txd.tx_submit = ioat2_tx_submit_unlock;
   447: desc->txd.phys = phys;
   456: pci_pool_free(dma->dma_pool, desc->hw, desc->txd.phys);
   489: hw->next = next->txd.phys;
   491: ring[i]->hw->next = ring[0]->txd ...
   ... (further matches not shown)

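Lines 489-491 above are the ring-construction step: each hardware descriptor's next field receives the bus address (txd.phys) of its successor, with the last entry wrapping to ring[0]. A sketch using hypothetical reductions of the ioat types, keeping only the fields this step touches:

    #include <linux/dmaengine.h>

    struct foo_hw_desc {
        u64 next;                              /* bus address of the next hw desc */
    };

    struct foo_ring_ent {
        struct foo_hw_desc *hw;
        struct dma_async_tx_descriptor txd;    /* txd.phys = this hw desc's address */
    };

    static void foo_link_ring(struct foo_ring_ent **ring, int size)
    {
        int i;

        for (i = 0; i < size; i++) {
            struct foo_ring_ent *next = ring[(i + 1) % size];

            /* chain by bus address; the final entry wraps back to ring[0] */
            ring[i]->hw->next = next->txd.phys;
        }
    }
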
dma.h
    40: #define tx_to_ioat_desc(tx) container_of(tx, struct ioat_desc_sw, txd)
   152: * @txd: the generic software descriptor for all engines
   160: struct dma_async_tx_descriptor txd;  (member of struct ioat_desc_sw)
   188: ({ if (d) __dump_desc_dbg(&c->base, d->hw, &d->txd, desc_id(d)); 0; })

dma.c
   249: chain_tail->hw->next = first->txd.phys;
   293: dma_async_tx_descriptor_init(&desc_sw->txd, &ioat->base.common);
   294: desc_sw->txd.tx_submit = ioat1_tx_submit;
   296: desc_sw->txd.phys = phys;
   401: ... desc->txd.phys);
   408: ... desc->txd.phys);
   495: async_tx_ack(&desc->txd);
   497: hw->next = next ? next->txd.phys : 0;
   515: desc->txd.flags = flags;
   523: return &desc->txd;
   ... (further matches not shown)

dma_v3.c
   121: struct dma_async_tx_descriptor *tx = &desc->txd;
   279: tx = &desc->txd;
   459: desc->txd.flags = flags;
   467: return &desc->txd;
   540: desc->txd.flags = flags;
   548: compl_desc->txd.flags = flags & DMA_PREP_INTERRUPT;
   558: return &compl_desc->txd;
   594: ... desc_id(desc), (unsigned long long) desc->txd.phys,
   596: ... desc->txd.flags, pq->size, pq->ctl, pq->ctl_f.op, pq->ctl_f.int_en,
   697: desc->txd ...
   ... (further matches not shown)

/drivers/dma/ipu/

ipu_idmac.c
   783: if (async_tx_test_ack(&desc->txd))
   929: struct dma_async_tx_descriptor *txd = &desc->txd;  (local)
   931: memset(txd, 0, sizeof(*txd));
   932: dma_async_tx_descriptor_init(txd, &ichan->dma_chan);
   933: txd->tx_submit = idmac_tx_submit;
  1280: callback = descnew->txd.callback;
  1281: callback_param = descnew->txd.callback_param;
  1292: dma_cookie_complete(&desc->txd);
  1343: struct dma_async_tx_descriptor *txd = NULL;  (local)
   ... (further matches not shown)

/drivers/net/tokenring/

3c359.c
   161: struct xl_tx_desc *txd ;  (local)
   169: txd = &(xl_priv->xl_tx_ring[i]) ;
   170: printk("%d, %08lx, %08x, %08x, %08x, %08x\n", i, virt_to_bus(txd),
   171:        txd->framestartheader, txd->dnnextptr, txd->buffer, txd->buffer_length ) ;
  1194: struct xl_tx_desc *txd ;  (local)
  1209: txd = &(xl_priv->xl_tx_ring[tx_head]) ;
  1210: txd ...
  1256: struct xl_tx_desc *txd ;  (local)
   ... (further matches not shown)

/drivers/net/ethernet/sun/

sungem.c
   653: struct gem_txd *txd;  (local)
   682: txd = &gp->init_block->txd[entry];
   684: dma_addr = le64_to_cpu(txd->buffer);
   685: dma_len = le64_to_cpu(txd->control_word) & TXDCTRL_BUFSZ;
  1030: struct gem_txd *txd = &gp->init_block->txd[entry];  (local)
  1042: txd->buffer = cpu_to_le64(mapping);
  1044: txd->control_word = cpu_to_le64(ctrl);
  1047: struct gem_txd *txd;  (local)
  1596: struct gem_txd *txd;  (local)
  1658: struct gem_txd *txd = &gb->txd[i];  (local)
   ... (further matches not shown)

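The sungem TX descriptor is a little-endian hardware structure: the driver byte-swaps the buffer bus address and control word with cpu_to_le64() on the way in (lines 1042/1044) and le64_to_cpu() on the way out (684/685). Line 1043 does not appear among the hits, consistent with a write barrier sitting between the two stores; the sketch below assumes that ordering and reduces the struct to the two fields used here:

    #include <linux/io.h>
    #include <linux/types.h>
    #include <linux/dma-mapping.h>

    struct gem_txd {          /* reduced: only the fields visible above */
        __le64 control_word;
        __le64 buffer;
    };

    static void foo_fill_txd(struct gem_txd *txd, dma_addr_t mapping, u64 ctrl)
    {
        txd->buffer = cpu_to_le64(mapping);
        wmb();    /* assumed: buffer address must be visible before the
                   * control word hands the descriptor to the hardware */
        txd->control_word = cpu_to_le64(ctrl);
    }
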
/drivers/net/ethernet/icplus/

ipg.c
   161: offset = (u32) &sp->txd[i].next_desc - (u32) sp->txd;
   163: ... i, offset, (unsigned long)sp->txd[i].next_desc);
   165: offset = (u32) &sp->txd[i].tfc - (u32) sp->txd;
   167: ... i, offset, (unsigned long) sp->txd[i].tfc);
   168: offset = (u32) &sp->txd[i].frag_info - (u32) sp->txd;
   170: ... i, offset, (unsigned long) sp->txd[i].frag_info);
   822: struct ipg_tx *txfd = sp->txd ...
   ... (further matches not shown)
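The computations at lines 161-168 subtract the array base from a field address to print each member's byte offset within the TX descriptor ring. With struct ipg_tx as the descriptor type (line 822), the same value can be written with offsetof(); an illustrative fragment, not the driver's code:

    #include <linux/stddef.h>

    /* equivalent to the pointer subtraction at line 161 */
    offset = i * sizeof(*sp->txd) + offsetof(struct ipg_tx, next_desc);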