Searched defs:tx (Results 1 - 25 of 288) sorted by path


/drivers/acpi/
processor_throttling.c
254 if (p_limit->thermal.tx > target_state)
255 target_state = p_limit->thermal.tx;
256 if (p_limit->user.tx > target_state)
257 target_state = p_limit->user.tx;
350 if (limit->thermal.tx > target_state)
351 target_state = limit->thermal.tx;
352 if (limit->user.tx > target_state)
353 target_state = limit->user.tx;
550 struct acpi_processor_tx_tss *tx = local
555 state.pointer = tx;
843 struct acpi_processor_tx_tss *tx = local
858 struct acpi_processor_tx_tss *tx = local
[all...]
/drivers/ata/
pata_arasan_cf.c
397 struct dma_async_tx_descriptor *tx; local
403 tx = chan->device->device_prep_dma_memcpy(chan, dest, src, len, flags);
404 if (!tx) {
409 tx->callback = dma_callback;
410 tx->callback_param = acdev;
411 cookie = tx->tx_submit(tx);
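
Note: the pata_arasan_cf.c hit above is the canonical dmaengine memcpy flow: prepare a descriptor, attach a completion callback, then submit it to obtain a cookie. A minimal sketch of that pattern, assuming an already-requested channel and DMA-mapped buffers (the example_* names are hypothetical, not from the driver):

#include <linux/completion.h>
#include <linux/dmaengine.h>

/* Hypothetical callback: just signal whoever is waiting. */
static void example_dma_done(void *param)
{
	complete(param);
}

/* Prepare and submit one memcpy transfer; returns a cookie (negative on error). */
static dma_cookie_t example_memcpy(struct dma_chan *chan, dma_addr_t dest,
				   dma_addr_t src, size_t len,
				   struct completion *done)
{
	struct dma_async_tx_descriptor *tx;
	unsigned long flags = DMA_PREP_INTERRUPT | DMA_CTRL_ACK;

	tx = chan->device->device_prep_dma_memcpy(chan, dest, src, len, flags);
	if (!tx)
		return -ENOMEM;

	tx->callback = example_dma_done;
	tx->callback_param = done;

	return tx->tx_submit(tx);	/* usually written as dmaengine_submit(tx) */
}
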
/drivers/atm/
ambassador.c
450 static void tx_complete (amb_dev * dev, tx_out * tx) { argument
451 tx_simple * tx_descr = bus_to_virt (tx->handle);
454 PRINTD (DBG_FLOW|DBG_TX, "tx_complete %p %p", dev, tx);
457 atomic_inc(&ATM_SKB(skb)->vcc->stats->tx);
627 static int tx_give (amb_dev * dev, tx_in * tx) { argument
641 *txq->in.ptr = *tx;
1315 tx_in tx; local
1372 tx.vc = cpu_to_be16 (vcc->tx_frame_bits | vc);
1373 tx.tx_descr_length = cpu_to_be16 (sizeof(tx_frag)+sizeof(tx_frag_end));
1374 tx
[all...]
eni.c
188 if (eni_dev->tx[i].send)
190 eni_dev->tx[i].send,eni_dev->tx[i].words*4);
1033 struct eni_tx *tx; local
1047 tx = eni_vcc->tx;
1048 NULLCHECK(tx);
1087 if (!NEPMOK(tx->tx_pos,size+TX_GAP,
1088 eni_in(MID_TX_RDPTR(tx->index)),tx
1181 struct eni_tx *tx; local
1208 struct eni_tx *tx; local
1289 struct eni_tx *tx; local
1955 struct eni_tx *tx = ENI_VCC(vcc)->tx; local
2154 struct eni_tx *tx = eni_dev->tx+i; local
[all...]
eni.h
58 struct eni_tx *tx; /* TXer, NULL if none */ member in struct:eni_vcc
84 struct eni_tx tx[NR_CHAN]; /* TX channels */ member in struct:eni_dev
lanai.c
32 * o AAL0 is stubbed in but the actual rx/tx path isn't written yet:
245 } tx; member in struct:lanai_vcc
284 DECLARE_BITMAP(backlog_vccs, NUM_VCI); /* VCCs with tx backlog */
731 dma_addr_t dmaaddr = lvcc->tx.buf.dmaaddr;
739 (lvcc->tx.atmvcc->qos.txtp.traffic_class == ATM_CBR) ?
744 TXADDR1_SET_SIZE(lanai_buf_size_cardorder(&lvcc->tx.buf)),
785 while ((skb = skb_dequeue(&lvcc->tx.backlog)) != NULL)
786 lanai_free_skb(lvcc->tx.atmvcc, skb);
796 (((lanai_buf_size(&lvcc->tx.buf) / 1024) * HZ) >> 7);
801 (lvcc->tx
[all...]
nicstar.h
670 to the sk_buffs used for tx */
697 volatile unsigned int tx:1; /* TX vc? */ member in struct:vc_map
/drivers/crypto/
omap-sham.c
556 struct dma_async_tx_descriptor *tx; local
590 tx = dmaengine_prep_slave_sg(dd->dma_lch, &ctx->sgl, 1,
593 tx = dmaengine_prep_slave_single(dd->dma_lch, dma_addr, len32,
597 if (!tx) {
602 tx->callback = omap_sham_dma_callback;
603 tx->callback_param = dd;
614 dmaengine_submit(tx);
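
Note: omap-sham.c above uses the slave-DMA variant of the same pattern: dmaengine_prep_slave_sg()/dmaengine_prep_slave_single() instead of a memcpy prep, then dmaengine_submit(). A hedged sketch of one device-bound transfer (channel configuration and naming are illustrative only):

#include <linux/dmaengine.h>

/* Queue one mem-to-device transfer; assumes dmaengine_slave_config()
 * was already done for @chan. */
static int example_slave_tx(struct dma_chan *chan, dma_addr_t buf, size_t len)
{
	struct dma_async_tx_descriptor *tx;
	dma_cookie_t cookie;

	tx = dmaengine_prep_slave_single(chan, buf, len, DMA_MEM_TO_DEV,
					 DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!tx)
		return -EINVAL;

	cookie = dmaengine_submit(tx);
	if (dma_submit_error(cookie))
		return -EIO;

	dma_async_issue_pending(chan);	/* nothing runs until the channel is kicked */
	return 0;
}
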
/drivers/dma/
amba-pl08x.c
300 static inline struct pl08x_txd *to_pl08x_txd(struct dma_async_tx_descriptor *tx) argument
302 return container_of(tx, struct pl08x_txd, vd.tx);
397 struct pl08x_txd *txd = to_pl08x_txd(&vd->tx);
1170 struct pl08x_txd *txd = to_pl08x_txd(&vd->tx);
1171 struct pl08x_dma_chan *plchan = to_pl08x_chan(vd->tx.chan);
1173 dma_descriptor_unmap(&vd->tx);
1245 struct pl08x_txd *txd = to_pl08x_txd(&vd->tx);
1815 struct pl08x_txd *tx; local
1825 tx
[all...]
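
Note: most of the drivers/dma/ hits define a helper like to_pl08x_txd() above: the driver embeds the generic struct dma_async_tx_descriptor inside its own descriptor and recovers the outer type with container_of(). A generic sketch of that idiom (struct my_desc and to_my_desc() are made-up names):

#include <linux/kernel.h>
#include <linux/dmaengine.h>

/* Hypothetical driver-private descriptor wrapping the generic one. */
struct my_desc {
	struct dma_async_tx_descriptor txd;	/* embedded generic descriptor */
	dma_addr_t src;
	dma_addr_t dst;
	size_t len;
};

/* Map the generic pointer handed to tx_submit() and friends back to
 * the driver descriptor it lives inside. */
static inline struct my_desc *to_my_desc(struct dma_async_tx_descriptor *tx)
{
	return container_of(tx, struct my_desc, txd);
}
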
at_hdmac.c
57 static dma_cookie_t atc_tx_submit(struct dma_async_tx_descriptor *tx);
560 static dma_cookie_t atc_tx_submit(struct dma_async_tx_descriptor *tx) argument
562 struct at_desc *desc = txd_to_at_desc(tx);
563 struct at_dma_chan *atchan = to_at_dma_chan(tx->chan);
568 cookie = dma_cookie_assign(tx);
571 dev_vdbg(chan2dev(tx->chan), "tx_submit: started %u\n",
576 dev_vdbg(chan2dev(tx->chan), "tx_submit: queued %u\n",
592 * @flags: tx descriptor status flags
679 * @flags: tx descriptor status flags
897 * @flags: tx descripto
[all...]
coh901318.c
2164 coh901318_tx_submit(struct dma_async_tx_descriptor *tx) argument
2166 struct coh901318_desc *cohd = container_of(tx, struct coh901318_desc,
2168 struct coh901318_chan *cohc = to_coh901318_chan(tx->chan);
2173 cookie = dma_cookie_assign(tx);
cppi41.c
339 static dma_cookie_t cppi41_tx_submit(struct dma_async_tx_descriptor *tx) argument
343 cookie = dma_cookie_assign(tx);
dmaengine.c
1084 void dma_async_tx_descriptor_init(struct dma_async_tx_descriptor *tx, argument
1087 tx->chan = chan;
1089 spin_lock_init(&tx->lock);
1095 * @tx: in-flight transaction to wait on
1098 dma_wait_for_async_tx(struct dma_async_tx_descriptor *tx) argument
1102 if (!tx)
1105 while (tx->cookie == -EBUSY) {
1113 return dma_sync_wait(tx->chan, tx->cookie);
1119 * @tx
1121 dma_run_dependencies(struct dma_async_tx_descriptor *tx) argument
[all...]
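
Note: dma_wait_for_async_tx() above spins until the descriptor has been given a real cookie and then polls the channel. A client holding only the cookie can get the same effect with dma_sync_wait(), which issues pending work and polls device_tx_status() with a timeout; a rough sketch:

#include <linux/dmaengine.h>

/* Polling wait for one submitted descriptor. Most clients prefer a
 * completion callback; this mirrors the async_tx fallback path. */
static enum dma_status example_wait_cookie(struct dma_chan *chan,
					   dma_cookie_t cookie)
{
	if (dma_submit_error(cookie))
		return DMA_ERROR;	/* submission already failed */

	return dma_sync_wait(chan, cookie);
}
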
dmaengine.h
23 * @tx: descriptor needing cookie
28 static inline dma_cookie_t dma_cookie_assign(struct dma_async_tx_descriptor *tx) argument
30 struct dma_chan *chan = tx->chan;
36 tx->cookie = chan->cookie = cookie;
43 * @tx: descriptor to complete
51 static inline void dma_cookie_complete(struct dma_async_tx_descriptor *tx) argument
53 BUG_ON(tx->cookie < DMA_MIN_COOKIE);
54 tx->chan->completed_cookie = tx->cookie;
55 tx
[all...]
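
Note: the long run of *_tx_submit() hits (at_hdmac, coh901318, cppi41, fsldma, imx-dma, imx-sdma, ...) all reduce to the shape these dmaengine.h helpers support: take the channel lock, assign a cookie with dma_cookie_assign(), queue the descriptor, return the cookie. A hedged sketch of such a submit hook, with made-up driver types:

#include <linux/dmaengine.h>
#include <linux/list.h>
#include <linux/spinlock.h>

#include "dmaengine.h"	/* drivers/dma/dmaengine.h: dma_cookie_assign() */

/* Hypothetical driver channel and software descriptor. */
struct my_chan {
	struct dma_chan chan;
	spinlock_t lock;
	struct list_head queue;
};

struct my_sw_desc {
	struct dma_async_tx_descriptor txd;
	struct list_head node;
};

static dma_cookie_t my_tx_submit(struct dma_async_tx_descriptor *tx)
{
	struct my_chan *mc = container_of(tx->chan, struct my_chan, chan);
	struct my_sw_desc *d = container_of(tx, struct my_sw_desc, txd);
	unsigned long flags;
	dma_cookie_t cookie;

	spin_lock_irqsave(&mc->lock, flags);
	cookie = dma_cookie_assign(tx);		/* monotonic per-channel cookie */
	list_add_tail(&d->node, &mc->queue);	/* hardware picks it up later */
	spin_unlock_irqrestore(&mc->lock, flags);

	return cookie;
}
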
dmatest.c
483 struct dma_async_tx_descriptor *tx = NULL; local
580 tx = dev->device_prep_dma_memcpy(chan,
584 tx = dev->device_prep_dma_xor(chan,
593 tx = dev->device_prep_dma_pq(chan, dma_pq, srcs,
598 if (!tx) {
608 tx->callback = dmatest_callback;
609 tx->callback_param = &done;
610 cookie = tx->tx_submit(tx);
edma.c
138 *to_edma_desc(struct dma_async_tx_descriptor *tx) argument
140 return container_of(tx, struct edma_desc, vdesc.tx);
166 echan->edesc = to_edma_desc(&vdesc->tx);
963 if (echan->edesc && echan->edesc->vdesc.tx.cookie == cookie)
966 txstate->residue = to_edma_desc(&vdesc->tx)->residue;
ep93xx_dma.c
815 * @tx: descriptor to be executed
821 static dma_cookie_t ep93xx_dma_tx_submit(struct dma_async_tx_descriptor *tx) argument
823 struct ep93xx_dma_chan *edmac = to_ep93xx_dma_chan(tx->chan);
829 cookie = dma_cookie_assign(tx);
831 desc = container_of(tx, struct ep93xx_dma_desc, txd);
1094 * @flags: tx descriptor status flags
fsldma.c
408 static dma_cookie_t fsl_dma_tx_submit(struct dma_async_tx_descriptor *tx) argument
410 struct fsldma_chan *chan = to_fsl_chan(tx->chan);
411 struct fsl_desc_sw *desc = tx_to_fsl_desc(tx);
imx-dma.c
744 static dma_cookie_t imxdma_tx_submit(struct dma_async_tx_descriptor *tx) argument
746 struct imxdma_channel *imxdmac = to_imxdma_chan(tx->chan);
753 cookie = dma_cookie_assign(tx);
imx-sdma.c
942 static dma_cookie_t sdma_tx_submit(struct dma_async_tx_descriptor *tx) argument
945 struct sdma_channel *sdmac = to_sdma_chan(tx->chan);
950 cookie = dma_cookie_assign(tx);
intel_mid_dma.c
327 * mark completed when tx is completete
339 /*tx is complete*/
428 * @tx: dma engine descriptor
432 static dma_cookie_t intel_mid_dma_tx_submit(struct dma_async_tx_descriptor *tx) argument
434 struct intel_mid_dma_desc *desc = to_intel_mid_dma_desc(tx);
435 struct intel_mid_dma_chan *midc = to_intel_mid_dma_chan(tx->chan);
439 cookie = dma_cookie_assign(tx);
/drivers/dma/dw/
core.c
651 static dma_cookie_t dwc_tx_submit(struct dma_async_tx_descriptor *tx) argument
653 struct dw_desc *desc = txd_to_dw_desc(tx);
654 struct dw_dma_chan *dwc = to_dw_dma_chan(tx->chan);
659 cookie = dma_cookie_assign(tx);
667 dev_vdbg(chan2dev(tx->chan), "%s: queued %u\n", __func__, desc->txd.cookie);
/drivers/dma/ioat/
dma.c
230 static dma_cookie_t ioat1_tx_submit(struct dma_async_tx_descriptor *tx) argument
232 struct dma_chan *c = tx->chan;
234 struct ioat_desc_sw *desc = tx_to_ioat_desc(tx);
242 cookie = dma_cookie_assign(tx);
610 struct dma_async_tx_descriptor *tx; local
619 tx = &desc->txd;
626 if (tx->cookie) {
627 dma_cookie_complete(tx);
628 dma_descriptor_unmap(tx);
630 if (tx
827 struct dma_async_tx_descriptor *tx; local
[all...]
dma.h
40 #define tx_to_ioat_desc(tx) container_of(tx, struct ioat_desc_sw, txd)
203 struct dma_async_tx_descriptor *tx, int id)
209 (unsigned long long) tx->phys,
210 (unsigned long long) hw->next, tx->cookie, tx->flags,
202 __dump_desc_dbg(struct ioat_chan_common *chan, struct ioat_dma_descriptor *hw, struct dma_async_tx_descriptor *tx, int id) argument
dma_v2.c
134 struct dma_async_tx_descriptor *tx; local
148 tx = &desc->txd;
150 if (tx->cookie) {
151 dma_descriptor_unmap(tx);
152 dma_cookie_complete(tx);
153 if (tx->callback) {
154 tx->callback(tx->callback_param);
155 tx->callback = NULL;
159 if (tx
415 ioat2_tx_submit_unlock(struct dma_async_tx_descriptor *tx) argument
[all...]
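
Note: the ioat cleanup loops above show the other half of the descriptor life cycle: when hardware retires a descriptor, the driver publishes the cookie as complete, drops its DMA mappings, and fires the client callback exactly once. A condensed sketch of that per-descriptor step (naming is hypothetical):

#include <linux/dmaengine.h>

#include "dmaengine.h"	/* drivers/dma/dmaengine.h: dma_cookie_complete() */

/* Retire one finished descriptor. */
static void example_retire_desc(struct dma_async_tx_descriptor *tx)
{
	if (!tx->cookie)
		return;			/* never submitted */

	dma_cookie_complete(tx);	/* updates chan->completed_cookie */
	dma_descriptor_unmap(tx);
	if (tx->callback) {
		tx->callback(tx->callback_param);
		tx->callback = NULL;	/* guard against a second invocation */
	}
}
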

Completed in 3576 milliseconds
