Searched defs:tq (Results 1 - 12 of 12) sorted by relevance

/drivers/net/
ifb.c:51 struct sk_buff_head tq; member in struct:ifb_private
71 if ((skb = skb_peek(&dp->tq)) == NULL) {
73 skb_queue_splice_tail_init(&dp->rq, &dp->tq);
81 while ((skb = __skb_dequeue(&dp->tq)) != NULL) {
98 if (skb_queue_len(&dp->tq) != 0)
229 __skb_queue_purge(&dp->tq);
239 __skb_queue_head_init(&dp->tq);
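The ifb.c hits above show a two-queue handoff: the transmit path fills a receive queue (dp->rq), while the worker drains a private dp->tq and splices rq into tq only when tq runs empty, then purges and re-initializes both at teardown. A minimal sketch of that pattern with the same sk_buff queue helpers; the names my_priv, my_worker and the locking shown are illustrative, not the driver's exact code:

    #include <linux/skbuff.h>
    #include <linux/spinlock.h>

    struct my_priv {
        struct sk_buff_head rq;   /* producer side: filled by the xmit path */
        struct sk_buff_head tq;   /* consumer side: drained by the worker */
    };

    static void my_worker(struct my_priv *dp)
    {
        struct sk_buff *skb;

        /* Refill the private work queue only when it has run dry. */
        if (skb_peek(&dp->tq) == NULL) {
            spin_lock_bh(&dp->rq.lock);
            skb_queue_splice_tail_init(&dp->rq, &dp->tq);
            spin_unlock_bh(&dp->rq.lock);
        }

        /* tq is private to this context, so the lock-free dequeue is safe. */
        while ((skb = __skb_dequeue(&dp->tq)) != NULL)
            kfree_skb(skb);   /* stand-in for the real per-packet work */
    }

    static void my_init(struct my_priv *dp)
    {
        skb_queue_head_init(&dp->rq);     /* shared queue: needs its lock */
        __skb_queue_head_init(&dp->tq);   /* private queue: lock unused */
    }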
/drivers/net/wireless/ath/ath5k/
qcu.c:292 struct ath5k_txq_info *tq = &ah->ah_txq[queue]; local
298 (tq->tqi_cw_min << AR5K_NODCU_RETRY_LMT_CW_MIN_S)
332 struct ath5k_txq_info *tq = &ah->ah_txq[queue]; local
336 tq = &ah->ah_txq[queue];
341 (tq->tqi_type == AR5K_TX_QUEUE_INACTIVE))
349 AR5K_REG_SM(tq->tqi_cw_min, AR5K_DCU_LCL_IFS_CW_MIN) |
350 AR5K_REG_SM(tq->tqi_cw_max, AR5K_DCU_LCL_IFS_CW_MAX) |
351 AR5K_REG_SM(tq->tqi_aifs, AR5K_DCU_LCL_IFS_AIFS),
374 if (tq->tqi_cbr_period) {
375 ath5k_hw_reg_write(ah, AR5K_REG_SM(tq
[all...]
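The qcu.c hits program per-queue contention parameters (tqi_cw_min, tqi_cw_max, tqi_aifs) into a single register using AR5K_REG_SM-style shift-and-mask macros. Below is a generic sketch of that packing idiom only; the MY_* field masks, shifts and helper macro are hypothetical stand-ins, not the ath5k definitions:

    #include <linux/kernel.h>

    /* Hypothetical register layout: each field has a mask and a shift. */
    #define MY_IFS_CW_MIN    0x000003ff
    #define MY_IFS_CW_MIN_S  0
    #define MY_IFS_CW_MAX    0x000ffc00
    #define MY_IFS_CW_MAX_S  10
    #define MY_IFS_AIFS      0x0ff00000
    #define MY_IFS_AIFS_S    20

    /* Shift the value to its field offset, then mask it, in the spirit of AR5K_REG_SM(). */
    #define MY_REG_SM(val, field) (((val) << field##_S) & (field))

    static u32 my_build_ifs_reg(u16 cw_min, u16 cw_max, u8 aifs)
    {
        return MY_REG_SM(cw_min, MY_IFS_CW_MIN) |
               MY_REG_SM(cw_max, MY_IFS_CW_MAX) |
               MY_REG_SM(aifs, MY_IFS_AIFS);
    }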
/drivers/input/keyboard/
sunkbd.c:79 struct work_struct tq; member in struct:sunkbd
118 schedule_work(&sunkbd->tq);
225 struct sunkbd *sunkbd = container_of(work, struct sunkbd, tq);
268 INIT_WORK(&sunkbd->tq, sunkbd_reinit);
lkkbd.c:282 struct work_struct tq; member in struct:lkkbd
469 schedule_work(&lk->tq);
580 struct lkkbd *lk = container_of(work, struct lkkbd, tq);
635 INIT_WORK(&lk->tq, lkkbd_reinit);
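Both keyboard drivers use the same idiom for their tq member: a work_struct is initialized once with INIT_WORK(), the interrupt path schedules it, and the work handler recovers the enclosing device with container_of(). A condensed sketch of that deferred-reinit pattern; struct my_kbd and the handler body are illustrative:

    #include <linux/workqueue.h>
    #include <linux/kernel.h>

    struct my_kbd {
        int needs_reinit;
        struct work_struct tq;   /* deferred re-initialization */
    };

    static void my_kbd_reinit(struct work_struct *work)
    {
        /* Recover the device from the embedded work item. */
        struct my_kbd *kbd = container_of(work, struct my_kbd, tq);

        kbd->needs_reinit = 0;   /* stand-in for the real reset sequence */
    }

    static void my_kbd_setup(struct my_kbd *kbd)
    {
        INIT_WORK(&kbd->tq, my_kbd_reinit);
    }

    static void my_kbd_irq_event(struct my_kbd *kbd)
    {
        /* Interrupt context: defer the heavy work to process context. */
        schedule_work(&kbd->tq);
    }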
/drivers/staging/lustre/lnet/lnet/
router_proc.c:708 struct lnet_tx_queue *tq; local
730 cfs_percpt_for_each(tq, i, ni->ni_tx_queues) {
749 tq->tq_credits_max,
750 tq->tq_credits, tq->tq_credits_min);
api-ni.c:1037 struct lnet_tx_queue *tq; local
1139 cfs_percpt_for_each(tq, i, ni->ni_tx_queues) {
1140 tq->tq_credits_min =
1141 tq->tq_credits_max =
1142 tq->tq_credits = lnet_ni_tq_credits(ni);
config.c:114 struct lnet_tx_queue *tq; local
144 cfs_percpt_for_each(tq, i, ni->ni_tx_queues)
145 INIT_LIST_HEAD(&tq->tq_delayed);
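Taken together, the api-ni.c and config.c hits set up the per-CPT transmit queues in two steps: config.c initializes each queue's tq_delayed list, and api-ni.c seeds the credit counters (current, min, max) from lnet_ni_tq_credits(); router_proc.c later walks the same per-CPT array just to report those counters. A reduced sketch of that initialization, with a plain loop standing in for the cfs_percpt_for_each() iteration shown above and my_* names used for illustration:

    #include <linux/list.h>

    struct my_tx_queue {
        int tq_credits;        /* available send credits on this CPT */
        int tq_credits_min;    /* low-water mark, kept for statistics */
        int tq_credits_max;    /* configured ceiling */
        struct list_head tq_delayed;   /* messages waiting for a credit */
    };

    static void my_tx_queues_init(struct my_tx_queue *tqs, int ncpts, int credits)
    {
        int i;

        for (i = 0; i < ncpts; i++) {
            struct my_tx_queue *tq = &tqs[i];

            INIT_LIST_HEAD(&tq->tq_delayed);
            tq->tq_credits_min = credits;
            tq->tq_credits_max = credits;
            tq->tq_credits = credits;
        }
    }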
lib-move.c:793 struct lnet_tx_queue *tq = ni->ni_tx_queues[cpt]; local
849 LASSERT((tq->tq_credits < 0) ==
850 !list_empty(&tq->tq_delayed));
853 tq->tq_credits--;
855 if (tq->tq_credits < tq->tq_credits_min)
856 tq->tq_credits_min = tq->tq_credits;
858 if (tq->tq_credits < 0) {
860 list_add_tail(&msg->msg_list, &tq
978 struct lnet_tx_queue *tq = ni->ni_tx_queues[msg->msg_tx_cpt]; local
[all...]
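The lib-move.c hits show the credit accounting itself: each send decrements the CPT's tq_credits, records the observed minimum, and once credits go negative the message is parked on tq_delayed instead of being sent (the LASSERT above checks that "credits negative" and "delayed list non-empty" always agree). A sketch of that decision, reusing my_tx_queue from the previous sketch and a hypothetical my_msg with an msg_list link:

    #include <linux/list.h>
    #include <linux/types.h>

    struct my_msg {
        struct list_head msg_list;   /* link on tq_delayed when blocked */
    };

    /* Returns true if the message may be sent now, false if it was queued. */
    static bool my_consume_credit(struct my_tx_queue *tq, struct my_msg *msg)
    {
        tq->tq_credits--;

        /* Track the deepest the credit count has ever gone. */
        if (tq->tq_credits < tq->tq_credits_min)
            tq->tq_credits_min = tq->tq_credits;

        if (tq->tq_credits < 0) {
            /* No credit left: defer until a completion returns one. */
            list_add_tail(&msg->msg_list, &tq->tq_delayed);
            return false;
        }
        return true;
    }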
/drivers/net/ethernet/chelsio/cxgb4vf/
cxgb4vf_main.c:443 struct sge_txq *tq; local
460 tq = s->egr_map[eq_idx];
461 if (unlikely(tq == NULL)) {
466 txq = container_of(tq, struct sge_eth_txq, q);
467 if (unlikely(tq->abs_id != qid)) {
470 qid, tq->abs_id);
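cxgb4vf_main.c resolves an egress-queue index through the adapter's egr_map, then uses container_of() to step from the embedded struct sge_txq back to its enclosing Ethernet TX queue, sanity-checking abs_id against the reported qid on the way. A simplified sketch of that lookup; the my_* types and the error handling are illustrative:

    #include <linux/kernel.h>

    struct my_txq {
        unsigned int abs_id;   /* absolute (hardware) queue id */
    };

    struct my_eth_txq {
        struct my_txq q;       /* generic TX queue embedded in the Ethernet one */
        unsigned long restarts;
    };

    static struct my_eth_txq *my_lookup_txq(struct my_txq **egr_map,
                                            unsigned int eq_idx,
                                            unsigned int qid)
    {
        struct my_txq *tq = egr_map[eq_idx];
        struct my_eth_txq *txq;

        if (unlikely(tq == NULL))
            return NULL;   /* stale or bogus egress index */

        txq = container_of(tq, struct my_eth_txq, q);
        if (unlikely(tq->abs_id != qid))
            return NULL;   /* map entry does not match the reported qid */

        return txq;
    }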
sge.c:228 * @tq: the TX queue
232 static inline unsigned int txq_avail(const struct sge_txq *tq) argument
234 return tq->size - 1 - tq->in_use;
306 const struct ulptx_sgl *sgl, const struct sge_txq *tq)
325 if (likely((u8 *)(p + 1) <= (u8 *)tq->stat)) {
332 } else if ((u8 *)p == (u8 *)tq->stat) {
333 p = (const struct ulptx_sge_pair *)tq->desc;
335 } else if ((u8 *)p + 8 == (u8 *)tq->stat) {
336 const __be64 *addr = (const __be64 *)tq
305 unmap_sgl(struct device *dev, const struct sk_buff *skb, const struct ulptx_sgl *sgl, const struct sge_txq *tq) argument
376 free_tx_desc(struct adapter *adapter, struct sge_txq *tq, unsigned int n, bool unmap) argument
410 reclaimable(const struct sge_txq *tq) argument
429 reclaim_completed_tx(struct adapter *adapter, struct sge_txq *tq, bool unmap) argument
885 write_sgl(const struct sk_buff *skb, struct sge_txq *tq, struct ulptx_sgl *sgl, u64 *end, unsigned int start, const dma_addr_t *addr) argument
948 ring_tx_db(struct adapter *adapter, struct sge_txq *tq, int n) argument
972 inline_tx_skb(const struct sk_buff *skb, const struct sge_txq *tq, void *pos) argument
1057 txq_advance(struct sge_txq *tq, unsigned int n) argument
1317 struct sge_txq *tq = &txq->q; local
2312 free_txq(struct adapter *adapter, struct sge_txq *tq) argument
[all...]
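The sge.c hits revolve around ring-index arithmetic on struct sge_txq: txq_avail() reports size - 1 - in_use (conventionally one descriptor is left unused so a full ring stays distinguishable from an empty one), and txq_advance()-style helpers bump the producer index with wraparound. A stand-alone sketch of those two helpers; struct my_ring is a hypothetical reduction of sge_txq:

    struct my_ring {
        unsigned int size;     /* number of descriptors in the ring */
        unsigned int in_use;   /* descriptors currently handed to hardware */
        unsigned int pidx;     /* producer index */
    };

    /* Free descriptors, keeping one slot in reserve. */
    static inline unsigned int my_ring_avail(const struct my_ring *q)
    {
        return q->size - 1 - q->in_use;
    }

    /* Claim n descriptors and advance the producer index, wrapping at the end. */
    static inline void my_ring_advance(struct my_ring *q, unsigned int n)
    {
        q->in_use += n;
        q->pidx += n;
        if (q->pidx >= q->size)
            q->pidx -= q->size;
    }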
/drivers/net/vmxnet3/
vmxnet3_drv.c:103 vmxnet3_tq_stopped(struct vmxnet3_tx_queue *tq, struct vmxnet3_adapter *adapter) argument
105 return tq->stopped;
110 vmxnet3_tq_start(struct vmxnet3_tx_queue *tq, struct vmxnet3_adapter *adapter) argument
112 tq->stopped = false;
113 netif_start_subqueue(adapter->netdev, tq - adapter->tx_queue);
118 vmxnet3_tq_wake(struct vmxnet3_tx_queue *tq, struct vmxnet3_adapter *adapter) argument
120 tq->stopped = false;
121 netif_wake_subqueue(adapter->netdev, (tq - adapter->tx_queue));
126 vmxnet3_tq_stop(struct vmxnet3_tx_queue *tq, struct vmxnet3_adapter *adapter) argument
128 tq
329 vmxnet3_unmap_pkt(u32 eop_idx, struct vmxnet3_tx_queue *tq, struct pci_dev *pdev, struct vmxnet3_adapter *adapter) argument
364 vmxnet3_tq_tx_complete(struct vmxnet3_tx_queue *tq, struct vmxnet3_adapter *adapter) argument
395 vmxnet3_tq_cleanup(struct vmxnet3_tx_queue *tq, struct vmxnet3_adapter *adapter) argument
428 vmxnet3_tq_destroy(struct vmxnet3_tx_queue *tq, struct vmxnet3_adapter *adapter) argument
470 vmxnet3_tq_init(struct vmxnet3_tx_queue *tq, struct vmxnet3_adapter *adapter) argument
500 vmxnet3_tq_create(struct vmxnet3_tx_queue *tq, struct vmxnet3_adapter *adapter) argument
659 vmxnet3_map_pkt(struct sk_buff *skb, struct vmxnet3_tx_ctx *ctx, struct vmxnet3_tx_queue *tq, struct pci_dev *pdev, struct vmxnet3_adapter *adapter) argument
817 vmxnet3_parse_and_copy_hdr(struct sk_buff *skb, struct vmxnet3_tx_queue *tq, struct vmxnet3_tx_ctx *ctx, struct vmxnet3_adapter *adapter) argument
925 vmxnet3_tq_xmit(struct sk_buff *skb, struct vmxnet3_tx_queue *tq, struct vmxnet3_adapter *adapter, struct net_device *netdev) argument
1633 struct vmxnet3_tx_queue *tq = local
1658 struct vmxnet3_tx_queue *tq = data; local
2162 struct vmxnet3_tx_queue *tq = &adapter->tx_queue[i]; local
2533 struct vmxnet3_tx_queue *tq = &adapter->tx_queue[i]; local
[all...]
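The vmxnet3 start/stop/wake helpers listed above are thin wrappers that pair a per-queue stopped flag with the matching netdev subqueue, deriving the subqueue index by pointer arithmetic (tq - adapter->tx_queue). A sketch of that trio, with my_adapter/my_txq standing in for the driver's types and the queue count chosen arbitrarily:

    #include <linux/netdevice.h>

    #define MY_MAX_TX_QUEUES 8

    struct my_txq {
        bool stopped;
    };

    struct my_adapter {
        struct net_device *netdev;
        struct my_txq tx_queue[MY_MAX_TX_QUEUES];
    };

    static void my_tq_start(struct my_txq *tq, struct my_adapter *adapter)
    {
        tq->stopped = false;
        /* The array offset of tq doubles as the netdev subqueue index. */
        netif_start_subqueue(adapter->netdev, tq - adapter->tx_queue);
    }

    static void my_tq_wake(struct my_txq *tq, struct my_adapter *adapter)
    {
        tq->stopped = false;
        netif_wake_subqueue(adapter->netdev, tq - adapter->tx_queue);
    }

    static void my_tq_stop(struct my_txq *tq, struct my_adapter *adapter)
    {
        tq->stopped = true;
        netif_stop_subqueue(adapter->netdev, tq - adapter->tx_queue);
    }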
/drivers/tty/ipwireless/
hardware.c:1728 struct ipw_tx_packet *tp, *tq; local
1738 list_for_each_entry_safe(tp, tq, &hw->tx_queue[i], queue) {
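Here tq is simply the scratch "next" pointer of list_for_each_entry_safe(), which lets each queued TX packet be unlinked and freed while the queue is being walked. A minimal sketch of that teardown loop; my_packet and the kfree() are illustrative:

    #include <linux/list.h>
    #include <linux/slab.h>

    struct my_packet {
        struct list_head queue;   /* link on the hardware's tx_queue list */
    };

    static void my_flush_tx_queue(struct list_head *tx_queue)
    {
        struct my_packet *tp, *tq;

        /* The _safe variant keeps tq pointing at the next entry, so tp may be freed. */
        list_for_each_entry_safe(tp, tq, tx_queue, queue) {
            list_del(&tp->queue);
            kfree(tp);
        }
    }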

Completed in 206 milliseconds