Lines Matching refs:queue
244 struct data_queue *queue;
247 tx_queue_for_each(rt2x00dev, queue) {
248 while (!rt2x00queue_empty(queue)) {
249 entry = rt2x00queue_get_entry(queue, Q_INDEX_DONE);
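
The matches at 244-249 sit in the TX-done worker: it walks every TX queue and pops entries from the Q_INDEX_DONE position until the queue is drained. A minimal sketch of that loop follows. It assumes the usual rt2x00 driver headers (linux/usb.h, rt2x00.h, rt2x00usb.h); the *_sketch names and the complete_tx_entry() helper are invented for illustration, and the ownership/status checks the real driver performs before completing an entry are omitted.

#include <linux/usb.h>
#include "rt2x00.h"
#include "rt2x00usb.h"

/* Hypothetical helper: report TX status and release the entry. */
static void complete_tx_entry(struct queue_entry *entry);

/* Sketch: drain completed TX entries from every TX queue. */
static void txdone_work_sketch(struct rt2x00_dev *rt2x00dev)
{
        struct data_queue *queue;
        struct queue_entry *entry;

        tx_queue_for_each(rt2x00dev, queue) {
                while (!rt2x00queue_empty(queue)) {
                        entry = rt2x00queue_get_entry(queue, Q_INDEX_DONE);
                        complete_tx_entry(entry);
                }
        }
}
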
263 struct rt2x00_dev *rt2x00dev = entry->queue->rt2x00dev;
290 struct rt2x00_dev *rt2x00dev = entry->queue->rt2x00dev;
318 usb_sndbulkpipe(usb_dev, entry->queue->usb_endpoint),
356 skbdesc->desc_len = entry->queue->desc_size;
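
Lines 290-356 belong to the TX submission path: the entry's skb is handed to a bulk-out URB aimed at the queue's endpoint (318), and 356 shows the TX descriptor length being taken from queue->desc_size. Below is a sketch of filling and submitting such a URB, under the same headers as above; it assumes the entry's priv_data is the rt2x00usb per-entry private struct with a struct urb * member (the field name urb is an assumption), and tx_urb_complete() is a hypothetical completion callback.

/* Hypothetical URB completion callback for the sketch below. */
static void tx_urb_complete(struct urb *urb);

/* Sketch: point a bulk-out URB at the queue's TX endpoint and submit it. */
static int kick_tx_entry_sketch(struct queue_entry *entry)
{
        struct rt2x00_dev *rt2x00dev = entry->queue->rt2x00dev;
        struct usb_device *usb_dev = to_usb_device_intf(rt2x00dev->dev);
        struct queue_entry_priv_usb *entry_priv = entry->priv_data; /* urb field assumed */

        usb_fill_bulk_urb(entry_priv->urb, usb_dev,
                          usb_sndbulkpipe(usb_dev, entry->queue->usb_endpoint),
                          entry->skb->data, entry->skb->len,
                          tx_urb_complete, entry);

        return usb_submit_urb(entry_priv->urb, GFP_ATOMIC);
}
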
368 struct rt2x00_dev *rt2x00dev = entry->queue->rt2x00dev;
383 if (urb->actual_length < entry->queue->desc_size || urb->status)
395 struct rt2x00_dev *rt2x00dev = entry->queue->rt2x00dev;
407 usb_rcvbulkpipe(usb_dev, entry->queue->usb_endpoint),
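
368-407 are the RX counterpart: a bulk-in URB is filled with the entry's skb and aimed at usb_rcvbulkpipe (407), and the completion handler at 383 treats a transfer shorter than queue->desc_size, or any non-zero urb->status, as unusable. A sketch of the RX submission under the same assumptions (per-entry urb field, hypothetical rx_urb_complete() callback):

/* Hypothetical URB completion callback for the sketch below. */
static void rx_urb_complete(struct urb *urb);

/* Sketch: submit a bulk-in URB so the device can fill this RX entry's skb. */
static int kick_rx_entry_sketch(struct queue_entry *entry)
{
        struct rt2x00_dev *rt2x00dev = entry->queue->rt2x00dev;
        struct usb_device *usb_dev = to_usb_device_intf(rt2x00dev->dev);
        struct queue_entry_priv_usb *entry_priv = entry->priv_data;

        usb_fill_bulk_urb(entry_priv->urb, usb_dev,
                          usb_rcvbulkpipe(usb_dev, entry->queue->usb_endpoint),
                          entry->skb->data, entry->skb->len,
                          rx_urb_complete, entry);

        return usb_submit_urb(entry_priv->urb, GFP_ATOMIC);
}
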
422 void rt2x00usb_kick_queue(struct data_queue *queue)
424 switch (queue->qid) {
429 if (!rt2x00queue_empty(queue))
430 rt2x00queue_for_each_entry(queue,
437 if (!rt2x00queue_full(queue))
438 rt2x00queue_for_each_entry(queue,
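
rt2x00usb_kick_queue (422-438) dispatches on queue->qid: TX queues are only iterated when non-empty, while the RX queue is refilled only while it is not yet full. A sketch of that dispatch is below; the access-category case labels and the two entry-walking helpers are assumptions, and the index window the real code passes to rt2x00queue_for_each_entry is not reproduced.

/* Hypothetical helpers that submit URBs for the relevant entries. */
static void kick_pending_tx_entries(struct data_queue *queue);
static void kick_free_rx_entries(struct data_queue *queue);

/* Sketch: kick a queue according to its type. */
static void kick_queue_sketch(struct data_queue *queue)
{
        switch (queue->qid) {
        case QID_AC_VO:
        case QID_AC_VI:
        case QID_AC_BE:
        case QID_AC_BK:
                /* TX: only submit URBs when something is queued. */
                if (!rt2x00queue_empty(queue))
                        kick_pending_tx_entries(queue);
                break;
        case QID_RX:
                /* RX: keep refilling URBs until every entry is in flight. */
                if (!rt2x00queue_full(queue))
                        kick_free_rx_entries(queue);
                break;
        default:
                break;
        }
}
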
452 struct rt2x00_dev *rt2x00dev = entry->queue->rt2x00dev;
464 if ((entry->queue->qid == QID_BEACON) &&
471 void rt2x00usb_flush_queue(struct data_queue *queue, bool drop)
477 rt2x00queue_for_each_entry(queue, Q_INDEX_DONE, Q_INDEX, NULL,
481 * Obtain the queue completion handler
483 switch (queue->qid) {
488 completion = &queue->rt2x00dev->txdone_work;
491 completion = &queue->rt2x00dev->rxdone_work;
503 if (rt2x00queue_empty(queue))
508 * worker function runs, it should clean up the queue.
510 queue_work(queue->rt2x00dev->workqueue, completion);
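
rt2x00usb_flush_queue (471-510) first walks the in-flight entries between Q_INDEX_DONE and Q_INDEX (477), then picks the completion worker matching the queue type and schedules it on the driver's workqueue as long as entries remain. A condensed sketch of the handler selection and scheduling, ignoring the drop parameter and the wait/retry behaviour of the real function (the TX case labels are assumed):

/* Sketch: pick the matching completion worker and schedule it while entries remain. */
static void flush_queue_sketch(struct data_queue *queue)
{
        struct work_struct *completion;

        switch (queue->qid) {
        case QID_AC_VO:
        case QID_AC_VI:
        case QID_AC_BE:
        case QID_AC_BK:
                completion = &queue->rt2x00dev->txdone_work;
                break;
        case QID_RX:
                completion = &queue->rt2x00dev->rxdone_work;
                break;
        default:
                return;
        }

        if (rt2x00queue_empty(queue))
                return;

        /* Schedule the completion handler manually; when the worker
         * runs, it should clean up the queue. */
        queue_work(queue->rt2x00dev->workqueue, completion);
}
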
521 static void rt2x00usb_watchdog_tx_dma(struct data_queue *queue)
523 WARNING(queue->rt2x00dev, "TX queue %d DMA timed out,"
524 " invoke forced reset\n", queue->qid);
526 rt2x00queue_flush_queue(queue, true);
529 static int rt2x00usb_dma_timeout(struct data_queue *queue)
533 entry = rt2x00queue_get_entry(queue, Q_INDEX_DMA_DONE);
539 struct data_queue *queue;
541 tx_queue_for_each(rt2x00dev, queue) {
542 if (!rt2x00queue_empty(queue)) {
543 if (rt2x00usb_dma_timeout(queue))
544 rt2x00usb_watchdog_tx_dma(queue);
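
The watchdog (521-544) walks every TX queue; if a queue is non-empty and its entry at Q_INDEX_DMA_DONE looks stuck, it logs the warning from line 523 and forces a flush with rt2x00queue_flush_queue(queue, true). The sketch below collapses the three listed functions into one and reduces the "is the device still holding this entry" test to a hypothetical entry_dma_stuck() predicate.

/* Hypothetical predicate: is this entry's DMA still pending on the device? */
static bool entry_dma_stuck(struct queue_entry *entry);

/* Sketch: detect a stalled TX DMA and force-flush the affected queue. */
static void watchdog_sketch(struct rt2x00_dev *rt2x00dev)
{
        struct data_queue *queue;

        tx_queue_for_each(rt2x00dev, queue) {
                if (rt2x00queue_empty(queue))
                        continue;

                if (entry_dma_stuck(rt2x00queue_get_entry(queue, Q_INDEX_DMA_DONE))) {
                        WARNING(queue->rt2x00dev,
                                "TX queue %d DMA timed out, invoke forced reset\n",
                                queue->qid);
                        rt2x00queue_flush_queue(queue, true);
                }
        }
}
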
567 if (entry->queue->qid == QID_RX)
572 static void rt2x00usb_assign_endpoint(struct data_queue *queue,
575 struct usb_device *usb_dev = to_usb_device_intf(queue->rt2x00dev->dev);
578 queue->usb_endpoint = usb_endpoint_num(ep_desc);
580 if (queue->qid == QID_RX) {
581 pipe = usb_rcvbulkpipe(usb_dev, queue->usb_endpoint);
582 queue->usb_maxpacket = usb_maxpacket(usb_dev, pipe, 0);
584 pipe = usb_sndbulkpipe(usb_dev, queue->usb_endpoint);
585 queue->usb_maxpacket = usb_maxpacket(usb_dev, pipe, 1);
588 if (!queue->usb_maxpacket)
589 queue->usb_maxpacket = 1;
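
rt2x00usb_assign_endpoint (572-589) records the endpoint number in queue->usb_endpoint and caches the endpoint's maximum packet size, building a receive pipe for the RX queue and a send pipe for everything else, falling back to 1 when usb_maxpacket() reports 0. A sketch assembled from the listed lines (the three-argument usb_maxpacket() form matches the kernel version this listing comes from; newer kernels drop the direction argument):

/* Sketch: bind a data queue to a bulk endpoint and cache its max packet size. */
static void assign_endpoint_sketch(struct data_queue *queue,
                                   struct usb_endpoint_descriptor *ep_desc)
{
        struct usb_device *usb_dev = to_usb_device_intf(queue->rt2x00dev->dev);
        unsigned int pipe;

        queue->usb_endpoint = usb_endpoint_num(ep_desc);

        if (queue->qid == QID_RX) {
                pipe = usb_rcvbulkpipe(usb_dev, queue->usb_endpoint);
                queue->usb_maxpacket = usb_maxpacket(usb_dev, pipe, 0);
        } else {
                pipe = usb_sndbulkpipe(usb_dev, queue->usb_endpoint);
                queue->usb_maxpacket = usb_maxpacket(usb_dev, pipe, 1);
        }

        /* Guard against endpoints that report a zero max packet size. */
        if (!queue->usb_maxpacket)
                queue->usb_maxpacket = 1;
}
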
597 struct data_queue *queue = rt2x00dev->tx;
605 * to the queue.
613 (queue != queue_end(rt2x00dev))) {
614 rt2x00usb_assign_endpoint(queue, ep_desc);
615 queue = queue_next(queue);
634 txall_queue_for_each(rt2x00dev, queue) {
635 if (!queue->usb_endpoint)
636 rt2x00usb_assign_endpoint(queue, tx_ep_desc);
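
rt2x00usb_find_endpoints (597-636) walks the interface's endpoint descriptors: bulk-in endpoints are assigned to the RX queue, each bulk-out endpoint is handed to the next TX queue in order (613-615), and any TX queue still without an endpoint afterwards reuses the last bulk-out descriptor found (634-636). A sketch of that walk, assuming the descriptors are reached through a struct usb_interface * passed in by the caller and that rt2x00dev->rx and rt2x00dev->tx point at the RX queue and the first TX queue; the real function's error reporting is reduced to a bare -EPIPE.

/* Sketch: map the interface's bulk endpoints onto the RX and TX queues. */
static int find_endpoints_sketch(struct rt2x00_dev *rt2x00dev,
                                 struct usb_interface *intf)
{
        struct usb_host_interface *intf_desc = intf->cur_altsetting;
        struct usb_endpoint_descriptor *ep_desc;
        struct usb_endpoint_descriptor *tx_ep_desc = NULL;
        struct data_queue *queue = rt2x00dev->tx;
        unsigned int i;

        for (i = 0; i < intf_desc->desc.bNumEndpoints; i++) {
                ep_desc = &intf_desc->endpoint[i].desc;

                if (usb_endpoint_is_bulk_in(ep_desc)) {
                        /* Bulk-in feeds the RX queue. */
                        assign_endpoint_sketch(rt2x00dev->rx, ep_desc);
                } else if (usb_endpoint_is_bulk_out(ep_desc) &&
                           queue != queue_end(rt2x00dev)) {
                        /* Each bulk-out endpoint serves the next TX queue. */
                        assign_endpoint_sketch(queue, ep_desc);
                        queue = queue_next(queue);
                        tx_ep_desc = ep_desc;
                }
        }

        if (!tx_ep_desc)
                return -EPIPE;

        /* TX queues without their own endpoint share the last bulk-out one. */
        txall_queue_for_each(rt2x00dev, queue) {
                if (!queue->usb_endpoint)
                        assign_endpoint_sketch(queue, tx_ep_desc);
        }

        return 0;
}
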
642 static int rt2x00usb_alloc_entries(struct data_queue *queue)
644 struct rt2x00_dev *rt2x00dev = queue->rt2x00dev;
649 for (i = 0; i < queue->limit; i++) {
650 entry_priv = queue->entries[i].priv_data;
657 * If this is not the beacon queue or
661 if (queue->qid != QID_BEACON ||
665 for (i = 0; i < queue->limit; i++) {
666 bcn_priv = queue->entries[i].priv_data;
675 static void rt2x00usb_free_entries(struct data_queue *queue)
677 struct rt2x00_dev *rt2x00dev = queue->rt2x00dev;
682 if (!queue->entries)
685 for (i = 0; i < queue->limit; i++) {
686 entry_priv = queue->entries[i].priv_data;
692 * If this is not the beacon queue or
696 if (queue->qid != QID_BEACON ||
700 for (i = 0; i < queue->limit; i++) {
701 bcn_priv = queue->entries[i].priv_data;
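
rt2x00usb_alloc_entries and rt2x00usb_free_entries (642-701) allocate one URB per queue entry into the entry's priv_data, with the beacon queue getting an additional "guardian" URB per entry; the qid != QID_BEACON checks at 661 and 696 skip that extra step for every other queue, and freeing walks the same entries again. A sketch of the allocation side, assuming the rt2x00usb per-entry private structs carry the urb and guardian_urb fields named here, and ignoring the extra driver-capability condition hinted at by the truncated comments at 657-661.

/* Sketch: allocate one URB per entry; beacon entries get an extra guardian URB. */
static int alloc_entries_sketch(struct data_queue *queue)
{
        struct queue_entry_priv_usb *entry_priv;
        struct queue_entry_priv_usb_bcn *bcn_priv;
        unsigned int i;

        for (i = 0; i < queue->limit; i++) {
                entry_priv = queue->entries[i].priv_data;
                entry_priv->urb = usb_alloc_urb(0, GFP_KERNEL);   /* urb field assumed */
                if (!entry_priv->urb)
                        return -ENOMEM;
        }

        /* Only the beacon queue needs the additional guardian URBs. */
        if (queue->qid != QID_BEACON)
                return 0;

        for (i = 0; i < queue->limit; i++) {
                bcn_priv = queue->entries[i].priv_data;
                bcn_priv->guardian_urb = usb_alloc_urb(0, GFP_KERNEL); /* field assumed */
                if (!bcn_priv->guardian_urb)
                        return -ENOMEM;
        }

        return 0;
}
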
709 struct data_queue *queue;
713 * Find endpoints for each queue
722 queue_for_each(rt2x00dev, queue) {
723 status = rt2x00usb_alloc_entries(queue);
739 struct data_queue *queue;
741 queue_for_each(rt2x00dev, queue)
742 rt2x00usb_free_entries(queue);
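
rt2x00usb_initialize (709-723) first resolves the endpoints and then allocates entries for every queue, while rt2x00usb_uninitialize (739-742) frees them all again. A sketch of how the two halves fit together, reusing the sketch helpers above; free_entries_sketch() stands in for the (not shown) freeing counterpart of alloc_entries_sketch(), and the real function's cleanup-on-error path is omitted.

/* Freeing counterpart of alloc_entries_sketch(), not shown here. */
static void free_entries_sketch(struct data_queue *queue);

/* Sketch: init/uninit glue around endpoint discovery and per-queue URB allocation. */
static int initialize_sketch(struct rt2x00_dev *rt2x00dev,
                             struct usb_interface *intf)
{
        struct data_queue *queue;
        int status;

        /* Find endpoints for each queue. */
        status = find_endpoints_sketch(rt2x00dev, intf);
        if (status)
                return status;

        queue_for_each(rt2x00dev, queue) {
                status = alloc_entries_sketch(queue);
                if (status)
                        return status;
        }

        return 0;
}

static void uninitialize_sketch(struct rt2x00_dev *rt2x00dev)
{
        struct data_queue *queue;

        queue_for_each(rt2x00dev, queue)
                free_entries_sketch(queue);
}
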