Searched refs:wq (Results 51 - 75 of 265) sorted by relevance


/drivers/media/i2c/
saa7110.c
63 wait_queue_head_t wq; member in struct:saa7110
199 prepare_to_wait(&decoder->wq, &wait, TASK_UNINTERRUPTIBLE);
201 finish_wait(&decoder->wq, &wait);
234 prepare_to_wait(&decoder->wq, &wait, TASK_UNINTERRUPTIBLE);
236 finish_wait(&decoder->wq, &wait);
426 init_waitqueue_head(&decoder->wq);
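
The saa7110 hits above show the open-coded sleep idiom: prepare_to_wait() queues the task and sets its state, a schedule call does the actual sleeping, and finish_wait() cleans up. Below is a minimal sketch of that idiom, assuming a hypothetical demo_dev structure; only the wait-queue calls themselves come from the listing.

/* Hypothetical sketch of the saa7110-style open-coded sleep. */
#include <linux/wait.h>
#include <linux/sched.h>
#include <linux/jiffies.h>

struct demo_dev {
        wait_queue_head_t wq;           /* cf. saa7110.c:63 */
};

static void demo_init(struct demo_dev *d)
{
        init_waitqueue_head(&d->wq);    /* cf. saa7110.c:426 */
}

/* Sleep for at most @ms milliseconds, or until someone calls wake_up(&d->wq). */
static void demo_sleep(struct demo_dev *d, unsigned int ms)
{
        DEFINE_WAIT(wait);

        prepare_to_wait(&d->wq, &wait, TASK_UNINTERRUPTIBLE);
        schedule_timeout(msecs_to_jiffies(ms));
        finish_wait(&d->wq, &wait);
}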
/drivers/mtd/ubi/
block.c
86 struct workqueue_struct *wq; member in struct:ubiblock
302 queue_work(dev->wq, &dev->work);
436 dev->wq = alloc_workqueue("%s", 0, 0, gd->disk_name);
437 if (!dev->wq) {
495 destroy_workqueue(dev->wq);
625 destroy_workqueue(dev->wq);
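
The ubiblock matches trace the full per-device workqueue lifecycle: alloc_workqueue() at attach, queue_work() on I/O submission, destroy_workqueue() at teardown. A sketch of that lifecycle follows, using a hypothetical demo_blk structure and function names.

/* Hypothetical sketch of the ubiblock-style workqueue lifecycle. */
#include <linux/kernel.h>
#include <linux/workqueue.h>
#include <linux/errno.h>

struct demo_blk {
        struct workqueue_struct *wq;    /* cf. block.c:86 */
        struct work_struct work;
};

static void demo_work_fn(struct work_struct *work)
{
        struct demo_blk *dev = container_of(work, struct demo_blk, work);

        /* drain the device's request queue here */
        (void)dev;
}

static int demo_attach(struct demo_blk *dev, const char *disk_name)
{
        /* one workqueue per device, named after the disk (cf. block.c:436) */
        dev->wq = alloc_workqueue("%s", 0, 0, disk_name);
        if (!dev->wq)
                return -ENOMEM;
        INIT_WORK(&dev->work, demo_work_fn);
        return 0;
}

static void demo_submit(struct demo_blk *dev)
{
        queue_work(dev->wq, &dev->work);        /* cf. block.c:302 */
}

static void demo_detach(struct demo_blk *dev)
{
        destroy_workqueue(dev->wq);             /* cf. block.c:495 and :625 */
}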
/drivers/infiniband/hw/mlx4/
cq.c
600 struct mlx4_ib_wq *wq; local
687 wq = &(*cur_qp)->sq;
690 wq->tail += (u16) (wqe_ctr - (u16) wq->tail);
692 wc->wr_id = wq->wrid[wq->tail & (wq->wqe_cnt - 1)];
693 ++wq->tail;
705 wq = &(*cur_qp)->rq;
706 tail = wq
[all...]
alias_GUID.c
301 queue_delayed_work(dev->sriov.alias_guid.ports_guid[port_index].wq,
426 queue_delayed_work(dev->sriov.alias_guid.ports_guid[port - 1].wq,
457 queue_delayed_work(dev->sriov.alias_guid.ports_guid[port - 1].wq,
565 queue_delayed_work(dev->sriov.alias_guid.ports_guid[port].wq,
601 flush_workqueue(dev->sriov.alias_guid.ports_guid[i].wq);
602 destroy_workqueue(dev->sriov.alias_guid.ports_guid[i].wq);
665 dev->sriov.alias_guid.ports_guid[i].wq =
667 if (!dev->sriov.alias_guid.ports_guid[i].wq) {
678 destroy_workqueue(dev->sriov.alias_guid.ports_guid[i].wq);
679 dev->sriov.alias_guid.ports_guid[i].wq
[all...]
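
The alias_GUID matches are the delayed-work variant of the same lifecycle: per-port work is scheduled with queue_delayed_work(), and teardown flushes and destroys the per-port queue. A sketch under the assumption of a hypothetical demo_port structure (the real code keys the queue by port index):

/* Hypothetical sketch of a per-port delayed-work lifecycle. */
#include <linux/kernel.h>
#include <linux/workqueue.h>
#include <linux/jiffies.h>
#include <linux/errno.h>

struct demo_port {
        struct workqueue_struct *wq;
        struct delayed_work dwork;
};

static void demo_guid_work(struct work_struct *work)
{
        struct demo_port *port = container_of(to_delayed_work(work),
                                              struct demo_port, dwork);

        /* refresh the port's records, then re-arm for a later pass */
        queue_delayed_work(port->wq, &port->dwork, msecs_to_jiffies(2000));
}

static int demo_port_start(struct demo_port *port)
{
        port->wq = alloc_ordered_workqueue("demo_guid", 0);
        if (!port->wq)
                return -ENOMEM;
        INIT_DELAYED_WORK(&port->dwork, demo_guid_work);
        queue_delayed_work(port->wq, &port->dwork, 0);
        return 0;
}

static void demo_port_stop(struct demo_port *port)
{
        cancel_delayed_work_sync(&port->dwork);
        flush_workqueue(port->wq);      /* cf. alias_GUID.c:601 */
        destroy_workqueue(port->wq);    /* cf. alias_GUID.c:602 */
}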
/drivers/misc/cxl/
context.c
59 init_waitqueue_head(&ctx->wq);
149 wake_up_all(&ctx->wq);
/drivers/scsi/libsas/
sas_event.c
59 struct workqueue_struct *wq = ha->core.shost->work_q; local
67 drain_workqueue(wq);
/drivers/tty/serial/
mrst_max3110.c
61 wait_queue_head_t wq; member in struct:uart_max3110
203 wake_up(&pmax->wq);
339 wake_up(&max->wq);
410 wait_queue_head_t *wq = &max->wq; local
417 wait_event_interruptible(*wq,
448 wake_up(&max->wq);
821 init_waitqueue_head(&max->wq);
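
max3110 pairs a kthread blocking in wait_event_interruptible() with wake_up() calls from its interrupt and transmit paths. A sketch of that producer/consumer handshake, with hypothetical demo_uart names:

/* Hypothetical sketch of the wait_event_interruptible()/wake_up() handshake. */
#include <linux/types.h>
#include <linux/wait.h>
#include <linux/kthread.h>

struct demo_uart {
        wait_queue_head_t wq;           /* cf. mrst_max3110.c:61 */
        bool work_pending;
};

static void demo_uart_init(struct demo_uart *max)
{
        init_waitqueue_head(&max->wq);  /* cf. mrst_max3110.c:821, at probe time */
        max->work_pending = false;
}

/* Consumer: the main thread blocks until there is work or it must stop. */
static int demo_main_thread(void *arg)
{
        struct demo_uart *max = arg;

        while (!kthread_should_stop()) {
                wait_event_interruptible(max->wq,
                                         max->work_pending ||
                                         kthread_should_stop());
                max->work_pending = false;
                /* transfer data here */
        }
        return 0;
}

/* Producer (e.g. IRQ handler): flag work and wake the thread. */
static void demo_kick(struct demo_uart *max)
{
        max->work_pending = true;
        wake_up(&max->wq);              /* cf. mrst_max3110.c:203/339/448 */
}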
/drivers/gpu/host1x/
intr.c
124 wait_queue_head_t *wq = waiter->data; local
125 wake_up(wq);
130 wait_queue_head_t *wq = waiter->data; local
131 wake_up_interruptible(wq);
/drivers/i2c/busses/
i2c-ibm_iic.h
48 wait_queue_head_t wq; member in struct:ibm_iic_private
/drivers/media/platform/vsp1/
vsp1_video.h
69 wait_queue_head_t wq; member in struct:vsp1_pipeline
/drivers/pci/hotplug/
shpchp_core.c
131 slot->wq = alloc_workqueue("shpchp-%d", 0, 0, slot->number);
132 if (!slot->wq) {
168 destroy_workqueue(slot->wq);
189 destroy_workqueue(slot->wq);
/drivers/staging/nvec/
nvec.h
122 * @wq: The work queue in which @rx_work and @tx_work are executed
149 struct workqueue_struct *wq; member in struct:nvec_chip
/drivers/staging/rtl8192e/rtl8192e/
rtl_ps.c
69 queue_delayed_work_rsl(priv->rtllib->wq,
114 queue_delayed_work_rsl(priv->rtllib->wq,
116 queue_delayed_work_rsl(priv->rtllib->wq,
207 queue_work_rsl(priv->rtllib->wq,
/drivers/gpu/host1x/hw/
cdma_hw.c
243 timeout.wq);
296 INIT_DELAYED_WORK(&cdma->timeout.wq, cdma_timeout_handler);
308 cancel_delayed_work(&cdma->timeout.wq);
/drivers/infiniband/hw/cxgb3/
iwch.c
70 cxio_disable_wq_db(&qhp->wq);
79 ring_doorbell(qhp->rhp->rdev.ctrl_qp.doorbell, qhp->wq.qpid);
80 cxio_enable_wq_db(&qhp->wq);
iwch_provider.c
872 remove_handle(rhp, &rhp->qpidr, qhp->wq.qpid);
879 cxio_destroy_qp(&rhp->rdev, &qhp->wq,
883 ib_qp, qhp->wq.qpid, qhp);
935 * Kernel users need more wq space for fastreg WRs which can take
947 qhp->wq.size_log2 = ilog2(wqsize);
948 qhp->wq.rq_size_log2 = ilog2(rqsize);
949 qhp->wq.sq_size_log2 = ilog2(sqsize);
950 if (cxio_create_qp(&rhp->rdev, !udata, &qhp->wq,
987 if (insert_handle(rhp, &rhp->qpidr, qhp, qhp->wq.qpid)) {
988 cxio_destroy_qp(&rhp->rdev, &qhp->wq,
[all...]
/drivers/md/bcache/
movinggc.c
116 continue_at(cl, write_moving_finish, op->wq);
126 continue_at(cl, write_moving, io->op.wq);
161 io->op.wq = c->moving_gc_wq;
/drivers/staging/android/
sync_debug.c
161 spin_lock_irqsave(&fence->wq.lock, flags);
162 list_for_each_entry(pos, &fence->wq.task_list, task_list) {
172 spin_unlock_irqrestore(&fence->wq.lock, flags);
/drivers/sbus/char/
bbc_i2c.c
129 add_wait_queue(&bp->wq, &wait);
134 bp->wq,
143 remove_wait_queue(&bp->wq, &wait);
279 wake_up_interruptible(&bp->wq);
317 init_waitqueue_head(&bp->wq);
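
bbc_i2c.c uses the explicit wait-queue-entry form: add_wait_queue() before sleeping, remove_wait_queue() afterwards, and wake_up_interruptible() from the interrupt side. A sketch of that pattern with a hypothetical demo_bus structure and a simple completion flag:

/* Hypothetical sketch of explicit wait-queue entry management. */
#include <linux/wait.h>
#include <linux/sched.h>
#include <linux/jiffies.h>
#include <linux/errno.h>

struct demo_bus {
        wait_queue_head_t wq;           /* cf. bbc_i2c.c:317 */
        int waiting;
};

/* Wait up to @ms milliseconds for the interrupt handler to clear ->waiting. */
static int demo_wait_for_irq(struct demo_bus *bp, unsigned int ms)
{
        DECLARE_WAITQUEUE(wait, current);
        long timeout = msecs_to_jiffies(ms);

        bp->waiting = 1;
        add_wait_queue(&bp->wq, &wait);         /* cf. bbc_i2c.c:129 */
        while (timeout > 0) {
                set_current_state(TASK_INTERRUPTIBLE);
                if (!bp->waiting)
                        break;
                timeout = schedule_timeout(timeout);
        }
        __set_current_state(TASK_RUNNING);
        remove_wait_queue(&bp->wq, &wait);      /* cf. bbc_i2c.c:143 */

        return bp->waiting ? -ETIMEDOUT : 0;
}

/* Interrupt handler side: mark completion and wake any sleeper. */
static void demo_irq_done(struct demo_bus *bp)
{
        bp->waiting = 0;
        wake_up_interruptible(&bp->wq);         /* cf. bbc_i2c.c:279 */
}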
/drivers/infiniband/hw/ipath/
ipath_ruc.c
170 struct ipath_rwq *wq; local
193 wq = rq->wq;
194 tail = wq->tail;
199 if (unlikely(tail == wq->head)) {
213 wq->tail = tail;
224 n = wq->head;
ipath_ud.c
60 struct ipath_rwq *wq; local
123 wq = rq->wq;
124 tail = wq->tail;
128 if (unlikely(tail == wq->head)) {
148 wq->tail = tail;
157 n = wq->head;
/drivers/mtd/chips/
cfi_cmdset_0002.c
666 init_waitqueue_head(&(cfi->chips[i].wq));
872 add_wait_queue(&chip->wq, &wait);
875 remove_wait_queue(&chip->wq, &wait);
907 wake_up(&chip->wq);
1022 add_wait_queue(&chip->wq, &wait);
1025 remove_wait_queue(&chip->wq, &wait);
1222 add_wait_queue(&chip->wq, &wait);
1227 remove_wait_queue(&chip->wq, &wait);
1241 wake_up(&chip->wq);
1619 add_wait_queue(&chip->wq,
[all...]
/drivers/iommu/
amd_iommu_v2.c
59 wait_queue_head_t wq; /* To wait for count == 0 */ member in struct:pasid_state
74 wait_queue_head_t wq; member in struct:device_state
158 wake_up(&dev_state->wq);
165 prepare_to_wait(&dev_state->wq, &wait, TASK_UNINTERRUPTIBLE);
168 finish_wait(&dev_state->wq, &wait);
284 wake_up(&pasid_state->wq);
292 prepare_to_wait(&pasid_state->wq, &wait, TASK_UNINTERRUPTIBLE);
299 finish_wait(&pasid_state->wq, &wait);
662 init_waitqueue_head(&pasid_state->wq);
786 init_waitqueue_head(&dev_state->wq);
[all...]
/drivers/mtd/lpddr/
lpddr_cmds.c
101 init_waitqueue_head(&chip->wq);
159 add_wait_queue(&chip->wq, &wait);
162 remove_wait_queue(&chip->wq, &wait);
258 add_wait_queue(&chip->wq, &wait);
261 remove_wait_queue(&chip->wq, &wait);
325 add_wait_queue(&chip->wq, &wait);
328 remove_wait_queue(&chip->wq, &wait);
351 wake_up(&chip->wq);
365 wake_up(&chip->wq);
386 wake_up(&chip->wq);
[all...]
/drivers/infiniband/hw/mlx5/
mr.c
356 queue_delayed_work(cache->wq, &ent->dwork,
361 queue_delayed_work(cache->wq, &ent->dwork,
364 queue_work(cache->wq, &ent->work);
372 queue_work(cache->wq, &ent->work);
374 queue_delayed_work(cache->wq, &ent->dwork, 300 * HZ);
422 queue_work(cache->wq, &ent->work);
427 queue_work(cache->wq, &ent->work);
460 queue_work(cache->wq, &ent->work);
557 cache->wq = create_singlethread_workqueue("mkey_cache");
558 if (!cache->wq) {
[all...]
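
The mlx5 MR-cache matches combine create_singlethread_workqueue() with delayed work that re-arms itself (note the 300 * HZ back-off at mr.c:374). A compact sketch of such a self-rearming refill job, with hypothetical demo_cache names; the single-threaded queue keeps all refill passes serialized.

/* Hypothetical sketch of a self-rearming cache-refill job. */
#include <linux/kernel.h>
#include <linux/workqueue.h>
#include <linux/jiffies.h>
#include <linux/errno.h>

struct demo_cache {
        struct workqueue_struct *wq;    /* cf. mr.c:557 */
        struct delayed_work dwork;
};

static void demo_refill(struct work_struct *work)
{
        struct demo_cache *cache = container_of(to_delayed_work(work),
                                                struct demo_cache, dwork);

        /* top the cache up here, then check again much later (cf. mr.c:374) */
        queue_delayed_work(cache->wq, &cache->dwork, 300 * HZ);
}

static int demo_cache_init(struct demo_cache *cache)
{
        cache->wq = create_singlethread_workqueue("demo_mkey_cache");
        if (!cache->wq)
                return -ENOMEM;
        INIT_DELAYED_WORK(&cache->dwork, demo_refill);
        queue_delayed_work(cache->wq, &cache->dwork, 0);        /* immediate first pass */
        return 0;
}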
