Searched defs:wq (Results 1 - 25 of 138) sorted by relevance


/drivers/gpu/drm/
drm_flip_work.c
49 * @wq: the work-queue to run the queued work on
57 struct workqueue_struct *wq)
62 queue_work(wq, &work->worker);
56 drm_flip_work_commit(struct drm_flip_work *work, struct workqueue_struct *wq) argument
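The drm_flip_work.c hits show flip work being committed onto a caller-supplied workqueue via queue_work(). A minimal sketch of the same pattern with a plain work item; the names flip_wq, flip_commit_work and flip_commit_fn are hypothetical, not taken from drm_flip_work.c:

    #include <linux/errno.h>
    #include <linux/workqueue.h>

    static struct workqueue_struct *flip_wq;        /* hypothetical driver workqueue */
    static struct work_struct flip_commit_work;

    static void flip_commit_fn(struct work_struct *work)
    {
            /* run the queued flip work in process context */
    }

    static int flip_example_init(void)
    {
            flip_wq = alloc_workqueue("flip_example", 0, 0);
            if (!flip_wq)
                    return -ENOMEM;
            INIT_WORK(&flip_commit_work, flip_commit_fn);
            /* like drm_flip_work_commit(): hand the work to the chosen wq */
            queue_work(flip_wq, &flip_commit_work);
            return 0;
    }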
/drivers/infiniband/hw/cxgb3/
iwch_cq.c
49 struct t3_wq *wq; local
62 wq = NULL;
65 wq = &(qhp->wq);
67 ret = cxio_poll_cq(wq, &(chp->cq), &cqe, &cqe_flushed, &cookie,
190 if (wq)
/drivers/md/bcache/
request.h
8 struct workqueue_struct *wq; member in struct:data_insert_op
closure.h
151 struct workqueue_struct *wq; member in struct:closure::__anon1970::__anon1971
239 struct workqueue_struct *wq)
244 cl->wq = wq;
251 struct workqueue_struct *wq = cl->wq; local
252 if (wq) {
254 BUG_ON(!queue_work(wq, &cl->work));
311 * of @wq (or, if @wq i
238 set_closure_fn(struct closure *cl, closure_fn *fn, struct workqueue_struct *wq) argument
378 closure_call(struct closure *cl, closure_fn fn, struct workqueue_struct *wq, struct closure *parent) argument
[all...]
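The closure.h hits show the bcache pattern of storing a workqueue pointer in the closure and, on continuation, either queueing the embedded work item onto it or running the function inline when no workqueue was set. A minimal sketch of that dispatch decision, using a simplified hypothetical structure (my_closure, my_closure_run) rather than bcache's real struct closure:

    #include <linux/bug.h>
    #include <linux/workqueue.h>

    struct my_closure {                     /* simplified stand-in for struct closure */
            struct work_struct work;
            struct workqueue_struct *wq;    /* NULL means "run synchronously" */
    };

    static void my_closure_run(struct my_closure *cl, work_func_t fn)
    {
            struct workqueue_struct *wq = cl->wq;

            if (wq) {
                    INIT_WORK(&cl->work, fn);
                    /* as in closure.h line 254: queueing must not fail here */
                    BUG_ON(!queue_work(wq, &cl->work));
            } else {
                    fn(&cl->work);          /* no wq set: call the function directly */
            }
    }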
/drivers/scsi/fnic/
vnic_wq_copy.c
25 void vnic_wq_copy_enable(struct vnic_wq_copy *wq) argument
27 iowrite32(1, &wq->ctrl->enable);
30 int vnic_wq_copy_disable(struct vnic_wq_copy *wq) argument
34 iowrite32(0, &wq->ctrl->enable);
38 if (!(ioread32(&wq->ctrl->running)))
45 wq->index, ioread32(&wq->ctrl->fetch_index),
46 ioread32(&wq->ctrl->posted_index));
51 void vnic_wq_copy_clean(struct vnic_wq_copy *wq, argument
52 void (*q_clean)(struct vnic_wq_copy *wq,
69 vnic_wq_copy_free(struct vnic_wq_copy *wq) argument
78 vnic_wq_copy_alloc(struct vnic_dev *vdev, struct vnic_wq_copy *wq, unsigned int index, unsigned int desc_count, unsigned int desc_size) argument
102 vnic_wq_copy_init(struct vnic_wq_copy *wq, unsigned int cq_index, unsigned int error_interrupt_enable, unsigned int error_interrupt_offset) argument
[all...]
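The vnic_wq_copy enable/disable hits write an "enable" control register and then poll a "running" register until the hardware engine stops. A sketch of that poll loop under an assumed MMIO layout; struct my_wq_ctrl, the iteration count and the delay are all placeholders, not the fnic register map:

    #include <linux/delay.h>
    #include <linux/errno.h>
    #include <linux/io.h>
    #include <linux/types.h>

    struct my_wq_ctrl {                     /* assumed MMIO layout */
            u32 enable;
            u32 running;
    };

    static int my_wq_disable(struct my_wq_ctrl __iomem *ctrl)
    {
            int wait;

            iowrite32(0, &ctrl->enable);

            /* wait for the queue engine to report that it has stopped */
            for (wait = 0; wait < 100; wait++) {
                    if (!ioread32(&ctrl->running))
                            return 0;
                    udelay(10);
            }
            return -ETIMEDOUT;
    }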
vnic_wq.c
27 static int vnic_wq_alloc_bufs(struct vnic_wq *wq) argument
31 unsigned int i, j, count = wq->ring.desc_count;
34 vdev = wq->vdev;
37 wq->bufs[i] = kzalloc(VNIC_WQ_BUF_BLK_SZ, GFP_ATOMIC);
38 if (!wq->bufs[i]) {
45 buf = wq->bufs[i];
48 buf->desc = (u8 *)wq->ring.descs +
49 wq->ring.desc_size * buf->index;
51 buf->next = wq->bufs[0];
54 buf->next = wq
67 vnic_wq_free(struct vnic_wq *wq) argument
85 vnic_wq_alloc(struct vnic_dev *vdev, struct vnic_wq *wq, unsigned int index, unsigned int desc_count, unsigned int desc_size) argument
114 vnic_wq_init(struct vnic_wq *wq, unsigned int cq_index, unsigned int error_interrupt_enable, unsigned int error_interrupt_offset) argument
131 vnic_wq_error_status(struct vnic_wq *wq) argument
136 vnic_wq_enable(struct vnic_wq *wq) argument
141 vnic_wq_disable(struct vnic_wq *wq) argument
159 vnic_wq_clean(struct vnic_wq *wq, void (*buf_clean)(struct vnic_wq *wq, struct vnic_wq_buf *buf)) argument
[all...]
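vnic_wq_alloc_bufs() allocates buffer blocks with kzalloc(), points each buffer's desc at its slot in the descriptor ring, and chains buf->next so the last buffer wraps back to the first. A simplified sketch of that ring linking; my_wq_buf, the block size and the helper name are assumptions, not the fnic definitions:

    #include <linux/errno.h>
    #include <linux/kernel.h>
    #include <linux/slab.h>
    #include <linux/types.h>

    #define MY_BUFS_PER_BLK 64              /* assumed entries per block */

    struct my_wq_buf {
            void *desc;                     /* points into the descriptor ring */
            struct my_wq_buf *next;
            unsigned int index;
    };

    static int my_wq_alloc_bufs(struct my_wq_buf **blks, void *descs,
                                size_t desc_size, unsigned int count)
    {
            unsigned int i;
            struct my_wq_buf *buf;

            for (i = 0; i < DIV_ROUND_UP(count, MY_BUFS_PER_BLK); i++) {
                    blks[i] = kcalloc(MY_BUFS_PER_BLK, sizeof(*buf), GFP_ATOMIC);
                    if (!blks[i])
                            return -ENOMEM; /* caller frees what was allocated */
            }

            for (i = 0; i < count; i++) {
                    buf = &blks[i / MY_BUFS_PER_BLK][i % MY_BUFS_PER_BLK];
                    buf->index = i;
                    buf->desc = (u8 *)descs + desc_size * i;
                    /* the last buffer wraps back to the first, closing the ring */
                    buf->next = (i == count - 1) ?
                            &blks[0][0] :
                            &blks[(i + 1) / MY_BUFS_PER_BLK][(i + 1) % MY_BUFS_PER_BLK];
            }
            return 0;
    }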
vnic_wq_copy.h
36 static inline unsigned int vnic_wq_copy_desc_avail(struct vnic_wq_copy *wq) argument
38 return wq->ring.desc_avail;
41 static inline unsigned int vnic_wq_copy_desc_in_use(struct vnic_wq_copy *wq) argument
43 return wq->ring.desc_count - 1 - wq->ring.desc_avail;
46 static inline void *vnic_wq_copy_next_desc(struct vnic_wq_copy *wq) argument
48 struct fcpio_host_req *desc = wq->ring.descs;
49 return &desc[wq->to_use_index];
52 static inline void vnic_wq_copy_post(struct vnic_wq_copy *wq) argument
55 ((wq
69 vnic_wq_copy_desc_process(struct vnic_wq_copy *wq, u16 index) argument
83 vnic_wq_copy_service(struct vnic_wq_copy *wq, u16 completed_index, void (*q_service)(struct vnic_wq_copy *wq, struct fcpio_host_req *wq_desc)) argument
[all...]
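The vnic_wq_copy.h helpers report how many descriptors are free versus in use; because one slot is reserved so that full and empty states stay distinguishable, in-use is desc_count - 1 - desc_avail. A minimal sketch with a hypothetical struct my_ring:

    struct my_ring {
            unsigned int desc_count;        /* total slots in the ring */
            unsigned int desc_avail;        /* slots currently free */
    };

    static inline unsigned int my_desc_avail(const struct my_ring *ring)
    {
            return ring->desc_avail;
    }

    static inline unsigned int my_desc_in_use(const struct my_ring *ring)
    {
            /* one slot is kept unused so full and empty are distinguishable */
            return ring->desc_count - 1 - ring->desc_avail;
    }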
/drivers/net/ethernet/cisco/enic/
vnic_wq.c
30 static int vnic_wq_alloc_bufs(struct vnic_wq *wq) argument
33 unsigned int i, j, count = wq->ring.desc_count;
37 wq->bufs[i] = kzalloc(VNIC_WQ_BUF_BLK_SZ(count), GFP_ATOMIC);
38 if (!wq->bufs[i])
43 buf = wq->bufs[i];
46 buf->desc = (u8 *)wq->ring.descs +
47 wq->ring.desc_size * buf->index;
49 buf->next = wq->bufs[0];
52 buf->next = wq->bufs[i + 1];
60 wq
65 vnic_wq_free(struct vnic_wq *wq) argument
84 vnic_wq_alloc(struct vnic_dev *vdev, struct vnic_wq *wq, unsigned int index, unsigned int desc_count, unsigned int desc_size) argument
113 vnic_wq_init_start(struct vnic_wq *wq, unsigned int cq_index, unsigned int fetch_index, unsigned int posted_index, unsigned int error_interrupt_enable, unsigned int error_interrupt_offset) argument
136 vnic_wq_init(struct vnic_wq *wq, unsigned int cq_index, unsigned int error_interrupt_enable, unsigned int error_interrupt_offset) argument
145 vnic_wq_error_status(struct vnic_wq *wq) argument
150 vnic_wq_enable(struct vnic_wq *wq) argument
155 vnic_wq_disable(struct vnic_wq *wq) argument
173 vnic_wq_clean(struct vnic_wq *wq, void (*buf_clean)(struct vnic_wq *wq, struct vnic_wq_buf *buf)) argument
[all...]
/drivers/scsi/libsas/
sas_event.c
59 struct workqueue_struct *wq = ha->core.shost->work_q; local
67 drain_workqueue(wq);
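The sas_event.c hit drains the SAS host's work queue so that queued events, including work they re-queue, complete before returning. A short sketch of the same call with the host's work_q replaced by a hypothetical my_wq parameter:

    #include <linux/workqueue.h>

    static void my_flush_events(struct workqueue_struct *my_wq)
    {
            /* waits for queued work and for work re-queued while draining,
             * whereas flush_workqueue() only covers work queued so far */
            drain_workqueue(my_wq);
    }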
/drivers/char/tpm/
tpm_ibmvtpm.h
45 wait_queue_head_t wq; member in struct:ibmvtpm_dev
/drivers/gpu/host1x/
cdma.h
53 struct delayed_work wq; /* work queue */ member in struct:buffer_timeout
78 struct buffer_timeout timeout; /* channel's timeout state/wq */
intr.c
124 wait_queue_head_t *wq = waiter->data; local
125 wake_up(wq);
130 wait_queue_head_t *wq = waiter->data; local
131 wake_up_interruptible(wq);
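The host1x intr.c hits show a waiter completion handler whose data pointer is a wait_queue_head_t, woken with wake_up() or wake_up_interruptible(). A minimal sketch of that handoff between a signalling context and a sleeper; my_syncpt_wq, my_syncpt_done and the function names are hypothetical:

    #include <linux/wait.h>

    static DECLARE_WAIT_QUEUE_HEAD(my_syncpt_wq);
    static bool my_syncpt_done;

    /* completion side, e.g. called from an interrupt handler */
    static void my_syncpt_signal(void)
    {
            my_syncpt_done = true;
            wake_up_interruptible(&my_syncpt_wq);
    }

    /* waiting side: sleeps until my_syncpt_signal() runs or a signal arrives */
    static int my_syncpt_wait(void)
    {
            return wait_event_interruptible(my_syncpt_wq, my_syncpt_done);
    }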
/drivers/infiniband/hw/ipath/
ipath_srq.c
52 struct ipath_rwq *wq; local
68 wq = srq->rq.wq;
69 next = wq->head + 1;
72 if (next == wq->tail) {
79 wqe = get_rwqe_ptr(&srq->rq, wq->head);
86 wq->head = next;
139 srq->rq.wq = vmalloc_user(sizeof(struct ipath_rwq) + srq->rq.size * sz);
140 if (!srq->rq.wq) {
156 srq->rq.wq);
220 struct ipath_rwq *wq; local
[all...]
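The ipath_srq.c hits implement a circular receive queue on the producer side: compute next = head + 1 (wrapping at size), treat next == tail as full, fill the WQE at head, then publish the new head. A sketch of that check with a simplified ring; struct my_rwq and its fields are assumptions, not struct ipath_rwq:

    struct my_rwq {
            unsigned int size;              /* number of slots */
            unsigned int head;              /* producer index */
            unsigned int tail;              /* consumer index */
    };

    /* returns the slot to fill, or -1 if the ring is full */
    static int my_rwq_post(struct my_rwq *wq)
    {
            unsigned int slot = wq->head;
            unsigned int next = slot + 1;

            if (next >= wq->size)
                    next = 0;               /* wrap around */
            if (next == wq->tail)
                    return -1;              /* full: one slot is kept empty on purpose */

            /* caller fills the WQE at 'slot', then the new head is published */
            wq->head = next;
            return slot;
    }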
ipath_ruc.c
170 struct ipath_rwq *wq; local
193 wq = rq->wq;
194 tail = wq->tail;
199 if (unlikely(tail == wq->head)) {
213 wq->tail = tail;
224 n = wq->head;
ipath_ud.c
60 struct ipath_rwq *wq; local
123 wq = rq->wq;
124 tail = wq->tail;
128 if (unlikely(tail == wq->head)) {
148 wq->tail = tail;
157 n = wq->head;
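The ipath_ruc.c and ipath_ud.c hits are the consumer side of the same ring: tail == head means the queue is empty, otherwise the WQE at tail is consumed and tail advanced with wrap-around. A matching sketch for the hypothetical my_rwq structure assumed above:

    /* returns the slot to consume, or -1 if the ring is empty */
    static int my_rwq_get(struct my_rwq *wq)
    {
            unsigned int slot = wq->tail;

            if (slot == wq->head)
                    return -1;              /* empty: nothing posted */

            /* caller processes the WQE at 'slot', then the slot is released */
            wq->tail = (slot + 1 >= wq->size) ? 0 : slot + 1;
            return slot;
    }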
/drivers/infiniband/hw/qib/
qib_srq.c
52 struct qib_rwq *wq; local
68 wq = srq->rq.wq;
69 next = wq->head + 1;
72 if (next == wq->tail) {
79 wqe = get_rwqe_ptr(&srq->rq, wq->head);
86 wq->head = next;
136 srq->rq.wq = vmalloc_user(sizeof(struct qib_rwq) + srq->rq.size * sz);
137 if (!srq->rq.wq) {
152 srq->rq.wq);
216 struct qib_rwq *wq; local
[all...]
qib_ruc.c
142 struct qib_rwq *wq; local
165 wq = rq->wq;
166 tail = wq->tail;
170 if (unlikely(tail == wq->head)) {
184 wq->tail = tail;
200 n = wq->head;
/drivers/net/ethernet/qlogic/qlcnic/
qlcnic_dcb.h
39 struct workqueue_struct *wq; member in struct:qlcnic_dcb
/drivers/gpu/drm/msm/
msm_drv.h
95 struct workqueue_struct *wq; member in struct:msm_drm_private
134 /* callback from wq once fence has passed: */
/drivers/gpu/drm/omapdrm/
omap_drv.h
106 struct workqueue_struct *wq; member in struct:omap_drm_private
/drivers/gpu/drm/tilcdc/
tilcdc_drv.h
77 struct workqueue_struct *wq; member in struct:tilcdc_drm_private
/drivers/hid/
hid-elo.c
35 static struct workqueue_struct *wq; variable in typeref:struct:workqueue_struct
175 queue_delayed_work(wq, &priv->work, ELO_PERIODIC_READ_INTERVAL);
248 queue_delayed_work(wq, &priv->work, ELO_PERIODIC_READ_INTERVAL);
262 flush_workqueue(wq);
286 wq = create_singlethread_workqueue("elousb");
287 if (!wq)
292 destroy_workqueue(wq);
301 destroy_workqueue(wq);
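hid-elo.c keeps one module-wide workqueue: created in module init with create_singlethread_workqueue(), fed with queue_delayed_work() for periodic reads, flushed and destroyed on teardown (or destroyed on the init error path). A compressed sketch of that lifecycle; my_work, my_periodic and MY_INTERVAL are placeholders, not the driver's names:

    #include <linux/errno.h>
    #include <linux/jiffies.h>
    #include <linux/module.h>
    #include <linux/workqueue.h>

    #define MY_INTERVAL msecs_to_jiffies(1000)      /* placeholder period */

    static struct workqueue_struct *wq;
    static struct delayed_work my_work;

    static void my_periodic(struct work_struct *work)
    {
            /* do the periodic read, then re-arm the delayed work */
            queue_delayed_work(wq, &my_work, MY_INTERVAL);
    }

    static int __init my_init(void)
    {
            wq = create_singlethread_workqueue("elousb_example");
            if (!wq)
                    return -ENOMEM;
            INIT_DELAYED_WORK(&my_work, my_periodic);
            queue_delayed_work(wq, &my_work, MY_INTERVAL);
            return 0;
    }

    static void __exit my_exit(void)
    {
            cancel_delayed_work_sync(&my_work);
            flush_workqueue(wq);
            destroy_workqueue(wq);
    }

    module_init(my_init);
    module_exit(my_exit);
    MODULE_LICENSE("GPL");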
/drivers/input/
keycombo.c
30 struct workqueue_struct *wq; member in struct:keycombo_state
97 if (queue_delayed_work(state->wq, &state->key_down_work,
103 queue_work(state->wq, &state->key_up_work);
202 state->wq = alloc_ordered_workqueue("keycombo", 0);
203 if (!state->wq)
238 destroy_workqueue(state->wq);
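keycombo.c allocates an ordered workqueue with alloc_ordered_workqueue(), which runs at most one item at a time in submission order, so the key-down delayed work and the key-up work cannot race each other. A small sketch of that choice; the state structure and setup helper are simplified and hypothetical, only the field names mirror the snippet:

    #include <linux/errno.h>
    #include <linux/workqueue.h>

    struct my_keycombo_state {
            struct workqueue_struct *wq;
            struct delayed_work key_down_work;
            struct work_struct key_up_work;
    };

    static int my_keycombo_setup(struct my_keycombo_state *state,
                                 work_func_t down_fn, work_func_t up_fn,
                                 unsigned long delay)
    {
            /* ordered: items execute one at a time, in queueing order */
            state->wq = alloc_ordered_workqueue("keycombo_example", 0);
            if (!state->wq)
                    return -ENOMEM;

            INIT_DELAYED_WORK(&state->key_down_work, down_fn);
            INIT_WORK(&state->key_up_work, up_fn);

            queue_delayed_work(state->wq, &state->key_down_work, delay);
            return 0;
    }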
/drivers/media/i2c/
saa7110.c
63 wait_queue_head_t wq; member in struct:saa7110
199 prepare_to_wait(&decoder->wq, &wait, TASK_UNINTERRUPTIBLE);
201 finish_wait(&decoder->wq, &wait);
234 prepare_to_wait(&decoder->wq, &wait, TASK_UNINTERRUPTIBLE);
236 finish_wait(&decoder->wq, &wait);
426 init_waitqueue_head(&decoder->wq);
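saa7110.c sleeps on its per-device wait queue using the prepare_to_wait()/schedule_timeout()/finish_wait() idiom in TASK_UNINTERRUPTIBLE state, after initialising the head with init_waitqueue_head(). A minimal sketch of that uninterruptible timed sleep; my_dev, my_dev_settle and the millisecond parameter are placeholders:

    #include <linux/jiffies.h>
    #include <linux/sched.h>
    #include <linux/wait.h>

    struct my_dev {
            wait_queue_head_t wq;
    };

    static void my_dev_init(struct my_dev *dev)
    {
            init_waitqueue_head(&dev->wq);
    }

    /* sleep for up to 'ms' milliseconds, or until someone calls wake_up(&dev->wq) */
    static void my_dev_settle(struct my_dev *dev, unsigned int ms)
    {
            DEFINE_WAIT(wait);

            prepare_to_wait(&dev->wq, &wait, TASK_UNINTERRUPTIBLE);
            schedule_timeout(msecs_to_jiffies(ms));
            finish_wait(&dev->wq, &wait);
    }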
/drivers/media/platform/vsp1/
vsp1_video.h
69 wait_queue_head_t wq; member in struct:vsp1_pipeline

