Searched defs:fence (Results 1 - 25 of 34) sorted by relevance


/drivers/gpu/drm/vmwgfx/
vmwgfx_reg.h
47 __le32 fence; member in struct:svga_fifo_cmd_fence
vmwgfx_execbuf.c
64 * @fence_flags: Fence flags to be or'ed with any other fence flags for
366 * issue a dummy occlusion query wait used as a query barrier. When the fence
370 * that fence, we can do an asynchronous unpin now, and be sure that the
371 * old query buffer won't be moved until the fence has signaled.
991 * vmw_execbuf_fence_commands - create and submit a command stream fence
993 * Creates a fence object and submits a command stream marker.
995 * It is then safe to fence buffers with a NULL pointer.
1040 * vmw_execbuf_copy_fence_user - copy fence object information to
1045 * @ret: Return value from fence object creation.
1048 * @fence
1060 vmw_execbuf_copy_fence_user(struct vmw_private *dev_priv, struct vmw_fpriv *vmw_fp, int ret, struct drm_vmw_fence_rep __user *user_fence_rep, struct vmw_fence_obj *fence, uint32_t fence_handle) argument
1116 struct vmw_fence_obj *fence = NULL; local
1281 struct vmw_fence_obj *fence; local
[all...]
vmwgfx_fence.c
53 struct vmw_fence_obj fence; member in struct:vmw_user_fence
57 * struct vmw_event_fence_action - fence action that delivers a drm event.
60 * @action: A struct vmw_fence_action to hook up to a fence.
61 * @fence: A referenced pointer to the fence to keep it alive while @action
67 * current time tv_sec val when the fence signals.
69 * be assigned the current time tv_usec val when the fence signals.
76 struct vmw_fence_obj *fence; member in struct:vmw_event_fence_action
87 * a) When a new fence seqno has been submitted by the fifo code.
90 * irq is received. When the last fence waite
107 struct vmw_fence_obj *fence = local
209 vmw_fence_obj_init(struct vmw_fence_manager *fman, struct vmw_fence_obj *fence, u32 seqno, uint32_t mask, void (*destroy) (struct vmw_fence_obj *fence)) argument
242 vmw_fence_obj_reference(struct vmw_fence_obj *fence) argument
260 struct vmw_fence_obj *fence = *fence_p; local
315 struct vmw_fence_obj *fence; local
354 vmw_fence_goal_check_locked(struct vmw_fence_obj *fence) argument
377 struct vmw_fence_obj *fence, *next_fence; local
420 vmw_fence_obj_signaled(struct vmw_fence_obj *fence, uint32_t flags) argument
445 vmw_fence_obj_wait(struct vmw_fence_obj *fence, uint32_t flags, bool lazy, bool interruptible, unsigned long timeout) argument
479 vmw_fence_obj_flush(struct vmw_fence_obj *fence) argument
486 vmw_fence_destroy(struct vmw_fence_obj *fence) argument
504 struct vmw_fence_obj *fence; local
534 vmw_user_fence_destroy(struct vmw_fence_obj *fence) argument
553 struct vmw_fence_obj *fence = &ufence->fence; local
644 struct vmw_fence_obj *fence = local
689 struct vmw_fence_obj *fence; local
747 struct vmw_fence_obj *fence; local
900 vmw_fence_obj_add_action(struct vmw_fence_obj *fence, struct vmw_fence_action *action) argument
955 vmw_event_fence_action_queue(struct drm_file *file_priv, struct vmw_fence_obj *fence, struct drm_pending_event *event, uint32_t *tv_sec, uint32_t *tv_usec, bool interruptible) argument
996 vmw_event_fence_action_create(struct drm_file *file_priv, struct vmw_fence_obj *fence, uint32_t flags, uint64_t user_data, bool interruptible) argument
1068 struct vmw_fence_obj *fence = NULL; local
[all...]
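Read together, the vmw_fence_obj_signaled() and vmw_fence_obj_wait() signatures above imply the usual poll-then-wait idiom. A minimal sketch under that assumption (the DRM_VMW_FENCE_FLAG_EXEC flag and the 3 * HZ timeout are illustrative choices, not taken from the hits):

    /* fence is a referenced struct vmw_fence_obj * already in scope */
    int ret = 0;

    if (!vmw_fence_obj_signaled(fence, DRM_VMW_FENCE_FLAG_EXEC)) {
            /* lazy = true, interruptible = true */
            ret = vmw_fence_obj_wait(fence, DRM_VMW_FENCE_FLAG_EXEC,
                                     true, true, 3 * HZ);
    }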
vmwgfx_resource.c
892 * Create a fence object and fence the backup buffer.
896 struct vmw_fence_obj *fence; local
899 &fence, NULL);
900 ttm_eu_fence_buffer_objects(&val_list, fence);
901 if (likely(fence != NULL))
902 vmw_fence_obj_unreference(&fence);
945 struct vmw_fence_obj *fence; local
1008 * Create a fence object and fence th
[all...]
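The vmwgfx_resource.c hits at lines 896-902, combined with the note at line 995 that fencing buffers with a NULL pointer is safe, almost spell out the driver's fence-and-release idiom. A reconstructed sketch (dev_priv and val_list are assumed to be in scope; the leading NULL file_priv argument is an assumption, since the hits only show the trailing "&fence, NULL" arguments):

    struct vmw_fence_obj *fence;

    /* Create a fence object and submit a command stream marker. */
    (void) vmw_execbuf_fence_commands(NULL, dev_priv, &fence, NULL);
    /* Safe even if fence creation failed and fence is NULL. */
    ttm_eu_fence_buffer_objects(&val_list, fence);
    if (likely(fence != NULL))
            vmw_fence_obj_unreference(&fence);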
/drivers/gpu/drm/radeon/
radeon_benchmark.c
41 struct radeon_fence *fence = NULL; local
48 r = radeon_fence_create(rdev, &fence, radeon_copy_dma_ring_index(rdev));
53 fence);
56 r = radeon_fence_create(rdev, &fence, radeon_copy_blit_ring_index(rdev));
61 fence);
69 r = radeon_fence_wait(fence, false);
72 radeon_fence_unref(&fence);
78 if (fence)
79 radeon_fence_unref(&fence);
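The benchmark above walks the complete radeon fence lifecycle. Condensed into one sketch (rdev, saddr, daddr and size assumed in scope; radeon_copy_dma() is the copy entry point implied by the ring-index helper at line 48):

    struct radeon_fence *fence = NULL;
    int r;

    r = radeon_fence_create(rdev, &fence, radeon_copy_dma_ring_index(rdev));
    if (r)
            return r;
    /* The copy routine emits the fence once the transfer is queued
     * (see the r200.c hit below). */
    r = radeon_copy_dma(rdev, saddr, daddr,
                        size / RADEON_GPU_PAGE_SIZE, fence);
    if (!r)
            r = radeon_fence_wait(fence, false);   /* blocking wait */
    radeon_fence_unref(&fence);                    /* drop our reference */
    return r;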
r200.c
88 struct radeon_fence *fence)
122 if (fence) {
123 r = radeon_fence_emit(rdev, fence);
84 r200_copy_dma(struct radeon_device *rdev, uint64_t src_offset, uint64_t dst_offset, unsigned num_gpu_pages, struct radeon_fence *fence) argument
radeon_test.c
35 struct radeon_fence *fence = NULL; local
109 r = radeon_fence_create(rdev, &fence, RADEON_RING_TYPE_GFX_INDEX);
111 DRM_ERROR("Failed to create GTT->VRAM fence %d\n", i);
115 r = radeon_copy(rdev, gtt_addr, vram_addr, size / RADEON_GPU_PAGE_SIZE, fence);
121 r = radeon_fence_wait(fence, false);
123 DRM_ERROR("Failed to wait for GTT->VRAM fence %d\n", i);
127 radeon_fence_unref(&fence);
158 r = radeon_fence_create(rdev, &fence, RADEON_RING_TYPE_GFX_INDEX);
160 DRM_ERROR("Failed to create VRAM->GTT fence %d\n", i);
164 r = radeon_copy(rdev, vram_addr, gtt_addr, size / RADEON_GPU_PAGE_SIZE, fence);
[all...]
r300.c
176 struct radeon_fence *fence)
178 struct radeon_ring *ring = &rdev->ring[fence->ring];
202 /* Emit fence sequence & fire IRQ */
203 radeon_ring_write(ring, PACKET0(rdev->fence_drv[fence->ring].scratch_reg, 0));
204 radeon_ring_write(ring, fence->seq);
175 r300_fence_ring_emit(struct radeon_device *rdev, struct radeon_fence *fence) argument
radeon_cs.c
128 struct radeon_fence *fence = p->relocs[i].robj->tbo.sync_obj; local
129 if (!radeon_fence_signaled(fence)) {
130 sync_to_ring[fence->ring] = true;
140 if (!p->ib->fence->semaphore) {
141 r = radeon_semaphore_create(p->rdev, &p->ib->fence->semaphore);
149 radeon_semaphore_emit_signal(p->rdev, i, p->ib->fence->semaphore);
155 radeon_semaphore_emit_wait(p->rdev, p->ring, p->ib->fence->semaphore);
314 parser->ib->fence);
497 if (vm->fence) {
498 radeon_fence_unref(&vm->fence);
[all...]
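The radeon_cs.c hits outline how a submission synchronizes against fences on other rings: fences that are already signaled are skipped, and for each ring that is still busy a semaphore signal is emitted there while the submitting ring emits a matching wait. A reconstruction under those assumptions (sync_to_ring is the per-ring bool array filled by the loop at line 129; locking and ring reservation are elided):

    int i, r;

    if (!p->ib->fence->semaphore) {
            r = radeon_semaphore_create(p->rdev, &p->ib->fence->semaphore);
            if (r)
                    return r;
    }
    for (i = 0; i < RADEON_NUM_RINGS; ++i) {
            if (i == p->ring || !sync_to_ring[i])
                    continue;
            /* Ring i signals the semaphore when it reaches this point... */
            radeon_semaphore_emit_signal(p->rdev, i, p->ib->fence->semaphore);
            /* ...and our ring stalls until that signal arrives. */
            radeon_semaphore_emit_wait(p->rdev, p->ring,
                                       p->ib->fence->semaphore);
    }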
radeon_fence.c
64 int radeon_fence_emit(struct radeon_device *rdev, struct radeon_fence *fence) argument
69 if (fence->emitted) {
73 fence->seq = atomic_add_return(1, &rdev->fence_drv[fence->ring].seq);
74 if (!rdev->ring[fence->ring].ready)
78 radeon_fence_write(rdev, fence->seq, fence->ring);
80 radeon_fence_ring_emit(rdev, fence->ring, fence);
82 trace_radeon_fence_emit(rdev->ddev, fence
91 struct radeon_fence *fence; local
149 struct radeon_fence *fence; local
161 radeon_fence_create(struct radeon_device *rdev, struct radeon_fence **fence, int ring) argument
186 radeon_fence_signaled(struct radeon_fence *fence) argument
215 radeon_fence_wait(struct radeon_fence *fence, bool intr) argument
289 struct radeon_fence *fence; local
312 struct radeon_fence *fence; local
332 radeon_fence_ref(struct radeon_fence *fence) argument
338 radeon_fence_unref(struct radeon_fence **fence) argument
472 struct radeon_fence *fence; local
[all...]
radeon_pm.c
276 struct radeon_fence *fence; local
278 radeon_fence_create(rdev, &fence, radeon_ring_index(rdev, ring));
279 radeon_fence_emit(rdev, fence);
281 radeon_fence_wait(fence, false);
282 radeon_fence_unref(&fence);
radeon_ring.c
85 if (ib->fence && ib->fence->emitted) {
86 if (radeon_fence_signaled(ib->fence)) {
87 radeon_fence_unref(&ib->fence);
98 struct radeon_fence *fence; local
106 r = radeon_fence_create(rdev, &fence, ring);
108 dev_err(rdev->dev, "failed to create fence for new IB\n");
118 radeon_fence_unref(&fence);
124 if (rdev->ib_pool.ibs[idx].fence == NULL) {
134 (*ib)->fence
[all...]
r600_blit_kms.c
516 rdev->r600_blit.ring_size_common += 16; /* fence emit for VB IB */
518 rdev->r600_blit.ring_size_common += 16; /* fence emit for done copy */
641 radeon_fence_emit(rdev, rdev->r600_blit.vb_ib->fence);
724 void r600_blit_done_copy(struct radeon_device *rdev, struct radeon_fence *fence) argument
731 if (fence)
732 r = radeon_fence_emit(rdev, fence);
radeon_ttm.c
225 struct radeon_fence *fence; local
229 r = radeon_fence_create(rdev, &fence, radeon_copy_ring_index(rdev));
272 if (!fence->semaphore) {
273 r = radeon_semaphore_create(rdev, &fence->semaphore);
283 radeon_semaphore_emit_signal(rdev, i, fence->semaphore);
290 radeon_semaphore_emit_wait(rdev, radeon_copy_ring_index(rdev), fence->semaphore);
297 fence);
299 r = ttm_bo_move_accel_cleanup(bo, (void *)fence, NULL,
301 radeon_fence_unref(&fence);
ni.c
1121 struct radeon_fence *fence)
1123 struct radeon_ring *ring = &rdev->ring[fence->ring];
1124 u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
1140 radeon_ring_write(ring, fence->seq);
1146 struct radeon_ring *ring = &rdev->ring[ib->fence->ring];
1120 cayman_fence_ring_emit(struct radeon_device *rdev, struct radeon_fence *fence) argument
/drivers/gpu/drm/nouveau/
nouveau_channel.c
160 INIT_LIST_HEAD(&chan->fence.pending);
161 spin_lock_init(&chan->fence.lock);
361 struct nouveau_fence *fence = NULL; local
366 if (chan->fence.sequence != chan->fence.sequence_ack) {
367 ret = nouveau_fence_new(chan, &fence, true);
369 ret = nouveau_fence_wait(fence, false, false);
370 nouveau_fence_unref(&fence);
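The nouveau_channel.c hit is the channel-idle path: if work is outstanding (sequence != sequence_ack), emit one more fence and block on it. Reassembled from the lines above (error handling trimmed):

    struct nouveau_fence *fence = NULL;
    int ret;

    if (chan->fence.sequence != chan->fence.sequence_ack) {
            ret = nouveau_fence_new(chan, &fence, true);  /* emit = true */
            if (ret == 0) {
                    /* not lazy, not interruptible */
                    ret = nouveau_fence_wait(fence, false, false);
                    nouveau_fence_unref(&fence);
            }
    }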
nouveau_display.c
410 struct nouveau_fence *fence)
412 nouveau_bo_fence(new_bo, fence);
415 nouveau_bo_fence(old_bo, fence);
454 OUT_RING (chan, ++chan->fence.sequence);
481 struct nouveau_fence *fence; local
520 ret = nouveau_page_flip_emit(chan, old_bo, new_bo, s, &fence);
528 nouveau_page_flip_unreserve(old_bo, new_bo, fence);
529 nouveau_fence_unref(&fence);
408 nouveau_page_flip_unreserve(struct nouveau_bo *old_bo, struct nouveau_bo *new_bo, struct nouveau_fence *fence) argument
nouveau_fence.c
67 struct nouveau_fence *fence = local
70 nouveau_channel_ref(NULL, &fence->channel);
71 kfree(fence);
78 struct nouveau_fence *tmp, *fence; local
81 spin_lock(&chan->fence.lock);
84 if (likely(!list_empty(&chan->fence.pending))) {
88 sequence = atomic_read(&chan->fence.last_sequence_irq);
90 if (chan->fence.sequence_ack == sequence)
92 chan->fence.sequence_ack = sequence;
95 list_for_each_entry_safe(fence, tm
115 struct nouveau_fence *fence; local
134 nouveau_fence_channel(struct nouveau_fence *fence) argument
140 nouveau_fence_emit(struct nouveau_fence *fence) argument
180 nouveau_fence_work(struct nouveau_fence *fence, void (*work)(void *priv, bool signalled), void *priv) argument
201 struct nouveau_fence *fence = nouveau_fence(*sync_obj); local
211 struct nouveau_fence *fence = nouveau_fence(sync_obj); local
220 struct nouveau_fence *fence = nouveau_fence(sync_obj); local
337 struct nouveau_fence *fence = NULL; local
390 struct nouveau_fence *fence = NULL; local
441 nouveau_fence_sync(struct nouveau_fence *fence, struct nouveau_channel *wchan) argument
549 struct nouveau_fence *tmp, *fence; local
[all...]
nouveau_gem.c
276 validate_fini_list(struct list_head *list, struct nouveau_fence *fence) argument
284 nouveau_bo_fence(nvbo, fence);
299 validate_fini(struct validate_op *op, struct nouveau_fence* fence) argument
301 validate_fini_list(&op->vram_list, fence);
302 validate_fini_list(&op->gart_list, fence);
303 validate_fini_list(&op->both_list, fence);
385 struct nouveau_fence *fence = NULL; local
390 fence = nouveau_fence_ref(nvbo->bo.sync_obj);
393 if (fence) {
394 ret = nouveau_fence_sync(fence, cha
633 struct nouveau_fence *fence = NULL; local
[all...]
nouveau_bo.c
473 struct nouveau_fence *fence = NULL; local
476 ret = nouveau_fence_new(chan, &fence, true);
480 ret = ttm_bo_move_accel_cleanup(&nvbo->bo, fence, NULL, evict,
482 nouveau_fence_unref(&fence);
1038 nouveau_bo_fence(struct nouveau_bo *nvbo, struct nouveau_fence *fence) argument
1042 if (likely(fence))
1043 nouveau_fence_ref(fence);
1047 nvbo->bo.sync_obj = fence;
nouveau_mem.c
58 nouveau_fence_unref(&tile->fence);
92 (!tile->fence || nouveau_fence_signalled(tile->fence)))
103 struct nouveau_fence *fence)
109 if (fence) {
111 tile->fence = fence;
112 nouveau_fence_ref(fence);
102 nv10_mem_put_tile_region(struct drm_device *dev, struct nouveau_tile_reg *tile, struct nouveau_fence *fence) argument
/drivers/base/
sw_sync.c
169 struct sync_fence *fence; local
187 fence = sync_fence_create(data.name, pt);
188 if (fence == NULL) {
194 data.fence = fd;
196 sync_fence_put(fence);
201 sync_fence_install(fence, fd);
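The sw_sync.c hit shows a sync fence being handed to userspace as a file descriptor, with the fence dropped again if the ioctl cannot complete. A sketch of that create-fence ioctl body (pt, data and arg are assumed in scope; get_unused_fd()/put_unused_fd() are the fd-reservation helpers of this kernel era; cleanup of pt on fence-creation failure is omitted for brevity):

    struct sync_fence *fence;
    int fd = get_unused_fd();         /* reserve an fd slot first */

    if (fd < 0)
            return fd;
    fence = sync_fence_create(data.name, pt);
    if (fence == NULL) {
            put_unused_fd(fd);
            return -ENOMEM;
    }
    data.fence = fd;                  /* report the fd back to userspace */
    if (copy_to_user((void __user *)arg, &data, sizeof(data))) {
            sync_fence_put(fence);    /* drops the fence and its pts */
            put_unused_fd(fd);
            return -EFAULT;
    }
    sync_fence_install(fence, fd);    /* bind fence->file to the fd */
    return 0;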
sync.c
146 kref_get(&pt->fence->kref);
158 kref_put(&pt->fence->kref, sync_fence_free);
217 /* Adds a sync pt to the active queue. Called when added to a fence */
250 struct sync_fence *fence; local
253 fence = kzalloc(sizeof(struct sync_fence), GFP_KERNEL);
254 if (fence == NULL)
257 fence->file = anon_inode_getfile("sync_fence", &sync_fence_fops,
258 fence, 0);
259 if (fence->file == NULL)
262 kref_init(&fence
285 struct sync_fence *fence; local
371 sync_fence_detach_pts(struct sync_fence *fence) argument
381 sync_fence_free_pts(struct sync_fence *fence) argument
409 sync_fence_put(struct sync_fence *fence) argument
415 sync_fence_install(struct sync_fence *fence, int fd) argument
421 sync_fence_get_status(struct sync_fence *fence) argument
444 struct sync_fence *fence; local
472 struct sync_fence *fence = pt->fence; local
508 sync_fence_wait_async(struct sync_fence *fence, struct sync_fence_waiter *waiter) argument
529 sync_fence_cancel_async(struct sync_fence *fence, struct sync_fence_waiter *waiter) argument
558 sync_fence_wait(struct sync_fence *fence, long timeout) argument
590 struct sync_fence *fence = container_of(kref, struct sync_fence, kref); local
599 struct sync_fence *fence = file->private_data; local
625 struct sync_fence *fence = file->private_data; local
637 sync_fence_ioctl_wait(struct sync_fence *fence, unsigned long arg) argument
647 sync_fence_ioctl_merge(struct sync_fence *fence, unsigned long arg) argument
724 sync_fence_ioctl_fence_info(struct sync_fence *fence, unsigned long arg) argument
778 struct sync_fence *fence = file->private_data; local
805 sync_print_pt(struct seq_file *s, struct sync_pt *pt, bool fence) argument
848 sync_print_fence(struct seq_file *s, struct sync_fence *fence) argument
895 struct sync_fence *fence = local
[all...]
/drivers/dma/ioat/
hw.h
53 unsigned int fence:1; member in struct:ioat_dma_descriptor::__anon458::__anon459
87 unsigned int fence:1; member in struct:ioat_fill_descriptor::__anon461::__anon462
114 unsigned int fence:1; member in struct:ioat_xor_descriptor::__anon463::__anon464
151 unsigned int fence:1; member in struct:ioat_pq_descriptor::__anon465::__anon466
192 unsigned int fence:1; member in struct:ioat_pq_update_descriptor::__anon467::__anon468
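Unlike the driver hits above, the ioat fence is not a kernel object but a single control bit in each hardware DMA descriptor; setting it orders the engine so this descriptor completes before the next one starts. An illustrative layout sketch (only the fence field comes from the hw.h hits; the neighboring fields, their order, and their widths are assumptions):

    struct desc_ctl_sketch {
            unsigned int int_en:1;         /* assumed: interrupt on done */
            unsigned int compl_write:1;    /* assumed: completion write  */
            unsigned int fence:1;          /* from hw.h: order next desc */
            unsigned int rsvd:21;          /* assumed padding            */
            unsigned int op:8;             /* assumed: operation code    */
    };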
/drivers/gpu/drm/mga/
mga_state.c
1044 u32 *fence = data; local
1054 /* I would normally do this assignment in the declaration of fence,
1058 *fence = dev_priv->next_fence_to_post;
1074 u32 *fence = data; local
1083 mga_driver_fence_wait(dev, fence);
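The mga hits are the two halves of a simple fence throttle ioctl pair: one reports the next fence value that will be posted, the other blocks until a given value has passed. Sketched side by side (dev and dev_priv assumed in scope, locking elided):

    u32 *fence = data;    /* ioctl payload */

    /* set-fence ioctl: tell userspace which fence comes next */
    *fence = dev_priv->next_fence_to_post;

    /* wait-fence ioctl: stall until the requested fence is reached */
    mga_driver_fence_wait(dev, fence);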

Completed in 286 milliseconds
