Lines Matching defs:fenced_buf

169 fenced_buffer_destroy_cpu_storage_locked(struct fenced_buffer *fenced_buf);
173 struct fenced_buffer *fenced_buf);
176 fenced_buffer_destroy_gpu_storage_locked(struct fenced_buffer *fenced_buf);
180 struct fenced_buffer *fenced_buf,
184 fenced_buffer_copy_storage_to_gpu_locked(struct fenced_buffer *fenced_buf);
187 fenced_buffer_copy_storage_to_cpu_locked(struct fenced_buffer *fenced_buf);
201 struct fenced_buffer *fenced_buf;
209 fenced_buf = LIST_ENTRY(struct fenced_buffer, curr, head);
210 assert(!fenced_buf->fence);
212 (void *) fenced_buf,
213 fenced_buf->base.size,
214 p_atomic_read(&fenced_buf->base.reference.count),
215 fenced_buf->buffer ? "gpu" : (fenced_buf->data ? "cpu" : "none"));
224 fenced_buf = LIST_ENTRY(struct fenced_buffer, curr, head);
225 assert(fenced_buf->buffer);
226 signaled = ops->fence_signalled(ops, fenced_buf->fence, 0);
228 (void *) fenced_buf,
229 fenced_buf->base.size,
230 p_atomic_read(&fenced_buf->base.reference.count),
232 (void *) fenced_buf->fence,
245 struct fenced_buffer *fenced_buf)
247 assert(!pipe_is_referenced(&fenced_buf->base.reference));
249 assert(!fenced_buf->fence);
250 assert(fenced_buf->head.prev);
251 assert(fenced_buf->head.next);
252 LIST_DEL(&fenced_buf->head);
256 fenced_buffer_destroy_gpu_storage_locked(fenced_buf);
257 fenced_buffer_destroy_cpu_storage_locked(fenced_buf);
259 FREE(fenced_buf);
270 struct fenced_buffer *fenced_buf)
272 assert(pipe_is_referenced(&fenced_buf->base.reference));
273 assert(fenced_buf->flags & PB_USAGE_GPU_READ_WRITE);
274 assert(fenced_buf->fence);
276 p_atomic_inc(&fenced_buf->base.reference.count);
278 LIST_DEL(&fenced_buf->head);
281 LIST_ADDTAIL(&fenced_buf->head, &fenced_mgr->fenced);
294 struct fenced_buffer *fenced_buf)
298 assert(fenced_buf->fence);
299 assert(fenced_buf->mgr == fenced_mgr);
301 ops->fence_reference(ops, &fenced_buf->fence, NULL);
302 fenced_buf->flags &= ~PB_USAGE_GPU_READ_WRITE;
304 assert(fenced_buf->head.prev);
305 assert(fenced_buf->head.next);
307 LIST_DEL(&fenced_buf->head);
311 LIST_ADDTAIL(&fenced_buf->head, &fenced_mgr->unfenced);
314 if (p_atomic_dec_zero(&fenced_buf->base.reference.count)) {
315 fenced_buffer_destroy_locked(fenced_mgr, fenced_buf);
331 struct fenced_buffer *fenced_buf)
340 assert(pipe_is_referenced(&fenced_buf->base.reference));
341 assert(fenced_buf->fence);
343 if(fenced_buf->fence) {
348 ops->fence_reference(ops, &fence, fenced_buf->fence);
352 finished = ops->fence_finish(ops, fenced_buf->fence, 0);
356 assert(pipe_is_referenced(&fenced_buf->base.reference));
363 proceed = fence == fenced_buf->fence ? TRUE : FALSE;
374 destroyed = fenced_buffer_remove_locked(fenced_mgr, fenced_buf);
380 fenced_buf->flags &= ~PB_USAGE_GPU_READ_WRITE;
401 struct fenced_buffer *fenced_buf;
408 fenced_buf = LIST_ENTRY(struct fenced_buffer, curr, head);
410 if(fenced_buf->fence != prev_fence) {
414 signaled = ops->fence_finish(ops, fenced_buf->fence, 0);
423 signaled = ops->fence_signalled(ops, fenced_buf->fence, 0);
430 prev_fence = fenced_buf->fence;
436 assert(ops->fence_signalled(ops, fenced_buf->fence, 0) == 0);
439 fenced_buffer_remove_locked(fenced_mgr, fenced_buf);
460 struct fenced_buffer *fenced_buf;
465 fenced_buf = LIST_ENTRY(struct fenced_buffer, curr, head);
471 if(fenced_buf->buffer &&
472 !fenced_buf->mapcount &&
473 !fenced_buf->vl) {
476 ret = fenced_buffer_create_cpu_storage_locked(fenced_mgr, fenced_buf);
478 ret = fenced_buffer_copy_storage_to_cpu_locked(fenced_buf);
480 fenced_buffer_destroy_gpu_storage_locked(fenced_buf);
483 fenced_buffer_destroy_cpu_storage_locked(fenced_buf);
499 fenced_buffer_destroy_cpu_storage_locked(struct fenced_buffer *fenced_buf)
501 if(fenced_buf->data) {
502 align_free(fenced_buf->data);
503 fenced_buf->data = NULL;
504 assert(fenced_buf->mgr->cpu_total_size >= fenced_buf->size);
505 fenced_buf->mgr->cpu_total_size -= fenced_buf->size;
515 struct fenced_buffer *fenced_buf)
517 assert(!fenced_buf->data);
518 if(fenced_buf->data)
521 if (fenced_mgr->cpu_total_size + fenced_buf->size > fenced_mgr->max_cpu_total_size)
524 fenced_buf->data = align_malloc(fenced_buf->size, fenced_buf->desc.alignment);
525 if(!fenced_buf->data)
528 fenced_mgr->cpu_total_size += fenced_buf->size;
538 fenced_buffer_destroy_gpu_storage_locked(struct fenced_buffer *fenced_buf)
540 if(fenced_buf->buffer) {
541 pb_reference(&fenced_buf->buffer, NULL);
554 struct fenced_buffer *fenced_buf)
558 assert(!fenced_buf->buffer);
560 fenced_buf->buffer = provider->create_buffer(fenced_mgr->provider,
561 fenced_buf->size,
562 &fenced_buf->desc);
563 return fenced_buf->buffer ? TRUE : FALSE;
572 struct fenced_buffer *fenced_buf,
575 assert(!fenced_buf->buffer);
582 fenced_buffer_try_create_gpu_storage_locked(fenced_mgr, fenced_buf);
589 while(!fenced_buf->buffer &&
592 fenced_buffer_try_create_gpu_storage_locked(fenced_mgr, fenced_buf);
595 if(!fenced_buf->buffer && wait) {
600 while(!fenced_buf->buffer &&
603 fenced_buffer_try_create_gpu_storage_locked(fenced_mgr, fenced_buf);
607 if(!fenced_buf->buffer) {
620 fenced_buffer_copy_storage_to_gpu_locked(struct fenced_buffer *fenced_buf)
624 assert(fenced_buf->data);
625 assert(fenced_buf->buffer);
627 map = pb_map(fenced_buf->buffer, PB_USAGE_CPU_WRITE, NULL);
631 memcpy(map, fenced_buf->data, fenced_buf->size);
633 pb_unmap(fenced_buf->buffer);
640 fenced_buffer_copy_storage_to_cpu_locked(struct fenced_buffer *fenced_buf)
644 assert(fenced_buf->data);
645 assert(fenced_buf->buffer);
647 map = pb_map(fenced_buf->buffer, PB_USAGE_CPU_READ, NULL);
651 memcpy(fenced_buf->data, map, fenced_buf->size);
653 pb_unmap(fenced_buf->buffer);
662 struct fenced_buffer *fenced_buf = fenced_buffer(buf);
663 struct fenced_manager *fenced_mgr = fenced_buf->mgr;
665 assert(!pipe_is_referenced(&fenced_buf->base.reference));
669 fenced_buffer_destroy_locked(fenced_mgr, fenced_buf);
679 struct fenced_buffer *fenced_buf = fenced_buffer(buf);
680 struct fenced_manager *fenced_mgr = fenced_buf->mgr;
691 while((fenced_buf->flags & PB_USAGE_GPU_WRITE) ||
692 ((fenced_buf->flags & PB_USAGE_GPU_READ) &&
699 ops->fence_signalled(ops, fenced_buf->fence, 0) != 0) {
711 fenced_buffer_finish_locked(fenced_mgr, fenced_buf);
714 if(fenced_buf->buffer) {
715 map = pb_map(fenced_buf->buffer, flags, flush_ctx);
718 assert(fenced_buf->data);
719 map = fenced_buf->data;
723 ++fenced_buf->mapcount;
724 fenced_buf->flags |= flags & PB_USAGE_CPU_READ_WRITE;
737 struct fenced_buffer *fenced_buf = fenced_buffer(buf);
738 struct fenced_manager *fenced_mgr = fenced_buf->mgr;
742 assert(fenced_buf->mapcount);
743 if(fenced_buf->mapcount) {
744 if (fenced_buf->buffer)
745 pb_unmap(fenced_buf->buffer);
746 --fenced_buf->mapcount;
747 if(!fenced_buf->mapcount)
748 fenced_buf->flags &= ~PB_USAGE_CPU_READ_WRITE;
760 struct fenced_buffer *fenced_buf = fenced_buffer(buf);
761 struct fenced_manager *fenced_mgr = fenced_buf->mgr;
768 fenced_buf->vl = NULL;
769 fenced_buf->validation_flags = 0;
779 if(fenced_buf->vl && fenced_buf->vl != vl) {
784 if(fenced_buf->vl == vl &&
785 (fenced_buf->validation_flags & flags) == flags) {
794 if(!fenced_buf->buffer) {
795 assert(!fenced_buf->mapcount);
797 ret = fenced_buffer_create_gpu_storage_locked(fenced_mgr, fenced_buf, TRUE);
802 ret = fenced_buffer_copy_storage_to_gpu_locked(fenced_buf);
804 fenced_buffer_destroy_gpu_storage_locked(fenced_buf);
808 if(fenced_buf->mapcount) {
812 fenced_buffer_destroy_cpu_storage_locked(fenced_buf);
816 ret = pb_validate(fenced_buf->buffer, vl, flags);
820 fenced_buf->vl = vl;
821 fenced_buf->validation_flags |= flags;
834 struct fenced_buffer *fenced_buf = fenced_buffer(buf);
835 struct fenced_manager *fenced_mgr = fenced_buf->mgr;
840 assert(pipe_is_referenced(&fenced_buf->base.reference));
841 assert(fenced_buf->buffer);
843 if(fence != fenced_buf->fence) {
844 assert(fenced_buf->vl);
845 assert(fenced_buf->validation_flags);
847 if (fenced_buf->fence) {
849 destroyed = fenced_buffer_remove_locked(fenced_mgr, fenced_buf);
853 ops->fence_reference(ops, &fenced_buf->fence, fence);
854 fenced_buf->flags |= fenced_buf->validation_flags;
855 fenced_buffer_add_locked(fenced_mgr, fenced_buf);
858 pb_fence(fenced_buf->buffer, fence);
860 fenced_buf->vl = NULL;
861 fenced_buf->validation_flags = 0;
873 struct fenced_buffer *fenced_buf = fenced_buffer(buf);
874 struct fenced_manager *fenced_mgr = fenced_buf->mgr;
882 assert(fenced_buf->vl);
883 assert(fenced_buf->buffer);
885 if(fenced_buf->buffer)
886 pb_get_base_buffer(fenced_buf->buffer, base_buf, offset);
916 struct fenced_buffer *fenced_buf;
928 fenced_buf = CALLOC_STRUCT(fenced_buffer);
929 if(!fenced_buf)
932 pipe_reference_init(&fenced_buf->base.reference, 1);
933 fenced_buf->base.alignment = desc->alignment;
934 fenced_buf->base.usage = desc->usage;
935 fenced_buf->base.size = size;
936 fenced_buf->size = size;
937 fenced_buf->desc = *desc;
939 fenced_buf->base.vtbl = &fenced_buffer_vtbl;
940 fenced_buf->mgr = fenced_mgr;
947 ret = fenced_buffer_create_gpu_storage_locked(fenced_mgr, fenced_buf, FALSE);
953 ret = fenced_buffer_create_cpu_storage_locked(fenced_mgr, fenced_buf);
960 ret = fenced_buffer_create_gpu_storage_locked(fenced_mgr, fenced_buf, TRUE);
970 assert(fenced_buf->buffer || fenced_buf->data);
972 LIST_ADDTAIL(&fenced_buf->head, &fenced_mgr->unfenced);
976 return &fenced_buf->base;
980 FREE(fenced_buf);
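
The entries above span the whole life cycle of a fenced buffer: allocation through the manager's create_buffer hook, mapping and unmapping, validation, fencing, and destruction once the last reference and the fence are gone. Below is a minimal caller-side sketch of that life cycle using the generic pb_* wrappers that dispatch into the vtbl entries listed here. It is a sketch under assumptions, not code from this file: the include paths follow Mesa's gallium/auxiliary layout, and the manager, validation list, and fence handle are assumed to come from the winsys/driver.

```c
/* Sketch only: exercises the fenced-buffer path via the generic pb_*
 * wrappers.  The fenced manager, the pb_validate list and the fence
 * handle are assumed to be supplied by the driver and are hypothetical
 * here; how a fence is produced is driver-specific. */
#include <string.h>

#include "pipe/p_defines.h"
#include "pipebuffer/pb_buffer.h"
#include "pipebuffer/pb_bufmgr.h"
#include "pipebuffer/pb_validate.h"

static void
upload_and_fence(struct pb_manager *fenced_mgr,        /* assumed: a fenced buffer manager */
                 struct pb_validate *vl,                /* assumed: current validation list */
                 struct pipe_fence_handle *fence,       /* assumed: fence from the driver's flush */
                 const void *src, pb_size size)
{
   struct pb_desc desc;
   struct pb_buffer *buf;
   void *map;

   memset(&desc, 0, sizeof desc);
   desc.alignment = 64;
   desc.usage = PB_USAGE_GPU_READ;

   /* fenced_bufmgr_create_buffer(): chooses GPU or CPU backing storage. */
   buf = fenced_mgr->create_buffer(fenced_mgr, size, &desc);
   if (!buf)
      return;

   /* fenced_buffer_map()/fenced_buffer_unmap(): may first wait for an
    * earlier fence guarding the storage. */
   map = pb_map(buf, PB_USAGE_CPU_WRITE, NULL);
   if (map) {
      memcpy(map, src, size);
      pb_unmap(buf);
   }

   /* fenced_buffer_validate(): promotes CPU storage to GPU storage if
    * needed before the buffer can be referenced by the hardware. */
   if (pb_validate(buf, vl, PB_USAGE_GPU_READ) == PIPE_OK) {
      /* fenced_buffer_fence(): moves the buffer onto the manager's
       * fenced list until the fence signals. */
      pb_fence(buf, fence);
   }

   /* fenced_buffer_destroy() runs once the last reference is dropped
    * and the pending fence has been retired. */
   pb_reference(&buf, NULL);
}
```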