Searched refs:bo (Results 1 - 25 of 112) sorted by relevance


/drivers/gpu/drm/qxl/
qxl_object.h
30 static inline int qxl_bo_reserve(struct qxl_bo *bo, bool no_wait) argument
34 r = ttm_bo_reserve(&bo->tbo, true, no_wait, false, NULL);
37 struct qxl_device *qdev = (struct qxl_device *)bo->gem_base.dev->dev_private;
38 dev_err(qdev->dev, "%p reserve failed\n", bo);
45 static inline void qxl_bo_unreserve(struct qxl_bo *bo) argument
47 ttm_bo_unreserve(&bo->tbo);
50 static inline u64 qxl_bo_gpu_offset(struct qxl_bo *bo) argument
52 return bo->tbo.offset;
55 static inline unsigned long qxl_bo_size(struct qxl_bo *bo) argument
57 return bo->tbo.num_pages << PAGE_SHIFT;
60 qxl_bo_mmap_offset(struct qxl_bo *bo) argument
65 qxl_bo_wait(struct qxl_bo *bo, u32 *mem_type, bool no_wait) argument
[all...]
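
The reserve/unreserve pair above is TTM's reservation lock in qxl clothing: anything that touches a bo's placement or mappings must sit between the two calls. A minimal sketch of the calling pattern, using only the inlines shown above (qxl_touch_bo() itself is a hypothetical caller):

/* sketch only: qxl_touch_bo() is hypothetical, the inlines are from above */
static int qxl_touch_bo(struct qxl_bo *bo)
{
	int r;

	r = qxl_bo_reserve(bo, false);	/* false: sleep if contended */
	if (r)
		return r;		/* failure already logged via dev_err() */

	/* bo is reserved here: placement and kernel maps are stable */

	qxl_bo_unreserve(bo);
	return 0;
}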
qxl_object.c
32 struct qxl_bo *bo; local
35 bo = container_of(tbo, struct qxl_bo, tbo);
36 qdev = (struct qxl_device *)bo->gem_base.dev->dev_private;
38 qxl_surface_evict(qdev, bo, false);
40 list_del_init(&bo->list);
42 drm_gem_object_release(&bo->gem_base);
43 kfree(bo);
46 bool qxl_ttm_bo_is_qxl_bo(struct ttm_buffer_object *bo) argument
48 if (bo->destroy == &qxl_ttm_bo_destroy)
83 struct qxl_bo *bo; local
125 qxl_bo_kmap(struct qxl_bo *bo, void **ptr) argument
144 qxl_bo_kmap_atomic_page(struct qxl_device *qdev, struct qxl_bo *bo, int page_offset) argument
178 qxl_bo_kunmap(struct qxl_bo *bo) argument
186 qxl_bo_kunmap_atomic_page(struct qxl_device *qdev, struct qxl_bo *bo, void *pmap) argument
209 qxl_bo_unref(struct qxl_bo **bo) argument
221 qxl_bo_ref(struct qxl_bo *bo) argument
227 qxl_bo_pin(struct qxl_bo *bo, u32 domain, u64 *gpu_addr) argument
250 qxl_bo_unpin(struct qxl_bo *bo) argument
272 struct qxl_bo *bo, *n; local
301 qxl_bo_check_id(struct qxl_device *qdev, struct qxl_bo *bo) argument
[all...]
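
Together these entry points form the usual life cycle: pin into a GPU domain, kmap for CPU access, then unwind. A hedged sketch of that flow built from the signatures above (QXL_GEM_DOMAIN_VRAM is the domain flag from the qxl uapi; qxl_cpu_fill() is hypothetical and error unwinding is abbreviated):

static int qxl_cpu_fill(struct qxl_bo *bo)	/* hypothetical helper */
{
	void *ptr;
	int r;

	r = qxl_bo_reserve(bo, false);		/* pin/kmap need the reservation */
	if (r)
		return r;

	r = qxl_bo_pin(bo, QXL_GEM_DOMAIN_VRAM, NULL);	/* keep it resident */
	if (r)
		goto out_unreserve;

	r = qxl_bo_kmap(bo, &ptr);		/* CPU-visible mapping */
	if (r)
		goto out_unpin;

	memset(ptr, 0, qxl_bo_size(bo));

	qxl_bo_kunmap(bo);
out_unpin:
	qxl_bo_unpin(bo);
out_unreserve:
	qxl_bo_unreserve(bo);
	return r;
}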
qxl_release.c
36 /* put an alloc/dealloc surface cmd into one bo and round up to 128 */
166 struct qxl_bo *bo; local
170 bo = to_qxl_bo(entry->tv.bo);
171 qxl_bo_unref(&bo);
204 struct qxl_bo **bo)
207 /* pin release bo's; they are too messy to evict */
210 bo);
214 int qxl_release_list_add(struct qxl_release *release, struct qxl_bo *bo) argument
219 if (entry->tv.bo == &bo->tbo)
203 qxl_release_bo_alloc(struct qxl_device *qdev, struct qxl_bo **bo) argument
234 qxl_release_validate_bo(struct qxl_bo *bo) argument
272 struct qxl_bo *bo = to_qxl_bo(entry->tv.bo); local
302 struct qxl_bo *bo; local
331 struct qxl_bo *bo; local
411 struct qxl_bo *bo = to_qxl_bo(entry->tv.bo); local
425 struct qxl_bo *bo = to_qxl_bo(entry->tv.bo); local
434 struct ttm_buffer_object *bo; local
[all...]
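
The release tracks its buffers as ttm_validate_buffer entries, so each tv.bo above is the embedded TTM object and to_qxl_bo() recovers the driver wrapper; mirroring the container_of() in qxl_object.c, it is just:

/* reconstruction of the accessor used above: qxl_bo embeds its
 * ttm_buffer_object as .tbo, so the conversion is a container_of() */
static inline struct qxl_bo *to_qxl_bo(struct ttm_buffer_object *tbo)
{
	return container_of(tbo, struct qxl_bo, tbo);
}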
/drivers/gpu/drm/radeon/
radeon_object.c
42 static void radeon_bo_clear_surface_reg(struct radeon_bo *bo);
49 static void radeon_update_memory_usage(struct radeon_bo *bo, argument
52 struct radeon_device *rdev = bo->rdev;
53 u64 size = (u64)bo->tbo.num_pages << PAGE_SHIFT;
73 struct radeon_bo *bo; local
75 bo = container_of(tbo, struct radeon_bo, tbo);
77 radeon_update_memory_usage(bo, bo->tbo.mem.mem_type, -1);
78 radeon_mn_unregister(bo);
80 mutex_lock(&bo->rdev->gem.mutex);
89 radeon_ttm_bo_is_radeon_bo(struct ttm_buffer_object *bo) argument
175 struct radeon_bo *bo; local
240 radeon_bo_kmap(struct radeon_bo *bo, void **ptr) argument
263 radeon_bo_kunmap(struct radeon_bo *bo) argument
272 radeon_bo_ref(struct radeon_bo *bo) argument
281 radeon_bo_unref(struct radeon_bo **bo) argument
295 radeon_bo_pin_restricted(struct radeon_bo *bo, u32 domain, u64 max_offset, u64 *gpu_addr) argument
350 radeon_bo_pin(struct radeon_bo *bo, u32 domain, u64 *gpu_addr) argument
355 radeon_bo_unpin(struct radeon_bo *bo) argument
395 struct radeon_bo *bo, *n; local
493 struct radeon_bo *bo; local
552 radeon_bo_fbdev_mmap(struct radeon_bo *bo, struct vm_area_struct *vma) argument
558 radeon_bo_get_surface_reg(struct radeon_bo *bo) argument
613 radeon_bo_clear_surface_reg(struct radeon_bo *bo) argument
628 radeon_bo_set_tiling_flags(struct radeon_bo *bo, uint32_t tiling_flags, uint32_t pitch) argument
688 radeon_bo_get_tiling_flags(struct radeon_bo *bo, uint32_t *tiling_flags, uint32_t *pitch) argument
700 radeon_bo_check_tiling(struct radeon_bo *bo, bool has_moved, bool force_drop) argument
729 radeon_bo_move_notify(struct ttm_buffer_object *bo, struct ttm_mem_reg *new_mem) argument
749 radeon_bo_fault_reserve_notify(struct ttm_buffer_object *bo) argument
788 radeon_bo_wait(struct radeon_bo *bo, u32 *mem_type, bool no_wait) argument
[all...]
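
radeon follows the same reserve-then-pin discipline, with radeon_bo_pin_restricted() adding a max_offset bound for buffers that must stay below a hardware limit; radeon_bo_pin() is the unrestricted case. A sketch of the common caller pattern (pin_in_vram() is hypothetical; RADEON_GEM_DOMAIN_VRAM comes from the radeon uapi):

static int pin_in_vram(struct radeon_bo *bo, u64 *gpu_addr)	/* hypothetical */
{
	int r;

	r = radeon_bo_reserve(bo, false);	/* interruptible reserve */
	if (r)
		return r;

	/* same as radeon_bo_pin_restricted(bo, domain, 0, gpu_addr) */
	r = radeon_bo_pin(bo, RADEON_GEM_DOMAIN_VRAM, gpu_addr);

	radeon_bo_unreserve(bo);
	return r;
}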
radeon_prime.c
34 struct radeon_bo *bo = gem_to_radeon_bo(obj); local
35 int npages = bo->tbo.num_pages;
37 return drm_prime_pages_to_sg(bo->tbo.ttm->pages, npages);
42 struct radeon_bo *bo = gem_to_radeon_bo(obj); local
45 ret = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages,
46 &bo->dma_buf_vmap);
50 return bo->dma_buf_vmap.virtual;
55 struct radeon_bo *bo = gem_to_radeon_bo(obj); local
57 ttm_bo_kunmap(&bo->dma_buf_vmap);
66 struct radeon_bo *bo; local
85 struct radeon_bo *bo = gem_to_radeon_bo(obj); local
100 struct radeon_bo *bo = gem_to_radeon_bo(obj); local
114 struct radeon_bo *bo = gem_to_radeon_bo(obj); local
123 struct radeon_bo *bo = gem_to_radeon_bo(gobj); local
[all...]
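
The prime vmap path kmaps the whole object through TTM and caches the mapping in bo->dma_buf_vmap so the matching vunmap can drop it. Reassembled from the fragments above (a reconstruction, not verified against this exact tree):

void *radeon_gem_prime_vmap(struct drm_gem_object *obj)
{
	struct radeon_bo *bo = gem_to_radeon_bo(obj);
	int ret;

	ret = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages,
			  &bo->dma_buf_vmap);
	if (ret)
		return ERR_PTR(ret);

	return bo->dma_buf_vmap.virtual;
}

void radeon_gem_prime_vunmap(struct drm_gem_object *obj, void *vaddr)
{
	struct radeon_bo *bo = gem_to_radeon_bo(obj);

	ttm_bo_kunmap(&bo->dma_buf_vmap);
}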
radeon_object.h
56 * radeon_bo_reserve - reserve bo
57 * @bo: bo structure
64 static inline int radeon_bo_reserve(struct radeon_bo *bo, bool no_intr) argument
68 r = ttm_bo_reserve(&bo->tbo, !no_intr, false, false, NULL);
71 dev_err(bo->rdev->dev, "%p reserve failed\n", bo);
77 static inline void radeon_bo_unreserve(struct radeon_bo *bo) argument
79 ttm_bo_unreserve(&bo->tbo);
83 * radeon_bo_gpu_offset - return GPU offset of bo
91 radeon_bo_gpu_offset(struct radeon_bo *bo) argument
96 radeon_bo_size(struct radeon_bo *bo) argument
101 radeon_bo_ngpu_pages(struct radeon_bo *bo) argument
106 radeon_bo_gpu_page_alignment(struct radeon_bo *bo) argument
117 radeon_bo_mmap_offset(struct radeon_bo *bo) argument
[all...]
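
The truncated size helpers at the bottom are thin wrappers over the TTM fields; given the num_pages << PAGE_SHIFT arithmetic used in radeon_object.c above, they plausibly reduce to (a reconstruction, not verified against this exact tree):

static inline unsigned long radeon_bo_size(struct radeon_bo *bo)
{
	return bo->tbo.num_pages << PAGE_SHIFT;
}

static inline unsigned radeon_bo_ngpu_pages(struct radeon_bo *bo)
{
	/* RADEON_GPU_PAGE_SIZE: the GPU's 4 KiB page granularity */
	return (bo->tbo.num_pages << PAGE_SHIFT) / RADEON_GPU_PAGE_SIZE;
}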
radeon_mn.c
67 struct radeon_bo *bo, *next; local
72 rbtree_postorder_for_each_entry_safe(bo, next, &rmn->objects, mn_it.rb) {
73 interval_tree_remove(&bo->mn_it, &rmn->objects);
74 bo->mn = NULL;
124 struct radeon_bo *bo; local
128 bo = container_of(it, struct radeon_bo, mn_it);
131 r = radeon_bo_reserve(bo, true);
133 DRM_ERROR("(%d) failed to reserve user bo\n", r);
137 fence = reservation_object_get_excl(bo->tbo.resv);
141 DRM_ERROR("(%d) failed to wait for user bo\n", r);
221 radeon_mn_register(struct radeon_bo *bo, unsigned long addr) argument
257 radeon_mn_unregister(struct radeon_bo *bo) argument
[all...]
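
The invalidate callback walks the interval tree for userptr bos overlapping the range being unmapped, reserves each one, and waits on its exclusive fence before the kernel may tear the pages down. A condensed per-bo sketch using the calls visible above; wait_user_bo_idle() is hypothetical and the generic fence_wait() stands in for whatever wait wrapper the driver really uses:

static void wait_user_bo_idle(struct radeon_bo *bo)	/* hypothetical */
{
	struct fence *fence;
	int r;

	r = radeon_bo_reserve(bo, true);	/* true: non-interruptible */
	if (r) {
		DRM_ERROR("(%d) failed to reserve user bo\n", r);
		return;
	}

	fence = reservation_object_get_excl(bo->tbo.resv);
	if (fence) {
		r = fence_wait(fence, false);	/* block until the GPU is done */
		if (r)
			DRM_ERROR("(%d) failed to wait for user bo\n", r);
	}

	radeon_bo_unreserve(bo);
}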
/drivers/gpu/drm/ttm/
ttm_execbuf_util.c
39 struct ttm_buffer_object *bo = entry->bo; local
41 __ttm_bo_unreserve(bo);
50 struct ttm_buffer_object *bo = entry->bo; local
51 unsigned put_count = ttm_bo_del_from_lru(bo);
53 ttm_bo_list_ref_sub(bo, put_count, true);
67 glob = entry->bo->glob;
71 struct ttm_buffer_object *bo = entry->bo; local
112 struct ttm_buffer_object *bo = entry->bo; local
178 struct ttm_buffer_object *bo; local
[all...]
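
These helpers are the multi-bo reservation used at command submission: reserve the whole validation list under one ww_acquire ticket (retrying internally on contention, which is what the del_from_lru/ref_sub bookkeeping above supports), then fence and unreserve everything in one call. A sketch assuming the (ticket, list, interruptible) form of ttm_eu_reserve_buffers(), a signature that has changed across kernel versions; submit() and run_gpu_commands() are hypothetical:

static int run_gpu_commands(void) { return 0; }	/* hypothetical stand-in */

static int submit(struct ww_acquire_ctx *ticket,
		  struct list_head *validate_list, struct fence *fence)
{
	int ret;

	ret = ttm_eu_reserve_buffers(ticket, validate_list, true);
	if (ret)
		return ret;	/* nothing is left reserved on failure */

	ret = run_gpu_commands();
	if (ret) {
		ttm_eu_backoff_reservation(ticket, validate_list);
		return ret;
	}

	/* attach the fence to every reserved bo, then unreserve them all */
	ttm_eu_fence_buffer_objects(ticket, validate_list, fence);
	return 0;
}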
ttm_bo_vm.c
44 static int ttm_bo_vm_fault_idle(struct ttm_buffer_object *bo, argument
50 if (likely(!test_bit(TTM_BO_PRIV_FLAG_MOVING, &bo->priv_flags)))
56 ret = ttm_bo_wait(bo, false, false, true);
70 (void) ttm_bo_wait(bo, false, true, false);
77 ret = ttm_bo_wait(bo, false, true, false);
88 struct ttm_buffer_object *bo = (struct ttm_buffer_object *) local
90 struct ttm_bo_device *bdev = bo->bdev;
101 &bdev->man[bo->mem.mem_type];
110 ret = ttm_bo_reserve(bo, true, true, false, NULL);
118 (void) ttm_bo_wait_unreserved(bo);
262 struct ttm_buffer_object *bo = local
272 struct ttm_buffer_object *bo = (struct ttm_buffer_object *)vma->vm_private_data; local
289 struct ttm_buffer_object *bo = NULL; local
312 struct ttm_buffer_object *bo; local
353 ttm_fbdev_mmap(struct vm_area_struct *vma, struct ttm_buffer_object *bo) argument
[all...]
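
Drivers rarely call these vm handlers directly; their file_operations .mmap forwards to ttm_bo_mmap(), which resolves the bo from the fake offset in vma->vm_pgoff and installs TTM's vm_ops so faults land in the handler above. A sketch, with all mydrv names hypothetical:

struct mydrv_device {			/* hypothetical driver device */
	struct drm_device *ddev;
	struct ttm_bo_device bdev;
};

static int mydrv_mmap(struct file *filp, struct vm_area_struct *vma)
{
	struct drm_file *file_priv = filp->private_data;
	struct mydrv_device *mdev = file_priv->minor->dev->dev_private;

	return ttm_bo_mmap(filp, vma, &mdev->bdev);
}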
ttm_bo.c
85 static void ttm_bo_mem_space_debug(struct ttm_buffer_object *bo, argument
91 bo, bo->mem.num_pages, bo->mem.size >> 10,
92 bo->mem.size >> 20);
100 ttm_mem_type_debug(bo->bdev, mem_type);
138 struct ttm_buffer_object *bo = local
140 struct ttm_bo_device *bdev = bo->bdev;
141 size_t acc_size = bo->acc_size;
143 BUG_ON(atomic_read(&bo->list_kref.refcount));
164 ttm_bo_add_to_lru(struct ttm_buffer_object *bo) argument
187 ttm_bo_del_from_lru(struct ttm_buffer_object *bo) argument
213 ttm_bo_list_ref_sub(struct ttm_buffer_object *bo, int count, bool never_free) argument
220 ttm_bo_del_sub_from_lru(struct ttm_buffer_object *bo) argument
234 ttm_bo_add_ttm(struct ttm_buffer_object *bo, bool zero_alloc) argument
276 ttm_bo_handle_move_mem(struct ttm_buffer_object *bo, struct ttm_mem_reg *mem, bool evict, bool interruptible, bool no_wait_gpu) argument
391 ttm_bo_cleanup_memtype_use(struct ttm_buffer_object *bo) argument
406 ttm_bo_flush_all_fences(struct ttm_buffer_object *bo) argument
426 ttm_bo_cleanup_refs_or_queue(struct ttm_buffer_object *bo) argument
482 ttm_bo_cleanup_refs_and_unlock(struct ttm_buffer_object *bo, bool interruptible, bool no_wait_gpu) argument
623 struct ttm_buffer_object *bo = local
638 struct ttm_buffer_object *bo = *p_bo; local
659 ttm_bo_evict(struct ttm_buffer_object *bo, bool interruptible, bool no_wait_gpu) argument
718 struct ttm_buffer_object *bo; local
769 ttm_bo_mem_put(struct ttm_buffer_object *bo, struct ttm_mem_reg *mem) argument
782 ttm_bo_mem_force_space(struct ttm_buffer_object *bo, uint32_t mem_type, const struct ttm_place *place, struct ttm_mem_reg *mem, bool interruptible, bool no_wait_gpu) argument
862 ttm_bo_mem_space(struct ttm_buffer_object *bo, struct ttm_placement *placement, struct ttm_mem_reg *mem, bool interruptible, bool no_wait_gpu) argument
965 ttm_bo_move_buffer(struct ttm_buffer_object *bo, struct ttm_placement *placement, bool interruptible, bool no_wait_gpu) argument
1038 ttm_bo_validate(struct ttm_buffer_object *bo, struct ttm_placement *placement, bool interruptible, bool no_wait_gpu) argument
1075 ttm_bo_init(struct ttm_bo_device *bdev, struct ttm_buffer_object *bo, unsigned long size, enum ttm_bo_type type, struct ttm_placement *placement, uint32_t page_alignment, bool interruptible, struct file *persistent_swap_storage, size_t acc_size, struct sg_table *sg, struct reservation_object *resv, void (*destroy) (struct ttm_buffer_object *)) argument
1217 struct ttm_buffer_object *bo; local
1513 ttm_bo_unmap_virtual_locked(struct ttm_buffer_object *bo) argument
1521 ttm_bo_unmap_virtual(struct ttm_buffer_object *bo) argument
1534 ttm_bo_wait(struct ttm_buffer_object *bo, bool lazy, bool interruptible, bool no_wait) argument
1582 ttm_bo_synccpu_write_grab(struct ttm_buffer_object *bo, bool no_wait) argument
1601 ttm_bo_synccpu_write_release(struct ttm_buffer_object *bo) argument
1616 struct ttm_buffer_object *bo; local
1706 ttm_bo_wait_unreserved(struct ttm_buffer_object *bo) argument
[all...]
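
ttm_bo_validate() is the workhorse of this file: given a ttm_placement describing acceptable memory types, it moves the (reserved) bo only if its current placement no longer matches, going through ttm_bo_mem_space() and ttm_bo_handle_move_mem() above. A sketch of forcing a bo into write-combined VRAM, assuming the ttm_place-based placement structs this tree uses; move_to_vram() is hypothetical:

static int move_to_vram(struct ttm_buffer_object *bo)	/* hypothetical */
{
	struct ttm_place place = {
		.fpfn = 0, .lpfn = 0,	/* no range restriction */
		.flags = TTM_PL_FLAG_VRAM | TTM_PL_FLAG_WC,
	};
	struct ttm_placement placement = {
		.num_placement = 1, .placement = &place,
		.num_busy_placement = 1, .busy_placement = &place,
	};

	/* caller must already hold the reservation (ttm_bo_reserve) */
	return ttm_bo_validate(bo, &placement, true, false);
}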
ttm_bo_util.c
42 void ttm_bo_free_old_node(struct ttm_buffer_object *bo) argument
44 ttm_bo_mem_put(bo, &bo->mem);
47 int ttm_bo_move_ttm(struct ttm_buffer_object *bo, argument
51 struct ttm_tt *ttm = bo->ttm;
52 struct ttm_mem_reg *old_mem = &bo->mem;
57 ttm_bo_free_old_node(bo);
104 struct ttm_buffer_object *bo; local
109 bo = list_first_entry(&man->io_reserve_lru,
112 list_del_init(&bo->io_reserve_lru);
160 ttm_mem_io_reserve_vm(struct ttm_buffer_object *bo) argument
180 ttm_mem_io_free_vm(struct ttm_buffer_object *bo) argument
323 ttm_bo_move_memcpy(struct ttm_buffer_object *bo, bool evict, bool no_wait_gpu, struct ttm_mem_reg *new_mem) argument
424 ttm_transfered_destroy(struct ttm_buffer_object *bo) argument
444 ttm_buffer_object_transfer(struct ttm_buffer_object *bo, struct ttm_buffer_object **new_obj) argument
506 ttm_bo_ioremap(struct ttm_buffer_object *bo, unsigned long offset, unsigned long size, struct ttm_bo_kmap_obj *map) argument
528 ttm_bo_kmap_ttm(struct ttm_buffer_object *bo, unsigned long start_page, unsigned long num_pages, struct ttm_bo_kmap_obj *map) argument
567 ttm_bo_kmap(struct ttm_buffer_object *bo, unsigned long start_page, unsigned long num_pages, struct ttm_bo_kmap_obj *map) argument
604 struct ttm_buffer_object *bo = map->bo; local
633 ttm_bo_move_accel_cleanup(struct ttm_buffer_object *bo, struct fence *fence, bool evict, bool no_wait_gpu, struct ttm_mem_reg *new_mem) argument
[all...]
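
ttm_bo_kmap() maps a page range of a reserved bo for the CPU, choosing ioremap for aperture memory or vmap/kmap for system pages; ttm_kmap_obj_virtual() returns the address and tells you which case you got. A small sketch (poke_bo() is hypothetical):

static int poke_bo(struct ttm_buffer_object *bo, unsigned long page)
{
	struct ttm_bo_kmap_obj map;
	bool is_iomem;
	int ret;
	void *ptr;

	ret = ttm_bo_kmap(bo, page, 1, &map);	/* map a single page */
	if (ret)
		return ret;

	ptr = ttm_kmap_obj_virtual(&map, &is_iomem);
	if (!is_iomem)		/* iomem would need iowrite32()/memcpy_toio() */
		*(u32 *)ptr = 0xdeadbeef;

	ttm_bo_kunmap(&map);
	return 0;
}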
/drivers/gpu/drm/tegra/
gem.c
22 static inline struct tegra_bo *host1x_to_tegra_bo(struct host1x_bo *bo) argument
24 return container_of(bo, struct tegra_bo, base);
27 static void tegra_bo_put(struct host1x_bo *bo) argument
29 struct tegra_bo *obj = host1x_to_tegra_bo(bo);
37 static dma_addr_t tegra_bo_pin(struct host1x_bo *bo, struct sg_table **sgt) argument
39 struct tegra_bo *obj = host1x_to_tegra_bo(bo);
44 static void tegra_bo_unpin(struct host1x_bo *bo, struct sg_table *sgt) argument
48 static void *tegra_bo_mmap(struct host1x_bo *bo) argument
50 struct tegra_bo *obj = host1x_to_tegra_bo(bo);
55 static void tegra_bo_munmap(struct host1x_bo *bo, void *addr) argument
59 tegra_bo_kmap(struct host1x_bo *bo, unsigned int page) argument
66 tegra_bo_kunmap(struct host1x_bo *bo, unsigned int page, void *addr) argument
71 tegra_bo_get(struct host1x_bo *bo) argument
94 tegra_bo_destroy(struct drm_device *drm, struct tegra_bo *bo) argument
102 struct tegra_bo *bo; local
153 struct tegra_bo *bo; local
177 struct tegra_bo *bo; local
243 struct tegra_bo *bo = to_tegra_bo(gem); local
264 struct tegra_bo *bo; local
285 struct tegra_bo *bo; local
315 struct tegra_bo *bo; local
338 struct tegra_bo *bo = to_tegra_bo(gem); local
399 struct tegra_bo *bo = to_tegra_bo(gem); local
432 struct tegra_bo *bo; local
[all...]
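
All of these callbacks are handed to host1x through an ops table on the bo, plausibly along these lines (field names inferred from the function set above; treat as a sketch):

static const struct host1x_bo_ops tegra_bo_ops = {
	.get = tegra_bo_get,
	.put = tegra_bo_put,
	.pin = tegra_bo_pin,
	.unpin = tegra_bo_unpin,
	.mmap = tegra_bo_mmap,
	.munmap = tegra_bo_munmap,
	.kmap = tegra_bo_kmap,
	.kunmap = tegra_bo_kunmap,
};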
/drivers/gpu/drm/vmwgfx/
vmwgfx_dmabuf.c
45 * Flushes and unpins the query bo to avoid failures.
55 struct ttm_buffer_object *bo = &buf->base; local
64 ret = ttm_bo_reserve(bo, interruptible, false, false, NULL);
68 ret = ttm_bo_validate(bo, placement, interruptible, false);
70 ttm_bo_unreserve(bo);
83 * Flushes and unpins the query bo if @pin == true to avoid failures.
97 struct ttm_buffer_object *bo = &buf->base; local
108 ret = ttm_bo_reserve(bo, interruptible, false, false, NULL);
124 ret = ttm_bo_validate(bo, placement, interruptible, false);
139 ret = ttm_bo_validate(bo, placement, interruptible, false);
199 struct ttm_buffer_object *bo = &buf->base; local
280 vmw_bo_get_guest_ptr(const struct ttm_buffer_object *bo, SVGAGuestPtr *ptr) argument
300 vmw_bo_pin(struct ttm_buffer_object *bo, bool pin) argument
[all...]
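
Reassembling the fragments above, each vmwgfx pin variant is the same three-step dance: reserve, validate against the pinned placement, unreserve. Condensed into a hypothetical helper:

static int vmw_pin_sketch(struct ttm_buffer_object *bo,	/* hypothetical */
			  struct ttm_placement *placement,
			  bool interruptible)
{
	int ret;

	ret = ttm_bo_reserve(bo, interruptible, false, false, NULL);
	if (ret)
		return ret;

	ret = ttm_bo_validate(bo, placement, interruptible, false);

	ttm_bo_unreserve(bo);
	return ret;
}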
/drivers/gpu/drm/msm/
msm_ringbuffer.c
35 ring->bo = msm_gem_new(gpu->dev, size, MSM_BO_WC);
36 if (IS_ERR(ring->bo)) {
37 ret = PTR_ERR(ring->bo);
38 ring->bo = NULL;
42 ring->start = msm_gem_vaddr_locked(ring->bo);
58 if (ring->bo)
59 drm_gem_object_unreference(ring->bo);
/drivers/gpu/drm/ast/
ast_ttm.c
97 struct ast_bo *bo; local
99 bo = container_of(tbo, struct ast_bo, bo);
101 drm_gem_object_release(&bo->gem);
102 kfree(bo);
105 static bool ast_ttm_bo_is_ast_bo(struct ttm_buffer_object *bo) argument
107 if (bo->destroy == &ast_bo_ttm_destroy)
138 ast_bo_evict_flags(struct ttm_buffer_object *bo, struct ttm_placement *pl) argument
140 struct ast_bo *astbo = ast_bo(bo);
142 if (!ast_ttm_bo_is_ast_bo(bo))
149 ast_bo_verify_access(struct ttm_buffer_object *bo, struct file *filp) argument
189 ast_bo_move(struct ttm_buffer_object *bo, bool evict, bool interruptible, bool no_wait_gpu, struct ttm_mem_reg *new_mem) argument
293 ast_ttm_placement(struct ast_bo *bo, int domain) argument
350 ast_bo_gpu_offset(struct ast_bo *bo) argument
355 ast_bo_pin(struct ast_bo *bo, u32 pl_flag, u64 *gpu_addr) argument
378 ast_bo_unpin(struct ast_bo *bo) argument
398 ast_bo_push_sysram(struct ast_bo *bo) argument
[all...]
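
ast, mgag200, bochs and cirrus share this TTM boilerplate almost line for line: pinning bumps a pin_count if the bo is already pinned, otherwise it validates with TTM_PL_FLAG_NO_EVICT OR'd into the chosen placement. A reconstruction of the common pin helper (details may differ slightly per driver and tree):

int ast_bo_pin(struct ast_bo *bo, u32 pl_flag, u64 *gpu_addr)
{
	int i, ret;

	if (bo->pin_count) {			/* already resident */
		bo->pin_count++;
		if (gpu_addr)
			*gpu_addr = ast_bo_gpu_offset(bo);
		return 0;
	}

	ast_ttm_placement(bo, pl_flag);
	for (i = 0; i < bo->placement.num_placement; i++)
		bo->placements[i].flags |= TTM_PL_FLAG_NO_EVICT;
	ret = ttm_bo_validate(&bo->bo, &bo->placement, false, false);
	if (ret)
		return ret;

	bo->pin_count = 1;
	if (gpu_addr)
		*gpu_addr = ast_bo_gpu_offset(bo);
	return 0;
}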
/drivers/gpu/drm/mgag200/
mgag200_ttm.c
97 struct mgag200_bo *bo; local
99 bo = container_of(tbo, struct mgag200_bo, bo);
101 drm_gem_object_release(&bo->gem);
102 kfree(bo);
105 static bool mgag200_ttm_bo_is_mgag200_bo(struct ttm_buffer_object *bo) argument
107 if (bo->destroy == &mgag200_bo_ttm_destroy)
138 mgag200_bo_evict_flags(struct ttm_buffer_object *bo, struct ttm_placement *pl) argument
140 struct mgag200_bo *mgabo = mgag200_bo(bo);
142 if (!mgag200_ttm_bo_is_mgag200_bo(bo))
149 mgag200_bo_verify_access(struct ttm_buffer_object *bo, struct file *filp) argument
189 mgag200_bo_move(struct ttm_buffer_object *bo, bool evict, bool interruptible, bool no_wait_gpu, struct ttm_mem_reg *new_mem) argument
293 mgag200_ttm_placement(struct mgag200_bo *bo, int domain) argument
350 mgag200_bo_gpu_offset(struct mgag200_bo *bo) argument
355 mgag200_bo_pin(struct mgag200_bo *bo, u32 pl_flag, u64 *gpu_addr) argument
379 mgag200_bo_unpin(struct mgag200_bo *bo) argument
399 mgag200_bo_push_sysram(struct mgag200_bo *bo) argument
[all...]
/drivers/gpu/host1x/
job.h
25 struct host1x_bo *bo; member in struct:host1x_job_gather
38 struct host1x_bo *bo; member in struct:host1x_waitchk
45 struct host1x_bo *bo; member in struct:host1x_job_unpin_data
/drivers/gpu/drm/bochs/
bochs_mm.c
10 static void bochs_ttm_placement(struct bochs_bo *bo, int domain);
76 struct bochs_bo *bo; local
78 bo = container_of(tbo, struct bochs_bo, bo);
79 drm_gem_object_release(&bo->gem);
80 kfree(bo);
83 static bool bochs_ttm_bo_is_bochs_bo(struct ttm_buffer_object *bo) argument
85 if (bo->destroy == &bochs_bo_ttm_destroy)
115 bochs_bo_evict_flags(struct ttm_buffer_object *bo, struct ttm_placement *pl) argument
117 struct bochs_bo *bochsbo = bochs_bo(bo);
126 bochs_bo_verify_access(struct ttm_buffer_object *bo, struct file *filp) argument
168 bochs_bo_move(struct ttm_buffer_object *bo, bool evict, bool interruptible, bool no_wait_gpu, struct ttm_mem_reg *new_mem) argument
258 bochs_ttm_placement(struct bochs_bo *bo, int domain) argument
285 bochs_bo_gpu_offset(struct bochs_bo *bo) argument
290 bochs_bo_pin(struct bochs_bo *bo, u32 pl_flag, u64 *gpu_addr) argument
314 bochs_bo_unpin(struct bochs_bo *bo) argument
434 bochs_bo_unref(struct bochs_bo **bo) argument
458 struct bochs_bo *bo; local
[all...]
bochs_fbdev.c
53 struct bochs_bo *bo = NULL; local
66 /* alloc, pin & map bo */
73 bo = gem_to_bochs_bo(gobj);
75 ret = ttm_bo_reserve(&bo->bo, true, false, false, NULL);
79 ret = bochs_bo_pin(bo, TTM_PL_FLAG_VRAM, NULL);
82 ttm_bo_unreserve(&bo->bo);
86 ret = ttm_bo_kmap(&bo->bo,
[all...]
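
The fbdev probe is the canonical consumer of these helpers: allocate a GEM object, reserve it, pin it into VRAM, kmap it, and point the framebuffer at the mapping. Completing the sequence above as a sketch (the kmap destination is assumed to be a ttm_bo_kmap_obj member on bochs_bo, here called kmap):

ret = ttm_bo_reserve(&bo->bo, true, false, false, NULL);
if (ret)
	return ret;

ret = bochs_bo_pin(bo, TTM_PL_FLAG_VRAM, NULL);
if (ret) {
	ttm_bo_unreserve(&bo->bo);
	return ret;
}

ret = ttm_bo_kmap(&bo->bo, 0, bo->bo.num_pages, &bo->kmap);
ttm_bo_unreserve(&bo->bo);	/* the pin keeps the bo in place */
if (ret)
	return ret;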
/drivers/gpu/drm/cirrus/
cirrus_ttm.c
97 struct cirrus_bo *bo; local
99 bo = container_of(tbo, struct cirrus_bo, bo);
101 drm_gem_object_release(&bo->gem);
102 kfree(bo);
105 static bool cirrus_ttm_bo_is_cirrus_bo(struct ttm_buffer_object *bo) argument
107 if (bo->destroy == &cirrus_bo_ttm_destroy)
138 cirrus_bo_evict_flags(struct ttm_buffer_object *bo, struct ttm_placement *pl) argument
140 struct cirrus_bo *cirrusbo = cirrus_bo(bo);
142 if (!cirrus_ttm_bo_is_cirrus_bo(bo))
149 cirrus_bo_verify_access(struct ttm_buffer_object *bo, struct file *filp) argument
189 cirrus_bo_move(struct ttm_buffer_object *bo, bool evict, bool interruptible, bool no_wait_gpu, struct ttm_mem_reg *new_mem) argument
298 cirrus_ttm_placement(struct cirrus_bo *bo, int domain) argument
354 cirrus_bo_gpu_offset(struct cirrus_bo *bo) argument
359 cirrus_bo_pin(struct cirrus_bo *bo, u32 pl_flag, u64 *gpu_addr) argument
382 cirrus_bo_push_sysram(struct cirrus_bo *bo) argument
[all...]
/drivers/gpu/drm/nouveau/
nv50_fence.c
40 struct ttm_mem_reg *mem = &priv->bo->bo.mem;
65 struct nouveau_bo *bo = nv50_display_crtc_sema(dev, i); local
66 u32 start = bo->bo.mem.start * PAGE_SIZE;
67 u32 limit = start + bo->bo.mem.size - 1;
103 0, 0x0000, NULL, NULL, &priv->bo);
105 ret = nouveau_bo_pin(priv->bo, TTM_PL_FLAG_VRAM);
107 ret = nouveau_bo_map(priv->bo);
[all...]
nv10_fence.h
15 struct nouveau_bo *bo; member in struct:nv10_fence_priv
nouveau_bo.h
11 struct ttm_buffer_object bo; member in struct:nouveau_bo
44 nouveau_bo(struct ttm_buffer_object *bo) argument
46 return container_of(bo, struct nouveau_bo, bo);
58 *pnvbo = ref ? nouveau_bo(ttm_bo_reference(&ref->bo)) : NULL;
60 struct ttm_buffer_object *bo = &prev->bo; local
62 ttm_bo_unref(&bo);
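
The fragments above are nouveau's reference helper: take a reference on the new bo (or NULL), swap the pointer, then drop the old reference through ttm_bo_unref(), which needs an lvalue of its own. Reassembled as a sketch:

static inline void
nouveau_bo_ref(struct nouveau_bo *ref, struct nouveau_bo **pnvbo)
{
	struct nouveau_bo *prev = *pnvbo;

	/* take the new reference before dropping the old one */
	*pnvbo = ref ? nouveau_bo(ttm_bo_reference(&ref->bo)) : NULL;
	if (prev) {
		struct ttm_buffer_object *bo = &prev->bo;

		ttm_bo_unref(&bo);	/* may free prev */
	}
}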
nouveau_prime.c
34 int npages = nvbo->bo.num_pages;
36 return drm_prime_pages_to_sg(nvbo->bo.ttm->pages, npages);
44 ret = ttm_bo_kmap(&nvbo->bo, 0, nvbo->bo.num_pages,
81 ret = drm_gem_object_init(dev, &nvbo->gem, nvbo->bo.mem.size);
114 return nvbo->bo.resv;
nv84_fence.c
107 return nouveau_bo_rd32(priv->bo, chan->chid * 16/4);
119 struct nouveau_bo *bo = nv50_display_crtc_sema(dev, i); local
120 nouveau_bo_vma_del(bo, &fctx->dispc_vma[i]);
123 nouveau_bo_wr32(priv->bo, chan->chid * 16 / 4, fctx->base.sequence);
124 nouveau_bo_vma_del(priv->bo, &fctx->vma_gart);
125 nouveau_bo_vma_del(priv->bo, &fctx->vma);
151 ret = nouveau_bo_vma_add(priv->bo, cli->vm, &fctx->vma);
159 struct nouveau_bo *bo = nv50_display_crtc_sema(chan->drm->dev, i); local
160 ret = nouveau_bo_vma_add(bo, cli->vm, &fctx->dispc_vma[i]);
177 priv->suspend[i] = nouveau_bo_rd32(priv->bo,
[all...]

Completed in 552 milliseconds
