Searched refs:ttm (Results 1 - 22 of 22) sorted by relevance

/drivers/gpu/drm/ttm/
ttm_tt.c
43 #include "ttm/ttm_module.h"
44 #include "ttm/ttm_bo_driver.h"
45 #include "ttm/ttm_placement.h"
46 #include "ttm/ttm_page_alloc.h"
49 * Allocates storage for pointers to the pages that back the ttm.
51 static void ttm_tt_alloc_page_directory(struct ttm_tt *ttm) argument
53 ttm->pages = drm_calloc_large(ttm->num_pages, sizeof(void*));
56 static void ttm_dma_tt_alloc_page_directory(struct ttm_dma_tt *ttm) argument
58 ttm
103 ttm_tt_set_caching(struct ttm_tt *ttm, enum ttm_caching_state c_state) argument
149 ttm_tt_set_placement_caching(struct ttm_tt *ttm, uint32_t placement) argument
164 ttm_tt_destroy(struct ttm_tt *ttm) argument
185 ttm_tt_init(struct ttm_tt *ttm, struct ttm_bo_device *bdev, unsigned long size, uint32_t page_flags, struct page *dummy_read_page) argument
208 ttm_tt_fini(struct ttm_tt *ttm) argument
219 struct ttm_tt *ttm = &ttm_dma->ttm; local
243 struct ttm_tt *ttm = &ttm_dma->ttm; local
252 ttm_tt_unbind(struct ttm_tt *ttm) argument
263 ttm_tt_bind(struct ttm_tt *ttm, struct ttm_mem_reg *bo_mem) argument
287 ttm_tt_swapin(struct ttm_tt *ttm) argument
333 ttm_tt_swapout(struct ttm_tt *ttm, struct file *persistent_swap_storage) argument
[all...]
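
Note: the ttm_tt.c hits above center on the per-object page directory. ttm_tt_alloc_page_directory() sizes an array of page pointers to num_pages with drm_calloc_large(), and the later populate/bind/swap paths all index into that array. Below is a minimal userspace sketch of just that idea; the names (my_tt, fake_page) are invented and plain calloc() stands in for drm_calloc_large(), so this is an illustration, not the kernel code.

    #include <stdio.h>
    #include <stdlib.h>

    struct fake_page { unsigned long pfn; };

    struct my_tt {
        unsigned long num_pages;
        struct fake_page **pages;   /* one slot per backing page, NULL until populated */
    };

    static int my_tt_alloc_page_directory(struct my_tt *tt)
    {
        /* the kernel uses drm_calloc_large(num_pages, sizeof(void *)); plain calloc here */
        tt->pages = calloc(tt->num_pages, sizeof(*tt->pages));
        return tt->pages ? 0 : -1;
    }

    int main(void)
    {
        struct my_tt tt = { .num_pages = 16 };

        if (my_tt_alloc_page_directory(&tt))
            return 1;
        printf("directory for %lu page pointers allocated\n", tt.num_pages);
        free(tt.pages);
        return 0;
    }

The real structure also carries caching and populated/unpopulated state, which is what the ttm_tt_set_caching() and tt_unpopulated hits above are about.
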
ttm_agp_backend.c
34 #include "ttm/ttm_module.h"
35 #include "ttm/ttm_bo_driver.h"
36 #include "ttm/ttm_page_alloc.h"
38 #include "ttm/ttm_placement.h"
46 struct ttm_tt ttm; member in struct:ttm_agp_backend
51 static int ttm_agp_bind(struct ttm_tt *ttm, struct ttm_mem_reg *bo_mem) argument
53 struct ttm_agp_backend *agp_be = container_of(ttm, struct ttm_agp_backend, ttm);
59 mem = agp_allocate_memory(agp_be->bridge, ttm->num_pages, AGP_USER_MEMORY);
64 for (i = 0; i < ttm
84 ttm_agp_unbind(struct ttm_tt *ttm) argument
97 ttm_agp_destroy(struct ttm_tt *ttm) argument
136 ttm_agp_tt_populate(struct ttm_tt *ttm) argument
145 ttm_agp_tt_unpopulate(struct ttm_tt *ttm) argument
[all...]
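
Note: the pattern worth noticing in ttm_agp_backend.c (and repeated in nouveau_sgdma.c, vmwgfx_buffer.c and radeon_ttm.c further down) is that each driver embeds struct ttm_tt in its own backend struct and recovers the wrapper with container_of() inside bind/unbind/destroy. A self-contained sketch of that idiom follows; container_of is open-coded via offsetof so it builds outside the kernel, and every other name is invented for illustration.

    #include <stddef.h>
    #include <stdio.h>

    /* open-coded so the sketch compiles in userspace */
    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    struct ttm_tt_like { unsigned long num_pages; };

    struct agp_backend_like {
        struct ttm_tt_like ttm;     /* what the TTM core sees and passes around */
        int bridge_id;              /* driver-private state */
    };

    static void bind_sketch(struct ttm_tt_like *ttm)
    {
        /* core code only hands the embedded ttm to the backend; step back out */
        struct agp_backend_like *be = container_of(ttm, struct agp_backend_like, ttm);

        printf("bind %lu pages through bridge %d\n", ttm->num_pages, be->bridge_id);
    }

    int main(void)
    {
        struct agp_backend_like be = { .ttm = { .num_pages = 8 }, .bridge_id = 1 };

        bind_sketch(&be.ttm);
        return 0;
    }
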
Makefile
5 ttm-y := ttm_agp_backend.o ttm_memory.o ttm_tt.o ttm_bo.o \
11 ttm-y += ttm_page_alloc_dma.o
14 obj-$(CONFIG_DRM_TTM) += ttm.o
ttm_bo_util.c
31 #include "ttm/ttm_bo_driver.h"
32 #include "ttm/ttm_placement.h"
49 struct ttm_tt *ttm = bo->ttm; local
54 ttm_tt_unbind(ttm);
61 ret = ttm_tt_set_placement_caching(ttm, new_mem->placement);
66 ret = ttm_tt_bind(ttm, new_mem);
243 static int ttm_copy_io_ttm_page(struct ttm_tt *ttm, void *src, argument
247 struct page *d = ttm->pages[page];
280 static int ttm_copy_ttm_io_page(struct ttm_tt *ttm, voi argument
322 struct ttm_tt *ttm = bo->ttm; local
511 struct ttm_tt *ttm = bo->ttm; local
[all...]
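
Note: the ttm_bo_util.c fragment at lines 54-66 shows the order of operations when a buffer object moves into a TT placement: unbind the old mapping, fix up caching for the new placement, then bind, bailing out on the first error. The sketch below reproduces only that control flow; the *_stub helpers are stand-ins invented here, not TTM calls.

    #include <stdio.h>

    /* stand-ins for ttm_tt_unbind(), ttm_tt_set_placement_caching(), ttm_tt_bind() */
    static void unbind_stub(void)               { puts("unbind old mapping"); }
    static int set_caching_stub(unsigned flags) { printf("caching for %#x\n", flags); return 0; }
    static int bind_stub(unsigned flags)        { printf("bind at %#x\n", flags);     return 0; }

    static int move_ttm_sketch(unsigned new_placement)
    {
        int ret;

        unbind_stub();
        ret = set_caching_stub(new_placement);
        if (ret)
            return ret;             /* give up before binding, like the real code */
        return bind_stub(new_placement);
    }

    int main(void)
    {
        return move_ttm_sketch(0x4);
    }
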
ttm_page_alloc.c
48 #include "ttm/ttm_bo_driver.h"
49 #include "ttm/ttm_page_alloc.h"
255 * Select the right pool or requested caching state and ttm flags. */
840 int ttm_pool_populate(struct ttm_tt *ttm) argument
842 struct ttm_mem_global *mem_glob = ttm->glob->mem_glob;
846 if (ttm->state != tt_unpopulated)
849 for (i = 0; i < ttm->num_pages; ++i) {
850 ret = ttm_get_pages(&ttm->pages[i], 1,
851 ttm->page_flags,
852 ttm
879 ttm_pool_unpopulate(struct ttm_tt *ttm) argument
[all...]
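
Note: ttm_pool_populate() above (lines 849 onward) fills the page directory one slot at a time and has to cope with running out of memory part-way through. A userspace sketch of that allocate-or-roll-back loop follows; alloc_one_page() and unpopulate_all() are invented helpers, and malloc/free stand in for the page pool, so treat it as a shape, not the implementation.

    #include <errno.h>
    #include <stdlib.h>

    struct sketch_tt {
        unsigned long num_pages;
        void **pages;               /* assumed zeroed (calloc'd) before populating */
    };

    static int alloc_one_page(void **slot)
    {
        *slot = malloc(4096);       /* stands in for a page-pool allocation */
        return *slot ? 0 : -ENOMEM;
    }

    static void unpopulate_all(struct sketch_tt *tt)
    {
        for (unsigned long i = 0; i < tt->num_pages; ++i) {
            free(tt->pages[i]);     /* free(NULL) is a no-op for untouched slots */
            tt->pages[i] = NULL;
        }
    }

    static int populate_sketch(struct sketch_tt *tt)
    {
        for (unsigned long i = 0; i < tt->num_pages; ++i) {
            int ret = alloc_one_page(&tt->pages[i]);
            if (ret) {
                unpopulate_all(tt); /* roll back the pages already allocated */
                return ret;
            }
        }
        return 0;
    }

    int main(void)
    {
        struct sketch_tt tt = { .num_pages = 4 };

        tt.pages = calloc(tt.num_pages, sizeof(*tt.pages));
        if (!tt.pages || populate_sketch(&tt))
            return 1;
        unpopulate_all(&tt);
        free(tt.pages);
        return 0;
    }
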
ttm_page_alloc_dma.c
50 #include "ttm/ttm_bo_driver.h"
51 #include "ttm/ttm_page_alloc.h"
840 struct ttm_tt *ttm = &ttm_dma->ttm; local
848 ttm->pages[index] = d_page->p;
865 struct ttm_tt *ttm = &ttm_dma->ttm; local
866 struct ttm_mem_global *mem_glob = ttm->glob->mem_glob;
873 if (ttm->state != tt_unpopulated)
876 type = ttm_to_type(ttm
937 struct ttm_tt *ttm = &ttm_dma->ttm; local
[all...]
ttm_bo_vm.c
33 #include <ttm/ttm_module.h>
34 #include <ttm/ttm_bo_driver.h>
35 #include <ttm/ttm_placement.h>
82 struct ttm_tt *ttm = NULL; local
175 ttm = bo->ttm;
181 if (ttm->bdev->driver->ttm_tt_populate(ttm)) {
195 page = ttm->pages[page_offset];
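
Note: the ttm_bo_vm.c hits show the fault path: the driver's ttm_tt_populate() hook is invoked lazily when a CPU access faults, and the faulting page is then looked up as ttm->pages[page_offset]. A compact userspace sketch of that shape, with every name below invented for illustration:

    #include <stdio.h>
    #include <stdlib.h>

    struct sketch_tt {
        unsigned long num_pages;
        void **pages;
        int populated;
    };

    /* plays the role of the driver's populate hook; allocates lazily on first fault */
    static int ensure_populated(struct sketch_tt *tt)
    {
        if (tt->populated)
            return 0;
        for (unsigned long i = 0; i < tt->num_pages; ++i)
            if (!(tt->pages[i] = calloc(1, 4096)))
                return -1;          /* real code would unwind and report an OOM fault */
        tt->populated = 1;
        return 0;
    }

    static void *fault_sketch(struct sketch_tt *tt, unsigned long page_offset)
    {
        if (page_offset >= tt->num_pages)
            return NULL;            /* faulting address past the object */
        if (ensure_populated(tt))
            return NULL;
        return tt->pages[page_offset];  /* the ttm->pages[page_offset] lookup above */
    }

    int main(void)
    {
        struct sketch_tt tt = { .num_pages = 4 };

        tt.pages = calloc(tt.num_pages, sizeof(*tt.pages));
        if (!tt.pages)
            return 1;
        printf("page 2 backed at %p\n", fault_sketch(&tt, 2));
        return 0;
    }
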
ttm_bo.c
33 #include "ttm/ttm_module.h"
34 #include "ttm/ttm_bo_driver.h"
35 #include "ttm/ttm_placement.h"
150 if (bo->ttm)
151 ttm_tt_destroy(bo->ttm);
188 if (bo->ttm != NULL) {
331 bo->ttm = NULL;
341 bo->ttm = bdev->driver->ttm_tt_create(bdev, bo->num_pages << PAGE_SHIFT,
343 if (unlikely(bo->ttm == NULL))
377 * Create and bind a ttm i
[all...]
/drivers/gpu/drm/nouveau/
nouveau_ttm.c
41 return ttm_bo_mmap(filp, vma, &dev_priv->ttm.bdev);
62 global_ref = &dev_priv->ttm.mem_global_ref;
71 dev_priv->ttm.mem_global_ref.release = NULL;
75 dev_priv->ttm.bo_global_ref.mem_glob = global_ref->object;
76 global_ref = &dev_priv->ttm.bo_global_ref.ref;
85 drm_global_item_unref(&dev_priv->ttm.mem_global_ref);
86 dev_priv->ttm.mem_global_ref.release = NULL;
96 if (dev_priv->ttm.mem_global_ref.release == NULL)
99 drm_global_item_unref(&dev_priv->ttm.bo_global_ref.ref);
100 drm_global_item_unref(&dev_priv->ttm
[all...]
nouveau_sgdma.c
14 struct ttm_dma_tt ttm; member in struct:nouveau_sgdma_be
20 nouveau_sgdma_destroy(struct ttm_tt *ttm) argument
22 struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)ttm;
24 if (ttm) {
26 ttm_dma_tt_fini(&nvbe->ttm);
32 nv04_sgdma_bind(struct ttm_tt *ttm, struct ttm_mem_reg *mem) argument
34 struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)ttm;
44 for (i = 0; i < ttm->num_pages; i++) {
45 dma_addr_t dma_offset = nvbe->ttm.dma_address[i];
58 nv04_sgdma_unbind(struct ttm_tt *ttm) argument
99 nv41_sgdma_bind(struct ttm_tt *ttm, struct ttm_mem_reg *mem) argument
120 nv41_sgdma_unbind(struct ttm_tt *ttm) argument
144 nv44_sgdma_flush(struct ttm_tt *ttm) argument
208 nv44_sgdma_bind(struct ttm_tt *ttm, struct ttm_mem_reg *mem) argument
248 nv44_sgdma_unbind(struct ttm_tt *ttm) argument
287 nv50_sgdma_bind(struct ttm_tt *ttm, struct ttm_mem_reg *mem) argument
298 nv50_sgdma_unbind(struct ttm_tt *ttm) argument
[all...]
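
Note: nv04_sgdma_bind() above walks ttm->num_pages and programs one entry per page from nvbe->ttm.dma_address[] into the GPU's page table. The sketch below mimics that loop against an in-memory array; the page-table layout, shift value and names are made up for the example and are not nouveau's.

    #include <stdint.h>
    #include <stdio.h>

    #define SKETCH_PAGE_SHIFT 12

    typedef uint64_t dma_addr_sketch;   /* plays the role of dma_addr_t */

    static void sgdma_bind_sketch(uint32_t *pgt, unsigned long first_entry,
                                  const dma_addr_sketch *dma_address,
                                  unsigned long num_pages)
    {
        for (unsigned long i = 0; i < num_pages; i++) {
            /* one page-table entry per backing page; real hardware also packs
             * validity/flag bits into the low bits, omitted in this sketch */
            pgt[first_entry + i] = (uint32_t)(dma_address[i] >> SKETCH_PAGE_SHIFT);
        }
    }

    int main(void)
    {
        uint32_t pgt[8] = { 0 };
        dma_addr_sketch dma[2] = { 0x10000, 0x20000 };

        sgdma_bind_sketch(pgt, 4, dma, 2);
        printf("pgt[4]=%u pgt[5]=%u\n", (unsigned)pgt[4], (unsigned)pgt[5]);
        return 0;
    }
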
nouveau_bo.c
31 #include "ttm/ttm_page_alloc.h"
107 nvbo->bo.bdev = &dev_priv->ttm.bdev;
119 acc_size = ttm_bo_dma_acc_size(&dev_priv->ttm.bdev, size,
122 ret = ttm_bo_init(&dev_priv->ttm.bdev, &nvbo->bo, size,
127 /* ttm will call nouveau_bo_del_ttm if it fails.. */
704 * old nouveau_mem node, these will get cleaned up after ttm has
757 ret = ttm_tt_bind(bo->ttm, &tmp_mem);
810 /* ttm can now (stupidly) pass the driver bos it didn't create... */
881 if (old_mem->mem_type == TTM_PL_SYSTEM && !bo->ttm) {
1054 nouveau_ttm_tt_populate(struct ttm_tt *ttm) argument
1103 nouveau_ttm_tt_unpopulate(struct ttm_tt *ttm) argument
[all...]
nouveau_debugfs.c
36 #include <ttm/ttm_page_alloc.h>
167 ret = ttm_bo_evict_mm(&dev_priv->ttm.bdev, TTM_PL_VRAM);
nouveau_mem.c
158 ttm_bo_device_release(&dev_priv->ttm.bdev);
341 struct ttm_bo_device *bdev = &dev_priv->ttm.bdev;
371 ret = ttm_bo_device_init(&dev_priv->ttm.bdev,
372 dev_priv->ttm.bo_global_ref.ref.object,
440 struct ttm_bo_device *bdev = &dev_priv->ttm.bdev;
nouveau_gem.c
205 if (unlikely(dev_priv->ttm.bdev.dev_mapping == NULL))
206 dev_priv->ttm.bdev.dev_mapping = dev_priv->dev->dev_mapping;
317 sequence = atomic_add_return(1, &dev_priv->ttm.validate_sequence);
nouveau_drv.c
214 ttm_bo_evict_mm(&dev_priv->ttm.bdev, TTM_PL_VRAM);
nouveau_drv.h
42 #include "ttm/ttm_bo_api.h"
43 #include "ttm/ttm_bo_driver.h"
44 #include "ttm/ttm_placement.h"
45 #include "ttm/ttm_memory.h"
46 #include "ttm/ttm_module.h"
743 } ttm; member in struct:drm_nouveau_private
858 return container_of(bd, struct drm_nouveau_private, ttm.bdev);
nouveau_state.c
952 ttm_bo_clean_mm(&dev_priv->ttm.bdev, TTM_PL_VRAM);
953 ttm_bo_clean_mm(&dev_priv->ttm.bdev, TTM_PL_TT);
/drivers/gpu/drm/vmwgfx/
vmwgfx_buffer.c
29 #include "ttm/ttm_bo_driver.h"
30 #include "ttm/ttm_placement.h"
31 #include "ttm/ttm_page_alloc.h"
144 struct ttm_tt ttm; member in struct:vmw_ttm_tt
149 static int vmw_ttm_bind(struct ttm_tt *ttm, struct ttm_mem_reg *bo_mem) argument
151 struct vmw_ttm_tt *vmw_be = container_of(ttm, struct vmw_ttm_tt, ttm);
155 return vmw_gmr_bind(vmw_be->dev_priv, ttm->pages,
156 ttm->num_pages, vmw_be->gmr_id);
159 static int vmw_ttm_unbind(struct ttm_tt *ttm) argument
167 vmw_ttm_destroy(struct ttm_tt *ttm) argument
[all...]
/drivers/gpu/drm/radeon/
radeon_ttm.c
32 #include <ttm/ttm_bo_api.h>
33 #include <ttm/ttm_bo_driver.h>
34 #include <ttm/ttm_placement.h>
35 #include <ttm/ttm_module.h>
36 #include <ttm/ttm_page_alloc.h>
333 r = ttm_tt_set_placement_caching(bo->ttm, tmp_mem.placement);
338 r = ttm_tt_bind(bo->ttm, &tmp_mem);
401 if (old_mem->mem_type == TTM_PL_SYSTEM && bo->ttm == NULL) {
535 struct ttm_dma_tt ttm; member in struct:radeon_ttm_tt
540 static int radeon_ttm_backend_bind(struct ttm_tt *ttm, argument
561 radeon_ttm_backend_unbind(struct ttm_tt *ttm) argument
569 radeon_ttm_backend_destroy(struct ttm_tt *ttm) argument
611 radeon_ttm_tt_populate(struct ttm_tt *ttm) argument
656 radeon_ttm_tt_unpopulate(struct ttm_tt *ttm) argument
[all...]
radeon.h
68 #include <ttm/ttm_bo_api.h>
69 #include <ttm/ttm_bo_driver.h>
70 #include <ttm/ttm_placement.h>
71 #include <ttm/ttm_module.h>
72 #include <ttm/ttm_execbuf_util.h>
/drivers/gpu/drm/
Makefile
30 obj-$(CONFIG_DRM_TTM) += ttm/
/drivers/net/ethernet/dec/tulip/
de4x5.c
516 u_int ttm; /* Transmit Threshold Mode for each media */ member in struct:mii_phy
4644 lp->phy[lp->active].ttm = get_unaligned_le16(p);
4725 lp->phy[lp->active].ttm = get_unaligned_le16(p); p += 2;

Completed in 259 milliseconds