Searched defs:new_mem (Results 1 - 12 of 12) sorted by last modified time

/drivers/vfio/pci/
  vfio_pci_config.c
    504: bool phys_mem, virt_mem, new_mem, phys_io, virt_io, new_io;   (local)
    515: new_mem = !!(new_cmd & PCI_COMMAND_MEMORY);
    522:  * If the user is writing mem/io enable (new_mem/io) and we
    529: if ((new_mem && virt_mem && !phys_mem) ||
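The vfio hit virtualizes the memory/IO enable bits a guest writes to the PCI command register. Below is a minimal standalone sketch of the same bit test; the register bit values are the standard PCI ones (0x1 for I/O space, 0x2 for memory space), but the surrounding program and the sample command word are hypothetical.

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Standard PCI command-register enable bits (PCI spec / linux/pci_regs.h). */
    #define PCI_COMMAND_IO     0x1   /* enable response in I/O space */
    #define PCI_COMMAND_MEMORY 0x2   /* enable response in memory space */

    int main(void)
    {
        uint16_t new_cmd = 0x0006;   /* hypothetical value written by the guest */

        /* Same idiom as line 515: collapse the enable bit to a bool. */
        bool new_mem = !!(new_cmd & PCI_COMMAND_MEMORY);
        bool new_io  = !!(new_cmd & PCI_COMMAND_IO);

        printf("memory enable: %d, io enable: %d\n", new_mem, new_io);
        return 0;
    }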
/drivers/pci/hotplug/
  ibmphp_res.c
    208: struct resource_node *new_mem = NULL;   (local)
    307: new_mem = alloc_resources (curr);
    308: if (!new_mem)
    310: new_mem->type = MEM;
    318: if (ibmphp_add_resource (new_mem) < 0) {
    322: newbus->firstMem = new_mem;
    324: new_mem->rangeno = -1;
    326: debug ("Memory resource for device %x, bus %x, [%x - %x]\n", new_mem->devfunc, new_mem->busno, new_mem ...
  [all...]
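The ibmphp hits trace the usual allocate, initialize, then register flow for a hotplug memory resource node. The sketch below imitates that flow with deliberately simplified stand-ins for struct resource_node, alloc_resources() and ibmphp_add_resource(); it is not the driver's real API, only an illustration of the pattern.

    #include <stdio.h>
    #include <stdlib.h>

    enum res_type { IO, MEM, PFMEM };        /* simplified resource kinds */

    struct resource_node {                   /* hypothetical, trimmed-down node */
        enum res_type type;
        int rangeno;                         /* -1 = not yet assigned to a range */
        unsigned int busno, devfunc;
        unsigned long start, end;
    };

    /* Stand-in allocator; the real alloc_resources() fills fields from the device. */
    static struct resource_node *alloc_node(void)
    {
        return calloc(1, sizeof(struct resource_node));
    }

    /* Stand-in registration; the real ibmphp_add_resource() links per-bus lists. */
    static int add_resource(struct resource_node *res)
    {
        return res ? 0 : -1;
    }

    int main(void)
    {
        struct resource_node *new_mem = alloc_node();
        if (!new_mem)
            return 1;

        new_mem->type = MEM;
        new_mem->rangeno = -1;               /* no range assigned yet, as at line 324 */

        if (add_resource(new_mem) < 0) {
            free(new_mem);
            return 1;
        }

        printf("Memory resource for device %x, bus %x, [%lx - %lx]\n",
               new_mem->devfunc, new_mem->busno, new_mem->start, new_mem->end);
        free(new_mem);
        return 0;
    }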
/drivers/gpu/drm/mgag200/
  mgag200_ttm.c
    192: struct ttm_mem_reg *new_mem)
    195: r = ttm_bo_move_memcpy(bo, evict, no_wait_gpu, new_mem);
    189: mgag200_bo_move(struct ttm_buffer_object *bo, bool evict, bool interruptible, bool no_wait_gpu, struct ttm_mem_reg *new_mem)   (argument)
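mgag200 implements its .move hook as a thin wrapper around ttm_bo_move_memcpy(); the ast, bochs and cirrus entries further down do exactly the same, so this one sketch covers all four. A minimal version of that wrapper, assuming the ttm_bo_driver API of the kernel tree indexed here (the function name is illustrative):

    #include <drm/ttm/ttm_bo_api.h>
    #include <drm/ttm/ttm_bo_driver.h>

    /*
     * Hypothetical .move callback in the style of mgag200/ast/bochs/cirrus:
     * these devices have no usable copy engine, so every buffer move falls
     * back to TTM's CPU memcpy helper.
     */
    static int example_bo_move(struct ttm_buffer_object *bo, bool evict,
                               bool interruptible, bool no_wait_gpu,
                               struct ttm_mem_reg *new_mem)
    {
        /* Same call the drivers above make: copy the contents page by page. */
        return ttm_bo_move_memcpy(bo, evict, no_wait_gpu, new_mem);
    }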
/drivers/gpu/drm/nouveau/
  nouveau_bo.c
    587: struct ttm_mem_reg *old_mem, struct ttm_mem_reg *new_mem)
    600: OUT_RING (chan, new_mem->num_pages);
    619: struct ttm_mem_reg *old_mem, struct ttm_mem_reg *new_mem)
    624: u32 page_count = new_mem->num_pages;
    627: page_count = new_mem->num_pages;
    657: struct ttm_mem_reg *old_mem, struct ttm_mem_reg *new_mem)
    662: u32 page_count = new_mem->num_pages;
    665: page_count = new_mem->num_pages;
    696: struct ttm_mem_reg *old_mem, struct ttm_mem_reg *new_mem)
    701: u32 page_count = new_mem ...
    586: nve0_bo_move_copy(struct nouveau_channel *chan, struct ttm_buffer_object *bo, struct ttm_mem_reg *old_mem, struct ttm_mem_reg *new_mem)   (argument)
    618: nvc0_bo_move_copy(struct nouveau_channel *chan, struct ttm_buffer_object *bo, struct ttm_mem_reg *old_mem, struct ttm_mem_reg *new_mem)   (argument)
    656: nvc0_bo_move_m2mf(struct nouveau_channel *chan, struct ttm_buffer_object *bo, struct ttm_mem_reg *old_mem, struct ttm_mem_reg *new_mem)   (argument)
    695: nva3_bo_move_copy(struct nouveau_channel *chan, struct ttm_buffer_object *bo, struct ttm_mem_reg *old_mem, struct ttm_mem_reg *new_mem)   (argument)
    733: nv98_bo_move_exec(struct nouveau_channel *chan, struct ttm_buffer_object *bo, struct ttm_mem_reg *old_mem, struct ttm_mem_reg *new_mem)   (argument)
    751: nv84_bo_move_exec(struct nouveau_channel *chan, struct ttm_buffer_object *bo, struct ttm_mem_reg *old_mem, struct ttm_mem_reg *new_mem)   (argument)
    785: nv50_bo_move_m2mf(struct nouveau_channel *chan, struct ttm_buffer_object *bo, struct ttm_mem_reg *old_mem, struct ttm_mem_reg *new_mem)   (argument)
    881: nv04_bo_move_m2mf(struct nouveau_channel *chan, struct ttm_buffer_object *bo, struct ttm_mem_reg *old_mem, struct ttm_mem_reg *new_mem)   (argument)
    953: nouveau_bo_move_m2mf(struct ttm_buffer_object *bo, int evict, bool intr, bool no_wait_gpu, struct ttm_mem_reg *new_mem)   (argument)
    1051: nouveau_bo_move_flipd(struct ttm_buffer_object *bo, bool evict, bool intr, bool no_wait_gpu, struct ttm_mem_reg *new_mem)   (argument)
    1087: nouveau_bo_move_flips(struct ttm_buffer_object *bo, bool evict, bool intr, bool no_wait_gpu, struct ttm_mem_reg *new_mem)   (argument)
    1122: nouveau_bo_move_ntfy(struct ttm_buffer_object *bo, struct ttm_mem_reg *new_mem)   (argument)
    1143: nouveau_bo_vm_bind(struct ttm_buffer_object *bo, struct ttm_mem_reg *new_mem, struct nouveau_drm_tile **new_tile)   (argument)
    1178: nouveau_bo_move(struct ttm_buffer_object *bo, bool evict, bool intr, bool no_wait_gpu, struct ttm_mem_reg *new_mem)   (argument)
  [all...]
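The nouveau copy helpers all start from new_mem->num_pages and feed the copy engine in bounded chunks (see lines 624-665). The loop below is a simplified, self-contained illustration of that chunking pattern; the 8191-line cap and the submit_copy() stand-in are hypothetical, not nouveau's actual limits or API.

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical per-submission limit; the real engines have their own caps. */
    #define MAX_LINES_PER_PUSH 8191

    /* Stand-in for pushing one copy command to a hardware channel. */
    static void submit_copy(uint32_t first_page, uint32_t line_count)
    {
        printf("copy %u pages starting at page %u\n", line_count, first_page);
    }

    /* Copy a buffer in bounded chunks, the way the move_copy helpers walk
     * new_mem->num_pages. */
    static void copy_in_chunks(uint32_t num_pages)
    {
        uint32_t page_count = num_pages;    /* as in: page_count = new_mem->num_pages */
        uint32_t next_page = 0;

        while (page_count) {
            uint32_t line_count = (page_count > MAX_LINES_PER_PUSH)
                                  ? MAX_LINES_PER_PUSH : page_count;

            submit_copy(next_page, line_count);
            next_page += line_count;
            page_count -= line_count;
        }
    }

    int main(void)
    {
        copy_in_chunks(20000);              /* hypothetical buffer size in pages */
        return 0;
    }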
/drivers/gpu/drm/qxl/
  qxl_cmd.c
    487: struct ttm_mem_reg *new_mem)
    512: if (new_mem) {
    519: cmd->u.surface_create.data |= (new_mem->start << PAGE_SHIFT) + surf->tbo.bdev->man[new_mem->mem_type].gpu_offset;
    485: qxl_hw_surface_alloc(struct qxl_device *qdev, struct qxl_bo *surf, struct ttm_mem_reg *new_mem)   (argument)
  qxl_ttm.c
    338: struct ttm_mem_reg *new_mem)
    343: *old_mem = *new_mem;
    344: new_mem->mm_node = NULL;
    350: struct ttm_mem_reg *new_mem)
    354: qxl_move_null(bo, new_mem);
    357: return ttm_bo_move_memcpy(bo, evict, no_wait_gpu, new_mem);
    361: struct ttm_mem_reg *new_mem)
    372: qxl_surface_evict(qdev, qbo, new_mem ? true : false);
    337: qxl_move_null(struct ttm_buffer_object *bo, struct ttm_mem_reg *new_mem)   (argument)
    347: qxl_bo_move(struct ttm_buffer_object *bo, bool evict, bool interruptible, bool no_wait_gpu, struct ttm_mem_reg *new_mem)   (argument)
    360: qxl_bo_move_notify(struct ttm_buffer_object *bo, struct ttm_mem_reg *new_mem)   (argument)
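Two idioms appear in the qxl hits: the "null move", where the new placement simply becomes the current one and its mm_node is handed over (qxl_move_null, lines 343-344), and turning a placement into a device address as start << PAGE_SHIFT plus the memory manager's gpu_offset (line 519). A simplified, self-contained sketch of both, with trimmed-down stand-ins for ttm_mem_reg and the manager table:

    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SHIFT 12                   /* 4 KiB pages, as on x86 */

    /* Trimmed-down stand-in for struct ttm_mem_reg. */
    struct mem_reg {
        void *mm_node;                      /* allocator node backing this placement */
        unsigned long start;                /* placement offset, in pages */
        uint32_t mem_type;                  /* index into the manager table */
    };

    /* Per-memory-type base offset, like bdev->man[type].gpu_offset. */
    static const uint64_t gpu_offset[] = { 0, 0x80000000ull };

    /* The "null move" idiom: the new placement simply becomes the current one. */
    static void move_null(struct mem_reg *old_mem, struct mem_reg *new_mem)
    {
        *old_mem = *new_mem;                /* adopt the new placement wholesale */
        new_mem->mm_node = NULL;            /* ownership of the node moved with it */
    }

    /* Device address of a placement: page offset plus the type's base offset. */
    static uint64_t gpu_addr(const struct mem_reg *mem)
    {
        return ((uint64_t)mem->start << PAGE_SHIFT) + gpu_offset[mem->mem_type];
    }

    int main(void)
    {
        int dummy_node;                     /* placeholder for an allocator node */
        struct mem_reg old = { NULL, 0, 0 };
        struct mem_reg new_mem = { &dummy_node, 256, 1 };

        move_null(&old, &new_mem);          /* old now describes the new placement */
        printf("gpu address: 0x%llx\n", (unsigned long long)gpu_addr(&old));
        return 0;
    }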
/drivers/gpu/drm/radeon/
  radeon_object.c
    730: struct ttm_mem_reg *new_mem)
    742: if (!new_mem)
    746: radeon_update_memory_usage(rbo, new_mem->mem_type, 1);
    729: radeon_bo_move_notify(struct ttm_buffer_object *bo, struct ttm_mem_reg *new_mem)   (argument)
  radeon_ttm.c
    219: struct ttm_mem_reg *new_mem)
    224: *old_mem = *new_mem;
    225: new_mem->mm_node = NULL;
    230: struct ttm_mem_reg *new_mem,
    242: new_start = new_mem->start << PAGE_SHIFT;
    255: switch (new_mem->mem_type) {
    273: num_pages = new_mem->num_pages * (PAGE_SIZE / RADEON_GPU_PAGE_SIZE);
    279: evict, no_wait_gpu, new_mem);
    287: struct ttm_mem_reg *new_mem)
    297: tmp_mem = *new_mem;
    218: radeon_move_null(struct ttm_buffer_object *bo, struct ttm_mem_reg *new_mem)   (argument)
    228: radeon_move_blit(struct ttm_buffer_object *bo, bool evict, bool no_wait_gpu, struct ttm_mem_reg *new_mem, struct ttm_mem_reg *old_mem)   (argument)
    284: radeon_move_vram_ram(struct ttm_buffer_object *bo, bool evict, bool interruptible, bool no_wait_gpu, struct ttm_mem_reg *new_mem)   (argument)
    331: radeon_move_ram_vram(struct ttm_buffer_object *bo, bool evict, bool interruptible, bool no_wait_gpu, struct ttm_mem_reg *new_mem)   (argument)
    371: radeon_bo_move(struct ttm_buffer_object *bo, bool evict, bool interruptible, bool no_wait_gpu, struct ttm_mem_reg *new_mem)   (argument)
  [all...]
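radeon_move_blit (line 273) converts the move size from CPU pages to GPU pages, since the host page size can exceed the GPU's 4 KiB page (RADEON_GPU_PAGE_SIZE is 4096 in the driver). A small standalone sketch of that conversion; the sample page count is hypothetical.

    #include <stdio.h>

    /* Host CPU page size; 4096 on x86, but larger on some other architectures. */
    #define PAGE_SIZE            4096UL
    /* The GPU addresses 4 KiB pages (RADEON_GPU_PAGE_SIZE in the driver). */
    #define RADEON_GPU_PAGE_SIZE 4096UL

    int main(void)
    {
        unsigned long ttm_num_pages = 128;  /* hypothetical new_mem->num_pages */

        /* One CPU page maps to PAGE_SIZE / RADEON_GPU_PAGE_SIZE GPU pages. */
        unsigned long num_pages = ttm_num_pages * (PAGE_SIZE / RADEON_GPU_PAGE_SIZE);

        printf("%lu CPU pages -> %lu GPU pages\n", ttm_num_pages, num_pages);
        return 0;
    }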
/drivers/gpu/drm/ttm/
  ttm_bo_util.c
    49: bool no_wait_gpu, struct ttm_mem_reg *new_mem)
    63: ret = ttm_tt_set_placement_caching(ttm, new_mem->placement);
    67: if (new_mem->mem_type != TTM_PL_SYSTEM) {
    68: ret = ttm_tt_bind(ttm, new_mem);
    73: *old_mem = *new_mem;
    74: new_mem->mm_node = NULL;
    325: struct ttm_mem_reg *new_mem)
    328: struct ttm_mem_type_manager *man = &bdev->man[new_mem->mem_type];
    343: ret = ttm_mem_reg_ioremap(bdev, new_mem, &new_iomap);
    359: memset_io(new_iomap, 0, new_mem ...
    47: ttm_bo_move_ttm(struct ttm_buffer_object *bo, bool evict, bool no_wait_gpu, struct ttm_mem_reg *new_mem)   (argument)
    323: ttm_bo_move_memcpy(struct ttm_buffer_object *bo, bool evict, bool no_wait_gpu, struct ttm_mem_reg *new_mem)   (argument)
    633: ttm_bo_move_accel_cleanup(struct ttm_buffer_object *bo, struct fence *fence, bool evict, bool no_wait_gpu, struct ttm_mem_reg *new_mem)   (argument)
  [all...]
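ttm_bo_move_ttm (lines 49-74) is the common core the drivers above build on: fix up the page caching, bind the pages to the new placement unless it is plain system memory, then adopt new_mem and clear its mm_node. The sketch below reproduces that sequence with simplified, hypothetical stand-ins for the caching and bind steps.

    #include <stddef.h>
    #include <stdio.h>

    enum mem_type { PL_SYSTEM, PL_TT, PL_VRAM };    /* simplified placement types */

    struct mem_reg {                                /* trimmed-down ttm_mem_reg */
        enum mem_type mem_type;
        unsigned int placement;                     /* caching flags */
        void *mm_node;
    };

    /* Stand-ins for ttm_tt_set_placement_caching() / ttm_tt_bind(). */
    static int set_placement_caching(unsigned int placement) { (void)placement; return 0; }
    static int bind_pages(const struct mem_reg *mem) { (void)mem; return 0; }

    /* Same shape as ttm_bo_move_ttm: cache flags, optional bind, adopt new_mem. */
    static int move_ttm(struct mem_reg *old_mem, struct mem_reg *new_mem)
    {
        int ret;

        ret = set_placement_caching(new_mem->placement);
        if (ret)
            return ret;

        if (new_mem->mem_type != PL_SYSTEM) {       /* system memory needs no binding */
            ret = bind_pages(new_mem);
            if (ret)
                return ret;
        }

        *old_mem = *new_mem;                        /* the new placement is now current */
        new_mem->mm_node = NULL;
        return 0;
    }

    int main(void)
    {
        struct mem_reg old = { PL_SYSTEM, 0, NULL };
        struct mem_reg new_mem = { PL_TT, 0, NULL };

        printf("move_ttm -> %d, current type %d\n",
               move_ttm(&old, &new_mem), (int)old.mem_type);
        return 0;
    }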
/drivers/gpu/drm/ast/
  ast_ttm.c
    192: struct ttm_mem_reg *new_mem)
    195: r = ttm_bo_move_memcpy(bo, evict, no_wait_gpu, new_mem);
    189: ast_bo_move(struct ttm_buffer_object *bo, bool evict, bool interruptible, bool no_wait_gpu, struct ttm_mem_reg *new_mem)   (argument)
/drivers/gpu/drm/bochs/
  bochs_mm.c
    171: struct ttm_mem_reg *new_mem)
    173: return ttm_bo_move_memcpy(bo, evict, no_wait_gpu, new_mem);
    168: bochs_bo_move(struct ttm_buffer_object *bo, bool evict, bool interruptible, bool no_wait_gpu, struct ttm_mem_reg *new_mem)   (argument)
/drivers/gpu/drm/cirrus/
  cirrus_ttm.c
    192: struct ttm_mem_reg *new_mem)
    195: r = ttm_bo_move_memcpy(bo, evict, no_wait_gpu, new_mem);
    189: cirrus_bo_move(struct ttm_buffer_object *bo, bool evict, bool interruptible, bool no_wait_gpu, struct ttm_mem_reg *new_mem)   (argument)

Completed in 430 milliseconds