Searched defs:vma (Results 1 - 25 of 187) sorted by relevance


/drivers/gpu/drm/nouveau/
nouveau_fb.h
    33  struct nouveau_vma vma;    (member in struct nouveau_framebuffer)
nouveau_ttm.c
    32  nouveau_ttm_mmap(struct file *filp, struct vm_area_struct *vma)    (argument)
    38      if (unlikely(vma->vm_pgoff < DRM_FILE_PAGE_OFFSET))
    39          return drm_mmap(filp, vma);
    41      return ttm_bo_mmap(filp, vma, &dev_priv->ttm.bdev);
nvc0_vm.c
    46  nvc0_vm_addr(struct nouveau_vma *vma, u64 phys, u32 memtype, u32 target)    (argument)
    51      if (vma->access & NV_MEM_ACCESS_SYS)
    61  nvc0_vm_map(struct nouveau_vma *vma, struct nouveau_gpuobj *pgt,    (argument)
    64      u32 next = 1 << (vma->node->type - 8);
    66      phys = nvc0_vm_addr(vma, phys, mem->memtype, 0);
    77  nvc0_vm_map_sg(struct nouveau_vma *vma, struct nouveau_gpuobj *pgt,    (argument)
    80      u32 target = (vma->access & NV_MEM_ACCESS_NOSNOOP) ? 7 : 5;
    84      u64 phys = nvc0_vm_addr(vma, *list++, mem->memtype, target);
nouveau_dma.c
    113     struct nouveau_vma *vma;    (local)
    117     vma = nouveau_bo_vma_find(bo, chan->vm);
    118     BUG_ON(!vma);
    119     offset = vma->offset + delta;
nv50_vm.c
    60  vm_addr(struct nouveau_vma *vma, u64 phys, u32 memtype, u32 target)    (argument)
    65      if (vma->access & NV_MEM_ACCESS_SYS)
    67      if (!(vma->access & NV_MEM_ACCESS_WO))
    73  nv50_vm_map(struct nouveau_vma *vma, struct nouveau_gpuobj *pgt,    (argument)
    76      struct drm_nouveau_private *dev_priv = vma->vm->dev->dev_private;
    88      phys = vm_addr(vma, phys, mem->memtype, target);
    103     phys += block << (vma->node->type - 3);
    108     delta += block << (vma->node->type - 3);
    121 nv50_vm_map_sg(struct nouveau_vma *vma, struct nouveau_gpuobj *pgt,    (argument)
    124     u32 target = (vma
    [all...]
/drivers/auxdisplay/
cfag12864bfb.c
    67  static int cfag12864bfb_mmap(struct fb_info *info, struct vm_area_struct *vma)    (argument)
    69      return vm_insert_page(vma, vma->vm_start,
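
The cfag12864bfb match is the simplest mmap shape in this list: the whole display buffer fits in one kernel page, so the handler just inserts that page at the start of the VMA. A minimal sketch of the pattern; the buffer name is hypothetical and is assumed to have been allocated with __get_free_page():

#include <linux/fb.h>
#include <linux/mm.h>

/* Hypothetical one-page framebuffer, assumed allocated with __get_free_page(). */
static unsigned char *example_fb_buffer;

static int example_fb_mmap(struct fb_info *info, struct vm_area_struct *vma)
{
        /* Back the first page of the mapping with the kernel buffer page. */
        return vm_insert_page(vma, vma->vm_start,
                              virt_to_page(example_fb_buffer));
}
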
/drivers/char/
uv_mmtimer.c
    43  static int uv_mmtimer_mmap(struct file *file, struct vm_area_struct *vma);
    142  * @vma: VMA to map the registers into
    147 static int uv_mmtimer_mmap(struct file *file, struct vm_area_struct *vma)    (argument)
    151     if (vma->vm_end - vma->vm_start != PAGE_SIZE)
    154     if (vma->vm_flags & VM_WRITE)
    160     vma->vm_page_prot = pgprot_noncached(vma->vm_page_prot);
    166     if (remap_pfn_range(vma, vma
    [all...]
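
The uv_mmtimer matches show the classic way to expose one page of device registers read-only through a character device: size check, write check, uncached protection, then remap_pfn_range(). A condensed sketch of that pattern, with a hypothetical register base address:

#include <linux/fs.h>
#include <linux/mm.h>

static unsigned long example_mmr_phys;  /* hypothetical MMIO base, filled in at probe time */

static int example_mmtimer_mmap(struct file *file, struct vm_area_struct *vma)
{
        if (vma->vm_end - vma->vm_start != PAGE_SIZE)
                return -EINVAL;         /* only a single page may be mapped */

        if (vma->vm_flags & VM_WRITE)
                return -EPERM;          /* the registers are read-only from userspace */

        /* Register pages must not be cached. */
        vma->vm_page_prot = pgprot_noncached(vma->vm_page_prot);

        if (remap_pfn_range(vma, vma->vm_start, example_mmr_phys >> PAGE_SHIFT,
                            PAGE_SIZE, vma->vm_page_prot))
                return -EAGAIN;

        return 0;
}
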
/drivers/gpu/drm/vmwgfx/
vmwgfx_ttm_glue.c
    31  int vmw_mmap(struct file *filp, struct vm_area_struct *vma)    (argument)
    36      if (unlikely(vma->vm_pgoff < VMWGFX_FILE_PAGE_OFFSET)) {
    43      return ttm_bo_mmap(filp, vma, &dev_priv->bdev);
/drivers/gpu/ion/
ion_system_mapper.c
    65      struct vm_area_struct *vma,
    74      ret = remap_pfn_range(vma, vma->vm_start, pfn + vma->vm_pgoff,
    75                            vma->vm_end - vma->vm_start,
    76                            vma->vm_page_prot);
    80      ret = remap_vmalloc_range(vma, buffer->priv, vma->vm_pgoff);
    63  ion_kernel_mapper_map_user(struct ion_mapper *mapper, struct ion_buffer *buffer, struct vm_area_struct *vma, struct ion_mapping *mapping)    (argument)
/drivers/xen/xenbus/
xenbus_dev_backend.c
    41  static int xenbus_backend_mmap(struct file *file, struct vm_area_struct *vma)    (argument)
    43      size_t size = vma->vm_end - vma->vm_start;
    48      if ((size > PAGE_SIZE) || (vma->vm_pgoff != 0))
    51      if (remap_pfn_range(vma, vma->vm_start,
    53                          size, vma->vm_page_prot))
/drivers/xen/xenfs/
xenstored.c
    33  static int xsd_kva_mmap(struct file *file, struct vm_area_struct *vma)    (argument)
    35      size_t size = vma->vm_end - vma->vm_start;
    37      if ((size > PAGE_SIZE) || (vma->vm_pgoff != 0))
    40      if (remap_pfn_range(vma, vma->vm_start,
    42                          size, vma->vm_page_prot))
/drivers/infiniband/hw/ipath/
ipath_mmap.c
    64  static void ipath_vma_open(struct vm_area_struct *vma)    (argument)
    66      struct ipath_mmap_info *ip = vma->vm_private_data;
    71  static void ipath_vma_close(struct vm_area_struct *vma)    (argument)
    73      struct ipath_mmap_info *ip = vma->vm_private_data;
    86   * @vma: the VMA to be initialized
    89  int ipath_mmap(struct ib_ucontext *context, struct vm_area_struct *vma)    (argument)
    92      unsigned long offset = vma->vm_pgoff << PAGE_SHIFT;
    93      unsigned long size = vma->vm_end - vma->vm_start;
    115     ret = remap_vmalloc_range(vma, i
    [all...]
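
ipath_mmap() (and qib_mmap() further down) maps vmalloc'ed per-context structures into userspace with remap_vmalloc_range(). A reduced sketch of that step; the object is assumed to have been looked up from vma->vm_pgoff already and to have been allocated with vmalloc_user(), which remap_vmalloc_range() requires:

#include <linux/mm.h>
#include <linux/vmalloc.h>

/* Map a vmalloc_user()-allocated object into the VMA handed to mmap(). */
static int example_mmap_vmalloc_obj(struct vm_area_struct *vma,
                                    void *obj, size_t obj_size)
{
        unsigned long size = vma->vm_end - vma->vm_start;

        if (size > PAGE_ALIGN(obj_size))
                return -EINVAL;         /* mapping larger than the object */

        return remap_vmalloc_range(vma, obj, 0);
}
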
ipath_user_pages.c
    57      struct page **p, struct vm_area_struct **vma)
    77                      p + got, vma);
    56  __ipath_get_user_pages(unsigned long start_page, size_t num_pages, struct page **p, struct vm_area_struct **vma)    (argument)
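
__ipath_get_user_pages() (and its __qib_get_user_pages() copy below) pins user buffers with get_user_pages(). In kernels of this vintage the call takes the task and mm explicitly and must run under mmap_sem; a hedged sketch of the pinning loop, with hypothetical naming:

#include <linux/mm.h>
#include <linux/sched.h>

/* Pin num_pages user pages starting at the page-aligned address start_page. */
static int example_pin_user_pages(unsigned long start_page, size_t num_pages,
                                  struct page **pages)
{
        size_t got;
        int ret;

        down_read(&current->mm->mmap_sem);
        for (got = 0; got < num_pages; got += ret) {
                ret = get_user_pages(current, current->mm,
                                     start_page + got * PAGE_SIZE,
                                     num_pages - got,
                                     1, 0,      /* writable, not forced */
                                     pages + got, NULL);
                if (ret < 1)
                        goto bail_release;
        }
        up_read(&current->mm->mmap_sem);
        return 0;

bail_release:
        /* Release whatever was already pinned before reporting the error. */
        while (got--)
                put_page(pages[got]);
        up_read(&current->mm->mmap_sem);
        return ret < 0 ? ret : -EFAULT;
}
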
/drivers/infiniband/hw/qib/
qib_mmap.c
    64  static void qib_vma_open(struct vm_area_struct *vma)    (argument)
    66      struct qib_mmap_info *ip = vma->vm_private_data;
    71  static void qib_vma_close(struct vm_area_struct *vma)    (argument)
    73      struct qib_mmap_info *ip = vma->vm_private_data;
    86   * @vma: the VMA to be initialized
    89  int qib_mmap(struct ib_ucontext *context, struct vm_area_struct *vma)    (argument)
    92      unsigned long offset = vma->vm_pgoff << PAGE_SHIFT;
    93      unsigned long size = vma->vm_end - vma->vm_start;
    115     ret = remap_vmalloc_range(vma, i
    [all...]
qib_user_pages.c
    55      struct page **p, struct vm_area_struct **vma)
    72                      p + got, vma);
    54  __qib_get_user_pages(unsigned long start_page, size_t num_pages, struct page **p, struct vm_area_struct **vma)    (argument)
/drivers/gpu/drm/gma500/
gem.c
    179  * @vma: the VMA of the GEM object
    191  * vma->vm_private_data points to the GEM object that is backing this
    194 int psb_gem_fault(struct vm_area_struct *vma, struct vm_fault *vmf)    (argument)
    204     obj = vma->vm_private_data; /* GEM object */
    227     page_offset = ((unsigned long) vmf->virtual_address - vma->vm_start)
    235     ret = vm_insert_pfn(vma, (unsigned long)vmf->virtual_address, pfn);
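
psb_gem_fault() has the standard GEM fault-handler shape for this kernel generation: fetch the object from vma->vm_private_data, turn the faulting address into a page offset, insert a pfn, and translate the errno into a VM_FAULT_* code. A trimmed sketch over a hypothetical, physically contiguous object:

#include <linux/mm.h>

/* Hypothetical backing object: physically contiguous, starting at base_pfn. */
struct example_gem_object {
        unsigned long base_pfn;
};

static int example_gem_fault(struct vm_area_struct *vma, struct vm_fault *vmf)
{
        struct example_gem_object *obj = vma->vm_private_data;
        unsigned long page_offset;
        int ret;

        /* Which page of the mapping faulted? */
        page_offset = ((unsigned long)vmf->virtual_address - vma->vm_start)
                        >> PAGE_SHIFT;

        /* The mmap path is assumed to have left the VMA as VM_PFNMAP. */
        ret = vm_insert_pfn(vma, (unsigned long)vmf->virtual_address,
                            obj->base_pfn + page_offset);
        switch (ret) {
        case 0:
        case -ERESTARTSYS:
                return VM_FAULT_NOPAGE;
        case -ENOMEM:
                return VM_FAULT_OOM;
        default:
                return VM_FAULT_SIGBUS;
        }
}
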
/drivers/media/video/
videobuf2-dma-contig.c
    30      struct vm_area_struct *vma;    (member in struct vb2_dc_buf)
    101 static int vb2_dma_contig_mmap(void *buf_priv, struct vm_area_struct *vma)    (argument)
    110     return vb2_mmap_pfn_range(vma, buf->dma_addr, buf->size,
    118     struct vm_area_struct *vma;    (local)
    126     ret = vb2_get_contig_userptr(vaddr, size, &vma, &dma_addr);
    136     buf->vma = vma;
    148     vb2_put_vma(buf->vma);
videobuf2-memops.c
    27   * @vma: given virtual memory area
    36  struct vm_area_struct *vb2_get_vma(struct vm_area_struct *vma)    (argument)
    44      if (vma->vm_ops && vma->vm_ops->open)
    45          vma->vm_ops->open(vma);
    47      if (vma->vm_file)
    48          get_file(vma->vm_file);
    50      memcpy(vma_copy, vma, sizeof(*vma));
    67  vb2_put_vma(struct vm_area_struct *vma)    (argument)
    100     struct vm_area_struct *vma;    (local)
    149 vb2_mmap_pfn_range(struct vm_area_struct *vma, unsigned long paddr, unsigned long size, const struct vm_operations_struct *vm_ops, void *priv)    (argument)
    186 vb2_common_vm_open(struct vm_area_struct *vma)    (argument)
    204 vb2_common_vm_close(struct vm_area_struct *vma)    (argument)
    [all...]
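
vb2_get_vma() keeps a USERPTR mapping usable after mmap_sem is dropped by taking its own references: duplicate the vm_area_struct, run the area's open() hook, and pin the backing file. A sketch of just that reference-taking, assuming a matching put path later frees the copy and drops the references:

#include <linux/mm.h>
#include <linux/slab.h>
#include <linux/fs.h>

static struct vm_area_struct *example_get_vma(struct vm_area_struct *vma)
{
        struct vm_area_struct *vma_copy;

        vma_copy = kmalloc(sizeof(*vma_copy), GFP_KERNEL);
        if (!vma_copy)
                return NULL;

        /* Tell the owner of the mapping that another user now exists. */
        if (vma->vm_ops && vma->vm_ops->open)
                vma->vm_ops->open(vma);

        /* Keep the backing file around for as long as the copy lives. */
        if (vma->vm_file)
                get_file(vma->vm_file);

        memcpy(vma_copy, vma, sizeof(*vma));
        return vma_copy;
}
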
videobuf2-vmalloc.c
    26      struct vm_area_struct *vma;    (member in struct vb2_vmalloc_buf)
    76      struct vm_area_struct *vma;    (local)
    88      vma = find_vma(current->mm, vaddr);
    89      if (vma && (vma->vm_flags & VM_PFNMAP) && (vma->vm_pgoff)) {
    90          if (vb2_get_contig_userptr(vaddr, size, &vma, &physp))
    92      buf->vma = vma;
    151     if (buf->vma)
    177 vb2_vmalloc_mmap(void *buf_priv, struct vm_area_struct *vma)    (argument)
    [all...]
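
The videobuf2-vmalloc get_userptr path looks up the VMA behind a user pointer to decide whether it is a VM_PFNMAP area and therefore needs the contiguous-memory handling. find_vma() must be called under mmap_sem and only guarantees vm_end > addr, so the start has to be checked as well; a sketch of that lookup:

#include <linux/mm.h>
#include <linux/sched.h>

/* Return true if vaddr lies inside a VM_PFNMAP area of the current process. */
static bool example_uaddr_is_pfnmap(unsigned long vaddr)
{
        struct vm_area_struct *vma;
        bool ret = false;

        down_read(&current->mm->mmap_sem);
        vma = find_vma(current->mm, vaddr);
        if (vma && vma->vm_start <= vaddr && (vma->vm_flags & VM_PFNMAP))
                ret = true;
        up_read(&current->mm->mmap_sem);

        return ret;
}
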
/drivers/video/pnx4008/
pnxrgbfb.c
    80  static int rgbfb_mmap(struct fb_info *info, struct vm_area_struct *vma)    (argument)
    82      return pnx4008_sdum_mmap(info, vma, NULL);
/drivers/base/
dma-buf.c
    47  static int dma_buf_mmap_internal(struct file *file, struct vm_area_struct *vma)    (argument)
    57      if (vma->vm_pgoff + ((vma->vm_end - vma->vm_start) >> PAGE_SHIFT) >
    61      return dmabuf->ops->mmap(dmabuf, vma);
    431  * dma_buf_mmap - Setup up a userspace mmap with the given vma
    432  * @dma_buf: [in] buffer that should back the vma
    433  * @vma:     [in] vma for the mmap
    437  * This function adjusts the passed in vma s
    444 dma_buf_mmap(struct dma_buf *dmabuf, struct vm_area_struct *vma, unsigned long pgoff)    (argument)
    [all...]
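
dma_buf_mmap_internal() refuses any mapping whose page window would run past the end of the buffer before calling into the exporter's mmap op. Spelled out on its own (with dmabuf->size, the buffer length in bytes, passed as a plain parameter), the check matched at line 57 looks like this sketch:

#include <linux/mm.h>

/* True if the vma's page window [vm_pgoff, vm_pgoff + npages) fits in the buffer. */
static inline bool example_vma_fits_buffer(const struct vm_area_struct *vma,
                                           size_t buf_size)
{
        unsigned long npages = (vma->vm_end - vma->vm_start) >> PAGE_SHIFT;

        return vma->vm_pgoff + npages <= (buf_size >> PAGE_SHIFT);
}
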
/drivers/char/agp/
alpha-agp.c
    14  static int alpha_core_agp_vm_fault(struct vm_area_struct *vma,    (argument)
    22      dma_addr = (unsigned long)vmf->virtual_address - vma->vm_start
/drivers/gpu/drm/
drm_drv.c
    182     struct drm_vma_entry *vma, *vma_temp;    (local)
    223     /* Clear vma list (only built for debugging) */
    224     list_for_each_entry_safe(vma, vma_temp, &dev->vmalist, head) {
    225         list_del(&vma->head);
    226         kfree(vma);
drm_info.c
    269     struct vm_area_struct *vma;    (local)
    275     seq_printf(m, "vma use count: %d, high_memory = %pK, 0x%pK\n",
    280     vma = pt->vma;
    281     if (!vma)
    286     (void *)vma->vm_start, (void *)vma->vm_end,
    287     vma->vm_flags & VM_READ ? 'r' : '-',
    288     vma->vm_flags & VM_WRITE ? 'w' : '-',
    289     vma
    [all...]
/drivers/gpu/drm/udl/
udl_gem.c
    74  int udl_drm_gem_mmap(struct file *filp, struct vm_area_struct *vma)    (argument)
    78      ret = drm_gem_mmap(filp, vma);
    82      vma->vm_flags &= ~VM_PFNMAP;
    83      vma->vm_flags |= VM_MIXEDMAP;
    88  int udl_gem_fault(struct vm_area_struct *vma, struct vm_fault *vmf)    (argument)
    90      struct udl_gem_object *obj = to_udl_bo(vma->vm_private_data);
    95      page_offset = ((unsigned long)vmf->virtual_address - vma->vm_start) >>
    102     ret = vm_insert_page(vma, (unsigned long)vmf->virtual_address, page);
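
udl takes the other route from gma500: udl_drm_gem_mmap() lets drm_gem_mmap() set the VMA up, then swaps VM_PFNMAP for VM_MIXEDMAP so the fault handler may insert real struct pages with vm_insert_page(). A condensed sketch of such a fault handler over a hypothetical object that holds its backing pages; the page array is assumed to cover the whole mapping:

#include <linux/mm.h>

/* Hypothetical GEM-like object that keeps an array of backing pages. */
struct example_udl_object {
        struct page **pages;
};

static int example_udl_fault(struct vm_area_struct *vma, struct vm_fault *vmf)
{
        struct example_udl_object *obj = vma->vm_private_data;
        unsigned long page_offset;
        int ret;

        page_offset = ((unsigned long)vmf->virtual_address - vma->vm_start)
                        >> PAGE_SHIFT;

        /* Only valid because the mmap path switched the VMA to VM_MIXEDMAP. */
        ret = vm_insert_page(vma, (unsigned long)vmf->virtual_address,
                             obj->pages[page_offset]);
        switch (ret) {
        case 0:
        case -EBUSY:            /* another fault beat us to it; the PTE is in place */
                return VM_FAULT_NOPAGE;
        case -ENOMEM:
                return VM_FAULT_OOM;
        default:
                return VM_FAULT_SIGBUS;
        }
}
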

