Searched defs:pfn (Results 1 - 25 of 51) sorted by relevance

123

/drivers/gpu/ion/
H A Dion_system_mapper.c73 unsigned long pfn = __phys_to_pfn(virt_to_phys(buffer->priv)); local
74 ret = remap_pfn_range(vma, vma->vm_start, pfn + vma->vm_pgoff,
H A Dion_system_heap.c447 unsigned long pfn = __phys_to_pfn(virt_to_phys(buffer->priv_virt)); local
448 return remap_pfn_range(vma, vma->vm_start, pfn + vma->vm_pgoff,
/drivers/gpu/drm/gma500/
H A Dgem.c199 unsigned long pfn; local
232 pfn = (dev_priv->stolen_base + r->offset) >> PAGE_SHIFT;
234 pfn = page_to_pfn(r->pages[page_offset]);
235 ret = vm_insert_pfn(vma, (unsigned long)vmf->virtual_address, pfn);
H A Dgtt.c33 * @pfn: page number to encode
38 static inline uint32_t psb_gtt_mask_pte(uint32_t pfn, int type) argument
49 return (pfn << PAGE_SHIFT) | mask;
H A Dframebuffer.c123 unsigned long pfn; local
133 pfn = (phys_addr >> PAGE_SHIFT);
135 ret = vm_insert_mixed(vma, address, pfn);
/drivers/edac/
H A Dcell_edac.c37 unsigned long address, pfn, offset, syndrome; local
46 pfn = address >> PAGE_SHIFT;
51 edac_mc_handle_ce(mci, csrow->first_page + pfn, offset,
59 unsigned long address, pfn, offset; local
68 pfn = address >> PAGE_SHIFT;
72 edac_mc_handle_ue(mci, csrow->first_page + pfn, offset, 0, "");
H A Di3000_edac.c50 * unsigned long pfn and offset from hardware regs which are u8/u32.
237 unsigned long pfn, offset; local
252 pfn = deap_pfn(info->edeap, info->deap);
256 row = edac_mc_find_csrow_by_page(mci, pfn);
259 edac_mc_handle_ue(mci, pfn, offset, row, "i3000 UE");
261 edac_mc_handle_ce(mci, pfn, offset, info->derrsyn, row,
H A Dmpc85xx_edac.c782 u32 pfn; local
813 pfn = err_addr >> PAGE_SHIFT;
817 if ((pfn >= csrow->first_page) && (pfn <= csrow->last_page))
850 mpc85xx_mc_printk(mci, KERN_ERR, "PFN: %#8.8x\n", pfn);
857 edac_mc_handle_ce(mci, pfn, err_addr & ~PAGE_MASK,
861 edac_mc_handle_ue(mci, pfn, err_addr & ~PAGE_MASK,
/drivers/gpu/drm/vmwgfx/
H A Dvmwgfx_gmr.c117 unsigned long pfn; local
147 pfn = page_to_pfn(*pages);
149 if (pfn != prev_pfn + 1) {
155 (++desc_virtual)->ppn = cpu_to_le32(pfn);
162 prev_pfn = pfn;
223 unsigned long pfn; local
227 pfn = page_to_pfn(*pages++);
228 if (prev_pfn + 1 != pfn)
230 prev_pfn = pfn;
/drivers/char/
H A Dmspec.c203 unsigned long pfn; local
229 pfn = paddr >> PAGE_SHIFT;
236 vm_insert_pfn(vma, (unsigned long)vmf->virtual_address, pfn); local
H A Dmem.c28 #include <linux/pfn.h>
54 static inline int valid_mmap_phys_addr_range(unsigned long pfn, size_t size) argument
62 static inline int range_is_allowed(unsigned long pfn, unsigned long size) argument
64 u64 from = ((u64)pfn) << PAGE_SHIFT;
69 if (!devmem_is_allowed(pfn)) {
76 pfn++;
81 static inline int range_is_allowed(unsigned long pfn, unsigned long size) argument
220 unsigned long pfn, unsigned long size, pgprot_t *vma_prot)
261 static pgprot_t phys_mem_access_prot(struct file *file, unsigned long pfn, argument
265 unsigned long offset = pfn << PAGE_SHIF
219 phys_mem_access_prot_allowed(struct file *file, unsigned long pfn, unsigned long size, pgprot_t *vma_prot) argument
344 unsigned long pfn; local
371 unsigned long pfn, offset; local
[all...]
/drivers/gpu/drm/exynos/
H A Dexynos_drm_gem.c149 unsigned long pfn; local
155 pfn = page_to_pfn(buf->pages[page_offset++]);
157 pfn = (buf->dma_addr >> PAGE_SHIFT) + page_offset;
159 return vm_insert_mixed(vma, f_vaddr, pfn);
489 unsigned long pfn, vm_size, usize, uaddr = vma->vm_start; local
534 pfn = ((unsigned long)exynos_gem_obj->buffer->dma_addr) >>
537 DRM_DEBUG_KMS("pfn = 0x%lx\n", pfn);
539 if (remap_pfn_range(vma, vma->vm_start, pfn, vm_size,
541 DRM_ERROR("failed to remap pfn rang
[all...]
/drivers/gpu/drm/radeon/
H A Dradeon_gart.c522 unsigned pfn)
529 addr += pfn * RADEON_GPU_PAGE_SIZE;
535 addr += pfn * RADEON_GPU_PAGE_SIZE;
540 addr += (pfn * RADEON_GPU_PAGE_SIZE) & (~PAGE_MASK);
556 uint64_t addr = 0, pfn; local
584 pfn = bo_va->soffset / RADEON_GPU_PAGE_SIZE;
590 rdev->vm_manager.funcs->set_page(rdev, bo_va->vm, i + pfn, addr, flags);
520 radeon_vm_get_addr(struct radeon_device *rdev, struct ttm_mem_reg *mem, unsigned pfn) argument
/drivers/gpu/drm/ttm/
H A Dttm_bo_vm.c81 unsigned long pfn; local
193 pfn = ((bo->mem.bus.base + bo->mem.bus.offset) >> PAGE_SHIFT) + page_offset;
202 pfn = page_to_pfn(page);
205 ret = vm_insert_mixed(vma, address, pfn);
/drivers/iommu/
H A Diova.c70 /* only cache if it's below 32bit pfn */
240 * find_iova - finds an iova for a given pfn
242 * pfn - page frame number
244 * given domain which matches the given pfn.
246 struct iova *find_iova(struct iova_domain *iovad, unsigned long pfn) argument
257 /* If pfn falls within iova's range, return iova */
258 if ((pfn >= iova->pfn_lo) && (pfn <= iova->pfn_hi)) {
269 if (pfn < iova->pfn_lo)
271 else if (pfn > iov
305 free_iova(struct iova_domain *iovad, unsigned long pfn) argument
[all...]
H A Dtegra-gart.c240 unsigned long pfn; local
246 pfn = __phys_to_pfn(pa);
247 if (!pfn_valid(pfn)) {
252 gart_set_pte(gart, iova, GART_PTE(pfn));
/drivers/xen/
H A Dballoon.c317 unsigned long pfn, i; local
357 pfn = page_to_pfn(page);
359 phys_to_machine_mapping_valid(pfn));
361 set_phys_to_machine(pfn, frame_list[i]);
367 (unsigned long)__va(pfn << PAGE_SHIFT),
387 unsigned long pfn, i; local
415 pfn = page_to_pfn(page);
416 frame_list[i] = pfn_to_mfn(pfn);
422 (unsigned long)__va(pfn << PAGE_SHIFT),
435 pfn
565 unsigned long pfn, extra_pfn_end; local
[all...]
H A Dswiotlb-xen.c72 static int check_pages_physically_contiguous(unsigned long pfn, argument
80 next_mfn = pfn_to_mfn(pfn);
84 if (pfn_to_mfn(++pfn) != ++next_mfn)
92 unsigned long pfn = PFN_DOWN(p); local
97 if (check_pages_physically_contiguous(pfn, offset, size))
105 unsigned long pfn = mfn_to_local_pfn(mfn); local
112 if (pfn_valid(pfn)) {
113 paddr = PFN_PHYS(pfn);
/drivers/base/
H A Dmemory.c151 unsigned long i, pfn; local
157 pfn = section_nr_to_pfn(mem->start_section_nr + i);
158 ret &= is_mem_section_removable(pfn, PAGES_PER_SECTION);
217 unsigned long pfn = start_pfn; local
224 for (i = 0; i < sections_per_block; i++, pfn += PAGES_PER_SECTION) {
225 if (WARN_ON_ONCE(!pfn_valid(pfn)))
227 page = pfn_to_page(pfn);
235 pfn_to_section_nr(pfn), j);
444 u64 pfn; local
447 if (strict_strtoull(buf, 0, &pfn) <
463 u64 pfn; local
[all...]
H A Dnode.c359 static int get_nid_for_pfn(unsigned long pfn) argument
363 if (!pfn_valid_within(pfn))
365 page = pfn_to_page(pfn);
368 return pfn_to_nid(pfn);
375 unsigned long pfn, sect_start_pfn, sect_end_pfn; local
385 for (pfn = sect_start_pfn; pfn <= sect_end_pfn; pfn++) {
388 page_nid = get_nid_for_pfn(pfn);
412 unsigned long pfn, sect_start_pf local
447 unsigned long pfn; local
[all...]
/drivers/block/
H A Dbrd.c366 void **kaddr, unsigned long *pfn)
381 *pfn = page_to_pfn(page);
365 brd_direct_access(struct block_device *bdev, sector_t sector, void **kaddr, unsigned long *pfn) argument
/drivers/hv/
H A Dchannel.c250 unsigned long long pfn; local
260 pfn = virt_to_phys(kbuffer) >> PAGE_SHIFT;
289 gpadl_header->range[0].pfn_array[i] = pfn+i;
342 gpadl_body->pfn[i] = pfn + pfnsum + i;
368 gpadl_header->range[0].pfn_array[i] = pfn+i;
641 desc.range[i].pfn = pagebuffers[i].pfn;
/drivers/media/video/omap3isp/
H A Dispqueue.c230 unsigned long pfn = buf->paddr >> PAGE_SHIFT; local
239 for (i = 0; i < buf->npages; ++i, ++pfn) {
240 sg_set_page(&sglist[i], pfn_to_page(pfn), PAGE_SIZE - offset,
245 sg_dma_address(&sglist[i]) = (pfn << PAGE_SHIFT) + offset;
/drivers/virtio/
H A Dvirtio_balloon.c77 unsigned long pfn = page_to_pfn(page); local
80 /* Convert pfn from Linux page size to balloon page size. */
81 return pfn * VIRTIO_BALLOON_PAGES_PER_PAGE;
84 static struct page *balloon_pfn_to_page(u32 pfn) argument
86 BUG_ON(pfn % VIRTIO_BALLOON_PAGES_PER_PAGE);
87 return pfn_to_page(pfn / VIRTIO_BALLOON_PAGES_PER_PAGE);
122 * Note that the first pfn points at start of the page. */
/drivers/block/xen-blkback/
H A Dblkback.c116 unsigned long pfn = page_to_pfn(blkbk->pending_page(req, seg)); local
117 return (unsigned long)pfn_to_kaddr(pfn);

Completed in 1609 milliseconds

123