Searched defs:pte (Results 1 - 22 of 22) sorted by relevance

/drivers/gpu/drm/nouveau/
nvc0_vm.c
62 struct nouveau_mem *mem, u32 pte, u32 cnt, u64 phys, u64 delta)
67 pte <<= 3;
69 nv_wo32(pgt, pte + 0, lower_32_bits(phys));
70 nv_wo32(pgt, pte + 4, upper_32_bits(phys));
72 pte += 8;
78 struct nouveau_mem *mem, u32 pte, u32 cnt, dma_addr_t *list)
82 pte <<= 3;
85 nv_wo32(pgt, pte + 0, lower_32_bits(phys));
86 nv_wo32(pgt, pte + 4, upper_32_bits(phys));
87 pte += 8;
61 nvc0_vm_map(struct nouveau_vma *vma, struct nouveau_gpuobj *pgt, struct nouveau_mem *mem, u32 pte, u32 cnt, u64 phys, u64 delta) argument
77 nvc0_vm_map_sg(struct nouveau_vma *vma, struct nouveau_gpuobj *pgt, struct nouveau_mem *mem, u32 pte, u32 cnt, dma_addr_t *list) argument
92 nvc0_vm_unmap(struct nouveau_gpuobj *pgt, u32 pte, u32 cnt) argument
[all...]
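The nvc0 snippets above all follow one pattern: a PTE index becomes a byte offset (pte <<= 3, eight bytes per entry), then the low and high 32 bits of the physical address are written at +0 and +4. A minimal userspace model of that layout, with nv_wo32() stubbed as a plain store into a local array (in the driver it writes into the page-table GPU object) and the per-entry address step simplified to a fixed 4 KiB:

#include <stdint.h>

static uint32_t pgt[1024];                  /* stand-in for the pgt GPU object */

static void nv_wo32(uint32_t off, uint32_t val)
{
	pgt[off / 4] = val;                 /* byte offset into 32-bit words */
}

static void map_pages(uint32_t pte, uint32_t cnt, uint64_t phys)
{
	pte <<= 3;                          /* PTE index -> byte offset */
	while (cnt--) {
		nv_wo32(pte + 0, (uint32_t)phys);         /* lower_32_bits() */
		nv_wo32(pte + 4, (uint32_t)(phys >> 32)); /* upper_32_bits() */
		phys += 0x1000;             /* next page; a simplification */
		pte += 8;
	}
}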
nv50_vm.c
74 struct nouveau_mem *mem, u32 pte, u32 cnt, u64 phys, u64 delta)
89 pte <<= 3;
98 if (cnt >= block && !(pte & (block - 1)))
112 nv_wo32(pgt, pte + 0, offset_l);
113 nv_wo32(pgt, pte + 4, offset_h);
114 pte += 8;
122 struct nouveau_mem *mem, u32 pte, u32 cnt, dma_addr_t *list)
125 pte <<= 3;
128 nv_wo32(pgt, pte + 0, lower_32_bits(phys));
129 nv_wo32(pgt, pte + 4, upper_32_bits(phys));
73 nv50_vm_map(struct nouveau_vma *vma, struct nouveau_gpuobj *pgt, struct nouveau_mem *mem, u32 pte, u32 cnt, u64 phys, u64 delta) argument
121 nv50_vm_map_sg(struct nouveau_vma *vma, struct nouveau_gpuobj *pgt, struct nouveau_mem *mem, u32 pte, u32 cnt, dma_addr_t *list) argument
135 nv50_vm_unmap(struct nouveau_gpuobj *pgt, u32 pte, u32 cnt) argument
[all...]
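nv50 uses the same two-word entry, but line 98 shows the extra wrinkle: before mapping, it checks whether at least `block` entries remain and the PTE index is block-aligned, so a run of pages can be encoded as one large page. A hedged sketch of that selection test (the candidate block sizes are illustrative, not the driver's tables):

#include <stdint.h>

/* Largest power-of-two block that both fits the remaining run and
 * starts on a block-aligned PTE index; falls back to single pages. */
static uint32_t pick_block(uint32_t pte, uint32_t cnt)
{
	uint32_t block;

	for (block = 32; block > 1; block >>= 1)
		if (cnt >= block && !(pte & (block - 1)))
			break;
	return block;
}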
nouveau_sgdma.c
38 unsigned i, j, pte; local
43 pte = (nvbe->offset >> NV_CTXDMA_PAGE_SHIFT) + 2;
48 for (j = 0; j < PAGE_SIZE / NV_CTXDMA_PAGE_SIZE; j++, pte++) {
49 nv_wo32(gpuobj, (pte * 4) + 0, offset_l | 3);
64 unsigned i, j, pte; local
71 pte = (nvbe->offset >> NV_CTXDMA_PAGE_SHIFT) + 2;
73 for (j = 0; j < PAGE_SIZE / NV_CTXDMA_PAGE_SIZE; j++, pte++)
74 nv_wo32(gpuobj, (pte * 4) + 0, 0x00000000);
105 u32 pte = mem->start << 2; local
111 nv_wo32(pgt, pte, (*lis
125 u32 pte = (nvbe->offset >> 12) << 2; local
162 u32 pte, tmp[4]; local
214 u32 pte = mem->start << 2, tmp[4]; local
253 u32 pte = (nvbe->offset >> 12) << 2; local
438 int pte = (offset >> NV_CTXDMA_PAGE_SHIFT) + 2; local
[all...]
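The sgdma object uses 4-byte entries instead: the PTE index is the offset in 4 KiB sub-pages plus 2 (skipping the ctxdma header words), a bound entry is the page address with its low valid bits set, and unbinding writes zero. A userspace model under those assumptions:

#include <stdint.h>

#define NV_CTXDMA_PAGE_SHIFT 12

static uint32_t ctxdma[1024];               /* stand-in for the gpuobj */

static void bind_page(uint64_t offset, uint32_t page_addr)
{
	unsigned pte = (offset >> NV_CTXDMA_PAGE_SHIFT) + 2;

	ctxdma[pte] = page_addr | 3;        /* low bits mark the entry valid */
}

static void unbind_page(uint64_t offset)
{
	unsigned pte = (offset >> NV_CTXDMA_PAGE_SHIFT) + 2;

	ctxdma[pte] = 0x00000000;           /* invalid entry */
}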
nouveau_vm.c
39 u32 pte = (offset & ((1 << vm->pgt_bits) - 1)) >> bits; local
51 end = (pte + num);
54 len = end - pte;
56 vm->map(vma, pgt, node, pte, len, phys, delta);
59 pte += len;
63 pte = 0;
90 u32 pte = (offset & ((1 << vm->pgt_bits) - 1)) >> bits; local
97 end = (pte + num);
100 len = end - pte;
102 vm->map_sg(vma, pgt, mem, pte, len, list);
125 u32 pte = (offset & ((1 << vm->pgt_bits) - 1)) >> bits; local
[all...]
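nouveau_vm.c is the walker that feeds the per-chip map functions: it turns a VM offset into a PTE index within the current page table, clamps each chunk to the table end, and restarts at index 0 in the next table. A sketch of that loop with the chip hook abstracted as a callback (pgt_bits/bits and process() are stand-ins for the driver's fields):

#include <stdint.h>

static void walk(uint64_t offset, uint32_t num, uint32_t pgt_bits, uint32_t bits,
		 void (*process)(uint32_t pde, uint32_t pte, uint32_t len))
{
	uint32_t max = 1u << (pgt_bits - bits); /* PTEs per page table */
	uint32_t pde = offset >> pgt_bits;
	uint32_t pte = (offset & ((1u << pgt_bits) - 1)) >> bits;

	while (num) {
		uint32_t end = pte + num;

		if (end > max)
			end = max;              /* clamp to this table */
		process(pde, pte, end - pte);
		num -= end - pte;
		pde++;                          /* continue in the next table */
		pte = 0;
	}
}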
/drivers/gpu/drm/gma500/
gtt.c
32 * psb_gtt_mask_pte - generate GTT pte entry
81 u32 *gtt_slot, pte; local
100 pte = psb_gtt_mask_pte(page_to_pfn(r->pages[i]), 0);
101 iowrite32(pte, gtt_slot++);
104 pte = psb_gtt_mask_pte(page_to_pfn(r->pages[i]), 0);
105 iowrite32(pte, gtt_slot++);
125 u32 *gtt_slot, pte; local
131 pte = psb_gtt_mask_pte(page_to_pfn(dev_priv->scratch_page), 0);
134 iowrite32(pte, gtt_slot++);
151 u32 *gtt_slot, pte; local
417 uint32_t pte; local
[all...]
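The gma500 GTT is a flat array of 32-bit slots in MMIO space: each entry is built from a pfn by psb_gtt_mask_pte() and written with iowrite32(), and teardown points every slot at a scratch page. A model with the MMIO write as a plain store and illustrative flag bits (the real mask also handles pfns above 32 bits and memory-type bits):

#include <stdint.h>

static inline uint32_t mask_pte(uint32_t pfn, uint32_t type)
{
	return (pfn << 12) | type | 1;      /* bit 0 as valid: illustrative */
}

static void fill_range(volatile uint32_t *gtt_slot, const uint32_t *pfns,
		       int npages, uint32_t scratch_pfn)
{
	int i;

	for (i = 0; i < npages; i++)        /* roll in, one slot per page */
		*gtt_slot++ = mask_pte(pfns ? pfns[i] : scratch_pfn, 0);
}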
mmu.c
436 unsigned long addr, uint32_t pte)
438 pt->v[psb_mmu_pt_index(addr)] = pte;
702 uint32_t pte; local
722 pte = psb_mmu_mask_pte(start_pfn++, type);
723 psb_mmu_set_pte(pt, addr, pte);
750 uint32_t pte; local
785 pte =
788 psb_mmu_set_pte(pt, addr, pte);
435 psb_mmu_set_pte(struct psb_mmu_pt *pt, unsigned long addr, uint32_t pte) argument
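psb_mmu_set_pte() on line 436 is a plain array store: the page table is one page of 32-bit words and psb_mmu_pt_index() picks the slot from the middle bits of the virtual address. A self-contained model assuming a 4 KiB page and 1024-entry tables:

#include <stdint.h>

struct pt_model {
	uint32_t v[1024];                   /* one page of 32-bit PTEs */
};

static inline unsigned pt_index(unsigned long addr)
{
	return (addr >> 12) & 0x3ff;        /* VA bits 21..12 select the slot */
}

static void set_pte(struct pt_model *pt, unsigned long addr, uint32_t pte)
{
	pt->v[pt_index(addr)] = pte;        /* mirrors psb_mmu_set_pte() */
}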
/drivers/gpu/drm/i915/
i915_gem_gtt.c
177 uint32_t *pt_vaddr, pte; local
195 pte = GEN6_PTE_ADDR_ENCODE(page_addr);
196 pt_vaddr[j] = pte | pte_flags;
222 uint32_t *pt_vaddr, pte; local
236 pte = GEN6_PTE_ADDR_ENCODE(page_addr);
237 pt_vaddr[i] = pte | pte_flags;
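Both i915 loops encode a page address into a 32-bit gen6 PTE and OR in cache flags before storing it into the kmapped page table. A model of that fill; the encoding (address bits 39:32 folded into PTE bits 11:4) follows the usual gen6 scheme but should be read as an assumption, not the driver's exact macro:

#include <stdint.h>

static inline uint32_t gen6_pte_encode(uint64_t addr)
{
	return (uint32_t)(addr & 0xfffff000u) | (uint32_t)((addr >> 28) & 0xff0);
}

static void fill_pt(uint32_t *pt_vaddr, const uint64_t *page_addrs,
		    int n, uint32_t pte_flags)
{
	int i;

	for (i = 0; i < n; i++)
		pt_vaddr[i] = gen6_pte_encode(page_addrs[i]) | pte_flags;
}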
/drivers/char/agp/
amd64-agp.c
50 u32 pte; local
86 pte = (tmp & 0x000000ff00000000ULL) >> 28;
87 pte |=(tmp & 0x00000000fffff000ULL);
88 pte |= GPTE_VALID | GPTE_COHERENT;
90 writel(pte, agp_bridge->gatt_table+j);
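Lines 86-88 are the whole trick: a 40-bit physical address is packed into a 32-bit GART entry by moving address bits 39:32 down into PTE bits 11:4 while bits 31:12 stay in place. The same packing as a standalone function (flag positions assumed to be bits 0 and 1):

#include <stdint.h>

#define GPTE_VALID    (1u << 0)             /* assumed flag positions */
#define GPTE_COHERENT (1u << 1)

static uint32_t gart_pte(uint64_t addr)
{
	uint32_t pte;

	pte  = (uint32_t)((addr & 0x000000ff00000000ULL) >> 28); /* 39:32 -> 11:4 */
	pte |= (uint32_t)(addr & 0x00000000fffff000ULL);         /* 31:12 in place */
	return pte | GPTE_VALID | GPTE_COHERENT;
}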
/drivers/iommu/
tegra-gart.c
82 unsigned long offs, u32 pte)
85 writel(pte, gart->regs + GART_ENTRY_DATA);
88 pte ? "map" : "unmap", offs, pte & GART_PAGE_MASK);
94 unsigned long pte; local
97 pte = readl(gart->regs + GART_ENTRY_DATA);
99 return pte;
121 unsigned long pte; local
123 pte = gart_read_pte(gart, iova);
126 (GART_ENTRY_PHYS_ADDR_VALID & pte)
81 gart_set_pte(struct gart_device *gart, unsigned long offs, u32 pte) argument
278 unsigned long pte; local
[all...]
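The GART here is programmed indirectly: the driver first writes the target IOVA to an address register, then moves the PTE through a data register, which is why both gart_set_pte() and the read path touch GART_ENTRY_DATA. A sketch of that register pair with MMIO modeled as plain stores (register offsets are illustrative):

#include <stdint.h>

#define GART_ENTRY_ADDR 0x28                /* illustrative offsets */
#define GART_ENTRY_DATA 0x2c

static void set_pte(volatile uint32_t *regs, uint32_t iova, uint32_t pte)
{
	regs[GART_ENTRY_ADDR / 4] = iova;   /* select the entry... */
	regs[GART_ENTRY_DATA / 4] = pte;    /* ...then write it */
}

static uint32_t read_pte(volatile uint32_t *regs, uint32_t iova)
{
	regs[GART_ENTRY_ADDR / 4] = iova;
	return regs[GART_ENTRY_DATA / 4];
}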
amd_iommu_v2.c
424 pte_t pte)
421 mn_change_pte(struct mmu_notifier *mn, struct mm_struct *mm, unsigned long address, pte_t pte) argument
omap-iommu.c
518 dev_vdbg(obj->dev, "%s: a new pte:%p\n", __func__, iopte);
528 "%s: da:%08x pgd:%p *pgd:%08x pte:%p *pte:%08x\n",
577 dev_vdbg(obj->dev, "%s: da:%08x pa:%08x pte:%p *pte:%08x\n",
643 * omap_iopgtable_store_entry - Make an iommu pte entry
660 * iopgtable_lookup_entry - Lookup an iommu pte entry
664 * @ppte: iommu pte entry pointer to be returned
731 * iopgtable_clear_entry - Remove an iommu pte entry
813 "pte
1168 u32 *pgd, *pte; local
[all...]
tegra-smmu.c
409 unsigned long *pte, struct page *page, int is_pde)
416 val = SMMU_PTC_FLUSH_TYPE_ADR | VA_PAGE_TO_PA(pte, page);
587 unsigned long *pte; local
591 pte = locate_pte(as, iova, false, &page, &count);
592 if (WARN_ON(!pte))
595 if (WARN_ON(*pte == _PTE_VACANT(iova)))
598 *pte = _PTE_VACANT(iova);
599 FLUSH_CPU_DCACHE(pte, page, sizeof(*pte));
600 flush_ptc_and_tlb(as->smmu, as, iova, pte, page, 0);
407 flush_ptc_and_tlb(struct smmu_device *smmu, struct smmu_as *as, dma_addr_t iova, unsigned long *pte, struct page *page, int is_pde) argument
611 unsigned long *pte; local
665 unsigned long *pte; local
[all...]
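The unmap path at lines 587-600 shows the required ordering: locate the PTE, refuse if it is already vacant, store the vacant encoding, flush the CPU dcache line that holds the PTE, and only then flush the hardware PTC and TLB. A sketch of that sequence with the flush helpers stubbed and the vacant encoding simplified to zero (the driver's _PTE_VACANT() is iova-derived):

#include <stdint.h>

static void flush_cpu_dcache(void *p, unsigned long len) { (void)p; (void)len; }
static void flush_ptc_and_tlb(unsigned long iova)        { (void)iova; }

static int unmap_one(unsigned long *pte, unsigned long iova)
{
	if (*pte == 0)
		return -1;                   /* WARN: already vacant */
	*pte = 0;                            /* mark the entry vacant */
	flush_cpu_dcache(pte, sizeof(*pte)); /* make the store visible... */
	flush_ptc_and_tlb(iova);             /* ...before invalidating caches */
	return 0;
}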
amd_iommu.c
1101 u64 *pte; local
1107 pte = (void *)get_zeroed_page(gfp);
1108 if (!pte)
1111 *pte = PM_LEVEL_PDE(domain->mode,
1113 domain->pt_root = pte;
1127 u64 *pte, *page; local
1135 pte = &domain->pt_root[PM_LEVEL_INDEX(level, address)];
1140 if (!IOMMU_PTE_PRESENT(*pte)) {
1144 *pte = PM_LEVEL_PDE(level, virt_to_phys(page));
1148 if (PM_PTE_LEVEL(*pte) !
1171 u64 *pte; local
1227 u64 __pte, *pte; local
1266 u64 *pte; local
1461 u64 *pte, *pte_page; local
1513 u64 *pte = fetch_pte(&dma_dom->domain, i); local
2371 u64 *pte, *pte_page; local
2399 u64 *pte, __pte; local
2432 u64 *pte; local
3207 u64 *pte, __pte; local
3502 u64 *pte; local
3534 u64 *pte; local
3550 u64 *pte; local
[all...]
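Most of the amd_iommu.c hits are the same walk: start at pt_root, and at each level either follow the present PDE or allocate a zeroed page and install it, until the leaf PTE for the address is reached (increase_address_space() grows the tree by one level in the same style). A software-only model that stores pointers where the hardware table stores physical addresses:

#include <stdint.h>
#include <stdlib.h>

#define PRESENT 1ULL

static uint64_t *alloc_pte_model(uint64_t *root, uint64_t addr, int levels)
{
	uint64_t *pte = &root[(addr >> (12 + 9 * (levels - 1))) & 0x1ff];
	int level;

	for (level = levels - 1; level > 0; level--) {
		if (!(*pte & PRESENT)) {    /* missing level: allocate it */
			uint64_t *page = calloc(512, sizeof(*page));

			if (!page)
				return NULL;
			*pte = (uint64_t)(uintptr_t)page | PRESENT;
		}
		root = (uint64_t *)(uintptr_t)(*pte & ~PRESENT);
		pte = &root[(addr >> (12 + 9 * (level - 1))) & 0x1ff];
	}
	return pte;                         /* leaf entry for this address */
}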
intel-iommu.c
282 static inline void dma_clear_pte(struct dma_pte *pte) argument
284 pte->val = 0;
287 static inline void dma_set_pte_readable(struct dma_pte *pte) argument
289 pte->val |= DMA_PTE_READ;
292 static inline void dma_set_pte_writable(struct dma_pte *pte) argument
294 pte->val |= DMA_PTE_WRITE;
297 static inline void dma_set_pte_snp(struct dma_pte *pte) argument
299 pte->val |= DMA_PTE_SNP;
302 static inline void dma_set_pte_prot(struct dma_pte *pte, unsigned long prot) argument
304 pte->val = (pte->val & ~3) | (prot & 3);
307 dma_pte_addr(struct dma_pte *pte) argument
317 dma_set_pte_pfn(struct dma_pte *pte, unsigned long pfn) argument
322 dma_pte_present(struct dma_pte *pte) argument
327 dma_pte_superpage(struct dma_pte *pte) argument
332 first_pte_in_page(struct dma_pte *pte) argument
774 struct dma_pte *parent, *pte = NULL; local
823 struct dma_pte *parent, *pte = NULL; local
857 struct dma_pte *first_pte, *pte; local
893 struct dma_pte *first_pte, *pte; local
1788 struct dma_pte *first_pte = NULL, *pte = NULL; local
3992 struct dma_pte *pte; local
4072 struct dma_pte *pte; local
[all...]
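The intel-iommu accessors above are all one shape: a VT-d PTE is a single u64, and each helper sets, clears, or extracts a bit range of pte->val. Restated as a compilable unit (bit 0 read, bit 1 write match the snippet; the address mask is simplified to clearing the low flag bits):

#include <stdint.h>

#define DMA_PTE_READ  (1ULL << 0)
#define DMA_PTE_WRITE (1ULL << 1)

struct dma_pte {
	uint64_t val;
};

static inline void dma_clear_pte(struct dma_pte *pte)        { pte->val = 0; }
static inline void dma_set_pte_readable(struct dma_pte *pte) { pte->val |= DMA_PTE_READ; }
static inline void dma_set_pte_writable(struct dma_pte *pte) { pte->val |= DMA_PTE_WRITE; }

static inline int dma_pte_present(struct dma_pte *pte)
{
	return (pte->val & (DMA_PTE_READ | DMA_PTE_WRITE)) != 0;
}

static inline uint64_t dma_pte_addr(struct dma_pte *pte)
{
	return pte->val & ~0xfffULL;        /* address above the flag bits */
}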
/drivers/misc/sgi-gru/
grufault.c
225 pte_t pte; local
240 pte = *(pte_t *) pmdp;
243 pte = *pte_offset_kernel(pmdp, vaddr);
245 if (unlikely(!pte_present(pte) ||
246 (write && (!pte_write(pte) || !pte_dirty(pte)))))
249 *paddr = pte_pfn(pte) << PAGE_SHIFT;
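The check at lines 245-249 is the point: the GRU may only be handed a physical address if the PTE is present and, for a write, both writable and already dirty, so the CPU fault path has done its accounting first. A model with illustrative flag positions standing in for pte_present()/pte_write()/pte_dirty():

#include <stdint.h>

#define PTE_PRESENT (1ul << 0)              /* illustrative flag bits */
#define PTE_WRITE   (1ul << 1)
#define PTE_DIRTY   (1ul << 2)
#define PAGE_SHIFT  12

static int pte_to_paddr(unsigned long pte, int write, uint64_t *paddr)
{
	if (!(pte & PTE_PRESENT))
		return -1;
	if (write && (!(pte & PTE_WRITE) || !(pte & PTE_DIRTY)))
		return -1;                  /* not safe for device writes yet */
	*paddr = ((uint64_t)pte >> PAGE_SHIFT) << PAGE_SHIFT; /* pfn -> paddr */
	return 0;
}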
/drivers/xen/xenbus/
xenbus_client.c
477 pte_t *pte; local
485 area = alloc_vm_area(PAGE_SIZE, &pte);
491 op.host_addr = arbitrary_virt_to_machine(pte).maddr;
/drivers/lguest/
page_tables.c
267 static void release_pte(pte_t pte) argument
273 if (pte_flags(pte) & _PAGE_PRESENT)
274 put_page(pte_page(pte));
347 kill_guest(cpu, "out of memory allocating pte page");
382 kill_guest(cpu, "out of memory allocating pte page");
514 * Check the flags on the pte entry itself: it must be present and
1092 pte_t *pte = switcher_pte_page(cpu); local
1096 set_pte(&pte[i], mk_pte(switcher_page[i],
1104 set_pte(&pte[i], pfn_pte(page_to_pfn(switcher_page[i]),
1111 set_pte(&pte[
[all...]
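release_pte() at line 267 encodes lguest's invariant: every present shadow PTE holds a reference on the host page it maps, so clearing one must put_page() that page, and only when _PAGE_PRESENT is set. A model with the refcount made explicit:

#include <stdint.h>

#define PAGE_PRESENT 1ul

struct page_model {
	int refcount;
};

static void release_pte_model(unsigned long pte, struct page_model *page)
{
	if (pte & PAGE_PRESENT)
		page->refcount--;   /* stands in for put_page(pte_page(pte)) */
}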
/drivers/staging/comedi/
drivers.c
391 pte_t *ptep, pte; local
399 pte = *ptep;
400 if (pte_present(pte)) {
402 page_address(pte_page(pte));
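Here the PTE walk recovers the kernel address behind a vmalloc'd buffer page: dereference the located PTE, and if it is present, go pte_page() then page_address(). A model with the linear map reduced to a base offset (the flag and pfn layout are the same illustrative simplifications as above):

#include <stdint.h>
#include <stddef.h>

#define PTE_PRESENT 1ul
#define PAGE_SHIFT  12

static void *pte_to_kaddr(unsigned long pte, uintptr_t linear_base)
{
	if (!(pte & PTE_PRESENT))
		return NULL;
	/* pte_page() + page_address(): pfn back through the linear map */
	return (void *)(linear_base + ((pte >> PAGE_SHIFT) << PAGE_SHIFT));
}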
/drivers/xen/
gntdev.c
216 static int find_grant_ptes(pte_t *pte, pgtable_t token, argument
225 pte_maddr = arbitrary_virt_to_machine(pte).maddr;
254 * Setup the map_ops corresponding to the pte entries pointing
grant-table.c
744 pte_t *pte; local
760 pte = (pte_t *) (mfn_to_virt(PFN_DOWN(map_ops[i].host_addr)) +
762 mfn = pte_mfn(*pte);
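The two Xen snippets are the two ends of one handshake: find_grant_ptes() records the machine address of each PTE slot so the hypervisor can install the grant mapping directly into it, and grant-table.c later reads the installed PTE back to learn the mapped machine frame. A model where virt_to_maddr_model() is a hypothetical stand-in for arbitrary_virt_to_machine().maddr:

#include <stdint.h>

static uint64_t virt_to_maddr_model(void *p)
{
	return (uint64_t)(uintptr_t)p;      /* identity in this model */
}

/* gntdev side: where should Xen write the grant mapping? */
static uint64_t record_pte_slot(unsigned long *pte)
{
	return virt_to_maddr_model(pte);    /* becomes map_op.host_addr */
}

/* grant-table side: which machine frame did the mapping land on? */
static unsigned long mfn_of(const unsigned long *pte)
{
	return *pte >> 12;                  /* shape of pte_mfn(*pte) */
}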
/drivers/staging/tidspbridge/core/
tiomap3430.c
1551 pte_t *ptep, pte; local
1559 pte = *ptep;
1560 if (pte_present(pte))
1561 return pte & PAGE_MASK;
/drivers/net/ethernet/tile/
tilepro.c
469 HV_PTE pte = *virt_to_pte(current->mm, (unsigned long)va); local
470 if (hv_pte_get_mode(pte) != HV_PTE_MODE_CACHE_HASH_L3)
472 va, hv_pte_get_mode(pte), hv_pte_val(pte));
1023 .pte = hv_pte(0),
1026 ea.pte = hv_pte_set_lotar(ea.pte, epp_lotar);
1027 ea.pte = hv_pte_set_mode(ea.pte, HV_PTE_MODE_CACHE_TILE_L3);
1974 HV_PTE pte local
[all...]
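The Tile HV_PTE helpers are functional rather than mutating: each hv_pte_set_*() returns a new PTE value, which is why the snippet chains assignments like ea.pte = hv_pte_set_mode(ea.pte, ...). A model of that style; the field widths and positions are invented for illustration, not the hypervisor's layout:

#include <stdint.h>

typedef struct {
	uint64_t val;
} hv_pte_model;

static hv_pte_model set_mode(hv_pte_model pte, unsigned mode)
{
	pte.val = (pte.val & ~0xfULL) | (mode & 0xf);
	return pte;                         /* caller stores the new value */
}

static hv_pte_model set_lotar(hv_pte_model pte, unsigned lotar)
{
	pte.val = (pte.val & ~(0xffffULL << 4)) | ((uint64_t)(lotar & 0xffff) << 4);
	return pte;
}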

Completed in 401 milliseconds