Searched refs:pte (Results 1 - 25 of 30) sorted by relevance


/drivers/gpu/drm/nouveau/core/subdev/vm/
nv44.c
42 dma_addr_t *list, u32 pte, u32 cnt)
44 u32 base = (pte << 2) & ~0x0000000f;
54 switch (pte++ & 0x3) {
88 struct nouveau_mem *mem, u32 pte, u32 cnt, dma_addr_t *list)
94 if (pte & 3) {
95 u32 max = 4 - (pte & 3);
97 nv44_vm_fill(pgt, priv->null, list, pte, part);
98 pte += part;
106 nv_wo32(pgt, pte++ * 4, tmp[0] >> 0 | tmp[1] << 27);
107 nv_wo32(pgt, pte
41 nv44_vm_fill(struct nouveau_gpuobj *pgt, dma_addr_t null, dma_addr_t *list, u32 pte, u32 cnt) argument
87 nv44_vm_map_sg(struct nouveau_vma *vma, struct nouveau_gpuobj *pgt, struct nouveau_mem *mem, u32 pte, u32 cnt, dma_addr_t *list) argument
118 nv44_vm_unmap(struct nouveau_gpuobj *pgt, u32 pte, u32 cnt) argument
nv04.c
38 struct nouveau_mem *mem, u32 pte, u32 cnt, dma_addr_t *list)
40 pte = 0x00008 + (pte * 4);
45 nv_wo32(pgt, pte, phys | 3);
47 pte += 4;
54 nv04_vm_unmap(struct nouveau_gpuobj *pgt, u32 pte, u32 cnt) argument
56 pte = 0x00008 + (pte * 4);
58 nv_wo32(pgt, pte, 0x00000000);
59 pte
37 nv04_vm_map_sg(struct nouveau_vma *vma, struct nouveau_gpuobj *pgt, struct nouveau_mem *mem, u32 pte, u32 cnt, dma_addr_t *list) argument
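
The nv04 layout above is the simplest in this listing: the page table starts at byte offset 0x00008, each entry is 4 bytes, and a mapping is just the physical address with the low two bits set as valid flags. A minimal userspace sketch of that arithmetic (the helper names are made up here, not from the driver):

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical model of the nv04 PTE write shown above:
     * byte offset = 0x00008 + index * 4, value = phys | 3. */
    static uint32_t nv04_pte_offset(uint32_t pte_index)
    {
        return 0x00008 + pte_index * 4;
    }

    static uint32_t nv04_pte_value(uint32_t phys)
    {
        return phys | 3;  /* low bits mark the entry valid */
    }

    int main(void)
    {
        printf("pte 0 -> offset 0x%05x, value 0x%08x\n",
               nv04_pte_offset(0), nv04_pte_value(0x100000));
        return 0;
    }
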
nv50.c
81 struct nouveau_mem *mem, u32 pte, u32 cnt, u64 phys, u64 delta)
95 pte <<= 3;
104 if (cnt >= block && !(pte & (block - 1)))
118 nv_wo32(pgt, pte + 0, offset_l);
119 nv_wo32(pgt, pte + 4, offset_h);
120 pte += 8;
128 struct nouveau_mem *mem, u32 pte, u32 cnt, dma_addr_t *list)
131 pte <<= 3;
134 nv_wo32(pgt, pte + 0, lower_32_bits(phys));
135 nv_wo32(pgt, pte
80 nv50_vm_map(struct nouveau_vma *vma, struct nouveau_gpuobj *pgt, struct nouveau_mem *mem, u32 pte, u32 cnt, u64 phys, u64 delta) argument
127 nv50_vm_map_sg(struct nouveau_vma *vma, struct nouveau_gpuobj *pgt, struct nouveau_mem *mem, u32 pte, u32 cnt, dma_addr_t *list) argument
141 nv50_vm_unmap(struct nouveau_gpuobj *pgt, u32 pte, u32 cnt) argument
nv41.c
42 struct nouveau_mem *mem, u32 pte, u32 cnt, dma_addr_t *list)
44 pte = pte * 4;
49 nv_wo32(pgt, pte, (phys >> 7) | 1);
51 pte += 4;
58 nv41_vm_unmap(struct nouveau_gpuobj *pgt, u32 pte, u32 cnt) argument
60 pte = pte * 4;
62 nv_wo32(pgt, pte, 0x00000000);
63 pte
41 nv41_vm_map_sg(struct nouveau_vma *vma, struct nouveau_gpuobj *pgt, struct nouveau_mem *mem, u32 pte, u32 cnt, dma_addr_t *list) argument
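
nv41 packs more address range into the same 4-byte entry by storing the physical address pre-shifted right by 7 bits, with bit 0 as the valid flag. A small model of that encoding, assuming nothing beyond what the snippet shows:

    #include <stdint.h>
    #include <stdio.h>

    /* Model of the nv41 entry above: phys is stored >> 7 so a
     * 32-bit entry can address well past 4 GiB; bit 0 = valid. */
    static uint32_t nv41_pte_value(uint64_t phys)
    {
        return (uint32_t)(phys >> 7) | 1;
    }

    int main(void)
    {
        printf("pte = 0x%08x\n", nv41_pte_value(0x12345000ULL));
        return 0;
    }
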
nvc0.c
111 struct nouveau_mem *mem, u32 pte, u32 cnt, u64 phys, u64 delta)
116 pte <<= 3;
128 nv_wo32(pgt, pte + 0, lower_32_bits(phys));
129 nv_wo32(pgt, pte + 4, upper_32_bits(phys));
131 pte += 8;
137 struct nouveau_mem *mem, u32 pte, u32 cnt, dma_addr_t *list)
143 pte <<= 3;
146 nv_wo32(pgt, pte + 0, lower_32_bits(phys));
147 nv_wo32(pgt, pte + 4, upper_32_bits(phys));
148 pte
110 nvc0_vm_map(struct nouveau_vma *vma, struct nouveau_gpuobj *pgt, struct nouveau_mem *mem, u32 pte, u32 cnt, u64 phys, u64 delta) argument
136 nvc0_vm_map_sg(struct nouveau_vma *vma, struct nouveau_gpuobj *pgt, struct nouveau_mem *mem, u32 pte, u32 cnt, dma_addr_t *list) argument
153 nvc0_vm_unmap(struct nouveau_gpuobj *pgt, u32 pte, u32 cnt) argument
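
nvc0 (like nv50 above) moves to 8-byte entries: the PTE index becomes a byte offset via pte <<= 3, and the low and high halves of the packed address are written at +0 and +4. A sketch of that write pattern, with a plain buffer standing in for the nv_wo32() GPU object (little-endian host assumed):

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    /* The in-memory "page table" here is just a buffer. */
    static void wr32(uint8_t *pgt, uint32_t off, uint32_t v)
    {
        memcpy(pgt + off, &v, sizeof(v));
    }

    int main(void)
    {
        uint8_t pgt[64] = { 0 };
        uint32_t pte = 2;              /* PTE index */
        uint64_t phys = 0x123456789ULL;

        pte <<= 3;                     /* index -> byte offset (8-byte PTEs) */
        wr32(pgt, pte + 0, (uint32_t)phys);          /* lower_32_bits */
        wr32(pgt, pte + 4, (uint32_t)(phys >> 32));  /* upper_32_bits */
        printf("entry 2, first bytes: %02x %02x\n", pgt[16], pgt[17]);
        return 0;
    }
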
base.c
41 u32 pte = (offset & ((1 << vmm->pgt_bits) - 1)) >> bits; local
53 end = (pte + num);
56 len = end - pte;
58 vmm->map(vma, pgt, node, pte, len, phys, delta);
61 pte += len;
65 pte = 0;
86 u32 pte = (offset & ((1 << vmm->pgt_bits) - 1)) >> bits; local
97 end = pte + sglen;
100 len = end - pte;
105 vmm->map_sg(vma, pgt, mem, pte,
145 u32 pte = (offset & ((1 << vmm->pgt_bits) - 1)) >> bits; local
193 u32 pte = (offset & ((1 << vmm->pgt_bits) - 1)) >> bits; local
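
base.c holds the generation-independent walk: the PTE index is derived by masking the VM offset down to the bits one page table covers (pgt_bits) and shifting out the page-size bits, and the map loops reset pte to 0 whenever the walk crosses into the next page table. A worked example of the index split, with made-up bit widths:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint64_t offset   = 0x12345000;  /* hypothetical VM offset */
        uint32_t pgt_bits = 29;          /* address bits per PGT (example) */
        uint32_t bits     = 12;          /* log2 of the page size (example) */

        uint32_t pte = (offset & ((1ull << pgt_bits) - 1)) >> bits;
        printf("offset 0x%llx -> pte index %u\n",
               (unsigned long long)offset, pte);
        return 0;
    }
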
/drivers/gpu/drm/gma500/
gtt.c
33 * psb_gtt_mask_pte - generate GTT pte entry
88 u32 pte; local
109 pte = psb_gtt_mask_pte(page_to_pfn(r->pages[i]),
111 iowrite32(pte, gtt_slot++);
114 pte = psb_gtt_mask_pte(page_to_pfn(r->pages[i]),
116 iowrite32(pte, gtt_slot++);
137 u32 pte; local
143 pte = psb_gtt_mask_pte(page_to_pfn(dev_priv->scratch_page),
147 iowrite32(pte, gtt_slot++);
165 u32 pte; local
424 uint32_t pte; local
mmu.c
407 uint32_t pte)
409 pt->v[psb_mmu_pt_index(addr)] = pte;
665 uint32_t pte; local
685 pte = psb_mmu_mask_pte(start_pfn++, type);
686 psb_mmu_set_pte(pt, addr, pte);
714 uint32_t pte; local
747 pte = psb_mmu_mask_pte(page_to_pfn(*pages++),
749 psb_mmu_set_pte(pt, addr, pte);
406 psb_mmu_set_pte(struct psb_mmu_pt *pt, unsigned long addr, uint32_t pte) argument
/drivers/iommu/
ipmmu-vmsa.c
516 * We can't use the (pgd|pud|pmd|pte)_populate or the set_(pgd|pud|pmd|pte)
523 pte_t *pte; local
528 pte = (pte_t *)get_zeroed_page(GFP_ATOMIC);
529 if (!pte)
532 ipmmu_flush_pgtable(mmu, pte, PAGE_SIZE);
533 *pmd = __pmd(__pa(pte) | PMD_NSTABLE | PMD_TYPE_TABLE);
536 return pte + pte_index(iova);
586 pte_t *pte, *start; local
589 pte
716 ipmmu_clear_pte(struct ipmmu_vmsa_device *mmu, pud_t *pud, pmd_t *pmd, pte_t *pte, unsigned int num_ptes) argument
740 pte_t *pte, *start; local
766 ipmmu_split_pte(struct ipmmu_vmsa_device *mmu, pte_t *pte) argument
784 pte_t *pte; local
973 pte_t pte; local
tegra-gart.c
84 unsigned long offs, u32 pte)
87 writel(pte, gart->regs + GART_ENTRY_DATA);
90 pte ? "map" : "unmap", offs, pte & GART_PAGE_MASK);
96 unsigned long pte; local
99 pte = readl(gart->regs + GART_ENTRY_DATA);
101 return pte;
123 unsigned long pte; local
125 pte = gart_read_pte(gart, iova);
128 (GART_ENTRY_PHYS_ADDR_VALID & pte)
83 gart_set_pte(struct gart_device *gart, unsigned long offs, u32 pte) argument
285 unsigned long pte; local
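
In tegra-gart the PTEs live behind an address/data register window rather than in CPU-visible memory, so writel()/readl() on GART_ENTRY_DATA stand between the driver and the table. A toy model of that indirection; the two "registers" here are plain variables:

    #include <stdint.h>
    #include <stdio.h>

    static uint32_t entry_addr, entry_data[16];

    static void gart_set_pte_model(unsigned long offs, uint32_t pte)
    {
        entry_addr = offs;              /* select the slot */
        entry_data[entry_addr] = pte;   /* write through the data register */
    }

    static uint32_t gart_read_pte_model(unsigned long offs)
    {
        entry_addr = offs;
        return entry_data[entry_addr];
    }

    int main(void)
    {
        gart_set_pte_model(3, 0x80001000u);  /* valid bit + address (made up) */
        printf("pte[3] = 0x%08x\n", gart_read_pte_model(3));
        return 0;
    }
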
amd_iommu_types.h
243 #define PM_PTE_LEVEL(pte) (((pte) >> 9) & 0x7ULL)
282 #define PTE_PAGE_SIZE(pte) \
283 (1ULL << (1 + ffz(((pte) | 0xfffULL))))
312 #define IOMMU_PTE_PRESENT(pte) ((pte) & IOMMU_PTE_P)
313 #define IOMMU_PTE_PAGE(pte) (phys_to_virt((pte) & IOMMU_PAGE_MASK))
314 #define IOMMU_PTE_MODE(pte) (((pte) >>
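
PTE_PAGE_SIZE() above decodes the AMD IOMMU convention of encoding a large-page size as a run of ones in the low address bits: the first zero bit above bit 11 determines the size. A userspace model, using __builtin_ctzll on the inverted value as a stand-in for the kernel's ffz():

    #include <stdint.h>
    #include <stdio.h>

    /* ffz() (find first zero bit) is a kernel helper; this is an
     * equivalent for values that are not all-ones. */
    static unsigned ffz64(uint64_t v)
    {
        return __builtin_ctzll(~v);
    }

    static uint64_t pte_page_size(uint64_t pte)
    {
        return 1ull << (1 + ffz64(pte | 0xfffULL));
    }

    int main(void)
    {
        /* A run of ones in bits 12..19 encodes a 2 MiB page. */
        uint64_t pte = 0xff000;
        printf("decoded page size: %llu bytes\n",
               (unsigned long long)pte_page_size(pte));
        return 0;
    }
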
amd_iommu.c
1257 u64 *pte; local
1263 pte = (void *)get_zeroed_page(gfp);
1264 if (!pte)
1267 *pte = PM_LEVEL_PDE(domain->mode,
1269 domain->pt_root = pte;
1283 u64 *pte, *page; local
1291 pte = &domain->pt_root[PM_LEVEL_INDEX(level, address)];
1296 if (!IOMMU_PTE_PRESENT(*pte)) {
1300 *pte = PM_LEVEL_PDE(level, virt_to_phys(page));
1304 if (PM_PTE_LEVEL(*pte) !
1327 u64 *pte; local
1383 u64 __pte, *pte; local
1425 u64 *pte; local
1624 u64 *pte, *pte_page; local
1676 u64 *pte = fetch_pte(&dma_dom->domain, i); local
2561 u64 *pte, *pte_page; local
2589 u64 *pte, __pte; local
2622 u64 *pte; local
3386 u64 *pte, __pte; local
3653 u64 *pte; local
3685 u64 *pte; local
3701 u64 *pte; local
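
The amd_iommu.c matches come mostly from alloc_pte()-style walks: descend from pt_root, and wherever the current entry is not present, allocate a zeroed page and install it as the next level (ipmmu-vmsa.c above follows the same allocate-on-first-touch pattern). A simplified sketch with calloc() standing in for get_zeroed_page() and the index math reduced to a single level:

    #include <stdint.h>
    #include <stdlib.h>
    #include <stdio.h>

    #define ENTRIES 512

    /* Allocate the next-level table on first touch, like alloc_pte(). */
    static uint64_t *walk_alloc(uint64_t **table, unsigned idx)
    {
        if (!table[idx])
            table[idx] = calloc(ENTRIES, sizeof(uint64_t));
        return table[idx];
    }

    int main(void)
    {
        uint64_t *root[ENTRIES] = { 0 };
        uint64_t *pt = walk_alloc(root, 5);
        if (!pt)
            return 1;
        pt[7] = 0x1000 | 1;  /* hypothetical PTE: address + present bit */
        printf("pte = 0x%llx\n", (unsigned long long)pt[7]);
        free(pt);
        return 0;
    }
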
intel-iommu.c
284 static inline void dma_clear_pte(struct dma_pte *pte) argument
286 pte->val = 0;
289 static inline u64 dma_pte_addr(struct dma_pte *pte) argument
292 return pte->val & VTD_PAGE_MASK;
295 return __cmpxchg64(&pte->val, 0ULL, 0ULL) & VTD_PAGE_MASK;
299 static inline bool dma_pte_present(struct dma_pte *pte) argument
301 return (pte->val & 3) != 0;
304 static inline bool dma_pte_superpage(struct dma_pte *pte) argument
306 return (pte->val & DMA_PTE_LARGE_PAGE);
309 static inline int first_pte_in_page(struct dma_pte *pte) argument
839 struct dma_pte *parent, *pte = NULL; local
896 struct dma_pte *parent, *pte = NULL; local
929 struct dma_pte *first_pte, *pte; local
955 dma_pte_free_level(struct dmar_domain *domain, int level, struct dma_pte *pte, unsigned long pfn, unsigned long start_pfn, unsigned long last_pfn) argument
1016 dma_pte_list_pagetables(struct dmar_domain *domain, int level, struct dma_pte *pte, struct page *freelist) argument
1040 dma_pte_clear_level(struct dmar_domain *domain, int level, struct dma_pte *pte, unsigned long pfn, unsigned long start_pfn, unsigned long last_pfn, struct page *freelist) argument
1984 struct dma_pte *first_pte = NULL, *pte = NULL; local
4294 struct dma_pte *pte; local
4406 struct dma_pte *pte; local
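
The dma_pte helpers above treat a VT-d entry as a plain u64: bits 0-1 encode presence, bit 7 marks a superpage, and the page-aligned address is masked out of the rest. A userspace restatement; the mask values follow the usual VT-d layout but are written out here as assumptions:

    #include <stdint.h>
    #include <stdbool.h>
    #include <stdio.h>

    #define VTD_PAGE_MASK      (~0xfffULL)   /* 4 KiB page mask (assumed) */
    #define DMA_PTE_LARGE_PAGE (1ULL << 7)   /* superpage bit (assumed) */

    struct dma_pte { uint64_t val; };

    static bool dma_pte_present(struct dma_pte *pte)   { return (pte->val & 3) != 0; }
    static bool dma_pte_superpage(struct dma_pte *pte) { return pte->val & DMA_PTE_LARGE_PAGE; }
    static uint64_t dma_pte_addr(struct dma_pte *pte)  { return pte->val & VTD_PAGE_MASK; }

    int main(void)
    {
        struct dma_pte pte = { .val = 0x200000 | DMA_PTE_LARGE_PAGE | 3 };
        printf("present=%d super=%d addr=0x%llx\n",
               dma_pte_present(&pte), dma_pte_superpage(&pte),
               (unsigned long long)dma_pte_addr(&pte));
        return 0;
    }
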
tegra-smmu.c
491 unsigned long *pte, struct page *page, int is_pde)
498 val = SMMU_PTC_FLUSH_TYPE_ADR | VA_PAGE_TO_PA(pte, page);
688 unsigned long *pte; local
692 pte = locate_pte(as, iova, false, &page, &count);
693 if (WARN_ON(!pte))
696 if (WARN_ON(*pte == _PTE_VACANT(iova)))
699 *pte = _PTE_VACANT(iova);
700 FLUSH_CPU_DCACHE(pte, page, sizeof(*pte));
701 flush_ptc_and_tlb(as->smmu, as, iova, pte, pag
489 flush_ptc_and_tlb(struct smmu_device *smmu, struct smmu_as *as, dma_addr_t iova, unsigned long *pte, struct page *page, int is_pde) argument
710 unsigned long *pte; local
764 unsigned long *pte; local
omap-iommu.c
554 dev_vdbg(obj->dev, "%s: a new pte:%p\n", __func__, iopte);
564 "%s: da:%08x pgd:%p *pgd:%08x pte:%p *pte:%08x\n",
613 dev_vdbg(obj->dev, "%s: da:%08x pa:%08x pte:%p *pte:%08x\n",
679 * omap_iopgtable_store_entry - Make an iommu pte entry
696 * iopgtable_lookup_entry - Lookup an iommu pte entry
700 * @ppte: iommu pte entry pointer to be returned
767 * iopgtable_clear_entry - Remove an iommu pte entry
846 dev_err(obj->dev, "%s: errs:0x%08x da:0x%08x pgd:0x%p *pgd:0x%08x pte
1208 u32 *pgd, *pte; local
arm-smmu.c
26 * - 4k and 64k pages, with contiguous pte hints.
750 * overridden by the ttbcr/pte.
1283 pte_t *pte, *start; local
1326 pte = start;
1336 * - Each pte in the region has the contiguous hint bit set
1357 } else if (pte_val(*pte) &
1370 sizeof(*pte) *
1375 *pte = pfn_pte(pfn, __pgprot(pteval));
1376 } while (pte++, pfn++, addr += PAGE_SIZE, --i);
1379 arm_smmu_flush_pgtable(smmu, start, sizeof(*pte) * (pt
1533 pte_t pte; local
/drivers/lguest/
page_tables.c
242 static void release_pte(pte_t pte) argument
248 if (pte_flags(pte) & _PAGE_PRESENT)
249 put_page(pte_page(pte));
318 kill_guest(cpu, "out of memory allocating pte page");
542 * Check the flags on the pte entry itself: it must be present and
757 pte_t *pte = find_spte(cpu, switcher_addr + i * PAGE_SIZE, true, local
759 if (!pte)
768 if (i == 0 && !(pte_flags(*pte) & _PAGE_PRESENT)) {
772 set_pte(pte,
951 kill_guest(cpu, "attempt to set pte int
1102 pte_t *pte; local
1128 pte_t *pte; local
/drivers/gpu/drm/i915/
i915_gem_gtt.c
75 gen8_gtt_pte_t pte = valid ? _PAGE_PRESENT | _PAGE_RW : 0; local
76 pte |= addr;
80 pte |= PPAT_UNCACHED_INDEX;
83 pte |= PPAT_DISPLAY_ELLC_INDEX;
86 pte |= PPAT_CACHED_INDEX;
90 return pte;
110 gen6_gtt_pte_t pte = valid ? GEN6_PTE_VALID : 0; local
111 pte |= GEN6_PTE_ADDR_ENCODE(addr);
116 pte |= GEN6_PTE_CACHE_LLC;
119 pte |
132 gen6_gtt_pte_t pte = valid ? GEN6_PTE_VALID : 0; local
156 gen6_gtt_pte_t pte = valid ? GEN6_PTE_VALID : 0; local
175 gen6_gtt_pte_t pte = valid ? GEN6_PTE_VALID : 0; local
188 gen6_gtt_pte_t pte = valid ? GEN6_PTE_VALID : 0; local
256 unsigned pte = start >> GEN8_PTE_SHIFT & GEN8_PTE_MASK; local
299 unsigned pte = start >> GEN8_PTE_SHIFT & GEN8_PTE_MASK; local
624 int pte, pde; local
1372 gen8_set_pte(void __iomem *addr, gen8_gtt_pte_t pte) argument
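
The i915 snippets all follow one shape: start from a valid bit, OR in the encoded address, then OR in a cache-mode field chosen per platform. A toy version of that construction; the bit positions are placeholders, not the real GEN6 encodings:

    #include <stdint.h>
    #include <stdio.h>

    #define TOY_PTE_VALID     (1u << 0)  /* placeholder flag bits */
    #define TOY_PTE_CACHE_LLC (1u << 1)

    static uint32_t toy_gen6_pte(uint64_t addr, int valid)
    {
        uint32_t pte = valid ? TOY_PTE_VALID : 0;
        pte |= (uint32_t)(addr & ~0xfffULL);  /* page-aligned address bits */
        pte |= TOY_PTE_CACHE_LLC;             /* per-platform cache field */
        return pte;
    }

    int main(void)
    {
        printf("pte = 0x%08x\n", toy_gen6_pte(0x40000000, 1));
        return 0;
    }
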
/drivers/usb/host/
ehci-tilegx.c
105 pte_t pte = { 0 }; local
163 pte = pte_set_home(pte, PAGE_HOME_HASH);
164 ret = gxio_usb_host_register_client_memory(&pdata->usb_ctx, pte, 0);
ohci-tilegx.c
99 pte_t pte = { 0 }; local
150 pte = pte_set_home(pte, PAGE_HOME_HASH);
151 ret = gxio_usb_host_register_client_memory(&pdata->usb_ctx, pte, 0);
/drivers/gpu/drm/nouveau/core/include/subdev/
vm.h
83 struct nouveau_mem *, u32 pte, u32 cnt,
86 struct nouveau_mem *, u32 pte, u32 cnt, dma_addr_t *);
87 void (*unmap)(struct nouveau_gpuobj *pgt, u32 pte, u32 cnt);
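
vm.h shows why the nouveau files above all share the (pgt, pte, cnt) signature: each GPU generation plugs its own map/map_sg/unmap callbacks into a common ops table, and base.c drives them. A minimal sketch of that dispatch pattern with a made-up backend:

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical stand-in for the per-generation ops in vm.h. */
    struct toy_vmm_ops {
        void (*unmap)(void *pgt, uint32_t pte, uint32_t cnt);
    };

    static void toy_unmap(void *pgt, uint32_t pte, uint32_t cnt)
    {
        printf("clear %u PTEs starting at index %u\n", cnt, pte);
    }

    int main(void)
    {
        struct toy_vmm_ops ops = { .unmap = toy_unmap };
        ops.unmap(NULL, 16, 4);  /* base.c dispatches per generation */
        return 0;
    }
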
/drivers/md/
dm-switch.c
179 region_table_slot_t pte; local
183 pte = sctx->region_table[region_index];
184 pte &= ~((((region_table_slot_t)1 << sctx->region_table_entry_bits) - 1) << bit);
185 pte |= (region_table_slot_t)value << bit;
186 sctx->region_table[region_index] = pte;
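
dm-switch reuses the name pte for a packed region-table slot: updating one entry is a read-modify-write that masks out region_table_entry_bits at the entry's bit position and ORs in the new value. A worked example with an assumed field width:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint64_t slot = ~0ULL;               /* slot with all fields set */
        unsigned entry_bits = 4;             /* hypothetical field width */
        unsigned bit = 2 * entry_bits;       /* third entry in the slot */
        uint64_t value = 0x9;

        slot &= ~((((uint64_t)1 << entry_bits) - 1) << bit);  /* clear field */
        slot |= value << bit;                                 /* set new value */
        printf("slot = 0x%016llx\n", (unsigned long long)slot);
        return 0;
    }
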
/drivers/net/ethernet/tile/
tilepro.c
464 HV_PTE pte = *virt_to_pte(current->mm, (unsigned long)va); local
465 if (hv_pte_get_mode(pte) != HV_PTE_MODE_CACHE_HASH_L3)
467 va, hv_pte_get_mode(pte), hv_pte_val(pte));
941 .pte = hv_pte(0),
944 ea.pte = hv_pte_set_lotar(ea.pte, epp_lotar);
945 ea.pte = hv_pte_set_mode(ea.pte, HV_PTE_MODE_CACHE_TILE_L3);
1896 HV_PTE pte local
/drivers/char/agp/
amd64-agp.c
50 u32 pte; local
86 pte = (tmp & 0x000000ff00000000ULL) >> 28;
87 pte |=(tmp & 0x00000000fffff000ULL);
88 pte |= GPTE_VALID | GPTE_COHERENT;
90 writel(pte, agp_bridge->gatt_table+j);
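
The amd64-agp snippet squeezes a 40-bit physical address into a 32-bit GATT entry by relocating address bits 39:32 down into PTE bits 11:4 (the >> 28 shift) alongside the page-aligned low bits. A model of that packing; the flag values are assumptions for illustration:

    #include <stdint.h>
    #include <stdio.h>

    #define GPTE_VALID    1u  /* assumed flag values */
    #define GPTE_COHERENT 2u

    static uint32_t make_gatt_pte(uint64_t tmp)
    {
        uint32_t pte;

        pte  = (tmp & 0x000000ff00000000ULL) >> 28;  /* bits 39:32 -> 11:4 */
        pte |= (tmp & 0x00000000fffff000ULL);        /* page-aligned low bits */
        pte |= GPTE_VALID | GPTE_COHERENT;
        return pte;
    }

    int main(void)
    {
        printf("pte = 0x%08x\n", make_gatt_pte(0x003456789000ULL));
        return 0;
    }
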
/drivers/misc/sgi-gru/
grufault.c
225 pte_t pte; local
240 pte = *(pte_t *) pmdp;
243 pte = *pte_offset_kernel(pmdp, vaddr);
245 if (unlikely(!pte_present(pte) ||
246 (write && (!pte_write(pte) || !pte_dirty(pte)))))
249 *paddr = pte_pfn(pte) << PAGE_SHIFT;
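
grufault.c ends with the classic software PTE check: the entry must be present, and a write access additionally requires the writable and dirty bits, before the PFN is turned into a physical address. A sketch of that test with illustrative flag bits:

    #include <stdint.h>
    #include <stdbool.h>
    #include <stdio.h>

    #define PTE_PRESENT (1u << 0)  /* illustrative bit positions */
    #define PTE_WRITE   (1u << 1)
    #define PTE_DIRTY   (1u << 2)

    static bool pte_usable(uint32_t pte, bool write)
    {
        if (!(pte & PTE_PRESENT))
            return false;
        if (write && (!(pte & PTE_WRITE) || !(pte & PTE_DIRTY)))
            return false;
        return true;
    }

    int main(void)
    {
        printf("read ok: %d, write ok: %d\n",
               pte_usable(PTE_PRESENT, false), pte_usable(PTE_PRESENT, true));
        return 0;
    }
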

Completed in 536 milliseconds
