Searched refs:pte (Results 1 - 19 of 19) sorted by relevance

/external/kernel-headers/original/asm-mips/
pgtable.h
91 #define pte_none(pte) (!(((pte).pte_low | (pte).pte_high) & ~_PAGE_GLOBAL))
92 #define pte_present(pte) ((pte).pte_low & _PAGE_PRESENT)
94 static inline void set_pte(pte_t *ptep, pte_t pte) argument
96 ptep->pte_high = pte.pte_high;
98 ptep->pte_low = pte.pte_low;
101 if (pte.pte_low & _PAGE_GLOBAL) {
127 #define pte_none(pte) (!(pte_va
193 pte_write(pte_t pte) argument
194 pte_dirty(pte_t pte) argument
195 pte_young(pte_t pte) argument
196 pte_file(pte_t pte) argument
198 pte_wrprotect(pte_t pte) argument
205 pte_mkclean(pte_t pte) argument
212 pte_mkold(pte_t pte) argument
219 pte_mkwrite(pte_t pte) argument
229 pte_mkdirty(pte_t pte) argument
239 pte_mkyoung(pte_t pte) argument
249 pte_write(pte_t pte) argument
250 pte_dirty(pte_t pte) argument
251 pte_young(pte_t pte) argument
252 pte_file(pte_t pte) argument
254 pte_wrprotect(pte_t pte) argument
260 pte_mkclean(pte_t pte) argument
266 pte_mkold(pte_t pte) argument
272 pte_mkwrite(pte_t pte) argument
280 pte_mkdirty(pte_t pte) argument
288 pte_mkyoung(pte_t pte) argument
296 pte_special(pte_t pte) argument
297 pte_mkspecial(pte_t pte) argument
323 pte_modify(pte_t pte, pgprot_t newprot) argument
332 pte_modify(pte_t pte, pgprot_t newprot) argument
344 update_mmu_cache(struct vm_area_struct *vma, unsigned long address, pte_t pte) argument
[all...]
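
Most of the pgtable.h hits above are one-line accessors (pte_write, pte_dirty, pte_young) and mutators (pte_mkdirty, pte_wrprotect, ...) that test or flip a single flag bit in the entry. A minimal user-space sketch of that pattern, with made-up flag values standing in for the architecture-specific _PAGE_* bits:

/* Sketch of the accessor/mutator pattern; flag positions are hypothetical. */
#include <stdio.h>

typedef struct { unsigned long pte; } pte_t;

#define _PAGE_PRESENT  0x001UL
#define _PAGE_WRITE    0x002UL
#define _PAGE_DIRTY    0x004UL

static unsigned long pte_val(pte_t pte) { return pte.pte; }
static pte_t __pte(unsigned long v) { pte_t p = { v }; return p; }

/* testers: report one flag bit */
static int pte_write(pte_t pte) { return (pte_val(pte) & _PAGE_WRITE) != 0; }
static int pte_dirty(pte_t pte) { return (pte_val(pte) & _PAGE_DIRTY) != 0; }

/* mutators: return a copy with one flag set or cleared */
static pte_t pte_mkdirty(pte_t pte)   { return __pte(pte_val(pte) | _PAGE_DIRTY); }
static pte_t pte_wrprotect(pte_t pte) { return __pte(pte_val(pte) & ~_PAGE_WRITE); }

int main(void)
{
    pte_t pte = __pte(_PAGE_PRESENT | _PAGE_WRITE);
    pte = pte_mkdirty(pte);
    pte = pte_wrprotect(pte);
    printf("write=%d dirty=%d\n", pte_write(pte), pte_dirty(pte));
    return 0;
}

Note that the mutators return a modified copy rather than writing through a pointer; the caller stores the result back with set_pte().
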
pgalloc.h
17 pte_t *pte)
19 set_pmd(pmd, __pmd((unsigned long)pte));
23 pgtable_t pte)
25 set_pmd(pmd, __pmd((unsigned long)page_address(pte)));
70 pte_t *pte; local
72 pte = (pte_t *) __get_free_pages(GFP_KERNEL|__GFP_REPEAT|__GFP_ZERO, PTE_ORDER);
74 return pte;
80 struct page *pte; local
82 pte = alloc_pages(GFP_KERNEL | __GFP_REPEAT, PTE_ORDER);
83 if (pte) {
16 pmd_populate_kernel(struct mm_struct *mm, pmd_t *pmd, pte_t *pte) argument
22 pmd_populate(struct mm_struct *mm, pmd_t *pmd, pgtable_t pte) argument
90 pte_free_kernel(struct mm_struct *mm, pte_t *pte) argument
95 pte_free(struct mm_struct *mm, pgtable_t pte) argument
[all...]
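
The pgalloc.h matches cover the allocate/populate/free cycle for a PTE table: grab a zeroed page, point the pmd entry at it, free it again on teardown. A rough user-space model of that cycle, with calloc() standing in for __get_free_pages(... | __GFP_ZERO) and a plain pointer store standing in for set_pmd() (the real code stores a physical address, not a virtual one):

/* Rough model of pte_alloc_one_kernel() / pmd_populate_kernel() / pte_free_kernel(). */
#include <stdlib.h>
#include <stdio.h>

typedef struct { unsigned long pte; } pte_t;
typedef struct { unsigned long pmd; } pmd_t;

#define PAGE_SIZE     4096
#define PTRS_PER_PTE  (PAGE_SIZE / sizeof(pte_t))

static pte_t *pte_alloc_one_kernel(void)
{
    return calloc(1, PAGE_SIZE);        /* one zeroed page of PTE slots */
}

static void pmd_populate_kernel(pmd_t *pmd, pte_t *pte)
{
    pmd->pmd = (unsigned long)pte;      /* real code: set_pmd(pmd, __pmd(...)) */
}

static void pte_free_kernel(pte_t *pte)
{
    free(pte);                          /* real code: free_page() */
}

int main(void)
{
    pmd_t pmd;
    pte_t *pte = pte_alloc_one_kernel();
    if (!pte)
        return 1;
    pmd_populate_kernel(&pmd, pte);
    printf("pmd -> %#lx, %zu entries per table\n", pmd.pmd, (size_t)PTRS_PER_PTE);
    pte_free_kernel(pte);
    return 0;
}
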
pgtable-32.h
62 printk("%s:%d: bad pte %016Lx.\n", __FILE__, __LINE__, pte_val(e))
65 printk("%s:%d: bad pte %08lx.\n", __FILE__, __LINE__, pte_val(e))
100 pte_t pte; local
101 pte.pte_high = (pfn << 6) | (pgprot_val(prot) & 0x3f);
102 pte.pte_low = pgprot_val(prot);
103 return pte;
111 #define pte_pfn(x) ((unsigned long)((x).pte >> (PAGE_SHIFT + 2)))
114 #define pte_pfn(x) ((unsigned long)((x).pte >> _PFN_SHIFT))
141 #define pte_unmap(pte) ((void)(pte))
[all...]
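
The pfn_pte() snippet above (apparently the 64-bit physical address configuration on 32-bit MIPS) packs the page frame number into pte_high above a 6-bit slice of the protection flags, while pte_low keeps the full flag word. A round-trip sketch of that packing; the inverse shift in pte_pfn() is inferred from the layout shown, not quoted from the header:

/* Round-trip check of the pfn_pte()/pte_pfn() packing shown above. */
#include <stdio.h>
#include <assert.h>

typedef struct { unsigned int pte_low, pte_high; } pte_t;

static pte_t pfn_pte(unsigned long pfn, unsigned int prot)
{
    pte_t pte;
    pte.pte_high = (unsigned int)((pfn << 6) | (prot & 0x3f));
    pte.pte_low  = prot;
    return pte;
}

static unsigned long pte_pfn(pte_t pte)
{
    return pte.pte_high >> 6;           /* assumed inverse of the shift above */
}

int main(void)
{
    pte_t pte = pfn_pte(0x12345, 0x2b); /* arbitrary pfn and flag bits */
    assert(pte_pfn(pte) == 0x12345);
    printf("pte_high=%#x pte_low=%#x pfn=%#lx\n",
           pte.pte_high, pte.pte_low, pte_pfn(pte));
    return 0;
}
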
/external/kernel-headers/original/asm-x86/
pgalloc_32.h
18 #define pmd_populate_kernel(mm, pmd, pte) \
20 paravirt_alloc_pt(mm, __pa(pte) >> PAGE_SHIFT); \
21 set_pmd(pmd, __pmd(_PAGE_TABLE + __pa(pte))); \
24 #define pmd_populate(mm, pmd, pte) \
26 paravirt_alloc_pt(mm, page_to_pfn(pte)); \
28 ((unsigned long long)page_to_pfn(pte) << \
41 static inline void pte_free_kernel(pte_t *pte) argument
43 free_page((unsigned long)pte);
46 static inline void pte_free(struct page *pte) argument
48 __free_page(pte);
[all...]
pgtable_32.h
218 static inline int pte_dirty(pte_t pte) { return (pte).pte_low & _PAGE_DIRTY; } argument
219 static inline int pte_young(pte_t pte) { return (pte).pte_low & _PAGE_ACCESSED; } argument
220 static inline int pte_write(pte_t pte) { return (pte).pte_low & _PAGE_RW; } argument
221 static inline int pte_huge(pte_t pte) { return (pte).pte_low & _PAGE_PSE; } argument
226 static inline int pte_file(pte_t pte) { return (pte) argument
228 pte_mkclean(pte_t pte) argument
229 pte_mkold(pte_t pte) argument
230 pte_wrprotect(pte_t pte) argument
231 pte_mkdirty(pte_t pte) argument
232 pte_mkyoung(pte_t pte) argument
233 pte_mkwrite(pte_t pte) argument
234 pte_mkhuge(pte_t pte) argument
313 pte_t pte = native_ptep_get_and_clear(ptep); local
321 pte_t pte; local
370 pte_modify(pte_t pte, pgprot_t newprot) argument
[all...]
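
The pte_modify() hit above keeps the part of the entry that identifies the page frame and swaps in new protection bits from the pgprot. A sketch of that idea; the mask here is a bare frame mask, whereas the real x86 code uses _PAGE_CHG_MASK, which also preserves the accessed and dirty bits:

/* Sketch of pte_modify(): keep the frame bits, replace the protection bits. */
#include <stdio.h>

typedef struct { unsigned long pte_low; } pte_t;
typedef struct { unsigned long pgprot; } pgprot_t;

#define PAGE_SHIFT   12
#define PTE_PFN_MASK (~0UL << PAGE_SHIFT)   /* illustrative frame mask */

static pte_t pte_modify(pte_t pte, pgprot_t newprot)
{
    pte.pte_low = (pte.pte_low & PTE_PFN_MASK) | (newprot.pgprot & ~PTE_PFN_MASK);
    return pte;
}

int main(void)
{
    pte_t pte = { (0xabcdeUL << PAGE_SHIFT) | 0x027 };  /* frame + old flags */
    pgprot_t ro = { 0x025 };                            /* hypothetical read-only flags */
    pte = pte_modify(pte, ro);
    printf("pte_low=%#lx\n", pte.pte_low);              /* frame kept, flags replaced */
    return 0;
}
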
pgtable-2level.h
5 printk("%s:%d: bad pte %08lx.\n", __FILE__, __LINE__, (e).pte_low)
14 static inline void native_set_pte(pte_t *ptep , pte_t pte) argument
16 *ptep = pte;
19 pte_t *ptep , pte_t pte)
21 native_set_pte(ptep, pte);
62 static inline int pte_exec_kernel(pte_t pte) argument
73 #define pte_to_pgoff(pte) \
74 ((((pte).pte_low >> 1) & 0x1f ) + (((pte).pte_low >> 8) << 5 ))
83 #define __pte_to_swp_entry(pte) ((swp_entry_
18 native_set_pte_at(struct mm_struct *mm, unsigned long addr, pte_t *ptep , pte_t pte) argument
[all...]
page_32.h
62 static inline unsigned long long native_pte_val(pte_t pte) argument
64 return pte.pte_low | ((unsigned long long)pte.pte_high << 32);
100 static inline unsigned long native_pte_val(pte_t pte) argument
102 return pte.pte_low;
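
The two native_pte_val() variants differ only in whether the entry is split across pte_low/pte_high (the PAE configuration) or fits in one word. A quick check of the 64-bit composition from the snippet:

/* Compose a 64-bit pte value from the two 32-bit halves, as shown above. */
#include <stdio.h>
#include <assert.h>

typedef struct { unsigned int pte_low, pte_high; } pte_t;

static unsigned long long native_pte_val(pte_t pte)
{
    return pte.pte_low | ((unsigned long long)pte.pte_high << 32);
}

int main(void)
{
    pte_t pte = { 0x89abc067u, 0x00000012u };   /* low word: frame bits + flags */
    unsigned long long v = native_pte_val(pte);
    assert(v == 0x1289abc067ULL);
    printf("pte value = %#llx\n", v);
    return 0;
}
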
paravirt.h
218 pte_t *ptep, pte_t pte);
227 pte_t (*make_pte)(unsigned long long pte);
234 pte_t (*make_pte)(unsigned long pte);
883 pte_t *ptep, pte_t pte)
886 pv_mmu_ops.set_pte_present(mm, addr, ptep, pte);
882 set_pte_present(struct mm_struct *mm, unsigned long addr, pte_t *ptep, pte_t pte) argument
/external/kernel-headers/original/asm-arm/
pgalloc.h
31 #define pgd_populate(mm,pmd,pte) BUG()
58 pte_t *pte; local
60 pte = (pte_t *)__get_free_page(GFP_KERNEL|__GFP_REPEAT|__GFP_ZERO);
61 if (pte) {
62 clean_dcache_area(pte, sizeof(pte_t) * PTRS_PER_PTE);
63 pte += PTRS_PER_PTE;
66 return pte;
72 struct page *pte; local
74 pte = alloc_pages(GFP_KERNEL|__GFP_REPEAT|__GFP_ZERO, 0);
75 if (pte) {
86 pte_free_kernel(pte_t *pte) argument
94 pte_free(struct page *pte) argument
[all...]
pgtable.h
62 * pgd pte
118 #define pte_ERROR(pte) __pte_error(__FILE__, __LINE__, pte_val(pte))
221 #define pte_pfn(pte) (pte_val(pte) >> PAGE_SHIFT)
224 #define pte_none(pte) (!pte_val(pte))
226 #define pte_page(pte) (pfn_to_page(pte_pfn(pte)))
230 #define pte_unmap(pte) d
342 pte_modify(pte_t pte, pgprot_t newprot) argument
[all...]
page.h
143 typedef struct { unsigned long pte; } pte_t; member in struct:__anon19092
148 #define pte_val(x) ((x).pte)
tlbflush.h
423 extern void update_mmu_cache(struct vm_area_struct *vma, unsigned long addr, pte_t pte);
/external/oprofile/module/
op_util.c
27 pte_t * ptep, pte; local
33 pte = *ptep;
34 if (pte_present(pte)) {
35 ret = (unsigned long) pte_page_address(pte);
/external/oprofile/module/x86/
op_fixmap.c
46 pte_t * pte; local
50 pte = pte_offset(pmd, vaddr);
56 set_pte(pte, mk_pte_phys(phys, prot));
72 pte_t * pte; local
86 pte = pte_offset(pmd, vaddr);
87 if (!pte)
91 pte_clear(pte);
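
op_fixmap.c walks down to the pte slot for a fixmap address and installs or clears a mapping with set_pte()/pte_clear(). A tiny model of that pair, treating the page table as a plain array and using a hypothetical mk_pte_phys() that just ORs protection bits into a page-aligned physical address:

/* Minimal model of set_pte()/pte_clear() on an array-backed "page table". */
#include <stdio.h>

typedef struct { unsigned long pte; } pte_t;

#define PAGE_SHIFT   12
#define PTRS_PER_PTE 1024

static pte_t page_table[PTRS_PER_PTE];

static pte_t mk_pte_phys(unsigned long phys, unsigned long prot)
{
    pte_t e = { (phys & ~((1UL << PAGE_SHIFT) - 1)) | prot };
    return e;
}

static void set_pte(pte_t *ptep, pte_t pte) { *ptep = pte; }
static void pte_clear(pte_t *ptep)          { ptep->pte = 0; }

int main(void)
{
    pte_t *pte = &page_table[5];                 /* stand-in for pte_offset() */
    set_pte(pte, mk_pte_phys(0x1f000, 0x3));     /* install a mapping */
    printf("installed: %#lx\n", pte->pte);
    pte_clear(pte);                              /* tear it down again */
    printf("cleared:   %#lx\n", pte->pte);
    return 0;
}
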
/external/kernel-headers/original/asm-generic/
pgtable.h
11 * We hold the mm semaphore for reading, and the pte lock.
13 * Note: the old pte is known to not be writable, so we don't need to
145 #define pte_maybe_dirty(pte) pte_dirty(pte)
147 #define pte_maybe_dirty(pte) (1)
159 #define lazy_mmu_prot_update(pte) do { } while (0)
163 #define move_pte(pte, prot, old_addr, new_addr) (pte)
pgtable-nopmd.h
35 #define pud_populate(mm, pmd, pte) do { } while (0)
/external/llvm/runtime/libprofile/
PathProfiling.c
70 PathProfileTableEntry pte; local
71 pte.pathNumber = arrayIterator;
72 pte.pathCounter = pc;
84 if (write(outFile, &pte, sizeof(PathProfileTableEntry)) < 0) {
135 PathProfileTableEntry pte; local
136 pte.pathNumber = hashEntry->pathNumber;
137 pte.pathCounter = hashEntry->pathCount;
139 if (write(outFile, &pte, sizeof(PathProfileTableEntry)) < 0) {
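
PathProfiling.c streams fixed-size PathProfileTableEntry records to the output file one write() at a time, bailing out when write() reports an error. A self-contained sketch of that pattern; the two-field layout is assumed from the snippet rather than taken from the real header:

/* Write fixed-size profile records to a file descriptor, checking each write(). */
#include <fcntl.h>
#include <stdio.h>
#include <stdint.h>
#include <unistd.h>

typedef struct {
    uint32_t pathNumber;    /* assumed field layout */
    uint32_t pathCounter;
} PathProfileTableEntry;

static int writeEntry(int outFile, uint32_t number, uint32_t count)
{
    PathProfileTableEntry pte;
    pte.pathNumber  = number;
    pte.pathCounter = count;
    if (write(outFile, &pte, sizeof(PathProfileTableEntry)) < 0) {
        perror("error: unable to write path profile entry");
        return -1;
    }
    return 0;
}

int main(void)
{
    int fd = open("llvmprof.out.example", O_CREAT | O_WRONLY | O_TRUNC, 0644);
    if (fd < 0)
        return 1;
    for (uint32_t i = 0; i < 4; i++)
        writeEntry(fd, i, i * 10);   /* a few fake (pathNumber, counter) pairs */
    close(fd);
    return 0;
}
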
/external/qemu/target-i386/
helper.c
984 uint64_t ptep, pte; local
999 pte = addr;
1105 pte = pde & ((PHYS_ADDR_MASK & ~(page_size - 1)) | 0xfff);
1115 pte = ldq_phys(pte_addr);
1116 if (!(pte & PG_PRESENT_MASK)) {
1120 if (!(env->efer & MSR_EFER_NXE) && (pte & PG_NX_MASK)) {
1124 /* combine pde and pte nx, user and rw protections */
1125 ptep &= pte ^ PG_NX_MASK;
1139 is_dirty = is_write && !(pte & PG_DIRTY_MASK);
1140 if (!(pte
1274 uint64_t pte; local
[all...]
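
The helper.c walk reads each level's entry, faults if PG_PRESENT_MASK is clear, and folds the pde and pte permissions together with "ptep &= pte ^ PG_NX_MASK": the XOR flips NX (set means no-execute) into "execute allowed" polarity so the levels can be AND-combined and the most restrictive permission wins. A small model of that combining step, using the standard x86 bit positions for the low flags and bit 63 for NX:

/* Model of the present check and pde/pte permission combining from the walk above. */
#include <stdio.h>
#include <stdint.h>

#define PG_PRESENT_MASK (1ULL << 0)
#define PG_RW_MASK      (1ULL << 1)
#define PG_USER_MASK    (1ULL << 2)
#define PG_DIRTY_MASK   (1ULL << 6)
#define PG_NX_MASK      (1ULL << 63)

int main(void)
{
    uint64_t pde = PG_PRESENT_MASK | PG_RW_MASK | PG_USER_MASK;   /* executable level */
    uint64_t pte = PG_PRESENT_MASK | PG_RW_MASK | PG_NX_MASK;     /* no-execute leaf */

    if (!(pte & PG_PRESENT_MASK)) {
        puts("fault: page not present");
        return 0;
    }

    /* combine pde and pte nx, user and rw protections; after the XOR the NX
     * bit in ptep means "execute allowed", so AND keeps the stricter setting */
    uint64_t ptep = pde ^ PG_NX_MASK;
    ptep &= pte ^ PG_NX_MASK;

    int is_dirty = !!(pte & PG_DIRTY_MASK);   /* a write would have to set this */
    printf("rw=%d user=%d exec=%d dirty=%d\n",
           !!(ptep & PG_RW_MASK), !!(ptep & PG_USER_MASK),
           !!(ptep & PG_NX_MASK), is_dirty);
    return 0;
}
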
/external/kernel-headers/original/linux/
mm.h
861 #define pte_unmap_unlock(pte, ptl) do { \
863 pte_unmap(pte); \
1025 #define FOLL_WRITE 0x01 /* check pte is writable */

Completed in 312 milliseconds