Searched defs:paddr (Results 1 - 25 of 116) sorted by relevance

/arch/frv/mm/
highmem.c
42 unsigned long paddr; local
47 paddr = page_to_phys(page);
53 case 0: return __kmap_atomic_primary(0, paddr, 6);
54 case 1: return __kmap_atomic_primary(0, paddr, 7);
55 case 2: return __kmap_atomic_primary(0, paddr, 8);
56 case 3: return __kmap_atomic_primary(0, paddr, 9);
57 case 4: return __kmap_atomic_primary(0, paddr, 10);
60 return __kmap_atomic_secondary(type - 5, paddr);
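The kmap_atomic() hit above computes paddr with page_to_phys() and installs it into a fixed mapping slot chosen by the kmap type: types 0-4 get dedicated primary registers (AMPR 6-10 in this snippet) and higher types spill into secondary slots. A minimal user-space sketch of just that slot-selection logic; map_primary() and map_secondary() are hypothetical stand-ins for the real FRV primitives:

    #include <stdio.h>

    /* Hypothetical stand-ins for the FRV mapping primitives. */
    static void *map_primary(unsigned long paddr, int ampr)
    {
        printf("map paddr 0x%lx via primary AMPR%d\n", paddr, ampr);
        return (void *)paddr;           /* pretend the mapping is identity */
    }

    static void *map_secondary(unsigned long paddr, int slot)
    {
        printf("map paddr 0x%lx via secondary slot %d\n", paddr, slot);
        return (void *)paddr;
    }

    /* Sketch of the switch in the hit above: kmap types 0-4 get dedicated
     * primary registers 6-10, everything else falls back to a secondary slot. */
    static void *sketch_kmap_atomic(unsigned long paddr, int type)
    {
        if (type >= 0 && type <= 4)
            return map_primary(paddr, 6 + type);
        return map_secondary(paddr, type - 5);
    }

    int main(void)
    {
        sketch_kmap_atomic(0x1000, 2);  /* -> primary AMPR8 */
        sketch_kmap_atomic(0x2000, 7);  /* -> secondary slot 2 */
        return 0;
    }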
/arch/arc/mm/
dma.c
32 void *paddr; local
35 paddr = alloc_pages_exact(size, gfp);
36 if (!paddr)
40 *dma_handle = plat_kernel_addr_to_dma(dev, paddr);
42 return paddr;
57 void *paddr, *kvaddr; local
60 paddr = alloc_pages_exact(size, gfp);
61 if (!paddr)
65 kvaddr = ioremap_nocache((unsigned long)paddr, size);
70 *dma_handle = plat_kernel_addr_to_dma(dev, paddr);
89 __arc_dma_cache_sync(unsigned long paddr, size_t size, enum dma_data_direction dir) argument
[all...]
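Both dma.c hits above follow the same pattern: allocate pages, translate the kernel address to a bus address with plat_kernel_addr_to_dma(), and, in the second (coherent) variant, remap the buffer uncached with ioremap_nocache() before returning it. A rough user-space sketch of that flow, assuming hypothetical alloc_buf(), remap_uncached() and kvaddr_to_bus() helpers in place of the kernel APIs:

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    /* Hypothetical stand-ins for the platform helpers used in the hit above. */
    static void *alloc_buf(size_t size)         { return malloc(size); }
    static uint64_t kvaddr_to_bus(void *kvaddr) { return (uint64_t)(uintptr_t)kvaddr; }
    static void *remap_uncached(void *kvaddr, size_t size)
    {
        (void)size;
        return kvaddr;          /* in the kernel this would be ioremap_nocache() */
    }

    /* Sketch of the coherent path: allocate, remap uncached, report the bus address. */
    static void *sketch_dma_alloc(size_t size, uint64_t *dma_handle)
    {
        void *paddr = alloc_buf(size);
        if (!paddr)
            return NULL;

        void *kvaddr = remap_uncached(paddr, size);
        *dma_handle = kvaddr_to_bus(paddr);
        return kvaddr;
    }

    int main(void)
    {
        uint64_t handle;
        void *buf = sketch_dma_alloc(4096, &handle);
        printf("cpu %p, bus 0x%llx\n", buf, (unsigned long long)handle);
        free(buf);
        return 0;
    }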
ioremap.c
17 void __iomem *ioremap(unsigned long paddr, unsigned long size) argument
22 end = paddr + size - 1;
23 if (!size || (end < paddr))
27 if (paddr >= ARC_UNCACHED_ADDR_SPACE)
28 return (void __iomem *)paddr;
30 return ioremap_prot(paddr, size, PAGE_KERNEL_NO_CACHE);
41 void __iomem *ioremap_prot(phys_addr_t paddr, unsigned long size, argument
50 end = paddr + size - 1;
51 if ((!size) || (end < paddr))
62 off = paddr
[all...]
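The ioremap() hit above rejects empty or wrapping [paddr, paddr+size) ranges and short-circuits addresses that already live in the hardware-uncached window (ARC_UNCACHED_ADDR_SPACE). A small stand-alone sketch of those checks; the UNCACHED_BASE value below is a made-up placeholder:

    #include <limits.h>
    #include <stdio.h>

    /* Made-up base of an always-uncached window, standing in for ARC_UNCACHED_ADDR_SPACE. */
    #define UNCACHED_BASE 0xc0000000UL

    /* Sketch of the sanity checks in the hit above: reject a zero-sized or
     * wrapping range, and skip the remap entirely for already-uncached addresses. */
    static int check_ioremap(unsigned long paddr, unsigned long size)
    {
        unsigned long end = paddr + size - 1;

        if (!size || end < paddr)       /* empty range or arithmetic wrap-around */
            return -1;
        if (paddr >= UNCACHED_BASE)     /* already uncached: no mapping needed */
            return 1;
        return 0;                       /* would need a real uncached mapping */
    }

    int main(void)
    {
        printf("%d\n", check_ioremap(0x10000000UL, 0x1000));       /* 0: needs mapping */
        printf("%d\n", check_ioremap(ULONG_MAX - 0xfff, 0x2000));  /* -1: range wraps */
        printf("%d\n", check_ioremap(0xc1000000UL, 0x100));        /* 1: already uncached */
        return 0;
    }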
/arch/arm/plat-samsung/
pm-debug.c
57 unsigned long paddr; local
60 debug_ll_addr(&paddr, &vaddr);
/arch/m68k/mm/
cache.c
17 unsigned long paddr; local
35 : "=a" (paddr)
37 return paddr;
memory.c
130 static inline void clear040(unsigned long paddr) argument
137 : : "a" (paddr));
141 static inline void cleari040(unsigned long paddr) argument
148 : : "a" (paddr));
153 static inline void push040(unsigned long paddr) argument
160 : : "a" (paddr));
165 static inline void pushcl040(unsigned long paddr) argument
170 push040(paddr);
172 clear040(paddr);
177 * 040: Hit every page containing an address in the range paddr
203 cache_clear(unsigned long paddr, int len) argument
252 cache_push(unsigned long paddr, int len) argument
[all...]
/arch/frv/include/asm/
highmem.h
79 #define __kmap_atomic_primary(cached, paddr, ampr) \
83 dampr = paddr | xAMPRx_L | xAMPRx_M | xAMPRx_S | xAMPRx_SS_16Kb | xAMPRx_V; \
101 #define __kmap_atomic_secondary(slot, paddr) \
104 unsigned long dampr = paddr | xAMPRx_L | xAMPRx_M | xAMPRx_S | xAMPRx_SS_16Kb | xAMPRx_V; \
118 unsigned long paddr; local
121 paddr = page_to_phys(page);
123 return __kmap_atomic_primary(1, paddr, 2);
/arch/ia64/kvm/
misc.h
35 u64 paddr, u64 mem_flags)
40 pte = PAGE_ALIGN(paddr) | mem_flags;
34 kvm_set_pmt_entry(struct kvm *kvm, gfn_t gfn, u64 paddr, u64 mem_flags) argument
/arch/ia64/mm/
numa.c
41 paddr_to_nid(unsigned long paddr) argument
46 if (paddr >= node_memblk[i].start_paddr &&
47 paddr < node_memblk[i].start_paddr + node_memblk[i].size)
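paddr_to_nid() above scans node_memblk[] for the block whose [start_paddr, start_paddr+size) range contains the physical address and returns that block's node. A self-contained sketch of the lookup; the memblk table contents here are made up for illustration:

    #include <stdio.h>

    struct memblk {
        unsigned long start_paddr;
        unsigned long size;
        int nid;
    };

    /* Made-up layout: two memory blocks on two NUMA nodes. */
    static const struct memblk node_memblk[] = {
        { 0x00000000UL, 0x40000000UL, 0 },
        { 0x80000000UL, 0x40000000UL, 1 },
    };

    /* Sketch of paddr_to_nid(): linear scan for the block containing paddr. */
    static int sketch_paddr_to_nid(unsigned long paddr)
    {
        for (unsigned i = 0; i < sizeof(node_memblk) / sizeof(node_memblk[0]); i++) {
            if (paddr >= node_memblk[i].start_paddr &&
                paddr < node_memblk[i].start_paddr + node_memblk[i].size)
                return node_memblk[i].nid;
        }
        return -1;      /* not covered by any block */
    }

    int main(void)
    {
        printf("%d %d %d\n",
               sketch_paddr_to_nid(0x1000),        /* 0 */
               sketch_paddr_to_nid(0x90000000UL),  /* 1 */
               sketch_paddr_to_nid(0x50000000UL)); /* -1: hole between blocks */
        return 0;
    }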
/arch/mips/include/asm/mach-generic/
dma-coherence.h
56 static inline dma_addr_t phys_to_dma(struct device *dev, phys_addr_t paddr) argument
58 return paddr;
/arch/sparc/kernel/
iommu_common.h
44 unsigned long paddr = SG_ENT_PHYS_ADDRESS(outs); local
45 int nr = iommu_num_pages(paddr, outs->dma_length + sg->length,
/arch/c6x/kernel/
dma.c
20 unsigned long paddr = handle; local
26 L2_cache_block_invalidate(paddr, paddr + size);
29 L2_cache_block_writeback(paddr, paddr + size);
32 L2_cache_block_writeback_invalidate(paddr, paddr + size);
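The c6x hit dispatches on the DMA direction and applies the matching L2 cache block operation over [paddr, paddr+size). The case labels are not visible in the snippet, so the usual convention is assumed here: DMA_FROM_DEVICE invalidates, DMA_TO_DEVICE writes back, DMA_BIDIRECTIONAL does both. A user-space sketch with hypothetical cache_* helpers that just log:

    #include <stdio.h>

    enum dma_dir { DMA_BIDIRECTIONAL, DMA_TO_DEVICE, DMA_FROM_DEVICE };

    /* Hypothetical stand-ins for the L2 cache block operations. */
    static void cache_inv(unsigned long s, unsigned long e)    { printf("inv   [0x%lx, 0x%lx)\n", s, e); }
    static void cache_wb(unsigned long s, unsigned long e)     { printf("wb    [0x%lx, 0x%lx)\n", s, e); }
    static void cache_wb_inv(unsigned long s, unsigned long e) { printf("wbinv [0x%lx, 0x%lx)\n", s, e); }

    /* Sketch of the direction dispatch in the hit above. */
    static void sketch_dma_sync(unsigned long paddr, unsigned long size, enum dma_dir dir)
    {
        switch (dir) {
        case DMA_FROM_DEVICE:       /* device wrote memory: discard stale lines */
            cache_inv(paddr, paddr + size);
            break;
        case DMA_TO_DEVICE:         /* CPU wrote memory: push dirty lines out */
            cache_wb(paddr, paddr + size);
            break;
        case DMA_BIDIRECTIONAL:     /* both directions: write back, then invalidate */
            cache_wb_inv(paddr, paddr + size);
            break;
        }
    }

    int main(void)
    {
        sketch_dma_sync(0x80000000UL, 0x1000, DMA_FROM_DEVICE);
        sketch_dma_sync(0x80000000UL, 0x1000, DMA_TO_DEVICE);
        sketch_dma_sync(0x80000000UL, 0x1000, DMA_BIDIRECTIONAL);
        return 0;
    }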
/arch/c6x/mm/
dma-coherent.c
79 u32 paddr; local
87 paddr = __alloc_dma_pages(order);
90 *handle = paddr;
92 if (!paddr)
95 return phys_to_virt(paddr);
/arch/m68k/include/asm/
cacheflush_no.h
86 static inline void cache_push(unsigned long paddr, int len) argument
95 static inline void cache_clear(unsigned long paddr, int len) argument
/arch/mips/loongson/common/
dma-swiotlb.c
106 dma_addr_t phys_to_dma(struct device *dev, phys_addr_t paddr) argument
108 return paddr;
/arch/powerpc/kernel/
crash_dump.c
102 phys_addr_t paddr; local
108 paddr = pfn << PAGE_SHIFT;
110 if (memblock_is_region_memory(paddr, csize)) {
111 vaddr = __va(paddr);
114 vaddr = __ioremap(paddr, PAGE_SIZE, 0);
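The crash_dump.c hit turns a pfn into a physical address and then picks how to reach it: if the range is memory owned by the running kernel (memblock_is_region_memory()), the linear map (__va) is enough; otherwise a temporary mapping (__ioremap) is set up. A sketch of that decision, with a made-up is_kernel_memory() boundary standing in for the memblock check:

    #include <stdio.h>

    #define PAGE_SHIFT 12

    /* Made-up boundary: pretend everything below 1 GiB is RAM owned by the
     * running kernel (stand-in for memblock_is_region_memory()). */
    static int is_kernel_memory(unsigned long paddr, unsigned long len)
    {
        return paddr + len <= 0x40000000UL;
    }

    /* Sketch of the hit above: pages inside kernel memory go through the
     * linear map, anything else needs a temporary mapping torn down later. */
    static const char *pick_mapping(unsigned long pfn, unsigned long csize)
    {
        unsigned long paddr = pfn << PAGE_SHIFT;

        if (is_kernel_memory(paddr, csize))
            return "linear map (__va)";
        return "temporary mapping (__ioremap)";
    }

    int main(void)
    {
        printf("pfn 0x100   -> %s\n", pick_mapping(0x100, 4096));
        printf("pfn 0x80000 -> %s\n", pick_mapping(0x80000, 4096));
        return 0;
    }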
/arch/powerpc/platforms/embedded6xx/
c2k.c
45 phys_addr_t paddr; local
54 paddr = of_translate_address(np, reg);
56 mv64x60_mpp_reg_base = ioremap(paddr, reg[1]);
60 paddr = of_translate_address(np, reg);
62 mv64x60_gpp_reg_base = ioremap(paddr, reg[1]);
/arch/sh/mm/
tlb-sh5.c
115 * @paddr: Physical address.
117 * Load up a virtual<->physical translation for @eaddr<->@paddr in the
121 unsigned long asid, unsigned long paddr)
128 ptel = neff_sign_extend(paddr);
153 unsigned long paddr, flags; local
162 paddr = pte_val(pte) & _PAGE_FLAGS_HARDWARE_MASK;
163 paddr &= ~PAGE_MASK;
165 sh64_setup_tlb_slot(entry, addr, get_asid(), paddr);
120 sh64_setup_tlb_slot(unsigned long long config_addr, unsigned long eaddr, unsigned long asid, unsigned long paddr) argument
/arch/arc/include/asm/
dma-mapping.h
58 * CPU accesses page via normal paddr, thus needs to explicitly made
62 static inline void __inline_dma_cache_sync(unsigned long paddr, size_t size, argument
67 dma_cache_inv(paddr, size);
70 dma_cache_wback(paddr, size);
73 dma_cache_wback_inv(paddr, size);
76 pr_err("Invalid DMA dir [%d] for OP @ %lx\n", dir, paddr);
80 void __arc_dma_cache_sync(unsigned long paddr, size_t size,
111 unsigned long paddr = page_to_phys(page) + offset; local
112 return dma_map_single(dev, (void *)paddr, size, dir);
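The last two lines of this hit show dma_map_page() being built on top of the single-buffer path: the physical address is page_to_phys(page) plus the sub-page offset, and that paddr is handed to dma_map_single(). A sketch of that composition with a minimal made-up page descriptor and a hypothetical single-buffer mapper:

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SHIFT 12

    /* Minimal made-up page descriptor: just remembers its page frame number. */
    struct page { unsigned long pfn; };

    static unsigned long sketch_page_to_phys(const struct page *page)
    {
        return page->pfn << PAGE_SHIFT;
    }

    /* Hypothetical stand-in for dma_map_single(): here bus address == paddr. */
    static uint64_t sketch_dma_map_single(unsigned long paddr, size_t size)
    {
        (void)size;
        return paddr;
    }

    /* Sketch of dma_map_page(): compute the physical address of (page, offset)
     * and hand it to the single-buffer mapping path. */
    static uint64_t sketch_dma_map_page(const struct page *page,
                                        unsigned long offset, size_t size)
    {
        unsigned long paddr = sketch_page_to_phys(page) + offset;
        return sketch_dma_map_single(paddr, size);
    }

    int main(void)
    {
        struct page pg = { .pfn = 0x1234 };
        printf("bus addr = 0x%llx\n",
               (unsigned long long)sketch_dma_map_page(&pg, 0x80, 256));
        return 0;
    }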
/arch/arm/include/asm/
dma-mapping.h
131 static inline dma_addr_t phys_to_dma(struct device *dev, phys_addr_t paddr) argument
133 unsigned int offset = paddr & ~PAGE_MASK;
134 return pfn_to_dma(dev, __phys_to_pfn(paddr)) + offset;
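Unlike the identity phys_to_dma() variants elsewhere in these results, the ARM one splits paddr into a page frame number and a sub-page offset, translates only the frame through pfn_to_dma(), and adds the offset back. A sketch of that split-and-reassemble pattern; the fixed BUS_OFFSET standing in for pfn_to_dma() is a made-up value:

    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SHIFT 12
    #define PAGE_SIZE  (1UL << PAGE_SHIFT)
    #define PAGE_MASK  (~(PAGE_SIZE - 1))

    /* Hypothetical fixed CPU-to-bus offset standing in for pfn_to_dma(). */
    #define BUS_OFFSET 0x80000000ULL

    static uint64_t sketch_pfn_to_dma(unsigned long pfn)
    {
        return ((uint64_t)pfn << PAGE_SHIFT) + BUS_OFFSET;
    }

    /* Sketch of the ARM phys_to_dma() hit: translate the page frame and
     * carry the sub-page offset across unchanged. */
    static uint64_t sketch_phys_to_dma(unsigned long paddr)
    {
        unsigned long offset = paddr & ~PAGE_MASK;      /* low 12 bits */
        unsigned long pfn = paddr >> PAGE_SHIFT;

        return sketch_pfn_to_dma(pfn) + offset;
    }

    int main(void)
    {
        /* 0x1234abc -> pfn 0x1234, offset 0xabc -> 0x80000000 + 0x1234000 + 0xabc */
        printf("0x%llx\n", (unsigned long long)sketch_phys_to_dma(0x1234abcUL));
        return 0;
    }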
/arch/arm/include/asm/xen/
page.h
27 phys_addr_t paddr; member in struct:xpaddr
31 #define XPADDR(x) ((xpaddr_t) { .paddr = (x) })
60 unsigned offset = phys.paddr & ~PAGE_MASK;
61 return XMADDR(PFN_PHYS(pfn_to_mfn(PFN_DOWN(phys.paddr))) | offset);
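The Xen hit does the same frame/offset split, but the frame translation is a lookup: the pseudo-physical frame number goes through pfn_to_mfn() to get the machine frame, and the sub-page offset is preserved. A sketch with a tiny made-up p2m table in place of the real pfn_to_mfn():

    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SHIFT 12
    #define PAGE_SIZE  (1UL << PAGE_SHIFT)
    #define PAGE_MASK  (~(PAGE_SIZE - 1))

    /* Made-up pseudo-physical -> machine frame table (stand-in for pfn_to_mfn()). */
    static const unsigned long p2m[] = { 0x500, 0x9a2, 0x031, 0x777 };

    static unsigned long sketch_pfn_to_mfn(unsigned long pfn)
    {
        return pfn < sizeof(p2m) / sizeof(p2m[0]) ? p2m[pfn] : ~0UL;
    }

    /* Sketch of the phys-to-machine hit: keep the sub-page offset and
     * translate only the frame number through the p2m table. */
    static uint64_t sketch_phys_to_machine(unsigned long paddr)
    {
        unsigned long offset = paddr & ~PAGE_MASK;
        unsigned long mfn = sketch_pfn_to_mfn(paddr >> PAGE_SHIFT);

        return ((uint64_t)mfn << PAGE_SHIFT) | offset;
    }

    int main(void)
    {
        /* pfn 1 maps to mfn 0x9a2; offset 0x123 is preserved -> 0x9a2123. */
        printf("0x%llx\n", (unsigned long long)sketch_phys_to_machine(0x1123UL));
        return 0;
    }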
/arch/arm/mach-omap2/
omap4-common.c
62 static phys_addr_t paddr; variable
100 paddr = arm_memblock_steal(size, SZ_1M);
110 dram_io_desc[0].pfn = __phys_to_pfn(paddr);
117 (long long) paddr, dram_io_desc[0].virtual);
/arch/arm64/include/asm/
dma-mapping.h
64 static inline dma_addr_t phys_to_dma(struct device *dev, phys_addr_t paddr) argument
66 return (dma_addr_t)paddr;
/arch/arm64/kernel/
efi.c
158 u64 paddr, npages, size; local
164 paddr = md->phys_addr;
171 paddr, paddr + (npages << EFI_PAGE_SHIFT) - 1,
175 memrange_efi_to_native(&paddr, &npages);
179 early_init_dt_add_memory_arch(paddr, size);
182 memblock_reserve(paddr, size);
325 u64 paddr, npages, size; local
335 paddr = md->phys_addr;
337 memrange_efi_to_native(&paddr,
[all...]
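Both efi.c hits walk the EFI memory map, take md->phys_addr and the page count, and convert from EFI's fixed 4 KiB page units to the kernel's native page size with memrange_efi_to_native() before registering or reserving the range. That helper's body is not shown here, so the sketch below assumes the usual outward rounding (start rounded down, end rounded up to native page boundaries); the 64 KiB native page size is also just an assumption for illustration:

    #include <stdint.h>
    #include <stdio.h>

    #define EFI_PAGE_SHIFT 12               /* EFI always counts 4 KiB pages */
    #define PAGE_SHIFT     16               /* assume 64 KiB native pages */
    #define PAGE_SIZE      (1UL << PAGE_SHIFT)

    /* Assumed behaviour of memrange_efi_to_native(): widen the EFI range to
     * native page boundaries (start rounded down, end rounded up). */
    static void sketch_memrange_efi_to_native(uint64_t *paddr, uint64_t *npages)
    {
        uint64_t end = *paddr + (*npages << EFI_PAGE_SHIFT);

        *paddr &= ~(uint64_t)(PAGE_SIZE - 1);
        *npages = (end - *paddr + PAGE_SIZE - 1) >> PAGE_SHIFT;
    }

    int main(void)
    {
        /* One EFI descriptor: 3 x 4 KiB pages starting at 0x40001000. */
        uint64_t paddr = 0x40001000ULL, npages = 3;

        sketch_memrange_efi_to_native(&paddr, &npages);
        printf("native range: 0x%llx, %llu page(s) of %lu bytes\n",
               (unsigned long long)paddr, (unsigned long long)npages, PAGE_SIZE);
        /* -> 0x40000000, 1 page of 65536 bytes */
        return 0;
    }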
/arch/ia64/include/asm/
dma-mapping.h
88 static inline dma_addr_t phys_to_dma(struct device *dev, phys_addr_t paddr) argument
90 return paddr;

Completed in 2977 milliseconds
