Searched refs:page_count (Results 1 - 25 of 70) sorted by relevance

/drivers/media/video/ivtv/
ivtv-udma.c:33 dma_page->page_count = dma_page->last - dma_page->first + 1;
34 if (dma_page->page_count == 1) dma_page->tail -= dma_page->offset;
48 for (i = 0; i < dma_page->page_count; i++) {
49 unsigned int len = (i == dma_page->page_count - 1) ?
112 if (dma->SG_length || dma->page_count) {
113 IVTV_DEBUG_WARN("ivtv_udma_setup: SG_length %d page_count %d still full?\n",
114 dma->SG_length, dma->page_count);
120 if (user_dma.page_count <= 0) {
121 IVTV_DEBUG_WARN("ivtv_udma_setup: Error %d page_count from %d bytes %d offset\n",
122 user_dma.page_count, size_in_byte
[all...]
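
The ivtv-udma.c hits above compute page_count from the first and last page indices a user buffer touches, then special-case single-page transfers. A minimal standalone sketch of that arithmetic (hypothetical uaddr and size values; PAGE_SIZE and PAGE_SHIFT re-stated locally so it compiles outside the kernel):

#include <stdio.h>

#define PAGE_SIZE  4096UL
#define PAGE_SHIFT 12

int main(void)
{
	unsigned long uaddr = 0x10000ff0UL;	/* hypothetical user address */
	unsigned long size  = 8192;		/* hypothetical transfer size in bytes */

	unsigned long first = uaddr >> PAGE_SHIFT;			/* first page index touched */
	unsigned long last  = (uaddr + size - 1) >> PAGE_SHIFT;	/* last page index touched */
	unsigned long page_count = last - first + 1;

	printf("offset into first page: %lu\n", uaddr & (PAGE_SIZE - 1));
	printf("page_count: %lu\n", page_count);	/* 3 for these values */
	return 0;
}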
ivtv-yuv.c:67 if (dma->SG_length || dma->page_count) {
69 ("prep_user_dma: SG_length %d page_count %d still full?\n",
70 dma->SG_length, dma->page_count);
79 y_pages = get_user_pages(current, current->mm, y_dma.uaddr, y_dma.page_count, 0, 1, &dma->map[0], NULL);
81 if (y_pages == y_dma.page_count) {
83 uv_dma.uaddr, uv_dma.page_count, 0, 1,
88 if (y_pages != y_dma.page_count || uv_pages != uv_dma.page_count) {
91 if (y_pages == y_dma.page_count) {
94 "expecting %d\n", uv_pages, uv_dma.page_count);
[all...]
/drivers/gpu/drm/udl/
udl_gem.c:125 int page_count, i; local
133 page_count = obj->base.size / PAGE_SIZE;
135 obj->pages = drm_malloc_ab(page_count, sizeof(struct page *));
143 for (i = 0; i < page_count; i++) {
161 int page_count = obj->base.size / PAGE_SIZE; local
164 for (i = 0; i < page_count; i++)
173 int page_count = obj->base.size / PAGE_SIZE; local
180 obj->vmapping = vmap(obj->pages, page_count, 0, PAGE_KERNEL);
/drivers/char/agp/
generic.c:198 if (curr->page_count != 0) {
203 for (i = 0; i < curr->page_count; i++) {
208 for (i = 0; i < curr->page_count; i++) {
226 * @page_count: size_t argument of the number of pages
235 size_t page_count, u32 type)
246 if ((cur_memory + page_count > bridge->max_memory_agp) ||
247 (cur_memory + page_count < page_count))
251 new = agp_generic_alloc_user(page_count, type);
258 new = bridge->driver->alloc_by_type(page_count, typ
234 agp_allocate_memory(struct agp_bridge_data *bridge, size_t page_count, u32 type) argument
1167 agp_generic_alloc_by_type(size_t page_count, int type) argument
1181 agp_generic_alloc_user(size_t page_count, int type) argument
[all...]
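
The agp_allocate_memory() hits in generic.c above guard the request with two checks: it must stay under bridge->max_memory_agp, and the size_t addition itself must not wrap around. A standalone sketch of that guard with illustrative names (request_fits, cur_pages, max_pages are not the AGP code's own):

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

/* Reject a request that would exceed max_pages, or whose size_t
 * addition wrapped around (the "cur + count < count" test in the hits). */
static bool request_fits(size_t cur_pages, size_t req_pages, size_t max_pages)
{
	if (cur_pages + req_pages < req_pages)	/* unsigned wraparound */
		return false;
	if (cur_pages + req_pages > max_pages)	/* over the configured cap */
		return false;
	return true;
}

int main(void)
{
	printf("%d\n", request_fits(100, 28, 256));		/* 1: fits */
	printf("%d\n", request_fits(200, 100, 256));		/* 0: exceeds the cap */
	printf("%d\n", request_fits(10, (size_t)-5, 256));	/* 0: addition wraps */
	return 0;
}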
sgi-agp.c:175 if ((pg_start + mem->page_count) > num_entries)
180 while (j < (pg_start + mem->page_count)) {
191 for (i = 0, j = pg_start; i < mem->page_count; i++, j++) {
219 for (i = pg_start; i < (mem->page_count + pg_start); i++) {
nvidia-agp.c:209 if (mem->page_count == 0)
212 if ((pg_start + mem->page_count) >
216 for (j = pg_start; j < (pg_start + mem->page_count); j++) {
225 for (i = 0, j = pg_start; i < mem->page_count; i++, j++) {
249 if (mem->page_count == 0)
252 for (i = pg_start; i < (mem->page_count + pg_start); i++)
i460-agp.c:311 if ((io_pg_start + I460_IOPAGES_PER_KPAGE * mem->page_count) > num_entries) {
317 while (j < (io_pg_start + I460_IOPAGES_PER_KPAGE * mem->page_count)) {
327 for (i = 0, j = io_pg_start; i < mem->page_count; i++) {
346 for (i = pg_start; i < (pg_start + I460_IOPAGES_PER_KPAGE * mem->page_count); i++)
415 end = &i460.lp_desc[(pg_start + mem->page_count - 1) / I460_KPAGES_PER_IOPAGE];
417 end_offset = (pg_start + mem->page_count - 1) % I460_KPAGES_PER_IOPAGE;
473 end = &i460.lp_desc[(pg_start + mem->page_count - 1) / I460_KPAGES_PER_IOPAGE];
475 end_offset = (pg_start + mem->page_count - 1) % I460_KPAGES_PER_IOPAGE;
efficeon-agp.c:241 int i, count = mem->page_count, num_entries;
249 if ((pg_start + mem->page_count) > num_entries)
290 int i, count = mem->page_count, num_entries;
296 if ((pg_start + mem->page_count) > num_entries)
agp.h:199 struct agp_memory *agp_generic_alloc_by_type(size_t page_count, int type);
203 struct agp_memory *memory, size_t page_count);
220 struct agp_memory *agp_generic_alloc_user(size_t page_count, int type);
ati-agp.c:280 if (mem->page_count == 0)
283 if ((pg_start + mem->page_count) > num_entries)
287 while (j < (pg_start + mem->page_count)) {
301 for (i = 0, j = pg_start; i < mem->page_count; i++, j++) {
326 if (mem->page_count == 0)
329 for (i = pg_start; i < (mem->page_count + pg_start); i++) {
uninorth-agp.c:165 if (mem->page_count == 0)
171 if ((pg_start + mem->page_count) > num_entries)
175 for (i = 0; i < mem->page_count; ++i) {
184 for (i = 0; i < mem->page_count; i++) {
214 if (mem->page_count == 0)
218 for (i = 0; i < mem->page_count; ++i) {
ali-agp.c:128 int i, page_count; local
133 page_count = 1 << A_SIZE_32(agp_bridge->current_size)->page_order;
134 for (i = 0; i < PAGE_SIZE * page_count; i += PAGE_SIZE) {
intel-gtt.c:123 DBG("try unmapping %lu pages\n", (unsigned long)mem->page_count);
215 if ((pg_start + mem->page_count)
222 for (i = pg_start; i < (pg_start + mem->page_count); i++) {
267 new->page_count = pg_count;
278 if (curr->page_count == 4)
939 if (mem->page_count == 0)
942 if (pg_start + mem->page_count > intel_private.base.gtt_total_entries)
955 ret = intel_gtt_map_memory(mem->pages, mem->page_count,
963 intel_gtt_insert_pages(pg_start, mem->page_count, mem->pages,
988 if (mem->page_count
[all...]
parisc-agp.c:137 io_pg_count = info->io_pages_per_kpage * mem->page_count;
154 for (i = 0, j = io_pg_start; i < mem->page_count; i++) {
183 io_pg_count = info->io_pages_per_kpage * mem->page_count;
amd-k7-agp.c:297 if ((pg_start + mem->page_count) > num_entries)
301 while (j < (pg_start + mem->page_count)) {
314 for (i = 0, j = pg_start; i < mem->page_count; i++, j++) {
337 for (i = pg_start; i < (mem->page_count + pg_start); i++) {
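
The AGP insert-memory hits above (sgi-agp.c, nvidia-agp.c, ati-agp.c, amd-k7-agp.c, and others) share the same bounds check: pg_start plus mem->page_count must not run past the num_entries GATT slots. A standalone sketch of that check against a simplified stand-in struct:

#include <stdbool.h>
#include <stdio.h>

struct fake_agp_memory {	/* illustrative stand-in, not struct agp_memory */
	int page_count;
};

/* An insert starting at GATT slot pg_start must fit within num_entries slots. */
static bool insert_fits(int pg_start, const struct fake_agp_memory *mem,
			int num_entries)
{
	return (pg_start + mem->page_count) <= num_entries;
}

int main(void)
{
	struct fake_agp_memory mem = { .page_count = 64 };

	printf("%d\n", insert_fits(0, &mem, 256));	/* 1: fits */
	printf("%d\n", insert_fits(200, &mem, 256));	/* 0: runs past the table */
	return 0;
}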
/drivers/firewire/
core-iso.c:43 int page_count, enum dma_data_direction direction)
48 buffer->page_count = page_count;
51 buffer->pages = kmalloc(page_count * sizeof(buffer->pages[0]),
56 for (i = 0; i < buffer->page_count; i++) {
93 for (i = 0; i < buffer->page_count; i++) {
110 for (i = 0; i < buffer->page_count; i++) {
129 for (i = 0; i < buffer->page_count; i++) {
42 fw_iso_buffer_init(struct fw_iso_buffer *buffer, struct fw_card *card, int page_count, enum dma_data_direction direction) argument
/drivers/gpu/drm/ttm/
ttm_agp_backend.c:63 mem->page_count = 0;
70 mem->pages[mem->page_count++] = page;
/drivers/staging/android/
persistent_ram.c:327 unsigned int page_count; local
332 page_count = DIV_ROUND_UP(size + offset_in_page(start), PAGE_SIZE);
336 pages = kmalloc(sizeof(struct page *) * page_count, GFP_KERNEL);
339 page_count);
343 for (i = 0; i < page_count; i++) {
347 prz->vaddr = vmap(pages, page_count, VM_MAP, prot);
350 pr_err("%s: Failed to map %u pages\n", __func__, page_count);
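
The persistent_ram.c hits above count pages with DIV_ROUND_UP over the region size plus the start address's offset within its page, so a region that straddles page boundaries is counted correctly. A standalone sketch with equivalent local macro definitions and hypothetical start/size values:

#include <stdio.h>

#define PAGE_SIZE		4096UL
#define offset_in_page(addr)	((unsigned long)(addr) & (PAGE_SIZE - 1))
#define DIV_ROUND_UP(n, d)	(((n) + (d) - 1) / (d))

int main(void)
{
	unsigned long start = 0x8000f00UL;	/* hypothetical start address */
	unsigned long size  = 5000;		/* hypothetical region size in bytes */

	/* Pages touched = bytes measured from the start's page boundary, rounded up. */
	unsigned long page_count = DIV_ROUND_UP(size + offset_in_page(start), PAGE_SIZE);

	printf("page_count = %lu\n", page_count);	/* 3 for these values */
	return 0;
}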
/drivers/gpu/drm/i915/
i915_gem_tiling.c:461 int page_count = obj->base.size >> PAGE_SHIFT; local
467 for (i = 0; i < page_count; i++) {
480 int page_count = obj->base.size >> PAGE_SHIFT; local
484 obj->bit_17 = kmalloc(BITS_TO_LONGS(page_count) *
493 for (i = 0; i < page_count; i++) {
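
The i915_gem_tiling.c hits above size a per-page bitmap: page_count comes from the object size shifted right by PAGE_SHIFT, and the allocation is rounded up to whole longs with BITS_TO_LONGS. A standalone sketch of that sizing, using equivalent stand-in definitions of BITS_PER_LONG and BITS_TO_LONGS (the obj->bit_17 field it feeds is not modelled):

#include <limits.h>
#include <stdio.h>
#include <stdlib.h>

#define PAGE_SHIFT	 12
#define BITS_PER_LONG	 (CHAR_BIT * sizeof(long))
#define BITS_TO_LONGS(n) (((n) + BITS_PER_LONG - 1) / BITS_PER_LONG)

int main(void)
{
	size_t obj_size = 1UL << 20;			/* hypothetical 1 MiB object */
	size_t page_count = obj_size >> PAGE_SHIFT;	/* 256 pages */

	/* One bit per page, allocated as whole longs. */
	unsigned long *bits = calloc(BITS_TO_LONGS(page_count), sizeof(*bits));
	if (!bits)
		return 1;

	printf("%zu pages -> %zu longs of bitmap\n",
	       page_count, (size_t)BITS_TO_LONGS(page_count));
	free(bits);
	return 0;
}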
/drivers/iommu/
tegra-gart.c:56 u32 page_count; /* total remappable size */ member in struct:gart_device
78 iova < gart->iovmm_base + GART_PAGE_SIZE * gart->page_count; \
145 gart_end = gart_start + gart->page_count * GART_PAGE_SIZE - 1;
381 gart->page_count = (resource_size(res_remap) >> GART_PAGE_SHIFT);
383 gart->savedata = vmalloc(sizeof(u32) * gart->page_count);
/drivers/gpu/drm/nouveau/
nouveau_bo.c:493 u32 page_count = new_mem->num_pages; local
496 page_count = new_mem->num_pages;
497 while (page_count) {
498 int line_count = (page_count > 2047) ? 2047 : page_count;
517 page_count -= line_count;
630 u32 page_count = new_mem->num_pages; local
641 page_count = new_mem->num_pages;
642 while (page_count) {
643 int line_count = (page_count > 204
[all...]
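
The nouveau_bo.c hits above split a copy into bursts of at most 2047 pages, subtracting each burst from page_count until it reaches zero. A standalone sketch that reproduces only the loop shape (the 2047 limit is taken from the hits; the macro name and page count are hypothetical):

#include <stdio.h>

#define MAX_LINES_PER_BURST 2047u	/* hypothetical name for the per-burst limit above */

int main(void)
{
	unsigned int page_count = 5000;	/* hypothetical buffer size in pages */

	while (page_count) {
		unsigned int line_count = (page_count > MAX_LINES_PER_BURST)
					  ? MAX_LINES_PER_BURST : page_count;

		printf("copy burst of %u pages\n", line_count);
		page_count -= line_count;	/* 2047, 2047, then 906 */
	}
	return 0;
}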
/drivers/net/ethernet/brocade/bna/
bna_types.h:321 u32 page_count; member in struct:bna_qpt
469 int page_count; member in struct:bna_tcb
604 int page_count; member in struct:bna_rcb
669 int page_count; member in struct:bna_ccb
bna_tx_rx.c:1352 (bfi_q)->pages = htons((u16)(bna_qpt)->page_count); \
1911 u32 page_count,
1922 rxq->qpt.page_count = page_count;
1927 for (i = 0; i < rxq->qpt.page_count; i++) {
1938 u32 page_count,
1949 rxp->cq.qpt.page_count = page_count;
1954 for (i = 0; i < rxp->cq.qpt.page_count; i++) {
2328 u32 page_count; local
1909 bna_rxq_qpt_setup(struct bna_rxq *rxq, struct bna_rxp *rxp, u32 page_count, u32 page_size, struct bna_mem_descr *qpt_mem, struct bna_mem_descr *swqpt_mem, struct bna_mem_descr *page_mem) argument
1937 bna_rxp_cqpt_setup(struct bna_rxp *rxp, u32 page_count, u32 page_size, struct bna_mem_descr *qpt_mem, struct bna_mem_descr *swqpt_mem, struct bna_mem_descr *page_mem) argument
3235 bna_txq_qpt_setup(struct bna_txq *txq, int page_count, int page_size, struct bna_mem_descr *qpt_mem, struct bna_mem_descr *swqpt_mem, struct bna_mem_descr *page_mem) argument
3412 u32 page_count; local
3466 int page_count; local
[all...]
/drivers/target/
target_core_rd.c:91 u32 i, j, page_count = 0, sg_per_table; local
109 page_count++;
118 rd_dev->rd_host->rd_host_id, rd_dev->rd_dev_id, page_count,
119 rd_dev->sg_table_count, (unsigned long)page_count * PAGE_SIZE);
/drivers/gpu/drm/
drm_bufs.c:748 dma->page_count += byte_count >> PAGE_SHIFT;
782 int page_count; local
852 temp_pagelist = kmalloc((dma->page_count + (count << page_order)) *
862 dma->pagelist, dma->page_count * sizeof(*dma->pagelist));
864 dma->page_count + (count << page_order));
869 page_count = 0;
888 dma->page_count + page_count,
890 temp_pagelist[dma->page_count + page_count
[all...]

Completed in 8495 milliseconds
