Searched refs:ib (Results 1 - 25 of 57) sorted by relevance


/drivers/gpu/drm/radeon/
si_dma.c
62 * @ib: indirect buffer to fill with commands
70 struct radeon_ib *ib,
79 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY,
81 ib->ptr[ib->length_dw++] = lower_32_bits(pe);
82 ib->ptr[ib->length_dw++] = lower_32_bits(src);
83 ib->ptr[ib
69 si_dma_vm_copy_pages(struct radeon_device *rdev, struct radeon_ib *ib, uint64_t pe, uint64_t src, unsigned count) argument
105 si_dma_vm_write_pages(struct radeon_device *rdev, struct radeon_ib *ib, uint64_t pe, uint64_t addr, unsigned count, uint32_t incr, uint32_t flags) argument
153 si_dma_vm_set_pages(struct radeon_device *rdev, struct radeon_ib *ib, uint64_t pe, uint64_t addr, unsigned count, uint32_t incr, uint32_t flags) argument
[all...]
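
Note: the si_dma.c hits all follow one pattern: build page-table update packets by appending dwords to ib->ptr[] and bumping ib->length_dw. Below is a minimal userspace sketch of that pattern; struct sketch_ib and SKETCH_DMA_PACKET are invented stand-ins for the kernel's struct radeon_ib and DMA_PACKET() macro, not the real definitions.

    /* Toy model of the si_dma_vm_copy_pages() fill pattern above. */
    #include <stdint.h>
    #include <stdio.h>

    struct sketch_ib {
        uint32_t ptr[64];    /* command buffer; the kernel maps GPU memory here */
        unsigned length_dw;  /* dwords appended so far */
    };

    /* invented encoding; the real DMA_PACKET() packs more fields */
    #define SKETCH_DMA_PACKET(op, ndw) (((uint32_t)(op) << 28) | ((ndw) & 0xFFFFF))
    #define SKETCH_OP_COPY 0x3

    static void copy_pages(struct sketch_ib *ib, uint64_t pe, uint64_t src,
                           unsigned ndw)
    {
        /* header, then lower/upper halves of destination (pe) and source */
        ib->ptr[ib->length_dw++] = SKETCH_DMA_PACKET(SKETCH_OP_COPY, ndw);
        ib->ptr[ib->length_dw++] = (uint32_t)pe;
        ib->ptr[ib->length_dw++] = (uint32_t)src;
        ib->ptr[ib->length_dw++] = (uint32_t)(pe >> 32) & 0xff;  /* 40-bit addrs */
        ib->ptr[ib->length_dw++] = (uint32_t)(src >> 32) & 0xff;
    }

    int main(void)
    {
        struct sketch_ib ib = { .length_dw = 0 };
        copy_pages(&ib, 0x123456789ULL, 0xabcdef000ULL, 8);
        for (unsigned i = 0; i < ib.length_dw; i++)
            printf("dw%u: 0x%08x\n", i, ib.ptr[i]);
        return 0;
    }
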
ni_dma.c
118 * @ib: IB object to schedule
123 struct radeon_ib *ib)
125 struct radeon_ring *ring = &rdev->ring[ib->ring];
143 radeon_ring_write(ring, DMA_IB_PACKET(DMA_PACKET_INDIRECT_BUFFER, ib->vm ? ib->vm->id : 0, 0));
144 radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));
145 radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));
307 * @ib: indirect buffer to fill with commands
315 struct radeon_ib *ib,
122 cayman_dma_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib) argument
314 cayman_dma_vm_copy_pages(struct radeon_device *rdev, struct radeon_ib *ib, uint64_t pe, uint64_t src, unsigned count) argument
352 cayman_dma_vm_write_pages(struct radeon_device *rdev, struct radeon_ib *ib, uint64_t pe, uint64_t addr, unsigned count, uint32_t incr, uint32_t flags) argument
401 cayman_dma_vm_set_pages(struct radeon_device *rdev, struct radeon_ib *ib, uint64_t pe, uint64_t addr, unsigned count, uint32_t incr, uint32_t flags) argument
443 cayman_dma_vm_pad_ib(struct radeon_ib *ib) argument
[all...]
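
Note: cayman_dma_ring_ib_execute() above does not copy the IB onto the ring; it emits a single INDIRECT_BUFFER packet that points at it: a 32-byte-aligned base in one dword and the dword count plus the upper address bits in the next. A standalone sketch of that encoding, with the opcode value and ring layout invented for illustration:

    #include <stdint.h>
    #include <stdio.h>

    static uint32_t ring[16];
    static unsigned wptr;

    static void ring_write(uint32_t v) { ring[wptr++] = v; }

    static void execute_ib(uint64_t gpu_addr, uint32_t length_dw, unsigned vm_id)
    {
        ring_write((0x4u << 28) | (vm_id << 20));      /* header; opcode invented */
        ring_write((uint32_t)gpu_addr & 0xFFFFFFE0);   /* base, 32-byte aligned */
        ring_write((length_dw << 12) | ((uint32_t)(gpu_addr >> 32) & 0xFF));
    }

    int main(void)
    {
        execute_ib(0x1234567800ULL, 64, 3);
        for (unsigned i = 0; i < wptr; i++)
            printf("ring[%u] = 0x%08x\n", i, ring[i]);
        return 0;
    }
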
radeon_ib.c
48 * @ib: IB object returned
56 struct radeon_ib *ib, struct radeon_vm *vm,
61 r = radeon_sa_bo_new(rdev, &rdev->ring_tmp_bo, &ib->sa_bo, size, 256);
67 r = radeon_semaphore_create(rdev, &ib->semaphore);
72 ib->ring = ring;
73 ib->fence = NULL;
74 ib->ptr = radeon_sa_bo_cpu_addr(ib->sa_bo);
75 ib->vm = vm;
77 /* ib poo
55 radeon_ib_get(struct radeon_device *rdev, int ring, struct radeon_ib *ib, struct radeon_vm *vm, unsigned size) argument
97 radeon_ib_free(struct radeon_device *rdev, struct radeon_ib *ib) argument
125 radeon_ib_schedule(struct radeon_device *rdev, struct radeon_ib *ib, struct radeon_ib *const_ib, bool hdp_flush) argument
[all...]
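
Note: radeon_ib_get() above does not allocate a fresh buffer object per IB; it suballocates out of rdev->ring_tmp_bo with 256-byte alignment. A toy bump allocator conveying the idea (the real radeon_sa_bo_new() also waits on fences and reclaims freed ranges rather than failing):

    #include <stdint.h>
    #include <stdio.h>
    #include <stddef.h>

    #define POOL_SIZE (64 * 1024)
    static uint8_t pool[POOL_SIZE];
    static size_t pool_off;

    static void *sketch_sa_new(size_t size, size_t align)  /* align: power of two */
    {
        size_t off = (pool_off + align - 1) & ~(align - 1);
        if (off + size > POOL_SIZE)
            return NULL;            /* kernel would wait for fences instead */
        pool_off = off + size;
        return pool + off;
    }

    int main(void)
    {
        /* mirrors radeon_sa_bo_new(rdev, &rdev->ring_tmp_bo, ..., size, 256) */
        void *ib_ptr = sketch_sa_new(1024, 256);
        printf("ib cpu ptr %p (256-byte aligned)\n", ib_ptr);
        return 0;
    }
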
radeon_vce.c
332 struct radeon_ib ib; local
336 r = radeon_ib_get(rdev, ring, &ib, NULL, ib_size_dw * 4);
338 DRM_ERROR("radeon: failed to get ib (%d).\n", r);
342 dummy = ib.gpu_addr + 1024;
345 ib.length_dw = 0;
346 ib.ptr[ib.length_dw++] = 0x0000000c; /* len */
347 ib.ptr[ib.length_dw++] = 0x00000001; /* session cmd */
348 ib
399 struct radeon_ib ib; local
669 radeon_vce_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib) argument
[all...]
cik_sdma.c
129 * @ib: IB object to schedule
134 struct radeon_ib *ib)
136 struct radeon_ring *ring = &rdev->ring[ib->ring];
137 u32 extra_bits = (ib->vm ? ib->vm->id : 0) & 0xf;
155 radeon_ring_write(ring, ib->gpu_addr & 0xffffffe0); /* base must be 32 byte aligned */
156 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr));
157 radeon_ring_write(ring, ib->length_dw);
668 struct radeon_ib ib; local
685 r = radeon_ib_get(rdev, ring->idx, &ib, NUL
133 cik_sdma_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib) argument
762 cik_sdma_vm_copy_pages(struct radeon_device *rdev, struct radeon_ib *ib, uint64_t pe, uint64_t src, unsigned count) argument
800 cik_sdma_vm_write_pages(struct radeon_device *rdev, struct radeon_ib *ib, uint64_t pe, uint64_t addr, unsigned count, uint32_t incr, uint32_t flags) argument
850 cik_sdma_vm_set_pages(struct radeon_device *rdev, struct radeon_ib *ib, uint64_t pe, uint64_t addr, unsigned count, uint32_t incr, uint32_t flags) argument
893 cik_sdma_vm_pad_ib(struct radeon_ib *ib) argument
[all...]
radeon_vm.c
349 * @ib: indirect buffer to fill with commands
360 struct radeon_ib *ib,
369 radeon_asic_vm_copy_pages(rdev, ib, pe, src, count);
372 radeon_asic_vm_write_pages(rdev, ib, pe, addr,
376 radeon_asic_vm_set_pages(rdev, ib, pe, addr,
393 struct radeon_ib ib; local
416 r = radeon_ib_get(rdev, R600_RING_TYPE_DMA_INDEX, &ib, NULL, 256);
420 ib.length_dw = 0;
422 radeon_vm_set_pages(rdev, &ib, addr, 0, entries, 0, 0);
423 radeon_asic_vm_pad_ib(rdev, &ib);
359 radeon_vm_set_pages(struct radeon_device *rdev, struct radeon_ib *ib, uint64_t pe, uint64_t addr, unsigned count, uint32_t incr, uint32_t flags) argument
646 struct radeon_ib ib; local
731 radeon_vm_frag_ptes(struct radeon_device *rdev, struct radeon_ib *ib, uint64_t pe_start, uint64_t pe_end, uint64_t addr, uint32_t flags) argument
811 radeon_vm_update_ptes(struct radeon_device *rdev, struct radeon_vm *vm, struct radeon_ib *ib, uint64_t start, uint64_t end, uint64_t dst, uint32_t flags) argument
883 struct radeon_ib ib; local
[all...]
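
Note: radeon_vm_set_pages() above is a dispatcher over three ASIC hooks. Roughly: copy PTEs straight out of the GART table when they already live there, write one PTE at a time for system pages or very short runs, and let the engine fill a linear range otherwise. A standalone decision sketch with invented flag names (the real conditions use the R600_PTE_* flags and are slightly more involved):

    #include <stdint.h>
    #include <stdio.h>

    #define PTE_GART   (1u << 0)  /* invented stand-ins for R600_PTE_* */
    #define PTE_SYSTEM (1u << 1)

    static const char *pick_hook(uint32_t flags, unsigned count)
    {
        if (flags & PTE_GART)
            return "vm_copy_pages";   /* PTEs already present in the GART table */
        if ((flags & PTE_SYSTEM) || count < 3)
            return "vm_write_pages";  /* per-page addresses: write each PTE */
        return "vm_set_pages";        /* contiguous VRAM: engine fills the range */
    }

    int main(void)
    {
        printf("%s\n", pick_hook(PTE_GART, 100));
        printf("%s\n", pick_hook(PTE_SYSTEM, 100));
        printf("%s\n", pick_hook(0, 100));
        return 0;
    }
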
r600_cs.c
356 volatile u32 *ib = p->ib.ptr; local
467 ib[track->cb_color_size_idx[i]] = tmp;
526 volatile u32 *ib = p->ib.ptr; local
564 ib[track->db_depth_size_idx] = S_028000_SLICE_TILE_MAX(tmp - 1) | (track->db_depth_size & 0x3FF);
834 volatile uint32_t *ib; local
836 ib = p->ib.ptr;
899 ib[h_id
973 u32 m, i, tmp, *ib; local
1631 volatile u32 *ib; local
2364 r600_cs_legacy(struct drm_device *dev, void *data, struct drm_file *filp, unsigned family, u32 *ib, int *l) argument
2478 volatile u32 *ib = p->ib.ptr; local
[all...]
evergreen_cs.c
446 volatile u32 *ib = p->ib.ptr; local
468 ib[track->cb_color_slice_idx[id]] = slice;
1053 DRM_ERROR("No reloc for ib[%d]=0x%04X\n",
1099 u32 m, i, tmp, *ib; local
1120 ib = p->ib.ptr;
1152 ib[idx] = 0;*/
1168 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff);
1197 ib[id
1797 volatile u32 *ib; local
2690 volatile u32 *ib = p->ib.ptr; local
3236 evergreen_vm_packet3_check(struct radeon_device *rdev, u32 *ib, struct radeon_cs_packet *pkt) argument
3381 evergreen_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib) argument
3426 evergreen_dma_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib) argument
[all...]
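
Note: the r600_cs.c and evergreen_cs.c hits are command-stream validation. Here ib points at the parser's copy of the IB, and dwords that name buffer addresses get the relocated GPU offset folded in, as in line 1168 above. A standalone model with an invented reloc type; the >> 8 reflects registers that hold 256-byte-aligned addresses:

    #include <stdint.h>
    #include <stdio.h>

    struct sketch_reloc { uint64_t gpu_offset; };

    static void patch(uint32_t *ib, unsigned idx, const struct sketch_reloc *reloc)
    {
        /* like ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); */
        ib[idx] += (uint32_t)((reloc->gpu_offset >> 8) & 0xffffffff);
    }

    int main(void)
    {
        uint32_t ib[4] = { 0, 0, 0x10, 0 };   /* dword 2 needs relocation */
        struct sketch_reloc reloc = { .gpu_offset = 0x12345600ULL };
        patch(ib, 2, &reloc);
        printf("ib[2] = 0x%08x\n", ib[2]);
        return 0;
    }
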
r600_dma.c
339 struct radeon_ib ib; local
353 r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);
355 DRM_ERROR("radeon: failed to get ib (%d).\n", r);
359 ib.ptr[0] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 1);
360 ib.ptr[1] = lower_32_bits(gpu_addr);
361 ib.ptr[2] = upper_32_bits(gpu_addr) & 0xff;
362 ib.ptr[3] = 0xDEADBEEF;
363 ib.length_dw = 4;
365 r = radeon_ib_schedule(rdev, &ib, NULL, false);
367 radeon_ib_free(rdev, &ib);
400 r600_dma_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib) argument
[all...]
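
Note: the r600_dma.c hit is the DMA IB self-test, and it shows the whole IB lifecycle in one function: get, fill, schedule, free. Reassembled from the snippet lines above (the fence wait and readback are elided by the excerpt and summarized in comments):

    r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256);
    if (r) {
        DRM_ERROR("radeon: failed to get ib (%d).\n", r);
        return r;
    }

    ib.ptr[0] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 1);  /* write 1 dword */
    ib.ptr[1] = lower_32_bits(gpu_addr);                /* dst bits 31:0 */
    ib.ptr[2] = upper_32_bits(gpu_addr) & 0xff;         /* dst bits 39:32 */
    ib.ptr[3] = 0xDEADBEEF;                             /* test pattern */
    ib.length_dw = 4;

    r = radeon_ib_schedule(rdev, &ib, NULL, false);
    if (r) {
        radeon_ib_free(rdev, &ib);
        return r;
    }
    /* ... wait on ib.fence, check the target word reads back 0xDEADBEEF,
     *     then radeon_ib_free(rdev, &ib) ... */
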
evergreen_dma.c
63 * @ib: IB object to schedule
68 struct radeon_ib *ib)
70 struct radeon_ring *ring = &rdev->ring[ib->ring];
89 radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));
90 radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));
67 evergreen_dma_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib) argument
r200.c
151 volatile uint32_t *ib; local
159 ib = p->ib.ptr;
166 DRM_ERROR("No reloc for ib[%d]=0x%04X\n",
183 DRM_ERROR("No reloc for ib[%d]=0x%04X\n",
191 ib[idx] = idx_value + ((u32)reloc->gpu_offset);
196 DRM_ERROR("No reloc for ib[%d]=0x%04X\n",
204 ib[idx] = idx_value + ((u32)reloc->gpu_offset);
215 DRM_ERROR("No reloc for ib[%d]=0x%04X\n",
228 ib[id
[all...]
radeon_asic.h
94 void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
330 void r600_dma_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
340 void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
511 void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
538 struct radeon_ib *ib);
599 void cayman_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
604 int evergreen_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
605 int evergreen_dma_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib);
607 struct radeon_ib *ib);
612 struct radeon_ib *ib,
[all...]
radeon_cs.c
196 p->vm_bos = radeon_vm_get_bos(p->rdev, p->ib.vm,
261 r = radeon_semaphore_sync_resv(p->rdev, p->ib.semaphore, resv,
284 p->ib.sa_bo = NULL;
285 p->ib.semaphore = NULL;
435 &parser->ib.fence->base);
455 radeon_ib_free(parser->rdev, &parser->ib);
489 r = radeon_ib_schedule(rdev, &parser->ib, NULL, true);
562 r = radeon_ring_ib_parse(rdev, parser->ring, &parser->ib);
582 radeon_semaphore_sync_fence(parser->ib.semaphore, vm->fence);
586 r = radeon_ib_schedule(rdev, &parser->ib,
810 volatile uint32_t *ib; local
[all...]
/drivers/infiniband/hw/mlx4/
ah.c
47 ah->av.ib.port_pd = cpu_to_be32(to_mpd(pd)->pdn | (ah_attr->port_num << 24));
48 ah->av.ib.g_slid = ah_attr->src_path_bits;
50 ah->av.ib.g_slid |= 0x80;
51 ah->av.ib.gid_index = ah_attr->grh.sgid_index;
52 ah->av.ib.hop_limit = ah_attr->grh.hop_limit;
53 ah->av.ib.sl_tclass_flowlabel |=
56 memcpy(ah->av.ib.dgid, ah_attr->grh.dgid.raw, 16);
59 ah->av.ib.dlid = cpu_to_be16(ah_attr->dlid);
61 ah->av.ib.stat_rate = ah_attr->static_rate + MLX4_STAT_RATE_OFFSET;
62 while (ah->av.ib
[all...]
/drivers/net/ethernet/amd/
7990.c
99 t, ib->brx_ring[t].rmd1_hadr, ib->brx_ring[t].rmd0, \
100 ib->brx_ring[t].length, \
101 ib->brx_ring[t].mblength, ib->brx_ring[t].rmd1_bits); \
105 t, ib->btx_ring[t].tmd1_hadr, ib->btx_ring[t].tmd0, \
106 ib->btx_ring[t].length, \
107 ib->btx_ring[t].misc, ib
139 volatile struct lance_init_block *ib = lp->init_block; local
274 volatile struct lance_init_block *ib = lp->init_block; local
354 volatile struct lance_init_block *ib = lp->init_block; local
541 volatile struct lance_init_block *ib = lp->init_block; local
598 volatile struct lance_init_block *ib = lp->init_block; local
625 volatile struct lance_init_block *ib = lp->init_block; local
[all...]
sunlance.c
323 struct lance_init_block *ib = lp->init_block_mem; local
336 ib->phys_addr [0] = dev->dev_addr [1];
337 ib->phys_addr [1] = dev->dev_addr [0];
338 ib->phys_addr [2] = dev->dev_addr [3];
339 ib->phys_addr [3] = dev->dev_addr [2];
340 ib->phys_addr [4] = dev->dev_addr [5];
341 ib->phys_addr [5] = dev->dev_addr [4];
346 ib->btx_ring [i].tmd0 = leptr;
347 ib->btx_ring [i].tmd1_hadr = leptr >> 16;
348 ib
380 struct lance_init_block __iomem *ib = lp->init_block_iomem; local
510 struct lance_init_block *ib = lp->init_block_mem; local
569 struct lance_init_block *ib = lp->init_block_mem; local
679 struct lance_init_block __iomem *ib = lp->init_block_iomem; local
737 struct lance_init_block __iomem *ib = lp->init_block_iomem; local
885 struct lance_init_block __iomem *ib = lp->init_block_iomem; local
898 struct lance_init_block *ib = lp->init_block_mem; local
940 struct lance_init_block __iomem *ib = lp->init_block_iomem; local
945 struct lance_init_block *ib = lp->init_block_mem; local
1124 struct lance_init_block __iomem *ib = lp->init_block_iomem; local
1132 struct lance_init_block *ib = lp->init_block_mem; local
1177 struct lance_init_block __iomem *ib = lp->init_block_iomem; local
1181 struct lance_init_block *ib = lp->init_block_mem; local
1194 struct lance_init_block __iomem *ib = lp->init_block_iomem; local
1200 struct lance_init_block *ib = lp->init_block_mem; local
[all...]
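
Note: the byte pairs above (ib->phys_addr[0] = dev->dev_addr[1], and so on) are not a typo: the LANCE sees its init block through a 16-bit bus, so the MAC address is stored byte-swapped within each 16-bit word. Standalone illustration:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        uint8_t dev_addr[6] = { 0x02, 0x60, 0x8c, 0x12, 0x34, 0x56 };
        uint8_t phys_addr[6];

        for (int i = 0; i < 6; i += 2) {   /* swap within each 16-bit word */
            phys_addr[i]     = dev_addr[i + 1];
            phys_addr[i + 1] = dev_addr[i];
        }
        for (int i = 0; i < 6; i++)
            printf("%02x ", phys_addr[i]);
        printf("\n");
        return 0;
    }
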
a2065.c
152 volatile struct lance_init_block *ib = lp->init_block; local
163 ib->mode = 0;
168 ib->phys_addr[0] = dev->dev_addr[1];
169 ib->phys_addr[1] = dev->dev_addr[0];
170 ib->phys_addr[2] = dev->dev_addr[3];
171 ib->phys_addr[3] = dev->dev_addr[2];
172 ib->phys_addr[4] = dev->dev_addr[5];
173 ib->phys_addr[5] = dev->dev_addr[4];
179 ib->btx_ring[i].tmd0 = leptr;
180 ib
247 volatile struct lance_init_block *ib = lp->init_block; local
326 volatile struct lance_init_block *ib = lp->init_block; local
539 volatile struct lance_init_block *ib = lp->init_block; local
587 volatile struct lance_init_block *ib = lp->init_block; local
613 volatile struct lance_init_block *ib = lp->init_block; local
[all...]
declance.c
234 #define lib_ptr(ib, rt, type) \
235 ((volatile u16 *)((u8 *)(ib) + lib_off(rt, type)))
451 volatile u16 *ib = (volatile u16 *)dev->mem_start; local
463 *lib_ptr(ib, phys_addr[0], lp->type) = (dev->dev_addr[1] << 8) |
465 *lib_ptr(ib, phys_addr[1], lp->type) = (dev->dev_addr[3] << 8) |
467 *lib_ptr(ib, phys_addr[2], lp->type) = (dev->dev_addr[5] << 8) |
473 *lib_ptr(ib, rx_len, lp->type) = (LANCE_LOG_RX_BUFFERS << 13) |
475 *lib_ptr(ib, rx_ptr, lp->type) = leptr;
482 *lib_ptr(ib, tx_len, lp->type) = (LANCE_LOG_TX_BUFFERS << 13) |
484 *lib_ptr(ib, tx_pt
557 volatile u16 *ib = (volatile u16 *)dev->mem_start; local
644 volatile u16 *ib = (volatile u16 *)dev->mem_start; local
780 volatile u16 *ib = (volatile u16 *)dev->mem_start; local
900 volatile u16 *ib = (volatile u16 *)dev->mem_start; local
944 volatile u16 *ib = (volatile u16 *)dev->mem_start; local
973 volatile u16 *ib = (volatile u16 *)dev->mem_start; local
[all...]
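
Note: declance.c never dereferences init-block fields directly; lib_ptr(ib, field, type) turns a field's per-chip byte offset (the word spacing differs between DECstation variants, hence the type argument) into a volatile u16 pointer. A toy version with an invented offset table:

    #include <stdint.h>
    #include <stdio.h>
    #include <stddef.h>

    static size_t lib_off_sketch(int field)        /* invented offsets */
    {
        static const size_t off[] = { 0, 2, 4 };   /* mode, phys_addr[0], ... */
        return off[field];
    }

    #define LIB_PTR(ib, field) \
        ((volatile uint16_t *)((uint8_t *)(ib) + lib_off_sketch(field)))

    int main(void)
    {
        uint16_t block[8] = { 0 };
        *LIB_PTR(block, 1) = 0x1234;  /* like *lib_ptr(ib, phys_addr[0], type) */
        printf("0x%04x\n", block[1]);
        return 0;
    }
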
/drivers/isdn/hysdn/
hysdn_proclog.c
103 struct log_data *ib; local
117 if (!(ib = kmalloc(sizeof(struct log_data) + strlen(cp), GFP_ATOMIC)))
119 strcpy(ib->log_start, cp); /* set output string */
120 ib->next = NULL;
121 ib->proc_ctrl = pd; /* point to own control structure */
123 ib->usage_cnt = pd->if_used;
125 pd->log_head = ib; /* new head */
127 pd->log_tail->next = ib; /* follows existing messages */
128 pd->log_tail = ib; /* new tail */
137 ib
[all...]
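
Note: the hysdn_proclog.c hit over-allocates each node so the message text lives inline (kmalloc(sizeof(struct log_data) + strlen(cp), GFP_ATOMIC); atomic because it can run from IRQ context) and appends it to a singly linked head/tail list. The divert_procfs.c result below uses the same pattern. A userspace equivalent with a C99 flexible array member:

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    struct log_data {
        struct log_data *next;
        char log_start[];      /* string stored inline, like the kernel node */
    };

    static struct log_data *head, *tail;

    static int put_log_buffer(const char *cp)
    {
        struct log_data *ib = malloc(sizeof(*ib) + strlen(cp) + 1);
        if (!ib)
            return -1;
        strcpy(ib->log_start, cp);  /* set output string */
        ib->next = NULL;
        if (!head)
            head = ib;              /* new head */
        else
            tail->next = ib;        /* follows existing messages */
        tail = ib;                  /* new tail */
        return 0;
    }

    int main(void)
    {
        put_log_buffer("first");
        put_log_buffer("second");
        for (struct log_data *p = head; p; p = p->next)
            printf("%s\n", p->log_start);
        return 0;
    }
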
/drivers/isdn/divert/
divert_procfs.c
43 struct divert_info *ib; local
52 if (!(ib = kmalloc(sizeof(struct divert_info) + strlen(cp), GFP_ATOMIC)))
54 strcpy(ib->info_start, cp); /* set output string */
55 ib->next = NULL;
57 ib->usage_cnt = if_used;
59 divert_info_head = ib; /* new head */
61 divert_info_tail->next = ib; /* follows existing messages */
62 divert_info_tail = ib; /* new tail */
68 ib = divert_info_head;
70 kfree(ib);
[all...]
/drivers/s390/char/
fs3270.c
247 struct idal_buffer *ib; local
255 ib = idal_buffer_alloc(count, 0);
256 if (IS_ERR(ib))
263 raw3270_request_set_idal(rq, ib);
270 if (idal_buffer_to_user(ib, data, count) != 0)
280 idal_buffer_free(ib);
292 struct idal_buffer *ib; local
299 ib = idal_buffer_alloc(count, 0);
300 if (IS_ERR(ib))
304 if (idal_buffer_from_user(ib, dat
433 struct idal_buffer *ib; local
[all...]
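
Note: the fs3270.c read path above follows a strict allocate/use/free discipline around the IDAL buffer. A condensed, non-standalone sketch of that shape using only the calls visible in the snippet, with the raw3270 request setup and channel I/O omitted:

    static ssize_t sketch_read(char __user *data, size_t count)
    {
        struct idal_buffer *ib;
        ssize_t rc = 0;

        ib = idal_buffer_alloc(count, 0);
        if (IS_ERR(ib))
            return -ENOMEM;
        /* ... build request, raw3270_request_set_idal(rq, ib), do I/O ... */
        if (idal_buffer_to_user(ib, data, count) != 0)
            rc = -EFAULT;
        idal_buffer_free(ib);       /* freed on every path */
        return rc ? rc : (ssize_t)count;
    }
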
/drivers/net/ethernet/brocade/bna/
bna_hw_defs.h
226 /* Set the coalescing timer for the given ib */
230 /* Acks 'events' # of events for a given ib while disabling interrupts */
235 /* Acks 'events' # of events for a given ib */
243 struct bna_ib *ib = _ib; \
244 if ((ib->intr_type == BNA_INTR_T_INTX)) { \
246 intx_mask &= ~(ib->intr_vector); \
249 bna_ib_coalescing_timer_set(&ib->door_bell, \
250 ib->coalescing_timeo); \
252 bna_ib_ack(&ib->door_bell, 0); \
258 struct bna_ib *ib
[all...]
/drivers/media/platform/vivid/
vivid-tpg-colors.c
167 double ir, ig, ib; local
171 ib = m[2][0] * (*r) + m[2][1] * (*g) + m[2][2] * (*b);
174 *b = ib;
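
Note: the vivid-tpg-colors.c hit computes all three matrix products into temporaries (ir, ig, ib) before writing back, so *r, *g and *b are not clobbered while they are still being read. Standalone version of that 3x3 color transform:

    #include <stdio.h>

    static void color_transform(double m[3][3], double *r, double *g, double *b)
    {
        double ir = m[0][0] * *r + m[0][1] * *g + m[0][2] * *b;
        double ig = m[1][0] * *r + m[1][1] * *g + m[1][2] * *b;
        double ib = m[2][0] * *r + m[2][1] * *g + m[2][2] * *b;
        *r = ir;
        *g = ig;
        *b = ib;
    }

    int main(void)
    {
        double m[3][3] = { {1, 0, 0}, {0, 1, 0}, {0, 0, 1} };
        double r = 0.5, g = 0.25, b = 0.125;
        color_transform(m, &r, &g, &b);
        printf("%g %g %g\n", r, g, b);
        return 0;
    }
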
/drivers/infiniband/core/
cma.c
53 #include <rdma/ib.h>
136 struct ib_cm_id *ib; member in union:rdma_id_private::__anon1415
154 struct ib_sa_multicast *ib; member in union:cma_multicast::__anon1416
268 kfree(mc->multicast.ib);
740 if (!id_priv->cm_id.ib || (id_priv->id.qp_type == IB_QPT_UD))
743 ret = ib_cm_init_qp_attr(id_priv->cm_id.ib, qp_attr,
843 struct sockaddr_ib *listen_ib, *ib; local
846 ib = (struct sockaddr_ib *) &id->route.addr.src_addr;
847 ib->sib_family = listen_ib->sib_family;
848 ib
[all...]
/drivers/infiniband/hw/ehca/
ehca_mrmw.c
176 &e_maxmr->ib.ib_mr.lkey,
177 &e_maxmr->ib.ib_mr.rkey);
183 ib_mr = &e_maxmr->ib.ib_mr;
260 e_pd, &e_mr->ib.ib_mr.lkey,
261 &e_mr->ib.ib_mr.rkey);
289 e_pd, &pginfo, &e_mr->ib.ib_mr.lkey,
290 &e_mr->ib.ib_mr.rkey, EHCA_REG_MR);
298 return &e_mr->ib.ib_mr;
405 e_pd, &pginfo, &e_mr->ib.ib_mr.lkey,
406 &e_mr->ib
[all...]

Completed in 1630 milliseconds
