Searched refs:ring (Results 1 - 25 of 328) sorted by relevance

/drivers/gpu/drm/msm/
msm_ringbuffer.c  23 struct msm_ringbuffer *ring; local
28 ring = kzalloc(sizeof(*ring), GFP_KERNEL);
29 if (!ring) {
34 ring->gpu = gpu;
35 ring->bo = msm_gem_new(gpu->dev, size, MSM_BO_WC);
36 if (IS_ERR(ring->bo)) {
37 ret = PTR_ERR(ring->bo);
38 ring->bo = NULL;
42 ring
56 msm_ringbuffer_destroy(struct msm_ringbuffer *ring) argument
[all...]
msm_ringbuffer.h  31 void msm_ringbuffer_destroy(struct msm_ringbuffer *ring);
36 OUT_RING(struct msm_ringbuffer *ring, uint32_t data) argument
38 if (ring->cur == ring->end)
39 ring->cur = ring->start;
40 *(ring->cur++) = data;
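Note: the OUT_RING() helper shown above is the whole write path for this ring, wrapping the cursor back to start when it reaches end and then storing one dword. As a standalone sketch of the same pattern (hypothetical types and names, not the msm driver's own), a circular command-buffer write looks like this:

#include <stdint.h>
#include <stdio.h>

/* Hypothetical ring: a fixed array of dwords with a moving cursor. */
struct ring {
        uint32_t buf[8];
        uint32_t *start, *end, *cur;
};

static void ring_init(struct ring *r)
{
        r->start = r->buf;
        r->end   = r->buf + 8;          /* one past the last slot */
        r->cur   = r->buf;
}

/* Same shape as OUT_RING(): wrap first, then post-increment store. */
static void ring_out(struct ring *r, uint32_t data)
{
        if (r->cur == r->end)
                r->cur = r->start;
        *(r->cur++) = data;
}

int main(void)
{
        struct ring r;
        ring_init(&r);
        for (uint32_t i = 0; i < 10; i++)
                ring_out(&r, i);
        /* 10 writes into 8 slots wrap once: cursor ends 2 past start. */
        printf("cursor offset after 10 writes: %ld\n",
               (long)(r.cur - r.start));
        return 0;
}

Because the wrap check happens before the store, the cursor can legitimately sit at end between writes.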
/drivers/gpu/drm/radeon/
uvd_v3_1.c  34 * @ring: radeon_ring pointer
38 * Emit a semaphore command (either wait or signal) to the UVD ring.
41 struct radeon_ring *ring,
47 radeon_ring_write(ring, PACKET0(UVD_SEMA_ADDR_LOW, 0));
48 radeon_ring_write(ring, (addr >> 3) & 0x000FFFFF);
50 radeon_ring_write(ring, PACKET0(UVD_SEMA_ADDR_HIGH, 0));
51 radeon_ring_write(ring, (addr >> 23) & 0x000FFFFF);
53 radeon_ring_write(ring, PACKET0(UVD_SEMA_CMD, 0));
54 radeon_ring_write(ring, 0x80 | (emit_wait ? 1 : 0));
40 uvd_v3_1_semaphore_emit(struct radeon_device *rdev, struct radeon_ring *ring, struct radeon_semaphore *semaphore, bool emit_wait) argument
vce_v1_0.c  38 * @ring: radeon_ring pointer
43 struct radeon_ring *ring)
45 if (ring->idx == TN_RING_TYPE_VCE1_INDEX)
55 * @ring: radeon_ring pointer
60 struct radeon_ring *ring)
62 if (ring->idx == TN_RING_TYPE_VCE1_INDEX)
72 * @ring: radeon_ring pointer
77 struct radeon_ring *ring)
79 if (ring->idx == TN_RING_TYPE_VCE1_INDEX)
80 WREG32(VCE_RB_WPTR, ring
42 vce_v1_0_get_rptr(struct radeon_device *rdev, struct radeon_ring *ring) argument
59 vce_v1_0_get_wptr(struct radeon_device *rdev, struct radeon_ring *ring) argument
76 vce_v1_0_set_wptr(struct radeon_device *rdev, struct radeon_ring *ring) argument
94 struct radeon_ring *ring; local
161 struct radeon_ring *ring; local
[all...]
radeon_ring.c  34 * Most engines on the GPU are fed via ring buffers. Ring
40 * pointers are equal, the ring is idle. When the host
41 * writes commands to the ring buffer, it increments the
45 static int radeon_debugfs_ring_init(struct radeon_device *rdev, struct radeon_ring *ring);
48 * radeon_ring_supports_scratch_reg - check if the ring supports
52 * @ring: radeon_ring structure holding ring information
54 * Check if a specific ring supports writing to scratch registers (all asics).
55 * Returns true if the ring supports writing to scratch regs, false if not.
58 struct radeon_ring *ring)
57 radeon_ring_supports_scratch_reg(struct radeon_device *rdev, struct radeon_ring *ring) argument
78 radeon_ring_free_size(struct radeon_device *rdev, struct radeon_ring *ring) argument
104 radeon_ring_alloc(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ndw) argument
140 radeon_ring_lock(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ndw) argument
164 radeon_ring_commit(struct radeon_device *rdev, struct radeon_ring *ring, bool hdp_flush) argument
195 radeon_ring_unlock_commit(struct radeon_device *rdev, struct radeon_ring *ring, bool hdp_flush) argument
209 radeon_ring_undo(struct radeon_ring *ring) argument
221 radeon_ring_unlock_undo(struct radeon_device *rdev, struct radeon_ring *ring) argument
234 radeon_ring_lockup_update(struct radeon_device *rdev, struct radeon_ring *ring) argument
247 radeon_ring_test_lockup(struct radeon_device *rdev, struct radeon_ring *ring) argument
277 radeon_ring_backup(struct radeon_device *rdev, struct radeon_ring *ring, uint32_t **data) argument
341 radeon_ring_restore(struct radeon_device *rdev, struct radeon_ring *ring, unsigned size, uint32_t *data) argument
375 radeon_ring_init(struct radeon_device *rdev, struct radeon_ring *ring, unsigned ring_size, unsigned rptr_offs, u32 nop) argument
432 radeon_ring_fini(struct radeon_device *rdev, struct radeon_ring *ring) argument
466 struct radeon_ring *ring = &rdev->ring[ridx]; local
539 radeon_debugfs_ring_init(struct radeon_device *rdev, struct radeon_ring *ring) argument
[all...]
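Note: the block comment at the top of radeon_ring.c (lines 34-41 of that file) describes the usual producer/consumer scheme: the host advances a write pointer, the GPU advances a read pointer, and equal pointers mean the ring is idle. A minimal sketch of the free-space calculation that scheme implies (a hypothetical helper, not radeon_ring_free_size() itself, assuming a power-of-two ring size):

#include <stdint.h>
#include <stdio.h>

/* Free dwords in a ring of ring_size entries, given read/write pointers.
 * One slot is left unused so that wptr == rptr unambiguously means "empty". */
static uint32_t ring_free_dwords(uint32_t rptr, uint32_t wptr, uint32_t ring_size)
{
        uint32_t free_dw = rptr - wptr - 1;   /* may wrap below zero... */
        free_dw &= ring_size - 1;             /* ...so mask with size - 1 */
        return free_dw;
}

int main(void)
{
        printf("%u\n", ring_free_dwords(0, 0, 1024));     /* 1023: empty ring */
        printf("%u\n", ring_free_dwords(100, 99, 1024));  /* 0: completely full */
        return 0;
}

Reserving one slot is the classic way to distinguish "full" from "empty" without a separate count.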
evergreen_dma.c  32 * evergreen_dma_fence_ring_emit - emit a fence on the DMA ring
37 * Add a DMA fence packet to the ring to write
44 struct radeon_ring *ring = &rdev->ring[fence->ring]; local
45 u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
47 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_FENCE, 0, 0));
48 radeon_ring_write(ring, addr & 0xfffffffc);
49 radeon_ring_write(ring, (upper_32_bits(addr) & 0xff));
50 radeon_ring_write(ring, fenc
70 struct radeon_ring *ring = &rdev->ring[ib->ring]; local
116 struct radeon_ring *ring = &rdev->ring[ring_index]; local
175 evergreen_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring) argument
[all...]
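Note: evergreen_dma_fence_ring_emit() above shows the common trick of splitting a 64-bit GPU address across two ring dwords: the low word is masked to the packet's alignment, the high word carries only the bits the engine supports. A hedged, driver-agnostic illustration of the same split and its reassembly (the masks mirror the snippet; the address value is invented):

#include <stdint.h>
#include <stdio.h>

static inline uint32_t upper_32(uint64_t v) { return (uint32_t)(v >> 32); }
static inline uint32_t lower_32(uint64_t v) { return (uint32_t)v; }

int main(void)
{
        uint64_t addr = 0x0000003f12345678ULL;   /* example GPU address */

        /* What the fence packet writes: dword-aligned low part, 8-bit high part. */
        uint32_t lo = lower_32(addr) & 0xfffffffc;
        uint32_t hi = upper_32(addr) & 0xff;

        /* What the engine reconstructs on the other side. */
        uint64_t back = ((uint64_t)hi << 32) | lo;
        printf("lo=0x%08x hi=0x%02x back=0x%012llx\n",
               lo, hi, (unsigned long long)back);
        return 0;
}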
uvd_v1_0.c  35 * @ring: radeon_ring pointer
40 struct radeon_ring *ring)
49 * @ring: radeon_ring pointer
54 struct radeon_ring *ring)
63 * @ring: radeon_ring pointer
68 struct radeon_ring *ring)
70 WREG32(UVD_RBC_RB_WPTR, ring->wptr);
79 * Write a fence and a trap command to the ring.
84 struct radeon_ring *ring = &rdev->ring[fenc local
39 uvd_v1_0_get_rptr(struct radeon_device *rdev, struct radeon_ring *ring) argument
53 uvd_v1_0_get_wptr(struct radeon_device *rdev, struct radeon_ring *ring) argument
67 uvd_v1_0_set_wptr(struct radeon_device *rdev, struct radeon_ring *ring) argument
158 struct radeon_ring *ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX]; local
249 struct radeon_ring *ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX]; local
264 struct radeon_ring *ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX]; local
420 uvd_v1_0_ring_test(struct radeon_device *rdev, struct radeon_ring *ring) argument
464 uvd_v1_0_semaphore_emit(struct radeon_device *rdev, struct radeon_ring *ring, struct radeon_semaphore *semaphore, bool emit_wait) argument
493 struct radeon_ring *ring = &rdev->ring[ib->ring]; local
509 uvd_v1_0_ib_test(struct radeon_device *rdev, struct radeon_ring *ring) argument
[all...]
radeon_fence.c  47 * are no longer in use by the associated ring on the GPU and
58 * @ring: ring index the fence is associated with
62 static void radeon_fence_write(struct radeon_device *rdev, u32 seq, int ring) argument
64 struct radeon_fence_driver *drv = &rdev->fence_drv[ring];
78 * @ring: ring index the fence is associated with
83 static u32 radeon_fence_read(struct radeon_device *rdev, int ring) argument
85 struct radeon_fence_driver *drv = &rdev->fence_drv[ring];
104 * @ring
108 radeon_fence_schedule_check(struct radeon_device *rdev, int ring) argument
129 radeon_fence_emit(struct radeon_device *rdev, struct radeon_fence **fence, int ring) argument
196 radeon_fence_activity(struct radeon_device *rdev, int ring) argument
270 int ring; local
319 radeon_fence_process(struct radeon_device *rdev, int ring) argument
339 radeon_fence_seq_signaled(struct radeon_device *rdev, u64 seq, unsigned ring) argument
357 unsigned ring = fence->ring; local
618 radeon_fence_wait_next(struct radeon_device *rdev, int ring) argument
645 radeon_fence_wait_empty(struct radeon_device *rdev, int ring) argument
706 radeon_fence_count_emitted(struct radeon_device *rdev, int ring) argument
800 radeon_fence_driver_start_ring(struct radeon_device *rdev, int ring) argument
850 radeon_fence_driver_init_ring(struct radeon_device *rdev, int ring) argument
880 int ring; local
902 int ring, r; local
930 radeon_fence_driver_force_completion(struct radeon_device *rdev, int ring) argument
[all...]
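Note: radeon_fence.c keeps, per ring, a monotonically increasing sequence number that the GPU writes back as work completes (radeon_fence_write()/radeon_fence_read() above); a fence is considered signaled once the last value the hardware wrote has caught up with the fence's own sequence. A rough sketch of that comparison under those assumptions (not radeon's exact code, which also has to cope with the 32-bit scratch value wrapping):

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical per-ring fence state. */
struct fence_driver {
        uint64_t sync_seq;      /* last sequence number emitted to the ring */
        uint64_t signaled_seq;  /* last sequence number seen written back by the GPU */
};

static bool fence_seq_signaled(const struct fence_driver *drv, uint64_t seq)
{
        return drv->signaled_seq >= seq;
}

int main(void)
{
        struct fence_driver drv = { .sync_seq = 42, .signaled_seq = 40 };
        printf("fence 40 signaled: %d\n", fence_seq_signaled(&drv, 40)); /* 1 */
        printf("fence 42 signaled: %d\n", fence_seq_signaled(&drv, 42)); /* 0 */
        return 0;
}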
ni_dma.c  36 * to the 3D engine (ring buffer, IBs, etc.), but the
49 * @ring: radeon ring pointer
54 struct radeon_ring *ring)
59 rptr = rdev->wb.wb[ring->rptr_offs/4];
61 if (ring->idx == R600_RING_TYPE_DMA_INDEX)
76 * @ring: radeon ring pointer
81 struct radeon_ring *ring)
85 if (ring
53 cayman_dma_get_rptr(struct radeon_device *rdev, struct radeon_ring *ring) argument
80 cayman_dma_get_wptr(struct radeon_device *rdev, struct radeon_ring *ring) argument
101 cayman_dma_set_wptr(struct radeon_device *rdev, struct radeon_ring *ring) argument
125 struct radeon_ring *ring = &rdev->ring[ib->ring]; local
188 struct radeon_ring *ring; local
286 cayman_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring) argument
451 struct radeon_ring *ring = &rdev->ring[ridx]; local
[all...]
r600_dma.c  35 * to the 3D engine (ring buffer, IBs, etc.), but the
47 * @ring: radeon ring pointer
52 struct radeon_ring *ring)
57 rptr = rdev->wb.wb[ring->rptr_offs/4];
68 * @ring: radeon ring pointer
73 struct radeon_ring *ring)
82 * @ring: radeon ring pointe
51 r600_dma_get_rptr(struct radeon_device *rdev, struct radeon_ring *ring) argument
72 r600_dma_get_wptr(struct radeon_device *rdev, struct radeon_ring *ring) argument
86 r600_dma_set_wptr(struct radeon_device *rdev, struct radeon_ring *ring) argument
122 struct radeon_ring *ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX]; local
208 r600_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring) argument
230 r600_dma_ring_test(struct radeon_device *rdev, struct radeon_ring *ring) argument
290 struct radeon_ring *ring = &rdev->ring[fence->ring]; local
313 r600_dma_semaphore_ring_emit(struct radeon_device *rdev, struct radeon_ring *ring, struct radeon_semaphore *semaphore, bool emit_wait) argument
337 r600_dma_ib_test(struct radeon_device *rdev, struct radeon_ring *ring) argument
402 struct radeon_ring *ring = &rdev->ring[ib->ring]; local
447 struct radeon_ring *ring = &rdev->ring[ring_index]; local
[all...]
uvd_v2_2.c  37 * Write a fence and a trap command to the ring.
42 struct radeon_ring *ring = &rdev->ring[fence->ring]; local
43 uint64_t addr = rdev->fence_drv[fence->ring].gpu_addr;
45 radeon_ring_write(ring, PACKET0(UVD_CONTEXT_ID, 0));
46 radeon_ring_write(ring, fence->seq);
47 radeon_ring_write(ring, PACKET0(UVD_GPCOM_VCPU_DATA0, 0));
48 radeon_ring_write(ring, lower_32_bits(addr));
49 radeon_ring_write(ring, PACKET
[all...]
radeon_trace.h  34 __field(u32, ring)
40 __entry->ring = p->ring;
43 p->rdev, p->ring);
45 TP_printk("ring=%u, dw=%u, fences=%u",
46 __entry->ring, __entry->dw,
51 TP_PROTO(unsigned vmid, int ring),
52 TP_ARGS(vmid, ring),
55 __field(u32, ring)
60 __entry->ring
[all...]
rv770_dma.c  50 struct radeon_ring *ring = &rdev->ring[ring_index]; local
63 r = radeon_ring_lock(rdev, ring, num_loops * 5 + 8);
71 radeon_semaphore_sync_rings(rdev, sem, ring->idx);
78 radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_COPY, 0, 0, cur_size_in_dw));
79 radeon_ring_write(ring, dst_offset & 0xfffffffc);
80 radeon_ring_write(ring, src_offset & 0xfffffffc);
81 radeon_ring_write(ring, upper_32_bits(dst_offset) & 0xff);
82 radeon_ring_write(ring, upper_32_bits(src_offset) & 0xff);
87 r = radeon_fence_emit(rdev, &fence, ring
[all...]
/drivers/gpu/drm/i915/
intel_ringbuffer.c  37 intel_ring_initialized(struct intel_engine_cs *ring) argument
39 struct drm_device *dev = ring->dev;
45 struct intel_context *dctx = ring->default_context;
46 struct intel_ringbuffer *ringbuf = dctx->engine[ring->id].ringbuf;
50 return ring->buffer && ring->buffer->obj;
67 bool intel_ring_stopped(struct intel_engine_cs *ring) argument
69 struct drm_i915_private *dev_priv = ring->dev->dev_private;
70 return dev_priv->gpu_error.stop_rings & intel_ring_flag(ring);
73 void __intel_ring_advance(struct intel_engine_cs *ring) argument
83 gen2_render_ring_flush(struct intel_engine_cs *ring, u32 invalidate_domains, u32 flush_domains) argument
109 gen4_render_ring_flush(struct intel_engine_cs *ring, u32 invalidate_domains, u32 flush_domains) argument
204 intel_emit_post_sync_nonzero_flush(struct intel_engine_cs *ring) argument
239 gen6_render_ring_flush(struct intel_engine_cs *ring, u32 invalidate_domains, u32 flush_domains) argument
291 gen7_render_ring_cs_stall_wa(struct intel_engine_cs *ring) argument
309 gen7_ring_fbc_flush(struct intel_engine_cs *ring, u32 value) argument
333 gen7_render_ring_flush(struct intel_engine_cs *ring, u32 invalidate_domains, u32 flush_domains) argument
394 gen8_emit_pipe_control(struct intel_engine_cs *ring, u32 flags, u32 scratch_addr) argument
415 gen8_render_ring_flush(struct intel_engine_cs *ring, u32 invalidate_domains, u32 flush_domains) argument
457 ring_write_tail(struct intel_engine_cs *ring, u32 value) argument
464 intel_ring_get_active_head(struct intel_engine_cs *ring) argument
480 ring_setup_phys_status_page(struct intel_engine_cs *ring) argument
491 stop_ring(struct intel_engine_cs *ring) argument
520 init_ring_common(struct intel_engine_cs *ring) argument
610 intel_fini_pipe_control(struct intel_engine_cs *ring) argument
627 intel_init_pipe_control(struct intel_engine_cs *ring) argument
668 intel_ring_emit_wa(struct intel_engine_cs *ring, u32 addr, u32 value) argument
692 bdw_init_workarounds(struct intel_engine_cs *ring) argument
758 chv_init_workarounds(struct intel_engine_cs *ring) argument
797 init_render_ring(struct intel_engine_cs *ring) argument
855 render_ring_cleanup(struct intel_engine_cs *ring) argument
986 gen6_add_request(struct intel_engine_cs *ring) argument
1097 pc_render_add_request(struct intel_engine_cs *ring) argument
1145 gen6_ring_get_seqno(struct intel_engine_cs *ring, bool lazy_coherency) argument
1159 ring_get_seqno(struct intel_engine_cs *ring, bool lazy_coherency) argument
1165 ring_set_seqno(struct intel_engine_cs *ring, u32 seqno) argument
1171 pc_render_get_seqno(struct intel_engine_cs *ring, bool lazy_coherency) argument
1177 pc_render_set_seqno(struct intel_engine_cs *ring, u32 seqno) argument
1183 gen5_ring_get_irq(struct intel_engine_cs *ring) argument
1201 gen5_ring_put_irq(struct intel_engine_cs *ring) argument
1214 i9xx_ring_get_irq(struct intel_engine_cs *ring) argument
1235 i9xx_ring_put_irq(struct intel_engine_cs *ring) argument
1251 i8xx_ring_get_irq(struct intel_engine_cs *ring) argument
1272 i8xx_ring_put_irq(struct intel_engine_cs *ring) argument
1287 intel_ring_setup_status_page(struct intel_engine_cs *ring) argument
1350 bsd_ring_flush(struct intel_engine_cs *ring, u32 invalidate_domains, u32 flush_domains) argument
1367 i9xx_add_request(struct intel_engine_cs *ring) argument
1385 gen6_ring_get_irq(struct intel_engine_cs *ring) argument
1410 gen6_ring_put_irq(struct intel_engine_cs *ring) argument
1428 hsw_vebox_get_irq(struct intel_engine_cs *ring) argument
1448 hsw_vebox_put_irq(struct intel_engine_cs *ring) argument
1466 gen8_ring_get_irq(struct intel_engine_cs *ring) argument
1492 gen8_ring_put_irq(struct intel_engine_cs *ring) argument
1512 i965_dispatch_execbuffer(struct intel_engine_cs *ring, u64 offset, u32 length, unsigned flags) argument
1537 i830_dispatch_execbuffer(struct intel_engine_cs *ring, u64 offset, u32 len, unsigned flags) argument
1598 i915_dispatch_execbuffer(struct intel_engine_cs *ring, u64 offset, u32 len, unsigned flags) argument
1615 cleanup_status_page(struct intel_engine_cs *ring) argument
1629 init_status_page(struct intel_engine_cs *ring) argument
1680 init_phys_status_page(struct intel_engine_cs *ring) argument
1755 intel_init_ring_buffer(struct drm_device *dev, struct intel_engine_cs *ring) argument
1819 intel_cleanup_ring_buffer(struct intel_engine_cs *ring) argument
1845 intel_ring_wait_request(struct intel_engine_cs *ring, int n) argument
1884 ring_wait_for_space(struct intel_engine_cs *ring, int n) argument
1943 intel_wrap_ring_buffer(struct intel_engine_cs *ring) argument
1966 intel_ring_idle(struct intel_engine_cs *ring) argument
1990 intel_ring_alloc_seqno(struct intel_engine_cs *ring) argument
2008 __intel_ring_prepare(struct intel_engine_cs *ring, int bytes) argument
2029 intel_ring_begin(struct intel_engine_cs *ring, int num_dwords) argument
2054 intel_ring_cacheline_align(struct intel_engine_cs *ring) argument
2075 intel_ring_init_seqno(struct intel_engine_cs *ring, u32 seqno) argument
2093 gen6_bsd_ring_write_tail(struct intel_engine_cs *ring, u32 value) argument
2126 gen6_bsd_ring_flush(struct intel_engine_cs *ring, u32 invalidate, u32 flush) argument
2162 gen8_ring_dispatch_execbuffer(struct intel_engine_cs *ring, u64 offset, u32 len, unsigned flags) argument
2184 hsw_ring_dispatch_execbuffer(struct intel_engine_cs *ring, u64 offset, u32 len, unsigned flags) argument
2206 gen6_ring_dispatch_execbuffer(struct intel_engine_cs *ring, u64 offset, u32 len, unsigned flags) argument
2228 gen6_ring_flush(struct intel_engine_cs *ring, u32 invalidate, u32 flush) argument
2271 struct intel_engine_cs *ring = &dev_priv->ring[RCS]; local
2412 struct intel_engine_cs *ring = &dev_priv->ring[RCS]; local
2497 struct intel_engine_cs *ring = &dev_priv->ring[VCS]; local
2574 struct intel_engine_cs *ring = &dev_priv->ring[VCS2]; local
2609 struct intel_engine_cs *ring = &dev_priv->ring[BCS]; local
2666 struct intel_engine_cs *ring = &dev_priv->ring[VECS]; local
2715 intel_ring_flush_all_caches(struct intel_engine_cs *ring) argument
2733 intel_ring_invalidate_all_caches(struct intel_engine_cs *ring) argument
2753 intel_stop_ring_buffer(struct intel_engine_cs *ring) argument
[all...]
intel_lrc.h  28 #define RING_ELSP(ring) ((ring)->mmio_base+0x230)
29 #define RING_EXECLIST_STATUS(ring) ((ring)->mmio_base+0x234)
30 #define RING_CONTEXT_CONTROL(ring) ((ring)->mmio_base+0x244)
31 #define RING_CONTEXT_STATUS_BUF(ring) ((ring)->mmio_base+0x370)
32 #define RING_CONTEXT_STATUS_PTR(ring) ((ring)
103 struct intel_engine_cs *ring; member in struct:intel_ctx_submit_request
[all...]
intel_ringbuffer.h  32 #define I915_READ_TAIL(ring) I915_READ(RING_TAIL((ring)->mmio_base))
33 #define I915_WRITE_TAIL(ring, val) I915_WRITE(RING_TAIL((ring)->mmio_base), val)
35 #define I915_READ_START(ring) I915_READ(RING_START((ring)->mmio_base))
36 #define I915_WRITE_START(ring, val) I915_WRITE(RING_START((ring)->mmio_base), val)
38 #define I915_READ_HEAD(ring) I915_READ(RING_HEAD((ring)
100 struct intel_engine_cs *ring; member in struct:intel_ringbuffer
323 intel_ring_flag(struct intel_engine_cs *ring) argument
329 intel_ring_sync_index(struct intel_engine_cs *ring, struct intel_engine_cs *other) argument
350 intel_read_status_page(struct intel_engine_cs *ring, int reg) argument
359 intel_write_status_page(struct intel_engine_cs *ring, int reg, u32 value) argument
393 intel_ring_emit(struct intel_engine_cs *ring, u32 data) argument
400 intel_ring_advance(struct intel_engine_cs *ring) argument
432 intel_ring_get_seqno(struct intel_engine_cs *ring) argument
438 i915_trace_irq_get(struct intel_engine_cs *ring, u32 seqno) argument
[all...]
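Note: the macros in intel_ringbuffer.h and intel_lrc.h all follow one convention: every engine register is expressed as a fixed offset added to that engine's mmio_base, so the same macro works for any ring. A stripped-down sketch of that pattern with invented offsets and a fake register file (none of these names or values are i915's):

#include <stdint.h>
#include <stdio.h>

/* Invented per-engine register offset; the real macros add RING_TAIL, RING_HEAD,
 * etc. to (ring)->mmio_base in exactly this way. */
#define FAKE_REG_TAIL 0x30

struct fake_engine {
        const char *name;
        uint32_t mmio_base;
};

/* Pretend MMIO space, indexed by byte offset / 4. */
static uint32_t fake_mmio[0x10000 / 4];

static uint32_t fake_read(uint32_t reg)              { return fake_mmio[reg / 4]; }
static void     fake_write(uint32_t reg, uint32_t v) { fake_mmio[reg / 4] = v; }

/* The pattern from the headers: ENGINE_TAIL(e) == e->mmio_base + offset. */
#define ENGINE_TAIL(e) ((e)->mmio_base + FAKE_REG_TAIL)

int main(void)
{
        struct fake_engine a = { "engine A", 0x2000 };
        struct fake_engine b = { "engine B", 0x4000 };

        fake_write(ENGINE_TAIL(&a), 0x80);
        fake_write(ENGINE_TAIL(&b), 0x40);
        printf("%s tail=0x%x, %s tail=0x%x\n",
               a.name, fake_read(ENGINE_TAIL(&a)),
               b.name, fake_read(ENGINE_TAIL(&b)));
        return 0;
}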
intel_lrc.c  40 * ring contexts incorporate many more things to the context's state, like
51 * rings, the engine cs shifts to a new "ring buffer" with every context
53 * context, B) find its appropriate virtualized ring, C) write commands to it
86 * only allowed with the render ring, we can allocate & populate them right
100 * tail after the request was written to the ring buffer and a pointer to the
274 static void execlists_elsp_write(struct intel_engine_cs *ring, argument
278 struct drm_i915_private *dev_priv = ring->dev->dev_private;
317 I915_WRITE(RING_ELSP(ring), desc[1]);
318 I915_WRITE(RING_ELSP(ring), desc[0]);
319 I915_WRITE(RING_ELSP(ring), des
359 execlists_submit_context(struct intel_engine_cs *ring, struct intel_context *to0, u32 tail0, struct intel_context *to1, u32 tail1) argument
385 execlists_context_unqueue(struct intel_engine_cs *ring) argument
425 execlists_check_remove_request(struct intel_engine_cs *ring, u32 request_id) argument
462 intel_execlists_handle_ctx_events(struct intel_engine_cs *ring) argument
531 execlists_context_queue(struct intel_engine_cs *ring, struct intel_context *to, u32 tail) argument
583 struct intel_engine_cs *ring = ringbuf->ring; local
602 struct intel_engine_cs *ring = ringbuf->ring; local
647 intel_execlists_submission(struct drm_device *dev, struct drm_file *file, struct intel_engine_cs *ring, struct intel_context *ctx, struct drm_i915_gem_execbuffer2 *args, struct list_head *vmas, struct drm_i915_gem_object *batch_obj, u64 exec_start, u32 flags) argument
736 intel_logical_ring_stop(struct intel_engine_cs *ring) argument
760 struct intel_engine_cs *ring = ringbuf->ring; local
785 struct intel_engine_cs *ring = ringbuf->ring; local
796 logical_ring_alloc_seqno(struct intel_engine_cs *ring, struct intel_context *ctx) argument
825 struct intel_engine_cs *ring = ringbuf->ring; local
865 struct intel_engine_cs *ring = ringbuf->ring; local
971 struct intel_engine_cs *ring = ringbuf->ring; local
994 gen8_init_common_ring(struct intel_engine_cs *ring) argument
1013 gen8_init_render_ring(struct intel_engine_cs *ring) argument
1060 gen8_logical_ring_get_irq(struct intel_engine_cs *ring) argument
1079 gen8_logical_ring_put_irq(struct intel_engine_cs *ring) argument
1097 struct intel_engine_cs *ring = ringbuf->ring; local
1135 struct intel_engine_cs *ring = ringbuf->ring; local
1173 gen8_get_seqno(struct intel_engine_cs *ring, bool lazy_coherency) argument
1178 gen8_set_seqno(struct intel_engine_cs *ring, u32 seqno) argument
1185 struct intel_engine_cs *ring = ringbuf->ring; local
1215 intel_logical_ring_cleanup(struct intel_engine_cs *ring) argument
1238 logical_ring_init(struct drm_device *dev, struct intel_engine_cs *ring) argument
1272 struct intel_engine_cs *ring = &dev_priv->ring[RCS]; local
1300 struct intel_engine_cs *ring = &dev_priv->ring[VCS]; local
1325 struct intel_engine_cs *ring = &dev_priv->ring[VCS2]; local
1350 struct intel_engine_cs *ring = &dev_priv->ring[BCS]; local
1375 struct intel_engine_cs *ring = &dev_priv->ring[VECS]; local
1460 intel_lr_context_render_state_init(struct intel_engine_cs *ring, struct intel_context *ctx) argument
1493 populate_lr_context(struct intel_context *ctx, struct drm_i915_gem_object *ctx_obj, struct intel_engine_cs *ring, struct intel_ringbuffer *ringbuf) argument
1631 get_lr_context_size(struct intel_engine_cs *ring) argument
1665 intel_lr_context_deferred_create(struct intel_context *ctx, struct intel_engine_cs *ring) argument
[all...]
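Note: the intel_lrc.c comment spells out the execlists model: instead of one global ring per engine, each context carries its own ring buffer, and submission means A) getting the context, B) finding its virtualized ring for that engine, and C) writing commands into it. A toy sketch of that lookup, with invented structures rather than i915's intel_context/engine arrays:

#include <stdint.h>
#include <stdio.h>

#define NUM_ENGINES 5   /* e.g. render, two video, blit, video-enhance */

struct toy_ringbuf {
        uint32_t *vaddr;
        uint32_t  tail;
};

/* One ring buffer per engine, stored inside the context itself. */
struct toy_context {
        struct toy_ringbuf rings[NUM_ENGINES];
};

static struct toy_ringbuf *context_ring(struct toy_context *ctx, int engine_id)
{
        return (engine_id >= 0 && engine_id < NUM_ENGINES)
                ? &ctx->rings[engine_id] : 0;
}

int main(void)
{
        static uint32_t backing[NUM_ENGINES][1024];
        struct toy_context ctx = { 0 };

        for (int i = 0; i < NUM_ENGINES; i++)
                ctx.rings[i].vaddr = backing[i];

        struct toy_ringbuf *rb = context_ring(&ctx, 2);
        rb->vaddr[rb->tail++] = 0xdeadbeef;   /* step C: write commands to it */
        printf("engine 2 ring tail = %u\n", rb->tail);
        return 0;
}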
/drivers/thunderbolt/
nhi.c  22 #define RING_TYPE(ring) ((ring)->is_tx ? "TX ring" : "RX ring")
25 static int ring_interrupt_index(struct tb_ring *ring) argument
27 int bit = ring->hop;
28 if (!ring->is_tx)
29 bit += ring->nhi->hop_count;
34 * ring_interrupt_active() - activate/deactivate interrupts for a single ring
36 * ring
38 ring_interrupt_active(struct tb_ring *ring, bool active) argument
81 ring_desc_base(struct tb_ring *ring) argument
89 ring_options_base(struct tb_ring *ring) argument
97 ring_iowrite16desc(struct tb_ring *ring, u32 value, u32 offset) argument
102 ring_iowrite32desc(struct tb_ring *ring, u32 value, u32 offset) argument
107 ring_iowrite64desc(struct tb_ring *ring, u64 value, u32 offset) argument
113 ring_iowrite32options(struct tb_ring *ring, u32 value, u32 offset) argument
118 ring_full(struct tb_ring *ring) argument
123 ring_empty(struct tb_ring *ring) argument
133 ring_write_descriptors(struct tb_ring *ring) argument
166 struct tb_ring *ring = container_of(work, typeof(*ring), work); local
227 __ring_enqueue(struct tb_ring *ring, struct ring_frame *frame) argument
244 struct tb_ring *ring = NULL; local
312 ring_start(struct tb_ring *ring) argument
356 ring_stop(struct tb_ring *ring) argument
398 ring_free(struct tb_ring *ring) argument
446 struct tb_ring *ring; local
[all...]
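Note: ring_interrupt_index() above packs TX and RX ring interrupts into one bit vector: a TX ring uses its hop number directly, an RX ring uses hop plus the controller's hop_count, so all RX bits sit in a second bank after the TX bits. The same layout restated as a self-contained sketch (hypothetical structs mirroring only the fields the snippet dereferences):

#include <stdbool.h>
#include <stdio.h>

struct toy_nhi  { int hop_count; };
struct toy_ring { struct toy_nhi *nhi; int hop; bool is_tx; };

/* TX rings occupy bits [0, hop_count), RX rings bits [hop_count, 2*hop_count). */
static int ring_irq_bit(const struct toy_ring *ring)
{
        int bit = ring->hop;
        if (!ring->is_tx)
                bit += ring->nhi->hop_count;
        return bit;
}

int main(void)
{
        struct toy_nhi nhi  = { .hop_count = 12 };
        struct toy_ring tx3 = { &nhi, 3, true };
        struct toy_ring rx3 = { &nhi, 3, false };
        printf("tx hop 3 -> bit %d, rx hop 3 -> bit %d\n",
               ring_irq_bit(&tx3), ring_irq_bit(&rx3));   /* 3 and 15 */
        return 0;
}

Keeping both directions in one mask lets a single register enable or disable any ring's interrupt by bit index.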
nhi.h  18 * Must be held during ring creation/destruction.
31 * struct tb_ring - thunderbolt TX or RX ring associated with a NHI
69 void ring_start(struct tb_ring *ring);
70 void ring_stop(struct tb_ring *ring);
71 void ring_free(struct tb_ring *ring);
73 int __ring_enqueue(struct tb_ring *ring, struct ring_frame *frame);
76 * ring_rx() - enqueue a frame on an RX ring
89 static inline int ring_rx(struct tb_ring *ring, struct ring_frame *frame) argument
91 WARN_ON(ring->is_tx);
92 return __ring_enqueue(ring, fram
108 ring_tx(struct tb_ring *ring, struct ring_frame *frame) argument
[all...]
/drivers/net/wireless/b43legacy/
dma.c  45 struct b43legacy_dmadesc32 *op32_idx2desc(struct b43legacy_dmaring *ring, argument
51 *meta = &(ring->meta[slot]);
52 desc = ring->descbase;
58 static void op32_fill_descriptor(struct b43legacy_dmaring *ring, argument
63 struct b43legacy_dmadesc32 *descbase = ring->descbase;
70 B43legacy_WARN_ON(!(slot >= 0 && slot < ring->nr_slots));
75 addr |= ring->dev->dma.translation;
76 ctl = (bufsize - ring->frameoffset)
78 if (slot == ring->nr_slots - 1)
93 static void op32_poke_tx(struct b43legacy_dmaring *ring, in argument
99 op32_tx_suspend(struct b43legacy_dmaring *ring) argument
106 op32_tx_resume(struct b43legacy_dmaring *ring) argument
113 op32_get_current_rxslot(struct b43legacy_dmaring *ring) argument
123 op32_set_current_rxslot(struct b43legacy_dmaring *ring, int slot) argument
130 free_slots(struct b43legacy_dmaring *ring) argument
135 next_slot(struct b43legacy_dmaring *ring, int slot) argument
143 prev_slot(struct b43legacy_dmaring *ring, int slot) argument
152 update_max_used_slots(struct b43legacy_dmaring *ring, int current_used_slots) argument
167 update_max_used_slots(struct b43legacy_dmaring *ring, int current_used_slots) argument
174 request_slot(struct b43legacy_dmaring *ring) argument
196 struct b43legacy_dmaring *ring; local
230 txring_to_priority(struct b43legacy_dmaring *ring) argument
260 map_descbuffer(struct b43legacy_dmaring *ring, unsigned char *buf, size_t len, int tx) argument
280 unmap_descbuffer(struct b43legacy_dmaring *ring, dma_addr_t addr, size_t len, int tx) argument
296 sync_descbuffer_for_cpu(struct b43legacy_dmaring *ring, dma_addr_t addr, size_t len) argument
307 sync_descbuffer_for_device(struct b43legacy_dmaring *ring, dma_addr_t addr, size_t len) argument
318 free_descriptor_buffer(struct b43legacy_dmaring *ring, struct b43legacy_dmadesc_meta *meta, int irq_context) argument
331 alloc_ringmemory(struct b43legacy_dmaring *ring) argument
343 free_ringmemory(struct b43legacy_dmaring *ring) argument
424 b43legacy_dma_mapping_error(struct b43legacy_dmaring *ring, dma_addr_t addr, size_t buffersize, bool dma_to_device) argument
453 setup_rx_descbuffer(struct b43legacy_dmaring *ring, struct b43legacy_dmadesc32 *desc, struct b43legacy_dmadesc_meta *meta, gfp_t gfp_flags) argument
503 alloc_initial_descbuffers(struct b43legacy_dmaring *ring) argument
540 dmacontroller_setup(struct b43legacy_dmaring *ring) argument
582 dmacontroller_cleanup(struct b43legacy_dmaring *ring) argument
595 free_all_descbuffers(struct b43legacy_dmaring *ring) argument
653 struct b43legacy_dmaring *ring; local
752 b43legacy_destroy_dmaring(struct b43legacy_dmaring *ring) argument
840 struct b43legacy_dmaring *ring; local
937 generate_cookie(struct b43legacy_dmaring *ring, int slot) argument
980 struct b43legacy_dmaring *ring = NULL; local
1010 dma_tx_fragment(struct b43legacy_dmaring *ring, struct sk_buff **in_skb) argument
1111 should_inject_overflow(struct b43legacy_dmaring *ring) argument
1136 struct b43legacy_dmaring *ring; local
1189 struct b43legacy_dmaring *ring; local
1305 dma_rx(struct b43legacy_dmaring *ring, int *slot) argument
1403 b43legacy_dma_rx(struct b43legacy_dmaring *ring) argument
1423 b43legacy_dma_tx_suspend_ring(struct b43legacy_dmaring *ring) argument
1429 b43legacy_dma_tx_resume_ring(struct b43legacy_dmaring *ring) argument
[all...]
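Note: the b43legacy results list slot helpers (free_slots(), next_slot(), prev_slot()) whose bodies are not shown in this excerpt. Assuming the usual convention for a descriptor ring with nr_slots entries, a plausible implementation of the modular slot stepping is:

#include <stdio.h>

/* Plausible slot helpers for an nr_slots-entry descriptor ring; the
 * b43legacy bodies themselves are not part of the excerpt above. */
static int next_slot(int nr_slots, int slot)
{
        return (slot + 1 == nr_slots) ? 0 : slot + 1;
}

static int prev_slot(int nr_slots, int slot)
{
        return (slot == 0) ? nr_slots - 1 : slot - 1;
}

int main(void)
{
        const int nr_slots = 256;
        printf("next(255)=%d prev(0)=%d\n",
               next_slot(nr_slots, 255), prev_slot(nr_slots, 0));  /* 0 and 255 */
        return 0;
}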
/drivers/net/wireless/b43/
dma.c  85 struct b43_dmadesc_generic *op32_idx2desc(struct b43_dmaring *ring, argument
91 *meta = &(ring->meta[slot]);
92 desc = ring->descbase;
98 static void op32_fill_descriptor(struct b43_dmaring *ring, argument
103 struct b43_dmadesc32 *descbase = ring->descbase;
110 B43_WARN_ON(!(slot >= 0 && slot < ring->nr_slots));
112 addr = b43_dma_address(&ring->dev->dma, dmaaddr, B43_DMA_ADDR_LOW);
113 addrext = b43_dma_address(&ring->dev->dma, dmaaddr, B43_DMA_ADDR_EXT);
116 if (slot == ring->nr_slots - 1)
131 static void op32_poke_tx(struct b43_dmaring *ring, in argument
137 op32_tx_suspend(struct b43_dmaring *ring) argument
143 op32_tx_resume(struct b43_dmaring *ring) argument
149 op32_get_current_rxslot(struct b43_dmaring *ring) argument
159 op32_set_current_rxslot(struct b43_dmaring *ring, int slot) argument
177 op64_idx2desc(struct b43_dmaring *ring, int slot, struct b43_dmadesc_meta **meta) argument
190 op64_fill_descriptor(struct b43_dmaring *ring, struct b43_dmadesc_generic *desc, dma_addr_t dmaaddr, u16 bufsize, int start, int end, int irq) argument
226 op64_poke_tx(struct b43_dmaring *ring, int slot) argument
232 op64_tx_suspend(struct b43_dmaring *ring) argument
238 op64_tx_resume(struct b43_dmaring *ring) argument
244 op64_get_current_rxslot(struct b43_dmaring *ring) argument
254 op64_set_current_rxslot(struct b43_dmaring *ring, int slot) argument
270 free_slots(struct b43_dmaring *ring) argument
275 next_slot(struct b43_dmaring *ring, int slot) argument
283 prev_slot(struct b43_dmaring *ring, int slot) argument
292 update_max_used_slots(struct b43_dmaring *ring, int current_used_slots) argument
307 update_max_used_slots(struct b43_dmaring *ring, int current_used_slots) argument
313 request_slot(struct b43_dmaring *ring) argument
360 map_descbuffer(struct b43_dmaring *ring, unsigned char *buf, size_t len, int tx) argument
377 unmap_descbuffer(struct b43_dmaring *ring, dma_addr_t addr, size_t len, int tx) argument
390 sync_descbuffer_for_cpu(struct b43_dmaring *ring, dma_addr_t addr, size_t len) argument
399 sync_descbuffer_for_device(struct b43_dmaring *ring, dma_addr_t addr, size_t len) argument
408 free_descriptor_buffer(struct b43_dmaring *ring, struct b43_dmadesc_meta *meta) argument
420 alloc_ringmemory(struct b43_dmaring *ring) argument
443 free_ringmemory(struct b43_dmaring *ring) argument
551 b43_dma_mapping_error(struct b43_dmaring *ring, dma_addr_t addr, size_t buffersize, bool dma_to_device) argument
583 b43_rx_buffer_is_poisoned(struct b43_dmaring *ring, struct sk_buff *skb) argument
590 b43_poison_rx_buffer(struct b43_dmaring *ring, struct sk_buff *skb) argument
605 setup_rx_descbuffer(struct b43_dmaring *ring, struct b43_dmadesc_generic *desc, struct b43_dmadesc_meta *meta, gfp_t gfp_flags) argument
649 alloc_initial_descbuffers(struct b43_dmaring *ring) argument
685 dmacontroller_setup(struct b43_dmaring *ring) argument
766 dmacontroller_cleanup(struct b43_dmaring *ring) argument
787 free_all_descbuffers(struct b43_dmaring *ring) argument
863 struct b43_dmaring *ring; local
992 b43_destroy_dmaring(struct b43_dmaring *ring, const char *ringname) argument
1198 generate_cookie(struct b43_dmaring *ring, int slot) argument
1222 struct b43_dmaring *ring = NULL; local
1251 dma_tx_fragment(struct b43_dmaring *ring, struct sk_buff *skb) argument
1350 should_inject_overflow(struct b43_dmaring *ring) argument
1375 struct b43_dmaring *ring; local
1404 struct b43_dmaring *ring; local
1480 struct b43_dmaring *ring; local
1642 dma_rx(struct b43_dmaring *ring, int *slot) argument
1728 b43_dma_handle_rx_overflow(struct b43_dmaring *ring) argument
1747 b43_dma_rx(struct b43_dmaring *ring) argument
1767 b43_dma_tx_suspend_ring(struct b43_dmaring *ring) argument
1773 b43_dma_tx_resume_ring(struct b43_dmaring *ring) argument
[all...]
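Note: op32_fill_descriptor() in b43's dma.c splits the DMA address into a low word plus a small "address extension" field (b43_dma_address() with B43_DMA_ADDR_LOW and B43_DMA_ADDR_EXT), because the 32-bit descriptor format cannot hold a full bus address on its own. A generic illustration of carving and reassembling such a split, with invented field widths:

#include <stdint.h>
#include <stdio.h>

/* Invented widths for illustration: 30 address bits in the descriptor word,
 * 2 "address extension" bits carried elsewhere in the control word. */
#define ADDR_LOW_MASK  0x3fffffffu
#define ADDR_EXT_SHIFT 30

int main(void)
{
        uint32_t busaddr = 0xc5671000u;          /* example DMA bus address */

        uint32_t lo  = busaddr & ADDR_LOW_MASK;
        uint32_t ext = busaddr >> ADDR_EXT_SHIFT;

        uint32_t back = (ext << ADDR_EXT_SHIFT) | lo;
        printf("lo=0x%08x ext=%u reassembled=0x%08x\n", lo, ext, back);
        return 0;
}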
/drivers/net/ethernet/mellanox/mlx4/
en_tx.c  53 struct mlx4_en_tx_ring *ring; local
57 ring = kzalloc_node(sizeof(*ring), GFP_KERNEL, node);
58 if (!ring) {
59 ring = kzalloc(sizeof(*ring), GFP_KERNEL);
60 if (!ring) {
61 en_err(priv, "Failed allocating TX ring\n");
66 ring->size = size;
67 ring
166 struct mlx4_en_tx_ring *ring = *pring; local
183 mlx4_en_activate_tx_ring(struct mlx4_en_priv *priv, struct mlx4_en_tx_ring *ring, int cq, int user_prio) argument
215 mlx4_en_deactivate_tx_ring(struct mlx4_en_priv *priv, struct mlx4_en_tx_ring *ring) argument
224 mlx4_en_stamp_wqe(struct mlx4_en_priv *priv, struct mlx4_en_tx_ring *ring, int index, u8 owner) argument
258 mlx4_en_free_tx_desc(struct mlx4_en_priv *priv, struct mlx4_en_tx_ring *ring, int index, u8 owner, u64 timestamp) argument
336 mlx4_en_free_tx_buf(struct net_device *dev, struct mlx4_en_tx_ring *ring) argument
373 struct mlx4_en_tx_ring *ring = priv->tx_ring[cq->ring]; local
506 mlx4_en_bounce_to_desc(struct mlx4_en_priv *priv, struct mlx4_en_tx_ring *ring, u32 index, unsigned int desc_size) argument
694 struct mlx4_en_tx_ring *ring; local
[all...]
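Note: the mlx4 TX ring setup above first tries a node-local allocation (kzalloc_node()) and, if that fails, falls back to an ordinary allocation so the ring still comes up when NUMA-local memory is tight. The same try-local-then-anywhere pattern, sketched in userspace with a stand-in allocator (the stub below always fails, purely to exercise the fallback path):

#include <stdio.h>
#include <stdlib.h>

/* Stand-in for a node-local allocator that may fail; kzalloc_node()
 * plays this role in the driver. Always fails here for demonstration. */
static void *alloc_on_node(size_t size, int node)
{
        (void)size;
        (void)node;
        return NULL;
}

struct tx_ring { int size; };

static struct tx_ring *create_tx_ring(int size, int node)
{
        struct tx_ring *ring = alloc_on_node(sizeof(*ring), node);
        if (!ring) {
                /* Fall back to "any node" rather than failing outright. */
                ring = calloc(1, sizeof(*ring));
                if (!ring) {
                        fprintf(stderr, "Failed allocating TX ring\n");
                        return NULL;
                }
        }
        ring->size = size;
        return ring;
}

int main(void)
{
        struct tx_ring *ring = create_tx_ring(1024, 0);
        printf("ring %p size %d\n", (void *)ring, ring ? ring->size : -1);
        free(ring);
        return 0;
}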
/drivers/crypto/qat/qat_common/
adf_transport.c  80 static int adf_reserve_ring(struct adf_etr_bank_data *bank, uint32_t ring) argument
83 if (bank->ring_mask & (1 << ring)) {
87 bank->ring_mask |= (1 << ring);
92 static void adf_unreserve_ring(struct adf_etr_bank_data *bank, uint32_t ring) argument
95 bank->ring_mask &= ~(1 << ring);
99 static void adf_enable_ring_irq(struct adf_etr_bank_data *bank, uint32_t ring) argument
102 bank->irq_mask |= (1 << ring);
109 static void adf_disable_ring_irq(struct adf_etr_bank_data *bank, uint32_t ring) argument
112 bank->irq_mask &= ~(1 << ring);
117 int adf_send_message(struct adf_etr_ring_data *ring, uint32_ argument
137 adf_handle_response(struct adf_etr_ring_data *ring) argument
160 adf_configure_tx_ring(struct adf_etr_ring_data *ring) argument
168 adf_configure_rx_ring(struct adf_etr_ring_data *ring) argument
179 adf_init_ring(struct adf_etr_ring_data *ring) argument
217 adf_cleanup_ring(struct adf_etr_ring_data *ring) argument
239 struct adf_etr_ring_data *ring; local
307 adf_remove_ring(struct adf_etr_ring_data *ring) argument
397 struct adf_etr_ring_data *ring; local
526 struct adf_etr_ring_data *ring = &bank->rings[i]; local
[all...]
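Note: adf_reserve_ring()/adf_unreserve_ring() and the IRQ enable/disable helpers above all track per-bank state as one bit per ring in a mask. A compact sketch of that bookkeeping (hypothetical bank struct; the real code also takes the bank lock and writes the corresponding CSRs):

#include <stdint.h>
#include <stdio.h>

struct toy_bank {
        uint32_t ring_mask;   /* 1 bit per ring: reserved or free */
        uint32_t irq_mask;    /* 1 bit per ring: interrupt enabled */
};

static int reserve_ring(struct toy_bank *bank, uint32_t ring)
{
        if (bank->ring_mask & (1u << ring))
                return -1;                    /* already in use */
        bank->ring_mask |= (1u << ring);
        return 0;
}

static void unreserve_ring(struct toy_bank *bank, uint32_t ring)
{
        bank->ring_mask &= ~(1u << ring);
}

static void enable_ring_irq(struct toy_bank *bank, uint32_t ring)
{
        bank->irq_mask |= (1u << ring);
}

int main(void)
{
        struct toy_bank bank = { 0, 0 };
        printf("first reserve: %d\n", reserve_ring(&bank, 3));   /* 0 */
        printf("double reserve: %d\n", reserve_ring(&bank, 3));  /* -1 */
        enable_ring_irq(&bank, 3);
        unreserve_ring(&bank, 3);
        printf("masks: ring=0x%x irq=0x%x\n", bank.ring_mask, bank.irq_mask);
        return 0;
}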
adf_transport_debug.c  59 struct adf_etr_ring_data *ring = sfile->private; local
65 if (*pos >= (ADF_SIZE_TO_RING_SIZE_IN_BYTES(ring->ring_size) /
66 ADF_MSG_SIZE_TO_BYTES(ring->msg_size)))
69 return ring->base_addr +
70 (ADF_MSG_SIZE_TO_BYTES(ring->msg_size) * (*pos)++);
75 struct adf_etr_ring_data *ring = sfile->private; local
77 if (*pos >= (ADF_SIZE_TO_RING_SIZE_IN_BYTES(ring->ring_size) /
78 ADF_MSG_SIZE_TO_BYTES(ring->msg_size)))
81 return ring->base_addr +
82 (ADF_MSG_SIZE_TO_BYTES(ring
87 struct adf_etr_ring_data *ring = sfile->private; local
160 adf_ring_debugfs_add(struct adf_etr_ring_data *ring, const char *name) argument
185 adf_ring_debugfs_rm(struct adf_etr_ring_data *ring) argument
223 struct adf_etr_ring_data *ring = &bank->rings[ring_id]; local
[all...]
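Note: the debugfs iterator above turns a seq_file position into an address inside the ring: the number of messages is the ring size in bytes divided by the message size in bytes, and entry *pos lives at base_addr + msg_bytes * pos. The same arithmetic with plain byte sizes in place of the ADF_*_TO_BYTES macros:

#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

/* Return a pointer to message number pos, or NULL once pos runs past the
 * end of the ring; mirrors the start/next bounds check shown above. */
static void *ring_msg(void *base_addr, size_t ring_bytes, size_t msg_bytes,
                      size_t pos)
{
        if (pos >= ring_bytes / msg_bytes)
                return NULL;
        return (uint8_t *)base_addr + msg_bytes * pos;
}

int main(void)
{
        static uint8_t ring[4096];
        void *m = ring_msg(ring, sizeof(ring), 64, 10);
        printf("message 10 at offset %td\n", (uint8_t *)m - ring);      /* 640 */
        printf("past end: %p\n", ring_msg(ring, sizeof(ring), 64, 64)); /* NULL */
        return 0;
}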
/drivers/net/ethernet/amd/xgbe/
xgbe-desc.c  123 struct xgbe_ring *ring)
128 if (!ring)
131 if (ring->rdata) {
132 for (i = 0; i < ring->rdesc_count; i++) {
133 rdata = XGBE_GET_DESC_DATA(ring, i);
137 kfree(ring->rdata);
138 ring->rdata = NULL;
141 if (ring->rdesc) {
144 ring->rdesc_count),
145 ring
122 xgbe_free_ring(struct xgbe_prv_data *pdata, struct xgbe_ring *ring) argument
166 xgbe_init_ring(struct xgbe_prv_data *pdata, struct xgbe_ring *ring, unsigned int rdesc_count) argument
240 struct xgbe_ring *ring; local
281 struct xgbe_ring *ring; local
375 struct xgbe_ring *ring = channel->tx_ring; local
509 struct xgbe_ring *ring = channel->rx_ring; local
[all...]
