/* intel_ringbuffer.h — revision 1ec14ad3132702694f2e1a90b30641cf111183b9 */
1#ifndef _INTEL_RINGBUFFER_H_
2#define _INTEL_RINGBUFFER_H_
3
/*
 * Ring indices; also used to size per-ring arrays such as
 * intel_ring_buffer.sync_seqno[].  I915_NUM_RINGS must stay last so it
 * evaluates to the number of rings.
 *
 * NOTE(review): intel_ring_sync_index() below does pointer arithmetic
 * between rings, which assumes the rings live in one contiguous array
 * indexed by these values — confirm against the dev_priv definition.
 */
enum {
	RCS = 0x0,	/* render command streamer */
	VCS,		/* video (BSD) command streamer */
	BCS,		/* blitter command streamer */
	I915_NUM_RINGS,
};

11struct  intel_hw_status_page {
12	u32	__iomem	*page_addr;
13	unsigned int	gfx_addr;
14	struct		drm_i915_gem_object *obj;
15};
16
/*
 * Per-ring register accessors, offset from the ring's mmio_base.
 *
 * NOTE: I915_RING_READ (and therefore all the READ macros below) expands
 * to i915_safe_read(dev_priv, ...) and so requires a variable named
 * dev_priv to be in scope at the call site.
 */
#define I915_RING_READ(reg) i915_safe_read(dev_priv, reg)

#define I915_READ_TAIL(ring) I915_RING_READ(RING_TAIL(ring->mmio_base))
#define I915_WRITE_TAIL(ring, val) I915_WRITE(RING_TAIL(ring->mmio_base), val)

#define I915_READ_START(ring) I915_RING_READ(RING_START(ring->mmio_base))
#define I915_WRITE_START(ring, val) I915_WRITE(RING_START(ring->mmio_base), val)

#define I915_READ_HEAD(ring)  I915_RING_READ(RING_HEAD(ring->mmio_base))
#define I915_WRITE_HEAD(ring, val) I915_WRITE(RING_HEAD(ring->mmio_base), val)

#define I915_READ_CTL(ring) I915_RING_READ(RING_CTL(ring->mmio_base))
#define I915_WRITE_CTL(ring, val) I915_WRITE(RING_CTL(ring->mmio_base), val)

#define I915_READ_NOPID(ring) I915_RING_READ(RING_NOPID(ring->mmio_base))
#define I915_READ_SYNC_0(ring) I915_RING_READ(RING_SYNC_0(ring->mmio_base))
#define I915_READ_SYNC_1(ring) I915_RING_READ(RING_SYNC_1(ring->mmio_base))

35struct  intel_ring_buffer {
36	const char	*name;
37	enum intel_ring_id {
38		RING_RENDER = 0x1,
39		RING_BSD = 0x2,
40		RING_BLT = 0x4,
41	} id;
42	u32		mmio_base;
43	void		*virtual_start;
44	struct		drm_device *dev;
45	struct		drm_i915_gem_object *obj;
46
47	unsigned int	head;
48	unsigned int	tail;
49	int		space;
50	int		size;
51	struct intel_hw_status_page status_page;
52
53	u32		irq_seqno;		/* last seq seem at irq time */
54	u32		waiting_seqno;
55	u32		sync_seqno[I915_NUM_RINGS-1];
56	u32		irq_refcount;
57	void		(*irq_get)(struct intel_ring_buffer *ring);
58	void		(*irq_put)(struct intel_ring_buffer *ring);
59
60	int		(*init)(struct intel_ring_buffer *ring);
61
62	void		(*write_tail)(struct intel_ring_buffer *ring,
63				      u32 value);
64	void		(*flush)(struct intel_ring_buffer *ring,
65				 u32	invalidate_domains,
66				 u32	flush_domains);
67	int		(*add_request)(struct intel_ring_buffer *ring,
68				       u32 *seqno);
69	u32		(*get_seqno)(struct intel_ring_buffer *ring);
70	int		(*dispatch_execbuffer)(struct intel_ring_buffer *ring,
71					       u32 offset, u32 length);
72	void		(*cleanup)(struct intel_ring_buffer *ring);
73
74	/**
75	 * List of objects currently involved in rendering from the
76	 * ringbuffer.
77	 *
78	 * Includes buffers having the contents of their GPU caches
79	 * flushed, not necessarily primitives.  last_rendering_seqno
80	 * represents when the rendering involved will be completed.
81	 *
82	 * A reference is held on the buffer while on this list.
83	 */
84	struct list_head active_list;
85
86	/**
87	 * List of breadcrumbs associated with GPU requests currently
88	 * outstanding.
89	 */
90	struct list_head request_list;
91
92	/**
93	 * List of objects currently pending a GPU write flush.
94	 *
95	 * All elements on this list will belong to either the
96	 * active_list or flushing_list, last_rendering_seqno can
97	 * be used to differentiate between the two elements.
98	 */
99	struct list_head gpu_write_list;
100
101	/**
102	 * Do we have some not yet emitted requests outstanding?
103	 */
104	u32 outstanding_lazy_request;
105
106	wait_queue_head_t irq_queue;
107	drm_local_map_t map;
108
109	void *private;
110};
111
112static inline u32
113intel_ring_sync_index(struct intel_ring_buffer *ring,
114		      struct intel_ring_buffer *other)
115{
116	int idx;
117
118	/*
119	 * cs -> 0 = vcs, 1 = bcs
120	 * vcs -> 0 = bcs, 1 = cs,
121	 * bcs -> 0 = cs, 1 = vcs.
122	 */
123
124	idx = (other - ring) - 1;
125	if (idx < 0)
126		idx += I915_NUM_RINGS;
127
128	return idx;
129}
130
131static inline u32
132intel_read_status_page(struct intel_ring_buffer *ring,
133		       int reg)
134{
135	return ioread32(ring->status_page.page_addr + reg);
136}
137
138void intel_cleanup_ring_buffer(struct intel_ring_buffer *ring);
139int __must_check intel_wait_ring_buffer(struct intel_ring_buffer *ring, int n);
140int __must_check intel_ring_begin(struct intel_ring_buffer *ring, int n);
141
142static inline void intel_ring_emit(struct intel_ring_buffer *ring,
143				   u32 data)
144{
145	iowrite32(data, ring->virtual_start + ring->tail);
146	ring->tail += 4;
147}
148
149void intel_ring_advance(struct intel_ring_buffer *ring);
150
151u32 intel_ring_get_seqno(struct intel_ring_buffer *ring);
152int intel_ring_sync(struct intel_ring_buffer *ring,
153		    struct intel_ring_buffer *to,
154		    u32 seqno);
155
156int intel_init_render_ring_buffer(struct drm_device *dev);
157int intel_init_bsd_ring_buffer(struct drm_device *dev);
158int intel_init_blt_ring_buffer(struct drm_device *dev);
159
160u32 intel_ring_get_active_head(struct intel_ring_buffer *ring);
161void intel_ring_setup_status_page(struct intel_ring_buffer *ring);
162
163#endif /* _INTEL_RINGBUFFER_H_ */
164