intel_batchbuffer.h revision 3faf56ffbdebef04345ebb1fa8e0d50b4beeedb2
#ifndef INTEL_BATCHBUFFER_H
#define INTEL_BATCHBUFFER_H

#include "main/mtypes.h"

#include "intel_context.h"
#include "intel_bufmgr.h"
#include "intel_reg.h"

#define BATCH_RESERVED 16

void intel_batchbuffer_init(struct intel_context *intel);
void intel_batchbuffer_reset(struct intel_context *intel);
void intel_batchbuffer_free(struct intel_context *intel);
void intel_batchbuffer_save_state(struct intel_context *intel);
void intel_batchbuffer_reset_to_saved(struct intel_context *intel);

void _intel_batchbuffer_flush(struct intel_context *intel,
                              const char *file, int line);

#define intel_batchbuffer_flush(intel) \
        _intel_batchbuffer_flush(intel, __FILE__, __LINE__)
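
/* Illustrative note (not from the original header): because the wrapper macro
 * captures __FILE__ and __LINE__, a call such as
 *
 *    intel_batchbuffer_flush(intel);
 *
 * expands to _intel_batchbuffer_flush(intel, "caller.c", <line>), so
 * flush-time diagnostics can point back at the call site.
 */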
/* Unlike bmBufferData, this currently requires that the buffer be mapped.
 * Consider it a convenience function wrapping multiple
 * intel_batchbuffer_emit_dword() calls.
 */
void intel_batchbuffer_data(struct intel_context *intel,
                            const void *data, GLuint bytes, bool is_blit);
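
/* Illustrative sketch (not from the original header): a caller that has
 * already assembled a few dwords can hand them over in one call instead of
 * emitting them one at a time; the payload below is a hypothetical example.
 *
 *    uint32_t payload[2] = { 0, 0 };
 *    intel_batchbuffer_data(intel, payload, sizeof(payload), false);
 */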

bool intel_batchbuffer_emit_reloc(struct intel_context *intel,
                                  drm_intel_bo *buffer,
                                  uint32_t read_domains,
                                  uint32_t write_domain,
                                  uint32_t offset);
bool intel_batchbuffer_emit_reloc_fenced(struct intel_context *intel,
                                         drm_intel_bo *buffer,
                                         uint32_t read_domains,
                                         uint32_t write_domain,
                                         uint32_t offset);
void intel_batchbuffer_emit_mi_flush(struct intel_context *intel);
void intel_emit_post_sync_nonzero_flush(struct intel_context *intel);
void intel_emit_depth_stall_flushes(struct intel_context *intel);

/* Reinterpret the bits of a float as a uint32_t (type punning via a union). */
static INLINE uint32_t float_as_int(float f)
{
   union {
      float f;
      uint32_t d;
   } fi;

   fi.f = f;
   return fi.d;
}

/* Inline functions - might actually be better off with these
 * non-inlined.  Certainly better off switching all command packets to
 * be passed as structs rather than dwords, but that's a little bit of
 * work...
 */

/* Bytes of command space still available in the batch: everything between
 * the dwords already used and the on-batch state area, minus the reserved
 * space ('used' counts dwords, hence the multiply by 4).
 */
static INLINE unsigned
intel_batchbuffer_space(struct intel_context *intel)
{
   return (intel->batch.state_batch_offset - intel->batch.reserved_space)
      - intel->batch.used*4;
}

static INLINE void
intel_batchbuffer_emit_dword(struct intel_context *intel, GLuint dword)
{
#ifdef DEBUG
   assert(intel_batchbuffer_space(intel) >= 4);
#endif
   intel->batch.map[intel->batch.used++] = dword;
}

static INLINE void
intel_batchbuffer_emit_float(struct intel_context *intel, float f)
{
   intel_batchbuffer_emit_dword(intel, float_as_int(f));
}

/* Make sure at least 'sz' bytes are available, flushing the batch if not.
 * On gen6+ the blitter runs on its own ring, so switching between blit and
 * non-blit work also flushes whatever has been queued so far.
 */
static INLINE void
intel_batchbuffer_require_space(struct intel_context *intel,
                                GLuint sz, int is_blit)
{
   if (intel->gen >= 6 &&
       intel->batch.is_blit != is_blit && intel->batch.used) {
      intel_batchbuffer_flush(intel);
   }

   intel->batch.is_blit = is_blit;

#ifdef DEBUG
   assert(sz < sizeof(intel->batch.map) - BATCH_RESERVED);
#endif
   if (intel_batchbuffer_space(intel) < sz)
      intel_batchbuffer_flush(intel);
}

/* Start a command packet of 'n' dwords; in DEBUG builds the matching
 * intel_batchbuffer_advance() verifies that exactly 'n' dwords were emitted.
 */
static INLINE void
intel_batchbuffer_begin(struct intel_context *intel, int n, bool is_blit)
{
   intel_batchbuffer_require_space(intel, n * 4, is_blit);

   intel->batch.emit = intel->batch.used;
#ifdef DEBUG
   intel->batch.total = n;
#endif
}

static INLINE void
intel_batchbuffer_advance(struct intel_context *intel)
{
#ifdef DEBUG
   struct intel_batchbuffer *batch = &intel->batch;
   unsigned int _n = batch->used - batch->emit;
   assert(batch->total != 0);
   if (_n != batch->total) {
      fprintf(stderr, "ADVANCE_BATCH: %d of %d dwords emitted\n",
              _n, batch->total);
      abort();
   }
   batch->total = 0;
#endif
}

void intel_batchbuffer_cached_advance(struct intel_context *intel);

/* Here are the crusty old macros, to be removed.  They assume a local
 * variable named 'intel' is in scope:
 */
#define BATCH_LOCALS

#define BEGIN_BATCH(n) intel_batchbuffer_begin(intel, n, false)
#define BEGIN_BATCH_BLT(n) intel_batchbuffer_begin(intel, n, true)
#define OUT_BATCH(d) intel_batchbuffer_emit_dword(intel, d)
#define OUT_BATCH_F(f) intel_batchbuffer_emit_float(intel, f)
#define OUT_RELOC(buf, read_domains, write_domain, delta) do {         \
   intel_batchbuffer_emit_reloc(intel, buf,                            \
                                read_domains, write_domain, delta);    \
} while (0)
#define OUT_RELOC_FENCED(buf, read_domains, write_domain, delta) do {  \
   intel_batchbuffer_emit_reloc_fenced(intel, buf,                     \
                                       read_domains, write_domain, delta); \
} while (0)

#define ADVANCE_BATCH() intel_batchbuffer_advance(intel);
#define CACHED_BATCH() intel_batchbuffer_cached_advance(intel);
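
/* Illustrative usage sketch (not part of the original header): the dword
 * count passed to BEGIN_BATCH() must match the number of OUT_BATCH()-style
 * writes that follow before ADVANCE_BATCH().  The names below are
 * hypothetical placeholders, not real command opcodes:
 *
 *    BEGIN_BATCH(3);
 *    OUT_BATCH(example_cmd_dword);
 *    OUT_BATCH(example_operand0);
 *    OUT_BATCH(example_operand1);
 *    ADVANCE_BATCH();
 */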

#endif