u_inlines.h revision eafb7f234d11a290b00dcaf5492b9bdad1cf5148
/**************************************************************************
 *
 * Copyright 2007 Tungsten Graphics, Inc., Cedar Park, Texas.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/

#ifndef U_INLINES_H
#define U_INLINES_H

#include "pipe/p_context.h"
#include "pipe/p_defines.h"
#include "pipe/p_state.h"
#include "pipe/p_screen.h"
#include "util/u_debug.h"
#include "util/u_debug_describe.h"
#include "util/u_debug_refcnt.h"
#include "util/u_atomic.h"
#include "util/u_box.h"
#include "util/u_math.h"


#ifdef __cplusplus
extern "C" {
#endif


/*
 * Reference counting helper functions.
 */


static INLINE void
pipe_reference_init(struct pipe_reference *reference, unsigned count)
{
   p_atomic_set(&reference->count, count);
}

static INLINE boolean
pipe_is_referenced(struct pipe_reference *reference)
{
   return p_atomic_read(&reference->count) != 0;
}

/**
 * Update reference counting.
 * The old thing pointed to, if any, will be unreferenced.
 * Both 'ptr' and 'reference' may be NULL.
 * \return TRUE if the object's refcount hits zero and should be destroyed.
 */
static INLINE boolean
pipe_reference_described(struct pipe_reference *ptr,
                         struct pipe_reference *reference,
                         debug_reference_descriptor get_desc)
{
   boolean destroy = FALSE;

   if (ptr != reference) {
      /* bump the reference.count first */
      if (reference) {
         assert(pipe_is_referenced(reference));
         p_atomic_inc(&reference->count);
         debug_reference(reference, get_desc, 1);
      }

      if (ptr) {
         assert(pipe_is_referenced(ptr));
         if (p_atomic_dec_zero(&ptr->count)) {
            destroy = TRUE;
         }
         debug_reference(ptr, get_desc, -1);
      }
   }

   return destroy;
}

static INLINE boolean
pipe_reference(struct pipe_reference *ptr, struct pipe_reference *reference)
{
   return pipe_reference_described(ptr, reference,
                                   (debug_reference_descriptor)debug_describe_reference);
}

static INLINE void
pipe_surface_reference(struct pipe_surface **ptr, struct pipe_surface *surf)
{
   struct pipe_surface *old_surf = *ptr;

   if (pipe_reference_described(&(*ptr)->reference, &surf->reference,
                                (debug_reference_descriptor)debug_describe_surface))
      old_surf->context->surface_destroy(old_surf->context, old_surf);
   *ptr = surf;
}

static INLINE void
pipe_resource_reference(struct pipe_resource **ptr, struct pipe_resource *tex)
{
   struct pipe_resource *old_tex = *ptr;

   if (pipe_reference_described(&(*ptr)->reference, &tex->reference,
                                (debug_reference_descriptor)debug_describe_resource))
      old_tex->screen->resource_destroy(old_tex->screen, old_tex);
   *ptr = tex;
}

static INLINE void
pipe_sampler_view_reference(struct pipe_sampler_view **ptr, struct pipe_sampler_view *view)
{
   struct pipe_sampler_view *old_view = *ptr;

   if (pipe_reference_described(&(*ptr)->reference, &view->reference,
                                (debug_reference_descriptor)debug_describe_sampler_view))
      old_view->context->sampler_view_destroy(old_view->context, old_view);
   *ptr = view;
}
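
/*
 * Usage sketch (illustrative addition, not part of the upstream header):
 * the pipe_*_reference() helpers both assign the pointer and keep the
 * reference counts consistent, destroying the old object once its count
 * reaches zero.  A stored reference is typically taken and later released
 * like this, where 'stored' and 'new_res' are hypothetical variables:
 *
 *    struct pipe_resource *stored = NULL;
 *
 *    pipe_resource_reference(&stored, new_res);   (takes a reference)
 *    ...
 *    pipe_resource_reference(&stored, NULL);      (drops it again)
 */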

static INLINE void
pipe_surface_reset(struct pipe_context *ctx, struct pipe_surface* ps,
                   struct pipe_resource *pt, unsigned level, unsigned layer,
                   unsigned flags)
{
   pipe_resource_reference(&ps->texture, pt);
   ps->format = pt->format;
   ps->width = u_minify(pt->width0, level);
   ps->height = u_minify(pt->height0, level);
   ps->usage = flags;
   ps->u.tex.level = level;
   ps->u.tex.first_layer = ps->u.tex.last_layer = layer;
   ps->context = ctx;
}

static INLINE void
pipe_surface_init(struct pipe_context *ctx, struct pipe_surface* ps,
                  struct pipe_resource *pt, unsigned level, unsigned layer,
                  unsigned flags)
{
   ps->texture = 0;
   pipe_reference_init(&ps->reference, 1);
   pipe_surface_reset(ctx, ps, pt, level, layer, flags);
}

/* Return true if the surfaces are equal. */
static INLINE boolean
pipe_surface_equal(struct pipe_surface *s1, struct pipe_surface *s2)
{
   return s1->texture == s2->texture &&
          s1->format == s2->format &&
          (s1->texture->target != PIPE_BUFFER ||
           (s1->u.buf.first_element == s2->u.buf.first_element &&
            s1->u.buf.last_element == s2->u.buf.last_element)) &&
          (s1->texture->target == PIPE_BUFFER ||
           (s1->u.tex.level == s2->u.tex.level &&
            s1->u.tex.first_layer == s2->u.tex.first_layer &&
            s1->u.tex.last_layer == s2->u.tex.last_layer));
}

/*
 * Convenience wrappers for screen buffer functions.
 */

static INLINE struct pipe_resource *
pipe_buffer_create( struct pipe_screen *screen,
                    unsigned bind,
                    unsigned usage,
                    unsigned size )
{
   struct pipe_resource buffer;
   memset(&buffer, 0, sizeof buffer);
   buffer.target = PIPE_BUFFER;
   buffer.format = PIPE_FORMAT_R8_UNORM; /* want TYPELESS or similar */
   buffer.bind = bind;
   buffer.usage = usage;
   buffer.flags = 0;
   buffer.width0 = size;
   buffer.height0 = 1;
   buffer.depth0 = 1;
   buffer.array_size = 1;
   return screen->resource_create(screen, &buffer);
}
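
/*
 * Usage sketch (illustrative addition, not part of the upstream header):
 * creating a buffer of 'num_bytes' bytes for vertex data, assuming the
 * usual PIPE_BIND_VERTEX_BUFFER / PIPE_USAGE_DEFAULT flags from
 * p_defines.h:
 *
 *    struct pipe_resource *vbuf =
 *       pipe_buffer_create(screen, PIPE_BIND_VERTEX_BUFFER,
 *                          PIPE_USAGE_DEFAULT, num_bytes);
 */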


static INLINE struct pipe_resource *
pipe_user_buffer_create( struct pipe_screen *screen, void *ptr, unsigned size,
                         unsigned usage )
{
   return screen->user_buffer_create(screen, ptr, size, usage);
}

static INLINE void *
pipe_buffer_map_range(struct pipe_context *pipe,
                      struct pipe_resource *buffer,
                      unsigned offset,
                      unsigned length,
                      unsigned usage,
                      struct pipe_transfer **transfer)
{
   struct pipe_box box;
   void *map;

   assert(offset < buffer->width0);
   assert(offset + length <= buffer->width0);
   assert(length);

   u_box_1d(offset, length, &box);

   *transfer = pipe->get_transfer( pipe,
                                   buffer,
                                   0,
                                   usage,
                                   &box);

   if (*transfer == NULL)
      return NULL;

   map = pipe->transfer_map( pipe, *transfer );
   if (map == NULL) {
      pipe->transfer_destroy( pipe, *transfer );
      *transfer = NULL;
      return NULL;
   }

   /* Match the old screen->buffer_map_range() behaviour: return a pointer
    * to where the beginning of the buffer would be.
    */
   return (void *)((char *)map - offset);
}


static INLINE void *
pipe_buffer_map(struct pipe_context *pipe,
                struct pipe_resource *buffer,
                unsigned usage,
                struct pipe_transfer **transfer)
{
   return pipe_buffer_map_range(pipe, buffer, 0, buffer->width0, usage, transfer);
}


static INLINE void
pipe_buffer_unmap(struct pipe_context *pipe,
                  struct pipe_transfer *transfer)
{
   if (transfer) {
      pipe->transfer_unmap(pipe, transfer);
      pipe->transfer_destroy(pipe, transfer);
   }
}
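
/*
 * Usage sketch (illustrative addition, not part of the upstream header):
 * updating a sub-range of a buffer through an explicit map/unmap pair.
 * pipe_buffer_map_range() returns a pointer adjusted back to the start of
 * the buffer, so it is indexed with the absolute 'offset' again ('buf',
 * 'offset', 'size' and 'data' are placeholders):
 *
 *    struct pipe_transfer *xfer;
 *    char *map = pipe_buffer_map_range(pipe, buf, offset, size,
 *                                      PIPE_TRANSFER_WRITE, &xfer);
 *    if (map) {
 *       memcpy(map + offset, data, size);
 *       pipe_buffer_unmap(pipe, xfer);
 *    }
 */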

static INLINE void
pipe_buffer_flush_mapped_range(struct pipe_context *pipe,
                               struct pipe_transfer *transfer,
                               unsigned offset,
                               unsigned length)
{
   struct pipe_box box;
   int transfer_offset;

   assert(length);
   assert(transfer->box.x <= offset);
   assert(offset + length <= transfer->box.x + transfer->box.width);

   /* Match the old screen->buffer_flush_mapped_range() behaviour, where the
    * offset parameter is relative to the start of the buffer, not the
    * mapped range.
    */
   transfer_offset = offset - transfer->box.x;

   u_box_1d(transfer_offset, length, &box);

   pipe->transfer_flush_region(pipe, transfer, &box);
}
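
/*
 * Usage sketch (illustrative addition, not part of the upstream header):
 * pipe_buffer_flush_mapped_range() is typically used together with
 * PIPE_TRANSFER_FLUSH_EXPLICIT, so that only the sub-ranges actually
 * written get flushed.  Note that 'offset' is relative to the buffer, not
 * to the mapped range ('buf', 'offset', 'size', 'data' are placeholders):
 *
 *    char *map = pipe_buffer_map_range(pipe, buf, 0, buf->width0,
 *                                      PIPE_TRANSFER_WRITE |
 *                                      PIPE_TRANSFER_FLUSH_EXPLICIT,
 *                                      &xfer);
 *    if (map) {
 *       memcpy(map + offset, data, size);
 *       pipe_buffer_flush_mapped_range(pipe, xfer, offset, size);
 *       pipe_buffer_unmap(pipe, xfer);
 *    }
 */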

static INLINE void
pipe_buffer_write(struct pipe_context *pipe,
                  struct pipe_resource *buf,
                  unsigned offset,
                  unsigned size,
                  const void *data)
{
   struct pipe_box box;

   u_box_1d(offset, size, &box);

   pipe->transfer_inline_write( pipe,
                                buf,
                                0,
                                PIPE_TRANSFER_WRITE,
                                &box,
                                data,
                                size,
                                0);
}

/**
 * Special case for writing non-overlapping ranges.
 *
 * We can avoid GPU/CPU synchronization when writing a range that has never
 * been written before.
 */
static INLINE void
pipe_buffer_write_nooverlap(struct pipe_context *pipe,
                            struct pipe_resource *buf,
                            unsigned offset, unsigned size,
                            const void *data)
{
   struct pipe_box box;

   u_box_1d(offset, size, &box);

   pipe->transfer_inline_write(pipe,
                               buf,
                               0,
                               (PIPE_TRANSFER_WRITE |
                                PIPE_TRANSFER_NOOVERWRITE),
                               &box,
                               data,
                               0, 0);
}
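
/*
 * Usage sketch (illustrative addition, not part of the upstream header):
 * a typical use is appending to a streaming buffer at a fresh, previously
 * unwritten offset, so the driver need not stall on prior GPU use of the
 * buffer ('stream_offset' is a hypothetical running offset):
 *
 *    pipe_buffer_write_nooverlap(pipe, buf, stream_offset, size, data);
 *    stream_offset += size;
 */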

static INLINE void
pipe_buffer_read(struct pipe_context *pipe,
                 struct pipe_resource *buf,
                 unsigned offset,
                 unsigned size,
                 void *data)
{
   struct pipe_transfer *src_transfer;
   ubyte *map;

   map = (ubyte *) pipe_buffer_map_range(pipe,
                                         buf,
                                         offset, size,
                                         PIPE_TRANSFER_READ,
                                         &src_transfer);

   if (map)
      memcpy(data, map + offset, size);

   pipe_buffer_unmap(pipe, src_transfer);
}

static INLINE struct pipe_transfer *
pipe_get_transfer( struct pipe_context *context,
                   struct pipe_resource *resource,
                   unsigned level, unsigned layer,
                   enum pipe_transfer_usage usage,
                   unsigned x, unsigned y,
                   unsigned w, unsigned h)
{
   struct pipe_box box;
   u_box_2d_zslice( x, y, layer, w, h, &box );
   return context->get_transfer( context,
                                 resource,
                                 level,
                                 usage,
                                 &box );
}

static INLINE void *
pipe_transfer_map( struct pipe_context *context,
                   struct pipe_transfer *transfer )
{
   return context->transfer_map( context, transfer );
}

static INLINE void
pipe_transfer_unmap( struct pipe_context *context,
                     struct pipe_transfer *transfer )
{
   context->transfer_unmap( context, transfer );
}


static INLINE void
pipe_transfer_destroy( struct pipe_context *context,
                       struct pipe_transfer *transfer )
{
   context->transfer_destroy(context, transfer);
}
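
/*
 * Usage sketch (illustrative addition, not part of the upstream header):
 * reading back a 2D region of level 0, layer 0 of a texture.  'tex', 'x',
 * 'y', 'w' and 'h' are placeholders; the row pitch of the mapped data is
 * given by the transfer's stride:
 *
 *    struct pipe_transfer *xfer =
 *       pipe_get_transfer(pipe, tex, 0, 0, PIPE_TRANSFER_READ, x, y, w, h);
 *    if (xfer) {
 *       ubyte *map = (ubyte *) pipe_transfer_map(pipe, xfer);
 *       if (map) {
 *          ... copy 'h' rows, each 'xfer->stride' bytes apart ...
 *          pipe_transfer_unmap(pipe, xfer);
 *       }
 *       pipe_transfer_destroy(pipe, xfer);
 *    }
 */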


static INLINE boolean util_get_offset(
   const struct pipe_rasterizer_state *templ,
   unsigned fill_mode)
{
   switch (fill_mode) {
   case PIPE_POLYGON_MODE_POINT:
      return templ->offset_point;
   case PIPE_POLYGON_MODE_LINE:
      return templ->offset_line;
   case PIPE_POLYGON_MODE_FILL:
      return templ->offset_tri;
   default:
      assert(0);
      return FALSE;
   }
}
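
/*
 * Usage sketch (illustrative addition, not part of the upstream header):
 * query whether polygon offset applies to a given fill mode when
 * translating rasterizer state, e.g.:
 *
 *    boolean use_offset = util_get_offset(&rasterizer, PIPE_POLYGON_MODE_LINE);
 */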

/**
 * This function is used to copy an array of pipe_vertex_buffer structures,
 * while properly referencing the pipe_vertex_buffer::buffer member.
 *
 * \sa util_copy_framebuffer_state
 */
static INLINE void util_copy_vertex_buffers(struct pipe_vertex_buffer *dst,
                                            unsigned *dst_count,
                                            const struct pipe_vertex_buffer *src,
                                            unsigned src_count)
{
   unsigned i;

   /* Reference the buffers of 'src' in 'dst'. */
   for (i = 0; i < src_count; i++) {
      pipe_resource_reference(&dst[i].buffer, src[i].buffer);
   }
   /* Unreference the rest of the buffers in 'dst'. */
   for (; i < *dst_count; i++) {
      pipe_resource_reference(&dst[i].buffer, NULL);
   }

   /* Update the size of 'dst' and copy over the other members
    * of pipe_vertex_buffer. */
   *dst_count = src_count;
   memcpy(dst, src, src_count * sizeof(struct pipe_vertex_buffer));
}
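
/*
 * Usage sketch (illustrative addition, not part of the upstream header):
 * a set_vertex_buffers() implementation typically keeps its own copy of
 * the bound buffers; 'state' is a hypothetical per-context structure whose
 * vertex_buffer array is at least 'count' elements long:
 *
 *    util_copy_vertex_buffers(state->vertex_buffer,
 *                             &state->nr_vertex_buffers,
 *                             buffers, count);
 */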

#ifdef __cplusplus
}
#endif

#endif /* U_INLINES_H */
