/* u_inlines.h revision e3256ccb045032960f099318938991392b896b44 */
1/************************************************************************** 2 * 3 * Copyright 2007 Tungsten Graphics, Inc., Cedar Park, Texas. 4 * All Rights Reserved. 5 * 6 * Permission is hereby granted, free of charge, to any person obtaining a 7 * copy of this software and associated documentation files (the 8 * "Software"), to deal in the Software without restriction, including 9 * without limitation the rights to use, copy, modify, merge, publish, 10 * distribute, sub license, and/or sell copies of the Software, and to 11 * permit persons to whom the Software is furnished to do so, subject to 12 * the following conditions: 13 * 14 * The above copyright notice and this permission notice (including the 15 * next paragraph) shall be included in all copies or substantial portions 16 * of the Software. 17 * 18 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS 19 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 20 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. 21 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR 22 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, 23 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 24 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 25 * 26 **************************************************************************/ 27 28#ifndef U_INLINES_H 29#define U_INLINES_H 30 31#include "pipe/p_context.h" 32#include "pipe/p_defines.h" 33#include "pipe/p_state.h" 34#include "pipe/p_screen.h" 35#include "util/u_debug.h" 36#include "util/u_debug_describe.h" 37#include "util/u_debug_refcnt.h" 38#include "util/u_atomic.h" 39#include "util/u_box.h" 40#include "util/u_math.h" 41 42 43#ifdef __cplusplus 44extern "C" { 45#endif 46 47 48/* 49 * Reference counting helper functions. 
50 */ 51 52 53static INLINE void 54pipe_reference_init(struct pipe_reference *reference, unsigned count) 55{ 56 p_atomic_set(&reference->count, count); 57} 58 59static INLINE boolean 60pipe_is_referenced(struct pipe_reference *reference) 61{ 62 return p_atomic_read(&reference->count) != 0; 63} 64 65/** 66 * Update reference counting. 67 * The old thing pointed to, if any, will be unreferenced. 68 * Both 'ptr' and 'reference' may be NULL. 69 * \return TRUE if the object's refcount hits zero and should be destroyed. 70 */ 71static INLINE boolean 72pipe_reference_described(struct pipe_reference *ptr, 73 struct pipe_reference *reference, 74 debug_reference_descriptor get_desc) 75{ 76 boolean destroy = FALSE; 77 78 if(ptr != reference) { 79 /* bump the reference.count first */ 80 if (reference) { 81 assert(pipe_is_referenced(reference)); 82 p_atomic_inc(&reference->count); 83 debug_reference(reference, get_desc, 1); 84 } 85 86 if (ptr) { 87 assert(pipe_is_referenced(ptr)); 88 if (p_atomic_dec_zero(&ptr->count)) { 89 destroy = TRUE; 90 } 91 debug_reference(ptr, get_desc, -1); 92 } 93 } 94 95 return destroy; 96} 97 98static INLINE boolean 99pipe_reference(struct pipe_reference *ptr, struct pipe_reference *reference) 100{ 101 return pipe_reference_described(ptr, reference, 102 (debug_reference_descriptor)debug_describe_reference); 103} 104 105static INLINE void 106pipe_surface_reference(struct pipe_surface **ptr, struct pipe_surface *surf) 107{ 108 struct pipe_surface *old_surf = *ptr; 109 110 if (pipe_reference_described(&(*ptr)->reference, &surf->reference, 111 (debug_reference_descriptor)debug_describe_surface)) 112 old_surf->context->surface_destroy(old_surf->context, old_surf); 113 *ptr = surf; 114} 115 116static INLINE void 117pipe_resource_reference(struct pipe_resource **ptr, struct pipe_resource *tex) 118{ 119 struct pipe_resource *old_tex = *ptr; 120 121 if (pipe_reference_described(&(*ptr)->reference, &tex->reference, 122 
(debug_reference_descriptor)debug_describe_resource)) 123 old_tex->screen->resource_destroy(old_tex->screen, old_tex); 124 *ptr = tex; 125} 126 127static INLINE void 128pipe_sampler_view_reference(struct pipe_sampler_view **ptr, struct pipe_sampler_view *view) 129{ 130 struct pipe_sampler_view *old_view = *ptr; 131 132 if (pipe_reference_described(&(*ptr)->reference, &view->reference, 133 (debug_reference_descriptor)debug_describe_sampler_view)) 134 old_view->context->sampler_view_destroy(old_view->context, old_view); 135 *ptr = view; 136} 137 138static INLINE void 139pipe_surface_reset(struct pipe_context *ctx, struct pipe_surface* ps, 140 struct pipe_resource *pt, unsigned level, unsigned layer, 141 unsigned flags) 142{ 143 pipe_resource_reference(&ps->texture, pt); 144 ps->format = pt->format; 145 ps->width = u_minify(pt->width0, level); 146 ps->height = u_minify(pt->height0, level); 147 ps->usage = flags; 148 ps->u.tex.level = level; 149 ps->u.tex.first_layer = ps->u.tex.last_layer = layer; 150 ps->context = ctx; 151} 152 153static INLINE void 154pipe_surface_init(struct pipe_context *ctx, struct pipe_surface* ps, 155 struct pipe_resource *pt, unsigned level, unsigned layer, 156 unsigned flags) 157{ 158 ps->texture = 0; 159 pipe_reference_init(&ps->reference, 1); 160 pipe_surface_reset(ctx, ps, pt, level, layer, flags); 161} 162 163/* 164 * Convenience wrappers for screen buffer functions. 
165 */ 166 167static INLINE struct pipe_resource * 168pipe_buffer_create( struct pipe_screen *screen, 169 unsigned bind, 170 unsigned size ) 171{ 172 struct pipe_resource buffer; 173 memset(&buffer, 0, sizeof buffer); 174 buffer.target = PIPE_BUFFER; 175 buffer.format = PIPE_FORMAT_R8_UNORM; /* want TYPELESS or similar */ 176 buffer.bind = bind; 177 buffer.usage = PIPE_USAGE_DEFAULT; 178 buffer.flags = 0; 179 buffer.width0 = size; 180 buffer.height0 = 1; 181 buffer.depth0 = 1; 182 buffer.array_size = 1; 183 return screen->resource_create(screen, &buffer); 184} 185 186 187static INLINE struct pipe_resource * 188pipe_user_buffer_create( struct pipe_screen *screen, void *ptr, unsigned size, 189 unsigned usage ) 190{ 191 return screen->user_buffer_create(screen, ptr, size, usage); 192} 193 194static INLINE void * 195pipe_buffer_map_range(struct pipe_context *pipe, 196 struct pipe_resource *buffer, 197 unsigned offset, 198 unsigned length, 199 unsigned usage, 200 struct pipe_transfer **transfer) 201{ 202 struct pipe_box box; 203 void *map; 204 205 assert(offset < buffer->width0); 206 assert(offset + length <= buffer->width0); 207 assert(length); 208 209 u_box_1d(offset, length, &box); 210 211 *transfer = pipe->get_transfer( pipe, 212 buffer, 213 0, 214 usage, 215 &box); 216 217 if (*transfer == NULL) 218 return NULL; 219 220 map = pipe->transfer_map( pipe, *transfer ); 221 if (map == NULL) { 222 pipe->transfer_destroy( pipe, *transfer ); 223 return NULL; 224 } 225 226 /* Match old screen->buffer_map_range() behaviour, return pointer 227 * to where the beginning of the buffer would be: 228 */ 229 return (void *)((char *)map - offset); 230} 231 232 233static INLINE void * 234pipe_buffer_map(struct pipe_context *pipe, 235 struct pipe_resource *buffer, 236 unsigned usage, 237 struct pipe_transfer **transfer) 238{ 239 return pipe_buffer_map_range(pipe, buffer, 0, buffer->width0, usage, transfer); 240} 241 242 243static INLINE void 244pipe_buffer_unmap(struct pipe_context 
*pipe, 245 struct pipe_resource *buf, 246 struct pipe_transfer *transfer) 247{ 248 if (transfer) { 249 pipe->transfer_unmap(pipe, transfer); 250 pipe->transfer_destroy(pipe, transfer); 251 } 252} 253 254static INLINE void 255pipe_buffer_flush_mapped_range(struct pipe_context *pipe, 256 struct pipe_transfer *transfer, 257 unsigned offset, 258 unsigned length) 259{ 260 struct pipe_box box; 261 int transfer_offset; 262 263 assert(length); 264 assert(transfer->box.x <= offset); 265 assert(offset + length <= transfer->box.x + transfer->box.width); 266 267 /* Match old screen->buffer_flush_mapped_range() behaviour, where 268 * offset parameter is relative to the start of the buffer, not the 269 * mapped range. 270 */ 271 transfer_offset = offset - transfer->box.x; 272 273 u_box_1d(transfer_offset, length, &box); 274 275 pipe->transfer_flush_region(pipe, transfer, &box); 276} 277 278static INLINE void 279pipe_buffer_write(struct pipe_context *pipe, 280 struct pipe_resource *buf, 281 unsigned offset, 282 unsigned size, 283 const void *data) 284{ 285 struct pipe_box box; 286 287 u_box_1d(offset, size, &box); 288 289 pipe->transfer_inline_write( pipe, 290 buf, 291 0, 292 PIPE_TRANSFER_WRITE, 293 &box, 294 data, 295 size, 296 0); 297} 298 299/** 300 * Special case for writing non-overlapping ranges. 301 * 302 * We can avoid GPU/CPU synchronization when writing range that has never 303 * been written before. 
304 */ 305static INLINE void 306pipe_buffer_write_nooverlap(struct pipe_context *pipe, 307 struct pipe_resource *buf, 308 unsigned offset, unsigned size, 309 const void *data) 310{ 311 struct pipe_box box; 312 313 u_box_1d(offset, size, &box); 314 315 pipe->transfer_inline_write(pipe, 316 buf, 317 0, 318 (PIPE_TRANSFER_WRITE | 319 PIPE_TRANSFER_NOOVERWRITE), 320 &box, 321 data, 322 0, 0); 323} 324 325static INLINE void 326pipe_buffer_read(struct pipe_context *pipe, 327 struct pipe_resource *buf, 328 unsigned offset, 329 unsigned size, 330 void *data) 331{ 332 struct pipe_transfer *src_transfer; 333 ubyte *map; 334 335 map = (ubyte *) pipe_buffer_map_range(pipe, 336 buf, 337 offset, size, 338 PIPE_TRANSFER_READ, 339 &src_transfer); 340 341 if (map) 342 memcpy(data, map + offset, size); 343 344 pipe_buffer_unmap(pipe, buf, src_transfer); 345} 346 347static INLINE struct pipe_transfer * 348pipe_get_transfer( struct pipe_context *context, 349 struct pipe_resource *resource, 350 unsigned level, unsigned layer, 351 enum pipe_transfer_usage usage, 352 unsigned x, unsigned y, 353 unsigned w, unsigned h) 354{ 355 struct pipe_box box; 356 u_box_2d_zslice( x, y, layer, w, h, &box ); 357 return context->get_transfer( context, 358 resource, 359 level, 360 usage, 361 &box ); 362} 363 364static INLINE void * 365pipe_transfer_map( struct pipe_context *context, 366 struct pipe_transfer *transfer ) 367{ 368 return context->transfer_map( context, transfer ); 369} 370 371static INLINE void 372pipe_transfer_unmap( struct pipe_context *context, 373 struct pipe_transfer *transfer ) 374{ 375 context->transfer_unmap( context, transfer ); 376} 377 378 379static INLINE void 380pipe_transfer_destroy( struct pipe_context *context, 381 struct pipe_transfer *transfer ) 382{ 383 context->transfer_destroy(context, transfer); 384} 385 386 387static INLINE boolean util_get_offset( 388 const struct pipe_rasterizer_state *templ, 389 unsigned fill_mode) 390{ 391 switch(fill_mode) { 392 case 
PIPE_POLYGON_MODE_POINT: 393 return templ->offset_point; 394 case PIPE_POLYGON_MODE_LINE: 395 return templ->offset_line; 396 case PIPE_POLYGON_MODE_FILL: 397 return templ->offset_tri; 398 default: 399 assert(0); 400 return FALSE; 401 } 402} 403 404#ifdef __cplusplus 405} 406#endif 407 408#endif /* U_INLINES_H */ 409