/*
 * Copyright 2010 Christoph Bumiller
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */

#include "pipe/p_defines.h"
#include "util/u_framebuffer.h"

#include "nvc0/nvc0_context.h"
#include "nvc0/nvc0_screen.h"
#include "nvc0/nvc0_resource.h"

static void
nvc0_flush(struct pipe_context *pipe,
           struct pipe_fence_handle **fence,
           unsigned flags)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);
   struct nouveau_screen *screen = &nvc0->screen->base;

   if (fence)
      nouveau_fence_ref(screen->fence.current, (struct nouveau_fence **)fence);

   PUSH_KICK(nvc0->base.pushbuf); /* fencing handled in kick_notify */

   nouveau_context_update_frame_stats(&nvc0->base);
}

static void
nvc0_texture_barrier(struct pipe_context *pipe, unsigned flags)
{
   struct nouveau_pushbuf *push = nvc0_context(pipe)->base.pushbuf;

   IMMED_NVC0(push, NVC0_3D(SERIALIZE), 0);
   IMMED_NVC0(push, NVC0_3D(TEX_CACHE_CTL), 0);
}

static void
nvc0_memory_barrier(struct pipe_context *pipe, unsigned flags)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);
   struct nouveau_pushbuf *push = nvc0->base.pushbuf;
   int i, s;

   if (flags & PIPE_BARRIER_MAPPED_BUFFER) {
      for (i = 0; i < nvc0->num_vtxbufs; ++i) {
         if (!nvc0->vtxbuf[i].buffer)
            continue;
         if (nvc0->vtxbuf[i].buffer->flags & PIPE_RESOURCE_FLAG_MAP_PERSISTENT)
            nvc0->base.vbo_dirty = true;
      }

      if (nvc0->idxbuf.buffer &&
          nvc0->idxbuf.buffer->flags & PIPE_RESOURCE_FLAG_MAP_PERSISTENT)
         nvc0->base.vbo_dirty = true;

      for (s = 0; s < 5 && !nvc0->cb_dirty; ++s) {
         uint32_t valid = nvc0->constbuf_valid[s];

         while (valid && !nvc0->cb_dirty) {
            const unsigned i = ffs(valid) - 1;
            struct pipe_resource *res;

            valid &= ~(1 << i);
            if (nvc0->constbuf[s][i].user)
               continue;

            res = nvc0->constbuf[s][i].u.buf;
            if (!res)
               continue;

            if (res->flags & PIPE_RESOURCE_FLAG_MAP_PERSISTENT)
               nvc0->cb_dirty = true;
         }
      }
   } else {
      /* Pretty much any writing by shaders needs a serialize after
       * it. Especially when moving between 3d and compute pipelines, but even
       * without that.
       */
      IMMED_NVC0(push, NVC0_3D(SERIALIZE), 0);
   }

   /* If we're going to texture from a buffer/image written by a shader, we
    * must flush the texture cache.
    */
   if (flags & PIPE_BARRIER_TEXTURE)
      IMMED_NVC0(push, NVC0_3D(TEX_CACHE_CTL), 0);

   if (flags & PIPE_BARRIER_CONSTANT_BUFFER)
      nvc0->cb_dirty = true;
   if (flags & (PIPE_BARRIER_VERTEX_BUFFER | PIPE_BARRIER_INDEX_BUFFER))
      nvc0->base.vbo_dirty = true;
}

/* Emit the marker string as data of a NOP method so it ends up embedded in
 * the command stream.
 */
static void
nvc0_emit_string_marker(struct pipe_context *pipe, const char *str, int len)
{
   struct nouveau_pushbuf *push = nvc0_context(pipe)->base.pushbuf;
   int string_words = len / 4;
   int data_words;

   if (len <= 0)
      return;
   string_words = MIN2(string_words, NV04_PFIFO_MAX_PACKET_LEN);
   if (string_words == NV04_PFIFO_MAX_PACKET_LEN)
      data_words = string_words;
   else
      data_words = string_words + !!(len & 3);
   BEGIN_NIC0(push, SUBC_3D(NV04_GRAPH_NOP), data_words);
   if (string_words)
      PUSH_DATAp(push, str, string_words);
   if (string_words != data_words) {
      int data = 0;
      memcpy(&data, &str[string_words * 4], len & 3);
      PUSH_DATA (push, data);
   }
}

static void
nvc0_context_unreference_resources(struct nvc0_context *nvc0)
{
   unsigned s, i;

   nouveau_bufctx_del(&nvc0->bufctx_3d);
   nouveau_bufctx_del(&nvc0->bufctx);
   nouveau_bufctx_del(&nvc0->bufctx_cp);

   util_unreference_framebuffer_state(&nvc0->framebuffer);

   for (i = 0; i < nvc0->num_vtxbufs; ++i)
      pipe_resource_reference(&nvc0->vtxbuf[i].buffer, NULL);

   pipe_resource_reference(&nvc0->idxbuf.buffer, NULL);

   for (s = 0; s < 6; ++s) {
      for (i = 0; i < nvc0->num_textures[s]; ++i)
         pipe_sampler_view_reference(&nvc0->textures[s][i], NULL);

      for (i = 0; i < NVC0_MAX_PIPE_CONSTBUFS; ++i)
         if (!nvc0->constbuf[s][i].user)
            pipe_resource_reference(&nvc0->constbuf[s][i].u.buf, NULL);

      for (i = 0; i < NVC0_MAX_BUFFERS; ++i)
         pipe_resource_reference(&nvc0->buffers[s][i].buffer, NULL);

      for (i = 0; i < NVC0_MAX_IMAGES; ++i) {
         pipe_resource_reference(&nvc0->images[s][i].resource, NULL);
         if (nvc0->screen->base.class_3d >= GM107_3D_CLASS)
            pipe_sampler_view_reference(&nvc0->images_tic[s][i], NULL);
      }
   }

   for (s = 0; s < 2; ++s) {
      for (i = 0; i < NVC0_MAX_SURFACE_SLOTS; ++i)
         pipe_surface_reference(&nvc0->surfaces[s][i], NULL);
   }

   for (i = 0; i < nvc0->num_tfbbufs; ++i)
      pipe_so_target_reference(&nvc0->tfbbuf[i], NULL);

   for (i = 0; i < nvc0->global_residents.size / sizeof(struct pipe_resource *);
        ++i) {
      struct pipe_resource **res = util_dynarray_element(
         &nvc0->global_residents, struct pipe_resource *, i);
      pipe_resource_reference(res, NULL);
   }
   util_dynarray_fini(&nvc0->global_residents);

   if (nvc0->tcp_empty)
      nvc0->base.pipe.delete_tcs_state(&nvc0->base.pipe, nvc0->tcp_empty);
}

static void
nvc0_destroy(struct pipe_context *pipe)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);

   if (nvc0->screen->cur_ctx == nvc0) {
      nvc0->screen->cur_ctx = NULL;
      nvc0->screen->save_state = nvc0->state;
      nvc0->screen->save_state.tfb = NULL;
   }

   /* Unset bufctx, we don't want to revalidate any resources after the flush.
    * Other contexts will always set their bufctx again on action calls.
    */
   nouveau_pushbuf_bufctx(nvc0->base.pushbuf, NULL);
   nouveau_pushbuf_kick(nvc0->base.pushbuf, nvc0->base.pushbuf->channel);

   nvc0_context_unreference_resources(nvc0);
   nvc0_blitctx_destroy(nvc0);

   nouveau_context_destroy(&nvc0->base);
}

void
nvc0_default_kick_notify(struct nouveau_pushbuf *push)
{
   struct nvc0_screen *screen = push->user_priv;

   if (screen) {
      nouveau_fence_next(&screen->base);
      nouveau_fence_update(&screen->base, true);
      if (screen->cur_ctx)
         screen->cur_ctx->state.flushed = true;
      NOUVEAU_DRV_STAT(&screen->base, pushbuf_count, 1);
   }
}

/* Mark all context bindings of @res dirty so they are revalidated after its
 * backing storage changes; return early once the remaining reference count
 * @ref drops to zero.
 */
static int
nvc0_invalidate_resource_storage(struct nouveau_context *ctx,
                                 struct pipe_resource *res,
                                 int ref)
{
   struct nvc0_context *nvc0 = nvc0_context(&ctx->pipe);
   unsigned s, i;

   if (res->bind & PIPE_BIND_RENDER_TARGET) {
      for (i = 0; i < nvc0->framebuffer.nr_cbufs; ++i) {
         if (nvc0->framebuffer.cbufs[i] &&
             nvc0->framebuffer.cbufs[i]->texture == res) {
            nvc0->dirty_3d |= NVC0_NEW_3D_FRAMEBUFFER;
            nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_3D_FB);
            if (!--ref)
               return ref;
         }
      }
   }
   if (res->bind & PIPE_BIND_DEPTH_STENCIL) {
      if (nvc0->framebuffer.zsbuf &&
          nvc0->framebuffer.zsbuf->texture == res) {
         nvc0->dirty_3d |= NVC0_NEW_3D_FRAMEBUFFER;
         nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_3D_FB);
         if (!--ref)
            return ref;
      }
   }

   if (res->target == PIPE_BUFFER) {
      for (i = 0; i < nvc0->num_vtxbufs; ++i) {
         if (nvc0->vtxbuf[i].buffer == res) {
            nvc0->dirty_3d |= NVC0_NEW_3D_ARRAYS;
            nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_3D_VTX);
            if (!--ref)
               return ref;
         }
      }

      if (nvc0->idxbuf.buffer == res) {
         nvc0->dirty_3d |= NVC0_NEW_3D_IDXBUF;
         nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_3D_IDX);
         if (!--ref)
            return ref;
      }

      for (s = 0; s < 6; ++s) {
         for (i = 0; i < nvc0->num_textures[s]; ++i) {
            if (nvc0->textures[s][i] &&
                nvc0->textures[s][i]->texture == res) {
               nvc0->textures_dirty[s] |= 1 << i;
               if (unlikely(s == 5)) {
                  nvc0->dirty_cp |= NVC0_NEW_CP_TEXTURES;
                  nouveau_bufctx_reset(nvc0->bufctx_cp, NVC0_BIND_CP_TEX(i));
               } else {
                  nvc0->dirty_3d |= NVC0_NEW_3D_TEXTURES;
                  nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_3D_TEX(s, i));
               }
               if (!--ref)
                  return ref;
            }
         }
      }

      for (s = 0; s < 6; ++s) {
         for (i = 0; i < NVC0_MAX_PIPE_CONSTBUFS; ++i) {
            if (!(nvc0->constbuf_valid[s] & (1 << i)))
               continue;
            if (!nvc0->constbuf[s][i].user &&
                nvc0->constbuf[s][i].u.buf == res) {
               nvc0->constbuf_dirty[s] |= 1 << i;
               if (unlikely(s == 5)) {
                  nvc0->dirty_cp |= NVC0_NEW_CP_CONSTBUF;
                  nouveau_bufctx_reset(nvc0->bufctx_cp, NVC0_BIND_CP_CB(i));
               } else {
                  nvc0->dirty_3d |= NVC0_NEW_3D_CONSTBUF;
                  nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_3D_CB(s, i));
               }
               if (!--ref)
                  return ref;
            }
         }
      }

      for (s = 0; s < 6; ++s) {
         for (i = 0; i < NVC0_MAX_BUFFERS; ++i) {
            if (nvc0->buffers[s][i].buffer == res) {
               nvc0->buffers_dirty[s] |= 1 << i;
               if (unlikely(s == 5)) {
                  nvc0->dirty_cp |= NVC0_NEW_CP_BUFFERS;
                  nouveau_bufctx_reset(nvc0->bufctx_cp, NVC0_BIND_CP_BUF);
               } else {
                  nvc0->dirty_3d |= NVC0_NEW_3D_BUFFERS;
                  nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_3D_BUF);
               }
               if (!--ref)
                  return ref;
            }
         }
      }

      for (s = 0; s < 6; ++s) {
         for (i = 0; i < NVC0_MAX_IMAGES; ++i) {
            if (nvc0->images[s][i].resource == res) {
               nvc0->images_dirty[s] |= 1 << i;
               if (unlikely(s == 5)) {
                  nvc0->dirty_cp |= NVC0_NEW_CP_SURFACES;
                  nouveau_bufctx_reset(nvc0->bufctx_cp, NVC0_BIND_CP_SUF);
               } else {
                  nvc0->dirty_3d |= NVC0_NEW_3D_SURFACES;
                  nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_3D_SUF);
               }
            }
            if (!--ref)
               return ref;
         }
      }
   }

   return ref;
}

static void
nvc0_context_get_sample_position(struct pipe_context *, unsigned, unsigned,
                                 float *);

struct pipe_context *
nvc0_create(struct pipe_screen *pscreen, void *priv, unsigned ctxflags)
{
   struct nvc0_screen *screen = nvc0_screen(pscreen);
   struct nvc0_context *nvc0;
   struct pipe_context *pipe;
   int ret;
   uint32_t flags;

   nvc0 = CALLOC_STRUCT(nvc0_context);
   if (!nvc0)
      return NULL;
   pipe = &nvc0->base.pipe;

   if (!nvc0_blitctx_create(nvc0))
      goto out_err;

   nvc0->base.pushbuf = screen->base.pushbuf;
   nvc0->base.client = screen->base.client;

   ret = nouveau_bufctx_new(screen->base.client, 2, &nvc0->bufctx);
   if (!ret)
      ret = nouveau_bufctx_new(screen->base.client, NVC0_BIND_3D_COUNT,
                               &nvc0->bufctx_3d);
   if (!ret)
      ret = nouveau_bufctx_new(screen->base.client, NVC0_BIND_CP_COUNT,
                               &nvc0->bufctx_cp);
   if (ret)
      goto out_err;

   nvc0->screen = screen;
   nvc0->base.screen = &screen->base;

   pipe->screen = pscreen;
   pipe->priv = priv;

   pipe->destroy = nvc0_destroy;

   pipe->draw_vbo = nvc0_draw_vbo;
   pipe->clear = nvc0_clear;
   pipe->launch_grid = (nvc0->screen->base.class_3d >= NVE4_3D_CLASS) ?
      nve4_launch_grid : nvc0_launch_grid;

   pipe->flush = nvc0_flush;
   pipe->texture_barrier = nvc0_texture_barrier;
   pipe->memory_barrier = nvc0_memory_barrier;
   pipe->get_sample_position = nvc0_context_get_sample_position;
   pipe->emit_string_marker = nvc0_emit_string_marker;

   nouveau_context_init(&nvc0->base);
   nvc0_init_query_functions(nvc0);
   nvc0_init_surface_functions(nvc0);
   nvc0_init_state_functions(nvc0);
   nvc0_init_transfer_functions(nvc0);
   nvc0_init_resource_functions(pipe);

   nvc0->base.invalidate_resource_storage = nvc0_invalidate_resource_storage;

   pipe->create_video_codec = nvc0_create_decoder;
   pipe->create_video_buffer = nvc0_video_buffer_create;

   /* shader builtin library is per-screen, but we need a context for m2mf */
   nvc0_program_library_upload(nvc0);
   nvc0_program_init_tcp_empty(nvc0);
   if (!nvc0->tcp_empty)
      goto out_err;
   /* set the empty tctl prog on next draw in case one is never set */
   nvc0->dirty_3d |= NVC0_NEW_3D_TCTLPROG;

   /* Do not bind the COMPUTE driver constbuf at screen initialization because
    * CBs are aliased between 3D and COMPUTE, but make sure it will be bound if
    * a grid is launched later. */
   nvc0->dirty_cp |= NVC0_NEW_CP_DRIVERCONST;

   /* now that there are no more opportunities for errors, set the current
    * context if there isn't already one.
    */
   if (!screen->cur_ctx) {
      nvc0->state = screen->save_state;
      screen->cur_ctx = nvc0;
      nouveau_pushbuf_bufctx(screen->base.pushbuf, nvc0->bufctx);
   }
   screen->base.pushbuf->kick_notify = nvc0_default_kick_notify;

   /* add permanently resident buffers to bufctxs */

   flags = NV_VRAM_DOMAIN(&screen->base) | NOUVEAU_BO_RD;

   BCTX_REFN_bo(nvc0->bufctx_3d, 3D_TEXT, flags, screen->text);
   BCTX_REFN_bo(nvc0->bufctx_3d, 3D_SCREEN, flags, screen->uniform_bo);
   BCTX_REFN_bo(nvc0->bufctx_3d, 3D_SCREEN, flags, screen->txc);
   if (screen->compute) {
      BCTX_REFN_bo(nvc0->bufctx_cp, CP_TEXT, flags, screen->text);
      BCTX_REFN_bo(nvc0->bufctx_cp, CP_SCREEN, flags, screen->uniform_bo);
      BCTX_REFN_bo(nvc0->bufctx_cp, CP_SCREEN, flags, screen->txc);
   }

   flags = NV_VRAM_DOMAIN(&screen->base) | NOUVEAU_BO_RDWR;

   if (screen->poly_cache)
      BCTX_REFN_bo(nvc0->bufctx_3d, 3D_SCREEN, flags, screen->poly_cache);
   if (screen->compute)
      BCTX_REFN_bo(nvc0->bufctx_cp, CP_SCREEN, flags, screen->tls);

   flags = NOUVEAU_BO_GART | NOUVEAU_BO_WR;

   BCTX_REFN_bo(nvc0->bufctx_3d, 3D_SCREEN, flags, screen->fence.bo);
   BCTX_REFN_bo(nvc0->bufctx, FENCE, flags, screen->fence.bo);
   if (screen->compute)
      BCTX_REFN_bo(nvc0->bufctx_cp, CP_SCREEN, flags, screen->fence.bo);

   nvc0->base.scratch.bo_size = 2 << 20;

   memset(nvc0->tex_handles, ~0, sizeof(nvc0->tex_handles));

   util_dynarray_init(&nvc0->global_residents);

   return pipe;

out_err:
   if (nvc0) {
      if (nvc0->bufctx_3d)
         nouveau_bufctx_del(&nvc0->bufctx_3d);
      if (nvc0->bufctx_cp)
         nouveau_bufctx_del(&nvc0->bufctx_cp);
      if (nvc0->bufctx)
         nouveau_bufctx_del(&nvc0->bufctx);
      FREE(nvc0->blit);
      FREE(nvc0);
   }
   return NULL;
}

/* Attach the screen's current fence to every resource referenced by @bufctx
 * (the pending list, or the current list when called at flush time).
 */
void
nvc0_bufctx_fence(struct nvc0_context *nvc0, struct nouveau_bufctx *bufctx,
                  bool on_flush)
{
   struct nouveau_list *list = on_flush ? &bufctx->current : &bufctx->pending;
   struct nouveau_list *it;
   NOUVEAU_DRV_STAT_IFD(unsigned count = 0);

   for (it = list->next; it != list; it = it->next) {
      struct nouveau_bufref *ref = (struct nouveau_bufref *)it;
      struct nv04_resource *res = ref->priv;
      if (res)
         nvc0_resource_validate(res, (unsigned)ref->priv_data);
      NOUVEAU_DRV_STAT_IFD(count++);
   }
   NOUVEAU_DRV_STAT(&nvc0->screen->base, resource_validate_count, count);
}

const void *
nvc0_get_sample_locations(unsigned sample_count)
{
   static const uint8_t ms1[1][2] = { { 0x8, 0x8 } };
   static const uint8_t ms2[2][2] = {
      { 0x4, 0x4 }, { 0xc, 0xc } }; /* surface coords (0,0), (1,0) */
   static const uint8_t ms4[4][2] = {
      { 0x6, 0x2 }, { 0xe, 0x6 },   /* (0,0), (1,0) */
      { 0x2, 0xa }, { 0xa, 0xe } }; /* (0,1), (1,1) */
   static const uint8_t ms8[8][2] = {
      { 0x1, 0x7 }, { 0x5, 0x3 },   /* (0,0), (1,0) */
      { 0x3, 0xd }, { 0x7, 0xb },   /* (0,1), (1,1) */
      { 0x9, 0x5 }, { 0xf, 0x1 },   /* (2,0), (3,0) */
      { 0xb, 0xf }, { 0xd, 0x9 } }; /* (2,1), (3,1) */
#if 0
   /* NOTE: there are alternative modes for MS2 and MS8, currently not used */
   static const uint8_t ms8_alt[8][2] = {
      { 0x9, 0x5 }, { 0x7, 0xb },   /* (2,0), (1,1) */
      { 0xd, 0x9 }, { 0x5, 0x3 },   /* (3,1), (1,0) */
      { 0x3, 0xd }, { 0x1, 0x7 },   /* (0,1), (0,0) */
      { 0xb, 0xf }, { 0xf, 0x1 } }; /* (2,1), (3,0) */
#endif

   const uint8_t (*ptr)[2];

   switch (sample_count) {
   case 0:
   case 1: ptr = ms1; break;
   case 2: ptr = ms2; break;
   case 4: ptr = ms4; break;
   case 8: ptr = ms8; break;
   default:
      assert(0);
      return NULL; /* bad sample count -> undefined locations */
   }
   return ptr;
}

static void
nvc0_context_get_sample_position(struct pipe_context *pipe,
                                 unsigned sample_count, unsigned sample_index,
                                 float *xy)
{
   const uint8_t (*ptr)[2];

   ptr = nvc0_get_sample_locations(sample_count);
   if (!ptr)
      return;

   xy[0] = ptr[sample_index][0] * 0.0625f;
   xy[1] = ptr[sample_index][1] * 0.0625f;
}