// GrGpu.cpp — revision 18055afb838a278b5a8436cd51dbfbb688e1e0a0
1 2/* 3 * Copyright 2010 Google Inc. 4 * 5 * Use of this source code is governed by a BSD-style license that can be 6 * found in the LICENSE file. 7 */ 8 9 10#include "GrGpu.h" 11 12#include "GrBufferAllocPool.h" 13#include "GrContext.h" 14#include "GrDrawTargetCaps.h" 15#include "GrIndexBuffer.h" 16#include "GrStencilBuffer.h" 17#include "GrVertexBuffer.h" 18 19// probably makes no sense for this to be less than a page 20static const size_t VERTEX_POOL_VB_SIZE = 1 << 18; 21static const int VERTEX_POOL_VB_COUNT = 4; 22static const size_t INDEX_POOL_IB_SIZE = 1 << 16; 23static const int INDEX_POOL_IB_COUNT = 4; 24 25//////////////////////////////////////////////////////////////////////////////// 26 27#define DEBUG_INVAL_BUFFER 0xdeadcafe 28#define DEBUG_INVAL_START_IDX -1 29 30GrGpu::GrGpu(GrContext* context) 31 : GrDrawTarget(context) 32 , fResetTimestamp(kExpiredTimestamp+1) 33 , fResetBits(kAll_GrBackendState) 34 , fVertexPool(NULL) 35 , fIndexPool(NULL) 36 , fVertexPoolUseCnt(0) 37 , fIndexPoolUseCnt(0) 38 , fQuadIndexBuffer(NULL) { 39 40 fClipMaskManager.setGpu(this); 41 42 fGeomPoolStateStack.push_back(); 43#ifdef SK_DEBUG 44 GeometryPoolState& poolState = fGeomPoolStateStack.back(); 45 poolState.fPoolVertexBuffer = (GrVertexBuffer*)DEBUG_INVAL_BUFFER; 46 poolState.fPoolStartVertex = DEBUG_INVAL_START_IDX; 47 poolState.fPoolIndexBuffer = (GrIndexBuffer*)DEBUG_INVAL_BUFFER; 48 poolState.fPoolStartIndex = DEBUG_INVAL_START_IDX; 49#endif 50} 51 52GrGpu::~GrGpu() { 53 SkSafeSetNull(fQuadIndexBuffer); 54 delete fVertexPool; 55 fVertexPool = NULL; 56 delete fIndexPool; 57 fIndexPool = NULL; 58} 59 60void GrGpu::contextAbandoned() {} 61 62//////////////////////////////////////////////////////////////////////////////// 63 64GrTexture* GrGpu::createTexture(const GrTextureDesc& desc, 65 const void* srcData, size_t rowBytes) { 66 if (!this->caps()->isConfigTexturable(desc.fConfig)) { 67 return NULL; 68 } 69 70 if ((desc.fFlags & kRenderTarget_GrTextureFlagBit) && 71 
!this->caps()->isConfigRenderable(desc.fConfig, desc.fSampleCnt > 0)) { 72 return NULL; 73 } 74 75 GrTexture *tex = NULL; 76 if (GrPixelConfigIsCompressed(desc.fConfig)) { 77 // We shouldn't be rendering into this 78 SkASSERT((desc.fFlags & kRenderTarget_GrTextureFlagBit) == 0); 79 80 if (!this->caps()->npotTextureTileSupport() && 81 (!SkIsPow2(desc.fWidth) || !SkIsPow2(desc.fHeight))) { 82 return NULL; 83 } 84 85 this->handleDirtyContext(); 86 tex = this->onCreateCompressedTexture(desc, srcData); 87 } else { 88 this->handleDirtyContext(); 89 tex = this->onCreateTexture(desc, srcData, rowBytes); 90 if (tex && 91 (kRenderTarget_GrTextureFlagBit & desc.fFlags) && 92 !(kNoStencil_GrTextureFlagBit & desc.fFlags)) { 93 SkASSERT(tex->asRenderTarget()); 94 // TODO: defer this and attach dynamically 95 if (!this->attachStencilBufferToRenderTarget(tex->asRenderTarget())) { 96 tex->unref(); 97 return NULL; 98 } 99 } 100 } 101 return tex; 102} 103 104bool GrGpu::attachStencilBufferToRenderTarget(GrRenderTarget* rt) { 105 SkASSERT(NULL == rt->getStencilBuffer()); 106 GrStencilBuffer* sb = 107 this->getContext()->findStencilBuffer(rt->width(), 108 rt->height(), 109 rt->numSamples()); 110 if (sb) { 111 rt->setStencilBuffer(sb); 112 bool attached = this->attachStencilBufferToRenderTarget(sb, rt); 113 if (!attached) { 114 rt->setStencilBuffer(NULL); 115 } 116 return attached; 117 } 118 if (this->createStencilBufferForRenderTarget(rt, 119 rt->width(), rt->height())) { 120 // Right now we're clearing the stencil buffer here after it is 121 // attached to an RT for the first time. When we start matching 122 // stencil buffers with smaller color targets this will no longer 123 // be correct because it won't be guaranteed to clear the entire 124 // sb. 125 // We used to clear down in the GL subclass using a special purpose 126 // FBO. But iOS doesn't allow a stencil-only FBO. It reports unsupported 127 // FBO status. 
128 this->clearStencil(rt); 129 return true; 130 } else { 131 return false; 132 } 133} 134 135GrTexture* GrGpu::wrapBackendTexture(const GrBackendTextureDesc& desc) { 136 this->handleDirtyContext(); 137 GrTexture* tex = this->onWrapBackendTexture(desc); 138 if (NULL == tex) { 139 return NULL; 140 } 141 // TODO: defer this and attach dynamically 142 GrRenderTarget* tgt = tex->asRenderTarget(); 143 if (tgt && 144 !this->attachStencilBufferToRenderTarget(tgt)) { 145 tex->unref(); 146 return NULL; 147 } else { 148 return tex; 149 } 150} 151 152GrRenderTarget* GrGpu::wrapBackendRenderTarget(const GrBackendRenderTargetDesc& desc) { 153 this->handleDirtyContext(); 154 return this->onWrapBackendRenderTarget(desc); 155} 156 157GrVertexBuffer* GrGpu::createVertexBuffer(size_t size, bool dynamic) { 158 this->handleDirtyContext(); 159 return this->onCreateVertexBuffer(size, dynamic); 160} 161 162GrIndexBuffer* GrGpu::createIndexBuffer(size_t size, bool dynamic) { 163 this->handleDirtyContext(); 164 return this->onCreateIndexBuffer(size, dynamic); 165} 166 167GrIndexBuffer* GrGpu::createInstancedIndexBuffer(const uint16_t* pattern, 168 int patternSize, 169 int reps, 170 int vertCount, 171 bool isDynamic) { 172 size_t bufferSize = patternSize * reps * sizeof(uint16_t); 173 GrGpu* me = const_cast<GrGpu*>(this); 174 GrIndexBuffer* buffer = me->createIndexBuffer(bufferSize, isDynamic); 175 if (buffer) { 176 uint16_t* data = (uint16_t*) buffer->map(); 177 bool useTempData = (NULL == data); 178 if (useTempData) { 179 data = SkNEW_ARRAY(uint16_t, reps * patternSize); 180 } 181 for (int i = 0; i < reps; ++i) { 182 int baseIdx = i * patternSize; 183 uint16_t baseVert = (uint16_t)(i * vertCount); 184 for (int j = 0; j < patternSize; ++j) { 185 data[baseIdx+j] = baseVert + pattern[j]; 186 } 187 } 188 if (useTempData) { 189 if (!buffer->updateData(data, bufferSize)) { 190 SkFAIL("Can't get indices into buffer!"); 191 } 192 SkDELETE_ARRAY(data); 193 } else { 194 buffer->unmap(); 195 } 196 } 
197 return buffer; 198} 199 200void GrGpu::clear(const SkIRect* rect, 201 GrColor color, 202 bool canIgnoreRect, 203 GrRenderTarget* renderTarget) { 204 if (NULL == renderTarget) { 205 renderTarget = this->getDrawState().getRenderTarget(); 206 } 207 if (NULL == renderTarget) { 208 SkASSERT(0); 209 return; 210 } 211 this->handleDirtyContext(); 212 this->onClear(renderTarget, rect, color, canIgnoreRect); 213} 214 215bool GrGpu::readPixels(GrRenderTarget* target, 216 int left, int top, int width, int height, 217 GrPixelConfig config, void* buffer, 218 size_t rowBytes) { 219 this->handleDirtyContext(); 220 return this->onReadPixels(target, left, top, width, height, 221 config, buffer, rowBytes); 222} 223 224bool GrGpu::writeTexturePixels(GrTexture* texture, 225 int left, int top, int width, int height, 226 GrPixelConfig config, const void* buffer, 227 size_t rowBytes) { 228 this->handleDirtyContext(); 229 return this->onWriteTexturePixels(texture, left, top, width, height, 230 config, buffer, rowBytes); 231} 232 233void GrGpu::resolveRenderTarget(GrRenderTarget* target) { 234 SkASSERT(target); 235 this->handleDirtyContext(); 236 this->onResolveRenderTarget(target); 237} 238 239static const GrStencilSettings& winding_path_stencil_settings() { 240 GR_STATIC_CONST_SAME_STENCIL_STRUCT(gSettings, 241 kIncClamp_StencilOp, 242 kIncClamp_StencilOp, 243 kAlwaysIfInClip_StencilFunc, 244 0xFFFF, 0xFFFF, 0xFFFF); 245 return *GR_CONST_STENCIL_SETTINGS_PTR_FROM_STRUCT_PTR(&gSettings); 246} 247 248static const GrStencilSettings& even_odd_path_stencil_settings() { 249 GR_STATIC_CONST_SAME_STENCIL_STRUCT(gSettings, 250 kInvert_StencilOp, 251 kInvert_StencilOp, 252 kAlwaysIfInClip_StencilFunc, 253 0xFFFF, 0xFFFF, 0xFFFF); 254 return *GR_CONST_STENCIL_SETTINGS_PTR_FROM_STRUCT_PTR(&gSettings); 255} 256 257void GrGpu::getPathStencilSettingsForFillType(SkPath::FillType fill, GrStencilSettings* outStencilSettings) { 258 259 switch (fill) { 260 default: 261 SkFAIL("Unexpected path fill."); 
262 /* fallthrough */; 263 case SkPath::kWinding_FillType: 264 case SkPath::kInverseWinding_FillType: 265 *outStencilSettings = winding_path_stencil_settings(); 266 break; 267 case SkPath::kEvenOdd_FillType: 268 case SkPath::kInverseEvenOdd_FillType: 269 *outStencilSettings = even_odd_path_stencil_settings(); 270 break; 271 } 272 fClipMaskManager.adjustPathStencilParams(outStencilSettings); 273} 274 275 276//////////////////////////////////////////////////////////////////////////////// 277 278static const int MAX_QUADS = 1 << 12; // max possible: (1 << 14) - 1; 279 280GR_STATIC_ASSERT(4 * MAX_QUADS <= 65535); 281 282static const uint16_t gQuadIndexPattern[] = { 283 0, 1, 2, 0, 2, 3 284}; 285 286const GrIndexBuffer* GrGpu::getQuadIndexBuffer() const { 287 if (NULL == fQuadIndexBuffer || fQuadIndexBuffer->wasDestroyed()) { 288 SkSafeUnref(fQuadIndexBuffer); 289 GrGpu* me = const_cast<GrGpu*>(this); 290 fQuadIndexBuffer = me->createInstancedIndexBuffer(gQuadIndexPattern, 291 6, 292 MAX_QUADS, 293 4); 294 } 295 296 return fQuadIndexBuffer; 297} 298 299//////////////////////////////////////////////////////////////////////////////// 300 301bool GrGpu::setupClipAndFlushState(DrawType type, const GrDeviceCoordTexture* dstCopy, 302 GrDrawState::AutoRestoreEffects* are, 303 const SkRect* devBounds) { 304 if (!fClipMaskManager.setupClipping(this->getClip(), are, devBounds)) { 305 return false; 306 } 307 308 if (!this->flushGraphicsState(type, dstCopy)) { 309 return false; 310 } 311 312 return true; 313} 314 315//////////////////////////////////////////////////////////////////////////////// 316 317void GrGpu::geometrySourceWillPush() { 318 const GeometrySrcState& geoSrc = this->getGeomSrc(); 319 if (kArray_GeometrySrcType == geoSrc.fVertexSrc || 320 kReserved_GeometrySrcType == geoSrc.fVertexSrc) { 321 this->finalizeReservedVertices(); 322 } 323 if (kArray_GeometrySrcType == geoSrc.fIndexSrc || 324 kReserved_GeometrySrcType == geoSrc.fIndexSrc) { 325 
this->finalizeReservedIndices(); 326 } 327 GeometryPoolState& newState = fGeomPoolStateStack.push_back(); 328#ifdef SK_DEBUG 329 newState.fPoolVertexBuffer = (GrVertexBuffer*)DEBUG_INVAL_BUFFER; 330 newState.fPoolStartVertex = DEBUG_INVAL_START_IDX; 331 newState.fPoolIndexBuffer = (GrIndexBuffer*)DEBUG_INVAL_BUFFER; 332 newState.fPoolStartIndex = DEBUG_INVAL_START_IDX; 333#else 334 (void) newState; // silence compiler warning 335#endif 336} 337 338void GrGpu::geometrySourceWillPop(const GeometrySrcState& restoredState) { 339 // if popping last entry then pops are unbalanced with pushes 340 SkASSERT(fGeomPoolStateStack.count() > 1); 341 fGeomPoolStateStack.pop_back(); 342} 343 344void GrGpu::onDraw(const DrawInfo& info) { 345 this->handleDirtyContext(); 346 GrDrawState::AutoRestoreEffects are; 347 if (!this->setupClipAndFlushState(PrimTypeToDrawType(info.primitiveType()), 348 info.getDstCopy(), &are, info.getDevBounds())) { 349 return; 350 } 351 this->onGpuDraw(info); 352} 353 354void GrGpu::onStencilPath(const GrPath* path, SkPath::FillType fill) { 355 this->handleDirtyContext(); 356 357 GrDrawState::AutoRestoreEffects are; 358 if (!this->setupClipAndFlushState(kStencilPath_DrawType, NULL, &are, NULL)) { 359 return; 360 } 361 362 this->pathRendering()->stencilPath(path, fill); 363} 364 365 366void GrGpu::onDrawPath(const GrPath* path, SkPath::FillType fill, 367 const GrDeviceCoordTexture* dstCopy) { 368 this->handleDirtyContext(); 369 370 drawState()->setDefaultVertexAttribs(); 371 372 GrDrawState::AutoRestoreEffects are; 373 if (!this->setupClipAndFlushState(kDrawPath_DrawType, dstCopy, &are, NULL)) { 374 return; 375 } 376 377 this->pathRendering()->drawPath(path, fill); 378} 379 380void GrGpu::onDrawPaths(const GrPathRange* pathRange, 381 const uint32_t indices[], int count, 382 const float transforms[], PathTransformType transformsType, 383 SkPath::FillType fill, const GrDeviceCoordTexture* dstCopy) { 384 this->handleDirtyContext(); 385 386 
drawState()->setDefaultVertexAttribs(); 387 388 GrDrawState::AutoRestoreEffects are; 389 if (!this->setupClipAndFlushState(kDrawPaths_DrawType, dstCopy, &are, NULL)) { 390 return; 391 } 392 393 pathRange->willDrawPaths(indices, count); 394 this->pathRendering()->drawPaths(pathRange, indices, count, transforms, transformsType, fill); 395} 396 397void GrGpu::finalizeReservedVertices() { 398 SkASSERT(fVertexPool); 399 fVertexPool->unmap(); 400} 401 402void GrGpu::finalizeReservedIndices() { 403 SkASSERT(fIndexPool); 404 fIndexPool->unmap(); 405} 406 407void GrGpu::prepareVertexPool() { 408 if (NULL == fVertexPool) { 409 SkASSERT(0 == fVertexPoolUseCnt); 410 fVertexPool = SkNEW_ARGS(GrVertexBufferAllocPool, (this, true, 411 VERTEX_POOL_VB_SIZE, 412 VERTEX_POOL_VB_COUNT)); 413 fVertexPool->releaseGpuRef(); 414 } else if (!fVertexPoolUseCnt) { 415 // the client doesn't have valid data in the pool 416 fVertexPool->reset(); 417 } 418} 419 420void GrGpu::prepareIndexPool() { 421 if (NULL == fIndexPool) { 422 SkASSERT(0 == fIndexPoolUseCnt); 423 fIndexPool = SkNEW_ARGS(GrIndexBufferAllocPool, (this, true, 424 INDEX_POOL_IB_SIZE, 425 INDEX_POOL_IB_COUNT)); 426 fIndexPool->releaseGpuRef(); 427 } else if (!fIndexPoolUseCnt) { 428 // the client doesn't have valid data in the pool 429 fIndexPool->reset(); 430 } 431} 432 433bool GrGpu::onReserveVertexSpace(size_t vertexSize, 434 int vertexCount, 435 void** vertices) { 436 GeometryPoolState& geomPoolState = fGeomPoolStateStack.back(); 437 438 SkASSERT(vertexCount > 0); 439 SkASSERT(vertices); 440 441 this->prepareVertexPool(); 442 443 *vertices = fVertexPool->makeSpace(vertexSize, 444 vertexCount, 445 &geomPoolState.fPoolVertexBuffer, 446 &geomPoolState.fPoolStartVertex); 447 if (NULL == *vertices) { 448 return false; 449 } 450 ++fVertexPoolUseCnt; 451 return true; 452} 453 454bool GrGpu::onReserveIndexSpace(int indexCount, void** indices) { 455 GeometryPoolState& geomPoolState = fGeomPoolStateStack.back(); 456 457 
SkASSERT(indexCount > 0); 458 SkASSERT(indices); 459 460 this->prepareIndexPool(); 461 462 *indices = fIndexPool->makeSpace(indexCount, 463 &geomPoolState.fPoolIndexBuffer, 464 &geomPoolState.fPoolStartIndex); 465 if (NULL == *indices) { 466 return false; 467 } 468 ++fIndexPoolUseCnt; 469 return true; 470} 471 472void GrGpu::releaseReservedVertexSpace() { 473 const GeometrySrcState& geoSrc = this->getGeomSrc(); 474 SkASSERT(kReserved_GeometrySrcType == geoSrc.fVertexSrc); 475 size_t bytes = geoSrc.fVertexCount * geoSrc.fVertexSize; 476 fVertexPool->putBack(bytes); 477 --fVertexPoolUseCnt; 478} 479 480void GrGpu::releaseReservedIndexSpace() { 481 const GeometrySrcState& geoSrc = this->getGeomSrc(); 482 SkASSERT(kReserved_GeometrySrcType == geoSrc.fIndexSrc); 483 size_t bytes = geoSrc.fIndexCount * sizeof(uint16_t); 484 fIndexPool->putBack(bytes); 485 --fIndexPoolUseCnt; 486} 487 488void GrGpu::onSetVertexSourceToArray(const void* vertexArray, int vertexCount) { 489 this->prepareVertexPool(); 490 GeometryPoolState& geomPoolState = fGeomPoolStateStack.back(); 491#ifdef SK_DEBUG 492 bool success = 493#endif 494 fVertexPool->appendVertices(this->getVertexSize(), 495 vertexCount, 496 vertexArray, 497 &geomPoolState.fPoolVertexBuffer, 498 &geomPoolState.fPoolStartVertex); 499 ++fVertexPoolUseCnt; 500 GR_DEBUGASSERT(success); 501} 502 503void GrGpu::onSetIndexSourceToArray(const void* indexArray, int indexCount) { 504 this->prepareIndexPool(); 505 GeometryPoolState& geomPoolState = fGeomPoolStateStack.back(); 506#ifdef SK_DEBUG 507 bool success = 508#endif 509 fIndexPool->appendIndices(indexCount, 510 indexArray, 511 &geomPoolState.fPoolIndexBuffer, 512 &geomPoolState.fPoolStartIndex); 513 ++fIndexPoolUseCnt; 514 GR_DEBUGASSERT(success); 515} 516 517void GrGpu::releaseVertexArray() { 518 // if vertex source was array, we stowed data in the pool 519 const GeometrySrcState& geoSrc = this->getGeomSrc(); 520 SkASSERT(kArray_GeometrySrcType == geoSrc.fVertexSrc); 521 size_t 
bytes = geoSrc.fVertexCount * geoSrc.fVertexSize; 522 fVertexPool->putBack(bytes); 523 --fVertexPoolUseCnt; 524} 525 526void GrGpu::releaseIndexArray() { 527 // if index source was array, we stowed data in the pool 528 const GeometrySrcState& geoSrc = this->getGeomSrc(); 529 SkASSERT(kArray_GeometrySrcType == geoSrc.fIndexSrc); 530 size_t bytes = geoSrc.fIndexCount * sizeof(uint16_t); 531 fIndexPool->putBack(bytes); 532 --fIndexPoolUseCnt; 533} 534