// GrInOrderDrawBuffer.cpp, revision 9b62aa156bcf1db6f11af9302bf8bb8ef2567142
/*
 * Copyright 2011 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrInOrderDrawBuffer.h"

#include "GrBufferAllocPool.h"
#include "GrDrawTargetCaps.h"
#include "GrGpu.h"
#include "GrIndexBuffer.h"
#include "GrPath.h"
#include "GrPoint.h"
#include "GrRenderTarget.h"
#include "GrTemplates.h"
#include "GrTexture.h"
#include "GrVertexBuffer.h"

GrInOrderDrawBuffer::GrInOrderDrawBuffer(GrGpu* gpu,
                                         GrVertexBufferAllocPool* vertexPool,
                                         GrIndexBufferAllocPool* indexPool)
    : GrDrawTarget(gpu->getContext())
    , fDstGpu(gpu)
    , fClipSet(true)
    , fClipProxyState(kUnknown_ClipProxyState)
    , fVertexPool(*vertexPool)
    , fIndexPool(*indexPool)
    , fFlushing(false)
    , fDrawID(0) {

    fDstGpu->ref();
    fCaps.reset(SkRef(fDstGpu->caps()));

    SkASSERT(NULL != vertexPool);
    SkASSERT(NULL != indexPool);

    GeometryPoolState& poolState = fGeoPoolStateStack.push_back();
    poolState.fUsedPoolVertexBytes = 0;
    poolState.fUsedPoolIndexBytes = 0;
#ifdef SK_DEBUG
    poolState.fPoolVertexBuffer = (GrVertexBuffer*)~0;
    poolState.fPoolStartVertex = ~0;
    poolState.fPoolIndexBuffer = (GrIndexBuffer*)~0;
    poolState.fPoolStartIndex = ~0;
#endif
    this->reset();
}

GrInOrderDrawBuffer::~GrInOrderDrawBuffer() {
    this->reset();
    // This must be called before the GrDrawTarget destructor.
    this->releaseGeometry();
    fDstGpu->unref();
}

////////////////////////////////////////////////////////////////////////////////

namespace {
void get_vertex_bounds(const void* vertices,
                       size_t vertexSize,
                       int vertexCount,
                       SkRect* bounds) {
    SkASSERT(vertexSize >= sizeof(GrPoint));
    SkASSERT(vertexCount > 0);
    const GrPoint* point = static_cast<const GrPoint*>(vertices);
    bounds->fLeft = bounds->fRight = point->fX;
    bounds->fTop = bounds->fBottom = point->fY;
    for (int i = 1; i < vertexCount; ++i) {
        point = reinterpret_cast<GrPoint*>(reinterpret_cast<intptr_t>(point) + vertexSize);
        bounds->growToInclude(point->fX, point->fY);
    }
}
}  // namespace

namespace {

extern const GrVertexAttrib kRectPosColorUVAttribs[] = {
    {kVec2f_GrVertexAttribType,  0,               kPosition_GrVertexAttribBinding},
    {kVec4ub_GrVertexAttribType, sizeof(GrPoint), kColor_GrVertexAttribBinding},
    {kVec2f_GrVertexAttribType,  sizeof(GrPoint)+sizeof(GrColor),
                                                  kLocalCoord_GrVertexAttribBinding},
};

extern const GrVertexAttrib kRectPosUVAttribs[] = {
    {kVec2f_GrVertexAttribType,  0,              kPosition_GrVertexAttribBinding},
    {kVec2f_GrVertexAttribType, sizeof(GrPoint), kLocalCoord_GrVertexAttribBinding},
};

static void set_vertex_attributes(GrDrawState* drawState,
                                  bool hasColor, bool hasUVs,
                                  int* colorOffset, int* localOffset) {
    *colorOffset = -1;
    *localOffset = -1;

    // Using per-vertex colors allows batching across colors. (A lot of rects in a row differing
    // only in color is a common occurrence in tables). However, having per-vertex colors disables
    // blending optimizations because we don't know if the color will be solid or not. These
    // optimizations help determine whether coverage and color can be blended correctly when
    // dual-source blending isn't available. This comes into play when there is coverage. If colors
    // were a stage it could take a hint that every vertex's color will be opaque.
    if (hasColor && hasUVs) {
        *colorOffset = sizeof(GrPoint);
        *localOffset = sizeof(GrPoint) + sizeof(GrColor);
        drawState->setVertexAttribs<kRectPosColorUVAttribs>(3);
    } else if (hasColor) {
        *colorOffset = sizeof(GrPoint);
        drawState->setVertexAttribs<kRectPosColorUVAttribs>(2);
    } else if (hasUVs) {
        *localOffset = sizeof(GrPoint);
        drawState->setVertexAttribs<kRectPosUVAttribs>(2);
    } else {
        drawState->setVertexAttribs<kRectPosUVAttribs>(1);
    }
}

}  // namespace
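
// Illustrative note (not from the original source): assuming GrPoint is two floats (8 bytes) and
// GrColor is 4 bytes, the four per-vertex layouts chosen above look like this:
//
//   position only           : [ pos(8) ]                         stride  8
//   position + color        : [ pos(8) | color(4) ]              stride 12
//   position + UV           : [ pos(8) | local(8) ]              stride 16
//   position + color + UV   : [ pos(8) | color(4) | local(8) ]   stride 20
//
// colorOffset/localOffset return the byte offset of the color and local-coord attributes within
// one vertex, or -1 when the attribute is absent. The byte sizes are typical values given for
// illustration; the real stride comes from GrDrawState::getVertexSize().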

void GrInOrderDrawBuffer::onDrawRect(const SkRect& rect,
                                     const SkMatrix* matrix,
                                     const SkRect* localRect,
                                     const SkMatrix* localMatrix) {
    GrDrawState::AutoColorRestore acr;

    GrDrawState* drawState = this->drawState();

    GrColor color = drawState->getColor();

    int colorOffset, localOffset;
    set_vertex_attributes(drawState,
                   this->caps()->dualSourceBlendingSupport() || drawState->hasSolidCoverage(),
                   NULL != localRect,
                   &colorOffset, &localOffset);
    if (colorOffset >= 0) {
        // We set the draw state's color to white here. This is done so that any batching performed
        // in our subclass's onDraw() won't get a false from GrDrawState::op== due to a color
        // mismatch. TODO: Once vertex layout is owned by GrDrawState it should skip comparing the
        // constant color in its op== when the kColor layout bit is set and then we can remove
        // this.
        acr.set(drawState, 0xFFFFFFFF);
    }

    AutoReleaseGeometry geo(this, 4, 0);
    if (!geo.succeeded()) {
        GrPrintf("Failed to get space for vertices!\n");
        return;
    }

    // Go to device coords to allow batching across matrix changes.
    SkMatrix combinedMatrix;
    if (NULL != matrix) {
        combinedMatrix = *matrix;
    } else {
        combinedMatrix.reset();
    }
    combinedMatrix.postConcat(drawState->getViewMatrix());
    // When the caller has provided an explicit source rect for a stage then we don't want to
    // modify that stage's matrix. Otherwise if the effect is generating its source rect from
    // the vertex positions then we have to account for the view matrix change.
    GrDrawState::AutoViewMatrixRestore avmr;
    if (!avmr.setIdentity(drawState)) {
        return;
    }

    size_t vsize = drawState->getVertexSize();

    geo.positions()->setRectFan(rect.fLeft, rect.fTop, rect.fRight, rect.fBottom, vsize);
    combinedMatrix.mapPointsWithStride(geo.positions(), vsize, 4);

    SkRect devBounds;
    // Since we already computed the dev verts, set the bounds hint. This will help us avoid
    // unnecessary clipping in our onDraw().
    get_vertex_bounds(geo.vertices(), vsize, 4, &devBounds);

    if (localOffset >= 0) {
        GrPoint* coords = GrTCast<GrPoint*>(GrTCast<intptr_t>(geo.vertices()) + localOffset);
        coords->setRectFan(localRect->fLeft, localRect->fTop,
                           localRect->fRight, localRect->fBottom,
                           vsize);
        if (NULL != localMatrix) {
            localMatrix->mapPointsWithStride(coords, vsize, 4);
        }
    }

    if (colorOffset >= 0) {
        GrColor* vertColor = GrTCast<GrColor*>(GrTCast<intptr_t>(geo.vertices()) + colorOffset);
        for (int i = 0; i < 4; ++i) {
            *vertColor = color;
            vertColor = (GrColor*) ((intptr_t) vertColor + vsize);
        }
    }

    this->setIndexSourceToBuffer(this->getContext()->getQuadIndexBuffer());
    this->drawIndexedInstances(kTriangles_GrPrimitiveType, 1, 4, 6, &devBounds);

    // Ensure that the drawState pointer stashed above is still the active draw state.
    SkASSERT(this->drawState() == drawState);
}

bool GrInOrderDrawBuffer::quickInsideClip(const SkRect& devBounds) {
    if (!this->getDrawState().isClipState()) {
        return true;
    }
    if (kUnknown_ClipProxyState == fClipProxyState) {
        SkIRect rect;
        bool iior;
        this->getClip()->getConservativeBounds(this->getDrawState().getRenderTarget(), &rect, &iior);
        if (iior) {
            // The clip is a rect. We will remember that in fClipProxy. It is common for an edge (or
            // all edges) of the clip to be at the edge of the RT. However, we get that clipping for
            // free via the viewport. We don't want to think that clipping must be enabled in this
            // case. So we extend the clip outward from the edge to avoid these false negatives.
            fClipProxyState = kValid_ClipProxyState;
            fClipProxy = SkRect::Make(rect);

            if (fClipProxy.fLeft <= 0) {
                fClipProxy.fLeft = SK_ScalarMin;
            }
            if (fClipProxy.fTop <= 0) {
                fClipProxy.fTop = SK_ScalarMin;
            }
            if (fClipProxy.fRight >= this->getDrawState().getRenderTarget()->width()) {
                fClipProxy.fRight = SK_ScalarMax;
            }
            if (fClipProxy.fBottom >= this->getDrawState().getRenderTarget()->height()) {
                fClipProxy.fBottom = SK_ScalarMax;
            }
        } else {
            fClipProxyState = kInvalid_ClipProxyState;
        }
    }
    if (kValid_ClipProxyState == fClipProxyState) {
        return fClipProxy.contains(devBounds);
    }
    SkPoint originOffset = {SkIntToScalar(this->getClip()->fOrigin.fX),
                            SkIntToScalar(this->getClip()->fOrigin.fY)};
    SkRect clipSpaceBounds = devBounds;
    clipSpaceBounds.offset(originOffset);
    return this->getClip()->fClipStack->quickContains(clipSpaceBounds);
}
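
// Illustrative note (not from the original source): the edge extension above means a draw whose
// device bounds poke slightly past an RT-aligned clip edge still counts as "inside". For example,
// with a 100x100 render target whose conservative clip bounds are exactly (0, 0, 100, 100),
// fClipProxy becomes (SK_ScalarMin, SK_ScalarMin, SK_ScalarMax, SK_ScalarMax), so devBounds of
// (-0.5, 10, 50, 50) passes the contains() test and the clip bit can be disabled for that draw;
// the viewport clips the stray half pixel for free. The numbers are made up for the example.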

int GrInOrderDrawBuffer::concatInstancedDraw(const DrawInfo& info) {
    SkASSERT(info.isInstanced());

    const GeometrySrcState& geomSrc = this->getGeomSrc();
    const GrDrawState& drawState = this->getDrawState();

    // We only attempt to concatenate when reserved verts are used with a client-specified index
    // buffer. To make this work with client-specified VBs we'd need to know whether the VB was
    // updated between draws.
    if (kReserved_GeometrySrcType != geomSrc.fVertexSrc ||
        kBuffer_GeometrySrcType != geomSrc.fIndexSrc) {
        return 0;
    }
    // Check that the most recent command is a compatible draw that uses the same VB from the pool
    // and the same IB.
    if (kDraw_Cmd != fCmds.back()) {
        return 0;
    }

    DrawRecord* draw = &fDraws.back();
    GeometryPoolState& poolState = fGeoPoolStateStack.back();
    const GrVertexBuffer* vertexBuffer = poolState.fPoolVertexBuffer;

    if (!draw->isInstanced() ||
        draw->verticesPerInstance() != info.verticesPerInstance() ||
        draw->indicesPerInstance() != info.indicesPerInstance() ||
        draw->fVertexBuffer != vertexBuffer ||
        draw->fIndexBuffer != geomSrc.fIndexBuffer) {
        return 0;
    }
    // info does not yet account for the offset from the start of the pool's VB while the previous
    // draw record does.
    int adjustedStartVertex = poolState.fPoolStartVertex + info.startVertex();
    if (draw->startVertex() + draw->vertexCount() != adjustedStartVertex) {
        return 0;
    }

    SkASSERT(poolState.fPoolStartVertex == draw->startVertex() + draw->vertexCount());

    // How many instances can be concatenated onto draw given the size of the index buffer.
    int instancesToConcat = this->indexCountInCurrentSource() / info.indicesPerInstance();
    instancesToConcat -= draw->instanceCount();
    instancesToConcat = GrMin(instancesToConcat, info.instanceCount());

    // Update the amount of reserved vertex data actually referenced in draws.
    size_t vertexBytes = instancesToConcat * info.verticesPerInstance() *
                         drawState.getVertexSize();
    poolState.fUsedPoolVertexBytes = GrMax(poolState.fUsedPoolVertexBytes, vertexBytes);

    draw->adjustInstanceCount(instancesToConcat);
    return instancesToConcat;
}
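
// Illustrative note (not from the original source): with quad geometry (4 verts / 6 indices per
// instance) and a shared index buffer holding indices for, say, 256 quads, a previous draw that
// already covers 250 instances leaves room for 256 - 250 = 6 more. A new draw of 10 quads would
// therefore concatenate 6 instances here and record the remaining 4 as a fresh DrawRecord in
// onDraw() below. The 256-quad capacity is an invented figure; the real limit is
// indexCountInCurrentSource() / indicesPerInstance().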

class AutoClipReenable {
public:
    AutoClipReenable() : fDrawState(NULL) {}
    ~AutoClipReenable() {
        if (NULL != fDrawState) {
            fDrawState->enableState(GrDrawState::kClip_StateBit);
        }
    }
    void set(GrDrawState* drawState) {
        if (drawState->isClipState()) {
            fDrawState = drawState;
            drawState->disableState(GrDrawState::kClip_StateBit);
        }
    }
private:
    GrDrawState*    fDrawState;
};

void GrInOrderDrawBuffer::onDraw(const DrawInfo& info) {

    GeometryPoolState& poolState = fGeoPoolStateStack.back();
    const GrDrawState& drawState = this->getDrawState();
    AutoClipReenable acr;

    if (drawState.isClipState() &&
        NULL != info.getDevBounds() &&
        this->quickInsideClip(*info.getDevBounds())) {
        acr.set(this->drawState());
    }

    if (this->needsNewClip()) {
        this->recordClip();
    }
    if (this->needsNewState()) {
        this->recordState();
    }

    DrawRecord* draw;
    if (info.isInstanced()) {
        int instancesConcated = this->concatInstancedDraw(info);
        if (info.instanceCount() > instancesConcated) {
            draw = this->recordDraw(info);
            draw->adjustInstanceCount(-instancesConcated);
        } else {
            return;
        }
    } else {
        draw = this->recordDraw(info);
    }

    switch (this->getGeomSrc().fVertexSrc) {
        case kBuffer_GeometrySrcType:
            draw->fVertexBuffer = this->getGeomSrc().fVertexBuffer;
            break;
        case kReserved_GeometrySrcType: // fallthrough
        case kArray_GeometrySrcType: {
            size_t vertexBytes = (info.vertexCount() + info.startVertex()) *
                                 drawState.getVertexSize();
            poolState.fUsedPoolVertexBytes = GrMax(poolState.fUsedPoolVertexBytes, vertexBytes);
            draw->fVertexBuffer = poolState.fPoolVertexBuffer;
            draw->adjustStartVertex(poolState.fPoolStartVertex);
            break;
        }
        default:
            GrCrash("unknown geom src type");
    }
    draw->fVertexBuffer->ref();

    if (info.isIndexed()) {
        switch (this->getGeomSrc().fIndexSrc) {
            case kBuffer_GeometrySrcType:
                draw->fIndexBuffer = this->getGeomSrc().fIndexBuffer;
                break;
            case kReserved_GeometrySrcType: // fallthrough
            case kArray_GeometrySrcType: {
                size_t indexBytes = (info.indexCount() + info.startIndex()) * sizeof(uint16_t);
                poolState.fUsedPoolIndexBytes = GrMax(poolState.fUsedPoolIndexBytes, indexBytes);
                draw->fIndexBuffer = poolState.fPoolIndexBuffer;
                draw->adjustStartIndex(poolState.fPoolStartIndex);
                break;
            }
            default:
                GrCrash("unknown geom src type");
        }
        draw->fIndexBuffer->ref();
    } else {
        draw->fIndexBuffer = NULL;
    }
}
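
// Illustrative note (not from the original source): when geometry comes from the reserved pool,
// the recorded draw is rewritten in terms of the pool's backing buffer. For example, if the pool
// placed the caller's vertices at fPoolStartVertex == 512 and the caller issued a draw over
// vertices [0, 4), the DrawRecord ends up with startVertex == 512 and fUsedPoolVertexBytes grows
// to at least 4 * getVertexSize(), so flush() can bind the pool's GrVertexBuffer directly. The
// numbers are hypothetical; the adjustment itself is what onDraw() above performs.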

GrInOrderDrawBuffer::StencilPath::StencilPath() {}
GrInOrderDrawBuffer::DrawPath::DrawPath() {}
GrInOrderDrawBuffer::DrawPaths::DrawPaths() {}
GrInOrderDrawBuffer::DrawPaths::~DrawPaths() {
    if (fTransforms) {
        SkDELETE_ARRAY(fTransforms);
    }
    for (size_t i = 0; i < fPathCount; ++i) {
        fPaths[i]->unref();
    }
    SkDELETE_ARRAY(fPaths);
}

void GrInOrderDrawBuffer::onStencilPath(const GrPath* path, SkPath::FillType fill) {
    if (this->needsNewClip()) {
        this->recordClip();
    }
    // TODO: Only compare the subset of GrDrawState relevant to path stenciling?
    if (this->needsNewState()) {
        this->recordState();
    }
    StencilPath* sp = this->recordStencilPath();
    sp->fPath.reset(path);
    path->ref();
    sp->fFill = fill;
}

void GrInOrderDrawBuffer::onDrawPath(const GrPath* path,
                                     SkPath::FillType fill, const GrDeviceCoordTexture* dstCopy) {
    if (this->needsNewClip()) {
        this->recordClip();
    }
    // TODO: Only compare the subset of GrDrawState relevant to path covering?
    if (this->needsNewState()) {
        this->recordState();
    }
    DrawPath* cp = this->recordDrawPath();
    cp->fPath.reset(path);
    path->ref();
    cp->fFill = fill;
    if (NULL != dstCopy) {
        cp->fDstCopy = *dstCopy;
    }
}

void GrInOrderDrawBuffer::onDrawPaths(size_t pathCount, const GrPath** paths,
                                      const SkMatrix* transforms,
                                      SkPath::FillType fill,
                                      SkStrokeRec::Style stroke,
                                      const GrDeviceCoordTexture* dstCopy) {
    SkASSERT(pathCount);

    if (this->needsNewClip()) {
        this->recordClip();
    }
    if (this->needsNewState()) {
        this->recordState();
    }
    DrawPaths* dp = this->recordDrawPaths();
    dp->fPathCount = pathCount;
    dp->fPaths = SkNEW_ARRAY(const GrPath*, pathCount);
    memcpy(dp->fPaths, paths, sizeof(GrPath*) * pathCount);
    for (size_t i = 0; i < pathCount; ++i) {
        dp->fPaths[i]->ref();
    }

    dp->fTransforms = SkNEW_ARRAY(SkMatrix, pathCount);
    memcpy(dp->fTransforms, transforms, sizeof(SkMatrix) * pathCount);

    dp->fFill = fill;
    dp->fStroke = stroke;

    if (NULL != dstCopy) {
        dp->fDstCopy = *dstCopy;
    }
}

void GrInOrderDrawBuffer::clear(const SkIRect* rect, GrColor color,
                                bool canIgnoreRect, GrRenderTarget* renderTarget) {
    SkIRect r;
    if (NULL == renderTarget) {
        renderTarget = this->drawState()->getRenderTarget();
        SkASSERT(NULL != renderTarget);
    }
    if (NULL == rect) {
        // We could do something smart and remove previous draws and clears to
        // the current render target. If we get that smart we have to make sure
        // those draws aren't read before this clear (render-to-texture).
        r.setLTRB(0, 0, renderTarget->width(), renderTarget->height());
        rect = &r;
    }
    Clear* clr = this->recordClear();
    clr->fColor = color;
    clr->fRect = *rect;
    clr->fCanIgnoreRect = canIgnoreRect;
    clr->fRenderTarget = renderTarget;
    renderTarget->ref();
}

void GrInOrderDrawBuffer::onInstantGpuTraceEvent(const char* marker) {
    // TODO: add command to buffer
}

void GrInOrderDrawBuffer::onPushGpuTraceEvent(const char* marker) {
    // TODO: add command to buffer
}

void GrInOrderDrawBuffer::onPopGpuTraceEvent() {
    // TODO: add command to buffer
}

void GrInOrderDrawBuffer::reset() {
    SkASSERT(1 == fGeoPoolStateStack.count());
    this->resetVertexSource();
    this->resetIndexSource();
    int numDraws = fDraws.count();
    for (int d = 0; d < numDraws; ++d) {
        // We always have a VB, but not always an IB.
        SkASSERT(NULL != fDraws[d].fVertexBuffer);
        fDraws[d].fVertexBuffer->unref();
        SkSafeUnref(fDraws[d].fIndexBuffer);
    }
    fCmds.reset();
    fDraws.reset();
    fStencilPaths.reset();
    fDrawPath.reset();
    fDrawPaths.reset();
    fStates.reset();
    fClears.reset();
    fVertexPool.reset();
    fIndexPool.reset();
    fClips.reset();
    fClipOrigins.reset();
    fCopySurfaces.reset();
    fClipSet = true;
}

void GrInOrderDrawBuffer::flush() {
    if (fFlushing) {
        return;
    }

    SkASSERT(kReserved_GeometrySrcType != this->getGeomSrc().fVertexSrc);
    SkASSERT(kReserved_GeometrySrcType != this->getGeomSrc().fIndexSrc);

    int numCmds = fCmds.count();
    if (0 == numCmds) {
        return;
    }

    GrAutoTRestore<bool> flushRestore(&fFlushing);
    fFlushing = true;

    fVertexPool.unlock();
    fIndexPool.unlock();

    GrDrawTarget::AutoClipRestore acr(fDstGpu);
    AutoGeometryAndStatePush agasp(fDstGpu, kPreserve_ASRInit);

    GrDrawState playbackState;
    GrDrawState* prevDrawState = fDstGpu->drawState();
    prevDrawState->ref();
    fDstGpu->setDrawState(&playbackState);

    GrClipData clipData;

    int currState       = 0;
    int currClip        = 0;
    int currClear       = 0;
    int currDraw        = 0;
    int currStencilPath = 0;
    int currDrawPath    = 0;
    int currDrawPaths   = 0;
    int currCopySurface = 0;

    for (int c = 0; c < numCmds; ++c) {
        switch (fCmds[c]) {
            case kDraw_Cmd: {
                const DrawRecord& draw = fDraws[currDraw];
                fDstGpu->setVertexSourceToBuffer(draw.fVertexBuffer);
                if (draw.isIndexed()) {
                    fDstGpu->setIndexSourceToBuffer(draw.fIndexBuffer);
                }
                fDstGpu->executeDraw(draw);

                ++currDraw;
                break;
            }
            case kStencilPath_Cmd: {
                const StencilPath& sp = fStencilPaths[currStencilPath];
                fDstGpu->stencilPath(sp.fPath.get(), sp.fFill);
                ++currStencilPath;
                break;
            }
            case kDrawPath_Cmd: {
                const DrawPath& cp = fDrawPath[currDrawPath];
                fDstGpu->executeDrawPath(cp.fPath.get(), cp.fFill,
                                         NULL != cp.fDstCopy.texture() ? &cp.fDstCopy : NULL);
                ++currDrawPath;
                break;
            }
            case kDrawPaths_Cmd: {
                DrawPaths& dp = fDrawPaths[currDrawPaths];
                const GrDeviceCoordTexture* dstCopy =
                    NULL != dp.fDstCopy.texture() ? &dp.fDstCopy : NULL;
                fDstGpu->executeDrawPaths(dp.fPathCount, dp.fPaths,
                                          dp.fTransforms, dp.fFill, dp.fStroke,
                                          dstCopy);
                ++currDrawPaths;
                break;
            }
            case kSetState_Cmd:
                fStates[currState].restoreTo(&playbackState);
                ++currState;
                break;
            case kSetClip_Cmd:
                clipData.fClipStack = &fClips[currClip];
                clipData.fOrigin = fClipOrigins[currClip];
                fDstGpu->setClip(&clipData);
                ++currClip;
                break;
            case kClear_Cmd:
                fDstGpu->clear(&fClears[currClear].fRect,
                               fClears[currClear].fColor,
                               fClears[currClear].fCanIgnoreRect,
                               fClears[currClear].fRenderTarget);
                ++currClear;
                break;
            case kCopySurface_Cmd:
                fDstGpu->copySurface(fCopySurfaces[currCopySurface].fDst.get(),
                                     fCopySurfaces[currCopySurface].fSrc.get(),
                                     fCopySurfaces[currCopySurface].fSrcRect,
                                     fCopySurfaces[currCopySurface].fDstPoint);
                ++currCopySurface;
                break;
        }
    }
    // We should have consumed all the states, clips, etc.
    SkASSERT(fStates.count() == currState);
    SkASSERT(fClips.count() == currClip);
    SkASSERT(fClipOrigins.count() == currClip);
    SkASSERT(fClears.count() == currClear);
    SkASSERT(fDraws.count() == currDraw);
    SkASSERT(fCopySurfaces.count() == currCopySurface);

    fDstGpu->setDrawState(prevDrawState);
    prevDrawState->unref();
    this->reset();
    ++fDrawID;
}
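
// Illustrative note (not from the original source): the buffer is effectively a tagged command
// stream. fCmds holds one opcode per recorded operation, and each opcode type has its own payload
// array (fDraws, fStencilPaths, fDrawPath, fDrawPaths, fStates, fClips/fClipOrigins, fClears,
// fCopySurfaces) that flush() walks with an independent cursor, e.g.:
//
//   fCmds:   [SetClip, SetState, Draw, Draw, Clear, SetState, Draw]
//   fClips:  [C0]    fStates: [S0, S1]    fDraws: [D0, D1, D2]    fClears: [R0]
//
// Playback therefore replays operations in the exact order they were recorded while keeping each
// payload array densely packed.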

bool GrInOrderDrawBuffer::onCopySurface(GrSurface* dst,
                                        GrSurface* src,
                                        const SkIRect& srcRect,
                                        const SkIPoint& dstPoint) {
    if (fDstGpu->canCopySurface(dst, src, srcRect, dstPoint)) {
        CopySurface* cs = this->recordCopySurface();
        cs->fDst.reset(SkRef(dst));
        cs->fSrc.reset(SkRef(src));
        cs->fSrcRect = srcRect;
        cs->fDstPoint = dstPoint;
        return true;
    } else {
        return false;
    }
}

bool GrInOrderDrawBuffer::onCanCopySurface(GrSurface* dst,
                                           GrSurface* src,
                                           const SkIRect& srcRect,
                                           const SkIPoint& dstPoint) {
    return fDstGpu->canCopySurface(dst, src, srcRect, dstPoint);
}

void GrInOrderDrawBuffer::initCopySurfaceDstDesc(const GrSurface* src, GrTextureDesc* desc) {
    fDstGpu->initCopySurfaceDstDesc(src, desc);
}

void GrInOrderDrawBuffer::willReserveVertexAndIndexSpace(int vertexCount,
                                                         int indexCount) {
    // We use geometryHints() to know whether to flush the draw buffer. We
    // can't flush if we are inside an unbalanced pushGeometrySource.
    // Moreover, flushing blows away vertex and index data that was
    // previously reserved. So if the vertex or index data is pulled from
    // reserved space and won't be released by this request then we can't
    // flush.
    bool insideGeoPush = fGeoPoolStateStack.count() > 1;

    bool unreleasedVertexSpace =
        !vertexCount &&
        kReserved_GeometrySrcType == this->getGeomSrc().fVertexSrc;

    bool unreleasedIndexSpace =
        !indexCount &&
        kReserved_GeometrySrcType == this->getGeomSrc().fIndexSrc;

    // We don't want to finalize any reserved geom on the target since
    // we don't know that the client has finished writing to it.
    bool targetHasReservedGeom = fDstGpu->hasReservedVerticesOrIndices();

    int vcount = vertexCount;
    int icount = indexCount;

    if (!insideGeoPush &&
        !unreleasedVertexSpace &&
        !unreleasedIndexSpace &&
        !targetHasReservedGeom &&
        this->geometryHints(&vcount, &icount)) {

        this->flush();
    }
}

bool GrInOrderDrawBuffer::geometryHints(int* vertexCount,
                                        int* indexCount) const {
    // We will recommend a flush if the data could fit in a single
    // preallocated buffer but none are left and it can't fit
    // in the current buffer (which may not be prealloced).
    bool flush = false;
    if (NULL != indexCount) {
        int32_t currIndices = fIndexPool.currentBufferIndices();
        if (*indexCount > currIndices &&
            (!fIndexPool.preallocatedBuffersRemaining() &&
             *indexCount <= fIndexPool.preallocatedBufferIndices())) {

            flush = true;
        }
        *indexCount = currIndices;
    }
    if (NULL != vertexCount) {
        size_t vertexSize = this->getDrawState().getVertexSize();
        int32_t currVertices = fVertexPool.currentBufferVertices(vertexSize);
        if (*vertexCount > currVertices &&
            (!fVertexPool.preallocatedBuffersRemaining() &&
             *vertexCount <= fVertexPool.preallocatedBufferVertices(vertexSize))) {

            flush = true;
        }
        *vertexCount = currVertices;
    }
    return flush;
}
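
// Illustrative note (not from the original source): geometryHints() is an in/out query. The caller
// passes the counts it is about to request; on return each count has been rewritten to what still
// fits in the pool's current buffer, and the return value says whether flushing first would let
// the request land in a fresh preallocated buffer. For instance, if the vertex pool's current
// buffer has room for 100 more vertices of this size, a preallocated buffer would hold 500, and
// none remain unclaimed, then a request for 300 vertices returns true (flush recommended) and
// rewrites *vertexCount to 100. A hypothetical caller sketch, mirroring
// willReserveVertexAndIndexSpace() above (names invented for the example):
//
//   int vcount = neededVertexCount;   // worst-case request
//   int icount = neededIndexCount;
//   if (drawBuffer->geometryHints(&vcount, &icount)) {
//       drawBuffer->flush();          // only safe when no reserved geometry is outstanding
//   }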

bool GrInOrderDrawBuffer::onReserveVertexSpace(size_t vertexSize,
                                               int vertexCount,
                                               void** vertices) {
    GeometryPoolState& poolState = fGeoPoolStateStack.back();
    SkASSERT(vertexCount > 0);
    SkASSERT(NULL != vertices);
    SkASSERT(0 == poolState.fUsedPoolVertexBytes);

    *vertices = fVertexPool.makeSpace(vertexSize,
                                      vertexCount,
                                      &poolState.fPoolVertexBuffer,
                                      &poolState.fPoolStartVertex);
    return NULL != *vertices;
}

bool GrInOrderDrawBuffer::onReserveIndexSpace(int indexCount, void** indices) {
    GeometryPoolState& poolState = fGeoPoolStateStack.back();
    SkASSERT(indexCount > 0);
    SkASSERT(NULL != indices);
    SkASSERT(0 == poolState.fUsedPoolIndexBytes);

    *indices = fIndexPool.makeSpace(indexCount,
                                    &poolState.fPoolIndexBuffer,
                                    &poolState.fPoolStartIndex);
    return NULL != *indices;
}

void GrInOrderDrawBuffer::releaseReservedVertexSpace() {
    GeometryPoolState& poolState = fGeoPoolStateStack.back();
    const GeometrySrcState& geoSrc = this->getGeomSrc();

    // If we get a release vertex space call then our current source should either be reserved
    // or array (which we copied into reserved space).
    SkASSERT(kReserved_GeometrySrcType == geoSrc.fVertexSrc ||
             kArray_GeometrySrcType == geoSrc.fVertexSrc);

    // When the caller reserved vertex buffer space we gave it back a pointer
    // provided by the vertex buffer pool. At each draw we tracked the largest
    // offset into the pool's pointer that was referenced. Now we return to the
    // pool any portion at the tail of the allocation that no draw referenced.
    size_t reservedVertexBytes = geoSrc.fVertexSize * geoSrc.fVertexCount;
    fVertexPool.putBack(reservedVertexBytes -
                        poolState.fUsedPoolVertexBytes);
    poolState.fUsedPoolVertexBytes = 0;
    poolState.fPoolVertexBuffer = NULL;
    poolState.fPoolStartVertex = 0;
}

void GrInOrderDrawBuffer::releaseReservedIndexSpace() {
    GeometryPoolState& poolState = fGeoPoolStateStack.back();
    const GeometrySrcState& geoSrc = this->getGeomSrc();

    // If we get a release index space call then our current source should either be reserved
    // or array (which we copied into reserved space).
    SkASSERT(kReserved_GeometrySrcType == geoSrc.fIndexSrc ||
             kArray_GeometrySrcType == geoSrc.fIndexSrc);

    // Similar to releaseReservedVertexSpace we return any unused portion at
    // the tail.
    size_t reservedIndexBytes = sizeof(uint16_t) * geoSrc.fIndexCount;
    fIndexPool.putBack(reservedIndexBytes - poolState.fUsedPoolIndexBytes);
    poolState.fUsedPoolIndexBytes = 0;
    poolState.fPoolIndexBuffer = NULL;
    poolState.fPoolStartIndex = 0;
}

void GrInOrderDrawBuffer::onSetVertexSourceToArray(const void* vertexArray,
                                                   int vertexCount) {

    GeometryPoolState& poolState = fGeoPoolStateStack.back();
    SkASSERT(0 == poolState.fUsedPoolVertexBytes);
#ifdef SK_DEBUG
    bool success =
#endif
    fVertexPool.appendVertices(this->getVertexSize(),
                               vertexCount,
                               vertexArray,
                               &poolState.fPoolVertexBuffer,
                               &poolState.fPoolStartVertex);
    GR_DEBUGASSERT(success);
}

void GrInOrderDrawBuffer::onSetIndexSourceToArray(const void* indexArray,
                                                  int indexCount) {
    GeometryPoolState& poolState = fGeoPoolStateStack.back();
    SkASSERT(0 == poolState.fUsedPoolIndexBytes);
#ifdef SK_DEBUG
    bool success =
#endif
    fIndexPool.appendIndices(indexCount,
                             indexArray,
                             &poolState.fPoolIndexBuffer,
                             &poolState.fPoolStartIndex);
    GR_DEBUGASSERT(success);
}

void GrInOrderDrawBuffer::releaseVertexArray() {
    // When the client provides an array as the vertex source we handle it
    // by copying their array into reserved space.
    this->GrInOrderDrawBuffer::releaseReservedVertexSpace();
}

void GrInOrderDrawBuffer::releaseIndexArray() {
    // When the client provides an array as the index source we handle it
    // by copying their array into reserved space.
    this->GrInOrderDrawBuffer::releaseReservedIndexSpace();
}

void GrInOrderDrawBuffer::geometrySourceWillPush() {
    GeometryPoolState& poolState = fGeoPoolStateStack.push_back();
    poolState.fUsedPoolVertexBytes = 0;
    poolState.fUsedPoolIndexBytes = 0;
#ifdef SK_DEBUG
    poolState.fPoolVertexBuffer = (GrVertexBuffer*)~0;
    poolState.fPoolStartVertex = ~0;
    poolState.fPoolIndexBuffer = (GrIndexBuffer*)~0;
    poolState.fPoolStartIndex = ~0;
#endif
}

void GrInOrderDrawBuffer::geometrySourceWillPop(
                                        const GeometrySrcState& restoredState) {
    SkASSERT(fGeoPoolStateStack.count() > 1);
    fGeoPoolStateStack.pop_back();
    GeometryPoolState& poolState = fGeoPoolStateStack.back();
    // We have to assume that any slack we had in our vertex/index data
    // is now unreleasable because data may have been appended later in the
    // pool.
    if (kReserved_GeometrySrcType == restoredState.fVertexSrc ||
        kArray_GeometrySrcType == restoredState.fVertexSrc) {
        poolState.fUsedPoolVertexBytes = restoredState.fVertexSize * restoredState.fVertexCount;
    }
    if (kReserved_GeometrySrcType == restoredState.fIndexSrc ||
        kArray_GeometrySrcType == restoredState.fIndexSrc) {
        poolState.fUsedPoolIndexBytes = sizeof(uint16_t) * restoredState.fIndexCount;
    }
}
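
// Illustrative note (not from the original source): the pop must be pessimistic about slack in the
// restored (outer) source. If the outer source reserved 100 vertices and its draws had referenced
// only 60 of them before the push, the inner source may since have appended its own data right
// after that reservation in the pool, so the 40-vertex tail can no longer be put back; the code
// above therefore marks the outer source's full reservation as used. The 100/60 split is invented
// for the example; the rule is what matters.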

bool GrInOrderDrawBuffer::needsNewState() const {
    return fStates.empty() || !fStates.back().isEqual(this->getDrawState());
}

bool GrInOrderDrawBuffer::needsNewClip() const {
    SkASSERT(fClips.count() == fClipOrigins.count());
    if (this->getDrawState().isClipState()) {
        if (fClipSet &&
            (fClips.empty() ||
             fClips.back() != *this->getClip()->fClipStack ||
             fClipOrigins.back() != this->getClip()->fOrigin)) {
            return true;
        }
    }
    return false;
}

void GrInOrderDrawBuffer::recordClip() {
    fClips.push_back() = *this->getClip()->fClipStack;
    fClipOrigins.push_back() = this->getClip()->fOrigin;
    fClipSet = false;
    fCmds.push_back(kSetClip_Cmd);
}

void GrInOrderDrawBuffer::recordState() {
    fStates.push_back().saveFrom(this->getDrawState());
    fCmds.push_back(kSetState_Cmd);
}

GrInOrderDrawBuffer::DrawRecord* GrInOrderDrawBuffer::recordDraw(const DrawInfo& info) {
    fCmds.push_back(kDraw_Cmd);
    return &fDraws.push_back(info);
}

GrInOrderDrawBuffer::StencilPath* GrInOrderDrawBuffer::recordStencilPath() {
    fCmds.push_back(kStencilPath_Cmd);
    return &fStencilPaths.push_back();
}

GrInOrderDrawBuffer::DrawPath* GrInOrderDrawBuffer::recordDrawPath() {
    fCmds.push_back(kDrawPath_Cmd);
    return &fDrawPath.push_back();
}

GrInOrderDrawBuffer::DrawPaths* GrInOrderDrawBuffer::recordDrawPaths() {
    fCmds.push_back(kDrawPaths_Cmd);
    return &fDrawPaths.push_back();
}

GrInOrderDrawBuffer::Clear* GrInOrderDrawBuffer::recordClear() {
    fCmds.push_back(kClear_Cmd);
    return &fClears.push_back();
}

GrInOrderDrawBuffer::CopySurface* GrInOrderDrawBuffer::recordCopySurface() {
    fCmds.push_back(kCopySurface_Cmd);
    return &fCopySurfaces.push_back();
}

void GrInOrderDrawBuffer::clipWillBeSet(const GrClipData* newClipData) {
    INHERITED::clipWillBeSet(newClipData);
    fClipSet = true;
    fClipProxyState = kUnknown_ClipProxyState;
}