GrInOrderDrawBuffer.cpp revision d686ac77c2c485c4a3302eda9c1de597a6f8c568
/*
 * Copyright 2011 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */


#include "GrInOrderDrawBuffer.h"
#include "GrBufferAllocPool.h"
#include "GrGpu.h"
#include "GrIndexBuffer.h"
#include "GrPath.h"
#include "GrRenderTarget.h"
#include "GrTexture.h"
#include "GrVertexBuffer.h"

GrInOrderDrawBuffer::GrInOrderDrawBuffer(const GrGpu* gpu,
                                         GrVertexBufferAllocPool* vertexPool,
                                         GrIndexBufferAllocPool* indexPool)
    : fAutoFlushTarget(NULL)
    , fClipSet(true)
    , fClipProxyState(kUnknown_ClipProxyState)
    , fVertexPool(*vertexPool)
    , fIndexPool(*indexPool)
    , fFlushing(false) {

    fGpu.reset(SkRef(gpu));
    fCaps = gpu->getCaps();

    GrAssert(NULL != vertexPool);
    GrAssert(NULL != indexPool);

    GeometryPoolState& poolState = fGeoPoolStateStack.push_back();
    poolState.fUsedPoolVertexBytes = 0;
    poolState.fUsedPoolIndexBytes = 0;
#if GR_DEBUG
    poolState.fPoolVertexBuffer = (GrVertexBuffer*)~0;
    poolState.fPoolStartVertex = ~0;
    poolState.fPoolIndexBuffer = (GrIndexBuffer*)~0;
    poolState.fPoolStartIndex = ~0;
#endif
    this->reset();
}

GrInOrderDrawBuffer::~GrInOrderDrawBuffer() {
    this->reset();
    // This must be called before the GrDrawTarget destructor.
    this->releaseGeometry();
    GrSafeUnref(fAutoFlushTarget);
}

////////////////////////////////////////////////////////////////////////////////

namespace {
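// Computes the bounding rect of the vertex positions. Assumes each vertex
// begins with a GrPoint position and that consecutive vertices are vertexSize
// bytes apart.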
void get_vertex_bounds(const void* vertices,
                       size_t vertexSize,
                       int vertexCount,
                       SkRect* bounds) {
    GrAssert(vertexSize >= sizeof(GrPoint));
    GrAssert(vertexCount > 0);
    const GrPoint* point = static_cast<const GrPoint*>(vertices);
    bounds->fLeft = bounds->fRight = point->fX;
    bounds->fTop = bounds->fBottom = point->fY;
    for (int i = 1; i < vertexCount; ++i) {
        point = reinterpret_cast<const GrPoint*>(reinterpret_cast<intptr_t>(point) + vertexSize);
        bounds->growToInclude(point->fX, point->fY);
    }
}
}  // namespace

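// Records a rect as an instanced quad draw. Positions are pre-transformed to
// device space and, when possible, the color is emitted per-vertex so that
// consecutive rects can be batched in onDraw() despite differing view matrices
// or colors.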
void GrInOrderDrawBuffer::drawRect(const GrRect& rect,
                                   const SkMatrix* matrix,
                                   const GrRect* srcRects[],
                                   const SkMatrix* srcMatrices[]) {

    GrVertexLayout layout = 0;
    GrDrawState::AutoColorRestore acr;
    GrColor color = this->drawState()->getColor();

    // Using per-vertex colors allows batching across colors. (A lot of rects in a row differing
    // only in color is a common occurrence in tables). However, having per-vertex colors disables
    // blending optimizations because we don't know if the color will be solid or not. These
    // optimizations help determine whether coverage and color can be blended correctly when
    // dual-source blending isn't available. This comes into play when there is coverage. If colors
    // were a stage it could take a hint that every vertex's color will be opaque.
    if (this->getCaps().dualSourceBlendingSupport() ||
        this->getDrawState().hasSolidCoverage(this->getGeomSrc().fVertexLayout)) {
        layout |= GrDrawState::kColor_VertexLayoutBit;
        // We set the draw state's color to white here so that any batching performed in onDraw()
        // won't fail GrDrawState's op== comparison due to a color mismatch. TODO: Once vertex
        // layout is owned by GrDrawState it should skip comparing the constant color in its op==
        // when the kColor layout bit is set; then we can remove this.
        acr.set(this->drawState(), 0xFFFFFFFF);
    }

    uint32_t explicitCoordMask = 0;
    if (NULL != srcRects) {
        for (int s = 0; s < GrDrawState::kNumStages; ++s) {
            int numTC = 0;
            if (NULL != srcRects[s]) {
                layout |= GrDrawState::StageTexCoordVertexLayoutBit(s, numTC);
                ++numTC;
                explicitCoordMask |= (1 << s);
            }
        }
    }

    AutoReleaseGeometry geo(this, layout, 4, 0);
    if (!geo.succeeded()) {
        GrPrintf("Failed to get space for vertices!\n");
        return;
    }

    // Go to device coords to allow batching across matrix changes
    SkMatrix combinedMatrix;
    if (NULL != matrix) {
        combinedMatrix = *matrix;
    } else {
        combinedMatrix.reset();
    }
    combinedMatrix.postConcat(this->drawState()->getViewMatrix());
    // When the caller has provided an explicit source rect for a stage then we don't want to
    // modify that stage's matrix. Otherwise, if the effect is generating its source rect from
    // the vertex positions, then we have to account for the view matrix change.
    GrDrawState::AutoDeviceCoordDraw adcd(this->drawState(), explicitCoordMask);
    if (!adcd.succeeded()) {
        return;
    }

    int stageOffsets[GrDrawState::kNumStages], colorOffset;
    int vsize = GrDrawState::VertexSizeAndOffsetsByStage(layout, stageOffsets,
                                                         &colorOffset, NULL, NULL);

    geo.positions()->setRectFan(rect.fLeft, rect.fTop, rect.fRight, rect.fBottom, vsize);
    combinedMatrix.mapPointsWithStride(geo.positions(), vsize, 4);

    SkRect devBounds;
    // Since we already computed the dev verts, set the bounds hint. This will help us avoid
    // unnecessary clipping in our onDraw().
    get_vertex_bounds(geo.vertices(), vsize, 4, &devBounds);

    for (int i = 0; i < GrDrawState::kNumStages; ++i) {
        if (explicitCoordMask & (1 << i)) {
            GrAssert(0 != stageOffsets[i]);
            GrPoint* coords = GrTCast<GrPoint*>(GrTCast<intptr_t>(geo.vertices()) +
                                                stageOffsets[i]);
            coords->setRectFan(srcRects[i]->fLeft, srcRects[i]->fTop,
                               srcRects[i]->fRight, srcRects[i]->fBottom,
                               vsize);
            if (NULL != srcMatrices && NULL != srcMatrices[i]) {
                srcMatrices[i]->mapPointsWithStride(coords, vsize, 4);
            }
        } else {
            GrAssert(0 == stageOffsets[i]);
        }
    }

    if (colorOffset >= 0) {
        GrColor* vertColor = GrTCast<GrColor*>(GrTCast<intptr_t>(geo.vertices()) + colorOffset);
        for (int i = 0; i < 4; ++i) {
            *vertColor = color;
            vertColor = (GrColor*) ((intptr_t) vertColor + vsize);
        }
    }

    this->setIndexSourceToBuffer(fGpu->getQuadIndexBuffer());
    this->drawIndexedInstances(kTriangles_GrPrimitiveType, 1, 4, 6, &devBounds);
}

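// Conservatively determines whether devBounds lies entirely inside the current
// clip. Draws that pass this test have the clip state bit temporarily disabled
// in onDraw() (via AutoClipReenable), avoiding unnecessary clip work during
// playback.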
bool GrInOrderDrawBuffer::quickInsideClip(const SkRect& devBounds) {
    if (!this->getDrawState().isClipState()) {
        return true;
    }
    if (kUnknown_ClipProxyState == fClipProxyState) {
        SkIRect rect;
        bool iior;
        this->getClip()->getConservativeBounds(this->getDrawState().getRenderTarget(), &rect, &iior);
        if (iior) {
            // The clip is a rect. We will remember it in fClipProxy. It is common for an edge (or
            // all edges) of the clip to be at the edge of the RT. However, we get that clipping for
            // free via the viewport, so we don't want to conclude that clipping must be enabled in
            // this case. We extend the clip outward from the edge to avoid these false negatives.
            fClipProxyState = kValid_ClipProxyState;
            fClipProxy = SkRect::MakeFromIRect(rect);

            if (fClipProxy.fLeft <= 0) {
                fClipProxy.fLeft = SK_ScalarMin;
            }
            if (fClipProxy.fTop <= 0) {
                fClipProxy.fTop = SK_ScalarMin;
            }
            if (fClipProxy.fRight >= this->getDrawState().getRenderTarget()->width()) {
                fClipProxy.fRight = SK_ScalarMax;
            }
            if (fClipProxy.fBottom >= this->getDrawState().getRenderTarget()->height()) {
                fClipProxy.fBottom = SK_ScalarMax;
            }
        } else {
            fClipProxyState = kInvalid_ClipProxyState;
        }
    }
    if (kValid_ClipProxyState == fClipProxyState) {
        return fClipProxy.contains(devBounds);
    }
    SkPoint originOffset = {SkIntToScalar(this->getClip()->fOrigin.fX),
                            SkIntToScalar(this->getClip()->fOrigin.fY)};
    SkRect clipSpaceBounds = devBounds;
    clipSpaceBounds.offset(originOffset);
    return this->getClip()->fClipStack->quickContains(clipSpaceBounds);
}

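// Attempts to extend the most recently recorded draw with the instances
// described by info. Returns the number of instances concatenated (zero if the
// draws are not compatible).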
int GrInOrderDrawBuffer::concatInstancedDraw(const DrawInfo& info) {
    GrAssert(info.isInstanced());

    const GeometrySrcState& geomSrc = this->getGeomSrc();

    // We only attempt to concatenate when reserved vertices are used with a client-specified
    // index buffer. To make this work with client-specified VBs we'd need to know if the VB was
    // updated between draws.
    if (kReserved_GeometrySrcType != geomSrc.fVertexSrc ||
        kBuffer_GeometrySrcType != geomSrc.fIndexSrc) {
        return 0;
    }
    // Check that the most recent command was a draw and that it is compatible (same VB from the
    // pool and same IB).
    if (kDraw_Cmd != fCmds.back()) {
        return 0;
    }

    DrawRecord* draw = &fDraws.back();
    GeometryPoolState& poolState = fGeoPoolStateStack.back();
    const GrVertexBuffer* vertexBuffer = poolState.fPoolVertexBuffer;

    if (!draw->isInstanced() ||
        draw->verticesPerInstance() != info.verticesPerInstance() ||
        draw->indicesPerInstance() != info.indicesPerInstance() ||
        draw->fVertexBuffer != vertexBuffer ||
        draw->fIndexBuffer != geomSrc.fIndexBuffer ||
        draw->fVertexLayout != geomSrc.fVertexLayout) {
        return 0;
    }
    // info does not yet account for the offset from the start of the pool's VB while the previous
    // draw record does.
    int adjustedStartVertex = poolState.fPoolStartVertex + info.startVertex();
    if (draw->startVertex() + draw->vertexCount() != adjustedStartVertex) {
        return 0;
    }

    GrAssert(poolState.fPoolStartVertex == draw->startVertex() + draw->vertexCount());

    // how many instances can be concat'ed onto draw given the size of the index buffer
    int instancesToConcat = this->indexCountInCurrentSource() / info.indicesPerInstance();
    instancesToConcat -= draw->instanceCount();
    instancesToConcat = GrMin(instancesToConcat, info.instanceCount());

    // update the amount of reserved vertex data actually referenced in draws
    size_t vertexBytes = instancesToConcat * info.verticesPerInstance() *
                         GrDrawState::VertexSize(draw->fVertexLayout);
    poolState.fUsedPoolVertexBytes = GrMax(poolState.fUsedPoolVertexBytes, vertexBytes);

    draw->adjustInstanceCount(instancesToConcat);
    return instancesToConcat;
}

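// RAII helper: set() disables the clip state bit on a draw state (if it was
// enabled), and the destructor re-enables it.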
class AutoClipReenable {
public:
    AutoClipReenable() : fDrawState(NULL) {}
    ~AutoClipReenable() {
        if (NULL != fDrawState) {
            fDrawState->enableState(GrDrawState::kClip_StateBit);
        }
    }
    void set(GrDrawState* drawState) {
        if (drawState->isClipState()) {
            fDrawState = drawState;
            drawState->disableState(GrDrawState::kClip_StateBit);
        }
    }
private:
    GrDrawState*    fDrawState;
};

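// Records a draw command. The draw is either merged into the previous instanced
// draw or appended as a new DrawRecord, preceded by any clip/state commands
// needed to reproduce the current draw state at playback.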
void GrInOrderDrawBuffer::onDraw(const DrawInfo& info) {

    GeometryPoolState& poolState = fGeoPoolStateStack.back();
    AutoClipReenable acr;

    if (this->getDrawState().isClipState() &&
        NULL != info.getDevBounds() &&
        this->quickInsideClip(*info.getDevBounds())) {
        acr.set(this->drawState());
    }

    if (this->needsNewClip()) {
        this->recordClip();
    }
    if (this->needsNewState()) {
        this->recordState();
    }

    DrawRecord* draw;
    if (info.isInstanced()) {
        int instancesConcated = this->concatInstancedDraw(info);
        if (info.instanceCount() > instancesConcated) {
            draw = this->recordDraw(info);
            draw->adjustInstanceCount(-instancesConcated);
        } else {
            return;
        }
    } else {
        draw = this->recordDraw(info);
    }
    draw->fVertexLayout = this->getVertexLayout();

    switch (this->getGeomSrc().fVertexSrc) {
        case kBuffer_GeometrySrcType:
            draw->fVertexBuffer = this->getGeomSrc().fVertexBuffer;
            break;
        case kReserved_GeometrySrcType: // fallthrough
        case kArray_GeometrySrcType: {
            size_t vertexBytes = (info.vertexCount() + info.startVertex()) *
                                 GrDrawState::VertexSize(draw->fVertexLayout);
            poolState.fUsedPoolVertexBytes = GrMax(poolState.fUsedPoolVertexBytes, vertexBytes);
            draw->fVertexBuffer = poolState.fPoolVertexBuffer;
            draw->adjustStartVertex(poolState.fPoolStartVertex);
            break;
        }
        default:
            GrCrash("unknown geom src type");
    }
    draw->fVertexBuffer->ref();

    if (info.isIndexed()) {
        switch (this->getGeomSrc().fIndexSrc) {
            case kBuffer_GeometrySrcType:
                draw->fIndexBuffer = this->getGeomSrc().fIndexBuffer;
                break;
            case kReserved_GeometrySrcType: // fallthrough
            case kArray_GeometrySrcType: {
                size_t indexBytes = (info.indexCount() + info.startIndex()) * sizeof(uint16_t);
                poolState.fUsedPoolIndexBytes = GrMax(poolState.fUsedPoolIndexBytes, indexBytes);
                draw->fIndexBuffer = poolState.fPoolIndexBuffer;
                draw->adjustStartIndex(poolState.fPoolStartIndex);
                break;
            }
            default:
                GrCrash("unknown geom src type");
        }
        draw->fIndexBuffer->ref();
    } else {
        draw->fIndexBuffer = NULL;
    }
}

GrInOrderDrawBuffer::StencilPath::StencilPath() : fStroke(SkStrokeRec::kFill_InitStyle) {}

void GrInOrderDrawBuffer::onStencilPath(const GrPath* path, const SkStrokeRec& stroke,
                                        SkPath::FillType fill) {
    if (this->needsNewClip()) {
        this->recordClip();
    }
    // Only compare the subset of GrDrawState relevant to path stenciling?
    if (this->needsNewState()) {
        this->recordState();
    }
    StencilPath* sp = this->recordStencilPath();
    sp->fPath.reset(path);
    path->ref();
    sp->fFill = fill;
    sp->fStroke = stroke;
}

void GrInOrderDrawBuffer::clear(const GrIRect* rect, GrColor color, GrRenderTarget* renderTarget) {
    GrIRect r;
    if (NULL == renderTarget) {
        renderTarget = this->drawState()->getRenderTarget();
        GrAssert(NULL != renderTarget);
    }
    if (NULL == rect) {
        // We could do something smart and remove previous draws and clears to
        // the current render target. If we get that smart we have to make sure
        // those draws aren't read before this clear (render-to-texture).
        r.setLTRB(0, 0, renderTarget->width(), renderTarget->height());
        rect = &r;
    }
    Clear* clr = this->recordClear();
    clr->fColor = color;
    clr->fRect = *rect;
    clr->fRenderTarget = renderTarget;
    renderTarget->ref();
}

void GrInOrderDrawBuffer::reset() {
    GrAssert(1 == fGeoPoolStateStack.count());
    this->resetVertexSource();
    this->resetIndexSource();
    int numDraws = fDraws.count();
    for (int d = 0; d < numDraws; ++d) {
        // we always have a VB, but not always an IB
        GrAssert(NULL != fDraws[d].fVertexBuffer);
        fDraws[d].fVertexBuffer->unref();
        GrSafeUnref(fDraws[d].fIndexBuffer);
    }
    fCmds.reset();
    fDraws.reset();
    fStencilPaths.reset();
    fStates.reset();
    fClears.reset();
    fVertexPool.reset();
    fIndexPool.reset();
    fClips.reset();
    fClipOrigins.reset();
    fClipSet = true;
}

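// Plays the recorded commands back into target in order and then resets this
// buffer. Returns false if there was nothing to flush.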
bool GrInOrderDrawBuffer::flushTo(GrDrawTarget* target) {
    GrAssert(kReserved_GeometrySrcType != this->getGeomSrc().fVertexSrc);
    GrAssert(kReserved_GeometrySrcType != this->getGeomSrc().fIndexSrc);

    GrAssert(NULL != target);
    GrAssert(target != this); // flushing into ourselves is not supported

    int numCmds = fCmds.count();
    if (0 == numCmds) {
        return false;
    }

    fVertexPool.unlock();
    fIndexPool.unlock();

    GrDrawTarget::AutoClipRestore acr(target);
    AutoGeometryPush agp(target);

    GrDrawState playbackState;
    GrDrawState* prevDrawState = target->drawState();
    prevDrawState->ref();
    target->setDrawState(&playbackState);

    GrClipData clipData;

    int currState       = 0;
    int currClip        = 0;
    int currClear       = 0;
    int currDraw        = 0;
    int currStencilPath = 0;

    for (int c = 0; c < numCmds; ++c) {
        switch (fCmds[c]) {
            case kDraw_Cmd: {
                const DrawRecord& draw = fDraws[currDraw];
                target->setVertexSourceToBuffer(draw.fVertexLayout, draw.fVertexBuffer);
                if (draw.isIndexed()) {
                    target->setIndexSourceToBuffer(draw.fIndexBuffer);
                }
                target->executeDraw(draw);

                ++currDraw;
                break;
            }
            case kStencilPath_Cmd: {
                const StencilPath& sp = fStencilPaths[currStencilPath];
                target->stencilPath(sp.fPath.get(), sp.fStroke, sp.fFill);
                ++currStencilPath;
                break;
            }
            case kSetState_Cmd:
                fStates[currState].restoreTo(&playbackState);
                ++currState;
                break;
            case kSetClip_Cmd:
                clipData.fClipStack = &fClips[currClip];
                clipData.fOrigin = fClipOrigins[currClip];
                target->setClip(&clipData);
                ++currClip;
                break;
            case kClear_Cmd:
                target->clear(&fClears[currClear].fRect,
                              fClears[currClear].fColor,
                              fClears[currClear].fRenderTarget);
                ++currClear;
                break;
        }
    }
    // we should have consumed all the states, clips, etc.
    GrAssert(fStates.count() == currState);
    GrAssert(fClips.count() == currClip);
    GrAssert(fClipOrigins.count() == currClip);
    GrAssert(fClears.count() == currClear);
    GrAssert(fDraws.count() == currDraw);

    target->setDrawState(prevDrawState);
    prevDrawState->unref();
    this->reset();
    return true;
}

void GrInOrderDrawBuffer::setAutoFlushTarget(GrDrawTarget* target) {
    GrSafeAssign(fAutoFlushTarget, target);
}

void GrInOrderDrawBuffer::willReserveVertexAndIndexSpace(
                                size_t vertexSize,
                                int vertexCount,
                                int indexCount) {
    if (NULL != fAutoFlushTarget) {
        // We use geometryHints() to know whether to flush the draw buffer. We
        // can't flush if we are inside an unbalanced pushGeometrySource.
        // Moreover, flushing blows away vertex and index data that was
        // previously reserved. So if the vertex or index data is pulled from
        // reserved space and won't be released by this request then we can't
        // flush.
        bool insideGeoPush = fGeoPoolStateStack.count() > 1;

        bool unreleasedVertexSpace =
            !vertexCount &&
            kReserved_GeometrySrcType == this->getGeomSrc().fVertexSrc;

        bool unreleasedIndexSpace =
            !indexCount &&
            kReserved_GeometrySrcType == this->getGeomSrc().fIndexSrc;

        // we don't want to finalize any reserved geom on the target since
        // we don't know that the client has finished writing to it.
        bool targetHasReservedGeom =
            fAutoFlushTarget->hasReservedVerticesOrIndices();

        int vcount = vertexCount;
        int icount = indexCount;

        if (!insideGeoPush &&
            !unreleasedVertexSpace &&
            !unreleasedIndexSpace &&
            !targetHasReservedGeom &&
            this->geometryHints(vertexSize, &vcount, &icount)) {

            this->flushTo(fAutoFlushTarget);
        }
    }
}

bool GrInOrderDrawBuffer::geometryHints(size_t vertexSize,
                                        int* vertexCount,
                                        int* indexCount) const {
    // We will recommend a flush if the data could fit in a single
    // preallocated buffer, but none are left and it can't fit
    // in the current buffer (which may not be preallocated).
    bool flush = false;
    if (NULL != indexCount) {
        int32_t currIndices = fIndexPool.currentBufferIndices();
        if (*indexCount > currIndices &&
            (!fIndexPool.preallocatedBuffersRemaining() &&
             *indexCount <= fIndexPool.preallocatedBufferIndices())) {

            flush = true;
        }
        *indexCount = currIndices;
    }
    if (NULL != vertexCount) {
        int32_t currVertices = fVertexPool.currentBufferVertices(vertexSize);
        if (*vertexCount > currVertices &&
            (!fVertexPool.preallocatedBuffersRemaining() &&
             *vertexCount <= fVertexPool.preallocatedBufferVertices(vertexSize))) {

            flush = true;
        }
        *vertexCount = currVertices;
    }
    return flush;
}

bool GrInOrderDrawBuffer::onReserveVertexSpace(size_t vertexSize,
                                               int vertexCount,
                                               void** vertices) {
    GeometryPoolState& poolState = fGeoPoolStateStack.back();
    GrAssert(vertexCount > 0);
    GrAssert(NULL != vertices);
    GrAssert(0 == poolState.fUsedPoolVertexBytes);

    *vertices = fVertexPool.makeSpace(vertexSize,
                                      vertexCount,
                                      &poolState.fPoolVertexBuffer,
                                      &poolState.fPoolStartVertex);
    return NULL != *vertices;
}

bool GrInOrderDrawBuffer::onReserveIndexSpace(int indexCount, void** indices) {
    GeometryPoolState& poolState = fGeoPoolStateStack.back();
    GrAssert(indexCount > 0);
    GrAssert(NULL != indices);
    GrAssert(0 == poolState.fUsedPoolIndexBytes);

    *indices = fIndexPool.makeSpace(indexCount,
                                    &poolState.fPoolIndexBuffer,
                                    &poolState.fPoolStartIndex);
    return NULL != *indices;
}

void GrInOrderDrawBuffer::releaseReservedVertexSpace() {
    GeometryPoolState& poolState = fGeoPoolStateStack.back();
    const GeometrySrcState& geoSrc = this->getGeomSrc();

    // If we get a release vertex space call then our current source should either be reserved
    // or array (which we copied into reserved space).
    GrAssert(kReserved_GeometrySrcType == geoSrc.fVertexSrc ||
             kArray_GeometrySrcType == geoSrc.fVertexSrc);

    // When the caller reserved vertex buffer space we gave it back a pointer
    // provided by the vertex buffer pool. At each draw we tracked the largest
    // offset into the pool's pointer that was referenced. Now we return to the
    // pool any portion at the tail of the allocation that no draw referenced.
    size_t reservedVertexBytes = GrDrawState::VertexSize(geoSrc.fVertexLayout) *
                                 geoSrc.fVertexCount;
    fVertexPool.putBack(reservedVertexBytes -
                        poolState.fUsedPoolVertexBytes);
    poolState.fUsedPoolVertexBytes = 0;
    poolState.fPoolVertexBuffer = NULL;
    poolState.fPoolStartVertex = 0;
}

void GrInOrderDrawBuffer::releaseReservedIndexSpace() {
    GeometryPoolState& poolState = fGeoPoolStateStack.back();
    const GeometrySrcState& geoSrc = this->getGeomSrc();

    // If we get a release index space call then our current source should either be reserved
    // or array (which we copied into reserved space).
    GrAssert(kReserved_GeometrySrcType == geoSrc.fIndexSrc ||
             kArray_GeometrySrcType == geoSrc.fIndexSrc);

    // Similar to releaseReservedVertexSpace we return any unused portion at
    // the tail
    size_t reservedIndexBytes = sizeof(uint16_t) * geoSrc.fIndexCount;
    fIndexPool.putBack(reservedIndexBytes - poolState.fUsedPoolIndexBytes);
    poolState.fUsedPoolIndexBytes = 0;
    poolState.fPoolIndexBuffer = NULL;
    poolState.fPoolStartIndex = 0;
}


void GrInOrderDrawBuffer::onSetVertexSourceToArray(const void* vertexArray,
                                                   int vertexCount) {

    GeometryPoolState& poolState = fGeoPoolStateStack.back();
    GrAssert(0 == poolState.fUsedPoolVertexBytes);
#if GR_DEBUG
    bool success =
#endif
    fVertexPool.appendVertices(GrDrawState::VertexSize(this->getVertexLayout()),
                               vertexCount,
                               vertexArray,
                               &poolState.fPoolVertexBuffer,
                               &poolState.fPoolStartVertex);
    GR_DEBUGASSERT(success);
}

void GrInOrderDrawBuffer::onSetIndexSourceToArray(const void* indexArray,
                                                  int indexCount) {
    GeometryPoolState& poolState = fGeoPoolStateStack.back();
    GrAssert(0 == poolState.fUsedPoolIndexBytes);
#if GR_DEBUG
    bool success =
#endif
    fIndexPool.appendIndices(indexCount,
                             indexArray,
                             &poolState.fPoolIndexBuffer,
                             &poolState.fPoolStartIndex);
    GR_DEBUGASSERT(success);
}

void GrInOrderDrawBuffer::releaseVertexArray() {
    // When the client provides an array as the vertex source we handle it
    // by copying their array into reserved space.
    this->GrInOrderDrawBuffer::releaseReservedVertexSpace();
}

void GrInOrderDrawBuffer::releaseIndexArray() {
    // When the client provides an array as the index source we handle it
    // by copying their array into reserved space.
    this->GrInOrderDrawBuffer::releaseReservedIndexSpace();
}

void GrInOrderDrawBuffer::geometrySourceWillPush() {
    GeometryPoolState& poolState = fGeoPoolStateStack.push_back();
    poolState.fUsedPoolVertexBytes = 0;
    poolState.fUsedPoolIndexBytes = 0;
#if GR_DEBUG
    poolState.fPoolVertexBuffer = (GrVertexBuffer*)~0;
    poolState.fPoolStartVertex = ~0;
    poolState.fPoolIndexBuffer = (GrIndexBuffer*)~0;
    poolState.fPoolStartIndex = ~0;
#endif
}

void GrInOrderDrawBuffer::geometrySourceWillPop(
                                        const GeometrySrcState& restoredState) {
    GrAssert(fGeoPoolStateStack.count() > 1);
    fGeoPoolStateStack.pop_back();
    GeometryPoolState& poolState = fGeoPoolStateStack.back();
    // we have to assume that any slack we had in our vertex/index data
    // is now unreleasable because data may have been appended later in the
    // pool.
    if (kReserved_GeometrySrcType == restoredState.fVertexSrc ||
        kArray_GeometrySrcType == restoredState.fVertexSrc) {
        poolState.fUsedPoolVertexBytes =
            GrDrawState::VertexSize(restoredState.fVertexLayout) *
            restoredState.fVertexCount;
    }
    if (kReserved_GeometrySrcType == restoredState.fIndexSrc ||
        kArray_GeometrySrcType == restoredState.fIndexSrc) {
        poolState.fUsedPoolIndexBytes = sizeof(uint16_t) *
                                        restoredState.fIndexCount;
    }
}

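// A new state (or clip) command is recorded only when the current draw state
// (or clip) differs from the one most recently recorded.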
bool GrInOrderDrawBuffer::needsNewState() const {
    return fStates.empty() || !fStates.back().isEqual(this->getDrawState());
}

bool GrInOrderDrawBuffer::needsNewClip() const {
    GrAssert(fClips.count() == fClipOrigins.count());
    if (this->getDrawState().isClipState()) {
        if (fClipSet &&
            (fClips.empty() ||
             fClips.back() != *this->getClip()->fClipStack ||
             fClipOrigins.back() != this->getClip()->fOrigin)) {
            return true;
        }
    }
    return false;
}

void GrInOrderDrawBuffer::recordClip() {
    fClips.push_back() = *this->getClip()->fClipStack;
    fClipOrigins.push_back() = this->getClip()->fOrigin;
    fClipSet = false;
    fCmds.push_back(kSetClip_Cmd);
}

void GrInOrderDrawBuffer::recordState() {
    fStates.push_back().saveFrom(this->getDrawState());
    fCmds.push_back(kSetState_Cmd);
}

GrInOrderDrawBuffer::DrawRecord* GrInOrderDrawBuffer::recordDraw(const DrawInfo& info) {
    fCmds.push_back(kDraw_Cmd);
    return &fDraws.push_back(info);
}

GrInOrderDrawBuffer::StencilPath* GrInOrderDrawBuffer::recordStencilPath() {
    fCmds.push_back(kStencilPath_Cmd);
    return &fStencilPaths.push_back();
}

GrInOrderDrawBuffer::Clear* GrInOrderDrawBuffer::recordClear() {
    fCmds.push_back(kClear_Cmd);
    return &fClears.push_back();
}

void GrInOrderDrawBuffer::clipWillBeSet(const GrClipData* newClipData) {
    INHERITED::clipWillBeSet(newClipData);
    fClipSet = true;
    fClipProxyState = kUnknown_ClipProxyState;
}
765