GrDrawOpAtlas.cpp revision 342bfc25de5b0452b1551bf9db4bf45eac7718b2
/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
8#include "GrBatchAtlas.h"
9#include "GrBatchFlushState.h"
10#include "GrRectanizer.h"
11#include "GrTracing.h"
12
13////////////////////////////////////////////////////////////////////////////////
14
// A BatchPlot is one fixed-size sub-rectangle of the atlas texture. (index, genID)
// together form the plot's public ID; (offX, offY) are the plot's grid coordinates
// within the atlas, from which the pixel offset of its top-left corner is derived.
// NOTE: the member-initializer order here must match the declaration order in the
// header; do not reorder these entries.
GrBatchAtlas::BatchPlot::BatchPlot(int index, uint64_t genID, int offX, int offY, int width,
                                   int height, GrPixelConfig config)
    : fLastUpload(GrBatchDrawToken::AlreadyFlushedToken())
    , fLastUse(GrBatchDrawToken::AlreadyFlushedToken())
    , fIndex(index)
    , fGenID(genID)
    , fID(CreateId(fIndex, fGenID))
    , fData(nullptr)                                  // CPU backing store, allocated lazily
    , fWidth(width)
    , fHeight(height)
    , fX(offX)
    , fY(offY)
    , fRects(nullptr)                                 // rectangle packer, created lazily
    , fOffset(SkIPoint16::Make(fX * fWidth, fY * fHeight))  // pixel offset in the atlas
    , fConfig(config)
    , fBytesPerPixel(GrBytesPerPixel(config))
#ifdef SK_DEBUG
    , fDirty(false)
#endif
{
    fDirtyRect.setEmpty();
}
37
38GrBatchAtlas::BatchPlot::~BatchPlot() {
39    sk_free(fData);
40    delete fRects;
41}
42
43bool GrBatchAtlas::BatchPlot::addSubImage(int width, int height, const void* image,
44                                          SkIPoint16* loc) {
45    SkASSERT(width <= fWidth && height <= fHeight);
46
47    if (!fRects) {
48        fRects = GrRectanizer::Factory(fWidth, fHeight);
49    }
50
51    if (!fRects->addRect(width, height, loc)) {
52        return false;
53    }
54
55    if (!fData) {
56        fData = reinterpret_cast<unsigned char*>(sk_calloc_throw(fBytesPerPixel * fWidth *
57                                                                 fHeight));
58    }
59    size_t rowBytes = width * fBytesPerPixel;
60    const unsigned char* imagePtr = (const unsigned char*)image;
61    // point ourselves at the right starting spot
62    unsigned char* dataPtr = fData;
63    dataPtr += fBytesPerPixel * fWidth * loc->fY;
64    dataPtr += fBytesPerPixel * loc->fX;
65    // copy into the data buffer
66    for (int i = 0; i < height; ++i) {
67        memcpy(dataPtr, imagePtr, rowBytes);
68        dataPtr += fBytesPerPixel * fWidth;
69        imagePtr += rowBytes;
70    }
71
72    fDirtyRect.join(loc->fX, loc->fY, loc->fX + width, loc->fY + height);
73
74    loc->fX += fOffset.fX;
75    loc->fY += fOffset.fY;
76    SkDEBUGCODE(fDirty = true;)
77
78    return true;
79}
80
81void GrBatchAtlas::BatchPlot::uploadToTexture(GrDrawBatch::WritePixelsFn& writePixels,
82                                              GrTexture* texture) {
83    // We should only be issuing uploads if we are in fact dirty
84    SkASSERT(fDirty && fData && texture);
85    TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("skia.gpu"), "GrBatchPlot::uploadToTexture");
86    size_t rowBytes = fBytesPerPixel * fWidth;
87    const unsigned char* dataPtr = fData;
88    dataPtr += rowBytes * fDirtyRect.fTop;
89    dataPtr += fBytesPerPixel * fDirtyRect.fLeft;
90    writePixels(texture, fOffset.fX + fDirtyRect.fLeft, fOffset.fY + fDirtyRect.fTop,
91                fDirtyRect.width(), fDirtyRect.height(), fConfig, dataPtr, rowBytes);
92    fDirtyRect.setEmpty();
93    SkDEBUGCODE(fDirty = false;)
94}
95
96void GrBatchAtlas::BatchPlot::resetRects() {
97    if (fRects) {
98        fRects->reset();
99    }
100
101    fGenID++;
102    fID = CreateId(fIndex, fGenID);
103
104    // zero out the plot
105    if (fData) {
106        sk_bzero(fData, fBytesPerPixel * fWidth * fHeight);
107    }
108
109    fDirtyRect.setEmpty();
110    SkDEBUGCODE(fDirty = false;)
111}
112
113///////////////////////////////////////////////////////////////////////////////
114
// Carves the given texture into a numPlotsX x numPlotsY grid of equally sized
// BatchPlots and threads them all onto the LRU list. Takes ownership of a ref
// on 'texture' (released in the destructor).
GrBatchAtlas::GrBatchAtlas(GrTexture* texture, int numPlotsX, int numPlotsY)
    : fTexture(texture)
    , fAtlasGeneration(kInvalidAtlasGeneration + 1) {

    // The texture dimensions must divide evenly into the plot grid.
    int plotWidth = texture->width() / numPlotsX;
    int plotHeight = texture->height() / numPlotsY;
    SkASSERT(numPlotsX * numPlotsY <= BulkUseTokenUpdater::kMaxPlots);
    SkASSERT(plotWidth * numPlotsX == texture->width());
    SkASSERT(plotHeight * numPlotsY == texture->height());

    SkDEBUGCODE(fNumPlots = numPlotsX * numPlotsY;)

    // We currently do not support compressed atlases...
    SkASSERT(!GrPixelConfigIsCompressed(texture->desc().fConfig));

    // set up allocated plots
    fPlotArray = new SkAutoTUnref<BatchPlot>[numPlotsX * numPlotsY];

    SkAutoTUnref<BatchPlot>* currPlot = fPlotArray;
    // Note the dual counters: (x, y) are the plot's grid coordinates and run
    // high-to-low, while (c, r) derive the array index and run low-to-high.
    // All plots start at generation 1 (kInvalidAtlasGeneration is presumably 0
    // -- the header declares it; verify there).
    for (int y = numPlotsY - 1, r = 0; y >= 0; --y, ++r) {
        for (int x = numPlotsX - 1, c = 0; x >= 0; --x, ++c) {
            uint32_t index = r * numPlotsX + c;
            currPlot->reset(new BatchPlot(index, 1, x, y, plotWidth, plotHeight,
                                          texture->desc().fConfig));

            // build LRU list
            fPlotList.addToHead(currPlot->get());
            ++currPlot;
        }
    }
}
146
147GrBatchAtlas::~GrBatchAtlas() {
148    SkSafeUnref(fTexture);
149    delete[] fPlotArray;
150}
151
152void GrBatchAtlas::processEviction(AtlasID id) {
153    for (int i = 0; i < fEvictionCallbacks.count(); i++) {
154        (*fEvictionCallbacks[i].fFunc)(id, fEvictionCallbacks[i].fData);
155    }
156}
157
// Marks 'plot' most-recently-used, schedules an ASAP upload of its contents if
// needed, and writes the plot's ID to *id for the caller.
inline void GrBatchAtlas::updatePlot(GrDrawBatch::Target* target, AtlasID* id, BatchPlot* plot) {
    this->makeMRU(plot);

    // If our most recent upload has already occurred then we have to insert a new
    // upload. Otherwise, we already have a scheduled upload that hasn't yet occurred.
    // This new update will piggy back on that previously scheduled update.
    if (target->hasDrawBeenFlushed(plot->lastUploadToken())) {
        // With C++14 we could move an sk_sp into the lambda to only ref once.
        // The sk_sp captured by copy keeps the plot alive until the deferred
        // upload actually runs.
        sk_sp<BatchPlot> plotsp(SkRef(plot));
        GrTexture* texture = fTexture;
        GrBatchDrawToken lastUploadToken = target->addAsapUpload(
            [plotsp, texture] (GrDrawBatch::WritePixelsFn& writePixels) {
               plotsp->uploadToTexture(writePixels, texture);
            }
        );
        plot->setLastUploadToken(lastUploadToken);
    }
    *id = plot->id();
}
177
// Attempts to add a width x height image to the atlas, trying three strategies
// in order:
//   1) fit it into an existing plot (most-recently-used first);
//   2) evict and reuse the least-recently-used plot if the GPU is done with it;
//   3) replace the LRU plot with a fresh clone and upload inline with the draws.
// Returns false only when the LRU plot is referenced by the draw currently
// being prepared; the caller is expected to flush and retry. On success, *id
// receives the destination plot's ID and *loc the atlas-space position.
bool GrBatchAtlas::addToAtlas(AtlasID* id, GrDrawBatch::Target* target,
                              int width, int height, const void* image, SkIPoint16* loc) {
    // We should already have a texture, TODO clean this up
    SkASSERT(fTexture);

    // now look through all allocated plots for one we can share, in Most Recently Refed order
    GrBatchPlotList::Iter plotIter;
    plotIter.init(fPlotList, GrBatchPlotList::Iter::kHead_IterStart);
    BatchPlot* plot;
    while ((plot = plotIter.get())) {
        SkASSERT(GrBytesPerPixel(fTexture->desc().fConfig) == plot->bpp());
        if (plot->addSubImage(width, height, image, loc)) {
            this->updatePlot(target, id, plot);
            return true;
        }
        plotIter.next();
    }

    // If the above fails, then see if the least recently refed plot has already been flushed to the
    // gpu
    plot = fPlotList.tail();
    SkASSERT(plot);
    if (target->hasDrawBeenFlushed(plot->lastUseToken())) {
        this->processEviction(plot->id());
        plot->resetRects();
        SkASSERT(GrBytesPerPixel(fTexture->desc().fConfig) == plot->bpp());
        // Must succeed: the plot was just emptied and width/height fit a plot.
        SkDEBUGCODE(bool verify = )plot->addSubImage(width, height, image, loc);
        SkASSERT(verify);
        this->updatePlot(target, id, plot);
        fAtlasGeneration++;
        return true;
    }

    // If this plot has been used in a draw that is currently being prepared by a batch, then we
    // have to fail. This gives the batch a chance to enqueue the draw, and call back into this
    // function. When that draw is enqueued, the draw token advances, and the subsequent call will
    // continue past this branch and prepare an inline upload that will occur after the enqueued
    // draw which references the plot's pre-upload content.
    if (plot->lastUseToken() == target->nextDrawToken()) {
        return false;
    }

    SkASSERT(!plot->unique());  // The GrPlotUpdater should have a ref too

    // Replace the LRU plot with a clone; the old plot object stays alive (via
    // outstanding refs) until the pending draws that reference it are flushed.
    this->processEviction(plot->id());
    fPlotList.remove(plot);
    SkAutoTUnref<BatchPlot>& newPlot = fPlotArray[plot->index()];
    newPlot.reset(plot->clone());

    fPlotList.addToHead(newPlot.get());
    SkASSERT(GrBytesPerPixel(fTexture->desc().fConfig) == newPlot->bpp());
    // Must succeed: the clone starts empty.
    SkDEBUGCODE(bool verify = )newPlot->addSubImage(width, height, image, loc);
    SkASSERT(verify);

    // Note that this plot will be uploaded inline with the draws whereas the
    // one it displaced most likely was uploaded asap.
    // With C++14 we could move an sk_sp into the lambda to only ref once.
    sk_sp<BatchPlot> plotsp(SkRef(newPlot.get()));
    GrTexture* texture = fTexture;
    GrBatchDrawToken lastUploadToken = target->addInlineUpload(
        [plotsp, texture] (GrDrawBatch::WritePixelsFn& writePixels) {
            plotsp->uploadToTexture(writePixels, texture);
        }
    );
    newPlot->setLastUploadToken(lastUploadToken);

    *id = newPlot->id();

    fAtlasGeneration++;
    return true;
}
249