// GrBufferAllocPool.cpp revision 71bd1ef2807ac2c9d3f82f93f1b958a57a7f612f
2/*
3 * Copyright 2010 Google Inc.
4 *
5 * Use of this source code is governed by a BSD-style license that can be
6 * found in the LICENSE file.
7 */
8
9
10#include "GrBufferAllocPool.h"
11#include "GrTypes.h"
12#include "GrVertexBuffer.h"
13#include "GrIndexBuffer.h"
14#include "GrGpu.h"
15
16#if GR_DEBUG
17    #define VALIDATE validate
18#else
19    static void VALIDATE(bool x = false) {}
20#endif
21
22// page size
23#define GrBufferAllocPool_MIN_BLOCK_SIZE ((size_t)1 << 12)
24
25GrBufferAllocPool::GrBufferAllocPool(GrGpu* gpu,
26                                     BufferType bufferType,
27                                     bool frequentResetHint,
28                                     size_t blockSize,
29                                     int preallocBufferCnt) :
30        fBlocks(GrMax(8, 2*preallocBufferCnt)) {
31
32    GrAssert(NULL != gpu);
33    fGpu = gpu;
34    fGpu->ref();
35    fGpuIsReffed = true;
36
37    fBufferType = bufferType;
38    fFrequentResetHint = frequentResetHint;
39    fBufferPtr = NULL;
40    fMinBlockSize = GrMax(GrBufferAllocPool_MIN_BLOCK_SIZE, blockSize);
41
42    fBytesInUse = 0;
43
44    fPreallocBuffersInUse = 0;
45    fFirstPreallocBuffer = 0;
46    for (int i = 0; i < preallocBufferCnt; ++i) {
47        GrGeometryBuffer* buffer = this->createBuffer(fMinBlockSize);
48        if (NULL != buffer) {
49            *fPreallocBuffers.append() = buffer;
50            buffer->ref();
51        }
52    }
53}
54
55GrBufferAllocPool::~GrBufferAllocPool() {
56    VALIDATE();
57    if (fBlocks.count()) {
58        GrGeometryBuffer* buffer = fBlocks.back().fBuffer;
59        if (buffer->isLocked()) {
60            buffer->unlock();
61        }
62    }
63    while (!fBlocks.empty()) {
64        destroyBlock();
65    }
66    fPreallocBuffers.unrefAll();
67    releaseGpuRef();
68}
69
70void GrBufferAllocPool::releaseGpuRef() {
71    if (fGpuIsReffed) {
72        fGpu->unref();
73        fGpuIsReffed = false;
74    }
75}
76
77void GrBufferAllocPool::reset() {
78    VALIDATE();
79    fBytesInUse = 0;
80    if (fBlocks.count()) {
81        GrGeometryBuffer* buffer = fBlocks.back().fBuffer;
82        if (buffer->isLocked()) {
83            buffer->unlock();
84        }
85    }
86    while (!fBlocks.empty()) {
87        destroyBlock();
88    }
89    if (fPreallocBuffers.count()) {
90        // must set this after above loop.
91        fFirstPreallocBuffer = (fFirstPreallocBuffer + fPreallocBuffersInUse) %
92                               fPreallocBuffers.count();
93    }
94    fCpuData.reset(fGpu->getCaps().fBufferLockSupport ? 0 : fMinBlockSize);
95    GrAssert(0 == fPreallocBuffersInUse);
96    VALIDATE();
97}
98
99void GrBufferAllocPool::unlock() {
100    VALIDATE();
101
102    if (NULL != fBufferPtr) {
103        BufferBlock& block = fBlocks.back();
104        if (block.fBuffer->isLocked()) {
105            block.fBuffer->unlock();
106        } else {
107            size_t flushSize = block.fBuffer->sizeInBytes() - block.fBytesFree;
108            flushCpuData(fBlocks.back().fBuffer, flushSize);
109        }
110        fBufferPtr = NULL;
111    }
112    VALIDATE();
113}
114
#if GR_DEBUG
// Debug-only invariant checks: fBufferPtr mirrors the newest block's lock
// (or the CPU staging buffer), only the newest block may be locked, and the
// per-block byte accounting sums to fBytesInUse.
void GrBufferAllocPool::validate(bool unusedBlockAllowed) const {
    if (NULL != fBufferPtr) {
        GrAssert(!fBlocks.empty());
        if (fBlocks.back().fBuffer->isLocked()) {
            GrGeometryBuffer* buf = fBlocks.back().fBuffer;
            GrAssert(buf->lockPtr() == fBufferPtr);
        } else {
            GrAssert(fCpuData.get() == fBufferPtr);
        }
    } else {
        GrAssert(fBlocks.empty() || !fBlocks.back().fBuffer->isLocked());
    }
    size_t bytesInUse = 0;
    int blockCount = fBlocks.count();
    for (int i = 0; i < blockCount; ++i) {
        if (i < blockCount - 1) {
            // Every block except the newest must be unlocked.
            GrAssert(!fBlocks[i].fBuffer->isLocked());
        }
        size_t bytes = fBlocks[i].fBuffer->sizeInBytes() - fBlocks[i].fBytesFree;
        bytesInUse += bytes;
        GrAssert(bytes || unusedBlockAllowed);
    }

    GrAssert(bytesInUse == fBytesInUse);
    if (unusedBlockAllowed) {
        GrAssert((fBytesInUse && !fBlocks.empty()) ||
                 (!fBytesInUse && (fBlocks.count() < 2)));
    } else {
        GrAssert((0 == fBytesInUse) == fBlocks.empty());
    }
}
#endif
147
148void* GrBufferAllocPool::makeSpace(size_t size,
149                                   size_t alignment,
150                                   const GrGeometryBuffer** buffer,
151                                   size_t* offset) {
152    VALIDATE();
153
154    GrAssert(NULL != buffer);
155    GrAssert(NULL != offset);
156
157    if (NULL != fBufferPtr) {
158        BufferBlock& back = fBlocks.back();
159        size_t usedBytes = back.fBuffer->sizeInBytes() - back.fBytesFree;
160        size_t pad = GrSizeAlignUpPad(usedBytes,
161                                      alignment);
162        if ((size + pad) <= back.fBytesFree) {
163            usedBytes += pad;
164            *offset = usedBytes;
165            *buffer = back.fBuffer;
166            back.fBytesFree -= size + pad;
167            fBytesInUse += size;
168            return (void*)(reinterpret_cast<intptr_t>(fBufferPtr) + usedBytes);
169        }
170    }
171
172    // We could honor the space request using by a partial update of the current
173    // VB (if there is room). But we don't currently use draw calls to GL that
174    // allow the driver to know that previously issued draws won't read from
175    // the part of the buffer we update. Also, the GL buffer implementation
176    // may be cheating on the actual buffer size by shrinking the buffer on
177    // updateData() if the amount of data passed is less than the full buffer
178    // size.
179
180    if (!createBlock(size)) {
181        return NULL;
182    }
183    GrAssert(NULL != fBufferPtr);
184
185    *offset = 0;
186    BufferBlock& back = fBlocks.back();
187    *buffer = back.fBuffer;
188    back.fBytesFree -= size;
189    fBytesInUse += size;
190    VALIDATE();
191    return fBufferPtr;
192}
193
194int GrBufferAllocPool::currentBufferItems(size_t itemSize) const {
195    VALIDATE();
196    if (NULL != fBufferPtr) {
197        const BufferBlock& back = fBlocks.back();
198        size_t usedBytes = back.fBuffer->sizeInBytes() - back.fBytesFree;
199        size_t pad = GrSizeAlignUpPad(usedBytes, itemSize);
200        return (back.fBytesFree - pad) / itemSize;
201    } else if (fPreallocBuffersInUse < fPreallocBuffers.count()) {
202        return fMinBlockSize / itemSize;
203    }
204    return 0;
205}
206
207int GrBufferAllocPool::preallocatedBuffersRemaining() const {
208    return fPreallocBuffers.count() - fPreallocBuffersInUse;
209}
210
211int GrBufferAllocPool::preallocatedBufferCount() const {
212    return fPreallocBuffers.count();
213}
214
215void GrBufferAllocPool::putBack(size_t bytes) {
216    VALIDATE();
217
218    while (bytes) {
219        // caller shouldnt try to put back more than they've taken
220        GrAssert(!fBlocks.empty());
221        BufferBlock& block = fBlocks.back();
222        size_t bytesUsed = block.fBuffer->sizeInBytes() - block.fBytesFree;
223        if (bytes >= bytesUsed) {
224            bytes -= bytesUsed;
225            fBytesInUse -= bytesUsed;
226            // if we locked a vb to satisfy the make space and we're releasing
227            // beyond it, then unlock it.
228            if (block.fBuffer->isLocked()) {
229                block.fBuffer->unlock();
230            }
231            this->destroyBlock();
232        } else {
233            block.fBytesFree += bytes;
234            fBytesInUse -= bytes;
235            bytes = 0;
236            break;
237        }
238    }
239    VALIDATE();
240}
241
242bool GrBufferAllocPool::createBlock(size_t requestSize) {
243
244    size_t size = GrMax(requestSize, fMinBlockSize);
245    GrAssert(size >= GrBufferAllocPool_MIN_BLOCK_SIZE);
246
247    VALIDATE();
248
249    BufferBlock& block = fBlocks.push_back();
250
251    if (size == fMinBlockSize &&
252        fPreallocBuffersInUse < fPreallocBuffers.count()) {
253
254        uint32_t nextBuffer = (fPreallocBuffersInUse + fFirstPreallocBuffer) %
255                               fPreallocBuffers.count();
256        block.fBuffer = fPreallocBuffers[nextBuffer];
257        block.fBuffer->ref();
258        ++fPreallocBuffersInUse;
259    } else {
260        block.fBuffer = this->createBuffer(size);
261        if (NULL == block.fBuffer) {
262            fBlocks.pop_back();
263            return false;
264        }
265    }
266
267    block.fBytesFree = size;
268    if (NULL != fBufferPtr) {
269        GrAssert(fBlocks.count() > 1);
270        BufferBlock& prev = fBlocks.fromBack(1);
271        if (prev.fBuffer->isLocked()) {
272            prev.fBuffer->unlock();
273        } else {
274            flushCpuData(prev.fBuffer,
275                         prev.fBuffer->sizeInBytes() - prev.fBytesFree);
276        }
277        fBufferPtr = NULL;
278    }
279
280    GrAssert(NULL == fBufferPtr);
281
282    if (fGpu->getCaps().fBufferLockSupport &&
283        size > GR_GEOM_BUFFER_LOCK_THRESHOLD &&
284        (!fFrequentResetHint || requestSize > GR_GEOM_BUFFER_LOCK_THRESHOLD)) {
285        fBufferPtr = block.fBuffer->lock();
286    }
287
288    if (NULL == fBufferPtr) {
289        fBufferPtr = fCpuData.reset(size);
290    }
291
292    VALIDATE(true);
293
294    return true;
295}
296
297void GrBufferAllocPool::destroyBlock() {
298    GrAssert(!fBlocks.empty());
299
300    BufferBlock& block = fBlocks.back();
301    if (fPreallocBuffersInUse > 0) {
302        uint32_t prevPreallocBuffer = (fPreallocBuffersInUse +
303                                       fFirstPreallocBuffer +
304                                       (fPreallocBuffers.count() - 1)) %
305                                      fPreallocBuffers.count();
306        if (block.fBuffer == fPreallocBuffers[prevPreallocBuffer]) {
307            --fPreallocBuffersInUse;
308        }
309    }
310    GrAssert(!block.fBuffer->isLocked());
311    block.fBuffer->unref();
312    fBlocks.pop_back();
313    fBufferPtr = NULL;
314}
315
316void GrBufferAllocPool::flushCpuData(GrGeometryBuffer* buffer,
317                                     size_t flushSize) {
318    GrAssert(NULL != buffer);
319    GrAssert(!buffer->isLocked());
320    GrAssert(fCpuData.get() == fBufferPtr);
321    GrAssert(flushSize <= buffer->sizeInBytes());
322
323    if (fGpu->getCaps().fBufferLockSupport &&
324        flushSize > GR_GEOM_BUFFER_LOCK_THRESHOLD) {
325        void* data = buffer->lock();
326        if (NULL != data) {
327            memcpy(data, fBufferPtr, flushSize);
328            buffer->unlock();
329            return;
330        }
331    }
332    buffer->updateData(fBufferPtr, flushSize);
333}
334
335GrGeometryBuffer* GrBufferAllocPool::createBuffer(size_t size) {
336    if (kIndex_BufferType == fBufferType) {
337        return fGpu->createIndexBuffer(size, true);
338    } else {
339        GrAssert(kVertex_BufferType == fBufferType);
340        return fGpu->createVertexBuffer(size, true);
341    }
342}
343
344////////////////////////////////////////////////////////////////////////////////
345
346GrVertexBufferAllocPool::GrVertexBufferAllocPool(GrGpu* gpu,
347                                                 bool frequentResetHint,
348                                                 size_t bufferSize,
349                                                 int preallocBufferCnt)
350: GrBufferAllocPool(gpu,
351                    kVertex_BufferType,
352                    frequentResetHint,
353                    bufferSize,
354                    preallocBufferCnt) {
355}
356
357void* GrVertexBufferAllocPool::makeSpace(GrVertexLayout layout,
358                                         int vertexCount,
359                                         const GrVertexBuffer** buffer,
360                                         int* startVertex) {
361
362    GrAssert(vertexCount >= 0);
363    GrAssert(NULL != buffer);
364    GrAssert(NULL != startVertex);
365
366    size_t vSize = GrDrawTarget::VertexSize(layout);
367    size_t offset = 0; // assign to suppress warning
368    const GrGeometryBuffer* geomBuffer = NULL; // assign to suppress warning
369    void* ptr = INHERITED::makeSpace(vSize * vertexCount,
370                                     vSize,
371                                     &geomBuffer,
372                                     &offset);
373
374    *buffer = (const GrVertexBuffer*) geomBuffer;
375    GrAssert(0 == offset % vSize);
376    *startVertex = offset / vSize;
377    return ptr;
378}
379
380bool GrVertexBufferAllocPool::appendVertices(GrVertexLayout layout,
381                                             int vertexCount,
382                                             const void* vertices,
383                                             const GrVertexBuffer** buffer,
384                                             int* startVertex) {
385    void* space = makeSpace(layout, vertexCount, buffer, startVertex);
386    if (NULL != space) {
387        memcpy(space,
388               vertices,
389               GrDrawTarget::VertexSize(layout) * vertexCount);
390        return true;
391    } else {
392        return false;
393    }
394}
395
396int GrVertexBufferAllocPool::preallocatedBufferVertices(GrVertexLayout layout) const {
397    return INHERITED::preallocatedBufferSize() /
398            GrDrawTarget::VertexSize(layout);
399}
400
401int GrVertexBufferAllocPool::currentBufferVertices(GrVertexLayout layout) const {
402    return currentBufferItems(GrDrawTarget::VertexSize(layout));
403}
404
405////////////////////////////////////////////////////////////////////////////////
406
407GrIndexBufferAllocPool::GrIndexBufferAllocPool(GrGpu* gpu,
408                                               bool frequentResetHint,
409                                               size_t bufferSize,
410                                               int preallocBufferCnt)
411: GrBufferAllocPool(gpu,
412                    kIndex_BufferType,
413                    frequentResetHint,
414                    bufferSize,
415                    preallocBufferCnt) {
416}
417
418void* GrIndexBufferAllocPool::makeSpace(int indexCount,
419                                        const GrIndexBuffer** buffer,
420                                        int* startIndex) {
421
422    GrAssert(indexCount >= 0);
423    GrAssert(NULL != buffer);
424    GrAssert(NULL != startIndex);
425
426    size_t offset = 0; // assign to suppress warning
427    const GrGeometryBuffer* geomBuffer = NULL; // assign to suppress warning
428    void* ptr = INHERITED::makeSpace(indexCount * sizeof(uint16_t),
429                                     sizeof(uint16_t),
430                                     &geomBuffer,
431                                     &offset);
432
433    *buffer = (const GrIndexBuffer*) geomBuffer;
434    GrAssert(0 == offset % sizeof(uint16_t));
435    *startIndex = offset / sizeof(uint16_t);
436    return ptr;
437}
438
439bool GrIndexBufferAllocPool::appendIndices(int indexCount,
440                                           const void* indices,
441                                           const GrIndexBuffer** buffer,
442                                           int* startIndex) {
443    void* space = makeSpace(indexCount, buffer, startIndex);
444    if (NULL != space) {
445        memcpy(space, indices, sizeof(uint16_t) * indexCount);
446        return true;
447    } else {
448        return false;
449    }
450}
451
452int GrIndexBufferAllocPool::preallocatedBufferIndices() const {
453    return INHERITED::preallocatedBufferSize() / sizeof(uint16_t);
454}
455
456int GrIndexBufferAllocPool::currentBufferIndices() const {
457    return currentBufferItems(sizeof(uint16_t));
458}
459