// SkScaledImageCache.cpp revision 60c8d24f84105dd341e36a7fc97e25f88ceef309
1/*
2 * Copyright 2013 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
7
8#include "SkScaledImageCache.h"
9#include "SkMipMap.h"
10#include "SkPixelRef.h"
11#include "SkRect.h"
12
13// This can be defined by the caller's build system
14//#define SK_USE_DISCARDABLE_SCALEDIMAGECACHE
15
16#ifndef SK_DISCARDABLEMEMORY_SCALEDIMAGECACHE_COUNT_LIMIT
17#   define SK_DISCARDABLEMEMORY_SCALEDIMAGECACHE_COUNT_LIMIT   1024
18#endif
19
20#ifndef SK_DEFAULT_IMAGE_CACHE_LIMIT
21    #define SK_DEFAULT_IMAGE_CACHE_LIMIT     (2 * 1024 * 1024)
22#endif
23
// The public API hands out opaque ID* tokens; internally an ID is simply a
// Rec. Convert an internal record pointer into the opaque token form.
static inline SkScaledImageCache::ID* rec_to_id(SkScaledImageCache::Rec* rec) {
    return reinterpret_cast<SkScaledImageCache::ID*>(rec);
}
27
// Inverse of rec_to_id(): recover the internal record from an opaque ID*
// previously returned by findAndLock/addAndLock.
static inline SkScaledImageCache::Rec* id_to_rec(SkScaledImageCache::ID* id) {
    return reinterpret_cast<SkScaledImageCache::Rec*>(id);
}
31
// Hashes `count` 32-bit words with the MurmurHash3 mixing and finalization
// steps (seed 0). Implemented from en.wikipedia.org/wiki/MurmurHash.
static uint32_t compute_hash(const uint32_t data[], int count) {
    const uint32_t kMixMul1 = 0xcc9e2d51;
    const uint32_t kMixMul2 = 0x1b873593;

    uint32_t h = 0;
    for (int i = 0; i < count; ++i) {
        uint32_t block = data[i] * kMixMul1;
        block = (block << 15) | (block >> 17);   // rotate left 15
        block *= kMixMul2;

        h ^= block;
        h = (h << 13) | (h >> 19);               // rotate left 13
        h = h * 5 + 0xe6546b64;
    }

    // Finalization: avalanche the remaining bits.
    h ^= h >> 16;
    h *= 0x85ebca6b;
    h ^= h >> 13;
    h *= 0xc2b2ae35;
    h ^= h >> 16;

    return h;
}
57
// Cache key: the source pixelref's generation ID, the scale that was applied,
// and the source bounds within the pixelref. The key is hashed and compared
// as raw 32-bit words, so the member layout below is load-bearing: fGenID
// through fBounds must be 7 contiguous 32-bit fields with no padding, and
// fHash must sit immediately before them (8 words total starting at fHash).
struct SkScaledImageCache::Key {
    Key(uint32_t genID,
        SkScalar scaleX,
        SkScalar scaleY,
        SkIRect  bounds)
        : fGenID(genID)
        , fScaleX(scaleX)
        , fScaleY(scaleY)
        , fBounds(bounds) {
        // Hash the 7 words starting at fGenID: genID, the two scales, and
        // the 4 ints of fBounds.
        fHash = compute_hash(&fGenID, 7);
    }

    // Lexicographic word-by-word compare of the 7 key words (fHash is
    // excluded). Note the scale values are compared by their raw 32-bit
    // patterns, not numerically.
    bool operator<(const Key& other) const {
        const uint32_t* a = &fGenID;
        const uint32_t* b = &other.fGenID;
        for (int i = 0; i < 7; ++i) {
            if (a[i] < b[i]) {
                return true;
            }
            if (a[i] > b[i]) {
                return false;
            }
        }
        return false;
    }

    // Equality over all 8 words starting at fHash; since fHash is word 0,
    // mismatched hashes give a cheap early-out.
    bool operator==(const Key& other) const {
        const uint32_t* a = &fHash;
        const uint32_t* b = &other.fHash;
        for (int i = 0; i < 8; ++i) {
            if (a[i] != b[i]) {
                return false;
            }
        }
        return true;
    }

    uint32_t    fHash;      // derived from the 7 words below
    uint32_t    fGenID;
    float       fScaleX;    // stored as a raw 32-bit float (ctor takes SkScalar)
    float       fScaleY;
    SkIRect     fBounds;    // 4 int32s
};
101
// One cache entry. A Rec lives on the cache's doubly-linked LRU list
// (fNext/fPrev are managed by the cache, not by Rec itself) and holds either
// a bitmap payload or a mipmap payload — never both.
struct SkScaledImageCache::Rec {
    // Bitmap entry. A new record starts with one lock held by the caller.
    Rec(const Key& key, const SkBitmap& bm) : fKey(key), fBitmap(bm) {
        fLockCount = 1;
        fMip = NULL;
    }

    // Mipmap entry; takes a ref on mip. Also starts with one lock held.
    Rec(const Key& key, const SkMipMap* mip) : fKey(key) {
        fLockCount = 1;
        fMip = mip;
        mip->ref();
    }

    ~Rec() {
        SkSafeUnref(fMip);
    }

    // Adapters required by SkTDynamicHash.
    static const Key& GetKey(const Rec& rec) { return rec.fKey; }
    static uint32_t Hash(const Key& key) { return key.fHash; }

    // Payload size in bytes; used for the cache's budget accounting.
    size_t bytesUsed() const {
        return fMip ? fMip->getSize() : fBitmap.getSize();
    }

    Rec*    fNext;  // LRU links, maintained by SkScaledImageCache
    Rec*    fPrev;

    // this guy wants to be 64bit aligned
    Key     fKey;

    int32_t fLockCount;  // outstanding locks; only records at 0 may be purged

    // we use either fBitmap or fMip, but not both
    SkBitmap fBitmap;
    const SkMipMap* fMip;
};
137
138#include "SkTDynamicHash.h"
139
// Hash table keyed by Key for fast lookup. Entries are the same Rec objects
// that live on the LRU list; the table holds them without owning them.
class SkScaledImageCache::Hash :
    public SkTDynamicHash<SkScaledImageCache::Rec, SkScaledImageCache::Key> {};
142
143
144///////////////////////////////////////////////////////////////////////////////
145
146// experimental hash to speed things up
147#define USE_HASH
148
149#if !defined(USE_HASH)
150static inline SkScaledImageCache::Rec* find_rec_in_list(
151        SkScaledImageCache::Rec* head, const Key & key) {
152    SkScaledImageCache::Rec* rec = head;
153    while ((rec != NULL) && (rec->fKey != key)) {
154        rec = rec->fNext;
155    }
156    return rec;
157}
158#endif
159
// Shared constructor helper: puts the cache into its empty state. Exactly
// one of fByteLimit / fDiscardableFactory is expected to be set afterwards
// by whichever constructor called us.
void SkScaledImageCache::init() {
    fHead = NULL;
    fTail = NULL;
#ifdef USE_HASH
    fHash = new Hash;
#else
    fHash = NULL;
#endif
    fBytesUsed = 0;
    fCount = 0;
    fAllocator = NULL;

    // One of these should be explicitly set by the caller after we return.
    fByteLimit = 0;
    fDiscardableFactory = NULL;
}
176
177#include "SkDiscardableMemory.h"
178
// A pixelref backed by a single SkDiscardableMemory allocation. The memory
// arrives from the factory already locked, so the very first lockPixels()
// is satisfied without touching the DM (see fFirstTime). If a later relock
// fails, the DM is deleted and the pixels are gone for good ("one shot").
class SkOneShotDiscardablePixelRef : public SkPixelRef {
public:
    SK_DECLARE_INST_COUNT(SkOneShotDiscardablePixelRef)
    // Ownership of the discardable memory is transferred to the pixelref
    SkOneShotDiscardablePixelRef(const SkImageInfo&, SkDiscardableMemory*, size_t rowBytes);
    ~SkOneShotDiscardablePixelRef();

    SK_DECLARE_UNFLATTENABLE_OBJECT()

protected:
    virtual bool onNewLockPixels(LockRec*) SK_OVERRIDE;
    virtual void onUnlockPixels() SK_OVERRIDE;
    virtual size_t getAllocatedSizeInBytes() const SK_OVERRIDE;

private:
    SkDiscardableMemory* fDM;         // owned; set to NULL after a failed relock
    size_t               fRB;         // row bytes of the pixel data
    bool                 fFirstTime;  // true until the initial (pre-locked) lock is consumed

    typedef SkPixelRef INHERITED;
};
200
// Takes ownership of dm, which must already be locked (the assert below
// checks that its data is currently accessible).
SkOneShotDiscardablePixelRef::SkOneShotDiscardablePixelRef(const SkImageInfo& info,
                                             SkDiscardableMemory* dm,
                                             size_t rowBytes)
    : INHERITED(info)
    , fDM(dm)
    , fRB(rowBytes)
{
    SkASSERT(dm->data());
    fFirstTime = true;
}
211
// We own fDM (may already be NULL if a relock failed and we deleted it).
SkOneShotDiscardablePixelRef::~SkOneShotDiscardablePixelRef() {
    SkDELETE(fDM);
}
215
// Locks the discardable memory and reports the pixel address/rowbytes.
// Three cases: (1) first lock — the DM was created locked, so just consume
// it; (2) DM already deleted by a previous failed relock — fail; (3) try to
// relock, and on failure delete the DM so the resource is released.
bool SkOneShotDiscardablePixelRef::onNewLockPixels(LockRec* rec) {
    if (fFirstTime) {
        // we're already locked
        SkASSERT(fDM->data());
        fFirstTime = false;
        goto SUCCESS;
    }

    // A previous call to onUnlock may have deleted our DM, so check for that
    if (NULL == fDM) {
        return false;
    }

    if (!fDM->lock()) {
        // since it failed, we delete it now, to free-up the resource
        delete fDM;
        fDM = NULL;
        return false;
    }

SUCCESS:
    rec->fPixels = fDM->data();
    rec->fColorTable = NULL;
    rec->fRowBytes = fRB;
    return true;
}
242
// Balances a successful onNewLockPixels(). Can only be reached after the
// first lock has been consumed (hence the assert), and fDM is non-NULL here
// because a lock must have succeeded for an unlock to follow.
void SkOneShotDiscardablePixelRef::onUnlockPixels() {
    SkASSERT(!fFirstTime);
    fDM->unlock();
}
247
// Reports the pixel allocation size implied by our info and row bytes.
size_t SkOneShotDiscardablePixelRef::getAllocatedSizeInBytes() const {
    return this->info().getSafeSize(fRB);
}
251
// SkBitmap::Allocator that backs bitmap pixels with discardable memory
// obtained from the cache's DiscardableFactory.
class SkScaledImageCacheDiscardableAllocator : public SkBitmap::Allocator {
public:
    SkScaledImageCacheDiscardableAllocator(
                            SkScaledImageCache::DiscardableFactory factory) {
        SkASSERT(factory);
        fFactory = factory;
    }

    virtual bool allocPixelRef(SkBitmap*, SkColorTable*) SK_OVERRIDE;

private:
    SkScaledImageCache::DiscardableFactory fFactory;  // creates SkDiscardableMemory
};
265
266bool SkScaledImageCacheDiscardableAllocator::allocPixelRef(SkBitmap* bitmap,
267                                                       SkColorTable* ctable) {
268    size_t size = bitmap->getSize();
269    if (0 == size) {
270        return false;
271    }
272
273    SkDiscardableMemory* dm = fFactory(size);
274    if (NULL == dm) {
275        return false;
276    }
277
278    // can we relax this?
279    if (kN32_SkColorType != bitmap->colorType()) {
280        return false;
281    }
282
283    SkImageInfo info = bitmap->info();
284    bitmap->setPixelRef(SkNEW_ARGS(SkOneShotDiscardablePixelRef,
285                                   (info, dm, bitmap->rowBytes())))->unref();
286    bitmap->lockPixels();
287    return bitmap->readyToDraw();
288}
289
// Discardable-memory flavor of the cache: budget is enforced by entry count
// (see purgeAsNeeded), and bitmaps are allocated via the factory.
SkScaledImageCache::SkScaledImageCache(DiscardableFactory factory) {
    this->init();
    fDiscardableFactory = factory;

    fAllocator = SkNEW_ARGS(SkScaledImageCacheDiscardableAllocator, (factory));
}
296
// Malloc-backed flavor of the cache: budget is enforced by total bytes.
SkScaledImageCache::SkScaledImageCache(size_t byteLimit) {
    this->init();
    fByteLimit = byteLimit;
}
301
// Deletes every record via the LRU list (the hash table refers to the same
// records but does not own them), then the hash table itself.
SkScaledImageCache::~SkScaledImageCache() {
    SkSafeUnref(fAllocator);

    Rec* rec = fHead;
    while (rec) {
        Rec* next = rec->fNext;
        SkDELETE(rec);
        rec = next;
    }
    delete fHash;
}
313
314////////////////////////////////////////////////////////////////////////////////
315
316
// Convenience overload: builds a Key from the pieces and forwards to the
// general findAndLock(Key) below.
SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(uint32_t genID,
                                                        SkScalar scaleX,
                                                        SkScalar scaleY,
                                                        const SkIRect& bounds) {
    const Key key(genID, scaleX, scaleY, bounds);
    return this->findAndLock(key);
}
324
/**
   This private method is the fully general record finder. All other
   record finders should call this function or the one above. On a hit, the
   record is moved to the head of the LRU list and its lock count is bumped;
   the caller must balance with unlock(). Returns NULL on a miss or for an
   empty-bounds key (which can never have been inserted). */
SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(const SkScaledImageCache::Key& key) {
    if (key.fBounds.isEmpty()) {
        return NULL;
    }
#ifdef USE_HASH
    Rec* rec = fHash->find(key);
#else
    Rec* rec = find_rec_in_list(fHead, key);
#endif
    if (rec) {
        this->moveToHead(rec);  // for our LRU
        rec->fLockCount += 1;
    }
    return rec;
}
343
344/**
345   This function finds the bounds of the bitmap *within its pixelRef*.
346   If the bitmap lacks a pixelRef, it will return an empty rect, since
347   that doesn't make sense.  This may be a useful enough function that
348   it should be somewhere else (in SkBitmap?). */
349static SkIRect get_bounds_from_bitmap(const SkBitmap& bm) {
350    if (!(bm.pixelRef())) {
351        return SkIRect::MakeEmpty();
352    }
353    SkIPoint origin = bm.pixelRefOrigin();
354    return SkIRect::MakeXYWH(origin.fX, origin.fY, bm.width(), bm.height());
355}
356
357
// Looks up an unscaled (1x1) entry by pixelref generation ID and dimensions.
// On a hit, copies the cached bitmap into *bitmap and returns a non-NULL ID
// that the caller must eventually pass to unlock(); *bitmap is untouched on
// a miss.
SkScaledImageCache::ID* SkScaledImageCache::findAndLock(uint32_t genID,
                                                        int32_t width,
                                                        int32_t height,
                                                        SkBitmap* bitmap) {
    Rec* rec = this->findAndLock(genID, SK_Scalar1, SK_Scalar1,
                                 SkIRect::MakeWH(width, height));
    if (rec) {
        SkASSERT(NULL == rec->fMip);
        SkASSERT(rec->fBitmap.pixelRef());
        *bitmap = rec->fBitmap;
    }
    return rec_to_id(rec);
}
371
// Looks up a scaled copy of orig. A zero scale is rejected because (0, 0)
// is the reserved key for mipmap entries. On a hit, copies the cached
// bitmap into *scaled and returns a locked ID (see unlock()).
SkScaledImageCache::ID* SkScaledImageCache::findAndLock(const SkBitmap& orig,
                                                        SkScalar scaleX,
                                                        SkScalar scaleY,
                                                        SkBitmap* scaled) {
    if (0 == scaleX || 0 == scaleY) {
        // degenerate, and the key we use for mipmaps
        return NULL;
    }
    Rec* rec = this->findAndLock(orig.getGenerationID(), scaleX,
                                 scaleY, get_bounds_from_bitmap(orig));
    if (rec) {
        SkASSERT(NULL == rec->fMip);
        SkASSERT(rec->fBitmap.pixelRef());
        *scaled = rec->fBitmap;
    }
    return rec_to_id(rec);
}
389
// Looks up the mipmap entry for orig; mip entries are keyed with the
// reserved (0, 0) scale. On a hit, *mip points at the cached mipmap (still
// owned by the cache) and the returned ID must be unlocked when done.
SkScaledImageCache::ID* SkScaledImageCache::findAndLockMip(const SkBitmap& orig,
                                                           SkMipMap const ** mip) {
    Rec* rec = this->findAndLock(orig.getGenerationID(), 0, 0,
                                 get_bounds_from_bitmap(orig));
    if (rec) {
        SkASSERT(rec->fMip);
        SkASSERT(NULL == rec->fBitmap.pixelRef());
        *mip = rec->fMip;
    }
    return rec_to_id(rec);
}
401
402
403////////////////////////////////////////////////////////////////////////////////
/**
   This private method is the fully general record adder. All other
   record adders should call this function. Takes ownership of rec: if an
   entry with the same key already exists (racy inserts, etc.), rec is
   deleted and the existing, freshly locked entry is returned instead.
   The returned ID carries one lock the caller must release via unlock(). */
SkScaledImageCache::ID* SkScaledImageCache::addAndLock(SkScaledImageCache::Rec* rec) {
    SkASSERT(rec);
    // See if we already have this key (racy inserts, etc.)
    Rec* existing = this->findAndLock(rec->fKey);
    if (NULL != existing) {
        // Since we already have a matching entry, just delete the new one and return.
        // Call sites cannot assume the passed in object will live past this call.
        // NOTE(review): this copies rec's bitmap even when both records are
        // mipmap entries (their bitmaps are then both empty) — appears
        // harmless, but worth confirming.
        existing->fBitmap = rec->fBitmap;
        SkDELETE(rec);
        return rec_to_id(existing);
    }

    this->addToHead(rec);
    SkASSERT(1 == rec->fLockCount);
#ifdef USE_HASH
    SkASSERT(fHash);
    fHash->add(rec);
#endif
    // We may (now) be overbudget, so see if we need to purge something.
    this->purgeAsNeeded();
    return rec_to_id(rec);
}
429
// Adds an unscaled (1x1) bitmap entry keyed by generation ID and size.
// Returns a locked ID; note this overload does not reject empty bounds
// (the caller supplies explicit dimensions rather than a pixelref rect).
SkScaledImageCache::ID* SkScaledImageCache::addAndLock(uint32_t genID,
                                                       int32_t width,
                                                       int32_t height,
                                                       const SkBitmap& bitmap) {
    Key key(genID, SK_Scalar1, SK_Scalar1, SkIRect::MakeWH(width, height));
    Rec* rec = SkNEW_ARGS(Rec, (key, bitmap));
    return this->addAndLock(rec);
}
438
// Caches `scaled` as the (scaleX, scaleY) rendition of orig. Rejects zero
// scales (reserved for mipmaps) and bitmaps without a pixelref (empty
// bounds would collide in the find path). Returns a locked ID or NULL.
SkScaledImageCache::ID* SkScaledImageCache::addAndLock(const SkBitmap& orig,
                                                       SkScalar scaleX,
                                                       SkScalar scaleY,
                                                       const SkBitmap& scaled) {
    if (0 == scaleX || 0 == scaleY) {
        // degenerate, and the key we use for mipmaps
        return NULL;
    }
    SkIRect bounds = get_bounds_from_bitmap(orig);
    if (bounds.isEmpty()) {
        return NULL;
    }
    Key key(orig.getGenerationID(), scaleX, scaleY, bounds);
    Rec* rec = SkNEW_ARGS(Rec, (key, scaled));
    return this->addAndLock(rec);
}
455
// Caches a mipmap for orig under the reserved (0, 0) scale key. The record
// refs mip; returns a locked ID, or NULL if orig has no pixelref.
SkScaledImageCache::ID* SkScaledImageCache::addAndLockMip(const SkBitmap& orig,
                                                          const SkMipMap* mip) {
    SkIRect bounds = get_bounds_from_bitmap(orig);
    if (bounds.isEmpty()) {
        return NULL;
    }
    Key key(orig.getGenerationID(), 0, 0, bounds);
    Rec* rec = SkNEW_ARGS(Rec, (key, mip));
    return this->addAndLock(rec);
}
466
// Releases one lock on the record behind id (which must have come from a
// findAndLock/addAndLock on this cache). When the last lock is released the
// record becomes eligible for purging, so we re-check the budget.
void SkScaledImageCache::unlock(SkScaledImageCache::ID* id) {
    SkASSERT(id);

#ifdef SK_DEBUG
    {
        // Debug-only: verify the id actually refers to a record in our list.
        bool found = false;
        Rec* rec = fHead;
        while (rec != NULL) {
            if (rec == id_to_rec(id)) {
                found = true;
                break;
            }
            rec = rec->fNext;
        }
        SkASSERT(found);
    }
#endif
    Rec* rec = id_to_rec(id);
    SkASSERT(rec->fLockCount > 0);
    rec->fLockCount -= 1;

    // we may have been over-budget, but now have released something, so check
    // if we should purge.
    if (0 == rec->fLockCount) {
        this->purgeAsNeeded();
    }
}
494
// Evicts unlocked records, oldest (tail) first, until the cache is back
// under budget. Discardable caches are capped by entry count only (the
// system reclaims the memory itself); malloc-backed caches by total bytes.
// Locked records are skipped, so the cache can remain over budget if
// everything beyond the limit is still locked.
void SkScaledImageCache::purgeAsNeeded() {
    size_t byteLimit;
    int    countLimit;

    if (fDiscardableFactory) {
        countLimit = SK_DISCARDABLEMEMORY_SCALEDIMAGECACHE_COUNT_LIMIT;
        byteLimit = SK_MaxU32;  // no limit based on bytes
    } else {
        countLimit = SK_MaxS32; // no limit based on count
        byteLimit = fByteLimit;
    }

    // Work on local copies and write back once at the end.
    size_t bytesUsed = fBytesUsed;
    int    countUsed = fCount;

    Rec* rec = fTail;
    while (rec) {
        if (bytesUsed < byteLimit && countUsed < countLimit) {
            break;
        }

        // Grab fPrev before we potentially delete rec.
        Rec* prev = rec->fPrev;
        if (0 == rec->fLockCount) {
            size_t used = rec->bytesUsed();
            SkASSERT(used <= bytesUsed);
            this->detach(rec);
#ifdef USE_HASH
            fHash->remove(rec->fKey);
#endif

            SkDELETE(rec);

            bytesUsed -= used;
            countUsed -= 1;
        }
        rec = prev;
    }

    fBytesUsed = bytesUsed;
    fCount = countUsed;
}
536
537size_t SkScaledImageCache::setByteLimit(size_t newLimit) {
538    size_t prevLimit = fByteLimit;
539    fByteLimit = newLimit;
540    if (newLimit < prevLimit) {
541        this->purgeAsNeeded();
542    }
543    return prevLimit;
544}
545
546///////////////////////////////////////////////////////////////////////////////
547
// Unlinks rec from the LRU list, fixing up fHead/fTail as needed. Pure list
// surgery: byte/count accounting is left to the caller (purgeAsNeeded does
// its own; moveToHead relinks without changing totals).
void SkScaledImageCache::detach(Rec* rec) {
    Rec* prev = rec->fPrev;
    Rec* next = rec->fNext;

    if (!prev) {
        SkASSERT(fHead == rec);
        fHead = next;
    } else {
        prev->fNext = next;
    }

    if (!next) {
        fTail = prev;
    } else {
        next->fPrev = prev;
    }

    rec->fNext = rec->fPrev = NULL;
}
567
// Moves an existing record to the head of the LRU list (most recently
// used). Relinks manually rather than via addToHead() so the byte/count
// totals are not double-counted.
void SkScaledImageCache::moveToHead(Rec* rec) {
    if (fHead == rec) {
        return;
    }

    SkASSERT(fHead);
    SkASSERT(fTail);

    this->validate();

    this->detach(rec);

    // rec != fHead, so the list is still non-empty after the detach.
    fHead->fPrev = rec;
    rec->fNext = fHead;
    fHead = rec;

    this->validate();
}
586
// Links a brand-new record at the head of the LRU list and charges its
// payload to the cache totals. Only for records not already in the list
// (existing records are repositioned with moveToHead()).
void SkScaledImageCache::addToHead(Rec* rec) {
    this->validate();

    rec->fPrev = NULL;
    rec->fNext = fHead;
    if (fHead) {
        fHead->fPrev = rec;
    }
    fHead = rec;
    if (!fTail) {
        fTail = rec;
    }
    fBytesUsed += rec->bytesUsed();
    fCount += 1;

    this->validate();
}
604
605///////////////////////////////////////////////////////////////////////////////
606
607#ifdef SK_DEBUG
// Debug-only consistency check: walks the LRU list in both directions and
// verifies the head/tail links, the entry count, and that the per-record
// byte sums add up to fBytesUsed.
void SkScaledImageCache::validate() const {
    if (NULL == fHead) {
        SkASSERT(NULL == fTail);
        SkASSERT(0 == fBytesUsed);
        return;
    }

    if (fHead == fTail) {
        SkASSERT(NULL == fHead->fPrev);
        SkASSERT(NULL == fHead->fNext);
        SkASSERT(fHead->bytesUsed() == fBytesUsed);
        return;
    }

    SkASSERT(NULL == fHead->fPrev);
    SkASSERT(NULL != fHead->fNext);
    SkASSERT(NULL == fTail->fNext);
    SkASSERT(NULL != fTail->fPrev);

    // Forward pass: accumulate bytes/count and check we never exceed totals.
    size_t used = 0;
    int count = 0;
    const Rec* rec = fHead;
    while (rec) {
        count += 1;
        used += rec->bytesUsed();
        SkASSERT(used <= fBytesUsed);
        rec = rec->fNext;
    }
    SkASSERT(fCount == count);

    // Backward pass: un-accumulate; both totals must return exactly to zero.
    rec = fTail;
    while (rec) {
        SkASSERT(count > 0);
        count -= 1;
        SkASSERT(used >= rec->bytesUsed());
        used -= rec->bytesUsed();
        rec = rec->fPrev;
    }

    SkASSERT(0 == count);
    SkASSERT(0 == used);
}
650#endif
651
652void SkScaledImageCache::dump() const {
653    this->validate();
654
655    const Rec* rec = fHead;
656    int locked = 0;
657    while (rec) {
658        locked += rec->fLockCount > 0;
659        rec = rec->fNext;
660    }
661
662    SkDebugf("SkScaledImageCache: count=%d bytes=%d locked=%d %s\n",
663             fCount, fBytesUsed, locked,
664             fDiscardableFactory ? "discardable" : "malloc");
665}
666
667///////////////////////////////////////////////////////////////////////////////
668
669#include "SkThread.h"
670
SK_DECLARE_STATIC_MUTEX(gMutex);  // guards gScaledImageCache and all access to it
static SkScaledImageCache* gScaledImageCache = NULL;  // lazily-created process singleton
static void cleanup_gScaledImageCache() { SkDELETE(gScaledImageCache); }  // atexit hook
674
/** Must hold gMutex when calling. Lazily creates the process-wide cache:
    discardable-memory backed when SK_USE_DISCARDABLE_SCALEDIMAGECACHE is
    defined, otherwise malloc backed with the default byte limit. */
static SkScaledImageCache* get_cache() {
    // gMutex is always held when this is called, so we don't need to be fancy in here.
    if (NULL == gScaledImageCache) {
#ifdef SK_USE_DISCARDABLE_SCALEDIMAGECACHE
        gScaledImageCache = SkNEW_ARGS(SkScaledImageCache, (SkDiscardableMemory::Create));
#else
        gScaledImageCache = SkNEW_ARGS(SkScaledImageCache, (SK_DEFAULT_IMAGE_CACHE_LIMIT));
#endif
        atexit(cleanup_gScaledImageCache);
    }
    return gScaledImageCache;
}
688
689
// Thread-safe static wrapper: takes the global lock and forwards to the
// singleton cache's findAndLock().
SkScaledImageCache::ID* SkScaledImageCache::FindAndLock(
                                uint32_t pixelGenerationID,
                                int32_t width,
                                int32_t height,
                                SkBitmap* scaled) {
    SkAutoMutexAcquire am(gMutex);
    return get_cache()->findAndLock(pixelGenerationID, width, height, scaled);
}
698
// Thread-safe static wrapper: takes the global lock and forwards to the
// singleton cache's addAndLock().
SkScaledImageCache::ID* SkScaledImageCache::AddAndLock(
                               uint32_t pixelGenerationID,
                               int32_t width,
                               int32_t height,
                               const SkBitmap& scaled) {
    SkAutoMutexAcquire am(gMutex);
    return get_cache()->addAndLock(pixelGenerationID, width, height, scaled);
}
707
708
// Thread-safe static wrapper for the scaled-bitmap lookup.
SkScaledImageCache::ID* SkScaledImageCache::FindAndLock(const SkBitmap& orig,
                                                        SkScalar scaleX,
                                                        SkScalar scaleY,
                                                        SkBitmap* scaled) {
    SkAutoMutexAcquire am(gMutex);
    return get_cache()->findAndLock(orig, scaleX, scaleY, scaled);
}
716
// Thread-safe static wrapper for the mipmap lookup.
SkScaledImageCache::ID* SkScaledImageCache::FindAndLockMip(const SkBitmap& orig,
                                                       SkMipMap const ** mip) {
    SkAutoMutexAcquire am(gMutex);
    return get_cache()->findAndLockMip(orig, mip);
}
722
// Thread-safe static wrapper for adding a scaled bitmap.
SkScaledImageCache::ID* SkScaledImageCache::AddAndLock(const SkBitmap& orig,
                                                       SkScalar scaleX,
                                                       SkScalar scaleY,
                                                       const SkBitmap& scaled) {
    SkAutoMutexAcquire am(gMutex);
    return get_cache()->addAndLock(orig, scaleX, scaleY, scaled);
}
730
// Thread-safe static wrapper for adding a mipmap.
SkScaledImageCache::ID* SkScaledImageCache::AddAndLockMip(const SkBitmap& orig,
                                                          const SkMipMap* mip) {
    SkAutoMutexAcquire am(gMutex);
    return get_cache()->addAndLockMip(orig, mip);
}
736
// Thread-safe static wrapper: releases a lock obtained from any of the
// static FindAndLock/AddAndLock entry points.
void SkScaledImageCache::Unlock(SkScaledImageCache::ID* id) {
    SkAutoMutexAcquire am(gMutex);
    get_cache()->unlock(id);

//    get_cache()->dump();
}
743
// Thread-safe accessor for the singleton's current byte usage.
size_t SkScaledImageCache::GetBytesUsed() {
    SkAutoMutexAcquire am(gMutex);
    return get_cache()->getBytesUsed();
}
748
// Thread-safe accessor for the singleton's byte budget.
size_t SkScaledImageCache::GetByteLimit() {
    SkAutoMutexAcquire am(gMutex);
    return get_cache()->getByteLimit();
}
753
// Thread-safe setter for the singleton's byte budget; returns the old limit.
size_t SkScaledImageCache::SetByteLimit(size_t newLimit) {
    SkAutoMutexAcquire am(gMutex);
    return get_cache()->setByteLimit(newLimit);
}
758
// Thread-safe accessor for the singleton's bitmap allocator (non-NULL only
// for the discardable-memory flavor of the cache).
SkBitmap::Allocator* SkScaledImageCache::GetAllocator() {
    SkAutoMutexAcquire am(gMutex);
    return get_cache()->allocator();
}
763
// Thread-safe debug dump of the singleton cache.
void SkScaledImageCache::Dump() {
    SkAutoMutexAcquire am(gMutex);
    get_cache()->dump();
}
768
769///////////////////////////////////////////////////////////////////////////////
770
771#include "SkGraphics.h"
772
// Public SkGraphics facade over the scaled-image-cache byte usage.
size_t SkGraphics::GetImageCacheBytesUsed() {
    return SkScaledImageCache::GetBytesUsed();
}
776
// Public SkGraphics facade over the scaled-image-cache byte budget.
size_t SkGraphics::GetImageCacheByteLimit() {
    return SkScaledImageCache::GetByteLimit();
}
780
// Public SkGraphics facade for changing the scaled-image-cache byte budget;
// returns the previous limit.
size_t SkGraphics::SetImageCacheByteLimit(size_t newLimit) {
    return SkScaledImageCache::SetByteLimit(newLimit);
}
784