/*
 * Copyright 2013 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "SkScaledImageCache.h"
#include "SkMipMap.h"
#include "SkOnce.h"
#include "SkPixelRef.h"
#include "SkRect.h"

// This can be defined by the caller's build system
//#define SK_USE_DISCARDABLE_SCALEDIMAGECACHE

#ifndef SK_DISCARDABLEMEMORY_SCALEDIMAGECACHE_COUNT_LIMIT
#   define SK_DISCARDABLEMEMORY_SCALEDIMAGECACHE_COUNT_LIMIT 1024
#endif

#ifndef SK_DEFAULT_IMAGE_CACHE_LIMIT
    #define SK_DEFAULT_IMAGE_CACHE_LIMIT (2 * 1024 * 1024)
#endif

static inline SkScaledImageCache::ID* rec_to_id(SkScaledImageCache::Rec* rec) {
    return reinterpret_cast<SkScaledImageCache::ID*>(rec);
}

static inline SkScaledImageCache::Rec* id_to_rec(SkScaledImageCache::ID* id) {
    return reinterpret_cast<SkScaledImageCache::Rec*>(id);
}

// Implemented from en.wikipedia.org/wiki/MurmurHash.
static uint32_t compute_hash(const uint32_t data[], int count) {
    uint32_t hash = 0;

    for (int i = 0; i < count; ++i) {
        uint32_t k = data[i];
        k *= 0xcc9e2d51;
        k = (k << 15) | (k >> 17);
        k *= 0x1b873593;

        hash ^= k;
        hash = (hash << 13) | (hash >> 19);
        hash *= 5;
        hash += 0xe6546b64;
    }

    //  hash ^= size;
    hash ^= hash >> 16;
    hash *= 0x85ebca6b;
    hash ^= hash >> 13;
    hash *= 0xc2b2ae35;
    hash ^= hash >> 16;

    return hash;
}

struct SkScaledImageCache::Key {
    Key(uint32_t genID,
        SkScalar scaleX,
        SkScalar scaleY,
        SkIRect  bounds)
        : fGenID(genID)
        , fScaleX(scaleX)
        , fScaleY(scaleY)
        , fBounds(bounds) {
        fHash = compute_hash(&fGenID, 7);
    }

    bool operator<(const Key& other) const {
        const uint32_t* a = &fGenID;
        const uint32_t* b = &other.fGenID;
        for (int i = 0; i < 7; ++i) {
            if (a[i] < b[i]) {
                return true;
            }
            if (a[i] > b[i]) {
                return false;
            }
        }
        return false;
    }

    bool operator==(const Key& other) const {
        const uint32_t* a = &fHash;
        const uint32_t* b = &other.fHash;
        for (int i = 0; i < 8; ++i) {
            if (a[i] != b[i]) {
                return false;
            }
        }
        return true;
    }

    uint32_t fHash;
    uint32_t fGenID;
    float    fScaleX;
    float    fScaleY;
    SkIRect  fBounds;
};
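
// Note on the Key layout: the constructor hashes the 7 consecutive 32-bit words
// starting at fGenID (genID, scaleX, scaleY, and the four SkIRect coordinates),
// and operator== compares the 8 words starting at fHash. Both therefore depend
// on the field order above (fHash first, then the hashed fields, with no
// padding between them); this is a reading of the code, not a documented
// contract.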

struct SkScaledImageCache::Rec {
    Rec(const Key& key, const SkBitmap& bm) : fKey(key), fBitmap(bm) {
        fLockCount = 1;
        fMip = NULL;
    }

    Rec(const Key& key, const SkMipMap* mip) : fKey(key) {
        fLockCount = 1;
        fMip = mip;
        mip->ref();
    }

    ~Rec() {
        SkSafeUnref(fMip);
    }

    size_t bytesUsed() const {
        return fMip ? fMip->getSize() : fBitmap.getSize();
    }

    Rec*    fNext;
    Rec*    fPrev;

    // this guy wants to be 64bit aligned
    Key     fKey;

    int32_t fLockCount;

    // we use either fBitmap or fMip, but not both
    SkBitmap        fBitmap;
    const SkMipMap* fMip;
};

#include "SkTDynamicHash.h"

namespace { // can't use static functions w/ template parameters
const SkScaledImageCache::Key& key_from_rec(const SkScaledImageCache::Rec& rec) {
    return rec.fKey;
}

uint32_t hash_from_key(const SkScaledImageCache::Key& key) {
    return key.fHash;
}

bool eq_rec_key(const SkScaledImageCache::Rec& rec, const SkScaledImageCache::Key& key) {
    return rec.fKey == key;
}
}

class SkScaledImageCache::Hash : public SkTDynamicHash<SkScaledImageCache::Rec,
                                                        SkScaledImageCache::Key,
                                                        key_from_rec,
                                                        hash_from_key,
                                                        eq_rec_key> {};

///////////////////////////////////////////////////////////////////////////////

// experimental hash to speed things up
#define USE_HASH

#if !defined(USE_HASH)
static inline SkScaledImageCache::Rec* find_rec_in_list(
        SkScaledImageCache::Rec* head, const Key & key) {
    SkScaledImageCache::Rec* rec = head;
    while ((rec != NULL) && (rec->fKey != key)) {
        rec = rec->fNext;
    }
    return rec;
}
#endif

void SkScaledImageCache::init() {
    fHead = NULL;
    fTail = NULL;
#ifdef USE_HASH
    fHash = new Hash;
#else
    fHash = NULL;
#endif
    fBytesUsed = 0;
    fCount = 0;
    fAllocator = NULL;

    // One of these should be explicitly set by the caller after we return.
    fByteLimit = 0;
    fDiscardableFactory = NULL;
}
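
// Overview: records live on a doubly-linked list kept in LRU order -- fHead is
// the most recently used entry, fTail the least -- and, when USE_HASH is
// defined, also in fHash for fast lookup by Key. A cache built with a
// DiscardableFactory is bounded by entry count (see purgeAsNeeded()); one built
// with a byte limit is bounded by fBytesUsed.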

#include "SkDiscardableMemory.h"

class SkOneShotDiscardablePixelRef : public SkPixelRef {
public:
    SK_DECLARE_INST_COUNT(SkOneShotDiscardablePixelRef)
    // Ownership of the discardable memory is transferred to the pixel ref.
    SkOneShotDiscardablePixelRef(const SkImageInfo&, SkDiscardableMemory*, size_t rowBytes);
    ~SkOneShotDiscardablePixelRef();

    SK_DECLARE_UNFLATTENABLE_OBJECT()

protected:
    virtual bool onNewLockPixels(LockRec*) SK_OVERRIDE;
    virtual void onUnlockPixels() SK_OVERRIDE;
    virtual size_t getAllocatedSizeInBytes() const SK_OVERRIDE;

private:
    SkDiscardableMemory* fDM;
    size_t               fRB;
    bool                 fFirstTime;

    typedef SkPixelRef INHERITED;
};

SkOneShotDiscardablePixelRef::SkOneShotDiscardablePixelRef(const SkImageInfo& info,
                                                           SkDiscardableMemory* dm,
                                                           size_t rowBytes)
    : INHERITED(info)
    , fDM(dm)
    , fRB(rowBytes)
{
    SkASSERT(dm->data());
    fFirstTime = true;
}

SkOneShotDiscardablePixelRef::~SkOneShotDiscardablePixelRef() {
    SkDELETE(fDM);
}

bool SkOneShotDiscardablePixelRef::onNewLockPixels(LockRec* rec) {
    if (fFirstTime) {
        // we're already locked
        SkASSERT(fDM->data());
        fFirstTime = false;
        goto SUCCESS;
    }

    // A previous call to onUnlock may have deleted our DM, so check for that
    if (NULL == fDM) {
        return false;
    }

    if (!fDM->lock()) {
        // since it failed, we delete it now, to free-up the resource
        delete fDM;
        fDM = NULL;
        return false;
    }

SUCCESS:
    rec->fPixels = fDM->data();
    rec->fColorTable = NULL;
    rec->fRowBytes = fRB;
    return true;
}

void SkOneShotDiscardablePixelRef::onUnlockPixels() {
    SkASSERT(!fFirstTime);
    fDM->unlock();
}

size_t SkOneShotDiscardablePixelRef::getAllocatedSizeInBytes() const {
    return this->info().getSafeSize(fRB);
}
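
// Lifecycle note: the discardable memory handed to SkOneShotDiscardablePixelRef
// arrives already locked (the constructor asserts fDM->data()), which is why
// the first onNewLockPixels() succeeds without calling fDM->lock(). Once a
// later lock attempt fails, the memory is deleted and the pixel ref can never
// be locked again -- hence "one shot".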

class SkScaledImageCacheDiscardableAllocator : public SkBitmap::Allocator {
public:
    SkScaledImageCacheDiscardableAllocator(
                            SkScaledImageCache::DiscardableFactory factory) {
        SkASSERT(factory);
        fFactory = factory;
    }

    virtual bool allocPixelRef(SkBitmap*, SkColorTable*) SK_OVERRIDE;

private:
    SkScaledImageCache::DiscardableFactory fFactory;
};

bool SkScaledImageCacheDiscardableAllocator::allocPixelRef(SkBitmap* bitmap,
                                                           SkColorTable* ctable) {
    size_t size = bitmap->getSize();
    if (0 == size) {
        return false;
    }

    SkDiscardableMemory* dm = fFactory(size);
    if (NULL == dm) {
        return false;
    }

    // can we relax this?
    if (kPMColor_SkColorType != bitmap->colorType()) {
        return false;
    }

    SkImageInfo info = bitmap->info();
    bitmap->setPixelRef(SkNEW_ARGS(SkOneShotDiscardablePixelRef,
                                   (info, dm, bitmap->rowBytes())))->unref();
    bitmap->lockPixels();
    return bitmap->readyToDraw();
}

SkScaledImageCache::SkScaledImageCache(DiscardableFactory factory) {
    this->init();
    fDiscardableFactory = factory;

    fAllocator = SkNEW_ARGS(SkScaledImageCacheDiscardableAllocator, (factory));
}

SkScaledImageCache::SkScaledImageCache(size_t byteLimit) {
    this->init();
    fByteLimit = byteLimit;
}

SkScaledImageCache::~SkScaledImageCache() {
    SkSafeUnref(fAllocator);

    Rec* rec = fHead;
    while (rec) {
        Rec* next = rec->fNext;
        SkDELETE(rec);
        rec = next;
    }
    delete fHash;
}

////////////////////////////////////////////////////////////////////////////////

SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(uint32_t genID,
                                                          SkScalar scaleX,
                                                          SkScalar scaleY,
                                                          const SkIRect& bounds) {
    const Key key(genID, scaleX, scaleY, bounds);
    return this->findAndLock(key);
}

/**
   This private method is the fully general record finder. All other
   record finders should call this function or the one above. */
SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(const SkScaledImageCache::Key& key) {
    if (key.fBounds.isEmpty()) {
        return NULL;
    }
#ifdef USE_HASH
    Rec* rec = fHash->find(key);
#else
    Rec* rec = find_rec_in_list(fHead, key);
#endif
    if (rec) {
        this->moveToHead(rec);  // for our LRU
        rec->fLockCount += 1;
    }
    return rec;
}
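
// A successful find bumps the record to the head of the LRU list and takes one
// lock on it. The Rec* is handed back to callers as an opaque ID (rec_to_id),
// and each returned ID must eventually be balanced by a single unlock() call;
// records with a non-zero lock count are never purged.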

/**
   This function finds the bounds of the bitmap *within its pixelRef*.
   If the bitmap lacks a pixelRef, it will return an empty rect, since
   that doesn't make sense. This may be a useful enough function that
   it should be somewhere else (in SkBitmap?). */
static SkIRect get_bounds_from_bitmap(const SkBitmap& bm) {
    if (!(bm.pixelRef())) {
        return SkIRect::MakeEmpty();
    }
    SkIPoint origin = bm.pixelRefOrigin();
    return SkIRect::MakeXYWH(origin.fX, origin.fY, bm.width(), bm.height());
}

SkScaledImageCache::ID* SkScaledImageCache::findAndLock(uint32_t genID,
                                                        int32_t width,
                                                        int32_t height,
                                                        SkBitmap* bitmap) {
    Rec* rec = this->findAndLock(genID, SK_Scalar1, SK_Scalar1,
                                 SkIRect::MakeWH(width, height));
    if (rec) {
        SkASSERT(NULL == rec->fMip);
        SkASSERT(rec->fBitmap.pixelRef());
        *bitmap = rec->fBitmap;
    }
    return rec_to_id(rec);
}

SkScaledImageCache::ID* SkScaledImageCache::findAndLock(const SkBitmap& orig,
                                                        SkScalar scaleX,
                                                        SkScalar scaleY,
                                                        SkBitmap* scaled) {
    if (0 == scaleX || 0 == scaleY) {
        // degenerate, and the key we use for mipmaps
        return NULL;
    }
    Rec* rec = this->findAndLock(orig.getGenerationID(), scaleX,
                                 scaleY, get_bounds_from_bitmap(orig));
    if (rec) {
        SkASSERT(NULL == rec->fMip);
        SkASSERT(rec->fBitmap.pixelRef());
        *scaled = rec->fBitmap;
    }
    return rec_to_id(rec);
}

SkScaledImageCache::ID* SkScaledImageCache::findAndLockMip(const SkBitmap& orig,
                                                           SkMipMap const ** mip) {
    Rec* rec = this->findAndLock(orig.getGenerationID(), 0, 0,
                                 get_bounds_from_bitmap(orig));
    if (rec) {
        SkASSERT(rec->fMip);
        SkASSERT(NULL == rec->fBitmap.pixelRef());
        *mip = rec->fMip;
    }
    return rec_to_id(rec);
}
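
/*
 *  A sketch of how a caller is expected to use the find/add/unlock trio via the
 *  static wrappers defined at the bottom of this file. The scaling helper named
 *  below is hypothetical; only the cache calls are taken from this file's API.
 *
 *      SkBitmap scaled;
 *      SkScaledImageCache::ID* id =
 *              SkScaledImageCache::FindAndLock(orig, scaleX, scaleY, &scaled);
 *      if (NULL == id) {
 *          if (!make_scaled_copy(orig, scaleX, scaleY, &scaled)) {  // hypothetical helper
 *              return;
 *          }
 *          id = SkScaledImageCache::AddAndLock(orig, scaleX, scaleY, scaled);
 *      }
 *      // ... use 'scaled' while the entry is locked ...
 *      if (id) {
 *          SkScaledImageCache::Unlock(id);
 *      }
 */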

////////////////////////////////////////////////////////////////////////////////
/**
   This private method is the fully general record adder. All other
   record adders should call this function. */
SkScaledImageCache::ID* SkScaledImageCache::addAndLock(SkScaledImageCache::Rec* rec) {
    SkASSERT(rec);
    // See if we already have this key (racy inserts, etc.)
    Rec* existing = this->findAndLock(rec->fKey);
    if (NULL != existing) {
        // Since we already have a matching entry, just delete the new one and return.
        // Call sites cannot assume the passed in object will live past this call.
        existing->fBitmap = rec->fBitmap;
        SkDELETE(rec);
        return rec_to_id(existing);
    }

    this->addToHead(rec);
    SkASSERT(1 == rec->fLockCount);
#ifdef USE_HASH
    SkASSERT(fHash);
    fHash->add(rec);
#endif
    // We may (now) be overbudget, so see if we need to purge something.
    this->purgeAsNeeded();
    return rec_to_id(rec);
}

SkScaledImageCache::ID* SkScaledImageCache::addAndLock(uint32_t genID,
                                                       int32_t width,
                                                       int32_t height,
                                                       const SkBitmap& bitmap) {
    Key key(genID, SK_Scalar1, SK_Scalar1, SkIRect::MakeWH(width, height));
    Rec* rec = SkNEW_ARGS(Rec, (key, bitmap));
    return this->addAndLock(rec);
}

SkScaledImageCache::ID* SkScaledImageCache::addAndLock(const SkBitmap& orig,
                                                       SkScalar scaleX,
                                                       SkScalar scaleY,
                                                       const SkBitmap& scaled) {
    if (0 == scaleX || 0 == scaleY) {
        // degenerate, and the key we use for mipmaps
        return NULL;
    }
    SkIRect bounds = get_bounds_from_bitmap(orig);
    if (bounds.isEmpty()) {
        return NULL;
    }
    Key key(orig.getGenerationID(), scaleX, scaleY, bounds);
    Rec* rec = SkNEW_ARGS(Rec, (key, scaled));
    return this->addAndLock(rec);
}

SkScaledImageCache::ID* SkScaledImageCache::addAndLockMip(const SkBitmap& orig,
                                                          const SkMipMap* mip) {
    SkIRect bounds = get_bounds_from_bitmap(orig);
    if (bounds.isEmpty()) {
        return NULL;
    }
    Key key(orig.getGenerationID(), 0, 0, bounds);
    Rec* rec = SkNEW_ARGS(Rec, (key, mip));
    return this->addAndLock(rec);
}

void SkScaledImageCache::unlock(SkScaledImageCache::ID* id) {
    SkASSERT(id);

#ifdef SK_DEBUG
    {
        bool found = false;
        Rec* rec = fHead;
        while (rec != NULL) {
            if (rec == id_to_rec(id)) {
                found = true;
                break;
            }
            rec = rec->fNext;
        }
        SkASSERT(found);
    }
#endif
    Rec* rec = id_to_rec(id);
    SkASSERT(rec->fLockCount > 0);
    rec->fLockCount -= 1;

    // we may have been over-budget, but now have released something, so check
    // if we should purge.
    if (0 == rec->fLockCount) {
        this->purgeAsNeeded();
    }
}
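
// Purge policy: a discardable-backed cache is limited only by entry count
// (SK_DISCARDABLEMEMORY_SCALEDIMAGECACHE_COUNT_LIMIT), presumably because the
// discardable memory system can reclaim the bytes itself; a malloc-backed cache
// is limited only by fByteLimit. Eviction walks from fTail (least recently
// used) and skips any record that is still locked.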

void SkScaledImageCache::purgeAsNeeded() {
    size_t byteLimit;
    int    countLimit;

    if (fDiscardableFactory) {
        countLimit = SK_DISCARDABLEMEMORY_SCALEDIMAGECACHE_COUNT_LIMIT;
        byteLimit = SK_MaxU32;  // no limit based on bytes
    } else {
        countLimit = SK_MaxS32; // no limit based on count
        byteLimit = fByteLimit;
    }

    size_t bytesUsed = fBytesUsed;
    int    countUsed = fCount;

    Rec* rec = fTail;
    while (rec) {
        if (bytesUsed < byteLimit && countUsed < countLimit) {
            break;
        }

        Rec* prev = rec->fPrev;
        if (0 == rec->fLockCount) {
            size_t used = rec->bytesUsed();
            SkASSERT(used <= bytesUsed);
            this->detach(rec);
#ifdef USE_HASH
            fHash->remove(rec->fKey);
#endif

            SkDELETE(rec);

            bytesUsed -= used;
            countUsed -= 1;
        }
        rec = prev;
    }

    fBytesUsed = bytesUsed;
    fCount = countUsed;
}

size_t SkScaledImageCache::setByteLimit(size_t newLimit) {
    size_t prevLimit = fByteLimit;
    fByteLimit = newLimit;
    if (newLimit < prevLimit) {
        this->purgeAsNeeded();
    }
    return prevLimit;
}
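
// The list helpers below maintain the LRU ordering: detach() unlinks a record,
// moveToHead() is called on every cache hit so fHead stays the most recently
// used entry, and addToHead() both links a new record and charges its
// bytesUsed() against fBytesUsed. fBytesUsed is only ever reduced by
// purgeAsNeeded(), so usage can sit above fByteLimit while records are locked.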

///////////////////////////////////////////////////////////////////////////////

void SkScaledImageCache::detach(Rec* rec) {
    Rec* prev = rec->fPrev;
    Rec* next = rec->fNext;

    if (!prev) {
        SkASSERT(fHead == rec);
        fHead = next;
    } else {
        prev->fNext = next;
    }

    if (!next) {
        fTail = prev;
    } else {
        next->fPrev = prev;
    }

    rec->fNext = rec->fPrev = NULL;
}

void SkScaledImageCache::moveToHead(Rec* rec) {
    if (fHead == rec) {
        return;
    }

    SkASSERT(fHead);
    SkASSERT(fTail);

    this->validate();

    this->detach(rec);

    fHead->fPrev = rec;
    rec->fNext = fHead;
    fHead = rec;

    this->validate();
}

void SkScaledImageCache::addToHead(Rec* rec) {
    this->validate();

    rec->fPrev = NULL;
    rec->fNext = fHead;
    if (fHead) {
        fHead->fPrev = rec;
    }
    fHead = rec;
    if (!fTail) {
        fTail = rec;
    }
    fBytesUsed += rec->bytesUsed();
    fCount += 1;

    this->validate();
}

///////////////////////////////////////////////////////////////////////////////

#ifdef SK_DEBUG
void SkScaledImageCache::validate() const {
    if (NULL == fHead) {
        SkASSERT(NULL == fTail);
        SkASSERT(0 == fBytesUsed);
        return;
    }

    if (fHead == fTail) {
        SkASSERT(NULL == fHead->fPrev);
        SkASSERT(NULL == fHead->fNext);
        SkASSERT(fHead->bytesUsed() == fBytesUsed);
        return;
    }

    SkASSERT(NULL == fHead->fPrev);
    SkASSERT(NULL != fHead->fNext);
    SkASSERT(NULL == fTail->fNext);
    SkASSERT(NULL != fTail->fPrev);

    size_t used = 0;
    int count = 0;
    const Rec* rec = fHead;
    while (rec) {
        count += 1;
        used += rec->bytesUsed();
        SkASSERT(used <= fBytesUsed);
        rec = rec->fNext;
    }
    SkASSERT(fCount == count);

    rec = fTail;
    while (rec) {
        SkASSERT(count > 0);
        count -= 1;
        SkASSERT(used >= rec->bytesUsed());
        used -= rec->bytesUsed();
        rec = rec->fPrev;
    }

    SkASSERT(0 == count);
    SkASSERT(0 == used);
}
#endif

void SkScaledImageCache::dump() const {
    this->validate();

    const Rec* rec = fHead;
    int locked = 0;
    while (rec) {
        locked += rec->fLockCount > 0;
        rec = rec->fNext;
    }

    SkDebugf("SkScaledImageCache: count=%d bytes=%d locked=%d %s\n",
             fCount, fBytesUsed, locked,
             fDiscardableFactory ? "discardable" : "malloc");
}

///////////////////////////////////////////////////////////////////////////////

#include "SkThread.h"

SK_DECLARE_STATIC_MUTEX(gMutex);
static SkScaledImageCache* gScaledImageCache = NULL;
static void cleanup_gScaledImageCache() { SkDELETE(gScaledImageCache); }

static void create_cache(int) {
#ifdef SK_USE_DISCARDABLE_SCALEDIMAGECACHE
    gScaledImageCache = SkNEW_ARGS(SkScaledImageCache, (SkDiscardableMemory::Create));
#else
    gScaledImageCache = SkNEW_ARGS(SkScaledImageCache, (SK_DEFAULT_IMAGE_CACHE_LIMIT));
#endif
}

static SkScaledImageCache* get_cache() {
    SK_DECLARE_STATIC_ONCE(once);
    SkOnce(&once, create_cache, 0, cleanup_gScaledImageCache);
    SkASSERT(NULL != gScaledImageCache);
    return gScaledImageCache;
}
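
// The process-wide cache is created lazily by get_cache() via SkOnce, with
// cleanup_gScaledImageCache registered for shutdown. It is either
// discardable-backed (when SK_USE_DISCARDABLE_SCALEDIMAGECACHE is defined) or
// malloc-backed with SK_DEFAULT_IMAGE_CACHE_LIMIT (2MB) as the default byte
// budget. Every public static wrapper below serializes on gMutex before
// touching the cache.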

SkScaledImageCache::ID* SkScaledImageCache::FindAndLock(
                                uint32_t pixelGenerationID,
                                int32_t width,
                                int32_t height,
                                SkBitmap* scaled) {
    SkAutoMutexAcquire am(gMutex);
    return get_cache()->findAndLock(pixelGenerationID, width, height, scaled);
}

SkScaledImageCache::ID* SkScaledImageCache::AddAndLock(
                                uint32_t pixelGenerationID,
                                int32_t width,
                                int32_t height,
                                const SkBitmap& scaled) {
    SkAutoMutexAcquire am(gMutex);
    return get_cache()->addAndLock(pixelGenerationID, width, height, scaled);
}

SkScaledImageCache::ID* SkScaledImageCache::FindAndLock(const SkBitmap& orig,
                                                        SkScalar scaleX,
                                                        SkScalar scaleY,
                                                        SkBitmap* scaled) {
    SkAutoMutexAcquire am(gMutex);
    return get_cache()->findAndLock(orig, scaleX, scaleY, scaled);
}

SkScaledImageCache::ID* SkScaledImageCache::FindAndLockMip(const SkBitmap& orig,
                                                           SkMipMap const ** mip) {
    SkAutoMutexAcquire am(gMutex);
    return get_cache()->findAndLockMip(orig, mip);
}

SkScaledImageCache::ID* SkScaledImageCache::AddAndLock(const SkBitmap& orig,
                                                       SkScalar scaleX,
                                                       SkScalar scaleY,
                                                       const SkBitmap& scaled) {
    SkAutoMutexAcquire am(gMutex);
    return get_cache()->addAndLock(orig, scaleX, scaleY, scaled);
}

SkScaledImageCache::ID* SkScaledImageCache::AddAndLockMip(const SkBitmap& orig,
                                                          const SkMipMap* mip) {
    SkAutoMutexAcquire am(gMutex);
    return get_cache()->addAndLockMip(orig, mip);
}

void SkScaledImageCache::Unlock(SkScaledImageCache::ID* id) {
    SkAutoMutexAcquire am(gMutex);
    get_cache()->unlock(id);

    // get_cache()->dump();
}

size_t SkScaledImageCache::GetBytesUsed() {
    SkAutoMutexAcquire am(gMutex);
    return get_cache()->getBytesUsed();
}

size_t SkScaledImageCache::GetByteLimit() {
    SkAutoMutexAcquire am(gMutex);
    return get_cache()->getByteLimit();
}

size_t SkScaledImageCache::SetByteLimit(size_t newLimit) {
    SkAutoMutexAcquire am(gMutex);
    return get_cache()->setByteLimit(newLimit);
}

SkBitmap::Allocator* SkScaledImageCache::GetAllocator() {
    SkAutoMutexAcquire am(gMutex);
    return get_cache()->allocator();
}

void SkScaledImageCache::Dump() {
    SkAutoMutexAcquire am(gMutex);
    get_cache()->dump();
}

///////////////////////////////////////////////////////////////////////////////

#include "SkGraphics.h"

size_t SkGraphics::GetImageCacheBytesUsed() {
    return SkScaledImageCache::GetBytesUsed();
}

size_t SkGraphics::GetImageCacheByteLimit() {
    return SkScaledImageCache::GetByteLimit();
}

size_t SkGraphics::SetImageCacheByteLimit(size_t newLimit) {
    return SkScaledImageCache::SetByteLimit(newLimit);
}