Sat, 03 Jan 2015 20:18:00 +0100
Conditionally enable double-key logic based on private browsing mode or the
privacy.thirdparty.isolate preference, and implement it in
GetCookieStringCommon and FindCookie, where it counts.
Open question: how to convince FindCookie callers to test the
condition and pass a nullptr when double-key logic is disabled.
1 /*
2 * Copyright 2013 Google Inc.
3 *
4 * Use of this source code is governed by a BSD-style license that can be
5 * found in the LICENSE file.
6 */
8 #include "SkScaledImageCache.h"
9 #include "SkMipMap.h"
10 #include "SkOnce.h"
11 #include "SkPixelRef.h"
12 #include "SkRect.h"
// This can be defined by the caller's build system
//#define SK_USE_DISCARDABLE_SCALEDIMAGECACHE

// Cap on the number of entries when the cache is backed by discardable
// memory (in that mode there is no byte limit -- see purgeAsNeeded()).
#ifndef SK_DISCARDABLEMEMORY_SCALEDIMAGECACHE_COUNT_LIMIT
#   define SK_DISCARDABLEMEMORY_SCALEDIMAGECACHE_COUNT_LIMIT 1024
#endif

// Default byte budget for the malloc-backed cache (2MB).
#ifndef SK_DEFAULT_IMAGE_CACHE_LIMIT
#define SK_DEFAULT_IMAGE_CACHE_LIMIT (2 * 1024 * 1024)
#endif
25 static inline SkScaledImageCache::ID* rec_to_id(SkScaledImageCache::Rec* rec) {
26 return reinterpret_cast<SkScaledImageCache::ID*>(rec);
27 }
29 static inline SkScaledImageCache::Rec* id_to_rec(SkScaledImageCache::ID* id) {
30 return reinterpret_cast<SkScaledImageCache::Rec*>(id);
31 }
// Implemented from en.wikipedia.org/wiki/MurmurHash (MurmurHash3-style
// per-word mixing followed by the fmix32 finalizer).
static uint32_t compute_hash(const uint32_t data[], int count) {
    uint32_t h = 0;

    const uint32_t* stop = data + count;
    for (const uint32_t* ptr = data; ptr < stop; ++ptr) {
        uint32_t block = *ptr;
        block *= 0xcc9e2d51;
        block = (block << 15) | (block >> 17);
        block *= 0x1b873593;

        h ^= block;
        h = (h << 13) | (h >> 19);
        h = h * 5 + 0xe6546b64;
    }

    // final avalanche: spread entropy into the low bits
    // h ^= size;
    h ^= h >> 16;
    h *= 0x85ebca6b;
    h ^= h >> 13;
    h *= 0xc2b2ae35;
    h ^= h >> 16;

    return h;
}
// Cache key: pixelref generation ID + scale factors + bounds within the
// pixelref. Comparison and hashing treat the members as a flat run of
// 32-bit words, which assumes the members below are contiguous (all are
// 4-byte types, so no padding is expected -- NOTE(review): re-verify if
// members are ever added or reordered).
struct SkScaledImageCache::Key {
    Key(uint32_t genID,
        SkScalar scaleX,
        SkScalar scaleY,
        SkIRect  bounds)
        : fGenID(genID)
        , fScaleX(scaleX)
        , fScaleY(scaleY)
        , fBounds(bounds) {
        // 7 == number of 32-bit words from fGenID through fBounds
        // (genID + scaleX + scaleY + the 4 ints of bounds).
        fHash = compute_hash(&fGenID, 7);
    }

    // Lexicographic word-wise compare over fGenID..fBounds (fHash excluded).
    bool operator<(const Key& other) const {
        const uint32_t* a = &fGenID;
        const uint32_t* b = &other.fGenID;
        for (int i = 0; i < 7; ++i) {
            if (a[i] < b[i]) {
                return true;
            }
            if (a[i] > b[i]) {
                return false;
            }
        }
        return false;
    }

    // Word-wise equality over all 8 words starting at fHash (differing keys
    // usually differ in the hash, so this exits early).
    bool operator==(const Key& other) const {
        const uint32_t* a = &fHash;
        const uint32_t* b = &other.fHash;
        for (int i = 0; i < 8; ++i) {
            if (a[i] != b[i]) {
                return false;
            }
        }
        return true;
    }

    uint32_t    fHash;
    uint32_t    fGenID;
    float       fScaleX;    // stored as float even though the ctor takes SkScalar
    float       fScaleY;
    SkIRect     fBounds;
};
103 struct SkScaledImageCache::Rec {
104 Rec(const Key& key, const SkBitmap& bm) : fKey(key), fBitmap(bm) {
105 fLockCount = 1;
106 fMip = NULL;
107 }
109 Rec(const Key& key, const SkMipMap* mip) : fKey(key) {
110 fLockCount = 1;
111 fMip = mip;
112 mip->ref();
113 }
115 ~Rec() {
116 SkSafeUnref(fMip);
117 }
119 size_t bytesUsed() const {
120 return fMip ? fMip->getSize() : fBitmap.getSize();
121 }
123 Rec* fNext;
124 Rec* fPrev;
126 // this guy wants to be 64bit aligned
127 Key fKey;
129 int32_t fLockCount;
131 // we use either fBitmap or fMip, but not both
132 SkBitmap fBitmap;
133 const SkMipMap* fMip;
134 };
136 #include "SkTDynamicHash.h"
namespace { // can't use static functions w/ template parameters

// Adapters required by SkTDynamicHash: key accessor, hasher, and equality.

const SkScaledImageCache::Key& key_from_rec(const SkScaledImageCache::Rec& rec) {
    return rec.fKey;
}

uint32_t hash_from_key(const SkScaledImageCache::Key& key) {
    return key.fHash;  // precomputed in the Key constructor
}

bool eq_rec_key(const SkScaledImageCache::Rec& rec, const SkScaledImageCache::Key& key) {
    return rec.fKey == key;
}

}  // namespace
// Hash table mapping Key -> Rec, layered on SkTDynamicHash via the adapter
// functions in the anonymous namespace above.
class SkScaledImageCache::Hash : public SkTDynamicHash<SkScaledImageCache::Rec,
                                                       SkScaledImageCache::Key,
                                                       key_from_rec,
                                                       hash_from_key,
                                                       eq_rec_key> {};
158 ///////////////////////////////////////////////////////////////////////////////
// experimental hash to speed things up
#define USE_HASH

#if !defined(USE_HASH)
// Linear fallback lookup, used only when USE_HASH is disabled.
// Fixes two latent compile errors in this (normally dead) path: Key must be
// qualified as SkScaledImageCache::Key at namespace scope, and Key defines
// only operator== / operator< (no operator!=), so equality is negated.
static inline SkScaledImageCache::Rec* find_rec_in_list(
        SkScaledImageCache::Rec* head, const SkScaledImageCache::Key& key) {
    SkScaledImageCache::Rec* rec = head;
    while ((rec != NULL) && !(rec->fKey == key)) {
        rec = rec->fNext;
    }
    return rec;
}
#endif
// Shared constructor helper: empty LRU list, empty hash (when enabled),
// and zeroed budget/counters.
void SkScaledImageCache::init() {
    fHead = NULL;
    fTail = NULL;
#ifdef USE_HASH
    fHash = new Hash;
#else
    fHash = NULL;
#endif
    fBytesUsed = 0;
    fCount = 0;
    fAllocator = NULL;

    // One of these should be explicit set by the caller after we return.
    fByteLimit = 0;
    fDiscardableFactory = NULL;
}
191 #include "SkDiscardableMemory.h"
// PixelRef whose storage is a single SkDiscardableMemory block. "One shot"
// because the memory arrives already locked: the first onNewLockPixels call
// consumes that initial lock instead of taking a new one.
class SkOneShotDiscardablePixelRef : public SkPixelRef {
public:
    SK_DECLARE_INST_COUNT(SkOneShotDiscardablePixelRef)
    // Ownership of the discardablememory is transfered to the pixelref
    SkOneShotDiscardablePixelRef(const SkImageInfo&, SkDiscardableMemory*, size_t rowBytes);
    ~SkOneShotDiscardablePixelRef();

    SK_DECLARE_UNFLATTENABLE_OBJECT()

protected:
    virtual bool onNewLockPixels(LockRec*) SK_OVERRIDE;
    virtual void onUnlockPixels() SK_OVERRIDE;
    virtual size_t getAllocatedSizeInBytes() const SK_OVERRIDE;

private:
    SkDiscardableMemory* fDM;        // owned; set to NULL after a failed re-lock
    size_t               fRB;        // row bytes
    bool                 fFirstTime; // true until the initial lock is consumed

    typedef SkPixelRef INHERITED;
};
215 SkOneShotDiscardablePixelRef::SkOneShotDiscardablePixelRef(const SkImageInfo& info,
216 SkDiscardableMemory* dm,
217 size_t rowBytes)
218 : INHERITED(info)
219 , fDM(dm)
220 , fRB(rowBytes)
221 {
222 SkASSERT(dm->data());
223 fFirstTime = true;
224 }
SkOneShotDiscardablePixelRef::~SkOneShotDiscardablePixelRef() {
    // We own the discardable memory (may already be NULL after a failed lock).
    SkDELETE(fDM);
}
230 bool SkOneShotDiscardablePixelRef::onNewLockPixels(LockRec* rec) {
231 if (fFirstTime) {
232 // we're already locked
233 SkASSERT(fDM->data());
234 fFirstTime = false;
235 goto SUCCESS;
236 }
238 // A previous call to onUnlock may have deleted our DM, so check for that
239 if (NULL == fDM) {
240 return false;
241 }
243 if (!fDM->lock()) {
244 // since it failed, we delete it now, to free-up the resource
245 delete fDM;
246 fDM = NULL;
247 return false;
248 }
250 SUCCESS:
251 rec->fPixels = fDM->data();
252 rec->fColorTable = NULL;
253 rec->fRowBytes = fRB;
254 return true;
255 }
void SkOneShotDiscardablePixelRef::onUnlockPixels() {
    // NOTE(review): relies on SkPixelRef only calling unlock after a
    // successful lock, so fDM is non-NULL and the one-shot lock is spent.
    SkASSERT(!fFirstTime);
    fDM->unlock();
}
// Report the footprint implied by our row bytes and image info.
size_t SkOneShotDiscardablePixelRef::getAllocatedSizeInBytes() const {
    return this->info().getSafeSize(fRB);
}
// SkBitmap::Allocator that backs pixel storage with discardable memory
// obtained from the factory supplied by the cache.
class SkScaledImageCacheDiscardableAllocator : public SkBitmap::Allocator {
public:
    SkScaledImageCacheDiscardableAllocator(
                            SkScaledImageCache::DiscardableFactory factory) {
        SkASSERT(factory);
        fFactory = factory;
    }

    virtual bool allocPixelRef(SkBitmap*, SkColorTable*) SK_OVERRIDE;

private:
    SkScaledImageCache::DiscardableFactory fFactory;
};
280 bool SkScaledImageCacheDiscardableAllocator::allocPixelRef(SkBitmap* bitmap,
281 SkColorTable* ctable) {
282 size_t size = bitmap->getSize();
283 if (0 == size) {
284 return false;
285 }
287 SkDiscardableMemory* dm = fFactory(size);
288 if (NULL == dm) {
289 return false;
290 }
292 // can we relax this?
293 if (kPMColor_SkColorType != bitmap->colorType()) {
294 return false;
295 }
297 SkImageInfo info = bitmap->info();
298 bitmap->setPixelRef(SkNEW_ARGS(SkOneShotDiscardablePixelRef,
299 (info, dm, bitmap->rowBytes())))->unref();
300 bitmap->lockPixels();
301 return bitmap->readyToDraw();
302 }
// Discardable-memory flavor: entries are capped by count (not bytes), and
// cached bitmaps are allocated through the discardable allocator.
SkScaledImageCache::SkScaledImageCache(DiscardableFactory factory) {
    this->init();
    fDiscardableFactory = factory;

    fAllocator = SkNEW_ARGS(SkScaledImageCacheDiscardableAllocator, (factory));
}
// Malloc flavor: entries are bounded by a total byte budget.
SkScaledImageCache::SkScaledImageCache(size_t byteLimit) {
    this->init();
    fByteLimit = byteLimit;
}
316 SkScaledImageCache::~SkScaledImageCache() {
317 SkSafeUnref(fAllocator);
319 Rec* rec = fHead;
320 while (rec) {
321 Rec* next = rec->fNext;
322 SkDELETE(rec);
323 rec = next;
324 }
325 delete fHash;
326 }
328 ////////////////////////////////////////////////////////////////////////////////
// Convenience overload: builds the Key and forwards to findAndLock(key).
SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(uint32_t genID,
                                                        SkScalar scaleX,
                                                        SkScalar scaleY,
                                                        const SkIRect& bounds) {
    const Key key(genID, scaleX, scaleY, bounds);
    return this->findAndLock(key);
}
/**
   This private method is the fully general record finder. All other
   record finders should call this function or the one above.
   On a hit the record moves to the head of the LRU list and its lock
   count is bumped; the caller must eventually unlock() it. */
SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(const SkScaledImageCache::Key& key) {
    if (key.fBounds.isEmpty()) {
        // Empty bounds are never cached (see get_bounds_from_bitmap).
        return NULL;
    }
#ifdef USE_HASH
    Rec* rec = fHash->find(key);
#else
    Rec* rec = find_rec_in_list(fHead, key);
#endif
    if (rec) {
        this->moveToHead(rec);  // for our LRU
        rec->fLockCount += 1;
    }
    return rec;
}
358 /**
359 This function finds the bounds of the bitmap *within its pixelRef*.
360 If the bitmap lacks a pixelRef, it will return an empty rect, since
361 that doesn't make sense. This may be a useful enough function that
362 it should be somewhere else (in SkBitmap?). */
363 static SkIRect get_bounds_from_bitmap(const SkBitmap& bm) {
364 if (!(bm.pixelRef())) {
365 return SkIRect::MakeEmpty();
366 }
367 SkIPoint origin = bm.pixelRefOrigin();
368 return SkIRect::MakeXYWH(origin.fX, origin.fY, bm.width(), bm.height());
369 }
// Find-and-lock an unscaled bitmap entry keyed by (genID, width, height).
// On a hit, copies the cached bitmap into *bitmap and returns a non-NULL
// ID that the caller must later pass to unlock().
SkScaledImageCache::ID* SkScaledImageCache::findAndLock(uint32_t genID,
                                                        int32_t width,
                                                        int32_t height,
                                                        SkBitmap* bitmap) {
    Rec* rec = this->findAndLock(genID, SK_Scalar1, SK_Scalar1,
                                 SkIRect::MakeWH(width, height));
    if (rec) {
        SkASSERT(NULL == rec->fMip);
        SkASSERT(rec->fBitmap.pixelRef());
        *bitmap = rec->fBitmap;
    }
    return rec_to_id(rec);
}
// Find-and-lock a scaled copy of orig. A zero scale is rejected because
// (0, 0) is the reserved key used for mipmap entries.
SkScaledImageCache::ID* SkScaledImageCache::findAndLock(const SkBitmap& orig,
                                                        SkScalar scaleX,
                                                        SkScalar scaleY,
                                                        SkBitmap* scaled) {
    if (0 == scaleX || 0 == scaleY) {
        // degenerate, and the key we use for mipmaps
        return NULL;
    }
    Rec* rec = this->findAndLock(orig.getGenerationID(), scaleX,
                                 scaleY, get_bounds_from_bitmap(orig));
    if (rec) {
        SkASSERT(NULL == rec->fMip);
        SkASSERT(rec->fBitmap.pixelRef());
        *scaled = rec->fBitmap;
    }
    return rec_to_id(rec);
}
// Find-and-lock the mipmap entry for orig (stored under scale (0, 0)).
SkScaledImageCache::ID* SkScaledImageCache::findAndLockMip(const SkBitmap& orig,
                                                           SkMipMap const ** mip) {
    Rec* rec = this->findAndLock(orig.getGenerationID(), 0, 0,
                                 get_bounds_from_bitmap(orig));
    if (rec) {
        SkASSERT(rec->fMip);
        SkASSERT(NULL == rec->fBitmap.pixelRef());
        *mip = rec->fMip;
    }
    return rec_to_id(rec);
}
417 ////////////////////////////////////////////////////////////////////////////////
/**
   This private method is the fully general record adder. All other
   record adders should call this function.
   Takes ownership of rec: when the key already exists, the new rec is
   deleted and the existing (freshly locked) record is returned instead. */
SkScaledImageCache::ID* SkScaledImageCache::addAndLock(SkScaledImageCache::Rec* rec) {
    SkASSERT(rec);
    // See if we already have this key (racy inserts, etc.)
    Rec* existing = this->findAndLock(rec->fKey);
    if (NULL != existing) {
        // Since we already have a matching entry, just delete the new one and return.
        // Call sites cannot assume the passed in object will live past this call.
        // NOTE(review): only fBitmap is refreshed here; for mipmap records this
        // copies an empty bitmap over an empty bitmap, which is harmless.
        existing->fBitmap = rec->fBitmap;
        SkDELETE(rec);
        return rec_to_id(existing);
    }

    this->addToHead(rec);
    SkASSERT(1 == rec->fLockCount);
#ifdef USE_HASH
    SkASSERT(fHash);
    fHash->add(rec);
#endif
    // We may (now) be overbudget, so see if we need to purge something.
    this->purgeAsNeeded();
    return rec_to_id(rec);
}
// Add an unscaled bitmap under (genID, width, height); returns a locked ID.
SkScaledImageCache::ID* SkScaledImageCache::addAndLock(uint32_t genID,
                                                       int32_t width,
                                                       int32_t height,
                                                       const SkBitmap& bitmap) {
    Key key(genID, SK_Scalar1, SK_Scalar1, SkIRect::MakeWH(width, height));
    Rec* rec = SkNEW_ARGS(Rec, (key, bitmap));
    return this->addAndLock(rec);
}
// Add a scaled copy of orig. Zero scales are rejected (reserved for mipmap
// keys), as are bitmaps without a pixelRef (empty bounds).
SkScaledImageCache::ID* SkScaledImageCache::addAndLock(const SkBitmap& orig,
                                                       SkScalar scaleX,
                                                       SkScalar scaleY,
                                                       const SkBitmap& scaled) {
    if (0 == scaleX || 0 == scaleY) {
        // degenerate, and the key we use for mipmaps
        return NULL;
    }
    SkIRect bounds = get_bounds_from_bitmap(orig);
    if (bounds.isEmpty()) {
        return NULL;
    }
    Key key(orig.getGenerationID(), scaleX, scaleY, bounds);
    Rec* rec = SkNEW_ARGS(Rec, (key, scaled));
    return this->addAndLock(rec);
}
// Add the mipmap for orig under the reserved (0, 0) scale key.
SkScaledImageCache::ID* SkScaledImageCache::addAndLockMip(const SkBitmap& orig,
                                                          const SkMipMap* mip) {
    SkIRect bounds = get_bounds_from_bitmap(orig);
    if (bounds.isEmpty()) {
        return NULL;
    }
    Key key(orig.getGenerationID(), 0, 0, bounds);
    Rec* rec = SkNEW_ARGS(Rec, (key, mip));
    return this->addAndLock(rec);
}
// Release one lock on the record behind id. When the last lock drops the
// record becomes evictable, so run a purge pass then.
void SkScaledImageCache::unlock(SkScaledImageCache::ID* id) {
    SkASSERT(id);

#ifdef SK_DEBUG
    {
        // Verify the id actually refers to a record in this cache.
        bool found = false;
        Rec* rec = fHead;
        while (rec != NULL) {
            if (rec == id_to_rec(id)) {
                found = true;
                break;
            }
            rec = rec->fNext;
        }
        SkASSERT(found);
    }
#endif
    Rec* rec = id_to_rec(id);
    SkASSERT(rec->fLockCount > 0);
    rec->fLockCount -= 1;

    // we may have been over-budget, but now have released something, so check
    // if we should purge.
    if (0 == rec->fLockCount) {
        this->purgeAsNeeded();
    }
}
// Evict unlocked records from the LRU tail until we are back under budget.
// Discardable-backed caches are limited by entry count; malloc-backed
// caches by total bytes.
void SkScaledImageCache::purgeAsNeeded() {
    size_t byteLimit;
    int countLimit;

    if (fDiscardableFactory) {
        countLimit = SK_DISCARDABLEMEMORY_SCALEDIMAGECACHE_COUNT_LIMIT;
        byteLimit = SK_MaxU32;  // no limit based on bytes
    } else {
        countLimit = SK_MaxS32;  // no limit based on count
        byteLimit = fByteLimit;
    }

    size_t bytesUsed = fBytesUsed;
    int countUsed = fCount;

    // Walk from least-recently-used (tail) toward the head.
    Rec* rec = fTail;
    while (rec) {
        if (bytesUsed < byteLimit && countUsed < countLimit) {
            break;
        }

        Rec* prev = rec->fPrev;
        if (0 == rec->fLockCount) {  // locked records are never evicted
            size_t used = rec->bytesUsed();
            SkASSERT(used <= bytesUsed);
            this->detach(rec);
#ifdef USE_HASH
            // Remove from the hash before deleting rec (the key lives in rec).
            fHash->remove(rec->fKey);
#endif

            SkDELETE(rec);

            bytesUsed -= used;
            countUsed -= 1;
        }
        rec = prev;
    }

    fBytesUsed = bytesUsed;
    fCount = countUsed;
}
551 size_t SkScaledImageCache::setByteLimit(size_t newLimit) {
552 size_t prevLimit = fByteLimit;
553 fByteLimit = newLimit;
554 if (newLimit < prevLimit) {
555 this->purgeAsNeeded();
556 }
557 return prevLimit;
558 }
560 ///////////////////////////////////////////////////////////////////////////////
// Unlink rec from the doubly-linked LRU list, fixing head/tail as needed.
// Does not touch fBytesUsed/fCount; callers account for that themselves.
void SkScaledImageCache::detach(Rec* rec) {
    Rec* prev = rec->fPrev;
    Rec* next = rec->fNext;

    if (!prev) {
        SkASSERT(fHead == rec);
        fHead = next;
    } else {
        prev->fNext = next;
    }

    if (!next) {
        fTail = prev;
    } else {
        next->fPrev = prev;
    }

    rec->fNext = rec->fPrev = NULL;
}
// Move rec to the front of the LRU list (most-recently-used position).
void SkScaledImageCache::moveToHead(Rec* rec) {
    if (fHead == rec) {
        return;
    }

    SkASSERT(fHead);
    SkASSERT(fTail);

    this->validate();

    this->detach(rec);

    fHead->fPrev = rec;
    rec->fNext = fHead;
    fHead = rec;

    this->validate();
}
// Insert a brand-new rec at the head of the LRU list and charge its size
// against the cache totals.
void SkScaledImageCache::addToHead(Rec* rec) {
    this->validate();

    rec->fPrev = NULL;
    rec->fNext = fHead;
    if (fHead) {
        fHead->fPrev = rec;
    }
    fHead = rec;
    if (!fTail) {
        fTail = rec;
    }
    fBytesUsed += rec->bytesUsed();
    fCount += 1;

    this->validate();
}
619 ///////////////////////////////////////////////////////////////////////////////
#ifdef SK_DEBUG
// Debug-only consistency check: walks the LRU list in both directions and
// verifies the links, the byte total, and the entry count all agree.
void SkScaledImageCache::validate() const {
    if (NULL == fHead) {
        SkASSERT(NULL == fTail);
        SkASSERT(0 == fBytesUsed);
        return;
    }

    if (fHead == fTail) {
        SkASSERT(NULL == fHead->fPrev);
        SkASSERT(NULL == fHead->fNext);
        SkASSERT(fHead->bytesUsed() == fBytesUsed);
        return;
    }

    SkASSERT(NULL == fHead->fPrev);
    SkASSERT(NULL != fHead->fNext);
    SkASSERT(NULL == fTail->fNext);
    SkASSERT(NULL != fTail->fPrev);

    // Forward pass: accumulate size/count and compare against the totals.
    size_t used = 0;
    int count = 0;
    const Rec* rec = fHead;
    while (rec) {
        count += 1;
        used += rec->bytesUsed();
        SkASSERT(used <= fBytesUsed);
        rec = rec->fNext;
    }
    SkASSERT(fCount == count);

    // Backward pass: everything accumulated forward must unwind to zero.
    rec = fTail;
    while (rec) {
        SkASSERT(count > 0);
        count -= 1;
        SkASSERT(used >= rec->bytesUsed());
        used -= rec->bytesUsed();
        rec = rec->fPrev;
    }

    SkASSERT(0 == count);
    SkASSERT(0 == used);
}
#endif
666 void SkScaledImageCache::dump() const {
667 this->validate();
669 const Rec* rec = fHead;
670 int locked = 0;
671 while (rec) {
672 locked += rec->fLockCount > 0;
673 rec = rec->fNext;
674 }
676 SkDebugf("SkScaledImageCache: count=%d bytes=%d locked=%d %s\n",
677 fCount, fBytesUsed, locked,
678 fDiscardableFactory ? "discardable" : "malloc");
679 }
681 ///////////////////////////////////////////////////////////////////////////////
683 #include "SkThread.h"
SK_DECLARE_STATIC_MUTEX(gMutex);  // serializes all access to gScaledImageCache
static SkScaledImageCache* gScaledImageCache = NULL;
// Registered with SkOnce below so the singleton is freed at shutdown.
static void cleanup_gScaledImageCache() { SkDELETE(gScaledImageCache); }
// SkOnce callback that builds the global cache. The int parameter is the
// (unused) 0 passed by get_cache().
static void create_cache(int) {
#ifdef SK_USE_DISCARDABLE_SCALEDIMAGECACHE
    gScaledImageCache = SkNEW_ARGS(SkScaledImageCache, (SkDiscardableMemory::Create));
#else
    gScaledImageCache = SkNEW_ARGS(SkScaledImageCache, (SK_DEFAULT_IMAGE_CACHE_LIMIT));
#endif
}
// Lazily creates the singleton cache (thread-safe via SkOnce) and returns
// it. All callers below hold gMutex while using the returned cache.
static SkScaledImageCache* get_cache() {
    SK_DECLARE_STATIC_ONCE(once);
    SkOnce(&once, create_cache, 0, cleanup_gScaledImageCache);
    SkASSERT(NULL != gScaledImageCache);
    return gScaledImageCache;
}
705 SkScaledImageCache::ID* SkScaledImageCache::FindAndLock(
706 uint32_t pixelGenerationID,
707 int32_t width,
708 int32_t height,
709 SkBitmap* scaled) {
710 SkAutoMutexAcquire am(gMutex);
711 return get_cache()->findAndLock(pixelGenerationID, width, height, scaled);
712 }
714 SkScaledImageCache::ID* SkScaledImageCache::AddAndLock(
715 uint32_t pixelGenerationID,
716 int32_t width,
717 int32_t height,
718 const SkBitmap& scaled) {
719 SkAutoMutexAcquire am(gMutex);
720 return get_cache()->addAndLock(pixelGenerationID, width, height, scaled);
721 }
724 SkScaledImageCache::ID* SkScaledImageCache::FindAndLock(const SkBitmap& orig,
725 SkScalar scaleX,
726 SkScalar scaleY,
727 SkBitmap* scaled) {
728 SkAutoMutexAcquire am(gMutex);
729 return get_cache()->findAndLock(orig, scaleX, scaleY, scaled);
730 }
732 SkScaledImageCache::ID* SkScaledImageCache::FindAndLockMip(const SkBitmap& orig,
733 SkMipMap const ** mip) {
734 SkAutoMutexAcquire am(gMutex);
735 return get_cache()->findAndLockMip(orig, mip);
736 }
738 SkScaledImageCache::ID* SkScaledImageCache::AddAndLock(const SkBitmap& orig,
739 SkScalar scaleX,
740 SkScalar scaleY,
741 const SkBitmap& scaled) {
742 SkAutoMutexAcquire am(gMutex);
743 return get_cache()->addAndLock(orig, scaleX, scaleY, scaled);
744 }
746 SkScaledImageCache::ID* SkScaledImageCache::AddAndLockMip(const SkBitmap& orig,
747 const SkMipMap* mip) {
748 SkAutoMutexAcquire am(gMutex);
749 return get_cache()->addAndLockMip(orig, mip);
750 }
752 void SkScaledImageCache::Unlock(SkScaledImageCache::ID* id) {
753 SkAutoMutexAcquire am(gMutex);
754 get_cache()->unlock(id);
756 // get_cache()->dump();
757 }
759 size_t SkScaledImageCache::GetBytesUsed() {
760 SkAutoMutexAcquire am(gMutex);
761 return get_cache()->getBytesUsed();
762 }
764 size_t SkScaledImageCache::GetByteLimit() {
765 SkAutoMutexAcquire am(gMutex);
766 return get_cache()->getByteLimit();
767 }
769 size_t SkScaledImageCache::SetByteLimit(size_t newLimit) {
770 SkAutoMutexAcquire am(gMutex);
771 return get_cache()->setByteLimit(newLimit);
772 }
774 SkBitmap::Allocator* SkScaledImageCache::GetAllocator() {
775 SkAutoMutexAcquire am(gMutex);
776 return get_cache()->allocator();
777 }
779 void SkScaledImageCache::Dump() {
780 SkAutoMutexAcquire am(gMutex);
781 get_cache()->dump();
782 }
784 ///////////////////////////////////////////////////////////////////////////////
786 #include "SkGraphics.h"
// Public SkGraphics facade over the global scaled-image cache.
size_t SkGraphics::GetImageCacheBytesUsed() {
    return SkScaledImageCache::GetBytesUsed();
}
// Public SkGraphics facade over the global scaled-image cache.
size_t SkGraphics::GetImageCacheByteLimit() {
    return SkScaledImageCache::GetByteLimit();
}
// Public SkGraphics facade over the global scaled-image cache; returns the
// previous limit.
size_t SkGraphics::SetImageCacheByteLimit(size_t newLimit) {
    return SkScaledImageCache::SetByteLimit(newLimit);
}