|
1 /* |
|
2 * Copyright 2013 Google Inc. |
|
3 * |
|
4 * Use of this source code is governed by a BSD-style license that can be |
|
5 * found in the LICENSE file. |
|
6 */ |
|
7 |
|
8 #include "SkScaledImageCache.h" |
|
9 #include "SkMipMap.h" |
|
10 #include "SkOnce.h" |
|
11 #include "SkPixelRef.h" |
|
12 #include "SkRect.h" |
|
13 |
|
14 // This can be defined by the caller's build system |
|
15 //#define SK_USE_DISCARDABLE_SCALEDIMAGECACHE |
|
16 |
|
17 #ifndef SK_DISCARDABLEMEMORY_SCALEDIMAGECACHE_COUNT_LIMIT |
|
18 # define SK_DISCARDABLEMEMORY_SCALEDIMAGECACHE_COUNT_LIMIT 1024 |
|
19 #endif |
|
20 |
|
21 #ifndef SK_DEFAULT_IMAGE_CACHE_LIMIT |
|
22 #define SK_DEFAULT_IMAGE_CACHE_LIMIT (2 * 1024 * 1024) |
|
23 #endif |
|
24 |
|
// An ID handed to callers is just a type-punned pointer to the cache's Rec;
// this converts in the Rec -> ID direction.
static inline SkScaledImageCache::ID* rec_to_id(SkScaledImageCache::Rec* rec) {
    return reinterpret_cast<SkScaledImageCache::ID*>(rec);
}
|
28 |
|
// Inverse of rec_to_id(): recover the cache Rec from an opaque caller ID.
static inline SkScaledImageCache::Rec* id_to_rec(SkScaledImageCache::ID* id) {
    return reinterpret_cast<SkScaledImageCache::Rec*>(id);
}
|
32 |
|
// 32-bit hash of `count` words, following the MurmurHash3 construction
// (en.wikipedia.org/wiki/MurmurHash): per-word mixing followed by a final
// avalanche step. Seed is fixed at 0.
static uint32_t compute_hash(const uint32_t data[], int count) {
    uint32_t h = 0;

    for (int i = 0; i < count; ++i) {
        uint32_t k = data[i] * 0xcc9e2d51;
        k = (k << 15) | (k >> 17);      // rotate left 15
        k *= 0x1b873593;

        h ^= k;
        h = (h << 13) | (h >> 19);      // rotate left 13
        h = h * 5 + 0xe6546b64;
    }

    // finalization mix: force the high bits to affect the low bits
    h ^= h >> 16;
    h *= 0x85ebca6b;
    h ^= h >> 13;
    h *= 0xc2b2ae35;
    h ^= h >> 16;

    return h;
}
|
58 |
|
/*
 *  Cache key: a bitmap's generation ID plus the scale factors and the bounds
 *  within its pixelRef. The key is hashed and compared by treating the
 *  members after fHash as a flat array of 7 uint32_t words
 *  (fGenID, fScaleX, fScaleY, fBounds.{L,T,R,B}).
 *
 *  NOTE(review): this relies on the members being laid out contiguously with
 *  no padding, and on fHash being immediately followed by fGenID (operator==
 *  spans 8 words starting at fHash). Reordering or adding fields here would
 *  silently break hashing/equality — confirm layout if this struct changes.
 */
struct SkScaledImageCache::Key {
    Key(uint32_t genID,
        SkScalar scaleX,
        SkScalar scaleY,
        SkIRect bounds)
        : fGenID(genID)
        , fScaleX(scaleX)
        , fScaleY(scaleY)
        , fBounds(bounds) {
        // 7 = the number of 32-bit words following fHash (see layout note).
        fHash = compute_hash(&fGenID, 7);
    }

    // Lexicographic word-wise ordering over the 7 payload words (fHash is
    // deliberately excluded so ordering is stable regardless of hash).
    bool operator<(const Key& other) const {
        const uint32_t* a = &fGenID;
        const uint32_t* b = &other.fGenID;
        for (int i = 0; i < 7; ++i) {
            if (a[i] < b[i]) {
                return true;
            }
            if (a[i] > b[i]) {
                return false;
            }
        }
        return false;
    }

    // Word-wise equality over all 8 words, including the cached fHash
    // (comparing the hash first gives a cheap early-out on mismatch).
    bool operator==(const Key& other) const {
        const uint32_t* a = &fHash;
        const uint32_t* b = &other.fHash;
        for (int i = 0; i < 8; ++i) {
            if (a[i] != b[i]) {
                return false;
            }
        }
        return true;
    }

    uint32_t fHash;     // cached compute_hash() of the 7 words below
    uint32_t fGenID;    // SkPixelRef generation ID of the source bitmap
    // stored as float even though the ctor takes SkScalar — presumably
    // SkScalar is float in this configuration; TODO confirm for fixed-point builds
    float fScaleX;
    float fScaleY;
    SkIRect fBounds;    // subset of the pixelRef this entry represents
};
|
102 |
|
/*
 *  A cache entry: doubly-linked into the LRU list, holding either a scaled
 *  bitmap or a mipmap (never both). A Rec pointer is also what callers
 *  receive, type-punned, as an SkScaledImageCache::ID.
 */
struct SkScaledImageCache::Rec {
    // Bitmap flavor. Starts locked (count 1) on behalf of the caller that
    // is adding it.
    Rec(const Key& key, const SkBitmap& bm) : fKey(key), fBitmap(bm) {
        fLockCount = 1;
        fMip = NULL;
    }

    // Mipmap flavor: takes a ref on the mip; fBitmap stays default-constructed.
    Rec(const Key& key, const SkMipMap* mip) : fKey(key) {
        fLockCount = 1;
        fMip = mip;
        mip->ref();
    }

    ~Rec() {
        SkSafeUnref(fMip);
    }

    // Bytes charged against the cache budget for this entry.
    size_t bytesUsed() const {
        return fMip ? fMip->getSize() : fBitmap.getSize();
    }

    Rec*    fNext;
    Rec*    fPrev;

    // this guy wants to be 64bit aligned
    Key     fKey;

    // Number of outstanding findAndLock/addAndLock holds; only unlocked
    // (count == 0) entries may be purged.
    int32_t fLockCount;

    // we use either fBitmap or fMip, but not both
    SkBitmap fBitmap;
    const SkMipMap* fMip;
};
|
135 |
|
136 #include "SkTDynamicHash.h" |
|
137 |
|
namespace { // can't use static functions w/ template parameters
// Adapter for SkTDynamicHash: the key stored in a record.
const SkScaledImageCache::Key& key_from_rec(const SkScaledImageCache::Rec& rec) {
    return rec.fKey;
}

// Adapter for SkTDynamicHash: reuse the hash precomputed in the Key ctor.
uint32_t hash_from_key(const SkScaledImageCache::Key& key) {
    return key.fHash;
}

// Adapter for SkTDynamicHash: record/key equality via Key::operator==.
bool eq_rec_key(const SkScaledImageCache::Rec& rec, const SkScaledImageCache::Key& key) {
    return rec.fKey == key;
}
}
|
151 |
|
// Concrete hash-table type for Key -> Rec lookup (used when USE_HASH is on,
// alongside the LRU list which defines eviction order).
class SkScaledImageCache::Hash : public SkTDynamicHash<SkScaledImageCache::Rec,
                                                       SkScaledImageCache::Key,
                                                       key_from_rec,
                                                       hash_from_key,
                                                       eq_rec_key> {};
|
157 |
|
158 /////////////////////////////////////////////////////////////////////////////// |
|
159 |
|
160 // experimental hash to speed things up |
|
161 #define USE_HASH |
|
162 |
|
#if !defined(USE_HASH)
// Fallback O(n) lookup used only when the hash table is compiled out:
// linear scan of the LRU list for a matching key.
static inline SkScaledImageCache::Rec* find_rec_in_list(
        SkScaledImageCache::Rec* head, const Key & key) {
    SkScaledImageCache::Rec* rec = head;
    while ((rec != NULL) && (rec->fKey != key)) {
        rec = rec->fNext;
    }
    return rec;
}
#endif
|
173 |
|
// Shared member initialization used by both constructors. Leaves the cache
// empty and unbudgeted.
void SkScaledImageCache::init() {
    fHead = NULL;
    fTail = NULL;
#ifdef USE_HASH
    fHash = new Hash;
#else
    fHash = NULL;
#endif
    fBytesUsed = 0;
    fCount = 0;
    fAllocator = NULL;

    // One of these should be explicitly set by the caller after we return:
    // fByteLimit by the byte-budgeted ctor, fDiscardableFactory by the other.
    fByteLimit = 0;
    fDiscardableFactory = NULL;
}
|
190 |
|
191 #include "SkDiscardableMemory.h" |
|
192 |
|
// A pixelref backed by a single SkDiscardableMemory block. "One shot": the
// memory arrives already locked from its factory, so the first lockPixels
// call consumes that initial lock instead of locking again.
class SkOneShotDiscardablePixelRef : public SkPixelRef {
public:
    SK_DECLARE_INST_COUNT(SkOneShotDiscardablePixelRef)
    // Ownership of the discardable memory is transferred to the pixelref.
    SkOneShotDiscardablePixelRef(const SkImageInfo&, SkDiscardableMemory*, size_t rowBytes);
    ~SkOneShotDiscardablePixelRef();

    SK_DECLARE_UNFLATTENABLE_OBJECT()

protected:
    virtual bool onNewLockPixels(LockRec*) SK_OVERRIDE;
    virtual void onUnlockPixels() SK_OVERRIDE;
    virtual size_t getAllocatedSizeInBytes() const SK_OVERRIDE;

private:
    SkDiscardableMemory* fDM;        // owned; may become NULL after a failed relock
    size_t               fRB;        // row bytes of the pixel data
    bool                 fFirstTime; // true until the initial (pre-locked) lock is consumed

    typedef SkPixelRef INHERITED;
};
|
214 |
|
// Takes ownership of `dm`, which must be non-NULL and currently locked
// (asserted via dm->data()).
SkOneShotDiscardablePixelRef::SkOneShotDiscardablePixelRef(const SkImageInfo& info,
                                             SkDiscardableMemory* dm,
                                             size_t rowBytes)
    : INHERITED(info)
    , fDM(dm)
    , fRB(rowBytes)
{
    SkASSERT(dm->data());
    fFirstTime = true;
}
|
225 |
|
SkOneShotDiscardablePixelRef::~SkOneShotDiscardablePixelRef() {
    SkDELETE(fDM);  // SkDELETE is NULL-safe; fDM may already be gone (failed relock)
}
|
229 |
|
230 bool SkOneShotDiscardablePixelRef::onNewLockPixels(LockRec* rec) { |
|
231 if (fFirstTime) { |
|
232 // we're already locked |
|
233 SkASSERT(fDM->data()); |
|
234 fFirstTime = false; |
|
235 goto SUCCESS; |
|
236 } |
|
237 |
|
238 // A previous call to onUnlock may have deleted our DM, so check for that |
|
239 if (NULL == fDM) { |
|
240 return false; |
|
241 } |
|
242 |
|
243 if (!fDM->lock()) { |
|
244 // since it failed, we delete it now, to free-up the resource |
|
245 delete fDM; |
|
246 fDM = NULL; |
|
247 return false; |
|
248 } |
|
249 |
|
250 SUCCESS: |
|
251 rec->fPixels = fDM->data(); |
|
252 rec->fColorTable = NULL; |
|
253 rec->fRowBytes = fRB; |
|
254 return true; |
|
255 } |
|
256 |
|
// Release the discardable lock. Cannot be the initial factory lock
// (fFirstTime is cleared before any unlock can occur).
void SkOneShotDiscardablePixelRef::onUnlockPixels() {
    SkASSERT(!fFirstTime);
    fDM->unlock();
}
|
261 |
|
// Size of the pixel allocation, computed from the image info and row bytes.
size_t SkOneShotDiscardablePixelRef::getAllocatedSizeInBytes() const {
    return this->info().getSafeSize(fRB);
}
|
265 |
|
// SkBitmap::Allocator that backs bitmaps with discardable memory obtained
// from the cache's DiscardableFactory.
class SkScaledImageCacheDiscardableAllocator : public SkBitmap::Allocator {
public:
    SkScaledImageCacheDiscardableAllocator(
                            SkScaledImageCache::DiscardableFactory factory) {
        SkASSERT(factory);
        fFactory = factory;
    }

    virtual bool allocPixelRef(SkBitmap*, SkColorTable*) SK_OVERRIDE;

private:
    SkScaledImageCache::DiscardableFactory fFactory;  // creates SkDiscardableMemory blocks
};
|
279 |
|
280 bool SkScaledImageCacheDiscardableAllocator::allocPixelRef(SkBitmap* bitmap, |
|
281 SkColorTable* ctable) { |
|
282 size_t size = bitmap->getSize(); |
|
283 if (0 == size) { |
|
284 return false; |
|
285 } |
|
286 |
|
287 SkDiscardableMemory* dm = fFactory(size); |
|
288 if (NULL == dm) { |
|
289 return false; |
|
290 } |
|
291 |
|
292 // can we relax this? |
|
293 if (kPMColor_SkColorType != bitmap->colorType()) { |
|
294 return false; |
|
295 } |
|
296 |
|
297 SkImageInfo info = bitmap->info(); |
|
298 bitmap->setPixelRef(SkNEW_ARGS(SkOneShotDiscardablePixelRef, |
|
299 (info, dm, bitmap->rowBytes())))->unref(); |
|
300 bitmap->lockPixels(); |
|
301 return bitmap->readyToDraw(); |
|
302 } |
|
303 |
|
// Discardable-memory mode: entries are bounded by count, and bitmaps added
// through our allocator are backed by discardable memory from `factory`.
SkScaledImageCache::SkScaledImageCache(DiscardableFactory factory) {
    this->init();
    fDiscardableFactory = factory;

    fAllocator = SkNEW_ARGS(SkScaledImageCacheDiscardableAllocator, (factory));
}
|
310 |
|
// Malloc-backed mode: entries are bounded by total bytes.
SkScaledImageCache::SkScaledImageCache(size_t byteLimit) {
    this->init();
    fByteLimit = byteLimit;
}
|
315 |
|
// Tears down every record unconditionally — outstanding locks are not
// honored at destruction time.
SkScaledImageCache::~SkScaledImageCache() {
    SkSafeUnref(fAllocator);

    Rec* rec = fHead;
    while (rec) {
        Rec* next = rec->fNext;
        SkDELETE(rec);
        rec = next;
    }
    delete fHash;  // NULL when USE_HASH is not defined
}
|
327 |
|
328 //////////////////////////////////////////////////////////////////////////////// |
|
329 |
|
330 |
|
// Convenience overload: build a Key from the pieces and defer to the
// Key-based finder below.
SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(uint32_t genID,
                                                        SkScalar scaleX,
                                                        SkScalar scaleY,
                                                        const SkIRect& bounds) {
    const Key key(genID, scaleX, scaleY, bounds);
    return this->findAndLock(key);
}
|
338 |
|
/**
   This private method is the fully general record finder. All other
   record finders should call this function or the one above.
   On a hit the record is moved to the LRU head and its lock count is
   bumped; the caller must eventually balance with unlock(). */
SkScaledImageCache::Rec* SkScaledImageCache::findAndLock(const SkScaledImageCache::Key& key) {
    // empty bounds can never match a stored entry (see get_bounds_from_bitmap)
    if (key.fBounds.isEmpty()) {
        return NULL;
    }
#ifdef USE_HASH
    Rec* rec = fHash->find(key);
#else
    Rec* rec = find_rec_in_list(fHead, key);
#endif
    if (rec) {
        this->moveToHead(rec);  // for our LRU
        rec->fLockCount += 1;
    }
    return rec;
}
|
357 |
|
358 /** |
|
359 This function finds the bounds of the bitmap *within its pixelRef*. |
|
360 If the bitmap lacks a pixelRef, it will return an empty rect, since |
|
361 that doesn't make sense. This may be a useful enough function that |
|
362 it should be somewhere else (in SkBitmap?). */ |
|
363 static SkIRect get_bounds_from_bitmap(const SkBitmap& bm) { |
|
364 if (!(bm.pixelRef())) { |
|
365 return SkIRect::MakeEmpty(); |
|
366 } |
|
367 SkIPoint origin = bm.pixelRefOrigin(); |
|
368 return SkIRect::MakeXYWH(origin.fX, origin.fY, bm.width(), bm.height()); |
|
369 } |
|
370 |
|
371 |
|
// Find an unscaled (1x1-scale) bitmap entry for this genID/size. On a hit,
// copies the cached bitmap into *bitmap and returns a locked ID; returns
// NULL (via rec_to_id(NULL)) on a miss.
SkScaledImageCache::ID* SkScaledImageCache::findAndLock(uint32_t genID,
                                                        int32_t width,
                                                        int32_t height,
                                                        SkBitmap* bitmap) {
    Rec* rec = this->findAndLock(genID, SK_Scalar1, SK_Scalar1,
                                 SkIRect::MakeWH(width, height));
    if (rec) {
        SkASSERT(NULL == rec->fMip);
        SkASSERT(rec->fBitmap.pixelRef());
        *bitmap = rec->fBitmap;
    }
    return rec_to_id(rec);
}
|
385 |
|
// Find a scaled copy of `orig`. Scale (0, 0) is reserved as the mipmap key
// (see findAndLockMip), so it is rejected here.
SkScaledImageCache::ID* SkScaledImageCache::findAndLock(const SkBitmap& orig,
                                                        SkScalar scaleX,
                                                        SkScalar scaleY,
                                                        SkBitmap* scaled) {
    if (0 == scaleX || 0 == scaleY) {
        // degenerate, and the key we use for mipmaps
        return NULL;
    }
    Rec* rec = this->findAndLock(orig.getGenerationID(), scaleX,
                                 scaleY, get_bounds_from_bitmap(orig));
    if (rec) {
        SkASSERT(NULL == rec->fMip);
        SkASSERT(rec->fBitmap.pixelRef());
        *scaled = rec->fBitmap;
    }
    return rec_to_id(rec);
}
|
403 |
|
// Find the mipmap entry for `orig`. Mip entries are stored under the
// reserved (0, 0) scale key. *mip is NOT ref'd for the caller; the returned
// locked ID keeps it alive until unlock().
SkScaledImageCache::ID* SkScaledImageCache::findAndLockMip(const SkBitmap& orig,
                                                           SkMipMap const ** mip) {
    Rec* rec = this->findAndLock(orig.getGenerationID(), 0, 0,
                                 get_bounds_from_bitmap(orig));
    if (rec) {
        SkASSERT(rec->fMip);
        SkASSERT(NULL == rec->fBitmap.pixelRef());
        *mip = rec->fMip;
    }
    return rec_to_id(rec);
}
|
415 |
|
416 |
|
417 //////////////////////////////////////////////////////////////////////////////// |
|
/**
   This private method is the fully general record adder. All other
   record adders should call this function.
   Takes ownership of `rec`: it is either linked into the cache or deleted
   before returning. Always returns a locked ID. */
SkScaledImageCache::ID* SkScaledImageCache::addAndLock(SkScaledImageCache::Rec* rec) {
    SkASSERT(rec);
    // See if we already have this key (racy inserts, etc.)
    Rec* existing = this->findAndLock(rec->fKey);
    if (NULL != existing) {
        // Since we already have a matching entry, just delete the new one and return.
        // Call sites cannot assume the passed in object will live past this call.
        // (For mip records both fBitmaps are default-constructed, so this
        // assignment is a no-op in that case; bitmap and mip entries can't
        // collide because mip keys use the reserved (0,0) scale.)
        existing->fBitmap = rec->fBitmap;
        SkDELETE(rec);
        return rec_to_id(existing);
    }

    this->addToHead(rec);
    SkASSERT(1 == rec->fLockCount);  // ctor-granted lock is handed to the caller
#ifdef USE_HASH
    SkASSERT(fHash);
    fHash->add(rec);
#endif
    // We may (now) be overbudget, so see if we need to purge something.
    this->purgeAsNeeded();
    return rec_to_id(rec);
}
|
443 |
|
// Add an unscaled bitmap under the 1x1-scale key; mirror of the unscaled
// findAndLock above.
SkScaledImageCache::ID* SkScaledImageCache::addAndLock(uint32_t genID,
                                                       int32_t width,
                                                       int32_t height,
                                                       const SkBitmap& bitmap) {
    Key key(genID, SK_Scalar1, SK_Scalar1, SkIRect::MakeWH(width, height));
    Rec* rec = SkNEW_ARGS(Rec, (key, bitmap));
    return this->addAndLock(rec);
}
|
452 |
|
// Add a scaled copy of `orig`. Rejects the reserved (0, 0) mip key and
// bitmaps with no pixelRef (empty bounds).
SkScaledImageCache::ID* SkScaledImageCache::addAndLock(const SkBitmap& orig,
                                                       SkScalar scaleX,
                                                       SkScalar scaleY,
                                                       const SkBitmap& scaled) {
    if (0 == scaleX || 0 == scaleY) {
        // degenerate, and the key we use for mipmaps
        return NULL;
    }
    SkIRect bounds = get_bounds_from_bitmap(orig);
    if (bounds.isEmpty()) {
        return NULL;
    }
    Key key(orig.getGenerationID(), scaleX, scaleY, bounds);
    Rec* rec = SkNEW_ARGS(Rec, (key, scaled));
    return this->addAndLock(rec);
}
|
469 |
|
// Add a mipmap for `orig` under the reserved (0, 0) scale key. The Rec ctor
// refs `mip`; the caller's ref is untouched.
SkScaledImageCache::ID* SkScaledImageCache::addAndLockMip(const SkBitmap& orig,
                                                          const SkMipMap* mip) {
    SkIRect bounds = get_bounds_from_bitmap(orig);
    if (bounds.isEmpty()) {
        return NULL;
    }
    Key key(orig.getGenerationID(), 0, 0, bounds);
    Rec* rec = SkNEW_ARGS(Rec, (key, mip));
    return this->addAndLock(rec);
}
|
480 |
|
// Release one lock previously granted by a findAndLock/addAndLock call.
// Once the record's lock count reaches zero it becomes eligible for purging.
void SkScaledImageCache::unlock(SkScaledImageCache::ID* id) {
    SkASSERT(id);

#ifdef SK_DEBUG
    // Debug-only: verify the id corresponds to a record actually in our list.
    {
        bool found = false;
        Rec* rec = fHead;
        while (rec != NULL) {
            if (rec == id_to_rec(id)) {
                found = true;
                break;
            }
            rec = rec->fNext;
        }
        SkASSERT(found);
    }
#endif
    Rec* rec = id_to_rec(id);
    SkASSERT(rec->fLockCount > 0);
    rec->fLockCount -= 1;

    // we may have been over-budget, but now have released something, so check
    // if we should purge.
    if (0 == rec->fLockCount) {
        this->purgeAsNeeded();
    }
}
|
508 |
|
// Evict least-recently-used, unlocked records until the cache is back under
// budget. Discardable caches are bounded by entry count (the system reclaims
// the pixel memory itself); malloc-backed caches are bounded by bytes.
void SkScaledImageCache::purgeAsNeeded() {
    size_t byteLimit;
    int    countLimit;

    if (fDiscardableFactory) {
        countLimit = SK_DISCARDABLEMEMORY_SCALEDIMAGECACHE_COUNT_LIMIT;
        byteLimit = SK_MaxU32;  // no limit based on bytes
    } else {
        countLimit = SK_MaxS32; // no limit based on count
        byteLimit = fByteLimit;
    }

    // Work on local copies; write back once at the end.
    size_t bytesUsed = fBytesUsed;
    int countUsed = fCount;

    // Walk backwards from the tail (least recently used end).
    Rec* rec = fTail;
    while (rec) {
        if (bytesUsed < byteLimit && countUsed < countLimit) {
            break;
        }

        Rec* prev = rec->fPrev;  // grab before detach/delete invalidate rec
        if (0 == rec->fLockCount) {  // locked records are pinned; skip them
            size_t used = rec->bytesUsed();
            SkASSERT(used <= bytesUsed);
            this->detach(rec);
#ifdef USE_HASH
            fHash->remove(rec->fKey);
#endif

            SkDELETE(rec);

            bytesUsed -= used;
            countUsed -= 1;
        }
        rec = prev;
    }

    fBytesUsed = bytesUsed;
    fCount = countUsed;
}
|
550 |
|
551 size_t SkScaledImageCache::setByteLimit(size_t newLimit) { |
|
552 size_t prevLimit = fByteLimit; |
|
553 fByteLimit = newLimit; |
|
554 if (newLimit < prevLimit) { |
|
555 this->purgeAsNeeded(); |
|
556 } |
|
557 return prevLimit; |
|
558 } |
|
559 |
|
560 /////////////////////////////////////////////////////////////////////////////// |
|
561 |
|
562 void SkScaledImageCache::detach(Rec* rec) { |
|
563 Rec* prev = rec->fPrev; |
|
564 Rec* next = rec->fNext; |
|
565 |
|
566 if (!prev) { |
|
567 SkASSERT(fHead == rec); |
|
568 fHead = next; |
|
569 } else { |
|
570 prev->fNext = next; |
|
571 } |
|
572 |
|
573 if (!next) { |
|
574 fTail = prev; |
|
575 } else { |
|
576 next->fPrev = prev; |
|
577 } |
|
578 |
|
579 rec->fNext = rec->fPrev = NULL; |
|
580 } |
|
581 |
|
// Promote `rec` to the head (most-recently-used end) of the LRU list.
// No-op if it is already the head.
void SkScaledImageCache::moveToHead(Rec* rec) {
    if (fHead == rec) {
        return;
    }

    SkASSERT(fHead);
    SkASSERT(fTail);

    this->validate();

    this->detach(rec);

    // splice onto the front
    fHead->fPrev = rec;
    rec->fNext = fHead;
    fHead = rec;

    this->validate();
}
|
600 |
|
// Link a new record at the head of the LRU list. This is the only place
// fBytesUsed/fCount grow; purgeAsNeeded() is where they shrink.
void SkScaledImageCache::addToHead(Rec* rec) {
    this->validate();

    rec->fPrev = NULL;
    rec->fNext = fHead;
    if (fHead) {
        fHead->fPrev = rec;
    }
    fHead = rec;
    if (!fTail) {
        fTail = rec;  // list was empty; rec is both head and tail
    }
    fBytesUsed += rec->bytesUsed();
    fCount += 1;

    this->validate();
}
|
618 |
|
619 /////////////////////////////////////////////////////////////////////////////// |
|
620 |
|
#ifdef SK_DEBUG
// Debug invariant check: the LRU list is consistent in both directions and
// fBytesUsed/fCount exactly match the sum over the records.
void SkScaledImageCache::validate() const {
    if (NULL == fHead) {
        SkASSERT(NULL == fTail);
        SkASSERT(0 == fBytesUsed);
        return;
    }

    // single-element list
    if (fHead == fTail) {
        SkASSERT(NULL == fHead->fPrev);
        SkASSERT(NULL == fHead->fNext);
        SkASSERT(fHead->bytesUsed() == fBytesUsed);
        return;
    }

    SkASSERT(NULL == fHead->fPrev);
    SkASSERT(NULL != fHead->fNext);
    SkASSERT(NULL == fTail->fNext);
    SkASSERT(NULL != fTail->fPrev);

    // forward walk: accumulate bytes/count
    size_t used = 0;
    int count = 0;
    const Rec* rec = fHead;
    while (rec) {
        count += 1;
        used += rec->bytesUsed();
        SkASSERT(used <= fBytesUsed);
        rec = rec->fNext;
    }
    SkASSERT(fCount == count);

    // backward walk: should consume exactly what the forward walk counted
    rec = fTail;
    while (rec) {
        SkASSERT(count > 0);
        count -= 1;
        SkASSERT(used >= rec->bytesUsed());
        used -= rec->bytesUsed();
        rec = rec->fPrev;
    }

    SkASSERT(0 == count);
    SkASSERT(0 == used);
}
#endif
|
665 |
|
666 void SkScaledImageCache::dump() const { |
|
667 this->validate(); |
|
668 |
|
669 const Rec* rec = fHead; |
|
670 int locked = 0; |
|
671 while (rec) { |
|
672 locked += rec->fLockCount > 0; |
|
673 rec = rec->fNext; |
|
674 } |
|
675 |
|
676 SkDebugf("SkScaledImageCache: count=%d bytes=%d locked=%d %s\n", |
|
677 fCount, fBytesUsed, locked, |
|
678 fDiscardableFactory ? "discardable" : "malloc"); |
|
679 } |
|
680 |
|
681 /////////////////////////////////////////////////////////////////////////////// |
|
682 |
|
683 #include "SkThread.h" |
|
684 |
|
// Process-wide singleton cache, guarded by gMutex in every static front-end
// below. Created lazily by get_cache(); cleaned up at exit via SkOnce.
SK_DECLARE_STATIC_MUTEX(gMutex);
static SkScaledImageCache* gScaledImageCache = NULL;
static void cleanup_gScaledImageCache() { SkDELETE(gScaledImageCache); }
|
688 |
|
// SkOnce callback: construct the singleton in whichever mode the build
// selected. The int parameter is unused (required by the SkOnce signature).
static void create_cache(int) {
#ifdef SK_USE_DISCARDABLE_SCALEDIMAGECACHE
    gScaledImageCache = SkNEW_ARGS(SkScaledImageCache, (SkDiscardableMemory::Create));
#else
    gScaledImageCache = SkNEW_ARGS(SkScaledImageCache, (SK_DEFAULT_IMAGE_CACHE_LIMIT));
#endif
}
|
696 |
|
// Lazily create and return the singleton. SkOnce guarantees create_cache
// runs exactly once and registers the exit-time cleanup.
static SkScaledImageCache* get_cache() {
    SK_DECLARE_STATIC_ONCE(once);
    SkOnce(&once, create_cache, 0, cleanup_gScaledImageCache);
    SkASSERT(NULL != gScaledImageCache);
    return gScaledImageCache;
}
|
703 |
|
704 |
|
// ---------------------------------------------------------------------------
// Public static front-ends: each acquires gMutex for the duration of the call
// and forwards to the corresponding method on the singleton cache.
// ---------------------------------------------------------------------------

SkScaledImageCache::ID* SkScaledImageCache::FindAndLock(
                                uint32_t pixelGenerationID,
                                int32_t width,
                                int32_t height,
                                SkBitmap* scaled) {
    SkAutoMutexAcquire am(gMutex);
    return get_cache()->findAndLock(pixelGenerationID, width, height, scaled);
}

SkScaledImageCache::ID* SkScaledImageCache::AddAndLock(
                               uint32_t pixelGenerationID,
                               int32_t width,
                               int32_t height,
                               const SkBitmap& scaled) {
    SkAutoMutexAcquire am(gMutex);
    return get_cache()->addAndLock(pixelGenerationID, width, height, scaled);
}


SkScaledImageCache::ID* SkScaledImageCache::FindAndLock(const SkBitmap& orig,
                                                        SkScalar scaleX,
                                                        SkScalar scaleY,
                                                        SkBitmap* scaled) {
    SkAutoMutexAcquire am(gMutex);
    return get_cache()->findAndLock(orig, scaleX, scaleY, scaled);
}

SkScaledImageCache::ID* SkScaledImageCache::FindAndLockMip(const SkBitmap& orig,
                                                           SkMipMap const ** mip) {
    SkAutoMutexAcquire am(gMutex);
    return get_cache()->findAndLockMip(orig, mip);
}

SkScaledImageCache::ID* SkScaledImageCache::AddAndLock(const SkBitmap& orig,
                                                       SkScalar scaleX,
                                                       SkScalar scaleY,
                                                       const SkBitmap& scaled) {
    SkAutoMutexAcquire am(gMutex);
    return get_cache()->addAndLock(orig, scaleX, scaleY, scaled);
}

SkScaledImageCache::ID* SkScaledImageCache::AddAndLockMip(const SkBitmap& orig,
                                                          const SkMipMap* mip) {
    SkAutoMutexAcquire am(gMutex);
    return get_cache()->addAndLockMip(orig, mip);
}

void SkScaledImageCache::Unlock(SkScaledImageCache::ID* id) {
    SkAutoMutexAcquire am(gMutex);
    get_cache()->unlock(id);

//    get_cache()->dump();
}

size_t SkScaledImageCache::GetBytesUsed() {
    SkAutoMutexAcquire am(gMutex);
    return get_cache()->getBytesUsed();
}

size_t SkScaledImageCache::GetByteLimit() {
    SkAutoMutexAcquire am(gMutex);
    return get_cache()->getByteLimit();
}

size_t SkScaledImageCache::SetByteLimit(size_t newLimit) {
    SkAutoMutexAcquire am(gMutex);
    return get_cache()->setByteLimit(newLimit);
}

SkBitmap::Allocator* SkScaledImageCache::GetAllocator() {
    SkAutoMutexAcquire am(gMutex);
    return get_cache()->allocator();
}

void SkScaledImageCache::Dump() {
    SkAutoMutexAcquire am(gMutex);
    get_cache()->dump();
}
|
783 |
|
784 /////////////////////////////////////////////////////////////////////////////// |
|
785 |
|
786 #include "SkGraphics.h" |
|
787 |
|
// SkGraphics wrappers: expose the scaled-image-cache budget controls through
// the public SkGraphics API. These simply delegate to the static front-ends
// above (which handle locking).
size_t SkGraphics::GetImageCacheBytesUsed() {
    return SkScaledImageCache::GetBytesUsed();
}

size_t SkGraphics::GetImageCacheByteLimit() {
    return SkScaledImageCache::GetByteLimit();
}

size_t SkGraphics::SetImageCacheByteLimit(size_t newLimit) {
    return SkScaledImageCache::SetByteLimit(newLimit);
}