gfx/skia/trunk/src/core/SkBitmapHeap.cpp

/*
 * Copyright 2012 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "SkBitmapHeap.h"

#include "SkBitmap.h"
#include "SkReadBuffer.h"
#include "SkWriteBuffer.h"
#include "SkTSearch.h"

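// Illustrative usage sketch (not from Skia documentation; values are arbitrary
// and only APIs defined or referenced in this file are used):
//
//   SkBitmapHeap heap(64 /* preferredSize */, 1 /* ownerCount */);
//   int32_t slot = heap.insert(bitmap);          // store a copy of 'bitmap'
//   if (slot != SkBitmapHeap::INVALID_SLOT) {
//       SkBitmapHeapEntry* entry = heap.getEntry(slot);
//       // ... share 'slot'/'entry' with readers of the heap ...
//   }
//   heap.freeMemoryIfPossible(SIZE_MAX);         // evict unreferenced bitmaps
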
SkBitmapHeapEntry::SkBitmapHeapEntry()
    : fSlot(-1)
    , fRefCount(0)
    , fBytesAllocated(0) {
}

SkBitmapHeapEntry::~SkBitmapHeapEntry() {
    SkASSERT(0 == fRefCount);
}

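// Adds 'count' references on behalf of new owners. The first grant can be a
// plain assignment (only the heap manager can touch an unreferenced entry);
// later grants use an atomic add because owners may release refs concurrently.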
void SkBitmapHeapEntry::addReferences(int count) {
    if (0 == fRefCount) {
        // If there are no current owners then the heap manager
        // will be the only one able to modify it, so it does not
        // need to be an atomic operation.
        fRefCount = count;
    } else {
        sk_atomic_add(&fRefCount, count);
    }
}

///////////////////////////////////////////////////////////////////////////////

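// Compare two SkIPoints by reinterpreting their packed (fX, fY) storage as a
// single 64-bit integer. This only needs to give a consistent total ordering
// for the sorted lookup below, not a coordinate-wise lexicographic one.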
static bool operator<(const SkIPoint& a, const SkIPoint& b) {
    return *(const int64_t*)&a < *(const int64_t*)&b;
}

static bool operator>(const SkIPoint& a, const SkIPoint& b) {
    return *(const int64_t*)&a > *(const int64_t*)&b;
}

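// Orders LookupEntries by generation ID, then pixel origin, then width, then
// height. Used as the comparator for the sorted fLookupTable (see SkTSearch in
// findInLookupTable below).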
bool SkBitmapHeap::LookupEntry::Less(const SkBitmapHeap::LookupEntry& a,
                                     const SkBitmapHeap::LookupEntry& b) {
    if (a.fGenerationId < b.fGenerationId) {
        return true;
    } else if (a.fGenerationId > b.fGenerationId) {
        return false;
    } else if (a.fPixelOrigin < b.fPixelOrigin) {
        return true;
    } else if (a.fPixelOrigin > b.fPixelOrigin) {
        return false;
    } else if (a.fWidth < b.fWidth) {
        return true;
    } else if (a.fWidth > b.fWidth) {
        return false;
    } else if (a.fHeight < b.fHeight) {
        return true;
    }
    return false;
}

///////////////////////////////////////////////////////////////////////////////

SkBitmapHeap::SkBitmapHeap(int32_t preferredSize, int32_t ownerCount)
    : INHERITED()
    , fExternalStorage(NULL)
    , fMostRecentlyUsed(NULL)
    , fLeastRecentlyUsed(NULL)
    , fPreferredCount(preferredSize)
    , fOwnerCount(ownerCount)
    , fBytesAllocated(0)
    , fDeferAddingOwners(false) {
}

SkBitmapHeap::SkBitmapHeap(ExternalStorage* storage, int32_t preferredSize)
    : INHERITED()
    , fExternalStorage(storage)
    , fMostRecentlyUsed(NULL)
    , fLeastRecentlyUsed(NULL)
    , fPreferredCount(preferredSize)
    , fOwnerCount(IGNORE_OWNERS)
    , fBytesAllocated(0)
    , fDeferAddingOwners(false) {
    SkSafeRef(storage);
}

SkBitmapHeap::~SkBitmapHeap() {
    SkDEBUGCODE(
    for (int i = 0; i < fStorage.count(); i++) {
        bool unused = false;
        for (int j = 0; j < fUnusedSlots.count(); j++) {
            if (fUnusedSlots[j] == fStorage[i]->fSlot) {
                unused = true;
                break;
            }
        }
        if (!unused) {
            fBytesAllocated -= fStorage[i]->fBytesAllocated;
        }
    }
    fBytesAllocated -= (fStorage.count() * sizeof(SkBitmapHeapEntry));
    )
    SkASSERT(0 == fBytesAllocated);
    fStorage.deleteAll();
    SkSafeUnref(fExternalStorage);
    fLookupTable.deleteAll();
}

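// Returns a new ref-counted array holding a shallow copy of every bitmap
// currently stored in the heap, or NULL if the heap is empty.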
SkTRefArray<SkBitmap>* SkBitmapHeap::extractBitmaps() const {
    const int size = fStorage.count();
    SkTRefArray<SkBitmap>* array = NULL;
    if (size > 0) {
        array = SkTRefArray<SkBitmap>::Create(size);
        for (int i = 0; i < size; i++) {
            // make a shallow copy of the bitmap
            array->writableAt(i) = fStorage[i]->fBitmap;
        }
    }
    return array;
}

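// Unlinks 'entry' from the doubly-linked MRU/LRU list, updating the
// fMostRecentlyUsed / fLeastRecentlyUsed endpoints when 'entry' sits at either
// end of the list.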
void SkBitmapHeap::removeFromLRU(SkBitmapHeap::LookupEntry* entry) {
    if (fMostRecentlyUsed == entry) {
        fMostRecentlyUsed = entry->fLessRecentlyUsed;
        if (NULL == fMostRecentlyUsed) {
            SkASSERT(fLeastRecentlyUsed == entry);
            fLeastRecentlyUsed = NULL;
        } else {
            fMostRecentlyUsed->fMoreRecentlyUsed = NULL;
        }
    } else {
        // Remove entry from its prior place, and make sure to cover the hole.
        if (fLeastRecentlyUsed == entry) {
            SkASSERT(entry->fMoreRecentlyUsed != NULL);
            fLeastRecentlyUsed = entry->fMoreRecentlyUsed;
        }
        // Since we have already considered the case where entry is the most recently used, it must
        // have a more recently used at this point.
        SkASSERT(entry->fMoreRecentlyUsed != NULL);
        entry->fMoreRecentlyUsed->fLessRecentlyUsed = entry->fLessRecentlyUsed;

        if (entry->fLessRecentlyUsed != NULL) {
            SkASSERT(fLeastRecentlyUsed != entry);
            entry->fLessRecentlyUsed->fMoreRecentlyUsed = entry->fMoreRecentlyUsed;
        }
    }
    entry->fMoreRecentlyUsed = NULL;
}

void SkBitmapHeap::appendToLRU(SkBitmapHeap::LookupEntry* entry) {
    if (fMostRecentlyUsed != NULL) {
        SkASSERT(NULL == fMostRecentlyUsed->fMoreRecentlyUsed);
        fMostRecentlyUsed->fMoreRecentlyUsed = entry;
        entry->fLessRecentlyUsed = fMostRecentlyUsed;
    }
    fMostRecentlyUsed = entry;
    if (NULL == fLeastRecentlyUsed) {
        fLeastRecentlyUsed = entry;
    }
}

// iterate through our LRU cache and try to find an entry to evict
SkBitmapHeap::LookupEntry* SkBitmapHeap::findEntryToReplace(const SkBitmap& replacement) {
    SkASSERT(fPreferredCount != UNLIMITED_SIZE);
    SkASSERT(fStorage.count() >= fPreferredCount);

    SkBitmapHeap::LookupEntry* iter = fLeastRecentlyUsed;
    while (iter != NULL) {
        SkBitmapHeapEntry* heapEntry = fStorage[iter->fStorageSlot];
        if (heapEntry->fRefCount > 0) {
            // If the least recently used bitmap has not been unreferenced
            // by its owner, then according to our LRU specifications a more
            // recently used one can not have used all its references yet either.
            return NULL;
        }
        if (replacement.getGenerationID() == iter->fGenerationId) {
            // Do not replace a bitmap with a new one using the same
            // pixel ref. Instead look for a different one that will
            // potentially free up more space.
            iter = iter->fMoreRecentlyUsed;
        } else {
            return iter;
        }
    }
    return NULL;
}

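// Walks the list from the least recently used end, evicting bitmaps whose
// entries hold no owner references, until at least 'bytesToFree' bytes have
// been reclaimed or no further entry can be evicted. Returns the number of
// bytes actually freed, which may fall short of (or exceed) the request.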
size_t SkBitmapHeap::freeMemoryIfPossible(size_t bytesToFree) {
    if (UNLIMITED_SIZE == fPreferredCount) {
        return 0;
    }
    LookupEntry* iter = fLeastRecentlyUsed;
    size_t origBytesAllocated = fBytesAllocated;
    // Purge starting from LRU until a non-evictable bitmap is found or until
    // everything is evicted.
    while (iter != NULL) {
        SkBitmapHeapEntry* heapEntry = fStorage[iter->fStorageSlot];
        if (heapEntry->fRefCount > 0) {
            break;
        }
        LookupEntry* next = iter->fMoreRecentlyUsed;
        this->removeEntryFromLookupTable(iter);
        // Free the pixel memory. removeEntryFromLookupTable already reduced
        // fBytesAllocated properly.
        heapEntry->fBitmap.reset();
        // Add to list of unused slots which can be reused in the future.
        fUnusedSlots.push(heapEntry->fSlot);
        iter = next;
        if (origBytesAllocated - fBytesAllocated >= bytesToFree) {
            break;
        }
    }

    if (fLeastRecentlyUsed != iter) {
        // There was at least one eviction.
        fLeastRecentlyUsed = iter;
        if (NULL == fLeastRecentlyUsed) {
            // Everything was evicted
            fMostRecentlyUsed = NULL;
            fBytesAllocated -= (fStorage.count() * sizeof(SkBitmapHeapEntry));
            fStorage.deleteAll();
            fUnusedSlots.reset();
            SkASSERT(0 == fBytesAllocated);
        } else {
            fLeastRecentlyUsed->fLessRecentlyUsed = NULL;
        }
    }

    return origBytesAllocated - fBytesAllocated;
}

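// Binary-searches fLookupTable for 'indexEntry'. On a miss, a copy of
// 'indexEntry' is inserted and its index returned; on a hit, the matching heap
// entry is optionally returned through '*entry'.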
int SkBitmapHeap::findInLookupTable(const LookupEntry& indexEntry, SkBitmapHeapEntry** entry) {
    int index = SkTSearch<const LookupEntry, LookupEntry::Less>(
            (const LookupEntry**)fLookupTable.begin(),
            fLookupTable.count(),
            &indexEntry, sizeof(void*));

    if (index < 0) {
        // insert ourselves into the bitmapIndex
        index = ~index;
        *fLookupTable.insert(index) = SkNEW_ARGS(LookupEntry, (indexEntry));
    } else if (entry != NULL) {
        // populate the entry if needed
        *entry = fStorage[fLookupTable[index]->fStorageSlot];
    }

    return index;
}

bool SkBitmapHeap::copyBitmap(const SkBitmap& originalBitmap, SkBitmap& copiedBitmap) {
    SkASSERT(!fExternalStorage);

    // If the bitmap is mutable, we need to do a deep copy, since the
    // caller may modify it afterwards.
    if (originalBitmap.isImmutable()) {
        copiedBitmap = originalBitmap;
        // TODO if we have the pixel ref in the heap we could pass it here to avoid a potential deep copy
        // else if (sharedPixelRef != NULL) {
        //     copiedBitmap = orig;
        //     copiedBitmap.setPixelRef(sharedPixelRef, originalBitmap.pixelRefOffset());
    } else if (originalBitmap.empty()) {
        copiedBitmap.reset();
    } else if (!originalBitmap.deepCopyTo(&copiedBitmap)) {
        return false;
    }
    copiedBitmap.setImmutable();
    return true;
}

int SkBitmapHeap::removeEntryFromLookupTable(LookupEntry* entry) {
    // remove the bitmap index for the deleted entry
    SkDEBUGCODE(int count = fLookupTable.count();)
    int index = this->findInLookupTable(*entry, NULL);
    // Verify that findInLookupTable found an existing entry rather than adding
    // a new entry to the lookup table.
    SkASSERT(count == fLookupTable.count());
    fBytesAllocated -= fStorage[entry->fStorageSlot]->fBytesAllocated;
    SkDELETE(fLookupTable[index]);
    fLookupTable.remove(index);
    return index;
}

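// Stores a copy of 'originalBitmap' in the heap (or in external storage, when
// provided) and returns its slot, reusing an existing entry when the same
// bitmap has already been inserted. Returns INVALID_SLOT if the copy fails.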
int32_t SkBitmapHeap::insert(const SkBitmap& originalBitmap) {
    SkBitmapHeapEntry* entry = NULL;
    int searchIndex = this->findInLookupTable(LookupEntry(originalBitmap), &entry);

    if (entry) {
        // Already had a copy of the bitmap in the heap.
        if (fOwnerCount != IGNORE_OWNERS) {
            if (fDeferAddingOwners) {
                *fDeferredEntries.append() = entry->fSlot;
            } else {
                entry->addReferences(fOwnerCount);
            }
        }
        if (fPreferredCount != UNLIMITED_SIZE) {
            LookupEntry* lookupEntry = fLookupTable[searchIndex];
            if (lookupEntry != fMostRecentlyUsed) {
                this->removeFromLRU(lookupEntry);
                this->appendToLRU(lookupEntry);
            }
        }
        return entry->fSlot;
    }

    // decide if we need to evict an existing heap entry or create a new one
    if (fPreferredCount != UNLIMITED_SIZE && fStorage.count() >= fPreferredCount) {
        // iterate through our LRU cache and try to find an entry to evict
        LookupEntry* lookupEntry = this->findEntryToReplace(originalBitmap);
        if (lookupEntry != NULL) {
            // we found an entry to evict
            entry = fStorage[lookupEntry->fStorageSlot];
            // Remove it from the LRU. The new entry will be added to the LRU later.
            this->removeFromLRU(lookupEntry);
            int index = this->removeEntryFromLookupTable(lookupEntry);

            // update the current search index now that we have removed one
            if (index < searchIndex) {
                searchIndex--;
            }
        }
    }

    // if we didn't have an entry yet we need to create one
    if (!entry) {
        if (fPreferredCount != UNLIMITED_SIZE && fUnusedSlots.count() > 0) {
            int slot;
            fUnusedSlots.pop(&slot);
            entry = fStorage[slot];
        } else {
            entry = SkNEW(SkBitmapHeapEntry);
            fStorage.append(1, &entry);
            entry->fSlot = fStorage.count() - 1;
            fBytesAllocated += sizeof(SkBitmapHeapEntry);
        }
    }

    // create a copy of the bitmap
    bool copySucceeded;
    if (fExternalStorage) {
        copySucceeded = fExternalStorage->insert(originalBitmap, entry->fSlot);
    } else {
        copySucceeded = copyBitmap(originalBitmap, entry->fBitmap);
    }

    // if the copy failed then we must abort
    if (!copySucceeded) {
        // delete the index
        SkDELETE(fLookupTable[searchIndex]);
        fLookupTable.remove(searchIndex);
        // If entry is the last slot in storage, it is safe to delete it.
        if (fStorage.count() - 1 == entry->fSlot) {
            // free the slot
            fStorage.remove(entry->fSlot);
            fBytesAllocated -= sizeof(SkBitmapHeapEntry);
            SkDELETE(entry);
        } else {
            fUnusedSlots.push(entry->fSlot);
        }
        return INVALID_SLOT;
    }

    // update the index with the appropriate slot in the heap
    fLookupTable[searchIndex]->fStorageSlot = entry->fSlot;

    // compute the space taken by this entry
    // TODO if there is a shared pixel ref don't count it
    // If the SkBitmap does not share an SkPixelRef with an SkBitmap already
    // in the SharedHeap, also include the size of its pixels.
    entry->fBytesAllocated = originalBitmap.getSize();

    // add the bytes from this entry to the total count
    fBytesAllocated += entry->fBytesAllocated;

    if (fOwnerCount != IGNORE_OWNERS) {
        if (fDeferAddingOwners) {
            *fDeferredEntries.append() = entry->fSlot;
        } else {
            entry->addReferences(fOwnerCount);
        }
    }
    if (fPreferredCount != UNLIMITED_SIZE) {
        this->appendToLRU(fLookupTable[searchIndex]);
    }
    return entry->fSlot;
}

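// Owner-reference deferral: while deferral is active, insert() records the
// affected slots instead of calling addReferences() immediately.
// endAddingOwnersDeferral(true) applies the deferred references;
// endAddingOwnersDeferral(false) discards them.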
void SkBitmapHeap::deferAddingOwners() {
    fDeferAddingOwners = true;
}

void SkBitmapHeap::endAddingOwnersDeferral(bool add) {
    if (add) {
        for (int i = 0; i < fDeferredEntries.count(); i++) {
            SkASSERT(fOwnerCount != IGNORE_OWNERS);
            SkBitmapHeapEntry* heapEntry = this->getEntry(fDeferredEntries[i]);
            SkASSERT(heapEntry != NULL);
            heapEntry->addReferences(fOwnerCount);
        }
    }
    fDeferAddingOwners = false;
    fDeferredEntries.reset();
}
