Sat, 03 Jan 2015 20:18:00 +0100
Conditionally enable double-key logic according to
private browsing mode or the privacy.thirdparty.isolate preference, and
implement it in GetCookieStringCommon and FindCookie, where it counts...
With some reservations about how to convince FindCookie callers to test
the condition and pass a nullptr when double-key logic is disabled.
michael@0 | 1 | /* |
michael@0 | 2 | * Copyright 2012 Google Inc. |
michael@0 | 3 | * |
michael@0 | 4 | * Use of this source code is governed by a BSD-style license that can be |
michael@0 | 5 | * found in the LICENSE file. |
michael@0 | 6 | */ |
michael@0 | 7 | |
michael@0 | 8 | #include "GrMemoryPool.h" |
michael@0 | 9 | |
michael@0 | 10 | #ifdef SK_DEBUG |
michael@0 | 11 | #define VALIDATE this->validate() |
michael@0 | 12 | #else |
michael@0 | 13 | #define VALIDATE |
michael@0 | 14 | #endif |
michael@0 | 15 | |
michael@0 | 16 | GrMemoryPool::GrMemoryPool(size_t preallocSize, size_t minAllocSize) { |
michael@0 | 17 | SkDEBUGCODE(fAllocationCnt = 0); |
michael@0 | 18 | |
michael@0 | 19 | minAllocSize = GrMax<size_t>(minAllocSize, 1 << 10); |
michael@0 | 20 | fMinAllocSize = GrSizeAlignUp(minAllocSize + kPerAllocPad, kAlignment), |
michael@0 | 21 | fPreallocSize = GrSizeAlignUp(preallocSize + kPerAllocPad, kAlignment); |
michael@0 | 22 | fPreallocSize = GrMax(fPreallocSize, fMinAllocSize); |
michael@0 | 23 | |
michael@0 | 24 | fHead = CreateBlock(fPreallocSize); |
michael@0 | 25 | fTail = fHead; |
michael@0 | 26 | fHead->fNext = NULL; |
michael@0 | 27 | fHead->fPrev = NULL; |
michael@0 | 28 | VALIDATE; |
michael@0 | 29 | }; |
michael@0 | 30 | |
michael@0 | 31 | GrMemoryPool::~GrMemoryPool() { |
michael@0 | 32 | VALIDATE; |
michael@0 | 33 | SkASSERT(0 == fAllocationCnt); |
michael@0 | 34 | SkASSERT(fHead == fTail); |
michael@0 | 35 | SkASSERT(0 == fHead->fLiveCount); |
michael@0 | 36 | DeleteBlock(fHead); |
michael@0 | 37 | }; |
michael@0 | 38 | |
// Returns a pointer to `size` bytes carved from the tail block, growing the
// pool with a new block when the tail cannot satisfy the request.
// The returned pointer is kAlignment-aligned; each allocation carries a
// kPerAllocPad-byte header storing its owning block.
void* GrMemoryPool::allocate(size_t size) {
    VALIDATE;
    // Round up so the *next* allocation also starts aligned, then add room
    // for the per-allocation back-pointer header.
    size = GrSizeAlignUp(size, kAlignment);
    size += kPerAllocPad;
    if (fTail->fFreeSize < size) {
        // Tail is full: append a new block sized for this request (but never
        // smaller than the configured minimum) and make it the new tail.
        size_t blockSize = size;
        blockSize = GrMax<size_t>(blockSize, fMinAllocSize);
        BlockHeader* block = CreateBlock(blockSize);

        block->fPrev = fTail;
        block->fNext = NULL;
        SkASSERT(NULL == fTail->fNext);
        fTail->fNext = block;
        fTail = block;
    }
    SkASSERT(fTail->fFreeSize >= size);
    intptr_t ptr = fTail->fCurrPtr;
    // We stash a pointer to the block header, just before the allocated space,
    // so that we can decrement the live count on delete in constant time.
    *reinterpret_cast<BlockHeader**>(ptr) = fTail;
    ptr += kPerAllocPad;
    // Remember where this allocation began so release() can reclaim it
    // cheaply if it turns out to be the most recent one.
    fTail->fPrevPtr = fTail->fCurrPtr;
    fTail->fCurrPtr += size;
    fTail->fFreeSize -= size;
    fTail->fLiveCount += 1;
    SkDEBUGCODE(++fAllocationCnt);
    VALIDATE;
    return reinterpret_cast<void*>(ptr);
}
michael@0 | 68 | |
// Releases an allocation previously returned by allocate(). Memory is only
// reclaimed when a block's live count hits zero (whole-block free) or when
// `p` happens to be the most recent allocation in its block.
void GrMemoryPool::release(void* p) {
    VALIDATE;
    // Step back over the per-allocation header to recover the owning block.
    intptr_t ptr = reinterpret_cast<intptr_t>(p) - kPerAllocPad;
    BlockHeader* block = *reinterpret_cast<BlockHeader**>(ptr);
    if (1 == block->fLiveCount) {
        // the head block is special, it is reset rather than deleted
        if (fHead == block) {
            fHead->fCurrPtr = reinterpret_cast<intptr_t>(fHead) +
                kHeaderSize;
            fHead->fLiveCount = 0;
            fHead->fFreeSize = fPreallocSize;
        } else {
            // Unlink the now-empty block from the doubly-linked list and
            // free it; fix up fTail if the block was at the end.
            BlockHeader* prev = block->fPrev;
            BlockHeader* next = block->fNext;
            SkASSERT(prev);
            prev->fNext = next;
            if (next) {
                next->fPrev = prev;
            } else {
                SkASSERT(fTail == block);
                fTail = prev;
            }
            DeleteBlock(block);
        }
    } else {
        --block->fLiveCount;
        // Trivial reclaim: if we're releasing the most recent allocation, reuse it
        if (block->fPrevPtr == ptr) {
            block->fFreeSize += (block->fCurrPtr - block->fPrevPtr);
            block->fCurrPtr = block->fPrevPtr;
        }
    }
    SkDEBUGCODE(--fAllocationCnt);
    VALIDATE;
}
michael@0 | 104 | |
michael@0 | 105 | GrMemoryPool::BlockHeader* GrMemoryPool::CreateBlock(size_t size) { |
michael@0 | 106 | BlockHeader* block = |
michael@0 | 107 | reinterpret_cast<BlockHeader*>(sk_malloc_throw(size + kHeaderSize)); |
michael@0 | 108 | // we assume malloc gives us aligned memory |
michael@0 | 109 | SkASSERT(!(reinterpret_cast<intptr_t>(block) % kAlignment)); |
michael@0 | 110 | block->fLiveCount = 0; |
michael@0 | 111 | block->fFreeSize = size; |
michael@0 | 112 | block->fCurrPtr = reinterpret_cast<intptr_t>(block) + kHeaderSize; |
michael@0 | 113 | block->fPrevPtr = 0; // gcc warns on assigning NULL to an intptr_t. |
michael@0 | 114 | return block; |
michael@0 | 115 | } |
michael@0 | 116 | |
// Frees a block created by CreateBlock (header and user space together).
void GrMemoryPool::DeleteBlock(BlockHeader* block) {
    sk_free(block);
}
michael@0 | 120 | |
// Debug-only consistency walk over the block list: checks link integrity,
// alignment, per-block size bookkeeping, and that the live-allocation total
// matches fAllocationCnt. Compiles to nothing outside SK_DEBUG.
void GrMemoryPool::validate() {
#ifdef SK_DEBUG
    BlockHeader* block = fHead;
    BlockHeader* prev = NULL;
    SkASSERT(block);
    int allocCount = 0;
    do {
        allocCount += block->fLiveCount;
        // prev/next pointers must be mutually consistent.
        SkASSERT(prev == block->fPrev);
        if (NULL != prev) {
            SkASSERT(prev->fNext == block);
        }

        // Reconstruct the block's sizes from its cursor and free space.
        intptr_t b = reinterpret_cast<intptr_t>(block);
        size_t ptrOffset = block->fCurrPtr - b;
        size_t totalSize = ptrOffset + block->fFreeSize;
        size_t userSize = totalSize - kHeaderSize;
        intptr_t userStart = b + kHeaderSize;

        SkASSERT(!(b % kAlignment));
        SkASSERT(!(totalSize % kAlignment));
        SkASSERT(!(userSize % kAlignment));
        SkASSERT(!(block->fCurrPtr % kAlignment));
        if (fHead != block) {
            // Non-head blocks are freed when empty, so they must be live,
            // and were sized at least fMinAllocSize at creation.
            SkASSERT(block->fLiveCount);
            SkASSERT(userSize >= fMinAllocSize);
        } else {
            SkASSERT(userSize == fPreallocSize);
        }
        if (!block->fLiveCount) {
            // Empty block: cursor must sit right after the header.
            SkASSERT(ptrOffset == kHeaderSize);
            SkASSERT(userStart == block->fCurrPtr);
        } else {
            // First allocation's stashed back-pointer must name this block.
            SkASSERT(block == *reinterpret_cast<BlockHeader**>(userStart));
        }
        prev = block;
    } while ((block = block->fNext));
    SkASSERT(allocCount == fAllocationCnt);
    SkASSERT(prev == fTail);
#endif
}