Sat, 03 Jan 2015 20:18:00 +0100
Conditionally enable double key logic according to:
private browsing mode or privacy.thirdparty.isolate preference and
implement in GetCookieStringCommon and FindCookie where it counts...
With some reservations about how to convince FindCookie users to test the
condition and pass a nullptr when disabling the double-key logic.
michael@0 | 1 | |
michael@0 | 2 | /* |
michael@0 | 3 | * Copyright 2011 Google Inc. |
michael@0 | 4 | * |
michael@0 | 5 | * Use of this source code is governed by a BSD-style license that can be |
michael@0 | 6 | * found in the LICENSE file. |
michael@0 | 7 | */ |
michael@0 | 8 | #include "SkPixelRef.h" |
michael@0 | 9 | #include "SkReadBuffer.h" |
michael@0 | 10 | #include "SkWriteBuffer.h" |
michael@0 | 11 | #include "SkThread.h" |
michael@0 | 12 | |
#ifdef SK_USE_POSIX_THREADS

// Ring of statically-initialized mutexes handed out round-robin by
// get_default_mutex(), so independent SkPixelRefs rarely contend on the
// same lock without paying for one mutex per pixel ref.
static SkBaseMutex gPixelRefMutexRing[] = {
    { PTHREAD_MUTEX_INITIALIZER }, { PTHREAD_MUTEX_INITIALIZER },
    { PTHREAD_MUTEX_INITIALIZER }, { PTHREAD_MUTEX_INITIALIZER },
    { PTHREAD_MUTEX_INITIALIZER }, { PTHREAD_MUTEX_INITIALIZER },
    { PTHREAD_MUTEX_INITIALIZER }, { PTHREAD_MUTEX_INITIALIZER },

    { PTHREAD_MUTEX_INITIALIZER }, { PTHREAD_MUTEX_INITIALIZER },
    { PTHREAD_MUTEX_INITIALIZER }, { PTHREAD_MUTEX_INITIALIZER },
    { PTHREAD_MUTEX_INITIALIZER }, { PTHREAD_MUTEX_INITIALIZER },
    { PTHREAD_MUTEX_INITIALIZER }, { PTHREAD_MUTEX_INITIALIZER },

    { PTHREAD_MUTEX_INITIALIZER }, { PTHREAD_MUTEX_INITIALIZER },
    { PTHREAD_MUTEX_INITIALIZER }, { PTHREAD_MUTEX_INITIALIZER },
    { PTHREAD_MUTEX_INITIALIZER }, { PTHREAD_MUTEX_INITIALIZER },
    { PTHREAD_MUTEX_INITIALIZER }, { PTHREAD_MUTEX_INITIALIZER },

    { PTHREAD_MUTEX_INITIALIZER }, { PTHREAD_MUTEX_INITIALIZER },
    { PTHREAD_MUTEX_INITIALIZER }, { PTHREAD_MUTEX_INITIALIZER },
    { PTHREAD_MUTEX_INITIALIZER }, { PTHREAD_MUTEX_INITIALIZER },
    { PTHREAD_MUTEX_INITIALIZER }, { PTHREAD_MUTEX_INITIALIZER },
};

// must be a power-of-2 (there are 32 initializers above, matching the
// non-pthreads branch). undef to just use 1 mutex
#define PIXELREF_MUTEX_RING_COUNT SK_ARRAY_COUNT(gPixelRefMutexRing)

#else // not pthreads

// must be a power-of-2. undef to just use 1 mutex
#define PIXELREF_MUTEX_RING_COUNT 32
static SkBaseMutex gPixelRefMutexRing[PIXELREF_MUTEX_RING_COUNT];

#endif
michael@0 | 47 | |
michael@0 | 48 | static SkBaseMutex* get_default_mutex() { |
michael@0 | 49 | static int32_t gPixelRefMutexRingIndex; |
michael@0 | 50 | |
michael@0 | 51 | SkASSERT(SkIsPow2(PIXELREF_MUTEX_RING_COUNT)); |
michael@0 | 52 | |
michael@0 | 53 | // atomic_inc might be overkill here. It may be fine if once in a while |
michael@0 | 54 | // we hit a race-condition and two subsequent calls get the same index... |
michael@0 | 55 | int index = sk_atomic_inc(&gPixelRefMutexRingIndex); |
michael@0 | 56 | return &gPixelRefMutexRing[index & (PIXELREF_MUTEX_RING_COUNT - 1)]; |
michael@0 | 57 | } |
michael@0 | 58 | |
michael@0 | 59 | /////////////////////////////////////////////////////////////////////////////// |
michael@0 | 60 | |
michael@0 | 61 | int32_t SkNextPixelRefGenerationID(); |
michael@0 | 62 | |
michael@0 | 63 | int32_t SkNextPixelRefGenerationID() { |
michael@0 | 64 | static int32_t gPixelRefGenerationID; |
michael@0 | 65 | // do a loop in case our global wraps around, as we never want to |
michael@0 | 66 | // return a 0 |
michael@0 | 67 | int32_t genID; |
michael@0 | 68 | do { |
michael@0 | 69 | genID = sk_atomic_inc(&gPixelRefGenerationID) + 1; |
michael@0 | 70 | } while (0 == genID); |
michael@0 | 71 | return genID; |
michael@0 | 72 | } |
michael@0 | 73 | |
michael@0 | 74 | /////////////////////////////////////////////////////////////////////////////// |
michael@0 | 75 | |
michael@0 | 76 | void SkPixelRef::setMutex(SkBaseMutex* mutex) { |
michael@0 | 77 | if (NULL == mutex) { |
michael@0 | 78 | mutex = get_default_mutex(); |
michael@0 | 79 | } |
michael@0 | 80 | fMutex = mutex; |
michael@0 | 81 | } |
michael@0 | 82 | |
// just need a > 0 value, so pick a funny one to aid in debugging
#define SKPIXELREF_PRELOCKED_LOCKCOUNT 123456789

// Construct an unlocked, mutable pixel ref guarded by a mutex drawn from
// the default ring (setMutex(NULL)).
SkPixelRef::SkPixelRef(const SkImageInfo& info) : fInfo(info) {
    this->setMutex(NULL);
    fRec.zero();
    fLockCount = 0;
    this->needsNewGenID();   // genID is assigned lazily by getGenerationID()
    fIsImmutable = false;
    fPreLocked = false;
}
michael@0 | 94 | |
michael@0 | 95 | |
// Same as the one-argument constructor, but with a caller-supplied mutex;
// NULL falls back to the default ring (see setMutex).
SkPixelRef::SkPixelRef(const SkImageInfo& info, SkBaseMutex* mutex) : fInfo(info) {
    this->setMutex(mutex);
    fRec.zero();
    fLockCount = 0;
    this->needsNewGenID();   // genID is assigned lazily by getGenerationID()
    fIsImmutable = false;
    fPreLocked = false;
}
michael@0 | 104 | |
// Deserialize an SkImageInfo from the buffer. Exists so the deserializing
// constructor below can populate const-ish fInfo in its initializer list.
static SkImageInfo read_info(SkReadBuffer& buffer) {
    SkImageInfo info;
    info.unflatten(buffer);
    return info;
}
michael@0 | 110 | |
// Deserializing constructor; reads the fields written by flatten() in the
// same order. fInfo must be read before the body runs, hence read_info()
// in the initializer list.
SkPixelRef::SkPixelRef(SkReadBuffer& buffer, SkBaseMutex* mutex)
        : INHERITED(buffer)
        , fInfo(read_info(buffer))
{
    this->setMutex(mutex);
    fRec.zero();
    fLockCount = 0;
    fIsImmutable = buffer.readBool();
    fGenerationID = buffer.readUInt();  // may be 0 for cross-process pictures; see flatten()
    fUniqueGenerationID = false; // Conservatively assuming the original still exists.
    fPreLocked = false;
}
michael@0 | 123 | |
// Destruction counts as a genID "change": fire (and then destroy) any
// registered listeners before the object goes away.
SkPixelRef::~SkPixelRef() {
    this->callGenIDChangeListeners();
}
michael@0 | 127 | |
// Reset the genID to the reserved value 0; getGenerationID() will mint a
// fresh ID on its next call.
void SkPixelRef::needsNewGenID() {
    fGenerationID = 0;
    fUniqueGenerationID = false;
}
michael@0 | 132 | |
// Share 'that' pixel ref's genID. After this, neither object may claim a
// unique genID, since two refs now report the same one.
void SkPixelRef::cloneGenID(const SkPixelRef& that) {
    // This is subtle. We must call that.getGenerationID() to make sure its genID isn't 0.
    this->fGenerationID = that.getGenerationID();
    this->fUniqueGenerationID = false;
    that.fUniqueGenerationID = false;
}
michael@0 | 139 | |
// Mark this pixel ref as permanently locked on the given pixels: lock/
// unlock become no-ops and fRec always points at this storage. The whole
// feature can be compiled out with SK_IGNORE_PIXELREF_SETPRELOCKED.
void SkPixelRef::setPreLocked(void* pixels, size_t rowBytes, SkColorTable* ctable) {
#ifndef SK_IGNORE_PIXELREF_SETPRELOCKED
    // only call me in your constructor, otherwise fLockCount tracking can get
    // out of sync.
    fRec.fPixels = pixels;
    fRec.fColorTable = ctable;
    fRec.fRowBytes = rowBytes;
    fLockCount = SKPIXELREF_PRELOCKED_LOCKCOUNT;  // sentinel: checked by asserts in lock/unlock
    fPreLocked = true;
#endif
}
michael@0 | 151 | |
// Serialize in the order the deserializing constructor reads: base class,
// fInfo, immutability flag, then the genID (or 0).
void SkPixelRef::flatten(SkWriteBuffer& buffer) const {
    this->INHERITED::flatten(buffer);
    fInfo.flatten(buffer);
    buffer.writeBool(fIsImmutable);
    // We write the gen ID into the picture for within-process recording. This
    // is safe since the same genID will never refer to two different sets of
    // pixels (barring overflow). However, each process has its own "namespace"
    // of genIDs. So for cross-process recording we write a zero which will
    // trigger assignment of a new genID in playback.
    if (buffer.isCrossProcess()) {
        buffer.writeUInt(0);
    } else {
        buffer.writeUInt(fGenerationID);
        // NOTE: mutated inside a const method — fUniqueGenerationID is
        // presumably declared mutable in the header; confirm there.
        fUniqueGenerationID = false;  // Conservative, a copy is probably about to exist.
    }
}
michael@0 | 168 | |
michael@0 | 169 | bool SkPixelRef::lockPixels(LockRec* rec) { |
michael@0 | 170 | SkASSERT(!fPreLocked || SKPIXELREF_PRELOCKED_LOCKCOUNT == fLockCount); |
michael@0 | 171 | |
michael@0 | 172 | if (!fPreLocked) { |
michael@0 | 173 | SkAutoMutexAcquire ac(*fMutex); |
michael@0 | 174 | |
michael@0 | 175 | if (1 == ++fLockCount) { |
michael@0 | 176 | SkASSERT(fRec.isZero()); |
michael@0 | 177 | |
michael@0 | 178 | LockRec rec; |
michael@0 | 179 | if (!this->onNewLockPixels(&rec)) { |
michael@0 | 180 | return false; |
michael@0 | 181 | } |
michael@0 | 182 | SkASSERT(!rec.isZero()); // else why did onNewLock return true? |
michael@0 | 183 | fRec = rec; |
michael@0 | 184 | } |
michael@0 | 185 | } |
michael@0 | 186 | *rec = fRec; |
michael@0 | 187 | return true; |
michael@0 | 188 | } |
michael@0 | 189 | |
michael@0 | 190 | bool SkPixelRef::lockPixels() { |
michael@0 | 191 | LockRec rec; |
michael@0 | 192 | return this->lockPixels(&rec); |
michael@0 | 193 | } |
michael@0 | 194 | |
// Decrement the lock count and, on the 1 -> 0 transition, release the
// subclass's pixels via onUnlockPixels(). Pre-locked refs are never
// actually unlocked.
void SkPixelRef::unlockPixels() {
    SkASSERT(!fPreLocked || SKPIXELREF_PRELOCKED_LOCKCOUNT == fLockCount);

    if (!fPreLocked) {
        SkAutoMutexAcquire ac(*fMutex);

        SkASSERT(fLockCount > 0);
        if (0 == --fLockCount) {
            // don't call onUnlockPixels unless onLockPixels succeeded
            if (fRec.fPixels) {
                this->onUnlockPixels();
                fRec.zero();
            } else {
                SkASSERT(fRec.isZero());
            }
        }
    }
}
michael@0 | 213 | |
// Public query, delegating to the virtual hook below.
bool SkPixelRef::lockPixelsAreWritable() const {
    return this->onLockPixelsAreWritable();
}
michael@0 | 217 | |
// Default: locked pixels may be written to; subclasses backed by
// read-only storage override this.
bool SkPixelRef::onLockPixelsAreWritable() const {
    return true;
}
michael@0 | 221 | |
// Default: no direct decode support; subclasses that can decode straight
// into a bitmap override this to return true.
bool SkPixelRef::onImplementsDecodeInto() {
    return false;
}
michael@0 | 225 | |
// Default decode hook: always fails (see onImplementsDecodeInto).
bool SkPixelRef::onDecodeInto(int pow2, SkBitmap* bitmap) {
    return false;
}
michael@0 | 229 | |
// Lazily mint a genID on first request (0 is the "unassigned" sentinel set
// by needsNewGenID). Mutates fields inside a const method, so those members
// are presumably declared mutable in the header — confirm there.
uint32_t SkPixelRef::getGenerationID() const {
    if (0 == fGenerationID) {
        fGenerationID = SkNextPixelRefGenerationID();
        fUniqueGenerationID = true;  // The only time we can be sure of this!
    }
    return fGenerationID;
}
michael@0 | 237 | |
// Register a listener to be fired when this ref's genID is invalidated.
// Takes ownership of 'listener' in all cases: it is deleted immediately if
// it would never fire (NULL, or the genID is already shared/unassigned).
void SkPixelRef::addGenIDChangeListener(GenIDChangeListener* listener) {
    if (NULL == listener || !fUniqueGenerationID) {
        // No point in tracking this if we're not going to call it.
        SkDELETE(listener);
        return;
    }
    *fGenIDChangeListeners.append() = listener;
}
michael@0 | 246 | |
// Fire all registered genID-change listeners (only when our genID is known
// to be unique), then destroy them — listeners are strictly one-shot.
void SkPixelRef::callGenIDChangeListeners() {
    // We don't invalidate ourselves if we think another SkPixelRef is sharing our genID.
    if (fUniqueGenerationID) {
        for (int i = 0; i < fGenIDChangeListeners.count(); i++) {
            fGenIDChangeListeners[i]->onChange();
        }
    }
    // Listeners get at most one shot, so whether these triggered or not, blow them away.
    fGenIDChangeListeners.deleteAll();
}
michael@0 | 257 | |
// Call after mutating the pixels: fires listeners and invalidates the genID
// so caches keyed on it are refreshed. Mutating an immutable ref is a
// contract violation — flagged (debug-only) but not prevented.
void SkPixelRef::notifyPixelsChanged() {
#ifdef SK_DEBUG
    if (fIsImmutable) {
        SkDebugf("========== notifyPixelsChanged called on immutable pixelref");
    }
#endif
    this->callGenIDChangeListeners();
    this->needsNewGenID();
}
michael@0 | 267 | |
// Rewrite fInfo's alpha type in place. const_cast is needed because the
// field is const-qualified on fInfo; this deliberately bypasses that to
// avoid rebuilding the whole SkImageInfo.
void SkPixelRef::changeAlphaType(SkAlphaType at) {
    *const_cast<SkAlphaType*>(&fInfo.fAlphaType) = at;
}
michael@0 | 271 | |
// One-way switch: once immutable, there is no API here to revert.
void SkPixelRef::setImmutable() {
    fIsImmutable = true;
}
michael@0 | 275 | |
// Copy (a subset of) the pixels into 'dst'; delegates to the virtual hook.
bool SkPixelRef::readPixels(SkBitmap* dst, const SkIRect* subset) {
    return this->onReadPixels(dst, subset);
}
michael@0 | 279 | |
// Default: reading pixels is unsupported; subclasses override.
bool SkPixelRef::onReadPixels(SkBitmap* dst, const SkIRect* subset) {
    return false;
}
michael@0 | 283 | |
// Default: no encoded form available. Overriders return a ref'd SkData
// (per the "Ref" naming convention) holding the original encoded bytes.
SkData* SkPixelRef::onRefEncodedData() {
    return NULL;
}
michael@0 | 287 | |
// Default: allocation size unknown; subclasses that track their storage
// report it here (0 means "don't know").
size_t SkPixelRef::getAllocatedSizeInBytes() const {
    return 0;
}
michael@0 | 291 | |
michael@0 | 292 | /////////////////////////////////////////////////////////////////////////////// |
michael@0 | 293 | |
#ifdef SK_BUILD_FOR_ANDROID
// Android framework hook: plain ref-count bump; 'data' is unused here.
void SkPixelRef::globalRef(void* data) {
    this->ref();
}

// Android framework hook: counterpart to globalRef().
void SkPixelRef::globalUnref() {
    this->unref();
}
#endif