Sat, 03 Jan 2015 20:18:00 +0100
Conditionally enable double-key logic based on private browsing mode or the
privacy.thirdparty.isolate preference, and implement it in
GetCookieStringCommon and FindCookie, where it counts.
Remaining concern: how to ensure FindCookie callers test the condition and
pass a nullptr when double-key logic is disabled.
michael@0 | 1 | /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- |
michael@0 | 2 | * vim: set ts=8 sts=4 et sw=4 tw=99: |
michael@0 | 3 | * This Source Code Form is subject to the terms of the Mozilla Public |
michael@0 | 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this |
michael@0 | 5 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ |
michael@0 | 6 | |
michael@0 | 7 | #ifdef JSGC_GENERATIONAL |
michael@0 | 8 | |
michael@0 | 9 | #include "gc/StoreBuffer.h" |
michael@0 | 10 | |
michael@0 | 11 | #include "mozilla/Assertions.h" |
michael@0 | 12 | |
michael@0 | 13 | #include "vm/ArgumentsObject.h" |
michael@0 | 14 | #include "vm/ForkJoin.h" |
michael@0 | 15 | |
michael@0 | 16 | #include "jsgcinlines.h" |
michael@0 | 17 | |
michael@0 | 18 | using namespace js; |
michael@0 | 19 | using namespace js::gc; |
michael@0 | 20 | using mozilla::ReentrancyGuard; |
michael@0 | 21 | |
michael@0 | 22 | /*** Edges ***/ |
michael@0 | 23 | |
/*
 * Trace the range of slots or dense elements recorded by this edge.
 * The recorded range is clamped to the object's current storage, since the
 * object may have shrunk between recording and marking.
 */
void
StoreBuffer::SlotsEdge::mark(JSTracer *trc)
{
    JSObject *obj = object();

    // If the owning object is itself still in the nursery, the minor GC
    // traces the whole object; this edge needs no separate marking.
    if (trc->runtime()->gcNursery.isInside(obj))
        return;

    if (!obj->isNative()) {
        // Non-native objects have no slot/element storage to walk here;
        // defer to the class trace hook, if one is present.
        const Class *clasp = obj->getClass();
        if (clasp)
            clasp->trace(trc, obj);
        return;
    }

    if (kind() == ElementKind) {
        // Clamp [start_, start_ + count_) to the initialized dense elements.
        int32_t initLen = obj->getDenseInitializedLength();
        int32_t clampedStart = Min(start_, initLen);
        int32_t clampedEnd = Min(start_ + count_, initLen);
        gc::MarkArraySlots(trc, clampedEnd - clampedStart,
                           obj->getDenseElements() + clampedStart, "element");
    } else {
        // Likewise clamp the slot range to the object's current slot span.
        int32_t start = Min(uint32_t(start_), obj->slotSpan());
        int32_t end = Min(uint32_t(start_) + count_, obj->slotSpan());
        MOZ_ASSERT(end >= start);
        MarkObjectSlots(trc, obj, start, end - start);
    }
}
michael@0 | 52 | |
/*
 * Trace all children of the tenured cell recorded by this edge.  Only
 * objects and (when IonMonkey is built) jitcode appear in the whole-cell
 * buffer, per the assertions below.
 */
void
StoreBuffer::WholeCellEdges::mark(JSTracer *trc)
{
    JS_ASSERT(edge->isTenured());
    JSGCTraceKind kind = GetGCThingTraceKind(edge);
    if (kind <= JSTRACE_OBJECT) {
        JSObject *object = static_cast<JSObject *>(edge);
        // Arguments objects get an explicit trace in addition to the generic
        // MarkChildren walk below.
        if (object->is<ArgumentsObject>())
            ArgumentsObject::trace(trc, object);
        MarkChildren(trc, object);
        return;
    }
#ifdef JS_ION
    JS_ASSERT(kind == JSTRACE_JITCODE);
    static_cast<jit::JitCode *>(edge)->trace(trc);
#else
    MOZ_ASSUME_UNREACHABLE("Only objects can be in the wholeCellBuffer if IonMonkey is disabled.");
#endif
}
michael@0 | 72 | |
michael@0 | 73 | void |
michael@0 | 74 | StoreBuffer::CellPtrEdge::mark(JSTracer *trc) |
michael@0 | 75 | { |
michael@0 | 76 | if (!*edge) |
michael@0 | 77 | return; |
michael@0 | 78 | |
michael@0 | 79 | JS_ASSERT(GetGCThingTraceKind(*edge) == JSTRACE_OBJECT); |
michael@0 | 80 | MarkObjectRoot(trc, reinterpret_cast<JSObject**>(edge), "store buffer edge"); |
michael@0 | 81 | } |
michael@0 | 82 | |
michael@0 | 83 | void |
michael@0 | 84 | StoreBuffer::ValueEdge::mark(JSTracer *trc) |
michael@0 | 85 | { |
michael@0 | 86 | if (!deref()) |
michael@0 | 87 | return; |
michael@0 | 88 | |
michael@0 | 89 | MarkValueRoot(trc, edge, "store buffer edge"); |
michael@0 | 90 | } |
michael@0 | 91 | |
michael@0 | 92 | /*** MonoTypeBuffer ***/ |
michael@0 | 93 | |
michael@0 | 94 | template <typename T> |
michael@0 | 95 | void |
michael@0 | 96 | StoreBuffer::MonoTypeBuffer<T>::handleOverflow(StoreBuffer *owner) |
michael@0 | 97 | { |
michael@0 | 98 | if (!owner->isAboutToOverflow()) { |
michael@0 | 99 | /* |
michael@0 | 100 | * Compact the buffer now, and if that fails to free enough space then |
michael@0 | 101 | * trigger a minor collection. |
michael@0 | 102 | */ |
michael@0 | 103 | compact(owner); |
michael@0 | 104 | if (isAboutToOverflow()) |
michael@0 | 105 | owner->setAboutToOverflow(); |
michael@0 | 106 | } else { |
michael@0 | 107 | /* |
michael@0 | 108 | * A minor GC has already been triggered, so there's no point |
michael@0 | 109 | * compacting unless the buffer is totally full. |
michael@0 | 110 | */ |
michael@0 | 111 | if (storage_->availableInCurrentChunk() < sizeof(T)) |
michael@0 | 112 | maybeCompact(owner); |
michael@0 | 113 | } |
michael@0 | 114 | } |
michael@0 | 115 | |
/*
 * Rewrite the buffer in place, keeping only the first occurrence of each
 * entry.  A second LifoAlloc cursor ('insert') trails the read cursor and
 * is advanced only when a not-yet-seen entry is written, packing unique
 * entries at the front; the leftover tail is then released.
 */
template <typename T>
void
StoreBuffer::MonoTypeBuffer<T>::compactRemoveDuplicates(StoreBuffer *owner)
{
    typedef HashSet<T, typename T::Hasher, SystemAllocPolicy> DedupSet;

    DedupSet duplicates;
    if (!duplicates.init())
        return; /* Failure to de-dup is acceptable. */

    // Write cursor: trails the read cursor 'e' below.
    LifoAlloc::Enum insert(*storage_);
    for (LifoAlloc::Enum e(*storage_); !e.empty(); e.popFront<T>()) {
        T *edge = e.get<T>();
        if (!duplicates.has(*edge)) {
            insert.updateFront<T>(*edge);
            insert.popFront<T>();

            /* Failure to insert will leave the set with duplicates. Oh well. */
            duplicates.put(*edge);
        }
    }
    // Free everything past the last entry written by the write cursor.
    storage_->release(insert.mark());

    duplicates.clear();
}
michael@0 | 141 | |
/*
 * Compact this sub-buffer by removing duplicate entries, then record the
 * resulting size so maybeCompact() can detect whether anything has been
 * appended since.
 */
template <typename T>
void
StoreBuffer::MonoTypeBuffer<T>::compact(StoreBuffer *owner)
{
    JS_ASSERT(storage_);
    compactRemoveDuplicates(owner);
    usedAtLastCompact_ = storage_->used();
}
michael@0 | 150 | |
michael@0 | 151 | template <typename T> |
michael@0 | 152 | void |
michael@0 | 153 | StoreBuffer::MonoTypeBuffer<T>::maybeCompact(StoreBuffer *owner) |
michael@0 | 154 | { |
michael@0 | 155 | JS_ASSERT(storage_); |
michael@0 | 156 | if (storage_->used() != usedAtLastCompact_) |
michael@0 | 157 | compact(owner); |
michael@0 | 158 | } |
michael@0 | 159 | |
michael@0 | 160 | template <typename T> |
michael@0 | 161 | void |
michael@0 | 162 | StoreBuffer::MonoTypeBuffer<T>::mark(StoreBuffer *owner, JSTracer *trc) |
michael@0 | 163 | { |
michael@0 | 164 | JS_ASSERT(owner->isEnabled()); |
michael@0 | 165 | ReentrancyGuard g(*owner); |
michael@0 | 166 | if (!storage_) |
michael@0 | 167 | return; |
michael@0 | 168 | |
michael@0 | 169 | maybeCompact(owner); |
michael@0 | 170 | for (LifoAlloc::Enum e(*storage_); !e.empty(); e.popFront<T>()) { |
michael@0 | 171 | T *edge = e.get<T>(); |
michael@0 | 172 | edge->mark(trc); |
michael@0 | 173 | } |
michael@0 | 174 | } |
michael@0 | 175 | |
michael@0 | 176 | /*** RelocatableMonoTypeBuffer ***/ |
michael@0 | 177 | |
/*
 * Drop entries whose edge has been removed.  A tagged entry is a removal
 * request for its edge; a later untagged entry for the same edge re-adds
 * it (last writer wins, hence the ordered first pass).  After computing the
 * invalidated set, the buffer is compacted in place using the same
 * trailing-write-cursor technique as compactRemoveDuplicates, keeping only
 * untagged entries not in the invalidated set.
 */
template <typename T>
void
StoreBuffer::RelocatableMonoTypeBuffer<T>::compactMoved(StoreBuffer *owner)
{
    LifoAlloc &storage = *this->storage_;
    EdgeSet invalidated;
    if (!invalidated.init())
        CrashAtUnhandlableOOM("RelocatableMonoTypeBuffer::compactMoved: Failed to init table.");

    /* Collect the set of entries which are currently invalid. */
    for (LifoAlloc::Enum e(storage); !e.empty(); e.popFront<T>()) {
        T *edge = e.get<T>();
        if (edge->isTagged()) {
            // Tagged entry: request removal of this edge.
            if (!invalidated.put(edge->untagged().edge))
                CrashAtUnhandlableOOM("RelocatableMonoTypeBuffer::compactMoved: Failed to put removal.");
        } else {
            // Untagged entry after a removal re-validates the edge.
            invalidated.remove(edge->untagged().edge);
        }
    }

    /* Remove all entries which are in the invalidated set. */
    LifoAlloc::Enum insert(storage);
    for (LifoAlloc::Enum e(storage); !e.empty(); e.popFront<T>()) {
        T *edge = e.get<T>();
        if (!edge->isTagged() && !invalidated.has(edge->untagged().edge)) {
            insert.updateFront<T>(*edge);
            insert.popFront<T>();
        }
    }
    storage.release(insert.mark());

    invalidated.clear();

#ifdef DEBUG
    /* No tagged (removal-request) entries may survive compaction. */
    for (LifoAlloc::Enum e(storage); !e.empty(); e.popFront<T>())
        JS_ASSERT(!e.get<T>()->isTagged());
#endif
}
michael@0 | 216 | |
/*
 * Compact: first drop removed/invalidated entries, then run the base
 * class's duplicate-removing compaction (which also updates the
 * last-compact bookkeeping).
 */
template <typename T>
void
StoreBuffer::RelocatableMonoTypeBuffer<T>::compact(StoreBuffer *owner)
{
    compactMoved(owner);
    StoreBuffer::MonoTypeBuffer<T>::compact(owner);
}
michael@0 | 224 | |
michael@0 | 225 | /*** GenericBuffer ***/ |
michael@0 | 226 | |
michael@0 | 227 | void |
michael@0 | 228 | StoreBuffer::GenericBuffer::mark(StoreBuffer *owner, JSTracer *trc) |
michael@0 | 229 | { |
michael@0 | 230 | JS_ASSERT(owner->isEnabled()); |
michael@0 | 231 | ReentrancyGuard g(*owner); |
michael@0 | 232 | if (!storage_) |
michael@0 | 233 | return; |
michael@0 | 234 | |
michael@0 | 235 | for (LifoAlloc::Enum e(*storage_); !e.empty();) { |
michael@0 | 236 | unsigned size = *e.get<unsigned>(); |
michael@0 | 237 | e.popFront<unsigned>(); |
michael@0 | 238 | BufferableRef *edge = e.get<BufferableRef>(size); |
michael@0 | 239 | edge->mark(trc); |
michael@0 | 240 | e.popFront(size); |
michael@0 | 241 | } |
michael@0 | 242 | } |
michael@0 | 243 | |
michael@0 | 244 | /*** StoreBuffer ***/ |
michael@0 | 245 | |
michael@0 | 246 | bool |
michael@0 | 247 | StoreBuffer::enable() |
michael@0 | 248 | { |
michael@0 | 249 | if (enabled_) |
michael@0 | 250 | return true; |
michael@0 | 251 | |
michael@0 | 252 | if (!bufferVal.init() || |
michael@0 | 253 | !bufferCell.init() || |
michael@0 | 254 | !bufferSlot.init() || |
michael@0 | 255 | !bufferWholeCell.init() || |
michael@0 | 256 | !bufferRelocVal.init() || |
michael@0 | 257 | !bufferRelocCell.init() || |
michael@0 | 258 | !bufferGeneric.init()) |
michael@0 | 259 | { |
michael@0 | 260 | return false; |
michael@0 | 261 | } |
michael@0 | 262 | |
michael@0 | 263 | enabled_ = true; |
michael@0 | 264 | return true; |
michael@0 | 265 | } |
michael@0 | 266 | |
michael@0 | 267 | void |
michael@0 | 268 | StoreBuffer::disable() |
michael@0 | 269 | { |
michael@0 | 270 | if (!enabled_) |
michael@0 | 271 | return; |
michael@0 | 272 | |
michael@0 | 273 | aboutToOverflow_ = false; |
michael@0 | 274 | |
michael@0 | 275 | enabled_ = false; |
michael@0 | 276 | } |
michael@0 | 277 | |
/*
 * Drop all recorded edges from every sub-buffer and reset the
 * pending-overflow flag.  Trivially succeeds (returns true) when the store
 * buffer is disabled.
 */
bool
StoreBuffer::clear()
{
    if (!enabled_)
        return true;

    aboutToOverflow_ = false;

    bufferVal.clear();
    bufferCell.clear();
    bufferSlot.clear();
    bufferWholeCell.clear();
    bufferRelocVal.clear();
    bufferRelocCell.clear();
    bufferGeneric.clear();

    return true;
}
michael@0 | 296 | |
/*
 * Mark the contents of every sub-buffer.  Each sub-buffer's mark() asserts
 * that the store buffer is enabled and guards against reentrancy.
 */
void
StoreBuffer::markAll(JSTracer *trc)
{
    bufferVal.mark(this, trc);
    bufferCell.mark(this, trc);
    bufferSlot.mark(this, trc);
    bufferWholeCell.mark(this, trc);
    bufferRelocVal.mark(this, trc);
    bufferRelocCell.mark(this, trc);
    bufferGeneric.mark(this, trc);
}
michael@0 | 308 | |
/*
 * Record that the store buffer is nearly full and request a main-thread
 * interrupt so a minor collection can be triggered to drain it.
 */
void
StoreBuffer::setAboutToOverflow()
{
    aboutToOverflow_ = true;
    runtime_->requestInterrupt(JSRuntime::RequestInterruptMainThread);
}
michael@0 | 315 | |
/* Thin wrapper around InParallelSection() (see vm/ForkJoin.h). */
bool
StoreBuffer::inParallelSection() const
{
    return InParallelSection();
}
michael@0 | 321 | |
michael@0 | 322 | void |
michael@0 | 323 | StoreBuffer::addSizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf, JS::GCSizes |
michael@0 | 324 | *sizes) |
michael@0 | 325 | { |
michael@0 | 326 | sizes->storeBufferVals += bufferVal.sizeOfExcludingThis(mallocSizeOf); |
michael@0 | 327 | sizes->storeBufferCells += bufferCell.sizeOfExcludingThis(mallocSizeOf); |
michael@0 | 328 | sizes->storeBufferSlots += bufferSlot.sizeOfExcludingThis(mallocSizeOf); |
michael@0 | 329 | sizes->storeBufferWholeCells += bufferWholeCell.sizeOfExcludingThis(mallocSizeOf); |
michael@0 | 330 | sizes->storeBufferRelocVals += bufferRelocVal.sizeOfExcludingThis(mallocSizeOf); |
michael@0 | 331 | sizes->storeBufferRelocCells += bufferRelocCell.sizeOfExcludingThis(mallocSizeOf); |
michael@0 | 332 | sizes->storeBufferGenerics += bufferGeneric.sizeOfExcludingThis(mallocSizeOf); |
michael@0 | 333 | } |
michael@0 | 334 | |
michael@0 | 335 | JS_PUBLIC_API(void) |
michael@0 | 336 | JS::HeapCellPostBarrier(js::gc::Cell **cellp) |
michael@0 | 337 | { |
michael@0 | 338 | JS_ASSERT(*cellp); |
michael@0 | 339 | JSRuntime *runtime = (*cellp)->runtimeFromMainThread(); |
michael@0 | 340 | runtime->gcStoreBuffer.putRelocatableCell(cellp); |
michael@0 | 341 | } |
michael@0 | 342 | |
michael@0 | 343 | JS_PUBLIC_API(void) |
michael@0 | 344 | JS::HeapCellRelocate(js::gc::Cell **cellp) |
michael@0 | 345 | { |
michael@0 | 346 | /* Called with old contents of *pp before overwriting. */ |
michael@0 | 347 | JS_ASSERT(*cellp); |
michael@0 | 348 | JSRuntime *runtime = (*cellp)->runtimeFromMainThread(); |
michael@0 | 349 | runtime->gcStoreBuffer.removeRelocatableCell(cellp); |
michael@0 | 350 | } |
michael@0 | 351 | |
michael@0 | 352 | JS_PUBLIC_API(void) |
michael@0 | 353 | JS::HeapValuePostBarrier(JS::Value *valuep) |
michael@0 | 354 | { |
michael@0 | 355 | JS_ASSERT(valuep->isMarkable()); |
michael@0 | 356 | if (valuep->isString() && StringIsPermanentAtom(valuep->toString())) |
michael@0 | 357 | return; |
michael@0 | 358 | JSRuntime *runtime = static_cast<js::gc::Cell *>(valuep->toGCThing())->runtimeFromMainThread(); |
michael@0 | 359 | runtime->gcStoreBuffer.putRelocatableValue(valuep); |
michael@0 | 360 | } |
michael@0 | 361 | |
michael@0 | 362 | JS_PUBLIC_API(void) |
michael@0 | 363 | JS::HeapValueRelocate(JS::Value *valuep) |
michael@0 | 364 | { |
michael@0 | 365 | /* Called with old contents of *valuep before overwriting. */ |
michael@0 | 366 | JS_ASSERT(valuep->isMarkable()); |
michael@0 | 367 | if (valuep->isString() && StringIsPermanentAtom(valuep->toString())) |
michael@0 | 368 | return; |
michael@0 | 369 | JSRuntime *runtime = static_cast<js::gc::Cell *>(valuep->toGCThing())->runtimeFromMainThread(); |
michael@0 | 370 | runtime->gcStoreBuffer.removeRelocatableValue(valuep); |
michael@0 | 371 | } |
michael@0 | 372 | |
/* Explicit instantiations of every sub-buffer type used by StoreBuffer. */
template class StoreBuffer::MonoTypeBuffer<StoreBuffer::ValueEdge>;
template class StoreBuffer::MonoTypeBuffer<StoreBuffer::CellPtrEdge>;
template class StoreBuffer::MonoTypeBuffer<StoreBuffer::SlotsEdge>;
template class StoreBuffer::MonoTypeBuffer<StoreBuffer::WholeCellEdges>;
template class StoreBuffer::RelocatableMonoTypeBuffer<StoreBuffer::ValueEdge>;
template class StoreBuffer::RelocatableMonoTypeBuffer<StoreBuffer::CellPtrEdge>;
michael@0 | 379 | |
michael@0 | 380 | #endif /* JSGC_GENERATIONAL */ |