Sat, 03 Jan 2015 20:18:00 +0100
Conditionally enable the double-key logic based on private browsing mode
or the privacy.thirdparty.isolate preference, and implement it in
GetCookieStringCommon and FindCookie, where it counts...
Some reservations remain about how to convince FindCookie callers to test
the condition and pass a nullptr when the double-key logic is disabled.
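A minimal sketch of the intended gating (hypothetical names and signatures;
the real FindCookie/GetCookieStringCommon live in nsCookieService):

    // Double-key cookies only in private browsing or when the isolation
    // pref is set; otherwise keep the single-key behaviour.
    bool isolate = aIsPrivate ||
                   Preferences::GetBool("privacy.thirdparty.isolate", false);

    // Callers signal "double key disabled" by passing a nullptr
    // first-party key.
    FindCookie(aKey, aHost, aName, aPath, isolate ? aFirstParty : nullptr);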
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef gc_StoreBuffer_h
#define gc_StoreBuffer_h

#ifdef JSGC_GENERATIONAL

#ifndef JSGC_USE_EXACT_ROOTING
# error "Generational GC requires exact rooting."
#endif

#include "mozilla/DebugOnly.h"
#include "mozilla/ReentrancyGuard.h"

#include "jsalloc.h"

#include "ds/LifoAlloc.h"
#include "gc/Nursery.h"
#include "gc/Tracer.h"
#include "js/MemoryMetrics.h"

namespace js {

void
CrashAtUnhandlableOOM(const char *reason);

namespace gc {

/*
 * BufferableRef represents an abstract reference for use in the generational
 * GC's remembered set. Entries in the store buffer that cannot be represented
 * with the simple pointer-to-a-pointer scheme must derive from this class and
 * use the generic store buffer interface.
 */
class BufferableRef
{
  public:
    virtual void mark(JSTracer *trc) = 0;
    bool maybeInRememberedSet(const Nursery &) const { return true; }
};
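/*
 * Illustrative sketch (not part of the original header): a BufferableRef for
 * an edge that the pointer buffers cannot describe directly. All names below
 * are hypothetical; a real subclass would mark the edge with the helpers in
 * gc/Marking.h.
 *
 *     class TaggedSlotRef : public BufferableRef
 *     {
 *         JSObject *owner;
 *         size_t slot;
 *       public:
 *         TaggedSlotRef(JSObject *obj, size_t s) : owner(obj), slot(s) {}
 *         virtual void mark(JSTracer *trc) {
 *             // Recompute the edge address from |owner| and |slot|, then
 *             // mark its target so the nursery thing survives and the
 *             // edge is updated if the target moved.
 *         }
 *     };
 *
 * Entries like this are inserted by value via StoreBuffer::putGeneric().
 */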
/*
 * HashKeyRef represents a reference to a HashMap key. This should normally
 * be used through the HashTableWriteBarrierPost function.
 */
template <typename Map, typename Key>
class HashKeyRef : public BufferableRef
{
    Map *map;
    Key key;

  public:
    HashKeyRef(Map *m, const Key &k) : map(m), key(k) {}

    void mark(JSTracer *trc) {
        Key prior = key;
        typename Map::Ptr p = map->lookup(key);
        if (!p)
            return;
        trc->setTracingLocation(&*p);
        Mark(trc, &key, "HashKeyRef");
        map->rekeyIfMoved(prior, key);
    }
};
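/*
 * Usage sketch (illustrative; the map typedef and barrier helper are
 * assumptions, not part of this header). After storing a key that may point
 * into the nursery, record it so a minor GC can rekey the table:
 *
 *     typedef HashMap<JSObject *, uint32_t, DefaultHasher<JSObject *>,
 *                     SystemAllocPolicy> ObjectMap;
 *
 *     void keyWriteBarrier(StoreBuffer &sb, ObjectMap *map, JSObject *key) {
 *         sb.putGeneric(HashKeyRef<ObjectMap, JSObject *>(map, key));
 *     }
 *
 * On minor GC, mark() looks the key up again, marks it (moving it out of the
 * nursery if needed), and calls rekeyIfMoved() so the entry hashes correctly.
 */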
typedef HashSet<void *, PointerHasher<void *, 3>, SystemAllocPolicy> EdgeSet;

/* The size of a single block of store buffer storage space. */
static const size_t LifoAllocBlockSize = 1 << 16; /* 64KiB */

/*
 * The StoreBuffer observes all writes that occur in the system and performs
 * efficient filtering of them to derive a remembered set for nursery GC.
 */
class StoreBuffer
{
    friend class mozilla::ReentrancyGuard;

    /* The size at which a block is about to overflow. */
    static const size_t MinAvailableSize = (size_t)(LifoAllocBlockSize * 1.0 / 8.0);

    /*
     * This buffer holds only a single type of edge. Using this buffer is more
     * efficient than the generic buffer when many writes will be to the same
     * type of edge: e.g. Value or Cell*.
     */
    template<typename T>
    struct MonoTypeBuffer
    {
        LifoAlloc *storage_;
        size_t usedAtLastCompact_;

        explicit MonoTypeBuffer() : storage_(nullptr), usedAtLastCompact_(0) {}
        ~MonoTypeBuffer() { js_delete(storage_); }

        bool init() {
            if (!storage_)
                storage_ = js_new<LifoAlloc>(LifoAllocBlockSize);
            clear();
            return bool(storage_);
        }

        void clear() {
            if (!storage_)
                return;

            storage_->used() ? storage_->releaseAll() : storage_->freeAll();
            usedAtLastCompact_ = 0;
        }

        bool isAboutToOverflow() const {
            return !storage_->isEmpty() && storage_->availableInCurrentChunk() < MinAvailableSize;
        }

        void handleOverflow(StoreBuffer *owner);

        /* Compaction algorithms. */
        void compactRemoveDuplicates(StoreBuffer *owner);

        /*
         * Attempts to reduce the usage of the buffer by removing unnecessary
         * entries.
         */
        virtual void compact(StoreBuffer *owner);

        /* Compacts if any entries have been added since the last compaction. */
        void maybeCompact(StoreBuffer *owner);

        /* Add one item to the buffer. */
        void put(StoreBuffer *owner, const T &t) {
            JS_ASSERT(storage_);

            T *tp = storage_->new_<T>(t);
            if (!tp)
                CrashAtUnhandlableOOM("Failed to allocate for MonoTypeBuffer::put.");

            if (isAboutToOverflow())
                handleOverflow(owner);
        }

        /* Mark the source of all edges in the store buffer. */
        void mark(StoreBuffer *owner, JSTracer *trc);

        size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) {
            return storage_ ? storage_->sizeOfIncludingThis(mallocSizeOf) : 0;
        }

      private:
        MonoTypeBuffer &operator=(const MonoTypeBuffer& other) MOZ_DELETE;
    };
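    /*
     * Compaction sketch (illustrative; the real implementation lives in
     * StoreBuffer.cpp). compactRemoveDuplicates() walks the entries,
     * remembering each edge in a HashSet keyed by T::Hasher and keeping only
     * first occurrences, roughly:
     *
     *     HashSet<T, typename T::Hasher, SystemAllocPolicy> duplicates;
     *     if (!duplicates.init())
     *         return;                    // failing to de-duplicate is fine
     *     for (each T edge in *storage_) // hypothetical iteration
     *         if (!duplicates.has(edge)) {
     *             keep(edge);            // hypothetical compaction step
     *             duplicates.put(edge);
     *         }
     *
     * This is why each edge type below defines a Hasher typedef.
     */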
    /*
     * Overrides the MonoTypeBuffer to support pointers that may be moved in
     * memory outside of the GC's control.
     */
    template <typename T>
    struct RelocatableMonoTypeBuffer : public MonoTypeBuffer<T>
    {
        /* Override compaction to filter out removed items. */
        void compactMoved(StoreBuffer *owner);
        virtual void compact(StoreBuffer *owner) MOZ_OVERRIDE;

        /* Record a removal from the buffer. */
        void unput(StoreBuffer *owner, const T &v) {
            MonoTypeBuffer<T>::put(owner, v.tagged());
        }
    };
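    /*
     * How removal works (explanatory note, not in the original header):
     * unput() appends the same edge with its low bit set via T::tagged(),
     * and compactMoved() later treats the tagged entry as a tombstone that
     * cancels the matching untagged entry:
     *
     *     ValueEdge e(vp);
     *     ValueEdge dead = e.tagged();       // uintptr_t(vp) | 1
     *     JS_ASSERT(dead.isTagged());
     *     JS_ASSERT(dead.untagged() == e);   // the tag round-trips exactly
     *
     * This works because JS::Value and Cell* slots are at least 2-byte
     * aligned, so an edge address never has its low bit set naturally.
     */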
    struct GenericBuffer
    {
        LifoAlloc *storage_;

        explicit GenericBuffer() : storage_(nullptr) {}
        ~GenericBuffer() { js_delete(storage_); }

        bool init() {
            if (!storage_)
                storage_ = js_new<LifoAlloc>(LifoAllocBlockSize);
            clear();
            return bool(storage_);
        }

        void clear() {
            if (!storage_)
                return;

            storage_->used() ? storage_->releaseAll() : storage_->freeAll();
        }

        bool isAboutToOverflow() const {
            return !storage_->isEmpty() && storage_->availableInCurrentChunk() < MinAvailableSize;
        }

        /* Mark all generic edges. */
        void mark(StoreBuffer *owner, JSTracer *trc);

        template <typename T>
        void put(StoreBuffer *owner, const T &t) {
            JS_ASSERT(storage_);

            /* Ensure T is derived from BufferableRef. */
            (void)static_cast<const BufferableRef*>(&t);

            unsigned size = sizeof(T);
            unsigned *sizep = storage_->newPod<unsigned>();
            if (!sizep)
                CrashAtUnhandlableOOM("Failed to allocate for GenericBuffer::put.");
            *sizep = size;

            T *tp = storage_->new_<T>(t);
            if (!tp)
                CrashAtUnhandlableOOM("Failed to allocate for GenericBuffer::put.");

            if (isAboutToOverflow())
                owner->setAboutToOverflow();
        }

        size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) {
            return storage_ ? storage_->sizeOfIncludingThis(mallocSizeOf) : 0;
        }

      private:
        GenericBuffer &operator=(const GenericBuffer& other) MOZ_DELETE;
    };
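    /*
     * Record layout and marking sketch (illustrative; the real mark() is in
     * StoreBuffer.cpp). put() writes each entry as a length-prefixed record,
     * [unsigned size][T payload], so the marker can walk records of mixed
     * concrete types through the common BufferableRef vtable, roughly:
     *
     *     LifoAlloc::Enum e(*storage_);
     *     while (!e.empty()) {
     *         unsigned size = *e.get<unsigned>();
     *         e.popFront<unsigned>();
     *         BufferableRef *edge = e.get<BufferableRef>(size);
     *         edge->mark(trc);
     *         e.popFront(size);
     *     }
     */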
    template <typename Edge>
    struct PointerEdgeHasher
    {
        typedef Edge Lookup;
        static HashNumber hash(const Lookup &l) { return uintptr_t(l.edge) >> 3; }
        static bool match(const Edge &k, const Lookup &l) { return k == l; }
    };

    struct CellPtrEdge
    {
        Cell **edge;

        explicit CellPtrEdge(Cell **v) : edge(v) {}
        bool operator==(const CellPtrEdge &other) const { return edge == other.edge; }
        bool operator!=(const CellPtrEdge &other) const { return edge != other.edge; }

        bool maybeInRememberedSet(const Nursery &nursery) const {
            return !nursery.isInside(edge) && nursery.isInside(*edge);
        }

        void mark(JSTracer *trc);

        CellPtrEdge tagged() const { return CellPtrEdge((Cell **)(uintptr_t(edge) | 1)); }
        CellPtrEdge untagged() const { return CellPtrEdge((Cell **)(uintptr_t(edge) & ~1)); }
        bool isTagged() const { return bool(uintptr_t(edge) & 1); }

        typedef PointerEdgeHasher<CellPtrEdge> Hasher;
    };

    struct ValueEdge
    {
        JS::Value *edge;

        explicit ValueEdge(JS::Value *v) : edge(v) {}
        bool operator==(const ValueEdge &other) const { return edge == other.edge; }
        bool operator!=(const ValueEdge &other) const { return edge != other.edge; }

        void *deref() const { return edge->isGCThing() ? edge->toGCThing() : nullptr; }

        bool maybeInRememberedSet(const Nursery &nursery) const {
            return !nursery.isInside(edge) && nursery.isInside(deref());
        }

        void mark(JSTracer *trc);

        ValueEdge tagged() const { return ValueEdge((JS::Value *)(uintptr_t(edge) | 1)); }
        ValueEdge untagged() const { return ValueEdge((JS::Value *)(uintptr_t(edge) & ~1)); }
        bool isTagged() const { return bool(uintptr_t(edge) & 1); }

        typedef PointerEdgeHasher<ValueEdge> Hasher;
    };

    struct SlotsEdge
    {
        // These definitions must match those in HeapSlot::Kind.
        const static int SlotKind = 0;
        const static int ElementKind = 1;

        uintptr_t objectAndKind_; // JSObject* | Kind
        int32_t start_;
        int32_t count_;

        SlotsEdge(JSObject *object, int kind, int32_t start, int32_t count)
          : objectAndKind_(uintptr_t(object) | kind), start_(start), count_(count)
        {
            JS_ASSERT((uintptr_t(object) & 1) == 0);
            JS_ASSERT(kind <= 1);
            JS_ASSERT(start >= 0);
            JS_ASSERT(count > 0);
        }

        JSObject *object() const { return reinterpret_cast<JSObject *>(objectAndKind_ & ~1); }
        int kind() const { return (int)(objectAndKind_ & 1); }

        bool operator==(const SlotsEdge &other) const {
            return objectAndKind_ == other.objectAndKind_ &&
                   start_ == other.start_ &&
                   count_ == other.count_;
        }

        bool operator!=(const SlotsEdge &other) const {
            return !(*this == other);
        }

        bool maybeInRememberedSet(const Nursery &nursery) const {
            return !nursery.isInside(object());
        }

        void mark(JSTracer *trc);

        typedef struct {
            typedef SlotsEdge Lookup;
            static HashNumber hash(const Lookup &l) { return l.objectAndKind_ ^ l.start_ ^ l.count_; }
            static bool match(const SlotsEdge &k, const Lookup &l) { return k == l; }
        } Hasher;
    };
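    /*
     * Packing example (explanatory note, not in the original header).
     * JSObject allocations are at least 2-byte aligned, so the low bit of
     * the pointer is free to carry the Kind:
     *
     *     SlotsEdge e(obj, SlotsEdge::ElementKind, 0, 4);
     *     JS_ASSERT(e.object() == obj);                  // masks off the bit
     *     JS_ASSERT(e.kind() == SlotsEdge::ElementKind); // reads the bit
     *
     * One edge thus records "slots/elements [start, start + count) of obj
     * may have been written" in a single word plus two int32s.
     */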
    struct WholeCellEdges
    {
        Cell *edge;

        explicit WholeCellEdges(Cell *cell) : edge(cell) {
            JS_ASSERT(edge->isTenured());
        }

        bool operator==(const WholeCellEdges &other) const { return edge == other.edge; }
        bool operator!=(const WholeCellEdges &other) const { return edge != other.edge; }

        bool maybeInRememberedSet(const Nursery &nursery) const { return true; }

        static bool supportsDeduplication() { return true; }
        void *deduplicationKey() const { return (void *)edge; }

        void mark(JSTracer *trc);

        typedef PointerEdgeHasher<WholeCellEdges> Hasher;
    };

    template <typename Key>
    struct CallbackRef : public BufferableRef
    {
        typedef void (*MarkCallback)(JSTracer *trc, Key *key, void *data);

        CallbackRef(MarkCallback cb, Key *k, void *d) : callback(cb), key(k), data(d) {}

        virtual void mark(JSTracer *trc) {
            callback(trc, key, data);
        }

      private:
        MarkCallback callback;
        Key *key;
        void *data;
    };

    template <typename Edge>
    bool isOkayToUseBuffer(const Edge &edge) const {
        /*
         * Disabled store buffers may not have a valid state; e.g. when stored
         * inline in the ChunkTrailer.
         */
        if (!isEnabled())
            return false;

        /*
         * The concurrent parsing thread cannot validly insert into the buffer,
         * but it should not activate the re-entrancy guard either.
         */
        if (!CurrentThreadCanAccessRuntime(runtime_))
            return false;

        return true;
    }

    template <typename Buffer, typename Edge>
    void put(Buffer &buffer, const Edge &edge) {
        if (!isOkayToUseBuffer(edge))
            return;
        mozilla::ReentrancyGuard g(*this);
        if (edge.maybeInRememberedSet(nursery_))
            buffer.put(this, edge);
    }

    template <typename Buffer, typename Edge>
    void unput(Buffer &buffer, const Edge &edge) {
        if (!isOkayToUseBuffer(edge))
            return;
        mozilla::ReentrancyGuard g(*this);
        buffer.unput(this, edge);
    }

    MonoTypeBuffer<ValueEdge> bufferVal;
    MonoTypeBuffer<CellPtrEdge> bufferCell;
    MonoTypeBuffer<SlotsEdge> bufferSlot;
    MonoTypeBuffer<WholeCellEdges> bufferWholeCell;
    RelocatableMonoTypeBuffer<ValueEdge> bufferRelocVal;
    RelocatableMonoTypeBuffer<CellPtrEdge> bufferRelocCell;
    GenericBuffer bufferGeneric;

    JSRuntime *runtime_;
    const Nursery &nursery_;

    bool aboutToOverflow_;
    bool enabled_;
    mozilla::DebugOnly<bool> entered; /* For ReentrancyGuard. */

  public:
    explicit StoreBuffer(JSRuntime *rt, const Nursery &nursery)
      : bufferVal(), bufferCell(), bufferSlot(), bufferWholeCell(),
        bufferRelocVal(), bufferRelocCell(), bufferGeneric(),
        runtime_(rt), nursery_(nursery), aboutToOverflow_(false), enabled_(false),
        entered(false)
    {
    }

    bool enable();
    void disable();
    bool isEnabled() const { return enabled_; }

    bool clear();

    /* Get the overflowed status. */
    bool isAboutToOverflow() const { return aboutToOverflow_; }

    /* Insert a single edge into the buffer/remembered set. */
    void putValue(JS::Value *valuep) { put(bufferVal, ValueEdge(valuep)); }
    void putCell(Cell **cellp) { put(bufferCell, CellPtrEdge(cellp)); }
    void putSlot(JSObject *obj, int kind, int32_t start, int32_t count) {
        put(bufferSlot, SlotsEdge(obj, kind, start, count));
    }
    void putWholeCell(Cell *cell) {
        JS_ASSERT(cell->isTenured());
        put(bufferWholeCell, WholeCellEdges(cell));
    }
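    /*
     * Usage sketch (illustrative; the write and address accessors below are
     * hypothetical). These are post-write barriers: record the edge after
     * storing a pointer that may refer to the nursery into a tenured thing:
     *
     *     tenuredObj->setSlotValue(i, nurseryValue);   // hypothetical write
     *     storeBuffer.putValue(tenuredObj->slotAddr(i));
     *
     * put() does the filtering: an edge whose source is itself in the
     * nursery, or whose target is not, fails maybeInRememberedSet() and is
     * never recorded.
     */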
    /* Insert or update a single edge in the Relocatable buffer. */
    void putRelocatableValue(JS::Value *valuep) { put(bufferRelocVal, ValueEdge(valuep)); }
    void putRelocatableCell(Cell **cellp) { put(bufferRelocCell, CellPtrEdge(cellp)); }
    void removeRelocatableValue(JS::Value *valuep) { unput(bufferRelocVal, ValueEdge(valuep)); }
    void removeRelocatableCell(Cell **cellp) { unput(bufferRelocCell, CellPtrEdge(cellp)); }

    /* Insert an entry into the generic buffer. */
    template <typename T>
    void putGeneric(const T &t) { put(bufferGeneric, t); }

    /* Insert or update a callback entry. */
    template <typename Key>
    void putCallback(void (*callback)(JSTracer *trc, Key *key, void *data), Key *key, void *data) {
        put(bufferGeneric, CallbackRef<Key>(callback, key, data));
    }
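    /*
     * Callback usage sketch (illustrative; MyThing and its tracer are
     * hypothetical). The CallbackRef is copied into the generic buffer and
     * its mark() invokes the callback during the next minor GC:
     *
     *     static void TraceMyThing(JSTracer *trc, MyThing *thing, void *data) {
     *         // mark or update the GC pointers owned by |thing|
     *     }
     *
     *     storeBuffer.putCallback(TraceMyThing, thing, nullptr);
     */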
    /* Methods to mark the source of all edges in the store buffer. */
    void markAll(JSTracer *trc);
    void markValues(JSTracer *trc) { bufferVal.mark(this, trc); }
    void markCells(JSTracer *trc) { bufferCell.mark(this, trc); }
    void markSlots(JSTracer *trc) { bufferSlot.mark(this, trc); }
    void markWholeCells(JSTracer *trc) { bufferWholeCell.mark(this, trc); }
    void markRelocatableValues(JSTracer *trc) { bufferRelocVal.mark(this, trc); }
    void markRelocatableCells(JSTracer *trc) { bufferRelocCell.mark(this, trc); }
    void markGenericEntries(JSTracer *trc) { bufferGeneric.mark(this, trc); }

    /* We cannot call InParallelSection directly because of a circular dependency. */
    bool inParallelSection() const;

    /* For use by our owned buffers and for testing. */
    void setAboutToOverflow();

    void addSizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf, JS::GCSizes *sizes);
};

} /* namespace gc */
} /* namespace js */

#endif /* JSGC_GENERATIONAL */

#endif /* gc_StoreBuffer_h */