js/src/gc/StoreBuffer.h

author       Michael Schloh von Bennewitz <michael@schloh.com>
date         Wed, 31 Dec 2014 06:09:35 +0100
changeset 0  6474c204b198
permissions  -rw-r--r--

Cloned upstream origin tor-browser at tor-browser-31.3.0esr-4.5-1-build1,
revision ID fc1c9ff7c1b2defdbc039f12214767608f46423f, for hacking purposes.

/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef gc_StoreBuffer_h
#define gc_StoreBuffer_h

#ifdef JSGC_GENERATIONAL

#ifndef JSGC_USE_EXACT_ROOTING
# error "Generational GC requires exact rooting."
#endif

#include "mozilla/DebugOnly.h"
#include "mozilla/ReentrancyGuard.h"

#include "jsalloc.h"

#include "ds/LifoAlloc.h"
#include "gc/Nursery.h"
#include "gc/Tracer.h"
#include "js/MemoryMetrics.h"

namespace js {

void
CrashAtUnhandlableOOM(const char *reason);

namespace gc {

/*
 * BufferableRef represents an abstract reference for use in the generational
 * GC's remembered set. Entries in the store buffer that cannot be represented
 * with the simple pointer-to-a-pointer scheme must derive from this class and
 * use the generic store buffer interface.
 */
class BufferableRef
{
  public:
    virtual void mark(JSTracer *trc) = 0;
    bool maybeInRememberedSet(const Nursery &) const { return true; }
};
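
/*
 * A hedged usage sketch (ExampleRef is illustrative, not part of this
 * header; MarkObjectUnbarriered stands in for whatever tracing helper the
 * edge requires): a subclass captures the state its mark() needs and is
 * copied by value into the generic buffer, so it should stay small and
 * copyable.
 *
 *   class ExampleRef : public BufferableRef {
 *       JSObject **edge;
 *     public:
 *       explicit ExampleRef(JSObject **e) : edge(e) {}
 *       virtual void mark(JSTracer *trc) {
 *           MarkObjectUnbarriered(trc, edge, "ExampleRef");
 *       }
 *   };
 */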

/*
 * HashKeyRef represents a reference to a HashMap key. This should normally
 * be used through the HashTableWriteBarrierPost function.
 */
template <typename Map, typename Key>
class HashKeyRef : public BufferableRef
{
    Map *map;
    Key key;

  public:
    HashKeyRef(Map *m, const Key &k) : map(m), key(k) {}

    void mark(JSTracer *trc) {
        Key prior = key;
        typename Map::Ptr p = map->lookup(key);
        if (!p)
            return;
        trc->setTracingLocation(&*p);
        Mark(trc, &key, "HashKeyRef");
        map->rekeyIfMoved(prior, key);
    }
};
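
/*
 * A hedged sketch of the typical flow (the map type and runtime field names
 * are illustrative): after writing a nursery-allocated key into a map, the
 * post barrier enqueues a HashKeyRef so a minor GC can retrace the key and
 * rekey the entry if its referent moves.
 *
 *   if (IsInsideNursery(rt, key))
 *       rt->gcStoreBuffer.putGeneric(HashKeyRef<MyMap, JSObject *>(&map, key));
 */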

typedef HashSet<void *, PointerHasher<void *, 3>, SystemAllocPolicy> EdgeSet;

/* The size of a single block of store buffer storage space. */
static const size_t LifoAllocBlockSize = 1 << 16; /* 64KiB */

/*
 * The StoreBuffer observes all writes that occur in the system and performs
 * efficient filtering of them to derive a remembered set for nursery GC.
 */
class StoreBuffer
{
    friend class mozilla::ReentrancyGuard;

    /*
     * The minimum free space that must remain in the current chunk; below
     * this threshold the buffer reports that it is about to overflow.
     */
    static const size_t MinAvailableSize = LifoAllocBlockSize / 8;

    /*
     * This buffer holds only a single type of edge. Using this buffer is more
     * efficient than the generic buffer when many writes will be to the same
     * type of edge: e.g. Value or Cell*.
     */
    template<typename T>
    struct MonoTypeBuffer
    {
        LifoAlloc *storage_;
        size_t usedAtLastCompact_;

        explicit MonoTypeBuffer() : storage_(nullptr), usedAtLastCompact_(0) {}
        ~MonoTypeBuffer() { js_delete(storage_); }

        bool init() {
            if (!storage_)
                storage_ = js_new<LifoAlloc>(LifoAllocBlockSize);
            clear();
            return bool(storage_);
        }

        void clear() {
            if (!storage_)
                return;

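            /*
             * If the buffer saw any use, keep its chunks for quick reuse;
             * otherwise return the memory entirely.
             */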
            storage_->used() ? storage_->releaseAll() : storage_->freeAll();
            usedAtLastCompact_ = 0;
        }

        bool isAboutToOverflow() const {
            return !storage_->isEmpty() && storage_->availableInCurrentChunk() < MinAvailableSize;
        }

        void handleOverflow(StoreBuffer *owner);

        /* Compaction algorithms. */
        void compactRemoveDuplicates(StoreBuffer *owner);

        /*
         * Attempts to reduce the usage of the buffer by removing unnecessary
         * entries.
         */
        virtual void compact(StoreBuffer *owner);

        /* Compacts if any entries have been added since the last compaction. */
        void maybeCompact(StoreBuffer *owner);

        /* Add one item to the buffer. */
        void put(StoreBuffer *owner, const T &t) {
            JS_ASSERT(storage_);

            T *tp = storage_->new_<T>(t);
            if (!tp)
                CrashAtUnhandlableOOM("Failed to allocate for MonoTypeBuffer::put.");

            if (isAboutToOverflow())
                handleOverflow(owner);
        }

        /* Mark the source of all edges in the store buffer. */
        void mark(StoreBuffer *owner, JSTracer *trc);

        size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) {
            return storage_ ? storage_->sizeOfIncludingThis(mallocSizeOf) : 0;
        }

      private:
        MonoTypeBuffer &operator=(const MonoTypeBuffer& other) MOZ_DELETE;
    };

    /*
     * Overrides the MonoTypeBuffer to support pointers that may be moved in
     * memory outside of the GC's control.
     */
    template <typename T>
    struct RelocatableMonoTypeBuffer : public MonoTypeBuffer<T>
    {
        /* Override compaction to filter out removed items. */
        void compactMoved(StoreBuffer *owner);
        virtual void compact(StoreBuffer *owner) MOZ_OVERRIDE;

        /* Record a removal from the buffer. */
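        /*
         * A removal is recorded by appending the same edge with its low tag
         * bit set (see tagged() on the edge types); compactMoved() can then
         * cancel each tagged entry against its untagged partner instead of
         * searching the buffer at unput time.
         */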
        void unput(StoreBuffer *owner, const T &v) {
            MonoTypeBuffer<T>::put(owner, v.tagged());
        }
    };

    struct GenericBuffer
    {
        LifoAlloc *storage_;

        explicit GenericBuffer() : storage_(nullptr) {}
        ~GenericBuffer() { js_delete(storage_); }

        bool init() {
            if (!storage_)
                storage_ = js_new<LifoAlloc>(LifoAllocBlockSize);
            clear();
            return bool(storage_);
        }

        void clear() {
            if (!storage_)
                return;

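            /* As above: retain used chunks for reuse; free untouched ones. */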
            storage_->used() ? storage_->releaseAll() : storage_->freeAll();
        }

        bool isAboutToOverflow() const {
            return !storage_->isEmpty() && storage_->availableInCurrentChunk() < MinAvailableSize;
        }

        /* Mark all generic edges. */
        void mark(StoreBuffer *owner, JSTracer *trc);

        template <typename T>
        void put(StoreBuffer *owner, const T &t) {
            JS_ASSERT(storage_);

            /* Ensure T is derived from BufferableRef. */
            (void)static_cast<const BufferableRef*>(&t);

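            /*
             * Each record is a size word followed by a copy of the entry
             * itself, so that mark() can walk a heterogeneous sequence of
             * entries and invoke each one's virtual mark().
             */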
            unsigned size = sizeof(T);
            unsigned *sizep = storage_->newPod<unsigned>();
            if (!sizep)
                CrashAtUnhandlableOOM("Failed to allocate for GenericBuffer::put.");
            *sizep = size;

            T *tp = storage_->new_<T>(t);
            if (!tp)
                CrashAtUnhandlableOOM("Failed to allocate for GenericBuffer::put.");

            if (isAboutToOverflow())
                owner->setAboutToOverflow();
        }

        size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) {
            return storage_ ? storage_->sizeOfIncludingThis(mallocSizeOf) : 0;
        }

      private:
        GenericBuffer &operator=(const GenericBuffer& other) MOZ_DELETE;
    };

    template <typename Edge>
    struct PointerEdgeHasher
    {
        typedef Edge Lookup;
        static HashNumber hash(const Lookup &l) { return uintptr_t(l.edge) >> 3; }
        static bool match(const Edge &k, const Lookup &l) { return k == l; }
    };

    struct CellPtrEdge
    {
        Cell **edge;

        explicit CellPtrEdge(Cell **v) : edge(v) {}
        bool operator==(const CellPtrEdge &other) const { return edge == other.edge; }
        bool operator!=(const CellPtrEdge &other) const { return edge != other.edge; }

        bool maybeInRememberedSet(const Nursery &nursery) const {
            return !nursery.isInside(edge) && nursery.isInside(*edge);
        }

        void mark(JSTracer *trc);

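        /*
         * The low bit of the edge pointer marks unput (removal) records in
         * RelocatableMonoTypeBuffer; edge locations are word-aligned, so
         * the bit is otherwise unused.
         */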
        CellPtrEdge tagged() const { return CellPtrEdge((Cell **)(uintptr_t(edge) | 1)); }
        CellPtrEdge untagged() const { return CellPtrEdge((Cell **)(uintptr_t(edge) & ~1)); }
        bool isTagged() const { return bool(uintptr_t(edge) & 1); }

        typedef PointerEdgeHasher<CellPtrEdge> Hasher;
    };

    struct ValueEdge
    {
        JS::Value *edge;

        explicit ValueEdge(JS::Value *v) : edge(v) {}
        bool operator==(const ValueEdge &other) const { return edge == other.edge; }
        bool operator!=(const ValueEdge &other) const { return edge != other.edge; }

        void *deref() const { return edge->isGCThing() ? edge->toGCThing() : nullptr; }

        bool maybeInRememberedSet(const Nursery &nursery) const {
            return !nursery.isInside(edge) && nursery.isInside(deref());
        }

        void mark(JSTracer *trc);

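        /* Same low-bit removal tag as CellPtrEdge. */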
        ValueEdge tagged() const { return ValueEdge((JS::Value *)(uintptr_t(edge) | 1)); }
        ValueEdge untagged() const { return ValueEdge((JS::Value *)(uintptr_t(edge) & ~1)); }
        bool isTagged() const { return bool(uintptr_t(edge) & 1); }

        typedef PointerEdgeHasher<ValueEdge> Hasher;
    };

    struct SlotsEdge
    {
        // These definitions must match those in HeapSlot::Kind.
        const static int SlotKind = 0;
        const static int ElementKind = 1;

        uintptr_t objectAndKind_; // JSObject* | Kind
        int32_t start_;
        int32_t count_;

        SlotsEdge(JSObject *object, int kind, int32_t start, int32_t count)
          : objectAndKind_(uintptr_t(object) | kind), start_(start), count_(count)
        {
            JS_ASSERT((uintptr_t(object) & 1) == 0);
            JS_ASSERT(kind <= 1);
            JS_ASSERT(start >= 0);
            JS_ASSERT(count > 0);
        }

        JSObject *object() const { return reinterpret_cast<JSObject *>(objectAndKind_ & ~1); }
        int kind() const { return (int)(objectAndKind_ & 1); }

        bool operator==(const SlotsEdge &other) const {
            return objectAndKind_ == other.objectAndKind_ &&
                   start_ == other.start_ &&
                   count_ == other.count_;
        }

        bool operator!=(const SlotsEdge &other) const {
            return !(*this == other);
        }

        bool maybeInRememberedSet(const Nursery &nursery) const {
            return !nursery.isInside(object());
        }

        void mark(JSTracer *trc);

        struct Hasher {
            typedef SlotsEdge Lookup;
            static HashNumber hash(const Lookup &l) { return l.objectAndKind_ ^ l.start_ ^ l.count_; }
            static bool match(const SlotsEdge &k, const Lookup &l) { return k == l; }
        };
    };

    struct WholeCellEdges
    {
        Cell *edge;

        explicit WholeCellEdges(Cell *cell) : edge(cell) {
            JS_ASSERT(edge->isTenured());
        }

        bool operator==(const WholeCellEdges &other) const { return edge == other.edge; }
        bool operator!=(const WholeCellEdges &other) const { return edge != other.edge; }

        bool maybeInRememberedSet(const Nursery &nursery) const { return true; }

        static bool supportsDeduplication() { return true; }
        void *deduplicationKey() const { return (void *)edge; }

        void mark(JSTracer *trc);

        typedef PointerEdgeHasher<WholeCellEdges> Hasher;
    };

    template <typename Key>
    struct CallbackRef : public BufferableRef
    {
        typedef void (*MarkCallback)(JSTracer *trc, Key *key, void *data);

        CallbackRef(MarkCallback cb, Key *k, void *d) : callback(cb), key(k), data(d) {}

        virtual void mark(JSTracer *trc) {
            callback(trc, key, data);
        }

      private:
        MarkCallback callback;
        Key *key;
        void *data;
    };

    template <typename Edge>
    bool isOkayToUseBuffer(const Edge &edge) const {
        /*
         * Disabled store buffers may not have a valid state; e.g. when stored
         * inline in the ChunkTrailer.
         */
        if (!isEnabled())
            return false;

        /*
         * The concurrent parsing thread cannot validly insert into the buffer,
         * but it should not activate the re-entrancy guard either.
         */
        if (!CurrentThreadCanAccessRuntime(runtime_))
            return false;

        return true;
    }

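    /*
     * put/unput implement the post-barrier slow path: bail out when the
     * buffer is disabled or when the current thread may not touch the
     * runtime, then filter out edges that cannot be in the remembered set
     * before recording anything.
     */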
    template <typename Buffer, typename Edge>
    void put(Buffer &buffer, const Edge &edge) {
        if (!isOkayToUseBuffer(edge))
            return;
        mozilla::ReentrancyGuard g(*this);
        if (edge.maybeInRememberedSet(nursery_))
            buffer.put(this, edge);
    }

    template <typename Buffer, typename Edge>
    void unput(Buffer &buffer, const Edge &edge) {
        if (!isOkayToUseBuffer(edge))
            return;
        mozilla::ReentrancyGuard g(*this);
        buffer.unput(this, edge);
    }

    MonoTypeBuffer<ValueEdge> bufferVal;
    MonoTypeBuffer<CellPtrEdge> bufferCell;
    MonoTypeBuffer<SlotsEdge> bufferSlot;
    MonoTypeBuffer<WholeCellEdges> bufferWholeCell;
    RelocatableMonoTypeBuffer<ValueEdge> bufferRelocVal;
    RelocatableMonoTypeBuffer<CellPtrEdge> bufferRelocCell;
    GenericBuffer bufferGeneric;

    JSRuntime *runtime_;
    const Nursery &nursery_;

    bool aboutToOverflow_;
    bool enabled_;
    mozilla::DebugOnly<bool> entered; /* For ReentrancyGuard. */

  public:
    explicit StoreBuffer(JSRuntime *rt, const Nursery &nursery)
      : bufferVal(), bufferCell(), bufferSlot(), bufferWholeCell(),
        bufferRelocVal(), bufferRelocCell(), bufferGeneric(),
        runtime_(rt), nursery_(nursery), aboutToOverflow_(false), enabled_(false),
        entered(false)
    {
    }

    bool enable();
    void disable();
    bool isEnabled() const { return enabled_; }

    bool clear();

    /* Get the overflowed status. */
    bool isAboutToOverflow() const { return aboutToOverflow_; }

    /* Insert a single edge into the buffer/remembered set. */
    void putValue(JS::Value *valuep) { put(bufferVal, ValueEdge(valuep)); }
    void putCell(Cell **cellp) { put(bufferCell, CellPtrEdge(cellp)); }
    void putSlot(JSObject *obj, int kind, int32_t start, int32_t count) {
        put(bufferSlot, SlotsEdge(obj, kind, start, count));
    }
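
    /*
     * A whole-cell entry asks the minor GC to retrace every field of the
     * tenured cell, so a single entry per cell suffices; see
     * WholeCellEdges::deduplicationKey.
     */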
    void putWholeCell(Cell *cell) {
        JS_ASSERT(cell->isTenured());
        put(bufferWholeCell, WholeCellEdges(cell));
    }

    /* Insert or update a single edge in the Relocatable buffer. */
    void putRelocatableValue(JS::Value *valuep) { put(bufferRelocVal, ValueEdge(valuep)); }
    void putRelocatableCell(Cell **cellp) { put(bufferRelocCell, CellPtrEdge(cellp)); }
    void removeRelocatableValue(JS::Value *valuep) { unput(bufferRelocVal, ValueEdge(valuep)); }
    void removeRelocatableCell(Cell **cellp) { unput(bufferRelocCell, CellPtrEdge(cellp)); }

    /* Insert an entry into the generic buffer. */
    template <typename T>
    void putGeneric(const T &t) { put(bufferGeneric, t); }

    /* Insert or update a callback entry. */
    template <typename Key>
    void putCallback(void (*callback)(JSTracer *trc, Key *key, void *data), Key *key, void *data) {
        put(bufferGeneric, CallbackRef<Key>(callback, key, data));
    }
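
    /*
     * A hedged usage sketch (TraceMyKey, MyKey, and the surrounding state
     * are hypothetical, not part of this header):
     *
     *   static void TraceMyKey(JSTracer *trc, MyKey *key, void *data);
     *   ...
     *   storeBuffer.putCallback(TraceMyKey, &key, closureData);
     */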

    /* Methods to mark the source of all edges in the store buffer. */
    void markAll(JSTracer *trc);
    void markValues(JSTracer *trc) { bufferVal.mark(this, trc); }
    void markCells(JSTracer *trc) { bufferCell.mark(this, trc); }
    void markSlots(JSTracer *trc) { bufferSlot.mark(this, trc); }
    void markWholeCells(JSTracer *trc) { bufferWholeCell.mark(this, trc); }
    void markRelocatableValues(JSTracer *trc) { bufferRelocVal.mark(this, trc); }
    void markRelocatableCells(JSTracer *trc) { bufferRelocCell.mark(this, trc); }
    void markGenericEntries(JSTracer *trc) { bufferGeneric.mark(this, trc); }

    /* We cannot call InParallelSection directly because of a circular dependency. */
    bool inParallelSection() const;

    /* For use by our owned buffers and for testing. */
    void setAboutToOverflow();

    void addSizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf, JS::GCSizes *sizes);
};

} /* namespace gc */
} /* namespace js */

#endif /* JSGC_GENERATIONAL */

#endif /* gc_StoreBuffer_h */
