/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef gc_StoreBuffer_h
#define gc_StoreBuffer_h

#ifdef JSGC_GENERATIONAL

#ifndef JSGC_USE_EXACT_ROOTING
# error "Generational GC requires exact rooting."
#endif

#include "mozilla/DebugOnly.h"
#include "mozilla/ReentrancyGuard.h"

#include "jsalloc.h"

#include "ds/LifoAlloc.h"
#include "gc/Nursery.h"
#include "gc/Tracer.h"
#include "js/MemoryMetrics.h"

namespace js {

void
CrashAtUnhandlableOOM(const char *reason);

namespace gc {

/*
 * BufferableRef represents an abstract reference for use in the generational
 * GC's remembered set. Entries in the store buffer that cannot be represented
 * with the simple pointer-to-a-pointer scheme must derive from this class and
 * use the generic store buffer interface.
 */
class BufferableRef
{
  public:
    virtual void mark(JSTracer *trc) = 0;
    bool maybeInRememberedSet(const Nursery &) const { return true; }
};

/*
 * HashKeyRef represents a reference to a HashMap key. This should normally
 * be used through the HashTableWriteBarrierPost function.
 */
template <typename Map, typename Key>
class HashKeyRef : public BufferableRef
{
    Map *map;
    Key key;

  public:
    HashKeyRef(Map *m, const Key &k) : map(m), key(k) {}

    void mark(JSTracer *trc) {
        Key prior = key;
        typename Map::Ptr p = map->lookup(key);
        if (!p)
            return;
        trc->setTracingLocation(&*p);
        Mark(trc, &key, "HashKeyRef");
        map->rekeyIfMoved(prior, key);
    }
};

/*
 * NOTE(review): the hashed element type of EdgeSet was lost in transit;
 * reconstructed as a pointer set keyed on the low-entropy-stripped address.
 * Confirm against upstream gc/StoreBuffer.h.
 */
typedef HashSet<void *, PointerHasher<void *, 3>, SystemAllocPolicy> EdgeSet;

/* The size of a single block of store buffer storage space. */
static const size_t LifoAllocBlockSize = 1 << 16; /* 64KiB */

/*
 * The StoreBuffer observes all writes that occur in the system and performs
 * efficient filtering of them to derive a remembered set for nursery GC.
 */
class StoreBuffer
{
    friend class mozilla::ReentrancyGuard;

    /* The size at which a block is about to overflow. */
    static const size_t MinAvailableSize = (size_t)(LifoAllocBlockSize * 1.0 / 8.0);

    /*
     * This buffer holds only a single type of edge. Using this buffer is more
     * efficient than the generic buffer when many writes will be to the same
     * type of edge: e.g. Value or Cell*.
     */
    template <typename T>
    struct MonoTypeBuffer
    {
        LifoAlloc *storage_;
        size_t usedAtLastCompact_;

        explicit MonoTypeBuffer() : storage_(nullptr), usedAtLastCompact_(0) {}
        ~MonoTypeBuffer() { js_delete(storage_); }

        bool init() {
            if (!storage_)
                storage_ = js_new<LifoAlloc>(LifoAllocBlockSize);
            clear();
            return bool(storage_);
        }

        void clear() {
            if (!storage_)
                return;

            storage_->used() ? storage_->releaseAll() : storage_->freeAll();
            usedAtLastCompact_ = 0;
        }

        bool isAboutToOverflow() const {
            return !storage_->isEmpty() && storage_->availableInCurrentChunk() < MinAvailableSize;
        }

        void handleOverflow(StoreBuffer *owner);

        /* Compaction algorithms. */
        void compactRemoveDuplicates(StoreBuffer *owner);

        /*
         * Attempts to reduce the usage of the buffer by removing unnecessary
         * entries.
         */
        virtual void compact(StoreBuffer *owner);

        /* Compacts if any entries have been added since the last compaction. */
        void maybeCompact(StoreBuffer *owner);

        /* Add one item to the buffer. */
        void put(StoreBuffer *owner, const T &t) {
            JS_ASSERT(storage_);

            T *tp = storage_->new_<T>(t);
            if (!tp)
                CrashAtUnhandlableOOM("Failed to allocate for MonoTypeBuffer::put.");

            if (isAboutToOverflow())
                handleOverflow(owner);
        }

        /* Mark the source of all edges in the store buffer. */
        void mark(StoreBuffer *owner, JSTracer *trc);

        size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) {
            return storage_ ? storage_->sizeOfIncludingThis(mallocSizeOf) : 0;
        }

      private:
        MonoTypeBuffer &operator=(const MonoTypeBuffer& other) MOZ_DELETE;
    };

    /*
     * Overrides the MonoTypeBuffer to support pointers that may be moved in
     * memory outside of the GC's control.
     */
    template <typename T>
    struct RelocatableMonoTypeBuffer : public MonoTypeBuffer<T>
    {
        /* Override compaction to filter out removed items. */
        void compactMoved(StoreBuffer *owner);
        virtual void compact(StoreBuffer *owner) MOZ_OVERRIDE;

        /* Record a removal from the buffer. */
        void unput(StoreBuffer *owner, const T &v) {
            MonoTypeBuffer<T>::put(owner, v.tagged());
        }
    };

    struct GenericBuffer
    {
        LifoAlloc *storage_;

        explicit GenericBuffer() : storage_(nullptr) {}
        ~GenericBuffer() { js_delete(storage_); }

        bool init() {
            if (!storage_)
                storage_ = js_new<LifoAlloc>(LifoAllocBlockSize);
            clear();
            return bool(storage_);
        }

        void clear() {
            if (!storage_)
                return;

            storage_->used() ? storage_->releaseAll() : storage_->freeAll();
        }

        bool isAboutToOverflow() const {
            return !storage_->isEmpty() && storage_->availableInCurrentChunk() < MinAvailableSize;
        }

        /* Mark all generic edges. */
        void mark(StoreBuffer *owner, JSTracer *trc);

        template <typename T>
        void put(StoreBuffer *owner, const T &t) {
            JS_ASSERT(storage_);

            /* Ensure T is derived from BufferableRef. */
            (void)static_cast<const BufferableRef *>(&t);

            /* Each entry is stored as its size followed by the payload. */
            unsigned size = sizeof(T);
            unsigned *sizep = storage_->newPod<unsigned>();
            if (!sizep)
                CrashAtUnhandlableOOM("Failed to allocate for GenericBuffer::put.");
            *sizep = size;

            T *tp = storage_->new_<T>(t);
            if (!tp)
                CrashAtUnhandlableOOM("Failed to allocate for GenericBuffer::put.");

            if (isAboutToOverflow())
                owner->setAboutToOverflow();
        }

        size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) {
            return storage_ ? storage_->sizeOfIncludingThis(mallocSizeOf) : 0;
        }

      private:
        GenericBuffer &operator=(const GenericBuffer& other) MOZ_DELETE;
    };

    template <typename Edge>
    struct PointerEdgeHasher
    {
        typedef Edge Lookup;
        static HashNumber hash(const Lookup &l) { return uintptr_t(l.edge) >> 3; }
        static bool match(const Edge &k, const Lookup &l) { return k == l; }
    };

    struct CellPtrEdge
    {
        Cell **edge;

        explicit CellPtrEdge(Cell **v) : edge(v) {}
        bool operator==(const CellPtrEdge &other) const { return edge == other.edge; }
        bool operator!=(const CellPtrEdge &other) const { return edge != other.edge; }

        bool maybeInRememberedSet(const Nursery &nursery) const {
            return !nursery.isInside(edge) && nursery.isInside(*edge);
        }

        void mark(JSTracer *trc);

        /* The low bit of the edge pointer records removal for relocatable buffers. */
        CellPtrEdge tagged() const { return CellPtrEdge((Cell **)(uintptr_t(edge) | 1)); }
        CellPtrEdge untagged() const { return CellPtrEdge((Cell **)(uintptr_t(edge) & ~1)); }
        bool isTagged() const { return bool(uintptr_t(edge) & 1); }

        typedef PointerEdgeHasher<CellPtrEdge> Hasher;
    };

    struct ValueEdge
    {
        JS::Value *edge;

        explicit ValueEdge(JS::Value *v) : edge(v) {}
        bool operator==(const ValueEdge &other) const { return edge == other.edge; }
        bool operator!=(const ValueEdge &other) const { return edge != other.edge; }

        void *deref() const { return edge->isGCThing() ? edge->toGCThing() : nullptr; }

        bool maybeInRememberedSet(const Nursery &nursery) const {
            return !nursery.isInside(edge) && nursery.isInside(deref());
        }

        void mark(JSTracer *trc);

        /* The low bit of the edge pointer records removal for relocatable buffers. */
        ValueEdge tagged() const { return ValueEdge((JS::Value *)(uintptr_t(edge) | 1)); }
        ValueEdge untagged() const { return ValueEdge((JS::Value *)(uintptr_t(edge) & ~1)); }
        bool isTagged() const { return bool(uintptr_t(edge) & 1); }

        typedef PointerEdgeHasher<ValueEdge> Hasher;
    };

    struct SlotsEdge
    {
        // These definitions must match those in HeapSlot::Kind.
        const static int SlotKind = 0;
        const static int ElementKind = 1;

        uintptr_t objectAndKind_; // JSObject* | Kind
        int32_t start_;
        int32_t count_;

        SlotsEdge(JSObject *object, int kind, int32_t start, int32_t count)
          : objectAndKind_(uintptr_t(object) | kind), start_(start), count_(count)
        {
            JS_ASSERT((uintptr_t(object) & 1) == 0);
            JS_ASSERT(kind <= 1);
            JS_ASSERT(start >= 0);
            JS_ASSERT(count > 0);
        }

        JSObject *object() const { return reinterpret_cast<JSObject *>(objectAndKind_ & ~1); }
        int kind() const { return (int)(objectAndKind_ & 1); }

        bool operator==(const SlotsEdge &other) const {
            return objectAndKind_ == other.objectAndKind_ &&
                   start_ == other.start_ &&
                   count_ == other.count_;
        }

        bool operator!=(const SlotsEdge &other) const {
            return !(*this == other);
        }

        bool maybeInRememberedSet(const Nursery &nursery) const {
            return !nursery.isInside(object());
        }

        void mark(JSTracer *trc);

        typedef struct {
            typedef SlotsEdge Lookup;
            static HashNumber hash(const Lookup &l) { return l.objectAndKind_ ^ l.start_ ^ l.count_; }
            static bool match(const SlotsEdge &k, const Lookup &l) { return k == l; }
        } Hasher;
    };

    struct WholeCellEdges
    {
        Cell *edge;

        explicit WholeCellEdges(Cell *cell) : edge(cell) {
            JS_ASSERT(edge->isTenured());
        }

        bool operator==(const WholeCellEdges &other) const { return edge == other.edge; }
        bool operator!=(const WholeCellEdges &other) const { return edge != other.edge; }

        bool maybeInRememberedSet(const Nursery &nursery) const { return true; }

        static bool supportsDeduplication() { return true; }
        void *deduplicationKey() const { return (void *)edge; }

        void mark(JSTracer *trc);

        typedef PointerEdgeHasher<WholeCellEdges> Hasher;
    };

    template <typename Key>
    struct CallbackRef : public BufferableRef
    {
        typedef void (*MarkCallback)(JSTracer *trc, Key *key, void *data);

        CallbackRef(MarkCallback cb, Key *k, void *d) : callback(cb), key(k), data(d) {}

        virtual void mark(JSTracer *trc) {
            callback(trc, key, data);
        }

      private:
        MarkCallback callback;
        Key *key;
        void *data;
    };

    template <typename Edge>
    bool isOkayToUseBuffer(const Edge &edge) const {
        /*
         * Disabled store buffers may not have a valid state; e.g. when stored
         * inline in the ChunkTrailer.
         */
        if (!isEnabled())
            return false;

        /*
         * The concurrent parsing thread cannot validly insert into the buffer,
         * but it should not activate the re-entrancy guard either.
         */
        if (!CurrentThreadCanAccessRuntime(runtime_))
            return false;

        return true;
    }

    template <typename Buffer, typename Edge>
    void put(Buffer &buffer, const Edge &edge) {
        if (!isOkayToUseBuffer(edge))
            return;
        mozilla::ReentrancyGuard g(*this);
        if (edge.maybeInRememberedSet(nursery_))
            buffer.put(this, edge);
    }

    template <typename Buffer, typename Edge>
    void unput(Buffer &buffer, const Edge &edge) {
        if (!isOkayToUseBuffer(edge))
            return;
        mozilla::ReentrancyGuard g(*this);
        buffer.unput(this, edge);
    }

    /* Element types below are pinned by the put*/
    /* wrappers in the public API (e.g. put(bufferVal, ValueEdge(valuep))). */
    MonoTypeBuffer<ValueEdge> bufferVal;
    MonoTypeBuffer<CellPtrEdge> bufferCell;
    MonoTypeBuffer<SlotsEdge> bufferSlot;
    MonoTypeBuffer<WholeCellEdges> bufferWholeCell;
    RelocatableMonoTypeBuffer<ValueEdge> bufferRelocVal;
    RelocatableMonoTypeBuffer<CellPtrEdge> bufferRelocCell;
    GenericBuffer bufferGeneric;

    JSRuntime *runtime_;
    const Nursery &nursery_;

    bool aboutToOverflow_;
    bool enabled_;
    mozilla::DebugOnly<bool> entered; /* For ReentrancyGuard. */

  public:
    explicit StoreBuffer(JSRuntime *rt, const Nursery &nursery)
      : bufferVal(), bufferCell(), bufferSlot(), bufferWholeCell(),
        bufferRelocVal(), bufferRelocCell(), bufferGeneric(),
        runtime_(rt), nursery_(nursery), aboutToOverflow_(false), enabled_(false),
        entered(false)
    {
    }

    bool enable();
    void disable();
    bool isEnabled() const { return enabled_; }

    bool clear();

    /* Get the overflowed status. */
    bool isAboutToOverflow() const { return aboutToOverflow_; }

    /* Insert a single edge into the buffer/remembered set. */
    void putValue(JS::Value *valuep) { put(bufferVal, ValueEdge(valuep)); }
    void putCell(Cell **cellp) { put(bufferCell, CellPtrEdge(cellp)); }
    void putSlot(JSObject *obj, int kind, int32_t start, int32_t count) {
        put(bufferSlot, SlotsEdge(obj, kind, start, count));
    }
    void putWholeCell(Cell *cell) {
        JS_ASSERT(cell->isTenured());
        put(bufferWholeCell, WholeCellEdges(cell));
    }

    /* Insert or update a single edge in the Relocatable buffer. */
    void putRelocatableValue(JS::Value *valuep) { put(bufferRelocVal, ValueEdge(valuep)); }
    void putRelocatableCell(Cell **cellp) { put(bufferRelocCell, CellPtrEdge(cellp)); }
    void removeRelocatableValue(JS::Value *valuep) { unput(bufferRelocVal, ValueEdge(valuep)); }
    void removeRelocatableCell(Cell **cellp) { unput(bufferRelocCell, CellPtrEdge(cellp)); }

    /* Insert an entry into the generic buffer. */
    template <typename T>
    void putGeneric(const T &t) { put(bufferGeneric, t); }

    /* Insert or update a callback entry. */
    template <typename Key>
    void putCallback(void (*callback)(JSTracer *trc, Key *key, void *data), Key *key, void *data) {
        put(bufferGeneric, CallbackRef<Key>(callback, key, data));
    }

    /* Methods to mark the source of all edges in the store buffer. */
    void markAll(JSTracer *trc);
    void markValues(JSTracer *trc)            { bufferVal.mark(this, trc); }
    void markCells(JSTracer *trc)             { bufferCell.mark(this, trc); }
    void markSlots(JSTracer *trc)             { bufferSlot.mark(this, trc); }
    void markWholeCells(JSTracer *trc)        { bufferWholeCell.mark(this, trc); }
    void markRelocatableValues(JSTracer *trc) { bufferRelocVal.mark(this, trc); }
    void markRelocatableCells(JSTracer *trc)  { bufferRelocCell.mark(this, trc); }
    void markGenericEntries(JSTracer *trc)    { bufferGeneric.mark(this, trc); }

    /* We cannot call InParallelSection directly because of a circular dependency. */
    bool inParallelSection() const;

    /* For use by our owned buffers and for testing. */
    void setAboutToOverflow();

    void addSizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf, JS::GCSizes *sizes);
};

} /* namespace gc */
} /* namespace js */

#endif /* JSGC_GENERATIONAL */

#endif /* gc_StoreBuffer_h */