js/src/gc/StoreBuffer.h

changeset 0
6474c204b198
     1.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
     1.2 +++ b/js/src/gc/StoreBuffer.h	Wed Dec 31 06:09:35 2014 +0100
     1.3 @@ -0,0 +1,485 @@
     1.4 +/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
     1.5 + * vim: set ts=8 sts=4 et sw=4 tw=99:
     1.6 + * This Source Code Form is subject to the terms of the Mozilla Public
     1.7 + * License, v. 2.0. If a copy of the MPL was not distributed with this
     1.8 + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
     1.9 +
    1.10 +#ifndef gc_StoreBuffer_h
    1.11 +#define gc_StoreBuffer_h
    1.12 +
    1.13 +#ifdef JSGC_GENERATIONAL
    1.14 +
    1.15 +#ifndef JSGC_USE_EXACT_ROOTING
    1.16 +# error "Generational GC requires exact rooting."
    1.17 +#endif
    1.18 +
    1.19 +#include "mozilla/DebugOnly.h"
    1.20 +#include "mozilla/ReentrancyGuard.h"
    1.21 +
    1.22 +#include "jsalloc.h"
    1.23 +
    1.24 +#include "ds/LifoAlloc.h"
    1.25 +#include "gc/Nursery.h"
    1.26 +#include "gc/Tracer.h"
    1.27 +#include "js/MemoryMetrics.h"
    1.28 +
    1.29 +namespace js {
    1.30 +
    1.31 +void
    1.32 +CrashAtUnhandlableOOM(const char *reason);
    1.33 +
    1.34 +namespace gc {
    1.35 +
    1.36 +/*
    1.37 + * BufferableRef represents an abstract reference for use in the generational
    1.38 + * GC's remembered set. Entries in the store buffer that cannot be represented
    1.39 + * with the simple pointer-to-a-pointer scheme must derive from this class and
    1.40 + * use the generic store buffer interface.
    1.41 + */
    1.42 +class BufferableRef
    1.43 +{
    1.44 +  public:
    1.45 +    virtual void mark(JSTracer *trc) = 0;
    1.46 +    bool maybeInRememberedSet(const Nursery &) const { return true; }
    1.47 +};
    1.48 +
    1.49 +/*
    1.50 + * HashKeyRef represents a reference to a HashMap key. This should normally
    1.51 + * be used through the HashTableWriteBarrierPost function.
    1.52 + */
    1.53 +template <typename Map, typename Key>
    1.54 +class HashKeyRef : public BufferableRef
    1.55 +{
    1.56 +    Map *map;
    1.57 +    Key key;
    1.58 +
    1.59 +  public:
    1.60 +    HashKeyRef(Map *m, const Key &k) : map(m), key(k) {}
    1.61 +
    1.62 +    void mark(JSTracer *trc) {
    1.63 +        Key prior = key;
    1.64 +        typename Map::Ptr p = map->lookup(key);
    1.65 +        if (!p)
    1.66 +            return;
    1.67 +        trc->setTracingLocation(&*p);
    1.68 +        Mark(trc, &key, "HashKeyRef");
    1.69 +        map->rekeyIfMoved(prior, key);
    1.70 +    }
    1.71 +};
    1.72 +
/*
 * A hash set of raw pointers using the system allocator. NOTE(review): the
 * consumers live in the corresponding .cpp; presumably used when compacting
 * buffers to de-duplicate edges — confirm against StoreBuffer.cpp.
 */
typedef HashSet<void *, PointerHasher<void *, 3>, SystemAllocPolicy> EdgeSet;

/* The size of a single block of store buffer storage space. */
static const size_t LifoAllocBlockSize = 1 << 16; /* 64KiB */
    1.77 +
    1.78 +/*
    1.79 + * The StoreBuffer observes all writes that occur in the system and performs
    1.80 + * efficient filtering of them to derive a remembered set for nursery GC.
    1.81 + */
    1.82 +class StoreBuffer
    1.83 +{
    1.84 +    friend class mozilla::ReentrancyGuard;
    1.85 +
    1.86 +    /* The size at which a block is about to overflow. */
    1.87 +    static const size_t MinAvailableSize = (size_t)(LifoAllocBlockSize * 1.0 / 8.0);
    1.88 +
    1.89 +    /*
    1.90 +     * This buffer holds only a single type of edge. Using this buffer is more
    1.91 +     * efficient than the generic buffer when many writes will be to the same
    1.92 +     * type of edge: e.g. Value or Cell*.
    1.93 +     */
    1.94 +    template<typename T>
    1.95 +    struct MonoTypeBuffer
    1.96 +    {
    1.97 +        LifoAlloc *storage_;
    1.98 +        size_t usedAtLastCompact_;
    1.99 +
   1.100 +        explicit MonoTypeBuffer() : storage_(nullptr), usedAtLastCompact_(0) {}
   1.101 +        ~MonoTypeBuffer() { js_delete(storage_); }
   1.102 +
   1.103 +        bool init() {
   1.104 +            if (!storage_)
   1.105 +                storage_ = js_new<LifoAlloc>(LifoAllocBlockSize);
   1.106 +            clear();
   1.107 +            return bool(storage_);
   1.108 +        }
   1.109 +
   1.110 +        void clear() {
   1.111 +            if (!storage_)
   1.112 +                return;
   1.113 +
   1.114 +            storage_->used() ? storage_->releaseAll() : storage_->freeAll();
   1.115 +            usedAtLastCompact_ = 0;
   1.116 +        }
   1.117 +
   1.118 +        bool isAboutToOverflow() const {
   1.119 +            return !storage_->isEmpty() && storage_->availableInCurrentChunk() < MinAvailableSize;
   1.120 +        }
   1.121 +
   1.122 +        void handleOverflow(StoreBuffer *owner);
   1.123 +
   1.124 +        /* Compaction algorithms. */
   1.125 +        void compactRemoveDuplicates(StoreBuffer *owner);
   1.126 +
   1.127 +        /*
   1.128 +         * Attempts to reduce the usage of the buffer by removing unnecessary
   1.129 +         * entries.
   1.130 +         */
   1.131 +        virtual void compact(StoreBuffer *owner);
   1.132 +
   1.133 +        /* Compacts if any entries have been added since the last compaction. */
   1.134 +        void maybeCompact(StoreBuffer *owner);
   1.135 +
   1.136 +        /* Add one item to the buffer. */
   1.137 +        void put(StoreBuffer *owner, const T &t) {
   1.138 +            JS_ASSERT(storage_);
   1.139 +
   1.140 +            T *tp = storage_->new_<T>(t);
   1.141 +            if (!tp)
   1.142 +                CrashAtUnhandlableOOM("Failed to allocate for MonoTypeBuffer::put.");
   1.143 +
   1.144 +            if (isAboutToOverflow())
   1.145 +                handleOverflow(owner);
   1.146 +        }
   1.147 +
   1.148 +        /* Mark the source of all edges in the store buffer. */
   1.149 +        void mark(StoreBuffer *owner, JSTracer *trc);
   1.150 +
   1.151 +        size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) {
   1.152 +            return storage_ ? storage_->sizeOfIncludingThis(mallocSizeOf) : 0;
   1.153 +        }
   1.154 +
   1.155 +      private:
   1.156 +        MonoTypeBuffer &operator=(const MonoTypeBuffer& other) MOZ_DELETE;
   1.157 +    };
   1.158 +
   1.159 +    /*
   1.160 +     * Overrides the MonoTypeBuffer to support pointers that may be moved in
   1.161 +     * memory outside of the GC's control.
   1.162 +     */
   1.163 +    template <typename T>
   1.164 +    struct RelocatableMonoTypeBuffer : public MonoTypeBuffer<T>
   1.165 +    {
   1.166 +        /* Override compaction to filter out removed items. */
   1.167 +        void compactMoved(StoreBuffer *owner);
   1.168 +        virtual void compact(StoreBuffer *owner) MOZ_OVERRIDE;
   1.169 +
   1.170 +        /* Record a removal from the buffer. */
   1.171 +        void unput(StoreBuffer *owner, const T &v) {
   1.172 +            MonoTypeBuffer<T>::put(owner, v.tagged());
   1.173 +        }
   1.174 +    };
   1.175 +
   1.176 +    struct GenericBuffer
   1.177 +    {
   1.178 +        LifoAlloc *storage_;
   1.179 +
   1.180 +        explicit GenericBuffer() : storage_(nullptr) {}
   1.181 +        ~GenericBuffer() { js_delete(storage_); }
   1.182 +
   1.183 +        bool init() {
   1.184 +            if (!storage_)
   1.185 +                storage_ = js_new<LifoAlloc>(LifoAllocBlockSize);
   1.186 +            clear();
   1.187 +            return bool(storage_);
   1.188 +        }
   1.189 +
   1.190 +        void clear() {
   1.191 +            if (!storage_)
   1.192 +                return;
   1.193 +
   1.194 +            storage_->used() ? storage_->releaseAll() : storage_->freeAll();
   1.195 +        }
   1.196 +
   1.197 +        bool isAboutToOverflow() const {
   1.198 +            return !storage_->isEmpty() && storage_->availableInCurrentChunk() < MinAvailableSize;
   1.199 +        }
   1.200 +
   1.201 +        /* Mark all generic edges. */
   1.202 +        void mark(StoreBuffer *owner, JSTracer *trc);
   1.203 +
   1.204 +        template <typename T>
   1.205 +        void put(StoreBuffer *owner, const T &t) {
   1.206 +            JS_ASSERT(storage_);
   1.207 +
   1.208 +            /* Ensure T is derived from BufferableRef. */
   1.209 +            (void)static_cast<const BufferableRef*>(&t);
   1.210 +
   1.211 +            unsigned size = sizeof(T);
   1.212 +            unsigned *sizep = storage_->newPod<unsigned>();
   1.213 +            if (!sizep)
   1.214 +                CrashAtUnhandlableOOM("Failed to allocate for GenericBuffer::put.");
   1.215 +            *sizep = size;
   1.216 +
   1.217 +            T *tp = storage_->new_<T>(t);
   1.218 +            if (!tp)
   1.219 +                CrashAtUnhandlableOOM("Failed to allocate for GenericBuffer::put.");
   1.220 +
   1.221 +            if (isAboutToOverflow())
   1.222 +                owner->setAboutToOverflow();
   1.223 +        }
   1.224 +
   1.225 +        size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) {
   1.226 +            return storage_ ? storage_->sizeOfIncludingThis(mallocSizeOf) : 0;
   1.227 +        }
   1.228 +
   1.229 +      private:
   1.230 +        GenericBuffer &operator=(const GenericBuffer& other) MOZ_DELETE;
   1.231 +    };
   1.232 +
   1.233 +    template <typename Edge>
   1.234 +    struct PointerEdgeHasher
   1.235 +    {
   1.236 +        typedef Edge Lookup;
   1.237 +        static HashNumber hash(const Lookup &l) { return uintptr_t(l.edge) >> 3; }
   1.238 +        static bool match(const Edge &k, const Lookup &l) { return k == l; }
   1.239 +    };
   1.240 +
   1.241 +    struct CellPtrEdge
   1.242 +    {
   1.243 +        Cell **edge;
   1.244 +
   1.245 +        explicit CellPtrEdge(Cell **v) : edge(v) {}
   1.246 +        bool operator==(const CellPtrEdge &other) const { return edge == other.edge; }
   1.247 +        bool operator!=(const CellPtrEdge &other) const { return edge != other.edge; }
   1.248 +
   1.249 +        bool maybeInRememberedSet(const Nursery &nursery) const {
   1.250 +            return !nursery.isInside(edge) && nursery.isInside(*edge);
   1.251 +        }
   1.252 +
   1.253 +        void mark(JSTracer *trc);
   1.254 +
   1.255 +        CellPtrEdge tagged() const { return CellPtrEdge((Cell **)(uintptr_t(edge) | 1)); }
   1.256 +        CellPtrEdge untagged() const { return CellPtrEdge((Cell **)(uintptr_t(edge) & ~1)); }
   1.257 +        bool isTagged() const { return bool(uintptr_t(edge) & 1); }
   1.258 +
   1.259 +        typedef PointerEdgeHasher<CellPtrEdge> Hasher;
   1.260 +    };
   1.261 +
   1.262 +    struct ValueEdge
   1.263 +    {
   1.264 +        JS::Value *edge;
   1.265 +
   1.266 +        explicit ValueEdge(JS::Value *v) : edge(v) {}
   1.267 +        bool operator==(const ValueEdge &other) const { return edge == other.edge; }
   1.268 +        bool operator!=(const ValueEdge &other) const { return edge != other.edge; }
   1.269 +
   1.270 +        void *deref() const { return edge->isGCThing() ? edge->toGCThing() : nullptr; }
   1.271 +
   1.272 +        bool maybeInRememberedSet(const Nursery &nursery) const {
   1.273 +            return !nursery.isInside(edge) && nursery.isInside(deref());
   1.274 +        }
   1.275 +
   1.276 +        void mark(JSTracer *trc);
   1.277 +
   1.278 +        ValueEdge tagged() const { return ValueEdge((JS::Value *)(uintptr_t(edge) | 1)); }
   1.279 +        ValueEdge untagged() const { return ValueEdge((JS::Value *)(uintptr_t(edge) & ~1)); }
   1.280 +        bool isTagged() const { return bool(uintptr_t(edge) & 1); }
   1.281 +
   1.282 +        typedef PointerEdgeHasher<ValueEdge> Hasher;
   1.283 +    };
   1.284 +
   1.285 +    struct SlotsEdge
   1.286 +    {
   1.287 +        // These definitions must match those in HeapSlot::Kind.
   1.288 +        const static int SlotKind = 0;
   1.289 +        const static int ElementKind = 1;
   1.290 +
   1.291 +        uintptr_t objectAndKind_; // JSObject* | Kind
   1.292 +        int32_t start_;
   1.293 +        int32_t count_;
   1.294 +
   1.295 +        SlotsEdge(JSObject *object, int kind, int32_t start, int32_t count)
   1.296 +          : objectAndKind_(uintptr_t(object) | kind), start_(start), count_(count)
   1.297 +        {
   1.298 +            JS_ASSERT((uintptr_t(object) & 1) == 0);
   1.299 +            JS_ASSERT(kind <= 1);
   1.300 +            JS_ASSERT(start >= 0);
   1.301 +            JS_ASSERT(count > 0);
   1.302 +        }
   1.303 +
   1.304 +        JSObject *object() const { return reinterpret_cast<JSObject *>(objectAndKind_ & ~1); }
   1.305 +        int kind() const { return (int)(objectAndKind_ & 1); }
   1.306 +
   1.307 +        bool operator==(const SlotsEdge &other) const {
   1.308 +            return objectAndKind_ == other.objectAndKind_ &&
   1.309 +                   start_ == other.start_ &&
   1.310 +                   count_ == other.count_;
   1.311 +        }
   1.312 +
   1.313 +        bool operator!=(const SlotsEdge &other) const {
   1.314 +            return !(*this == other);
   1.315 +        }
   1.316 +
   1.317 +        bool maybeInRememberedSet(const Nursery &nursery) const {
   1.318 +            return !nursery.isInside(object());
   1.319 +        }
   1.320 +
   1.321 +        void mark(JSTracer *trc);
   1.322 +
   1.323 +        typedef struct {
   1.324 +            typedef SlotsEdge Lookup;
   1.325 +            static HashNumber hash(const Lookup &l) { return l.objectAndKind_ ^ l.start_ ^ l.count_; }
   1.326 +            static bool match(const SlotsEdge &k, const Lookup &l) { return k == l; }
   1.327 +        } Hasher;
   1.328 +    };
   1.329 +
   1.330 +    struct WholeCellEdges
   1.331 +    {
   1.332 +        Cell *edge;
   1.333 +
   1.334 +        explicit WholeCellEdges(Cell *cell) : edge(cell) {
   1.335 +            JS_ASSERT(edge->isTenured());
   1.336 +        }
   1.337 +
   1.338 +        bool operator==(const WholeCellEdges &other) const { return edge == other.edge; }
   1.339 +        bool operator!=(const WholeCellEdges &other) const { return edge != other.edge; }
   1.340 +
   1.341 +        bool maybeInRememberedSet(const Nursery &nursery) const { return true; }
   1.342 +
   1.343 +        static bool supportsDeduplication() { return true; }
   1.344 +        void *deduplicationKey() const { return (void *)edge; }
   1.345 +
   1.346 +        void mark(JSTracer *trc);
   1.347 +
   1.348 +        typedef PointerEdgeHasher<WholeCellEdges> Hasher;
   1.349 +    };
   1.350 +
   1.351 +    template <typename Key>
   1.352 +    struct CallbackRef : public BufferableRef
   1.353 +    {
   1.354 +        typedef void (*MarkCallback)(JSTracer *trc, Key *key, void *data);
   1.355 +
   1.356 +        CallbackRef(MarkCallback cb, Key *k, void *d) : callback(cb), key(k), data(d) {}
   1.357 +
   1.358 +        virtual void mark(JSTracer *trc) {
   1.359 +            callback(trc, key, data);
   1.360 +        }
   1.361 +
   1.362 +      private:
   1.363 +        MarkCallback callback;
   1.364 +        Key *key;
   1.365 +        void *data;
   1.366 +    };
   1.367 +
   1.368 +    template <typename Edge>
   1.369 +    bool isOkayToUseBuffer(const Edge &edge) const {
   1.370 +        /*
   1.371 +         * Disabled store buffers may not have a valid state; e.g. when stored
   1.372 +         * inline in the ChunkTrailer.
   1.373 +         */
   1.374 +        if (!isEnabled())
   1.375 +            return false;
   1.376 +
   1.377 +        /*
   1.378 +         * The concurrent parsing thread cannot validly insert into the buffer,
   1.379 +         * but it should not activate the re-entrancy guard either.
   1.380 +         */
   1.381 +        if (!CurrentThreadCanAccessRuntime(runtime_))
   1.382 +            return false;
   1.383 +
   1.384 +        return true;
   1.385 +    }
   1.386 +
   1.387 +    template <typename Buffer, typename Edge>
   1.388 +    void put(Buffer &buffer, const Edge &edge) {
   1.389 +        if (!isOkayToUseBuffer(edge))
   1.390 +            return;
   1.391 +        mozilla::ReentrancyGuard g(*this);
   1.392 +        if (edge.maybeInRememberedSet(nursery_))
   1.393 +            buffer.put(this, edge);
   1.394 +    }
   1.395 +
   1.396 +    template <typename Buffer, typename Edge>
   1.397 +    void unput(Buffer &buffer, const Edge &edge) {
   1.398 +        if (!isOkayToUseBuffer(edge))
   1.399 +            return;
   1.400 +        mozilla::ReentrancyGuard g(*this);
   1.401 +        buffer.unput(this, edge);
   1.402 +    }
   1.403 +
   1.404 +    MonoTypeBuffer<ValueEdge> bufferVal;
   1.405 +    MonoTypeBuffer<CellPtrEdge> bufferCell;
   1.406 +    MonoTypeBuffer<SlotsEdge> bufferSlot;
   1.407 +    MonoTypeBuffer<WholeCellEdges> bufferWholeCell;
   1.408 +    RelocatableMonoTypeBuffer<ValueEdge> bufferRelocVal;
   1.409 +    RelocatableMonoTypeBuffer<CellPtrEdge> bufferRelocCell;
   1.410 +    GenericBuffer bufferGeneric;
   1.411 +
   1.412 +    JSRuntime *runtime_;
   1.413 +    const Nursery &nursery_;
   1.414 +
   1.415 +    bool aboutToOverflow_;
   1.416 +    bool enabled_;
   1.417 +    mozilla::DebugOnly<bool> entered; /* For ReentrancyGuard. */
   1.418 +
   1.419 +  public:
   1.420 +    explicit StoreBuffer(JSRuntime *rt, const Nursery &nursery)
   1.421 +      : bufferVal(), bufferCell(), bufferSlot(), bufferWholeCell(),
   1.422 +        bufferRelocVal(), bufferRelocCell(), bufferGeneric(),
   1.423 +        runtime_(rt), nursery_(nursery), aboutToOverflow_(false), enabled_(false),
   1.424 +        entered(false)
   1.425 +    {
   1.426 +    }
   1.427 +
   1.428 +    bool enable();
   1.429 +    void disable();
   1.430 +    bool isEnabled() const { return enabled_; }
   1.431 +
   1.432 +    bool clear();
   1.433 +
   1.434 +    /* Get the overflowed status. */
   1.435 +    bool isAboutToOverflow() const { return aboutToOverflow_; }
   1.436 +
   1.437 +    /* Insert a single edge into the buffer/remembered set. */
   1.438 +    void putValue(JS::Value *valuep) { put(bufferVal, ValueEdge(valuep)); }
   1.439 +    void putCell(Cell **cellp) { put(bufferCell, CellPtrEdge(cellp)); }
   1.440 +    void putSlot(JSObject *obj, int kind, int32_t start, int32_t count) {
   1.441 +        put(bufferSlot, SlotsEdge(obj, kind, start, count));
   1.442 +    }
   1.443 +    void putWholeCell(Cell *cell) {
   1.444 +        JS_ASSERT(cell->isTenured());
   1.445 +        put(bufferWholeCell, WholeCellEdges(cell));
   1.446 +    }
   1.447 +
   1.448 +    /* Insert or update a single edge in the Relocatable buffer. */
   1.449 +    void putRelocatableValue(JS::Value *valuep) { put(bufferRelocVal, ValueEdge(valuep)); }
   1.450 +    void putRelocatableCell(Cell **cellp) { put(bufferRelocCell, CellPtrEdge(cellp)); }
   1.451 +    void removeRelocatableValue(JS::Value *valuep) { unput(bufferRelocVal, ValueEdge(valuep)); }
   1.452 +    void removeRelocatableCell(Cell **cellp) { unput(bufferRelocCell, CellPtrEdge(cellp)); }
   1.453 +
   1.454 +    /* Insert an entry into the generic buffer. */
   1.455 +    template <typename T>
   1.456 +    void putGeneric(const T &t) { put(bufferGeneric, t);}
   1.457 +
   1.458 +    /* Insert or update a callback entry. */
   1.459 +    template <typename Key>
   1.460 +    void putCallback(void (*callback)(JSTracer *trc, Key *key, void *data), Key *key, void *data) {
   1.461 +        put(bufferGeneric, CallbackRef<Key>(callback, key, data));
   1.462 +    }
   1.463 +
   1.464 +    /* Methods to mark the source of all edges in the store buffer. */
   1.465 +    void markAll(JSTracer *trc);
   1.466 +    void markValues(JSTracer *trc)            { bufferVal.mark(this, trc); }
   1.467 +    void markCells(JSTracer *trc)             { bufferCell.mark(this, trc); }
   1.468 +    void markSlots(JSTracer *trc)             { bufferSlot.mark(this, trc); }
   1.469 +    void markWholeCells(JSTracer *trc)        { bufferWholeCell.mark(this, trc); }
   1.470 +    void markRelocatableValues(JSTracer *trc) { bufferRelocVal.mark(this, trc); }
   1.471 +    void markRelocatableCells(JSTracer *trc)  { bufferRelocCell.mark(this, trc); }
   1.472 +    void markGenericEntries(JSTracer *trc)    { bufferGeneric.mark(this, trc); }
   1.473 +
   1.474 +    /* We cannot call InParallelSection directly because of a circular dependency. */
   1.475 +    bool inParallelSection() const;
   1.476 +
   1.477 +    /* For use by our owned buffers and for testing. */
   1.478 +    void setAboutToOverflow();
   1.479 +
   1.480 +    void addSizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf, JS::GCSizes *sizes);
   1.481 +};
   1.482 +
   1.483 +} /* namespace gc */
   1.484 +} /* namespace js */
   1.485 +
   1.486 +#endif /* JSGC_GENERATIONAL */
   1.487 +
   1.488 +#endif /* gc_StoreBuffer_h */

mercurial