js/src/gc/Zone.h

changeset 0
6474c204b198
     1.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
     1.2 +++ b/js/src/gc/Zone.h	Wed Dec 31 06:09:35 2014 +0100
     1.3 @@ -0,0 +1,479 @@
     1.4 +/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
     1.5 + * vim: set ts=8 sts=4 et sw=4 tw=99:
     1.6 + * This Source Code Form is subject to the terms of the Mozilla Public
     1.7 + * License, v. 2.0. If a copy of the MPL was not distributed with this
     1.8 + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
     1.9 +
    1.10 +#ifndef gc_Zone_h
    1.11 +#define gc_Zone_h
    1.12 +
    1.13 +#include "mozilla/Atomics.h"
    1.14 +#include "mozilla/MemoryReporting.h"
    1.15 +
    1.16 +#include "jscntxt.h"
    1.17 +#include "jsgc.h"
    1.18 +#include "jsinfer.h"
    1.19 +
    1.20 +#include "gc/FindSCCs.h"
    1.21 +
    1.22 +namespace js {
    1.23 +
    1.24 +namespace jit {
    1.25 +class JitZone;
    1.26 +}
    1.27 +
    1.28 +/*
    1.29 + * Encapsulates the data needed to perform allocation.  Typically there is
    1.30 + * precisely one of these per zone (|cx->zone().allocator|).  However, in
    1.31 + * parallel execution mode, there will be one per worker thread.
    1.32 + */
class Allocator
{
    /*
     * Since allocators can be accessed from worker threads, the parent zone_
     * should not be accessed in general. ArenaLists is allowed to actually do
     * the allocation, however.
     */
    friend class gc::ArenaLists;

    JS::Zone *zone_;  // Back-pointer to the owning zone; see thread-safety note above.

  public:
    explicit Allocator(JS::Zone *zone);

    js::gc::ArenaLists arenas;  // Arena lists from which GC things are allocated.
};
    1.49 +
    1.50 +typedef Vector<JSCompartment *, 1, SystemAllocPolicy> CompartmentVector;
    1.51 +
    1.52 +} /* namespace js */
    1.53 +
    1.54 +namespace JS {
    1.55 +
    1.56 +/*
    1.57 + * A zone is a collection of compartments. Every compartment belongs to exactly
    1.58 + * one zone. In Firefox, there is roughly one zone per tab along with a system
    1.59 + * zone for everything else. Zones mainly serve as boundaries for garbage
    1.60 + * collection. Unlike compartments, they have no special security properties.
    1.61 + *
    1.62 + * Every GC thing belongs to exactly one zone. GC things from the same zone but
    1.63 + * different compartments can share an arena (4k page). GC things from different
    1.64 + * zones cannot be stored in the same arena. The garbage collector is capable of
    1.65 + * collecting one zone at a time; it cannot collect at the granularity of
    1.66 + * compartments.
    1.67 + *
    1.68 + * GC things are tied to zones and compartments as follows:
    1.69 + *
    1.70 + * - JSObjects belong to a compartment and cannot be shared between
    1.71 + *   compartments. If an object needs to point to a JSObject in a different
    1.72 + *   compartment, regardless of zone, it must go through a cross-compartment
    1.73 + *   wrapper. Each compartment keeps track of its outgoing wrappers in a table.
    1.74 + *
    1.75 + * - JSStrings do not belong to any particular compartment, but they do belong
    1.76 + *   to a zone. Thus, two different compartments in the same zone can point to a
    1.77 + *   JSString. When a string needs to be wrapped, we copy it if it's in a
    1.78 + *   different zone and do nothing if it's in the same zone. Thus, transferring
    1.79 + *   strings within a zone is very efficient.
    1.80 + *
    1.81 + * - Shapes and base shapes belong to a compartment and cannot be shared between
    1.82 + *   compartments. A base shape holds a pointer to its compartment. Shapes find
    1.83 + *   their compartment via their base shape. JSObjects find their compartment
    1.84 + *   via their shape.
    1.85 + *
    1.86 + * - Scripts are also compartment-local and cannot be shared. A script points to
    1.87 + *   its compartment.
    1.88 + *
    1.89 + * - Type objects and JitCode objects belong to a compartment and cannot be
    1.90 + *   shared. However, there is no mechanism to obtain their compartments.
    1.91 + *
    1.92 + * A zone remains alive as long as any GC things in the zone are alive. A
    1.93 + * compartment remains alive as long as any JSObjects, scripts, shapes, or base
    1.94 + * shapes within it are alive.
    1.95 + *
    1.96 + * We always guarantee that a zone has at least one live compartment by refusing
    1.97 + * to delete the last compartment in a live zone. (This could happen, for
    1.98 + * example, if the conservative scanner marks a string in an otherwise dead
    1.99 + * zone.)
   1.100 + */
   1.101 +
struct Zone : public JS::shadow::Zone,
              public js::gc::GraphNodeBase<JS::Zone>,
              public js::MallocProvider<JS::Zone>
{
  private:
    friend bool js::CurrentThreadCanAccessZone(Zone *zone);

  public:
    /* Per-zone allocation state; see the Allocator comment above. */
    js::Allocator                allocator;

    /* All compartments that belong to this zone. */
    js::CompartmentVector        compartments;

  private:
    // Presumably records whether Ion code in this zone was compiled with
    // write barriers enabled; maintained by setNeedsBarrier — TODO confirm.
    bool                         ionUsingBarriers_;

  public:
    bool                         active;  // GC flag, whether there are active frames

    /*
     * Whether JIT code compiled now must include write barriers, given the
     * supplied barrier state. Barriers are also forced on under the
     * pre-barrier-verifier zeal mode.
     */
    bool compileBarriers(bool needsBarrier) const {
        return needsBarrier || runtimeFromMainThread()->gcZeal() == js::gc::ZealVerifierPreValue;
    }

    /* As above, using this zone's current barrier state. */
    bool compileBarriers() const {
        return compileBarriers(needsBarrier());
    }

    /* Whether setNeedsBarrier should also update existing Ion code. */
    enum ShouldUpdateIon {
        DontUpdateIon,
        UpdateIon
    };

    void setNeedsBarrier(bool needs, ShouldUpdateIon updateIon);

    /* Stable address of the barrier flag, for JIT code to poll. */
    const bool *addressOfNeedsBarrier() const {
        return &needsBarrier_;
    }

  public:
    /* Phases of this zone's progress through an incremental GC. */
    enum GCState {
        NoGC,
        Mark,
        MarkGray,
        Sweep,
        Finished
    };

  private:
    bool                         gcScheduled;    // Zone requested for the next GC slice.
    GCState                      gcState;        // Current phase; see GCState.
    bool                         gcPreserveCode; // Keep JIT code alive across GC.

  public:
    /*
     * While the heap is actively being collected, reports whether this zone
     * participates (gcState != NoGC); outside collection, barriers being on
     * is treated as "collecting".
     */
    bool isCollecting() const {
        if (runtimeFromMainThread()->isHeapCollecting())
            return gcState != NoGC;
        else
            return needsBarrier();
    }

    bool isPreservingCode() const {
        return gcPreserveCode;
    }

    /*
     * If this returns true, all object tracing must be done with a GC marking
     * tracer.
     */
    bool requireGCTracer() const {
        return runtimeFromMainThread()->isHeapMajorCollecting() && gcState != NoGC;
    }

    /* Transition GC phase; only legal while the heap is busy with GC. */
    void setGCState(GCState state) {
        JS_ASSERT(runtimeFromMainThread()->isHeapBusy());
        JS_ASSERT_IF(state != NoGC, canCollect());
        gcState = state;
    }

    /* Request that this zone be included in the next collection. */
    void scheduleGC() {
        JS_ASSERT(!runtimeFromMainThread()->isHeapBusy());
        gcScheduled = true;
    }

    void unscheduleGC() {
        gcScheduled = false;
    }

    /* Scheduled AND currently collectible (see canCollect). */
    bool isGCScheduled() {
        return gcScheduled && canCollect();
    }

    void setPreservingCode(bool preserving) {
        gcPreserveCode = preserving;
    }

    /*
     * Whether this zone may be collected at all: zones in use by an exclusive
     * thread, and the atoms zone while exclusive threads exist, are excluded.
     */
    bool canCollect() {
        // Zones cannot be collected while in use by other threads.
        if (usedByExclusiveThread)
            return false;
        JSRuntime *rt = runtimeFromAnyThread();
        if (rt->isAtomsZone(this) && rt->exclusiveThreadsPresent())
            return false;
        return true;
    }

    bool wasGCStarted() const {
        return gcState != NoGC;
    }

    /* Same main-thread/barrier duality as isCollecting(), for the mark phases. */
    bool isGCMarking() {
        if (runtimeFromMainThread()->isHeapCollecting())
            return gcState == Mark || gcState == MarkGray;
        else
            return needsBarrier();
    }

    bool isGCMarkingBlack() {
        return gcState == Mark;
    }

    bool isGCMarkingGray() {
        return gcState == MarkGray;
    }

    bool isGCSweeping() {
        return gcState == Sweep;
    }

    bool isGCFinished() {
        return gcState == Finished;
    }

    /* This is updated by both the main and GC helper threads. */
    mozilla::Atomic<size_t, mozilla::ReleaseAcquire> gcBytes;

    size_t                       gcTriggerBytes;     // gcBytes level that triggers a GC.
    size_t                       gcMaxMallocBytes;   // Budget that seeds gcMallocBytes.
    double                       gcHeapGrowthFactor; // Heap growth factor for trigger sizing.

    bool                         isSystem;

    /* Whether this zone is being used by a thread with an ExclusiveContext. */
    bool usedByExclusiveThread;

    /*
     * Get a number that is incremented whenever this zone is collected, and
     * possibly at other times too.
     */
    uint64_t gcNumber();

    /*
     * These flags help us to discover if a compartment that shouldn't be alive
     * manages to outlive a GC.
     */
    bool                         scheduledForDestruction;
    bool                         maybeAlive;

    /*
     * Malloc counter to measure memory pressure for GC scheduling. It runs from
     * gcMaxMallocBytes down to zero. This counter should be used only when it's
     * not possible to know the size of a free.
     */
    mozilla::Atomic<ptrdiff_t, mozilla::ReleaseAcquire> gcMallocBytes;

    /*
     * Whether a GC has been triggered as a result of gcMallocBytes falling
     * below zero.
     *
     * This should be a bool, but Atomic only supports 32-bit and pointer-sized
     * types.
     */
    mozilla::Atomic<uint32_t, mozilla::ReleaseAcquire> gcMallocGCTriggered;

    /* This compartment's gray roots. */
    js::Vector<js::GrayRoot, 0, js::SystemAllocPolicy> gcGrayRoots;

    /* Per-zone data for use by an embedder. */
    void *data;

    // NOTE(review): single-argument constructor is not |explicit|; confirm
    // implicit conversion from JSRuntime* is intentional before changing.
    Zone(JSRuntime *rt);
    ~Zone();

    /* Report outgoing cross-zone edges to the SCC finder (for GC grouping). */
    void findOutgoingEdges(js::gc::ComponentFinder<JS::Zone> &finder);

    void discardJitCode(js::FreeOp *fop);

    /* Memory-reporter accounting; out-params accumulate measured sizes. */
    void addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf,
                                size_t *typePool,
                                size_t *baselineStubsOptimized);

    void setGCLastBytes(size_t lastBytes, js::JSGCInvocationKind gckind);
    void reduceGCTriggerBytes(size_t amount);

    void resetGCMallocBytes();
    void setGCMaxMallocBytes(size_t value);
    /* Charge |nbytes| of malloc'd memory against this zone's GC budget. */
    void updateMallocCounter(size_t nbytes) {
        /*
         * Note: this code may be run from worker threads.  We
         * tolerate any thread races when updating gcMallocBytes.
         */
        gcMallocBytes -= ptrdiff_t(nbytes);
        if (MOZ_UNLIKELY(isTooMuchMalloc()))
            onTooMuchMalloc();
    }

    /* True once the malloc budget has been exhausted (counter ran to <= 0). */
    bool isTooMuchMalloc() const {
        return gcMallocBytes <= 0;
     }

    void onTooMuchMalloc();

    /* MallocProvider hooks: delegate OOM handling to the runtime. */
    void *onOutOfMemory(void *p, size_t nbytes) {
        return runtimeFromMainThread()->onOutOfMemory(p, nbytes);
    }
    void reportAllocationOverflow() {
        js_ReportAllocationOverflow(nullptr);
    }

    /* Type-inference data shared by all compartments in this zone. */
    js::types::TypeZone types;

    void sweep(js::FreeOp *fop, bool releaseTypes, bool *oom);

    bool hasMarkedCompartments();

  private:
    void sweepBreakpoints(js::FreeOp *fop);

#ifdef JS_ION
    js::jit::JitZone *jitZone_;
    js::jit::JitZone *createJitZone(JSContext *cx);

  public:
    /* Lazily create (on first use) and return this zone's JIT data. */
    js::jit::JitZone *getJitZone(JSContext *cx) {
        return jitZone_ ? jitZone_ : createJitZone(cx);
    }
    js::jit::JitZone *jitZone() {
        return jitZone_;
    }
#endif
};
   1.341 +
   1.342 +} /* namespace JS */
   1.343 +
   1.344 +namespace js {
   1.345 +
   1.346 +/*
   1.347 + * Using the atoms zone without holding the exclusive access lock is dangerous
   1.348 + * because worker threads may be using it simultaneously. Therefore, it's
   1.349 + * better to skip the atoms zone when iterating over zones. If you need to
   1.350 + * iterate over the atoms zone, consider taking the exclusive access lock first.
   1.351 + */
   1.352 +enum ZoneSelector {
   1.353 +    WithAtoms,
   1.354 +    SkipAtoms
   1.355 +};
   1.356 +
   1.357 +class ZonesIter {
   1.358 +  private:
   1.359 +    JS::Zone **it, **end;
   1.360 +
   1.361 +  public:
   1.362 +    ZonesIter(JSRuntime *rt, ZoneSelector selector) {
   1.363 +        it = rt->zones.begin();
   1.364 +        end = rt->zones.end();
   1.365 +
   1.366 +        if (selector == SkipAtoms) {
   1.367 +            JS_ASSERT(rt->isAtomsZone(*it));
   1.368 +            it++;
   1.369 +        }
   1.370 +    }
   1.371 +
   1.372 +    bool done() const { return it == end; }
   1.373 +
   1.374 +    void next() {
   1.375 +        JS_ASSERT(!done());
   1.376 +        do {
   1.377 +            it++;
   1.378 +        } while (!done() && (*it)->usedByExclusiveThread);
   1.379 +    }
   1.380 +
   1.381 +    JS::Zone *get() const {
   1.382 +        JS_ASSERT(!done());
   1.383 +        return *it;
   1.384 +    }
   1.385 +
   1.386 +    operator JS::Zone *() const { return get(); }
   1.387 +    JS::Zone *operator->() const { return get(); }
   1.388 +};
   1.389 +
   1.390 +struct CompartmentsInZoneIter
   1.391 +{
   1.392 +    // This is for the benefit of CompartmentsIterT::comp.
   1.393 +    friend class mozilla::Maybe<CompartmentsInZoneIter>;
   1.394 +  private:
   1.395 +    JSCompartment **it, **end;
   1.396 +
   1.397 +    CompartmentsInZoneIter()
   1.398 +      : it(nullptr), end(nullptr)
   1.399 +    {}
   1.400 +
   1.401 +  public:
   1.402 +    explicit CompartmentsInZoneIter(JS::Zone *zone) {
   1.403 +        it = zone->compartments.begin();
   1.404 +        end = zone->compartments.end();
   1.405 +    }
   1.406 +
   1.407 +    bool done() const {
   1.408 +        JS_ASSERT(it);
   1.409 +        return it == end;
   1.410 +    }
   1.411 +    void next() {
   1.412 +        JS_ASSERT(!done());
   1.413 +        it++;
   1.414 +    }
   1.415 +
   1.416 +    JSCompartment *get() const {
   1.417 +        JS_ASSERT(it);
   1.418 +        return *it;
   1.419 +    }
   1.420 +
   1.421 +    operator JSCompartment *() const { return get(); }
   1.422 +    JSCompartment *operator->() const { return get(); }
   1.423 +};
   1.424 +
   1.425 +/*
   1.426 + * This iterator iterates over all the compartments in a given set of zones. The
   1.427 + * set of zones is determined by iterating ZoneIterT.
   1.428 + */
   1.429 +template<class ZonesIterT>
   1.430 +class CompartmentsIterT
   1.431 +{
   1.432 +  private:
   1.433 +    ZonesIterT zone;
   1.434 +    mozilla::Maybe<CompartmentsInZoneIter> comp;
   1.435 +
   1.436 +  public:
   1.437 +    explicit CompartmentsIterT(JSRuntime *rt)
   1.438 +      : zone(rt)
   1.439 +    {
   1.440 +        if (zone.done())
   1.441 +            comp.construct();
   1.442 +        else
   1.443 +            comp.construct(zone);
   1.444 +    }
   1.445 +
   1.446 +    CompartmentsIterT(JSRuntime *rt, ZoneSelector selector)
   1.447 +      : zone(rt, selector)
   1.448 +    {
   1.449 +        if (zone.done())
   1.450 +            comp.construct();
   1.451 +        else
   1.452 +            comp.construct(zone);
   1.453 +    }
   1.454 +
   1.455 +    bool done() const { return zone.done(); }
   1.456 +
   1.457 +    void next() {
   1.458 +        JS_ASSERT(!done());
   1.459 +        JS_ASSERT(!comp.ref().done());
   1.460 +        comp.ref().next();
   1.461 +        if (comp.ref().done()) {
   1.462 +            comp.destroy();
   1.463 +            zone.next();
   1.464 +            if (!zone.done())
   1.465 +                comp.construct(zone);
   1.466 +        }
   1.467 +    }
   1.468 +
   1.469 +    JSCompartment *get() const {
   1.470 +        JS_ASSERT(!done());
   1.471 +        return comp.ref();
   1.472 +    }
   1.473 +
   1.474 +    operator JSCompartment *() const { return get(); }
   1.475 +    JSCompartment *operator->() const { return get(); }
   1.476 +};
   1.477 +
   1.478 +typedef CompartmentsIterT<ZonesIter> CompartmentsIter;
   1.479 +
   1.480 +} /* namespace js */
   1.481 +
   1.482 +#endif /* gc_Zone_h */

mercurial