js/src/gc/Marking.cpp

changeset 0
6474c204b198
     1.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
     1.2 +++ b/js/src/gc/Marking.cpp	Wed Dec 31 06:09:35 2014 +0100
     1.3 @@ -0,0 +1,1813 @@
     1.4 +/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
     1.5 + * vim: set ts=8 sts=4 et sw=4 tw=99:
     1.6 + * This Source Code Form is subject to the terms of the Mozilla Public
     1.7 + * License, v. 2.0. If a copy of the MPL was not distributed with this
     1.8 + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
     1.9 +
    1.10 +#include "gc/Marking.h"
    1.11 +
    1.12 +#include "mozilla/DebugOnly.h"
    1.13 +
    1.14 +#include "jit/IonCode.h"
    1.15 +#include "js/SliceBudget.h"
    1.16 +#include "vm/ArgumentsObject.h"
    1.17 +#include "vm/ScopeObject.h"
    1.18 +#include "vm/Shape.h"
    1.19 +#include "vm/TypedArrayObject.h"
    1.20 +
    1.21 +#include "jscompartmentinlines.h"
    1.22 +#include "jsinferinlines.h"
    1.23 +#include "jsobjinlines.h"
    1.24 +
    1.25 +#ifdef JSGC_GENERATIONAL
    1.26 +# include "gc/Nursery-inl.h"
    1.27 +#endif
    1.28 +#include "vm/String-inl.h"
    1.29 +
    1.30 +using namespace js;
    1.31 +using namespace js::gc;
    1.32 +
    1.33 +using mozilla::DebugOnly;
    1.34 +
    1.35 +void * const js::NullPtr::constNullValue = nullptr;
    1.36 +
    1.37 +JS_PUBLIC_DATA(void * const) JS::NullPtr::constNullValue = nullptr;
    1.38 +
    1.39 +/*
    1.40 + * There are two mostly separate mark paths. The first is a fast path used
    1.41 + * internally in the GC. The second is a slow path used for root marking and
    1.42 + * for API consumers like the cycle collector or Class::trace implementations.
    1.43 + *
    1.44 + * The fast path uses explicit stacks. The basic marking process during a GC is
    1.45 + * that all roots are pushed on to a mark stack, and then each item on the
    1.46 + * stack is scanned (possibly pushing more stuff) until the stack is empty.
    1.47 + *
    1.48 + * PushMarkStack pushes a GC thing onto the mark stack. In some cases (shapes
    1.49 + * or strings) it eagerly marks the object rather than pushing it. Popping and
    1.50 + * scanning is done by the processMarkStackTop method. For efficiency reasons
    1.51 + * like tail recursion elimination that method also implements the scanning of
    1.52 + * objects. For other GC things it uses helper methods.
    1.53 + *
    1.54 + * Most of the marking code outside Marking.cpp uses functions like MarkObject,
    1.55 + * MarkString, etc. These functions check if an object is in the compartment
    1.56 + * currently being GCed. If it is, they call PushMarkStack. Roots are pushed
    1.57 + * this way as well as pointers traversed inside trace hooks (for things like
    1.58 + * PropertyIteratorObjects). It is always valid to call a MarkX function
    1.59 + * instead of PushMarkStack, although it may be slower.
    1.60 + *
    1.61 + * The MarkX functions also handle non-GC object traversal. In this case, they
    1.62 + * call a callback for each object visited. This is a recursive process; the
    1.63 + * mark stacks are not involved. These callbacks may ask for the outgoing
    1.64 + * pointers to be visited. Eventually, this leads to the MarkChildren functions
    1.65 + * being called. These functions duplicate much of the functionality of
    1.66 + * scanning functions, but they don't push onto an explicit stack.
    1.67 + */
    1.68 +
/*
 * Fast-path push helpers, defined later in this file.  Each overload either
 * pushes |thing| onto the GCMarker's mark stack or (per the file comment
 * above, for shapes and strings) marks it eagerly instead of pushing.
 */
static inline void
PushMarkStack(GCMarker *gcmarker, ObjectImpl *thing);

static inline void
PushMarkStack(GCMarker *gcmarker, JSFunction *thing);

static inline void
PushMarkStack(GCMarker *gcmarker, JSScript *thing);

static inline void
PushMarkStack(GCMarker *gcmarker, Shape *thing);

static inline void
PushMarkStack(GCMarker *gcmarker, JSString *thing);

static inline void
PushMarkStack(GCMarker *gcmarker, types::TypeObject *thing);
    1.86 +
namespace js {
namespace gc {

/*
 * Slow-path traversal helpers: visit the outgoing edges of a single GC thing
 * without using the mark stack.  These are static, so their definitions
 * appear later in this translation unit.
 */
static void MarkChildren(JSTracer *trc, JSString *str);
static void MarkChildren(JSTracer *trc, JSScript *script);
static void MarkChildren(JSTracer *trc, LazyScript *lazy);
static void MarkChildren(JSTracer *trc, Shape *shape);
static void MarkChildren(JSTracer *trc, BaseShape *base);
static void MarkChildren(JSTracer *trc, types::TypeObject *type);
static void MarkChildren(JSTracer *trc, jit::JitCode *code);

} /* namespace gc */
} /* namespace js */
   1.100 +
   1.101 +/*** Object Marking ***/
   1.102 +
   1.103 +#if defined(DEBUG)
   1.104 +template<typename T>
   1.105 +static inline bool
   1.106 +IsThingPoisoned(T *thing)
   1.107 +{
   1.108 +    static_assert(sizeof(T) >= sizeof(FreeSpan) + sizeof(uint32_t),
   1.109 +                  "Ensure it is well defined to look past any free span that "
   1.110 +                  "may be embedded in the thing's header when freed.");
   1.111 +    const uint8_t poisonBytes[] = {
   1.112 +        JS_FRESH_NURSERY_PATTERN,
   1.113 +        JS_SWEPT_NURSERY_PATTERN,
   1.114 +        JS_ALLOCATED_NURSERY_PATTERN,
   1.115 +        JS_FRESH_TENURED_PATTERN,
   1.116 +        JS_SWEPT_TENURED_PATTERN,
   1.117 +        JS_ALLOCATED_TENURED_PATTERN,
   1.118 +        JS_SWEPT_CODE_PATTERN,
   1.119 +        JS_SWEPT_FRAME_PATTERN
   1.120 +    };
   1.121 +    const int numPoisonBytes = sizeof(poisonBytes) / sizeof(poisonBytes[0]);
   1.122 +    uint32_t *p = reinterpret_cast<uint32_t *>(reinterpret_cast<FreeSpan *>(thing) + 1);
   1.123 +    // Note: all free patterns are odd to make the common, not-poisoned case a single test.
   1.124 +    if ((*p & 1) == 0)
   1.125 +        return false;
   1.126 +    for (int i = 0; i < numPoisonBytes; ++i) {
   1.127 +        const uint8_t pb = poisonBytes[i];
   1.128 +        const uint32_t pw = pb | (pb << 8) | (pb << 16) | (pb << 24);
   1.129 +        if (*p == pw)
   1.130 +            return true;
   1.131 +    }
   1.132 +    return false;
   1.133 +}
   1.134 +#endif
   1.135 +
/*
 * Downcast a tracer to the GC's marking tracer.  Only valid when
 * IS_GC_MARKING_TRACER(trc) holds, which the assertion checks.
 */
static GCMarker *
AsGCMarker(JSTracer *trc)
{
    JS_ASSERT(IS_GC_MARKING_TRACER(trc));
    return static_cast<GCMarker *>(trc);
}
   1.142 +
/*
 * Trait: is |thing| a permanent atom?  Permanent atoms may belong to another
 * runtime and are skipped by the marking code below.  Only the string family
 * of types can be permanent; every other GC thing answers false.
 */
template <typename T> bool ThingIsPermanentAtom(T *thing) { return false; }
template <> bool ThingIsPermanentAtom<JSString>(JSString *str) { return str->isPermanentAtom(); }
template <> bool ThingIsPermanentAtom<JSFlatString>(JSFlatString *str) { return str->isPermanentAtom(); }
template <> bool ThingIsPermanentAtom<JSLinearString>(JSLinearString *str) { return str->isPermanentAtom(); }
template <> bool ThingIsPermanentAtom<JSAtom>(JSAtom *atom) { return atom->isPermanent(); }
template <> bool ThingIsPermanentAtom<PropertyName>(PropertyName *name) { return name->isPermanent(); }
   1.149 +
/*
 * Debug-only sanity checks applied to every thing about to be marked:
 * pointer validity, runtime/zone association, alignment, trace-kind
 * consistency, and GC-phase invariants.  Compiles to nothing in release
 * builds.
 */
template<typename T>
static inline void
CheckMarkedThing(JSTracer *trc, T *thing)
{
#ifdef DEBUG
    JS_ASSERT(trc);
    JS_ASSERT(thing);

    /* This function uses data that's not available in the nursery. */
    if (IsInsideNursery(trc->runtime(), thing))
        return;

    /*
     * Permanent atoms are not associated with this runtime, but will be ignored
     * during marking.
     */
    if (ThingIsPermanentAtom(thing))
        return;

    JS_ASSERT(thing->zone());
    JS_ASSERT(thing->zone()->runtimeFromMainThread() == trc->runtime());
    JS_ASSERT(trc->hasTracingDetails());

    DebugOnly<JSRuntime *> rt = trc->runtime();

    JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc) && rt->gcManipulatingDeadZones,
                 !thing->zone()->scheduledForDestruction);

    JS_ASSERT(CurrentThreadCanAccessRuntime(rt));

    JS_ASSERT_IF(thing->zone()->requireGCTracer(),
                 IS_GC_MARKING_TRACER(trc));

    JS_ASSERT(thing->isAligned());

    /* The static trace kind of T must agree with the thing's dynamic kind. */
    JS_ASSERT(MapTypeToTraceKind<T>::kind == GetGCThingTraceKind(thing));

    JS_ASSERT_IF(rt->gcStrictCompartmentChecking,
                 thing->zone()->isCollecting() || rt->isAtomsZone(thing->zone()));

    /* Gray marking must not run while a zone is still marking black. */
    JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc) && AsGCMarker(trc)->getMarkColor() == GRAY,
                 !thing->zone()->isGCMarkingBlack() || rt->isAtomsZone(thing->zone()));

    /* Nothing should be marked once a zone has moved on to sweeping. */
    JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc),
                 !(thing->zone()->isGCSweeping() || thing->zone()->isGCFinished()));

    /*
     * Try to assert that the thing is allocated.  This is complicated by the
     * fact that allocated things may still contain the poison pattern if that
     * part has not been overwritten, and that the free span list head in the
     * ArenaHeader may not be synced with the real one in ArenaLists.
     */
    JS_ASSERT_IF(IsThingPoisoned(thing) && rt->isHeapBusy(),
                 !InFreeList(thing->arenaHeader(), thing));
#endif
}
   1.206 +
/*
 * Central marking entry point for a typed GC thing pointer.
 *
 * With no callback on the tracer this is the GC fast path: skip things that
 * cannot or need not be marked, then push onto the marker's stack.  With a
 * callback (API consumers such as the cycle collector) the callback is
 * invoked instead and may update *thingp.
 */
template<typename T>
static void
MarkInternal(JSTracer *trc, T **thingp)
{
    JS_ASSERT(thingp);
    T *thing = *thingp;

    CheckMarkedThing(trc, thing);

    if (!trc->callback) {
        /*
         * We may mark a Nursery thing outside the context of the
         * MinorCollectionTracer because of a pre-barrier. The pre-barrier is
         * not needed in this case because we perform a minor collection before
         * each incremental slice.
         */
        if (IsInsideNursery(trc->runtime(), thing))
            return;

        /*
         * Don't mark permanent atoms, as they may be associated with another
         * runtime. Note that PushMarkStack() also checks this, but the tests
         * and maybeAlive write below should only be done on the main thread.
         */
        if (ThingIsPermanentAtom(thing))
            return;

        /*
         * Don't mark things outside a compartment if we are in a
         * per-compartment GC.
         */
        if (!thing->zone()->isGCMarking())
            return;

        PushMarkStack(AsGCMarker(trc), thing);
        thing->zone()->maybeAlive = true;
    } else {
        trc->callback(trc, (void **)thingp, MapTypeToTraceKind<T>::kind);
        trc->unsetTracingLocation();
    }

    trc->clearTracingDetails();
}
   1.250 +
/*
 * Roots may only be marked when the incremental GC is inactive or in its
 * root-marking phase; assert this for marking tracers.
 */
#define JS_ROOT_MARKING_ASSERT(trc)                                     \
    JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc),                             \
                 trc->runtime()->gcIncrementalState == NO_INCREMENTAL ||  \
                 trc->runtime()->gcIncrementalState == MARK_ROOTS);
   1.255 +
namespace js {
namespace gc {

/* Mark through a raw (unbarriered) pointer, recording |name| for tracing. */
template <typename T>
void
MarkUnbarriered(JSTracer *trc, T **thingp, const char *name)
{
    trc->setTracingName(name);
    MarkInternal(trc, thingp);
}

/* Mark through a write-barriered pointer, recording |name| for tracing. */
template <typename T>
static void
Mark(JSTracer *trc, BarrieredPtr<T> *thing, const char *name)
{
    trc->setTracingName(name);
    MarkInternal(trc, thing->unsafeGet());
}

/*
 * Mark a permanent atom.  Permanent atoms are skipped by MarkInternal, so
 * this bypasses it: mark directly on the GC path, or hand the atom to the
 * tracer callback on the API path.
 */
void
MarkPermanentAtom(JSTracer *trc, JSAtom *atom, const char *name)
{
    trc->setTracingName(name);

    JS_ASSERT(atom->isPermanent());

    CheckMarkedThing(trc, atom);

    if (!trc->callback) {
        // Atoms do not refer to other GC things so don't need to go on the mark stack.
        // Additionally, PushMarkStack will ignore permanent atoms.
        atom->markIfUnmarked();
    } else {
        void *thing = atom;
        trc->callback(trc, &thing, JSTRACE_STRING);
        /* Permanent atoms are never relocated, so the callback must not move it. */
        JS_ASSERT(thing == atom);
        trc->unsetTracingLocation();
    }

    trc->clearTracingDetails();
}

} /* namespace gc */
} /* namespace js */
   1.300 +
/*
 * Mark a root pointer.  Identical to MarkUnbarriered except that it asserts
 * we are in a phase where root marking is legal.
 */
template <typename T>
static void
MarkRoot(JSTracer *trc, T **thingp, const char *name)
{
    JS_ROOT_MARKING_ASSERT(trc);
    trc->setTracingName(name);
    MarkInternal(trc, thingp);
}
   1.309 +
   1.310 +template <typename T>
   1.311 +static void
   1.312 +MarkRange(JSTracer *trc, size_t len, HeapPtr<T> *vec, const char *name)
   1.313 +{
   1.314 +    for (size_t i = 0; i < len; ++i) {
   1.315 +        if (vec[i].get()) {
   1.316 +            trc->setTracingIndex(name, i);
   1.317 +            MarkInternal(trc, vec[i].unsafeGet());
   1.318 +        }
   1.319 +    }
   1.320 +}
   1.321 +
   1.322 +template <typename T>
   1.323 +static void
   1.324 +MarkRootRange(JSTracer *trc, size_t len, T **vec, const char *name)
   1.325 +{
   1.326 +    JS_ROOT_MARKING_ASSERT(trc);
   1.327 +    for (size_t i = 0; i < len; ++i) {
   1.328 +        if (vec[i]) {
   1.329 +            trc->setTracingIndex(name, i);
   1.330 +            MarkInternal(trc, &vec[i]);
   1.331 +        }
   1.332 +    }
   1.333 +}
   1.334 +
   1.335 +namespace js {
   1.336 +namespace gc {
   1.337 +
/*
 * Is the thing *thingp marked?  For nursery things this also forwards the
 * pointer if the thing has been tenured, writing the new location back
 * through thingp.  Things in zones that are not being collected (or whose
 * marking has finished) are conservatively reported as marked.
 */
template <typename T>
static bool
IsMarked(T **thingp)
{
    JS_ASSERT(thingp);
    JS_ASSERT(*thingp);
#ifdef JSGC_GENERATIONAL
    Nursery &nursery = (*thingp)->runtimeFromMainThread()->gcNursery;
    if (nursery.isInside(*thingp))
        return nursery.getForwardedPointer(thingp);
#endif
    Zone *zone = (*thingp)->tenuredZone();
    if (!zone->isCollecting() || zone->isGCFinished())
        return true;
    return (*thingp)->isMarked();
}
   1.354 +
/*
 * Will the thing *thingp be finalized by the current collection?  During a
 * minor GC, nursery things that were not forwarded are dying (and thingp is
 * updated for those that were).  Otherwise a thing is about to be finalized
 * iff its zone is sweeping and it is unmarked.
 */
template <typename T>
static bool
IsAboutToBeFinalized(T **thingp)
{
    JS_ASSERT(thingp);
    JS_ASSERT(*thingp);

    T *thing = *thingp;
    JSRuntime *rt = thing->runtimeFromAnyThread();

    /* Permanent atoms are never finalized by non-owning runtimes. */
    if (ThingIsPermanentAtom(thing) && !TlsPerThreadData.get()->associatedWith(rt))
        return false;

#ifdef JSGC_GENERATIONAL
    Nursery &nursery = rt->gcNursery;
    JS_ASSERT_IF(!rt->isHeapMinorCollecting(), !nursery.isInside(thing));
    if (rt->isHeapMinorCollecting()) {
        if (nursery.isInside(thing))
            return !nursery.getForwardedPointer(thingp);
        return false;
    }
#endif

    if (!thing->tenuredZone()->isGCSweeping())
        return false;

    /*
     * We should return false for things that have been allocated during
     * incremental sweeping, but this possibility doesn't occur at the moment
     * because this function is only called at the very start of sweeping a
     * compartment group and during minor gc. Rather than do the extra check,
     * we just assert that it's not necessary.
     */
    JS_ASSERT_IF(!rt->isHeapMinorCollecting(), !thing->arenaHeader()->allocatedDuringIncremental);

    return !thing->isMarked();
}
   1.393 +
/*
 * If *thingp points into the nursery during a minor collection, replace it
 * with the thing's forwarded (tenured) address.  Returns the possibly-updated
 * pointer; a null *thingp is passed through unchanged.
 */
template <typename T>
T *
UpdateIfRelocated(JSRuntime *rt, T **thingp)
{
    JS_ASSERT(thingp);
#ifdef JSGC_GENERATIONAL
    if (*thingp && rt->isHeapMinorCollecting() && rt->gcNursery.isInside(*thingp))
        rt->gcNursery.getForwardedPointer(thingp);
#endif
    return *thingp;
}
   1.405 +
/*
 * Stamp out the public, typed marking API for one (base, type) pair:
 * Mark##base, Mark##base##Root, Mark##base##Unbarriered, the range variants,
 * the Is##base##Marked / AboutToBeFinalized queries, and the relocation
 * helpers.  Each expansion simply forwards to the templated implementations
 * above; see the instantiation list below the macro.
 */
#define DeclMarkerImpl(base, type)                                                                \
void                                                                                              \
Mark##base(JSTracer *trc, BarrieredPtr<type> *thing, const char *name)                            \
{                                                                                                 \
    Mark<type>(trc, thing, name);                                                                 \
}                                                                                                 \
                                                                                                  \
void                                                                                              \
Mark##base##Root(JSTracer *trc, type **thingp, const char *name)                                  \
{                                                                                                 \
    MarkRoot<type>(trc, thingp, name);                                                            \
}                                                                                                 \
                                                                                                  \
void                                                                                              \
Mark##base##Unbarriered(JSTracer *trc, type **thingp, const char *name)                           \
{                                                                                                 \
    MarkUnbarriered<type>(trc, thingp, name);                                                     \
}                                                                                                 \
                                                                                                  \
/* Explicitly instantiate MarkUnbarriered<type>. It is referenced from */                         \
/* other translation units and the instantiation might otherwise get */                           \
/* inlined away. */                                                                               \
template void MarkUnbarriered<type>(JSTracer *, type **, const char *);                           \
                                                                                                  \
void                                                                                              \
Mark##base##Range(JSTracer *trc, size_t len, HeapPtr<type> *vec, const char *name)                \
{                                                                                                 \
    MarkRange<type>(trc, len, vec, name);                                                         \
}                                                                                                 \
                                                                                                  \
void                                                                                              \
Mark##base##RootRange(JSTracer *trc, size_t len, type **vec, const char *name)                    \
{                                                                                                 \
    MarkRootRange<type>(trc, len, vec, name);                                                     \
}                                                                                                 \
                                                                                                  \
bool                                                                                              \
Is##base##Marked(type **thingp)                                                                   \
{                                                                                                 \
    return IsMarked<type>(thingp);                                                                \
}                                                                                                 \
                                                                                                  \
bool                                                                                              \
Is##base##Marked(BarrieredPtr<type> *thingp)                                                      \
{                                                                                                 \
    return IsMarked<type>(thingp->unsafeGet());                                                   \
}                                                                                                 \
                                                                                                  \
bool                                                                                              \
Is##base##AboutToBeFinalized(type **thingp)                                                       \
{                                                                                                 \
    return IsAboutToBeFinalized<type>(thingp);                                                    \
}                                                                                                 \
                                                                                                  \
bool                                                                                              \
Is##base##AboutToBeFinalized(BarrieredPtr<type> *thingp)                                          \
{                                                                                                 \
    return IsAboutToBeFinalized<type>(thingp->unsafeGet());                                       \
}                                                                                                 \
                                                                                                  \
type *                                                                                            \
Update##base##IfRelocated(JSRuntime *rt, BarrieredPtr<type> *thingp)                              \
{                                                                                                 \
    return UpdateIfRelocated<type>(rt, thingp->unsafeGet());                                      \
}                                                                                                 \
                                                                                                  \
type *                                                                                            \
Update##base##IfRelocated(JSRuntime *rt, type **thingp)                                           \
{                                                                                                 \
    return UpdateIfRelocated<type>(rt, thingp);                                                   \
}
   1.477 +
   1.478 +
/*
 * Instantiate the typed marking API.  The first argument is the name suffix
 * used in the generated functions (e.g. MarkObjectRoot); the second is the
 * concrete C++ type they operate on.
 */
DeclMarkerImpl(BaseShape, BaseShape)
DeclMarkerImpl(BaseShape, UnownedBaseShape)
DeclMarkerImpl(JitCode, jit::JitCode)
DeclMarkerImpl(Object, ArgumentsObject)
DeclMarkerImpl(Object, ArrayBufferObject)
DeclMarkerImpl(Object, ArrayBufferViewObject)
DeclMarkerImpl(Object, SharedArrayBufferObject)
DeclMarkerImpl(Object, DebugScopeObject)
DeclMarkerImpl(Object, GlobalObject)
DeclMarkerImpl(Object, JSObject)
DeclMarkerImpl(Object, JSFunction)
DeclMarkerImpl(Object, ObjectImpl)
DeclMarkerImpl(Object, ScopeObject)
DeclMarkerImpl(Script, JSScript)
DeclMarkerImpl(LazyScript, LazyScript)
DeclMarkerImpl(Shape, Shape)
DeclMarkerImpl(String, JSAtom)
DeclMarkerImpl(String, JSString)
DeclMarkerImpl(String, JSFlatString)
DeclMarkerImpl(String, JSLinearString)
DeclMarkerImpl(String, PropertyName)
DeclMarkerImpl(TypeObject, js::types::TypeObject)

} /* namespace gc */
} /* namespace js */
   1.504 +
   1.505 +/*** Externally Typed Marking ***/
   1.506 +
/*
 * Mark an untyped GC thing given its trace kind, dispatching to the typed
 * MarkInternal instantiation.  *thingp may be updated by the tracer.
 *
 * NOTE(review): there is no default case, so an unrecognized |kind| is
 * silently ignored rather than asserted on.
 */
void
gc::MarkKind(JSTracer *trc, void **thingp, JSGCTraceKind kind)
{
    JS_ASSERT(thingp);
    JS_ASSERT(*thingp);
    DebugOnly<Cell *> cell = static_cast<Cell *>(*thingp);
    /* For tenured things, |kind| must match the thing's actual alloc kind. */
    JS_ASSERT_IF(cell->isTenured(), kind == MapAllocToTraceKind(cell->tenuredGetAllocKind()));
    switch (kind) {
      case JSTRACE_OBJECT:
        MarkInternal(trc, reinterpret_cast<JSObject **>(thingp));
        break;
      case JSTRACE_STRING:
        MarkInternal(trc, reinterpret_cast<JSString **>(thingp));
        break;
      case JSTRACE_SCRIPT:
        MarkInternal(trc, reinterpret_cast<JSScript **>(thingp));
        break;
      case JSTRACE_LAZY_SCRIPT:
        MarkInternal(trc, reinterpret_cast<LazyScript **>(thingp));
        break;
      case JSTRACE_SHAPE:
        MarkInternal(trc, reinterpret_cast<Shape **>(thingp));
        break;
      case JSTRACE_BASE_SHAPE:
        MarkInternal(trc, reinterpret_cast<BaseShape **>(thingp));
        break;
      case JSTRACE_TYPE_OBJECT:
        MarkInternal(trc, reinterpret_cast<types::TypeObject **>(thingp));
        break;
      case JSTRACE_JITCODE:
        MarkInternal(trc, reinterpret_cast<jit::JitCode **>(thingp));
        break;
    }
}
   1.541 +
   1.542 +static void
   1.543 +MarkGCThingInternal(JSTracer *trc, void **thingp, const char *name)
   1.544 +{
   1.545 +    trc->setTracingName(name);
   1.546 +    JS_ASSERT(thingp);
   1.547 +    if (!*thingp)
   1.548 +        return;
   1.549 +    MarkKind(trc, thingp, GetGCThingTraceKind(*thingp));
   1.550 +}
   1.551 +
/* Mark an untyped GC thing used as a root (asserts the root-marking phase). */
void
gc::MarkGCThingRoot(JSTracer *trc, void **thingp, const char *name)
{
    JS_ROOT_MARKING_ASSERT(trc);
    MarkGCThingInternal(trc, thingp, name);
}

/* Mark an untyped GC thing through a raw (unbarriered) pointer. */
void
gc::MarkGCThingUnbarriered(JSTracer *trc, void **thingp, const char *name)
{
    MarkGCThingInternal(trc, thingp, name);
}
   1.564 +
/*** ID Marking ***/

/*
 * Mark the GC thing (string or object) stored in a jsid, if any, and rebuild
 * the id from the pointer afterwards, since the tracer may have updated it.
 * Ids holding no GC thing are left untouched.
 */
static inline void
MarkIdInternal(JSTracer *trc, jsid *id)
{
    if (JSID_IS_STRING(*id)) {
        JSString *str = JSID_TO_STRING(*id);
        trc->setTracingLocation((void *)id);
        MarkInternal(trc, &str);
        *id = NON_INTEGER_ATOM_TO_JSID(reinterpret_cast<JSAtom *>(str));
    } else if (MOZ_UNLIKELY(JSID_IS_OBJECT(*id))) {
        JSObject *obj = JSID_TO_OBJECT(*id);
        trc->setTracingLocation((void *)id);
        MarkInternal(trc, &obj);
        *id = OBJECT_TO_JSID(obj);
    } else {
        /* Unset realLocation manually if we do not call MarkInternal. */
        trc->unsetTracingLocation();
    }
}

/* Mark a jsid held behind a write barrier. */
void
gc::MarkId(JSTracer *trc, BarrieredId *id, const char *name)
{
    trc->setTracingName(name);
    MarkIdInternal(trc, id->unsafeGet());
}

/* Mark a jsid used as a root (asserts the root-marking phase). */
void
gc::MarkIdRoot(JSTracer *trc, jsid *id, const char *name)
{
    JS_ROOT_MARKING_ASSERT(trc);
    trc->setTracingName(name);
    MarkIdInternal(trc, id);
}

/* Mark a jsid through a raw (unbarriered) location. */
void
gc::MarkIdUnbarriered(JSTracer *trc, jsid *id, const char *name)
{
    trc->setTracingName(name);
    MarkIdInternal(trc, id);
}

/* Mark a vector of barriered jsids, recording each index for tracing. */
void
gc::MarkIdRange(JSTracer *trc, size_t len, HeapId *vec, const char *name)
{
    for (size_t i = 0; i < len; ++i) {
        trc->setTracingIndex(name, i);
        MarkIdInternal(trc, vec[i].unsafeGet());
    }
}

/* Mark a vector of root jsids, recording each index for tracing. */
void
gc::MarkIdRootRange(JSTracer *trc, size_t len, jsid *vec, const char *name)
{
    JS_ROOT_MARKING_ASSERT(trc);
    for (size_t i = 0; i < len; ++i) {
        trc->setTracingIndex(name, i);
        MarkIdInternal(trc, &vec[i]);
    }
}
   1.626 +
/*** Value Marking ***/

/*
 * Mark the GC thing held by a Value, if any, and rebuild the Value from the
 * pointer afterwards, since the tracer may have updated it.  Non-markable
 * values (numbers, booleans, etc.) are left untouched.
 */
static inline void
MarkValueInternal(JSTracer *trc, Value *v)
{
    if (v->isMarkable()) {
        JS_ASSERT(v->toGCThing());
        void *thing = v->toGCThing();
        trc->setTracingLocation((void *)v);
        MarkKind(trc, &thing, v->gcKind());
        /* Retag the (possibly updated) pointer with the value's original kind. */
        if (v->isString())
            v->setString((JSString *)thing);
        else
            v->setObjectOrNull((JSObject *)thing);
    } else {
        /* Unset realLocation manually if we do not call MarkInternal. */
        trc->unsetTracingLocation();
    }
}

/* Mark a Value held behind a write barrier. */
void
gc::MarkValue(JSTracer *trc, BarrieredValue *v, const char *name)
{
    trc->setTracingName(name);
    MarkValueInternal(trc, v->unsafeGet());
}

/* Mark a Value used as a root (asserts the root-marking phase). */
void
gc::MarkValueRoot(JSTracer *trc, Value *v, const char *name)
{
    JS_ROOT_MARKING_ASSERT(trc);
    trc->setTracingName(name);
    MarkValueInternal(trc, v);
}

/*
 * Mark the object or type object referenced by a types::Type root, rebuilding
 * the Type afterwards.  Types that reference no GC thing are ignored.
 */
void
gc::MarkTypeRoot(JSTracer *trc, types::Type *v, const char *name)
{
    JS_ROOT_MARKING_ASSERT(trc);
    trc->setTracingName(name);
    if (v->isSingleObject()) {
        JSObject *obj = v->singleObject();
        MarkInternal(trc, &obj);
        *v = types::Type::ObjectType(obj);
    } else if (v->isTypeObject()) {
        types::TypeObject *typeObj = v->typeObject();
        MarkInternal(trc, &typeObj);
        *v = types::Type::ObjectType(typeObj);
    }
}

/* Mark a vector of barriered Values, recording each index for tracing. */
void
gc::MarkValueRange(JSTracer *trc, size_t len, BarrieredValue *vec, const char *name)
{
    for (size_t i = 0; i < len; ++i) {
        trc->setTracingIndex(name, i);
        MarkValueInternal(trc, vec[i].unsafeGet());
    }
}

/* Mark a vector of root Values, recording each index for tracing. */
void
gc::MarkValueRootRange(JSTracer *trc, size_t len, Value *vec, const char *name)
{
    JS_ROOT_MARKING_ASSERT(trc);
    for (size_t i = 0; i < len; ++i) {
        trc->setTracingIndex(name, i);
        MarkValueInternal(trc, &vec[i]);
    }
}
   1.696 +
   1.697 +bool
   1.698 +gc::IsValueMarked(Value *v)
   1.699 +{
   1.700 +    JS_ASSERT(v->isMarkable());
   1.701 +    bool rv;
   1.702 +    if (v->isString()) {
   1.703 +        JSString *str = (JSString *)v->toGCThing();
   1.704 +        rv = IsMarked<JSString>(&str);
   1.705 +        v->setString(str);
   1.706 +    } else {
   1.707 +        JSObject *obj = (JSObject *)v->toGCThing();
   1.708 +        rv = IsMarked<JSObject>(&obj);
   1.709 +        v->setObject(*obj);
   1.710 +    }
   1.711 +    return rv;
   1.712 +}
   1.713 +
   1.714 +bool
   1.715 +gc::IsValueAboutToBeFinalized(Value *v)
   1.716 +{
   1.717 +    JS_ASSERT(v->isMarkable());
   1.718 +    bool rv;
   1.719 +    if (v->isString()) {
   1.720 +        JSString *str = (JSString *)v->toGCThing();
   1.721 +        rv = IsAboutToBeFinalized<JSString>(&str);
   1.722 +        v->setString(str);
   1.723 +    } else {
   1.724 +        JSObject *obj = (JSObject *)v->toGCThing();
   1.725 +        rv = IsAboutToBeFinalized<JSObject>(&obj);
   1.726 +        v->setObject(*obj);
   1.727 +    }
   1.728 +    return rv;
   1.729 +}
   1.730 +
   1.731 +/*** Slot Marking ***/
   1.732 +
/* Query whether the thing held in heap slot |*s| is marked. */
bool
gc::IsSlotMarked(HeapSlot *s)
{
    return IsMarked(s);
}
   1.738 +
/* Mark the Value held in heap slot |*s|; |name| is reported to the tracer. */
void
gc::MarkSlot(JSTracer *trc, HeapSlot *s, const char *name)
{
    trc->setTracingName(name);
    MarkValueInternal(trc, s->unsafeGet());
}
   1.745 +
   1.746 +void
   1.747 +gc::MarkArraySlots(JSTracer *trc, size_t len, HeapSlot *vec, const char *name)
   1.748 +{
   1.749 +    for (size_t i = 0; i < len; ++i) {
   1.750 +        trc->setTracingIndex(name, i);
   1.751 +        MarkValueInternal(trc, vec[i].unsafeGet());
   1.752 +    }
   1.753 +}
   1.754 +
   1.755 +void
   1.756 +gc::MarkObjectSlots(JSTracer *trc, JSObject *obj, uint32_t start, uint32_t nslots)
   1.757 +{
   1.758 +    JS_ASSERT(obj->isNative());
   1.759 +    for (uint32_t i = start; i < (start + nslots); ++i) {
   1.760 +        trc->setTracingDetails(js_GetObjectSlotName, obj, i);
   1.761 +        MarkValueInternal(trc, obj->nativeGetSlotRef(i).unsafeGet());
   1.762 +    }
   1.763 +}
   1.764 +
/*
 * Decide whether a cross-compartment edge from |src| to |cell| should be
 * traced by |trc|. Non-marking tracers always trace; the GC marker filters
 * by the destination zone's collection state and the current mark color.
 */
static bool
ShouldMarkCrossCompartment(JSTracer *trc, JSObject *src, Cell *cell)
{
    if (!IS_GC_MARKING_TRACER(trc))
        return true;

    uint32_t color = AsGCMarker(trc)->getMarkColor();
    JS_ASSERT(color == BLACK || color == GRAY);

    /* Nursery things are never marked through this path. */
    if (IsInsideNursery(trc->runtime(), cell)) {
        JS_ASSERT(color == BLACK);
        return false;
    }

    JS::Zone *zone = cell->tenuredZone();
    if (color == BLACK) {
        /*
         * Having black->gray edges violates our promise to the cycle
         * collector. This can happen if we're collecting a compartment and it
         * has an edge to an uncollected compartment: it's possible that the
         * source and destination of the cross-compartment edge should be gray,
         * but the source was marked black by the conservative scanner.
         */
        if (cell->isMarked(GRAY)) {
            JS_ASSERT(!zone->isCollecting());
            trc->runtime()->gcFoundBlackGrayEdges = true;
        }
        return zone->isGCMarking();
    } else {
        if (zone->isGCMarkingBlack()) {
            /*
             * The destination compartment is not being marked gray now,
             * but it will be later, so record the cell so it can be marked
             * gray at the appropriate time.
             */
            if (!cell->isMarked())
                DelayCrossCompartmentGrayMarking(src);
            return false;
        }
        return zone->isGCMarkingGray();
    }
}
   1.807 +
/* Mark |*dst| through a cross-compartment edge from |src|, if policy allows. */
void
gc::MarkCrossCompartmentObjectUnbarriered(JSTracer *trc, JSObject *src, JSObject **dst, const char *name)
{
    if (ShouldMarkCrossCompartment(trc, src, *dst))
        MarkObjectUnbarriered(trc, dst, name);
}
   1.814 +
/* Mark script |*dst| through a cross-compartment edge from |src|, if policy allows. */
void
gc::MarkCrossCompartmentScriptUnbarriered(JSTracer *trc, JSObject *src, JSScript **dst,
                                          const char *name)
{
    if (ShouldMarkCrossCompartment(trc, src, *dst))
        MarkScriptUnbarriered(trc, dst, name);
}
   1.822 +
/* Mark slot |*dst| through a cross-compartment edge from |src|, if it holds a markable thing. */
void
gc::MarkCrossCompartmentSlot(JSTracer *trc, JSObject *src, HeapSlot *dst, const char *name)
{
    if (dst->isMarkable() && ShouldMarkCrossCompartment(trc, src, (Cell *)dst->toGCThing()))
        MarkSlot(trc, dst, name);
}
   1.829 +
   1.830 +/*** Special Marking ***/
   1.831 +
/* Mark the object held in |*thingp|; |name| is reported to the tracer. */
void
gc::MarkObject(JSTracer *trc, HeapPtr<GlobalObject, JSScript *> *thingp, const char *name)
{
    trc->setTracingName(name);
    MarkInternal(trc, thingp->unsafeGet());
}
   1.838 +
/* Mark an unbarriered Value; |name| is reported to the tracer. */
void
gc::MarkValueUnbarriered(JSTracer *trc, Value *v, const char *name)
{
    trc->setTracingName(name);
    MarkValueInternal(trc, v);
}
   1.845 +
/* Query whether the cell |*thingp| is marked. */
bool
gc::IsCellMarked(Cell **thingp)
{
    return IsMarked<Cell>(thingp);
}
   1.851 +
/* Query whether the cell |*thingp| is about to be finalized. */
bool
gc::IsCellAboutToBeFinalized(Cell **thingp)
{
    return IsAboutToBeFinalized<Cell>(thingp);
}
   1.857 +
   1.858 +/*** Push Mark Stack ***/
   1.859 +
   1.860 +#define JS_COMPARTMENT_ASSERT(rt, thing)                                \
   1.861 +    JS_ASSERT((thing)->zone()->isGCMarking())
   1.862 +
   1.863 +#define JS_COMPARTMENT_ASSERT_STR(rt, thing)                            \
   1.864 +    JS_ASSERT((thing)->zone()->isGCMarking() ||                         \
   1.865 +              (rt)->isAtomsZone((thing)->zone()));
   1.866 +
   1.867 +static void
   1.868 +PushMarkStack(GCMarker *gcmarker, ObjectImpl *thing)
   1.869 +{
   1.870 +    JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
   1.871 +    JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing));
   1.872 +
   1.873 +    if (thing->markIfUnmarked(gcmarker->getMarkColor()))
   1.874 +        gcmarker->pushObject(thing);
   1.875 +}
   1.876 +
   1.877 +/*
   1.878 + * PushMarkStack for BaseShape unpacks its children directly onto the mark
   1.879 + * stack. For a pre-barrier between incremental slices, this may result in
   1.880 + * objects in the nursery getting pushed onto the mark stack. It is safe to
   1.881 + * ignore these objects because they will be marked by the matching
   1.882 + * post-barrier during the minor GC at the start of each incremental slice.
   1.883 + */
static void
MaybePushMarkStackBetweenSlices(GCMarker *gcmarker, JSObject *thing)
{
    JSRuntime *rt = gcmarker->runtime();
    JS_COMPARTMENT_ASSERT(rt, thing);
    JS_ASSERT_IF(rt->isHeapBusy(), !IsInsideNursery(rt, thing));

    /* Nursery objects are skipped here; see the comment above this function. */
    if (!IsInsideNursery(rt, thing) && thing->markIfUnmarked(gcmarker->getMarkColor()))
        gcmarker->pushObject(thing);
}
   1.894 +
   1.895 +static void
   1.896 +PushMarkStack(GCMarker *gcmarker, JSFunction *thing)
   1.897 +{
   1.898 +    JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
   1.899 +    JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing));
   1.900 +
   1.901 +    if (thing->markIfUnmarked(gcmarker->getMarkColor()))
   1.902 +        gcmarker->pushObject(thing);
   1.903 +}
   1.904 +
   1.905 +static void
   1.906 +PushMarkStack(GCMarker *gcmarker, types::TypeObject *thing)
   1.907 +{
   1.908 +    JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
   1.909 +    JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing));
   1.910 +
   1.911 +    if (thing->markIfUnmarked(gcmarker->getMarkColor()))
   1.912 +        gcmarker->pushType(thing);
   1.913 +}
   1.914 +
   1.915 +static void
   1.916 +PushMarkStack(GCMarker *gcmarker, JSScript *thing)
   1.917 +{
   1.918 +    JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
   1.919 +    JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing));
   1.920 +
   1.921 +    /*
   1.922 +     * We mark scripts directly rather than pushing on the stack as they can
   1.923 +     * refer to other scripts only indirectly (like via nested functions) and
   1.924 +     * we cannot get to deep recursion.
   1.925 +     */
   1.926 +    if (thing->markIfUnmarked(gcmarker->getMarkColor()))
   1.927 +        MarkChildren(gcmarker, thing);
   1.928 +}
   1.929 +
   1.930 +static void
   1.931 +PushMarkStack(GCMarker *gcmarker, LazyScript *thing)
   1.932 +{
   1.933 +    JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
   1.934 +    JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing));
   1.935 +
   1.936 +    /*
   1.937 +     * We mark lazy scripts directly rather than pushing on the stack as they
   1.938 +     * only refer to normal scripts and to strings, and cannot recurse.
   1.939 +     */
   1.940 +    if (thing->markIfUnmarked(gcmarker->getMarkColor()))
   1.941 +        MarkChildren(gcmarker, thing);
   1.942 +}
   1.943 +
   1.944 +static void
   1.945 +ScanShape(GCMarker *gcmarker, Shape *shape);
   1.946 +
   1.947 +static void
   1.948 +PushMarkStack(GCMarker *gcmarker, Shape *thing)
   1.949 +{
   1.950 +    JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
   1.951 +    JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing));
   1.952 +
   1.953 +    /* We mark shapes directly rather than pushing on the stack. */
   1.954 +    if (thing->markIfUnmarked(gcmarker->getMarkColor()))
   1.955 +        ScanShape(gcmarker, thing);
   1.956 +}
   1.957 +
   1.958 +static void
   1.959 +PushMarkStack(GCMarker *gcmarker, jit::JitCode *thing)
   1.960 +{
   1.961 +    JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
   1.962 +    JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing));
   1.963 +
   1.964 +    if (thing->markIfUnmarked(gcmarker->getMarkColor()))
   1.965 +        gcmarker->pushJitCode(thing);
   1.966 +}
   1.967 +
   1.968 +static inline void
   1.969 +ScanBaseShape(GCMarker *gcmarker, BaseShape *base);
   1.970 +
   1.971 +static void
   1.972 +PushMarkStack(GCMarker *gcmarker, BaseShape *thing)
   1.973 +{
   1.974 +    JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
   1.975 +    JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing));
   1.976 +
   1.977 +    /* We mark base shapes directly rather than pushing on the stack. */
   1.978 +    if (thing->markIfUnmarked(gcmarker->getMarkColor()))
   1.979 +        ScanBaseShape(gcmarker, thing);
   1.980 +}
   1.981 +
   1.982 +static void
   1.983 +ScanShape(GCMarker *gcmarker, Shape *shape)
   1.984 +{
   1.985 +  restart:
   1.986 +    PushMarkStack(gcmarker, shape->base());
   1.987 +
   1.988 +    const BarrieredId &id = shape->propidRef();
   1.989 +    if (JSID_IS_STRING(id))
   1.990 +        PushMarkStack(gcmarker, JSID_TO_STRING(id));
   1.991 +    else if (MOZ_UNLIKELY(JSID_IS_OBJECT(id)))
   1.992 +        PushMarkStack(gcmarker, JSID_TO_OBJECT(id));
   1.993 +
   1.994 +    shape = shape->previous();
   1.995 +    if (shape && shape->markIfUnmarked(gcmarker->getMarkColor()))
   1.996 +        goto restart;
   1.997 +}
   1.998 +
/*
 * Eagerly scan everything reachable from a base shape: getter/setter
 * objects, the parent (or the compartment's global when there is none),
 * object metadata, and the unowned base shape if this one is owned.
 */
static inline void
ScanBaseShape(GCMarker *gcmarker, BaseShape *base)
{
    base->assertConsistency();

    /* Keep the owning compartment alive. */
    base->compartment()->mark();

    if (base->hasGetterObject())
        MaybePushMarkStackBetweenSlices(gcmarker, base->getterObject());

    if (base->hasSetterObject())
        MaybePushMarkStackBetweenSlices(gcmarker, base->setterObject());

    if (JSObject *parent = base->getObjectParent()) {
        MaybePushMarkStackBetweenSlices(gcmarker, parent);
    } else if (GlobalObject *global = base->compartment()->maybeGlobal()) {
        PushMarkStack(gcmarker, global);
    }

    if (JSObject *metadata = base->getObjectMetadata())
        MaybePushMarkStackBetweenSlices(gcmarker, metadata);

    /*
     * All children of the owned base shape are consistent with its
     * unowned one, thus we do not need to trace through children of the
     * unowned base shape.
     */
    if (base->isOwned()) {
        UnownedBaseShape *unowned = base->baseUnowned();
        JS_ASSERT(base->compartment() == unowned->compartment());
        unowned->markIfUnmarked(gcmarker->getMarkColor());
    }
}
  1.1032 +
/*
 * Scan an already-marked linear string by marking its chain of base strings
 * (for dependent strings). The walk stops at a permanent atom or at the
 * first base that was already marked.
 */
static inline void
ScanLinearString(GCMarker *gcmarker, JSLinearString *str)
{
    JS_COMPARTMENT_ASSERT_STR(gcmarker->runtime(), str);
    JS_ASSERT(str->isMarked());

    /*
     * Add extra asserts to confirm the static type to detect incorrect string
     * mutations.
     */
    JS_ASSERT(str->JSString::isLinear());
    while (str->hasBase()) {
        str = str->base();
        JS_ASSERT(str->JSString::isLinear());
        if (str->isPermanentAtom())
            break;
        JS_COMPARTMENT_ASSERT_STR(gcmarker->runtime(), str);
        if (!str->markIfUnmarked())
            break;
    }
}
  1.1054 +
  1.1055 +/*
  1.1056 + * The function tries to scan the whole rope tree using the marking stack as
  1.1057 + * temporary storage. If that becomes full, the unscanned ropes are added to
  1.1058 + * the delayed marking list. When the function returns, the marking stack is
  1.1059 + * at the same depth as it was on entry. This way we avoid using tags when
  1.1060 + * pushing ropes to the stack as ropes never leaks to other users of the
  1.1061 + * stack. This also assumes that a rope can only point to other ropes or
  1.1062 + * linear strings, it cannot refer to GC things of other types.
  1.1063 + */
static void
ScanRope(GCMarker *gcmarker, JSRope *rope)
{
    /* Remember the stack depth so it can be restored before returning. */
    ptrdiff_t savedPos = gcmarker->stack.position();
    JS_DIAGNOSTICS_ASSERT(GetGCThingTraceKind(rope) == JSTRACE_STRING);
    for (;;) {
        JS_DIAGNOSTICS_ASSERT(GetGCThingTraceKind(rope) == JSTRACE_STRING);
        JS_DIAGNOSTICS_ASSERT(rope->JSString::isRope());
        JS_COMPARTMENT_ASSERT_STR(gcmarker->runtime(), rope);
        JS_ASSERT(rope->isMarked());
        /* |next| is the rope child (if any) to descend into after this one. */
        JSRope *next = nullptr;

        JSString *right = rope->rightChild();
        if (!right->isPermanentAtom() && right->markIfUnmarked()) {
            if (right->isLinear())
                ScanLinearString(gcmarker, &right->asLinear());
            else
                next = &right->asRope();
        }

        JSString *left = rope->leftChild();
        if (!left->isPermanentAtom() && left->markIfUnmarked()) {
            if (left->isLinear()) {
                ScanLinearString(gcmarker, &left->asLinear());
            } else {
                /*
                 * When both children are ropes, set aside the right one to
                 * scan it later.
                 */
                if (next && !gcmarker->stack.push(reinterpret_cast<uintptr_t>(next)))
                    gcmarker->delayMarkingChildren(next);
                next = &left->asRope();
            }
        }
        if (next) {
            rope = next;
        } else if (savedPos != gcmarker->stack.position()) {
            /* No child to descend into; pop a rope set aside earlier. */
            JS_ASSERT(savedPos < gcmarker->stack.position());
            rope = reinterpret_cast<JSRope *>(gcmarker->stack.pop());
        } else {
            break;
        }
    }
    JS_ASSERT(savedPos == gcmarker->stack.position());
}
  1.1109 +
  1.1110 +static inline void
  1.1111 +ScanString(GCMarker *gcmarker, JSString *str)
  1.1112 +{
  1.1113 +    if (str->isLinear())
  1.1114 +        ScanLinearString(gcmarker, &str->asLinear());
  1.1115 +    else
  1.1116 +        ScanRope(gcmarker, &str->asRope());
  1.1117 +}
  1.1118 +
  1.1119 +static inline void
  1.1120 +PushMarkStack(GCMarker *gcmarker, JSString *str)
  1.1121 +{
  1.1122 +    // Permanent atoms might not be associated with this runtime.
  1.1123 +    if (str->isPermanentAtom())
  1.1124 +        return;
  1.1125 +
  1.1126 +    JS_COMPARTMENT_ASSERT_STR(gcmarker->runtime(), str);
  1.1127 +
  1.1128 +    /*
  1.1129 +     * As string can only refer to other strings we fully scan its GC graph
  1.1130 +     * using the explicit stack when navigating the rope tree to avoid
  1.1131 +     * dealing with strings on the stack in drainMarkStack.
  1.1132 +     */
  1.1133 +    if (str->markIfUnmarked())
  1.1134 +        ScanString(gcmarker, str);
  1.1135 +}
  1.1136 +
/* Trace all children of |obj| via the object's markChildren method. */
void
gc::MarkChildren(JSTracer *trc, JSObject *obj)
{
    obj->markChildren(trc);
}
  1.1142 +
  1.1143 +static void
  1.1144 +gc::MarkChildren(JSTracer *trc, JSString *str)
  1.1145 +{
  1.1146 +    if (str->hasBase())
  1.1147 +        str->markBase(trc);
  1.1148 +    else if (str->isRope())
  1.1149 +        str->asRope().markChildren(trc);
  1.1150 +}
  1.1151 +
/* Trace all children of |script| via the script's markChildren method. */
static void
gc::MarkChildren(JSTracer *trc, JSScript *script)
{
    script->markChildren(trc);
}
  1.1157 +
/* Trace all children of |lazy| via the lazy script's markChildren method. */
static void
gc::MarkChildren(JSTracer *trc, LazyScript *lazy)
{
    lazy->markChildren(trc);
}
  1.1163 +
/* Trace all children of |shape| via the shape's markChildren method. */
static void
gc::MarkChildren(JSTracer *trc, Shape *shape)
{
    shape->markChildren(trc);
}
  1.1169 +
/* Trace all children of |base| via the base shape's markChildren method. */
static void
gc::MarkChildren(JSTracer *trc, BaseShape *base)
{
    base->markChildren(trc);
}
  1.1175 +
  1.1176 +/*
  1.1177 + * This function is used by the cycle collector to trace through the
  1.1178 + * children of a BaseShape (and its baseUnowned(), if any). The cycle
  1.1179 + * collector does not directly care about BaseShapes, so only the
  1.1180 + * getter, setter, and parent are marked. Furthermore, the parent is
  1.1181 + * marked only if it isn't the same as prevParent, which will be
  1.1182 + * updated to the current shape's parent.
  1.1183 + */
static inline void
MarkCycleCollectorChildren(JSTracer *trc, BaseShape *base, JSObject **prevParent)
{
    JS_ASSERT(base);

    /*
     * The cycle collector does not need to trace unowned base shapes,
     * as they have the same getter, setter and parent as the original
     * base shape.
     */
    base->assertConsistency();

    if (base->hasGetterObject()) {
        JSObject *tmp = base->getterObject();
        MarkObjectUnbarriered(trc, &tmp, "getter");
        /* Marking through |tmp| must not have changed the stored pointer. */
        JS_ASSERT(tmp == base->getterObject());
    }

    if (base->hasSetterObject()) {
        JSObject *tmp = base->setterObject();
        MarkObjectUnbarriered(trc, &tmp, "setter");
        JS_ASSERT(tmp == base->setterObject());
    }

    /* Skip the parent if it matches the previously marked one (see above). */
    JSObject *parent = base->getObjectParent();
    if (parent && parent != *prevParent) {
        MarkObjectUnbarriered(trc, &parent, "parent");
        JS_ASSERT(parent == base->getObjectParent());
        *prevParent = parent;
    }
}
  1.1215 +
  1.1216 +/*
  1.1217 + * This function is used by the cycle collector to trace through a
  1.1218 + * shape. The cycle collector does not care about shapes or base
  1.1219 + * shapes, so those are not marked. Instead, any shapes or base shapes
  1.1220 + * that are encountered have their children marked. Stack space is
  1.1221 + * bounded. If two shapes in a row have the same parent pointer, the
  1.1222 + * parent pointer will only be marked once.
  1.1223 + */
  1.1224 +void
  1.1225 +gc::MarkCycleCollectorChildren(JSTracer *trc, Shape *shape)
  1.1226 +{
  1.1227 +    JSObject *prevParent = nullptr;
  1.1228 +    do {
  1.1229 +        MarkCycleCollectorChildren(trc, shape->base(), &prevParent);
  1.1230 +        MarkId(trc, &shape->propidRef(), "propid");
  1.1231 +        shape = shape->previous();
  1.1232 +    } while (shape);
  1.1233 +}
  1.1234 +
/*
 * Eagerly scan a type object's children, pushing reachable things onto the
 * mark stack: string property ids, the prototype, the singleton, new-script
 * or typed-object references, and the interpreted function.
 */
static void
ScanTypeObject(GCMarker *gcmarker, types::TypeObject *type)
{
    unsigned count = type->getPropertyCount();
    for (unsigned i = 0; i < count; i++) {
        types::Property *prop = type->getProperty(i);
        if (prop && JSID_IS_STRING(prop->id))
            PushMarkStack(gcmarker, JSID_TO_STRING(prop->id));
    }

    if (type->proto().isObject())
        PushMarkStack(gcmarker, type->proto().toObject());

    if (type->singleton() && !type->lazy())
        PushMarkStack(gcmarker, type->singleton());

    if (type->hasNewScript()) {
        PushMarkStack(gcmarker, type->newScript()->fun);
        PushMarkStack(gcmarker, type->newScript()->templateObject);
    } else if (type->hasTypedObject()) {
        PushMarkStack(gcmarker, type->typedObject()->descrHeapPtr());
    }

    if (type->interpretedFunction)
        PushMarkStack(gcmarker, type->interpretedFunction);
}
  1.1261 +
/*
 * Tracer-based counterpart of ScanTypeObject: trace the same set of type
 * object children through the generic Mark* entry points.
 */
static void
gc::MarkChildren(JSTracer *trc, types::TypeObject *type)
{
    unsigned count = type->getPropertyCount();
    for (unsigned i = 0; i < count; i++) {
        types::Property *prop = type->getProperty(i);
        if (prop)
            MarkId(trc, &prop->id, "type_prop");
    }

    if (type->proto().isObject())
        MarkObject(trc, &type->protoRaw(), "type_proto");

    if (type->singleton() && !type->lazy())
        MarkObject(trc, &type->singletonRaw(), "type_singleton");

    if (type->hasNewScript()) {
        MarkObject(trc, &type->newScript()->fun, "type_new_function");
        MarkObject(trc, &type->newScript()->templateObject, "type_new_template");
    } else if (type->hasTypedObject()) {
        MarkObject(trc, &type->typedObject()->descrHeapPtr(), "type_heap_ptr");
    }

    if (type->interpretedFunction)
        MarkObject(trc, &type->interpretedFunction, "type_function");
}
  1.1288 +
/* Trace through JIT code children; a no-op when Ion is not compiled in. */
static void
gc::MarkChildren(JSTracer *trc, jit::JitCode *code)
{
#ifdef JS_ION
    code->trace(trc);
#endif
}
  1.1296 +
/* Push every cell in |aheader|'s arena onto the mark stack, viewed as type T. */
template<typename T>
static void
PushArenaTyped(GCMarker *gcmarker, ArenaHeader *aheader)
{
    for (CellIterUnderGC i(aheader); !i.done(); i.next())
        PushMarkStack(gcmarker, i.get<T>());
}
  1.1304 +
/* Push every cell in an arena onto the mark stack, dispatching on its trace kind. */
void
gc::PushArena(GCMarker *gcmarker, ArenaHeader *aheader)
{
    switch (MapAllocToTraceKind(aheader->getAllocKind())) {
      case JSTRACE_OBJECT:
        PushArenaTyped<JSObject>(gcmarker, aheader);
        break;

      case JSTRACE_STRING:
        PushArenaTyped<JSString>(gcmarker, aheader);
        break;

      case JSTRACE_SCRIPT:
        PushArenaTyped<JSScript>(gcmarker, aheader);
        break;

      case JSTRACE_LAZY_SCRIPT:
        PushArenaTyped<LazyScript>(gcmarker, aheader);
        break;

      case JSTRACE_SHAPE:
        PushArenaTyped<js::Shape>(gcmarker, aheader);
        break;

      case JSTRACE_BASE_SHAPE:
        PushArenaTyped<js::BaseShape>(gcmarker, aheader);
        break;

      case JSTRACE_TYPE_OBJECT:
        PushArenaTyped<js::types::TypeObject>(gcmarker, aheader);
        break;

      case JSTRACE_JITCODE:
        PushArenaTyped<js::jit::JitCode>(gcmarker, aheader);
        break;
    }
}
  1.1342 +
/*
 * Overlay describing how a value-array entry sits on the mark stack: three
 * words (end-or-kind, start-or-index, object). The unions let saveValueRanges
 * rewrite slot pointers as indices in place; see the comment below.
 */
struct SlotArrayLayout
{
    union {
        HeapSlot *end;
        uintptr_t kind;
    };
    union {
        HeapSlot *start;
        uintptr_t index;
    };
    JSObject *obj;

    static void staticAsserts() {
        /* This should have the same layout as three mark stack items. */
        JS_STATIC_ASSERT(sizeof(SlotArrayLayout) == 3 * sizeof(uintptr_t));
    }
};
  1.1360 +
  1.1361 +/*
  1.1362 + * During incremental GC, we return from drainMarkStack without having processed
  1.1363 + * the entire stack. At that point, JS code can run and reallocate slot arrays
  1.1364 + * that are stored on the stack. To prevent this from happening, we replace all
  1.1365 + * ValueArrayTag stack items with SavedValueArrayTag. In the latter, slots
  1.1366 + * pointers are replaced with slot indexes, and slot array end pointers are
  1.1367 + * replaced with the kind of index (properties vs. elements).
  1.1368 + */
  1.1369 +void
  1.1370 +GCMarker::saveValueRanges()
  1.1371 +{
  1.1372 +    for (uintptr_t *p = stack.tos_; p > stack.stack_; ) {
  1.1373 +        uintptr_t tag = *--p & StackTagMask;
  1.1374 +        if (tag == ValueArrayTag) {
  1.1375 +            *p &= ~StackTagMask;
  1.1376 +            p -= 2;
  1.1377 +            SlotArrayLayout *arr = reinterpret_cast<SlotArrayLayout *>(p);
  1.1378 +            JSObject *obj = arr->obj;
  1.1379 +            JS_ASSERT(obj->isNative());
  1.1380 +
  1.1381 +            HeapSlot *vp = obj->getDenseElements();
  1.1382 +            if (arr->end == vp + obj->getDenseInitializedLength()) {
  1.1383 +                JS_ASSERT(arr->start >= vp);
  1.1384 +                arr->index = arr->start - vp;
  1.1385 +                arr->kind = HeapSlot::Element;
  1.1386 +            } else {
  1.1387 +                HeapSlot *vp = obj->fixedSlots();
  1.1388 +                unsigned nfixed = obj->numFixedSlots();
  1.1389 +                if (arr->start == arr->end) {
  1.1390 +                    arr->index = obj->slotSpan();
  1.1391 +                } else if (arr->start >= vp && arr->start < vp + nfixed) {
  1.1392 +                    JS_ASSERT(arr->end == vp + Min(nfixed, obj->slotSpan()));
  1.1393 +                    arr->index = arr->start - vp;
  1.1394 +                } else {
  1.1395 +                    JS_ASSERT(arr->start >= obj->slots &&
  1.1396 +                              arr->end == obj->slots + obj->slotSpan() - nfixed);
  1.1397 +                    arr->index = (arr->start - obj->slots) + nfixed;
  1.1398 +                }
  1.1399 +                arr->kind = HeapSlot::Slot;
  1.1400 +            }
  1.1401 +            p[2] |= SavedValueArrayTag;
  1.1402 +        } else if (tag == SavedValueArrayTag) {
  1.1403 +            p -= 2;
  1.1404 +        }
  1.1405 +    }
  1.1406 +}
  1.1407 +
/*
 * Rebuild the [*vpp, *endp) slot range for |obj| from the index and kind
 * previously pushed by saveValueRanges(). Returns false when the kind is
 * Element but |obj| is no longer an ArrayObject; the caller then rescans
 * the whole object instead.
 */
bool
GCMarker::restoreValueArray(JSObject *obj, void **vpp, void **endp)
{
    uintptr_t start = stack.pop();
    HeapSlot::Kind kind = (HeapSlot::Kind) stack.pop();

    if (kind == HeapSlot::Element) {
        if (!obj->is<ArrayObject>())
            return false;

        uint32_t initlen = obj->getDenseInitializedLength();
        HeapSlot *vp = obj->getDenseElements();
        if (start < initlen) {
            *vpp = vp + start;
            *endp = vp + initlen;
        } else {
            /* The object shrunk, in which case no scanning is needed. */
            *vpp = *endp = vp;
        }
    } else {
        JS_ASSERT(kind == HeapSlot::Slot);
        HeapSlot *vp = obj->fixedSlots();
        unsigned nfixed = obj->numFixedSlots();
        unsigned nslots = obj->slotSpan();
        if (start < nslots) {
            /* Resume within fixed slots or within the dynamic slot array. */
            if (start < nfixed) {
                *vpp = vp + start;
                *endp = vp + Min(nfixed, nslots);
            } else {
                *vpp = obj->slots + start - nfixed;
                *endp = obj->slots + nslots - nfixed;
            }
        } else {
            /* The object shrunk, in which case no scanning is needed. */
            *vpp = *endp = vp;
        }
    }

    JS_ASSERT(*vpp <= *endp);
    return true;
}
  1.1449 +
/*
 * Handle the mark-stack tags not processed inline by processMarkStackTop:
 * type objects, saved value arrays, and JIT code.
 */
void
GCMarker::processMarkStackOther(uintptr_t tag, uintptr_t addr)
{
    if (tag == TypeTag) {
        ScanTypeObject(this, reinterpret_cast<types::TypeObject *>(addr));
    } else if (tag == SavedValueArrayTag) {
        JS_ASSERT(!(addr & CellMask));
        JSObject *obj = reinterpret_cast<JSObject *>(addr);
        HeapValue *vp, *end;
        /* Re-derive the slot range; fall back to rescanning the whole object. */
        if (restoreValueArray(obj, (void **)&vp, (void **)&end))
            pushValueArray(obj, vp, end);
        else
            pushObject(obj);
    } else if (tag == JitCodeTag) {
        MarkChildren(this, reinterpret_cast<jit::JitCode *>(addr));
    }
}
  1.1467 +
/*
 * Pop one entry off the mark stack and scan it. Value arrays and objects are
 * handled inline here; all other tags are forwarded to
 * processMarkStackOther().
 */
inline void
GCMarker::processMarkStackTop(SliceBudget &budget)
{
    /*
     * The function uses explicit goto and implements the scanning of the
     * object directly. It allows to eliminate the tail recursion and
     * significantly improve the marking performance, see bug 641025.
     */
    HeapSlot *vp, *end;
    JSObject *obj;

    uintptr_t addr = stack.pop();
    uintptr_t tag = addr & StackTagMask;
    addr &= ~StackTagMask;

    if (tag == ValueArrayTag) {
        /* A value array occupies three stack words: object, begin, end. */
        JS_STATIC_ASSERT(ValueArrayTag == 0);
        JS_ASSERT(!(addr & CellMask));
        obj = reinterpret_cast<JSObject *>(addr);
        uintptr_t addr2 = stack.pop();
        uintptr_t addr3 = stack.pop();
        JS_ASSERT(addr2 <= addr3);
        JS_ASSERT((addr3 - addr2) % sizeof(Value) == 0);
        vp = reinterpret_cast<HeapSlot *>(addr2);
        end = reinterpret_cast<HeapSlot *>(addr3);
        goto scan_value_array;
    }

    if (tag == ObjectTag) {
        obj = reinterpret_cast<JSObject *>(addr);
        JS_COMPARTMENT_ASSERT(runtime(), obj);
        goto scan_obj;
    }

    processMarkStackOther(tag, addr);
    return;

  scan_value_array:
    /* Mark each value in [vp, end); descend eagerly into unmarked objects. */
    JS_ASSERT(vp <= end);
    while (vp != end) {
        const Value &v = *vp++;
        if (v.isString()) {
            JSString *str = v.toString();
            if (!str->isPermanentAtom()) {
                JS_COMPARTMENT_ASSERT_STR(runtime(), str);
                JS_ASSERT(runtime()->isAtomsZone(str->zone()) || str->zone() == obj->zone());
                if (str->markIfUnmarked())
                    ScanString(this, str);
            }
        } else if (v.isObject()) {
            JSObject *obj2 = &v.toObject();
            JS_COMPARTMENT_ASSERT(runtime(), obj2);
            JS_ASSERT(obj->compartment() == obj2->compartment());
            if (obj2->markIfUnmarked(getMarkColor())) {
                /*
                 * Save the rest of this array so we can resume it later, then
                 * scan the newly marked object without recursing.
                 */
                pushValueArray(obj, vp, end);
                obj = obj2;
                goto scan_obj;
            }
        }
    }
    return;

  scan_obj:
    {
        JS_COMPARTMENT_ASSERT(runtime(), obj);

        /* Count this object against the slice budget; repush if over. */
        budget.step();
        if (budget.isOverBudget()) {
            pushObject(obj);
            return;
        }

        types::TypeObject *type = obj->typeFromGC();
        PushMarkStack(this, type);

        Shape *shape = obj->lastProperty();
        PushMarkStack(this, shape);

        /* Call the trace hook if necessary. */
        const Class *clasp = type->clasp();
        if (clasp->trace) {
            // Global objects all have the same trace hook. That hook is safe without barriers
            // if the global has no custom trace hook of its own, or has been moved to a different
            // compartment, and so can't have one.
            JS_ASSERT_IF(runtime()->gcMode() == JSGC_MODE_INCREMENTAL &&
                         runtime()->gcIncrementalEnabled &&
                         !(clasp->trace == JS_GlobalObjectTraceHook &&
                           (!obj->compartment()->options().getTrace() ||
                            !obj->isOwnGlobal())),
                         clasp->flags & JSCLASS_IMPLEMENTS_BARRIERS);
            clasp->trace(this, obj);
        }

        /* Non-native objects store no slots of their own. */
        if (!shape->isNative())
            return;

        unsigned nslots = obj->slotSpan();

        /* Dense elements, if any, are scanned as a value array. */
        if (!obj->hasEmptyElements()) {
            vp = obj->getDenseElements();
            end = vp + obj->getDenseInitializedLength();
            if (!nslots)
                goto scan_value_array;
            pushValueArray(obj, vp, end);
        }

        /*
         * Slots live in two segments: fixed slots inline in the object, and
         * dynamic slots in obj->slots. Scan the larger/later segment directly
         * and push the other if both are in use.
         */
        vp = obj->fixedSlots();
        if (obj->slots) {
            unsigned nfixed = obj->numFixedSlots();
            if (nslots > nfixed) {
                pushValueArray(obj, vp, vp + nfixed);
                vp = obj->slots;
                end = vp + (nslots - nfixed);
                goto scan_value_array;
            }
        }
        JS_ASSERT(nslots <= obj->numFixedSlots());
        end = vp + nslots;
        goto scan_value_array;
    }
}
  1.1589 +
  1.1590 +bool
  1.1591 +GCMarker::drainMarkStack(SliceBudget &budget)
  1.1592 +{
  1.1593 +#ifdef DEBUG
  1.1594 +    JSRuntime *rt = runtime();
  1.1595 +
  1.1596 +    struct AutoCheckCompartment {
  1.1597 +        JSRuntime *runtime;
  1.1598 +        AutoCheckCompartment(JSRuntime *rt) : runtime(rt) {
  1.1599 +            JS_ASSERT(!rt->gcStrictCompartmentChecking);
  1.1600 +            runtime->gcStrictCompartmentChecking = true;
  1.1601 +        }
  1.1602 +        ~AutoCheckCompartment() { runtime->gcStrictCompartmentChecking = false; }
  1.1603 +    } acc(rt);
  1.1604 +#endif
  1.1605 +
  1.1606 +    if (budget.isOverBudget())
  1.1607 +        return false;
  1.1608 +
  1.1609 +    for (;;) {
  1.1610 +        while (!stack.isEmpty()) {
  1.1611 +            processMarkStackTop(budget);
  1.1612 +            if (budget.isOverBudget()) {
  1.1613 +                saveValueRanges();
  1.1614 +                return false;
  1.1615 +            }
  1.1616 +        }
  1.1617 +
  1.1618 +        if (!hasDelayedChildren())
  1.1619 +            break;
  1.1620 +
  1.1621 +        /*
  1.1622 +         * Mark children of things that caused too deep recursion during the
  1.1623 +         * above tracing. Don't do this until we're done with everything
  1.1624 +         * else.
  1.1625 +         */
  1.1626 +        if (!markDelayedChildren(budget)) {
  1.1627 +            saveValueRanges();
  1.1628 +            return false;
  1.1629 +        }
  1.1630 +    }
  1.1631 +
  1.1632 +    return true;
  1.1633 +}
  1.1634 +
  1.1635 +void
  1.1636 +js::TraceChildren(JSTracer *trc, void *thing, JSGCTraceKind kind)
  1.1637 +{
  1.1638 +    switch (kind) {
  1.1639 +      case JSTRACE_OBJECT:
  1.1640 +        MarkChildren(trc, static_cast<JSObject *>(thing));
  1.1641 +        break;
  1.1642 +
  1.1643 +      case JSTRACE_STRING:
  1.1644 +        MarkChildren(trc, static_cast<JSString *>(thing));
  1.1645 +        break;
  1.1646 +
  1.1647 +      case JSTRACE_SCRIPT:
  1.1648 +        MarkChildren(trc, static_cast<JSScript *>(thing));
  1.1649 +        break;
  1.1650 +
  1.1651 +      case JSTRACE_LAZY_SCRIPT:
  1.1652 +        MarkChildren(trc, static_cast<LazyScript *>(thing));
  1.1653 +        break;
  1.1654 +
  1.1655 +      case JSTRACE_SHAPE:
  1.1656 +        MarkChildren(trc, static_cast<Shape *>(thing));
  1.1657 +        break;
  1.1658 +
  1.1659 +      case JSTRACE_JITCODE:
  1.1660 +        MarkChildren(trc, (js::jit::JitCode *)thing);
  1.1661 +        break;
  1.1662 +
  1.1663 +      case JSTRACE_BASE_SHAPE:
  1.1664 +        MarkChildren(trc, static_cast<BaseShape *>(thing));
  1.1665 +        break;
  1.1666 +
  1.1667 +      case JSTRACE_TYPE_OBJECT:
  1.1668 +        MarkChildren(trc, (types::TypeObject *)thing);
  1.1669 +        break;
  1.1670 +    }
  1.1671 +}
  1.1672 +
  1.1673 +static void
  1.1674 +UnmarkGrayGCThing(void *thing)
  1.1675 +{
  1.1676 +    static_cast<js::gc::Cell *>(thing)->unmark(js::gc::GRAY);
  1.1677 +}
  1.1678 +
  1.1679 +static void
  1.1680 +UnmarkGrayChildren(JSTracer *trc, void **thingp, JSGCTraceKind kind);
  1.1681 +
  1.1682 +struct UnmarkGrayTracer : public JSTracer
  1.1683 +{
  1.1684 +    /*
  1.1685 +     * We set eagerlyTraceWeakMaps to false because the cycle collector will fix
  1.1686 +     * up any color mismatches involving weakmaps when it runs.
  1.1687 +     */
  1.1688 +    UnmarkGrayTracer(JSRuntime *rt)
  1.1689 +      : JSTracer(rt, UnmarkGrayChildren, DoNotTraceWeakMaps),
  1.1690 +        tracingShape(false),
  1.1691 +        previousShape(nullptr),
  1.1692 +        unmarkedAny(false)
  1.1693 +    {}
  1.1694 +
  1.1695 +    UnmarkGrayTracer(JSTracer *trc, bool tracingShape)
  1.1696 +      : JSTracer(trc->runtime(), UnmarkGrayChildren, DoNotTraceWeakMaps),
  1.1697 +        tracingShape(tracingShape),
  1.1698 +        previousShape(nullptr),
  1.1699 +        unmarkedAny(false)
  1.1700 +    {}
  1.1701 +
  1.1702 +    /* True iff we are tracing the immediate children of a shape. */
  1.1703 +    bool tracingShape;
  1.1704 +
  1.1705 +    /* If tracingShape, shape child or nullptr. Otherwise, nullptr. */
  1.1706 +    void *previousShape;
  1.1707 +
  1.1708 +    /* Whether we unmarked anything. */
  1.1709 +    bool unmarkedAny;
  1.1710 +};
  1.1711 +
  1.1712 +/*
  1.1713 + * The GC and CC are run independently. Consequently, the following sequence of
  1.1714 + * events can occur:
  1.1715 + * 1. GC runs and marks an object gray.
  1.1716 + * 2. Some JS code runs that creates a pointer from a JS root to the gray
  1.1717 + *    object. If we re-ran a GC at this point, the object would now be black.
  1.1718 + * 3. Now we run the CC. It may think it can collect the gray object, even
  1.1719 + *    though it's reachable from the JS heap.
  1.1720 + *
  1.1721 + * To prevent this badness, we unmark the gray bit of an object when it is
  1.1722 + * accessed by callers outside XPConnect. This would cause the object to go
  1.1723 + * black in step 2 above. This must be done on everything reachable from the
  1.1724 + * object being returned. The following code takes care of the recursive
  1.1725 + * re-coloring.
  1.1726 + *
  1.1727 + * There is an additional complication for certain kinds of edges that are not
  1.1728 + * contained explicitly in the source object itself, such as from a weakmap key
  1.1729 + * to its value, and from an object being watched by a watchpoint to the
  1.1730 + * watchpoint's closure. These "implicit edges" are represented in some other
  1.1731 + * container object, such as the weakmap or the watchpoint itself. In these
  1.1732 + * cases, calling unmark gray on an object won't find all of its children.
  1.1733 + *
  1.1734 + * Handling these implicit edges has two parts:
  1.1735 + * - A special pass enumerating all of the containers that know about the
  1.1736 + *   implicit edges to fix any black-gray edges that have been created. This
  1.1737 + *   is implemented in nsXPConnect::FixWeakMappingGrayBits.
  1.1738 + * - To prevent any incorrectly gray objects from escaping to live JS outside
  1.1739 + *   of the containers, we must add unmark-graying read barriers to these
  1.1740 + *   containers.
  1.1741 + */
/*
 * Trace callback: unmark |*thingp| (if gray and tenured) and recurse into
 * its children. Shape chains are walked iteratively rather than recursively
 * to bound native stack usage.
 */
static void
UnmarkGrayChildren(JSTracer *trc, void **thingp, JSGCTraceKind kind)
{
    void *thing = *thingp;
    int stackDummy;
    if (!JS_CHECK_STACK_SIZE(trc->runtime()->mainThread.nativeStackLimit[StackForSystemCode], &stackDummy)) {
        /*
         * If we run out of stack, we take a more drastic measure: require that
         * we GC again before the next CC.
         */
        trc->runtime()->gcGrayBitsValid = false;
        return;
    }

    UnmarkGrayTracer *tracer = static_cast<UnmarkGrayTracer *>(trc);
    if (!IsInsideNursery(trc->runtime(), thing)) {
        /* Nursery things have no mark bits; only tenured things can be gray. */
        if (!JS::GCThingIsMarkedGray(thing))
            return;

        UnmarkGrayGCThing(thing);
        tracer->unmarkedAny = true;
    }

    /*
     * Trace children of |thing|. If |thing| and its parent are both shapes,
     * |thing| will get saved to previousShape without being traced. The parent
     * will later trace |thing|. This is done to avoid increasing the stack
     * depth during shape tracing. It is safe to do because a shape can only
     * have one child that is a shape.
     */
    UnmarkGrayTracer childTracer(tracer, kind == JSTRACE_SHAPE);

    if (kind != JSTRACE_SHAPE) {
        JS_TraceChildren(&childTracer, thing, kind);
        JS_ASSERT(!childTracer.previousShape);
        tracer->unmarkedAny |= childTracer.unmarkedAny;
        return;
    }

    if (tracer->tracingShape) {
        /* Defer: the outer shape's loop below will pick this child up. */
        JS_ASSERT(!tracer->previousShape);
        tracer->previousShape = thing;
        return;
    }

    /* First shape in a chain: walk the whole chain iteratively here. */
    do {
        JS_ASSERT(!JS::GCThingIsMarkedGray(thing));
        JS_TraceChildren(&childTracer, thing, JSTRACE_SHAPE);
        thing = childTracer.previousShape;
        childTracer.previousShape = nullptr;
    } while (thing);
    tracer->unmarkedAny |= childTracer.unmarkedAny;
}
  1.1795 +
  1.1796 +JS_FRIEND_API(bool)
  1.1797 +JS::UnmarkGrayGCThingRecursively(void *thing, JSGCTraceKind kind)
  1.1798 +{
  1.1799 +    JS_ASSERT(kind != JSTRACE_SHAPE);
  1.1800 +
  1.1801 +    JSRuntime *rt = static_cast<Cell *>(thing)->runtimeFromMainThread();
  1.1802 +
  1.1803 +    bool unmarkedArg = false;
  1.1804 +    if (!IsInsideNursery(rt, thing)) {
  1.1805 +        if (!JS::GCThingIsMarkedGray(thing))
  1.1806 +            return false;
  1.1807 +
  1.1808 +        UnmarkGrayGCThing(thing);
  1.1809 +        unmarkedArg = true;
  1.1810 +    }
  1.1811 +
  1.1812 +    UnmarkGrayTracer trc(rt);
  1.1813 +    JS_TraceChildren(&trc, thing, kind);
  1.1814 +
  1.1815 +    return unmarkedArg || trc.unmarkedAny;
  1.1816 +}

mercurial