js/src/gc/Marking.cpp

Thu, 22 Jan 2015 13:21:57 +0100

author
Michael Schloh von Bennewitz <michael@schloh.com>
date
Thu, 22 Jan 2015 13:21:57 +0100
branch
TOR_BUG_9701
changeset 15
b8a032363ba2
permissions
-rw-r--r--

Incorporate requested changes from Mozilla in review:
https://bugzilla.mozilla.org/show_bug.cgi?id=1123480#c6

michael@0 1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
michael@0 2 * vim: set ts=8 sts=4 et sw=4 tw=99:
michael@0 3 * This Source Code Form is subject to the terms of the Mozilla Public
michael@0 4 * License, v. 2.0. If a copy of the MPL was not distributed with this
michael@0 5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
michael@0 6
michael@0 7 #include "gc/Marking.h"
michael@0 8
michael@0 9 #include "mozilla/DebugOnly.h"
michael@0 10
michael@0 11 #include "jit/IonCode.h"
michael@0 12 #include "js/SliceBudget.h"
michael@0 13 #include "vm/ArgumentsObject.h"
michael@0 14 #include "vm/ScopeObject.h"
michael@0 15 #include "vm/Shape.h"
michael@0 16 #include "vm/TypedArrayObject.h"
michael@0 17
michael@0 18 #include "jscompartmentinlines.h"
michael@0 19 #include "jsinferinlines.h"
michael@0 20 #include "jsobjinlines.h"
michael@0 21
michael@0 22 #ifdef JSGC_GENERATIONAL
michael@0 23 # include "gc/Nursery-inl.h"
michael@0 24 #endif
michael@0 25 #include "vm/String-inl.h"
michael@0 26
michael@0 27 using namespace js;
michael@0 28 using namespace js::gc;
michael@0 29
michael@0 30 using mozilla::DebugOnly;
michael@0 31
/*
 * Storage for the shared null sentinels used by the js::NullPtr and
 * JS::NullPtr handle wrappers; both are permanently nullptr.
 */
void * const js::NullPtr::constNullValue = nullptr;

JS_PUBLIC_DATA(void * const) JS::NullPtr::constNullValue = nullptr;
michael@0 36 /*
michael@0 37 * There are two mostly separate mark paths. The first is a fast path used
michael@0 38 * internally in the GC. The second is a slow path used for root marking and
michael@0 39 * for API consumers like the cycle collector or Class::trace implementations.
michael@0 40 *
michael@0 41 * The fast path uses explicit stacks. The basic marking process during a GC is
michael@0 42 * that all roots are pushed on to a mark stack, and then each item on the
michael@0 43 * stack is scanned (possibly pushing more stuff) until the stack is empty.
michael@0 44 *
michael@0 45 * PushMarkStack pushes a GC thing onto the mark stack. In some cases (shapes
michael@0 46 * or strings) it eagerly marks the object rather than pushing it. Popping and
michael@0 47 * scanning is done by the processMarkStackTop method. For efficiency reasons
michael@0 48 * like tail recursion elimination that method also implements the scanning of
michael@0 49 * objects. For other GC things it uses helper methods.
michael@0 50 *
michael@0 51 * Most of the marking code outside Marking.cpp uses functions like MarkObject,
michael@0 52 * MarkString, etc. These functions check if an object is in the compartment
michael@0 53 * currently being GCed. If it is, they call PushMarkStack. Roots are pushed
michael@0 54 * this way as well as pointers traversed inside trace hooks (for things like
michael@0 55 * PropertyIteratorObjects). It is always valid to call a MarkX function
michael@0 56 * instead of PushMarkStack, although it may be slower.
michael@0 57 *
michael@0 58 * The MarkX functions also handle non-GC object traversal. In this case, they
michael@0 59 * call a callback for each object visited. This is a recursive process; the
michael@0 60 * mark stacks are not involved. These callbacks may ask for the outgoing
michael@0 61 * pointers to be visited. Eventually, this leads to the MarkChildren functions
michael@0 62 * being called. These functions duplicate much of the functionality of
michael@0 63 * scanning functions, but they don't push onto an explicit stack.
michael@0 64 */
michael@0 65
/*
 * Forward declarations of the per-type PushMarkStack overloads defined later
 * in this file. These implement the fast marking path described in the
 * overview comment above.
 */
static inline void
PushMarkStack(GCMarker *gcmarker, ObjectImpl *thing);

static inline void
PushMarkStack(GCMarker *gcmarker, JSFunction *thing);

static inline void
PushMarkStack(GCMarker *gcmarker, JSScript *thing);

static inline void
PushMarkStack(GCMarker *gcmarker, Shape *thing);

static inline void
PushMarkStack(GCMarker *gcmarker, JSString *thing);

static inline void
PushMarkStack(GCMarker *gcmarker, types::TypeObject *thing);
namespace js {
namespace gc {

/*
 * Forward declarations of the per-type MarkChildren helpers used by the
 * slow (callback) traversal path.
 */
static void MarkChildren(JSTracer *trc, JSString *str);
static void MarkChildren(JSTracer *trc, JSScript *script);
static void MarkChildren(JSTracer *trc, LazyScript *lazy);
static void MarkChildren(JSTracer *trc, Shape *shape);
static void MarkChildren(JSTracer *trc, BaseShape *base);
static void MarkChildren(JSTracer *trc, types::TypeObject *type);
static void MarkChildren(JSTracer *trc, jit::JitCode *code);

} /* namespace gc */
} /* namespace js */
michael@0 98 /*** Object Marking ***/
michael@0 99
#if defined(DEBUG)
/*
 * Heuristically detect whether a GC thing still carries a free/poison fill
 * pattern, i.e. looks freed rather than allocated. Reads the word just past
 * any FreeSpan that may overlay the thing's header.
 */
template<typename T>
static inline bool
IsThingPoisoned(T *thing)
{
    static_assert(sizeof(T) >= sizeof(FreeSpan) + sizeof(uint32_t),
                  "Ensure it is well defined to look past any free span that "
                  "may be embedded in the thing's header when freed.");
    /* Every poison byte the GC may have written over freed memory. */
    const uint8_t poisonBytes[] = {
        JS_FRESH_NURSERY_PATTERN,
        JS_SWEPT_NURSERY_PATTERN,
        JS_ALLOCATED_NURSERY_PATTERN,
        JS_FRESH_TENURED_PATTERN,
        JS_SWEPT_TENURED_PATTERN,
        JS_ALLOCATED_TENURED_PATTERN,
        JS_SWEPT_CODE_PATTERN,
        JS_SWEPT_FRAME_PATTERN
    };
    const int numPoisonBytes = sizeof(poisonBytes) / sizeof(poisonBytes[0]);
    uint32_t *p = reinterpret_cast<uint32_t *>(reinterpret_cast<FreeSpan *>(thing) + 1);
    // Note: all free patterns are odd to make the common, not-poisoned case a single test.
    if ((*p & 1) == 0)
        return false;
    for (int i = 0; i < numPoisonBytes; ++i) {
        /*
         * Widen the byte to uint32_t *before* shifting: a uint8_t promotes to
         * signed int, so for a pattern byte >= 0x80 the `<< 24` would shift
         * into the sign bit, which is undefined behavior. The widened form
         * yields identical values for all current (sub-0x80) patterns.
         */
        const uint32_t pb = poisonBytes[i];
        const uint32_t pw = pb | (pb << 8) | (pb << 16) | (pb << 24);
        if (*p == pw)
            return true;
    }
    return false;
}
#endif
michael@0 132
/*
 * Downcast a tracer to the GC's marker. Only valid when the tracer is in
 * fact the marking tracer, which the assertion verifies.
 */
static GCMarker *
AsGCMarker(JSTracer *trc)
{
    JS_ASSERT(IS_GC_MARKING_TRACER(trc));
    return static_cast<GCMarker *>(trc);
}
michael@0 139
/*
 * Report whether a thing of the given static type is a permanent atom.
 * Only the string family of types can ever be permanent; the generic
 * case answers false without inspecting the thing.
 */
template <typename T> bool ThingIsPermanentAtom(T *thing) { return false; }
template <> bool ThingIsPermanentAtom<JSString>(JSString *str) { return str->isPermanentAtom(); }
template <> bool ThingIsPermanentAtom<JSFlatString>(JSFlatString *str) { return str->isPermanentAtom(); }
template <> bool ThingIsPermanentAtom<JSLinearString>(JSLinearString *str) { return str->isPermanentAtom(); }
template <> bool ThingIsPermanentAtom<JSAtom>(JSAtom *atom) { return atom->isPermanent(); }
template <> bool ThingIsPermanentAtom<PropertyName>(PropertyName *name) { return name->isPermanent(); }
michael@0 146
/*
 * Debug-only sanity checks run on every thing handed to MarkInternal:
 * alignment, zone/runtime ownership, tracer consistency, trace-kind
 * agreement, and that the thing does not appear to be freed. Compiles to
 * nothing in non-DEBUG builds. Nursery things and permanent atoms are
 * exempted because the data these checks read is unavailable for them.
 */
template<typename T>
static inline void
CheckMarkedThing(JSTracer *trc, T *thing)
{
#ifdef DEBUG
    JS_ASSERT(trc);
    JS_ASSERT(thing);

    /* This function uses data that's not available in the nursery. */
    if (IsInsideNursery(trc->runtime(), thing))
        return;

    /*
     * Permanent atoms are not associated with this runtime, but will be ignored
     * during marking.
     */
    if (ThingIsPermanentAtom(thing))
        return;

    JS_ASSERT(thing->zone());
    JS_ASSERT(thing->zone()->runtimeFromMainThread() == trc->runtime());
    JS_ASSERT(trc->hasTracingDetails());

    DebugOnly<JSRuntime *> rt = trc->runtime();

    JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc) && rt->gcManipulatingDeadZones,
                 !thing->zone()->scheduledForDestruction);

    JS_ASSERT(CurrentThreadCanAccessRuntime(rt));

    /* Zones that demand a marking tracer must not be visited by other tracers. */
    JS_ASSERT_IF(thing->zone()->requireGCTracer(),
                 IS_GC_MARKING_TRACER(trc));

    JS_ASSERT(thing->isAligned());

    JS_ASSERT(MapTypeToTraceKind<T>::kind == GetGCThingTraceKind(thing));

    JS_ASSERT_IF(rt->gcStrictCompartmentChecking,
                 thing->zone()->isCollecting() || rt->isAtomsZone(thing->zone()));

    /* Gray marking must not touch a zone still marking black (atoms zone excepted). */
    JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc) && AsGCMarker(trc)->getMarkColor() == GRAY,
                 !thing->zone()->isGCMarkingBlack() || rt->isAtomsZone(thing->zone()));

    JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc),
                 !(thing->zone()->isGCSweeping() || thing->zone()->isGCFinished()));

    /*
     * Try to assert that the thing is allocated. This is complicated by the
     * fact that allocated things may still contain the poison pattern if that
     * part has not been overwritten, and that the free span list head in the
     * ArenaHeader may not be synced with the real one in ArenaLists.
     */
    JS_ASSERT_IF(IsThingPoisoned(thing) && rt->isHeapBusy(),
                 !InFreeList(thing->arenaHeader(), thing));
#endif
}
michael@0 203
/*
 * Core marking routine. With no callback installed (fast path, real GC
 * marking) it pushes the thing onto the marker's mark stack, skipping
 * nursery things, permanent atoms, and zones not currently marking. With a
 * callback (slow path: cycle collector, Class::trace consumers) it forwards
 * the thing to the callback, which may update *thingp.
 */
template<typename T>
static void
MarkInternal(JSTracer *trc, T **thingp)
{
    JS_ASSERT(thingp);
    T *thing = *thingp;

    CheckMarkedThing(trc, thing);

    if (!trc->callback) {
        /*
         * We may mark a Nursery thing outside the context of the
         * MinorCollectionTracer because of a pre-barrier. The pre-barrier is
         * not needed in this case because we perform a minor collection before
         * each incremental slice.
         */
        if (IsInsideNursery(trc->runtime(), thing))
            return;

        /*
         * Don't mark permanent atoms, as they may be associated with another
         * runtime. Note that PushMarkStack() also checks this, but the tests
         * and maybeAlive write below should only be done on the main thread.
         */
        if (ThingIsPermanentAtom(thing))
            return;

        /*
         * Don't mark things outside a compartment if we are in a
         * per-compartment GC.
         */
        if (!thing->zone()->isGCMarking())
            return;

        PushMarkStack(AsGCMarker(trc), thing);
        thing->zone()->maybeAlive = true;
    } else {
        trc->callback(trc, (void **)thingp, MapTypeToTraceKind<T>::kind);
        trc->unsetTracingLocation();
    }

    trc->clearTracingDetails();
}
michael@0 247
/*
 * Roots may only be marked by a marking tracer when no incremental GC is in
 * progress or during its root-marking phase; assert that invariant.
 */
#define JS_ROOT_MARKING_ASSERT(trc) \
    JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc), \
                 trc->runtime()->gcIncrementalState == NO_INCREMENTAL || \
                 trc->runtime()->gcIncrementalState == MARK_ROOTS);
michael@0 252
namespace js {
namespace gc {

/* Mark through a raw pointer that is not wrapped in a barriered holder. */
template <typename T>
void
MarkUnbarriered(JSTracer *trc, T **thingp, const char *name)
{
    trc->setTracingName(name);
    MarkInternal(trc, thingp);
}

/* Mark through a write-barriered pointer. */
template <typename T>
static void
Mark(JSTracer *trc, BarrieredPtr<T> *thing, const char *name)
{
    trc->setTracingName(name);
    MarkInternal(trc, thing->unsafeGet());
}

/*
 * Mark a permanent atom. Permanent atoms may belong to another runtime, so
 * they bypass MarkInternal's runtime/zone handling and are marked directly.
 */
void
MarkPermanentAtom(JSTracer *trc, JSAtom *atom, const char *name)
{
    trc->setTracingName(name);

    JS_ASSERT(atom->isPermanent());

    CheckMarkedThing(trc, atom);

    if (!trc->callback) {
        // Atoms do not refer to other GC things so don't need to go on the mark stack.
        // Additionally, PushMarkStack will ignore permanent atoms.
        atom->markIfUnmarked();
    } else {
        void *thing = atom;
        trc->callback(trc, &thing, JSTRACE_STRING);
        /* Permanent atoms never move, so the callback must not have updated it. */
        JS_ASSERT(thing == atom);
        trc->unsetTracingLocation();
    }

    trc->clearTracingDetails();
}

} /* namespace gc */
} /* namespace js */
michael@0 297
/* Mark a GC root; see JS_ROOT_MARKING_ASSERT for when this is permitted. */
template <typename T>
static void
MarkRoot(JSTracer *trc, T **thingp, const char *name)
{
    JS_ROOT_MARKING_ASSERT(trc);
    trc->setTracingName(name);
    MarkInternal(trc, thingp);
}
michael@0 306
michael@0 307 template <typename T>
michael@0 308 static void
michael@0 309 MarkRange(JSTracer *trc, size_t len, HeapPtr<T> *vec, const char *name)
michael@0 310 {
michael@0 311 for (size_t i = 0; i < len; ++i) {
michael@0 312 if (vec[i].get()) {
michael@0 313 trc->setTracingIndex(name, i);
michael@0 314 MarkInternal(trc, vec[i].unsafeGet());
michael@0 315 }
michael@0 316 }
michael@0 317 }
michael@0 318
michael@0 319 template <typename T>
michael@0 320 static void
michael@0 321 MarkRootRange(JSTracer *trc, size_t len, T **vec, const char *name)
michael@0 322 {
michael@0 323 JS_ROOT_MARKING_ASSERT(trc);
michael@0 324 for (size_t i = 0; i < len; ++i) {
michael@0 325 if (vec[i]) {
michael@0 326 trc->setTracingIndex(name, i);
michael@0 327 MarkInternal(trc, &vec[i]);
michael@0 328 }
michael@0 329 }
michael@0 330 }
michael@0 331
michael@0 332 namespace js {
michael@0 333 namespace gc {
michael@0 334
/*
 * Report whether the thing is considered live. Nursery things are live iff
 * they were forwarded (in which case *thingp is updated to the new
 * location); things in zones that are not collecting, or whose collection
 * already finished, are unconditionally live.
 */
template <typename T>
static bool
IsMarked(T **thingp)
{
    JS_ASSERT(thingp);
    JS_ASSERT(*thingp);
#ifdef JSGC_GENERATIONAL
    Nursery &nursery = (*thingp)->runtimeFromMainThread()->gcNursery;
    if (nursery.isInside(*thingp))
        return nursery.getForwardedPointer(thingp);
#endif
    Zone *zone = (*thingp)->tenuredZone();
    if (!zone->isCollecting() || zone->isGCFinished())
        return true;
    return (*thingp)->isMarked();
}
michael@0 351
/*
 * Report whether the thing will be finalized by the collection in progress:
 * it is unmarked and its zone is sweeping, or (during minor GC) it is an
 * unforwarded nursery thing. Forwarded nursery things have *thingp updated
 * to the new location as a side effect.
 */
template <typename T>
static bool
IsAboutToBeFinalized(T **thingp)
{
    JS_ASSERT(thingp);
    JS_ASSERT(*thingp);

    T *thing = *thingp;
    JSRuntime *rt = thing->runtimeFromAnyThread();

    /* Permanent atoms are never finalized by non-owning runtimes. */
    if (ThingIsPermanentAtom(thing) && !TlsPerThreadData.get()->associatedWith(rt))
        return false;

#ifdef JSGC_GENERATIONAL
    Nursery &nursery = rt->gcNursery;
    JS_ASSERT_IF(!rt->isHeapMinorCollecting(), !nursery.isInside(thing));
    if (rt->isHeapMinorCollecting()) {
        if (nursery.isInside(thing))
            return !nursery.getForwardedPointer(thingp);
        /* Tenured things are never finalized by a minor collection. */
        return false;
    }
#endif

    if (!thing->tenuredZone()->isGCSweeping())
        return false;

    /*
     * We should return false for things that have been allocated during
     * incremental sweeping, but this possibility doesn't occur at the moment
     * because this function is only called at the very start of the sweeping a
     * compartment group and during minor gc. Rather than do the extra check,
     * we just assert that it's not necessary.
     */
    JS_ASSERT_IF(!rt->isHeapMinorCollecting(), !thing->arenaHeader()->allocatedDuringIncremental);

    return !thing->isMarked();
}
michael@0 390
michael@0 391 template <typename T>
michael@0 392 T *
michael@0 393 UpdateIfRelocated(JSRuntime *rt, T **thingp)
michael@0 394 {
michael@0 395 JS_ASSERT(thingp);
michael@0 396 #ifdef JSGC_GENERATIONAL
michael@0 397 if (*thingp && rt->isHeapMinorCollecting() && rt->gcNursery.isInside(*thingp))
michael@0 398 rt->gcNursery.getForwardedPointer(thingp);
michael@0 399 #endif
michael@0 400 return *thingp;
michael@0 401 }
michael@0 402
/*
 * Stamp out the full public marking API for one (name, C++ type) pair:
 * barriered/root/unbarriered Mark entry points, their range variants,
 * IsMarked / IsAboutToBeFinalized liveness queries, and the relocation
 * helpers. Instantiated below for every exposed GC thing type.
 */
#define DeclMarkerImpl(base, type)                                                                \
void                                                                                              \
Mark##base(JSTracer *trc, BarrieredPtr<type> *thing, const char *name)                            \
{                                                                                                 \
    Mark<type>(trc, thing, name);                                                                 \
}                                                                                                 \
                                                                                                  \
void                                                                                              \
Mark##base##Root(JSTracer *trc, type **thingp, const char *name)                                  \
{                                                                                                 \
    MarkRoot<type>(trc, thingp, name);                                                            \
}                                                                                                 \
                                                                                                  \
void                                                                                              \
Mark##base##Unbarriered(JSTracer *trc, type **thingp, const char *name)                           \
{                                                                                                 \
    MarkUnbarriered<type>(trc, thingp, name);                                                     \
}                                                                                                 \
                                                                                                  \
/* Explicitly instantiate MarkUnbarriered<type>. It is referenced from */                         \
/* other translation units and the instantiation might otherwise get */                           \
/* inlined away. */                                                                               \
template void MarkUnbarriered<type>(JSTracer *, type **, const char *);                           \
                                                                                                  \
void                                                                                              \
Mark##base##Range(JSTracer *trc, size_t len, HeapPtr<type> *vec, const char *name)                \
{                                                                                                 \
    MarkRange<type>(trc, len, vec, name);                                                         \
}                                                                                                 \
                                                                                                  \
void                                                                                              \
Mark##base##RootRange(JSTracer *trc, size_t len, type **vec, const char *name)                    \
{                                                                                                 \
    MarkRootRange<type>(trc, len, vec, name);                                                     \
}                                                                                                 \
                                                                                                  \
bool                                                                                              \
Is##base##Marked(type **thingp)                                                                   \
{                                                                                                 \
    return IsMarked<type>(thingp);                                                                \
}                                                                                                 \
                                                                                                  \
bool                                                                                              \
Is##base##Marked(BarrieredPtr<type> *thingp)                                                      \
{                                                                                                 \
    return IsMarked<type>(thingp->unsafeGet());                                                   \
}                                                                                                 \
                                                                                                  \
bool                                                                                              \
Is##base##AboutToBeFinalized(type **thingp)                                                       \
{                                                                                                 \
    return IsAboutToBeFinalized<type>(thingp);                                                    \
}                                                                                                 \
                                                                                                  \
bool                                                                                              \
Is##base##AboutToBeFinalized(BarrieredPtr<type> *thingp)                                          \
{                                                                                                 \
    return IsAboutToBeFinalized<type>(thingp->unsafeGet());                                       \
}                                                                                                 \
                                                                                                  \
type *                                                                                            \
Update##base##IfRelocated(JSRuntime *rt, BarrieredPtr<type> *thingp)                              \
{                                                                                                 \
    return UpdateIfRelocated<type>(rt, thingp->unsafeGet());                                      \
}                                                                                                 \
                                                                                                  \
type *                                                                                            \
Update##base##IfRelocated(JSRuntime *rt, type **thingp)                                           \
{                                                                                                 \
    return UpdateIfRelocated<type>(rt, thingp);                                                   \
}
michael@0 474
michael@0 475
/*
 * Instantiate the marking API family for every public GC thing type. The
 * first argument is the name used in the generated function names; the
 * second is the concrete C++ type.
 */
DeclMarkerImpl(BaseShape, BaseShape)
DeclMarkerImpl(BaseShape, UnownedBaseShape)
DeclMarkerImpl(JitCode, jit::JitCode)
DeclMarkerImpl(Object, ArgumentsObject)
DeclMarkerImpl(Object, ArrayBufferObject)
DeclMarkerImpl(Object, ArrayBufferViewObject)
DeclMarkerImpl(Object, SharedArrayBufferObject)
DeclMarkerImpl(Object, DebugScopeObject)
DeclMarkerImpl(Object, GlobalObject)
DeclMarkerImpl(Object, JSObject)
DeclMarkerImpl(Object, JSFunction)
DeclMarkerImpl(Object, ObjectImpl)
DeclMarkerImpl(Object, ScopeObject)
DeclMarkerImpl(Script, JSScript)
DeclMarkerImpl(LazyScript, LazyScript)
DeclMarkerImpl(Shape, Shape)
DeclMarkerImpl(String, JSAtom)
DeclMarkerImpl(String, JSString)
DeclMarkerImpl(String, JSFlatString)
DeclMarkerImpl(String, JSLinearString)
DeclMarkerImpl(String, PropertyName)
DeclMarkerImpl(TypeObject, js::types::TypeObject)
michael@0 498
michael@0 499 } /* namespace gc */
michael@0 500 } /* namespace js */
michael@0 501
michael@0 502 /*** Externally Typed Marking ***/
michael@0 503
/*
 * Mark a thing whose static type is unknown, dispatching on the runtime
 * trace kind to the correctly typed MarkInternal instantiation. The debug
 * assertion checks the supplied kind against the thing's actual alloc kind
 * for tenured things.
 */
void
gc::MarkKind(JSTracer *trc, void **thingp, JSGCTraceKind kind)
{
    JS_ASSERT(thingp);
    JS_ASSERT(*thingp);
    DebugOnly<Cell *> cell = static_cast<Cell *>(*thingp);
    JS_ASSERT_IF(cell->isTenured(), kind == MapAllocToTraceKind(cell->tenuredGetAllocKind()));
    switch (kind) {
      case JSTRACE_OBJECT:
        MarkInternal(trc, reinterpret_cast<JSObject **>(thingp));
        break;
      case JSTRACE_STRING:
        MarkInternal(trc, reinterpret_cast<JSString **>(thingp));
        break;
      case JSTRACE_SCRIPT:
        MarkInternal(trc, reinterpret_cast<JSScript **>(thingp));
        break;
      case JSTRACE_LAZY_SCRIPT:
        MarkInternal(trc, reinterpret_cast<LazyScript **>(thingp));
        break;
      case JSTRACE_SHAPE:
        MarkInternal(trc, reinterpret_cast<Shape **>(thingp));
        break;
      case JSTRACE_BASE_SHAPE:
        MarkInternal(trc, reinterpret_cast<BaseShape **>(thingp));
        break;
      case JSTRACE_TYPE_OBJECT:
        MarkInternal(trc, reinterpret_cast<types::TypeObject **>(thingp));
        break;
      case JSTRACE_JITCODE:
        MarkInternal(trc, reinterpret_cast<jit::JitCode **>(thingp));
        break;
    }
}
michael@0 538
michael@0 539 static void
michael@0 540 MarkGCThingInternal(JSTracer *trc, void **thingp, const char *name)
michael@0 541 {
michael@0 542 trc->setTracingName(name);
michael@0 543 JS_ASSERT(thingp);
michael@0 544 if (!*thingp)
michael@0 545 return;
michael@0 546 MarkKind(trc, thingp, GetGCThingTraceKind(*thingp));
michael@0 547 }
michael@0 548
/* Mark a root of unknown trace kind; a null *thingp is tolerated. */
void
gc::MarkGCThingRoot(JSTracer *trc, void **thingp, const char *name)
{
    JS_ROOT_MARKING_ASSERT(trc);
    MarkGCThingInternal(trc, thingp, name);
}
michael@0 555
/* Mark an unbarriered pointer of unknown trace kind; null *thingp tolerated. */
void
gc::MarkGCThingUnbarriered(JSTracer *trc, void **thingp, const char *name)
{
    MarkGCThingInternal(trc, thingp, name);
}
michael@0 561
michael@0 562 /*** ID Marking ***/
michael@0 563
/*
 * Mark the GC thing (string or object) an id refers to, then rebuild the id
 * from the possibly-updated pointer. Ids that hold no GC thing only clear
 * the tracer's location bookkeeping.
 */
static inline void
MarkIdInternal(JSTracer *trc, jsid *id)
{
    if (JSID_IS_STRING(*id)) {
        JSString *str = JSID_TO_STRING(*id);
        trc->setTracingLocation((void *)id);
        MarkInternal(trc, &str);
        *id = NON_INTEGER_ATOM_TO_JSID(reinterpret_cast<JSAtom *>(str));
    } else if (MOZ_UNLIKELY(JSID_IS_OBJECT(*id))) {
        JSObject *obj = JSID_TO_OBJECT(*id);
        trc->setTracingLocation((void *)id);
        MarkInternal(trc, &obj);
        *id = OBJECT_TO_JSID(obj);
    } else {
        /* Unset realLocation manually if we do not call MarkInternal. */
        trc->unsetTracingLocation();
    }
}
michael@0 582
/* Mark a write-barriered id. */
void
gc::MarkId(JSTracer *trc, BarrieredId *id, const char *name)
{
    trc->setTracingName(name);
    MarkIdInternal(trc, id->unsafeGet());
}
michael@0 589
/* Mark an id held as a root. */
void
gc::MarkIdRoot(JSTracer *trc, jsid *id, const char *name)
{
    JS_ROOT_MARKING_ASSERT(trc);
    trc->setTracingName(name);
    MarkIdInternal(trc, id);
}
michael@0 597
/* Mark an id held in an unbarriered location. */
void
gc::MarkIdUnbarriered(JSTracer *trc, jsid *id, const char *name)
{
    trc->setTracingName(name);
    MarkIdInternal(trc, id);
}
michael@0 604
michael@0 605 void
michael@0 606 gc::MarkIdRange(JSTracer *trc, size_t len, HeapId *vec, const char *name)
michael@0 607 {
michael@0 608 for (size_t i = 0; i < len; ++i) {
michael@0 609 trc->setTracingIndex(name, i);
michael@0 610 MarkIdInternal(trc, vec[i].unsafeGet());
michael@0 611 }
michael@0 612 }
michael@0 613
michael@0 614 void
michael@0 615 gc::MarkIdRootRange(JSTracer *trc, size_t len, jsid *vec, const char *name)
michael@0 616 {
michael@0 617 JS_ROOT_MARKING_ASSERT(trc);
michael@0 618 for (size_t i = 0; i < len; ++i) {
michael@0 619 trc->setTracingIndex(name, i);
michael@0 620 MarkIdInternal(trc, &vec[i]);
michael@0 621 }
michael@0 622 }
michael@0 623
michael@0 624 /*** Value Marking ***/
michael@0 625
/*
 * Mark the GC thing a Value refers to (if any), then rewrite the Value from
 * the possibly-updated pointer. Non-markable values only clear the tracer's
 * location bookkeeping.
 */
static inline void
MarkValueInternal(JSTracer *trc, Value *v)
{
    if (v->isMarkable()) {
        JS_ASSERT(v->toGCThing());
        void *thing = v->toGCThing();
        trc->setTracingLocation((void *)v);
        MarkKind(trc, &thing, v->gcKind());
        /* Re-box the (possibly moved) thing with the tag it had before. */
        if (v->isString())
            v->setString((JSString *)thing);
        else
            v->setObjectOrNull((JSObject *)thing);
    } else {
        /* Unset realLocation manually if we do not call MarkInternal. */
        trc->unsetTracingLocation();
    }
}
michael@0 643
/* Mark a write-barriered Value. */
void
gc::MarkValue(JSTracer *trc, BarrieredValue *v, const char *name)
{
    trc->setTracingName(name);
    MarkValueInternal(trc, v->unsafeGet());
}
michael@0 650
/* Mark a Value held as a root. */
void
gc::MarkValueRoot(JSTracer *trc, Value *v, const char *name)
{
    JS_ROOT_MARKING_ASSERT(trc);
    trc->setTracingName(name);
    MarkValueInternal(trc, v);
}
michael@0 658
michael@0 659 void
michael@0 660 gc::MarkTypeRoot(JSTracer *trc, types::Type *v, const char *name)
michael@0 661 {
michael@0 662 JS_ROOT_MARKING_ASSERT(trc);
michael@0 663 trc->setTracingName(name);
michael@0 664 if (v->isSingleObject()) {
michael@0 665 JSObject *obj = v->singleObject();
michael@0 666 MarkInternal(trc, &obj);
michael@0 667 *v = types::Type::ObjectType(obj);
michael@0 668 } else if (v->isTypeObject()) {
michael@0 669 types::TypeObject *typeObj = v->typeObject();
michael@0 670 MarkInternal(trc, &typeObj);
michael@0 671 *v = types::Type::ObjectType(typeObj);
michael@0 672 }
michael@0 673 }
michael@0 674
michael@0 675 void
michael@0 676 gc::MarkValueRange(JSTracer *trc, size_t len, BarrieredValue *vec, const char *name)
michael@0 677 {
michael@0 678 for (size_t i = 0; i < len; ++i) {
michael@0 679 trc->setTracingIndex(name, i);
michael@0 680 MarkValueInternal(trc, vec[i].unsafeGet());
michael@0 681 }
michael@0 682 }
michael@0 683
michael@0 684 void
michael@0 685 gc::MarkValueRootRange(JSTracer *trc, size_t len, Value *vec, const char *name)
michael@0 686 {
michael@0 687 JS_ROOT_MARKING_ASSERT(trc);
michael@0 688 for (size_t i = 0; i < len; ++i) {
michael@0 689 trc->setTracingIndex(name, i);
michael@0 690 MarkValueInternal(trc, &vec[i]);
michael@0 691 }
michael@0 692 }
michael@0 693
michael@0 694 bool
michael@0 695 gc::IsValueMarked(Value *v)
michael@0 696 {
michael@0 697 JS_ASSERT(v->isMarkable());
michael@0 698 bool rv;
michael@0 699 if (v->isString()) {
michael@0 700 JSString *str = (JSString *)v->toGCThing();
michael@0 701 rv = IsMarked<JSString>(&str);
michael@0 702 v->setString(str);
michael@0 703 } else {
michael@0 704 JSObject *obj = (JSObject *)v->toGCThing();
michael@0 705 rv = IsMarked<JSObject>(&obj);
michael@0 706 v->setObject(*obj);
michael@0 707 }
michael@0 708 return rv;
michael@0 709 }
michael@0 710
michael@0 711 bool
michael@0 712 gc::IsValueAboutToBeFinalized(Value *v)
michael@0 713 {
michael@0 714 JS_ASSERT(v->isMarkable());
michael@0 715 bool rv;
michael@0 716 if (v->isString()) {
michael@0 717 JSString *str = (JSString *)v->toGCThing();
michael@0 718 rv = IsAboutToBeFinalized<JSString>(&str);
michael@0 719 v->setString(str);
michael@0 720 } else {
michael@0 721 JSObject *obj = (JSObject *)v->toGCThing();
michael@0 722 rv = IsAboutToBeFinalized<JSObject>(&obj);
michael@0 723 v->setObject(*obj);
michael@0 724 }
michael@0 725 return rv;
michael@0 726 }
michael@0 727
michael@0 728 /*** Slot Marking ***/
michael@0 729
/* Liveness query for the value stored in a heap slot; delegates to IsMarked. */
bool
gc::IsSlotMarked(HeapSlot *s)
{
    return IsMarked(s);
}
michael@0 735
/* Mark the value stored in a single heap slot. */
void
gc::MarkSlot(JSTracer *trc, HeapSlot *s, const char *name)
{
    trc->setTracingName(name);
    MarkValueInternal(trc, s->unsafeGet());
}
michael@0 742
michael@0 743 void
michael@0 744 gc::MarkArraySlots(JSTracer *trc, size_t len, HeapSlot *vec, const char *name)
michael@0 745 {
michael@0 746 for (size_t i = 0; i < len; ++i) {
michael@0 747 trc->setTracingIndex(name, i);
michael@0 748 MarkValueInternal(trc, vec[i].unsafeGet());
michael@0 749 }
michael@0 750 }
michael@0 751
michael@0 752 void
michael@0 753 gc::MarkObjectSlots(JSTracer *trc, JSObject *obj, uint32_t start, uint32_t nslots)
michael@0 754 {
michael@0 755 JS_ASSERT(obj->isNative());
michael@0 756 for (uint32_t i = start; i < (start + nslots); ++i) {
michael@0 757 trc->setTracingDetails(js_GetObjectSlotName, obj, i);
michael@0 758 MarkValueInternal(trc, obj->nativeGetSlotRef(i).unsafeGet());
michael@0 759 }
michael@0 760 }
michael@0 761
/*
 * Decide whether a cross-compartment edge from src to cell should be traced
 * now, based on the marker's current color and the destination zone's GC
 * state. Non-marking tracers always trace. May record black->gray edge
 * violations and delay gray marking as side effects.
 */
static bool
ShouldMarkCrossCompartment(JSTracer *trc, JSObject *src, Cell *cell)
{
    if (!IS_GC_MARKING_TRACER(trc))
        return true;

    uint32_t color = AsGCMarker(trc)->getMarkColor();
    JS_ASSERT(color == BLACK || color == GRAY);

    /* Nursery cells are not marked through cross-compartment edges here. */
    if (IsInsideNursery(trc->runtime(), cell)) {
        JS_ASSERT(color == BLACK);
        return false;
    }

    JS::Zone *zone = cell->tenuredZone();
    if (color == BLACK) {
        /*
         * Having black->gray edges violates our promise to the cycle
         * collector. This can happen if we're collecting a compartment and it
         * has an edge to an uncollected compartment: it's possible that the
         * source and destination of the cross-compartment edge should be gray,
         * but the source was marked black by the conservative scanner.
         */
        if (cell->isMarked(GRAY)) {
            JS_ASSERT(!zone->isCollecting());
            trc->runtime()->gcFoundBlackGrayEdges = true;
        }
        return zone->isGCMarking();
    } else {
        if (zone->isGCMarkingBlack()) {
            /*
             * The destination compartment is not being marked gray now, but
             * it will be later, so record the cell so it can be marked gray
             * at the appropriate time.
             */
            if (!cell->isMarked())
                DelayCrossCompartmentGrayMarking(src);
            return false;
        }
        return zone->isGCMarkingGray();
    }
}
michael@0 804
michael@0 805 void
michael@0 806 gc::MarkCrossCompartmentObjectUnbarriered(JSTracer *trc, JSObject *src, JSObject **dst, const char *name)
michael@0 807 {
michael@0 808 if (ShouldMarkCrossCompartment(trc, src, *dst))
michael@0 809 MarkObjectUnbarriered(trc, dst, name);
michael@0 810 }
michael@0 811
michael@0 812 void
michael@0 813 gc::MarkCrossCompartmentScriptUnbarriered(JSTracer *trc, JSObject *src, JSScript **dst,
michael@0 814 const char *name)
michael@0 815 {
michael@0 816 if (ShouldMarkCrossCompartment(trc, src, *dst))
michael@0 817 MarkScriptUnbarriered(trc, dst, name);
michael@0 818 }
michael@0 819
michael@0 820 void
michael@0 821 gc::MarkCrossCompartmentSlot(JSTracer *trc, JSObject *src, HeapSlot *dst, const char *name)
michael@0 822 {
michael@0 823 if (dst->isMarkable() && ShouldMarkCrossCompartment(trc, src, (Cell *)dst->toGCThing()))
michael@0 824 MarkSlot(trc, dst, name);
michael@0 825 }
michael@0 826
michael@0 827 /*** Special Marking ***/
michael@0 828
michael@0 829 void
michael@0 830 gc::MarkObject(JSTracer *trc, HeapPtr<GlobalObject, JSScript *> *thingp, const char *name)
michael@0 831 {
michael@0 832 trc->setTracingName(name);
michael@0 833 MarkInternal(trc, thingp->unsafeGet());
michael@0 834 }
michael@0 835
michael@0 836 void
michael@0 837 gc::MarkValueUnbarriered(JSTracer *trc, Value *v, const char *name)
michael@0 838 {
michael@0 839 trc->setTracingName(name);
michael@0 840 MarkValueInternal(trc, v);
michael@0 841 }
michael@0 842
michael@0 843 bool
michael@0 844 gc::IsCellMarked(Cell **thingp)
michael@0 845 {
michael@0 846 return IsMarked<Cell>(thingp);
michael@0 847 }
michael@0 848
michael@0 849 bool
michael@0 850 gc::IsCellAboutToBeFinalized(Cell **thingp)
michael@0 851 {
michael@0 852 return IsAboutToBeFinalized<Cell>(thingp);
michael@0 853 }
michael@0 854
michael@0 855 /*** Push Mark Stack ***/
michael@0 856
/*
 * Assert that |thing| is legitimately markable: its zone must currently be
 * in the GC marking state (strings may instead live in the atoms zone).
 * Neither macro supplies a trailing semicolon; call sites provide their own,
 * so both can be used safely inside if/else bodies.
 */
#define JS_COMPARTMENT_ASSERT(rt, thing)                                \
    JS_ASSERT((thing)->zone()->isGCMarking())

#define JS_COMPARTMENT_ASSERT_STR(rt, thing)                            \
    JS_ASSERT((thing)->zone()->isGCMarking() ||                         \
              (rt)->isAtomsZone((thing)->zone()))
michael@0 863
michael@0 864 static void
michael@0 865 PushMarkStack(GCMarker *gcmarker, ObjectImpl *thing)
michael@0 866 {
michael@0 867 JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
michael@0 868 JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing));
michael@0 869
michael@0 870 if (thing->markIfUnmarked(gcmarker->getMarkColor()))
michael@0 871 gcmarker->pushObject(thing);
michael@0 872 }
michael@0 873
michael@0 874 /*
michael@0 875 * PushMarkStack for BaseShape unpacks its children directly onto the mark
michael@0 876 * stack. For a pre-barrier between incremental slices, this may result in
michael@0 877 * objects in the nursery getting pushed onto the mark stack. It is safe to
michael@0 878 * ignore these objects because they will be marked by the matching
michael@0 879 * post-barrier during the minor GC at the start of each incremental slice.
michael@0 880 */
michael@0 881 static void
michael@0 882 MaybePushMarkStackBetweenSlices(GCMarker *gcmarker, JSObject *thing)
michael@0 883 {
michael@0 884 JSRuntime *rt = gcmarker->runtime();
michael@0 885 JS_COMPARTMENT_ASSERT(rt, thing);
michael@0 886 JS_ASSERT_IF(rt->isHeapBusy(), !IsInsideNursery(rt, thing));
michael@0 887
michael@0 888 if (!IsInsideNursery(rt, thing) && thing->markIfUnmarked(gcmarker->getMarkColor()))
michael@0 889 gcmarker->pushObject(thing);
michael@0 890 }
michael@0 891
michael@0 892 static void
michael@0 893 PushMarkStack(GCMarker *gcmarker, JSFunction *thing)
michael@0 894 {
michael@0 895 JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
michael@0 896 JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing));
michael@0 897
michael@0 898 if (thing->markIfUnmarked(gcmarker->getMarkColor()))
michael@0 899 gcmarker->pushObject(thing);
michael@0 900 }
michael@0 901
michael@0 902 static void
michael@0 903 PushMarkStack(GCMarker *gcmarker, types::TypeObject *thing)
michael@0 904 {
michael@0 905 JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
michael@0 906 JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing));
michael@0 907
michael@0 908 if (thing->markIfUnmarked(gcmarker->getMarkColor()))
michael@0 909 gcmarker->pushType(thing);
michael@0 910 }
michael@0 911
michael@0 912 static void
michael@0 913 PushMarkStack(GCMarker *gcmarker, JSScript *thing)
michael@0 914 {
michael@0 915 JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
michael@0 916 JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing));
michael@0 917
michael@0 918 /*
michael@0 919 * We mark scripts directly rather than pushing on the stack as they can
michael@0 920 * refer to other scripts only indirectly (like via nested functions) and
michael@0 921 * we cannot get to deep recursion.
michael@0 922 */
michael@0 923 if (thing->markIfUnmarked(gcmarker->getMarkColor()))
michael@0 924 MarkChildren(gcmarker, thing);
michael@0 925 }
michael@0 926
michael@0 927 static void
michael@0 928 PushMarkStack(GCMarker *gcmarker, LazyScript *thing)
michael@0 929 {
michael@0 930 JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
michael@0 931 JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing));
michael@0 932
michael@0 933 /*
michael@0 934 * We mark lazy scripts directly rather than pushing on the stack as they
michael@0 935 * only refer to normal scripts and to strings, and cannot recurse.
michael@0 936 */
michael@0 937 if (thing->markIfUnmarked(gcmarker->getMarkColor()))
michael@0 938 MarkChildren(gcmarker, thing);
michael@0 939 }
michael@0 940
michael@0 941 static void
michael@0 942 ScanShape(GCMarker *gcmarker, Shape *shape);
michael@0 943
michael@0 944 static void
michael@0 945 PushMarkStack(GCMarker *gcmarker, Shape *thing)
michael@0 946 {
michael@0 947 JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
michael@0 948 JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing));
michael@0 949
michael@0 950 /* We mark shapes directly rather than pushing on the stack. */
michael@0 951 if (thing->markIfUnmarked(gcmarker->getMarkColor()))
michael@0 952 ScanShape(gcmarker, thing);
michael@0 953 }
michael@0 954
michael@0 955 static void
michael@0 956 PushMarkStack(GCMarker *gcmarker, jit::JitCode *thing)
michael@0 957 {
michael@0 958 JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
michael@0 959 JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing));
michael@0 960
michael@0 961 if (thing->markIfUnmarked(gcmarker->getMarkColor()))
michael@0 962 gcmarker->pushJitCode(thing);
michael@0 963 }
michael@0 964
michael@0 965 static inline void
michael@0 966 ScanBaseShape(GCMarker *gcmarker, BaseShape *base);
michael@0 967
michael@0 968 static void
michael@0 969 PushMarkStack(GCMarker *gcmarker, BaseShape *thing)
michael@0 970 {
michael@0 971 JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
michael@0 972 JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing));
michael@0 973
michael@0 974 /* We mark base shapes directly rather than pushing on the stack. */
michael@0 975 if (thing->markIfUnmarked(gcmarker->getMarkColor()))
michael@0 976 ScanBaseShape(gcmarker, thing);
michael@0 977 }
michael@0 978
michael@0 979 static void
michael@0 980 ScanShape(GCMarker *gcmarker, Shape *shape)
michael@0 981 {
michael@0 982 restart:
michael@0 983 PushMarkStack(gcmarker, shape->base());
michael@0 984
michael@0 985 const BarrieredId &id = shape->propidRef();
michael@0 986 if (JSID_IS_STRING(id))
michael@0 987 PushMarkStack(gcmarker, JSID_TO_STRING(id));
michael@0 988 else if (MOZ_UNLIKELY(JSID_IS_OBJECT(id)))
michael@0 989 PushMarkStack(gcmarker, JSID_TO_OBJECT(id));
michael@0 990
michael@0 991 shape = shape->previous();
michael@0 992 if (shape && shape->markIfUnmarked(gcmarker->getMarkColor()))
michael@0 993 goto restart;
michael@0 994 }
michael@0 995
michael@0 996 static inline void
michael@0 997 ScanBaseShape(GCMarker *gcmarker, BaseShape *base)
michael@0 998 {
michael@0 999 base->assertConsistency();
michael@0 1000
michael@0 1001 base->compartment()->mark();
michael@0 1002
michael@0 1003 if (base->hasGetterObject())
michael@0 1004 MaybePushMarkStackBetweenSlices(gcmarker, base->getterObject());
michael@0 1005
michael@0 1006 if (base->hasSetterObject())
michael@0 1007 MaybePushMarkStackBetweenSlices(gcmarker, base->setterObject());
michael@0 1008
michael@0 1009 if (JSObject *parent = base->getObjectParent()) {
michael@0 1010 MaybePushMarkStackBetweenSlices(gcmarker, parent);
michael@0 1011 } else if (GlobalObject *global = base->compartment()->maybeGlobal()) {
michael@0 1012 PushMarkStack(gcmarker, global);
michael@0 1013 }
michael@0 1014
michael@0 1015 if (JSObject *metadata = base->getObjectMetadata())
michael@0 1016 MaybePushMarkStackBetweenSlices(gcmarker, metadata);
michael@0 1017
michael@0 1018 /*
michael@0 1019 * All children of the owned base shape are consistent with its
michael@0 1020 * unowned one, thus we do not need to trace through children of the
michael@0 1021 * unowned base shape.
michael@0 1022 */
michael@0 1023 if (base->isOwned()) {
michael@0 1024 UnownedBaseShape *unowned = base->baseUnowned();
michael@0 1025 JS_ASSERT(base->compartment() == unowned->compartment());
michael@0 1026 unowned->markIfUnmarked(gcmarker->getMarkColor());
michael@0 1027 }
michael@0 1028 }
michael@0 1029
michael@0 1030 static inline void
michael@0 1031 ScanLinearString(GCMarker *gcmarker, JSLinearString *str)
michael@0 1032 {
michael@0 1033 JS_COMPARTMENT_ASSERT_STR(gcmarker->runtime(), str);
michael@0 1034 JS_ASSERT(str->isMarked());
michael@0 1035
michael@0 1036 /*
michael@0 1037 * Add extra asserts to confirm the static type to detect incorrect string
michael@0 1038 * mutations.
michael@0 1039 */
michael@0 1040 JS_ASSERT(str->JSString::isLinear());
michael@0 1041 while (str->hasBase()) {
michael@0 1042 str = str->base();
michael@0 1043 JS_ASSERT(str->JSString::isLinear());
michael@0 1044 if (str->isPermanentAtom())
michael@0 1045 break;
michael@0 1046 JS_COMPARTMENT_ASSERT_STR(gcmarker->runtime(), str);
michael@0 1047 if (!str->markIfUnmarked())
michael@0 1048 break;
michael@0 1049 }
michael@0 1050 }
michael@0 1051
/*
 * The function tries to scan the whole rope tree using the marking stack as
 * temporary storage. If that becomes full, the unscanned ropes are added to
 * the delayed marking list. When the function returns, the marking stack is
 * at the same depth as it was on entry. This way we avoid using tags when
 * pushing ropes to the stack as ropes never leak to other users of the
 * stack. This also assumes that a rope can only point to other ropes or
 * linear strings, it cannot refer to GC things of other types.
 */
static void
ScanRope(GCMarker *gcmarker, JSRope *rope)
{
    /* Remember the entry depth so our own pushes can be told apart below. */
    ptrdiff_t savedPos = gcmarker->stack.position();
    JS_DIAGNOSTICS_ASSERT(GetGCThingTraceKind(rope) == JSTRACE_STRING);
    for (;;) {
        JS_DIAGNOSTICS_ASSERT(GetGCThingTraceKind(rope) == JSTRACE_STRING);
        JS_DIAGNOSTICS_ASSERT(rope->JSString::isRope());
        JS_COMPARTMENT_ASSERT_STR(gcmarker->runtime(), rope);
        JS_ASSERT(rope->isMarked());
        /* The next rope to scan after this one, if any. */
        JSRope *next = nullptr;

        /* Linear children are scanned immediately; rope children are deferred. */
        JSString *right = rope->rightChild();
        if (!right->isPermanentAtom() && right->markIfUnmarked()) {
            if (right->isLinear())
                ScanLinearString(gcmarker, &right->asLinear());
            else
                next = &right->asRope();
        }

        JSString *left = rope->leftChild();
        if (!left->isPermanentAtom() && left->markIfUnmarked()) {
            if (left->isLinear()) {
                ScanLinearString(gcmarker, &left->asLinear());
            } else {
                /*
                 * When both children are ropes, set aside the right one to
                 * scan it later.
                 */
                if (next && !gcmarker->stack.push(reinterpret_cast<uintptr_t>(next)))
                    gcmarker->delayMarkingChildren(next);
                next = &left->asRope();
            }
        }
        if (next) {
            rope = next;
        } else if (savedPos != gcmarker->stack.position()) {
            /* Resume a rope that was set aside on the stack earlier. */
            JS_ASSERT(savedPos < gcmarker->stack.position());
            rope = reinterpret_cast<JSRope *>(gcmarker->stack.pop());
        } else {
            break;
        }
    }
    /* The stack must be back at its entry depth. */
    JS_ASSERT(savedPos == gcmarker->stack.position());
}
michael@0 1106
michael@0 1107 static inline void
michael@0 1108 ScanString(GCMarker *gcmarker, JSString *str)
michael@0 1109 {
michael@0 1110 if (str->isLinear())
michael@0 1111 ScanLinearString(gcmarker, &str->asLinear());
michael@0 1112 else
michael@0 1113 ScanRope(gcmarker, &str->asRope());
michael@0 1114 }
michael@0 1115
michael@0 1116 static inline void
michael@0 1117 PushMarkStack(GCMarker *gcmarker, JSString *str)
michael@0 1118 {
michael@0 1119 // Permanent atoms might not be associated with this runtime.
michael@0 1120 if (str->isPermanentAtom())
michael@0 1121 return;
michael@0 1122
michael@0 1123 JS_COMPARTMENT_ASSERT_STR(gcmarker->runtime(), str);
michael@0 1124
michael@0 1125 /*
michael@0 1126 * As string can only refer to other strings we fully scan its GC graph
michael@0 1127 * using the explicit stack when navigating the rope tree to avoid
michael@0 1128 * dealing with strings on the stack in drainMarkStack.
michael@0 1129 */
michael@0 1130 if (str->markIfUnmarked())
michael@0 1131 ScanString(gcmarker, str);
michael@0 1132 }
michael@0 1133
michael@0 1134 void
michael@0 1135 gc::MarkChildren(JSTracer *trc, JSObject *obj)
michael@0 1136 {
michael@0 1137 obj->markChildren(trc);
michael@0 1138 }
michael@0 1139
michael@0 1140 static void
michael@0 1141 gc::MarkChildren(JSTracer *trc, JSString *str)
michael@0 1142 {
michael@0 1143 if (str->hasBase())
michael@0 1144 str->markBase(trc);
michael@0 1145 else if (str->isRope())
michael@0 1146 str->asRope().markChildren(trc);
michael@0 1147 }
michael@0 1148
michael@0 1149 static void
michael@0 1150 gc::MarkChildren(JSTracer *trc, JSScript *script)
michael@0 1151 {
michael@0 1152 script->markChildren(trc);
michael@0 1153 }
michael@0 1154
michael@0 1155 static void
michael@0 1156 gc::MarkChildren(JSTracer *trc, LazyScript *lazy)
michael@0 1157 {
michael@0 1158 lazy->markChildren(trc);
michael@0 1159 }
michael@0 1160
michael@0 1161 static void
michael@0 1162 gc::MarkChildren(JSTracer *trc, Shape *shape)
michael@0 1163 {
michael@0 1164 shape->markChildren(trc);
michael@0 1165 }
michael@0 1166
michael@0 1167 static void
michael@0 1168 gc::MarkChildren(JSTracer *trc, BaseShape *base)
michael@0 1169 {
michael@0 1170 base->markChildren(trc);
michael@0 1171 }
michael@0 1172
michael@0 1173 /*
michael@0 1174 * This function is used by the cycle collector to trace through the
michael@0 1175 * children of a BaseShape (and its baseUnowned(), if any). The cycle
michael@0 1176 * collector does not directly care about BaseShapes, so only the
michael@0 1177 * getter, setter, and parent are marked. Furthermore, the parent is
michael@0 1178 * marked only if it isn't the same as prevParent, which will be
michael@0 1179 * updated to the current shape's parent.
michael@0 1180 */
michael@0 1181 static inline void
michael@0 1182 MarkCycleCollectorChildren(JSTracer *trc, BaseShape *base, JSObject **prevParent)
michael@0 1183 {
michael@0 1184 JS_ASSERT(base);
michael@0 1185
michael@0 1186 /*
michael@0 1187 * The cycle collector does not need to trace unowned base shapes,
michael@0 1188 * as they have the same getter, setter and parent as the original
michael@0 1189 * base shape.
michael@0 1190 */
michael@0 1191 base->assertConsistency();
michael@0 1192
michael@0 1193 if (base->hasGetterObject()) {
michael@0 1194 JSObject *tmp = base->getterObject();
michael@0 1195 MarkObjectUnbarriered(trc, &tmp, "getter");
michael@0 1196 JS_ASSERT(tmp == base->getterObject());
michael@0 1197 }
michael@0 1198
michael@0 1199 if (base->hasSetterObject()) {
michael@0 1200 JSObject *tmp = base->setterObject();
michael@0 1201 MarkObjectUnbarriered(trc, &tmp, "setter");
michael@0 1202 JS_ASSERT(tmp == base->setterObject());
michael@0 1203 }
michael@0 1204
michael@0 1205 JSObject *parent = base->getObjectParent();
michael@0 1206 if (parent && parent != *prevParent) {
michael@0 1207 MarkObjectUnbarriered(trc, &parent, "parent");
michael@0 1208 JS_ASSERT(parent == base->getObjectParent());
michael@0 1209 *prevParent = parent;
michael@0 1210 }
michael@0 1211 }
michael@0 1212
michael@0 1213 /*
michael@0 1214 * This function is used by the cycle collector to trace through a
michael@0 1215 * shape. The cycle collector does not care about shapes or base
michael@0 1216 * shapes, so those are not marked. Instead, any shapes or base shapes
michael@0 1217 * that are encountered have their children marked. Stack space is
michael@0 1218 * bounded. If two shapes in a row have the same parent pointer, the
michael@0 1219 * parent pointer will only be marked once.
michael@0 1220 */
michael@0 1221 void
michael@0 1222 gc::MarkCycleCollectorChildren(JSTracer *trc, Shape *shape)
michael@0 1223 {
michael@0 1224 JSObject *prevParent = nullptr;
michael@0 1225 do {
michael@0 1226 MarkCycleCollectorChildren(trc, shape->base(), &prevParent);
michael@0 1227 MarkId(trc, &shape->propidRef(), "propid");
michael@0 1228 shape = shape->previous();
michael@0 1229 } while (shape);
michael@0 1230 }
michael@0 1231
michael@0 1232 static void
michael@0 1233 ScanTypeObject(GCMarker *gcmarker, types::TypeObject *type)
michael@0 1234 {
michael@0 1235 unsigned count = type->getPropertyCount();
michael@0 1236 for (unsigned i = 0; i < count; i++) {
michael@0 1237 types::Property *prop = type->getProperty(i);
michael@0 1238 if (prop && JSID_IS_STRING(prop->id))
michael@0 1239 PushMarkStack(gcmarker, JSID_TO_STRING(prop->id));
michael@0 1240 }
michael@0 1241
michael@0 1242 if (type->proto().isObject())
michael@0 1243 PushMarkStack(gcmarker, type->proto().toObject());
michael@0 1244
michael@0 1245 if (type->singleton() && !type->lazy())
michael@0 1246 PushMarkStack(gcmarker, type->singleton());
michael@0 1247
michael@0 1248 if (type->hasNewScript()) {
michael@0 1249 PushMarkStack(gcmarker, type->newScript()->fun);
michael@0 1250 PushMarkStack(gcmarker, type->newScript()->templateObject);
michael@0 1251 } else if (type->hasTypedObject()) {
michael@0 1252 PushMarkStack(gcmarker, type->typedObject()->descrHeapPtr());
michael@0 1253 }
michael@0 1254
michael@0 1255 if (type->interpretedFunction)
michael@0 1256 PushMarkStack(gcmarker, type->interpretedFunction);
michael@0 1257 }
michael@0 1258
michael@0 1259 static void
michael@0 1260 gc::MarkChildren(JSTracer *trc, types::TypeObject *type)
michael@0 1261 {
michael@0 1262 unsigned count = type->getPropertyCount();
michael@0 1263 for (unsigned i = 0; i < count; i++) {
michael@0 1264 types::Property *prop = type->getProperty(i);
michael@0 1265 if (prop)
michael@0 1266 MarkId(trc, &prop->id, "type_prop");
michael@0 1267 }
michael@0 1268
michael@0 1269 if (type->proto().isObject())
michael@0 1270 MarkObject(trc, &type->protoRaw(), "type_proto");
michael@0 1271
michael@0 1272 if (type->singleton() && !type->lazy())
michael@0 1273 MarkObject(trc, &type->singletonRaw(), "type_singleton");
michael@0 1274
michael@0 1275 if (type->hasNewScript()) {
michael@0 1276 MarkObject(trc, &type->newScript()->fun, "type_new_function");
michael@0 1277 MarkObject(trc, &type->newScript()->templateObject, "type_new_template");
michael@0 1278 } else if (type->hasTypedObject()) {
michael@0 1279 MarkObject(trc, &type->typedObject()->descrHeapPtr(), "type_heap_ptr");
michael@0 1280 }
michael@0 1281
michael@0 1282 if (type->interpretedFunction)
michael@0 1283 MarkObject(trc, &type->interpretedFunction, "type_function");
michael@0 1284 }
michael@0 1285
michael@0 1286 static void
michael@0 1287 gc::MarkChildren(JSTracer *trc, jit::JitCode *code)
michael@0 1288 {
michael@0 1289 #ifdef JS_ION
michael@0 1290 code->trace(trc);
michael@0 1291 #endif
michael@0 1292 }
michael@0 1293
michael@0 1294 template<typename T>
michael@0 1295 static void
michael@0 1296 PushArenaTyped(GCMarker *gcmarker, ArenaHeader *aheader)
michael@0 1297 {
michael@0 1298 for (CellIterUnderGC i(aheader); !i.done(); i.next())
michael@0 1299 PushMarkStack(gcmarker, i.get<T>());
michael@0 1300 }
michael@0 1301
michael@0 1302 void
michael@0 1303 gc::PushArena(GCMarker *gcmarker, ArenaHeader *aheader)
michael@0 1304 {
michael@0 1305 switch (MapAllocToTraceKind(aheader->getAllocKind())) {
michael@0 1306 case JSTRACE_OBJECT:
michael@0 1307 PushArenaTyped<JSObject>(gcmarker, aheader);
michael@0 1308 break;
michael@0 1309
michael@0 1310 case JSTRACE_STRING:
michael@0 1311 PushArenaTyped<JSString>(gcmarker, aheader);
michael@0 1312 break;
michael@0 1313
michael@0 1314 case JSTRACE_SCRIPT:
michael@0 1315 PushArenaTyped<JSScript>(gcmarker, aheader);
michael@0 1316 break;
michael@0 1317
michael@0 1318 case JSTRACE_LAZY_SCRIPT:
michael@0 1319 PushArenaTyped<LazyScript>(gcmarker, aheader);
michael@0 1320 break;
michael@0 1321
michael@0 1322 case JSTRACE_SHAPE:
michael@0 1323 PushArenaTyped<js::Shape>(gcmarker, aheader);
michael@0 1324 break;
michael@0 1325
michael@0 1326 case JSTRACE_BASE_SHAPE:
michael@0 1327 PushArenaTyped<js::BaseShape>(gcmarker, aheader);
michael@0 1328 break;
michael@0 1329
michael@0 1330 case JSTRACE_TYPE_OBJECT:
michael@0 1331 PushArenaTyped<js::types::TypeObject>(gcmarker, aheader);
michael@0 1332 break;
michael@0 1333
michael@0 1334 case JSTRACE_JITCODE:
michael@0 1335 PushArenaTyped<js::jit::JitCode>(gcmarker, aheader);
michael@0 1336 break;
michael@0 1337 }
michael@0 1338 }
michael@0 1339
/*
 * Overlay describing the three mark stack words that encode a value array.
 * In the ValueArrayTag form, |start|/|end| delimit a HeapSlot range; in the
 * SavedValueArrayTag form they are reinterpreted as |index|/|kind| (see
 * GCMarker::saveValueRanges / restoreValueArray).
 */
struct SlotArrayLayout
{
    union {
        HeapSlot *end;
        uintptr_t kind;
    };
    union {
        HeapSlot *start;
        uintptr_t index;
    };
    JSObject *obj;

    static void staticAsserts() {
        /* This should have the same layout as three mark stack items. */
        JS_STATIC_ASSERT(sizeof(SlotArrayLayout) == 3 * sizeof(uintptr_t));
    }
};
michael@0 1357
/*
 * During incremental GC, we return from drainMarkStack without having processed
 * the entire stack. At that point, JS code can run and reallocate slot arrays
 * that are stored on the stack. To prevent this from happening, we replace all
 * ValueArrayTag stack items with SavedValueArrayTag. In the latter, slots
 * pointers are replaced with slot indexes, and slot array end pointers are
 * replaced with the kind of index (properties vs. elements).
 */
void
GCMarker::saveValueRanges()
{
    /* Walk the mark stack from top to bottom, decoding each entry's tag. */
    for (uintptr_t *p = stack.tos_; p > stack.stack_; ) {
        uintptr_t tag = *--p & StackTagMask;
        if (tag == ValueArrayTag) {
            /* Strip the tag, then reinterpret the three words as a SlotArrayLayout. */
            *p &= ~StackTagMask;
            p -= 2;
            SlotArrayLayout *arr = reinterpret_cast<SlotArrayLayout *>(p);
            JSObject *obj = arr->obj;
            JS_ASSERT(obj->isNative());

            HeapSlot *vp = obj->getDenseElements();
            if (arr->end == vp + obj->getDenseInitializedLength()) {
                /* Range lies in the dense elements: save an element index. */
                JS_ASSERT(arr->start >= vp);
                arr->index = arr->start - vp;
                arr->kind = HeapSlot::Element;
            } else {
                /* Otherwise the range lies in the fixed or dynamic slots. */
                HeapSlot *vp = obj->fixedSlots();
                unsigned nfixed = obj->numFixedSlots();
                if (arr->start == arr->end) {
                    /* Empty range: any index past the range works; use slotSpan. */
                    arr->index = obj->slotSpan();
                } else if (arr->start >= vp && arr->start < vp + nfixed) {
                    JS_ASSERT(arr->end == vp + Min(nfixed, obj->slotSpan()));
                    arr->index = arr->start - vp;
                } else {
                    JS_ASSERT(arr->start >= obj->slots &&
                              arr->end == obj->slots + obj->slotSpan() - nfixed);
                    arr->index = (arr->start - obj->slots) + nfixed;
                }
                arr->kind = HeapSlot::Slot;
            }
            /* Re-tag the top word of the triple as a saved value array. */
            p[2] |= SavedValueArrayTag;
        } else if (tag == SavedValueArrayTag) {
            /* Already saved: just skip over its two payload words. */
            p -= 2;
        }
    }
}
michael@0 1404
michael@0 1405 bool
michael@0 1406 GCMarker::restoreValueArray(JSObject *obj, void **vpp, void **endp)
michael@0 1407 {
michael@0 1408 uintptr_t start = stack.pop();
michael@0 1409 HeapSlot::Kind kind = (HeapSlot::Kind) stack.pop();
michael@0 1410
michael@0 1411 if (kind == HeapSlot::Element) {
michael@0 1412 if (!obj->is<ArrayObject>())
michael@0 1413 return false;
michael@0 1414
michael@0 1415 uint32_t initlen = obj->getDenseInitializedLength();
michael@0 1416 HeapSlot *vp = obj->getDenseElements();
michael@0 1417 if (start < initlen) {
michael@0 1418 *vpp = vp + start;
michael@0 1419 *endp = vp + initlen;
michael@0 1420 } else {
michael@0 1421 /* The object shrunk, in which case no scanning is needed. */
michael@0 1422 *vpp = *endp = vp;
michael@0 1423 }
michael@0 1424 } else {
michael@0 1425 JS_ASSERT(kind == HeapSlot::Slot);
michael@0 1426 HeapSlot *vp = obj->fixedSlots();
michael@0 1427 unsigned nfixed = obj->numFixedSlots();
michael@0 1428 unsigned nslots = obj->slotSpan();
michael@0 1429 if (start < nslots) {
michael@0 1430 if (start < nfixed) {
michael@0 1431 *vpp = vp + start;
michael@0 1432 *endp = vp + Min(nfixed, nslots);
michael@0 1433 } else {
michael@0 1434 *vpp = obj->slots + start - nfixed;
michael@0 1435 *endp = obj->slots + nslots - nfixed;
michael@0 1436 }
michael@0 1437 } else {
michael@0 1438 /* The object shrunk, in which case no scanning is needed. */
michael@0 1439 *vpp = *endp = vp;
michael@0 1440 }
michael@0 1441 }
michael@0 1442
michael@0 1443 JS_ASSERT(*vpp <= *endp);
michael@0 1444 return true;
michael@0 1445 }
michael@0 1446
michael@0 1447 void
michael@0 1448 GCMarker::processMarkStackOther(uintptr_t tag, uintptr_t addr)
michael@0 1449 {
michael@0 1450 if (tag == TypeTag) {
michael@0 1451 ScanTypeObject(this, reinterpret_cast<types::TypeObject *>(addr));
michael@0 1452 } else if (tag == SavedValueArrayTag) {
michael@0 1453 JS_ASSERT(!(addr & CellMask));
michael@0 1454 JSObject *obj = reinterpret_cast<JSObject *>(addr);
michael@0 1455 HeapValue *vp, *end;
michael@0 1456 if (restoreValueArray(obj, (void **)&vp, (void **)&end))
michael@0 1457 pushValueArray(obj, vp, end);
michael@0 1458 else
michael@0 1459 pushObject(obj);
michael@0 1460 } else if (tag == JitCodeTag) {
michael@0 1461 MarkChildren(this, reinterpret_cast<jit::JitCode *>(addr));
michael@0 1462 }
michael@0 1463 }
michael@0 1464
/*
 * Pop one entry off the mark stack and scan it, inlining the hot object and
 * value-array cases.
 */
inline void
GCMarker::processMarkStackTop(SliceBudget &budget)
{
    /*
     * The function uses explicit goto and implements the scanning of the
     * object directly. It allows to eliminate the tail recursion and
     * significantly improve the marking performance, see bug 641025.
     */
    HeapSlot *vp, *end;
    JSObject *obj;

    /* The tag occupies the low bits of a mark stack word. */
    uintptr_t addr = stack.pop();
    uintptr_t tag = addr & StackTagMask;
    addr &= ~StackTagMask;

    if (tag == ValueArrayTag) {
        JS_STATIC_ASSERT(ValueArrayTag == 0);
        JS_ASSERT(!(addr & CellMask));
        obj = reinterpret_cast<JSObject *>(addr);
        /* A value array occupies three stack words: obj, start, end. */
        uintptr_t addr2 = stack.pop();
        uintptr_t addr3 = stack.pop();
        JS_ASSERT(addr2 <= addr3);
        JS_ASSERT((addr3 - addr2) % sizeof(Value) == 0);
        vp = reinterpret_cast<HeapSlot *>(addr2);
        end = reinterpret_cast<HeapSlot *>(addr3);
        goto scan_value_array;
    }

    if (tag == ObjectTag) {
        obj = reinterpret_cast<JSObject *>(addr);
        JS_COMPARTMENT_ASSERT(runtime(), obj);
        goto scan_obj;
    }

    /* All remaining tags (types, saved arrays, jitcode) are handled out of line. */
    processMarkStackOther(tag, addr);
    return;

  scan_value_array:
    JS_ASSERT(vp <= end);
    while (vp != end) {
        const Value &v = *vp++;
        if (v.isString()) {
            JSString *str = v.toString();
            if (!str->isPermanentAtom()) {
                JS_COMPARTMENT_ASSERT_STR(runtime(), str);
                JS_ASSERT(runtime()->isAtomsZone(str->zone()) || str->zone() == obj->zone());
                if (str->markIfUnmarked())
                    ScanString(this, str);
            }
        } else if (v.isObject()) {
            JSObject *obj2 = &v.toObject();
            JS_COMPARTMENT_ASSERT(runtime(), obj2);
            JS_ASSERT(obj->compartment() == obj2->compartment());
            if (obj2->markIfUnmarked(getMarkColor())) {
                /* Save the rest of this value array and scan obj2 eagerly. */
                pushValueArray(obj, vp, end);
                obj = obj2;
                goto scan_obj;
            }
        }
    }
    return;

  scan_obj:
    {
        JS_COMPARTMENT_ASSERT(runtime(), obj);

        budget.step();
        if (budget.isOverBudget()) {
            /* Out of budget: put the object back for the next slice. */
            pushObject(obj);
            return;
        }

        types::TypeObject *type = obj->typeFromGC();
        PushMarkStack(this, type);

        Shape *shape = obj->lastProperty();
        PushMarkStack(this, shape);

        /* Call the trace hook if necessary. */
        const Class *clasp = type->clasp();
        if (clasp->trace) {
            // Global objects all have the same trace hook. That hook is safe without barriers
            // if the global has no custom trace hook of its own, or has been moved to a different
            // compartment, and so can't have one.
            JS_ASSERT_IF(runtime()->gcMode() == JSGC_MODE_INCREMENTAL &&
                         runtime()->gcIncrementalEnabled &&
                         !(clasp->trace == JS_GlobalObjectTraceHook &&
                           (!obj->compartment()->options().getTrace() ||
                            !obj->isOwnGlobal())),
                         clasp->flags & JSCLASS_IMPLEMENTS_BARRIERS);
            clasp->trace(this, obj);
        }

        /* Only native objects carry slots to scan. */
        if (!shape->isNative())
            return;

        unsigned nslots = obj->slotSpan();

        /* Scan dense elements first; push them if slots must also be scanned. */
        if (!obj->hasEmptyElements()) {
            vp = obj->getDenseElements();
            end = vp + obj->getDenseInitializedLength();
            if (!nslots)
                goto scan_value_array;
            pushValueArray(obj, vp, end);
        }

        vp = obj->fixedSlots();
        if (obj->slots) {
            /* Fixed and dynamic slots are two separate arrays. */
            unsigned nfixed = obj->numFixedSlots();
            if (nslots > nfixed) {
                pushValueArray(obj, vp, vp + nfixed);
                vp = obj->slots;
                end = vp + (nslots - nfixed);
                goto scan_value_array;
            }
        }
        JS_ASSERT(nslots <= obj->numFixedSlots());
        end = vp + nslots;
        goto scan_value_array;
    }
}
michael@0 1586
michael@0 1587 bool
michael@0 1588 GCMarker::drainMarkStack(SliceBudget &budget)
michael@0 1589 {
michael@0 1590 #ifdef DEBUG
michael@0 1591 JSRuntime *rt = runtime();
michael@0 1592
michael@0 1593 struct AutoCheckCompartment {
michael@0 1594 JSRuntime *runtime;
michael@0 1595 AutoCheckCompartment(JSRuntime *rt) : runtime(rt) {
michael@0 1596 JS_ASSERT(!rt->gcStrictCompartmentChecking);
michael@0 1597 runtime->gcStrictCompartmentChecking = true;
michael@0 1598 }
michael@0 1599 ~AutoCheckCompartment() { runtime->gcStrictCompartmentChecking = false; }
michael@0 1600 } acc(rt);
michael@0 1601 #endif
michael@0 1602
michael@0 1603 if (budget.isOverBudget())
michael@0 1604 return false;
michael@0 1605
michael@0 1606 for (;;) {
michael@0 1607 while (!stack.isEmpty()) {
michael@0 1608 processMarkStackTop(budget);
michael@0 1609 if (budget.isOverBudget()) {
michael@0 1610 saveValueRanges();
michael@0 1611 return false;
michael@0 1612 }
michael@0 1613 }
michael@0 1614
michael@0 1615 if (!hasDelayedChildren())
michael@0 1616 break;
michael@0 1617
michael@0 1618 /*
michael@0 1619 * Mark children of things that caused too deep recursion during the
michael@0 1620 * above tracing. Don't do this until we're done with everything
michael@0 1621 * else.
michael@0 1622 */
michael@0 1623 if (!markDelayedChildren(budget)) {
michael@0 1624 saveValueRanges();
michael@0 1625 return false;
michael@0 1626 }
michael@0 1627 }
michael@0 1628
michael@0 1629 return true;
michael@0 1630 }
michael@0 1631
michael@0 1632 void
michael@0 1633 js::TraceChildren(JSTracer *trc, void *thing, JSGCTraceKind kind)
michael@0 1634 {
michael@0 1635 switch (kind) {
michael@0 1636 case JSTRACE_OBJECT:
michael@0 1637 MarkChildren(trc, static_cast<JSObject *>(thing));
michael@0 1638 break;
michael@0 1639
michael@0 1640 case JSTRACE_STRING:
michael@0 1641 MarkChildren(trc, static_cast<JSString *>(thing));
michael@0 1642 break;
michael@0 1643
michael@0 1644 case JSTRACE_SCRIPT:
michael@0 1645 MarkChildren(trc, static_cast<JSScript *>(thing));
michael@0 1646 break;
michael@0 1647
michael@0 1648 case JSTRACE_LAZY_SCRIPT:
michael@0 1649 MarkChildren(trc, static_cast<LazyScript *>(thing));
michael@0 1650 break;
michael@0 1651
michael@0 1652 case JSTRACE_SHAPE:
michael@0 1653 MarkChildren(trc, static_cast<Shape *>(thing));
michael@0 1654 break;
michael@0 1655
michael@0 1656 case JSTRACE_JITCODE:
michael@0 1657 MarkChildren(trc, (js::jit::JitCode *)thing);
michael@0 1658 break;
michael@0 1659
michael@0 1660 case JSTRACE_BASE_SHAPE:
michael@0 1661 MarkChildren(trc, static_cast<BaseShape *>(thing));
michael@0 1662 break;
michael@0 1663
michael@0 1664 case JSTRACE_TYPE_OBJECT:
michael@0 1665 MarkChildren(trc, (types::TypeObject *)thing);
michael@0 1666 break;
michael@0 1667 }
michael@0 1668 }
michael@0 1669
michael@0 1670 static void
michael@0 1671 UnmarkGrayGCThing(void *thing)
michael@0 1672 {
michael@0 1673 static_cast<js::gc::Cell *>(thing)->unmark(js::gc::GRAY);
michael@0 1674 }
michael@0 1675
michael@0 1676 static void
michael@0 1677 UnmarkGrayChildren(JSTracer *trc, void **thingp, JSGCTraceKind kind);
michael@0 1678
struct UnmarkGrayTracer : public JSTracer
{
    /*
     * Tracer used to turn gray GC things black (see the comment preceding
     * UnmarkGrayChildren below). We set eagerlyTraceWeakMaps to false
     * (DoNotTraceWeakMaps) because the cycle collector will fix up any
     * color mismatches involving weakmaps when it runs.
     */
    UnmarkGrayTracer(JSRuntime *rt)
      : JSTracer(rt, UnmarkGrayChildren, DoNotTraceWeakMaps),
        tracingShape(false),
        previousShape(nullptr),
        unmarkedAny(false)
    {}

    /*
     * Child-tracer constructor: |tracingShape| is true when the thing whose
     * children are being traced is itself a shape. UnmarkGrayChildren uses
     * this to flatten shape-chain traversal into a loop instead of
     * recursing.
     */
    UnmarkGrayTracer(JSTracer *trc, bool tracingShape)
      : JSTracer(trc->runtime(), UnmarkGrayChildren, DoNotTraceWeakMaps),
        tracingShape(tracingShape),
        previousShape(nullptr),
        unmarkedAny(false)
    {}

    /* True iff we are tracing the immediate children of a shape. */
    bool tracingShape;

    /* If tracingShape, shape child or nullptr. Otherwise, nullptr. */
    void *previousShape;

    /* Whether we unmarked anything (set by UnmarkGrayChildren). */
    bool unmarkedAny;
};
michael@0 1708
michael@0 1709 /*
michael@0 1710 * The GC and CC are run independently. Consequently, the following sequence of
michael@0 1711 * events can occur:
michael@0 1712 * 1. GC runs and marks an object gray.
michael@0 1713 * 2. Some JS code runs that creates a pointer from a JS root to the gray
michael@0 1714 * object. If we re-ran a GC at this point, the object would now be black.
michael@0 1715 * 3. Now we run the CC. It may think it can collect the gray object, even
michael@0 1716 * though it's reachable from the JS heap.
michael@0 1717 *
michael@0 1718 * To prevent this badness, we unmark the gray bit of an object when it is
michael@0 1719 * accessed by callers outside XPConnect. This would cause the object to go
michael@0 1720 * black in step 2 above. This must be done on everything reachable from the
michael@0 1721 * object being returned. The following code takes care of the recursive
michael@0 1722 * re-coloring.
michael@0 1723 *
michael@0 1724 * There is an additional complication for certain kinds of edges that are not
michael@0 1725 * contained explicitly in the source object itself, such as from a weakmap key
michael@0 1726 * to its value, and from an object being watched by a watchpoint to the
michael@0 1727 * watchpoint's closure. These "implicit edges" are represented in some other
michael@0 1728 * container object, such as the weakmap or the watchpoint itself. In these
michael@0 1729 * cases, calling unmark gray on an object won't find all of its children.
michael@0 1730 *
michael@0 1731 * Handling these implicit edges has two parts:
michael@0 1732 * - A special pass enumerating all of the containers that know about the
michael@0 1733 * implicit edges to fix any black-gray edges that have been created. This
michael@0 1734 * is implemented in nsXPConnect::FixWeakMappingGrayBits.
michael@0 1735 * - To prevent any incorrectly gray objects from escaping to live JS outside
michael@0 1736 * of the containers, we must add unmark-graying read barriers to these
michael@0 1737 * containers.
michael@0 1738 */
michael@0 1739 static void
michael@0 1740 UnmarkGrayChildren(JSTracer *trc, void **thingp, JSGCTraceKind kind)
michael@0 1741 {
michael@0 1742 void *thing = *thingp;
michael@0 1743 int stackDummy;
michael@0 1744 if (!JS_CHECK_STACK_SIZE(trc->runtime()->mainThread.nativeStackLimit[StackForSystemCode], &stackDummy)) {
michael@0 1745 /*
michael@0 1746 * If we run out of stack, we take a more drastic measure: require that
michael@0 1747 * we GC again before the next CC.
michael@0 1748 */
michael@0 1749 trc->runtime()->gcGrayBitsValid = false;
michael@0 1750 return;
michael@0 1751 }
michael@0 1752
michael@0 1753 UnmarkGrayTracer *tracer = static_cast<UnmarkGrayTracer *>(trc);
michael@0 1754 if (!IsInsideNursery(trc->runtime(), thing)) {
michael@0 1755 if (!JS::GCThingIsMarkedGray(thing))
michael@0 1756 return;
michael@0 1757
michael@0 1758 UnmarkGrayGCThing(thing);
michael@0 1759 tracer->unmarkedAny = true;
michael@0 1760 }
michael@0 1761
michael@0 1762 /*
michael@0 1763 * Trace children of |thing|. If |thing| and its parent are both shapes,
michael@0 1764 * |thing| will get saved to mPreviousShape without being traced. The parent
michael@0 1765 * will later trace |thing|. This is done to avoid increasing the stack
michael@0 1766 * depth during shape tracing. It is safe to do because a shape can only
michael@0 1767 * have one child that is a shape.
michael@0 1768 */
michael@0 1769 UnmarkGrayTracer childTracer(tracer, kind == JSTRACE_SHAPE);
michael@0 1770
michael@0 1771 if (kind != JSTRACE_SHAPE) {
michael@0 1772 JS_TraceChildren(&childTracer, thing, kind);
michael@0 1773 JS_ASSERT(!childTracer.previousShape);
michael@0 1774 tracer->unmarkedAny |= childTracer.unmarkedAny;
michael@0 1775 return;
michael@0 1776 }
michael@0 1777
michael@0 1778 if (tracer->tracingShape) {
michael@0 1779 JS_ASSERT(!tracer->previousShape);
michael@0 1780 tracer->previousShape = thing;
michael@0 1781 return;
michael@0 1782 }
michael@0 1783
michael@0 1784 do {
michael@0 1785 JS_ASSERT(!JS::GCThingIsMarkedGray(thing));
michael@0 1786 JS_TraceChildren(&childTracer, thing, JSTRACE_SHAPE);
michael@0 1787 thing = childTracer.previousShape;
michael@0 1788 childTracer.previousShape = nullptr;
michael@0 1789 } while (thing);
michael@0 1790 tracer->unmarkedAny |= childTracer.unmarkedAny;
michael@0 1791 }
michael@0 1792
michael@0 1793 JS_FRIEND_API(bool)
michael@0 1794 JS::UnmarkGrayGCThingRecursively(void *thing, JSGCTraceKind kind)
michael@0 1795 {
michael@0 1796 JS_ASSERT(kind != JSTRACE_SHAPE);
michael@0 1797
michael@0 1798 JSRuntime *rt = static_cast<Cell *>(thing)->runtimeFromMainThread();
michael@0 1799
michael@0 1800 bool unmarkedArg = false;
michael@0 1801 if (!IsInsideNursery(rt, thing)) {
michael@0 1802 if (!JS::GCThingIsMarkedGray(thing))
michael@0 1803 return false;
michael@0 1804
michael@0 1805 UnmarkGrayGCThing(thing);
michael@0 1806 unmarkedArg = true;
michael@0 1807 }
michael@0 1808
michael@0 1809 UnmarkGrayTracer trc(rt);
michael@0 1810 JS_TraceChildren(&trc, thing, kind);
michael@0 1811
michael@0 1812 return unmarkedArg || trc.unmarkedAny;
michael@0 1813 }

mercurial