Wed, 31 Dec 2014 06:09:35 +0100
Cloned from the upstream tor-browser origin at tag tor-browser-31.3.0esr-4.5-1-build1,
revision ID fc1c9ff7c1b2defdbc039f12214767608f46423f, for hacking purposes.
michael@0 | 1 | /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- |
michael@0 | 2 | * vim: set ts=8 sts=4 et sw=4 tw=99: |
michael@0 | 3 | * This Source Code Form is subject to the terms of the Mozilla Public |
michael@0 | 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this |
michael@0 | 5 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ |
michael@0 | 6 | |
michael@0 | 7 | #include "mozilla/ArrayUtils.h" |
michael@0 | 8 | |
michael@0 | 9 | #ifdef MOZ_VALGRIND |
michael@0 | 10 | # include <valgrind/memcheck.h> |
michael@0 | 11 | #endif |
michael@0 | 12 | |
michael@0 | 13 | #include "jscntxt.h" |
michael@0 | 14 | #include "jsgc.h" |
michael@0 | 15 | #include "jsonparser.h" |
michael@0 | 16 | #include "jsprf.h" |
michael@0 | 17 | #include "jstypes.h" |
michael@0 | 18 | #include "jswatchpoint.h" |
michael@0 | 19 | |
michael@0 | 20 | #include "builtin/MapObject.h" |
michael@0 | 21 | #include "frontend/BytecodeCompiler.h" |
michael@0 | 22 | #include "gc/GCInternals.h" |
michael@0 | 23 | #include "gc/Marking.h" |
michael@0 | 24 | #ifdef JS_ION |
michael@0 | 25 | # include "jit/IonMacroAssembler.h" |
michael@0 | 26 | #endif |
michael@0 | 27 | #include "js/HashTable.h" |
michael@0 | 28 | #include "vm/Debugger.h" |
michael@0 | 29 | |
michael@0 | 30 | #include "jsgcinlines.h" |
michael@0 | 31 | #include "jsobjinlines.h" |
michael@0 | 32 | |
using namespace js;
using namespace js::gc;

using mozilla::ArrayEnd;

/* Convenience aliases for iterating the runtime's rooted-value map. */
typedef RootedValueMap::Range RootRange;
typedef RootedValueMap::Entry RootEntry;
typedef RootedValueMap::Enum RootEnum;
michael@0 | 41 | |
michael@0 | 42 | #ifdef JSGC_USE_EXACT_ROOTING |
/*
 * Trace a single exact stack root. 'rooter' holds an untyped slot; 'kind'
 * selects the Mark*Root routine matching the slot's actual type.
 */
static inline void
MarkExactStackRoot(JSTracer *trc, Rooted<void*> *rooter, ThingRootKind kind)
{
    void **addr = (void **)rooter->address();
    /* A null (or null-tagged) slot holds no GC thing; nothing to trace. */
    if (IsNullTaggedPointer(*addr))
        return;

    /*
     * Rooted<JSObject*> may hold the lazy-proto sentinel, which is not a
     * real object pointer and must not be traced as one.
     */
    if (kind == THING_ROOT_OBJECT && *addr == TaggedProto::LazyProto)
        return;

    switch (kind) {
      case THING_ROOT_OBJECT:      MarkObjectRoot(trc, (JSObject **)addr, "exact-object"); break;
      case THING_ROOT_STRING:      MarkStringRoot(trc, (JSString **)addr, "exact-string"); break;
      case THING_ROOT_SCRIPT:      MarkScriptRoot(trc, (JSScript **)addr, "exact-script"); break;
      case THING_ROOT_LAZY_SCRIPT: MarkLazyScriptRoot(trc, (LazyScript **)addr, "exact-lazy-script"); break;
      case THING_ROOT_SHAPE:       MarkShapeRoot(trc, (Shape **)addr, "exact-shape"); break;
      case THING_ROOT_BASE_SHAPE:  MarkBaseShapeRoot(trc, (BaseShape **)addr, "exact-baseshape"); break;
      case THING_ROOT_TYPE:        MarkTypeRoot(trc, (types::Type *)addr, "exact-type"); break;
      case THING_ROOT_TYPE_OBJECT: MarkTypeObjectRoot(trc, (types::TypeObject **)addr, "exact-typeobject"); break;
      case THING_ROOT_JIT_CODE:    MarkJitCodeRoot(trc, (jit::JitCode **)addr, "exact-jitcode"); break;
      case THING_ROOT_VALUE:       MarkValueRoot(trc, (Value *)addr, "exact-value"); break;
      case THING_ROOT_ID:          MarkIdRoot(trc, (jsid *)addr, "exact-id"); break;
      case THING_ROOT_BINDINGS:    ((Bindings *)addr)->trace(trc); break;
      case THING_ROOT_PROPERTY_DESCRIPTOR: ((JSPropertyDescriptor *)addr)->trace(trc); break;
      case THING_ROOT_CUSTOM: {
        // 'rooter' is a member within a class containing a vtable. Back up
        // to the vtable and call trace() through it.
        const size_t rooterOffset = offsetof(RootedGeneric<void*>, rooter);
        reinterpret_cast< RootedGeneric<void*>* >(uintptr_t(rooter) - rooterOffset)->trace(trc);
        break;
      }
      default: MOZ_ASSUME_UNREACHABLE("Invalid THING_ROOT kind"); break;
    }
}
michael@0 | 77 | |
michael@0 | 78 | static inline void |
michael@0 | 79 | MarkExactStackRootList(JSTracer *trc, Rooted<void*> *rooter, ThingRootKind kind) |
michael@0 | 80 | { |
michael@0 | 81 | while (rooter) { |
michael@0 | 82 | MarkExactStackRoot(trc, rooter, kind); |
michael@0 | 83 | rooter = rooter->previous(); |
michael@0 | 84 | } |
michael@0 | 85 | } |
michael@0 | 86 | |
michael@0 | 87 | static void |
michael@0 | 88 | MarkExactStackRoots(JSTracer *trc) |
michael@0 | 89 | { |
michael@0 | 90 | for (unsigned i = 0; i < THING_ROOT_LIMIT; i++) { |
michael@0 | 91 | for (ContextIter cx(trc->runtime()); !cx.done(); cx.next()) |
michael@0 | 92 | MarkExactStackRootList(trc, cx->thingGCRooters[i], ThingRootKind(i)); |
michael@0 | 93 | |
michael@0 | 94 | MarkExactStackRootList(trc, trc->runtime()->mainThread.thingGCRooters[i], ThingRootKind(i)); |
michael@0 | 95 | } |
michael@0 | 96 | } |
michael@0 | 97 | #endif /* JSGC_USE_EXACT_ROOTING */ |
michael@0 | 98 | |
/*
 * Classification of a word examined by the conservative stack scanner.
 * CGCT_VALID means the word may point at a (possibly dead) GC thing; every
 * other value records why the word was rejected.
 */
enum ConservativeGCTest
{
    CGCT_VALID,             /* word may point at a GC thing */
    CGCT_LOWBITSET,         /* excluded because one of the low bits was set */
    CGCT_NOTARENA,          /* not within arena range in a chunk */
    CGCT_OTHERCOMPARTMENT,  /* in another compartment */
    CGCT_NOTCHUNK,          /* not within a valid chunk */
    CGCT_FREEARENA,         /* within arena containing only free things */
    CGCT_NOTLIVE,           /* gcthing is not allocated */
    CGCT_END
};
michael@0 | 110 | |
/*
 * Tests whether w is a (possibly dead) GC thing. Returns CGCT_VALID and
 * details about the thing if so. On failure, returns the reason for rejection.
 *
 * On success the aligned thing pointer, its arena header, and its alloc kind
 * are stored through the optional out-params.
 */
static inline ConservativeGCTest
IsAddressableGCThing(JSRuntime *rt, uintptr_t w,
                     bool skipUncollectedCompartments,
                     gc::AllocKind *thingKindPtr,
                     ArenaHeader **arenaHeader,
                     void **thing)
{
    /*
     * We assume that the compiler never uses sub-word alignment to store
     * pointers and does not tag pointers on its own. Additionally, the value
     * representation for all values and the jsid representation for GC-things
     * do not touch the low two bits. Thus any word with the low two bits set
     * is not a valid GC-thing.
     */
    JS_STATIC_ASSERT(JSID_TYPE_STRING == 0 && JSID_TYPE_OBJECT == 4);
    if (w & 0x3)
        return CGCT_LOWBITSET;

    /*
     * An object jsid has its low bits tagged. In the value representation on
     * 64-bit, the high bits are tagged.
     */
    const uintptr_t JSID_PAYLOAD_MASK = ~uintptr_t(JSID_TYPE_MASK);
#if JS_BITS_PER_WORD == 32
    uintptr_t addr = w & JSID_PAYLOAD_MASK;
#elif JS_BITS_PER_WORD == 64
    uintptr_t addr = w & JSID_PAYLOAD_MASK & JSVAL_PAYLOAD_MASK;
#endif

    Chunk *chunk = Chunk::fromAddress(addr);

    /* Only addresses inside chunks the GC actually allocated can be things. */
    if (!rt->gcChunkSet.has(chunk))
        return CGCT_NOTCHUNK;

    /*
     * We query for pointers outside the arena array after checking for an
     * allocated chunk. Such pointers are rare and we want to reject them
     * after doing more likely rejections.
     */
    if (!Chunk::withinArenasRange(addr))
        return CGCT_NOTARENA;

    /* If the arena is not currently allocated, don't access the header. */
    size_t arenaOffset = Chunk::arenaIndex(addr);
    if (chunk->decommittedArenas.get(arenaOffset))
        return CGCT_FREEARENA;

    ArenaHeader *aheader = &chunk->arenas[arenaOffset].aheader;

    if (!aheader->allocated())
        return CGCT_FREEARENA;

    /* During marking, pointers into zones not being collected are ignored. */
    if (skipUncollectedCompartments && !aheader->zone->isCollecting())
        return CGCT_OTHERCOMPARTMENT;

    /* Reject addresses that fall inside the arena's header area. */
    AllocKind thingKind = aheader->getAllocKind();
    uintptr_t offset = addr & ArenaMask;
    uintptr_t minOffset = Arena::firstThingOffset(thingKind);
    if (offset < minOffset)
        return CGCT_NOTARENA;

    /* addr can point inside the thing so we must align the address. */
    uintptr_t shift = (offset - minOffset) % Arena::thingSize(thingKind);
    addr -= shift;

    if (thing)
        *thing = reinterpret_cast<void *>(addr);
    if (arenaHeader)
        *arenaHeader = aheader;
    if (thingKindPtr)
        *thingKindPtr = thingKind;
    return CGCT_VALID;
}
michael@0 | 188 | |
/*
 * Returns CGCT_VALID and mark it if the w can be a live GC thing and sets
 * thingKind accordingly. Otherwise returns the reason for rejection.
 */
static inline ConservativeGCTest
MarkIfGCThingWord(JSTracer *trc, uintptr_t w)
{
    void *thing;
    ArenaHeader *aheader;
    AllocKind thingKind;
    ConservativeGCTest status =
        IsAddressableGCThing(trc->runtime(), w, IS_GC_MARKING_TRACER(trc),
                             &thingKind, &aheader, &thing);
    if (status != CGCT_VALID)
        return status;

    /*
     * Check if the thing is free. We must use the list of free spans as at
     * this point we no longer have the mark bits from the previous GC run and
     * we must account for newly allocated things.
     */
    if (InFreeList(aheader, thing))
        return CGCT_NOTLIVE;

    JSGCTraceKind traceKind = MapAllocToTraceKind(thingKind);
#ifdef DEBUG
    /* In debug builds, name the root after the stack address it came from. */
    const char pattern[] = "machine_stack %p";
    char nameBuf[sizeof(pattern) - 2 + sizeof(thing) * 2];
    JS_snprintf(nameBuf, sizeof(nameBuf), pattern, thing);
    trc->setTracingName(nameBuf);
#endif
    trc->setTracingLocation((void *)w);
    /* Mark through a copy so we can assert the tracer did not move the thing. */
    void *tmp = thing;
    MarkKind(trc, &tmp, traceKind);
    JS_ASSERT(tmp == thing);

#ifdef DEBUG
    /* Record roots found during root marking so later slices can replay them. */
    if (trc->runtime()->gcIncrementalState == MARK_ROOTS)
        trc->runtime()->mainThread.gcSavedRoots.append(
            PerThreadData::SavedGCRoot(thing, traceKind));
#endif

    return CGCT_VALID;
}
michael@0 | 233 | |
michael@0 | 234 | #ifndef JSGC_USE_EXACT_ROOTING |
/* Conservatively mark a single word taken from the native stack or registers. */
static void
MarkWordConservatively(JSTracer *trc, uintptr_t w)
{
    /*
     * The conservative scanner may access words that valgrind considers as
     * undefined. To avoid false positives and not to alter valgrind view of
     * the memory we make as memcheck-defined the argument, a copy of the
     * original word. See bug 572678.
     */
#ifdef MOZ_VALGRIND
    JS_SILENCE_UNUSED_VALUE_IN_EXPR(VALGRIND_MAKE_MEM_DEFINED(&w, sizeof(w)));
#endif

    MarkIfGCThingWord(trc, w);
}
michael@0 | 250 | |
michael@0 | 251 | MOZ_ASAN_BLACKLIST |
michael@0 | 252 | static void |
michael@0 | 253 | MarkRangeConservatively(JSTracer *trc, const uintptr_t *begin, const uintptr_t *end) |
michael@0 | 254 | { |
michael@0 | 255 | JS_ASSERT(begin <= end); |
michael@0 | 256 | for (const uintptr_t *i = begin; i < end; ++i) |
michael@0 | 257 | MarkWordConservatively(trc, *i); |
michael@0 | 258 | } |
michael@0 | 259 | |
/*
 * Conservatively mark [begin, end), skipping over any native-stack regions
 * occupied by JIT activations (those frames are traced exactly elsewhere).
 */
static void
MarkRangeConservativelyAndSkipIon(JSTracer *trc, JSRuntime *rt, const uintptr_t *begin, const uintptr_t *end)
{
    const uintptr_t *i = begin;

#if JS_STACK_GROWTH_DIRECTION < 0 && defined(JS_ION) && !defined(JS_ARM_SIMULATOR)
    // Walk only regions in between JIT activations. Note that non-volatile
    // registers are spilled to the stack before the entry frame, ensuring
    // that the conservative scanner will still see them.
    //
    // If the ARM simulator is enabled, JIT activations are not on the native
    // stack but on the simulator stack, so we don't have to skip JIT regions
    // in this case.
    for (jit::JitActivationIterator iter(rt); !iter.done(); ++iter) {
        uintptr_t *jitMin, *jitEnd;
        iter.jitStackRange(jitMin, jitEnd);

        /* Mark the gap before this activation, then jump past it. */
        MarkRangeConservatively(trc, i, jitMin);
        i = jitEnd;
    }
#endif

    // Mark everything after the most recent Ion activation.
    MarkRangeConservatively(trc, i, end);
}
michael@0 | 285 | |
/*
 * Conservatively scan the native stack and the saved register snapshot for
 * possible GC-thing pointers. In debug builds, 'useSavedRoots' replays the
 * roots recorded during a previous root-marking pass instead of rescanning.
 */
static MOZ_NEVER_INLINE void
MarkConservativeStackRoots(JSTracer *trc, bool useSavedRoots)
{
    JSRuntime *rt = trc->runtime();

#ifdef DEBUG
    if (useSavedRoots) {
        for (PerThreadData::SavedGCRoot *root = rt->mainThread.gcSavedRoots.begin();
             root != rt->mainThread.gcSavedRoots.end();
             root++)
        {
            trc->setTracingName("cstack");
            MarkKind(trc, &root->thing, root->kind);
        }
        return;
    }

    /* Starting a fresh root-marking pass; drop the previously saved roots. */
    if (rt->gcIncrementalState == MARK_ROOTS)
        rt->mainThread.gcSavedRoots.clearAndFree();
#endif

    ConservativeGCData *cgcd = &rt->conservativeGC;
    if (!cgcd->hasStackToScan()) {
#ifdef JS_THREADSAFE
        JS_ASSERT(!rt->requestDepth);
#endif
        return;
    }

    /* Compute the scanned range according to the stack growth direction. */
    uintptr_t *stackMin, *stackEnd;
#if JS_STACK_GROWTH_DIRECTION > 0
    stackMin = reinterpret_cast<uintptr_t *>(rt->nativeStackBase);
    stackEnd = cgcd->nativeStackTop;
#else
    stackMin = cgcd->nativeStackTop + 1;
    stackEnd = reinterpret_cast<uintptr_t *>(rt->nativeStackBase);
#endif

    JS_ASSERT(stackMin <= stackEnd);
    MarkRangeConservativelyAndSkipIon(trc, rt, stackMin, stackEnd);
    /* Also scan the CPU registers captured by recordStackTop(). */
    MarkRangeConservatively(trc, cgcd->registerSnapshot.words,
                            ArrayEnd(cgcd->registerSnapshot.words));
}
michael@0 | 329 | |
/* Conservatively mark a range of Values that may hold GC-thing pointers. */
void
js::MarkStackRangeConservatively(JSTracer *trc, Value *beginv, Value *endv)
{
    const uintptr_t *begin = beginv->payloadUIntPtr();
    const uintptr_t *end = endv->payloadUIntPtr();
#ifdef JS_NUNBOX32
    /*
     * With 64-bit jsvals on 32-bit systems, we can optimize a bit by
     * scanning only the payloads.
     */
    JS_ASSERT(begin <= end);
    for (const uintptr_t *i = begin; i < end; i += sizeof(Value) / sizeof(uintptr_t))
        MarkWordConservatively(trc, *i);
#else
    MarkRangeConservatively(trc, begin, end);
#endif
}
michael@0 | 347 | |
michael@0 | 348 | #endif /* JSGC_USE_EXACT_ROOTING */ |
michael@0 | 349 | |
/*
 * Capture the current native stack extent and CPU register contents so the
 * conservative scanner can later examine them for GC-thing pointers.
 */
MOZ_NEVER_INLINE void
ConservativeGCData::recordStackTop()
{
    /* Update the native stack pointer if it points to a bigger stack. */
    uintptr_t dummy;
    nativeStackTop = &dummy;

    /*
     * To record and update the register snapshot for the conservative scanning
     * with the latest values we use setjmp.
     */
#if defined(_MSC_VER)
# pragma warning(push)
# pragma warning(disable: 4611)   /* setjmp interaction with C++ is non-portable */
#endif
    (void) setjmp(registerSnapshot.jmpbuf);
#if defined(_MSC_VER)
# pragma warning(pop)
#endif
}
michael@0 | 370 | |
michael@0 | 371 | void |
michael@0 | 372 | JS::AutoIdArray::trace(JSTracer *trc) |
michael@0 | 373 | { |
michael@0 | 374 | JS_ASSERT(tag_ == IDARRAY); |
michael@0 | 375 | gc::MarkIdRange(trc, idArray->length, idArray->vector, "JSAutoIdArray.idArray"); |
michael@0 | 376 | } |
michael@0 | 377 | |
/*
 * Trace the contents of this rooter. A negative tag_ identifies a specific
 * AutoGCRooter subclass; a non-negative tag_ is the element count of an
 * AutoArrayRooter and is handled after the switch.
 */
inline void
AutoGCRooter::trace(JSTracer *trc)
{
    switch (tag_) {
      case PARSER:
        frontend::MarkParser(trc, this);
        return;

      case IDARRAY: {
        JSIdArray *ida = static_cast<AutoIdArray *>(this)->idArray;
        MarkIdRange(trc, ida->length, ida->vector, "JS::AutoIdArray.idArray");
        return;
      }

      case DESCRIPTORS: {
        PropDescArray &descriptors =
            static_cast<AutoPropDescArrayRooter *>(this)->descriptors;
        /* Each PropDesc holds four Values that must be kept alive. */
        for (size_t i = 0, len = descriptors.length(); i < len; i++) {
            PropDesc &desc = descriptors[i];
            MarkValueRoot(trc, &desc.pd_, "PropDesc::pd_");
            MarkValueRoot(trc, &desc.value_, "PropDesc::value_");
            MarkValueRoot(trc, &desc.get_, "PropDesc::get_");
            MarkValueRoot(trc, &desc.set_, "PropDesc::set_");
        }
        return;
      }

      case ID:
        MarkIdRoot(trc, &static_cast<AutoIdRooter *>(this)->id_, "JS::AutoIdRooter.id_");
        return;

      case VALVECTOR: {
        AutoValueVector::VectorImpl &vector = static_cast<AutoValueVector *>(this)->vector;
        MarkValueRootRange(trc, vector.length(), vector.begin(), "js::AutoValueVector.vector");
        return;
      }

      case IDVECTOR: {
        AutoIdVector::VectorImpl &vector = static_cast<AutoIdVector *>(this)->vector;
        MarkIdRootRange(trc, vector.length(), vector.begin(), "js::AutoIdVector.vector");
        return;
      }

      case SHAPEVECTOR: {
        AutoShapeVector::VectorImpl &vector = static_cast<js::AutoShapeVector *>(this)->vector;
        MarkShapeRootRange(trc, vector.length(), const_cast<Shape **>(vector.begin()),
                           "js::AutoShapeVector.vector");
        return;
      }

      case OBJVECTOR: {
        AutoObjectVector::VectorImpl &vector = static_cast<AutoObjectVector *>(this)->vector;
        MarkObjectRootRange(trc, vector.length(), vector.begin(), "js::AutoObjectVector.vector");
        return;
      }

      case FUNVECTOR: {
        AutoFunctionVector::VectorImpl &vector = static_cast<AutoFunctionVector *>(this)->vector;
        MarkObjectRootRange(trc, vector.length(), vector.begin(), "js::AutoFunctionVector.vector");
        return;
      }

      case STRINGVECTOR: {
        AutoStringVector::VectorImpl &vector = static_cast<AutoStringVector *>(this)->vector;
        MarkStringRootRange(trc, vector.length(), vector.begin(), "js::AutoStringVector.vector");
        return;
      }

      case NAMEVECTOR: {
        AutoNameVector::VectorImpl &vector = static_cast<AutoNameVector *>(this)->vector;
        MarkStringRootRange(trc, vector.length(), vector.begin(), "js::AutoNameVector.vector");
        return;
      }

      case VALARRAY: {
        /*
         * We don't know the template size parameter, but we can safely treat it
         * as an AutoValueArray<1> because the length is stored separately.
         */
        AutoValueArray<1> *array = static_cast<AutoValueArray<1> *>(this);
        MarkValueRootRange(trc, array->length(), array->begin(), "js::AutoValueArray");
        return;
      }

      case SCRIPTVECTOR: {
        AutoScriptVector::VectorImpl &vector = static_cast<AutoScriptVector *>(this)->vector;
        MarkScriptRootRange(trc, vector.length(), vector.begin(), "js::AutoScriptVector.vector");
        return;
      }

      case OBJOBJHASHMAP: {
        AutoObjectObjectHashMap::HashMapImpl &map = static_cast<AutoObjectObjectHashMap *>(this)->map;
        for (AutoObjectObjectHashMap::Enum e(map); !e.empty(); e.popFront()) {
            MarkObjectRoot(trc, &e.front().value(), "AutoObjectObjectHashMap value");
            /* Keys are marked via a copy and rekeyed if the tracer moved them. */
            trc->setTracingLocation((void *)&e.front().key());
            JSObject *key = e.front().key();
            MarkObjectRoot(trc, &key, "AutoObjectObjectHashMap key");
            if (key != e.front().key())
                e.rekeyFront(key);
        }
        return;
      }

      case OBJU32HASHMAP: {
        AutoObjectUnsigned32HashMap *self = static_cast<AutoObjectUnsigned32HashMap *>(this);
        AutoObjectUnsigned32HashMap::HashMapImpl &map = self->map;
        for (AutoObjectUnsigned32HashMap::Enum e(map); !e.empty(); e.popFront()) {
            JSObject *key = e.front().key();
            MarkObjectRoot(trc, &key, "AutoObjectUnsignedHashMap key");
            if (key != e.front().key())
                e.rekeyFront(key);
        }
        return;
      }

      case OBJHASHSET: {
        AutoObjectHashSet *self = static_cast<AutoObjectHashSet *>(this);
        AutoObjectHashSet::HashSetImpl &set = self->set;
        for (AutoObjectHashSet::Enum e(set); !e.empty(); e.popFront()) {
            JSObject *obj = e.front();
            MarkObjectRoot(trc, &obj, "AutoObjectHashSet value");
            if (obj != e.front())
                e.rekeyFront(obj);
        }
        return;
      }

      case HASHABLEVALUE: {
        AutoHashableValueRooter *rooter = static_cast<AutoHashableValueRooter *>(this);
        rooter->trace(trc);
        return;
      }

      case IONMASM: {
#ifdef JS_ION
        static_cast<js::jit::MacroAssembler::AutoRooter *>(this)->masm()->trace(trc);
#endif
        return;
      }

      case IONALLOC: {
#ifdef JS_ION
        static_cast<js::jit::AutoTempAllocatorRooter *>(this)->trace(trc);
#endif
        return;
      }

      case WRAPPER: {
        /*
         * We need to use MarkValueUnbarriered here because we mark wrapper
         * roots in every slice. This is because of some rule-breaking in
         * RemapAllWrappersForObject; see comment there.
         */
        MarkValueUnbarriered(trc, &static_cast<AutoWrapperRooter *>(this)->value.get(),
                             "JS::AutoWrapperRooter.value");
        return;
      }

      case WRAPVECTOR: {
        AutoWrapperVector::VectorImpl &vector = static_cast<AutoWrapperVector *>(this)->vector;
        /*
         * We need to use MarkValueUnbarriered here because we mark wrapper
         * roots in every slice. This is because of some rule-breaking in
         * RemapAllWrappersForObject; see comment there.
         */
        for (WrapperValue *p = vector.begin(); p < vector.end(); p++)
            MarkValueUnbarriered(trc, &p->get(), "js::AutoWrapperVector.vector");
        return;
      }

      case JSONPARSER:
        static_cast<js::JSONParser *>(this)->trace(trc);
        return;

      case CUSTOM:
        static_cast<JS::CustomAutoRooter *>(this)->trace(trc);
        return;
    }

    /* Fell through the switch: tag_ is an AutoArrayRooter element count. */
    JS_ASSERT(tag_ >= 0);
    if (Value *vp = static_cast<AutoArrayRooter *>(this)->array)
        MarkValueRootRange(trc, tag_, vp, "JS::AutoArrayRooter.array");
}
michael@0 | 561 | |
michael@0 | 562 | /* static */ void |
michael@0 | 563 | AutoGCRooter::traceAll(JSTracer *trc) |
michael@0 | 564 | { |
michael@0 | 565 | for (ContextIter cx(trc->runtime()); !cx.done(); cx.next()) { |
michael@0 | 566 | for (js::AutoGCRooter *gcr = cx->autoGCRooters; gcr; gcr = gcr->down) |
michael@0 | 567 | gcr->trace(trc); |
michael@0 | 568 | } |
michael@0 | 569 | } |
michael@0 | 570 | |
michael@0 | 571 | /* static */ void |
michael@0 | 572 | AutoGCRooter::traceAllWrappers(JSTracer *trc) |
michael@0 | 573 | { |
michael@0 | 574 | for (ContextIter cx(trc->runtime()); !cx.done(); cx.next()) { |
michael@0 | 575 | for (js::AutoGCRooter *gcr = cx->autoGCRooters; gcr; gcr = gcr->down) { |
michael@0 | 576 | if (gcr->tag_ == WRAPVECTOR || gcr->tag_ == WRAPPER) |
michael@0 | 577 | gcr->trace(trc); |
michael@0 | 578 | } |
michael@0 | 579 | } |
michael@0 | 580 | } |
michael@0 | 581 | |
michael@0 | 582 | void |
michael@0 | 583 | AutoHashableValueRooter::trace(JSTracer *trc) |
michael@0 | 584 | { |
michael@0 | 585 | MarkValueRoot(trc, reinterpret_cast<Value*>(&value), "AutoHashableValueRooter"); |
michael@0 | 586 | } |
michael@0 | 587 | |
michael@0 | 588 | void |
michael@0 | 589 | StackShape::trace(JSTracer *trc) |
michael@0 | 590 | { |
michael@0 | 591 | if (base) |
michael@0 | 592 | MarkBaseShapeRoot(trc, (BaseShape**) &base, "StackShape base"); |
michael@0 | 593 | MarkIdRoot(trc, (jsid*) &propid, "StackShape id"); |
michael@0 | 594 | } |
michael@0 | 595 | |
michael@0 | 596 | void |
michael@0 | 597 | JSPropertyDescriptor::trace(JSTracer *trc) |
michael@0 | 598 | { |
michael@0 | 599 | if (obj) |
michael@0 | 600 | MarkObjectRoot(trc, &obj, "Descriptor::obj"); |
michael@0 | 601 | MarkValueRoot(trc, &value, "Descriptor::value"); |
michael@0 | 602 | if ((attrs & JSPROP_GETTER) && getter) { |
michael@0 | 603 | JSObject *tmp = JS_FUNC_TO_DATA_PTR(JSObject *, getter); |
michael@0 | 604 | MarkObjectRoot(trc, &tmp, "Descriptor::get"); |
michael@0 | 605 | getter = JS_DATA_TO_FUNC_PTR(JSPropertyOp, tmp); |
michael@0 | 606 | } |
michael@0 | 607 | if ((attrs & JSPROP_SETTER) && setter) { |
michael@0 | 608 | JSObject *tmp = JS_FUNC_TO_DATA_PTR(JSObject *, setter); |
michael@0 | 609 | MarkObjectRoot(trc, &tmp, "Descriptor::set"); |
michael@0 | 610 | setter = JS_DATA_TO_FUNC_PTR(JSStrictPropertyOp, tmp); |
michael@0 | 611 | } |
michael@0 | 612 | } |
michael@0 | 613 | |
michael@0 | 614 | namespace js { |
michael@0 | 615 | namespace gc { |
michael@0 | 616 | |
/*
 * Helper for tracing the runtime-wide linked lists of PersistentRooted<T>.
 * The template parameter 'Mark' is the Mark*Root function matching T.
 */
template<typename T>
struct PersistentRootedMarker
{
    typedef PersistentRooted<T> Element;
    typedef mozilla::LinkedList<Element> List;
    typedef void (*MarkFunc)(JSTracer *trc, T *ref, const char *name);

    /* Mark each list entry whose contents are non-null. */
    template <MarkFunc Mark>
    static void
    markChainIfNotNull(JSTracer *trc, List &list, const char *name)
    {
        for (Element *r = list.getFirst(); r; r = r->getNext()) {
            if (r->get())
                Mark(trc, r->address(), name);
        }
    }

    /* Mark every list entry; for types whose contents are never null. */
    template <MarkFunc Mark>
    static void
    markChain(JSTracer *trc, List &list, const char *name)
    {
        for (Element *r = list.getFirst(); r; r = r->getNext())
            Mark(trc, r->address(), name);
    }
};
michael@0 | 642 | } |
michael@0 | 643 | } |
michael@0 | 644 | |
/* Mark all of the runtime's PersistentRooted lists, one list per rooted type. */
void
js::gc::MarkPersistentRootedChains(JSTracer *trc)
{
    JSRuntime *rt = trc->runtime();

    // Mark the PersistentRooted chains of types that may be null.
    PersistentRootedMarker<JSFunction*>::markChainIfNotNull<MarkObjectRoot>(
        trc, rt->functionPersistentRooteds, "PersistentRooted<JSFunction *>");
    PersistentRootedMarker<JSObject*>::markChainIfNotNull<MarkObjectRoot>(
        trc, rt->objectPersistentRooteds, "PersistentRooted<JSObject *>");
    PersistentRootedMarker<JSScript*>::markChainIfNotNull<MarkScriptRoot>(
        trc, rt->scriptPersistentRooteds, "PersistentRooted<JSScript *>");
    PersistentRootedMarker<JSString*>::markChainIfNotNull<MarkStringRoot>(
        trc, rt->stringPersistentRooteds, "PersistentRooted<JSString *>");

    // Mark the PersistentRooted chains of types that are never null.
    PersistentRootedMarker<jsid>::markChain<MarkIdRoot>(trc, rt->idPersistentRooteds,
                                                        "PersistentRooted<jsid>");
    PersistentRootedMarker<Value>::markChain<MarkValueRoot>(trc, rt->valuePersistentRooteds,
                                                            "PersistentRooted<Value>");
}
michael@0 | 666 | |
michael@0 | 667 | void |
michael@0 | 668 | js::gc::MarkRuntime(JSTracer *trc, bool useSavedRoots) |
michael@0 | 669 | { |
michael@0 | 670 | JSRuntime *rt = trc->runtime(); |
michael@0 | 671 | JS_ASSERT(trc->callback != GCMarker::GrayCallback); |
michael@0 | 672 | |
michael@0 | 673 | JS_ASSERT(!rt->mainThread.suppressGC); |
michael@0 | 674 | |
michael@0 | 675 | if (IS_GC_MARKING_TRACER(trc)) { |
michael@0 | 676 | for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) { |
michael@0 | 677 | if (!c->zone()->isCollecting()) |
michael@0 | 678 | c->markCrossCompartmentWrappers(trc); |
michael@0 | 679 | } |
michael@0 | 680 | Debugger::markCrossCompartmentDebuggerObjectReferents(trc); |
michael@0 | 681 | } |
michael@0 | 682 | |
michael@0 | 683 | AutoGCRooter::traceAll(trc); |
michael@0 | 684 | |
michael@0 | 685 | if (!rt->isBeingDestroyed()) { |
michael@0 | 686 | #ifdef JSGC_USE_EXACT_ROOTING |
michael@0 | 687 | MarkExactStackRoots(trc); |
michael@0 | 688 | #else |
michael@0 | 689 | MarkConservativeStackRoots(trc, useSavedRoots); |
michael@0 | 690 | #endif |
michael@0 | 691 | rt->markSelfHostingGlobal(trc); |
michael@0 | 692 | } |
michael@0 | 693 | |
michael@0 | 694 | for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront()) { |
michael@0 | 695 | const RootEntry &entry = r.front(); |
michael@0 | 696 | const char *name = entry.value().name ? entry.value().name : "root"; |
michael@0 | 697 | JSGCRootType type = entry.value().type; |
michael@0 | 698 | void *key = entry.key(); |
michael@0 | 699 | if (type == JS_GC_ROOT_VALUE_PTR) { |
michael@0 | 700 | MarkValueRoot(trc, reinterpret_cast<Value *>(key), name); |
michael@0 | 701 | } else if (*reinterpret_cast<void **>(key)){ |
michael@0 | 702 | if (type == JS_GC_ROOT_STRING_PTR) |
michael@0 | 703 | MarkStringRoot(trc, reinterpret_cast<JSString **>(key), name); |
michael@0 | 704 | else if (type == JS_GC_ROOT_OBJECT_PTR) |
michael@0 | 705 | MarkObjectRoot(trc, reinterpret_cast<JSObject **>(key), name); |
michael@0 | 706 | else if (type == JS_GC_ROOT_SCRIPT_PTR) |
michael@0 | 707 | MarkScriptRoot(trc, reinterpret_cast<JSScript **>(key), name); |
michael@0 | 708 | else |
michael@0 | 709 | MOZ_ASSUME_UNREACHABLE("unexpected js::RootInfo::type value"); |
michael@0 | 710 | } |
michael@0 | 711 | } |
michael@0 | 712 | |
michael@0 | 713 | MarkPersistentRootedChains(trc); |
michael@0 | 714 | |
michael@0 | 715 | if (rt->scriptAndCountsVector) { |
michael@0 | 716 | ScriptAndCountsVector &vec = *rt->scriptAndCountsVector; |
michael@0 | 717 | for (size_t i = 0; i < vec.length(); i++) |
michael@0 | 718 | MarkScriptRoot(trc, &vec[i].script, "scriptAndCountsVector"); |
michael@0 | 719 | } |
michael@0 | 720 | |
michael@0 | 721 | if (!rt->isBeingDestroyed() && !trc->runtime()->isHeapMinorCollecting()) { |
michael@0 | 722 | if (!IS_GC_MARKING_TRACER(trc) || rt->atomsCompartment()->zone()->isCollecting()) { |
michael@0 | 723 | MarkPermanentAtoms(trc); |
michael@0 | 724 | MarkAtoms(trc); |
michael@0 | 725 | #ifdef JS_ION |
michael@0 | 726 | jit::JitRuntime::Mark(trc); |
michael@0 | 727 | #endif |
michael@0 | 728 | } |
michael@0 | 729 | } |
michael@0 | 730 | |
michael@0 | 731 | for (ContextIter acx(rt); !acx.done(); acx.next()) |
michael@0 | 732 | acx->mark(trc); |
michael@0 | 733 | |
michael@0 | 734 | for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next()) { |
michael@0 | 735 | if (IS_GC_MARKING_TRACER(trc) && !zone->isCollecting()) |
michael@0 | 736 | continue; |
michael@0 | 737 | |
michael@0 | 738 | /* Do not discard scripts with counts while profiling. */ |
michael@0 | 739 | if (rt->profilingScripts && !rt->isHeapMinorCollecting()) { |
michael@0 | 740 | for (CellIterUnderGC i(zone, FINALIZE_SCRIPT); !i.done(); i.next()) { |
michael@0 | 741 | JSScript *script = i.get<JSScript>(); |
michael@0 | 742 | if (script->hasScriptCounts()) { |
michael@0 | 743 | MarkScriptRoot(trc, &script, "profilingScripts"); |
michael@0 | 744 | JS_ASSERT(script == i.get<JSScript>()); |
michael@0 | 745 | } |
michael@0 | 746 | } |
michael@0 | 747 | } |
michael@0 | 748 | } |
michael@0 | 749 | |
michael@0 | 750 | /* We can't use GCCompartmentsIter if we're called from TraceRuntime. */ |
michael@0 | 751 | for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) { |
michael@0 | 752 | if (trc->runtime()->isHeapMinorCollecting()) |
michael@0 | 753 | c->globalWriteBarriered = false; |
michael@0 | 754 | |
michael@0 | 755 | if (IS_GC_MARKING_TRACER(trc) && !c->zone()->isCollecting()) |
michael@0 | 756 | continue; |
michael@0 | 757 | |
michael@0 | 758 | /* During a GC, these are treated as weak pointers. */ |
michael@0 | 759 | if (!IS_GC_MARKING_TRACER(trc)) { |
michael@0 | 760 | if (c->watchpointMap) |
michael@0 | 761 | c->watchpointMap->markAll(trc); |
michael@0 | 762 | } |
michael@0 | 763 | |
michael@0 | 764 | /* Mark debug scopes, if present */ |
michael@0 | 765 | if (c->debugScopes) |
michael@0 | 766 | c->debugScopes->mark(trc); |
michael@0 | 767 | } |
michael@0 | 768 | |
michael@0 | 769 | MarkInterpreterActivations(rt, trc); |
michael@0 | 770 | |
michael@0 | 771 | #ifdef JS_ION |
michael@0 | 772 | jit::MarkJitActivations(rt, trc); |
michael@0 | 773 | #endif |
michael@0 | 774 | |
michael@0 | 775 | if (!rt->isHeapMinorCollecting()) { |
michael@0 | 776 | /* |
michael@0 | 777 | * All JSCompartment::mark does is mark the globals for compartments |
michael@0 | 778 | * which have been entered. Globals aren't nursery allocated so there's |
michael@0 | 779 | * no need to do this for minor GCs. |
michael@0 | 780 | */ |
michael@0 | 781 | for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) |
michael@0 | 782 | c->markRoots(trc); |
michael@0 | 783 | |
michael@0 | 784 | /* |
michael@0 | 785 | * The embedding can register additional roots here. |
michael@0 | 786 | * |
michael@0 | 787 | * We don't need to trace these in a minor GC because all pointers into |
michael@0 | 788 | * the nursery should be in the store buffer, and we want to avoid the |
michael@0 | 789 | * time taken to trace all these roots. |
michael@0 | 790 | */ |
michael@0 | 791 | for (size_t i = 0; i < rt->gcBlackRootTracers.length(); i++) { |
michael@0 | 792 | const JSRuntime::ExtraTracer &e = rt->gcBlackRootTracers[i]; |
michael@0 | 793 | (*e.op)(trc, e.data); |
michael@0 | 794 | } |
michael@0 | 795 | |
michael@0 | 796 | /* During GC, we don't mark gray roots at this stage. */ |
michael@0 | 797 | if (JSTraceDataOp op = rt->gcGrayRootTracer.op) { |
michael@0 | 798 | if (!IS_GC_MARKING_TRACER(trc)) |
michael@0 | 799 | (*op)(trc, rt->gcGrayRootTracer.data); |
michael@0 | 800 | } |
michael@0 | 801 | } |
michael@0 | 802 | } |
michael@0 | 803 | |
michael@0 | 804 | void |
michael@0 | 805 | js::gc::BufferGrayRoots(GCMarker *gcmarker) |
michael@0 | 806 | { |
michael@0 | 807 | JSRuntime *rt = gcmarker->runtime(); |
michael@0 | 808 | gcmarker->startBufferingGrayRoots(); |
michael@0 | 809 | if (JSTraceDataOp op = rt->gcGrayRootTracer.op) |
michael@0 | 810 | (*op)(gcmarker, rt->gcGrayRootTracer.data); |
michael@0 | 811 | gcmarker->endBufferingGrayRoots(); |
michael@0 | 812 | } |