--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/js/src/gc/RootMarking.cpp	Wed Dec 31 06:09:35 2014 +0100
@@ -0,0 +1,812 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=8 sts=4 et sw=4 tw=99:
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "mozilla/ArrayUtils.h"
+
+#ifdef MOZ_VALGRIND
+# include <valgrind/memcheck.h>
+#endif
+
+#include "jscntxt.h"
+#include "jsgc.h"
+#include "jsonparser.h"
+#include "jsprf.h"
+#include "jstypes.h"
+#include "jswatchpoint.h"
+
+#include "builtin/MapObject.h"
+#include "frontend/BytecodeCompiler.h"
+#include "gc/GCInternals.h"
+#include "gc/Marking.h"
+#ifdef JS_ION
+# include "jit/IonMacroAssembler.h"
+#endif
+#include "js/HashTable.h"
+#include "vm/Debugger.h"
+
+#include "jsgcinlines.h"
+#include "jsobjinlines.h"
+
+using namespace js;
+using namespace js::gc;
+
+using mozilla::ArrayEnd;
+
+typedef RootedValueMap::Range RootRange;
+typedef RootedValueMap::Entry RootEntry;
+typedef RootedValueMap::Enum RootEnum;
+
+#ifdef JSGC_USE_EXACT_ROOTING
+static inline void
+MarkExactStackRoot(JSTracer *trc, Rooted<void*> *rooter, ThingRootKind kind)
+{
+    void **addr = (void **)rooter->address();
+    if (IsNullTaggedPointer(*addr))
+        return;
+
+    if (kind == THING_ROOT_OBJECT && *addr == TaggedProto::LazyProto)
+        return;
+
+    switch (kind) {
+      case THING_ROOT_OBJECT:      MarkObjectRoot(trc, (JSObject **)addr, "exact-object"); break;
+      case THING_ROOT_STRING:      MarkStringRoot(trc, (JSString **)addr, "exact-string"); break;
+      case THING_ROOT_SCRIPT:      MarkScriptRoot(trc, (JSScript **)addr, "exact-script"); break;
+      case THING_ROOT_LAZY_SCRIPT: MarkLazyScriptRoot(trc, (LazyScript **)addr, "exact-lazy-script"); break;
+      case THING_ROOT_SHAPE:       MarkShapeRoot(trc, (Shape **)addr, "exact-shape"); break;
+      case THING_ROOT_BASE_SHAPE:  MarkBaseShapeRoot(trc, (BaseShape **)addr, "exact-baseshape"); break;
+      case THING_ROOT_TYPE:        MarkTypeRoot(trc, (types::Type *)addr, "exact-type"); break;
+      case THING_ROOT_TYPE_OBJECT: MarkTypeObjectRoot(trc, (types::TypeObject **)addr, "exact-typeobject"); break;
+      case THING_ROOT_JIT_CODE:    MarkJitCodeRoot(trc, (jit::JitCode **)addr, "exact-jitcode"); break;
+      case THING_ROOT_VALUE:       MarkValueRoot(trc, (Value *)addr, "exact-value"); break;
+      case THING_ROOT_ID:          MarkIdRoot(trc, (jsid *)addr, "exact-id"); break;
+      case THING_ROOT_BINDINGS:    ((Bindings *)addr)->trace(trc); break;
+      case THING_ROOT_PROPERTY_DESCRIPTOR: ((JSPropertyDescriptor *)addr)->trace(trc); break;
+      case THING_ROOT_CUSTOM: {
+        // 'rooter' is a member within a class containing a vtable. Back up
+        // to the vtable and call trace() through it.
+        const size_t rooterOffset = offsetof(RootedGeneric<void*>, rooter);
+        reinterpret_cast< RootedGeneric<void*>* >(uintptr_t(rooter) - rooterOffset)->trace(trc);
+        break;
+      }
+      default: MOZ_ASSUME_UNREACHABLE("Invalid THING_ROOT kind"); break;
+    }
+}
+
+static inline void
+MarkExactStackRootList(JSTracer *trc, Rooted<void*> *rooter, ThingRootKind kind)
+{
+    while (rooter) {
+        MarkExactStackRoot(trc, rooter, kind);
+        rooter = rooter->previous();
+    }
+}
+
+static void
+MarkExactStackRoots(JSTracer *trc)
+{
+    for (unsigned i = 0; i < THING_ROOT_LIMIT; i++) {
+        for (ContextIter cx(trc->runtime()); !cx.done(); cx.next())
+            MarkExactStackRootList(trc, cx->thingGCRooters[i], ThingRootKind(i));
+
+        MarkExactStackRootList(trc, trc->runtime()->mainThread.thingGCRooters[i], ThingRootKind(i));
+    }
+}
+#endif /* JSGC_USE_EXACT_ROOTING */
+
+enum ConservativeGCTest
+{
+    CGCT_VALID,
+    CGCT_LOWBITSET,        /* excluded because one of the low bits was set */
+    CGCT_NOTARENA,         /* not within arena range in a chunk */
+    CGCT_OTHERCOMPARTMENT, /* in another compartment */
+    CGCT_NOTCHUNK,         /* not within a valid chunk */
+    CGCT_FREEARENA,        /* within arena containing only free things */
+    CGCT_NOTLIVE,          /* gcthing is not allocated */
+    CGCT_END
+};
+
+/*
+ * Tests whether w is a (possibly dead) GC thing. Returns CGCT_VALID and
+ * details about the thing if so. On failure, returns the reason for rejection.
+ */
+static inline ConservativeGCTest
+IsAddressableGCThing(JSRuntime *rt, uintptr_t w,
+                     bool skipUncollectedCompartments,
+                     gc::AllocKind *thingKindPtr,
+                     ArenaHeader **arenaHeader,
+                     void **thing)
+{
+    /*
+     * We assume that the compiler never uses sub-word alignment to store
+     * pointers and does not tag pointers on its own. Additionally, the value
+     * representation for all values and the jsid representation for GC-things
+     * do not touch the low two bits. Thus any word with the low two bits set
+     * is not a valid GC-thing.
+     */
+    JS_STATIC_ASSERT(JSID_TYPE_STRING == 0 && JSID_TYPE_OBJECT == 4);
+    if (w & 0x3)
+        return CGCT_LOWBITSET;
+
+    /*
+     * An object jsid has its low bits tagged. In the value representation on
+     * 64-bit, the high bits are tagged.
+     */
+    const uintptr_t JSID_PAYLOAD_MASK = ~uintptr_t(JSID_TYPE_MASK);
+#if JS_BITS_PER_WORD == 32
+    uintptr_t addr = w & JSID_PAYLOAD_MASK;
+#elif JS_BITS_PER_WORD == 64
+    uintptr_t addr = w & JSID_PAYLOAD_MASK & JSVAL_PAYLOAD_MASK;
+#endif
+
+    Chunk *chunk = Chunk::fromAddress(addr);
+
+    if (!rt->gcChunkSet.has(chunk))
+        return CGCT_NOTCHUNK;
+
+    /*
+     * We query for pointers outside the arena array after checking for an
+     * allocated chunk. Such pointers are rare and we want to reject them
+     * after doing more likely rejections.
+     */
+    if (!Chunk::withinArenasRange(addr))
+        return CGCT_NOTARENA;
+
+    /* If the arena is not currently allocated, don't access the header. */
+    size_t arenaOffset = Chunk::arenaIndex(addr);
+    if (chunk->decommittedArenas.get(arenaOffset))
+        return CGCT_FREEARENA;
+
+    ArenaHeader *aheader = &chunk->arenas[arenaOffset].aheader;
+
+    if (!aheader->allocated())
+        return CGCT_FREEARENA;
+
+    if (skipUncollectedCompartments && !aheader->zone->isCollecting())
+        return CGCT_OTHERCOMPARTMENT;
+
+    AllocKind thingKind = aheader->getAllocKind();
+    uintptr_t offset = addr & ArenaMask;
+    uintptr_t minOffset = Arena::firstThingOffset(thingKind);
+    if (offset < minOffset)
+        return CGCT_NOTARENA;
+
+    /* addr can point inside the thing so we must align the address. */
+    uintptr_t shift = (offset - minOffset) % Arena::thingSize(thingKind);
+    addr -= shift;
+
+    if (thing)
+        *thing = reinterpret_cast<void *>(addr);
+    if (arenaHeader)
+        *arenaHeader = aheader;
+    if (thingKindPtr)
+        *thingKindPtr = thingKind;
+    return CGCT_VALID;
+}
+
+/*
+ * Returns CGCT_VALID and marks the thing if w can be a live GC thing, setting
+ * thingKind accordingly. Otherwise returns the reason for rejection.
+ */
+static inline ConservativeGCTest
+MarkIfGCThingWord(JSTracer *trc, uintptr_t w)
+{
+    void *thing;
+    ArenaHeader *aheader;
+    AllocKind thingKind;
+    ConservativeGCTest status =
+        IsAddressableGCThing(trc->runtime(), w, IS_GC_MARKING_TRACER(trc),
+                             &thingKind, &aheader, &thing);
+    if (status != CGCT_VALID)
+        return status;
+
+    /*
+     * Check if the thing is free. We must use the list of free spans as at
+     * this point we no longer have the mark bits from the previous GC run and
+     * we must account for newly allocated things.
+     */
+    if (InFreeList(aheader, thing))
+        return CGCT_NOTLIVE;
+
+    JSGCTraceKind traceKind = MapAllocToTraceKind(thingKind);
+#ifdef DEBUG
+    const char pattern[] = "machine_stack %p";
+    char nameBuf[sizeof(pattern) - 2 + sizeof(thing) * 2];
+    JS_snprintf(nameBuf, sizeof(nameBuf), pattern, thing);
+    trc->setTracingName(nameBuf);
+#endif
+    trc->setTracingLocation((void *)w);
+    void *tmp = thing;
+    MarkKind(trc, &tmp, traceKind);
+    JS_ASSERT(tmp == thing);
+
+#ifdef DEBUG
+    if (trc->runtime()->gcIncrementalState == MARK_ROOTS)
+        trc->runtime()->mainThread.gcSavedRoots.append(
+            PerThreadData::SavedGCRoot(thing, traceKind));
+#endif
+
+    return CGCT_VALID;
+}
+
+#ifndef JSGC_USE_EXACT_ROOTING
+static void
+MarkWordConservatively(JSTracer *trc, uintptr_t w)
+{
+    /*
+     * The conservative scanner may access words that valgrind considers
+     * undefined. To avoid false positives and to avoid altering valgrind's
+     * view of the memory, we mark the argument, a copy of the original word,
+     * as memcheck-defined. See bug 572678.
+     */
+#ifdef MOZ_VALGRIND
+    JS_SILENCE_UNUSED_VALUE_IN_EXPR(VALGRIND_MAKE_MEM_DEFINED(&w, sizeof(w)));
+#endif
+
+    MarkIfGCThingWord(trc, w);
+}
+
+MOZ_ASAN_BLACKLIST
+static void
+MarkRangeConservatively(JSTracer *trc, const uintptr_t *begin, const uintptr_t *end)
+{
+    JS_ASSERT(begin <= end);
+    for (const uintptr_t *i = begin; i < end; ++i)
+        MarkWordConservatively(trc, *i);
+}
+
+static void
+MarkRangeConservativelyAndSkipIon(JSTracer *trc, JSRuntime *rt, const uintptr_t *begin, const uintptr_t *end)
+{
+    const uintptr_t *i = begin;
+
+#if JS_STACK_GROWTH_DIRECTION < 0 && defined(JS_ION) && !defined(JS_ARM_SIMULATOR)
+    // Walk only regions in between JIT activations. Note that non-volatile
+    // registers are spilled to the stack before the entry frame, ensuring
+    // that the conservative scanner will still see them.
+    //
+    // If the ARM simulator is enabled, JIT activations are not on the native
+    // stack but on the simulator stack, so we don't have to skip JIT regions
+    // in this case.
+    for (jit::JitActivationIterator iter(rt); !iter.done(); ++iter) {
+        uintptr_t *jitMin, *jitEnd;
+        iter.jitStackRange(jitMin, jitEnd);
+
+        MarkRangeConservatively(trc, i, jitMin);
+        i = jitEnd;
+    }
+#endif
+
+    // Mark everything after the most recent Ion activation.
+    MarkRangeConservatively(trc, i, end);
+}
+
+static MOZ_NEVER_INLINE void
+MarkConservativeStackRoots(JSTracer *trc, bool useSavedRoots)
+{
+    JSRuntime *rt = trc->runtime();
+
+#ifdef DEBUG
+    if (useSavedRoots) {
+        for (PerThreadData::SavedGCRoot *root = rt->mainThread.gcSavedRoots.begin();
+             root != rt->mainThread.gcSavedRoots.end();
+             root++)
+        {
+            trc->setTracingName("cstack");
+            MarkKind(trc, &root->thing, root->kind);
+        }
+        return;
+    }
+
+    if (rt->gcIncrementalState == MARK_ROOTS)
+        rt->mainThread.gcSavedRoots.clearAndFree();
+#endif
+
+    ConservativeGCData *cgcd = &rt->conservativeGC;
+    if (!cgcd->hasStackToScan()) {
+#ifdef JS_THREADSAFE
+        JS_ASSERT(!rt->requestDepth);
+#endif
+        return;
+    }
+
+    uintptr_t *stackMin, *stackEnd;
+#if JS_STACK_GROWTH_DIRECTION > 0
+    stackMin = reinterpret_cast<uintptr_t *>(rt->nativeStackBase);
+    stackEnd = cgcd->nativeStackTop;
+#else
+    stackMin = cgcd->nativeStackTop + 1;
+    stackEnd = reinterpret_cast<uintptr_t *>(rt->nativeStackBase);
+#endif
+
+    JS_ASSERT(stackMin <= stackEnd);
+    MarkRangeConservativelyAndSkipIon(trc, rt, stackMin, stackEnd);
+    MarkRangeConservatively(trc, cgcd->registerSnapshot.words,
+                            ArrayEnd(cgcd->registerSnapshot.words));
+}
+
+void
+js::MarkStackRangeConservatively(JSTracer *trc, Value *beginv, Value *endv)
+{
+    const uintptr_t *begin = beginv->payloadUIntPtr();
+    const uintptr_t *end = endv->payloadUIntPtr();
+#ifdef JS_NUNBOX32
+    /*
+     * With 64-bit jsvals on 32-bit systems, we can optimize a bit by
+     * scanning only the payloads.
+     */
+    JS_ASSERT(begin <= end);
+    for (const uintptr_t *i = begin; i < end; i += sizeof(Value) / sizeof(uintptr_t))
+        MarkWordConservatively(trc, *i);
+#else
+    MarkRangeConservatively(trc, begin, end);
+#endif
+}
+
+#endif /* JSGC_USE_EXACT_ROOTING */
+
+MOZ_NEVER_INLINE void
+ConservativeGCData::recordStackTop()
+{
+    /* Update the native stack pointer if it points to a bigger stack. */
+    uintptr_t dummy;
+    nativeStackTop = &dummy;
+
+    /*
+     * To record and update the register snapshot for the conservative scanning
+     * with the latest values we use setjmp.
+     */
+#if defined(_MSC_VER)
+# pragma warning(push)
+# pragma warning(disable: 4611)
+#endif
+    (void) setjmp(registerSnapshot.jmpbuf);
+#if defined(_MSC_VER)
+# pragma warning(pop)
+#endif
+}
+
+void
+JS::AutoIdArray::trace(JSTracer *trc)
+{
+    JS_ASSERT(tag_ == IDARRAY);
+    gc::MarkIdRange(trc, idArray->length, idArray->vector, "JSAutoIdArray.idArray");
+}
+
+inline void
+AutoGCRooter::trace(JSTracer *trc)
+{
+    switch (tag_) {
+      case PARSER:
+        frontend::MarkParser(trc, this);
+        return;
+
+      case IDARRAY: {
+        JSIdArray *ida = static_cast<AutoIdArray *>(this)->idArray;
+        MarkIdRange(trc, ida->length, ida->vector, "JS::AutoIdArray.idArray");
+        return;
+      }
+
+      case DESCRIPTORS: {
+        PropDescArray &descriptors =
+            static_cast<AutoPropDescArrayRooter *>(this)->descriptors;
+        for (size_t i = 0, len = descriptors.length(); i < len; i++) {
+            PropDesc &desc = descriptors[i];
+            MarkValueRoot(trc, &desc.pd_, "PropDesc::pd_");
+            MarkValueRoot(trc, &desc.value_, "PropDesc::value_");
+            MarkValueRoot(trc, &desc.get_, "PropDesc::get_");
+            MarkValueRoot(trc, &desc.set_, "PropDesc::set_");
+        }
+        return;
+      }
+
+      case ID:
+        MarkIdRoot(trc, &static_cast<AutoIdRooter *>(this)->id_, "JS::AutoIdRooter.id_");
+        return;
+
+      case VALVECTOR: {
+        AutoValueVector::VectorImpl &vector = static_cast<AutoValueVector *>(this)->vector;
+        MarkValueRootRange(trc, vector.length(), vector.begin(), "js::AutoValueVector.vector");
+        return;
+      }
+
+      case IDVECTOR: {
+        AutoIdVector::VectorImpl &vector = static_cast<AutoIdVector *>(this)->vector;
+        MarkIdRootRange(trc, vector.length(), vector.begin(), "js::AutoIdVector.vector");
+        return;
+      }
+
+      case SHAPEVECTOR: {
+        AutoShapeVector::VectorImpl &vector = static_cast<js::AutoShapeVector *>(this)->vector;
+        MarkShapeRootRange(trc, vector.length(), const_cast<Shape **>(vector.begin()),
+                           "js::AutoShapeVector.vector");
+        return;
+      }
+
+      case OBJVECTOR: {
+        AutoObjectVector::VectorImpl &vector = static_cast<AutoObjectVector *>(this)->vector;
+        MarkObjectRootRange(trc, vector.length(), vector.begin(), "js::AutoObjectVector.vector");
+        return;
+      }
+
+      case FUNVECTOR: {
+        AutoFunctionVector::VectorImpl &vector = static_cast<AutoFunctionVector *>(this)->vector;
+        MarkObjectRootRange(trc, vector.length(), vector.begin(), "js::AutoFunctionVector.vector");
+        return;
+      }
+
+      case STRINGVECTOR: {
+        AutoStringVector::VectorImpl &vector = static_cast<AutoStringVector *>(this)->vector;
+        MarkStringRootRange(trc, vector.length(), vector.begin(), "js::AutoStringVector.vector");
+        return;
+      }
+
+      case NAMEVECTOR: {
+        AutoNameVector::VectorImpl &vector = static_cast<AutoNameVector *>(this)->vector;
+        MarkStringRootRange(trc, vector.length(), vector.begin(), "js::AutoNameVector.vector");
+        return;
+      }
+
+      case VALARRAY: {
+        /*
+         * We don't know the template size parameter, but we can safely treat it
+         * as an AutoValueArray<1> because the length is stored separately.
+         */
+        AutoValueArray<1> *array = static_cast<AutoValueArray<1> *>(this);
+        MarkValueRootRange(trc, array->length(), array->begin(), "js::AutoValueArray");
+        return;
+      }
+
+      case SCRIPTVECTOR: {
+        AutoScriptVector::VectorImpl &vector = static_cast<AutoScriptVector *>(this)->vector;
+        MarkScriptRootRange(trc, vector.length(), vector.begin(), "js::AutoScriptVector.vector");
+        return;
+      }
+
+      case OBJOBJHASHMAP: {
+        AutoObjectObjectHashMap::HashMapImpl &map = static_cast<AutoObjectObjectHashMap *>(this)->map;
+        for (AutoObjectObjectHashMap::Enum e(map); !e.empty(); e.popFront()) {
+            MarkObjectRoot(trc, &e.front().value(), "AutoObjectObjectHashMap value");
+            trc->setTracingLocation((void *)&e.front().key());
+            JSObject *key = e.front().key();
+            MarkObjectRoot(trc, &key, "AutoObjectObjectHashMap key");
+            if (key != e.front().key())
+                e.rekeyFront(key);
+        }
+        return;
+      }
+
+      case OBJU32HASHMAP: {
+        AutoObjectUnsigned32HashMap *self = static_cast<AutoObjectUnsigned32HashMap *>(this);
+        AutoObjectUnsigned32HashMap::HashMapImpl &map = self->map;
+        for (AutoObjectUnsigned32HashMap::Enum e(map); !e.empty(); e.popFront()) {
+            JSObject *key = e.front().key();
+            MarkObjectRoot(trc, &key, "AutoObjectUnsignedHashMap key");
+            if (key != e.front().key())
+                e.rekeyFront(key);
+        }
+        return;
+      }
+
+      case OBJHASHSET: {
+        AutoObjectHashSet *self = static_cast<AutoObjectHashSet *>(this);
+        AutoObjectHashSet::HashSetImpl &set = self->set;
+        for (AutoObjectHashSet::Enum e(set); !e.empty(); e.popFront()) {
+            JSObject *obj = e.front();
+            MarkObjectRoot(trc, &obj, "AutoObjectHashSet value");
+            if (obj != e.front())
+                e.rekeyFront(obj);
+        }
+        return;
+      }
+
+      case HASHABLEVALUE: {
+        AutoHashableValueRooter *rooter = static_cast<AutoHashableValueRooter *>(this);
+        rooter->trace(trc);
+        return;
+      }
+
+      case IONMASM: {
+#ifdef JS_ION
+        static_cast<js::jit::MacroAssembler::AutoRooter *>(this)->masm()->trace(trc);
+#endif
+        return;
+      }
+
+      case IONALLOC: {
+#ifdef JS_ION
+        static_cast<js::jit::AutoTempAllocatorRooter *>(this)->trace(trc);
+#endif
+        return;
+      }
+
+      case WRAPPER: {
+        /*
+         * We need to use MarkValueUnbarriered here because we mark wrapper
+         * roots in every slice. This is because of some rule-breaking in
+         * RemapAllWrappersForObject; see comment there.
+         */
+        MarkValueUnbarriered(trc, &static_cast<AutoWrapperRooter *>(this)->value.get(),
+                             "JS::AutoWrapperRooter.value");
+        return;
+      }
+
+      case WRAPVECTOR: {
+        AutoWrapperVector::VectorImpl &vector = static_cast<AutoWrapperVector *>(this)->vector;
+        /*
+         * We need to use MarkValueUnbarriered here because we mark wrapper
+         * roots in every slice. This is because of some rule-breaking in
+         * RemapAllWrappersForObject; see comment there.
+         */
+        for (WrapperValue *p = vector.begin(); p < vector.end(); p++)
+            MarkValueUnbarriered(trc, &p->get(), "js::AutoWrapperVector.vector");
+        return;
+      }
+
+      case JSONPARSER:
+        static_cast<js::JSONParser *>(this)->trace(trc);
+        return;
+
+      case CUSTOM:
+        static_cast<JS::CustomAutoRooter *>(this)->trace(trc);
+        return;
+    }
+
+    JS_ASSERT(tag_ >= 0);
+    if (Value *vp = static_cast<AutoArrayRooter *>(this)->array)
+        MarkValueRootRange(trc, tag_, vp, "JS::AutoArrayRooter.array");
+}
+
+/* static */ void
+AutoGCRooter::traceAll(JSTracer *trc)
+{
+    for (ContextIter cx(trc->runtime()); !cx.done(); cx.next()) {
+        for (js::AutoGCRooter *gcr = cx->autoGCRooters; gcr; gcr = gcr->down)
+            gcr->trace(trc);
+    }
+}
+
+/* static */ void
+AutoGCRooter::traceAllWrappers(JSTracer *trc)
+{
+    for (ContextIter cx(trc->runtime()); !cx.done(); cx.next()) {
+        for (js::AutoGCRooter *gcr = cx->autoGCRooters; gcr; gcr = gcr->down) {
+            if (gcr->tag_ == WRAPVECTOR || gcr->tag_ == WRAPPER)
+                gcr->trace(trc);
+        }
+    }
+}
+
+void
+AutoHashableValueRooter::trace(JSTracer *trc)
+{
+    MarkValueRoot(trc, reinterpret_cast<Value*>(&value), "AutoHashableValueRooter");
+}
+
+void
+StackShape::trace(JSTracer *trc)
+{
+    if (base)
+        MarkBaseShapeRoot(trc, (BaseShape**) &base, "StackShape base");
+    MarkIdRoot(trc, (jsid*) &propid, "StackShape id");
+}
+
+void
+JSPropertyDescriptor::trace(JSTracer *trc)
+{
+    if (obj)
+        MarkObjectRoot(trc, &obj, "Descriptor::obj");
+    MarkValueRoot(trc, &value, "Descriptor::value");
+    if ((attrs & JSPROP_GETTER) && getter) {
+        JSObject *tmp = JS_FUNC_TO_DATA_PTR(JSObject *, getter);
+        MarkObjectRoot(trc, &tmp, "Descriptor::get");
+        getter = JS_DATA_TO_FUNC_PTR(JSPropertyOp, tmp);
+    }
+    if ((attrs & JSPROP_SETTER) && setter) {
+        JSObject *tmp = JS_FUNC_TO_DATA_PTR(JSObject *, setter);
+        MarkObjectRoot(trc, &tmp, "Descriptor::set");
+        setter = JS_DATA_TO_FUNC_PTR(JSStrictPropertyOp, tmp);
+    }
+}
+
+namespace js {
+namespace gc {
+
+template<typename T>
+struct PersistentRootedMarker
+{
+    typedef PersistentRooted<T> Element;
+    typedef mozilla::LinkedList<Element> List;
+    typedef void (*MarkFunc)(JSTracer *trc, T *ref, const char *name);
+
+    template <MarkFunc Mark>
+    static void
+    markChainIfNotNull(JSTracer *trc, List &list, const char *name)
+    {
+        for (Element *r = list.getFirst(); r; r = r->getNext()) {
+            if (r->get())
+                Mark(trc, r->address(), name);
+        }
+    }
+
+    template <MarkFunc Mark>
+    static void
+    markChain(JSTracer *trc, List &list, const char *name)
+    {
+        for (Element *r = list.getFirst(); r; r = r->getNext())
+            Mark(trc, r->address(), name);
+    }
+};
+}
+}
+
+void
+js::gc::MarkPersistentRootedChains(JSTracer *trc)
+{
+    JSRuntime *rt = trc->runtime();
+
+    // Mark the PersistentRooted chains of types that may be null.
+    PersistentRootedMarker<JSFunction*>::markChainIfNotNull<MarkObjectRoot>(
+        trc, rt->functionPersistentRooteds, "PersistentRooted<JSFunction *>");
+    PersistentRootedMarker<JSObject*>::markChainIfNotNull<MarkObjectRoot>(
+        trc, rt->objectPersistentRooteds, "PersistentRooted<JSObject *>");
+    PersistentRootedMarker<JSScript*>::markChainIfNotNull<MarkScriptRoot>(
+        trc, rt->scriptPersistentRooteds, "PersistentRooted<JSScript *>");
+    PersistentRootedMarker<JSString*>::markChainIfNotNull<MarkStringRoot>(
+        trc, rt->stringPersistentRooteds, "PersistentRooted<JSString *>");
+
+    // Mark the PersistentRooted chains of types that are never null.
+    PersistentRootedMarker<jsid>::markChain<MarkIdRoot>(trc, rt->idPersistentRooteds,
+                                                        "PersistentRooted<jsid>");
+    PersistentRootedMarker<Value>::markChain<MarkValueRoot>(trc, rt->valuePersistentRooteds,
+                                                            "PersistentRooted<Value>");
+}
+
+void
+js::gc::MarkRuntime(JSTracer *trc, bool useSavedRoots)
+{
+    JSRuntime *rt = trc->runtime();
+    JS_ASSERT(trc->callback != GCMarker::GrayCallback);
+
+    JS_ASSERT(!rt->mainThread.suppressGC);
+
+    if (IS_GC_MARKING_TRACER(trc)) {
+        for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
+            if (!c->zone()->isCollecting())
+                c->markCrossCompartmentWrappers(trc);
+        }
+        Debugger::markCrossCompartmentDebuggerObjectReferents(trc);
+    }
+
+    AutoGCRooter::traceAll(trc);
+
+    if (!rt->isBeingDestroyed()) {
+#ifdef JSGC_USE_EXACT_ROOTING
+        MarkExactStackRoots(trc);
+#else
+        MarkConservativeStackRoots(trc, useSavedRoots);
+#endif
+        rt->markSelfHostingGlobal(trc);
+    }
+
+    for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront()) {
+        const RootEntry &entry = r.front();
+        const char *name = entry.value().name ? entry.value().name : "root";
+        JSGCRootType type = entry.value().type;
+        void *key = entry.key();
+        if (type == JS_GC_ROOT_VALUE_PTR) {
+            MarkValueRoot(trc, reinterpret_cast<Value *>(key), name);
+        } else if (*reinterpret_cast<void **>(key)) {
+            if (type == JS_GC_ROOT_STRING_PTR)
+                MarkStringRoot(trc, reinterpret_cast<JSString **>(key), name);
+            else if (type == JS_GC_ROOT_OBJECT_PTR)
+                MarkObjectRoot(trc, reinterpret_cast<JSObject **>(key), name);
+            else if (type == JS_GC_ROOT_SCRIPT_PTR)
+                MarkScriptRoot(trc, reinterpret_cast<JSScript **>(key), name);
+            else
+                MOZ_ASSUME_UNREACHABLE("unexpected js::RootInfo::type value");
+        }
+    }
+
+    MarkPersistentRootedChains(trc);
+
+    if (rt->scriptAndCountsVector) {
+        ScriptAndCountsVector &vec = *rt->scriptAndCountsVector;
+        for (size_t i = 0; i < vec.length(); i++)
+            MarkScriptRoot(trc, &vec[i].script, "scriptAndCountsVector");
+    }
+
+    if (!rt->isBeingDestroyed() && !trc->runtime()->isHeapMinorCollecting()) {
+        if (!IS_GC_MARKING_TRACER(trc) || rt->atomsCompartment()->zone()->isCollecting()) {
+            MarkPermanentAtoms(trc);
+            MarkAtoms(trc);
+#ifdef JS_ION
+            jit::JitRuntime::Mark(trc);
+#endif
+        }
+    }
+
+    for (ContextIter acx(rt); !acx.done(); acx.next())
+        acx->mark(trc);
+
+    for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next()) {
+        if (IS_GC_MARKING_TRACER(trc) && !zone->isCollecting())
+            continue;
+
+        /* Do not discard scripts with counts while profiling. */
+        if (rt->profilingScripts && !rt->isHeapMinorCollecting()) {
+            for (CellIterUnderGC i(zone, FINALIZE_SCRIPT); !i.done(); i.next()) {
+                JSScript *script = i.get<JSScript>();
+                if (script->hasScriptCounts()) {
+                    MarkScriptRoot(trc, &script, "profilingScripts");
+                    JS_ASSERT(script == i.get<JSScript>());
+                }
+            }
+        }
+    }
+
+    /* We can't use GCCompartmentsIter if we're called from TraceRuntime. */
+    for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
+        if (trc->runtime()->isHeapMinorCollecting())
+            c->globalWriteBarriered = false;
+
+        if (IS_GC_MARKING_TRACER(trc) && !c->zone()->isCollecting())
+            continue;
+
+        /* During a GC, these are treated as weak pointers. */
+        if (!IS_GC_MARKING_TRACER(trc)) {
+            if (c->watchpointMap)
+                c->watchpointMap->markAll(trc);
+        }
+
+        /* Mark debug scopes, if present */
+        if (c->debugScopes)
+            c->debugScopes->mark(trc);
+    }
+
+    MarkInterpreterActivations(rt, trc);
+
+#ifdef JS_ION
+    jit::MarkJitActivations(rt, trc);
+#endif
+
+    if (!rt->isHeapMinorCollecting()) {
+        /*
+         * All JSCompartment::mark does is mark the globals for compartments
+         * which have been entered. Globals aren't nursery allocated so there's
+         * no need to do this for minor GCs.
+         */
+        for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next())
+            c->markRoots(trc);
+
+        /*
+         * The embedding can register additional roots here.
+         *
+         * We don't need to trace these in a minor GC because all pointers into
+         * the nursery should be in the store buffer, and we want to avoid the
+         * time taken to trace all these roots.
+         */
+        for (size_t i = 0; i < rt->gcBlackRootTracers.length(); i++) {
+            const JSRuntime::ExtraTracer &e = rt->gcBlackRootTracers[i];
+            (*e.op)(trc, e.data);
+        }
+
+        /* During GC, we don't mark gray roots at this stage. */
+        if (JSTraceDataOp op = rt->gcGrayRootTracer.op) {
+            if (!IS_GC_MARKING_TRACER(trc))
+                (*op)(trc, rt->gcGrayRootTracer.data);
+        }
+    }
+}
+
+void
+js::gc::BufferGrayRoots(GCMarker *gcmarker)
+{
+    JSRuntime *rt = gcmarker->runtime();
+    gcmarker->startBufferingGrayRoots();
+    if (JSTraceDataOp op = rt->gcGrayRootTracer.op)
+        (*op)(gcmarker, rt->gcGrayRootTracer.data);
+    gcmarker->endBufferingGrayRoots();
+}
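
For context, the embedder-supplied roots invoked by the loop over rt->gcBlackRootTracers in MarkRuntime() are registered through the public JSAPI. The sketch below shows one plausible registration, assuming the JSAPI of this era (JS_AddExtraGCRootsTracer and JS_CallObjectTracer; exact signatures vary between revisions) and hypothetical embedding names (MyEmbeddingRoots, TraceMyEmbeddingRoots, RegisterMyEmbeddingRoots):

#include "jsapi.h"
#include "js/TracingAPI.h"

// Hypothetical embedding-side state holding a GC thing that must stay alive.
struct MyEmbeddingRoots
{
    JS::Heap<JSObject*> cachedGlobal;
};

// JSTraceDataOp callback: MarkRuntime() calls each entry of
// rt->gcBlackRootTracers with the data pointer it was registered with.
static void
TraceMyEmbeddingRoots(JSTracer *trc, void *data)
{
    MyEmbeddingRoots *roots = static_cast<MyEmbeddingRoots *>(data);
    if (roots->cachedGlobal)
        JS_CallObjectTracer(trc, &roots->cachedGlobal, "my-embedding-cached-global");
}

void
RegisterMyEmbeddingRoots(JSRuntime *rt, MyEmbeddingRoots *roots)
{
    // Appends an ExtraTracer to rt->gcBlackRootTracers, so the callback runs
    // for every non-minor marking pass.
    JS_AddExtraGCRootsTracer(rt, TraceMyEmbeddingRoots, roots);
}

The gray-root analogue is JS_SetGrayGCRootsTracer (a friend API in this era), whose single registered callback becomes the rt->gcGrayRootTracer.op that BufferGrayRoots() and the non-marking branch at the end of MarkRuntime() invoke.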
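
Similarly, the PersistentRooted chains walked by MarkPersistentRootedChains() are populated simply by constructing JS::PersistentRooted values, which link themselves into the per-runtime lists (objectPersistentRooteds, scriptPersistentRooteds, and so on) until they are destroyed. A minimal sketch, assuming the runtime-based PersistentRooted constructor of this era and a hypothetical ScriptCache wrapper:

#include "jsapi.h"
#include "js/RootingAPI.h"

// Hypothetical cache that keeps one script alive across GCs without being
// tied to a stack scope the way Rooted<T> is.
class ScriptCache
{
    // Links itself into rt->scriptPersistentRooteds on construction; it is
    // traced by MarkPersistentRootedChains() (null entries are skipped by
    // markChainIfNotNull) until this cache is destroyed.
    JS::PersistentRooted<JSScript*> script_;

  public:
    explicit ScriptCache(JSRuntime *rt) : script_(rt) {}

    void set(JSScript *script) { script_ = script; }
    JSScript *get() const { return script_; }
};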