/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "mozilla/ArrayUtils.h"

#ifdef MOZ_VALGRIND
# include <valgrind/memcheck.h>
#endif

#include "jscntxt.h"
#include "jsgc.h"
#include "jsonparser.h"
#include "jsprf.h"
#include "jstypes.h"
#include "jswatchpoint.h"

#include "builtin/MapObject.h"
#include "frontend/BytecodeCompiler.h"
#include "gc/GCInternals.h"
#include "gc/Marking.h"
#ifdef JS_ION
# include "jit/IonMacroAssembler.h"
#endif
#include "js/HashTable.h"
#include "vm/Debugger.h"

#include "jsgcinlines.h"
#include "jsobjinlines.h"

using namespace js;
using namespace js::gc;

using mozilla::ArrayEnd;

typedef RootedValueMap::Range RootRange;
typedef RootedValueMap::Entry RootEntry;
typedef RootedValueMap::Enum RootEnum;

#ifdef JSGC_USE_EXACT_ROOTING
static inline void
MarkExactStackRoot(JSTracer *trc, Rooted<void*> *rooter, ThingRootKind kind)
{
    void **addr = (void **)rooter->address();
    if (IsNullTaggedPointer(*addr))
        return;

    if (kind == THING_ROOT_OBJECT && *addr == TaggedProto::LazyProto)
        return;

    switch (kind) {
      case THING_ROOT_OBJECT:      MarkObjectRoot(trc, (JSObject **)addr, "exact-object"); break;
      case THING_ROOT_STRING:      MarkStringRoot(trc, (JSString **)addr, "exact-string"); break;
      case THING_ROOT_SCRIPT:      MarkScriptRoot(trc, (JSScript **)addr, "exact-script"); break;
      case THING_ROOT_LAZY_SCRIPT: MarkLazyScriptRoot(trc, (LazyScript **)addr, "exact-lazy-script"); break;
      case THING_ROOT_SHAPE:       MarkShapeRoot(trc, (Shape **)addr, "exact-shape"); break;
      case THING_ROOT_BASE_SHAPE:  MarkBaseShapeRoot(trc, (BaseShape **)addr, "exact-baseshape"); break;
      case THING_ROOT_TYPE:        MarkTypeRoot(trc, (types::Type *)addr, "exact-type"); break;
      case THING_ROOT_TYPE_OBJECT: MarkTypeObjectRoot(trc, (types::TypeObject **)addr, "exact-typeobject"); break;
      case THING_ROOT_JIT_CODE:    MarkJitCodeRoot(trc, (jit::JitCode **)addr, "exact-jitcode"); break;
      case THING_ROOT_VALUE:       MarkValueRoot(trc, (Value *)addr, "exact-value"); break;
      case THING_ROOT_ID:          MarkIdRoot(trc, (jsid *)addr, "exact-id"); break;
      case THING_ROOT_BINDINGS:    ((Bindings *)addr)->trace(trc); break;
      case THING_ROOT_PROPERTY_DESCRIPTOR: ((JSPropertyDescriptor *)addr)->trace(trc); break;
      case THING_ROOT_CUSTOM: {
        // 'rooter' is a member within a class containing a vtable. Back up
        // to the vtable and call trace() through it.
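        //
        // Roughly, the layout this arithmetic assumes (the real class is
        // RootedGeneric in jsgc.h) is:
        //
        //   class RootedGeneric<void*> {
        //       virtual void trace(JSTracer *trc);  // implies a vtable pointer
        //       Rooted<void*> rooter;               // the member we start from
        //   };
        //
        // Subtracting offsetof(RootedGeneric<void*>, rooter) converts the
        // member's address back into the enclosing object's address, and the
        // virtual call then dispatches to the derived class's trace().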
        const size_t rooterOffset = offsetof(RootedGeneric<void*>, rooter);
        reinterpret_cast<RootedGeneric<void*>*>(uintptr_t(rooter) - rooterOffset)->trace(trc);
        break;
      }
      default: MOZ_ASSUME_UNREACHABLE("Invalid THING_ROOT kind"); break;
    }
}

static inline void
MarkExactStackRootList(JSTracer *trc, Rooted<void*> *rooter, ThingRootKind kind)
{
    while (rooter) {
        MarkExactStackRoot(trc, rooter, kind);
        rooter = rooter->previous();
    }
}

static void
MarkExactStackRoots(JSTracer *trc)
{
    for (unsigned i = 0; i < THING_ROOT_LIMIT; i++) {
        for (ContextIter cx(trc->runtime()); !cx.done(); cx.next())
            MarkExactStackRootList(trc, cx->thingGCRooters[i], ThingRootKind(i));

        MarkExactStackRootList(trc, trc->runtime()->mainThread.thingGCRooters[i], ThingRootKind(i));
    }
}
#endif /* JSGC_USE_EXACT_ROOTING */

enum ConservativeGCTest
{
    CGCT_VALID,
    CGCT_LOWBITSET,        /* excluded because one of the low bits was set */
    CGCT_NOTARENA,         /* not within arena range in a chunk */
    CGCT_OTHERCOMPARTMENT, /* in another compartment */
    CGCT_NOTCHUNK,         /* not within a valid chunk */
    CGCT_FREEARENA,        /* within arena containing only free things */
    CGCT_NOTLIVE,          /* gcthing is not allocated */
    CGCT_END
};

/*
 * Tests whether w is a (possibly dead) GC thing. Returns CGCT_VALID and
 * details about the thing if so. On failure, returns the reason for rejection.
 */
static inline ConservativeGCTest
IsAddressableGCThing(JSRuntime *rt, uintptr_t w,
                     bool skipUncollectedCompartments,
                     gc::AllocKind *thingKindPtr,
                     ArenaHeader **arenaHeader,
                     void **thing)
{
    /*
     * We assume that the compiler never uses sub-word alignment to store
     * pointers and does not tag pointers on its own. Additionally, the value
     * representation for all values and the jsid representation for GC-things
     * do not touch the low two bits. Thus any word with the low two bits set
     * is not a valid GC-thing.
     */
    JS_STATIC_ASSERT(JSID_TYPE_STRING == 0 && JSID_TYPE_OBJECT == 4);
    if (w & 0x3)
        return CGCT_LOWBITSET;

    /*
     * An object jsid has its low bits tagged. In the value representation on
     * 64-bit, the high bits are tagged.
     */
    const uintptr_t JSID_PAYLOAD_MASK = ~uintptr_t(JSID_TYPE_MASK);
#if JS_BITS_PER_WORD == 32
    uintptr_t addr = w & JSID_PAYLOAD_MASK;
#elif JS_BITS_PER_WORD == 64
    uintptr_t addr = w & JSID_PAYLOAD_MASK & JSVAL_PAYLOAD_MASK;
#endif

    Chunk *chunk = Chunk::fromAddress(addr);

    if (!rt->gcChunkSet.has(chunk))
        return CGCT_NOTCHUNK;

    /*
     * We query for pointers outside the arena array after checking for an
     * allocated chunk. Such pointers are rare and we want to reject them
     * after doing more likely rejections.
     */
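    /*
     * For orientation (with this era's 1 MiB chunks and 4 KiB arenas):
     * Chunk::fromAddress masks the low bits off addr to find the chunk
     * start; a chunk is an array of arenas followed by the mark bitmap and
     * chunk header, so Chunk::withinArenasRange rejects addresses that fall
     * into that trailing metadata, and Chunk::arenaIndex converts the
     * within-chunk offset into an index into chunk->arenas.
     */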
    if (!Chunk::withinArenasRange(addr))
        return CGCT_NOTARENA;

    /* If the arena is not currently allocated, don't access the header. */
    size_t arenaOffset = Chunk::arenaIndex(addr);
    if (chunk->decommittedArenas.get(arenaOffset))
        return CGCT_FREEARENA;

    ArenaHeader *aheader = &chunk->arenas[arenaOffset].aheader;

    if (!aheader->allocated())
        return CGCT_FREEARENA;

    if (skipUncollectedCompartments && !aheader->zone->isCollecting())
        return CGCT_OTHERCOMPARTMENT;

    AllocKind thingKind = aheader->getAllocKind();
    uintptr_t offset = addr & ArenaMask;
    uintptr_t minOffset = Arena::firstThingOffset(thingKind);
    if (offset < minOffset)
        return CGCT_NOTARENA;

    /* addr can point inside the thing so we must align the address. */
    uintptr_t shift = (offset - minOffset) % Arena::thingSize(thingKind);
    addr -= shift;

    if (thing)
        *thing = reinterpret_cast<void *>(addr);
    if (arenaHeader)
        *arenaHeader = aheader;
    if (thingKindPtr)
        *thingKindPtr = thingKind;
    return CGCT_VALID;
}

/*
 * Returns CGCT_VALID, and marks the thing, if w can be a live GC thing,
 * setting thingKind accordingly. Otherwise returns the reason for rejection.
 */
static inline ConservativeGCTest
MarkIfGCThingWord(JSTracer *trc, uintptr_t w)
{
    void *thing;
    ArenaHeader *aheader;
    AllocKind thingKind;
    ConservativeGCTest status =
        IsAddressableGCThing(trc->runtime(), w, IS_GC_MARKING_TRACER(trc),
                             &thingKind, &aheader, &thing);
    if (status != CGCT_VALID)
        return status;

    /*
     * Check if the thing is free. We must use the list of free spans as at
     * this point we no longer have the mark bits from the previous GC run and
     * we must account for newly allocated things.
     */
    if (InFreeList(aheader, thing))
        return CGCT_NOTLIVE;

    JSGCTraceKind traceKind = MapAllocToTraceKind(thingKind);
#ifdef DEBUG
    /* Room for "machine_stack ", two hex digits per pointer byte, and '\0'. */
    const char pattern[] = "machine_stack %p";
    char nameBuf[sizeof(pattern) - 2 + sizeof(thing) * 2];
    JS_snprintf(nameBuf, sizeof(nameBuf), pattern, thing);
    trc->setTracingName(nameBuf);
#endif
    trc->setTracingLocation((void *)w);
    void *tmp = thing;
    MarkKind(trc, &tmp, traceKind);
    JS_ASSERT(tmp == thing);

#ifdef DEBUG
    if (trc->runtime()->gcIncrementalState == MARK_ROOTS)
        trc->runtime()->mainThread.gcSavedRoots.append(
            PerThreadData::SavedGCRoot(thing, traceKind));
#endif

    return CGCT_VALID;
}

#ifndef JSGC_USE_EXACT_ROOTING
static void
MarkWordConservatively(JSTracer *trc, uintptr_t w)
{
    /*
     * The conservative scanner may access words that valgrind considers
     * undefined. To avoid false positives, without altering valgrind's view
     * of the memory, we mark the argument, a copy of the original word, as
     * memcheck-defined. See bug 572678.
     */
#ifdef MOZ_VALGRIND
    JS_SILENCE_UNUSED_VALUE_IN_EXPR(VALGRIND_MAKE_MEM_DEFINED(&w, sizeof(w)));
#endif

    MarkIfGCThingWord(trc, w);
}
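/*
 * MOZ_ASAN_BLACKLIST exempts the range scanner from AddressSanitizer
 * instrumentation: reading every word of the stack, including redzones and
 * dead slots, is exactly what a conservative scanner must do, and ASan
 * would otherwise report those reads.
 */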
MOZ_ASAN_BLACKLIST
static void
MarkRangeConservatively(JSTracer *trc, const uintptr_t *begin, const uintptr_t *end)
{
    JS_ASSERT(begin <= end);
    for (const uintptr_t *i = begin; i < end; ++i)
        MarkWordConservatively(trc, *i);
}

static void
MarkRangeConservativelyAndSkipIon(JSTracer *trc, JSRuntime *rt, const uintptr_t *begin, const uintptr_t *end)
{
    const uintptr_t *i = begin;

#if JS_STACK_GROWTH_DIRECTION < 0 && defined(JS_ION) && !defined(JS_ARM_SIMULATOR)
    // Walk only regions in between JIT activations. Note that non-volatile
    // registers are spilled to the stack before the entry frame, ensuring
    // that the conservative scanner will still see them.
    //
    // If the ARM simulator is enabled, JIT activations are not on the native
    // stack but on the simulator stack, so we don't have to skip JIT regions
    // in this case.
    for (jit::JitActivationIterator iter(rt); !iter.done(); ++iter) {
        uintptr_t *jitMin, *jitEnd;
        iter.jitStackRange(jitMin, jitEnd);

        MarkRangeConservatively(trc, i, jitMin);
        i = jitEnd;
    }
#endif

    // Mark everything after the most recent Ion activation.
    MarkRangeConservatively(trc, i, end);
}

static MOZ_NEVER_INLINE void
MarkConservativeStackRoots(JSTracer *trc, bool useSavedRoots)
{
    JSRuntime *rt = trc->runtime();

#ifdef DEBUG
    if (useSavedRoots) {
        for (PerThreadData::SavedGCRoot *root = rt->mainThread.gcSavedRoots.begin();
             root != rt->mainThread.gcSavedRoots.end();
             root++)
        {
            trc->setTracingName("cstack");
            MarkKind(trc, &root->thing, root->kind);
        }
        return;
    }

    if (rt->gcIncrementalState == MARK_ROOTS)
        rt->mainThread.gcSavedRoots.clearAndFree();
#endif

    ConservativeGCData *cgcd = &rt->conservativeGC;
    if (!cgcd->hasStackToScan()) {
#ifdef JS_THREADSAFE
        JS_ASSERT(!rt->requestDepth);
#endif
        return;
    }

    uintptr_t *stackMin, *stackEnd;
#if JS_STACK_GROWTH_DIRECTION > 0
    stackMin = reinterpret_cast<uintptr_t *>(rt->nativeStackBase);
    stackEnd = cgcd->nativeStackTop;
#else
    stackMin = cgcd->nativeStackTop + 1;
    stackEnd = reinterpret_cast<uintptr_t *>(rt->nativeStackBase);
#endif

    JS_ASSERT(stackMin <= stackEnd);
    MarkRangeConservativelyAndSkipIon(trc, rt, stackMin, stackEnd);
    MarkRangeConservatively(trc, cgcd->registerSnapshot.words,
                            ArrayEnd(cgcd->registerSnapshot.words));
}
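/*
 * On NUNBOX32 platforms a Value is 64 bits wide but only its 32-bit payload
 * half can hold a pointer, so the specialization below starts at the first
 * payload word (beginv->payloadUIntPtr()) and advances by
 * sizeof(Value) / sizeof(uintptr_t) == 2 machine words, visiting each
 * payload exactly once and skipping the tag words.
 */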
void
js::MarkStackRangeConservatively(JSTracer *trc, Value *beginv, Value *endv)
{
    const uintptr_t *begin = beginv->payloadUIntPtr();
    const uintptr_t *end = endv->payloadUIntPtr();
#ifdef JS_NUNBOX32
    /*
     * With 64-bit jsvals on 32-bit systems, we can optimize a bit by
     * scanning only the payloads.
     */
    JS_ASSERT(begin <= end);
    for (const uintptr_t *i = begin; i < end; i += sizeof(Value) / sizeof(uintptr_t))
        MarkWordConservatively(trc, *i);
#else
    MarkRangeConservatively(trc, begin, end);
#endif
}

#endif /* JSGC_USE_EXACT_ROOTING */

MOZ_NEVER_INLINE void
ConservativeGCData::recordStackTop()
{
    /* Update the native stack pointer if it points to a bigger stack. */
    uintptr_t dummy;
    nativeStackTop = &dummy;

    /*
     * We use setjmp to record and update the register snapshot for
     * conservative scanning with the latest values: setjmp spills the
     * callee-saved registers into the jmp_buf, where the scanner can see
     * any GC-thing pointers held only in registers.
     */
#if defined(_MSC_VER)
# pragma warning(push)
# pragma warning(disable: 4611)
#endif
    (void) setjmp(registerSnapshot.jmpbuf);
#if defined(_MSC_VER)
# pragma warning(pop)
#endif
}

void
JS::AutoIdArray::trace(JSTracer *trc)
{
    JS_ASSERT(tag_ == IDARRAY);
    gc::MarkIdRange(trc, idArray->length, idArray->vector, "JSAutoIdArray.idArray");
}
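/*
 * AutoGCRooter::trace dispatches on tag_: each negative value names a
 * rooter kind handled by a case below, while non-negative values are
 * AutoArrayRooter element counts, handled by the range-marking fallthrough
 * after the switch.
 */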
inline void
AutoGCRooter::trace(JSTracer *trc)
{
    switch (tag_) {
      case PARSER:
        frontend::MarkParser(trc, this);
        return;

      case IDARRAY: {
        JSIdArray *ida = static_cast<AutoIdArray *>(this)->idArray;
        MarkIdRange(trc, ida->length, ida->vector, "JS::AutoIdArray.idArray");
        return;
      }

      case DESCRIPTORS: {
        PropDescArray &descriptors =
            static_cast<AutoPropDescArrayRooter *>(this)->descriptors;
        for (size_t i = 0, len = descriptors.length(); i < len; i++) {
            PropDesc &desc = descriptors[i];
            MarkValueRoot(trc, &desc.pd_, "PropDesc::pd_");
            MarkValueRoot(trc, &desc.value_, "PropDesc::value_");
            MarkValueRoot(trc, &desc.get_, "PropDesc::get_");
            MarkValueRoot(trc, &desc.set_, "PropDesc::set_");
        }
        return;
      }

      case ID:
        MarkIdRoot(trc, &static_cast<AutoIdRooter *>(this)->id_, "JS::AutoIdRooter.id_");
        return;

      case VALVECTOR: {
        AutoValueVector::VectorImpl &vector = static_cast<AutoValueVector *>(this)->vector;
        MarkValueRootRange(trc, vector.length(), vector.begin(), "js::AutoValueVector.vector");
        return;
      }

      case IDVECTOR: {
        AutoIdVector::VectorImpl &vector = static_cast<AutoIdVector *>(this)->vector;
        MarkIdRootRange(trc, vector.length(), vector.begin(), "js::AutoIdVector.vector");
        return;
      }

      case SHAPEVECTOR: {
        AutoShapeVector::VectorImpl &vector = static_cast<AutoShapeVector *>(this)->vector;
        MarkShapeRootRange(trc, vector.length(), const_cast<Shape **>(vector.begin()),
                           "js::AutoShapeVector.vector");
        return;
      }

      case OBJVECTOR: {
        AutoObjectVector::VectorImpl &vector = static_cast<AutoObjectVector *>(this)->vector;
        MarkObjectRootRange(trc, vector.length(), vector.begin(), "js::AutoObjectVector.vector");
        return;
      }

      case FUNVECTOR: {
        AutoFunctionVector::VectorImpl &vector = static_cast<AutoFunctionVector *>(this)->vector;
        MarkObjectRootRange(trc, vector.length(), vector.begin(), "js::AutoFunctionVector.vector");
        return;
      }

      case STRINGVECTOR: {
        AutoStringVector::VectorImpl &vector = static_cast<AutoStringVector *>(this)->vector;
        MarkStringRootRange(trc, vector.length(), vector.begin(), "js::AutoStringVector.vector");
        return;
      }

      case NAMEVECTOR: {
        AutoNameVector::VectorImpl &vector = static_cast<AutoNameVector *>(this)->vector;
        MarkStringRootRange(trc, vector.length(), vector.begin(), "js::AutoNameVector.vector");
        return;
      }

      case VALARRAY: {
        /*
         * We don't know the template size parameter, but we can safely treat it
         * as an AutoValueArray<1> because the length is stored separately.
         */
        AutoValueArray<1> *array = static_cast<AutoValueArray<1> *>(this);
        MarkValueRootRange(trc, array->length(), array->begin(), "js::AutoValueArray");
        return;
      }

      case SCRIPTVECTOR: {
        AutoScriptVector::VectorImpl &vector = static_cast<AutoScriptVector *>(this)->vector;
        MarkScriptRootRange(trc, vector.length(), vector.begin(), "js::AutoScriptVector.vector");
        return;
      }

      case OBJOBJHASHMAP: {
        AutoObjectObjectHashMap::HashMapImpl &map = static_cast<AutoObjectObjectHashMap *>(this)->map;
        for (AutoObjectObjectHashMap::Enum e(map); !e.empty(); e.popFront()) {
            MarkObjectRoot(trc, &e.front().value(), "AutoObjectObjectHashMap value");
            trc->setTracingLocation((void *)&e.front().key());
            JSObject *key = e.front().key();
            MarkObjectRoot(trc, &key, "AutoObjectObjectHashMap key");
            if (key != e.front().key())
                e.rekeyFront(key);
        }
        return;
      }

      case OBJU32HASHMAP: {
        AutoObjectUnsigned32HashMap *self = static_cast<AutoObjectUnsigned32HashMap *>(this);
        AutoObjectUnsigned32HashMap::HashMapImpl &map = self->map;
        for (AutoObjectUnsigned32HashMap::Enum e(map); !e.empty(); e.popFront()) {
            JSObject *key = e.front().key();
            MarkObjectRoot(trc, &key, "AutoObjectUnsignedHashMap key");
            if (key != e.front().key())
                e.rekeyFront(key);
        }
        return;
      }

      case OBJHASHSET: {
        AutoObjectHashSet *self = static_cast<AutoObjectHashSet *>(this);
        AutoObjectHashSet::HashSetImpl &set = self->set;
        for (AutoObjectHashSet::Enum e(set); !e.empty(); e.popFront()) {
            JSObject *obj = e.front();
            MarkObjectRoot(trc, &obj, "AutoObjectHashSet value");
            if (obj != e.front())
                e.rekeyFront(obj);
        }
        return;
      }

      case HASHABLEVALUE: {
        AutoHashableValueRooter *rooter = static_cast<AutoHashableValueRooter *>(this);
        rooter->trace(trc);
        return;
      }

      case IONMASM: {
#ifdef JS_ION
        static_cast<js::jit::MacroAssembler::AutoRooter *>(this)->masm()->trace(trc);
#endif
        return;
      }

      case IONALLOC: {
#ifdef JS_ION
        static_cast<js::jit::AutoTempAllocatorRooter *>(this)->trace(trc);
#endif
        return;
      }

      case WRAPPER: {
        /*
         * We need to use MarkValueUnbarriered here because we mark wrapper
         * roots in every slice. This is because of some rule-breaking in
         * RemapAllWrappersForObject; see comment there.
         */
        MarkValueUnbarriered(trc, &static_cast<AutoWrapperRooter *>(this)->value.get(),
                             "JS::AutoWrapperRooter.value");
        return;
      }

      case WRAPVECTOR: {
        AutoWrapperVector::VectorImpl &vector = static_cast<AutoWrapperVector *>(this)->vector;
        /*
         * We need to use MarkValueUnbarriered here because we mark wrapper
         * roots in every slice. This is because of some rule-breaking in
         * RemapAllWrappersForObject; see comment there.
         */
        for (WrapperValue *p = vector.begin(); p < vector.end(); p++)
            MarkValueUnbarriered(trc, &p->get(), "js::AutoWrapperVector.vector");
        return;
      }

      case JSONPARSER:
        static_cast<js::JSONParser *>(this)->trace(trc);
        return;

      case CUSTOM:
        static_cast<JS::CustomAutoRooter *>(this)->trace(trc);
        return;
    }

    JS_ASSERT(tag_ >= 0);
    if (Value *vp = static_cast<AutoArrayRooter *>(this)->array)
        MarkValueRootRange(trc, tag_, vp, "JS::AutoArrayRooter.array");
}
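/*
 * For the CUSTOM case above: embedders get such a rooter by subclassing
 * JS::CustomAutoRooter and overriding trace(). A hypothetical minimal
 * subclass, for illustration only:
 *
 *   class MyRooter : public JS::CustomAutoRooter {
 *       JS::Heap<JSObject *> obj;
 *     public:
 *       MyRooter(JSContext *cx, JSObject *objArg)
 *         : CustomAutoRooter(cx), obj(objArg) {}
 *       virtual void trace(JSTracer *trc) MOZ_OVERRIDE {
 *           JS_CallObjectTracer(trc, &obj, "MyRooter::obj");
 *       }
 *   };
 */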
/* static */ void
AutoGCRooter::traceAll(JSTracer *trc)
{
    for (ContextIter cx(trc->runtime()); !cx.done(); cx.next()) {
        for (js::AutoGCRooter *gcr = cx->autoGCRooters; gcr; gcr = gcr->down)
            gcr->trace(trc);
    }
}

/* static */ void
AutoGCRooter::traceAllWrappers(JSTracer *trc)
{
    for (ContextIter cx(trc->runtime()); !cx.done(); cx.next()) {
        for (js::AutoGCRooter *gcr = cx->autoGCRooters; gcr; gcr = gcr->down) {
            if (gcr->tag_ == WRAPVECTOR || gcr->tag_ == WRAPPER)
                gcr->trace(trc);
        }
    }
}

void
AutoHashableValueRooter::trace(JSTracer *trc)
{
    MarkValueRoot(trc, reinterpret_cast<Value*>(&value), "AutoHashableValueRooter");
}

void
StackShape::trace(JSTracer *trc)
{
    if (base)
        MarkBaseShapeRoot(trc, (BaseShape**) &base, "StackShape base");
    MarkIdRoot(trc, (jsid*) &propid, "StackShape id");
}

void
JSPropertyDescriptor::trace(JSTracer *trc)
{
    if (obj)
        MarkObjectRoot(trc, &obj, "Descriptor::obj");
    MarkValueRoot(trc, &value, "Descriptor::value");
    if ((attrs & JSPROP_GETTER) && getter) {
        JSObject *tmp = JS_FUNC_TO_DATA_PTR(JSObject *, getter);
        MarkObjectRoot(trc, &tmp, "Descriptor::get");
        getter = JS_DATA_TO_FUNC_PTR(JSPropertyOp, tmp);
    }
    if ((attrs & JSPROP_SETTER) && setter) {
        JSObject *tmp = JS_FUNC_TO_DATA_PTR(JSObject *, setter);
        MarkObjectRoot(trc, &tmp, "Descriptor::set");
        setter = JS_DATA_TO_FUNC_PTR(JSStrictPropertyOp, tmp);
    }
}

namespace js {
namespace gc {

template <typename T>
struct PersistentRootedMarker
{
    typedef PersistentRooted<T> Element;
    typedef mozilla::LinkedList<Element> List;
    typedef void (*MarkFunc)(JSTracer *trc, T *ref, const char *name);

    template <MarkFunc Mark>
    static void
    markChainIfNotNull(JSTracer *trc, List &list, const char *name)
    {
        for (Element *r = list.getFirst(); r; r = r->getNext()) {
            if (r->get())
                Mark(trc, r->address(), name);
        }
    }

    template <MarkFunc Mark>
    static void
    markChain(JSTracer *trc, List &list, const char *name)
    {
        for (Element *r = list.getFirst(); r; r = r->getNext())
            Mark(trc, r->address(), name);
    }
};
}
}
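/*
 * markChainIfNotNull exists because the pointer-typed PersistentRooteds may
 * legitimately hold nullptr, which the Mark*Root functions do not accept;
 * jsid and Value roots always carry a payload the marker can inspect, so
 * markChain can mark those chains unconditionally.
 */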
void
js::gc::MarkPersistentRootedChains(JSTracer *trc)
{
    JSRuntime *rt = trc->runtime();

    // Mark the PersistentRooted chains of types that may be null.
    PersistentRootedMarker<JSFunction *>::markChainIfNotNull<MarkObjectRoot>(
        trc, rt->functionPersistentRooteds, "PersistentRooted<JSFunction *>");
    PersistentRootedMarker<JSObject *>::markChainIfNotNull<MarkObjectRoot>(
        trc, rt->objectPersistentRooteds, "PersistentRooted<JSObject *>");
    PersistentRootedMarker<JSScript *>::markChainIfNotNull<MarkScriptRoot>(
        trc, rt->scriptPersistentRooteds, "PersistentRooted<JSScript *>");
    PersistentRootedMarker<JSString *>::markChainIfNotNull<MarkStringRoot>(
        trc, rt->stringPersistentRooteds, "PersistentRooted<JSString *>");

    // Mark the PersistentRooted chains of types that are never null.
    PersistentRootedMarker<jsid>::markChain<MarkIdRoot>(trc, rt->idPersistentRooteds,
                                                        "PersistentRooted<jsid>");
    PersistentRootedMarker<Value>::markChain<MarkValueRoot>(trc, rt->valuePersistentRooteds,
                                                            "PersistentRooted<Value>");
}

void
js::gc::MarkRuntime(JSTracer *trc, bool useSavedRoots)
{
    JSRuntime *rt = trc->runtime();
    JS_ASSERT(trc->callback != GCMarker::GrayCallback);

    JS_ASSERT(!rt->mainThread.suppressGC);

    if (IS_GC_MARKING_TRACER(trc)) {
        for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
            if (!c->zone()->isCollecting())
                c->markCrossCompartmentWrappers(trc);
        }
        Debugger::markCrossCompartmentDebuggerObjectReferents(trc);
    }

    AutoGCRooter::traceAll(trc);

    if (!rt->isBeingDestroyed()) {
#ifdef JSGC_USE_EXACT_ROOTING
        MarkExactStackRoots(trc);
#else
        MarkConservativeStackRoots(trc, useSavedRoots);
#endif
        rt->markSelfHostingGlobal(trc);
    }
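    /*
     * gcRootsHash holds the roots registered through the public
     * JS_AddValueRoot/JS_AddNamedObjectRoot family of APIs; each key is the
     * address of an embedder variable. Value roots are marked
     * unconditionally, pointer roots only when the slot is non-null.
     */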
    for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront()) {
        const RootEntry &entry = r.front();
        const char *name = entry.value().name ? entry.value().name : "root";
        JSGCRootType type = entry.value().type;
        void *key = entry.key();
        if (type == JS_GC_ROOT_VALUE_PTR) {
            MarkValueRoot(trc, reinterpret_cast<Value *>(key), name);
        } else if (*reinterpret_cast<void **>(key)) {
            if (type == JS_GC_ROOT_STRING_PTR)
                MarkStringRoot(trc, reinterpret_cast<JSString **>(key), name);
            else if (type == JS_GC_ROOT_OBJECT_PTR)
                MarkObjectRoot(trc, reinterpret_cast<JSObject **>(key), name);
            else if (type == JS_GC_ROOT_SCRIPT_PTR)
                MarkScriptRoot(trc, reinterpret_cast<JSScript **>(key), name);
            else
                MOZ_ASSUME_UNREACHABLE("unexpected js::RootInfo::type value");
        }
    }

    MarkPersistentRootedChains(trc);

    if (rt->scriptAndCountsVector) {
        ScriptAndCountsVector &vec = *rt->scriptAndCountsVector;
        for (size_t i = 0; i < vec.length(); i++)
            MarkScriptRoot(trc, &vec[i].script, "scriptAndCountsVector");
    }

    if (!rt->isBeingDestroyed() && !trc->runtime()->isHeapMinorCollecting()) {
        if (!IS_GC_MARKING_TRACER(trc) || rt->atomsCompartment()->zone()->isCollecting()) {
            MarkPermanentAtoms(trc);
            MarkAtoms(trc);
#ifdef JS_ION
            jit::JitRuntime::Mark(trc);
#endif
        }
    }

    for (ContextIter acx(rt); !acx.done(); acx.next())
        acx->mark(trc);

    for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next()) {
        if (IS_GC_MARKING_TRACER(trc) && !zone->isCollecting())
            continue;

        /* Do not discard scripts with counts while profiling. */
        if (rt->profilingScripts && !rt->isHeapMinorCollecting()) {
            for (CellIterUnderGC i(zone, FINALIZE_SCRIPT); !i.done(); i.next()) {
                JSScript *script = i.get<JSScript>();
                if (script->hasScriptCounts()) {
                    MarkScriptRoot(trc, &script, "profilingScripts");
                    JS_ASSERT(script == i.get<JSScript>());
                }
            }
        }
    }

    /* We can't use GCCompartmentsIter if we're called from TraceRuntime. */
    for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
        if (trc->runtime()->isHeapMinorCollecting())
            c->globalWriteBarriered = false;

        if (IS_GC_MARKING_TRACER(trc) && !c->zone()->isCollecting())
            continue;

        /* During a GC, these are treated as weak pointers. */
        if (!IS_GC_MARKING_TRACER(trc)) {
            if (c->watchpointMap)
                c->watchpointMap->markAll(trc);
        }

        /* Mark debug scopes, if present. */
        if (c->debugScopes)
            c->debugScopes->mark(trc);
    }

    MarkInterpreterActivations(rt, trc);

#ifdef JS_ION
    jit::MarkJitActivations(rt, trc);
#endif

    if (!rt->isHeapMinorCollecting()) {
        /*
         * All JSCompartment::mark does is mark the globals for compartments
         * which have been entered. Globals aren't nursery allocated so there's
         * no need to do this for minor GCs.
         */
        for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next())
            c->markRoots(trc);

        /*
         * The embedding can register additional roots here.
         *
         * We don't need to trace these in a minor GC because all pointers into
         * the nursery should be in the store buffer, and we want to avoid the
         * time taken to trace all these roots.
         */
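        /*
         * These tracers are installed via JS_AddExtraGCRootsTracer. A
         * hypothetical embedder callback, for illustration only:
         *
         *   static void TraceMyRoots(JSTracer *trc, void *data) {
         *       MyRoots *roots = static_cast<MyRoots *>(data);
         *       JS_CallObjectTracer(trc, &roots->global, "my-global");
         *   }
         *   // at startup: JS_AddExtraGCRootsTracer(rt, TraceMyRoots, &roots);
         *
         * where MyRoots::global is a JS::Heap<JSObject *>.
         */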
        for (size_t i = 0; i < rt->gcBlackRootTracers.length(); i++) {
            const JSRuntime::ExtraTracer &e = rt->gcBlackRootTracers[i];
            (*e.op)(trc, e.data);
        }

        /* During GC, we don't mark gray roots at this stage. */
        if (JSTraceDataOp op = rt->gcGrayRootTracer.op) {
            if (!IS_GC_MARKING_TRACER(trc))
                (*op)(trc, rt->gcGrayRootTracer.data);
        }
    }
}

void
js::gc::BufferGrayRoots(GCMarker *gcmarker)
{
    JSRuntime *rt = gcmarker->runtime();
    gcmarker->startBufferingGrayRoots();
    if (JSTraceDataOp op = rt->gcGrayRootTracer.op)
        (*op)(gcmarker, rt->gcGrayRootTracer.data);
    gcmarker->endBufferingGrayRoots();
}