/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

// We're dividing JS objects into 3 categories:
//
// 1. "real" roots, held by the JS engine itself or rooted through the root
//    and lock JS APIs. Roots from this category are considered black in the
//    cycle collector, and any cycle they participate in is uncollectable.
//
// 2. certain roots held by C++ objects that are guaranteed to be alive.
//    Roots from this category are considered black in the cycle collector,
//    and any cycle they participate in is uncollectable. These roots are
//    traced from TraceNativeBlackRoots.
//
// 3. all other roots held by C++ objects that participate in cycle
//    collection, held by us (see TraceNativeGrayRoots). Roots from this
//    category are considered gray in the cycle collector; whether or not
//    they are collected depends on the objects that hold them.
//
// Note that if a root is in multiple categories, category 1 or 2 takes
// precedence, so it will be considered black.
//
// During garbage collection we switch to an additional mark color (gray)
// when tracing inside TraceNativeGrayRoots. This allows us to walk those
// roots later on and add all objects reachable only from them to the
// cycle collector.
//
// Phases:
//
// 1. marking of the roots in category 1 by having the JS GC do its marking
// 2. marking of the roots in category 2 by having the JS GC call us back
//    (via JS_SetExtraGCRootsTracer) and running TraceNativeBlackRoots
// 3. marking of the roots in category 3 by TraceNativeGrayRoots using an
//    additional color (gray)
// 4. end of GC, GC can sweep its heap
//
// At some later point, when the cycle collector runs:
//
// 5. walk gray objects and add them to the cycle collector, cycle collect
//
// JS objects that are part of cycles the cycle collector breaks will be
// collected by the next JS GC.
//
// If WantAllTraces() is false the cycle collector will not traverse roots
// from category 1 or any JS objects held by them. Any JS objects they hold
// will already be marked by the JS GC and will thus be colored black
// themselves. Any C++ objects they hold will have a missing (untraversed)
// edge from the JS object to the C++ object, so the C++ object will be
// marked black too. This decreases the number of objects that the cycle
// collector has to deal with.
// To improve debugging, if WantAllTraces() is true all JS objects are
// traversed.
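//
// Illustrative sketch (not part of this file): an embedder subclass hooks
// into phases 2 and 3 above through the virtual hooks that TraceBlackJS and
// TraceNativeGrayRoots (below) call into. The class name is hypothetical,
// and the hook names are assumed to be the virtual extension points declared
// in CycleCollectedJSRuntime.h:
//
//   class MyRuntime : public CycleCollectedJSRuntime
//   {
//     virtual void TraceNativeBlackRoots(JSTracer* aTracer) MOZ_OVERRIDE
//     {
//       // Phase 2: trace category 2 roots (guaranteed-alive C++ holders).
//     }
//     virtual void TraceAdditionalNativeGrayRoots(JSTracer* aTracer) MOZ_OVERRIDE
//     {
//       // Phase 3: trace extra category 3 roots; TraceNativeGrayRoots also
//       // walks the registered JS holders after calling this.
//     }
//   };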
#include "mozilla/CycleCollectedJSRuntime.h"
#include <algorithm>
#include "mozilla/ArrayUtils.h"
#include "mozilla/MemoryReporting.h"
#include "mozilla/dom/BindingUtils.h"
#include "mozilla/dom/DOMJSClass.h"
#include "mozilla/dom/ScriptSettings.h"
#include "jsprf.h"
#include "nsCycleCollectionNoteRootCallback.h"
#include "nsCycleCollectionParticipant.h"
#include "nsCycleCollector.h"
#include "nsDOMJSUtils.h"
#include "nsIException.h"
#include "nsThreadUtils.h"
#include "xpcpublic.h"

using namespace mozilla;
using namespace mozilla::dom;

namespace mozilla {

struct DeferredFinalizeFunctionHolder
{
  DeferredFinalizeFunction run;
  void* data;
};

class IncrementalFinalizeRunnable : public nsRunnable
{
  typedef nsAutoTArray<DeferredFinalizeFunctionHolder, 16> DeferredFinalizeArray;
  typedef CycleCollectedJSRuntime::DeferredFinalizerTable DeferredFinalizerTable;

  CycleCollectedJSRuntime* mRuntime;
  nsTArray<nsISupports*> mSupports;
  DeferredFinalizeArray mDeferredFinalizeFunctions;
  uint32_t mFinalizeFunctionToRun;

  static const PRTime SliceMillis = 10; /* ms */

  static PLDHashOperator
  DeferredFinalizerEnumerator(DeferredFinalizeFunction& aFunction,
                              void*& aData,
                              void* aClosure);

public:
  IncrementalFinalizeRunnable(CycleCollectedJSRuntime* aRt,
                              nsTArray<nsISupports*>& aSupports,
                              DeferredFinalizerTable& aFinalizerTable);
  virtual ~IncrementalFinalizeRunnable();

  void ReleaseNow(bool aLimited);

  NS_DECL_NSIRUNNABLE
};

} // namespace mozilla

inline bool
AddToCCKind(JSGCTraceKind kind)
{
  return kind == JSTRACE_OBJECT || kind == JSTRACE_SCRIPT;
}

static void
TraceWeakMappingChild(JSTracer* trc, void** thingp, JSGCTraceKind kind);

struct NoteWeakMapChildrenTracer : public JSTracer
{
  NoteWeakMapChildrenTracer(JSRuntime* rt, nsCycleCollectionNoteRootCallback& cb)
    : JSTracer(rt, TraceWeakMappingChild), mCb(cb)
  {
  }
  nsCycleCollectionNoteRootCallback& mCb;
  bool mTracedAny;
  JSObject* mMap;
  void* mKey;
  void* mKeyDelegate;
};

static void
TraceWeakMappingChild(JSTracer* trc, void** thingp, JSGCTraceKind kind)
{
  MOZ_ASSERT(trc->callback == TraceWeakMappingChild);
  void* thing = *thingp;
  NoteWeakMapChildrenTracer* tracer =
    static_cast<NoteWeakMapChildrenTracer*>(trc);

  if (kind == JSTRACE_STRING) {
    return;
  }

  if (!xpc_IsGrayGCThing(thing) && !tracer->mCb.WantAllTraces()) {
    return;
  }

  if (AddToCCKind(kind)) {
    tracer->mCb.NoteWeakMapping(tracer->mMap, tracer->mKey,
                                tracer->mKeyDelegate, thing);
    tracer->mTracedAny = true;
  } else {
    JS_TraceChildren(trc, thing, kind);
  }
}

struct NoteWeakMapsTracer :
  public js::WeakMapTracer
{
  NoteWeakMapsTracer(JSRuntime* rt, js::WeakMapTraceCallback cb,
                     nsCycleCollectionNoteRootCallback& cccb)
    : js::WeakMapTracer(rt, cb), mCb(cccb), mChildTracer(rt, cccb)
  {
  }
  nsCycleCollectionNoteRootCallback& mCb;
  NoteWeakMapChildrenTracer mChildTracer;
};

static void
TraceWeakMapping(js::WeakMapTracer* trc, JSObject* m,
                 void* k, JSGCTraceKind kkind,
                 void* v, JSGCTraceKind vkind)
{
  MOZ_ASSERT(trc->callback == TraceWeakMapping);
  NoteWeakMapsTracer* tracer = static_cast<NoteWeakMapsTracer*>(trc);

  // If nothing that could be held alive by this entry is marked gray, return.
  if ((!k || !xpc_IsGrayGCThing(k)) && MOZ_LIKELY(!tracer->mCb.WantAllTraces())) {
    if (!v || !xpc_IsGrayGCThing(v) || vkind == JSTRACE_STRING) {
      return;
    }
  }

  // The cycle collector can only properly reason about weak maps if it can
  // reason about the liveness of their keys, which in turn requires that
  // the key can be represented in the cycle collector graph. All existing
  // uses of weak maps use either objects or scripts as keys, which are okay.
  MOZ_ASSERT(AddToCCKind(kkind));

  // As an emergency fallback for non-debug builds, if the key is not
  // representable in the cycle collector graph, we treat it as marked. This
  // can cause leaks, but is preferable to ignoring the binding, which could
  // cause the cycle collector to free live objects.
  if (!AddToCCKind(kkind)) {
    k = nullptr;
  }

  JSObject* kdelegate = nullptr;
  if (k && kkind == JSTRACE_OBJECT) {
    kdelegate = js::GetWeakmapKeyDelegate((JSObject*)k);
  }

  if (AddToCCKind(vkind)) {
    tracer->mCb.NoteWeakMapping(m, k, kdelegate, v);
  } else {
    tracer->mChildTracer.mTracedAny = false;
    tracer->mChildTracer.mMap = m;
    tracer->mChildTracer.mKey = k;
    tracer->mChildTracer.mKeyDelegate = kdelegate;

    if (v && vkind != JSTRACE_STRING) {
      JS_TraceChildren(&tracer->mChildTracer, v, vkind);
    }

    // The delegate could hold alive the key, so report something to the CC
    // if we haven't already.
    if (!tracer->mChildTracer.mTracedAny &&
        k && xpc_IsGrayGCThing(k) && kdelegate) {
      tracer->mCb.NoteWeakMapping(m, k, kdelegate, nullptr);
    }
  }
}

// This is based on the logic in TraceWeakMapping.
struct FixWeakMappingGrayBitsTracer : public js::WeakMapTracer
{
  FixWeakMappingGrayBitsTracer(JSRuntime* rt)
    : js::WeakMapTracer(rt, FixWeakMappingGrayBits)
  {}

  void
  FixAll()
  {
    do {
      mAnyMarked = false;
      js::TraceWeakMaps(this);
    } while (mAnyMarked);
  }

private:

  static void
  FixWeakMappingGrayBits(js::WeakMapTracer* trc, JSObject* m,
                         void* k, JSGCTraceKind kkind,
                         void* v, JSGCTraceKind vkind)
  {
    MOZ_ASSERT(!JS::IsIncrementalGCInProgress(trc->runtime),
               "Don't call FixWeakMappingGrayBits during a GC.");

    FixWeakMappingGrayBitsTracer* tracer =
      static_cast<FixWeakMappingGrayBitsTracer*>(trc);

    // If nothing that could be held alive by this entry is marked gray, return.
    bool delegateMightNeedMarking = k && xpc_IsGrayGCThing(k);
    bool valueMightNeedMarking = v && xpc_IsGrayGCThing(v) &&
                                 vkind != JSTRACE_STRING;
    if (!delegateMightNeedMarking && !valueMightNeedMarking) {
      return;
    }

    if (!AddToCCKind(kkind)) {
      k = nullptr;
    }

    if (delegateMightNeedMarking && kkind == JSTRACE_OBJECT) {
      JSObject* kdelegate = js::GetWeakmapKeyDelegate((JSObject*)k);
      if (kdelegate && !xpc_IsGrayGCThing(kdelegate)) {
        if (JS::UnmarkGrayGCThingRecursively(k, JSTRACE_OBJECT)) {
          tracer->mAnyMarked = true;
        }
      }
    }

    if (v && xpc_IsGrayGCThing(v) &&
        (!k || !xpc_IsGrayGCThing(k)) &&
        (!m || !xpc_IsGrayGCThing(m)) &&
        vkind != JSTRACE_SHAPE) {
      if (JS::UnmarkGrayGCThingRecursively(v, vkind)) {
        tracer->mAnyMarked = true;
      }
    }
  }

  bool mAnyMarked;
};

struct Closure
{
  Closure(nsCycleCollectionNoteRootCallback* aCb)
    : mCycleCollectionEnabled(true), mCb(aCb)
  {
  }

  bool mCycleCollectionEnabled;
  nsCycleCollectionNoteRootCallback* mCb;
};

static void
CheckParticipatesInCycleCollection(void* aThing, const char* aName,
                                   void* aClosure)
{
  Closure* closure = static_cast<Closure*>(aClosure);

  if (closure->mCycleCollectionEnabled) {
    return;
  }

  if (AddToCCKind(js::GCThingTraceKind(aThing)) &&
      xpc_IsGrayGCThing(aThing)) {
    closure->mCycleCollectionEnabled = true;
  }
}

static PLDHashOperator
NoteJSHolder(void* holder, nsScriptObjectTracer*& tracer, void* arg)
{
  Closure* closure = static_cast<Closure*>(arg);

  bool noteRoot;
  if (MOZ_UNLIKELY(closure->mCb->WantAllTraces())) {
    noteRoot = true;
  } else {
    closure->mCycleCollectionEnabled = false;
    tracer->Trace(holder,
                  TraceCallbackFunc(CheckParticipatesInCycleCollection),
                  closure);
    noteRoot = closure->mCycleCollectionEnabled;
  }

  if (noteRoot) {
    closure->mCb->NoteNativeRoot(holder, tracer);
  }

  return PL_DHASH_NEXT;
}
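
// Illustrative sketch (not part of this file): the (holder, tracer) pairs
// enumerated by NoteJSHolder above are registered through AddJSHolder,
// defined later in this file. A cycle-collected C++ class holding a JS
// object would, roughly, do the following; the class and member names are
// hypothetical, and real code normally obtains the tracer from the cycle
// collection participant machinery rather than writing it by hand:
//
//   class MyHolder
//   {
//     JS::Heap<JSObject*> mJSObject;
//
//     void HoldJSObject(CycleCollectedJSRuntime* aRuntime,
//                       nsScriptObjectTracer* aTracer)
//     {
//       // aTracer->Trace(this, ...) must report mJSObject, so that
//       //  - NoteJSHolder can decide whether this holder is a gray root, and
//       //  - TraceNativeGrayRoots can trace mJSObject during GC.
//       aRuntime->AddJSHolder(this, aTracer);
//     }
//
//     void DropJSObject(CycleCollectedJSRuntime* aRuntime)
//     {
//       // RemoveJSHolder clears the traced fields via ClearJSHolder.
//       aRuntime->RemoveJSHolder(this);
//     }
//   };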
NS_IMETHODIMP
JSGCThingParticipant::Traverse(void* p, nsCycleCollectionTraversalCallback& cb)
{
  CycleCollectedJSRuntime* runtime = reinterpret_cast<CycleCollectedJSRuntime*>(
    reinterpret_cast<char*>(this) -
    offsetof(CycleCollectedJSRuntime, mGCThingCycleCollectorGlobal));

  runtime->TraverseGCThing(CycleCollectedJSRuntime::TRAVERSE_FULL,
                           p, js::GCThingTraceKind(p), cb);
  return NS_OK;
}

// NB: This is only used to initialize the participant in
// CycleCollectedJSRuntime. It should never be used directly.
static JSGCThingParticipant sGCThingCycleCollectorGlobal;

NS_IMETHODIMP
JSZoneParticipant::Traverse(void* p, nsCycleCollectionTraversalCallback& cb)
{
  CycleCollectedJSRuntime* runtime = reinterpret_cast<CycleCollectedJSRuntime*>(
    reinterpret_cast<char*>(this) -
    offsetof(CycleCollectedJSRuntime, mJSZoneCycleCollectorGlobal));

  MOZ_ASSERT(!cb.WantAllTraces());
  JS::Zone* zone = static_cast<JS::Zone*>(p);

  runtime->TraverseZone(zone, cb);
  return NS_OK;
}

static void
NoteJSChildTracerShim(JSTracer* aTrc, void** aThingp, JSGCTraceKind aTraceKind);

struct TraversalTracer : public JSTracer
{
  TraversalTracer(JSRuntime* rt, nsCycleCollectionTraversalCallback& aCb)
    : JSTracer(rt, NoteJSChildTracerShim, DoNotTraceWeakMaps), mCb(aCb)
  {
  }
  nsCycleCollectionTraversalCallback& mCb;
};

static void
NoteJSChild(JSTracer* aTrc, void* aThing, JSGCTraceKind aTraceKind)
{
  TraversalTracer* tracer = static_cast<TraversalTracer*>(aTrc);

  // Don't traverse non-gray objects, unless we want all traces.
  if (!xpc_IsGrayGCThing(aThing) && !tracer->mCb.WantAllTraces()) {
    return;
  }

  /*
   * This function needs to be careful to avoid stack overflow. Normally, when
   * AddToCCKind is true, the recursion terminates immediately as we just add
   * |thing| to the CC graph. So overflow is only possible when there are long
   * chains of non-AddToCCKind GC things. Currently, this can only happen via
   * shape parent pointers. The special JSTRACE_SHAPE case below handles
   * parent pointers iteratively, rather than recursively, to avoid overflow.
   */
  if (AddToCCKind(aTraceKind)) {
    if (MOZ_UNLIKELY(tracer->mCb.WantDebugInfo())) {
      // based on DumpNotify in jsapi.cpp
      if (tracer->debugPrinter()) {
        char buffer[200];
        tracer->debugPrinter()(aTrc, buffer, sizeof(buffer));
        tracer->mCb.NoteNextEdgeName(buffer);
      } else if (tracer->debugPrintIndex() != (size_t)-1) {
        char buffer[200];
        JS_snprintf(buffer, sizeof(buffer), "%s[%lu]",
                    static_cast<const char*>(tracer->debugPrintArg()),
                    tracer->debugPrintIndex());
        tracer->mCb.NoteNextEdgeName(buffer);
      } else {
        tracer->mCb.NoteNextEdgeName(
          static_cast<const char*>(tracer->debugPrintArg()));
      }
    }
    tracer->mCb.NoteJSChild(aThing);
  } else if (aTraceKind == JSTRACE_SHAPE) {
    JS_TraceShapeCycleCollectorChildren(aTrc, aThing);
  } else if (aTraceKind != JSTRACE_STRING) {
    JS_TraceChildren(aTrc, aThing, aTraceKind);
  }
}

static void
NoteJSChildTracerShim(JSTracer* aTrc, void** aThingp, JSGCTraceKind aTraceKind)
{
  NoteJSChild(aTrc, *aThingp, aTraceKind);
}

static void
NoteJSChildGrayWrapperShim(void* aData, void* aThing)
{
  TraversalTracer* trc = static_cast<TraversalTracer*>(aData);
  NoteJSChild(trc, aThing, js::GCThingTraceKind(aThing));
}

/*
 * The cycle collection participant for a Zone is intended to produce the same
 * results as if all of the gray GCthings in a zone were merged into a single
 * node, except for self-edges. This avoids the overhead of representing all
 * of the GCthings in the zone in the cycle collector graph, which should be
 * much faster if many of the GCthings in the zone are gray.
 *
 * Zone merging should not always be used, because it is a conservative
 * approximation of the true cycle collector graph that can incorrectly
 * identify some garbage objects as being live. For instance, consider two
 * cycles that pass through a zone, where one is garbage and the other is
 * live. If we merge the entire zone, the cycle collector will think that
 * both are alive.
 *
 * We don't have to worry about losing track of a garbage cycle, because any
 * such garbage cycle incorrectly identified as live must contain at least
 * one C++ to JS edge, and XPConnect will always add the C++ object to the CC
 * graph. (This is in contrast to pure C++ garbage cycles, which must always
 * be properly identified, because we clear the purple buffer during every
 * CC, which may contain the last reference to a garbage cycle.)
 */

// NB: This is only used to initialize the participant in
// CycleCollectedJSRuntime. It should never be used directly.
static const JSZoneParticipant sJSZoneCycleCollectorGlobal;

CycleCollectedJSRuntime::CycleCollectedJSRuntime(JSRuntime* aParentRuntime,
                                                 uint32_t aMaxbytes,
                                                 JSUseHelperThreads aUseHelperThreads)
  : mGCThingCycleCollectorGlobal(sGCThingCycleCollectorGlobal),
    mJSZoneCycleCollectorGlobal(sJSZoneCycleCollectorGlobal),
    mJSRuntime(nullptr),
    mJSHolders(512)
{
  mozilla::dom::InitScriptSettings();

  mJSRuntime = JS_NewRuntime(aMaxbytes, aUseHelperThreads, aParentRuntime);
  if (!mJSRuntime) {
    MOZ_CRASH();
  }

  if (!JS_AddExtraGCRootsTracer(mJSRuntime, TraceBlackJS, this)) {
    MOZ_CRASH();
  }
  JS_SetGrayGCRootsTracer(mJSRuntime, TraceGrayJS, this);
  JS_SetGCCallback(mJSRuntime, GCCallback, this);
  JS_SetContextCallback(mJSRuntime, ContextCallback, this);
  JS_SetDestroyZoneCallback(mJSRuntime, XPCStringConvert::FreeZoneCache);
  JS_SetSweepZoneCallback(mJSRuntime, XPCStringConvert::ClearZoneCache);

  nsCycleCollector_registerJSRuntime(this);
}

CycleCollectedJSRuntime::~CycleCollectedJSRuntime()
{
  MOZ_ASSERT(mJSRuntime);
  MOZ_ASSERT(!mDeferredFinalizerTable.Count());
  MOZ_ASSERT(!mDeferredSupports.Length());

  // Clear mPendingException first, since it might be cycle collected.
  mPendingException = nullptr;

  JS_DestroyRuntime(mJSRuntime);
  mJSRuntime = nullptr;
  nsCycleCollector_forgetJSRuntime();

  mozilla::dom::DestroyScriptSettings();
}

size_t
CycleCollectedJSRuntime::SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
{
  size_t n = 0;

  // nullptr for the second arg; we're not measuring anything hanging off the
  // entries in mJSHolders.
  n += mJSHolders.SizeOfExcludingThis(nullptr, aMallocSizeOf);

  return n;
}

static PLDHashOperator
UnmarkJSHolder(void* holder, nsScriptObjectTracer*& tracer, void* arg)
{
  tracer->CanSkip(holder, true);
  return PL_DHASH_NEXT;
}

void
CycleCollectedJSRuntime::UnmarkSkippableJSHolders()
{
  mJSHolders.Enumerate(UnmarkJSHolder, nullptr);
}

void
CycleCollectedJSRuntime::DescribeGCThing(bool aIsMarked, void* aThing,
                                         JSGCTraceKind aTraceKind,
                                         nsCycleCollectionTraversalCallback& aCb) const
{
  if (!aCb.WantDebugInfo()) {
    aCb.DescribeGCedNode(aIsMarked, "JS Object");
    return;
  }

  char name[72];
  uint64_t compartmentAddress = 0;
  if (aTraceKind == JSTRACE_OBJECT) {
    JSObject* obj = static_cast<JSObject*>(aThing);
    compartmentAddress = (uint64_t)js::GetObjectCompartment(obj);
    const js::Class* clasp = js::GetObjectClass(obj);

    // Give the subclass a chance to do something.
    if (DescribeCustomObjects(obj, clasp, name)) {
      // Nothing else to do!
    } else if (js::IsFunctionObject(obj)) {
      JSFunction* fun = JS_GetObjectFunction(obj);
      JSString* str = JS_GetFunctionDisplayId(fun);
      if (str) {
        NS_ConvertUTF16toUTF8 fname(JS_GetInternedStringChars(str));
        JS_snprintf(name, sizeof(name),
                    "JS Object (Function - %s)", fname.get());
      } else {
        JS_snprintf(name, sizeof(name), "JS Object (Function)");
      }
    } else {
      JS_snprintf(name, sizeof(name), "JS Object (%s)", clasp->name);
    }
  } else {
    static const char trace_types[][11] = {
      "Object",
      "String",
      "Script",
      "LazyScript",
      "IonCode",
      "Shape",
      "BaseShape",
      "TypeObject",
    };
    static_assert(MOZ_ARRAY_LENGTH(trace_types) == JSTRACE_LAST + 1,
                  "JSTRACE_LAST enum must match trace_types count.");
    JS_snprintf(name, sizeof(name), "JS %s", trace_types[aTraceKind]);
  }

  // Disable printing global for objects while we figure out ObjShrink fallout.
  aCb.DescribeGCedNode(aIsMarked, name, compartmentAddress);
}

void
CycleCollectedJSRuntime::NoteGCThingJSChildren(void* aThing,
                                               JSGCTraceKind aTraceKind,
                                               nsCycleCollectionTraversalCallback& aCb) const
{
  MOZ_ASSERT(mJSRuntime);
  TraversalTracer trc(mJSRuntime, aCb);
  JS_TraceChildren(&trc, aThing, aTraceKind);
}

void
CycleCollectedJSRuntime::NoteGCThingXPCOMChildren(const js::Class* aClasp,
                                                  JSObject* aObj,
                                                  nsCycleCollectionTraversalCallback& aCb) const
{
  MOZ_ASSERT(aClasp);
  MOZ_ASSERT(aClasp == js::GetObjectClass(aObj));

  if (NoteCustomGCThingXPCOMChildren(aClasp, aObj, aCb)) {
    // Nothing else to do!
    return;
  }
  // XXX This test does seem fragile; we should probably whitelist classes
  // that do hold a strong reference, but that might not be possible.
  else if (aClasp->flags & JSCLASS_HAS_PRIVATE &&
           aClasp->flags & JSCLASS_PRIVATE_IS_NSISUPPORTS) {
    NS_CYCLE_COLLECTION_NOTE_EDGE_NAME(aCb, "js::GetObjectPrivate(obj)");
    aCb.NoteXPCOMChild(static_cast<nsISupports*>(js::GetObjectPrivate(aObj)));
  } else {
    const DOMClass* domClass = GetDOMClass(aObj);
    if (domClass) {
      NS_CYCLE_COLLECTION_NOTE_EDGE_NAME(aCb, "UnwrapDOMObject(obj)");
      if (domClass->mDOMObjectIsISupports) {
        aCb.NoteXPCOMChild(UnwrapDOMObject<nsISupports>(aObj));
      } else if (domClass->mParticipant) {
        aCb.NoteNativeChild(UnwrapDOMObject<void>(aObj),
                            domClass->mParticipant);
      }
    }
  }
}

void
CycleCollectedJSRuntime::TraverseGCThing(TraverseSelect aTs, void* aThing,
                                         JSGCTraceKind aTraceKind,
                                         nsCycleCollectionTraversalCallback& aCb)
{
  MOZ_ASSERT(aTraceKind == js::GCThingTraceKind(aThing));
  bool isMarkedGray = xpc_IsGrayGCThing(aThing);

  if (aTs == TRAVERSE_FULL) {
    DescribeGCThing(!isMarkedGray, aThing, aTraceKind, aCb);
  }

  // If this object is alive, then all of its children are alive.
  // For JS objects, the black-gray invariant ensures the children are also
  // marked black. For C++ objects, the ref count from this object will keep
  // them alive. Thus we don't need to trace our children, unless we are
  // debugging using WantAllTraces.
  if (!isMarkedGray && !aCb.WantAllTraces()) {
    return;
  }

  if (aTs == TRAVERSE_FULL) {
    NoteGCThingJSChildren(aThing, aTraceKind, aCb);
  }

  if (aTraceKind == JSTRACE_OBJECT) {
    JSObject* obj = static_cast<JSObject*>(aThing);
    NoteGCThingXPCOMChildren(js::GetObjectClass(obj), obj, aCb);
  }
}

struct TraverseObjectShimClosure {
  nsCycleCollectionTraversalCallback& cb;
  CycleCollectedJSRuntime* self;
};

void
CycleCollectedJSRuntime::TraverseZone(JS::Zone* aZone,
                                      nsCycleCollectionTraversalCallback& aCb)
{
  /*
   * We treat the zone as being gray. We handle non-gray GCthings in the
   * zone by not reporting their children to the CC. The black-gray invariant
   * ensures that any JS children will also be non-gray, and thus don't need
   * to be added to the graph. For C++ children, not representing the edge
   * from the non-gray JS GCthings to the C++ object will keep the child
   * alive.
   *
   * We don't allow zone merging in a WantAllTraces CC, because then these
   * assumptions don't hold.
   */
  aCb.DescribeGCedNode(false, "JS Zone");

  /*
   * Every JS child of everything in the zone is either in the zone
   * or is a cross-compartment wrapper. In the former case, we don't need to
   * represent these edges in the CC graph because JS objects are not ref
   * counted. In the latter case, the JS engine keeps a map of these wrappers,
   * which we iterate over. Edges between compartments in the same zone will
   * add unnecessary loop edges to the graph (bug 842137).
   */
  TraversalTracer trc(mJSRuntime, aCb);
  js::VisitGrayWrapperTargets(aZone, NoteJSChildGrayWrapperShim, &trc);

  /*
   * To find C++ children of things in the zone, we scan every JS Object in
   * the zone. Only JS Objects can have C++ children.
   */
  TraverseObjectShimClosure closure = { aCb, this };
  js::IterateGrayObjects(aZone, TraverseObjectShim, &closure);
}

/* static */ void
CycleCollectedJSRuntime::TraverseObjectShim(void* aData, void* aThing)
{
  TraverseObjectShimClosure* closure =
    static_cast<TraverseObjectShimClosure*>(aData);

  MOZ_ASSERT(js::GCThingTraceKind(aThing) == JSTRACE_OBJECT);
  closure->self->TraverseGCThing(CycleCollectedJSRuntime::TRAVERSE_CPP, aThing,
                                 JSTRACE_OBJECT, closure->cb);
}

void
CycleCollectedJSRuntime::TraverseNativeRoots(nsCycleCollectionNoteRootCallback& aCb)
{
  // NB: This is here just to preserve the existing XPConnect order. I doubt it
  // would hurt to do this after the JS holders.
  TraverseAdditionalNativeRoots(aCb);

  Closure closure(&aCb);
  mJSHolders.Enumerate(NoteJSHolder, &closure);
}

/* static */ void
CycleCollectedJSRuntime::TraceBlackJS(JSTracer* aTracer, void* aData)
{
  CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData);

  self->TraceNativeBlackRoots(aTracer);
}

/* static */ void
CycleCollectedJSRuntime::TraceGrayJS(JSTracer* aTracer, void* aData)
{
  CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData);

  // Mark these roots as gray so the CC can walk them later.
  self->TraceNativeGrayRoots(aTracer);
}

/* static */ void
CycleCollectedJSRuntime::GCCallback(JSRuntime* aRuntime,
                                    JSGCStatus aStatus,
                                    void* aData)
{
  CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData);

  MOZ_ASSERT(aRuntime == self->Runtime());

  self->OnGC(aStatus);
}

/* static */ bool
CycleCollectedJSRuntime::ContextCallback(JSContext* aContext,
                                         unsigned aOperation,
                                         void* aData)
{
  CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData);

  MOZ_ASSERT(JS_GetRuntime(aContext) == self->Runtime());

  return self->CustomContextCallback(aContext, aOperation);
}

struct JsGcTracer : public TraceCallbacks
{
  virtual void Trace(JS::Heap<JS::Value>* p, const char* name,
                     void* closure) const MOZ_OVERRIDE
  {
    JS_CallHeapValueTracer(static_cast<JSTracer*>(closure), p, name);
  }
  virtual void Trace(JS::Heap<jsid>* p, const char* name,
                     void* closure) const MOZ_OVERRIDE
  {
    JS_CallHeapIdTracer(static_cast<JSTracer*>(closure), p, name);
  }
  virtual void Trace(JS::Heap<JSObject*>* p, const char* name,
                     void* closure) const MOZ_OVERRIDE
  {
    JS_CallHeapObjectTracer(static_cast<JSTracer*>(closure), p, name);
  }
  virtual void Trace(JS::TenuredHeap<JSObject*>* p, const char* name,
                     void* closure) const MOZ_OVERRIDE
  {
    JS_CallTenuredObjectTracer(static_cast<JSTracer*>(closure), p, name);
  }
  virtual void Trace(JS::Heap<JSString*>* p, const char* name,
                     void* closure) const MOZ_OVERRIDE
  {
    JS_CallHeapStringTracer(static_cast<JSTracer*>(closure), p, name);
  }
  virtual void Trace(JS::Heap<JSScript*>* p, const char* name,
                     void* closure) const MOZ_OVERRIDE
  {
    JS_CallHeapScriptTracer(static_cast<JSTracer*>(closure), p, name);
  }
  virtual void Trace(JS::Heap<JSFunction*>* p, const char* name,
                     void* closure) const MOZ_OVERRIDE
  {
    JS_CallHeapFunctionTracer(static_cast<JSTracer*>(closure), p, name);
  }
};

static PLDHashOperator
TraceJSHolder(void* aHolder, nsScriptObjectTracer*& aTracer, void* aArg)
{
  aTracer->Trace(aHolder, JsGcTracer(), aArg);

  return PL_DHASH_NEXT;
}

void
CycleCollectedJSRuntime::TraceNativeGrayRoots(JSTracer* aTracer)
{
  // NB: This is here just to preserve the existing XPConnect order. I doubt it
  // would hurt to do this after the JS holders.
  TraceAdditionalNativeGrayRoots(aTracer);

  mJSHolders.Enumerate(TraceJSHolder, aTracer);
}

void
CycleCollectedJSRuntime::AddJSHolder(void* aHolder, nsScriptObjectTracer* aTracer)
{
  mJSHolders.Put(aHolder, aTracer);
}

struct ClearJSHolder : TraceCallbacks
{
  virtual void Trace(JS::Heap<JS::Value>* aPtr, const char*, void*) const MOZ_OVERRIDE
  {
    *aPtr = JSVAL_VOID;
  }

  virtual void Trace(JS::Heap<jsid>* aPtr, const char*, void*) const MOZ_OVERRIDE
  {
    *aPtr = JSID_VOID;
  }

  virtual void Trace(JS::Heap<JSObject*>* aPtr, const char*, void*) const MOZ_OVERRIDE
  {
    *aPtr = nullptr;
  }

  virtual void Trace(JS::TenuredHeap<JSObject*>* aPtr, const char*, void*) const MOZ_OVERRIDE
  {
    *aPtr = nullptr;
  }

  virtual void Trace(JS::Heap<JSString*>* aPtr, const char*, void*) const MOZ_OVERRIDE
  {
    *aPtr = nullptr;
  }

  virtual void Trace(JS::Heap<JSScript*>* aPtr, const char*, void*) const MOZ_OVERRIDE
  {
    *aPtr = nullptr;
  }

  virtual void Trace(JS::Heap<JSFunction*>* aPtr, const char*, void*) const MOZ_OVERRIDE
  {
    *aPtr = nullptr;
  }
};

void
CycleCollectedJSRuntime::RemoveJSHolder(void* aHolder)
{
  nsScriptObjectTracer* tracer = mJSHolders.Get(aHolder);
  if (!tracer) {
    return;
  }
  tracer->Trace(aHolder, ClearJSHolder(), nullptr);
  mJSHolders.Remove(aHolder);
}

#ifdef DEBUG
bool
CycleCollectedJSRuntime::IsJSHolder(void* aHolder)
{
  return mJSHolders.Get(aHolder, nullptr);
}

static void
AssertNoGcThing(void* aGCThing, const char* aName, void* aClosure)
{
  MOZ_ASSERT(!aGCThing);
}

void
CycleCollectedJSRuntime::AssertNoObjectsToTrace(void* aPossibleJSHolder)
{
  nsScriptObjectTracer* tracer = mJSHolders.Get(aPossibleJSHolder);
  if (tracer) {
    tracer->Trace(aPossibleJSHolder, TraceCallbackFunc(AssertNoGcThing), nullptr);
  }
}
#endif

already_AddRefed<nsIException>
CycleCollectedJSRuntime::GetPendingException() const
{
  nsCOMPtr<nsIException> out = mPendingException;
  return out.forget();
}

void
CycleCollectedJSRuntime::SetPendingException(nsIException* aException)
{
  mPendingException = aException;
}

nsCycleCollectionParticipant*
CycleCollectedJSRuntime::GCThingParticipant()
{
  return &mGCThingCycleCollectorGlobal;
}

nsCycleCollectionParticipant*
CycleCollectedJSRuntime::ZoneParticipant()
{
  return &mJSZoneCycleCollectorGlobal;
}

nsresult
CycleCollectedJSRuntime::TraverseRoots(nsCycleCollectionNoteRootCallback& aCb)
{
  TraverseNativeRoots(aCb);

  NoteWeakMapsTracer trc(mJSRuntime, TraceWeakMapping, aCb);
  js::TraceWeakMaps(&trc);

  return NS_OK;
}

/*
 * Return true if there exists a JSContext with a default global whose current
 * inner is gray. The intent is to look for JS Object windows. We don't merge
 * system compartments, so we don't use them to trigger merging CCs.
 */
bool
CycleCollectedJSRuntime::UsefulToMergeZones() const
{
  if (!NS_IsMainThread()) {
    return false;
  }

  JSContext* iter = nullptr;
  JSContext* cx;
  JSAutoRequest ar(nsContentUtils::GetSafeJSContext());
  while ((cx = JS_ContextIterator(mJSRuntime, &iter))) {
    // Skip anything without an nsIScriptContext.
    nsIScriptContext* scx = GetScriptContextFromJSContext(cx);
    JS::RootedObject obj(cx, scx ? scx->GetWindowProxyPreserveColor() : nullptr);
    if (!obj) {
      continue;
    }
    MOZ_ASSERT(js::IsOuterObject(obj));
    // Grab the inner from the outer.
    obj = JS_ObjectToInnerObject(cx, obj);
    MOZ_ASSERT(!js::GetObjectParent(obj));
    if (JS::GCThingIsMarkedGray(obj) &&
        !js::IsSystemCompartment(js::GetObjectCompartment(obj))) {
      return true;
    }
  }
  return false;
}

void
CycleCollectedJSRuntime::FixWeakMappingGrayBits() const
{
  FixWeakMappingGrayBitsTracer fixer(mJSRuntime);
  fixer.FixAll();
}

bool
CycleCollectedJSRuntime::NeedCollect() const
{
  return !js::AreGCGrayBitsValid(mJSRuntime);
}

void
CycleCollectedJSRuntime::Collect(uint32_t aReason) const
{
  MOZ_ASSERT(aReason < JS::gcreason::NUM_REASONS);
  JS::gcreason::Reason gcreason = static_cast<JS::gcreason::Reason>(aReason);

  JS::PrepareForFullGC(mJSRuntime);
  JS::GCForReason(mJSRuntime, gcreason);
}

void
CycleCollectedJSRuntime::DeferredFinalize(DeferredFinalizeAppendFunction aAppendFunc,
                                          DeferredFinalizeFunction aFunc,
                                          void* aThing)
{
  void* thingArray = nullptr;
  bool hadThingArray = mDeferredFinalizerTable.Get(aFunc, &thingArray);

  thingArray = aAppendFunc(thingArray, aThing);
  if (!hadThingArray) {
    mDeferredFinalizerTable.Put(aFunc, thingArray);
  }
}

void
CycleCollectedJSRuntime::DeferredFinalize(nsISupports* aSupports)
{
  mDeferredSupports.AppendElement(aSupports);
}

void
CycleCollectedJSRuntime::DumpJSHeap(FILE* file)
{
  js::DumpHeapComplete(Runtime(), file, js::CollectNurseryBeforeDump);
}


bool
ReleaseSliceNow(uint32_t aSlice, void* aData)
{
  MOZ_ASSERT(aSlice > 0, "nonsensical/useless call with slice == 0");
  nsTArray<nsISupports*>* items = static_cast<nsTArray<nsISupports*>*>(aData);

  uint32_t length = items->Length();
  aSlice = std::min(aSlice, length);
  for (uint32_t i = length; i > length - aSlice; --i) {
    // Remove (and NS_RELEASE) the last entry in "items":
    uint32_t lastItemIdx = i - 1;
    nsISupports* wrapper = items->ElementAt(lastItemIdx);
    items->RemoveElementAt(lastItemIdx);
    NS_IF_RELEASE(wrapper);
  }

  return items->IsEmpty();
}

/* static */ PLDHashOperator
IncrementalFinalizeRunnable::DeferredFinalizerEnumerator(DeferredFinalizeFunction& aFunction,
                                                         void*& aData,
                                                         void* aClosure)
{
  DeferredFinalizeArray* array = static_cast<DeferredFinalizeArray*>(aClosure);

  DeferredFinalizeFunctionHolder* function = array->AppendElement();
  function->run = aFunction;
  function->data = aData;

  return PL_DHASH_REMOVE;
}

IncrementalFinalizeRunnable::IncrementalFinalizeRunnable(CycleCollectedJSRuntime* aRt,
                                                         nsTArray<nsISupports*>& aSupports,
                                                         DeferredFinalizerTable& aFinalizers)
  : mRuntime(aRt),
    mFinalizeFunctionToRun(0)
{
  this->mSupports.SwapElements(aSupports);
  DeferredFinalizeFunctionHolder* function =
    mDeferredFinalizeFunctions.AppendElement();
  function->run = ReleaseSliceNow;
  function->data = &this->mSupports;

  // Enumerate the hashtable into our array.
  aFinalizers.Enumerate(DeferredFinalizerEnumerator, &mDeferredFinalizeFunctions);
}

IncrementalFinalizeRunnable::~IncrementalFinalizeRunnable()
{
  MOZ_ASSERT(this != mRuntime->mFinalizeRunnable);
}

void
IncrementalFinalizeRunnable::ReleaseNow(bool aLimited)
{
  //MOZ_ASSERT(NS_IsMainThread());
  MOZ_ASSERT(mDeferredFinalizeFunctions.Length() != 0,
             "We should have at least ReleaseSliceNow to run");
  MOZ_ASSERT(mFinalizeFunctionToRun < mDeferredFinalizeFunctions.Length(),
             "No more finalizers to run?");

  TimeDuration sliceTime = TimeDuration::FromMilliseconds(SliceMillis);
  TimeStamp started = TimeStamp::Now();
  bool timeout = false;
  do {
    const DeferredFinalizeFunctionHolder& function =
      mDeferredFinalizeFunctions[mFinalizeFunctionToRun];
    if (aLimited) {
      bool done = false;
      while (!timeout && !done) {
        /*
         * We don't want to read the clock too often, so we try to
         * release slices of 100 items.
         */
        done = function.run(100, function.data);
        timeout = TimeStamp::Now() - started >= sliceTime;
      }
      if (done) {
        ++mFinalizeFunctionToRun;
      }
      if (timeout) {
        break;
      }
    } else {
      function.run(UINT32_MAX, function.data);
      ++mFinalizeFunctionToRun;
    }
  } while (mFinalizeFunctionToRun < mDeferredFinalizeFunctions.Length());

  if (mFinalizeFunctionToRun == mDeferredFinalizeFunctions.Length()) {
    MOZ_ASSERT(mRuntime->mFinalizeRunnable == this);
    mDeferredFinalizeFunctions.Clear();
    // NB: This may delete this!
    mRuntime->mFinalizeRunnable = nullptr;
  }
}

NS_IMETHODIMP
IncrementalFinalizeRunnable::Run()
{
  if (mRuntime->mFinalizeRunnable != this) {
    /* These items were already processed synchronously in JSGC_END.
     */
    MOZ_ASSERT(!mSupports.Length());
    MOZ_ASSERT(!mDeferredFinalizeFunctions.Length());
    return NS_OK;
  }

  ReleaseNow(true);

  if (mDeferredFinalizeFunctions.Length()) {
    nsresult rv = NS_DispatchToCurrentThread(this);
    if (NS_FAILED(rv)) {
      ReleaseNow(false);
    }
  }

  return NS_OK;
}

void
CycleCollectedJSRuntime::FinalizeDeferredThings(DeferredFinalizeType aType)
{
  MOZ_ASSERT(!mFinalizeRunnable);
  mFinalizeRunnable = new IncrementalFinalizeRunnable(this,
                                                      mDeferredSupports,
                                                      mDeferredFinalizerTable);

  // Everything should be gone now.
  MOZ_ASSERT(!mDeferredSupports.Length());
  MOZ_ASSERT(!mDeferredFinalizerTable.Count());

  if (aType == FinalizeIncrementally) {
    NS_DispatchToCurrentThread(mFinalizeRunnable);
  } else {
    mFinalizeRunnable->ReleaseNow(false);
    MOZ_ASSERT(!mFinalizeRunnable);
  }
}

void
CycleCollectedJSRuntime::OnGC(JSGCStatus aStatus)
{
  switch (aStatus) {
    case JSGC_BEGIN:
      nsCycleCollector_prepareForGarbageCollection();
      break;
    case JSGC_END:
    {
      /*
       * If the previous GC created a runnable to finalize objects
       * incrementally, and if it hasn't finished yet, finish it now. We
       * don't want these to build up. We also don't want to allow any
       * existing incremental finalize runnables to run after a
       * non-incremental GC, since they are often used to detect leaks.
       */
      if (mFinalizeRunnable) {
        mFinalizeRunnable->ReleaseNow(false);
      }

      // Do any deferred finalization of native objects.
      FinalizeDeferredThings(JS::WasIncrementalGC(mJSRuntime) ? FinalizeIncrementally
                                                              : FinalizeNow);
      break;
    }
    default:
      MOZ_CRASH();
  }

  CustomGCCallback(aStatus);
}
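
// Illustrative sketch (not part of this file): the contract between the two
// function pointers passed to DeferredFinalize, as exercised above by
// ReleaseSliceNow and IncrementalFinalizeRunnable::ReleaseNow. The names
// below are hypothetical; only the signatures are taken from this file's
// usage (aAppendFunc(thingArray, aThing) returns the possibly-new array, and
// function.run(aSlice, data) returns true once everything is finalized):
//
//   static void*
//   AppendMyThing(void* aArray, void* aThing)
//   {
//     // The first call for a given run function gets aArray == nullptr.
//     nsTArray<MyThing*>* array = static_cast<nsTArray<MyThing*>*>(aArray);
//     if (!array) {
//       array = new nsTArray<MyThing*>();
//     }
//     array->AppendElement(static_cast<MyThing*>(aThing));
//     return array;
//   }
//
//   static bool
//   FinalizeMyThings(uint32_t aSlice, void* aData)
//   {
//     nsTArray<MyThing*>* array = static_cast<nsTArray<MyThing*>*>(aData);
//     uint32_t count = std::min(aSlice, array->Length());
//     for (uint32_t i = 0; i < count; ++i) {
//       delete (*array)[array->Length() - 1];
//       array->RemoveElementAt(array->Length() - 1);
//     }
//     if (array->IsEmpty()) {
//       delete array;   // Done; the table entry was already removed.
//       return true;
//     }
//     return false;     // More work left; run() will be called again.
//   }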