1.1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000 1.2 +++ b/xpcom/base/CycleCollectedJSRuntime.cpp Wed Dec 31 06:09:35 2014 +0100 1.3 @@ -0,0 +1,1165 @@ 1.4 +/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ 1.5 +/* vim: set ts=8 sts=2 et sw=2 tw=80: */ 1.6 +/* This Source Code Form is subject to the terms of the Mozilla Public 1.7 + * License, v. 2.0. If a copy of the MPL was not distributed with this 1.8 + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ 1.9 + 1.10 +// We're dividing JS objects into 3 categories: 1.11 +// 1.12 +// 1. "real" roots, held by the JS engine itself or rooted through the root 1.13 +// and lock JS APIs. Roots from this category are considered black in the 1.14 +// cycle collector, any cycle they participate in is uncollectable. 1.15 +// 1.16 +// 2. certain roots held by C++ objects that are guaranteed to be alive. 1.17 +// Roots from this category are considered black in the cycle collector, 1.18 +// and any cycle they participate in is uncollectable. These roots are 1.19 +// traced from TraceNativeBlackRoots. 1.20 +// 1.21 +// 3. all other roots held by C++ objects that participate in cycle 1.22 +// collection, held by us (see TraceNativeGrayRoots). Roots from this 1.23 +// category are considered grey in the cycle collector; whether or not 1.24 +// they are collected depends on the objects that hold them. 1.25 +// 1.26 +// Note that if a root is in multiple categories the fact that it is in 1.27 +// category 1 or 2 that takes precedence, so it will be considered black. 1.28 +// 1.29 +// During garbage collection we switch to an additional mark color (gray) 1.30 +// when tracing inside TraceNativeGrayRoots. This allows us to walk those 1.31 +// roots later on and add all objects reachable only from them to the 1.32 +// cycle collector. 1.33 +// 1.34 +// Phases: 1.35 +// 1.36 +// 1. marking of the roots in category 1 by having the JS GC do its marking 1.37 +// 2. 
marking of the roots in category 2 by having the JS GC call us back 1.38 +// (via JS_SetExtraGCRootsTracer) and running TraceNativeBlackRoots 1.39 +// 3. marking of the roots in category 3 by TraceNativeGrayRoots using an 1.40 +// additional color (gray). 1.41 +// 4. end of GC, GC can sweep its heap 1.42 +// 1.43 +// At some later point, when the cycle collector runs: 1.44 +// 1.45 +// 5. walk gray objects and add them to the cycle collector, cycle collect 1.46 +// 1.47 +// JS objects that are part of cycles the cycle collector breaks will be 1.48 +// collected by the next JS GC. 1.49 +// 1.50 +// If WantAllTraces() is false the cycle collector will not traverse roots 1.51 +// from category 1 or any JS objects held by them. Any JS objects they hold 1.52 +// will already be marked by the JS GC and will thus be colored black 1.53 +// themselves. Any C++ objects they hold will have a missing (untraversed) 1.54 +// edge from the JS object to the C++ object and so it will be marked black 1.55 +// too. This decreases the number of objects that the cycle collector has to 1.56 +// deal with. 1.57 +// To improve debugging, if WantAllTraces() is true all JS objects are 1.58 +// traversed. 
1.59 + 1.60 +#include "mozilla/CycleCollectedJSRuntime.h" 1.61 +#include <algorithm> 1.62 +#include "mozilla/ArrayUtils.h" 1.63 +#include "mozilla/MemoryReporting.h" 1.64 +#include "mozilla/dom/BindingUtils.h" 1.65 +#include "mozilla/dom/DOMJSClass.h" 1.66 +#include "mozilla/dom/ScriptSettings.h" 1.67 +#include "jsprf.h" 1.68 +#include "nsCycleCollectionNoteRootCallback.h" 1.69 +#include "nsCycleCollectionParticipant.h" 1.70 +#include "nsCycleCollector.h" 1.71 +#include "nsDOMJSUtils.h" 1.72 +#include "nsIException.h" 1.73 +#include "nsThreadUtils.h" 1.74 +#include "xpcpublic.h" 1.75 + 1.76 +using namespace mozilla; 1.77 +using namespace mozilla::dom; 1.78 + 1.79 +namespace mozilla { 1.80 + 1.81 +struct DeferredFinalizeFunctionHolder 1.82 +{ 1.83 + DeferredFinalizeFunction run; 1.84 + void *data; 1.85 +}; 1.86 + 1.87 +class IncrementalFinalizeRunnable : public nsRunnable 1.88 +{ 1.89 + typedef nsAutoTArray<DeferredFinalizeFunctionHolder, 16> DeferredFinalizeArray; 1.90 + typedef CycleCollectedJSRuntime::DeferredFinalizerTable DeferredFinalizerTable; 1.91 + 1.92 + CycleCollectedJSRuntime* mRuntime; 1.93 + nsTArray<nsISupports*> mSupports; 1.94 + DeferredFinalizeArray mDeferredFinalizeFunctions; 1.95 + uint32_t mFinalizeFunctionToRun; 1.96 + 1.97 + static const PRTime SliceMillis = 10; /* ms */ 1.98 + 1.99 + static PLDHashOperator 1.100 + DeferredFinalizerEnumerator(DeferredFinalizeFunction& aFunction, 1.101 + void*& aData, 1.102 + void* aClosure); 1.103 + 1.104 +public: 1.105 + IncrementalFinalizeRunnable(CycleCollectedJSRuntime* aRt, 1.106 + nsTArray<nsISupports*>& mSupports, 1.107 + DeferredFinalizerTable& aFinalizerTable); 1.108 + virtual ~IncrementalFinalizeRunnable(); 1.109 + 1.110 + void ReleaseNow(bool aLimited); 1.111 + 1.112 + NS_DECL_NSIRUNNABLE 1.113 +}; 1.114 + 1.115 +} // namespace mozilla 1.116 + 1.117 +inline bool 1.118 +AddToCCKind(JSGCTraceKind kind) 1.119 +{ 1.120 + return kind == JSTRACE_OBJECT || kind == JSTRACE_SCRIPT; 1.121 +} 1.122 + 1.123 
static void
TraceWeakMappingChild(JSTracer* trc, void** thingp, JSGCTraceKind kind);

// Tracer used to report the children of a weak-map value to the cycle
// collector as weak-mapping edges keyed on mMap/mKey/mKeyDelegate.
struct NoteWeakMapChildrenTracer : public JSTracer
{
  NoteWeakMapChildrenTracer(JSRuntime *rt, nsCycleCollectionNoteRootCallback& cb)
    : JSTracer(rt, TraceWeakMappingChild), mCb(cb)
  {
  }
  nsCycleCollectionNoteRootCallback& mCb;
  // Set when at least one child was reported to the CC for the current entry.
  bool mTracedAny;
  // Context for the weak-map entry currently being traced.
  JSObject* mMap;
  void* mKey;
  void* mKeyDelegate;
};

// Trace callback for NoteWeakMapChildrenTracer: notes CC-representable
// children as weak mappings, recurses through everything else.
static void
TraceWeakMappingChild(JSTracer* trc, void** thingp, JSGCTraceKind kind)
{
  MOZ_ASSERT(trc->callback == TraceWeakMappingChild);
  void* thing = *thingp;
  NoteWeakMapChildrenTracer* tracer =
    static_cast<NoteWeakMapChildrenTracer*>(trc);

  // Strings can't participate in cycles; skip them.
  if (kind == JSTRACE_STRING) {
    return;
  }

  // Non-gray things are already known-live; only report them in
  // WantAllTraces (debugging) mode.
  if (!xpc_IsGrayGCThing(thing) && !tracer->mCb.WantAllTraces()) {
    return;
  }

  if (AddToCCKind(kind)) {
    tracer->mCb.NoteWeakMapping(tracer->mMap, tracer->mKey, tracer->mKeyDelegate, thing);
    tracer->mTracedAny = true;
  } else {
    // Not representable in the CC graph; keep tracing through it.
    JS_TraceChildren(trc, thing, kind);
  }
}

// Weak-map tracer that forwards each (map, key, value) entry to the cycle
// collector via the callback passed to the constructor.
struct NoteWeakMapsTracer : public js::WeakMapTracer
{
  NoteWeakMapsTracer(JSRuntime* rt, js::WeakMapTraceCallback cb,
                     nsCycleCollectionNoteRootCallback& cccb)
    : js::WeakMapTracer(rt, cb), mCb(cccb), mChildTracer(rt, cccb)
  {
  }
  nsCycleCollectionNoteRootCallback& mCb;
  NoteWeakMapChildrenTracer mChildTracer;
};

// Reports one weak-map entry (m, k -> v) to the cycle collector, either
// directly (when the value is CC-representable) or by tracing the value's
// children with mChildTracer.
static void
TraceWeakMapping(js::WeakMapTracer* trc, JSObject* m,
                 void* k, JSGCTraceKind kkind,
                 void* v, JSGCTraceKind vkind)
{
  MOZ_ASSERT(trc->callback == TraceWeakMapping);
  NoteWeakMapsTracer* tracer = static_cast<NoteWeakMapsTracer* >(trc);

  // If nothing that could be held alive by this entry is marked gray, return.
  if ((!k || !xpc_IsGrayGCThing(k)) && MOZ_LIKELY(!tracer->mCb.WantAllTraces())) {
    if (!v || !xpc_IsGrayGCThing(v) || vkind == JSTRACE_STRING) {
      return;
    }
  }

  // The cycle collector can only properly reason about weak maps if it can
  // reason about the liveness of their keys, which in turn requires that
  // the key can be represented in the cycle collector graph. All existing
  // uses of weak maps use either objects or scripts as keys, which are okay.
  MOZ_ASSERT(AddToCCKind(kkind));

  // As an emergency fallback for non-debug builds, if the key is not
  // representable in the cycle collector graph, we treat it as marked. This
  // can cause leaks, but is preferable to ignoring the binding, which could
  // cause the cycle collector to free live objects.
  if (!AddToCCKind(kkind)) {
    k = nullptr;
  }

  JSObject* kdelegate = nullptr;
  if (k && kkind == JSTRACE_OBJECT) {
    kdelegate = js::GetWeakmapKeyDelegate((JSObject*)k);
  }

  if (AddToCCKind(vkind)) {
    tracer->mCb.NoteWeakMapping(m, k, kdelegate, v);
  } else {
    // Value isn't CC-representable: trace its children instead, attributing
    // each to this map entry.
    tracer->mChildTracer.mTracedAny = false;
    tracer->mChildTracer.mMap = m;
    tracer->mChildTracer.mKey = k;
    tracer->mChildTracer.mKeyDelegate = kdelegate;

    if (v && vkind != JSTRACE_STRING) {
      JS_TraceChildren(&tracer->mChildTracer, v, vkind);
    }

    // The delegate could hold alive the key, so report something to the CC
    // if we haven't already.
    if (!tracer->mChildTracer.mTracedAny && k && xpc_IsGrayGCThing(k) && kdelegate) {
      tracer->mCb.NoteWeakMapping(m, k, kdelegate, nullptr);
    }
  }
}

// This is based on the logic in TraceWeakMapping.
// Iterates all weak maps, unmarking gray keys whose delegates are black and
// gray values whose key/map are black, repeating until a fixed point is
// reached (FixAll). This restores the black-gray invariant for weak maps
// outside of GC.
struct FixWeakMappingGrayBitsTracer : public js::WeakMapTracer
{
  FixWeakMappingGrayBitsTracer(JSRuntime* rt)
    : js::WeakMapTracer(rt, FixWeakMappingGrayBits)
  {}

  // Repeat the weak-map walk until a pass unmarks nothing, since unmarking
  // one entry can make another entry's key/map black.
  void
  FixAll()
  {
    do {
      mAnyMarked = false;
      js::TraceWeakMaps(this);
    } while (mAnyMarked);
  }

private:

  static void
  FixWeakMappingGrayBits(js::WeakMapTracer* trc, JSObject* m,
                         void* k, JSGCTraceKind kkind,
                         void* v, JSGCTraceKind vkind)
  {
    MOZ_ASSERT(!JS::IsIncrementalGCInProgress(trc->runtime),
               "Don't call FixWeakMappingGrayBits during a GC.");

    FixWeakMappingGrayBitsTracer* tracer = static_cast<FixWeakMappingGrayBitsTracer*>(trc);

    // If nothing that could be held alive by this entry is marked gray, return.
    bool delegateMightNeedMarking = k && xpc_IsGrayGCThing(k);
    bool valueMightNeedMarking = v && xpc_IsGrayGCThing(v) && vkind != JSTRACE_STRING;
    if (!delegateMightNeedMarking && !valueMightNeedMarking) {
      return;
    }

    // Keys not representable in the CC graph are treated as black (see
    // TraceWeakMapping for the rationale).
    if (!AddToCCKind(kkind)) {
      k = nullptr;
    }

    // A black delegate keeps the key alive: unmark the gray key.
    if (delegateMightNeedMarking && kkind == JSTRACE_OBJECT) {
      JSObject* kdelegate = js::GetWeakmapKeyDelegate((JSObject*)k);
      if (kdelegate && !xpc_IsGrayGCThing(kdelegate)) {
        if (JS::UnmarkGrayGCThingRecursively(k, JSTRACE_OBJECT)) {
          tracer->mAnyMarked = true;
        }
      }
    }

    // A black (or absent) key and map keep the value alive: unmark the
    // gray value. Shapes are skipped here.
    if (v && xpc_IsGrayGCThing(v) &&
        (!k || !xpc_IsGrayGCThing(k)) &&
        (!m || !xpc_IsGrayGCThing(m)) &&
        vkind != JSTRACE_SHAPE) {
      if (JS::UnmarkGrayGCThingRecursively(v, vkind)) {
        tracer->mAnyMarked = true;
      }
    }
  }

  // Set when a pass unmarked at least one gray thing; drives FixAll's loop.
  bool mAnyMarked;
};

// Shared state for the JS-holder enumeration below.
struct Closure
{
  Closure(nsCycleCollectionNoteRootCallback* aCb)
    : mCycleCollectionEnabled(true), mCb(aCb)
  {
  }

  bool mCycleCollectionEnabled;
  nsCycleCollectionNoteRootCallback* mCb;
};

// Trace callback: flips mCycleCollectionEnabled to true as soon as one traced
// thing is gray and CC-representable; later calls early-return once set.
static void
CheckParticipatesInCycleCollection(void* aThing, const char* aName, void* aClosure)
{
  Closure* closure = static_cast<Closure*>(aClosure);

  if (closure->mCycleCollectionEnabled) {
    return;
  }

  if (AddToCCKind(js::GCThingTraceKind(aThing)) &&
      xpc_IsGrayGCThing(aThing))
  {
    closure->mCycleCollectionEnabled = true;
  }
}

// Enumerator over mJSHolders: notes a holder as a native root if it holds any
// gray CC-representable JS thing (or unconditionally in WantAllTraces mode).
static PLDHashOperator
NoteJSHolder(void *holder, nsScriptObjectTracer *&tracer, void *arg)
{
  Closure *closure = static_cast<Closure*>(arg);

  bool noteRoot;
  if (MOZ_UNLIKELY(closure->mCb->WantAllTraces())) {
    noteRoot = true;
  } else {
    closure->mCycleCollectionEnabled = false;
    tracer->Trace(holder, TraceCallbackFunc(CheckParticipatesInCycleCollection), closure);
    noteRoot = closure->mCycleCollectionEnabled;
  }

  if (noteRoot) {
    closure->mCb->NoteNativeRoot(holder, tracer);
  }

  return PL_DHASH_NEXT;
}

NS_IMETHODIMP
JSGCThingParticipant::Traverse(void* p, nsCycleCollectionTraversalCallback& cb)
{
  // Recover the owning runtime from the participant's offset inside it; the
  // participant is embedded as the mGCThingCycleCollectorGlobal member.
  CycleCollectedJSRuntime* runtime = reinterpret_cast<CycleCollectedJSRuntime*>
    (reinterpret_cast<char*>(this) -
     offsetof(CycleCollectedJSRuntime, mGCThingCycleCollectorGlobal));

  runtime->TraverseGCThing(CycleCollectedJSRuntime::TRAVERSE_FULL,
                           p, js::GCThingTraceKind(p), cb);
  return NS_OK;
}

// NB: This is only used to initialize the participant in
// CycleCollectedJSRuntime. It should never be used directly.
static JSGCThingParticipant sGCThingCycleCollectorGlobal;

NS_IMETHODIMP
JSZoneParticipant::Traverse(void* p, nsCycleCollectionTraversalCallback& cb)
{
  // Recover the owning runtime from the participant's offset inside it; the
  // participant is embedded as the mJSZoneCycleCollectorGlobal member.
  CycleCollectedJSRuntime* runtime = reinterpret_cast<CycleCollectedJSRuntime*>
    (reinterpret_cast<char*>(this) -
     offsetof(CycleCollectedJSRuntime, mJSZoneCycleCollectorGlobal));

  // Zone merging is not used in a WantAllTraces CC (see comment below).
  MOZ_ASSERT(!cb.WantAllTraces());
  JS::Zone* zone = static_cast<JS::Zone*>(p);

  runtime->TraverseZone(zone, cb);
  return NS_OK;
}

static void
NoteJSChildTracerShim(JSTracer* aTrc, void** aThingp, JSGCTraceKind aTraceKind);

// Tracer that reports JS children to a cycle collector traversal callback;
// weak maps are handled separately, hence DoNotTraceWeakMaps.
struct TraversalTracer : public JSTracer
{
  TraversalTracer(JSRuntime *rt, nsCycleCollectionTraversalCallback& aCb)
    : JSTracer(rt, NoteJSChildTracerShim, DoNotTraceWeakMaps), mCb(aCb)
  {
  }
  nsCycleCollectionTraversalCallback& mCb;
};

// Notes a traced JS thing as a CC-graph child (with an edge name when debug
// info is wanted), or traces through it when it isn't CC-representable.
static void
NoteJSChild(JSTracer* aTrc, void* aThing, JSGCTraceKind aTraceKind)
{
  TraversalTracer* tracer = static_cast<TraversalTracer*>(aTrc);

  // Don't traverse non-gray objects, unless we want all traces.
  if (!xpc_IsGrayGCThing(aThing) && !tracer->mCb.WantAllTraces()) {
    return;
  }

  /*
   * This function needs to be careful to avoid stack overflow. Normally, when
   * AddToCCKind is true, the recursion terminates immediately as we just add
   * |thing| to the CC graph. So overflow is only possible when there are long
   * chains of non-AddToCCKind GC things. Currently, this only can happen via
   * shape parent pointers. The special JSTRACE_SHAPE case below handles
   * parent pointers iteratively, rather than recursively, to avoid overflow.
   */
  if (AddToCCKind(aTraceKind)) {
    if (MOZ_UNLIKELY(tracer->mCb.WantDebugInfo())) {
      // based on DumpNotify in jsapi.cpp
      if (tracer->debugPrinter()) {
        char buffer[200];
        tracer->debugPrinter()(aTrc, buffer, sizeof(buffer));
        tracer->mCb.NoteNextEdgeName(buffer);
      } else if (tracer->debugPrintIndex() != (size_t)-1) {
        char buffer[200];
        JS_snprintf(buffer, sizeof(buffer), "%s[%lu]",
                    static_cast<const char *>(tracer->debugPrintArg()),
                    tracer->debugPrintIndex());
        tracer->mCb.NoteNextEdgeName(buffer);
      } else {
        tracer->mCb.NoteNextEdgeName(static_cast<const char*>(tracer->debugPrintArg()));
      }
    }
    tracer->mCb.NoteJSChild(aThing);
  } else if (aTraceKind == JSTRACE_SHAPE) {
    // Iterative shape tracing avoids deep recursion via parent pointers.
    JS_TraceShapeCycleCollectorChildren(aTrc, aThing);
  } else if (aTraceKind != JSTRACE_STRING) {
    JS_TraceChildren(aTrc, aThing, aTraceKind);
  }
}

// JSTraceCallback adapter: dereferences the thing pointer for NoteJSChild.
static void
NoteJSChildTracerShim(JSTracer* aTrc, void** aThingp, JSGCTraceKind aTraceKind)
{
  NoteJSChild(aTrc, *aThingp, aTraceKind);
}

// GCThingCallback adapter used by VisitGrayWrapperTargets: the tracer is
// passed through aData, and the kind is recomputed from the thing.
static void
NoteJSChildGrayWrapperShim(void* aData, void* aThing)
{
  TraversalTracer* trc = static_cast<TraversalTracer*>(aData);
  NoteJSChild(trc, aThing, js::GCThingTraceKind(aThing));
}

/*
 * The cycle collection participant for a Zone is intended to produce the same
 * results as if all of the gray GCthings in a zone were merged into a single node,
 * except for self-edges. This avoids the overhead of representing all of the GCthings in
 * the zone in the cycle collector graph, which should be much faster if many of
 * the GCthings in the zone are gray.
 *
 * Zone merging should not always be used, because it is a conservative
 * approximation of the true cycle collector graph that can incorrectly identify some
 * garbage objects as being live. For instance, consider two cycles that pass through a
 * zone, where one is garbage and the other is live. If we merge the entire
 * zone, the cycle collector will think that both are alive.
 *
 * We don't have to worry about losing track of a garbage cycle, because any such garbage
 * cycle incorrectly identified as live must contain at least one C++ to JS edge, and
 * XPConnect will always add the C++ object to the CC graph. (This is in contrast to pure
 * C++ garbage cycles, which must always be properly identified, because we clear the
 * purple buffer during every CC, which may contain the last reference to a garbage
 * cycle.)
 */

// NB: This is only used to initialize the participant in
// CycleCollectedJSRuntime. It should never be used directly.
static const JSZoneParticipant sJSZoneCycleCollectorGlobal;

// Creates the underlying JSRuntime, installs the GC/context callbacks and the
// black/gray root tracers, and registers this runtime with the cycle
// collector. Crashes (MOZ_CRASH) on allocation failure.
CycleCollectedJSRuntime::CycleCollectedJSRuntime(JSRuntime* aParentRuntime,
                                                 uint32_t aMaxbytes,
                                                 JSUseHelperThreads aUseHelperThreads)
  : mGCThingCycleCollectorGlobal(sGCThingCycleCollectorGlobal),
    mJSZoneCycleCollectorGlobal(sJSZoneCycleCollectorGlobal),
    mJSRuntime(nullptr),
    mJSHolders(512)
{
  mozilla::dom::InitScriptSettings();

  mJSRuntime = JS_NewRuntime(aMaxbytes, aUseHelperThreads, aParentRuntime);
  if (!mJSRuntime) {
    MOZ_CRASH();
  }

  // Category-2 roots (see the file comment) are traced by TraceBlackJS.
  if (!JS_AddExtraGCRootsTracer(mJSRuntime, TraceBlackJS, this)) {
    MOZ_CRASH();
  }
  // Category-3 roots are traced gray by TraceGrayJS.
  JS_SetGrayGCRootsTracer(mJSRuntime, TraceGrayJS, this);
  JS_SetGCCallback(mJSRuntime, GCCallback, this);
  JS_SetContextCallback(mJSRuntime, ContextCallback, this);
  JS_SetDestroyZoneCallback(mJSRuntime, XPCStringConvert::FreeZoneCache);
  JS_SetSweepZoneCallback(mJSRuntime, XPCStringConvert::ClearZoneCache);

  nsCycleCollector_registerJSRuntime(this);
}

// Tears down in reverse order: drop the pending exception, destroy the
// JSRuntime, deregister from the cycle collector, then tear down script
// settings. Asserts that no deferred finalization work is outstanding.
CycleCollectedJSRuntime::~CycleCollectedJSRuntime()
{
  MOZ_ASSERT(mJSRuntime);
  MOZ_ASSERT(!mDeferredFinalizerTable.Count());
  MOZ_ASSERT(!mDeferredSupports.Length());

  // Clear mPendingException first, since it might be cycle collected.
  mPendingException = nullptr;

  JS_DestroyRuntime(mJSRuntime);
  mJSRuntime = nullptr;
  nsCycleCollector_forgetJSRuntime();

  mozilla::dom::DestroyScriptSettings();
}

// Memory-reporter hook: currently only measures the mJSHolders table itself.
size_t
CycleCollectedJSRuntime::SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
{
  size_t n = 0;

  // nullptr for the second arg; we're not measuring anything hanging off the
  // entries in mJSHolders.
  n += mJSHolders.SizeOfExcludingThis(nullptr, aMallocSizeOf);

  return n;
}

// Enumerator: asks each holder's tracer to unmark it as skippable.
static PLDHashOperator
UnmarkJSHolder(void* holder, nsScriptObjectTracer*& tracer, void* arg)
{
  tracer->CanSkip(holder, true);
  return PL_DHASH_NEXT;
}

void
CycleCollectedJSRuntime::UnmarkSkippableJSHolders()
{
  mJSHolders.Enumerate(UnmarkJSHolder, nullptr);
}

// Produces a human-readable description of a GC thing for the CC graph.
// Cheap generic label unless the callback wants debug info, in which case a
// name is synthesized from the class/function/trace kind.
void
CycleCollectedJSRuntime::DescribeGCThing(bool aIsMarked, void* aThing,
                                         JSGCTraceKind aTraceKind,
                                         nsCycleCollectionTraversalCallback& aCb) const
{
  if (!aCb.WantDebugInfo()) {
    aCb.DescribeGCedNode(aIsMarked, "JS Object");
    return;
  }

  char name[72];
  uint64_t compartmentAddress = 0;
  if (aTraceKind == JSTRACE_OBJECT) {
    JSObject* obj = static_cast<JSObject*>(aThing);
    compartmentAddress = (uint64_t)js::GetObjectCompartment(obj);
    const js::Class* clasp = js::GetObjectClass(obj);

    // Give the subclass a chance to do something
    if (DescribeCustomObjects(obj, clasp, name)) {
      // Nothing else to do!
    } else if (js::IsFunctionObject(obj)) {
      JSFunction* fun = JS_GetObjectFunction(obj);
      JSString* str = JS_GetFunctionDisplayId(fun);
      if (str) {
        NS_ConvertUTF16toUTF8 fname(JS_GetInternedStringChars(str));
        JS_snprintf(name, sizeof(name),
                    "JS Object (Function - %s)", fname.get());
      } else {
        JS_snprintf(name, sizeof(name), "JS Object (Function)");
      }
    } else {
      JS_snprintf(name, sizeof(name), "JS Object (%s)",
                  clasp->name);
    }
  } else {
    // Non-object things are labeled by trace kind; the table must stay in
    // sync with the JSGCTraceKind enum (checked by the static_assert).
    static const char trace_types[][11] = {
      "Object",
      "String",
      "Script",
      "LazyScript",
      "IonCode",
      "Shape",
      "BaseShape",
      "TypeObject",
    };
    static_assert(MOZ_ARRAY_LENGTH(trace_types) == JSTRACE_LAST + 1,
                  "JSTRACE_LAST enum must match trace_types count.");
    JS_snprintf(name, sizeof(name), "JS %s", trace_types[aTraceKind]);
  }

  // Disable printing global for objects while we figure out ObjShrink fallout.
  aCb.DescribeGCedNode(aIsMarked, name, compartmentAddress);
}

// Reports aThing's JS children to the CC via a TraversalTracer.
void
CycleCollectedJSRuntime::NoteGCThingJSChildren(void* aThing,
                                              JSGCTraceKind aTraceKind,
                                              nsCycleCollectionTraversalCallback& aCb) const
{
  MOZ_ASSERT(mJSRuntime);
  TraversalTracer trc(mJSRuntime, aCb);
  JS_TraceChildren(&trc, aThing, aTraceKind);
}

// Reports the C++ object (if any) hanging off a JS object: custom subclass
// hook first, then nsISupports private, then DOM object unwrapping.
void
CycleCollectedJSRuntime::NoteGCThingXPCOMChildren(const js::Class* aClasp, JSObject* aObj,
                                                 nsCycleCollectionTraversalCallback& aCb) const
{
  MOZ_ASSERT(aClasp);
  MOZ_ASSERT(aClasp == js::GetObjectClass(aObj));

  if (NoteCustomGCThingXPCOMChildren(aClasp, aObj, aCb)) {
    // Nothing else to do!
    return;
  }
  // XXX This test does seem fragile, we should probably whitelist classes
  // that do hold a strong reference, but that might not be possible.
  else if (aClasp->flags & JSCLASS_HAS_PRIVATE &&
           aClasp->flags & JSCLASS_PRIVATE_IS_NSISUPPORTS) {
    NS_CYCLE_COLLECTION_NOTE_EDGE_NAME(aCb, "js::GetObjectPrivate(obj)");
    aCb.NoteXPCOMChild(static_cast<nsISupports*>(js::GetObjectPrivate(aObj)));
  } else {
    const DOMClass* domClass = GetDOMClass(aObj);
    if (domClass) {
      NS_CYCLE_COLLECTION_NOTE_EDGE_NAME(aCb, "UnwrapDOMObject(obj)");
      if (domClass->mDOMObjectIsISupports) {
        aCb.NoteXPCOMChild(UnwrapDOMObject<nsISupports>(aObj));
      } else if (domClass->mParticipant) {
        aCb.NoteNativeChild(UnwrapDOMObject<void>(aObj),
                            domClass->mParticipant);
      }
    }
  }
}

// Traverses one GC thing for the CC. TRAVERSE_FULL describes the node and
// notes both JS and C++ children; TRAVERSE_CPP (used under zone merging)
// notes only the C++ children.
void
CycleCollectedJSRuntime::TraverseGCThing(TraverseSelect aTs, void* aThing,
                                         JSGCTraceKind aTraceKind,
                                         nsCycleCollectionTraversalCallback& aCb)
{
  MOZ_ASSERT(aTraceKind == js::GCThingTraceKind(aThing));
  bool isMarkedGray = xpc_IsGrayGCThing(aThing);

  if (aTs == TRAVERSE_FULL) {
    DescribeGCThing(!isMarkedGray, aThing, aTraceKind, aCb);
  }

  // If this object is alive, then all of its children are alive. For JS objects,
  // the black-gray invariant ensures the children are also marked black. For C++
  // objects, the ref count from this object will keep them alive. Thus we don't
  // need to trace our children, unless we are debugging using WantAllTraces.
  if (!isMarkedGray && !aCb.WantAllTraces()) {
    return;
  }

  if (aTs == TRAVERSE_FULL) {
    NoteGCThingJSChildren(aThing, aTraceKind, aCb);
  }

  if (aTraceKind == JSTRACE_OBJECT) {
    JSObject* obj = static_cast<JSObject*>(aThing);
    NoteGCThingXPCOMChildren(js::GetObjectClass(obj), obj, aCb);
  }
}

// Bundles the CC callback and the runtime for the IterateGrayObjects shim.
struct TraverseObjectShimClosure {
  nsCycleCollectionTraversalCallback& cb;
  CycleCollectedJSRuntime* self;
};

// Traverses a whole zone as a single merged CC node (see the zone-merging
// comment above sJSZoneCycleCollectorGlobal).
void
CycleCollectedJSRuntime::TraverseZone(JS::Zone* aZone,
                                      nsCycleCollectionTraversalCallback& aCb)
{
  /*
   * We treat the zone as being gray. We handle non-gray GCthings in the
   * zone by not reporting their children to the CC. The black-gray invariant
   * ensures that any JS children will also be non-gray, and thus don't need to be
   * added to the graph. For C++ children, not representing the edge from the
   * non-gray JS GCthings to the C++ object will keep the child alive.
   *
   * We don't allow zone merging in a WantAllTraces CC, because then these
   * assumptions don't hold.
   */
  aCb.DescribeGCedNode(false, "JS Zone");

  /*
   * Every JS child of everything in the zone is either in the zone
   * or is a cross-compartment wrapper. In the former case, we don't need to
   * represent these edges in the CC graph because JS objects are not ref counted.
   * In the latter case, the JS engine keeps a map of these wrappers, which we
   * iterate over. Edges between compartments in the same zone will add
   * unnecessary loop edges to the graph (bug 842137).
   */
  TraversalTracer trc(mJSRuntime, aCb);
  js::VisitGrayWrapperTargets(aZone, NoteJSChildGrayWrapperShim, &trc);

  /*
   * To find C++ children of things in the zone, we scan every JS Object in
   * the zone. Only JS Objects can have C++ children.
   */
  TraverseObjectShimClosure closure = { aCb, this };
  js::IterateGrayObjects(aZone, TraverseObjectShim, &closure);
}

// Callback for IterateGrayObjects: notes only C++ children (TRAVERSE_CPP).
/* static */ void
CycleCollectedJSRuntime::TraverseObjectShim(void* aData, void* aThing)
{
  TraverseObjectShimClosure* closure =
    static_cast<TraverseObjectShimClosure*>(aData);

  MOZ_ASSERT(js::GCThingTraceKind(aThing) == JSTRACE_OBJECT);
  closure->self->TraverseGCThing(CycleCollectedJSRuntime::TRAVERSE_CPP, aThing,
                                 JSTRACE_OBJECT, closure->cb);
}

// Reports all native (C++) roots — subclass-provided ones plus the JS
// holders — to the cycle collector.
void
CycleCollectedJSRuntime::TraverseNativeRoots(nsCycleCollectionNoteRootCallback& aCb)
{
  // NB: This is here just to preserve the existing XPConnect order. I doubt it
  // would hurt to do this after the JS holders.
  TraverseAdditionalNativeRoots(aCb);

  Closure closure(&aCb);
  mJSHolders.Enumerate(NoteJSHolder, &closure);
}

// Extra-GC-roots tracer installed in the constructor: traces category-2
// (black) native roots.
/* static */ void
CycleCollectedJSRuntime::TraceBlackJS(JSTracer* aTracer, void* aData)
{
  CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData);

  self->TraceNativeBlackRoots(aTracer);
}

// Gray-roots tracer installed in the constructor: traces category-3 roots.
/* static */ void
CycleCollectedJSRuntime::TraceGrayJS(JSTracer* aTracer, void* aData)
{
  CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData);

  // Mark these roots as gray so the CC can walk them later.
  self->TraceNativeGrayRoots(aTracer);
}

// GC status callback: forwards to the virtual OnGC hook.
/* static */ void
CycleCollectedJSRuntime::GCCallback(JSRuntime* aRuntime,
                                    JSGCStatus aStatus,
                                    void* aData)
{
  CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData);

  MOZ_ASSERT(aRuntime == self->Runtime());

  self->OnGC(aStatus);
}

// Context callback: forwards to the virtual CustomContextCallback hook.
/* static */ bool
CycleCollectedJSRuntime::ContextCallback(JSContext* aContext,
                                         unsigned aOperation,
                                         void* aData)
{
  CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData);

  MOZ_ASSERT(JS_GetRuntime(aContext) == self->Runtime());

  return self->CustomContextCallback(aContext, aOperation);
}

// TraceCallbacks implementation that forwards each heap slot to the matching
// JSAPI tracing function; the JSTracer* is smuggled through |closure|.
struct JsGcTracer : public TraceCallbacks
{
  virtual void Trace(JS::Heap<JS::Value> *p, const char *name, void *closure) const MOZ_OVERRIDE {
    JS_CallHeapValueTracer(static_cast<JSTracer*>(closure), p, name);
  }
  virtual void Trace(JS::Heap<jsid> *p, const char *name, void *closure) const MOZ_OVERRIDE {
    JS_CallHeapIdTracer(static_cast<JSTracer*>(closure), p, name);
  }
  virtual void Trace(JS::Heap<JSObject *> *p, const char *name, void *closure) const MOZ_OVERRIDE {
    JS_CallHeapObjectTracer(static_cast<JSTracer*>(closure), p, name);
  }
  virtual void Trace(JS::TenuredHeap<JSObject *> *p, const char *name, void *closure) const MOZ_OVERRIDE {
    JS_CallTenuredObjectTracer(static_cast<JSTracer*>(closure), p, name);
  }
  virtual void Trace(JS::Heap<JSString *> *p, const char *name, void *closure) const MOZ_OVERRIDE {
    JS_CallHeapStringTracer(static_cast<JSTracer*>(closure), p, name);
  }
  virtual void Trace(JS::Heap<JSScript *> *p, const char *name, void *closure) const MOZ_OVERRIDE {
    JS_CallHeapScriptTracer(static_cast<JSTracer*>(closure), p, name);
  }
  virtual void Trace(JS::Heap<JSFunction *> *p, const char *name, void *closure) const MOZ_OVERRIDE {
    JS_CallHeapFunctionTracer(static_cast<JSTracer*>(closure), p, name);
  }
};

// Enumerator: traces one JS holder with JsGcTracer; aArg is the JSTracer*.
static PLDHashOperator
TraceJSHolder(void* aHolder, nsScriptObjectTracer*& aTracer, void* aArg)
{
  aTracer->Trace(aHolder, JsGcTracer(), aArg);

  return PL_DHASH_NEXT;
}

// Traces all category-3 (gray) roots: subclass-provided ones plus holders.
void
CycleCollectedJSRuntime::TraceNativeGrayRoots(JSTracer* aTracer)
{
  // NB: This is here just to preserve the existing XPConnect order. I doubt it
  // would hurt to do this after the JS holders.
  TraceAdditionalNativeGrayRoots(aTracer);

  mJSHolders.Enumerate(TraceJSHolder, aTracer);
}

// Registers (or re-registers) a native object whose tracer exposes the JS
// things it holds.
void
CycleCollectedJSRuntime::AddJSHolder(void* aHolder, nsScriptObjectTracer* aTracer)
{
  mJSHolders.Put(aHolder, aTracer);
}

// TraceCallbacks implementation that nulls out every traced slot; used when a
// holder is removed so it no longer keeps JS things alive.
struct ClearJSHolder : TraceCallbacks
{
  virtual void Trace(JS::Heap<JS::Value>* aPtr, const char*, void*) const MOZ_OVERRIDE
  {
    *aPtr = JSVAL_VOID;
  }

  virtual void Trace(JS::Heap<jsid>* aPtr, const char*, void*) const MOZ_OVERRIDE
  {
    *aPtr = JSID_VOID;
  }

  virtual void Trace(JS::Heap<JSObject*>* aPtr, const char*, void*) const MOZ_OVERRIDE
  {
    *aPtr = nullptr;
  }

  virtual void Trace(JS::TenuredHeap<JSObject*>* aPtr, const char*, void*) const MOZ_OVERRIDE
  {
    *aPtr = nullptr;
  }

  virtual void Trace(JS::Heap<JSString*>* aPtr, const char*, void*) const MOZ_OVERRIDE
  {
    *aPtr = nullptr;
  }

  virtual void Trace(JS::Heap<JSScript*>* aPtr, const char*, void*) const MOZ_OVERRIDE
  {
    *aPtr = nullptr;
  }

  virtual void Trace(JS::Heap<JSFunction*>* aPtr, const char*, void*) const MOZ_OVERRIDE
  {
    *aPtr = nullptr;
  }
};

// Clears the holder's JS references, then drops it from the table; a no-op
// for unknown holders.
void
CycleCollectedJSRuntime::RemoveJSHolder(void* aHolder)
{
  nsScriptObjectTracer* tracer = mJSHolders.Get(aHolder);
  if (!tracer) {
    return;
  }
  tracer->Trace(aHolder, ClearJSHolder(), nullptr);
  mJSHolders.Remove(aHolder);
}

#ifdef DEBUG
bool
CycleCollectedJSRuntime::IsJSHolder(void* aHolder)
{
  return mJSHolders.Get(aHolder, nullptr);
}

// Trace callback asserting that a (supposedly cleared) holder traces nothing.
static void
AssertNoGcThing(void* aGCThing, const char* aName, void* aClosure)
{
  MOZ_ASSERT(!aGCThing);
}

// Debug check that aPossibleJSHolder holds no live JS things.
void
CycleCollectedJSRuntime::AssertNoObjectsToTrace(void* aPossibleJSHolder)
{
  nsScriptObjectTracer* tracer = mJSHolders.Get(aPossibleJSHolder);
  if (tracer) {
    tracer->Trace(aPossibleJSHolder, TraceCallbackFunc(AssertNoGcThing), nullptr);
  }
}
#endif

already_AddRefed<nsIException>
CycleCollectedJSRuntime::GetPendingException() const
{
  nsCOMPtr<nsIException> out = mPendingException;
  return out.forget();
}

void
CycleCollectedJSRuntime::SetPendingException(nsIException* aException)
{
  mPendingException = aException;
}

nsCycleCollectionParticipant*
CycleCollectedJSRuntime::GCThingParticipant()
{
  return &mGCThingCycleCollectorGlobal;
}

nsCycleCollectionParticipant*
CycleCollectedJSRuntime::ZoneParticipant()
{
  return &mJSZoneCycleCollectorGlobal;
}

// Entry point for the CC's root-finding phase: native roots first, then weak
// map entries.
nsresult
CycleCollectedJSRuntime::TraverseRoots(nsCycleCollectionNoteRootCallback &aCb)
{
  TraverseNativeRoots(aCb);

  NoteWeakMapsTracer trc(mJSRuntime, TraceWeakMapping, aCb);
  js::TraceWeakMaps(&trc);

  return NS_OK;
}

/*
 * Return true if there exists a JSContext with a default global whose current
 * inner is gray. The intent is to look for JS Object windows. We don't merge
 * system compartments, so we don't use them to trigger merging CCs.
 */
bool
CycleCollectedJSRuntime::UsefulToMergeZones() const
{
  // Main-thread only: relies on nsContentUtils and window script contexts.
  if (!NS_IsMainThread()) {
    return false;
  }

  JSContext* iter = nullptr;
  JSContext* cx;
  JSAutoRequest ar(nsContentUtils::GetSafeJSContext());
  while ((cx = JS_ContextIterator(mJSRuntime, &iter))) {
    // Skip anything without an nsIScriptContext.
    nsIScriptContext* scx = GetScriptContextFromJSContext(cx);
    JS::RootedObject obj(cx, scx ? scx->GetWindowProxyPreserveColor() : nullptr);
    if (!obj) {
      continue;
    }
    MOZ_ASSERT(js::IsOuterObject(obj));
    // Grab the inner from the outer.
    obj = JS_ObjectToInnerObject(cx, obj);
    MOZ_ASSERT(!js::GetObjectParent(obj));
    if (JS::GCThingIsMarkedGray(obj) &&
        !js::IsSystemCompartment(js::GetObjectCompartment(obj))) {
      return true;
    }
  }
  return false;
}

// Runs the weak-map gray-bit fixup to a fixed point (see
// FixWeakMappingGrayBitsTracer above).
void
CycleCollectedJSRuntime::FixWeakMappingGrayBits() const
{
  FixWeakMappingGrayBitsTracer fixer(mJSRuntime);
  fixer.FixAll();
}

// A GC is needed before CC when the gray bits are no longer valid.
bool
CycleCollectedJSRuntime::NeedCollect() const
{
  return !js::AreGCGrayBitsValid(mJSRuntime);
}

// Triggers a full, non-incremental GC for the given gcreason.
void
CycleCollectedJSRuntime::Collect(uint32_t aReason) const
{
  MOZ_ASSERT(aReason < JS::gcreason::NUM_REASONS);
  JS::gcreason::Reason gcreason = static_cast<JS::gcreason::Reason>(aReason);

  JS::PrepareForFullGC(mJSRuntime);
  JS::GCForReason(mJSRuntime, gcreason);
}

void
+CycleCollectedJSRuntime::DeferredFinalize(DeferredFinalizeAppendFunction aAppendFunc, 1.972 + DeferredFinalizeFunction aFunc, 1.973 + void* aThing) 1.974 +{ 1.975 + void* thingArray = nullptr; 1.976 + bool hadThingArray = mDeferredFinalizerTable.Get(aFunc, &thingArray); 1.977 + 1.978 + thingArray = aAppendFunc(thingArray, aThing); 1.979 + if (!hadThingArray) { 1.980 + mDeferredFinalizerTable.Put(aFunc, thingArray); 1.981 + } 1.982 +} 1.983 + 1.984 +void 1.985 +CycleCollectedJSRuntime::DeferredFinalize(nsISupports* aSupports) 1.986 +{ 1.987 + mDeferredSupports.AppendElement(aSupports); 1.988 +} 1.989 + 1.990 +void 1.991 +CycleCollectedJSRuntime::DumpJSHeap(FILE* file) 1.992 +{ 1.993 + js::DumpHeapComplete(Runtime(), file, js::CollectNurseryBeforeDump); 1.994 +} 1.995 + 1.996 + 1.997 +bool 1.998 +ReleaseSliceNow(uint32_t aSlice, void* aData) 1.999 +{ 1.1000 + MOZ_ASSERT(aSlice > 0, "nonsensical/useless call with slice == 0"); 1.1001 + nsTArray<nsISupports*>* items = static_cast<nsTArray<nsISupports*>*>(aData); 1.1002 + 1.1003 + uint32_t length = items->Length(); 1.1004 + aSlice = std::min(aSlice, length); 1.1005 + for (uint32_t i = length; i > length - aSlice; --i) { 1.1006 + // Remove (and NS_RELEASE) the last entry in "items": 1.1007 + uint32_t lastItemIdx = i - 1; 1.1008 + 1.1009 + nsISupports* wrapper = items->ElementAt(lastItemIdx); 1.1010 + items->RemoveElementAt(lastItemIdx); 1.1011 + NS_IF_RELEASE(wrapper); 1.1012 + } 1.1013 + 1.1014 + return items->IsEmpty(); 1.1015 +} 1.1016 + 1.1017 +/* static */ PLDHashOperator 1.1018 +IncrementalFinalizeRunnable::DeferredFinalizerEnumerator(DeferredFinalizeFunction& aFunction, 1.1019 + void*& aData, 1.1020 + void* aClosure) 1.1021 +{ 1.1022 + DeferredFinalizeArray* array = static_cast<DeferredFinalizeArray*>(aClosure); 1.1023 + 1.1024 + DeferredFinalizeFunctionHolder* function = array->AppendElement(); 1.1025 + function->run = aFunction; 1.1026 + function->data = aData; 1.1027 + 1.1028 + return PL_DHASH_REMOVE; 1.1029 
+} 1.1030 + 1.1031 +IncrementalFinalizeRunnable::IncrementalFinalizeRunnable(CycleCollectedJSRuntime* aRt, 1.1032 + nsTArray<nsISupports*>& aSupports, 1.1033 + DeferredFinalizerTable& aFinalizers) 1.1034 + : mRuntime(aRt), 1.1035 + mFinalizeFunctionToRun(0) 1.1036 +{ 1.1037 + this->mSupports.SwapElements(aSupports); 1.1038 + DeferredFinalizeFunctionHolder* function = mDeferredFinalizeFunctions.AppendElement(); 1.1039 + function->run = ReleaseSliceNow; 1.1040 + function->data = &this->mSupports; 1.1041 + 1.1042 + // Enumerate the hashtable into our array. 1.1043 + aFinalizers.Enumerate(DeferredFinalizerEnumerator, &mDeferredFinalizeFunctions); 1.1044 +} 1.1045 + 1.1046 +IncrementalFinalizeRunnable::~IncrementalFinalizeRunnable() 1.1047 +{ 1.1048 + MOZ_ASSERT(this != mRuntime->mFinalizeRunnable); 1.1049 +} 1.1050 + 1.1051 +void 1.1052 +IncrementalFinalizeRunnable::ReleaseNow(bool aLimited) 1.1053 +{ 1.1054 + //MOZ_ASSERT(NS_IsMainThread()); 1.1055 + MOZ_ASSERT(mDeferredFinalizeFunctions.Length() != 0, 1.1056 + "We should have at least ReleaseSliceNow to run"); 1.1057 + MOZ_ASSERT(mFinalizeFunctionToRun < mDeferredFinalizeFunctions.Length(), 1.1058 + "No more finalizers to run?"); 1.1059 + 1.1060 + TimeDuration sliceTime = TimeDuration::FromMilliseconds(SliceMillis); 1.1061 + TimeStamp started = TimeStamp::Now(); 1.1062 + bool timeout = false; 1.1063 + do { 1.1064 + const DeferredFinalizeFunctionHolder &function = 1.1065 + mDeferredFinalizeFunctions[mFinalizeFunctionToRun]; 1.1066 + if (aLimited) { 1.1067 + bool done = false; 1.1068 + while (!timeout && !done) { 1.1069 + /* 1.1070 + * We don't want to read the clock too often, so we try to 1.1071 + * release slices of 100 items. 
1.1072 + */ 1.1073 + done = function.run(100, function.data); 1.1074 + timeout = TimeStamp::Now() - started >= sliceTime; 1.1075 + } 1.1076 + if (done) { 1.1077 + ++mFinalizeFunctionToRun; 1.1078 + } 1.1079 + if (timeout) { 1.1080 + break; 1.1081 + } 1.1082 + } else { 1.1083 + function.run(UINT32_MAX, function.data); 1.1084 + ++mFinalizeFunctionToRun; 1.1085 + } 1.1086 + } while (mFinalizeFunctionToRun < mDeferredFinalizeFunctions.Length()); 1.1087 + 1.1088 + if (mFinalizeFunctionToRun == mDeferredFinalizeFunctions.Length()) { 1.1089 + MOZ_ASSERT(mRuntime->mFinalizeRunnable == this); 1.1090 + mDeferredFinalizeFunctions.Clear(); 1.1091 + // NB: This may delete this! 1.1092 + mRuntime->mFinalizeRunnable = nullptr; 1.1093 + } 1.1094 +} 1.1095 + 1.1096 +NS_IMETHODIMP 1.1097 +IncrementalFinalizeRunnable::Run() 1.1098 +{ 1.1099 + if (mRuntime->mFinalizeRunnable != this) { 1.1100 + /* These items were already processed synchronously in JSGC_END. */ 1.1101 + MOZ_ASSERT(!mSupports.Length()); 1.1102 + MOZ_ASSERT(!mDeferredFinalizeFunctions.Length()); 1.1103 + return NS_OK; 1.1104 + } 1.1105 + 1.1106 + ReleaseNow(true); 1.1107 + 1.1108 + if (mDeferredFinalizeFunctions.Length()) { 1.1109 + nsresult rv = NS_DispatchToCurrentThread(this); 1.1110 + if (NS_FAILED(rv)) { 1.1111 + ReleaseNow(false); 1.1112 + } 1.1113 + } 1.1114 + 1.1115 + return NS_OK; 1.1116 +} 1.1117 + 1.1118 +void 1.1119 +CycleCollectedJSRuntime::FinalizeDeferredThings(DeferredFinalizeType aType) 1.1120 +{ 1.1121 + MOZ_ASSERT(!mFinalizeRunnable); 1.1122 + mFinalizeRunnable = new IncrementalFinalizeRunnable(this, 1.1123 + mDeferredSupports, 1.1124 + mDeferredFinalizerTable); 1.1125 + 1.1126 + // Everything should be gone now. 
1.1127 + MOZ_ASSERT(!mDeferredSupports.Length()); 1.1128 + MOZ_ASSERT(!mDeferredFinalizerTable.Count()); 1.1129 + 1.1130 + if (aType == FinalizeIncrementally) { 1.1131 + NS_DispatchToCurrentThread(mFinalizeRunnable); 1.1132 + } else { 1.1133 + mFinalizeRunnable->ReleaseNow(false); 1.1134 + MOZ_ASSERT(!mFinalizeRunnable); 1.1135 + } 1.1136 +} 1.1137 + 1.1138 +void 1.1139 +CycleCollectedJSRuntime::OnGC(JSGCStatus aStatus) 1.1140 +{ 1.1141 + switch (aStatus) { 1.1142 + case JSGC_BEGIN: 1.1143 + nsCycleCollector_prepareForGarbageCollection(); 1.1144 + break; 1.1145 + case JSGC_END: 1.1146 + { 1.1147 + /* 1.1148 + * If the previous GC created a runnable to finalize objects 1.1149 + * incrementally, and if it hasn't finished yet, finish it now. We 1.1150 + * don't want these to build up. We also don't want to allow any 1.1151 + * existing incremental finalize runnables to run after a 1.1152 + * non-incremental GC, since they are often used to detect leaks. 1.1153 + */ 1.1154 + if (mFinalizeRunnable) { 1.1155 + mFinalizeRunnable->ReleaseNow(false); 1.1156 + } 1.1157 + 1.1158 + // Do any deferred finalization of native objects. 1.1159 + FinalizeDeferredThings(JS::WasIncrementalGC(mJSRuntime) ? FinalizeIncrementally : 1.1160 + FinalizeNow); 1.1161 + break; 1.1162 + } 1.1163 + default: 1.1164 + MOZ_CRASH(); 1.1165 + } 1.1166 + 1.1167 + CustomGCCallback(aStatus); 1.1168 +}