|
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ |
|
2 /* vim: set ts=8 sts=2 et sw=2 tw=80: */ |
|
3 /* This Source Code Form is subject to the terms of the Mozilla Public |
|
4 * License, v. 2.0. If a copy of the MPL was not distributed with this |
|
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ |
|
6 |
|
7 // We're dividing JS objects into 3 categories: |
|
8 // |
|
9 // 1. "real" roots, held by the JS engine itself or rooted through the root |
|
10 // and lock JS APIs. Roots from this category are considered black in the |
|
11 // cycle collector, any cycle they participate in is uncollectable. |
|
12 // |
|
13 // 2. certain roots held by C++ objects that are guaranteed to be alive. |
|
14 // Roots from this category are considered black in the cycle collector, |
|
15 // and any cycle they participate in is uncollectable. These roots are |
|
16 // traced from TraceNativeBlackRoots. |
|
17 // |
|
18 // 3. all other roots held by C++ objects that participate in cycle |
|
19 // collection, held by us (see TraceNativeGrayRoots). Roots from this |
|
20 // category are considered grey in the cycle collector; whether or not |
|
21 // they are collected depends on the objects that hold them. |
|
22 // |
|
// Note that if a root is in multiple categories, the fact that it is in
// category 1 or 2 takes precedence, so it will be considered black.
|
25 // |
|
26 // During garbage collection we switch to an additional mark color (gray) |
|
27 // when tracing inside TraceNativeGrayRoots. This allows us to walk those |
|
28 // roots later on and add all objects reachable only from them to the |
|
29 // cycle collector. |
|
30 // |
|
31 // Phases: |
|
32 // |
|
33 // 1. marking of the roots in category 1 by having the JS GC do its marking |
|
34 // 2. marking of the roots in category 2 by having the JS GC call us back |
|
35 // (via JS_SetExtraGCRootsTracer) and running TraceNativeBlackRoots |
|
36 // 3. marking of the roots in category 3 by TraceNativeGrayRoots using an |
|
37 // additional color (gray). |
|
38 // 4. end of GC, GC can sweep its heap |
|
39 // |
|
40 // At some later point, when the cycle collector runs: |
|
41 // |
|
42 // 5. walk gray objects and add them to the cycle collector, cycle collect |
|
43 // |
|
44 // JS objects that are part of cycles the cycle collector breaks will be |
|
45 // collected by the next JS GC. |
|
46 // |
|
47 // If WantAllTraces() is false the cycle collector will not traverse roots |
|
48 // from category 1 or any JS objects held by them. Any JS objects they hold |
|
49 // will already be marked by the JS GC and will thus be colored black |
|
50 // themselves. Any C++ objects they hold will have a missing (untraversed) |
|
51 // edge from the JS object to the C++ object and so it will be marked black |
|
52 // too. This decreases the number of objects that the cycle collector has to |
|
53 // deal with. |
|
54 // To improve debugging, if WantAllTraces() is true all JS objects are |
|
55 // traversed. |
|
56 |
|
57 #include "mozilla/CycleCollectedJSRuntime.h" |
|
58 #include <algorithm> |
|
59 #include "mozilla/ArrayUtils.h" |
|
60 #include "mozilla/MemoryReporting.h" |
|
61 #include "mozilla/dom/BindingUtils.h" |
|
62 #include "mozilla/dom/DOMJSClass.h" |
|
63 #include "mozilla/dom/ScriptSettings.h" |
|
64 #include "jsprf.h" |
|
65 #include "nsCycleCollectionNoteRootCallback.h" |
|
66 #include "nsCycleCollectionParticipant.h" |
|
67 #include "nsCycleCollector.h" |
|
68 #include "nsDOMJSUtils.h" |
|
69 #include "nsIException.h" |
|
70 #include "nsThreadUtils.h" |
|
71 #include "xpcpublic.h" |
|
72 |
|
73 using namespace mozilla; |
|
74 using namespace mozilla::dom; |
|
75 |
|
76 namespace mozilla { |
|
77 |
|
// Pairs a deferred-finalization callback with the opaque data it operates on;
// queued up and run later (possibly in slices) by IncrementalFinalizeRunnable.
struct DeferredFinalizeFunctionHolder
{
  DeferredFinalizeFunction run;  // Callback invoked to finalize |data|.
  void *data;                    // Opaque state owned by the callback.
};
|
83 |
|
84 class IncrementalFinalizeRunnable : public nsRunnable |
|
85 { |
|
86 typedef nsAutoTArray<DeferredFinalizeFunctionHolder, 16> DeferredFinalizeArray; |
|
87 typedef CycleCollectedJSRuntime::DeferredFinalizerTable DeferredFinalizerTable; |
|
88 |
|
89 CycleCollectedJSRuntime* mRuntime; |
|
90 nsTArray<nsISupports*> mSupports; |
|
91 DeferredFinalizeArray mDeferredFinalizeFunctions; |
|
92 uint32_t mFinalizeFunctionToRun; |
|
93 |
|
94 static const PRTime SliceMillis = 10; /* ms */ |
|
95 |
|
96 static PLDHashOperator |
|
97 DeferredFinalizerEnumerator(DeferredFinalizeFunction& aFunction, |
|
98 void*& aData, |
|
99 void* aClosure); |
|
100 |
|
101 public: |
|
102 IncrementalFinalizeRunnable(CycleCollectedJSRuntime* aRt, |
|
103 nsTArray<nsISupports*>& mSupports, |
|
104 DeferredFinalizerTable& aFinalizerTable); |
|
105 virtual ~IncrementalFinalizeRunnable(); |
|
106 |
|
107 void ReleaseNow(bool aLimited); |
|
108 |
|
109 NS_DECL_NSIRUNNABLE |
|
110 }; |
|
111 |
|
112 } // namespace mozilla |
|
113 |
|
114 inline bool |
|
115 AddToCCKind(JSGCTraceKind kind) |
|
116 { |
|
117 return kind == JSTRACE_OBJECT || kind == JSTRACE_SCRIPT; |
|
118 } |
|
119 |
|
120 static void |
|
121 TraceWeakMappingChild(JSTracer* trc, void** thingp, JSGCTraceKind kind); |
|
122 |
|
123 struct NoteWeakMapChildrenTracer : public JSTracer |
|
124 { |
|
125 NoteWeakMapChildrenTracer(JSRuntime *rt, nsCycleCollectionNoteRootCallback& cb) |
|
126 : JSTracer(rt, TraceWeakMappingChild), mCb(cb) |
|
127 { |
|
128 } |
|
129 nsCycleCollectionNoteRootCallback& mCb; |
|
130 bool mTracedAny; |
|
131 JSObject* mMap; |
|
132 void* mKey; |
|
133 void* mKeyDelegate; |
|
134 }; |
|
135 |
|
136 static void |
|
137 TraceWeakMappingChild(JSTracer* trc, void** thingp, JSGCTraceKind kind) |
|
138 { |
|
139 MOZ_ASSERT(trc->callback == TraceWeakMappingChild); |
|
140 void* thing = *thingp; |
|
141 NoteWeakMapChildrenTracer* tracer = |
|
142 static_cast<NoteWeakMapChildrenTracer*>(trc); |
|
143 |
|
144 if (kind == JSTRACE_STRING) { |
|
145 return; |
|
146 } |
|
147 |
|
148 if (!xpc_IsGrayGCThing(thing) && !tracer->mCb.WantAllTraces()) { |
|
149 return; |
|
150 } |
|
151 |
|
152 if (AddToCCKind(kind)) { |
|
153 tracer->mCb.NoteWeakMapping(tracer->mMap, tracer->mKey, tracer->mKeyDelegate, thing); |
|
154 tracer->mTracedAny = true; |
|
155 } else { |
|
156 JS_TraceChildren(trc, thing, kind); |
|
157 } |
|
158 } |
|
159 |
|
// Weak-map tracer that reports weak-mapping entries to the cycle collector;
// owns a child tracer for walking the values of entries that are not
// themselves representable in the CC graph.
struct NoteWeakMapsTracer : public js::WeakMapTracer
{
  NoteWeakMapsTracer(JSRuntime* rt, js::WeakMapTraceCallback cb,
                     nsCycleCollectionNoteRootCallback& cccb)
    : js::WeakMapTracer(rt, cb), mCb(cccb), mChildTracer(rt, cccb)
  {
  }
  nsCycleCollectionNoteRootCallback& mCb;
  NoteWeakMapChildrenTracer mChildTracer;  // Used by TraceWeakMapping for non-CC-kind values.
};
|
170 |
|
// Per-entry callback for NoteWeakMapsTracer: reports a weak-map entry
// (map, key, key-delegate, value) to the cycle collector, skipping entries
// that cannot keep anything gray alive.
static void
TraceWeakMapping(js::WeakMapTracer* trc, JSObject* m,
                 void* k, JSGCTraceKind kkind,
                 void* v, JSGCTraceKind vkind)
{
  MOZ_ASSERT(trc->callback == TraceWeakMapping);
  NoteWeakMapsTracer* tracer = static_cast<NoteWeakMapsTracer* >(trc);

  // If nothing that could be held alive by this entry is marked gray, return.
  if ((!k || !xpc_IsGrayGCThing(k)) && MOZ_LIKELY(!tracer->mCb.WantAllTraces())) {
    if (!v || !xpc_IsGrayGCThing(v) || vkind == JSTRACE_STRING) {
      return;
    }
  }

  // The cycle collector can only properly reason about weak maps if it can
  // reason about the liveness of their keys, which in turn requires that
  // the key can be represented in the cycle collector graph. All existing
  // uses of weak maps use either objects or scripts as keys, which are okay.
  MOZ_ASSERT(AddToCCKind(kkind));

  // As an emergency fallback for non-debug builds, if the key is not
  // representable in the cycle collector graph, we treat it as marked. This
  // can cause leaks, but is preferable to ignoring the binding, which could
  // cause the cycle collector to free live objects.
  if (!AddToCCKind(kkind)) {
    k = nullptr;
  }

  // An object key may have a delegate that, if alive, keeps the key alive.
  JSObject* kdelegate = nullptr;
  if (k && kkind == JSTRACE_OBJECT) {
    kdelegate = js::GetWeakmapKeyDelegate((JSObject*)k);
  }

  if (AddToCCKind(vkind)) {
    // The value itself can be a CC graph node: report the edge directly.
    tracer->mCb.NoteWeakMapping(m, k, kdelegate, v);
  } else {
    // Otherwise walk the value's children and report each one, tagging the
    // child tracer with this entry's map/key/delegate first.
    tracer->mChildTracer.mTracedAny = false;
    tracer->mChildTracer.mMap = m;
    tracer->mChildTracer.mKey = k;
    tracer->mChildTracer.mKeyDelegate = kdelegate;

    if (v && vkind != JSTRACE_STRING) {
      JS_TraceChildren(&tracer->mChildTracer, v, vkind);
    }

    // The delegate could hold alive the key, so report something to the CC
    // if we haven't already.
    if (!tracer->mChildTracer.mTracedAny && k && xpc_IsGrayGCThing(k) && kdelegate) {
      tracer->mCb.NoteWeakMapping(m, k, kdelegate, nullptr);
    }
  }
}
|
224 |
|
225 // This is based on the logic in TraceWeakMapping. |
|
226 struct FixWeakMappingGrayBitsTracer : public js::WeakMapTracer |
|
227 { |
|
228 FixWeakMappingGrayBitsTracer(JSRuntime* rt) |
|
229 : js::WeakMapTracer(rt, FixWeakMappingGrayBits) |
|
230 {} |
|
231 |
|
232 void |
|
233 FixAll() |
|
234 { |
|
235 do { |
|
236 mAnyMarked = false; |
|
237 js::TraceWeakMaps(this); |
|
238 } while (mAnyMarked); |
|
239 } |
|
240 |
|
241 private: |
|
242 |
|
243 static void |
|
244 FixWeakMappingGrayBits(js::WeakMapTracer* trc, JSObject* m, |
|
245 void* k, JSGCTraceKind kkind, |
|
246 void* v, JSGCTraceKind vkind) |
|
247 { |
|
248 MOZ_ASSERT(!JS::IsIncrementalGCInProgress(trc->runtime), |
|
249 "Don't call FixWeakMappingGrayBits during a GC."); |
|
250 |
|
251 FixWeakMappingGrayBitsTracer* tracer = static_cast<FixWeakMappingGrayBitsTracer*>(trc); |
|
252 |
|
253 // If nothing that could be held alive by this entry is marked gray, return. |
|
254 bool delegateMightNeedMarking = k && xpc_IsGrayGCThing(k); |
|
255 bool valueMightNeedMarking = v && xpc_IsGrayGCThing(v) && vkind != JSTRACE_STRING; |
|
256 if (!delegateMightNeedMarking && !valueMightNeedMarking) { |
|
257 return; |
|
258 } |
|
259 |
|
260 if (!AddToCCKind(kkind)) { |
|
261 k = nullptr; |
|
262 } |
|
263 |
|
264 if (delegateMightNeedMarking && kkind == JSTRACE_OBJECT) { |
|
265 JSObject* kdelegate = js::GetWeakmapKeyDelegate((JSObject*)k); |
|
266 if (kdelegate && !xpc_IsGrayGCThing(kdelegate)) { |
|
267 if (JS::UnmarkGrayGCThingRecursively(k, JSTRACE_OBJECT)) { |
|
268 tracer->mAnyMarked = true; |
|
269 } |
|
270 } |
|
271 } |
|
272 |
|
273 if (v && xpc_IsGrayGCThing(v) && |
|
274 (!k || !xpc_IsGrayGCThing(k)) && |
|
275 (!m || !xpc_IsGrayGCThing(m)) && |
|
276 vkind != JSTRACE_SHAPE) { |
|
277 if (JS::UnmarkGrayGCThingRecursively(v, vkind)) { |
|
278 tracer->mAnyMarked = true; |
|
279 } |
|
280 } |
|
281 } |
|
282 |
|
283 bool mAnyMarked; |
|
284 }; |
|
285 |
|
286 struct Closure |
|
287 { |
|
288 Closure(nsCycleCollectionNoteRootCallback* aCb) |
|
289 : mCycleCollectionEnabled(true), mCb(aCb) |
|
290 { |
|
291 } |
|
292 |
|
293 bool mCycleCollectionEnabled; |
|
294 nsCycleCollectionNoteRootCallback* mCb; |
|
295 }; |
|
296 |
|
297 static void |
|
298 CheckParticipatesInCycleCollection(void* aThing, const char* aName, void* aClosure) |
|
299 { |
|
300 Closure* closure = static_cast<Closure*>(aClosure); |
|
301 |
|
302 if (closure->mCycleCollectionEnabled) { |
|
303 return; |
|
304 } |
|
305 |
|
306 if (AddToCCKind(js::GCThingTraceKind(aThing)) && |
|
307 xpc_IsGrayGCThing(aThing)) |
|
308 { |
|
309 closure->mCycleCollectionEnabled = true; |
|
310 } |
|
311 } |
|
312 |
|
313 static PLDHashOperator |
|
314 NoteJSHolder(void *holder, nsScriptObjectTracer *&tracer, void *arg) |
|
315 { |
|
316 Closure *closure = static_cast<Closure*>(arg); |
|
317 |
|
318 bool noteRoot; |
|
319 if (MOZ_UNLIKELY(closure->mCb->WantAllTraces())) { |
|
320 noteRoot = true; |
|
321 } else { |
|
322 closure->mCycleCollectionEnabled = false; |
|
323 tracer->Trace(holder, TraceCallbackFunc(CheckParticipatesInCycleCollection), closure); |
|
324 noteRoot = closure->mCycleCollectionEnabled; |
|
325 } |
|
326 |
|
327 if (noteRoot) { |
|
328 closure->mCb->NoteNativeRoot(holder, tracer); |
|
329 } |
|
330 |
|
331 return PL_DHASH_NEXT; |
|
332 } |
|
333 |
|
// CC participant entry point for individual GC things. The participant is
// embedded in CycleCollectedJSRuntime, so we recover the runtime pointer by
// subtracting the member's offset from |this|.
NS_IMETHODIMP
JSGCThingParticipant::Traverse(void* p, nsCycleCollectionTraversalCallback& cb)
{
  CycleCollectedJSRuntime* runtime = reinterpret_cast<CycleCollectedJSRuntime*>
    (reinterpret_cast<char*>(this) -
     offsetof(CycleCollectedJSRuntime, mGCThingCycleCollectorGlobal));

  runtime->TraverseGCThing(CycleCollectedJSRuntime::TRAVERSE_FULL,
                           p, js::GCThingTraceKind(p), cb);
  return NS_OK;
}
|
345 |
|
// NB: This is only used to initialize the participant in
// CycleCollectedJSRuntime. It should never be used directly.
// (Traverse() relies on the copy being embedded in the runtime; see above.)
static JSGCThingParticipant sGCThingCycleCollectorGlobal;
|
349 |
|
// CC participant entry point for merged zones. Same embedded-member offset
// trick as JSGCThingParticipant::Traverse, but against
// mJSZoneCycleCollectorGlobal.
NS_IMETHODIMP
JSZoneParticipant::Traverse(void* p, nsCycleCollectionTraversalCallback& cb)
{
  CycleCollectedJSRuntime* runtime = reinterpret_cast<CycleCollectedJSRuntime*>
    (reinterpret_cast<char*>(this) -
     offsetof(CycleCollectedJSRuntime, mJSZoneCycleCollectorGlobal));

  // Zone merging is never used in a WantAllTraces collection; see the big
  // comment above sJSZoneCycleCollectorGlobal.
  MOZ_ASSERT(!cb.WantAllTraces());
  JS::Zone* zone = static_cast<JS::Zone*>(p);

  runtime->TraverseZone(zone, cb);
  return NS_OK;
}
|
363 |
|
364 static void |
|
365 NoteJSChildTracerShim(JSTracer* aTrc, void** aThingp, JSGCTraceKind aTraceKind); |
|
366 |
|
// Tracer used while traversing a GC thing (or zone) for the cycle collector:
// forwards each child to the traversal callback. Weak maps are handled
// separately (DoNotTraceWeakMaps), via NoteWeakMapsTracer.
struct TraversalTracer : public JSTracer
{
  TraversalTracer(JSRuntime *rt, nsCycleCollectionTraversalCallback& aCb)
    : JSTracer(rt, NoteJSChildTracerShim, DoNotTraceWeakMaps), mCb(aCb)
  {
  }
  nsCycleCollectionTraversalCallback& mCb;
};
|
375 |
|
// Reports one traced child to the CC traversal callback: CC-representable
// kinds become graph edges; other kinds are traced through.
static void
NoteJSChild(JSTracer* aTrc, void* aThing, JSGCTraceKind aTraceKind)
{
  TraversalTracer* tracer = static_cast<TraversalTracer*>(aTrc);

  // Don't traverse non-gray objects, unless we want all traces.
  if (!xpc_IsGrayGCThing(aThing) && !tracer->mCb.WantAllTraces()) {
    return;
  }

  /*
   * This function needs to be careful to avoid stack overflow. Normally, when
   * AddToCCKind is true, the recursion terminates immediately as we just add
   * |thing| to the CC graph. So overflow is only possible when there are long
   * chains of non-AddToCCKind GC things. Currently, this only can happen via
   * shape parent pointers. The special JSTRACE_SHAPE case below handles
   * parent pointers iteratively, rather than recursively, to avoid overflow.
   */
  if (AddToCCKind(aTraceKind)) {
    // Optionally name the edge for CC logs before noting the child.
    if (MOZ_UNLIKELY(tracer->mCb.WantDebugInfo())) {
      // based on DumpNotify in jsapi.cpp
      if (tracer->debugPrinter()) {
        char buffer[200];
        tracer->debugPrinter()(aTrc, buffer, sizeof(buffer));
        tracer->mCb.NoteNextEdgeName(buffer);
      } else if (tracer->debugPrintIndex() != (size_t)-1) {
        char buffer[200];
        JS_snprintf(buffer, sizeof(buffer), "%s[%lu]",
                    static_cast<const char *>(tracer->debugPrintArg()),
                    tracer->debugPrintIndex());
        tracer->mCb.NoteNextEdgeName(buffer);
      } else {
        tracer->mCb.NoteNextEdgeName(static_cast<const char*>(tracer->debugPrintArg()));
      }
    }
    tracer->mCb.NoteJSChild(aThing);
  } else if (aTraceKind == JSTRACE_SHAPE) {
    // Iterative walk of the shape parent chain; see overflow note above.
    JS_TraceShapeCycleCollectorChildren(aTrc, aThing);
  } else if (aTraceKind != JSTRACE_STRING) {
    // Strings hold nothing of interest to the CC; everything else recurses.
    JS_TraceChildren(aTrc, aThing, aTraceKind);
  }
}
|
418 |
|
419 static void |
|
420 NoteJSChildTracerShim(JSTracer* aTrc, void** aThingp, JSGCTraceKind aTraceKind) |
|
421 { |
|
422 NoteJSChild(aTrc, *aThingp, aTraceKind); |
|
423 } |
|
424 |
|
425 static void |
|
426 NoteJSChildGrayWrapperShim(void* aData, void* aThing) |
|
427 { |
|
428 TraversalTracer* trc = static_cast<TraversalTracer*>(aData); |
|
429 NoteJSChild(trc, aThing, js::GCThingTraceKind(aThing)); |
|
430 } |
|
431 |
|
432 /* |
|
433 * The cycle collection participant for a Zone is intended to produce the same |
|
434 * results as if all of the gray GCthings in a zone were merged into a single node, |
|
435 * except for self-edges. This avoids the overhead of representing all of the GCthings in |
|
436 * the zone in the cycle collector graph, which should be much faster if many of |
|
437 * the GCthings in the zone are gray. |
|
438 * |
|
439 * Zone merging should not always be used, because it is a conservative |
|
440 * approximation of the true cycle collector graph that can incorrectly identify some |
|
441 * garbage objects as being live. For instance, consider two cycles that pass through a |
|
442 * zone, where one is garbage and the other is live. If we merge the entire |
|
443 * zone, the cycle collector will think that both are alive. |
|
444 * |
|
445 * We don't have to worry about losing track of a garbage cycle, because any such garbage |
|
446 * cycle incorrectly identified as live must contain at least one C++ to JS edge, and |
|
447 * XPConnect will always add the C++ object to the CC graph. (This is in contrast to pure |
|
448 * C++ garbage cycles, which must always be properly identified, because we clear the |
|
449 * purple buffer during every CC, which may contain the last reference to a garbage |
|
450 * cycle.) |
|
451 */ |
|
452 |
|
// NB: This is only used to initialize the participant in
// CycleCollectedJSRuntime. It should never be used directly.
// (JSZoneParticipant::Traverse relies on the embedded copy; see above.)
static const JSZoneParticipant sJSZoneCycleCollectorGlobal;
|
456 |
|
// Creates the underlying JSRuntime, wires up all GC/CC callbacks, and
// registers this runtime with the cycle collector. Crashes (rather than
// limping along) if runtime creation fails.
CycleCollectedJSRuntime::CycleCollectedJSRuntime(JSRuntime* aParentRuntime,
                                                 uint32_t aMaxbytes,
                                                 JSUseHelperThreads aUseHelperThreads)
  : mGCThingCycleCollectorGlobal(sGCThingCycleCollectorGlobal),
    mJSZoneCycleCollectorGlobal(sJSZoneCycleCollectorGlobal),
    mJSRuntime(nullptr),
    mJSHolders(512)
{
  mozilla::dom::InitScriptSettings();

  mJSRuntime = JS_NewRuntime(aMaxbytes, aUseHelperThreads, aParentRuntime);
  if (!mJSRuntime) {
    MOZ_CRASH();
  }

  // Category-2 (black) roots; see the file comment for the categories.
  if (!JS_AddExtraGCRootsTracer(mJSRuntime, TraceBlackJS, this)) {
    MOZ_CRASH();
  }
  // Category-3 (gray) roots.
  JS_SetGrayGCRootsTracer(mJSRuntime, TraceGrayJS, this);
  JS_SetGCCallback(mJSRuntime, GCCallback, this);
  JS_SetContextCallback(mJSRuntime, ContextCallback, this);
  JS_SetDestroyZoneCallback(mJSRuntime, XPCStringConvert::FreeZoneCache);
  JS_SetSweepZoneCallback(mJSRuntime, XPCStringConvert::ClearZoneCache);

  nsCycleCollector_registerJSRuntime(this);
}
|
483 |
|
// Tears down in the reverse of construction order; all deferred finalization
// must already have completed (asserted below).
CycleCollectedJSRuntime::~CycleCollectedJSRuntime()
{
  MOZ_ASSERT(mJSRuntime);
  MOZ_ASSERT(!mDeferredFinalizerTable.Count());
  MOZ_ASSERT(!mDeferredSupports.Length());

  // Clear mPendingException first, since it might be cycle collected.
  mPendingException = nullptr;

  // Destroy the runtime before unregistering from the cycle collector.
  JS_DestroyRuntime(mJSRuntime);
  mJSRuntime = nullptr;
  nsCycleCollector_forgetJSRuntime();

  mozilla::dom::DestroyScriptSettings();
}
|
499 |
|
500 size_t |
|
501 CycleCollectedJSRuntime::SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const |
|
502 { |
|
503 size_t n = 0; |
|
504 |
|
505 // nullptr for the second arg; we're not measuring anything hanging off the |
|
506 // entries in mJSHolders. |
|
507 n += mJSHolders.SizeOfExcludingThis(nullptr, aMallocSizeOf); |
|
508 |
|
509 return n; |
|
510 } |
|
511 |
|
// Enumerator: tells one holder's tracer that the holder may be skipped
// (removed from consideration) in the next CC; part of forget-skippable.
static PLDHashOperator
UnmarkJSHolder(void* holder, nsScriptObjectTracer*& tracer, void* arg)
{
  tracer->CanSkip(holder, true);
  return PL_DHASH_NEXT;
}
|
518 |
|
// Runs the CanSkip check over every registered JS holder; holders that can
// prove themselves alive drop out of the next cycle collection.
void
CycleCollectedJSRuntime::UnmarkSkippableJSHolders()
{
  mJSHolders.Enumerate(UnmarkJSHolder, nullptr);
}
|
524 |
|
// Produces the human-readable node description for a GC thing in the CC
// graph. Cheap generic label unless the callback wants debug info, in which
// case we build a class/function-specific name.
void
CycleCollectedJSRuntime::DescribeGCThing(bool aIsMarked, void* aThing,
                                         JSGCTraceKind aTraceKind,
                                         nsCycleCollectionTraversalCallback& aCb) const
{
  if (!aCb.WantDebugInfo()) {
    aCb.DescribeGCedNode(aIsMarked, "JS Object");
    return;
  }

  char name[72];
  uint64_t compartmentAddress = 0;
  if (aTraceKind == JSTRACE_OBJECT) {
    JSObject* obj = static_cast<JSObject*>(aThing);
    compartmentAddress = (uint64_t)js::GetObjectCompartment(obj);
    const js::Class* clasp = js::GetObjectClass(obj);

    // Give the subclass a chance to do something
    if (DescribeCustomObjects(obj, clasp, name)) {
      // Nothing else to do!
    } else if (js::IsFunctionObject(obj)) {
      // Name functions after their display id when one exists.
      JSFunction* fun = JS_GetObjectFunction(obj);
      JSString* str = JS_GetFunctionDisplayId(fun);
      if (str) {
        NS_ConvertUTF16toUTF8 fname(JS_GetInternedStringChars(str));
        JS_snprintf(name, sizeof(name),
                    "JS Object (Function - %s)", fname.get());
      } else {
        JS_snprintf(name, sizeof(name), "JS Object (Function)");
      }
    } else {
      JS_snprintf(name, sizeof(name), "JS Object (%s)",
                  clasp->name);
    }
  } else {
    // Non-object kinds are labeled by their trace kind.
    static const char trace_types[][11] = {
      "Object",
      "String",
      "Script",
      "LazyScript",
      "IonCode",
      "Shape",
      "BaseShape",
      "TypeObject",
    };
    static_assert(MOZ_ARRAY_LENGTH(trace_types) == JSTRACE_LAST + 1,
                  "JSTRACE_LAST enum must match trace_types count.");
    JS_snprintf(name, sizeof(name), "JS %s", trace_types[aTraceKind]);
  }

  // Disable printing global for objects while we figure out ObjShrink fallout.
  aCb.DescribeGCedNode(aIsMarked, name, compartmentAddress);
}
|
578 |
|
// Reports every JS child of aThing to the traversal callback by running a
// TraversalTracer over it (weak maps excluded; they are handled separately).
void
CycleCollectedJSRuntime::NoteGCThingJSChildren(void* aThing,
                                               JSGCTraceKind aTraceKind,
                                               nsCycleCollectionTraversalCallback& aCb) const
{
  MOZ_ASSERT(mJSRuntime);
  TraversalTracer trc(mJSRuntime, aCb);
  JS_TraceChildren(&trc, aThing, aTraceKind);
}
|
588 |
|
// Reports the C++ (XPCOM or CC-participating native) object held by a JS
// object, if any: subclass hook first, then the nsISupports private, then
// DOM bindings' wrapped native.
void
CycleCollectedJSRuntime::NoteGCThingXPCOMChildren(const js::Class* aClasp, JSObject* aObj,
                                                  nsCycleCollectionTraversalCallback& aCb) const
{
  MOZ_ASSERT(aClasp);
  MOZ_ASSERT(aClasp == js::GetObjectClass(aObj));

  if (NoteCustomGCThingXPCOMChildren(aClasp, aObj, aCb)) {
    // Nothing else to do!
    return;
  }
  // XXX This test does seem fragile, we should probably whitelist classes
  // that do hold a strong reference, but that might not be possible.
  else if (aClasp->flags & JSCLASS_HAS_PRIVATE &&
           aClasp->flags & JSCLASS_PRIVATE_IS_NSISUPPORTS) {
    NS_CYCLE_COLLECTION_NOTE_EDGE_NAME(aCb, "js::GetObjectPrivate(obj)");
    aCb.NoteXPCOMChild(static_cast<nsISupports*>(js::GetObjectPrivate(aObj)));
  } else {
    // DOM binding objects stash their native in a reserved slot rather than
    // the private; note it via the appropriate participant.
    const DOMClass* domClass = GetDOMClass(aObj);
    if (domClass) {
      NS_CYCLE_COLLECTION_NOTE_EDGE_NAME(aCb, "UnwrapDOMObject(obj)");
      if (domClass->mDOMObjectIsISupports) {
        aCb.NoteXPCOMChild(UnwrapDOMObject<nsISupports>(aObj));
      } else if (domClass->mParticipant) {
        aCb.NoteNativeChild(UnwrapDOMObject<void>(aObj),
                            domClass->mParticipant);
      }
    }
  }
}
|
619 |
|
// Traverses one GC thing for the cycle collector. TRAVERSE_FULL describes
// the node and notes both JS and C++ children; TRAVERSE_CPP (used by zone
// merging) notes only the C++ children.
void
CycleCollectedJSRuntime::TraverseGCThing(TraverseSelect aTs, void* aThing,
                                         JSGCTraceKind aTraceKind,
                                         nsCycleCollectionTraversalCallback& aCb)
{
  MOZ_ASSERT(aTraceKind == js::GCThingTraceKind(aThing));
  bool isMarkedGray = xpc_IsGrayGCThing(aThing);

  if (aTs == TRAVERSE_FULL) {
    DescribeGCThing(!isMarkedGray, aThing, aTraceKind, aCb);
  }

  // If this object is alive, then all of its children are alive. For JS objects,
  // the black-gray invariant ensures the children are also marked black. For C++
  // objects, the ref count from this object will keep them alive. Thus we don't
  // need to trace our children, unless we are debugging using WantAllTraces.
  if (!isMarkedGray && !aCb.WantAllTraces()) {
    return;
  }

  if (aTs == TRAVERSE_FULL) {
    NoteGCThingJSChildren(aThing, aTraceKind, aCb);
  }

  // Only JS objects can hold C++ children.
  if (aTraceKind == JSTRACE_OBJECT) {
    JSObject* obj = static_cast<JSObject*>(aThing);
    NoteGCThingXPCOMChildren(js::GetObjectClass(obj), obj, aCb);
  }
}
|
649 |
|
// Bundles the traversal callback and the runtime for TraverseObjectShim,
// which only receives a single void* of context.
struct TraverseObjectShimClosure {
  nsCycleCollectionTraversalCallback& cb;
  CycleCollectedJSRuntime* self;
};
|
654 |
|
// Traverses a whole zone as a single merged CC node; see the block comment
// above sJSZoneCycleCollectorGlobal for when merging is (and isn't) valid.
void
CycleCollectedJSRuntime::TraverseZone(JS::Zone* aZone,
                                      nsCycleCollectionTraversalCallback& aCb)
{
  /*
   * We treat the zone as being gray. We handle non-gray GCthings in the
   * zone by not reporting their children to the CC. The black-gray invariant
   * ensures that any JS children will also be non-gray, and thus don't need to be
   * added to the graph. For C++ children, not representing the edge from the
   * non-gray JS GCthings to the C++ object will keep the child alive.
   *
   * We don't allow zone merging in a WantAllTraces CC, because then these
   * assumptions don't hold.
   */
  aCb.DescribeGCedNode(false, "JS Zone");

  /*
   * Every JS child of everything in the zone is either in the zone
   * or is a cross-compartment wrapper. In the former case, we don't need to
   * represent these edges in the CC graph because JS objects are not ref counted.
   * In the latter case, the JS engine keeps a map of these wrappers, which we
   * iterate over. Edges between compartments in the same zone will add
   * unnecessary loop edges to the graph (bug 842137).
   */
  TraversalTracer trc(mJSRuntime, aCb);
  js::VisitGrayWrapperTargets(aZone, NoteJSChildGrayWrapperShim, &trc);

  /*
   * To find C++ children of things in the zone, we scan every JS Object in
   * the zone. Only JS Objects can have C++ children.
   */
  // TraverseObjectShim unpacks this closure; it gets only a void* of context.
  TraverseObjectShimClosure closure = { aCb, this };
  js::IterateGrayObjects(aZone, TraverseObjectShim, &closure);
}
|
689 |
|
690 /* static */ void |
|
691 CycleCollectedJSRuntime::TraverseObjectShim(void* aData, void* aThing) |
|
692 { |
|
693 TraverseObjectShimClosure* closure = |
|
694 static_cast<TraverseObjectShimClosure*>(aData); |
|
695 |
|
696 MOZ_ASSERT(js::GCThingTraceKind(aThing) == JSTRACE_OBJECT); |
|
697 closure->self->TraverseGCThing(CycleCollectedJSRuntime::TRAVERSE_CPP, aThing, |
|
698 JSTRACE_OBJECT, closure->cb); |
|
699 } |
|
700 |
|
// Reports category-3 native roots (JS holders plus any subclass extras) to
// the cycle collector.
void
CycleCollectedJSRuntime::TraverseNativeRoots(nsCycleCollectionNoteRootCallback& aCb)
{
  // NB: This is here just to preserve the existing XPConnect order. I doubt it
  // would hurt to do this after the JS holders.
  TraverseAdditionalNativeRoots(aCb);

  Closure closure(&aCb);
  mJSHolders.Enumerate(NoteJSHolder, &closure);
}
|
711 |
|
712 /* static */ void |
|
713 CycleCollectedJSRuntime::TraceBlackJS(JSTracer* aTracer, void* aData) |
|
714 { |
|
715 CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData); |
|
716 |
|
717 self->TraceNativeBlackRoots(aTracer); |
|
718 } |
|
719 |
|
720 /* static */ void |
|
721 CycleCollectedJSRuntime::TraceGrayJS(JSTracer* aTracer, void* aData) |
|
722 { |
|
723 CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData); |
|
724 |
|
725 // Mark these roots as gray so the CC can walk them later. |
|
726 self->TraceNativeGrayRoots(aTracer); |
|
727 } |
|
728 |
|
729 /* static */ void |
|
730 CycleCollectedJSRuntime::GCCallback(JSRuntime* aRuntime, |
|
731 JSGCStatus aStatus, |
|
732 void* aData) |
|
733 { |
|
734 CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData); |
|
735 |
|
736 MOZ_ASSERT(aRuntime == self->Runtime()); |
|
737 |
|
738 self->OnGC(aStatus); |
|
739 } |
|
740 |
|
741 /* static */ bool |
|
742 CycleCollectedJSRuntime::ContextCallback(JSContext* aContext, |
|
743 unsigned aOperation, |
|
744 void* aData) |
|
745 { |
|
746 CycleCollectedJSRuntime* self = static_cast<CycleCollectedJSRuntime*>(aData); |
|
747 |
|
748 MOZ_ASSERT(JS_GetRuntime(aContext) == self->Runtime()); |
|
749 |
|
750 return self->CustomContextCallback(aContext, aOperation); |
|
751 } |
|
752 |
|
// TraceCallbacks implementation that forwards each held GC thing to a
// JSTracer (passed as the closure); used to trace JS holders during GC.
struct JsGcTracer : public TraceCallbacks
{
  virtual void Trace(JS::Heap<JS::Value> *p, const char *name, void *closure) const MOZ_OVERRIDE {
    JS_CallHeapValueTracer(static_cast<JSTracer*>(closure), p, name);
  }
  virtual void Trace(JS::Heap<jsid> *p, const char *name, void *closure) const MOZ_OVERRIDE {
    JS_CallHeapIdTracer(static_cast<JSTracer*>(closure), p, name);
  }
  virtual void Trace(JS::Heap<JSObject *> *p, const char *name, void *closure) const MOZ_OVERRIDE {
    JS_CallHeapObjectTracer(static_cast<JSTracer*>(closure), p, name);
  }
  virtual void Trace(JS::TenuredHeap<JSObject *> *p, const char *name, void *closure) const MOZ_OVERRIDE {
    JS_CallTenuredObjectTracer(static_cast<JSTracer*>(closure), p, name);
  }
  virtual void Trace(JS::Heap<JSString *> *p, const char *name, void *closure) const MOZ_OVERRIDE {
    JS_CallHeapStringTracer(static_cast<JSTracer*>(closure), p, name);
  }
  virtual void Trace(JS::Heap<JSScript *> *p, const char *name, void *closure) const MOZ_OVERRIDE {
    JS_CallHeapScriptTracer(static_cast<JSTracer*>(closure), p, name);
  }
  virtual void Trace(JS::Heap<JSFunction *> *p, const char *name, void *closure) const MOZ_OVERRIDE {
    JS_CallHeapFunctionTracer(static_cast<JSTracer*>(closure), p, name);
  }
};
|
777 |
|
// Enumerator: traces one JS holder's GC things into the JSTracer passed as
// aArg (via JsGcTracer above).
static PLDHashOperator
TraceJSHolder(void* aHolder, nsScriptObjectTracer*& aTracer, void* aArg)
{
  aTracer->Trace(aHolder, JsGcTracer(), aArg);

  return PL_DHASH_NEXT;
}
|
785 |
|
// Traces category-3 (gray) native roots: subclass extras plus every
// registered JS holder. Called back from the JS GC via TraceGrayJS.
void
CycleCollectedJSRuntime::TraceNativeGrayRoots(JSTracer* aTracer)
{
  // NB: This is here just to preserve the existing XPConnect order. I doubt it
  // would hurt to do this after the JS holders.
  TraceAdditionalNativeGrayRoots(aTracer);

  mJSHolders.Enumerate(TraceJSHolder, aTracer);
}
|
795 |
|
// Registers (or re-registers) a native object whose JS references must be
// traced during GC; aTracer knows how to trace aHolder's JS members.
void
CycleCollectedJSRuntime::AddJSHolder(void* aHolder, nsScriptObjectTracer* aTracer)
{
  mJSHolders.Put(aHolder, aTracer);
}
|
801 |
|
// TraceCallbacks implementation that nulls out every JS reference a holder
// owns; used by RemoveJSHolder so a dying holder drops its GC things.
struct ClearJSHolder : TraceCallbacks
{
  virtual void Trace(JS::Heap<JS::Value>* aPtr, const char*, void*) const MOZ_OVERRIDE
  {
    *aPtr = JSVAL_VOID;
  }

  virtual void Trace(JS::Heap<jsid>* aPtr, const char*, void*) const MOZ_OVERRIDE
  {
    *aPtr = JSID_VOID;
  }

  virtual void Trace(JS::Heap<JSObject*>* aPtr, const char*, void*) const MOZ_OVERRIDE
  {
    *aPtr = nullptr;
  }

  virtual void Trace(JS::TenuredHeap<JSObject*>* aPtr, const char*, void*) const MOZ_OVERRIDE
  {
    *aPtr = nullptr;
  }

  virtual void Trace(JS::Heap<JSString*>* aPtr, const char*, void*) const MOZ_OVERRIDE
  {
    *aPtr = nullptr;
  }

  virtual void Trace(JS::Heap<JSScript*>* aPtr, const char*, void*) const MOZ_OVERRIDE
  {
    *aPtr = nullptr;
  }

  virtual void Trace(JS::Heap<JSFunction*>* aPtr, const char*, void*) const MOZ_OVERRIDE
  {
    *aPtr = nullptr;
  }
};
|
839 |
|
// Unregister a JS holder.  Its JS references are nulled out first so nothing
// stale survives after the holder leaves the table.  No-op if aHolder was
// never registered.
void
CycleCollectedJSRuntime::RemoveJSHolder(void* aHolder)
{
  nsScriptObjectTracer* tracer = mJSHolders.Get(aHolder);
  if (!tracer) {
    // Not a registered holder (or already removed); nothing to do.
    return;
  }
  // Clear all of the holder's JS references before dropping it.
  tracer->Trace(aHolder, ClearJSHolder(), nullptr);
  mJSHolders.Remove(aHolder);
}
|
850 |
|
851 #ifdef DEBUG |
|
// Debug-only: true if aHolder is currently registered in mJSHolders.
bool
CycleCollectedJSRuntime::IsJSHolder(void* aHolder)
{
  return mJSHolders.Get(aHolder, nullptr);
}
|
857 |
|
// Trace callback that asserts the traced slot holds no GC thing.  aName and
// aClosure are required by the TraceCallbackFunc signature but unused here.
static void
AssertNoGcThing(void* aGCThing, const char* aName, void* aClosure)
{
  MOZ_ASSERT(!aGCThing);
}
|
863 |
|
// Debug-only: if aPossibleJSHolder is a registered holder, assert that all of
// its traced JS references have been cleared.
void
CycleCollectedJSRuntime::AssertNoObjectsToTrace(void* aPossibleJSHolder)
{
  nsScriptObjectTracer* tracer = mJSHolders.Get(aPossibleJSHolder);
  if (tracer) {
    tracer->Trace(aPossibleJSHolder, TraceCallbackFunc(AssertNoGcThing), nullptr);
  }
}
|
872 #endif |
|
873 |
|
874 already_AddRefed<nsIException> |
|
875 CycleCollectedJSRuntime::GetPendingException() const |
|
876 { |
|
877 nsCOMPtr<nsIException> out = mPendingException; |
|
878 return out.forget(); |
|
879 } |
|
880 |
|
// Set (or clear, with nullptr) the pending exception for this runtime.
void
CycleCollectedJSRuntime::SetPendingException(nsIException* aException)
{
  mPendingException = aException;
}
|
886 |
|
// Cycle collector participant used for individual JS GC things.
nsCycleCollectionParticipant*
CycleCollectedJSRuntime::GCThingParticipant()
{
  return &mGCThingCycleCollectorGlobal;
}
|
892 |
|
// Cycle collector participant used for whole JS zones (when zones are
// traversed as single CC nodes rather than per GC thing).
nsCycleCollectionParticipant*
CycleCollectedJSRuntime::ZoneParticipant()
{
  return &mJSZoneCycleCollectorGlobal;
}
|
898 |
|
// Report the runtime's roots to the cycle collector: native roots first, then
// edges discovered by walking the JS engine's weak maps.
nsresult
CycleCollectedJSRuntime::TraverseRoots(nsCycleCollectionNoteRootCallback &aCb)
{
  TraverseNativeRoots(aCb);

  // Walk all weak maps and note their entries via aCb.
  NoteWeakMapsTracer trc(mJSRuntime, TraceWeakMapping, aCb);
  js::TraceWeakMaps(&trc);

  return NS_OK;
}
|
909 |
|
/*
 * Return true if there exists a JSContext with a default global whose current
 * inner window is gray.  The intent is to look for JS Object windows.  We
 * don't merge system compartments, so we don't use them to trigger merging
 * CCs.
 */
bool
CycleCollectedJSRuntime::UsefulToMergeZones() const
{
  // Window globals only exist on the main thread; elsewhere merging never
  // helps.
  if (!NS_IsMainThread()) {
    return false;
  }

  JSContext* iter = nullptr;
  JSContext* cx;
  JSAutoRequest ar(nsContentUtils::GetSafeJSContext());
  while ((cx = JS_ContextIterator(mJSRuntime, &iter))) {
    // Skip anything without an nsIScriptContext.
    nsIScriptContext* scx = GetScriptContextFromJSContext(cx);
    JS::RootedObject obj(cx, scx ? scx->GetWindowProxyPreserveColor() : nullptr);
    if (!obj) {
      continue;
    }
    MOZ_ASSERT(js::IsOuterObject(obj));
    // Grab the inner from the outer.
    obj = JS_ObjectToInnerObject(cx, obj);
    MOZ_ASSERT(!js::GetObjectParent(obj));
    // A gray inner window in a non-system compartment means content is only
    // reachable through gray edges, so merging zones is likely worthwhile.
    if (JS::GCThingIsMarkedGray(obj) &&
        !js::IsSystemCompartment(js::GetObjectCompartment(obj))) {
      return true;
    }
  }
  return false;
}
|
943 |
|
// Run the FixWeakMappingGrayBitsTracer over all weak maps to repair any
// inconsistent gray marking before the cycle collector looks at them.
void
CycleCollectedJSRuntime::FixWeakMappingGrayBits() const
{
  FixWeakMappingGrayBitsTracer fixer(mJSRuntime);
  fixer.FixAll();
}
|
950 |
|
// True if a GC is needed before cycle collection: the GC gray bits are no
// longer valid, so gray marking must be redone first.
bool
CycleCollectedJSRuntime::NeedCollect() const
{
  return !js::AreGCGrayBitsValid(mJSRuntime);
}
|
956 |
|
// Trigger a full (all-zones) JS GC for the given reason code, which must be a
// valid JS::gcreason::Reason value.
void
CycleCollectedJSRuntime::Collect(uint32_t aReason) const
{
  MOZ_ASSERT(aReason < JS::gcreason::NUM_REASONS);
  JS::gcreason::Reason gcreason = static_cast<JS::gcreason::Reason>(aReason);

  JS::PrepareForFullGC(mJSRuntime);
  JS::GCForReason(mJSRuntime, gcreason);
}
|
966 |
|
// Queue aThing for deferred finalization: aAppendFunc adds it to the per-aFunc
// array of pending things, and aFunc will later destroy the whole array
// (possibly incrementally) after GC.
void
CycleCollectedJSRuntime::DeferredFinalize(DeferredFinalizeAppendFunction aAppendFunc,
                                          DeferredFinalizeFunction aFunc,
                                          void* aThing)
{
  void* thingArray = nullptr;
  bool hadThingArray = mDeferredFinalizerTable.Get(aFunc, &thingArray);

  // NOTE(review): when an array already existed, aAppendFunc is assumed to
  // append in place and return the same pointer, since the table entry is not
  // updated in that case — confirm against the append-function implementations.
  thingArray = aAppendFunc(thingArray, aThing);
  if (!hadThingArray) {
    mDeferredFinalizerTable.Put(aFunc, thingArray);
  }
}
|
980 |
|
// Queue a single nsISupports for release after the next GC (drained by
// ReleaseSliceNow) instead of releasing it immediately.
void
CycleCollectedJSRuntime::DeferredFinalize(nsISupports* aSupports)
{
  mDeferredSupports.AppendElement(aSupports);
}
|
986 |
|
// Dump the complete JS heap to |file| (e.g. for leak diagnosis), collecting
// the nursery first so the dump contains only tenured things.
void
CycleCollectedJSRuntime::DumpJSHeap(FILE* file)
{
  js::DumpHeapComplete(Runtime(), file, js::CollectNurseryBeforeDump);
}
|
992 |
|
993 |
|
994 bool |
|
995 ReleaseSliceNow(uint32_t aSlice, void* aData) |
|
996 { |
|
997 MOZ_ASSERT(aSlice > 0, "nonsensical/useless call with slice == 0"); |
|
998 nsTArray<nsISupports*>* items = static_cast<nsTArray<nsISupports*>*>(aData); |
|
999 |
|
1000 uint32_t length = items->Length(); |
|
1001 aSlice = std::min(aSlice, length); |
|
1002 for (uint32_t i = length; i > length - aSlice; --i) { |
|
1003 // Remove (and NS_RELEASE) the last entry in "items": |
|
1004 uint32_t lastItemIdx = i - 1; |
|
1005 |
|
1006 nsISupports* wrapper = items->ElementAt(lastItemIdx); |
|
1007 items->RemoveElementAt(lastItemIdx); |
|
1008 NS_IF_RELEASE(wrapper); |
|
1009 } |
|
1010 |
|
1011 return items->IsEmpty(); |
|
1012 } |
|
1013 |
|
// Hashtable enumerator: move one (function, data) pair out of the deferred
// finalizer table into the DeferredFinalizeArray passed via aClosure.
// Returning PL_DHASH_REMOVE empties the table as it is enumerated.
/* static */ PLDHashOperator
IncrementalFinalizeRunnable::DeferredFinalizerEnumerator(DeferredFinalizeFunction& aFunction,
                                                         void*& aData,
                                                         void* aClosure)
{
  DeferredFinalizeArray* array = static_cast<DeferredFinalizeArray*>(aClosure);

  DeferredFinalizeFunctionHolder* function = array->AppendElement();
  function->run = aFunction;
  function->data = aData;

  return PL_DHASH_REMOVE;
}
|
1027 |
|
// Takes ownership of the pending supports array and the deferred finalizer
// table contents; both are left empty in the runtime afterwards.  The first
// queued function is always ReleaseSliceNow over the swapped-in supports.
IncrementalFinalizeRunnable::IncrementalFinalizeRunnable(CycleCollectedJSRuntime* aRt,
                                                         nsTArray<nsISupports*>& aSupports,
                                                         DeferredFinalizerTable& aFinalizers)
  : mRuntime(aRt),
    mFinalizeFunctionToRun(0)
{
  // Steal the supports array; ReleaseSliceNow will drain it incrementally.
  this->mSupports.SwapElements(aSupports);
  DeferredFinalizeFunctionHolder* function = mDeferredFinalizeFunctions.AppendElement();
  function->run = ReleaseSliceNow;
  function->data = &this->mSupports;

  // Enumerate the hashtable into our array.  The enumerator removes each
  // entry, leaving aFinalizers empty.
  aFinalizers.Enumerate(DeferredFinalizerEnumerator, &mDeferredFinalizeFunctions);
}
|
1042 |
|
IncrementalFinalizeRunnable::~IncrementalFinalizeRunnable()
{
  // ReleaseNow must have detached us from the runtime before we can die.
  MOZ_ASSERT(this != mRuntime->mFinalizeRunnable);
}
|
1047 |
|
// Run the queued deferred-finalize functions.  With aLimited, work is done in
// time slices of at most SliceMillis and stops early on timeout (resuming
// from mFinalizeFunctionToRun next call); otherwise everything runs to
// completion.  NB: may delete |this| when the last function finishes.
void
IncrementalFinalizeRunnable::ReleaseNow(bool aLimited)
{
  // NOTE(review): main-thread assertion is disabled — presumably because
  // non-main-thread runtimes also finalize here; confirm before re-enabling.
  //MOZ_ASSERT(NS_IsMainThread());
  MOZ_ASSERT(mDeferredFinalizeFunctions.Length() != 0,
             "We should have at least ReleaseSliceNow to run");
  MOZ_ASSERT(mFinalizeFunctionToRun < mDeferredFinalizeFunctions.Length(),
             "No more finalizers to run?");

  TimeDuration sliceTime = TimeDuration::FromMilliseconds(SliceMillis);
  TimeStamp started = TimeStamp::Now();
  bool timeout = false;
  do {
    const DeferredFinalizeFunctionHolder &function =
      mDeferredFinalizeFunctions[mFinalizeFunctionToRun];
    if (aLimited) {
      bool done = false;
      while (!timeout && !done) {
        /*
         * We don't want to read the clock too often, so we try to
         * release slices of 100 items.
         */
        done = function.run(100, function.data);
        timeout = TimeStamp::Now() - started >= sliceTime;
      }
      if (done) {
        ++mFinalizeFunctionToRun;
      }
      if (timeout) {
        // Slice budget exhausted; a later call resumes where we stopped.
        break;
      }
    } else {
      // Unlimited: run this finalizer to completion in one shot.
      function.run(UINT32_MAX, function.data);
      ++mFinalizeFunctionToRun;
    }
  } while (mFinalizeFunctionToRun < mDeferredFinalizeFunctions.Length());

  if (mFinalizeFunctionToRun == mDeferredFinalizeFunctions.Length()) {
    // All finalizers ran; detach from the runtime.
    MOZ_ASSERT(mRuntime->mFinalizeRunnable == this);
    mDeferredFinalizeFunctions.Clear();
    // NB: This may delete this!
    mRuntime->mFinalizeRunnable = nullptr;
  }
}
|
1092 |
|
// nsIRunnable entry point: perform one time-limited slice of deferred
// finalization and reschedule ourselves until everything has run.
NS_IMETHODIMP
IncrementalFinalizeRunnable::Run()
{
  if (mRuntime->mFinalizeRunnable != this) {
    /* These items were already processed synchronously in JSGC_END. */
    MOZ_ASSERT(!mSupports.Length());
    MOZ_ASSERT(!mDeferredFinalizeFunctions.Length());
    return NS_OK;
  }

  // One time-limited slice.
  ReleaseNow(true);

  if (mDeferredFinalizeFunctions.Length()) {
    // Not finished; dispatch another slice.  If dispatch fails, finish
    // synchronously rather than dropping the remaining work.
    nsresult rv = NS_DispatchToCurrentThread(this);
    if (NS_FAILED(rv)) {
      ReleaseNow(false);
    }
  }

  return NS_OK;
}
|
1114 |
|
// Hand all queued deferred-finalization work to a new runnable, then either
// dispatch it (incremental) or run it to completion right now.
void
CycleCollectedJSRuntime::FinalizeDeferredThings(DeferredFinalizeType aType)
{
  MOZ_ASSERT(!mFinalizeRunnable);
  // The constructor takes ownership of mDeferredSupports and the contents of
  // mDeferredFinalizerTable.
  mFinalizeRunnable = new IncrementalFinalizeRunnable(this,
                                                      mDeferredSupports,
                                                      mDeferredFinalizerTable);

  // Everything should be gone now.
  MOZ_ASSERT(!mDeferredSupports.Length());
  MOZ_ASSERT(!mDeferredFinalizerTable.Count());

  if (aType == FinalizeIncrementally) {
    NS_DispatchToCurrentThread(mFinalizeRunnable);
  } else {
    // Synchronous: ReleaseNow(false) clears mFinalizeRunnable when it is done.
    mFinalizeRunnable->ReleaseNow(false);
    MOZ_ASSERT(!mFinalizeRunnable);
  }
}
|
1134 |
|
// GC status hook: prepare the cycle collector at GC start, flush deferred
// finalization at GC end, then forward the status to the subclass via
// CustomGCCallback.  Any other status is a programming error.
void
CycleCollectedJSRuntime::OnGC(JSGCStatus aStatus)
{
  switch (aStatus) {
    case JSGC_BEGIN:
      nsCycleCollector_prepareForGarbageCollection();
      break;
    case JSGC_END:
    {
      /*
       * If the previous GC created a runnable to finalize objects
       * incrementally, and if it hasn't finished yet, finish it now. We
       * don't want these to build up. We also don't want to allow any
       * existing incremental finalize runnables to run after a
       * non-incremental GC, since they are often used to detect leaks.
       */
      if (mFinalizeRunnable) {
        mFinalizeRunnable->ReleaseNow(false);
      }

      // Do any deferred finalization of native objects.  Only finalize
      // incrementally if the GC itself was incremental.
      FinalizeDeferredThings(JS::WasIncrementalGC(mJSRuntime) ? FinalizeIncrementally :
                                                                FinalizeNow);
      break;
    }
    default:
      MOZ_CRASH();
  }

  CustomGCCallback(aStatus);
}