|
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- |
|
2 * vim: set ts=8 sts=4 et sw=4 tw=99: |
|
3 * This Source Code Form is subject to the terms of the Mozilla Public |
|
4 * License, v. 2.0. If a copy of the MPL was not distributed with this |
|
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ |
|
6 |
|
7 #include "gc/Marking.h" |
|
8 |
|
9 #include "mozilla/DebugOnly.h" |
|
10 |
|
11 #include "jit/IonCode.h" |
|
12 #include "js/SliceBudget.h" |
|
13 #include "vm/ArgumentsObject.h" |
|
14 #include "vm/ScopeObject.h" |
|
15 #include "vm/Shape.h" |
|
16 #include "vm/TypedArrayObject.h" |
|
17 |
|
18 #include "jscompartmentinlines.h" |
|
19 #include "jsinferinlines.h" |
|
20 #include "jsobjinlines.h" |
|
21 |
|
22 #ifdef JSGC_GENERATIONAL |
|
23 # include "gc/Nursery-inl.h" |
|
24 #endif |
|
25 #include "vm/String-inl.h" |
|
26 |
|
27 using namespace js; |
|
28 using namespace js::gc; |
|
29 |
|
30 using mozilla::DebugOnly; |
|
31 |
|
/*
 * Storage for the shared null-pointer sentinels used by the NullPtr handle
 * types (one internal to js, one in the public JS namespace).
 */
void * const js::NullPtr::constNullValue = nullptr;

JS_PUBLIC_DATA(void * const) JS::NullPtr::constNullValue = nullptr;
|
35 |
|
36 /* |
|
37 * There are two mostly separate mark paths. The first is a fast path used |
|
38 * internally in the GC. The second is a slow path used for root marking and |
|
39 * for API consumers like the cycle collector or Class::trace implementations. |
|
40 * |
|
41 * The fast path uses explicit stacks. The basic marking process during a GC is |
|
42 * that all roots are pushed on to a mark stack, and then each item on the |
|
43 * stack is scanned (possibly pushing more stuff) until the stack is empty. |
|
44 * |
|
45 * PushMarkStack pushes a GC thing onto the mark stack. In some cases (shapes |
|
46 * or strings) it eagerly marks the object rather than pushing it. Popping and |
|
47 * scanning is done by the processMarkStackTop method. For efficiency reasons |
|
48 * like tail recursion elimination that method also implements the scanning of |
|
49 * objects. For other GC things it uses helper methods. |
|
50 * |
|
51 * Most of the marking code outside Marking.cpp uses functions like MarkObject, |
|
52 * MarkString, etc. These functions check if an object is in the compartment |
|
53 * currently being GCed. If it is, they call PushMarkStack. Roots are pushed |
|
54 * this way as well as pointers traversed inside trace hooks (for things like |
|
55 * PropertyIteratorObjects). It is always valid to call a MarkX function |
|
56 * instead of PushMarkStack, although it may be slower. |
|
57 * |
|
58 * The MarkX functions also handle non-GC object traversal. In this case, they |
|
59 * call a callback for each object visited. This is a recursive process; the |
|
60 * mark stacks are not involved. These callbacks may ask for the outgoing |
|
61 * pointers to be visited. Eventually, this leads to the MarkChildren functions |
|
62 * being called. These functions duplicate much of the functionality of |
|
63 * scanning functions, but they don't push onto an explicit stack. |
|
64 */ |
|
65 |
|
66 static inline void |
|
67 PushMarkStack(GCMarker *gcmarker, ObjectImpl *thing); |
|
68 |
|
69 static inline void |
|
70 PushMarkStack(GCMarker *gcmarker, JSFunction *thing); |
|
71 |
|
72 static inline void |
|
73 PushMarkStack(GCMarker *gcmarker, JSScript *thing); |
|
74 |
|
75 static inline void |
|
76 PushMarkStack(GCMarker *gcmarker, Shape *thing); |
|
77 |
|
78 static inline void |
|
79 PushMarkStack(GCMarker *gcmarker, JSString *thing); |
|
80 |
|
81 static inline void |
|
82 PushMarkStack(GCMarker *gcmarker, types::TypeObject *thing); |
|
83 |
|
84 namespace js { |
|
85 namespace gc { |
|
86 |
|
87 static void MarkChildren(JSTracer *trc, JSString *str); |
|
88 static void MarkChildren(JSTracer *trc, JSScript *script); |
|
89 static void MarkChildren(JSTracer *trc, LazyScript *lazy); |
|
90 static void MarkChildren(JSTracer *trc, Shape *shape); |
|
91 static void MarkChildren(JSTracer *trc, BaseShape *base); |
|
92 static void MarkChildren(JSTracer *trc, types::TypeObject *type); |
|
93 static void MarkChildren(JSTracer *trc, jit::JitCode *code); |
|
94 |
|
95 } /* namespace gc */ |
|
96 } /* namespace js */ |
|
97 |
|
98 /*** Object Marking ***/ |
|
99 |
|
#if defined(DEBUG)
/*
 * Heuristically test whether |thing| still carries a free-memory poison
 * pattern, i.e. whether it looks like it was freed rather than allocated.
 * Reads the first word past the space a FreeSpan header would occupy, so it
 * is safe to call on things sitting on a free list.
 */
template<typename T>
static inline bool
IsThingPoisoned(T *thing)
{
    static_assert(sizeof(T) >= sizeof(FreeSpan) + sizeof(uint32_t),
                  "Ensure it is well defined to look past any free span that "
                  "may be embedded in the thing's header when freed.");
    const uint8_t poisonBytes[] = {
        JS_FRESH_NURSERY_PATTERN,
        JS_SWEPT_NURSERY_PATTERN,
        JS_ALLOCATED_NURSERY_PATTERN,
        JS_FRESH_TENURED_PATTERN,
        JS_SWEPT_TENURED_PATTERN,
        JS_ALLOCATED_TENURED_PATTERN,
        JS_SWEPT_CODE_PATTERN,
        JS_SWEPT_FRAME_PATTERN
    };
    const int numPoisonBytes = sizeof(poisonBytes) / sizeof(poisonBytes[0]);
    uint32_t *p = reinterpret_cast<uint32_t *>(reinterpret_cast<FreeSpan *>(thing) + 1);
    // Note: all free patterns are odd to make the common, not-poisoned case a single test.
    if ((*p & 1) == 0)
        return false;
    for (int i = 0; i < numPoisonBytes; ++i) {
        const uint8_t pb = poisonBytes[i];
        // Build the comparison word from uint32_t operands. With plain
        // uint8_t operands the integral promotion to (signed) int would make
        // |pb << 24| undefined behavior for any pattern with the high bit
        // set; widening first keeps the shifts well defined for all bytes.
        const uint32_t pw =
            uint32_t(pb) | (uint32_t(pb) << 8) | (uint32_t(pb) << 16) | (uint32_t(pb) << 24);
        if (*p == pw)
            return true;
    }
    return false;
}
#endif
|
132 |
|
133 static GCMarker * |
|
134 AsGCMarker(JSTracer *trc) |
|
135 { |
|
136 JS_ASSERT(IS_GC_MARKING_TRACER(trc)); |
|
137 return static_cast<GCMarker *>(trc); |
|
138 } |
|
139 |
|
/*
 * Permanent atoms may be shared between runtimes and are ignored by ordinary
 * marking. Per the specializations below, only string-like cells can report
 * themselves permanent; the primary template answers false for everything
 * else.
 */
template <typename T> bool ThingIsPermanentAtom(T *thing) { return false; }
template <> bool ThingIsPermanentAtom<JSString>(JSString *str) { return str->isPermanentAtom(); }
template <> bool ThingIsPermanentAtom<JSFlatString>(JSFlatString *str) { return str->isPermanentAtom(); }
template <> bool ThingIsPermanentAtom<JSLinearString>(JSLinearString *str) { return str->isPermanentAtom(); }
template <> bool ThingIsPermanentAtom<JSAtom>(JSAtom *atom) { return atom->isPermanent(); }
template <> bool ThingIsPermanentAtom<PropertyName>(PropertyName *name) { return name->isPermanent(); }
|
146 |
|
/*
 * Debug-only sanity checks run on every thing passed to MarkInternal:
 * validates the thing's zone/runtime association, alignment, trace kind, and
 * the GC-phase invariants of the tracer marking it. Compiles to nothing in
 * release builds.
 */
template<typename T>
static inline void
CheckMarkedThing(JSTracer *trc, T *thing)
{
#ifdef DEBUG
    JS_ASSERT(trc);
    JS_ASSERT(thing);

    /* This function uses data that's not available in the nursery. */
    if (IsInsideNursery(trc->runtime(), thing))
        return;

    /*
     * Permanent atoms are not associated with this runtime, but will be ignored
     * during marking.
     */
    if (ThingIsPermanentAtom(thing))
        return;

    JS_ASSERT(thing->zone());
    JS_ASSERT(thing->zone()->runtimeFromMainThread() == trc->runtime());
    JS_ASSERT(trc->hasTracingDetails());

    DebugOnly<JSRuntime *> rt = trc->runtime();

    /* A zone scheduled for destruction must not gain new marked things. */
    JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc) && rt->gcManipulatingDeadZones,
                 !thing->zone()->scheduledForDestruction);

    JS_ASSERT(CurrentThreadCanAccessRuntime(rt));

    /* Zones that demand a GC tracer may only be visited by one. */
    JS_ASSERT_IF(thing->zone()->requireGCTracer(),
                 IS_GC_MARKING_TRACER(trc));

    JS_ASSERT(thing->isAligned());

    /* The static type must agree with the dynamic trace kind. */
    JS_ASSERT(MapTypeToTraceKind<T>::kind == GetGCThingTraceKind(thing));

    JS_ASSERT_IF(rt->gcStrictCompartmentChecking,
                 thing->zone()->isCollecting() || rt->isAtomsZone(thing->zone()));

    /* Gray marking must not touch zones still doing black marking. */
    JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc) && AsGCMarker(trc)->getMarkColor() == GRAY,
                 !thing->zone()->isGCMarkingBlack() || rt->isAtomsZone(thing->zone()));

    /* Marking after sweeping has begun would resurrect dead things. */
    JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc),
                 !(thing->zone()->isGCSweeping() || thing->zone()->isGCFinished()));

    /*
     * Try to assert that the thing is allocated. This is complicated by the
     * fact that allocated things may still contain the poison pattern if that
     * part has not been overwritten, and that the free span list head in the
     * ArenaHeader may not be synced with the real one in ArenaLists.
     */
    JS_ASSERT_IF(IsThingPoisoned(thing) && rt->isHeapBusy(),
                 !InFreeList(thing->arenaHeader(), thing));
#endif
}
|
203 |
|
/*
 * Core marking routine: every Mark* entry point funnels here. For a GC
 * marking tracer (no callback) the thing is pushed onto the mark stack; for
 * all other tracers (cycle collector, heap dumpers, ...) the tracer's
 * callback is invoked instead.
 */
template<typename T>
static void
MarkInternal(JSTracer *trc, T **thingp)
{
    JS_ASSERT(thingp);
    T *thing = *thingp;

    CheckMarkedThing(trc, thing);

    if (!trc->callback) {
        /*
         * We may mark a Nursery thing outside the context of the
         * MinorCollectionTracer because of a pre-barrier. The pre-barrier is
         * not needed in this case because we perform a minor collection before
         * each incremental slice.
         */
        if (IsInsideNursery(trc->runtime(), thing))
            return;

        /*
         * Don't mark permanent atoms, as they may be associated with another
         * runtime. Note that PushMarkStack() also checks this, but the tests
         * and maybeAlive write below should only be done on the main thread.
         */
        if (ThingIsPermanentAtom(thing))
            return;

        /*
         * Don't mark things outside a compartment if we are in a
         * per-compartment GC.
         */
        if (!thing->zone()->isGCMarking())
            return;

        PushMarkStack(AsGCMarker(trc), thing);
        /* Record that the zone is reachable this cycle. */
        thing->zone()->maybeAlive = true;
    } else {
        /* Non-GC tracer: report the edge and let the callback handle it. */
        trc->callback(trc, (void **)thingp, MapTypeToTraceKind<T>::kind);
        trc->unsetTracingLocation();
    }

    trc->clearTracingDetails();
}
|
247 |
|
/*
 * Roots may only be marked when no incremental GC is in progress or while
 * the collector is still in its initial root-marking phase.
 */
#define JS_ROOT_MARKING_ASSERT(trc) \
    JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc), \
                 trc->runtime()->gcIncrementalState == NO_INCREMENTAL || \
                 trc->runtime()->gcIncrementalState == MARK_ROOTS);
|
252 |
|
253 namespace js { |
|
254 namespace gc { |
|
255 |
|
256 template <typename T> |
|
257 void |
|
258 MarkUnbarriered(JSTracer *trc, T **thingp, const char *name) |
|
259 { |
|
260 trc->setTracingName(name); |
|
261 MarkInternal(trc, thingp); |
|
262 } |
|
263 |
|
264 template <typename T> |
|
265 static void |
|
266 Mark(JSTracer *trc, BarrieredPtr<T> *thing, const char *name) |
|
267 { |
|
268 trc->setTracingName(name); |
|
269 MarkInternal(trc, thing->unsafeGet()); |
|
270 } |
|
271 |
|
/*
 * Mark a permanent atom. The ordinary marking paths deliberately skip
 * permanent atoms, so callers that must visit one use this dedicated entry
 * point.
 */
void
MarkPermanentAtom(JSTracer *trc, JSAtom *atom, const char *name)
{
    trc->setTracingName(name);

    JS_ASSERT(atom->isPermanent());

    CheckMarkedThing(trc, atom);

    if (!trc->callback) {
        // Atoms do not refer to other GC things so don't need to go on the mark stack.
        // Additionally, PushMarkStack will ignore permanent atoms.
        atom->markIfUnmarked();
    } else {
        void *thing = atom;
        trc->callback(trc, &thing, JSTRACE_STRING);
        /* A permanent atom must never be relocated by the callback. */
        JS_ASSERT(thing == atom);
        trc->unsetTracingLocation();
    }

    trc->clearTracingDetails();
}
|
294 |
|
295 } /* namespace gc */ |
|
296 } /* namespace js */ |
|
297 |
|
298 template <typename T> |
|
299 static void |
|
300 MarkRoot(JSTracer *trc, T **thingp, const char *name) |
|
301 { |
|
302 JS_ROOT_MARKING_ASSERT(trc); |
|
303 trc->setTracingName(name); |
|
304 MarkInternal(trc, thingp); |
|
305 } |
|
306 |
|
307 template <typename T> |
|
308 static void |
|
309 MarkRange(JSTracer *trc, size_t len, HeapPtr<T> *vec, const char *name) |
|
310 { |
|
311 for (size_t i = 0; i < len; ++i) { |
|
312 if (vec[i].get()) { |
|
313 trc->setTracingIndex(name, i); |
|
314 MarkInternal(trc, vec[i].unsafeGet()); |
|
315 } |
|
316 } |
|
317 } |
|
318 |
|
319 template <typename T> |
|
320 static void |
|
321 MarkRootRange(JSTracer *trc, size_t len, T **vec, const char *name) |
|
322 { |
|
323 JS_ROOT_MARKING_ASSERT(trc); |
|
324 for (size_t i = 0; i < len; ++i) { |
|
325 if (vec[i]) { |
|
326 trc->setTracingIndex(name, i); |
|
327 MarkInternal(trc, &vec[i]); |
|
328 } |
|
329 } |
|
330 } |
|
331 |
|
332 namespace js { |
|
333 namespace gc { |
|
334 |
|
/*
 * Test whether *thingp is marked (reachable) without marking it. For
 * nursery things this also updates *thingp to the forwarded address when
 * the thing has been moved by a minor collection.
 */
template <typename T>
static bool
IsMarked(T **thingp)
{
    JS_ASSERT(thingp);
    JS_ASSERT(*thingp);
#ifdef JSGC_GENERATIONAL
    Nursery &nursery = (*thingp)->runtimeFromMainThread()->gcNursery;
    if (nursery.isInside(*thingp))
        return nursery.getForwardedPointer(thingp);
#endif
    Zone *zone = (*thingp)->tenuredZone();
    /* Zones not being collected (or already past marking) keep everything. */
    if (!zone->isCollecting() || zone->isGCFinished())
        return true;
    return (*thingp)->isMarked();
}
|
351 |
|
/*
 * Test whether *thingp will be finalized by the collection in progress.
 * During a minor GC this also updates *thingp to the forwarded address of a
 * surviving nursery thing.
 */
template <typename T>
static bool
IsAboutToBeFinalized(T **thingp)
{
    JS_ASSERT(thingp);
    JS_ASSERT(*thingp);

    T *thing = *thingp;
    JSRuntime *rt = thing->runtimeFromAnyThread();

    /* Permanent atoms are never finalized by non-owning runtimes. */
    if (ThingIsPermanentAtom(thing) && !TlsPerThreadData.get()->associatedWith(rt))
        return false;

#ifdef JSGC_GENERATIONAL
    Nursery &nursery = rt->gcNursery;
    JS_ASSERT_IF(!rt->isHeapMinorCollecting(), !nursery.isInside(thing));
    if (rt->isHeapMinorCollecting()) {
        /* A nursery thing dies iff it was not forwarded to the tenured heap. */
        if (nursery.isInside(thing))
            return !nursery.getForwardedPointer(thingp);
        return false;
    }
#endif

    if (!thing->tenuredZone()->isGCSweeping())
        return false;

    /*
     * We should return false for things that have been allocated during
     * incremental sweeping, but this possibility doesn't occur at the moment
     * because this function is only called at the very start of the sweeping a
     * compartment group and during minor gc. Rather than do the extra check,
     * we just assert that it's not necessary.
     */
    JS_ASSERT_IF(!rt->isHeapMinorCollecting(), !thing->arenaHeader()->allocatedDuringIncremental);

    return !thing->isMarked();
}
|
390 |
|
/*
 * If *thingp was moved out of the nursery by the minor GC in progress,
 * replace it with its forwarded address. Returns the (possibly updated)
 * pointer.
 */
template <typename T>
T *
UpdateIfRelocated(JSRuntime *rt, T **thingp)
{
    JS_ASSERT(thingp);
#ifdef JSGC_GENERATIONAL
    if (*thingp && rt->isHeapMinorCollecting() && rt->gcNursery.isInside(*thingp))
        rt->gcNursery.getForwardedPointer(thingp);
#endif
    return *thingp;
}
|
402 |
|
/*
 * Stamp out the full public marking API for one (trace-kind, C++ type)
 * pair: Mark##base, root/unbarriered/range variants, the Is*Marked and
 * Is*AboutToBeFinalized queries, and the relocation helpers. Each expansion
 * simply forwards to the corresponding template above.
 */
#define DeclMarkerImpl(base, type) \
void \
Mark##base(JSTracer *trc, BarrieredPtr<type> *thing, const char *name) \
{ \
    Mark<type>(trc, thing, name); \
} \
 \
void \
Mark##base##Root(JSTracer *trc, type **thingp, const char *name) \
{ \
    MarkRoot<type>(trc, thingp, name); \
} \
 \
void \
Mark##base##Unbarriered(JSTracer *trc, type **thingp, const char *name) \
{ \
    MarkUnbarriered<type>(trc, thingp, name); \
} \
 \
/* Explicitly instantiate MarkUnbarriered<type>. It is referenced from */ \
/* other translation units and the instantiation might otherwise get */ \
/* inlined away. */ \
template void MarkUnbarriered<type>(JSTracer *, type **, const char *); \
 \
void \
Mark##base##Range(JSTracer *trc, size_t len, HeapPtr<type> *vec, const char *name) \
{ \
    MarkRange<type>(trc, len, vec, name); \
} \
 \
void \
Mark##base##RootRange(JSTracer *trc, size_t len, type **vec, const char *name) \
{ \
    MarkRootRange<type>(trc, len, vec, name); \
} \
 \
bool \
Is##base##Marked(type **thingp) \
{ \
    return IsMarked<type>(thingp); \
} \
 \
bool \
Is##base##Marked(BarrieredPtr<type> *thingp) \
{ \
    return IsMarked<type>(thingp->unsafeGet()); \
} \
 \
bool \
Is##base##AboutToBeFinalized(type **thingp) \
{ \
    return IsAboutToBeFinalized<type>(thingp); \
} \
 \
bool \
Is##base##AboutToBeFinalized(BarrieredPtr<type> *thingp) \
{ \
    return IsAboutToBeFinalized<type>(thingp->unsafeGet()); \
} \
 \
type * \
Update##base##IfRelocated(JSRuntime *rt, BarrieredPtr<type> *thingp) \
{ \
    return UpdateIfRelocated<type>(rt, thingp->unsafeGet()); \
} \
 \
type * \
Update##base##IfRelocated(JSRuntime *rt, type **thingp) \
{ \
    return UpdateIfRelocated<type>(rt, thingp); \
}
|
474 |
|
475 |
|
/*
 * Instantiate the marking API for every publicly marked type. The first
 * argument selects the Mark##base name family; the second is the concrete
 * C++ type, so several object types share the "Object" family and the
 * string-like types share "String".
 */
DeclMarkerImpl(BaseShape, BaseShape)
DeclMarkerImpl(BaseShape, UnownedBaseShape)
DeclMarkerImpl(JitCode, jit::JitCode)
DeclMarkerImpl(Object, ArgumentsObject)
DeclMarkerImpl(Object, ArrayBufferObject)
DeclMarkerImpl(Object, ArrayBufferViewObject)
DeclMarkerImpl(Object, SharedArrayBufferObject)
DeclMarkerImpl(Object, DebugScopeObject)
DeclMarkerImpl(Object, GlobalObject)
DeclMarkerImpl(Object, JSObject)
DeclMarkerImpl(Object, JSFunction)
DeclMarkerImpl(Object, ObjectImpl)
DeclMarkerImpl(Object, ScopeObject)
DeclMarkerImpl(Script, JSScript)
DeclMarkerImpl(LazyScript, LazyScript)
DeclMarkerImpl(Shape, Shape)
DeclMarkerImpl(String, JSAtom)
DeclMarkerImpl(String, JSString)
DeclMarkerImpl(String, JSFlatString)
DeclMarkerImpl(String, JSLinearString)
DeclMarkerImpl(String, PropertyName)
DeclMarkerImpl(TypeObject, js::types::TypeObject)
|
498 |
|
499 } /* namespace gc */ |
|
500 } /* namespace js */ |
|
501 |
|
502 /*** Externally Typed Marking ***/ |
|
503 |
|
/*
 * Mark a GC thing whose static type is unknown, dispatching on its trace
 * kind to the correctly typed MarkInternal instantiation. No default case:
 * the switch enumerates every trace kind.
 */
void
gc::MarkKind(JSTracer *trc, void **thingp, JSGCTraceKind kind)
{
    JS_ASSERT(thingp);
    JS_ASSERT(*thingp);
    DebugOnly<Cell *> cell = static_cast<Cell *>(*thingp);
    /* The caller-supplied kind must agree with the thing's alloc kind. */
    JS_ASSERT_IF(cell->isTenured(), kind == MapAllocToTraceKind(cell->tenuredGetAllocKind()));
    switch (kind) {
      case JSTRACE_OBJECT:
        MarkInternal(trc, reinterpret_cast<JSObject **>(thingp));
        break;
      case JSTRACE_STRING:
        MarkInternal(trc, reinterpret_cast<JSString **>(thingp));
        break;
      case JSTRACE_SCRIPT:
        MarkInternal(trc, reinterpret_cast<JSScript **>(thingp));
        break;
      case JSTRACE_LAZY_SCRIPT:
        MarkInternal(trc, reinterpret_cast<LazyScript **>(thingp));
        break;
      case JSTRACE_SHAPE:
        MarkInternal(trc, reinterpret_cast<Shape **>(thingp));
        break;
      case JSTRACE_BASE_SHAPE:
        MarkInternal(trc, reinterpret_cast<BaseShape **>(thingp));
        break;
      case JSTRACE_TYPE_OBJECT:
        MarkInternal(trc, reinterpret_cast<types::TypeObject **>(thingp));
        break;
      case JSTRACE_JITCODE:
        MarkInternal(trc, reinterpret_cast<jit::JitCode **>(thingp));
        break;
    }
}
|
538 |
|
539 static void |
|
540 MarkGCThingInternal(JSTracer *trc, void **thingp, const char *name) |
|
541 { |
|
542 trc->setTracingName(name); |
|
543 JS_ASSERT(thingp); |
|
544 if (!*thingp) |
|
545 return; |
|
546 MarkKind(trc, thingp, GetGCThingTraceKind(*thingp)); |
|
547 } |
|
548 |
|
/* Root variant of generic thing marking; asserts the root-marking phase. */
void
gc::MarkGCThingRoot(JSTracer *trc, void **thingp, const char *name)
{
    JS_ROOT_MARKING_ASSERT(trc);
    MarkGCThingInternal(trc, thingp, name);
}
|
555 |
|
/* Unbarriered variant of generic thing marking; no phase restriction. */
void
gc::MarkGCThingUnbarriered(JSTracer *trc, void **thingp, const char *name)
{
    MarkGCThingInternal(trc, thingp, name);
}
|
561 |
|
562 /*** ID Marking ***/ |
|
563 |
|
/*
 * Mark the GC thing (string or object) packed inside a jsid, then rebuild
 * *id from the possibly-updated pointer. Ids with no GC payload only clear
 * the recorded tracing location.
 */
static inline void
MarkIdInternal(JSTracer *trc, jsid *id)
{
    if (JSID_IS_STRING(*id)) {
        JSString *str = JSID_TO_STRING(*id);
        trc->setTracingLocation((void *)id);
        MarkInternal(trc, &str);
        /* Re-pack: marking may have updated str (e.g. nursery forwarding). */
        *id = NON_INTEGER_ATOM_TO_JSID(reinterpret_cast<JSAtom *>(str));
    } else if (MOZ_UNLIKELY(JSID_IS_OBJECT(*id))) {
        JSObject *obj = JSID_TO_OBJECT(*id);
        trc->setTracingLocation((void *)id);
        MarkInternal(trc, &obj);
        *id = OBJECT_TO_JSID(obj);
    } else {
        /* Unset realLocation manually if we do not call MarkInternal. */
        trc->unsetTracingLocation();
    }
}
|
582 |
|
583 void |
|
584 gc::MarkId(JSTracer *trc, BarrieredId *id, const char *name) |
|
585 { |
|
586 trc->setTracingName(name); |
|
587 MarkIdInternal(trc, id->unsafeGet()); |
|
588 } |
|
589 |
|
/* Mark an id root; only legal during the root-marking phase. */
void
gc::MarkIdRoot(JSTracer *trc, jsid *id, const char *name)
{
    JS_ROOT_MARKING_ASSERT(trc);
    trc->setTracingName(name);
    MarkIdInternal(trc, id);
}
|
597 |
|
/* Mark an id stored without a write barrier. */
void
gc::MarkIdUnbarriered(JSTracer *trc, jsid *id, const char *name)
{
    trc->setTracingName(name);
    MarkIdInternal(trc, id);
}
|
604 |
|
605 void |
|
606 gc::MarkIdRange(JSTracer *trc, size_t len, HeapId *vec, const char *name) |
|
607 { |
|
608 for (size_t i = 0; i < len; ++i) { |
|
609 trc->setTracingIndex(name, i); |
|
610 MarkIdInternal(trc, vec[i].unsafeGet()); |
|
611 } |
|
612 } |
|
613 |
|
614 void |
|
615 gc::MarkIdRootRange(JSTracer *trc, size_t len, jsid *vec, const char *name) |
|
616 { |
|
617 JS_ROOT_MARKING_ASSERT(trc); |
|
618 for (size_t i = 0; i < len; ++i) { |
|
619 trc->setTracingIndex(name, i); |
|
620 MarkIdInternal(trc, &vec[i]); |
|
621 } |
|
622 } |
|
623 |
|
624 /*** Value Marking ***/ |
|
625 |
|
/*
 * Mark the GC thing referenced by a Value, then write the possibly-updated
 * pointer back into the Value with its original tag. Non-markable values
 * (ints, doubles, etc.) only clear the recorded tracing location.
 */
static inline void
MarkValueInternal(JSTracer *trc, Value *v)
{
    if (v->isMarkable()) {
        JS_ASSERT(v->toGCThing());
        void *thing = v->toGCThing();
        trc->setTracingLocation((void *)v);
        MarkKind(trc, &thing, v->gcKind());
        /* Re-box the updated pointer under the value's original tag. */
        if (v->isString())
            v->setString((JSString *)thing);
        else
            v->setObjectOrNull((JSObject *)thing);
    } else {
        /* Unset realLocation manually if we do not call MarkInternal. */
        trc->unsetTracingLocation();
    }
}
|
643 |
|
644 void |
|
645 gc::MarkValue(JSTracer *trc, BarrieredValue *v, const char *name) |
|
646 { |
|
647 trc->setTracingName(name); |
|
648 MarkValueInternal(trc, v->unsafeGet()); |
|
649 } |
|
650 |
|
/* Mark a Value root; only legal during the root-marking phase. */
void
gc::MarkValueRoot(JSTracer *trc, Value *v, const char *name)
{
    JS_ROOT_MARKING_ASSERT(trc);
    trc->setTracingName(name);
    MarkValueInternal(trc, v);
}
|
658 |
|
/*
 * Mark a types::Type root. Only object-typed Types carry a GC pointer;
 * the Type is rebuilt from the possibly-updated pointer afterwards.
 * Primitive Types need no marking and fall through untouched.
 */
void
gc::MarkTypeRoot(JSTracer *trc, types::Type *v, const char *name)
{
    JS_ROOT_MARKING_ASSERT(trc);
    trc->setTracingName(name);
    if (v->isSingleObject()) {
        JSObject *obj = v->singleObject();
        MarkInternal(trc, &obj);
        *v = types::Type::ObjectType(obj);
    } else if (v->isTypeObject()) {
        types::TypeObject *typeObj = v->typeObject();
        MarkInternal(trc, &typeObj);
        *v = types::Type::ObjectType(typeObj);
    }
}
|
674 |
|
675 void |
|
676 gc::MarkValueRange(JSTracer *trc, size_t len, BarrieredValue *vec, const char *name) |
|
677 { |
|
678 for (size_t i = 0; i < len; ++i) { |
|
679 trc->setTracingIndex(name, i); |
|
680 MarkValueInternal(trc, vec[i].unsafeGet()); |
|
681 } |
|
682 } |
|
683 |
|
684 void |
|
685 gc::MarkValueRootRange(JSTracer *trc, size_t len, Value *vec, const char *name) |
|
686 { |
|
687 JS_ROOT_MARKING_ASSERT(trc); |
|
688 for (size_t i = 0; i < len; ++i) { |
|
689 trc->setTracingIndex(name, i); |
|
690 MarkValueInternal(trc, &vec[i]); |
|
691 } |
|
692 } |
|
693 |
|
694 bool |
|
695 gc::IsValueMarked(Value *v) |
|
696 { |
|
697 JS_ASSERT(v->isMarkable()); |
|
698 bool rv; |
|
699 if (v->isString()) { |
|
700 JSString *str = (JSString *)v->toGCThing(); |
|
701 rv = IsMarked<JSString>(&str); |
|
702 v->setString(str); |
|
703 } else { |
|
704 JSObject *obj = (JSObject *)v->toGCThing(); |
|
705 rv = IsMarked<JSObject>(&obj); |
|
706 v->setObject(*obj); |
|
707 } |
|
708 return rv; |
|
709 } |
|
710 |
|
711 bool |
|
712 gc::IsValueAboutToBeFinalized(Value *v) |
|
713 { |
|
714 JS_ASSERT(v->isMarkable()); |
|
715 bool rv; |
|
716 if (v->isString()) { |
|
717 JSString *str = (JSString *)v->toGCThing(); |
|
718 rv = IsAboutToBeFinalized<JSString>(&str); |
|
719 v->setString(str); |
|
720 } else { |
|
721 JSObject *obj = (JSObject *)v->toGCThing(); |
|
722 rv = IsAboutToBeFinalized<JSObject>(&obj); |
|
723 v->setObject(*obj); |
|
724 } |
|
725 return rv; |
|
726 } |
|
727 |
|
728 /*** Slot Marking ***/ |
|
729 |
|
/* A heap slot is marked iff the Value it holds is marked. */
bool
gc::IsSlotMarked(HeapSlot *s)
{
    return IsMarked(s);
}
|
735 |
|
736 void |
|
737 gc::MarkSlot(JSTracer *trc, HeapSlot *s, const char *name) |
|
738 { |
|
739 trc->setTracingName(name); |
|
740 MarkValueInternal(trc, s->unsafeGet()); |
|
741 } |
|
742 |
|
743 void |
|
744 gc::MarkArraySlots(JSTracer *trc, size_t len, HeapSlot *vec, const char *name) |
|
745 { |
|
746 for (size_t i = 0; i < len; ++i) { |
|
747 trc->setTracingIndex(name, i); |
|
748 MarkValueInternal(trc, vec[i].unsafeGet()); |
|
749 } |
|
750 } |
|
751 |
|
752 void |
|
753 gc::MarkObjectSlots(JSTracer *trc, JSObject *obj, uint32_t start, uint32_t nslots) |
|
754 { |
|
755 JS_ASSERT(obj->isNative()); |
|
756 for (uint32_t i = start; i < (start + nslots); ++i) { |
|
757 trc->setTracingDetails(js_GetObjectSlotName, obj, i); |
|
758 MarkValueInternal(trc, obj->nativeGetSlotRef(i).unsafeGet()); |
|
759 } |
|
760 } |
|
761 |
|
/*
 * Decide whether a cross-compartment edge from src to cell should be traced
 * now, enforcing the black/gray mark-color invariants and recording
 * violations or deferred gray cells as needed. Non-marking tracers always
 * follow the edge.
 */
static bool
ShouldMarkCrossCompartment(JSTracer *trc, JSObject *src, Cell *cell)
{
    if (!IS_GC_MARKING_TRACER(trc))
        return true;

    uint32_t color = AsGCMarker(trc)->getMarkColor();
    JS_ASSERT(color == BLACK || color == GRAY);

    /* Nursery cells are handled by the minor GC, not traced from here. */
    if (IsInsideNursery(trc->runtime(), cell)) {
        JS_ASSERT(color == BLACK);
        return false;
    }

    JS::Zone *zone = cell->tenuredZone();
    if (color == BLACK) {
        /*
         * Having black->gray edges violates our promise to the cycle
         * collector. This can happen if we're collecting a compartment and it
         * has an edge to an uncollected compartment: it's possible that the
         * source and destination of the cross-compartment edge should be gray,
         * but the source was marked black by the conservative scanner.
         */
        if (cell->isMarked(GRAY)) {
            JS_ASSERT(!zone->isCollecting());
            trc->runtime()->gcFoundBlackGrayEdges = true;
        }
        return zone->isGCMarking();
    } else {
        if (zone->isGCMarkingBlack()) {
            /*
             * The destination compartment is not being marked gray now, but
             * it will be later, so record the cell so it can be marked gray
             * at the appropriate time.
             */
            if (!cell->isMarked())
                DelayCrossCompartmentGrayMarking(src);
            return false;
        }
        return zone->isGCMarkingGray();
    }
}
|
804 |
|
805 void |
|
806 gc::MarkCrossCompartmentObjectUnbarriered(JSTracer *trc, JSObject *src, JSObject **dst, const char *name) |
|
807 { |
|
808 if (ShouldMarkCrossCompartment(trc, src, *dst)) |
|
809 MarkObjectUnbarriered(trc, dst, name); |
|
810 } |
|
811 |
|
812 void |
|
813 gc::MarkCrossCompartmentScriptUnbarriered(JSTracer *trc, JSObject *src, JSScript **dst, |
|
814 const char *name) |
|
815 { |
|
816 if (ShouldMarkCrossCompartment(trc, src, *dst)) |
|
817 MarkScriptUnbarriered(trc, dst, name); |
|
818 } |
|
819 |
|
820 void |
|
821 gc::MarkCrossCompartmentSlot(JSTracer *trc, JSObject *src, HeapSlot *dst, const char *name) |
|
822 { |
|
823 if (dst->isMarkable() && ShouldMarkCrossCompartment(trc, src, (Cell *)dst->toGCThing())) |
|
824 MarkSlot(trc, dst, name); |
|
825 } |
|
826 |
|
827 /*** Special Marking ***/ |
|
828 |
|
829 void |
|
830 gc::MarkObject(JSTracer *trc, HeapPtr<GlobalObject, JSScript *> *thingp, const char *name) |
|
831 { |
|
832 trc->setTracingName(name); |
|
833 MarkInternal(trc, thingp->unsafeGet()); |
|
834 } |
|
835 |
|
/* Mark a Value stored without a write barrier. */
void
gc::MarkValueUnbarriered(JSTracer *trc, Value *v, const char *name)
{
    trc->setTracingName(name);
    MarkValueInternal(trc, v);
}
|
842 |
|
/* Untyped wrapper over IsMarked for bare Cell pointers. */
bool
gc::IsCellMarked(Cell **thingp)
{
    return IsMarked<Cell>(thingp);
}
|
848 |
|
/* Untyped wrapper over IsAboutToBeFinalized for bare Cell pointers. */
bool
gc::IsCellAboutToBeFinalized(Cell **thingp)
{
    return IsAboutToBeFinalized<Cell>(thingp);
}
|
854 |
|
855 /*** Push Mark Stack ***/ |
|
856 |
|
/*
 * Things pushed on the mark stack must belong to a zone currently being
 * marked; strings may additionally live in the shared atoms zone.
 * NOTE(review): JS_COMPARTMENT_ASSERT_STR ends with a semicolon, so call
 * sites written as "MACRO(rt, thing);" expand to an extra empty statement —
 * harmless in block context, but confirm before using it in an unbraced
 * if/else arm.
 */
#define JS_COMPARTMENT_ASSERT(rt, thing) \
    JS_ASSERT((thing)->zone()->isGCMarking())

#define JS_COMPARTMENT_ASSERT_STR(rt, thing) \
    JS_ASSERT((thing)->zone()->isGCMarking() || \
              (rt)->isAtomsZone((thing)->zone()));
|
863 |
|
864 static void |
|
865 PushMarkStack(GCMarker *gcmarker, ObjectImpl *thing) |
|
866 { |
|
867 JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing); |
|
868 JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing)); |
|
869 |
|
870 if (thing->markIfUnmarked(gcmarker->getMarkColor())) |
|
871 gcmarker->pushObject(thing); |
|
872 } |
|
873 |
|
874 /* |
|
875 * PushMarkStack for BaseShape unpacks its children directly onto the mark |
|
876 * stack. For a pre-barrier between incremental slices, this may result in |
|
877 * objects in the nursery getting pushed onto the mark stack. It is safe to |
|
878 * ignore these objects because they will be marked by the matching |
|
879 * post-barrier during the minor GC at the start of each incremental slice. |
|
880 */ |
|
881 static void |
|
882 MaybePushMarkStackBetweenSlices(GCMarker *gcmarker, JSObject *thing) |
|
883 { |
|
884 JSRuntime *rt = gcmarker->runtime(); |
|
885 JS_COMPARTMENT_ASSERT(rt, thing); |
|
886 JS_ASSERT_IF(rt->isHeapBusy(), !IsInsideNursery(rt, thing)); |
|
887 |
|
888 if (!IsInsideNursery(rt, thing) && thing->markIfUnmarked(gcmarker->getMarkColor())) |
|
889 gcmarker->pushObject(thing); |
|
890 } |
|
891 |
|
892 static void |
|
893 PushMarkStack(GCMarker *gcmarker, JSFunction *thing) |
|
894 { |
|
895 JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing); |
|
896 JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing)); |
|
897 |
|
898 if (thing->markIfUnmarked(gcmarker->getMarkColor())) |
|
899 gcmarker->pushObject(thing); |
|
900 } |
|
901 |
|
902 static void |
|
903 PushMarkStack(GCMarker *gcmarker, types::TypeObject *thing) |
|
904 { |
|
905 JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing); |
|
906 JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing)); |
|
907 |
|
908 if (thing->markIfUnmarked(gcmarker->getMarkColor())) |
|
909 gcmarker->pushType(thing); |
|
910 } |
|
911 |
|
912 static void |
|
913 PushMarkStack(GCMarker *gcmarker, JSScript *thing) |
|
914 { |
|
915 JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing); |
|
916 JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing)); |
|
917 |
|
918 /* |
|
919 * We mark scripts directly rather than pushing on the stack as they can |
|
920 * refer to other scripts only indirectly (like via nested functions) and |
|
921 * we cannot get to deep recursion. |
|
922 */ |
|
923 if (thing->markIfUnmarked(gcmarker->getMarkColor())) |
|
924 MarkChildren(gcmarker, thing); |
|
925 } |
|
926 |
|
927 static void |
|
928 PushMarkStack(GCMarker *gcmarker, LazyScript *thing) |
|
929 { |
|
930 JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing); |
|
931 JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing)); |
|
932 |
|
933 /* |
|
934 * We mark lazy scripts directly rather than pushing on the stack as they |
|
935 * only refer to normal scripts and to strings, and cannot recurse. |
|
936 */ |
|
937 if (thing->markIfUnmarked(gcmarker->getMarkColor())) |
|
938 MarkChildren(gcmarker, thing); |
|
939 } |
|
940 |
|
941 static void |
|
942 ScanShape(GCMarker *gcmarker, Shape *shape); |
|
943 |
|
944 static void |
|
945 PushMarkStack(GCMarker *gcmarker, Shape *thing) |
|
946 { |
|
947 JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing); |
|
948 JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing)); |
|
949 |
|
950 /* We mark shapes directly rather than pushing on the stack. */ |
|
951 if (thing->markIfUnmarked(gcmarker->getMarkColor())) |
|
952 ScanShape(gcmarker, thing); |
|
953 } |
|
954 |
|
955 static void |
|
956 PushMarkStack(GCMarker *gcmarker, jit::JitCode *thing) |
|
957 { |
|
958 JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing); |
|
959 JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing)); |
|
960 |
|
961 if (thing->markIfUnmarked(gcmarker->getMarkColor())) |
|
962 gcmarker->pushJitCode(thing); |
|
963 } |
|
964 |
|
965 static inline void |
|
966 ScanBaseShape(GCMarker *gcmarker, BaseShape *base); |
|
967 |
|
968 static void |
|
969 PushMarkStack(GCMarker *gcmarker, BaseShape *thing) |
|
970 { |
|
971 JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing); |
|
972 JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing)); |
|
973 |
|
974 /* We mark base shapes directly rather than pushing on the stack. */ |
|
975 if (thing->markIfUnmarked(gcmarker->getMarkColor())) |
|
976 ScanBaseShape(gcmarker, thing); |
|
977 } |
|
978 |
|
979 static void |
|
980 ScanShape(GCMarker *gcmarker, Shape *shape) |
|
981 { |
|
982 restart: |
|
983 PushMarkStack(gcmarker, shape->base()); |
|
984 |
|
985 const BarrieredId &id = shape->propidRef(); |
|
986 if (JSID_IS_STRING(id)) |
|
987 PushMarkStack(gcmarker, JSID_TO_STRING(id)); |
|
988 else if (MOZ_UNLIKELY(JSID_IS_OBJECT(id))) |
|
989 PushMarkStack(gcmarker, JSID_TO_OBJECT(id)); |
|
990 |
|
991 shape = shape->previous(); |
|
992 if (shape && shape->markIfUnmarked(gcmarker->getMarkColor())) |
|
993 goto restart; |
|
994 } |
|
995 |
|
/*
 * Mark everything a base shape refers to: its compartment, getter/setter
 * objects, parent (or the compartment's global as a fallback), metadata
 * object, and — if owned — its unowned twin.
 */
static inline void
ScanBaseShape(GCMarker *gcmarker, BaseShape *base)
{
    base->assertConsistency();

    /* Keep the owning compartment alive. */
    base->compartment()->mark();

    if (base->hasGetterObject())
        MaybePushMarkStackBetweenSlices(gcmarker, base->getterObject());

    if (base->hasSetterObject())
        MaybePushMarkStackBetweenSlices(gcmarker, base->setterObject());

    /* Mark the parent if there is one; otherwise fall back to the global. */
    if (JSObject *parent = base->getObjectParent()) {
        MaybePushMarkStackBetweenSlices(gcmarker, parent);
    } else if (GlobalObject *global = base->compartment()->maybeGlobal()) {
        PushMarkStack(gcmarker, global);
    }

    if (JSObject *metadata = base->getObjectMetadata())
        MaybePushMarkStackBetweenSlices(gcmarker, metadata);

    /*
     * All children of the owned base shape are consistent with its
     * unowned one, thus we do not need to trace through children of the
     * unowned base shape.
     */
    if (base->isOwned()) {
        UnownedBaseShape *unowned = base->baseUnowned();
        JS_ASSERT(base->compartment() == unowned->compartment());
        unowned->markIfUnmarked(gcmarker->getMarkColor());
    }
}
|
1029 |
|
/*
 * Scan an already-marked linear string. The only outgoing edges of a linear
 * string form its base chain (for dependent strings), so walk and mark that
 * chain iteratively, stopping at a permanent atom or an already-marked base.
 */
static inline void
ScanLinearString(GCMarker *gcmarker, JSLinearString *str)
{
    JS_COMPARTMENT_ASSERT_STR(gcmarker->runtime(), str);
    JS_ASSERT(str->isMarked());

    /*
     * Add extra asserts to confirm the static type to detect incorrect string
     * mutations.
     */
    JS_ASSERT(str->JSString::isLinear());
    while (str->hasBase()) {
        str = str->base();
        /* Bases of linear strings are linear too. */
        JS_ASSERT(str->JSString::isLinear());
        /* Permanent atoms may belong to another runtime; don't touch them. */
        if (str->isPermanentAtom())
            break;
        JS_COMPARTMENT_ASSERT_STR(gcmarker->runtime(), str);
        /* An already-marked base implies the rest of the chain is marked. */
        if (!str->markIfUnmarked())
            break;
    }
}
|
1051 |
|
/*
 * The function tries to scan the whole rope tree using the marking stack as
 * temporary storage. If that becomes full, the unscanned ropes are added to
 * the delayed marking list. When the function returns, the marking stack is
 * at the same depth as it was on entry. This way we avoid using tags when
 * pushing ropes to the stack as ropes never leak to other users of the
 * stack. This also assumes that a rope can only point to other ropes or
 * linear strings, it cannot refer to GC things of other types.
 */
static void
ScanRope(GCMarker *gcmarker, JSRope *rope)
{
    /* Remember the stack depth so we can tell our own entries apart. */
    ptrdiff_t savedPos = gcmarker->stack.position();
    JS_DIAGNOSTICS_ASSERT(GetGCThingTraceKind(rope) == JSTRACE_STRING);
    for (;;) {
        JS_DIAGNOSTICS_ASSERT(GetGCThingTraceKind(rope) == JSTRACE_STRING);
        JS_DIAGNOSTICS_ASSERT(rope->JSString::isRope());
        JS_COMPARTMENT_ASSERT_STR(gcmarker->runtime(), rope);
        JS_ASSERT(rope->isMarked());
        /* The next rope to scan, if any child turns out to be a rope. */
        JSRope *next = nullptr;

        JSString *right = rope->rightChild();
        if (!right->isPermanentAtom() && right->markIfUnmarked()) {
            if (right->isLinear())
                ScanLinearString(gcmarker, &right->asLinear());
            else
                next = &right->asRope();
        }

        JSString *left = rope->leftChild();
        if (!left->isPermanentAtom() && left->markIfUnmarked()) {
            if (left->isLinear()) {
                ScanLinearString(gcmarker, &left->asLinear());
            } else {
                /*
                 * When both children are ropes, set aside the right one to
                 * scan it later. If the mark stack is full, defer it to the
                 * delayed-marking list instead.
                 */
                if (next && !gcmarker->stack.push(reinterpret_cast<uintptr_t>(next)))
                    gcmarker->delayMarkingChildren(next);
                next = &left->asRope();
            }
        }
        if (next) {
            rope = next;
        } else if (savedPos != gcmarker->stack.position()) {
            /* No immediate child rope; resume a rope we set aside earlier. */
            JS_ASSERT(savedPos < gcmarker->stack.position());
            rope = reinterpret_cast<JSRope *>(gcmarker->stack.pop());
        } else {
            break;
        }
    }
    /* The stack must be back at the depth we found it. */
    JS_ASSERT(savedPos == gcmarker->stack.position());
}
|
1106 |
|
1107 static inline void |
|
1108 ScanString(GCMarker *gcmarker, JSString *str) |
|
1109 { |
|
1110 if (str->isLinear()) |
|
1111 ScanLinearString(gcmarker, &str->asLinear()); |
|
1112 else |
|
1113 ScanRope(gcmarker, &str->asRope()); |
|
1114 } |
|
1115 |
|
1116 static inline void |
|
1117 PushMarkStack(GCMarker *gcmarker, JSString *str) |
|
1118 { |
|
1119 // Permanent atoms might not be associated with this runtime. |
|
1120 if (str->isPermanentAtom()) |
|
1121 return; |
|
1122 |
|
1123 JS_COMPARTMENT_ASSERT_STR(gcmarker->runtime(), str); |
|
1124 |
|
1125 /* |
|
1126 * As string can only refer to other strings we fully scan its GC graph |
|
1127 * using the explicit stack when navigating the rope tree to avoid |
|
1128 * dealing with strings on the stack in drainMarkStack. |
|
1129 */ |
|
1130 if (str->markIfUnmarked()) |
|
1131 ScanString(gcmarker, str); |
|
1132 } |
|
1133 |
|
/* Slow-path tracing: delegate to the object's own child-marking routine. */
void
gc::MarkChildren(JSTracer *trc, JSObject *obj)
{
    obj->markChildren(trc);
}
|
1139 |
|
/*
 * Slow-path tracing for strings: a dependent string traces its base, a rope
 * traces its two children; flat strings with no base have no children.
 */
static void
gc::MarkChildren(JSTracer *trc, JSString *str)
{
    if (str->hasBase())
        str->markBase(trc);
    else if (str->isRope())
        str->asRope().markChildren(trc);
}
|
1148 |
|
/* Slow-path tracing: delegate to the script's own child-marking routine. */
static void
gc::MarkChildren(JSTracer *trc, JSScript *script)
{
    script->markChildren(trc);
}
|
1154 |
|
/* Slow-path tracing: delegate to the lazy script's child-marking routine. */
static void
gc::MarkChildren(JSTracer *trc, LazyScript *lazy)
{
    lazy->markChildren(trc);
}
|
1160 |
|
/* Slow-path tracing: delegate to the shape's own child-marking routine. */
static void
gc::MarkChildren(JSTracer *trc, Shape *shape)
{
    shape->markChildren(trc);
}
|
1166 |
|
/* Slow-path tracing: delegate to the base shape's child-marking routine. */
static void
gc::MarkChildren(JSTracer *trc, BaseShape *base)
{
    base->markChildren(trc);
}
|
1172 |
|
/*
 * This function is used by the cycle collector to trace through the
 * children of a BaseShape (and its baseUnowned(), if any). The cycle
 * collector does not directly care about BaseShapes, so only the
 * getter, setter, and parent are marked. Furthermore, the parent is
 * marked only if it isn't the same as prevParent, which will be
 * updated to the current shape's parent.
 */
static inline void
MarkCycleCollectorChildren(JSTracer *trc, BaseShape *base, JSObject **prevParent)
{
    JS_ASSERT(base);

    /*
     * The cycle collector does not need to trace unowned base shapes,
     * as they have the same getter, setter and parent as the original
     * base shape.
     */
    base->assertConsistency();

    /*
     * Each edge is traced through a temporary; the assert afterwards checks
     * that the tracer did not update the pointer (the cycle collector must
     * not move things).
     */
    if (base->hasGetterObject()) {
        JSObject *tmp = base->getterObject();
        MarkObjectUnbarriered(trc, &tmp, "getter");
        JS_ASSERT(tmp == base->getterObject());
    }

    if (base->hasSetterObject()) {
        JSObject *tmp = base->setterObject();
        MarkObjectUnbarriered(trc, &tmp, "setter");
        JS_ASSERT(tmp == base->setterObject());
    }

    /* Skip the parent if it is the same one we just marked for the caller. */
    JSObject *parent = base->getObjectParent();
    if (parent && parent != *prevParent) {
        MarkObjectUnbarriered(trc, &parent, "parent");
        JS_ASSERT(parent == base->getObjectParent());
        *prevParent = parent;
    }
}
|
1212 |
|
1213 /* |
|
1214 * This function is used by the cycle collector to trace through a |
|
1215 * shape. The cycle collector does not care about shapes or base |
|
1216 * shapes, so those are not marked. Instead, any shapes or base shapes |
|
1217 * that are encountered have their children marked. Stack space is |
|
1218 * bounded. If two shapes in a row have the same parent pointer, the |
|
1219 * parent pointer will only be marked once. |
|
1220 */ |
|
1221 void |
|
1222 gc::MarkCycleCollectorChildren(JSTracer *trc, Shape *shape) |
|
1223 { |
|
1224 JSObject *prevParent = nullptr; |
|
1225 do { |
|
1226 MarkCycleCollectorChildren(trc, shape->base(), &prevParent); |
|
1227 MarkId(trc, &shape->propidRef(), "propid"); |
|
1228 shape = shape->previous(); |
|
1229 } while (shape); |
|
1230 } |
|
1231 |
|
/*
 * Fast-path marking of a type object's children: property-id strings, the
 * prototype, the singleton, new-script/typed-object references, and the
 * interpreted function, if present.
 */
static void
ScanTypeObject(GCMarker *gcmarker, types::TypeObject *type)
{
    unsigned count = type->getPropertyCount();
    for (unsigned i = 0; i < count; i++) {
        types::Property *prop = type->getProperty(i);
        /* Property slots can be empty; only string ids need marking here. */
        if (prop && JSID_IS_STRING(prop->id))
            PushMarkStack(gcmarker, JSID_TO_STRING(prop->id));
    }

    if (type->proto().isObject())
        PushMarkStack(gcmarker, type->proto().toObject());

    if (type->singleton() && !type->lazy())
        PushMarkStack(gcmarker, type->singleton());

    if (type->hasNewScript()) {
        PushMarkStack(gcmarker, type->newScript()->fun);
        PushMarkStack(gcmarker, type->newScript()->templateObject);
    } else if (type->hasTypedObject()) {
        PushMarkStack(gcmarker, type->typedObject()->descrHeapPtr());
    }

    if (type->interpretedFunction)
        PushMarkStack(gcmarker, type->interpretedFunction);
}
|
1258 |
|
/*
 * Slow-path tracing of a type object's children; mirrors ScanTypeObject but
 * goes through the generic Mark* entry points so non-GC tracers see the
 * edges too (note: all property ids are traced here, not just string ids).
 */
static void
gc::MarkChildren(JSTracer *trc, types::TypeObject *type)
{
    unsigned count = type->getPropertyCount();
    for (unsigned i = 0; i < count; i++) {
        types::Property *prop = type->getProperty(i);
        if (prop)
            MarkId(trc, &prop->id, "type_prop");
    }

    if (type->proto().isObject())
        MarkObject(trc, &type->protoRaw(), "type_proto");

    if (type->singleton() && !type->lazy())
        MarkObject(trc, &type->singletonRaw(), "type_singleton");

    if (type->hasNewScript()) {
        MarkObject(trc, &type->newScript()->fun, "type_new_function");
        MarkObject(trc, &type->newScript()->templateObject, "type_new_template");
    } else if (type->hasTypedObject()) {
        MarkObject(trc, &type->typedObject()->descrHeapPtr(), "type_heap_ptr");
    }

    if (type->interpretedFunction)
        MarkObject(trc, &type->interpretedFunction, "type_function");
}
|
1285 |
|
/* Slow-path tracing of JIT code; a no-op when Ion is compiled out. */
static void
gc::MarkChildren(JSTracer *trc, jit::JitCode *code)
{
#ifdef JS_ION
    code->trace(trc);
#endif
}
|
1293 |
|
/*
 * Push (or eagerly mark, depending on the PushMarkStack overload for T)
 * every live cell of type T in the given arena.
 */
template<typename T>
static void
PushArenaTyped(GCMarker *gcmarker, ArenaHeader *aheader)
{
    for (CellIterUnderGC i(aheader); !i.done(); i.next())
        PushMarkStack(gcmarker, i.get<T>());
}
|
1301 |
|
/*
 * Push every cell in the arena onto the mark stack, dispatching on the
 * arena's trace kind to instantiate the right PushArenaTyped<T>.
 */
void
gc::PushArena(GCMarker *gcmarker, ArenaHeader *aheader)
{
    switch (MapAllocToTraceKind(aheader->getAllocKind())) {
      case JSTRACE_OBJECT:
        PushArenaTyped<JSObject>(gcmarker, aheader);
        break;

      case JSTRACE_STRING:
        PushArenaTyped<JSString>(gcmarker, aheader);
        break;

      case JSTRACE_SCRIPT:
        PushArenaTyped<JSScript>(gcmarker, aheader);
        break;

      case JSTRACE_LAZY_SCRIPT:
        PushArenaTyped<LazyScript>(gcmarker, aheader);
        break;

      case JSTRACE_SHAPE:
        PushArenaTyped<js::Shape>(gcmarker, aheader);
        break;

      case JSTRACE_BASE_SHAPE:
        PushArenaTyped<js::BaseShape>(gcmarker, aheader);
        break;

      case JSTRACE_TYPE_OBJECT:
        PushArenaTyped<js::types::TypeObject>(gcmarker, aheader);
        break;

      case JSTRACE_JITCODE:
        PushArenaTyped<js::jit::JitCode>(gcmarker, aheader);
        break;
    }
}
|
1339 |
|
/*
 * View of the top three mark stack words holding a value array entry, used
 * by saveValueRanges()/restoreValueArray() to convert between the "live"
 * form (start/end slot pointers) and the "saved" form (slot index plus
 * kind), which survives slot-array reallocation between incremental slices.
 */
struct SlotArrayLayout
{
    union {
        HeapSlot *end;   /* live form: one past the last slot to scan */
        uintptr_t kind;  /* saved form: HeapSlot::Element or HeapSlot::Slot */
    };
    union {
        HeapSlot *start; /* live form: first slot to scan */
        uintptr_t index; /* saved form: slot index to resume from */
    };
    JSObject *obj;       /* the object owning the slots, in both forms */

    static void staticAsserts() {
        /* This should have the same layout as three mark stack items. */
        JS_STATIC_ASSERT(sizeof(SlotArrayLayout) == 3 * sizeof(uintptr_t));
    }
};
|
1357 |
|
/*
 * During incremental GC, we return from drainMarkStack without having processed
 * the entire stack. At that point, JS code can run and reallocate slot arrays
 * that are stored on the stack. To prevent this from happening, we replace all
 * ValueArrayTag stack items with SavedValueArrayTag. In the latter, slots
 * pointers are replaced with slot indexes, and slot array end pointers are
 * replaced with the kind of index (properties vs. elements).
 */
void
GCMarker::saveValueRanges()
{
    /* Walk the mark stack from top to bottom, rewriting value array entries. */
    for (uintptr_t *p = stack.tos_; p > stack.stack_; ) {
        uintptr_t tag = *--p & StackTagMask;
        if (tag == ValueArrayTag) {
            /* Strip the tag; the three words now match SlotArrayLayout. */
            *p &= ~StackTagMask;
            p -= 2;
            SlotArrayLayout *arr = reinterpret_cast<SlotArrayLayout *>(p);
            JSObject *obj = arr->obj;
            JS_ASSERT(obj->isNative());

            HeapSlot *vp = obj->getDenseElements();
            if (arr->end == vp + obj->getDenseInitializedLength()) {
                /* The range covers dense elements. */
                JS_ASSERT(arr->start >= vp);
                arr->index = arr->start - vp;
                arr->kind = HeapSlot::Element;
            } else {
                /* Otherwise the range covers fixed and/or dynamic slots. */
                HeapSlot *vp = obj->fixedSlots();
                unsigned nfixed = obj->numFixedSlots();
                if (arr->start == arr->end) {
                    /* Empty range: resume past the last slot. */
                    arr->index = obj->slotSpan();
                } else if (arr->start >= vp && arr->start < vp + nfixed) {
                    /* Range starts within the fixed slots. */
                    JS_ASSERT(arr->end == vp + Min(nfixed, obj->slotSpan()));
                    arr->index = arr->start - vp;
                } else {
                    /* Range lies within the dynamic slots. */
                    JS_ASSERT(arr->start >= obj->slots &&
                              arr->end == obj->slots + obj->slotSpan() - nfixed);
                    arr->index = (arr->start - obj->slots) + nfixed;
                }
                arr->kind = HeapSlot::Slot;
            }
            /* Re-tag the object word so restoreValueArray finds it later. */
            p[2] |= SavedValueArrayTag;
        } else if (tag == SavedValueArrayTag) {
            /* Already saved by a previous slice; skip its two extra words. */
            p -= 2;
        }
    }
}
|
1404 |
|
/*
 * Inverse of saveValueRanges for one entry: pop the saved index and kind
 * from the mark stack and recompute the slot range pointers for |obj|.
 * Returns false if the saved state cannot be mapped back (e.g. the object
 * is no longer an array), in which case the caller rescans the whole object.
 */
bool
GCMarker::restoreValueArray(JSObject *obj, void **vpp, void **endp)
{
    uintptr_t start = stack.pop();
    HeapSlot::Kind kind = (HeapSlot::Kind) stack.pop();

    if (kind == HeapSlot::Element) {
        if (!obj->is<ArrayObject>())
            return false;

        uint32_t initlen = obj->getDenseInitializedLength();
        HeapSlot *vp = obj->getDenseElements();
        if (start < initlen) {
            *vpp = vp + start;
            *endp = vp + initlen;
        } else {
            /* The object shrunk, in which case no scanning is needed. */
            *vpp = *endp = vp;
        }
    } else {
        JS_ASSERT(kind == HeapSlot::Slot);
        HeapSlot *vp = obj->fixedSlots();
        unsigned nfixed = obj->numFixedSlots();
        unsigned nslots = obj->slotSpan();
        if (start < nslots) {
            if (start < nfixed) {
                /* Resume within the fixed slots. */
                *vpp = vp + start;
                *endp = vp + Min(nfixed, nslots);
            } else {
                /* Resume within the dynamic slots. */
                *vpp = obj->slots + start - nfixed;
                *endp = obj->slots + nslots - nfixed;
            }
        } else {
            /* The object shrunk, in which case no scanning is needed. */
            *vpp = *endp = vp;
        }
    }

    JS_ASSERT(*vpp <= *endp);
    return true;
}
|
1446 |
|
/*
 * Handle the mark stack tags that processMarkStackTop does not treat inline:
 * type objects, saved value arrays (see saveValueRanges), and JIT code.
 */
void
GCMarker::processMarkStackOther(uintptr_t tag, uintptr_t addr)
{
    if (tag == TypeTag) {
        ScanTypeObject(this, reinterpret_cast<types::TypeObject *>(addr));
    } else if (tag == SavedValueArrayTag) {
        JS_ASSERT(!(addr & CellMask));
        JSObject *obj = reinterpret_cast<JSObject *>(addr);
        HeapValue *vp, *end;
        /* Re-derive the slot range; rescan the whole object if that fails. */
        if (restoreValueArray(obj, (void **)&vp, (void **)&end))
            pushValueArray(obj, vp, end);
        else
            pushObject(obj);
    } else if (tag == JitCodeTag) {
        MarkChildren(this, reinterpret_cast<jit::JitCode *>(addr));
    }
}
|
1464 |
|
/*
 * Pop one entry off the mark stack and scan it, possibly pushing more
 * entries. This is the hot core of the fast marking path.
 */
inline void
GCMarker::processMarkStackTop(SliceBudget &budget)
{
    /*
     * The function uses explicit goto and implements the scanning of the
     * object directly. This eliminates the tail recursion and significantly
     * improves the marking performance, see bug 641025.
     */
    HeapSlot *vp, *end;
    JSObject *obj;

    uintptr_t addr = stack.pop();
    uintptr_t tag = addr & StackTagMask;
    addr &= ~StackTagMask;

    if (tag == ValueArrayTag) {
        /* A value array occupies three stack words: obj, start, end. */
        JS_STATIC_ASSERT(ValueArrayTag == 0);
        JS_ASSERT(!(addr & CellMask));
        obj = reinterpret_cast<JSObject *>(addr);
        uintptr_t addr2 = stack.pop();
        uintptr_t addr3 = stack.pop();
        JS_ASSERT(addr2 <= addr3);
        JS_ASSERT((addr3 - addr2) % sizeof(Value) == 0);
        vp = reinterpret_cast<HeapSlot *>(addr2);
        end = reinterpret_cast<HeapSlot *>(addr3);
        goto scan_value_array;
    }

    if (tag == ObjectTag) {
        obj = reinterpret_cast<JSObject *>(addr);
        JS_COMPARTMENT_ASSERT(runtime(), obj);
        goto scan_obj;
    }

    /* All remaining tags are rare; handle them out of line. */
    processMarkStackOther(tag, addr);
    return;

  scan_value_array:
    JS_ASSERT(vp <= end);
    while (vp != end) {
        const Value &v = *vp++;
        if (v.isString()) {
            JSString *str = v.toString();
            /* Permanent atoms may belong to another runtime; skip them. */
            if (!str->isPermanentAtom()) {
                JS_COMPARTMENT_ASSERT_STR(runtime(), str);
                JS_ASSERT(runtime()->isAtomsZone(str->zone()) || str->zone() == obj->zone());
                if (str->markIfUnmarked())
                    ScanString(this, str);
            }
        } else if (v.isObject()) {
            JSObject *obj2 = &v.toObject();
            JS_COMPARTMENT_ASSERT(runtime(), obj2);
            JS_ASSERT(obj->compartment() == obj2->compartment());
            if (obj2->markIfUnmarked(getMarkColor())) {
                /* Save the rest of this array and scan obj2 immediately. */
                pushValueArray(obj, vp, end);
                obj = obj2;
                goto scan_obj;
            }
        }
    }
    return;

  scan_obj:
    {
        JS_COMPARTMENT_ASSERT(runtime(), obj);

        /* Charge the slice budget; if exhausted, requeue obj and stop. */
        budget.step();
        if (budget.isOverBudget()) {
            pushObject(obj);
            return;
        }

        types::TypeObject *type = obj->typeFromGC();
        PushMarkStack(this, type);

        Shape *shape = obj->lastProperty();
        PushMarkStack(this, shape);

        /* Call the trace hook if necessary. */
        const Class *clasp = type->clasp();
        if (clasp->trace) {
            // Global objects all have the same trace hook. That hook is safe without barriers
            // if the global has no custom trace hook of its own, or has been moved to a different
            // compartment, and so can't have one.
            JS_ASSERT_IF(runtime()->gcMode() == JSGC_MODE_INCREMENTAL &&
                         runtime()->gcIncrementalEnabled &&
                         !(clasp->trace == JS_GlobalObjectTraceHook &&
                           (!obj->compartment()->options().getTrace() ||
                            !obj->isOwnGlobal())),
                         clasp->flags & JSCLASS_IMPLEMENTS_BARRIERS);
            clasp->trace(this, obj);
        }

        /* Only native objects have slots/elements to scan below. */
        if (!shape->isNative())
            return;

        unsigned nslots = obj->slotSpan();

        /* Dense elements: scan directly if there are no slots to follow. */
        if (!obj->hasEmptyElements()) {
            vp = obj->getDenseElements();
            end = vp + obj->getDenseInitializedLength();
            if (!nslots)
                goto scan_value_array;
            pushValueArray(obj, vp, end);
        }

        /* Slots split between fixed storage and the dynamic slots array. */
        vp = obj->fixedSlots();
        if (obj->slots) {
            unsigned nfixed = obj->numFixedSlots();
            if (nslots > nfixed) {
                pushValueArray(obj, vp, vp + nfixed);
                vp = obj->slots;
                end = vp + (nslots - nfixed);
                goto scan_value_array;
            }
        }
        /* All slots fit in the fixed storage. */
        JS_ASSERT(nslots <= obj->numFixedSlots());
        end = vp + nslots;
        goto scan_value_array;
    }
}
|
1586 |
|
/*
 * Process mark stack entries until the stack and the delayed-marking list
 * are both empty, or until the slice budget runs out. Returns true when
 * marking is complete, false when interrupted by the budget (after saving
 * value ranges so slot arrays can be reallocated between slices).
 */
bool
GCMarker::drainMarkStack(SliceBudget &budget)
{
#ifdef DEBUG
    JSRuntime *rt = runtime();

    /* Enable strict compartment checking for the duration of the drain. */
    struct AutoCheckCompartment {
        JSRuntime *runtime;
        AutoCheckCompartment(JSRuntime *rt) : runtime(rt) {
            JS_ASSERT(!rt->gcStrictCompartmentChecking);
            runtime->gcStrictCompartmentChecking = true;
        }
        ~AutoCheckCompartment() { runtime->gcStrictCompartmentChecking = false; }
    } acc(rt);
#endif

    if (budget.isOverBudget())
        return false;

    for (;;) {
        while (!stack.isEmpty()) {
            processMarkStackTop(budget);
            if (budget.isOverBudget()) {
                saveValueRanges();
                return false;
            }
        }

        if (!hasDelayedChildren())
            break;

        /*
         * Mark children of things that caused too deep recursion during the
         * above tracing. Don't do this until we're done with everything
         * else.
         */
        if (!markDelayedChildren(budget)) {
            saveValueRanges();
            return false;
        }
    }

    return true;
}
|
1631 |
|
1632 void |
|
1633 js::TraceChildren(JSTracer *trc, void *thing, JSGCTraceKind kind) |
|
1634 { |
|
1635 switch (kind) { |
|
1636 case JSTRACE_OBJECT: |
|
1637 MarkChildren(trc, static_cast<JSObject *>(thing)); |
|
1638 break; |
|
1639 |
|
1640 case JSTRACE_STRING: |
|
1641 MarkChildren(trc, static_cast<JSString *>(thing)); |
|
1642 break; |
|
1643 |
|
1644 case JSTRACE_SCRIPT: |
|
1645 MarkChildren(trc, static_cast<JSScript *>(thing)); |
|
1646 break; |
|
1647 |
|
1648 case JSTRACE_LAZY_SCRIPT: |
|
1649 MarkChildren(trc, static_cast<LazyScript *>(thing)); |
|
1650 break; |
|
1651 |
|
1652 case JSTRACE_SHAPE: |
|
1653 MarkChildren(trc, static_cast<Shape *>(thing)); |
|
1654 break; |
|
1655 |
|
1656 case JSTRACE_JITCODE: |
|
1657 MarkChildren(trc, (js::jit::JitCode *)thing); |
|
1658 break; |
|
1659 |
|
1660 case JSTRACE_BASE_SHAPE: |
|
1661 MarkChildren(trc, static_cast<BaseShape *>(thing)); |
|
1662 break; |
|
1663 |
|
1664 case JSTRACE_TYPE_OBJECT: |
|
1665 MarkChildren(trc, (types::TypeObject *)thing); |
|
1666 break; |
|
1667 } |
|
1668 } |
|
1669 |
|
/* Clear the gray mark bit on a single GC thing (see the comment below). */
static void
UnmarkGrayGCThing(void *thing)
{
    static_cast<js::gc::Cell *>(thing)->unmark(js::gc::GRAY);
}
|
1675 |
|
static void
UnmarkGrayChildren(JSTracer *trc, void **thingp, JSGCTraceKind kind);

/*
 * Tracer that recursively clears gray mark bits (see the large comment
 * before UnmarkGrayChildren below). The second constructor builds a child
 * tracer used while visiting the children of a single thing.
 */
struct UnmarkGrayTracer : public JSTracer
{
    /*
     * We set eagerlyTraceWeakMaps to false because the cycle collector will fix
     * up any color mismatches involving weakmaps when it runs.
     */
    UnmarkGrayTracer(JSRuntime *rt)
      : JSTracer(rt, UnmarkGrayChildren, DoNotTraceWeakMaps),
        tracingShape(false),
        previousShape(nullptr),
        unmarkedAny(false)
    {}

    UnmarkGrayTracer(JSTracer *trc, bool tracingShape)
      : JSTracer(trc->runtime(), UnmarkGrayChildren, DoNotTraceWeakMaps),
        tracingShape(tracingShape),
        previousShape(nullptr),
        unmarkedAny(false)
    {}

    /* True iff we are tracing the immediate children of a shape. */
    bool tracingShape;

    /* If tracingShape, shape child or nullptr. Otherwise, nullptr. */
    void *previousShape;

    /* Whether we unmarked anything. */
    bool unmarkedAny;
};
|
1708 |
|
/*
 * The GC and CC are run independently. Consequently, the following sequence of
 * events can occur:
 * 1. GC runs and marks an object gray.
 * 2. Some JS code runs that creates a pointer from a JS root to the gray
 *    object. If we re-ran a GC at this point, the object would now be black.
 * 3. Now we run the CC. It may think it can collect the gray object, even
 *    though it's reachable from the JS heap.
 *
 * To prevent this badness, we unmark the gray bit of an object when it is
 * accessed by callers outside XPConnect. This would cause the object to go
 * black in step 2 above. This must be done on everything reachable from the
 * object being returned. The following code takes care of the recursive
 * re-coloring.
 *
 * There is an additional complication for certain kinds of edges that are not
 * contained explicitly in the source object itself, such as from a weakmap key
 * to its value, and from an object being watched by a watchpoint to the
 * watchpoint's closure. These "implicit edges" are represented in some other
 * container object, such as the weakmap or the watchpoint itself. In these
 * cases, calling unmark gray on an object won't find all of its children.
 *
 * Handling these implicit edges has two parts:
 * - A special pass enumerating all of the containers that know about the
 *   implicit edges to fix any black-gray edges that have been created. This
 *   is implemented in nsXPConnect::FixWeakMappingGrayBits.
 * - To prevent any incorrectly gray objects from escaping to live JS outside
 *   of the containers, we must add unmark-graying read barriers to these
 *   containers.
 */
static void
UnmarkGrayChildren(JSTracer *trc, void **thingp, JSGCTraceKind kind)
{
    void *thing = *thingp;
    int stackDummy;
    if (!JS_CHECK_STACK_SIZE(trc->runtime()->mainThread.nativeStackLimit[StackForSystemCode], &stackDummy)) {
        /*
         * If we run out of stack, we take a more drastic measure: require that
         * we GC again before the next CC.
         */
        trc->runtime()->gcGrayBitsValid = false;
        return;
    }

    UnmarkGrayTracer *tracer = static_cast<UnmarkGrayTracer *>(trc);
    if (!IsInsideNursery(trc->runtime(), thing)) {
        /* Already non-gray things need no unmarking and no recursion. */
        if (!JS::GCThingIsMarkedGray(thing))
            return;

        UnmarkGrayGCThing(thing);
        tracer->unmarkedAny = true;
    }

    /*
     * Trace children of |thing|. If |thing| and its parent are both shapes,
     * |thing| will get saved to previousShape without being traced. The parent
     * will later trace |thing|. This is done to avoid increasing the stack
     * depth during shape tracing. It is safe to do because a shape can only
     * have one child that is a shape.
     */
    UnmarkGrayTracer childTracer(tracer, kind == JSTRACE_SHAPE);

    if (kind != JSTRACE_SHAPE) {
        JS_TraceChildren(&childTracer, thing, kind);
        JS_ASSERT(!childTracer.previousShape);
        tracer->unmarkedAny |= childTracer.unmarkedAny;
        return;
    }

    if (tracer->tracingShape) {
        /* Defer this shape to the parent's lineage walk below. */
        JS_ASSERT(!tracer->previousShape);
        tracer->previousShape = thing;
        return;
    }

    /* Walk the shape lineage iteratively instead of recursing. */
    do {
        JS_ASSERT(!JS::GCThingIsMarkedGray(thing));
        JS_TraceChildren(&childTracer, thing, JSTRACE_SHAPE);
        thing = childTracer.previousShape;
        childTracer.previousShape = nullptr;
    } while (thing);
    tracer->unmarkedAny |= childTracer.unmarkedAny;
}
|
1792 |
|
/*
 * Public entry point: clear the gray bit on |thing| and everything reachable
 * from it. Returns true if any gray bit was cleared. Shapes are handled only
 * via their owning objects, hence the assertion on |kind|.
 */
JS_FRIEND_API(bool)
JS::UnmarkGrayGCThingRecursively(void *thing, JSGCTraceKind kind)
{
    JS_ASSERT(kind != JSTRACE_SHAPE);

    JSRuntime *rt = static_cast<Cell *>(thing)->runtimeFromMainThread();

    bool unmarkedArg = false;
    if (!IsInsideNursery(rt, thing)) {
        if (!JS::GCThingIsMarkedGray(thing))
            return false;

        UnmarkGrayGCThing(thing);
        unmarkedArg = true;
    }

    /* Recurse into the children via the unmark-gray tracer. */
    UnmarkGrayTracer trc(rt);
    JS_TraceChildren(&trc, thing, kind);

    return unmarkedArg || trc.unmarkedAny;
}