Thu, 22 Jan 2015 13:21:57 +0100
Incorporate requested changes from Mozilla in review:
https://bugzilla.mozilla.org/show_bug.cgi?id=1123480#c6
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sts=4 et sw=4 tw=99:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
7 #include "mozilla/ArrayUtils.h"
9 #ifdef MOZ_VALGRIND
10 # include <valgrind/memcheck.h>
11 #endif
13 #include "jscntxt.h"
14 #include "jsgc.h"
15 #include "jsonparser.h"
16 #include "jsprf.h"
17 #include "jstypes.h"
18 #include "jswatchpoint.h"
20 #include "builtin/MapObject.h"
21 #include "frontend/BytecodeCompiler.h"
22 #include "gc/GCInternals.h"
23 #include "gc/Marking.h"
24 #ifdef JS_ION
25 # include "jit/IonMacroAssembler.h"
26 #endif
27 #include "js/HashTable.h"
28 #include "vm/Debugger.h"
30 #include "jsgcinlines.h"
31 #include "jsobjinlines.h"
using namespace js;
using namespace js::gc;

using mozilla::ArrayEnd;

// Convenience aliases for iterating the runtime's gcRootsHash map of
// externally registered roots (used below in MarkRuntime).
typedef RootedValueMap::Range RootRange;
typedef RootedValueMap::Entry RootEntry;
typedef RootedValueMap::Enum RootEnum;
42 #ifdef JSGC_USE_EXACT_ROOTING
// Mark the single GC thing held by |rooter|, dispatching on |kind| to the
// root-marking function that matches the rooted type.
static inline void
MarkExactStackRoot(JSTracer *trc, Rooted<void*> *rooter, ThingRootKind kind)
{
    void **addr = (void **)rooter->address();
    // Null (and null-tagged) pointers root nothing.
    if (IsNullTaggedPointer(*addr))
        return;

    // LazyProto is a sentinel, not a real object; there is nothing to mark.
    if (kind == THING_ROOT_OBJECT && *addr == TaggedProto::LazyProto)
        return;

    switch (kind) {
      case THING_ROOT_OBJECT: MarkObjectRoot(trc, (JSObject **)addr, "exact-object"); break;
      case THING_ROOT_STRING: MarkStringRoot(trc, (JSString **)addr, "exact-string"); break;
      case THING_ROOT_SCRIPT: MarkScriptRoot(trc, (JSScript **)addr, "exact-script"); break;
      case THING_ROOT_LAZY_SCRIPT: MarkLazyScriptRoot(trc, (LazyScript **)addr, "exact-lazy-script"); break;
      case THING_ROOT_SHAPE: MarkShapeRoot(trc, (Shape **)addr, "exact-shape"); break;
      case THING_ROOT_BASE_SHAPE: MarkBaseShapeRoot(trc, (BaseShape **)addr, "exact-baseshape"); break;
      case THING_ROOT_TYPE: MarkTypeRoot(trc, (types::Type *)addr, "exact-type"); break;
      case THING_ROOT_TYPE_OBJECT: MarkTypeObjectRoot(trc, (types::TypeObject **)addr, "exact-typeobject"); break;
      case THING_ROOT_JIT_CODE: MarkJitCodeRoot(trc, (jit::JitCode **)addr, "exact-jitcode"); break;
      case THING_ROOT_VALUE: MarkValueRoot(trc, (Value *)addr, "exact-value"); break;
      case THING_ROOT_ID: MarkIdRoot(trc, (jsid *)addr, "exact-id"); break;
      case THING_ROOT_BINDINGS: ((Bindings *)addr)->trace(trc); break;
      case THING_ROOT_PROPERTY_DESCRIPTOR: ((JSPropertyDescriptor *)addr)->trace(trc); break;
      case THING_ROOT_CUSTOM: {
        // 'rooter' is a member within a class containing a vtable. Back up
        // to the vtable and call trace() through it.
        const size_t rooterOffset = offsetof(RootedGeneric<void*>, rooter);
        reinterpret_cast< RootedGeneric<void*>* >(uintptr_t(rooter) - rooterOffset)->trace(trc);
        break;
      }
      default: MOZ_ASSUME_UNREACHABLE("Invalid THING_ROOT kind"); break;
    }
}
78 static inline void
79 MarkExactStackRootList(JSTracer *trc, Rooted<void*> *rooter, ThingRootKind kind)
80 {
81 while (rooter) {
82 MarkExactStackRoot(trc, rooter, kind);
83 rooter = rooter->previous();
84 }
85 }
87 static void
88 MarkExactStackRoots(JSTracer *trc)
89 {
90 for (unsigned i = 0; i < THING_ROOT_LIMIT; i++) {
91 for (ContextIter cx(trc->runtime()); !cx.done(); cx.next())
92 MarkExactStackRootList(trc, cx->thingGCRooters[i], ThingRootKind(i));
94 MarkExactStackRootList(trc, trc->runtime()->mainThread.thingGCRooters[i], ThingRootKind(i));
95 }
96 }
97 #endif /* JSGC_USE_EXACT_ROOTING */
// Result codes for the conservative-scanner address checks below: either the
// word looked like a live GC thing (CGCT_VALID) or the reason it was rejected.
enum ConservativeGCTest
{
    CGCT_VALID,
    CGCT_LOWBITSET, /* excluded because one of the low bits was set */
    CGCT_NOTARENA, /* not within arena range in a chunk */
    CGCT_OTHERCOMPARTMENT, /* in another compartment */
    CGCT_NOTCHUNK, /* not within a valid chunk */
    CGCT_FREEARENA, /* within arena containing only free things */
    CGCT_NOTLIVE, /* gcthing is not allocated */
    CGCT_END
};
/*
 * Tests whether w is a (possibly dead) GC thing. Returns CGCT_VALID and
 * details about the thing if so. On failure, returns the reason for rejection.
 *
 * On success the out-params (each optional, may be null) receive the aligned
 * thing pointer, its arena header, and its alloc kind.
 */
static inline ConservativeGCTest
IsAddressableGCThing(JSRuntime *rt, uintptr_t w,
                     bool skipUncollectedCompartments,
                     gc::AllocKind *thingKindPtr,
                     ArenaHeader **arenaHeader,
                     void **thing)
{
    /*
     * We assume that the compiler never uses sub-word alignment to store
     * pointers and does not tag pointers on its own. Additionally, the value
     * representation for all values and the jsid representation for GC-things
     * do not touch the low two bits. Thus any word with the low two bits set
     * is not a valid GC-thing.
     */
    JS_STATIC_ASSERT(JSID_TYPE_STRING == 0 && JSID_TYPE_OBJECT == 4);
    if (w & 0x3)
        return CGCT_LOWBITSET;

    /*
     * An object jsid has its low bits tagged. In the value representation on
     * 64-bit, the high bits are tagged.
     */
    const uintptr_t JSID_PAYLOAD_MASK = ~uintptr_t(JSID_TYPE_MASK);
#if JS_BITS_PER_WORD == 32
    uintptr_t addr = w & JSID_PAYLOAD_MASK;
#elif JS_BITS_PER_WORD == 64
    uintptr_t addr = w & JSID_PAYLOAD_MASK & JSVAL_PAYLOAD_MASK;
#endif

    Chunk *chunk = Chunk::fromAddress(addr);

    // Reject addresses that do not fall inside any chunk this runtime owns.
    if (!rt->gcChunkSet.has(chunk))
        return CGCT_NOTCHUNK;

    /*
     * We query for pointers outside the arena array after checking for an
     * allocated chunk. Such pointers are rare and we want to reject them
     * after doing more likely rejections.
     */
    if (!Chunk::withinArenasRange(addr))
        return CGCT_NOTARENA;

    /* If the arena is not currently allocated, don't access the header. */
    size_t arenaOffset = Chunk::arenaIndex(addr);
    if (chunk->decommittedArenas.get(arenaOffset))
        return CGCT_FREEARENA;

    ArenaHeader *aheader = &chunk->arenas[arenaOffset].aheader;

    if (!aheader->allocated())
        return CGCT_FREEARENA;

    // Optionally skip things in zones that this GC is not collecting.
    if (skipUncollectedCompartments && !aheader->zone->isCollecting())
        return CGCT_OTHERCOMPARTMENT;

    AllocKind thingKind = aheader->getAllocKind();
    uintptr_t offset = addr & ArenaMask;
    uintptr_t minOffset = Arena::firstThingOffset(thingKind);
    if (offset < minOffset)
        return CGCT_NOTARENA;

    /* addr can point inside the thing so we must align the address. */
    uintptr_t shift = (offset - minOffset) % Arena::thingSize(thingKind);
    addr -= shift;

    if (thing)
        *thing = reinterpret_cast<void *>(addr);
    if (arenaHeader)
        *arenaHeader = aheader;
    if (thingKindPtr)
        *thingKindPtr = thingKind;
    return CGCT_VALID;
}
/*
 * Returns CGCT_VALID and mark it if the w can be a live GC thing and sets
 * thingKind accordingly. Otherwise returns the reason for rejection.
 */
static inline ConservativeGCTest
MarkIfGCThingWord(JSTracer *trc, uintptr_t w)
{
    void *thing;
    ArenaHeader *aheader;
    AllocKind thingKind;
    ConservativeGCTest status =
        IsAddressableGCThing(trc->runtime(), w, IS_GC_MARKING_TRACER(trc),
                             &thingKind, &aheader, &thing);
    if (status != CGCT_VALID)
        return status;

    /*
     * Check if the thing is free. We must use the list of free spans as at
     * this point we no longer have the mark bits from the previous GC run and
     * we must account for newly allocated things.
     */
    if (InFreeList(aheader, thing))
        return CGCT_NOTLIVE;

    JSGCTraceKind traceKind = MapAllocToTraceKind(thingKind);
#ifdef DEBUG
    // In debug builds, label the root with the stack word that produced it.
    const char pattern[] = "machine_stack %p";
    char nameBuf[sizeof(pattern) - 2 + sizeof(thing) * 2];
    JS_snprintf(nameBuf, sizeof(nameBuf), pattern, thing);
    trc->setTracingName(nameBuf);
#endif
    trc->setTracingLocation((void *)w);
    // Mark through a copy and assert the tracer did not move the thing.
    void *tmp = thing;
    MarkKind(trc, &tmp, traceKind);
    JS_ASSERT(tmp == thing);

#ifdef DEBUG
    // Record roots found during root marking so later debug passes can
    // replay them (see MarkConservativeStackRoots' useSavedRoots path).
    if (trc->runtime()->gcIncrementalState == MARK_ROOTS)
        trc->runtime()->mainThread.gcSavedRoots.append(
            PerThreadData::SavedGCRoot(thing, traceKind));
#endif

    return CGCT_VALID;
}
234 #ifndef JSGC_USE_EXACT_ROOTING
// Treat a single stack/register word as a potential GC-thing pointer and mark
// it if it plausibly is one (see MarkIfGCThingWord).
static void
MarkWordConservatively(JSTracer *trc, uintptr_t w)
{
    /*
     * The conservative scanner may access words that valgrind considers as
     * undefined. To avoid false positives and not to alter valgrind view of
     * the memory we make as memcheck-defined the argument, a copy of the
     * original word. See bug 572678.
     */
#ifdef MOZ_VALGRIND
    JS_SILENCE_UNUSED_VALUE_IN_EXPR(VALGRIND_MAKE_MEM_DEFINED(&w, sizeof(w)));
#endif

    MarkIfGCThingWord(trc, w);
}
251 MOZ_ASAN_BLACKLIST
252 static void
253 MarkRangeConservatively(JSTracer *trc, const uintptr_t *begin, const uintptr_t *end)
254 {
255 JS_ASSERT(begin <= end);
256 for (const uintptr_t *i = begin; i < end; ++i)
257 MarkWordConservatively(trc, *i);
258 }
// Conservatively mark the stack range [begin, end), skipping over any JIT
// (Ion) activation regions that are traced precisely elsewhere.
static void
MarkRangeConservativelyAndSkipIon(JSTracer *trc, JSRuntime *rt, const uintptr_t *begin, const uintptr_t *end)
{
    const uintptr_t *i = begin;

#if JS_STACK_GROWTH_DIRECTION < 0 && defined(JS_ION) && !defined(JS_ARM_SIMULATOR)
    // Walk only regions in between JIT activations. Note that non-volatile
    // registers are spilled to the stack before the entry frame, ensuring
    // that the conservative scanner will still see them.
    //
    // If the ARM simulator is enabled, JIT activations are not on the native
    // stack but on the simulator stack, so we don't have to skip JIT regions
    // in this case.
    for (jit::JitActivationIterator iter(rt); !iter.done(); ++iter) {
        uintptr_t *jitMin, *jitEnd;
        iter.jitStackRange(jitMin, jitEnd);

        // Mark the gap before this activation, then resume after it.
        MarkRangeConservatively(trc, i, jitMin);
        i = jitEnd;
    }
#endif

    // Mark everything after the most recent Ion activation.
    MarkRangeConservatively(trc, i, end);
}
// Conservatively scan the native stack and the saved register snapshot for
// GC-thing pointers. In debug builds, |useSavedRoots| replays the roots
// recorded by a previous MARK_ROOTS pass instead of rescanning the stack.
static MOZ_NEVER_INLINE void
MarkConservativeStackRoots(JSTracer *trc, bool useSavedRoots)
{
    JSRuntime *rt = trc->runtime();

#ifdef DEBUG
    if (useSavedRoots) {
        for (PerThreadData::SavedGCRoot *root = rt->mainThread.gcSavedRoots.begin();
             root != rt->mainThread.gcSavedRoots.end();
             root++)
        {
            trc->setTracingName("cstack");
            MarkKind(trc, &root->thing, root->kind);
        }
        return;
    }

    // Starting a fresh root-marking pass: drop roots saved by earlier passes.
    if (rt->gcIncrementalState == MARK_ROOTS)
        rt->mainThread.gcSavedRoots.clearAndFree();
#endif

    ConservativeGCData *cgcd = &rt->conservativeGC;
    if (!cgcd->hasStackToScan()) {
#ifdef JS_THREADSAFE
        JS_ASSERT(!rt->requestDepth);
#endif
        return;
    }

    // Compute the scan bounds from the recorded stack top and the runtime's
    // stack base; which one is the lower bound depends on growth direction.
    uintptr_t *stackMin, *stackEnd;
#if JS_STACK_GROWTH_DIRECTION > 0
    stackMin = reinterpret_cast<uintptr_t *>(rt->nativeStackBase);
    stackEnd = cgcd->nativeStackTop;
#else
    stackMin = cgcd->nativeStackTop + 1;
    stackEnd = reinterpret_cast<uintptr_t *>(rt->nativeStackBase);
#endif

    JS_ASSERT(stackMin <= stackEnd);
    MarkRangeConservativelyAndSkipIon(trc, rt, stackMin, stackEnd);
    // Also scan the registers captured by ConservativeGCData::recordStackTop.
    MarkRangeConservatively(trc, cgcd->registerSnapshot.words,
                            ArrayEnd(cgcd->registerSnapshot.words));
}
// Conservatively mark a range of Values on the stack, [beginv, endv).
void
js::MarkStackRangeConservatively(JSTracer *trc, Value *beginv, Value *endv)
{
    const uintptr_t *begin = beginv->payloadUIntPtr();
    const uintptr_t *end = endv->payloadUIntPtr();
#ifdef JS_NUNBOX32
    /*
     * With 64-bit jsvals on 32-bit systems, we can optimize a bit by
     * scanning only the payloads.
     */
    JS_ASSERT(begin <= end);
    // Step by one Value (two words) at a time so only payload words are read.
    for (const uintptr_t *i = begin; i < end; i += sizeof(Value) / sizeof(uintptr_t))
        MarkWordConservatively(trc, *i);
#else
    MarkRangeConservatively(trc, begin, end);
#endif
}
348 #endif /* JSGC_USE_EXACT_ROOTING */
// Record the current native stack extent and a snapshot of the machine
// registers so the conservative scanner can later cover both.
// MOZ_NEVER_INLINE so |dummy| reliably sits in this frame, below the caller.
MOZ_NEVER_INLINE void
ConservativeGCData::recordStackTop()
{
    /* Update the native stack pointer if it points to a bigger stack. */
    uintptr_t dummy;
    nativeStackTop = &dummy;

    /*
     * To record and update the register snapshot for the conservative scanning
     * with the latest values we use setjmp.
     */
#if defined(_MSC_VER)
# pragma warning(push)
# pragma warning(disable: 4611)
#endif
    // MSVC warns (C4611) about setjmp in C++; suppressed deliberately here.
    (void) setjmp(registerSnapshot.jmpbuf);
#if defined(_MSC_VER)
# pragma warning(pop)
#endif
}
// Trace hook for AutoIdArray: mark every jsid in the owned JSIdArray.
void
JS::AutoIdArray::trace(JSTracer *trc)
{
    JS_ASSERT(tag_ == IDARRAY);
    gc::MarkIdRange(trc, idArray->length, idArray->vector, "JSAutoIdArray.idArray");
}
// Trace one AutoGCRooter. Negative tag_ values select a rooter subclass via
// the switch below; non-negative values fall through to the end and are the
// element count of an AutoArrayRooter.
inline void
AutoGCRooter::trace(JSTracer *trc)
{
    switch (tag_) {
      case PARSER:
        frontend::MarkParser(trc, this);
        return;

      case IDARRAY: {
        JSIdArray *ida = static_cast<AutoIdArray *>(this)->idArray;
        MarkIdRange(trc, ida->length, ida->vector, "JS::AutoIdArray.idArray");
        return;
      }

      case DESCRIPTORS: {
        PropDescArray &descriptors =
            static_cast<AutoPropDescArrayRooter *>(this)->descriptors;
        for (size_t i = 0, len = descriptors.length(); i < len; i++) {
            PropDesc &desc = descriptors[i];
            MarkValueRoot(trc, &desc.pd_, "PropDesc::pd_");
            MarkValueRoot(trc, &desc.value_, "PropDesc::value_");
            MarkValueRoot(trc, &desc.get_, "PropDesc::get_");
            MarkValueRoot(trc, &desc.set_, "PropDesc::set_");
        }
        return;
      }

      case ID:
        MarkIdRoot(trc, &static_cast<AutoIdRooter *>(this)->id_, "JS::AutoIdRooter.id_");
        return;

      case VALVECTOR: {
        AutoValueVector::VectorImpl &vector = static_cast<AutoValueVector *>(this)->vector;
        MarkValueRootRange(trc, vector.length(), vector.begin(), "js::AutoValueVector.vector");
        return;
      }

      case IDVECTOR: {
        AutoIdVector::VectorImpl &vector = static_cast<AutoIdVector *>(this)->vector;
        MarkIdRootRange(trc, vector.length(), vector.begin(), "js::AutoIdVector.vector");
        return;
      }

      case SHAPEVECTOR: {
        AutoShapeVector::VectorImpl &vector = static_cast<js::AutoShapeVector *>(this)->vector;
        MarkShapeRootRange(trc, vector.length(), const_cast<Shape **>(vector.begin()),
                           "js::AutoShapeVector.vector");
        return;
      }

      case OBJVECTOR: {
        AutoObjectVector::VectorImpl &vector = static_cast<AutoObjectVector *>(this)->vector;
        MarkObjectRootRange(trc, vector.length(), vector.begin(), "js::AutoObjectVector.vector");
        return;
      }

      case FUNVECTOR: {
        AutoFunctionVector::VectorImpl &vector = static_cast<AutoFunctionVector *>(this)->vector;
        MarkObjectRootRange(trc, vector.length(), vector.begin(), "js::AutoFunctionVector.vector");
        return;
      }

      case STRINGVECTOR: {
        AutoStringVector::VectorImpl &vector = static_cast<AutoStringVector *>(this)->vector;
        MarkStringRootRange(trc, vector.length(), vector.begin(), "js::AutoStringVector.vector");
        return;
      }

      case NAMEVECTOR: {
        AutoNameVector::VectorImpl &vector = static_cast<AutoNameVector *>(this)->vector;
        MarkStringRootRange(trc, vector.length(), vector.begin(), "js::AutoNameVector.vector");
        return;
      }

      case VALARRAY: {
        /*
         * We don't know the template size parameter, but we can safely treat it
         * as an AutoValueArray<1> because the length is stored separately.
         */
        AutoValueArray<1> *array = static_cast<AutoValueArray<1> *>(this);
        MarkValueRootRange(trc, array->length(), array->begin(), "js::AutoValueArray");
        return;
      }

      case SCRIPTVECTOR: {
        AutoScriptVector::VectorImpl &vector = static_cast<AutoScriptVector *>(this)->vector;
        MarkScriptRootRange(trc, vector.length(), vector.begin(), "js::AutoScriptVector.vector");
        return;
      }

      case OBJOBJHASHMAP: {
        AutoObjectObjectHashMap::HashMapImpl &map = static_cast<AutoObjectObjectHashMap *>(this)->map;
        for (AutoObjectObjectHashMap::Enum e(map); !e.empty(); e.popFront()) {
            MarkObjectRoot(trc, &e.front().value(), "AutoObjectObjectHashMap value");
            trc->setTracingLocation((void *)&e.front().key());
            // Marking may move the key; rekey the entry if it did.
            JSObject *key = e.front().key();
            MarkObjectRoot(trc, &key, "AutoObjectObjectHashMap key");
            if (key != e.front().key())
                e.rekeyFront(key);
        }
        return;
      }

      case OBJU32HASHMAP: {
        AutoObjectUnsigned32HashMap *self = static_cast<AutoObjectUnsigned32HashMap *>(this);
        AutoObjectUnsigned32HashMap::HashMapImpl &map = self->map;
        for (AutoObjectUnsigned32HashMap::Enum e(map); !e.empty(); e.popFront()) {
            JSObject *key = e.front().key();
            MarkObjectRoot(trc, &key, "AutoObjectUnsignedHashMap key");
            if (key != e.front().key())
                e.rekeyFront(key);
        }
        return;
      }

      case OBJHASHSET: {
        AutoObjectHashSet *self = static_cast<AutoObjectHashSet *>(this);
        AutoObjectHashSet::HashSetImpl &set = self->set;
        for (AutoObjectHashSet::Enum e(set); !e.empty(); e.popFront()) {
            JSObject *obj = e.front();
            MarkObjectRoot(trc, &obj, "AutoObjectHashSet value");
            if (obj != e.front())
                e.rekeyFront(obj);
        }
        return;
      }

      case HASHABLEVALUE: {
        AutoHashableValueRooter *rooter = static_cast<AutoHashableValueRooter *>(this);
        rooter->trace(trc);
        return;
      }

      case IONMASM: {
#ifdef JS_ION
        static_cast<js::jit::MacroAssembler::AutoRooter *>(this)->masm()->trace(trc);
#endif
        return;
      }

      case IONALLOC: {
#ifdef JS_ION
        static_cast<js::jit::AutoTempAllocatorRooter *>(this)->trace(trc);
#endif
        return;
      }

      case WRAPPER: {
        /*
         * We need to use MarkValueUnbarriered here because we mark wrapper
         * roots in every slice. This is because of some rule-breaking in
         * RemapAllWrappersForObject; see comment there.
         */
        MarkValueUnbarriered(trc, &static_cast<AutoWrapperRooter *>(this)->value.get(),
                             "JS::AutoWrapperRooter.value");
        return;
      }

      case WRAPVECTOR: {
        AutoWrapperVector::VectorImpl &vector = static_cast<AutoWrapperVector *>(this)->vector;
        /*
         * We need to use MarkValueUnbarriered here because we mark wrapper
         * roots in every slice. This is because of some rule-breaking in
         * RemapAllWrappersForObject; see comment there.
         */
        for (WrapperValue *p = vector.begin(); p < vector.end(); p++)
            MarkValueUnbarriered(trc, &p->get(), "js::AutoWrapperVector.vector");
        return;
      }

      case JSONPARSER:
        static_cast<js::JSONParser *>(this)->trace(trc);
        return;

      case CUSTOM:
        static_cast<JS::CustomAutoRooter *>(this)->trace(trc);
        return;
    }

    // No case matched: tag_ is the (non-negative) length of an
    // AutoArrayRooter's value array.
    JS_ASSERT(tag_ >= 0);
    if (Value *vp = static_cast<AutoArrayRooter *>(this)->array)
        MarkValueRootRange(trc, tag_, vp, "JS::AutoArrayRooter.array");
}
562 /* static */ void
563 AutoGCRooter::traceAll(JSTracer *trc)
564 {
565 for (ContextIter cx(trc->runtime()); !cx.done(); cx.next()) {
566 for (js::AutoGCRooter *gcr = cx->autoGCRooters; gcr; gcr = gcr->down)
567 gcr->trace(trc);
568 }
569 }
571 /* static */ void
572 AutoGCRooter::traceAllWrappers(JSTracer *trc)
573 {
574 for (ContextIter cx(trc->runtime()); !cx.done(); cx.next()) {
575 for (js::AutoGCRooter *gcr = cx->autoGCRooters; gcr; gcr = gcr->down) {
576 if (gcr->tag_ == WRAPVECTOR || gcr->tag_ == WRAPPER)
577 gcr->trace(trc);
578 }
579 }
580 }
// Trace hook for AutoHashableValueRooter: mark the wrapped Value.
void
AutoHashableValueRooter::trace(JSTracer *trc)
{
    MarkValueRoot(trc, reinterpret_cast<Value*>(&value), "AutoHashableValueRooter");
}
// Trace hook for StackShape: mark the base shape (if any) and the property id.
void
StackShape::trace(JSTracer *trc)
{
    if (base)
        MarkBaseShapeRoot(trc, (BaseShape**) &base, "StackShape base");
    MarkIdRoot(trc, (jsid*) &propid, "StackShape id");
}
// Trace hook for JSPropertyDescriptor: mark the holder object, the value, and
// — when the attributes say getter/setter are objects — the accessor objects.
void
JSPropertyDescriptor::trace(JSTracer *trc)
{
    if (obj)
        MarkObjectRoot(trc, &obj, "Descriptor::obj");
    MarkValueRoot(trc, &value, "Descriptor::value");
    if ((attrs & JSPROP_GETTER) && getter) {
        // The accessor is stored as a function pointer; convert to an object
        // pointer for marking and store back (marking may move the object).
        JSObject *tmp = JS_FUNC_TO_DATA_PTR(JSObject *, getter);
        MarkObjectRoot(trc, &tmp, "Descriptor::get");
        getter = JS_DATA_TO_FUNC_PTR(JSPropertyOp, tmp);
    }
    if ((attrs & JSPROP_SETTER) && setter) {
        JSObject *tmp = JS_FUNC_TO_DATA_PTR(JSObject *, setter);
        MarkObjectRoot(trc, &tmp, "Descriptor::set");
        setter = JS_DATA_TO_FUNC_PTR(JSStrictPropertyOp, tmp);
    }
}
614 namespace js {
615 namespace gc {
// Helper for marking the runtime's linked lists of PersistentRooted<T>.
// The Mark template parameter selects the root-marking function matching T.
template<typename T>
struct PersistentRootedMarker
{
    typedef PersistentRooted<T> Element;
    typedef mozilla::LinkedList<Element> List;
    typedef void (*MarkFunc)(JSTracer *trc, T *ref, const char *name);

    // Mark each list element whose stored value is non-null (for pointer-like
    // T where null means "nothing rooted").
    template <MarkFunc Mark>
    static void
    markChainIfNotNull(JSTracer *trc, List &list, const char *name)
    {
        for (Element *r = list.getFirst(); r; r = r->getNext()) {
            if (r->get())
                Mark(trc, r->address(), name);
        }
    }

    // Mark every list element unconditionally (for T that is never null).
    template <MarkFunc Mark>
    static void
    markChain(JSTracer *trc, List &list, const char *name)
    {
        for (Element *r = list.getFirst(); r; r = r->getNext())
            Mark(trc, r->address(), name);
    }
};
642 }
643 }
// Mark all of the runtime's PersistentRooted chains, one per rooted type.
void
js::gc::MarkPersistentRootedChains(JSTracer *trc)
{
    JSRuntime *rt = trc->runtime();

    // Mark the PersistentRooted chains of types that may be null.
    PersistentRootedMarker<JSFunction*>::markChainIfNotNull<MarkObjectRoot>(
        trc, rt->functionPersistentRooteds, "PersistentRooted<JSFunction *>");
    PersistentRootedMarker<JSObject*>::markChainIfNotNull<MarkObjectRoot>(
        trc, rt->objectPersistentRooteds, "PersistentRooted<JSObject *>");
    PersistentRootedMarker<JSScript*>::markChainIfNotNull<MarkScriptRoot>(
        trc, rt->scriptPersistentRooteds, "PersistentRooted<JSScript *>");
    PersistentRootedMarker<JSString*>::markChainIfNotNull<MarkStringRoot>(
        trc, rt->stringPersistentRooteds, "PersistentRooted<JSString *>");

    // Mark the PersistentRooted chains of types that are never null.
    PersistentRootedMarker<jsid>::markChain<MarkIdRoot>(trc, rt->idPersistentRooteds,
                                                        "PersistentRooted<jsid>");
    PersistentRootedMarker<Value>::markChain<MarkValueRoot>(trc, rt->valuePersistentRooteds,
                                                            "PersistentRooted<Value>");
}
// Mark all roots of the runtime: cross-compartment wrappers, AutoGCRooters,
// stack roots (exact or conservative), externally registered roots,
// persistent roots, contexts, zones/compartments, activations, and the
// embedding's extra root tracers. The phase order below is deliberate.
void
js::gc::MarkRuntime(JSTracer *trc, bool useSavedRoots)
{
    JSRuntime *rt = trc->runtime();
    JS_ASSERT(trc->callback != GCMarker::GrayCallback);

    JS_ASSERT(!rt->mainThread.suppressGC);

    if (IS_GC_MARKING_TRACER(trc)) {
        // Wrappers living in uncollected compartments may point into
        // collected ones; treat them as roots.
        for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
            if (!c->zone()->isCollecting())
                c->markCrossCompartmentWrappers(trc);
        }
        Debugger::markCrossCompartmentDebuggerObjectReferents(trc);
    }

    AutoGCRooter::traceAll(trc);

    if (!rt->isBeingDestroyed()) {
#ifdef JSGC_USE_EXACT_ROOTING
        MarkExactStackRoots(trc);
#else
        MarkConservativeStackRoots(trc, useSavedRoots);
#endif
        rt->markSelfHostingGlobal(trc);
    }

    // Mark roots registered by the embedding via the gcRootsHash map.
    for (RootRange r = rt->gcRootsHash.all(); !r.empty(); r.popFront()) {
        const RootEntry &entry = r.front();
        const char *name = entry.value().name ? entry.value().name : "root";
        JSGCRootType type = entry.value().type;
        void *key = entry.key();
        if (type == JS_GC_ROOT_VALUE_PTR) {
            MarkValueRoot(trc, reinterpret_cast<Value *>(key), name);
        } else if (*reinterpret_cast<void **>(key)){
            if (type == JS_GC_ROOT_STRING_PTR)
                MarkStringRoot(trc, reinterpret_cast<JSString **>(key), name);
            else if (type == JS_GC_ROOT_OBJECT_PTR)
                MarkObjectRoot(trc, reinterpret_cast<JSObject **>(key), name);
            else if (type == JS_GC_ROOT_SCRIPT_PTR)
                MarkScriptRoot(trc, reinterpret_cast<JSScript **>(key), name);
            else
                MOZ_ASSUME_UNREACHABLE("unexpected js::RootInfo::type value");
        }
    }

    MarkPersistentRootedChains(trc);

    if (rt->scriptAndCountsVector) {
        ScriptAndCountsVector &vec = *rt->scriptAndCountsVector;
        for (size_t i = 0; i < vec.length(); i++)
            MarkScriptRoot(trc, &vec[i].script, "scriptAndCountsVector");
    }

    if (!rt->isBeingDestroyed() && !trc->runtime()->isHeapMinorCollecting()) {
        if (!IS_GC_MARKING_TRACER(trc) || rt->atomsCompartment()->zone()->isCollecting()) {
            MarkPermanentAtoms(trc);
            MarkAtoms(trc);
#ifdef JS_ION
            jit::JitRuntime::Mark(trc);
#endif
        }
    }

    for (ContextIter acx(rt); !acx.done(); acx.next())
        acx->mark(trc);

    for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next()) {
        if (IS_GC_MARKING_TRACER(trc) && !zone->isCollecting())
            continue;

        /* Do not discard scripts with counts while profiling. */
        if (rt->profilingScripts && !rt->isHeapMinorCollecting()) {
            for (CellIterUnderGC i(zone, FINALIZE_SCRIPT); !i.done(); i.next()) {
                JSScript *script = i.get<JSScript>();
                if (script->hasScriptCounts()) {
                    MarkScriptRoot(trc, &script, "profilingScripts");
                    JS_ASSERT(script == i.get<JSScript>());
                }
            }
        }
    }

    /* We can't use GCCompartmentsIter if we're called from TraceRuntime. */
    for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
        if (trc->runtime()->isHeapMinorCollecting())
            c->globalWriteBarriered = false;

        if (IS_GC_MARKING_TRACER(trc) && !c->zone()->isCollecting())
            continue;

        /* During a GC, these are treated as weak pointers. */
        if (!IS_GC_MARKING_TRACER(trc)) {
            if (c->watchpointMap)
                c->watchpointMap->markAll(trc);
        }

        /* Mark debug scopes, if present */
        if (c->debugScopes)
            c->debugScopes->mark(trc);
    }

    MarkInterpreterActivations(rt, trc);

#ifdef JS_ION
    jit::MarkJitActivations(rt, trc);
#endif

    if (!rt->isHeapMinorCollecting()) {
        /*
         * All JSCompartment::mark does is mark the globals for compartments
         * which have been entered. Globals aren't nursery allocated so there's
         * no need to do this for minor GCs.
         */
        for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next())
            c->markRoots(trc);

        /*
         * The embedding can register additional roots here.
         *
         * We don't need to trace these in a minor GC because all pointers into
         * the nursery should be in the store buffer, and we want to avoid the
         * time taken to trace all these roots.
         */
        for (size_t i = 0; i < rt->gcBlackRootTracers.length(); i++) {
            const JSRuntime::ExtraTracer &e = rt->gcBlackRootTracers[i];
            (*e.op)(trc, e.data);
        }

        /* During GC, we don't mark gray roots at this stage. */
        if (JSTraceDataOp op = rt->gcGrayRootTracer.op) {
            if (!IS_GC_MARKING_TRACER(trc))
                (*op)(trc, rt->gcGrayRootTracer.data);
        }
    }
}
804 void
805 js::gc::BufferGrayRoots(GCMarker *gcmarker)
806 {
807 JSRuntime *rt = gcmarker->runtime();
808 gcmarker->startBufferingGrayRoots();
809 if (JSTraceDataOp op = rt->gcGrayRootTracer.op)
810 (*op)(gcmarker, rt->gcGrayRootTracer.data);
811 gcmarker->endBufferingGrayRoots();
812 }