Wed, 31 Dec 2014 06:09:35 +0100
Cloned upstream origin tor-browser at tor-browser-31.3.0esr-4.5-1-build1
revision ID fc1c9ff7c1b2defdbc039f12214767608f46423f for hacking purposes.
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sts=4 et sw=4 tw=99:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
7 #include "gc/Tracer.h"
9 #include "mozilla/DebugOnly.h"
11 #include "jsapi.h"
12 #include "jsfun.h"
13 #include "jsgc.h"
14 #include "jsprf.h"
15 #include "jsscript.h"
16 #include "jsutil.h"
17 #include "NamespaceImports.h"
19 #include "gc/GCInternals.h"
20 #include "gc/Marking.h"
22 #include "jsgcinlines.h"
24 using namespace js;
25 using namespace js::gc;
26 using mozilla::DebugOnly;
/* Public wrapper: trace the Value at |valuep| (no write barrier) under |name|. */
JS_PUBLIC_API(void)
JS_CallValueTracer(JSTracer *trc, Value *valuep, const char *name)
{
    MarkValueUnbarriered(trc, valuep, name);
}
/* Public wrapper: trace the jsid at |idp| (no write barrier) under |name|. */
JS_PUBLIC_API(void)
JS_CallIdTracer(JSTracer *trc, jsid *idp, const char *name)
{
    MarkIdUnbarriered(trc, idp, name);
}
/*
 * Public wrapper: trace the JSObject* at |objp| (no write barrier) under
 * |name|. The tracer is handed the location itself, so *objp may be updated
 * in place by a moving trace.
 */
JS_PUBLIC_API(void)
JS_CallObjectTracer(JSTracer *trc, JSObject **objp, const char *name)
{
    MarkObjectUnbarriered(trc, objp, name);
}
/* Public wrapper: trace the JSString* at |strp| (no write barrier) under |name|. */
JS_PUBLIC_API(void)
JS_CallStringTracer(JSTracer *trc, JSString **strp, const char *name)
{
    MarkStringUnbarriered(trc, strp, name);
}
/* Public wrapper: trace the JSScript* at |scriptp| (no write barrier) under |name|. */
JS_PUBLIC_API(void)
JS_CallScriptTracer(JSTracer *trc, JSScript **scriptp, const char *name)
{
    MarkScriptUnbarriered(trc, scriptp, name);
}
/*
 * Trace a JS::Heap<Value> slot. unsafeGet() exposes the raw storage so the
 * tracer can rewrite the value in place without triggering barriers.
 */
JS_PUBLIC_API(void)
JS_CallHeapValueTracer(JSTracer *trc, JS::Heap<JS::Value> *valuep, const char *name)
{
    MarkValueUnbarriered(trc, valuep->unsafeGet(), name);
}
/* Trace a JS::Heap<jsid> slot via its raw storage (no barriers). */
JS_PUBLIC_API(void)
JS_CallHeapIdTracer(JSTracer *trc, JS::Heap<jsid> *idp, const char *name)
{
    MarkIdUnbarriered(trc, idp->unsafeGet(), name);
}
/* Trace a JS::Heap<JSObject *> slot via its raw storage (no barriers). */
JS_PUBLIC_API(void)
JS_CallHeapObjectTracer(JSTracer *trc, JS::Heap<JSObject *> *objp, const char *name)
{
    MarkObjectUnbarriered(trc, objp->unsafeGet(), name);
}
/* Trace a JS::Heap<JSString *> slot via its raw storage (no barriers). */
JS_PUBLIC_API(void)
JS_CallHeapStringTracer(JSTracer *trc, JS::Heap<JSString *> *strp, const char *name)
{
    MarkStringUnbarriered(trc, strp->unsafeGet(), name);
}
/* Trace a JS::Heap<JSScript *> slot via its raw storage (no barriers). */
JS_PUBLIC_API(void)
JS_CallHeapScriptTracer(JSTracer *trc, JS::Heap<JSScript *> *scriptp, const char *name)
{
    MarkScriptUnbarriered(trc, scriptp->unsafeGet(), name);
}
/*
 * Trace a JS::Heap<JSFunction *> slot. Functions are traced as objects;
 * the JSFunction** location is handed to the object tracer directly.
 */
JS_PUBLIC_API(void)
JS_CallHeapFunctionTracer(JSTracer *trc, JS::Heap<JSFunction *> *funp, const char *name)
{
    MarkObjectUnbarriered(trc, funp->unsafeGet(), name);
}
/*
 * Trace a JS::TenuredHeap<JSObject *> edge. TenuredHeap does not store a
 * plain pointer (it goes through getPtr/setPtr), so we cannot hand the
 * tracer the slot directly: unpack into a local, trace that, and write the
 * (possibly updated) pointer back. Null slots are skipped entirely.
 */
JS_PUBLIC_API(void)
JS_CallTenuredObjectTracer(JSTracer *trc, JS::TenuredHeap<JSObject *> *objp, const char *name)
{
    JSObject *obj = objp->getPtr();
    if (!obj)
        return;

    /* Record the true edge location (used by JS_GC_ZEAL location tracking). */
    trc->setTracingLocation((void*)objp);
    MarkObjectUnbarriered(trc, &obj, name);

    objp->setPtr(obj);
}
/* Public wrapper over js::TraceChildren: trace every edge leaving |thing|. */
JS_PUBLIC_API(void)
JS_TraceChildren(JSTracer *trc, void *thing, JSGCTraceKind kind)
{
    js::TraceChildren(trc, thing, kind);
}
/*
 * Trace all roots of the runtime. Asserts the heap is idle first: tracing
 * while a GC is in progress would observe an inconsistent heap.
 */
JS_PUBLIC_API(void)
JS_TraceRuntime(JSTracer *trc)
{
    AssertHeapIsIdle(trc->runtime());
    TraceRuntime(trc);
}
/*
 * Return the number of base-10 digits needed to print |num|.
 * Zero still occupies one digit ("0").
 */
static size_t
CountDecimalDigits(size_t num)
{
    size_t digits = 1;
    while (num >= 10) {
        num /= 10;
        ++digits;
    }
    return digits;
}
/*
 * Fill |buf| (capacity |bufsize|; always NUL-terminated on return) with a
 * short human-readable description of |thing|: first the kind/class name,
 * then, when |details| is true and space remains, kind-specific extras
 * (function name, private pointer, string contents, script location).
 */
JS_PUBLIC_API(void)
JS_GetTraceThingInfo(char *buf, size_t bufsize, JSTracer *trc, void *thing,
                     JSGCTraceKind kind, bool details)
{
    const char *name = nullptr; /* silence uninitialized warning */
    size_t n;

    if (bufsize == 0)
        return;

    switch (kind) {
      case JSTRACE_OBJECT:
      {
        name = static_cast<JSObject *>(thing)->getClass()->name;
        break;
      }

      case JSTRACE_STRING:
        name = ((JSString *)thing)->isDependent()
               ? "substring"
               : "string";
        break;

      case JSTRACE_SCRIPT:
        name = "script";
        break;

      case JSTRACE_LAZY_SCRIPT:
        name = "lazyscript";
        break;

      case JSTRACE_JITCODE:
        name = "jitcode";
        break;

      case JSTRACE_SHAPE:
        name = "shape";
        break;

      case JSTRACE_BASE_SHAPE:
        name = "base_shape";
        break;

      case JSTRACE_TYPE_OBJECT:
        name = "type_object";
        break;
    }

    /*
     * Copy the kind name, truncated to fit; afterwards buf/bufsize track the
     * unused tail of the caller's buffer.
     */
    n = strlen(name);
    if (n > bufsize - 1)
        n = bufsize - 1;
    js_memcpy(buf, name, n + 1);
    buf += n;
    bufsize -= n;
    *buf = '\0';

    if (details && bufsize > 2) {
        switch (kind) {
          case JSTRACE_OBJECT:
          {
            JSObject *obj = (JSObject *)thing;
            if (obj->is<JSFunction>()) {
                JSFunction *fun = &obj->as<JSFunction>();
                if (fun->displayAtom()) {
                    *buf++ = ' ';
                    bufsize--;
                    PutEscapedString(buf, bufsize, fun->displayAtom(), 0);
                }
            } else if (obj->getClass()->flags & JSCLASS_HAS_PRIVATE) {
                JS_snprintf(buf, bufsize, " %p", obj->getPrivate());
            } else {
                JS_snprintf(buf, bufsize, " <no private>");
            }
            break;
          }

          case JSTRACE_STRING:
          {
            *buf++ = ' ';
            bufsize--;
            JSString *str = (JSString *)thing;

            if (str->isLinear()) {
                /* Check whether the "<length N> " prefix plus the chars fit. */
                bool willFit = str->length() + strlen("<length > ") +
                               CountDecimalDigits(str->length()) < bufsize;

                n = JS_snprintf(buf, bufsize, "<length %d%s> ",
                                (int)str->length(),
                                willFit ? "" : " (truncated)");
                buf += n;
                bufsize -= n;

                PutEscapedString(buf, bufsize, &str->asLinear(), 0);
            }
            else
                JS_snprintf(buf, bufsize, "<rope: length %d>", (int)str->length());
            break;
          }

          case JSTRACE_SCRIPT:
          {
            JSScript *script = static_cast<JSScript *>(thing);
            JS_snprintf(buf, bufsize, " %s:%u", script->filename(), unsigned(script->lineno()));
            break;
          }

          case JSTRACE_LAZY_SCRIPT:
          case JSTRACE_JITCODE:
          case JSTRACE_SHAPE:
          case JSTRACE_BASE_SHAPE:
          case JSTRACE_TYPE_OBJECT:
            break;
        }
    }
    buf[bufsize - 1] = '\0';
}
/*
 * Construct a tracer over |rt| that dispatches each traced thing to
 * |traceCallback|. |weakTraceKind| controls how weak map entries are handled
 * (defaults to TraceWeakMapValues). Debug-naming state starts cleared.
 */
JSTracer::JSTracer(JSRuntime *rt, JSTraceCallback traceCallback,
                   WeakMapTraceKind weakTraceKind /* = TraceWeakMapValues */)
  : callback(traceCallback)
  , runtime_(rt)
  , debugPrinter_(nullptr)
  , debugPrintArg_(nullptr)
  , debugPrintIndex_(size_t(-1))       /* size_t(-1) means "no index" */
  , eagerlyTraceWeakMaps_(weakTraceKind)
#ifdef JS_GC_ZEAL
  , realLocation_(nullptr)
#endif
{
}
263 bool
264 JSTracer::hasTracingDetails() const
265 {
266 return debugPrinter_ || debugPrintArg_;
267 }
/*
 * Return a static name for the current edge. When a debug printer is
 * installed, debugPrintArg_ is the printer's opaque argument — not a string —
 * so |fallback| is returned instead; use getTracingEdgeName() to get the
 * printer's formatted output.
 */
const char *
JSTracer::tracingName(const char *fallback) const
{
    JS_ASSERT(hasTracingDetails());
    return debugPrinter_ ? fallback : (const char *)debugPrintArg_;
}
276 const char *
277 JSTracer::getTracingEdgeName(char *buffer, size_t bufferSize)
278 {
279 if (debugPrinter_) {
280 debugPrinter_(this, buffer, bufferSize);
281 return buffer;
282 }
283 if (debugPrintIndex_ != size_t(-1)) {
284 JS_snprintf(buffer, bufferSize, "%s[%lu]",
285 (const char *)debugPrintArg_,
286 debugPrintIndex_);
287 return buffer;
288 }
289 return (const char*)debugPrintArg_;
290 }
/* Accessor: the installed debug name printer, or nullptr. */
JSTraceNamePrinter
JSTracer::debugPrinter() const
{
    return debugPrinter_;
}
/* Accessor: the opaque debug-print argument (name string or printer arg). */
const void *
JSTracer::debugPrintArg() const
{
    return debugPrintArg_;
}
/* Accessor: the debug-print index, or size_t(-1) when no index is set. */
size_t
JSTracer::debugPrintIndex() const
{
    return debugPrintIndex_;
}
/* Replace the per-thing trace callback. */
void
JSTracer::setTraceCallback(JSTraceCallback traceCallback)
{
    callback = traceCallback;
}
#ifdef JS_GC_ZEAL
/*
 * Remember the true location of the edge being traced, for cases (like
 * TenuredHeap) where the tracer is handed a copy rather than the real slot.
 * Only the first non-null location sticks until it is cleared; passing
 * nullptr also resets it.
 */
void
JSTracer::setTracingLocation(void *location)
{
    if (!realLocation_ || !location)
        realLocation_ = location;
}

/* Clear any remembered edge location. */
void
JSTracer::unsetTracingLocation()
{
    realLocation_ = nullptr;
}

/* Return the remembered real location if set, else the given |thingp|. */
void **
JSTracer::tracingLocation(void **thingp)
{
    return realLocation_ ? (void **)realLocation_ : thingp;
}
#endif
/*
 * One-time initialization: allocate the mark stack at the base capacity for
 * |gcMode|. Returns false on allocation failure. Asserts no stack has been
 * allocated yet.
 */
bool
MarkStack::init(JSGCMode gcMode)
{
    setBaseCapacity(gcMode);

    JS_ASSERT(!stack_);
    uintptr_t *newStack = js_pod_malloc<uintptr_t>(baseCapacity_);
    if (!newStack)
        return false;

    setStack(newStack, 0, baseCapacity_);
    return true;
}
/*
 * Choose the default ("base") capacity for the given GC mode — incremental
 * and non-incremental modes use different constants — then clamp it to the
 * configured maximum capacity.
 */
void
MarkStack::setBaseCapacity(JSGCMode mode)
{
    switch (mode) {
      case JSGC_MODE_GLOBAL:
      case JSGC_MODE_COMPARTMENT:
        baseCapacity_ = NON_INCREMENTAL_MARK_STACK_BASE_CAPACITY;
        break;
      case JSGC_MODE_INCREMENTAL:
        baseCapacity_ = INCREMENTAL_MARK_STACK_BASE_CAPACITY;
        break;
      default:
        MOZ_ASSUME_UNREACHABLE("bad gc mode");
    }

    if (baseCapacity_ > maxCapacity_)
        baseCapacity_ = maxCapacity_;
}
/*
 * Change the stack's capacity ceiling. Only legal while the stack is empty;
 * re-clamps the base capacity and then reset() reallocates the stack to it.
 */
void
MarkStack::setMaxCapacity(size_t maxCapacity)
{
    JS_ASSERT(isEmpty());
    maxCapacity_ = maxCapacity;
    if (baseCapacity_ > maxCapacity_)
        baseCapacity_ = maxCapacity_;

    reset();
}
/*
 * Empty the stack and return its allocation to baseCapacity_. Keeps the
 * current buffer when the size is already right, and tolerates realloc
 * failure by retaining the old (larger) buffer.
 */
void
MarkStack::reset()
{
    if (capacity() == baseCapacity_) {
        // No size change; keep the current stack.
        setStack(stack_, 0, baseCapacity_);
        return;
    }

    uintptr_t *newStack = (uintptr_t *)js_realloc(stack_, sizeof(uintptr_t) * baseCapacity_);
    if (!newStack) {
        // If the realloc fails, just keep using the existing stack; it's
        // not ideal but better than failing.
        newStack = stack_;
        baseCapacity_ = capacity();
    }
    setStack(newStack, 0, baseCapacity_);
}
/*
 * Grow the stack so at least |count| more entries fit. Capacity doubles but
 * is capped at maxCapacity_; returns false (stack unchanged) when even the
 * capped capacity cannot hold position() + count entries, or on OOM.
 */
bool
MarkStack::enlarge(unsigned count)
{
    size_t newCapacity = Min(maxCapacity_, capacity() * 2);
    if (newCapacity < capacity() + count)
        return false;

    size_t tosIndex = position();

    uintptr_t *newStack = (uintptr_t *)js_realloc(stack_, sizeof(uintptr_t) * newCapacity);
    if (!newStack)
        return false;

    setStack(newStack, tosIndex, newCapacity);
    return true;
}
/* Record the base capacity implied by the new GC mode; takes effect lazily. */
void
MarkStack::setGCMode(JSGCMode gcMode)
{
    // The mark stack won't be resized until the next call to reset(), but
    // that will happen at the end of the next GC.
    setBaseCapacity(gcMode);
}
/* Memory-reporting hook: size of the heap-allocated stack buffer. */
size_t
MarkStack::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const
{
    return mallocSizeOf(stack_);
}
/*
 * DoNotTraceWeakMaps: the GC is recomputing the liveness of WeakMap entries,
 * so we delay visiting entries.
 */
/*
 * Construct the marker: a tracer with no callback and weak map tracing
 * deferred (see comment above). The mark stack's max capacity starts at
 * size_t(-1) — effectively unbounded until SetMarkStackLimit is called.
 */
GCMarker::GCMarker(JSRuntime *rt)
  : JSTracer(rt, nullptr, DoNotTraceWeakMaps),
    stack(size_t(-1)),
    color(BLACK),
    unmarkedArenaStackTop(nullptr),
    markLaterArenas(0),
    grayBufferState(GRAY_BUFFER_UNUSED),
    started(false)
{
}
/* Allocate the underlying mark stack; returns false on OOM. */
bool
GCMarker::init(JSGCMode gcMode)
{
    return stack.init(gcMode);
}
/*
 * Begin a marking session: marking starts in black and with no delayed
 * (deferred-marking) arenas outstanding.
 */
void
GCMarker::start()
{
    JS_ASSERT(!started);
    started = true;
    color = BLACK;

    JS_ASSERT(!unmarkedArenaStackTop);
    JS_ASSERT(markLaterArenas == 0);
}
/*
 * End a marking session. Requires the mark stack to be fully drained and no
 * delayed arenas left; releases extra stack memory and drops any buffered
 * gray roots.
 */
void
GCMarker::stop()
{
    JS_ASSERT(isDrained());

    JS_ASSERT(started);
    started = false;

    JS_ASSERT(!unmarkedArenaStackTop);
    JS_ASSERT(markLaterArenas == 0);

    /* Free non-ballast stack memory. */
    stack.reset();

    resetBufferedGrayRoots();
    grayBufferState = GRAY_BUFFER_UNUSED;
}
/*
 * Abandon in-progress marking: empty the mark stack and unwind the delayed-
 * marking arena list, clearing each arena's delayed-marking bookkeeping
 * (hasDelayedMarking, markOverflow, allocatedDuringIncremental).
 */
void
GCMarker::reset()
{
    color = BLACK;

    stack.reset();
    JS_ASSERT(isMarkStackEmpty());

    while (unmarkedArenaStackTop) {
        ArenaHeader *aheader = unmarkedArenaStackTop;
        JS_ASSERT(aheader->hasDelayedMarking);
        JS_ASSERT(markLaterArenas);
        unmarkedArenaStackTop = aheader->getNextDelayedMarking();
        aheader->unsetDelayedMarking();
        aheader->markOverflow = 0;
        aheader->allocatedDuringIncremental = 0;
        markLaterArenas--;
    }
    JS_ASSERT(isDrained());
    JS_ASSERT(!markLaterArenas);
}
/*
 * Mark the children of cells in an arena whose marking was deferred.
 *
 * If the arena hit mark-stack overflow (markOverflow), walk every cell and
 * trace children of the marked ones — or of all cells when the arena was
 * allocated during an incremental slice. Otherwise the arena is here only
 * because it was allocated during incremental marking, and the whole arena
 * is pushed for marking instead.
 */
void
GCMarker::markDelayedChildren(ArenaHeader *aheader)
{
    if (aheader->markOverflow) {
        bool always = aheader->allocatedDuringIncremental;
        aheader->markOverflow = 0;

        for (CellIterUnderGC i(aheader); !i.done(); i.next()) {
            Cell *t = i.getCell();
            if (always || t->isMarked()) {
                t->markIfUnmarked();
                JS_TraceChildren(this, t, MapAllocToTraceKind(aheader->getAllocKind()));
            }
        }
    } else {
        JS_ASSERT(aheader->allocatedDuringIncremental);
        PushArena(this, aheader);
    }
    aheader->allocatedDuringIncremental = 0;
    /*
     * Note that during an incremental GC we may still be allocating into
     * aheader. However, prepareForIncrementalGC sets the
     * allocatedDuringIncremental flag if we continue marking.
     */
}
/*
 * Drain the delayed-marking arena list under a slice budget. Each arena
 * costs 150 budget steps; returns false if the budget runs out with arenas
 * still queued (the remainder stays on the list), true when the list is
 * fully drained. Attributes the time to PHASE_MARK_DELAYED when called
 * during the MARK state.
 */
bool
GCMarker::markDelayedChildren(SliceBudget &budget)
{
    gcstats::MaybeAutoPhase ap;
    if (runtime()->gcIncrementalState == MARK)
        ap.construct(runtime()->gcStats, gcstats::PHASE_MARK_DELAYED);

    JS_ASSERT(unmarkedArenaStackTop);
    do {
        /*
         * If marking gets delayed at the same arena again, we must repeat
         * marking of its things. For that we pop arena from the stack and
         * clear its hasDelayedMarking flag before we begin the marking.
         */
        ArenaHeader *aheader = unmarkedArenaStackTop;
        JS_ASSERT(aheader->hasDelayedMarking);
        JS_ASSERT(markLaterArenas);
        unmarkedArenaStackTop = aheader->getNextDelayedMarking();
        aheader->unsetDelayedMarking();
        markLaterArenas--;
        markDelayedChildren(aheader);

        budget.step(150);
        if (budget.isOverBudget())
            return false;
    } while (unmarkedArenaStackTop);
    JS_ASSERT(!markLaterArenas);

    return true;
}
#ifdef DEBUG
/*
 * Debug check: a tenured cell reached during marking must belong to a zone
 * that is actually being collected.
 */
void
GCMarker::checkZone(void *p)
{
    JS_ASSERT(started);
    DebugOnly<Cell *> cell = static_cast<Cell *>(p);
    JS_ASSERT_IF(cell->isTenured(), cell->tenuredZone()->isCollecting());
}
#endif
/* True when gray-root buffering succeeded and the buffers are usable. */
bool
GCMarker::hasBufferedGrayRoots() const
{
    return grayBufferState == GRAY_BUFFER_OK;
}
/*
 * Switch the marker into gray-root buffering mode: install GrayCallback so
 * roots traced from here on are appended to per-zone gcGrayRoots buffers
 * instead of being marked. Requires empty buffers and no callback installed.
 */
void
GCMarker::startBufferingGrayRoots()
{
    JS_ASSERT(grayBufferState == GRAY_BUFFER_UNUSED);
    grayBufferState = GRAY_BUFFER_OK;
    for (GCZonesIter zone(runtime()); !zone.done(); zone.next())
        JS_ASSERT(zone->gcGrayRoots.empty());

    JS_ASSERT(!callback);
    callback = GrayCallback;
    JS_ASSERT(IS_GC_MARKING_TRACER(this));
}
/*
 * Leave gray-root buffering mode. The buffer state must be either OK or
 * FAILED (FAILED means an append ran out of memory along the way).
 */
void
GCMarker::endBufferingGrayRoots()
{
    JS_ASSERT(callback == GrayCallback);
    callback = nullptr;
    JS_ASSERT(IS_GC_MARKING_TRACER(this));
    JS_ASSERT(grayBufferState == GRAY_BUFFER_OK ||
              grayBufferState == GRAY_BUFFER_FAILED);
}
/* Discard all buffered gray roots (frees each zone's buffer storage). */
void
GCMarker::resetBufferedGrayRoots()
{
    for (GCZonesIter zone(runtime()); !zone.done(); zone.next())
        zone->gcGrayRoots.clearAndFree();
}
/*
 * Replay the buffered gray roots of |zone| into the marker. Each entry's
 * saved debug details and real location are restored before marking; the
 * traced pointer is asserted not to move (marking here must not relocate).
 */
void
GCMarker::markBufferedGrayRoots(JS::Zone *zone)
{
    JS_ASSERT(grayBufferState == GRAY_BUFFER_OK);
    JS_ASSERT(zone->isGCMarkingGray());

    for (GrayRoot *elem = zone->gcGrayRoots.begin(); elem != zone->gcGrayRoots.end(); elem++) {
#ifdef DEBUG
        setTracingDetails(elem->debugPrinter, elem->debugPrintArg, elem->debugPrintIndex);
#endif
        void *tmp = elem->thing;
        setTracingLocation((void *)&elem->thing);
        MarkKind(this, &tmp, elem->kind);
        JS_ASSERT(tmp == elem->thing);
    }
}
/*
 * Record |thing| as a gray root in its zone's buffer (only for zones being
 * collected). On append failure (OOM) all buffers are dropped and the state
 * is set to GRAY_BUFFER_FAILED, after which further appends are no-ops.
 */
void
GCMarker::appendGrayRoot(void *thing, JSGCTraceKind kind)
{
    JS_ASSERT(started);

    if (grayBufferState == GRAY_BUFFER_FAILED)
        return;

    GrayRoot root(thing, kind);
#ifdef DEBUG
    /* Preserve the tracer's current debug naming so replay can restore it. */
    root.debugPrinter = debugPrinter();
    root.debugPrintArg = debugPrintArg();
    root.debugPrintIndex = debugPrintIndex();
#endif

    Zone *zone = static_cast<Cell *>(thing)->tenuredZone();
    if (zone->isCollecting()) {
        zone->maybeAlive = true;
        if (!zone->gcGrayRoots.append(root)) {
            resetBufferedGrayRoots();
            grayBufferState = GRAY_BUFFER_FAILED;
        }
    }
}
/*
 * Trace-callback trampoline used while buffering gray roots: forwards each
 * traced thing to appendGrayRoot on the owning GCMarker.
 */
void
GCMarker::GrayCallback(JSTracer *trc, void **thingp, JSGCTraceKind kind)
{
    JS_ASSERT(thingp);
    JS_ASSERT(*thingp);
    GCMarker *gcmarker = static_cast<GCMarker *>(trc);
    gcmarker->appendGrayRoot(*thingp, kind);
}
/*
 * Memory-reporting hook: mark-stack buffer plus every zone's gray-root
 * buffer.
 */
size_t
GCMarker::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const
{
    size_t size = stack.sizeOfExcludingThis(mallocSizeOf);
    for (ZonesIter zone(runtime(), WithAtoms); !zone.done(); zone.next())
        size += zone->gcGrayRoots.sizeOfExcludingThis(mallocSizeOf);
    return size;
}
/*
 * Set the runtime mark stack's capacity ceiling. Must be called while no GC
 * is active; barrier verification is paused because resizing resets the
 * stack.
 */
void
js::SetMarkStackLimit(JSRuntime *rt, size_t limit)
{
    JS_ASSERT(!rt->isHeapBusy());
    AutoStopVerifyingBarriers pauseVerification(rt, false);
    rt->gcMarker.setMaxCapacity(limit);
}
673 }