Wed, 31 Dec 2014 06:09:35 +0100
Cloned upstream origin tor-browser at tor-browser-31.3.0esr-4.5-1-build1
revision ID fc1c9ff7c1b2defdbc039f12214767608f46423f, for hacking purposes.
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sts=4 et sw=4 tw=99:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
7 #include "jscompartmentinlines.h"
9 #include "mozilla/DebugOnly.h"
10 #include "mozilla/MemoryReporting.h"
12 #include "jscntxt.h"
13 #include "jsfriendapi.h"
14 #include "jsgc.h"
15 #include "jsiter.h"
16 #include "jsproxy.h"
17 #include "jswatchpoint.h"
18 #include "jswrapper.h"
20 #include "gc/Marking.h"
21 #ifdef JS_ION
22 #include "jit/JitCompartment.h"
23 #endif
24 #include "js/RootingAPI.h"
25 #include "vm/StopIterationObject.h"
26 #include "vm/WrapperObject.h"
28 #include "jsatominlines.h"
29 #include "jsfuninlines.h"
30 #include "jsgcinlines.h"
31 #include "jsinferinlines.h"
32 #include "jsobjinlines.h"
34 using namespace js;
35 using namespace js::gc;
37 using mozilla::DebugOnly;
/*
 * Construct a compartment inside |zone|. The compartment starts with no
 * global, no principals, and its debug-mode bits seeded from the runtime's
 * debugMode flag. Note: the default argument here is added at the
 * definition, which is legal as long as the declaration did not supply one.
 */
JSCompartment::JSCompartment(Zone *zone, const JS::CompartmentOptions &options = JS::CompartmentOptions())
  : options_(options),
    zone_(zone),
    runtime_(zone->runtimeFromMainThread()),
    principals(nullptr),
    isSystem(false),
    isSelfHosting(false),
    marked(true),
#ifdef DEBUG
    firedOnNewGlobalObject(false),
#endif
    global_(nullptr),
    enterCompartmentDepth(0),
    data(nullptr),
    objectMetadataCallback(nullptr),
    lastAnimationTime(0),
    regExps(runtime_),
    globalWriteBarriered(false),
    propertyTree(thisForCtor()),
    selfHostingScriptSource(nullptr),
    gcIncomingGrayPointers(nullptr),
    gcWeakMapList(nullptr),
    debugModeBits(runtime_->debugMode ? DebugFromC : 0),
    rngState(0),
    watchpointMap(nullptr),
    scriptCountsMap(nullptr),
    debugScriptMap(nullptr),
    debugScopes(nullptr),
    enumerators(nullptr),
    compartmentStats(nullptr)
#ifdef JS_ION
    , jitCompartment_(nullptr)
#endif
{
    // Keep the runtime's live-compartment count in sync (decremented in the
    // destructor).
    runtime_->numCompartments++;
    JS_ASSERT_IF(options.mergeable(), options.invisibleToDebugger());
}
JSCompartment::~JSCompartment()
{
#ifdef JS_ION
    js_delete(jitCompartment_);
#endif

    js_delete(watchpointMap);
    js_delete(scriptCountsMap);
    js_delete(debugScriptMap);
    js_delete(debugScopes);
    // js_free, not js_delete: the enumerators sentinel is presumably
    // raw-allocated by NativeIterator::allocateSentinel (see init()) —
    // confirm against its definition before changing this.
    js_free(enumerators);

    // Balances the increment in the constructor.
    runtime_->numCompartments--;
}
/*
 * One-time initialization after construction. Returns false on OOM.
 * |cx| may be null: only the timezone refresh is explicitly guarded on
 * it here, so regExps.init and allocateSentinel presumably tolerate a
 * null context — confirm against their definitions.
 */
bool
JSCompartment::init(JSContext *cx)
{
    /*
     * As a hack, we clear our timezone cache every time we create a new
     * compartment. This ensures that the cache is always relatively fresh, but
     * shouldn't interfere with benchmarks which create tons of date objects
     * (unless they also create tons of iframes, which seems unlikely).
     */
    if (cx)
        cx->runtime()->dateTimeInfo.updateTimeZoneAdjustment();

    activeAnalysis = false;

    if (!crossCompartmentWrappers.init(0))
        return false;

    if (!regExps.init(cx))
        return false;

    // Sentinel node for the compartment's circular list of active native
    // iterators (walked and unlinked in sweep()).
    enumerators = NativeIterator::allocateSentinel(cx);
    if (!enumerators)
        return false;

    if (!savedStacks_.init())
        return false;

    return debuggees.init(0);
}
122 #ifdef JS_ION
/*
 * Lazily create the runtime-wide JIT state. Returns null on failure, in
 * which case all partially-created state is torn down so a later call can
 * retry cleanly.
 */
jit::JitRuntime *
JSRuntime::createJitRuntime(JSContext *cx)
{
    // The shared stubs are created in the atoms compartment, which may be
    // accessed by other threads with an exclusive context.
    AutoLockForExclusiveAccess atomsLock(cx);

    // The runtime will only be created on its owning thread, but reads of a
    // runtime's jitRuntime() can occur when another thread is requesting an
    // interrupt.
    AutoLockForInterrupt lock(this);

    JS_ASSERT(!jitRuntime_);

    jitRuntime_ = cx->new_<jit::JitRuntime>();

    if (!jitRuntime_)
        return nullptr;

    if (!jitRuntime_->initialize(cx)) {
        // Tear down the half-initialized runtime...
        js_delete(jitRuntime_);
        jitRuntime_ = nullptr;

        // ...and also drop the atoms compartment's JitCompartment, which
        // initialize() presumably created and which would otherwise be left
        // referencing the deleted runtime — confirm against
        // JitRuntime::initialize.
        JSCompartment *comp = cx->runtime()->atomsCompartment();
        if (comp->jitCompartment_) {
            js_delete(comp->jitCompartment_);
            comp->jitCompartment_ = nullptr;
        }

        return nullptr;
    }

    return jitRuntime_;
}
/*
 * Lazily create this compartment's JIT state. Idempotent; returns false
 * only on OOM/initialization failure, leaving the field null so the call
 * can be retried.
 */
bool
JSCompartment::ensureJitCompartmentExists(JSContext *cx)
{
    using namespace js::jit;
    if (jitCompartment_)
        return true;

    // Per-zone JIT state must exist before the per-compartment state.
    if (!zone()->getJitZone(cx))
        return false;

    /* Set the compartment early, so linking works. */
    jitCompartment_ = cx->new_<JitCompartment>();

    if (!jitCompartment_)
        return false;

    if (!jitCompartment_->initialize(cx)) {
        // Roll back so jitCompartment_ never points at a half-built object.
        js_delete(jitCompartment_);
        jitCompartment_ = nullptr;
        return false;
    }

    return true;
}
182 #endif
184 #ifdef JSGC_GENERATIONAL
186 /*
187 * This class is used to add a post barrier on the crossCompartmentWrappers map,
188 * as the key is calculated based on objects which may be moved by generational
189 * GC.
190 */
class WrapperMapRef : public BufferableRef
{
    WrapperMap *map;          // map whose entry may need rekeying (not owned)
    CrossCompartmentKey key;  // snapshot of the entry's key when buffered

  public:
    WrapperMapRef(WrapperMap *map, const CrossCompartmentKey &key)
      : map(map), key(key) {}

    // Invoked by the GC via the store buffer: update the key's pointers and,
    // if any of them moved, rekey the corresponding map entry.
    void mark(JSTracer *trc) {
        CrossCompartmentKey prior = key;
        if (key.debugger)
            Mark(trc, &key.debugger, "CCW debugger");
        // String-keyed wrappers hold a string, not an object, so only treat
        // |wrapped| as a JSObject* for the non-string kinds.
        if (key.kind != CrossCompartmentKey::StringWrapper)
            Mark(trc, reinterpret_cast<JSObject**>(&key.wrapped), "CCW wrapped object");
        if (key.debugger == prior.debugger && key.wrapped == prior.wrapped)
            return;  // nothing moved; the entry is still correctly keyed

        /* Look for the original entry, which might have been removed. */
        WrapperMap::Ptr p = map->lookup(prior);
        if (!p)
            return;

        /* Rekey the entry. */
        map->rekeyAs(prior, key, key);
    }
};
219 #ifdef JS_GC_ZEAL
void
JSCompartment::checkWrapperMapAfterMovingGC()
{
    /*
     * Assert that the postbarriers have worked and that nothing is left in
     * wrapperMap that points into the nursery, and that the hash table entries
     * are discoverable.
     */
    JS::shadow::Runtime *rt = JS::shadow::Runtime::asShadowRuntime(runtimeFromMainThread());
    for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
        CrossCompartmentKey key = e.front().key();
        JS_ASSERT(!IsInsideNursery(rt, key.debugger));
        JS_ASSERT(!IsInsideNursery(rt, key.wrapped));
        JS_ASSERT(!IsInsideNursery(rt, e.front().value().get().toGCThing()));

        // A key whose referent moved without the entry being rekeyed would
        // make the entry unfindable by lookup; catch that here.
        WrapperMap::Ptr ptr = crossCompartmentWrappers.lookup(key);
        JS_ASSERT(ptr.found() && &*ptr == &e.front());
    }
}
239 #endif
241 #endif
/*
 * Record |wrapped| -> |wrapper| in the cross-compartment wrapper map.
 * Returns false on OOM. String keys must map to string wrappers; all
 * other kinds map to object wrappers.
 */
bool
JSCompartment::putWrapper(JSContext *cx, const CrossCompartmentKey &wrapped, const js::Value &wrapper)
{
    JS_ASSERT(wrapped.wrapped);
    JS_ASSERT(!IsPoisonedPtr(wrapped.wrapped));
    JS_ASSERT(!IsPoisonedPtr(wrapped.debugger));
    JS_ASSERT(!IsPoisonedPtr(wrapper.toGCThing()));
    JS_ASSERT_IF(wrapped.kind == CrossCompartmentKey::StringWrapper, wrapper.isString());
    JS_ASSERT_IF(wrapped.kind != CrossCompartmentKey::StringWrapper, wrapper.isObject());
    bool success = crossCompartmentWrappers.put(wrapped, wrapper);

#ifdef JSGC_GENERATIONAL
    /* There's no point allocating wrappers in the nursery since we will tenure them anyway. */
    Nursery &nursery = cx->nursery();
    JS_ASSERT(!nursery.isInside(wrapper.toGCThing()));

    // If any part of the key lives in the nursery, buffer a post barrier so
    // the entry gets rekeyed when a minor GC moves the key's referents
    // (see WrapperMapRef::mark).
    if (success && (nursery.isInside(wrapped.wrapped) || nursery.isInside(wrapped.debugger))) {
        WrapperMapRef ref(&crossCompartmentWrappers, wrapped);
        cx->runtime()->gcStoreBuffer.putGeneric(ref);
    }
#endif

    return success;
}
/*
 * Wrap |*strp| for use in this compartment. Same-zone strings and atoms
 * pass through unchanged; otherwise return the cached copy or allocate a
 * fresh copy directly in this compartment and cache it.
 */
bool
JSCompartment::wrap(JSContext *cx, JSString **strp)
{
    JS_ASSERT(!cx->runtime()->isAtomsCompartment(this));
    JS_ASSERT(cx->compartment() == this);

    /* If the string is already in this compartment, we are done. */
    JSString *str = *strp;
    if (str->zoneFromAnyThread() == zone())
        return true;

    /* If the string is an atom, we don't have to copy. */
    if (str->isAtom()) {
        JS_ASSERT(str->isPermanentAtom() ||
                  cx->runtime()->isAtomsZone(str->zone()));
        return true;
    }

    /* Check the cache. */
    RootedValue key(cx, StringValue(str));
    if (WrapperMap::Ptr p = crossCompartmentWrappers.lookup(key)) {
        *strp = p->value().get().toString();
        return true;
    }

    /*
     * No dice. Make a copy, and cache it. Directly allocate the copy in the
     * destination compartment, rather than first flattening it (and possibly
     * allocating in source compartment), because we don't know whether the
     * flattening will pay off later.
     */
    JSString *copy;
    if (str->hasPureChars()) {
        copy = js_NewStringCopyN<CanGC>(cx, str->pureChars(), str->length());
    } else {
        ScopedJSFreePtr<jschar> copiedChars;
        if (!str->copyNonPureCharsZ(cx, copiedChars))
            return false;
        // forget() releases the buffer from the scoped pointer; js_NewString
        // presumably assumes ownership on success — confirm its contract.
        copy = js_NewString<CanGC>(cx, copiedChars.forget(), str->length());
    }

    if (!copy)
        return false;
    if (!putWrapper(cx, key, StringValue(copy)))
        return false;

    *strp = copy;
    return true;
}
318 bool
319 JSCompartment::wrap(JSContext *cx, HeapPtrString *strp)
320 {
321 RootedString str(cx, *strp);
322 if (!wrap(cx, str.address()))
323 return false;
324 *strp = str;
325 return true;
326 }
/*
 * Wrap |obj| for use in this compartment, reusing |existingArg| (a dead
 * proxy in this compartment) as the wrapper object when possible. On
 * success |obj| holds either the original object (same compartment), a
 * cached or freshly created cross-compartment wrapper, or a translated
 * singleton (StopIteration).
 */
bool
JSCompartment::wrap(JSContext *cx, MutableHandleObject obj, HandleObject existingArg)
{
    JS_ASSERT(!cx->runtime()->isAtomsCompartment(this));
    JS_ASSERT(cx->compartment() == this);
    JS_ASSERT_IF(existingArg, existingArg->compartment() == cx->compartment());
    JS_ASSERT_IF(existingArg, IsDeadProxyObject(existingArg));

    if (!obj)
        return true;
    AutoDisableProxyCheck adpc(cx->runtime());

    // Wrappers should really be parented to the wrapped parent of the wrapped
    // object, but in that case a wrapped global object would have a nullptr
    // parent without being a proper global object (JSCLASS_IS_GLOBAL). Instead,
    // we parent all wrappers to the global object in their home compartment.
    // This loses us some transparency, and is generally very cheesy.
    HandleObject global = cx->global();
    RootedObject objGlobal(cx, &obj->global());
    JS_ASSERT(global);
    JS_ASSERT(objGlobal);

    const JSWrapObjectCallbacks *cb = cx->runtime()->wrapObjectCallbacks;

    // Same compartment: no wrapper needed, but switch to the outer object.
    if (obj->compartment() == this) {
        obj.set(GetOuterObject(cx, obj));
        return true;
    }

    // If we have a cross-compartment wrapper, make sure that the cx isn't
    // associated with the self-hosting global. We don't want to create
    // wrappers for objects in other runtimes, which may be the case for the
    // self-hosting global.
    JS_ASSERT(!cx->runtime()->isSelfHostingGlobal(global) &&
              !cx->runtime()->isSelfHostingGlobal(objGlobal));

    // Unwrap the object, but don't unwrap outer windows.
    unsigned flags = 0;
    obj.set(UncheckedUnwrap(obj, /* stopAtOuter = */ true, &flags));

    // Unwrapping may already have landed us in this compartment.
    if (obj->compartment() == this) {
        MOZ_ASSERT(obj == GetOuterObject(cx, obj));
        return true;
    }

    // Translate StopIteration singleton.
    if (obj->is<StopIterationObject>()) {
        // StopIteration isn't a constructor, but it's stored in GlobalObject
        // as one, out of laziness. Hence the GetBuiltinConstructor call here.
        RootedObject stopIteration(cx);
        if (!GetBuiltinConstructor(cx, JSProto_StopIteration, &stopIteration))
            return false;
        obj.set(stopIteration);
        return true;
    }

    // Invoke the prewrap callback. We're a bit worried about infinite
    // recursion here, so we do a check - see bug 809295.
    JS_CHECK_CHROME_RECURSION(cx, return false);
    if (cb->preWrap) {
        obj.set(cb->preWrap(cx, global, obj, flags));
        if (!obj)
            return false;
    }
    MOZ_ASSERT(obj == GetOuterObject(cx, obj));

    if (obj->compartment() == this)
        return true;

    // If we already have a wrapper for this value, use it.
    RootedValue key(cx, ObjectValue(*obj));
    if (WrapperMap::Ptr p = crossCompartmentWrappers.lookup(key)) {
        obj.set(&p->value().get().toObject());
        JS_ASSERT(obj->is<CrossCompartmentWrapperObject>());
        JS_ASSERT(obj->getParent() == global);
        return true;
    }

    RootedObject proto(cx, TaggedProto::LazyProto);
    RootedObject existing(cx, existingArg);
    if (existing) {
        // Is it possible to reuse |existing|?
        if (!existing->getTaggedProto().isLazy() ||
            // Note: don't use is<ObjectProxyObject>() here -- it also matches subclasses!
            existing->getClass() != &ProxyObject::uncallableClass_ ||
            existing->getParent() != global ||
            obj->isCallable())
        {
            existing = nullptr;
        }
    }

    obj.set(cb->wrap(cx, existing, obj, proto, global, flags));
    if (!obj)
        return false;

    // We maintain the invariant that the key in the cross-compartment wrapper
    // map is always directly wrapped by the value.
    JS_ASSERT(Wrapper::wrappedObject(obj) == &key.get().toObject());

    return putWrapper(cx, key, ObjectValue(*obj));
}
432 bool
433 JSCompartment::wrapId(JSContext *cx, jsid *idp)
434 {
435 MOZ_ASSERT(*idp != JSID_VOID, "JSID_VOID is an out-of-band sentinel value");
436 if (JSID_IS_INT(*idp))
437 return true;
438 RootedValue value(cx, IdToValue(*idp));
439 if (!wrap(cx, &value))
440 return false;
441 RootedId id(cx);
442 if (!ValueToId<CanGC>(cx, value, &id))
443 return false;
445 *idp = id;
446 return true;
447 }
449 bool
450 JSCompartment::wrap(JSContext *cx, PropertyOp *propp)
451 {
452 RootedValue value(cx, CastAsObjectJsval(*propp));
453 if (!wrap(cx, &value))
454 return false;
455 *propp = CastAsPropertyOp(value.toObjectOrNull());
456 return true;
457 }
459 bool
460 JSCompartment::wrap(JSContext *cx, StrictPropertyOp *propp)
461 {
462 RootedValue value(cx, CastAsObjectJsval(*propp));
463 if (!wrap(cx, &value))
464 return false;
465 *propp = CastAsStrictPropertyOp(value.toObjectOrNull());
466 return true;
467 }
469 bool
470 JSCompartment::wrap(JSContext *cx, MutableHandle<PropertyDescriptor> desc)
471 {
472 if (!wrap(cx, desc.object()))
473 return false;
475 if (desc.hasGetterObject()) {
476 if (!wrap(cx, &desc.getter()))
477 return false;
478 }
479 if (desc.hasSetterObject()) {
480 if (!wrap(cx, &desc.setter()))
481 return false;
482 }
484 return wrap(cx, desc.value());
485 }
487 bool
488 JSCompartment::wrap(JSContext *cx, AutoIdVector &props)
489 {
490 jsid *vector = props.begin();
491 int length = props.length();
492 for (size_t n = 0; n < size_t(length); ++n) {
493 if (!wrapId(cx, &vector[n]))
494 return false;
495 }
496 return true;
497 }
499 /*
500 * This method marks pointers that cross compartment boundaries. It should be
501 * called only for per-compartment GCs, since full GCs naturally follow pointers
502 * across compartments.
503 */
void
JSCompartment::markCrossCompartmentWrappers(JSTracer *trc)
{
    // Only uncollected compartments mark their outgoing wrappers; a
    // collected zone's wrappers are handled by the collection itself.
    JS_ASSERT(!zone()->isCollecting());

    for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
        Value v = e.front().value();
        if (e.front().key().kind == CrossCompartmentKey::ObjectWrapper) {
            ProxyObject *wrapper = &v.toObject().as<ProxyObject>();

            /*
             * We have a cross-compartment wrapper. Its private pointer may
             * point into the compartment being collected, so we should mark it.
             */
            Value referent = wrapper->private_();
            MarkValueRoot(trc, &referent, "cross-compartment wrapper");
            // Marking a local copy: the slot itself is not updated, so the
            // referent must not have moved.
            JS_ASSERT(referent == wrapper->private_());
        }
    }
}
525 void
526 JSCompartment::trace(JSTracer *trc)
527 {
528 // At the moment, this is merely ceremonial, but any live-compartment-only tracing should go
529 // here.
530 }
void
JSCompartment::markRoots(JSTracer *trc)
{
    // Not used during minor collections (those presumably go through the
    // store buffer instead — confirm against the GC entry points).
    JS_ASSERT(!trc->runtime()->isHeapMinorCollecting());

#ifdef JS_ION
    if (jitCompartment_)
        jitCompartment_->mark(trc, this);
#endif

    /*
     * If a compartment is on-stack, we mark its global so that
     * JSContext::global() remains valid.
     */
    if (enterCompartmentDepth && global_)
        MarkObjectRoot(trc, global_.unsafeGet(), "on-stack compartment global");
}
/*
 * Sweep the compartment's weak tables and caches after marking, dropping
 * entries whose referents are about to be finalized.
 * NOTE(review): |releaseTypes| is not consulted anywhere in this body.
 */
void
JSCompartment::sweep(FreeOp *fop, bool releaseTypes)
{
    JS_ASSERT(!activeAnalysis);

    /* This function includes itself in PHASE_SWEEP_TABLES. */
    sweepCrossCompartmentWrappers();

    JSRuntime *rt = runtimeFromMainThread();

    {
        gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_TABLES);

        /* Remove dead references held weakly by the compartment. */

        sweepBaseShapeTable();
        sweepInitialShapeTable();
        sweepNewTypeObjectTable(newTypeObjects);
        sweepNewTypeObjectTable(lazyTypeObjects);
        sweepCallsiteClones();
        savedStacks_.sweep(rt);

        // Drop the global reference if the global object itself is dying.
        if (global_ && IsObjectAboutToBeFinalized(global_.unsafeGet()))
            global_ = nullptr;

        if (selfHostingScriptSource &&
            IsObjectAboutToBeFinalized((JSObject **) selfHostingScriptSource.unsafeGet()))
        {
            selfHostingScriptSource = nullptr;
        }

#ifdef JS_ION
        if (jitCompartment_)
            jitCompartment_->sweep(fop);
#endif

        /*
         * JIT code increments activeUseCount for any RegExpShared used by jit
         * code for the lifetime of the JIT script. Thus, we must perform
         * sweeping after clearing jit code.
         */
        regExps.sweep(rt);

        if (debugScopes)
            debugScopes->sweep(rt);

        /* Finalize unreachable (key,value) pairs in all weak maps. */
        WeakMapBase::sweepCompartment(this);
    }

    // Walk the circular list of active native iterators (|enumerators| is
    // the sentinel) and unlink any whose iterator object is about to die.
    // |next| is captured before unlink() so traversal survives removal.
    NativeIterator *ni = enumerators->next();
    while (ni != enumerators) {
        JSObject *iterObj = ni->iterObj();
        NativeIterator *next = ni->next();
        if (gc::IsObjectAboutToBeFinalized(&iterObj))
            ni->unlink();
        ni = next;
    }
}
610 /*
611 * Remove dead wrappers from the table. We must sweep all compartments, since
612 * string entries in the crossCompartmentWrappers table are not marked during
613 * markCrossCompartmentWrappers.
614 */
void
JSCompartment::sweepCrossCompartmentWrappers()
{
    JSRuntime *rt = runtimeFromMainThread();

    gcstats::AutoPhase ap1(rt->gcStats, gcstats::PHASE_SWEEP_TABLES);
    gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_SWEEP_TABLES_WRAPPER);

    /* Remove dead wrappers from the table. */
    for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
        CrossCompartmentKey key = e.front().key();
        // The IsAboutToBeFinalized calls may update the pointers inside the
        // local |key| copy when a moving GC relocated the referents — that is
        // why |key| is compared against the stored key below.
        bool keyDying = IsCellAboutToBeFinalized(&key.wrapped);
        bool valDying = IsValueAboutToBeFinalized(e.front().value().unsafeGet());
        bool dbgDying = key.debugger && IsObjectAboutToBeFinalized(&key.debugger);
        if (keyDying || valDying || dbgDying) {
            JS_ASSERT(key.kind != CrossCompartmentKey::StringWrapper);
            e.removeFront();
        } else if (key.wrapped != e.front().key().wrapped ||
                   key.debugger != e.front().key().debugger)
        {
            // Referent moved but survived: rekey the entry in place.
            e.rekeyFront(key);
        }
    }
}
640 void
641 JSCompartment::purge()
642 {
643 dtoaCache.purge();
644 }
/*
 * Reset this compartment's tables to an empty state, asserting that no
 * stateful structures (wrappers, JIT state, debug scopes, live iterators)
 * exist yet. Used on the off-thread-parse merge path described below.
 */
void
JSCompartment::clearTables()
{
    global_ = nullptr;

    regExps.clearTables();

    // No scripts should have run in this compartment. This is used when
    // merging a compartment that has been used off thread into another
    // compartment and zone.
    JS_ASSERT(crossCompartmentWrappers.empty());
    JS_ASSERT_IF(callsiteClones.initialized(), callsiteClones.empty());
#ifdef JS_ION
    JS_ASSERT(!jitCompartment_);
#endif
    JS_ASSERT(!debugScopes);
    JS_ASSERT(!gcWeakMapList);
    // Sentinel-only iterator list, i.e. no active iterators.
    JS_ASSERT(enumerators->next() == enumerators);

    types.clearTables();
    if (baseShapes.initialized())
        baseShapes.clear();
    if (initialShapes.initialized())
        initialShapes.clear();
    if (newTypeObjects.initialized())
        newTypeObjects.clear();
    if (lazyTypeObjects.initialized())
        lazyTypeObjects.clear();
    if (savedStacks_.initialized())
        savedStacks_.clear();
}
678 void
679 JSCompartment::setObjectMetadataCallback(js::ObjectMetadataCallback callback)
680 {
681 // Clear any jitcode in the runtime, which behaves differently depending on
682 // whether there is a creation callback.
683 ReleaseAllJITCode(runtime_->defaultFreeOp());
685 objectMetadataCallback = callback;
686 }
688 bool
689 JSCompartment::hasScriptsOnStack()
690 {
691 for (ActivationIterator iter(runtimeFromMainThread()); !iter.done(); ++iter) {
692 if (iter->compartment() == this)
693 return true;
694 }
696 return false;
697 }
699 static bool
700 AddInnerLazyFunctionsFromScript(JSScript *script, AutoObjectVector &lazyFunctions)
701 {
702 if (!script->hasObjects())
703 return true;
704 ObjectArray *objects = script->objects();
705 for (size_t i = script->innerObjectsStart(); i < objects->length; i++) {
706 JSObject *obj = objects->vector[i];
707 if (obj->is<JSFunction>() && obj->as<JSFunction>().isInterpretedLazy()) {
708 if (!lazyFunctions.append(obj))
709 return false;
710 }
711 }
712 return true;
713 }
// Compile every root lazy function in cx's compartment, then transitively
// compile inner lazy functions exposed by the scripts just created.
static bool
CreateLazyScriptsForCompartment(JSContext *cx)
{
    AutoObjectVector lazyFunctions(cx);

    // Find all live lazy scripts in the compartment, and via them all root
    // lazy functions in the compartment: those which have not been compiled,
    // which have a source object, indicating that they have a parent, and
    // which do not have an uncompiled enclosing script. The last condition is
    // so that we don't compile lazy scripts whose enclosing scripts failed to
    // compile, indicating that the lazy script did not escape the script.
    for (gc::CellIter i(cx->zone(), gc::FINALIZE_LAZY_SCRIPT); !i.done(); i.next()) {
        LazyScript *lazy = i.get<LazyScript>();
        JSFunction *fun = lazy->functionNonDelazifying();
        if (fun->compartment() == cx->compartment() &&
            lazy->sourceObject() && !lazy->maybeScript() &&
            !lazy->hasUncompiledEnclosingScript())
        {
            MOZ_ASSERT(fun->isInterpretedLazy());
            MOZ_ASSERT(lazy == fun->lazyScriptOrNull());
            if (!lazyFunctions.append(fun))
                return false;
        }
    }

    // Create scripts for each lazy function, updating the list of functions to
    // process with any newly exposed inner functions in created scripts.
    // A function cannot be delazified until its outer script exists.
    // Note: lazyFunctions grows while this loop runs (via
    // AddInnerLazyFunctionsFromScript), so length() is re-read each iteration.
    for (size_t i = 0; i < lazyFunctions.length(); i++) {
        JSFunction *fun = &lazyFunctions[i]->as<JSFunction>();

        // lazyFunctions may have been populated with multiple functions for
        // a lazy script.
        if (!fun->isInterpretedLazy())
            continue;

        JSScript *script = fun->getOrCreateScript(cx);
        if (!script)
            return false;
        if (!AddInnerLazyFunctionsFromScript(script, lazyFunctions))
            return false;
    }

    return true;
}
761 bool
762 JSCompartment::ensureDelazifyScriptsForDebugMode(JSContext *cx)
763 {
764 MOZ_ASSERT(cx->compartment() == this);
765 if ((debugModeBits & DebugNeedDelazification) && !CreateLazyScriptsForCompartment(cx))
766 return false;
767 debugModeBits &= ~DebugNeedDelazification;
768 return true;
769 }
/*
 * Enable or disable the C-API (JSD1-style) debug-mode bit. Fails if
 * enabling while scripts from this compartment are on the stack; see the
 * comment below for why disabling is allowed in that case.
 */
bool
JSCompartment::setDebugModeFromC(JSContext *cx, bool b, AutoDebugModeInvalidation &invalidate)
{
    bool enabledBefore = debugMode();
    // Effective mode afterwards: any non-C source still enabled, or |b|.
    bool enabledAfter = (debugModeBits & DebugModeFromMask & ~DebugFromC) || b;

    // Enabling debug mode from C (vs of from JS) can only be done when no
    // scripts from the target compartment are on the stack.
    //
    // We do allow disabling debug mode while scripts are on the stack. In
    // that case the debug-mode code for those scripts remains, so subsequently
    // hooks may be called erroneously, even though debug mode is supposedly
    // off, and we have to live with it.
    bool onStack = false;
    if (enabledBefore != enabledAfter) {
        onStack = hasScriptsOnStack();
        if (b && onStack) {
            JS_ReportErrorNumber(cx, js_GetErrorMessage, nullptr, JSMSG_DEBUG_NOT_IDLE);
            return false;
        }
    }

    debugModeBits = (debugModeBits & ~DebugFromC) | (b ? DebugFromC : 0);
    JS_ASSERT(debugMode() == enabledAfter);
    if (enabledBefore != enabledAfter) {
        // Pass in a nullptr cx to not bother recompiling for JSD1, since
        // we're still enforcing the idle-stack invariant here.
        if (!updateJITForDebugMode(nullptr, invalidate))
            return false;
        if (!enabledAfter)
            DebugScopes::onCompartmentLeaveDebugMode(this);
    }
    return true;
}
/*
 * Propagate a debug-mode change to the JIT. No-op (always succeeds) in
 * non-Ion builds.
 */
bool
JSCompartment::updateJITForDebugMode(JSContext *maybecx, AutoDebugModeInvalidation &invalidate)
{
#ifdef JS_ION
    // The AutoDebugModeInvalidation argument makes sure we can't forget to
    // invalidate, but it is also important not to run any scripts in this
    // compartment until the invalidate is destroyed. That is the caller's
    // responsibility.
    if (!jit::UpdateForDebugMode(maybecx, this, invalidate))
        return false;
#endif
    return true;
}
820 bool
821 JSCompartment::addDebuggee(JSContext *cx, js::GlobalObject *global)
822 {
823 AutoDebugModeInvalidation invalidate(this);
824 return addDebuggee(cx, global, invalidate);
825 }
827 bool
828 JSCompartment::addDebuggee(JSContext *cx,
829 GlobalObject *globalArg,
830 AutoDebugModeInvalidation &invalidate)
831 {
832 Rooted<GlobalObject*> global(cx, globalArg);
834 bool wasEnabled = debugMode();
835 if (!debuggees.put(global)) {
836 js_ReportOutOfMemory(cx);
837 return false;
838 }
839 debugModeBits |= DebugFromJS;
840 if (!wasEnabled && !updateJITForDebugMode(cx, invalidate))
841 return false;
842 return true;
843 }
845 bool
846 JSCompartment::removeDebuggee(JSContext *cx,
847 js::GlobalObject *global,
848 js::GlobalObjectSet::Enum *debuggeesEnum)
849 {
850 AutoDebugModeInvalidation invalidate(this);
851 return removeDebuggee(cx, global, invalidate, debuggeesEnum);
852 }
854 bool
855 JSCompartment::removeDebuggee(JSContext *cx,
856 js::GlobalObject *global,
857 AutoDebugModeInvalidation &invalidate,
858 js::GlobalObjectSet::Enum *debuggeesEnum)
859 {
860 bool wasEnabled = debugMode();
861 removeDebuggeeUnderGC(cx->runtime()->defaultFreeOp(), global, invalidate, debuggeesEnum);
862 if (wasEnabled && !debugMode() && !updateJITForDebugMode(cx, invalidate))
863 return false;
864 return true;
865 }
867 void
868 JSCompartment::removeDebuggeeUnderGC(FreeOp *fop,
869 js::GlobalObject *global,
870 js::GlobalObjectSet::Enum *debuggeesEnum)
871 {
872 AutoDebugModeInvalidation invalidate(this);
873 removeDebuggeeUnderGC(fop, global, invalidate, debuggeesEnum);
874 }
/*
 * Remove |global| from the debuggee set without touching JIT code (safe
 * during GC). When |debuggeesEnum| is supplied it is presumably positioned
 * at |global|'s entry, so removeFront() is used instead of remove() to
 * avoid invalidating the caller's live enumeration — confirm at call sites.
 */
void
JSCompartment::removeDebuggeeUnderGC(FreeOp *fop,
                                     js::GlobalObject *global,
                                     AutoDebugModeInvalidation &invalidate,
                                     js::GlobalObjectSet::Enum *debuggeesEnum)
{
    bool wasEnabled = debugMode();
    JS_ASSERT(debuggees.has(global));
    if (debuggeesEnum)
        debuggeesEnum->removeFront();
    else
        debuggees.remove(global);

    if (debuggees.empty()) {
        debugModeBits &= ~DebugFromJS;
        // Fully leaving debug mode: let DebugScopes release its state.
        if (wasEnabled && !debugMode())
            DebugScopes::onCompartmentLeaveDebugMode(this);
    }
}
896 void
897 JSCompartment::clearBreakpointsIn(FreeOp *fop, js::Debugger *dbg, HandleObject handler)
898 {
899 for (gc::CellIter i(zone(), gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
900 JSScript *script = i.get<JSScript>();
901 if (script->compartment() == this && script->hasAnyBreakpointsOrStepMode())
902 script->clearBreakpointsIn(fop, dbg, handler);
903 }
904 }
906 void
907 JSCompartment::clearTraps(FreeOp *fop)
908 {
909 MinorGC(fop->runtime(), JS::gcreason::EVICT_NURSERY);
910 for (gc::CellIter i(zone(), gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
911 JSScript *script = i.get<JSScript>();
912 if (script->compartment() == this && script->hasAnyBreakpointsOrStepMode())
913 script->clearTraps(fop);
914 }
915 }
/*
 * Memory-reporter support: accumulate the malloc-reported sizes of this
 * compartment and its substructures into the caller-supplied counters.
 * Each out-parameter is added to, not overwritten.
 */
void
JSCompartment::addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf,
                                      size_t *tiAllocationSiteTables,
                                      size_t *tiArrayTypeTables,
                                      size_t *tiObjectTypeTables,
                                      size_t *compartmentObject,
                                      size_t *shapesCompartmentTables,
                                      size_t *crossCompartmentWrappersArg,
                                      size_t *regexpCompartment,
                                      size_t *debuggeesSet,
                                      size_t *savedStacksSet)
{
    *compartmentObject += mallocSizeOf(this);
    types.addSizeOfExcludingThis(mallocSizeOf, tiAllocationSiteTables,
                                 tiArrayTypeTables, tiObjectTypeTables);
    // All four shape/type tables report into the single shapes counter.
    *shapesCompartmentTables += baseShapes.sizeOfExcludingThis(mallocSizeOf)
                             + initialShapes.sizeOfExcludingThis(mallocSizeOf)
                             + newTypeObjects.sizeOfExcludingThis(mallocSizeOf)
                             + lazyTypeObjects.sizeOfExcludingThis(mallocSizeOf);
    *crossCompartmentWrappersArg += crossCompartmentWrappers.sizeOfExcludingThis(mallocSizeOf);
    *regexpCompartment += regExps.sizeOfExcludingThis(mallocSizeOf);
    *debuggeesSet += debuggees.sizeOfExcludingThis(mallocSizeOf);
    *savedStacksSet += savedStacks_.sizeOfExcludingThis(mallocSizeOf);
}
942 void
943 JSCompartment::adoptWorkerAllocator(Allocator *workerAllocator)
944 {
945 zone()->allocator.arenas.adoptArenas(runtimeFromMainThread(), &workerAllocator->arenas);
946 }