/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "jscompartmentinlines.h"

#include "mozilla/DebugOnly.h"
#include "mozilla/MemoryReporting.h"

#include "jscntxt.h"
#include "jsfriendapi.h"
#include "jsgc.h"
#include "jsiter.h"
#include "jsproxy.h"
#include "jswatchpoint.h"
#include "jswrapper.h"

#include "gc/Marking.h"
#ifdef JS_ION
#include "jit/JitCompartment.h"
#endif
#include "js/RootingAPI.h"
#include "vm/StopIterationObject.h"
#include "vm/WrapperObject.h"

#include "jsatominlines.h"
#include "jsfuninlines.h"
#include "jsgcinlines.h"
#include "jsinferinlines.h"
#include "jsobjinlines.h"

using namespace js;
using namespace js::gc;

using mozilla::DebugOnly;

JSCompartment::JSCompartment(Zone *zone, const JS::CompartmentOptions &options = JS::CompartmentOptions())
  : options_(options),
    zone_(zone),
    runtime_(zone->runtimeFromMainThread()),
    principals(nullptr),
    isSystem(false),
    isSelfHosting(false),
    marked(true),
#ifdef DEBUG
    firedOnNewGlobalObject(false),
#endif
    global_(nullptr),
    enterCompartmentDepth(0),
    data(nullptr),
    objectMetadataCallback(nullptr),
    lastAnimationTime(0),
    regExps(runtime_),
    globalWriteBarriered(false),
    propertyTree(thisForCtor()),
    selfHostingScriptSource(nullptr),
    gcIncomingGrayPointers(nullptr),
    gcWeakMapList(nullptr),
    debugModeBits(runtime_->debugMode ? DebugFromC : 0),
    rngState(0),
    watchpointMap(nullptr),
    scriptCountsMap(nullptr),
    debugScriptMap(nullptr),
    debugScopes(nullptr),
    enumerators(nullptr),
    compartmentStats(nullptr)
#ifdef JS_ION
    , jitCompartment_(nullptr)
#endif
{
    runtime_->numCompartments++;
    JS_ASSERT_IF(options.mergeable(), options.invisibleToDebugger());
}

JSCompartment::~JSCompartment()
{
#ifdef JS_ION
    js_delete(jitCompartment_);
#endif

    js_delete(watchpointMap);
    js_delete(scriptCountsMap);
    js_delete(debugScriptMap);
    js_delete(debugScopes);
    js_free(enumerators);

    runtime_->numCompartments--;
}

bool
JSCompartment::init(JSContext *cx)
{
    /*
     * As a hack, we clear our timezone cache every time we create a new
     * compartment. This ensures that the cache is always relatively fresh, but
     * shouldn't interfere with benchmarks which create tons of date objects
     * (unless they also create tons of iframes, which seems unlikely).
     */
    if (cx)
        cx->runtime()->dateTimeInfo.updateTimeZoneAdjustment();

    activeAnalysis = false;

    if (!crossCompartmentWrappers.init(0))
        return false;

    if (!regExps.init(cx))
        return false;

    enumerators = NativeIterator::allocateSentinel(cx);
    if (!enumerators)
        return false;

    if (!savedStacks_.init())
        return false;

    return debuggees.init(0);
}

#ifdef JS_ION
jit::JitRuntime *
JSRuntime::createJitRuntime(JSContext *cx)
{
    // The shared stubs are created in the atoms compartment, which may be
    // accessed by other threads with an exclusive context.
    AutoLockForExclusiveAccess atomsLock(cx);

    // The runtime will only be created on its owning thread, but reads of a
    // runtime's jitRuntime() can occur when another thread is requesting an
    // interrupt.
    AutoLockForInterrupt lock(this);

    JS_ASSERT(!jitRuntime_);

    jitRuntime_ = cx->new_<jit::JitRuntime>();

    if (!jitRuntime_)
        return nullptr;

    if (!jitRuntime_->initialize(cx)) {
        js_delete(jitRuntime_);
        jitRuntime_ = nullptr;

        JSCompartment *comp = cx->runtime()->atomsCompartment();
        if (comp->jitCompartment_) {
            js_delete(comp->jitCompartment_);
            comp->jitCompartment_ = nullptr;
        }

        return nullptr;
    }

    return jitRuntime_;
}

bool
JSCompartment::ensureJitCompartmentExists(JSContext *cx)
{
    using namespace js::jit;
    if (jitCompartment_)
        return true;

    if (!zone()->getJitZone(cx))
        return false;

    /* Set the compartment early, so linking works. */
    jitCompartment_ = cx->new_<JitCompartment>();

    if (!jitCompartment_)
        return false;

    if (!jitCompartment_->initialize(cx)) {
        js_delete(jitCompartment_);
        jitCompartment_ = nullptr;
        return false;
    }

    return true;
}
#endif

#ifdef JSGC_GENERATIONAL

/*
 * This class is used to add a post barrier on the crossCompartmentWrappers map,
 * as the key is calculated based on objects which may be moved by generational
 * GC.
 */
class WrapperMapRef : public BufferableRef
{
    WrapperMap *map;
    CrossCompartmentKey key;

  public:
    WrapperMapRef(WrapperMap *map, const CrossCompartmentKey &key)
      : map(map), key(key) {}

    void mark(JSTracer *trc) {
        CrossCompartmentKey prior = key;
        if (key.debugger)
            Mark(trc, &key.debugger, "CCW debugger");
        if (key.kind != CrossCompartmentKey::StringWrapper)
            Mark(trc, reinterpret_cast<JSObject **>(&key.wrapped), "CCW wrapped object");
        if (key.debugger == prior.debugger && key.wrapped == prior.wrapped)
            return;

        /* Look for the original entry, which might have been removed.
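         * If the entry is gone there is nothing to rekey, so we simply return.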
         */
        WrapperMap::Ptr p = map->lookup(prior);
        if (!p)
            return;

        /* Rekey the entry. */
        map->rekeyAs(prior, key, key);
    }
};

#ifdef JS_GC_ZEAL
void
JSCompartment::checkWrapperMapAfterMovingGC()
{
    /*
     * Assert that the postbarriers have worked and that nothing is left in
     * wrapperMap that points into the nursery, and that the hash table entries
     * are discoverable.
     */
    JS::shadow::Runtime *rt = JS::shadow::Runtime::asShadowRuntime(runtimeFromMainThread());
    for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
        CrossCompartmentKey key = e.front().key();
        JS_ASSERT(!IsInsideNursery(rt, key.debugger));
        JS_ASSERT(!IsInsideNursery(rt, key.wrapped));
        JS_ASSERT(!IsInsideNursery(rt, e.front().value().get().toGCThing()));

        WrapperMap::Ptr ptr = crossCompartmentWrappers.lookup(key);
        JS_ASSERT(ptr.found() && &*ptr == &e.front());
    }
}
#endif

#endif

bool
JSCompartment::putWrapper(JSContext *cx, const CrossCompartmentKey &wrapped, const js::Value &wrapper)
{
    JS_ASSERT(wrapped.wrapped);
    JS_ASSERT(!IsPoisonedPtr(wrapped.wrapped));
    JS_ASSERT(!IsPoisonedPtr(wrapped.debugger));
    JS_ASSERT(!IsPoisonedPtr(wrapper.toGCThing()));
    JS_ASSERT_IF(wrapped.kind == CrossCompartmentKey::StringWrapper, wrapper.isString());
    JS_ASSERT_IF(wrapped.kind != CrossCompartmentKey::StringWrapper, wrapper.isObject());
    bool success = crossCompartmentWrappers.put(wrapped, wrapper);

#ifdef JSGC_GENERATIONAL
    /* There's no point allocating wrappers in the nursery since we will tenure them anyway. */
    Nursery &nursery = cx->nursery();
    JS_ASSERT(!nursery.isInside(wrapper.toGCThing()));

    if (success && (nursery.isInside(wrapped.wrapped) || nursery.isInside(wrapped.debugger))) {
        WrapperMapRef ref(&crossCompartmentWrappers, wrapped);
        cx->runtime()->gcStoreBuffer.putGeneric(ref);
    }
#endif

    return success;
}

bool
JSCompartment::wrap(JSContext *cx, JSString **strp)
{
    JS_ASSERT(!cx->runtime()->isAtomsCompartment(this));
    JS_ASSERT(cx->compartment() == this);

    /* If the string is already in this compartment, we are done. */
    JSString *str = *strp;
    if (str->zoneFromAnyThread() == zone())
        return true;

    /* If the string is an atom, we don't have to copy. */
    if (str->isAtom()) {
        JS_ASSERT(str->isPermanentAtom() ||
                  cx->runtime()->isAtomsZone(str->zone()));
        return true;
    }

    /* Check the cache. */
    RootedValue key(cx, StringValue(str));
    if (WrapperMap::Ptr p = crossCompartmentWrappers.lookup(key)) {
        *strp = p->value().get().toString();
        return true;
    }

    /*
     * No dice. Make a copy, and cache it.
     * Directly allocate the copy in the destination compartment, rather than
     * first flattening it (and possibly allocating in the source compartment),
     * because we don't know whether the flattening will pay off later.
     */
    JSString *copy;
    if (str->hasPureChars()) {
        copy = js_NewStringCopyN<CanGC>(cx, str->pureChars(), str->length());
    } else {
        ScopedJSFreePtr<jschar> copiedChars;
        if (!str->copyNonPureCharsZ(cx, copiedChars))
            return false;
        copy = js_NewString<CanGC>(cx, copiedChars.forget(), str->length());
    }

    if (!copy)
        return false;
    if (!putWrapper(cx, key, StringValue(copy)))
        return false;

    *strp = copy;
    return true;
}

bool
JSCompartment::wrap(JSContext *cx, HeapPtrString *strp)
{
    RootedString str(cx, *strp);
    if (!wrap(cx, str.address()))
        return false;
    *strp = str;
    return true;
}

bool
JSCompartment::wrap(JSContext *cx, MutableHandleObject obj, HandleObject existingArg)
{
    JS_ASSERT(!cx->runtime()->isAtomsCompartment(this));
    JS_ASSERT(cx->compartment() == this);
    JS_ASSERT_IF(existingArg, existingArg->compartment() == cx->compartment());
    JS_ASSERT_IF(existingArg, IsDeadProxyObject(existingArg));

    if (!obj)
        return true;
    AutoDisableProxyCheck adpc(cx->runtime());

    // Wrappers should really be parented to the wrapped parent of the wrapped
    // object, but in that case a wrapped global object would have a nullptr
    // parent without being a proper global object (JSCLASS_IS_GLOBAL). Instead,
    // we parent all wrappers to the global object in their home compartment.
    // This loses us some transparency, and is generally very cheesy.
    HandleObject global = cx->global();
    RootedObject objGlobal(cx, &obj->global());
    JS_ASSERT(global);
    JS_ASSERT(objGlobal);

    const JSWrapObjectCallbacks *cb = cx->runtime()->wrapObjectCallbacks;

    if (obj->compartment() == this) {
        obj.set(GetOuterObject(cx, obj));
        return true;
    }

    // If we have a cross-compartment wrapper, make sure that the cx isn't
    // associated with the self-hosting global. We don't want to create
    // wrappers for objects in other runtimes, which may be the case for the
    // self-hosting global.
    JS_ASSERT(!cx->runtime()->isSelfHostingGlobal(global) &&
              !cx->runtime()->isSelfHostingGlobal(objGlobal));

    // Unwrap the object, but don't unwrap outer windows.
    unsigned flags = 0;
    obj.set(UncheckedUnwrap(obj, /* stopAtOuter = */ true, &flags));

    if (obj->compartment() == this) {
        MOZ_ASSERT(obj == GetOuterObject(cx, obj));
        return true;
    }

    // Translate StopIteration singleton.
    if (obj->is<StopIterationObject>()) {
        // StopIteration isn't a constructor, but it's stored in GlobalObject
        // as one, out of laziness. Hence the GetBuiltinConstructor call here.
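        // We substitute this compartment's own StopIteration singleton rather
        // than creating a cross-compartment wrapper for the foreign one.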
        RootedObject stopIteration(cx);
        if (!GetBuiltinConstructor(cx, JSProto_StopIteration, &stopIteration))
            return false;
        obj.set(stopIteration);
        return true;
    }

    // Invoke the prewrap callback. We're a bit worried about infinite
    // recursion here, so we do a check - see bug 809295.
    JS_CHECK_CHROME_RECURSION(cx, return false);
    if (cb->preWrap) {
        obj.set(cb->preWrap(cx, global, obj, flags));
        if (!obj)
            return false;
    }
    MOZ_ASSERT(obj == GetOuterObject(cx, obj));

    if (obj->compartment() == this)
        return true;

    // If we already have a wrapper for this value, use it.
    RootedValue key(cx, ObjectValue(*obj));
    if (WrapperMap::Ptr p = crossCompartmentWrappers.lookup(key)) {
        obj.set(&p->value().get().toObject());
        JS_ASSERT(obj->is<CrossCompartmentWrapperObject>());
        JS_ASSERT(obj->getParent() == global);
        return true;
    }

    RootedObject proto(cx, TaggedProto::LazyProto);
    RootedObject existing(cx, existingArg);
    if (existing) {
        // Is it possible to reuse |existing|?
        if (!existing->getTaggedProto().isLazy() ||
            // Note: don't use is<ProxyObject>() here -- it also matches subclasses!
            existing->getClass() != &ProxyObject::uncallableClass_ ||
            existing->getParent() != global ||
            obj->isCallable())
        {
            existing = nullptr;
        }
    }

    obj.set(cb->wrap(cx, existing, obj, proto, global, flags));
    if (!obj)
        return false;

    // We maintain the invariant that the key in the cross-compartment wrapper
    // map is always directly wrapped by the value.
    JS_ASSERT(Wrapper::wrappedObject(obj) == &key.get().toObject());

    return putWrapper(cx, key, ObjectValue(*obj));
}

bool
JSCompartment::wrapId(JSContext *cx, jsid *idp)
{
    MOZ_ASSERT(*idp != JSID_VOID, "JSID_VOID is an out-of-band sentinel value");
    if (JSID_IS_INT(*idp))
        return true;
    RootedValue value(cx, IdToValue(*idp));
    if (!wrap(cx, &value))
        return false;
    RootedId id(cx);
    if (!ValueToId<CanGC>(cx, value, &id))
        return false;

    *idp = id;
    return true;
}

bool
JSCompartment::wrap(JSContext *cx, PropertyOp *propp)
{
    RootedValue value(cx, CastAsObjectJsval(*propp));
    if (!wrap(cx, &value))
        return false;
    *propp = CastAsPropertyOp(value.toObjectOrNull());
    return true;
}

bool
JSCompartment::wrap(JSContext *cx, StrictPropertyOp *propp)
{
    RootedValue value(cx, CastAsObjectJsval(*propp));
    if (!wrap(cx, &value))
        return false;
    *propp = CastAsStrictPropertyOp(value.toObjectOrNull());
    return true;
}

bool
JSCompartment::wrap(JSContext *cx, MutableHandle<PropertyDescriptor> desc)
{
    if (!wrap(cx, desc.object()))
        return false;

    if (desc.hasGetterObject()) {
        if (!wrap(cx, &desc.getter()))
            return false;
    }
    if (desc.hasSetterObject()) {
        if (!wrap(cx, &desc.setter()))
            return false;
    }

    return wrap(cx, desc.value());
}

bool
JSCompartment::wrap(JSContext *cx, AutoIdVector &props)
{
    jsid *vector = props.begin();
    int length = props.length();
    for (size_t n = 0; n < size_t(length); ++n) {
        if (!wrapId(cx, &vector[n]))
            return false;
    }
    return true;
}

/*
 * This method marks pointers that cross compartment boundaries. It should be
 * called only for per-compartment GCs, since full GCs naturally follow pointers
 * across compartments.
 */
void
JSCompartment::markCrossCompartmentWrappers(JSTracer *trc)
{
    JS_ASSERT(!zone()->isCollecting());

    for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
        Value v = e.front().value();
        if (e.front().key().kind == CrossCompartmentKey::ObjectWrapper) {
            ProxyObject *wrapper = &v.toObject().as<ProxyObject>();

            /*
             * We have a cross-compartment wrapper. Its private pointer may
             * point into the compartment being collected, so we should mark it.
             */
            Value referent = wrapper->private_();
            MarkValueRoot(trc, &referent, "cross-compartment wrapper");
            JS_ASSERT(referent == wrapper->private_());
        }
    }
}

void
JSCompartment::trace(JSTracer *trc)
{
    // At the moment, this is merely ceremonial, but any live-compartment-only
    // tracing should go here.
}

void
JSCompartment::markRoots(JSTracer *trc)
{
    JS_ASSERT(!trc->runtime()->isHeapMinorCollecting());

#ifdef JS_ION
    if (jitCompartment_)
        jitCompartment_->mark(trc, this);
#endif

    /*
     * If a compartment is on-stack, we mark its global so that
     * JSContext::global() remains valid.
     */
    if (enterCompartmentDepth && global_)
        MarkObjectRoot(trc, global_.unsafeGet(), "on-stack compartment global");
}

void
JSCompartment::sweep(FreeOp *fop, bool releaseTypes)
{
    JS_ASSERT(!activeAnalysis);

    /* This function includes itself in PHASE_SWEEP_TABLES. */
    sweepCrossCompartmentWrappers();

    JSRuntime *rt = runtimeFromMainThread();

    {
        gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_TABLES);

        /* Remove dead references held weakly by the compartment. */

        sweepBaseShapeTable();
        sweepInitialShapeTable();
        sweepNewTypeObjectTable(newTypeObjects);
        sweepNewTypeObjectTable(lazyTypeObjects);
        sweepCallsiteClones();
        savedStacks_.sweep(rt);

        if (global_ && IsObjectAboutToBeFinalized(global_.unsafeGet()))
            global_ = nullptr;

        if (selfHostingScriptSource &&
            IsObjectAboutToBeFinalized((JSObject **) selfHostingScriptSource.unsafeGet()))
        {
            selfHostingScriptSource = nullptr;
        }

#ifdef JS_ION
        if (jitCompartment_)
            jitCompartment_->sweep(fop);
#endif

        /*
         * JIT code increments activeUseCount for any RegExpShared used by jit
         * code for the lifetime of the JIT script. Thus, we must perform
         * sweeping after clearing jit code.
         */
        regExps.sweep(rt);

        if (debugScopes)
            debugScopes->sweep(rt);

        /* Finalize unreachable (key,value) pairs in all weak maps. */
        WeakMapBase::sweepCompartment(this);
    }

    NativeIterator *ni = enumerators->next();
    while (ni != enumerators) {
        JSObject *iterObj = ni->iterObj();
        NativeIterator *next = ni->next();
        if (gc::IsObjectAboutToBeFinalized(&iterObj))
            ni->unlink();
        ni = next;
    }
}

/*
 * Remove dead wrappers from the table. We must sweep all compartments, since
 * string entries in the crossCompartmentWrappers table are not marked during
 * markCrossCompartmentWrappers.
 */
void
JSCompartment::sweepCrossCompartmentWrappers()
{
    JSRuntime *rt = runtimeFromMainThread();

    gcstats::AutoPhase ap1(rt->gcStats, gcstats::PHASE_SWEEP_TABLES);
    gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_SWEEP_TABLES_WRAPPER);

    /* Remove dead wrappers from the table.
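     * Entries are dropped when the key, the value, or the associated debugger
     * object is about to be finalized; if a key pointer was merely updated in
     * place by the liveness check, the entry is rekeyed instead.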
     */
    for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
        CrossCompartmentKey key = e.front().key();
        bool keyDying = IsCellAboutToBeFinalized(&key.wrapped);
        bool valDying = IsValueAboutToBeFinalized(e.front().value().unsafeGet());
        bool dbgDying = key.debugger && IsObjectAboutToBeFinalized(&key.debugger);
        if (keyDying || valDying || dbgDying) {
            JS_ASSERT(key.kind != CrossCompartmentKey::StringWrapper);
            e.removeFront();
        } else if (key.wrapped != e.front().key().wrapped ||
                   key.debugger != e.front().key().debugger)
        {
            e.rekeyFront(key);
        }
    }
}

void
JSCompartment::purge()
{
    dtoaCache.purge();
}

void
JSCompartment::clearTables()
{
    global_ = nullptr;

    regExps.clearTables();

    // No scripts should have run in this compartment. This is used when
    // merging a compartment that has been used off thread into another
    // compartment and zone.
    JS_ASSERT(crossCompartmentWrappers.empty());
    JS_ASSERT_IF(callsiteClones.initialized(), callsiteClones.empty());
#ifdef JS_ION
    JS_ASSERT(!jitCompartment_);
#endif
    JS_ASSERT(!debugScopes);
    JS_ASSERT(!gcWeakMapList);
    JS_ASSERT(enumerators->next() == enumerators);

    types.clearTables();
    if (baseShapes.initialized())
        baseShapes.clear();
    if (initialShapes.initialized())
        initialShapes.clear();
    if (newTypeObjects.initialized())
        newTypeObjects.clear();
    if (lazyTypeObjects.initialized())
        lazyTypeObjects.clear();
    if (savedStacks_.initialized())
        savedStacks_.clear();
}

void
JSCompartment::setObjectMetadataCallback(js::ObjectMetadataCallback callback)
{
    // Clear any jitcode in the runtime, which behaves differently depending on
    // whether there is a creation callback.
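    // Compiled code may bake in whether a metadata callback has to be invoked
    // when objects are allocated, so the conservative fix when the callback
    // changes is to discard all JIT code and let it be recompiled.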
    ReleaseAllJITCode(runtime_->defaultFreeOp());

    objectMetadataCallback = callback;
}

bool
JSCompartment::hasScriptsOnStack()
{
    for (ActivationIterator iter(runtimeFromMainThread()); !iter.done(); ++iter) {
        if (iter->compartment() == this)
            return true;
    }

    return false;
}

static bool
AddInnerLazyFunctionsFromScript(JSScript *script, AutoObjectVector &lazyFunctions)
{
    if (!script->hasObjects())
        return true;
    ObjectArray *objects = script->objects();
    for (size_t i = script->innerObjectsStart(); i < objects->length; i++) {
        JSObject *obj = objects->vector[i];
        if (obj->is<JSFunction>() && obj->as<JSFunction>().isInterpretedLazy()) {
            if (!lazyFunctions.append(obj))
                return false;
        }
    }
    return true;
}

static bool
CreateLazyScriptsForCompartment(JSContext *cx)
{
    AutoObjectVector lazyFunctions(cx);

    // Find all live lazy scripts in the compartment, and via them all root
    // lazy functions in the compartment: those which have not been compiled,
    // which have a source object, indicating that they have a parent, and
    // which do not have an uncompiled enclosing script. The last condition is
    // so that we don't compile lazy scripts whose enclosing scripts failed to
    // compile, indicating that the lazy script never escaped the enclosing
    // script.
    for (gc::CellIter i(cx->zone(), gc::FINALIZE_LAZY_SCRIPT); !i.done(); i.next()) {
        LazyScript *lazy = i.get<LazyScript>();
        JSFunction *fun = lazy->functionNonDelazifying();
        if (fun->compartment() == cx->compartment() &&
            lazy->sourceObject() && !lazy->maybeScript() &&
            !lazy->hasUncompiledEnclosingScript())
        {
            MOZ_ASSERT(fun->isInterpretedLazy());
            MOZ_ASSERT(lazy == fun->lazyScriptOrNull());
            if (!lazyFunctions.append(fun))
                return false;
        }
    }

    // Create scripts for each lazy function, updating the list of functions to
    // process with any newly exposed inner functions in created scripts.
    // A function cannot be delazified until its outer script exists.
    for (size_t i = 0; i < lazyFunctions.length(); i++) {
        JSFunction *fun = &lazyFunctions[i]->as<JSFunction>();

        // lazyFunctions may have been populated with multiple functions for
        // a lazy script.
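        // A function that has already been delazified (directly, or via a
        // duplicate entry) needs no further work here.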
        if (!fun->isInterpretedLazy())
            continue;

        JSScript *script = fun->getOrCreateScript(cx);
        if (!script)
            return false;
        if (!AddInnerLazyFunctionsFromScript(script, lazyFunctions))
            return false;
    }

    return true;
}

bool
JSCompartment::ensureDelazifyScriptsForDebugMode(JSContext *cx)
{
    MOZ_ASSERT(cx->compartment() == this);
    if ((debugModeBits & DebugNeedDelazification) && !CreateLazyScriptsForCompartment(cx))
        return false;
    debugModeBits &= ~DebugNeedDelazification;
    return true;
}

bool
JSCompartment::setDebugModeFromC(JSContext *cx, bool b, AutoDebugModeInvalidation &invalidate)
{
    bool enabledBefore = debugMode();
    bool enabledAfter = (debugModeBits & DebugModeFromMask & ~DebugFromC) || b;

    // Enabling debug mode from C (vs. from JS) can only be done when no
    // scripts from the target compartment are on the stack.
    //
    // We do allow disabling debug mode while scripts are on the stack. In
    // that case the debug-mode code for those scripts remains, so hooks may
    // subsequently be called erroneously, even though debug mode is supposedly
    // off, and we have to live with it.
    bool onStack = false;
    if (enabledBefore != enabledAfter) {
        onStack = hasScriptsOnStack();
        if (b && onStack) {
            JS_ReportErrorNumber(cx, js_GetErrorMessage, nullptr, JSMSG_DEBUG_NOT_IDLE);
            return false;
        }
    }

    debugModeBits = (debugModeBits & ~DebugFromC) | (b ? DebugFromC : 0);
    JS_ASSERT(debugMode() == enabledAfter);
    if (enabledBefore != enabledAfter) {
        // Pass in a nullptr cx to not bother recompiling for JSD1, since
        // we're still enforcing the idle-stack invariant here.
        if (!updateJITForDebugMode(nullptr, invalidate))
            return false;
        if (!enabledAfter)
            DebugScopes::onCompartmentLeaveDebugMode(this);
    }
    return true;
}

bool
JSCompartment::updateJITForDebugMode(JSContext *maybecx, AutoDebugModeInvalidation &invalidate)
{
#ifdef JS_ION
    // The AutoDebugModeInvalidation argument makes sure we can't forget to
    // invalidate, but it is also important not to run any scripts in this
    // compartment until the invalidate is destroyed. That is the caller's
    // responsibility.
    if (!jit::UpdateForDebugMode(maybecx, this, invalidate))
        return false;
#endif
    return true;
}

bool
JSCompartment::addDebuggee(JSContext *cx, js::GlobalObject *global)
{
    AutoDebugModeInvalidation invalidate(this);
    return addDebuggee(cx, global, invalidate);
}

bool
JSCompartment::addDebuggee(JSContext *cx,
                           GlobalObject *globalArg,
                           AutoDebugModeInvalidation &invalidate)
{
    Rooted<GlobalObject*> global(cx, globalArg);

    bool wasEnabled = debugMode();
    if (!debuggees.put(global)) {
        js_ReportOutOfMemory(cx);
        return false;
    }
    debugModeBits |= DebugFromJS;
    if (!wasEnabled && !updateJITForDebugMode(cx, invalidate))
        return false;
    return true;
}

bool
JSCompartment::removeDebuggee(JSContext *cx,
                              js::GlobalObject *global,
                              js::GlobalObjectSet::Enum *debuggeesEnum)
{
    AutoDebugModeInvalidation invalidate(this);
    return removeDebuggee(cx, global, invalidate, debuggeesEnum);
}

bool
JSCompartment::removeDebuggee(JSContext *cx,
                              js::GlobalObject *global,
                              AutoDebugModeInvalidation &invalidate,
                              js::GlobalObjectSet::Enum *debuggeesEnum)
{
    bool wasEnabled = debugMode();
    removeDebuggeeUnderGC(cx->runtime()->defaultFreeOp(), global, invalidate, debuggeesEnum);
    if (wasEnabled && !debugMode() && !updateJITForDebugMode(cx, invalidate))
        return false;
    return true;
}

void
JSCompartment::removeDebuggeeUnderGC(FreeOp *fop,
                                     js::GlobalObject *global,
                                     js::GlobalObjectSet::Enum *debuggeesEnum)
{
    AutoDebugModeInvalidation invalidate(this);
    removeDebuggeeUnderGC(fop, global, invalidate, debuggeesEnum);
}

void
JSCompartment::removeDebuggeeUnderGC(FreeOp *fop,
                                     js::GlobalObject *global,
                                     AutoDebugModeInvalidation &invalidate,
                                     js::GlobalObjectSet::Enum *debuggeesEnum)
{
    bool wasEnabled = debugMode();
    JS_ASSERT(debuggees.has(global));
    if (debuggeesEnum)
        debuggeesEnum->removeFront();
    else
        debuggees.remove(global);

    if (debuggees.empty()) {
        debugModeBits &= ~DebugFromJS;
        if (wasEnabled && !debugMode())
            DebugScopes::onCompartmentLeaveDebugMode(this);
    }
}

void
JSCompartment::clearBreakpointsIn(FreeOp *fop, js::Debugger *dbg, HandleObject handler)
{
    for (gc::CellIter i(zone(), gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
        JSScript *script = i.get<JSScript>();
        if (script->compartment() == this && script->hasAnyBreakpointsOrStepMode())
            script->clearBreakpointsIn(fop, dbg, handler);
    }
}

void
JSCompartment::clearTraps(FreeOp *fop)
{
    MinorGC(fop->runtime(), JS::gcreason::EVICT_NURSERY);
    for (gc::CellIter i(zone(), gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
        JSScript *script = i.get<JSScript>();
        if (script->compartment() == this && script->hasAnyBreakpointsOrStepMode())
            script->clearTraps(fop);
    }
}

void
JSCompartment::addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf,
                                      size_t *tiAllocationSiteTables,
                                      size_t *tiArrayTypeTables,
                                      size_t *tiObjectTypeTables,
                                      size_t *compartmentObject,
                                      size_t *shapesCompartmentTables,
                                      size_t *crossCompartmentWrappersArg,
                                      size_t *regexpCompartment,
                                      size_t *debuggeesSet,
                                      size_t *savedStacksSet)
{
    *compartmentObject += mallocSizeOf(this);
    types.addSizeOfExcludingThis(mallocSizeOf, tiAllocationSiteTables,
                                 tiArrayTypeTables, tiObjectTypeTables);
    *shapesCompartmentTables += baseShapes.sizeOfExcludingThis(mallocSizeOf)
                              + initialShapes.sizeOfExcludingThis(mallocSizeOf)
                              + newTypeObjects.sizeOfExcludingThis(mallocSizeOf)
                              + lazyTypeObjects.sizeOfExcludingThis(mallocSizeOf);
    *crossCompartmentWrappersArg += crossCompartmentWrappers.sizeOfExcludingThis(mallocSizeOf);
    *regexpCompartment += regExps.sizeOfExcludingThis(mallocSizeOf);
    *debuggeesSet += debuggees.sizeOfExcludingThis(mallocSizeOf);
    *savedStacksSet += savedStacks_.sizeOfExcludingThis(mallocSizeOf);
}

void
JSCompartment::adoptWorkerAllocator(Allocator *workerAllocator)
{
    zone()->allocator.arenas.adoptArenas(runtimeFromMainThread(), &workerAllocator->arenas);
}