/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "jscompartmentinlines.h"

#include "mozilla/DebugOnly.h"
#include "mozilla/MemoryReporting.h"

#include "jscntxt.h"
#include "jsfriendapi.h"
#include "jsgc.h"
#include "jsiter.h"
#include "jsproxy.h"
#include "jswatchpoint.h"
#include "jswrapper.h"

#include "gc/Marking.h"
#ifdef JS_ION
#include "jit/JitCompartment.h"
#endif
#include "js/RootingAPI.h"
#include "vm/StopIterationObject.h"
#include "vm/WrapperObject.h"

#include "jsatominlines.h"
#include "jsfuninlines.h"
#include "jsgcinlines.h"
#include "jsinferinlines.h"
#include "jsobjinlines.h"

using namespace js;
using namespace js::gc;

using mozilla::DebugOnly;

JSCompartment::JSCompartment(Zone *zone, const JS::CompartmentOptions &options = JS::CompartmentOptions())
  : options_(options),
    zone_(zone),
    runtime_(zone->runtimeFromMainThread()),
    principals(nullptr),
    isSystem(false),
    isSelfHosting(false),
    marked(true),
#ifdef DEBUG
    firedOnNewGlobalObject(false),
#endif
    global_(nullptr),
    enterCompartmentDepth(0),
    data(nullptr),
    objectMetadataCallback(nullptr),
    lastAnimationTime(0),
    regExps(runtime_),
    globalWriteBarriered(false),
    propertyTree(thisForCtor()),
    selfHostingScriptSource(nullptr),
    gcIncomingGrayPointers(nullptr),
    gcWeakMapList(nullptr),
    debugModeBits(runtime_->debugMode ? DebugFromC : 0),
    rngState(0),
    watchpointMap(nullptr),
    scriptCountsMap(nullptr),
    debugScriptMap(nullptr),
    debugScopes(nullptr),
    enumerators(nullptr),
    compartmentStats(nullptr)
#ifdef JS_ION
    , jitCompartment_(nullptr)
#endif
{
    runtime_->numCompartments++;
    JS_ASSERT_IF(options.mergeable(), options.invisibleToDebugger());
}

JSCompartment::~JSCompartment()
{
#ifdef JS_ION
    js_delete(jitCompartment_);
#endif

    js_delete(watchpointMap);
    js_delete(scriptCountsMap);
    js_delete(debugScriptMap);
    js_delete(debugScopes);
    js_free(enumerators);

    runtime_->numCompartments--;
}

bool
JSCompartment::init(JSContext *cx)
{
    /*
     * As a hack, we clear our timezone cache every time we create a new
     * compartment. This ensures that the cache is always relatively fresh, but
     * shouldn't interfere with benchmarks which create tons of date objects
     * (unless they also create tons of iframes, which seems unlikely).
     */
    if (cx)
        cx->runtime()->dateTimeInfo.updateTimeZoneAdjustment();

    activeAnalysis = false;

    if (!crossCompartmentWrappers.init(0))
        return false;

    if (!regExps.init(cx))
        return false;

    enumerators = NativeIterator::allocateSentinel(cx);
    if (!enumerators)
        return false;

    if (!savedStacks_.init())
        return false;

    return debuggees.init(0);
}
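
// Added commentary (not in the original file): embedders do not call
// JSCompartment::init() directly. A compartment is normally created and
// initialized as a side effect of creating a global object, roughly:
//
//     JS::CompartmentOptions options;
//     RootedObject global(cx, JS_NewGlobalObject(cx, &globalClass, nullptr,
//                                                JS::FireOnNewGlobalHook, options));
//
// where |globalClass| stands for an embedder-defined JSClass with
// JSCLASS_GLOBAL_FLAGS; this call site is an illustrative sketch, not code
// from this file.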

#ifdef JS_ION
jit::JitRuntime *
JSRuntime::createJitRuntime(JSContext *cx)
{
    // The shared stubs are created in the atoms compartment, which may be
    // accessed by other threads with an exclusive context.
    AutoLockForExclusiveAccess atomsLock(cx);

    // The runtime will only be created on its owning thread, but reads of a
    // runtime's jitRuntime() can occur when another thread is requesting an
    // interrupt.
    AutoLockForInterrupt lock(this);

    JS_ASSERT(!jitRuntime_);

    jitRuntime_ = cx->new_<jit::JitRuntime>();

    if (!jitRuntime_)
        return nullptr;

    if (!jitRuntime_->initialize(cx)) {
        js_delete(jitRuntime_);
        jitRuntime_ = nullptr;

        JSCompartment *comp = cx->runtime()->atomsCompartment();
        if (comp->jitCompartment_) {
            js_delete(comp->jitCompartment_);
            comp->jitCompartment_ = nullptr;
        }

        return nullptr;
    }

    return jitRuntime_;
}

bool
JSCompartment::ensureJitCompartmentExists(JSContext *cx)
{
    using namespace js::jit;
    if (jitCompartment_)
        return true;

    if (!zone()->getJitZone(cx))
        return false;

    /* Set the compartment early, so linking works. */
    jitCompartment_ = cx->new_<JitCompartment>();

    if (!jitCompartment_)
        return false;

    if (!jitCompartment_->initialize(cx)) {
        js_delete(jitCompartment_);
        jitCompartment_ = nullptr;
        return false;
    }

    return true;
}
#endif
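
// Added commentary (not in the original file): both objects above are created
// lazily. A typical caller pattern, sketched here, is to ensure the
// per-compartment JIT state exists right before compiling a script:
//
//     if (!script->compartment()->ensureJitCompartmentExists(cx))
//         return false;   // OOM; the compartment stays interpreter-only
//
// so compartments that never run JIT code never pay for a JitCompartment.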

#ifdef JSGC_GENERATIONAL

/*
 * This class is used to add a post barrier on the crossCompartmentWrappers map,
 * as the key is calculated based on objects which may be moved by generational
 * GC.
 */
class WrapperMapRef : public BufferableRef
{
    WrapperMap *map;
    CrossCompartmentKey key;

  public:
    WrapperMapRef(WrapperMap *map, const CrossCompartmentKey &key)
      : map(map), key(key) {}

    void mark(JSTracer *trc) {
        CrossCompartmentKey prior = key;
        if (key.debugger)
            Mark(trc, &key.debugger, "CCW debugger");
        if (key.kind != CrossCompartmentKey::StringWrapper)
            Mark(trc, reinterpret_cast<JSObject**>(&key.wrapped), "CCW wrapped object");
        if (key.debugger == prior.debugger && key.wrapped == prior.wrapped)
            return;

        /* Look for the original entry, which might have been removed. */
        WrapperMap::Ptr p = map->lookup(prior);
        if (!p)
            return;

        /* Rekey the entry. */
        map->rekeyAs(prior, key, key);
    }
};
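
// Added commentary (not in the original file): WrapperMapRef is the generic
// store-buffer entry used by putWrapper() below. When a new wrapper's key
// still points into the nursery, putWrapper() records it like this:
//
//     WrapperMapRef ref(&crossCompartmentWrappers, wrapped);
//     cx->runtime()->gcStoreBuffer.putGeneric(ref);
//
// so that a minor GC can call mark() above and rekey the table entry if the
// keyed objects were tenured to new addresses.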

#ifdef JS_GC_ZEAL
void
JSCompartment::checkWrapperMapAfterMovingGC()
{
    /*
     * Assert that the postbarriers have worked and that nothing is left in
     * wrapperMap that points into the nursery, and that the hash table entries
     * are discoverable.
     */
    JS::shadow::Runtime *rt = JS::shadow::Runtime::asShadowRuntime(runtimeFromMainThread());
    for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
        CrossCompartmentKey key = e.front().key();
        JS_ASSERT(!IsInsideNursery(rt, key.debugger));
        JS_ASSERT(!IsInsideNursery(rt, key.wrapped));
        JS_ASSERT(!IsInsideNursery(rt, e.front().value().get().toGCThing()));

        WrapperMap::Ptr ptr = crossCompartmentWrappers.lookup(key);
        JS_ASSERT(ptr.found() && &*ptr == &e.front());
    }
}
#endif

#endif

bool
JSCompartment::putWrapper(JSContext *cx, const CrossCompartmentKey &wrapped, const js::Value &wrapper)
{
    JS_ASSERT(wrapped.wrapped);
    JS_ASSERT(!IsPoisonedPtr(wrapped.wrapped));
    JS_ASSERT(!IsPoisonedPtr(wrapped.debugger));
    JS_ASSERT(!IsPoisonedPtr(wrapper.toGCThing()));
    JS_ASSERT_IF(wrapped.kind == CrossCompartmentKey::StringWrapper, wrapper.isString());
    JS_ASSERT_IF(wrapped.kind != CrossCompartmentKey::StringWrapper, wrapper.isObject());
    bool success = crossCompartmentWrappers.put(wrapped, wrapper);

#ifdef JSGC_GENERATIONAL
    /* There's no point allocating wrappers in the nursery since we will tenure them anyway. */
    Nursery &nursery = cx->nursery();
    JS_ASSERT(!nursery.isInside(wrapper.toGCThing()));

    if (success && (nursery.isInside(wrapped.wrapped) || nursery.isInside(wrapped.debugger))) {
        WrapperMapRef ref(&crossCompartmentWrappers, wrapped);
        cx->runtime()->gcStoreBuffer.putGeneric(ref);
    }
#endif

    return success;
}
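
// Added commentary (not in the original file): the two key kinds asserted in
// putWrapper() correspond to the two wrap() overloads below -- string keys map
// to a same-zone copy of the string, object keys map to a cross-compartment
// proxy wrapper. In both cases the caller first consults the cache, e.g.
// (sketch based on the string overload below):
//
//     RootedValue key(cx, StringValue(str));
//     if (WrapperMap::Ptr p = crossCompartmentWrappers.lookup(key)) {
//         *strp = p->value().get().toString();   // reuse the cached copy
//         return true;
//     }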

bool
JSCompartment::wrap(JSContext *cx, JSString **strp)
{
    JS_ASSERT(!cx->runtime()->isAtomsCompartment(this));
    JS_ASSERT(cx->compartment() == this);

    /* If the string is already in this compartment, we are done. */
    JSString *str = *strp;
    if (str->zoneFromAnyThread() == zone())
        return true;

    /* If the string is an atom, we don't have to copy. */
    if (str->isAtom()) {
        JS_ASSERT(str->isPermanentAtom() ||
                  cx->runtime()->isAtomsZone(str->zone()));
        return true;
    }

    /* Check the cache. */
    RootedValue key(cx, StringValue(str));
    if (WrapperMap::Ptr p = crossCompartmentWrappers.lookup(key)) {
        *strp = p->value().get().toString();
        return true;
    }

    /*
     * No dice. Make a copy, and cache it. Directly allocate the copy in the
     * destination compartment, rather than first flattening it (and possibly
     * allocating in source compartment), because we don't know whether the
     * flattening will pay off later.
     */
    JSString *copy;
    if (str->hasPureChars()) {
        copy = js_NewStringCopyN<CanGC>(cx, str->pureChars(), str->length());
    } else {
        ScopedJSFreePtr<jschar> copiedChars;
        if (!str->copyNonPureCharsZ(cx, copiedChars))
            return false;
        copy = js_NewString<CanGC>(cx, copiedChars.forget(), str->length());
    }

    if (!copy)
        return false;
    if (!putWrapper(cx, key, StringValue(copy)))
        return false;

    *strp = copy;
    return true;
}
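
// Added commentary (not in the original file): from an embedder's point of
// view this overload is normally reached through the public JS_WrapValue()
// entry point after entering the destination compartment, e.g. (sketch; the
// variable names are illustrative):
//
//     JSAutoCompartment ac(cx, destinationGlobal);
//     RootedValue v(cx, StringValue(str));
//     if (!JS_WrapValue(cx, &v))   // may copy |str| into this compartment's zone
//         return false;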

bool
JSCompartment::wrap(JSContext *cx, HeapPtrString *strp)
{
    RootedString str(cx, *strp);
    if (!wrap(cx, str.address()))
        return false;
    *strp = str;
    return true;
}

bool
JSCompartment::wrap(JSContext *cx, MutableHandleObject obj, HandleObject existingArg)
{
    JS_ASSERT(!cx->runtime()->isAtomsCompartment(this));
    JS_ASSERT(cx->compartment() == this);
    JS_ASSERT_IF(existingArg, existingArg->compartment() == cx->compartment());
    JS_ASSERT_IF(existingArg, IsDeadProxyObject(existingArg));

    if (!obj)
        return true;
    AutoDisableProxyCheck adpc(cx->runtime());

    // Wrappers should really be parented to the wrapped parent of the wrapped
    // object, but in that case a wrapped global object would have a nullptr
    // parent without being a proper global object (JSCLASS_IS_GLOBAL). Instead,
    // we parent all wrappers to the global object in their home compartment.
    // This loses us some transparency, and is generally very cheesy.
    HandleObject global = cx->global();
    RootedObject objGlobal(cx, &obj->global());
    JS_ASSERT(global);
    JS_ASSERT(objGlobal);

    const JSWrapObjectCallbacks *cb = cx->runtime()->wrapObjectCallbacks;

    if (obj->compartment() == this) {
        obj.set(GetOuterObject(cx, obj));
        return true;
    }

    // If we have a cross-compartment wrapper, make sure that the cx isn't
    // associated with the self-hosting global. We don't want to create
    // wrappers for objects in other runtimes, which may be the case for the
    // self-hosting global.
    JS_ASSERT(!cx->runtime()->isSelfHostingGlobal(global) &&
              !cx->runtime()->isSelfHostingGlobal(objGlobal));

    // Unwrap the object, but don't unwrap outer windows.
    unsigned flags = 0;
    obj.set(UncheckedUnwrap(obj, /* stopAtOuter = */ true, &flags));

    if (obj->compartment() == this) {
        MOZ_ASSERT(obj == GetOuterObject(cx, obj));
        return true;
    }

    // Translate StopIteration singleton.
    if (obj->is<StopIterationObject>()) {
        // StopIteration isn't a constructor, but it's stored in GlobalObject
        // as one, out of laziness. Hence the GetBuiltinConstructor call here.
        RootedObject stopIteration(cx);
        if (!GetBuiltinConstructor(cx, JSProto_StopIteration, &stopIteration))
            return false;
        obj.set(stopIteration);
        return true;
    }

    // Invoke the prewrap callback. We're a bit worried about infinite
    // recursion here, so we do a check - see bug 809295.
    JS_CHECK_CHROME_RECURSION(cx, return false);
    if (cb->preWrap) {
        obj.set(cb->preWrap(cx, global, obj, flags));
        if (!obj)
            return false;
    }
    MOZ_ASSERT(obj == GetOuterObject(cx, obj));

    if (obj->compartment() == this)
        return true;

    // If we already have a wrapper for this value, use it.
    RootedValue key(cx, ObjectValue(*obj));
    if (WrapperMap::Ptr p = crossCompartmentWrappers.lookup(key)) {
        obj.set(&p->value().get().toObject());
        JS_ASSERT(obj->is<CrossCompartmentWrapperObject>());
        JS_ASSERT(obj->getParent() == global);
        return true;
    }

    RootedObject proto(cx, TaggedProto::LazyProto);
    RootedObject existing(cx, existingArg);
    if (existing) {
        // Is it possible to reuse |existing|?
        if (!existing->getTaggedProto().isLazy() ||
            // Note: don't use is<ObjectProxyObject>() here -- it also matches subclasses!
            existing->getClass() != &ProxyObject::uncallableClass_ ||
            existing->getParent() != global ||
            obj->isCallable())
        {
            existing = nullptr;
        }
    }

    obj.set(cb->wrap(cx, existing, obj, proto, global, flags));
    if (!obj)
        return false;

    // We maintain the invariant that the key in the cross-compartment wrapper
    // map is always directly wrapped by the value.
    JS_ASSERT(Wrapper::wrappedObject(obj) == &key.get().toObject());

    return putWrapper(cx, key, ObjectValue(*obj));
}
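
// Added summary (not in the original file): the object overload above works in
// stages: same-compartment objects are merely outerized; otherwise the object
// is unwrapped up to any outer window, the StopIteration singleton is remapped
// to this compartment's own constructor, the embedder's preWrap callback runs,
// the wrapper cache is consulted, and only then is a new cross-compartment
// wrapper created via cb->wrap() and cached with putWrapper(), preserving the
// invariant that the cached key is the object the wrapper directly wraps.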

bool
JSCompartment::wrapId(JSContext *cx, jsid *idp)
{
    MOZ_ASSERT(*idp != JSID_VOID, "JSID_VOID is an out-of-band sentinel value");
    if (JSID_IS_INT(*idp))
        return true;
    RootedValue value(cx, IdToValue(*idp));
    if (!wrap(cx, &value))
        return false;
    RootedId id(cx);
    if (!ValueToId<CanGC>(cx, value, &id))
        return false;

    *idp = id;
    return true;
}

bool
JSCompartment::wrap(JSContext *cx, PropertyOp *propp)
{
    RootedValue value(cx, CastAsObjectJsval(*propp));
    if (!wrap(cx, &value))
        return false;
    *propp = CastAsPropertyOp(value.toObjectOrNull());
    return true;
}

bool
JSCompartment::wrap(JSContext *cx, StrictPropertyOp *propp)
{
    RootedValue value(cx, CastAsObjectJsval(*propp));
    if (!wrap(cx, &value))
        return false;
    *propp = CastAsStrictPropertyOp(value.toObjectOrNull());
    return true;
}

bool
JSCompartment::wrap(JSContext *cx, MutableHandle<PropertyDescriptor> desc)
{
    if (!wrap(cx, desc.object()))
        return false;

    if (desc.hasGetterObject()) {
        if (!wrap(cx, &desc.getter()))
            return false;
    }
    if (desc.hasSetterObject()) {
        if (!wrap(cx, &desc.setter()))
            return false;
    }

    return wrap(cx, desc.value());
}

bool
JSCompartment::wrap(JSContext *cx, AutoIdVector &props)
{
    jsid *vector = props.begin();
    int length = props.length();
    for (size_t n = 0; n < size_t(length); ++n) {
        if (!wrapId(cx, &vector[n]))
            return false;
    }
    return true;
}

/*
 * This method marks pointers that cross compartment boundaries. It should be
 * called only for per-compartment GCs, since full GCs naturally follow pointers
 * across compartments.
 */
void
JSCompartment::markCrossCompartmentWrappers(JSTracer *trc)
{
    JS_ASSERT(!zone()->isCollecting());

    for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
        Value v = e.front().value();
        if (e.front().key().kind == CrossCompartmentKey::ObjectWrapper) {
            ProxyObject *wrapper = &v.toObject().as<ProxyObject>();

            /*
             * We have a cross-compartment wrapper. Its private pointer may
             * point into the compartment being collected, so we should mark it.
             */
            Value referent = wrapper->private_();
            MarkValueRoot(trc, &referent, "cross-compartment wrapper");
            JS_ASSERT(referent == wrapper->private_());
        }
    }
}

void
JSCompartment::trace(JSTracer *trc)
{
    // At the moment, this is merely ceremonial, but any live-compartment-only tracing should go
    // here.
}

void
JSCompartment::markRoots(JSTracer *trc)
{
    JS_ASSERT(!trc->runtime()->isHeapMinorCollecting());

#ifdef JS_ION
    if (jitCompartment_)
        jitCompartment_->mark(trc, this);
#endif

    /*
     * If a compartment is on-stack, we mark its global so that
     * JSContext::global() remains valid.
     */
    if (enterCompartmentDepth && global_)
        MarkObjectRoot(trc, global_.unsafeGet(), "on-stack compartment global");
}

void
JSCompartment::sweep(FreeOp *fop, bool releaseTypes)
{
    JS_ASSERT(!activeAnalysis);

    /* This function includes itself in PHASE_SWEEP_TABLES. */
    sweepCrossCompartmentWrappers();

    JSRuntime *rt = runtimeFromMainThread();

    {
        gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_TABLES);

        /* Remove dead references held weakly by the compartment. */

        sweepBaseShapeTable();
        sweepInitialShapeTable();
        sweepNewTypeObjectTable(newTypeObjects);
        sweepNewTypeObjectTable(lazyTypeObjects);
        sweepCallsiteClones();
        savedStacks_.sweep(rt);

        if (global_ && IsObjectAboutToBeFinalized(global_.unsafeGet()))
            global_ = nullptr;

        if (selfHostingScriptSource &&
            IsObjectAboutToBeFinalized((JSObject **) selfHostingScriptSource.unsafeGet()))
        {
            selfHostingScriptSource = nullptr;
        }

#ifdef JS_ION
        if (jitCompartment_)
            jitCompartment_->sweep(fop);
#endif

        /*
         * JIT code increments activeUseCount for any RegExpShared used by jit
         * code for the lifetime of the JIT script. Thus, we must perform
         * sweeping after clearing jit code.
         */
        regExps.sweep(rt);

        if (debugScopes)
            debugScopes->sweep(rt);

        /* Finalize unreachable (key,value) pairs in all weak maps. */
        WeakMapBase::sweepCompartment(this);
    }

    NativeIterator *ni = enumerators->next();
    while (ni != enumerators) {
        JSObject *iterObj = ni->iterObj();
        NativeIterator *next = ni->next();
        if (gc::IsObjectAboutToBeFinalized(&iterObj))
            ni->unlink();
        ni = next;
    }
}

/*
 * Remove dead wrappers from the table. We must sweep all compartments, since
 * string entries in the crossCompartmentWrappers table are not marked during
 * markCrossCompartmentWrappers.
 */
void
JSCompartment::sweepCrossCompartmentWrappers()
{
    JSRuntime *rt = runtimeFromMainThread();

    gcstats::AutoPhase ap1(rt->gcStats, gcstats::PHASE_SWEEP_TABLES);
    gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_SWEEP_TABLES_WRAPPER);

    /* Remove dead wrappers from the table. */
    for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
        CrossCompartmentKey key = e.front().key();
        bool keyDying = IsCellAboutToBeFinalized(&key.wrapped);
        bool valDying = IsValueAboutToBeFinalized(e.front().value().unsafeGet());
        bool dbgDying = key.debugger && IsObjectAboutToBeFinalized(&key.debugger);
        if (keyDying || valDying || dbgDying) {
            JS_ASSERT(key.kind != CrossCompartmentKey::StringWrapper);
            e.removeFront();
        } else if (key.wrapped != e.front().key().wrapped ||
                   key.debugger != e.front().key().debugger)
        {
            e.rekeyFront(key);
        }
    }
}
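
// Added commentary (not in the original file): note the two outcomes above.
// Entries whose key, value, or debugger object is dying are removed outright,
// while entries whose keyed pointers were merely updated (for example because
// the referents moved during collection) are rekeyed in place so that later
// lookups still find them.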

void
JSCompartment::purge()
{
    dtoaCache.purge();
}

void
JSCompartment::clearTables()
{
    global_ = nullptr;

    regExps.clearTables();

    // No scripts should have run in this compartment. This is used when
    // merging a compartment that has been used off thread into another
    // compartment and zone.
    JS_ASSERT(crossCompartmentWrappers.empty());
    JS_ASSERT_IF(callsiteClones.initialized(), callsiteClones.empty());
#ifdef JS_ION
    JS_ASSERT(!jitCompartment_);
#endif
    JS_ASSERT(!debugScopes);
    JS_ASSERT(!gcWeakMapList);
    JS_ASSERT(enumerators->next() == enumerators);

    types.clearTables();
    if (baseShapes.initialized())
        baseShapes.clear();
    if (initialShapes.initialized())
        initialShapes.clear();
    if (newTypeObjects.initialized())
        newTypeObjects.clear();
    if (lazyTypeObjects.initialized())
        lazyTypeObjects.clear();
    if (savedStacks_.initialized())
        savedStacks_.clear();
}

void
JSCompartment::setObjectMetadataCallback(js::ObjectMetadataCallback callback)
{
    // Clear any jitcode in the runtime, which behaves differently depending on
    // whether there is a creation callback.
    ReleaseAllJITCode(runtime_->defaultFreeOp());

    objectMetadataCallback = callback;
}

bool
JSCompartment::hasScriptsOnStack()
{
    for (ActivationIterator iter(runtimeFromMainThread()); !iter.done(); ++iter) {
        if (iter->compartment() == this)
            return true;
    }

    return false;
}

static bool
AddInnerLazyFunctionsFromScript(JSScript *script, AutoObjectVector &lazyFunctions)
{
    if (!script->hasObjects())
        return true;
    ObjectArray *objects = script->objects();
    for (size_t i = script->innerObjectsStart(); i < objects->length; i++) {
        JSObject *obj = objects->vector[i];
        if (obj->is<JSFunction>() && obj->as<JSFunction>().isInterpretedLazy()) {
            if (!lazyFunctions.append(obj))
                return false;
        }
    }
    return true;
}

static bool
CreateLazyScriptsForCompartment(JSContext *cx)
{
    AutoObjectVector lazyFunctions(cx);

    // Find all live lazy scripts in the compartment, and via them all root
    // lazy functions in the compartment: those which have not been compiled,
    // which have a source object, indicating that they have a parent, and
    // which do not have an uncompiled enclosing script. The last condition is
    // so that we don't compile lazy scripts whose enclosing scripts failed to
    // compile, indicating that the lazy script did not escape the script.
    for (gc::CellIter i(cx->zone(), gc::FINALIZE_LAZY_SCRIPT); !i.done(); i.next()) {
        LazyScript *lazy = i.get<LazyScript>();
        JSFunction *fun = lazy->functionNonDelazifying();
        if (fun->compartment() == cx->compartment() &&
            lazy->sourceObject() && !lazy->maybeScript() &&
            !lazy->hasUncompiledEnclosingScript())
        {
            MOZ_ASSERT(fun->isInterpretedLazy());
            MOZ_ASSERT(lazy == fun->lazyScriptOrNull());
            if (!lazyFunctions.append(fun))
                return false;
        }
    }

    // Create scripts for each lazy function, updating the list of functions to
    // process with any newly exposed inner functions in created scripts.
    // A function cannot be delazified until its outer script exists.
    for (size_t i = 0; i < lazyFunctions.length(); i++) {
        JSFunction *fun = &lazyFunctions[i]->as<JSFunction>();

        // lazyFunctions may have been populated with multiple functions for
        // a lazy script.
        if (!fun->isInterpretedLazy())
            continue;

        JSScript *script = fun->getOrCreateScript(cx);
        if (!script)
            return false;
        if (!AddInnerLazyFunctionsFromScript(script, lazyFunctions))
            return false;
    }

    return true;
}
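
// Added commentary (not in the original file): the second loop above is a
// simple worklist algorithm. Delazifying one function can expose further lazy
// inner functions, which AddInnerLazyFunctionsFromScript() appends to the same
// vector while it is being iterated, so the loop runs until no new lazy
// functions are discovered.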

bool
JSCompartment::ensureDelazifyScriptsForDebugMode(JSContext *cx)
{
    MOZ_ASSERT(cx->compartment() == this);
    if ((debugModeBits & DebugNeedDelazification) && !CreateLazyScriptsForCompartment(cx))
        return false;
    debugModeBits &= ~DebugNeedDelazification;
    return true;
}

bool
JSCompartment::setDebugModeFromC(JSContext *cx, bool b, AutoDebugModeInvalidation &invalidate)
{
    bool enabledBefore = debugMode();
    bool enabledAfter = (debugModeBits & DebugModeFromMask & ~DebugFromC) || b;

    // Enabling debug mode from C (vs. from JS) can only be done when no
    // scripts from the target compartment are on the stack.
    //
    // We do allow disabling debug mode while scripts are on the stack. In
    // that case the debug-mode code for those scripts remains, so hooks may
    // subsequently be called erroneously, even though debug mode is supposedly
    // off, and we have to live with it.
    bool onStack = false;
    if (enabledBefore != enabledAfter) {
        onStack = hasScriptsOnStack();
        if (b && onStack) {
            JS_ReportErrorNumber(cx, js_GetErrorMessage, nullptr, JSMSG_DEBUG_NOT_IDLE);
            return false;
        }
    }

    debugModeBits = (debugModeBits & ~DebugFromC) | (b ? DebugFromC : 0);
    JS_ASSERT(debugMode() == enabledAfter);
    if (enabledBefore != enabledAfter) {
        // Pass in a nullptr cx to not bother recompiling for JSD1, since
        // we're still enforcing the idle-stack invariant here.
        if (!updateJITForDebugMode(nullptr, invalidate))
            return false;
        if (!enabledAfter)
            DebugScopes::onCompartmentLeaveDebugMode(this);
    }
    return true;
}

bool
JSCompartment::updateJITForDebugMode(JSContext *maybecx, AutoDebugModeInvalidation &invalidate)
{
#ifdef JS_ION
    // The AutoDebugModeInvalidation argument makes sure we can't forget to
    // invalidate, but it is also important not to run any scripts in this
    // compartment until the invalidate is destroyed. That is the caller's
    // responsibility.
    if (!jit::UpdateForDebugMode(maybecx, this, invalidate))
        return false;
#endif
    return true;
}

bool
JSCompartment::addDebuggee(JSContext *cx, js::GlobalObject *global)
{
    AutoDebugModeInvalidation invalidate(this);
    return addDebuggee(cx, global, invalidate);
}

bool
JSCompartment::addDebuggee(JSContext *cx,
                           GlobalObject *globalArg,
                           AutoDebugModeInvalidation &invalidate)
{
    Rooted<GlobalObject*> global(cx, globalArg);

    bool wasEnabled = debugMode();
    if (!debuggees.put(global)) {
        js_ReportOutOfMemory(cx);
        return false;
    }
    debugModeBits |= DebugFromJS;
    if (!wasEnabled && !updateJITForDebugMode(cx, invalidate))
        return false;
    return true;
}

bool
JSCompartment::removeDebuggee(JSContext *cx,
                              js::GlobalObject *global,
                              js::GlobalObjectSet::Enum *debuggeesEnum)
{
    AutoDebugModeInvalidation invalidate(this);
    return removeDebuggee(cx, global, invalidate, debuggeesEnum);
}

bool
JSCompartment::removeDebuggee(JSContext *cx,
                              js::GlobalObject *global,
                              AutoDebugModeInvalidation &invalidate,
                              js::GlobalObjectSet::Enum *debuggeesEnum)
{
    bool wasEnabled = debugMode();
    removeDebuggeeUnderGC(cx->runtime()->defaultFreeOp(), global, invalidate, debuggeesEnum);
    if (wasEnabled && !debugMode() && !updateJITForDebugMode(cx, invalidate))
        return false;
    return true;
}

void
JSCompartment::removeDebuggeeUnderGC(FreeOp *fop,
                                     js::GlobalObject *global,
                                     js::GlobalObjectSet::Enum *debuggeesEnum)
{
    AutoDebugModeInvalidation invalidate(this);
    removeDebuggeeUnderGC(fop, global, invalidate, debuggeesEnum);
}

void
JSCompartment::removeDebuggeeUnderGC(FreeOp *fop,
                                     js::GlobalObject *global,
                                     AutoDebugModeInvalidation &invalidate,
                                     js::GlobalObjectSet::Enum *debuggeesEnum)
{
    bool wasEnabled = debugMode();
    JS_ASSERT(debuggees.has(global));
    if (debuggeesEnum)
        debuggeesEnum->removeFront();
    else
        debuggees.remove(global);

    if (debuggees.empty()) {
        debugModeBits &= ~DebugFromJS;
        if (wasEnabled && !debugMode())
            DebugScopes::onCompartmentLeaveDebugMode(this);
    }
}

void
JSCompartment::clearBreakpointsIn(FreeOp *fop, js::Debugger *dbg, HandleObject handler)
{
    for (gc::CellIter i(zone(), gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
        JSScript *script = i.get<JSScript>();
        if (script->compartment() == this && script->hasAnyBreakpointsOrStepMode())
            script->clearBreakpointsIn(fop, dbg, handler);
    }
}

void
JSCompartment::clearTraps(FreeOp *fop)
{
    MinorGC(fop->runtime(), JS::gcreason::EVICT_NURSERY);
    for (gc::CellIter i(zone(), gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
        JSScript *script = i.get<JSScript>();
        if (script->compartment() == this && script->hasAnyBreakpointsOrStepMode())
            script->clearTraps(fop);
    }
}

void
JSCompartment::addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf,
                                      size_t *tiAllocationSiteTables,
                                      size_t *tiArrayTypeTables,
                                      size_t *tiObjectTypeTables,
                                      size_t *compartmentObject,
                                      size_t *shapesCompartmentTables,
                                      size_t *crossCompartmentWrappersArg,
                                      size_t *regexpCompartment,
                                      size_t *debuggeesSet,
                                      size_t *savedStacksSet)
{
    *compartmentObject += mallocSizeOf(this);
    types.addSizeOfExcludingThis(mallocSizeOf, tiAllocationSiteTables,
                                 tiArrayTypeTables, tiObjectTypeTables);
    *shapesCompartmentTables += baseShapes.sizeOfExcludingThis(mallocSizeOf)
                              + initialShapes.sizeOfExcludingThis(mallocSizeOf)
                              + newTypeObjects.sizeOfExcludingThis(mallocSizeOf)
                              + lazyTypeObjects.sizeOfExcludingThis(mallocSizeOf);
    *crossCompartmentWrappersArg += crossCompartmentWrappers.sizeOfExcludingThis(mallocSizeOf);
    *regexpCompartment += regExps.sizeOfExcludingThis(mallocSizeOf);
    *debuggeesSet += debuggees.sizeOfExcludingThis(mallocSizeOf);
    *savedStacksSet += savedStacks_.sizeOfExcludingThis(mallocSizeOf);
}
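
// Added commentary (not in the original file): the out-parameters above are
// accumulated ("+=") rather than assigned so a single set of counters can be
// summed across many compartments, and each measurement goes through the
// mallocSizeOf callback so reports reflect actual allocated block sizes rather
// than sizeof() estimates.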

void
JSCompartment::adoptWorkerAllocator(Allocator *workerAllocator)
{
    zone()->allocator.arenas.adoptArenas(runtimeFromMainThread(), &workerAllocator->arenas);
}