js/src/jit/Ion.cpp

changeset 0
6474c204b198
     1.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
     1.2 +++ b/js/src/jit/Ion.cpp	Wed Dec 31 06:09:35 2014 +0100
     1.3 @@ -0,0 +1,3157 @@
     1.4 +/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
     1.5 + * vim: set ts=8 sts=4 et sw=4 tw=99:
     1.6 + * This Source Code Form is subject to the terms of the Mozilla Public
     1.7 + * License, v. 2.0. If a copy of the MPL was not distributed with this
     1.8 + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
     1.9 +
    1.10 +#include "jit/Ion.h"
    1.11 +
    1.12 +#include "mozilla/MemoryReporting.h"
    1.13 +#include "mozilla/ThreadLocal.h"
    1.14 +
    1.15 +#include "jscompartment.h"
    1.16 +#include "jsprf.h"
    1.17 +#include "jsworkers.h"
    1.18 +
    1.19 +#include "gc/Marking.h"
    1.20 +#include "jit/AliasAnalysis.h"
    1.21 +#include "jit/AsmJSModule.h"
    1.22 +#include "jit/BacktrackingAllocator.h"
    1.23 +#include "jit/BaselineDebugModeOSR.h"
    1.24 +#include "jit/BaselineFrame.h"
    1.25 +#include "jit/BaselineInspector.h"
    1.26 +#include "jit/BaselineJIT.h"
    1.27 +#include "jit/CodeGenerator.h"
    1.28 +#include "jit/EdgeCaseAnalysis.h"
    1.29 +#include "jit/EffectiveAddressAnalysis.h"
    1.30 +#include "jit/IonAnalysis.h"
    1.31 +#include "jit/IonBuilder.h"
    1.32 +#include "jit/IonOptimizationLevels.h"
    1.33 +#include "jit/IonSpewer.h"
    1.34 +#include "jit/JitCommon.h"
    1.35 +#include "jit/JitCompartment.h"
    1.36 +#include "jit/LICM.h"
    1.37 +#include "jit/LinearScan.h"
    1.38 +#include "jit/LIR.h"
    1.39 +#include "jit/Lowering.h"
    1.40 +#include "jit/ParallelSafetyAnalysis.h"
    1.41 +#include "jit/PerfSpewer.h"
    1.42 +#include "jit/RangeAnalysis.h"
    1.43 +#include "jit/StupidAllocator.h"
    1.44 +#include "jit/UnreachableCodeElimination.h"
    1.45 +#include "jit/ValueNumbering.h"
    1.46 +#include "vm/ForkJoin.h"
    1.47 +#include "vm/TraceLogging.h"
    1.48 +
    1.49 +#include "jscompartmentinlines.h"
    1.50 +#include "jsgcinlines.h"
    1.51 +#include "jsinferinlines.h"
    1.52 +#include "jsobjinlines.h"
    1.53 +
    1.54 +#include "jit/ExecutionMode-inl.h"
    1.55 +
    1.56 +using namespace js;
    1.57 +using namespace js::jit;
    1.58 +
    1.59 +using mozilla::ThreadLocal;
    1.60 +
    1.61 +// Assert that JitCode is gc::Cell aligned.
    1.62 +JS_STATIC_ASSERT(sizeof(JitCode) % gc::CellSize == 0);
    1.63 +
    1.64 +static ThreadLocal<IonContext*> TlsIonContext;
    1.65 +
    1.66 +static IonContext *
    1.67 +CurrentIonContext()
    1.68 +{
    1.69 +    if (!TlsIonContext.initialized())
    1.70 +        return nullptr;
    1.71 +    return TlsIonContext.get();
    1.72 +}
    1.73 +
// Install |ctx| as this thread's current IonContext. A nullptr argument
// clears the slot (as done by ~IonContext when unwinding to no context).
void
jit::SetIonContext(IonContext *ctx)
{
    TlsIonContext.set(ctx);
}
    1.79 +
    1.80 +IonContext *
    1.81 +jit::GetIonContext()
    1.82 +{
    1.83 +    MOZ_ASSERT(CurrentIonContext());
    1.84 +    return CurrentIonContext();
    1.85 +}
    1.86 +
// Like GetIonContext(), but tolerant of there being no active context:
// returns nullptr instead of asserting.
IonContext *
jit::MaybeGetIonContext()
{
    return CurrentIonContext();
}
    1.92 +
// IonContext constructors push |this| onto an implicit per-thread stack of
// contexts: prev_ remembers whichever context was active before, and the
// destructor restores it. The variants differ only in how much information
// is available at construction time.

// Main-thread compilation: both the JSContext and its compartment are known.
IonContext::IonContext(JSContext *cx, TempAllocator *temp)
  : cx(cx),
    temp(temp),
    runtime(CompileRuntime::get(cx->runtime())),
    compartment(CompileCompartment::get(cx->compartment())),
    prev_(CurrentIonContext()),
    assemblerCount_(0)
{
    SetIonContext(this);
}

// ExclusiveContext compilation: no JSContext or compartment is recorded,
// only the owning runtime.
IonContext::IonContext(ExclusiveContext *cx, TempAllocator *temp)
  : cx(nullptr),
    temp(temp),
    runtime(CompileRuntime::get(cx->runtime_)),
    compartment(nullptr),
    prev_(CurrentIonContext()),
    assemblerCount_(0)
{
    SetIonContext(this);
}

// Compilation with explicit compile-runtime/compartment handles.
IonContext::IonContext(CompileRuntime *rt, CompileCompartment *comp, TempAllocator *temp)
  : cx(nullptr),
    temp(temp),
    runtime(rt),
    compartment(comp),
    prev_(CurrentIonContext()),
    assemblerCount_(0)
{
    SetIonContext(this);
}

// Runtime-only context: no allocator, context, or compartment.
IonContext::IonContext(CompileRuntime *rt)
  : cx(nullptr),
    temp(nullptr),
    runtime(rt),
    compartment(nullptr),
    prev_(CurrentIonContext()),
    assemblerCount_(0)
{
    SetIonContext(this);
}

// Pop this context: reinstall whichever context was active at construction.
IonContext::~IonContext()
{
    SetIonContext(prev_);
}
   1.141 +
   1.142 +bool
   1.143 +jit::InitializeIon()
   1.144 +{
   1.145 +    if (!TlsIonContext.initialized() && !TlsIonContext.init())
   1.146 +        return false;
   1.147 +    CheckLogging();
   1.148 +    CheckPerf();
   1.149 +    return true;
   1.150 +}
   1.151 +
// All stubs, allocators, and maps are created lazily (see initialize(),
// createIonAlloc(), debugTrapHandler(), ...); the constructor only
// null-initializes the members.
JitRuntime::JitRuntime()
  : execAlloc_(nullptr),
    ionAlloc_(nullptr),
    exceptionTail_(nullptr),
    bailoutTail_(nullptr),
    enterJIT_(nullptr),
    bailoutHandler_(nullptr),
    argumentsRectifier_(nullptr),
    argumentsRectifierReturnAddr_(nullptr),
    parallelArgumentsRectifier_(nullptr),
    invalidator_(nullptr),
    debugTrapHandler_(nullptr),
    forkJoinGetSliceStub_(nullptr),
    baselineDebugModeOSRHandler_(nullptr),
    functionWrappers_(nullptr),
    osrTempData_(nullptr),
    ionCodeProtected_(false)
{
}

// Release the heap-allocated members; js_delete/js_free tolerate members
// that were never created (still nullptr).
JitRuntime::~JitRuntime()
{
    js_delete(functionWrappers_);
    freeOsrTempData();

    // Note: The interrupt lock is not taken here, as JitRuntime is only
    // destroyed along with its containing JSRuntime.
    js_delete(ionAlloc_);
}
   1.181 +
// Generate all runtime-wide trampolines and stubs: exception/bailout tails,
// bailout tables and handler, the invalidator, argument rectifiers, the
// enter-JIT sequences, pre-barriers, and a wrapper per VM function. Must be
// called with exclusive access and the interrupt lock held; all code is
// emitted in the atoms compartment. Returns false on any failure.
bool
JitRuntime::initialize(JSContext *cx)
{
    JS_ASSERT(cx->runtime()->currentThreadHasExclusiveAccess());
    JS_ASSERT(cx->runtime()->currentThreadOwnsInterruptLock());

    // Runtime-wide code is emitted into the atoms compartment.
    AutoCompartment ac(cx, cx->atomsCompartment());

    IonContext ictx(cx, nullptr);

    execAlloc_ = cx->runtime()->getExecAlloc(cx);
    if (!execAlloc_)
        return false;

    if (!cx->compartment()->ensureJitCompartmentExists(cx))
        return false;

    // Map from VMFunction to its generated wrapper; queried by getVMWrapper().
    functionWrappers_ = cx->new_<VMWrapperMap>(cx);
    if (!functionWrappers_ || !functionWrappers_->init())
        return false;

    IonSpew(IonSpew_Codegen, "# Emitting exception tail stub");
    exceptionTail_ = generateExceptionTailStub(cx);
    if (!exceptionTail_)
        return false;

    IonSpew(IonSpew_Codegen, "# Emitting bailout tail stub");
    bailoutTail_ = generateBailoutTailStub(cx);
    if (!bailoutTail_)
        return false;

    if (cx->runtime()->jitSupportsFloatingPoint) {
        IonSpew(IonSpew_Codegen, "# Emitting bailout tables");

        // Initialize some Ion-only stubs that require floating-point support.
        if (!bailoutTables_.reserve(FrameSizeClass::ClassLimit().classId()))
            return false;

        // One bailout table per frame-size class, indexed by class id.
        for (uint32_t id = 0;; id++) {
            FrameSizeClass class_ = FrameSizeClass::FromClass(id);
            if (class_ == FrameSizeClass::ClassLimit())
                break;
            bailoutTables_.infallibleAppend((JitCode *)nullptr);
            bailoutTables_[id] = generateBailoutTable(cx, id);
            if (!bailoutTables_[id])
                return false;
        }

        IonSpew(IonSpew_Codegen, "# Emitting bailout handler");
        bailoutHandler_ = generateBailoutHandler(cx);
        if (!bailoutHandler_)
            return false;

        IonSpew(IonSpew_Codegen, "# Emitting invalidator");
        invalidator_ = generateInvalidator(cx);
        if (!invalidator_)
            return false;
    }

    IonSpew(IonSpew_Codegen, "# Emitting sequential arguments rectifier");
    argumentsRectifier_ = generateArgumentsRectifier(cx, SequentialExecution, &argumentsRectifierReturnAddr_);
    if (!argumentsRectifier_)
        return false;

#ifdef JS_THREADSAFE
    IonSpew(IonSpew_Codegen, "# Emitting parallel arguments rectifier");
    parallelArgumentsRectifier_ = generateArgumentsRectifier(cx, ParallelExecution, nullptr);
    if (!parallelArgumentsRectifier_)
        return false;
#endif

    IonSpew(IonSpew_Codegen, "# Emitting EnterJIT sequence");
    enterJIT_ = generateEnterJIT(cx, EnterJitOptimized);
    if (!enterJIT_)
        return false;

    IonSpew(IonSpew_Codegen, "# Emitting EnterBaselineJIT sequence");
    enterBaselineJIT_ = generateEnterJIT(cx, EnterJitBaseline);
    if (!enterBaselineJIT_)
        return false;

    IonSpew(IonSpew_Codegen, "# Emitting Pre Barrier for Value");
    valuePreBarrier_ = generatePreBarrier(cx, MIRType_Value);
    if (!valuePreBarrier_)
        return false;

    IonSpew(IonSpew_Codegen, "# Emitting Pre Barrier for Shape");
    shapePreBarrier_ = generatePreBarrier(cx, MIRType_Shape);
    if (!shapePreBarrier_)
        return false;

    IonSpew(IonSpew_Codegen, "# Emitting VM function wrappers");
    // Walk the global linked list of VM functions, emitting a wrapper for
    // each; the wrappers are stored in functionWrappers_.
    for (VMFunction *fun = VMFunction::functions; fun; fun = fun->next) {
        if (!generateVMWrapper(cx, *fun))
            return false;
    }

    return true;
}
   1.281 +
// Lazily generate (and cache) the debug trap handler stub. Returns nullptr
// if generation failed; a failed attempt will be retried on the next call
// since debugTrapHandler_ stays null.
JitCode *
JitRuntime::debugTrapHandler(JSContext *cx)
{
    if (!debugTrapHandler_) {
        // JitRuntime code stubs are shared across compartments and have to
        // be allocated in the atoms compartment.
        AutoLockForExclusiveAccess lock(cx);
        AutoCompartment ac(cx, cx->runtime()->atomsCompartment());
        debugTrapHandler_ = generateDebugTrapHandler(cx);
    }
    return debugTrapHandler_;
}
   1.294 +
   1.295 +bool
   1.296 +JitRuntime::ensureForkJoinGetSliceStubExists(JSContext *cx)
   1.297 +{
   1.298 +    if (!forkJoinGetSliceStub_) {
   1.299 +        IonSpew(IonSpew_Codegen, "# Emitting ForkJoinGetSlice stub");
   1.300 +        AutoLockForExclusiveAccess lock(cx);
   1.301 +        AutoCompartment ac(cx, cx->runtime()->atomsCompartment());
   1.302 +        forkJoinGetSliceStub_ = generateForkJoinGetSliceStub(cx);
   1.303 +    }
   1.304 +    return !!forkJoinGetSliceStub_;
   1.305 +}
   1.306 +
   1.307 +uint8_t *
   1.308 +JitRuntime::allocateOsrTempData(size_t size)
   1.309 +{
   1.310 +    osrTempData_ = (uint8_t *)js_realloc(osrTempData_, size);
   1.311 +    return osrTempData_;
   1.312 +}
   1.313 +
// Release the OSR scratch buffer (js_free accepts nullptr) and reset the
// pointer so a later allocateOsrTempData() starts fresh.
void
JitRuntime::freeOsrTempData()
{
    js_free(osrTempData_);
    osrTempData_ = nullptr;
}
   1.320 +
// Lazily create the executable allocator that backs Ion code. Reports OOM
// and returns nullptr on failure; the allocator is destroyed in
// ~JitRuntime.
JSC::ExecutableAllocator *
JitRuntime::createIonAlloc(JSContext *cx)
{
    JS_ASSERT(cx->runtime()->currentThreadOwnsInterruptLock());

    ionAlloc_ = js_new<JSC::ExecutableAllocator>();
    if (!ionAlloc_)
        js_ReportOutOfMemory(cx);
    return ionAlloc_;
}
   1.331 +
// Make all Ion code inaccessible so that the main thread faults the next
// time it enters any of it; the fault is resolved by
// handleAccessViolation(). No-op when signal handlers are unavailable,
// when the code is already protected, or when no Ion allocator exists yet.
void
JitRuntime::ensureIonCodeProtected(JSRuntime *rt)
{
    JS_ASSERT(rt->currentThreadOwnsInterruptLock());

    if (!rt->signalHandlersInstalled() || ionCodeProtected_ || !ionAlloc_)
        return;

    // Protect all Ion code in the runtime to trigger an access violation the
    // next time any of it runs on the main thread.
    ionAlloc_->toggleAllCodeAsAccessible(false);
    ionCodeProtected_ = true;
}
   1.345 +
// Signal-handler hook for faults inside protected Ion code. Returns true
// iff |faultingAddress| lies within Ion code owned by this runtime, after
// restoring access to that code (and patching backedges if an interrupt is
// pending); returns false so other handlers can deal with unrelated faults.
bool
JitRuntime::handleAccessViolation(JSRuntime *rt, void *faultingAddress)
{
    if (!rt->signalHandlersInstalled() || !ionAlloc_ || !ionAlloc_->codeContains((char *) faultingAddress))
        return false;

#ifdef JS_THREADSAFE
    // All places where the interrupt lock is taken must either ensure that Ion
    // code memory won't be accessed within, or call ensureIonCodeAccessible to
    // render the memory safe for accessing. Otherwise taking the lock below
    // will deadlock the process.
    JS_ASSERT(!rt->currentThreadOwnsInterruptLock());
#endif

    // Taking this lock is necessary to prevent the interrupting thread from marking
    // the memory as inaccessible while we are patching backedges. This will cause us
    // to SEGV while still inside the signal handler, and the process will terminate.
    JSRuntime::AutoLockForInterrupt lock(rt);

    // Ion code in the runtime faulted after it was made inaccessible. Reset
    // the code privileges and patch all loop backedges to perform an interrupt
    // check instead.
    ensureIonCodeAccessible(rt);
    return true;
}
   1.371 +
// Undo ensureIonCodeProtected(): restore access to all Ion code and, if an
// interrupt is pending, patch loop backedges so that running Ion code will
// reach the interrupt handler. Requires the interrupt lock.
void
JitRuntime::ensureIonCodeAccessible(JSRuntime *rt)
{
    JS_ASSERT(rt->currentThreadOwnsInterruptLock());

    // This can only be called on the main thread and while handling signals,
    // which happens on a separate thread in OS X.
#ifndef XP_MACOSX
    JS_ASSERT(CurrentThreadCanAccessRuntime(rt));
#endif

    if (ionCodeProtected_) {
        ionAlloc_->toggleAllCodeAsAccessible(true);
        ionCodeProtected_ = false;
    }

    if (rt->interrupt) {
        // The interrupt handler needs to be invoked by this thread, but we may
        // be inside a signal handler and have no idea what is above us on the
        // stack (probably we are executing Ion code at an arbitrary point, but
        // we could be elsewhere, say repatching a jump for an IonCache).
        // Patch all backedges in the runtime so they will invoke the interrupt
        // handler the next time they execute.
        patchIonBackedges(rt, BackedgeInterruptCheck);
    }
}
   1.398 +
   1.399 +void
   1.400 +JitRuntime::patchIonBackedges(JSRuntime *rt, BackedgeTarget target)
   1.401 +{
   1.402 +#ifndef XP_MACOSX
   1.403 +    JS_ASSERT(CurrentThreadCanAccessRuntime(rt));
   1.404 +#endif
   1.405 +
   1.406 +    // Patch all loop backedges in Ion code so that they either jump to the
   1.407 +    // normal loop header or to an interrupt handler each time they run.
   1.408 +    for (InlineListIterator<PatchableBackedge> iter(backedgeList_.begin());
   1.409 +         iter != backedgeList_.end();
   1.410 +         iter++)
   1.411 +    {
   1.412 +        PatchableBackedge *patchableBackedge = *iter;
   1.413 +        PatchJump(patchableBackedge->backedge, target == BackedgeLoopHeader
   1.414 +                                               ? patchableBackedge->loopHeader
   1.415 +                                               : patchableBackedge->interruptCheck);
   1.416 +    }
   1.417 +}
   1.418 +
// Make running Ion code observe a pending interrupt request. The mechanism
// depends on which thread is asking (see the per-case comments); requires
// the interrupt lock and is a no-op before the JitRuntime exists.
void
jit::RequestInterruptForIonCode(JSRuntime *rt, JSRuntime::InterruptMode mode)
{
    JitRuntime *jitRuntime = rt->jitRuntime();
    if (!jitRuntime)
        return;

    JS_ASSERT(rt->currentThreadOwnsInterruptLock());

    // The mechanism for interrupting normal ion code varies depending on how
    // the interrupt is being requested.
    switch (mode) {
      case JSRuntime::RequestInterruptMainThread:
        // When requesting an interrupt from the main thread, Ion loop
        // backedges can be patched directly. Make sure we don't segv while
        // patching the backedges, to avoid deadlocking inside the signal
        // handler.
        JS_ASSERT(CurrentThreadCanAccessRuntime(rt));
        jitRuntime->ensureIonCodeAccessible(rt);
        break;

      case JSRuntime::RequestInterruptAnyThread:
        // When requesting an interrupt from off the main thread, protect
        // Ion code memory so that the main thread will fault and enter a
        // signal handler when trying to execute the code. The signal
        // handler will unprotect the code and patch loop backedges so
        // that the interrupt handler is invoked afterwards.
        jitRuntime->ensureIonCodeProtected(rt);
        break;

      case JSRuntime::RequestInterruptAnyThreadDontStopIon:
      case JSRuntime::RequestInterruptAnyThreadForkJoin:
        // The caller does not require Ion code to be interrupted.
        // Nothing more needs to be done.
        break;

      default:
        MOZ_ASSUME_UNREACHABLE("Bad interrupt mode");
    }
}
   1.459 +
// Per-compartment JIT state. The stub-code map and the parallel entry
// script set are allocated lazily (initialize() and
// notifyOfActiveParallelEntryScript() respectively).
JitCompartment::JitCompartment()
  : stubCodes_(nullptr),
    baselineCallReturnFromIonAddr_(nullptr),
    baselineGetPropReturnFromIonAddr_(nullptr),
    baselineSetPropReturnFromIonAddr_(nullptr),
    baselineCallReturnFromStubAddr_(nullptr),
    baselineGetPropReturnFromStubAddr_(nullptr),
    baselineSetPropReturnFromStubAddr_(nullptr),
    stringConcatStub_(nullptr),
    parallelStringConcatStub_(nullptr),
    activeParallelEntryScripts_(nullptr)
{
}

// js_delete tolerates nullptr, so members that were never allocated are fine.
JitCompartment::~JitCompartment()
{
    js_delete(stubCodes_);
    js_delete(activeParallelEntryScripts_);
}
   1.479 +
   1.480 +bool
   1.481 +JitCompartment::initialize(JSContext *cx)
   1.482 +{
   1.483 +    stubCodes_ = cx->new_<ICStubCodeMap>(cx);
   1.484 +    if (!stubCodes_ || !stubCodes_->init())
   1.485 +        return false;
   1.486 +
   1.487 +    return true;
   1.488 +}
   1.489 +
// Lazily generate the string-concatenation stubs used by Ion (the parallel
// variant only in threadsafe builds). Already-generated stubs are kept;
// returns false if any generation fails.
bool
JitCompartment::ensureIonStubsExist(JSContext *cx)
{
    if (!stringConcatStub_) {
        stringConcatStub_ = generateStringConcatStub(cx, SequentialExecution);
        if (!stringConcatStub_)
            return false;
    }

#ifdef JS_THREADSAFE
    if (!parallelStringConcatStub_) {
        parallelStringConcatStub_ = generateStringConcatStub(cx, ParallelExecution);
        if (!parallelStringConcatStub_)
            return false;
    }
#endif

    return true;
}
   1.509 +
// Record |script| as an entry point into parallel execution so that its
// parallel IonScript can be preserved across GCs (see mark()). Also resets
// the script's parallel age on the fast path. Returns false on OOM.
bool
JitCompartment::notifyOfActiveParallelEntryScript(JSContext *cx, HandleScript script)
{
    // Fast path. The isParallelEntryScript bit guarantees that the script is
    // already in the set.
    if (script->parallelIonScript()->isParallelEntryScript()) {
        MOZ_ASSERT(activeParallelEntryScripts_ && activeParallelEntryScripts_->has(script));
        script->parallelIonScript()->resetParallelAge();
        return true;
    }

    // Lazily create the set on first use.
    if (!activeParallelEntryScripts_) {
        activeParallelEntryScripts_ = cx->new_<ScriptSet>(cx);
        if (!activeParallelEntryScripts_ || !activeParallelEntryScripts_->init())
            return false;
    }

    script->parallelIonScript()->setIsParallelEntryScript();
    ScriptSet::AddPtr p = activeParallelEntryScripts_->lookupForAdd(script);
    return p || activeParallelEntryScripts_->add(p, script);
}
   1.531 +
// Tear down an off-thread IonBuilder once its compilation has completed or
// been abandoned, releasing its LifoAlloc and any background codegen.
void
jit::FinishOffThreadBuilder(IonBuilder *builder)
{
    ExecutionMode executionMode = builder->info().executionMode();

    // Clear the recompiling flag of the old ionScript, since we continue to
    // use the old ionScript if recompiling fails.
    if (executionMode == SequentialExecution && builder->script()->hasIonScript())
        builder->script()->ionScript()->clearRecompiling();

    // Clean up if compilation did not succeed.
    if (CompilingOffThread(builder->script(), executionMode))
        SetIonScript(builder->script(), executionMode, nullptr);

    // The builder is allocated into its LifoAlloc, so destroying that will
    // destroy the builder and all other data accumulated during compilation,
    // except any final codegen (which includes an assembler and needs to be
    // explicitly destroyed).
    js_delete(builder->backgroundCodegen());
    js_delete(builder->alloc().lifoAlloc());
}
   1.553 +
// Drain the worker-thread "finished" list of any builders that belong to
// |comp|, destroying each one. No-op in non-threadsafe builds.
static inline void
FinishAllOffThreadCompilations(JSCompartment *comp)
{
#ifdef JS_THREADSAFE
    AutoLockWorkerThreadState lock;
    GlobalWorkerThreadState::IonBuilderVector &finished = WorkerThreadState().ionFinishedList();

    for (size_t i = 0; i < finished.length(); i++) {
        IonBuilder *builder = finished[i];
        if (builder->compartment == CompileCompartment::get(comp)) {
            FinishOffThreadBuilder(builder);
            // remove() is passed &i, presumably so it can fix up the loop
            // index after the element is removed and no entry is skipped —
            // confirm against GlobalWorkerThreadState::remove().
            WorkerThreadState().remove(finished, &i);
        }
    }
#endif
}
   1.570 +
// GC root-marking hook: trace every JitCode cell in the atoms zone (where
// all the runtime-wide stubs and VM wrappers live). Never called during a
// minor collection.
/* static */ void
JitRuntime::Mark(JSTracer *trc)
{
    JS_ASSERT(!trc->runtime()->isHeapMinorCollecting());
    Zone *zone = trc->runtime()->atomsCompartment()->zone();
    for (gc::CellIterUnderGC i(zone, gc::FINALIZE_JITCODE); !i.done(); i.next()) {
        JitCode *code = i.get<JitCode>();
        MarkJitCodeRoot(trc, &code, "wrapper");
    }
}
   1.581 +
// Per-compartment GC marking hook: cancels off-thread compilations, frees
// the OSR scratch buffer, and keeps alive (or prunes) scripts in the
// parallel-entry-script set.
void
JitCompartment::mark(JSTracer *trc, JSCompartment *compartment)
{
    // Cancel any active or pending off thread compilations. Note that the
    // MIR graph does not hold any nursery pointers, so there's no need to
    // do this for minor GCs.
    JS_ASSERT(!trc->runtime()->isHeapMinorCollecting());
    CancelOffThreadIonCompile(compartment, nullptr);
    FinishAllOffThreadCompilations(compartment);

    // Free temporary OSR buffer.
    trc->runtime()->jitRuntime()->freeOsrTempData();

    // Mark scripts with parallel IonScripts if we should preserve them.
    if (activeParallelEntryScripts_) {
        for (ScriptSet::Enum e(*activeParallelEntryScripts_); !e.empty(); e.popFront()) {
            JSScript *script = e.front();

            // If the script has since been invalidated or was attached by an
            // off-thread worker too late (i.e., the ForkJoin finished with
            // warmup doing all the work), remove it.
            if (!script->hasParallelIonScript() ||
                !script->parallelIonScript()->isParallelEntryScript())
            {
                e.removeFront();
                continue;
            }

            // Check and increment the age. If the script is below the max
            // age, mark it.
            //
            // Subtlety: We depend on the tracing of the parallel IonScript's
            // callTargetEntries to propagate the parallel age to the entire
            // call graph.
            if (ShouldPreserveParallelJITCode(trc->runtime(), script, /* increase = */ true)) {
                MarkScript(trc, const_cast<EncapsulatedPtrScript *>(&e.front()), "par-script");
                MOZ_ASSERT(script == e.front());
            }
        }
    }
}
   1.623 +
// Per-compartment GC sweeping hook: sweep dead IC stub code, drop cached
// baseline return addresses whose fallback stubs were collected, clear
// unmarked concat stubs, and prune dead scripts from the parallel entry set.
void
JitCompartment::sweep(FreeOp *fop)
{
    stubCodes_->sweep(fop);

    // If the sweep removed the ICCall_Fallback stub, nullptr the baselineCallReturnAddr_ field.
    if (!stubCodes_->lookup(static_cast<uint32_t>(ICStub::Call_Fallback))) {
        baselineCallReturnFromIonAddr_ = nullptr;
        baselineCallReturnFromStubAddr_ = nullptr;
    }
    // Similarly for the ICGetProp_Fallback stub.
    if (!stubCodes_->lookup(static_cast<uint32_t>(ICStub::GetProp_Fallback))) {
        baselineGetPropReturnFromIonAddr_ = nullptr;
        baselineGetPropReturnFromStubAddr_ = nullptr;
    }
    // And the ICSetProp_Fallback stub.
    if (!stubCodes_->lookup(static_cast<uint32_t>(ICStub::SetProp_Fallback))) {
        baselineSetPropReturnFromIonAddr_ = nullptr;
        baselineSetPropReturnFromStubAddr_ = nullptr;
    }

    if (stringConcatStub_ && !IsJitCodeMarked(stringConcatStub_.unsafeGet()))
        stringConcatStub_ = nullptr;

    if (parallelStringConcatStub_ && !IsJitCodeMarked(parallelStringConcatStub_.unsafeGet()))
        parallelStringConcatStub_ = nullptr;

    if (activeParallelEntryScripts_) {
        for (ScriptSet::Enum e(*activeParallelEntryScripts_); !e.empty(); e.popFront()) {
            JSScript *script = e.front();
            if (!IsScriptMarked(&script))
                e.removeFront();
            else
                MOZ_ASSERT(script == e.front());
        }
    }
}
   1.660 +
// Look up the pre-generated bailout table for |frameClass| (generated in
// initialize()); only valid for real frame-size classes, not None.
JitCode *
JitRuntime::getBailoutTable(const FrameSizeClass &frameClass) const
{
    JS_ASSERT(frameClass != FrameSizeClass::None());
    return bailoutTables_[frameClass.classId()];
}

// Look up the trampoline generated for VM function |f| during initialize().
// The wrapper must already exist; uses a read-only threadsafe lookup so it
// can be called off the main thread.
JitCode *
JitRuntime::getVMWrapper(const VMFunction &f) const
{
    JS_ASSERT(functionWrappers_);
    JS_ASSERT(functionWrappers_->initialized());
    JitRuntime::VMWrapperMap::Ptr p = functionWrappers_->readonlyThreadsafeLookup(&f);
    JS_ASSERT(p);

    return p->value();
}
   1.678 +
// Allocate a JitCode GC-thing describing the executable buffer |code|. If
// allocating the GC-thing fails, the pool reference is released here so the
// caller does not leak it, and nullptr is returned.
template <AllowGC allowGC>
JitCode *
JitCode::New(JSContext *cx, uint8_t *code, uint32_t bufferSize, uint32_t headerSize,
             JSC::ExecutablePool *pool, JSC::CodeKind kind)
{
    JitCode *codeObj = js::NewJitCode<allowGC>(cx);
    if (!codeObj) {
        pool->release(headerSize + bufferSize, kind);
        return nullptr;
    }

    new (codeObj) JitCode(code, bufferSize, headerSize, pool, kind);
    return codeObj;
}

// Explicit instantiations for both GC-allocation policies.
template
JitCode *
JitCode::New<CanGC>(JSContext *cx, uint8_t *code, uint32_t bufferSize, uint32_t headerSize,
                    JSC::ExecutablePool *pool, JSC::CodeKind kind);

template
JitCode *
JitCode::New<NoGC>(JSContext *cx, uint8_t *code, uint32_t bufferSize, uint32_t headerSize,
                   JSC::ExecutablePool *pool, JSC::CodeKind kind);
   1.703 +
// Copy the assembled instructions and all side tables (jump/data relocation
// and pre-barrier tables) out of |masm| into this object's code buffer, then
// resolve code labels.
void
JitCode::copyFrom(MacroAssembler &masm)
{
    // Store the JitCode pointer right before the code buffer, so we can
    // recover the gcthing from relocation tables.
    *(JitCode **)(code_ - sizeof(JitCode *)) = this;
    insnSize_ = masm.instructionsSize();
    masm.executableCopy(code_);

    jumpRelocTableBytes_ = masm.jumpRelocationTableBytes();
    masm.copyJumpRelocationTable(code_ + jumpRelocTableOffset());

    dataRelocTableBytes_ = masm.dataRelocationTableBytes();
    masm.copyDataRelocationTable(code_ + dataRelocTableOffset());

    preBarrierTableBytes_ = masm.preBarrierTableBytes();
    masm.copyPreBarrierTable(code_ + preBarrierTableOffset());

    masm.processCodeLabels(code_);
}
   1.724 +
// Trace GC things referenced from the compiled code, found via the jump and
// data relocation tables copied in copyFrom().
void
JitCode::trace(JSTracer *trc)
{
    // Note that we cannot mark invalidated scripts, since we've basically
    // corrupted the code stream by injecting bailouts.
    if (invalidated())
        return;

    if (jumpRelocTableBytes_) {
        uint8_t *start = code_ + jumpRelocTableOffset();
        CompactBufferReader reader(start, start + jumpRelocTableBytes_);
        MacroAssembler::TraceJumpRelocations(trc, this, reader);
    }
    if (dataRelocTableBytes_) {
        uint8_t *start = code_ + dataRelocTableOffset();
        CompactBufferReader reader(start, start + dataRelocTableBytes_);
        MacroAssembler::TraceDataRelocations(trc, this, reader);
    }
}
   1.744 +
// GC finalizer: poison the code buffer (when safe) and release this code's
// reference on its executable pool.
void
JitCode::finalize(FreeOp *fop)
{
    // Make sure this can't race with an interrupting thread, which may try
    // to read the contents of the pool we are releasing references in.
    JS_ASSERT(fop->runtime()->currentThreadOwnsInterruptLock());

    // Buffer can be freed at any time hereafter. Catch use-after-free bugs.
    // Don't do this if the Ion code is protected, as the signal handler will
    // deadlock trying to reacquire the interrupt lock.
    if (fop->runtime()->jitRuntime() && !fop->runtime()->jitRuntime()->ionCodeProtected())
        memset(code_, JS_SWEPT_CODE_PATTERN, bufferSize_);
    code_ = nullptr;

    // Code buffers are stored inside JSC pools.
    // Pools are refcounted. Releasing the pool may free it.
    if (pool_) {
        // Horrible hack: if we are using perf integration, we don't
        // want to reuse code addresses, so we just leak the memory instead.
        if (!PerfEnabled())
            pool_->release(headerSize_ + bufferSize_, JSC::CodeKind(kind_));
        pool_ = nullptr;
    }
}
   1.769 +
   1.770 +void
   1.771 +JitCode::togglePreBarriers(bool enabled)
   1.772 +{
   1.773 +    uint8_t *start = code_ + preBarrierTableOffset();
   1.774 +    CompactBufferReader reader(start, start + preBarrierTableBytes_);
   1.775 +
   1.776 +    while (reader.more()) {
   1.777 +        size_t offset = reader.readUnsigned();
   1.778 +        CodeLocationLabel loc(this, offset);
   1.779 +        if (enabled)
   1.780 +            Assembler::ToggleToCmp(loc);
   1.781 +        else
   1.782 +            Assembler::ToggleToJmp(loc);
   1.783 +    }
   1.784 +}
   1.785 +
// Zero/null-initialize every field of a fresh IonScript. The offsets and
// entry counts of the trailing data tables are filled in afterwards by
// IonScript::New, which placement-news an IonScript at the head of a
// single allocation containing all of its tables.
IonScript::IonScript()
  : method_(nullptr),
    deoptTable_(nullptr),
    osrPc_(nullptr),
    osrEntryOffset_(0),
    skipArgCheckEntryOffset_(0),
    invalidateEpilogueOffset_(0),
    invalidateEpilogueDataOffset_(0),
    numBailouts_(0),
    hasUncompiledCallTarget_(false),
    isParallelEntryScript_(false),
    hasSPSInstrumentation_(false),
    recompiling_(false),
    runtimeData_(0),
    runtimeSize_(0),
    cacheIndex_(0),
    cacheEntries_(0),
    safepointIndexOffset_(0),
    safepointIndexEntries_(0),
    safepointsStart_(0),
    safepointsSize_(0),
    frameSlots_(0),
    frameSize_(0),
    bailoutTable_(0),
    bailoutEntries_(0),
    osiIndexOffset_(0),
    osiIndexEntries_(0),
    snapshots_(0),
    snapshotsListSize_(0),
    snapshotsRVATableSize_(0),
    constantTable_(0),
    constantEntries_(0),
    callTargetList_(0),
    callTargetEntries_(0),
    backedgeList_(0),
    backedgeEntries_(0),
    refcount_(0),
    parallelAge_(0),
    recompileInfo_(),
    osrPcMismatchCounter_(0),
    dependentAsmJSModules(nullptr)
{
}
   1.829 +
// Allocate and initialize an IonScript. Every variable-length table
// (runtime data, caches, safepoints, bailouts, OSI indices, snapshots,
// recovers, constants, call targets, patchable backedges) lives in one
// allocation immediately following the IonScript object itself; each
// table is recorded as an offset from |this| plus an entry count/size.
// Returns nullptr (with an OOM report) on failure.
IonScript *
IonScript::New(JSContext *cx, types::RecompileInfo recompileInfo,
               uint32_t frameSlots, uint32_t frameSize,
               size_t snapshotsListSize, size_t snapshotsRVATableSize,
               size_t recoversSize, size_t bailoutEntries,
               size_t constants, size_t safepointIndices,
               size_t osiIndices, size_t cacheEntries,
               size_t runtimeSize,  size_t safepointsSize,
               size_t callTargetEntries, size_t backedgeEntries,
               OptimizationLevel optimizationLevel)
{
    static const int DataAlignment = sizeof(void *);

    // Guard the two sizes an attacker-controlled script can grow without
    // bound; the padded-total computation below assumes no overflow.
    if (snapshotsListSize >= MAX_BUFFER_SIZE ||
        (bailoutEntries >= MAX_BUFFER_SIZE / sizeof(uint32_t)))
    {
        js_ReportOutOfMemory(cx);
        return nullptr;
    }

    // This should not overflow on x86, because the memory is already allocated
    // *somewhere* and if their total overflowed there would be no memory left
    // at all.
    size_t paddedSnapshotsSize = AlignBytes(snapshotsListSize + snapshotsRVATableSize, DataAlignment);
    size_t paddedRecoversSize = AlignBytes(recoversSize, DataAlignment);
    size_t paddedBailoutSize = AlignBytes(bailoutEntries * sizeof(uint32_t), DataAlignment);
    size_t paddedConstantsSize = AlignBytes(constants * sizeof(Value), DataAlignment);
    size_t paddedSafepointIndicesSize = AlignBytes(safepointIndices * sizeof(SafepointIndex), DataAlignment);
    size_t paddedOsiIndicesSize = AlignBytes(osiIndices * sizeof(OsiIndex), DataAlignment);
    size_t paddedCacheEntriesSize = AlignBytes(cacheEntries * sizeof(uint32_t), DataAlignment);
    size_t paddedRuntimeSize = AlignBytes(runtimeSize, DataAlignment);
    size_t paddedSafepointSize = AlignBytes(safepointsSize, DataAlignment);
    size_t paddedCallTargetSize = AlignBytes(callTargetEntries * sizeof(JSScript *), DataAlignment);
    size_t paddedBackedgeSize = AlignBytes(backedgeEntries * sizeof(PatchableBackedge), DataAlignment);
    size_t bytes = paddedSnapshotsSize +
                   paddedRecoversSize +
                   paddedBailoutSize +
                   paddedConstantsSize +
                   paddedSafepointIndicesSize+
                   paddedOsiIndicesSize +
                   paddedCacheEntriesSize +
                   paddedRuntimeSize +
                   paddedSafepointSize +
                   paddedCallTargetSize +
                   paddedBackedgeSize;
    uint8_t *buffer = (uint8_t *)cx->malloc_(sizeof(IonScript) + bytes);
    if (!buffer)
        return nullptr;

    IonScript *script = reinterpret_cast<IonScript *>(buffer);
    new (script) IonScript();

    // Carve the trailing buffer into tables. The cursor walks forward by
    // each table's padded size, so every table starts DataAlignment-aligned.
    uint32_t offsetCursor = sizeof(IonScript);

    script->runtimeData_ = offsetCursor;
    script->runtimeSize_ = runtimeSize;
    offsetCursor += paddedRuntimeSize;

    script->cacheIndex_ = offsetCursor;
    script->cacheEntries_ = cacheEntries;
    offsetCursor += paddedCacheEntriesSize;

    script->safepointIndexOffset_ = offsetCursor;
    script->safepointIndexEntries_ = safepointIndices;
    offsetCursor += paddedSafepointIndicesSize;

    script->safepointsStart_ = offsetCursor;
    script->safepointsSize_ = safepointsSize;
    offsetCursor += paddedSafepointSize;

    script->bailoutTable_ = offsetCursor;
    script->bailoutEntries_ = bailoutEntries;
    offsetCursor += paddedBailoutSize;

    script->osiIndexOffset_ = offsetCursor;
    script->osiIndexEntries_ = osiIndices;
    offsetCursor += paddedOsiIndicesSize;

    script->snapshots_ = offsetCursor;
    script->snapshotsListSize_ = snapshotsListSize;
    script->snapshotsRVATableSize_ = snapshotsRVATableSize;
    offsetCursor += paddedSnapshotsSize;

    script->recovers_ = offsetCursor;
    script->recoversSize_ = recoversSize;
    offsetCursor += paddedRecoversSize;

    script->constantTable_ = offsetCursor;
    script->constantEntries_ = constants;
    offsetCursor += paddedConstantsSize;

    script->callTargetList_ = offsetCursor;
    script->callTargetEntries_ = callTargetEntries;
    offsetCursor += paddedCallTargetSize;

    script->backedgeList_ = offsetCursor;
    script->backedgeEntries_ = backedgeEntries;
    offsetCursor += paddedBackedgeSize;

    script->frameSlots_ = frameSlots;
    script->frameSize_ = frameSize;

    script->recompileInfo_ = recompileInfo;
    script->optimizationLevel_ = optimizationLevel;

    return script;
}
   1.937 +
// Trace the GC things reachable from this IonScript: its code objects,
// the constant pool, and the call target scripts.
void
IonScript::trace(JSTracer *trc)
{
    if (method_)
        MarkJitCode(trc, &method_, "method");

    if (deoptTable_)
        MarkJitCode(trc, &deoptTable_, "deoptimizationTable");

    for (size_t i = 0; i < numConstants(); i++)
        gc::MarkValue(trc, &getConstant(i), "constant");

    // No write barrier is needed for the call target list, as it's attached
    // at compilation time and is read only.
    for (size_t i = 0; i < callTargetEntries(); i++) {
        // Propagate the parallelAge to the call targets.
        if (callTargetList()[i]->hasParallelIonScript())
            callTargetList()[i]->parallelIonScript()->parallelAge_ = parallelAge_;

        gc::MarkScriptUnbarriered(trc, &callTargetList()[i], "callTarget");
    }
}
   1.960 +
// Incremental-GC pre-barrier: if |zone| is currently being marked
// incrementally, trace |ionScript|'s GC references with the zone's barrier
// tracer so nothing reachable from it is missed before it is mutated or
// discarded. Compiled out entirely when incremental GC is disabled.
/* static */ void
IonScript::writeBarrierPre(Zone *zone, IonScript *ionScript)
{
#ifdef JSGC_INCREMENTAL
    if (zone->needsBarrier())
        ionScript->trace(zone->barrierTracer());
#endif
}
   1.969 +
// Copy the snapshot list and the RVA (recover-value-assignment) table from
// the writer into the buffer reserved by IonScript::New. The two streams
// are laid out back-to-back starting at offset snapshots_.
void
IonScript::copySnapshots(const SnapshotWriter *writer)
{
    MOZ_ASSERT(writer->listSize() == snapshotsListSize_);
    memcpy((uint8_t *)this + snapshots_,
           writer->listBuffer(), snapshotsListSize_);

    MOZ_ASSERT(snapshotsRVATableSize_);
    MOZ_ASSERT(writer->RVATableSize() == snapshotsRVATableSize_);
    memcpy((uint8_t *)this + snapshots_ + snapshotsListSize_,
           writer->RVATableBuffer(), snapshotsRVATableSize_);
}
   1.982 +
   1.983 +void
   1.984 +IonScript::copyRecovers(const RecoverWriter *writer)
   1.985 +{
   1.986 +    MOZ_ASSERT(writer->size() == recoversSize_);
   1.987 +    memcpy((uint8_t *)this + recovers_, writer->buffer(), recoversSize_);
   1.988 +}
   1.989 +
   1.990 +void
   1.991 +IonScript::copySafepoints(const SafepointWriter *writer)
   1.992 +{
   1.993 +    JS_ASSERT(writer->size() == safepointsSize_);
   1.994 +    memcpy((uint8_t *)this + safepointsStart_, writer->buffer(), safepointsSize_);
   1.995 +}
   1.996 +
// Copy the bailout table: one 32-bit snapshot offset per bailout entry.
void
IonScript::copyBailoutTable(const SnapshotOffset *table)
{
    memcpy(bailoutTable(), table, bailoutEntries_ * sizeof(uint32_t));
}
  1.1002 +
  1.1003 +void
  1.1004 +IonScript::copyConstants(const Value *vp)
  1.1005 +{
  1.1006 +    for (size_t i = 0; i < constantEntries_; i++)
  1.1007 +        constants()[i].init(vp[i]);
  1.1008 +}
  1.1009 +
  1.1010 +void
  1.1011 +IonScript::copyCallTargetEntries(JSScript **callTargets)
  1.1012 +{
  1.1013 +    for (size_t i = 0; i < callTargetEntries_; i++)
  1.1014 +        callTargetList()[i] = callTargets[i];
  1.1015 +}
  1.1016 +
// Construct this script's PatchableBackedge entries from the compiler's
// backedge info, patch each backedge jump to its initial target, and
// register the entries with the runtime so interrupts can repoint them.
void
IonScript::copyPatchableBackedges(JSContext *cx, JitCode *code,
                                  PatchableBackedgeInfo *backedges)
{
    for (size_t i = 0; i < backedgeEntries_; i++) {
        const PatchableBackedgeInfo &info = backedges[i];
        PatchableBackedge *patchableBackedge = &backedgeList()[i];

        // Placement-new into the raw storage reserved by IonScript::New.
        CodeLocationJump backedge(code, info.backedge);
        CodeLocationLabel loopHeader(code, CodeOffsetLabel(info.loopHeader->offset()));
        CodeLocationLabel interruptCheck(code, CodeOffsetLabel(info.interruptCheck->offset()));
        new(patchableBackedge) PatchableBackedge(backedge, loopHeader, interruptCheck);

        // Point the backedge to either of its possible targets, according to
        // whether an interrupt is currently desired, matching the targets
        // established by ensureIonCodeAccessible() above. We don't handle the
        // interrupt immediately as the interrupt lock is held here.
        PatchJump(backedge, cx->runtime()->interrupt ? interruptCheck : loopHeader);

        cx->runtime()->jitRuntime()->addPatchableBackedge(patchableBackedge);
    }
}
  1.1039 +
void
IonScript::copySafepointIndices(const SafepointIndex *si, MacroAssembler &masm)
{
    // Copy the safepoint index table, then fix up each entry: the recorded
    // displacements are offsets within the assembler's buffer, so translate
    // them through masm.actualOffset() to the final code layout.
    // (The previous comment here about "jumps in the caches" was copied
    // from copyCacheEntries and did not describe this function.)
    SafepointIndex *table = safepointIndices();
    memcpy(table, si, safepointIndexEntries_ * sizeof(SafepointIndex));
    for (size_t i = 0; i < safepointIndexEntries_; i++)
        table[i].adjustDisplacement(masm.actualOffset(table[i].displacement()));
}
  1.1051 +
  1.1052 +void
  1.1053 +IonScript::copyOsiIndices(const OsiIndex *oi, MacroAssembler &masm)
  1.1054 +{
  1.1055 +    memcpy(osiIndices(), oi, osiIndexEntries_ * sizeof(OsiIndex));
  1.1056 +    for (unsigned i = 0; i < osiIndexEntries_; i++)
  1.1057 +        osiIndices()[i].fixUpOffset(masm);
  1.1058 +}
  1.1059 +
// Raw byte copy into the runtime data area reserved by IonScript::New.
void
IonScript::copyRuntimeData(const uint8_t *data)
{
    memcpy(runtimeData(), data, runtimeSize());
}
  1.1065 +
// Copy the table of inline-cache offsets, then rebase each IC on the final
// address of the compiled method.
void
IonScript::copyCacheEntries(const uint32_t *caches, MacroAssembler &masm)
{
    memcpy(cacheIndex(), caches, numCaches() * sizeof(uint32_t));

    // Jumps in the caches reflect the offset of those jumps in the compiled
    // code, not the absolute positions of the jumps. Update according to the
    // final code address now.
    for (size_t i = 0; i < numCaches(); i++)
        getCacheFromIndex(i).updateBaseAddress(method_, masm);
}
  1.1077 +
  1.1078 +const SafepointIndex *
  1.1079 +IonScript::getSafepointIndex(uint32_t disp) const
  1.1080 +{
  1.1081 +    JS_ASSERT(safepointIndexEntries_ > 0);
  1.1082 +
  1.1083 +    const SafepointIndex *table = safepointIndices();
  1.1084 +    if (safepointIndexEntries_ == 1) {
  1.1085 +        JS_ASSERT(disp == table[0].displacement());
  1.1086 +        return &table[0];
  1.1087 +    }
  1.1088 +
  1.1089 +    size_t minEntry = 0;
  1.1090 +    size_t maxEntry = safepointIndexEntries_ - 1;
  1.1091 +    uint32_t min = table[minEntry].displacement();
  1.1092 +    uint32_t max = table[maxEntry].displacement();
  1.1093 +
  1.1094 +    // Raise if the element is not in the list.
  1.1095 +    JS_ASSERT(min <= disp && disp <= max);
  1.1096 +
  1.1097 +    // Approximate the location of the FrameInfo.
  1.1098 +    size_t guess = (disp - min) * (maxEntry - minEntry) / (max - min) + minEntry;
  1.1099 +    uint32_t guessDisp = table[guess].displacement();
  1.1100 +
  1.1101 +    if (table[guess].displacement() == disp)
  1.1102 +        return &table[guess];
  1.1103 +
  1.1104 +    // Doing a linear scan from the guess should be more efficient in case of
  1.1105 +    // small group which are equally distributed on the code.
  1.1106 +    //
  1.1107 +    // such as:  <...      ...    ...  ...  .   ...    ...>
  1.1108 +    if (guessDisp > disp) {
  1.1109 +        while (--guess >= minEntry) {
  1.1110 +            guessDisp = table[guess].displacement();
  1.1111 +            JS_ASSERT(guessDisp >= disp);
  1.1112 +            if (guessDisp == disp)
  1.1113 +                return &table[guess];
  1.1114 +        }
  1.1115 +    } else {
  1.1116 +        while (++guess <= maxEntry) {
  1.1117 +            guessDisp = table[guess].displacement();
  1.1118 +            JS_ASSERT(guessDisp <= disp);
  1.1119 +            if (guessDisp == disp)
  1.1120 +                return &table[guess];
  1.1121 +        }
  1.1122 +    }
  1.1123 +
  1.1124 +    MOZ_ASSUME_UNREACHABLE("displacement not found.");
  1.1125 +}
  1.1126 +
  1.1127 +const OsiIndex *
  1.1128 +IonScript::getOsiIndex(uint32_t disp) const
  1.1129 +{
  1.1130 +    for (const OsiIndex *it = osiIndices(), *end = osiIndices() + osiIndexEntries_;
  1.1131 +         it != end;
  1.1132 +         ++it)
  1.1133 +    {
  1.1134 +        if (it->returnPointDisplacement() == disp)
  1.1135 +            return it;
  1.1136 +    }
  1.1137 +
  1.1138 +    MOZ_ASSUME_UNREACHABLE("Failed to find OSI point return address");
  1.1139 +}
  1.1140 +
// Map a return address inside this script's compiled code to its OSI entry
// by converting the absolute address into a displacement from the start of
// the method's code.
const OsiIndex *
IonScript::getOsiIndex(uint8_t *retAddr) const
{
    IonSpew(IonSpew_Invalidate, "IonScript %p has method %p raw %p", (void *) this, (void *)
            method(), method()->raw());

    JS_ASSERT(containsCodeAddress(retAddr));
    uint32_t disp = retAddr - method()->raw();
    return getOsiIndex(disp);
}
  1.1151 +
  1.1152 +void
  1.1153 +IonScript::Trace(JSTracer *trc, IonScript *script)
  1.1154 +{
  1.1155 +    if (script != ION_DISABLED_SCRIPT)
  1.1156 +        script->trace(trc);
  1.1157 +}
  1.1158 +
// Free an IonScript. Order matters: tear down the inline caches and detach
// all runtime-held references (asm.js exits, patchable backedges) before
// releasing the memory itself.
void
IonScript::Destroy(FreeOp *fop, IonScript *script)
{
    script->destroyCaches();
    script->unlinkFromRuntime(fop);
    fop->free_(script);
}
  1.1166 +
// Forward to the method's JitCode, which patches every recorded
// pre-barrier site in the compiled code.
void
IonScript::toggleBarriers(bool enabled)
{
    method()->togglePreBarriers(enabled);
}
  1.1172 +
  1.1173 +void
  1.1174 +IonScript::purgeCaches()
  1.1175 +{
  1.1176 +    // Don't reset any ICs if we're invalidated, otherwise, repointing the
  1.1177 +    // inline jump could overwrite an invalidation marker. These ICs can
  1.1178 +    // no longer run, however, the IC slow paths may be active on the stack.
  1.1179 +    // ICs therefore are required to check for invalidation before patching,
  1.1180 +    // to ensure the same invariant.
  1.1181 +    if (invalidated())
  1.1182 +        return;
  1.1183 +
  1.1184 +    for (size_t i = 0; i < numCaches(); i++)
  1.1185 +        getCacheFromIndex(i).reset();
  1.1186 +}
  1.1187 +
  1.1188 +void
  1.1189 +IonScript::destroyCaches()
  1.1190 +{
  1.1191 +    for (size_t i = 0; i < numCaches(); i++)
  1.1192 +        getCacheFromIndex(i).destroy();
  1.1193 +}
  1.1194 +
  1.1195 +bool
  1.1196 +IonScript::addDependentAsmJSModule(JSContext *cx, DependentAsmJSModuleExit exit)
  1.1197 +{
  1.1198 +    if (!dependentAsmJSModules) {
  1.1199 +        dependentAsmJSModules = cx->new_<Vector<DependentAsmJSModuleExit> >(cx);
  1.1200 +        if (!dependentAsmJSModules)
  1.1201 +            return false;
  1.1202 +    }
  1.1203 +    return dependentAsmJSModules->append(exit);
  1.1204 +}
  1.1205 +
// Detach this IonScript from all runtime-level data structures that point
// into its code: dependent asm.js exits and patchable backedges. Safe to
// call more than once (see the note on backedgeEntries_ below).
void
IonScript::unlinkFromRuntime(FreeOp *fop)
{
    // Remove any links from AsmJSModules that contain optimized FFI calls into
    // this IonScript.
    if (dependentAsmJSModules) {
        for (size_t i = 0; i < dependentAsmJSModules->length(); i++) {
            DependentAsmJSModuleExit exit = dependentAsmJSModules->begin()[i];
            exit.module->detachIonCompilation(exit.exitIndex);
        }

        fop->delete_(dependentAsmJSModules);
        dependentAsmJSModules = nullptr;
    }

    // The writes to the executable buffer below may clobber backedge jumps, so
    // make sure that those backedges are unlinked from the runtime and not
    // reclobbered with garbage if an interrupt is requested.
    JSRuntime *rt = fop->runtime();
    for (size_t i = 0; i < backedgeEntries_; i++) {
        PatchableBackedge *backedge = &backedgeList()[i];
        rt->jitRuntime()->removePatchableBackedge(backedge);
    }

    // Clear the list of backedges, so that this method is idempotent. It is
    // called during destruction, and may be additionally called when the
    // script is invalidated.
    backedgeEntries_ = 0;
}
  1.1235 +
// Toggle incremental-GC pre-barrier patch sites in all JIT code belonging
// to |zone|: Ion scripts, Baseline scripts, and baseline stubs.
void
jit::ToggleBarriers(JS::Zone *zone, bool needs)
{
    JSRuntime *rt = zone->runtimeFromMainThread();
    // No JIT runtime means no compiled code to patch.
    if (!rt->hasJitRuntime())
        return;

    for (gc::CellIterUnderGC i(zone, gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
        JSScript *script = i.get<JSScript>();
        if (script->hasIonScript())
            script->ionScript()->toggleBarriers(needs);
        if (script->hasBaselineScript())
            script->baselineScript()->toggleBarriers(needs);
    }

    for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
        if (comp->jitCompartment())
            comp->jitCompartment()->toggleBaselineStubBarriers(needs);
    }
}
  1.1256 +
  1.1257 +namespace js {
  1.1258 +namespace jit {
  1.1259 +
// Run the complete MIR optimization pipeline over |mir|'s graph.
//
// Returns false on failure (OOM) or when the compilation is cancelled;
// every pass is followed by a shouldCancel() check so off-thread
// compilations can be aborted promptly. Pass ordering is load-bearing —
// see the inline comments (e.g. alias analysis before GVN/LICM, check
// elimination after all code motion).
bool
OptimizeMIR(MIRGenerator *mir)
{
    MIRGraph &graph = mir->graph();

    // Pick the trace logger for whichever thread this compilation runs on.
    TraceLogger *logger;
    if (GetIonContext()->runtime->onMainThread())
        logger = TraceLoggerForMainThread(GetIonContext()->runtime);
    else
        logger = TraceLoggerForCurrentThread();

    if (!mir->compilingAsmJS()) {
        if (!MakeMRegExpHoistable(graph))
            return false;
    }

    IonSpewPass("BuildSSA");
    AssertBasicGraphCoherency(graph);

    if (mir->shouldCancel("Start"))
        return false;

    {
        AutoTraceLog log(logger, TraceLogger::SplitCriticalEdges);
        if (!SplitCriticalEdges(graph))
            return false;
        IonSpewPass("Split Critical Edges");
        AssertGraphCoherency(graph);

        if (mir->shouldCancel("Split Critical Edges"))
            return false;
    }

    {
        AutoTraceLog log(logger, TraceLogger::RenumberBlocks);
        if (!RenumberBlocks(graph))
            return false;
        IonSpewPass("Renumber Blocks");
        AssertGraphCoherency(graph);

        if (mir->shouldCancel("Renumber Blocks"))
            return false;
    }

    {
        AutoTraceLog log(logger, TraceLogger::DominatorTree);
        if (!BuildDominatorTree(graph))
            return false;
        // No spew: graph not changed.

        if (mir->shouldCancel("Dominator Tree"))
            return false;
    }

    {
        AutoTraceLog log(logger, TraceLogger::PhiAnalysis);
        // Aggressive phi elimination must occur before any code elimination. If the
        // script contains a try-statement, we only compiled the try block and not
        // the catch or finally blocks, so in this case it's also invalid to use
        // aggressive phi elimination.
        Observability observability = graph.hasTryBlock()
                                      ? ConservativeObservability
                                      : AggressiveObservability;
        if (!EliminatePhis(mir, graph, observability))
            return false;
        IonSpewPass("Eliminate phis");
        AssertGraphCoherency(graph);

        if (mir->shouldCancel("Eliminate phis"))
            return false;

        if (!BuildPhiReverseMapping(graph))
            return false;
        AssertExtendedGraphCoherency(graph);
        // No spew: graph not changed.

        if (mir->shouldCancel("Phi reverse mapping"))
            return false;
    }

    if (!mir->compilingAsmJS()) {
        AutoTraceLog log(logger, TraceLogger::ApplyTypes);
        if (!ApplyTypeInformation(mir, graph))
            return false;
        IonSpewPass("Apply types");
        AssertExtendedGraphCoherency(graph);

        if (mir->shouldCancel("Apply types"))
            return false;
    }

    // Parallel (PJS) compilations get an extra safety analysis pass.
    if (graph.entryBlock()->info().executionMode() == ParallelExecution) {
        AutoTraceLog log(logger, TraceLogger::ParallelSafetyAnalysis);
        ParallelSafetyAnalysis analysis(mir, graph);
        if (!analysis.analyze())
            return false;
    }

    // Alias analysis is required for LICM and GVN so that we don't move
    // loads across stores.
    if (mir->optimizationInfo().licmEnabled() ||
        mir->optimizationInfo().gvnEnabled())
    {
        AutoTraceLog log(logger, TraceLogger::AliasAnalysis);
        AliasAnalysis analysis(mir, graph);
        if (!analysis.analyze())
            return false;
        IonSpewPass("Alias analysis");
        AssertExtendedGraphCoherency(graph);

        if (mir->shouldCancel("Alias analysis"))
            return false;

        // Eliminating dead resume point operands requires basic block
        // instructions to be numbered. Reuse the numbering computed during
        // alias analysis.
        if (!EliminateDeadResumePointOperands(mir, graph))
            return false;

        if (mir->shouldCancel("Eliminate dead resume point operands"))
            return false;
    }

    if (mir->optimizationInfo().gvnEnabled()) {
        AutoTraceLog log(logger, TraceLogger::GVN);
        ValueNumberer gvn(mir, graph, mir->optimizationInfo().gvnKind() == GVN_Optimistic);
        if (!gvn.analyze())
            return false;
        IonSpewPass("GVN");
        AssertExtendedGraphCoherency(graph);

        if (mir->shouldCancel("GVN"))
            return false;
    }

    if (mir->optimizationInfo().uceEnabled()) {
        AutoTraceLog log(logger, TraceLogger::UCE);
        UnreachableCodeElimination uce(mir, graph);
        if (!uce.analyze())
            return false;
        IonSpewPass("UCE");
        AssertExtendedGraphCoherency(graph);

        if (mir->shouldCancel("UCE"))
            return false;
    }

    if (mir->optimizationInfo().licmEnabled()) {
        AutoTraceLog log(logger, TraceLogger::LICM);
        // LICM can hoist instructions from conditional branches and trigger
        // repeated bailouts. Disable it if this script is known to bailout
        // frequently.
        JSScript *script = mir->info().script();
        if (!script || !script->hadFrequentBailouts()) {
            LICM licm(mir, graph);
            if (!licm.analyze())
                return false;
            IonSpewPass("LICM");
            AssertExtendedGraphCoherency(graph);

            if (mir->shouldCancel("LICM"))
                return false;
        }
    }

    if (mir->optimizationInfo().rangeAnalysisEnabled()) {
        AutoTraceLog log(logger, TraceLogger::RangeAnalysis);
        // Beta nodes pin range information onto branch conditions; they are
        // inserted, used by the analysis, and then removed again below.
        RangeAnalysis r(mir, graph);
        if (!r.addBetaNodes())
            return false;
        IonSpewPass("Beta");
        AssertExtendedGraphCoherency(graph);

        if (mir->shouldCancel("RA Beta"))
            return false;

        if (!r.analyze() || !r.addRangeAssertions())
            return false;
        IonSpewPass("Range Analysis");
        AssertExtendedGraphCoherency(graph);

        if (mir->shouldCancel("Range Analysis"))
            return false;

        if (!r.removeBetaNodes())
            return false;
        IonSpewPass("De-Beta");
        AssertExtendedGraphCoherency(graph);

        if (mir->shouldCancel("RA De-Beta"))
            return false;

        if (mir->optimizationInfo().uceEnabled()) {
            // Range analysis may have proven some branches dead; rerun UCE
            // if it asks for it.
            bool shouldRunUCE = false;
            if (!r.prepareForUCE(&shouldRunUCE))
                return false;
            IonSpewPass("RA check UCE");
            AssertExtendedGraphCoherency(graph);

            if (mir->shouldCancel("RA check UCE"))
                return false;

            if (shouldRunUCE) {
                UnreachableCodeElimination uce(mir, graph);
                uce.disableAliasAnalysis();
                if (!uce.analyze())
                    return false;
                IonSpewPass("UCE After RA");
                AssertExtendedGraphCoherency(graph);

                if (mir->shouldCancel("UCE After RA"))
                    return false;
            }
        }

        if (!r.truncate())
            return false;
        IonSpewPass("Truncate Doubles");
        AssertExtendedGraphCoherency(graph);

        if (mir->shouldCancel("Truncate Doubles"))
            return false;
    }

    if (mir->optimizationInfo().eaaEnabled()) {
        AutoTraceLog log(logger, TraceLogger::EffectiveAddressAnalysis);
        EffectiveAddressAnalysis eaa(graph);
        if (!eaa.analyze())
            return false;
        IonSpewPass("Effective Address Analysis");
        AssertExtendedGraphCoherency(graph);

        if (mir->shouldCancel("Effective Address Analysis"))
            return false;
    }

    {
        AutoTraceLog log(logger, TraceLogger::EliminateDeadCode);
        if (!EliminateDeadCode(mir, graph))
            return false;
        IonSpewPass("DCE");
        AssertExtendedGraphCoherency(graph);

        if (mir->shouldCancel("DCE"))
            return false;
    }

    // Passes after this point must not move instructions; these analyses
    // depend on knowing the final order in which instructions will execute.

    if (mir->optimizationInfo().edgeCaseAnalysisEnabled()) {
        AutoTraceLog log(logger, TraceLogger::EdgeCaseAnalysis);
        EdgeCaseAnalysis edgeCaseAnalysis(mir, graph);
        if (!edgeCaseAnalysis.analyzeLate())
            return false;
        IonSpewPass("Edge Case Analysis (Late)");
        AssertGraphCoherency(graph);

        if (mir->shouldCancel("Edge Case Analysis (Late)"))
            return false;
    }

    if (mir->optimizationInfo().eliminateRedundantChecksEnabled()) {
        AutoTraceLog log(logger, TraceLogger::EliminateRedundantChecks);
        // Note: check elimination has to run after all other passes that move
        // instructions. Since check uses are replaced with the actual index,
        // code motion after this pass could incorrectly move a load or store
        // before its bounds check.
        if (!EliminateRedundantChecks(graph))
            return false;
        IonSpewPass("Bounds Check Elimination");
        AssertGraphCoherency(graph);
    }

    return true;
}
  1.1535 +
  1.1536 +LIRGraph *
  1.1537 +GenerateLIR(MIRGenerator *mir)
  1.1538 +{
  1.1539 +    MIRGraph &graph = mir->graph();
  1.1540 +
  1.1541 +    LIRGraph *lir = mir->alloc().lifoAlloc()->new_<LIRGraph>(&graph);
  1.1542 +    if (!lir || !lir->init())
  1.1543 +        return nullptr;
  1.1544 +
  1.1545 +    LIRGenerator lirgen(mir, graph, *lir);
  1.1546 +    if (!lirgen.generate())
  1.1547 +        return nullptr;
  1.1548 +    IonSpewPass("Generate LIR");
  1.1549 +
  1.1550 +    if (mir->shouldCancel("Generate LIR"))
  1.1551 +        return nullptr;
  1.1552 +
  1.1553 +    AllocationIntegrityState integrity(*lir);
  1.1554 +
  1.1555 +    switch (mir->optimizationInfo().registerAllocator()) {
  1.1556 +      case RegisterAllocator_LSRA: {
  1.1557 +#ifdef DEBUG
  1.1558 +        if (!integrity.record())
  1.1559 +            return nullptr;
  1.1560 +#endif
  1.1561 +
  1.1562 +        LinearScanAllocator regalloc(mir, &lirgen, *lir);
  1.1563 +        if (!regalloc.go())
  1.1564 +            return nullptr;
  1.1565 +
  1.1566 +#ifdef DEBUG
  1.1567 +        if (!integrity.check(false))
  1.1568 +            return nullptr;
  1.1569 +#endif
  1.1570 +
  1.1571 +        IonSpewPass("Allocate Registers [LSRA]", &regalloc);
  1.1572 +        break;
  1.1573 +      }
  1.1574 +
  1.1575 +      case RegisterAllocator_Backtracking: {
  1.1576 +#ifdef DEBUG
  1.1577 +        if (!integrity.record())
  1.1578 +            return nullptr;
  1.1579 +#endif
  1.1580 +
  1.1581 +        BacktrackingAllocator regalloc(mir, &lirgen, *lir);
  1.1582 +        if (!regalloc.go())
  1.1583 +            return nullptr;
  1.1584 +
  1.1585 +#ifdef DEBUG
  1.1586 +        if (!integrity.check(false))
  1.1587 +            return nullptr;
  1.1588 +#endif
  1.1589 +
  1.1590 +        IonSpewPass("Allocate Registers [Backtracking]");
  1.1591 +        break;
  1.1592 +      }
  1.1593 +
  1.1594 +      case RegisterAllocator_Stupid: {
  1.1595 +        // Use the integrity checker to populate safepoint information, so
  1.1596 +        // run it in all builds.
  1.1597 +        if (!integrity.record())
  1.1598 +            return nullptr;
  1.1599 +
  1.1600 +        StupidAllocator regalloc(mir, &lirgen, *lir);
  1.1601 +        if (!regalloc.go())
  1.1602 +            return nullptr;
  1.1603 +        if (!integrity.check(true))
  1.1604 +            return nullptr;
  1.1605 +        IonSpewPass("Allocate Registers [Stupid]");
  1.1606 +        break;
  1.1607 +      }
  1.1608 +
  1.1609 +      default:
  1.1610 +        MOZ_ASSUME_UNREACHABLE("Bad regalloc");
  1.1611 +    }
  1.1612 +
  1.1613 +    if (mir->shouldCancel("Allocate Registers"))
  1.1614 +        return nullptr;
  1.1615 +
  1.1616 +    // Now that all optimization and register allocation is done, re-introduce
  1.1617 +    // critical edges to avoid unnecessary jumps.
  1.1618 +    if (!UnsplitEdges(lir))
  1.1619 +        return nullptr;
  1.1620 +    IonSpewPass("Unsplit Critical Edges");
  1.1621 +    AssertBasicGraphCoherency(graph);
  1.1622 +
  1.1623 +    return lir;
  1.1624 +}
  1.1625 +
  1.1626 +CodeGenerator *
  1.1627 +GenerateCode(MIRGenerator *mir, LIRGraph *lir)
  1.1628 +{
  1.1629 +    CodeGenerator *codegen = js_new<CodeGenerator>(mir, lir);
  1.1630 +    if (!codegen)
  1.1631 +        return nullptr;
  1.1632 +
  1.1633 +    if (!codegen->generate()) {
  1.1634 +        js_delete(codegen);
  1.1635 +        return nullptr;
  1.1636 +    }
  1.1637 +
  1.1638 +    return codegen;
  1.1639 +}
  1.1640 +
  1.1641 +CodeGenerator *
  1.1642 +CompileBackEnd(MIRGenerator *mir)
  1.1643 +{
  1.1644 +    if (!OptimizeMIR(mir))
  1.1645 +        return nullptr;
  1.1646 +
  1.1647 +    LIRGraph *lir = GenerateLIR(mir);
  1.1648 +    if (!lir)
  1.1649 +        return nullptr;
  1.1650 +
  1.1651 +    return GenerateCode(mir, lir);
  1.1652 +}
  1.1653 +
// Link any off thread Ion compilations belonging to |cx|'s compartment that
// have finished (successfully or not) into their scripts. Runs on the main
// thread; the worker-thread lock is held while scanning the finished-builder
// list and released around the actual linking.
void
AttachFinishedCompilations(JSContext *cx)
{
#ifdef JS_THREADSAFE
    JitCompartment *ion = cx->compartment()->jitCompartment();
    if (!ion)
        return;

    types::AutoEnterAnalysis enterTypes(cx);
    AutoLockWorkerThreadState lock;

    GlobalWorkerThreadState::IonBuilderVector &finished = WorkerThreadState().ionFinishedList();

    TraceLogger *logger = TraceLoggerForMainThread(cx->runtime());

    // Incorporate any off thread compilations for the compartment which have
    // finished, failed or have been cancelled.
    while (true) {
        IonBuilder *builder = nullptr;

        // Find a finished builder for the compartment.
        for (size_t i = 0; i < finished.length(); i++) {
            IonBuilder *testBuilder = finished[i];
            if (testBuilder->compartment == CompileCompartment::get(cx->compartment())) {
                builder = testBuilder;
                // NOTE(review): remove() takes |i| by pointer -- presumably
                // it adjusts the index for the removal; confirm in
                // GlobalWorkerThreadState.
                WorkerThreadState().remove(finished, &i);
                break;
            }
        }
        if (!builder)
            break;

        // A builder without a code generator produced no code (the compile
        // failed or was cancelled); it is still finished below.
        if (CodeGenerator *codegen = builder->backgroundCodegen()) {
            RootedScript script(cx, builder->script());
            IonContext ictx(cx, &builder->alloc());
            AutoTraceLog logScript(logger, TraceLogCreateTextId(logger, script));
            AutoTraceLog logLink(logger, TraceLogger::IonLinking);

            // Root the assembler until the builder is finished below. As it
            // was constructed off thread, the assembler has not been rooted
            // previously, though any GC activity would discard the builder.
            codegen->masm.constructRoot(cx);

            bool success;
            {
                // Release the worker thread lock and root the compiler for GC.
                AutoTempAllocatorRooter root(cx, &builder->alloc());
                AutoUnlockWorkerThreadState unlock;
                success = codegen->link(cx, builder->constraints());
            }

            if (!success) {
                // Silently ignore OOM during code generation. The caller is
                // InvokeInterruptCallback, which always runs at a
                // nondeterministic time. It's not OK to throw a catchable
                // exception from there.
                cx->clearPendingException();
            }
        }

        FinishOffThreadBuilder(builder);
    }
#endif
}
  1.1718 +
// Primary chunk size (4 KiB) for the LifoAlloc that backs each Ion
// compilation (see IonCompile).
static const size_t BUILDER_LIFO_ALLOC_PRIMARY_CHUNK_SIZE = 1 << 12;
  1.1720 +
  1.1721 +static inline bool
  1.1722 +OffThreadCompilationAvailable(JSContext *cx)
  1.1723 +{
  1.1724 +#ifdef JS_THREADSAFE
  1.1725 +    // Even if off thread compilation is enabled, compilation must still occur
  1.1726 +    // on the main thread in some cases. Do not compile off thread during an
  1.1727 +    // incremental GC, as this may trip incremental read barriers.
  1.1728 +    //
  1.1729 +    // Require cpuCount > 1 so that Ion compilation jobs and main-thread
  1.1730 +    // execution are not competing for the same resources.
  1.1731 +    //
  1.1732 +    // Skip off thread compilation if PC count profiling is enabled, as
  1.1733 +    // CodeGenerator::maybeCreateScriptCounts will not attach script profiles
  1.1734 +    // when running off thread.
  1.1735 +    return cx->runtime()->canUseParallelIonCompilation()
  1.1736 +        && WorkerThreadState().cpuCount > 1
  1.1737 +        && cx->runtime()->gcIncrementalState == gc::NO_INCREMENTAL
  1.1738 +        && !cx->runtime()->profilingScripts;
  1.1739 +#else
  1.1740 +    return false;
  1.1741 +#endif
  1.1742 +}
  1.1743 +
  1.1744 +static void
  1.1745 +TrackAllProperties(JSContext *cx, JSObject *obj)
  1.1746 +{
  1.1747 +    JS_ASSERT(obj->hasSingletonType());
  1.1748 +
  1.1749 +    for (Shape::Range<NoGC> range(obj->lastProperty()); !range.empty(); range.popFront())
  1.1750 +        types::EnsureTrackPropertyTypes(cx, obj, range.front().propid());
  1.1751 +}
  1.1752 +
  1.1753 +static void
  1.1754 +TrackPropertiesForSingletonScopes(JSContext *cx, JSScript *script, BaselineFrame *baselineFrame)
  1.1755 +{
  1.1756 +    // Ensure that all properties of singleton call objects which the script
  1.1757 +    // could access are tracked. These are generally accessed through
  1.1758 +    // ALIASEDVAR operations in baseline and will not be tracked even if they
  1.1759 +    // have been accessed in baseline code.
  1.1760 +    JSObject *environment = script->functionNonDelazifying()
  1.1761 +                            ? script->functionNonDelazifying()->environment()
  1.1762 +                            : nullptr;
  1.1763 +
  1.1764 +    while (environment && !environment->is<GlobalObject>()) {
  1.1765 +        if (environment->is<CallObject>() && environment->hasSingletonType())
  1.1766 +            TrackAllProperties(cx, environment);
  1.1767 +        environment = environment->enclosingScope();
  1.1768 +    }
  1.1769 +
  1.1770 +    if (baselineFrame) {
  1.1771 +        JSObject *scope = baselineFrame->scopeChain();
  1.1772 +        if (scope->is<CallObject>() && scope->hasSingletonType())
  1.1773 +            TrackAllProperties(cx, scope);
  1.1774 +    }
  1.1775 +}
  1.1776 +
// Build MIR for |script| and compile it to native code. When off thread
// compilation is available, the back end is handed to a worker thread and
// this returns AbortReason_NoAbort immediately; otherwise the whole pipeline
// runs here. Any other return value means this call produced no usable code.
static AbortReason
IonCompile(JSContext *cx, JSScript *script,
           BaselineFrame *baselineFrame, jsbytecode *osrPc, bool constructing,
           ExecutionMode executionMode, bool recompile,
           OptimizationLevel optimizationLevel)
{
    TraceLogger *logger = TraceLoggerForMainThread(cx->runtime());
    AutoTraceLog logScript(logger, TraceLogCreateTextId(logger, script));
    AutoTraceLog logCompile(logger, TraceLogger::IonCompilation);

    JS_ASSERT(optimizationLevel > Optimization_DontCompile);

    // Make sure the script's canonical function isn't lazy. We can't de-lazify
    // it in a worker thread.
    script->ensureNonLazyCanonicalFunction(cx);

    TrackPropertiesForSingletonScopes(cx, script, baselineFrame);

    // All compilation data lives in this LifoAlloc, so deleting it tears the
    // whole compilation down (see autoDelete below and its forget() in the
    // off-thread path).
    LifoAlloc *alloc = cx->new_<LifoAlloc>(BUILDER_LIFO_ALLOC_PRIMARY_CHUNK_SIZE);
    if (!alloc)
        return AbortReason_Alloc;

    ScopedJSDeletePtr<LifoAlloc> autoDelete(alloc);

    TempAllocator *temp = alloc->new_<TempAllocator>(alloc);
    if (!temp)
        return AbortReason_Alloc;

    IonContext ictx(cx, temp);

    types::AutoEnterAnalysis enter(cx);

    if (!cx->compartment()->ensureJitCompartmentExists(cx))
        return AbortReason_Alloc;

    if (!cx->compartment()->jitCompartment()->ensureIonStubsExist(cx))
        return AbortReason_Alloc;

    // Parallel compilations may inline ForkJoinGetSlice, which requires a
    // runtime-wide stub to exist before lowering.
    if (executionMode == ParallelExecution &&
        LIRGenerator::allowInlineForkJoinGetSlice() &&
        !cx->runtime()->jitRuntime()->ensureForkJoinGetSliceStubExists(cx))
    {
        return AbortReason_Alloc;
    }

    MIRGraph *graph = alloc->new_<MIRGraph>(temp);
    if (!graph)
        return AbortReason_Alloc;

    CompileInfo *info = alloc->new_<CompileInfo>(script, script->functionNonDelazifying(), osrPc,
                                                 constructing, executionMode,
                                                 script->needsArgsObj());
    if (!info)
        return AbortReason_Alloc;

    BaselineInspector *inspector = alloc->new_<BaselineInspector>(script);
    if (!inspector)
        return AbortReason_Alloc;

    // Only available when compiling for OSR from a baseline frame.
    BaselineFrameInspector *baselineFrameInspector = nullptr;
    if (baselineFrame) {
        baselineFrameInspector = NewBaselineFrameInspector(temp, baselineFrame, info);
        if (!baselineFrameInspector)
            return AbortReason_Alloc;
    }

    AutoTempAllocatorRooter root(cx, temp);
    types::CompilerConstraintList *constraints = types::NewCompilerConstraintList(*temp);
    if (!constraints)
        return AbortReason_Alloc;

    const OptimizationInfo *optimizationInfo = js_IonOptimizations.get(optimizationLevel);
    const JitCompileOptions options(cx);

    // The builder is given a null JSContext so that the back end, which may
    // run on a worker thread below, cannot accidentally use |cx|.
    IonBuilder *builder = alloc->new_<IonBuilder>((JSContext *) nullptr,
                                                  CompileCompartment::get(cx->compartment()),
                                                  options, temp, graph, constraints,
                                                  inspector, info, optimizationInfo,
                                                  baselineFrameInspector);
    if (!builder)
        return AbortReason_Alloc;

    JS_ASSERT(recompile == HasIonScript(builder->script(), executionMode));
    JS_ASSERT(CanIonCompile(builder->script(), executionMode));

    RootedScript builderScript(cx, builder->script());

    if (recompile) {
        JS_ASSERT(executionMode == SequentialExecution);
        builderScript->ionScript()->setRecompiling();
    }

    IonSpewNewFunction(graph, builderScript);

    // MIR construction itself always happens on this thread.
    bool succeeded = builder->build();
    builder->clearForBackEnd();

    if (!succeeded)
        return builder->abortReason();

    // If possible, compile the script off thread.
    if (OffThreadCompilationAvailable(cx)) {
        if (!recompile)
            SetIonScript(builderScript, executionMode, ION_COMPILING_SCRIPT);

        IonSpew(IonSpew_Logs, "Can't log script %s:%d. (Compiled on background thread.)",
                              builderScript->filename(), builderScript->lineno());

        if (!StartOffThreadIonCompile(cx, builder)) {
            IonSpew(IonSpew_Abort, "Unable to start off-thread ion compilation.");
            return AbortReason_Alloc;
        }

        // The allocator and associated data will be destroyed after being
        // processed in the finishedOffThreadCompilations list.
        autoDelete.forget();

        return AbortReason_NoAbort;
    }

    // Off thread compilation is unavailable: run the back end and link the
    // result right here.
    ScopedJSDeletePtr<CodeGenerator> codegen(CompileBackEnd(builder));
    if (!codegen) {
        IonSpew(IonSpew_Abort, "Failed during back-end compilation.");
        return AbortReason_Disable;
    }

    bool success = codegen->link(cx, builder->constraints());

    IonSpewEndFunction();

    return success ? AbortReason_NoAbort : AbortReason_Disable;
}
  1.1909 +
  1.1910 +static bool
  1.1911 +CheckFrame(BaselineFrame *frame)
  1.1912 +{
  1.1913 +    JS_ASSERT(!frame->isGeneratorFrame());
  1.1914 +    JS_ASSERT(!frame->isDebuggerFrame());
  1.1915 +
  1.1916 +    // This check is to not overrun the stack.
  1.1917 +    if (frame->isFunctionFrame() && TooManyArguments(frame->numActualArgs())) {
  1.1918 +        IonSpew(IonSpew_Abort, "too many actual args");
  1.1919 +        return false;
  1.1920 +    }
  1.1921 +
  1.1922 +    return true;
  1.1923 +}
  1.1924 +
  1.1925 +static bool
  1.1926 +CheckScript(JSContext *cx, JSScript *script, bool osr)
  1.1927 +{
  1.1928 +    if (script->isForEval()) {
  1.1929 +        // Eval frames are not yet supported. Supporting this will require new
  1.1930 +        // logic in pushBailoutFrame to deal with linking prev.
  1.1931 +        // Additionally, JSOP_DEFVAR support will require baking in isEvalFrame().
  1.1932 +        IonSpew(IonSpew_Abort, "eval script");
  1.1933 +        return false;
  1.1934 +    }
  1.1935 +
  1.1936 +    if (!script->compileAndGo()) {
  1.1937 +        IonSpew(IonSpew_Abort, "not compile-and-go");
  1.1938 +        return false;
  1.1939 +    }
  1.1940 +
  1.1941 +    return true;
  1.1942 +}
  1.1943 +
  1.1944 +static MethodStatus
  1.1945 +CheckScriptSize(JSContext *cx, JSScript* script)
  1.1946 +{
  1.1947 +    if (!js_JitOptions.limitScriptSize)
  1.1948 +        return Method_Compiled;
  1.1949 +
  1.1950 +    if (script->length() > MAX_OFF_THREAD_SCRIPT_SIZE) {
  1.1951 +        // Some scripts are so large we never try to Ion compile them.
  1.1952 +        IonSpew(IonSpew_Abort, "Script too large (%u bytes)", script->length());
  1.1953 +        return Method_CantCompile;
  1.1954 +    }
  1.1955 +
  1.1956 +    uint32_t numLocalsAndArgs = analyze::TotalSlots(script);
  1.1957 +    if (cx->runtime()->isWorkerRuntime()) {
  1.1958 +        // DOM Workers don't have off thread compilation enabled. Since workers
  1.1959 +        // don't block the browser's event loop, allow them to compile larger
  1.1960 +        // scripts.
  1.1961 +        JS_ASSERT(!cx->runtime()->canUseParallelIonCompilation());
  1.1962 +
  1.1963 +        if (script->length() > MAX_DOM_WORKER_SCRIPT_SIZE ||
  1.1964 +            numLocalsAndArgs > MAX_DOM_WORKER_LOCALS_AND_ARGS)
  1.1965 +        {
  1.1966 +            return Method_CantCompile;
  1.1967 +        }
  1.1968 +
  1.1969 +        return Method_Compiled;
  1.1970 +    }
  1.1971 +
  1.1972 +    if (script->length() > MAX_MAIN_THREAD_SCRIPT_SIZE ||
  1.1973 +        numLocalsAndArgs > MAX_MAIN_THREAD_LOCALS_AND_ARGS)
  1.1974 +    {
  1.1975 +#ifdef JS_THREADSAFE
  1.1976 +        size_t cpuCount = WorkerThreadState().cpuCount;
  1.1977 +#else
  1.1978 +        size_t cpuCount = 1;
  1.1979 +#endif
  1.1980 +        if (cx->runtime()->canUseParallelIonCompilation() && cpuCount > 1) {
  1.1981 +            // Even if off thread compilation is enabled, there are cases where
  1.1982 +            // compilation must still occur on the main thread. Don't compile
  1.1983 +            // in these cases (except when profiling scripts, as compilations
  1.1984 +            // occurring with profiling should reflect those without), but do
  1.1985 +            // not forbid compilation so that the script may be compiled later.
  1.1986 +            if (!OffThreadCompilationAvailable(cx) && !cx->runtime()->profilingScripts) {
  1.1987 +                IonSpew(IonSpew_Abort,
  1.1988 +                        "Script too large for main thread, skipping (%u bytes) (%u locals/args)",
  1.1989 +                        script->length(), numLocalsAndArgs);
  1.1990 +                return Method_Skipped;
  1.1991 +            }
  1.1992 +        } else {
  1.1993 +            IonSpew(IonSpew_Abort, "Script too large (%u bytes) (%u locals/args)",
  1.1994 +                    script->length(), numLocalsAndArgs);
  1.1995 +            return Method_CantCompile;
  1.1996 +        }
  1.1997 +    }
  1.1998 +
  1.1999 +    return Method_Compiled;
  1.2000 +}
  1.2001 +
  1.2002 +bool
  1.2003 +CanIonCompileScript(JSContext *cx, JSScript *script, bool osr)
  1.2004 +{
  1.2005 +    if (!script->canIonCompile() || !CheckScript(cx, script, osr))
  1.2006 +        return false;
  1.2007 +
  1.2008 +    return CheckScriptSize(cx, script) == Method_Compiled;
  1.2009 +}
  1.2010 +
  1.2011 +static OptimizationLevel
  1.2012 +GetOptimizationLevel(HandleScript script, jsbytecode *pc, ExecutionMode executionMode)
  1.2013 +{
  1.2014 +    if (executionMode == ParallelExecution)
  1.2015 +        return Optimization_Normal;
  1.2016 +
  1.2017 +    JS_ASSERT(executionMode == SequentialExecution);
  1.2018 +
  1.2019 +    return js_IonOptimizations.levelForScript(script, pc);
  1.2020 +}
  1.2021 +
// Decide whether to (re)compile |script| for |executionMode| and, if so, run
// IonCompile. Returns:
//  - Method_Compiled    if a usable IonScript (with a matching |osrPc|, when
//    one is given) exists once this call returns;
//  - Method_Skipped     if compilation was deferred (started off thread, the
//    optimization level says "not yet", or the OSR pc mismatches);
//  - Method_CantCompile if the script cannot be Ion compiled;
//  - Method_Error       on error (OOM is reported here before returning).
static MethodStatus
Compile(JSContext *cx, HandleScript script, BaselineFrame *osrFrame, jsbytecode *osrPc,
        bool constructing, ExecutionMode executionMode)
{
    JS_ASSERT(jit::IsIonEnabled(cx));
    JS_ASSERT(jit::IsBaselineEnabled(cx));
    JS_ASSERT_IF(osrPc != nullptr, LoopEntryCanIonOsr(osrPc));
    JS_ASSERT_IF(executionMode == ParallelExecution, !osrFrame && !osrPc);
    JS_ASSERT_IF(executionMode == ParallelExecution, !HasIonScript(script, executionMode));

    // Ion compilation requires an existing baseline script (see the
    // BaselineInspector use in IonCompile).
    if (!script->hasBaselineScript())
        return Method_Skipped;

    if (cx->compartment()->debugMode()) {
        IonSpew(IonSpew_Abort, "debugging");
        return Method_CantCompile;
    }

    if (!CheckScript(cx, script, bool(osrPc))) {
        IonSpew(IonSpew_Abort, "Aborted compilation of %s:%d", script->filename(), script->lineno());
        return Method_CantCompile;
    }

    MethodStatus status = CheckScriptSize(cx, script);
    if (status != Method_Compiled) {
        IonSpew(IonSpew_Abort, "Aborted compilation of %s:%d", script->filename(), script->lineno());
        return status;
    }

    bool recompile = false;
    OptimizationLevel optimizationLevel = GetOptimizationLevel(script, osrPc, executionMode);
    if (optimizationLevel == Optimization_DontCompile)
        return Method_Skipped;

    // An IonScript already exists for this mode: decide whether to keep it,
    // skip, or recompile at a higher optimization level / different OSR pc.
    IonScript *scriptIon = GetIonScript(script, executionMode);
    if (scriptIon) {
        if (!scriptIon->method())
            return Method_CantCompile;

        // The status to report when we decide not to recompile below.
        MethodStatus failedState = Method_Compiled;

        // If we keep failing to enter the script due to an OSR pc mismatch,
        // recompile with the right pc.
        if (osrPc && script->ionScript()->osrPc() != osrPc) {
            uint32_t count = script->ionScript()->incrOsrPcMismatchCounter();
            if (count <= js_JitOptions.osrPcMismatchesBeforeRecompile)
                return Method_Skipped;

            failedState = Method_Skipped;
        }

        // Don't recompile/overwrite higher optimized code,
        // with a lower optimization level.
        if (optimizationLevel < scriptIon->optimizationLevel())
            return failedState;

        // Same level and a usable (or irrelevant) OSR pc: nothing to do.
        if (optimizationLevel == scriptIon->optimizationLevel() &&
            (!osrPc || script->ionScript()->osrPc() == osrPc))
        {
            return failedState;
        }

        // Don't start compiling if already compiling
        if (scriptIon->isRecompiling())
            return failedState;

        if (osrPc)
            script->ionScript()->resetOsrPcMismatchCounter();

        recompile = true;
    }

    AbortReason reason = IonCompile(cx, script, osrFrame, osrPc, constructing, executionMode,
                                    recompile, optimizationLevel);
    if (reason == AbortReason_Error)
        return Method_Error;

    if (reason == AbortReason_Disable)
        return Method_CantCompile;

    if (reason == AbortReason_Alloc) {
        js_ReportOutOfMemory(cx);
        return Method_Error;
    }

    // At this point the compile either succeeded, was handed off to a worker
    // thread, or aborted benignly; report Method_Compiled only when a usable
    // IonScript (matching |osrPc|, if given) is actually present.
    if (HasIonScript(script, executionMode)) {
        if (osrPc && script->ionScript()->osrPc() != osrPc)
            return Method_Skipped;
        return Method_Compiled;
    }
    return Method_Skipped;
}
  1.2115 +
  1.2116 +} // namespace jit
  1.2117 +} // namespace js
  1.2118 +
  1.2119 +// Decide if a transition from interpreter execution to Ion code should occur.
  1.2120 +// May compile or recompile the target JSScript.
  1.2121 +MethodStatus
  1.2122 +jit::CanEnterAtBranch(JSContext *cx, JSScript *script, BaselineFrame *osrFrame,
  1.2123 +                      jsbytecode *pc, bool isConstructing)
  1.2124 +{
  1.2125 +    JS_ASSERT(jit::IsIonEnabled(cx));
  1.2126 +    JS_ASSERT((JSOp)*pc == JSOP_LOOPENTRY);
  1.2127 +    JS_ASSERT(LoopEntryCanIonOsr(pc));
  1.2128 +
  1.2129 +    // Skip if the script has been disabled.
  1.2130 +    if (!script->canIonCompile())
  1.2131 +        return Method_Skipped;
  1.2132 +
  1.2133 +    // Skip if the script is being compiled off thread.
  1.2134 +    if (script->isIonCompilingOffThread())
  1.2135 +        return Method_Skipped;
  1.2136 +
  1.2137 +    // Skip if the code is expected to result in a bailout.
  1.2138 +    if (script->hasIonScript() && script->ionScript()->bailoutExpected())
  1.2139 +        return Method_Skipped;
  1.2140 +
  1.2141 +    // Optionally ignore on user request.
  1.2142 +    if (!js_JitOptions.osr)
  1.2143 +        return Method_Skipped;
  1.2144 +
  1.2145 +    // Mark as forbidden if frame can't be handled.
  1.2146 +    if (!CheckFrame(osrFrame)) {
  1.2147 +        ForbidCompilation(cx, script);
  1.2148 +        return Method_CantCompile;
  1.2149 +    }
  1.2150 +
  1.2151 +    // Attempt compilation.
  1.2152 +    // - Returns Method_Compiled if the right ionscript is present
  1.2153 +    //   (Meaning it was present or a sequantial compile finished)
  1.2154 +    // - Returns Method_Skipped if pc doesn't match
  1.2155 +    //   (This means a background thread compilation with that pc could have started or not.)
  1.2156 +    RootedScript rscript(cx, script);
  1.2157 +    MethodStatus status = Compile(cx, rscript, osrFrame, pc, isConstructing, SequentialExecution);
  1.2158 +    if (status != Method_Compiled) {
  1.2159 +        if (status == Method_CantCompile)
  1.2160 +            ForbidCompilation(cx, script);
  1.2161 +        return status;
  1.2162 +    }
  1.2163 +
  1.2164 +    return Method_Compiled;
  1.2165 +}
  1.2166 +
// Decide if a transition from interpreter execution to Ion code should occur
// for an ordinary call/execute (non-OSR). May compile or recompile the
// target JSScript.
MethodStatus
jit::CanEnter(JSContext *cx, RunState &state)
{
    JS_ASSERT(jit::IsIonEnabled(cx));

    JSScript *script = state.script();

    // Skip if the script has been disabled.
    if (!script->canIonCompile())
        return Method_Skipped;

    // Skip if the script is being compiled off thread.
    if (script->isIonCompilingOffThread())
        return Method_Skipped;

    // Skip if the code is expected to result in a bailout.
    if (script->hasIonScript() && script->ionScript()->bailoutExpected())
        return Method_Skipped;

    // If constructing, allocate a new |this| object before building Ion.
    // Creating |this| is done before building Ion because it may change the
    // type information and invalidate compilation results.
    if (state.isInvoke()) {
        InvokeState &invoke = *state.asInvoke();

        // Both the actual and the formal argument counts are bounded to
        // avoid overrunning the stack.
        if (TooManyArguments(invoke.args().length())) {
            IonSpew(IonSpew_Abort, "too many actual args");
            ForbidCompilation(cx, script);
            return Method_CantCompile;
        }

        if (TooManyArguments(invoke.args().callee().as<JSFunction>().nargs())) {
            IonSpew(IonSpew_Abort, "too many args");
            ForbidCompilation(cx, script);
            return Method_CantCompile;
        }

        if (invoke.constructing() && invoke.args().thisv().isPrimitive()) {
            RootedScript scriptRoot(cx, script);
            RootedObject callee(cx, &invoke.args().callee());
            RootedObject obj(cx, CreateThisForFunction(cx, callee,
                                                       invoke.useNewType()
                                                       ? SingletonObject
                                                       : GenericObject));
            if (!obj || !jit::IsIonEnabled(cx)) // Note: OOM under CreateThis can disable TI.
                return Method_Skipped;
            invoke.args().setThis(ObjectValue(*obj));
            // Reload |script| from its root: CreateThisForFunction can GC.
            script = scriptRoot;
        }
    } else if (state.isGenerator()) {
        // Generator frames are never Ion compiled.
        IonSpew(IonSpew_Abort, "generator frame");
        ForbidCompilation(cx, script);
        return Method_CantCompile;
    }

    // If --ion-eager is used, compile with Baseline first, so that we
    // can directly enter IonMonkey.
    RootedScript rscript(cx, script);
    if (js_JitOptions.eagerCompilation && !rscript->hasBaselineScript()) {
        MethodStatus status = CanEnterBaselineMethod(cx, state);
        if (status != Method_Compiled)
            return status;
    }

    // Attempt compilation. Returns Method_Compiled if already compiled.
    bool constructing = state.isInvoke() && state.asInvoke()->constructing();
    MethodStatus status =
        Compile(cx, rscript, nullptr, nullptr, constructing, SequentialExecution);
    if (status != Method_Compiled) {
        if (status == Method_CantCompile)
            ForbidCompilation(cx, rscript);
        return status;
    }

    return Method_Compiled;
}
  1.2243 +
  1.2244 +MethodStatus
  1.2245 +jit::CompileFunctionForBaseline(JSContext *cx, HandleScript script, BaselineFrame *frame,
  1.2246 +                                bool isConstructing)
  1.2247 +{
  1.2248 +    JS_ASSERT(jit::IsIonEnabled(cx));
  1.2249 +    JS_ASSERT(frame->fun()->nonLazyScript()->canIonCompile());
  1.2250 +    JS_ASSERT(!frame->fun()->nonLazyScript()->isIonCompilingOffThread());
  1.2251 +    JS_ASSERT(!frame->fun()->nonLazyScript()->hasIonScript());
  1.2252 +    JS_ASSERT(frame->isFunctionFrame());
  1.2253 +
  1.2254 +    // Mark as forbidden if frame can't be handled.
  1.2255 +    if (!CheckFrame(frame)) {
  1.2256 +        ForbidCompilation(cx, script);
  1.2257 +        return Method_CantCompile;
  1.2258 +    }
  1.2259 +
  1.2260 +    // Attempt compilation. Returns Method_Compiled if already compiled.
  1.2261 +    MethodStatus status =
  1.2262 +        Compile(cx, script, frame, nullptr, isConstructing, SequentialExecution);
  1.2263 +    if (status != Method_Compiled) {
  1.2264 +        if (status == Method_CantCompile)
  1.2265 +            ForbidCompilation(cx, script);
  1.2266 +        return status;
  1.2267 +    }
  1.2268 +
  1.2269 +    return Method_Compiled;
  1.2270 +}
  1.2271 +
  1.2272 +MethodStatus
  1.2273 +jit::Recompile(JSContext *cx, HandleScript script, BaselineFrame *osrFrame, jsbytecode *osrPc,
  1.2274 +               bool constructing)
  1.2275 +{
  1.2276 +    JS_ASSERT(script->hasIonScript());
  1.2277 +    if (script->ionScript()->isRecompiling())
  1.2278 +        return Method_Compiled;
  1.2279 +
  1.2280 +    MethodStatus status =
  1.2281 +        Compile(cx, script, osrFrame, osrPc, constructing, SequentialExecution);
  1.2282 +    if (status != Method_Compiled) {
  1.2283 +        if (status == Method_CantCompile)
  1.2284 +            ForbidCompilation(cx, script);
  1.2285 +        return status;
  1.2286 +    }
  1.2287 +
  1.2288 +    return Method_Compiled;
  1.2289 +}
  1.2290 +
// Decide whether |script| can be entered in parallel (ForkJoin) execution
// mode, compiling a parallel IonScript on demand. Returns Method_Compiled
// only when a valid parallel IonScript exists on return.
MethodStatus
jit::CanEnterInParallel(JSContext *cx, HandleScript script)
{
    // Skip if the script has been disabled.
    //
    // Note: We return Method_Skipped in this case because the other
    // CanEnter() methods do so. However, ForkJoin.cpp detects this
    // condition differently and treats it more like an error.
    if (!script->canParallelIonCompile())
        return Method_Skipped;

    // Skip if the script is being compiled off thread.
    if (script->isParallelIonCompilingOffThread())
        return Method_Skipped;

    // Attempt compilation in parallel mode; returns Method_Compiled if a
    // parallel IonScript already exists.
    MethodStatus status = Compile(cx, script, nullptr, nullptr, false, ParallelExecution);
    if (status != Method_Compiled) {
        if (status == Method_CantCompile)
            ForbidCompilation(cx, script, ParallelExecution);
        return status;
    }

    // This can GC, so afterward, script->parallelIon is
    // not guaranteed to be valid.
    if (!cx->runtime()->jitRuntime()->enterIon())
        return Method_Error;

    // Subtle: it is possible for GC to occur during
    // compilation of one of the invoked functions, which
    // would cause the earlier functions (such as the
    // kernel itself) to be collected.  In this event, we
    // give up and fallback to sequential for now.
    if (!script->hasParallelIonScript()) {
        parallel::Spew(
            parallel::SpewCompile,
            "Script %p:%s:%u was garbage-collected or invalidated",
            script.get(), script->filename(), script->lineno());
        return Method_Skipped;
    }

    return Method_Compiled;
}
  1.2333 +
  1.2334 +MethodStatus
  1.2335 +jit::CanEnterUsingFastInvoke(JSContext *cx, HandleScript script, uint32_t numActualArgs)
  1.2336 +{
  1.2337 +    JS_ASSERT(jit::IsIonEnabled(cx));
  1.2338 +
  1.2339 +    // Skip if the code is expected to result in a bailout.
  1.2340 +    if (!script->hasIonScript() || script->ionScript()->bailoutExpected())
  1.2341 +        return Method_Skipped;
  1.2342 +
  1.2343 +    // Don't handle arguments underflow, to make this work we would have to pad
  1.2344 +    // missing arguments with |undefined|.
  1.2345 +    if (numActualArgs < script->functionNonDelazifying()->nargs())
  1.2346 +        return Method_Skipped;
  1.2347 +
  1.2348 +    if (!cx->compartment()->ensureJitCompartmentExists(cx))
  1.2349 +        return Method_Error;
  1.2350 +
  1.2351 +    // This can GC, so afterward, script->ion is not guaranteed to be valid.
  1.2352 +    if (!cx->runtime()->jitRuntime()->enterIon())
  1.2353 +        return Method_Error;
  1.2354 +
  1.2355 +    if (!script->hasIonScript())
  1.2356 +        return Method_Skipped;
  1.2357 +
  1.2358 +    return Method_Compiled;
  1.2359 +}
  1.2360 +
// Transfer control to already-compiled Ion code. |data| carries the jitcode
// entry point, the argument vector, and the result slot; on return the
// result slot holds either the return value or JS_ION_ERROR magic on error.
static IonExecStatus
EnterIon(JSContext *cx, EnterJitData &data)
{
    JS_CHECK_RECURSION(cx, return IonExec_Aborted);
    JS_ASSERT(jit::IsIonEnabled(cx));
    // This path is for normal entry only; OSR entries carry an osrFrame.
    JS_ASSERT(!data.osrFrame);

    EnterJitCode enter = cx->runtime()->jitRuntime()->enterIon();

    // Caller must construct |this| before invoking the Ion function.
    JS_ASSERT_IF(data.constructing, data.maxArgv[0].isObject());

    // Seed the result slot with the actual argument count; the generated
    // code overwrites it with the return value (via result.address() below).
    data.result.setInt32(data.numActualArgs);
    {
        AssertCompartmentUnchanged pcc(cx);
        // The JitActivation must be live across the generated-code call so
        // frame iteration can see the Ion frames.
        JitActivation activation(cx, data.constructing);

        CALL_GENERATED_CODE(enter, data.jitcode, data.maxArgc, data.maxArgv, /* osrFrame = */nullptr, data.calleeToken,
                            /* scopeChain = */ nullptr, 0, data.result.address());
    }

    JS_ASSERT(!cx->runtime()->hasIonReturnOverride());

    // Jit callers wrap primitive constructor return.
    if (!data.result.isMagic() && data.constructing && data.result.isPrimitive())
        data.result = data.maxArgv[0];

    // Release temporary buffer used for OSR into Ion.
    cx->runtime()->getJitRuntime(cx)->freeOsrTempData();

    // A magic result signals a thrown error and must be JS_ION_ERROR.
    JS_ASSERT_IF(data.result.isMagic(), data.result.isMagic(JS_ION_ERROR));
    return data.result.isMagic() ? IonExec_Error : IonExec_Ok;
}
  1.2394 +
// Populate |data| (the argument block for EnterIon/EnterBaseline) from the
// interpreter RunState. |vals| provides backing storage when the actual
// arguments must be copied and padded (argument underflow); it must outlive
// the use of |data|. Returns false only on OOM while appending to |vals|.
bool
jit::SetEnterJitData(JSContext *cx, EnterJitData &data, RunState &state, AutoValueVector &vals)
{
    data.osrFrame = nullptr;

    if (state.isInvoke()) {
        CallArgs &args = state.asInvoke()->args();
        unsigned numFormals = state.script()->functionNonDelazifying()->nargs();
        data.constructing = state.asInvoke()->constructing();
        data.numActualArgs = args.length();
        // +1 slot for |this|.
        data.maxArgc = Max(args.length(), numFormals) + 1;
        data.scopeChain = nullptr;
        data.calleeToken = CalleeToToken(&args.callee().as<JSFunction>());

        if (data.numActualArgs >= numFormals) {
            // Enough actuals: point directly at |this| followed by the args
            // in the caller's stack (base()[0] is the callee).
            data.maxArgv = args.base() + 1;
        } else {
            // Pad missing arguments with |undefined|.
            // First copy |this| and the actuals: base()[1] .. base()[args.length()+1].
            for (size_t i = 1; i < args.length() + 2; i++) {
                if (!vals.append(args.base()[i]))
                    return false;
            }

            // Then pad up to |this| + numFormals slots.
            while (vals.length() < numFormals + 1) {
                if (!vals.append(UndefinedValue()))
                    return false;
            }

            JS_ASSERT(vals.length() >= numFormals + 1);
            data.maxArgv = vals.begin();
        }
    } else {
        // Execute (global/eval) entry: no callee arguments, only |this|.
        data.constructing = false;
        data.numActualArgs = 0;
        data.maxArgc = 1;
        data.maxArgv = state.asExecute()->addressOfThisv();
        data.scopeChain = state.asExecute()->scopeChain();

        data.calleeToken = CalleeToToken(state.script());

        if (state.script()->isForEval() &&
            !(state.asExecute()->type() & InterpreterFrame::GLOBAL))
        {
            // Non-global eval: use the enclosing function frame's callee for
            // the callee token, when there is one.
            ScriptFrameIter iter(cx);
            if (iter.isFunctionFrame())
                data.calleeToken = CalleeToToken(iter.callee());
        }
    }

    return true;
}
  1.2446 +
  1.2447 +IonExecStatus
  1.2448 +jit::IonCannon(JSContext *cx, RunState &state)
  1.2449 +{
  1.2450 +    IonScript *ion = state.script()->ionScript();
  1.2451 +
  1.2452 +    EnterJitData data(cx);
  1.2453 +    data.jitcode = ion->method()->raw();
  1.2454 +
  1.2455 +    AutoValueVector vals(cx);
  1.2456 +    if (!SetEnterJitData(cx, data, state, vals))
  1.2457 +        return IonExec_Error;
  1.2458 +
  1.2459 +    IonExecStatus status = EnterIon(cx, data);
  1.2460 +
  1.2461 +    if (status == IonExec_Ok)
  1.2462 +        state.setReturnValue(data.result);
  1.2463 +
  1.2464 +    return status;
  1.2465 +}
  1.2466 +
// Call |fun| directly through its Ion entry point, bypassing the generic
// Invoke path. The caller must have vetted the call with
// CanEnterUsingFastInvoke (valid IonScript, no argument underflow).
IonExecStatus
jit::FastInvoke(JSContext *cx, HandleFunction fun, CallArgs &args)
{
    JS_CHECK_RECURSION(cx, return IonExec_Error);

    IonScript *ion = fun->nonLazyScript()->ionScript();
    JitCode *code = ion->method();
    void *jitcode = code->raw();

    JS_ASSERT(jit::IsIonEnabled(cx));
    JS_ASSERT(!ion->bailoutExpected());

    // Activation must cover the generated-code call for frame iteration.
    JitActivation activation(cx, /* firstFrameIsConstructing = */false);

    EnterJitCode enter = cx->runtime()->jitRuntime()->enterIon();
    void *calleeToken = CalleeToToken(fun);

    // The result slot initially carries the actual argument count; the call
    // below overwrites it (via result.address()) with the return value.
    RootedValue result(cx, Int32Value(args.length()));
    JS_ASSERT(args.length() >= fun->nargs());

    // args.array() - 1 points at |this| followed by the arguments, hence
    // the argc of args.length() + 1.
    CALL_GENERATED_CODE(enter, jitcode, args.length() + 1, args.array() - 1, /* osrFrame = */nullptr,
                        calleeToken, /* scopeChain = */ nullptr, 0, result.address());

    JS_ASSERT(!cx->runtime()->hasIonReturnOverride());

    args.rval().set(result);

    // A magic result signals a thrown error and must be JS_ION_ERROR.
    JS_ASSERT_IF(result.isMagic(), result.isMagic(JS_ION_ERROR));
    return result.isMagic() ? IonExec_Error : IonExec_Ok;
}
  1.2497 +
// Walk every JIT frame of one activation (delimited by |ionTop|) and patch
// the Ion frames that require invalidation so that, when control returns to
// them, they call into the invalidation epilogue instead of resuming. With
// |invalidateAll| set, every Ion frame is patched; otherwise only frames
// whose IonScript is already marked invalidated() are patched.
static void
InvalidateActivation(FreeOp *fop, uint8_t *ionTop, bool invalidateAll)
{
    IonSpew(IonSpew_Invalidate, "BEGIN invalidating activation");

    size_t frameno = 1;

    for (JitFrameIterator it(ionTop, SequentialExecution); !it.done(); ++it, ++frameno) {
        JS_ASSERT_IF(frameno == 1, it.type() == JitFrame_Exit);

#ifdef DEBUG
        // Debug-only spew describing each frame encountered.
        // NOTE(review): frameno is size_t but is spewed with %d — technically
        // a varargs mismatch, harmless for realistic frame counts; confirm.
        switch (it.type()) {
          case JitFrame_Exit:
            IonSpew(IonSpew_Invalidate, "#%d exit frame @ %p", frameno, it.fp());
            break;
          case JitFrame_BaselineJS:
          case JitFrame_IonJS:
          {
            JS_ASSERT(it.isScripted());
            const char *type = it.isIonJS() ? "Optimized" : "Baseline";
            IonSpew(IonSpew_Invalidate, "#%d %s JS frame @ %p, %s:%d (fun: %p, script: %p, pc %p)",
                    frameno, type, it.fp(), it.script()->filename(), it.script()->lineno(),
                    it.maybeCallee(), (JSScript *)it.script(), it.returnAddressToFp());
            break;
          }
          case JitFrame_BaselineStub:
            IonSpew(IonSpew_Invalidate, "#%d baseline stub frame @ %p", frameno, it.fp());
            break;
          case JitFrame_Rectifier:
            IonSpew(IonSpew_Invalidate, "#%d rectifier frame @ %p", frameno, it.fp());
            break;
          case JitFrame_Unwound_IonJS:
          case JitFrame_Unwound_BaselineStub:
            MOZ_ASSUME_UNREACHABLE("invalid");
          case JitFrame_Unwound_Rectifier:
            IonSpew(IonSpew_Invalidate, "#%d unwound rectifier frame @ %p", frameno, it.fp());
            break;
          case JitFrame_Entry:
            IonSpew(IonSpew_Invalidate, "#%d entry frame @ %p", frameno, it.fp());
            break;
        }
#endif

        // Only optimized (Ion) JS frames can need patching.
        if (!it.isIonJS())
            continue;

        // See if the frame has already been invalidated.
        if (it.checkInvalidation())
            continue;

        JSScript *script = it.script();
        if (!script->hasIonScript())
            continue;

        if (!invalidateAll && !script->ionScript()->invalidated())
            continue;

        IonScript *ionScript = script->ionScript();

        // Purge ICs before we mark this script as invalidated. This will
        // prevent lastJump_ from appearing to be a bogus pointer, just
        // in case anyone tries to read it.
        ionScript->purgeCaches();

        // Clean up any pointers from elsewhere in the runtime to this IonScript
        // which is about to become disconnected from its JSScript.
        ionScript->unlinkFromRuntime(fop);

        // This frame needs to be invalidated. We do the following:
        //
        // 1. Increment the reference counter to keep the ionScript alive
        //    for the invalidation bailout or for the exception handler.
        // 2. Determine safepoint that corresponds to the current call.
        // 3. From safepoint, get distance to the OSI-patchable offset.
        // 4. From the IonScript, determine the distance between the
        //    call-patchable offset and the invalidation epilogue.
        // 5. Patch the OSI point with a call-relative to the
        //    invalidation epilogue.
        //
        // The code generator ensures that there's enough space for us
        // to patch in a call-relative operation at each invalidation
        // point.
        //
        // Note: you can't simplify this mechanism to "just patch the
        // instruction immediately after the call" because things may
        // need to move into a well-defined register state (using move
        // instructions after the call) in order to capture an appropriate
        // snapshot after the call occurs.

        ionScript->incref();

        const SafepointIndex *si = ionScript->getSafepointIndex(it.returnAddressToFp());
        JitCode *ionCode = ionScript->method();

        JS::Zone *zone = script->zone();
        if (zone->needsBarrier()) {
            // We're about to remove edges from the JSScript to gcthings
            // embedded in the JitCode. Perform one final trace of the
            // JitCode for the incremental GC, as it must know about
            // those edges.
            ionCode->trace(zone->barrierTracer());
        }
        ionCode->setInvalidated();

        // Write the delta (from the return address offset to the
        // IonScript pointer embedded into the invalidation epilogue)
        // where the safepointed call instruction used to be. We rely on
        // the call sequence causing the safepoint being >= the size of
        // a uint32, which is checked during safepoint index
        // construction.
        CodeLocationLabel dataLabelToMunge(it.returnAddressToFp());
        ptrdiff_t delta = ionScript->invalidateEpilogueDataOffset() -
                          (it.returnAddressToFp() - ionCode->raw());
        Assembler::patchWrite_Imm32(dataLabelToMunge, Imm32(delta));

        CodeLocationLabel osiPatchPoint = SafepointReader::InvalidationPatchPoint(ionScript, si);
        CodeLocationLabel invalidateEpilogue(ionCode, ionScript->invalidateEpilogueOffset());

        IonSpew(IonSpew_Invalidate, "   ! Invalidate ionScript %p (ref %u) -> patching osipoint %p",
                ionScript, ionScript->refcount(), (void *) osiPatchPoint.raw());
        Assembler::patchWrite_NearCall(osiPatchPoint, invalidateEpilogue);
    }

    IonSpew(IonSpew_Invalidate, "END invalidating activation");
}
  1.2623 +
  1.2624 +void
  1.2625 +jit::StopAllOffThreadCompilations(JSCompartment *comp)
  1.2626 +{
  1.2627 +    if (!comp->jitCompartment())
  1.2628 +        return;
  1.2629 +    CancelOffThreadIonCompile(comp, nullptr);
  1.2630 +    FinishAllOffThreadCompilations(comp);
  1.2631 +}
  1.2632 +
  1.2633 +void
  1.2634 +jit::InvalidateAll(FreeOp *fop, Zone *zone)
  1.2635 +{
  1.2636 +    for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next())
  1.2637 +        StopAllOffThreadCompilations(comp);
  1.2638 +
  1.2639 +    for (JitActivationIterator iter(fop->runtime()); !iter.done(); ++iter) {
  1.2640 +        if (iter->compartment()->zone() == zone) {
  1.2641 +            IonSpew(IonSpew_Invalidate, "Invalidating all frames for GC");
  1.2642 +            InvalidateActivation(fop, iter.jitTop(), true);
  1.2643 +        }
  1.2644 +    }
  1.2645 +}
  1.2646 +
  1.2647 +
// Invalidate every IonScript named in |invalid|. Runs in three phases:
// (1) add an invalidation reference to each target IonScript,
// (2) patch all Ion frames on every JIT activation in the runtime,
// (3) detach each IonScript from its JSScript and drop the reference —
// scripts still on the stack stay alive until their frames unwind.
void
jit::Invalidate(types::TypeZone &types, FreeOp *fop,
                const Vector<types::RecompileInfo> &invalid, bool resetUses,
                bool cancelOffThread)
{
    IonSpew(IonSpew_Invalidate, "Start invalidation.");

    // Add an invalidation reference to all invalidated IonScripts to indicate
    // to the traversal which frames have been invalidated.
    size_t numInvalidations = 0;
    for (size_t i = 0; i < invalid.length(); i++) {
        const types::CompilerOutput &co = *invalid[i].compilerOutput(types);
        if (!co.isValid())
            continue;

        if (cancelOffThread)
            CancelOffThreadIonCompile(co.script()->compartment(), co.script());

        if (!co.ion())
            continue;

        IonSpew(IonSpew_Invalidate, " Invalidate %s:%u, IonScript %p",
                co.script()->filename(), co.script()->lineno(), co.ion());

        // Keep the ion script alive during the invalidation and flag this
        // ionScript as being invalidated.  This increment is removed by the
        // loop after the calls to InvalidateActivation.
        co.ion()->incref();
        numInvalidations++;
    }

    // Nothing to do if no valid compiler output had an IonScript.
    if (!numInvalidations) {
        IonSpew(IonSpew_Invalidate, " No IonScript invalidation.");
        return;
    }

    for (JitActivationIterator iter(fop->runtime()); !iter.done(); ++iter)
        InvalidateActivation(fop, iter.jitTop(), false);

    // Drop the references added above. If a script was never active, its
    // IonScript will be immediately destroyed. Otherwise, it will be held live
    // until its last invalidated frame is destroyed.
    for (size_t i = 0; i < invalid.length(); i++) {
        types::CompilerOutput &co = *invalid[i].compilerOutput(types);
        if (!co.isValid())
            continue;

        ExecutionMode executionMode = co.mode();
        JSScript *script = co.script();
        IonScript *ionScript = co.ion();
        if (!ionScript)
            continue;

        // Detach the IonScript from the JSScript before dropping the
        // invalidation reference, then mark the compiler output invalid.
        SetIonScript(script, executionMode, nullptr);
        ionScript->decref(fop);
        co.invalidate();
        numInvalidations--;

        // Wait for the scripts to get warm again before doing another
        // compile, unless either:
        // (1) we are recompiling *because* a script got hot;
        //     (resetUses is false); or,
        // (2) we are invalidating a parallel script.  This is because
        //     the useCount only applies to sequential uses.  Parallel
        //     execution *requires* ion, and so we don't limit it to
        //     methods with a high usage count (though we do check that
        //     the useCount is at least 1 when compiling the transitive
        //     closure of potential callees, to avoid compiling things
        //     that are never run at all).
        if (resetUses && executionMode != ParallelExecution)
            script->resetUseCount();
    }

    // Make sure we didn't leak references by invalidating the same IonScript
    // multiple times in the above loop.
    JS_ASSERT(!numInvalidations);
}
  1.2725 +
  1.2726 +void
  1.2727 +jit::Invalidate(JSContext *cx, const Vector<types::RecompileInfo> &invalid, bool resetUses,
  1.2728 +                bool cancelOffThread)
  1.2729 +{
  1.2730 +    jit::Invalidate(cx->zone()->types, cx->runtime()->defaultFreeOp(), invalid, resetUses,
  1.2731 +                    cancelOffThread);
  1.2732 +}
  1.2733 +
  1.2734 +bool
  1.2735 +jit::Invalidate(JSContext *cx, JSScript *script, ExecutionMode mode, bool resetUses,
  1.2736 +                bool cancelOffThread)
  1.2737 +{
  1.2738 +    JS_ASSERT(script->hasIonScript());
  1.2739 +
  1.2740 +    if (cx->runtime()->spsProfiler.enabled()) {
  1.2741 +        // Register invalidation with profiler.
  1.2742 +        // Format of event payload string:
  1.2743 +        //      "<filename>:<lineno>"
  1.2744 +
  1.2745 +        // Get the script filename, if any, and its length.
  1.2746 +        const char *filename = script->filename();
  1.2747 +        if (filename == nullptr)
  1.2748 +            filename = "<unknown>";
  1.2749 +
  1.2750 +        size_t len = strlen(filename) + 20;
  1.2751 +        char *buf = js_pod_malloc<char>(len);
  1.2752 +        if (!buf)
  1.2753 +            return false;
  1.2754 +
  1.2755 +        // Construct the descriptive string.
  1.2756 +        JS_snprintf(buf, len, "Invalidate %s:%llu", filename, script->lineno());
  1.2757 +        cx->runtime()->spsProfiler.markEvent(buf);
  1.2758 +        js_free(buf);
  1.2759 +    }
  1.2760 +
  1.2761 +    Vector<types::RecompileInfo> scripts(cx);
  1.2762 +
  1.2763 +    switch (mode) {
  1.2764 +      case SequentialExecution:
  1.2765 +        JS_ASSERT(script->hasIonScript());
  1.2766 +        if (!scripts.append(script->ionScript()->recompileInfo()))
  1.2767 +            return false;
  1.2768 +        break;
  1.2769 +      case ParallelExecution:
  1.2770 +        JS_ASSERT(script->hasParallelIonScript());
  1.2771 +        if (!scripts.append(script->parallelIonScript()->recompileInfo()))
  1.2772 +            return false;
  1.2773 +        break;
  1.2774 +      default:
  1.2775 +        MOZ_ASSUME_UNREACHABLE("No such execution mode");
  1.2776 +    }
  1.2777 +
  1.2778 +    Invalidate(cx, scripts, resetUses, cancelOffThread);
  1.2779 +    return true;
  1.2780 +}
  1.2781 +
// Convenience overload: invalidate |script|'s sequential-mode IonScript.
bool
jit::Invalidate(JSContext *cx, JSScript *script, bool resetUses, bool cancelOffThread)
{
    return Invalidate(cx, script, SequentialExecution, resetUses, cancelOffThread);
}
  1.2787 +
  1.2788 +static void
  1.2789 +FinishInvalidationOf(FreeOp *fop, JSScript *script, IonScript *ionScript)
  1.2790 +{
  1.2791 +    types::TypeZone &types = script->zone()->types;
  1.2792 +
  1.2793 +    // Note: If the script is about to be swept, the compiler output may have
  1.2794 +    // already been destroyed.
  1.2795 +    if (types::CompilerOutput *output = ionScript->recompileInfo().compilerOutput(types))
  1.2796 +        output->invalidate();
  1.2797 +
  1.2798 +    // If this script has Ion code on the stack, invalidated() will return
  1.2799 +    // true. In this case we have to wait until destroying it.
  1.2800 +    if (!ionScript->invalidated())
  1.2801 +        jit::IonScript::Destroy(fop, ionScript);
  1.2802 +}
  1.2803 +
  1.2804 +template <ExecutionMode mode>
  1.2805 +void
  1.2806 +jit::FinishInvalidation(FreeOp *fop, JSScript *script)
  1.2807 +{
  1.2808 +    // In all cases, nullptr out script->ion or script->parallelIon to avoid
  1.2809 +    // re-entry.
  1.2810 +    switch (mode) {
  1.2811 +      case SequentialExecution:
  1.2812 +        if (script->hasIonScript()) {
  1.2813 +            IonScript *ion = script->ionScript();
  1.2814 +            script->setIonScript(nullptr);
  1.2815 +            FinishInvalidationOf(fop, script, ion);
  1.2816 +        }
  1.2817 +        return;
  1.2818 +
  1.2819 +      case ParallelExecution:
  1.2820 +        if (script->hasParallelIonScript()) {
  1.2821 +            IonScript *parallelIon = script->parallelIonScript();
  1.2822 +            script->setParallelIonScript(nullptr);
  1.2823 +            FinishInvalidationOf(fop, script, parallelIon);
  1.2824 +        }
  1.2825 +        return;
  1.2826 +
  1.2827 +      default:
  1.2828 +        MOZ_ASSUME_UNREACHABLE("bad execution mode");
  1.2829 +    }
  1.2830 +}
  1.2831 +
// Explicit instantiations for the two execution modes, for callers in other
// translation units.
template void
jit::FinishInvalidation<SequentialExecution>(FreeOp *fop, JSScript *script);

template void
jit::FinishInvalidation<ParallelExecution>(FreeOp *fop, JSScript *script);
  1.2837 +
// Out-of-line write barrier called from JIT code: mark the Value at |vp|
// with the runtime's GC marker.
void
jit::MarkValueFromIon(JSRuntime *rt, Value *vp)
{
    gc::MarkValueUnbarriered(&rt->gcMarker, vp, "write barrier");
}
  1.2843 +
// Out-of-line write barrier called from JIT code: mark the Shape at |shapep|
// with the runtime's GC marker.
void
jit::MarkShapeFromIon(JSRuntime *rt, Shape **shapep)
{
    gc::MarkShapeUnbarriered(&rt->gcMarker, shapep, "write barrier");
}
  1.2849 +
// Convenience overload: forbid sequential-mode Ion compilation of |script|.
void
jit::ForbidCompilation(JSContext *cx, JSScript *script)
{
    ForbidCompilation(cx, script, SequentialExecution);
}
  1.2855 +
  1.2856 +void
  1.2857 +jit::ForbidCompilation(JSContext *cx, JSScript *script, ExecutionMode mode)
  1.2858 +{
  1.2859 +    IonSpew(IonSpew_Abort, "Disabling Ion mode %d compilation of script %s:%d",
  1.2860 +            mode, script->filename(), script->lineno());
  1.2861 +
  1.2862 +    CancelOffThreadIonCompile(cx->compartment(), script);
  1.2863 +
  1.2864 +    switch (mode) {
  1.2865 +      case SequentialExecution:
  1.2866 +        if (script->hasIonScript()) {
  1.2867 +            // It is only safe to modify script->ion if the script is not currently
  1.2868 +            // running, because JitFrameIterator needs to tell what ionScript to
  1.2869 +            // use (either the one on the JSScript, or the one hidden in the
  1.2870 +            // breadcrumbs Invalidation() leaves). Therefore, if invalidation
  1.2871 +            // fails, we cannot disable the script.
  1.2872 +            if (!Invalidate(cx, script, mode, false))
  1.2873 +                return;
  1.2874 +        }
  1.2875 +
  1.2876 +        script->setIonScript(ION_DISABLED_SCRIPT);
  1.2877 +        return;
  1.2878 +
  1.2879 +      case ParallelExecution:
  1.2880 +        if (script->hasParallelIonScript()) {
  1.2881 +            if (!Invalidate(cx, script, mode, false))
  1.2882 +                return;
  1.2883 +        }
  1.2884 +
  1.2885 +        script->setParallelIonScript(ION_DISABLED_SCRIPT);
  1.2886 +        return;
  1.2887 +
  1.2888 +      default:
  1.2889 +        MOZ_ASSUME_UNREACHABLE("No such execution mode");
  1.2890 +    }
  1.2891 +
  1.2892 +    MOZ_ASSUME_UNREACHABLE("No such execution mode");
  1.2893 +}
  1.2894 +
// Return this thread's currently registered AutoFlushICache context, or
// nullptr when none is active.
AutoFlushICache *
PerThreadData::autoFlushICache() const
{
    return autoFlushICache_;
}
  1.2900 +
// Register |afc| as this thread's current AutoFlushICache context (or clear
// the registration by passing nullptr).
void
PerThreadData::setAutoFlushICache(AutoFlushICache *afc)
{
    autoFlushICache_ = afc;
}
  1.2906 +
// Set the range for merging flushes.  Flushing is deferred until the end of
// the AutoFlushICache context, and any subsequent flush requests falling
// within this range are also deferred.  This is expected to be called at most
// once per AutoFlushICache context, and the range to be flushed is required
// to lie within an AutoFlushICache context.
void
AutoFlushICache::setRange(uintptr_t start, size_t len)
{
#if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_MIPS)
    AutoFlushICache *afc = TlsPerThreadData.get()->PerThreadData::autoFlushICache();
    JS_ASSERT(afc);
    // Only one range may be registered per context.
    JS_ASSERT(!afc->start_);
    // NOTE(review): %x with uintptr_t/size_t assumes 32-bit values — fine on
    // the ARM/MIPS targets this is compiled for; confirm if widened.
    IonSpewCont(IonSpew_CacheFlush, "(%x %x):", start, len);

    uintptr_t stop = start + len;
    afc->start_ = start;
    afc->stop_ = stop;
#endif
}
  1.2926 +
// Flush the instruction cache.
//
// If called within a dynamic AutoFlushICache context and the range is already
// pending a flush for this AutoFlushICache context, then the request is
// ignored with the understanding that it will be flushed on exit from the
// AutoFlushICache context.  Otherwise the range is flushed immediately.
//
// Updates outside the current code object are typically the exception, so they
// are flushed immediately rather than being merged.
//
// For efficiency it is expected that all large ranges will be flushed within an
// AutoFlushICache, so check.  If this assertion is hit then it does not
// necessarily indicate a program fault, but it might indicate a lost
// opportunity to merge cache flushing.  It can be corrected by wrapping the
// call in an AutoFlushICache context.
void
AutoFlushICache::flush(uintptr_t start, size_t len)
{
#if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_MIPS)
    AutoFlushICache *afc = TlsPerThreadData.get()->PerThreadData::autoFlushICache();
    if (!afc) {
        // No active context: flush immediately. Large flushes are expected to
        // happen inside an AutoFlushICache context, hence the size assertion.
        IonSpewCont(IonSpew_CacheFlush, "#");
        JSC::ExecutableAllocator::cacheFlush((void*)start, len);
        JS_ASSERT(len <= 16);
        return;
    }

    uintptr_t stop = start + len;
    if (start >= afc->start_ && stop <= afc->stop_) {
        // Update is within the pending flush range, so defer to the end of the context.
        IonSpewCont(IonSpew_CacheFlush, afc->inhibit_ ? "-" : "=");
        return;
    }

    // Outside the pending range: flush immediately. On this path inhibit_
    // only selects the spew marker; it does not suppress the flush.
    IonSpewCont(IonSpew_CacheFlush, afc->inhibit_ ? "x" : "*");
    JSC::ExecutableAllocator::cacheFlush((void *)start, len);
#endif
}
  1.2964 +
// Flag the current dynamic AutoFlushICache as inhibiting flushing. Useful in error paths
// where the changes are being abandoned.
void
AutoFlushICache::setInhibit()
{
#if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_MIPS)
    AutoFlushICache *afc = TlsPerThreadData.get()->PerThreadData::autoFlushICache();
    JS_ASSERT(afc);
    // A range must already have been registered via setRange().
    JS_ASSERT(afc->start_);
    IonSpewCont(IonSpew_CacheFlush, "I");
    afc->inhibit_ = true;
#endif
}
  1.2978 +
  1.2979 +// The common use case is merging cache flushes when preparing a code object.  In this
  1.2980 +// case the entire range of the code object is being flushed and as the code is patched
  1.2981 +// smaller redundant flushes could occur.  The design allows an AutoFlushICache dynamic
  1.2982 +// thread local context to be declared in which the range of the code object can be set
  1.2983 +// which defers flushing until the end of this dynamic context.  The redundant flushing
  1.2984 +// within this code range is also deferred avoiding redundant flushing.  Flushing outside
  1.2985 +// this code range is not affected and proceeds immediately.
  1.2986 +//
  1.2987 +// In some cases flushing is not necessary, such as when compiling an asm.js module which
  1.2988 +// is flushed again when dynamically linked, and also in error paths that abandon the
  1.2989 +// code.  Flushing within the set code range can be inhibited within the AutoFlushICache
  1.2990 +// dynamic context by setting an inhibit flag.
  1.2991 +//
  1.2992 +// The JS compiler can be re-entered while within an AutoFlushICache dynamic context and
  1.2993 +// it is assumed that code being assembled or patched is not executed before the exit of
  1.2994 +// the respective AutoFlushICache dynamic context.
  1.2995 +//
  1.2996 +AutoFlushICache::AutoFlushICache(const char *nonce, bool inhibit)
  1.2997 +  : start_(0),
  1.2998 +    stop_(0),
  1.2999 +    name_(nonce),
  1.3000 +    inhibit_(inhibit)
  1.3001 +{
  1.3002 +#if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_MIPS)
  1.3003 +    PerThreadData *pt = TlsPerThreadData.get();
  1.3004 +    AutoFlushICache *afc = pt->PerThreadData::autoFlushICache();
  1.3005 +    if (afc)
  1.3006 +        IonSpew(IonSpew_CacheFlush, "<%s,%s%s ", nonce, afc->name_, inhibit ? " I" : "");
  1.3007 +    else
  1.3008 +        IonSpewCont(IonSpew_CacheFlush, "<%s%s ", nonce, inhibit ? " I" : "");
  1.3009 +
  1.3010 +    prev_ = afc;
  1.3011 +    pt->PerThreadData::setAutoFlushICache(this);
  1.3012 +#endif
  1.3013 +}
  1.3014 +
  1.3015 +AutoFlushICache::~AutoFlushICache()
  1.3016 +{
  1.3017 +#if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_MIPS)
  1.3018 +    PerThreadData *pt = TlsPerThreadData.get();
  1.3019 +    JS_ASSERT(pt->PerThreadData::autoFlushICache() == this);
  1.3020 +
  1.3021 +    if (!inhibit_ && start_)
  1.3022 +        JSC::ExecutableAllocator::cacheFlush((void *)start_, size_t(stop_ - start_));
  1.3023 +
  1.3024 +    IonSpewCont(IonSpew_CacheFlush, "%s%s>", name_, start_ ? "" : " U");
  1.3025 +    IonSpewFin(IonSpew_CacheFlush);
  1.3026 +    pt->PerThreadData::setAutoFlushICache(prev_);
  1.3027 +#endif
  1.3028 +}
  1.3029 +
  1.3030 +void
  1.3031 +jit::PurgeCaches(JSScript *script)
  1.3032 +{
  1.3033 +    if (script->hasIonScript())
  1.3034 +        script->ionScript()->purgeCaches();
  1.3035 +
  1.3036 +    if (script->hasParallelIonScript())
  1.3037 +        script->parallelIonScript()->purgeCaches();
  1.3038 +}
  1.3039 +
  1.3040 +size_t
  1.3041 +jit::SizeOfIonData(JSScript *script, mozilla::MallocSizeOf mallocSizeOf)
  1.3042 +{
  1.3043 +    size_t result = 0;
  1.3044 +
  1.3045 +    if (script->hasIonScript())
  1.3046 +        result += script->ionScript()->sizeOfIncludingThis(mallocSizeOf);
  1.3047 +
  1.3048 +    if (script->hasParallelIonScript())
  1.3049 +        result += script->parallelIonScript()->sizeOfIncludingThis(mallocSizeOf);
  1.3050 +
  1.3051 +    return result;
  1.3052 +}
  1.3053 +
  1.3054 +void
  1.3055 +jit::DestroyIonScripts(FreeOp *fop, JSScript *script)
  1.3056 +{
  1.3057 +    if (script->hasIonScript())
  1.3058 +        jit::IonScript::Destroy(fop, script->ionScript());
  1.3059 +
  1.3060 +    if (script->hasParallelIonScript())
  1.3061 +        jit::IonScript::Destroy(fop, script->parallelIonScript());
  1.3062 +
  1.3063 +    if (script->hasBaselineScript())
  1.3064 +        jit::BaselineScript::Destroy(fop, script->baselineScript());
  1.3065 +}
  1.3066 +
  1.3067 +void
  1.3068 +jit::TraceIonScripts(JSTracer* trc, JSScript *script)
  1.3069 +{
  1.3070 +    if (script->hasIonScript())
  1.3071 +        jit::IonScript::Trace(trc, script->ionScript());
  1.3072 +
  1.3073 +    if (script->hasParallelIonScript())
  1.3074 +        jit::IonScript::Trace(trc, script->parallelIonScript());
  1.3075 +
  1.3076 +    if (script->hasBaselineScript())
  1.3077 +        jit::BaselineScript::Trace(trc, script->baselineScript());
  1.3078 +}
  1.3079 +
  1.3080 +bool
  1.3081 +jit::RematerializeAllFrames(JSContext *cx, JSCompartment *comp)
  1.3082 +{
  1.3083 +    for (JitActivationIterator iter(comp->runtimeFromMainThread()); !iter.done(); ++iter) {
  1.3084 +        if (iter.activation()->compartment() == comp) {
  1.3085 +            for (JitFrameIterator frameIter(iter); !frameIter.done(); ++frameIter) {
  1.3086 +                if (!frameIter.isIonJS())
  1.3087 +                    continue;
  1.3088 +                if (!iter.activation()->asJit()->getRematerializedFrame(cx, frameIter))
  1.3089 +                    return false;
  1.3090 +            }
  1.3091 +        }
  1.3092 +    }
  1.3093 +    return true;
  1.3094 +}
  1.3095 +
  1.3096 +bool
  1.3097 +jit::UpdateForDebugMode(JSContext *maybecx, JSCompartment *comp,
  1.3098 +                     AutoDebugModeInvalidation &invalidate)
  1.3099 +{
  1.3100 +    MOZ_ASSERT(invalidate.isFor(comp));
  1.3101 +
  1.3102 +    // Schedule invalidation of all optimized JIT code since debug mode
  1.3103 +    // invalidates assumptions.
  1.3104 +    invalidate.scheduleInvalidation(comp->debugMode());
  1.3105 +
  1.3106 +    // Recompile on-stack baseline scripts if we have a cx.
  1.3107 +    if (maybecx) {
  1.3108 +        IonContext ictx(maybecx, nullptr);
  1.3109 +        if (!RecompileOnStackBaselineScriptsForDebugMode(maybecx, comp)) {
  1.3110 +            js_ReportOutOfMemory(maybecx);
  1.3111 +            return false;
  1.3112 +        }
  1.3113 +    }
  1.3114 +
  1.3115 +    return true;
  1.3116 +}
  1.3117 +
AutoDebugModeInvalidation::~AutoDebugModeInvalidation()
{
    // Exactly one of comp_ (single-compartment mode) or zone_ (whole-zone
    // mode) must be set.
    MOZ_ASSERT(!!comp_ != !!zone_);

    if (needInvalidation_ == NoNeed)
        return;

    Zone *zone = zone_ ? zone_ : comp_->zone();
    JSRuntime *rt = zone->runtimeFromMainThread();
    FreeOp *fop = rt->defaultFreeOp();

    // Cancel in-flight off-thread compilations for the affected
    // compartment(s); their results would rest on stale assumptions.
    if (comp_) {
        StopAllOffThreadCompilations(comp_);
    } else {
        for (CompartmentsInZoneIter comp(zone_); !comp.done(); comp.next())
            StopAllOffThreadCompilations(comp);
    }

    // Don't discard active baseline scripts. They are recompiled for debug
    // mode.
    jit::MarkActiveBaselineScripts(zone);

    // Invalidate Ion activations currently on the stack in any affected
    // compartment.
    for (JitActivationIterator iter(rt); !iter.done(); ++iter) {
        JSCompartment *comp = iter->compartment();
        if (comp_ == comp || zone_ == comp->zone()) {
            IonContext ictx(CompileRuntime::get(rt));
            IonSpew(IonSpew_Invalidate, "Invalidating frames for debug mode toggle");
            InvalidateActivation(fop, iter.jitTop(), true);
        }
    }

    // Finish discarding JIT code per script. In whole-zone mode (zone_
    // non-null) every script iterated here is in scope, hence the bare
    // |zone_| in the condition; otherwise only scripts of comp_ are
    // discarded, and other scripts merely have their baseline active flag
    // reset (it was set by MarkActiveBaselineScripts above).
    for (gc::CellIter i(zone, gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
        JSScript *script = i.get<JSScript>();
        if (script->compartment() == comp_ || zone_) {
            FinishInvalidation<SequentialExecution>(fop, script);
            FinishInvalidation<ParallelExecution>(fop, script);
            FinishDiscardBaselineScript(fop, script);
            script->resetUseCount();
        } else if (script->hasBaselineScript()) {
            script->baselineScript()->resetActive();
        }
    }
}

mercurial