js/src/jit/BaselineJIT.cpp

changeset 0
6474c204b198
     1.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
     1.2 +++ b/js/src/jit/BaselineJIT.cpp	Wed Dec 31 06:09:35 2014 +0100
     1.3 @@ -0,0 +1,955 @@
     1.4 +/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
     1.5 + * vim: set ts=8 sts=4 et sw=4 tw=99:
     1.6 + * This Source Code Form is subject to the terms of the Mozilla Public
     1.7 + * License, v. 2.0. If a copy of the MPL was not distributed with this
     1.8 + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
     1.9 +
    1.10 +#include "jit/BaselineJIT.h"
    1.11 +
    1.12 +#include "mozilla/MemoryReporting.h"
    1.13 +
    1.14 +#include "jit/BaselineCompiler.h"
    1.15 +#include "jit/BaselineIC.h"
    1.16 +#include "jit/CompileInfo.h"
    1.17 +#include "jit/IonSpewer.h"
    1.18 +#include "jit/JitCommon.h"
    1.19 +#include "vm/Interpreter.h"
    1.20 +#include "vm/TraceLogging.h"
    1.21 +
    1.22 +#include "jsgcinlines.h"
    1.23 +#include "jsobjinlines.h"
    1.24 +#include "jsopcodeinlines.h"
    1.25 +#include "jsscriptinlines.h"
    1.26 +
    1.27 +#include "jit/IonFrames-inl.h"
    1.28 +#include "vm/Stack-inl.h"
    1.29 +
    1.30 +using namespace js;
    1.31 +using namespace js::jit;
    1.32 +
    1.33 +/* static */ PCMappingSlotInfo::SlotLocation
    1.34 +PCMappingSlotInfo::ToSlotLocation(const StackValue *stackVal)
    1.35 +{
    1.36 +    if (stackVal->kind() == StackValue::Register) {
    1.37 +        if (stackVal->reg() == R0)
    1.38 +            return SlotInR0;
    1.39 +        JS_ASSERT(stackVal->reg() == R1);
    1.40 +        return SlotInR1;
    1.41 +    }
    1.42 +    JS_ASSERT(stackVal->kind() != StackValue::Stack);
    1.43 +    return SlotIgnore;
    1.44 +}
    1.45 +
// Initialize a freshly allocated BaselineScript header. The jitcode
// (method_), the template scope, and the trailing tables (IC entries,
// PC mapping data) are filled in later by BaselineScript::New and the
// baseline compiler.
BaselineScript::BaselineScript(uint32_t prologueOffset, uint32_t epilogueOffset,
                               uint32_t spsPushToggleOffset, uint32_t postDebugPrologueOffset)
  : method_(nullptr),
    templateScope_(nullptr),
    fallbackStubSpace_(),
    prologueOffset_(prologueOffset),
    epilogueOffset_(epilogueOffset),
#ifdef DEBUG
    spsOn_(false),
#endif
    spsPushToggleOffset_(spsPushToggleOffset),
    postDebugPrologueOffset_(postDebugPrologueOffset),
    flags_(0)
{ }
    1.60 +
// Primary chunk size for the LifoAlloc backing baseline compilation.
static const size_t BASELINE_LIFO_ALLOC_PRIMARY_CHUNK_SIZE = 4096;
// Calls with more actual arguments than this stay in the interpreter, to
// bound jit stack usage.
static const unsigned BASELINE_MAX_ARGS_LENGTH = 20000;
    1.63 +
    1.64 +static bool
    1.65 +CheckFrame(InterpreterFrame *fp)
    1.66 +{
    1.67 +    if (fp->isGeneratorFrame()) {
    1.68 +        IonSpew(IonSpew_BaselineAbort, "generator frame");
    1.69 +        return false;
    1.70 +    }
    1.71 +
    1.72 +    if (fp->isDebuggerFrame()) {
    1.73 +        // Debugger eval-in-frame. These are likely short-running scripts so
    1.74 +        // don't bother compiling them for now.
    1.75 +        IonSpew(IonSpew_BaselineAbort, "debugger frame");
    1.76 +        return false;
    1.77 +    }
    1.78 +
    1.79 +    if (fp->isNonEvalFunctionFrame() && fp->numActualArgs() > BASELINE_MAX_ARGS_LENGTH) {
    1.80 +        // Fall back to the interpreter to avoid running out of stack space.
    1.81 +        IonSpew(IonSpew_BaselineAbort, "Too many arguments (%u)", fp->numActualArgs());
    1.82 +        return false;
    1.83 +    }
    1.84 +
    1.85 +    return true;
    1.86 +}
    1.87 +
    1.88 +static bool
    1.89 +IsJSDEnabled(JSContext *cx)
    1.90 +{
    1.91 +    return cx->compartment()->debugMode() && cx->runtime()->debugHooks.callHook;
    1.92 +}
    1.93 +
// Transfer control from the interpreter into baseline jitcode described by
// |data|. Used both for normal calls and for on-stack replacement (when
// data.osrFrame is set). Returns IonExec_Aborted on recursion-check failure,
// otherwise IonExec_Error/IonExec_Ok depending on whether execution left a
// magic (error) value in data.result.
static IonExecStatus
EnterBaseline(JSContext *cx, EnterJitData &data)
{
    if (data.osrFrame) {
        // Check for potential stack overflow before OSR-ing.
        uint8_t spDummy;
        uint32_t extra = BaselineFrame::Size() + (data.osrNumStackValues * sizeof(Value));
        uint8_t *checkSp = (&spDummy) - extra;
        JS_CHECK_RECURSION_WITH_SP(cx, checkSp, return IonExec_Aborted);
    } else {
        JS_CHECK_RECURSION(cx, return IonExec_Aborted);
    }

    JS_ASSERT(jit::IsBaselineEnabled(cx));
    JS_ASSERT_IF(data.osrFrame, CheckFrame(data.osrFrame));

    EnterJitCode enter = cx->runtime()->jitRuntime()->enterBaseline();

    // Caller must construct |this| before invoking the Ion function.
    JS_ASSERT_IF(data.constructing, data.maxArgv[0].isObject());

    data.result.setInt32(data.numActualArgs);
    {
        AssertCompartmentUnchanged pcc(cx);
        JitActivation activation(cx, data.constructing);

        // Let the interpreter frame know jitcode is running on top of it.
        if (data.osrFrame)
            data.osrFrame->setRunningInJit();

        JS_ASSERT_IF(data.osrFrame, !IsJSDEnabled(cx));

        // Single transition point from Interpreter to Baseline.
        CALL_GENERATED_CODE(enter, data.jitcode, data.maxArgc, data.maxArgv, data.osrFrame, data.calleeToken,
                            data.scopeChain.get(), data.osrNumStackValues, data.result.address());

        if (data.osrFrame)
            data.osrFrame->clearRunningInJit();
    }

    JS_ASSERT(!cx->runtime()->hasIonReturnOverride());

    // Jit callers wrap primitive constructor return.
    if (!data.result.isMagic() && data.constructing && data.result.isPrimitive())
        data.result = data.maxArgv[0];

    // Release temporary buffer used for OSR into Ion.
    cx->runtime()->getJitRuntime(cx)->freeOsrTempData();

    // A magic result value signals an error during jit execution.
    JS_ASSERT_IF(data.result.isMagic(), data.result.isMagic(JS_ION_ERROR));
    return data.result.isMagic() ? IonExec_Error : IonExec_Ok;
}
   1.145 +
   1.146 +IonExecStatus
   1.147 +jit::EnterBaselineMethod(JSContext *cx, RunState &state)
   1.148 +{
   1.149 +    BaselineScript *baseline = state.script()->baselineScript();
   1.150 +
   1.151 +    EnterJitData data(cx);
   1.152 +    data.jitcode = baseline->method()->raw();
   1.153 +
   1.154 +    AutoValueVector vals(cx);
   1.155 +    if (!SetEnterJitData(cx, data, state, vals))
   1.156 +        return IonExec_Error;
   1.157 +
   1.158 +    IonExecStatus status = EnterBaseline(cx, data);
   1.159 +    if (status != IonExec_Ok)
   1.160 +        return status;
   1.161 +
   1.162 +    state.setReturnValue(data.result);
   1.163 +    return IonExec_Ok;
   1.164 +}
   1.165 +
// On-stack replacement: enter baseline jitcode from the interpreter at a
// loop head (JSOP_LOOPENTRY) in the currently running frame |fp|.
IonExecStatus
jit::EnterBaselineAtBranch(JSContext *cx, InterpreterFrame *fp, jsbytecode *pc)
{
    JS_ASSERT(JSOp(*pc) == JSOP_LOOPENTRY);

    BaselineScript *baseline = fp->script()->baselineScript();

    EnterJitData data(cx);
    data.jitcode = baseline->nativeCodeForPC(fp->script(), pc);

    // Skip debug breakpoint/trap handler, the interpreter already handled it
    // for the current op.
    if (cx->compartment()->debugMode())
        data.jitcode += MacroAssembler::ToggledCallSize();

    data.osrFrame = fp;
    // Fixed slots plus the interpreter's current expression-stack depth.
    data.osrNumStackValues = fp->script()->nfixed() + cx->interpreterRegs().stackDepth();

    // Keeps |this| alive for the eval/global branch below, where maxArgv
    // points at this rooted value.
    RootedValue thisv(cx);

    if (fp->isNonEvalFunctionFrame()) {
        data.constructing = fp->isConstructing();
        data.numActualArgs = fp->numActualArgs();
        data.maxArgc = Max(fp->numActualArgs(), fp->numFormalArgs()) + 1; // +1 = include |this|
        data.maxArgv = fp->argv() - 1; // -1 = include |this|
        data.scopeChain = nullptr;
        data.calleeToken = CalleeToToken(&fp->callee());
    } else {
        thisv = fp->thisValue();
        data.constructing = false;
        data.numActualArgs = 0;
        data.maxArgc = 1;
        data.maxArgv = thisv.address();
        data.scopeChain = fp->scopeChain();

        // For eval function frames, set the callee token to the enclosing function.
        if (fp->isFunctionFrame())
            data.calleeToken = CalleeToToken(&fp->callee());
        else
            data.calleeToken = CalleeToToken(fp->script());
    }

    // Switch trace logging from the interpreter event to baseline.
    TraceLogger *logger = TraceLoggerForMainThread(cx->runtime());
    TraceLogStopEvent(logger, TraceLogger::Interpreter);
    TraceLogStartEvent(logger, TraceLogger::Baseline);

    IonExecStatus status = EnterBaseline(cx, data);
    if (status != IonExec_Ok)
        return status;

    fp->setReturnValue(data.result);
    return IonExec_Ok;
}
   1.219 +
// Compile |script| with the baseline compiler. On Method_CantCompile the
// script is permanently flagged as baseline-disabled so we never retry.
MethodStatus
jit::BaselineCompile(JSContext *cx, JSScript *script)
{
    JS_ASSERT(!script->hasBaselineScript());
    JS_ASSERT(script->canBaselineCompile());
    JS_ASSERT(IsBaselineEnabled(cx));
    LifoAlloc alloc(BASELINE_LIFO_ALLOC_PRIMARY_CHUNK_SIZE);

    script->ensureNonLazyCanonicalFunction(cx);

    // The TempAllocator lives inside the LifoAlloc it wraps, so it is freed
    // with the arena when this function returns.
    TempAllocator *temp = alloc.new_<TempAllocator>(&alloc);
    if (!temp)
        return Method_Error;

    IonContext ictx(cx, temp);

    BaselineCompiler compiler(cx, *temp, script);
    if (!compiler.init())
        return Method_Error;

    MethodStatus status = compiler.compile();

    // A successful compile must have attached a BaselineScript; a failed one
    // must not have.
    JS_ASSERT_IF(status == Method_Compiled, script->hasBaselineScript());
    JS_ASSERT_IF(status != Method_Compiled, !script->hasBaselineScript());

    if (status == Method_CantCompile)
        script->setBaselineScript(cx, BASELINE_DISABLED_SCRIPT);

    return status;
}
   1.250 +
// Decide whether |script| can be entered via baseline, compiling it if
// warranted. |osr| indicates the caller wants to enter at a loop head.
// Returns Method_Compiled when a BaselineScript is available, Method_Skipped
// when the script should keep interpreting for now.
static MethodStatus
CanEnterBaselineJIT(JSContext *cx, HandleScript script, bool osr)
{
    JS_ASSERT(jit::IsBaselineEnabled(cx));

    // Skip if the script has been disabled.
    if (!script->canBaselineCompile())
        return Method_Skipped;

    if (script->length() > BaselineScript::MAX_JSSCRIPT_LENGTH)
        return Method_CantCompile;

    if (script->nslots() > BaselineScript::MAX_JSSCRIPT_SLOTS)
        return Method_CantCompile;

    if (!cx->compartment()->ensureJitCompartmentExists(cx))
        return Method_Error;

    if (script->hasBaselineScript())
        return Method_Compiled;

    // Check script use count. However, always eagerly compile scripts if JSD
    // is enabled, so that we don't have to OSR and don't have to update the
    // frame pointer stored in JSD's frames list.
    //
    // Also eagerly compile if we are in parallel warmup, the point of which
    // is to gather type information so that the script may be compiled for
    // parallel execution. We want to avoid the situation of OSRing during
    // warmup and only gathering type information for the loop, and not the
    // rest of the function.
    if (IsJSDEnabled(cx) || cx->runtime()->forkJoinWarmup > 0) {
        if (osr)
            return Method_Skipped;
    } else if (script->incUseCount() <= js_JitOptions.baselineUsesBeforeCompile) {
        return Method_Skipped;
    }

    if (script->isCallsiteClone()) {
        // Ensure the original function is compiled too, so that bailouts from
        // Ion code have a BaselineScript to resume into.
        RootedScript original(cx, script->donorFunction()->nonLazyScript());
        JS_ASSERT(original != script);

        if (!original->canBaselineCompile())
            return Method_CantCompile;

        if (!original->hasBaselineScript()) {
            MethodStatus status = BaselineCompile(cx, original);
            if (status != Method_Compiled)
                return status;
        }
    }

    return BaselineCompile(cx, script);
}
   1.306 +
   1.307 +MethodStatus
   1.308 +jit::CanEnterBaselineAtBranch(JSContext *cx, InterpreterFrame *fp, bool newType)
   1.309 +{
   1.310 +   // If constructing, allocate a new |this| object.
   1.311 +   if (fp->isConstructing() && fp->functionThis().isPrimitive()) {
   1.312 +       RootedObject callee(cx, &fp->callee());
   1.313 +       RootedObject obj(cx, CreateThisForFunction(cx, callee, newType ? SingletonObject : GenericObject));
   1.314 +       if (!obj)
   1.315 +           return Method_Skipped;
   1.316 +       fp->functionThis().setObject(*obj);
   1.317 +   }
   1.318 +
   1.319 +   if (!CheckFrame(fp))
   1.320 +       return Method_CantCompile;
   1.321 +
   1.322 +   RootedScript script(cx, fp->script());
   1.323 +   return CanEnterBaselineJIT(cx, script, /* osr = */true);
   1.324 +}
   1.325 +
   1.326 +MethodStatus
   1.327 +jit::CanEnterBaselineMethod(JSContext *cx, RunState &state)
   1.328 +{
   1.329 +    if (state.isInvoke()) {
   1.330 +        InvokeState &invoke = *state.asInvoke();
   1.331 +
   1.332 +        if (invoke.args().length() > BASELINE_MAX_ARGS_LENGTH) {
   1.333 +            IonSpew(IonSpew_BaselineAbort, "Too many arguments (%u)", invoke.args().length());
   1.334 +            return Method_CantCompile;
   1.335 +        }
   1.336 +
   1.337 +        // If constructing, allocate a new |this| object.
   1.338 +        if (invoke.constructing() && invoke.args().thisv().isPrimitive()) {
   1.339 +            RootedObject callee(cx, &invoke.args().callee());
   1.340 +            RootedObject obj(cx, CreateThisForFunction(cx, callee,
   1.341 +                                                       invoke.useNewType()
   1.342 +                                                       ? SingletonObject
   1.343 +                                                       : GenericObject));
   1.344 +            if (!obj)
   1.345 +                return Method_Skipped;
   1.346 +            invoke.args().setThis(ObjectValue(*obj));
   1.347 +        }
   1.348 +    } else if (state.isExecute()) {
   1.349 +        ExecuteType type = state.asExecute()->type();
   1.350 +        if (type == EXECUTE_DEBUG || type == EXECUTE_DEBUG_GLOBAL) {
   1.351 +            IonSpew(IonSpew_BaselineAbort, "debugger frame");
   1.352 +            return Method_CantCompile;
   1.353 +        }
   1.354 +    } else {
   1.355 +        JS_ASSERT(state.isGenerator());
   1.356 +        IonSpew(IonSpew_BaselineAbort, "generator frame");
   1.357 +        return Method_CantCompile;
   1.358 +    }
   1.359 +
   1.360 +    RootedScript script(cx, state.script());
   1.361 +    return CanEnterBaselineJIT(cx, script, /* osr = */false);
   1.362 +};
   1.363 +
// Allocate a BaselineScript together with its variable-length trailing data
// (IC entries, PC mapping index, PC mapping buffer, bytecode type map) in a
// single malloc'd buffer, recording the offset of each section.
BaselineScript *
BaselineScript::New(JSContext *cx, uint32_t prologueOffset, uint32_t epilogueOffset,
                    uint32_t spsPushToggleOffset, uint32_t postDebugPrologueOffset,
                    size_t icEntries, size_t pcMappingIndexEntries, size_t pcMappingSize,
                    size_t bytecodeTypeMapEntries)
{
    static const unsigned DataAlignment = sizeof(uintptr_t);

    size_t paddedBaselineScriptSize = AlignBytes(sizeof(BaselineScript), DataAlignment);

    size_t icEntriesSize = icEntries * sizeof(ICEntry);
    size_t pcMappingIndexEntriesSize = pcMappingIndexEntries * sizeof(PCMappingIndexEntry);
    size_t bytecodeTypeMapSize = bytecodeTypeMapEntries * sizeof(uint32_t);

    // Each section is padded so the next one starts pointer-aligned.
    size_t paddedICEntriesSize = AlignBytes(icEntriesSize, DataAlignment);
    size_t paddedPCMappingIndexEntriesSize = AlignBytes(pcMappingIndexEntriesSize, DataAlignment);
    size_t paddedPCMappingSize = AlignBytes(pcMappingSize, DataAlignment);
    size_t paddedBytecodeTypesMapSize = AlignBytes(bytecodeTypeMapSize, DataAlignment);

    // NOTE(review): the section sizes are summed without an explicit overflow
    // check; presumably the compiler-side MAX_JSSCRIPT_LENGTH/SLOTS limits
    // keep this in range — confirm against the callers.
    size_t allocBytes = paddedBaselineScriptSize +
        paddedICEntriesSize +
        paddedPCMappingIndexEntriesSize +
        paddedPCMappingSize +
        paddedBytecodeTypesMapSize;

    uint8_t *buffer = (uint8_t *)cx->malloc_(allocBytes);
    if (!buffer)
        return nullptr;

    // Placement-new the header at the front of the buffer.
    BaselineScript *script = reinterpret_cast<BaselineScript *>(buffer);
    new (script) BaselineScript(prologueOffset, epilogueOffset,
                                spsPushToggleOffset, postDebugPrologueOffset);

    size_t offsetCursor = paddedBaselineScriptSize;

    script->icEntriesOffset_ = offsetCursor;
    script->icEntries_ = icEntries;
    offsetCursor += paddedICEntriesSize;

    script->pcMappingIndexOffset_ = offsetCursor;
    script->pcMappingIndexEntries_ = pcMappingIndexEntries;
    offsetCursor += paddedPCMappingIndexEntriesSize;

    script->pcMappingOffset_ = offsetCursor;
    script->pcMappingSize_ = pcMappingSize;
    offsetCursor += paddedPCMappingSize;

    // A zero offset means "no bytecode type map".
    script->bytecodeTypeMapOffset_ = bytecodeTypeMapEntries ? offsetCursor : 0;

    return script;
}
   1.415 +
   1.416 +void
   1.417 +BaselineScript::trace(JSTracer *trc)
   1.418 +{
   1.419 +    MarkJitCode(trc, &method_, "baseline-method");
   1.420 +    if (templateScope_)
   1.421 +        MarkObject(trc, &templateScope_, "baseline-template-scope");
   1.422 +
   1.423 +    // Mark all IC stub codes hanging off the IC stub entries.
   1.424 +    for (size_t i = 0; i < numICEntries(); i++) {
   1.425 +        ICEntry &ent = icEntry(i);
   1.426 +        if (!ent.hasStub())
   1.427 +            continue;
   1.428 +        for (ICStub *stub = ent.firstStub(); stub; stub = stub->next())
   1.429 +            stub->trace(trc);
   1.430 +    }
   1.431 +}
   1.432 +
/* static */
// Incremental-GC pre-barrier: if the zone is currently collecting
// incrementally, trace the script so the collector sees its old edges.
void
BaselineScript::writeBarrierPre(Zone *zone, BaselineScript *script)
{
#ifdef JSGC_INCREMENTAL
    if (zone->needsBarrier())
        script->trace(zone->barrierTracer());
#endif
}
   1.442 +
// Static trampoline so GC code can trace a BaselineScript without calling a
// member function directly.
void
BaselineScript::Trace(JSTracer *trc, BaselineScript *script)
{
    script->trace(trc);
}
   1.448 +
// Free a BaselineScript (header plus its trailing data, which share one
// allocation) via the FreeOp.
void
BaselineScript::Destroy(FreeOp *fop, BaselineScript *script)
{
#ifdef JSGC_GENERATIONAL
    /*
     * When the script contains pointers to nursery things, the store buffer
     * will contain entries refering to the referenced things. Since we can
     * destroy scripts outside the context of a GC, this situation can result
     * in invalid store buffer entries. Assert that if we do destroy scripts
     * outside of a GC that we at least emptied the nursery first.
     */
    JS_ASSERT(fop->runtime()->gcNursery.isEmpty());
#endif
    fop->delete_(script);
}
   1.464 +
// Return the |index|-th IC entry from this script's trailing IC entry table.
ICEntry &
BaselineScript::icEntry(size_t index)
{
    JS_ASSERT(index < numICEntries());
    return icEntryList()[index];
}
   1.471 +
// Return the |index|-th PC mapping index entry (one per compact-buffer
// region) from the trailing index table.
PCMappingIndexEntry &
BaselineScript::pcMappingIndexEntry(size_t index)
{
    JS_ASSERT(index < numPCMappingIndexEntries());
    return pcMappingIndexEntryList()[index];
}
   1.478 +
   1.479 +CompactBufferReader
   1.480 +BaselineScript::pcMappingReader(size_t indexEntry)
   1.481 +{
   1.482 +    PCMappingIndexEntry &entry = pcMappingIndexEntry(indexEntry);
   1.483 +
   1.484 +    uint8_t *dataStart = pcMappingData() + entry.bufferOffset;
   1.485 +    uint8_t *dataEnd = (indexEntry == numPCMappingIndexEntries() - 1)
   1.486 +        ? pcMappingData() + pcMappingSize_
   1.487 +        : pcMappingData() + pcMappingIndexEntry(indexEntry + 1).bufferOffset;
   1.488 +
   1.489 +    return CompactBufferReader(dataStart, dataEnd);
   1.490 +}
   1.491 +
// Binary-search the IC entry table (sorted by return offset) for the entry
// whose return offset equals |returnOffset|. Returns nullptr if no entry
// matches exactly.
ICEntry *
BaselineScript::maybeICEntryFromReturnOffset(CodeOffsetLabel returnOffset)
{
    // Standard lower-bound search: after the loop, |mid| is the first entry
    // with returnOffset >= the target (or numICEntries() if none).
    size_t bottom = 0;
    size_t top = numICEntries();
    size_t mid = bottom + (top - bottom) / 2;
    while (mid < top) {
        ICEntry &midEntry = icEntry(mid);
        if (midEntry.returnOffset().offset() < returnOffset.offset())
            bottom = mid + 1;
        else // if (midEntry.returnOffset().offset() >= returnOffset.offset())
            top = mid;
        mid = bottom + (top - bottom) / 2;
    }
    if (mid >= numICEntries())
        return nullptr;

    if (icEntry(mid).returnOffset().offset() != returnOffset.offset())
        return nullptr;

    return &icEntry(mid);
}
   1.514 +
// Infallible variant of maybeICEntryFromReturnOffset: the caller guarantees
// a matching entry exists.
ICEntry &
BaselineScript::icEntryFromReturnOffset(CodeOffsetLabel returnOffset)
{
    ICEntry *result = maybeICEntryFromReturnOffset(returnOffset);
    JS_ASSERT(result);
    return *result;
}
   1.522 +
// Compute the native return address for an IC entry: code base plus the
// entry's recorded return offset.
uint8_t *
BaselineScript::returnAddressForIC(const ICEntry &ent)
{
    return method()->raw() + ent.returnOffset().offset();
}
   1.528 +
// Find the IC entry for the op at |pcOffset|. Entries are sorted by PC
// offset but several entries can share one offset; only the entry with
// isForOp() set is wanted. Crashes (unreachable) if none matches.
ICEntry &
BaselineScript::icEntryFromPCOffset(uint32_t pcOffset)
{
    // Multiple IC entries can have the same PC offset, but this method only looks for
    // those which have isForOp() set.
    size_t bottom = 0;
    size_t top = numICEntries();
    size_t mid = bottom + (top - bottom) / 2;
    while (mid < top) {
        ICEntry &midEntry = icEntry(mid);
        if (midEntry.pcOffset() < pcOffset)
            bottom = mid + 1;
        else if (midEntry.pcOffset() > pcOffset)
            top = mid;
        else
            break;
        mid = bottom + (top - bottom) / 2;
    }
    // Found an IC entry with a matching PC offset.  Search backward, and then
    // forward from this IC entry, looking for one with the same PC offset which
    // has isForOp() set.
    //
    // Note: the backward loop exploits unsigned wraparound — when |i|
    // decrements past 0 it wraps to SIZE_MAX, which fails the
    // |i < numICEntries()| bound and ends the loop.
    for (size_t i = mid; i < numICEntries() && icEntry(i).pcOffset() == pcOffset; i--) {
        if (icEntry(i).isForOp())
            return icEntry(i);
    }
    for (size_t i = mid+1; i < numICEntries() && icEntry(i).pcOffset() == pcOffset; i++) {
        if (icEntry(i).isForOp())
            return icEntry(i);
    }
    MOZ_ASSUME_UNREACHABLE("Invalid PC offset for IC entry.");
}
   1.560 +
// As icEntryFromPCOffset(uint32_t), but when a previously looked-up entry is
// close by (within 10 bytecode bytes), scan forward linearly from it instead
// of doing a full binary search.
ICEntry &
BaselineScript::icEntryFromPCOffset(uint32_t pcOffset, ICEntry *prevLookedUpEntry)
{
    // Do a linear forward search from the last queried PC offset, or fallback to a
    // binary search if the last offset is too far away.
    if (prevLookedUpEntry && pcOffset >= prevLookedUpEntry->pcOffset() &&
        (pcOffset - prevLookedUpEntry->pcOffset()) <= 10)
    {
        ICEntry *firstEntry = &icEntry(0);
        ICEntry *lastEntry = &icEntry(numICEntries() - 1);
        ICEntry *curEntry = prevLookedUpEntry;
        while (curEntry >= firstEntry && curEntry <= lastEntry) {
            if (curEntry->pcOffset() == pcOffset && curEntry->isForOp())
                break;
            curEntry++;
        }
        // The matching for-op entry must exist within the table bounds.
        JS_ASSERT(curEntry->pcOffset() == pcOffset && curEntry->isForOp());
        return *curEntry;
    }

    return icEntryFromPCOffset(pcOffset);
}
   1.583 +
// Translate a native return address inside this script's code into an IC
// entry, or nullptr if no entry has that return offset.
ICEntry *
BaselineScript::maybeICEntryFromReturnAddress(uint8_t *returnAddr)
{
    JS_ASSERT(returnAddr > method_->raw());
    JS_ASSERT(returnAddr < method_->raw() + method_->instructionsSize());
    CodeOffsetLabel offset(returnAddr - method_->raw());
    return maybeICEntryFromReturnOffset(offset);
}
   1.592 +
// Infallible variant of maybeICEntryFromReturnAddress: the caller guarantees
// a matching IC entry exists for |returnAddr|.
ICEntry &
BaselineScript::icEntryFromReturnAddress(uint8_t *returnAddr)
{
    JS_ASSERT(returnAddr > method_->raw());
    JS_ASSERT(returnAddr < method_->raw() + method_->instructionsSize());
    CodeOffsetLabel offset(returnAddr - method_->raw());
    return icEntryFromReturnOffset(offset);
}
   1.601 +
// Copy the compiler's temporary IC entries into this script's trailing IC
// table, fixing up masm-relative return offsets and back-pointers from
// fallback/monitor/table-switch stubs to their final ICEntry addresses.
void
BaselineScript::copyICEntries(JSScript *script, const ICEntry *entries, MacroAssembler &masm)
{
    // Fix up the return offset in the IC entries and copy them in.
    // Also write out the IC entry ptrs in any fallback stubs that were added.
    for (uint32_t i = 0; i < numICEntries(); i++) {
        ICEntry &realEntry = icEntry(i);
        realEntry = entries[i];
        realEntry.fixupReturnOffset(masm);

        if (!realEntry.hasStub()) {
            // VM call without any stubs.
            continue;
        }

        // If the attached stub is a fallback stub, then fix it up with
        // a pointer to the (now available) realEntry.
        if (realEntry.firstStub()->isFallback())
            realEntry.firstStub()->toFallbackStub()->fixupICEntry(&realEntry);

        if (realEntry.firstStub()->isTypeMonitor_Fallback()) {
            ICTypeMonitor_Fallback *stub = realEntry.firstStub()->toTypeMonitor_Fallback();
            stub->fixupICEntry(&realEntry);
        }

        if (realEntry.firstStub()->isTableSwitch()) {
            ICTableSwitch *stub = realEntry.firstStub()->toTableSwitch();
            stub->fixupJumpTable(script, this);
        }
    }
}
   1.633 +
// Take ownership of the fallback stubs allocated during compilation by
// transferring |stubSpace|'s memory into this script's stub space.
void
BaselineScript::adoptFallbackStubs(FallbackICStubSpace *stubSpace)
{
    fallbackStubSpace_.adoptFrom(stubSpace);
}
   1.639 +
// Copy the compiler's compact PC-mapping buffer into this script's trailing
// storage. The sizes must match what BaselineScript::New reserved.
void
BaselineScript::copyPCMappingEntries(const CompactBufferWriter &entries)
{
    JS_ASSERT(entries.length() > 0);
    JS_ASSERT(entries.length() == pcMappingSize_);

    memcpy(pcMappingData(), entries.buffer(), entries.length());
}
   1.648 +
   1.649 +void
   1.650 +BaselineScript::copyPCMappingIndexEntries(const PCMappingIndexEntry *entries)
   1.651 +{
   1.652 +    for (uint32_t i = 0; i < numPCMappingIndexEntries(); i++)
   1.653 +        pcMappingIndexEntry(i) = entries[i];
   1.654 +}
   1.655 +
// Map a bytecode |pc| to its native code address, optionally reporting the
// slot-location info recorded for that pc. Finds the index region covering
// the pc, then walks the region's compact buffer op by op.
uint8_t *
BaselineScript::nativeCodeForPC(JSScript *script, jsbytecode *pc, PCMappingSlotInfo *slotInfo)
{
    JS_ASSERT_IF(script->hasBaselineScript(), script->baselineScript() == this);

    uint32_t pcOffset = script->pcToOffset(pc);

    // Look for the first PCMappingIndexEntry with pc > the pc we are
    // interested in.
    uint32_t i = 1;
    for (; i < numPCMappingIndexEntries(); i++) {
        if (pcMappingIndexEntry(i).pcOffset > pcOffset)
            break;
    }

    // The previous entry contains the current pc.
    JS_ASSERT(i > 0);
    i--;

    PCMappingIndexEntry &entry = pcMappingIndexEntry(i);
    JS_ASSERT(pcOffset >= entry.pcOffset);

    CompactBufferReader reader(pcMappingReader(i));
    jsbytecode *curPC = script->offsetToPC(entry.pcOffset);
    uint32_t nativeOffset = entry.nativeOffset;

    JS_ASSERT(script->containsPC(curPC));
    JS_ASSERT(curPC <= pc);

    // Decode one byte per bytecode op: the low 7 bits are slot info, the
    // high bit flags a following native-offset delta.
    while (true) {
        // If the high bit is set, the native offset relative to the
        // previous pc != 0 and comes next.
        uint8_t b = reader.readByte();
        if (b & 0x80)
            nativeOffset += reader.readUnsigned();

        if (curPC == pc) {
            if (slotInfo)
                *slotInfo = PCMappingSlotInfo(b & ~0x80);
            return method_->raw() + nativeOffset;
        }

        curPC += GetBytecodeLength(curPC);
    }

    MOZ_ASSUME_UNREACHABLE("Invalid pc");
}
   1.703 +
// Inverse of nativeCodeForPC: map a native code offset back to the bytecode
// pc whose code starts at exactly that offset.
jsbytecode *
BaselineScript::pcForReturnOffset(JSScript *script, uint32_t nativeOffset)
{
    JS_ASSERT(script->baselineScript() == this);
    JS_ASSERT(nativeOffset < method_->instructionsSize());

    // Look for the first PCMappingIndexEntry with native offset > the native offset we are
    // interested in.
    uint32_t i = 1;
    for (; i < numPCMappingIndexEntries(); i++) {
        if (pcMappingIndexEntry(i).nativeOffset > nativeOffset)
            break;
    }

    // Go back an entry to search forward from.
    JS_ASSERT(i > 0);
    i--;

    PCMappingIndexEntry &entry = pcMappingIndexEntry(i);
    JS_ASSERT(nativeOffset >= entry.nativeOffset);

    CompactBufferReader reader(pcMappingReader(i));
    jsbytecode *curPC = script->offsetToPC(entry.pcOffset);
    uint32_t curNativeOffset = entry.nativeOffset;

    JS_ASSERT(script->containsPC(curPC));
    JS_ASSERT(curNativeOffset <= nativeOffset);

    // Walk the compact buffer accumulating native offsets until we reach the
    // requested one.
    while (true) {
        // If the high bit is set, the native offset relative to the
        // previous pc != 0 and comes next.
        uint8_t b = reader.readByte();
        if (b & 0x80)
            curNativeOffset += reader.readUnsigned();

        if (curNativeOffset == nativeOffset)
            return curPC;

        curPC += GetBytecodeLength(curPC);
    }

    MOZ_ASSUME_UNREACHABLE("Invalid pc");
}
   1.747 +
// Convenience wrapper: translate a native return address inside this
// script's code into a bytecode pc.
jsbytecode *
BaselineScript::pcForReturnAddress(JSScript *script, uint8_t *nativeAddress)
{
    JS_ASSERT(script->baselineScript() == this);
    JS_ASSERT(nativeAddress >= method_->raw());
    JS_ASSERT(nativeAddress < method_->raw() + method_->instructionsSize());
    return pcForReturnOffset(script, uint32_t(nativeAddress - method_->raw()));
}
   1.756 +
// Enable or disable the patchable debug-trap calls baked into this script's
// jitcode. If |pc| is non-null only the trap at that bytecode is considered;
// if null, every mapped pc is visited. A trap is enabled when the script is
// in step mode and the pc starts a new source line, or when a breakpoint is
// set at that pc.
void
BaselineScript::toggleDebugTraps(JSScript *script, jsbytecode *pc)
{
    JS_ASSERT(script->baselineScript() == this);

    // Only scripts compiled for debug mode have toggled calls.
    if (!debugMode())
        return;

    // Scans the source notes to detect which pcs begin a source line; used
    // below to decide whether step mode should trap at a given pc.
    SrcNoteLineScanner scanner(script->notes(), script->lineno());

    // Walk every pc-mapping region, decoding pc/native-offset pairs in the
    // same compact format used by pcForReturnOffset.
    for (uint32_t i = 0; i < numPCMappingIndexEntries(); i++) {
        PCMappingIndexEntry &entry = pcMappingIndexEntry(i);

        CompactBufferReader reader(pcMappingReader(i));
        jsbytecode *curPC = script->offsetToPC(entry.pcOffset);
        uint32_t nativeOffset = entry.nativeOffset;

        JS_ASSERT(script->containsPC(curPC));

        while (reader.more()) {
            // High bit of the header byte means a non-zero native-offset
            // delta follows as an unsigned value.
            uint8_t b = reader.readByte();
            if (b & 0x80)
                nativeOffset += reader.readUnsigned();

            // Keep the line scanner in sync with the pc being visited.
            scanner.advanceTo(script->pcToOffset(curPC));

            if (!pc || pc == curPC) {
                bool enabled = (script->stepModeEnabled() && scanner.isLineHeader()) ||
                    script->hasBreakpointsAt(curPC);

                // Patch the trap.
                CodeLocationLabel label(method(), nativeOffset);
                Assembler::ToggleCall(label, enabled);
            }

            curPC += GetBytecodeLength(curPC);
        }
    }
}
   1.797 +
   1.798 +void
   1.799 +BaselineScript::toggleSPS(bool enable)
   1.800 +{
   1.801 +    JS_ASSERT(enable == !(bool)spsOn_);
   1.802 +
   1.803 +    IonSpew(IonSpew_BaselineIC, "  toggling SPS %s for BaselineScript %p",
   1.804 +            enable ? "on" : "off", this);
   1.805 +
   1.806 +    // Toggle the jump
   1.807 +    CodeLocationLabel pushToggleLocation(method_, CodeOffsetLabel(spsPushToggleOffset_));
   1.808 +    if (enable)
   1.809 +        Assembler::ToggleToCmp(pushToggleLocation);
   1.810 +    else
   1.811 +        Assembler::ToggleToJmp(pushToggleLocation);
   1.812 +#ifdef DEBUG
   1.813 +    spsOn_ = enable;
   1.814 +#endif
   1.815 +}
   1.816 +
// Unlink every IC stub that was allocated in the optimized stub space,
// leaving only stubs allocated in the fallback space on each IC chain. Called
// when the optimized stub space is about to be discarded but this
// BaselineScript itself must survive (it is still active on the stack).
void
BaselineScript::purgeOptimizedStubs(Zone *zone)
{
    IonSpew(IonSpew_BaselineIC, "Purging optimized stubs");

    for (size_t i = 0; i < numICEntries(); i++) {
        ICEntry &entry = icEntry(i);
        if (!entry.hasStub())
            continue;

        // Find the terminal stub on this entry's chain; its kind decides how
        // the chain is purged below.
        ICStub *lastStub = entry.firstStub();
        while (lastStub->next())
            lastStub = lastStub->next();

        if (lastStub->isFallback()) {
            // Unlink all stubs allocated in the optimized space.
            ICStub *stub = entry.firstStub();
            ICStub *prev = nullptr;

            // Walk up to (but not including) the trailing fallback stub.
            // After unlinking, |prev| deliberately stays put: the previous
            // surviving stub is still the one before the next candidate.
            while (stub->next()) {
                if (!stub->allocatedInFallbackSpace()) {
                    lastStub->toFallbackStub()->unlinkStub(zone, prev, stub);
                    stub = stub->next();
                    continue;
                }

                prev = stub;
                stub = stub->next();
            }

            if (lastStub->isMonitoredFallback()) {
                // Monitor stubs can't make calls, so are always in the
                // optimized stub space.
                ICTypeMonitor_Fallback *lastMonStub =
                    lastStub->toMonitoredFallbackStub()->fallbackMonitorStub();
                lastMonStub->resetMonitorStubChain(zone);
            }
        } else if (lastStub->isTypeMonitor_Fallback()) {
            // A bare type-monitor chain: drop all of its monitor stubs.
            lastStub->toTypeMonitor_Fallback()->resetMonitorStubChain(zone);
        } else {
            // The only other chain shape ending without a fallback stub.
            JS_ASSERT(lastStub->isTableSwitch());
        }
    }

#ifdef DEBUG
    // All remaining stubs must be allocated in the fallback space.
    for (size_t i = 0; i < numICEntries(); i++) {
        ICEntry &entry = icEntry(i);
        if (!entry.hasStub())
            continue;

        ICStub *stub = entry.firstStub();
        while (stub->next()) {
            JS_ASSERT(stub->allocatedInFallbackSpace());
            stub = stub->next();
        }
    }
#endif
}
   1.876 +
   1.877 +void
   1.878 +jit::FinishDiscardBaselineScript(FreeOp *fop, JSScript *script)
   1.879 +{
   1.880 +    if (!script->hasBaselineScript())
   1.881 +        return;
   1.882 +
   1.883 +    if (script->baselineScript()->active()) {
   1.884 +        // Script is live on the stack. Keep the BaselineScript, but destroy
   1.885 +        // stubs allocated in the optimized stub space.
   1.886 +        script->baselineScript()->purgeOptimizedStubs(script->zone());
   1.887 +
   1.888 +        // Reset |active| flag so that we don't need a separate script
   1.889 +        // iteration to unmark them.
   1.890 +        script->baselineScript()->resetActive();
   1.891 +        return;
   1.892 +    }
   1.893 +
   1.894 +    BaselineScript *baseline = script->baselineScript();
   1.895 +    script->setBaselineScript(nullptr, nullptr);
   1.896 +    BaselineScript::Destroy(fop, baseline);
   1.897 +}
   1.898 +
   1.899 +void
   1.900 +jit::JitCompartment::toggleBaselineStubBarriers(bool enabled)
   1.901 +{
   1.902 +    for (ICStubCodeMap::Enum e(*stubCodes_); !e.empty(); e.popFront()) {
   1.903 +        JitCode *code = *e.front().value().unsafeGet();
   1.904 +        code->togglePreBarriers(enabled);
   1.905 +    }
   1.906 +}
   1.907 +
   1.908 +void
   1.909 +jit::AddSizeOfBaselineData(JSScript *script, mozilla::MallocSizeOf mallocSizeOf, size_t *data,
   1.910 +                           size_t *fallbackStubs)
   1.911 +{
   1.912 +    if (script->hasBaselineScript())
   1.913 +        script->baselineScript()->addSizeOfIncludingThis(mallocSizeOf, data, fallbackStubs);
   1.914 +}
   1.915 +
   1.916 +void
   1.917 +jit::ToggleBaselineSPS(JSRuntime *runtime, bool enable)
   1.918 +{
   1.919 +    for (ZonesIter zone(runtime, SkipAtoms); !zone.done(); zone.next()) {
   1.920 +        for (gc::CellIter i(zone, gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
   1.921 +            JSScript *script = i.get<JSScript>();
   1.922 +            if (!script->hasBaselineScript())
   1.923 +                continue;
   1.924 +            script->baselineScript()->toggleSPS(enable);
   1.925 +        }
   1.926 +    }
   1.927 +}
   1.928 +
   1.929 +static void
   1.930 +MarkActiveBaselineScripts(JSRuntime *rt, const JitActivationIterator &activation)
   1.931 +{
   1.932 +    for (jit::JitFrameIterator iter(activation); !iter.done(); ++iter) {
   1.933 +        switch (iter.type()) {
   1.934 +          case JitFrame_BaselineJS:
   1.935 +            iter.script()->baselineScript()->setActive();
   1.936 +            break;
   1.937 +          case JitFrame_IonJS: {
   1.938 +            // Keep the baseline script around, since bailouts from the ion
   1.939 +            // jitcode might need to re-enter into the baseline jitcode.
   1.940 +            iter.script()->baselineScript()->setActive();
   1.941 +            for (InlineFrameIterator inlineIter(rt, &iter); inlineIter.more(); ++inlineIter)
   1.942 +                inlineIter.script()->baselineScript()->setActive();
   1.943 +            break;
   1.944 +          }
   1.945 +          default:;
   1.946 +        }
   1.947 +    }
   1.948 +}
   1.949 +
   1.950 +void
   1.951 +jit::MarkActiveBaselineScripts(Zone *zone)
   1.952 +{
   1.953 +    JSRuntime *rt = zone->runtimeFromMainThread();
   1.954 +    for (JitActivationIterator iter(rt); !iter.done(); ++iter) {
   1.955 +        if (iter->compartment()->zone() == zone)
   1.956 +            MarkActiveBaselineScripts(rt, iter);
   1.957 +    }
   1.958 +}

mercurial