michael@0: /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- michael@0: * vim: set ts=8 sts=4 et sw=4 tw=99: michael@0: * This Source Code Form is subject to the terms of the Mozilla Public michael@0: * License, v. 2.0. If a copy of the MPL was not distributed with this michael@0: * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ michael@0: michael@0: #include "jit/BaselineJIT.h" michael@0: michael@0: #include "mozilla/MemoryReporting.h" michael@0: michael@0: #include "jit/BaselineCompiler.h" michael@0: #include "jit/BaselineIC.h" michael@0: #include "jit/CompileInfo.h" michael@0: #include "jit/IonSpewer.h" michael@0: #include "jit/JitCommon.h" michael@0: #include "vm/Interpreter.h" michael@0: #include "vm/TraceLogging.h" michael@0: michael@0: #include "jsgcinlines.h" michael@0: #include "jsobjinlines.h" michael@0: #include "jsopcodeinlines.h" michael@0: #include "jsscriptinlines.h" michael@0: michael@0: #include "jit/IonFrames-inl.h" michael@0: #include "vm/Stack-inl.h" michael@0: michael@0: using namespace js; michael@0: using namespace js::jit; michael@0: michael@0: /* static */ PCMappingSlotInfo::SlotLocation michael@0: PCMappingSlotInfo::ToSlotLocation(const StackValue *stackVal) michael@0: { michael@0: if (stackVal->kind() == StackValue::Register) { michael@0: if (stackVal->reg() == R0) michael@0: return SlotInR0; michael@0: JS_ASSERT(stackVal->reg() == R1); michael@0: return SlotInR1; michael@0: } michael@0: JS_ASSERT(stackVal->kind() != StackValue::Stack); michael@0: return SlotIgnore; michael@0: } michael@0: michael@0: BaselineScript::BaselineScript(uint32_t prologueOffset, uint32_t epilogueOffset, michael@0: uint32_t spsPushToggleOffset, uint32_t postDebugPrologueOffset) michael@0: : method_(nullptr), michael@0: templateScope_(nullptr), michael@0: fallbackStubSpace_(), michael@0: prologueOffset_(prologueOffset), michael@0: epilogueOffset_(epilogueOffset), michael@0: #ifdef DEBUG michael@0: spsOn_(false), michael@0: #endif michael@0: spsPushToggleOffset_(spsPushToggleOffset), michael@0: postDebugPrologueOffset_(postDebugPrologueOffset), michael@0: flags_(0) michael@0: { } michael@0: michael@0: static const size_t BASELINE_LIFO_ALLOC_PRIMARY_CHUNK_SIZE = 4096; michael@0: static const unsigned BASELINE_MAX_ARGS_LENGTH = 20000; michael@0: michael@0: static bool michael@0: CheckFrame(InterpreterFrame *fp) michael@0: { michael@0: if (fp->isGeneratorFrame()) { michael@0: IonSpew(IonSpew_BaselineAbort, "generator frame"); michael@0: return false; michael@0: } michael@0: michael@0: if (fp->isDebuggerFrame()) { michael@0: // Debugger eval-in-frame. These are likely short-running scripts so michael@0: // don't bother compiling them for now. michael@0: IonSpew(IonSpew_BaselineAbort, "debugger frame"); michael@0: return false; michael@0: } michael@0: michael@0: if (fp->isNonEvalFunctionFrame() && fp->numActualArgs() > BASELINE_MAX_ARGS_LENGTH) { michael@0: // Fall back to the interpreter to avoid running out of stack space. michael@0: IonSpew(IonSpew_BaselineAbort, "Too many arguments (%u)", fp->numActualArgs()); michael@0: return false; michael@0: } michael@0: michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: IsJSDEnabled(JSContext *cx) michael@0: { michael@0: return cx->compartment()->debugMode() && cx->runtime()->debugHooks.callHook; michael@0: } michael@0: michael@0: static IonExecStatus michael@0: EnterBaseline(JSContext *cx, EnterJitData &data) michael@0: { michael@0: if (data.osrFrame) { michael@0: // Check for potential stack overflow before OSR-ing. michael@0: uint8_t spDummy; michael@0: uint32_t extra = BaselineFrame::Size() + (data.osrNumStackValues * sizeof(Value)); michael@0: uint8_t *checkSp = (&spDummy) - extra; michael@0: JS_CHECK_RECURSION_WITH_SP(cx, checkSp, return IonExec_Aborted); michael@0: } else { michael@0: JS_CHECK_RECURSION(cx, return IonExec_Aborted); michael@0: } michael@0: michael@0: JS_ASSERT(jit::IsBaselineEnabled(cx)); michael@0: JS_ASSERT_IF(data.osrFrame, CheckFrame(data.osrFrame)); michael@0: michael@0: EnterJitCode enter = cx->runtime()->jitRuntime()->enterBaseline(); michael@0: michael@0: // Caller must construct |this| before invoking the Ion function. michael@0: JS_ASSERT_IF(data.constructing, data.maxArgv[0].isObject()); michael@0: michael@0: data.result.setInt32(data.numActualArgs); michael@0: { michael@0: AssertCompartmentUnchanged pcc(cx); michael@0: JitActivation activation(cx, data.constructing); michael@0: michael@0: if (data.osrFrame) michael@0: data.osrFrame->setRunningInJit(); michael@0: michael@0: JS_ASSERT_IF(data.osrFrame, !IsJSDEnabled(cx)); michael@0: michael@0: // Single transition point from Interpreter to Baseline. michael@0: CALL_GENERATED_CODE(enter, data.jitcode, data.maxArgc, data.maxArgv, data.osrFrame, data.calleeToken, michael@0: data.scopeChain.get(), data.osrNumStackValues, data.result.address()); michael@0: michael@0: if (data.osrFrame) michael@0: data.osrFrame->clearRunningInJit(); michael@0: } michael@0: michael@0: JS_ASSERT(!cx->runtime()->hasIonReturnOverride()); michael@0: michael@0: // Jit callers wrap primitive constructor return. michael@0: if (!data.result.isMagic() && data.constructing && data.result.isPrimitive()) michael@0: data.result = data.maxArgv[0]; michael@0: michael@0: // Release temporary buffer used for OSR into Ion. michael@0: cx->runtime()->getJitRuntime(cx)->freeOsrTempData(); michael@0: michael@0: JS_ASSERT_IF(data.result.isMagic(), data.result.isMagic(JS_ION_ERROR)); michael@0: return data.result.isMagic() ? IonExec_Error : IonExec_Ok; michael@0: } michael@0: michael@0: IonExecStatus michael@0: jit::EnterBaselineMethod(JSContext *cx, RunState &state) michael@0: { michael@0: BaselineScript *baseline = state.script()->baselineScript(); michael@0: michael@0: EnterJitData data(cx); michael@0: data.jitcode = baseline->method()->raw(); michael@0: michael@0: AutoValueVector vals(cx); michael@0: if (!SetEnterJitData(cx, data, state, vals)) michael@0: return IonExec_Error; michael@0: michael@0: IonExecStatus status = EnterBaseline(cx, data); michael@0: if (status != IonExec_Ok) michael@0: return status; michael@0: michael@0: state.setReturnValue(data.result); michael@0: return IonExec_Ok; michael@0: } michael@0: michael@0: IonExecStatus michael@0: jit::EnterBaselineAtBranch(JSContext *cx, InterpreterFrame *fp, jsbytecode *pc) michael@0: { michael@0: JS_ASSERT(JSOp(*pc) == JSOP_LOOPENTRY); michael@0: michael@0: BaselineScript *baseline = fp->script()->baselineScript(); michael@0: michael@0: EnterJitData data(cx); michael@0: data.jitcode = baseline->nativeCodeForPC(fp->script(), pc); michael@0: michael@0: // Skip debug breakpoint/trap handler, the interpreter already handled it michael@0: // for the current op. michael@0: if (cx->compartment()->debugMode()) michael@0: data.jitcode += MacroAssembler::ToggledCallSize(); michael@0: michael@0: data.osrFrame = fp; michael@0: data.osrNumStackValues = fp->script()->nfixed() + cx->interpreterRegs().stackDepth(); michael@0: michael@0: RootedValue thisv(cx); michael@0: michael@0: if (fp->isNonEvalFunctionFrame()) { michael@0: data.constructing = fp->isConstructing(); michael@0: data.numActualArgs = fp->numActualArgs(); michael@0: data.maxArgc = Max(fp->numActualArgs(), fp->numFormalArgs()) + 1; // +1 = include |this| michael@0: data.maxArgv = fp->argv() - 1; // -1 = include |this| michael@0: data.scopeChain = nullptr; michael@0: data.calleeToken = CalleeToToken(&fp->callee()); michael@0: } else { michael@0: thisv = fp->thisValue(); michael@0: data.constructing = false; michael@0: data.numActualArgs = 0; michael@0: data.maxArgc = 1; michael@0: data.maxArgv = thisv.address(); michael@0: data.scopeChain = fp->scopeChain(); michael@0: michael@0: // For eval function frames, set the callee token to the enclosing function. michael@0: if (fp->isFunctionFrame()) michael@0: data.calleeToken = CalleeToToken(&fp->callee()); michael@0: else michael@0: data.calleeToken = CalleeToToken(fp->script()); michael@0: } michael@0: michael@0: TraceLogger *logger = TraceLoggerForMainThread(cx->runtime()); michael@0: TraceLogStopEvent(logger, TraceLogger::Interpreter); michael@0: TraceLogStartEvent(logger, TraceLogger::Baseline); michael@0: michael@0: IonExecStatus status = EnterBaseline(cx, data); michael@0: if (status != IonExec_Ok) michael@0: return status; michael@0: michael@0: fp->setReturnValue(data.result); michael@0: return IonExec_Ok; michael@0: } michael@0: michael@0: MethodStatus michael@0: jit::BaselineCompile(JSContext *cx, JSScript *script) michael@0: { michael@0: JS_ASSERT(!script->hasBaselineScript()); michael@0: JS_ASSERT(script->canBaselineCompile()); michael@0: JS_ASSERT(IsBaselineEnabled(cx)); michael@0: LifoAlloc alloc(BASELINE_LIFO_ALLOC_PRIMARY_CHUNK_SIZE); michael@0: michael@0: script->ensureNonLazyCanonicalFunction(cx); michael@0: michael@0: TempAllocator *temp = alloc.new_(&alloc); michael@0: if (!temp) michael@0: return Method_Error; michael@0: michael@0: IonContext ictx(cx, temp); michael@0: michael@0: BaselineCompiler compiler(cx, *temp, script); michael@0: if (!compiler.init()) michael@0: return Method_Error; michael@0: michael@0: MethodStatus status = compiler.compile(); michael@0: michael@0: JS_ASSERT_IF(status == Method_Compiled, script->hasBaselineScript()); michael@0: JS_ASSERT_IF(status != Method_Compiled, !script->hasBaselineScript()); michael@0: michael@0: if (status == Method_CantCompile) michael@0: script->setBaselineScript(cx, BASELINE_DISABLED_SCRIPT); michael@0: michael@0: return status; michael@0: } michael@0: michael@0: static MethodStatus michael@0: CanEnterBaselineJIT(JSContext *cx, HandleScript script, bool osr) michael@0: { michael@0: JS_ASSERT(jit::IsBaselineEnabled(cx)); michael@0: michael@0: // Skip if the script has been disabled. michael@0: if (!script->canBaselineCompile()) michael@0: return Method_Skipped; michael@0: michael@0: if (script->length() > BaselineScript::MAX_JSSCRIPT_LENGTH) michael@0: return Method_CantCompile; michael@0: michael@0: if (script->nslots() > BaselineScript::MAX_JSSCRIPT_SLOTS) michael@0: return Method_CantCompile; michael@0: michael@0: if (!cx->compartment()->ensureJitCompartmentExists(cx)) michael@0: return Method_Error; michael@0: michael@0: if (script->hasBaselineScript()) michael@0: return Method_Compiled; michael@0: michael@0: // Check script use count. However, always eagerly compile scripts if JSD michael@0: // is enabled, so that we don't have to OSR and don't have to update the michael@0: // frame pointer stored in JSD's frames list. michael@0: // michael@0: // Also eagerly compile if we are in parallel warmup, the point of which michael@0: // is to gather type information so that the script may be compiled for michael@0: // parallel execution. We want to avoid the situation of OSRing during michael@0: // warmup and only gathering type information for the loop, and not the michael@0: // rest of the function. michael@0: if (IsJSDEnabled(cx) || cx->runtime()->forkJoinWarmup > 0) { michael@0: if (osr) michael@0: return Method_Skipped; michael@0: } else if (script->incUseCount() <= js_JitOptions.baselineUsesBeforeCompile) { michael@0: return Method_Skipped; michael@0: } michael@0: michael@0: if (script->isCallsiteClone()) { michael@0: // Ensure the original function is compiled too, so that bailouts from michael@0: // Ion code have a BaselineScript to resume into. michael@0: RootedScript original(cx, script->donorFunction()->nonLazyScript()); michael@0: JS_ASSERT(original != script); michael@0: michael@0: if (!original->canBaselineCompile()) michael@0: return Method_CantCompile; michael@0: michael@0: if (!original->hasBaselineScript()) { michael@0: MethodStatus status = BaselineCompile(cx, original); michael@0: if (status != Method_Compiled) michael@0: return status; michael@0: } michael@0: } michael@0: michael@0: return BaselineCompile(cx, script); michael@0: } michael@0: michael@0: MethodStatus michael@0: jit::CanEnterBaselineAtBranch(JSContext *cx, InterpreterFrame *fp, bool newType) michael@0: { michael@0: // If constructing, allocate a new |this| object. michael@0: if (fp->isConstructing() && fp->functionThis().isPrimitive()) { michael@0: RootedObject callee(cx, &fp->callee()); michael@0: RootedObject obj(cx, CreateThisForFunction(cx, callee, newType ? SingletonObject : GenericObject)); michael@0: if (!obj) michael@0: return Method_Skipped; michael@0: fp->functionThis().setObject(*obj); michael@0: } michael@0: michael@0: if (!CheckFrame(fp)) michael@0: return Method_CantCompile; michael@0: michael@0: RootedScript script(cx, fp->script()); michael@0: return CanEnterBaselineJIT(cx, script, /* osr = */true); michael@0: } michael@0: michael@0: MethodStatus michael@0: jit::CanEnterBaselineMethod(JSContext *cx, RunState &state) michael@0: { michael@0: if (state.isInvoke()) { michael@0: InvokeState &invoke = *state.asInvoke(); michael@0: michael@0: if (invoke.args().length() > BASELINE_MAX_ARGS_LENGTH) { michael@0: IonSpew(IonSpew_BaselineAbort, "Too many arguments (%u)", invoke.args().length()); michael@0: return Method_CantCompile; michael@0: } michael@0: michael@0: // If constructing, allocate a new |this| object. michael@0: if (invoke.constructing() && invoke.args().thisv().isPrimitive()) { michael@0: RootedObject callee(cx, &invoke.args().callee()); michael@0: RootedObject obj(cx, CreateThisForFunction(cx, callee, michael@0: invoke.useNewType() michael@0: ? SingletonObject michael@0: : GenericObject)); michael@0: if (!obj) michael@0: return Method_Skipped; michael@0: invoke.args().setThis(ObjectValue(*obj)); michael@0: } michael@0: } else if (state.isExecute()) { michael@0: ExecuteType type = state.asExecute()->type(); michael@0: if (type == EXECUTE_DEBUG || type == EXECUTE_DEBUG_GLOBAL) { michael@0: IonSpew(IonSpew_BaselineAbort, "debugger frame"); michael@0: return Method_CantCompile; michael@0: } michael@0: } else { michael@0: JS_ASSERT(state.isGenerator()); michael@0: IonSpew(IonSpew_BaselineAbort, "generator frame"); michael@0: return Method_CantCompile; michael@0: } michael@0: michael@0: RootedScript script(cx, state.script()); michael@0: return CanEnterBaselineJIT(cx, script, /* osr = */false); michael@0: }; michael@0: michael@0: BaselineScript * michael@0: BaselineScript::New(JSContext *cx, uint32_t prologueOffset, uint32_t epilogueOffset, michael@0: uint32_t spsPushToggleOffset, uint32_t postDebugPrologueOffset, michael@0: size_t icEntries, size_t pcMappingIndexEntries, size_t pcMappingSize, michael@0: size_t bytecodeTypeMapEntries) michael@0: { michael@0: static const unsigned DataAlignment = sizeof(uintptr_t); michael@0: michael@0: size_t paddedBaselineScriptSize = AlignBytes(sizeof(BaselineScript), DataAlignment); michael@0: michael@0: size_t icEntriesSize = icEntries * sizeof(ICEntry); michael@0: size_t pcMappingIndexEntriesSize = pcMappingIndexEntries * sizeof(PCMappingIndexEntry); michael@0: size_t bytecodeTypeMapSize = bytecodeTypeMapEntries * sizeof(uint32_t); michael@0: michael@0: size_t paddedICEntriesSize = AlignBytes(icEntriesSize, DataAlignment); michael@0: size_t paddedPCMappingIndexEntriesSize = AlignBytes(pcMappingIndexEntriesSize, DataAlignment); michael@0: size_t paddedPCMappingSize = AlignBytes(pcMappingSize, DataAlignment); michael@0: size_t paddedBytecodeTypesMapSize = AlignBytes(bytecodeTypeMapSize, DataAlignment); michael@0: michael@0: size_t allocBytes = paddedBaselineScriptSize + michael@0: paddedICEntriesSize + michael@0: paddedPCMappingIndexEntriesSize + michael@0: paddedPCMappingSize + michael@0: paddedBytecodeTypesMapSize; michael@0: michael@0: uint8_t *buffer = (uint8_t *)cx->malloc_(allocBytes); michael@0: if (!buffer) michael@0: return nullptr; michael@0: michael@0: BaselineScript *script = reinterpret_cast(buffer); michael@0: new (script) BaselineScript(prologueOffset, epilogueOffset, michael@0: spsPushToggleOffset, postDebugPrologueOffset); michael@0: michael@0: size_t offsetCursor = paddedBaselineScriptSize; michael@0: michael@0: script->icEntriesOffset_ = offsetCursor; michael@0: script->icEntries_ = icEntries; michael@0: offsetCursor += paddedICEntriesSize; michael@0: michael@0: script->pcMappingIndexOffset_ = offsetCursor; michael@0: script->pcMappingIndexEntries_ = pcMappingIndexEntries; michael@0: offsetCursor += paddedPCMappingIndexEntriesSize; michael@0: michael@0: script->pcMappingOffset_ = offsetCursor; michael@0: script->pcMappingSize_ = pcMappingSize; michael@0: offsetCursor += paddedPCMappingSize; michael@0: michael@0: script->bytecodeTypeMapOffset_ = bytecodeTypeMapEntries ? offsetCursor : 0; michael@0: michael@0: return script; michael@0: } michael@0: michael@0: void michael@0: BaselineScript::trace(JSTracer *trc) michael@0: { michael@0: MarkJitCode(trc, &method_, "baseline-method"); michael@0: if (templateScope_) michael@0: MarkObject(trc, &templateScope_, "baseline-template-scope"); michael@0: michael@0: // Mark all IC stub codes hanging off the IC stub entries. michael@0: for (size_t i = 0; i < numICEntries(); i++) { michael@0: ICEntry &ent = icEntry(i); michael@0: if (!ent.hasStub()) michael@0: continue; michael@0: for (ICStub *stub = ent.firstStub(); stub; stub = stub->next()) michael@0: stub->trace(trc); michael@0: } michael@0: } michael@0: michael@0: /* static */ michael@0: void michael@0: BaselineScript::writeBarrierPre(Zone *zone, BaselineScript *script) michael@0: { michael@0: #ifdef JSGC_INCREMENTAL michael@0: if (zone->needsBarrier()) michael@0: script->trace(zone->barrierTracer()); michael@0: #endif michael@0: } michael@0: michael@0: void michael@0: BaselineScript::Trace(JSTracer *trc, BaselineScript *script) michael@0: { michael@0: script->trace(trc); michael@0: } michael@0: michael@0: void michael@0: BaselineScript::Destroy(FreeOp *fop, BaselineScript *script) michael@0: { michael@0: #ifdef JSGC_GENERATIONAL michael@0: /* michael@0: * When the script contains pointers to nursery things, the store buffer michael@0: * will contain entries refering to the referenced things. Since we can michael@0: * destroy scripts outside the context of a GC, this situation can result michael@0: * in invalid store buffer entries. Assert that if we do destroy scripts michael@0: * outside of a GC that we at least emptied the nursery first. michael@0: */ michael@0: JS_ASSERT(fop->runtime()->gcNursery.isEmpty()); michael@0: #endif michael@0: fop->delete_(script); michael@0: } michael@0: michael@0: ICEntry & michael@0: BaselineScript::icEntry(size_t index) michael@0: { michael@0: JS_ASSERT(index < numICEntries()); michael@0: return icEntryList()[index]; michael@0: } michael@0: michael@0: PCMappingIndexEntry & michael@0: BaselineScript::pcMappingIndexEntry(size_t index) michael@0: { michael@0: JS_ASSERT(index < numPCMappingIndexEntries()); michael@0: return pcMappingIndexEntryList()[index]; michael@0: } michael@0: michael@0: CompactBufferReader michael@0: BaselineScript::pcMappingReader(size_t indexEntry) michael@0: { michael@0: PCMappingIndexEntry &entry = pcMappingIndexEntry(indexEntry); michael@0: michael@0: uint8_t *dataStart = pcMappingData() + entry.bufferOffset; michael@0: uint8_t *dataEnd = (indexEntry == numPCMappingIndexEntries() - 1) michael@0: ? pcMappingData() + pcMappingSize_ michael@0: : pcMappingData() + pcMappingIndexEntry(indexEntry + 1).bufferOffset; michael@0: michael@0: return CompactBufferReader(dataStart, dataEnd); michael@0: } michael@0: michael@0: ICEntry * michael@0: BaselineScript::maybeICEntryFromReturnOffset(CodeOffsetLabel returnOffset) michael@0: { michael@0: size_t bottom = 0; michael@0: size_t top = numICEntries(); michael@0: size_t mid = bottom + (top - bottom) / 2; michael@0: while (mid < top) { michael@0: ICEntry &midEntry = icEntry(mid); michael@0: if (midEntry.returnOffset().offset() < returnOffset.offset()) michael@0: bottom = mid + 1; michael@0: else // if (midEntry.returnOffset().offset() >= returnOffset.offset()) michael@0: top = mid; michael@0: mid = bottom + (top - bottom) / 2; michael@0: } michael@0: if (mid >= numICEntries()) michael@0: return nullptr; michael@0: michael@0: if (icEntry(mid).returnOffset().offset() != returnOffset.offset()) michael@0: return nullptr; michael@0: michael@0: return &icEntry(mid); michael@0: } michael@0: michael@0: ICEntry & michael@0: BaselineScript::icEntryFromReturnOffset(CodeOffsetLabel returnOffset) michael@0: { michael@0: ICEntry *result = maybeICEntryFromReturnOffset(returnOffset); michael@0: JS_ASSERT(result); michael@0: return *result; michael@0: } michael@0: michael@0: uint8_t * michael@0: BaselineScript::returnAddressForIC(const ICEntry &ent) michael@0: { michael@0: return method()->raw() + ent.returnOffset().offset(); michael@0: } michael@0: michael@0: ICEntry & michael@0: BaselineScript::icEntryFromPCOffset(uint32_t pcOffset) michael@0: { michael@0: // Multiple IC entries can have the same PC offset, but this method only looks for michael@0: // those which have isForOp() set. michael@0: size_t bottom = 0; michael@0: size_t top = numICEntries(); michael@0: size_t mid = bottom + (top - bottom) / 2; michael@0: while (mid < top) { michael@0: ICEntry &midEntry = icEntry(mid); michael@0: if (midEntry.pcOffset() < pcOffset) michael@0: bottom = mid + 1; michael@0: else if (midEntry.pcOffset() > pcOffset) michael@0: top = mid; michael@0: else michael@0: break; michael@0: mid = bottom + (top - bottom) / 2; michael@0: } michael@0: // Found an IC entry with a matching PC offset. Search backward, and then michael@0: // forward from this IC entry, looking for one with the same PC offset which michael@0: // has isForOp() set. michael@0: for (size_t i = mid; i < numICEntries() && icEntry(i).pcOffset() == pcOffset; i--) { michael@0: if (icEntry(i).isForOp()) michael@0: return icEntry(i); michael@0: } michael@0: for (size_t i = mid+1; i < numICEntries() && icEntry(i).pcOffset() == pcOffset; i++) { michael@0: if (icEntry(i).isForOp()) michael@0: return icEntry(i); michael@0: } michael@0: MOZ_ASSUME_UNREACHABLE("Invalid PC offset for IC entry."); michael@0: } michael@0: michael@0: ICEntry & michael@0: BaselineScript::icEntryFromPCOffset(uint32_t pcOffset, ICEntry *prevLookedUpEntry) michael@0: { michael@0: // Do a linear forward search from the last queried PC offset, or fallback to a michael@0: // binary search if the last offset is too far away. michael@0: if (prevLookedUpEntry && pcOffset >= prevLookedUpEntry->pcOffset() && michael@0: (pcOffset - prevLookedUpEntry->pcOffset()) <= 10) michael@0: { michael@0: ICEntry *firstEntry = &icEntry(0); michael@0: ICEntry *lastEntry = &icEntry(numICEntries() - 1); michael@0: ICEntry *curEntry = prevLookedUpEntry; michael@0: while (curEntry >= firstEntry && curEntry <= lastEntry) { michael@0: if (curEntry->pcOffset() == pcOffset && curEntry->isForOp()) michael@0: break; michael@0: curEntry++; michael@0: } michael@0: JS_ASSERT(curEntry->pcOffset() == pcOffset && curEntry->isForOp()); michael@0: return *curEntry; michael@0: } michael@0: michael@0: return icEntryFromPCOffset(pcOffset); michael@0: } michael@0: michael@0: ICEntry * michael@0: BaselineScript::maybeICEntryFromReturnAddress(uint8_t *returnAddr) michael@0: { michael@0: JS_ASSERT(returnAddr > method_->raw()); michael@0: JS_ASSERT(returnAddr < method_->raw() + method_->instructionsSize()); michael@0: CodeOffsetLabel offset(returnAddr - method_->raw()); michael@0: return maybeICEntryFromReturnOffset(offset); michael@0: } michael@0: michael@0: ICEntry & michael@0: BaselineScript::icEntryFromReturnAddress(uint8_t *returnAddr) michael@0: { michael@0: JS_ASSERT(returnAddr > method_->raw()); michael@0: JS_ASSERT(returnAddr < method_->raw() + method_->instructionsSize()); michael@0: CodeOffsetLabel offset(returnAddr - method_->raw()); michael@0: return icEntryFromReturnOffset(offset); michael@0: } michael@0: michael@0: void michael@0: BaselineScript::copyICEntries(JSScript *script, const ICEntry *entries, MacroAssembler &masm) michael@0: { michael@0: // Fix up the return offset in the IC entries and copy them in. michael@0: // Also write out the IC entry ptrs in any fallback stubs that were added. michael@0: for (uint32_t i = 0; i < numICEntries(); i++) { michael@0: ICEntry &realEntry = icEntry(i); michael@0: realEntry = entries[i]; michael@0: realEntry.fixupReturnOffset(masm); michael@0: michael@0: if (!realEntry.hasStub()) { michael@0: // VM call without any stubs. michael@0: continue; michael@0: } michael@0: michael@0: // If the attached stub is a fallback stub, then fix it up with michael@0: // a pointer to the (now available) realEntry. michael@0: if (realEntry.firstStub()->isFallback()) michael@0: realEntry.firstStub()->toFallbackStub()->fixupICEntry(&realEntry); michael@0: michael@0: if (realEntry.firstStub()->isTypeMonitor_Fallback()) { michael@0: ICTypeMonitor_Fallback *stub = realEntry.firstStub()->toTypeMonitor_Fallback(); michael@0: stub->fixupICEntry(&realEntry); michael@0: } michael@0: michael@0: if (realEntry.firstStub()->isTableSwitch()) { michael@0: ICTableSwitch *stub = realEntry.firstStub()->toTableSwitch(); michael@0: stub->fixupJumpTable(script, this); michael@0: } michael@0: } michael@0: } michael@0: michael@0: void michael@0: BaselineScript::adoptFallbackStubs(FallbackICStubSpace *stubSpace) michael@0: { michael@0: fallbackStubSpace_.adoptFrom(stubSpace); michael@0: } michael@0: michael@0: void michael@0: BaselineScript::copyPCMappingEntries(const CompactBufferWriter &entries) michael@0: { michael@0: JS_ASSERT(entries.length() > 0); michael@0: JS_ASSERT(entries.length() == pcMappingSize_); michael@0: michael@0: memcpy(pcMappingData(), entries.buffer(), entries.length()); michael@0: } michael@0: michael@0: void michael@0: BaselineScript::copyPCMappingIndexEntries(const PCMappingIndexEntry *entries) michael@0: { michael@0: for (uint32_t i = 0; i < numPCMappingIndexEntries(); i++) michael@0: pcMappingIndexEntry(i) = entries[i]; michael@0: } michael@0: michael@0: uint8_t * michael@0: BaselineScript::nativeCodeForPC(JSScript *script, jsbytecode *pc, PCMappingSlotInfo *slotInfo) michael@0: { michael@0: JS_ASSERT_IF(script->hasBaselineScript(), script->baselineScript() == this); michael@0: michael@0: uint32_t pcOffset = script->pcToOffset(pc); michael@0: michael@0: // Look for the first PCMappingIndexEntry with pc > the pc we are michael@0: // interested in. michael@0: uint32_t i = 1; michael@0: for (; i < numPCMappingIndexEntries(); i++) { michael@0: if (pcMappingIndexEntry(i).pcOffset > pcOffset) michael@0: break; michael@0: } michael@0: michael@0: // The previous entry contains the current pc. michael@0: JS_ASSERT(i > 0); michael@0: i--; michael@0: michael@0: PCMappingIndexEntry &entry = pcMappingIndexEntry(i); michael@0: JS_ASSERT(pcOffset >= entry.pcOffset); michael@0: michael@0: CompactBufferReader reader(pcMappingReader(i)); michael@0: jsbytecode *curPC = script->offsetToPC(entry.pcOffset); michael@0: uint32_t nativeOffset = entry.nativeOffset; michael@0: michael@0: JS_ASSERT(script->containsPC(curPC)); michael@0: JS_ASSERT(curPC <= pc); michael@0: michael@0: while (true) { michael@0: // If the high bit is set, the native offset relative to the michael@0: // previous pc != 0 and comes next. michael@0: uint8_t b = reader.readByte(); michael@0: if (b & 0x80) michael@0: nativeOffset += reader.readUnsigned(); michael@0: michael@0: if (curPC == pc) { michael@0: if (slotInfo) michael@0: *slotInfo = PCMappingSlotInfo(b & ~0x80); michael@0: return method_->raw() + nativeOffset; michael@0: } michael@0: michael@0: curPC += GetBytecodeLength(curPC); michael@0: } michael@0: michael@0: MOZ_ASSUME_UNREACHABLE("Invalid pc"); michael@0: } michael@0: michael@0: jsbytecode * michael@0: BaselineScript::pcForReturnOffset(JSScript *script, uint32_t nativeOffset) michael@0: { michael@0: JS_ASSERT(script->baselineScript() == this); michael@0: JS_ASSERT(nativeOffset < method_->instructionsSize()); michael@0: michael@0: // Look for the first PCMappingIndexEntry with native offset > the native offset we are michael@0: // interested in. michael@0: uint32_t i = 1; michael@0: for (; i < numPCMappingIndexEntries(); i++) { michael@0: if (pcMappingIndexEntry(i).nativeOffset > nativeOffset) michael@0: break; michael@0: } michael@0: michael@0: // Go back an entry to search forward from. michael@0: JS_ASSERT(i > 0); michael@0: i--; michael@0: michael@0: PCMappingIndexEntry &entry = pcMappingIndexEntry(i); michael@0: JS_ASSERT(nativeOffset >= entry.nativeOffset); michael@0: michael@0: CompactBufferReader reader(pcMappingReader(i)); michael@0: jsbytecode *curPC = script->offsetToPC(entry.pcOffset); michael@0: uint32_t curNativeOffset = entry.nativeOffset; michael@0: michael@0: JS_ASSERT(script->containsPC(curPC)); michael@0: JS_ASSERT(curNativeOffset <= nativeOffset); michael@0: michael@0: while (true) { michael@0: // If the high bit is set, the native offset relative to the michael@0: // previous pc != 0 and comes next. michael@0: uint8_t b = reader.readByte(); michael@0: if (b & 0x80) michael@0: curNativeOffset += reader.readUnsigned(); michael@0: michael@0: if (curNativeOffset == nativeOffset) michael@0: return curPC; michael@0: michael@0: curPC += GetBytecodeLength(curPC); michael@0: } michael@0: michael@0: MOZ_ASSUME_UNREACHABLE("Invalid pc"); michael@0: } michael@0: michael@0: jsbytecode * michael@0: BaselineScript::pcForReturnAddress(JSScript *script, uint8_t *nativeAddress) michael@0: { michael@0: JS_ASSERT(script->baselineScript() == this); michael@0: JS_ASSERT(nativeAddress >= method_->raw()); michael@0: JS_ASSERT(nativeAddress < method_->raw() + method_->instructionsSize()); michael@0: return pcForReturnOffset(script, uint32_t(nativeAddress - method_->raw())); michael@0: } michael@0: michael@0: void michael@0: BaselineScript::toggleDebugTraps(JSScript *script, jsbytecode *pc) michael@0: { michael@0: JS_ASSERT(script->baselineScript() == this); michael@0: michael@0: // Only scripts compiled for debug mode have toggled calls. michael@0: if (!debugMode()) michael@0: return; michael@0: michael@0: SrcNoteLineScanner scanner(script->notes(), script->lineno()); michael@0: michael@0: for (uint32_t i = 0; i < numPCMappingIndexEntries(); i++) { michael@0: PCMappingIndexEntry &entry = pcMappingIndexEntry(i); michael@0: michael@0: CompactBufferReader reader(pcMappingReader(i)); michael@0: jsbytecode *curPC = script->offsetToPC(entry.pcOffset); michael@0: uint32_t nativeOffset = entry.nativeOffset; michael@0: michael@0: JS_ASSERT(script->containsPC(curPC)); michael@0: michael@0: while (reader.more()) { michael@0: uint8_t b = reader.readByte(); michael@0: if (b & 0x80) michael@0: nativeOffset += reader.readUnsigned(); michael@0: michael@0: scanner.advanceTo(script->pcToOffset(curPC)); michael@0: michael@0: if (!pc || pc == curPC) { michael@0: bool enabled = (script->stepModeEnabled() && scanner.isLineHeader()) || michael@0: script->hasBreakpointsAt(curPC); michael@0: michael@0: // Patch the trap. michael@0: CodeLocationLabel label(method(), nativeOffset); michael@0: Assembler::ToggleCall(label, enabled); michael@0: } michael@0: michael@0: curPC += GetBytecodeLength(curPC); michael@0: } michael@0: } michael@0: } michael@0: michael@0: void michael@0: BaselineScript::toggleSPS(bool enable) michael@0: { michael@0: JS_ASSERT(enable == !(bool)spsOn_); michael@0: michael@0: IonSpew(IonSpew_BaselineIC, " toggling SPS %s for BaselineScript %p", michael@0: enable ? "on" : "off", this); michael@0: michael@0: // Toggle the jump michael@0: CodeLocationLabel pushToggleLocation(method_, CodeOffsetLabel(spsPushToggleOffset_)); michael@0: if (enable) michael@0: Assembler::ToggleToCmp(pushToggleLocation); michael@0: else michael@0: Assembler::ToggleToJmp(pushToggleLocation); michael@0: #ifdef DEBUG michael@0: spsOn_ = enable; michael@0: #endif michael@0: } michael@0: michael@0: void michael@0: BaselineScript::purgeOptimizedStubs(Zone *zone) michael@0: { michael@0: IonSpew(IonSpew_BaselineIC, "Purging optimized stubs"); michael@0: michael@0: for (size_t i = 0; i < numICEntries(); i++) { michael@0: ICEntry &entry = icEntry(i); michael@0: if (!entry.hasStub()) michael@0: continue; michael@0: michael@0: ICStub *lastStub = entry.firstStub(); michael@0: while (lastStub->next()) michael@0: lastStub = lastStub->next(); michael@0: michael@0: if (lastStub->isFallback()) { michael@0: // Unlink all stubs allocated in the optimized space. michael@0: ICStub *stub = entry.firstStub(); michael@0: ICStub *prev = nullptr; michael@0: michael@0: while (stub->next()) { michael@0: if (!stub->allocatedInFallbackSpace()) { michael@0: lastStub->toFallbackStub()->unlinkStub(zone, prev, stub); michael@0: stub = stub->next(); michael@0: continue; michael@0: } michael@0: michael@0: prev = stub; michael@0: stub = stub->next(); michael@0: } michael@0: michael@0: if (lastStub->isMonitoredFallback()) { michael@0: // Monitor stubs can't make calls, so are always in the michael@0: // optimized stub space. michael@0: ICTypeMonitor_Fallback *lastMonStub = michael@0: lastStub->toMonitoredFallbackStub()->fallbackMonitorStub(); michael@0: lastMonStub->resetMonitorStubChain(zone); michael@0: } michael@0: } else if (lastStub->isTypeMonitor_Fallback()) { michael@0: lastStub->toTypeMonitor_Fallback()->resetMonitorStubChain(zone); michael@0: } else { michael@0: JS_ASSERT(lastStub->isTableSwitch()); michael@0: } michael@0: } michael@0: michael@0: #ifdef DEBUG michael@0: // All remaining stubs must be allocated in the fallback space. michael@0: for (size_t i = 0; i < numICEntries(); i++) { michael@0: ICEntry &entry = icEntry(i); michael@0: if (!entry.hasStub()) michael@0: continue; michael@0: michael@0: ICStub *stub = entry.firstStub(); michael@0: while (stub->next()) { michael@0: JS_ASSERT(stub->allocatedInFallbackSpace()); michael@0: stub = stub->next(); michael@0: } michael@0: } michael@0: #endif michael@0: } michael@0: michael@0: void michael@0: jit::FinishDiscardBaselineScript(FreeOp *fop, JSScript *script) michael@0: { michael@0: if (!script->hasBaselineScript()) michael@0: return; michael@0: michael@0: if (script->baselineScript()->active()) { michael@0: // Script is live on the stack. Keep the BaselineScript, but destroy michael@0: // stubs allocated in the optimized stub space. michael@0: script->baselineScript()->purgeOptimizedStubs(script->zone()); michael@0: michael@0: // Reset |active| flag so that we don't need a separate script michael@0: // iteration to unmark them. michael@0: script->baselineScript()->resetActive(); michael@0: return; michael@0: } michael@0: michael@0: BaselineScript *baseline = script->baselineScript(); michael@0: script->setBaselineScript(nullptr, nullptr); michael@0: BaselineScript::Destroy(fop, baseline); michael@0: } michael@0: michael@0: void michael@0: jit::JitCompartment::toggleBaselineStubBarriers(bool enabled) michael@0: { michael@0: for (ICStubCodeMap::Enum e(*stubCodes_); !e.empty(); e.popFront()) { michael@0: JitCode *code = *e.front().value().unsafeGet(); michael@0: code->togglePreBarriers(enabled); michael@0: } michael@0: } michael@0: michael@0: void michael@0: jit::AddSizeOfBaselineData(JSScript *script, mozilla::MallocSizeOf mallocSizeOf, size_t *data, michael@0: size_t *fallbackStubs) michael@0: { michael@0: if (script->hasBaselineScript()) michael@0: script->baselineScript()->addSizeOfIncludingThis(mallocSizeOf, data, fallbackStubs); michael@0: } michael@0: michael@0: void michael@0: jit::ToggleBaselineSPS(JSRuntime *runtime, bool enable) michael@0: { michael@0: for (ZonesIter zone(runtime, SkipAtoms); !zone.done(); zone.next()) { michael@0: for (gc::CellIter i(zone, gc::FINALIZE_SCRIPT); !i.done(); i.next()) { michael@0: JSScript *script = i.get(); michael@0: if (!script->hasBaselineScript()) michael@0: continue; michael@0: script->baselineScript()->toggleSPS(enable); michael@0: } michael@0: } michael@0: } michael@0: michael@0: static void michael@0: MarkActiveBaselineScripts(JSRuntime *rt, const JitActivationIterator &activation) michael@0: { michael@0: for (jit::JitFrameIterator iter(activation); !iter.done(); ++iter) { michael@0: switch (iter.type()) { michael@0: case JitFrame_BaselineJS: michael@0: iter.script()->baselineScript()->setActive(); michael@0: break; michael@0: case JitFrame_IonJS: { michael@0: // Keep the baseline script around, since bailouts from the ion michael@0: // jitcode might need to re-enter into the baseline jitcode. michael@0: iter.script()->baselineScript()->setActive(); michael@0: for (InlineFrameIterator inlineIter(rt, &iter); inlineIter.more(); ++inlineIter) michael@0: inlineIter.script()->baselineScript()->setActive(); michael@0: break; michael@0: } michael@0: default:; michael@0: } michael@0: } michael@0: } michael@0: michael@0: void michael@0: jit::MarkActiveBaselineScripts(Zone *zone) michael@0: { michael@0: JSRuntime *rt = zone->runtimeFromMainThread(); michael@0: for (JitActivationIterator iter(rt); !iter.done(); ++iter) { michael@0: if (iter->compartment()->zone() == zone) michael@0: MarkActiveBaselineScripts(rt, iter); michael@0: } michael@0: }