Wed, 31 Dec 2014 06:09:35 +0100
Cloned the upstream tor-browser origin at tor-browser-31.3.0esr-4.5-1-build1
(revision fc1c9ff7c1b2defdbc039f12214767608f46423f) for hacking purposes.
michael@0 | 1 | /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- |
michael@0 | 2 | * vim: set ts=8 sts=4 et sw=4 tw=99: |
michael@0 | 3 | * This Source Code Form is subject to the terms of the Mozilla Public |
michael@0 | 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this |
michael@0 | 5 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ |
michael@0 | 6 | |
michael@0 | 7 | #include "jit/BaselineJIT.h" |
michael@0 | 8 | |
michael@0 | 9 | #include "mozilla/MemoryReporting.h" |
michael@0 | 10 | |
michael@0 | 11 | #include "jit/BaselineCompiler.h" |
michael@0 | 12 | #include "jit/BaselineIC.h" |
michael@0 | 13 | #include "jit/CompileInfo.h" |
michael@0 | 14 | #include "jit/IonSpewer.h" |
michael@0 | 15 | #include "jit/JitCommon.h" |
michael@0 | 16 | #include "vm/Interpreter.h" |
michael@0 | 17 | #include "vm/TraceLogging.h" |
michael@0 | 18 | |
michael@0 | 19 | #include "jsgcinlines.h" |
michael@0 | 20 | #include "jsobjinlines.h" |
michael@0 | 21 | #include "jsopcodeinlines.h" |
michael@0 | 22 | #include "jsscriptinlines.h" |
michael@0 | 23 | |
michael@0 | 24 | #include "jit/IonFrames-inl.h" |
michael@0 | 25 | #include "vm/Stack-inl.h" |
michael@0 | 26 | |
michael@0 | 27 | using namespace js; |
michael@0 | 28 | using namespace js::jit; |
michael@0 | 29 | |
michael@0 | 30 | /* static */ PCMappingSlotInfo::SlotLocation |
michael@0 | 31 | PCMappingSlotInfo::ToSlotLocation(const StackValue *stackVal) |
michael@0 | 32 | { |
michael@0 | 33 | if (stackVal->kind() == StackValue::Register) { |
michael@0 | 34 | if (stackVal->reg() == R0) |
michael@0 | 35 | return SlotInR0; |
michael@0 | 36 | JS_ASSERT(stackVal->reg() == R1); |
michael@0 | 37 | return SlotInR1; |
michael@0 | 38 | } |
michael@0 | 39 | JS_ASSERT(stackVal->kind() != StackValue::Stack); |
michael@0 | 40 | return SlotIgnore; |
michael@0 | 41 | } |
michael@0 | 42 | |
michael@0 | 43 | BaselineScript::BaselineScript(uint32_t prologueOffset, uint32_t epilogueOffset, |
michael@0 | 44 | uint32_t spsPushToggleOffset, uint32_t postDebugPrologueOffset) |
michael@0 | 45 | : method_(nullptr), |
michael@0 | 46 | templateScope_(nullptr), |
michael@0 | 47 | fallbackStubSpace_(), |
michael@0 | 48 | prologueOffset_(prologueOffset), |
michael@0 | 49 | epilogueOffset_(epilogueOffset), |
michael@0 | 50 | #ifdef DEBUG |
michael@0 | 51 | spsOn_(false), |
michael@0 | 52 | #endif |
michael@0 | 53 | spsPushToggleOffset_(spsPushToggleOffset), |
michael@0 | 54 | postDebugPrologueOffset_(postDebugPrologueOffset), |
michael@0 | 55 | flags_(0) |
michael@0 | 56 | { } |
michael@0 | 57 | |
michael@0 | 58 | static const size_t BASELINE_LIFO_ALLOC_PRIMARY_CHUNK_SIZE = 4096; |
michael@0 | 59 | static const unsigned BASELINE_MAX_ARGS_LENGTH = 20000; |
michael@0 | 60 | |
michael@0 | 61 | static bool |
michael@0 | 62 | CheckFrame(InterpreterFrame *fp) |
michael@0 | 63 | { |
michael@0 | 64 | if (fp->isGeneratorFrame()) { |
michael@0 | 65 | IonSpew(IonSpew_BaselineAbort, "generator frame"); |
michael@0 | 66 | return false; |
michael@0 | 67 | } |
michael@0 | 68 | |
michael@0 | 69 | if (fp->isDebuggerFrame()) { |
michael@0 | 70 | // Debugger eval-in-frame. These are likely short-running scripts so |
michael@0 | 71 | // don't bother compiling them for now. |
michael@0 | 72 | IonSpew(IonSpew_BaselineAbort, "debugger frame"); |
michael@0 | 73 | return false; |
michael@0 | 74 | } |
michael@0 | 75 | |
michael@0 | 76 | if (fp->isNonEvalFunctionFrame() && fp->numActualArgs() > BASELINE_MAX_ARGS_LENGTH) { |
michael@0 | 77 | // Fall back to the interpreter to avoid running out of stack space. |
michael@0 | 78 | IonSpew(IonSpew_BaselineAbort, "Too many arguments (%u)", fp->numActualArgs()); |
michael@0 | 79 | return false; |
michael@0 | 80 | } |
michael@0 | 81 | |
michael@0 | 82 | return true; |
michael@0 | 83 | } |
michael@0 | 84 | |
michael@0 | 85 | static bool |
michael@0 | 86 | IsJSDEnabled(JSContext *cx) |
michael@0 | 87 | { |
michael@0 | 88 | return cx->compartment()->debugMode() && cx->runtime()->debugHooks.callHook; |
michael@0 | 89 | } |
michael@0 | 90 | |
michael@0 | 91 | static IonExecStatus |
michael@0 | 92 | EnterBaseline(JSContext *cx, EnterJitData &data) |
michael@0 | 93 | { |
michael@0 | 94 | if (data.osrFrame) { |
michael@0 | 95 | // Check for potential stack overflow before OSR-ing. |
michael@0 | 96 | uint8_t spDummy; |
michael@0 | 97 | uint32_t extra = BaselineFrame::Size() + (data.osrNumStackValues * sizeof(Value)); |
michael@0 | 98 | uint8_t *checkSp = (&spDummy) - extra; |
michael@0 | 99 | JS_CHECK_RECURSION_WITH_SP(cx, checkSp, return IonExec_Aborted); |
michael@0 | 100 | } else { |
michael@0 | 101 | JS_CHECK_RECURSION(cx, return IonExec_Aborted); |
michael@0 | 102 | } |
michael@0 | 103 | |
michael@0 | 104 | JS_ASSERT(jit::IsBaselineEnabled(cx)); |
michael@0 | 105 | JS_ASSERT_IF(data.osrFrame, CheckFrame(data.osrFrame)); |
michael@0 | 106 | |
michael@0 | 107 | EnterJitCode enter = cx->runtime()->jitRuntime()->enterBaseline(); |
michael@0 | 108 | |
michael@0 | 109 | // Caller must construct |this| before invoking the Ion function. |
michael@0 | 110 | JS_ASSERT_IF(data.constructing, data.maxArgv[0].isObject()); |
michael@0 | 111 | |
michael@0 | 112 | data.result.setInt32(data.numActualArgs); |
michael@0 | 113 | { |
michael@0 | 114 | AssertCompartmentUnchanged pcc(cx); |
michael@0 | 115 | JitActivation activation(cx, data.constructing); |
michael@0 | 116 | |
michael@0 | 117 | if (data.osrFrame) |
michael@0 | 118 | data.osrFrame->setRunningInJit(); |
michael@0 | 119 | |
michael@0 | 120 | JS_ASSERT_IF(data.osrFrame, !IsJSDEnabled(cx)); |
michael@0 | 121 | |
michael@0 | 122 | // Single transition point from Interpreter to Baseline. |
michael@0 | 123 | CALL_GENERATED_CODE(enter, data.jitcode, data.maxArgc, data.maxArgv, data.osrFrame, data.calleeToken, |
michael@0 | 124 | data.scopeChain.get(), data.osrNumStackValues, data.result.address()); |
michael@0 | 125 | |
michael@0 | 126 | if (data.osrFrame) |
michael@0 | 127 | data.osrFrame->clearRunningInJit(); |
michael@0 | 128 | } |
michael@0 | 129 | |
michael@0 | 130 | JS_ASSERT(!cx->runtime()->hasIonReturnOverride()); |
michael@0 | 131 | |
michael@0 | 132 | // Jit callers wrap primitive constructor return. |
michael@0 | 133 | if (!data.result.isMagic() && data.constructing && data.result.isPrimitive()) |
michael@0 | 134 | data.result = data.maxArgv[0]; |
michael@0 | 135 | |
michael@0 | 136 | // Release temporary buffer used for OSR into Ion. |
michael@0 | 137 | cx->runtime()->getJitRuntime(cx)->freeOsrTempData(); |
michael@0 | 138 | |
michael@0 | 139 | JS_ASSERT_IF(data.result.isMagic(), data.result.isMagic(JS_ION_ERROR)); |
michael@0 | 140 | return data.result.isMagic() ? IonExec_Error : IonExec_Ok; |
michael@0 | 141 | } |
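// Illustrative note on the OSR stack check above: it reserves room for the
// BaselineFrame plus the interpreter expression-stack values that will be
// copied into it before jumping into jitcode. Assuming a 64-bit build where
// sizeof(Value) == 8, an OSR entry with 10 live stack values needs
// BaselineFrame::Size() + 80 bytes of headroom below the current native stack
// pointer (approximated here by &spDummy).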
michael@0 | 142 | |
michael@0 | 143 | IonExecStatus |
michael@0 | 144 | jit::EnterBaselineMethod(JSContext *cx, RunState &state) |
michael@0 | 145 | { |
michael@0 | 146 | BaselineScript *baseline = state.script()->baselineScript(); |
michael@0 | 147 | |
michael@0 | 148 | EnterJitData data(cx); |
michael@0 | 149 | data.jitcode = baseline->method()->raw(); |
michael@0 | 150 | |
michael@0 | 151 | AutoValueVector vals(cx); |
michael@0 | 152 | if (!SetEnterJitData(cx, data, state, vals)) |
michael@0 | 153 | return IonExec_Error; |
michael@0 | 154 | |
michael@0 | 155 | IonExecStatus status = EnterBaseline(cx, data); |
michael@0 | 156 | if (status != IonExec_Ok) |
michael@0 | 157 | return status; |
michael@0 | 158 | |
michael@0 | 159 | state.setReturnValue(data.result); |
michael@0 | 160 | return IonExec_Ok; |
michael@0 | 161 | } |
michael@0 | 162 | |
michael@0 | 163 | IonExecStatus |
michael@0 | 164 | jit::EnterBaselineAtBranch(JSContext *cx, InterpreterFrame *fp, jsbytecode *pc) |
michael@0 | 165 | { |
michael@0 | 166 | JS_ASSERT(JSOp(*pc) == JSOP_LOOPENTRY); |
michael@0 | 167 | |
michael@0 | 168 | BaselineScript *baseline = fp->script()->baselineScript(); |
michael@0 | 169 | |
michael@0 | 170 | EnterJitData data(cx); |
michael@0 | 171 | data.jitcode = baseline->nativeCodeForPC(fp->script(), pc); |
michael@0 | 172 | |
michael@0 | 173 | // Skip the debug breakpoint/trap handler; the interpreter already handled it |
michael@0 | 174 | // for the current op. |
michael@0 | 175 | if (cx->compartment()->debugMode()) |
michael@0 | 176 | data.jitcode += MacroAssembler::ToggledCallSize(); |
michael@0 | 177 | |
michael@0 | 178 | data.osrFrame = fp; |
michael@0 | 179 | data.osrNumStackValues = fp->script()->nfixed() + cx->interpreterRegs().stackDepth(); |
michael@0 | 180 | |
michael@0 | 181 | RootedValue thisv(cx); |
michael@0 | 182 | |
michael@0 | 183 | if (fp->isNonEvalFunctionFrame()) { |
michael@0 | 184 | data.constructing = fp->isConstructing(); |
michael@0 | 185 | data.numActualArgs = fp->numActualArgs(); |
michael@0 | 186 | data.maxArgc = Max(fp->numActualArgs(), fp->numFormalArgs()) + 1; // +1 = include |this| |
michael@0 | 187 | data.maxArgv = fp->argv() - 1; // -1 = include |this| |
michael@0 | 188 | data.scopeChain = nullptr; |
michael@0 | 189 | data.calleeToken = CalleeToToken(&fp->callee()); |
michael@0 | 190 | } else { |
michael@0 | 191 | thisv = fp->thisValue(); |
michael@0 | 192 | data.constructing = false; |
michael@0 | 193 | data.numActualArgs = 0; |
michael@0 | 194 | data.maxArgc = 1; |
michael@0 | 195 | data.maxArgv = thisv.address(); |
michael@0 | 196 | data.scopeChain = fp->scopeChain(); |
michael@0 | 197 | |
michael@0 | 198 | // For eval function frames, set the callee token to the enclosing function. |
michael@0 | 199 | if (fp->isFunctionFrame()) |
michael@0 | 200 | data.calleeToken = CalleeToToken(&fp->callee()); |
michael@0 | 201 | else |
michael@0 | 202 | data.calleeToken = CalleeToToken(fp->script()); |
michael@0 | 203 | } |
michael@0 | 204 | |
michael@0 | 205 | TraceLogger *logger = TraceLoggerForMainThread(cx->runtime()); |
michael@0 | 206 | TraceLogStopEvent(logger, TraceLogger::Interpreter); |
michael@0 | 207 | TraceLogStartEvent(logger, TraceLogger::Baseline); |
michael@0 | 208 | |
michael@0 | 209 | IonExecStatus status = EnterBaseline(cx, data); |
michael@0 | 210 | if (status != IonExec_Ok) |
michael@0 | 211 | return status; |
michael@0 | 212 | |
michael@0 | 213 | fp->setReturnValue(data.result); |
michael@0 | 214 | return IonExec_Ok; |
michael@0 | 215 | } |
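// Worked example for the argument setup above (illustrative only): for a
// non-eval frame of function f(a, b) entered as f(1), numActualArgs() == 1 and
// numFormalArgs() == 2, so maxArgc == Max(1, 2) + 1 == 3 and maxArgv ==
// fp->argv() - 1, i.e. the vector handed to the trampoline starts at the
// |this| slot and is followed by the argument Values (hence the +1/-1
// adjustments noted in the comments above).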
michael@0 | 216 | |
michael@0 | 217 | MethodStatus |
michael@0 | 218 | jit::BaselineCompile(JSContext *cx, JSScript *script) |
michael@0 | 219 | { |
michael@0 | 220 | JS_ASSERT(!script->hasBaselineScript()); |
michael@0 | 221 | JS_ASSERT(script->canBaselineCompile()); |
michael@0 | 222 | JS_ASSERT(IsBaselineEnabled(cx)); |
michael@0 | 223 | LifoAlloc alloc(BASELINE_LIFO_ALLOC_PRIMARY_CHUNK_SIZE); |
michael@0 | 224 | |
michael@0 | 225 | script->ensureNonLazyCanonicalFunction(cx); |
michael@0 | 226 | |
michael@0 | 227 | TempAllocator *temp = alloc.new_<TempAllocator>(&alloc); |
michael@0 | 228 | if (!temp) |
michael@0 | 229 | return Method_Error; |
michael@0 | 230 | |
michael@0 | 231 | IonContext ictx(cx, temp); |
michael@0 | 232 | |
michael@0 | 233 | BaselineCompiler compiler(cx, *temp, script); |
michael@0 | 234 | if (!compiler.init()) |
michael@0 | 235 | return Method_Error; |
michael@0 | 236 | |
michael@0 | 237 | MethodStatus status = compiler.compile(); |
michael@0 | 238 | |
michael@0 | 239 | JS_ASSERT_IF(status == Method_Compiled, script->hasBaselineScript()); |
michael@0 | 240 | JS_ASSERT_IF(status != Method_Compiled, !script->hasBaselineScript()); |
michael@0 | 241 | |
michael@0 | 242 | if (status == Method_CantCompile) |
michael@0 | 243 | script->setBaselineScript(cx, BASELINE_DISABLED_SCRIPT); |
michael@0 | 244 | |
michael@0 | 245 | return status; |
michael@0 | 246 | } |
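// Note: the LifoAlloc/TempAllocator pair above only backs data needed while
// compiling. A successful compile allocates the BaselineScript separately (see
// BaselineScript::New below) and adopts the fallback stubs into the script's
// own stub space (see adoptFallbackStubs), so everything held by this `alloc`
// can be released when the function returns.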
michael@0 | 247 | |
michael@0 | 248 | static MethodStatus |
michael@0 | 249 | CanEnterBaselineJIT(JSContext *cx, HandleScript script, bool osr) |
michael@0 | 250 | { |
michael@0 | 251 | JS_ASSERT(jit::IsBaselineEnabled(cx)); |
michael@0 | 252 | |
michael@0 | 253 | // Skip if the script has been disabled. |
michael@0 | 254 | if (!script->canBaselineCompile()) |
michael@0 | 255 | return Method_Skipped; |
michael@0 | 256 | |
michael@0 | 257 | if (script->length() > BaselineScript::MAX_JSSCRIPT_LENGTH) |
michael@0 | 258 | return Method_CantCompile; |
michael@0 | 259 | |
michael@0 | 260 | if (script->nslots() > BaselineScript::MAX_JSSCRIPT_SLOTS) |
michael@0 | 261 | return Method_CantCompile; |
michael@0 | 262 | |
michael@0 | 263 | if (!cx->compartment()->ensureJitCompartmentExists(cx)) |
michael@0 | 264 | return Method_Error; |
michael@0 | 265 | |
michael@0 | 266 | if (script->hasBaselineScript()) |
michael@0 | 267 | return Method_Compiled; |
michael@0 | 268 | |
michael@0 | 269 | // Check script use count. However, always eagerly compile scripts if JSD |
michael@0 | 270 | // is enabled, so that we don't have to OSR and don't have to update the |
michael@0 | 271 | // frame pointer stored in JSD's frames list. |
michael@0 | 272 | // |
michael@0 | 273 | // Also eagerly compile if we are in parallel warmup, the point of which |
michael@0 | 274 | // is to gather type information so that the script may be compiled for |
michael@0 | 275 | // parallel execution. We want to avoid the situation of OSRing during |
michael@0 | 276 | // warmup and only gathering type information for the loop, and not the |
michael@0 | 277 | // rest of the function. |
michael@0 | 278 | if (IsJSDEnabled(cx) || cx->runtime()->forkJoinWarmup > 0) { |
michael@0 | 279 | if (osr) |
michael@0 | 280 | return Method_Skipped; |
michael@0 | 281 | } else if (script->incUseCount() <= js_JitOptions.baselineUsesBeforeCompile) { |
michael@0 | 282 | return Method_Skipped; |
michael@0 | 283 | } |
michael@0 | 284 | |
michael@0 | 285 | if (script->isCallsiteClone()) { |
michael@0 | 286 | // Ensure the original function is compiled too, so that bailouts from |
michael@0 | 287 | // Ion code have a BaselineScript to resume into. |
michael@0 | 288 | RootedScript original(cx, script->donorFunction()->nonLazyScript()); |
michael@0 | 289 | JS_ASSERT(original != script); |
michael@0 | 290 | |
michael@0 | 291 | if (!original->canBaselineCompile()) |
michael@0 | 292 | return Method_CantCompile; |
michael@0 | 293 | |
michael@0 | 294 | if (!original->hasBaselineScript()) { |
michael@0 | 295 | MethodStatus status = BaselineCompile(cx, original); |
michael@0 | 296 | if (status != Method_Compiled) |
michael@0 | 297 | return status; |
michael@0 | 298 | } |
michael@0 | 299 | } |
michael@0 | 300 | |
michael@0 | 301 | return BaselineCompile(cx, script); |
michael@0 | 302 | } |
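// Summary of the gating above (illustrative): a script is normally only
// baseline-compiled once its use count exceeds
// js_JitOptions.baselineUsesBeforeCompile; until then Method_Skipped keeps it
// in the interpreter. The JSD and parallel-warmup paths bypass the counter and
// compile eagerly, except for OSR requests, which return Method_Skipped so the
// loop is not compiled (and type-profiled) in isolation.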
michael@0 | 303 | |
michael@0 | 304 | MethodStatus |
michael@0 | 305 | jit::CanEnterBaselineAtBranch(JSContext *cx, InterpreterFrame *fp, bool newType) |
michael@0 | 306 | { |
michael@0 | 307 | // If constructing, allocate a new |this| object. |
michael@0 | 308 | if (fp->isConstructing() && fp->functionThis().isPrimitive()) { |
michael@0 | 309 | RootedObject callee(cx, &fp->callee()); |
michael@0 | 310 | RootedObject obj(cx, CreateThisForFunction(cx, callee, newType ? SingletonObject : GenericObject)); |
michael@0 | 311 | if (!obj) |
michael@0 | 312 | return Method_Skipped; |
michael@0 | 313 | fp->functionThis().setObject(*obj); |
michael@0 | 314 | } |
michael@0 | 315 | |
michael@0 | 316 | if (!CheckFrame(fp)) |
michael@0 | 317 | return Method_CantCompile; |
michael@0 | 318 | |
michael@0 | 319 | RootedScript script(cx, fp->script()); |
michael@0 | 320 | return CanEnterBaselineJIT(cx, script, /* osr = */true); |
michael@0 | 321 | } |
michael@0 | 322 | |
michael@0 | 323 | MethodStatus |
michael@0 | 324 | jit::CanEnterBaselineMethod(JSContext *cx, RunState &state) |
michael@0 | 325 | { |
michael@0 | 326 | if (state.isInvoke()) { |
michael@0 | 327 | InvokeState &invoke = *state.asInvoke(); |
michael@0 | 328 | |
michael@0 | 329 | if (invoke.args().length() > BASELINE_MAX_ARGS_LENGTH) { |
michael@0 | 330 | IonSpew(IonSpew_BaselineAbort, "Too many arguments (%u)", invoke.args().length()); |
michael@0 | 331 | return Method_CantCompile; |
michael@0 | 332 | } |
michael@0 | 333 | |
michael@0 | 334 | // If constructing, allocate a new |this| object. |
michael@0 | 335 | if (invoke.constructing() && invoke.args().thisv().isPrimitive()) { |
michael@0 | 336 | RootedObject callee(cx, &invoke.args().callee()); |
michael@0 | 337 | RootedObject obj(cx, CreateThisForFunction(cx, callee, |
michael@0 | 338 | invoke.useNewType() |
michael@0 | 339 | ? SingletonObject |
michael@0 | 340 | : GenericObject)); |
michael@0 | 341 | if (!obj) |
michael@0 | 342 | return Method_Skipped; |
michael@0 | 343 | invoke.args().setThis(ObjectValue(*obj)); |
michael@0 | 344 | } |
michael@0 | 345 | } else if (state.isExecute()) { |
michael@0 | 346 | ExecuteType type = state.asExecute()->type(); |
michael@0 | 347 | if (type == EXECUTE_DEBUG || type == EXECUTE_DEBUG_GLOBAL) { |
michael@0 | 348 | IonSpew(IonSpew_BaselineAbort, "debugger frame"); |
michael@0 | 349 | return Method_CantCompile; |
michael@0 | 350 | } |
michael@0 | 351 | } else { |
michael@0 | 352 | JS_ASSERT(state.isGenerator()); |
michael@0 | 353 | IonSpew(IonSpew_BaselineAbort, "generator frame"); |
michael@0 | 354 | return Method_CantCompile; |
michael@0 | 355 | } |
michael@0 | 356 | |
michael@0 | 357 | RootedScript script(cx, state.script()); |
michael@0 | 358 | return CanEnterBaselineJIT(cx, script, /* osr = */false); |
michael@0 | 359 | } |
michael@0 | 360 | |
michael@0 | 361 | BaselineScript * |
michael@0 | 362 | BaselineScript::New(JSContext *cx, uint32_t prologueOffset, uint32_t epilogueOffset, |
michael@0 | 363 | uint32_t spsPushToggleOffset, uint32_t postDebugPrologueOffset, |
michael@0 | 364 | size_t icEntries, size_t pcMappingIndexEntries, size_t pcMappingSize, |
michael@0 | 365 | size_t bytecodeTypeMapEntries) |
michael@0 | 366 | { |
michael@0 | 367 | static const unsigned DataAlignment = sizeof(uintptr_t); |
michael@0 | 368 | |
michael@0 | 369 | size_t paddedBaselineScriptSize = AlignBytes(sizeof(BaselineScript), DataAlignment); |
michael@0 | 370 | |
michael@0 | 371 | size_t icEntriesSize = icEntries * sizeof(ICEntry); |
michael@0 | 372 | size_t pcMappingIndexEntriesSize = pcMappingIndexEntries * sizeof(PCMappingIndexEntry); |
michael@0 | 373 | size_t bytecodeTypeMapSize = bytecodeTypeMapEntries * sizeof(uint32_t); |
michael@0 | 374 | |
michael@0 | 375 | size_t paddedICEntriesSize = AlignBytes(icEntriesSize, DataAlignment); |
michael@0 | 376 | size_t paddedPCMappingIndexEntriesSize = AlignBytes(pcMappingIndexEntriesSize, DataAlignment); |
michael@0 | 377 | size_t paddedPCMappingSize = AlignBytes(pcMappingSize, DataAlignment); |
michael@0 | 378 | size_t paddedBytecodeTypesMapSize = AlignBytes(bytecodeTypeMapSize, DataAlignment); |
michael@0 | 379 | |
michael@0 | 380 | size_t allocBytes = paddedBaselineScriptSize + |
michael@0 | 381 | paddedICEntriesSize + |
michael@0 | 382 | paddedPCMappingIndexEntriesSize + |
michael@0 | 383 | paddedPCMappingSize + |
michael@0 | 384 | paddedBytecodeTypesMapSize; |
michael@0 | 385 | |
michael@0 | 386 | uint8_t *buffer = (uint8_t *)cx->malloc_(allocBytes); |
michael@0 | 387 | if (!buffer) |
michael@0 | 388 | return nullptr; |
michael@0 | 389 | |
michael@0 | 390 | BaselineScript *script = reinterpret_cast<BaselineScript *>(buffer); |
michael@0 | 391 | new (script) BaselineScript(prologueOffset, epilogueOffset, |
michael@0 | 392 | spsPushToggleOffset, postDebugPrologueOffset); |
michael@0 | 393 | |
michael@0 | 394 | size_t offsetCursor = paddedBaselineScriptSize; |
michael@0 | 395 | |
michael@0 | 396 | script->icEntriesOffset_ = offsetCursor; |
michael@0 | 397 | script->icEntries_ = icEntries; |
michael@0 | 398 | offsetCursor += paddedICEntriesSize; |
michael@0 | 399 | |
michael@0 | 400 | script->pcMappingIndexOffset_ = offsetCursor; |
michael@0 | 401 | script->pcMappingIndexEntries_ = pcMappingIndexEntries; |
michael@0 | 402 | offsetCursor += paddedPCMappingIndexEntriesSize; |
michael@0 | 403 | |
michael@0 | 404 | script->pcMappingOffset_ = offsetCursor; |
michael@0 | 405 | script->pcMappingSize_ = pcMappingSize; |
michael@0 | 406 | offsetCursor += paddedPCMappingSize; |
michael@0 | 407 | |
michael@0 | 408 | script->bytecodeTypeMapOffset_ = bytecodeTypeMapEntries ? offsetCursor : 0; |
michael@0 | 409 | |
michael@0 | 410 | return script; |
michael@0 | 411 | } |
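// Layout sketch of the single allocation made above: the buffer is carved into
// consecutive, pointer-aligned regions,
//
//   [ BaselineScript | ICEntry[icEntries] | PCMappingIndexEntry[...]
//     | pc mapping data | bytecode type map (omitted if empty) ]
//
// and each *Offset_ recorded here is a byte offset from the BaselineScript
// itself, which is what lets the accessors below (icEntry, pcMappingIndexEntry,
// pcMappingReader) reach their region with plain pointer arithmetic.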
michael@0 | 412 | |
michael@0 | 413 | void |
michael@0 | 414 | BaselineScript::trace(JSTracer *trc) |
michael@0 | 415 | { |
michael@0 | 416 | MarkJitCode(trc, &method_, "baseline-method"); |
michael@0 | 417 | if (templateScope_) |
michael@0 | 418 | MarkObject(trc, &templateScope_, "baseline-template-scope"); |
michael@0 | 419 | |
michael@0 | 420 | // Mark all IC stub codes hanging off the IC stub entries. |
michael@0 | 421 | for (size_t i = 0; i < numICEntries(); i++) { |
michael@0 | 422 | ICEntry &ent = icEntry(i); |
michael@0 | 423 | if (!ent.hasStub()) |
michael@0 | 424 | continue; |
michael@0 | 425 | for (ICStub *stub = ent.firstStub(); stub; stub = stub->next()) |
michael@0 | 426 | stub->trace(trc); |
michael@0 | 427 | } |
michael@0 | 428 | } |
michael@0 | 429 | |
michael@0 | 430 | /* static */ |
michael@0 | 431 | void |
michael@0 | 432 | BaselineScript::writeBarrierPre(Zone *zone, BaselineScript *script) |
michael@0 | 433 | { |
michael@0 | 434 | #ifdef JSGC_INCREMENTAL |
michael@0 | 435 | if (zone->needsBarrier()) |
michael@0 | 436 | script->trace(zone->barrierTracer()); |
michael@0 | 437 | #endif |
michael@0 | 438 | } |
michael@0 | 439 | |
michael@0 | 440 | void |
michael@0 | 441 | BaselineScript::Trace(JSTracer *trc, BaselineScript *script) |
michael@0 | 442 | { |
michael@0 | 443 | script->trace(trc); |
michael@0 | 444 | } |
michael@0 | 445 | |
michael@0 | 446 | void |
michael@0 | 447 | BaselineScript::Destroy(FreeOp *fop, BaselineScript *script) |
michael@0 | 448 | { |
michael@0 | 449 | #ifdef JSGC_GENERATIONAL |
michael@0 | 450 | /* |
michael@0 | 451 | * When the script contains pointers to nursery things, the store buffer |
michael@0 | 452 | * will contain entries referring to the referenced things. Since we can |
michael@0 | 453 | * destroy scripts outside the context of a GC, this situation can result |
michael@0 | 454 | * in invalid store buffer entries. Assert that if we do destroy scripts |
michael@0 | 455 | * outside of a GC that we at least emptied the nursery first. |
michael@0 | 456 | */ |
michael@0 | 457 | JS_ASSERT(fop->runtime()->gcNursery.isEmpty()); |
michael@0 | 458 | #endif |
michael@0 | 459 | fop->delete_(script); |
michael@0 | 460 | } |
michael@0 | 461 | |
michael@0 | 462 | ICEntry & |
michael@0 | 463 | BaselineScript::icEntry(size_t index) |
michael@0 | 464 | { |
michael@0 | 465 | JS_ASSERT(index < numICEntries()); |
michael@0 | 466 | return icEntryList()[index]; |
michael@0 | 467 | } |
michael@0 | 468 | |
michael@0 | 469 | PCMappingIndexEntry & |
michael@0 | 470 | BaselineScript::pcMappingIndexEntry(size_t index) |
michael@0 | 471 | { |
michael@0 | 472 | JS_ASSERT(index < numPCMappingIndexEntries()); |
michael@0 | 473 | return pcMappingIndexEntryList()[index]; |
michael@0 | 474 | } |
michael@0 | 475 | |
michael@0 | 476 | CompactBufferReader |
michael@0 | 477 | BaselineScript::pcMappingReader(size_t indexEntry) |
michael@0 | 478 | { |
michael@0 | 479 | PCMappingIndexEntry &entry = pcMappingIndexEntry(indexEntry); |
michael@0 | 480 | |
michael@0 | 481 | uint8_t *dataStart = pcMappingData() + entry.bufferOffset; |
michael@0 | 482 | uint8_t *dataEnd = (indexEntry == numPCMappingIndexEntries() - 1) |
michael@0 | 483 | ? pcMappingData() + pcMappingSize_ |
michael@0 | 484 | : pcMappingData() + pcMappingIndexEntry(indexEntry + 1).bufferOffset; |
michael@0 | 485 | |
michael@0 | 486 | return CompactBufferReader(dataStart, dataEnd); |
michael@0 | 487 | } |
michael@0 | 488 | |
michael@0 | 489 | ICEntry * |
michael@0 | 490 | BaselineScript::maybeICEntryFromReturnOffset(CodeOffsetLabel returnOffset) |
michael@0 | 491 | { |
michael@0 | 492 | size_t bottom = 0; |
michael@0 | 493 | size_t top = numICEntries(); |
michael@0 | 494 | size_t mid = bottom + (top - bottom) / 2; |
michael@0 | 495 | while (mid < top) { |
michael@0 | 496 | ICEntry &midEntry = icEntry(mid); |
michael@0 | 497 | if (midEntry.returnOffset().offset() < returnOffset.offset()) |
michael@0 | 498 | bottom = mid + 1; |
michael@0 | 499 | else // if (midEntry.returnOffset().offset() >= returnOffset.offset()) |
michael@0 | 500 | top = mid; |
michael@0 | 501 | mid = bottom + (top - bottom) / 2; |
michael@0 | 502 | } |
michael@0 | 503 | if (mid >= numICEntries()) |
michael@0 | 504 | return nullptr; |
michael@0 | 505 | |
michael@0 | 506 | if (icEntry(mid).returnOffset().offset() != returnOffset.offset()) |
michael@0 | 507 | return nullptr; |
michael@0 | 508 | |
michael@0 | 509 | return &icEntry(mid); |
michael@0 | 510 | } |
michael@0 | 511 | |
michael@0 | 512 | ICEntry & |
michael@0 | 513 | BaselineScript::icEntryFromReturnOffset(CodeOffsetLabel returnOffset) |
michael@0 | 514 | { |
michael@0 | 515 | ICEntry *result = maybeICEntryFromReturnOffset(returnOffset); |
michael@0 | 516 | JS_ASSERT(result); |
michael@0 | 517 | return *result; |
michael@0 | 518 | } |
michael@0 | 519 | |
michael@0 | 520 | uint8_t * |
michael@0 | 521 | BaselineScript::returnAddressForIC(const ICEntry &ent) |
michael@0 | 522 | { |
michael@0 | 523 | return method()->raw() + ent.returnOffset().offset(); |
michael@0 | 524 | } |
michael@0 | 525 | |
michael@0 | 526 | ICEntry & |
michael@0 | 527 | BaselineScript::icEntryFromPCOffset(uint32_t pcOffset) |
michael@0 | 528 | { |
michael@0 | 529 | // Multiple IC entries can have the same PC offset, but this method only looks for |
michael@0 | 530 | // those which have isForOp() set. |
michael@0 | 531 | size_t bottom = 0; |
michael@0 | 532 | size_t top = numICEntries(); |
michael@0 | 533 | size_t mid = bottom + (top - bottom) / 2; |
michael@0 | 534 | while (mid < top) { |
michael@0 | 535 | ICEntry &midEntry = icEntry(mid); |
michael@0 | 536 | if (midEntry.pcOffset() < pcOffset) |
michael@0 | 537 | bottom = mid + 1; |
michael@0 | 538 | else if (midEntry.pcOffset() > pcOffset) |
michael@0 | 539 | top = mid; |
michael@0 | 540 | else |
michael@0 | 541 | break; |
michael@0 | 542 | mid = bottom + (top - bottom) / 2; |
michael@0 | 543 | } |
michael@0 | 544 | // Found an IC entry with a matching PC offset. Search backward, and then |
michael@0 | 545 | // forward from this IC entry, looking for one with the same PC offset which |
michael@0 | 546 | // has isForOp() set. |
michael@0 | 547 | for (size_t i = mid; i < numICEntries() && icEntry(i).pcOffset() == pcOffset; i--) { |
michael@0 | 548 | if (icEntry(i).isForOp()) |
michael@0 | 549 | return icEntry(i); |
michael@0 | 550 | } |
michael@0 | 551 | for (size_t i = mid+1; i < numICEntries() && icEntry(i).pcOffset() == pcOffset; i++) { |
michael@0 | 552 | if (icEntry(i).isForOp()) |
michael@0 | 553 | return icEntry(i); |
michael@0 | 554 | } |
michael@0 | 555 | MOZ_ASSUME_UNREACHABLE("Invalid PC offset for IC entry."); |
michael@0 | 556 | } |
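// Note on the backward scan above: `i` is an unsigned size_t, so decrementing
// it past zero wraps around to a huge value and the `i < numICEntries()`
// condition ends the loop; the scan therefore cannot walk off the front of the
// entry list even though it counts down.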
michael@0 | 557 | |
michael@0 | 558 | ICEntry & |
michael@0 | 559 | BaselineScript::icEntryFromPCOffset(uint32_t pcOffset, ICEntry *prevLookedUpEntry) |
michael@0 | 560 | { |
michael@0 | 561 | // Do a linear forward search from the last queried PC offset, or fallback to a |
michael@0 | 562 | // binary search if the last offset is too far away. |
michael@0 | 563 | if (prevLookedUpEntry && pcOffset >= prevLookedUpEntry->pcOffset() && |
michael@0 | 564 | (pcOffset - prevLookedUpEntry->pcOffset()) <= 10) |
michael@0 | 565 | { |
michael@0 | 566 | ICEntry *firstEntry = &icEntry(0); |
michael@0 | 567 | ICEntry *lastEntry = &icEntry(numICEntries() - 1); |
michael@0 | 568 | ICEntry *curEntry = prevLookedUpEntry; |
michael@0 | 569 | while (curEntry >= firstEntry && curEntry <= lastEntry) { |
michael@0 | 570 | if (curEntry->pcOffset() == pcOffset && curEntry->isForOp()) |
michael@0 | 571 | break; |
michael@0 | 572 | curEntry++; |
michael@0 | 573 | } |
michael@0 | 574 | JS_ASSERT(curEntry->pcOffset() == pcOffset && curEntry->isForOp()); |
michael@0 | 575 | return *curEntry; |
michael@0 | 576 | } |
michael@0 | 577 | |
michael@0 | 578 | return icEntryFromPCOffset(pcOffset); |
michael@0 | 579 | } |
michael@0 | 580 | |
michael@0 | 581 | ICEntry * |
michael@0 | 582 | BaselineScript::maybeICEntryFromReturnAddress(uint8_t *returnAddr) |
michael@0 | 583 | { |
michael@0 | 584 | JS_ASSERT(returnAddr > method_->raw()); |
michael@0 | 585 | JS_ASSERT(returnAddr < method_->raw() + method_->instructionsSize()); |
michael@0 | 586 | CodeOffsetLabel offset(returnAddr - method_->raw()); |
michael@0 | 587 | return maybeICEntryFromReturnOffset(offset); |
michael@0 | 588 | } |
michael@0 | 589 | |
michael@0 | 590 | ICEntry & |
michael@0 | 591 | BaselineScript::icEntryFromReturnAddress(uint8_t *returnAddr) |
michael@0 | 592 | { |
michael@0 | 593 | JS_ASSERT(returnAddr > method_->raw()); |
michael@0 | 594 | JS_ASSERT(returnAddr < method_->raw() + method_->instructionsSize()); |
michael@0 | 595 | CodeOffsetLabel offset(returnAddr - method_->raw()); |
michael@0 | 596 | return icEntryFromReturnOffset(offset); |
michael@0 | 597 | } |
michael@0 | 598 | |
michael@0 | 599 | void |
michael@0 | 600 | BaselineScript::copyICEntries(JSScript *script, const ICEntry *entries, MacroAssembler &masm) |
michael@0 | 601 | { |
michael@0 | 602 | // Fix up the return offset in the IC entries and copy them in. |
michael@0 | 603 | // Also write out the IC entry ptrs in any fallback stubs that were added. |
michael@0 | 604 | for (uint32_t i = 0; i < numICEntries(); i++) { |
michael@0 | 605 | ICEntry &realEntry = icEntry(i); |
michael@0 | 606 | realEntry = entries[i]; |
michael@0 | 607 | realEntry.fixupReturnOffset(masm); |
michael@0 | 608 | |
michael@0 | 609 | if (!realEntry.hasStub()) { |
michael@0 | 610 | // VM call without any stubs. |
michael@0 | 611 | continue; |
michael@0 | 612 | } |
michael@0 | 613 | |
michael@0 | 614 | // If the attached stub is a fallback stub, then fix it up with |
michael@0 | 615 | // a pointer to the (now available) realEntry. |
michael@0 | 616 | if (realEntry.firstStub()->isFallback()) |
michael@0 | 617 | realEntry.firstStub()->toFallbackStub()->fixupICEntry(&realEntry); |
michael@0 | 618 | |
michael@0 | 619 | if (realEntry.firstStub()->isTypeMonitor_Fallback()) { |
michael@0 | 620 | ICTypeMonitor_Fallback *stub = realEntry.firstStub()->toTypeMonitor_Fallback(); |
michael@0 | 621 | stub->fixupICEntry(&realEntry); |
michael@0 | 622 | } |
michael@0 | 623 | |
michael@0 | 624 | if (realEntry.firstStub()->isTableSwitch()) { |
michael@0 | 625 | ICTableSwitch *stub = realEntry.firstStub()->toTableSwitch(); |
michael@0 | 626 | stub->fixupJumpTable(script, this); |
michael@0 | 627 | } |
michael@0 | 628 | } |
michael@0 | 629 | } |
michael@0 | 630 | |
michael@0 | 631 | void |
michael@0 | 632 | BaselineScript::adoptFallbackStubs(FallbackICStubSpace *stubSpace) |
michael@0 | 633 | { |
michael@0 | 634 | fallbackStubSpace_.adoptFrom(stubSpace); |
michael@0 | 635 | } |
michael@0 | 636 | |
michael@0 | 637 | void |
michael@0 | 638 | BaselineScript::copyPCMappingEntries(const CompactBufferWriter &entries) |
michael@0 | 639 | { |
michael@0 | 640 | JS_ASSERT(entries.length() > 0); |
michael@0 | 641 | JS_ASSERT(entries.length() == pcMappingSize_); |
michael@0 | 642 | |
michael@0 | 643 | memcpy(pcMappingData(), entries.buffer(), entries.length()); |
michael@0 | 644 | } |
michael@0 | 645 | |
michael@0 | 646 | void |
michael@0 | 647 | BaselineScript::copyPCMappingIndexEntries(const PCMappingIndexEntry *entries) |
michael@0 | 648 | { |
michael@0 | 649 | for (uint32_t i = 0; i < numPCMappingIndexEntries(); i++) |
michael@0 | 650 | pcMappingIndexEntry(i) = entries[i]; |
michael@0 | 651 | } |
michael@0 | 652 | |
michael@0 | 653 | uint8_t * |
michael@0 | 654 | BaselineScript::nativeCodeForPC(JSScript *script, jsbytecode *pc, PCMappingSlotInfo *slotInfo) |
michael@0 | 655 | { |
michael@0 | 656 | JS_ASSERT_IF(script->hasBaselineScript(), script->baselineScript() == this); |
michael@0 | 657 | |
michael@0 | 658 | uint32_t pcOffset = script->pcToOffset(pc); |
michael@0 | 659 | |
michael@0 | 660 | // Look for the first PCMappingIndexEntry with pc > the pc we are |
michael@0 | 661 | // interested in. |
michael@0 | 662 | uint32_t i = 1; |
michael@0 | 663 | for (; i < numPCMappingIndexEntries(); i++) { |
michael@0 | 664 | if (pcMappingIndexEntry(i).pcOffset > pcOffset) |
michael@0 | 665 | break; |
michael@0 | 666 | } |
michael@0 | 667 | |
michael@0 | 668 | // The previous entry contains the current pc. |
michael@0 | 669 | JS_ASSERT(i > 0); |
michael@0 | 670 | i--; |
michael@0 | 671 | |
michael@0 | 672 | PCMappingIndexEntry &entry = pcMappingIndexEntry(i); |
michael@0 | 673 | JS_ASSERT(pcOffset >= entry.pcOffset); |
michael@0 | 674 | |
michael@0 | 675 | CompactBufferReader reader(pcMappingReader(i)); |
michael@0 | 676 | jsbytecode *curPC = script->offsetToPC(entry.pcOffset); |
michael@0 | 677 | uint32_t nativeOffset = entry.nativeOffset; |
michael@0 | 678 | |
michael@0 | 679 | JS_ASSERT(script->containsPC(curPC)); |
michael@0 | 680 | JS_ASSERT(curPC <= pc); |
michael@0 | 681 | |
michael@0 | 682 | while (true) { |
michael@0 | 683 | // If the high bit is set, the native offset relative to the |
michael@0 | 684 | // previous pc is nonzero and is encoded next. |
michael@0 | 685 | uint8_t b = reader.readByte(); |
michael@0 | 686 | if (b & 0x80) |
michael@0 | 687 | nativeOffset += reader.readUnsigned(); |
michael@0 | 688 | |
michael@0 | 689 | if (curPC == pc) { |
michael@0 | 690 | if (slotInfo) |
michael@0 | 691 | *slotInfo = PCMappingSlotInfo(b & ~0x80); |
michael@0 | 692 | return method_->raw() + nativeOffset; |
michael@0 | 693 | } |
michael@0 | 694 | |
michael@0 | 695 | curPC += GetBytecodeLength(curPC); |
michael@0 | 696 | } |
michael@0 | 697 | |
michael@0 | 698 | MOZ_ASSUME_UNREACHABLE("Invalid pc"); |
michael@0 | 699 | } |
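// Encoding note for the loop above: each bytecode op in a pc-mapping chunk
// contributes one header byte. If its high bit (0x80) is set, the op's native
// code starts at a new offset and an unsigned delta to add to the running
// nativeOffset follows in the buffer; the low seven bits (b & ~0x80) are the
// PCMappingSlotInfo describing where the top stack values live at that op.
// For example, a header byte of 0x81 followed by a delta of 12 advances
// nativeOffset by 12 and yields slot info 0x01.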
michael@0 | 700 | |
michael@0 | 701 | jsbytecode * |
michael@0 | 702 | BaselineScript::pcForReturnOffset(JSScript *script, uint32_t nativeOffset) |
michael@0 | 703 | { |
michael@0 | 704 | JS_ASSERT(script->baselineScript() == this); |
michael@0 | 705 | JS_ASSERT(nativeOffset < method_->instructionsSize()); |
michael@0 | 706 | |
michael@0 | 707 | // Look for the first PCMappingIndexEntry with native offset > the native offset we are |
michael@0 | 708 | // interested in. |
michael@0 | 709 | uint32_t i = 1; |
michael@0 | 710 | for (; i < numPCMappingIndexEntries(); i++) { |
michael@0 | 711 | if (pcMappingIndexEntry(i).nativeOffset > nativeOffset) |
michael@0 | 712 | break; |
michael@0 | 713 | } |
michael@0 | 714 | |
michael@0 | 715 | // Go back an entry to search forward from. |
michael@0 | 716 | JS_ASSERT(i > 0); |
michael@0 | 717 | i--; |
michael@0 | 718 | |
michael@0 | 719 | PCMappingIndexEntry &entry = pcMappingIndexEntry(i); |
michael@0 | 720 | JS_ASSERT(nativeOffset >= entry.nativeOffset); |
michael@0 | 721 | |
michael@0 | 722 | CompactBufferReader reader(pcMappingReader(i)); |
michael@0 | 723 | jsbytecode *curPC = script->offsetToPC(entry.pcOffset); |
michael@0 | 724 | uint32_t curNativeOffset = entry.nativeOffset; |
michael@0 | 725 | |
michael@0 | 726 | JS_ASSERT(script->containsPC(curPC)); |
michael@0 | 727 | JS_ASSERT(curNativeOffset <= nativeOffset); |
michael@0 | 728 | |
michael@0 | 729 | while (true) { |
michael@0 | 730 | // If the high bit is set, the native offset relative to the |
michael@0 | 731 | // previous pc is nonzero and is encoded next. |
michael@0 | 732 | uint8_t b = reader.readByte(); |
michael@0 | 733 | if (b & 0x80) |
michael@0 | 734 | curNativeOffset += reader.readUnsigned(); |
michael@0 | 735 | |
michael@0 | 736 | if (curNativeOffset == nativeOffset) |
michael@0 | 737 | return curPC; |
michael@0 | 738 | |
michael@0 | 739 | curPC += GetBytecodeLength(curPC); |
michael@0 | 740 | } |
michael@0 | 741 | |
michael@0 | 742 | MOZ_ASSUME_UNREACHABLE("Invalid pc"); |
michael@0 | 743 | } |
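// Note: this is the inverse of nativeCodeForPC() above. It walks the same
// compact encoding, but accumulates native offsets until the running offset
// matches the requested return offset and then reports the bytecode pc reached
// at that point.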
michael@0 | 744 | |
michael@0 | 745 | jsbytecode * |
michael@0 | 746 | BaselineScript::pcForReturnAddress(JSScript *script, uint8_t *nativeAddress) |
michael@0 | 747 | { |
michael@0 | 748 | JS_ASSERT(script->baselineScript() == this); |
michael@0 | 749 | JS_ASSERT(nativeAddress >= method_->raw()); |
michael@0 | 750 | JS_ASSERT(nativeAddress < method_->raw() + method_->instructionsSize()); |
michael@0 | 751 | return pcForReturnOffset(script, uint32_t(nativeAddress - method_->raw())); |
michael@0 | 752 | } |
michael@0 | 753 | |
michael@0 | 754 | void |
michael@0 | 755 | BaselineScript::toggleDebugTraps(JSScript *script, jsbytecode *pc) |
michael@0 | 756 | { |
michael@0 | 757 | JS_ASSERT(script->baselineScript() == this); |
michael@0 | 758 | |
michael@0 | 759 | // Only scripts compiled for debug mode have toggled calls. |
michael@0 | 760 | if (!debugMode()) |
michael@0 | 761 | return; |
michael@0 | 762 | |
michael@0 | 763 | SrcNoteLineScanner scanner(script->notes(), script->lineno()); |
michael@0 | 764 | |
michael@0 | 765 | for (uint32_t i = 0; i < numPCMappingIndexEntries(); i++) { |
michael@0 | 766 | PCMappingIndexEntry &entry = pcMappingIndexEntry(i); |
michael@0 | 767 | |
michael@0 | 768 | CompactBufferReader reader(pcMappingReader(i)); |
michael@0 | 769 | jsbytecode *curPC = script->offsetToPC(entry.pcOffset); |
michael@0 | 770 | uint32_t nativeOffset = entry.nativeOffset; |
michael@0 | 771 | |
michael@0 | 772 | JS_ASSERT(script->containsPC(curPC)); |
michael@0 | 773 | |
michael@0 | 774 | while (reader.more()) { |
michael@0 | 775 | uint8_t b = reader.readByte(); |
michael@0 | 776 | if (b & 0x80) |
michael@0 | 777 | nativeOffset += reader.readUnsigned(); |
michael@0 | 778 | |
michael@0 | 779 | scanner.advanceTo(script->pcToOffset(curPC)); |
michael@0 | 780 | |
michael@0 | 781 | if (!pc || pc == curPC) { |
michael@0 | 782 | bool enabled = (script->stepModeEnabled() && scanner.isLineHeader()) || |
michael@0 | 783 | script->hasBreakpointsAt(curPC); |
michael@0 | 784 | |
michael@0 | 785 | // Patch the trap. |
michael@0 | 786 | CodeLocationLabel label(method(), nativeOffset); |
michael@0 | 787 | Assembler::ToggleCall(label, enabled); |
michael@0 | 788 | } |
michael@0 | 789 | |
michael@0 | 790 | curPC += GetBytecodeLength(curPC); |
michael@0 | 791 | } |
michael@0 | 792 | } |
michael@0 | 793 | } |
michael@0 | 794 | |
michael@0 | 795 | void |
michael@0 | 796 | BaselineScript::toggleSPS(bool enable) |
michael@0 | 797 | { |
michael@0 | 798 | JS_ASSERT(enable == !(bool)spsOn_); |
michael@0 | 799 | |
michael@0 | 800 | IonSpew(IonSpew_BaselineIC, " toggling SPS %s for BaselineScript %p", |
michael@0 | 801 | enable ? "on" : "off", this); |
michael@0 | 802 | |
michael@0 | 803 | // Toggle the jump |
michael@0 | 804 | CodeLocationLabel pushToggleLocation(method_, CodeOffsetLabel(spsPushToggleOffset_)); |
michael@0 | 805 | if (enable) |
michael@0 | 806 | Assembler::ToggleToCmp(pushToggleLocation); |
michael@0 | 807 | else |
michael@0 | 808 | Assembler::ToggleToJmp(pushToggleLocation); |
michael@0 | 809 | #ifdef DEBUG |
michael@0 | 810 | spsOn_ = enable; |
michael@0 | 811 | #endif |
michael@0 | 812 | } |
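// Note on the mechanism above: the SPS toggle is an instruction patch rather
// than a runtime flag check. The compiler emits a toggled jump at
// spsPushToggleOffset_ in front of the profiler-push code; ToggleToJmp makes
// that site a short jump that skips the push (profiling off), while
// ToggleToCmp turns it into a same-length cmp that falls through into the push
// (profiling on), so SPS can be flipped per BaselineScript without
// recompiling.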
michael@0 | 813 | |
michael@0 | 814 | void |
michael@0 | 815 | BaselineScript::purgeOptimizedStubs(Zone *zone) |
michael@0 | 816 | { |
michael@0 | 817 | IonSpew(IonSpew_BaselineIC, "Purging optimized stubs"); |
michael@0 | 818 | |
michael@0 | 819 | for (size_t i = 0; i < numICEntries(); i++) { |
michael@0 | 820 | ICEntry &entry = icEntry(i); |
michael@0 | 821 | if (!entry.hasStub()) |
michael@0 | 822 | continue; |
michael@0 | 823 | |
michael@0 | 824 | ICStub *lastStub = entry.firstStub(); |
michael@0 | 825 | while (lastStub->next()) |
michael@0 | 826 | lastStub = lastStub->next(); |
michael@0 | 827 | |
michael@0 | 828 | if (lastStub->isFallback()) { |
michael@0 | 829 | // Unlink all stubs allocated in the optimized space. |
michael@0 | 830 | ICStub *stub = entry.firstStub(); |
michael@0 | 831 | ICStub *prev = nullptr; |
michael@0 | 832 | |
michael@0 | 833 | while (stub->next()) { |
michael@0 | 834 | if (!stub->allocatedInFallbackSpace()) { |
michael@0 | 835 | lastStub->toFallbackStub()->unlinkStub(zone, prev, stub); |
michael@0 | 836 | stub = stub->next(); |
michael@0 | 837 | continue; |
michael@0 | 838 | } |
michael@0 | 839 | |
michael@0 | 840 | prev = stub; |
michael@0 | 841 | stub = stub->next(); |
michael@0 | 842 | } |
michael@0 | 843 | |
michael@0 | 844 | if (lastStub->isMonitoredFallback()) { |
michael@0 | 845 | // Monitor stubs can't make calls, so are always in the |
michael@0 | 846 | // optimized stub space. |
michael@0 | 847 | ICTypeMonitor_Fallback *lastMonStub = |
michael@0 | 848 | lastStub->toMonitoredFallbackStub()->fallbackMonitorStub(); |
michael@0 | 849 | lastMonStub->resetMonitorStubChain(zone); |
michael@0 | 850 | } |
michael@0 | 851 | } else if (lastStub->isTypeMonitor_Fallback()) { |
michael@0 | 852 | lastStub->toTypeMonitor_Fallback()->resetMonitorStubChain(zone); |
michael@0 | 853 | } else { |
michael@0 | 854 | JS_ASSERT(lastStub->isTableSwitch()); |
michael@0 | 855 | } |
michael@0 | 856 | } |
michael@0 | 857 | |
michael@0 | 858 | #ifdef DEBUG |
michael@0 | 859 | // All remaining stubs must be allocated in the fallback space. |
michael@0 | 860 | for (size_t i = 0; i < numICEntries(); i++) { |
michael@0 | 861 | ICEntry &entry = icEntry(i); |
michael@0 | 862 | if (!entry.hasStub()) |
michael@0 | 863 | continue; |
michael@0 | 864 | |
michael@0 | 865 | ICStub *stub = entry.firstStub(); |
michael@0 | 866 | while (stub->next()) { |
michael@0 | 867 | JS_ASSERT(stub->allocatedInFallbackSpace()); |
michael@0 | 868 | stub = stub->next(); |
michael@0 | 869 | } |
michael@0 | 870 | } |
michael@0 | 871 | #endif |
michael@0 | 872 | } |
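// Note on stub lifetimes here: stubs for which allocatedInFallbackSpace() is
// true live in this script's fallbackStubSpace_ and survive the purge, while
// the other ("optimized") stubs come from a shared, purgeable stub space and
// are unlinked from every chain above; the fallback stub at the end of each
// chain then re-attaches optimized stubs lazily as the code runs again.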
michael@0 | 873 | |
michael@0 | 874 | void |
michael@0 | 875 | jit::FinishDiscardBaselineScript(FreeOp *fop, JSScript *script) |
michael@0 | 876 | { |
michael@0 | 877 | if (!script->hasBaselineScript()) |
michael@0 | 878 | return; |
michael@0 | 879 | |
michael@0 | 880 | if (script->baselineScript()->active()) { |
michael@0 | 881 | // Script is live on the stack. Keep the BaselineScript, but destroy |
michael@0 | 882 | // stubs allocated in the optimized stub space. |
michael@0 | 883 | script->baselineScript()->purgeOptimizedStubs(script->zone()); |
michael@0 | 884 | |
michael@0 | 885 | // Reset |active| flag so that we don't need a separate script |
michael@0 | 886 | // iteration to unmark them. |
michael@0 | 887 | script->baselineScript()->resetActive(); |
michael@0 | 888 | return; |
michael@0 | 889 | } |
michael@0 | 890 | |
michael@0 | 891 | BaselineScript *baseline = script->baselineScript(); |
michael@0 | 892 | script->setBaselineScript(nullptr, nullptr); |
michael@0 | 893 | BaselineScript::Destroy(fop, baseline); |
michael@0 | 894 | } |
michael@0 | 895 | |
michael@0 | 896 | void |
michael@0 | 897 | jit::JitCompartment::toggleBaselineStubBarriers(bool enabled) |
michael@0 | 898 | { |
michael@0 | 899 | for (ICStubCodeMap::Enum e(*stubCodes_); !e.empty(); e.popFront()) { |
michael@0 | 900 | JitCode *code = *e.front().value().unsafeGet(); |
michael@0 | 901 | code->togglePreBarriers(enabled); |
michael@0 | 902 | } |
michael@0 | 903 | } |
michael@0 | 904 | |
michael@0 | 905 | void |
michael@0 | 906 | jit::AddSizeOfBaselineData(JSScript *script, mozilla::MallocSizeOf mallocSizeOf, size_t *data, |
michael@0 | 907 | size_t *fallbackStubs) |
michael@0 | 908 | { |
michael@0 | 909 | if (script->hasBaselineScript()) |
michael@0 | 910 | script->baselineScript()->addSizeOfIncludingThis(mallocSizeOf, data, fallbackStubs); |
michael@0 | 911 | } |
michael@0 | 912 | |
michael@0 | 913 | void |
michael@0 | 914 | jit::ToggleBaselineSPS(JSRuntime *runtime, bool enable) |
michael@0 | 915 | { |
michael@0 | 916 | for (ZonesIter zone(runtime, SkipAtoms); !zone.done(); zone.next()) { |
michael@0 | 917 | for (gc::CellIter i(zone, gc::FINALIZE_SCRIPT); !i.done(); i.next()) { |
michael@0 | 918 | JSScript *script = i.get<JSScript>(); |
michael@0 | 919 | if (!script->hasBaselineScript()) |
michael@0 | 920 | continue; |
michael@0 | 921 | script->baselineScript()->toggleSPS(enable); |
michael@0 | 922 | } |
michael@0 | 923 | } |
michael@0 | 924 | } |
michael@0 | 925 | |
michael@0 | 926 | static void |
michael@0 | 927 | MarkActiveBaselineScripts(JSRuntime *rt, const JitActivationIterator &activation) |
michael@0 | 928 | { |
michael@0 | 929 | for (jit::JitFrameIterator iter(activation); !iter.done(); ++iter) { |
michael@0 | 930 | switch (iter.type()) { |
michael@0 | 931 | case JitFrame_BaselineJS: |
michael@0 | 932 | iter.script()->baselineScript()->setActive(); |
michael@0 | 933 | break; |
michael@0 | 934 | case JitFrame_IonJS: { |
michael@0 | 935 | // Keep the baseline script around, since bailouts from the ion |
michael@0 | 936 | // jitcode might need to re-enter the baseline jitcode. |
michael@0 | 937 | iter.script()->baselineScript()->setActive(); |
michael@0 | 938 | for (InlineFrameIterator inlineIter(rt, &iter); inlineIter.more(); ++inlineIter) |
michael@0 | 939 | inlineIter.script()->baselineScript()->setActive(); |
michael@0 | 940 | break; |
michael@0 | 941 | } |
michael@0 | 942 | default:; |
michael@0 | 943 | } |
michael@0 | 944 | } |
michael@0 | 945 | } |
michael@0 | 946 | |
michael@0 | 947 | void |
michael@0 | 948 | jit::MarkActiveBaselineScripts(Zone *zone) |
michael@0 | 949 | { |
michael@0 | 950 | JSRuntime *rt = zone->runtimeFromMainThread(); |
michael@0 | 951 | for (JitActivationIterator iter(rt); !iter.done(); ++iter) { |
michael@0 | 952 | if (iter->compartment()->zone() == zone) |
michael@0 | 953 | MarkActiveBaselineScripts(rt, iter); |
michael@0 | 954 | } |
michael@0 | 955 | } |