js/src/jit/mips/Trampoline-mips.cpp

changeset 0:6474c204b198
     1.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
     1.2 +++ b/js/src/jit/mips/Trampoline-mips.cpp	Wed Dec 31 06:09:35 2014 +0100
     1.3 @@ -0,0 +1,997 @@
     1.4 +/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
     1.5 + * vim: set ts=8 sts=4 et sw=4 tw=99:
     1.6 + * This Source Code Form is subject to the terms of the Mozilla Public
     1.7 + * License, v. 2.0. If a copy of the MPL was not distributed with this
     1.8 + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
     1.9 +
    1.10 +#include "jscompartment.h"
    1.11 +
    1.12 +#include "jit/Bailouts.h"
    1.13 +#include "jit/IonFrames.h"
    1.14 +#include "jit/IonLinker.h"
    1.15 +#include "jit/IonSpewer.h"
    1.16 +#include "jit/JitCompartment.h"
    1.17 +#include "jit/mips/Bailouts-mips.h"
    1.18 +#include "jit/mips/BaselineHelpers-mips.h"
    1.19 +#ifdef JS_ION_PERF
    1.20 +# include "jit/PerfSpewer.h"
    1.21 +#endif
    1.22 +#include "jit/VMFunctions.h"
    1.23 +
    1.24 +#include "jit/ExecutionMode-inl.h"
    1.25 +
    1.26 +using namespace js;
    1.27 +using namespace js::jit;
    1.28 +
    1.29 +static_assert(sizeof(uintptr_t) == sizeof(uint32_t), "Not 64-bit clean.");
    1.30 +
    1.31 +struct EnterJITRegs
    1.32 +{
    1.33 +    double f30;
    1.34 +    double f28;
    1.35 +    double f26;
    1.36 +    double f24;
    1.37 +    double f22;
    1.38 +    double f20;
    1.39 +
    1.40 +    // empty slot for alignment
    1.41 +    uintptr_t align;
    1.42 +
    1.43 +    // non-volatile registers.
    1.44 +    uintptr_t ra;
    1.45 +    uintptr_t s7;
    1.46 +    uintptr_t s6;
    1.47 +    uintptr_t s5;
    1.48 +    uintptr_t s4;
    1.49 +    uintptr_t s3;
    1.50 +    uintptr_t s2;
    1.51 +    uintptr_t s1;
    1.52 +    uintptr_t s0;
    1.53 +};
    1.54 +
    1.55 +struct EnterJITArgs
    1.56 +{
     1.57 +    // First 4 argument placeholders
    1.58 +    void *jitcode; // <- sp points here when function is entered.
    1.59 +    int maxArgc;
    1.60 +    Value *maxArgv;
    1.61 +    InterpreterFrame *fp;
    1.62 +
    1.63 +    // Arguments on stack
    1.64 +    CalleeToken calleeToken;
    1.65 +    JSObject *scopeChain;
    1.66 +    size_t numStackValues;
    1.67 +    Value *vp;
    1.68 +};
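
As a sanity check on the layout above: with the 32-bit assumption asserted at the top of the file, the four argument placeholder slots occupy the first 16 bytes and the stack-passed members follow. The mirror below is an illustration only; it swaps the engine types for plain pointers, so all names are placeholders rather than the real CalleeToken/Value/InterpreterFrame definitions.

    // Standalone illustration (not part of the patch): a plain-pointer mirror
    // of EnterJITArgs, used to see where the stack-passed members land under
    // the O32 layout on a 32-bit target.
    #include <cstddef>
    #include <cstdio>

    struct EnterJITArgsMirror {
        void *jitcode;              // a0 home slot; sp points here on entry
        int maxArgc;                // a1 home slot
        void *maxArgv;              // a2 home slot
        void *fp;                   // a3 home slot
        void *calleeToken;          // first argument actually read from the stack
        void *scopeChain;
        std::size_t numStackValues;
        void *vp;
    };

    int main() {
        // On the 32-bit target this trampoline assumes, these print 16, 20, 24, 28.
        std::printf("calleeToken=%zu scopeChain=%zu numStackValues=%zu vp=%zu\n",
                    offsetof(EnterJITArgsMirror, calleeToken),
                    offsetof(EnterJITArgsMirror, scopeChain),
                    offsetof(EnterJITArgsMirror, numStackValues),
                    offsetof(EnterJITArgsMirror, vp));
        return 0;
    }
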
    1.69 +
    1.70 +static void
    1.71 +GenerateReturn(MacroAssembler &masm, int returnCode)
    1.72 +{
    1.73 +    MOZ_ASSERT(masm.framePushed() == sizeof(EnterJITRegs));
    1.74 +
    1.75 +    // Restore non-volatile registers
    1.76 +    masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, s0)), s0);
    1.77 +    masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, s1)), s1);
    1.78 +    masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, s2)), s2);
    1.79 +    masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, s3)), s3);
    1.80 +    masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, s4)), s4);
    1.81 +    masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, s5)), s5);
    1.82 +    masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, s6)), s6);
    1.83 +    masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, s7)), s7);
    1.84 +    masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, ra)), ra);
    1.85 +
    1.86 +    // Restore non-volatile floating point registers
    1.87 +    masm.loadDouble(Address(StackPointer, offsetof(EnterJITRegs, f20)), f20);
    1.88 +    masm.loadDouble(Address(StackPointer, offsetof(EnterJITRegs, f22)), f22);
    1.89 +    masm.loadDouble(Address(StackPointer, offsetof(EnterJITRegs, f24)), f24);
    1.90 +    masm.loadDouble(Address(StackPointer, offsetof(EnterJITRegs, f26)), f26);
    1.91 +    masm.loadDouble(Address(StackPointer, offsetof(EnterJITRegs, f28)), f28);
    1.92 +    masm.loadDouble(Address(StackPointer, offsetof(EnterJITRegs, f30)), f30);
    1.93 +
    1.94 +    masm.freeStack(sizeof(EnterJITRegs));
    1.95 +
    1.96 +    masm.branch(ra);
    1.97 +}
    1.98 +
    1.99 +static void
   1.100 +GeneratePrologue(MacroAssembler &masm)
   1.101 +{
   1.102 +    // Save non-volatile registers. These must be saved by the trampoline,
   1.103 +    // rather than the JIT'd code, because they are scanned by the conservative
   1.104 +    // scanner.
   1.105 +    masm.reserveStack(sizeof(EnterJITRegs));
   1.106 +    masm.storePtr(s0, Address(StackPointer, offsetof(EnterJITRegs, s0)));
   1.107 +    masm.storePtr(s1, Address(StackPointer, offsetof(EnterJITRegs, s1)));
   1.108 +    masm.storePtr(s2, Address(StackPointer, offsetof(EnterJITRegs, s2)));
   1.109 +    masm.storePtr(s3, Address(StackPointer, offsetof(EnterJITRegs, s3)));
   1.110 +    masm.storePtr(s4, Address(StackPointer, offsetof(EnterJITRegs, s4)));
   1.111 +    masm.storePtr(s5, Address(StackPointer, offsetof(EnterJITRegs, s5)));
   1.112 +    masm.storePtr(s6, Address(StackPointer, offsetof(EnterJITRegs, s6)));
   1.113 +    masm.storePtr(s7, Address(StackPointer, offsetof(EnterJITRegs, s7)));
   1.114 +    masm.storePtr(ra, Address(StackPointer, offsetof(EnterJITRegs, ra)));
   1.115 +
   1.116 +    masm.as_sd(f20, StackPointer, offsetof(EnterJITRegs, f20));
   1.117 +    masm.as_sd(f22, StackPointer, offsetof(EnterJITRegs, f22));
   1.118 +    masm.as_sd(f24, StackPointer, offsetof(EnterJITRegs, f24));
   1.119 +    masm.as_sd(f26, StackPointer, offsetof(EnterJITRegs, f26));
   1.120 +    masm.as_sd(f28, StackPointer, offsetof(EnterJITRegs, f28));
   1.121 +    masm.as_sd(f30, StackPointer, offsetof(EnterJITRegs, f30));
   1.122 +}
   1.123 +
   1.124 +
   1.125 +/*
    1.126 + * This method generates a trampoline for a C++ function with the following
    1.127 + * signature:
    1.128 + *   void enter(void *code, int argc, Value *argv, InterpreterFrame *fp,
    1.129 + *              CalleeToken calleeToken, JSObject *scopeChain, size_t numStackValues, Value *vp)
    1.130 + *   ...using the standard O32 ABI calling convention
   1.131 + */
   1.132 +JitCode *
   1.133 +JitRuntime::generateEnterJIT(JSContext *cx, EnterJitType type)
   1.134 +{
   1.135 +    const Register reg_code = a0;
   1.136 +    const Register reg_argc = a1;
   1.137 +    const Register reg_argv = a2;
   1.138 +    const Register reg_frame = a3;
   1.139 +
   1.140 +    MOZ_ASSERT(OsrFrameReg == reg_frame);
   1.141 +
   1.142 +    MacroAssembler masm(cx);
   1.143 +    GeneratePrologue(masm);
   1.144 +
   1.145 +    const Address slotToken(sp, sizeof(EnterJITRegs) + offsetof(EnterJITArgs, calleeToken));
   1.146 +    const Address slotVp(sp, sizeof(EnterJITRegs) + offsetof(EnterJITArgs, vp));
   1.147 +
   1.148 +    // Save stack pointer into s4
   1.149 +    masm.movePtr(StackPointer, s4);
   1.150 +
   1.151 +    // Load calleeToken into s2.
   1.152 +    masm.loadPtr(slotToken, s2);
   1.153 +
   1.154 +    // Save stack pointer as baseline frame.
   1.155 +    if (type == EnterJitBaseline)
   1.156 +        masm.movePtr(StackPointer, BaselineFrameReg);
   1.157 +
   1.158 +    // Load the number of actual arguments into s3.
   1.159 +    masm.loadPtr(slotVp, s3);
   1.160 +    masm.unboxInt32(Address(s3, 0), s3);
   1.161 +
   1.162 +    /***************************************************************
   1.163 +    Loop over argv vector, push arguments onto stack in reverse order
   1.164 +    ***************************************************************/
   1.165 +
   1.166 +    masm.as_sll(s0, reg_argc, 3); // s0 = argc * 8
   1.167 +    masm.addPtr(reg_argv, s0); // s0 = argv + argc * 8
   1.168 +
   1.169 +    // Loop over arguments, copying them from an unknown buffer onto the Ion
   1.170 +    // stack so they can be accessed from JIT'ed code.
   1.171 +    Label header, footer;
   1.172 +    // If there aren't any arguments, don't do anything
   1.173 +    masm.ma_b(s0, reg_argv, &footer, Assembler::BelowOrEqual, ShortJump);
   1.174 +    {
   1.175 +        masm.bind(&header);
   1.176 +
   1.177 +        masm.subPtr(Imm32(2 * sizeof(uintptr_t)), s0);
   1.178 +        masm.subPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);
   1.179 +
   1.180 +        ValueOperand value = ValueOperand(s6, s7);
   1.181 +        masm.loadValue(Address(s0, 0), value);
   1.182 +        masm.storeValue(value, Address(StackPointer, 0));
   1.183 +
   1.184 +        masm.ma_b(s0, reg_argv, &header, Assembler::Above, ShortJump);
   1.185 +    }
   1.186 +    masm.bind(&footer);
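
Runtime effect of the loop above, restated in C++ for clarity: it walks argv from its last element toward its first, so after the loop the values sit above the new stack pointer at ascending addresses in their original order. The Value64 type below is a placeholder for the engine's 8-byte boxed Value.

    // Illustrative restatement of the emitted argument-copy loop; not engine code.
    #include <cstdint>

    struct Value64 { uint32_t w0, w1; };   // stand-in for the 8-byte boxed Value

    static Value64 *CopyArgsToStack(Value64 *sp, const Value64 *argv, int argc) {
        const Value64 *src = argv + argc;  // s0 = argv + argc * 8
        while (src > argv) {               // loop while s0 > argv (Above)
            --src;                         // s0 -= sizeof(Value)
            *--sp = *src;                  // StackPointer -= 8; copy the value
        }
        return sp;                         // stack pointer after all pushes
    }
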
   1.187 +
   1.188 +    masm.subPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);
   1.189 +    masm.storePtr(s3, Address(StackPointer, sizeof(uintptr_t))); // actual arguments
   1.190 +    masm.storePtr(s2, Address(StackPointer, 0)); // callee token
   1.191 +
   1.192 +    masm.subPtr(StackPointer, s4);
   1.193 +    masm.makeFrameDescriptor(s4, JitFrame_Entry);
   1.194 +    masm.push(s4); // descriptor
   1.195 +
   1.196 +    CodeLabel returnLabel;
   1.197 +    if (type == EnterJitBaseline) {
   1.198 +        // Handle OSR.
   1.199 +        GeneralRegisterSet regs(GeneralRegisterSet::All());
   1.200 +        regs.take(JSReturnOperand);
   1.201 +        regs.take(OsrFrameReg);
   1.202 +        regs.take(BaselineFrameReg);
   1.203 +        regs.take(reg_code);
   1.204 +
   1.205 +        const Address slotNumStackValues(BaselineFrameReg, sizeof(EnterJITRegs) +
   1.206 +                                         offsetof(EnterJITArgs, numStackValues));
   1.207 +        const Address slotScopeChain(BaselineFrameReg, sizeof(EnterJITRegs) +
   1.208 +                                     offsetof(EnterJITArgs, scopeChain));
   1.209 +
   1.210 +        Label notOsr;
   1.211 +        masm.ma_b(OsrFrameReg, OsrFrameReg, &notOsr, Assembler::Zero, ShortJump);
   1.212 +
   1.213 +        Register scratch = regs.takeAny();
   1.214 +
   1.215 +        Register numStackValues = regs.takeAny();
   1.216 +        masm.load32(slotNumStackValues, numStackValues);
   1.217 +
   1.218 +        // Push return address, previous frame pointer.
   1.219 +        masm.subPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);
   1.220 +        masm.ma_li(scratch, returnLabel.dest());
   1.221 +        masm.storePtr(scratch, Address(StackPointer, sizeof(uintptr_t)));
   1.222 +        masm.storePtr(BaselineFrameReg, Address(StackPointer, 0));
   1.223 +
   1.224 +        // Reserve frame.
   1.225 +        Register framePtr = BaselineFrameReg;
   1.226 +        masm.subPtr(Imm32(BaselineFrame::Size()), StackPointer);
   1.227 +        masm.movePtr(StackPointer, framePtr);
   1.228 +
   1.229 +        // Reserve space for locals and stack values.
   1.230 +        masm.ma_sll(scratch, numStackValues, Imm32(3));
   1.231 +        masm.subPtr(scratch, StackPointer);
   1.232 +
   1.233 +        // Enter exit frame.
   1.234 +        masm.addPtr(Imm32(BaselineFrame::Size() + BaselineFrame::FramePointerOffset), scratch);
   1.235 +        masm.makeFrameDescriptor(scratch, JitFrame_BaselineJS);
   1.236 +
   1.237 +        // Push frame descriptor and fake return address.
   1.238 +        masm.reserveStack(2 * sizeof(uintptr_t));
   1.239 +        masm.storePtr(scratch, Address(StackPointer, sizeof(uintptr_t))); // Frame descriptor
   1.240 +        masm.storePtr(zero, Address(StackPointer, 0)); // fake return address
   1.241 +
   1.242 +        masm.enterFakeExitFrame();
   1.243 +
   1.244 +        masm.reserveStack(2 * sizeof(uintptr_t));
   1.245 +        masm.storePtr(framePtr, Address(StackPointer, sizeof(uintptr_t))); // BaselineFrame
   1.246 +        masm.storePtr(reg_code, Address(StackPointer, 0)); // jitcode
   1.247 +
   1.248 +        masm.setupUnalignedABICall(3, scratch);
   1.249 +        masm.passABIArg(BaselineFrameReg); // BaselineFrame
   1.250 +        masm.passABIArg(OsrFrameReg); // InterpreterFrame
   1.251 +        masm.passABIArg(numStackValues);
   1.252 +        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, jit::InitBaselineFrameForOsr));
   1.253 +
   1.254 +        Register jitcode = regs.takeAny();
   1.255 +        masm.loadPtr(Address(StackPointer, 0), jitcode);
   1.256 +        masm.loadPtr(Address(StackPointer, sizeof(uintptr_t)), framePtr);
   1.257 +        masm.freeStack(2 * sizeof(uintptr_t));
   1.258 +
   1.259 +        MOZ_ASSERT(jitcode != ReturnReg);
   1.260 +
   1.261 +        Label error;
   1.262 +        masm.freeStack(IonExitFrameLayout::SizeWithFooter());
   1.263 +        masm.addPtr(Imm32(BaselineFrame::Size()), framePtr);
   1.264 +        masm.branchIfFalseBool(ReturnReg, &error);
   1.265 +
   1.266 +        masm.jump(jitcode);
   1.267 +
   1.268 +        // OOM: load error value, discard return address and previous frame
   1.269 +        // pointer and return.
   1.270 +        masm.bind(&error);
   1.271 +        masm.movePtr(framePtr, StackPointer);
   1.272 +        masm.addPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);
   1.273 +        masm.moveValue(MagicValue(JS_ION_ERROR), JSReturnOperand);
   1.274 +        masm.ma_li(scratch, returnLabel.dest());
   1.275 +        masm.jump(scratch);
   1.276 +
   1.277 +        masm.bind(&notOsr);
   1.278 +        // Load the scope chain in R1.
   1.279 +        MOZ_ASSERT(R1.scratchReg() != reg_code);
   1.280 +        masm.loadPtr(slotScopeChain, R1.scratchReg());
   1.281 +    }
   1.282 +
    1.283 +    // Call the function, pushing the return address onto the stack.
   1.284 +    masm.ma_callIonHalfPush(reg_code);
   1.285 +
   1.286 +    if (type == EnterJitBaseline) {
   1.287 +        // Baseline OSR will return here.
   1.288 +        masm.bind(returnLabel.src());
   1.289 +        if (!masm.addCodeLabel(returnLabel))
   1.290 +            return nullptr;
   1.291 +    }
   1.292 +
   1.293 +    // Pop arguments off the stack.
   1.294 +    // s0 <- 8*argc (size of all arguments we pushed on the stack)
   1.295 +    masm.pop(s0);
    1.296 +    masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), s0);
   1.297 +    masm.addPtr(s0, StackPointer);
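
The word popped into s0 here is the frame descriptor pushed before the call: the frame size shifted left by FRAMESIZE_SHIFT with the frame type packed into the low bits, which is why a right shift recovers the number of bytes to free. A minimal sketch of that packing follows; the shift width and type values are placeholders, the real constants live in jit/IonFrames.h.

    // Sketch of frame-descriptor packing/unpacking; constants are placeholders.
    #include <cstdint>

    static const uint32_t kFrameSizeShift = 4;                    // assumed width
    static const uint32_t kFrameTypeMask  = (1u << kFrameSizeShift) - 1;

    static uint32_t MakeDescriptor(uint32_t frameSize, uint32_t frameType) {
        return (frameSize << kFrameSizeShift) | frameType;        // makeFrameDescriptor
    }
    static uint32_t DescriptorSize(uint32_t desc) { return desc >> kFrameSizeShift; }
    static uint32_t DescriptorType(uint32_t desc) { return desc & kFrameTypeMask; }
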
   1.298 +
   1.299 +    // Store the returned value into the slotVp
   1.300 +    masm.loadPtr(slotVp, s1);
   1.301 +    masm.storeValue(JSReturnOperand, Address(s1, 0));
   1.302 +
   1.303 +    // Restore non-volatile registers and return.
   1.304 +    GenerateReturn(masm, ShortJump);
   1.305 +
   1.306 +    Linker linker(masm);
   1.307 +    AutoFlushICache afc("GenerateEnterJIT");
   1.308 +    JitCode *code = linker.newCode<NoGC>(cx, JSC::OTHER_CODE);
   1.309 +
   1.310 +#ifdef JS_ION_PERF
   1.311 +    writePerfSpewerJitCodeProfile(code, "EnterJIT");
   1.312 +#endif
   1.313 +
   1.314 +    return code;
   1.315 +}
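
Caller-side sketch of how the generated trampoline is used: its entry point is called through a plain C function pointer whose signature matches the comment above. The typedef and stub types below are hypothetical stand-ins, not the engine's real declarations.

    // Hypothetical caller-side view; all types here are stand-ins for the real ones.
    #include <cstddef>

    struct ValueStub;             // stands in for JS::Value
    struct InterpreterFrameStub;  // stands in for InterpreterFrame
    struct JSObjectStub;          // stands in for JSObject
    typedef void *CalleeTokenStub;

    typedef void (*EnterJitFn)(void *code, int argc, ValueStub *argv,
                               InterpreterFrameStub *fp, CalleeTokenStub calleeToken,
                               JSObjectStub *scopeChain, std::size_t numStackValues,
                               ValueStub *vp);

    // Usage sketch: cast the trampoline's raw code pointer and call it like a
    // normal C function; the result is written through vp.
    //   EnterJitFn enter = reinterpret_cast<EnterJitFn>(enterJitCode->raw());
    //   enter(jitcode, argc, argv, fp, token, scopeChain, numStackValues, vp);
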
   1.316 +
   1.317 +JitCode *
   1.318 +JitRuntime::generateInvalidator(JSContext *cx)
   1.319 +{
   1.320 +    MacroAssembler masm(cx);
   1.321 +
   1.322 +    // NOTE: Members ionScript_ and osiPointReturnAddress_ of
   1.323 +    // InvalidationBailoutStack are already on the stack.
   1.324 +    static const uint32_t STACK_DATA_SIZE = sizeof(InvalidationBailoutStack) -
   1.325 +                                            2 * sizeof(uintptr_t);
   1.326 +
    1.327 +    // Stack has to be aligned here. If not, we will have to fix it.
   1.328 +    masm.checkStackAlignment();
   1.329 +
   1.330 +    // Make room for data on stack.
   1.331 +    masm.subPtr(Imm32(STACK_DATA_SIZE), StackPointer);
   1.332 +
   1.333 +    // Save general purpose registers
   1.334 +    for (uint32_t i = 0; i < Registers::Total; i++) {
   1.335 +        Address address = Address(StackPointer, InvalidationBailoutStack::offsetOfRegs() +
   1.336 +                                                i * sizeof(uintptr_t));
   1.337 +        masm.storePtr(Register::FromCode(i), address);
   1.338 +    }
   1.339 +
   1.340 +    // Save floating point registers
    1.341 +    // We can use as_sd because the stack is aligned.
   1.342 +    for (uint32_t i = 0; i < FloatRegisters::Total; i++)
   1.343 +        masm.as_sd(FloatRegister::FromCode(i), StackPointer,
   1.344 +                   InvalidationBailoutStack::offsetOfFpRegs() + i * sizeof(double));
   1.345 +
   1.346 +    // Pass pointer to InvalidationBailoutStack structure.
   1.347 +    masm.movePtr(StackPointer, a0);
   1.348 +
   1.349 +    // Reserve place for return value and BailoutInfo pointer
   1.350 +    masm.subPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);
   1.351 +    // Pass pointer to return value.
   1.352 +    masm.ma_addu(a1, StackPointer, Imm32(sizeof(uintptr_t)));
   1.353 +    // Pass pointer to BailoutInfo
   1.354 +    masm.movePtr(StackPointer, a2);
   1.355 +
   1.356 +    masm.setupAlignedABICall(3);
   1.357 +    masm.passABIArg(a0);
   1.358 +    masm.passABIArg(a1);
   1.359 +    masm.passABIArg(a2);
   1.360 +    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, InvalidationBailout));
   1.361 +
   1.362 +    masm.loadPtr(Address(StackPointer, 0), a2);
   1.363 +    masm.loadPtr(Address(StackPointer, sizeof(uintptr_t)), a1);
   1.364 +    // Remove the return address, the IonScript, the register state
    1.365 +    // (InvalidationBailoutStack) and the space that was allocated for the
   1.366 +    // return value.
   1.367 +    masm.addPtr(Imm32(sizeof(InvalidationBailoutStack) + 2 * sizeof(uintptr_t)), StackPointer);
   1.368 +    // remove the space that this frame was using before the bailout
   1.369 +    // (computed by InvalidationBailout)
   1.370 +    masm.addPtr(a1, StackPointer);
   1.371 +
    1.372 +    // Jump to shared bailout tail. The BailoutInfo pointer has to be in a2.
   1.373 +    JitCode *bailoutTail = cx->runtime()->jitRuntime()->getBailoutTail();
   1.374 +    masm.branch(bailoutTail);
   1.375 +
   1.376 +    Linker linker(masm);
   1.377 +    AutoFlushICache afc("Invalidator");
   1.378 +    JitCode *code = linker.newCode<NoGC>(cx, JSC::OTHER_CODE);
   1.379 +    IonSpew(IonSpew_Invalidate, "   invalidation thunk created at %p", (void *) code->raw());
   1.380 +
   1.381 +#ifdef JS_ION_PERF
   1.382 +    writePerfSpewerJitCodeProfile(code, "Invalidator");
   1.383 +#endif
   1.384 +
   1.385 +    return code;
   1.386 +}
   1.387 +
   1.388 +JitCode *
   1.389 +JitRuntime::generateArgumentsRectifier(JSContext *cx, ExecutionMode mode, void **returnAddrOut)
   1.390 +{
   1.391 +    MacroAssembler masm(cx);
   1.392 +
   1.393 +    // ArgumentsRectifierReg contains the |nargs| pushed onto the current
   1.394 +    // frame. Including |this|, there are (|nargs| + 1) arguments to copy.
   1.395 +    MOZ_ASSERT(ArgumentsRectifierReg == s3);
   1.396 +
   1.397 +    Register numActArgsReg = t6;
   1.398 +    Register calleeTokenReg = t7;
   1.399 +    Register numArgsReg = t5;
   1.400 +
   1.401 +    // Copy number of actual arguments into numActArgsReg
   1.402 +    masm.loadPtr(Address(StackPointer, IonRectifierFrameLayout::offsetOfNumActualArgs()),
   1.403 +                 numActArgsReg);
   1.404 +
   1.405 +    // Load the number of |undefined|s to push into t1.
   1.406 +    masm.loadPtr(Address(StackPointer, IonRectifierFrameLayout::offsetOfCalleeToken()),
   1.407 +                 calleeTokenReg);
   1.408 +    masm.load16ZeroExtend(Address(calleeTokenReg, JSFunction::offsetOfNargs()), numArgsReg);
   1.409 +
   1.410 +    masm.ma_subu(t1, numArgsReg, s3);
   1.411 +
   1.412 +    masm.moveValue(UndefinedValue(), ValueOperand(t3, t4));
   1.413 +
   1.414 +    masm.movePtr(StackPointer, t2); // Save %sp.
   1.415 +
   1.416 +    // Push undefined.
   1.417 +    {
   1.418 +        Label undefLoopTop;
   1.419 +        masm.bind(&undefLoopTop);
   1.420 +
   1.421 +        masm.subPtr(Imm32(sizeof(Value)), StackPointer);
   1.422 +        masm.storeValue(ValueOperand(t3, t4), Address(StackPointer, 0));
   1.423 +        masm.sub32(Imm32(1), t1);
   1.424 +
   1.425 +        masm.ma_b(t1, t1, &undefLoopTop, Assembler::NonZero, ShortJump);
   1.426 +    }
   1.427 +
   1.428 +    // Get the topmost argument.
   1.429 +    masm.ma_sll(t0, s3, Imm32(3)); // t0 <- nargs * 8
   1.430 +    masm.addPtr(t0, t2); // t2 <- t2(saved sp) + nargs * 8
   1.431 +    masm.addPtr(Imm32(sizeof(IonRectifierFrameLayout)), t2);
   1.432 +
   1.433 +    // Push arguments, |nargs| + 1 times (to include |this|).
   1.434 +    {
   1.435 +        Label copyLoopTop, initialSkip;
   1.436 +
   1.437 +        masm.ma_b(&initialSkip, ShortJump);
   1.438 +
   1.439 +        masm.bind(&copyLoopTop);
   1.440 +        masm.subPtr(Imm32(sizeof(Value)), t2);
   1.441 +        masm.sub32(Imm32(1), s3);
   1.442 +
   1.443 +        masm.bind(&initialSkip);
   1.444 +
   1.445 +        MOZ_ASSERT(sizeof(Value) == 2 * sizeof(uint32_t));
   1.446 +        // Read argument and push to stack.
   1.447 +        masm.subPtr(Imm32(sizeof(Value)), StackPointer);
   1.448 +        masm.load32(Address(t2, NUNBOX32_TYPE_OFFSET), t0);
   1.449 +        masm.store32(t0, Address(StackPointer, NUNBOX32_TYPE_OFFSET));
   1.450 +        masm.load32(Address(t2, NUNBOX32_PAYLOAD_OFFSET), t0);
   1.451 +        masm.store32(t0, Address(StackPointer, NUNBOX32_PAYLOAD_OFFSET));
   1.452 +
   1.453 +        masm.ma_b(s3, s3, &copyLoopTop, Assembler::NonZero, ShortJump);
   1.454 +    }
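
The net effect of the two loops above, restated arithmetically: the rectifier pushes (nformals - nargs) undefined values and then copies (nargs + 1) boxed values, including |this|, so the new frame holds (nformals + 1) values of 8 bytes each. A small bookkeeping sketch follows; it assumes, as the rectifier does, that it is only entered when nargs < nformals.

    // Bookkeeping sketch for the rectifier loops; assumes nargs < nformals.
    #include <cstdint>

    struct RectifierCounts {
        uint32_t undefinedsPushed;   // the first loop (t1 iterations)
        uint32_t valuesCopied;       // the second loop: nargs + 1, including |this|
        uint32_t valueBytesPushed;   // what the frame descriptor's size is built from
    };

    static RectifierCounts ComputeRectifierCounts(uint32_t nargs, uint32_t nformals) {
        RectifierCounts c;
        c.undefinedsPushed = nformals - nargs;       // ma_subu(t1, numArgsReg, s3)
        c.valuesCopied     = nargs + 1;              // s3 counts down to zero
        c.valueBytesPushed = (nformals + 1) * 8;     // ma_addu(+1) then lshiftPtr(3)
        return c;
    }
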
   1.455 +
    1.456 +    // Translate the frame size from a number of values into bytes.
   1.457 +    masm.ma_addu(t0, numArgsReg, Imm32(1));
   1.458 +    masm.lshiftPtr(Imm32(3), t0);
   1.459 +
   1.460 +    // Construct sizeDescriptor.
   1.461 +    masm.makeFrameDescriptor(t0, JitFrame_Rectifier);
   1.462 +
   1.463 +    // Construct IonJSFrameLayout.
   1.464 +    masm.subPtr(Imm32(3 * sizeof(uintptr_t)), StackPointer);
   1.465 +    // Push actual arguments.
   1.466 +    masm.storePtr(numActArgsReg, Address(StackPointer, 2 * sizeof(uintptr_t)));
   1.467 +    // Push callee token.
   1.468 +    masm.storePtr(calleeTokenReg, Address(StackPointer, sizeof(uintptr_t)));
   1.469 +    // Push frame descriptor.
   1.470 +    masm.storePtr(t0, Address(StackPointer, 0));
   1.471 +
   1.472 +    // Call the target function.
   1.473 +    // Note that this code assumes the function is JITted.
   1.474 +    masm.loadPtr(Address(calleeTokenReg, JSFunction::offsetOfNativeOrScript()), t1);
   1.475 +    masm.loadBaselineOrIonRaw(t1, t1, mode, nullptr);
   1.476 +    masm.ma_callIonHalfPush(t1);
   1.477 +
   1.478 +    uint32_t returnOffset = masm.currentOffset();
   1.479 +
   1.480 +    // arg1
   1.481 +    //  ...
   1.482 +    // argN
   1.483 +    // num actual args
   1.484 +    // callee token
   1.485 +    // sizeDescriptor     <- sp now
   1.486 +    // return address
   1.487 +
   1.488 +    // Remove the rectifier frame.
   1.489 +    // t0 <- descriptor with FrameType.
   1.490 +    masm.loadPtr(Address(StackPointer, 0), t0);
   1.491 +    masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), t0); // t0 <- descriptor.
   1.492 +
   1.493 +    // Discard descriptor, calleeToken and number of actual arguments.
   1.494 +    masm.addPtr(Imm32(3 * sizeof(uintptr_t)), StackPointer);
   1.495 +
   1.496 +    // arg1
   1.497 +    //  ...
   1.498 +    // argN               <- sp now; t0 <- frame descriptor
   1.499 +    // num actual args
   1.500 +    // callee token
   1.501 +    // sizeDescriptor
   1.502 +    // return address
   1.503 +
   1.504 +    // Discard pushed arguments.
   1.505 +    masm.addPtr(t0, StackPointer);
   1.506 +
   1.507 +    masm.ret();
   1.508 +    Linker linker(masm);
   1.509 +    AutoFlushICache afc("ArgumentsRectifier");
   1.510 +    JitCode *code = linker.newCode<NoGC>(cx, JSC::OTHER_CODE);
   1.511 +
   1.512 +    CodeOffsetLabel returnLabel(returnOffset);
   1.513 +    returnLabel.fixup(&masm);
   1.514 +    if (returnAddrOut)
   1.515 +        *returnAddrOut = (void *) (code->raw() + returnLabel.offset());
   1.516 +
   1.517 +#ifdef JS_ION_PERF
   1.518 +    writePerfSpewerJitCodeProfile(code, "ArgumentsRectifier");
   1.519 +#endif
   1.520 +
   1.521 +    return code;
   1.522 +}
   1.523 +
    1.524 +/* There are two different stack layouts when doing a bailout. They are
    1.525 + * represented by the BailoutStack class.
    1.526 + *
    1.527 + * - The first case is when the bailout is done through the bailout table. In
    1.528 + * this case the table offset is stored in $ra (see
    1.529 + * JitRuntime::generateBailoutTable()) and the thunk code must save it on the
    1.530 + * stack. In this case frameClassId_ cannot be NO_FRAME_SIZE_CLASS_ID, and
    1.531 + * members snapshotOffset_ and padding_ are not on the stack.
    1.532 + *
    1.533 + * - The other case is when the bailout is done via out-of-line code (lazy
    1.534 + * bailout). In this case the frame size is stored in $ra (see
    1.535 + * CodeGeneratorMIPS::generateOutOfLineCode()) and the thunk code must save it
    1.536 + * on the stack. The other difference is that members snapshotOffset_ and
    1.537 + * padding_ are pushed onto the stack by CodeGeneratorMIPS::visitOutOfLineBailout(),
    1.538 + * and frameClassId_ is forced to be NO_FRAME_SIZE_CLASS_ID
    1.539 + * (see JitRuntime::generateBailoutHandler).
   1.540 + */
   1.541 +static void
   1.542 +GenerateBailoutThunk(JSContext *cx, MacroAssembler &masm, uint32_t frameClass)
   1.543 +{
   1.544 +    // NOTE: Members snapshotOffset_ and padding_ of BailoutStack
   1.545 +    // are not stored in this function.
   1.546 +    static const uint32_t bailoutDataSize = sizeof(BailoutStack) - 2 * sizeof(uintptr_t);
   1.547 +    static const uint32_t bailoutInfoOutParamSize = 2 * sizeof(uintptr_t);
   1.548 +
   1.549 +    // Make sure that alignment is proper.
   1.550 +    masm.checkStackAlignment();
   1.551 +
   1.552 +    // Make room for data.
   1.553 +    masm.subPtr(Imm32(bailoutDataSize), StackPointer);
   1.554 +
   1.555 +    // Save general purpose registers.
   1.556 +    for (uint32_t i = 0; i < Registers::Total; i++) {
   1.557 +        uint32_t off = BailoutStack::offsetOfRegs() + i * sizeof(uintptr_t);
   1.558 +        masm.storePtr(Register::FromCode(i), Address(StackPointer, off));
   1.559 +    }
   1.560 +
   1.561 +    // Save floating point registers
    1.562 +    // We can use as_sd because the stack is aligned.
   1.563 +    for (uintptr_t i = 0; i < FloatRegisters::Total; i++)
   1.564 +        masm.as_sd(FloatRegister::FromCode(i), StackPointer,
   1.565 +                   BailoutStack::offsetOfFpRegs() + i * sizeof(double));
   1.566 +
   1.567 +    // Store the frameSize_ or tableOffset_ stored in ra
   1.568 +    // See: JitRuntime::generateBailoutTable()
   1.569 +    // See: CodeGeneratorMIPS::generateOutOfLineCode()
   1.570 +    masm.storePtr(ra, Address(StackPointer, BailoutStack::offsetOfFrameSize()));
   1.571 +
    1.572 +    // Store the frame class on the stack.
   1.573 +    masm.storePtr(ImmWord(frameClass), Address(StackPointer, BailoutStack::offsetOfFrameClass()));
   1.574 +
    1.575 +    // Pass a pointer to the BailoutStack as the first argument to Bailout().
   1.576 +    masm.movePtr(StackPointer, a0);
    1.577 +    // Reserve space for the BailoutInfo out-param and pass its address.
   1.578 +    masm.subPtr(Imm32(bailoutInfoOutParamSize), StackPointer);
   1.579 +    masm.storePtr(ImmPtr(nullptr), Address(StackPointer, 0));
   1.580 +    masm.movePtr(StackPointer, a1);
   1.581 +
   1.582 +    masm.setupAlignedABICall(2);
   1.583 +    masm.passABIArg(a0);
   1.584 +    masm.passABIArg(a1);
   1.585 +    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, Bailout));
   1.586 +
   1.587 +    // Get BailoutInfo pointer
   1.588 +    masm.loadPtr(Address(StackPointer, 0), a2);
   1.589 +
   1.590 +    // Remove both the bailout frame and the topmost Ion frame's stack.
   1.591 +    if (frameClass == NO_FRAME_SIZE_CLASS_ID) {
   1.592 +        // Load frameSize from stack
   1.593 +        masm.loadPtr(Address(StackPointer,
   1.594 +                             bailoutInfoOutParamSize + BailoutStack::offsetOfFrameSize()), a1);
   1.595 +
    1.596 +        // Remove the complete BailoutStack and the data after it.
    1.597 +        masm.addPtr(Imm32(sizeof(BailoutStack) + bailoutInfoOutParamSize), StackPointer);
    1.598 +        // Remove the frame size from the stack.
   1.599 +        masm.addPtr(a1, StackPointer);
   1.600 +    } else {
   1.601 +        uint32_t frameSize = FrameSizeClass::FromClass(frameClass).frameSize();
    1.602 +        // Remove the data this function added plus the frame size.
   1.603 +        masm.addPtr(Imm32(bailoutDataSize + bailoutInfoOutParamSize + frameSize), StackPointer);
   1.604 +    }
   1.605 +
   1.606 +    // Jump to shared bailout tail. The BailoutInfo pointer has to be in a2.
   1.607 +    JitCode *bailoutTail = cx->runtime()->jitRuntime()->getBailoutTail();
   1.608 +    masm.branch(bailoutTail);
   1.609 +}
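
The unwind arithmetic at the end of the thunk follows directly from the two-layout comment before this function. Restated with plain integers: the engine types and the frame-size-class lookup are replaced by parameters here, and the NO_FRAME_SIZE_CLASS_ID value is a placeholder.

    // Sketch of the bytes popped after Bailout() returns; the values are
    // parameters standing in for sizeof(BailoutStack), FrameSizeClass, etc.
    #include <cstdint>

    static const uint32_t kNoFrameSizeClassId = 0xffffffff;   // placeholder

    static uint32_t BytesPoppedAfterBailout(uint32_t frameClass,
                                            uint32_t sizeofBailoutStack,
                                            uint32_t outParamSize,
                                            uint32_t frameSizeFromStack,
                                            uint32_t frameSizeFromClass) {
        if (frameClass == kNoFrameSizeClassId) {
            // Lazy bailout: snapshotOffset_/padding_ are already on the stack, so
            // the whole BailoutStack plus the out-param area goes, and the Ion
            // frame's size comes from the saved frameSize_ word.
            return sizeofBailoutStack + outParamSize + frameSizeFromStack;
        }
        // Table bailout: only the data this thunk pushed is removed, and the Ion
        // frame's size is implied by the frame-size class.
        uint32_t bailoutDataSize = sizeofBailoutStack - 2 * sizeof(uint32_t);
        return bailoutDataSize + outParamSize + frameSizeFromClass;
    }
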
   1.610 +
   1.611 +JitCode *
   1.612 +JitRuntime::generateBailoutTable(JSContext *cx, uint32_t frameClass)
   1.613 +{
   1.614 +    MacroAssembler masm(cx);
   1.615 +
   1.616 +    Label bailout;
   1.617 +    for (size_t i = 0; i < BAILOUT_TABLE_SIZE; i++) {
    1.618 +        // Calculate the offset to the end of the table.
    1.619 +        int32_t offset = (BAILOUT_TABLE_SIZE - i) * BAILOUT_TABLE_ENTRY_SIZE;
    1.620 +
    1.621 +        // 'ra' is used as the table offset later, in GenerateBailoutThunk.
   1.622 +        masm.as_bal(BOffImm16(offset));
   1.623 +        masm.nop();
   1.624 +    }
   1.625 +    masm.bind(&bailout);
   1.626 +
   1.627 +    GenerateBailoutThunk(cx, masm, frameClass);
   1.628 +
   1.629 +    Linker linker(masm);
   1.630 +    AutoFlushICache afc("BailoutTable");
   1.631 +    JitCode *code = linker.newCode<NoGC>(cx, JSC::OTHER_CODE);
   1.632 +
   1.633 +#ifdef JS_ION_PERF
   1.634 +    writePerfSpewerJitCodeProfile(code, "BailoutTable");
   1.635 +#endif
   1.636 +
   1.637 +    return code;
   1.638 +}
   1.639 +
   1.640 +JitCode *
   1.641 +JitRuntime::generateBailoutHandler(JSContext *cx)
   1.642 +{
   1.643 +    MacroAssembler masm(cx);
   1.644 +    GenerateBailoutThunk(cx, masm, NO_FRAME_SIZE_CLASS_ID);
   1.645 +
   1.646 +    Linker linker(masm);
   1.647 +    AutoFlushICache afc("BailoutHandler");
   1.648 +    JitCode *code = linker.newCode<NoGC>(cx, JSC::OTHER_CODE);
   1.649 +
   1.650 +#ifdef JS_ION_PERF
   1.651 +    writePerfSpewerJitCodeProfile(code, "BailoutHandler");
   1.652 +#endif
   1.653 +
   1.654 +    return code;
   1.655 +}
   1.656 +
   1.657 +JitCode *
   1.658 +JitRuntime::generateVMWrapper(JSContext *cx, const VMFunction &f)
   1.659 +{
   1.660 +    MOZ_ASSERT(functionWrappers_);
   1.661 +    MOZ_ASSERT(functionWrappers_->initialized());
   1.662 +    VMWrapperMap::AddPtr p = functionWrappers_->lookupForAdd(&f);
   1.663 +    if (p)
   1.664 +        return p->value();
   1.665 +
   1.666 +    MacroAssembler masm(cx);
   1.667 +
   1.668 +    GeneralRegisterSet regs = GeneralRegisterSet(Register::Codes::WrapperMask);
   1.669 +
   1.670 +    static_assert((Register::Codes::VolatileMask & ~Register::Codes::WrapperMask) == 0,
   1.671 +                  "Wrapper register set should be a superset of Volatile register set.");
   1.672 +
   1.673 +    // The context is the first argument; a0 is the first argument register.
   1.674 +    Register cxreg = a0;
   1.675 +    regs.take(cxreg);
   1.676 +
   1.677 +    // We're aligned to an exit frame, so link it up.
   1.678 +    masm.enterExitFrameAndLoadContext(&f, cxreg, regs.getAny(), f.executionMode);
   1.679 +
   1.680 +    // Save the base of the argument set stored on the stack.
   1.681 +    Register argsBase = InvalidReg;
   1.682 +    if (f.explicitArgs) {
   1.683 +        argsBase = t1; // Use temporary register.
   1.684 +        regs.take(argsBase);
   1.685 +        masm.ma_addu(argsBase, StackPointer, Imm32(IonExitFrameLayout::SizeWithFooter()));
   1.686 +    }
   1.687 +
   1.688 +    // Reserve space for the outparameter.
   1.689 +    Register outReg = InvalidReg;
   1.690 +    switch (f.outParam) {
   1.691 +      case Type_Value:
   1.692 +        outReg = t0; // Use temporary register.
   1.693 +        regs.take(outReg);
   1.694 +        // Value outparam has to be 8 byte aligned because the called
   1.695 +        // function can use sdc1 or ldc1 instructions to access it.
   1.696 +        masm.reserveStack((StackAlignment - sizeof(uintptr_t)) + sizeof(Value));
   1.697 +        masm.alignPointerUp(StackPointer, outReg, StackAlignment);
   1.698 +        break;
   1.699 +
   1.700 +      case Type_Handle:
   1.701 +        outReg = t0;
   1.702 +        regs.take(outReg);
   1.703 +        if (f.outParamRootType == VMFunction::RootValue) {
   1.704 +            // Value outparam has to be 8 byte aligned because the called
   1.705 +            // function can use sdc1 or ldc1 instructions to access it.
   1.706 +            masm.reserveStack((StackAlignment - sizeof(uintptr_t)) + sizeof(Value));
   1.707 +            masm.alignPointerUp(StackPointer, outReg, StackAlignment);
   1.708 +            masm.storeValue(UndefinedValue(), Address(outReg, 0));
   1.709 +        }
   1.710 +        else {
   1.711 +            masm.PushEmptyRooted(f.outParamRootType);
   1.712 +            masm.movePtr(StackPointer, outReg);
   1.713 +        }
   1.714 +        break;
   1.715 +
   1.716 +      case Type_Bool:
   1.717 +      case Type_Int32:
   1.718 +        MOZ_ASSERT(sizeof(uintptr_t) == sizeof(uint32_t));
   1.719 +      case Type_Pointer:
   1.720 +        outReg = t0;
   1.721 +        regs.take(outReg);
   1.722 +        masm.reserveStack(sizeof(uintptr_t));
   1.723 +        masm.movePtr(StackPointer, outReg);
   1.724 +        break;
   1.725 +
   1.726 +      case Type_Double:
   1.727 +        outReg = t0;
   1.728 +        regs.take(outReg);
   1.729 +        // Double outparam has to be 8 byte aligned because the called
   1.730 +        // function can use sdc1 or ldc1 instructions to access it.
   1.731 +        masm.reserveStack((StackAlignment - sizeof(uintptr_t)) + sizeof(double));
   1.732 +        masm.alignPointerUp(StackPointer, outReg, StackAlignment);
   1.733 +        break;
   1.734 +
   1.735 +      default:
   1.736 +        MOZ_ASSERT(f.outParam == Type_Void);
   1.737 +        break;
   1.738 +    }
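
For the Value and Double cases above, the over-reserve-then-round-up pattern guarantees an 8-byte-aligned slot no matter which word boundary the stack pointer is currently on. The arithmetic, assuming an 8-byte StackAlignment (which the (StackAlignment - sizeof(uintptr_t)) + sizeof(Value) reservation implies) and the 4-byte stack words asserted at the top of the file:

    // Alignment arithmetic behind reserveStack(...) + alignPointerUp(...) above.
    #include <cstdint>

    static uintptr_t AlignUp(uintptr_t p, uintptr_t alignment) {
        return (p + alignment - 1) & ~(alignment - 1);   // what alignPointerUp does
    }

    // With an 8-byte StackAlignment and a 4-byte-aligned sp, reserving
    // (8 - 4) + 8 == 12 bytes means AlignUp(sp - 12, 8) always points at an
    // 8-byte slot that fits entirely inside the reserved region.
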
   1.739 +
   1.740 +    masm.setupUnalignedABICall(f.argc(), regs.getAny());
   1.741 +    masm.passABIArg(cxreg);
   1.742 +
   1.743 +    size_t argDisp = 0;
   1.744 +
   1.745 +    // Copy any arguments.
   1.746 +    for (uint32_t explicitArg = 0; explicitArg < f.explicitArgs; explicitArg++) {
   1.747 +        MoveOperand from;
   1.748 +        switch (f.argProperties(explicitArg)) {
   1.749 +          case VMFunction::WordByValue:
   1.750 +            masm.passABIArg(MoveOperand(argsBase, argDisp), MoveOp::GENERAL);
   1.751 +            argDisp += sizeof(uint32_t);
   1.752 +            break;
   1.753 +          case VMFunction::DoubleByValue:
   1.754 +            // Values should be passed by reference, not by value, so we
   1.755 +            // assert that the argument is a double-precision float.
   1.756 +            MOZ_ASSERT(f.argPassedInFloatReg(explicitArg));
   1.757 +            masm.passABIArg(MoveOperand(argsBase, argDisp), MoveOp::DOUBLE);
   1.758 +            argDisp += sizeof(double);
   1.759 +            break;
   1.760 +          case VMFunction::WordByRef:
   1.761 +            masm.passABIArg(MoveOperand(argsBase, argDisp, MoveOperand::EFFECTIVE_ADDRESS),
   1.762 +                            MoveOp::GENERAL);
   1.763 +            argDisp += sizeof(uint32_t);
   1.764 +            break;
   1.765 +          case VMFunction::DoubleByRef:
   1.766 +            masm.passABIArg(MoveOperand(argsBase, argDisp, MoveOperand::EFFECTIVE_ADDRESS),
   1.767 +                            MoveOp::GENERAL);
   1.768 +            argDisp += sizeof(double);
   1.769 +            break;
   1.770 +        }
   1.771 +    }
   1.772 +
   1.773 +    // Copy the implicit outparam, if any.
   1.774 +    if (outReg != InvalidReg)
   1.775 +        masm.passABIArg(outReg);
   1.776 +
   1.777 +    masm.callWithABI(f.wrapped);
   1.778 +
   1.779 +    // Test for failure.
   1.780 +    switch (f.failType()) {
   1.781 +      case Type_Object:
   1.782 +        masm.branchTestPtr(Assembler::Zero, v0, v0, masm.failureLabel(f.executionMode));
   1.783 +        break;
   1.784 +      case Type_Bool:
   1.785 +        // Called functions return bools, which are 0/false and non-zero/true
   1.786 +        masm.branchIfFalseBool(v0, masm.failureLabel(f.executionMode));
   1.787 +        break;
   1.788 +      default:
   1.789 +        MOZ_ASSUME_UNREACHABLE("unknown failure kind");
   1.790 +    }
   1.791 +
   1.792 +    // Load the outparam and free any allocated stack.
   1.793 +    switch (f.outParam) {
   1.794 +      case Type_Handle:
   1.795 +        if (f.outParamRootType == VMFunction::RootValue) {
   1.796 +            masm.alignPointerUp(StackPointer, SecondScratchReg, StackAlignment);
   1.797 +            masm.loadValue(Address(SecondScratchReg, 0), JSReturnOperand);
   1.798 +            masm.freeStack((StackAlignment - sizeof(uintptr_t)) + sizeof(Value));
   1.799 +        }
   1.800 +        else {
   1.801 +            masm.popRooted(f.outParamRootType, ReturnReg, JSReturnOperand);
   1.802 +        }
   1.803 +        break;
   1.804 +
   1.805 +      case Type_Value:
   1.806 +        masm.alignPointerUp(StackPointer, SecondScratchReg, StackAlignment);
   1.807 +        masm.loadValue(Address(SecondScratchReg, 0), JSReturnOperand);
   1.808 +        masm.freeStack((StackAlignment - sizeof(uintptr_t)) + sizeof(Value));
   1.809 +        break;
   1.810 +
   1.811 +      case Type_Int32:
   1.812 +        MOZ_ASSERT(sizeof(uintptr_t) == sizeof(uint32_t));
   1.813 +      case Type_Pointer:
   1.814 +        masm.load32(Address(StackPointer, 0), ReturnReg);
   1.815 +        masm.freeStack(sizeof(uintptr_t));
   1.816 +        break;
   1.817 +
   1.818 +      case Type_Bool:
   1.819 +        masm.load8ZeroExtend(Address(StackPointer, 0), ReturnReg);
   1.820 +        masm.freeStack(sizeof(uintptr_t));
   1.821 +        break;
   1.822 +
   1.823 +      case Type_Double:
   1.824 +        if (cx->runtime()->jitSupportsFloatingPoint) {
   1.825 +            masm.alignPointerUp(StackPointer, SecondScratchReg, StackAlignment);
   1.826 +            // Address is aligned, so we can use as_ld.
   1.827 +            masm.as_ld(ReturnFloatReg, SecondScratchReg, 0);
   1.828 +        } else {
   1.829 +            masm.assumeUnreachable("Unable to load into float reg, with no FP support.");
   1.830 +        }
   1.831 +        masm.freeStack((StackAlignment - sizeof(uintptr_t)) + sizeof(double));
   1.832 +        break;
   1.833 +
   1.834 +      default:
   1.835 +        MOZ_ASSERT(f.outParam == Type_Void);
   1.836 +        break;
   1.837 +    }
   1.838 +    masm.leaveExitFrame();
   1.839 +    masm.retn(Imm32(sizeof(IonExitFrameLayout) +
   1.840 +                    f.explicitStackSlots() * sizeof(uintptr_t) +
   1.841 +                    f.extraValuesToPop * sizeof(Value)));
   1.842 +
   1.843 +    Linker linker(masm);
   1.844 +    AutoFlushICache afc("VMWrapper");
   1.845 +    JitCode *wrapper = linker.newCode<NoGC>(cx, JSC::OTHER_CODE);
   1.846 +    if (!wrapper)
   1.847 +        return nullptr;
   1.848 +
   1.849 +    // linker.newCode may trigger a GC and sweep functionWrappers_ so we have
   1.850 +    // to use relookupOrAdd instead of add.
   1.851 +    if (!functionWrappers_->relookupOrAdd(p, &f, wrapper))
   1.852 +        return nullptr;
   1.853 +
   1.854 +#ifdef JS_ION_PERF
   1.855 +    writePerfSpewerJitCodeProfile(wrapper, "VMWrapper");
   1.856 +#endif
   1.857 +
   1.858 +    return wrapper;
   1.859 +}
   1.860 +
   1.861 +JitCode *
   1.862 +JitRuntime::generatePreBarrier(JSContext *cx, MIRType type)
   1.863 +{
   1.864 +    MacroAssembler masm(cx);
   1.865 +
   1.866 +    RegisterSet save;
   1.867 +    if (cx->runtime()->jitSupportsFloatingPoint) {
   1.868 +        save = RegisterSet(GeneralRegisterSet(Registers::VolatileMask),
   1.869 +                           FloatRegisterSet(FloatRegisters::VolatileMask));
   1.870 +    } else {
   1.871 +        save = RegisterSet(GeneralRegisterSet(Registers::VolatileMask),
   1.872 +                           FloatRegisterSet());
   1.873 +    }
   1.874 +    masm.PushRegsInMask(save);
   1.875 +
   1.876 +    MOZ_ASSERT(PreBarrierReg == a1);
   1.877 +    masm.movePtr(ImmPtr(cx->runtime()), a0);
   1.878 +
   1.879 +    masm.setupUnalignedABICall(2, a2);
   1.880 +    masm.passABIArg(a0);
   1.881 +    masm.passABIArg(a1);
   1.882 +
   1.883 +    if (type == MIRType_Value) {
   1.884 +        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, MarkValueFromIon));
   1.885 +    } else {
   1.886 +        MOZ_ASSERT(type == MIRType_Shape);
   1.887 +        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, MarkShapeFromIon));
   1.888 +    }
   1.889 +
   1.890 +    masm.PopRegsInMask(save);
   1.891 +    masm.ret();
   1.892 +
   1.893 +    Linker linker(masm);
   1.894 +    AutoFlushICache afc("PreBarrier");
   1.895 +    JitCode *code = linker.newCode<NoGC>(cx, JSC::OTHER_CODE);
   1.896 +
   1.897 +#ifdef JS_ION_PERF
   1.898 +    writePerfSpewerJitCodeProfile(code, "PreBarrier");
   1.899 +#endif
   1.900 +
   1.901 +    return code;
   1.902 +}
   1.903 +
   1.904 +typedef bool (*HandleDebugTrapFn)(JSContext *, BaselineFrame *, uint8_t *, bool *);
   1.905 +static const VMFunction HandleDebugTrapInfo = FunctionInfo<HandleDebugTrapFn>(HandleDebugTrap);
   1.906 +
   1.907 +JitCode *
   1.908 +JitRuntime::generateDebugTrapHandler(JSContext *cx)
   1.909 +{
   1.910 +    MacroAssembler masm(cx);
   1.911 +
   1.912 +    Register scratch1 = t0;
   1.913 +    Register scratch2 = t1;
   1.914 +
   1.915 +    // Load BaselineFrame pointer in scratch1.
   1.916 +    masm.movePtr(s5, scratch1);
   1.917 +    masm.subPtr(Imm32(BaselineFrame::Size()), scratch1);
   1.918 +
   1.919 +    // Enter a stub frame and call the HandleDebugTrap VM function. Ensure
   1.920 +    // the stub frame has a nullptr ICStub pointer, since this pointer is
   1.921 +    // marked during GC.
   1.922 +    masm.movePtr(ImmPtr(nullptr), BaselineStubReg);
   1.923 +    EmitEnterStubFrame(masm, scratch2);
   1.924 +
   1.925 +    JitCode *code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
   1.926 +    if (!code)
   1.927 +        return nullptr;
   1.928 +
   1.929 +    masm.subPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);
   1.930 +    masm.storePtr(ra, Address(StackPointer, sizeof(uintptr_t)));
   1.931 +    masm.storePtr(scratch1, Address(StackPointer, 0));
   1.932 +
   1.933 +    EmitCallVM(code, masm);
   1.934 +
   1.935 +    EmitLeaveStubFrame(masm);
   1.936 +
   1.937 +    // If the stub returns |true|, we have to perform a forced return
   1.938 +    // (return from the JS frame). If the stub returns |false|, just return
   1.939 +    // from the trap stub so that execution continues at the current pc.
   1.940 +    Label forcedReturn;
   1.941 +    masm.branchTest32(Assembler::NonZero, ReturnReg, ReturnReg, &forcedReturn);
   1.942 +
   1.943 +    // ra was restored by EmitLeaveStubFrame
   1.944 +    masm.branch(ra);
   1.945 +
   1.946 +    masm.bind(&forcedReturn);
   1.947 +    masm.loadValue(Address(s5, BaselineFrame::reverseOffsetOfReturnValue()),
   1.948 +                   JSReturnOperand);
   1.949 +    masm.movePtr(s5, StackPointer);
   1.950 +    masm.pop(s5);
   1.951 +    masm.ret();
   1.952 +
   1.953 +    Linker linker(masm);
   1.954 +    AutoFlushICache afc("DebugTrapHandler");
   1.955 +    JitCode *codeDbg = linker.newCode<NoGC>(cx, JSC::OTHER_CODE);
   1.956 +
   1.957 +#ifdef JS_ION_PERF
   1.958 +    writePerfSpewerJitCodeProfile(codeDbg, "DebugTrapHandler");
   1.959 +#endif
   1.960 +
   1.961 +    return codeDbg;
   1.962 +}
   1.963 +
   1.964 +
   1.965 +JitCode *
   1.966 +JitRuntime::generateExceptionTailStub(JSContext *cx)
   1.967 +{
   1.968 +    MacroAssembler masm;
   1.969 +
   1.970 +    masm.handleFailureWithHandlerTail();
   1.971 +
   1.972 +    Linker linker(masm);
   1.973 +    AutoFlushICache afc("ExceptionTailStub");
   1.974 +    JitCode *code = linker.newCode<NoGC>(cx, JSC::OTHER_CODE);
   1.975 +
   1.976 +#ifdef JS_ION_PERF
   1.977 +    writePerfSpewerJitCodeProfile(code, "ExceptionTailStub");
   1.978 +#endif
   1.979 +
   1.980 +    return code;
   1.981 +}
   1.982 +
   1.983 +JitCode *
   1.984 +JitRuntime::generateBailoutTailStub(JSContext *cx)
   1.985 +{
   1.986 +    MacroAssembler masm;
   1.987 +
   1.988 +    masm.generateBailoutTail(a1, a2);
   1.989 +
   1.990 +    Linker linker(masm);
   1.991 +    AutoFlushICache afc("BailoutTailStub");
   1.992 +    JitCode *code = linker.newCode<NoGC>(cx, JSC::OTHER_CODE);
   1.993 +
   1.994 +#ifdef JS_ION_PERF
   1.995 +    writePerfSpewerJitCodeProfile(code, "BailoutTailStub");
   1.996 +#endif
   1.997 +
   1.998 +    return code;
   1.999 +}
  1.1000 +
