Sat, 03 Jan 2015 20:18:00 +0100
Conditionally enable double key logic according to:
private browsing mode or privacy.thirdparty.isolate preference and
implement in GetCookieStringCommon and FindCookie where it counts...
With some reservations about how to convince FindCookie users to test the
condition and pass a nullptr when double key logic is disabled.
michael@0 | 1 | /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- |
michael@0 | 2 | * vim: set ts=8 sts=4 et sw=4 tw=99: |
michael@0 | 3 | * This Source Code Form is subject to the terms of the Mozilla Public |
michael@0 | 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this |
michael@0 | 5 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ |
michael@0 | 6 | |
michael@0 | 7 | #include "jscompartment.h" |
michael@0 | 8 | |
michael@0 | 9 | #include "jit/Bailouts.h" |
michael@0 | 10 | #include "jit/IonFrames.h" |
michael@0 | 11 | #include "jit/IonLinker.h" |
michael@0 | 12 | #include "jit/IonSpewer.h" |
michael@0 | 13 | #include "jit/JitCompartment.h" |
michael@0 | 14 | #include "jit/mips/Bailouts-mips.h" |
michael@0 | 15 | #include "jit/mips/BaselineHelpers-mips.h" |
michael@0 | 16 | #ifdef JS_ION_PERF |
michael@0 | 17 | # include "jit/PerfSpewer.h" |
michael@0 | 18 | #endif |
michael@0 | 19 | #include "jit/VMFunctions.h" |
michael@0 | 20 | |
michael@0 | 21 | #include "jit/ExecutionMode-inl.h" |
michael@0 | 22 | |
michael@0 | 23 | using namespace js; |
michael@0 | 24 | using namespace js::jit; |
michael@0 | 25 | |
michael@0 | 26 | static_assert(sizeof(uintptr_t) == sizeof(uint32_t), "Not 64-bit clean."); |
michael@0 | 27 | |
michael@0 | 28 | struct EnterJITRegs |
michael@0 | 29 | { |
michael@0 | 30 | double f30; |
michael@0 | 31 | double f28; |
michael@0 | 32 | double f26; |
michael@0 | 33 | double f24; |
michael@0 | 34 | double f22; |
michael@0 | 35 | double f20; |
michael@0 | 36 | |
michael@0 | 37 | // empty slot for alignment |
michael@0 | 38 | uintptr_t align; |
michael@0 | 39 | |
michael@0 | 40 | // non-volatile registers. |
michael@0 | 41 | uintptr_t ra; |
michael@0 | 42 | uintptr_t s7; |
michael@0 | 43 | uintptr_t s6; |
michael@0 | 44 | uintptr_t s5; |
michael@0 | 45 | uintptr_t s4; |
michael@0 | 46 | uintptr_t s3; |
michael@0 | 47 | uintptr_t s2; |
michael@0 | 48 | uintptr_t s1; |
michael@0 | 49 | uintptr_t s0; |
michael@0 | 50 | }; |
michael@0 | 51 | |
michael@0 | 52 | struct EnterJITArgs |
michael@0 | 53 | { |
michael@0 | 54 | // First 4 argument placeholders |
michael@0 | 55 | void *jitcode; // <- sp points here when function is entered. |
michael@0 | 56 | int maxArgc; |
michael@0 | 57 | Value *maxArgv; |
michael@0 | 58 | InterpreterFrame *fp; |
michael@0 | 59 | |
michael@0 | 60 | // Arguments on stack |
michael@0 | 61 | CalleeToken calleeToken; |
michael@0 | 62 | JSObject *scopeChain; |
michael@0 | 63 | size_t numStackValues; |
michael@0 | 64 | Value *vp; |
michael@0 | 65 | }; |
michael@0 | 66 | |
michael@0 | 67 | static void |
michael@0 | 68 | GenerateReturn(MacroAssembler &masm, int returnCode) |
michael@0 | 69 | { |
michael@0 | 70 | MOZ_ASSERT(masm.framePushed() == sizeof(EnterJITRegs)); |
michael@0 | 71 | |
michael@0 | 72 | // Restore non-volatile registers |
michael@0 | 73 | masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, s0)), s0); |
michael@0 | 74 | masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, s1)), s1); |
michael@0 | 75 | masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, s2)), s2); |
michael@0 | 76 | masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, s3)), s3); |
michael@0 | 77 | masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, s4)), s4); |
michael@0 | 78 | masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, s5)), s5); |
michael@0 | 79 | masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, s6)), s6); |
michael@0 | 80 | masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, s7)), s7); |
michael@0 | 81 | masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, ra)), ra); |
michael@0 | 82 | |
michael@0 | 83 | // Restore non-volatile floating point registers |
michael@0 | 84 | masm.loadDouble(Address(StackPointer, offsetof(EnterJITRegs, f20)), f20); |
michael@0 | 85 | masm.loadDouble(Address(StackPointer, offsetof(EnterJITRegs, f22)), f22); |
michael@0 | 86 | masm.loadDouble(Address(StackPointer, offsetof(EnterJITRegs, f24)), f24); |
michael@0 | 87 | masm.loadDouble(Address(StackPointer, offsetof(EnterJITRegs, f26)), f26); |
michael@0 | 88 | masm.loadDouble(Address(StackPointer, offsetof(EnterJITRegs, f28)), f28); |
michael@0 | 89 | masm.loadDouble(Address(StackPointer, offsetof(EnterJITRegs, f30)), f30); |
michael@0 | 90 | |
michael@0 | 91 | masm.freeStack(sizeof(EnterJITRegs)); |
michael@0 | 92 | |
michael@0 | 93 | masm.branch(ra); |
michael@0 | 94 | } |
michael@0 | 95 | |
michael@0 | 96 | static void |
michael@0 | 97 | GeneratePrologue(MacroAssembler &masm) |
michael@0 | 98 | { |
michael@0 | 99 | // Save non-volatile registers. These must be saved by the trampoline, |
michael@0 | 100 | // rather than the JIT'd code, because they are scanned by the conservative |
michael@0 | 101 | // scanner. |
michael@0 | 102 | masm.reserveStack(sizeof(EnterJITRegs)); |
michael@0 | 103 | masm.storePtr(s0, Address(StackPointer, offsetof(EnterJITRegs, s0))); |
michael@0 | 104 | masm.storePtr(s1, Address(StackPointer, offsetof(EnterJITRegs, s1))); |
michael@0 | 105 | masm.storePtr(s2, Address(StackPointer, offsetof(EnterJITRegs, s2))); |
michael@0 | 106 | masm.storePtr(s3, Address(StackPointer, offsetof(EnterJITRegs, s3))); |
michael@0 | 107 | masm.storePtr(s4, Address(StackPointer, offsetof(EnterJITRegs, s4))); |
michael@0 | 108 | masm.storePtr(s5, Address(StackPointer, offsetof(EnterJITRegs, s5))); |
michael@0 | 109 | masm.storePtr(s6, Address(StackPointer, offsetof(EnterJITRegs, s6))); |
michael@0 | 110 | masm.storePtr(s7, Address(StackPointer, offsetof(EnterJITRegs, s7))); |
michael@0 | 111 | masm.storePtr(ra, Address(StackPointer, offsetof(EnterJITRegs, ra))); |
michael@0 | 112 | |
michael@0 | 113 | masm.as_sd(f20, StackPointer, offsetof(EnterJITRegs, f20)); |
michael@0 | 114 | masm.as_sd(f22, StackPointer, offsetof(EnterJITRegs, f22)); |
michael@0 | 115 | masm.as_sd(f24, StackPointer, offsetof(EnterJITRegs, f24)); |
michael@0 | 116 | masm.as_sd(f26, StackPointer, offsetof(EnterJITRegs, f26)); |
michael@0 | 117 | masm.as_sd(f28, StackPointer, offsetof(EnterJITRegs, f28)); |
michael@0 | 118 | masm.as_sd(f30, StackPointer, offsetof(EnterJITRegs, f30)); |
michael@0 | 119 | } |
michael@0 | 120 | |
michael@0 | 121 | |
michael@0 | 122 | /* |
michael@0 | 123 | * This method generates a trampoline for a c++ function with the following |
michael@0 | 124 | * signature: |
michael@0 | 125 | * void enter(void *code, int argc, Value *argv, InterpreterFrame *fp, |
michael@0 | 126 | * CalleeToken calleeToken, JSObject *scopeChain, Value *vp) |
michael@0 | 127 | * ...using standard EABI calling convention |
michael@0 | 128 | */ |
michael@0 | 129 | JitCode * |
michael@0 | 130 | JitRuntime::generateEnterJIT(JSContext *cx, EnterJitType type) |
michael@0 | 131 | { |
michael@0 | 132 | const Register reg_code = a0; |
michael@0 | 133 | const Register reg_argc = a1; |
michael@0 | 134 | const Register reg_argv = a2; |
michael@0 | 135 | const Register reg_frame = a3; |
michael@0 | 136 | |
michael@0 | 137 | MOZ_ASSERT(OsrFrameReg == reg_frame); |
michael@0 | 138 | |
michael@0 | 139 | MacroAssembler masm(cx); |
michael@0 | 140 | GeneratePrologue(masm); |
michael@0 | 141 | |
michael@0 | 142 | const Address slotToken(sp, sizeof(EnterJITRegs) + offsetof(EnterJITArgs, calleeToken)); |
michael@0 | 143 | const Address slotVp(sp, sizeof(EnterJITRegs) + offsetof(EnterJITArgs, vp)); |
michael@0 | 144 | |
michael@0 | 145 | // Save stack pointer into s4 |
michael@0 | 146 | masm.movePtr(StackPointer, s4); |
michael@0 | 147 | |
michael@0 | 148 | // Load calleeToken into s2. |
michael@0 | 149 | masm.loadPtr(slotToken, s2); |
michael@0 | 150 | |
michael@0 | 151 | // Save stack pointer as baseline frame. |
michael@0 | 152 | if (type == EnterJitBaseline) |
michael@0 | 153 | masm.movePtr(StackPointer, BaselineFrameReg); |
michael@0 | 154 | |
michael@0 | 155 | // Load the number of actual arguments into s3. |
michael@0 | 156 | masm.loadPtr(slotVp, s3); |
michael@0 | 157 | masm.unboxInt32(Address(s3, 0), s3); |
michael@0 | 158 | |
michael@0 | 159 | /*************************************************************** |
michael@0 | 160 | Loop over argv vector, push arguments onto stack in reverse order |
michael@0 | 161 | ***************************************************************/ |
michael@0 | 162 | |
michael@0 | 163 | masm.as_sll(s0, reg_argc, 3); // s0 = argc * 8 |
michael@0 | 164 | masm.addPtr(reg_argv, s0); // s0 = argv + argc * 8 |
michael@0 | 165 | |
michael@0 | 166 | // Loop over arguments, copying them from an unknown buffer onto the Ion |
michael@0 | 167 | // stack so they can be accessed from JIT'ed code. |
michael@0 | 168 | Label header, footer; |
michael@0 | 169 | // If there aren't any arguments, don't do anything |
michael@0 | 170 | masm.ma_b(s0, reg_argv, &footer, Assembler::BelowOrEqual, ShortJump); |
michael@0 | 171 | { |
michael@0 | 172 | masm.bind(&header); |
michael@0 | 173 | |
michael@0 | 174 | masm.subPtr(Imm32(2 * sizeof(uintptr_t)), s0); |
michael@0 | 175 | masm.subPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer); |
michael@0 | 176 | |
michael@0 | 177 | ValueOperand value = ValueOperand(s6, s7); |
michael@0 | 178 | masm.loadValue(Address(s0, 0), value); |
michael@0 | 179 | masm.storeValue(value, Address(StackPointer, 0)); |
michael@0 | 180 | |
michael@0 | 181 | masm.ma_b(s0, reg_argv, &header, Assembler::Above, ShortJump); |
michael@0 | 182 | } |
michael@0 | 183 | masm.bind(&footer); |
michael@0 | 184 | |
michael@0 | 185 | masm.subPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer); |
michael@0 | 186 | masm.storePtr(s3, Address(StackPointer, sizeof(uintptr_t))); // actual arguments |
michael@0 | 187 | masm.storePtr(s2, Address(StackPointer, 0)); // callee token |
michael@0 | 188 | |
michael@0 | 189 | masm.subPtr(StackPointer, s4); |
michael@0 | 190 | masm.makeFrameDescriptor(s4, JitFrame_Entry); |
michael@0 | 191 | masm.push(s4); // descriptor |
michael@0 | 192 | |
michael@0 | 193 | CodeLabel returnLabel; |
michael@0 | 194 | if (type == EnterJitBaseline) { |
michael@0 | 195 | // Handle OSR. |
michael@0 | 196 | GeneralRegisterSet regs(GeneralRegisterSet::All()); |
michael@0 | 197 | regs.take(JSReturnOperand); |
michael@0 | 198 | regs.take(OsrFrameReg); |
michael@0 | 199 | regs.take(BaselineFrameReg); |
michael@0 | 200 | regs.take(reg_code); |
michael@0 | 201 | |
michael@0 | 202 | const Address slotNumStackValues(BaselineFrameReg, sizeof(EnterJITRegs) + |
michael@0 | 203 | offsetof(EnterJITArgs, numStackValues)); |
michael@0 | 204 | const Address slotScopeChain(BaselineFrameReg, sizeof(EnterJITRegs) + |
michael@0 | 205 | offsetof(EnterJITArgs, scopeChain)); |
michael@0 | 206 | |
michael@0 | 207 | Label notOsr; |
michael@0 | 208 | masm.ma_b(OsrFrameReg, OsrFrameReg, ¬Osr, Assembler::Zero, ShortJump); |
michael@0 | 209 | |
michael@0 | 210 | Register scratch = regs.takeAny(); |
michael@0 | 211 | |
michael@0 | 212 | Register numStackValues = regs.takeAny(); |
michael@0 | 213 | masm.load32(slotNumStackValues, numStackValues); |
michael@0 | 214 | |
michael@0 | 215 | // Push return address, previous frame pointer. |
michael@0 | 216 | masm.subPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer); |
michael@0 | 217 | masm.ma_li(scratch, returnLabel.dest()); |
michael@0 | 218 | masm.storePtr(scratch, Address(StackPointer, sizeof(uintptr_t))); |
michael@0 | 219 | masm.storePtr(BaselineFrameReg, Address(StackPointer, 0)); |
michael@0 | 220 | |
michael@0 | 221 | // Reserve frame. |
michael@0 | 222 | Register framePtr = BaselineFrameReg; |
michael@0 | 223 | masm.subPtr(Imm32(BaselineFrame::Size()), StackPointer); |
michael@0 | 224 | masm.movePtr(StackPointer, framePtr); |
michael@0 | 225 | |
michael@0 | 226 | // Reserve space for locals and stack values. |
michael@0 | 227 | masm.ma_sll(scratch, numStackValues, Imm32(3)); |
michael@0 | 228 | masm.subPtr(scratch, StackPointer); |
michael@0 | 229 | |
michael@0 | 230 | // Enter exit frame. |
michael@0 | 231 | masm.addPtr(Imm32(BaselineFrame::Size() + BaselineFrame::FramePointerOffset), scratch); |
michael@0 | 232 | masm.makeFrameDescriptor(scratch, JitFrame_BaselineJS); |
michael@0 | 233 | |
michael@0 | 234 | // Push frame descriptor and fake return address. |
michael@0 | 235 | masm.reserveStack(2 * sizeof(uintptr_t)); |
michael@0 | 236 | masm.storePtr(scratch, Address(StackPointer, sizeof(uintptr_t))); // Frame descriptor |
michael@0 | 237 | masm.storePtr(zero, Address(StackPointer, 0)); // fake return address |
michael@0 | 238 | |
michael@0 | 239 | masm.enterFakeExitFrame(); |
michael@0 | 240 | |
michael@0 | 241 | masm.reserveStack(2 * sizeof(uintptr_t)); |
michael@0 | 242 | masm.storePtr(framePtr, Address(StackPointer, sizeof(uintptr_t))); // BaselineFrame |
michael@0 | 243 | masm.storePtr(reg_code, Address(StackPointer, 0)); // jitcode |
michael@0 | 244 | |
michael@0 | 245 | masm.setupUnalignedABICall(3, scratch); |
michael@0 | 246 | masm.passABIArg(BaselineFrameReg); // BaselineFrame |
michael@0 | 247 | masm.passABIArg(OsrFrameReg); // InterpreterFrame |
michael@0 | 248 | masm.passABIArg(numStackValues); |
michael@0 | 249 | masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, jit::InitBaselineFrameForOsr)); |
michael@0 | 250 | |
michael@0 | 251 | Register jitcode = regs.takeAny(); |
michael@0 | 252 | masm.loadPtr(Address(StackPointer, 0), jitcode); |
michael@0 | 253 | masm.loadPtr(Address(StackPointer, sizeof(uintptr_t)), framePtr); |
michael@0 | 254 | masm.freeStack(2 * sizeof(uintptr_t)); |
michael@0 | 255 | |
michael@0 | 256 | MOZ_ASSERT(jitcode != ReturnReg); |
michael@0 | 257 | |
michael@0 | 258 | Label error; |
michael@0 | 259 | masm.freeStack(IonExitFrameLayout::SizeWithFooter()); |
michael@0 | 260 | masm.addPtr(Imm32(BaselineFrame::Size()), framePtr); |
michael@0 | 261 | masm.branchIfFalseBool(ReturnReg, &error); |
michael@0 | 262 | |
michael@0 | 263 | masm.jump(jitcode); |
michael@0 | 264 | |
michael@0 | 265 | // OOM: load error value, discard return address and previous frame |
michael@0 | 266 | // pointer and return. |
michael@0 | 267 | masm.bind(&error); |
michael@0 | 268 | masm.movePtr(framePtr, StackPointer); |
michael@0 | 269 | masm.addPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer); |
michael@0 | 270 | masm.moveValue(MagicValue(JS_ION_ERROR), JSReturnOperand); |
michael@0 | 271 | masm.ma_li(scratch, returnLabel.dest()); |
michael@0 | 272 | masm.jump(scratch); |
michael@0 | 273 | |
michael@0 | 274 | masm.bind(¬Osr); |
michael@0 | 275 | // Load the scope chain in R1. |
michael@0 | 276 | MOZ_ASSERT(R1.scratchReg() != reg_code); |
michael@0 | 277 | masm.loadPtr(slotScopeChain, R1.scratchReg()); |
michael@0 | 278 | } |
michael@0 | 279 | |
michael@0 | 280 | // Call the function with pushing return address to stack. |
michael@0 | 281 | masm.ma_callIonHalfPush(reg_code); |
michael@0 | 282 | |
michael@0 | 283 | if (type == EnterJitBaseline) { |
michael@0 | 284 | // Baseline OSR will return here. |
michael@0 | 285 | masm.bind(returnLabel.src()); |
michael@0 | 286 | if (!masm.addCodeLabel(returnLabel)) |
michael@0 | 287 | return nullptr; |
michael@0 | 288 | } |
michael@0 | 289 | |
michael@0 | 290 | // Pop arguments off the stack. |
michael@0 | 291 | // s0 <- 8*argc (size of all arguments we pushed on the stack) |
michael@0 | 292 | masm.pop(s0); |
michael@0 | 293 | masm.rshiftPtr(Imm32(4), s0); |
michael@0 | 294 | masm.addPtr(s0, StackPointer); |
michael@0 | 295 | |
michael@0 | 296 | // Store the returned value into the slotVp |
michael@0 | 297 | masm.loadPtr(slotVp, s1); |
michael@0 | 298 | masm.storeValue(JSReturnOperand, Address(s1, 0)); |
michael@0 | 299 | |
michael@0 | 300 | // Restore non-volatile registers and return. |
michael@0 | 301 | GenerateReturn(masm, ShortJump); |
michael@0 | 302 | |
michael@0 | 303 | Linker linker(masm); |
michael@0 | 304 | AutoFlushICache afc("GenerateEnterJIT"); |
michael@0 | 305 | JitCode *code = linker.newCode<NoGC>(cx, JSC::OTHER_CODE); |
michael@0 | 306 | |
michael@0 | 307 | #ifdef JS_ION_PERF |
michael@0 | 308 | writePerfSpewerJitCodeProfile(code, "EnterJIT"); |
michael@0 | 309 | #endif |
michael@0 | 310 | |
michael@0 | 311 | return code; |
michael@0 | 312 | } |
michael@0 | 313 | |
michael@0 | 314 | JitCode * |
michael@0 | 315 | JitRuntime::generateInvalidator(JSContext *cx) |
michael@0 | 316 | { |
michael@0 | 317 | MacroAssembler masm(cx); |
michael@0 | 318 | |
michael@0 | 319 | // NOTE: Members ionScript_ and osiPointReturnAddress_ of |
michael@0 | 320 | // InvalidationBailoutStack are already on the stack. |
michael@0 | 321 | static const uint32_t STACK_DATA_SIZE = sizeof(InvalidationBailoutStack) - |
michael@0 | 322 | 2 * sizeof(uintptr_t); |
michael@0 | 323 | |
michael@0 | 324 | // Stack has to be aligned here. If not, we will have to fix it. |
michael@0 | 325 | masm.checkStackAlignment(); |
michael@0 | 326 | |
michael@0 | 327 | // Make room for data on stack. |
michael@0 | 328 | masm.subPtr(Imm32(STACK_DATA_SIZE), StackPointer); |
michael@0 | 329 | |
michael@0 | 330 | // Save general purpose registers |
michael@0 | 331 | for (uint32_t i = 0; i < Registers::Total; i++) { |
michael@0 | 332 | Address address = Address(StackPointer, InvalidationBailoutStack::offsetOfRegs() + |
michael@0 | 333 | i * sizeof(uintptr_t)); |
michael@0 | 334 | masm.storePtr(Register::FromCode(i), address); |
michael@0 | 335 | } |
michael@0 | 336 | |
michael@0 | 337 | // Save floating point registers |
michael@0 | 338 | // We can use as_sd because stack is aligned. |
michael@0 | 339 | for (uint32_t i = 0; i < FloatRegisters::Total; i++) |
michael@0 | 340 | masm.as_sd(FloatRegister::FromCode(i), StackPointer, |
michael@0 | 341 | InvalidationBailoutStack::offsetOfFpRegs() + i * sizeof(double)); |
michael@0 | 342 | |
michael@0 | 343 | // Pass pointer to InvalidationBailoutStack structure. |
michael@0 | 344 | masm.movePtr(StackPointer, a0); |
michael@0 | 345 | |
michael@0 | 346 | // Reserve place for return value and BailoutInfo pointer |
michael@0 | 347 | masm.subPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer); |
michael@0 | 348 | // Pass pointer to return value. |
michael@0 | 349 | masm.ma_addu(a1, StackPointer, Imm32(sizeof(uintptr_t))); |
michael@0 | 350 | // Pass pointer to BailoutInfo |
michael@0 | 351 | masm.movePtr(StackPointer, a2); |
michael@0 | 352 | |
michael@0 | 353 | masm.setupAlignedABICall(3); |
michael@0 | 354 | masm.passABIArg(a0); |
michael@0 | 355 | masm.passABIArg(a1); |
michael@0 | 356 | masm.passABIArg(a2); |
michael@0 | 357 | masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, InvalidationBailout)); |
michael@0 | 358 | |
michael@0 | 359 | masm.loadPtr(Address(StackPointer, 0), a2); |
michael@0 | 360 | masm.loadPtr(Address(StackPointer, sizeof(uintptr_t)), a1); |
michael@0 | 361 | // Remove the return address, the IonScript, the register state |
michael@0 | 362 | // (InvalidationBailoutStack) and the space that was allocated for the |
michael@0 | 363 | // return value. |
michael@0 | 364 | masm.addPtr(Imm32(sizeof(InvalidationBailoutStack) + 2 * sizeof(uintptr_t)), StackPointer); |
michael@0 | 365 | // remove the space that this frame was using before the bailout |
michael@0 | 366 | // (computed by InvalidationBailout) |
michael@0 | 367 | masm.addPtr(a1, StackPointer); |
michael@0 | 368 | |
michael@0 | 369 | // Jump to shared bailout tail. The BailoutInfo pointer has to be in a2. |
michael@0 | 370 | JitCode *bailoutTail = cx->runtime()->jitRuntime()->getBailoutTail(); |
michael@0 | 371 | masm.branch(bailoutTail); |
michael@0 | 372 | |
michael@0 | 373 | Linker linker(masm); |
michael@0 | 374 | AutoFlushICache afc("Invalidator"); |
michael@0 | 375 | JitCode *code = linker.newCode<NoGC>(cx, JSC::OTHER_CODE); |
michael@0 | 376 | IonSpew(IonSpew_Invalidate, " invalidation thunk created at %p", (void *) code->raw()); |
michael@0 | 377 | |
michael@0 | 378 | #ifdef JS_ION_PERF |
michael@0 | 379 | writePerfSpewerJitCodeProfile(code, "Invalidator"); |
michael@0 | 380 | #endif |
michael@0 | 381 | |
michael@0 | 382 | return code; |
michael@0 | 383 | } |
michael@0 | 384 | |
michael@0 | 385 | JitCode * |
michael@0 | 386 | JitRuntime::generateArgumentsRectifier(JSContext *cx, ExecutionMode mode, void **returnAddrOut) |
michael@0 | 387 | { |
michael@0 | 388 | MacroAssembler masm(cx); |
michael@0 | 389 | |
michael@0 | 390 | // ArgumentsRectifierReg contains the |nargs| pushed onto the current |
michael@0 | 391 | // frame. Including |this|, there are (|nargs| + 1) arguments to copy. |
michael@0 | 392 | MOZ_ASSERT(ArgumentsRectifierReg == s3); |
michael@0 | 393 | |
michael@0 | 394 | Register numActArgsReg = t6; |
michael@0 | 395 | Register calleeTokenReg = t7; |
michael@0 | 396 | Register numArgsReg = t5; |
michael@0 | 397 | |
michael@0 | 398 | // Copy number of actual arguments into numActArgsReg |
michael@0 | 399 | masm.loadPtr(Address(StackPointer, IonRectifierFrameLayout::offsetOfNumActualArgs()), |
michael@0 | 400 | numActArgsReg); |
michael@0 | 401 | |
michael@0 | 402 | // Load the number of |undefined|s to push into t1. |
michael@0 | 403 | masm.loadPtr(Address(StackPointer, IonRectifierFrameLayout::offsetOfCalleeToken()), |
michael@0 | 404 | calleeTokenReg); |
michael@0 | 405 | masm.load16ZeroExtend(Address(calleeTokenReg, JSFunction::offsetOfNargs()), numArgsReg); |
michael@0 | 406 | |
michael@0 | 407 | masm.ma_subu(t1, numArgsReg, s3); |
michael@0 | 408 | |
michael@0 | 409 | masm.moveValue(UndefinedValue(), ValueOperand(t3, t4)); |
michael@0 | 410 | |
michael@0 | 411 | masm.movePtr(StackPointer, t2); // Save %sp. |
michael@0 | 412 | |
michael@0 | 413 | // Push undefined. |
michael@0 | 414 | { |
michael@0 | 415 | Label undefLoopTop; |
michael@0 | 416 | masm.bind(&undefLoopTop); |
michael@0 | 417 | |
michael@0 | 418 | masm.subPtr(Imm32(sizeof(Value)), StackPointer); |
michael@0 | 419 | masm.storeValue(ValueOperand(t3, t4), Address(StackPointer, 0)); |
michael@0 | 420 | masm.sub32(Imm32(1), t1); |
michael@0 | 421 | |
michael@0 | 422 | masm.ma_b(t1, t1, &undefLoopTop, Assembler::NonZero, ShortJump); |
michael@0 | 423 | } |
michael@0 | 424 | |
michael@0 | 425 | // Get the topmost argument. |
michael@0 | 426 | masm.ma_sll(t0, s3, Imm32(3)); // t0 <- nargs * 8 |
michael@0 | 427 | masm.addPtr(t0, t2); // t2 <- t2(saved sp) + nargs * 8 |
michael@0 | 428 | masm.addPtr(Imm32(sizeof(IonRectifierFrameLayout)), t2); |
michael@0 | 429 | |
michael@0 | 430 | // Push arguments, |nargs| + 1 times (to include |this|). |
michael@0 | 431 | { |
michael@0 | 432 | Label copyLoopTop, initialSkip; |
michael@0 | 433 | |
michael@0 | 434 | masm.ma_b(&initialSkip, ShortJump); |
michael@0 | 435 | |
michael@0 | 436 | masm.bind(©LoopTop); |
michael@0 | 437 | masm.subPtr(Imm32(sizeof(Value)), t2); |
michael@0 | 438 | masm.sub32(Imm32(1), s3); |
michael@0 | 439 | |
michael@0 | 440 | masm.bind(&initialSkip); |
michael@0 | 441 | |
michael@0 | 442 | MOZ_ASSERT(sizeof(Value) == 2 * sizeof(uint32_t)); |
michael@0 | 443 | // Read argument and push to stack. |
michael@0 | 444 | masm.subPtr(Imm32(sizeof(Value)), StackPointer); |
michael@0 | 445 | masm.load32(Address(t2, NUNBOX32_TYPE_OFFSET), t0); |
michael@0 | 446 | masm.store32(t0, Address(StackPointer, NUNBOX32_TYPE_OFFSET)); |
michael@0 | 447 | masm.load32(Address(t2, NUNBOX32_PAYLOAD_OFFSET), t0); |
michael@0 | 448 | masm.store32(t0, Address(StackPointer, NUNBOX32_PAYLOAD_OFFSET)); |
michael@0 | 449 | |
michael@0 | 450 | masm.ma_b(s3, s3, ©LoopTop, Assembler::NonZero, ShortJump); |
michael@0 | 451 | } |
michael@0 | 452 | |
michael@0 | 453 | // translate the framesize from values into bytes |
michael@0 | 454 | masm.ma_addu(t0, numArgsReg, Imm32(1)); |
michael@0 | 455 | masm.lshiftPtr(Imm32(3), t0); |
michael@0 | 456 | |
michael@0 | 457 | // Construct sizeDescriptor. |
michael@0 | 458 | masm.makeFrameDescriptor(t0, JitFrame_Rectifier); |
michael@0 | 459 | |
michael@0 | 460 | // Construct IonJSFrameLayout. |
michael@0 | 461 | masm.subPtr(Imm32(3 * sizeof(uintptr_t)), StackPointer); |
michael@0 | 462 | // Push actual arguments. |
michael@0 | 463 | masm.storePtr(numActArgsReg, Address(StackPointer, 2 * sizeof(uintptr_t))); |
michael@0 | 464 | // Push callee token. |
michael@0 | 465 | masm.storePtr(calleeTokenReg, Address(StackPointer, sizeof(uintptr_t))); |
michael@0 | 466 | // Push frame descriptor. |
michael@0 | 467 | masm.storePtr(t0, Address(StackPointer, 0)); |
michael@0 | 468 | |
michael@0 | 469 | // Call the target function. |
michael@0 | 470 | // Note that this code assumes the function is JITted. |
michael@0 | 471 | masm.loadPtr(Address(calleeTokenReg, JSFunction::offsetOfNativeOrScript()), t1); |
michael@0 | 472 | masm.loadBaselineOrIonRaw(t1, t1, mode, nullptr); |
michael@0 | 473 | masm.ma_callIonHalfPush(t1); |
michael@0 | 474 | |
michael@0 | 475 | uint32_t returnOffset = masm.currentOffset(); |
michael@0 | 476 | |
michael@0 | 477 | // arg1 |
michael@0 | 478 | // ... |
michael@0 | 479 | // argN |
michael@0 | 480 | // num actual args |
michael@0 | 481 | // callee token |
michael@0 | 482 | // sizeDescriptor <- sp now |
michael@0 | 483 | // return address |
michael@0 | 484 | |
michael@0 | 485 | // Remove the rectifier frame. |
michael@0 | 486 | // t0 <- descriptor with FrameType. |
michael@0 | 487 | masm.loadPtr(Address(StackPointer, 0), t0); |
michael@0 | 488 | masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), t0); // t0 <- descriptor. |
michael@0 | 489 | |
michael@0 | 490 | // Discard descriptor, calleeToken and number of actual arguments. |
michael@0 | 491 | masm.addPtr(Imm32(3 * sizeof(uintptr_t)), StackPointer); |
michael@0 | 492 | |
michael@0 | 493 | // arg1 |
michael@0 | 494 | // ... |
michael@0 | 495 | // argN <- sp now; t0 <- frame descriptor |
michael@0 | 496 | // num actual args |
michael@0 | 497 | // callee token |
michael@0 | 498 | // sizeDescriptor |
michael@0 | 499 | // return address |
michael@0 | 500 | |
michael@0 | 501 | // Discard pushed arguments. |
michael@0 | 502 | masm.addPtr(t0, StackPointer); |
michael@0 | 503 | |
michael@0 | 504 | masm.ret(); |
michael@0 | 505 | Linker linker(masm); |
michael@0 | 506 | AutoFlushICache afc("ArgumentsRectifier"); |
michael@0 | 507 | JitCode *code = linker.newCode<NoGC>(cx, JSC::OTHER_CODE); |
michael@0 | 508 | |
michael@0 | 509 | CodeOffsetLabel returnLabel(returnOffset); |
michael@0 | 510 | returnLabel.fixup(&masm); |
michael@0 | 511 | if (returnAddrOut) |
michael@0 | 512 | *returnAddrOut = (void *) (code->raw() + returnLabel.offset()); |
michael@0 | 513 | |
michael@0 | 514 | #ifdef JS_ION_PERF |
michael@0 | 515 | writePerfSpewerJitCodeProfile(code, "ArgumentsRectifier"); |
michael@0 | 516 | #endif |
michael@0 | 517 | |
michael@0 | 518 | return code; |
michael@0 | 519 | } |
michael@0 | 520 | |
michael@0 | 521 | /* There are two different stack layouts when doing bailout. They are |
michael@0 | 522 | * represented via class BailoutStack. |
michael@0 | 523 | * |
michael@0 | 524 | * - First case is when bailout is done through the bailout table. In this case |
michael@0 | 525 | * table offset is stored in $ra (look at JitRuntime::generateBailoutTable()) |
michael@0 | 526 | * and thunk code should save it on stack. In this case frameClassId_ cannot |
michael@0 | 527 | * be NO_FRAME_SIZE_CLASS_ID. Members snapshotOffset_ and padding_ are not on |
michael@0 | 528 | * the stack. |
michael@0 | 529 | * |
michael@0 | 530 | * - Other case is when bailout is done via out of line code (lazy bailout). |
michael@0 | 531 | * In this case frame size is stored in $ra (look at |
michael@0 | 532 | * CodeGeneratorMIPS::generateOutOfLineCode()) and thunk code should save it |
michael@0 | 533 | * on stack. Other difference is that members snapshotOffset_ and padding_ are |
michael@0 | 534 | * pushed to the stack by CodeGeneratorMIPS::visitOutOfLineBailout(). Field |
michael@0 | 535 | * frameClassId_ is forced to be NO_FRAME_SIZE_CLASS_ID |
michael@0 | 536 | * (See: JitRuntime::generateBailoutHandler). |
michael@0 | 537 | */ |
michael@0 | 538 | static void |
michael@0 | 539 | GenerateBailoutThunk(JSContext *cx, MacroAssembler &masm, uint32_t frameClass) |
michael@0 | 540 | { |
michael@0 | 541 | // NOTE: Members snapshotOffset_ and padding_ of BailoutStack |
michael@0 | 542 | // are not stored in this function. |
michael@0 | 543 | static const uint32_t bailoutDataSize = sizeof(BailoutStack) - 2 * sizeof(uintptr_t); |
michael@0 | 544 | static const uint32_t bailoutInfoOutParamSize = 2 * sizeof(uintptr_t); |
michael@0 | 545 | |
michael@0 | 546 | // Make sure that alignment is proper. |
michael@0 | 547 | masm.checkStackAlignment(); |
michael@0 | 548 | |
michael@0 | 549 | // Make room for data. |
michael@0 | 550 | masm.subPtr(Imm32(bailoutDataSize), StackPointer); |
michael@0 | 551 | |
michael@0 | 552 | // Save general purpose registers. |
michael@0 | 553 | for (uint32_t i = 0; i < Registers::Total; i++) { |
michael@0 | 554 | uint32_t off = BailoutStack::offsetOfRegs() + i * sizeof(uintptr_t); |
michael@0 | 555 | masm.storePtr(Register::FromCode(i), Address(StackPointer, off)); |
michael@0 | 556 | } |
michael@0 | 557 | |
michael@0 | 558 | // Save floating point registers |
michael@0 | 559 | // We can use as_sd because stack is aligned. |
michael@0 | 560 | for (uintptr_t i = 0; i < FloatRegisters::Total; i++) |
michael@0 | 561 | masm.as_sd(FloatRegister::FromCode(i), StackPointer, |
michael@0 | 562 | BailoutStack::offsetOfFpRegs() + i * sizeof(double)); |
michael@0 | 563 | |
michael@0 | 564 | // Store the frameSize_ or tableOffset_ stored in ra |
michael@0 | 565 | // See: JitRuntime::generateBailoutTable() |
michael@0 | 566 | // See: CodeGeneratorMIPS::generateOutOfLineCode() |
michael@0 | 567 | masm.storePtr(ra, Address(StackPointer, BailoutStack::offsetOfFrameSize())); |
michael@0 | 568 | |
michael@0 | 569 | // Put frame class to stack |
michael@0 | 570 | masm.storePtr(ImmWord(frameClass), Address(StackPointer, BailoutStack::offsetOfFrameClass())); |
michael@0 | 571 | |
michael@0 | 572 | // Put pointer to BailoutStack as first argument to the Bailout() |
michael@0 | 573 | masm.movePtr(StackPointer, a0); |
michael@0 | 574 | // Put pointer to BailoutInfo |
michael@0 | 575 | masm.subPtr(Imm32(bailoutInfoOutParamSize), StackPointer); |
michael@0 | 576 | masm.storePtr(ImmPtr(nullptr), Address(StackPointer, 0)); |
michael@0 | 577 | masm.movePtr(StackPointer, a1); |
michael@0 | 578 | |
michael@0 | 579 | masm.setupAlignedABICall(2); |
michael@0 | 580 | masm.passABIArg(a0); |
michael@0 | 581 | masm.passABIArg(a1); |
michael@0 | 582 | masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, Bailout)); |
michael@0 | 583 | |
michael@0 | 584 | // Get BailoutInfo pointer |
michael@0 | 585 | masm.loadPtr(Address(StackPointer, 0), a2); |
michael@0 | 586 | |
michael@0 | 587 | // Remove both the bailout frame and the topmost Ion frame's stack. |
michael@0 | 588 | if (frameClass == NO_FRAME_SIZE_CLASS_ID) { |
michael@0 | 589 | // Load frameSize from stack |
michael@0 | 590 | masm.loadPtr(Address(StackPointer, |
michael@0 | 591 | bailoutInfoOutParamSize + BailoutStack::offsetOfFrameSize()), a1); |
michael@0 | 592 | |
michael@0 | 593 | // Remove complete BailoutStack class and data after it |
michael@0 | 594 | masm.addPtr(Imm32(sizeof(BailoutStack) + bailoutInfoOutParamSize), StackPointer); |
michael@0 | 595 | // Remove frame size from stack |
michael@0 | 596 | masm.addPtr(a1, StackPointer); |
michael@0 | 597 | } else { |
michael@0 | 598 | uint32_t frameSize = FrameSizeClass::FromClass(frameClass).frameSize(); |
michael@0 | 599 | // Remove the data this function added and frame size. |
michael@0 | 600 | masm.addPtr(Imm32(bailoutDataSize + bailoutInfoOutParamSize + frameSize), StackPointer); |
michael@0 | 601 | } |
michael@0 | 602 | |
michael@0 | 603 | // Jump to shared bailout tail. The BailoutInfo pointer has to be in a2. |
michael@0 | 604 | JitCode *bailoutTail = cx->runtime()->jitRuntime()->getBailoutTail(); |
michael@0 | 605 | masm.branch(bailoutTail); |
michael@0 | 606 | } |
michael@0 | 607 | |
michael@0 | 608 | JitCode * |
michael@0 | 609 | JitRuntime::generateBailoutTable(JSContext *cx, uint32_t frameClass) |
michael@0 | 610 | { |
michael@0 | 611 | MacroAssembler masm(cx); |
michael@0 | 612 | |
michael@0 | 613 | Label bailout; |
michael@0 | 614 | for (size_t i = 0; i < BAILOUT_TABLE_SIZE; i++) { |
michael@0 | 615 | // Calculate offset to the end of table |
michael@0 | 616 | int32_t offset = (BAILOUT_TABLE_SIZE - i) * BAILOUT_TABLE_ENTRY_SIZE; |
michael@0 | 617 | |
michael@0 | 618 | // We use the 'ra' as table offset later in GenerateBailoutThunk |
michael@0 | 619 | masm.as_bal(BOffImm16(offset)); |
michael@0 | 620 | masm.nop(); |
michael@0 | 621 | } |
michael@0 | 622 | masm.bind(&bailout); |
michael@0 | 623 | |
michael@0 | 624 | GenerateBailoutThunk(cx, masm, frameClass); |
michael@0 | 625 | |
michael@0 | 626 | Linker linker(masm); |
michael@0 | 627 | AutoFlushICache afc("BailoutTable"); |
michael@0 | 628 | JitCode *code = linker.newCode<NoGC>(cx, JSC::OTHER_CODE); |
michael@0 | 629 | |
michael@0 | 630 | #ifdef JS_ION_PERF |
michael@0 | 631 | writePerfSpewerJitCodeProfile(code, "BailoutTable"); |
michael@0 | 632 | #endif |
michael@0 | 633 | |
michael@0 | 634 | return code; |
michael@0 | 635 | } |
michael@0 | 636 | |
michael@0 | 637 | JitCode * |
michael@0 | 638 | JitRuntime::generateBailoutHandler(JSContext *cx) |
michael@0 | 639 | { |
michael@0 | 640 | MacroAssembler masm(cx); |
michael@0 | 641 | GenerateBailoutThunk(cx, masm, NO_FRAME_SIZE_CLASS_ID); |
michael@0 | 642 | |
michael@0 | 643 | Linker linker(masm); |
michael@0 | 644 | AutoFlushICache afc("BailoutHandler"); |
michael@0 | 645 | JitCode *code = linker.newCode<NoGC>(cx, JSC::OTHER_CODE); |
michael@0 | 646 | |
michael@0 | 647 | #ifdef JS_ION_PERF |
michael@0 | 648 | writePerfSpewerJitCodeProfile(code, "BailoutHandler"); |
michael@0 | 649 | #endif |
michael@0 | 650 | |
michael@0 | 651 | return code; |
michael@0 | 652 | } |
michael@0 | 653 | |
// Return a cached wrapper for |f| if one exists; otherwise generate a
// trampoline that calls the C++ VM function described by |f| from JIT code.
// The wrapper links an exit frame, marshals the explicit arguments from the
// stack into ABI argument slots, reserves stack space for the outparam,
// calls the wrapped function, tests the failure value, and loads the result
// back into the return registers before popping the caller's arguments.
JitCode *
JitRuntime::generateVMWrapper(JSContext *cx, const VMFunction &f)
{
    MOZ_ASSERT(functionWrappers_);
    MOZ_ASSERT(functionWrappers_->initialized());
    VMWrapperMap::AddPtr p = functionWrappers_->lookupForAdd(&f);
    if (p)
        return p->value();  // Wrapper already generated for this VMFunction.

    MacroAssembler masm(cx);

    // Registers the wrapper is allowed to clobber while marshalling.
    GeneralRegisterSet regs = GeneralRegisterSet(Register::Codes::WrapperMask);

    static_assert((Register::Codes::VolatileMask & ~Register::Codes::WrapperMask) == 0,
                  "Wrapper register set should be a superset of Volatile register set.");

    // The context is the first argument; a0 is the first argument register.
    Register cxreg = a0;
    regs.take(cxreg);

    // We're aligned to an exit frame, so link it up.
    masm.enterExitFrameAndLoadContext(&f, cxreg, regs.getAny(), f.executionMode);

    // Save the base of the argument set stored on the stack.
    Register argsBase = InvalidReg;
    if (f.explicitArgs) {
        argsBase = t1; // Use temporary register.
        regs.take(argsBase);
        masm.ma_addu(argsBase, StackPointer, Imm32(IonExitFrameLayout::SizeWithFooter()));
    }

    // Reserve space for the outparameter.
    Register outReg = InvalidReg;
    switch (f.outParam) {
      case Type_Value:
        outReg = t0; // Use temporary register.
        regs.take(outReg);
        // Value outparam has to be 8 byte aligned because the called
        // function can use sdc1 or ldc1 instructions to access it.
        masm.reserveStack((StackAlignment - sizeof(uintptr_t)) + sizeof(Value));
        masm.alignPointerUp(StackPointer, outReg, StackAlignment);
        break;

      case Type_Handle:
        outReg = t0;
        regs.take(outReg);
        if (f.outParamRootType == VMFunction::RootValue) {
            // Value outparam has to be 8 byte aligned because the called
            // function can use sdc1 or ldc1 instructions to access it.
            masm.reserveStack((StackAlignment - sizeof(uintptr_t)) + sizeof(Value));
            masm.alignPointerUp(StackPointer, outReg, StackAlignment);
            masm.storeValue(UndefinedValue(), Address(outReg, 0));
        }
        else {
            masm.PushEmptyRooted(f.outParamRootType);
            masm.movePtr(StackPointer, outReg);
        }
        break;

      case Type_Bool:
      case Type_Int32:
        MOZ_ASSERT(sizeof(uintptr_t) == sizeof(uint32_t));
        // Fall through: bool/int32 outparams share the pointer-sized slot.
      case Type_Pointer:
        outReg = t0;
        regs.take(outReg);
        masm.reserveStack(sizeof(uintptr_t));
        masm.movePtr(StackPointer, outReg);
        break;

      case Type_Double:
        outReg = t0;
        regs.take(outReg);
        // Double outparam has to be 8 byte aligned because the called
        // function can use sdc1 or ldc1 instructions to access it.
        masm.reserveStack((StackAlignment - sizeof(uintptr_t)) + sizeof(double));
        masm.alignPointerUp(StackPointer, outReg, StackAlignment);
        break;

      default:
        MOZ_ASSERT(f.outParam == Type_Void);
        break;
    }

    masm.setupUnalignedABICall(f.argc(), regs.getAny());
    masm.passABIArg(cxreg);

    // Running displacement into the caller-pushed argument area (argsBase).
    size_t argDisp = 0;

    // Copy any arguments.
    for (uint32_t explicitArg = 0; explicitArg < f.explicitArgs; explicitArg++) {
        MoveOperand from;
        switch (f.argProperties(explicitArg)) {
          case VMFunction::WordByValue:
            masm.passABIArg(MoveOperand(argsBase, argDisp), MoveOp::GENERAL);
            argDisp += sizeof(uint32_t);
            break;
          case VMFunction::DoubleByValue:
            // Values should be passed by reference, not by value, so we
            // assert that the argument is a double-precision float.
            MOZ_ASSERT(f.argPassedInFloatReg(explicitArg));
            masm.passABIArg(MoveOperand(argsBase, argDisp), MoveOp::DOUBLE);
            argDisp += sizeof(double);
            break;
          case VMFunction::WordByRef:
            // Pass the address of the stack slot, not its contents.
            masm.passABIArg(MoveOperand(argsBase, argDisp, MoveOperand::EFFECTIVE_ADDRESS),
                            MoveOp::GENERAL);
            argDisp += sizeof(uint32_t);
            break;
          case VMFunction::DoubleByRef:
            masm.passABIArg(MoveOperand(argsBase, argDisp, MoveOperand::EFFECTIVE_ADDRESS),
                            MoveOp::GENERAL);
            argDisp += sizeof(double);
            break;
        }
    }

    // Copy the implicit outparam, if any.
    if (outReg != InvalidReg)
        masm.passABIArg(outReg);

    masm.callWithABI(f.wrapped);

    // Test for failure.
    switch (f.failType()) {
      case Type_Object:
        // A null object pointer signals failure.
        masm.branchTestPtr(Assembler::Zero, v0, v0, masm.failureLabel(f.executionMode));
        break;
      case Type_Bool:
        // Called functions return bools, which are 0/false and non-zero/true
        masm.branchIfFalseBool(v0, masm.failureLabel(f.executionMode));
        break;
      default:
        MOZ_ASSUME_UNREACHABLE("unknown failure kind");
    }

    // Load the outparam and free any allocated stack.
    switch (f.outParam) {
      case Type_Handle:
        if (f.outParamRootType == VMFunction::RootValue) {
            masm.alignPointerUp(StackPointer, SecondScratchReg, StackAlignment);
            masm.loadValue(Address(SecondScratchReg, 0), JSReturnOperand);
            masm.freeStack((StackAlignment - sizeof(uintptr_t)) + sizeof(Value));
        }
        else {
            masm.popRooted(f.outParamRootType, ReturnReg, JSReturnOperand);
        }
        break;

      case Type_Value:
        masm.alignPointerUp(StackPointer, SecondScratchReg, StackAlignment);
        masm.loadValue(Address(SecondScratchReg, 0), JSReturnOperand);
        masm.freeStack((StackAlignment - sizeof(uintptr_t)) + sizeof(Value));
        break;

      case Type_Int32:
        MOZ_ASSERT(sizeof(uintptr_t) == sizeof(uint32_t));
        // Fall through: read back the shared pointer-sized slot.
      case Type_Pointer:
        masm.load32(Address(StackPointer, 0), ReturnReg);
        masm.freeStack(sizeof(uintptr_t));
        break;

      case Type_Bool:
        masm.load8ZeroExtend(Address(StackPointer, 0), ReturnReg);
        masm.freeStack(sizeof(uintptr_t));
        break;

      case Type_Double:
        if (cx->runtime()->jitSupportsFloatingPoint) {
            masm.alignPointerUp(StackPointer, SecondScratchReg, StackAlignment);
            // Address is aligned, so we can use as_ld.
            masm.as_ld(ReturnFloatReg, SecondScratchReg, 0);
        } else {
            masm.assumeUnreachable("Unable to load into float reg, with no FP support.");
        }
        masm.freeStack((StackAlignment - sizeof(uintptr_t)) + sizeof(double));
        break;

      default:
        MOZ_ASSERT(f.outParam == Type_Void);
        break;
    }
    masm.leaveExitFrame();
    // Pop the exit frame, the explicit stack arguments and any extra values.
    masm.retn(Imm32(sizeof(IonExitFrameLayout) +
                    f.explicitStackSlots() * sizeof(uintptr_t) +
                    f.extraValuesToPop * sizeof(Value)));

    Linker linker(masm);
    AutoFlushICache afc("VMWrapper");
    JitCode *wrapper = linker.newCode<NoGC>(cx, JSC::OTHER_CODE);
    if (!wrapper)
        return nullptr;

    // linker.newCode may trigger a GC and sweep functionWrappers_ so we have
    // to use relookupOrAdd instead of add.
    if (!functionWrappers_->relookupOrAdd(p, &f, wrapper))
        return nullptr;

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(wrapper, "VMWrapper");
#endif

    return wrapper;
}
michael@0 | 857 | |
michael@0 | 858 | JitCode * |
michael@0 | 859 | JitRuntime::generatePreBarrier(JSContext *cx, MIRType type) |
michael@0 | 860 | { |
michael@0 | 861 | MacroAssembler masm(cx); |
michael@0 | 862 | |
michael@0 | 863 | RegisterSet save; |
michael@0 | 864 | if (cx->runtime()->jitSupportsFloatingPoint) { |
michael@0 | 865 | save = RegisterSet(GeneralRegisterSet(Registers::VolatileMask), |
michael@0 | 866 | FloatRegisterSet(FloatRegisters::VolatileMask)); |
michael@0 | 867 | } else { |
michael@0 | 868 | save = RegisterSet(GeneralRegisterSet(Registers::VolatileMask), |
michael@0 | 869 | FloatRegisterSet()); |
michael@0 | 870 | } |
michael@0 | 871 | masm.PushRegsInMask(save); |
michael@0 | 872 | |
michael@0 | 873 | MOZ_ASSERT(PreBarrierReg == a1); |
michael@0 | 874 | masm.movePtr(ImmPtr(cx->runtime()), a0); |
michael@0 | 875 | |
michael@0 | 876 | masm.setupUnalignedABICall(2, a2); |
michael@0 | 877 | masm.passABIArg(a0); |
michael@0 | 878 | masm.passABIArg(a1); |
michael@0 | 879 | |
michael@0 | 880 | if (type == MIRType_Value) { |
michael@0 | 881 | masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, MarkValueFromIon)); |
michael@0 | 882 | } else { |
michael@0 | 883 | MOZ_ASSERT(type == MIRType_Shape); |
michael@0 | 884 | masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, MarkShapeFromIon)); |
michael@0 | 885 | } |
michael@0 | 886 | |
michael@0 | 887 | masm.PopRegsInMask(save); |
michael@0 | 888 | masm.ret(); |
michael@0 | 889 | |
michael@0 | 890 | Linker linker(masm); |
michael@0 | 891 | AutoFlushICache afc("PreBarrier"); |
michael@0 | 892 | JitCode *code = linker.newCode<NoGC>(cx, JSC::OTHER_CODE); |
michael@0 | 893 | |
michael@0 | 894 | #ifdef JS_ION_PERF |
michael@0 | 895 | writePerfSpewerJitCodeProfile(code, "PreBarrier"); |
michael@0 | 896 | #endif |
michael@0 | 897 | |
michael@0 | 898 | return code; |
michael@0 | 899 | } |
michael@0 | 900 | |
// VM-call metadata for HandleDebugTrap(cx, frame, pc, mustReturn), used by
// generateDebugTrapHandler below to obtain the shared VM wrapper.
typedef bool (*HandleDebugTrapFn)(JSContext *, BaselineFrame *, uint8_t *, bool *);
static const VMFunction HandleDebugTrapInfo = FunctionInfo<HandleDebugTrapFn>(HandleDebugTrap);
michael@0 | 903 | |
michael@0 | 904 | JitCode * |
michael@0 | 905 | JitRuntime::generateDebugTrapHandler(JSContext *cx) |
michael@0 | 906 | { |
michael@0 | 907 | MacroAssembler masm(cx); |
michael@0 | 908 | |
michael@0 | 909 | Register scratch1 = t0; |
michael@0 | 910 | Register scratch2 = t1; |
michael@0 | 911 | |
michael@0 | 912 | // Load BaselineFrame pointer in scratch1. |
michael@0 | 913 | masm.movePtr(s5, scratch1); |
michael@0 | 914 | masm.subPtr(Imm32(BaselineFrame::Size()), scratch1); |
michael@0 | 915 | |
michael@0 | 916 | // Enter a stub frame and call the HandleDebugTrap VM function. Ensure |
michael@0 | 917 | // the stub frame has a nullptr ICStub pointer, since this pointer is |
michael@0 | 918 | // marked during GC. |
michael@0 | 919 | masm.movePtr(ImmPtr(nullptr), BaselineStubReg); |
michael@0 | 920 | EmitEnterStubFrame(masm, scratch2); |
michael@0 | 921 | |
michael@0 | 922 | JitCode *code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo); |
michael@0 | 923 | if (!code) |
michael@0 | 924 | return nullptr; |
michael@0 | 925 | |
michael@0 | 926 | masm.subPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer); |
michael@0 | 927 | masm.storePtr(ra, Address(StackPointer, sizeof(uintptr_t))); |
michael@0 | 928 | masm.storePtr(scratch1, Address(StackPointer, 0)); |
michael@0 | 929 | |
michael@0 | 930 | EmitCallVM(code, masm); |
michael@0 | 931 | |
michael@0 | 932 | EmitLeaveStubFrame(masm); |
michael@0 | 933 | |
michael@0 | 934 | // If the stub returns |true|, we have to perform a forced return |
michael@0 | 935 | // (return from the JS frame). If the stub returns |false|, just return |
michael@0 | 936 | // from the trap stub so that execution continues at the current pc. |
michael@0 | 937 | Label forcedReturn; |
michael@0 | 938 | masm.branchTest32(Assembler::NonZero, ReturnReg, ReturnReg, &forcedReturn); |
michael@0 | 939 | |
michael@0 | 940 | // ra was restored by EmitLeaveStubFrame |
michael@0 | 941 | masm.branch(ra); |
michael@0 | 942 | |
michael@0 | 943 | masm.bind(&forcedReturn); |
michael@0 | 944 | masm.loadValue(Address(s5, BaselineFrame::reverseOffsetOfReturnValue()), |
michael@0 | 945 | JSReturnOperand); |
michael@0 | 946 | masm.movePtr(s5, StackPointer); |
michael@0 | 947 | masm.pop(s5); |
michael@0 | 948 | masm.ret(); |
michael@0 | 949 | |
michael@0 | 950 | Linker linker(masm); |
michael@0 | 951 | AutoFlushICache afc("DebugTrapHandler"); |
michael@0 | 952 | JitCode *codeDbg = linker.newCode<NoGC>(cx, JSC::OTHER_CODE); |
michael@0 | 953 | |
michael@0 | 954 | #ifdef JS_ION_PERF |
michael@0 | 955 | writePerfSpewerJitCodeProfile(codeDbg, "DebugTrapHandler"); |
michael@0 | 956 | #endif |
michael@0 | 957 | |
michael@0 | 958 | return codeDbg; |
michael@0 | 959 | } |
michael@0 | 960 | |
michael@0 | 961 | |
michael@0 | 962 | JitCode * |
michael@0 | 963 | JitRuntime::generateExceptionTailStub(JSContext *cx) |
michael@0 | 964 | { |
michael@0 | 965 | MacroAssembler masm; |
michael@0 | 966 | |
michael@0 | 967 | masm.handleFailureWithHandlerTail(); |
michael@0 | 968 | |
michael@0 | 969 | Linker linker(masm); |
michael@0 | 970 | AutoFlushICache afc("ExceptionTailStub"); |
michael@0 | 971 | JitCode *code = linker.newCode<NoGC>(cx, JSC::OTHER_CODE); |
michael@0 | 972 | |
michael@0 | 973 | #ifdef JS_ION_PERF |
michael@0 | 974 | writePerfSpewerJitCodeProfile(code, "ExceptionTailStub"); |
michael@0 | 975 | #endif |
michael@0 | 976 | |
michael@0 | 977 | return code; |
michael@0 | 978 | } |
michael@0 | 979 | |
michael@0 | 980 | JitCode * |
michael@0 | 981 | JitRuntime::generateBailoutTailStub(JSContext *cx) |
michael@0 | 982 | { |
michael@0 | 983 | MacroAssembler masm; |
michael@0 | 984 | |
michael@0 | 985 | masm.generateBailoutTail(a1, a2); |
michael@0 | 986 | |
michael@0 | 987 | Linker linker(masm); |
michael@0 | 988 | AutoFlushICache afc("BailoutTailStub"); |
michael@0 | 989 | JitCode *code = linker.newCode<NoGC>(cx, JSC::OTHER_CODE); |
michael@0 | 990 | |
michael@0 | 991 | #ifdef JS_ION_PERF |
michael@0 | 992 | writePerfSpewerJitCodeProfile(code, "BailoutTailStub"); |
michael@0 | 993 | #endif |
michael@0 | 994 | |
michael@0 | 995 | return code; |
michael@0 | 996 | } |
michael@0 | 997 |