Wed, 31 Dec 2014 06:09:35 +0100
Cloned upstream origin tor-browser at tor-browser-31.3.0esr-4.5-1-build1
revision ID fc1c9ff7c1b2defdbc039f12214767608f46423f, for hacking purposes.
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sts=4 et sw=4 tw=99:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
7 #include "jscompartment.h"
9 #include "jit/Bailouts.h"
10 #include "jit/IonFrames.h"
11 #include "jit/IonLinker.h"
12 #include "jit/IonSpewer.h"
13 #include "jit/JitCompartment.h"
14 #include "jit/mips/Bailouts-mips.h"
15 #include "jit/mips/BaselineHelpers-mips.h"
16 #ifdef JS_ION_PERF
17 # include "jit/PerfSpewer.h"
18 #endif
19 #include "jit/VMFunctions.h"
21 #include "jit/ExecutionMode-inl.h"
23 using namespace js;
24 using namespace js::jit;
26 static_assert(sizeof(uintptr_t) == sizeof(uint32_t), "Not 64-bit clean.");
// Machine state saved by the EnterJIT trampoline and restored on exit.
// These are the o32 callee-saved registers; the field order defines the
// stack layout used by the offsetof() stores/loads in GeneratePrologue
// and GenerateReturn, so it must not be changed independently of them.
struct EnterJITRegs
{
    // Non-volatile (callee-saved) FPU register pairs, 8 bytes each.
    double f30;
    double f28;
    double f26;
    double f24;
    double f22;
    double f20;

    // empty slot for alignment (keeps the doubles 8-byte aligned given
    // the odd count of uintptr_t fields below)
    uintptr_t align;

    // non-volatile registers.
    uintptr_t ra;
    uintptr_t s7;
    uintptr_t s6;
    uintptr_t s5;
    uintptr_t s4;
    uintptr_t s3;
    uintptr_t s2;
    uintptr_t s1;
    uintptr_t s0;
};
// Layout of the EnterJIT C++ arguments as seen from the trampoline's
// incoming stack pointer. Under the o32 calling convention the first four
// arguments arrive in a0-a3 but still have reserved home slots on the
// stack; the remaining arguments are passed on the stack proper.
struct EnterJITArgs
{
    // First 4 argument placeholders (home slots for a0-a3).
    void *jitcode; // <- sp points here when function is entered.
    int maxArgc;
    Value *maxArgv;
    InterpreterFrame *fp;

    // Arguments passed on the stack (5th and later).
    CalleeToken calleeToken;
    JSObject *scopeChain;
    size_t numStackValues;
    Value *vp;
};
// Epilogue counterpart of GeneratePrologue: reloads every callee-saved
// register from the EnterJITRegs area on the stack, frees that area and
// returns to the C++ caller through $ra.
// NOTE(review): |returnCode| is unused in this body; the single caller
// passes ShortJump for it — confirm whether the parameter is vestigial.
static void
GenerateReturn(MacroAssembler &masm, int returnCode)
{
    // The prologue must have left exactly the EnterJITRegs save area pushed.
    MOZ_ASSERT(masm.framePushed() == sizeof(EnterJITRegs));

    // Restore non-volatile registers
    masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, s0)), s0);
    masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, s1)), s1);
    masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, s2)), s2);
    masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, s3)), s3);
    masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, s4)), s4);
    masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, s5)), s5);
    masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, s6)), s6);
    masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, s7)), s7);
    masm.loadPtr(Address(StackPointer, offsetof(EnterJITRegs, ra)), ra);

    // Restore non-volatile floating point registers
    masm.loadDouble(Address(StackPointer, offsetof(EnterJITRegs, f20)), f20);
    masm.loadDouble(Address(StackPointer, offsetof(EnterJITRegs, f22)), f22);
    masm.loadDouble(Address(StackPointer, offsetof(EnterJITRegs, f24)), f24);
    masm.loadDouble(Address(StackPointer, offsetof(EnterJITRegs, f26)), f26);
    masm.loadDouble(Address(StackPointer, offsetof(EnterJITRegs, f28)), f28);
    masm.loadDouble(Address(StackPointer, offsetof(EnterJITRegs, f30)), f30);

    // Pop the save area, then jump back through the restored return address.
    masm.freeStack(sizeof(EnterJITRegs));

    masm.branch(ra);
}
// Entry-side half of the EnterJIT trampoline: reserves an EnterJITRegs
// save area on the stack and spills all callee-saved GPRs and FPU
// registers into it. GenerateReturn must mirror this exactly.
static void
GeneratePrologue(MacroAssembler &masm)
{
    // Save non-volatile registers. These must be saved by the trampoline,
    // rather than the JIT'd code, because they are scanned by the conservative
    // scanner.
    masm.reserveStack(sizeof(EnterJITRegs));
    masm.storePtr(s0, Address(StackPointer, offsetof(EnterJITRegs, s0)));
    masm.storePtr(s1, Address(StackPointer, offsetof(EnterJITRegs, s1)));
    masm.storePtr(s2, Address(StackPointer, offsetof(EnterJITRegs, s2)));
    masm.storePtr(s3, Address(StackPointer, offsetof(EnterJITRegs, s3)));
    masm.storePtr(s4, Address(StackPointer, offsetof(EnterJITRegs, s4)));
    masm.storePtr(s5, Address(StackPointer, offsetof(EnterJITRegs, s5)));
    masm.storePtr(s6, Address(StackPointer, offsetof(EnterJITRegs, s6)));
    masm.storePtr(s7, Address(StackPointer, offsetof(EnterJITRegs, s7)));
    masm.storePtr(ra, Address(StackPointer, offsetof(EnterJITRegs, ra)));

    // as_sd (sdc1) requires the 8-byte alignment the EnterJITRegs layout
    // guarantees for these double slots.
    masm.as_sd(f20, StackPointer, offsetof(EnterJITRegs, f20));
    masm.as_sd(f22, StackPointer, offsetof(EnterJITRegs, f22));
    masm.as_sd(f24, StackPointer, offsetof(EnterJITRegs, f24));
    masm.as_sd(f26, StackPointer, offsetof(EnterJITRegs, f26));
    masm.as_sd(f28, StackPointer, offsetof(EnterJITRegs, f28));
    masm.as_sd(f30, StackPointer, offsetof(EnterJITRegs, f30));
}
122 /*
123 * This method generates a trampoline for a c++ function with the following
124 * signature:
125 * void enter(void *code, int argc, Value *argv, InterpreterFrame *fp,
126 * CalleeToken calleeToken, JSObject *scopeChain, Value *vp)
127 * ...using standard EABI calling convention
128 */
129 JitCode *
130 JitRuntime::generateEnterJIT(JSContext *cx, EnterJitType type)
131 {
132 const Register reg_code = a0;
133 const Register reg_argc = a1;
134 const Register reg_argv = a2;
135 const Register reg_frame = a3;
137 MOZ_ASSERT(OsrFrameReg == reg_frame);
139 MacroAssembler masm(cx);
140 GeneratePrologue(masm);
142 const Address slotToken(sp, sizeof(EnterJITRegs) + offsetof(EnterJITArgs, calleeToken));
143 const Address slotVp(sp, sizeof(EnterJITRegs) + offsetof(EnterJITArgs, vp));
145 // Save stack pointer into s4
146 masm.movePtr(StackPointer, s4);
148 // Load calleeToken into s2.
149 masm.loadPtr(slotToken, s2);
151 // Save stack pointer as baseline frame.
152 if (type == EnterJitBaseline)
153 masm.movePtr(StackPointer, BaselineFrameReg);
155 // Load the number of actual arguments into s3.
156 masm.loadPtr(slotVp, s3);
157 masm.unboxInt32(Address(s3, 0), s3);
159 /***************************************************************
160 Loop over argv vector, push arguments onto stack in reverse order
161 ***************************************************************/
163 masm.as_sll(s0, reg_argc, 3); // s0 = argc * 8
164 masm.addPtr(reg_argv, s0); // s0 = argv + argc * 8
166 // Loop over arguments, copying them from an unknown buffer onto the Ion
167 // stack so they can be accessed from JIT'ed code.
168 Label header, footer;
169 // If there aren't any arguments, don't do anything
170 masm.ma_b(s0, reg_argv, &footer, Assembler::BelowOrEqual, ShortJump);
171 {
172 masm.bind(&header);
174 masm.subPtr(Imm32(2 * sizeof(uintptr_t)), s0);
175 masm.subPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);
177 ValueOperand value = ValueOperand(s6, s7);
178 masm.loadValue(Address(s0, 0), value);
179 masm.storeValue(value, Address(StackPointer, 0));
181 masm.ma_b(s0, reg_argv, &header, Assembler::Above, ShortJump);
182 }
183 masm.bind(&footer);
185 masm.subPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);
186 masm.storePtr(s3, Address(StackPointer, sizeof(uintptr_t))); // actual arguments
187 masm.storePtr(s2, Address(StackPointer, 0)); // callee token
189 masm.subPtr(StackPointer, s4);
190 masm.makeFrameDescriptor(s4, JitFrame_Entry);
191 masm.push(s4); // descriptor
193 CodeLabel returnLabel;
194 if (type == EnterJitBaseline) {
195 // Handle OSR.
196 GeneralRegisterSet regs(GeneralRegisterSet::All());
197 regs.take(JSReturnOperand);
198 regs.take(OsrFrameReg);
199 regs.take(BaselineFrameReg);
200 regs.take(reg_code);
202 const Address slotNumStackValues(BaselineFrameReg, sizeof(EnterJITRegs) +
203 offsetof(EnterJITArgs, numStackValues));
204 const Address slotScopeChain(BaselineFrameReg, sizeof(EnterJITRegs) +
205 offsetof(EnterJITArgs, scopeChain));
207 Label notOsr;
208 masm.ma_b(OsrFrameReg, OsrFrameReg, ¬Osr, Assembler::Zero, ShortJump);
210 Register scratch = regs.takeAny();
212 Register numStackValues = regs.takeAny();
213 masm.load32(slotNumStackValues, numStackValues);
215 // Push return address, previous frame pointer.
216 masm.subPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);
217 masm.ma_li(scratch, returnLabel.dest());
218 masm.storePtr(scratch, Address(StackPointer, sizeof(uintptr_t)));
219 masm.storePtr(BaselineFrameReg, Address(StackPointer, 0));
221 // Reserve frame.
222 Register framePtr = BaselineFrameReg;
223 masm.subPtr(Imm32(BaselineFrame::Size()), StackPointer);
224 masm.movePtr(StackPointer, framePtr);
226 // Reserve space for locals and stack values.
227 masm.ma_sll(scratch, numStackValues, Imm32(3));
228 masm.subPtr(scratch, StackPointer);
230 // Enter exit frame.
231 masm.addPtr(Imm32(BaselineFrame::Size() + BaselineFrame::FramePointerOffset), scratch);
232 masm.makeFrameDescriptor(scratch, JitFrame_BaselineJS);
234 // Push frame descriptor and fake return address.
235 masm.reserveStack(2 * sizeof(uintptr_t));
236 masm.storePtr(scratch, Address(StackPointer, sizeof(uintptr_t))); // Frame descriptor
237 masm.storePtr(zero, Address(StackPointer, 0)); // fake return address
239 masm.enterFakeExitFrame();
241 masm.reserveStack(2 * sizeof(uintptr_t));
242 masm.storePtr(framePtr, Address(StackPointer, sizeof(uintptr_t))); // BaselineFrame
243 masm.storePtr(reg_code, Address(StackPointer, 0)); // jitcode
245 masm.setupUnalignedABICall(3, scratch);
246 masm.passABIArg(BaselineFrameReg); // BaselineFrame
247 masm.passABIArg(OsrFrameReg); // InterpreterFrame
248 masm.passABIArg(numStackValues);
249 masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, jit::InitBaselineFrameForOsr));
251 Register jitcode = regs.takeAny();
252 masm.loadPtr(Address(StackPointer, 0), jitcode);
253 masm.loadPtr(Address(StackPointer, sizeof(uintptr_t)), framePtr);
254 masm.freeStack(2 * sizeof(uintptr_t));
256 MOZ_ASSERT(jitcode != ReturnReg);
258 Label error;
259 masm.freeStack(IonExitFrameLayout::SizeWithFooter());
260 masm.addPtr(Imm32(BaselineFrame::Size()), framePtr);
261 masm.branchIfFalseBool(ReturnReg, &error);
263 masm.jump(jitcode);
265 // OOM: load error value, discard return address and previous frame
266 // pointer and return.
267 masm.bind(&error);
268 masm.movePtr(framePtr, StackPointer);
269 masm.addPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);
270 masm.moveValue(MagicValue(JS_ION_ERROR), JSReturnOperand);
271 masm.ma_li(scratch, returnLabel.dest());
272 masm.jump(scratch);
274 masm.bind(¬Osr);
275 // Load the scope chain in R1.
276 MOZ_ASSERT(R1.scratchReg() != reg_code);
277 masm.loadPtr(slotScopeChain, R1.scratchReg());
278 }
280 // Call the function with pushing return address to stack.
281 masm.ma_callIonHalfPush(reg_code);
283 if (type == EnterJitBaseline) {
284 // Baseline OSR will return here.
285 masm.bind(returnLabel.src());
286 if (!masm.addCodeLabel(returnLabel))
287 return nullptr;
288 }
290 // Pop arguments off the stack.
291 // s0 <- 8*argc (size of all arguments we pushed on the stack)
292 masm.pop(s0);
293 masm.rshiftPtr(Imm32(4), s0);
294 masm.addPtr(s0, StackPointer);
296 // Store the returned value into the slotVp
297 masm.loadPtr(slotVp, s1);
298 masm.storeValue(JSReturnOperand, Address(s1, 0));
300 // Restore non-volatile registers and return.
301 GenerateReturn(masm, ShortJump);
303 Linker linker(masm);
304 AutoFlushICache afc("GenerateEnterJIT");
305 JitCode *code = linker.newCode<NoGC>(cx, JSC::OTHER_CODE);
307 #ifdef JS_ION_PERF
308 writePerfSpewerJitCodeProfile(code, "EnterJIT");
309 #endif
311 return code;
312 }
// Builds the invalidation thunk: captures the full register state into an
// InvalidationBailoutStack, calls InvalidationBailout(), unwinds both the
// bailout frame and the invalidated Ion frame, then tails into the shared
// bailout-tail stub with the BailoutInfo pointer in a2.
JitCode *
JitRuntime::generateInvalidator(JSContext *cx)
{
    MacroAssembler masm(cx);

    // NOTE: Members ionScript_ and osiPointReturnAddress_ of
    // InvalidationBailoutStack are already on the stack.
    static const uint32_t STACK_DATA_SIZE = sizeof(InvalidationBailoutStack) -
                                            2 * sizeof(uintptr_t);

    // Stack has to be aligned here. If not, we will have to fix it.
    masm.checkStackAlignment();

    // Make room for data on stack.
    masm.subPtr(Imm32(STACK_DATA_SIZE), StackPointer);

    // Save general purpose registers
    for (uint32_t i = 0; i < Registers::Total; i++) {
        Address address = Address(StackPointer, InvalidationBailoutStack::offsetOfRegs() +
                                                i * sizeof(uintptr_t));
        masm.storePtr(Register::FromCode(i), address);
    }

    // Save floating point registers
    // We can use as_sd because stack is aligned.
    for (uint32_t i = 0; i < FloatRegisters::Total; i++)
        masm.as_sd(FloatRegister::FromCode(i), StackPointer,
                   InvalidationBailoutStack::offsetOfFpRegs() + i * sizeof(double));

    // Pass pointer to InvalidationBailoutStack structure.
    masm.movePtr(StackPointer, a0);

    // Reserve place for return value and BailoutInfo pointer
    masm.subPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);
    // Pass pointer to return value.
    masm.ma_addu(a1, StackPointer, Imm32(sizeof(uintptr_t)));
    // Pass pointer to BailoutInfo
    masm.movePtr(StackPointer, a2);

    masm.setupAlignedABICall(3);
    masm.passABIArg(a0);
    masm.passABIArg(a1);
    masm.passABIArg(a2);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, InvalidationBailout));

    // Reload the out-params written by InvalidationBailout.
    masm.loadPtr(Address(StackPointer, 0), a2);                  // BailoutInfo *
    masm.loadPtr(Address(StackPointer, sizeof(uintptr_t)), a1);  // frame size
    // Remove the return address, the IonScript, the register state
    // (InvalidationBailoutStack) and the space that was allocated for the
    // return value.
    masm.addPtr(Imm32(sizeof(InvalidationBailoutStack) + 2 * sizeof(uintptr_t)), StackPointer);
    // remove the space that this frame was using before the bailout
    // (computed by InvalidationBailout)
    masm.addPtr(a1, StackPointer);

    // Jump to shared bailout tail. The BailoutInfo pointer has to be in a2.
    JitCode *bailoutTail = cx->runtime()->jitRuntime()->getBailoutTail();
    masm.branch(bailoutTail);

    Linker linker(masm);
    AutoFlushICache afc("Invalidator");
    JitCode *code = linker.newCode<NoGC>(cx, JSC::OTHER_CODE);
    IonSpew(IonSpew_Invalidate, "   invalidation thunk created at %p", (void *) code->raw());

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "Invalidator");
#endif

    return code;
}
385 JitCode *
386 JitRuntime::generateArgumentsRectifier(JSContext *cx, ExecutionMode mode, void **returnAddrOut)
387 {
388 MacroAssembler masm(cx);
390 // ArgumentsRectifierReg contains the |nargs| pushed onto the current
391 // frame. Including |this|, there are (|nargs| + 1) arguments to copy.
392 MOZ_ASSERT(ArgumentsRectifierReg == s3);
394 Register numActArgsReg = t6;
395 Register calleeTokenReg = t7;
396 Register numArgsReg = t5;
398 // Copy number of actual arguments into numActArgsReg
399 masm.loadPtr(Address(StackPointer, IonRectifierFrameLayout::offsetOfNumActualArgs()),
400 numActArgsReg);
402 // Load the number of |undefined|s to push into t1.
403 masm.loadPtr(Address(StackPointer, IonRectifierFrameLayout::offsetOfCalleeToken()),
404 calleeTokenReg);
405 masm.load16ZeroExtend(Address(calleeTokenReg, JSFunction::offsetOfNargs()), numArgsReg);
407 masm.ma_subu(t1, numArgsReg, s3);
409 masm.moveValue(UndefinedValue(), ValueOperand(t3, t4));
411 masm.movePtr(StackPointer, t2); // Save %sp.
413 // Push undefined.
414 {
415 Label undefLoopTop;
416 masm.bind(&undefLoopTop);
418 masm.subPtr(Imm32(sizeof(Value)), StackPointer);
419 masm.storeValue(ValueOperand(t3, t4), Address(StackPointer, 0));
420 masm.sub32(Imm32(1), t1);
422 masm.ma_b(t1, t1, &undefLoopTop, Assembler::NonZero, ShortJump);
423 }
425 // Get the topmost argument.
426 masm.ma_sll(t0, s3, Imm32(3)); // t0 <- nargs * 8
427 masm.addPtr(t0, t2); // t2 <- t2(saved sp) + nargs * 8
428 masm.addPtr(Imm32(sizeof(IonRectifierFrameLayout)), t2);
430 // Push arguments, |nargs| + 1 times (to include |this|).
431 {
432 Label copyLoopTop, initialSkip;
434 masm.ma_b(&initialSkip, ShortJump);
436 masm.bind(©LoopTop);
437 masm.subPtr(Imm32(sizeof(Value)), t2);
438 masm.sub32(Imm32(1), s3);
440 masm.bind(&initialSkip);
442 MOZ_ASSERT(sizeof(Value) == 2 * sizeof(uint32_t));
443 // Read argument and push to stack.
444 masm.subPtr(Imm32(sizeof(Value)), StackPointer);
445 masm.load32(Address(t2, NUNBOX32_TYPE_OFFSET), t0);
446 masm.store32(t0, Address(StackPointer, NUNBOX32_TYPE_OFFSET));
447 masm.load32(Address(t2, NUNBOX32_PAYLOAD_OFFSET), t0);
448 masm.store32(t0, Address(StackPointer, NUNBOX32_PAYLOAD_OFFSET));
450 masm.ma_b(s3, s3, ©LoopTop, Assembler::NonZero, ShortJump);
451 }
453 // translate the framesize from values into bytes
454 masm.ma_addu(t0, numArgsReg, Imm32(1));
455 masm.lshiftPtr(Imm32(3), t0);
457 // Construct sizeDescriptor.
458 masm.makeFrameDescriptor(t0, JitFrame_Rectifier);
460 // Construct IonJSFrameLayout.
461 masm.subPtr(Imm32(3 * sizeof(uintptr_t)), StackPointer);
462 // Push actual arguments.
463 masm.storePtr(numActArgsReg, Address(StackPointer, 2 * sizeof(uintptr_t)));
464 // Push callee token.
465 masm.storePtr(calleeTokenReg, Address(StackPointer, sizeof(uintptr_t)));
466 // Push frame descriptor.
467 masm.storePtr(t0, Address(StackPointer, 0));
469 // Call the target function.
470 // Note that this code assumes the function is JITted.
471 masm.loadPtr(Address(calleeTokenReg, JSFunction::offsetOfNativeOrScript()), t1);
472 masm.loadBaselineOrIonRaw(t1, t1, mode, nullptr);
473 masm.ma_callIonHalfPush(t1);
475 uint32_t returnOffset = masm.currentOffset();
477 // arg1
478 // ...
479 // argN
480 // num actual args
481 // callee token
482 // sizeDescriptor <- sp now
483 // return address
485 // Remove the rectifier frame.
486 // t0 <- descriptor with FrameType.
487 masm.loadPtr(Address(StackPointer, 0), t0);
488 masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), t0); // t0 <- descriptor.
490 // Discard descriptor, calleeToken and number of actual arguments.
491 masm.addPtr(Imm32(3 * sizeof(uintptr_t)), StackPointer);
493 // arg1
494 // ...
495 // argN <- sp now; t0 <- frame descriptor
496 // num actual args
497 // callee token
498 // sizeDescriptor
499 // return address
501 // Discard pushed arguments.
502 masm.addPtr(t0, StackPointer);
504 masm.ret();
505 Linker linker(masm);
506 AutoFlushICache afc("ArgumentsRectifier");
507 JitCode *code = linker.newCode<NoGC>(cx, JSC::OTHER_CODE);
509 CodeOffsetLabel returnLabel(returnOffset);
510 returnLabel.fixup(&masm);
511 if (returnAddrOut)
512 *returnAddrOut = (void *) (code->raw() + returnLabel.offset());
514 #ifdef JS_ION_PERF
515 writePerfSpewerJitCodeProfile(code, "ArgumentsRectifier");
516 #endif
518 return code;
519 }
/* There are two different stack layouts when doing a bailout. They are
 * represented via class BailoutStack.
 *
 * - First case is when the bailout is done through a bailout table. In this
 *   case the table offset is stored in $ra (see
 *   JitRuntime::generateBailoutTable()) and the thunk code should save it on
 *   the stack. In this case frameClassId_ cannot be NO_FRAME_SIZE_CLASS_ID.
 *   Members snapshotOffset_ and padding_ are not on the stack.
 *
 * - The other case is when the bailout is done via out-of-line code (lazy
 *   bailout). In this case the frame size is stored in $ra (see
 *   CodeGeneratorMIPS::generateOutOfLineCode()) and the thunk code should
 *   save it on the stack. The other difference is that members
 *   snapshotOffset_ and padding_ are pushed to the stack by
 *   CodeGeneratorMIPS::visitOutOfLineBailout(). Field frameClassId_ is
 *   forced to be NO_FRAME_SIZE_CLASS_ID
 *   (See: JitRuntime::generateBailoutHandler).
 */
// Emits the common bailout thunk body: snapshots all registers into a
// BailoutStack, calls Bailout(), unwinds the bailed-out frame, and tails
// into the shared bailout-tail stub with BailoutInfo in a2.
static void
GenerateBailoutThunk(JSContext *cx, MacroAssembler &masm, uint32_t frameClass)
{
    // NOTE: Members snapshotOffset_ and padding_ of BailoutStack
    // are not stored in this function.
    static const uint32_t bailoutDataSize = sizeof(BailoutStack) - 2 * sizeof(uintptr_t);
    static const uint32_t bailoutInfoOutParamSize = 2 * sizeof(uintptr_t);

    // Make sure that alignment is proper.
    masm.checkStackAlignment();

    // Make room for data.
    masm.subPtr(Imm32(bailoutDataSize), StackPointer);

    // Save general purpose registers.
    for (uint32_t i = 0; i < Registers::Total; i++) {
        uint32_t off = BailoutStack::offsetOfRegs() + i * sizeof(uintptr_t);
        masm.storePtr(Register::FromCode(i), Address(StackPointer, off));
    }

    // Save floating point registers
    // We can use as_sd because stack is aligned.
    for (uintptr_t i = 0; i < FloatRegisters::Total; i++)
        masm.as_sd(FloatRegister::FromCode(i), StackPointer,
                   BailoutStack::offsetOfFpRegs() + i * sizeof(double));

    // Store the frameSize_ or tableOffset_ stored in ra
    // See: JitRuntime::generateBailoutTable()
    // See: CodeGeneratorMIPS::generateOutOfLineCode()
    masm.storePtr(ra, Address(StackPointer, BailoutStack::offsetOfFrameSize()));

    // Put frame class to stack
    masm.storePtr(ImmWord(frameClass), Address(StackPointer, BailoutStack::offsetOfFrameClass()));

    // Put pointer to BailoutStack as first argument to the Bailout()
    masm.movePtr(StackPointer, a0);
    // Put pointer to BailoutInfo
    masm.subPtr(Imm32(bailoutInfoOutParamSize), StackPointer);
    masm.storePtr(ImmPtr(nullptr), Address(StackPointer, 0));
    masm.movePtr(StackPointer, a1);

    masm.setupAlignedABICall(2);
    masm.passABIArg(a0);
    masm.passABIArg(a1);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, Bailout));

    // Get BailoutInfo pointer
    masm.loadPtr(Address(StackPointer, 0), a2);

    // Remove both the bailout frame and the topmost Ion frame's stack.
    if (frameClass == NO_FRAME_SIZE_CLASS_ID) {
        // Load frameSize from stack
        masm.loadPtr(Address(StackPointer,
                             bailoutInfoOutParamSize + BailoutStack::offsetOfFrameSize()), a1);

        // Remove complete BailoutStack class and data after it
        masm.addPtr(Imm32(sizeof(BailoutStack) + bailoutInfoOutParamSize), StackPointer);
        // Remove frame size from stack
        masm.addPtr(a1, StackPointer);
    } else {
        uint32_t frameSize = FrameSizeClass::FromClass(frameClass).frameSize();
        // Remove the data this function added and frame size.
        masm.addPtr(Imm32(bailoutDataSize + bailoutInfoOutParamSize + frameSize), StackPointer);
    }

    // Jump to shared bailout tail. The BailoutInfo pointer has to be in a2.
    JitCode *bailoutTail = cx->runtime()->jitRuntime()->getBailoutTail();
    masm.branch(bailoutTail);
}
// Builds a bailout table: BAILOUT_TABLE_SIZE identical bal+nop entries.
// Each entry branches-and-links past the end of the table, so $ra ends up
// holding the caller entry's table offset, which GenerateBailoutThunk then
// stores as tableOffset_.
JitCode *
JitRuntime::generateBailoutTable(JSContext *cx, uint32_t frameClass)
{
    MacroAssembler masm(cx);

    Label bailout;
    for (size_t i = 0; i < BAILOUT_TABLE_SIZE; i++) {
        // Calculate offset to the end of table
        int32_t offset = (BAILOUT_TABLE_SIZE - i) * BAILOUT_TABLE_ENTRY_SIZE;

        // We use the 'ra' as table offset later in GenerateBailoutThunk
        masm.as_bal(BOffImm16(offset));
        masm.nop();  // branch delay slot
    }
    // NOTE(review): |bailout| is only bound, never branched to; the bal
    // offsets above target this point directly.
    masm.bind(&bailout);

    GenerateBailoutThunk(cx, masm, frameClass);

    Linker linker(masm);
    AutoFlushICache afc("BailoutTable");
    JitCode *code = linker.newCode<NoGC>(cx, JSC::OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "BailoutTable");
#endif

    return code;
}
637 JitCode *
638 JitRuntime::generateBailoutHandler(JSContext *cx)
639 {
640 MacroAssembler masm(cx);
641 GenerateBailoutThunk(cx, masm, NO_FRAME_SIZE_CLASS_ID);
643 Linker linker(masm);
644 AutoFlushICache afc("BailoutHandler");
645 JitCode *code = linker.newCode<NoGC>(cx, JSC::OTHER_CODE);
647 #ifdef JS_ION_PERF
648 writePerfSpewerJitCodeProfile(code, "BailoutHandler");
649 #endif
651 return code;
652 }
// Generates (and caches) the C++ wrapper trampoline for the VM function
// |f|: links an exit frame, reserves an out-parameter slot when needed,
// marshals the stack-stored explicit arguments per o32 ABI, calls
// f.wrapped, checks for failure, loads the out-parameter into the JS
// return registers, and pops the caller's argument area on return.
JitCode *
JitRuntime::generateVMWrapper(JSContext *cx, const VMFunction &f)
{
    MOZ_ASSERT(functionWrappers_);
    MOZ_ASSERT(functionWrappers_->initialized());
    // Return the cached wrapper if one was already generated for |f|.
    VMWrapperMap::AddPtr p = functionWrappers_->lookupForAdd(&f);
    if (p)
        return p->value();

    MacroAssembler masm(cx);

    GeneralRegisterSet regs = GeneralRegisterSet(Register::Codes::WrapperMask);

    static_assert((Register::Codes::VolatileMask & ~Register::Codes::WrapperMask) == 0,
                  "Wrapper register set should be a superset of Volatile register set.");

    // The context is the first argument; a0 is the first argument register.
    Register cxreg = a0;
    regs.take(cxreg);

    // We're aligned to an exit frame, so link it up.
    masm.enterExitFrameAndLoadContext(&f, cxreg, regs.getAny(), f.executionMode);

    // Save the base of the argument set stored on the stack.
    Register argsBase = InvalidReg;
    if (f.explicitArgs) {
        argsBase = t1; // Use temporary register.
        regs.take(argsBase);
        masm.ma_addu(argsBase, StackPointer, Imm32(IonExitFrameLayout::SizeWithFooter()));
    }

    // Reserve space for the outparameter.
    Register outReg = InvalidReg;
    switch (f.outParam) {
      case Type_Value:
        outReg = t0; // Use temporary register.
        regs.take(outReg);
        // Value outparam has to be 8 byte aligned because the called
        // function can use sdc1 or ldc1 instructions to access it.
        masm.reserveStack((StackAlignment - sizeof(uintptr_t)) + sizeof(Value));
        masm.alignPointerUp(StackPointer, outReg, StackAlignment);
        break;

      case Type_Handle:
        outReg = t0;
        regs.take(outReg);
        if (f.outParamRootType == VMFunction::RootValue) {
            // Value outparam has to be 8 byte aligned because the called
            // function can use sdc1 or ldc1 instructions to access it.
            masm.reserveStack((StackAlignment - sizeof(uintptr_t)) + sizeof(Value));
            masm.alignPointerUp(StackPointer, outReg, StackAlignment);
            masm.storeValue(UndefinedValue(), Address(outReg, 0));
        }
        else {
            masm.PushEmptyRooted(f.outParamRootType);
            masm.movePtr(StackPointer, outReg);
        }
        break;

      case Type_Bool:
      case Type_Int32:
        MOZ_ASSERT(sizeof(uintptr_t) == sizeof(uint32_t));
        // Fall through: bool/int32/pointer all use one word-sized slot.
      case Type_Pointer:
        outReg = t0;
        regs.take(outReg);
        masm.reserveStack(sizeof(uintptr_t));
        masm.movePtr(StackPointer, outReg);
        break;

      case Type_Double:
        outReg = t0;
        regs.take(outReg);
        // Double outparam has to be 8 byte aligned because the called
        // function can use sdc1 or ldc1 instructions to access it.
        masm.reserveStack((StackAlignment - sizeof(uintptr_t)) + sizeof(double));
        masm.alignPointerUp(StackPointer, outReg, StackAlignment);
        break;

      default:
        MOZ_ASSERT(f.outParam == Type_Void);
        break;
    }

    masm.setupUnalignedABICall(f.argc(), regs.getAny());
    masm.passABIArg(cxreg);

    size_t argDisp = 0;

    // Copy any arguments.
    for (uint32_t explicitArg = 0; explicitArg < f.explicitArgs; explicitArg++) {
        // NOTE(review): |from| is declared but never used in this loop.
        MoveOperand from;
        switch (f.argProperties(explicitArg)) {
          case VMFunction::WordByValue:
            masm.passABIArg(MoveOperand(argsBase, argDisp), MoveOp::GENERAL);
            argDisp += sizeof(uint32_t);
            break;
          case VMFunction::DoubleByValue:
            // Values should be passed by reference, not by value, so we
            // assert that the argument is a double-precision float.
            MOZ_ASSERT(f.argPassedInFloatReg(explicitArg));
            masm.passABIArg(MoveOperand(argsBase, argDisp), MoveOp::DOUBLE);
            argDisp += sizeof(double);
            break;
          case VMFunction::WordByRef:
            masm.passABIArg(MoveOperand(argsBase, argDisp, MoveOperand::EFFECTIVE_ADDRESS),
                            MoveOp::GENERAL);
            argDisp += sizeof(uint32_t);
            break;
          case VMFunction::DoubleByRef:
            masm.passABIArg(MoveOperand(argsBase, argDisp, MoveOperand::EFFECTIVE_ADDRESS),
                            MoveOp::GENERAL);
            argDisp += sizeof(double);
            break;
        }
    }

    // Copy the implicit outparam, if any.
    if (outReg != InvalidReg)
        masm.passABIArg(outReg);

    masm.callWithABI(f.wrapped);

    // Test for failure.
    switch (f.failType()) {
      case Type_Object:
        masm.branchTestPtr(Assembler::Zero, v0, v0, masm.failureLabel(f.executionMode));
        break;
      case Type_Bool:
        // Called functions return bools, which are 0/false and non-zero/true
        masm.branchIfFalseBool(v0, masm.failureLabel(f.executionMode));
        break;
      default:
        MOZ_ASSUME_UNREACHABLE("unknown failure kind");
    }

    // Load the outparam and free any allocated stack.
    switch (f.outParam) {
      case Type_Handle:
        if (f.outParamRootType == VMFunction::RootValue) {
            // Recompute the aligned slot address before reading it back.
            masm.alignPointerUp(StackPointer, SecondScratchReg, StackAlignment);
            masm.loadValue(Address(SecondScratchReg, 0), JSReturnOperand);
            masm.freeStack((StackAlignment - sizeof(uintptr_t)) + sizeof(Value));
        }
        else {
            masm.popRooted(f.outParamRootType, ReturnReg, JSReturnOperand);
        }
        break;

      case Type_Value:
        masm.alignPointerUp(StackPointer, SecondScratchReg, StackAlignment);
        masm.loadValue(Address(SecondScratchReg, 0), JSReturnOperand);
        masm.freeStack((StackAlignment - sizeof(uintptr_t)) + sizeof(Value));
        break;

      case Type_Int32:
        MOZ_ASSERT(sizeof(uintptr_t) == sizeof(uint32_t));
        // Fall through: int32 and pointer results are both one word.
      case Type_Pointer:
        masm.load32(Address(StackPointer, 0), ReturnReg);
        masm.freeStack(sizeof(uintptr_t));
        break;

      case Type_Bool:
        masm.load8ZeroExtend(Address(StackPointer, 0), ReturnReg);
        masm.freeStack(sizeof(uintptr_t));
        break;

      case Type_Double:
        if (cx->runtime()->jitSupportsFloatingPoint) {
            masm.alignPointerUp(StackPointer, SecondScratchReg, StackAlignment);
            // Address is aligned, so we can use as_ld.
            masm.as_ld(ReturnFloatReg, SecondScratchReg, 0);
        } else {
            masm.assumeUnreachable("Unable to load into float reg, with no FP support.");
        }
        masm.freeStack((StackAlignment - sizeof(uintptr_t)) + sizeof(double));
        break;

      default:
        MOZ_ASSERT(f.outParam == Type_Void);
        break;
    }
    masm.leaveExitFrame();
    // Return and pop the exit frame plus the caller-pushed arguments.
    masm.retn(Imm32(sizeof(IonExitFrameLayout) +
                    f.explicitStackSlots() * sizeof(uintptr_t) +
                    f.extraValuesToPop * sizeof(Value)));

    Linker linker(masm);
    AutoFlushICache afc("VMWrapper");
    JitCode *wrapper = linker.newCode<NoGC>(cx, JSC::OTHER_CODE);
    if (!wrapper)
        return nullptr;

    // linker.newCode may trigger a GC and sweep functionWrappers_ so we have
    // to use relookupOrAdd instead of add.
    if (!functionWrappers_->relookupOrAdd(p, &f, wrapper))
        return nullptr;

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(wrapper, "VMWrapper");
#endif

    return wrapper;
}
858 JitCode *
859 JitRuntime::generatePreBarrier(JSContext *cx, MIRType type)
860 {
861 MacroAssembler masm(cx);
863 RegisterSet save;
864 if (cx->runtime()->jitSupportsFloatingPoint) {
865 save = RegisterSet(GeneralRegisterSet(Registers::VolatileMask),
866 FloatRegisterSet(FloatRegisters::VolatileMask));
867 } else {
868 save = RegisterSet(GeneralRegisterSet(Registers::VolatileMask),
869 FloatRegisterSet());
870 }
871 masm.PushRegsInMask(save);
873 MOZ_ASSERT(PreBarrierReg == a1);
874 masm.movePtr(ImmPtr(cx->runtime()), a0);
876 masm.setupUnalignedABICall(2, a2);
877 masm.passABIArg(a0);
878 masm.passABIArg(a1);
880 if (type == MIRType_Value) {
881 masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, MarkValueFromIon));
882 } else {
883 MOZ_ASSERT(type == MIRType_Shape);
884 masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, MarkShapeFromIon));
885 }
887 masm.PopRegsInMask(save);
888 masm.ret();
890 Linker linker(masm);
891 AutoFlushICache afc("PreBarrier");
892 JitCode *code = linker.newCode<NoGC>(cx, JSC::OTHER_CODE);
894 #ifdef JS_ION_PERF
895 writePerfSpewerJitCodeProfile(code, "PreBarrier");
896 #endif
898 return code;
899 }
// VM-function descriptor for HandleDebugTrap, invoked by the debug-trap
// handler generated below.
typedef bool (*HandleDebugTrapFn)(JSContext *, BaselineFrame *, uint8_t *, bool *);
static const VMFunction HandleDebugTrapInfo = FunctionInfo<HandleDebugTrapFn>(HandleDebugTrap);
// Builds the baseline debug-trap handler: calls the HandleDebugTrap VM
// function from a stub frame; on |true| performs a forced return from the
// JS frame, on |false| resumes at the current pc.
// NOTE(review): s5 is used here as the baseline frame pointer register —
// confirm against BaselineFrameReg for this port.
JitCode *
JitRuntime::generateDebugTrapHandler(JSContext *cx)
{
    MacroAssembler masm(cx);

    Register scratch1 = t0;
    Register scratch2 = t1;

    // Load BaselineFrame pointer in scratch1.
    masm.movePtr(s5, scratch1);
    masm.subPtr(Imm32(BaselineFrame::Size()), scratch1);

    // Enter a stub frame and call the HandleDebugTrap VM function. Ensure
    // the stub frame has a nullptr ICStub pointer, since this pointer is
    // marked during GC.
    masm.movePtr(ImmPtr(nullptr), BaselineStubReg);
    EmitEnterStubFrame(masm, scratch2);

    JitCode *code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
    if (!code)
        return nullptr;

    // Push the VM-call arguments: return address (pc) and frame pointer.
    masm.subPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);
    masm.storePtr(ra, Address(StackPointer, sizeof(uintptr_t)));
    masm.storePtr(scratch1, Address(StackPointer, 0));

    EmitCallVM(code, masm);

    EmitLeaveStubFrame(masm);

    // If the stub returns |true|, we have to perform a forced return
    // (return from the JS frame). If the stub returns |false|, just return
    // from the trap stub so that execution continues at the current pc.
    Label forcedReturn;
    masm.branchTest32(Assembler::NonZero, ReturnReg, ReturnReg, &forcedReturn);

    // ra was restored by EmitLeaveStubFrame
    masm.branch(ra);

    masm.bind(&forcedReturn);
    // Load the frame's return value, unwind the frame and return.
    masm.loadValue(Address(s5, BaselineFrame::reverseOffsetOfReturnValue()),
                   JSReturnOperand);
    masm.movePtr(s5, StackPointer);
    masm.pop(s5);
    masm.ret();

    Linker linker(masm);
    AutoFlushICache afc("DebugTrapHandler");
    JitCode *codeDbg = linker.newCode<NoGC>(cx, JSC::OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(codeDbg, "DebugTrapHandler");
#endif

    return codeDbg;
}
962 JitCode *
963 JitRuntime::generateExceptionTailStub(JSContext *cx)
964 {
965 MacroAssembler masm;
967 masm.handleFailureWithHandlerTail();
969 Linker linker(masm);
970 AutoFlushICache afc("ExceptionTailStub");
971 JitCode *code = linker.newCode<NoGC>(cx, JSC::OTHER_CODE);
973 #ifdef JS_ION_PERF
974 writePerfSpewerJitCodeProfile(code, "ExceptionTailStub");
975 #endif
977 return code;
978 }
980 JitCode *
981 JitRuntime::generateBailoutTailStub(JSContext *cx)
982 {
983 MacroAssembler masm;
985 masm.generateBailoutTail(a1, a2);
987 Linker linker(masm);
988 AutoFlushICache afc("BailoutTailStub");
989 JitCode *code = linker.newCode<NoGC>(cx, JSC::OTHER_CODE);
991 #ifdef JS_ION_PERF
992 writePerfSpewerJitCodeProfile(code, "BailoutTailStub");
993 #endif
995 return code;
996 }