/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "jit/Bailouts.h"
#include "jit/IonFrames.h"
#include "jit/IonLinker.h"
#include "jit/JitCompartment.h"
#ifdef JS_ION_PERF
# include "jit/PerfSpewer.h"
#endif
#include "jit/VMFunctions.h"
#include "jit/x64/BaselineHelpers-x64.h"

using namespace js;
using namespace js::jit;

// All registers to save and restore. This includes the stack pointer, since we
// use the ability to reference register values on the stack by index.
static const RegisterSet AllRegs =
    RegisterSet(GeneralRegisterSet(Registers::AllMask),
                FloatRegisterSet(FloatRegisters::AllMask));

/* This method generates a trampoline on x64 for a c++ function with
 * the following signature:
 *   bool blah(void *code, int argc, Value *argv, JSObject *scopeChain,
 *             Value *vp)
 *   ...using the standard x64 calling convention
 */
JitCode *
JitRuntime::generateEnterJIT(JSContext *cx, EnterJitType type)
{
    MacroAssembler masm(cx);

    const Register reg_code = IntArgReg0;
    const Register reg_argc = IntArgReg1;
    const Register reg_argv = IntArgReg2;
    JS_ASSERT(OsrFrameReg == IntArgReg3);

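    // Win64 passes only four integer arguments in registers and spills the
    // rest above its 32-byte shadow space; System V passes six in registers.
    // The remaining EnterJIT arguments are therefore read from the caller's
    // stack frame.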
#if defined(_WIN64)
    const Operand token = Operand(rbp, 16 + ShadowStackSpace);
    const Operand scopeChain = Operand(rbp, 24 + ShadowStackSpace);
    const Operand numStackValuesAddr = Operand(rbp, 32 + ShadowStackSpace);
    const Operand result = Operand(rbp, 40 + ShadowStackSpace);
#else
    const Register token = IntArgReg4;
    const Register scopeChain = IntArgReg5;
    const Operand numStackValuesAddr = Operand(rbp, 16 + ShadowStackSpace);
    const Operand result = Operand(rbp, 24 + ShadowStackSpace);
#endif

    // Save old stack frame pointer, set new stack frame pointer.
    masm.push(rbp);
    masm.mov(rsp, rbp);

    // Save non-volatile registers. These must be saved by the trampoline, rather
    // than by the JIT'd code, because they are scanned by the conservative scanner.
    masm.push(rbx);
    masm.push(r12);
    masm.push(r13);
    masm.push(r14);
    masm.push(r15);
#if defined(_WIN64)
    masm.push(rdi);
    masm.push(rsi);

    // 16-byte alignment for movdqa
    masm.subq(Imm32(16 * 10 + 8), rsp);

    masm.movdqa(xmm6, Operand(rsp, 16 * 0));
    masm.movdqa(xmm7, Operand(rsp, 16 * 1));
    masm.movdqa(xmm8, Operand(rsp, 16 * 2));
    masm.movdqa(xmm9, Operand(rsp, 16 * 3));
    masm.movdqa(xmm10, Operand(rsp, 16 * 4));
    masm.movdqa(xmm11, Operand(rsp, 16 * 5));
    masm.movdqa(xmm12, Operand(rsp, 16 * 6));
    masm.movdqa(xmm13, Operand(rsp, 16 * 7));
    masm.movdqa(xmm14, Operand(rsp, 16 * 8));
    masm.movdqa(xmm15, Operand(rsp, 16 * 9));
#endif

    // Push the EnterJIT sps mark.
    masm.spsMarkJit(&cx->runtime()->spsProfiler, rbp, rbx);

    // Save arguments passed in registers needed after function call.
    masm.push(result);

    // Remember stack depth without padding and arguments.
    masm.mov(rsp, r14);

    // Remember number of bytes occupied by argument vector
    masm.mov(reg_argc, r13);
    masm.shll(Imm32(3), r13);

    // Guarantee 16-byte alignment.
    // We push argc, callee token, frame size, and return address.
    // The latter two are 16 bytes together, so we only consider argc and the
    // token.
    masm.mov(rsp, r12);
    masm.subq(r13, r12);
    masm.subq(Imm32(8), r12);
    masm.andl(Imm32(0xf), r12);
    masm.subq(r12, rsp);

    /***************************************************************
    Loop over argv vector, push arguments onto stack in reverse order
    ***************************************************************/

    // r13 still stores the number of bytes in the argument vector.
    masm.addq(reg_argv, r13); // r13 points above last argument.

    // While r13 > reg_argv, push arguments.
    {
        Label header, footer;
        masm.bind(&header);

        masm.cmpq(r13, reg_argv);
        masm.j(AssemblerX86Shared::BelowOrEqual, &footer);

        masm.subq(Imm32(8), r13);
        masm.push(Operand(r13, 0));
        masm.jmp(&header);

        masm.bind(&footer);
    }

    // Push the number of actual arguments. |result| is used to store the
    // actual number of arguments without adding an extra argument to the enter
    // JIT.
    masm.movq(result, reg_argc);
    masm.unboxInt32(Operand(reg_argc, 0), reg_argc);
    masm.push(reg_argc);

    // Push the callee token.
    masm.push(token);

    /*****************************************************************
    Push the number of bytes we've pushed so far on the stack and call
    *****************************************************************/
    masm.subq(rsp, r14);

    // Create a frame descriptor.
    masm.makeFrameDescriptor(r14, JitFrame_Entry);
    masm.push(r14);

    CodeLabel returnLabel;
    if (type == EnterJitBaseline) {
        // Handle OSR.
        GeneralRegisterSet regs(GeneralRegisterSet::All());
        regs.takeUnchecked(OsrFrameReg);
        regs.take(rbp);
        regs.take(reg_code);

        // Ensure that |scratch| does not end up being JSReturnOperand.
        // Do takeUnchecked because on Win64/x64, reg_code (IntArgReg0) and JSReturnOperand are
        // the same (rcx). See bug 849398.
        regs.takeUnchecked(JSReturnOperand);
        Register scratch = regs.takeAny();

        Label notOsr;
        masm.branchTestPtr(Assembler::Zero, OsrFrameReg, OsrFrameReg, &notOsr);

        Register numStackValues = regs.takeAny();
        masm.movq(numStackValuesAddr, numStackValues);

        // Push return address, previous frame pointer.
        masm.mov(returnLabel.dest(), scratch);
        masm.push(scratch);
        masm.push(rbp);

        // Reserve frame.
        Register framePtr = rbp;
        masm.subPtr(Imm32(BaselineFrame::Size()), rsp);
        masm.mov(rsp, framePtr);

#ifdef XP_WIN
        // Can't push large frames blindly on Windows. Touch frame memory incrementally.
        masm.mov(numStackValues, scratch);
        masm.lshiftPtr(Imm32(3), scratch);
        masm.subPtr(scratch, framePtr);
        {
            masm.movePtr(rsp, scratch);
            masm.subPtr(Imm32(WINDOWS_BIG_FRAME_TOUCH_INCREMENT), scratch);

            Label touchFrameLoop;
            Label touchFrameLoopEnd;
            masm.bind(&touchFrameLoop);
            masm.branchPtr(Assembler::Below, scratch, framePtr, &touchFrameLoopEnd);
            masm.store32(Imm32(0), Address(scratch, 0));
            masm.subPtr(Imm32(WINDOWS_BIG_FRAME_TOUCH_INCREMENT), scratch);
            masm.jump(&touchFrameLoop);
            masm.bind(&touchFrameLoopEnd);
        }
        masm.mov(rsp, framePtr);
#endif

        // Reserve space for locals and stack values.
        Register valuesSize = regs.takeAny();
        masm.mov(numStackValues, valuesSize);
        masm.shll(Imm32(3), valuesSize);
        masm.subPtr(valuesSize, rsp);

        // Enter exit frame.
        masm.addPtr(Imm32(BaselineFrame::Size() + BaselineFrame::FramePointerOffset), valuesSize);
        masm.makeFrameDescriptor(valuesSize, JitFrame_BaselineJS);
        masm.push(valuesSize);
        masm.push(Imm32(0)); // Fake return address.
        masm.enterFakeExitFrame();

        regs.add(valuesSize);

        masm.push(framePtr);
        masm.push(reg_code);

        masm.setupUnalignedABICall(3, scratch);
        masm.passABIArg(framePtr); // BaselineFrame
        masm.passABIArg(OsrFrameReg); // InterpreterFrame
        masm.passABIArg(numStackValues);
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, jit::InitBaselineFrameForOsr));

        masm.pop(reg_code);
        masm.pop(framePtr);

        JS_ASSERT(reg_code != ReturnReg);

        Label error;
        masm.addPtr(Imm32(IonExitFrameLayout::SizeWithFooter()), rsp);
        masm.addPtr(Imm32(BaselineFrame::Size()), framePtr);
        masm.branchIfFalseBool(ReturnReg, &error);

        masm.jump(reg_code);

        // OOM: load error value, discard return address and previous frame
        // pointer and return.
        masm.bind(&error);
        masm.mov(framePtr, rsp);
        masm.addPtr(Imm32(2 * sizeof(uintptr_t)), rsp);
        masm.moveValue(MagicValue(JS_ION_ERROR), JSReturnOperand);
        masm.mov(returnLabel.dest(), scratch);
        masm.jump(scratch);

        masm.bind(&notOsr);
        masm.movq(scopeChain, R1.scratchReg());
    }

    // Call function.
    masm.call(reg_code);

    if (type == EnterJitBaseline) {
        // Baseline OSR will return here.
        masm.bind(returnLabel.src());
        if (!masm.addCodeLabel(returnLabel))
            return nullptr;
    }

    // Pop arguments and padding from stack.
    masm.pop(r14); // Pop and decode descriptor.
    masm.shrq(Imm32(FRAMESIZE_SHIFT), r14);
    masm.addq(r14, rsp); // Remove arguments.

    /*****************************************************************
    Place return value where it belongs, pop all saved registers
    *****************************************************************/
    masm.pop(r12); // vp
    masm.storeValue(JSReturnOperand, Operand(r12, 0));

    // Unwind the sps mark.
    masm.spsUnmarkJit(&cx->runtime()->spsProfiler, rbx);

    // Restore non-volatile registers.
#if defined(_WIN64)
    masm.movdqa(Operand(rsp, 16 * 0), xmm6);
    masm.movdqa(Operand(rsp, 16 * 1), xmm7);
    masm.movdqa(Operand(rsp, 16 * 2), xmm8);
    masm.movdqa(Operand(rsp, 16 * 3), xmm9);
    masm.movdqa(Operand(rsp, 16 * 4), xmm10);
    masm.movdqa(Operand(rsp, 16 * 5), xmm11);
    masm.movdqa(Operand(rsp, 16 * 6), xmm12);
    masm.movdqa(Operand(rsp, 16 * 7), xmm13);
    masm.movdqa(Operand(rsp, 16 * 8), xmm14);
    masm.movdqa(Operand(rsp, 16 * 9), xmm15);

    masm.addq(Imm32(16 * 10 + 8), rsp);

    masm.pop(rsi);
    masm.pop(rdi);
#endif
    masm.pop(r15);
    masm.pop(r14);
    masm.pop(r13);
    masm.pop(r12);
    masm.pop(rbx);

    // Restore frame pointer and return.
    masm.pop(rbp);
    masm.ret();

    Linker linker(masm);
    JitCode *code = linker.newCode<NoGC>(cx, JSC::OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "EnterJIT");
#endif

    return code;
}

JitCode *
JitRuntime::generateInvalidator(JSContext *cx)
{
    AutoIonContextAlloc aica(cx);
    MacroAssembler masm(cx);

    // See explanatory comment in x86's JitRuntime::generateInvalidator.

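    // Pop the return address pushed by the invalidation epilogue call
    // (see the x86 comment referenced above).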
    masm.addq(Imm32(sizeof(uintptr_t)), rsp);

    // Push registers such that we can access them from [base + code].
    masm.PushRegsInMask(AllRegs);

    masm.movq(rsp, rax); // Argument to jit::InvalidationBailout.

    // Make space for InvalidationBailout's frameSize outparam.
    masm.reserveStack(sizeof(size_t));
    masm.movq(rsp, rbx);

    // Make space for InvalidationBailout's bailoutInfo outparam.
    masm.reserveStack(sizeof(void *));
    masm.movq(rsp, r9);

    masm.setupUnalignedABICall(3, rdx);
    masm.passABIArg(rax);
    masm.passABIArg(rbx);
    masm.passABIArg(r9);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, InvalidationBailout));

    masm.pop(r9); // Get the bailoutInfo outparam.
    masm.pop(rbx); // Get the frameSize outparam.

    // Pop the machine state and the dead frame.
    masm.lea(Operand(rsp, rbx, TimesOne, sizeof(InvalidationBailoutStack)), rsp);

    // Jump to shared bailout tail. The BailoutInfo pointer has to be in r9.
    JitCode *bailoutTail = cx->runtime()->jitRuntime()->getBailoutTail();
    masm.jmp(bailoutTail);

    Linker linker(masm);
    JitCode *code = linker.newCode<NoGC>(cx, JSC::OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "Invalidator");
#endif

    return code;
}

JitCode *
JitRuntime::generateArgumentsRectifier(JSContext *cx, ExecutionMode mode, void **returnAddrOut)
{
    // Do not erase the frame pointer in this function.

    MacroAssembler masm(cx);

    // ArgumentsRectifierReg contains the |nargs| pushed onto the current frame.
    // Including |this|, there are (|nargs| + 1) arguments to copy.
    JS_ASSERT(ArgumentsRectifierReg == r8);

    // Load the number of |undefined|s to push into %rcx.
    masm.loadPtr(Address(rsp, IonRectifierFrameLayout::offsetOfCalleeToken()), rax);
    masm.movzwl(Operand(rax, JSFunction::offsetOfNargs()), rcx);
    masm.subq(r8, rcx);

    // Copy the number of actual arguments
    masm.loadPtr(Address(rsp, IonRectifierFrameLayout::offsetOfNumActualArgs()), rdx);

    masm.moveValue(UndefinedValue(), r10);

    masm.movq(rsp, r9); // Save %rsp.

    // Push undefined.
    {
        Label undefLoopTop;
        masm.bind(&undefLoopTop);

        masm.push(r10);
        masm.subl(Imm32(1), rcx);
        masm.j(Assembler::NonZero, &undefLoopTop);
    }

    // Get the topmost argument.
    BaseIndex b = BaseIndex(r9, r8, TimesEight, sizeof(IonRectifierFrameLayout));
    masm.lea(Operand(b), rcx);

    // Push arguments, |nargs| + 1 times (to include |this|).
    masm.addl(Imm32(1), r8);
    {
        Label copyLoopTop;

        masm.bind(&copyLoopTop);
        masm.push(Operand(rcx, 0x0));
        masm.subq(Imm32(sizeof(Value)), rcx);
        masm.subl(Imm32(1), r8);
        masm.j(Assembler::NonZero, &copyLoopTop);
    }

    // Construct descriptor.
    masm.subq(rsp, r9);
    masm.makeFrameDescriptor(r9, JitFrame_Rectifier);

    // Construct IonJSFrameLayout.
    masm.push(rdx); // numActualArgs
    masm.push(rax); // callee token
    masm.push(r9); // descriptor

    // Call the target function.
    // Note that this code assumes the function is JITted.
    masm.loadPtr(Address(rax, JSFunction::offsetOfNativeOrScript()), rax);
    masm.loadBaselineOrIonRaw(rax, rax, mode, nullptr);
    masm.call(rax);
    uint32_t returnOffset = masm.currentOffset();

    // Remove the rectifier frame.
    masm.pop(r9); // r9 <- descriptor with FrameType.
    masm.shrq(Imm32(FRAMESIZE_SHIFT), r9);
    masm.pop(r11); // Discard calleeToken.
    masm.pop(r11); // Discard numActualArgs.
    masm.addq(r9, rsp); // Discard pushed arguments.

    masm.ret();

    Linker linker(masm);
    JitCode *code = linker.newCode<NoGC>(cx, JSC::OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "ArgumentsRectifier");
#endif

    CodeOffsetLabel returnLabel(returnOffset);
    returnLabel.fixup(&masm);
    if (returnAddrOut)
        *returnAddrOut = (void *) (code->raw() + returnLabel.offset());
    return code;
}

static void
GenerateBailoutThunk(JSContext *cx, MacroAssembler &masm, uint32_t frameClass)
{
    // Push registers such that we can access them from [base + code].
    masm.PushRegsInMask(AllRegs);

    // Get the stack pointer into a register, pre-alignment.
    masm.movq(rsp, r8);

    // Make space for Bailout's bailoutInfo outparam.
    masm.reserveStack(sizeof(void *));
    masm.movq(rsp, r9);

    // Call the bailout function.
    masm.setupUnalignedABICall(2, rax);
    masm.passABIArg(r8);
    masm.passABIArg(r9);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, Bailout));

    masm.pop(r9); // Get the bailoutInfo outparam.

    // Stack is:
    //     [frame]
    //     snapshotOffset
    //     frameSize
    //     [bailoutFrame]
    //
    // Remove both the bailout frame and the topmost Ion frame's stack.
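    // BailoutDataSize must match the size of the register dump pushed by
    // PushRegsInMask(AllRegs) above.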
    static const uint32_t BailoutDataSize = sizeof(void *) * Registers::Total +
                                            sizeof(double) * FloatRegisters::Total;
    masm.addq(Imm32(BailoutDataSize), rsp);
    masm.pop(rcx);
    masm.lea(Operand(rsp, rcx, TimesOne, sizeof(void *)), rsp);

    // Jump to shared bailout tail. The BailoutInfo pointer has to be in r9.
    JitCode *bailoutTail = cx->runtime()->jitRuntime()->getBailoutTail();
    masm.jmp(bailoutTail);
}

JitCode *
JitRuntime::generateBailoutTable(JSContext *cx, uint32_t frameClass)
{
    MOZ_ASSUME_UNREACHABLE("x64 does not use bailout tables");
}

JitCode *
JitRuntime::generateBailoutHandler(JSContext *cx)
{
    MacroAssembler masm;

    GenerateBailoutThunk(cx, masm, NO_FRAME_SIZE_CLASS_ID);

    Linker linker(masm);
    JitCode *code = linker.newCode<NoGC>(cx, JSC::OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "BailoutHandler");
#endif

    return code;
}

JitCode *
JitRuntime::generateVMWrapper(JSContext *cx, const VMFunction &f)
{
    JS_ASSERT(!StackKeptAligned);
    JS_ASSERT(functionWrappers_);
    JS_ASSERT(functionWrappers_->initialized());
    VMWrapperMap::AddPtr p = functionWrappers_->lookupForAdd(&f);
    if (p)
        return p->value();

    // Generate separate code for the wrapper.
    MacroAssembler masm;

    // Avoid conflicts with argument registers while discarding the result after
    // the function call.
    GeneralRegisterSet regs = GeneralRegisterSet(Register::Codes::WrapperMask);

    // Wrapper register set is a superset of Volatile register set.
    JS_STATIC_ASSERT((Register::Codes::VolatileMask & ~Register::Codes::WrapperMask) == 0);

    // The context is the first argument.
    Register cxreg = IntArgReg0;
    regs.take(cxreg);

    // Stack is:
    //    ... frame ...
    //  +12 [args]
    //  +8  descriptor
    //  +0  returnAddress
    //
    // We're aligned to an exit frame, so link it up.
    masm.enterExitFrameAndLoadContext(&f, cxreg, regs.getAny(), f.executionMode);

    // Save the current stack pointer as the base for copying arguments.
    Register argsBase = InvalidReg;
    if (f.explicitArgs) {
        argsBase = r10;
        regs.take(argsBase);
        masm.lea(Operand(rsp, IonExitFrameLayout::SizeWithFooter()), argsBase);
    }

    // Reserve space for the outparameter.
    Register outReg = InvalidReg;
    switch (f.outParam) {
      case Type_Value:
        outReg = regs.takeAny();
        masm.reserveStack(sizeof(Value));
        masm.movq(esp, outReg);
        break;

      case Type_Handle:
        outReg = regs.takeAny();
        masm.PushEmptyRooted(f.outParamRootType);
        masm.movq(esp, outReg);
        break;

      case Type_Int32:
      case Type_Bool:
        outReg = regs.takeAny();
        masm.reserveStack(sizeof(int32_t));
        masm.movq(esp, outReg);
        break;

      case Type_Double:
        outReg = regs.takeAny();
        masm.reserveStack(sizeof(double));
        masm.movq(esp, outReg);
        break;

      case Type_Pointer:
        outReg = regs.takeAny();
        masm.reserveStack(sizeof(uintptr_t));
        masm.movq(esp, outReg);
        break;

      default:
        JS_ASSERT(f.outParam == Type_Void);
        break;
    }

    masm.setupUnalignedABICall(f.argc(), regs.getAny());
    masm.passABIArg(cxreg);

    size_t argDisp = 0;

    // Copy arguments.
    for (uint32_t explicitArg = 0; explicitArg < f.explicitArgs; explicitArg++) {
        MoveOperand from;
        switch (f.argProperties(explicitArg)) {
          case VMFunction::WordByValue:
            if (f.argPassedInFloatReg(explicitArg))
                masm.passABIArg(MoveOperand(argsBase, argDisp), MoveOp::DOUBLE);
            else
                masm.passABIArg(MoveOperand(argsBase, argDisp), MoveOp::GENERAL);
            argDisp += sizeof(void *);
            break;
          case VMFunction::WordByRef:
            masm.passABIArg(MoveOperand(argsBase, argDisp, MoveOperand::EFFECTIVE_ADDRESS),
                            MoveOp::GENERAL);
            argDisp += sizeof(void *);
            break;
          case VMFunction::DoubleByValue:
          case VMFunction::DoubleByRef:
            MOZ_ASSUME_UNREACHABLE("NYI: x64 callVM should not be used with 128bits values.");
        }
    }

    // Copy the implicit outparam, if any.
    if (outReg != InvalidReg)
        masm.passABIArg(outReg);

    masm.callWithABI(f.wrapped);

    // Test for failure.
    switch (f.failType()) {
      case Type_Object:
        masm.branchTestPtr(Assembler::Zero, rax, rax, masm.failureLabel(f.executionMode));
        break;
      case Type_Bool:
        masm.testb(rax, rax);
        masm.j(Assembler::Zero, masm.failureLabel(f.executionMode));
        break;
      default:
        MOZ_ASSUME_UNREACHABLE("unknown failure kind");
    }

    // Load the outparam and free any allocated stack.
    switch (f.outParam) {
      case Type_Handle:
        masm.popRooted(f.outParamRootType, ReturnReg, JSReturnOperand);
        break;

      case Type_Value:
        masm.loadValue(Address(esp, 0), JSReturnOperand);
        masm.freeStack(sizeof(Value));
        break;

      case Type_Int32:
        masm.load32(Address(esp, 0), ReturnReg);
        masm.freeStack(sizeof(int32_t));
        break;

      case Type_Bool:
        masm.load8ZeroExtend(Address(esp, 0), ReturnReg);
        masm.freeStack(sizeof(int32_t));
        break;

      case Type_Double:
        JS_ASSERT(cx->runtime()->jitSupportsFloatingPoint);
        masm.loadDouble(Address(esp, 0), ReturnFloatReg);
        masm.freeStack(sizeof(double));
        break;

      case Type_Pointer:
        masm.loadPtr(Address(esp, 0), ReturnReg);
        masm.freeStack(sizeof(uintptr_t));
        break;

      default:
        JS_ASSERT(f.outParam == Type_Void);
        break;
    }
    masm.leaveExitFrame();
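    // Return and pop the exit frame, the stack-copied arguments, and any extra
    // Values the VMFunction asks us to pop.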
    masm.retn(Imm32(sizeof(IonExitFrameLayout) +
                    f.explicitStackSlots() * sizeof(void *) +
                    f.extraValuesToPop * sizeof(Value)));

    Linker linker(masm);
    JitCode *wrapper = linker.newCode<NoGC>(cx, JSC::OTHER_CODE);
    if (!wrapper)
        return nullptr;

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(wrapper, "VMWrapper");
#endif

    // linker.newCode may trigger a GC and sweep functionWrappers_ so we have to
    // use relookupOrAdd instead of add.
    if (!functionWrappers_->relookupOrAdd(p, &f, wrapper))
        return nullptr;

    return wrapper;
}

JitCode *
JitRuntime::generatePreBarrier(JSContext *cx, MIRType type)
{
    MacroAssembler masm;

    RegisterSet regs = RegisterSet(GeneralRegisterSet(Registers::VolatileMask),
                                   FloatRegisterSet(FloatRegisters::VolatileMask));
    masm.PushRegsInMask(regs);

    JS_ASSERT(PreBarrierReg == rdx);
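    // Pass the runtime as the first argument; the second argument (the thing
    // to mark) is already in PreBarrierReg (rdx).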
    masm.mov(ImmPtr(cx->runtime()), rcx);

    masm.setupUnalignedABICall(2, rax);
    masm.passABIArg(rcx);
    masm.passABIArg(rdx);
    if (type == MIRType_Value) {
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, MarkValueFromIon));
    } else {
        JS_ASSERT(type == MIRType_Shape);
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, MarkShapeFromIon));
    }

    masm.PopRegsInMask(regs);
    masm.ret();

    Linker linker(masm);
    JitCode *code = linker.newCode<NoGC>(cx, JSC::OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "PreBarrier");
#endif

    return code;
}

typedef bool (*HandleDebugTrapFn)(JSContext *, BaselineFrame *, uint8_t *, bool *);
static const VMFunction HandleDebugTrapInfo = FunctionInfo<HandleDebugTrapFn>(HandleDebugTrap);

JitCode *
JitRuntime::generateDebugTrapHandler(JSContext *cx)
{
    MacroAssembler masm;

    Register scratch1 = rax;
    Register scratch2 = rcx;
    Register scratch3 = rdx;

    // Load the return address in scratch1.
    masm.loadPtr(Address(rsp, 0), scratch1);

    // Load BaselineFrame pointer in scratch2.
    masm.mov(rbp, scratch2);
    masm.subPtr(Imm32(BaselineFrame::Size()), scratch2);

    // Enter a stub frame and call the HandleDebugTrap VM function. Ensure
    // the stub frame has a nullptr ICStub pointer, since this pointer is marked
    // during GC.
    masm.movePtr(ImmPtr(nullptr), BaselineStubReg);
    EmitEnterStubFrame(masm, scratch3);

    JitCode *code = cx->runtime()->jitRuntime()->getVMWrapper(HandleDebugTrapInfo);
    if (!code)
        return nullptr;

    masm.push(scratch1);
    masm.push(scratch2);
    EmitCallVM(code, masm);

    EmitLeaveStubFrame(masm);

    // If the stub returns |true|, we have to perform a forced return
    // (return from the JS frame). If the stub returns |false|, just return
    // from the trap stub so that execution continues at the current pc.
    Label forcedReturn;
    masm.branchTest32(Assembler::NonZero, ReturnReg, ReturnReg, &forcedReturn);
    masm.ret();

    masm.bind(&forcedReturn);
    masm.loadValue(Address(ebp, BaselineFrame::reverseOffsetOfReturnValue()),
                   JSReturnOperand);
    masm.mov(rbp, rsp);
    masm.pop(rbp);
    masm.ret();

    Linker linker(masm);
    JitCode *codeDbg = linker.newCode<NoGC>(cx, JSC::OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(codeDbg, "DebugTrapHandler");
#endif

    return codeDbg;
}

JitCode *
JitRuntime::generateExceptionTailStub(JSContext *cx)
{
    MacroAssembler masm;

    masm.handleFailureWithHandlerTail();

    Linker linker(masm);
    JitCode *code = linker.newCode<NoGC>(cx, JSC::OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "ExceptionTailStub");
#endif

    return code;
}

JitCode *
JitRuntime::generateBailoutTailStub(JSContext *cx)
{
    MacroAssembler masm;

    masm.generateBailoutTail(rdx, r9);

    Linker linker(masm);
    JitCode *code = linker.newCode<NoGC>(cx, JSC::OTHER_CODE);

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "BailoutTailStub");
#endif

    return code;
}