Wed, 31 Dec 2014 06:09:35 +0100
Cloned upstream origin tor-browser at tor-browser-31.3.0esr-4.5-1-build1,
revision ID fc1c9ff7c1b2defdbc039f12214767608f46423f, for hacking purposes.
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "jit/x64/MacroAssembler-x64.h"

#include "jit/Bailouts.h"
#include "jit/BaselineFrame.h"
#include "jit/IonFrames.h"
#include "jit/JitCompartment.h"
#include "jit/MoveEmitter.h"

using namespace js;
using namespace js::jit;

void
MacroAssemblerX64::loadConstantDouble(double d, const FloatRegister &dest)
{
    if (maybeInlineDouble(d, dest))
        return;
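
    // A few special bit patterns (such as +0.0) can be materialized without
    // a memory load; everything else goes through the constant pool below.
    // OOM while growing the pool is recorded in enoughMemory_ and checked by
    // the caller after assembly, so these paths just return early.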
    if (!doubleMap_.initialized()) {
        enoughMemory_ &= doubleMap_.init();
        if (!enoughMemory_)
            return;
    }
    size_t doubleIndex;
    if (DoubleMap::AddPtr p = doubleMap_.lookupForAdd(d)) {
        doubleIndex = p->value();
    } else {
        doubleIndex = doubles_.length();
        enoughMemory_ &= doubles_.append(Double(d));
        enoughMemory_ &= doubleMap_.add(p, d, doubleIndex);
        if (!enoughMemory_)
            return;
    }
    Double &dbl = doubles_[doubleIndex];
    JS_ASSERT(!dbl.uses.bound());

    // The constants will be stored in a pool appended to the text (see
    // finish()), so they will always be a fixed distance from the
    // instructions which reference them. This allows the instructions to use
    // PC-relative addressing. Use "jump" label support code, because we need
    // the same PC-relative address patching that jumps use.
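    // Concretely, movsd_ripr below emits "movsd <disp32>(%rip), %xmmN" with a
    // placeholder displacement; each such use is threaded onto a per-constant
    // linked list via setNextJump(), and finish() patches the whole chain once
    // the pool entry's final address is known.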
    JmpSrc j = masm.movsd_ripr(dest.code());
    JmpSrc prev = JmpSrc(dbl.uses.use(j.offset()));
    masm.setNextJump(j, prev);
}

void
MacroAssemblerX64::loadConstantFloat32(float f, const FloatRegister &dest)
{
    if (maybeInlineFloat(f, dest))
        return;

    if (!floatMap_.initialized()) {
        enoughMemory_ &= floatMap_.init();
        if (!enoughMemory_)
            return;
    }
    size_t floatIndex;
    if (FloatMap::AddPtr p = floatMap_.lookupForAdd(f)) {
        floatIndex = p->value();
    } else {
        floatIndex = floats_.length();
        enoughMemory_ &= floats_.append(Float(f));
        enoughMemory_ &= floatMap_.add(p, f, floatIndex);
        if (!enoughMemory_)
            return;
    }
    Float &flt = floats_[floatIndex];
    JS_ASSERT(!flt.uses.bound());

    // See comment in loadConstantDouble.
    JmpSrc j = masm.movss_ripr(dest.code());
    JmpSrc prev = JmpSrc(flt.uses.use(j.offset()));
    masm.setNextJump(j, prev);
}

void
MacroAssemblerX64::finish()
{
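    // Emit the double and float constant pools at the end of the code, then
    // bind each constant's use chain so that every rip-relative load recorded
    // in loadConstantDouble/loadConstantFloat32 is patched to point at its
    // pool entry.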
    if (!doubles_.empty())
        masm.align(sizeof(double));
    for (size_t i = 0; i < doubles_.length(); i++) {
        Double &dbl = doubles_[i];
        bind(&dbl.uses);
        masm.doubleConstant(dbl.value);
    }

    if (!floats_.empty())
        masm.align(sizeof(float));
    for (size_t i = 0; i < floats_.length(); i++) {
        Float &flt = floats_[i];
        bind(&flt.uses);
        masm.floatConstant(flt.value);
    }

    MacroAssemblerX86Shared::finish();
}

void
MacroAssemblerX64::setupABICall(uint32_t args)
{
    JS_ASSERT(!inCall_);
    inCall_ = true;

    args_ = args;
    passedIntArgs_ = 0;
    passedFloatArgs_ = 0;
    stackForCall_ = ShadowStackSpace;
}

void
MacroAssemblerX64::setupAlignedABICall(uint32_t args)
{
    setupABICall(args);
    dynamicAlignment_ = false;
}

void
MacroAssemblerX64::setupUnalignedABICall(uint32_t args, const Register &scratch)
{
    setupABICall(args);
    dynamicAlignment_ = true;
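
    // Save the original rsp in |scratch|, round rsp down to the required
    // alignment, then push the saved rsp so that callWithABIPost can restore
    // it with pop(rsp) after the call.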
    movq(rsp, scratch);
    andq(Imm32(~(StackAlignment - 1)), rsp);
    push(scratch);
}

void
MacroAssemblerX64::passABIArg(const MoveOperand &from, MoveOp::Type type)
{
    MoveOperand to;
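    // Both counters are passed to GetFloatArgReg/GetIntArgReg because the
    // Win64 ABI assigns argument registers positionally across the int and
    // float classes, whereas the SysV ABI counts the two classes
    // independently; the ShadowStackSpace reserved in setupABICall likewise
    // exists only on Win64.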
    switch (type) {
      case MoveOp::FLOAT32:
      case MoveOp::DOUBLE: {
        FloatRegister dest;
        if (GetFloatArgReg(passedIntArgs_, passedFloatArgs_++, &dest)) {
            if (from.isFloatReg() && from.floatReg() == dest) {
                // Nothing to do; the value is in the right register already.
                return;
            }
            to = MoveOperand(dest);
        } else {
            to = MoveOperand(StackPointer, stackForCall_);
            switch (type) {
              case MoveOp::FLOAT32: stackForCall_ += sizeof(float); break;
              case MoveOp::DOUBLE: stackForCall_ += sizeof(double); break;
              default: MOZ_ASSUME_UNREACHABLE("Unexpected float register class argument type");
            }
        }
        break;
      }
      case MoveOp::GENERAL: {
        Register dest;
        if (GetIntArgReg(passedIntArgs_++, passedFloatArgs_, &dest)) {
            if (from.isGeneralReg() && from.reg() == dest) {
                // Nothing to do; the value is in the right register already.
                return;
            }
            to = MoveOperand(dest);
        } else {
            to = MoveOperand(StackPointer, stackForCall_);
            stackForCall_ += sizeof(int64_t);
        }
        break;
      }
      default:
        MOZ_ASSUME_UNREACHABLE("Unexpected argument type");
    }

    enoughMemory_ = moveResolver_.addMove(from, to, type);
}

void
MacroAssemblerX64::passABIArg(const Register &reg)
{
    passABIArg(MoveOperand(reg), MoveOp::GENERAL);
}

void
MacroAssemblerX64::passABIArg(const FloatRegister &reg, MoveOp::Type type)
{
    passABIArg(MoveOperand(reg), type);
}

void
MacroAssemblerX64::callWithABIPre(uint32_t *stackAdjust)
{
    JS_ASSERT(inCall_);
    JS_ASSERT(args_ == passedIntArgs_ + passedFloatArgs_);

    if (dynamicAlignment_) {
        *stackAdjust = stackForCall_
                     + ComputeByteAlignment(stackForCall_ + sizeof(intptr_t),
                                            StackAlignment);
    } else {
        *stackAdjust = stackForCall_
                     + ComputeByteAlignment(stackForCall_ + framePushed_,
                                            StackAlignment);
    }
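
    // The padding keeps rsp a multiple of StackAlignment at the call
    // instruction: the dynamically aligned path has only the one word pushed
    // by setupUnalignedABICall above its aligned base, while the aligned path
    // must account for everything pushed since the frame was aligned
    // (framePushed_).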
    reserveStack(*stackAdjust);

    // Position all arguments.
    {
        enoughMemory_ &= moveResolver_.resolve();
        if (!enoughMemory_)
            return;

        MoveEmitter emitter(*this);
        emitter.emit(moveResolver_);
        emitter.finish();
    }
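
    // In debug builds, dynamically verify the alignment computed above: rsp
    // must be a multiple of StackAlignment at the call instruction.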
#ifdef DEBUG
    {
        Label good;
        testq(rsp, Imm32(StackAlignment - 1));
        j(Equal, &good);
        breakpoint();
        bind(&good);
    }
#endif
}

void
MacroAssemblerX64::callWithABIPost(uint32_t stackAdjust, MoveOp::Type result)
{
    freeStack(stackAdjust);
    if (dynamicAlignment_)
        pop(rsp);

    JS_ASSERT(inCall_);
    inCall_ = false;
}

void
MacroAssemblerX64::callWithABI(void *fun, MoveOp::Type result)
{
    uint32_t stackAdjust;
    callWithABIPre(&stackAdjust);
    call(ImmPtr(fun));
    callWithABIPost(stackAdjust, result);
}

void
MacroAssemblerX64::callWithABI(AsmJSImmPtr imm, MoveOp::Type result)
{
    uint32_t stackAdjust;
    callWithABIPre(&stackAdjust);
    call(imm);
    callWithABIPost(stackAdjust, result);
}

static bool
IsIntArgReg(Register reg)
{
    for (uint32_t i = 0; i < NumIntArgRegs; i++) {
        if (IntArgRegs[i] == reg)
            return true;
    }

    return false;
}

void
MacroAssemblerX64::callWithABI(Address fun, MoveOp::Type result)
{
    if (IsIntArgReg(fun.base)) {
        // Callee register may be clobbered for an argument. Move the callee to
        // r10, a volatile, non-argument register.
        moveResolver_.addMove(MoveOperand(fun.base), MoveOperand(r10), MoveOp::GENERAL);
        fun.base = r10;
    }

    JS_ASSERT(!IsIntArgReg(fun.base));

    uint32_t stackAdjust;
    callWithABIPre(&stackAdjust);
    call(Operand(fun));
    callWithABIPost(stackAdjust, result);
}

void
MacroAssemblerX64::handleFailureWithHandler(void *handler)
{
    // Reserve space for exception information.
    subq(Imm32(sizeof(ResumeFromException)), rsp);
    movq(rsp, rax);

    // Ask for an exception handler.
    setupUnalignedABICall(1, rcx);
    passABIArg(rax);
    callWithABI(handler);

    JitCode *excTail = GetIonContext()->runtime->jitRuntime()->getExceptionTail();
    jmp(excTail);
}

void
MacroAssemblerX64::handleFailureWithHandlerTail()
{
    Label entryFrame;
    Label catch_;
    Label finally;
    Label return_;
    Label bailout;
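
    // The handler filled in the ResumeFromException record that
    // handleFailureWithHandler reserved at the top of the stack; dispatch on
    // its kind field.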
    loadPtr(Address(rsp, offsetof(ResumeFromException, kind)), rax);
    branch32(Assembler::Equal, rax, Imm32(ResumeFromException::RESUME_ENTRY_FRAME), &entryFrame);
    branch32(Assembler::Equal, rax, Imm32(ResumeFromException::RESUME_CATCH), &catch_);
    branch32(Assembler::Equal, rax, Imm32(ResumeFromException::RESUME_FINALLY), &finally);
    branch32(Assembler::Equal, rax, Imm32(ResumeFromException::RESUME_FORCED_RETURN), &return_);
    branch32(Assembler::Equal, rax, Imm32(ResumeFromException::RESUME_BAILOUT), &bailout);

    breakpoint(); // Invalid kind.

    // No exception handler. Load the error value, load the new stack pointer
    // and return from the entry frame.
    bind(&entryFrame);
    moveValue(MagicValue(JS_ION_ERROR), JSReturnOperand);
    loadPtr(Address(rsp, offsetof(ResumeFromException, stackPointer)), rsp);
    ret();

    // If we found a catch handler, this must be a baseline frame. Restore
    // state and jump to the catch block.
    bind(&catch_);
    loadPtr(Address(rsp, offsetof(ResumeFromException, target)), rax);
    loadPtr(Address(rsp, offsetof(ResumeFromException, framePointer)), rbp);
    loadPtr(Address(rsp, offsetof(ResumeFromException, stackPointer)), rsp);
    jmp(Operand(rax));

    // If we found a finally block, this must be a baseline frame. Push
    // two values expected by JSOP_RETSUB: BooleanValue(true) and the
    // exception.
    bind(&finally);
    ValueOperand exception = ValueOperand(rcx);
    loadValue(Address(rsp, offsetof(ResumeFromException, exception)), exception);

    loadPtr(Address(rsp, offsetof(ResumeFromException, target)), rax);
    loadPtr(Address(rsp, offsetof(ResumeFromException, framePointer)), rbp);
    loadPtr(Address(rsp, offsetof(ResumeFromException, stackPointer)), rsp);

    pushValue(BooleanValue(true));
    pushValue(exception);
    jmp(Operand(rax));

    // Only used in debug mode. Return BaselineFrame->returnValue() to the caller.
    bind(&return_);
    loadPtr(Address(rsp, offsetof(ResumeFromException, framePointer)), rbp);
    loadPtr(Address(rsp, offsetof(ResumeFromException, stackPointer)), rsp);
    loadValue(Address(rbp, BaselineFrame::reverseOffsetOfReturnValue()), JSReturnOperand);
    movq(rbp, rsp);
    pop(rbp);
    ret();

    // If we are bailing out to baseline to handle an exception, jump to
    // the bailout tail stub.
    bind(&bailout);
    loadPtr(Address(rsp, offsetof(ResumeFromException, bailoutInfo)), r9);
    mov(ImmWord(BAILOUT_RETURN_OK), rax);
    jmp(Operand(rsp, offsetof(ResumeFromException, target)));
}

#ifdef JSGC_GENERATIONAL

void
MacroAssemblerX64::branchPtrInNurseryRange(Register ptr, Register temp, Label *label)
{
    JS_ASSERT(ptr != temp);
    JS_ASSERT(ptr != ScratchReg);
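
    // |ptr| is in the nursery iff the unsigned distance (ptr - start) is
    // below the nursery size, so a single unsigned comparison does the whole
    // range check: add the negated start address and branch on Below.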
    const Nursery &nursery = GetIonContext()->runtime->gcNursery();
    movePtr(ImmWord(-ptrdiff_t(nursery.start())), ScratchReg);
    addPtr(ptr, ScratchReg);
    branchPtr(Assembler::Below, ScratchReg, Imm32(Nursery::NurserySize), label);
}

void
MacroAssemblerX64::branchValueIsNurseryObject(ValueOperand value, Register temp, Label *label)
{
    // A 'Value' representing the start of the nursery, tagged as a JSObject.
    const Nursery &nursery = GetIonContext()->runtime->gcNursery();
    Value start = ObjectValue(*reinterpret_cast<JSObject *>(nursery.start()));
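
    // Any object Value whose payload points into the nursery differs from
    // |start| by less than the nursery size: both boxed values carry the same
    // object tag, so subtracting the tagged start leaves just the untagged
    // offset, which the same unsigned-Below trick as above range-checks.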
    movePtr(ImmWord(-ptrdiff_t(start.asRawBits())), ScratchReg);
    addPtr(value.valueReg(), ScratchReg);
    branchPtr(Assembler::Below, ScratchReg, Imm32(Nursery::NurserySize), label);
}

#endif