Wed, 31 Dec 2014 06:09:35 +0100
Cloned upstream origin tor-browser at tor-browser-31.3.0esr-4.5-1-build1,
revision ID fc1c9ff7c1b2defdbc039f12214767608f46423f, for hacking purposes.
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "jit/x64/MacroAssembler-x64.h"

#include "jit/Bailouts.h"
#include "jit/BaselineFrame.h"
#include "jit/IonFrames.h"
#include "jit/JitCompartment.h"
#include "jit/MoveEmitter.h"

using namespace js;
using namespace js::jit;

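// Load the double constant |d| into |dest|. Values that can be synthesized
// inline are handled by maybeInlineDouble(); all others are deduplicated
// through doubleMap_ and loaded RIP-relative from a constant pool that
// finish() appends after the code.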
void
MacroAssemblerX64::loadConstantDouble(double d, const FloatRegister &dest)
{
    if (maybeInlineDouble(d, dest))
        return;

    if (!doubleMap_.initialized()) {
        enoughMemory_ &= doubleMap_.init();
        if (!enoughMemory_)
            return;
    }
    size_t doubleIndex;
    DoubleMap::AddPtr p = doubleMap_.lookupForAdd(d);
    if (p) {
        doubleIndex = p->value();
    } else {
        doubleIndex = doubles_.length();
        enoughMemory_ &= doubles_.append(Double(d));
        enoughMemory_ &= doubleMap_.add(p, d, doubleIndex);
        if (!enoughMemory_)
            return;
    }
    Double &dbl = doubles_[doubleIndex];
    JS_ASSERT(!dbl.uses.bound());

    // The constants will be stored in a pool appended to the text (see
    // finish()), so they will always be a fixed distance from the
    // instructions which reference them. This allows the instructions to use
    // PC-relative addressing. Use "jump" label support code, because we need
    // the same PC-relative address patching that jumps use.
    JmpSrc j = masm.movsd_ripr(dest.code());
    JmpSrc prev = JmpSrc(dbl.uses.use(j.offset()));
    masm.setNextJump(j, prev);
}

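// Single-precision counterpart of loadConstantDouble(): constants are pooled
// in floats_ and loaded with a RIP-relative movss.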
void
MacroAssemblerX64::loadConstantFloat32(float f, const FloatRegister &dest)
{
    if (maybeInlineFloat(f, dest))
        return;

    if (!floatMap_.initialized()) {
        enoughMemory_ &= floatMap_.init();
        if (!enoughMemory_)
            return;
    }
    size_t floatIndex;
    FloatMap::AddPtr p = floatMap_.lookupForAdd(f);
    if (p) {
        floatIndex = p->value();
    } else {
        floatIndex = floats_.length();
        enoughMemory_ &= floats_.append(Float(f));
        enoughMemory_ &= floatMap_.add(p, f, floatIndex);
        if (!enoughMemory_)
            return;
    }
    Float &flt = floats_[floatIndex];
    JS_ASSERT(!flt.uses.bound());

    // See comment in loadConstantDouble().
    JmpSrc j = masm.movss_ripr(dest.code());
    JmpSrc prev = JmpSrc(flt.uses.use(j.offset()));
    masm.setNextJump(j, prev);
}

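// Emit the accumulated constant pools after the generated code: align each
// pool, bind every pending RIP-relative use recorded above, and write out the
// raw double and float values.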
void
MacroAssemblerX64::finish()
{
    if (!doubles_.empty())
        masm.align(sizeof(double));
    for (size_t i = 0; i < doubles_.length(); i++) {
        Double &dbl = doubles_[i];
        bind(&dbl.uses);
        masm.doubleConstant(dbl.value);
    }

    if (!floats_.empty())
        masm.align(sizeof(float));
    for (size_t i = 0; i < floats_.length(); i++) {
        Float &flt = floats_[i];
        bind(&flt.uses);
        masm.floatConstant(flt.value);
    }

    MacroAssemblerX86Shared::finish();
}

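// Common setup for an ABI call: reset the per-call argument counters and
// start the outgoing stack area at ShadowStackSpace (the 32-byte spill area
// the Win64 ABI requires callers to reserve; zero on SysV targets).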
void
MacroAssemblerX64::setupABICall(uint32_t args)
{
    JS_ASSERT(!inCall_);
    inCall_ = true;

    args_ = args;
    passedIntArgs_ = 0;
    passedFloatArgs_ = 0;
    stackForCall_ = ShadowStackSpace;
}

void
MacroAssemblerX64::setupAlignedABICall(uint32_t args)
{
    setupABICall(args);
    dynamicAlignment_ = false;
}

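// Set up a call from code whose stack alignment is unknown: remember the old
// rsp in |scratch|, align the stack, and push the saved pointer so
// callWithABIPost() can restore it after the call.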
void
MacroAssemblerX64::setupUnalignedABICall(uint32_t args, const Register &scratch)
{
    setupABICall(args);
    dynamicAlignment_ = true;

    movq(rsp, scratch);
    andq(Imm32(~(StackAlignment - 1)), rsp);
    push(scratch);
}

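// Route one outgoing argument to its ABI location: the next free integer or
// float argument register if any remain, otherwise the next stack slot. The
// actual move is queued on the MoveResolver and emitted in callWithABIPre(),
// so moves that conflict with one another can be ordered safely.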
void
MacroAssemblerX64::passABIArg(const MoveOperand &from, MoveOp::Type type)
{
    MoveOperand to;
    switch (type) {
      case MoveOp::FLOAT32:
      case MoveOp::DOUBLE: {
        FloatRegister dest;
        if (GetFloatArgReg(passedIntArgs_, passedFloatArgs_++, &dest)) {
            if (from.isFloatReg() && from.floatReg() == dest) {
                // Nothing to do; the value is in the right register already.
                return;
            }
            to = MoveOperand(dest);
        } else {
            to = MoveOperand(StackPointer, stackForCall_);
            switch (type) {
              case MoveOp::FLOAT32: stackForCall_ += sizeof(float); break;
              case MoveOp::DOUBLE: stackForCall_ += sizeof(double); break;
              default: MOZ_ASSUME_UNREACHABLE("Unexpected float register class argument type");
            }
        }
        break;
      }
      case MoveOp::GENERAL: {
        Register dest;
        if (GetIntArgReg(passedIntArgs_++, passedFloatArgs_, &dest)) {
            if (from.isGeneralReg() && from.reg() == dest) {
                // Nothing to do; the value is in the right register already.
                return;
            }
            to = MoveOperand(dest);
        } else {
            to = MoveOperand(StackPointer, stackForCall_);
            stackForCall_ += sizeof(int64_t);
        }
        break;
      }
      default:
        MOZ_ASSUME_UNREACHABLE("Unexpected argument type");
    }

    enoughMemory_ &= moveResolver_.addMove(from, to, type);
}

void
MacroAssemblerX64::passABIArg(const Register &reg)
{
    passABIArg(MoveOperand(reg), MoveOp::GENERAL);
}

void
MacroAssemblerX64::passABIArg(const FloatRegister &reg, MoveOp::Type type)
{
    passABIArg(MoveOperand(reg), type);
}

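// Finish argument setup immediately before the call: compute how much stack
// to reserve so rsp is StackAlignment-aligned at the call instruction, flush
// the queued argument moves, and, in debug builds, trap if the resulting
// alignment is wrong.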
void
MacroAssemblerX64::callWithABIPre(uint32_t *stackAdjust)
{
    JS_ASSERT(inCall_);
    JS_ASSERT(args_ == passedIntArgs_ + passedFloatArgs_);

    if (dynamicAlignment_) {
        *stackAdjust = stackForCall_
                     + ComputeByteAlignment(stackForCall_ + sizeof(intptr_t),
                                            StackAlignment);
    } else {
        *stackAdjust = stackForCall_
                     + ComputeByteAlignment(stackForCall_ + framePushed_,
                                            StackAlignment);
    }

    reserveStack(*stackAdjust);

    // Position all arguments.
    {
        enoughMemory_ &= moveResolver_.resolve();
        if (!enoughMemory_)
            return;

        MoveEmitter emitter(*this);
        emitter.emit(moveResolver_);
        emitter.finish();
    }

#ifdef DEBUG
    {
        Label good;
        testq(rsp, Imm32(StackAlignment - 1));
        j(Equal, &good);
        breakpoint();
        bind(&good);
    }
#endif
}

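// Tear down after the call: release the reserved stack and, for unaligned
// calls, pop the caller rsp saved by setupUnalignedABICall(). The |result|
// type is unused on x64, where return values already arrive in rax or xmm0.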
void
MacroAssemblerX64::callWithABIPost(uint32_t stackAdjust, MoveOp::Type result)
{
    freeStack(stackAdjust);
    if (dynamicAlignment_)
        pop(rsp);

    JS_ASSERT(inCall_);
    inCall_ = false;
}

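// Call a C++ function at a known address, bracketed by the pre/post fixups
// above. The AsmJSImmPtr overload is the same call through an address that is
// patched in when the asm.js module is linked.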
void
MacroAssemblerX64::callWithABI(void *fun, MoveOp::Type result)
{
    uint32_t stackAdjust;
    callWithABIPre(&stackAdjust);
    call(ImmPtr(fun));
    callWithABIPost(stackAdjust, result);
}

void
MacroAssemblerX64::callWithABI(AsmJSImmPtr imm, MoveOp::Type result)
{
    uint32_t stackAdjust;
    callWithABIPre(&stackAdjust);
    call(imm);
    callWithABIPost(stackAdjust, result);
}

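// Return true if |reg| is one of the x64 integer argument registers and may
// therefore be clobbered while the outgoing arguments are positioned.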
static bool
IsIntArgReg(Register reg)
{
    for (uint32_t i = 0; i < NumIntArgRegs; i++) {
        if (IntArgRegs[i] == reg)
            return true;
    }

    return false;
}

void
MacroAssemblerX64::callWithABI(Address fun, MoveOp::Type result)
{
    if (IsIntArgReg(fun.base)) {
        // Callee register may be clobbered for an argument. Move the callee to
        // r10, a volatile, non-argument register.
        moveResolver_.addMove(MoveOperand(fun.base), MoveOperand(r10), MoveOp::GENERAL);
        fun.base = r10;
    }

    JS_ASSERT(!IsIntArgReg(fun.base));

    uint32_t stackAdjust;
    callWithABIPre(&stackAdjust);
    call(Operand(fun));
    callWithABIPost(stackAdjust, result);
}

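// An exception was thrown from JIT code. Reserve a ResumeFromException record
// on the stack, pass its address to the C++ |handler|, then jump to the
// shared exception tail, which dispatches on the resume kind the handler
// filled in.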
void
MacroAssemblerX64::handleFailureWithHandler(void *handler)
{
    // Reserve space for exception information.
    subq(Imm32(sizeof(ResumeFromException)), rsp);
    movq(rsp, rax);

    // Ask for an exception handler.
    setupUnalignedABICall(1, rcx);
    passABIArg(rax);
    callWithABI(handler);

    JitCode *excTail = GetIonContext()->runtime->jitRuntime()->getExceptionTail();
    jmp(excTail);
}

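// Shared exception tail: rsp points at the ResumeFromException record filled
// in by the handler; branch on its |kind| field to the matching resume path.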
void
MacroAssemblerX64::handleFailureWithHandlerTail()
{
    Label entryFrame;
    Label catch_;
    Label finally;
    Label return_;
    Label bailout;

    loadPtr(Address(rsp, offsetof(ResumeFromException, kind)), rax);
    branch32(Assembler::Equal, rax, Imm32(ResumeFromException::RESUME_ENTRY_FRAME), &entryFrame);
    branch32(Assembler::Equal, rax, Imm32(ResumeFromException::RESUME_CATCH), &catch_);
    branch32(Assembler::Equal, rax, Imm32(ResumeFromException::RESUME_FINALLY), &finally);
    branch32(Assembler::Equal, rax, Imm32(ResumeFromException::RESUME_FORCED_RETURN), &return_);
    branch32(Assembler::Equal, rax, Imm32(ResumeFromException::RESUME_BAILOUT), &bailout);

    breakpoint(); // Invalid kind.

    // No exception handler. Load the error value, load the new stack pointer
    // and return from the entry frame.
    bind(&entryFrame);
    moveValue(MagicValue(JS_ION_ERROR), JSReturnOperand);
    loadPtr(Address(rsp, offsetof(ResumeFromException, stackPointer)), rsp);
    ret();

    // If we found a catch handler, this must be a baseline frame. Restore state
    // and jump to the catch block.
    bind(&catch_);
    loadPtr(Address(rsp, offsetof(ResumeFromException, target)), rax);
    loadPtr(Address(rsp, offsetof(ResumeFromException, framePointer)), rbp);
    loadPtr(Address(rsp, offsetof(ResumeFromException, stackPointer)), rsp);
    jmp(Operand(rax));

    // If we found a finally block, this must be a baseline frame. Push
    // two values expected by JSOP_RETSUB: BooleanValue(true) and the
    // exception.
    bind(&finally);
    ValueOperand exception = ValueOperand(rcx);
    loadValue(Address(rsp, offsetof(ResumeFromException, exception)), exception);

    loadPtr(Address(rsp, offsetof(ResumeFromException, target)), rax);
    loadPtr(Address(rsp, offsetof(ResumeFromException, framePointer)), rbp);
    loadPtr(Address(rsp, offsetof(ResumeFromException, stackPointer)), rsp);

    pushValue(BooleanValue(true));
    pushValue(exception);
    jmp(Operand(rax));

    // Only used in debug mode. Return BaselineFrame->returnValue() to the caller.
    bind(&return_);
    loadPtr(Address(rsp, offsetof(ResumeFromException, framePointer)), rbp);
    loadPtr(Address(rsp, offsetof(ResumeFromException, stackPointer)), rsp);
    loadValue(Address(rbp, BaselineFrame::reverseOffsetOfReturnValue()), JSReturnOperand);
    movq(rbp, rsp);
    pop(rbp);
    ret();

    // If we are bailing out to baseline to handle an exception, jump to
    // the bailout tail stub.
    bind(&bailout);
    loadPtr(Address(rsp, offsetof(ResumeFromException, bailoutInfo)), r9);
    mov(ImmWord(BAILOUT_RETURN_OK), rax);
    jmp(Operand(rsp, offsetof(ResumeFromException, target)));
}

#ifdef JSGC_GENERATIONAL

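// Branch to |label| if |ptr| points into the nursery. Adding the negated
// nursery start and comparing unsigned against the nursery size tests
// start <= ptr < start + size with a single branch.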
void
MacroAssemblerX64::branchPtrInNurseryRange(Register ptr, Register temp, Label *label)
{
    JS_ASSERT(ptr != temp);
    JS_ASSERT(ptr != ScratchReg);

    const Nursery &nursery = GetIonContext()->runtime->gcNursery();
    movePtr(ImmWord(-ptrdiff_t(nursery.start())), ScratchReg);
    addPtr(ptr, ScratchReg);
    branchPtr(Assembler::Below, ScratchReg, Imm32(Nursery::NurserySize), label);
}

void
MacroAssemblerX64::branchValueIsNurseryObject(ValueOperand value, Register temp, Label *label)
{
    // 'Value' representing the start of the nursery, tagged as a JSObject.
    const Nursery &nursery = GetIonContext()->runtime->gcNursery();
    Value start = ObjectValue(*reinterpret_cast<JSObject *>(nursery.start()));

    movePtr(ImmWord(-ptrdiff_t(start.asRawBits())), ScratchReg);
    addPtr(value.valueReg(), ScratchReg);
    branchPtr(Assembler::Below, ScratchReg, Imm32(Nursery::NurserySize), label);
}

#endif