js/src/jit/x86/MacroAssembler-x86.cpp

author       Michael Schloh von Bennewitz <michael@schloh.com>
date         Wed, 31 Dec 2014 06:09:35 +0100
changeset    0:6474c204b198
permissions  -rw-r--r--

Cloned from the upstream tor-browser origin at tor-browser-31.3.0esr-4.5-1-build1,
revision ID fc1c9ff7c1b2defdbc039f12214767608f46423f, for hacking purposes.

/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "jit/x86/MacroAssembler-x86.h"

#include "mozilla/Casting.h"

#include "jit/Bailouts.h"
#include "jit/BaselineFrame.h"
#include "jit/IonFrames.h"
#include "jit/MoveEmitter.h"

#include "jsscriptinlines.h"

using namespace js;
using namespace js::jit;

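// Return the constant-pool entry for |d|, creating it on first use. Doubles
// are deduplicated through doubleMap_; a nullptr return signals OOM, which is
// also recorded in enoughMemory_.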
MacroAssemblerX86::Double *
MacroAssemblerX86::getDouble(double d)
{
    if (!doubleMap_.initialized()) {
        enoughMemory_ &= doubleMap_.init();
        if (!enoughMemory_)
            return nullptr;
    }
    size_t doubleIndex;
    DoubleMap::AddPtr p = doubleMap_.lookupForAdd(d);
    if (p) {
        doubleIndex = p->value();
    } else {
        doubleIndex = doubles_.length();
        enoughMemory_ &= doubles_.append(Double(d));
        enoughMemory_ &= doubleMap_.add(p, d, doubleIndex);
        if (!enoughMemory_)
            return nullptr;
    }
    Double &dbl = doubles_[doubleIndex];
    JS_ASSERT(!dbl.uses.bound());
    return &dbl;
}

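// loadConstantDouble() and addConstantDouble() reference the double's
// constant-pool slot. The memory operand emitted here temporarily holds the
// offset of the previous use of the same constant, threading the uses into a
// chain that finish() resolves to the pool's final address.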
void
MacroAssemblerX86::loadConstantDouble(double d, const FloatRegister &dest)
{
    if (maybeInlineDouble(d, dest))
        return;
    Double *dbl = getDouble(d);
    if (!dbl)
        return;
    masm.movsd_mr(reinterpret_cast<const void *>(dbl->uses.prev()), dest.code());
    dbl->uses.setPrev(masm.size());
}

void
MacroAssemblerX86::addConstantDouble(double d, const FloatRegister &dest)
{
    Double *dbl = getDouble(d);
    if (!dbl)
        return;
    masm.addsd_mr(reinterpret_cast<const void *>(dbl->uses.prev()), dest.code());
    dbl->uses.setPrev(masm.size());
}

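// The float helpers below mirror the double helpers above, using floatMap_,
// floats_ and the single-precision movss/addss forms.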
MacroAssemblerX86::Float *
MacroAssemblerX86::getFloat(float f)
{
    if (!floatMap_.initialized()) {
        enoughMemory_ &= floatMap_.init();
        if (!enoughMemory_)
            return nullptr;
    }
    size_t floatIndex;
    FloatMap::AddPtr p = floatMap_.lookupForAdd(f);
    if (p) {
        floatIndex = p->value();
    } else {
        floatIndex = floats_.length();
        enoughMemory_ &= floats_.append(Float(f));
        enoughMemory_ &= floatMap_.add(p, f, floatIndex);
        if (!enoughMemory_)
            return nullptr;
    }
    Float &flt = floats_[floatIndex];
    JS_ASSERT(!flt.uses.bound());
    return &flt;
}

void
MacroAssemblerX86::loadConstantFloat32(float f, const FloatRegister &dest)
{
    if (maybeInlineFloat(f, dest))
        return;
    Float *flt = getFloat(f);
    if (!flt)
        return;
    masm.movss_mr(reinterpret_cast<const void *>(flt->uses.prev()), dest.code());
    flt->uses.setPrev(masm.size());
}

void
MacroAssemblerX86::addConstantFloat32(float f, const FloatRegister &dest)
{
    Float *flt = getFloat(f);
    if (!flt)
        return;
    masm.addss_mr(reinterpret_cast<const void *>(flt->uses.prev()), dest.code());
    flt->uses.setPrev(masm.size());
}

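// Emit the accumulated double and float constant pools at the end of the
// code and bind each entry's use chain through a CodeLabel, so the loads
// recorded above can be patched to the pool's final location.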
void
MacroAssemblerX86::finish()
{
    if (!doubles_.empty())
        masm.align(sizeof(double));
    for (size_t i = 0; i < doubles_.length(); i++) {
        CodeLabel cl(doubles_[i].uses);
        writeDoubleConstant(doubles_[i].value, cl.src());
        enoughMemory_ &= addCodeLabel(cl);
        if (!enoughMemory_)
            return;
    }

    if (!floats_.empty())
        masm.align(sizeof(float));
    for (size_t i = 0; i < floats_.length(); i++) {
        CodeLabel cl(floats_[i].uses);
        writeFloatConstant(floats_[i].value, cl.src());
        enoughMemory_ &= addCodeLabel(cl);
        if (!enoughMemory_)
            return;
    }
}

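// Begin an ABI call sequence: remember how many arguments to expect and
// reset the per-call bookkeeping. The sequence is closed by callWithABIPost().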
void
MacroAssemblerX86::setupABICall(uint32_t args)
{
    JS_ASSERT(!inCall_);
    inCall_ = true;

    args_ = args;
    passedArgs_ = 0;
    stackForCall_ = 0;
}

void
MacroAssemblerX86::setupAlignedABICall(uint32_t args)
{
    setupABICall(args);
    dynamicAlignment_ = false;
}

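// As above, but the current stack alignment is unknown: save esp in
// |scratch|, align esp down to StackAlignment, and push the saved value so
// that callWithABIPost() can restore it with pop(esp).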
void
MacroAssemblerX86::setupUnalignedABICall(uint32_t args, const Register &scratch)
{
    setupABICall(args);
    dynamicAlignment_ = true;

    movl(esp, scratch);
    andl(Imm32(~(StackAlignment - 1)), esp);
    push(scratch);
}

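// On x86 every ABI argument is passed on the stack: queue a move of |from|
// into the outgoing-argument area and advance the running stack size by the
// argument's width.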
void
MacroAssemblerX86::passABIArg(const MoveOperand &from, MoveOp::Type type)
{
    ++passedArgs_;
    MoveOperand to = MoveOperand(StackPointer, stackForCall_);
    switch (type) {
      case MoveOp::FLOAT32: stackForCall_ += sizeof(float); break;
      case MoveOp::DOUBLE:  stackForCall_ += sizeof(double); break;
      case MoveOp::INT32:   stackForCall_ += sizeof(int32_t); break;
      case MoveOp::GENERAL: stackForCall_ += sizeof(intptr_t); break;
      default: MOZ_ASSUME_UNREACHABLE("Unexpected argument type");
    }
    enoughMemory_ &= moveResolver_.addMove(from, to, type);
}

void
MacroAssemblerX86::passABIArg(const Register &reg)
{
    passABIArg(MoveOperand(reg), MoveOp::GENERAL);
}

void
MacroAssemblerX86::passABIArg(const FloatRegister &reg, MoveOp::Type type)
{
    passABIArg(MoveOperand(reg), type);
}

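// Finish building the outgoing frame: compute the stack adjustment that
// keeps the call site aligned, reserve it, and emit the queued argument
// moves. DEBUG builds also verify that esp is aligned before the call.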
void
MacroAssemblerX86::callWithABIPre(uint32_t *stackAdjust)
{
    JS_ASSERT(inCall_);
    JS_ASSERT(args_ == passedArgs_);

    if (dynamicAlignment_) {
        *stackAdjust = stackForCall_
                     + ComputeByteAlignment(stackForCall_ + sizeof(intptr_t),
                                            StackAlignment);
    } else {
        *stackAdjust = stackForCall_
                     + ComputeByteAlignment(stackForCall_ + framePushed_,
                                            StackAlignment);
    }

    reserveStack(*stackAdjust);

    // Position all arguments.
    {
        enoughMemory_ &= moveResolver_.resolve();
        if (!enoughMemory_)
            return;

        MoveEmitter emitter(*this);
        emitter.emit(moveResolver_);
        emitter.finish();
    }

#ifdef DEBUG
    {
        // Check call alignment.
        Label good;
        testl(esp, Imm32(StackAlignment - 1));
        j(Equal, &good);
        breakpoint();
        bind(&good);
    }
#endif
}

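// Tear down the outgoing frame. Floating-point results come back in x87
// st(0) under the native ABI, so they are spilled with fstp and reloaded
// into the SSE register ReturnFloatReg; the pre-call esp is then restored if
// dynamic alignment was used.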
void
MacroAssemblerX86::callWithABIPost(uint32_t stackAdjust, MoveOp::Type result)
{
    freeStack(stackAdjust);
    if (result == MoveOp::DOUBLE) {
        reserveStack(sizeof(double));
        fstp(Operand(esp, 0));
        loadDouble(Operand(esp, 0), ReturnFloatReg);
        freeStack(sizeof(double));
    } else if (result == MoveOp::FLOAT32) {
        reserveStack(sizeof(float));
        fstp32(Operand(esp, 0));
        loadFloat32(Operand(esp, 0), ReturnFloatReg);
        freeStack(sizeof(float));
    }
    if (dynamicAlignment_)
        pop(esp);

    JS_ASSERT(inCall_);
    inCall_ = false;
}

void
MacroAssemblerX86::callWithABI(void *fun, MoveOp::Type result)
{
    uint32_t stackAdjust;
    callWithABIPre(&stackAdjust);
    call(ImmPtr(fun));
    callWithABIPost(stackAdjust, result);
}

void
MacroAssemblerX86::callWithABI(AsmJSImmPtr fun, MoveOp::Type result)
{
    uint32_t stackAdjust;
    callWithABIPre(&stackAdjust);
    call(fun);
    callWithABIPost(stackAdjust, result);
}

void
MacroAssemblerX86::callWithABI(const Address &fun, MoveOp::Type result)
{
    uint32_t stackAdjust;
    callWithABIPre(&stackAdjust);
    call(Operand(fun));
    callWithABIPost(stackAdjust, result);
}

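// Exception handling: reserve a ResumeFromException record on the stack,
// hand its address to the C++ |handler|, then jump to the exception tail,
// which dispatches on the resume kind the handler filled in.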
void
MacroAssemblerX86::handleFailureWithHandler(void *handler)
{
    // Reserve space for exception information.
    subl(Imm32(sizeof(ResumeFromException)), esp);
    movl(esp, eax);

    // Ask for an exception handler.
    setupUnalignedABICall(1, ecx);
    passABIArg(eax);
    callWithABI(handler);

    JitCode *excTail = GetIonContext()->runtime->jitRuntime()->getExceptionTail();
    jmp(excTail);
}

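// The exception tail. The ResumeFromException record is still on top of the
// stack; dispatch on its |kind| field to one of the resume paths below.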
void
MacroAssemblerX86::handleFailureWithHandlerTail()
{
    Label entryFrame;
    Label catch_;
    Label finally;
    Label return_;
    Label bailout;

    loadPtr(Address(esp, offsetof(ResumeFromException, kind)), eax);
    branch32(Assembler::Equal, eax, Imm32(ResumeFromException::RESUME_ENTRY_FRAME), &entryFrame);
    branch32(Assembler::Equal, eax, Imm32(ResumeFromException::RESUME_CATCH), &catch_);
    branch32(Assembler::Equal, eax, Imm32(ResumeFromException::RESUME_FINALLY), &finally);
    branch32(Assembler::Equal, eax, Imm32(ResumeFromException::RESUME_FORCED_RETURN), &return_);
    branch32(Assembler::Equal, eax, Imm32(ResumeFromException::RESUME_BAILOUT), &bailout);

    breakpoint(); // Invalid kind.

    // No exception handler. Load the error value, load the new stack pointer
    // and return from the entry frame.
    bind(&entryFrame);
    moveValue(MagicValue(JS_ION_ERROR), JSReturnOperand);
    loadPtr(Address(esp, offsetof(ResumeFromException, stackPointer)), esp);
    ret();

    // If we found a catch handler, this must be a baseline frame. Restore state
    // and jump to the catch block.
    bind(&catch_);
    loadPtr(Address(esp, offsetof(ResumeFromException, target)), eax);
    loadPtr(Address(esp, offsetof(ResumeFromException, framePointer)), ebp);
    loadPtr(Address(esp, offsetof(ResumeFromException, stackPointer)), esp);
    jmp(Operand(eax));

    // If we found a finally block, this must be a baseline frame. Push
    // two values expected by JSOP_RETSUB: BooleanValue(true) and the
    // exception.
    bind(&finally);
    ValueOperand exception = ValueOperand(ecx, edx);
    loadValue(Address(esp, offsetof(ResumeFromException, exception)), exception);

    loadPtr(Address(esp, offsetof(ResumeFromException, target)), eax);
    loadPtr(Address(esp, offsetof(ResumeFromException, framePointer)), ebp);
    loadPtr(Address(esp, offsetof(ResumeFromException, stackPointer)), esp);

    pushValue(BooleanValue(true));
    pushValue(exception);
    jmp(Operand(eax));

    // Only used in debug mode. Return BaselineFrame->returnValue() to the caller.
    bind(&return_);
    loadPtr(Address(esp, offsetof(ResumeFromException, framePointer)), ebp);
    loadPtr(Address(esp, offsetof(ResumeFromException, stackPointer)), esp);
    loadValue(Address(ebp, BaselineFrame::reverseOffsetOfReturnValue()), JSReturnOperand);
    movl(ebp, esp);
    pop(ebp);
    ret();

    // If we are bailing out to baseline to handle an exception, jump to
    // the bailout tail stub.
    bind(&bailout);
    loadPtr(Address(esp, offsetof(ResumeFromException, bailoutInfo)), ecx);
    movl(Imm32(BAILOUT_RETURN_OK), eax);
    jmp(Operand(esp, offsetof(ResumeFromException, target)));
}

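// Compare a boxed Value against the constant |v| using the x86 nunbox
// layout: test the payload word first, then the type tag. For Equal both
// words must match before branching; for NotEqual a mismatch in either word
// takes the branch.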
void
MacroAssemblerX86::branchTestValue(Condition cond, const ValueOperand &value, const Value &v, Label *label)
{
    jsval_layout jv = JSVAL_TO_IMPL(v);
    if (v.isMarkable())
        cmpl(value.payloadReg(), ImmGCPtr(reinterpret_cast<gc::Cell *>(v.toGCThing())));
    else
        cmpl(value.payloadReg(), Imm32(jv.s.payload.i32));

    if (cond == Equal) {
        Label done;
        j(NotEqual, &done);
        {
            cmpl(value.typeReg(), Imm32(jv.s.tag));
            j(Equal, label);
        }
        bind(&done);
    } else {
        JS_ASSERT(cond == NotEqual);
        j(NotEqual, label);

        cmpl(value.typeReg(), Imm32(jv.s.tag));
        j(NotEqual, label);
    }
}

#ifdef JSGC_GENERATIONAL

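// Branch if |ptr| points into the GC nursery: compute ptr - nursery.start()
// in |temp| and take the branch when the unsigned result is below
// Nursery::NurserySize.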
void
MacroAssemblerX86::branchPtrInNurseryRange(Register ptr, Register temp, Label *label)
{
    JS_ASSERT(ptr != temp);
    JS_ASSERT(temp != InvalidReg);  // A temp register is required for x86.

    const Nursery &nursery = GetIonContext()->runtime->gcNursery();
    movePtr(ImmWord(-ptrdiff_t(nursery.start())), temp);
    addPtr(ptr, temp);
    branchPtr(Assembler::Below, temp, Imm32(Nursery::NurserySize), label);
}

void
MacroAssemblerX86::branchValueIsNurseryObject(ValueOperand value, Register temp, Label *label)
{
    Label done;

    branchTestObject(Assembler::NotEqual, value, &done);
    branchPtrInNurseryRange(value.payloadReg(), temp, label);

    bind(&done);
}

#endif
