js/src/jit/IonBuilder.cpp


author      Michael Schloh von Bennewitz <michael@schloh.com>
date        Wed, 31 Dec 2014 06:09:35 +0100
changeset   0:6474c204b198

Cloned from the upstream tor-browser origin at tag tor-browser-31.3.0esr-4.5-1-build1,
revision ID fc1c9ff7c1b2defdbc039f12214767608f46423f, for hacking purposes.

     1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
     2  * vim: set ts=8 sts=4 et sw=4 tw=99:
     3  * This Source Code Form is subject to the terms of the Mozilla Public
     4  * License, v. 2.0. If a copy of the MPL was not distributed with this
     5  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
     7 #include "jit/IonBuilder.h"
     9 #include "mozilla/DebugOnly.h"
    11 #include "builtin/Eval.h"
    12 #include "builtin/TypedObject.h"
    13 #include "frontend/SourceNotes.h"
    14 #include "jit/BaselineFrame.h"
    15 #include "jit/BaselineInspector.h"
    16 #include "jit/Ion.h"
    17 #include "jit/IonOptimizationLevels.h"
    18 #include "jit/IonSpewer.h"
    19 #include "jit/Lowering.h"
    20 #include "jit/MIRGraph.h"
    21 #include "vm/ArgumentsObject.h"
    22 #include "vm/Opcodes.h"
    23 #include "vm/RegExpStatics.h"
    25 #include "jsinferinlines.h"
    26 #include "jsobjinlines.h"
    27 #include "jsopcodeinlines.h"
    28 #include "jsscriptinlines.h"
    30 #include "jit/CompileInfo-inl.h"
    31 #include "jit/ExecutionMode-inl.h"
    33 using namespace js;
    34 using namespace js::jit;
    36 using mozilla::DebugOnly;
    37 using mozilla::Maybe;
    38 using mozilla::SafeCast;
    40 class jit::BaselineFrameInspector
    41 {
    42   public:
    43     types::Type thisType;
    44     JSObject *singletonScopeChain;
    46     Vector<types::Type, 4, IonAllocPolicy> argTypes;
    47     Vector<types::Type, 4, IonAllocPolicy> varTypes;
    49     BaselineFrameInspector(TempAllocator *temp)
    50       : thisType(types::Type::UndefinedType()),
    51         singletonScopeChain(nullptr),
    52         argTypes(*temp),
    53         varTypes(*temp)
    54     {}
    55 };
    57 BaselineFrameInspector *
    58 jit::NewBaselineFrameInspector(TempAllocator *temp, BaselineFrame *frame, CompileInfo *info)
    59 {
    60     JS_ASSERT(frame);
    62     BaselineFrameInspector *inspector = temp->lifoAlloc()->new_<BaselineFrameInspector>(temp);
    63     if (!inspector)
    64         return nullptr;
    66     // Note: copying the actual values into a temporary structure for use
    67     // during compilation could capture nursery pointers, so the values' types
    68     // are recorded instead.
    70     inspector->thisType = types::GetMaybeOptimizedOutValueType(frame->thisValue());
    72     if (frame->scopeChain()->hasSingletonType())
    73         inspector->singletonScopeChain = frame->scopeChain();
    75     JSScript *script = frame->script();
    77     if (script->functionNonDelazifying()) {
    78         if (!inspector->argTypes.reserve(frame->numFormalArgs()))
    79             return nullptr;
    80         for (size_t i = 0; i < frame->numFormalArgs(); i++) {
    81             if (script->formalIsAliased(i)) {
    82                 inspector->argTypes.infallibleAppend(types::Type::UndefinedType());
    83             } else if (!script->argsObjAliasesFormals()) {
    84                 types::Type type = types::GetMaybeOptimizedOutValueType(frame->unaliasedFormal(i));
    85                 inspector->argTypes.infallibleAppend(type);
    86             } else if (frame->hasArgsObj()) {
    87                 types::Type type = types::GetMaybeOptimizedOutValueType(frame->argsObj().arg(i));
    88                 inspector->argTypes.infallibleAppend(type);
    89             } else {
    90                 inspector->argTypes.infallibleAppend(types::Type::UndefinedType());
    91             }
    92         }
    93     }
    95     if (!inspector->varTypes.reserve(frame->script()->nfixed()))
    96         return nullptr;
    97     for (size_t i = 0; i < frame->script()->nfixed(); i++) {
    98         if (info->isSlotAliasedAtOsr(i + info->firstLocalSlot())) {
    99             inspector->varTypes.infallibleAppend(types::Type::UndefinedType());
   100         } else {
   101             types::Type type = types::GetMaybeOptimizedOutValueType(frame->unaliasedLocal(i));
   102             inspector->varTypes.infallibleAppend(type);
   103         }
   104     }
   106     return inspector;
   107 }
   109 IonBuilder::IonBuilder(JSContext *analysisContext, CompileCompartment *comp,
   110                        const JitCompileOptions &options, TempAllocator *temp,
   111                        MIRGraph *graph, types::CompilerConstraintList *constraints,
   112                        BaselineInspector *inspector, CompileInfo *info,
   113                        const OptimizationInfo *optimizationInfo,
   114                        BaselineFrameInspector *baselineFrame, size_t inliningDepth,
   115                        uint32_t loopDepth)
   116   : MIRGenerator(comp, options, temp, graph, info, optimizationInfo),
   117     backgroundCodegen_(nullptr),
   118     analysisContext(analysisContext),
   119     baselineFrame_(baselineFrame),
   120     abortReason_(AbortReason_Disable),
   121     descrSetHash_(nullptr),
   122     constraints_(constraints),
   123     analysis_(*temp, info->script()),
   124     thisTypes(nullptr),
   125     argTypes(nullptr),
   126     typeArray(nullptr),
   127     typeArrayHint(0),
   128     bytecodeTypeMap(nullptr),
   129     loopDepth_(loopDepth),
   130     callerResumePoint_(nullptr),
   131     callerBuilder_(nullptr),
   132     cfgStack_(*temp),
   133     loops_(*temp),
   134     switches_(*temp),
   135     labels_(*temp),
   136     iterators_(*temp),
   137     loopHeaders_(*temp),
   138     inspector(inspector),
   139     inliningDepth_(inliningDepth),
   140     numLoopRestarts_(0),
   141     failedBoundsCheck_(info->script()->failedBoundsCheck()),
   142     failedShapeGuard_(info->script()->failedShapeGuard()),
   143     nonStringIteration_(false),
   144     lazyArguments_(nullptr),
   145     inlineCallInfo_(nullptr)
   146 {
   147     script_ = info->script();
   148     pc = info->startPC();
   150     JS_ASSERT(script()->hasBaselineScript() == (info->executionMode() != ArgumentsUsageAnalysis));
   151     JS_ASSERT(!!analysisContext == (info->executionMode() == DefinitePropertiesAnalysis));
   152 }
   154 void
   155 IonBuilder::clearForBackEnd()
   156 {
   157     JS_ASSERT(!analysisContext);
   158     baselineFrame_ = nullptr;
   160     // The caches below allocate data from the malloc heap. Release this before
   161     // later phases of compilation to avoid leaks, as the top level IonBuilder
   162     // is not explicitly destroyed. Note that builders for inner scripts are
   163     // constructed on the stack and will release this memory on destruction.
   164     gsn.purge();
   165     scopeCoordinateNameCache.purge();
   166 }
   168 bool
   169 IonBuilder::abort(const char *message, ...)
   170 {
   171     // Don't call PCToLineNumber in release builds.
   172 #ifdef DEBUG
   173     va_list ap;
   174     va_start(ap, message);
   175     abortFmt(message, ap);
   176     va_end(ap);
   177     IonSpew(IonSpew_Abort, "aborted @ %s:%d", script()->filename(), PCToLineNumber(script(), pc));
   178 #endif
   179     return false;
   180 }
   182 void
   183 IonBuilder::spew(const char *message)
   184 {
   185     // Don't call PCToLineNumber in release builds.
   186 #ifdef DEBUG
   187     IonSpew(IonSpew_MIR, "%s @ %s:%d", message, script()->filename(), PCToLineNumber(script(), pc));
   188 #endif
   189 }
   191 static inline int32_t
   192 GetJumpOffset(jsbytecode *pc)
   193 {
   194     JS_ASSERT(js_CodeSpec[JSOp(*pc)].type() == JOF_JUMP);
   195     return GET_JUMP_OFFSET(pc);
   196 }
   198 IonBuilder::CFGState
   199 IonBuilder::CFGState::If(jsbytecode *join, MTest *test)
   200 {
   201     CFGState state;
   202     state.state = IF_TRUE;
   203     state.stopAt = join;
   204     state.branch.ifFalse = test->ifFalse();
   205     state.branch.test = test;
   206     return state;
   207 }
   209 IonBuilder::CFGState
   210 IonBuilder::CFGState::IfElse(jsbytecode *trueEnd, jsbytecode *falseEnd, MTest *test)
   211 {
   212     MBasicBlock *ifFalse = test->ifFalse();
   214     CFGState state;
   215     // If the end of the false path is the same as the start of the
   216     // false path, then the "else" block is empty and we can devolve
   217     // this to the IF_TRUE case. We handle this here because there is
   218     // still an extra GOTO on the true path and we want stopAt to point
   219     // there, whereas the IF_TRUE case does not have the GOTO.
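           // An illustrative example (not from the original comment): for
           //
           //   if (cond) { f(); } else { }
           //
           // the false branch contains no bytecode, so falseEnd equals the pc
           // where the false block starts and we use IF_TRUE_EMPTY_ELSE.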
   220     state.state = (falseEnd == ifFalse->pc())
   221                   ? IF_TRUE_EMPTY_ELSE
   222                   : IF_ELSE_TRUE;
   223     state.stopAt = trueEnd;
   224     state.branch.falseEnd = falseEnd;
   225     state.branch.ifFalse = ifFalse;
   226     state.branch.test = test;
   227     return state;
   228 }
   230 IonBuilder::CFGState
   231 IonBuilder::CFGState::AndOr(jsbytecode *join, MBasicBlock *joinStart)
   232 {
   233     CFGState state;
   234     state.state = AND_OR;
   235     state.stopAt = join;
   236     state.branch.ifFalse = joinStart;
   237     state.branch.test = nullptr;
   238     return state;
   239 }
   241 IonBuilder::CFGState
   242 IonBuilder::CFGState::TableSwitch(jsbytecode *exitpc, MTableSwitch *ins)
   243 {
   244     CFGState state;
   245     state.state = TABLE_SWITCH;
   246     state.stopAt = exitpc;
   247     state.tableswitch.exitpc = exitpc;
   248     state.tableswitch.breaks = nullptr;
   249     state.tableswitch.ins = ins;
   250     state.tableswitch.currentBlock = 0;
   251     return state;
   252 }
   254 JSFunction *
   255 IonBuilder::getSingleCallTarget(types::TemporaryTypeSet *calleeTypes)
   256 {
   257     if (!calleeTypes)
   258         return nullptr;
   260     JSObject *obj = calleeTypes->getSingleton();
   261     if (!obj || !obj->is<JSFunction>())
   262         return nullptr;
   264     return &obj->as<JSFunction>();
   265 }
   267 bool
   268 IonBuilder::getPolyCallTargets(types::TemporaryTypeSet *calleeTypes, bool constructing,
   269                                ObjectVector &targets, uint32_t maxTargets, bool *gotLambda)
   270 {
   271     JS_ASSERT(targets.empty());
   272     JS_ASSERT(gotLambda);
   273     *gotLambda = false;
   275     if (!calleeTypes)
   276         return true;
   278     if (calleeTypes->baseFlags() != 0)
   279         return true;
   281     unsigned objCount = calleeTypes->getObjectCount();
   283     if (objCount == 0 || objCount > maxTargets)
   284         return true;
   286     if (!targets.reserve(objCount))
   287         return false;
   288     for (unsigned i = 0; i < objCount; i++) {
   289         JSObject *obj = calleeTypes->getSingleObject(i);
   290         JSFunction *fun;
   291         if (obj) {
   292             if (!obj->is<JSFunction>()) {
   293                 targets.clear();
   294                 return true;
   295             }
   296             fun = &obj->as<JSFunction>();
   297         } else {
   298             types::TypeObject *typeObj = calleeTypes->getTypeObject(i);
   299             JS_ASSERT(typeObj);
   300             if (!typeObj->interpretedFunction) {
   301                 targets.clear();
   302                 return true;
   303             }
   305             fun = typeObj->interpretedFunction;
   306             *gotLambda = true;
   307         }
   309         // Don't optimize if we're constructing and the callee is not a
   310         // constructor, so that CallKnown does not have to handle this case
   311         // (it should always throw).
   312         if (constructing && !fun->isInterpretedConstructor() && !fun->isNativeConstructor()) {
   313             targets.clear();
   314             return true;
   315         }
   317         DebugOnly<bool> appendOk = targets.append(fun);
   318         JS_ASSERT(appendOk);
   319     }
   321     // For now, only inline "singleton" lambda calls
   322     if (*gotLambda && targets.length() > 1)
   323         targets.clear();
   325     return true;
   326 }
   328 IonBuilder::InliningDecision
   329 IonBuilder::DontInline(JSScript *targetScript, const char *reason)
   330 {
   331     if (targetScript) {
   332         IonSpew(IonSpew_Inlining, "Cannot inline %s:%u: %s",
   333                 targetScript->filename(), targetScript->lineno(), reason);
   334     } else {
   335         IonSpew(IonSpew_Inlining, "Cannot inline: %s", reason);
   336     }
   338     return InliningDecision_DontInline;
   339 }
   341 IonBuilder::InliningDecision
   342 IonBuilder::canInlineTarget(JSFunction *target, CallInfo &callInfo)
   343 {
   344     if (!optimizationInfo().inlineInterpreted())
   345         return InliningDecision_DontInline;
   347     if (!target->isInterpreted())
   348         return DontInline(nullptr, "Non-interpreted target");
   350     // Allow constructing lazy scripts when performing the definite properties
   351     // analysis, as baseline has not been used to warm the caller up yet.
   352     if (target->isInterpreted() && info().executionMode() == DefinitePropertiesAnalysis) {
   353         RootedScript script(analysisContext, target->getOrCreateScript(analysisContext));
   354         if (!script)
   355             return InliningDecision_Error;
   357         if (!script->hasBaselineScript() && script->canBaselineCompile()) {
   358             MethodStatus status = BaselineCompile(analysisContext, script);
   359             if (status == Method_Error)
   360                 return InliningDecision_Error;
   361             if (status != Method_Compiled)
   362                 return InliningDecision_DontInline;
   363         }
   364     }
   366     if (!target->hasScript())
   367         return DontInline(nullptr, "Lazy script");
   369     JSScript *inlineScript = target->nonLazyScript();
   370     if (callInfo.constructing() && !target->isInterpretedConstructor())
   371         return DontInline(inlineScript, "Callee is not a constructor");
   373     ExecutionMode executionMode = info().executionMode();
   374     if (!CanIonCompile(inlineScript, executionMode))
   375         return DontInline(inlineScript, "Disabled Ion compilation");
   377     // Don't inline functions which don't have baseline scripts.
   378     if (!inlineScript->hasBaselineScript())
   379         return DontInline(inlineScript, "No baseline jitcode");
   381     if (TooManyArguments(target->nargs()))
   382         return DontInline(inlineScript, "Too many args");
   384     if (TooManyArguments(callInfo.argc()))
   385         return DontInline(inlineScript, "Too many args");
   387     // Allow inlining of recursive calls, but only one level deep.
   388     IonBuilder *builder = callerBuilder_;
   389     while (builder) {
   390         if (builder->script() == inlineScript)
   391             return DontInline(inlineScript, "Recursive call");
   392         builder = builder->callerBuilder_;
   393     }
   395     if (target->isHeavyweight())
   396         return DontInline(inlineScript, "Heavyweight function");
   398     if (inlineScript->uninlineable())
   399         return DontInline(inlineScript, "Uninlineable script");
   401     if (inlineScript->needsArgsObj())
   402         return DontInline(inlineScript, "Script that needs an arguments object");
   404     if (!inlineScript->compileAndGo())
   405         return DontInline(inlineScript, "Non-compileAndGo script");
   407     types::TypeObjectKey *targetType = types::TypeObjectKey::get(target);
   408     if (targetType->unknownProperties())
   409         return DontInline(inlineScript, "Target type has unknown properties");
   411     return InliningDecision_Inline;
   412 }
   414 void
   415 IonBuilder::popCfgStack()
   416 {
   417     if (cfgStack_.back().isLoop())
   418         loops_.popBack();
   419     if (cfgStack_.back().state == CFGState::LABEL)
   420         labels_.popBack();
   421     cfgStack_.popBack();
   422 }
   424 bool
   425 IonBuilder::analyzeNewLoopTypes(MBasicBlock *entry, jsbytecode *start, jsbytecode *end)
   426 {
   427     // The phi inputs at the loop head only reflect types for variables that
   428     // were present at the start of the loop. If the variable changes to a new
   429     // type within the loop body, and that type is carried around to the loop
   430     // head, then we need to know about the new type up front.
   431     //
   432     // Since SSA information hasn't been constructed for the loop body yet, we
   433     // need a separate analysis to pick out the types that might flow around
   434     // the loop header. This is a best-effort analysis that may either over-
   435     // or under-approximate the set of such types.
   436     //
   437     // Over-approximating the types may lead to inefficient generated code, and
   438     // under-approximating the types will cause the loop body to be analyzed
   439     // multiple times as the correct types are deduced (see finishLoop).
   441     // If we restarted processing of an outer loop then get loop header types
   442     // directly from the last time we have previously processed this loop. This
   443     // both avoids repeated work from the bytecode traverse below, and will
   444     // also pick up types discovered while previously building the loop body.
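           // An illustrative sketch (not from the original source) of the
           // situation this analysis targets:
           //
           //   var x = 0;            // header phi initially sees Int32
           //   while (cond)
           //       x = x + 0.5;      // the backedge carries a Double
           //
           // Seeding the header phi for |x| with the backedge type up front
           // avoids rebuilding the loop body once the Double is discovered.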
   445     for (size_t i = 0; i < loopHeaders_.length(); i++) {
   446         if (loopHeaders_[i].pc == start) {
   447             MBasicBlock *oldEntry = loopHeaders_[i].header;
   448             for (MPhiIterator oldPhi = oldEntry->phisBegin();
   449                  oldPhi != oldEntry->phisEnd();
   450                  oldPhi++)
   451             {
   452                 MPhi *newPhi = entry->getSlot(oldPhi->slot())->toPhi();
   453                 if (!newPhi->addBackedgeType(oldPhi->type(), oldPhi->resultTypeSet()))
   454                     return false;
   455             }
   456             // Update the most recent header for this loop encountered, in case
   457             // new types flow to the phis and the loop is processed at least
   458             // three times.
   459             loopHeaders_[i].header = entry;
   460             return true;
   461         }
   462     }
   463     loopHeaders_.append(LoopHeader(start, entry));
   465     jsbytecode *last = nullptr, *earlier = nullptr;
   466     for (jsbytecode *pc = start; pc != end; earlier = last, last = pc, pc += GetBytecodeLength(pc)) {
   467         uint32_t slot;
   468         if (*pc == JSOP_SETLOCAL)
   469             slot = info().localSlot(GET_LOCALNO(pc));
   470         else if (*pc == JSOP_SETARG)
   471             slot = info().argSlotUnchecked(GET_ARGNO(pc));
   472         else
   473             continue;
   474         if (slot >= info().firstStackSlot())
   475             continue;
   476         if (!analysis().maybeInfo(pc))
   477             continue;
   479         MPhi *phi = entry->getSlot(slot)->toPhi();
   481         if (*last == JSOP_POS)
   482             last = earlier;
   484         if (js_CodeSpec[*last].format & JOF_TYPESET) {
   485             types::TemporaryTypeSet *typeSet = bytecodeTypes(last);
   486             if (!typeSet->empty()) {
   487                 MIRType type = typeSet->getKnownMIRType();
   488                 if (!phi->addBackedgeType(type, typeSet))
   489                     return false;
   490             }
   491         } else if (*last == JSOP_GETLOCAL || *last == JSOP_GETARG) {
   492             uint32_t slot = (*last == JSOP_GETLOCAL)
   493                             ? info().localSlot(GET_LOCALNO(last))
   494                             : info().argSlotUnchecked(GET_ARGNO(last));
   495             if (slot < info().firstStackSlot()) {
   496                 MPhi *otherPhi = entry->getSlot(slot)->toPhi();
   497                 if (otherPhi->hasBackedgeType()) {
   498                     if (!phi->addBackedgeType(otherPhi->type(), otherPhi->resultTypeSet()))
   499                         return false;
   500                 }
   501             }
   502         } else {
   503             MIRType type = MIRType_None;
   504             switch (*last) {
   505               case JSOP_VOID:
   506               case JSOP_UNDEFINED:
   507                 type = MIRType_Undefined;
   508                 break;
   509               case JSOP_NULL:
   510                 type = MIRType_Null;
   511                 break;
   512               case JSOP_ZERO:
   513               case JSOP_ONE:
   514               case JSOP_INT8:
   515               case JSOP_INT32:
   516               case JSOP_UINT16:
   517               case JSOP_UINT24:
   518               case JSOP_BITAND:
   519               case JSOP_BITOR:
   520               case JSOP_BITXOR:
   521               case JSOP_BITNOT:
   522               case JSOP_RSH:
   523               case JSOP_LSH:
   524               case JSOP_URSH:
   525                 type = MIRType_Int32;
   526                 break;
   527               case JSOP_FALSE:
   528               case JSOP_TRUE:
   529               case JSOP_EQ:
   530               case JSOP_NE:
   531               case JSOP_LT:
   532               case JSOP_LE:
   533               case JSOP_GT:
   534               case JSOP_GE:
   535               case JSOP_NOT:
   536               case JSOP_STRICTEQ:
   537               case JSOP_STRICTNE:
   538               case JSOP_IN:
   539               case JSOP_INSTANCEOF:
   540                 type = MIRType_Boolean;
   541                 break;
   542               case JSOP_DOUBLE:
   543                 type = MIRType_Double;
   544                 break;
   545               case JSOP_STRING:
   546               case JSOP_TYPEOF:
   547               case JSOP_TYPEOFEXPR:
   548               case JSOP_ITERNEXT:
   549                 type = MIRType_String;
   550                 break;
   551               case JSOP_ADD:
   552               case JSOP_SUB:
   553               case JSOP_MUL:
   554               case JSOP_DIV:
   555               case JSOP_MOD:
   556               case JSOP_NEG:
   557                 type = inspector->expectedResultType(last);
   558               default:
   559                 break;
   560             }
   561             if (type != MIRType_None) {
   562                 if (!phi->addBackedgeType(type, nullptr))
   563                     return false;
   564             }
   565         }
   566     }
   567     return true;
   568 }
   570 bool
   571 IonBuilder::pushLoop(CFGState::State initial, jsbytecode *stopAt, MBasicBlock *entry, bool osr,
   572                      jsbytecode *loopHead, jsbytecode *initialPc,
   573                      jsbytecode *bodyStart, jsbytecode *bodyEnd, jsbytecode *exitpc,
   574                      jsbytecode *continuepc)
   575 {
   576     if (!continuepc)
   577         continuepc = entry->pc();
   579     ControlFlowInfo loop(cfgStack_.length(), continuepc);
   580     if (!loops_.append(loop))
   581         return false;
   583     CFGState state;
   584     state.state = initial;
   585     state.stopAt = stopAt;
   586     state.loop.bodyStart = bodyStart;
   587     state.loop.bodyEnd = bodyEnd;
   588     state.loop.exitpc = exitpc;
   589     state.loop.continuepc = continuepc;
   590     state.loop.entry = entry;
   591     state.loop.osr = osr;
   592     state.loop.successor = nullptr;
   593     state.loop.breaks = nullptr;
   594     state.loop.continues = nullptr;
   595     state.loop.initialState = initial;
   596     state.loop.initialPc = initialPc;
   597     state.loop.initialStopAt = stopAt;
   598     state.loop.loopHead = loopHead;
   599     return cfgStack_.append(state);
   600 }
   602 bool
   603 IonBuilder::init()
   604 {
   605     if (!types::TypeScript::FreezeTypeSets(constraints(), script(),
   606                                            &thisTypes, &argTypes, &typeArray))
   607     {
   608         return false;
   609     }
   611     if (!analysis().init(alloc(), gsn))
   612         return false;
   614     // The baseline script normally has the bytecode type map, but compute
   615     // it ourselves if we do not have a baseline script.
   616     if (script()->hasBaselineScript()) {
   617         bytecodeTypeMap = script()->baselineScript()->bytecodeTypeMap();
   618     } else {
   619         bytecodeTypeMap = alloc_->lifoAlloc()->newArrayUninitialized<uint32_t>(script()->nTypeSets());
   620         if (!bytecodeTypeMap)
   621             return false;
   622         types::FillBytecodeTypeMap(script(), bytecodeTypeMap);
   623     }
   625     return true;
   626 }
   628 bool
   629 IonBuilder::build()
   630 {
   631     if (!init())
   632         return false;
   634     if (!setCurrentAndSpecializePhis(newBlock(pc)))
   635         return false;
   636     if (!current)
   637         return false;
   639 #ifdef DEBUG
   640     if (info().executionMode() == SequentialExecution && script()->hasIonScript()) {
   641         IonSpew(IonSpew_Scripts, "Recompiling script %s:%d (%p) (usecount=%d, level=%s)",
   642                 script()->filename(), script()->lineno(), (void *)script(),
   643                 (int)script()->getUseCount(), OptimizationLevelString(optimizationInfo().level()));
   644     } else {
   645         IonSpew(IonSpew_Scripts, "Analyzing script %s:%d (%p) (usecount=%d, level=%s)",
   646                 script()->filename(), script()->lineno(), (void *)script(),
   647                 (int)script()->getUseCount(), OptimizationLevelString(optimizationInfo().level()));
   648     }
   649 #endif
   651     initParameters();
   653     // Initialize local variables.
   654     for (uint32_t i = 0; i < info().nlocals(); i++) {
   655         MConstant *undef = MConstant::New(alloc(), UndefinedValue());
   656         current->add(undef);
   657         current->initSlot(info().localSlot(i), undef);
   658     }
   660     // Initialize something for the scope chain. We can bail out before the
   661     // start instruction, but the snapshot is encoded *at* the start
   662     // instruction, which means generating any code that could load into
   663     // registers is illegal.
   664     MInstruction *scope = MConstant::New(alloc(), UndefinedValue());
   665     current->add(scope);
   666     current->initSlot(info().scopeChainSlot(), scope);
   668     // Initialize the return value.
   669     MInstruction *returnValue = MConstant::New(alloc(), UndefinedValue());
   670     current->add(returnValue);
   671     current->initSlot(info().returnValueSlot(), returnValue);
   673     // Initialize the arguments object slot to undefined if necessary.
   674     if (info().hasArguments()) {
   675         MInstruction *argsObj = MConstant::New(alloc(), UndefinedValue());
   676         current->add(argsObj);
   677         current->initSlot(info().argsObjSlot(), argsObj);
   678     }
   680     // Emit the start instruction, so we can begin real instructions.
   681     current->makeStart(MStart::New(alloc(), MStart::StartType_Default));
   682     if (instrumentedProfiling())
   683         current->add(MProfilerStackOp::New(alloc(), script(), MProfilerStackOp::Enter));
   685     // Guard against over-recursion. Do this before we start unboxing, since
   686     // this will create an OSI point that will read the incoming argument
   687     // values, which is nice to do before their last real use, to minimize
   688     // register/stack pressure.
   689     MCheckOverRecursed *check = MCheckOverRecursed::New(alloc());
   690     current->add(check);
   691     check->setResumePoint(current->entryResumePoint());
   693     // Parameters have been checked to correspond to the typeset, now we unbox
   694     // what we can in an infallible manner.
   695     rewriteParameters();
   697     // It's safe to start emitting actual IR, so now build the scope chain.
   698     if (!initScopeChain())
   699         return false;
   701     if (info().needsArgsObj() && !initArgumentsObject())
   702         return false;
   704     // Prevent |this| from being DCE'd: necessary for constructors.
   705     if (info().funMaybeLazy())
   706         current->getSlot(info().thisSlot())->setGuard();
   708     // The type analysis phase attempts to insert unbox operations near
   709     // definitions of values. It also attempts to replace uses in resume points
   710     // with the narrower, unboxed variants. However, we must prevent this
   711     // replacement from happening on values in the entry snapshot. Otherwise we
   712     // could get this:
   713     //
   714     //       v0 = MParameter(0)
   715     //       v1 = MParameter(1)
   716     //       --   ResumePoint(v2, v3)
   717     //       v2 = Unbox(v0, INT32)
   718     //       v3 = Unbox(v1, INT32)
   719     //
   720     // So we attach the initial resume point to each parameter, which the type
   721     // analysis explicitly checks (this is the same mechanism used for
   722     // effectful operations).
   723     for (uint32_t i = 0; i < info().endArgSlot(); i++) {
   724         MInstruction *ins = current->getEntrySlot(i)->toInstruction();
   725         if (ins->type() == MIRType_Value)
   726             ins->setResumePoint(current->entryResumePoint());
   727     }
   729     // lazyArguments should never be accessed in |argsObjAliasesFormals| scripts.
   730     if (info().hasArguments() && !info().argsObjAliasesFormals()) {
   731         lazyArguments_ = MConstant::New(alloc(), MagicValue(JS_OPTIMIZED_ARGUMENTS));
   732         current->add(lazyArguments_);
   733     }
   735     insertRecompileCheck();
   737     if (!traverseBytecode())
   738         return false;
   740     if (!maybeAddOsrTypeBarriers())
   741         return false;
   743     if (!processIterators())
   744         return false;
   746     JS_ASSERT(loopDepth_ == 0);
   747     abortReason_ = AbortReason_NoAbort;
   748     return true;
   749 }
   751 bool
   752 IonBuilder::processIterators()
   753 {
   754     // Find phis that must directly hold an iterator live.
   755     Vector<MPhi *, 0, SystemAllocPolicy> worklist;
   756     for (size_t i = 0; i < iterators_.length(); i++) {
   757         MInstruction *ins = iterators_[i];
   758         for (MUseDefIterator iter(ins); iter; iter++) {
   759             if (iter.def()->isPhi()) {
   760                 if (!worklist.append(iter.def()->toPhi()))
   761                     return false;
   762             }
   763         }
   764     }
   766     // Propagate the iterator and live status of phis to all other connected
   767     // phis.
   768     while (!worklist.empty()) {
   769         MPhi *phi = worklist.popCopy();
   770         phi->setIterator();
   771         phi->setImplicitlyUsedUnchecked();
   773         for (MUseDefIterator iter(phi); iter; iter++) {
   774             if (iter.def()->isPhi()) {
   775                 MPhi *other = iter.def()->toPhi();
   776                 if (!other->isIterator() && !worklist.append(other))
   777                     return false;
   778             }
   779         }
   780     }
   782     return true;
   783 }
   785 bool
   786 IonBuilder::buildInline(IonBuilder *callerBuilder, MResumePoint *callerResumePoint,
   787                         CallInfo &callInfo)
   788 {
   789     if (!init())
   790         return false;
   792     inlineCallInfo_ = &callInfo;
   794     IonSpew(IonSpew_Scripts, "Inlining script %s:%d (%p)",
   795             script()->filename(), script()->lineno(), (void *)script());
   797     callerBuilder_ = callerBuilder;
   798     callerResumePoint_ = callerResumePoint;
   800     if (callerBuilder->failedBoundsCheck_)
   801         failedBoundsCheck_ = true;
   803     if (callerBuilder->failedShapeGuard_)
   804         failedShapeGuard_ = true;
   806     // Generate single entrance block.
   807     if (!setCurrentAndSpecializePhis(newBlock(pc)))
   808         return false;
   809     if (!current)
   810         return false;
   812     current->setCallerResumePoint(callerResumePoint);
   814     // Connect the entrance block to the last block in the caller's graph.
   815     MBasicBlock *predecessor = callerBuilder->current;
   816     JS_ASSERT(predecessor == callerResumePoint->block());
   818     // All further instructions generated from this scope should be
   819     // considered as part of the function that we're inlining. We also need to
   820     // keep track of the inlining depth because all scripts inlined on the same
   821     // level contiguously have only one InlineExit node.
   822     if (instrumentedProfiling()) {
   823         predecessor->add(MProfilerStackOp::New(alloc(), script(),
   824                                                MProfilerStackOp::InlineEnter,
   825                                                inliningDepth_));
   826     }
   828     predecessor->end(MGoto::New(alloc(), current));
   829     if (!current->addPredecessorWithoutPhis(predecessor))
   830         return false;
   832     // Initialize scope chain slot to Undefined.  It's set later by |initScopeChain|.
   833     MInstruction *scope = MConstant::New(alloc(), UndefinedValue());
   834     current->add(scope);
   835     current->initSlot(info().scopeChainSlot(), scope);
   837     // Initialize |return value| slot.
   838     MInstruction *returnValue = MConstant::New(alloc(), UndefinedValue());
   839     current->add(returnValue);
   840     current->initSlot(info().returnValueSlot(), returnValue);
   842     // Initialize |arguments| slot.
   843     if (info().hasArguments()) {
   844         MInstruction *argsObj = MConstant::New(alloc(), UndefinedValue());
   845         current->add(argsObj);
   846         current->initSlot(info().argsObjSlot(), argsObj);
   847     }
   849     // Initialize |this| slot.
   850     current->initSlot(info().thisSlot(), callInfo.thisArg());
   852     IonSpew(IonSpew_Inlining, "Initializing %u arg slots", info().nargs());
   854     // NB: Ion does not inline functions which |needsArgsObj|.  So using argSlot()
   855     // instead of argSlotUnchecked() below is OK.
   856     JS_ASSERT(!info().needsArgsObj());
   858     // Initialize actually set arguments.
   859     uint32_t existing_args = Min<uint32_t>(callInfo.argc(), info().nargs());
   860     for (size_t i = 0; i < existing_args; ++i) {
   861         MDefinition *arg = callInfo.getArg(i);
   862         current->initSlot(info().argSlot(i), arg);
   863     }
   865     // Pass Undefined for missing arguments
   866     for (size_t i = callInfo.argc(); i < info().nargs(); ++i) {
   867         MConstant *arg = MConstant::New(alloc(), UndefinedValue());
   868         current->add(arg);
   869         current->initSlot(info().argSlot(i), arg);
   870     }
   872     // Initialize the scope chain now that args are initialized.
   873     if (!initScopeChain(callInfo.fun()))
   874         return false;
   876     IonSpew(IonSpew_Inlining, "Initializing %u local slots", info().nlocals());
   878     // Initialize local variables.
   879     for (uint32_t i = 0; i < info().nlocals(); i++) {
   880         MConstant *undef = MConstant::New(alloc(), UndefinedValue());
   881         current->add(undef);
   882         current->initSlot(info().localSlot(i), undef);
   883     }
   885     IonSpew(IonSpew_Inlining, "Inline entry block MResumePoint %p, %u operands",
   886             (void *) current->entryResumePoint(), current->entryResumePoint()->numOperands());
   888     // +2 for the scope chain and |this|, maybe another +1 for arguments object slot.
   889     JS_ASSERT(current->entryResumePoint()->numOperands() == info().totalSlots());
   891     if (script_->argumentsHasVarBinding()) {
   892         lazyArguments_ = MConstant::New(alloc(), MagicValue(JS_OPTIMIZED_ARGUMENTS));
   893         current->add(lazyArguments_);
   894     }
   896     insertRecompileCheck();
   898     if (!traverseBytecode())
   899         return false;
   901     return true;
   902 }
   904 void
   905 IonBuilder::rewriteParameter(uint32_t slotIdx, MDefinition *param, int32_t argIndex)
   906 {
   907     JS_ASSERT(param->isParameter() || param->isGetArgumentsObjectArg());
   909     types::TemporaryTypeSet *types = param->resultTypeSet();
   910     MDefinition *actual = ensureDefiniteType(param, types->getKnownMIRType());
   911     if (actual == param)
   912         return;
   914     // Careful! We leave the original MParameter in the entry resume point. The
   915     // arguments still need to be checked unless proven otherwise at the call
   916     // site, and these checks can bailout. We can end up:
   917     //   v0 = Parameter(0)
   918     //   v1 = Unbox(v0, INT32)
   919     //   --   ResumePoint(v0)
   920     //
   921     // As usual, it would be invalid for v1 to be captured in the initial
   922     // resume point, rather than v0.
   923     current->rewriteSlot(slotIdx, actual);
   924 }
   926 // Apply Type Inference information to parameters early on, unboxing them if
   927 // they have a definitive type. The actual guards will be emitted by the code
   928 // generator, explicitly, as part of the function prologue.
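       // An illustrative sketch (not part of the original comment): with
       // argument 0 known by Type Inference to always be an Int32,
       //
       //   v0 = Parameter(0)
       //   v1 = Unbox(v0, INT32)   ; subsequent uses refer to v1
       //
       // while v0 itself stays in the entry resume point (see
       // rewriteParameter above).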
   929 void
   930 IonBuilder::rewriteParameters()
   931 {
   932     JS_ASSERT(info().scopeChainSlot() == 0);
   934     if (!info().funMaybeLazy())
   935         return;
   937     for (uint32_t i = info().startArgSlot(); i < info().endArgSlot(); i++) {
   938         MDefinition *param = current->getSlot(i);
   939         rewriteParameter(i, param, param->toParameter()->index());
   940     }
   941 }
   943 void
   944 IonBuilder::initParameters()
   945 {
   946     if (!info().funMaybeLazy())
   947         return;
   949     // If we are doing OSR on a frame which initially executed in the
   950     // interpreter and didn't accumulate type information, try to use that OSR
   951     // frame to determine possible initial types for 'this' and parameters.
   953     if (thisTypes->empty() && baselineFrame_)
   954         thisTypes->addType(baselineFrame_->thisType, alloc_->lifoAlloc());
   956     MParameter *param = MParameter::New(alloc(), MParameter::THIS_SLOT, thisTypes);
   957     current->add(param);
   958     current->initSlot(info().thisSlot(), param);
   960     for (uint32_t i = 0; i < info().nargs(); i++) {
   961         types::TemporaryTypeSet *types = &argTypes[i];
   962         if (types->empty() && baselineFrame_ &&
   963             !script_->baselineScript()->modifiesArguments())
   964         {
   965             types->addType(baselineFrame_->argTypes[i], alloc_->lifoAlloc());
   966         }
   968         param = MParameter::New(alloc(), i, types);
   969         current->add(param);
   970         current->initSlot(info().argSlotUnchecked(i), param);
   971     }
   972 }
   974 bool
   975 IonBuilder::initScopeChain(MDefinition *callee)
   976 {
   977     MInstruction *scope = nullptr;
   979     // If the script doesn't use the scopechain, then it's already initialized
   980     // from earlier.  However, always make a scope chain when |needsArgsObj| is true
   981     // for the script, since arguments object construction requires the scope chain
   982     // to be passed in.
   983     if (!info().needsArgsObj() && !analysis().usesScopeChain())
   984         return true;
   986     // The scope chain is only tracked in scripts that have NAME opcodes which
   987     // will try to access the scope. For other scripts, the scope instructions
   988     // will be held live by resume points and code will still be generated for
   989     // them, so just use a constant undefined value.
   990     if (!script()->compileAndGo())
   991         return abort("non-CNG global scripts are not supported");
   993     if (JSFunction *fun = info().funMaybeLazy()) {
   994         if (!callee) {
   995             MCallee *calleeIns = MCallee::New(alloc());
   996             current->add(calleeIns);
   997             callee = calleeIns;
   998         }
   999         scope = MFunctionEnvironment::New(alloc(), callee);
  1000         current->add(scope);
  1002         // This reproduces what is done in CallObject::createForFunction. Skip
  1003         // this for analyses, as the script might not have a baseline script
  1004         // with template objects yet.
  1005         if (fun->isHeavyweight() && !info().executionModeIsAnalysis()) {
  1006             if (fun->isNamedLambda()) {
  1007                 scope = createDeclEnvObject(callee, scope);
  1008                 if (!scope)
  1009                     return false;
  1010             }
  1012             scope = createCallObject(callee, scope);
  1013             if (!scope)
  1014                 return false;
  1015         }
  1016     } else {
  1017         scope = constant(ObjectValue(script()->global()));
  1018     }
  1020     current->setScopeChain(scope);
  1021     return true;
  1022 }
  1024 bool
  1025 IonBuilder::initArgumentsObject()
  1026 {
  1027     IonSpew(IonSpew_MIR, "%s:%d - Emitting code to initialize arguments object! block=%p",
  1028                               script()->filename(), script()->lineno(), current);
  1029     JS_ASSERT(info().needsArgsObj());
  1030     MCreateArgumentsObject *argsObj = MCreateArgumentsObject::New(alloc(), current->scopeChain());
  1031     current->add(argsObj);
  1032     current->setArgumentsObject(argsObj);
  1033     return true;
  1034 }
  1036 bool
  1037 IonBuilder::addOsrValueTypeBarrier(uint32_t slot, MInstruction **def_,
  1038                                    MIRType type, types::TemporaryTypeSet *typeSet)
  1039 {
  1040     MInstruction *&def = *def_;
  1041     MBasicBlock *osrBlock = def->block();
  1043     // Clear bogus type information added in newOsrPreheader().
  1044     def->setResultType(MIRType_Value);
  1045     def->setResultTypeSet(nullptr);
  1047     if (typeSet && !typeSet->unknown()) {
  1048         MInstruction *barrier = MTypeBarrier::New(alloc(), def, typeSet);
  1049         osrBlock->insertBefore(osrBlock->lastIns(), barrier);
  1050         osrBlock->rewriteSlot(slot, barrier);
  1051         def = barrier;
  1052     } else if (type == MIRType_Null ||
  1053                type == MIRType_Undefined ||
  1054                type == MIRType_MagicOptimizedArguments)
  1055     {
  1056         // No unbox instruction will be added below, so check the type by
  1057         // adding a type barrier for a singleton type set.
  1058         types::Type ntype = types::Type::PrimitiveType(ValueTypeFromMIRType(type));
  1059         typeSet = alloc_->lifoAlloc()->new_<types::TemporaryTypeSet>(ntype);
  1060         if (!typeSet)
  1061             return false;
  1062         MInstruction *barrier = MTypeBarrier::New(alloc(), def, typeSet);
  1063         osrBlock->insertBefore(osrBlock->lastIns(), barrier);
  1064         osrBlock->rewriteSlot(slot, barrier);
  1065         def = barrier;
  1066     }
  1068     switch (type) {
  1069       case MIRType_Boolean:
  1070       case MIRType_Int32:
  1071       case MIRType_Double:
  1072       case MIRType_String:
  1073       case MIRType_Object:
  1074         if (type != def->type()) {
  1075             MUnbox *unbox = MUnbox::New(alloc(), def, type, MUnbox::Fallible);
  1076             osrBlock->insertBefore(osrBlock->lastIns(), unbox);
  1077             osrBlock->rewriteSlot(slot, unbox);
  1078             def = unbox;
  1079         }
  1080         break;
  1082       case MIRType_Null:
  1083       {
  1084         MConstant *c = MConstant::New(alloc(), NullValue());
  1085         osrBlock->insertBefore(osrBlock->lastIns(), c);
  1086         osrBlock->rewriteSlot(slot, c);
  1087         def = c;
  1088         break;
  1089       }
  1091       case MIRType_Undefined:
  1092       {
  1093         MConstant *c = MConstant::New(alloc(), UndefinedValue());
  1094         osrBlock->insertBefore(osrBlock->lastIns(), c);
  1095         osrBlock->rewriteSlot(slot, c);
  1096         def = c;
  1097         break;
  1098       }
  1100       case MIRType_MagicOptimizedArguments:
  1101         JS_ASSERT(lazyArguments_);
  1102         osrBlock->rewriteSlot(slot, lazyArguments_);
  1103         def = lazyArguments_;
  1104         break;
  1106       default:
  1107         break;
  1108     }
  1110     JS_ASSERT(def == osrBlock->getSlot(slot));
  1111     return true;
  1112 }
  1114 bool
  1115 IonBuilder::maybeAddOsrTypeBarriers()
  1116 {
  1117     if (!info().osrPc())
  1118         return true;
  1120     // The loop has successfully been processed, and the loop header phis
  1121     // have their final type. Add unboxes and type barriers in the OSR
  1122     // block to check that the values have the appropriate type, and update
  1123     // the types in the preheader.
  1125     MBasicBlock *osrBlock = graph().osrBlock();
  1126     if (!osrBlock) {
  1127         // Because IonBuilder does not compile catch blocks, it's possible to
  1128         // end up without an OSR block if the OSR pc is only reachable via a
  1129         // break-statement inside the catch block. For instance:
  1130         //
  1131         //   for (;;) {
  1132         //       try {
  1133         //           throw 3;
  1134         //       } catch(e) {
  1135         //           break;
  1136         //       }
  1137         //   }
  1138         //   while (..) { } // <= OSR here, only reachable via catch block.
  1139         //
  1140         // For now we just abort in this case.
  1141         JS_ASSERT(graph().hasTryBlock());
  1142         return abort("OSR block only reachable through catch block");
  1143     }
  1145     MBasicBlock *preheader = osrBlock->getSuccessor(0);
  1146     MBasicBlock *header = preheader->getSuccessor(0);
  1147     static const size_t OSR_PHI_POSITION = 1;
  1148     JS_ASSERT(preheader->getPredecessor(OSR_PHI_POSITION) == osrBlock);
  1150     MPhiIterator headerPhi = header->phisBegin();
  1151     while (headerPhi != header->phisEnd() && headerPhi->slot() < info().startArgSlot())
  1152         headerPhi++;
  1154     for (uint32_t i = info().startArgSlot(); i < osrBlock->stackDepth(); i++, headerPhi++) {
  1155         // Aliased slots are never accessed, since they need to go through
  1156         // the callobject. The typebarriers are added there and can be
  1157         // discarded here.
  1158         if (info().isSlotAliasedAtOsr(i))
  1159             continue;
  1161         MInstruction *def = osrBlock->getSlot(i)->toInstruction();
  1163         JS_ASSERT(headerPhi->slot() == i);
  1164         MPhi *preheaderPhi = preheader->getSlot(i)->toPhi();
  1166         MIRType type = headerPhi->type();
  1167         types::TemporaryTypeSet *typeSet = headerPhi->resultTypeSet();
  1169         if (!addOsrValueTypeBarrier(i, &def, type, typeSet))
  1170             return false;
  1172         preheaderPhi->replaceOperand(OSR_PHI_POSITION, def);
  1173         preheaderPhi->setResultType(type);
  1174         preheaderPhi->setResultTypeSet(typeSet);
  1175     }
  1177     return true;
  1178 }
  1180 // We try to build a control-flow graph in the order that it would be built as
  1181 // if traversing the AST. This leads to a nice ordering and lets us build SSA
  1182 // in one pass, since the bytecode is structured.
  1183 //
  1184 // We traverse the bytecode iteratively, maintaining a current basic block.
  1185 // Each basic block has a mapping of local slots to instructions, as well as a
  1186 // stack depth. As we encounter instructions we mutate this mapping in the
  1187 // current block.
  1188 //
  1189 // Things get interesting when we encounter a control structure. This can be
  1190 // either an IFEQ, downward GOTO, or a decompiler hint stashed away in source
  1191 // notes. Once we encounter such an opcode, we recover the structure of the
  1192 // control flow (its branches and bounds), and push it on a stack.
  1193 //
  1194 // As we continue traversing the bytecode, we look for points that would
  1195 // terminate the topmost control flow path pushed on the stack. These are:
  1196 //  (1) The bounds of the current structure (end of a loop or join/edge of a
  1197 //      branch).
  1198 //  (2) A "return", "break", or "continue" statement.
  1199 //
  1200 // For (1), we expect that there is a current block in the progress of being
  1201 // built, and we complete the necessary edges in the CFG. For (2), we expect
  1202 // that there is no active block.
  1203 //
  1204 // For normal diamond join points, we construct Phi nodes as we add
  1205 // predecessors. For loops, care must be taken to propagate Phi nodes back
  1206 // through uses in the loop body.
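       // As an illustrative sketch (not part of the original comment), a
       // branch such as
       //
       //   if (cond) { a(); } else { b(); } c();
       //
       // pushes an IF_ELSE_TRUE CFGState when the IFEQ is reached, builds the
       // true branch up to its stopAt pc, then the false branch, and finally
       // joins both predecessors at the block that starts c().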
  1207 bool
  1208 IonBuilder::traverseBytecode()
  1209 {
  1210     for (;;) {
  1211         JS_ASSERT(pc < info().limitPC());
  1213         for (;;) {
  1214             if (!alloc().ensureBallast())
  1215                 return false;
  1217             // Check if we've hit an expected join point or edge in the bytecode.
  1218             // Leaving one control structure could place us at the edge of another,
  1219             // thus |while| instead of |if| so we don't skip any opcodes.
  1220             if (!cfgStack_.empty() && cfgStack_.back().stopAt == pc) {
  1221                 ControlStatus status = processCfgStack();
  1222                 if (status == ControlStatus_Error)
  1223                     return false;
  1224                 if (status == ControlStatus_Abort)
  1225                     return abort("Aborted while processing control flow");
  1226                 if (!current)
  1227                     return true;
  1228                 continue;
  1229             }
  1231             // Some opcodes need to be handled early because they affect control
  1232             // flow, terminating the current basic block and/or instructing the
  1233             // traversal algorithm to continue from a new pc.
  1234             //
  1235             //   (1) If the opcode does not affect control flow, then the opcode
  1236             //       is inspected and transformed to IR. This is the process_opcode
  1237             //       label.
  1238             //   (2) A loop could be detected via a forward GOTO. In this case,
  1239             //       we don't want to process the GOTO, but the following
  1240             //       instruction.
  1241             //   (3) A RETURN, STOP, BREAK, or CONTINUE may require processing the
  1242             //       CFG stack to terminate open branches.
  1243             //
  1244             // Similar to above, snooping control flow could land us at another
  1245             // control flow point, so we iterate until it's time to inspect a real
  1246             // opcode.
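                   // For example (illustrative): a JSOP_GOTO annotated with an
                   // SRC_WHILE source note is consumed here by snoopControlFlow()
                   // and whileOrForInLoop() rather than by inspectOpcode().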
  1247             ControlStatus status;
  1248             if ((status = snoopControlFlow(JSOp(*pc))) == ControlStatus_None)
  1249                 break;
  1250             if (status == ControlStatus_Error)
  1251                 return false;
  1252             if (status == ControlStatus_Abort)
  1253                 return abort("Aborted while processing control flow");
  1254             if (!current)
  1255                 return true;
  1256         }
  1258 #ifdef DEBUG
  1259         // In debug builds, after compiling this op, check that all values
  1260         // popped by this opcode either:
  1261         //
  1262         //   (1) Have the ImplicitlyUsed flag set on them.
  1263         //   (2) Have more uses than before compiling this op (the value is
  1264         //       used as operand of a new MIR instruction).
  1265         //
  1266         // This is used to catch problems where IonBuilder pops a value without
  1267         // adding any SSA uses and doesn't call setImplicitlyUsedUnchecked on it.
  1268         Vector<MDefinition *, 4, IonAllocPolicy> popped(alloc());
  1269         Vector<size_t, 4, IonAllocPolicy> poppedUses(alloc());
  1270         unsigned nuses = GetUseCount(script_, script_->pcToOffset(pc));
  1272         for (unsigned i = 0; i < nuses; i++) {
  1273             MDefinition *def = current->peek(-int32_t(i + 1));
  1274             if (!popped.append(def) || !poppedUses.append(def->defUseCount()))
  1275                 return false;
  1276         }
  1277 #endif
  1279         // Nothing in inspectOpcode() is allowed to advance the pc.
  1280         JSOp op = JSOp(*pc);
  1281         if (!inspectOpcode(op))
  1282             return false;
  1284 #ifdef DEBUG
  1285         for (size_t i = 0; i < popped.length(); i++) {
  1286             switch (op) {
  1287               case JSOP_POP:
  1288               case JSOP_POPN:
  1289               case JSOP_DUPAT:
  1290               case JSOP_DUP:
  1291               case JSOP_DUP2:
  1292               case JSOP_PICK:
  1293               case JSOP_SWAP:
  1294               case JSOP_SETARG:
  1295               case JSOP_SETLOCAL:
  1296               case JSOP_SETRVAL:
  1297               case JSOP_VOID:
  1298                 // Don't require SSA uses for values popped by these ops.
  1299                 break;
  1301               case JSOP_POS:
  1302               case JSOP_TOID:
  1303                 // These ops may leave their input on the stack without setting
  1304                 // the ImplicitlyUsed flag. If this value will be popped immediately,
  1305                 // we may replace it with |undefined|, but the difference is
  1306                 // not observable.
  1307                 JS_ASSERT(i == 0);
  1308                 if (current->peek(-1) == popped[0])
  1309                     break;
  1310                 // FALL THROUGH
  1312               default:
  1313                 JS_ASSERT(popped[i]->isImplicitlyUsed() ||
  1315                           // MNewDerivedTypedObject instances are
  1316                           // often dead unless they escape from the
  1317                           // fn. See IonBuilder::loadTypedObjectData()
  1318                           // for more details.
  1319                           popped[i]->isNewDerivedTypedObject() ||
  1321                           popped[i]->defUseCount() > poppedUses[i]);
  1322                 break;
  1323             }
  1324         }
  1325 #endif
  1327         pc += js_CodeSpec[op].length;
  1328         current->updateTrackedPc(pc);
  1329     }
  1331     return true;
  1332 }
  1334 IonBuilder::ControlStatus
  1335 IonBuilder::snoopControlFlow(JSOp op)
  1336 {
  1337     switch (op) {
  1338       case JSOP_NOP:
  1339         return maybeLoop(op, info().getNote(gsn, pc));
  1341       case JSOP_POP:
  1342         return maybeLoop(op, info().getNote(gsn, pc));
  1344       case JSOP_RETURN:
  1345       case JSOP_RETRVAL:
  1346         return processReturn(op);
  1348       case JSOP_THROW:
  1349         return processThrow();
  1351       case JSOP_GOTO:
  1352       {
  1353         jssrcnote *sn = info().getNote(gsn, pc);
  1354         switch (sn ? SN_TYPE(sn) : SRC_NULL) {
  1355           case SRC_BREAK:
  1356           case SRC_BREAK2LABEL:
  1357             return processBreak(op, sn);
  1359           case SRC_CONTINUE:
  1360             return processContinue(op);
  1362           case SRC_SWITCHBREAK:
  1363             return processSwitchBreak(op);
  1365           case SRC_WHILE:
  1366           case SRC_FOR_IN:
  1367           case SRC_FOR_OF:
  1368             // while (cond) { }
  1369             return whileOrForInLoop(sn);
  1371           default:
  1372             // Hard assert for now - make an error later.
  1373             MOZ_ASSUME_UNREACHABLE("unknown goto case");
  1374         }
  1375         break;
  1376       }
  1378       case JSOP_TABLESWITCH:
  1379         return tableSwitch(op, info().getNote(gsn, pc));
  1381       case JSOP_IFNE:
  1382         // We should never reach an IFNE, it's a stopAt point, which will
  1383         // trigger closing the loop.
  1384         MOZ_ASSUME_UNREACHABLE("we should never reach an ifne!");
  1386       default:
  1387         break;
  1388     }
  1389     return ControlStatus_None;
  1390 }
  1392 bool
  1393 IonBuilder::inspectOpcode(JSOp op)
  1394 {
  1395     switch (op) {
  1396       case JSOP_NOP:
  1397       case JSOP_LINENO:
  1398       case JSOP_LOOPENTRY:
  1399         return true;
  1401       case JSOP_LABEL:
  1402         return jsop_label();
  1404       case JSOP_UNDEFINED:
  1405         return pushConstant(UndefinedValue());
  1407       case JSOP_IFEQ:
  1408         return jsop_ifeq(JSOP_IFEQ);
  1410       case JSOP_TRY:
  1411         return jsop_try();
  1413       case JSOP_CONDSWITCH:
  1414         return jsop_condswitch();
  1416       case JSOP_BITNOT:
  1417         return jsop_bitnot();
  1419       case JSOP_BITAND:
  1420       case JSOP_BITOR:
  1421       case JSOP_BITXOR:
  1422       case JSOP_LSH:
  1423       case JSOP_RSH:
  1424       case JSOP_URSH:
  1425         return jsop_bitop(op);
  1427       case JSOP_ADD:
  1428       case JSOP_SUB:
  1429       case JSOP_MUL:
  1430       case JSOP_DIV:
  1431       case JSOP_MOD:
  1432         return jsop_binary(op);
  1434       case JSOP_POS:
  1435         return jsop_pos();
  1437       case JSOP_NEG:
  1438         return jsop_neg();
  1440       case JSOP_AND:
  1441       case JSOP_OR:
  1442         return jsop_andor(op);
  1444       case JSOP_DEFVAR:
  1445       case JSOP_DEFCONST:
  1446         return jsop_defvar(GET_UINT32_INDEX(pc));
  1448       case JSOP_DEFFUN:
  1449         return jsop_deffun(GET_UINT32_INDEX(pc));
  1451       case JSOP_EQ:
  1452       case JSOP_NE:
  1453       case JSOP_STRICTEQ:
  1454       case JSOP_STRICTNE:
  1455       case JSOP_LT:
  1456       case JSOP_LE:
  1457       case JSOP_GT:
  1458       case JSOP_GE:
  1459         return jsop_compare(op);
  1461       case JSOP_DOUBLE:
  1462         return pushConstant(info().getConst(pc));
  1464       case JSOP_STRING:
  1465         return pushConstant(StringValue(info().getAtom(pc)));
  1467       case JSOP_ZERO:
  1468         return pushConstant(Int32Value(0));
  1470       case JSOP_ONE:
  1471         return pushConstant(Int32Value(1));
  1473       case JSOP_NULL:
  1474         return pushConstant(NullValue());
  1476       case JSOP_VOID:
  1477         current->pop();
  1478         return pushConstant(UndefinedValue());
  1480       case JSOP_HOLE:
  1481         return pushConstant(MagicValue(JS_ELEMENTS_HOLE));
  1483       case JSOP_FALSE:
  1484         return pushConstant(BooleanValue(false));
  1486       case JSOP_TRUE:
  1487         return pushConstant(BooleanValue(true));
  1489       case JSOP_ARGUMENTS:
  1490         return jsop_arguments();
  1492       case JSOP_RUNONCE:
  1493         return jsop_runonce();
  1495       case JSOP_REST:
  1496         return jsop_rest();
  1498       case JSOP_GETARG:
  1499         if (info().argsObjAliasesFormals()) {
  1500             MGetArgumentsObjectArg *getArg = MGetArgumentsObjectArg::New(alloc(),
  1501                                                                          current->argumentsObject(),
  1502                                                                          GET_ARGNO(pc));
  1503             current->add(getArg);
  1504             current->push(getArg);
  1505         } else {
  1506             current->pushArg(GET_ARGNO(pc));
  1508         return true;
  1510       case JSOP_SETARG:
  1511         return jsop_setarg(GET_ARGNO(pc));
  1513       case JSOP_GETLOCAL:
  1514         current->pushLocal(GET_LOCALNO(pc));
  1515         return true;
  1517       case JSOP_SETLOCAL:
  1518         current->setLocal(GET_LOCALNO(pc));
  1519         return true;
  1521       case JSOP_POP:
  1522         current->pop();
  1524         // POP opcodes frequently appear where values are killed, e.g. after
  1525         // SET* opcodes. Place a resume point afterwards to avoid capturing
  1526         // the dead value in later snapshots, except in places where that
  1527         // resume point is obviously unnecessary.
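               // (Illustrative sketch: a statement such as |x = f();| ends in a
               // SET* opcode followed by a POP of the assignment's value; the
               // resume point inserted here keeps that dead value out of the
               // snapshots taken for later instructions.)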
  1528         if (pc[JSOP_POP_LENGTH] == JSOP_POP)
  1529             return true;
  1530         return maybeInsertResume();
  1532       case JSOP_POPN:
  1533         for (uint32_t i = 0, n = GET_UINT16(pc); i < n; i++)
  1534             current->pop();
  1535         return true;
  1537       case JSOP_DUPAT:
  1538         current->pushSlot(current->stackDepth() - 1 - GET_UINT24(pc));
  1539         return true;
  1541       case JSOP_NEWINIT:
  1542         if (GET_UINT8(pc) == JSProto_Array)
  1543             return jsop_newarray(0);
  1544         return jsop_newobject();
  1546       case JSOP_NEWARRAY:
  1547         return jsop_newarray(GET_UINT24(pc));
  1549       case JSOP_NEWOBJECT:
  1550         return jsop_newobject();
  1552       case JSOP_INITELEM:
  1553         return jsop_initelem();
  1555       case JSOP_INITELEM_ARRAY:
  1556         return jsop_initelem_array();
  1558       case JSOP_INITPROP:
  1560         PropertyName *name = info().getAtom(pc)->asPropertyName();
  1561         return jsop_initprop(name);
  1564       case JSOP_MUTATEPROTO:
  1566         return jsop_mutateproto();
  1569       case JSOP_INITPROP_GETTER:
  1570       case JSOP_INITPROP_SETTER: {
  1571         PropertyName *name = info().getAtom(pc)->asPropertyName();
  1572         return jsop_initprop_getter_setter(name);
  1575       case JSOP_INITELEM_GETTER:
  1576       case JSOP_INITELEM_SETTER:
  1577         return jsop_initelem_getter_setter();
  1579       case JSOP_ENDINIT:
  1580         return true;
  1582       case JSOP_FUNCALL:
  1583         return jsop_funcall(GET_ARGC(pc));
  1585       case JSOP_FUNAPPLY:
  1586         return jsop_funapply(GET_ARGC(pc));
  1588       case JSOP_CALL:
  1589       case JSOP_NEW:
  1590         return jsop_call(GET_ARGC(pc), (JSOp)*pc == JSOP_NEW);
  1592       case JSOP_EVAL:
  1593         return jsop_eval(GET_ARGC(pc));
  1595       case JSOP_INT8:
  1596         return pushConstant(Int32Value(GET_INT8(pc)));
  1598       case JSOP_UINT16:
  1599         return pushConstant(Int32Value(GET_UINT16(pc)));
  1601       case JSOP_GETGNAME:
  1603         PropertyName *name = info().getAtom(pc)->asPropertyName();
  1604         return jsop_getgname(name);
  1607       case JSOP_BINDGNAME:
  1608         return pushConstant(ObjectValue(script()->global()));
  1610       case JSOP_SETGNAME:
  1612         PropertyName *name = info().getAtom(pc)->asPropertyName();
  1613         JSObject *obj = &script()->global();
  1614         return setStaticName(obj, name);
  1617       case JSOP_NAME:
  1619         PropertyName *name = info().getAtom(pc)->asPropertyName();
  1620         return jsop_getname(name);
  1623       case JSOP_GETINTRINSIC:
  1625         PropertyName *name = info().getAtom(pc)->asPropertyName();
  1626         return jsop_intrinsic(name);
  1629       case JSOP_BINDNAME:
  1630         return jsop_bindname(info().getName(pc));
  1632       case JSOP_DUP:
  1633         current->pushSlot(current->stackDepth() - 1);
  1634         return true;
  1636       case JSOP_DUP2:
  1637         return jsop_dup2();
  1639       case JSOP_SWAP:
  1640         current->swapAt(-1);
  1641         return true;
  1643       case JSOP_PICK:
  1644         current->pick(-GET_INT8(pc));
  1645         return true;
  1647       case JSOP_GETALIASEDVAR:
  1648         return jsop_getaliasedvar(ScopeCoordinate(pc));
  1650       case JSOP_SETALIASEDVAR:
  1651         return jsop_setaliasedvar(ScopeCoordinate(pc));
  1653       case JSOP_UINT24:
  1654         return pushConstant(Int32Value(GET_UINT24(pc)));
  1656       case JSOP_INT32:
  1657         return pushConstant(Int32Value(GET_INT32(pc)));
  1659       case JSOP_LOOPHEAD:
  1660         // JSOP_LOOPHEAD is handled when processing the loop header.
  1661         MOZ_ASSUME_UNREACHABLE("JSOP_LOOPHEAD outside loop");
  1663       case JSOP_GETELEM:
  1664       case JSOP_CALLELEM:
  1665         return jsop_getelem();
  1667       case JSOP_SETELEM:
  1668         return jsop_setelem();
  1670       case JSOP_LENGTH:
  1671         return jsop_length();
  1673       case JSOP_NOT:
  1674         return jsop_not();
  1676       case JSOP_THIS:
  1677         return jsop_this();
  1679       case JSOP_CALLEE: {
  1680          MDefinition *callee = getCallee();
  1681          current->push(callee);
  1682          return true;
  1685       case JSOP_GETPROP:
  1686       case JSOP_CALLPROP:
  1688         PropertyName *name = info().getAtom(pc)->asPropertyName();
  1689         return jsop_getprop(name);
  1692       case JSOP_SETPROP:
  1693       case JSOP_SETNAME:
  1695         PropertyName *name = info().getAtom(pc)->asPropertyName();
  1696         return jsop_setprop(name);
  1699       case JSOP_DELPROP:
  1701         PropertyName *name = info().getAtom(pc)->asPropertyName();
  1702         return jsop_delprop(name);
  1705       case JSOP_DELELEM:
  1706         return jsop_delelem();
  1708       case JSOP_REGEXP:
  1709         return jsop_regexp(info().getRegExp(pc));
  1711       case JSOP_OBJECT:
  1712         return jsop_object(info().getObject(pc));
  1714       case JSOP_TYPEOF:
  1715       case JSOP_TYPEOFEXPR:
  1716         return jsop_typeof();
  1718       case JSOP_TOID:
  1719         return jsop_toid();
  1721       case JSOP_LAMBDA:
  1722         return jsop_lambda(info().getFunction(pc));
  1724       case JSOP_LAMBDA_ARROW:
  1725         return jsop_lambda_arrow(info().getFunction(pc));
  1727       case JSOP_ITER:
  1728         return jsop_iter(GET_INT8(pc));
  1730       case JSOP_ITERNEXT:
  1731         return jsop_iternext();
  1733       case JSOP_MOREITER:
  1734         return jsop_itermore();
  1736       case JSOP_ENDITER:
  1737         return jsop_iterend();
  1739       case JSOP_IN:
  1740         return jsop_in();
  1742       case JSOP_SETRVAL:
  1743         JS_ASSERT(!script()->noScriptRval());
  1744         current->setSlot(info().returnValueSlot(), current->pop());
  1745         return true;
  1747       case JSOP_INSTANCEOF:
  1748         return jsop_instanceof();
  1750       case JSOP_DEBUGLEAVEBLOCK:
  1751         return true;
  1753       default:
  1754 #ifdef DEBUG
  1755         return abort("Unsupported opcode: %s (line %d)", js_CodeName[op], info().lineno(pc));
  1756 #else
  1757         return abort("Unsupported opcode: %d (line %d)", op, info().lineno(pc));
  1758 #endif
  1762 // Given that the current control flow structure has ended forcefully,
  1763 // via a return, break, or continue (rather than joining), propagate the
  1764 // termination up. For example, a return nested 5 loops deep may terminate
  1765 // every outer loop at once, if there are no intervening conditionals:
  1766 //
  1767 // for (...) {
  1768 //   for (...) {
  1769 //     return x;
  1770 //   }
  1771 // }
  1772 //
  1773 // If |current| is nullptr when this function returns, then there is no more
  1774 // control flow to be processed.
  1775 IonBuilder::ControlStatus
  1776 IonBuilder::processControlEnd()
  1778     JS_ASSERT(!current);
  1780     if (cfgStack_.empty()) {
  1781         // If there is no more control flow to process, then this is the
  1782         // last return in the function.
  1783         return ControlStatus_Ended;
  1786     return processCfgStack();
  1789 // Processes the top of the CFG stack. This is used from two places:
  1790 // (1) processControlEnd(), whereby a break, continue, or return may interrupt
  1791 //     an in-progress CFG structure before reaching its actual termination
  1792 //     point in the bytecode.
  1793 // (2) traverseBytecode(), whereby we reach the last instruction in a CFG
  1794 //     structure.
  1795 IonBuilder::ControlStatus
  1796 IonBuilder::processCfgStack()
  1798     ControlStatus status = processCfgEntry(cfgStack_.back());
  1800     // If this terminated a CFG structure, act like processControlEnd() and
  1801     // keep propagating upward.
  1802     while (status == ControlStatus_Ended) {
  1803         popCfgStack();
  1804         if (cfgStack_.empty())
  1805             return status;
  1806         status = processCfgEntry(cfgStack_.back());
  1809     // If some join took place, the current structure is finished.
  1810     if (status == ControlStatus_Joined)
  1811         popCfgStack();
  1813     return status;
  1816 IonBuilder::ControlStatus
  1817 IonBuilder::processCfgEntry(CFGState &state)
  1819     switch (state.state) {
  1820       case CFGState::IF_TRUE:
  1821       case CFGState::IF_TRUE_EMPTY_ELSE:
  1822         return processIfEnd(state);
  1824       case CFGState::IF_ELSE_TRUE:
  1825         return processIfElseTrueEnd(state);
  1827       case CFGState::IF_ELSE_FALSE:
  1828         return processIfElseFalseEnd(state);
  1830       case CFGState::DO_WHILE_LOOP_BODY:
  1831         return processDoWhileBodyEnd(state);
  1833       case CFGState::DO_WHILE_LOOP_COND:
  1834         return processDoWhileCondEnd(state);
  1836       case CFGState::WHILE_LOOP_COND:
  1837         return processWhileCondEnd(state);
  1839       case CFGState::WHILE_LOOP_BODY:
  1840         return processWhileBodyEnd(state);
  1842       case CFGState::FOR_LOOP_COND:
  1843         return processForCondEnd(state);
  1845       case CFGState::FOR_LOOP_BODY:
  1846         return processForBodyEnd(state);
  1848       case CFGState::FOR_LOOP_UPDATE:
  1849         return processForUpdateEnd(state);
  1851       case CFGState::TABLE_SWITCH:
  1852         return processNextTableSwitchCase(state);
  1854       case CFGState::COND_SWITCH_CASE:
  1855         return processCondSwitchCase(state);
  1857       case CFGState::COND_SWITCH_BODY:
  1858         return processCondSwitchBody(state);
  1860       case CFGState::AND_OR:
  1861         return processAndOrEnd(state);
  1863       case CFGState::LABEL:
  1864         return processLabelEnd(state);
  1866       case CFGState::TRY:
  1867         return processTryEnd(state);
  1869       default:
  1870         MOZ_ASSUME_UNREACHABLE("unknown cfgstate");
  1874 IonBuilder::ControlStatus
  1875 IonBuilder::processIfEnd(CFGState &state)
  1877     if (current) {
  1878         // Here, the false block is the join point. Create an edge from the
  1879         // current block to the false block. Note that a RETURN opcode
  1880         // could have already ended the block.
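               // (Illustrative example: in |if (c) { return; } rest();| the
               // then-branch ends in a return, |current| is then null on entry
               // to this function, and the edge below is simply not created.)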
  1881         current->end(MGoto::New(alloc(), state.branch.ifFalse));
  1883         if (!state.branch.ifFalse->addPredecessor(alloc(), current))
  1884             return ControlStatus_Error;
  1887     if (!setCurrentAndSpecializePhis(state.branch.ifFalse))
  1888         return ControlStatus_Error;
  1889     graph().moveBlockToEnd(current);
  1890     pc = current->pc();
  1891     return ControlStatus_Joined;
  1894 IonBuilder::ControlStatus
  1895 IonBuilder::processIfElseTrueEnd(CFGState &state)
  1897     // We've reached the end of the true branch of an if-else. Don't
  1898     // create an edge yet, just transition to parsing the false branch.
  1899     state.state = CFGState::IF_ELSE_FALSE;
  1900     state.branch.ifTrue = current;
  1901     state.stopAt = state.branch.falseEnd;
  1902     pc = state.branch.ifFalse->pc();
  1903     if (!setCurrentAndSpecializePhis(state.branch.ifFalse))
  1904         return ControlStatus_Error;
  1905     graph().moveBlockToEnd(current);
  1907     if (state.branch.test)
  1908         filterTypesAtTest(state.branch.test);
  1910     return ControlStatus_Jumped;
  1913 IonBuilder::ControlStatus
  1914 IonBuilder::processIfElseFalseEnd(CFGState &state)
  1916     // Update the state to have the latest block from the false path.
  1917     state.branch.ifFalse = current;
  1919     // To create the join node, we need an incoming edge that has not been
  1920     // terminated yet.
  1921     MBasicBlock *pred = state.branch.ifTrue
  1922                         ? state.branch.ifTrue
  1923                         : state.branch.ifFalse;
  1924     MBasicBlock *other = (pred == state.branch.ifTrue) ? state.branch.ifFalse : state.branch.ifTrue;
  1926     if (!pred)
  1927         return ControlStatus_Ended;
  1929     // Create a new block to represent the join.
  1930     MBasicBlock *join = newBlock(pred, state.branch.falseEnd);
  1931     if (!join)
  1932         return ControlStatus_Error;
  1934     // Create edges from the true and false blocks as needed.
  1935     pred->end(MGoto::New(alloc(), join));
  1937     if (other) {
  1938         other->end(MGoto::New(alloc(), join));
  1939         if (!join->addPredecessor(alloc(), other))
  1940             return ControlStatus_Error;
  1943     // Ignore the unreachable remainder of the false block, if it exists.
  1944     if (!setCurrentAndSpecializePhis(join))
  1945         return ControlStatus_Error;
  1946     pc = current->pc();
  1947     return ControlStatus_Joined;
  1950 IonBuilder::ControlStatus
  1951 IonBuilder::processBrokenLoop(CFGState &state)
  1953     JS_ASSERT(!current);
  1955     JS_ASSERT(loopDepth_);
  1956     loopDepth_--;
  1958     // A broken loop is not a real loop (it has no header or backedge), so
  1959     // reset the loop depth.
  1960     for (MBasicBlockIterator i(graph().begin(state.loop.entry)); i != graph().end(); i++) {
  1961         if (i->loopDepth() > loopDepth_)
  1962             i->setLoopDepth(i->loopDepth() - 1);
  1965     // If the loop started with a condition (while/for) then even if the
  1966     // structure never actually loops, the condition itself can still fail and
  1967     // thus we must resume at the successor, if one exists.
  1968     if (!setCurrentAndSpecializePhis(state.loop.successor))
  1969         return ControlStatus_Error;
  1970     if (current) {
  1971         JS_ASSERT(current->loopDepth() == loopDepth_);
  1972         graph().moveBlockToEnd(current);
  1975     // Join the breaks together and continue parsing.
  1976     if (state.loop.breaks) {
  1977         MBasicBlock *block = createBreakCatchBlock(state.loop.breaks, state.loop.exitpc);
  1978         if (!block)
  1979             return ControlStatus_Error;
  1981         if (current) {
  1982             current->end(MGoto::New(alloc(), block));
  1983             if (!block->addPredecessor(alloc(), current))
  1984                 return ControlStatus_Error;
  1987         if (!setCurrentAndSpecializePhis(block))
  1988             return ControlStatus_Error;
  1991     // If the loop is not gated on a condition, and has only returns, we'll
  1992     // reach this case. For example:
  1993     // do { ... return; } while ();
  1994     if (!current)
  1995         return ControlStatus_Ended;
  1997     // Otherwise, the loop is gated on a condition and/or has breaks so keep
  1998     // parsing at the successor.
  1999     pc = current->pc();
  2000     return ControlStatus_Joined;
  2003 IonBuilder::ControlStatus
  2004 IonBuilder::finishLoop(CFGState &state, MBasicBlock *successor)
  2006     JS_ASSERT(current);
  2008     JS_ASSERT(loopDepth_);
  2009     loopDepth_--;
  2010     JS_ASSERT_IF(successor, successor->loopDepth() == loopDepth_);
  2012     // Compute phis in the loop header and propagate them throughout the loop,
  2013     // including the successor.
  2014     AbortReason r = state.loop.entry->setBackedge(current);
  2015     if (r == AbortReason_Alloc)
  2016         return ControlStatus_Error;
  2017     if (r == AbortReason_Disable) {
  2018         // If there are types for variables on the backedge that were not
  2019         // present at the original loop header, then uses of the variables'
  2020         // phis may have generated incorrect nodes. The new types have been
  2021         // incorporated into the header phis, so remove all blocks for the
  2022         // loop body and restart with the new types.
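               // (Illustrative example: if |x| enters the loop as an Int32 but
               // the body executes |x = x / 3|, the backedge adds Double to the
               // header phi for x; MIR built from the narrower assumption is
               // stale, so the body is rebuilt.)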
  2023         return restartLoop(state);
  2026     if (successor) {
  2027         graph().moveBlockToEnd(successor);
  2028         successor->inheritPhis(state.loop.entry);
  2031     if (state.loop.breaks) {
  2032         // Propagate phis placed in the header to individual break exit points.
  2033         DeferredEdge *edge = state.loop.breaks;
  2034         while (edge) {
  2035             edge->block->inheritPhis(state.loop.entry);
  2036             edge = edge->next;
  2039         // Create a catch block to join all break exits.
  2040         MBasicBlock *block = createBreakCatchBlock(state.loop.breaks, state.loop.exitpc);
  2041         if (!block)
  2042             return ControlStatus_Error;
  2044         if (successor) {
  2045             // Finally, create an unconditional edge from the successor to the
  2046             // catch block.
  2047             successor->end(MGoto::New(alloc(), block));
  2048             if (!block->addPredecessor(alloc(), successor))
  2049                 return ControlStatus_Error;
  2051         successor = block;
  2054     if (!setCurrentAndSpecializePhis(successor))
  2055         return ControlStatus_Error;
  2057     // An infinite loop (for (;;) { }) will not have a successor.
  2058     if (!current)
  2059         return ControlStatus_Ended;
  2061     pc = current->pc();
  2062     return ControlStatus_Joined;
  2065 IonBuilder::ControlStatus
  2066 IonBuilder::restartLoop(CFGState state)
  2068     spew("New types at loop header, restarting loop body");
  2070     if (js_JitOptions.limitScriptSize) {
  2071         if (++numLoopRestarts_ >= MAX_LOOP_RESTARTS)
  2072             return ControlStatus_Abort;
  2075     MBasicBlock *header = state.loop.entry;
  2077     // Remove all blocks in the loop body other than the header, which has phis
  2078     // of the appropriate type and incoming edges to preserve.
  2079     graph().removeBlocksAfter(header);
  2081     // Remove all instructions from the header itself, and all resume points
  2082     // except the entry resume point.
  2083     header->discardAllInstructions();
  2084     header->discardAllResumePoints(/* discardEntry = */ false);
  2085     header->setStackDepth(header->getPredecessor(0)->stackDepth());
  2087     popCfgStack();
  2089     loopDepth_++;
  2091     if (!pushLoop(state.loop.initialState, state.loop.initialStopAt, header, state.loop.osr,
  2092                   state.loop.loopHead, state.loop.initialPc,
  2093                   state.loop.bodyStart, state.loop.bodyEnd,
  2094                   state.loop.exitpc, state.loop.continuepc))
  2096         return ControlStatus_Error;
  2099     CFGState &nstate = cfgStack_.back();
  2101     nstate.loop.condpc = state.loop.condpc;
  2102     nstate.loop.updatepc = state.loop.updatepc;
  2103     nstate.loop.updateEnd = state.loop.updateEnd;
  2105     // Don't specializePhis(), as the header has been visited before and the
  2106     // phis have already had their type set.
  2107     setCurrent(header);
  2109     if (!jsop_loophead(nstate.loop.loopHead))
  2110         return ControlStatus_Error;
  2112     pc = nstate.loop.initialPc;
  2113     return ControlStatus_Jumped;
  2116 IonBuilder::ControlStatus
  2117 IonBuilder::processDoWhileBodyEnd(CFGState &state)
  2119     if (!processDeferredContinues(state))
  2120         return ControlStatus_Error;
  2122     // No current means control flow cannot reach the condition, so this will
  2123     // never loop.
  2124     if (!current)
  2125         return processBrokenLoop(state);
  2127     MBasicBlock *header = newBlock(current, state.loop.updatepc);
  2128     if (!header)
  2129         return ControlStatus_Error;
  2130     current->end(MGoto::New(alloc(), header));
  2132     state.state = CFGState::DO_WHILE_LOOP_COND;
  2133     state.stopAt = state.loop.updateEnd;
  2134     pc = state.loop.updatepc;
  2135     if (!setCurrentAndSpecializePhis(header))
  2136         return ControlStatus_Error;
  2137     return ControlStatus_Jumped;
  2140 IonBuilder::ControlStatus
  2141 IonBuilder::processDoWhileCondEnd(CFGState &state)
  2143     JS_ASSERT(JSOp(*pc) == JSOP_IFNE);
  2145     // We're guaranteed a |current|; it's impossible to break or return from
  2146     // inside the conditional expression.
  2147     JS_ASSERT(current);
  2149     // Pop the last value, and create the successor block.
  2150     MDefinition *vins = current->pop();
  2151     MBasicBlock *successor = newBlock(current, GetNextPc(pc), loopDepth_ - 1);
  2152     if (!successor)
  2153         return ControlStatus_Error;
  2155     // Test for do {} while(false) and don't create a loop in that case.
  2156     if (vins->isConstant()) {
  2157         MConstant *cte = vins->toConstant();
  2158         if (cte->value().isBoolean() && !cte->value().toBoolean()) {
  2159             current->end(MGoto::New(alloc(), successor));
  2160             current = nullptr;
  2162             state.loop.successor = successor;
  2163             return processBrokenLoop(state);
  2167     // Create the test instruction and end the current block.
  2168     MTest *test = MTest::New(alloc(), vins, state.loop.entry, successor);
  2169     current->end(test);
  2170     return finishLoop(state, successor);
  2173 IonBuilder::ControlStatus
  2174 IonBuilder::processWhileCondEnd(CFGState &state)
  2176     JS_ASSERT(JSOp(*pc) == JSOP_IFNE || JSOp(*pc) == JSOP_IFEQ);
  2178     // Balance the stack past the IFNE.
  2179     MDefinition *ins = current->pop();
  2181     // Create the body and successor blocks.
  2182     MBasicBlock *body = newBlock(current, state.loop.bodyStart);
  2183     state.loop.successor = newBlock(current, state.loop.exitpc, loopDepth_ - 1);
  2184     if (!body || !state.loop.successor)
  2185         return ControlStatus_Error;
  2187     MTest *test;
  2188     if (JSOp(*pc) == JSOP_IFNE)
  2189         test = MTest::New(alloc(), ins, body, state.loop.successor);
  2190     else
  2191         test = MTest::New(alloc(), ins, state.loop.successor, body);
  2192     current->end(test);
  2194     state.state = CFGState::WHILE_LOOP_BODY;
  2195     state.stopAt = state.loop.bodyEnd;
  2196     pc = state.loop.bodyStart;
  2197     if (!setCurrentAndSpecializePhis(body))
  2198         return ControlStatus_Error;
  2199     return ControlStatus_Jumped;
  2202 IonBuilder::ControlStatus
  2203 IonBuilder::processWhileBodyEnd(CFGState &state)
  2205     if (!processDeferredContinues(state))
  2206         return ControlStatus_Error;
  2208     if (!current)
  2209         return processBrokenLoop(state);
  2211     current->end(MGoto::New(alloc(), state.loop.entry));
  2212     return finishLoop(state, state.loop.successor);
  2215 IonBuilder::ControlStatus
  2216 IonBuilder::processForCondEnd(CFGState &state)
  2218     JS_ASSERT(JSOp(*pc) == JSOP_IFNE);
  2220     // Balance the stack past the IFNE.
  2221     MDefinition *ins = current->pop();
  2223     // Create the body and successor blocks.
  2224     MBasicBlock *body = newBlock(current, state.loop.bodyStart);
  2225     state.loop.successor = newBlock(current, state.loop.exitpc, loopDepth_ - 1);
  2226     if (!body || !state.loop.successor)
  2227         return ControlStatus_Error;
  2229     MTest *test = MTest::New(alloc(), ins, body, state.loop.successor);
  2230     current->end(test);
  2232     state.state = CFGState::FOR_LOOP_BODY;
  2233     state.stopAt = state.loop.bodyEnd;
  2234     pc = state.loop.bodyStart;
  2235     if (!setCurrentAndSpecializePhis(body))
  2236         return ControlStatus_Error;
  2237     return ControlStatus_Jumped;
  2240 IonBuilder::ControlStatus
  2241 IonBuilder::processForBodyEnd(CFGState &state)
  2243     if (!processDeferredContinues(state))
  2244         return ControlStatus_Error;
  2246     // If there is no updatepc, just go right to processing what would be the
  2247     // end of the update clause. Otherwise, |current| might be nullptr; if this is
  2248     // the case, the update is unreachable anyway.
  2249     if (!state.loop.updatepc || !current)
  2250         return processForUpdateEnd(state);
  2252     pc = state.loop.updatepc;
  2254     state.state = CFGState::FOR_LOOP_UPDATE;
  2255     state.stopAt = state.loop.updateEnd;
  2256     return ControlStatus_Jumped;
  2259 IonBuilder::ControlStatus
  2260 IonBuilder::processForUpdateEnd(CFGState &state)
  2262     // If there is no current, we couldn't reach the loop edge and there was no
  2263     // update clause.
  2264     if (!current)
  2265         return processBrokenLoop(state);
  2267     current->end(MGoto::New(alloc(), state.loop.entry));
  2268     return finishLoop(state, state.loop.successor);
  2271 IonBuilder::DeferredEdge *
  2272 IonBuilder::filterDeadDeferredEdges(DeferredEdge *edge)
  2274     DeferredEdge *head = edge, *prev = nullptr;
  2276     while (edge) {
  2277         if (edge->block->isDead()) {
  2278             if (prev)
  2279                 prev->next = edge->next;
  2280             else
  2281                 head = edge->next;
  2282         } else {
  2283             prev = edge;
  2285         edge = edge->next;
  2288     // There must be at least one deferred edge from a block that was not
  2289     // deleted; blocks are deleted when restarting processing of a loop, and
  2290     // the final version of the loop body will have edges from live blocks.
  2291     JS_ASSERT(head);
  2293     return head;
  2296 bool
  2297 IonBuilder::processDeferredContinues(CFGState &state)
  2299     // If there are any continues for this loop, and there is an update block,
  2300     // then we need to create a new basic block to house the update.
  2301     if (state.loop.continues) {
  2302         DeferredEdge *edge = filterDeadDeferredEdges(state.loop.continues);
  2304         MBasicBlock *update = newBlock(edge->block, loops_.back().continuepc);
  2305         if (!update)
  2306             return false;
  2308         if (current) {
  2309             current->end(MGoto::New(alloc(), update));
  2310             if (!update->addPredecessor(alloc(), current))
  2311                 return false;
  2314         // No need to use addPredecessor for the first edge,
  2315         // because it is already a predecessor.
  2316         edge->block->end(MGoto::New(alloc(), update));
  2317         edge = edge->next;
  2319         // Remaining edges
  2320         while (edge) {
  2321             edge->block->end(MGoto::New(alloc(), update));
  2322             if (!update->addPredecessor(alloc(), edge->block))
  2323                 return false;
  2324             edge = edge->next;
  2326         state.loop.continues = nullptr;
  2328         if (!setCurrentAndSpecializePhis(update))
  2329             return false;
  2332     return true;
  2335 MBasicBlock *
  2336 IonBuilder::createBreakCatchBlock(DeferredEdge *edge, jsbytecode *pc)
  2338     edge = filterDeadDeferredEdges(edge);
  2340     // Create block, using the first break statement as predecessor
  2341     MBasicBlock *successor = newBlock(edge->block, pc);
  2342     if (!successor)
  2343         return nullptr;
  2345     // No need to use addPredecessor for the first edge,
  2346     // because it is already a predecessor.
  2347     edge->block->end(MGoto::New(alloc(), successor));
  2348     edge = edge->next;
  2350     // Finish up remaining breaks.
  2351     while (edge) {
  2352         edge->block->end(MGoto::New(alloc(), successor));
  2353         if (!successor->addPredecessor(alloc(), edge->block))
  2354             return nullptr;
  2355         edge = edge->next;
  2358     return successor;
  2361 IonBuilder::ControlStatus
  2362 IonBuilder::processNextTableSwitchCase(CFGState &state)
  2364     JS_ASSERT(state.state == CFGState::TABLE_SWITCH);
  2366     state.tableswitch.currentBlock++;
  2368     // Test if there are still unprocessed successors (cases/default)
  2369     if (state.tableswitch.currentBlock >= state.tableswitch.ins->numBlocks())
  2370         return processSwitchEnd(state.tableswitch.breaks, state.tableswitch.exitpc);
  2372     // Get the next successor
  2373     MBasicBlock *successor = state.tableswitch.ins->getBlock(state.tableswitch.currentBlock);
  2375     // Add current block as predecessor if available.
  2376     // This means the previous case didn't have a break statement.
  2377     // So flow will continue in this block.
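           // (Illustrative example: in |case 1: a(); case 2: b();| the block
           // for case 1 has no break, so it is added here as a predecessor of
           // the block for case 2.)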
  2378     if (current) {
  2379         current->end(MGoto::New(alloc(), successor));
  2380         if (!successor->addPredecessor(alloc(), current))
  2381             return ControlStatus_Error;
  2384     // Insert successor after the current block, to maintain RPO.
  2385     graph().moveBlockToEnd(successor);
  2387     // If this is the last successor, the block should stop at the end of the
  2388     // tableswitch; otherwise it should stop at the start of the next successor.
  2389     if (state.tableswitch.currentBlock+1 < state.tableswitch.ins->numBlocks())
  2390         state.stopAt = state.tableswitch.ins->getBlock(state.tableswitch.currentBlock+1)->pc();
  2391     else
  2392         state.stopAt = state.tableswitch.exitpc;
  2394     if (!setCurrentAndSpecializePhis(successor))
  2395         return ControlStatus_Error;
  2396     pc = current->pc();
  2397     return ControlStatus_Jumped;
  2400 IonBuilder::ControlStatus
  2401 IonBuilder::processAndOrEnd(CFGState &state)
  2403     // We just processed the RHS of an && or || expression.
  2404     // Now jump to the join point (the false block).
  2405     current->end(MGoto::New(alloc(), state.branch.ifFalse));
  2407     if (!state.branch.ifFalse->addPredecessor(alloc(), current))
  2408         return ControlStatus_Error;
  2410     if (!setCurrentAndSpecializePhis(state.branch.ifFalse))
  2411         return ControlStatus_Error;
  2412     graph().moveBlockToEnd(current);
  2413     pc = current->pc();
  2414     return ControlStatus_Joined;
  2417 IonBuilder::ControlStatus
  2418 IonBuilder::processLabelEnd(CFGState &state)
  2420     JS_ASSERT(state.state == CFGState::LABEL);
  2422     // If there are no breaks and no current, control flow is terminated.
  2423     if (!state.label.breaks && !current)
  2424         return ControlStatus_Ended;
  2426     // If there are no breaks to this label, there's nothing to do.
  2427     if (!state.label.breaks)
  2428         return ControlStatus_Joined;
  2430     MBasicBlock *successor = createBreakCatchBlock(state.label.breaks, state.stopAt);
  2431     if (!successor)
  2432         return ControlStatus_Error;
  2434     if (current) {
  2435         current->end(MGoto::New(alloc(), successor));
  2436         if (!successor->addPredecessor(alloc(), current))
  2437             return ControlStatus_Error;
  2440     pc = state.stopAt;
  2441     if (!setCurrentAndSpecializePhis(successor))
  2442         return ControlStatus_Error;
  2443     return ControlStatus_Joined;
  2446 IonBuilder::ControlStatus
  2447 IonBuilder::processTryEnd(CFGState &state)
  2449     JS_ASSERT(state.state == CFGState::TRY);
  2451     if (!state.try_.successor) {
  2452         JS_ASSERT(!current);
  2453         return ControlStatus_Ended;
  2456     if (current) {
  2457         current->end(MGoto::New(alloc(), state.try_.successor));
  2459         if (!state.try_.successor->addPredecessor(alloc(), current))
  2460             return ControlStatus_Error;
  2463     // Start parsing the code after this try-catch statement.
  2464     if (!setCurrentAndSpecializePhis(state.try_.successor))
  2465         return ControlStatus_Error;
  2466     graph().moveBlockToEnd(current);
  2467     pc = current->pc();
  2468     return ControlStatus_Joined;
  2471 IonBuilder::ControlStatus
  2472 IonBuilder::processBreak(JSOp op, jssrcnote *sn)
  2474     JS_ASSERT(op == JSOP_GOTO);
  2476     JS_ASSERT(SN_TYPE(sn) == SRC_BREAK ||
  2477               SN_TYPE(sn) == SRC_BREAK2LABEL);
  2479     // Find the break target.
  2480     jsbytecode *target = pc + GetJumpOffset(pc);
  2481     DebugOnly<bool> found = false;
  2483     if (SN_TYPE(sn) == SRC_BREAK2LABEL) {
  2484         for (size_t i = labels_.length() - 1; i < labels_.length(); i--) {
  2485             CFGState &cfg = cfgStack_[labels_[i].cfgEntry];
  2486             JS_ASSERT(cfg.state == CFGState::LABEL);
  2487             if (cfg.stopAt == target) {
  2488                 cfg.label.breaks = new(alloc()) DeferredEdge(current, cfg.label.breaks);
  2489                 found = true;
  2490                 break;
  2493     } else {
  2494         for (size_t i = loops_.length() - 1; i < loops_.length(); i--) {
  2495             CFGState &cfg = cfgStack_[loops_[i].cfgEntry];
  2496             JS_ASSERT(cfg.isLoop());
  2497             if (cfg.loop.exitpc == target) {
  2498                 cfg.loop.breaks = new(alloc()) DeferredEdge(current, cfg.loop.breaks);
  2499                 found = true;
  2500                 break;
  2505     JS_ASSERT(found);
  2507     setCurrent(nullptr);
  2508     pc += js_CodeSpec[op].length;
  2509     return processControlEnd();
  2512 static inline jsbytecode *
  2513 EffectiveContinue(jsbytecode *pc)
  2515     if (JSOp(*pc) == JSOP_GOTO)
  2516         return pc + GetJumpOffset(pc);
  2517     return pc;
  2520 IonBuilder::ControlStatus
  2521 IonBuilder::processContinue(JSOp op)
  2523     JS_ASSERT(op == JSOP_GOTO);
  2525     // Find the target loop.
  2526     CFGState *found = nullptr;
  2527     jsbytecode *target = pc + GetJumpOffset(pc);
  2528     for (size_t i = loops_.length() - 1; i < loops_.length(); i--) {
  2529         if (loops_[i].continuepc == target ||
  2530             EffectiveContinue(loops_[i].continuepc) == target)
  2532             found = &cfgStack_[loops_[i].cfgEntry];
  2533             break;
  2537     // There must always be a valid target loop structure. If not, there's
  2538     // probably an off-by-something error in which pc we track.
  2539     JS_ASSERT(found);
  2540     CFGState &state = *found;
  2542     state.loop.continues = new(alloc()) DeferredEdge(current, state.loop.continues);
  2544     setCurrent(nullptr);
  2545     pc += js_CodeSpec[op].length;
  2546     return processControlEnd();
  2549 IonBuilder::ControlStatus
  2550 IonBuilder::processSwitchBreak(JSOp op)
  2552     JS_ASSERT(op == JSOP_GOTO);
  2554     // Find the target switch.
  2555     CFGState *found = nullptr;
  2556     jsbytecode *target = pc + GetJumpOffset(pc);
  2557     for (size_t i = switches_.length() - 1; i < switches_.length(); i--) {
  2558         if (switches_[i].continuepc == target) {
  2559             found = &cfgStack_[switches_[i].cfgEntry];
  2560             break;
  2564     // There must always be a valid target switch structure. If not, there's
  2565     // probably an off-by-something error in which pc we track.
  2566     JS_ASSERT(found);
  2567     CFGState &state = *found;
  2569     DeferredEdge **breaks = nullptr;
  2570     switch (state.state) {
  2571       case CFGState::TABLE_SWITCH:
  2572         breaks = &state.tableswitch.breaks;
  2573         break;
  2574       case CFGState::COND_SWITCH_BODY:
  2575         breaks = &state.condswitch.breaks;
  2576         break;
  2577       default:
  2578         MOZ_ASSUME_UNREACHABLE("Unexpected switch state.");
  2581     *breaks = new(alloc()) DeferredEdge(current, *breaks);
  2583     setCurrent(nullptr);
  2584     pc += js_CodeSpec[op].length;
  2585     return processControlEnd();
  2588 IonBuilder::ControlStatus
  2589 IonBuilder::processSwitchEnd(DeferredEdge *breaks, jsbytecode *exitpc)
  2591     // No break statements, no current.
  2592     // This means that control flow is cut off from this point
  2593     // (e.g. all cases have return statements).
  2594     if (!breaks && !current)
  2595         return ControlStatus_Ended;
  2597     // Create successor block.
  2598     // If there are breaks, create the block with the breaks as predecessors;
  2599     // otherwise create a block with current as the predecessor.
  2600     MBasicBlock *successor = nullptr;
  2601     if (breaks)
  2602         successor = createBreakCatchBlock(breaks, exitpc);
  2603     else
  2604         successor = newBlock(current, exitpc);
  2606     if (!successor)
  2607         return ControlStatus_Ended;
  2609     // If there is current, the current block flows into this one.
  2610     // So current is also a predecessor to this block
  2611     if (current) {
  2612         current->end(MGoto::New(alloc(), successor));
  2613         if (breaks) {
  2614             if (!successor->addPredecessor(alloc(), current))
  2615                 return ControlStatus_Error;
  2619     pc = exitpc;
  2620     if (!setCurrentAndSpecializePhis(successor))
  2621         return ControlStatus_Error;
  2622     return ControlStatus_Joined;
  2625 IonBuilder::ControlStatus
  2626 IonBuilder::maybeLoop(JSOp op, jssrcnote *sn)
  2628     // This function looks at the opcode and source note and tries to
  2629     // determine the structure of the loop. For some opcodes, like
  2630     // POP/NOP which are not explicitly control flow, this source note is
  2631     // optional. For opcodes with control flow, like GOTO, an unrecognized
  2632     // or not-present source note is a compilation failure.
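           // Roughly, per the cases below: JSOP_POP with an SRC_FOR note starts
           // a |for| loop whose init left a value to pop, JSOP_NOP with
           // SRC_WHILE starts a do-while loop, and JSOP_NOP with SRC_FOR starts
           // a |for| loop whose init left nothing to pop; anything else is not
           // treated as a loop here.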
  2633     switch (op) {
  2634       case JSOP_POP:
  2635         // for (init; ; update?) ...
  2636         if (sn && SN_TYPE(sn) == SRC_FOR) {
  2637             current->pop();
  2638             return forLoop(op, sn);
  2640         break;
  2642       case JSOP_NOP:
  2643         if (sn) {
  2644             // do { } while (cond)
  2645             if (SN_TYPE(sn) == SRC_WHILE)
  2646                 return doWhileLoop(op, sn);
  2647             // Build a mapping such that given a basic block, whose successor
  2648             // has a phi
  2650             // for (; ; update?)
  2651             if (SN_TYPE(sn) == SRC_FOR)
  2652                 return forLoop(op, sn);
  2654         break;
  2656       default:
  2657         MOZ_ASSUME_UNREACHABLE("unexpected opcode");
  2660     return ControlStatus_None;
  2663 void
  2664 IonBuilder::assertValidLoopHeadOp(jsbytecode *pc)
  2666 #ifdef DEBUG
  2667     JS_ASSERT(JSOp(*pc) == JSOP_LOOPHEAD);
  2669     // Make sure this is the next opcode after the loop header,
  2670     // unless the for loop is unconditional.
  2671     CFGState &state = cfgStack_.back();
  2672     JS_ASSERT_IF((JSOp)*(state.loop.entry->pc()) == JSOP_GOTO,
  2673         GetNextPc(state.loop.entry->pc()) == pc);
  2675     // do-while loops have a source note.
  2676     jssrcnote *sn = info().getNote(gsn, pc);
  2677     if (sn) {
  2678         jsbytecode *ifne = pc + js_GetSrcNoteOffset(sn, 0);
  2680         jsbytecode *expected_ifne;
  2681         switch (state.state) {
  2682           case CFGState::DO_WHILE_LOOP_BODY:
  2683             expected_ifne = state.loop.updateEnd;
  2684             break;
  2686           default:
  2687             MOZ_ASSUME_UNREACHABLE("JSOP_LOOPHEAD unexpected source note");
  2690         // Make sure this loop goes to the same ifne as the loop header's
  2691         // source notes or GOTO.
  2692         JS_ASSERT(ifne == expected_ifne);
  2693     } else {
  2694         JS_ASSERT(state.state != CFGState::DO_WHILE_LOOP_BODY);
  2696 #endif
  2699 IonBuilder::ControlStatus
  2700 IonBuilder::doWhileLoop(JSOp op, jssrcnote *sn)
  2702     // do { } while() loops have the following structure:
  2703     //    NOP         ; SRC_WHILE (offset to COND)
  2704     //    LOOPHEAD    ; SRC_WHILE (offset to IFNE)
  2705     //    LOOPENTRY
  2706     //    ...         ; body
  2707     //    ...
  2708     //    COND        ; start of condition
  2709     //    ...
  2710     //    IFNE ->     ; goes to LOOPHEAD
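           // For illustration, source such as
           //     do { body(); } while (cond());
           // takes this shape: the body bytecode sits between LOOPENTRY and
           // COND, and the IFNE branches back to LOOPHEAD while a false
           // condition falls through to the bytecode after the IFNE.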
  2711     int condition_offset = js_GetSrcNoteOffset(sn, 0);
  2712     jsbytecode *conditionpc = pc + condition_offset;
  2714     jssrcnote *sn2 = info().getNote(gsn, pc+1);
  2715     int offset = js_GetSrcNoteOffset(sn2, 0);
  2716     jsbytecode *ifne = pc + offset + 1;
  2717     JS_ASSERT(ifne > pc);
  2719     // Verify that the IFNE goes back to a loophead op.
  2720     jsbytecode *loopHead = GetNextPc(pc);
  2721     JS_ASSERT(JSOp(*loopHead) == JSOP_LOOPHEAD);
  2722     JS_ASSERT(loopHead == ifne + GetJumpOffset(ifne));
  2724     jsbytecode *loopEntry = GetNextPc(loopHead);
  2725     bool canOsr = LoopEntryCanIonOsr(loopEntry);
  2726     bool osr = info().hasOsrAt(loopEntry);
  2728     if (osr) {
  2729         MBasicBlock *preheader = newOsrPreheader(current, loopEntry);
  2730         if (!preheader)
  2731             return ControlStatus_Error;
  2732         current->end(MGoto::New(alloc(), preheader));
  2733         if (!setCurrentAndSpecializePhis(preheader))
  2734             return ControlStatus_Error;
  2737     unsigned stackPhiCount = 0;
  2738     MBasicBlock *header = newPendingLoopHeader(current, pc, osr, canOsr, stackPhiCount);
  2739     if (!header)
  2740         return ControlStatus_Error;
  2741     current->end(MGoto::New(alloc(), header));
  2743     jsbytecode *loophead = GetNextPc(pc);
  2744     jsbytecode *bodyStart = GetNextPc(loophead);
  2745     jsbytecode *bodyEnd = conditionpc;
  2746     jsbytecode *exitpc = GetNextPc(ifne);
  2747     if (!analyzeNewLoopTypes(header, bodyStart, exitpc))
  2748         return ControlStatus_Error;
  2749     if (!pushLoop(CFGState::DO_WHILE_LOOP_BODY, conditionpc, header, osr,
  2750                   loopHead, bodyStart, bodyStart, bodyEnd, exitpc, conditionpc))
  2752         return ControlStatus_Error;
  2755     CFGState &state = cfgStack_.back();
  2756     state.loop.updatepc = conditionpc;
  2757     state.loop.updateEnd = ifne;
  2759     if (!setCurrentAndSpecializePhis(header))
  2760         return ControlStatus_Error;
  2761     if (!jsop_loophead(loophead))
  2762         return ControlStatus_Error;
  2764     pc = bodyStart;
  2765     return ControlStatus_Jumped;
  2768 IonBuilder::ControlStatus
  2769 IonBuilder::whileOrForInLoop(jssrcnote *sn)
  2771     // while (cond) { } loops have the following structure:
  2772     //    GOTO cond   ; SRC_WHILE (offset to IFNE)
  2773     //    LOOPHEAD
  2774     //    ...
  2775     //  cond:
  2776     //    LOOPENTRY
  2777     //    ...
  2778     //    IFNE        ; goes to LOOPHEAD
  2779     // for (x in y) { } loops are similar; the cond will be a MOREITER.
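           // For illustration, source such as
           //     while (i < n) { body(); }
           // takes this shape: the leading GOTO jumps straight to the
           // condition, the body follows LOOPHEAD, and the IFNE branches back
           // to LOOPHEAD while a false condition falls through to the exit.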
  2780     JS_ASSERT(SN_TYPE(sn) == SRC_FOR_OF || SN_TYPE(sn) == SRC_FOR_IN || SN_TYPE(sn) == SRC_WHILE);
  2781     int ifneOffset = js_GetSrcNoteOffset(sn, 0);
  2782     jsbytecode *ifne = pc + ifneOffset;
  2783     JS_ASSERT(ifne > pc);
  2785     // Verify that the IFNE goes back to a loophead op.
  2786     JS_ASSERT(JSOp(*GetNextPc(pc)) == JSOP_LOOPHEAD);
  2787     JS_ASSERT(GetNextPc(pc) == ifne + GetJumpOffset(ifne));
  2789     jsbytecode *loopEntry = pc + GetJumpOffset(pc);
  2790     bool canOsr = LoopEntryCanIonOsr(loopEntry);
  2791     bool osr = info().hasOsrAt(loopEntry);
  2793     if (osr) {
  2794         MBasicBlock *preheader = newOsrPreheader(current, loopEntry);
  2795         if (!preheader)
  2796             return ControlStatus_Error;
  2797         current->end(MGoto::New(alloc(), preheader));
  2798         if (!setCurrentAndSpecializePhis(preheader))
  2799             return ControlStatus_Error;
  2802     unsigned stackPhiCount;
  2803     if (SN_TYPE(sn) == SRC_FOR_OF)
  2804         stackPhiCount = 2;
  2805     else if (SN_TYPE(sn) == SRC_FOR_IN)
  2806         stackPhiCount = 1;
  2807     else
  2808         stackPhiCount = 0;
  2810     MBasicBlock *header = newPendingLoopHeader(current, pc, osr, canOsr, stackPhiCount);
  2811     if (!header)
  2812         return ControlStatus_Error;
  2813     current->end(MGoto::New(alloc(), header));
  2815     // Skip past the JSOP_LOOPHEAD for the body start.
  2816     jsbytecode *loopHead = GetNextPc(pc);
  2817     jsbytecode *bodyStart = GetNextPc(loopHead);
  2818     jsbytecode *bodyEnd = pc + GetJumpOffset(pc);
  2819     jsbytecode *exitpc = GetNextPc(ifne);
  2820     if (!analyzeNewLoopTypes(header, bodyStart, exitpc))
  2821         return ControlStatus_Error;
  2822     if (!pushLoop(CFGState::WHILE_LOOP_COND, ifne, header, osr,
  2823                   loopHead, bodyEnd, bodyStart, bodyEnd, exitpc))
  2825         return ControlStatus_Error;
  2828     // Parse the condition first.
  2829     if (!setCurrentAndSpecializePhis(header))
  2830         return ControlStatus_Error;
  2831     if (!jsop_loophead(loopHead))
  2832         return ControlStatus_Error;
  2834     pc = bodyEnd;
  2835     return ControlStatus_Jumped;
  2838 IonBuilder::ControlStatus
  2839 IonBuilder::forLoop(JSOp op, jssrcnote *sn)
  2841     // Skip the NOP or POP.
  2842     JS_ASSERT(op == JSOP_POP || op == JSOP_NOP);
  2843     pc = GetNextPc(pc);
  2845     jsbytecode *condpc = pc + js_GetSrcNoteOffset(sn, 0);
  2846     jsbytecode *updatepc = pc + js_GetSrcNoteOffset(sn, 1);
  2847     jsbytecode *ifne = pc + js_GetSrcNoteOffset(sn, 2);
  2848     jsbytecode *exitpc = GetNextPc(ifne);
  2850     // for loops have the following structures:
  2851     //
  2852     //   NOP or POP
  2853     //   [GOTO cond | NOP]
  2854     //   LOOPHEAD
  2855     // body:
  2856     //    ; [body]
  2857     // [increment:]
  2858     //    ; [increment]
  2859     // [cond:]
  2860     //   LOOPENTRY
  2861     //   GOTO body
  2862     //
  2863     // If there is a condition (condpc != ifne), this acts similarly to a while
  2864     // loop; otherwise, it acts like a do-while loop.
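           // For illustration:
           //     for (i = 0; i < n; i++) { body(); }
           // has condpc != ifne and is handled like a while loop, whereas
           //     for (i = 0; ; i++) { body(); }
           // has no condition block and is handled like a do-while loop.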
  2865     jsbytecode *bodyStart = pc;
  2866     jsbytecode *bodyEnd = updatepc;
  2867     jsbytecode *loopEntry = condpc;
  2868     if (condpc != ifne) {
  2869         JS_ASSERT(JSOp(*bodyStart) == JSOP_GOTO);
  2870         JS_ASSERT(bodyStart + GetJumpOffset(bodyStart) == condpc);
  2871         bodyStart = GetNextPc(bodyStart);
  2872     } else {
  2873         // No loop condition, such as for(j = 0; ; j++)
  2874         if (op != JSOP_NOP) {
  2875             // If the loop starts with POP, we have to skip a NOP.
  2876             JS_ASSERT(JSOp(*bodyStart) == JSOP_NOP);
  2877             bodyStart = GetNextPc(bodyStart);
  2879         loopEntry = GetNextPc(bodyStart);
  2881     jsbytecode *loopHead = bodyStart;
  2882     JS_ASSERT(JSOp(*bodyStart) == JSOP_LOOPHEAD);
  2883     JS_ASSERT(ifne + GetJumpOffset(ifne) == bodyStart);
  2884     bodyStart = GetNextPc(bodyStart);
  2886     bool osr = info().hasOsrAt(loopEntry);
  2887     bool canOsr = LoopEntryCanIonOsr(loopEntry);
  2889     if (osr) {
  2890         MBasicBlock *preheader = newOsrPreheader(current, loopEntry);
  2891         if (!preheader)
  2892             return ControlStatus_Error;
  2893         current->end(MGoto::New(alloc(), preheader));
  2894         if (!setCurrentAndSpecializePhis(preheader))
  2895             return ControlStatus_Error;
  2898     unsigned stackPhiCount = 0;
  2899     MBasicBlock *header = newPendingLoopHeader(current, pc, osr, canOsr, stackPhiCount);
  2900     if (!header)
  2901         return ControlStatus_Error;
  2902     current->end(MGoto::New(alloc(), header));
  2904     // If there is no condition, we immediately parse the body. Otherwise, we
  2905     // parse the condition.
  2906     jsbytecode *stopAt;
  2907     CFGState::State initial;
  2908     if (condpc != ifne) {
  2909         pc = condpc;
  2910         stopAt = ifne;
  2911         initial = CFGState::FOR_LOOP_COND;
  2912     } else {
  2913         pc = bodyStart;
  2914         stopAt = bodyEnd;
  2915         initial = CFGState::FOR_LOOP_BODY;
  2918     if (!analyzeNewLoopTypes(header, bodyStart, exitpc))
  2919         return ControlStatus_Error;
  2920     if (!pushLoop(initial, stopAt, header, osr,
  2921                   loopHead, pc, bodyStart, bodyEnd, exitpc, updatepc))
  2923         return ControlStatus_Error;
  2926     CFGState &state = cfgStack_.back();
  2927     state.loop.condpc = (condpc != ifne) ? condpc : nullptr;
  2928     state.loop.updatepc = (updatepc != condpc) ? updatepc : nullptr;
  2929     if (state.loop.updatepc)
  2930         state.loop.updateEnd = condpc;
  2932     if (!setCurrentAndSpecializePhis(header))
  2933         return ControlStatus_Error;
  2934     if (!jsop_loophead(loopHead))
  2935         return ControlStatus_Error;
  2937     return ControlStatus_Jumped;
  2940 int
  2941 IonBuilder::CmpSuccessors(const void *a, const void *b)
  2943     const MBasicBlock *a0 = * (MBasicBlock * const *)a;
  2944     const MBasicBlock *b0 = * (MBasicBlock * const *)b;
  2945     if (a0->pc() == b0->pc())
  2946         return 0;
  2948     return (a0->pc() > b0->pc()) ? 1 : -1;
  2951 IonBuilder::ControlStatus
  2952 IonBuilder::tableSwitch(JSOp op, jssrcnote *sn)
  2954     // TableSwitch op contains the following data
  2955     // (consecutive entries are JUMP_OFFSET_LEN apart)
  2956     //
  2957     // 0: Offset of default case
  2958     // 1: Lowest number in tableswitch
  2959     // 2: Highest number in tableswitch
  2960     // 3: Offset of case low
  2961     // 4: Offset of case low+1
  2962     // .: ...
  2963     // .: Offset of case high
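           // For illustration, a switch such as
           //     switch (x) { case 3: a(); break; case 5: b(); break; }
           // has low == 3 and high == 5, and the table carries one jump offset
           // for every value from 3 through 5, even though case 4 is never
           // written out in the source.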
  2965     JS_ASSERT(op == JSOP_TABLESWITCH);
  2966     JS_ASSERT(SN_TYPE(sn) == SRC_TABLESWITCH);
  2968     // Pop input.
  2969     MDefinition *ins = current->pop();
  2971     // Get the default and exit pc
  2972     jsbytecode *exitpc = pc + js_GetSrcNoteOffset(sn, 0);
  2973     jsbytecode *defaultpc = pc + GET_JUMP_OFFSET(pc);
  2975     JS_ASSERT(defaultpc > pc && defaultpc <= exitpc);
  2977     // Get the low and high from the tableswitch
  2978     jsbytecode *pc2 = pc;
  2979     pc2 += JUMP_OFFSET_LEN;
  2980     int low = GET_JUMP_OFFSET(pc2);
  2981     pc2 += JUMP_OFFSET_LEN;
  2982     int high = GET_JUMP_OFFSET(pc2);
  2983     pc2 += JUMP_OFFSET_LEN;
  2985     // Create MIR instruction
  2986     MTableSwitch *tableswitch = MTableSwitch::New(alloc(), ins, low, high);
  2988     // Create default case
  2989     MBasicBlock *defaultcase = newBlock(current, defaultpc);
  2990     if (!defaultcase)
  2991         return ControlStatus_Error;
  2992     tableswitch->addDefault(defaultcase);
  2993     tableswitch->addBlock(defaultcase);
  2995     // Create cases
  2996     jsbytecode *casepc = nullptr;
  2997     for (int i = 0; i < high-low+1; i++) {
  2998         casepc = pc + GET_JUMP_OFFSET(pc2);
  3000         JS_ASSERT(casepc >= pc && casepc <= exitpc);
  3002         MBasicBlock *caseblock = newBlock(current, casepc);
  3003         if (!caseblock)
  3004             return ControlStatus_Error;
  3006         // If the casepc equals the current pc, it is not a written case,
  3007         // but a filled gap. That way we can use a tableswitch instead of a
  3008         // condswitch, even if not all case values are consecutive.
  3009         // In that case this block goes to the default case.
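               // (Illustrative example: in |switch (x) { case 0: f(); case 2: g(); }|
               // the table entry for 1 is such a gap and is routed to the
               // default case.)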
  3010         if (casepc == pc) {
  3011             caseblock->end(MGoto::New(alloc(), defaultcase));
  3012             if (!defaultcase->addPredecessor(alloc(), caseblock))
  3013                 return ControlStatus_Error;
  3016         tableswitch->addCase(tableswitch->addSuccessor(caseblock));
  3018         // If this is an actual case (not a filled gap), add this block to
  3019         // the list that still needs to be processed.
  3020         if (casepc != pc)
  3021             tableswitch->addBlock(caseblock);
  3023         pc2 += JUMP_OFFSET_LEN;
  3026     // Move defaultcase to the end, to maintain RPO.
  3027     graph().moveBlockToEnd(defaultcase);
  3029     JS_ASSERT(tableswitch->numCases() == (uint32_t)(high - low + 1));
  3030     JS_ASSERT(tableswitch->numSuccessors() > 0);
  3032     // Sort the list of blocks that still need to be processed by pc.
  3033     qsort(tableswitch->blocks(), tableswitch->numBlocks(),
  3034           sizeof(MBasicBlock*), CmpSuccessors);
  3036     // Create info
  3037     ControlFlowInfo switchinfo(cfgStack_.length(), exitpc);
  3038     if (!switches_.append(switchinfo))
  3039         return ControlStatus_Error;
  3041     // Use a state to retrieve some information
  3042     CFGState state = CFGState::TableSwitch(exitpc, tableswitch);
  3044     // Save the MIR instruction as the last instruction of this block.
  3045     current->end(tableswitch);
  3047     // If there is only one successor, the block should stop at the end of the
  3048     // switch; otherwise it should stop at the start of the next successor.
  3049     if (tableswitch->numBlocks() > 1)
  3050         state.stopAt = tableswitch->getBlock(1)->pc();
  3051     if (!setCurrentAndSpecializePhis(tableswitch->getBlock(0)))
  3052         return ControlStatus_Error;
  3054     if (!cfgStack_.append(state))
  3055         return ControlStatus_Error;
  3057     pc = current->pc();
  3058     return ControlStatus_Jumped;
  3061 bool
  3062 IonBuilder::filterTypesAtTest(MTest *test)
  3064     JS_ASSERT(test->ifTrue() == current || test->ifFalse() == current);
  3066     bool trueBranch = test->ifTrue() == current;
  3068     MDefinition *subject = nullptr;
  3069     bool removeUndefined;
  3070     bool removeNull;
  3072     test->filtersUndefinedOrNull(trueBranch, &subject, &removeUndefined, &removeNull);
  3074     // The test does not filter out undefined or null.
  3075     if (!subject)
  3076         return true;
  3078     // There is no TypeSet that can get filtered.
  3079     if (!subject->resultTypeSet() || subject->resultTypeSet()->unknown())
  3080         return true;
  3082     // Only do this optimization if the typeset contains null or undefined.
  3083     if ((!(removeUndefined && subject->resultTypeSet()->hasType(types::Type::UndefinedType())) &&
  3084          !(removeNull && subject->resultTypeSet()->hasType(types::Type::NullType()))))
  3086         return true;
  3089     // Find all values on the stack that correspond to the subject
  3090     // and replace them with a MIR carrying the filtered TypeSet information.
  3091     // Create the replacement MIR lazily upon first occurrence.
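           // Illustrative sketch (not from the original comments): in
           //     if (x) { use(x); }
           // every stack slot holding |x| in the true branch can be replaced by
           // a definition whose TypeSet no longer contains undefined or null.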
  3092     MDefinition *replace = nullptr;
  3093     for (uint32_t i = 0; i < current->stackDepth(); i++) {
  3094         if (current->getSlot(i) != subject)
  3095             continue;
  3097         // Create the replacement MIR with the filtered TypeSet.
  3098         if (!replace) {
  3099             types::TemporaryTypeSet *type =
  3100                 subject->resultTypeSet()->filter(alloc_->lifoAlloc(), removeUndefined,
  3101                                                                       removeNull);
  3102             if (!type)
  3103                 return false;
  3105             replace = ensureDefiniteTypeSet(subject, type);
  3106             // To make sure we don't hoist it above the MTest, we use the
  3107             // 'dependency' of an MInstruction. This is normally used by
  3108             // Alias Analysis, but it won't get overwritten here, since this
  3109             // instruction doesn't have an AliasSet.
  3110             replace->setDependency(test);
  3113         current->setSlot(i, replace);
  3116     return true;
  3119 bool
  3120 IonBuilder::jsop_label()
  3122     JS_ASSERT(JSOp(*pc) == JSOP_LABEL);
  3124     jsbytecode *endpc = pc + GET_JUMP_OFFSET(pc);
  3125     JS_ASSERT(endpc > pc);
  3127     ControlFlowInfo label(cfgStack_.length(), endpc);
  3128     if (!labels_.append(label))
  3129         return false;
  3131     return cfgStack_.append(CFGState::Label(endpc));
  3134 bool
  3135 IonBuilder::jsop_condswitch()
  3137     // CondSwitch op looks as follows:
  3138     //   condswitch [length +exit_pc; first case offset +next-case ]
  3139     //   {
  3140     //     {
  3141     //       ... any code ...
  3142     //       case (+jump) [pcdelta offset +next-case]
  3143     //     }+
  3144     //     default (+jump)
  3145     //     ... jump targets ...
  3146     //   }
  3147     //
  3148     // The default case is always emitted even if there is no default case in
  3149     // the source.  The pcdelta source note of the last case might have a 0
  3150     // offset (but not always).
  3151     //
  3152     // A conditional switch evaluates the condition of each case and compares
  3153     // it to the switch value with a strict equality.  Case conditions are
  3154     // iterated linearly until one matches. If a case succeeds, the flow jumps
  3155     // into the corresponding body block.  The body block might alias others
  3156     // and might continue into the next body block if the body is not
  3157     // terminated with a break.
  3158     //
  3159     // Algorithm:
  3160     //  1/ Loop over the case chain to reach the default target
  3161     //   & Estimate the number of unique bodies.
  3162     //  2/ Generate code for all cases (see processCondSwitchCase).
  3163     //  3/ Generate code for all bodies (see processCondSwitchBody).
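           // Illustrative sketch (not from the original comments): a switch whose
           // labels are not all dense integer constants, e.g.
           //     switch (v) { case a: f(); break; case "s": g(); break; default: h(); }
           // is typically emitted as a condswitch; each case condition is compared
           // to |v| with a strict equality until one matches.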
  3165     JS_ASSERT(JSOp(*pc) == JSOP_CONDSWITCH);
  3166     jssrcnote *sn = info().getNote(gsn, pc);
  3167     JS_ASSERT(SN_TYPE(sn) == SRC_CONDSWITCH);
  3169     // Get the exit pc
  3170     jsbytecode *exitpc = pc + js_GetSrcNoteOffset(sn, 0);
  3171     jsbytecode *firstCase = pc + js_GetSrcNoteOffset(sn, 1);
  3173     // Iterate all cases in the conditional switch.
  3174     // - Stop at the default case (it is always emitted after the last case).
  3175     // - Estimate the number of unique bodies. This estimation might be off by 1
  3176     //   if the default body aliases a case body.
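           // Illustrative sketch (not from the original comments):
           //     switch (v) { case a: f(); break; default: case b: g(); }
           // makes the default share its body with |case b|, the aliasing case in
           // which the estimate is one too high.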
  3177     jsbytecode *curCase = firstCase;
  3178     jsbytecode *lastTarget = GetJumpOffset(curCase) + curCase;
  3179     size_t nbBodies = 2; // default target and the first body.
  3181     JS_ASSERT(pc < curCase && curCase <= exitpc);
  3182     while (JSOp(*curCase) == JSOP_CASE) {
  3183         // Fetch the next case.
  3184         jssrcnote *caseSn = info().getNote(gsn, curCase);
  3185         JS_ASSERT(caseSn && SN_TYPE(caseSn) == SRC_NEXTCASE);
  3186         ptrdiff_t off = js_GetSrcNoteOffset(caseSn, 0);
  3187         curCase = off ? curCase + off : GetNextPc(curCase);
  3188         JS_ASSERT(pc < curCase && curCase <= exitpc);
  3190         // Count non-aliased cases.
  3191         jsbytecode *curTarget = GetJumpOffset(curCase) + curCase;
  3192         if (lastTarget < curTarget)
  3193             nbBodies++;
  3194         lastTarget = curTarget;
  3197     // The current case should now be the default case, which jumps to the body
  3198     // of the default case; that body might be behind the last target.
  3199     JS_ASSERT(JSOp(*curCase) == JSOP_DEFAULT);
  3200     jsbytecode *defaultTarget = GetJumpOffset(curCase) + curCase;
  3201     JS_ASSERT(curCase < defaultTarget && defaultTarget <= exitpc);
  3203     // Allocate the current graph state.
  3204     CFGState state = CFGState::CondSwitch(this, exitpc, defaultTarget);
  3205     if (!state.condswitch.bodies || !state.condswitch.bodies->init(alloc(), nbBodies))
  3206         return ControlStatus_Error;
  3208     // We loop on case conditions with processCondSwitchCase.
  3209     JS_ASSERT(JSOp(*firstCase) == JSOP_CASE);
  3210     state.stopAt = firstCase;
  3211     state.state = CFGState::COND_SWITCH_CASE;
  3213     return cfgStack_.append(state);
  3216 IonBuilder::CFGState
  3217 IonBuilder::CFGState::CondSwitch(IonBuilder *builder, jsbytecode *exitpc, jsbytecode *defaultTarget)
  3219     CFGState state;
  3220     state.state = COND_SWITCH_CASE;
  3221     state.stopAt = nullptr;
  3222     state.condswitch.bodies = (FixedList<MBasicBlock *> *)builder->alloc_->allocate(
  3223         sizeof(FixedList<MBasicBlock *>));
  3224     state.condswitch.currentIdx = 0;
  3225     state.condswitch.defaultTarget = defaultTarget;
  3226     state.condswitch.defaultIdx = uint32_t(-1);
  3227     state.condswitch.exitpc = exitpc;
  3228     state.condswitch.breaks = nullptr;
  3229     return state;
  3232 IonBuilder::CFGState
  3233 IonBuilder::CFGState::Label(jsbytecode *exitpc)
  3235     CFGState state;
  3236     state.state = LABEL;
  3237     state.stopAt = exitpc;
  3238     state.label.breaks = nullptr;
  3239     return state;
  3242 IonBuilder::CFGState
  3243 IonBuilder::CFGState::Try(jsbytecode *exitpc, MBasicBlock *successor)
  3245     CFGState state;
  3246     state.state = TRY;
  3247     state.stopAt = exitpc;
  3248     state.try_.successor = successor;
  3249     return state;
  3252 IonBuilder::ControlStatus
  3253 IonBuilder::processCondSwitchCase(CFGState &state)
  3255     JS_ASSERT(state.state == CFGState::COND_SWITCH_CASE);
  3256     JS_ASSERT(!state.condswitch.breaks);
  3257     JS_ASSERT(current);
  3258     JS_ASSERT(JSOp(*pc) == JSOP_CASE);
  3259     FixedList<MBasicBlock *> &bodies = *state.condswitch.bodies;
  3260     jsbytecode *defaultTarget = state.condswitch.defaultTarget;
  3261     uint32_t &currentIdx = state.condswitch.currentIdx;
  3262     jsbytecode *lastTarget = currentIdx ? bodies[currentIdx - 1]->pc() : nullptr;
  3264     // Fetch the following case in which we will continue.
  3265     jssrcnote *sn = info().getNote(gsn, pc);
  3266     ptrdiff_t off = js_GetSrcNoteOffset(sn, 0);
  3267     jsbytecode *casePc = off ? pc + off : GetNextPc(pc);
  3268     bool caseIsDefault = JSOp(*casePc) == JSOP_DEFAULT;
  3269     JS_ASSERT(JSOp(*casePc) == JSOP_CASE || caseIsDefault);
  3271     // Allocate the block of the matching case.
  3272     bool bodyIsNew = false;
  3273     MBasicBlock *bodyBlock = nullptr;
  3274     jsbytecode *bodyTarget = pc + GetJumpOffset(pc);
  3275     if (lastTarget < bodyTarget) {
  3276         // The default body is in the middle or aliases the current target.
  3277         if (lastTarget < defaultTarget && defaultTarget <= bodyTarget) {
  3278             JS_ASSERT(state.condswitch.defaultIdx == uint32_t(-1));
  3279             state.condswitch.defaultIdx = currentIdx;
  3280             bodies[currentIdx] = nullptr;
  3281             // If the default body does not alias any case body, it will be
  3282             // allocated later and stored in the defaultIdx location.
  3283             if (defaultTarget < bodyTarget)
  3284                 currentIdx++;
  3287         bodyIsNew = true;
  3288         // Pop switch and case operands.
  3289         bodyBlock = newBlockPopN(current, bodyTarget, 2);
  3290         bodies[currentIdx++] = bodyBlock;
  3291     } else {
  3292         // This body aliases the previous one.
  3293         JS_ASSERT(lastTarget == bodyTarget);
  3294         JS_ASSERT(currentIdx > 0);
  3295         bodyBlock = bodies[currentIdx - 1];
  3298     if (!bodyBlock)
  3299         return ControlStatus_Error;
  3301     lastTarget = bodyTarget;
  3303     // Allocate the block of the non-matching case.  This can either be a normal
  3304     // case or the default case.
  3305     bool caseIsNew = false;
  3306     MBasicBlock *caseBlock = nullptr;
  3307     if (!caseIsDefault) {
  3308         caseIsNew = true;
  3309         // Pop the case operand.
  3310         caseBlock = newBlockPopN(current, GetNextPc(pc), 1);
  3311     } else {
  3312         // The non-matching case is the default case, which jumps directly to its
  3313         // body. Skip the creation of a default case block and directly create
  3314         // the default body if it does not alias any previous body.
  3316         if (state.condswitch.defaultIdx == uint32_t(-1)) {
  3317             // The default target is the last target.
  3318             JS_ASSERT(lastTarget < defaultTarget);
  3319             state.condswitch.defaultIdx = currentIdx++;
  3320             caseIsNew = true;
  3321         } else if (bodies[state.condswitch.defaultIdx] == nullptr) {
  3322             // The default target is in the middle and it does not alias any
  3323             // case target.
  3324             JS_ASSERT(defaultTarget < lastTarget);
  3325             caseIsNew = true;
  3326         } else {
  3327             // The default target is in the middle and it aliases a case target.
  3328             JS_ASSERT(defaultTarget <= lastTarget);
  3329             caseBlock = bodies[state.condswitch.defaultIdx];
  3332         // Allocate and register the default body.
  3333         if (caseIsNew) {
  3334             // Pop the case & switch operands.
  3335             caseBlock = newBlockPopN(current, defaultTarget, 2);
  3336             bodies[state.condswitch.defaultIdx] = caseBlock;
  3340     if (!caseBlock)
  3341         return ControlStatus_Error;
  3343     // Terminate the last case condition block by emitting the code
  3344     // corresponding to JSOP_CASE bytecode.
  3345     if (bodyBlock != caseBlock) {
  3346         MDefinition *caseOperand = current->pop();
  3347         MDefinition *switchOperand = current->peek(-1);
  3348         MCompare *cmpResult = MCompare::New(alloc(), switchOperand, caseOperand, JSOP_STRICTEQ);
  3349         cmpResult->infer(inspector, pc);
  3350         JS_ASSERT(!cmpResult->isEffectful());
  3351         current->add(cmpResult);
  3352         current->end(MTest::New(alloc(), cmpResult, bodyBlock, caseBlock));
  3354         // Add last case as predecessor of the body if the body is aliasing
  3355         // the previous case body.
  3356         if (!bodyIsNew && !bodyBlock->addPredecessorPopN(alloc(), current, 1))
  3357             return ControlStatus_Error;
  3359         // Add last case as predecessor of the non-matching case if the
  3360         // non-matching case is an aliased default case. We need to pop the
  3361         // switch operand as we skip the default case block and use the default
  3362         // body block directly.
  3363         JS_ASSERT_IF(!caseIsNew, caseIsDefault);
  3364         if (!caseIsNew && !caseBlock->addPredecessorPopN(alloc(), current, 1))
  3365             return ControlStatus_Error;
  3366     } else {
  3367         // The default case aliases the last case body.
  3368         JS_ASSERT(caseIsDefault);
  3369         current->pop(); // Case operand
  3370         current->pop(); // Switch operand
  3371         current->end(MGoto::New(alloc(), bodyBlock));
  3372         if (!bodyIsNew && !bodyBlock->addPredecessor(alloc(), current))
  3373             return ControlStatus_Error;
  3376     if (caseIsDefault) {
  3377         // The last case condition is finished.  Loop in processCondSwitchBody,
  3378         // with potential stops in processSwitchBreak.  Check that the bodies
  3379         // fixed list is over-estimated by at most 1, and shrink it so that its
  3380         // length can be used as an upper bound while iterating over the bodies.
  3381         JS_ASSERT(currentIdx == bodies.length() || currentIdx + 1 == bodies.length());
  3382         bodies.shrink(bodies.length() - currentIdx);
  3384         // Handle break statements in processSwitchBreak while processing
  3385         // bodies.
  3386         ControlFlowInfo breakInfo(cfgStack_.length() - 1, state.condswitch.exitpc);
  3387         if (!switches_.append(breakInfo))
  3388             return ControlStatus_Error;
  3390         // Jump into the first body.
  3391         currentIdx = 0;
  3392         setCurrent(nullptr);
  3393         state.state = CFGState::COND_SWITCH_BODY;
  3394         return processCondSwitchBody(state);
  3397     // Continue until the case condition.
  3398     if (!setCurrentAndSpecializePhis(caseBlock))
  3399         return ControlStatus_Error;
  3400     pc = current->pc();
  3401     state.stopAt = casePc;
  3402     return ControlStatus_Jumped;
  3405 IonBuilder::ControlStatus
  3406 IonBuilder::processCondSwitchBody(CFGState &state)
  3408     JS_ASSERT(state.state == CFGState::COND_SWITCH_BODY);
  3409     JS_ASSERT(pc <= state.condswitch.exitpc);
  3410     FixedList<MBasicBlock *> &bodies = *state.condswitch.bodies;
  3411     uint32_t &currentIdx = state.condswitch.currentIdx;
  3413     JS_ASSERT(currentIdx <= bodies.length());
  3414     if (currentIdx == bodies.length()) {
  3415         JS_ASSERT_IF(current, pc == state.condswitch.exitpc);
  3416         return processSwitchEnd(state.condswitch.breaks, state.condswitch.exitpc);
  3419     // Get the next body
  3420     MBasicBlock *nextBody = bodies[currentIdx++];
  3421     JS_ASSERT_IF(current, pc == nextBody->pc());
  3423     // Fix the reverse post-order iteration.
  3424     graph().moveBlockToEnd(nextBody);
  3426     // The last body continues into the new one.
  3427     if (current) {
  3428         current->end(MGoto::New(alloc(), nextBody));
  3429         if (!nextBody->addPredecessor(alloc(), current))
  3430             return ControlStatus_Error;
  3433     // Continue in the next body.
  3434     if (!setCurrentAndSpecializePhis(nextBody))
  3435         return ControlStatus_Error;
  3436     pc = current->pc();
  3438     if (currentIdx < bodies.length())
  3439         state.stopAt = bodies[currentIdx]->pc();
  3440     else
  3441         state.stopAt = state.condswitch.exitpc;
  3442     return ControlStatus_Jumped;
  3445 bool
  3446 IonBuilder::jsop_andor(JSOp op)
  3448     JS_ASSERT(op == JSOP_AND || op == JSOP_OR);
  3450     jsbytecode *rhsStart = pc + js_CodeSpec[op].length;
  3451     jsbytecode *joinStart = pc + GetJumpOffset(pc);
  3452     JS_ASSERT(joinStart > pc);
  3454     // We have to leave the LHS on the stack.
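           // Illustrative note (not from the original comments): for |a || b| the
           // value of |a| itself is the result when it is truthy, which is why the
           // LHS stays on the stack for the join block instead of being popped here.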
  3455     MDefinition *lhs = current->peek(-1);
  3457     MBasicBlock *evalRhs = newBlock(current, rhsStart);
  3458     MBasicBlock *join = newBlock(current, joinStart);
  3459     if (!evalRhs || !join)
  3460         return false;
  3462     MTest *test = (op == JSOP_AND)
  3463                   ? MTest::New(alloc(), lhs, evalRhs, join)
  3464                   : MTest::New(alloc(), lhs, join, evalRhs);
  3465     test->infer();
  3466     current->end(test);
  3468     if (!cfgStack_.append(CFGState::AndOr(joinStart, join)))
  3469         return false;
  3471     return setCurrentAndSpecializePhis(evalRhs);
  3474 bool
  3475 IonBuilder::jsop_dup2()
  3477     uint32_t lhsSlot = current->stackDepth() - 2;
  3478     uint32_t rhsSlot = current->stackDepth() - 1;
  3479     current->pushSlot(lhsSlot);
  3480     current->pushSlot(rhsSlot);
  3481     return true;
  3484 bool
  3485 IonBuilder::jsop_loophead(jsbytecode *pc)
  3487     assertValidLoopHeadOp(pc);
  3489     current->add(MInterruptCheck::New(alloc()));
  3490     insertRecompileCheck();
  3492     return true;
  3495 bool
  3496 IonBuilder::jsop_ifeq(JSOp op)
  3498     // IFEQ always has a forward offset.
  3499     jsbytecode *trueStart = pc + js_CodeSpec[op].length;
  3500     jsbytecode *falseStart = pc + GetJumpOffset(pc);
  3501     JS_ASSERT(falseStart > pc);
  3503     // We only handle cases that emit source notes.
  3504     jssrcnote *sn = info().getNote(gsn, pc);
  3505     if (!sn)
  3506         return abort("expected sourcenote");
  3508     MDefinition *ins = current->pop();
  3510     // Create true and false branches.
  3511     MBasicBlock *ifTrue = newBlock(current, trueStart);
  3512     MBasicBlock *ifFalse = newBlock(current, falseStart);
  3513     if (!ifTrue || !ifFalse)
  3514         return false;
  3516     MTest *test = MTest::New(alloc(), ins, ifTrue, ifFalse);
  3517     current->end(test);
  3519     // The bytecode for if/ternary gets emitted either like this:
  3520     //
  3521     //    IFEQ X  ; src note (IF_ELSE, COND) points to the GOTO
  3522     //    ...
  3523     //    GOTO Z
  3524     // X: ...     ; else/else if
  3525     //    ...
  3526     // Z:         ; join
  3527     //
  3528     // Or like this:
  3529     //
  3530     //    IFEQ X  ; src note (IF) has no offset
  3531     //    ...
  3532     // Z: ...     ; join
  3533     //
  3534     // We want to parse the bytecode as if we were parsing the AST, so for the
  3535     // IF_ELSE/COND cases, we use the source note and follow the GOTO. For the
  3536     // IF case, the IFEQ offset is the join point.
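           // Illustrative sketches (not from the original comments):
           //     if (c) { f(); } else { g(); }   // IF_ELSE shape above
           //     c ? f() : g();                  // COND, same shape as IF_ELSE
           //     if (c) { f(); }                 // IF shape, no GOTO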
  3537     switch (SN_TYPE(sn)) {
  3538       case SRC_IF:
  3539         if (!cfgStack_.append(CFGState::If(falseStart, test)))
  3540             return false;
  3541         break;
  3543       case SRC_IF_ELSE:
  3544       case SRC_COND:
  3546         // Infer the join point from the JSOP_GOTO[X] sitting here, then
  3547         // assert as much as we can that this is the right GOTO.
  3548         jsbytecode *trueEnd = pc + js_GetSrcNoteOffset(sn, 0);
  3549         JS_ASSERT(trueEnd > pc);
  3550         JS_ASSERT(trueEnd < falseStart);
  3551         JS_ASSERT(JSOp(*trueEnd) == JSOP_GOTO);
  3552         JS_ASSERT(!info().getNote(gsn, trueEnd));
  3554         jsbytecode *falseEnd = trueEnd + GetJumpOffset(trueEnd);
  3555         JS_ASSERT(falseEnd > trueEnd);
  3556         JS_ASSERT(falseEnd >= falseStart);
  3558         if (!cfgStack_.append(CFGState::IfElse(trueEnd, falseEnd, test)))
  3559             return false;
  3560         break;
  3563       default:
  3564         MOZ_ASSUME_UNREACHABLE("unexpected source note type");
  3567     // Switch to parsing the true branch. Note that no PC update is needed,
  3568     // it's the next instruction.
  3569     if (!setCurrentAndSpecializePhis(ifTrue))
  3570         return false;
  3572     // Filter the types in the true branch.
  3573     filterTypesAtTest(test);
  3575     return true;
  3578 bool
  3579 IonBuilder::jsop_try()
  3581     JS_ASSERT(JSOp(*pc) == JSOP_TRY);
  3583     if (!js_JitOptions.compileTryCatch)
  3584         return abort("Try-catch support disabled");
  3586     // Try-finally is not yet supported.
  3587     if (analysis().hasTryFinally())
  3588         return abort("Has try-finally");
  3590     // Try-catch within inline frames is not yet supported.
  3591     JS_ASSERT(!isInlineBuilder());
  3593     // Try-catch during the arguments usage analysis is not yet supported. Code
  3594     // accessing the arguments within the 'catch' block is not accounted for.
  3595     if (info().executionMode() == ArgumentsUsageAnalysis)
  3596         return abort("Try-catch during arguments usage analysis");
  3598     graph().setHasTryBlock();
  3600     jssrcnote *sn = info().getNote(gsn, pc);
  3601     JS_ASSERT(SN_TYPE(sn) == SRC_TRY);
  3603     // Get the pc of the last instruction in the try block. It's a JSOP_GOTO to
  3604     // jump over the catch block.
  3605     jsbytecode *endpc = pc + js_GetSrcNoteOffset(sn, 0);
  3606     JS_ASSERT(JSOp(*endpc) == JSOP_GOTO);
  3607     JS_ASSERT(GetJumpOffset(endpc) > 0);
  3609     jsbytecode *afterTry = endpc + GetJumpOffset(endpc);
  3611     // If control flow in the try body is terminated (by a return or throw
  3612     // statement), the code after the try-statement may still be reachable
  3613     // via the catch block (which we don't compile) and OSR can enter it.
  3614     // For example:
  3615     //
  3616     //     try {
  3617     //         throw 3;
  3618     //     } catch(e) { }
  3619     //
  3620     //     for (var i=0; i<1000; i++) {}
  3621     //
  3622     // To handle this, we create two blocks: one for the try block and one
  3623     // for the code following the try-catch statement. Both blocks are
  3624     // connected to the graph with an MTest instruction that always jumps to
  3625     // the try block. This ensures the successor block always has a predecessor
  3626     // and later passes will optimize this MTest to a no-op.
  3627     //
  3628     // If the code after the try block is unreachable (control flow in both the
  3629     // try and catch blocks is terminated), only create the try block, to avoid
  3630     // parsing unreachable code.
  3632     MBasicBlock *tryBlock = newBlock(current, GetNextPc(pc));
  3633     if (!tryBlock)
  3634         return false;
  3636     MBasicBlock *successor;
  3637     if (analysis().maybeInfo(afterTry)) {
  3638         successor = newBlock(current, afterTry);
  3639         if (!successor)
  3640             return false;
  3642         // Add MTest(true, tryBlock, successorBlock).
  3643         MConstant *true_ = MConstant::New(alloc(), BooleanValue(true));
  3644         current->add(true_);
  3645         current->end(MTest::New(alloc(), true_, tryBlock, successor));
  3646     } else {
  3647         successor = nullptr;
  3648         current->end(MGoto::New(alloc(), tryBlock));
  3651     if (!cfgStack_.append(CFGState::Try(endpc, successor)))
  3652         return false;
  3654     // The baseline compiler should not attempt to enter the catch block
  3655     // via OSR.
  3656     JS_ASSERT(info().osrPc() < endpc || info().osrPc() >= afterTry);
  3658     // Start parsing the try block.
  3659     return setCurrentAndSpecializePhis(tryBlock);
  3662 IonBuilder::ControlStatus
  3663 IonBuilder::processReturn(JSOp op)
  3665     MDefinition *def;
  3666     switch (op) {
  3667       case JSOP_RETURN:
  3668         // Return the last instruction.
  3669         def = current->pop();
  3670         break;
  3672       case JSOP_RETRVAL:
  3673         // Return undefined eagerly if script doesn't use return value.
  3674         if (script()->noScriptRval()) {
  3675             MInstruction *ins = MConstant::New(alloc(), UndefinedValue());
  3676             current->add(ins);
  3677             def = ins;
  3678             break;
  3681         def = current->getSlot(info().returnValueSlot());
  3682         break;
  3684       default:
  3685         def = nullptr;
  3686         MOZ_ASSUME_UNREACHABLE("unknown return op");
  3689     if (instrumentedProfiling()) {
  3690         current->add(MProfilerStackOp::New(alloc(), script(), MProfilerStackOp::Exit,
  3691                                            inliningDepth_));
  3693     MReturn *ret = MReturn::New(alloc(), def);
  3694     current->end(ret);
  3696     if (!graph().addReturn(current))
  3697         return ControlStatus_Error;
  3699     // Make sure no one tries to use this block now.
  3700     setCurrent(nullptr);
  3701     return processControlEnd();
  3704 IonBuilder::ControlStatus
  3705 IonBuilder::processThrow()
  3707     MDefinition *def = current->pop();
  3709     // MThrow is not marked as effectful. This means when it throws and we
  3710     // are inside a try block, we could use an earlier resume point and this
  3711     // resume point may not be up-to-date, for example:
  3712     //
  3713     // (function() {
  3714     //     try {
  3715     //         var x = 1;
  3716     //         foo(); // resume point
  3717     //         x = 2;
  3718     //         throw foo;
  3719     //     } catch(e) {
  3720     //         print(x);
  3721     //     }
  3722     // })();
  3723     //
  3724     // If we use the resume point after the call, this will print 1 instead
  3725     // of 2. To fix this, we create a resume point right before the MThrow.
  3726     //
  3727     // Note that this is not a problem for instructions other than MThrow
  3728     // because they are either marked as effectful (have their own resume
  3729     // point) or cannot throw a catchable exception.
  3730     //
  3731     // We always install this resume point (instead of only when the function
  3732     // has a try block) in order to handle the Debugger onExceptionUnwind
  3733     // hook. When we need to handle the hook, we bail out to baseline right
  3734     // after the throw and propagate the exception when debug mode is on. This
  3735     // is opposed to the normal behavior of resuming directly in the
  3736     // associated catch block.
  3737     MNop *nop = MNop::New(alloc());
  3738     current->add(nop);
  3740     if (!resumeAfter(nop))
  3741         return ControlStatus_Error;
  3743     MThrow *ins = MThrow::New(alloc(), def);
  3744     current->end(ins);
  3746     // Make sure no one tries to use this block now.
  3747     setCurrent(nullptr);
  3748     return processControlEnd();
  3751 bool
  3752 IonBuilder::pushConstant(const Value &v)
  3754     current->push(constant(v));
  3755     return true;
  3758 bool
  3759 IonBuilder::jsop_bitnot()
  3761     MDefinition *input = current->pop();
  3762     MBitNot *ins = MBitNot::New(alloc(), input);
  3764     current->add(ins);
  3765     ins->infer();
  3767     current->push(ins);
  3768     if (ins->isEffectful() && !resumeAfter(ins))
  3769         return false;
  3770     return true;
  3772 bool
  3773 IonBuilder::jsop_bitop(JSOp op)
  3775     // Pop inputs.
  3776     MDefinition *right = current->pop();
  3777     MDefinition *left = current->pop();
  3779     MBinaryBitwiseInstruction *ins;
  3780     switch (op) {
  3781       case JSOP_BITAND:
  3782         ins = MBitAnd::New(alloc(), left, right);
  3783         break;
  3785       case JSOP_BITOR:
  3786         ins = MBitOr::New(alloc(), left, right);
  3787         break;
  3789       case JSOP_BITXOR:
  3790         ins = MBitXor::New(alloc(), left, right);
  3791         break;
  3793       case JSOP_LSH:
  3794         ins = MLsh::New(alloc(), left, right);
  3795         break;
  3797       case JSOP_RSH:
  3798         ins = MRsh::New(alloc(), left, right);
  3799         break;
  3801       case JSOP_URSH:
  3802         ins = MUrsh::New(alloc(), left, right);
  3803         break;
  3805       default:
  3806         MOZ_ASSUME_UNREACHABLE("unexpected bitop");
  3809     current->add(ins);
  3810     ins->infer(inspector, pc);
  3812     current->push(ins);
  3813     if (ins->isEffectful() && !resumeAfter(ins))
  3814         return false;
  3816     return true;
  3819 bool
  3820 IonBuilder::jsop_binary(JSOp op, MDefinition *left, MDefinition *right)
  3822     // Do a string concatenation if adding two inputs that are numbers or
  3823     // strings and at least one is a string.
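           // Illustrative sketch (not from the original comments): |"id" + 3| or
           // |1.5 + "px"| take this path and become an MConcat, while |1 + 2|
           // falls through to the arithmetic instructions below.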
  3824     if (op == JSOP_ADD &&
  3825         ((left->type() == MIRType_String &&
  3826           (right->type() == MIRType_String ||
  3827            right->type() == MIRType_Int32 ||
  3828            right->type() == MIRType_Double)) ||
  3829          (left->type() == MIRType_Int32 &&
  3830           right->type() == MIRType_String) ||
  3831          (left->type() == MIRType_Double &&
  3832           right->type() == MIRType_String)))
  3834         MConcat *ins = MConcat::New(alloc(), left, right);
  3835         current->add(ins);
  3836         current->push(ins);
  3837         return maybeInsertResume();
  3840     MBinaryArithInstruction *ins;
  3841     switch (op) {
  3842       case JSOP_ADD:
  3843         ins = MAdd::New(alloc(), left, right);
  3844         break;
  3846       case JSOP_SUB:
  3847         ins = MSub::New(alloc(), left, right);
  3848         break;
  3850       case JSOP_MUL:
  3851         ins = MMul::New(alloc(), left, right);
  3852         break;
  3854       case JSOP_DIV:
  3855         ins = MDiv::New(alloc(), left, right);
  3856         break;
  3858       case JSOP_MOD:
  3859         ins = MMod::New(alloc(), left, right);
  3860         break;
  3862       default:
  3863         MOZ_ASSUME_UNREACHABLE("unexpected binary opcode");
  3866     current->add(ins);
  3867     ins->infer(alloc(), inspector, pc);
  3868     current->push(ins);
  3870     if (ins->isEffectful())
  3871         return resumeAfter(ins);
  3872     return maybeInsertResume();
  3875 bool
  3876 IonBuilder::jsop_binary(JSOp op)
  3878     MDefinition *right = current->pop();
  3879     MDefinition *left = current->pop();
  3881     return jsop_binary(op, left, right);
  3884 bool
  3885 IonBuilder::jsop_pos()
  3887     if (IsNumberType(current->peek(-1)->type())) {
  3888         // Already int32 or double. Set the operand as implicitly used so it
  3889         // doesn't get optimized out if it has no other uses, as we could bail
  3890         // out.
  3891         current->peek(-1)->setImplicitlyUsedUnchecked();
  3892         return true;
  3895     // Compile +x as x * 1.
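           // Illustrative note (not from the original comments): for |+y| with |y|
           // of unknown type, the multiplication below lets the normal MMul
           // specialization produce the numeric result.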
  3896     MDefinition *value = current->pop();
  3897     MConstant *one = MConstant::New(alloc(), Int32Value(1));
  3898     current->add(one);
  3900     return jsop_binary(JSOP_MUL, value, one);
  3903 bool
  3904 IonBuilder::jsop_neg()
  3906     // Since JSOP_NEG does not use a slot, we cannot push the MConstant.
  3907     // The MConstant is therefore passed to JSOP_MUL without slot traffic.
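           // Illustrative note (not from the original comments): |-x| is therefore
           // built as MMul(constant(-1), x), mirroring the jsop_pos case above.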
  3908     MConstant *negator = MConstant::New(alloc(), Int32Value(-1));
  3909     current->add(negator);
  3911     MDefinition *right = current->pop();
  3913     if (!jsop_binary(JSOP_MUL, negator, right))
  3914         return false;
  3915     return true;
  3918 class AutoAccumulateReturns
  3920     MIRGraph &graph_;
  3921     MIRGraphReturns *prev_;
  3923   public:
  3924     AutoAccumulateReturns(MIRGraph &graph, MIRGraphReturns &returns)
  3925       : graph_(graph)
  3927         prev_ = graph_.returnAccumulator();
  3928         graph_.setReturnAccumulator(&returns);
  3930     ~AutoAccumulateReturns() {
  3931         graph_.setReturnAccumulator(prev_);
  3933 };
  3935 bool
  3936 IonBuilder::inlineScriptedCall(CallInfo &callInfo, JSFunction *target)
  3938     JS_ASSERT(target->hasScript());
  3939     JS_ASSERT(IsIonInlinablePC(pc));
  3941     callInfo.setImplicitlyUsedUnchecked();
  3943     // Ensure sufficient space in the slots: needed for inlining from FUNAPPLY.
  3944     uint32_t depth = current->stackDepth() + callInfo.numFormals();
  3945     if (depth > current->nslots()) {
  3946         if (!current->increaseSlots(depth - current->nslots()))
  3947             return false;
  3950     // Create new |this| on the caller-side for inlined constructors.
  3951     if (callInfo.constructing()) {
  3952         MDefinition *thisDefn = createThis(target, callInfo.fun());
  3953         if (!thisDefn)
  3954             return false;
  3955         callInfo.setThis(thisDefn);
  3958     // Capture formals in the outer resume point.
  3959     callInfo.pushFormals(current);
  3961     MResumePoint *outerResumePoint =
  3962         MResumePoint::New(alloc(), current, pc, callerResumePoint_, MResumePoint::Outer);
  3963     if (!outerResumePoint)
  3964         return false;
  3966     // Pop formals again, except leave |fun| on stack for duration of call.
  3967     callInfo.popFormals(current);
  3968     current->push(callInfo.fun());
  3970     JSScript *calleeScript = target->nonLazyScript();
  3971     BaselineInspector inspector(calleeScript);
  3973     // Improve type information of |this| when not set.
  3974     if (callInfo.constructing() &&
  3975         !callInfo.thisArg()->resultTypeSet() &&
  3976         calleeScript->types)
  3978         types::StackTypeSet *types = types::TypeScript::ThisTypes(calleeScript);
  3979         if (!types->unknown()) {
  3980             types::TemporaryTypeSet *clonedTypes = types->clone(alloc_->lifoAlloc());
  3981             if (!clonedTypes)
  3982                 return oom();
  3983             MTypeBarrier *barrier = MTypeBarrier::New(alloc(), callInfo.thisArg(), clonedTypes);
  3984             current->add(barrier);
  3985             callInfo.setThis(barrier);
  3989     // Start inlining.
  3990     LifoAlloc *lifoAlloc = alloc_->lifoAlloc();
  3991     CompileInfo *info = lifoAlloc->new_<CompileInfo>(calleeScript, target,
  3992                                                      (jsbytecode *)nullptr, callInfo.constructing(),
  3993                                                      this->info().executionMode(),
  3994                                                      /* needsArgsObj = */ false);
  3995     if (!info)
  3996         return false;
  3998     MIRGraphReturns returns(alloc());
  3999     AutoAccumulateReturns aar(graph(), returns);
  4001     // Build the graph.
  4002     IonBuilder inlineBuilder(analysisContext, compartment, options, &alloc(), &graph(), constraints(),
  4003                              &inspector, info, &optimizationInfo(), nullptr, inliningDepth_ + 1,
  4004                              loopDepth_);
  4005     if (!inlineBuilder.buildInline(this, outerResumePoint, callInfo)) {
  4006         if (analysisContext && analysisContext->isExceptionPending()) {
  4007             IonSpew(IonSpew_Abort, "Inline builder raised exception.");
  4008             abortReason_ = AbortReason_Error;
  4009             return false;
  4012         // Inlining the callee failed. Mark the callee as uninlineable only if
  4013         // the inlining was aborted for a non-exception reason.
  4014         if (inlineBuilder.abortReason_ == AbortReason_Disable) {
  4015             calleeScript->setUninlineable();
  4016             abortReason_ = AbortReason_Inlining;
  4017         } else if (inlineBuilder.abortReason_ == AbortReason_Inlining) {
  4018             abortReason_ = AbortReason_Inlining;
  4021         return false;
  4024     // Create return block.
  4025     jsbytecode *postCall = GetNextPc(pc);
  4026     MBasicBlock *returnBlock = newBlock(nullptr, postCall);
  4027     if (!returnBlock)
  4028         return false;
  4029     returnBlock->setCallerResumePoint(callerResumePoint_);
  4031     // When profiling, add an InlineExit instruction to indicate the end of the inlined function.
  4032     if (instrumentedProfiling())
  4033         returnBlock->add(MProfilerStackOp::New(alloc(), nullptr, MProfilerStackOp::InlineExit));
  4035     // Inherit the slots from current and pop |fun|.
  4036     returnBlock->inheritSlots(current);
  4037     returnBlock->pop();
  4039     // Accumulate return values.
  4040     if (returns.empty()) {
  4041         // Inlining of functions that have no exit is not supported.
  4042         calleeScript->setUninlineable();
  4043         abortReason_ = AbortReason_Inlining;
  4044         return false;
  4046     MDefinition *retvalDefn = patchInlinedReturns(callInfo, returns, returnBlock);
  4047     if (!retvalDefn)
  4048         return false;
  4049     returnBlock->push(retvalDefn);
  4051     // Initialize entry slots now that the stack has been fixed up.
  4052     if (!returnBlock->initEntrySlots(alloc()))
  4053         return false;
  4055     return setCurrentAndSpecializePhis(returnBlock);
  4058 MDefinition *
  4059 IonBuilder::patchInlinedReturn(CallInfo &callInfo, MBasicBlock *exit, MBasicBlock *bottom)
  4061     // Replaces the MReturn in the exit block with an MGoto.
  4062     MDefinition *rdef = exit->lastIns()->toReturn()->input();
  4063     exit->discardLastIns();
  4065     // Constructors must be patched by the caller to always return an object.
  4066     if (callInfo.constructing()) {
  4067         if (rdef->type() == MIRType_Value) {
  4068             // Unknown return: dynamically detect objects.
  4069             MReturnFromCtor *filter = MReturnFromCtor::New(alloc(), rdef, callInfo.thisArg());
  4070             exit->add(filter);
  4071             rdef = filter;
  4072         } else if (rdef->type() != MIRType_Object) {
  4073             // Known non-object return: force |this|.
  4074             rdef = callInfo.thisArg();
  4076     } else if (callInfo.isSetter()) {
  4077         // Setters return their argument, not whatever value is returned.
  4078         rdef = callInfo.getArg(0);
  4081     MGoto *replacement = MGoto::New(alloc(), bottom);
  4082     exit->end(replacement);
  4083     if (!bottom->addPredecessorWithoutPhis(exit))
  4084         return nullptr;
  4086     return rdef;
  4089 MDefinition *
  4090 IonBuilder::patchInlinedReturns(CallInfo &callInfo, MIRGraphReturns &returns, MBasicBlock *bottom)
  4092     // Replaces MReturns with MGotos, returning the MDefinition
  4093     // representing the return value, or nullptr.
  4094     JS_ASSERT(returns.length() > 0);
  4096     if (returns.length() == 1)
  4097         return patchInlinedReturn(callInfo, returns[0], bottom);
  4099     // Accumulate multiple returns with a phi.
  4100     MPhi *phi = MPhi::New(alloc(), bottom->stackDepth());
  4101     if (!phi->reserveLength(returns.length()))
  4102         return nullptr;
  4104     for (size_t i = 0; i < returns.length(); i++) {
  4105         MDefinition *rdef = patchInlinedReturn(callInfo, returns[i], bottom);
  4106         if (!rdef)
  4107             return nullptr;
  4108         phi->addInput(rdef);
  4111     bottom->addPhi(phi);
  4112     return phi;
  4115 IonBuilder::InliningDecision
  4116 IonBuilder::makeInliningDecision(JSFunction *target, CallInfo &callInfo)
  4118     // When there is no target, inlining is impossible.
  4119     if (target == nullptr)
  4120         return InliningDecision_DontInline;
  4122     // Never inline during the arguments usage analysis.
  4123     if (info().executionMode() == ArgumentsUsageAnalysis)
  4124         return InliningDecision_DontInline;
  4126     // Native functions provide their own detection in inlineNativeCall().
  4127     if (target->isNative())
  4128         return InliningDecision_Inline;
  4130     // Determine whether inlining is possible at the callee site.
  4131     InliningDecision decision = canInlineTarget(target, callInfo);
  4132     if (decision != InliningDecision_Inline)
  4133         return decision;
  4135     // Heuristics!
  4136     JSScript *targetScript = target->nonLazyScript();
  4138     // Skip heuristics if we have an explicit hint to inline.
  4139     if (!targetScript->shouldInline()) {
  4140         // Cap the inlining depth.
  4141         if (js_JitOptions.isSmallFunction(targetScript)) {
  4142             if (inliningDepth_ >= optimizationInfo().smallFunctionMaxInlineDepth())
  4143                 return DontInline(targetScript, "Vetoed: exceeding allowed inline depth");
  4144         } else {
  4145             if (inliningDepth_ >= optimizationInfo().maxInlineDepth())
  4146                 return DontInline(targetScript, "Vetoed: exceeding allowed inline depth");
  4148             if (targetScript->hasLoops())
  4149                 return DontInline(targetScript, "Vetoed: big function that contains a loop");
  4151             // Caller must not be excessively large.
  4152             if (script()->length() >= optimizationInfo().inliningMaxCallerBytecodeLength())
  4153                 return DontInline(targetScript, "Vetoed: caller excessively large");
  4156         // Callee must not be excessively large.
  4157         // This heuristic also applies to the callsite as a whole.
  4158         if (targetScript->length() > optimizationInfo().inlineMaxTotalBytecodeLength())
  4159             return DontInline(targetScript, "Vetoed: callee excessively large");
  4161         // Callee must have been called a few times to have somewhat stable
  4162         // type information, except for definite properties analysis,
  4163         // as the caller has not run yet.
  4164         if (targetScript->getUseCount() < optimizationInfo().usesBeforeInlining() &&
  4165             info().executionMode() != DefinitePropertiesAnalysis)
  4167             return DontInline(targetScript, "Vetoed: callee is insufficiently hot.");
  4171     // TI calls ObjectStateChange to trigger invalidation of the caller.
  4172     types::TypeObjectKey *targetType = types::TypeObjectKey::get(target);
  4173     targetType->watchStateChangeForInlinedCall(constraints());
  4175     // We mustn't relazify functions that have been inlined, because there's
  4176     // no way to tell if it is safe to do so.
  4177     script()->setHasBeenInlined();
  4179     return InliningDecision_Inline;
  4182 bool
  4183 IonBuilder::selectInliningTargets(ObjectVector &targets, CallInfo &callInfo, BoolVector &choiceSet,
  4184                                   uint32_t *numInlineable)
  4186     *numInlineable = 0;
  4187     uint32_t totalSize = 0;
  4189     // For each target, ask whether it may be inlined.
  4190     if (!choiceSet.reserve(targets.length()))
  4191         return false;
  4193     for (size_t i = 0; i < targets.length(); i++) {
  4194         JSFunction *target = &targets[i]->as<JSFunction>();
  4195         bool inlineable;
  4196         InliningDecision decision = makeInliningDecision(target, callInfo);
  4197         switch (decision) {
  4198           case InliningDecision_Error:
  4199             return false;
  4200           case InliningDecision_DontInline:
  4201             inlineable = false;
  4202             break;
  4203           case InliningDecision_Inline:
  4204             inlineable = true;
  4205             break;
  4206           default:
  4207             MOZ_ASSUME_UNREACHABLE("Unhandled InliningDecision value!");
  4210         // Enforce a maximum inlined bytecode limit at the callsite.
  4211         if (inlineable && target->isInterpreted()) {
  4212             totalSize += target->nonLazyScript()->length();
  4213             if (totalSize > optimizationInfo().inlineMaxTotalBytecodeLength())
  4214                 inlineable = false;
  4217         choiceSet.append(inlineable);
  4218         if (inlineable)
  4219             *numInlineable += 1;
  4222     JS_ASSERT(choiceSet.length() == targets.length());
  4223     return true;
  4226 static bool
  4227 CanInlineGetPropertyCache(MGetPropertyCache *cache, MDefinition *thisDef)
  4229     JS_ASSERT(cache->object()->type() == MIRType_Object);
  4230     if (cache->object() != thisDef)
  4231         return false;
  4233     InlinePropertyTable *table = cache->propTable();
  4234     if (!table)
  4235         return false;
  4236     if (table->numEntries() == 0)
  4237         return false;
  4238     return true;
  4241 MGetPropertyCache *
  4242 IonBuilder::getInlineableGetPropertyCache(CallInfo &callInfo)
  4244     if (callInfo.constructing())
  4245         return nullptr;
  4247     MDefinition *thisDef = callInfo.thisArg();
  4248     if (thisDef->type() != MIRType_Object)
  4249         return nullptr;
  4251     MDefinition *funcDef = callInfo.fun();
  4252     if (funcDef->type() != MIRType_Object)
  4253         return nullptr;
  4255     // MGetPropertyCache with no uses may be optimized away.
  4256     if (funcDef->isGetPropertyCache()) {
  4257         MGetPropertyCache *cache = funcDef->toGetPropertyCache();
  4258         if (cache->hasUses())
  4259             return nullptr;
  4260         if (!CanInlineGetPropertyCache(cache, thisDef))
  4261             return nullptr;
  4262         return cache;
  4265     // Optimize away the following common pattern:
  4266     // MTypeBarrier[MIRType_Object] <- MGetPropertyCache
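           // Illustrative sketch (not from the original comments): for a call like
           // |obj.method()| the callee is typically produced by an MGetPropertyCache,
           // possibly wrapped in an MTypeBarrier; that is the shape recognized here
           // so the cache can later be moved onto a fallback path.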
  4267     if (funcDef->isTypeBarrier()) {
  4268         MTypeBarrier *barrier = funcDef->toTypeBarrier();
  4269         if (barrier->hasUses())
  4270             return nullptr;
  4271         if (barrier->type() != MIRType_Object)
  4272             return nullptr;
  4273         if (!barrier->input()->isGetPropertyCache())
  4274             return nullptr;
  4276         MGetPropertyCache *cache = barrier->input()->toGetPropertyCache();
  4277         if (cache->hasUses() && !cache->hasOneUse())
  4278             return nullptr;
  4279         if (!CanInlineGetPropertyCache(cache, thisDef))
  4280             return nullptr;
  4281         return cache;
  4284     return nullptr;
  4287 IonBuilder::InliningStatus
  4288 IonBuilder::inlineSingleCall(CallInfo &callInfo, JSFunction *target)
  4290     // Expects formals to be popped and wrapped.
  4291     if (target->isNative())
  4292         return inlineNativeCall(callInfo, target);
  4294     if (!inlineScriptedCall(callInfo, target))
  4295         return InliningStatus_Error;
  4296     return InliningStatus_Inlined;
  4299 IonBuilder::InliningStatus
  4300 IonBuilder::inlineCallsite(ObjectVector &targets, ObjectVector &originals,
  4301                            bool lambda, CallInfo &callInfo)
  4303     if (targets.empty())
  4304         return InliningStatus_NotInlined;
  4306     // Is the function provided by an MGetPropertyCache?
  4307     // If so, the cache may be movable to a fallback path, with a dispatch
  4308     // instruction guarding on the incoming TypeObject.
  4309     MGetPropertyCache *propCache = getInlineableGetPropertyCache(callInfo);
  4311     // Inline single targets -- unless they derive from a cache, in which case
  4312     // avoiding the cache and guarding is still faster.
  4313     if (!propCache && targets.length() == 1) {
  4314         JSFunction *target = &targets[0]->as<JSFunction>();
  4315         InliningDecision decision = makeInliningDecision(target, callInfo);
  4316         switch (decision) {
  4317           case InliningDecision_Error:
  4318             return InliningStatus_Error;
  4319           case InliningDecision_DontInline:
  4320             return InliningStatus_NotInlined;
  4321           case InliningDecision_Inline:
  4322             break;
  4325         // Inlining will eliminate uses of the original callee, but it needs to
  4326         // be preserved in phis if we bail out.  Mark the old callee definition as
  4327         // implicitly used to ensure this happens.
  4328         callInfo.fun()->setImplicitlyUsedUnchecked();
  4330         // If the callee is not going to be a lambda (which may vary across
  4331         // different invocations), then the callee definition can be replaced by a
  4332         // constant.
  4333         if (!lambda) {
  4334             // Replace the function with an MConstant.
  4335             MConstant *constFun = constant(ObjectValue(*target));
  4336             callInfo.setFun(constFun);
  4339         return inlineSingleCall(callInfo, target);
  4342     // Choose a subset of the targets for polymorphic inlining.
  4343     BoolVector choiceSet(alloc());
  4344     uint32_t numInlined;
  4345     if (!selectInliningTargets(targets, callInfo, choiceSet, &numInlined))
  4346         return InliningStatus_Error;
  4347     if (numInlined == 0)
  4348         return InliningStatus_NotInlined;
  4350     // Perform a polymorphic dispatch.
  4351     if (!inlineCalls(callInfo, targets, originals, choiceSet, propCache))
  4352         return InliningStatus_Error;
  4354     return InliningStatus_Inlined;
  4357 bool
  4358 IonBuilder::inlineGenericFallback(JSFunction *target, CallInfo &callInfo, MBasicBlock *dispatchBlock,
  4359                                   bool clonedAtCallsite)
  4361     // Generate a new block with all arguments on-stack.
  4362     MBasicBlock *fallbackBlock = newBlock(dispatchBlock, pc);
  4363     if (!fallbackBlock)
  4364         return false;
  4366     // Create a new CallInfo to track modified state within this block.
  4367     CallInfo fallbackInfo(alloc(), callInfo.constructing());
  4368     if (!fallbackInfo.init(callInfo))
  4369         return false;
  4370     fallbackInfo.popFormals(fallbackBlock);
  4372     // Generate an MCall, which uses stateful |current|.
  4373     if (!setCurrentAndSpecializePhis(fallbackBlock))
  4374         return false;
  4375     if (!makeCall(target, fallbackInfo, clonedAtCallsite))
  4376         return false;
  4378     // Pass return block to caller as |current|.
  4379     return true;
  4382 bool
  4383 IonBuilder::inlineTypeObjectFallback(CallInfo &callInfo, MBasicBlock *dispatchBlock,
  4384                                      MTypeObjectDispatch *dispatch, MGetPropertyCache *cache,
  4385                                      MBasicBlock **fallbackTarget)
  4387     // Getting here implies the following:
  4388     // 1. The call function is an MGetPropertyCache, or an MGetPropertyCache
  4389     //    followed by an MTypeBarrier.
  4390     JS_ASSERT(callInfo.fun()->isGetPropertyCache() || callInfo.fun()->isTypeBarrier());
  4392     // 2. The MGetPropertyCache has inlineable cases by guarding on the TypeObject.
  4393     JS_ASSERT(dispatch->numCases() > 0);
  4395     // 3. The MGetPropertyCache (and, if applicable, MTypeBarrier) only
  4396     //    have at most a single use.
  4397     JS_ASSERT_IF(callInfo.fun()->isGetPropertyCache(), !cache->hasUses());
  4398     JS_ASSERT_IF(callInfo.fun()->isTypeBarrier(), cache->hasOneUse());
  4400     // This means that no resume points yet capture the MGetPropertyCache,
  4401     // so everything from the MGetPropertyCache up until the call is movable.
  4402     // We now move the MGetPropertyCache and friends into a fallback path.
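           // Sketch of the fallback path built below (not from the original
           // comments):
           //
           //     dispatchBlock -> prepBlock (pops the formals)
           //                   -> getPropBlock (re-hosts the MGetPropertyCache)
           //                   -> preCallBlock -> generic call fallback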
  4404     // Create a new CallInfo to track modified state within the fallback path.
  4405     CallInfo fallbackInfo(alloc(), callInfo.constructing());
  4406     if (!fallbackInfo.init(callInfo))
  4407         return false;
  4409     // Capture stack prior to the call operation. This captures the function.
  4410     MResumePoint *preCallResumePoint =
  4411         MResumePoint::New(alloc(), dispatchBlock, pc, callerResumePoint_, MResumePoint::ResumeAt);
  4412     if (!preCallResumePoint)
  4413         return false;
  4415     DebugOnly<size_t> preCallFuncIndex = preCallResumePoint->numOperands() - callInfo.numFormals();
  4416     JS_ASSERT(preCallResumePoint->getOperand(preCallFuncIndex) == fallbackInfo.fun());
  4418     // In the dispatch block, replace the function's slot entry with Undefined.
  4419     MConstant *undefined = MConstant::New(alloc(), UndefinedValue());
  4420     dispatchBlock->add(undefined);
  4421     dispatchBlock->rewriteAtDepth(-int(callInfo.numFormals()), undefined);
  4423     // Construct a block that does nothing but remove formals from the stack.
  4424     // This is effectively changing the entry resume point of the later fallback block.
  4425     MBasicBlock *prepBlock = newBlock(dispatchBlock, pc);
  4426     if (!prepBlock)
  4427         return false;
  4428     fallbackInfo.popFormals(prepBlock);
  4430     // Construct a block into which the MGetPropertyCache can be moved.
  4431     // This is subtle: the pc and resume point are those of the MGetPropertyCache!
  4432     InlinePropertyTable *propTable = cache->propTable();
  4433     JS_ASSERT(propTable->pc() != nullptr);
  4434     JS_ASSERT(propTable->priorResumePoint() != nullptr);
  4435     MBasicBlock *getPropBlock = newBlock(prepBlock, propTable->pc(), propTable->priorResumePoint());
  4436     if (!getPropBlock)
  4437         return false;
  4439     prepBlock->end(MGoto::New(alloc(), getPropBlock));
  4441     // Since the getPropBlock inherited the stack from right before the MGetPropertyCache,
  4442     // the target of the MGetPropertyCache is still on the stack.
  4443     DebugOnly<MDefinition *> checkObject = getPropBlock->pop();
  4444     JS_ASSERT(checkObject == cache->object());
  4446     // Move the MGetPropertyCache and friends into the getPropBlock.
  4447     if (fallbackInfo.fun()->isGetPropertyCache()) {
  4448         JS_ASSERT(fallbackInfo.fun()->toGetPropertyCache() == cache);
  4449         getPropBlock->addFromElsewhere(cache);
  4450         getPropBlock->push(cache);
  4451     } else {
  4452         MTypeBarrier *barrier = callInfo.fun()->toTypeBarrier();
  4453         JS_ASSERT(barrier->type() == MIRType_Object);
  4454         JS_ASSERT(barrier->input()->isGetPropertyCache());
  4455         JS_ASSERT(barrier->input()->toGetPropertyCache() == cache);
  4457         getPropBlock->addFromElsewhere(cache);
  4458         getPropBlock->addFromElsewhere(barrier);
  4459         getPropBlock->push(barrier);
  4462     // Construct an end block with the correct resume point.
  4463     MBasicBlock *preCallBlock = newBlock(getPropBlock, pc, preCallResumePoint);
  4464     if (!preCallBlock)
  4465         return false;
  4466     getPropBlock->end(MGoto::New(alloc(), preCallBlock));
  4468     // Now inline the MCallGeneric, using preCallBlock as the dispatch point.
  4469     if (!inlineGenericFallback(nullptr, fallbackInfo, preCallBlock, false))
  4470         return false;
  4472     // inlineGenericFallback() set the return block as |current|.
  4473     preCallBlock->end(MGoto::New(alloc(), current));
  4474     *fallbackTarget = prepBlock;
  4475     return true;
  4478 bool
  4479 IonBuilder::inlineCalls(CallInfo &callInfo, ObjectVector &targets,
  4480                         ObjectVector &originals, BoolVector &choiceSet,
  4481                         MGetPropertyCache *maybeCache)
  4483     // Only handle polymorphic inlining.
  4484     JS_ASSERT(IsIonInlinablePC(pc));
  4485     JS_ASSERT(choiceSet.length() == targets.length());
  4486     JS_ASSERT_IF(!maybeCache, targets.length() >= 2);
  4487     JS_ASSERT_IF(maybeCache, targets.length() >= 1);
  4489     MBasicBlock *dispatchBlock = current;
  4490     callInfo.setImplicitlyUsedUnchecked();
  4491     callInfo.pushFormals(dispatchBlock);
  4493     // Patch any InlinePropertyTable to only contain functions that are inlineable.
  4494     //
  4495     // Note that we trim using originals, as callsite clones are not user
  4496     // visible. We don't patch the entries inside the table with the cloned
  4497     // targets, as the entries should only be used for comparison.
  4498     //
  4499     // The InlinePropertyTable will also be patched at the end to exclude native functions
  4500     // that vetoed inlining.
  4501     if (maybeCache) {
  4502         InlinePropertyTable *propTable = maybeCache->propTable();
  4503         propTable->trimToTargets(originals);
  4504         if (propTable->numEntries() == 0)
  4505             maybeCache = nullptr;
  4508     // Generate a dispatch based on guard kind.
  4509     MDispatchInstruction *dispatch;
  4510     if (maybeCache) {
  4511         dispatch = MTypeObjectDispatch::New(alloc(), maybeCache->object(), maybeCache->propTable());
  4512         callInfo.fun()->setImplicitlyUsedUnchecked();
  4513     } else {
  4514         dispatch = MFunctionDispatch::New(alloc(), callInfo.fun());
  4517     // Generate a return block to host the rval-collecting MPhi.
  4518     jsbytecode *postCall = GetNextPc(pc);
  4519     MBasicBlock *returnBlock = newBlock(nullptr, postCall);
  4520     if (!returnBlock)
  4521         return false;
  4522     returnBlock->setCallerResumePoint(callerResumePoint_);
  4524     // Set up stack, used to manually create a post-call resume point.
  4525     returnBlock->inheritSlots(dispatchBlock);
  4526     callInfo.popFormals(returnBlock);
  4528     MPhi *retPhi = MPhi::New(alloc(), returnBlock->stackDepth());
  4529     returnBlock->addPhi(retPhi);
  4530     returnBlock->push(retPhi);
  4532     // Create a resume point from current stack state.
  4533     returnBlock->initEntrySlots(alloc());
  4535     // Reserve the capacity for the phi.
  4536     // Note: this is an upper bound. Unreachable targets and uninlineable natives are also counted.
  4537     uint32_t count = 1; // Possible fallback block.
  4538     for (uint32_t i = 0; i < targets.length(); i++) {
  4539         if (choiceSet[i])
  4540             count++;
  4542     retPhi->reserveLength(count);
  4544     // During inlining the 'this' value is assigned a type set which is
  4545     // specialized to the type objects which can generate that inlining target.
  4546     // After inlining the original type set is restored.
  4547     types::TemporaryTypeSet *cacheObjectTypeSet =
  4548         maybeCache ? maybeCache->object()->resultTypeSet() : nullptr;
  4550     // Inline each of the inlineable targets.
  4551     JS_ASSERT(targets.length() == originals.length());
  4552     for (uint32_t i = 0; i < targets.length(); i++) {
  4553         // When original != target, the target is a callsite clone. The
  4554         // original should be used for guards, and the target should be the
  4555         // actual function inlined.
  4556         JSFunction *original = &originals[i]->as<JSFunction>();
  4557         JSFunction *target = &targets[i]->as<JSFunction>();
  4559         // Target must be inlineable.
  4560         if (!choiceSet[i])
  4561             continue;
  4563         // Target must be reachable by the MDispatchInstruction.
  4564         if (maybeCache && !maybeCache->propTable()->hasFunction(original)) {
  4565             choiceSet[i] = false;
  4566             continue;
  4569         MBasicBlock *inlineBlock = newBlock(dispatchBlock, pc);
  4570         if (!inlineBlock)
  4571             return false;
  4573         // Create a function MConstant to use in the entry ResumePoint.
  4574         MConstant *funcDef = MConstant::New(alloc(), ObjectValue(*target), constraints());
  4575         funcDef->setImplicitlyUsedUnchecked();
  4576         dispatchBlock->add(funcDef);
  4578         // Use the MConstant in the inline resume point and on stack.
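               // (The formals pushed above occupy the top callInfo.numFormals() slots, with
               // the callee deepest, so its slot index is numOperands() - numFormals().)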
  4579         int funIndex = inlineBlock->entryResumePoint()->numOperands() - callInfo.numFormals();
  4580         inlineBlock->entryResumePoint()->replaceOperand(funIndex, funcDef);
  4581         inlineBlock->rewriteSlot(funIndex, funcDef);
  4583         // Create a new CallInfo to track modified state within the inline block.
  4584         CallInfo inlineInfo(alloc(), callInfo.constructing());
  4585         if (!inlineInfo.init(callInfo))
  4586             return false;
  4587         inlineInfo.popFormals(inlineBlock);
  4588         inlineInfo.setFun(funcDef);
  4590         if (maybeCache) {
  4591             JS_ASSERT(callInfo.thisArg() == maybeCache->object());
  4592             types::TemporaryTypeSet *targetThisTypes =
  4593                 maybeCache->propTable()->buildTypeSetForFunction(original);
  4594             if (!targetThisTypes)
  4595                 return false;
  4596             maybeCache->object()->setResultTypeSet(targetThisTypes);
  4599         // Inline the call into the inlineBlock.
  4600         if (!setCurrentAndSpecializePhis(inlineBlock))
  4601             return false;
  4602         InliningStatus status = inlineSingleCall(inlineInfo, target);
  4603         if (status == InliningStatus_Error)
  4604             return false;
  4606         // Natives may veto inlining.
  4607         if (status == InliningStatus_NotInlined) {
  4608             JS_ASSERT(target->isNative());
  4609             JS_ASSERT(current == inlineBlock);
  4610             inlineBlock->discardAllResumePoints();
  4611             graph().removeBlock(inlineBlock);
  4612             choiceSet[i] = false;
  4613             continue;
  4616         // inlineSingleCall() changed |current| to the inline return block.
  4617         MBasicBlock *inlineReturnBlock = current;
  4618         setCurrent(dispatchBlock);
  4620         // Connect the inline path to the returnBlock.
  4621         //
  4622         // Note that guarding is on the original function pointer even
  4623         // if there is a clone, since cloning occurs at the callsite.
  4624         dispatch->addCase(original, inlineBlock);
  4626         MDefinition *retVal = inlineReturnBlock->peek(-1);
  4627         retPhi->addInput(retVal);
  4628         inlineReturnBlock->end(MGoto::New(alloc(), returnBlock));
  4629         if (!returnBlock->addPredecessorWithoutPhis(inlineReturnBlock))
  4630             return false;
  4633     // Patch the InlinePropertyTable to not dispatch to vetoed paths.
  4634     //
  4635     // Note that like above, we trim using originals instead of targets.
  4636     if (maybeCache) {
  4637         maybeCache->object()->setResultTypeSet(cacheObjectTypeSet);
  4639         InlinePropertyTable *propTable = maybeCache->propTable();
  4640         propTable->trimTo(originals, choiceSet);
  4642         // If all paths were vetoed, output only a generic fallback path.
  4643         if (propTable->numEntries() == 0) {
  4644             JS_ASSERT(dispatch->numCases() == 0);
  4645             maybeCache = nullptr;
  4649     // If necessary, generate a fallback path.
  4650     // MTypeObjectDispatch always uses a fallback path.
  4651     if (maybeCache || dispatch->numCases() < targets.length()) {
  4652         // Generate fallback blocks, and set |current| to the fallback return block.
  4653         if (maybeCache) {
  4654             MBasicBlock *fallbackTarget;
  4655             if (!inlineTypeObjectFallback(callInfo, dispatchBlock, (MTypeObjectDispatch *)dispatch,
  4656                                           maybeCache, &fallbackTarget))
  4658                 return false;
  4660             dispatch->addFallback(fallbackTarget);
  4661         } else {
  4662             JSFunction *remaining = nullptr;
  4663             bool clonedAtCallsite = false;
  4665             // If there is only 1 remaining case, we can annotate the fallback call
  4666             // with the target information.
  4667             if (dispatch->numCases() + 1 == originals.length()) {
  4668                 for (uint32_t i = 0; i < originals.length(); i++) {
  4669                     if (choiceSet[i])
  4670                         continue;
  4672                     remaining = &targets[i]->as<JSFunction>();
  4673                     clonedAtCallsite = targets[i] != originals[i];
  4674                     break;
  4678             if (!inlineGenericFallback(remaining, callInfo, dispatchBlock, clonedAtCallsite))
  4679                 return false;
  4680             dispatch->addFallback(current);
  4683         MBasicBlock *fallbackReturnBlock = current;
  4685         // Connect fallback case to return infrastructure.
  4686         MDefinition *retVal = fallbackReturnBlock->peek(-1);
  4687         retPhi->addInput(retVal);
  4688         fallbackReturnBlock->end(MGoto::New(alloc(), returnBlock));
  4689         if (!returnBlock->addPredecessorWithoutPhis(fallbackReturnBlock))
  4690             return false;
  4693     // Finally add the dispatch instruction.
  4694     // This must be done at the end so that add() may be called above.
  4695     dispatchBlock->end(dispatch);
  4697     // Check the depth change: +1 for retval
  4698     JS_ASSERT(returnBlock->stackDepth() == dispatchBlock->stackDepth() - callInfo.numFormals() + 1);
  4700     graph().moveBlockToEnd(returnBlock);
  4701     return setCurrentAndSpecializePhis(returnBlock);
  4704 MInstruction *
  4705 IonBuilder::createDeclEnvObject(MDefinition *callee, MDefinition *scope)
  4707     // Get a template DeclEnvObject that we'll use to generate inline object
  4708     // creation.
  4709     DeclEnvObject *templateObj = inspector->templateDeclEnvObject();
  4711     // One slot is added to the DeclEnv object to hold the function's name.  This
  4712     // never needs a dynamic slot because there is still plenty of room on the DeclEnv object.
  4713     JS_ASSERT(!templateObj->hasDynamicSlots());
  4715     // Allocate the actual object. It is important that no intervening
  4716     // instructions could potentially bailout, thus leaking the dynamic slots
  4717     // pointer.
  4718     MInstruction *declEnvObj = MNewDeclEnvObject::New(alloc(), templateObj);
  4719     current->add(declEnvObj);
  4721     // Initialize the object's reserved slots. No post barrier is needed here:
  4722     // the object will be allocated in the nursery if possible, and if the
  4723     // tenured heap is used instead, a minor collection will have been performed
  4724     // that moved scope/callee to the tenured heap.
  4725     current->add(MStoreFixedSlot::New(alloc(), declEnvObj, DeclEnvObject::enclosingScopeSlot(), scope));
  4726     current->add(MStoreFixedSlot::New(alloc(), declEnvObj, DeclEnvObject::lambdaSlot(), callee));
  4728     return declEnvObj;
  4731 MInstruction *
  4732 IonBuilder::createCallObject(MDefinition *callee, MDefinition *scope)
  4734     // Get a template CallObject that we'll use to generate inline object
  4735     // creation.
  4736     CallObject *templateObj = inspector->templateCallObject();
  4738     // If the CallObject needs dynamic slots, allocate those now.
  4739     MInstruction *slots;
  4740     if (templateObj->hasDynamicSlots()) {
  4741         size_t nslots = JSObject::dynamicSlotsCount(templateObj->numFixedSlots(),
  4742                                                     templateObj->lastProperty()->slotSpan(templateObj->getClass()),
  4743                                                     templateObj->getClass());
  4744         slots = MNewSlots::New(alloc(), nslots);
  4745     } else {
  4746         slots = MConstant::New(alloc(), NullValue());
  4748     current->add(slots);
  4750     // Allocate the actual object. It is important that no intervening
  4751     // instructions could potentially bailout, thus leaking the dynamic slots
  4752     // pointer. Run-once scripts need a singleton type, so always do a VM call
  4753     // in such cases.
  4754     MUnaryInstruction *callObj;
  4755     if (script()->treatAsRunOnce())
  4756         callObj = MNewRunOnceCallObject::New(alloc(), templateObj, slots);
  4757     else
  4758         callObj = MNewCallObject::New(alloc(), templateObj, slots);
  4759     current->add(callObj);
  4761     // Initialize the object's reserved slots. No post barrier is needed here,
  4762     // for the same reason as in createDeclEnvObject.
  4763     current->add(MStoreFixedSlot::New(alloc(), callObj, CallObject::enclosingScopeSlot(), scope));
  4764     current->add(MStoreFixedSlot::New(alloc(), callObj, CallObject::calleeSlot(), callee));
  4766     // Initialize argument slots.
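           // Each aliased formal is copied out of its frame slot into the CallObject,
           // into a fixed slot when it fits and into a dynamic slot otherwise.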
  4767     for (AliasedFormalIter i(script()); i; i++) {
  4768         unsigned slot = i.scopeSlot();
  4769         unsigned formal = i.frameIndex();
  4770         MDefinition *param = current->getSlot(info().argSlotUnchecked(formal));
  4771         if (slot >= templateObj->numFixedSlots())
  4772             current->add(MStoreSlot::New(alloc(), slots, slot - templateObj->numFixedSlots(), param));
  4773         else
  4774             current->add(MStoreFixedSlot::New(alloc(), callObj, slot, param));
  4777     return callObj;
  4780 MDefinition *
  4781 IonBuilder::createThisScripted(MDefinition *callee)
  4783     // Get callee.prototype.
  4784     //
  4785     // This instruction MUST be idempotent: since it does not correspond to an
  4786     // explicit operation in the bytecode, we cannot use resumeAfter().
  4787     // Getters may not override |prototype| fetching, so this operation is indeed idempotent.
  4788     // - First try an idempotent property cache.
  4789     // - If the idempotent property cache has been invalidated, we can't use a
  4790     //   non-idempotent cache, so we fall back to CallGetProperty.
  4791     //
  4792     // Note: both CallGetProperty and GetPropertyCache can trigger a GC,
  4793     //       and thus invalidation.
  4794     MInstruction *getProto;
  4795     if (!invalidatedIdempotentCache()) {
  4796         MGetPropertyCache *getPropCache = MGetPropertyCache::New(alloc(), callee, names().prototype,
  4797                                                                  /* monitored = */ false);
  4798         getPropCache->setIdempotent();
  4799         getProto = getPropCache;
  4800     } else {
  4801         MCallGetProperty *callGetProp = MCallGetProperty::New(alloc(), callee, names().prototype,
  4802                                                               /* callprop = */ false);
  4803         callGetProp->setIdempotent();
  4804         getProto = callGetProp;
  4806     current->add(getProto);
  4808     // Create this from prototype
  4809     MCreateThisWithProto *createThis = MCreateThisWithProto::New(alloc(), callee, getProto);
  4810     current->add(createThis);
  4812     return createThis;
  4815 JSObject *
  4816 IonBuilder::getSingletonPrototype(JSFunction *target)
  4818     if (!target || !target->hasSingletonType())
  4819         return nullptr;
  4820     types::TypeObjectKey *targetType = types::TypeObjectKey::get(target);
  4821     if (targetType->unknownProperties())
  4822         return nullptr;
  4824     jsid protoid = NameToId(names().prototype);
  4825     types::HeapTypeSetKey protoProperty = targetType->property(protoid);
  4827     return protoProperty.singleton(constraints());
  4830 MDefinition *
  4831 IonBuilder::createThisScriptedSingleton(JSFunction *target, MDefinition *callee)
  4833     // Get the singleton prototype (if it exists)
  4834     JSObject *proto = getSingletonPrototype(target);
  4835     if (!proto)
  4836         return nullptr;
  4838     JSObject *templateObject = inspector->getTemplateObject(pc);
  4839     if (!templateObject || !templateObject->is<JSObject>())
  4840         return nullptr;
  4841     if (!templateObject->hasTenuredProto() || templateObject->getProto() != proto)
  4842         return nullptr;
  4844     if (!target->nonLazyScript()->types)
  4845         return nullptr;
  4846     if (!types::TypeScript::ThisTypes(target->nonLazyScript())->hasType(types::Type::ObjectType(templateObject)))
  4847         return nullptr;
  4849     // For template objects with NewScript info, the appropriate allocation
  4850     // kind to use may change due to dynamic property adds. In these cases
  4851     // the Ion code making the call will be invalidated, but any baseline template object
  4852     // may be stale. Update to the correct template object in this case.
  4853     types::TypeObject *templateType = templateObject->type();
  4854     if (templateType->hasNewScript()) {
  4855         templateObject = templateType->newScript()->templateObject;
  4856         JS_ASSERT(templateObject->type() == templateType);
  4858         // Trigger recompilation if the templateObject changes.
  4859         types::TypeObjectKey::get(templateType)->watchStateChangeForNewScriptTemplate(constraints());
  4862     // Generate an inline path to create a new |this| object with
  4863     // the given singleton prototype.
  4864     MCreateThisWithTemplate *createThis =
  4865         MCreateThisWithTemplate::New(alloc(), constraints(), templateObject,
  4866                                      templateObject->type()->initialHeap(constraints()));
  4867     current->add(createThis);
  4869     return createThis;
  4872 MDefinition *
  4873 IonBuilder::createThis(JSFunction *target, MDefinition *callee)
  4875     // Create this for unknown target
  4876     if (!target) {
  4877         MCreateThis *createThis = MCreateThis::New(alloc(), callee);
  4878         current->add(createThis);
  4879         return createThis;
  4882     // Native constructors build the new Object themselves.
  4883     if (target->isNative()) {
  4884         if (!target->isNativeConstructor())
  4885             return nullptr;
  4887         MConstant *magic = MConstant::New(alloc(), MagicValue(JS_IS_CONSTRUCTING));
  4888         current->add(magic);
  4889         return magic;
  4892     // Try baking in the prototype.
  4893     MDefinition *createThis = createThisScriptedSingleton(target, callee);
  4894     if (createThis)
  4895         return createThis;
  4897     return createThisScripted(callee);
  4900 bool
  4901 IonBuilder::jsop_funcall(uint32_t argc)
  4903     // Stack for JSOP_FUNCALL:
  4904     // 1:      arg0
  4905     // ...
  4906     // argc:   argN
  4907     // argc+1: JSFunction*, the 'f' in |f.call()|, in |this| position.
  4908     // argc+2: The native 'call' function.
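           // Illustrative example: for |f.call(obj, x)|, argc is 2 and the stack is
           // |call, f, obj, x|. The shimmy below removes the native |call|, so the
           // operation is compiled as |f(x)| with |obj| as |this|.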
  4910     int calleeDepth = -((int)argc + 2);
  4911     int funcDepth = -((int)argc + 1);
  4913     // If |Function.prototype.call| may be overridden, don't optimize callsite.
  4914     types::TemporaryTypeSet *calleeTypes = current->peek(calleeDepth)->resultTypeSet();
  4915     JSFunction *native = getSingleCallTarget(calleeTypes);
  4916     if (!native || !native->isNative() || native->native() != &js_fun_call) {
  4917         CallInfo callInfo(alloc(), false);
  4918         if (!callInfo.init(current, argc))
  4919             return false;
  4920         return makeCall(native, callInfo, false);
  4922     current->peek(calleeDepth)->setImplicitlyUsedUnchecked();
  4924     // Extract call target.
  4925     types::TemporaryTypeSet *funTypes = current->peek(funcDepth)->resultTypeSet();
  4926     JSFunction *target = getSingleCallTarget(funTypes);
  4928     // Shimmy the slots down to remove the native 'call' function.
  4929     current->shimmySlots(funcDepth - 1);
  4931     bool zeroArguments = (argc == 0);
  4933     // If no |this| argument was provided, explicitly pass Undefined.
  4934     // Pushing is safe here, since one stack slot has been removed.
  4935     if (zeroArguments) {
  4936         pushConstant(UndefinedValue());
  4937     } else {
  4938         // |this| becomes implicit in the call.
  4939         argc -= 1;
  4942     CallInfo callInfo(alloc(), false);
  4943     if (!callInfo.init(current, argc))
  4944         return false;
  4946     // Try to inline the call.
  4947     if (!zeroArguments) {
  4948         InliningDecision decision = makeInliningDecision(target, callInfo);
  4949         switch (decision) {
  4950           case InliningDecision_Error:
  4951             return false;
  4952           case InliningDecision_DontInline:
  4953             break;
  4954           case InliningDecision_Inline:
  4955             if (target->isInterpreted())
  4956                 return inlineScriptedCall(callInfo, target);
  4957             break;
  4961     // Call without inlining.
  4962     return makeCall(target, callInfo, false);
  4965 bool
  4966 IonBuilder::jsop_funapply(uint32_t argc)
  4968     int calleeDepth = -((int)argc + 2);
  4970     types::TemporaryTypeSet *calleeTypes = current->peek(calleeDepth)->resultTypeSet();
  4971     JSFunction *native = getSingleCallTarget(calleeTypes);
  4972     if (argc != 2) {
  4973         CallInfo callInfo(alloc(), false);
  4974         if (!callInfo.init(current, argc))
  4975             return false;
  4976         return makeCall(native, callInfo, false);
  4979     // Disable compilation if the second argument to |apply| cannot be guaranteed
  4980     // to be either definitely |arguments| or definitely not |arguments|.
  4981     MDefinition *argument = current->peek(-1);
  4982     if (script()->argumentsHasVarBinding() &&
  4983         argument->mightBeType(MIRType_MagicOptimizedArguments) &&
  4984         argument->type() != MIRType_MagicOptimizedArguments)
  4986         return abort("fun.apply with MaybeArguments");
  4989     // Fallback to regular call if arg 2 is not definitely |arguments|.
  4990     if (argument->type() != MIRType_MagicOptimizedArguments) {
  4991         CallInfo callInfo(alloc(), false);
  4992         if (!callInfo.init(current, argc))
  4993             return false;
  4994         return makeCall(native, callInfo, false);
  4997     if (!native ||
  4998         !native->isNative() ||
  4999         native->native() != js_fun_apply)
  5001         return abort("fun.apply speculation failed");
  5004     current->peek(calleeDepth)->setImplicitlyUsedUnchecked();
  5006     // Use the funapply path that definitely uses |arguments|
  5007     return jsop_funapplyarguments(argc);
  5010 bool
  5011 IonBuilder::jsop_funapplyarguments(uint32_t argc)
  5013     // Stack for JSOP_FUNAPPLY:
  5014     // 1:      Vp
  5015     // 2:      This
  5016     // argc+1: JSFunction*, the 'f' in |f.apply()|, in |this| position.
  5017     // argc+2: The native 'apply' function.
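           // Illustrative example: for |f.apply(thisObj, arguments)|, argc is 2 and
           // the stack is |apply, f, thisObj, arguments|, with |arguments| on top.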
  5019     int funcDepth = -((int)argc + 1);
  5021     // Extract call target.
  5022     types::TemporaryTypeSet *funTypes = current->peek(funcDepth)->resultTypeSet();
  5023     JSFunction *target = getSingleCallTarget(funTypes);
  5025     // When this script isn't inlined, use MApplyArgs
  5026     // to copy the arguments from the stack and call the function.
  5027     if (inliningDepth_ == 0 && info().executionMode() != DefinitePropertiesAnalysis) {
  5028         // The array argument corresponds to the arguments object. As the JIT
  5029         // is implicitly reading the arguments object in the next instruction,
  5030         // we need to prevent the deletion of the arguments object from resume
  5031         // points, so that Baseline will behave correctly after a bailout.
  5032         MDefinition *vp = current->pop();
  5033         vp->setImplicitlyUsedUnchecked();
  5035         MDefinition *argThis = current->pop();
  5037         // Unwrap the (JSFunction *) parameter.
  5038         MDefinition *argFunc = current->pop();
  5040         // Pop apply function.
  5041         current->pop();
  5043         MArgumentsLength *numArgs = MArgumentsLength::New(alloc());
  5044         current->add(numArgs);
  5046         MApplyArgs *apply = MApplyArgs::New(alloc(), target, argFunc, numArgs, argThis);
  5047         current->add(apply);
  5048         current->push(apply);
  5049         if (!resumeAfter(apply))
  5050             return false;
  5052         types::TemporaryTypeSet *types = bytecodeTypes(pc);
  5053         return pushTypeBarrier(apply, types, true);
  5056     // When inlining, we know the arguments the function gets called with,
  5057     // so we can optimize further by calling the target directly with those arguments.
  5058     // We also take this path when doing the definite properties analysis, as we
  5059     // can inline the apply() target and don't care about the actual arguments
  5060     // that were passed in.
  5062     CallInfo callInfo(alloc(), false);
  5064     // Vp
  5065     MDefinition *vp = current->pop();
  5066     vp->setImplicitlyUsedUnchecked();
  5068     // Arguments
  5069     MDefinitionVector args(alloc());
  5070     if (inliningDepth_) {
  5071         if (!args.appendAll(inlineCallInfo_->argv()))
  5072             return false;
  5074     callInfo.setArgs(&args);
  5076     // This
  5077     MDefinition *argThis = current->pop();
  5078     callInfo.setThis(argThis);
  5080     // Pop function parameter.
  5081     MDefinition *argFunc = current->pop();
  5082     callInfo.setFun(argFunc);
  5084     // Pop apply function.
  5085     current->pop();
  5087     // Try to inline the call.
  5088     InliningDecision decision = makeInliningDecision(target, callInfo);
  5089     switch (decision) {
  5090       case InliningDecision_Error:
  5091         return false;
  5092       case InliningDecision_DontInline:
  5093         break;
  5094       case InliningDecision_Inline:
  5095         if (target->isInterpreted())
  5096             return inlineScriptedCall(callInfo, target);
  5099     return makeCall(target, callInfo, false);
  5102 bool
  5103 IonBuilder::jsop_call(uint32_t argc, bool constructing)
  5105     // If this call has never executed, try to seed the observed type set
  5106     // based on how the call result is used.
  5107     types::TemporaryTypeSet *observed = bytecodeTypes(pc);
  5108     if (observed->empty()) {
  5109         if (BytecodeFlowsToBitop(pc)) {
  5110             observed->addType(types::Type::Int32Type(), alloc_->lifoAlloc());
  5111         } else if (*GetNextPc(pc) == JSOP_POS) {
  5112             // Note: this is lame, overspecialized on the code patterns used
  5113             // by asm.js and should be replaced by a more general mechanism.
  5114             // See bug 870847.
  5115             observed->addType(types::Type::DoubleType(), alloc_->lifoAlloc());
  5119     int calleeDepth = -((int)argc + 2);
  5121     // Acquire the known call targets, if any exist.
  5122     ObjectVector originals(alloc());
  5123     bool gotLambda = false;
  5124     types::TemporaryTypeSet *calleeTypes = current->peek(calleeDepth)->resultTypeSet();
  5125     if (calleeTypes) {
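               // Collect up to 4 candidate call targets for polymorphic inlining.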
  5126         if (!getPolyCallTargets(calleeTypes, constructing, originals, 4, &gotLambda))
  5127             return false;
  5129     JS_ASSERT_IF(gotLambda, originals.length() <= 1);
  5131     // If any call targets need to be cloned, look for existing clones to use.
  5132     // Keep track of the originals, as we need to dispatch on them for polymorphic inlining.
  5133     bool hasClones = false;
  5134     ObjectVector targets(alloc());
  5135     for (uint32_t i = 0; i < originals.length(); i++) {
  5136         JSFunction *fun = &originals[i]->as<JSFunction>();
  5137         if (fun->hasScript() && fun->nonLazyScript()->shouldCloneAtCallsite()) {
  5138             if (JSFunction *clone = ExistingCloneFunctionAtCallsite(compartment->callsiteClones(), fun, script(), pc)) {
  5139                 fun = clone;
  5140                 hasClones = true;
  5143         if (!targets.append(fun))
  5144             return false;
  5147     CallInfo callInfo(alloc(), constructing);
  5148     if (!callInfo.init(current, argc))
  5149         return false;
  5151     // Try inlining
  5152     InliningStatus status = inlineCallsite(targets, originals, gotLambda, callInfo);
  5153     if (status == InliningStatus_Inlined)
  5154         return true;
  5155     if (status == InliningStatus_Error)
  5156         return false;
  5158     // Not inlined; just make the call.
  5159     JSFunction *target = nullptr;
  5160     if (targets.length() == 1)
  5161         target = &targets[0]->as<JSFunction>();
  5163     return makeCall(target, callInfo, hasClones);
  5166 MDefinition *
  5167 IonBuilder::makeCallsiteClone(JSFunction *target, MDefinition *fun)
  5169     // Bake in the clone eagerly if we have a known target. We have arrived here
  5170     // because TI told us that the known target is a should-clone-at-callsite
  5171     // function, which means that target already is the clone. Make sure that the
  5172     // old definition remains in resume points.
  5173     if (target) {
  5174         fun->setImplicitlyUsedUnchecked();
  5175         return constant(ObjectValue(*target));
  5178     // Add a callsite clone IC if we have multiple targets. Note that we
  5179     // should have checked already that at least some targets are marked as
  5180     // should-clone-at-callsite.
  5181     MCallsiteCloneCache *clone = MCallsiteCloneCache::New(alloc(), fun, pc);
  5182     current->add(clone);
  5183     return clone;
  5186 bool
  5187 IonBuilder::testShouldDOMCall(types::TypeSet *inTypes,
  5188                               JSFunction *func, JSJitInfo::OpType opType)
  5190     if (!func->isNative() || !func->jitInfo())
  5191         return false;
  5193     // If all the DOM objects flowing through are legal with this
  5194     // property, we can bake in a call to the bottom half of the DOM
  5195     // accessor
  5196     DOMInstanceClassMatchesProto instanceChecker =
  5197         compartment->runtime()->DOMcallbacks()->instanceClassMatchesProto;
  5199     const JSJitInfo *jinfo = func->jitInfo();
  5200     if (jinfo->type() != opType)
  5201         return false;
  5203     for (unsigned i = 0; i < inTypes->getObjectCount(); i++) {
  5204         types::TypeObjectKey *curType = inTypes->getObject(i);
  5205         if (!curType)
  5206             continue;
  5208         if (!curType->hasTenuredProto())
  5209             return false;
  5210         JSObject *proto = curType->proto().toObjectOrNull();
  5211         if (!instanceChecker(proto, jinfo->protoID, jinfo->depth))
  5212             return false;
  5215     return true;
  5218 static bool
  5219 ArgumentTypesMatch(MDefinition *def, types::StackTypeSet *calleeTypes)
  5221     if (def->resultTypeSet()) {
  5222         JS_ASSERT(def->type() == MIRType_Value || def->mightBeType(def->type()));
  5223         return def->resultTypeSet()->isSubset(calleeTypes);
  5226     if (def->type() == MIRType_Value)
  5227         return false;
  5229     if (def->type() == MIRType_Object)
  5230         return calleeTypes->unknownObject();
  5232     return calleeTypes->mightBeMIRType(def->type());
  5235 bool
  5236 IonBuilder::testNeedsArgumentCheck(JSFunction *target, CallInfo &callInfo)
  5238     // If we have a known target, check if the caller arg types are a subset of the callee's.
  5239     // Since type sets only accumulate and can't shrink, this means we don't need to check
  5240     // the arguments again.
  5241     if (!target->hasScript())
  5242         return true;
  5244     JSScript *targetScript = target->nonLazyScript();
  5246     if (!targetScript->types)
  5247         return true;
  5249     if (!ArgumentTypesMatch(callInfo.thisArg(), types::TypeScript::ThisTypes(targetScript)))
  5250         return true;
  5251     uint32_t expected_args = Min<uint32_t>(callInfo.argc(), target->nargs());
  5252     for (size_t i = 0; i < expected_args; i++) {
  5253         if (!ArgumentTypesMatch(callInfo.getArg(i), types::TypeScript::ArgTypes(targetScript, i)))
  5254             return true;
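           // Formals the caller does not pass are |undefined| in the callee, so their
           // recorded argument types must allow undefined as well.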
  5256     for (size_t i = callInfo.argc(); i < target->nargs(); i++) {
  5257         if (!types::TypeScript::ArgTypes(targetScript, i)->mightBeMIRType(MIRType_Undefined))
  5258             return true;
  5261     return false;
  5264 MCall *
  5265 IonBuilder::makeCallHelper(JSFunction *target, CallInfo &callInfo, bool cloneAtCallsite)
  5267     // This function may be called with a mutated stack.
  5268     // Querying TI for popped types is invalid.
  5270     uint32_t targetArgs = callInfo.argc();
  5272     // Collect number of missing arguments provided that the target is
  5273     // scripted. Native functions are passed an explicit 'argc' parameter.
  5274     if (target && !target->isNative())
  5275         targetArgs = Max<uint32_t>(target->nargs(), callInfo.argc());
  5277     bool isDOMCall = false;
  5278     if (target && !callInfo.constructing()) {
  5279         // We know we have a single call target.  Check whether the "this" types
  5280         // are DOM types and our function a DOM function, and if so flag the
  5281         // MCall accordingly.
  5282         types::TemporaryTypeSet *thisTypes = callInfo.thisArg()->resultTypeSet();
  5283         if (thisTypes &&
  5284             thisTypes->getKnownMIRType() == MIRType_Object &&
  5285             thisTypes->isDOMClass() &&
  5286             testShouldDOMCall(thisTypes, target, JSJitInfo::Method))
  5288             isDOMCall = true;
  5292     MCall *call = MCall::New(alloc(), target, targetArgs + 1, callInfo.argc(),
  5293                              callInfo.constructing(), isDOMCall);
  5294     if (!call)
  5295         return nullptr;
  5297     // Explicitly pad any missing arguments with |undefined|.
  5298     // This permits skipping the argumentsRectifier.
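           // Illustrative example: if the target declares 3 formals but only 1 actual
           // argument is given, two |undefined| values are appended here.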
  5299     for (int i = targetArgs; i > (int)callInfo.argc(); i--) {
  5300         JS_ASSERT_IF(target, !target->isNative());
  5301         MConstant *undef = constant(UndefinedValue());
  5302         call->addArg(i, undef);
  5305     // Add explicit arguments.
  5306     // Skip addArg(0) because it is reserved for this
  5307     for (int32_t i = callInfo.argc() - 1; i >= 0; i--)
  5308         call->addArg(i + 1, callInfo.getArg(i));
  5310     // Now that we've told it about all the args, compute whether it's movable
  5311     call->computeMovable();
  5313     // Inline the constructor on the caller-side.
  5314     if (callInfo.constructing()) {
  5315         MDefinition *create = createThis(target, callInfo.fun());
  5316         if (!create) {
  5317             abort("Failure inlining constructor for call.");
  5318             return nullptr;
  5321         callInfo.thisArg()->setImplicitlyUsedUnchecked();
  5322         callInfo.setThis(create);
  5325     // Pass |this| and function.
  5326     MDefinition *thisArg = callInfo.thisArg();
  5327     call->addArg(0, thisArg);
  5329     // Add a callsite clone IC for multiple targets which all should be
  5330     // callsite cloned, or bake in the clone for a single target.
  5331     if (cloneAtCallsite) {
  5332         MDefinition *fun = makeCallsiteClone(target, callInfo.fun());
  5333         callInfo.setFun(fun);
  5336     if (target && !testNeedsArgumentCheck(target, callInfo))
  5337         call->disableArgCheck();
  5339     call->initFunction(callInfo.fun());
  5341     current->add(call);
  5342     return call;
  5345 static bool
  5346 DOMCallNeedsBarrier(const JSJitInfo* jitinfo, types::TemporaryTypeSet *types)
  5348     // If the return type of our DOM native is in "types" already, we don't
  5349     // actually need a barrier.
  5350     if (jitinfo->returnType() == JSVAL_TYPE_UNKNOWN)
  5351         return true;
  5353     // JSVAL_TYPE_OBJECT doesn't tell us much; we still have to barrier on the
  5354     // actual type of the object.
  5355     if (jitinfo->returnType() == JSVAL_TYPE_OBJECT)
  5356         return true;
  5358     // No need for a barrier if we're already expecting the type we'll produce.
  5359     return MIRTypeFromValueType(jitinfo->returnType()) != types->getKnownMIRType();
  5362 bool
  5363 IonBuilder::makeCall(JSFunction *target, CallInfo &callInfo, bool cloneAtCallsite)
  5365     // Constructor calls to non-constructors should throw. We don't want to use
  5366     // CallKnown in this case.
  5367     JS_ASSERT_IF(callInfo.constructing() && target,
  5368                  target->isInterpretedConstructor() || target->isNativeConstructor());
  5370     MCall *call = makeCallHelper(target, callInfo, cloneAtCallsite);
  5371     if (!call)
  5372         return false;
  5374     current->push(call);
  5375     if (call->isEffectful() && !resumeAfter(call))
  5376         return false;
  5378     types::TemporaryTypeSet *types = bytecodeTypes(pc);
  5380     if (call->isCallDOMNative())
  5381         return pushDOMTypeBarrier(call, types, call->getSingleTarget());
  5383     return pushTypeBarrier(call, types, true);
  5386 bool
  5387 IonBuilder::jsop_eval(uint32_t argc)
  5389     int calleeDepth = -((int)argc + 2);
  5390     types::TemporaryTypeSet *calleeTypes = current->peek(calleeDepth)->resultTypeSet();
  5392     // Emit a normal call if the eval has never executed. This keeps us from
  5393     // disabling compilation for the script when testing with --ion-eager.
  5394     if (calleeTypes && calleeTypes->empty())
  5395         return jsop_call(argc, /* constructing = */ false);
  5397     JSFunction *singleton = getSingleCallTarget(calleeTypes);
  5398     if (!singleton)
  5399         return abort("No singleton callee for eval()");
  5401     if (script()->global().valueIsEval(ObjectValue(*singleton))) {
  5402         if (argc != 1)
  5403             return abort("Direct eval with more than one argument");
  5405         if (!info().funMaybeLazy())
  5406             return abort("Direct eval in global code");
  5408         // The 'this' value for the outer and eval scripts must be the
  5409         // same. This is not guaranteed if a primitive string/number/etc.
  5410         // is passed through to the eval invoke as the primitive may be
  5411         // boxed into different objects if accessed via 'this'.
  5412         MIRType type = thisTypes->getKnownMIRType();
  5413         if (type != MIRType_Object && type != MIRType_Null && type != MIRType_Undefined)
  5414             return abort("Direct eval from script with maybe-primitive 'this'");
  5416         CallInfo callInfo(alloc(), /* constructing = */ false);
  5417         if (!callInfo.init(current, argc))
  5418             return false;
  5419         callInfo.setImplicitlyUsedUnchecked();
  5421         callInfo.fun()->setImplicitlyUsedUnchecked();
  5423         MDefinition *scopeChain = current->scopeChain();
  5424         MDefinition *string = callInfo.getArg(0);
  5426         // Direct eval acts as identity on non-string types according to
  5427         // ES5 15.1.2.1 step 1.
  5428         if (!string->mightBeType(MIRType_String)) {
  5429             current->push(string);
  5430             types::TemporaryTypeSet *types = bytecodeTypes(pc);
  5431             return pushTypeBarrier(string, types, true);
  5434         current->pushSlot(info().thisSlot());
  5435         MDefinition *thisValue = current->pop();
  5437         // Try to pattern match 'eval(v + "()")'. In this case v is likely a
  5438         // name on the scope chain and the eval is performing a call on that
  5439         // value. Use a dynamic scope chain lookup rather than a full eval.
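               // That is, |eval(name + "()")| becomes an MGetDynamicName lookup of |name|
               // on the scope chain, followed by an ordinary call with an unknown target.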
  5440         if (string->isConcat() &&
  5441             string->getOperand(1)->isConstant() &&
  5442             string->getOperand(1)->toConstant()->value().isString())
  5444             JSAtom *atom = &string->getOperand(1)->toConstant()->value().toString()->asAtom();
  5446             if (StringEqualsAscii(atom, "()")) {
  5447                 MDefinition *name = string->getOperand(0);
  5448                 MInstruction *dynamicName = MGetDynamicName::New(alloc(), scopeChain, name);
  5449                 current->add(dynamicName);
  5451                 current->push(dynamicName);
  5452                 current->push(thisValue);
  5454                 CallInfo evalCallInfo(alloc(), /* constructing = */ false);
  5455                 if (!evalCallInfo.init(current, /* argc = */ 0))
  5456                     return false;
  5458                 return makeCall(nullptr, evalCallInfo, false);
  5462         MInstruction *filterArguments = MFilterArgumentsOrEval::New(alloc(), string);
  5463         current->add(filterArguments);
  5465         MInstruction *ins = MCallDirectEval::New(alloc(), scopeChain, string, thisValue, pc);
  5466         current->add(ins);
  5467         current->push(ins);
  5469         types::TemporaryTypeSet *types = bytecodeTypes(pc);
  5470         return resumeAfter(ins) && pushTypeBarrier(ins, types, true);
  5473     return jsop_call(argc, /* constructing = */ false);
  5476 bool
  5477 IonBuilder::jsop_compare(JSOp op)
  5479     MDefinition *right = current->pop();
  5480     MDefinition *left = current->pop();
  5482     MCompare *ins = MCompare::New(alloc(), left, right, op);
  5483     current->add(ins);
  5484     current->push(ins);
  5486     ins->infer(inspector, pc);
  5488     if (ins->isEffectful() && !resumeAfter(ins))
  5489         return false;
  5490     return true;
  5493 bool
  5494 IonBuilder::jsop_newarray(uint32_t count)
  5496     JS_ASSERT(script()->compileAndGo());
  5498     JSObject *templateObject = inspector->getTemplateObject(pc);
  5499     if (!templateObject)
  5500         return abort("No template object for NEWARRAY");
  5502     JS_ASSERT(templateObject->is<ArrayObject>());
  5503     if (templateObject->type()->unknownProperties()) {
  5504         // We will get confused in jsop_initelem_array if we can't find the
  5505         // type object being initialized.
  5506         return abort("New array has unknown properties");
  5509     MNewArray *ins = MNewArray::New(alloc(), constraints(), count, templateObject,
  5510                                     templateObject->type()->initialHeap(constraints()),
  5511                                     MNewArray::NewArray_Allocating);
  5512     current->add(ins);
  5513     current->push(ins);
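           // Record the double-conversion decision on the template object so that
           // jsop_initelem_array stores elements in the matching representation.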
  5515     types::TemporaryTypeSet::DoubleConversion conversion =
  5516         ins->resultTypeSet()->convertDoubleElements(constraints());
  5518     if (conversion == types::TemporaryTypeSet::AlwaysConvertToDoubles)
  5519         templateObject->setShouldConvertDoubleElements();
  5520     else
  5521         templateObject->clearShouldConvertDoubleElements();
  5522     return true;
  5525 bool
  5526 IonBuilder::jsop_newobject()
  5528     // Don't bake in the TypeObject for non-compile-and-go scripts.
  5529     JS_ASSERT(script()->compileAndGo());
  5531     JSObject *templateObject = inspector->getTemplateObject(pc);
  5532     if (!templateObject)
  5533         return abort("No template object for NEWOBJECT");
  5535     JS_ASSERT(templateObject->is<JSObject>());
  5536     MNewObject *ins = MNewObject::New(alloc(), constraints(), templateObject,
  5537                                       templateObject->hasSingletonType()
  5538                                       ? gc::TenuredHeap
  5539                                       : templateObject->type()->initialHeap(constraints()),
  5540                                       /* templateObjectIsClassPrototype = */ false);
  5542     current->add(ins);
  5543     current->push(ins);
  5545     return resumeAfter(ins);
  5548 bool
  5549 IonBuilder::jsop_initelem()
  5551     MDefinition *value = current->pop();
  5552     MDefinition *id = current->pop();
  5553     MDefinition *obj = current->peek(-1);
  5555     MInitElem *initElem = MInitElem::New(alloc(), obj, id, value);
  5556     current->add(initElem);
  5558     return resumeAfter(initElem);
  5561 bool
  5562 IonBuilder::jsop_initelem_array()
  5564     MDefinition *value = current->pop();
  5565     MDefinition *obj = current->peek(-1);
  5567     // Make sure that arrays have the type being written to them by the
  5568     // initializer, and that arrays are marked as non-packed when writing holes
  5569     // to them during initialization.
  5570     bool needStub = false;
  5571     types::TypeObjectKey *initializer = obj->resultTypeSet()->getObject(0);
  5572     if (value->type() == MIRType_MagicHole) {
  5573         if (!initializer->hasFlags(constraints(), types::OBJECT_FLAG_NON_PACKED))
  5574             needStub = true;
  5575     } else if (!initializer->unknownProperties()) {
  5576         types::HeapTypeSetKey elemTypes = initializer->property(JSID_VOID);
  5577         if (!TypeSetIncludes(elemTypes.maybeTypes(), value->type(), value->resultTypeSet())) {
  5578             elemTypes.freeze(constraints());
  5579             needStub = true;
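           // A post write barrier may be needed for generational GC when storing a
           // possibly nursery-allocated value into |obj|.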
  5583     if (NeedsPostBarrier(info(), value))
  5584         current->add(MPostWriteBarrier::New(alloc(), obj, value));
  5586     if (needStub) {
  5587         MCallInitElementArray *store = MCallInitElementArray::New(alloc(), obj, GET_UINT24(pc), value);
  5588         current->add(store);
  5589         return resumeAfter(store);
  5592     MConstant *id = MConstant::New(alloc(), Int32Value(GET_UINT24(pc)));
  5593     current->add(id);
  5595     // Get the elements vector.
  5596     MElements *elements = MElements::New(alloc(), obj);
  5597     current->add(elements);
  5599     JSObject *templateObject = obj->toNewArray()->templateObject();
  5601     if (templateObject->shouldConvertDoubleElements()) {
  5602         MInstruction *valueDouble = MToDouble::New(alloc(), value);
  5603         current->add(valueDouble);
  5604         value = valueDouble;
  5607     // Store the value.
  5608     MStoreElement *store = MStoreElement::New(alloc(), elements, id, value, /* needsHoleCheck = */ false);
  5609     current->add(store);
  5611     // Update the initialized length. (The template object for this array has
  5612     // the array's ultimate length, so the length field is already correct: no
  5613     // updating needed.)
  5614     MSetInitializedLength *initLength = MSetInitializedLength::New(alloc(), elements, id);
  5615     current->add(initLength);
  5617     if (!resumeAfter(initLength))
  5618         return false;
  5620    return true;
  5623 bool
  5624 IonBuilder::jsop_mutateproto()
  5626     MDefinition *value = current->pop();
  5627     MDefinition *obj = current->peek(-1);
  5629     MMutateProto *mutate = MMutateProto::New(alloc(), obj, value);
  5630     current->add(mutate);
  5631     return resumeAfter(mutate);
  5634 bool
  5635 IonBuilder::jsop_initprop(PropertyName *name)
  5637     MDefinition *value = current->pop();
  5638     MDefinition *obj = current->peek(-1);
  5640     JSObject *templateObject = obj->toNewObject()->templateObject();
  5642     Shape *shape = templateObject->lastProperty()->searchLinear(NameToId(name));
  5644     if (!shape) {
  5645         // JSOP_NEWINIT becomes an MNewObject without preconfigured properties.
  5646         MInitProp *init = MInitProp::New(alloc(), obj, name, value);
  5647         current->add(init);
  5648         return resumeAfter(init);
  5651     if (PropertyWriteNeedsTypeBarrier(alloc(), constraints(), current,
  5652                                       &obj, name, &value, /* canModify = */ true))
  5654         // JSOP_NEWINIT becomes an MNewObject without preconfigured properties.
  5655         MInitProp *init = MInitProp::New(alloc(), obj, name, value);
  5656         current->add(init);
  5657         return resumeAfter(init);
  5660     if (NeedsPostBarrier(info(), value))
  5661         current->add(MPostWriteBarrier::New(alloc(), obj, value));
  5663     bool needsBarrier = true;
  5664     if (obj->resultTypeSet() &&
  5665         !obj->resultTypeSet()->propertyNeedsBarrier(constraints(), NameToId(name)))
  5667         needsBarrier = false;
  5670     // In parallel execution, we never require write barriers.  See
  5671     // forkjoin.cpp for more information.
  5672     if (info().executionMode() == ParallelExecution)
  5673         needsBarrier = false;
  5675     if (templateObject->isFixedSlot(shape->slot())) {
  5676         MStoreFixedSlot *store = MStoreFixedSlot::New(alloc(), obj, shape->slot(), value);
  5677         if (needsBarrier)
  5678             store->setNeedsBarrier();
  5680         current->add(store);
  5681         return resumeAfter(store);
  5684     MSlots *slots = MSlots::New(alloc(), obj);
  5685     current->add(slots);
  5687     uint32_t slot = templateObject->dynamicSlotIndex(shape->slot());
  5688     MStoreSlot *store = MStoreSlot::New(alloc(), slots, slot, value);
  5689     if (needsBarrier)
  5690         store->setNeedsBarrier();
  5692     current->add(store);
  5693     return resumeAfter(store);
  5696 bool
  5697 IonBuilder::jsop_initprop_getter_setter(PropertyName *name)
  5699     MDefinition *value = current->pop();
  5700     MDefinition *obj = current->peek(-1);
  5702     MInitPropGetterSetter *init = MInitPropGetterSetter::New(alloc(), obj, name, value);
  5703     current->add(init);
  5704     return resumeAfter(init);
  5707 bool
  5708 IonBuilder::jsop_initelem_getter_setter()
  5710     MDefinition *value = current->pop();
  5711     MDefinition *id = current->pop();
  5712     MDefinition *obj = current->peek(-1);
  5714     MInitElemGetterSetter *init = MInitElemGetterSetter::New(alloc(), obj, id, value);
  5715     current->add(init);
  5716     return resumeAfter(init);
  5719 MBasicBlock *
  5720 IonBuilder::addBlock(MBasicBlock *block, uint32_t loopDepth)
  5722     if (!block)
  5723         return nullptr;
  5724     graph().addBlock(block);
  5725     block->setLoopDepth(loopDepth);
  5726     return block;
  5729 MBasicBlock *
  5730 IonBuilder::newBlock(MBasicBlock *predecessor, jsbytecode *pc)
  5732     MBasicBlock *block = MBasicBlock::New(graph(), &analysis(), info(),
  5733                                           predecessor, pc, MBasicBlock::NORMAL);
  5734     return addBlock(block, loopDepth_);
  5737 MBasicBlock *
  5738 IonBuilder::newBlock(MBasicBlock *predecessor, jsbytecode *pc, MResumePoint *priorResumePoint)
  5740     MBasicBlock *block = MBasicBlock::NewWithResumePoint(graph(), info(), predecessor, pc,
  5741                                                          priorResumePoint);
  5742     return addBlock(block, loopDepth_);
  5745 MBasicBlock *
  5746 IonBuilder::newBlockPopN(MBasicBlock *predecessor, jsbytecode *pc, uint32_t popped)
  5748     MBasicBlock *block = MBasicBlock::NewPopN(graph(), info(), predecessor, pc, MBasicBlock::NORMAL, popped);
  5749     return addBlock(block, loopDepth_);
  5752 MBasicBlock *
  5753 IonBuilder::newBlockAfter(MBasicBlock *at, MBasicBlock *predecessor, jsbytecode *pc)
  5755     MBasicBlock *block = MBasicBlock::New(graph(), &analysis(), info(),
  5756                                           predecessor, pc, MBasicBlock::NORMAL);
  5757     if (!block)
  5758         return nullptr;
  5759     graph().insertBlockAfter(at, block);
  5760     return block;
  5763 MBasicBlock *
  5764 IonBuilder::newBlock(MBasicBlock *predecessor, jsbytecode *pc, uint32_t loopDepth)
  5766     MBasicBlock *block = MBasicBlock::New(graph(), &analysis(), info(),
  5767                                           predecessor, pc, MBasicBlock::NORMAL);
  5768     return addBlock(block, loopDepth);
  5771 MBasicBlock *
  5772 IonBuilder::newOsrPreheader(MBasicBlock *predecessor, jsbytecode *loopEntry)
  5774     JS_ASSERT(LoopEntryCanIonOsr(loopEntry));
  5775     JS_ASSERT(loopEntry == info().osrPc());
  5777     // Create two blocks: one for the OSR entry with no predecessors, one for
  5778     // the preheader, which has the OSR entry block as a predecessor. The
  5779     // OSR block is always the second block (with id 1).
  5780     MBasicBlock *osrBlock  = newBlockAfter(*graph().begin(), loopEntry);
  5781     MBasicBlock *preheader = newBlock(predecessor, loopEntry);
  5782     if (!osrBlock || !preheader)
  5783         return nullptr;
  5785     MOsrEntry *entry = MOsrEntry::New(alloc());
  5786     osrBlock->add(entry);
  5788     // Initialize |scopeChain|.
  5790         uint32_t slot = info().scopeChainSlot();
  5792         MInstruction *scopev;
  5793         if (analysis().usesScopeChain()) {
  5794             scopev = MOsrScopeChain::New(alloc(), entry);
  5795         } else {
  5796             // Use an undefined value if the script does not need its scope
  5797             // chain, to match the type that is already being tracked for the
  5798             // slot.
  5799             scopev = MConstant::New(alloc(), UndefinedValue());
  5802         osrBlock->add(scopev);
  5803         osrBlock->initSlot(slot, scopev);
  5805     // Initialize |return value|
  5807         MInstruction *returnValue;
  5808         if (!script()->noScriptRval())
  5809             returnValue = MOsrReturnValue::New(alloc(), entry);
  5810         else
  5811             returnValue = MConstant::New(alloc(), UndefinedValue());
  5812         osrBlock->add(returnValue);
  5813         osrBlock->initSlot(info().returnValueSlot(), returnValue);
  5816     // Initialize arguments object.
  5817     bool needsArgsObj = info().needsArgsObj();
  5818     MInstruction *argsObj = nullptr;
  5819     if (info().hasArguments()) {
  5820         if (needsArgsObj)
  5821             argsObj = MOsrArgumentsObject::New(alloc(), entry);
  5822         else
  5823             argsObj = MConstant::New(alloc(), UndefinedValue());
  5824         osrBlock->add(argsObj);
  5825         osrBlock->initSlot(info().argsObjSlot(), argsObj);
  5828     if (info().funMaybeLazy()) {
  5829         // Initialize |this| parameter.
  5830         MParameter *thisv = MParameter::New(alloc(), MParameter::THIS_SLOT, nullptr);
  5831         osrBlock->add(thisv);
  5832         osrBlock->initSlot(info().thisSlot(), thisv);
  5834         // Initialize arguments.
  5835         for (uint32_t i = 0; i < info().nargs(); i++) {
  5836             uint32_t slot = needsArgsObj ? info().argSlotUnchecked(i) : info().argSlot(i);
  5838             // Only grab arguments from the arguments object if the arguments object
  5839             // aliases formals.  If the argsobj does not alias formals, then the
  5840             // formals may have been assigned to during interpretation, and that change
  5841             // will not be reflected in the argsobj.
  5842             if (needsArgsObj && info().argsObjAliasesFormals()) {
  5843                 JS_ASSERT(argsObj && argsObj->isOsrArgumentsObject());
  5844                 // If this is an aliased formal, then the arguments object
  5845                 // contains a hole at this index.  Any references to this
  5846                 // variable in the jitcode will come from JSOP_*ALIASEDVAR
  5847                 // opcodes, so the slot itself can be set to undefined.  If
  5848                 // it's not aliased, it must be retrieved from the arguments
  5849                 // object.
  5850                 MInstruction *osrv;
  5851                 if (script()->formalIsAliased(i))
  5852                     osrv = MConstant::New(alloc(), UndefinedValue());
  5853                 else
  5854                     osrv = MGetArgumentsObjectArg::New(alloc(), argsObj, i);
  5856                 osrBlock->add(osrv);
  5857                 osrBlock->initSlot(slot, osrv);
  5858             } else {
  5859                 MParameter *arg = MParameter::New(alloc(), i, nullptr);
  5860                 osrBlock->add(arg);
  5861                 osrBlock->initSlot(slot, arg);
  5866     // Initialize locals.
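           // Each local's value is read out of the baseline frame at its frame offset
           // via an MOsrValue.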
  5867     for (uint32_t i = 0; i < info().nlocals(); i++) {
  5868         uint32_t slot = info().localSlot(i);
  5869         ptrdiff_t offset = BaselineFrame::reverseOffsetOfLocal(i);
  5871         MOsrValue *osrv = MOsrValue::New(alloc(), entry, offset);
  5872         osrBlock->add(osrv);
  5873         osrBlock->initSlot(slot, osrv);
  5876     // Initialize stack.
  5877     uint32_t numStackSlots = preheader->stackDepth() - info().firstStackSlot();
  5878     for (uint32_t i = 0; i < numStackSlots; i++) {
  5879         uint32_t slot = info().stackSlot(i);
  5880         ptrdiff_t offset = BaselineFrame::reverseOffsetOfLocal(info().nlocals() + i);
  5882         MOsrValue *osrv = MOsrValue::New(alloc(), entry, offset);
  5883         osrBlock->add(osrv);
  5884         osrBlock->initSlot(slot, osrv);
  5887     // Create an MStart to hold the first valid MResumePoint.
  5888     MStart *start = MStart::New(alloc(), MStart::StartType_Osr);
  5889     osrBlock->add(start);
  5890     graph().setOsrStart(start);
  5892     // MOsrValue instructions are infallible, so the first MResumePoint must
  5893     // occur after they execute, at the point of the MStart.
  5894     if (!resumeAt(start, loopEntry))
  5895         return nullptr;
  5897     // Link the same MResumePoint from the MStart to each MOsrValue.
  5898     // This causes logic in ShouldSpecializeInput() to not replace Uses with
  5899     // Unboxes in the MResumePoint, so that the MStart always sees Values.
  5900     osrBlock->linkOsrValues(start);
  5902     // Clone the types of the other predecessor of the pre-header into the OSR
  5903     // block, so that the pre-header phis won't discard the specialized type of
  5904     // the predecessor.
  5905     JS_ASSERT(predecessor->stackDepth() == osrBlock->stackDepth());
  5906     JS_ASSERT(info().scopeChainSlot() == 0);
  5908     // Treat the OSR values as having the same type as the existing values
  5909     // coming in to the loop. These will be fixed up with appropriate
  5910     // unboxing and type barriers in finishLoop, once the possible types
  5911     // at the loop header are known.
  5912     for (uint32_t i = info().startArgSlot(); i < osrBlock->stackDepth(); i++) {
  5913         MDefinition *existing = current->getSlot(i);
  5914         MDefinition *def = osrBlock->getSlot(i);
  5915         JS_ASSERT_IF(!needsArgsObj || !info().isSlotAliasedAtOsr(i), def->type() == MIRType_Value);
  5917         // Aliased slots are never accessed, since they need to go through
  5918         // the callobject. No need to type them here.
  5919         if (info().isSlotAliasedAtOsr(i))
  5920             continue;
  5922         def->setResultType(existing->type());
  5923         def->setResultTypeSet(existing->resultTypeSet());
  5926     // Finish the osrBlock.
  5927     osrBlock->end(MGoto::New(alloc(), preheader));
  5928     if (!preheader->addPredecessor(alloc(), osrBlock))
  5929         return nullptr;
  5930     graph().setOsrBlock(osrBlock);
  5932     // Wrap |this| with a guaranteed use, to prevent instruction elimination.
  5933     // Prevent |this| from being DCE'd: necessary for constructors.
  5934     if (info().funMaybeLazy())
  5935         preheader->getSlot(info().thisSlot())->setGuard();
  5937     return preheader;
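// For illustration only (the script below is hypothetical): OSR compilation of a
// script such as
//
//     function f() {
//         var sum = 0;
//         for (var i = 0; i < 1000000; i++)   // becomes hot while running in Baseline
//             sum += i;
//         return sum;
//     }
//
// enters Ion at the JSOP_LOOPENTRY of the hot loop. The pre-header built above then
// reads |i| and |sum| out of the baseline frame through MOsrValue instructions (at
// the offsets given by BaselineFrame::reverseOffsetOfLocal) rather than flowing in
// through the normal function prologue.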
  5940 MBasicBlock *
  5941 IonBuilder::newPendingLoopHeader(MBasicBlock *predecessor, jsbytecode *pc, bool osr, bool canOsr,
  5942                                  unsigned stackPhiCount)
  5944     loopDepth_++;
  5945     // If this site can OSR, all values on the expression stack are part of the loop.
  5946     if (canOsr)
  5947         stackPhiCount = predecessor->stackDepth() - info().firstStackSlot();
  5948     MBasicBlock *block = MBasicBlock::NewPendingLoopHeader(graph(), info(), predecessor, pc,
  5949                                                            stackPhiCount);
  5950     if (!addBlock(block, loopDepth_))
  5951         return nullptr;
  5953     if (osr) {
  5954         // Incorporate type information from the OSR frame into the loop
  5955         // header. The OSR frame may have unexpected types due to type changes
  5956         // within the loop body or due to incomplete profiling information,
  5957         // in which case this may avoid restarts of loop analysis or bailouts
  5958         // during the OSR itself.
  5960         // Unbox the MOsrValue if it is known to be unboxable.
  5961         for (uint32_t i = info().startArgSlot(); i < block->stackDepth(); i++) {
  5963             // The values of aliased args and slots live in the call object, so we
  5964             // can't read the value from the baseline frame.
  5965             if (info().isSlotAliasedAtOsr(i))
  5966                 continue;
  5968             // Don't bother with expression stack values. The stack should be
  5969             // empty except for let variables (not Ion-compiled) or iterators.
  5970             if (i >= info().firstStackSlot())
  5971                 continue;
  5973             MPhi *phi = block->getSlot(i)->toPhi();
  5975             // Get the type from the baseline frame.
  5976             types::Type existingType = types::Type::UndefinedType();
  5977             uint32_t arg = i - info().firstArgSlot();
  5978             uint32_t var = i - info().firstLocalSlot();
  5979             if (info().funMaybeLazy() && i == info().thisSlot())
  5980                 existingType = baselineFrame_->thisType;
  5981             else if (arg < info().nargs())
  5982                 existingType = baselineFrame_->argTypes[arg];
  5983             else
  5984                 existingType = baselineFrame_->varTypes[var];
  5986             // Extract typeset from value.
  5987             types::TemporaryTypeSet *typeSet =
  5988                 alloc_->lifoAlloc()->new_<types::TemporaryTypeSet>(existingType);
  5989             if (!typeSet)
  5990                 return nullptr;
  5991             MIRType type = typeSet->getKnownMIRType();
  5992             if (!phi->addBackedgeType(type, typeSet))
  5993                 return nullptr;
  5997     return block;
  6000 // A resume point is a mapping of stack slots to MDefinitions. It is used to
  6001 // capture the environment such that if a guard fails, and IonMonkey needs
  6002 // to exit back to the interpreter, the interpreter state can be
  6003 // reconstructed.
  6004 //
  6005 // We capture stack state at critical points:
  6006 //   * (1) At the beginning of every basic block.
  6007 //   * (2) After every effectful operation.
  6008 //
  6009 // As long as these two properties are maintained, instructions can
  6010 // be moved, hoisted, or eliminated without problems, and ops without side
  6011 // effects do not need to worry about capturing state at precisely the
  6012 // right point in time.
  6013 //
  6014 // Effectful instructions, of course, need to capture state after completion,
  6015 // where the interpreter will not attempt to repeat the operation. For this,
  6016 // ResumeAfter must be used. The state is attached directly to the effectful
  6017 // instruction to ensure that no intermediate instructions could be injected
  6018 // in between by a future analysis pass.
  6019 //
  6020 // During LIR construction, if an instruction can bail back to the interpreter,
  6021 // we create an LSnapshot, which uses the last known resume point to request
  6022 // register/stack assignments for every live value.
  6023 bool
  6024 IonBuilder::resume(MInstruction *ins, jsbytecode *pc, MResumePoint::Mode mode)
  6026     JS_ASSERT(ins->isEffectful() || !ins->isMovable());
  6028     MResumePoint *resumePoint = MResumePoint::New(alloc(), ins->block(), pc, callerResumePoint_,
  6029                                                   mode);
  6030     if (!resumePoint)
  6031         return false;
  6032     ins->setResumePoint(resumePoint);
  6033     resumePoint->setInstruction(ins);
  6034     return true;
  6037 bool
  6038 IonBuilder::resumeAt(MInstruction *ins, jsbytecode *pc)
  6040     return resume(ins, pc, MResumePoint::ResumeAt);
  6043 bool
  6044 IonBuilder::resumeAfter(MInstruction *ins)
  6046     return resume(ins, pc, MResumePoint::ResumeAfter);
  6049 bool
  6050 IonBuilder::maybeInsertResume()
  6052     // Create a resume point at the current position, without an existing
  6053     // effectful instruction. This resume point is not necessary for correct
  6054     // behavior (see above), but is added to avoid holding any values from the
  6055     // previous resume point which are now dead. This shortens the live ranges
  6056     // of such values and improves register allocation.
  6057     //
  6058     // This optimization is not performed outside of loop bodies, where good
  6059     // register allocation is not as critical, in order to avoid creating
  6060     // excessive resume points.
  6062     if (loopDepth_ == 0)
  6063         return true;
  6065     MNop *ins = MNop::New(alloc());
  6066     current->add(ins);
  6068     return resumeAfter(ins);
  6071 static bool
  6072 ClassHasEffectlessLookup(const Class *clasp, PropertyName *name)
  6074     return clasp->isNative() && !clasp->ops.lookupGeneric;
  6077 static bool
  6078 ClassHasResolveHook(CompileCompartment *comp, const Class *clasp, PropertyName *name)
  6080     // While arrays do not have resolve hooks, the types of their |length|
  6081     // properties are not reflected in type information, so pretend there is a
  6082     // resolve hook for this property.
  6083     if (clasp == &ArrayObject::class_)
  6084         return name == comp->runtime()->names().length;
  6086     if (clasp->resolve == JS_ResolveStub)
  6087         return false;
  6089     if (clasp->resolve == (JSResolveOp)str_resolve) {
  6090         // str_resolve only resolves integers, not names.
  6091         return false;
  6094     if (clasp->resolve == (JSResolveOp)fun_resolve)
  6095         return FunctionHasResolveHook(comp->runtime()->names(), name);
  6097     return true;
  6100 void
  6101 IonBuilder::insertRecompileCheck()
  6103     // PJS doesn't recompile and doesn't need recompile checks.
  6104     if (info().executionMode() != SequentialExecution)
  6105         return;
  6107     // No need for recompile checks if this is the highest optimization level.
  6108     OptimizationLevel curLevel = optimizationInfo().level();
  6109     if (js_IonOptimizations.isLastLevel(curLevel))
  6110         return;
  6112     // Add recompile check.
  6114     // Get the topmost builder. The topmost script will get recompiled when
  6115     // usecount is high enough to justify a higher optimization level.
  6116     IonBuilder *topBuilder = this;
  6117     while (topBuilder->callerBuilder_)
  6118         topBuilder = topBuilder->callerBuilder_;
  6120     // Add a recompile check that triggers once the use count reaches the
  6121     // threshold of the next optimization level.
  6122     OptimizationLevel nextLevel = js_IonOptimizations.nextLevel(curLevel);
  6123     const OptimizationInfo *info = js_IonOptimizations.get(nextLevel);
  6124     uint32_t useCount = info->usesBeforeCompile(topBuilder->script());
  6125     current->add(MRecompileCheck::New(alloc(), topBuilder->script(), useCount));
  6128 JSObject *
  6129 IonBuilder::testSingletonProperty(JSObject *obj, PropertyName *name)
  6131     // We would like to completely no-op property/global accesses which can
  6132     // produce only a particular JSObject. When indicating the access result is
  6133     // definitely an object, type inference does not account for the
  6134     // possibility that the property is entirely missing from the input object
  6135     // and its prototypes (if this happens, a semantic trigger would be hit and
  6136     // the pushed types updated, even if there is no type barrier).
  6137     //
  6138     // If the access definitely goes through obj, either directly or on the
  6139     // prototype chain, and the object has singleton type, then the type
  6140     // information for that property reflects the value that will definitely be
  6141     // read on accesses to the object. If the property is later deleted or
  6142     // reconfigured as a getter/setter then the type information for the
  6143     // property will change and trigger invalidation.
  6145     while (obj) {
  6146         if (!ClassHasEffectlessLookup(obj->getClass(), name))
  6147             return nullptr;
  6149         types::TypeObjectKey *objType = types::TypeObjectKey::get(obj);
  6150         if (analysisContext)
  6151             objType->ensureTrackedProperty(analysisContext, NameToId(name));
  6153         if (objType->unknownProperties())
  6154             return nullptr;
  6156         types::HeapTypeSetKey property = objType->property(NameToId(name));
  6157         if (property.isOwnProperty(constraints())) {
  6158             if (obj->hasSingletonType())
  6159                 return property.singleton(constraints());
  6160             return nullptr;
  6163         if (ClassHasResolveHook(compartment, obj->getClass(), name))
  6164             return nullptr;
  6166         if (!obj->hasTenuredProto())
  6167             return nullptr;
  6168         obj = obj->getProto();
  6171     return nullptr;
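// Illustrative (and hedged) example of the access pattern this targets: for a call
// like |arr.push(1)| on a plain array, |push| is not an own property of |arr| but
// lives on Array.prototype, a singleton object. If type information proves the
// lookup resolves through that prototype and the property's value is a known
// singleton function, the property read can be folded to a constant rather than
// emitted as a dynamic lookup. The |push| example is only meant to motivate the
// walk above, not to describe every caller of this helper.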
  6174 bool
  6175 IonBuilder::testSingletonPropertyTypes(MDefinition *obj, JSObject *singleton, PropertyName *name,
  6176                                        bool *testObject, bool *testString)
  6178     // As for TestSingletonProperty, but the input is any value in a type set
  6179     // rather than a specific object. If testObject is set then the constant
  6180     // result can only be used after ensuring the input is an object.
  6182     *testObject = false;
  6183     *testString = false;
  6185     types::TemporaryTypeSet *types = obj->resultTypeSet();
  6186     if (types && types->unknownObject())
  6187         return false;
  6189     JSObject *objectSingleton = types ? types->getSingleton() : nullptr;
  6190     if (objectSingleton)
  6191         return testSingletonProperty(objectSingleton, name) == singleton;
  6193     JSProtoKey key;
  6194     switch (obj->type()) {
  6195       case MIRType_String:
  6196         key = JSProto_String;
  6197         break;
  6199       case MIRType_Int32:
  6200       case MIRType_Double:
  6201         key = JSProto_Number;
  6202         break;
  6204       case MIRType_Boolean:
  6205         key = JSProto_Boolean;
  6206         break;
  6208       case MIRType_Object:
  6209       case MIRType_Value: {
  6210         if (!types)
  6211             return false;
  6213         if (types->hasType(types::Type::StringType())) {
  6214             key = JSProto_String;
  6215             *testString = true;
  6216             break;
  6219         if (!types->maybeObject())
  6220             return false;
  6222         // For property accesses which may be on many objects, we just need to
  6223         // find a prototype common to all the objects; if that prototype
  6224         // has the singleton property, the access will not be on a missing property.
  6225         for (unsigned i = 0; i < types->getObjectCount(); i++) {
  6226             types::TypeObjectKey *object = types->getObject(i);
  6227             if (!object)
  6228                 continue;
  6229             if (analysisContext)
  6230                 object->ensureTrackedProperty(analysisContext, NameToId(name));
  6232             const Class *clasp = object->clasp();
  6233             if (!ClassHasEffectlessLookup(clasp, name) || ClassHasResolveHook(compartment, clasp, name))
  6234                 return false;
  6235             if (object->unknownProperties())
  6236                 return false;
  6237             types::HeapTypeSetKey property = object->property(NameToId(name));
  6238             if (property.isOwnProperty(constraints()))
  6239                 return false;
  6241             if (!object->hasTenuredProto())
  6242                 return false;
  6243             if (JSObject *proto = object->proto().toObjectOrNull()) {
  6244                 // Test this type.
  6245                 if (testSingletonProperty(proto, name) != singleton)
  6246                     return false;
  6247             } else {
  6248                 // Can't be on the prototype chain with no prototypes...
  6249                 return false;
  6252         // If this is not a known object, a test will be needed.
  6253         *testObject = (obj->type() != MIRType_Object);
  6254         return true;
  6256       default:
  6257         return false;
  6260     JSObject *proto = GetBuiltinPrototypePure(&script()->global(), key);
  6261     if (proto)
  6262         return testSingletonProperty(proto, name) == singleton;
  6264     return false;
  6267 // Given an observed type set, annotates the IR as much as possible:
  6268 // (1) If no type information is provided, the value on the top of the stack is
  6269 //     left in place.
  6270 // (2) If a single type definitely exists, and no type barrier is needed,
  6271 //     then an infallible unbox instruction replaces the value on the top of
  6272 //     the stack.
  6273 // (3) If a type barrier is needed, but has an unknown type set, leave the
  6274 //     value at the top of the stack.
  6275 // (4) If a type barrier is needed, and has a single type, an unbox
  6276 //     instruction replaces the top of the stack.
  6277 // (5) Lastly, a type barrier instruction replaces the top of the stack.
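// Condensed (illustrative summary of the body below, not additional behavior):
//
//     if (!needsBarrier)            -> ensureDefiniteType() unboxes, cases (1)/(2)
//     else if (observed->unknown()) -> leave the value in place,     case  (3)
//     else                          -> pop and push an MTypeBarrier; a single
//                                      observed type yields an unboxed result and
//                                      undefined/null become constants, cases (4)/(5)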
  6278 bool
  6279 IonBuilder::pushTypeBarrier(MDefinition *def, types::TemporaryTypeSet *observed, bool needsBarrier)
  6281     // Barriers are never needed for instructions whose result will not be used.
  6282     if (BytecodeIsPopped(pc))
  6283         return true;
  6285     // If the instruction has no side effects, we'll resume the entire operation.
  6286     // The actual type barrier will occur in the interpreter. If the
  6287     // instruction is effectful, even if it has a singleton type, there
  6288     // must be a resume point capturing the original def, and resuming
  6289     // to that point will explicitly monitor the new type.
  6291     if (!needsBarrier) {
  6292         MDefinition *replace = ensureDefiniteType(def, observed->getKnownMIRType());
  6293         if (replace != def) {
  6294             current->pop();
  6295             current->push(replace);
  6297         replace->setResultTypeSet(observed);
  6298         return true;
  6301     if (observed->unknown())
  6302         return true;
  6304     current->pop();
  6306     MInstruction *barrier = MTypeBarrier::New(alloc(), def, observed);
  6307     current->add(barrier);
  6309     if (barrier->type() == MIRType_Undefined)
  6310         return pushConstant(UndefinedValue());
  6311     if (barrier->type() == MIRType_Null)
  6312         return pushConstant(NullValue());
  6314     current->push(barrier);
  6315     return true;
  6318 bool
  6319 IonBuilder::pushDOMTypeBarrier(MInstruction *ins, types::TemporaryTypeSet *observed, JSFunction* func)
  6321     JS_ASSERT(func && func->isNative() && func->jitInfo());
  6323     const JSJitInfo *jitinfo = func->jitInfo();
  6324     bool barrier = DOMCallNeedsBarrier(jitinfo, observed);
  6325     // Need to be a bit careful: if jitinfo->returnType is JSVAL_TYPE_DOUBLE but
  6326     // types->getKnownMIRType() is MIRType_Int32, then don't unconditionally
  6327     // unbox as a double.  Instead, go ahead and barrier on having an int type,
  6328     // since we know we need a barrier anyway due to the type mismatch.  This is
  6329     // the only situation in which TI actually has more information about the
  6330     // JSValueType than codegen can, short of jitinfo->returnType just being
  6331     // JSVAL_TYPE_UNKNOWN.
  6332     MDefinition* replace = ins;
  6333     if (jitinfo->returnType() != JSVAL_TYPE_DOUBLE ||
  6334         observed->getKnownMIRType() != MIRType_Int32) {
  6335         replace = ensureDefiniteType(ins, MIRTypeFromValueType(jitinfo->returnType()));
  6336         if (replace != ins) {
  6337             current->pop();
  6338             current->push(replace);
  6340     } else {
  6341         JS_ASSERT(barrier);
  6344     return pushTypeBarrier(replace, observed, barrier);
  6347 MDefinition *
  6348 IonBuilder::ensureDefiniteType(MDefinition *def, MIRType definiteType)
  6350     MInstruction *replace;
  6351     switch (definiteType) {
  6352       case MIRType_Undefined:
  6353         def->setImplicitlyUsedUnchecked();
  6354         replace = MConstant::New(alloc(), UndefinedValue());
  6355         break;
  6357       case MIRType_Null:
  6358         def->setImplicitlyUsedUnchecked();
  6359         replace = MConstant::New(alloc(), NullValue());
  6360         break;
  6362       case MIRType_Value:
  6363         return def;
  6365       default: {
  6366         if (def->type() != MIRType_Value) {
  6367             JS_ASSERT(def->type() == definiteType);
  6368             return def;
  6370         replace = MUnbox::New(alloc(), def, definiteType, MUnbox::Infallible);
  6371         break;
  6375     current->add(replace);
  6376     return replace;
  6379 MDefinition *
  6380 IonBuilder::ensureDefiniteTypeSet(MDefinition *def, types::TemporaryTypeSet *types)
  6382     // We cannot arbitrarily add a typeset to a definition. It can be shared
  6383     // in another path. So we always need to create a new MIR.
  6385     // Use ensureDefiniteType to do unboxing. If that happened the type can
  6386     // be added on the newly created unbox operation.
  6387     MDefinition *replace = ensureDefiniteType(def, types->getKnownMIRType());
  6388     if (replace != def) {
  6389         replace->setResultTypeSet(types);
  6390         return replace;
  6393     // Create a NOP mir instruction to filter the typeset.
  6394     MFilterTypeSet *filter = MFilterTypeSet::New(alloc(), def, types);
  6395     current->add(filter);
  6396     return filter;
  6399 static size_t
  6400 NumFixedSlots(JSObject *object)
  6402     // Note: we can't use object->numFixedSlots() here, as this will read the
  6403     // shape and can race with the main thread if we are building off thread.
  6404     // The allocation kind and object class (which goes through the type) can
  6405     // be read freely, however.
  6406     gc::AllocKind kind = object->tenuredGetAllocKind();
  6407     return gc::GetGCKindSlots(kind, object->getClass());
  6410 bool
  6411 IonBuilder::getStaticName(JSObject *staticObject, PropertyName *name, bool *psucceeded)
  6413     jsid id = NameToId(name);
  6415     JS_ASSERT(staticObject->is<GlobalObject>() || staticObject->is<CallObject>());
  6416     JS_ASSERT(staticObject->hasSingletonType());
  6418     *psucceeded = true;
  6420     if (staticObject->is<GlobalObject>()) {
  6421         // Optimize undefined, NaN, and Infinity.
  6422         if (name == names().undefined)
  6423             return pushConstant(UndefinedValue());
  6424         if (name == names().NaN)
  6425             return pushConstant(compartment->runtime()->NaNValue());
  6426         if (name == names().Infinity)
  6427             return pushConstant(compartment->runtime()->positiveInfinityValue());
  6430     types::TypeObjectKey *staticType = types::TypeObjectKey::get(staticObject);
  6431     if (analysisContext)
  6432         staticType->ensureTrackedProperty(analysisContext, NameToId(name));
  6434     if (staticType->unknownProperties()) {
  6435         *psucceeded = false;
  6436         return true;
  6439     types::HeapTypeSetKey property = staticType->property(id);
  6440     if (!property.maybeTypes() ||
  6441         !property.maybeTypes()->definiteProperty() ||
  6442         property.nonData(constraints()))
  6444         // The property has been reconfigured as non-configurable, non-enumerable
  6445         // or non-writable.
  6446         *psucceeded = false;
  6447         return true;
  6450     types::TemporaryTypeSet *types = bytecodeTypes(pc);
  6451     bool barrier = PropertyReadNeedsTypeBarrier(analysisContext, constraints(), staticType,
  6452                                                 name, types, /* updateObserved = */ true);
  6454     JSObject *singleton = types->getSingleton();
  6456     MIRType knownType = types->getKnownMIRType();
  6457     if (!barrier) {
  6458         if (singleton) {
  6459             // Try to inline a known constant value.
  6460             if (testSingletonProperty(staticObject, name) == singleton)
  6461                 return pushConstant(ObjectValue(*singleton));
  6463         if (knownType == MIRType_Undefined)
  6464             return pushConstant(UndefinedValue());
  6465         if (knownType == MIRType_Null)
  6466             return pushConstant(NullValue());
  6469     MInstruction *obj = constant(ObjectValue(*staticObject));
  6471     MIRType rvalType = types->getKnownMIRType();
  6472     if (barrier)
  6473         rvalType = MIRType_Value;
  6475     return loadSlot(obj, property.maybeTypes()->definiteSlot(), NumFixedSlots(staticObject),
  6476                     rvalType, barrier, types);
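// Hedged example of the constant-folding path above: for a global read such as
//
//     var m = Math;     // JSOP_GETGNAME "Math"
//
// where the property's type set names a known singleton object and no type barrier
// is required, testSingletonProperty() lets the builder push a constant ObjectValue
// instead of emitting a slot load. |Math| is only an illustration; any global slot
// whose value TI knows to be a singleton qualifies.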
  6479 // Whether 'types' includes all possible values represented by input/inputTypes.
  6480 bool
  6481 jit::TypeSetIncludes(types::TypeSet *types, MIRType input, types::TypeSet *inputTypes)
  6483     if (!types)
  6484         return inputTypes && inputTypes->empty();
  6486     switch (input) {
  6487       case MIRType_Undefined:
  6488       case MIRType_Null:
  6489       case MIRType_Boolean:
  6490       case MIRType_Int32:
  6491       case MIRType_Double:
  6492       case MIRType_Float32:
  6493       case MIRType_String:
  6494       case MIRType_MagicOptimizedArguments:
  6495         return types->hasType(types::Type::PrimitiveType(ValueTypeFromMIRType(input)));
  6497       case MIRType_Object:
  6498         return types->unknownObject() || (inputTypes && inputTypes->isSubset(types));
  6500       case MIRType_Value:
  6501         return types->unknown() || (inputTypes && inputTypes->isSubset(types));
  6503       default:
  6504         MOZ_ASSUME_UNREACHABLE("Bad input type");
  6508 // Whether a write of the given value may need a post-write barrier for GC purposes.
  6509 bool
  6510 jit::NeedsPostBarrier(CompileInfo &info, MDefinition *value)
  6512     return info.executionMode() != ParallelExecution && value->mightBeType(MIRType_Object);
  6515 bool
  6516 IonBuilder::setStaticName(JSObject *staticObject, PropertyName *name)
  6518     jsid id = NameToId(name);
  6520     JS_ASSERT(staticObject->is<GlobalObject>() || staticObject->is<CallObject>());
  6522     MDefinition *value = current->peek(-1);
  6524     types::TypeObjectKey *staticType = types::TypeObjectKey::get(staticObject);
  6525     if (staticType->unknownProperties())
  6526         return jsop_setprop(name);
  6528     types::HeapTypeSetKey property = staticType->property(id);
  6529     if (!property.maybeTypes() ||
  6530         !property.maybeTypes()->definiteProperty() ||
  6531         property.nonData(constraints()) ||
  6532         property.nonWritable(constraints()))
  6534         // The property has been reconfigured as non-configurable, non-enumerable
  6535         // or non-writable.
  6536         return jsop_setprop(name);
  6539     if (!TypeSetIncludes(property.maybeTypes(), value->type(), value->resultTypeSet()))
  6540         return jsop_setprop(name);
  6542     current->pop();
  6544     // Pop the bound object on the stack.
  6545     MDefinition *obj = current->pop();
  6546     JS_ASSERT(&obj->toConstant()->value().toObject() == staticObject);
  6548     if (NeedsPostBarrier(info(), value))
  6549         current->add(MPostWriteBarrier::New(alloc(), obj, value));
  6551     // If the property has a known type, we may be able to optimize typed stores by not
  6552     // storing the type tag.
  6553     MIRType slotType = MIRType_None;
  6554     MIRType knownType = property.knownMIRType(constraints());
  6555     if (knownType != MIRType_Value)
  6556         slotType = knownType;
  6558     bool needsBarrier = property.needsBarrier(constraints());
  6559     return storeSlot(obj, property.maybeTypes()->definiteSlot(), NumFixedSlots(staticObject),
  6560                      value, needsBarrier, slotType);
  6563 bool
  6564 IonBuilder::jsop_getgname(PropertyName *name)
  6566     JSObject *obj = &script()->global();
  6567     bool succeeded;
  6568     if (!getStaticName(obj, name, &succeeded))
  6569         return false;
  6570     if (succeeded)
  6571         return true;
  6573     types::TemporaryTypeSet *types = bytecodeTypes(pc);
  6574     // Spoof the stack to call into the getProp path.
  6575     // First, make sure there's room.
  6576     if (!current->ensureHasSlots(1))
  6577         return false;
  6578     pushConstant(ObjectValue(*obj));
  6579     if (!getPropTryCommonGetter(&succeeded, name, types))
  6580         return false;
  6581     if (succeeded)
  6582         return true;
  6584     // Clean up the pushed global object if we were not successful.
  6585     current->pop();
  6586     return jsop_getname(name);
  6589 bool
  6590 IonBuilder::jsop_getname(PropertyName *name)
  6592     MDefinition *object;
  6593     if (js_CodeSpec[*pc].format & JOF_GNAME) {
  6594         MInstruction *global = constant(ObjectValue(script()->global()));
  6595         object = global;
  6596     } else {
  6597         current->push(current->scopeChain());
  6598         object = current->pop();
  6601     MGetNameCache *ins;
  6602     if (JSOp(*GetNextPc(pc)) == JSOP_TYPEOF)
  6603         ins = MGetNameCache::New(alloc(), object, name, MGetNameCache::NAMETYPEOF);
  6604     else
  6605         ins = MGetNameCache::New(alloc(), object, name, MGetNameCache::NAME);
  6607     current->add(ins);
  6608     current->push(ins);
  6610     if (!resumeAfter(ins))
  6611         return false;
  6613     types::TemporaryTypeSet *types = bytecodeTypes(pc);
  6614     return pushTypeBarrier(ins, types, true);
  6617 bool
  6618 IonBuilder::jsop_intrinsic(PropertyName *name)
  6620     types::TemporaryTypeSet *types = bytecodeTypes(pc);
  6622     // If we haven't executed this opcode yet, we need to get the intrinsic
  6623     // value and monitor the result.
  6624     if (types->empty()) {
  6625         MCallGetIntrinsicValue *ins = MCallGetIntrinsicValue::New(alloc(), name);
  6627         current->add(ins);
  6628         current->push(ins);
  6630         if (!resumeAfter(ins))
  6631             return false;
  6633         return pushTypeBarrier(ins, types, true);
  6636     // Bake in the intrinsic. Make sure that TI agrees with us on the type.
  6637     Value vp;
  6638     JS_ALWAYS_TRUE(script()->global().maybeGetIntrinsicValue(name, &vp));
  6639     JS_ASSERT(types->hasType(types::GetValueType(vp)));
  6641     pushConstant(vp);
  6642     return true;
  6645 bool
  6646 IonBuilder::jsop_bindname(PropertyName *name)
  6648     JS_ASSERT(analysis().usesScopeChain());
  6650     MDefinition *scopeChain = current->scopeChain();
  6651     MBindNameCache *ins = MBindNameCache::New(alloc(), scopeChain, name, script(), pc);
  6653     current->add(ins);
  6654     current->push(ins);
  6656     return resumeAfter(ins);
  6659 static MIRType
  6660 GetElemKnownType(bool needsHoleCheck, types::TemporaryTypeSet *types)
  6662     MIRType knownType = types->getKnownMIRType();
  6664     // Null and undefined have no payload so they can't be specialized.
  6665     // Since folding null/undefined while building SSA is not safe (see the
  6666     // comment in IsPhiObservable), we just add an untyped load instruction
  6667     // and rely on pushTypeBarrier and DCE to replace it with a null/undefined
  6668     // constant.
  6669     if (knownType == MIRType_Undefined || knownType == MIRType_Null)
  6670         knownType = MIRType_Value;
  6672     // Different architectures may want typed element reads which require
  6673     // hole checks to be done as either value or typed reads.
  6674     if (needsHoleCheck && !LIRGenerator::allowTypedElementHoleCheck())
  6675         knownType = MIRType_Value;
  6677     return knownType;
  6680 bool
  6681 IonBuilder::jsop_getelem()
  6683     MDefinition *index = current->pop();
  6684     MDefinition *obj = current->pop();
  6686     // Always use a call if we are performing analysis and not actually
  6687     // emitting code, to simplify later analysis.
  6688     if (info().executionModeIsAnalysis()) {
  6689         MInstruction *ins = MCallGetElement::New(alloc(), obj, index);
  6691         current->add(ins);
  6692         current->push(ins);
  6694         if (!resumeAfter(ins))
  6695             return false;
  6697         types::TemporaryTypeSet *types = bytecodeTypes(pc);
  6698         return pushTypeBarrier(ins, types, true);
  6701     bool emitted = false;
  6703     if (!getElemTryTypedObject(&emitted, obj, index) || emitted)
  6704         return emitted;
  6706     if (!getElemTryDense(&emitted, obj, index) || emitted)
  6707         return emitted;
  6709     if (!getElemTryTypedStatic(&emitted, obj, index) || emitted)
  6710         return emitted;
  6712     if (!getElemTryTypedArray(&emitted, obj, index) || emitted)
  6713         return emitted;
  6715     if (!getElemTryString(&emitted, obj, index) || emitted)
  6716         return emitted;
  6718     if (!getElemTryArguments(&emitted, obj, index) || emitted)
  6719         return emitted;
  6721     if (!getElemTryArgumentsInlined(&emitted, obj, index) || emitted)
  6722         return emitted;
  6724     if (script()->argumentsHasVarBinding() && obj->mightBeType(MIRType_MagicOptimizedArguments))
  6725         return abort("Type is not definitely lazy arguments.");
  6727     if (!getElemTryCache(&emitted, obj, index) || emitted)
  6728         return emitted;
  6730     // Emit call.
  6731     MInstruction *ins = MCallGetElement::New(alloc(), obj, index);
  6733     current->add(ins);
  6734     current->push(ins);
  6736     if (!resumeAfter(ins))
  6737         return false;
  6739     types::TemporaryTypeSet *types = bytecodeTypes(pc);
  6740     return pushTypeBarrier(ins, types, true);
  6743 bool
  6744 IonBuilder::getElemTryTypedObject(bool *emitted, MDefinition *obj, MDefinition *index)
  6746     JS_ASSERT(*emitted == false);
  6748     TypeDescrSet objDescrs;
  6749     if (!lookupTypeDescrSet(obj, &objDescrs))
  6750         return false;
  6752     if (!objDescrs.allOfArrayKind())
  6753         return true;
  6755     TypeDescrSet elemDescrs;
  6756     if (!objDescrs.arrayElementType(*this, &elemDescrs))
  6757         return false;
  6758     if (elemDescrs.empty())
  6759         return true;
  6761     JS_ASSERT(TypeDescr::isSized(elemDescrs.kind()));
  6763     int32_t elemSize;
  6764     if (!elemDescrs.allHaveSameSize(&elemSize))
  6765         return true;
  6767     switch (elemDescrs.kind()) {
  6768       case TypeDescr::X4:
  6769         // FIXME (bug 894105): load into a MIRType_float32x4 etc
  6770         return true;
  6772       case TypeDescr::Struct:
  6773       case TypeDescr::SizedArray:
  6774         return getElemTryComplexElemOfTypedObject(emitted,
  6775                                                   obj,
  6776                                                   index,
  6777                                                   objDescrs,
  6778                                                   elemDescrs,
  6779                                                   elemSize);
  6780       case TypeDescr::Scalar:
  6781         return getElemTryScalarElemOfTypedObject(emitted,
  6782                                                  obj,
  6783                                                  index,
  6784                                                  objDescrs,
  6785                                                  elemDescrs,
  6786                                                  elemSize);
  6788       case TypeDescr::Reference:
  6789         return true;
  6791       case TypeDescr::UnsizedArray:
  6792         MOZ_ASSUME_UNREACHABLE("Unsized arrays cannot be element types");
  6795     MOZ_ASSUME_UNREACHABLE("Bad kind");
  6798 static MIRType
  6799 MIRTypeForTypedArrayRead(ScalarTypeDescr::Type arrayType,
  6800                          bool observedDouble);
  6802 bool
  6803 IonBuilder::checkTypedObjectIndexInBounds(int32_t elemSize,
  6804                                           MDefinition *obj,
  6805                                           MDefinition *index,
  6806                                           TypeDescrSet objDescrs,
  6807                                           MDefinition **indexAsByteOffset,
  6808                                           bool *canBeNeutered)
  6810     // Ensure index is an integer.
  6811     MInstruction *idInt32 = MToInt32::New(alloc(), index);
  6812     current->add(idInt32);
  6814     // If we know the length statically from the type, just embed it.
  6815     // Otherwise, load it from the appropriate reserved slot on the
  6816     // typed object.  We know it's an int32, so we can convert from
  6817     // Value to int32 using truncation.
  6818     int32_t lenOfAll;
  6819     MDefinition *length;
  6820     if (objDescrs.hasKnownArrayLength(&lenOfAll)) {
  6821         length = constantInt(lenOfAll);
  6823         // If we are not loading the length from the object itself,
  6824         // then we still need to check if the object was neutered.
  6825         *canBeNeutered = true;
  6826     } else {
  6827         MInstruction *lengthValue = MLoadFixedSlot::New(alloc(), obj, JS_TYPEDOBJ_SLOT_LENGTH);
  6828         current->add(lengthValue);
  6830         MInstruction *length32 = MTruncateToInt32::New(alloc(), lengthValue);
  6831         current->add(length32);
  6833         length = length32;
  6835         // If we are loading the length from the object itself,
  6836         // then we do not need an extra neuter check, because the length
  6837         // will have been set to 0 when the object was neutered.
  6838         *canBeNeutered = false;
  6841     index = addBoundsCheck(idInt32, length);
  6843     // Since we passed the bounds check, it is impossible for the
  6844     // result of multiplication to overflow; so enable imul path.
  6845     MMul *mul = MMul::New(alloc(), index, constantInt(elemSize),
  6846                           MIRType_Int32, MMul::Integer);
  6847     current->add(mul);
  6849     *indexAsByteOffset = mul;
  6850     return true;
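// Worked example of the offset computation above (illustrative numbers): for an
// array of float64 elements, elemSize is 8, so once the bounds check has passed an
// access at index i becomes the byte offset i * 8; the multiplication cannot
// overflow int32 because i was just checked against the array length.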
  6853 bool
  6854 IonBuilder::getElemTryScalarElemOfTypedObject(bool *emitted,
  6855                                               MDefinition *obj,
  6856                                               MDefinition *index,
  6857                                               TypeDescrSet objDescrs,
  6858                                               TypeDescrSet elemDescrs,
  6859                                               int32_t elemSize)
  6861     JS_ASSERT(objDescrs.allOfArrayKind());
  6863     // Must always be loading the same scalar type
  6864     ScalarTypeDescr::Type elemType;
  6865     if (!elemDescrs.scalarType(&elemType))
  6866         return true;
  6867     JS_ASSERT(elemSize == ScalarTypeDescr::alignment(elemType));
  6869     bool canBeNeutered;
  6870     MDefinition *indexAsByteOffset;
  6871     if (!checkTypedObjectIndexInBounds(elemSize, obj, index, objDescrs,
  6872                                        &indexAsByteOffset, &canBeNeutered))
  6874         return false;
  6877     return pushScalarLoadFromTypedObject(emitted, obj, indexAsByteOffset, elemType, canBeNeutered);
  6880 bool
  6881 IonBuilder::pushScalarLoadFromTypedObject(bool *emitted,
  6882                                           MDefinition *obj,
  6883                                           MDefinition *offset,
  6884                                           ScalarTypeDescr::Type elemType,
  6885                                           bool canBeNeutered)
  6887     int32_t size = ScalarTypeDescr::size(elemType);
  6888     JS_ASSERT(size == ScalarTypeDescr::alignment(elemType));
  6890     // Find location within the owner object.
  6891     MDefinition *elements, *scaledOffset;
  6892     loadTypedObjectElements(obj, offset, size, canBeNeutered,
  6893                             &elements, &scaledOffset);
  6895     // Load the element.
  6896     MLoadTypedArrayElement *load = MLoadTypedArrayElement::New(alloc(), elements, scaledOffset, elemType);
  6897     current->add(load);
  6898     current->push(load);
  6900     // If we are reading in-bounds elements, we can use knowledge about
  6901     // the array type to determine the result type, even if the opcode has
  6902     // never executed. The known pushed type is only used to distinguish
  6903     // uint32 reads that may produce either doubles or integers.
  6904     types::TemporaryTypeSet *resultTypes = bytecodeTypes(pc);
  6905     bool allowDouble = resultTypes->hasType(types::Type::DoubleType());
  6907     // Note: knownType is not necessarily in resultTypes; e.g. if we
  6908     // have only observed integers coming out of a float array.
  6909     MIRType knownType = MIRTypeForTypedArrayRead(elemType, allowDouble);
  6911     // Note: we can ignore the type barrier here, we know the type must
  6912     // be valid and unbarriered. Also, need not set resultTypeSet,
  6913     // because knownType is scalar and a resultTypeSet would provide
  6914     // no useful additional info.
  6915     load->setResultType(knownType);
  6917     *emitted = true;
  6918     return true;
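// Why uint32 reads need the allowDouble distinction above, as an arithmetic example:
// a uint32 element can hold values up to 4294967295, while MIRType_Int32 tops out at
// 2147483647. A stored value such as 2147483648 (0x80000000) must therefore be read
// back as a double, so the result is only typed as int32 when no doubles have been
// observed at this site.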
  6921 bool
  6922 IonBuilder::getElemTryComplexElemOfTypedObject(bool *emitted,
  6923                                                MDefinition *obj,
  6924                                                MDefinition *index,
  6925                                                TypeDescrSet objDescrs,
  6926                                                TypeDescrSet elemDescrs,
  6927                                                int32_t elemSize)
  6929     JS_ASSERT(objDescrs.allOfArrayKind());
  6931     MDefinition *type = loadTypedObjectType(obj);
  6932     MDefinition *elemTypeObj = typeObjectForElementFromArrayStructType(type);
  6934     bool canBeNeutered;
  6935     MDefinition *indexAsByteOffset;
  6936     if (!checkTypedObjectIndexInBounds(elemSize, obj, index, objDescrs,
  6937                                        &indexAsByteOffset, &canBeNeutered))
  6939         return false;
  6942     return pushDerivedTypedObject(emitted, obj, indexAsByteOffset,
  6943                                   elemDescrs, elemTypeObj, canBeNeutered);
  6946 bool
  6947 IonBuilder::pushDerivedTypedObject(bool *emitted,
  6948                                    MDefinition *obj,
  6949                                    MDefinition *offset,
  6950                                    TypeDescrSet derivedTypeDescrs,
  6951                                    MDefinition *derivedTypeObj,
  6952                                    bool canBeNeutered)
  6954     // Find location within the owner object.
  6955     MDefinition *owner, *ownerOffset;
  6956     loadTypedObjectData(obj, offset, canBeNeutered, &owner, &ownerOffset);
  6958     // Create the derived typed object.
  6959     MInstruction *derivedTypedObj = MNewDerivedTypedObject::New(alloc(),
  6960                                                                 derivedTypeDescrs,
  6961                                                                 derivedTypeObj,
  6962                                                                 owner,
  6963                                                                 ownerOffset);
  6964     current->add(derivedTypedObj);
  6965     current->push(derivedTypedObj);
  6967     // Determine (if possible) the class/proto that `derivedTypedObj`
  6968     // will have. For derived typed objects, the class (transparent vs
  6969     // opaque) will be the same as the incoming object from which the
  6970     // derived typed object is, well, derived. The prototype will be
  6971     // determined based on the type descriptor (and is immutable).
  6972     types::TemporaryTypeSet *objTypes = obj->resultTypeSet();
  6973     const Class *expectedClass = objTypes ? objTypes->getKnownClass() : nullptr;
  6974     JSObject *expectedProto = derivedTypeDescrs.knownPrototype();
  6975     JS_ASSERT_IF(expectedClass, IsTypedObjectClass(expectedClass));
  6977     // Determine (if possible) the class/proto that the observed type set
  6978     // describes.
  6979     types::TemporaryTypeSet *observedTypes = bytecodeTypes(pc);
  6980     const Class *observedClass = observedTypes->getKnownClass();
  6981     JSObject *observedProto = observedTypes->getCommonPrototype();
  6983     // If expectedClass/expectedProto are both non-null (and hence
  6984     // known), we can predict precisely what TI type object
  6985     // derivedTypedObj will have. Therefore, if we observe that this
  6986     // TI type object is already contained in the set of
  6987     // observedTypes, we can skip the barrier.
  6988     //
  6989     // Barriers still wind up being needed in some relatively
  6990     // rare cases:
  6991     //
  6992     // - if multiple kinds of typed objects flow into this point,
  6993     //   in which case we will not be able to predict expectedClass
  6994     //   nor expectedProto.
  6995     //
  6996     // - if the code has never executed, in which case the set of
  6997     //   observed types will be incomplete.
  6998     //
  6999     // Barriers are particularly expensive here because they prevent
  7000     // us from optimizing the MNewDerivedTypedObject away.
  7001     if (observedClass && observedProto && observedClass == expectedClass &&
  7002         observedProto == expectedProto)
  7004         derivedTypedObj->setResultTypeSet(observedTypes);
  7005     } else {
  7006         if (!pushTypeBarrier(derivedTypedObj, observedTypes, true))
  7007             return false;
  7010     *emitted = true;
  7011     return true;
  7014 bool
  7015 IonBuilder::getElemTryDense(bool *emitted, MDefinition *obj, MDefinition *index)
  7017     JS_ASSERT(*emitted == false);
  7019     if (!ElementAccessIsDenseNative(obj, index))
  7020         return true;
  7022     // Don't generate a fast path if there have been bounds check failures
  7023     // and this access might be on a sparse property.
  7024     if (ElementAccessHasExtraIndexedProperty(constraints(), obj) && failedBoundsCheck_)
  7025         return true;
  7027     // Don't generate a fast path if this pc has seen negative indexes accessed,
  7028     // which will not appear to be extra indexed properties.
  7029     if (inspector->hasSeenNegativeIndexGetElement(pc))
  7030         return true;
  7032     // Emit dense getelem variant.
  7033     if (!jsop_getelem_dense(obj, index))
  7034         return false;
  7036     *emitted = true;
  7037     return true;
  7040 bool
  7041 IonBuilder::getElemTryTypedStatic(bool *emitted, MDefinition *obj, MDefinition *index)
  7043     JS_ASSERT(*emitted == false);
  7045     ScalarTypeDescr::Type arrayType;
  7046     if (!ElementAccessIsTypedArray(obj, index, &arrayType))
  7047         return true;
  7049     if (!LIRGenerator::allowStaticTypedArrayAccesses())
  7050         return true;
  7052     if (ElementAccessHasExtraIndexedProperty(constraints(), obj))
  7053         return true;
  7055     if (!obj->resultTypeSet())
  7056         return true;
  7058     JSObject *tarrObj = obj->resultTypeSet()->getSingleton();
  7059     if (!tarrObj)
  7060         return true;
  7062     TypedArrayObject *tarr = &tarrObj->as<TypedArrayObject>();
  7064     types::TypeObjectKey *tarrType = types::TypeObjectKey::get(tarr);
  7065     if (tarrType->unknownProperties())
  7066         return true;
  7068     // LoadTypedArrayElementStatic currently treats uint32 arrays as int32.
  7069     ArrayBufferView::ViewType viewType = (ArrayBufferView::ViewType) tarr->type();
  7070     if (viewType == ArrayBufferView::TYPE_UINT32)
  7071         return true;
  7073     MDefinition *ptr = convertShiftToMaskForStaticTypedArray(index, viewType);
  7074     if (!ptr)
  7075         return true;
  7077     // Emit LoadTypedArrayElementStatic.
  7078     tarrType->watchStateChangeForTypedArrayData(constraints());
  7080     obj->setImplicitlyUsedUnchecked();
  7081     index->setImplicitlyUsedUnchecked();
  7083     MLoadTypedArrayElementStatic *load = MLoadTypedArrayElementStatic::New(alloc(), tarr, ptr);
  7084     current->add(load);
  7085     current->push(load);
  7087     // The load is infallible if an undefined result will be coerced to the
  7088     // appropriate numeric type if the read is out of bounds. The truncation
  7089     // analysis picks up some of these cases, but is incomplete with respect
  7090     // to others. For now, sniff the bytecode for simple patterns following
  7091     // the load which guarantee a truncation or numeric conversion.
  7092     if (viewType == ArrayBufferView::TYPE_FLOAT32 || viewType == ArrayBufferView::TYPE_FLOAT64) {
  7093         jsbytecode *next = pc + JSOP_GETELEM_LENGTH;
  7094         if (*next == JSOP_POS)
  7095             load->setInfallible();
  7096     } else {
  7097         jsbytecode *next = pc + JSOP_GETELEM_LENGTH;
  7098         if (*next == JSOP_ZERO && *(next + JSOP_ZERO_LENGTH) == JSOP_BITOR)
  7099             load->setInfallible();
  7102     *emitted = true;
  7103     return true;
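// The bytecode sniffing above matches source patterns like the following
// (illustrative JS, shown only to document the opcode sequences being tested):
//
//     var x = +f32[i];      // JSOP_GETELEM followed by JSOP_POS
//     var y = i32[i] | 0;   // JSOP_GETELEM, JSOP_ZERO, JSOP_BITOR
//
// In both cases an out-of-bounds read yields undefined, which the trailing coercion
// turns into NaN or 0 respectively, so the static load can be marked infallible.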
  7106 bool
  7107 IonBuilder::getElemTryTypedArray(bool *emitted, MDefinition *obj, MDefinition *index)
  7109     JS_ASSERT(*emitted == false);
  7111     ScalarTypeDescr::Type arrayType;
  7112     if (!ElementAccessIsTypedArray(obj, index, &arrayType))
  7113         return true;
  7115     // Emit typed getelem variant.
  7116     if (!jsop_getelem_typed(obj, index, arrayType))
  7117         return false;
  7119     *emitted = true;
  7120     return true;
  7123 bool
  7124 IonBuilder::getElemTryString(bool *emitted, MDefinition *obj, MDefinition *index)
  7126     JS_ASSERT(*emitted == false);
  7128     if (obj->type() != MIRType_String || !IsNumberType(index->type()))
  7129         return true;
  7131     // If the index is expected to be out-of-bounds, don't optimize to avoid
  7132     // frequent bailouts.
  7133     if (bytecodeTypes(pc)->hasType(types::Type::UndefinedType()))
  7134         return true;
  7136     // Emit fast path for string[index].
  7137     MInstruction *idInt32 = MToInt32::New(alloc(), index);
  7138     current->add(idInt32);
  7139     index = idInt32;
  7141     MStringLength *length = MStringLength::New(alloc(), obj);
  7142     current->add(length);
  7144     index = addBoundsCheck(index, length);
  7146     MCharCodeAt *charCode = MCharCodeAt::New(alloc(), obj, index);
  7147     current->add(charCode);
  7149     MFromCharCode *result = MFromCharCode::New(alloc(), charCode);
  7150     current->add(result);
  7151     current->push(result);
  7153     *emitted = true;
  7154     return true;
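// Hedged illustration of the fast path above: for code like
//
//     var c = "hello"[i];
//
// with an integer |i|, the builder emits a bounds check against the string length,
// an MCharCodeAt to fetch the code unit, and an MFromCharCode to materialize the
// single-character string, avoiding a generic getelem call. The path is skipped when
// undefined has been observed at this site, since out-of-bounds indexes would then
// bail out repeatedly.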
  7157 bool
  7158 IonBuilder::getElemTryArguments(bool *emitted, MDefinition *obj, MDefinition *index)
  7160     JS_ASSERT(*emitted == false);
  7162     if (inliningDepth_ > 0)
  7163         return true;
  7165     if (obj->type() != MIRType_MagicOptimizedArguments)
  7166         return true;
  7168     // Emit GetFrameArgument.
  7170     JS_ASSERT(!info().argsObjAliasesFormals());
  7172     // Type Inference has guaranteed this is an optimized arguments object.
  7173     obj->setImplicitlyUsedUnchecked();
  7175     // Ensure that we are not reading past the number of actual arguments.
  7176     MArgumentsLength *length = MArgumentsLength::New(alloc());
  7177     current->add(length);
  7179     // Ensure index is an integer.
  7180     MInstruction *idInt32 = MToInt32::New(alloc(), index);
  7181     current->add(idInt32);
  7182     index = idInt32;
  7184     // Bail out if we read more than the number of actual arguments.
  7185     index = addBoundsCheck(index, length);
  7187     // Load the argument from the actual arguments.
  7188     MGetFrameArgument *load = MGetFrameArgument::New(alloc(), index, analysis_.hasSetArg());
  7189     current->add(load);
  7190     current->push(load);
  7192     types::TemporaryTypeSet *types = bytecodeTypes(pc);
  7193     if (!pushTypeBarrier(load, types, true))
  7194         return false;
  7196     *emitted = true;
  7197     return true;
  7200 bool
  7201 IonBuilder::getElemTryArgumentsInlined(bool *emitted, MDefinition *obj, MDefinition *index)
  7203     JS_ASSERT(*emitted == false);
  7205     if (inliningDepth_ == 0)
  7206         return true;
  7208     if (obj->type() != MIRType_MagicOptimizedArguments)
  7209         return true;
  7211     // Emit inlined arguments.
  7212     obj->setImplicitlyUsedUnchecked();
  7214     JS_ASSERT(!info().argsObjAliasesFormals());
  7216     // When the id is constant, we can just return the corresponding inlined argument
  7217     if (index->isConstant() && index->toConstant()->value().isInt32()) {
  7218         JS_ASSERT(inliningDepth_ > 0);
  7220         int32_t id = index->toConstant()->value().toInt32();
  7221         index->setImplicitlyUsedUnchecked();
  7223         if (id < (int32_t)inlineCallInfo_->argc() && id >= 0)
  7224             current->push(inlineCallInfo_->getArg(id));
  7225         else
  7226             pushConstant(UndefinedValue());
  7228         *emitted = true;
  7229         return true;
  7232     // A non-constant index into the inlined arguments is not supported yet.
  7233     return abort("NYI inlined not constant get argument element");
  7236 bool
  7237 IonBuilder::getElemTryCache(bool *emitted, MDefinition *obj, MDefinition *index)
  7239     JS_ASSERT(*emitted == false);
  7241     // Make sure we have at least an object.
  7242     if (!obj->mightBeType(MIRType_Object))
  7243         return true;
  7245     // Don't cache for strings.
  7246     if (obj->mightBeType(MIRType_String))
  7247         return true;
  7249     // Index should be integer or string
  7250     if (!index->mightBeType(MIRType_Int32) && !index->mightBeType(MIRType_String))
  7251         return true;
  7253     // Turn off caching if the element is int32 and we've seen non-native objects as the target
  7254     // of this getelem.
  7255     bool nonNativeGetElement = inspector->hasSeenNonNativeGetElement(pc);
  7256     if (index->mightBeType(MIRType_Int32) && nonNativeGetElement)
  7257         return true;
  7259     // Emit GetElementCache.
  7261     types::TemporaryTypeSet *types = bytecodeTypes(pc);
  7262     bool barrier = PropertyReadNeedsTypeBarrier(analysisContext, constraints(), obj, nullptr, types);
  7264     // Always add a barrier if the index might be a string, so that the cache
  7265     // can attach stubs for particular properties.
  7266     if (index->mightBeType(MIRType_String))
  7267         barrier = true;
  7269     // See note about always needing a barrier in jsop_getprop.
  7270     if (needsToMonitorMissingProperties(types))
  7271         barrier = true;
  7273     MInstruction *ins = MGetElementCache::New(alloc(), obj, index, barrier);
  7275     current->add(ins);
  7276     current->push(ins);
  7278     if (!resumeAfter(ins))
  7279         return false;
  7281     // Spice up type information.
  7282     if (index->type() == MIRType_Int32 && !barrier) {
  7283         bool needHoleCheck = !ElementAccessIsPacked(constraints(), obj);
  7284         MIRType knownType = GetElemKnownType(needHoleCheck, types);
  7286         if (knownType != MIRType_Value && knownType != MIRType_Double)
  7287             ins->setResultType(knownType);
  7290     if (!pushTypeBarrier(ins, types, barrier))
  7291         return false;
  7293     *emitted = true;
  7294     return true;
  7297 bool
  7298 IonBuilder::jsop_getelem_dense(MDefinition *obj, MDefinition *index)
  7300     types::TemporaryTypeSet *types = bytecodeTypes(pc);
  7302     if (JSOp(*pc) == JSOP_CALLELEM && !index->mightBeType(MIRType_String)) {
  7303         // Indexed call on an element of an array. Populate the observed types
  7304         // with any objects that could be in the array, to avoid extraneous
  7305         // type barriers.
  7306         AddObjectsForPropertyRead(obj, nullptr, types);
  7309     bool barrier = PropertyReadNeedsTypeBarrier(analysisContext, constraints(), obj, nullptr, types);
  7310     bool needsHoleCheck = !ElementAccessIsPacked(constraints(), obj);
  7312     // Reads which are on holes in the object do not have to bail out if
  7313     // undefined values have been observed at this access site and the access
  7314     // cannot hit another indexed property on the object or its prototypes.
  7315     bool readOutOfBounds =
  7316         types->hasType(types::Type::UndefinedType()) &&
  7317         !ElementAccessHasExtraIndexedProperty(constraints(), obj);
  7319     MIRType knownType = MIRType_Value;
  7320     if (!barrier)
  7321         knownType = GetElemKnownType(needsHoleCheck, types);
  7323     // Ensure index is an integer.
  7324     MInstruction *idInt32 = MToInt32::New(alloc(), index);
  7325     current->add(idInt32);
  7326     index = idInt32;
  7328     // Get the elements vector.
  7329     MInstruction *elements = MElements::New(alloc(), obj);
  7330     current->add(elements);
  7332     // Note: to help GVN, use the original MElements instruction and not
  7333     // MConvertElementsToDoubles as operand. This is fine because converting
  7334     // elements to double does not change the initialized length.
  7335     MInitializedLength *initLength = MInitializedLength::New(alloc(), elements);
  7336     current->add(initLength);
  7338     // If we can load the element as a definite double, make sure to check that
  7339     // the array has been converted to homogeneous doubles first.
  7340     //
  7341     // NB: We disable this optimization in parallel execution mode
  7342     // because it is inherently not threadsafe (how do you convert the
  7343     // array atomically when there might be concurrent readers?).
  7344     types::TemporaryTypeSet *objTypes = obj->resultTypeSet();
  7345     ExecutionMode executionMode = info().executionMode();
  7346     bool loadDouble =
  7347         executionMode == SequentialExecution &&
  7348         !barrier &&
  7349         loopDepth_ &&
  7350         !readOutOfBounds &&
  7351         !needsHoleCheck &&
  7352         knownType == MIRType_Double &&
  7353         objTypes &&
  7354         objTypes->convertDoubleElements(constraints()) == types::TemporaryTypeSet::AlwaysConvertToDoubles;
  7355     if (loadDouble)
  7356         elements = addConvertElementsToDoubles(elements);
  7358     MInstruction *load;
  7360     if (!readOutOfBounds) {
  7361         // This load should not return undefined, so likely we're reading
  7362         // in-bounds elements, and the array is packed or its holes are not
  7363         // read. This is the best case: we can separate the bounds check for
  7364         // hoisting.
  7365         index = addBoundsCheck(index, initLength);
  7367         load = MLoadElement::New(alloc(), elements, index, needsHoleCheck, loadDouble);
  7368         current->add(load);
  7369     } else {
  7370         // This load may return undefined, so assume that we *can* read
  7371         // holes, or read out of bounds. In this case, the bounds check is
  7372         // part of the opcode.
  7373         load = MLoadElementHole::New(alloc(), elements, index, initLength, needsHoleCheck);
  7374         current->add(load);
  7376         // Since readOutOfBounds is true, the typeset must have undefined, and
  7377         // then either additional types or a barrier. This means we should
  7378         // never have a typed version of LoadElementHole.
  7379         JS_ASSERT(knownType == MIRType_Value);
  7382     // If the array is being converted to doubles, but we've observed
  7383     // just int, substitute a type set of int+double into the observed
  7384     // type set. The reason for this is that, in the
  7385     // interpreter+baseline, such arrays may consist of mixed
  7386     // ints/doubles, but when we enter ion code, we will be coercing
  7387     // all inputs to doubles. Therefore, the type barrier checking for
  7388     // just int is highly likely (*almost* guaranteed) to fail sooner
  7389     // or later. Essentially, by eagerly coercing to double, ion is
  7390     // making the observed types outdated. To compensate for this, we
  7391     // substitute a broader observed type set consisting of both ints
  7392     // and doubles. There is perhaps a tradeoff here, so we limit this
  7393     // optimization to parallel code, where it is needed to prevent
  7394     // perpetual bailouts in some extreme cases. (Bug 977853)
  7395     //
  7396     // NB: we have not added a MConvertElementsToDoubles MIR, so we
  7397     // cannot *assume* the result is a double.
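           // For example, a parallel kernel that has so far only observed int32
           // reads from an array whose elements TI converts to doubles would
           // otherwise fail its int32-only barrier once the coerced doubles
           // start flowing in.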
  7398     if (executionMode == ParallelExecution &&
  7399         barrier &&
  7400         types->getKnownMIRType() == MIRType_Int32 &&
  7401         objTypes &&
  7402         objTypes->convertDoubleElements(constraints()) == types::TemporaryTypeSet::AlwaysConvertToDoubles)
  7404         // Note: double implies int32 as well for typesets
  7405         types = alloc_->lifoAlloc()->new_<types::TemporaryTypeSet>(types::Type::DoubleType());
  7406         if (!types)
  7407             return false;
  7409         barrier = false; // Don't need a barrier anymore
  7412     if (knownType != MIRType_Value)
  7413         load->setResultType(knownType);
  7415     current->push(load);
  7416     return pushTypeBarrier(load, types, barrier);
  7419 void
  7420 IonBuilder::addTypedArrayLengthAndData(MDefinition *obj,
  7421                                        BoundsChecking checking,
  7422                                        MDefinition **index,
  7423                                        MInstruction **length, MInstruction **elements)
  7425     MOZ_ASSERT((index != nullptr) == (elements != nullptr));
  7427     if (obj->isConstant() && obj->toConstant()->value().isObject()) {
  7428         TypedArrayObject *tarr = &obj->toConstant()->value().toObject().as<TypedArrayObject>();
  7429         void *data = tarr->viewData();
  7430         // Bug 979449 - Optimistically embed the elements and use TI to
  7431         //              invalidate if we move them.
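               // Only a tenured data pointer is safe to bake in here: a
               // nursery-allocated buffer may move when it is tenured.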
  7432         if (!gc::IsInsideNursery(tarr->runtimeFromMainThread(), data)) {
  7433             // The 'data' pointer can change in rare circumstances
  7434             // (ArrayBufferObject::changeContents).
  7435             types::TypeObjectKey *tarrType = types::TypeObjectKey::get(tarr);
  7436             if (!tarrType->unknownProperties()) {
  7437                 tarrType->watchStateChangeForTypedArrayData(constraints());
  7439                 obj->setImplicitlyUsedUnchecked();
  7441                 int32_t len = SafeCast<int32_t>(tarr->length());
  7442                 *length = MConstant::New(alloc(), Int32Value(len));
  7443                 current->add(*length);
  7445                 if (index) {
  7446                     if (checking == DoBoundsCheck)
  7447                         *index = addBoundsCheck(*index, *length);
  7449                     *elements = MConstantElements::New(alloc(), data);
  7450                     current->add(*elements);
  7452                 return;
  7457     *length = MTypedArrayLength::New(alloc(), obj);
  7458     current->add(*length);
  7460     if (index) {
  7461         if (checking == DoBoundsCheck)
  7462             *index = addBoundsCheck(*index, *length);
  7464         *elements = MTypedArrayElements::New(alloc(), obj);
  7465         current->add(*elements);
  7469 MDefinition *
  7470 IonBuilder::convertShiftToMaskForStaticTypedArray(MDefinition *id,
  7471                                                   ArrayBufferView::ViewType viewType)
  7473     // No shifting is necessary if the typed array has single byte elements.
  7474     if (TypedArrayShift(viewType) == 0)
  7475         return id;
  7477     // If the index is an already shifted constant, undo the shift to get the
  7478     // absolute offset being accessed.
  7479     if (id->isConstant() && id->toConstant()->value().isInt32()) {
  7480         int32_t index = id->toConstant()->value().toInt32();
  7481         MConstant *offset = MConstant::New(alloc(), Int32Value(index << TypedArrayShift(viewType)));
  7482         current->add(offset);
  7483         return offset;
  7486     if (!id->isRsh() || id->isEffectful())
  7487         return nullptr;
  7488     if (!id->getOperand(1)->isConstant())
  7489         return nullptr;
  7490     const Value &value = id->getOperand(1)->toConstant()->value();
  7491     if (!value.isInt32() || uint32_t(value.toInt32()) != TypedArrayShift(viewType))
  7492         return nullptr;
  7494     // Instead of shifting, mask off the low bits of the index so that
  7495     // a non-scaled access on the typed array can be performed.
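           // For example, an Int32Array access written as |i32[x >> 2]| has
           // TypedArrayShift == 2, so the index becomes |x & ~3|: the same byte
           // offset the shifted index addressed, but computed without scaling.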
  7496     MConstant *mask = MConstant::New(alloc(), Int32Value(~((1 << value.toInt32()) - 1)));
  7497     MBitAnd *ptr = MBitAnd::New(alloc(), id->getOperand(0), mask);
  7499     ptr->infer(nullptr, nullptr);
  7500     JS_ASSERT(!ptr->isEffectful());
  7502     current->add(mask);
  7503     current->add(ptr);
  7505     return ptr;
  7508 static MIRType
  7509 MIRTypeForTypedArrayRead(ScalarTypeDescr::Type arrayType,
  7510                          bool observedDouble)
  7512     switch (arrayType) {
  7513       case ScalarTypeDescr::TYPE_INT8:
  7514       case ScalarTypeDescr::TYPE_UINT8:
  7515       case ScalarTypeDescr::TYPE_UINT8_CLAMPED:
  7516       case ScalarTypeDescr::TYPE_INT16:
  7517       case ScalarTypeDescr::TYPE_UINT16:
  7518       case ScalarTypeDescr::TYPE_INT32:
  7519         return MIRType_Int32;
  7520       case ScalarTypeDescr::TYPE_UINT32:
  7521         return observedDouble ? MIRType_Double : MIRType_Int32;
  7522       case ScalarTypeDescr::TYPE_FLOAT32:
  7523         return (LIRGenerator::allowFloat32Optimizations()) ? MIRType_Float32 : MIRType_Double;
  7524       case ScalarTypeDescr::TYPE_FLOAT64:
  7525         return MIRType_Double;
  7527     MOZ_ASSUME_UNREACHABLE("Unknown typed array type");
  7530 bool
  7531 IonBuilder::jsop_getelem_typed(MDefinition *obj, MDefinition *index,
  7532                                ScalarTypeDescr::Type arrayType)
  7534     types::TemporaryTypeSet *types = bytecodeTypes(pc);
  7536     bool maybeUndefined = types->hasType(types::Type::UndefinedType());
  7538     // Reading from a Uint32Array will result in a double for values
  7539     // that don't fit in an int32. We have to bail out if this happens
  7540     // and the instruction is not known to return a double.
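           // For example, a stored value of 0x80000000 reads back as 2147483648,
           // which is representable only as a double.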
  7541     bool allowDouble = types->hasType(types::Type::DoubleType());
  7543     // Ensure id is an integer.
  7544     MInstruction *idInt32 = MToInt32::New(alloc(), index);
  7545     current->add(idInt32);
  7546     index = idInt32;
  7548     if (!maybeUndefined) {
  7549         // Assume the index is in range, so that we can hoist the length,
  7550         // elements vector and bounds check.
  7552         // If we are reading in-bounds elements, we can use knowledge about
  7553         // the array type to determine the result type, even if the opcode has
  7554         // never executed. The known pushed type is only used to distinguish
  7555         // uint32 reads that may produce either doubles or integers.
  7556         MIRType knownType = MIRTypeForTypedArrayRead(arrayType, allowDouble);
  7558         // Get length, bounds-check, then get elements, and add all instructions.
  7559         MInstruction *length;
  7560         MInstruction *elements;
  7561         addTypedArrayLengthAndData(obj, DoBoundsCheck, &index, &length, &elements);
  7563         // Load the element.
  7564         MLoadTypedArrayElement *load = MLoadTypedArrayElement::New(alloc(), elements, index, arrayType);
  7565         current->add(load);
  7566         current->push(load);
  7568         // Note: we can ignore the type barrier here; we know the type must
  7569         // be valid and unbarriered.
  7570         load->setResultType(knownType);
  7571         return true;
  7572     } else {
  7573         // We need a type barrier if the array's element type has never been
  7574         // observed (we've only read out-of-bounds values). Note that for
  7575         // Uint32Array, we only check for int32: if allowDouble is false we
  7576         // will bail out when we read a double.
  7577         bool needsBarrier = true;
  7578         switch (arrayType) {
  7579           case ScalarTypeDescr::TYPE_INT8:
  7580           case ScalarTypeDescr::TYPE_UINT8:
  7581           case ScalarTypeDescr::TYPE_UINT8_CLAMPED:
  7582           case ScalarTypeDescr::TYPE_INT16:
  7583           case ScalarTypeDescr::TYPE_UINT16:
  7584           case ScalarTypeDescr::TYPE_INT32:
  7585           case ScalarTypeDescr::TYPE_UINT32:
  7586             if (types->hasType(types::Type::Int32Type()))
  7587                 needsBarrier = false;
  7588             break;
  7589           case ScalarTypeDescr::TYPE_FLOAT32:
  7590           case ScalarTypeDescr::TYPE_FLOAT64:
  7591             if (allowDouble)
  7592                 needsBarrier = false;
  7593             break;
  7594           default:
  7595             MOZ_ASSUME_UNREACHABLE("Unknown typed array type");
  7598         // Assume we will read out-of-bounds values. In this case the
  7599         // bounds check will be part of the instruction, and the instruction
  7600         // will always return a Value.
  7601         MLoadTypedArrayElementHole *load =
  7602             MLoadTypedArrayElementHole::New(alloc(), obj, index, arrayType, allowDouble);
  7603         current->add(load);
  7604         current->push(load);
  7606         return pushTypeBarrier(load, types, needsBarrier);
  7610 bool
  7611 IonBuilder::jsop_setelem()
  7613     bool emitted = false;
  7615     MDefinition *value = current->pop();
  7616     MDefinition *index = current->pop();
  7617     MDefinition *object = current->pop();
  7619     if (!setElemTryTypedObject(&emitted, object, index, value) || emitted)
  7620         return emitted;
  7622     if (!setElemTryTypedStatic(&emitted, object, index, value) || emitted)
  7623         return emitted;
  7625     if (!setElemTryTypedArray(&emitted, object, index, value) || emitted)
  7626         return emitted;
  7628     if (!setElemTryDense(&emitted, object, index, value) || emitted)
  7629         return emitted;
  7631     if (!setElemTryArguments(&emitted, object, index, value) || emitted)
  7632         return emitted;
  7634     if (script()->argumentsHasVarBinding() && object->mightBeType(MIRType_MagicOptimizedArguments))
  7635         return abort("Type is not definitely lazy arguments.");
  7637     if (!setElemTryCache(&emitted, object, index, value) || emitted)
  7638         return emitted;
  7640     // Emit call.
  7641     MInstruction *ins = MCallSetElement::New(alloc(), object, index, value);
  7642     current->add(ins);
  7643     current->push(value);
  7645     return resumeAfter(ins);
  7648 bool
  7649 IonBuilder::setElemTryTypedObject(bool *emitted, MDefinition *obj,
  7650                                   MDefinition *index, MDefinition *value)
  7652     JS_ASSERT(*emitted == false);
  7654     TypeDescrSet objTypeDescrs;
  7655     if (!lookupTypeDescrSet(obj, &objTypeDescrs))
  7656         return false;
  7658     if (!objTypeDescrs.allOfArrayKind())
  7659         return true;
  7661     TypeDescrSet elemTypeDescrs;
  7662     if (!objTypeDescrs.arrayElementType(*this, &elemTypeDescrs))
  7663         return false;
  7664     if (elemTypeDescrs.empty())
  7665         return true;
  7667     JS_ASSERT(TypeDescr::isSized(elemTypeDescrs.kind()));
  7669     int32_t elemSize;
  7670     if (!elemTypeDescrs.allHaveSameSize(&elemSize))
  7671         return true;
  7673     switch (elemTypeDescrs.kind()) {
  7674       case TypeDescr::X4:
  7675         // FIXME (bug 894105): store a MIRType_float32x4 etc
  7676         return true;
  7678       case TypeDescr::Reference:
  7679       case TypeDescr::Struct:
  7680       case TypeDescr::SizedArray:
  7681       case TypeDescr::UnsizedArray:
  7682         // For now, only optimize storing scalars.
  7683         return true;
  7685       case TypeDescr::Scalar:
  7686         return setElemTryScalarElemOfTypedObject(emitted,
  7687                                                  obj,
  7688                                                  index,
  7689                                                  objTypeDescrs,
  7690                                                  value,
  7691                                                  elemTypeDescrs,
  7692                                                  elemSize);
  7695     MOZ_ASSUME_UNREACHABLE("Bad kind");
  7698 bool
  7699 IonBuilder::setElemTryScalarElemOfTypedObject(bool *emitted,
  7700                                               MDefinition *obj,
  7701                                               MDefinition *index,
  7702                                               TypeDescrSet objTypeDescrs,
  7703                                               MDefinition *value,
  7704                                               TypeDescrSet elemTypeDescrs,
  7705                                               int32_t elemSize)
  7707     // Must always be loading the same scalar type
  7708     ScalarTypeDescr::Type elemType;
  7709     if (!elemTypeDescrs.scalarType(&elemType))
  7710         return true;
  7711     JS_ASSERT(elemSize == ScalarTypeDescr::alignment(elemType));
  7713     bool canBeNeutered;
  7714     MDefinition *indexAsByteOffset;
  7715     if (!checkTypedObjectIndexInBounds(elemSize, obj, index, objTypeDescrs,
  7716                                        &indexAsByteOffset, &canBeNeutered))
  7718         return false;
  7721     // Store the element
  7722     if (!storeScalarTypedObjectValue(obj, indexAsByteOffset, elemType, canBeNeutered, false, value))
  7723         return false;
  7725     current->push(value);
  7727     *emitted = true;
  7728     return true;
  7731 bool
  7732 IonBuilder::setElemTryTypedStatic(bool *emitted, MDefinition *object,
  7733                                   MDefinition *index, MDefinition *value)
  7735     JS_ASSERT(*emitted == false);
  7737     ScalarTypeDescr::Type arrayType;
  7738     if (!ElementAccessIsTypedArray(object, index, &arrayType))
  7739         return true;
  7741     if (!LIRGenerator::allowStaticTypedArrayAccesses())
  7742         return true;
  7744     if (ElementAccessHasExtraIndexedProperty(constraints(), object))
  7745         return true;
  7747     if (!object->resultTypeSet())
  7748         return true;
  7749     JSObject *tarrObj = object->resultTypeSet()->getSingleton();
  7750     if (!tarrObj)
  7751         return true;
  7753     TypedArrayObject *tarr = &tarrObj->as<TypedArrayObject>();
  7755     types::TypeObjectKey *tarrType = types::TypeObjectKey::get(tarr);
  7756     if (tarrType->unknownProperties())
  7757         return true;
  7759     ArrayBufferView::ViewType viewType = (ArrayBufferView::ViewType) tarr->type();
  7760     MDefinition *ptr = convertShiftToMaskForStaticTypedArray(index, viewType);
  7761     if (!ptr)
  7762         return true;
  7764     // Emit StoreTypedArrayElementStatic.
  7765     tarrType->watchStateChangeForTypedArrayData(constraints());
  7767     object->setImplicitlyUsedUnchecked();
  7768     index->setImplicitlyUsedUnchecked();
  7770     // Clamp value to [0, 255] for Uint8ClampedArray.
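           // For example, storing 300 into a Uint8ClampedArray element writes
           // 255, and storing -5 writes 0.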
  7771     MDefinition *toWrite = value;
  7772     if (viewType == ArrayBufferView::TYPE_UINT8_CLAMPED) {
  7773         toWrite = MClampToUint8::New(alloc(), value);
  7774         current->add(toWrite->toInstruction());
  7777     MInstruction *store = MStoreTypedArrayElementStatic::New(alloc(), tarr, ptr, toWrite);
  7778     current->add(store);
  7779     current->push(value);
  7781     if (!resumeAfter(store))
  7782         return false;
  7784     *emitted = true;
  7785     return true;
  7788 bool
  7789 IonBuilder::setElemTryTypedArray(bool *emitted, MDefinition *object,
  7790                                  MDefinition *index, MDefinition *value)
  7792     JS_ASSERT(*emitted == false);
  7794     ScalarTypeDescr::Type arrayType;
  7795     if (!ElementAccessIsTypedArray(object, index, &arrayType))
  7796         return true;
  7798     // Emit typed setelem variant.
  7799     if (!jsop_setelem_typed(arrayType, SetElem_Normal, object, index, value))
  7800         return false;
  7802     *emitted = true;
  7803     return true;
  7806 bool
  7807 IonBuilder::setElemTryDense(bool *emitted, MDefinition *object,
  7808                             MDefinition *index, MDefinition *value)
  7810     JS_ASSERT(*emitted == false);
  7812     if (!ElementAccessIsDenseNative(object, index))
  7813         return true;
  7814     if (PropertyWriteNeedsTypeBarrier(alloc(), constraints(), current,
  7815                                       &object, nullptr, &value, /* canModify = */ true))
  7817         return true;
  7819     if (!object->resultTypeSet())
  7820         return true;
  7822     types::TemporaryTypeSet::DoubleConversion conversion =
  7823         object->resultTypeSet()->convertDoubleElements(constraints());
  7825     // If AmbiguousDoubleConversion, only handle int32 values for now.
  7826     if (conversion == types::TemporaryTypeSet::AmbiguousDoubleConversion &&
  7827         value->type() != MIRType_Int32)
  7829         return true;
  7832     // Don't generate a fast path if there have been bounds check failures
  7833     // and this access might be on a sparse property.
  7834     if (ElementAccessHasExtraIndexedProperty(constraints(), object) && failedBoundsCheck_)
  7835         return true;
  7837     // Emit dense setelem variant.
  7838     if (!jsop_setelem_dense(conversion, SetElem_Normal, object, index, value))
  7839         return false;
  7841     *emitted = true;
  7842     return true;
  7845 bool
  7846 IonBuilder::setElemTryArguments(bool *emitted, MDefinition *object,
  7847                                 MDefinition *index, MDefinition *value)
  7849     JS_ASSERT(*emitted == false);
  7851     if (object->type() != MIRType_MagicOptimizedArguments)
  7852         return true;
  7854     // Arguments are not supported yet.
  7855     return abort("NYI arguments[]=");
  7858 bool
  7859 IonBuilder::setElemTryCache(bool *emitted, MDefinition *object,
  7860                             MDefinition *index, MDefinition *value)
  7862     JS_ASSERT(*emitted == false);
  7864     if (!object->mightBeType(MIRType_Object))
  7865         return true;
  7867     if (!index->mightBeType(MIRType_Int32) && !index->mightBeType(MIRType_String))
  7868         return true;
  7870     // TODO: Bug 876650: remove this check:
  7871     // Temporarily disable the cache if the object is not a dense native,
  7872     // until the cache supports more ICs.
  7873     SetElemICInspector icInspect(inspector->setElemICInspector(pc));
  7874     if (!icInspect.sawDenseWrite() && !icInspect.sawTypedArrayWrite())
  7875         return true;
  7877     if (PropertyWriteNeedsTypeBarrier(alloc(), constraints(), current,
  7878                                       &object, nullptr, &value, /* canModify = */ true))
  7880         return true;
  7883     // We can avoid worrying about holes in the IC if we know a priori we are safe
  7884     // from them. If TI can guard that there are no indexed properties on the prototype
  7885     // chain, we know that we aren't missing any setters by overwriting the hole with
  7886     // another value.
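           // For example, an indexed accessor installed with
           // |Object.defineProperty(Array.prototype, 0, {set: f})| would make a
           // blind write over a hole at index 0 observable.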
  7887     bool guardHoles = ElementAccessHasExtraIndexedProperty(constraints(), object);
  7889     if (NeedsPostBarrier(info(), value))
  7890         current->add(MPostWriteBarrier::New(alloc(), object, value));
  7892     // Emit SetElementCache.
  7893     MInstruction *ins = MSetElementCache::New(alloc(), object, index, value, script()->strict(), guardHoles);
  7894     current->add(ins);
  7895     current->push(value);
  7897     if (!resumeAfter(ins))
  7898         return false;
  7900     *emitted = true;
  7901     return true;
  7904 bool
  7905 IonBuilder::jsop_setelem_dense(types::TemporaryTypeSet::DoubleConversion conversion,
  7906                                SetElemSafety safety,
  7907                                MDefinition *obj, MDefinition *id, MDefinition *value)
  7909     MIRType elementType = DenseNativeElementType(constraints(), obj);
  7910     bool packed = ElementAccessIsPacked(constraints(), obj);
  7912     // Writes which are on holes in the object do not have to bail out if they
  7913     // cannot hit another indexed property on the object or its prototypes.
  7914     bool writeOutOfBounds = !ElementAccessHasExtraIndexedProperty(constraints(), obj);
  7916     if (NeedsPostBarrier(info(), value))
  7917         current->add(MPostWriteBarrier::New(alloc(), obj, value));
  7919     // Ensure id is an integer.
  7920     MInstruction *idInt32 = MToInt32::New(alloc(), id);
  7921     current->add(idInt32);
  7922     id = idInt32;
  7924     // Get the elements vector.
  7925     MElements *elements = MElements::New(alloc(), obj);
  7926     current->add(elements);
  7928     // Ensure the value is a double, if double conversion might be needed.
  7929     MDefinition *newValue = value;
  7930     switch (conversion) {
  7931       case types::TemporaryTypeSet::AlwaysConvertToDoubles:
  7932       case types::TemporaryTypeSet::MaybeConvertToDoubles: {
  7933         MInstruction *valueDouble = MToDouble::New(alloc(), value);
  7934         current->add(valueDouble);
  7935         newValue = valueDouble;
  7936         break;
  7939       case types::TemporaryTypeSet::AmbiguousDoubleConversion: {
  7940         JS_ASSERT(value->type() == MIRType_Int32);
  7941         MInstruction *maybeDouble = MMaybeToDoubleElement::New(alloc(), elements, value);
  7942         current->add(maybeDouble);
  7943         newValue = maybeDouble;
  7944         break;
  7947       case types::TemporaryTypeSet::DontConvertToDoubles:
  7948         break;
  7950       default:
  7951         MOZ_ASSUME_UNREACHABLE("Unknown double conversion");
  7954     bool writeHole = false;
  7955     if (safety == SetElem_Normal) {
  7956         SetElemICInspector icInspect(inspector->setElemICInspector(pc));
  7957         writeHole = icInspect.sawOOBDenseWrite();
  7960     // Use MStoreElementHole if this SETELEM has written to out-of-bounds
  7961     // indexes in the past. Otherwise, use MStoreElement so that we can hoist
  7962     // the initialized length and bounds check.
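           // For example, |a[a.length] = v| appends past the initialized length
           // and therefore has to take the MStoreElementHole path.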
  7963     MStoreElementCommon *store;
  7964     if (writeHole && writeOutOfBounds) {
  7965         JS_ASSERT(safety == SetElem_Normal);
  7967         MStoreElementHole *ins = MStoreElementHole::New(alloc(), obj, elements, id, newValue);
  7968         store = ins;
  7970         current->add(ins);
  7971         current->push(value);
  7973         if (!resumeAfter(ins))
  7974             return false;
  7975     } else {
  7976         MInitializedLength *initLength = MInitializedLength::New(alloc(), elements);
  7977         current->add(initLength);
  7979         bool needsHoleCheck;
  7980         if (safety == SetElem_Normal) {
  7981             id = addBoundsCheck(id, initLength);
  7982             needsHoleCheck = !packed && !writeOutOfBounds;
  7983         } else {
  7984             needsHoleCheck = false;
  7987         MStoreElement *ins = MStoreElement::New(alloc(), elements, id, newValue, needsHoleCheck);
  7988         store = ins;
  7990         if (safety == SetElem_Unsafe)
  7991             ins->setRacy();
  7993         current->add(ins);
  7995         if (safety == SetElem_Normal)
  7996             current->push(value);
  7998         if (!resumeAfter(ins))
  7999             return false;
  8002     // Determine whether a write barrier is required.
  8003     if (obj->resultTypeSet()->propertyNeedsBarrier(constraints(), JSID_VOID))
  8004         store->setNeedsBarrier();
  8006     if (elementType != MIRType_None && packed)
  8007         store->setElementType(elementType);
  8009     return true;
  8013 bool
  8014 IonBuilder::jsop_setelem_typed(ScalarTypeDescr::Type arrayType,
  8015                                SetElemSafety safety,
  8016                                MDefinition *obj, MDefinition *id, MDefinition *value)
  8018     bool expectOOB;
  8019     if (safety == SetElem_Normal) {
  8020         SetElemICInspector icInspect(inspector->setElemICInspector(pc));
  8021         expectOOB = icInspect.sawOOBTypedArrayWrite();
  8022     } else {
  8023         expectOOB = false;
  8026     if (expectOOB)
  8027         spew("Emitting OOB TypedArray SetElem");
  8029     // Ensure id is an integer.
  8030     MInstruction *idInt32 = MToInt32::New(alloc(), id);
  8031     current->add(idInt32);
  8032     id = idInt32;
  8034     // Get length, bounds-check, then get elements, and add all instructions.
  8035     MInstruction *length;
  8036     MInstruction *elements;
  8037     BoundsChecking checking = (!expectOOB && safety == SetElem_Normal)
  8038                               ? DoBoundsCheck
  8039                               : SkipBoundsCheck;
  8040     addTypedArrayLengthAndData(obj, checking, &id, &length, &elements);
  8042     // Clamp value to [0, 255] for Uint8ClampedArray.
  8043     MDefinition *toWrite = value;
  8044     if (arrayType == ScalarTypeDescr::TYPE_UINT8_CLAMPED) {
  8045         toWrite = MClampToUint8::New(alloc(), value);
  8046         current->add(toWrite->toInstruction());
  8049     // Store the value.
  8050     MInstruction *ins;
  8051     if (expectOOB) {
  8052         ins = MStoreTypedArrayElementHole::New(alloc(), elements, length, id, toWrite, arrayType);
  8053     } else {
  8054         MStoreTypedArrayElement *store =
  8055             MStoreTypedArrayElement::New(alloc(), elements, id, toWrite, arrayType);
  8056         if (safety == SetElem_Unsafe)
  8057             store->setRacy();
  8058         ins = store;
  8061     current->add(ins);
  8063     if (safety == SetElem_Normal)
  8064         current->push(value);
  8066     return resumeAfter(ins);
  8069 bool
  8070 IonBuilder::jsop_setelem_typed_object(ScalarTypeDescr::Type arrayType,
  8071                                       SetElemSafety safety,
  8072                                       bool racy,
  8073                                       MDefinition *object, MDefinition *index, MDefinition *value)
  8075     JS_ASSERT(safety == SetElem_Unsafe); // Can be fixed, but there's been no reason to as of yet
  8077     MInstruction *int_index = MToInt32::New(alloc(), index);
  8078     current->add(int_index);
  8080     size_t elemSize = ScalarTypeDescr::alignment(arrayType);
  8081     MMul *byteOffset = MMul::New(alloc(), int_index, constantInt(elemSize),
  8082                                         MIRType_Int32, MMul::Integer);
  8083     current->add(byteOffset);
  8085     if (!storeScalarTypedObjectValue(object, byteOffset, arrayType, false, racy, value))
  8086         return false;
  8088     return true;
  8091 bool
  8092 IonBuilder::jsop_length()
  8094     if (jsop_length_fastPath())
  8095         return true;
  8097     PropertyName *name = info().getAtom(pc)->asPropertyName();
  8098     return jsop_getprop(name);
  8101 bool
  8102 IonBuilder::jsop_length_fastPath()
  8104     types::TemporaryTypeSet *types = bytecodeTypes(pc);
  8106     if (types->getKnownMIRType() != MIRType_Int32)
  8107         return false;
  8109     MDefinition *obj = current->peek(-1);
  8111     if (obj->mightBeType(MIRType_String)) {
  8112         if (obj->mightBeType(MIRType_Object))
  8113             return false;
  8114         current->pop();
  8115         MStringLength *ins = MStringLength::New(alloc(), obj);
  8116         current->add(ins);
  8117         current->push(ins);
  8118         return true;
  8121     if (obj->mightBeType(MIRType_Object)) {
  8122         types::TemporaryTypeSet *objTypes = obj->resultTypeSet();
  8124         if (objTypes &&
  8125             objTypes->getKnownClass() == &ArrayObject::class_ &&
  8126             !objTypes->hasObjectFlags(constraints(), types::OBJECT_FLAG_LENGTH_OVERFLOW))
  8128             current->pop();
  8129             MElements *elements = MElements::New(alloc(), obj);
  8130             current->add(elements);
  8132             // Read length.
  8133             MArrayLength *length = MArrayLength::New(alloc(), elements);
  8134             current->add(length);
  8135             current->push(length);
  8136             return true;
  8139         if (objTypes && objTypes->getTypedArrayType() != ScalarTypeDescr::TYPE_MAX) {
  8140             current->pop();
  8141             MInstruction *length = addTypedArrayLength(obj);
  8142             current->push(length);
  8143             return true;
  8147     return false;
  8150 bool
  8151 IonBuilder::jsop_arguments()
  8153     if (info().needsArgsObj()) {
  8154         current->push(current->argumentsObject());
  8155         return true;
  8157     JS_ASSERT(lazyArguments_);
  8158     current->push(lazyArguments_);
  8159     return true;
  8162 bool
  8163 IonBuilder::jsop_arguments_length()
  8165     // Type Inference has guaranteed this is an optimized arguments object.
  8166     MDefinition *args = current->pop();
  8167     args->setImplicitlyUsedUnchecked();
  8169     // When not inlining, the actual argument count is only known at run time.
  8170     if (inliningDepth_ == 0) {
  8171         MInstruction *ins = MArgumentsLength::New(alloc());
  8172         current->add(ins);
  8173         current->push(ins);
  8174         return true;
  8177     // We are inlining and know the number of arguments the callee pushed
  8178     return pushConstant(Int32Value(inlineCallInfo_->argv().length()));
  8181 bool
  8182 IonBuilder::jsop_rest()
  8184     JSObject *templateObject = inspector->getTemplateObject(pc);
  8185     JS_ASSERT(templateObject->is<ArrayObject>());
  8187     if (inliningDepth_ == 0) {
  8188         // We don't know anything about the callee.
  8189         MArgumentsLength *numActuals = MArgumentsLength::New(alloc());
  8190         current->add(numActuals);
  8192         // Pass in the number of actual arguments, the number of formals (not
  8193         // including the rest parameter slot itself), and the template object.
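               // For example, for |function f(a, ...rest)| called with three
               // arguments, the runtime actuals count is 3 and the formal count
               // passed here is 1, so |rest| ends up with two elements.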
  8194         MRest *rest = MRest::New(alloc(), constraints(), numActuals, info().nargs() - 1,
  8195                                  templateObject);
  8196         current->add(rest);
  8197         current->push(rest);
  8198         return true;
  8201     // We know the exact number of arguments the callee pushed.
  8202     unsigned numActuals = inlineCallInfo_->argv().length();
  8203     unsigned numFormals = info().nargs() - 1;
  8204     unsigned numRest = numActuals > numFormals ? numActuals - numFormals : 0;
  8206     MNewArray *array = MNewArray::New(alloc(), constraints(), numRest, templateObject,
  8207                                       templateObject->type()->initialHeap(constraints()),
  8208                                       MNewArray::NewArray_Allocating);
  8209     current->add(array);
  8211     if (numRest == 0) {
  8212         // No more updating to do. (Note that in this one case the length from
  8213         // the template object is already correct.)
  8214         current->push(array);
  8215         return true;
  8218     MElements *elements = MElements::New(alloc(), array);
  8219     current->add(elements);
  8221     // Unroll the argument copy loop. We don't need to do any bounds or hole
  8222     // checking here.
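           // (numRest and every argument value are known at compile time, and
           // the array above was allocated with room for exactly numRest
           // elements.)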
  8223     MConstant *index = nullptr;
  8224     for (unsigned i = numFormals; i < numActuals; i++) {
  8225         index = MConstant::New(alloc(), Int32Value(i - numFormals));
  8226         current->add(index);
  8228         MDefinition *arg = inlineCallInfo_->argv()[i];
  8229         MStoreElement *store = MStoreElement::New(alloc(), elements, index, arg,
  8230                                                   /* needsHoleCheck = */ false);
  8231         current->add(store);
  8233         if (NeedsPostBarrier(info(), arg))
  8234             current->add(MPostWriteBarrier::New(alloc(), array, arg));
  8237     // The array's length is incorrectly 0 now, from the template object
  8238     // created by BaselineCompiler::emit_JSOP_REST() before the actual argument
  8239     // count was known. Set the correct length now that we know that count.
  8240     MSetArrayLength *length = MSetArrayLength::New(alloc(), elements, index);
  8241     current->add(length);
  8243     // Update the initialized length for all the (necessarily non-hole)
  8244     // elements added.
  8245     MSetInitializedLength *initLength = MSetInitializedLength::New(alloc(), elements, index);
  8246     current->add(initLength);
  8248     current->push(array);
  8249     return true;
  8252 bool
  8253 IonBuilder::getDefiniteSlot(types::TemporaryTypeSet *types, PropertyName *name,
  8254                             types::HeapTypeSetKey *property)
  8256     if (!types || types->unknownObject() || types->getObjectCount() != 1)
  8257         return false;
  8259     types::TypeObjectKey *type = types->getObject(0);
  8260     if (type->unknownProperties() || type->singleton())
  8261         return false;
  8263     jsid id = NameToId(name);
  8265     *property = type->property(id);
  8266     return property->maybeTypes() &&
  8267            property->maybeTypes()->definiteProperty() &&
  8268            !property->nonData(constraints());
  8271 bool
  8272 IonBuilder::jsop_runonce()
  8274     MRunOncePrologue *ins = MRunOncePrologue::New(alloc());
  8275     current->add(ins);
  8276     return resumeAfter(ins);
  8279 bool
  8280 IonBuilder::jsop_not()
  8282     MDefinition *value = current->pop();
  8284     MNot *ins = MNot::New(alloc(), value);
  8285     current->add(ins);
  8286     current->push(ins);
  8287     ins->infer();
  8288     return true;
  8291 bool
  8292 IonBuilder::objectsHaveCommonPrototype(types::TemporaryTypeSet *types, PropertyName *name,
  8293                                        bool isGetter, JSObject *foundProto)
  8295     // Given that foundProto is a prototype with a getter or setter for name,
  8296     // return whether looking up name on any object in |types| will go through
  8297     // foundProto, i.e. all the objects have foundProto on their prototype
  8298     // chain and do not have a property for name before reaching foundProto.
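           // For example, when optimizing a getter found on Array.prototype,
           // every object in |types| must have Array.prototype on its prototype
           // chain and must not define |name| itself before reaching it.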
  8300     // No sense looking if we don't know what's going on.
  8301     if (!types || types->unknownObject())
  8302         return false;
  8304     for (unsigned i = 0; i < types->getObjectCount(); i++) {
  8305         if (types->getSingleObject(i) == foundProto)
  8306             continue;
  8308         types::TypeObjectKey *type = types->getObject(i);
  8309         if (!type)
  8310             continue;
  8312         while (type) {
  8313             if (type->unknownProperties())
  8314                 return false;
  8316             const Class *clasp = type->clasp();
  8317             if (!ClassHasEffectlessLookup(clasp, name) || ClassHasResolveHook(compartment, clasp, name))
  8318                 return false;
  8320             // Look for a getter/setter on the class itself which may need
  8321             // to be called. Ignore the getGeneric hook for typed arrays, it
  8322             // only handles integers and forwards names to the prototype.
  8323             if (isGetter && clasp->ops.getGeneric && !IsTypedArrayClass(clasp))
  8324                 return false;
  8325             if (!isGetter && clasp->ops.setGeneric)
  8326                 return false;
  8328             // Test for isOwnProperty() without freezing. If we end up
  8329             // optimizing, freezePropertiesForCommonPropFunc will freeze the
  8330             // property type sets later on.
  8331             types::HeapTypeSetKey property = type->property(NameToId(name));
  8332             if (types::TypeSet *types = property.maybeTypes()) {
  8333                 if (!types->empty() || types->nonDataProperty())
  8334                     return false;
  8336             if (JSObject *obj = type->singleton()) {
  8337                 if (types::CanHaveEmptyPropertyTypesForOwnProperty(obj))
  8338                     return false;
  8341             if (!type->hasTenuredProto())
  8342                 return false;
  8343             JSObject *proto = type->proto().toObjectOrNull();
  8344             if (proto == foundProto)
  8345                 break;
  8346             if (!proto) {
  8347                 // The foundProto being searched for did not show up on the
  8348                 // object's prototype chain.
  8349                 return false;
  8351             type = types::TypeObjectKey::get(type->proto().toObjectOrNull());
  8355     return true;
  8358 void
  8359 IonBuilder::freezePropertiesForCommonPrototype(types::TemporaryTypeSet *types, PropertyName *name,
  8360                                                JSObject *foundProto)
  8362     for (unsigned i = 0; i < types->getObjectCount(); i++) {
  8363         // If we found a Singleton object's own-property, there's nothing to
  8364         // freeze.
  8365         if (types->getSingleObject(i) == foundProto)
  8366             continue;
  8368         types::TypeObjectKey *type = types->getObject(i);
  8369         if (!type)
  8370             continue;
  8372         while (true) {
  8373             types::HeapTypeSetKey property = type->property(NameToId(name));
  8374             JS_ALWAYS_TRUE(!property.isOwnProperty(constraints()));
  8376             // Don't mark the proto. It will be held down by the shape
  8377             // guard. This allows us to use properties found on prototypes
  8378             // with properties unknown to TI.
  8379             if (type->proto() == foundProto)
  8380                 break;
  8381             type = types::TypeObjectKey::get(type->proto().toObjectOrNull());
  8386 inline MDefinition *
  8387 IonBuilder::testCommonGetterSetter(types::TemporaryTypeSet *types, PropertyName *name,
  8388                                    bool isGetter, JSObject *foundProto, Shape *lastProperty)
  8390     // Check if all objects being accessed will lookup the name through foundProto.
  8391     if (!objectsHaveCommonPrototype(types, name, isGetter, foundProto))
  8392         return nullptr;
  8394     // We can optimize the getter/setter, so freeze all involved properties to
  8395     // ensure there isn't a lower shadowing getter or setter installed in the
  8396     // future.
  8397     freezePropertiesForCommonPrototype(types, name, foundProto);
  8399     // Add a shape guard on the prototype we found the property on. The rest of
  8400     // the prototype chain is guarded by TI freezes. Note that a shape guard is
  8401     // good enough here, even in the proxy case, because we have ensured there
  8402     // are no lookup hooks for this property.
  8403     MInstruction *wrapper = constant(ObjectValue(*foundProto));
  8404     return addShapeGuard(wrapper, lastProperty, Bailout_ShapeGuard);
  8407 bool
  8408 IonBuilder::annotateGetPropertyCache(MDefinition *obj, MGetPropertyCache *getPropCache,
  8409                                      types::TemporaryTypeSet *objTypes,
  8410                                      types::TemporaryTypeSet *pushedTypes)
  8412     PropertyName *name = getPropCache->name();
  8414     // Ensure every pushed value is a singleton.
  8415     if (pushedTypes->unknownObject() || pushedTypes->baseFlags() != 0)
  8416         return true;
  8418     for (unsigned i = 0; i < pushedTypes->getObjectCount(); i++) {
  8419         if (pushedTypes->getTypeObject(i) != nullptr)
  8420             return true;
  8423     // The object's typeset should contain only known objects (no primitives or unknowns).
  8424     if (!objTypes || objTypes->baseFlags() || objTypes->unknownObject())
  8425         return true;
  8427     unsigned int objCount = objTypes->getObjectCount();
  8428     if (objCount == 0)
  8429         return true;
  8431     InlinePropertyTable *inlinePropTable = getPropCache->initInlinePropertyTable(alloc(), pc);
  8432     if (!inlinePropTable)
  8433         return false;
  8435     // Ensure that the relevant property typeset for each type object
  8436     // is a single-object typeset containing a JSFunction.
  8437     for (unsigned int i = 0; i < objCount; i++) {
  8438         types::TypeObject *baseTypeObj = objTypes->getTypeObject(i);
  8439         if (!baseTypeObj)
  8440             continue;
  8441         types::TypeObjectKey *typeObj = types::TypeObjectKey::get(baseTypeObj);
  8442         if (typeObj->unknownProperties() || !typeObj->hasTenuredProto() || !typeObj->proto().isObject())
  8443             continue;
  8445         const Class *clasp = typeObj->clasp();
  8446         if (!ClassHasEffectlessLookup(clasp, name) || ClassHasResolveHook(compartment, clasp, name))
  8447             continue;
  8449         types::HeapTypeSetKey ownTypes = typeObj->property(NameToId(name));
  8450         if (ownTypes.isOwnProperty(constraints()))
  8451             continue;
  8453         JSObject *singleton = testSingletonProperty(typeObj->proto().toObject(), name);
  8454         if (!singleton || !singleton->is<JSFunction>())
  8455             continue;
  8457         // Don't add cases corresponding to non-observed pushes
  8458         if (!pushedTypes->hasType(types::Type::ObjectType(singleton)))
  8459             continue;
  8461         if (!inlinePropTable->addEntry(alloc(), baseTypeObj, &singleton->as<JSFunction>()))
  8462             return false;
  8465     if (inlinePropTable->numEntries() == 0) {
  8466         getPropCache->clearInlinePropertyTable();
  8467         return true;
  8470 #ifdef DEBUG
  8471     if (inlinePropTable->numEntries() > 0)
  8472         IonSpew(IonSpew_Inlining, "Annotated GetPropertyCache with %d/%d inline cases",
  8473                                     (int) inlinePropTable->numEntries(), (int) objCount);
  8474 #endif
  8476     // If we successfully annotated the GetPropertyCache and there are inline cases,
  8477     // then keep a resume point of the state right before this instruction for use
  8478     // later when we have to bail out to this point in the fallback case of a
  8479     // PolyInlineDispatch.
  8480     if (inlinePropTable->numEntries() > 0) {
  8481         // Push the object back onto the stack temporarily to capture the resume point.
  8482         current->push(obj);
  8483         MResumePoint *resumePoint = MResumePoint::New(alloc(), current, pc, callerResumePoint_,
  8484                                                       MResumePoint::ResumeAt);
  8485         if (!resumePoint)
  8486             return false;
  8487         inlinePropTable->setPriorResumePoint(resumePoint);
  8488         current->pop();
  8490     return true;
  8493 // Returns true if an idempotent cache has ever invalidated this script
  8494 // or an outer script.
  8495 bool
  8496 IonBuilder::invalidatedIdempotentCache()
  8498     IonBuilder *builder = this;
  8499     do {
  8500         if (builder->script()->invalidatedIdempotentCache())
  8501             return true;
  8502         builder = builder->callerBuilder_;
  8503     } while (builder);
  8505     return false;
  8508 bool
  8509 IonBuilder::loadSlot(MDefinition *obj, size_t slot, size_t nfixed, MIRType rvalType,
  8510                      bool barrier, types::TemporaryTypeSet *types)
  8512     if (slot < nfixed) {
  8513         MLoadFixedSlot *load = MLoadFixedSlot::New(alloc(), obj, slot);
  8514         current->add(load);
  8515         current->push(load);
  8517         load->setResultType(rvalType);
  8518         return pushTypeBarrier(load, types, barrier);
  8521     MSlots *slots = MSlots::New(alloc(), obj);
  8522     current->add(slots);
  8524     MLoadSlot *load = MLoadSlot::New(alloc(), slots, slot - nfixed);
  8525     current->add(load);
  8526     current->push(load);
  8528     load->setResultType(rvalType);
  8529     return pushTypeBarrier(load, types, barrier);
  8532 bool
  8533 IonBuilder::loadSlot(MDefinition *obj, Shape *shape, MIRType rvalType,
  8534                      bool barrier, types::TemporaryTypeSet *types)
  8536     return loadSlot(obj, shape->slot(), shape->numFixedSlots(), rvalType, barrier, types);
  8539 bool
  8540 IonBuilder::storeSlot(MDefinition *obj, size_t slot, size_t nfixed,
  8541                       MDefinition *value, bool needsBarrier,
  8542                       MIRType slotType /* = MIRType_None */)
  8544     if (slot < nfixed) {
  8545         MStoreFixedSlot *store = MStoreFixedSlot::New(alloc(), obj, slot, value);
  8546         current->add(store);
  8547         current->push(value);
  8548         if (needsBarrier)
  8549             store->setNeedsBarrier();
  8550         return resumeAfter(store);
  8553     MSlots *slots = MSlots::New(alloc(), obj);
  8554     current->add(slots);
  8556     MStoreSlot *store = MStoreSlot::New(alloc(), slots, slot - nfixed, value);
  8557     current->add(store);
  8558     current->push(value);
  8559     if (needsBarrier)
  8560         store->setNeedsBarrier();
  8561     if (slotType != MIRType_None)
  8562         store->setSlotType(slotType);
  8563     return resumeAfter(store);
  8566 bool
  8567 IonBuilder::storeSlot(MDefinition *obj, Shape *shape, MDefinition *value, bool needsBarrier,
  8568                       MIRType slotType /* = MIRType_None */)
  8570     JS_ASSERT(shape->writable());
  8571     return storeSlot(obj, shape->slot(), shape->numFixedSlots(), value, needsBarrier, slotType);
  8574 bool
  8575 IonBuilder::jsop_getprop(PropertyName *name)
  8577     bool emitted = false;
  8579     // Try to optimize arguments.length.
  8580     if (!getPropTryArgumentsLength(&emitted) || emitted)
  8581         return emitted;
  8583     types::TemporaryTypeSet *types = bytecodeTypes(pc);
  8584     bool barrier = PropertyReadNeedsTypeBarrier(analysisContext, constraints(),
  8585                                                 current->peek(-1), name, types);
  8587     // Always use a call if we are performing analysis and
  8588     // not actually emitting code, to simplify later analysis. Also skip deeper
  8589     // analysis if there are no known types for this operation, as it will
  8590     // always invalidate when executing.
  8591     if (info().executionModeIsAnalysis() || types->empty()) {
  8592         MDefinition *obj = current->peek(-1);
  8593         MCallGetProperty *call = MCallGetProperty::New(alloc(), obj, name, *pc == JSOP_CALLPROP);
  8594         current->add(call);
  8596         // During the definite properties analysis we can still try to bake in
  8597         // constants read off the prototype chain, to allow inlining later on.
  8598         // In this case we still need the getprop call so that the later
  8599         // analysis knows when the |this| value has been read from.
  8600         if (info().executionModeIsAnalysis()) {
  8601             if (!getPropTryConstant(&emitted, name, types) || emitted)
  8602                 return emitted;
  8605         current->pop();
  8606         current->push(call);
  8607         return resumeAfter(call) && pushTypeBarrier(call, types, true);
  8610     // Try to hardcode known constants.
  8611     if (!getPropTryConstant(&emitted, name, types) || emitted)
  8612         return emitted;
  8614     // Try to emit loads from known binary data blocks
  8615     if (!getPropTryTypedObject(&emitted, name, types) || emitted)
  8616         return emitted;
  8618     // Try to emit loads from definite slots.
  8619     if (!getPropTryDefiniteSlot(&emitted, name, barrier, types) || emitted)
  8620         return emitted;
  8622     // Try to inline a common property getter, or make a call.
  8623     if (!getPropTryCommonGetter(&emitted, name, types) || emitted)
  8624         return emitted;
  8626     // Try to emit a monomorphic/polymorphic access based on baseline caches.
  8627     if (!getPropTryInlineAccess(&emitted, name, barrier, types) || emitted)
  8628         return emitted;
  8630     // Try to emit a polymorphic cache.
  8631     if (!getPropTryCache(&emitted, name, barrier, types) || emitted)
  8632         return emitted;
  8634     // Emit a call.
  8635     MDefinition *obj = current->pop();
  8636     MCallGetProperty *call = MCallGetProperty::New(alloc(), obj, name, *pc == JSOP_CALLPROP);
  8637     current->add(call);
  8638     current->push(call);
  8639     if (!resumeAfter(call))
  8640         return false;
  8642     return pushTypeBarrier(call, types, true);
  8645 bool
  8646 IonBuilder::getPropTryArgumentsLength(bool *emitted)
  8648     JS_ASSERT(*emitted == false);
  8649     if (current->peek(-1)->type() != MIRType_MagicOptimizedArguments) {
  8650         if (script()->argumentsHasVarBinding() &&
  8651             current->peek(-1)->mightBeType(MIRType_MagicOptimizedArguments))
  8653             return abort("Type is not definitely lazy arguments.");
  8655         return true;
  8657     if (JSOp(*pc) != JSOP_LENGTH)
  8658         return true;
  8660     *emitted = true;
  8661     return jsop_arguments_length();
  8664 bool
  8665 IonBuilder::getPropTryConstant(bool *emitted, PropertyName *name,
  8666                                types::TemporaryTypeSet *types)
  8668     JS_ASSERT(*emitted == false);
  8669     JSObject *singleton = types ? types->getSingleton() : nullptr;
  8670     if (!singleton)
  8671         return true;
  8673     bool testObject, testString;
  8674     if (!testSingletonPropertyTypes(current->peek(-1), singleton, name, &testObject, &testString))
  8675         return true;
  8677     MDefinition *obj = current->pop();
  8679     // Property access is a known constant -- safe to emit.
  8680     JS_ASSERT(!testString || !testObject);
  8681     if (testObject)
  8682         current->add(MGuardObject::New(alloc(), obj));
  8683     else if (testString)
  8684         current->add(MGuardString::New(alloc(), obj));
  8685     else
  8686         obj->setImplicitlyUsedUnchecked();
  8688     pushConstant(ObjectValue(*singleton));
  8690     *emitted = true;
  8691     return true;
  8694 bool
  8695 IonBuilder::getPropTryTypedObject(bool *emitted, PropertyName *name,
  8696                                   types::TemporaryTypeSet *resultTypes)
  8698     TypeDescrSet fieldDescrs;
  8699     int32_t fieldOffset;
  8700     size_t fieldIndex;
  8701     if (!lookupTypedObjectField(current->peek(-1), name, &fieldOffset,
  8702                                 &fieldDescrs, &fieldIndex))
  8703         return false;
  8704     if (fieldDescrs.empty())
  8705         return true;
  8707     switch (fieldDescrs.kind()) {
  8708       case TypeDescr::Reference:
  8709         return true;
  8711       case TypeDescr::X4:
  8712         // FIXME (bug 894104): load into a MIRType_float32x4 etc
  8713         return true;
  8715       case TypeDescr::Struct:
  8716       case TypeDescr::SizedArray:
  8717         return getPropTryComplexPropOfTypedObject(emitted,
  8718                                                   fieldOffset,
  8719                                                   fieldDescrs,
  8720                                                   fieldIndex,
  8721                                                   resultTypes);
  8723       case TypeDescr::Scalar:
  8724         return getPropTryScalarPropOfTypedObject(emitted,
  8725                                                  fieldOffset,
  8726                                                  fieldDescrs,
  8727                                                  resultTypes);
  8729       case TypeDescr::UnsizedArray:
  8730         MOZ_ASSUME_UNREACHABLE("Field of unsized array type");
  8733     MOZ_ASSUME_UNREACHABLE("Bad kind");
  8736 bool
  8737 IonBuilder::getPropTryScalarPropOfTypedObject(bool *emitted,
  8738                                               int32_t fieldOffset,
  8739                                               TypeDescrSet fieldDescrs,
  8740                                               types::TemporaryTypeSet *resultTypes)
  8742     // Must always be loading the same scalar type
  8743     ScalarTypeDescr::Type fieldType;
  8744     if (!fieldDescrs.scalarType(&fieldType))
  8745         return true;
  8747     // OK, perform the optimization
  8749     MDefinition *typedObj = current->pop();
  8751     return pushScalarLoadFromTypedObject(emitted, typedObj, constantInt(fieldOffset),
  8752                                          fieldType, true);
  8755 bool
  8756 IonBuilder::getPropTryComplexPropOfTypedObject(bool *emitted,
  8757                                                int32_t fieldOffset,
  8758                                                TypeDescrSet fieldDescrs,
  8759                                                size_t fieldIndex,
  8760                                                types::TemporaryTypeSet *resultTypes)
  8762     // Must know the field index so that we can load the new type
  8763     // object for the derived value
  8764     if (fieldIndex == SIZE_MAX)
  8765         return true;
  8767     // OK, perform the optimization
  8769     MDefinition *typedObj = current->pop();
  8771     // Identify the type object for the field.
  8772     MDefinition *type = loadTypedObjectType(typedObj);
  8773     MDefinition *fieldTypeObj = typeObjectForFieldFromStructType(type, fieldIndex);
  8775     return pushDerivedTypedObject(emitted, typedObj, constantInt(fieldOffset),
  8776                                   fieldDescrs, fieldTypeObj, true);
  8779 bool
  8780 IonBuilder::getPropTryDefiniteSlot(bool *emitted, PropertyName *name,
  8781                                    bool barrier, types::TemporaryTypeSet *types)
  8783     JS_ASSERT(*emitted == false);
  8784     types::HeapTypeSetKey property;
  8785     if (!getDefiniteSlot(current->peek(-1)->resultTypeSet(), name, &property))
  8786         return true;
  8788     MDefinition *obj = current->pop();
  8789     MDefinition *useObj = obj;
  8790     if (obj->type() != MIRType_Object) {
  8791         MGuardObject *guard = MGuardObject::New(alloc(), obj);
  8792         current->add(guard);
  8793         useObj = guard;
  8796     MLoadFixedSlot *fixed = MLoadFixedSlot::New(alloc(), useObj, property.maybeTypes()->definiteSlot());
  8797     if (!barrier)
  8798         fixed->setResultType(types->getKnownMIRType());
  8800     current->add(fixed);
  8801     current->push(fixed);
  8803     if (!pushTypeBarrier(fixed, types, barrier))
  8804         return false;
  8806     *emitted = true;
  8807     return true;
  8810 bool
  8811 IonBuilder::getPropTryCommonGetter(bool *emitted, PropertyName *name,
  8812                                    types::TemporaryTypeSet *types)
  8814     JS_ASSERT(*emitted == false);
  8816     Shape *lastProperty = nullptr;
  8817     JSFunction *commonGetter = nullptr;
  8818     JSObject *foundProto = inspector->commonGetPropFunction(pc, &lastProperty, &commonGetter);
  8819     if (!foundProto)
  8820         return true;
  8822     types::TemporaryTypeSet *objTypes = current->peek(-1)->resultTypeSet();
  8823     MDefinition *guard = testCommonGetterSetter(objTypes, name, /* isGetter = */ true,
  8824                                                 foundProto, lastProperty);
  8825     if (!guard)
  8826         return true;
  8828     bool isDOM = objTypes->isDOMClass();
  8830     MDefinition *obj = current->pop();
  8832     if (isDOM && testShouldDOMCall(objTypes, commonGetter, JSJitInfo::Getter)) {
  8833         const JSJitInfo *jitinfo = commonGetter->jitInfo();
  8834         MInstruction *get;
  8835         if (jitinfo->isInSlot) {
  8836             // We can't use MLoadFixedSlot here because it might not have the
  8837             // right aliasing behavior; we want to alias DOM setters.
  8838             get = MGetDOMMember::New(alloc(), jitinfo, obj, guard);
  8839         } else {
  8840             get = MGetDOMProperty::New(alloc(), jitinfo, obj, guard);
  8842         current->add(get);
  8843         current->push(get);
  8845         if (get->isEffectful() && !resumeAfter(get))
  8846             return false;
  8848         if (!pushDOMTypeBarrier(get, types, commonGetter))
  8849             return false;
  8851         *emitted = true;
  8852         return true;
  8855     // Don't call the getter with a primitive value.
  8856     if (objTypes->getKnownMIRType() != MIRType_Object) {
  8857         MGuardObject *guardObj = MGuardObject::New(alloc(), obj);
  8858         current->add(guardObj);
  8859         obj = guardObj;
  8862     // Spoof stack to expected state for call.
  8864     // Make sure there's enough room
  8865     if (!current->ensureHasSlots(2))
  8866         return false;
  8867     pushConstant(ObjectValue(*commonGetter));
  8869     current->push(obj);
  8871     CallInfo callInfo(alloc(), false);
  8872     if (!callInfo.init(current, 0))
  8873         return false;
  8875     // Inline if we can, otherwise, forget it and just generate a call.
  8876     bool inlineable = false;
  8877     if (commonGetter->isInterpreted()) {
  8878         InliningDecision decision = makeInliningDecision(commonGetter, callInfo);
  8879         switch (decision) {
  8880           case InliningDecision_Error:
  8881             return false;
  8882           case InliningDecision_DontInline:
  8883             break;
  8884           case InliningDecision_Inline:
  8885             inlineable = true;
  8886             break;
  8890     if (inlineable) {
  8891         if (!inlineScriptedCall(callInfo, commonGetter))
  8892             return false;
  8893     } else {
  8894         if (!makeCall(commonGetter, callInfo, false))
  8895             return false;
  8898     *emitted = true;
  8899     return true;
  8902 static bool
  8903 CanInlinePropertyOpShapes(const BaselineInspector::ShapeVector &shapes)
  8905     for (size_t i = 0; i < shapes.length(); i++) {
  8906         // We inline the property access as long as the shape is not in
  8907         // dictionary mode. We cannot be sure that the shape is still a
  8908         // lastProperty, and calling Shape::search() on dictionary mode
  8909         // shapes that aren't lastProperty is invalid.
  8910         if (shapes[i]->inDictionary())
  8911             return false;
  8914     return true;
  8917 bool
  8918 IonBuilder::getPropTryInlineAccess(bool *emitted, PropertyName *name,
  8919                                    bool barrier, types::TemporaryTypeSet *types)
  8921     JS_ASSERT(*emitted == false);
  8922     if (current->peek(-1)->type() != MIRType_Object)
  8923         return true;
  8925     BaselineInspector::ShapeVector shapes(alloc());
  8926     if (!inspector->maybeShapesForPropertyOp(pc, shapes))
  8927         return false;
  8929     if (shapes.empty() || !CanInlinePropertyOpShapes(shapes))
  8930         return true;
  8932     MIRType rvalType = types->getKnownMIRType();
  8933     if (barrier || IsNullOrUndefined(rvalType))
  8934         rvalType = MIRType_Value;
  8936     MDefinition *obj = current->pop();
  8937     if (shapes.length() == 1) {
  8938         // In the monomorphic case, use separate ShapeGuard and LoadSlot
  8939         // instructions.
  8940         spew("Inlining monomorphic GETPROP");
  8942         Shape *objShape = shapes[0];
  8943         obj = addShapeGuard(obj, objShape, Bailout_ShapeGuard);
  8945         Shape *shape = objShape->searchLinear(NameToId(name));
  8946         JS_ASSERT(shape);
  8948         if (!loadSlot(obj, shape, rvalType, barrier, types))
  8949             return false;
  8950     } else {
  8951         JS_ASSERT(shapes.length() > 1);
  8952         spew("Inlining polymorphic GETPROP");
  8954         MGetPropertyPolymorphic *load = MGetPropertyPolymorphic::New(alloc(), obj, name);
  8955         current->add(load);
  8956         current->push(load);
  8958         for (size_t i = 0; i < shapes.length(); i++) {
  8959             Shape *objShape = shapes[i];
  8960             Shape *shape = objShape->searchLinear(NameToId(name));
  8961             JS_ASSERT(shape);
  8962             if (!load->addShape(objShape, shape))
  8963                 return false;
  8966         if (failedShapeGuard_)
  8967             load->setNotMovable();
  8969         load->setResultType(rvalType);
  8970         if (!pushTypeBarrier(load, types, barrier))
  8971             return false;
  8974     *emitted = true;
  8975     return true;
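       // Fallback: emit a polymorphic inline cache (MGetPropertyCache),
       // marking it idempotent when PropertyReadIsIdempotent allows and
       // pushing a type barrier when the observed result types require one.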
  8978 bool
  8979 IonBuilder::getPropTryCache(bool *emitted, PropertyName *name,
  8980                             bool barrier, types::TemporaryTypeSet *types)
  8982     JS_ASSERT(*emitted == false);
  8984     MDefinition *obj = current->peek(-1);
  8986     // The input value must either be an object, or we should have strong suspicions
  8987     // that it can be safely unboxed to an object.
  8988     if (obj->type() != MIRType_Object) {
  8989         types::TemporaryTypeSet *types = obj->resultTypeSet();
  8990         if (!types || !types->objectOrSentinel())
  8991             return true;
  8994     // Since getters have no guaranteed return values, we must barrier in order to be
  8995     // able to attach stubs for them.
  8996     if (inspector->hasSeenAccessedGetter(pc))
  8997         barrier = true;
  8999     if (needsToMonitorMissingProperties(types))
  9000         barrier = true;
  9002     // Caches can read values from prototypes, so update the barrier to
  9003     // reflect such possible values.
  9004     if (!barrier)
  9005         barrier = PropertyReadOnPrototypeNeedsTypeBarrier(constraints(), obj, name, types);
  9007     current->pop();
  9008     MGetPropertyCache *load = MGetPropertyCache::New(alloc(), obj, name, barrier);
  9010     // Try to mark the cache as idempotent.
  9011     //
  9012     // In parallel execution, idempotency of caches is ignored, since we
  9013     // repeat the entire ForkJoin workload if we bail out. Note that it's
  9014     // overly restrictive to mark everything as idempotent, because we can
  9015     // treat non-idempotent caches in parallel as repeatable.
  9016     if (obj->type() == MIRType_Object && !invalidatedIdempotentCache() &&
  9017         info().executionMode() != ParallelExecution)
  9019         if (PropertyReadIsIdempotent(constraints(), obj, name))
  9020             load->setIdempotent();
  9023     if (JSOp(*pc) == JSOP_CALLPROP) {
  9024         if (!annotateGetPropertyCache(obj, load, obj->resultTypeSet(), types))
  9025             return false;
  9028     current->add(load);
  9029     current->push(load);
  9031     if (load->isEffectful() && !resumeAfter(load))
  9032         return false;
  9034     MIRType rvalType = types->getKnownMIRType();
  9035     if (barrier || IsNullOrUndefined(rvalType))
  9036         rvalType = MIRType_Value;
  9037     load->setResultType(rvalType);
  9039     if (!pushTypeBarrier(load, types, barrier))
  9040         return false;
  9042     *emitted = true;
  9043     return true;
  9046 bool
  9047 IonBuilder::needsToMonitorMissingProperties(types::TemporaryTypeSet *types)
  9049     // GetPropertyParIC and GetElementParIC cannot safely call
  9050     // TypeScript::Monitor to ensure that the observed type set contains
  9051     // undefined. To account for possible missing properties, which property
  9052     // types do not track, we must always insert a type barrier.
  9053     return (info().executionMode() == ParallelExecution &&
  9054             !types->hasType(types::Type::UndefinedType()));
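       // Store of a named property: try progressively more general
       // strategies (common setter, typed object field, definite slot,
       // Baseline-observed shapes, SetPropertyCache) before falling back
       // to a VM call via MCallSetProperty.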
  9057 bool
  9058 IonBuilder::jsop_setprop(PropertyName *name)
  9060     MDefinition *value = current->pop();
  9061     MDefinition *obj = current->pop();
  9063     bool emitted = false;
  9065     // Always use a call if we are doing the definite properties analysis and
  9066     // not actually emitting code, to simplify later analysis.
  9067     if (info().executionModeIsAnalysis()) {
  9068         MInstruction *ins = MCallSetProperty::New(alloc(), obj, value, name, script()->strict());
  9069         current->add(ins);
  9070         current->push(value);
  9071         return resumeAfter(ins);
  9074     // Add post barrier if needed.
  9075     if (NeedsPostBarrier(info(), value))
  9076         current->add(MPostWriteBarrier::New(alloc(), obj, value));
  9078     // Try to inline a common property setter, or make a call.
  9079     if (!setPropTryCommonSetter(&emitted, obj, name, value) || emitted)
  9080         return emitted;
  9082     types::TemporaryTypeSet *objTypes = obj->resultTypeSet();
  9083     bool barrier = PropertyWriteNeedsTypeBarrier(alloc(), constraints(), current, &obj, name, &value,
  9084                                                  /* canModify = */ true);
  9086     // Try to emit stores to known binary data blocks
  9087     if (!setPropTryTypedObject(&emitted, obj, name, value) || emitted)
  9088         return emitted;
  9090     // Try to emit store from definite slots.
  9091     if (!setPropTryDefiniteSlot(&emitted, obj, name, value, barrier, objTypes) || emitted)
  9092         return emitted;
  9094     // Try to emit a monomorphic/polymorphic store based on baseline caches.
  9095     if (!setPropTryInlineAccess(&emitted, obj, name, value, barrier, objTypes) || emitted)
  9096         return emitted;
  9098     // Try to emit a polymorphic cache.
  9099     if (!setPropTryCache(&emitted, obj, name, value, barrier, objTypes) || emitted)
  9100         return emitted;
  9102     // Emit call.
  9103     MInstruction *ins = MCallSetProperty::New(alloc(), obj, value, name, script()->strict());
  9104     current->add(ins);
  9105     current->push(value);
  9106     return resumeAfter(ins);
  9109 bool
  9110 IonBuilder::setPropTryCommonSetter(bool *emitted, MDefinition *obj,
  9111                                    PropertyName *name, MDefinition *value)
  9113     JS_ASSERT(*emitted == false);
  9115     Shape *lastProperty = nullptr;
  9116     JSFunction *commonSetter = nullptr;
  9117     JSObject *foundProto = inspector->commonSetPropFunction(pc, &lastProperty, &commonSetter);
  9118     if (!foundProto)
  9119         return true;
  9121     types::TemporaryTypeSet *objTypes = obj->resultTypeSet();
  9122     MDefinition *guard = testCommonGetterSetter(objTypes, name, /* isGetter = */ false,
  9123                                                 foundProto, lastProperty);
  9124     if (!guard)
  9125         return true;
  9127     bool isDOM = objTypes->isDOMClass();
  9129     // Emit common setter.
  9131     // Setters can be called even if the property write needs a type
  9132     // barrier, as calling the setter does not actually write any data
  9133     // properties.
  9135     // Try emitting dom call.
  9136     if (!setPropTryCommonDOMSetter(emitted, obj, value, commonSetter, isDOM))
  9137         return false;
  9139     if (*emitted)
  9140         return true;
  9142     // Don't call the setter with a primitive value.
  9143     if (objTypes->getKnownMIRType() != MIRType_Object) {
  9144         MGuardObject *guardObj = MGuardObject::New(alloc(), obj);
  9145         current->add(guardObj);
  9146         obj = guardObj;
  9149     // Dummy up the stack, as in getprop. We are pushing an extra value, so
  9150     // ensure there is enough space.
  9151     if (!current->ensureHasSlots(3))
  9152         return false;
  9154     pushConstant(ObjectValue(*commonSetter));
  9156     current->push(obj);
  9157     current->push(value);
  9159     // Call the setter. Note that we have to push the original value, not
  9160     // the setter's return value.
  9161     CallInfo callInfo(alloc(), false);
  9162     if (!callInfo.init(current, 1))
  9163         return false;
  9165     // Ensure that we know we are calling a setter in case we inline it.
  9166     callInfo.markAsSetter();
  9168     // Inline the setter if we can.
  9169     if (commonSetter->isInterpreted()) {
  9170         InliningDecision decision = makeInliningDecision(commonSetter, callInfo);
  9171         switch (decision) {
  9172           case InliningDecision_Error:
  9173             return false;
  9174           case InliningDecision_DontInline:
  9175             break;
  9176           case InliningDecision_Inline:
  9177             if (!inlineScriptedCall(callInfo, commonSetter))
  9178                 return false;
  9179             *emitted = true;
  9180             return true;
  9184     MCall *call = makeCallHelper(commonSetter, callInfo, false);
  9185     if (!call)
  9186         return false;
  9188     current->push(value);
  9189     if (!resumeAfter(call))
  9190         return false;
  9192     *emitted = true;
  9193     return true;
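       // Try to emit MSetDOMProperty when the common setter is a DOM
       // accessor accepted by testShouldDOMCall; otherwise return without
       // setting *emitted so the caller emits a normal setter call.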
  9196 bool
  9197 IonBuilder::setPropTryCommonDOMSetter(bool *emitted, MDefinition *obj,
  9198                                       MDefinition *value, JSFunction *setter,
  9199                                       bool isDOM)
  9201     JS_ASSERT(*emitted == false);
  9203     if (!isDOM)
  9204         return true;
  9206     types::TemporaryTypeSet *objTypes = obj->resultTypeSet();
  9207     if (!testShouldDOMCall(objTypes, setter, JSJitInfo::Setter))
  9208         return true;
  9210     // Emit SetDOMProperty.
  9211     JS_ASSERT(setter->jitInfo()->type() == JSJitInfo::Setter);
  9212     MSetDOMProperty *set = MSetDOMProperty::New(alloc(), setter->jitInfo()->setter, obj, value);
  9214     current->add(set);
  9215     current->push(value);
  9217     if (!resumeAfter(set))
  9218         return false;
  9220     *emitted = true;
  9221     return true;
  9224 bool
  9225 IonBuilder::setPropTryTypedObject(bool *emitted, MDefinition *obj,
  9226                                   PropertyName *name, MDefinition *value)
  9228     TypeDescrSet fieldDescrs;
  9229     int32_t fieldOffset;
  9230     size_t fieldIndex;
  9231     if (!lookupTypedObjectField(obj, name, &fieldOffset, &fieldDescrs,
  9232                                 &fieldIndex))
  9233         return false;
  9234     if (fieldDescrs.empty())
  9235         return true;
  9237     switch (fieldDescrs.kind()) {
  9238       case TypeDescr::X4:
  9239         // FIXME (bug 894104): store into a MIRType_float32x4 etc
  9240         return true;
  9242       case TypeDescr::Reference:
  9243       case TypeDescr::Struct:
  9244       case TypeDescr::SizedArray:
  9245       case TypeDescr::UnsizedArray:
  9246         // For now, only optimize storing scalars.
  9247         return true;
  9249       case TypeDescr::Scalar:
  9250         return setPropTryScalarPropOfTypedObject(emitted, obj, fieldOffset,
  9251                                                  value, fieldDescrs);
  9254     MOZ_ASSUME_UNREACHABLE("Unknown kind");
  9257 bool
  9258 IonBuilder::setPropTryScalarPropOfTypedObject(bool *emitted,
  9259                                               MDefinition *obj,
  9260                                               int32_t fieldOffset,
  9261                                               MDefinition *value,
  9262                                               TypeDescrSet fieldDescrs)
  9264     // Must always be storing the same scalar type
  9265     ScalarTypeDescr::Type fieldType;
  9266     if (!fieldDescrs.scalarType(&fieldType))
  9267         return true;
  9269     // OK! Perform the optimization.
  9271     if (!storeScalarTypedObjectValue(obj, constantInt(fieldOffset), fieldType, true, false, value))
  9272         return false;
  9274     current->push(value);
  9276     *emitted = true;
  9277     return true;
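       // The store counterpart of getPropTryDefiniteSlot: requires that no
       // type barrier is needed and that the property is writable with a
       // definite fixed slot; a write barrier is added when constraints
       // demand one.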
  9280 bool
  9281 IonBuilder::setPropTryDefiniteSlot(bool *emitted, MDefinition *obj,
  9282                                    PropertyName *name, MDefinition *value,
  9283                                    bool barrier, types::TemporaryTypeSet *objTypes)
  9285     JS_ASSERT(*emitted == false);
  9287     if (barrier)
  9288         return true;
  9290     types::HeapTypeSetKey property;
  9291     if (!getDefiniteSlot(obj->resultTypeSet(), name, &property))
  9292         return true;
  9294     if (property.nonWritable(constraints()))
  9295         return true;
  9297     MStoreFixedSlot *fixed = MStoreFixedSlot::New(alloc(), obj, property.maybeTypes()->definiteSlot(), value);
  9298     current->add(fixed);
  9299     current->push(value);
  9301     if (property.needsBarrier(constraints()))
  9302         fixed->setNeedsBarrier();
  9304     if (!resumeAfter(fixed))
  9305         return false;
  9307     *emitted = true;
  9308     return true;
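       // Try to emit shape-guarded stores based on the shapes Baseline
       // observed: a single guarded storeSlot in the monomorphic case, or
       // an MSetPropertyPolymorphic covering all observed shapes otherwise.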
  9311 bool
  9312 IonBuilder::setPropTryInlineAccess(bool *emitted, MDefinition *obj,
  9313                                    PropertyName *name,
  9314                                    MDefinition *value, bool barrier,
  9315                                    types::TemporaryTypeSet *objTypes)
  9317     JS_ASSERT(*emitted == false);
  9319     if (barrier)
  9320         return true;
  9322     BaselineInspector::ShapeVector shapes(alloc());
  9323     if (!inspector->maybeShapesForPropertyOp(pc, shapes))
  9324         return false;
  9326     if (shapes.empty())
  9327         return true;
  9329     if (!CanInlinePropertyOpShapes(shapes))
  9330         return true;
  9332     if (shapes.length() == 1) {
  9333         spew("Inlining monomorphic SETPROP");
  9335         // The Baseline IC was monomorphic, so we inline the property access as
  9336         // long as the shape is not in dictionary mode. We cannot be sure
  9337         // that the shape is still a lastProperty, and calling Shape::search
  9338         // on dictionary mode shapes that aren't lastProperty is invalid.
  9339         Shape *objShape = shapes[0];
  9340         obj = addShapeGuard(obj, objShape, Bailout_ShapeGuard);
  9342         Shape *shape = objShape->searchLinear(NameToId(name));
  9343         JS_ASSERT(shape);
  9345         bool needsBarrier = objTypes->propertyNeedsBarrier(constraints(), NameToId(name));
  9346         if (!storeSlot(obj, shape, value, needsBarrier))
  9347             return false;
  9348     } else {
  9349         JS_ASSERT(shapes.length() > 1);
  9350         spew("Inlining polymorphic SETPROP");
  9352         MSetPropertyPolymorphic *ins = MSetPropertyPolymorphic::New(alloc(), obj, value);
  9353         current->add(ins);
  9354         current->push(value);
  9356         for (size_t i = 0; i < shapes.length(); i++) {
  9357             Shape *objShape = shapes[i];
  9358             Shape *shape = objShape->searchLinear(NameToId(name));
  9359             JS_ASSERT(shape);
  9360             if (!ins->addShape(objShape, shape))
  9361                 return false;
  9364         if (objTypes->propertyNeedsBarrier(constraints(), NameToId(name)))
  9365             ins->setNeedsBarrier();
  9367         if (!resumeAfter(ins))
  9368             return false;
  9371     *emitted = true;
  9372     return true;
  9375 bool
  9376 IonBuilder::setPropTryCache(bool *emitted, MDefinition *obj,
  9377                             PropertyName *name, MDefinition *value,
  9378                             bool barrier, types::TemporaryTypeSet *objTypes)
  9380     JS_ASSERT(*emitted == false);
  9382     // Emit SetPropertyCache.
  9383     MSetPropertyCache *ins = MSetPropertyCache::New(alloc(), obj, value, name, script()->strict(), barrier);
  9385     if (!objTypes || objTypes->propertyNeedsBarrier(constraints(), NameToId(name)))
  9386         ins->setNeedsBarrier();
  9388     current->add(ins);
  9389     current->push(value);
  9391     if (!resumeAfter(ins))
  9392         return false;
  9394     *emitted = true;
  9395     return true;
  9398 bool
  9399 IonBuilder::jsop_delprop(PropertyName *name)
  9401     MDefinition *obj = current->pop();
  9403     MInstruction *ins = MDeleteProperty::New(alloc(), obj, name);
  9405     current->add(ins);
  9406     current->push(ins);
  9408     return resumeAfter(ins);
  9411 bool
  9412 IonBuilder::jsop_delelem()
  9414     MDefinition *index = current->pop();
  9415     MDefinition *obj = current->pop();
  9417     MDeleteElement *ins = MDeleteElement::New(alloc(), obj, index);
  9418     current->add(ins);
  9419     current->push(ins);
  9421     return resumeAfter(ins);
  9424 bool
  9425 IonBuilder::jsop_regexp(RegExpObject *reobj)
  9427     // JS semantics require regular expression literals to create different
  9428     // objects every time they execute. We only need to do this cloning if the
  9429     // script could actually observe the effect of such cloning, for instance
  9430     // by getting or setting properties on it.
  9431     //
  9432     // First, make sure the regex is one we can safely optimize. Lowering can
  9433     // then check if this regex object only flows into known natives and can
  9434     // avoid cloning in this case.
  9436     bool mustClone = true;
  9437     types::TypeObjectKey *typeObj = types::TypeObjectKey::get(&script()->global());
  9438     if (!typeObj->hasFlags(constraints(), types::OBJECT_FLAG_REGEXP_FLAGS_SET)) {
  9439         RegExpStatics *res = script()->global().getRegExpStatics();
  9441         DebugOnly<uint32_t> origFlags = reobj->getFlags();
  9442         DebugOnly<uint32_t> staticsFlags = res->getFlags();
  9443         JS_ASSERT((origFlags & staticsFlags) == staticsFlags);
  9445         if (!reobj->global() && !reobj->sticky())
  9446             mustClone = false;
  9449     MRegExp *regexp = MRegExp::New(alloc(), constraints(), reobj, mustClone);
  9450     current->add(regexp);
  9451     current->push(regexp);
  9453     return true;
  9456 bool
  9457 IonBuilder::jsop_object(JSObject *obj)
  9459     if (options.cloneSingletons()) {
  9460         MCloneLiteral *clone = MCloneLiteral::New(alloc(), constant(ObjectValue(*obj)));
  9461         current->add(clone);
  9462         current->push(clone);
  9463         return resumeAfter(clone);
  9466     compartment->setSingletonsAsValues();
  9467     pushConstant(ObjectValue(*obj));
  9468     return true;
  9471 bool
  9472 IonBuilder::jsop_lambda(JSFunction *fun)
  9474     MOZ_ASSERT(analysis().usesScopeChain());
  9475     MOZ_ASSERT(!fun->isArrow());
  9477     if (fun->isNative() && IsAsmJSModuleNative(fun->native()))
  9478         return abort("asm.js module function");
  9480     MLambda *ins = MLambda::New(alloc(), constraints(), current->scopeChain(), fun);
  9481     current->add(ins);
  9482     current->push(ins);
  9484     return resumeAfter(ins);
  9487 bool
  9488 IonBuilder::jsop_lambda_arrow(JSFunction *fun)
  9490     MOZ_ASSERT(analysis().usesScopeChain());
  9491     MOZ_ASSERT(fun->isArrow());
  9492     MOZ_ASSERT(!fun->isNative());
  9494     MDefinition *thisDef = current->pop();
  9496     MLambdaArrow *ins = MLambdaArrow::New(alloc(), constraints(), current->scopeChain(),
  9497                                           thisDef, fun);
  9498     current->add(ins);
  9499     current->push(ins);
  9501     return resumeAfter(ins);
  9504 bool
  9505 IonBuilder::jsop_setarg(uint32_t arg)
  9507     // To handle this case, we should spill the arguments to the space where
  9508     // actual arguments are stored. The tricky part is that if we add a MIR
  9509     // to wrap the spilling action, we don't want the spilling to be
  9510     // captured by the GETARG and by the resume point, only by
  9511     // MGetFrameArgument.
  9512     JS_ASSERT(analysis_.hasSetArg());
  9513     MDefinition *val = current->peek(-1);
  9515     // If an arguments object is in use, and it aliases formals, then all SETARGs
  9516     // must go through the arguments object.
  9517     if (info().argsObjAliasesFormals()) {
  9518         if (NeedsPostBarrier(info(), val))
  9519             current->add(MPostWriteBarrier::New(alloc(), current->argumentsObject(), val));
  9520         current->add(MSetArgumentsObjectArg::New(alloc(), current->argumentsObject(),
  9521                                                  GET_ARGNO(pc), val));
  9522         return true;
  9525     // :TODO: if hasArguments() is true, and the script has a JSOP_SETARG, then
  9526     // convert all arg accesses to go through the arguments object. (see Bug 957475)
  9527     if (info().hasArguments())
  9528         return abort("NYI: arguments & setarg.");
  9530     // Otherwise, if a magic arguments is in use, and it aliases formals, and there exist
  9531     // arguments[...] GETELEM expressions in the script, then SetFrameArgument must be used.
  9532     // If no arguments[...] GETELEM expressions are in the script, and an argsobj is not
  9533     // required, then it means that any aliased argument set can never be observed, and
  9534     // the frame does not actually need to be updated with the new arg value.
  9535     if (info().argumentsAliasesFormals()) {
  9536         // JSOP_SETARG with magic arguments within inline frames is not yet supported.
  9537         JS_ASSERT(script()->uninlineable() && !isInlineBuilder());
  9539         MSetFrameArgument *store = MSetFrameArgument::New(alloc(), arg, val);
  9540         modifiesFrameArguments_ = true;
  9541         current->add(store);
  9542         current->setArg(arg);
  9543         return true;
  9546     // If this assignment is at the start of the function and is coercing
  9547     // the original value for the argument which was passed in, loosen
  9548     // the type information for that original argument if it is currently
  9549     // empty due to originally executing in the interpreter.
  9550     if (graph().numBlocks() == 1 &&
  9551         (val->isBitOr() || val->isBitAnd() || val->isMul() /* for JSOP_POS */))
  9553          for (size_t i = 0; i < val->numOperands(); i++) {
  9554             MDefinition *op = val->getOperand(i);
  9555             if (op->isParameter() &&
  9556                 op->toParameter()->index() == (int32_t)arg &&
  9557                 op->resultTypeSet() &&
  9558                 op->resultTypeSet()->empty())
  9560                 bool otherUses = false;
  9561                 for (MUseDefIterator iter(op); iter; iter++) {
  9562                     MDefinition *def = iter.def();
  9563                     if (def == val)
  9564                         continue;
  9565                     otherUses = true;
  9567                 if (!otherUses) {
  9568                     JS_ASSERT(op->resultTypeSet() == &argTypes[arg]);
  9569                     argTypes[arg].addType(types::Type::UnknownType(), alloc_->lifoAlloc());
  9570                     if (val->isMul()) {
  9571                         val->setResultType(MIRType_Double);
  9572                         val->toMul()->setSpecialization(MIRType_Double);
  9573                     } else {
  9574                         JS_ASSERT(val->type() == MIRType_Int32);
  9576                     val->setResultTypeSet(nullptr);
  9582     current->setArg(arg);
  9583     return true;
  9586 bool
  9587 IonBuilder::jsop_defvar(uint32_t index)
  9589     JS_ASSERT(JSOp(*pc) == JSOP_DEFVAR || JSOp(*pc) == JSOP_DEFCONST);
  9591     PropertyName *name = script()->getName(index);
  9593     // Bake in attrs.
  9594     unsigned attrs = JSPROP_ENUMERATE | JSPROP_PERMANENT;
  9595     if (JSOp(*pc) == JSOP_DEFCONST)
  9596         attrs |= JSPROP_READONLY;
  9598     // Pass the ScopeChain.
  9599     JS_ASSERT(analysis().usesScopeChain());
  9601     // Bake the name pointer into the MDefVar.
  9602     MDefVar *defvar = MDefVar::New(alloc(), name, attrs, current->scopeChain());
  9603     current->add(defvar);
  9605     return resumeAfter(defvar);
  9608 bool
  9609 IonBuilder::jsop_deffun(uint32_t index)
  9611     JSFunction *fun = script()->getFunction(index);
  9612     if (fun->isNative() && IsAsmJSModuleNative(fun->native()))
  9613         return abort("asm.js module function");
  9615     JS_ASSERT(analysis().usesScopeChain());
  9617     MDefFun *deffun = MDefFun::New(alloc(), fun, current->scopeChain());
  9618     current->add(deffun);
  9620     return resumeAfter(deffun);
  9623 bool
  9624 IonBuilder::jsop_this()
  9626     if (!info().funMaybeLazy())
  9627         return abort("JSOP_THIS outside of a JSFunction.");
  9629     if (info().funMaybeLazy()->isArrow()) {
  9630         // Arrow functions store their lexical |this| in an extended slot.
  9631         MLoadArrowThis *thisObj = MLoadArrowThis::New(alloc(), getCallee());
  9632         current->add(thisObj);
  9633         current->push(thisObj);
  9634         return true;
  9637     if (script()->strict() || info().funMaybeLazy()->isSelfHostedBuiltin()) {
  9638         // No need to wrap primitive |this| in strict mode or self-hosted code.
  9639         current->pushSlot(info().thisSlot());
  9640         return true;
  9643     if (thisTypes->getKnownMIRType() == MIRType_Object ||
  9644         (thisTypes->empty() && baselineFrame_ && baselineFrame_->thisType.isSomeObject()))
  9646         // This is safe, because if the entry type of |this| is an object, it
  9647         // will necessarily be an object throughout the entire function. OSR
  9648         // can introduce a phi, but this phi will be specialized.
  9649         current->pushSlot(info().thisSlot());
  9650         return true;
  9653     // If we are doing an analysis, we might not yet know the type of |this|.
  9654     // Instead of bailing out just push the |this| slot, as this code won't
  9655     // actually execute and it does not matter whether |this| is primitive.
  9656     if (info().executionModeIsAnalysis()) {
  9657         current->pushSlot(info().thisSlot());
  9658         return true;
  9661     // Hard case: |this| may be a primitive we have to wrap.
  9662     MDefinition *def = current->getSlot(info().thisSlot());
  9664     if (def->type() == MIRType_Object) {
  9665         // If we already computed a |this| object, we can reuse it.
  9666         current->push(def);
  9667         return true;
  9670     MComputeThis *thisObj = MComputeThis::New(alloc(), def);
  9671     current->add(thisObj);
  9672     current->push(thisObj);
  9674     current->setSlot(info().thisSlot(), thisObj);
  9676     return resumeAfter(thisObj);
  9679 bool
  9680 IonBuilder::jsop_typeof()
  9682     MDefinition *input = current->pop();
  9683     MTypeOf *ins = MTypeOf::New(alloc(), input, input->type());
  9685     ins->infer();
  9687     current->add(ins);
  9688     current->push(ins);
  9690     return true;
  9693 bool
  9694 IonBuilder::jsop_toid()
  9696     // No-op if the index is an integer.
  9697     if (current->peek(-1)->type() == MIRType_Int32)
  9698         return true;
  9700     MDefinition *index = current->pop();
  9701     MToId *ins = MToId::New(alloc(), current->peek(-1), index);
  9703     current->add(ins);
  9704     current->push(ins);
  9706     return resumeAfter(ins);
  9709 bool
  9710 IonBuilder::jsop_iter(uint8_t flags)
  9712     if (flags != JSITER_ENUMERATE)
  9713         nonStringIteration_ = true;
  9715     MDefinition *obj = current->pop();
  9716     MInstruction *ins = MIteratorStart::New(alloc(), obj, flags);
  9718     if (!iterators_.append(ins))
  9719         return false;
  9721     current->add(ins);
  9722     current->push(ins);
  9724     return resumeAfter(ins);
  9727 bool
  9728 IonBuilder::jsop_iternext()
  9730     MDefinition *iter = current->peek(-1);
  9731     MInstruction *ins = MIteratorNext::New(alloc(), iter);
  9733     current->add(ins);
  9734     current->push(ins);
  9736     if (!resumeAfter(ins))
  9737         return false;
  9739     if (!nonStringIteration_ && !inspector->hasSeenNonStringIterNext(pc)) {
  9740         ins = MUnbox::New(alloc(), ins, MIRType_String, MUnbox::Fallible, Bailout_BaselineInfo);
  9741         current->add(ins);
  9742         current->rewriteAtDepth(-1, ins);
  9745     return true;
  9748 bool
  9749 IonBuilder::jsop_itermore()
  9751     MDefinition *iter = current->peek(-1);
  9752     MInstruction *ins = MIteratorMore::New(alloc(), iter);
  9754     current->add(ins);
  9755     current->push(ins);
  9757     return resumeAfter(ins);
  9760 bool
  9761 IonBuilder::jsop_iterend()
  9763     MDefinition *iter = current->pop();
  9764     MInstruction *ins = MIteratorEnd::New(alloc(), iter);
  9766     current->add(ins);
  9768     return resumeAfter(ins);
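       // Walk `hops` levels up the scope chain by emitting a chain of
       // MEnclosingScope instructions starting from the scope chain slot.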
  9771 MDefinition *
  9772 IonBuilder::walkScopeChain(unsigned hops)
  9774     MDefinition *scope = current->getSlot(info().scopeChainSlot());
  9776     for (unsigned i = 0; i < hops; i++) {
  9777         MInstruction *ins = MEnclosingScope::New(alloc(), scope);
  9778         current->add(ins);
  9779         scope = ins;
  9782     return scope;
  9785 bool
  9786 IonBuilder::hasStaticScopeObject(ScopeCoordinate sc, JSObject **pcall)
  9788     JSScript *outerScript = ScopeCoordinateFunctionScript(script(), pc);
  9789     if (!outerScript || !outerScript->treatAsRunOnce())
  9790         return false;
  9792     types::TypeObjectKey *funType =
  9793             types::TypeObjectKey::get(outerScript->functionNonDelazifying());
  9794     if (funType->hasFlags(constraints(), types::OBJECT_FLAG_RUNONCE_INVALIDATED))
  9795         return false;
  9797     // The script this aliased var operation is accessing will run only once,
  9798     // so there will be only one call object and the aliased var access can be
  9799     // compiled in the same manner as a global access. We still need to find
  9800     // the call object though.
  9802     // Look for the call object on the current script's function's scope chain.
  9803     // If the current script is inner to the outer script and the function has
  9804     // singleton type then it should show up here.
  9806     MDefinition *scope = current->getSlot(info().scopeChainSlot());
  9807     scope->setImplicitlyUsedUnchecked();
  9809     JSObject *environment = script()->functionNonDelazifying()->environment();
  9810     while (environment && !environment->is<GlobalObject>()) {
  9811         if (environment->is<CallObject>() &&
  9812             !environment->as<CallObject>().isForEval() &&
  9813             environment->as<CallObject>().callee().nonLazyScript() == outerScript)
  9815             JS_ASSERT(environment->hasSingletonType());
  9816             *pcall = environment;
  9817             return true;
  9819         environment = environment->enclosingScope();
  9822     // Look for the call object on the current frame, if we are compiling the
  9823     // outer script itself. Don't do this if we are at entry to the outer
  9824     // script, as the call object we see will not be the real one --- after
  9825     // entering the Ion code a different call object will be created.
  9827     if (script() == outerScript && baselineFrame_ && info().osrPc()) {
  9828         JSObject *singletonScope = baselineFrame_->singletonScopeChain;
  9829         if (singletonScope &&
  9830             singletonScope->is<CallObject>() &&
  9831             singletonScope->as<CallObject>().callee().nonLazyScript() == outerScript)
  9833             JS_ASSERT(singletonScope->hasSingletonType());
  9834             *pcall = singletonScope;
  9835             return true;
  9839     return true;
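       // Read of an aliased variable: if the defining script runs only once
       // its call object can be treated like a global and read through
       // getStaticName; otherwise walk the scope chain and load the slot,
       // pushing a type barrier on the result.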
  9842 bool
  9843 IonBuilder::jsop_getaliasedvar(ScopeCoordinate sc)
  9845     JSObject *call = nullptr;
  9846     if (hasStaticScopeObject(sc, &call) && call) {
  9847         PropertyName *name = ScopeCoordinateName(scopeCoordinateNameCache, script(), pc);
  9848         bool succeeded;
  9849         if (!getStaticName(call, name, &succeeded))
  9850             return false;
  9851         if (succeeded)
  9852             return true;
  9855     MDefinition *obj = walkScopeChain(sc.hops());
  9857     Shape *shape = ScopeCoordinateToStaticScopeShape(script(), pc);
  9859     MInstruction *load;
  9860     if (shape->numFixedSlots() <= sc.slot()) {
  9861         MInstruction *slots = MSlots::New(alloc(), obj);
  9862         current->add(slots);
  9864         load = MLoadSlot::New(alloc(), slots, sc.slot() - shape->numFixedSlots());
  9865     } else {
  9866         load = MLoadFixedSlot::New(alloc(), obj, sc.slot());
  9869     current->add(load);
  9870     current->push(load);
  9872     types::TemporaryTypeSet *types = bytecodeTypes(pc);
  9873     return pushTypeBarrier(load, types, true);
  9876 bool
  9877 IonBuilder::jsop_setaliasedvar(ScopeCoordinate sc)
  9879     JSObject *call = nullptr;
  9880     if (hasStaticScopeObject(sc, &call)) {
  9881         uint32_t depth = current->stackDepth() + 1;
  9882         if (depth > current->nslots()) {
  9883             if (!current->increaseSlots(depth - current->nslots()))
  9884                 return false;
  9886         MDefinition *value = current->pop();
  9887         PropertyName *name = ScopeCoordinateName(scopeCoordinateNameCache, script(), pc);
  9889         if (call) {
  9890             // Push the object on the stack to match the bound object expected in
  9891             // the global and property set cases.
  9892             pushConstant(ObjectValue(*call));
  9893             current->push(value);
  9894             return setStaticName(call, name);
  9897         // The call object has type information we need to respect but we
  9898         // couldn't find it. Just do a normal property assign.
  9899         MDefinition *obj = walkScopeChain(sc.hops());
  9900         current->push(obj);
  9901         current->push(value);
  9902         return jsop_setprop(name);
  9905     MDefinition *rval = current->peek(-1);
  9906     MDefinition *obj = walkScopeChain(sc.hops());
  9908     Shape *shape = ScopeCoordinateToStaticScopeShape(script(), pc);
  9910     if (NeedsPostBarrier(info(), rval))
  9911         current->add(MPostWriteBarrier::New(alloc(), obj, rval));
  9913     MInstruction *store;
  9914     if (shape->numFixedSlots() <= sc.slot()) {
  9915         MInstruction *slots = MSlots::New(alloc(), obj);
  9916         current->add(slots);
  9918         store = MStoreSlot::NewBarriered(alloc(), slots, sc.slot() - shape->numFixedSlots(), rval);
  9919     } else {
  9920         store = MStoreFixedSlot::NewBarriered(alloc(), obj, sc.slot(), rval);
  9923     current->add(store);
  9924     return resumeAfter(store);
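       // JSOP_IN: use the dense-element fast path when the object is a
       // dense native with no extra indexed properties; otherwise emit a
       // general MIn instruction.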
  9927 bool
  9928 IonBuilder::jsop_in()
  9930     MDefinition *obj = current->peek(-1);
  9931     MDefinition *id = current->peek(-2);
  9933     if (ElementAccessIsDenseNative(obj, id) &&
  9934         !ElementAccessHasExtraIndexedProperty(constraints(), obj))
  9936         return jsop_in_dense();
  9939     current->pop();
  9940     current->pop();
  9941     MIn *ins = MIn::New(alloc(), id, obj);
  9943     current->add(ins);
  9944     current->push(ins);
  9946     return resumeAfter(ins);
  9949 bool
  9950 IonBuilder::jsop_in_dense()
  9952     MDefinition *obj = current->pop();
  9953     MDefinition *id = current->pop();
  9955     bool needsHoleCheck = !ElementAccessIsPacked(constraints(), obj);
  9957     // Ensure id is an integer.
  9958     MInstruction *idInt32 = MToInt32::New(alloc(), id);
  9959     current->add(idInt32);
  9960     id = idInt32;
  9962     // Get the elements vector.
  9963     MElements *elements = MElements::New(alloc(), obj);
  9964     current->add(elements);
  9966     MInitializedLength *initLength = MInitializedLength::New(alloc(), elements);
  9967     current->add(initLength);
  9969     // Check if id < initLength and elem[id] not a hole.
  9970     MInArray *ins = MInArray::New(alloc(), elements, id, initLength, obj, needsHoleCheck);
  9972     current->add(ins);
  9973     current->push(ins);
  9975     return true;
  9978 bool
  9979 IonBuilder::jsop_instanceof()
  9981     MDefinition *rhs = current->pop();
  9982     MDefinition *obj = current->pop();
  9984     // If this is an 'x instanceof function' operation and we can determine the
  9985     // exact function and prototype object being tested for, use a typed path.
  9986     do {
  9987         types::TemporaryTypeSet *rhsTypes = rhs->resultTypeSet();
  9988         JSObject *rhsObject = rhsTypes ? rhsTypes->getSingleton() : nullptr;
  9989         if (!rhsObject || !rhsObject->is<JSFunction>() || rhsObject->isBoundFunction())
  9990             break;
  9992         types::TypeObjectKey *rhsType = types::TypeObjectKey::get(rhsObject);
  9993         if (rhsType->unknownProperties())
  9994             break;
  9996         types::HeapTypeSetKey protoProperty =
  9997             rhsType->property(NameToId(names().prototype));
  9998         JSObject *protoObject = protoProperty.singleton(constraints());
  9999         if (!protoObject)
 10000             break;
 10002         rhs->setImplicitlyUsedUnchecked();
 10004         MInstanceOf *ins = MInstanceOf::New(alloc(), obj, protoObject);
 10006         current->add(ins);
 10007         current->push(ins);
 10009         return resumeAfter(ins);
 10010     } while (false);
 10012     MCallInstanceOf *ins = MCallInstanceOf::New(alloc(), obj, rhs);
 10014     current->add(ins);
 10015     current->push(ins);
 10017     return resumeAfter(ins);
 10020 MInstruction *
 10021 IonBuilder::addConvertElementsToDoubles(MDefinition *elements)
 10023     MInstruction *convert = MConvertElementsToDoubles::New(alloc(), elements);
 10024     current->add(convert);
 10025     return convert;
 10028 MInstruction *
 10029 IonBuilder::addBoundsCheck(MDefinition *index, MDefinition *length)
 10031     MInstruction *check = MBoundsCheck::New(alloc(), index, length);
 10032     current->add(check);
 10034     // If a bounds check failed in the past, don't optimize bounds checks.
 10035     if (failedBoundsCheck_)
 10036         check->setNotMovable();
 10038     return check;
 10041 MInstruction *
 10042 IonBuilder::addShapeGuard(MDefinition *obj, Shape *const shape, BailoutKind bailoutKind)
 10044     MGuardShape *guard = MGuardShape::New(alloc(), obj, shape, bailoutKind);
 10045     current->add(guard);
 10047     // If a shape guard failed in the past, don't optimize shape guard.
 10048     if (failedShapeGuard_)
 10049         guard->setNotMovable();
 10051     return guard;
 10054 types::TemporaryTypeSet *
 10055 IonBuilder::bytecodeTypes(jsbytecode *pc)
 10057     return types::TypeScript::BytecodeTypes(script(), pc, bytecodeTypeMap, &typeArrayHint, typeArray);
 10060 TypeDescrSetHash *
 10061 IonBuilder::getOrCreateDescrSetHash()
 10063     if (!descrSetHash_) {
 10064         TypeDescrSetHash *hash =
 10065             alloc_->lifoAlloc()->new_<TypeDescrSetHash>(alloc());
 10066         if (!hash || !hash->init())
 10067             return nullptr;
 10069         descrSetHash_ = hash;
 10071     return descrSetHash_;
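       // Recover the set of type descriptors for a typed object value,
       // either directly from the MNewDerivedTypedObject that produced it
       // or from its TI result type set.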
 10074 bool
 10075 IonBuilder::lookupTypeDescrSet(MDefinition *typedObj,
 10076                                         TypeDescrSet *out)
 10078     *out = TypeDescrSet(); // default to unknown
 10080     // Extract TypeDescrSet directly if we can
 10081     if (typedObj->isNewDerivedTypedObject()) {
 10082         *out = typedObj->toNewDerivedTypedObject()->set();
 10083         return true;
 10086     types::TemporaryTypeSet *types = typedObj->resultTypeSet();
 10087     return typeSetToTypeDescrSet(types, out);
 10090 bool
 10091 IonBuilder::typeSetToTypeDescrSet(types::TemporaryTypeSet *types,
 10092                                   TypeDescrSet *out)
 10094     // Extract TypeDescrSet directly if we can
 10095     if (!types || types->getKnownMIRType() != MIRType_Object)
 10096         return true;
 10098     // And only known objects.
 10099     if (types->unknownObject())
 10100         return true;
 10102     TypeDescrSetBuilder set;
 10103     for (uint32_t i = 0; i < types->getObjectCount(); i++) {
 10104         types::TypeObject *type = types->getTypeObject(i);
 10105         if (!type || type->unknownProperties())
 10106             return true;
 10108         if (!type->hasTypedObject())
 10109             return true;
 10111         TypeDescr &descr = type->typedObject()->descr();
 10112         if (!set.insert(&descr))
 10113             return false;
 10116     return set.build(*this, out);
 10119 MDefinition *
 10120 IonBuilder::loadTypedObjectType(MDefinition *typedObj)
 10122     // Shortcircuit derived type objects, meaning the intermediate
 10123     // objects created to represent `a.b` in an expression like
 10124     // `a.b.c`. In that case, the type object can be simply pulled
 10125     // from the operands of that instruction.
 10126     if (typedObj->isNewDerivedTypedObject())
 10127         return typedObj->toNewDerivedTypedObject()->type();
 10129     MInstruction *load = MLoadFixedSlot::New(alloc(), typedObj,
 10130                                              JS_TYPEDOBJ_SLOT_TYPE_DESCR);
 10131     current->add(load);
 10132     return load;
 10135 // Given a typed object `typedObj` and an offset `offset` into that
 10136 // object's data, returns another typed object and adjusted offset
 10137 // where the data can be found. Often, these returned values are the
 10138 // same as the inputs, but in cases where intermediate derived type
 10139 // objects have been created, the return values will remove
 10140 // intermediate layers (often rendering those derived type objects
 10141 // into dead code).
 10142 void
 10143 IonBuilder::loadTypedObjectData(MDefinition *typedObj,
 10144                                 MDefinition *offset,
 10145                                 bool canBeNeutered,
 10146                                 MDefinition **owner,
 10147                                 MDefinition **ownerOffset)
 10149     JS_ASSERT(typedObj->type() == MIRType_Object);
 10150     JS_ASSERT(offset->type() == MIRType_Int32);
 10152     // Shortcircuit derived type objects, meaning the intermediate
 10153     // objects created to represent `a.b` in an expression like
 10154 // `a.b.c`. In that case, the owner and a base offset can be
 10155     // pulled from the operands of the instruction and combined with
 10156     // `offset`.
 10157     if (typedObj->isNewDerivedTypedObject()) {
 10158         MNewDerivedTypedObject *ins = typedObj->toNewDerivedTypedObject();
 10160         // Note: we never need to check for neutering on this path,
 10161         // because when we create the derived typed object, we check
 10162         // for neutering there, if needed.
 10164         MAdd *offsetAdd = MAdd::NewAsmJS(alloc(), ins->offset(), offset, MIRType_Int32);
 10165         current->add(offsetAdd);
 10167         *owner = ins->owner();
 10168         *ownerOffset = offsetAdd;
 10169         return;
 10172     if (canBeNeutered) {
 10173         MNeuterCheck *chk = MNeuterCheck::New(alloc(), typedObj);
 10174         current->add(chk);
 10175         typedObj = chk;
 10178     *owner = typedObj;
 10179     *ownerOffset = offset;
 10182 // Takes as input a typed object and a byte offset into that typed
 10183 // object's memory. Returns the elements pointer and a scaled offset
 10184 // at which the data can be found. The scaled offset is expressed in
 10185 // units of `unit`; when working with typed array MIR, this is
 10186 // typically the alignment.
 10187 void
 10188 IonBuilder::loadTypedObjectElements(MDefinition *typedObj,
 10189                                     MDefinition *offset,
 10190                                     int32_t unit,
 10191                                     bool canBeNeutered,
 10192                                     MDefinition **ownerElements,
 10193                                     MDefinition **ownerScaledOffset)
 10195     MDefinition *owner, *ownerOffset;
 10196     loadTypedObjectData(typedObj, offset, canBeNeutered, &owner, &ownerOffset);
 10198     // Load the element data.
 10199     MTypedObjectElements *elements = MTypedObjectElements::New(alloc(), owner);
 10200     current->add(elements);
 10202     // Scale to a different unit for compat with typed array MIRs.
 10203     if (unit != 1) {
 10204         MDiv *scaledOffset = MDiv::NewAsmJS(alloc(), ownerOffset, constantInt(unit), MIRType_Int32,
 10205                                             /* unsignd = */ false);
 10206         current->add(scaledOffset);
 10207         *ownerScaledOffset = scaledOffset;
 10208     } else {
 10209         *ownerScaledOffset = ownerOffset;
 10212     *ownerElements = elements;
 10215 // Looks up the offset/descriptor set of the field `name`, given the
 10216 // typed object `typedObj` that owns it. Note that even when true is
 10217 // returned, `*fieldDescrs` might be empty if no useful type/offset
 10218 // pair could be determined.
 10219 bool
 10220 IonBuilder::lookupTypedObjectField(MDefinition *typedObj,
 10221                                    PropertyName *name,
 10222                                    int32_t *fieldOffset,
 10223                                    TypeDescrSet *fieldDescrs,
 10224                                    size_t *fieldIndex)
 10226     TypeDescrSet objDescrs;
 10227     if (!lookupTypeDescrSet(typedObj, &objDescrs))
 10228         return false;
 10230     // Must be accessing a struct.
 10231     if (!objDescrs.allOfKind(TypeDescr::Struct))
 10232         return true;
 10234     // Determine the type/offset of the field `name`, if any.
 10235     int32_t offset;
 10236     if (!objDescrs.fieldNamed(*this, NameToId(name), &offset,
 10237                               fieldDescrs, fieldIndex))
 10238         return false;
 10239     if (fieldDescrs->empty())
 10240         return true;
 10242     JS_ASSERT(offset >= 0);
 10243     *fieldOffset = offset;
 10245     return true;
 10248 MDefinition *
 10249 IonBuilder::typeObjectForElementFromArrayStructType(MDefinition *typeObj)
 10251     MInstruction *elemType = MLoadFixedSlot::New(alloc(), typeObj, JS_DESCR_SLOT_ARRAY_ELEM_TYPE);
 10252     current->add(elemType);
 10254     MInstruction *unboxElemType = MUnbox::New(alloc(), elemType, MIRType_Object, MUnbox::Infallible);
 10255     current->add(unboxElemType);
 10257     return unboxElemType;
 10260 MDefinition *
 10261 IonBuilder::typeObjectForFieldFromStructType(MDefinition *typeObj,
 10262                                              size_t fieldIndex)
 10264     // Load list of field type objects.
 10266     MInstruction *fieldTypes = MLoadFixedSlot::New(alloc(), typeObj, JS_DESCR_SLOT_STRUCT_FIELD_TYPES);
 10267     current->add(fieldTypes);
 10269     MInstruction *unboxFieldTypes = MUnbox::New(alloc(), fieldTypes, MIRType_Object, MUnbox::Infallible);
 10270     current->add(unboxFieldTypes);
 10272     // Index into list with index of field.
 10274     MInstruction *fieldTypesElements = MElements::New(alloc(), unboxFieldTypes);
 10275     current->add(fieldTypesElements);
 10277     MConstant *fieldIndexDef = constantInt(fieldIndex);
 10279     MInstruction *fieldType = MLoadElement::New(alloc(), fieldTypesElements, fieldIndexDef, false, false);
 10280     current->add(fieldType);
 10282     MInstruction *unboxFieldType = MUnbox::New(alloc(), fieldType, MIRType_Object, MUnbox::Infallible);
 10283     current->add(unboxFieldType);
 10285     return unboxFieldType;
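       // Store a scalar value into a typed object at the given byte offset:
       // locate the owner's elements and scaled index, clamp for
       // Uint8Clamped, and emit an MStoreTypedArrayElement (optionally
       // marked racy).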
 10288 bool
 10289 IonBuilder::storeScalarTypedObjectValue(MDefinition *typedObj,
 10290                                         MDefinition *byteOffset,
 10291                                         ScalarTypeDescr::Type type,
 10292                                         bool canBeNeutered,
 10293                                         bool racy,
 10294                                         MDefinition *value)
 10296     // Find location within the owner object.
 10297     MDefinition *elements, *scaledOffset;
 10298     size_t alignment = ScalarTypeDescr::alignment(type);
 10299     loadTypedObjectElements(typedObj, byteOffset, alignment, canBeNeutered,
 10300                             &elements, &scaledOffset);
 10302     // Clamp value to [0, 255] when type is Uint8Clamped
 10303     MDefinition *toWrite = value;
 10304     if (type == ScalarTypeDescr::TYPE_UINT8_CLAMPED) {
 10305         toWrite = MClampToUint8::New(alloc(), value);
 10306         current->add(toWrite->toInstruction());
 10309     MStoreTypedArrayElement *store =
 10310         MStoreTypedArrayElement::New(alloc(), elements, scaledOffset, toWrite,
 10311                                      type);
 10312     if (racy)
 10313         store->setRacy();
 10314     current->add(store);
 10316     return true;
 10319 MConstant *
 10320 IonBuilder::constant(const Value &v)
 10322     MConstant *c = MConstant::New(alloc(), v, constraints());
 10323     current->add(c);
 10324     return c;
 10327 MConstant *
 10328 IonBuilder::constantInt(int32_t i)
 10330     return constant(Int32Value(i));
 10333 MDefinition *
 10334 IonBuilder::getCallee()
 10336     if (inliningDepth_ == 0) {
 10337         MInstruction *callee = MCallee::New(alloc());
 10338         current->add(callee);
 10339         return callee;
 10342     return inlineCallInfo_->fun();
