js/src/jsanalyze.cpp

author:      Michael Schloh von Bennewitz <michael@schloh.com>
date:        Wed, 31 Dec 2014 06:09:35 +0100
changeset:   0:6474c204b198
permissions: -rw-r--r--

Cloned upstream origin tor-browser at tor-browser-31.3.0esr-4.5-1-build1,
revision ID fc1c9ff7c1b2defdbc039f12214767608f46423f, for hacking purposes.

     1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
     2  * vim: set ts=8 sts=4 et sw=4 tw=99:
     3  * This Source Code Form is subject to the terms of the Mozilla Public
     4  * License, v. 2.0. If a copy of the MPL was not distributed with this
     5  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
     7 #include "mozilla/DebugOnly.h"
     8 #include "mozilla/MathAlgorithms.h"
     9 #include "mozilla/PodOperations.h"
    11 #include "jscntxt.h"
    12 #include "jscompartment.h"
    14 #include "vm/Opcodes.h"
    16 #include "jsinferinlines.h"
    17 #include "jsobjinlines.h"
    18 #include "jsopcodeinlines.h"
    20 using mozilla::DebugOnly;
    21 using mozilla::PodCopy;
    22 using mozilla::PodZero;
    23 using mozilla::FloorLog2;
    25 namespace js {
    26 namespace analyze {
    27 class LoopAnalysis;
    28 } // namespace analyze
    29 } // namespace js
    31 namespace mozilla {
    33 template <> struct IsPod<js::analyze::LifetimeVariable> : TrueType {};
    34 template <> struct IsPod<js::analyze::LoopAnalysis>     : TrueType {};
    35 template <> struct IsPod<js::analyze::SlotValue>        : TrueType {};
    36 template <> struct IsPod<js::analyze::SSAValue>         : TrueType {};
    37 template <> struct IsPod<js::analyze::SSAUseChain>      : TrueType {};
    39 } /* namespace mozilla */
    41 namespace js {
    42 namespace analyze {
    44 /*
    45  * There are three analyses we can perform on a JSScript, outlined below.
    46  * The results of all three are stored in ScriptAnalysis, but the analyses
    47  * themselves can be performed separately. Along with type inference results,
    48  * per-script analysis results are tied to the per-compartment analysis pool
    49  * and are freed on every garbage collection.
    50  *
    51  * - Basic bytecode analysis. For each bytecode, determine the stack depth at
    52  * that point and control flow information needed for compilation. Also does
    53  * a defined-variables analysis to look for local variables which have uses
    54  * before definitions.
    55  *
    56  * - Lifetime analysis. Makes a backwards pass over the script to approximate
    57  * the regions where each variable is live, avoiding a full fixpointing
    58  * live-variables pass. This is based on the algorithm described in:
    59  *
    60  *     "Quality and Speed in Linear-scan Register Allocation"
     61  *     Traub et al.
    62  *     PLDI, 1998
    63  *
    64  * - SSA analysis of the script's variables and stack values. For each stack
    65  * value popped and non-escaping local variable or argument read, determines
    66  * which push(es) or write(s) produced that value.
    67  *
    68  * Intermediate type inference results are additionally stored here. The above
    69  * analyses are independent from type inference.
    70  */
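
/*
 * Editorial sketch (not part of the original file): the heart of the basic
 * bytecode analysis is a forward walk that tracks the stack depth before
 * each opcode, in the style of ScriptAnalysis::analyzeBytecode() below.
 * Control-flow joins, which the real pass handles via addJump(), are ignored
 * here; the function name is invented and the sketch is kept out of
 * compilation.
 */
#if 0
static void
ExampleStackDepthWalk(JSScript *script)
{
    unsigned depth = 0;
    unsigned offset = 0;
    while (offset < script->length()) {
        jsbytecode *pc = script->offsetToPC(offset);

        /* 'depth' is the stack depth before this opcode executes. */
        unsigned nuses = GetUseCount(script, offset);
        unsigned ndefs = GetDefCount(script, offset);
        depth = depth - nuses + ndefs;

        offset += GetBytecodeLength(pc);
    }
}
#endif
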
    72 /* Information about a bytecode instruction. */
    73 class Bytecode
    74 {
    75     friend class ScriptAnalysis;
    77   public:
    78     Bytecode() { mozilla::PodZero(this); }
    80     /* --------- Bytecode analysis --------- */
    82     /* Whether there are any incoming jumps to this instruction. */
    83     bool jumpTarget : 1;
    85     /* Whether there is fallthrough to this instruction from a non-branching instruction. */
    86     bool fallthrough : 1;
    88     /* Whether this instruction is the fall through point of a conditional jump. */
    89     bool jumpFallthrough : 1;
    91     /*
    92      * Whether this instruction must always execute, unless the script throws
    93      * an exception which it does not later catch.
    94      */
    95     bool unconditional : 1;
    97     /* Whether this instruction has been analyzed to get its output defines and stack. */
    98     bool analyzed : 1;
   100     /* Whether this is a catch/finally entry point. */
   101     bool exceptionEntry : 1;
   103     /* Stack depth before this opcode. */
   104     uint32_t stackDepth;
   106   private:
   108     /* If this is a JSOP_LOOPHEAD or JSOP_LOOPENTRY, information about the loop. */
   109     LoopAnalysis *loop;
   111     /* --------- SSA analysis --------- */
   113     /* Generated location of each value popped by this bytecode. */
   114     SSAValue *poppedValues;
   116     /* Points where values pushed or written by this bytecode are popped. */
   117     SSAUseChain **pushedUses;
   119     union {
   120         /*
   121          * If this is a join point (implies jumpTarget), any slots at this
    122      * point which can have different values than at the immediate
   123          * predecessor in the bytecode. Array is terminated by an entry with
   124          * a zero slot.
   125          */
   126         SlotValue *newValues;
   128         /*
   129          * Vector used during SSA analysis to store values in need of merging
   130          * at this point. If this has incoming forward jumps and we have not
   131          * yet reached this point, stores values for entries on the stack and
   132          * for variables which have changed since the branch. If this is a loop
   133          * head and we haven't reached the back edge yet, stores loop phi nodes
   134          * for variables and entries live at the head of the loop.
   135          */
   136         Vector<SlotValue> *pendingValues;
   137     };
   138 };
   140 /*
   141  * Information about the lifetime of a local or argument. These form a linked
   142  * list describing successive intervals in the program where the variable's
   143  * value may be live. At points in the script not in one of these segments
   144  * (points in a 'lifetime hole'), the variable is dead and registers containing
   145  * its type/payload can be discarded without needing to be synced.
   146  */
   147 struct Lifetime
   148 {
   149     /*
   150      * Start and end offsets of this lifetime. The variable is live at the
   151      * beginning of every bytecode in this (inclusive) range.
   152      */
   153     uint32_t start;
   154     uint32_t end;
   156     /*
   157      * In a loop body, endpoint to extend this lifetime with if the variable is
   158      * live in the next iteration.
   159      */
   160     uint32_t savedEnd;
   162     /*
   163      * The start of this lifetime is a bytecode writing the variable. Each
   164      * write to a variable is associated with a lifetime.
   165      */
   166     bool write;
   168     /* Next lifetime. The variable is dead from this->end to next->start. */
   169     Lifetime *next;
   171     Lifetime(uint32_t offset, uint32_t savedEnd, Lifetime *next)
   172         : start(offset), end(offset), savedEnd(savedEnd),
   173           write(false), next(next)
   174     {}
   175 };
   177 /* Basic information for a loop. */
   178 class LoopAnalysis
   179 {
   180   public:
   181     /* Any loop this one is nested in. */
   182     LoopAnalysis *parent;
   184     /* Offset of the head of the loop. */
   185     uint32_t head;
   187     /*
   188      * Offset of the unique jump going to the head of the loop. The code
   189      * between the head and the backedge forms the loop body.
   190      */
   191     uint32_t backedge;
   192 };
   194 /* Current lifetime information for a variable. */
   195 struct LifetimeVariable
   196 {
   197     /* If the variable is currently live, the lifetime segment. */
   198     Lifetime *lifetime;
   200     /* If the variable is currently dead, the next live segment. */
   201     Lifetime *saved;
   203     /* Jump preceding the basic block which killed this variable. */
   204     uint32_t savedEnd : 31;
   206     /* If the variable needs to be kept alive until lifetime->start. */
   207     bool ensured : 1;
   209     /* Whether this variable is live at offset. */
   210     Lifetime * live(uint32_t offset) const {
   211         if (lifetime && lifetime->end >= offset)
   212             return lifetime;
   213         Lifetime *segment = lifetime ? lifetime : saved;
   214         while (segment && segment->start <= offset) {
   215             if (segment->end >= offset)
   216                 return segment;
   217             segment = segment->next;
   218         }
   219         return nullptr;
   220     }
   222     /*
   223      * Get the offset of the first write to the variable in an inclusive range,
   224      * UINT32_MAX if the variable is not written in the range.
   225      */
   226     uint32_t firstWrite(uint32_t start, uint32_t end) const {
   227         Lifetime *segment = lifetime ? lifetime : saved;
   228         while (segment && segment->start <= end) {
   229             if (segment->start >= start && segment->write)
   230                 return segment->start;
   231             segment = segment->next;
   232         }
   233         return UINT32_MAX;
   234     }
   235     uint32_t firstWrite(LoopAnalysis *loop) const {
   236         return firstWrite(loop->head, loop->backedge);
   237     }
   239     /*
   240      * If the variable is only written once in the body of a loop, offset of
   241      * that write. UINT32_MAX otherwise.
   242      */
   243     uint32_t onlyWrite(LoopAnalysis *loop) const {
   244         uint32_t offset = UINT32_MAX;
   245         Lifetime *segment = lifetime ? lifetime : saved;
   246         while (segment && segment->start <= loop->backedge) {
   247             if (segment->start >= loop->head && segment->write) {
   248                 if (offset != UINT32_MAX)
   249                     return UINT32_MAX;
   250                 offset = segment->start;
   251             }
   252             segment = segment->next;
   253         }
   254         return offset;
   255     }
   257 #ifdef DEBUG
   258     void print() const;
   259 #endif
   260 };
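
/*
 * Editorial sketch (not part of the original file): how a chain of Lifetime
 * segments models liveness with a hole, and what live()/firstWrite() report
 * for it. The offsets and the function name are invented for illustration;
 * the sketch is kept out of compilation.
 */
#if 0
static void
ExampleLifetimeHole()
{
    /* A variable live over [2,6] and again over [10,14], dead in between. */
    Lifetime second(10, 0, nullptr);
    second.end = 14;
    Lifetime first(2, 0, &second);
    first.end = 6;

    LifetimeVariable var;
    mozilla::PodZero(&var);
    var.lifetime = &first;

    JS_ASSERT(var.live(4) == &first);      /* inside the first segment */
    JS_ASSERT(var.live(8) == nullptr);     /* in the lifetime hole */
    JS_ASSERT(var.live(12) == &second);    /* inside the second segment */
    JS_ASSERT(var.firstWrite(0, 20) == UINT32_MAX);  /* no segment starts with a write */
}
#endif
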
   262 struct SSAPhiNode;
   264 /*
   265  * Representation of values on stack or in slots at each point in the script.
   266  * Values are independent from the bytecode position, and mean the same thing
   267  * everywhere in the script. SSA values are immutable, except for contents of
   268  * the values and types in an SSAPhiNode.
   269  */
   270 class SSAValue
   271 {
   272     friend class ScriptAnalysis;
   274   public:
   275     enum Kind {
   276         EMPTY  = 0, /* Invalid entry. */
   277         PUSHED = 1, /* Value pushed by some bytecode. */
   278         VAR    = 2, /* Initial or written value to some argument or local. */
   279         PHI    = 3  /* Selector for one of several values. */
   280     };
   282     Kind kind() const {
   283         JS_ASSERT(u.pushed.kind == u.var.kind && u.pushed.kind == u.phi.kind);
   285         /* Use a bitmask because MSVC wants to use -1 for PHI nodes. */
   286         return (Kind) (u.pushed.kind & 0x3);
   287     }
   289     bool operator==(const SSAValue &o) const {
   290         return !memcmp(this, &o, sizeof(SSAValue));
   291     }
   293     /* Accessors for values pushed by a bytecode within this script. */
   295     uint32_t pushedOffset() const {
   296         JS_ASSERT(kind() == PUSHED);
   297         return u.pushed.offset;
   298     }
   300     uint32_t pushedIndex() const {
   301         JS_ASSERT(kind() == PUSHED);
   302         return u.pushed.index;
   303     }
   305     /* Accessors for initial and written values of arguments and (undefined) locals. */
   307     bool varInitial() const {
   308         JS_ASSERT(kind() == VAR);
   309         return u.var.initial;
   310     }
   312     uint32_t varSlot() const {
   313         JS_ASSERT(kind() == VAR);
   314         return u.var.slot;
   315     }
   317     uint32_t varOffset() const {
   318         JS_ASSERT(!varInitial());
   319         return u.var.offset;
   320     }
   322     /* Accessors for phi nodes. */
   324     uint32_t phiSlot() const;
   325     uint32_t phiLength() const;
   326     const SSAValue &phiValue(uint32_t i) const;
   328     /* Offset at which this phi node was created. */
   329     uint32_t phiOffset() const {
   330         JS_ASSERT(kind() == PHI);
   331         return u.phi.offset;
   332     }
   334     SSAPhiNode *phiNode() const {
   335         JS_ASSERT(kind() == PHI);
   336         return u.phi.node;
   337     }
   339     /* Other accessors. */
   341 #ifdef DEBUG
   342     void print() const;
   343 #endif
   345     void clear() {
   346         mozilla::PodZero(this);
   347         JS_ASSERT(kind() == EMPTY);
   348     }
   350     void initPushed(uint32_t offset, uint32_t index) {
   351         clear();
   352         u.pushed.kind = PUSHED;
   353         u.pushed.offset = offset;
   354         u.pushed.index = index;
   355     }
   357     static SSAValue PushedValue(uint32_t offset, uint32_t index) {
   358         SSAValue v;
   359         v.initPushed(offset, index);
   360         return v;
   361     }
   363     void initInitial(uint32_t slot) {
   364         clear();
   365         u.var.kind = VAR;
   366         u.var.initial = true;
   367         u.var.slot = slot;
   368     }
   370     void initWritten(uint32_t slot, uint32_t offset) {
   371         clear();
   372         u.var.kind = VAR;
   373         u.var.initial = false;
   374         u.var.slot = slot;
   375         u.var.offset = offset;
   376     }
   378     static SSAValue WrittenVar(uint32_t slot, uint32_t offset) {
   379         SSAValue v;
   380         v.initWritten(slot, offset);
   381         return v;
   382     }
   384     void initPhi(uint32_t offset, SSAPhiNode *node) {
   385         clear();
   386         u.phi.kind = PHI;
   387         u.phi.offset = offset;
   388         u.phi.node = node;
   389     }
   391     static SSAValue PhiValue(uint32_t offset, SSAPhiNode *node) {
   392         SSAValue v;
   393         v.initPhi(offset, node);
   394         return v;
   395     }
   397   private:
   398     union {
   399         struct {
   400             Kind kind : 2;
   401             uint32_t offset : 30;
   402             uint32_t index;
   403         } pushed;
   404         struct {
   405             Kind kind : 2;
   406             bool initial : 1;
   407             uint32_t slot : 29;
   408             uint32_t offset;
   409         } var;
   410         struct {
   411             Kind kind : 2;
   412             uint32_t offset : 30;
   413             SSAPhiNode *node;
   414         } phi;
   415     } u;
   416 };
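
/*
 * Editorial sketch (not part of the original file): the static constructors
 * above are the usual way SSA values are built during analysis. The offsets,
 * slot numbers and function name are invented for illustration; the sketch
 * is kept out of compilation.
 */
#if 0
static void
ExampleSSAValues()
{
    /* The value pushed at stack index 1 by the bytecode at offset 10. */
    SSAValue pushed = SSAValue::PushedValue(10, 1);
    JS_ASSERT(pushed.kind() == SSAValue::PUSHED);
    JS_ASSERT(pushed.pushedOffset() == 10 && pushed.pushedIndex() == 1);

    /* The value written to slot 3 by the bytecode at offset 20. */
    SSAValue written = SSAValue::WrittenVar(3, 20);
    JS_ASSERT(written.kind() == SSAValue::VAR);
    JS_ASSERT(!written.varInitial() && written.varSlot() == 3);

    /* The initial (on-entry) value of slot 3; it has no defining offset. */
    SSAValue initial;
    initial.initInitial(3);
    JS_ASSERT(initial.kind() == SSAValue::VAR && initial.varInitial());
}
#endif
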
   418 /*
   419  * Mutable component of a phi node, with the possible values of the phi
   420  * and the possible types of the node as determined by type inference.
   421  * When phi nodes are copied around, any updates to the original will
   422  * be seen by all copies made.
   423  */
   424 struct SSAPhiNode
   425 {
   426     uint32_t slot;
   427     uint32_t length;
   428     SSAValue *options;
   429     SSAUseChain *uses;
   430     SSAPhiNode() { mozilla::PodZero(this); }
   431 };
   433 inline uint32_t
   434 SSAValue::phiSlot() const
   435 {
   436     return u.phi.node->slot;
   437 }
   439 inline uint32_t
   440 SSAValue::phiLength() const
   441 {
   442     JS_ASSERT(kind() == PHI);
   443     return u.phi.node->length;
   444 }
   446 inline const SSAValue &
   447 SSAValue::phiValue(uint32_t i) const
   448 {
   449     JS_ASSERT(kind() == PHI && i < phiLength());
   450     return u.phi.node->options[i];
   451 }
   453 class SSAUseChain
   454 {
   455   public:
   456     bool popped : 1;
   457     uint32_t offset : 31;
   458     /* FIXME: Assert that only the proper arm of this union is accessed. */
   459     union {
   460         uint32_t which;
   461         SSAPhiNode *phi;
   462     } u;
   463     SSAUseChain *next;
   465     SSAUseChain() { mozilla::PodZero(this); }
   466 };
   468 class SlotValue
   469 {
   470   public:
   471     uint32_t slot;
   472     SSAValue value;
   473     SlotValue(uint32_t slot, const SSAValue &value) : slot(slot), value(value) {}
   474 };
   476 /////////////////////////////////////////////////////////////////////
   477 // Bytecode
   478 /////////////////////////////////////////////////////////////////////
   480 #ifdef DEBUG
   481 void
   482 PrintBytecode(JSContext *cx, HandleScript script, jsbytecode *pc)
   483 {
   484     fprintf(stderr, "#%u:", script->id());
   485     Sprinter sprinter(cx);
   486     if (!sprinter.init())
   487         return;
   488     js_Disassemble1(cx, script, pc, script->pcToOffset(pc), true, &sprinter);
   489     fprintf(stderr, "%s", sprinter.string());
   490 }
   491 #endif
   493 /*
   494  * For opcodes which assign to a local variable or argument, track an extra def
   495  * during SSA analysis for the value's use chain and assigned type.
   496  */
   497 static inline bool
   498 ExtendedDef(jsbytecode *pc)
   499 {
   500     switch ((JSOp)*pc) {
   501       case JSOP_SETARG:
   502       case JSOP_SETLOCAL:
   503         return true;
   504       default:
   505         return false;
   506     }
   507 }
   509 /*
   510  * For opcodes which access local variables or arguments, we track an extra
   511  * use during SSA analysis for the value of the variable before/after the op.
   512  */
   513 static inline bool
   514 ExtendedUse(jsbytecode *pc)
   515 {
   516     if (ExtendedDef(pc))
   517         return true;
   518     switch ((JSOp)*pc) {
   519       case JSOP_GETARG:
   520       case JSOP_GETLOCAL:
   521         return true;
   522       default:
   523         return false;
   524     }
   525 }
   527 static inline unsigned
   528 FollowBranch(JSContext *cx, JSScript *script, unsigned offset)
   529 {
   530     /*
   531      * Get the target offset of a branch. For GOTO opcodes implementing
   532      * 'continue' statements, short circuit any artificial backwards jump
   533      * inserted by the emitter.
   534      */
   535     jsbytecode *pc = script->offsetToPC(offset);
   536     unsigned targetOffset = offset + GET_JUMP_OFFSET(pc);
   537     if (targetOffset < offset) {
   538         jsbytecode *target = script->offsetToPC(targetOffset);
   539         JSOp nop = JSOp(*target);
   540         if (nop == JSOP_GOTO)
   541             return targetOffset + GET_JUMP_OFFSET(target);
   542     }
   543     return targetOffset;
   544 }
   546 static inline uint32_t StackSlot(JSScript *script, uint32_t index) {
   547     return TotalSlots(script) + index;
   548 }
    550 static inline uint32_t GetBytecodeSlot(JSScript *script, jsbytecode *pc)
    551 {
   551 {
   552     switch (JSOp(*pc)) {
   554       case JSOP_GETARG:
   555       case JSOP_SETARG:
   556         return ArgSlot(GET_ARGNO(pc));
   558       case JSOP_GETLOCAL:
   559       case JSOP_SETLOCAL:
   560         return LocalSlot(script, GET_LOCALNO(pc));
   562       case JSOP_THIS:
   563         return ThisSlot();
   565       default:
   566         MOZ_ASSUME_UNREACHABLE("Bad slot opcode");
   567     }
   568 }
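
/*
 * Editorial note (not part of the original file): the slot numbering used by
 * the helpers above comes from jsanalyze.h. Roughly, the 'this' slot and the
 * argument slots come before the local slots, TotalSlots() counts all of
 * these fixed slots, and StackSlot() numbers stack values after them. The
 * exact constants live in the header.
 */
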
   570 /////////////////////////////////////////////////////////////////////
   571 // Bytecode Analysis
   572 /////////////////////////////////////////////////////////////////////
   574 inline bool
   575 ScriptAnalysis::addJump(JSContext *cx, unsigned offset,
   576                         unsigned *currentOffset, unsigned *forwardJump, unsigned *forwardLoop,
   577                         unsigned stackDepth)
   578 {
   579     JS_ASSERT(offset < script_->length());
   581     Bytecode *&code = codeArray[offset];
   582     if (!code) {
   583         code = cx->typeLifoAlloc().new_<Bytecode>();
   584         if (!code) {
   585             js_ReportOutOfMemory(cx);
   586             return false;
   587         }
   588         code->stackDepth = stackDepth;
   589     }
   590     JS_ASSERT(code->stackDepth == stackDepth);
   592     code->jumpTarget = true;
   594     if (offset < *currentOffset) {
   595         if (!code->analyzed) {
   596             /*
   597              * Backedge in a while/for loop, whose body has not been analyzed
   598              * due to a lack of fallthrough at the loop head. Roll back the
   599              * offset to analyze the body.
   600              */
   601             if (*forwardJump == 0)
   602                 *forwardJump = *currentOffset;
   603             if (*forwardLoop == 0)
   604                 *forwardLoop = *currentOffset;
   605             *currentOffset = offset;
   606         }
   607     } else if (offset > *forwardJump) {
   608         *forwardJump = offset;
   609     }
   611     return true;
   612 }
   614 inline bool
   615 ScriptAnalysis::jumpTarget(uint32_t offset)
   616 {
   617     JS_ASSERT(offset < script_->length());
   618     return codeArray[offset] && codeArray[offset]->jumpTarget;
   619 }
   621 inline bool
   622 ScriptAnalysis::jumpTarget(const jsbytecode *pc)
   623 {
   624     return jumpTarget(script_->pcToOffset(pc));
   625 }
   627 inline const SSAValue &
   628 ScriptAnalysis::poppedValue(uint32_t offset, uint32_t which)
   629 {
   630     JS_ASSERT(which < GetUseCount(script_, offset) +
   631               (ExtendedUse(script_->offsetToPC(offset)) ? 1 : 0));
   632     return getCode(offset).poppedValues[which];
   633 }
   635 inline const SSAValue &
   636 ScriptAnalysis::poppedValue(const jsbytecode *pc, uint32_t which)
   637 {
   638     return poppedValue(script_->pcToOffset(pc), which);
   639 }
   641 inline const SlotValue *
   642 ScriptAnalysis::newValues(uint32_t offset)
   643 {
   644     JS_ASSERT(offset < script_->length());
   645     return getCode(offset).newValues;
   646 }
   648 inline const SlotValue *
   649 ScriptAnalysis::newValues(const jsbytecode *pc)
   650 {
   651     return newValues(script_->pcToOffset(pc));
   652 }
   654 inline bool
   655 ScriptAnalysis::trackUseChain(const SSAValue &v)
   656 {
   657     JS_ASSERT_IF(v.kind() == SSAValue::VAR, trackSlot(v.varSlot()));
   658     return v.kind() != SSAValue::EMPTY &&
   659         (v.kind() != SSAValue::VAR || !v.varInitial());
   660 }
   662 /*
   663  * Get the use chain for an SSA value.
   664  */
   665 inline SSAUseChain *&
   666 ScriptAnalysis::useChain(const SSAValue &v)
   667 {
   668     JS_ASSERT(trackUseChain(v));
   669     if (v.kind() == SSAValue::PUSHED)
   670         return getCode(v.pushedOffset()).pushedUses[v.pushedIndex()];
   671     if (v.kind() == SSAValue::VAR)
   672         return getCode(v.varOffset()).pushedUses[GetDefCount(script_, v.varOffset())];
   673     return v.phiNode()->uses;
   674 }
   676 /* For a JSOP_CALL* op, get the pc of the corresponding JSOP_CALL/NEW/etc. */
   677 inline jsbytecode *
   678 ScriptAnalysis::getCallPC(jsbytecode *pc)
   679 {
   680     SSAUseChain *uses = useChain(SSAValue::PushedValue(script_->pcToOffset(pc), 0));
   681     JS_ASSERT(uses && uses->popped);
   682     JS_ASSERT(js_CodeSpec[script_->code()[uses->offset]].format & JOF_INVOKE);
   683     return script_->offsetToPC(uses->offset);
   684 }
   686 /* Accessors for local variable information. */
   688 /*
   689  * Escaping slots include all slots that can be accessed in ways other than
   690  * through the corresponding LOCAL/ARG opcode. This includes all closed
   691  * slots in the script, all slots in scripts which use eval or are in debug
   692  * mode, and slots which are aliased by NAME or similar opcodes in the
   693  * containing script (which does not imply the variable is closed).
   694  */
   695 inline bool
   696 ScriptAnalysis::slotEscapes(uint32_t slot)
   697 {
   698     JS_ASSERT(script_->compartment()->activeAnalysis);
   699     if (slot >= numSlots)
   700         return true;
   701     return escapedSlots[slot];
   702 }
   704 /*
   705  * Whether we distinguish different writes of this variable while doing
   706  * SSA analysis. Escaping locals can be written in other scripts, and the
   707  * presence of NAME opcodes which could alias local variables or arguments
   708  * keeps us from tracking variable values at each point.
   709  */
   710 inline bool
   711 ScriptAnalysis::trackSlot(uint32_t slot)
   712 {
   713     return !slotEscapes(slot) && canTrackVars && slot < 1000;
   714 }
   716 inline const LifetimeVariable &
   717 ScriptAnalysis::liveness(uint32_t slot)
   718 {
   719     JS_ASSERT(script_->compartment()->activeAnalysis);
   720     JS_ASSERT(!slotEscapes(slot));
   721     return lifetimes[slot];
   722 }
   724 bool
   725 ScriptAnalysis::analyzeBytecode(JSContext *cx)
   726 {
   727     JS_ASSERT(cx->compartment()->activeAnalysis);
   728     JS_ASSERT(!codeArray);
   730     LifoAlloc &alloc = cx->typeLifoAlloc();
   732     numSlots = TotalSlots(script_);
   734     unsigned length = script_->length();
   735     codeArray = alloc.newArray<Bytecode*>(length);
   736     escapedSlots = alloc.newArray<bool>(numSlots);
   738     if (!codeArray || !escapedSlots) {
   739         js_ReportOutOfMemory(cx);
   740         return false;
   741     }
   743     PodZero(codeArray, length);
   745     /*
   746      * Populate arg and local slots which can escape and be accessed in ways
   747      * other than through ARG* and LOCAL* opcodes (though arguments can still
   748      * be indirectly read but not written through 'arguments' properties).
   749      * All escaping locals are treated as having possible use-before-defs.
   750      * Conservatively use 'argumentsHasVarBinding' instead of 'needsArgsObj'
   751      * (needsArgsObj requires SSA which requires escapedSlots). Lastly, the
   752      * debugger can access any local at any time. Even though debugger
   753      * reads/writes are monitored by the DebugScopeProxy, this monitoring
   754      * updates the flow-insensitive type sets, so we cannot use SSA.
   755      */
   757     PodZero(escapedSlots, numSlots);
   759     bool allVarsAliased = script_->compartment()->debugMode();
   760     bool allArgsAliased = allVarsAliased || script_->argumentsHasVarBinding();
   762     RootedScript script(cx, script_);
   763     for (BindingIter bi(script); bi; bi++) {
   764         if (bi->kind() == Binding::ARGUMENT)
   765             escapedSlots[ArgSlot(bi.frameIndex())] = allArgsAliased || bi->aliased();
   766         else
   767             escapedSlots[LocalSlot(script_, bi.frameIndex())] = allVarsAliased || bi->aliased();
   768     }
   770     canTrackVars = true;
   772     /*
   773      * If we are in the middle of one or more jumps, the offset of the highest
   774      * target jumping over this bytecode.  Includes implicit jumps from
   775      * try/catch/finally blocks.
   776      */
   777     unsigned forwardJump = 0;
   779     /* If we are in the middle of a loop, the offset of the highest backedge. */
   780     unsigned forwardLoop = 0;
   782     /*
   783      * If we are in the middle of a try block, the offset of the highest
   784      * catch/finally/enditer.
   785      */
   786     unsigned forwardCatch = 0;
   788     /* Fill in stack depth and definitions at initial bytecode. */
   789     Bytecode *startcode = alloc.new_<Bytecode>();
   790     if (!startcode) {
   791         js_ReportOutOfMemory(cx);
   792         return false;
   793     }
   795     startcode->stackDepth = 0;
   796     codeArray[0] = startcode;
   798     unsigned offset, nextOffset = 0;
   799     while (nextOffset < length) {
   800         offset = nextOffset;
   802         JS_ASSERT(forwardCatch <= forwardJump);
   804         /* Check if the current forward jump/try-block has finished. */
   805         if (forwardJump && forwardJump == offset)
   806             forwardJump = 0;
   807         if (forwardCatch && forwardCatch == offset)
   808             forwardCatch = 0;
   810         Bytecode *code = maybeCode(offset);
   811         jsbytecode *pc = script_->offsetToPC(offset);
   813         JSOp op = (JSOp)*pc;
   814         JS_ASSERT(op < JSOP_LIMIT);
   816         /* Immediate successor of this bytecode. */
   817         unsigned successorOffset = offset + GetBytecodeLength(pc);
   819         /*
   820          * Next bytecode to analyze.  This is either the successor, or is an
   821          * earlier bytecode if this bytecode has a loop backedge.
   822          */
   823         nextOffset = successorOffset;
   825         if (!code) {
   826             /* Haven't found a path by which this bytecode is reachable. */
   827             continue;
   828         }
   830         /*
   831          * Update info about bytecodes inside loops, which may have been
   832          * analyzed before the backedge was seen.
   833          */
   834         if (forwardLoop) {
   835             if (forwardLoop <= offset)
   836                 forwardLoop = 0;
   837         }
   839         if (code->analyzed) {
   840             /* No need to reanalyze, see Bytecode::mergeDefines. */
   841             continue;
   842         }
   844         code->analyzed = true;
   846         if (script_->hasBreakpointsAt(pc))
   847             canTrackVars = false;
   849         unsigned stackDepth = code->stackDepth;
   851         if (!forwardJump)
   852             code->unconditional = true;
   854         unsigned nuses = GetUseCount(script_, offset);
   855         unsigned ndefs = GetDefCount(script_, offset);
   857         JS_ASSERT(stackDepth >= nuses);
   858         stackDepth -= nuses;
   859         stackDepth += ndefs;
   861         switch (op) {
   863           case JSOP_DEFFUN:
   864           case JSOP_DEFVAR:
   865           case JSOP_DEFCONST:
   866           case JSOP_SETCONST:
   867             canTrackVars = false;
   868             break;
   870           case JSOP_EVAL:
   871           case JSOP_SPREADEVAL:
   872           case JSOP_ENTERWITH:
   873             canTrackVars = false;
   874             break;
   876           case JSOP_TABLESWITCH: {
   877             unsigned defaultOffset = offset + GET_JUMP_OFFSET(pc);
   878             jsbytecode *pc2 = pc + JUMP_OFFSET_LEN;
   879             int32_t low = GET_JUMP_OFFSET(pc2);
   880             pc2 += JUMP_OFFSET_LEN;
   881             int32_t high = GET_JUMP_OFFSET(pc2);
   882             pc2 += JUMP_OFFSET_LEN;
   884             if (!addJump(cx, defaultOffset, &nextOffset, &forwardJump, &forwardLoop, stackDepth))
   885                 return false;
   887             for (int32_t i = low; i <= high; i++) {
   888                 unsigned targetOffset = offset + GET_JUMP_OFFSET(pc2);
   889                 if (targetOffset != offset) {
   890                     if (!addJump(cx, targetOffset, &nextOffset, &forwardJump, &forwardLoop, stackDepth))
   891                         return false;
   892                 }
   893                 pc2 += JUMP_OFFSET_LEN;
   894             }
   895             break;
   896           }
   898           case JSOP_TRY: {
   899             /*
   900              * Everything between a try and corresponding catch or finally is conditional.
   901              * Note that there is no problem with code which is skipped by a thrown
   902              * exception but is not caught by a later handler in the same function:
   903              * no more code will execute, and it does not matter what is defined.
   904              */
   905             JSTryNote *tn = script_->trynotes()->vector;
   906             JSTryNote *tnlimit = tn + script_->trynotes()->length;
   907             for (; tn < tnlimit; tn++) {
   908                 unsigned startOffset = script_->mainOffset() + tn->start;
   909                 if (startOffset == offset + 1) {
   910                     unsigned catchOffset = startOffset + tn->length;
   912                     /* This will overestimate try block code, for multiple catch/finally. */
   913                     if (catchOffset > forwardCatch)
   914                         forwardCatch = catchOffset;
   916                     if (tn->kind != JSTRY_ITER && tn->kind != JSTRY_LOOP) {
   917                         if (!addJump(cx, catchOffset, &nextOffset, &forwardJump, &forwardLoop, stackDepth))
   918                             return false;
   919                         getCode(catchOffset).exceptionEntry = true;
   920                     }
   921                 }
   922             }
   923             break;
   924           }
   926           case JSOP_GETLOCAL:
   927           case JSOP_SETLOCAL:
   928             JS_ASSERT(GET_LOCALNO(pc) < script_->nfixed());
   929             break;
   931           default:
   932             break;
   933         }
   935         bool jump = IsJumpOpcode(op);
   937         /* Check basic jump opcodes, which may or may not have a fallthrough. */
   938         if (jump) {
   939             /* Case instructions do not push the lvalue back when branching. */
   940             unsigned newStackDepth = stackDepth;
   941             if (op == JSOP_CASE)
   942                 newStackDepth--;
   944             unsigned targetOffset = offset + GET_JUMP_OFFSET(pc);
   945             if (!addJump(cx, targetOffset, &nextOffset, &forwardJump, &forwardLoop, newStackDepth))
   946                 return false;
   947         }
   949         /* Handle any fallthrough from this opcode. */
   950         if (BytecodeFallsThrough(op)) {
   951             JS_ASSERT(successorOffset < script_->length());
   953             Bytecode *&nextcode = codeArray[successorOffset];
   955             if (!nextcode) {
   956                 nextcode = alloc.new_<Bytecode>();
   957                 if (!nextcode) {
   958                     js_ReportOutOfMemory(cx);
   959                     return false;
   960                 }
   961                 nextcode->stackDepth = stackDepth;
   962             }
   963             JS_ASSERT(nextcode->stackDepth == stackDepth);
   965             if (jump)
   966                 nextcode->jumpFallthrough = true;
   968             /* Treat the fallthrough of a branch instruction as a jump target. */
   969             if (jump)
   970                 nextcode->jumpTarget = true;
   971             else
   972                 nextcode->fallthrough = true;
   973         }
   974     }
   976     JS_ASSERT(forwardJump == 0 && forwardLoop == 0 && forwardCatch == 0);
   978     /*
   979      * Always ensure that a script's arguments usage has been analyzed before
   980      * entering the script. This allows the functionPrologue to ensure that
   981      * arguments are always created eagerly which simplifies interp logic.
   982      */
   983     if (!script_->analyzedArgsUsage()) {
   984         if (!analyzeLifetimes(cx))
   985             return false;
   986         if (!analyzeSSA(cx))
   987             return false;
   989         /*
   990          * Now that we have full SSA information for the script, analyze whether
   991          * we can avoid creating the arguments object.
   992          */
   993         script_->setNeedsArgsObj(needsArgsObj(cx));
   994     }
   996     return true;
   997 }
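
/*
 * Editorial sketch (not part of the original file): the passes build on one
 * another, as the assertions at the top of analyzeLifetimes() and
 * analyzeSSA() below require (codeArray first, then lifetimes, then SSA).
 * When analyzeBytecode() has not already chained into the later passes (it
 * only does so while the script's arguments usage is unanalyzed), a caller
 * wanting SSA information would run them in this order, assuming these entry
 * points are reachable from the caller. Kept out of compilation.
 */
#if 0
static bool
ExampleRunToSSA(JSContext *cx, ScriptAnalysis *analysis)
{
    if (!analysis->analyzeBytecode(cx))
        return false;
    if (!analysis->analyzeLifetimes(cx))   /* requires codeArray from the pass above */
        return false;
    return analysis->analyzeSSA(cx);       /* requires lifetimes from the pass above */
}
#endif
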
   999 /////////////////////////////////////////////////////////////////////
  1000 // Lifetime Analysis
  1001 /////////////////////////////////////////////////////////////////////
  1003 bool
   1004 ScriptAnalysis::analyzeLifetimes(JSContext *cx)
   1005 {
  1006     JS_ASSERT(cx->compartment()->activeAnalysis);
  1007     JS_ASSERT(codeArray);
  1008     JS_ASSERT(!lifetimes);
  1010     LifoAlloc &alloc = cx->typeLifoAlloc();
  1012     lifetimes = alloc.newArray<LifetimeVariable>(numSlots);
  1013     if (!lifetimes) {
  1014         js_ReportOutOfMemory(cx);
   1015         return false;
   1016     }
  1017     PodZero(lifetimes, numSlots);
  1019     /*
  1020      * Variables which are currently dead. On forward branches to locations
  1021      * where these are live, they need to be marked as live.
  1022      */
  1023     LifetimeVariable **saved = cx->pod_calloc<LifetimeVariable*>(numSlots);
  1024     if (!saved) {
  1025         js_ReportOutOfMemory(cx);
   1026         return false;
   1027     }
  1028     unsigned savedCount = 0;
  1030     LoopAnalysis *loop = nullptr;
  1032     uint32_t offset = script_->length() - 1;
  1033     while (offset < script_->length()) {
  1034         Bytecode *code = maybeCode(offset);
  1035         if (!code) {
  1036             offset--;
   1037             continue;
   1038         }
  1040         jsbytecode *pc = script_->offsetToPC(offset);
  1042         JSOp op = (JSOp) *pc;
  1044         if (op == JSOP_LOOPHEAD && code->loop) {
  1045             /*
  1046              * This is the head of a loop, we need to go and make sure that any
  1047              * variables live at the head are live at the backedge and points prior.
  1048              * For each such variable, look for the last lifetime segment in the body
  1049              * and extend it to the end of the loop.
  1050              */
  1051             JS_ASSERT(loop == code->loop);
  1052             unsigned backedge = code->loop->backedge;
  1053             for (unsigned i = 0; i < numSlots; i++) {
  1054                 if (lifetimes[i].lifetime) {
  1055                     if (!extendVariable(cx, lifetimes[i], offset, backedge))
   1056                         return false;
   1057                 }
   1058             }
   1060             loop = loop->parent;
   1061             JS_ASSERT_IF(loop, loop->head < offset);
   1062         }
  1064         if (code->exceptionEntry) {
  1065             DebugOnly<bool> found = false;
  1066             JSTryNote *tn = script_->trynotes()->vector;
  1067             JSTryNote *tnlimit = tn + script_->trynotes()->length;
  1068             for (; tn < tnlimit; tn++) {
  1069                 unsigned startOffset = script_->mainOffset() + tn->start;
  1070                 if (startOffset + tn->length == offset) {
  1071                     /*
  1072                      * Extend all live variables at exception entry to the start of
  1073                      * the try block.
  1074                      */
  1075                     for (unsigned i = 0; i < numSlots; i++) {
  1076                         if (lifetimes[i].lifetime)
   1077                             ensureVariable(lifetimes[i], startOffset - 1);
   1078                     }
   1080                     found = true;
   1081                     break;
   1082                 }
   1083             }
   1084             JS_ASSERT(found);
   1085         }
  1087         switch (op) {
  1088           case JSOP_GETARG:
  1089           case JSOP_GETLOCAL:
  1090           case JSOP_THIS: {
  1091             uint32_t slot = GetBytecodeSlot(script_, pc);
  1092             if (!slotEscapes(slot)) {
  1093                 if (!addVariable(cx, lifetimes[slot], offset, saved, savedCount))
   1094                     return false;
   1095             }
   1096             break;
   1097           }
  1099           case JSOP_SETARG:
  1100           case JSOP_SETLOCAL: {
  1101             uint32_t slot = GetBytecodeSlot(script_, pc);
  1102             if (!slotEscapes(slot)) {
  1103                 if (!killVariable(cx, lifetimes[slot], offset, saved, savedCount))
   1104                     return false;
   1105             }
   1106             break;
   1107           }
  1109           case JSOP_TABLESWITCH:
  1110             /* Restore all saved variables. :FIXME: maybe do this precisely. */
  1111             for (unsigned i = 0; i < savedCount; i++) {
  1112                 LifetimeVariable &var = *saved[i];
  1113                 uint32_t savedEnd = var.savedEnd;
  1114                 var.lifetime = alloc.new_<Lifetime>(offset, savedEnd, var.saved);
  1115                 if (!var.lifetime) {
  1116                     js_free(saved);
  1117                     js_ReportOutOfMemory(cx);
   1118                     return false;
   1119                 }
   1120                 var.saved = nullptr;
   1121                 saved[i--] = saved[--savedCount];
   1122             }
  1123             savedCount = 0;
  1124             break;
  1126           case JSOP_TRY:
  1127             for (unsigned i = 0; i < numSlots; i++) {
  1128                 LifetimeVariable &var = lifetimes[i];
  1129                 if (var.ensured) {
  1130                     JS_ASSERT(var.lifetime);
  1131                     if (var.lifetime->start == offset)
   1132                         var.ensured = false;
   1133                 }
   1134             }
  1135             break;
  1137           case JSOP_LOOPENTRY:
  1138             getCode(offset).loop = loop;
  1139             break;
   1141           default:;
   1142         }
  1144         if (IsJumpOpcode(op)) {
  1145             /*
  1146              * Forward jumps need to pull in all variables which are live at
  1147              * their target offset --- the variables live before the jump are
  1148              * the union of those live at the fallthrough and at the target.
  1149              */
  1150             uint32_t targetOffset = FollowBranch(cx, script_, offset);
  1152             if (targetOffset < offset) {
  1153                 /* This is a loop back edge, no lifetime to pull in yet. */
  1155 #ifdef DEBUG
  1156                 JSOp nop = JSOp(script_->code()[targetOffset]);
  1157                 JS_ASSERT(nop == JSOP_LOOPHEAD);
  1158 #endif
  1160                 LoopAnalysis *nloop = alloc.new_<LoopAnalysis>();
  1161                 if (!nloop) {
  1162                     js_free(saved);
  1163                     js_ReportOutOfMemory(cx);
   1164                     return false;
   1165                 }
  1166                 PodZero(nloop);
  1168                 nloop->parent = loop;
  1169                 loop = nloop;
  1171                 getCode(targetOffset).loop = loop;
  1172                 loop->head = targetOffset;
  1173                 loop->backedge = offset;
  1175                 /*
  1176                  * Find the entry jump, which will be a GOTO for 'for' or
  1177                  * 'while' loops or a fallthrough for 'do while' loops.
  1178                  */
  1179                 uint32_t entry = targetOffset;
  1180                 if (entry) {
  1181                     do {
  1182                         entry--;
  1183                     } while (!maybeCode(entry));
  1185                     jsbytecode *entrypc = script_->offsetToPC(entry);
  1187                     if (JSOp(*entrypc) == JSOP_GOTO)
  1188                         entry += GET_JUMP_OFFSET(entrypc);
  1189                     else
  1190                         entry = targetOffset;
  1191                 } else {
  1192                     /* Do-while loop at the start of the script. */
   1193                     entry = targetOffset;
   1194                 }
  1195                 JS_ASSERT(script_->code()[entry] == JSOP_LOOPHEAD ||
  1196                           script_->code()[entry] == JSOP_LOOPENTRY);
  1197             } else {
  1198                 for (unsigned i = 0; i < savedCount; i++) {
  1199                     LifetimeVariable &var = *saved[i];
  1200                     JS_ASSERT(!var.lifetime && var.saved);
  1201                     if (var.live(targetOffset)) {
  1202                         /*
  1203                          * Jumping to a place where this variable is live. Make a new
  1204                          * lifetime segment for the variable.
  1205                          */
  1206                         uint32_t savedEnd = var.savedEnd;
  1207                         var.lifetime = alloc.new_<Lifetime>(offset, savedEnd, var.saved);
  1208                         if (!var.lifetime) {
  1209                             js_free(saved);
  1210                             js_ReportOutOfMemory(cx);
   1211                             return false;
   1212                         }
  1213                         var.saved = nullptr;
  1214                         saved[i--] = saved[--savedCount];
  1215                     } else if (loop && !var.savedEnd) {
  1216                         /*
  1217                          * This jump precedes the basic block which killed the variable,
  1218                          * remember it and use it for the end of the next lifetime
  1219                          * segment should the variable become live again. This is needed
  1220                          * for loops, as if we wrap liveness around the loop the isLive
  1221                          * test below may have given the wrong answer.
  1222                          */
   1223                         var.savedEnd = offset;
   1224                     }
   1225                 }
   1226             }
   1227         }
   1229         offset--;
   1230     }
  1232     js_free(saved);
   1234     return true;
   1235 }
  1237 #ifdef DEBUG
  1238 void
   1239 LifetimeVariable::print() const
   1240 {
  1241     Lifetime *segment = lifetime ? lifetime : saved;
  1242     while (segment) {
  1243         printf(" (%u,%u)", segment->start, segment->end);
   1244         segment = segment->next;
   1245     }
   1246     printf("\n");
   1247 }
  1248 #endif /* DEBUG */
  1250 inline bool
  1251 ScriptAnalysis::addVariable(JSContext *cx, LifetimeVariable &var, unsigned offset,
   1252                             LifetimeVariable **&saved, unsigned &savedCount)
   1253 {
  1254     if (var.lifetime) {
  1255         if (var.ensured)
  1256             return true;
  1258         JS_ASSERT(offset < var.lifetime->start);
  1259         var.lifetime->start = offset;
  1260     } else {
  1261         if (var.saved) {
  1262             /* Remove from the list of saved entries. */
  1263             for (unsigned i = 0; i < savedCount; i++) {
  1264                 if (saved[i] == &var) {
  1265                     JS_ASSERT(savedCount);
  1266                     saved[i--] = saved[--savedCount];
   1267                     break;
   1268                 }
   1269             }
   1270         }
  1271         uint32_t savedEnd = var.savedEnd;
  1272         var.lifetime = cx->typeLifoAlloc().new_<Lifetime>(offset, savedEnd, var.saved);
  1273         if (!var.lifetime) {
  1274             js_ReportOutOfMemory(cx);
   1275             return false;
   1276         }
   1277         var.saved = nullptr;
   1278     }
   1280     return true;
   1281 }
  1283 inline bool
  1284 ScriptAnalysis::killVariable(JSContext *cx, LifetimeVariable &var, unsigned offset,
   1285                              LifetimeVariable **&saved, unsigned &savedCount)
   1286 {
  1287     if (!var.lifetime) {
  1288         /* Make a point lifetime indicating the write. */
  1289         uint32_t savedEnd = var.savedEnd;
  1290         Lifetime *lifetime = cx->typeLifoAlloc().new_<Lifetime>(offset, savedEnd, var.saved);
  1291         if (!lifetime) {
  1292             js_ReportOutOfMemory(cx);
   1293             return false;
   1294         }
  1295         if (!var.saved)
  1296             saved[savedCount++] = &var;
  1297         var.saved = lifetime;
  1298         var.saved->write = true;
  1299         var.savedEnd = 0;
   1300         return true;
   1301     }
  1303     JS_ASSERT_IF(!var.ensured, offset < var.lifetime->start);
  1304     unsigned start = var.lifetime->start;
  1306     /*
  1307      * The variable is considered to be live at the bytecode which kills it
  1308      * (just not at earlier bytecodes). This behavior is needed by downstream
  1309      * register allocation (see FrameState::bestEvictReg).
  1310      */
  1311     var.lifetime->start = offset;
  1312     var.lifetime->write = true;
  1314     if (var.ensured) {
  1315         /*
  1316          * The variable is live even before the write, due to an enclosing try
  1317          * block. We need to split the lifetime to indicate there was a write.
  1318          * We set the new interval's savedEnd to 0, since it will always be
  1319          * adjacent to the old interval, so it never needs to be extended.
  1320          */
  1321         var.lifetime = cx->typeLifoAlloc().new_<Lifetime>(start, 0, var.lifetime);
  1322         if (!var.lifetime) {
  1323             js_ReportOutOfMemory(cx);
   1324             return false;
   1325         }
  1326         var.lifetime->end = offset;
  1327     } else {
  1328         var.saved = var.lifetime;
  1329         var.savedEnd = 0;
  1330         var.lifetime = nullptr;
   1332         saved[savedCount++] = &var;
   1333     }
   1335     return true;
   1336 }
  1338 inline bool
  1339 ScriptAnalysis::extendVariable(JSContext *cx, LifetimeVariable &var,
   1340                                unsigned start, unsigned end)
   1341 {
  1342     JS_ASSERT(var.lifetime);
  1343     if (var.ensured) {
  1344         /*
  1345          * If we are still ensured to be live, the try block must scope over
  1346          * the loop, in which case the variable is already guaranteed to be
  1347          * live for the entire loop.
  1348          */
  1349         JS_ASSERT(var.lifetime->start < start);
   1350         return true;
   1351     }
  1353     var.lifetime->start = start;
  1355     /*
  1356      * Consider this code:
  1358      *   while (...) { (#1)
  1359      *       use x;    (#2)
  1360      *       ...
  1361      *       x = ...;  (#3)
  1362      *       ...
  1363      *   }             (#4)
  1365      * Just before analyzing the while statement, there would be a live range
  1366      * from #1..#2 and a "point range" at #3. The job of extendVariable is to
  1367      * create a new live range from #3..#4.
  1369      * However, more extensions may be required if the definition of x is
  1370      * conditional. Consider the following.
  1372      *   while (...) {     (#1)
  1373      *       use x;        (#2)
  1374      *       ...
  1375      *       if (...)      (#5)
  1376      *           x = ...;  (#3)
  1377      *       ...
  1378      *   }                 (#4)
  1380      * Assume that x is not used after the loop. Then, before extendVariable is
  1381      * run, the live ranges would be the same as before (#1..#2 and #3..#3). We
  1382      * still need to create a range from #3..#4. But, since the assignment at #3
  1383      * may never run, we also need to create a range from #2..#3. This is done
  1384      * as follows.
  1386      * Each time we create a Lifetime, we store the start of the most recently
  1387      * seen sequence of conditional code in the Lifetime's savedEnd field. So,
  1388      * when creating the Lifetime at #2, we set the Lifetime's savedEnd to
  1389      * #5. (The start of the most recent conditional is cached in each
  1390      * variable's savedEnd field.) Consequently, extendVariable is able to
  1391      * create a new interval from #2..#5 using the savedEnd field of the
  1392      * existing #1..#2 interval.
  1393      */
  1395     Lifetime *segment = var.lifetime;
  1396     while (segment && segment->start < end) {
  1397         uint32_t savedEnd = segment->savedEnd;
  1398         if (!segment->next || segment->next->start >= end) {
  1399             /*
  1400              * savedEnd is only set for variables killed in the middle of the
  1401              * loop. Make a tail segment connecting the last use with the
  1402              * back edge.
  1403              */
  1404             if (segment->end >= end) {
  1405                 /* Variable known to be live after the loop finishes. */
   1406                 break;
   1407             }
   1408             savedEnd = end;
   1409         }
  1410         JS_ASSERT(savedEnd <= end);
  1411         if (savedEnd > segment->end) {
  1412             Lifetime *tail = cx->typeLifoAlloc().new_<Lifetime>(savedEnd, 0, segment->next);
  1413             if (!tail) {
  1414                 js_ReportOutOfMemory(cx);
   1415                 return false;
   1416             }
  1417             tail->start = segment->end;
  1419             /*
  1420              * Clear the segment's saved end, but preserve in the tail if this
  1421              * is the last segment in the loop and the variable is killed in an
  1422              * outer loop before the backedge.
  1423              */
  1424             if (segment->savedEnd > end) {
  1425                 JS_ASSERT(savedEnd == end);
   1426                 tail->savedEnd = segment->savedEnd;
   1427             }
  1428             segment->savedEnd = 0;
  1430             segment->next = tail;
  1431             segment = tail->next;
  1432         } else {
  1433             JS_ASSERT(segment->savedEnd == 0);
   1434             segment = segment->next;
   1435         }
   1436     }
   1437     return true;
   1438 }
  1440 inline void
   1441 ScriptAnalysis::ensureVariable(LifetimeVariable &var, unsigned until)
   1442 {
  1443     JS_ASSERT(var.lifetime);
  1445     /*
  1446      * If we are already ensured, the current range we are trying to ensure
  1447      * should already be included.
  1448      */
  1449     if (var.ensured) {
  1450         JS_ASSERT(var.lifetime->start <= until);
   1451         return;
   1452     }
  1454     JS_ASSERT(until < var.lifetime->start);
  1455     var.lifetime->start = until;
   1456     var.ensured = true;
   1457 }
  1459 /////////////////////////////////////////////////////////////////////
  1460 // SSA Analysis
  1461 /////////////////////////////////////////////////////////////////////
  1463 /* Current value for a variable or stack value, as tracked during SSA. */
   1464 struct SSAValueInfo
   1465 {
  1466     SSAValue v;
  1468     /*
  1469      * Sizes of branchTargets the last time this slot was written. Branches less
  1470      * than this threshold do not need to be inspected if the slot is written
  1471      * again, as they will already reflect the slot's value at the branch.
  1472      */
  1473     int32_t branchSize;
  1474 };
  1476 bool
   1477 ScriptAnalysis::analyzeSSA(JSContext *cx)
   1478 {
  1479     JS_ASSERT(cx->compartment()->activeAnalysis);
  1480     JS_ASSERT(codeArray);
  1481     JS_ASSERT(lifetimes);
  1483     LifoAlloc &alloc = cx->typeLifoAlloc();
  1484     unsigned maxDepth = script_->nslots() - script_->nfixed();
  1486     /*
  1487      * Current value of each variable and stack value. Empty for missing or
  1488      * untracked entries, i.e. escaping locals and arguments.
  1489      */
  1490     SSAValueInfo *values = cx->pod_calloc<SSAValueInfo>(numSlots + maxDepth);
  1491     if (!values) {
  1492         js_ReportOutOfMemory(cx);
   1493         return false;
   1494     }
  1495     struct FreeSSAValues {
  1496         SSAValueInfo *values;
  1497         FreeSSAValues(SSAValueInfo *values) : values(values) {}
  1498         ~FreeSSAValues() { js_free(values); }
  1499     } free(values);
  1501     SSAValueInfo *stack = values + numSlots;
  1502     uint32_t stackDepth = 0;
  1504     for (uint32_t slot = ArgSlot(0); slot < numSlots; slot++) {
  1505         if (trackSlot(slot))
   1506             values[slot].v.initInitial(slot);
   1507     }
  1509     /*
  1510      * All target offsets for forward jumps we have seen (including ones whose
  1511      * target we have advanced past). We lazily add pending entries at these
  1512      * targets for the original value of variables modified before the branch
  1513      * rejoins.
  1514      */
  1515     Vector<uint32_t> branchTargets(cx);
  1517     /*
  1518      * Subset of branchTargets which are exception handlers at future offsets.
  1519      * Any new value of a variable modified before the target is reached is a
  1520      * potential value at that target, along with the lazy original value.
  1521      */
  1522     Vector<uint32_t> exceptionTargets(cx);
  1524     uint32_t offset = 0;
  1525     while (offset < script_->length()) {
  1526         jsbytecode *pc = script_->offsetToPC(offset);
  1527         JSOp op = (JSOp)*pc;
  1529         uint32_t successorOffset = offset + GetBytecodeLength(pc);
  1531         Bytecode *code = maybeCode(pc);
  1532         if (!code) {
  1533             offset = successorOffset;
   1534             continue;
   1535         }
  1537         if (code->exceptionEntry) {
  1538             /* Remove from exception targets list, which reflects only future targets. */
  1539             for (size_t i = 0; i < exceptionTargets.length(); i++) {
  1540                 if (exceptionTargets[i] == offset) {
  1541                     exceptionTargets[i] = exceptionTargets.back();
  1542                     exceptionTargets.popBack();
   1543                     break;
   1544                 }
   1545             }
   1546         }
  1548         if (code->stackDepth > stackDepth)
  1549             PodZero(stack + stackDepth, code->stackDepth - stackDepth);
  1550         stackDepth = code->stackDepth;
  1552         if (op == JSOP_LOOPHEAD && code->loop) {
  1553             /*
  1554              * Make sure there is a pending value array for phi nodes at the
  1555              * loop head. We won't be able to clear these until we reach the
  1556              * loop's back edge.
  1558              * We need phi nodes for all variables which might be modified
  1559              * during the loop. This ensures that in the loop body we have
  1560              * already updated state to reflect possible changes that happen
  1561              * before the back edge, and don't need to go back and fix things
  1562              * up when we *do* get to the back edge. This could be made lazier.
  1564              * We don't make phi nodes for values on the stack at the head of
  1565              * the loop. These may be popped during the loop (i.e. for ITER
  1566              * loops), but in such cases the original value is pushed back.
  1567              */
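            /*
             * Editorial illustration (hypothetical script, not from the
             * upstream file): in
             *
             *     var x = 0;
             *     while (cond)
             *         x = x + 1;
             *
             * 'x' is written in the loop body, so a phi node for 'x' is
             * created eagerly at the JSOP_LOOPHEAD, merging the definition
             * flowing in from before the loop with the one produced on the
             * back edge. Reads of 'x' in the body then see the phi rather
             * than the pre-loop definition.
             */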
  1568             Vector<SlotValue> *&pending = code->pendingValues;
  1569             if (!pending) {
  1570                 pending = cx->new_< Vector<SlotValue> >(cx);
  1571                 if (!pending) {
  1572                     js_ReportOutOfMemory(cx);
  1573                     return false;
  1574                 }
  1575             }
  1577             /*
  1578              * Make phi nodes and update state for slots which are already in
  1579              * pending from previous branches to the loop head, and which are
  1580              * modified in the body of the loop.
  1581              */
  1582             for (unsigned i = 0; i < pending->length(); i++) {
  1583                 SlotValue &v = (*pending)[i];
  1584                 if (v.slot < numSlots && liveness(v.slot).firstWrite(code->loop) != UINT32_MAX) {
  1585                     if (v.value.kind() != SSAValue::PHI || v.value.phiOffset() != offset) {
  1586                         JS_ASSERT(v.value.phiOffset() < offset);
  1587                         SSAValue ov = v.value;
  1588                         if (!makePhi(cx, v.slot, offset, &ov))
  1589                             return false;
  1590                         if (!insertPhi(cx, ov, v.value))
  1591                             return false;
  1592                         v.value = ov;
  1593                     }
  1594                 }
  1595                 if (code->fallthrough || code->jumpFallthrough) {
  1596                     if (!mergeValue(cx, offset, values[v.slot].v, &v))
  1597                         return false;
  1598                 }
  1599                 if (!mergeBranchTarget(cx, values[v.slot], v.slot, branchTargets, offset - 1))
  1600                     return false;
  1601                 values[v.slot].v = v.value;
  1602             }
  1604             /*
  1605              * Make phi nodes for all other slots which might be modified
  1606              * during the loop. This ensures that in the loop body we have
  1607              * already updated state to reflect possible changes that happen
  1608              * before the back edge, and don't need to go back and fix things
  1609              * up when we *do* get to the back edge. This could be made lazier.
  1610              */
  1611             for (uint32_t slot = ArgSlot(0); slot < numSlots + stackDepth; slot++) {
  1612                 if (slot >= numSlots || !trackSlot(slot))
  1613                     continue;
  1614                 if (liveness(slot).firstWrite(code->loop) == UINT32_MAX)
  1615                     continue;
  1616                 if (values[slot].v.kind() == SSAValue::PHI && values[slot].v.phiOffset() == offset) {
  1617                     /* There is already a pending entry for this slot. */
  1618                     continue;
  1619                 }
  1620                 SSAValue ov;
  1621                 if (!makePhi(cx, slot, offset, &ov))
  1622                     return false;
  1623                 if (code->fallthrough || code->jumpFallthrough) {
  1624                     if (!insertPhi(cx, ov, values[slot].v))
  1625                         return false;
  1626                 }
  1627                 if (!mergeBranchTarget(cx, values[slot], slot, branchTargets, offset - 1))
  1628                     return false;
  1629                 values[slot].v = ov;
  1630                 if (!pending->append(SlotValue(slot, ov))) {
  1631                     js_ReportOutOfMemory(cx);
  1632                     return false;
  1633                 }
  1634             }
  1635         } else if (code->pendingValues) {
  1636             /*
  1637              * New values at this point from a previous jump to this bytecode.
  1638              * If there is fallthrough from the previous instruction, merge
  1639              * with the current state and create phi nodes where necessary,
  1640              * otherwise replace current values with the new values.
  1642              * Catch blocks are artificially treated as having fallthrough, so
  1643              * that values written inside the block but not subsequently
  1644              * overwritten are picked up.
  1645              */
  1646             bool exception = getCode(offset).exceptionEntry;
  1647             Vector<SlotValue> *pending = code->pendingValues;
  1648             for (unsigned i = 0; i < pending->length(); i++) {
  1649                 SlotValue &v = (*pending)[i];
  1650                 if (code->fallthrough || code->jumpFallthrough ||
  1651                     (exception && values[v.slot].v.kind() != SSAValue::EMPTY)) {
  1652                     if (!mergeValue(cx, offset, values[v.slot].v, &v))
  1653                         return false;
  1654                 }
  1655                 if (!mergeBranchTarget(cx, values[v.slot], v.slot, branchTargets, offset))
  1656                     return false;
  1657                 values[v.slot].v = v.value;
  1658             }
  1659             if (!freezeNewValues(cx, offset))
  1660                 return false;
  1661         }
  1663         unsigned nuses = GetUseCount(script_, offset);
  1664         unsigned ndefs = GetDefCount(script_, offset);
  1665         JS_ASSERT(stackDepth >= nuses);
  1667         unsigned xuses = ExtendedUse(pc) ? nuses + 1 : nuses;
  1669         if (xuses) {
  1670             code->poppedValues = alloc.newArray<SSAValue>(xuses);
  1671             if (!code->poppedValues) {
  1672                 js_ReportOutOfMemory(cx);
  1673                 return false;
  1674             }
  1675             for (unsigned i = 0; i < nuses; i++) {
  1676                 SSAValue &v = stack[stackDepth - 1 - i].v;
  1677                 code->poppedValues[i] = v;
  1678                 v.clear();
  1679             }
  1680             if (xuses > nuses) {
  1681                 /*
  1682                  * For SETLOCAL, etc. opcodes, add an extra popped value
  1683                  * holding the value of the local before the op.
  1684                  */
  1685                 uint32_t slot = GetBytecodeSlot(script_, pc);
  1686                 if (trackSlot(slot))
  1687                     code->poppedValues[nuses] = values[slot].v;
  1688                 else
  1689                     code->poppedValues[nuses].clear();
  1690             }
  1692             if (xuses) {
  1693                 SSAUseChain *useChains = alloc.newArray<SSAUseChain>(xuses);
  1694                 if (!useChains) {
  1695                     js_ReportOutOfMemory(cx);
  1696                     return false;
  1697                 }
  1698                 PodZero(useChains, xuses);
  1699                 for (unsigned i = 0; i < xuses; i++) {
  1700                     const SSAValue &v = code->poppedValues[i];
  1701                     if (trackUseChain(v)) {
  1702                         SSAUseChain *&uses = useChain(v);
  1703                         useChains[i].popped = true;
  1704                         useChains[i].offset = offset;
  1705                         useChains[i].u.which = i;
  1706                         useChains[i].next = uses;
  1707                         uses = &useChains[i];
  1708                     }
  1709                 }
  1710             }
  1711         }
  1713         stackDepth -= nuses;
  1715         for (unsigned i = 0; i < ndefs; i++)
  1716             stack[stackDepth + i].v.initPushed(offset, i);
  1718         unsigned xdefs = ExtendedDef(pc) ? ndefs + 1 : ndefs;
  1719         if (xdefs) {
  1720             code->pushedUses = alloc.newArray<SSAUseChain *>(xdefs);
  1721             if (!code->pushedUses) {
  1722                 js_ReportOutOfMemory(cx);
  1723                 return false;
  1724             }
  1725             PodZero(code->pushedUses, xdefs);
  1726         }
  1728         stackDepth += ndefs;
  1730         if (op == JSOP_SETARG || op == JSOP_SETLOCAL) {
  1731             uint32_t slot = GetBytecodeSlot(script_, pc);
  1732             if (trackSlot(slot)) {
  1733                 if (!mergeBranchTarget(cx, values[slot], slot, branchTargets, offset))
  1734                     return false;
  1735                 if (!mergeExceptionTarget(cx, values[slot].v, slot, exceptionTargets))
  1736                     return false;
  1737                 values[slot].v.initWritten(slot, offset);
  1738             }
  1739         }
  1741         switch (op) {
  1742           case JSOP_GETARG:
  1743           case JSOP_GETLOCAL: {
  1744             uint32_t slot = GetBytecodeSlot(script_, pc);
  1745             if (trackSlot(slot)) {
  1746                 /*
  1747                  * Propagate the current value of the local to the pushed value,
  1748                  * and remember it with an extended use on the opcode.
  1749                  */
  1750                 stack[stackDepth - 1].v = code->poppedValues[0] = values[slot].v;
  1751             }
  1752             break;
  1753           }
  1755           /* Short circuit ops which push back one of their operands. */
  1757           case JSOP_MOREITER:
  1758             stack[stackDepth - 2].v = code->poppedValues[0];
  1759             break;
  1761           case JSOP_MUTATEPROTO:
  1762           case JSOP_INITPROP:
  1763           case JSOP_INITPROP_GETTER:
  1764           case JSOP_INITPROP_SETTER:
  1765             stack[stackDepth - 1].v = code->poppedValues[1];
  1766             break;
  1768           case JSOP_SPREAD:
  1769           case JSOP_INITELEM_INC:
  1770             stack[stackDepth - 2].v = code->poppedValues[2];
  1771             break;
  1773           case JSOP_INITELEM_ARRAY:
  1774             stack[stackDepth - 1].v = code->poppedValues[1];
  1775             break;
  1777           case JSOP_INITELEM:
  1778           case JSOP_INITELEM_GETTER:
  1779           case JSOP_INITELEM_SETTER:
  1780             stack[stackDepth - 1].v = code->poppedValues[2];
  1781             break;
  1783           case JSOP_DUP:
  1784             stack[stackDepth - 1].v = stack[stackDepth - 2].v = code->poppedValues[0];
  1785             break;
  1787           case JSOP_DUP2:
  1788             stack[stackDepth - 1].v = stack[stackDepth - 3].v = code->poppedValues[0];
  1789             stack[stackDepth - 2].v = stack[stackDepth - 4].v = code->poppedValues[1];
  1790             break;
  1792           case JSOP_DUPAT: {
  1793             unsigned pickedDepth = GET_UINT24(pc);
  1794             JS_ASSERT(pickedDepth < stackDepth - 1);
  1795             stack[stackDepth - 1].v = stack[stackDepth - 2 - pickedDepth].v;
  1796             break;
  1797           }
  1799           case JSOP_SWAP:
  1800             /* Swap is like pick 1. */
  1801           case JSOP_PICK: {
  1802             unsigned pickedDepth = (op == JSOP_SWAP ? 1 : pc[1]);
  1803             stack[stackDepth - 1].v = code->poppedValues[pickedDepth];
  1804             for (unsigned i = 0; i < pickedDepth; i++)
  1805                 stack[stackDepth - 2 - i].v = code->poppedValues[i];
  1806             break;
  1807           }
  1809           /*
  1810            * Switch and try blocks preserve the stack between the original op
  1811            * and all case statements or exception/finally handlers.
  1812            */
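          /*
           * Editorial note (not from the upstream file): checkBranchTarget is
           * called below for each case target and for the default target, and
           * it appends a pending entry for every value still on the stack, so
           * all case bodies observe the same stack values as the switch
           * opcode itself.
           */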
  1814           case JSOP_TABLESWITCH: {
  1815             unsigned defaultOffset = offset + GET_JUMP_OFFSET(pc);
  1816             jsbytecode *pc2 = pc + JUMP_OFFSET_LEN;
  1817             int32_t low = GET_JUMP_OFFSET(pc2);
  1818             pc2 += JUMP_OFFSET_LEN;
  1819             int32_t high = GET_JUMP_OFFSET(pc2);
  1820             pc2 += JUMP_OFFSET_LEN;
  1822             for (int32_t i = low; i <= high; i++) {
  1823                 unsigned targetOffset = offset + GET_JUMP_OFFSET(pc2);
  1824                 if (targetOffset != offset) {
  1825                     if (!checkBranchTarget(cx, targetOffset, branchTargets, values, stackDepth))
  1826                         return false;
  1827                 }
  1828                 pc2 += JUMP_OFFSET_LEN;
  1829             }
  1831             if (!checkBranchTarget(cx, defaultOffset, branchTargets, values, stackDepth))
  1832                 return false;
  1833             break;
  1834           }
  1836           case JSOP_TRY: {
  1837             JSTryNote *tn = script_->trynotes()->vector;
  1838             JSTryNote *tnlimit = tn + script_->trynotes()->length;
  1839             for (; tn < tnlimit; tn++) {
  1840                 unsigned startOffset = script_->mainOffset() + tn->start;
  1841                 if (startOffset == offset + 1) {
  1842                     unsigned catchOffset = startOffset + tn->length;
  1844                     if (tn->kind != JSTRY_ITER && tn->kind != JSTRY_LOOP) {
  1845                         if (!checkBranchTarget(cx, catchOffset, branchTargets, values, stackDepth))
  1846                             return false;
  1847                         if (!checkExceptionTarget(cx, catchOffset, exceptionTargets))
  1848                             return false;
  1849                     }
  1850                 }
  1851             }
  1852             break;
  1853           }
  1855           case JSOP_THROW:
  1856           case JSOP_RETURN:
  1857           case JSOP_RETRVAL:
  1858             if (!mergeAllExceptionTargets(cx, values, exceptionTargets))
  1859                 return false;
  1860             break;
  1862           default:;
  1863         }
  1865         if (IsJumpOpcode(op)) {
  1866             unsigned targetOffset = FollowBranch(cx, script_, offset);
  1867             if (!checkBranchTarget(cx, targetOffset, branchTargets, values, stackDepth))
  1868                 return false;
  1870             /*
  1871              * If this is a back edge, we're done with the loop and can freeze
  1872              * the phi values at the head now.
  1873              */
  1874             if (targetOffset < offset) {
  1875                 if (!freezeNewValues(cx, targetOffset))
  1876                     return false;
  1877             }
  1878         }
  1880         offset = successorOffset;
  1881     }
  1883     return true;
  1884 }
  1886 /* Get a phi node's capacity for a given length. */
  1887 static inline unsigned
  1888 PhiNodeCapacity(unsigned length)
  1889 {
  1890     if (length <= 4)
  1891         return 4;
  1893     return 1 << (FloorLog2(length - 1) + 1);
  1894 }
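/*
 * Editorial note (not from the upstream file): the capacity is the next power
 * of two, with a floor of four, e.g.
 *
 *     PhiNodeCapacity(0..4)  == 4
 *     PhiNodeCapacity(5..8)  == 8
 *     PhiNodeCapacity(9..16) == 16
 *
 * so insertPhi below only reallocates a phi node's options array when its
 * length crosses one of these boundaries.
 */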
  1896 bool
  1897 ScriptAnalysis::makePhi(JSContext *cx, uint32_t slot, uint32_t offset, SSAValue *pv)
  1898 {
  1899     SSAPhiNode *node = cx->typeLifoAlloc().new_<SSAPhiNode>();
  1900     SSAValue *options = cx->typeLifoAlloc().newArray<SSAValue>(PhiNodeCapacity(0));
  1901     if (!node || !options) {
  1902         js_ReportOutOfMemory(cx);
  1903         return false;
  1904     }
  1905     node->slot = slot;
  1906     node->options = options;
  1907     pv->initPhi(offset, node);
  1908     return true;
  1909 }
  1911 bool
  1912 ScriptAnalysis::insertPhi(JSContext *cx, SSAValue &phi, const SSAValue &v)
  1913 {
  1914     JS_ASSERT(phi.kind() == SSAValue::PHI);
  1915     SSAPhiNode *node = phi.phiNode();
  1917     /*
  1918      * Filter dupes inserted into small nodes to keep things clean and avoid
  1919      * extra type constraints, but don't bother on large phi nodes to avoid
  1920      * quadratic behavior.
  1921      */
  1922     if (node->length <= 8) {
  1923         for (unsigned i = 0; i < node->length; i++) {
  1924             if (v == node->options[i])
  1925                 return true;
  1926         }
  1927     }
  1929     if (trackUseChain(v)) {
  1930         SSAUseChain *&uses = useChain(v);
  1932         SSAUseChain *use = cx->typeLifoAlloc().new_<SSAUseChain>();
  1933         if (!use) {
  1934             js_ReportOutOfMemory(cx);
  1935             return false;
  1936         }
  1938         use->popped = false;
  1939         use->offset = phi.phiOffset();
  1940         use->u.phi = node;
  1941         use->next = uses;
  1942         uses = use;
  1943     }
  1945     if (node->length < PhiNodeCapacity(node->length)) {
  1946         node->options[node->length++] = v;
  1947         return true;
  1948     }
  1950     SSAValue *newOptions =
  1951         cx->typeLifoAlloc().newArray<SSAValue>(PhiNodeCapacity(node->length + 1));
  1952     if (!newOptions) {
  1953         js_ReportOutOfMemory(cx);
  1954         return false;
  1955     }
  1957     PodCopy(newOptions, node->options, node->length);
  1958     node->options = newOptions;
  1959     node->options[node->length++] = v;
  1961     return true;
  1962 }
  1964 inline bool
  1965 ScriptAnalysis::mergeValue(JSContext *cx, uint32_t offset, const SSAValue &v, SlotValue *pv)
  1966 {
  1967     /* Make sure that v is accounted for in the pending value or phi value at pv. */
  1968     JS_ASSERT(v.kind() != SSAValue::EMPTY && pv->value.kind() != SSAValue::EMPTY);
  1970     if (v == pv->value)
  1971         return true;
  1973     if (pv->value.kind() != SSAValue::PHI || pv->value.phiOffset() < offset) {
  1974         SSAValue ov = pv->value;
  1975         return makePhi(cx, pv->slot, offset, &pv->value)
  1976             && insertPhi(cx, pv->value, v)
  1977             && insertPhi(cx, pv->value, ov);
  1978     }
  1980     JS_ASSERT(pv->value.phiOffset() == offset);
  1981     return insertPhi(cx, pv->value, v);
  1982 }
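/*
 * Editorial note (not from the upstream file): mergeValue has three cases.
 * If the incoming value already equals the pending value, nothing changes.
 * If the pending value is not a phi node created at this offset, a fresh phi
 * is made here and both the old pending value and the incoming value become
 * its options. Otherwise the phi already lives at this offset and the
 * incoming value is simply appended to it.
 */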
  1984 bool
  1985 ScriptAnalysis::checkPendingValue(JSContext *cx, const SSAValue &v, uint32_t slot,
  1986                                   Vector<SlotValue> *pending)
  1987 {
  1988     JS_ASSERT(v.kind() != SSAValue::EMPTY);
  1990     for (unsigned i = 0; i < pending->length(); i++) {
  1991         if ((*pending)[i].slot == slot)
  1992             return true;
  1993     }
  1995     if (!pending->append(SlotValue(slot, v))) {
  1996         js_ReportOutOfMemory(cx);
  1997         return false;
  1998     }
  2000     return true;
  2001 }
  2003 bool
  2004 ScriptAnalysis::checkBranchTarget(JSContext *cx, uint32_t targetOffset,
  2005                                   Vector<uint32_t> &branchTargets,
  2006                                   SSAValueInfo *values, uint32_t stackDepth)
  2007 {
  2008     unsigned targetDepth = getCode(targetOffset).stackDepth;
  2009     JS_ASSERT(targetDepth <= stackDepth);
  2011     /*
  2012      * If there is already an active branch to target, make sure its pending
  2013      * values reflect any changes made since the first branch. Otherwise, add a
  2014      * new pending branch and determine its pending values lazily.
  2015      */
  2016     Vector<SlotValue> *&pending = getCode(targetOffset).pendingValues;
  2017     if (pending) {
  2018         for (unsigned i = 0; i < pending->length(); i++) {
  2019             SlotValue &v = (*pending)[i];
  2020             if (!mergeValue(cx, targetOffset, values[v.slot].v, &v))
  2021                 return false;
  2022         }
  2023     } else {
  2024         pending = cx->new_< Vector<SlotValue> >(cx);
  2025         if (!pending || !branchTargets.append(targetOffset)) {
  2026             js_ReportOutOfMemory(cx);
  2027             return false;
  2028         }
  2029     }
  2031     /*
  2032      * Make sure there is a pending entry for each value on the stack.
  2033      * The number of stack entries at join points is usually zero, and
  2034      * we don't want to look at the active branches while popping and
  2035      * pushing values in each opcode.
  2036      */
  2037     for (unsigned i = 0; i < targetDepth; i++) {
  2038         uint32_t slot = StackSlot(script_, i);
  2039         if (!checkPendingValue(cx, values[slot].v, slot, pending))
  2040             return false;
  2041     }
  2043     return true;
  2044 }
  2046 bool
  2047 ScriptAnalysis::checkExceptionTarget(JSContext *cx, uint32_t catchOffset,
  2048                                      Vector<uint32_t> &exceptionTargets)
  2049 {
  2050     JS_ASSERT(getCode(catchOffset).exceptionEntry);
  2052     /*
  2053      * The catch offset will already be in the branch targets, just check
  2054      * whether this is already a known exception target.
  2055      */
  2056     for (unsigned i = 0; i < exceptionTargets.length(); i++) {
  2057         if (exceptionTargets[i] == catchOffset)
  2058             return true;
  2059     }
  2060     if (!exceptionTargets.append(catchOffset)) {
  2061         js_ReportOutOfMemory(cx);
  2062         return false;
  2063     }
  2065     return true;
  2066 }
  2068 bool
  2069 ScriptAnalysis::mergeBranchTarget(JSContext *cx, SSAValueInfo &value, uint32_t slot,
  2070                                   const Vector<uint32_t> &branchTargets, uint32_t currentOffset)
  2071 {
  2072     if (slot >= numSlots) {
  2073         /*
  2074          * There is no need to lazily check that there are pending values at
  2075          * branch targets for slots on the stack, these are added to pending
  2076          * eagerly.
  2077          */
  2078         return true;
  2079     }
  2081     JS_ASSERT(trackSlot(slot));
  2083     /*
  2084      * Before changing the value of a variable, make sure the old value is
  2085      * marked at the target of any branches jumping over the current opcode.
  2086      * Only look at new branch targets which have appeared since the last time
  2087      * the variable was written.
  2088      */
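    /*
     * Editorial illustration (hypothetical offsets, not from the upstream
     * file): suppose 'x' is about to be overwritten at offset 55 and the
     * branch targets recorded since its last write are at offsets 40, 60 and
     * 80. Only the targets past the current opcode (60 and 80) get the old
     * value of 'x' added to their pending entries; value.branchSize then
     * remembers how far the target list had grown, so those targets are not
     * rescanned when 'x' is written again.
     */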
  2089     for (int i = branchTargets.length() - 1; i >= value.branchSize; i--) {
  2090         if (branchTargets[i] <= currentOffset)
  2091             continue;
  2093         const Bytecode &code = getCode(branchTargets[i]);
  2095         Vector<SlotValue> *pending = code.pendingValues;
  2096         if (!checkPendingValue(cx, value.v, slot, pending))
  2097             return false;
  2098     }
  2100     value.branchSize = branchTargets.length();
  2101     return true;
  2102 }
  2104 bool
  2105 ScriptAnalysis::mergeExceptionTarget(JSContext *cx, const SSAValue &value, uint32_t slot,
  2106                                      const Vector<uint32_t> &exceptionTargets)
  2107 {
  2108     JS_ASSERT(trackSlot(slot));
  2110     /*
  2111      * Update the value at exception targets with the value of a variable
  2112      * before it is overwritten. Unlike mergeBranchTarget, this is done whether
  2113      * or not the overwritten value is the value of the variable at the
  2114      * original branch. Values for a variable which are written after the
  2115      * try block starts and overwritten before it is finished can still be
  2116      * seen at exception handlers via exception paths.
  2117      */
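    /*
     * Editorial illustration (hypothetical script, not from the upstream
     * file): in
     *
     *     try {
     *         x = f();
     *         x = g();
     *     } catch (e) {
     *         use(x);
     *     }
     *
     * the value produced by f() is observable in the catch block if g()
     * throws, even though it is overwritten before the try completes, so
     * every overwrite of a tracked variable merges the old value into the
     * pending entries of all active exception targets.
     */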
  2118     for (unsigned i = 0; i < exceptionTargets.length(); i++) {
  2119         unsigned offset = exceptionTargets[i];
  2120         Vector<SlotValue> *pending = getCode(offset).pendingValues;
  2122         bool duplicate = false;
  2123         for (unsigned i = 0; i < pending->length(); i++) {
  2124             if ((*pending)[i].slot == slot) {
  2125                 duplicate = true;
  2126                 SlotValue &v = (*pending)[i];
  2127                 if (!mergeValue(cx, offset, value, &v))
  2128                     return false;
  2129                 break;
  2130             }
  2131         }
  2133         if (!duplicate && !pending->append(SlotValue(slot, value))) {
  2134             js_ReportOutOfMemory(cx);
  2135             return false;
  2136         }
  2137     }
  2139     return true;
  2140 }
  2142 bool
  2143 ScriptAnalysis::mergeAllExceptionTargets(JSContext *cx, SSAValueInfo *values,
  2144                                          const Vector<uint32_t> &exceptionTargets)
  2145 {
  2146     for (unsigned i = 0; i < exceptionTargets.length(); i++) {
  2147         Vector<SlotValue> *pending = getCode(exceptionTargets[i]).pendingValues;
  2148         for (unsigned i = 0; i < pending->length(); i++) {
  2149             const SlotValue &v = (*pending)[i];
  2150             if (trackSlot(v.slot)) {
  2151                 if (!mergeExceptionTarget(cx, values[v.slot].v, v.slot, exceptionTargets))
  2152                     return false;
  2153             }
  2154         }
  2155     }
  2156     return true;
  2157 }
  2159 bool
  2160 ScriptAnalysis::freezeNewValues(JSContext *cx, uint32_t offset)
  2161 {
  2162     Bytecode &code = getCode(offset);
  2164     Vector<SlotValue> *pending = code.pendingValues;
  2165     code.pendingValues = nullptr;
  2167     unsigned count = pending->length();
  2168     if (count == 0) {
  2169         js_delete(pending);
  2170         return true;
  2171     }
  2173     code.newValues = cx->typeLifoAlloc().newArray<SlotValue>(count + 1);
  2174     if (!code.newValues) {
  2175         js_ReportOutOfMemory(cx);
  2176         return false;
  2177     }
  2179     for (unsigned i = 0; i < count; i++)
  2180         code.newValues[i] = (*pending)[i];
  2181     code.newValues[count].slot = 0;
  2182     code.newValues[count].value.clear();
  2184     js_delete(pending);
  2185     return true;
  2186 }
  2188 bool
  2189 ScriptAnalysis::needsArgsObj(JSContext *cx, SeenVector &seen, const SSAValue &v)
  2190 {
  2191     /*
  2192      * trackUseChain is false for initial values of variables, which
  2193      * cannot hold the script's arguments object.
  2194      */
  2195     if (!trackUseChain(v))
  2196         return false;
  2198     for (unsigned i = 0; i < seen.length(); i++) {
  2199         if (v == seen[i])
  2200             return false;
  2201     }
  2202     if (!seen.append(v))
  2203         return true;
  2205     SSAUseChain *use = useChain(v);
  2206     while (use) {
  2207         if (needsArgsObj(cx, seen, use))
  2208             return true;
  2209         use = use->next;
  2210     }
  2212     return false;
  2213 }
  2215 bool
  2216 ScriptAnalysis::needsArgsObj(JSContext *cx, SeenVector &seen, SSAUseChain *use)
  2217 {
  2218     if (!use->popped)
  2219         return needsArgsObj(cx, seen, SSAValue::PhiValue(use->offset, use->u.phi));
  2221     jsbytecode *pc = script_->offsetToPC(use->offset);
  2222     JSOp op = JSOp(*pc);
  2224     if (op == JSOP_POP || op == JSOP_POPN)
  2225         return false;
  2227     /* We can read the frame's arguments directly for f.apply(x, arguments). */
  2228     if (op == JSOP_FUNAPPLY && GET_ARGC(pc) == 2 && use->u.which == 0) {
  2229         argumentsContentsObserved_ = true;
  2230         return false;
  2231     }
  2233     /* arguments[i] can read fp->canonicalActualArg(i) directly. */
  2234     if (op == JSOP_GETELEM && use->u.which == 1) {
  2235         argumentsContentsObserved_ = true;
  2236         return false;
  2237     }
  2239     /* arguments.length can read fp->numActualArgs() directly. */
  2240     if (op == JSOP_LENGTH)
  2241         return false;
  2243     /* Allow assignments to non-closed locals (but not arguments). */
  2245     if (op == JSOP_SETLOCAL) {
  2246         uint32_t slot = GetBytecodeSlot(script_, pc);
  2247         if (!trackSlot(slot))
  2248             return true;
  2249         return needsArgsObj(cx, seen, SSAValue::PushedValue(use->offset, 0)) ||
  2250                needsArgsObj(cx, seen, SSAValue::WrittenVar(slot, use->offset));
  2251     }
  2253     if (op == JSOP_GETLOCAL)
  2254         return needsArgsObj(cx, seen, SSAValue::PushedValue(use->offset, 0));
  2256     return true;
  2257 }
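/*
 * Editorial note (hypothetical examples, not from the upstream file): uses
 * such as f.apply(x, arguments), arguments[i] and arguments.length are
 * recognized above and keep 'arguments' lazy, since the frame's actual
 * arguments can be read directly. Any use that is not handled above, e.g.
 * passing 'arguments' to another function as an ordinary argument, falls
 * through to the final 'return true' and forces a real arguments object.
 */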
  2259 bool
  2260 ScriptAnalysis::needsArgsObj(JSContext *cx)
  2261 {
  2262     JS_ASSERT(script_->argumentsHasVarBinding());
  2264     /*
  2265      * Always construct arguments objects when in debug mode and for generator
  2266      * scripts (generators can be suspended when speculation fails).
  2268      * FIXME: Don't build arguments for ES6 generator expressions.
  2269      */
  2270     if (cx->compartment()->debugMode() || script_->isGenerator())
  2271         return true;
  2273     /*
  2274      * If the script has dynamic name accesses which could reach 'arguments',
  2275      * the parser will already have checked to ensure there are no explicit
  2276      * uses of 'arguments' in the function. If there are such uses, the script
  2277      * will be marked as definitely needing an arguments object.
  2279      * New accesses on 'arguments' can occur through 'eval' or the debugger
  2280      * statement. In the former case, we will dynamically detect the use and
  2281      * mark the arguments optimization as having failed.
  2282      */
  2283     if (script_->bindingsAccessedDynamically())
  2284         return false;
  2286     unsigned pcOff = script_->pcToOffset(script_->argumentsBytecode());
  2288     SeenVector seen(cx);
  2289     if (needsArgsObj(cx, seen, SSAValue::PushedValue(pcOff, 0)))
  2290         return true;
  2292     /*
  2293      * If a script explicitly accesses the contents of 'arguments', and has
  2294      * formals which may be stored as part of a call object, don't use lazy
  2295      * arguments. The compiler can then assume that accesses through
  2296      * arguments[i] will be on unaliased variables.
  2297      */
  2298     if (script_->funHasAnyAliasedFormal() && argumentsContentsObserved_)
  2299         return true;
  2301     return false;
  2302 }
  2304 #ifdef DEBUG
  2306 void
  2307 ScriptAnalysis::printSSA(JSContext *cx)
  2308 {
  2309     types::AutoEnterAnalysis enter(cx);
  2311     printf("\n");
  2313     RootedScript script(cx, script_);
  2314     for (unsigned offset = 0; offset < script_->length(); offset++) {
  2315         Bytecode *code = maybeCode(offset);
  2316         if (!code)
  2317             continue;
  2319         jsbytecode *pc = script_->offsetToPC(offset);
  2321         PrintBytecode(cx, script, pc);
  2323         SlotValue *newv = code->newValues;
  2324         if (newv) {
  2325             while (newv->slot) {
  2326                 if (newv->value.kind() != SSAValue::PHI || newv->value.phiOffset() != offset) {
  2327                     newv++;
  2328                     continue;
  2330                 printf("  phi ");
  2331                 newv->value.print();
  2332                 printf(" [");
  2333                 for (unsigned i = 0; i < newv->value.phiLength(); i++) {
  2334                     if (i)
  2335                         printf(",");
  2336                     newv->value.phiValue(i).print();
  2337                 }
  2338                 printf("]\n");
  2339                 newv++;
  2340             }
  2341         }
  2343         unsigned nuses = GetUseCount(script_, offset);
  2344         unsigned xuses = ExtendedUse(pc) ? nuses + 1 : nuses;
  2346         for (unsigned i = 0; i < xuses; i++) {
  2347             printf("  popped%d: ", i);
  2348             code->poppedValues[i].print();
  2349             printf("\n");
  2350         }
  2351     }
  2353     printf("\n");
  2354 }
  2356 void
  2357 SSAValue::print() const
  2358 {
  2359     switch (kind()) {
  2361       case EMPTY:
  2362         printf("empty");
  2363         break;
  2365       case PUSHED:
  2366         printf("pushed:%05u#%u", pushedOffset(), pushedIndex());
  2367         break;
  2369       case VAR:
  2370         if (varInitial())
  2371             printf("initial:%u", varSlot());
  2372         else
  2373             printf("write:%05u", varOffset());
  2374         break;
  2376       case PHI:
  2377         printf("phi:%05u#%u", phiOffset(), phiSlot());
  2378         break;
  2380       default:
  2381         MOZ_ASSUME_UNREACHABLE("Bad kind");
  2382     }
  2383 }
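/*
 * Editorial note (illustrative values, not from the upstream file): given the
 * format strings above, printSSA renders SSA values roughly as
 *
 *     pushed:00012#0     value pushed at offset 12, stack index 0
 *     write:00034        variable written at offset 34
 *     initial:3          initial value of slot 3
 *     phi:00020#5        phi node at offset 20 for slot 5
 *
 * alongside the disassembly produced by PrintBytecode.
 */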
  2385 void
  2386 ScriptAnalysis::assertMatchingDebugMode()
  2387 {
  2388     JS_ASSERT(!!script_->compartment()->debugMode() == !!originalDebugMode_);
  2389 }
  2391 void
  2392 ScriptAnalysis::assertMatchingStackDepthAtOffset(uint32_t offset, uint32_t stackDepth) {
  2393     JS_ASSERT_IF(maybeCode(offset), getCode(offset).stackDepth == stackDepth);
  2394 }
  2396 #endif  /* DEBUG */
  2398 } // namespace analyze
  2399 } // namespace js
