michael@0: /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- michael@0: * vim: set ts=8 sts=4 et sw=4 tw=99: michael@0: * This Source Code Form is subject to the terms of the Mozilla Public michael@0: * License, v. 2.0. If a copy of the MPL was not distributed with this michael@0: * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ michael@0: michael@0: /* michael@0: * JS bytecode generation. michael@0: */ michael@0: michael@0: #include "frontend/BytecodeEmitter.h" michael@0: michael@0: #include "mozilla/DebugOnly.h" michael@0: #include "mozilla/FloatingPoint.h" michael@0: #include "mozilla/PodOperations.h" michael@0: michael@0: #include michael@0: michael@0: #include "jsapi.h" michael@0: #include "jsatom.h" michael@0: #include "jscntxt.h" michael@0: #include "jsfun.h" michael@0: #include "jsnum.h" michael@0: #include "jsopcode.h" michael@0: #include "jsscript.h" michael@0: #include "jstypes.h" michael@0: #include "jsutil.h" michael@0: michael@0: #include "frontend/Parser.h" michael@0: #include "frontend/TokenStream.h" michael@0: #include "jit/AsmJSLink.h" michael@0: #include "vm/Debugger.h" michael@0: michael@0: #include "jsatominlines.h" michael@0: #include "jsobjinlines.h" michael@0: #include "jsscriptinlines.h" michael@0: michael@0: #include "frontend/ParseMaps-inl.h" michael@0: #include "frontend/ParseNode-inl.h" michael@0: #include "vm/ScopeObject-inl.h" michael@0: michael@0: using namespace js; michael@0: using namespace js::gc; michael@0: using namespace js::frontend; michael@0: michael@0: using mozilla::DebugOnly; michael@0: using mozilla::NumberIsInt32; michael@0: using mozilla::PodCopy; michael@0: michael@0: static bool michael@0: SetSrcNoteOffset(ExclusiveContext *cx, BytecodeEmitter *bce, unsigned index, unsigned which, ptrdiff_t offset); michael@0: michael@0: struct frontend::StmtInfoBCE : public StmtInfoBase michael@0: { michael@0: StmtInfoBCE *down; /* info for enclosing statement */ michael@0: StmtInfoBCE 
*downScope; /* next enclosing lexical scope */ michael@0: michael@0: ptrdiff_t update; /* loop update offset (top if none) */ michael@0: ptrdiff_t breaks; /* offset of last break in loop */ michael@0: ptrdiff_t continues; /* offset of last continue in loop */ michael@0: uint32_t blockScopeIndex; /* index of scope in BlockScopeArray */ michael@0: michael@0: StmtInfoBCE(ExclusiveContext *cx) : StmtInfoBase(cx) {} michael@0: michael@0: /* michael@0: * To reuse space, alias two of the ptrdiff_t fields for use during michael@0: * try/catch/finally code generation and backpatching. michael@0: * michael@0: * Only a loop, switch, or label statement info record can have breaks and michael@0: * continues, and only a for loop has an update backpatch chain, so it's michael@0: * safe to overlay these for the "trying" StmtTypes. michael@0: */ michael@0: michael@0: ptrdiff_t &gosubs() { michael@0: JS_ASSERT(type == STMT_FINALLY); michael@0: return breaks; michael@0: } michael@0: michael@0: ptrdiff_t &guardJump() { michael@0: JS_ASSERT(type == STMT_TRY || type == STMT_FINALLY); michael@0: return continues; michael@0: } michael@0: }; michael@0: michael@0: michael@0: namespace { michael@0: michael@0: struct LoopStmtInfo : public StmtInfoBCE michael@0: { michael@0: int32_t stackDepth; // Stack depth when this loop was pushed. michael@0: uint32_t loopDepth; // Loop depth. michael@0: michael@0: // Can we OSR into Ion from here? True unless there is non-loop state on the stack. 
michael@0: bool canIonOsr; michael@0: michael@0: LoopStmtInfo(ExclusiveContext *cx) : StmtInfoBCE(cx) {} michael@0: michael@0: static LoopStmtInfo* fromStmtInfo(StmtInfoBCE *stmt) { michael@0: JS_ASSERT(stmt->isLoop()); michael@0: return static_cast(stmt); michael@0: } michael@0: }; michael@0: michael@0: } // anonymous namespace michael@0: michael@0: BytecodeEmitter::BytecodeEmitter(BytecodeEmitter *parent, michael@0: Parser *parser, SharedContext *sc, michael@0: HandleScript script, bool insideEval, HandleScript evalCaller, michael@0: bool hasGlobalScope, uint32_t lineNum, EmitterMode emitterMode) michael@0: : sc(sc), michael@0: parent(parent), michael@0: script(sc->context, script), michael@0: prolog(sc->context, lineNum), michael@0: main(sc->context, lineNum), michael@0: current(&main), michael@0: parser(parser), michael@0: evalCaller(evalCaller), michael@0: topStmt(nullptr), michael@0: topScopeStmt(nullptr), michael@0: staticScope(sc->context), michael@0: atomIndices(sc->context), michael@0: firstLine(lineNum), michael@0: stackDepth(0), maxStackDepth(0), michael@0: arrayCompDepth(0), michael@0: emitLevel(0), michael@0: constList(sc->context), michael@0: tryNoteList(sc->context), michael@0: blockScopeList(sc->context), michael@0: typesetCount(0), michael@0: hasSingletons(false), michael@0: emittingForInit(false), michael@0: emittingRunOnceLambda(false), michael@0: lazyRunOnceLambda(false), michael@0: insideEval(insideEval), michael@0: hasGlobalScope(hasGlobalScope), michael@0: emitterMode(emitterMode) michael@0: { michael@0: JS_ASSERT_IF(evalCaller, insideEval); michael@0: } michael@0: michael@0: bool michael@0: BytecodeEmitter::init() michael@0: { michael@0: return atomIndices.ensureMap(sc->context); michael@0: } michael@0: michael@0: static ptrdiff_t michael@0: EmitCheck(ExclusiveContext *cx, BytecodeEmitter *bce, ptrdiff_t delta) michael@0: { michael@0: ptrdiff_t offset = bce->code().length(); michael@0: michael@0: // Start it off moderately large to avoid 
repeated resizings early on. michael@0: if (bce->code().capacity() == 0 && !bce->code().reserve(1024)) michael@0: return -1; michael@0: michael@0: jsbytecode dummy = 0; michael@0: if (!bce->code().appendN(dummy, delta)) { michael@0: js_ReportOutOfMemory(cx); michael@0: return -1; michael@0: } michael@0: return offset; michael@0: } michael@0: michael@0: static void michael@0: UpdateDepth(ExclusiveContext *cx, BytecodeEmitter *bce, ptrdiff_t target) michael@0: { michael@0: jsbytecode *pc = bce->code(target); michael@0: JSOp op = (JSOp) *pc; michael@0: const JSCodeSpec *cs = &js_CodeSpec[op]; michael@0: michael@0: if (cs->format & JOF_TMPSLOT_MASK) { michael@0: /* michael@0: * An opcode may temporarily consume stack space during execution. michael@0: * Account for this in maxStackDepth separately from uses/defs here. michael@0: */ michael@0: uint32_t depth = (uint32_t) bce->stackDepth + michael@0: ((cs->format & JOF_TMPSLOT_MASK) >> JOF_TMPSLOT_SHIFT); michael@0: if (depth > bce->maxStackDepth) michael@0: bce->maxStackDepth = depth; michael@0: } michael@0: michael@0: int nuses = StackUses(nullptr, pc); michael@0: int ndefs = StackDefs(nullptr, pc); michael@0: michael@0: bce->stackDepth -= nuses; michael@0: JS_ASSERT(bce->stackDepth >= 0); michael@0: bce->stackDepth += ndefs; michael@0: if ((uint32_t)bce->stackDepth > bce->maxStackDepth) michael@0: bce->maxStackDepth = bce->stackDepth; michael@0: } michael@0: michael@0: ptrdiff_t michael@0: frontend::Emit1(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp op) michael@0: { michael@0: ptrdiff_t offset = EmitCheck(cx, bce, 1); michael@0: if (offset < 0) michael@0: return -1; michael@0: michael@0: jsbytecode *code = bce->code(offset); michael@0: code[0] = jsbytecode(op); michael@0: UpdateDepth(cx, bce, offset); michael@0: return offset; michael@0: } michael@0: michael@0: ptrdiff_t michael@0: frontend::Emit2(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp op, jsbytecode op1) michael@0: { michael@0: ptrdiff_t offset = 
EmitCheck(cx, bce, 2); michael@0: if (offset < 0) michael@0: return -1; michael@0: michael@0: jsbytecode *code = bce->code(offset); michael@0: code[0] = jsbytecode(op); michael@0: code[1] = op1; michael@0: UpdateDepth(cx, bce, offset); michael@0: return offset; michael@0: } michael@0: michael@0: ptrdiff_t michael@0: frontend::Emit3(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp op, jsbytecode op1, michael@0: jsbytecode op2) michael@0: { michael@0: /* These should filter through EmitVarOp. */ michael@0: JS_ASSERT(!IsArgOp(op)); michael@0: JS_ASSERT(!IsLocalOp(op)); michael@0: michael@0: ptrdiff_t offset = EmitCheck(cx, bce, 3); michael@0: if (offset < 0) michael@0: return -1; michael@0: michael@0: jsbytecode *code = bce->code(offset); michael@0: code[0] = jsbytecode(op); michael@0: code[1] = op1; michael@0: code[2] = op2; michael@0: UpdateDepth(cx, bce, offset); michael@0: return offset; michael@0: } michael@0: michael@0: ptrdiff_t michael@0: frontend::EmitN(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp op, size_t extra) michael@0: { michael@0: ptrdiff_t length = 1 + (ptrdiff_t)extra; michael@0: ptrdiff_t offset = EmitCheck(cx, bce, length); michael@0: if (offset < 0) michael@0: return -1; michael@0: michael@0: jsbytecode *code = bce->code(offset); michael@0: code[0] = jsbytecode(op); michael@0: /* The remaining |extra| bytes are set by the caller */ michael@0: michael@0: /* michael@0: * Don't UpdateDepth if op's use-count comes from the immediate michael@0: * operand yet to be stored in the extra bytes after op. 
michael@0: */ michael@0: if (js_CodeSpec[op].nuses >= 0) michael@0: UpdateDepth(cx, bce, offset); michael@0: michael@0: return offset; michael@0: } michael@0: michael@0: static ptrdiff_t michael@0: EmitJump(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp op, ptrdiff_t off) michael@0: { michael@0: ptrdiff_t offset = EmitCheck(cx, bce, 5); michael@0: if (offset < 0) michael@0: return -1; michael@0: michael@0: jsbytecode *code = bce->code(offset); michael@0: code[0] = jsbytecode(op); michael@0: SET_JUMP_OFFSET(code, off); michael@0: UpdateDepth(cx, bce, offset); michael@0: return offset; michael@0: } michael@0: michael@0: static ptrdiff_t michael@0: EmitCall(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp op, uint16_t argc) michael@0: { michael@0: return Emit3(cx, bce, op, ARGC_HI(argc), ARGC_LO(argc)); michael@0: } michael@0: michael@0: // Dup the var in operand stack slot "slot". The first item on the operand michael@0: // stack is one slot past the last fixed slot. The last (most recent) item is michael@0: // slot bce->stackDepth - 1. michael@0: // michael@0: // The instruction that is written (JSOP_DUPAT) switches the depth around so michael@0: // that it is addressed from the sp instead of from the fp. This is useful when michael@0: // you don't know the size of the fixed stack segment (nfixed), as is the case michael@0: // when compiling scripts (because each statement is parsed and compiled michael@0: // separately, but they all together form one script with one fixed stack michael@0: // frame). michael@0: static bool michael@0: EmitDupAt(ExclusiveContext *cx, BytecodeEmitter *bce, unsigned slot) michael@0: { michael@0: JS_ASSERT(slot < unsigned(bce->stackDepth)); michael@0: // The slot's position on the operand stack, measured from the top. 
michael@0: unsigned slotFromTop = bce->stackDepth - 1 - slot; michael@0: if (slotFromTop >= JS_BIT(24)) { michael@0: bce->reportError(nullptr, JSMSG_TOO_MANY_LOCALS); michael@0: return false; michael@0: } michael@0: ptrdiff_t off = EmitN(cx, bce, JSOP_DUPAT, 3); michael@0: if (off < 0) michael@0: return false; michael@0: jsbytecode *pc = bce->code(off); michael@0: SET_UINT24(pc, slotFromTop); michael@0: return true; michael@0: } michael@0: michael@0: /* XXX too many "... statement" L10N gaffes below -- fix via js.msg! */ michael@0: const char js_with_statement_str[] = "with statement"; michael@0: const char js_finally_block_str[] = "finally block"; michael@0: const char js_script_str[] = "script"; michael@0: michael@0: static const char * const statementName[] = { michael@0: "label statement", /* LABEL */ michael@0: "if statement", /* IF */ michael@0: "else statement", /* ELSE */ michael@0: "destructuring body", /* BODY */ michael@0: "switch statement", /* SWITCH */ michael@0: "block", /* BLOCK */ michael@0: js_with_statement_str, /* WITH */ michael@0: "catch block", /* CATCH */ michael@0: "try block", /* TRY */ michael@0: js_finally_block_str, /* FINALLY */ michael@0: js_finally_block_str, /* SUBROUTINE */ michael@0: "do loop", /* DO_LOOP */ michael@0: "for loop", /* FOR_LOOP */ michael@0: "for/in loop", /* FOR_IN_LOOP */ michael@0: "for/of loop", /* FOR_OF_LOOP */ michael@0: "while loop", /* WHILE_LOOP */ michael@0: }; michael@0: michael@0: JS_STATIC_ASSERT(JS_ARRAY_LENGTH(statementName) == STMT_LIMIT); michael@0: michael@0: static const char * michael@0: StatementName(StmtInfoBCE *topStmt) michael@0: { michael@0: if (!topStmt) michael@0: return js_script_str; michael@0: return statementName[topStmt->type]; michael@0: } michael@0: michael@0: static void michael@0: ReportStatementTooLarge(TokenStream &ts, StmtInfoBCE *topStmt) michael@0: { michael@0: ts.reportError(JSMSG_NEED_DIET, StatementName(topStmt)); michael@0: } michael@0: michael@0: /* michael@0: * Emit a 
backpatch op with offset pointing to the previous jump of this type, michael@0: * so that we can walk back up the chain fixing up the op and jump offset. michael@0: */ michael@0: static ptrdiff_t michael@0: EmitBackPatchOp(ExclusiveContext *cx, BytecodeEmitter *bce, ptrdiff_t *lastp) michael@0: { michael@0: ptrdiff_t offset, delta; michael@0: michael@0: offset = bce->offset(); michael@0: delta = offset - *lastp; michael@0: *lastp = offset; michael@0: JS_ASSERT(delta > 0); michael@0: return EmitJump(cx, bce, JSOP_BACKPATCH, delta); michael@0: } michael@0: michael@0: static inline unsigned michael@0: LengthOfSetLine(unsigned line) michael@0: { michael@0: return 1 /* SN_SETLINE */ + (line > SN_4BYTE_OFFSET_MASK ? 4 : 1); michael@0: } michael@0: michael@0: /* Updates line number notes, not column notes. */ michael@0: static inline bool michael@0: UpdateLineNumberNotes(ExclusiveContext *cx, BytecodeEmitter *bce, uint32_t offset) michael@0: { michael@0: TokenStream *ts = &bce->parser->tokenStream; michael@0: if (!ts->srcCoords.isOnThisLine(offset, bce->currentLine())) { michael@0: unsigned line = ts->srcCoords.lineNum(offset); michael@0: unsigned delta = line - bce->currentLine(); michael@0: michael@0: /* michael@0: * Encode any change in the current source line number by using michael@0: * either several SRC_NEWLINE notes or just one SRC_SETLINE note, michael@0: * whichever consumes less space. michael@0: * michael@0: * NB: We handle backward line number deltas (possible with for michael@0: * loops where the update part is emitted after the body, but its michael@0: * line number is <= any line number in the body) here by letting michael@0: * unsigned delta_ wrap to a very large number, which triggers a michael@0: * SRC_SETLINE. 
michael@0: */ michael@0: bce->current->currentLine = line; michael@0: bce->current->lastColumn = 0; michael@0: if (delta >= LengthOfSetLine(line)) { michael@0: if (NewSrcNote2(cx, bce, SRC_SETLINE, (ptrdiff_t)line) < 0) michael@0: return false; michael@0: } else { michael@0: do { michael@0: if (NewSrcNote(cx, bce, SRC_NEWLINE) < 0) michael@0: return false; michael@0: } while (--delta != 0); michael@0: } michael@0: } michael@0: return true; michael@0: } michael@0: michael@0: /* A function, so that we avoid macro-bloating all the other callsites. */ michael@0: static bool michael@0: UpdateSourceCoordNotes(ExclusiveContext *cx, BytecodeEmitter *bce, uint32_t offset) michael@0: { michael@0: if (!UpdateLineNumberNotes(cx, bce, offset)) michael@0: return false; michael@0: michael@0: uint32_t columnIndex = bce->parser->tokenStream.srcCoords.columnIndex(offset); michael@0: ptrdiff_t colspan = ptrdiff_t(columnIndex) - ptrdiff_t(bce->current->lastColumn); michael@0: if (colspan != 0) { michael@0: if (colspan < 0) { michael@0: colspan += SN_COLSPAN_DOMAIN; michael@0: } else if (colspan >= SN_COLSPAN_DOMAIN / 2) { michael@0: // If the column span is so large that we can't store it, then just michael@0: // discard this information because column information would most michael@0: // likely be useless anyway once the column numbers are ~4000000. michael@0: // This has been known to happen with scripts that have been michael@0: // minimized and put into all one line. michael@0: return true; michael@0: } michael@0: if (NewSrcNote2(cx, bce, SRC_COLSPAN, colspan) < 0) michael@0: return false; michael@0: bce->current->lastColumn = columnIndex; michael@0: } michael@0: return true; michael@0: } michael@0: michael@0: static ptrdiff_t michael@0: EmitLoopHead(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *nextpn) michael@0: { michael@0: if (nextpn) { michael@0: /* michael@0: * Try to give the JSOP_LOOPHEAD the same line number as the next michael@0: * instruction. 
nextpn is often a block, in which case the next michael@0: * instruction typically comes from the first statement inside. michael@0: */ michael@0: JS_ASSERT_IF(nextpn->isKind(PNK_STATEMENTLIST), nextpn->isArity(PN_LIST)); michael@0: if (nextpn->isKind(PNK_STATEMENTLIST) && nextpn->pn_head) michael@0: nextpn = nextpn->pn_head; michael@0: if (!UpdateSourceCoordNotes(cx, bce, nextpn->pn_pos.begin)) michael@0: return -1; michael@0: } michael@0: michael@0: return Emit1(cx, bce, JSOP_LOOPHEAD); michael@0: } michael@0: michael@0: static bool michael@0: EmitLoopEntry(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *nextpn) michael@0: { michael@0: if (nextpn) { michael@0: /* Update the line number, as for LOOPHEAD. */ michael@0: JS_ASSERT_IF(nextpn->isKind(PNK_STATEMENTLIST), nextpn->isArity(PN_LIST)); michael@0: if (nextpn->isKind(PNK_STATEMENTLIST) && nextpn->pn_head) michael@0: nextpn = nextpn->pn_head; michael@0: if (!UpdateSourceCoordNotes(cx, bce, nextpn->pn_pos.begin)) michael@0: return false; michael@0: } michael@0: michael@0: LoopStmtInfo *loop = LoopStmtInfo::fromStmtInfo(bce->topStmt); michael@0: JS_ASSERT(loop->loopDepth > 0); michael@0: michael@0: uint8_t loopDepthAndFlags = PackLoopEntryDepthHintAndFlags(loop->loopDepth, loop->canIonOsr); michael@0: return Emit2(cx, bce, JSOP_LOOPENTRY, loopDepthAndFlags) >= 0; michael@0: } michael@0: michael@0: /* michael@0: * If op is JOF_TYPESET (see the type barriers comment in jsinfer.h), reserve michael@0: * a type set to store its result. michael@0: */ michael@0: static inline void michael@0: CheckTypeSet(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp op) michael@0: { michael@0: if (js_CodeSpec[op].format & JOF_TYPESET) { michael@0: if (bce->typesetCount < UINT16_MAX) michael@0: bce->typesetCount++; michael@0: } michael@0: } michael@0: michael@0: /* michael@0: * Macro to emit a bytecode followed by a uint16_t immediate operand stored in michael@0: * big-endian order. 
michael@0: * michael@0: * NB: We use cx and bce from our caller's lexical environment, and return michael@0: * false on error. michael@0: */ michael@0: #define EMIT_UINT16_IMM_OP(op, i) \ michael@0: JS_BEGIN_MACRO \ michael@0: if (Emit3(cx, bce, op, UINT16_HI(i), UINT16_LO(i)) < 0) \ michael@0: return false; \ michael@0: CheckTypeSet(cx, bce, op); \ michael@0: JS_END_MACRO michael@0: michael@0: static bool michael@0: FlushPops(ExclusiveContext *cx, BytecodeEmitter *bce, int *npops) michael@0: { michael@0: JS_ASSERT(*npops != 0); michael@0: EMIT_UINT16_IMM_OP(JSOP_POPN, *npops); michael@0: *npops = 0; michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: PopIterator(ExclusiveContext *cx, BytecodeEmitter *bce) michael@0: { michael@0: if (Emit1(cx, bce, JSOP_ENDITER) < 0) michael@0: return false; michael@0: return true; michael@0: } michael@0: michael@0: namespace { michael@0: michael@0: class NonLocalExitScope { michael@0: ExclusiveContext *cx; michael@0: BytecodeEmitter *bce; michael@0: const uint32_t savedScopeIndex; michael@0: const int savedDepth; michael@0: uint32_t openScopeIndex; michael@0: michael@0: NonLocalExitScope(const NonLocalExitScope &) MOZ_DELETE; michael@0: michael@0: public: michael@0: explicit NonLocalExitScope(ExclusiveContext *cx_, BytecodeEmitter *bce_) michael@0: : cx(cx_), michael@0: bce(bce_), michael@0: savedScopeIndex(bce->blockScopeList.length()), michael@0: savedDepth(bce->stackDepth), michael@0: openScopeIndex(UINT32_MAX) { michael@0: if (bce->staticScope) { michael@0: StmtInfoBCE *stmt = bce->topStmt; michael@0: while (1) { michael@0: JS_ASSERT(stmt); michael@0: if (stmt->isNestedScope) { michael@0: openScopeIndex = stmt->blockScopeIndex; michael@0: break; michael@0: } michael@0: stmt = stmt->down; michael@0: } michael@0: } michael@0: } michael@0: michael@0: ~NonLocalExitScope() { michael@0: for (uint32_t n = savedScopeIndex; n < bce->blockScopeList.length(); n++) michael@0: bce->blockScopeList.recordEnd(n, 
bce->offset()); michael@0: bce->stackDepth = savedDepth; michael@0: } michael@0: michael@0: bool popScopeForNonLocalExit(uint32_t blockScopeIndex) { michael@0: uint32_t scopeObjectIndex = bce->blockScopeList.findEnclosingScope(blockScopeIndex); michael@0: uint32_t parent = openScopeIndex; michael@0: michael@0: if (!bce->blockScopeList.append(scopeObjectIndex, bce->offset(), parent)) michael@0: return false; michael@0: openScopeIndex = bce->blockScopeList.length() - 1; michael@0: return true; michael@0: } michael@0: michael@0: bool prepareForNonLocalJump(StmtInfoBCE *toStmt); michael@0: }; michael@0: michael@0: /* michael@0: * Emit additional bytecode(s) for non-local jumps. michael@0: */ michael@0: bool michael@0: NonLocalExitScope::prepareForNonLocalJump(StmtInfoBCE *toStmt) michael@0: { michael@0: int npops = 0; michael@0: michael@0: #define FLUSH_POPS() if (npops && !FlushPops(cx, bce, &npops)) return false michael@0: michael@0: for (StmtInfoBCE *stmt = bce->topStmt; stmt != toStmt; stmt = stmt->down) { michael@0: switch (stmt->type) { michael@0: case STMT_FINALLY: michael@0: FLUSH_POPS(); michael@0: if (EmitBackPatchOp(cx, bce, &stmt->gosubs()) < 0) michael@0: return false; michael@0: break; michael@0: michael@0: case STMT_WITH: michael@0: if (Emit1(cx, bce, JSOP_LEAVEWITH) < 0) michael@0: return false; michael@0: JS_ASSERT(stmt->isNestedScope); michael@0: if (!popScopeForNonLocalExit(stmt->blockScopeIndex)) michael@0: return false; michael@0: break; michael@0: michael@0: case STMT_FOR_OF_LOOP: michael@0: npops += 2; michael@0: break; michael@0: michael@0: case STMT_FOR_IN_LOOP: michael@0: FLUSH_POPS(); michael@0: if (!PopIterator(cx, bce)) michael@0: return false; michael@0: break; michael@0: michael@0: case STMT_SUBROUTINE: michael@0: /* michael@0: * There's a [exception or hole, retsub pc-index] pair on the michael@0: * stack that we need to pop. 
michael@0: */ michael@0: npops += 2; michael@0: break; michael@0: michael@0: default:; michael@0: } michael@0: michael@0: if (stmt->isBlockScope) { michael@0: JS_ASSERT(stmt->isNestedScope); michael@0: StaticBlockObject &blockObj = stmt->staticBlock(); michael@0: if (Emit1(cx, bce, JSOP_DEBUGLEAVEBLOCK) < 0) michael@0: return false; michael@0: if (!popScopeForNonLocalExit(stmt->blockScopeIndex)) michael@0: return false; michael@0: if (blockObj.needsClone()) { michael@0: if (Emit1(cx, bce, JSOP_POPBLOCKSCOPE) < 0) michael@0: return false; michael@0: } michael@0: } michael@0: } michael@0: michael@0: FLUSH_POPS(); michael@0: return true; michael@0: michael@0: #undef FLUSH_POPS michael@0: } michael@0: michael@0: } // anonymous namespace michael@0: michael@0: static ptrdiff_t michael@0: EmitGoto(ExclusiveContext *cx, BytecodeEmitter *bce, StmtInfoBCE *toStmt, ptrdiff_t *lastp, michael@0: SrcNoteType noteType = SRC_NULL) michael@0: { michael@0: NonLocalExitScope nle(cx, bce); michael@0: michael@0: if (!nle.prepareForNonLocalJump(toStmt)) michael@0: return -1; michael@0: michael@0: if (noteType != SRC_NULL) { michael@0: if (NewSrcNote(cx, bce, noteType) < 0) michael@0: return -1; michael@0: } michael@0: michael@0: return EmitBackPatchOp(cx, bce, lastp); michael@0: } michael@0: michael@0: static bool michael@0: BackPatch(ExclusiveContext *cx, BytecodeEmitter *bce, ptrdiff_t last, jsbytecode *target, jsbytecode op) michael@0: { michael@0: jsbytecode *pc, *stop; michael@0: ptrdiff_t delta, span; michael@0: michael@0: pc = bce->code(last); michael@0: stop = bce->code(-1); michael@0: while (pc != stop) { michael@0: delta = GET_JUMP_OFFSET(pc); michael@0: span = target - pc; michael@0: SET_JUMP_OFFSET(pc, span); michael@0: *pc = op; michael@0: pc -= delta; michael@0: } michael@0: return true; michael@0: } michael@0: michael@0: #define SET_STATEMENT_TOP(stmt, top) \ michael@0: ((stmt)->update = (top), (stmt)->breaks = (stmt)->continues = (-1)) michael@0: michael@0: static void 
michael@0: PushStatementInner(BytecodeEmitter *bce, StmtInfoBCE *stmt, StmtType type, ptrdiff_t top) michael@0: { michael@0: SET_STATEMENT_TOP(stmt, top); michael@0: PushStatement(bce, stmt, type); michael@0: } michael@0: michael@0: static void michael@0: PushStatementBCE(BytecodeEmitter *bce, StmtInfoBCE *stmt, StmtType type, ptrdiff_t top) michael@0: { michael@0: PushStatementInner(bce, stmt, type, top); michael@0: JS_ASSERT(!stmt->isLoop()); michael@0: } michael@0: michael@0: static void michael@0: PushLoopStatement(BytecodeEmitter *bce, LoopStmtInfo *stmt, StmtType type, ptrdiff_t top) michael@0: { michael@0: PushStatementInner(bce, stmt, type, top); michael@0: JS_ASSERT(stmt->isLoop()); michael@0: michael@0: LoopStmtInfo *downLoop = nullptr; michael@0: for (StmtInfoBCE *outer = stmt->down; outer; outer = outer->down) { michael@0: if (outer->isLoop()) { michael@0: downLoop = LoopStmtInfo::fromStmtInfo(outer); michael@0: break; michael@0: } michael@0: } michael@0: michael@0: stmt->stackDepth = bce->stackDepth; michael@0: stmt->loopDepth = downLoop ? downLoop->loopDepth + 1 : 1; michael@0: michael@0: int loopSlots; michael@0: if (type == STMT_FOR_OF_LOOP) michael@0: loopSlots = 2; michael@0: else if (type == STMT_FOR_IN_LOOP) michael@0: loopSlots = 1; michael@0: else michael@0: loopSlots = 0; michael@0: michael@0: if (downLoop) michael@0: stmt->canIonOsr = (downLoop->canIonOsr && michael@0: stmt->stackDepth == downLoop->stackDepth + loopSlots); michael@0: else michael@0: stmt->canIonOsr = stmt->stackDepth == loopSlots; michael@0: } michael@0: michael@0: /* michael@0: * Return the enclosing lexical scope, which is the innermost enclosing static michael@0: * block object or compiler created function. 
michael@0: */ michael@0: static JSObject * michael@0: EnclosingStaticScope(BytecodeEmitter *bce) michael@0: { michael@0: if (bce->staticScope) michael@0: return bce->staticScope; michael@0: michael@0: if (!bce->sc->isFunctionBox()) { michael@0: JS_ASSERT(!bce->parent); michael@0: return nullptr; michael@0: } michael@0: michael@0: return bce->sc->asFunctionBox()->function(); michael@0: } michael@0: michael@0: #ifdef DEBUG michael@0: static bool michael@0: AllLocalsAliased(StaticBlockObject &obj) michael@0: { michael@0: for (unsigned i = 0; i < obj.numVariables(); i++) michael@0: if (!obj.isAliased(i)) michael@0: return false; michael@0: return true; michael@0: } michael@0: #endif michael@0: michael@0: static bool michael@0: ComputeAliasedSlots(ExclusiveContext *cx, BytecodeEmitter *bce, Handle blockObj) michael@0: { michael@0: for (unsigned i = 0; i < blockObj->numVariables(); i++) { michael@0: Definition *dn = blockObj->definitionParseNode(i); michael@0: michael@0: JS_ASSERT(dn->isDefn()); michael@0: if (!dn->pn_cookie.set(bce->parser->tokenStream, dn->pn_cookie.level(), michael@0: blockObj->blockIndexToLocalIndex(dn->frameSlot()))) michael@0: { michael@0: return false; michael@0: } michael@0: michael@0: #ifdef DEBUG michael@0: for (ParseNode *pnu = dn->dn_uses; pnu; pnu = pnu->pn_link) { michael@0: JS_ASSERT(pnu->pn_lexdef == dn); michael@0: JS_ASSERT(!(pnu->pn_dflags & PND_BOUND)); michael@0: JS_ASSERT(pnu->pn_cookie.isFree()); michael@0: } michael@0: #endif michael@0: michael@0: blockObj->setAliased(i, bce->isAliasedName(dn)); michael@0: } michael@0: michael@0: JS_ASSERT_IF(bce->sc->allLocalsAliased(), AllLocalsAliased(*blockObj)); michael@0: michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: EmitInternedObjectOp(ExclusiveContext *cx, uint32_t index, JSOp op, BytecodeEmitter *bce); michael@0: michael@0: // In a function, block-scoped locals go after the vars, and form part of the michael@0: // fixed part of a stack frame. 
Outside a function, there are no fixed vars, michael@0: // but block-scoped locals still form part of the fixed part of a stack frame michael@0: // and are thus addressable via GETLOCAL and friends. michael@0: static void michael@0: ComputeLocalOffset(ExclusiveContext *cx, BytecodeEmitter *bce, Handle blockObj) michael@0: { michael@0: unsigned nfixedvars = bce->sc->isFunctionBox() ? bce->script->bindings.numVars() : 0; michael@0: unsigned localOffset = nfixedvars; michael@0: michael@0: if (bce->staticScope) { michael@0: Rooted outer(cx, bce->staticScope); michael@0: for (; outer; outer = outer->enclosingNestedScope()) { michael@0: if (outer->is()) { michael@0: StaticBlockObject &outerBlock = outer->as(); michael@0: localOffset = outerBlock.localOffset() + outerBlock.numVariables(); michael@0: break; michael@0: } michael@0: } michael@0: } michael@0: michael@0: JS_ASSERT(localOffset + blockObj->numVariables() michael@0: <= nfixedvars + bce->script->bindings.numBlockScoped()); michael@0: michael@0: blockObj->setLocalOffset(localOffset); michael@0: } michael@0: michael@0: // ~ Nested Scopes ~ michael@0: // michael@0: // A nested scope is a region of a compilation unit (function, script, or eval michael@0: // code) with an additional node on the scope chain. This node may either be a michael@0: // "with" object or a "block" object. "With" objects represent "with" scopes. michael@0: // Block objects represent lexical scopes, and contain named block-scoped michael@0: // bindings, for example "let" bindings or the exception in a catch block. michael@0: // Those variables may be local and thus accessible directly from the stack, or michael@0: // "aliased" (accessed by name from nested functions, or dynamically via nested michael@0: // "eval" or "with") and only accessible through the scope chain. michael@0: // michael@0: // All nested scopes are present on the "static scope chain". 
A nested scope michael@0: // that is a "with" scope will be present on the scope chain at run-time as michael@0: // well. A block scope may or may not have a corresponding link on the run-time michael@0: // scope chain; if no variable declared in the block scope is "aliased", then no michael@0: // scope chain node is allocated. michael@0: // michael@0: // To help debuggers, the bytecode emitter arranges to record the PC ranges michael@0: // comprehended by a nested scope, and ultimately attach them to the JSScript. michael@0: // An element in the "block scope array" specifies the PC range, and links to a michael@0: // NestedScopeObject in the object list of the script. That scope object is michael@0: // linked to the previous link in the static scope chain, if any. The static michael@0: // scope chain at any pre-retire PC can be retrieved using michael@0: // JSScript::getStaticScope(jsbytecode *pc). michael@0: // michael@0: // Block scopes store their locals in the fixed part of a stack frame, after the michael@0: // "fixed var" bindings. A fixed var binding is a "var" or legacy "const" michael@0: // binding that occurs in a function (as opposed to a script or in eval code). michael@0: // Only functions have fixed var bindings. michael@0: // michael@0: // To assist the debugger, we emit a DEBUGLEAVEBLOCK opcode before leaving a michael@0: // block scope, even if the block has no aliased locals. This allows michael@0: // DebugScopes to invalidate any association between a debugger scope object, michael@0: // which can proxy access to unaliased stack locals, and the actual live frame. michael@0: // In normal, non-debug mode, this opcode does not cause any baseline code to be michael@0: // emitted. michael@0: // michael@0: // Enter a nested scope with EnterNestedScope. It will emit michael@0: // PUSHBLOCKSCOPE/ENTERWITH if needed, and arrange to record the PC bounds of michael@0: // the scope. 
Leave a nested scope with LeaveNestedScope, which, for blocks, michael@0: // will emit DEBUGLEAVEBLOCK and may emit POPBLOCKSCOPE. (For "with" scopes it michael@0: // emits LEAVEWITH, of course.) Pass EnterNestedScope a fresh StmtInfoBCE michael@0: // object, and pass that same object to the corresponding LeaveNestedScope. If michael@0: // the statement is a block scope, pass STMT_BLOCK as stmtType; otherwise for michael@0: // with scopes pass STMT_WITH. michael@0: // michael@0: static bool michael@0: EnterNestedScope(ExclusiveContext *cx, BytecodeEmitter *bce, StmtInfoBCE *stmt, ObjectBox *objbox, michael@0: StmtType stmtType) michael@0: { michael@0: Rooted scopeObj(cx, &objbox->object->as()); michael@0: uint32_t scopeObjectIndex = bce->objectList.add(objbox); michael@0: michael@0: switch (stmtType) { michael@0: case STMT_BLOCK: { michael@0: Rooted blockObj(cx, &scopeObj->as()); michael@0: michael@0: ComputeLocalOffset(cx, bce, blockObj); michael@0: michael@0: if (!ComputeAliasedSlots(cx, bce, blockObj)) michael@0: return false; michael@0: michael@0: if (blockObj->needsClone()) { michael@0: if (!EmitInternedObjectOp(cx, scopeObjectIndex, JSOP_PUSHBLOCKSCOPE, bce)) michael@0: return false; michael@0: } michael@0: break; michael@0: } michael@0: case STMT_WITH: michael@0: JS_ASSERT(scopeObj->is()); michael@0: if (!EmitInternedObjectOp(cx, scopeObjectIndex, JSOP_ENTERWITH, bce)) michael@0: return false; michael@0: break; michael@0: default: michael@0: MOZ_ASSUME_UNREACHABLE(); michael@0: } michael@0: michael@0: uint32_t parent = BlockScopeNote::NoBlockScopeIndex; michael@0: if (StmtInfoBCE *stmt = bce->topScopeStmt) { michael@0: for (; stmt->staticScope != bce->staticScope; stmt = stmt->down) {} michael@0: parent = stmt->blockScopeIndex; michael@0: } michael@0: michael@0: stmt->blockScopeIndex = bce->blockScopeList.length(); michael@0: if (!bce->blockScopeList.append(scopeObjectIndex, bce->offset(), parent)) michael@0: return false; michael@0: michael@0: 
PushStatementBCE(bce, stmt, stmtType, bce->offset()); michael@0: scopeObj->initEnclosingNestedScope(EnclosingStaticScope(bce)); michael@0: FinishPushNestedScope(bce, stmt, *scopeObj); michael@0: JS_ASSERT(stmt->isNestedScope); michael@0: stmt->isBlockScope = (stmtType == STMT_BLOCK); michael@0: michael@0: return true; michael@0: } michael@0: michael@0: // Patches |breaks| and |continues| unless the top statement info record michael@0: // represents a try-catch-finally suite. May fail if a jump offset overflows. michael@0: static bool michael@0: PopStatementBCE(ExclusiveContext *cx, BytecodeEmitter *bce) michael@0: { michael@0: StmtInfoBCE *stmt = bce->topStmt; michael@0: if (!stmt->isTrying() && michael@0: (!BackPatch(cx, bce, stmt->breaks, bce->code().end(), JSOP_GOTO) || michael@0: !BackPatch(cx, bce, stmt->continues, bce->code(stmt->update), JSOP_GOTO))) michael@0: { michael@0: return false; michael@0: } michael@0: michael@0: FinishPopStatement(bce); michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: LeaveNestedScope(ExclusiveContext *cx, BytecodeEmitter *bce, StmtInfoBCE *stmt) michael@0: { michael@0: JS_ASSERT(stmt == bce->topStmt); michael@0: JS_ASSERT(stmt->isNestedScope); michael@0: JS_ASSERT(stmt->isBlockScope == !(stmt->type == STMT_WITH)); michael@0: uint32_t blockScopeIndex = stmt->blockScopeIndex; michael@0: michael@0: #ifdef DEBUG michael@0: JS_ASSERT(bce->blockScopeList.list[blockScopeIndex].length == 0); michael@0: uint32_t blockObjIndex = bce->blockScopeList.list[blockScopeIndex].index; michael@0: ObjectBox *blockObjBox = bce->objectList.find(blockObjIndex); michael@0: NestedScopeObject *staticScope = &blockObjBox->object->as(); michael@0: JS_ASSERT(stmt->staticScope == staticScope); michael@0: JS_ASSERT(staticScope == bce->staticScope); michael@0: JS_ASSERT_IF(!stmt->isBlockScope, staticScope->is()); michael@0: #endif michael@0: michael@0: if (!PopStatementBCE(cx, bce)) michael@0: return false; michael@0: michael@0: 
if (Emit1(cx, bce, stmt->isBlockScope ? JSOP_DEBUGLEAVEBLOCK : JSOP_LEAVEWITH) < 0) michael@0: return false; michael@0: michael@0: bce->blockScopeList.recordEnd(blockScopeIndex, bce->offset()); michael@0: michael@0: if (stmt->isBlockScope && stmt->staticScope->as().needsClone()) { michael@0: if (Emit1(cx, bce, JSOP_POPBLOCKSCOPE) < 0) michael@0: return false; michael@0: } michael@0: michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: EmitIndex32(ExclusiveContext *cx, JSOp op, uint32_t index, BytecodeEmitter *bce) michael@0: { michael@0: const size_t len = 1 + UINT32_INDEX_LEN; michael@0: JS_ASSERT(len == size_t(js_CodeSpec[op].length)); michael@0: ptrdiff_t offset = EmitCheck(cx, bce, len); michael@0: if (offset < 0) michael@0: return false; michael@0: michael@0: jsbytecode *code = bce->code(offset); michael@0: code[0] = jsbytecode(op); michael@0: SET_UINT32_INDEX(code, index); michael@0: UpdateDepth(cx, bce, offset); michael@0: CheckTypeSet(cx, bce, op); michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: EmitIndexOp(ExclusiveContext *cx, JSOp op, uint32_t index, BytecodeEmitter *bce) michael@0: { michael@0: const size_t len = js_CodeSpec[op].length; michael@0: JS_ASSERT(len >= 1 + UINT32_INDEX_LEN); michael@0: ptrdiff_t offset = EmitCheck(cx, bce, len); michael@0: if (offset < 0) michael@0: return false; michael@0: michael@0: jsbytecode *code = bce->code(offset); michael@0: code[0] = jsbytecode(op); michael@0: SET_UINT32_INDEX(code, index); michael@0: UpdateDepth(cx, bce, offset); michael@0: CheckTypeSet(cx, bce, op); michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: EmitAtomOp(ExclusiveContext *cx, JSAtom *atom, JSOp op, BytecodeEmitter *bce) michael@0: { michael@0: JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM); michael@0: michael@0: if (op == JSOP_GETPROP && atom == cx->names().length) { michael@0: /* Specialize length accesses for the interpreter. 
*/ michael@0: op = JSOP_LENGTH; michael@0: } michael@0: michael@0: jsatomid index; michael@0: if (!bce->makeAtomIndex(atom, &index)) michael@0: return false; michael@0: michael@0: return EmitIndexOp(cx, op, index, bce); michael@0: } michael@0: michael@0: static bool michael@0: EmitAtomOp(ExclusiveContext *cx, ParseNode *pn, JSOp op, BytecodeEmitter *bce) michael@0: { michael@0: JS_ASSERT(pn->pn_atom != nullptr); michael@0: return EmitAtomOp(cx, pn->pn_atom, op, bce); michael@0: } michael@0: michael@0: static bool michael@0: EmitInternedObjectOp(ExclusiveContext *cx, uint32_t index, JSOp op, BytecodeEmitter *bce) michael@0: { michael@0: JS_ASSERT(JOF_OPTYPE(op) == JOF_OBJECT); michael@0: JS_ASSERT(index < bce->objectList.length); michael@0: return EmitIndex32(cx, op, index, bce); michael@0: } michael@0: michael@0: static bool michael@0: EmitObjectOp(ExclusiveContext *cx, ObjectBox *objbox, JSOp op, BytecodeEmitter *bce) michael@0: { michael@0: return EmitInternedObjectOp(cx, bce->objectList.add(objbox), op, bce); michael@0: } michael@0: michael@0: static bool michael@0: EmitRegExp(ExclusiveContext *cx, uint32_t index, BytecodeEmitter *bce) michael@0: { michael@0: return EmitIndex32(cx, JSOP_REGEXP, index, bce); michael@0: } michael@0: michael@0: /* michael@0: * To catch accidental misuse, EMIT_UINT16_IMM_OP/Emit3 assert that they are michael@0: * not used to unconditionally emit JSOP_GETLOCAL. Variable access should michael@0: * instead be emitted using EmitVarOp. In special cases, when the caller michael@0: * definitely knows that a given local slot is unaliased, this function may be michael@0: * used as a non-asserting version of EMIT_UINT16_IMM_OP. 
michael@0: */ michael@0: static bool michael@0: EmitUnaliasedVarOp(ExclusiveContext *cx, JSOp op, uint32_t slot, BytecodeEmitter *bce) michael@0: { michael@0: JS_ASSERT(JOF_OPTYPE(op) != JOF_SCOPECOORD); michael@0: michael@0: if (IsLocalOp(op)) { michael@0: ptrdiff_t off = EmitN(cx, bce, op, LOCALNO_LEN); michael@0: if (off < 0) michael@0: return false; michael@0: michael@0: SET_LOCALNO(bce->code(off), slot); michael@0: return true; michael@0: } michael@0: michael@0: JS_ASSERT(IsArgOp(op)); michael@0: ptrdiff_t off = EmitN(cx, bce, op, ARGNO_LEN); michael@0: if (off < 0) michael@0: return false; michael@0: michael@0: SET_ARGNO(bce->code(off), slot); michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: EmitAliasedVarOp(ExclusiveContext *cx, JSOp op, ScopeCoordinate sc, BytecodeEmitter *bce) michael@0: { michael@0: JS_ASSERT(JOF_OPTYPE(op) == JOF_SCOPECOORD); michael@0: michael@0: unsigned n = SCOPECOORD_HOPS_LEN + SCOPECOORD_SLOT_LEN; michael@0: JS_ASSERT(int(n) + 1 /* op */ == js_CodeSpec[op].length); michael@0: michael@0: ptrdiff_t off = EmitN(cx, bce, op, n); michael@0: if (off < 0) michael@0: return false; michael@0: michael@0: jsbytecode *pc = bce->code(off); michael@0: SET_SCOPECOORD_HOPS(pc, sc.hops()); michael@0: pc += SCOPECOORD_HOPS_LEN; michael@0: SET_SCOPECOORD_SLOT(pc, sc.slot()); michael@0: pc += SCOPECOORD_SLOT_LEN; michael@0: CheckTypeSet(cx, bce, op); michael@0: return true; michael@0: } michael@0: michael@0: // Compute the number of nested scope objects that will actually be on the scope michael@0: // chain at runtime, given the BCE's current staticScope. 
michael@0: static unsigned michael@0: DynamicNestedScopeDepth(BytecodeEmitter *bce) michael@0: { michael@0: unsigned depth = 0; michael@0: for (NestedScopeObject *b = bce->staticScope; b; b = b->enclosingNestedScope()) { michael@0: if (!b->is() || b->as().needsClone()) michael@0: ++depth; michael@0: } michael@0: michael@0: return depth; michael@0: } michael@0: michael@0: static bool michael@0: LookupAliasedName(HandleScript script, PropertyName *name, uint32_t *pslot) michael@0: { michael@0: /* michael@0: * Beware: BindingIter may contain more than one Binding for a given name michael@0: * (in the case of |function f(x,x) {}|) but only one will be aliased. michael@0: */ michael@0: uint32_t slot = CallObject::RESERVED_SLOTS; michael@0: for (BindingIter bi(script); !bi.done(); bi++) { michael@0: if (bi->aliased()) { michael@0: if (bi->name() == name) { michael@0: *pslot = slot; michael@0: return true; michael@0: } michael@0: slot++; michael@0: } michael@0: } michael@0: return false; michael@0: } michael@0: michael@0: static bool michael@0: LookupAliasedNameSlot(HandleScript script, PropertyName *name, ScopeCoordinate *sc) michael@0: { michael@0: uint32_t slot; michael@0: if (!LookupAliasedName(script, name, &slot)) michael@0: return false; michael@0: michael@0: sc->setSlot(slot); michael@0: return true; michael@0: } michael@0: michael@0: /* michael@0: * Use this function instead of assigning directly to 'hops' to guard for michael@0: * uint8_t overflows. 
michael@0: */ michael@0: static bool michael@0: AssignHops(BytecodeEmitter *bce, ParseNode *pn, unsigned src, ScopeCoordinate *dst) michael@0: { michael@0: if (src > UINT8_MAX) { michael@0: bce->reportError(pn, JSMSG_TOO_DEEP, js_function_str); michael@0: return false; michael@0: } michael@0: michael@0: dst->setHops(src); michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: EmitAliasedVarOp(ExclusiveContext *cx, JSOp op, ParseNode *pn, BytecodeEmitter *bce) michael@0: { michael@0: /* michael@0: * While pn->pn_cookie tells us how many function scopes are between the use and the def this michael@0: * is not the same as how many hops up the dynamic scope chain are needed. In particular: michael@0: * - a lexical function scope only contributes a hop if it is "heavyweight" (has a dynamic michael@0: * scope object). michael@0: * - a heavyweight named function scope contributes an extra scope to the scope chain (a michael@0: * DeclEnvObject that holds just the name). michael@0: * - all the intervening let/catch blocks must be counted. michael@0: */ michael@0: unsigned skippedScopes = 0; michael@0: BytecodeEmitter *bceOfDef = bce; michael@0: if (pn->isUsed()) { michael@0: /* michael@0: * As explained in BindNameToSlot, the 'level' of a use indicates how michael@0: * many function scopes (i.e., BytecodeEmitters) to skip to find the michael@0: * enclosing function scope of the definition being accessed. 
michael@0: */ michael@0: for (unsigned i = pn->pn_cookie.level(); i; i--) { michael@0: skippedScopes += DynamicNestedScopeDepth(bceOfDef); michael@0: FunctionBox *funbox = bceOfDef->sc->asFunctionBox(); michael@0: if (funbox->isHeavyweight()) { michael@0: skippedScopes++; michael@0: if (funbox->function()->isNamedLambda()) michael@0: skippedScopes++; michael@0: } michael@0: bceOfDef = bceOfDef->parent; michael@0: } michael@0: } else { michael@0: JS_ASSERT(pn->isDefn()); michael@0: JS_ASSERT(pn->pn_cookie.level() == bce->script->staticLevel()); michael@0: } michael@0: michael@0: /* michael@0: * The final part of the skippedScopes computation depends on the type of michael@0: * variable. An arg or local variable is at the outer scope of a function michael@0: * and so includes the full DynamicNestedScopeDepth. A let/catch-binding michael@0: * requires a search of the block chain to see how many (dynamic) block michael@0: * objects to skip. michael@0: */ michael@0: ScopeCoordinate sc; michael@0: if (IsArgOp(pn->getOp())) { michael@0: if (!AssignHops(bce, pn, skippedScopes + DynamicNestedScopeDepth(bceOfDef), &sc)) michael@0: return false; michael@0: JS_ALWAYS_TRUE(LookupAliasedNameSlot(bceOfDef->script, pn->name(), &sc)); michael@0: } else { michael@0: JS_ASSERT(IsLocalOp(pn->getOp()) || pn->isKind(PNK_FUNCTION)); michael@0: uint32_t local = pn->pn_cookie.slot(); michael@0: if (local < bceOfDef->script->bindings.numVars()) { michael@0: if (!AssignHops(bce, pn, skippedScopes + DynamicNestedScopeDepth(bceOfDef), &sc)) michael@0: return false; michael@0: JS_ALWAYS_TRUE(LookupAliasedNameSlot(bceOfDef->script, pn->name(), &sc)); michael@0: } else { michael@0: JS_ASSERT_IF(bce->sc->isFunctionBox(), local <= bceOfDef->script->bindings.numLocals()); michael@0: JS_ASSERT(bceOfDef->staticScope->is()); michael@0: Rooted b(cx, &bceOfDef->staticScope->as()); michael@0: while (local < b->localOffset()) { michael@0: if (b->needsClone()) michael@0: skippedScopes++; michael@0: b = 
&b->enclosingNestedScope()->as(); michael@0: } michael@0: if (!AssignHops(bce, pn, skippedScopes, &sc)) michael@0: return false; michael@0: sc.setSlot(b->localIndexToSlot(local)); michael@0: } michael@0: } michael@0: michael@0: return EmitAliasedVarOp(cx, op, sc, bce); michael@0: } michael@0: michael@0: static bool michael@0: EmitVarOp(ExclusiveContext *cx, ParseNode *pn, JSOp op, BytecodeEmitter *bce) michael@0: { michael@0: JS_ASSERT(pn->isKind(PNK_FUNCTION) || pn->isKind(PNK_NAME)); michael@0: JS_ASSERT(!pn->pn_cookie.isFree()); michael@0: michael@0: if (IsAliasedVarOp(op)) { michael@0: ScopeCoordinate sc; michael@0: sc.setHops(pn->pn_cookie.level()); michael@0: sc.setSlot(pn->pn_cookie.slot()); michael@0: return EmitAliasedVarOp(cx, op, sc, bce); michael@0: } michael@0: michael@0: JS_ASSERT_IF(pn->isKind(PNK_NAME), IsArgOp(op) || IsLocalOp(op)); michael@0: michael@0: if (!bce->isAliasedName(pn)) { michael@0: JS_ASSERT(pn->isUsed() || pn->isDefn()); michael@0: JS_ASSERT_IF(pn->isUsed(), pn->pn_cookie.level() == 0); michael@0: JS_ASSERT_IF(pn->isDefn(), pn->pn_cookie.level() == bce->script->staticLevel()); michael@0: return EmitUnaliasedVarOp(cx, op, pn->pn_cookie.slot(), bce); michael@0: } michael@0: michael@0: switch (op) { michael@0: case JSOP_GETARG: case JSOP_GETLOCAL: op = JSOP_GETALIASEDVAR; break; michael@0: case JSOP_SETARG: case JSOP_SETLOCAL: op = JSOP_SETALIASEDVAR; break; michael@0: default: MOZ_ASSUME_UNREACHABLE("unexpected var op"); michael@0: } michael@0: michael@0: return EmitAliasedVarOp(cx, op, pn, bce); michael@0: } michael@0: michael@0: static JSOp michael@0: GetIncDecInfo(ParseNodeKind kind, bool *post) michael@0: { michael@0: JS_ASSERT(kind == PNK_POSTINCREMENT || kind == PNK_PREINCREMENT || michael@0: kind == PNK_POSTDECREMENT || kind == PNK_PREDECREMENT); michael@0: *post = kind == PNK_POSTINCREMENT || kind == PNK_POSTDECREMENT; michael@0: return (kind == PNK_POSTINCREMENT || kind == PNK_PREINCREMENT) ? 
JSOP_ADD : JSOP_SUB; michael@0: } michael@0: michael@0: static bool michael@0: EmitVarIncDec(ExclusiveContext *cx, ParseNode *pn, BytecodeEmitter *bce) michael@0: { michael@0: JSOp op = pn->pn_kid->getOp(); michael@0: JS_ASSERT(IsArgOp(op) || IsLocalOp(op) || IsAliasedVarOp(op)); michael@0: JS_ASSERT(pn->pn_kid->isKind(PNK_NAME)); michael@0: JS_ASSERT(!pn->pn_kid->pn_cookie.isFree()); michael@0: michael@0: bool post; michael@0: JSOp binop = GetIncDecInfo(pn->getKind(), &post); michael@0: michael@0: JSOp getOp, setOp; michael@0: if (IsLocalOp(op)) { michael@0: getOp = JSOP_GETLOCAL; michael@0: setOp = JSOP_SETLOCAL; michael@0: } else if (IsArgOp(op)) { michael@0: getOp = JSOP_GETARG; michael@0: setOp = JSOP_SETARG; michael@0: } else { michael@0: getOp = JSOP_GETALIASEDVAR; michael@0: setOp = JSOP_SETALIASEDVAR; michael@0: } michael@0: michael@0: if (!EmitVarOp(cx, pn->pn_kid, getOp, bce)) // V michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_POS) < 0) // N michael@0: return false; michael@0: if (post && Emit1(cx, bce, JSOP_DUP) < 0) // N? N michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_ONE) < 0) // N? N 1 michael@0: return false; michael@0: if (Emit1(cx, bce, binop) < 0) // N? N+1 michael@0: return false; michael@0: if (!EmitVarOp(cx, pn->pn_kid, setOp, bce)) // N? N+1 michael@0: return false; michael@0: if (post && Emit1(cx, bce, JSOP_POP) < 0) // RESULT michael@0: return false; michael@0: michael@0: return true; michael@0: } michael@0: michael@0: bool michael@0: BytecodeEmitter::isAliasedName(ParseNode *pn) michael@0: { michael@0: Definition *dn = pn->resolve(); michael@0: JS_ASSERT(dn->isDefn()); michael@0: JS_ASSERT(!dn->isPlaceholder()); michael@0: JS_ASSERT(dn->isBound()); michael@0: michael@0: /* If dn is in an enclosing function, it is definitely aliased. 
*/ michael@0: if (dn->pn_cookie.level() != script->staticLevel()) michael@0: return true; michael@0: michael@0: switch (dn->kind()) { michael@0: case Definition::LET: michael@0: /* michael@0: * There are two ways to alias a let variable: nested functions and michael@0: * dynamic scope operations. (This is overly conservative since the michael@0: * bindingsAccessedDynamically flag, checked by allLocalsAliased, is michael@0: * function-wide.) michael@0: * michael@0: * In addition all locals in generators are marked as aliased, to ensure michael@0: * that they are allocated on scope chains instead of on the stack. See michael@0: * the definition of SharedContext::allLocalsAliased. michael@0: */ michael@0: return dn->isClosed() || sc->allLocalsAliased(); michael@0: case Definition::ARG: michael@0: /* michael@0: * Consult the bindings, since they already record aliasing. We might michael@0: * be tempted to use the same definition as VAR/CONST/LET, but there is michael@0: * a problem caused by duplicate arguments: only the last argument with michael@0: * a given name is aliased. This is necessary to avoid generating a michael@0: * shape for the call object with with more than one name for a given michael@0: * slot (which violates internal engine invariants). All this means that michael@0: * the '|| sc->allLocalsAliased()' disjunct is incorrect since it will michael@0: * mark both parameters in function(x,x) as aliased. 
michael@0: */ michael@0: return script->formalIsAliased(pn->pn_cookie.slot()); michael@0: case Definition::VAR: michael@0: case Definition::CONST: michael@0: JS_ASSERT_IF(sc->allLocalsAliased(), script->varIsAliased(pn->pn_cookie.slot())); michael@0: return script->varIsAliased(pn->pn_cookie.slot()); michael@0: case Definition::PLACEHOLDER: michael@0: case Definition::NAMED_LAMBDA: michael@0: case Definition::MISSING: michael@0: MOZ_ASSUME_UNREACHABLE("unexpected dn->kind"); michael@0: } michael@0: return false; michael@0: } michael@0: michael@0: /* michael@0: * Try to convert a *NAME op with a free name to a more specialized GNAME, michael@0: * INTRINSIC or ALIASEDVAR op, which optimize accesses on that name. michael@0: * Return true if a conversion was made. michael@0: */ michael@0: static bool michael@0: TryConvertFreeName(BytecodeEmitter *bce, ParseNode *pn) michael@0: { michael@0: /* michael@0: * In self-hosting mode, JSOP_*NAME is unconditionally converted to michael@0: * JSOP_*INTRINSIC. This causes lookups to be redirected to the special michael@0: * intrinsics holder in the global object, into which any missing values are michael@0: * cloned lazily upon first access. michael@0: */ michael@0: if (bce->emitterMode == BytecodeEmitter::SelfHosting) { michael@0: JSOp op; michael@0: switch (pn->getOp()) { michael@0: case JSOP_NAME: op = JSOP_GETINTRINSIC; break; michael@0: case JSOP_SETNAME: op = JSOP_SETINTRINSIC; break; michael@0: /* Other *NAME ops aren't (yet) supported in self-hosted code. */ michael@0: default: MOZ_ASSUME_UNREACHABLE("intrinsic"); michael@0: } michael@0: pn->setOp(op); michael@0: return true; michael@0: } michael@0: michael@0: /* michael@0: * When parsing inner functions lazily, parse nodes for outer functions no michael@0: * longer exist and only the function's scope chain is available for michael@0: * resolving upvar accesses within the inner function. 
michael@0: */ michael@0: if (bce->emitterMode == BytecodeEmitter::LazyFunction) { michael@0: // The only statements within a lazy function which can push lexical michael@0: // scopes are try/catch blocks. Use generic ops in this case. michael@0: for (StmtInfoBCE *stmt = bce->topStmt; stmt; stmt = stmt->down) { michael@0: if (stmt->type == STMT_CATCH) michael@0: return true; michael@0: } michael@0: michael@0: size_t hops = 0; michael@0: FunctionBox *funbox = bce->sc->asFunctionBox(); michael@0: if (funbox->hasExtensibleScope()) michael@0: return false; michael@0: if (funbox->function()->isNamedLambda() && funbox->function()->atom() == pn->pn_atom) michael@0: return false; michael@0: if (funbox->isHeavyweight()) { michael@0: hops++; michael@0: if (funbox->function()->isNamedLambda()) michael@0: hops++; michael@0: } michael@0: if (bce->script->directlyInsideEval()) michael@0: return false; michael@0: RootedObject outerScope(bce->sc->context, bce->script->enclosingStaticScope()); michael@0: for (StaticScopeIter ssi(bce->sc->context, outerScope); !ssi.done(); ssi++) { michael@0: if (ssi.type() != StaticScopeIter::FUNCTION) { michael@0: if (ssi.type() == StaticScopeIter::BLOCK) { michael@0: // Use generic ops if a catch block is encountered. 
michael@0: return false; michael@0: } michael@0: if (ssi.hasDynamicScopeObject()) michael@0: hops++; michael@0: continue; michael@0: } michael@0: RootedScript script(bce->sc->context, ssi.funScript()); michael@0: if (script->functionNonDelazifying()->atom() == pn->pn_atom) michael@0: return false; michael@0: if (ssi.hasDynamicScopeObject()) { michael@0: uint32_t slot; michael@0: if (LookupAliasedName(script, pn->pn_atom->asPropertyName(), &slot)) { michael@0: JSOp op; michael@0: switch (pn->getOp()) { michael@0: case JSOP_NAME: op = JSOP_GETALIASEDVAR; break; michael@0: case JSOP_SETNAME: op = JSOP_SETALIASEDVAR; break; michael@0: default: return false; michael@0: } michael@0: pn->setOp(op); michael@0: JS_ALWAYS_TRUE(pn->pn_cookie.set(bce->parser->tokenStream, hops, slot)); michael@0: return true; michael@0: } michael@0: hops++; michael@0: } michael@0: michael@0: if (script->funHasExtensibleScope() || script->directlyInsideEval()) michael@0: return false; michael@0: } michael@0: } michael@0: michael@0: // Unbound names aren't recognizable global-property references if the michael@0: // script isn't running against its global object. michael@0: if (!bce->script->compileAndGo() || !bce->hasGlobalScope) michael@0: return false; michael@0: michael@0: // Deoptimized names also aren't necessarily globals. michael@0: if (pn->isDeoptimized()) michael@0: return false; michael@0: michael@0: if (bce->sc->isFunctionBox()) { michael@0: // Unbound names in function code may not be globals if new locals can michael@0: // be added to this function (or an enclosing one) to alias a global michael@0: // reference. 
michael@0: FunctionBox *funbox = bce->sc->asFunctionBox(); michael@0: if (funbox->mightAliasLocals()) michael@0: return false; michael@0: } michael@0: michael@0: // If this is eval code, being evaluated inside strict mode eval code, michael@0: // an "unbound" name might be a binding local to that outer eval: michael@0: // michael@0: // var x = "GLOBAL"; michael@0: // eval('"use strict"; ' + michael@0: // 'var x; ' + michael@0: // 'eval("print(x)");'); // "undefined", not "GLOBAL" michael@0: // michael@0: // Given the enclosing eval code's strictness and its bindings (neither is michael@0: // readily available now), we could exactly check global-ness, but it's not michael@0: // worth the trouble for doubly-nested eval code. So we conservatively michael@0: // approximate. If the outer eval code is strict, then this eval code will michael@0: // be: thus, don't optimize if we're compiling strict code inside an eval. michael@0: if (bce->insideEval && bce->sc->strict) michael@0: return false; michael@0: michael@0: // Beware: if you change anything here, you might also need to change michael@0: // js::ReportIfUndeclaredVarAssignment. michael@0: JSOp op; michael@0: switch (pn->getOp()) { michael@0: case JSOP_NAME: op = JSOP_GETGNAME; break; michael@0: case JSOP_SETNAME: op = JSOP_SETGNAME; break; michael@0: case JSOP_SETCONST: michael@0: // Not supported. michael@0: return false; michael@0: default: MOZ_ASSUME_UNREACHABLE("gname"); michael@0: } michael@0: pn->setOp(op); michael@0: return true; michael@0: } michael@0: michael@0: /* michael@0: * BindNameToSlotHelper attempts to optimize name gets and sets to stack slot michael@0: * loads and stores, given the compile-time information in bce and a PNK_NAME michael@0: * node pn. It returns false on error, true on success. michael@0: * michael@0: * The caller can test pn->pn_cookie.isFree() to tell whether optimization michael@0: * occurred, in which case BindNameToSlotHelper also updated pn->pn_op. 
If michael@0: * pn->pn_cookie.isFree() is still true on return, pn->pn_op still may have michael@0: * been optimized, e.g., from JSOP_NAME to JSOP_CALLEE. Whether or not michael@0: * pn->pn_op was modified, if this function finds an argument or local variable michael@0: * name, PND_CONST will be set in pn_dflags for read-only properties after a michael@0: * successful return. michael@0: * michael@0: * NB: if you add more opcodes specialized from JSOP_NAME, etc., don't forget michael@0: * to update the special cases in EmitFor (for-in) and EmitAssignment (= and michael@0: * op=, e.g. +=). michael@0: */ michael@0: static bool michael@0: BindNameToSlotHelper(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) michael@0: { michael@0: JS_ASSERT(pn->isKind(PNK_NAME)); michael@0: michael@0: JS_ASSERT_IF(pn->isKind(PNK_FUNCTION), pn->isBound()); michael@0: michael@0: /* Don't attempt if 'pn' is already bound or deoptimized or a function. */ michael@0: if (pn->isBound() || pn->isDeoptimized()) michael@0: return true; michael@0: michael@0: /* JSOP_CALLEE is pre-bound by definition. */ michael@0: JSOp op = pn->getOp(); michael@0: JS_ASSERT(op != JSOP_CALLEE); michael@0: JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM); michael@0: michael@0: /* michael@0: * The parser already linked name uses to definitions when (where not michael@0: * prevented by non-lexical constructs like 'with' and 'eval'). 
michael@0: */ michael@0: Definition *dn; michael@0: if (pn->isUsed()) { michael@0: JS_ASSERT(pn->pn_cookie.isFree()); michael@0: dn = pn->pn_lexdef; michael@0: JS_ASSERT(dn->isDefn()); michael@0: pn->pn_dflags |= (dn->pn_dflags & PND_CONST); michael@0: } else if (pn->isDefn()) { michael@0: dn = (Definition *) pn; michael@0: } else { michael@0: return true; michael@0: } michael@0: michael@0: /* michael@0: * Turn attempts to mutate const-declared bindings into get ops (for michael@0: * pre-increment and pre-decrement ops, our caller will have to emit michael@0: * JSOP_POS, JSOP_ONE, and JSOP_ADD as well). michael@0: * michael@0: * Turn JSOP_DELNAME into JSOP_FALSE if dn is known, as all declared michael@0: * bindings visible to the compiler are permanent in JS unless the michael@0: * declaration originates at top level in eval code. michael@0: */ michael@0: switch (op) { michael@0: case JSOP_NAME: michael@0: case JSOP_SETCONST: michael@0: break; michael@0: default: michael@0: if (pn->isConst()) { michael@0: if (bce->sc->needStrictChecks()) { michael@0: JSAutoByteString name; michael@0: if (!AtomToPrintableString(cx, pn->pn_atom, &name) || michael@0: !bce->reportStrictModeError(pn, JSMSG_READ_ONLY, name.ptr())) michael@0: { michael@0: return false; michael@0: } michael@0: } michael@0: pn->setOp(op = JSOP_NAME); michael@0: } michael@0: } michael@0: michael@0: if (dn->pn_cookie.isFree()) { michael@0: if (HandleScript caller = bce->evalCaller) { michael@0: JS_ASSERT(bce->script->compileAndGo()); michael@0: michael@0: /* michael@0: * Don't generate upvars on the left side of a for loop. See michael@0: * bug 470758. michael@0: */ michael@0: if (bce->emittingForInit) michael@0: return true; michael@0: michael@0: /* michael@0: * If this is an eval in the global scope, then unbound variables michael@0: * must be globals, so try to use GNAME ops. 
michael@0: */ michael@0: if (!caller->functionOrCallerFunction() && TryConvertFreeName(bce, pn)) { michael@0: pn->pn_dflags |= PND_BOUND; michael@0: return true; michael@0: } michael@0: michael@0: /* michael@0: * Out of tricks, so we must rely on PICs to optimize named michael@0: * accesses from direct eval called from function code. michael@0: */ michael@0: return true; michael@0: } michael@0: michael@0: /* Optimize accesses to undeclared globals. */ michael@0: if (!TryConvertFreeName(bce, pn)) michael@0: return true; michael@0: michael@0: pn->pn_dflags |= PND_BOUND; michael@0: return true; michael@0: } michael@0: michael@0: /* michael@0: * At this point, we are only dealing with uses that have already been michael@0: * bound to definitions via pn_lexdef. The rest of this routine converts michael@0: * the parse node of the use from its initial JSOP_*NAME* op to a LOCAL/ARG michael@0: * op. This requires setting the node's pn_cookie with a pair (level, slot) michael@0: * where 'level' is the number of function scopes between the use and the michael@0: * def and 'slot' is the index to emit as the immediate of the ARG/LOCAL michael@0: * op. For example, in this code: michael@0: * michael@0: * function(a,b,x) { return x } michael@0: * function(y) { function() { return y } } michael@0: * michael@0: * x will get (level = 0, slot = 2) and y will get (level = 1, slot = 0). michael@0: */ michael@0: JS_ASSERT(!pn->isDefn()); michael@0: JS_ASSERT(pn->isUsed()); michael@0: JS_ASSERT(pn->pn_lexdef); michael@0: JS_ASSERT(pn->pn_cookie.isFree()); michael@0: michael@0: /* michael@0: * We are compiling a function body and may be able to optimize name michael@0: * to stack slot. Look for an argument or variable in the function and michael@0: * rewrite pn_op and update pn accordingly. 
michael@0: */ michael@0: switch (dn->kind()) { michael@0: case Definition::ARG: michael@0: switch (op) { michael@0: case JSOP_NAME: op = JSOP_GETARG; break; michael@0: case JSOP_SETNAME: op = JSOP_SETARG; break; michael@0: default: MOZ_ASSUME_UNREACHABLE("arg"); michael@0: } michael@0: JS_ASSERT(!pn->isConst()); michael@0: break; michael@0: michael@0: case Definition::VAR: michael@0: case Definition::CONST: michael@0: case Definition::LET: michael@0: switch (op) { michael@0: case JSOP_NAME: op = JSOP_GETLOCAL; break; michael@0: case JSOP_SETNAME: op = JSOP_SETLOCAL; break; michael@0: case JSOP_SETCONST: op = JSOP_SETLOCAL; break; michael@0: default: MOZ_ASSUME_UNREACHABLE("local"); michael@0: } michael@0: break; michael@0: michael@0: case Definition::NAMED_LAMBDA: { michael@0: JS_ASSERT(dn->isOp(JSOP_CALLEE)); michael@0: JS_ASSERT(op != JSOP_CALLEE); michael@0: michael@0: /* michael@0: * Currently, the ALIASEDVAR ops do not support accessing the michael@0: * callee of a DeclEnvObject, so use NAME. michael@0: */ michael@0: if (dn->pn_cookie.level() != bce->script->staticLevel()) michael@0: return true; michael@0: michael@0: DebugOnly fun = bce->sc->asFunctionBox()->function(); michael@0: JS_ASSERT(fun->isLambda()); michael@0: JS_ASSERT(pn->pn_atom == fun->atom()); michael@0: michael@0: /* michael@0: * Leave pn->isOp(JSOP_NAME) if bce->fun is heavyweight to michael@0: * address two cases: a new binding introduced by eval, and michael@0: * assignment to the name in strict mode. michael@0: * michael@0: * var fun = (function f(s) { eval(s); return f; }); michael@0: * assertEq(fun("var f = 42"), 42); michael@0: * michael@0: * ECMAScript specifies that a function expression's name is bound michael@0: * in a lexical environment distinct from that used to bind its michael@0: * named parameters, the arguments object, and its variables. 
The michael@0: * new binding for "var f = 42" shadows the binding for the michael@0: * function itself, so the name of the function will not refer to michael@0: * the function. michael@0: * michael@0: * (function f() { "use strict"; f = 12; })(); michael@0: * michael@0: * Outside strict mode, assignment to a function expression's name michael@0: * has no effect. But in strict mode, this attempt to mutate an michael@0: * immutable binding must throw a TypeError. We implement this by michael@0: * not optimizing such assignments and by marking such functions as michael@0: * heavyweight, ensuring that the function name is represented in michael@0: * the scope chain so that assignment will throw a TypeError. michael@0: */ michael@0: if (!bce->sc->asFunctionBox()->isHeavyweight()) { michael@0: op = JSOP_CALLEE; michael@0: pn->pn_dflags |= PND_CONST; michael@0: } michael@0: michael@0: pn->setOp(op); michael@0: pn->pn_dflags |= PND_BOUND; michael@0: return true; michael@0: } michael@0: michael@0: case Definition::PLACEHOLDER: michael@0: return true; michael@0: michael@0: case Definition::MISSING: michael@0: MOZ_ASSUME_UNREACHABLE("missing"); michael@0: } michael@0: michael@0: /* michael@0: * The difference between the current static level and the static level of michael@0: * the definition is the number of function scopes between the current michael@0: * scope and dn's scope. michael@0: */ michael@0: unsigned skip = bce->script->staticLevel() - dn->pn_cookie.level(); michael@0: JS_ASSERT_IF(skip, dn->isClosed()); michael@0: michael@0: /* michael@0: * Explicitly disallow accessing var/let bindings in global scope from michael@0: * nested functions. The reason for this limitation is that, since the michael@0: * global script is not included in the static scope chain (1. because it michael@0: * has no object to stand in the static scope chain, 2. 
to minimize memory michael@0: * bloat where a single live function keeps its whole global script michael@0: * alive.), ScopeCoordinateToTypeSet is not able to find the var/let's michael@0: * associated types::TypeSet. michael@0: */ michael@0: if (skip) { michael@0: BytecodeEmitter *bceSkipped = bce; michael@0: for (unsigned i = 0; i < skip; i++) michael@0: bceSkipped = bceSkipped->parent; michael@0: if (!bceSkipped->sc->isFunctionBox()) michael@0: return true; michael@0: } michael@0: michael@0: JS_ASSERT(!pn->isOp(op)); michael@0: pn->setOp(op); michael@0: if (!pn->pn_cookie.set(bce->parser->tokenStream, skip, dn->pn_cookie.slot())) michael@0: return false; michael@0: michael@0: pn->pn_dflags |= PND_BOUND; michael@0: return true; michael@0: } michael@0: michael@0: /* michael@0: * Attempts to bind the name, then checks that no dynamic scope lookup ops are michael@0: * emitted in self-hosting mode. NAME ops do lookups off current scope chain, michael@0: * and we do not want to allow self-hosted code to use the dynamic scope. michael@0: */ michael@0: static bool michael@0: BindNameToSlot(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) michael@0: { michael@0: if (!BindNameToSlotHelper(cx, bce, pn)) michael@0: return false; michael@0: michael@0: if (bce->emitterMode == BytecodeEmitter::SelfHosting && !pn->isBound()) { michael@0: bce->reportError(pn, JSMSG_SELFHOSTED_UNBOUND_NAME); michael@0: return false; michael@0: } michael@0: michael@0: return true; michael@0: } michael@0: michael@0: /* michael@0: * If pn contains a useful expression, return true with *answer set to true. michael@0: * If pn contains a useless expression, return true with *answer set to false. michael@0: * Return false on error. michael@0: * michael@0: * The caller should initialize *answer to false and invoke this function on michael@0: * an expression statement or similar subtree to decide whether the tree could michael@0: * produce code that has any side effects. 
For an expression statement, we michael@0: * define useless code as code with no side effects, because the main effect, michael@0: * the value left on the stack after the code executes, will be discarded by a michael@0: * pop bytecode. michael@0: */ michael@0: static bool michael@0: CheckSideEffects(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, bool *answer) michael@0: { michael@0: if (!pn || *answer) michael@0: return true; michael@0: michael@0: switch (pn->getArity()) { michael@0: case PN_CODE: michael@0: /* michael@0: * A named function, contrary to ES3, is no longer useful, because we michael@0: * bind its name lexically (using JSOP_CALLEE) instead of creating an michael@0: * Object instance and binding a readonly, permanent property in it michael@0: * (the object and binding can be detected and hijacked or captured). michael@0: * This is a bug fix to ES3; it is fixed in ES3.1 drafts. michael@0: */ michael@0: MOZ_ASSERT(*answer == false); michael@0: return true; michael@0: michael@0: case PN_LIST: michael@0: if (pn->isOp(JSOP_NOP) || pn->isOp(JSOP_OR) || pn->isOp(JSOP_AND) || michael@0: pn->isOp(JSOP_STRICTEQ) || pn->isOp(JSOP_STRICTNE)) { michael@0: /* michael@0: * Non-operators along with ||, &&, ===, and !== never invoke michael@0: * toString or valueOf. michael@0: */ michael@0: bool ok = true; michael@0: for (ParseNode *pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) michael@0: ok &= CheckSideEffects(cx, bce, pn2, answer); michael@0: return ok; michael@0: } michael@0: michael@0: if (pn->isKind(PNK_GENEXP)) { michael@0: /* Generator-expressions are harmless if the result is ignored. */ michael@0: MOZ_ASSERT(*answer == false); michael@0: return true; michael@0: } michael@0: michael@0: /* michael@0: * All invocation operations (construct: PNK_NEW, call: PNK_CALL) michael@0: * are presumed to be useful, because they may have side effects michael@0: * even if their main effect (their return value) is discarded. 
michael@0: * michael@0: * PNK_ELEM binary trees of 3+ nodes are flattened into lists to michael@0: * avoid too much recursion. All such lists must be presumed to be michael@0: * useful because each index operation could invoke a getter. michael@0: * michael@0: * Likewise, array and object initialisers may call prototype michael@0: * setters (the __defineSetter__ built-in, and writable __proto__ michael@0: * on Array.prototype create this hazard). Initialiser list nodes michael@0: * have JSOP_NEWINIT in their pn_op. michael@0: */ michael@0: *answer = true; michael@0: return true; michael@0: michael@0: case PN_TERNARY: michael@0: return CheckSideEffects(cx, bce, pn->pn_kid1, answer) && michael@0: CheckSideEffects(cx, bce, pn->pn_kid2, answer) && michael@0: CheckSideEffects(cx, bce, pn->pn_kid3, answer); michael@0: michael@0: case PN_BINARY: michael@0: case PN_BINARY_OBJ: michael@0: if (pn->isAssignment()) { michael@0: /* michael@0: * Assignment is presumed to be useful, even if the next operation michael@0: * is another assignment overwriting this one's ostensible effect, michael@0: * because the left operand may be a property with a setter that michael@0: * has side effects. michael@0: * michael@0: * The only exception is assignment of a useless value to a const michael@0: * declared in the function currently being compiled. 
michael@0: */ michael@0: ParseNode *pn2 = pn->pn_left; michael@0: if (!pn2->isKind(PNK_NAME)) { michael@0: *answer = true; michael@0: } else { michael@0: if (!BindNameToSlot(cx, bce, pn2)) michael@0: return false; michael@0: if (!CheckSideEffects(cx, bce, pn->pn_right, answer)) michael@0: return false; michael@0: if (!*answer && (!pn->isOp(JSOP_NOP) || !pn2->isConst())) michael@0: *answer = true; michael@0: } michael@0: return true; michael@0: } michael@0: michael@0: if (pn->isOp(JSOP_OR) || pn->isOp(JSOP_AND) || pn->isOp(JSOP_STRICTEQ) || michael@0: pn->isOp(JSOP_STRICTNE)) { michael@0: /* michael@0: * ||, &&, ===, and !== do not convert their operands via michael@0: * toString or valueOf method calls. michael@0: */ michael@0: return CheckSideEffects(cx, bce, pn->pn_left, answer) && michael@0: CheckSideEffects(cx, bce, pn->pn_right, answer); michael@0: } michael@0: michael@0: /* michael@0: * We can't easily prove that neither operand ever denotes an michael@0: * object with a toString or valueOf method. michael@0: */ michael@0: *answer = true; michael@0: return true; michael@0: michael@0: case PN_UNARY: michael@0: switch (pn->getKind()) { michael@0: case PNK_DELETE: michael@0: { michael@0: ParseNode *pn2 = pn->pn_kid; michael@0: switch (pn2->getKind()) { michael@0: case PNK_NAME: michael@0: if (!BindNameToSlot(cx, bce, pn2)) michael@0: return false; michael@0: if (pn2->isConst()) { michael@0: MOZ_ASSERT(*answer == false); michael@0: return true; michael@0: } michael@0: /* FALL THROUGH */ michael@0: case PNK_DOT: michael@0: case PNK_CALL: michael@0: case PNK_ELEM: michael@0: /* All these delete addressing modes have effects too. 
*/ michael@0: *answer = true; michael@0: return true; michael@0: default: michael@0: return CheckSideEffects(cx, bce, pn2, answer); michael@0: } michael@0: MOZ_ASSUME_UNREACHABLE("We have a returning default case"); michael@0: } michael@0: michael@0: case PNK_TYPEOF: michael@0: case PNK_VOID: michael@0: case PNK_NOT: michael@0: case PNK_BITNOT: michael@0: if (pn->isOp(JSOP_NOT)) { michael@0: /* ! does not convert its operand via toString or valueOf. */ michael@0: return CheckSideEffects(cx, bce, pn->pn_kid, answer); michael@0: } michael@0: /* FALL THROUGH */ michael@0: michael@0: default: michael@0: /* michael@0: * All of PNK_INC, PNK_DEC, PNK_THROW, PNK_YIELD, and PNK_YIELD_STAR michael@0: * have direct effects. Of the remaining unary-arity node types, we michael@0: * can't easily prove that the operand never denotes an object with michael@0: * a toString or valueOf method. michael@0: */ michael@0: *answer = true; michael@0: return true; michael@0: } michael@0: MOZ_ASSUME_UNREACHABLE("We have a returning default case"); michael@0: michael@0: case PN_NAME: michael@0: /* michael@0: * Take care to avoid trying to bind a label name (labels, both for michael@0: * statements and property values in object initialisers, have pn_op michael@0: * defaulted to JSOP_NOP). michael@0: */ michael@0: if (pn->isKind(PNK_NAME) && !pn->isOp(JSOP_NOP)) { michael@0: if (!BindNameToSlot(cx, bce, pn)) michael@0: return false; michael@0: if (!pn->isOp(JSOP_CALLEE) && pn->pn_cookie.isFree()) { michael@0: /* michael@0: * Not a use of an unshadowed named function expression's given michael@0: * name, so this expression could invoke a getter that has side michael@0: * effects. michael@0: */ michael@0: *answer = true; michael@0: } michael@0: } michael@0: if (pn->isKind(PNK_DOT)) { michael@0: /* Dotted property references in general can call getters. 
*/ michael@0: *answer = true; michael@0: } michael@0: return CheckSideEffects(cx, bce, pn->maybeExpr(), answer); michael@0: michael@0: case PN_NULLARY: michael@0: if (pn->isKind(PNK_DEBUGGER)) michael@0: *answer = true; michael@0: return true; michael@0: } michael@0: return true; michael@0: } michael@0: michael@0: bool michael@0: BytecodeEmitter::isInLoop() michael@0: { michael@0: for (StmtInfoBCE *stmt = topStmt; stmt; stmt = stmt->down) { michael@0: if (stmt->isLoop()) michael@0: return true; michael@0: } michael@0: return false; michael@0: } michael@0: michael@0: bool michael@0: BytecodeEmitter::checkSingletonContext() michael@0: { michael@0: if (!script->compileAndGo() || sc->isFunctionBox() || isInLoop()) michael@0: return false; michael@0: hasSingletons = true; michael@0: return true; michael@0: } michael@0: michael@0: bool michael@0: BytecodeEmitter::needsImplicitThis() michael@0: { michael@0: if (!script->compileAndGo()) michael@0: return true; michael@0: michael@0: if (sc->isFunctionBox()) { michael@0: if (sc->asFunctionBox()->inWith) michael@0: return true; michael@0: } else { michael@0: JSObject *scope = sc->asGlobalSharedContext()->scopeChain(); michael@0: while (scope) { michael@0: if (scope->is()) michael@0: return true; michael@0: scope = scope->enclosingScope(); michael@0: } michael@0: } michael@0: michael@0: for (StmtInfoBCE *stmt = topStmt; stmt; stmt = stmt->down) { michael@0: if (stmt->type == STMT_WITH) michael@0: return true; michael@0: } michael@0: return false; michael@0: } michael@0: michael@0: void michael@0: BytecodeEmitter::tellDebuggerAboutCompiledScript(ExclusiveContext *cx) michael@0: { michael@0: // Note: when parsing off thread the resulting scripts need to be handed to michael@0: // the debugger after rejoining to the main thread. 
michael@0: if (!cx->isJSContext()) michael@0: return; michael@0: michael@0: RootedFunction function(cx, script->functionNonDelazifying()); michael@0: CallNewScriptHook(cx->asJSContext(), script, function); michael@0: // Lazy scripts are never top level (despite always being invoked with a michael@0: // nullptr parent), and so the hook should never be fired. michael@0: if (emitterMode != LazyFunction && !parent) { michael@0: GlobalObject *compileAndGoGlobal = nullptr; michael@0: if (script->compileAndGo()) michael@0: compileAndGoGlobal = &script->global(); michael@0: Debugger::onNewScript(cx->asJSContext(), script, compileAndGoGlobal); michael@0: } michael@0: } michael@0: michael@0: inline TokenStream * michael@0: BytecodeEmitter::tokenStream() michael@0: { michael@0: return &parser->tokenStream; michael@0: } michael@0: michael@0: bool michael@0: BytecodeEmitter::reportError(ParseNode *pn, unsigned errorNumber, ...) michael@0: { michael@0: TokenPos pos = pn ? pn->pn_pos : tokenStream()->currentToken().pos; michael@0: michael@0: va_list args; michael@0: va_start(args, errorNumber); michael@0: bool result = tokenStream()->reportCompileErrorNumberVA(pos.begin, JSREPORT_ERROR, michael@0: errorNumber, args); michael@0: va_end(args); michael@0: return result; michael@0: } michael@0: michael@0: bool michael@0: BytecodeEmitter::reportStrictWarning(ParseNode *pn, unsigned errorNumber, ...) michael@0: { michael@0: TokenPos pos = pn ? pn->pn_pos : tokenStream()->currentToken().pos; michael@0: michael@0: va_list args; michael@0: va_start(args, errorNumber); michael@0: bool result = tokenStream()->reportStrictWarningErrorNumberVA(pos.begin, errorNumber, args); michael@0: va_end(args); michael@0: return result; michael@0: } michael@0: michael@0: bool michael@0: BytecodeEmitter::reportStrictModeError(ParseNode *pn, unsigned errorNumber, ...) michael@0: { michael@0: TokenPos pos = pn ? 
pn->pn_pos : tokenStream()->currentToken().pos; michael@0: michael@0: va_list args; michael@0: va_start(args, errorNumber); michael@0: bool result = tokenStream()->reportStrictModeErrorNumberVA(pos.begin, sc->strict, michael@0: errorNumber, args); michael@0: va_end(args); michael@0: return result; michael@0: } michael@0: michael@0: static bool michael@0: EmitNewInit(ExclusiveContext *cx, BytecodeEmitter *bce, JSProtoKey key) michael@0: { michael@0: const size_t len = 1 + UINT32_INDEX_LEN; michael@0: ptrdiff_t offset = EmitCheck(cx, bce, len); michael@0: if (offset < 0) michael@0: return false; michael@0: michael@0: jsbytecode *code = bce->code(offset); michael@0: code[0] = JSOP_NEWINIT; michael@0: code[1] = jsbytecode(key); michael@0: code[2] = 0; michael@0: code[3] = 0; michael@0: code[4] = 0; michael@0: UpdateDepth(cx, bce, offset); michael@0: CheckTypeSet(cx, bce, JSOP_NEWINIT); michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: IteratorResultShape(ExclusiveContext *cx, BytecodeEmitter *bce, unsigned *shape) michael@0: { michael@0: JS_ASSERT(bce->script->compileAndGo()); michael@0: michael@0: RootedObject obj(cx); michael@0: gc::AllocKind kind = GuessObjectGCKind(2); michael@0: obj = NewBuiltinClassInstance(cx, &JSObject::class_, kind); michael@0: if (!obj) michael@0: return false; michael@0: michael@0: Rooted value_id(cx, AtomToId(cx->names().value)); michael@0: Rooted done_id(cx, AtomToId(cx->names().done)); michael@0: if (!DefineNativeProperty(cx, obj, value_id, UndefinedHandleValue, nullptr, nullptr, michael@0: JSPROP_ENUMERATE)) michael@0: return false; michael@0: if (!DefineNativeProperty(cx, obj, done_id, UndefinedHandleValue, nullptr, nullptr, michael@0: JSPROP_ENUMERATE)) michael@0: return false; michael@0: michael@0: ObjectBox *objbox = bce->parser->newObjectBox(obj); michael@0: if (!objbox) michael@0: return false; michael@0: michael@0: *shape = bce->objectList.add(objbox); michael@0: michael@0: return true; michael@0: } 
michael@0: michael@0: static bool michael@0: EmitPrepareIteratorResult(ExclusiveContext *cx, BytecodeEmitter *bce) michael@0: { michael@0: if (bce->script->compileAndGo()) { michael@0: unsigned shape; michael@0: if (!IteratorResultShape(cx, bce, &shape)) michael@0: return false; michael@0: return EmitIndex32(cx, JSOP_NEWOBJECT, shape, bce); michael@0: } michael@0: michael@0: return EmitNewInit(cx, bce, JSProto_Object); michael@0: } michael@0: michael@0: static bool michael@0: EmitFinishIteratorResult(ExclusiveContext *cx, BytecodeEmitter *bce, bool done) michael@0: { michael@0: jsatomid value_id; michael@0: if (!bce->makeAtomIndex(cx->names().value, &value_id)) michael@0: return UINT_MAX; michael@0: jsatomid done_id; michael@0: if (!bce->makeAtomIndex(cx->names().done, &done_id)) michael@0: return UINT_MAX; michael@0: michael@0: if (!EmitIndex32(cx, JSOP_INITPROP, value_id, bce)) michael@0: return false; michael@0: if (Emit1(cx, bce, done ? JSOP_TRUE : JSOP_FALSE) < 0) michael@0: return false; michael@0: if (!EmitIndex32(cx, JSOP_INITPROP, done_id, bce)) michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_ENDINIT) < 0) michael@0: return false; michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: EmitNameOp(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, bool callContext) michael@0: { michael@0: if (!BindNameToSlot(cx, bce, pn)) michael@0: return false; michael@0: michael@0: JSOp op = pn->getOp(); michael@0: michael@0: if (op == JSOP_CALLEE) { michael@0: if (Emit1(cx, bce, op) < 0) michael@0: return false; michael@0: } else { michael@0: if (!pn->pn_cookie.isFree()) { michael@0: JS_ASSERT(JOF_OPTYPE(op) != JOF_ATOM); michael@0: if (!EmitVarOp(cx, pn, op, bce)) michael@0: return false; michael@0: } else { michael@0: if (!EmitAtomOp(cx, pn, op, bce)) michael@0: return false; michael@0: } michael@0: } michael@0: michael@0: /* Need to provide |this| value for call */ michael@0: if (callContext) { michael@0: if (op == 
JSOP_NAME && bce->needsImplicitThis()) { michael@0: if (!EmitAtomOp(cx, pn, JSOP_IMPLICITTHIS, bce)) michael@0: return false; michael@0: } else { michael@0: if (Emit1(cx, bce, JSOP_UNDEFINED) < 0) michael@0: return false; michael@0: } michael@0: } michael@0: michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: EmitPropLHS(ExclusiveContext *cx, ParseNode *pn, JSOp op, BytecodeEmitter *bce) michael@0: { michael@0: JS_ASSERT(pn->isKind(PNK_DOT)); michael@0: ParseNode *pn2 = pn->maybeExpr(); michael@0: michael@0: /* michael@0: * If the object operand is also a dotted property reference, reverse the michael@0: * list linked via pn_expr temporarily so we can iterate over it from the michael@0: * bottom up (reversing again as we go), to avoid excessive recursion. michael@0: */ michael@0: if (pn2->isKind(PNK_DOT)) { michael@0: ParseNode *pndot = pn2; michael@0: ParseNode *pnup = nullptr, *pndown; michael@0: ptrdiff_t top = bce->offset(); michael@0: for (;;) { michael@0: /* Reverse pndot->pn_expr to point up, not down. */ michael@0: pndot->pn_offset = top; michael@0: JS_ASSERT(!pndot->isUsed()); michael@0: pndown = pndot->pn_expr; michael@0: pndot->pn_expr = pnup; michael@0: if (!pndown->isKind(PNK_DOT)) michael@0: break; michael@0: pnup = pndot; michael@0: pndot = pndown; michael@0: } michael@0: michael@0: /* pndown is a primary expression, not a dotted property reference. */ michael@0: if (!EmitTree(cx, bce, pndown)) michael@0: return false; michael@0: michael@0: do { michael@0: /* Walk back up the list, emitting annotated name ops. */ michael@0: if (!EmitAtomOp(cx, pndot, JSOP_GETPROP, bce)) michael@0: return false; michael@0: michael@0: /* Reverse the pn_expr link again. */ michael@0: pnup = pndot->pn_expr; michael@0: pndot->pn_expr = pndown; michael@0: pndown = pndot; michael@0: } while ((pndot = pnup) != nullptr); michael@0: return true; michael@0: } michael@0: michael@0: // The non-optimized case. 
michael@0: return EmitTree(cx, bce, pn2); michael@0: } michael@0: michael@0: static bool michael@0: EmitPropOp(ExclusiveContext *cx, ParseNode *pn, JSOp op, BytecodeEmitter *bce) michael@0: { michael@0: JS_ASSERT(pn->isArity(PN_NAME)); michael@0: michael@0: if (!EmitPropLHS(cx, pn, op, bce)) michael@0: return false; michael@0: michael@0: if (op == JSOP_CALLPROP && Emit1(cx, bce, JSOP_DUP) < 0) michael@0: return false; michael@0: michael@0: if (!EmitAtomOp(cx, pn, op, bce)) michael@0: return false; michael@0: michael@0: if (op == JSOP_CALLPROP && Emit1(cx, bce, JSOP_SWAP) < 0) michael@0: return false; michael@0: michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: EmitPropIncDec(ExclusiveContext *cx, ParseNode *pn, BytecodeEmitter *bce) michael@0: { michael@0: JS_ASSERT(pn->pn_kid->getKind() == PNK_DOT); michael@0: michael@0: bool post; michael@0: JSOp binop = GetIncDecInfo(pn->getKind(), &post); michael@0: michael@0: JSOp get = JSOP_GETPROP; michael@0: if (!EmitPropLHS(cx, pn->pn_kid, get, bce)) // OBJ michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_DUP) < 0) // OBJ OBJ michael@0: return false; michael@0: if (!EmitAtomOp(cx, pn->pn_kid, JSOP_GETPROP, bce)) // OBJ V michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_POS) < 0) // OBJ N michael@0: return false; michael@0: if (post && Emit1(cx, bce, JSOP_DUP) < 0) // OBJ N? N michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_ONE) < 0) // OBJ N? N 1 michael@0: return false; michael@0: if (Emit1(cx, bce, binop) < 0) // OBJ N? N+1 michael@0: return false; michael@0: michael@0: if (post) { michael@0: if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)2) < 0) // N? N+1 OBJ michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_SWAP) < 0) // N? OBJ N+1 michael@0: return false; michael@0: } michael@0: michael@0: if (!EmitAtomOp(cx, pn->pn_kid, JSOP_SETPROP, bce)) // N? 
N+1 michael@0: return false; michael@0: if (post && Emit1(cx, bce, JSOP_POP) < 0) // RESULT michael@0: return false; michael@0: michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: EmitNameIncDec(ExclusiveContext *cx, ParseNode *pn, BytecodeEmitter *bce) michael@0: { michael@0: const JSCodeSpec *cs = &js_CodeSpec[pn->pn_kid->getOp()]; michael@0: michael@0: bool global = (cs->format & JOF_GNAME); michael@0: bool post; michael@0: JSOp binop = GetIncDecInfo(pn->getKind(), &post); michael@0: michael@0: if (!EmitAtomOp(cx, pn->pn_kid, global ? JSOP_BINDGNAME : JSOP_BINDNAME, bce)) // OBJ michael@0: return false; michael@0: if (!EmitAtomOp(cx, pn->pn_kid, global ? JSOP_GETGNAME : JSOP_NAME, bce)) // OBJ V michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_POS) < 0) // OBJ N michael@0: return false; michael@0: if (post && Emit1(cx, bce, JSOP_DUP) < 0) // OBJ N? N michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_ONE) < 0) // OBJ N? N 1 michael@0: return false; michael@0: if (Emit1(cx, bce, binop) < 0) // OBJ N? N+1 michael@0: return false; michael@0: michael@0: if (post) { michael@0: if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)2) < 0) // N? N+1 OBJ michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_SWAP) < 0) // N? OBJ N+1 michael@0: return false; michael@0: } michael@0: michael@0: if (!EmitAtomOp(cx, pn->pn_kid, global ? JSOP_SETGNAME : JSOP_SETNAME, bce)) // N? N+1 michael@0: return false; michael@0: if (post && Emit1(cx, bce, JSOP_POP) < 0) // RESULT michael@0: return false; michael@0: michael@0: return true; michael@0: } michael@0: michael@0: /* michael@0: * Emit bytecode to put operands for a JSOP_GETELEM/CALLELEM/SETELEM/DELELEM michael@0: * opcode onto the stack in the right order. In the case of SETELEM, the michael@0: * value to be assigned must already be pushed. 
michael@0: */ michael@0: static bool michael@0: EmitElemOperands(ExclusiveContext *cx, ParseNode *pn, JSOp op, BytecodeEmitter *bce) michael@0: { michael@0: JS_ASSERT(pn->isArity(PN_BINARY)); michael@0: if (!EmitTree(cx, bce, pn->pn_left)) michael@0: return false; michael@0: if (op == JSOP_CALLELEM && Emit1(cx, bce, JSOP_DUP) < 0) michael@0: return false; michael@0: if (!EmitTree(cx, bce, pn->pn_right)) michael@0: return false; michael@0: if (op == JSOP_SETELEM && Emit2(cx, bce, JSOP_PICK, (jsbytecode)2) < 0) michael@0: return false; michael@0: return true; michael@0: } michael@0: michael@0: static inline bool michael@0: EmitElemOpBase(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp op) michael@0: { michael@0: if (Emit1(cx, bce, op) < 0) michael@0: return false; michael@0: CheckTypeSet(cx, bce, op); michael@0: michael@0: if (op == JSOP_CALLELEM) { michael@0: if (Emit1(cx, bce, JSOP_SWAP) < 0) michael@0: return false; michael@0: } michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: EmitElemOp(ExclusiveContext *cx, ParseNode *pn, JSOp op, BytecodeEmitter *bce) michael@0: { michael@0: return EmitElemOperands(cx, pn, op, bce) && EmitElemOpBase(cx, bce, op); michael@0: } michael@0: michael@0: static bool michael@0: EmitElemIncDec(ExclusiveContext *cx, ParseNode *pn, BytecodeEmitter *bce) michael@0: { michael@0: JS_ASSERT(pn->pn_kid->getKind() == PNK_ELEM); michael@0: michael@0: if (!EmitElemOperands(cx, pn->pn_kid, JSOP_GETELEM, bce)) michael@0: return false; michael@0: michael@0: bool post; michael@0: JSOp binop = GetIncDecInfo(pn->getKind(), &post); michael@0: michael@0: /* michael@0: * We need to convert the key to an object id first, so that we do not do michael@0: * it inside both the GETELEM and the SETELEM. 
michael@0: */ michael@0: // OBJ KEY* michael@0: if (Emit1(cx, bce, JSOP_TOID) < 0) // OBJ KEY michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_DUP2) < 0) // OBJ KEY OBJ KEY michael@0: return false; michael@0: if (!EmitElemOpBase(cx, bce, JSOP_GETELEM)) // OBJ KEY V michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_POS) < 0) // OBJ KEY N michael@0: return false; michael@0: if (post && Emit1(cx, bce, JSOP_DUP) < 0) // OBJ KEY N? N michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_ONE) < 0) // OBJ KEY N? N 1 michael@0: return false; michael@0: if (Emit1(cx, bce, binop) < 0) // OBJ KEY N? N+1 michael@0: return false; michael@0: michael@0: if (post) { michael@0: if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)3) < 0) // KEY N N+1 OBJ michael@0: return false; michael@0: if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)3) < 0) // N N+1 OBJ KEY michael@0: return false; michael@0: if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)2) < 0) // N OBJ KEY N+1 michael@0: return false; michael@0: } michael@0: michael@0: if (!EmitElemOpBase(cx, bce, JSOP_SETELEM)) // N? 
N+1 michael@0: return false; michael@0: if (post && Emit1(cx, bce, JSOP_POP) < 0) // RESULT michael@0: return false; michael@0: michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: EmitNumberOp(ExclusiveContext *cx, double dval, BytecodeEmitter *bce) michael@0: { michael@0: int32_t ival; michael@0: uint32_t u; michael@0: ptrdiff_t off; michael@0: jsbytecode *pc; michael@0: michael@0: if (NumberIsInt32(dval, &ival)) { michael@0: if (ival == 0) michael@0: return Emit1(cx, bce, JSOP_ZERO) >= 0; michael@0: if (ival == 1) michael@0: return Emit1(cx, bce, JSOP_ONE) >= 0; michael@0: if ((int)(int8_t)ival == ival) michael@0: return Emit2(cx, bce, JSOP_INT8, (jsbytecode)(int8_t)ival) >= 0; michael@0: michael@0: u = (uint32_t)ival; michael@0: if (u < JS_BIT(16)) { michael@0: EMIT_UINT16_IMM_OP(JSOP_UINT16, u); michael@0: } else if (u < JS_BIT(24)) { michael@0: off = EmitN(cx, bce, JSOP_UINT24, 3); michael@0: if (off < 0) michael@0: return false; michael@0: pc = bce->code(off); michael@0: SET_UINT24(pc, u); michael@0: } else { michael@0: off = EmitN(cx, bce, JSOP_INT32, 4); michael@0: if (off < 0) michael@0: return false; michael@0: pc = bce->code(off); michael@0: SET_INT32(pc, ival); michael@0: } michael@0: return true; michael@0: } michael@0: michael@0: if (!bce->constList.append(DoubleValue(dval))) michael@0: return false; michael@0: michael@0: return EmitIndex32(cx, JSOP_DOUBLE, bce->constList.length() - 1, bce); michael@0: } michael@0: michael@0: static inline void michael@0: SetJumpOffsetAt(BytecodeEmitter *bce, ptrdiff_t off) michael@0: { michael@0: SET_JUMP_OFFSET(bce->code(off), bce->offset() - off); michael@0: } michael@0: michael@0: static bool michael@0: PushUndefinedValues(ExclusiveContext *cx, BytecodeEmitter *bce, unsigned n) michael@0: { michael@0: for (unsigned i = 0; i < n; ++i) { michael@0: if (Emit1(cx, bce, JSOP_UNDEFINED) < 0) michael@0: return false; michael@0: } michael@0: return true; michael@0: } michael@0: michael@0: 
static bool michael@0: InitializeBlockScopedLocalsFromStack(ExclusiveContext *cx, BytecodeEmitter *bce, michael@0: Handle blockObj) michael@0: { michael@0: for (unsigned i = blockObj->numVariables(); i > 0; --i) { michael@0: if (blockObj->isAliased(i - 1)) { michael@0: ScopeCoordinate sc; michael@0: sc.setHops(0); michael@0: sc.setSlot(BlockObject::RESERVED_SLOTS + i - 1); michael@0: if (!EmitAliasedVarOp(cx, JSOP_SETALIASEDVAR, sc, bce)) michael@0: return false; michael@0: } else { michael@0: unsigned local = blockObj->blockIndexToLocalIndex(i - 1); michael@0: if (!EmitUnaliasedVarOp(cx, JSOP_SETLOCAL, local, bce)) michael@0: return false; michael@0: } michael@0: if (Emit1(cx, bce, JSOP_POP) < 0) michael@0: return false; michael@0: } michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: EnterBlockScope(ExclusiveContext *cx, BytecodeEmitter *bce, StmtInfoBCE *stmtInfo, michael@0: ObjectBox *objbox, unsigned alreadyPushed = 0) michael@0: { michael@0: // Initial values for block-scoped locals. michael@0: Rooted blockObj(cx, &objbox->object->as()); michael@0: if (!PushUndefinedValues(cx, bce, blockObj->numVariables() - alreadyPushed)) michael@0: return false; michael@0: michael@0: if (!EnterNestedScope(cx, bce, stmtInfo, objbox, STMT_BLOCK)) michael@0: return false; michael@0: michael@0: if (!InitializeBlockScopedLocalsFromStack(cx, bce, blockObj)) michael@0: return false; michael@0: michael@0: return true; michael@0: } michael@0: michael@0: /* michael@0: * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. michael@0: * LLVM is deciding to inline this function which uses a lot of stack space michael@0: * into EmitTree which is recursive and uses relatively little stack space. 
michael@0: */ michael@0: MOZ_NEVER_INLINE static bool michael@0: EmitSwitch(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) michael@0: { michael@0: JSOp switchOp; michael@0: bool hasDefault; michael@0: ptrdiff_t top, off, defaultOffset; michael@0: ParseNode *pn2, *pn3, *pn4; michael@0: int32_t low, high; michael@0: int noteIndex; michael@0: size_t switchSize; michael@0: jsbytecode *pc; michael@0: michael@0: /* Try for most optimal, fall back if not dense ints. */ michael@0: switchOp = JSOP_TABLESWITCH; michael@0: hasDefault = false; michael@0: defaultOffset = -1; michael@0: michael@0: pn2 = pn->pn_right; michael@0: JS_ASSERT(pn2->isKind(PNK_LEXICALSCOPE) || pn2->isKind(PNK_STATEMENTLIST)); michael@0: michael@0: /* Push the discriminant. */ michael@0: if (!EmitTree(cx, bce, pn->pn_left)) michael@0: return false; michael@0: michael@0: StmtInfoBCE stmtInfo(cx); michael@0: if (pn2->isKind(PNK_LEXICALSCOPE)) { michael@0: if (!EnterBlockScope(cx, bce, &stmtInfo, pn2->pn_objbox, 0)) michael@0: return false; michael@0: michael@0: stmtInfo.type = STMT_SWITCH; michael@0: stmtInfo.update = top = bce->offset(); michael@0: /* Advance pn2 to refer to the switch case list. */ michael@0: pn2 = pn2->expr(); michael@0: } else { michael@0: JS_ASSERT(pn2->isKind(PNK_STATEMENTLIST)); michael@0: top = bce->offset(); michael@0: PushStatementBCE(bce, &stmtInfo, STMT_SWITCH, top); michael@0: } michael@0: michael@0: /* Switch bytecodes run from here till end of final case. 
*/ michael@0: uint32_t caseCount = pn2->pn_count; michael@0: uint32_t tableLength = 0; michael@0: ScopedJSFreePtr table(nullptr); michael@0: michael@0: if (caseCount > JS_BIT(16)) { michael@0: bce->parser->tokenStream.reportError(JSMSG_TOO_MANY_CASES); michael@0: return false; michael@0: } michael@0: michael@0: if (caseCount == 0 || michael@0: (caseCount == 1 && michael@0: (hasDefault = (pn2->pn_head->isKind(PNK_DEFAULT))))) { michael@0: caseCount = 0; michael@0: low = 0; michael@0: high = -1; michael@0: } else { michael@0: bool ok = true; michael@0: #define INTMAP_LENGTH 256 michael@0: jsbitmap intmap_space[INTMAP_LENGTH]; michael@0: jsbitmap *intmap = nullptr; michael@0: int32_t intmap_bitlen = 0; michael@0: michael@0: low = JSVAL_INT_MAX; michael@0: high = JSVAL_INT_MIN; michael@0: michael@0: for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) { michael@0: if (pn3->isKind(PNK_DEFAULT)) { michael@0: hasDefault = true; michael@0: caseCount--; /* one of the "cases" was the default */ michael@0: continue; michael@0: } michael@0: michael@0: JS_ASSERT(pn3->isKind(PNK_CASE)); michael@0: if (switchOp == JSOP_CONDSWITCH) michael@0: continue; michael@0: michael@0: JS_ASSERT(switchOp == JSOP_TABLESWITCH); michael@0: michael@0: pn4 = pn3->pn_left; michael@0: michael@0: if (pn4->getKind() != PNK_NUMBER) { michael@0: switchOp = JSOP_CONDSWITCH; michael@0: continue; michael@0: } michael@0: michael@0: int32_t i; michael@0: if (!NumberIsInt32(pn4->pn_dval, &i)) { michael@0: switchOp = JSOP_CONDSWITCH; michael@0: continue; michael@0: } michael@0: michael@0: if ((unsigned)(i + (int)JS_BIT(15)) >= (unsigned)JS_BIT(16)) { michael@0: switchOp = JSOP_CONDSWITCH; michael@0: continue; michael@0: } michael@0: if (i < low) michael@0: low = i; michael@0: if (high < i) michael@0: high = i; michael@0: michael@0: /* michael@0: * Check for duplicates, which require a JSOP_CONDSWITCH. 
michael@0: * We bias i by 65536 if it's negative, and hope that's a rare michael@0: * case (because it requires a malloc'd bitmap). michael@0: */ michael@0: if (i < 0) michael@0: i += JS_BIT(16); michael@0: if (i >= intmap_bitlen) { michael@0: if (!intmap && michael@0: size_t(i) < (INTMAP_LENGTH * JS_BITMAP_NBITS)) { michael@0: intmap = intmap_space; michael@0: intmap_bitlen = INTMAP_LENGTH * JS_BITMAP_NBITS; michael@0: } else { michael@0: /* Just grab 8K for the worst-case bitmap. */ michael@0: intmap_bitlen = JS_BIT(16); michael@0: intmap = cx->pod_malloc(JS_BIT(16) / JS_BITMAP_NBITS); michael@0: if (!intmap) { michael@0: js_ReportOutOfMemory(cx); michael@0: return false; michael@0: } michael@0: } michael@0: memset(intmap, 0, size_t(intmap_bitlen) / CHAR_BIT); michael@0: } michael@0: if (JS_TEST_BIT(intmap, i)) { michael@0: switchOp = JSOP_CONDSWITCH; michael@0: continue; michael@0: } michael@0: JS_SET_BIT(intmap, i); michael@0: } michael@0: michael@0: if (intmap && intmap != intmap_space) michael@0: js_free(intmap); michael@0: if (!ok) michael@0: return false; michael@0: michael@0: /* michael@0: * Compute table length and select condswitch instead if overlarge or michael@0: * more than half-sparse. michael@0: */ michael@0: if (switchOp == JSOP_TABLESWITCH) { michael@0: tableLength = (uint32_t)(high - low + 1); michael@0: if (tableLength >= JS_BIT(16) || tableLength > 2 * caseCount) michael@0: switchOp = JSOP_CONDSWITCH; michael@0: } michael@0: } michael@0: michael@0: /* michael@0: * The note has one or two offsets: first tells total switch code length; michael@0: * second (if condswitch) tells offset to first JSOP_CASE. michael@0: */ michael@0: if (switchOp == JSOP_CONDSWITCH) { michael@0: /* 0 bytes of immediate for unoptimized switch. 
*/ michael@0: switchSize = 0; michael@0: noteIndex = NewSrcNote3(cx, bce, SRC_CONDSWITCH, 0, 0); michael@0: } else { michael@0: JS_ASSERT(switchOp == JSOP_TABLESWITCH); michael@0: michael@0: /* 3 offsets (len, low, high) before the table, 1 per entry. */ michael@0: switchSize = (size_t)(JUMP_OFFSET_LEN * (3 + tableLength)); michael@0: noteIndex = NewSrcNote2(cx, bce, SRC_TABLESWITCH, 0); michael@0: } michael@0: if (noteIndex < 0) michael@0: return false; michael@0: michael@0: /* Emit switchOp followed by switchSize bytes of jump or lookup table. */ michael@0: if (EmitN(cx, bce, switchOp, switchSize) < 0) michael@0: return false; michael@0: michael@0: off = -1; michael@0: if (switchOp == JSOP_CONDSWITCH) { michael@0: int caseNoteIndex = -1; michael@0: bool beforeCases = true; michael@0: michael@0: /* Emit code for evaluating cases and jumping to case statements. */ michael@0: for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) { michael@0: pn4 = pn3->pn_left; michael@0: if (pn4 && !EmitTree(cx, bce, pn4)) michael@0: return false; michael@0: if (caseNoteIndex >= 0) { michael@0: /* off is the previous JSOP_CASE's bytecode offset. */ michael@0: if (!SetSrcNoteOffset(cx, bce, (unsigned)caseNoteIndex, 0, bce->offset() - off)) michael@0: return false; michael@0: } michael@0: if (!pn4) { michael@0: JS_ASSERT(pn3->isKind(PNK_DEFAULT)); michael@0: continue; michael@0: } michael@0: caseNoteIndex = NewSrcNote2(cx, bce, SRC_NEXTCASE, 0); michael@0: if (caseNoteIndex < 0) michael@0: return false; michael@0: off = EmitJump(cx, bce, JSOP_CASE, 0); michael@0: if (off < 0) michael@0: return false; michael@0: pn3->pn_offset = off; michael@0: if (beforeCases) { michael@0: unsigned noteCount, noteCountDelta; michael@0: michael@0: /* Switch note's second offset is to first JSOP_CASE. 
*/ michael@0: noteCount = bce->notes().length(); michael@0: if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 1, off - top)) michael@0: return false; michael@0: noteCountDelta = bce->notes().length() - noteCount; michael@0: if (noteCountDelta != 0) michael@0: caseNoteIndex += noteCountDelta; michael@0: beforeCases = false; michael@0: } michael@0: } michael@0: michael@0: /* michael@0: * If we didn't have an explicit default (which could fall in between michael@0: * cases, preventing us from fusing this SetSrcNoteOffset with the call michael@0: * in the loop above), link the last case to the implicit default for michael@0: * the benefit of IonBuilder. michael@0: */ michael@0: if (!hasDefault && michael@0: caseNoteIndex >= 0 && michael@0: !SetSrcNoteOffset(cx, bce, (unsigned)caseNoteIndex, 0, bce->offset() - off)) michael@0: { michael@0: return false; michael@0: } michael@0: michael@0: /* Emit default even if no explicit default statement. */ michael@0: defaultOffset = EmitJump(cx, bce, JSOP_DEFAULT, 0); michael@0: if (defaultOffset < 0) michael@0: return false; michael@0: } else { michael@0: JS_ASSERT(switchOp == JSOP_TABLESWITCH); michael@0: pc = bce->code(top + JUMP_OFFSET_LEN); michael@0: michael@0: /* Fill in switch bounds, which we know fit in 16-bit offsets. */ michael@0: SET_JUMP_OFFSET(pc, low); michael@0: pc += JUMP_OFFSET_LEN; michael@0: SET_JUMP_OFFSET(pc, high); michael@0: pc += JUMP_OFFSET_LEN; michael@0: michael@0: /* michael@0: * Use malloc to avoid arena bloat for programs with many switches. michael@0: * ScopedJSFreePtr takes care of freeing it on exit. 
michael@0: */ michael@0: if (tableLength != 0) { michael@0: table = cx->pod_calloc(tableLength); michael@0: if (!table) michael@0: return false; michael@0: for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) { michael@0: if (pn3->isKind(PNK_DEFAULT)) michael@0: continue; michael@0: michael@0: JS_ASSERT(pn3->isKind(PNK_CASE)); michael@0: michael@0: pn4 = pn3->pn_left; michael@0: JS_ASSERT(pn4->getKind() == PNK_NUMBER); michael@0: michael@0: int32_t i = int32_t(pn4->pn_dval); michael@0: JS_ASSERT(double(i) == pn4->pn_dval); michael@0: michael@0: i -= low; michael@0: JS_ASSERT(uint32_t(i) < tableLength); michael@0: table[i] = pn3; michael@0: } michael@0: } michael@0: } michael@0: michael@0: /* Emit code for each case's statements, copying pn_offset up to pn3. */ michael@0: for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) { michael@0: if (switchOp == JSOP_CONDSWITCH && !pn3->isKind(PNK_DEFAULT)) michael@0: SetJumpOffsetAt(bce, pn3->pn_offset); michael@0: pn4 = pn3->pn_right; michael@0: if (!EmitTree(cx, bce, pn4)) michael@0: return false; michael@0: pn3->pn_offset = pn4->pn_offset; michael@0: if (pn3->isKind(PNK_DEFAULT)) michael@0: off = pn3->pn_offset - top; michael@0: } michael@0: michael@0: if (!hasDefault) { michael@0: /* If no default case, offset for default is to end of switch. */ michael@0: off = bce->offset() - top; michael@0: } michael@0: michael@0: /* We better have set "off" by now. */ michael@0: JS_ASSERT(off != -1); michael@0: michael@0: /* Set the default offset (to end of switch if no default). */ michael@0: if (switchOp == JSOP_CONDSWITCH) { michael@0: pc = nullptr; michael@0: JS_ASSERT(defaultOffset != -1); michael@0: SET_JUMP_OFFSET(bce->code(defaultOffset), off - (defaultOffset - top)); michael@0: } else { michael@0: pc = bce->code(top); michael@0: SET_JUMP_OFFSET(pc, off); michael@0: pc += JUMP_OFFSET_LEN; michael@0: } michael@0: michael@0: /* Set the SRC_SWITCH note's offset operand to tell end of switch. 
*/ michael@0: off = bce->offset() - top; michael@0: if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 0, off)) michael@0: return false; michael@0: michael@0: if (switchOp == JSOP_TABLESWITCH) { michael@0: /* Skip over the already-initialized switch bounds. */ michael@0: pc += 2 * JUMP_OFFSET_LEN; michael@0: michael@0: /* Fill in the jump table, if there is one. */ michael@0: for (uint32_t i = 0; i < tableLength; i++) { michael@0: pn3 = table[i]; michael@0: off = pn3 ? pn3->pn_offset - top : 0; michael@0: SET_JUMP_OFFSET(pc, off); michael@0: pc += JUMP_OFFSET_LEN; michael@0: } michael@0: } michael@0: michael@0: if (pn->pn_right->isKind(PNK_LEXICALSCOPE)) { michael@0: if (!LeaveNestedScope(cx, bce, &stmtInfo)) michael@0: return false; michael@0: } else { michael@0: if (!PopStatementBCE(cx, bce)) michael@0: return false; michael@0: } michael@0: michael@0: return true; michael@0: } michael@0: michael@0: bool michael@0: BytecodeEmitter::isRunOnceLambda() michael@0: { michael@0: // The run once lambda flags set by the parser are approximate, and we look michael@0: // at properties of the function itself before deciding to emit a function michael@0: // as a run once lambda. michael@0: michael@0: if (!(parent && parent->emittingRunOnceLambda) && !lazyRunOnceLambda) michael@0: return false; michael@0: michael@0: FunctionBox *funbox = sc->asFunctionBox(); michael@0: return !funbox->argumentsHasLocalBinding() && michael@0: !funbox->isGenerator() && michael@0: !funbox->function()->name(); michael@0: } michael@0: michael@0: bool michael@0: frontend::EmitFunctionScript(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *body) michael@0: { michael@0: /* michael@0: * IonBuilder has assumptions about what may occur immediately after michael@0: * script->main (e.g., in the case of destructuring params). Thus, put the michael@0: * following ops into the range [script->code, script->main). Note: michael@0: * execution starts from script->code, so this has no semantic effect. 
michael@0: */ michael@0: michael@0: FunctionBox *funbox = bce->sc->asFunctionBox(); michael@0: if (funbox->argumentsHasLocalBinding()) { michael@0: JS_ASSERT(bce->offset() == 0); /* See JSScript::argumentsBytecode. */ michael@0: bce->switchToProlog(); michael@0: if (Emit1(cx, bce, JSOP_ARGUMENTS) < 0) michael@0: return false; michael@0: InternalBindingsHandle bindings(bce->script, &bce->script->bindings); michael@0: uint32_t varIndex = Bindings::argumentsVarIndex(cx, bindings); michael@0: if (bce->script->varIsAliased(varIndex)) { michael@0: ScopeCoordinate sc; michael@0: sc.setHops(0); michael@0: JS_ALWAYS_TRUE(LookupAliasedNameSlot(bce->script, cx->names().arguments, &sc)); michael@0: if (!EmitAliasedVarOp(cx, JSOP_SETALIASEDVAR, sc, bce)) michael@0: return false; michael@0: } else { michael@0: if (!EmitUnaliasedVarOp(cx, JSOP_SETLOCAL, varIndex, bce)) michael@0: return false; michael@0: } michael@0: if (Emit1(cx, bce, JSOP_POP) < 0) michael@0: return false; michael@0: bce->switchToMain(); michael@0: } michael@0: michael@0: if (funbox->isGenerator()) { michael@0: bce->switchToProlog(); michael@0: if (Emit1(cx, bce, JSOP_GENERATOR) < 0) michael@0: return false; michael@0: bce->switchToMain(); michael@0: } michael@0: michael@0: /* michael@0: * Emit a prologue for run-once scripts which will deoptimize JIT code if michael@0: * the script ends up running multiple times via foo.caller related michael@0: * shenanigans. michael@0: */ michael@0: bool runOnce = bce->isRunOnceLambda(); michael@0: if (runOnce) { michael@0: bce->switchToProlog(); michael@0: if (Emit1(cx, bce, JSOP_RUNONCE) < 0) michael@0: return false; michael@0: bce->switchToMain(); michael@0: } michael@0: michael@0: if (!EmitTree(cx, bce, body)) michael@0: return false; michael@0: michael@0: // If we fall off the end of an ES6 generator, return a boxed iterator michael@0: // result object of the form { value: undefined, done: true }. 
michael@0: if (bce->sc->isFunctionBox() && bce->sc->asFunctionBox()->isStarGenerator()) { michael@0: if (!EmitPrepareIteratorResult(cx, bce)) michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_UNDEFINED) < 0) michael@0: return false; michael@0: if (!EmitFinishIteratorResult(cx, bce, true)) michael@0: return false; michael@0: michael@0: // No need to check for finally blocks, etc as in EmitReturn. michael@0: if (Emit1(cx, bce, JSOP_RETURN) < 0) michael@0: return false; michael@0: } michael@0: michael@0: /* michael@0: * Always end the script with a JSOP_RETRVAL. Some other parts of the codebase michael@0: * depend on this opcode, e.g. js_InternalInterpret. michael@0: */ michael@0: if (Emit1(cx, bce, JSOP_RETRVAL) < 0) michael@0: return false; michael@0: michael@0: if (!JSScript::fullyInitFromEmitter(cx, bce->script, bce)) michael@0: return false; michael@0: michael@0: /* michael@0: * If this function is only expected to run once, mark the script so that michael@0: * initializers created within it may be given more precise types. michael@0: */ michael@0: if (runOnce) { michael@0: bce->script->setTreatAsRunOnce(); michael@0: JS_ASSERT(!bce->script->hasRunOnce()); michael@0: } michael@0: michael@0: /* Initialize fun->script() so that the debugger has a valid fun->script(). 
*/ michael@0: RootedFunction fun(cx, bce->script->functionNonDelazifying()); michael@0: JS_ASSERT(fun->isInterpreted()); michael@0: michael@0: if (fun->isInterpretedLazy()) michael@0: fun->setUnlazifiedScript(bce->script); michael@0: else michael@0: fun->setScript(bce->script); michael@0: michael@0: bce->tellDebuggerAboutCompiledScript(cx); michael@0: michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: MaybeEmitVarDecl(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp prologOp, ParseNode *pn, michael@0: jsatomid *result) michael@0: { michael@0: jsatomid atomIndex; michael@0: michael@0: if (!pn->pn_cookie.isFree()) { michael@0: atomIndex = pn->pn_cookie.slot(); michael@0: } else { michael@0: if (!bce->makeAtomIndex(pn->pn_atom, &atomIndex)) michael@0: return false; michael@0: } michael@0: michael@0: if (JOF_OPTYPE(pn->getOp()) == JOF_ATOM && michael@0: (!bce->sc->isFunctionBox() || bce->sc->asFunctionBox()->isHeavyweight())) michael@0: { michael@0: bce->switchToProlog(); michael@0: if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin)) michael@0: return false; michael@0: if (!EmitIndexOp(cx, prologOp, atomIndex, bce)) michael@0: return false; michael@0: bce->switchToMain(); michael@0: } michael@0: michael@0: if (result) michael@0: *result = atomIndex; michael@0: return true; michael@0: } michael@0: michael@0: /* michael@0: * This enum tells EmitVariables and the destructuring functions how emit the michael@0: * given Parser::variables parse tree. In the base case, DefineVars, the caller michael@0: * only wants variables to be defined in the prologue (if necessary). For michael@0: * PushInitialValues, variable initializer expressions are evaluated and left michael@0: * on the stack. For InitializeVars, the initializer expressions values are michael@0: * assigned (to local variables) and popped. 
 */
enum VarEmitOption
{
    DefineVars = 0,
    PushInitialValues = 1,
    InitializeVars = 2
};

/* Signature shared by the two destructuring-declaration emitters below. */
typedef bool
(*DestructuringDeclEmitter)(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp prologOp, ParseNode *pn);

/* Emit the prologue declaration (if needed) for one destructured name. */
static bool
EmitDestructuringDecl(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp prologOp, ParseNode *pn)
{
    JS_ASSERT(pn->isKind(PNK_NAME));
    if (!BindNameToSlot(cx, bce, pn))
        return false;

    JS_ASSERT(!pn->isOp(JSOP_CALLEE));
    return MaybeEmitVarDecl(cx, bce, prologOp, pn, nullptr);
}

/*
 * Recursively emit prologue declarations for every name bound by a
 * destructuring pattern (array or object), skipping array holes.
 */
static bool
EmitDestructuringDecls(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp prologOp,
                       ParseNode *pattern)
{
    if (pattern->isKind(PNK_ARRAY)) {
        for (ParseNode *element = pattern->pn_head; element; element = element->pn_next) {
            if (element->isKind(PNK_ELISION))
                continue;
            /* Names are declared directly; nested patterns recurse. */
            DestructuringDeclEmitter emitter =
                element->isKind(PNK_NAME) ? EmitDestructuringDecl : EmitDestructuringDecls;
            if (!emitter(cx, bce, prologOp, element))
                return false;
        }
        return true;
    }

    MOZ_ASSERT(pattern->isKind(PNK_OBJECT));
    for (ParseNode *member = pattern->pn_head; member; member = member->pn_next) {
        ParseNode *target = member->pn_right;
        DestructuringDeclEmitter emitter =
            target->isKind(PNK_NAME) ? EmitDestructuringDecl : EmitDestructuringDecls;
        if (!emitter(cx, bce, prologOp, target))
            return false;
    }
    return true;
}

static bool
EmitDestructuringOpsHelper(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn,
                           VarEmitOption emitOption);

/*
 * EmitDestructuringLHS assumes the to-be-destructured value has been pushed on
 * the stack and emits code to destructure a single lhs expression (either a
 * name or a compound []/{} expression).
 *
 * If emitOption is InitializeVars, the to-be-destructured value is assigned to
 * locals and ultimately the initial slot is popped (-1 total depth change).
 *
 * If emitOption is PushInitialValues, the to-be-destructured value is replaced
 * with the initial values of the N (where 0 <= N) variables assigned in the
 * lhs expression. (Same post-condition as EmitDestructuringOpsHelper)
 */
static bool
EmitDestructuringLHS(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, VarEmitOption emitOption)
{
    JS_ASSERT(emitOption != DefineVars);

    // Now emit the lvalue opcode sequence. If the lvalue is a nested
    // destructuring initialiser-form, call ourselves to handle it, then pop
    // the matched value. Otherwise emit an lvalue bytecode sequence followed
    // by an assignment op.
    if (pn->isKind(PNK_ARRAY) || pn->isKind(PNK_OBJECT)) {
        if (!EmitDestructuringOpsHelper(cx, bce, pn, emitOption))
            return false;
        if (emitOption == InitializeVars) {
            // Per its post-condition, EmitDestructuringOpsHelper has left the
            // to-be-destructured value on top of the stack.
            if (Emit1(cx, bce, JSOP_POP) < 0)
                return false;
        }
    } else if (emitOption == PushInitialValues) {
        // The lhs is a simple name so the to-be-destructured value is
        // its initial value and there is nothing to do.
        JS_ASSERT(pn->getOp() == JSOP_GETLOCAL);
        JS_ASSERT(pn->pn_dflags & PND_BOUND);
    } else {
        switch (pn->getKind()) {
          case PNK_NAME:
            if (!BindNameToSlot(cx, bce, pn))
                return false;

            // Allow 'const [x,y] = o', make 'const x,y; [x,y] = o' a nop.
            if (pn->isConst() && !pn->isDefn())
                return Emit1(cx, bce, JSOP_POP) >= 0;

            switch (pn->getOp()) {
              case JSOP_SETNAME:
              case JSOP_SETGNAME:
              case JSOP_SETCONST: {
                // This is like ordinary assignment, but with one difference.
                //
                // In `a = b`, we first determine a binding for `a` (using
                // JSOP_BINDNAME or JSOP_BINDGNAME), then we evaluate `b`, then
                // a JSOP_SETNAME instruction.
                //
                // In `[a] = [b]`, per spec, `b` is evaluated first, then we
                // determine a binding for `a`. Then we need to do assignment--
                // but the operands are on the stack in the wrong order for
                // JSOP_SETPROP, so we have to add a JSOP_SWAP.
                jsatomid atomIndex;
                if (!bce->makeAtomIndex(pn->pn_atom, &atomIndex))
                    return false;

                if (!pn->isOp(JSOP_SETCONST)) {
                    JSOp bindOp = pn->isOp(JSOP_SETNAME) ? JSOP_BINDNAME : JSOP_BINDGNAME;
                    if (!EmitIndex32(cx, bindOp, atomIndex, bce))
                        return false;
                    if (Emit1(cx, bce, JSOP_SWAP) < 0)
                        return false;
                }

                if (!EmitIndexOp(cx, pn->getOp(), atomIndex, bce))
                    return false;
                break;
              }

              case JSOP_SETLOCAL:
              case JSOP_SETARG:
                if (!EmitVarOp(cx, pn, pn->getOp(), bce))
                    return false;
                break;

              default:
                MOZ_ASSUME_UNREACHABLE("EmitDestructuringLHS: bad name op");
            }
            break;

          case PNK_DOT:
            // See the (PNK_NAME, JSOP_SETNAME) case above.
            //
            // In `a.x = b`, `a` is evaluated first, then `b`, then a
            // JSOP_SETPROP instruction.
            //
            // In `[a.x] = [b]`, per spec, `b` is evaluated before `a`. Then we
            // need a property set -- but the operands are on the stack in the
            // wrong order for JSOP_SETPROP, so we have to add a JSOP_SWAP.
            if (!EmitTree(cx, bce, pn->pn_expr))
                return false;
            if (Emit1(cx, bce, JSOP_SWAP) < 0)
                return false;
            if (!EmitAtomOp(cx, pn, JSOP_SETPROP, bce))
                return false;
            break;

          case PNK_ELEM:
            // See the comment at `case PNK_DOT:` above. This case,
            // `[a[x]] = [b]`, is handled much the same way. The JSOP_SWAP
            // is emitted by EmitElemOperands.
            if (!EmitElemOp(cx, pn, JSOP_SETELEM, bce))
                return false;
            break;

          case PNK_CALL:
            JS_ASSERT(pn->pn_xflags & PNX_SETCALL);
            if (!EmitTree(cx, bce, pn))
                return false;

            // Pop the call return value. Below, we pop the RHS too, balancing
            // the stack --- presumably for the benefit of bytecode
            // analysis. (The interpreter will never reach these instructions
            // since we just emitted JSOP_SETCALL, which always throws. It's
            // possible no analyses actually depend on this either.)
            if (Emit1(cx, bce, JSOP_POP) < 0)
                return false;
            break;

          default:
            MOZ_ASSUME_UNREACHABLE("EmitDestructuringLHS: bad lhs kind");
        }

        // Pop the assigned value.
        if (Emit1(cx, bce, JSOP_POP) < 0)
            return false;
    }

    return true;
}

/*
 * Recursive helper for EmitDestructuringOps.
 * EmitDestructuringOpsHelper assumes the to-be-destructured value has been
 * pushed on the stack and emits code to destructure each part of a [] or {}
 * lhs expression.
 *
 * If emitOption is InitializeVars, the initial to-be-destructured value is
 * left untouched on the stack and the overall depth is not changed.
 *
 * If emitOption is PushInitialValues, the to-be-destructured value is replaced
 * with the initial values of the N (where 0 <= N) variables assigned in the
 * lhs expression.
(Same post-condition as EmitDestructuringLHS) michael@0: */ michael@0: static bool michael@0: EmitDestructuringOpsHelper(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, michael@0: VarEmitOption emitOption) michael@0: { michael@0: JS_ASSERT(emitOption != DefineVars); michael@0: michael@0: unsigned index; michael@0: ParseNode *pn2, *pn3; michael@0: bool doElemOp; michael@0: michael@0: #ifdef DEBUG michael@0: int stackDepth = bce->stackDepth; michael@0: JS_ASSERT(stackDepth != 0); michael@0: JS_ASSERT(pn->isArity(PN_LIST)); michael@0: JS_ASSERT(pn->isKind(PNK_ARRAY) || pn->isKind(PNK_OBJECT)); michael@0: #endif michael@0: michael@0: index = 0; michael@0: for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) { michael@0: /* Duplicate the value being destructured to use as a reference base. */ michael@0: if (Emit1(cx, bce, JSOP_DUP) < 0) michael@0: return false; michael@0: michael@0: /* michael@0: * Now push the property name currently being matched, which is either michael@0: * the array initialiser's current index, or the current property name michael@0: * "label" on the left of a colon in the object initialiser. Set pn3 michael@0: * to the lvalue node, which is in the value-initializing position. 
michael@0: */ michael@0: doElemOp = true; michael@0: if (pn->isKind(PNK_ARRAY)) { michael@0: if (!EmitNumberOp(cx, index, bce)) michael@0: return false; michael@0: pn3 = pn2; michael@0: } else { michael@0: JS_ASSERT(pn->isKind(PNK_OBJECT)); michael@0: JS_ASSERT(pn2->isKind(PNK_COLON)); michael@0: michael@0: ParseNode *key = pn2->pn_left; michael@0: if (key->isKind(PNK_NUMBER)) { michael@0: if (!EmitNumberOp(cx, key->pn_dval, bce)) michael@0: return false; michael@0: } else { michael@0: MOZ_ASSERT(key->isKind(PNK_STRING) || key->isKind(PNK_NAME)); michael@0: PropertyName *name = key->pn_atom->asPropertyName(); michael@0: michael@0: // The parser already checked for atoms representing indexes and michael@0: // used PNK_NUMBER instead, but also watch for ids which TI treats michael@0: // as indexes for simplification of downstream analysis. michael@0: jsid id = NameToId(name); michael@0: if (id != types::IdToTypeId(id)) { michael@0: if (!EmitTree(cx, bce, key)) michael@0: return false; michael@0: } else { michael@0: if (!EmitAtomOp(cx, name, JSOP_GETPROP, bce)) michael@0: return false; michael@0: doElemOp = false; michael@0: } michael@0: } michael@0: michael@0: pn3 = pn2->pn_right; michael@0: } michael@0: michael@0: if (doElemOp) { michael@0: /* michael@0: * Ok, get the value of the matching property name. This leaves michael@0: * that value on top of the value being destructured, so the stack michael@0: * is one deeper than when we started. michael@0: */ michael@0: if (!EmitElemOpBase(cx, bce, JSOP_GETELEM)) michael@0: return false; michael@0: JS_ASSERT(bce->stackDepth >= stackDepth + 1); michael@0: } michael@0: michael@0: /* Elision node makes a hole in the array destructurer. 
*/ michael@0: if (pn3->isKind(PNK_ELISION)) { michael@0: JS_ASSERT(pn->isKind(PNK_ARRAY)); michael@0: JS_ASSERT(pn2 == pn3); michael@0: if (Emit1(cx, bce, JSOP_POP) < 0) michael@0: return false; michael@0: } else { michael@0: int32_t depthBefore = bce->stackDepth; michael@0: if (!EmitDestructuringLHS(cx, bce, pn3, emitOption)) michael@0: return false; michael@0: michael@0: if (emitOption == PushInitialValues) { michael@0: /* michael@0: * After '[x,y]' in 'let ([[x,y], z] = o)', the stack is michael@0: * | to-be-destructured-value | x | y | michael@0: * The goal is: michael@0: * | x | y | z | michael@0: * so emit a pick to produce the intermediate state michael@0: * | x | y | to-be-destructured-value | michael@0: * before destructuring z. This gives the loop invariant that michael@0: * the to-be-destructured-value is always on top of the stack. michael@0: */ michael@0: JS_ASSERT((bce->stackDepth - bce->stackDepth) >= -1); michael@0: uint32_t pickDistance = (uint32_t)((bce->stackDepth + 1) - depthBefore); michael@0: if (pickDistance > 0) { michael@0: if (pickDistance > UINT8_MAX) { michael@0: bce->reportError(pn3, JSMSG_TOO_MANY_LOCALS); michael@0: return false; michael@0: } michael@0: if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)pickDistance) < 0) michael@0: return false; michael@0: } michael@0: } michael@0: } michael@0: michael@0: ++index; michael@0: } michael@0: michael@0: if (emitOption == PushInitialValues) { michael@0: /* michael@0: * Per the above loop invariant, to-be-destructured-value is at the top michael@0: * of the stack. To achieve the post-condition, pop it. 
michael@0: */ michael@0: if (Emit1(cx, bce, JSOP_POP) < 0) michael@0: return false; michael@0: } michael@0: michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: EmitDestructuringOps(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, bool isLet = false) michael@0: { michael@0: /* michael@0: * Call our recursive helper to emit the destructuring assignments and michael@0: * related stack manipulations. michael@0: */ michael@0: VarEmitOption emitOption = isLet ? PushInitialValues : InitializeVars; michael@0: return EmitDestructuringOpsHelper(cx, bce, pn, emitOption); michael@0: } michael@0: michael@0: static bool michael@0: EmitGroupAssignment(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp prologOp, michael@0: ParseNode *lhs, ParseNode *rhs) michael@0: { michael@0: uint32_t depth, limit, i, nslots; michael@0: ParseNode *pn; michael@0: michael@0: depth = limit = (uint32_t) bce->stackDepth; michael@0: for (pn = rhs->pn_head; pn; pn = pn->pn_next) { michael@0: if (limit == JS_BIT(16)) { michael@0: bce->reportError(rhs, JSMSG_ARRAY_INIT_TOO_BIG); michael@0: return false; michael@0: } michael@0: michael@0: /* MaybeEmitGroupAssignment won't call us if rhs is holey. */ michael@0: JS_ASSERT(!pn->isKind(PNK_ELISION)); michael@0: if (!EmitTree(cx, bce, pn)) michael@0: return false; michael@0: ++limit; michael@0: } michael@0: michael@0: i = depth; michael@0: for (pn = lhs->pn_head; pn; pn = pn->pn_next, ++i) { michael@0: /* MaybeEmitGroupAssignment requires lhs->pn_count <= rhs->pn_count. 
*/ michael@0: JS_ASSERT(i < limit); michael@0: michael@0: if (!EmitDupAt(cx, bce, i)) michael@0: return false; michael@0: michael@0: if (pn->isKind(PNK_ELISION)) { michael@0: if (Emit1(cx, bce, JSOP_POP) < 0) michael@0: return false; michael@0: } else { michael@0: if (!EmitDestructuringLHS(cx, bce, pn, InitializeVars)) michael@0: return false; michael@0: } michael@0: } michael@0: michael@0: nslots = limit - depth; michael@0: EMIT_UINT16_IMM_OP(JSOP_POPN, nslots); michael@0: bce->stackDepth = (uint32_t) depth; michael@0: return true; michael@0: } michael@0: michael@0: enum GroupOption { GroupIsDecl, GroupIsNotDecl }; michael@0: michael@0: /* michael@0: * Helper called with pop out param initialized to a JSOP_POP* opcode. If we michael@0: * can emit a group assignment sequence, which results in 0 stack depth delta, michael@0: * we set *pop to JSOP_NOP so callers can veto emitting pn followed by a pop. michael@0: */ michael@0: static bool michael@0: MaybeEmitGroupAssignment(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp prologOp, ParseNode *pn, michael@0: GroupOption groupOption, JSOp *pop) michael@0: { michael@0: JS_ASSERT(pn->isKind(PNK_ASSIGN)); michael@0: JS_ASSERT(pn->isOp(JSOP_NOP)); michael@0: JS_ASSERT(*pop == JSOP_POP || *pop == JSOP_SETRVAL); michael@0: michael@0: ParseNode *lhs = pn->pn_left; michael@0: ParseNode *rhs = pn->pn_right; michael@0: if (lhs->isKind(PNK_ARRAY) && rhs->isKind(PNK_ARRAY) && michael@0: !(rhs->pn_xflags & PNX_SPECIALARRAYINIT) && michael@0: lhs->pn_count <= rhs->pn_count) michael@0: { michael@0: if (groupOption == GroupIsDecl && !EmitDestructuringDecls(cx, bce, prologOp, lhs)) michael@0: return false; michael@0: if (!EmitGroupAssignment(cx, bce, prologOp, lhs, rhs)) michael@0: return false; michael@0: *pop = JSOP_NOP; michael@0: } michael@0: return true; michael@0: } michael@0: michael@0: /* michael@0: * Like MaybeEmitGroupAssignment, but for 'let ([x,y] = [a,b]) ...'. 
 *
 * Instead of issuing a sequence |dup|eval-rhs|set-lhs|pop| (which doesn't work
 * since the bound vars don't yet have slots), just eval/push each rhs element
 * just like what EmitLet would do for 'let (x = a, y = b) ...'. While shorter,
 * simpler and more efficient than MaybeEmitGroupAssignment, it is harder to
 * decompile so we restrict ourselves to cases where the lhs and rhs are in
 * 1:1 correspondence and lhs elements are simple names.
 */
static bool
MaybeEmitLetGroupDecl(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, JSOp *pop)
{
    JS_ASSERT(pn->isKind(PNK_ASSIGN));
    JS_ASSERT(pn->isOp(JSOP_NOP));
    JS_ASSERT(*pop == JSOP_POP || *pop == JSOP_SETRVAL);

    ParseNode *lhs = pn->pn_left;
    ParseNode *rhs = pn->pn_right;
    // Only 1:1 array patterns with no holes on either side qualify.
    if (lhs->isKind(PNK_ARRAY) && rhs->isKind(PNK_ARRAY) &&
        !(rhs->pn_xflags & PNX_SPECIALARRAYINIT) &&
        !(lhs->pn_xflags & PNX_SPECIALARRAYINIT) &&
        lhs->pn_count == rhs->pn_count)
    {
        // Every lhs element must be a simple local name; otherwise bail out
        // (returning true without touching *pop means "not applicable").
        for (ParseNode *l = lhs->pn_head; l; l = l->pn_next) {
            if (l->getOp() != JSOP_SETLOCAL)
                return true;
        }

        // Push each rhs value; the values land directly in the let-vars'
        // future slots (see the EmitLet protocol).
        for (ParseNode *r = rhs->pn_head; r; r = r->pn_next) {
            if (!EmitTree(cx, bce, r))
                return false;
        }

        *pop = JSOP_NOP;
    }
    return true;
}

/*
 * Emit code for a var/const/let declaration list |pn|. Each element is either
 * a PNK_NAME (with optional initializer), a destructuring pattern (DefineVars
 * only), or a PNK_ASSIGN wrapping a destructuring initialiser. |emitOption|
 * chooses between defining bindings only, initializing them, or (for let
 * heads, isLet == true) pushing each initial value.
 */
static bool
EmitVariables(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, VarEmitOption emitOption,
              bool isLet = false)
{
    JS_ASSERT(pn->isArity(PN_LIST));
    JS_ASSERT(isLet == (emitOption == PushInitialValues));

    ParseNode *next;
    for (ParseNode *pn2 = pn->pn_head; ; pn2 = next) {
        if (!UpdateSourceCoordNotes(cx, bce, pn2->pn_pos.begin))
            return false;
        next = pn2->pn_next;

        ParseNode *pn3;
        if (!pn2->isKind(PNK_NAME)) {
            if (pn2->isKind(PNK_ARRAY) || pn2->isKind(PNK_OBJECT)) {
                /*
                 * Emit variable binding ops, but not destructuring ops. The
                 * parser (see Parser::variables) has ensured that our caller
                 * will be the PNK_FOR/PNK_FORIN/PNK_FOROF case in EmitTree, and
                 * that case will emit the destructuring code only after
                 * emitting an enumerating opcode and a branch that tests
                 * whether the enumeration ended.
                 */
                JS_ASSERT(emitOption == DefineVars);
                JS_ASSERT(pn->pn_count == 1);
                if (!EmitDestructuringDecls(cx, bce, pn->getOp(), pn2))
                    return false;
                break;
            }

            /*
             * A destructuring initialiser assignment preceded by var will
             * never occur to the left of 'in' in a for-in loop. As with 'for
             * (var x = i in o)...', this will cause the entire 'var [a, b] =
             * i' to be hoisted out of the loop.
             */
            JS_ASSERT(pn2->isKind(PNK_ASSIGN));
            JS_ASSERT(pn2->isOp(JSOP_NOP));
            JS_ASSERT(emitOption != DefineVars);

            /*
             * To allow the front end to rewrite var f = x; as f = x; when a
             * function f(){} precedes the var, detect simple name assignment
             * here and initialize the name.
             */
            if (pn2->pn_left->isKind(PNK_NAME)) {
                pn3 = pn2->pn_right;
                pn2 = pn2->pn_left;
                goto do_name;
            }

            JSOp op = JSOP_POP;
            if (pn->pn_count == 1) {
                /*
                 * If this is the only destructuring assignment in the list,
                 * try to optimize to a group assignment. If we're in a let
                 * head, pass JSOP_POP rather than the pseudo-prolog JSOP_NOP
                 * in pn->pn_op, to suppress a second (and misplaced) 'let'.
                 */
                JS_ASSERT(!pn2->pn_next);
                if (isLet) {
                    if (!MaybeEmitLetGroupDecl(cx, bce, pn2, &op))
                        return false;
                } else {
                    if (!MaybeEmitGroupAssignment(cx, bce, pn->getOp(), pn2, GroupIsDecl, &op))
                        return false;
                }
            }
            if (op == JSOP_NOP) {
                // The group-assignment fast path handled everything; mark the
                // list so no trailing pop is emitted.
                pn->pn_xflags = (pn->pn_xflags & ~PNX_POPVAR) | PNX_GROUPINIT;
            } else {
                // Slow path: declare the pattern's bindings, evaluate the rhs,
                // then run the full destructuring protocol.
                pn3 = pn2->pn_left;
                if (!EmitDestructuringDecls(cx, bce, pn->getOp(), pn3))
                    return false;

                if (!EmitTree(cx, bce, pn2->pn_right))
                    return false;

                if (!EmitDestructuringOps(cx, bce, pn3, isLet))
                    return false;
            }

            /* If we are not initializing, nothing to pop. */
            if (emitOption != InitializeVars) {
                if (next)
                    continue;
                break;
            }
            goto emit_note_pop;
        }

        /*
         * Load initializer early to share code above that jumps to do_name.
         * NB: if this var redeclares an existing binding, then pn2 is linked
         * on its definition's use-chain and pn_expr has been overlayed with
         * pn_lexdef.
         */
        pn3 = pn2->maybeExpr();

      do_name:
        if (!BindNameToSlot(cx, bce, pn2))
            return false;

        JSOp op;
        op = pn2->getOp();
        JS_ASSERT(op != JSOP_CALLEE);
        JS_ASSERT(!pn2->pn_cookie.isFree() || !pn->isOp(JSOP_NOP));

        jsatomid atomIndex;
        if (!MaybeEmitVarDecl(cx, bce, pn->getOp(), pn2, &atomIndex))
            return false;

        if (pn3) {
            JS_ASSERT(emitOption != DefineVars);
            if (op == JSOP_SETNAME || op == JSOP_SETGNAME || op == JSOP_SETINTRINSIC) {
                JS_ASSERT(emitOption != PushInitialValues);
                // Name-style sets need the binding object pushed first.
                JSOp bindOp;
                if (op == JSOP_SETNAME)
                    bindOp = JSOP_BINDNAME;
                else if (op == JSOP_SETGNAME)
                    bindOp = JSOP_BINDGNAME;
                else
                    bindOp = JSOP_BINDINTRINSIC;
                if (!EmitIndex32(cx, bindOp, atomIndex, bce))
                    return false;
            }

            // The initializer is not a for-loop init clause even if the
            // declaration itself is; clear the flag while emitting it.
            bool oldEmittingForInit = bce->emittingForInit;
            bce->emittingForInit = false;
            if (!EmitTree(cx, bce, pn3))
                return false;
            bce->emittingForInit = oldEmittingForInit;
        } else if (isLet) {
            /* JSOP_ENTERLETx expects at least 1 slot to have been pushed. */
            if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
                return false;
        }

        /* If we are not initializing, nothing to pop. */
        if (emitOption != InitializeVars) {
            if (next)
                continue;
            break;
        }

        JS_ASSERT_IF(pn2->isDefn(), pn3 == pn2->pn_expr);
        if (!pn2->pn_cookie.isFree()) {
            if (!EmitVarOp(cx, pn2, op, bce))
                return false;
        } else {
            if (!EmitIndexOp(cx, op, atomIndex, bce))
                return false;
        }

      emit_note_pop:
        if (!next)
            break;
        if (Emit1(cx, bce, JSOP_POP) < 0)
            return false;
    }

    if (pn->pn_xflags & PNX_POPVAR) {
        if (Emit1(cx, bce, JSOP_POP) < 0)
            return false;
    }

    return true;
}

/*
 * Emit an assignment |lhs op= rhs| (op == JSOP_NOP for a plain '='; rhs may be
 * null when the "assignment" is the implicit one performed by a for-in/for-of
 * loop head). Dispatches on the kind of the left-hand side.
 */
static bool
EmitAssignment(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *lhs, JSOp op, ParseNode *rhs)
{
    /*
     * Check left operand type and generate specialized code for it.
     * Specialize to avoid ECMA "reference type" values on the operand
     * stack, which impose pervasive runtime "GetValue" costs.
     */
    jsatomid atomIndex = (jsatomid) -1;
    // Number of stack slots between the to-be-assigned value's eventual
    // position and the top of stack; used to PICK the for-in/for-of value.
    jsbytecode offset = 1;

    // Pass 1: emit the lvalue's "base" (binding object, receiver, or
    // receiver+key), recording how many slots it occupies in |offset|.
    switch (lhs->getKind()) {
      case PNK_NAME:
        if (!BindNameToSlot(cx, bce, lhs))
            return false;
        if (lhs->pn_cookie.isFree()) {
            if (!bce->makeAtomIndex(lhs->pn_atom, &atomIndex))
                return false;
            if (!lhs->isConst()) {
                JSOp bindOp;
                if (lhs->isOp(JSOP_SETNAME))
                    bindOp = JSOP_BINDNAME;
                else if (lhs->isOp(JSOP_SETGNAME))
                    bindOp = JSOP_BINDGNAME;
                else
                    bindOp = JSOP_BINDINTRINSIC;
                if (!EmitIndex32(cx, bindOp, atomIndex, bce))
                    return false;
                offset++;
            }
        }
        break;
      case PNK_DOT:
        if (!EmitTree(cx, bce, lhs->expr()))
            return false;
        offset++;
        if (!bce->makeAtomIndex(lhs->pn_atom, &atomIndex))
            return false;
        break;
      case PNK_ELEM:
        JS_ASSERT(lhs->isArity(PN_BINARY));
        if (!EmitTree(cx, bce, lhs->pn_left))
            return false;
        if (!EmitTree(cx, bce, lhs->pn_right))
            return false;
        offset += 2;
        break;
      case PNK_ARRAY:
      case PNK_OBJECT:
        // Destructuring targets have no base to emit; handled after the rhs.
        break;
      case PNK_CALL:
        JS_ASSERT(lhs->pn_xflags & PNX_SETCALL);
        if (!EmitTree(cx, bce, lhs))
            return false;
        if (Emit1(cx, bce, JSOP_POP) < 0)
            return false;
        break;
      default:
        JS_ASSERT(0);
    }

    // Pass 2 (compound assignment only): fetch the lvalue's current value so
    // the binary op below has both operands.
    if (op != JSOP_NOP) {
        JS_ASSERT(rhs);
        switch (lhs->getKind()) {
          case PNK_NAME:
            if (lhs->isConst()) {
                if (lhs->isOp(JSOP_CALLEE)) {
                    if (Emit1(cx, bce, JSOP_CALLEE) < 0)
                        return false;
                } else if (lhs->isOp(JSOP_NAME) || lhs->isOp(JSOP_GETGNAME)) {
                    if (!EmitIndex32(cx, lhs->getOp(), atomIndex, bce))
                        return false;
                } else {
                    JS_ASSERT(JOF_OPTYPE(lhs->getOp()) != JOF_ATOM);
                    if (!EmitVarOp(cx, lhs, lhs->getOp(), bce))
                        return false;
                }
            } else if (lhs->isOp(JSOP_SETNAME)) {
                if (Emit1(cx, bce, JSOP_DUP) < 0)
                    return false;
                if (!EmitIndex32(cx, JSOP_GETXPROP, atomIndex, bce))
                    return false;
            } else if (lhs->isOp(JSOP_SETGNAME)) {
                JS_ASSERT(lhs->pn_cookie.isFree());
                if (!EmitAtomOp(cx, lhs, JSOP_GETGNAME, bce))
                    return false;
            } else if (lhs->isOp(JSOP_SETINTRINSIC)) {
                JS_ASSERT(lhs->pn_cookie.isFree());
                if (!EmitAtomOp(cx, lhs, JSOP_GETINTRINSIC, bce))
                    return false;
            } else {
                // Slot-addressed binding: map the SET op to its GET twin.
                JSOp op;
                switch (lhs->getOp()) {
                  case JSOP_SETARG: op = JSOP_GETARG; break;
                  case JSOP_SETLOCAL: op = JSOP_GETLOCAL; break;
                  case JSOP_SETALIASEDVAR: op = JSOP_GETALIASEDVAR; break;
                  default: MOZ_ASSUME_UNREACHABLE("Bad op");
                }
                if (!EmitVarOp(cx, lhs, op, bce))
                    return false;
            }
            break;
          case PNK_DOT: {
            if (Emit1(cx, bce, JSOP_DUP) < 0)
                return false;
            bool isLength = (lhs->pn_atom == cx->names().length);
            if (!EmitIndex32(cx, isLength ? JSOP_LENGTH : JSOP_GETPROP, atomIndex, bce))
                return false;
            break;
          }
          case PNK_ELEM:
            if (Emit1(cx, bce, JSOP_DUP2) < 0)
                return false;
            if (!EmitElemOpBase(cx, bce, JSOP_GETELEM))
                return false;
            break;
          case PNK_CALL:
            /*
             * We just emitted a JSOP_SETCALL (which will always throw) and
             * popped the call's return value. Push a random value to make sure
             * the stack depth is correct.
             */
            JS_ASSERT(lhs->pn_xflags & PNX_SETCALL);
            if (Emit1(cx, bce, JSOP_NULL) < 0)
                return false;
            break;
          default:;
        }
    }

    /* Now emit the right operand (it may affect the namespace). */
    if (rhs) {
        if (!EmitTree(cx, bce, rhs))
            return false;
    } else {
        /*
         * The value to assign is the next enumeration value in a for-in or
         * for-of loop. That value has already been emitted: by JSOP_ITERNEXT
         * in the for-in case, or via a GETPROP "value" on the result object in
         * the for-of case. If offset == 1, that slot is already at the top of
         * the stack. Otherwise, rearrange the stack to put that value on top.
         */
        if (offset != 1 && Emit2(cx, bce, JSOP_PICK, offset - 1) < 0)
            return false;
    }

    /* If += etc., emit the binary operator with a source note. */
    if (op != JSOP_NOP) {
        /*
         * Take care to avoid SRC_ASSIGNOP if the left-hand side is a const
         * declared in the current compilation unit, as in this case (just
         * a bit further below) we will avoid emitting the assignment op.
         */
        if (!lhs->isKind(PNK_NAME) || !lhs->isConst()) {
            if (NewSrcNote(cx, bce, SRC_ASSIGNOP) < 0)
                return false;
        }
        if (Emit1(cx, bce, op) < 0)
            return false;
    }

    /* Finally, emit the specialized assignment bytecode. */
    switch (lhs->getKind()) {
      case PNK_NAME:
        if (lhs->isConst()) {
            // Assigning to a const is a no-op, except a const cannot be the
            // target of a for-in/for-of head (rhs == nullptr).
            if (!rhs) {
                bce->reportError(lhs, JSMSG_BAD_FOR_LEFTSIDE);
                return false;
            }
            break;
        }
        if (lhs->isOp(JSOP_SETARG) || lhs->isOp(JSOP_SETLOCAL) || lhs->isOp(JSOP_SETALIASEDVAR)) {
            if (!EmitVarOp(cx, lhs, lhs->getOp(), bce))
                return false;
        } else {
            if (!EmitIndexOp(cx, lhs->getOp(), atomIndex, bce))
                return false;
        }
        break;
      case PNK_DOT:
        if (!EmitIndexOp(cx, JSOP_SETPROP, atomIndex, bce))
            return false;
        break;
      case PNK_CALL:
        /* Do nothing. The JSOP_SETCALL we emitted will always throw. */
        JS_ASSERT(lhs->pn_xflags & PNX_SETCALL);
        break;
      case PNK_ELEM:
        if (Emit1(cx, bce, JSOP_SETELEM) < 0)
            return false;
        break;
      case PNK_ARRAY:
      case PNK_OBJECT:
        if (!EmitDestructuringOps(cx, bce, lhs))
            return false;
        break;
      default:
        JS_ASSERT(0);
    }
    return true;
}

/*
 * Try to evaluate this parse node as a compile-time constant value, storing it
 * in *vp. Returns false (without reporting an error) when the node is not a
 * constant; used to fold constant array/object literals into singletons.
 */
bool
ParseNode::getConstantValue(ExclusiveContext *cx, bool strictChecks, MutableHandleValue vp)
{
    switch (getKind()) {
      case PNK_NUMBER:
        vp.setNumber(pn_dval);
        return true;
      case PNK_STRING:
        vp.setString(pn_atom);
        return true;
      case PNK_TRUE:
        vp.setBoolean(true);
        return true;
      case PNK_FALSE:
        vp.setBoolean(false);
        return true;
      case PNK_NULL:
        vp.setNull();
        return true;
      case PNK_SPREAD:
        return false;
      case PNK_ARRAY: {
        JS_ASSERT(isOp(JSOP_NEWINIT) && !(pn_xflags & PNX_NONCONST));

        RootedObject obj(cx,
                         NewDenseAllocatedArray(cx, pn_count, nullptr, MaybeSingletonObject));
        if (!obj)
            return false;

        // Recursively constant-fold each element and define it on the array.
        unsigned idx = 0;
        RootedId id(cx);
        RootedValue value(cx);
        for (ParseNode *pn = pn_head; pn; idx++, pn = pn->pn_next) {
            if (!pn->getConstantValue(cx, strictChecks, &value))
                return false;
            id = INT_TO_JSID(idx);
            if (!JSObject::defineGeneric(cx, obj, id, value, nullptr, nullptr, JSPROP_ENUMERATE))
                return false;
        }
        JS_ASSERT(idx == pn_count);

        types::FixArrayType(cx, obj);
        vp.setObject(*obj);
        return true;
      }
      case PNK_OBJECT: {
        JS_ASSERT(isOp(JSOP_NEWINIT));
        JS_ASSERT(!(pn_xflags & PNX_NONCONST));

        gc::AllocKind kind = GuessObjectGCKind(pn_count);
        RootedObject obj(cx, NewBuiltinClassInstance(cx, &JSObject::class_, kind, MaybeSingletonObject));
        if (!obj)
            return false;

        RootedValue value(cx), idvalue(cx);
        for (ParseNode *pn = pn_head; pn; pn = pn->pn_next) {
            // Each list element is a PNK_COLON: pn_left is the key, pn_right
            // the value expression.
            if (!pn->pn_right->getConstantValue(cx, strictChecks, &value))
                return false;

            ParseNode *pnid = pn->pn_left;
            if (pnid->isKind(PNK_NUMBER)) {
                idvalue = NumberValue(pnid->pn_dval);
            } else {
                JS_ASSERT(pnid->isKind(PNK_NAME) || pnid->isKind(PNK_STRING));
                JS_ASSERT(pnid->pn_atom != cx->names().proto);
                idvalue = StringValue(pnid->pn_atom);
            }

            // Fast path: numeric keys that are definitely array indexes.
            uint32_t index;
            if (IsDefinitelyIndex(idvalue, &index)) {
                if (!JSObject::defineElement(cx, obj, index, value, nullptr, nullptr,
                                             JSPROP_ENUMERATE))
                {
                    return false;
                }

                continue;
            }

            // NOTE(review): upstream spells this ToAtom<CanGC>(cx, idvalue);
            // the template argument looks markup-stripped here — confirm
            // against the original revision before relying on this line.
            JSAtom *name = ToAtom(cx, idvalue);
            if (!name)
                return false;

            if (name->isIndex(&index)) {
                if (!JSObject::defineElement(cx, obj, index, value,
                                             nullptr, nullptr, JSPROP_ENUMERATE))
                    return false;
            } else {
                if (!JSObject::defineProperty(cx, obj, name->asPropertyName(), value,
                                              nullptr, nullptr, JSPROP_ENUMERATE))
                {
                    return false;
                }
            }
        }

        types::FixObjectType(cx, obj);
        vp.setObject(*obj);
        return true;
      }
      default:
        MOZ_ASSUME_UNREACHABLE("Unexpected node");
    }
    return false;
}

/*
 * Emit a constant object/array literal as a single JSOP_OBJECT referencing a
 * pre-built singleton, instead of emitting per-property initializer bytecode.
 */
static bool
EmitSingletonInitialiser(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
    RootedValue value(cx);
    if (!pn->getConstantValue(cx, bce->sc->needStrictChecks(), &value))
        return false;

    JS_ASSERT(value.isObject());
    ObjectBox *objbox = bce->parser->newObjectBox(&value.toObject());
    if (!objbox)
        return false;

    return EmitObjectOp(cx, objbox, JSOP_OBJECT, bce);
}

/* See the SRC_FOR source note offsetBias comments later in this file.
*/ michael@0: JS_STATIC_ASSERT(JSOP_NOP_LENGTH == 1); michael@0: JS_STATIC_ASSERT(JSOP_POP_LENGTH == 1); michael@0: michael@0: namespace { michael@0: michael@0: class EmitLevelManager michael@0: { michael@0: BytecodeEmitter *bce; michael@0: public: michael@0: EmitLevelManager(BytecodeEmitter *bce) : bce(bce) { bce->emitLevel++; } michael@0: ~EmitLevelManager() { bce->emitLevel--; } michael@0: }; michael@0: michael@0: } /* anonymous namespace */ michael@0: michael@0: static bool michael@0: EmitCatch(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) michael@0: { michael@0: /* michael@0: * Morph STMT_BLOCK to STMT_CATCH, note the block entry code offset, michael@0: * and save the block object atom. michael@0: */ michael@0: StmtInfoBCE *stmt = bce->topStmt; michael@0: JS_ASSERT(stmt->type == STMT_BLOCK && stmt->isBlockScope); michael@0: stmt->type = STMT_CATCH; michael@0: michael@0: /* Go up one statement info record to the TRY or FINALLY record. */ michael@0: stmt = stmt->down; michael@0: JS_ASSERT(stmt->type == STMT_TRY || stmt->type == STMT_FINALLY); michael@0: michael@0: /* Pick up the pending exception and bind it to the catch variable. */ michael@0: if (Emit1(cx, bce, JSOP_EXCEPTION) < 0) michael@0: return false; michael@0: michael@0: /* michael@0: * Dup the exception object if there is a guard for rethrowing to use michael@0: * it later when rethrowing or in other catches. michael@0: */ michael@0: if (pn->pn_kid2 && Emit1(cx, bce, JSOP_DUP) < 0) michael@0: return false; michael@0: michael@0: ParseNode *pn2 = pn->pn_kid1; michael@0: switch (pn2->getKind()) { michael@0: case PNK_ARRAY: michael@0: case PNK_OBJECT: michael@0: if (!EmitDestructuringOps(cx, bce, pn2)) michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_POP) < 0) michael@0: return false; michael@0: break; michael@0: michael@0: case PNK_NAME: michael@0: /* Inline and specialize BindNameToSlot for pn2. 
*/ michael@0: JS_ASSERT(!pn2->pn_cookie.isFree()); michael@0: if (!EmitVarOp(cx, pn2, JSOP_SETLOCAL, bce)) michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_POP) < 0) michael@0: return false; michael@0: break; michael@0: michael@0: default: michael@0: JS_ASSERT(0); michael@0: } michael@0: michael@0: // If there is a guard expression, emit it and arrange to jump to the next michael@0: // catch block if the guard expression is false. michael@0: if (pn->pn_kid2) { michael@0: if (!EmitTree(cx, bce, pn->pn_kid2)) michael@0: return false; michael@0: michael@0: // If the guard expression is false, fall through, pop the block scope, michael@0: // and jump to the next catch block. Otherwise jump over that code and michael@0: // pop the dupped exception. michael@0: ptrdiff_t guardCheck = EmitJump(cx, bce, JSOP_IFNE, 0); michael@0: if (guardCheck < 0) michael@0: return false; michael@0: michael@0: { michael@0: NonLocalExitScope nle(cx, bce); michael@0: michael@0: // Move exception back to cx->exception to prepare for michael@0: // the next catch. michael@0: if (Emit1(cx, bce, JSOP_THROWING) < 0) michael@0: return false; michael@0: michael@0: // Leave the scope for this catch block. michael@0: if (!nle.prepareForNonLocalJump(stmt)) michael@0: return false; michael@0: michael@0: // Jump to the next handler. The jump target is backpatched by EmitTry. michael@0: ptrdiff_t guardJump = EmitJump(cx, bce, JSOP_GOTO, 0); michael@0: if (guardJump < 0) michael@0: return false; michael@0: stmt->guardJump() = guardJump; michael@0: } michael@0: michael@0: // Back to normal control flow. michael@0: SetJumpOffsetAt(bce, guardCheck); michael@0: michael@0: // Pop duplicated exception object as we no longer need it. michael@0: if (Emit1(cx, bce, JSOP_POP) < 0) michael@0: return false; michael@0: } michael@0: michael@0: /* Emit the catch body. 
*/ michael@0: return EmitTree(cx, bce, pn->pn_kid3); michael@0: } michael@0: michael@0: // Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See the michael@0: // comment on EmitSwitch. michael@0: // michael@0: MOZ_NEVER_INLINE static bool michael@0: EmitTry(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) michael@0: { michael@0: StmtInfoBCE stmtInfo(cx); michael@0: michael@0: // Push stmtInfo to track jumps-over-catches and gosubs-to-finally michael@0: // for later fixup. michael@0: // michael@0: // When a finally block is active (STMT_FINALLY in our parse context), michael@0: // non-local jumps (including jumps-over-catches) result in a GOSUB michael@0: // being written into the bytecode stream and fixed-up later (c.f. michael@0: // EmitBackPatchOp and BackPatch). michael@0: // michael@0: PushStatementBCE(bce, &stmtInfo, pn->pn_kid3 ? STMT_FINALLY : STMT_TRY, bce->offset()); michael@0: michael@0: // Since an exception can be thrown at any place inside the try block, michael@0: // we need to restore the stack and the scope chain before we transfer michael@0: // the control to the exception handler. michael@0: // michael@0: // For that we store in a try note associated with the catch or michael@0: // finally block the stack depth upon the try entry. The interpreter michael@0: // uses this depth to properly unwind the stack and the scope chain. michael@0: // michael@0: int depth = bce->stackDepth; michael@0: michael@0: // Record the try location, then emit the try block. michael@0: ptrdiff_t noteIndex = NewSrcNote(cx, bce, SRC_TRY); michael@0: if (noteIndex < 0 || Emit1(cx, bce, JSOP_TRY) < 0) michael@0: return false; michael@0: ptrdiff_t tryStart = bce->offset(); michael@0: if (!EmitTree(cx, bce, pn->pn_kid1)) michael@0: return false; michael@0: JS_ASSERT(depth == bce->stackDepth); michael@0: michael@0: // GOSUB to finally, if present. 
michael@0: if (pn->pn_kid3) { michael@0: if (EmitBackPatchOp(cx, bce, &stmtInfo.gosubs()) < 0) michael@0: return false; michael@0: } michael@0: michael@0: // Source note points to the jump at the end of the try block. michael@0: if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, bce->offset() - tryStart + JSOP_TRY_LENGTH)) michael@0: return false; michael@0: michael@0: // Emit jump over catch and/or finally. michael@0: ptrdiff_t catchJump = -1; michael@0: if (EmitBackPatchOp(cx, bce, &catchJump) < 0) michael@0: return false; michael@0: michael@0: ptrdiff_t tryEnd = bce->offset(); michael@0: michael@0: // If this try has a catch block, emit it. michael@0: if (ParseNode *pn2 = pn->pn_kid2) { michael@0: // The emitted code for a catch block looks like: michael@0: // michael@0: // [pushblockscope] only if any local aliased michael@0: // exception michael@0: // if there is a catchguard: michael@0: // dup michael@0: // setlocal 0; pop assign or possibly destructure exception michael@0: // if there is a catchguard: michael@0: // < catchguard code > michael@0: // ifne POST michael@0: // debugleaveblock michael@0: // [popblockscope] only if any local aliased michael@0: // throwing pop exception to cx->exception michael@0: // goto michael@0: // POST: pop michael@0: // < catch block contents > michael@0: // debugleaveblock michael@0: // [popblockscope] only if any local aliased michael@0: // goto non-local; finally applies michael@0: // michael@0: // If there's no catch block without a catchguard, the last points to rethrow code. This code will [gosub] to the finally michael@0: // code if appropriate, and is also used for the catch-all trynote for michael@0: // capturing exceptions thrown from catch{} blocks. michael@0: // michael@0: for (ParseNode *pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) { michael@0: JS_ASSERT(bce->stackDepth == depth); michael@0: michael@0: // Emit the lexical scope and catch body. 
michael@0: JS_ASSERT(pn3->isKind(PNK_LEXICALSCOPE)); michael@0: if (!EmitTree(cx, bce, pn3)) michael@0: return false; michael@0: michael@0: // gosub , if required. michael@0: if (pn->pn_kid3) { michael@0: if (EmitBackPatchOp(cx, bce, &stmtInfo.gosubs()) < 0) michael@0: return false; michael@0: JS_ASSERT(bce->stackDepth == depth); michael@0: } michael@0: michael@0: // Jump over the remaining catch blocks. This will get fixed michael@0: // up to jump to after catch/finally. michael@0: if (EmitBackPatchOp(cx, bce, &catchJump) < 0) michael@0: return false; michael@0: michael@0: // If this catch block had a guard clause, patch the guard jump to michael@0: // come here. michael@0: if (stmtInfo.guardJump() != -1) { michael@0: SetJumpOffsetAt(bce, stmtInfo.guardJump()); michael@0: stmtInfo.guardJump() = -1; michael@0: michael@0: // If this catch block is the last one, rethrow, delegating michael@0: // execution of any finally block to the exception handler. michael@0: if (!pn3->pn_next) { michael@0: if (Emit1(cx, bce, JSOP_EXCEPTION) < 0) michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_THROW) < 0) michael@0: return false; michael@0: } michael@0: } michael@0: } michael@0: } michael@0: michael@0: JS_ASSERT(bce->stackDepth == depth); michael@0: michael@0: // Emit the finally handler, if there is one. michael@0: ptrdiff_t finallyStart = 0; michael@0: if (pn->pn_kid3) { michael@0: // Fix up the gosubs that might have been emitted before non-local michael@0: // jumps to the finally code. michael@0: if (!BackPatch(cx, bce, stmtInfo.gosubs(), bce->code().end(), JSOP_GOSUB)) michael@0: return false; michael@0: michael@0: finallyStart = bce->offset(); michael@0: michael@0: // Indicate that we're emitting a subroutine body. 
michael@0: stmtInfo.type = STMT_SUBROUTINE; michael@0: if (!UpdateSourceCoordNotes(cx, bce, pn->pn_kid3->pn_pos.begin)) michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_FINALLY) < 0 || michael@0: !EmitTree(cx, bce, pn->pn_kid3) || michael@0: Emit1(cx, bce, JSOP_RETSUB) < 0) michael@0: { michael@0: return false; michael@0: } michael@0: JS_ASSERT(bce->stackDepth == depth); michael@0: } michael@0: if (!PopStatementBCE(cx, bce)) michael@0: return false; michael@0: michael@0: // ReconstructPCStack needs a NOP here to mark the end of the last catch block. michael@0: if (Emit1(cx, bce, JSOP_NOP) < 0) michael@0: return false; michael@0: michael@0: // Fix up the end-of-try/catch jumps to come here. michael@0: if (!BackPatch(cx, bce, catchJump, bce->code().end(), JSOP_GOTO)) michael@0: return false; michael@0: michael@0: // Add the try note last, to let post-order give us the right ordering michael@0: // (first to last for a given nesting level, inner to outer by level). michael@0: if (pn->pn_kid2 && !bce->tryNoteList.append(JSTRY_CATCH, depth, tryStart, tryEnd)) michael@0: return false; michael@0: michael@0: // If we've got a finally, mark try+catch region with additional michael@0: // trynote to catch exceptions (re)thrown from a catch block or michael@0: // for the try{}finally{} case. michael@0: if (pn->pn_kid3 && !bce->tryNoteList.append(JSTRY_FINALLY, depth, tryStart, finallyStart)) michael@0: return false; michael@0: michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: EmitIf(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) michael@0: { michael@0: StmtInfoBCE stmtInfo(cx); michael@0: michael@0: /* Initialize so we can detect else-if chains and avoid recursion. */ michael@0: stmtInfo.type = STMT_IF; michael@0: ptrdiff_t beq = -1; michael@0: ptrdiff_t jmp = -1; michael@0: ptrdiff_t noteIndex = -1; michael@0: michael@0: if_again: michael@0: /* Emit code for the condition before pushing stmtInfo. 
*/ michael@0: if (!EmitTree(cx, bce, pn->pn_kid1)) michael@0: return false; michael@0: ptrdiff_t top = bce->offset(); michael@0: if (stmtInfo.type == STMT_IF) { michael@0: PushStatementBCE(bce, &stmtInfo, STMT_IF, top); michael@0: } else { michael@0: /* michael@0: * We came here from the goto further below that detects else-if michael@0: * chains, so we must mutate stmtInfo back into a STMT_IF record. michael@0: * Also we need a note offset for SRC_IF_ELSE to help IonMonkey. michael@0: */ michael@0: JS_ASSERT(stmtInfo.type == STMT_ELSE); michael@0: stmtInfo.type = STMT_IF; michael@0: stmtInfo.update = top; michael@0: if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, jmp - beq)) michael@0: return false; michael@0: } michael@0: michael@0: /* Emit an annotated branch-if-false around the then part. */ michael@0: ParseNode *pn3 = pn->pn_kid3; michael@0: noteIndex = NewSrcNote(cx, bce, pn3 ? SRC_IF_ELSE : SRC_IF); michael@0: if (noteIndex < 0) michael@0: return false; michael@0: beq = EmitJump(cx, bce, JSOP_IFEQ, 0); michael@0: if (beq < 0) michael@0: return false; michael@0: michael@0: /* Emit code for the then and optional else parts. */ michael@0: if (!EmitTree(cx, bce, pn->pn_kid2)) michael@0: return false; michael@0: if (pn3) { michael@0: /* Modify stmtInfo so we know we're in the else part. */ michael@0: stmtInfo.type = STMT_ELSE; michael@0: michael@0: /* michael@0: * Emit a JSOP_BACKPATCH op to jump from the end of our then part michael@0: * around the else part. The PopStatementBCE call at the bottom of michael@0: * this function will fix up the backpatch chain linked from michael@0: * stmtInfo.breaks. michael@0: */ michael@0: jmp = EmitGoto(cx, bce, &stmtInfo, &stmtInfo.breaks); michael@0: if (jmp < 0) michael@0: return false; michael@0: michael@0: /* Ensure the branch-if-false comes here, then emit the else. 
*/ michael@0: SetJumpOffsetAt(bce, beq); michael@0: if (pn3->isKind(PNK_IF)) { michael@0: pn = pn3; michael@0: goto if_again; michael@0: } michael@0: michael@0: if (!EmitTree(cx, bce, pn3)) michael@0: return false; michael@0: michael@0: /* michael@0: * Annotate SRC_IF_ELSE with the offset from branch to jump, for michael@0: * IonMonkey's benefit. We can't just "back up" from the pc michael@0: * of the else clause, because we don't know whether an extended michael@0: * jump was required to leap from the end of the then clause over michael@0: * the else clause. michael@0: */ michael@0: if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, jmp - beq)) michael@0: return false; michael@0: } else { michael@0: /* No else part, fixup the branch-if-false to come here. */ michael@0: SetJumpOffsetAt(bce, beq); michael@0: } michael@0: return PopStatementBCE(cx, bce); michael@0: } michael@0: michael@0: /* michael@0: * pnLet represents one of: michael@0: * michael@0: * let-expression: (let (x = y) EXPR) michael@0: * let-statement: let (x = y) { ... } michael@0: * michael@0: * For a let-expression 'let (x = a, [y,z] = b) e', EmitLet produces: michael@0: * michael@0: * bytecode stackDepth srcnotes michael@0: * evaluate a +1 michael@0: * evaluate b +1 michael@0: * dup +1 michael@0: * destructure y michael@0: * pick 1 michael@0: * dup +1 michael@0: * destructure z michael@0: * pick 1 michael@0: * pop -1 michael@0: * setlocal 2 -1 michael@0: * setlocal 1 -1 michael@0: * setlocal 0 -1 michael@0: * pushblockscope (if needed) michael@0: * evaluate e +1 michael@0: * debugleaveblock michael@0: * popblockscope (if needed) michael@0: * michael@0: * Note that, since pushblockscope simply changes fp->scopeChain and does not michael@0: * otherwise touch the stack, evaluation of the let-var initializers must leave michael@0: * the initial value in the let-var's future slot. michael@0: */ michael@0: /* michael@0: * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. 
See michael@0: * the comment on EmitSwitch. michael@0: */ michael@0: MOZ_NEVER_INLINE static bool michael@0: EmitLet(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pnLet) michael@0: { michael@0: JS_ASSERT(pnLet->isArity(PN_BINARY)); michael@0: ParseNode *varList = pnLet->pn_left; michael@0: JS_ASSERT(varList->isArity(PN_LIST)); michael@0: ParseNode *letBody = pnLet->pn_right; michael@0: JS_ASSERT(letBody->isLet() && letBody->isKind(PNK_LEXICALSCOPE)); michael@0: michael@0: int letHeadDepth = bce->stackDepth; michael@0: michael@0: if (!EmitVariables(cx, bce, varList, PushInitialValues, true)) michael@0: return false; michael@0: michael@0: /* Push storage for hoisted let decls (e.g. 'let (x) { let y }'). */ michael@0: uint32_t alreadyPushed = bce->stackDepth - letHeadDepth; michael@0: StmtInfoBCE stmtInfo(cx); michael@0: if (!EnterBlockScope(cx, bce, &stmtInfo, letBody->pn_objbox, alreadyPushed)) michael@0: return false; michael@0: michael@0: if (!EmitTree(cx, bce, letBody->pn_expr)) michael@0: return false; michael@0: michael@0: if (!LeaveNestedScope(cx, bce, &stmtInfo)) michael@0: return false; michael@0: michael@0: return true; michael@0: } michael@0: michael@0: /* michael@0: * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See michael@0: * the comment on EmitSwitch. 
*/
/* Emit a block statement: enter its scope, emit the body, leave the scope. */
MOZ_NEVER_INLINE static bool
EmitLexicalScope(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
    JS_ASSERT(pn->isKind(PNK_LEXICALSCOPE));

    StmtInfoBCE stmtInfo(cx);
    if (!EnterBlockScope(cx, bce, &stmtInfo, pn->pn_objbox, 0))
        return false;

    if (!EmitTree(cx, bce, pn->pn_expr))
        return false;

    if (!LeaveNestedScope(cx, bce, &stmtInfo))
        return false;

    return true;
}

/* Emit a with statement: evaluate the object, then run the body in its scope. */
static bool
EmitWith(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
    StmtInfoBCE stmtInfo(cx);
    if (!EmitTree(cx, bce, pn->pn_left))
        return false;
    if (!EnterNestedScope(cx, bce, &stmtInfo, pn->pn_binary_obj, STMT_WITH))
        return false;
    if (!EmitTree(cx, bce, pn->pn_right))
        return false;
    if (!LeaveNestedScope(cx, bce, &stmtInfo))
        return false;
    return true;
}

/*
 * Emit a for-of loop. The loop runs with the iterator and the current
 * result object on the stack (see the stack comments below).
 */
static bool
EmitForOf(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t top)
{
    ParseNode *forHead = pn->pn_left;
    ParseNode *forBody = pn->pn_right;

    ParseNode *pn1 = forHead->pn_kid1;
    bool letDecl = pn1 && pn1->isKind(PNK_LEXICALSCOPE);
    JS_ASSERT_IF(letDecl, pn1->isLet());

    // If the left part is 'var x', emit code to define x if necessary using a
    // prolog opcode, but do not emit a pop.
    if (pn1) {
        ParseNode *decl = letDecl ?
pn1->pn_expr : pn1; michael@0: JS_ASSERT(decl->isKind(PNK_VAR) || decl->isKind(PNK_LET)); michael@0: bce->emittingForInit = true; michael@0: if (!EmitVariables(cx, bce, decl, DefineVars)) michael@0: return false; michael@0: bce->emittingForInit = false; michael@0: } michael@0: michael@0: // For-of loops run with two values on the stack: the iterator and the michael@0: // current result object. michael@0: michael@0: // Compile the object expression to the right of 'of'. michael@0: if (!EmitTree(cx, bce, forHead->pn_kid3)) michael@0: return false; michael@0: michael@0: // Convert iterable to iterator. michael@0: if (Emit1(cx, bce, JSOP_DUP) < 0) // OBJ OBJ michael@0: return false; michael@0: if (!EmitAtomOp(cx, cx->names().std_iterator, JSOP_CALLPROP, bce)) // OBJ @@ITERATOR michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_SWAP) < 0) // @@ITERATOR OBJ michael@0: return false; michael@0: if (EmitCall(cx, bce, JSOP_CALL, 0) < 0) // ITER michael@0: return false; michael@0: CheckTypeSet(cx, bce, JSOP_CALL); michael@0: michael@0: // Push a dummy result so that we properly enter iteration midstream. michael@0: if (Emit1(cx, bce, JSOP_UNDEFINED) < 0) // ITER RESULT michael@0: return false; michael@0: michael@0: // Enter the block before the loop body, after evaluating the obj. michael@0: StmtInfoBCE letStmt(cx); michael@0: if (letDecl) { michael@0: if (!EnterBlockScope(cx, bce, &letStmt, pn1->pn_objbox, 0)) michael@0: return false; michael@0: } michael@0: michael@0: LoopStmtInfo stmtInfo(cx); michael@0: PushLoopStatement(bce, &stmtInfo, STMT_FOR_OF_LOOP, top); michael@0: michael@0: // Jump down to the loop condition to minimize overhead assuming at least michael@0: // one iteration, as the other loop forms do. Annotate so IonMonkey can michael@0: // find the loop-closing jump. 
michael@0: int noteIndex = NewSrcNote(cx, bce, SRC_FOR_OF); michael@0: if (noteIndex < 0) michael@0: return false; michael@0: ptrdiff_t jmp = EmitJump(cx, bce, JSOP_GOTO, 0); michael@0: if (jmp < 0) michael@0: return false; michael@0: michael@0: top = bce->offset(); michael@0: SET_STATEMENT_TOP(&stmtInfo, top); michael@0: if (EmitLoopHead(cx, bce, nullptr) < 0) michael@0: return false; michael@0: michael@0: #ifdef DEBUG michael@0: int loopDepth = bce->stackDepth; michael@0: #endif michael@0: michael@0: // Emit code to assign result.value to the iteration variable. michael@0: if (Emit1(cx, bce, JSOP_DUP) < 0) // ITER RESULT RESULT michael@0: return false; michael@0: if (!EmitAtomOp(cx, cx->names().value, JSOP_GETPROP, bce)) // ITER RESULT VALUE michael@0: return false; michael@0: if (!EmitAssignment(cx, bce, forHead->pn_kid2, JSOP_NOP, nullptr)) // ITER RESULT VALUE michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_POP) < 0) // ITER RESULT michael@0: return false; michael@0: michael@0: // The stack should be balanced around the assignment opcode sequence. michael@0: JS_ASSERT(bce->stackDepth == loopDepth); michael@0: michael@0: // Emit code for the loop body. michael@0: if (!EmitTree(cx, bce, forBody)) michael@0: return false; michael@0: michael@0: // Set loop and enclosing "update" offsets, for continue. michael@0: StmtInfoBCE *stmt = &stmtInfo; michael@0: do { michael@0: stmt->update = bce->offset(); michael@0: } while ((stmt = stmt->down) != nullptr && stmt->type == STMT_LABEL); michael@0: michael@0: // COME FROM the beginning of the loop to here. 
michael@0: SetJumpOffsetAt(bce, jmp); michael@0: if (!EmitLoopEntry(cx, bce, nullptr)) michael@0: return false; michael@0: michael@0: if (Emit1(cx, bce, JSOP_POP) < 0) // ITER michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_DUP) < 0) // ITER ITER michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_DUP) < 0) // ITER ITER ITER michael@0: return false; michael@0: if (!EmitAtomOp(cx, cx->names().next, JSOP_CALLPROP, bce)) // ITER ITER NEXT michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_SWAP) < 0) // ITER NEXT ITER michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_UNDEFINED) < 0) // ITER NEXT ITER UNDEFINED michael@0: return false; michael@0: if (EmitCall(cx, bce, JSOP_CALL, 1) < 0) // ITER RESULT michael@0: return false; michael@0: CheckTypeSet(cx, bce, JSOP_CALL); michael@0: if (Emit1(cx, bce, JSOP_DUP) < 0) // ITER RESULT RESULT michael@0: return false; michael@0: if (!EmitAtomOp(cx, cx->names().done, JSOP_GETPROP, bce)) // ITER RESULT DONE? michael@0: return false; michael@0: michael@0: ptrdiff_t beq = EmitJump(cx, bce, JSOP_IFEQ, top - bce->offset()); // ITER RESULT michael@0: if (beq < 0) michael@0: return false; michael@0: michael@0: JS_ASSERT(bce->stackDepth == loopDepth); michael@0: michael@0: // Let Ion know where the closing jump of this loop is. michael@0: if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 0, beq - jmp)) michael@0: return false; michael@0: michael@0: // Fixup breaks and continues. michael@0: if (!PopStatementBCE(cx, bce)) michael@0: return false; michael@0: michael@0: if (letDecl) { michael@0: if (!LeaveNestedScope(cx, bce, &letStmt)) michael@0: return false; michael@0: } michael@0: michael@0: // Pop the result and the iter. 
EMIT_UINT16_IMM_OP(JSOP_POPN, 2);

    return true;
}

/*
 * Emit a for-in loop: JSOP_ITER / JSOP_ITERNEXT / JSOP_MOREITER, with a
 * trynote so the iterator is closed on abnormal exit.
 */
static bool
EmitForIn(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t top)
{
    ParseNode *forHead = pn->pn_left;
    ParseNode *forBody = pn->pn_right;

    ParseNode *pn1 = forHead->pn_kid1;
    bool letDecl = pn1 && pn1->isKind(PNK_LEXICALSCOPE);
    JS_ASSERT_IF(letDecl, pn1->isLet());

    /*
     * If the left part is 'var x', emit code to define x if necessary
     * using a prolog opcode, but do not emit a pop. If the left part was
     * originally 'var x = i', the parser will have rewritten it; see
     * Parser::forStatement. 'for (let x = i in o)' is mercifully banned.
     */
    if (pn1) {
        ParseNode *decl = letDecl ? pn1->pn_expr : pn1;
        JS_ASSERT(decl->isKind(PNK_VAR) || decl->isKind(PNK_LET));
        bce->emittingForInit = true;
        if (!EmitVariables(cx, bce, decl, DefineVars))
            return false;
        bce->emittingForInit = false;
    }

    /* Compile the object expression to the right of 'in'. */
    if (!EmitTree(cx, bce, forHead->pn_kid3))
        return false;

    /*
     * Emit a bytecode to convert top of stack value to the iterator
     * object depending on the loop variant (for-in, for-each-in, or
     * destructuring for-in).
     */
    JS_ASSERT(pn->isOp(JSOP_ITER));
    if (Emit2(cx, bce, JSOP_ITER, (uint8_t) pn->pn_iflags) < 0)
        return false;

    /* Enter the block before the loop body, after evaluating the obj.
*/ michael@0: StmtInfoBCE letStmt(cx); michael@0: if (letDecl) { michael@0: if (!EnterBlockScope(cx, bce, &letStmt, pn1->pn_objbox, 0)) michael@0: return false; michael@0: } michael@0: michael@0: LoopStmtInfo stmtInfo(cx); michael@0: PushLoopStatement(bce, &stmtInfo, STMT_FOR_IN_LOOP, top); michael@0: michael@0: /* Annotate so IonMonkey can find the loop-closing jump. */ michael@0: int noteIndex = NewSrcNote(cx, bce, SRC_FOR_IN); michael@0: if (noteIndex < 0) michael@0: return false; michael@0: michael@0: /* michael@0: * Jump down to the loop condition to minimize overhead assuming at michael@0: * least one iteration, as the other loop forms do. michael@0: */ michael@0: ptrdiff_t jmp = EmitJump(cx, bce, JSOP_GOTO, 0); michael@0: if (jmp < 0) michael@0: return false; michael@0: michael@0: top = bce->offset(); michael@0: SET_STATEMENT_TOP(&stmtInfo, top); michael@0: if (EmitLoopHead(cx, bce, nullptr) < 0) michael@0: return false; michael@0: michael@0: #ifdef DEBUG michael@0: int loopDepth = bce->stackDepth; michael@0: #endif michael@0: michael@0: /* michael@0: * Emit code to get the next enumeration value and assign it to the michael@0: * left hand side. michael@0: */ michael@0: if (Emit1(cx, bce, JSOP_ITERNEXT) < 0) michael@0: return false; michael@0: if (!EmitAssignment(cx, bce, forHead->pn_kid2, JSOP_NOP, nullptr)) michael@0: return false; michael@0: michael@0: if (Emit1(cx, bce, JSOP_POP) < 0) michael@0: return false; michael@0: michael@0: /* The stack should be balanced around the assignment opcode sequence. */ michael@0: JS_ASSERT(bce->stackDepth == loopDepth); michael@0: michael@0: /* Emit code for the loop body. */ michael@0: if (!EmitTree(cx, bce, forBody)) michael@0: return false; michael@0: michael@0: /* Set loop and enclosing "update" offsets, for continue. 
*/ michael@0: StmtInfoBCE *stmt = &stmtInfo; michael@0: do { michael@0: stmt->update = bce->offset(); michael@0: } while ((stmt = stmt->down) != nullptr && stmt->type == STMT_LABEL); michael@0: michael@0: /* michael@0: * Fixup the goto that starts the loop to jump down to JSOP_MOREITER. michael@0: */ michael@0: SetJumpOffsetAt(bce, jmp); michael@0: if (!EmitLoopEntry(cx, bce, nullptr)) michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_MOREITER) < 0) michael@0: return false; michael@0: ptrdiff_t beq = EmitJump(cx, bce, JSOP_IFNE, top - bce->offset()); michael@0: if (beq < 0) michael@0: return false; michael@0: michael@0: /* Set the srcnote offset so we can find the closing jump. */ michael@0: if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 0, beq - jmp)) michael@0: return false; michael@0: michael@0: // Fix up breaks and continues. michael@0: if (!PopStatementBCE(cx, bce)) michael@0: return false; michael@0: michael@0: if (!bce->tryNoteList.append(JSTRY_ITER, bce->stackDepth, top, bce->offset())) michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_ENDITER) < 0) michael@0: return false; michael@0: michael@0: if (letDecl) { michael@0: if (!LeaveNestedScope(cx, bce, &letStmt)) michael@0: return false; michael@0: } michael@0: michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: EmitNormalFor(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t top) michael@0: { michael@0: LoopStmtInfo stmtInfo(cx); michael@0: PushLoopStatement(bce, &stmtInfo, STMT_FOR_LOOP, top); michael@0: michael@0: ParseNode *forHead = pn->pn_left; michael@0: ParseNode *forBody = pn->pn_right; michael@0: michael@0: /* C-style for (init; cond; update) ... loop. */ michael@0: JSOp op = JSOP_POP; michael@0: ParseNode *pn3 = forHead->pn_kid1; michael@0: if (!pn3) { michael@0: // No initializer, but emit a nop so that there's somewhere to put the michael@0: // SRC_FOR annotation that IonBuilder will look for. 
        op = JSOP_NOP;
    } else {
        bce->emittingForInit = true;
        if (pn3->isKind(PNK_ASSIGN)) {
            JS_ASSERT(pn3->isOp(JSOP_NOP));
            if (!MaybeEmitGroupAssignment(cx, bce, op, pn3, GroupIsNotDecl, &op))
                return false;
        }
        if (op == JSOP_POP) {
            if (!UpdateSourceCoordNotes(cx, bce, pn3->pn_pos.begin))
                return false;
            if (!EmitTree(cx, bce, pn3))
                return false;
            if (pn3->isKind(PNK_VAR) || pn3->isKind(PNK_CONST) || pn3->isKind(PNK_LET)) {
                /*
                 * Check whether a destructuring-initialized var decl
                 * was optimized to a group assignment. If so, we do
                 * not need to emit a pop below, so switch to a nop,
                 * just for IonBuilder.
                 */
                JS_ASSERT(pn3->isArity(PN_LIST) || pn3->isArity(PN_BINARY));
                if (pn3->pn_xflags & PNX_GROUPINIT)
                    op = JSOP_NOP;
            }
        }
        bce->emittingForInit = false;
    }

    /*
     * NB: the SRC_FOR note has offsetBias 1 (JSOP_{NOP,POP}_LENGTH).
     * Use tmp to hold the biased srcnote "top" offset, which differs
     * from the top local variable by the length of the JSOP_GOTO
     * emitted in between tmp and top if this loop has a condition.
     */
    int noteIndex = NewSrcNote(cx, bce, SRC_FOR);
    if (noteIndex < 0 || Emit1(cx, bce, op) < 0)
        return false;
    ptrdiff_t tmp = bce->offset();

    ptrdiff_t jmp = -1;
    if (forHead->pn_kid2) {
        /* Goto the loop condition, which branches back to iterate. */
        jmp = EmitJump(cx, bce, JSOP_GOTO, 0);
        if (jmp < 0)
            return false;
    } else {
        if (op != JSOP_NOP && Emit1(cx, bce, JSOP_NOP) < 0)
            return false;
    }

    top = bce->offset();
    SET_STATEMENT_TOP(&stmtInfo, top);

    /* Emit code for the loop body. */
    if (EmitLoopHead(cx, bce, forBody) < 0)
        return false;
    if (jmp == -1 && !EmitLoopEntry(cx, bce, forBody))
        return false;
    if (!EmitTree(cx, bce, forBody))
        return false;

    /* Set the second note offset so we can find the update part. */
    JS_ASSERT(noteIndex != -1);
    ptrdiff_t tmp2 = bce->offset();

    /* Set loop and enclosing "update" offsets, for continue. */
    StmtInfoBCE *stmt = &stmtInfo;
    do {
        stmt->update = bce->offset();
    } while ((stmt = stmt->down) != nullptr && stmt->type == STMT_LABEL);

    /* Check for update code to do before the condition (if any). */
    pn3 = forHead->pn_kid3;
    if (pn3) {
        if (!UpdateSourceCoordNotes(cx, bce, pn3->pn_pos.begin))
            return false;
        op = JSOP_POP;
        if (pn3->isKind(PNK_ASSIGN)) {
            JS_ASSERT(pn3->isOp(JSOP_NOP));
            if (!MaybeEmitGroupAssignment(cx, bce, op, pn3, GroupIsNotDecl, &op))
                return false;
        }
        if (op == JSOP_POP && !EmitTree(cx, bce, pn3))
            return false;

        /* Always emit the POP or NOP to help IonBuilder. */
        if (Emit1(cx, bce, op) < 0)
            return false;

        /* Restore the absolute line number for source note readers. */
        uint32_t lineNum = bce->parser->tokenStream.srcCoords.lineNum(pn->pn_pos.end);
        if (bce->currentLine() != lineNum) {
            if (NewSrcNote2(cx, bce, SRC_SETLINE, ptrdiff_t(lineNum)) < 0)
                return false;
            bce->current->currentLine = lineNum;
            bce->current->lastColumn = 0;
        }
    }

    ptrdiff_t tmp3 = bce->offset();

    if (forHead->pn_kid2) {
        /* Fix up the goto from top to target the loop condition. */
        JS_ASSERT(jmp >= 0);
        SetJumpOffsetAt(bce, jmp);
        if (!EmitLoopEntry(cx, bce, forHead->pn_kid2))
            return false;

        if (!EmitTree(cx, bce, forHead->pn_kid2))
            return false;
    }

    /* Set the first note offset so we can find the loop condition. */
    if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 0, tmp3 - tmp))
        return false;
    if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 1, tmp2 - tmp))
        return false;
    /* The third note offset helps us find the loop-closing jump. */
    if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 2, bce->offset() - tmp))
        return false;

    /* If no loop condition, just emit a loop-closing jump. */
    op = forHead->pn_kid2 ? JSOP_IFNE : JSOP_GOTO;
    if (EmitJump(cx, bce, op, top - bce->offset()) < 0)
        return false;

    if (!bce->tryNoteList.append(JSTRY_LOOP, bce->stackDepth, top, bce->offset()))
        return false;

    /* Now fixup all breaks and continues.
*/ michael@0: return PopStatementBCE(cx, bce); michael@0: } michael@0: michael@0: static inline bool michael@0: EmitFor(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t top) michael@0: { michael@0: if (pn->pn_left->isKind(PNK_FORIN)) michael@0: return EmitForIn(cx, bce, pn, top); michael@0: michael@0: if (pn->pn_left->isKind(PNK_FOROF)) michael@0: return EmitForOf(cx, bce, pn, top); michael@0: michael@0: JS_ASSERT(pn->pn_left->isKind(PNK_FORHEAD)); michael@0: return EmitNormalFor(cx, bce, pn, top); michael@0: } michael@0: michael@0: static MOZ_NEVER_INLINE bool michael@0: EmitFunc(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) michael@0: { michael@0: FunctionBox *funbox = pn->pn_funbox; michael@0: RootedFunction fun(cx, funbox->function()); michael@0: JS_ASSERT_IF(fun->isInterpretedLazy(), fun->lazyScript()); michael@0: michael@0: /* michael@0: * Set the EMITTEDFUNCTION flag in function definitions once they have been michael@0: * emitted. Function definitions that need hoisting to the top of the michael@0: * function will be seen by EmitFunc in two places. michael@0: */ michael@0: if (pn->pn_dflags & PND_EMITTEDFUNCTION) { michael@0: JS_ASSERT_IF(fun->hasScript(), fun->nonLazyScript()); michael@0: JS_ASSERT(pn->functionIsHoisted()); michael@0: JS_ASSERT(bce->sc->isFunctionBox()); michael@0: return true; michael@0: } michael@0: michael@0: pn->pn_dflags |= PND_EMITTEDFUNCTION; michael@0: michael@0: /* michael@0: * Mark as singletons any function which will only be executed once, or michael@0: * which is inner to a lambda we only expect to run once. In the latter michael@0: * case, if the lambda runs multiple times then CloneFunctionObject will michael@0: * make a deep clone of its contents. 
michael@0: */ michael@0: if (fun->isInterpreted()) { michael@0: bool singleton = michael@0: bce->script->compileAndGo() && michael@0: fun->isInterpreted() && michael@0: (bce->checkSingletonContext() || michael@0: (!bce->isInLoop() && bce->isRunOnceLambda())); michael@0: if (!JSFunction::setTypeForScriptedFunction(cx, fun, singleton)) michael@0: return false; michael@0: michael@0: if (fun->isInterpretedLazy()) { michael@0: if (!fun->lazyScript()->sourceObject()) { michael@0: JSObject *scope = bce->staticScope; michael@0: if (!scope && bce->sc->isFunctionBox()) michael@0: scope = bce->sc->asFunctionBox()->function(); michael@0: JSObject *source = bce->script->sourceObject(); michael@0: fun->lazyScript()->setParent(scope, &source->as()); michael@0: } michael@0: if (bce->emittingRunOnceLambda) michael@0: fun->lazyScript()->setTreatAsRunOnce(); michael@0: } else { michael@0: SharedContext *outersc = bce->sc; michael@0: michael@0: if (outersc->isFunctionBox() && outersc->asFunctionBox()->mightAliasLocals()) michael@0: funbox->setMightAliasLocals(); // inherit mightAliasLocals from parent michael@0: JS_ASSERT_IF(outersc->strict, funbox->strict); michael@0: michael@0: // Inherit most things (principals, version, etc) from the parent. 
michael@0: Rooted parent(cx, bce->script); michael@0: CompileOptions options(cx, bce->parser->options()); michael@0: options.setOriginPrincipals(parent->originPrincipals()) michael@0: .setCompileAndGo(parent->compileAndGo()) michael@0: .setSelfHostingMode(parent->selfHosted()) michael@0: .setNoScriptRval(false) michael@0: .setForEval(false) michael@0: .setVersion(parent->getVersion()); michael@0: michael@0: Rooted enclosingScope(cx, EnclosingStaticScope(bce)); michael@0: Rooted sourceObject(cx, bce->script->sourceObject()); michael@0: Rooted script(cx, JSScript::Create(cx, enclosingScope, false, options, michael@0: parent->staticLevel() + 1, michael@0: sourceObject, michael@0: funbox->bufStart, funbox->bufEnd)); michael@0: if (!script) michael@0: return false; michael@0: michael@0: script->bindings = funbox->bindings; michael@0: michael@0: uint32_t lineNum = bce->parser->tokenStream.srcCoords.lineNum(pn->pn_pos.begin); michael@0: BytecodeEmitter bce2(bce, bce->parser, funbox, script, bce->insideEval, michael@0: bce->evalCaller, bce->hasGlobalScope, lineNum, michael@0: bce->emitterMode); michael@0: if (!bce2.init()) michael@0: return false; michael@0: michael@0: /* We measured the max scope depth when we parsed the function. */ michael@0: if (!EmitFunctionScript(cx, &bce2, pn->pn_body)) michael@0: return false; michael@0: michael@0: if (funbox->usesArguments && funbox->usesApply) michael@0: script->setUsesArgumentsAndApply(); michael@0: } michael@0: } else { michael@0: JS_ASSERT(IsAsmJSModuleNative(fun->native())); michael@0: } michael@0: michael@0: /* Make the function object a literal in the outer script's pool. */ michael@0: unsigned index = bce->objectList.add(pn->pn_funbox); michael@0: michael@0: /* Non-hoisted functions simply emit their respective op. */ michael@0: if (!pn->functionIsHoisted()) { michael@0: /* JSOP_LAMBDA_ARROW is always preceded by JSOP_THIS. 
*/ michael@0: MOZ_ASSERT(fun->isArrow() == (pn->getOp() == JSOP_LAMBDA_ARROW)); michael@0: if (fun->isArrow() && Emit1(cx, bce, JSOP_THIS) < 0) michael@0: return false; michael@0: return EmitIndex32(cx, pn->getOp(), index, bce); michael@0: } michael@0: michael@0: /* michael@0: * For a script we emit the code as we parse. Thus the bytecode for michael@0: * top-level functions should go in the prolog to predefine their michael@0: * names in the variable object before the already-generated main code michael@0: * is executed. This extra work for top-level scripts is not necessary michael@0: * when we emit the code for a function. It is fully parsed prior to michael@0: * invocation of the emitter and calls to EmitTree for function michael@0: * definitions can be scheduled before generating the rest of code. michael@0: */ michael@0: if (!bce->sc->isFunctionBox()) { michael@0: JS_ASSERT(pn->pn_cookie.isFree()); michael@0: JS_ASSERT(pn->getOp() == JSOP_NOP); michael@0: JS_ASSERT(!bce->topStmt); michael@0: bce->switchToProlog(); michael@0: if (!EmitIndex32(cx, JSOP_DEFFUN, index, bce)) michael@0: return false; michael@0: if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin)) michael@0: return false; michael@0: bce->switchToMain(); michael@0: } else { michael@0: #ifdef DEBUG michael@0: BindingIter bi(bce->script); michael@0: while (bi->name() != fun->atom()) michael@0: bi++; michael@0: JS_ASSERT(bi->kind() == Binding::VARIABLE || bi->kind() == Binding::CONSTANT || michael@0: bi->kind() == Binding::ARGUMENT); michael@0: JS_ASSERT(bi.frameIndex() < JS_BIT(20)); michael@0: #endif michael@0: pn->pn_index = index; michael@0: if (!EmitIndexOp(cx, JSOP_LAMBDA, index, bce)) michael@0: return false; michael@0: JS_ASSERT(pn->getOp() == JSOP_GETLOCAL || pn->getOp() == JSOP_GETARG); michael@0: JSOp setOp = pn->getOp() == JSOP_GETLOCAL ? 
JSOP_SETLOCAL : JSOP_SETARG; michael@0: if (!EmitVarOp(cx, pn, setOp, bce)) michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_POP) < 0) michael@0: return false; michael@0: } michael@0: michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: EmitDo(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) michael@0: { michael@0: /* Emit an annotated nop so IonBuilder can recognize the 'do' loop. */ michael@0: ptrdiff_t noteIndex = NewSrcNote(cx, bce, SRC_WHILE); michael@0: if (noteIndex < 0 || Emit1(cx, bce, JSOP_NOP) < 0) michael@0: return false; michael@0: michael@0: ptrdiff_t noteIndex2 = NewSrcNote(cx, bce, SRC_WHILE); michael@0: if (noteIndex2 < 0) michael@0: return false; michael@0: michael@0: /* Compile the loop body. */ michael@0: ptrdiff_t top = EmitLoopHead(cx, bce, pn->pn_left); michael@0: if (top < 0) michael@0: return false; michael@0: michael@0: LoopStmtInfo stmtInfo(cx); michael@0: PushLoopStatement(bce, &stmtInfo, STMT_DO_LOOP, top); michael@0: michael@0: if (!EmitLoopEntry(cx, bce, nullptr)) michael@0: return false; michael@0: michael@0: if (!EmitTree(cx, bce, pn->pn_left)) michael@0: return false; michael@0: michael@0: /* Set loop and enclosing label update offsets, for continue. */ michael@0: ptrdiff_t off = bce->offset(); michael@0: StmtInfoBCE *stmt = &stmtInfo; michael@0: do { michael@0: stmt->update = off; michael@0: } while ((stmt = stmt->down) != nullptr && stmt->type == STMT_LABEL); michael@0: michael@0: /* Compile the loop condition, now that continues know where to go. 
*/ michael@0: if (!EmitTree(cx, bce, pn->pn_right)) michael@0: return false; michael@0: michael@0: ptrdiff_t beq = EmitJump(cx, bce, JSOP_IFNE, top - bce->offset()); michael@0: if (beq < 0) michael@0: return false; michael@0: michael@0: if (!bce->tryNoteList.append(JSTRY_LOOP, bce->stackDepth, top, bce->offset())) michael@0: return false; michael@0: michael@0: /* michael@0: * Update the annotations with the update and back edge positions, for michael@0: * IonBuilder. michael@0: * michael@0: * Be careful: We must set noteIndex2 before noteIndex in case the noteIndex michael@0: * note gets bigger. michael@0: */ michael@0: if (!SetSrcNoteOffset(cx, bce, noteIndex2, 0, beq - top)) michael@0: return false; michael@0: if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, 1 + (off - top))) michael@0: return false; michael@0: michael@0: return PopStatementBCE(cx, bce); michael@0: } michael@0: michael@0: static bool michael@0: EmitWhile(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t top) michael@0: { michael@0: /* michael@0: * Minimize bytecodes issued for one or more iterations by jumping to michael@0: * the condition below the body and closing the loop if the condition michael@0: * is true with a backward branch. For iteration count i: michael@0: * michael@0: * i test at the top test at the bottom michael@0: * = =============== ================== michael@0: * 0 ifeq-pass goto; ifne-fail michael@0: * 1 ifeq-fail; goto; ifne-pass goto; ifne-pass; ifne-fail michael@0: * 2 2*(ifeq-fail; goto); ifeq-pass goto; 2*ifne-pass; ifne-fail michael@0: * . . . 
michael@0: * N N*(ifeq-fail; goto); ifeq-pass goto; N*ifne-pass; ifne-fail michael@0: */ michael@0: LoopStmtInfo stmtInfo(cx); michael@0: PushLoopStatement(bce, &stmtInfo, STMT_WHILE_LOOP, top); michael@0: michael@0: ptrdiff_t noteIndex = NewSrcNote(cx, bce, SRC_WHILE); michael@0: if (noteIndex < 0) michael@0: return false; michael@0: michael@0: ptrdiff_t jmp = EmitJump(cx, bce, JSOP_GOTO, 0); michael@0: if (jmp < 0) michael@0: return false; michael@0: michael@0: top = EmitLoopHead(cx, bce, pn->pn_right); michael@0: if (top < 0) michael@0: return false; michael@0: michael@0: if (!EmitTree(cx, bce, pn->pn_right)) michael@0: return false; michael@0: michael@0: SetJumpOffsetAt(bce, jmp); michael@0: if (!EmitLoopEntry(cx, bce, pn->pn_left)) michael@0: return false; michael@0: if (!EmitTree(cx, bce, pn->pn_left)) michael@0: return false; michael@0: michael@0: ptrdiff_t beq = EmitJump(cx, bce, JSOP_IFNE, top - bce->offset()); michael@0: if (beq < 0) michael@0: return false; michael@0: michael@0: if (!bce->tryNoteList.append(JSTRY_LOOP, bce->stackDepth, top, bce->offset())) michael@0: return false; michael@0: michael@0: if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, beq - jmp)) michael@0: return false; michael@0: michael@0: return PopStatementBCE(cx, bce); michael@0: } michael@0: michael@0: static bool michael@0: EmitBreak(ExclusiveContext *cx, BytecodeEmitter *bce, PropertyName *label) michael@0: { michael@0: StmtInfoBCE *stmt = bce->topStmt; michael@0: SrcNoteType noteType; michael@0: if (label) { michael@0: while (stmt->type != STMT_LABEL || stmt->label != label) michael@0: stmt = stmt->down; michael@0: noteType = SRC_BREAK2LABEL; michael@0: } else { michael@0: while (!stmt->isLoop() && stmt->type != STMT_SWITCH) michael@0: stmt = stmt->down; michael@0: noteType = (stmt->type == STMT_SWITCH) ? 
SRC_SWITCHBREAK : SRC_BREAK; michael@0: } michael@0: michael@0: return EmitGoto(cx, bce, stmt, &stmt->breaks, noteType) >= 0; michael@0: } michael@0: michael@0: static bool michael@0: EmitContinue(ExclusiveContext *cx, BytecodeEmitter *bce, PropertyName *label) michael@0: { michael@0: StmtInfoBCE *stmt = bce->topStmt; michael@0: if (label) { michael@0: /* Find the loop statement enclosed by the matching label. */ michael@0: StmtInfoBCE *loop = nullptr; michael@0: while (stmt->type != STMT_LABEL || stmt->label != label) { michael@0: if (stmt->isLoop()) michael@0: loop = stmt; michael@0: stmt = stmt->down; michael@0: } michael@0: stmt = loop; michael@0: } else { michael@0: while (!stmt->isLoop()) michael@0: stmt = stmt->down; michael@0: } michael@0: michael@0: return EmitGoto(cx, bce, stmt, &stmt->continues, SRC_CONTINUE) >= 0; michael@0: } michael@0: michael@0: static bool michael@0: EmitReturn(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) michael@0: { michael@0: if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin)) michael@0: return false; michael@0: michael@0: if (bce->sc->isFunctionBox() && bce->sc->asFunctionBox()->isStarGenerator()) { michael@0: if (!EmitPrepareIteratorResult(cx, bce)) michael@0: return false; michael@0: } michael@0: michael@0: /* Push a return value */ michael@0: if (ParseNode *pn2 = pn->pn_kid) { michael@0: if (!EmitTree(cx, bce, pn2)) michael@0: return false; michael@0: } else { michael@0: /* No explicit return value provided */ michael@0: if (Emit1(cx, bce, JSOP_UNDEFINED) < 0) michael@0: return false; michael@0: } michael@0: michael@0: if (bce->sc->isFunctionBox() && bce->sc->asFunctionBox()->isStarGenerator()) { michael@0: if (!EmitFinishIteratorResult(cx, bce, true)) michael@0: return false; michael@0: } michael@0: michael@0: /* michael@0: * EmitNonLocalJumpFixup may add fixup bytecode to close open try michael@0: * blocks having finally clauses and to exit intermingled let blocks. 
michael@0: * We can't simply transfer control flow to our caller in that case, michael@0: * because we must gosub to those finally clauses from inner to outer, michael@0: * with the correct stack pointer (i.e., after popping any with, michael@0: * for/in, etc., slots nested inside the finally's try). michael@0: * michael@0: * In this case we mutate JSOP_RETURN into JSOP_SETRVAL and add an michael@0: * extra JSOP_RETRVAL after the fixups. michael@0: */ michael@0: ptrdiff_t top = bce->offset(); michael@0: michael@0: if (Emit1(cx, bce, JSOP_RETURN) < 0) michael@0: return false; michael@0: michael@0: NonLocalExitScope nle(cx, bce); michael@0: michael@0: if (!nle.prepareForNonLocalJump(nullptr)) michael@0: return false; michael@0: michael@0: if (top + static_cast(JSOP_RETURN_LENGTH) != bce->offset()) { michael@0: bce->code()[top] = JSOP_SETRVAL; michael@0: if (Emit1(cx, bce, JSOP_RETRVAL) < 0) michael@0: return false; michael@0: } michael@0: michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: EmitYieldStar(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *iter) michael@0: { michael@0: JS_ASSERT(bce->sc->isFunctionBox()); michael@0: JS_ASSERT(bce->sc->asFunctionBox()->isStarGenerator()); michael@0: michael@0: if (!EmitTree(cx, bce, iter)) // ITERABLE michael@0: return false; michael@0: michael@0: // Convert iterable to iterator. michael@0: if (Emit1(cx, bce, JSOP_DUP) < 0) // ITERABLE ITERABLE michael@0: return false; michael@0: if (!EmitAtomOp(cx, cx->names().std_iterator, JSOP_CALLPROP, bce)) // ITERABLE @@ITERATOR michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_SWAP) < 0) // @@ITERATOR ITERABLE michael@0: return false; michael@0: if (EmitCall(cx, bce, JSOP_CALL, 0) < 0) // ITER michael@0: return false; michael@0: CheckTypeSet(cx, bce, JSOP_CALL); michael@0: michael@0: int depth = bce->stackDepth; michael@0: JS_ASSERT(depth >= 1); michael@0: michael@0: // Initial send value is undefined. 
michael@0: if (Emit1(cx, bce, JSOP_UNDEFINED) < 0) // ITER RECEIVED michael@0: return false; michael@0: ptrdiff_t initialSend = -1; michael@0: if (EmitBackPatchOp(cx, bce, &initialSend) < 0) // goto initialSend michael@0: return false; michael@0: michael@0: // Try prologue. // ITER RESULT michael@0: StmtInfoBCE stmtInfo(cx); michael@0: PushStatementBCE(bce, &stmtInfo, STMT_TRY, bce->offset()); michael@0: ptrdiff_t noteIndex = NewSrcNote(cx, bce, SRC_TRY); michael@0: if (noteIndex < 0 || Emit1(cx, bce, JSOP_TRY) < 0) michael@0: return false; michael@0: ptrdiff_t tryStart = bce->offset(); // tryStart: michael@0: JS_ASSERT(bce->stackDepth == depth + 1); michael@0: michael@0: // Yield RESULT as-is, without re-boxing. michael@0: if (Emit1(cx, bce, JSOP_YIELD) < 0) // ITER RECEIVED michael@0: return false; michael@0: michael@0: // Try epilogue. michael@0: if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, bce->offset() - tryStart + JSOP_TRY_LENGTH)) michael@0: return false; michael@0: ptrdiff_t subsequentSend = -1; michael@0: if (EmitBackPatchOp(cx, bce, &subsequentSend) < 0) // goto subsequentSend michael@0: return false; michael@0: ptrdiff_t tryEnd = bce->offset(); // tryEnd: michael@0: michael@0: // Catch location. michael@0: // THROW? = 'throw' in ITER // ITER michael@0: bce->stackDepth = (uint32_t) depth; michael@0: if (Emit1(cx, bce, JSOP_EXCEPTION) < 0) // ITER EXCEPTION michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_SWAP) < 0) // EXCEPTION ITER michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_DUP) < 0) // EXCEPTION ITER ITER michael@0: return false; michael@0: if (!EmitAtomOp(cx, cx->names().throw_, JSOP_STRING, bce)) // EXCEPTION ITER ITER "throw" michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_SWAP) < 0) // EXCEPTION ITER "throw" ITER michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_IN) < 0) // EXCEPTION ITER THROW? michael@0: return false; michael@0: // if (THROW?) 
goto delegate michael@0: ptrdiff_t checkThrow = EmitJump(cx, bce, JSOP_IFNE, 0); // EXCEPTION ITER michael@0: if (checkThrow < 0) michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_POP) < 0) // EXCEPTION michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_THROW) < 0) // throw EXCEPTION michael@0: return false; michael@0: michael@0: SetJumpOffsetAt(bce, checkThrow); // delegate: michael@0: // RESULT = ITER.throw(EXCEPTION) // EXCEPTION ITER michael@0: bce->stackDepth = (uint32_t) depth + 1; michael@0: if (Emit1(cx, bce, JSOP_DUP) < 0) // EXCEPTION ITER ITER michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_DUP) < 0) // EXCEPTION ITER ITER ITER michael@0: return false; michael@0: if (!EmitAtomOp(cx, cx->names().throw_, JSOP_CALLPROP, bce)) // EXCEPTION ITER ITER THROW michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_SWAP) < 0) // EXCEPTION ITER THROW ITER michael@0: return false; michael@0: if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)3) < 0) // ITER THROW ITER EXCEPTION michael@0: return false; michael@0: if (EmitCall(cx, bce, JSOP_CALL, 1) < 0) // ITER RESULT michael@0: return false; michael@0: CheckTypeSet(cx, bce, JSOP_CALL); michael@0: JS_ASSERT(bce->stackDepth == depth + 1); michael@0: ptrdiff_t checkResult = -1; michael@0: if (EmitBackPatchOp(cx, bce, &checkResult) < 0) // goto checkResult michael@0: return false; michael@0: michael@0: // Catch epilogue. michael@0: if (!PopStatementBCE(cx, bce)) michael@0: return false; michael@0: // This is a peace offering to ReconstructPCStack. See the note in EmitTry. michael@0: if (Emit1(cx, bce, JSOP_NOP) < 0) michael@0: return false; michael@0: if (!bce->tryNoteList.append(JSTRY_CATCH, depth, tryStart, tryEnd)) michael@0: return false; michael@0: michael@0: // After the try/catch block: send the received value to the iterator. 
michael@0: if (!BackPatch(cx, bce, initialSend, bce->code().end(), JSOP_GOTO)) // initialSend: michael@0: return false; michael@0: if (!BackPatch(cx, bce, subsequentSend, bce->code().end(), JSOP_GOTO)) // subsequentSend: michael@0: return false; michael@0: michael@0: // Send location. michael@0: // result = iter.next(received) // ITER RECEIVED michael@0: if (Emit1(cx, bce, JSOP_SWAP) < 0) // RECEIVED ITER michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_DUP) < 0) // RECEIVED ITER ITER michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_DUP) < 0) // RECEIVED ITER ITER ITER michael@0: return false; michael@0: if (!EmitAtomOp(cx, cx->names().next, JSOP_CALLPROP, bce)) // RECEIVED ITER ITER NEXT michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_SWAP) < 0) // RECEIVED ITER NEXT ITER michael@0: return false; michael@0: if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)3) < 0) // ITER NEXT ITER RECEIVED michael@0: return false; michael@0: if (EmitCall(cx, bce, JSOP_CALL, 1) < 0) // ITER RESULT michael@0: return false; michael@0: CheckTypeSet(cx, bce, JSOP_CALL); michael@0: JS_ASSERT(bce->stackDepth == depth + 1); michael@0: michael@0: if (!BackPatch(cx, bce, checkResult, bce->code().end(), JSOP_GOTO)) // checkResult: michael@0: return false; michael@0: // if (!result.done) goto tryStart; // ITER RESULT michael@0: if (Emit1(cx, bce, JSOP_DUP) < 0) // ITER RESULT RESULT michael@0: return false; michael@0: if (!EmitAtomOp(cx, cx->names().done, JSOP_GETPROP, bce)) // ITER RESULT DONE michael@0: return false; michael@0: // if (!DONE) goto tryStart; michael@0: if (EmitJump(cx, bce, JSOP_IFEQ, tryStart - bce->offset()) < 0) // ITER RESULT michael@0: return false; michael@0: michael@0: // result.value michael@0: if (Emit1(cx, bce, JSOP_SWAP) < 0) // RESULT ITER michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_POP) < 0) // RESULT michael@0: return false; michael@0: if (!EmitAtomOp(cx, cx->names().value, JSOP_GETPROP, bce)) // VALUE michael@0: return 
false; michael@0: michael@0: JS_ASSERT(bce->stackDepth == depth); michael@0: michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: EmitStatementList(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t top) michael@0: { michael@0: JS_ASSERT(pn->isArity(PN_LIST)); michael@0: michael@0: StmtInfoBCE stmtInfo(cx); michael@0: PushStatementBCE(bce, &stmtInfo, STMT_BLOCK, top); michael@0: michael@0: ParseNode *pnchild = pn->pn_head; michael@0: michael@0: if (pn->pn_xflags & PNX_DESTRUCT) michael@0: pnchild = pnchild->pn_next; michael@0: michael@0: for (ParseNode *pn2 = pnchild; pn2; pn2 = pn2->pn_next) { michael@0: if (!EmitTree(cx, bce, pn2)) michael@0: return false; michael@0: } michael@0: michael@0: return PopStatementBCE(cx, bce); michael@0: } michael@0: michael@0: static bool michael@0: EmitStatement(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) michael@0: { michael@0: JS_ASSERT(pn->isKind(PNK_SEMI)); michael@0: michael@0: ParseNode *pn2 = pn->pn_kid; michael@0: if (!pn2) michael@0: return true; michael@0: michael@0: if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin)) michael@0: return false; michael@0: michael@0: /* michael@0: * Top-level or called-from-a-native JS_Execute/EvaluateScript, michael@0: * debugger, and eval frames may need the value of the ultimate michael@0: * expression statement as the script's result, despite the fact michael@0: * that it appears useless to the compiler. michael@0: * michael@0: * API users may also set the JSOPTION_NO_SCRIPT_RVAL option when michael@0: * calling JS_Compile* to suppress JSOP_SETRVAL. michael@0: */ michael@0: bool wantval = false; michael@0: bool useful = false; michael@0: if (bce->sc->isFunctionBox()) { michael@0: JS_ASSERT(!bce->script->noScriptRval()); michael@0: } else { michael@0: useful = wantval = !bce->script->noScriptRval(); michael@0: } michael@0: michael@0: /* Don't eliminate expressions with side effects. 
*/ michael@0: if (!useful) { michael@0: if (!CheckSideEffects(cx, bce, pn2, &useful)) michael@0: return false; michael@0: michael@0: /* michael@0: * Don't eliminate apparently useless expressions if they are michael@0: * labeled expression statements. The pc->topStmt->update test michael@0: * catches the case where we are nesting in EmitTree for a labeled michael@0: * compound statement. michael@0: */ michael@0: if (bce->topStmt && michael@0: bce->topStmt->type == STMT_LABEL && michael@0: bce->topStmt->update >= bce->offset()) michael@0: { michael@0: useful = true; michael@0: } michael@0: } michael@0: michael@0: if (useful) { michael@0: JSOp op = wantval ? JSOP_SETRVAL : JSOP_POP; michael@0: JS_ASSERT_IF(pn2->isKind(PNK_ASSIGN), pn2->isOp(JSOP_NOP)); michael@0: if (!wantval && michael@0: pn2->isKind(PNK_ASSIGN) && michael@0: !MaybeEmitGroupAssignment(cx, bce, op, pn2, GroupIsNotDecl, &op)) michael@0: { michael@0: return false; michael@0: } michael@0: if (op != JSOP_NOP) { michael@0: if (!EmitTree(cx, bce, pn2)) michael@0: return false; michael@0: if (Emit1(cx, bce, op) < 0) michael@0: return false; michael@0: } michael@0: } else if (!pn->isDirectivePrologueMember()) { michael@0: /* Don't complain about directive prologue members; just don't emit their code. */ michael@0: bce->current->currentLine = bce->parser->tokenStream.srcCoords.lineNum(pn2->pn_pos.begin); michael@0: bce->current->lastColumn = 0; michael@0: if (!bce->reportStrictWarning(pn2, JSMSG_USELESS_EXPR)) michael@0: return false; michael@0: } michael@0: michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: EmitDelete(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) michael@0: { michael@0: /* michael@0: * Under ECMA 3, deleting a non-reference returns true -- but alas we michael@0: * must evaluate the operand if it appears it might have side effects. 
michael@0: */ michael@0: ParseNode *pn2 = pn->pn_kid; michael@0: switch (pn2->getKind()) { michael@0: case PNK_NAME: michael@0: { michael@0: if (!BindNameToSlot(cx, bce, pn2)) michael@0: return false; michael@0: JSOp op = pn2->getOp(); michael@0: if (op == JSOP_FALSE) { michael@0: if (Emit1(cx, bce, op) < 0) michael@0: return false; michael@0: } else { michael@0: if (!EmitAtomOp(cx, pn2, op, bce)) michael@0: return false; michael@0: } michael@0: break; michael@0: } michael@0: case PNK_DOT: michael@0: if (!EmitPropOp(cx, pn2, JSOP_DELPROP, bce)) michael@0: return false; michael@0: break; michael@0: case PNK_ELEM: michael@0: if (!EmitElemOp(cx, pn2, JSOP_DELELEM, bce)) michael@0: return false; michael@0: break; michael@0: default: michael@0: { michael@0: /* michael@0: * If useless, just emit JSOP_TRUE; otherwise convert delete foo() michael@0: * to foo(), true (a comma expression). michael@0: */ michael@0: bool useful = false; michael@0: if (!CheckSideEffects(cx, bce, pn2, &useful)) michael@0: return false; michael@0: michael@0: if (useful) { michael@0: JS_ASSERT_IF(pn2->isKind(PNK_CALL), !(pn2->pn_xflags & PNX_SETCALL)); michael@0: if (!EmitTree(cx, bce, pn2)) michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_POP) < 0) michael@0: return false; michael@0: } michael@0: michael@0: if (Emit1(cx, bce, JSOP_TRUE) < 0) michael@0: return false; michael@0: } michael@0: } michael@0: michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: EmitArray(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, uint32_t count); michael@0: michael@0: static bool michael@0: EmitCallOrNew(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) michael@0: { michael@0: bool callop = pn->isKind(PNK_CALL); michael@0: michael@0: /* michael@0: * Emit callable invocation or operator new (constructor call) code. michael@0: * First, emit code for the left operand to evaluate the callable or michael@0: * constructable object expression. 
michael@0: * michael@0: * For operator new, we emit JSOP_GETPROP instead of JSOP_CALLPROP, etc. michael@0: * This is necessary to interpose the lambda-initialized method read michael@0: * barrier -- see the code in jsinterp.cpp for JSOP_LAMBDA followed by michael@0: * JSOP_{SET,INIT}PROP. michael@0: * michael@0: * Then (or in a call case that has no explicit reference-base michael@0: * object) we emit JSOP_UNDEFINED to produce the undefined |this| michael@0: * value required for calls (which non-strict mode functions michael@0: * will box into the global object). michael@0: */ michael@0: uint32_t argc = pn->pn_count - 1; michael@0: michael@0: if (argc >= ARGC_LIMIT) { michael@0: bce->parser->tokenStream.reportError(callop michael@0: ? JSMSG_TOO_MANY_FUN_ARGS michael@0: : JSMSG_TOO_MANY_CON_ARGS); michael@0: return false; michael@0: } michael@0: michael@0: bool emitArgs = true; michael@0: ParseNode *pn2 = pn->pn_head; michael@0: bool spread = JOF_OPTYPE(pn->getOp()) == JOF_BYTE; michael@0: switch (pn2->getKind()) { michael@0: case PNK_NAME: michael@0: if (bce->emitterMode == BytecodeEmitter::SelfHosting && michael@0: pn2->name() == cx->names().callFunction && michael@0: !spread) michael@0: { michael@0: /* michael@0: * Special-casing of callFunction to emit bytecode that directly michael@0: * invokes the callee with the correct |this| object and arguments. michael@0: * callFunction(fun, thisArg, arg0, arg1) thus becomes: michael@0: * - emit lookup for fun michael@0: * - emit lookup for thisArg michael@0: * - emit lookups for arg0, arg1 michael@0: * michael@0: * argc is set to the amount of actually emitted args and the michael@0: * emitting of args below is disabled by setting emitArgs to false. 
michael@0: */ michael@0: if (pn->pn_count < 3) { michael@0: bce->reportError(pn, JSMSG_MORE_ARGS_NEEDED, "callFunction", "1", "s"); michael@0: return false; michael@0: } michael@0: ParseNode *funNode = pn2->pn_next; michael@0: if (!EmitTree(cx, bce, funNode)) michael@0: return false; michael@0: ParseNode *thisArg = funNode->pn_next; michael@0: if (!EmitTree(cx, bce, thisArg)) michael@0: return false; michael@0: bool oldEmittingForInit = bce->emittingForInit; michael@0: bce->emittingForInit = false; michael@0: for (ParseNode *argpn = thisArg->pn_next; argpn; argpn = argpn->pn_next) { michael@0: if (!EmitTree(cx, bce, argpn)) michael@0: return false; michael@0: } michael@0: bce->emittingForInit = oldEmittingForInit; michael@0: argc -= 2; michael@0: emitArgs = false; michael@0: break; michael@0: } michael@0: if (!EmitNameOp(cx, bce, pn2, callop)) michael@0: return false; michael@0: break; michael@0: case PNK_DOT: michael@0: if (!EmitPropOp(cx, pn2, callop ? JSOP_CALLPROP : JSOP_GETPROP, bce)) michael@0: return false; michael@0: break; michael@0: case PNK_ELEM: michael@0: if (!EmitElemOp(cx, pn2, callop ? JSOP_CALLELEM : JSOP_GETELEM, bce)) michael@0: return false; michael@0: break; michael@0: case PNK_FUNCTION: michael@0: /* michael@0: * Top level lambdas which are immediately invoked should be michael@0: * treated as only running once. Every time they execute we will michael@0: * create new types and scripts for their contents, to increase michael@0: * the quality of type information within them and enable more michael@0: * backend optimizations. Note that this does not depend on the michael@0: * lambda being invoked at most once (it may be named or be michael@0: * accessed via foo.caller indirection), as multiple executions michael@0: * will just cause the inner scripts to be repeatedly cloned. 
michael@0: */ michael@0: JS_ASSERT(!bce->emittingRunOnceLambda); michael@0: if (bce->checkSingletonContext() || (!bce->isInLoop() && bce->isRunOnceLambda())) { michael@0: bce->emittingRunOnceLambda = true; michael@0: if (!EmitTree(cx, bce, pn2)) michael@0: return false; michael@0: bce->emittingRunOnceLambda = false; michael@0: } else { michael@0: if (!EmitTree(cx, bce, pn2)) michael@0: return false; michael@0: } michael@0: callop = false; michael@0: break; michael@0: default: michael@0: if (!EmitTree(cx, bce, pn2)) michael@0: return false; michael@0: callop = false; /* trigger JSOP_UNDEFINED after */ michael@0: break; michael@0: } michael@0: if (!callop) { michael@0: JSOp thisop = pn->isKind(PNK_GENEXP) ? JSOP_THIS : JSOP_UNDEFINED; michael@0: if (Emit1(cx, bce, thisop) < 0) michael@0: return false; michael@0: } michael@0: michael@0: if (emitArgs) { michael@0: /* michael@0: * Emit code for each argument in order, then emit the JSOP_*CALL or michael@0: * JSOP_NEW bytecode with a two-byte immediate telling how many args michael@0: * were pushed on the operand stack. 
michael@0: */ michael@0: bool oldEmittingForInit = bce->emittingForInit; michael@0: bce->emittingForInit = false; michael@0: if (!spread) { michael@0: for (ParseNode *pn3 = pn2->pn_next; pn3; pn3 = pn3->pn_next) { michael@0: if (!EmitTree(cx, bce, pn3)) michael@0: return false; michael@0: } michael@0: } else { michael@0: if (!EmitArray(cx, bce, pn2->pn_next, argc)) michael@0: return false; michael@0: } michael@0: bce->emittingForInit = oldEmittingForInit; michael@0: } michael@0: michael@0: if (!spread) { michael@0: if (EmitCall(cx, bce, pn->getOp(), argc) < 0) michael@0: return false; michael@0: } else { michael@0: if (Emit1(cx, bce, pn->getOp()) < 0) michael@0: return false; michael@0: } michael@0: CheckTypeSet(cx, bce, pn->getOp()); michael@0: if (pn->isOp(JSOP_EVAL) || pn->isOp(JSOP_SPREADEVAL)) { michael@0: uint32_t lineNum = bce->parser->tokenStream.srcCoords.lineNum(pn->pn_pos.begin); michael@0: EMIT_UINT16_IMM_OP(JSOP_LINENO, lineNum); michael@0: } michael@0: if (pn->pn_xflags & PNX_SETCALL) { michael@0: if (Emit1(cx, bce, JSOP_SETCALL) < 0) michael@0: return false; michael@0: } michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: EmitLogical(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) michael@0: { michael@0: /* michael@0: * JSOP_OR converts the operand on the stack to boolean, leaves the original michael@0: * value on the stack and jumps if true; otherwise it falls into the next michael@0: * bytecode, which pops the left operand and then evaluates the right operand. michael@0: * The jump goes around the right operand evaluation. michael@0: * michael@0: * JSOP_AND converts the operand on the stack to boolean and jumps if false; michael@0: * otherwise it falls into the right operand's bytecode. 
michael@0: */ michael@0: michael@0: if (pn->isArity(PN_BINARY)) { michael@0: if (!EmitTree(cx, bce, pn->pn_left)) michael@0: return false; michael@0: ptrdiff_t top = EmitJump(cx, bce, JSOP_BACKPATCH, 0); michael@0: if (top < 0) michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_POP) < 0) michael@0: return false; michael@0: if (!EmitTree(cx, bce, pn->pn_right)) michael@0: return false; michael@0: ptrdiff_t off = bce->offset(); michael@0: jsbytecode *pc = bce->code(top); michael@0: SET_JUMP_OFFSET(pc, off - top); michael@0: *pc = pn->getOp(); michael@0: return true; michael@0: } michael@0: michael@0: JS_ASSERT(pn->isArity(PN_LIST)); michael@0: JS_ASSERT(pn->pn_head->pn_next->pn_next); michael@0: michael@0: /* Left-associative operator chain: avoid too much recursion. */ michael@0: ParseNode *pn2 = pn->pn_head; michael@0: if (!EmitTree(cx, bce, pn2)) michael@0: return false; michael@0: ptrdiff_t top = EmitJump(cx, bce, JSOP_BACKPATCH, 0); michael@0: if (top < 0) michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_POP) < 0) michael@0: return false; michael@0: michael@0: /* Emit nodes between the head and the tail. 
*/ michael@0: ptrdiff_t jmp = top; michael@0: while ((pn2 = pn2->pn_next)->pn_next) { michael@0: if (!EmitTree(cx, bce, pn2)) michael@0: return false; michael@0: ptrdiff_t off = EmitJump(cx, bce, JSOP_BACKPATCH, 0); michael@0: if (off < 0) michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_POP) < 0) michael@0: return false; michael@0: SET_JUMP_OFFSET(bce->code(jmp), off - jmp); michael@0: jmp = off; michael@0: } michael@0: if (!EmitTree(cx, bce, pn2)) michael@0: return false; michael@0: michael@0: pn2 = pn->pn_head; michael@0: ptrdiff_t off = bce->offset(); michael@0: do { michael@0: jsbytecode *pc = bce->code(top); michael@0: ptrdiff_t tmp = GET_JUMP_OFFSET(pc); michael@0: SET_JUMP_OFFSET(pc, off - top); michael@0: *pc = pn->getOp(); michael@0: top += tmp; michael@0: } while ((pn2 = pn2->pn_next)->pn_next); michael@0: michael@0: return true; michael@0: } michael@0: michael@0: /* michael@0: * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See michael@0: * the comment on EmitSwitch. michael@0: */ michael@0: MOZ_NEVER_INLINE static bool michael@0: EmitIncOrDec(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) michael@0: { michael@0: /* Emit lvalue-specialized code for ++/-- operators. 
*/ michael@0: ParseNode *pn2 = pn->pn_kid; michael@0: switch (pn2->getKind()) { michael@0: case PNK_DOT: michael@0: if (!EmitPropIncDec(cx, pn, bce)) michael@0: return false; michael@0: break; michael@0: case PNK_ELEM: michael@0: if (!EmitElemIncDec(cx, pn, bce)) michael@0: return false; michael@0: break; michael@0: case PNK_CALL: michael@0: JS_ASSERT(pn2->pn_xflags & PNX_SETCALL); michael@0: if (!EmitTree(cx, bce, pn2)) michael@0: return false; michael@0: break; michael@0: default: michael@0: JS_ASSERT(pn2->isKind(PNK_NAME)); michael@0: pn2->setOp(JSOP_SETNAME); michael@0: if (!BindNameToSlot(cx, bce, pn2)) michael@0: return false; michael@0: JSOp op = pn2->getOp(); michael@0: bool maySet; michael@0: switch (op) { michael@0: case JSOP_SETLOCAL: michael@0: case JSOP_SETARG: michael@0: case JSOP_SETALIASEDVAR: michael@0: case JSOP_SETNAME: michael@0: case JSOP_SETGNAME: michael@0: maySet = true; michael@0: break; michael@0: default: michael@0: maySet = false; michael@0: } michael@0: if (op == JSOP_CALLEE) { michael@0: if (Emit1(cx, bce, op) < 0) michael@0: return false; michael@0: } else if (!pn2->pn_cookie.isFree()) { michael@0: if (maySet) { michael@0: if (!EmitVarIncDec(cx, pn, bce)) michael@0: return false; michael@0: } else { michael@0: if (!EmitVarOp(cx, pn2, op, bce)) michael@0: return false; michael@0: } michael@0: } else { michael@0: JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM); michael@0: if (maySet) { michael@0: if (!EmitNameIncDec(cx, pn, bce)) michael@0: return false; michael@0: } else { michael@0: if (!EmitAtomOp(cx, pn2, op, bce)) michael@0: return false; michael@0: } michael@0: break; michael@0: } michael@0: if (pn2->isConst()) { michael@0: if (Emit1(cx, bce, JSOP_POS) < 0) michael@0: return false; michael@0: bool post; michael@0: JSOp binop = GetIncDecInfo(pn->getKind(), &post); michael@0: if (!post) { michael@0: if (Emit1(cx, bce, JSOP_ONE) < 0) michael@0: return false; michael@0: if (Emit1(cx, bce, binop) < 0) michael@0: return false; michael@0: } 
michael@0: } michael@0: } michael@0: return true; michael@0: } michael@0: michael@0: /* michael@0: * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See michael@0: * the comment on EmitSwitch. michael@0: */ michael@0: MOZ_NEVER_INLINE static bool michael@0: EmitLabeledStatement(ExclusiveContext *cx, BytecodeEmitter *bce, const LabeledStatement *pn) michael@0: { michael@0: /* michael@0: * Emit a JSOP_LABEL instruction. The argument is the offset to the statement michael@0: * following the labeled statement. michael@0: */ michael@0: jsatomid index; michael@0: if (!bce->makeAtomIndex(pn->label(), &index)) michael@0: return false; michael@0: michael@0: ptrdiff_t top = EmitJump(cx, bce, JSOP_LABEL, 0); michael@0: if (top < 0) michael@0: return false; michael@0: michael@0: /* Emit code for the labeled statement. */ michael@0: StmtInfoBCE stmtInfo(cx); michael@0: PushStatementBCE(bce, &stmtInfo, STMT_LABEL, bce->offset()); michael@0: stmtInfo.label = pn->label(); michael@0: if (!EmitTree(cx, bce, pn->statement())) michael@0: return false; michael@0: if (!PopStatementBCE(cx, bce)) michael@0: return false; michael@0: michael@0: /* Patch the JSOP_LABEL offset. 
*/ michael@0: SetJumpOffsetAt(bce, top); michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: EmitSyntheticStatements(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t top) michael@0: { michael@0: JS_ASSERT(pn->isArity(PN_LIST)); michael@0: StmtInfoBCE stmtInfo(cx); michael@0: PushStatementBCE(bce, &stmtInfo, STMT_SEQ, top); michael@0: ParseNode *pn2 = pn->pn_head; michael@0: if (pn->pn_xflags & PNX_DESTRUCT) michael@0: pn2 = pn2->pn_next; michael@0: for (; pn2; pn2 = pn2->pn_next) { michael@0: if (!EmitTree(cx, bce, pn2)) michael@0: return false; michael@0: } michael@0: return PopStatementBCE(cx, bce); michael@0: } michael@0: michael@0: static bool michael@0: EmitConditionalExpression(ExclusiveContext *cx, BytecodeEmitter *bce, ConditionalExpression &conditional) michael@0: { michael@0: /* Emit the condition, then branch if false to the else part. */ michael@0: if (!EmitTree(cx, bce, &conditional.condition())) michael@0: return false; michael@0: ptrdiff_t noteIndex = NewSrcNote(cx, bce, SRC_COND); michael@0: if (noteIndex < 0) michael@0: return false; michael@0: ptrdiff_t beq = EmitJump(cx, bce, JSOP_IFEQ, 0); michael@0: if (beq < 0 || !EmitTree(cx, bce, &conditional.thenExpression())) michael@0: return false; michael@0: michael@0: /* Jump around else, fixup the branch, emit else, fixup jump. */ michael@0: ptrdiff_t jmp = EmitJump(cx, bce, JSOP_GOTO, 0); michael@0: if (jmp < 0) michael@0: return false; michael@0: SetJumpOffsetAt(bce, beq); michael@0: michael@0: /* michael@0: * Because each branch pushes a single value, but our stack budgeting michael@0: * analysis ignores branches, we now have to adjust bce->stackDepth to michael@0: * ignore the value pushed by the first branch. Execution will follow michael@0: * only one path, so we must decrement bce->stackDepth. 
michael@0: * michael@0: * Failing to do this will foil code, such as let expression and block michael@0: * code generation, which must use the stack depth to compute local michael@0: * stack indexes correctly. michael@0: */ michael@0: JS_ASSERT(bce->stackDepth > 0); michael@0: bce->stackDepth--; michael@0: if (!EmitTree(cx, bce, &conditional.elseExpression())) michael@0: return false; michael@0: SetJumpOffsetAt(bce, jmp); michael@0: return SetSrcNoteOffset(cx, bce, noteIndex, 0, jmp - beq); michael@0: } michael@0: michael@0: /* michael@0: * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See michael@0: * the comment on EmitSwitch. michael@0: */ michael@0: MOZ_NEVER_INLINE static bool michael@0: EmitObject(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) michael@0: { michael@0: if (pn->pn_xflags & PNX_DESTRUCT) { michael@0: bce->reportError(pn, JSMSG_BAD_OBJECT_INIT); michael@0: return false; michael@0: } michael@0: michael@0: if (!(pn->pn_xflags & PNX_NONCONST) && pn->pn_head && bce->checkSingletonContext()) michael@0: return EmitSingletonInitialiser(cx, bce, pn); michael@0: michael@0: /* michael@0: * Emit code for {p:a, '%q':b, 2:c} that is equivalent to constructing michael@0: * a new object and defining (in source order) each property on the object michael@0: * (or mutating the object's [[Prototype]], in the case of __proto__). michael@0: */ michael@0: ptrdiff_t offset = bce->offset(); michael@0: if (!EmitNewInit(cx, bce, JSProto_Object)) michael@0: return false; michael@0: michael@0: /* michael@0: * Try to construct the shape of the object as we go, so we can emit a michael@0: * JSOP_NEWOBJECT with the final shape instead. 
michael@0: */ michael@0: RootedObject obj(cx); michael@0: if (bce->script->compileAndGo()) { michael@0: gc::AllocKind kind = GuessObjectGCKind(pn->pn_count); michael@0: obj = NewBuiltinClassInstance(cx, &JSObject::class_, kind, TenuredObject); michael@0: if (!obj) michael@0: return false; michael@0: } michael@0: michael@0: for (ParseNode *pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) { michael@0: if (!UpdateSourceCoordNotes(cx, bce, pn2->pn_pos.begin)) michael@0: return false; michael@0: michael@0: /* Emit an index for t[2] for later consumption by JSOP_INITELEM. */ michael@0: ParseNode *pn3 = pn2->pn_left; michael@0: bool isIndex = false; michael@0: if (pn3->isKind(PNK_NUMBER)) { michael@0: if (!EmitNumberOp(cx, pn3->pn_dval, bce)) michael@0: return false; michael@0: isIndex = true; michael@0: } else { michael@0: // The parser already checked for atoms representing indexes and michael@0: // used PNK_NUMBER instead, but also watch for ids which TI treats michael@0: // as indexes for simpliciation of downstream analysis. michael@0: JS_ASSERT(pn3->isKind(PNK_NAME) || pn3->isKind(PNK_STRING)); michael@0: jsid id = NameToId(pn3->pn_atom->asPropertyName()); michael@0: if (id != types::IdToTypeId(id)) { michael@0: if (!EmitTree(cx, bce, pn3)) michael@0: return false; michael@0: isIndex = true; michael@0: } michael@0: } michael@0: michael@0: /* Emit code for the property initializer. 
*/ michael@0: if (!EmitTree(cx, bce, pn2->pn_right)) michael@0: return false; michael@0: michael@0: JSOp op = pn2->getOp(); michael@0: JS_ASSERT(op == JSOP_INITPROP || michael@0: op == JSOP_INITPROP_GETTER || michael@0: op == JSOP_INITPROP_SETTER); michael@0: michael@0: if (op == JSOP_INITPROP_GETTER || op == JSOP_INITPROP_SETTER) michael@0: obj = nullptr; michael@0: michael@0: if (isIndex) { michael@0: obj = nullptr; michael@0: switch (op) { michael@0: case JSOP_INITPROP: op = JSOP_INITELEM; break; michael@0: case JSOP_INITPROP_GETTER: op = JSOP_INITELEM_GETTER; break; michael@0: case JSOP_INITPROP_SETTER: op = JSOP_INITELEM_SETTER; break; michael@0: default: MOZ_ASSUME_UNREACHABLE("Invalid op"); michael@0: } michael@0: if (Emit1(cx, bce, op) < 0) michael@0: return false; michael@0: } else { michael@0: JS_ASSERT(pn3->isKind(PNK_NAME) || pn3->isKind(PNK_STRING)); michael@0: michael@0: // If we have { __proto__: expr }, implement prototype mutation. michael@0: if (op == JSOP_INITPROP && pn3->pn_atom == cx->names().proto) { michael@0: obj = nullptr; michael@0: if (Emit1(cx, bce, JSOP_MUTATEPROTO) < 0) michael@0: return false; michael@0: continue; michael@0: } michael@0: michael@0: jsatomid index; michael@0: if (!bce->makeAtomIndex(pn3->pn_atom, &index)) michael@0: return false; michael@0: michael@0: MOZ_ASSERT(op == JSOP_INITPROP || michael@0: op == JSOP_INITPROP_GETTER || michael@0: op == JSOP_INITPROP_SETTER); michael@0: michael@0: if (obj) { michael@0: JS_ASSERT(!obj->inDictionaryMode()); michael@0: Rooted id(cx, AtomToId(pn3->pn_atom)); michael@0: RootedValue undefinedValue(cx, UndefinedValue()); michael@0: if (!DefineNativeProperty(cx, obj, id, undefinedValue, nullptr, michael@0: nullptr, JSPROP_ENUMERATE)) michael@0: { michael@0: return false; michael@0: } michael@0: if (obj->inDictionaryMode()) michael@0: obj = nullptr; michael@0: } michael@0: michael@0: if (!EmitIndex32(cx, op, index, bce)) michael@0: return false; michael@0: } michael@0: } michael@0: 
michael@0: if (Emit1(cx, bce, JSOP_ENDINIT) < 0) michael@0: return false; michael@0: michael@0: if (obj) { michael@0: /* michael@0: * The object survived and has a predictable shape: update the original michael@0: * bytecode. michael@0: */ michael@0: ObjectBox *objbox = bce->parser->newObjectBox(obj); michael@0: if (!objbox) michael@0: return false; michael@0: michael@0: static_assert(JSOP_NEWINIT_LENGTH == JSOP_NEWOBJECT_LENGTH, michael@0: "newinit and newobject must have equal length to edit in-place"); michael@0: michael@0: uint32_t index = bce->objectList.add(objbox); michael@0: jsbytecode *code = bce->code(offset); michael@0: code[0] = JSOP_NEWOBJECT; michael@0: code[1] = jsbytecode(index >> 24); michael@0: code[2] = jsbytecode(index >> 16); michael@0: code[3] = jsbytecode(index >> 8); michael@0: code[4] = jsbytecode(index); michael@0: } michael@0: michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: EmitArrayComp(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) michael@0: { michael@0: if (!EmitNewInit(cx, bce, JSProto_Array)) michael@0: return false; michael@0: michael@0: /* michael@0: * Pass the new array's stack index to the PNK_ARRAYPUSH case via michael@0: * bce->arrayCompDepth, then simply traverse the PNK_FOR node and michael@0: * its kids under pn2 to generate this comprehension. michael@0: */ michael@0: JS_ASSERT(bce->stackDepth > 0); michael@0: uint32_t saveDepth = bce->arrayCompDepth; michael@0: bce->arrayCompDepth = (uint32_t) (bce->stackDepth - 1); michael@0: if (!EmitTree(cx, bce, pn->pn_head)) michael@0: return false; michael@0: bce->arrayCompDepth = saveDepth; michael@0: michael@0: /* Emit the usual op needed for decompilation. 
*/ michael@0: return Emit1(cx, bce, JSOP_ENDINIT) >= 0; michael@0: } michael@0: michael@0: static bool michael@0: EmitArray(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, uint32_t count) michael@0: { michael@0: /* michael@0: * Emit code for [a, b, c] that is equivalent to constructing a new michael@0: * array and in source order evaluating each element value and adding michael@0: * it to the array, without invoking latent setters. We use the michael@0: * JSOP_NEWINIT and JSOP_INITELEM_ARRAY bytecodes to ignore setters and michael@0: * to avoid dup'ing and popping the array as each element is added, as michael@0: * JSOP_SETELEM/JSOP_SETPROP would do. michael@0: */ michael@0: michael@0: int32_t nspread = 0; michael@0: for (ParseNode *elt = pn; elt; elt = elt->pn_next) { michael@0: if (elt->isKind(PNK_SPREAD)) michael@0: nspread++; michael@0: } michael@0: michael@0: ptrdiff_t off = EmitN(cx, bce, JSOP_NEWARRAY, 3); michael@0: if (off < 0) michael@0: return false; michael@0: CheckTypeSet(cx, bce, JSOP_NEWARRAY); michael@0: jsbytecode *pc = bce->code(off); michael@0: michael@0: // For arrays with spread, this is a very pessimistic allocation, the michael@0: // minimum possible final size. michael@0: SET_UINT24(pc, count - nspread); michael@0: michael@0: ParseNode *pn2 = pn; michael@0: jsatomid atomIndex; michael@0: if (nspread && !EmitNumberOp(cx, 0, bce)) michael@0: return false; michael@0: for (atomIndex = 0; pn2; atomIndex++, pn2 = pn2->pn_next) { michael@0: if (!UpdateSourceCoordNotes(cx, bce, pn2->pn_pos.begin)) michael@0: return false; michael@0: if (pn2->isKind(PNK_ELISION)) { michael@0: if (Emit1(cx, bce, JSOP_HOLE) < 0) michael@0: return false; michael@0: } else { michael@0: ParseNode *expr = pn2->isKind(PNK_SPREAD) ? 
pn2->pn_kid : pn2; michael@0: if (!EmitTree(cx, bce, expr)) michael@0: return false; michael@0: } michael@0: if (pn2->isKind(PNK_SPREAD)) { michael@0: if (Emit1(cx, bce, JSOP_SPREAD) < 0) michael@0: return false; michael@0: } else if (nspread) { michael@0: if (Emit1(cx, bce, JSOP_INITELEM_INC) < 0) michael@0: return false; michael@0: } else { michael@0: off = EmitN(cx, bce, JSOP_INITELEM_ARRAY, 3); michael@0: if (off < 0) michael@0: return false; michael@0: SET_UINT24(bce->code(off), atomIndex); michael@0: } michael@0: } michael@0: JS_ASSERT(atomIndex == count); michael@0: if (nspread) { michael@0: if (Emit1(cx, bce, JSOP_POP) < 0) michael@0: return false; michael@0: } michael@0: michael@0: /* Emit an op to finish the array and aid in decompilation. */ michael@0: return Emit1(cx, bce, JSOP_ENDINIT) >= 0; michael@0: } michael@0: michael@0: static bool michael@0: EmitUnary(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) michael@0: { michael@0: if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin)) michael@0: return false; michael@0: /* Unary op, including unary +/-. 
*/ michael@0: JSOp op = pn->getOp(); michael@0: ParseNode *pn2 = pn->pn_kid; michael@0: michael@0: if (op == JSOP_TYPEOF && !pn2->isKind(PNK_NAME)) michael@0: op = JSOP_TYPEOFEXPR; michael@0: michael@0: bool oldEmittingForInit = bce->emittingForInit; michael@0: bce->emittingForInit = false; michael@0: if (!EmitTree(cx, bce, pn2)) michael@0: return false; michael@0: michael@0: bce->emittingForInit = oldEmittingForInit; michael@0: return Emit1(cx, bce, op) >= 0; michael@0: } michael@0: michael@0: static bool michael@0: EmitDefaults(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) michael@0: { michael@0: JS_ASSERT(pn->isKind(PNK_ARGSBODY)); michael@0: michael@0: ParseNode *arg, *pnlast = pn->last(); michael@0: for (arg = pn->pn_head; arg != pnlast; arg = arg->pn_next) { michael@0: if (!(arg->pn_dflags & PND_DEFAULT) || !arg->isKind(PNK_NAME)) michael@0: continue; michael@0: if (!BindNameToSlot(cx, bce, arg)) michael@0: return false; michael@0: if (!EmitVarOp(cx, arg, JSOP_GETARG, bce)) michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_UNDEFINED) < 0) michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_STRICTEQ) < 0) michael@0: return false; michael@0: // Emit source note to enable ion compilation. 
michael@0: if (NewSrcNote(cx, bce, SRC_IF) < 0) michael@0: return false; michael@0: ptrdiff_t jump = EmitJump(cx, bce, JSOP_IFEQ, 0); michael@0: if (jump < 0) michael@0: return false; michael@0: if (!EmitTree(cx, bce, arg->expr())) michael@0: return false; michael@0: if (!EmitVarOp(cx, arg, JSOP_SETARG, bce)) michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_POP) < 0) michael@0: return false; michael@0: SET_JUMP_OFFSET(bce->code(jump), bce->offset() - jump); michael@0: } michael@0: michael@0: return true; michael@0: } michael@0: michael@0: bool michael@0: frontend::EmitTree(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) michael@0: { michael@0: JS_CHECK_RECURSION(cx, return false); michael@0: michael@0: EmitLevelManager elm(bce); michael@0: michael@0: bool ok = true; michael@0: ptrdiff_t top = bce->offset(); michael@0: pn->pn_offset = top; michael@0: michael@0: /* Emit notes to tell the current bytecode's source line number. */ michael@0: if (!UpdateLineNumberNotes(cx, bce, pn->pn_pos.begin)) michael@0: return false; michael@0: michael@0: switch (pn->getKind()) { michael@0: case PNK_FUNCTION: michael@0: ok = EmitFunc(cx, bce, pn); michael@0: break; michael@0: michael@0: case PNK_ARGSBODY: michael@0: { michael@0: RootedFunction fun(cx, bce->sc->asFunctionBox()->function()); michael@0: ParseNode *pnlast = pn->last(); michael@0: michael@0: // Carefully emit everything in the right order: michael@0: // 1. Destructuring michael@0: // 2. Functions michael@0: // 3. Defaults michael@0: ParseNode *pnchild = pnlast->pn_head; michael@0: if (pnlast->pn_xflags & PNX_DESTRUCT) { michael@0: // Assign the destructuring arguments before defining any functions, michael@0: // see bug 419662. 
michael@0: JS_ASSERT(pnchild->isKind(PNK_SEMI)); michael@0: JS_ASSERT(pnchild->pn_kid->isKind(PNK_VAR) || pnchild->pn_kid->isKind(PNK_CONST)); michael@0: if (!EmitTree(cx, bce, pnchild)) michael@0: return false; michael@0: pnchild = pnchild->pn_next; michael@0: } michael@0: if (pnlast->pn_xflags & PNX_FUNCDEFS) { michael@0: // This block contains top-level function definitions. To ensure michael@0: // that we emit the bytecode defining them before the rest of code michael@0: // in the block we use a separate pass over functions. During the michael@0: // main pass later the emitter will add JSOP_NOP with source notes michael@0: // for the function to preserve the original functions position michael@0: // when decompiling. michael@0: // michael@0: // Currently this is used only for functions, as compile-as-we go michael@0: // mode for scripts does not allow separate emitter passes. michael@0: for (ParseNode *pn2 = pnchild; pn2; pn2 = pn2->pn_next) { michael@0: if (pn2->isKind(PNK_FUNCTION) && pn2->functionIsHoisted()) { michael@0: if (!EmitTree(cx, bce, pn2)) michael@0: return false; michael@0: } michael@0: } michael@0: } michael@0: bool hasDefaults = bce->sc->asFunctionBox()->hasDefaults(); michael@0: if (hasDefaults) { michael@0: ParseNode *rest = nullptr; michael@0: bool restIsDefn = false; michael@0: if (fun->hasRest()) { michael@0: JS_ASSERT(!bce->sc->asFunctionBox()->argumentsHasLocalBinding()); michael@0: michael@0: // Defaults with a rest parameter need special handling. The michael@0: // rest parameter needs to be undefined while defaults are being michael@0: // processed. To do this, we create the rest argument and let it michael@0: // sit on the stack while processing defaults. The rest michael@0: // parameter's slot is set to undefined for the course of michael@0: // default processing. 
michael@0: rest = pn->pn_head; michael@0: while (rest->pn_next != pnlast) michael@0: rest = rest->pn_next; michael@0: restIsDefn = rest->isDefn(); michael@0: if (Emit1(cx, bce, JSOP_REST) < 0) michael@0: return false; michael@0: CheckTypeSet(cx, bce, JSOP_REST); michael@0: michael@0: // Only set the rest parameter if it's not aliased by a nested michael@0: // function in the body. michael@0: if (restIsDefn) { michael@0: if (Emit1(cx, bce, JSOP_UNDEFINED) < 0) michael@0: return false; michael@0: if (!BindNameToSlot(cx, bce, rest)) michael@0: return false; michael@0: if (!EmitVarOp(cx, rest, JSOP_SETARG, bce)) michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_POP) < 0) michael@0: return false; michael@0: } michael@0: } michael@0: if (!EmitDefaults(cx, bce, pn)) michael@0: return false; michael@0: if (fun->hasRest()) { michael@0: if (restIsDefn && !EmitVarOp(cx, rest, JSOP_SETARG, bce)) michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_POP) < 0) michael@0: return false; michael@0: } michael@0: } michael@0: for (ParseNode *pn2 = pn->pn_head; pn2 != pnlast; pn2 = pn2->pn_next) { michael@0: // Only bind the parameter if it's not aliased by a nested function michael@0: // in the body. michael@0: if (!pn2->isDefn()) michael@0: continue; michael@0: if (!BindNameToSlot(cx, bce, pn2)) michael@0: return false; michael@0: if (pn2->pn_next == pnlast && fun->hasRest() && !hasDefaults) { michael@0: // Fill rest parameter. We handled the case with defaults above. 
michael@0: JS_ASSERT(!bce->sc->asFunctionBox()->argumentsHasLocalBinding()); michael@0: bce->switchToProlog(); michael@0: if (Emit1(cx, bce, JSOP_REST) < 0) michael@0: return false; michael@0: CheckTypeSet(cx, bce, JSOP_REST); michael@0: if (!EmitVarOp(cx, pn2, JSOP_SETARG, bce)) michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_POP) < 0) michael@0: return false; michael@0: bce->switchToMain(); michael@0: } michael@0: } michael@0: ok = EmitTree(cx, bce, pnlast); michael@0: break; michael@0: } michael@0: michael@0: case PNK_IF: michael@0: ok = EmitIf(cx, bce, pn); michael@0: break; michael@0: michael@0: case PNK_SWITCH: michael@0: ok = EmitSwitch(cx, bce, pn); michael@0: break; michael@0: michael@0: case PNK_WHILE: michael@0: ok = EmitWhile(cx, bce, pn, top); michael@0: break; michael@0: michael@0: case PNK_DOWHILE: michael@0: ok = EmitDo(cx, bce, pn); michael@0: break; michael@0: michael@0: case PNK_FOR: michael@0: ok = EmitFor(cx, bce, pn, top); michael@0: break; michael@0: michael@0: case PNK_BREAK: michael@0: ok = EmitBreak(cx, bce, pn->as().label()); michael@0: break; michael@0: michael@0: case PNK_CONTINUE: michael@0: ok = EmitContinue(cx, bce, pn->as().label()); michael@0: break; michael@0: michael@0: case PNK_WITH: michael@0: ok = EmitWith(cx, bce, pn); michael@0: break; michael@0: michael@0: case PNK_TRY: michael@0: if (!EmitTry(cx, bce, pn)) michael@0: return false; michael@0: break; michael@0: michael@0: case PNK_CATCH: michael@0: if (!EmitCatch(cx, bce, pn)) michael@0: return false; michael@0: break; michael@0: michael@0: case PNK_VAR: michael@0: case PNK_CONST: michael@0: if (!EmitVariables(cx, bce, pn, InitializeVars)) michael@0: return false; michael@0: break; michael@0: michael@0: case PNK_RETURN: michael@0: ok = EmitReturn(cx, bce, pn); michael@0: break; michael@0: michael@0: case PNK_YIELD_STAR: michael@0: ok = EmitYieldStar(cx, bce, pn->pn_kid); michael@0: break; michael@0: michael@0: case PNK_YIELD: michael@0: 
JS_ASSERT(bce->sc->isFunctionBox()); michael@0: if (bce->sc->asFunctionBox()->isStarGenerator()) { michael@0: if (!EmitPrepareIteratorResult(cx, bce)) michael@0: return false; michael@0: } michael@0: if (pn->pn_kid) { michael@0: if (!EmitTree(cx, bce, pn->pn_kid)) michael@0: return false; michael@0: } else { michael@0: if (Emit1(cx, bce, JSOP_UNDEFINED) < 0) michael@0: return false; michael@0: } michael@0: if (bce->sc->asFunctionBox()->isStarGenerator()) { michael@0: if (!EmitFinishIteratorResult(cx, bce, false)) michael@0: return false; michael@0: } michael@0: if (Emit1(cx, bce, JSOP_YIELD) < 0) michael@0: return false; michael@0: break; michael@0: michael@0: case PNK_STATEMENTLIST: michael@0: ok = EmitStatementList(cx, bce, pn, top); michael@0: break; michael@0: michael@0: case PNK_SEQ: michael@0: ok = EmitSyntheticStatements(cx, bce, pn, top); michael@0: break; michael@0: michael@0: case PNK_SEMI: michael@0: ok = EmitStatement(cx, bce, pn); michael@0: break; michael@0: michael@0: case PNK_LABEL: michael@0: ok = EmitLabeledStatement(cx, bce, &pn->as()); michael@0: break; michael@0: michael@0: case PNK_COMMA: michael@0: { michael@0: for (ParseNode *pn2 = pn->pn_head; ; pn2 = pn2->pn_next) { michael@0: if (!UpdateSourceCoordNotes(cx, bce, pn2->pn_pos.begin)) michael@0: return false; michael@0: if (!EmitTree(cx, bce, pn2)) michael@0: return false; michael@0: if (!pn2->pn_next) michael@0: break; michael@0: if (Emit1(cx, bce, JSOP_POP) < 0) michael@0: return false; michael@0: } michael@0: break; michael@0: } michael@0: michael@0: case PNK_ASSIGN: michael@0: case PNK_ADDASSIGN: michael@0: case PNK_SUBASSIGN: michael@0: case PNK_BITORASSIGN: michael@0: case PNK_BITXORASSIGN: michael@0: case PNK_BITANDASSIGN: michael@0: case PNK_LSHASSIGN: michael@0: case PNK_RSHASSIGN: michael@0: case PNK_URSHASSIGN: michael@0: case PNK_MULASSIGN: michael@0: case PNK_DIVASSIGN: michael@0: case PNK_MODASSIGN: michael@0: if (!EmitAssignment(cx, bce, pn->pn_left, pn->getOp(), 
pn->pn_right)) michael@0: return false; michael@0: break; michael@0: michael@0: case PNK_CONDITIONAL: michael@0: ok = EmitConditionalExpression(cx, bce, pn->as()); michael@0: break; michael@0: michael@0: case PNK_OR: michael@0: case PNK_AND: michael@0: ok = EmitLogical(cx, bce, pn); michael@0: break; michael@0: michael@0: case PNK_ADD: michael@0: case PNK_SUB: michael@0: case PNK_BITOR: michael@0: case PNK_BITXOR: michael@0: case PNK_BITAND: michael@0: case PNK_STRICTEQ: michael@0: case PNK_EQ: michael@0: case PNK_STRICTNE: michael@0: case PNK_NE: michael@0: case PNK_LT: michael@0: case PNK_LE: michael@0: case PNK_GT: michael@0: case PNK_GE: michael@0: case PNK_IN: michael@0: case PNK_INSTANCEOF: michael@0: case PNK_LSH: michael@0: case PNK_RSH: michael@0: case PNK_URSH: michael@0: case PNK_STAR: michael@0: case PNK_DIV: michael@0: case PNK_MOD: michael@0: if (pn->isArity(PN_LIST)) { michael@0: /* Left-associative operator chain: avoid too much recursion. */ michael@0: ParseNode *pn2 = pn->pn_head; michael@0: if (!EmitTree(cx, bce, pn2)) michael@0: return false; michael@0: JSOp op = pn->getOp(); michael@0: while ((pn2 = pn2->pn_next) != nullptr) { michael@0: if (!EmitTree(cx, bce, pn2)) michael@0: return false; michael@0: if (Emit1(cx, bce, op) < 0) michael@0: return false; michael@0: } michael@0: } else { michael@0: /* Binary operators that evaluate both operands unconditionally. 
*/ michael@0: if (!EmitTree(cx, bce, pn->pn_left)) michael@0: return false; michael@0: if (!EmitTree(cx, bce, pn->pn_right)) michael@0: return false; michael@0: if (Emit1(cx, bce, pn->getOp()) < 0) michael@0: return false; michael@0: } michael@0: break; michael@0: michael@0: case PNK_THROW: michael@0: case PNK_TYPEOF: michael@0: case PNK_VOID: michael@0: case PNK_NOT: michael@0: case PNK_BITNOT: michael@0: case PNK_POS: michael@0: case PNK_NEG: michael@0: ok = EmitUnary(cx, bce, pn); michael@0: break; michael@0: michael@0: case PNK_PREINCREMENT: michael@0: case PNK_PREDECREMENT: michael@0: case PNK_POSTINCREMENT: michael@0: case PNK_POSTDECREMENT: michael@0: ok = EmitIncOrDec(cx, bce, pn); michael@0: break; michael@0: michael@0: case PNK_DELETE: michael@0: ok = EmitDelete(cx, bce, pn); michael@0: break; michael@0: michael@0: case PNK_DOT: michael@0: ok = EmitPropOp(cx, pn, JSOP_GETPROP, bce); michael@0: break; michael@0: michael@0: case PNK_ELEM: michael@0: ok = EmitElemOp(cx, pn, JSOP_GETELEM, bce); michael@0: break; michael@0: michael@0: case PNK_NEW: michael@0: case PNK_CALL: michael@0: case PNK_GENEXP: michael@0: ok = EmitCallOrNew(cx, bce, pn); michael@0: break; michael@0: michael@0: case PNK_LEXICALSCOPE: michael@0: ok = EmitLexicalScope(cx, bce, pn); michael@0: break; michael@0: michael@0: case PNK_LET: michael@0: ok = pn->isArity(PN_BINARY) michael@0: ? EmitLet(cx, bce, pn) michael@0: : EmitVariables(cx, bce, pn, InitializeVars); michael@0: break; michael@0: michael@0: case PNK_IMPORT: michael@0: case PNK_EXPORT: michael@0: // TODO: Implement emitter support for modules michael@0: bce->reportError(nullptr, JSMSG_MODULES_NOT_IMPLEMENTED); michael@0: return false; michael@0: michael@0: case PNK_ARRAYPUSH: { michael@0: /* michael@0: * The array object's stack index is in bce->arrayCompDepth. See below michael@0: * under the array initialiser code generator for array comprehension michael@0: * special casing. 
Note that the array object is a pure stack value, michael@0: * unaliased by blocks, so we can EmitUnaliasedVarOp. michael@0: */ michael@0: if (!EmitTree(cx, bce, pn->pn_kid)) michael@0: return false; michael@0: if (!EmitDupAt(cx, bce, bce->arrayCompDepth)) michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_ARRAYPUSH) < 0) michael@0: return false; michael@0: break; michael@0: } michael@0: michael@0: case PNK_ARRAY: michael@0: if (!(pn->pn_xflags & PNX_NONCONST) && pn->pn_head && bce->checkSingletonContext()) michael@0: ok = EmitSingletonInitialiser(cx, bce, pn); michael@0: else michael@0: ok = EmitArray(cx, bce, pn->pn_head, pn->pn_count); michael@0: break; michael@0: michael@0: case PNK_ARRAYCOMP: michael@0: ok = EmitArrayComp(cx, bce, pn); michael@0: break; michael@0: michael@0: case PNK_OBJECT: michael@0: ok = EmitObject(cx, bce, pn); michael@0: break; michael@0: michael@0: case PNK_NAME: michael@0: if (!EmitNameOp(cx, bce, pn, false)) michael@0: return false; michael@0: break; michael@0: michael@0: case PNK_STRING: michael@0: ok = EmitAtomOp(cx, pn, pn->getOp(), bce); michael@0: break; michael@0: michael@0: case PNK_NUMBER: michael@0: ok = EmitNumberOp(cx, pn->pn_dval, bce); michael@0: break; michael@0: michael@0: case PNK_REGEXP: michael@0: ok = EmitRegExp(cx, bce->regexpList.add(pn->as().objbox()), bce); michael@0: break; michael@0: michael@0: case PNK_TRUE: michael@0: case PNK_FALSE: michael@0: case PNK_THIS: michael@0: case PNK_NULL: michael@0: if (Emit1(cx, bce, pn->getOp()) < 0) michael@0: return false; michael@0: break; michael@0: michael@0: case PNK_DEBUGGER: michael@0: if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin)) michael@0: return false; michael@0: if (Emit1(cx, bce, JSOP_DEBUGGER) < 0) michael@0: return false; michael@0: break; michael@0: michael@0: case PNK_NOP: michael@0: JS_ASSERT(pn->getArity() == PN_NULLARY); michael@0: break; michael@0: michael@0: default: michael@0: JS_ASSERT(0); michael@0: } michael@0: michael@0: /* 
bce->emitLevel == 1 means we're last on the stack, so finish up. */ michael@0: if (ok && bce->emitLevel == 1) { michael@0: if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.end)) michael@0: return false; michael@0: } michael@0: michael@0: return ok; michael@0: } michael@0: michael@0: static int michael@0: AllocSrcNote(ExclusiveContext *cx, SrcNotesVector ¬es) michael@0: { michael@0: // Start it off moderately large to avoid repeated resizings early on. michael@0: if (notes.capacity() == 0 && !notes.reserve(1024)) michael@0: return -1; michael@0: michael@0: jssrcnote dummy = 0; michael@0: if (!notes.append(dummy)) { michael@0: js_ReportOutOfMemory(cx); michael@0: return -1; michael@0: } michael@0: return notes.length() - 1; michael@0: } michael@0: michael@0: int michael@0: frontend::NewSrcNote(ExclusiveContext *cx, BytecodeEmitter *bce, SrcNoteType type) michael@0: { michael@0: SrcNotesVector ¬es = bce->notes(); michael@0: int index; michael@0: michael@0: index = AllocSrcNote(cx, notes); michael@0: if (index < 0) michael@0: return -1; michael@0: michael@0: /* michael@0: * Compute delta from the last annotated bytecode's offset. If it's too michael@0: * big to fit in sn, allocate one or more xdelta notes and reset sn. michael@0: */ michael@0: ptrdiff_t offset = bce->offset(); michael@0: ptrdiff_t delta = offset - bce->lastNoteOffset(); michael@0: bce->current->lastNoteOffset = offset; michael@0: if (delta >= SN_DELTA_LIMIT) { michael@0: do { michael@0: ptrdiff_t xdelta = Min(delta, SN_XDELTA_MASK); michael@0: SN_MAKE_XDELTA(¬es[index], xdelta); michael@0: delta -= xdelta; michael@0: index = AllocSrcNote(cx, notes); michael@0: if (index < 0) michael@0: return -1; michael@0: } while (delta >= SN_DELTA_LIMIT); michael@0: } michael@0: michael@0: /* michael@0: * Initialize type and delta, then allocate the minimum number of notes michael@0: * needed for type's arity. 
Usually, we won't need more, but if an offset michael@0: * does take two bytes, SetSrcNoteOffset will grow notes. michael@0: */ michael@0: SN_MAKE_NOTE(¬es[index], type, delta); michael@0: for (int n = (int)js_SrcNoteSpec[type].arity; n > 0; n--) { michael@0: if (NewSrcNote(cx, bce, SRC_NULL) < 0) michael@0: return -1; michael@0: } michael@0: return index; michael@0: } michael@0: michael@0: int michael@0: frontend::NewSrcNote2(ExclusiveContext *cx, BytecodeEmitter *bce, SrcNoteType type, ptrdiff_t offset) michael@0: { michael@0: int index; michael@0: michael@0: index = NewSrcNote(cx, bce, type); michael@0: if (index >= 0) { michael@0: if (!SetSrcNoteOffset(cx, bce, index, 0, offset)) michael@0: return -1; michael@0: } michael@0: return index; michael@0: } michael@0: michael@0: int michael@0: frontend::NewSrcNote3(ExclusiveContext *cx, BytecodeEmitter *bce, SrcNoteType type, ptrdiff_t offset1, michael@0: ptrdiff_t offset2) michael@0: { michael@0: int index; michael@0: michael@0: index = NewSrcNote(cx, bce, type); michael@0: if (index >= 0) { michael@0: if (!SetSrcNoteOffset(cx, bce, index, 0, offset1)) michael@0: return -1; michael@0: if (!SetSrcNoteOffset(cx, bce, index, 1, offset2)) michael@0: return -1; michael@0: } michael@0: return index; michael@0: } michael@0: michael@0: bool michael@0: frontend::AddToSrcNoteDelta(ExclusiveContext *cx, BytecodeEmitter *bce, jssrcnote *sn, ptrdiff_t delta) michael@0: { michael@0: /* michael@0: * Called only from FinishTakingSrcNotes to add to main script note michael@0: * deltas, and only by a small positive amount. michael@0: */ michael@0: JS_ASSERT(bce->current == &bce->main); michael@0: JS_ASSERT((unsigned) delta < (unsigned) SN_XDELTA_LIMIT); michael@0: michael@0: ptrdiff_t base = SN_DELTA(sn); michael@0: ptrdiff_t limit = SN_IS_XDELTA(sn) ? 
SN_XDELTA_LIMIT : SN_DELTA_LIMIT; michael@0: ptrdiff_t newdelta = base + delta; michael@0: if (newdelta < limit) { michael@0: SN_SET_DELTA(sn, newdelta); michael@0: } else { michael@0: jssrcnote xdelta; michael@0: SN_MAKE_XDELTA(&xdelta, delta); michael@0: if (!(sn = bce->main.notes.insert(sn, xdelta))) michael@0: return false; michael@0: } michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: SetSrcNoteOffset(ExclusiveContext *cx, BytecodeEmitter *bce, unsigned index, unsigned which, michael@0: ptrdiff_t offset) michael@0: { michael@0: if (size_t(offset) > SN_MAX_OFFSET) { michael@0: ReportStatementTooLarge(bce->parser->tokenStream, bce->topStmt); michael@0: return false; michael@0: } michael@0: michael@0: SrcNotesVector ¬es = bce->notes(); michael@0: michael@0: /* Find the offset numbered which (i.e., skip exactly which offsets). */ michael@0: jssrcnote *sn = notes.begin() + index; michael@0: JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA); michael@0: JS_ASSERT((int) which < js_SrcNoteSpec[SN_TYPE(sn)].arity); michael@0: for (sn++; which; sn++, which--) { michael@0: if (*sn & SN_4BYTE_OFFSET_FLAG) michael@0: sn += 3; michael@0: } michael@0: michael@0: /* michael@0: * See if the new offset requires three bytes either by being too big or if michael@0: * the offset has already been inflated (in which case, we need to stay big michael@0: * to not break the srcnote encoding if this isn't the last srcnote). michael@0: */ michael@0: if (offset > (ptrdiff_t)SN_4BYTE_OFFSET_MASK || (*sn & SN_4BYTE_OFFSET_FLAG)) { michael@0: /* Maybe this offset was already set to a three-byte value. */ michael@0: if (!(*sn & SN_4BYTE_OFFSET_FLAG)) { michael@0: /* Insert two dummy bytes that will be overwritten shortly. 
*/ michael@0: jssrcnote dummy = 0; michael@0: if (!(sn = notes.insert(sn, dummy)) || michael@0: !(sn = notes.insert(sn, dummy)) || michael@0: !(sn = notes.insert(sn, dummy))) michael@0: { michael@0: js_ReportOutOfMemory(cx); michael@0: return false; michael@0: } michael@0: } michael@0: *sn++ = (jssrcnote)(SN_4BYTE_OFFSET_FLAG | (offset >> 24)); michael@0: *sn++ = (jssrcnote)(offset >> 16); michael@0: *sn++ = (jssrcnote)(offset >> 8); michael@0: } michael@0: *sn = (jssrcnote)offset; michael@0: return true; michael@0: } michael@0: michael@0: /* michael@0: * Finish taking source notes in cx's notePool. michael@0: * If successful, the final source note count is stored in the out outparam. michael@0: */ michael@0: bool michael@0: frontend::FinishTakingSrcNotes(ExclusiveContext *cx, BytecodeEmitter *bce, uint32_t *out) michael@0: { michael@0: JS_ASSERT(bce->current == &bce->main); michael@0: michael@0: unsigned prologCount = bce->prolog.notes.length(); michael@0: if (prologCount && bce->prolog.currentLine != bce->firstLine) { michael@0: bce->switchToProlog(); michael@0: if (NewSrcNote2(cx, bce, SRC_SETLINE, (ptrdiff_t)bce->firstLine) < 0) michael@0: return false; michael@0: bce->switchToMain(); michael@0: } else { michael@0: /* michael@0: * Either no prolog srcnotes, or no line number change over prolog. michael@0: * We don't need a SRC_SETLINE, but we may need to adjust the offset michael@0: * of the first main note, by adding to its delta and possibly even michael@0: * prepending SRC_XDELTA notes to it to account for prolog bytecodes michael@0: * that came at and after the last annotated bytecode. michael@0: */ michael@0: ptrdiff_t offset = bce->prologOffset() - bce->prolog.lastNoteOffset; michael@0: JS_ASSERT(offset >= 0); michael@0: if (offset > 0 && bce->main.notes.length() != 0) { michael@0: /* NB: Use as much of the first main note's delta as we can. */ michael@0: jssrcnote *sn = bce->main.notes.begin(); michael@0: ptrdiff_t delta = SN_IS_XDELTA(sn) michael@0: ? 
SN_XDELTA_MASK - (*sn & SN_XDELTA_MASK) michael@0: : SN_DELTA_MASK - (*sn & SN_DELTA_MASK); michael@0: if (offset < delta) michael@0: delta = offset; michael@0: for (;;) { michael@0: if (!AddToSrcNoteDelta(cx, bce, sn, delta)) michael@0: return false; michael@0: offset -= delta; michael@0: if (offset == 0) michael@0: break; michael@0: delta = Min(offset, SN_XDELTA_MASK); michael@0: sn = bce->main.notes.begin(); michael@0: } michael@0: } michael@0: } michael@0: michael@0: // The prolog count might have changed, so we can't reuse prologCount. michael@0: // The + 1 is to account for the final SN_MAKE_TERMINATOR that is appended michael@0: // when the notes are copied to their final destination by CopySrcNotes. michael@0: *out = bce->prolog.notes.length() + bce->main.notes.length() + 1; michael@0: return true; michael@0: } michael@0: michael@0: void michael@0: frontend::CopySrcNotes(BytecodeEmitter *bce, jssrcnote *destination, uint32_t nsrcnotes) michael@0: { michael@0: unsigned prologCount = bce->prolog.notes.length(); michael@0: unsigned mainCount = bce->main.notes.length(); michael@0: unsigned totalCount = prologCount + mainCount; michael@0: MOZ_ASSERT(totalCount == nsrcnotes - 1); michael@0: if (prologCount) michael@0: PodCopy(destination, bce->prolog.notes.begin(), prologCount); michael@0: PodCopy(destination + prologCount, bce->main.notes.begin(), mainCount); michael@0: SN_MAKE_TERMINATOR(&destination[totalCount]); michael@0: } michael@0: michael@0: void michael@0: CGConstList::finish(ConstArray *array) michael@0: { michael@0: JS_ASSERT(length() == array->length); michael@0: michael@0: for (unsigned i = 0; i < length(); i++) michael@0: array->vector[i] = list[i]; michael@0: } michael@0: michael@0: /* michael@0: * Find the index of the given object for code generator. michael@0: * michael@0: * Since the emitter refers to each parsed object only once, for the index we michael@0: * use the number of already indexes objects. 
We also add the object to a list
 * to convert the list to a fixed-size array when we complete code generation,
 * see js::CGObjectList::finish below.
 *
 * Most of the objects go to BytecodeEmitter::objectList but for regexp we use
 * a separate BytecodeEmitter::regexpList.  In this way the emitted index can
 * be directly used to store and fetch a reference to a cloned RegExp object
 * that shares the same JSRegExp private data created for the object literal in
 * objbox.  We need a cloned object to hold lastIndex and other direct
 * properties that should not be shared among threads sharing a precompiled
 * function or script.
 *
 * If the code being compiled is function code, allocate a reserved slot in
 * the cloned function object that shares its precompiled script with other
 * cloned function objects and with the compiler-created clone-parent.  There
 * are nregexps = script->regexps()->length such reserved slots in each
 * function object cloned from fun->object.  NB: during compilation, a funobj
 * slots element must never be allocated, because JSObject::allocSlot could
 * hand out one of the slots that should be given to a regexp clone.
 *
 * If the code being compiled is global code, the cloned regexps are stored in
 * fp->vars slot and to protect regexp slots from GC we set fp->nvars to
 * nregexps.
 *
 * The slots initially contain undefined or null.  We populate them lazily when
 * JSOP_REGEXP is executed for the first time.
 *
 * Why clone regexp objects?  ECMA specifies that when a regular expression
 * literal is scanned, a RegExp object is created.
In the spec, compilation michael@0: * and execution happen indivisibly, but in this implementation and many of michael@0: * its embeddings, code is precompiled early and re-executed in multiple michael@0: * threads, or using multiple global objects, or both, for efficiency. michael@0: * michael@0: * In such cases, naively following ECMA leads to wrongful sharing of RegExp michael@0: * objects, which makes for collisions on the lastIndex property (especially michael@0: * for global regexps) and on any ad-hoc properties. Also, __proto__ refers to michael@0: * the pre-compilation prototype, a pigeon-hole problem for instanceof tests. michael@0: */ michael@0: unsigned michael@0: CGObjectList::add(ObjectBox *objbox) michael@0: { michael@0: JS_ASSERT(!objbox->emitLink); michael@0: objbox->emitLink = lastbox; michael@0: lastbox = objbox; michael@0: return length++; michael@0: } michael@0: michael@0: unsigned michael@0: CGObjectList::indexOf(JSObject *obj) michael@0: { michael@0: JS_ASSERT(length > 0); michael@0: unsigned index = length - 1; michael@0: for (ObjectBox *box = lastbox; box->object != obj; box = box->emitLink) michael@0: index--; michael@0: return index; michael@0: } michael@0: michael@0: void michael@0: CGObjectList::finish(ObjectArray *array) michael@0: { michael@0: JS_ASSERT(length <= INDEX_LIMIT); michael@0: JS_ASSERT(length == array->length); michael@0: michael@0: js::HeapPtrObject *cursor = array->vector + array->length; michael@0: ObjectBox *objbox = lastbox; michael@0: do { michael@0: --cursor; michael@0: JS_ASSERT(!*cursor); michael@0: *cursor = objbox->object; michael@0: } while ((objbox = objbox->emitLink) != nullptr); michael@0: JS_ASSERT(cursor == array->vector); michael@0: } michael@0: michael@0: ObjectBox* michael@0: CGObjectList::find(uint32_t index) michael@0: { michael@0: JS_ASSERT(index < length); michael@0: ObjectBox *box = lastbox; michael@0: for (unsigned n = length - 1; n > index; n--) michael@0: box = box->emitLink; michael@0: return 
box; michael@0: } michael@0: michael@0: bool michael@0: CGTryNoteList::append(JSTryNoteKind kind, uint32_t stackDepth, size_t start, size_t end) michael@0: { michael@0: JS_ASSERT(start <= end); michael@0: JS_ASSERT(size_t(uint32_t(start)) == start); michael@0: JS_ASSERT(size_t(uint32_t(end)) == end); michael@0: michael@0: JSTryNote note; michael@0: note.kind = kind; michael@0: note.stackDepth = stackDepth; michael@0: note.start = uint32_t(start); michael@0: note.length = uint32_t(end - start); michael@0: michael@0: return list.append(note); michael@0: } michael@0: michael@0: void michael@0: CGTryNoteList::finish(TryNoteArray *array) michael@0: { michael@0: JS_ASSERT(length() == array->length); michael@0: michael@0: for (unsigned i = 0; i < length(); i++) michael@0: array->vector[i] = list[i]; michael@0: } michael@0: michael@0: bool michael@0: CGBlockScopeList::append(uint32_t scopeObject, uint32_t offset, uint32_t parent) michael@0: { michael@0: BlockScopeNote note; michael@0: mozilla::PodZero(¬e); michael@0: michael@0: note.index = scopeObject; michael@0: note.start = offset; michael@0: note.parent = parent; michael@0: michael@0: return list.append(note); michael@0: } michael@0: michael@0: uint32_t michael@0: CGBlockScopeList::findEnclosingScope(uint32_t index) michael@0: { michael@0: JS_ASSERT(index < length()); michael@0: JS_ASSERT(list[index].index != BlockScopeNote::NoBlockScopeIndex); michael@0: michael@0: DebugOnly pos = list[index].start; michael@0: while (index--) { michael@0: JS_ASSERT(list[index].start <= pos); michael@0: if (list[index].length == 0) { michael@0: // We are looking for the nearest enclosing live scope. If the michael@0: // scope contains POS, it should still be open, so its length should michael@0: // be zero. michael@0: return list[index].index; michael@0: } else { michael@0: // Conversely, if the length is not zero, it should not contain michael@0: // POS. 
michael@0: JS_ASSERT(list[index].start + list[index].length <= pos); michael@0: } michael@0: } michael@0: michael@0: return BlockScopeNote::NoBlockScopeIndex; michael@0: } michael@0: michael@0: void michael@0: CGBlockScopeList::recordEnd(uint32_t index, uint32_t offset) michael@0: { michael@0: JS_ASSERT(index < length()); michael@0: JS_ASSERT(offset >= list[index].start); michael@0: JS_ASSERT(list[index].length == 0); michael@0: michael@0: list[index].length = offset - list[index].start; michael@0: } michael@0: michael@0: void michael@0: CGBlockScopeList::finish(BlockScopeArray *array) michael@0: { michael@0: JS_ASSERT(length() == array->length); michael@0: michael@0: for (unsigned i = 0; i < length(); i++) michael@0: array->vector[i] = list[i]; michael@0: } michael@0: michael@0: /* michael@0: * We should try to get rid of offsetBias (always 0 or 1, where 1 is michael@0: * JSOP_{NOP,POP}_LENGTH), which is used only by SRC_FOR. michael@0: */ michael@0: const JSSrcNoteSpec js_SrcNoteSpec[] = { michael@0: #define DEFINE_SRC_NOTE_SPEC(sym, name, arity) { name, arity }, michael@0: FOR_EACH_SRC_NOTE_TYPE(DEFINE_SRC_NOTE_SPEC) michael@0: #undef DEFINE_SRC_NOTE_SPEC michael@0: }; michael@0: michael@0: static int michael@0: SrcNoteArity(jssrcnote *sn) michael@0: { michael@0: JS_ASSERT(SN_TYPE(sn) < SRC_LAST); michael@0: return js_SrcNoteSpec[SN_TYPE(sn)].arity; michael@0: } michael@0: michael@0: JS_FRIEND_API(unsigned) michael@0: js_SrcNoteLength(jssrcnote *sn) michael@0: { michael@0: unsigned arity; michael@0: jssrcnote *base; michael@0: michael@0: arity = SrcNoteArity(sn); michael@0: for (base = sn++; arity; sn++, arity--) { michael@0: if (*sn & SN_4BYTE_OFFSET_FLAG) michael@0: sn += 3; michael@0: } michael@0: return sn - base; michael@0: } michael@0: michael@0: JS_FRIEND_API(ptrdiff_t) michael@0: js_GetSrcNoteOffset(jssrcnote *sn, unsigned which) michael@0: { michael@0: /* Find the offset numbered which (i.e., skip exactly which offsets). 
*/ michael@0: JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA); michael@0: JS_ASSERT((int) which < SrcNoteArity(sn)); michael@0: for (sn++; which; sn++, which--) { michael@0: if (*sn & SN_4BYTE_OFFSET_FLAG) michael@0: sn += 3; michael@0: } michael@0: if (*sn & SN_4BYTE_OFFSET_FLAG) { michael@0: return (ptrdiff_t)(((uint32_t)(sn[0] & SN_4BYTE_OFFSET_MASK) << 24) michael@0: | (sn[1] << 16) michael@0: | (sn[2] << 8) michael@0: | sn[3]); michael@0: } michael@0: return (ptrdiff_t)*sn; michael@0: }