1.1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000 1.2 +++ b/js/src/frontend/BytecodeEmitter.cpp Wed Dec 31 06:09:35 2014 +0100 1.3 @@ -0,0 +1,7039 @@ 1.4 +/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- 1.5 + * vim: set ts=8 sts=4 et sw=4 tw=99: 1.6 + * This Source Code Form is subject to the terms of the Mozilla Public 1.7 + * License, v. 2.0. If a copy of the MPL was not distributed with this 1.8 + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ 1.9 + 1.10 +/* 1.11 + * JS bytecode generation. 1.12 + */ 1.13 + 1.14 +#include "frontend/BytecodeEmitter.h" 1.15 + 1.16 +#include "mozilla/DebugOnly.h" 1.17 +#include "mozilla/FloatingPoint.h" 1.18 +#include "mozilla/PodOperations.h" 1.19 + 1.20 +#include <string.h> 1.21 + 1.22 +#include "jsapi.h" 1.23 +#include "jsatom.h" 1.24 +#include "jscntxt.h" 1.25 +#include "jsfun.h" 1.26 +#include "jsnum.h" 1.27 +#include "jsopcode.h" 1.28 +#include "jsscript.h" 1.29 +#include "jstypes.h" 1.30 +#include "jsutil.h" 1.31 + 1.32 +#include "frontend/Parser.h" 1.33 +#include "frontend/TokenStream.h" 1.34 +#include "jit/AsmJSLink.h" 1.35 +#include "vm/Debugger.h" 1.36 + 1.37 +#include "jsatominlines.h" 1.38 +#include "jsobjinlines.h" 1.39 +#include "jsscriptinlines.h" 1.40 + 1.41 +#include "frontend/ParseMaps-inl.h" 1.42 +#include "frontend/ParseNode-inl.h" 1.43 +#include "vm/ScopeObject-inl.h" 1.44 + 1.45 +using namespace js; 1.46 +using namespace js::gc; 1.47 +using namespace js::frontend; 1.48 + 1.49 +using mozilla::DebugOnly; 1.50 +using mozilla::NumberIsInt32; 1.51 +using mozilla::PodCopy; 1.52 + 1.53 +static bool 1.54 +SetSrcNoteOffset(ExclusiveContext *cx, BytecodeEmitter *bce, unsigned index, unsigned which, ptrdiff_t offset); 1.55 + 1.56 +struct frontend::StmtInfoBCE : public StmtInfoBase 1.57 +{ 1.58 + StmtInfoBCE *down; /* info for enclosing statement */ 1.59 + StmtInfoBCE *downScope; /* next enclosing lexical scope */ 1.60 + 1.61 + ptrdiff_t update; /* loop update offset (top if 
none) */ 1.62 + ptrdiff_t breaks; /* offset of last break in loop */ 1.63 + ptrdiff_t continues; /* offset of last continue in loop */ 1.64 + uint32_t blockScopeIndex; /* index of scope in BlockScopeArray */ 1.65 + 1.66 + StmtInfoBCE(ExclusiveContext *cx) : StmtInfoBase(cx) {} 1.67 + 1.68 + /* 1.69 + * To reuse space, alias two of the ptrdiff_t fields for use during 1.70 + * try/catch/finally code generation and backpatching. 1.71 + * 1.72 + * Only a loop, switch, or label statement info record can have breaks and 1.73 + * continues, and only a for loop has an update backpatch chain, so it's 1.74 + * safe to overlay these for the "trying" StmtTypes. 1.75 + */ 1.76 + 1.77 + ptrdiff_t &gosubs() { 1.78 + JS_ASSERT(type == STMT_FINALLY); 1.79 + return breaks; 1.80 + } 1.81 + 1.82 + ptrdiff_t &guardJump() { 1.83 + JS_ASSERT(type == STMT_TRY || type == STMT_FINALLY); 1.84 + return continues; 1.85 + } 1.86 +}; 1.87 + 1.88 + 1.89 +namespace { 1.90 + 1.91 +struct LoopStmtInfo : public StmtInfoBCE 1.92 +{ 1.93 + int32_t stackDepth; // Stack depth when this loop was pushed. 1.94 + uint32_t loopDepth; // Loop depth. 1.95 + 1.96 + // Can we OSR into Ion from here? True unless there is non-loop state on the stack. 
1.97 + bool canIonOsr; 1.98 + 1.99 + LoopStmtInfo(ExclusiveContext *cx) : StmtInfoBCE(cx) {} 1.100 + 1.101 + static LoopStmtInfo* fromStmtInfo(StmtInfoBCE *stmt) { 1.102 + JS_ASSERT(stmt->isLoop()); 1.103 + return static_cast<LoopStmtInfo*>(stmt); 1.104 + } 1.105 +}; 1.106 + 1.107 +} // anonymous namespace 1.108 + 1.109 +BytecodeEmitter::BytecodeEmitter(BytecodeEmitter *parent, 1.110 + Parser<FullParseHandler> *parser, SharedContext *sc, 1.111 + HandleScript script, bool insideEval, HandleScript evalCaller, 1.112 + bool hasGlobalScope, uint32_t lineNum, EmitterMode emitterMode) 1.113 + : sc(sc), 1.114 + parent(parent), 1.115 + script(sc->context, script), 1.116 + prolog(sc->context, lineNum), 1.117 + main(sc->context, lineNum), 1.118 + current(&main), 1.119 + parser(parser), 1.120 + evalCaller(evalCaller), 1.121 + topStmt(nullptr), 1.122 + topScopeStmt(nullptr), 1.123 + staticScope(sc->context), 1.124 + atomIndices(sc->context), 1.125 + firstLine(lineNum), 1.126 + stackDepth(0), maxStackDepth(0), 1.127 + arrayCompDepth(0), 1.128 + emitLevel(0), 1.129 + constList(sc->context), 1.130 + tryNoteList(sc->context), 1.131 + blockScopeList(sc->context), 1.132 + typesetCount(0), 1.133 + hasSingletons(false), 1.134 + emittingForInit(false), 1.135 + emittingRunOnceLambda(false), 1.136 + lazyRunOnceLambda(false), 1.137 + insideEval(insideEval), 1.138 + hasGlobalScope(hasGlobalScope), 1.139 + emitterMode(emitterMode) 1.140 +{ 1.141 + JS_ASSERT_IF(evalCaller, insideEval); 1.142 +} 1.143 + 1.144 +bool 1.145 +BytecodeEmitter::init() 1.146 +{ 1.147 + return atomIndices.ensureMap(sc->context); 1.148 +} 1.149 + 1.150 +static ptrdiff_t 1.151 +EmitCheck(ExclusiveContext *cx, BytecodeEmitter *bce, ptrdiff_t delta) 1.152 +{ 1.153 + ptrdiff_t offset = bce->code().length(); 1.154 + 1.155 + // Start it off moderately large to avoid repeated resizings early on. 
1.156 + if (bce->code().capacity() == 0 && !bce->code().reserve(1024)) 1.157 + return -1; 1.158 + 1.159 + jsbytecode dummy = 0; 1.160 + if (!bce->code().appendN(dummy, delta)) { 1.161 + js_ReportOutOfMemory(cx); 1.162 + return -1; 1.163 + } 1.164 + return offset; 1.165 +} 1.166 + 1.167 +static void 1.168 +UpdateDepth(ExclusiveContext *cx, BytecodeEmitter *bce, ptrdiff_t target) 1.169 +{ 1.170 + jsbytecode *pc = bce->code(target); 1.171 + JSOp op = (JSOp) *pc; 1.172 + const JSCodeSpec *cs = &js_CodeSpec[op]; 1.173 + 1.174 + if (cs->format & JOF_TMPSLOT_MASK) { 1.175 + /* 1.176 + * An opcode may temporarily consume stack space during execution. 1.177 + * Account for this in maxStackDepth separately from uses/defs here. 1.178 + */ 1.179 + uint32_t depth = (uint32_t) bce->stackDepth + 1.180 + ((cs->format & JOF_TMPSLOT_MASK) >> JOF_TMPSLOT_SHIFT); 1.181 + if (depth > bce->maxStackDepth) 1.182 + bce->maxStackDepth = depth; 1.183 + } 1.184 + 1.185 + int nuses = StackUses(nullptr, pc); 1.186 + int ndefs = StackDefs(nullptr, pc); 1.187 + 1.188 + bce->stackDepth -= nuses; 1.189 + JS_ASSERT(bce->stackDepth >= 0); 1.190 + bce->stackDepth += ndefs; 1.191 + if ((uint32_t)bce->stackDepth > bce->maxStackDepth) 1.192 + bce->maxStackDepth = bce->stackDepth; 1.193 +} 1.194 + 1.195 +ptrdiff_t 1.196 +frontend::Emit1(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp op) 1.197 +{ 1.198 + ptrdiff_t offset = EmitCheck(cx, bce, 1); 1.199 + if (offset < 0) 1.200 + return -1; 1.201 + 1.202 + jsbytecode *code = bce->code(offset); 1.203 + code[0] = jsbytecode(op); 1.204 + UpdateDepth(cx, bce, offset); 1.205 + return offset; 1.206 +} 1.207 + 1.208 +ptrdiff_t 1.209 +frontend::Emit2(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp op, jsbytecode op1) 1.210 +{ 1.211 + ptrdiff_t offset = EmitCheck(cx, bce, 2); 1.212 + if (offset < 0) 1.213 + return -1; 1.214 + 1.215 + jsbytecode *code = bce->code(offset); 1.216 + code[0] = jsbytecode(op); 1.217 + code[1] = op1; 1.218 + UpdateDepth(cx, bce, offset); 
1.219 + return offset; 1.220 +} 1.221 + 1.222 +ptrdiff_t 1.223 +frontend::Emit3(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp op, jsbytecode op1, 1.224 + jsbytecode op2) 1.225 +{ 1.226 + /* These should filter through EmitVarOp. */ 1.227 + JS_ASSERT(!IsArgOp(op)); 1.228 + JS_ASSERT(!IsLocalOp(op)); 1.229 + 1.230 + ptrdiff_t offset = EmitCheck(cx, bce, 3); 1.231 + if (offset < 0) 1.232 + return -1; 1.233 + 1.234 + jsbytecode *code = bce->code(offset); 1.235 + code[0] = jsbytecode(op); 1.236 + code[1] = op1; 1.237 + code[2] = op2; 1.238 + UpdateDepth(cx, bce, offset); 1.239 + return offset; 1.240 +} 1.241 + 1.242 +ptrdiff_t 1.243 +frontend::EmitN(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp op, size_t extra) 1.244 +{ 1.245 + ptrdiff_t length = 1 + (ptrdiff_t)extra; 1.246 + ptrdiff_t offset = EmitCheck(cx, bce, length); 1.247 + if (offset < 0) 1.248 + return -1; 1.249 + 1.250 + jsbytecode *code = bce->code(offset); 1.251 + code[0] = jsbytecode(op); 1.252 + /* The remaining |extra| bytes are set by the caller */ 1.253 + 1.254 + /* 1.255 + * Don't UpdateDepth if op's use-count comes from the immediate 1.256 + * operand yet to be stored in the extra bytes after op. 1.257 + */ 1.258 + if (js_CodeSpec[op].nuses >= 0) 1.259 + UpdateDepth(cx, bce, offset); 1.260 + 1.261 + return offset; 1.262 +} 1.263 + 1.264 +static ptrdiff_t 1.265 +EmitJump(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp op, ptrdiff_t off) 1.266 +{ 1.267 + ptrdiff_t offset = EmitCheck(cx, bce, 5); 1.268 + if (offset < 0) 1.269 + return -1; 1.270 + 1.271 + jsbytecode *code = bce->code(offset); 1.272 + code[0] = jsbytecode(op); 1.273 + SET_JUMP_OFFSET(code, off); 1.274 + UpdateDepth(cx, bce, offset); 1.275 + return offset; 1.276 +} 1.277 + 1.278 +static ptrdiff_t 1.279 +EmitCall(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp op, uint16_t argc) 1.280 +{ 1.281 + return Emit3(cx, bce, op, ARGC_HI(argc), ARGC_LO(argc)); 1.282 +} 1.283 + 1.284 +// Dup the var in operand stack slot "slot". 
The first item on the operand 1.285 +// stack is one slot past the last fixed slot. The last (most recent) item is 1.286 +// slot bce->stackDepth - 1. 1.287 +// 1.288 +// The instruction that is written (JSOP_DUPAT) switches the depth around so 1.289 +// that it is addressed from the sp instead of from the fp. This is useful when 1.290 +// you don't know the size of the fixed stack segment (nfixed), as is the case 1.291 +// when compiling scripts (because each statement is parsed and compiled 1.292 +// separately, but they all together form one script with one fixed stack 1.293 +// frame). 1.294 +static bool 1.295 +EmitDupAt(ExclusiveContext *cx, BytecodeEmitter *bce, unsigned slot) 1.296 +{ 1.297 + JS_ASSERT(slot < unsigned(bce->stackDepth)); 1.298 + // The slot's position on the operand stack, measured from the top. 1.299 + unsigned slotFromTop = bce->stackDepth - 1 - slot; 1.300 + if (slotFromTop >= JS_BIT(24)) { 1.301 + bce->reportError(nullptr, JSMSG_TOO_MANY_LOCALS); 1.302 + return false; 1.303 + } 1.304 + ptrdiff_t off = EmitN(cx, bce, JSOP_DUPAT, 3); 1.305 + if (off < 0) 1.306 + return false; 1.307 + jsbytecode *pc = bce->code(off); 1.308 + SET_UINT24(pc, slotFromTop); 1.309 + return true; 1.310 +} 1.311 + 1.312 +/* XXX too many "... statement" L10N gaffes below -- fix via js.msg! 
*/ 1.313 +const char js_with_statement_str[] = "with statement"; 1.314 +const char js_finally_block_str[] = "finally block"; 1.315 +const char js_script_str[] = "script"; 1.316 + 1.317 +static const char * const statementName[] = { 1.318 + "label statement", /* LABEL */ 1.319 + "if statement", /* IF */ 1.320 + "else statement", /* ELSE */ 1.321 + "destructuring body", /* BODY */ 1.322 + "switch statement", /* SWITCH */ 1.323 + "block", /* BLOCK */ 1.324 + js_with_statement_str, /* WITH */ 1.325 + "catch block", /* CATCH */ 1.326 + "try block", /* TRY */ 1.327 + js_finally_block_str, /* FINALLY */ 1.328 + js_finally_block_str, /* SUBROUTINE */ 1.329 + "do loop", /* DO_LOOP */ 1.330 + "for loop", /* FOR_LOOP */ 1.331 + "for/in loop", /* FOR_IN_LOOP */ 1.332 + "for/of loop", /* FOR_OF_LOOP */ 1.333 + "while loop", /* WHILE_LOOP */ 1.334 +}; 1.335 + 1.336 +JS_STATIC_ASSERT(JS_ARRAY_LENGTH(statementName) == STMT_LIMIT); 1.337 + 1.338 +static const char * 1.339 +StatementName(StmtInfoBCE *topStmt) 1.340 +{ 1.341 + if (!topStmt) 1.342 + return js_script_str; 1.343 + return statementName[topStmt->type]; 1.344 +} 1.345 + 1.346 +static void 1.347 +ReportStatementTooLarge(TokenStream &ts, StmtInfoBCE *topStmt) 1.348 +{ 1.349 + ts.reportError(JSMSG_NEED_DIET, StatementName(topStmt)); 1.350 +} 1.351 + 1.352 +/* 1.353 + * Emit a backpatch op with offset pointing to the previous jump of this type, 1.354 + * so that we can walk back up the chain fixing up the op and jump offset. 1.355 + */ 1.356 +static ptrdiff_t 1.357 +EmitBackPatchOp(ExclusiveContext *cx, BytecodeEmitter *bce, ptrdiff_t *lastp) 1.358 +{ 1.359 + ptrdiff_t offset, delta; 1.360 + 1.361 + offset = bce->offset(); 1.362 + delta = offset - *lastp; 1.363 + *lastp = offset; 1.364 + JS_ASSERT(delta > 0); 1.365 + return EmitJump(cx, bce, JSOP_BACKPATCH, delta); 1.366 +} 1.367 + 1.368 +static inline unsigned 1.369 +LengthOfSetLine(unsigned line) 1.370 +{ 1.371 + return 1 /* SN_SETLINE */ + (line > SN_4BYTE_OFFSET_MASK ? 
4 : 1); 1.372 +} 1.373 + 1.374 +/* Updates line number notes, not column notes. */ 1.375 +static inline bool 1.376 +UpdateLineNumberNotes(ExclusiveContext *cx, BytecodeEmitter *bce, uint32_t offset) 1.377 +{ 1.378 + TokenStream *ts = &bce->parser->tokenStream; 1.379 + if (!ts->srcCoords.isOnThisLine(offset, bce->currentLine())) { 1.380 + unsigned line = ts->srcCoords.lineNum(offset); 1.381 + unsigned delta = line - bce->currentLine(); 1.382 + 1.383 + /* 1.384 + * Encode any change in the current source line number by using 1.385 + * either several SRC_NEWLINE notes or just one SRC_SETLINE note, 1.386 + * whichever consumes less space. 1.387 + * 1.388 + * NB: We handle backward line number deltas (possible with for 1.389 + * loops where the update part is emitted after the body, but its 1.390 + * line number is <= any line number in the body) here by letting 1.391 + * unsigned delta_ wrap to a very large number, which triggers a 1.392 + * SRC_SETLINE. 1.393 + */ 1.394 + bce->current->currentLine = line; 1.395 + bce->current->lastColumn = 0; 1.396 + if (delta >= LengthOfSetLine(line)) { 1.397 + if (NewSrcNote2(cx, bce, SRC_SETLINE, (ptrdiff_t)line) < 0) 1.398 + return false; 1.399 + } else { 1.400 + do { 1.401 + if (NewSrcNote(cx, bce, SRC_NEWLINE) < 0) 1.402 + return false; 1.403 + } while (--delta != 0); 1.404 + } 1.405 + } 1.406 + return true; 1.407 +} 1.408 + 1.409 +/* A function, so that we avoid macro-bloating all the other callsites. 
*/ 1.410 +static bool 1.411 +UpdateSourceCoordNotes(ExclusiveContext *cx, BytecodeEmitter *bce, uint32_t offset) 1.412 +{ 1.413 + if (!UpdateLineNumberNotes(cx, bce, offset)) 1.414 + return false; 1.415 + 1.416 + uint32_t columnIndex = bce->parser->tokenStream.srcCoords.columnIndex(offset); 1.417 + ptrdiff_t colspan = ptrdiff_t(columnIndex) - ptrdiff_t(bce->current->lastColumn); 1.418 + if (colspan != 0) { 1.419 + if (colspan < 0) { 1.420 + colspan += SN_COLSPAN_DOMAIN; 1.421 + } else if (colspan >= SN_COLSPAN_DOMAIN / 2) { 1.422 + // If the column span is so large that we can't store it, then just 1.423 + // discard this information because column information would most 1.424 + // likely be useless anyway once the column numbers are ~4000000. 1.425 + // This has been known to happen with scripts that have been 1.426 + // minimized and put into all one line. 1.427 + return true; 1.428 + } 1.429 + if (NewSrcNote2(cx, bce, SRC_COLSPAN, colspan) < 0) 1.430 + return false; 1.431 + bce->current->lastColumn = columnIndex; 1.432 + } 1.433 + return true; 1.434 +} 1.435 + 1.436 +static ptrdiff_t 1.437 +EmitLoopHead(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *nextpn) 1.438 +{ 1.439 + if (nextpn) { 1.440 + /* 1.441 + * Try to give the JSOP_LOOPHEAD the same line number as the next 1.442 + * instruction. nextpn is often a block, in which case the next 1.443 + * instruction typically comes from the first statement inside. 1.444 + */ 1.445 + JS_ASSERT_IF(nextpn->isKind(PNK_STATEMENTLIST), nextpn->isArity(PN_LIST)); 1.446 + if (nextpn->isKind(PNK_STATEMENTLIST) && nextpn->pn_head) 1.447 + nextpn = nextpn->pn_head; 1.448 + if (!UpdateSourceCoordNotes(cx, bce, nextpn->pn_pos.begin)) 1.449 + return -1; 1.450 + } 1.451 + 1.452 + return Emit1(cx, bce, JSOP_LOOPHEAD); 1.453 +} 1.454 + 1.455 +static bool 1.456 +EmitLoopEntry(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *nextpn) 1.457 +{ 1.458 + if (nextpn) { 1.459 + /* Update the line number, as for LOOPHEAD. 
*/ 1.460 + JS_ASSERT_IF(nextpn->isKind(PNK_STATEMENTLIST), nextpn->isArity(PN_LIST)); 1.461 + if (nextpn->isKind(PNK_STATEMENTLIST) && nextpn->pn_head) 1.462 + nextpn = nextpn->pn_head; 1.463 + if (!UpdateSourceCoordNotes(cx, bce, nextpn->pn_pos.begin)) 1.464 + return false; 1.465 + } 1.466 + 1.467 + LoopStmtInfo *loop = LoopStmtInfo::fromStmtInfo(bce->topStmt); 1.468 + JS_ASSERT(loop->loopDepth > 0); 1.469 + 1.470 + uint8_t loopDepthAndFlags = PackLoopEntryDepthHintAndFlags(loop->loopDepth, loop->canIonOsr); 1.471 + return Emit2(cx, bce, JSOP_LOOPENTRY, loopDepthAndFlags) >= 0; 1.472 +} 1.473 + 1.474 +/* 1.475 + * If op is JOF_TYPESET (see the type barriers comment in jsinfer.h), reserve 1.476 + * a type set to store its result. 1.477 + */ 1.478 +static inline void 1.479 +CheckTypeSet(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp op) 1.480 +{ 1.481 + if (js_CodeSpec[op].format & JOF_TYPESET) { 1.482 + if (bce->typesetCount < UINT16_MAX) 1.483 + bce->typesetCount++; 1.484 + } 1.485 +} 1.486 + 1.487 +/* 1.488 + * Macro to emit a bytecode followed by a uint16_t immediate operand stored in 1.489 + * big-endian order. 1.490 + * 1.491 + * NB: We use cx and bce from our caller's lexical environment, and return 1.492 + * false on error. 
1.493 + */ 1.494 +#define EMIT_UINT16_IMM_OP(op, i) \ 1.495 + JS_BEGIN_MACRO \ 1.496 + if (Emit3(cx, bce, op, UINT16_HI(i), UINT16_LO(i)) < 0) \ 1.497 + return false; \ 1.498 + CheckTypeSet(cx, bce, op); \ 1.499 + JS_END_MACRO 1.500 + 1.501 +static bool 1.502 +FlushPops(ExclusiveContext *cx, BytecodeEmitter *bce, int *npops) 1.503 +{ 1.504 + JS_ASSERT(*npops != 0); 1.505 + EMIT_UINT16_IMM_OP(JSOP_POPN, *npops); 1.506 + *npops = 0; 1.507 + return true; 1.508 +} 1.509 + 1.510 +static bool 1.511 +PopIterator(ExclusiveContext *cx, BytecodeEmitter *bce) 1.512 +{ 1.513 + if (Emit1(cx, bce, JSOP_ENDITER) < 0) 1.514 + return false; 1.515 + return true; 1.516 +} 1.517 + 1.518 +namespace { 1.519 + 1.520 +class NonLocalExitScope { 1.521 + ExclusiveContext *cx; 1.522 + BytecodeEmitter *bce; 1.523 + const uint32_t savedScopeIndex; 1.524 + const int savedDepth; 1.525 + uint32_t openScopeIndex; 1.526 + 1.527 + NonLocalExitScope(const NonLocalExitScope &) MOZ_DELETE; 1.528 + 1.529 + public: 1.530 + explicit NonLocalExitScope(ExclusiveContext *cx_, BytecodeEmitter *bce_) 1.531 + : cx(cx_), 1.532 + bce(bce_), 1.533 + savedScopeIndex(bce->blockScopeList.length()), 1.534 + savedDepth(bce->stackDepth), 1.535 + openScopeIndex(UINT32_MAX) { 1.536 + if (bce->staticScope) { 1.537 + StmtInfoBCE *stmt = bce->topStmt; 1.538 + while (1) { 1.539 + JS_ASSERT(stmt); 1.540 + if (stmt->isNestedScope) { 1.541 + openScopeIndex = stmt->blockScopeIndex; 1.542 + break; 1.543 + } 1.544 + stmt = stmt->down; 1.545 + } 1.546 + } 1.547 + } 1.548 + 1.549 + ~NonLocalExitScope() { 1.550 + for (uint32_t n = savedScopeIndex; n < bce->blockScopeList.length(); n++) 1.551 + bce->blockScopeList.recordEnd(n, bce->offset()); 1.552 + bce->stackDepth = savedDepth; 1.553 + } 1.554 + 1.555 + bool popScopeForNonLocalExit(uint32_t blockScopeIndex) { 1.556 + uint32_t scopeObjectIndex = bce->blockScopeList.findEnclosingScope(blockScopeIndex); 1.557 + uint32_t parent = openScopeIndex; 1.558 + 1.559 + if 
(!bce->blockScopeList.append(scopeObjectIndex, bce->offset(), parent)) 1.560 + return false; 1.561 + openScopeIndex = bce->blockScopeList.length() - 1; 1.562 + return true; 1.563 + } 1.564 + 1.565 + bool prepareForNonLocalJump(StmtInfoBCE *toStmt); 1.566 +}; 1.567 + 1.568 +/* 1.569 + * Emit additional bytecode(s) for non-local jumps. 1.570 + */ 1.571 +bool 1.572 +NonLocalExitScope::prepareForNonLocalJump(StmtInfoBCE *toStmt) 1.573 +{ 1.574 + int npops = 0; 1.575 + 1.576 +#define FLUSH_POPS() if (npops && !FlushPops(cx, bce, &npops)) return false 1.577 + 1.578 + for (StmtInfoBCE *stmt = bce->topStmt; stmt != toStmt; stmt = stmt->down) { 1.579 + switch (stmt->type) { 1.580 + case STMT_FINALLY: 1.581 + FLUSH_POPS(); 1.582 + if (EmitBackPatchOp(cx, bce, &stmt->gosubs()) < 0) 1.583 + return false; 1.584 + break; 1.585 + 1.586 + case STMT_WITH: 1.587 + if (Emit1(cx, bce, JSOP_LEAVEWITH) < 0) 1.588 + return false; 1.589 + JS_ASSERT(stmt->isNestedScope); 1.590 + if (!popScopeForNonLocalExit(stmt->blockScopeIndex)) 1.591 + return false; 1.592 + break; 1.593 + 1.594 + case STMT_FOR_OF_LOOP: 1.595 + npops += 2; 1.596 + break; 1.597 + 1.598 + case STMT_FOR_IN_LOOP: 1.599 + FLUSH_POPS(); 1.600 + if (!PopIterator(cx, bce)) 1.601 + return false; 1.602 + break; 1.603 + 1.604 + case STMT_SUBROUTINE: 1.605 + /* 1.606 + * There's a [exception or hole, retsub pc-index] pair on the 1.607 + * stack that we need to pop. 
1.608 + */ 1.609 + npops += 2; 1.610 + break; 1.611 + 1.612 + default:; 1.613 + } 1.614 + 1.615 + if (stmt->isBlockScope) { 1.616 + JS_ASSERT(stmt->isNestedScope); 1.617 + StaticBlockObject &blockObj = stmt->staticBlock(); 1.618 + if (Emit1(cx, bce, JSOP_DEBUGLEAVEBLOCK) < 0) 1.619 + return false; 1.620 + if (!popScopeForNonLocalExit(stmt->blockScopeIndex)) 1.621 + return false; 1.622 + if (blockObj.needsClone()) { 1.623 + if (Emit1(cx, bce, JSOP_POPBLOCKSCOPE) < 0) 1.624 + return false; 1.625 + } 1.626 + } 1.627 + } 1.628 + 1.629 + FLUSH_POPS(); 1.630 + return true; 1.631 + 1.632 +#undef FLUSH_POPS 1.633 +} 1.634 + 1.635 +} // anonymous namespace 1.636 + 1.637 +static ptrdiff_t 1.638 +EmitGoto(ExclusiveContext *cx, BytecodeEmitter *bce, StmtInfoBCE *toStmt, ptrdiff_t *lastp, 1.639 + SrcNoteType noteType = SRC_NULL) 1.640 +{ 1.641 + NonLocalExitScope nle(cx, bce); 1.642 + 1.643 + if (!nle.prepareForNonLocalJump(toStmt)) 1.644 + return -1; 1.645 + 1.646 + if (noteType != SRC_NULL) { 1.647 + if (NewSrcNote(cx, bce, noteType) < 0) 1.648 + return -1; 1.649 + } 1.650 + 1.651 + return EmitBackPatchOp(cx, bce, lastp); 1.652 +} 1.653 + 1.654 +static bool 1.655 +BackPatch(ExclusiveContext *cx, BytecodeEmitter *bce, ptrdiff_t last, jsbytecode *target, jsbytecode op) 1.656 +{ 1.657 + jsbytecode *pc, *stop; 1.658 + ptrdiff_t delta, span; 1.659 + 1.660 + pc = bce->code(last); 1.661 + stop = bce->code(-1); 1.662 + while (pc != stop) { 1.663 + delta = GET_JUMP_OFFSET(pc); 1.664 + span = target - pc; 1.665 + SET_JUMP_OFFSET(pc, span); 1.666 + *pc = op; 1.667 + pc -= delta; 1.668 + } 1.669 + return true; 1.670 +} 1.671 + 1.672 +#define SET_STATEMENT_TOP(stmt, top) \ 1.673 + ((stmt)->update = (top), (stmt)->breaks = (stmt)->continues = (-1)) 1.674 + 1.675 +static void 1.676 +PushStatementInner(BytecodeEmitter *bce, StmtInfoBCE *stmt, StmtType type, ptrdiff_t top) 1.677 +{ 1.678 + SET_STATEMENT_TOP(stmt, top); 1.679 + PushStatement(bce, stmt, type); 1.680 +} 1.681 + 1.682 +static 
void 1.683 +PushStatementBCE(BytecodeEmitter *bce, StmtInfoBCE *stmt, StmtType type, ptrdiff_t top) 1.684 +{ 1.685 + PushStatementInner(bce, stmt, type, top); 1.686 + JS_ASSERT(!stmt->isLoop()); 1.687 +} 1.688 + 1.689 +static void 1.690 +PushLoopStatement(BytecodeEmitter *bce, LoopStmtInfo *stmt, StmtType type, ptrdiff_t top) 1.691 +{ 1.692 + PushStatementInner(bce, stmt, type, top); 1.693 + JS_ASSERT(stmt->isLoop()); 1.694 + 1.695 + LoopStmtInfo *downLoop = nullptr; 1.696 + for (StmtInfoBCE *outer = stmt->down; outer; outer = outer->down) { 1.697 + if (outer->isLoop()) { 1.698 + downLoop = LoopStmtInfo::fromStmtInfo(outer); 1.699 + break; 1.700 + } 1.701 + } 1.702 + 1.703 + stmt->stackDepth = bce->stackDepth; 1.704 + stmt->loopDepth = downLoop ? downLoop->loopDepth + 1 : 1; 1.705 + 1.706 + int loopSlots; 1.707 + if (type == STMT_FOR_OF_LOOP) 1.708 + loopSlots = 2; 1.709 + else if (type == STMT_FOR_IN_LOOP) 1.710 + loopSlots = 1; 1.711 + else 1.712 + loopSlots = 0; 1.713 + 1.714 + if (downLoop) 1.715 + stmt->canIonOsr = (downLoop->canIonOsr && 1.716 + stmt->stackDepth == downLoop->stackDepth + loopSlots); 1.717 + else 1.718 + stmt->canIonOsr = stmt->stackDepth == loopSlots; 1.719 +} 1.720 + 1.721 +/* 1.722 + * Return the enclosing lexical scope, which is the innermost enclosing static 1.723 + * block object or compiler created function. 
1.724 + */ 1.725 +static JSObject * 1.726 +EnclosingStaticScope(BytecodeEmitter *bce) 1.727 +{ 1.728 + if (bce->staticScope) 1.729 + return bce->staticScope; 1.730 + 1.731 + if (!bce->sc->isFunctionBox()) { 1.732 + JS_ASSERT(!bce->parent); 1.733 + return nullptr; 1.734 + } 1.735 + 1.736 + return bce->sc->asFunctionBox()->function(); 1.737 +} 1.738 + 1.739 +#ifdef DEBUG 1.740 +static bool 1.741 +AllLocalsAliased(StaticBlockObject &obj) 1.742 +{ 1.743 + for (unsigned i = 0; i < obj.numVariables(); i++) 1.744 + if (!obj.isAliased(i)) 1.745 + return false; 1.746 + return true; 1.747 +} 1.748 +#endif 1.749 + 1.750 +static bool 1.751 +ComputeAliasedSlots(ExclusiveContext *cx, BytecodeEmitter *bce, Handle<StaticBlockObject *> blockObj) 1.752 +{ 1.753 + for (unsigned i = 0; i < blockObj->numVariables(); i++) { 1.754 + Definition *dn = blockObj->definitionParseNode(i); 1.755 + 1.756 + JS_ASSERT(dn->isDefn()); 1.757 + if (!dn->pn_cookie.set(bce->parser->tokenStream, dn->pn_cookie.level(), 1.758 + blockObj->blockIndexToLocalIndex(dn->frameSlot()))) 1.759 + { 1.760 + return false; 1.761 + } 1.762 + 1.763 +#ifdef DEBUG 1.764 + for (ParseNode *pnu = dn->dn_uses; pnu; pnu = pnu->pn_link) { 1.765 + JS_ASSERT(pnu->pn_lexdef == dn); 1.766 + JS_ASSERT(!(pnu->pn_dflags & PND_BOUND)); 1.767 + JS_ASSERT(pnu->pn_cookie.isFree()); 1.768 + } 1.769 +#endif 1.770 + 1.771 + blockObj->setAliased(i, bce->isAliasedName(dn)); 1.772 + } 1.773 + 1.774 + JS_ASSERT_IF(bce->sc->allLocalsAliased(), AllLocalsAliased(*blockObj)); 1.775 + 1.776 + return true; 1.777 +} 1.778 + 1.779 +static bool 1.780 +EmitInternedObjectOp(ExclusiveContext *cx, uint32_t index, JSOp op, BytecodeEmitter *bce); 1.781 + 1.782 +// In a function, block-scoped locals go after the vars, and form part of the 1.783 +// fixed part of a stack frame. 
Outside a function, there are no fixed vars, 1.784 +// but block-scoped locals still form part of the fixed part of a stack frame 1.785 +// and are thus addressable via GETLOCAL and friends. 1.786 +static void 1.787 +ComputeLocalOffset(ExclusiveContext *cx, BytecodeEmitter *bce, Handle<StaticBlockObject *> blockObj) 1.788 +{ 1.789 + unsigned nfixedvars = bce->sc->isFunctionBox() ? bce->script->bindings.numVars() : 0; 1.790 + unsigned localOffset = nfixedvars; 1.791 + 1.792 + if (bce->staticScope) { 1.793 + Rooted<NestedScopeObject *> outer(cx, bce->staticScope); 1.794 + for (; outer; outer = outer->enclosingNestedScope()) { 1.795 + if (outer->is<StaticBlockObject>()) { 1.796 + StaticBlockObject &outerBlock = outer->as<StaticBlockObject>(); 1.797 + localOffset = outerBlock.localOffset() + outerBlock.numVariables(); 1.798 + break; 1.799 + } 1.800 + } 1.801 + } 1.802 + 1.803 + JS_ASSERT(localOffset + blockObj->numVariables() 1.804 + <= nfixedvars + bce->script->bindings.numBlockScoped()); 1.805 + 1.806 + blockObj->setLocalOffset(localOffset); 1.807 +} 1.808 + 1.809 +// ~ Nested Scopes ~ 1.810 +// 1.811 +// A nested scope is a region of a compilation unit (function, script, or eval 1.812 +// code) with an additional node on the scope chain. This node may either be a 1.813 +// "with" object or a "block" object. "With" objects represent "with" scopes. 1.814 +// Block objects represent lexical scopes, and contain named block-scoped 1.815 +// bindings, for example "let" bindings or the exception in a catch block. 1.816 +// Those variables may be local and thus accessible directly from the stack, or 1.817 +// "aliased" (accessed by name from nested functions, or dynamically via nested 1.818 +// "eval" or "with") and only accessible through the scope chain. 1.819 +// 1.820 +// All nested scopes are present on the "static scope chain". A nested scope 1.821 +// that is a "with" scope will be present on the scope chain at run-time as 1.822 +// well. 
A block scope may or may not have a corresponding link on the run-time 1.823 +// scope chain; if no variable declared in the block scope is "aliased", then no 1.824 +// scope chain node is allocated. 1.825 +// 1.826 +// To help debuggers, the bytecode emitter arranges to record the PC ranges 1.827 +// comprehended by a nested scope, and ultimately attach them to the JSScript. 1.828 +// An element in the "block scope array" specifies the PC range, and links to a 1.829 +// NestedScopeObject in the object list of the script. That scope object is 1.830 +// linked to the previous link in the static scope chain, if any. The static 1.831 +// scope chain at any pre-retire PC can be retrieved using 1.832 +// JSScript::getStaticScope(jsbytecode *pc). 1.833 +// 1.834 +// Block scopes store their locals in the fixed part of a stack frame, after the 1.835 +// "fixed var" bindings. A fixed var binding is a "var" or legacy "const" 1.836 +// binding that occurs in a function (as opposed to a script or in eval code). 1.837 +// Only functions have fixed var bindings. 1.838 +// 1.839 +// To assist the debugger, we emit a DEBUGLEAVEBLOCK opcode before leaving a 1.840 +// block scope, even if the block has no aliased locals. This allows 1.841 +// DebugScopes to invalidate any association between a debugger scope object, 1.842 +// which can proxy access to unaliased stack locals, and the actual live frame. 1.843 +// In normal, non-debug mode, this opcode does not cause any baseline code to be 1.844 +// emitted. 1.845 +// 1.846 +// Enter a nested scope with EnterNestedScope. It will emit 1.847 +// PUSHBLOCKSCOPE/ENTERWITH if needed, and arrange to record the PC bounds of 1.848 +// the scope. Leave a nested scope with LeaveNestedScope, which, for blocks, 1.849 +// will emit DEBUGLEAVEBLOCK and may emit POPBLOCKSCOPE. (For "with" scopes it 1.850 +// emits LEAVEWITH, of course.) 
Pass EnterNestedScope a fresh StmtInfoBCE 1.851 +// object, and pass that same object to the corresponding LeaveNestedScope. If 1.852 +// the statement is a block scope, pass STMT_BLOCK as stmtType; otherwise for 1.853 +// with scopes pass STMT_WITH. 1.854 +// 1.855 +static bool 1.856 +EnterNestedScope(ExclusiveContext *cx, BytecodeEmitter *bce, StmtInfoBCE *stmt, ObjectBox *objbox, 1.857 + StmtType stmtType) 1.858 +{ 1.859 + Rooted<NestedScopeObject *> scopeObj(cx, &objbox->object->as<NestedScopeObject>()); 1.860 + uint32_t scopeObjectIndex = bce->objectList.add(objbox); 1.861 + 1.862 + switch (stmtType) { 1.863 + case STMT_BLOCK: { 1.864 + Rooted<StaticBlockObject *> blockObj(cx, &scopeObj->as<StaticBlockObject>()); 1.865 + 1.866 + ComputeLocalOffset(cx, bce, blockObj); 1.867 + 1.868 + if (!ComputeAliasedSlots(cx, bce, blockObj)) 1.869 + return false; 1.870 + 1.871 + if (blockObj->needsClone()) { 1.872 + if (!EmitInternedObjectOp(cx, scopeObjectIndex, JSOP_PUSHBLOCKSCOPE, bce)) 1.873 + return false; 1.874 + } 1.875 + break; 1.876 + } 1.877 + case STMT_WITH: 1.878 + JS_ASSERT(scopeObj->is<StaticWithObject>()); 1.879 + if (!EmitInternedObjectOp(cx, scopeObjectIndex, JSOP_ENTERWITH, bce)) 1.880 + return false; 1.881 + break; 1.882 + default: 1.883 + MOZ_ASSUME_UNREACHABLE(); 1.884 + } 1.885 + 1.886 + uint32_t parent = BlockScopeNote::NoBlockScopeIndex; 1.887 + if (StmtInfoBCE *stmt = bce->topScopeStmt) { 1.888 + for (; stmt->staticScope != bce->staticScope; stmt = stmt->down) {} 1.889 + parent = stmt->blockScopeIndex; 1.890 + } 1.891 + 1.892 + stmt->blockScopeIndex = bce->blockScopeList.length(); 1.893 + if (!bce->blockScopeList.append(scopeObjectIndex, bce->offset(), parent)) 1.894 + return false; 1.895 + 1.896 + PushStatementBCE(bce, stmt, stmtType, bce->offset()); 1.897 + scopeObj->initEnclosingNestedScope(EnclosingStaticScope(bce)); 1.898 + FinishPushNestedScope(bce, stmt, *scopeObj); 1.899 + JS_ASSERT(stmt->isNestedScope); 1.900 + stmt->isBlockScope = (stmtType 
== STMT_BLOCK); 1.901 + 1.902 + return true; 1.903 +} 1.904 + 1.905 +// Patches |breaks| and |continues| unless the top statement info record 1.906 +// represents a try-catch-finally suite. May fail if a jump offset overflows. 1.907 +static bool 1.908 +PopStatementBCE(ExclusiveContext *cx, BytecodeEmitter *bce) 1.909 +{ 1.910 + StmtInfoBCE *stmt = bce->topStmt; 1.911 + if (!stmt->isTrying() && 1.912 + (!BackPatch(cx, bce, stmt->breaks, bce->code().end(), JSOP_GOTO) || 1.913 + !BackPatch(cx, bce, stmt->continues, bce->code(stmt->update), JSOP_GOTO))) 1.914 + { 1.915 + return false; 1.916 + } 1.917 + 1.918 + FinishPopStatement(bce); 1.919 + return true; 1.920 +} 1.921 + 1.922 +static bool 1.923 +LeaveNestedScope(ExclusiveContext *cx, BytecodeEmitter *bce, StmtInfoBCE *stmt) 1.924 +{ 1.925 + JS_ASSERT(stmt == bce->topStmt); 1.926 + JS_ASSERT(stmt->isNestedScope); 1.927 + JS_ASSERT(stmt->isBlockScope == !(stmt->type == STMT_WITH)); 1.928 + uint32_t blockScopeIndex = stmt->blockScopeIndex; 1.929 + 1.930 +#ifdef DEBUG 1.931 + JS_ASSERT(bce->blockScopeList.list[blockScopeIndex].length == 0); 1.932 + uint32_t blockObjIndex = bce->blockScopeList.list[blockScopeIndex].index; 1.933 + ObjectBox *blockObjBox = bce->objectList.find(blockObjIndex); 1.934 + NestedScopeObject *staticScope = &blockObjBox->object->as<NestedScopeObject>(); 1.935 + JS_ASSERT(stmt->staticScope == staticScope); 1.936 + JS_ASSERT(staticScope == bce->staticScope); 1.937 + JS_ASSERT_IF(!stmt->isBlockScope, staticScope->is<StaticWithObject>()); 1.938 +#endif 1.939 + 1.940 + if (!PopStatementBCE(cx, bce)) 1.941 + return false; 1.942 + 1.943 + if (Emit1(cx, bce, stmt->isBlockScope ? 
JSOP_DEBUGLEAVEBLOCK : JSOP_LEAVEWITH) < 0) 1.944 + return false; 1.945 + 1.946 + bce->blockScopeList.recordEnd(blockScopeIndex, bce->offset()); 1.947 + 1.948 + if (stmt->isBlockScope && stmt->staticScope->as<StaticBlockObject>().needsClone()) { 1.949 + if (Emit1(cx, bce, JSOP_POPBLOCKSCOPE) < 0) 1.950 + return false; 1.951 + } 1.952 + 1.953 + return true; 1.954 +} 1.955 + 1.956 +static bool 1.957 +EmitIndex32(ExclusiveContext *cx, JSOp op, uint32_t index, BytecodeEmitter *bce) 1.958 +{ 1.959 + const size_t len = 1 + UINT32_INDEX_LEN; 1.960 + JS_ASSERT(len == size_t(js_CodeSpec[op].length)); 1.961 + ptrdiff_t offset = EmitCheck(cx, bce, len); 1.962 + if (offset < 0) 1.963 + return false; 1.964 + 1.965 + jsbytecode *code = bce->code(offset); 1.966 + code[0] = jsbytecode(op); 1.967 + SET_UINT32_INDEX(code, index); 1.968 + UpdateDepth(cx, bce, offset); 1.969 + CheckTypeSet(cx, bce, op); 1.970 + return true; 1.971 +} 1.972 + 1.973 +static bool 1.974 +EmitIndexOp(ExclusiveContext *cx, JSOp op, uint32_t index, BytecodeEmitter *bce) 1.975 +{ 1.976 + const size_t len = js_CodeSpec[op].length; 1.977 + JS_ASSERT(len >= 1 + UINT32_INDEX_LEN); 1.978 + ptrdiff_t offset = EmitCheck(cx, bce, len); 1.979 + if (offset < 0) 1.980 + return false; 1.981 + 1.982 + jsbytecode *code = bce->code(offset); 1.983 + code[0] = jsbytecode(op); 1.984 + SET_UINT32_INDEX(code, index); 1.985 + UpdateDepth(cx, bce, offset); 1.986 + CheckTypeSet(cx, bce, op); 1.987 + return true; 1.988 +} 1.989 + 1.990 +static bool 1.991 +EmitAtomOp(ExclusiveContext *cx, JSAtom *atom, JSOp op, BytecodeEmitter *bce) 1.992 +{ 1.993 + JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM); 1.994 + 1.995 + if (op == JSOP_GETPROP && atom == cx->names().length) { 1.996 + /* Specialize length accesses for the interpreter. 
*/ 1.997 + op = JSOP_LENGTH; 1.998 + } 1.999 + 1.1000 + jsatomid index; 1.1001 + if (!bce->makeAtomIndex(atom, &index)) 1.1002 + return false; 1.1003 + 1.1004 + return EmitIndexOp(cx, op, index, bce); 1.1005 +} 1.1006 + 1.1007 +static bool 1.1008 +EmitAtomOp(ExclusiveContext *cx, ParseNode *pn, JSOp op, BytecodeEmitter *bce) 1.1009 +{ 1.1010 + JS_ASSERT(pn->pn_atom != nullptr); 1.1011 + return EmitAtomOp(cx, pn->pn_atom, op, bce); 1.1012 +} 1.1013 + 1.1014 +static bool 1.1015 +EmitInternedObjectOp(ExclusiveContext *cx, uint32_t index, JSOp op, BytecodeEmitter *bce) 1.1016 +{ 1.1017 + JS_ASSERT(JOF_OPTYPE(op) == JOF_OBJECT); 1.1018 + JS_ASSERT(index < bce->objectList.length); 1.1019 + return EmitIndex32(cx, op, index, bce); 1.1020 +} 1.1021 + 1.1022 +static bool 1.1023 +EmitObjectOp(ExclusiveContext *cx, ObjectBox *objbox, JSOp op, BytecodeEmitter *bce) 1.1024 +{ 1.1025 + return EmitInternedObjectOp(cx, bce->objectList.add(objbox), op, bce); 1.1026 +} 1.1027 + 1.1028 +static bool 1.1029 +EmitRegExp(ExclusiveContext *cx, uint32_t index, BytecodeEmitter *bce) 1.1030 +{ 1.1031 + return EmitIndex32(cx, JSOP_REGEXP, index, bce); 1.1032 +} 1.1033 + 1.1034 +/* 1.1035 + * To catch accidental misuse, EMIT_UINT16_IMM_OP/Emit3 assert that they are 1.1036 + * not used to unconditionally emit JSOP_GETLOCAL. Variable access should 1.1037 + * instead be emitted using EmitVarOp. In special cases, when the caller 1.1038 + * definitely knows that a given local slot is unaliased, this function may be 1.1039 + * used as a non-asserting version of EMIT_UINT16_IMM_OP. 
1.1040 + */ 1.1041 +static bool 1.1042 +EmitUnaliasedVarOp(ExclusiveContext *cx, JSOp op, uint32_t slot, BytecodeEmitter *bce) 1.1043 +{ 1.1044 + JS_ASSERT(JOF_OPTYPE(op) != JOF_SCOPECOORD); 1.1045 + 1.1046 + if (IsLocalOp(op)) { 1.1047 + ptrdiff_t off = EmitN(cx, bce, op, LOCALNO_LEN); 1.1048 + if (off < 0) 1.1049 + return false; 1.1050 + 1.1051 + SET_LOCALNO(bce->code(off), slot); 1.1052 + return true; 1.1053 + } 1.1054 + 1.1055 + JS_ASSERT(IsArgOp(op)); 1.1056 + ptrdiff_t off = EmitN(cx, bce, op, ARGNO_LEN); 1.1057 + if (off < 0) 1.1058 + return false; 1.1059 + 1.1060 + SET_ARGNO(bce->code(off), slot); 1.1061 + return true; 1.1062 +} 1.1063 + 1.1064 +static bool 1.1065 +EmitAliasedVarOp(ExclusiveContext *cx, JSOp op, ScopeCoordinate sc, BytecodeEmitter *bce) 1.1066 +{ 1.1067 + JS_ASSERT(JOF_OPTYPE(op) == JOF_SCOPECOORD); 1.1068 + 1.1069 + unsigned n = SCOPECOORD_HOPS_LEN + SCOPECOORD_SLOT_LEN; 1.1070 + JS_ASSERT(int(n) + 1 /* op */ == js_CodeSpec[op].length); 1.1071 + 1.1072 + ptrdiff_t off = EmitN(cx, bce, op, n); 1.1073 + if (off < 0) 1.1074 + return false; 1.1075 + 1.1076 + jsbytecode *pc = bce->code(off); 1.1077 + SET_SCOPECOORD_HOPS(pc, sc.hops()); 1.1078 + pc += SCOPECOORD_HOPS_LEN; 1.1079 + SET_SCOPECOORD_SLOT(pc, sc.slot()); 1.1080 + pc += SCOPECOORD_SLOT_LEN; 1.1081 + CheckTypeSet(cx, bce, op); 1.1082 + return true; 1.1083 +} 1.1084 + 1.1085 +// Compute the number of nested scope objects that will actually be on the scope 1.1086 +// chain at runtime, given the BCE's current staticScope. 
1.1087 +static unsigned 1.1088 +DynamicNestedScopeDepth(BytecodeEmitter *bce) 1.1089 +{ 1.1090 + unsigned depth = 0; 1.1091 + for (NestedScopeObject *b = bce->staticScope; b; b = b->enclosingNestedScope()) { 1.1092 + if (!b->is<StaticBlockObject>() || b->as<StaticBlockObject>().needsClone()) 1.1093 + ++depth; 1.1094 + } 1.1095 + 1.1096 + return depth; 1.1097 +} 1.1098 + 1.1099 +static bool 1.1100 +LookupAliasedName(HandleScript script, PropertyName *name, uint32_t *pslot) 1.1101 +{ 1.1102 + /* 1.1103 + * Beware: BindingIter may contain more than one Binding for a given name 1.1104 + * (in the case of |function f(x,x) {}|) but only one will be aliased. 1.1105 + */ 1.1106 + uint32_t slot = CallObject::RESERVED_SLOTS; 1.1107 + for (BindingIter bi(script); !bi.done(); bi++) { 1.1108 + if (bi->aliased()) { 1.1109 + if (bi->name() == name) { 1.1110 + *pslot = slot; 1.1111 + return true; 1.1112 + } 1.1113 + slot++; 1.1114 + } 1.1115 + } 1.1116 + return false; 1.1117 +} 1.1118 + 1.1119 +static bool 1.1120 +LookupAliasedNameSlot(HandleScript script, PropertyName *name, ScopeCoordinate *sc) 1.1121 +{ 1.1122 + uint32_t slot; 1.1123 + if (!LookupAliasedName(script, name, &slot)) 1.1124 + return false; 1.1125 + 1.1126 + sc->setSlot(slot); 1.1127 + return true; 1.1128 +} 1.1129 + 1.1130 +/* 1.1131 + * Use this function instead of assigning directly to 'hops' to guard for 1.1132 + * uint8_t overflows. 
 */
static bool
AssignHops(BytecodeEmitter *bce, ParseNode *pn, unsigned src, ScopeCoordinate *dst)
{
    // ScopeCoordinate stores hops in a uint8_t; report rather than truncate.
    if (src > UINT8_MAX) {
        bce->reportError(pn, JSMSG_TOO_DEEP, js_function_str);
        return false;
    }

    dst->setHops(src);
    return true;
}

// Compute the (hops, slot) scope coordinate for the name in |pn| and emit the
// aliased-var op with it. May fail with a "too deep" error via AssignHops.
static bool
EmitAliasedVarOp(ExclusiveContext *cx, JSOp op, ParseNode *pn, BytecodeEmitter *bce)
{
    /*
     * While pn->pn_cookie tells us how many function scopes are between the use and the def this
     * is not the same as how many hops up the dynamic scope chain are needed. In particular:
     *  - a lexical function scope only contributes a hop if it is "heavyweight" (has a dynamic
     *    scope object).
     *  - a heavyweight named function scope contributes an extra scope to the scope chain (a
     *    DeclEnvObject that holds just the name).
     *  - all the intervening let/catch blocks must be counted.
     */
    unsigned skippedScopes = 0;
    BytecodeEmitter *bceOfDef = bce;
    if (pn->isUsed()) {
        /*
         * As explained in BindNameToSlot, the 'level' of a use indicates how
         * many function scopes (i.e., BytecodeEmitters) to skip to find the
         * enclosing function scope of the definition being accessed.
         */
        for (unsigned i = pn->pn_cookie.level(); i; i--) {
            skippedScopes += DynamicNestedScopeDepth(bceOfDef);
            FunctionBox *funbox = bceOfDef->sc->asFunctionBox();
            if (funbox->isHeavyweight()) {
                skippedScopes++;
                // Heavyweight named lambdas also push a DeclEnvObject.
                if (funbox->function()->isNamedLambda())
                    skippedScopes++;
            }
            bceOfDef = bceOfDef->parent;
        }
    } else {
        JS_ASSERT(pn->isDefn());
        JS_ASSERT(pn->pn_cookie.level() == bce->script->staticLevel());
    }

    /*
     * The final part of the skippedScopes computation depends on the type of
     * variable. An arg or local variable is at the outer scope of a function
     * and so includes the full DynamicNestedScopeDepth. A let/catch-binding
     * requires a search of the block chain to see how many (dynamic) block
     * objects to skip.
     */
    ScopeCoordinate sc;
    if (IsArgOp(pn->getOp())) {
        if (!AssignHops(bce, pn, skippedScopes + DynamicNestedScopeDepth(bceOfDef), &sc))
            return false;
        JS_ALWAYS_TRUE(LookupAliasedNameSlot(bceOfDef->script, pn->name(), &sc));
    } else {
        JS_ASSERT(IsLocalOp(pn->getOp()) || pn->isKind(PNK_FUNCTION));
        uint32_t local = pn->pn_cookie.slot();
        if (local < bceOfDef->script->bindings.numVars()) {
            // A var/const: at the function's outer scope, like an arg.
            if (!AssignHops(bce, pn, skippedScopes + DynamicNestedScopeDepth(bceOfDef), &sc))
                return false;
            JS_ALWAYS_TRUE(LookupAliasedNameSlot(bceOfDef->script, pn->name(), &sc));
        } else {
            // A let/catch binding: walk block scopes outward until the block
            // whose local range contains |local|, counting cloned blocks.
            JS_ASSERT_IF(bce->sc->isFunctionBox(), local <= bceOfDef->script->bindings.numLocals());
            JS_ASSERT(bceOfDef->staticScope->is<StaticBlockObject>());
            Rooted<StaticBlockObject*> b(cx, &bceOfDef->staticScope->as<StaticBlockObject>());
            while (local < b->localOffset()) {
                if (b->needsClone())
                    skippedScopes++;
                b = &b->enclosingNestedScope()->as<StaticBlockObject>();
            }
            if (!AssignHops(bce, pn, skippedScopes, &sc))
                return false;
            sc.setSlot(b->localIndexToSlot(local));
        }
    }

    return EmitAliasedVarOp(cx, op, sc, bce);
}

// Emit a variable access op for |pn|, routing to the unaliased or aliased
// form as appropriate and rewriting GET/SETARG / GET/SETLOCAL to the
// corresponding ALIASEDVAR op when the binding is aliased.
static bool
EmitVarOp(ExclusiveContext *cx, ParseNode *pn, JSOp op, BytecodeEmitter *bce)
{
    JS_ASSERT(pn->isKind(PNK_FUNCTION) || pn->isKind(PNK_NAME));
    JS_ASSERT(!pn->pn_cookie.isFree());

    // Already an aliased op: the cookie directly encodes (hops, slot).
    if (IsAliasedVarOp(op)) {
        ScopeCoordinate sc;
        sc.setHops(pn->pn_cookie.level());
        sc.setSlot(pn->pn_cookie.slot());
        return EmitAliasedVarOp(cx, op, sc, bce);
    }

    JS_ASSERT_IF(pn->isKind(PNK_NAME), IsArgOp(op) || IsLocalOp(op));

    if (!bce->isAliasedName(pn)) {
        JS_ASSERT(pn->isUsed() || pn->isDefn());
        JS_ASSERT_IF(pn->isUsed(), pn->pn_cookie.level() == 0);
        JS_ASSERT_IF(pn->isDefn(), pn->pn_cookie.level() == bce->script->staticLevel());
        return EmitUnaliasedVarOp(cx, op, pn->pn_cookie.slot(), bce);
    }

    switch (op) {
      case JSOP_GETARG: case JSOP_GETLOCAL: op = JSOP_GETALIASEDVAR; break;
      case JSOP_SETARG: case JSOP_SETLOCAL: op = JSOP_SETALIASEDVAR; break;
      default: MOZ_ASSUME_UNREACHABLE("unexpected var op");
    }

    return EmitAliasedVarOp(cx, op, pn, bce);
}

// Decode an inc/dec parse-node kind: sets *post for postfix forms and returns
// the arithmetic op to apply (ADD for ++, SUB for --).
static JSOp
GetIncDecInfo(ParseNodeKind kind, bool *post)
{
    JS_ASSERT(kind == PNK_POSTINCREMENT || kind == PNK_PREINCREMENT ||
              kind == PNK_POSTDECREMENT || kind == PNK_PREDECREMENT);
    *post = kind == PNK_POSTINCREMENT || kind == PNK_POSTDECREMENT;
    return (kind == PNK_POSTINCREMENT || kind == PNK_PREINCREMENT) ?
JSOP_ADD : JSOP_SUB; 1.1256 +} 1.1257 + 1.1258 +static bool 1.1259 +EmitVarIncDec(ExclusiveContext *cx, ParseNode *pn, BytecodeEmitter *bce) 1.1260 +{ 1.1261 + JSOp op = pn->pn_kid->getOp(); 1.1262 + JS_ASSERT(IsArgOp(op) || IsLocalOp(op) || IsAliasedVarOp(op)); 1.1263 + JS_ASSERT(pn->pn_kid->isKind(PNK_NAME)); 1.1264 + JS_ASSERT(!pn->pn_kid->pn_cookie.isFree()); 1.1265 + 1.1266 + bool post; 1.1267 + JSOp binop = GetIncDecInfo(pn->getKind(), &post); 1.1268 + 1.1269 + JSOp getOp, setOp; 1.1270 + if (IsLocalOp(op)) { 1.1271 + getOp = JSOP_GETLOCAL; 1.1272 + setOp = JSOP_SETLOCAL; 1.1273 + } else if (IsArgOp(op)) { 1.1274 + getOp = JSOP_GETARG; 1.1275 + setOp = JSOP_SETARG; 1.1276 + } else { 1.1277 + getOp = JSOP_GETALIASEDVAR; 1.1278 + setOp = JSOP_SETALIASEDVAR; 1.1279 + } 1.1280 + 1.1281 + if (!EmitVarOp(cx, pn->pn_kid, getOp, bce)) // V 1.1282 + return false; 1.1283 + if (Emit1(cx, bce, JSOP_POS) < 0) // N 1.1284 + return false; 1.1285 + if (post && Emit1(cx, bce, JSOP_DUP) < 0) // N? N 1.1286 + return false; 1.1287 + if (Emit1(cx, bce, JSOP_ONE) < 0) // N? N 1 1.1288 + return false; 1.1289 + if (Emit1(cx, bce, binop) < 0) // N? N+1 1.1290 + return false; 1.1291 + if (!EmitVarOp(cx, pn->pn_kid, setOp, bce)) // N? N+1 1.1292 + return false; 1.1293 + if (post && Emit1(cx, bce, JSOP_POP) < 0) // RESULT 1.1294 + return false; 1.1295 + 1.1296 + return true; 1.1297 +} 1.1298 + 1.1299 +bool 1.1300 +BytecodeEmitter::isAliasedName(ParseNode *pn) 1.1301 +{ 1.1302 + Definition *dn = pn->resolve(); 1.1303 + JS_ASSERT(dn->isDefn()); 1.1304 + JS_ASSERT(!dn->isPlaceholder()); 1.1305 + JS_ASSERT(dn->isBound()); 1.1306 + 1.1307 + /* If dn is in an enclosing function, it is definitely aliased. */ 1.1308 + if (dn->pn_cookie.level() != script->staticLevel()) 1.1309 + return true; 1.1310 + 1.1311 + switch (dn->kind()) { 1.1312 + case Definition::LET: 1.1313 + /* 1.1314 + * There are two ways to alias a let variable: nested functions and 1.1315 + * dynamic scope operations. 
(This is overly conservative since the 1.1316 + * bindingsAccessedDynamically flag, checked by allLocalsAliased, is 1.1317 + * function-wide.) 1.1318 + * 1.1319 + * In addition all locals in generators are marked as aliased, to ensure 1.1320 + * that they are allocated on scope chains instead of on the stack. See 1.1321 + * the definition of SharedContext::allLocalsAliased. 1.1322 + */ 1.1323 + return dn->isClosed() || sc->allLocalsAliased(); 1.1324 + case Definition::ARG: 1.1325 + /* 1.1326 + * Consult the bindings, since they already record aliasing. We might 1.1327 + * be tempted to use the same definition as VAR/CONST/LET, but there is 1.1328 + * a problem caused by duplicate arguments: only the last argument with 1.1329 + * a given name is aliased. This is necessary to avoid generating a 1.1330 + * shape for the call object with with more than one name for a given 1.1331 + * slot (which violates internal engine invariants). All this means that 1.1332 + * the '|| sc->allLocalsAliased()' disjunct is incorrect since it will 1.1333 + * mark both parameters in function(x,x) as aliased. 1.1334 + */ 1.1335 + return script->formalIsAliased(pn->pn_cookie.slot()); 1.1336 + case Definition::VAR: 1.1337 + case Definition::CONST: 1.1338 + JS_ASSERT_IF(sc->allLocalsAliased(), script->varIsAliased(pn->pn_cookie.slot())); 1.1339 + return script->varIsAliased(pn->pn_cookie.slot()); 1.1340 + case Definition::PLACEHOLDER: 1.1341 + case Definition::NAMED_LAMBDA: 1.1342 + case Definition::MISSING: 1.1343 + MOZ_ASSUME_UNREACHABLE("unexpected dn->kind"); 1.1344 + } 1.1345 + return false; 1.1346 +} 1.1347 + 1.1348 +/* 1.1349 + * Try to convert a *NAME op with a free name to a more specialized GNAME, 1.1350 + * INTRINSIC or ALIASEDVAR op, which optimize accesses on that name. 1.1351 + * Return true if a conversion was made. 
 */
static bool
TryConvertFreeName(BytecodeEmitter *bce, ParseNode *pn)
{
    /*
     * In self-hosting mode, JSOP_*NAME is unconditionally converted to
     * JSOP_*INTRINSIC. This causes lookups to be redirected to the special
     * intrinsics holder in the global object, into which any missing values are
     * cloned lazily upon first access.
     */
    if (bce->emitterMode == BytecodeEmitter::SelfHosting) {
        JSOp op;
        switch (pn->getOp()) {
          case JSOP_NAME:     op = JSOP_GETINTRINSIC; break;
          case JSOP_SETNAME:  op = JSOP_SETINTRINSIC; break;
          /* Other *NAME ops aren't (yet) supported in self-hosted code. */
          default: MOZ_ASSUME_UNREACHABLE("intrinsic");
        }
        pn->setOp(op);
        return true;
    }

    /*
     * When parsing inner functions lazily, parse nodes for outer functions no
     * longer exist and only the function's scope chain is available for
     * resolving upvar accesses within the inner function.
     */
    if (bce->emitterMode == BytecodeEmitter::LazyFunction) {
        // The only statements within a lazy function which can push lexical
        // scopes are try/catch blocks. Use generic ops in this case.
        for (StmtInfoBCE *stmt = bce->topStmt; stmt; stmt = stmt->down) {
            if (stmt->type == STMT_CATCH)
                // NOTE(review): returning true here reports a conversion even
                // though no op was rewritten; the comment above suggests the
                // generic-op path, i.e. returning false — confirm intent.
                return true;
        }

        size_t hops = 0;
        FunctionBox *funbox = bce->sc->asFunctionBox();
        if (funbox->hasExtensibleScope())
            return false;
        // A named lambda referring to its own name must use the generic path.
        if (funbox->function()->isNamedLambda() && funbox->function()->atom() == pn->pn_atom)
            return false;
        if (funbox->isHeavyweight()) {
            hops++;
            if (funbox->function()->isNamedLambda())
                hops++;
        }
        if (bce->script->directlyInsideEval())
            return false;
        // Walk the enclosing static scope chain looking for an aliased
        // binding of this name, accumulating dynamic-scope hops.
        RootedObject outerScope(bce->sc->context, bce->script->enclosingStaticScope());
        for (StaticScopeIter<CanGC> ssi(bce->sc->context, outerScope); !ssi.done(); ssi++) {
            if (ssi.type() != StaticScopeIter<CanGC>::FUNCTION) {
                if (ssi.type() == StaticScopeIter<CanGC>::BLOCK) {
                    // Use generic ops if a catch block is encountered.
                    return false;
                }
                if (ssi.hasDynamicScopeObject())
                    hops++;
                continue;
            }
            RootedScript script(bce->sc->context, ssi.funScript());
            if (script->functionNonDelazifying()->atom() == pn->pn_atom)
                return false;
            if (ssi.hasDynamicScopeObject()) {
                uint32_t slot;
                if (LookupAliasedName(script, pn->pn_atom->asPropertyName(), &slot)) {
                    JSOp op;
                    switch (pn->getOp()) {
                      case JSOP_NAME:    op = JSOP_GETALIASEDVAR; break;
                      case JSOP_SETNAME: op = JSOP_SETALIASEDVAR; break;
                      default: return false;
                    }
                    pn->setOp(op);
                    JS_ALWAYS_TRUE(pn->pn_cookie.set(bce->parser->tokenStream, hops, slot));
                    return true;
                }
                hops++;
            }

            if (script->funHasExtensibleScope() || script->directlyInsideEval())
                return false;
        }
    }

    // Unbound names aren't recognizable global-property references if the
    // script isn't running against its global object.
    if (!bce->script->compileAndGo() || !bce->hasGlobalScope)
        return false;

    // Deoptimized names also aren't necessarily globals.
    if (pn->isDeoptimized())
        return false;

    if (bce->sc->isFunctionBox()) {
        // Unbound names in function code may not be globals if new locals can
        // be added to this function (or an enclosing one) to alias a global
        // reference.
        FunctionBox *funbox = bce->sc->asFunctionBox();
        if (funbox->mightAliasLocals())
            return false;
    }

    // If this is eval code, being evaluated inside strict mode eval code,
    // an "unbound" name might be a binding local to that outer eval:
    //
    //   var x = "GLOBAL";
    //   eval('"use strict"; ' +
    //        'var x; ' +
    //        'eval("print(x)");'); // "undefined", not "GLOBAL"
    //
    // Given the enclosing eval code's strictness and its bindings (neither is
    // readily available now), we could exactly check global-ness, but it's not
    // worth the trouble for doubly-nested eval code. So we conservatively
    // approximate. If the outer eval code is strict, then this eval code will
    // be: thus, don't optimize if we're compiling strict code inside an eval.
    if (bce->insideEval && bce->sc->strict)
        return false;

    // Beware: if you change anything here, you might also need to change
    // js::ReportIfUndeclaredVarAssignment.
    JSOp op;
    switch (pn->getOp()) {
      case JSOP_NAME:    op = JSOP_GETGNAME; break;
      case JSOP_SETNAME: op = JSOP_SETGNAME; break;
      case JSOP_SETCONST:
        // Not supported.
        return false;
      default: MOZ_ASSUME_UNREACHABLE("gname");
    }
    pn->setOp(op);
    return true;
}

/*
 * BindNameToSlotHelper attempts to optimize name gets and sets to stack slot
 * loads and stores, given the compile-time information in bce and a PNK_NAME
 * node pn. It returns false on error, true on success.
 *
 * The caller can test pn->pn_cookie.isFree() to tell whether optimization
 * occurred, in which case BindNameToSlotHelper also updated pn->pn_op. If
 * pn->pn_cookie.isFree() is still true on return, pn->pn_op still may have
 * been optimized, e.g., from JSOP_NAME to JSOP_CALLEE. Whether or not
 * pn->pn_op was modified, if this function finds an argument or local variable
 * name, PND_CONST will be set in pn_dflags for read-only properties after a
 * successful return.
 *
 * NB: if you add more opcodes specialized from JSOP_NAME, etc., don't forget
 * to update the special cases in EmitFor (for-in) and EmitAssignment (= and
 * op=, e.g. +=).
 */
static bool
BindNameToSlotHelper(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
    JS_ASSERT(pn->isKind(PNK_NAME));

    // NOTE(review): vacuous given the assert just above — a node cannot be
    // both PNK_NAME and PNK_FUNCTION; possibly a leftover from a refactor.
    JS_ASSERT_IF(pn->isKind(PNK_FUNCTION), pn->isBound());

    /* Don't attempt if 'pn' is already bound or deoptimized or a function. */
    if (pn->isBound() || pn->isDeoptimized())
        return true;

    /* JSOP_CALLEE is pre-bound by definition. */
    JSOp op = pn->getOp();
    JS_ASSERT(op != JSOP_CALLEE);
    JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);

    /*
     * The parser already linked name uses to definitions when (where not
     * prevented by non-lexical constructs like 'with' and 'eval').
     */
    Definition *dn;
    if (pn->isUsed()) {
        JS_ASSERT(pn->pn_cookie.isFree());
        dn = pn->pn_lexdef;
        JS_ASSERT(dn->isDefn());
        // Propagate const-ness from the definition to this use.
        pn->pn_dflags |= (dn->pn_dflags & PND_CONST);
    } else if (pn->isDefn()) {
        dn = (Definition *) pn;
    } else {
        // Neither a use nor a definition: nothing to bind.
        return true;
    }

    /*
     * Turn attempts to mutate const-declared bindings into get ops (for
     * pre-increment and pre-decrement ops, our caller will have to emit
     * JSOP_POS, JSOP_ONE, and JSOP_ADD as well).
     *
     * Turn JSOP_DELNAME into JSOP_FALSE if dn is known, as all declared
     * bindings visible to the compiler are permanent in JS unless the
     * declaration originates at top level in eval code.
     */
    switch (op) {
      case JSOP_NAME:
      case JSOP_SETCONST:
        break;
      default:
        if (pn->isConst()) {
            // Writing a const: warn/error under strict checks, then demote
            // the op to a plain read.
            if (bce->sc->needStrictChecks()) {
                JSAutoByteString name;
                if (!AtomToPrintableString(cx, pn->pn_atom, &name) ||
                    !bce->reportStrictModeError(pn, JSMSG_READ_ONLY, name.ptr()))
                {
                    return false;
                }
            }
            pn->setOp(op = JSOP_NAME);
        }
    }

    if (dn->pn_cookie.isFree()) {
        // The definition has no slot: it is a free name. In eval code we may
        // still be able to use GNAME ops; otherwise leave the generic op.
        if (HandleScript caller = bce->evalCaller) {
            JS_ASSERT(bce->script->compileAndGo());

            /*
             * Don't generate upvars on the left side of a for loop. See
             * bug 470758.
             */
            if (bce->emittingForInit)
                return true;

            /*
             * If this is an eval in the global scope, then unbound variables
             * must be globals, so try to use GNAME ops.
             */
            if (!caller->functionOrCallerFunction() && TryConvertFreeName(bce, pn)) {
                pn->pn_dflags |= PND_BOUND;
                return true;
            }

            /*
             * Out of tricks, so we must rely on PICs to optimize named
             * accesses from direct eval called from function code.
             */
            return true;
        }

        /* Optimize accesses to undeclared globals. */
        if (!TryConvertFreeName(bce, pn))
            return true;

        pn->pn_dflags |= PND_BOUND;
        return true;
    }

    /*
     * At this point, we are only dealing with uses that have already been
     * bound to definitions via pn_lexdef. The rest of this routine converts
     * the parse node of the use from its initial JSOP_*NAME* op to a LOCAL/ARG
     * op. This requires setting the node's pn_cookie with a pair (level, slot)
     * where 'level' is the number of function scopes between the use and the
     * def and 'slot' is the index to emit as the immediate of the ARG/LOCAL
     * op. For example, in this code:
     *
     *   function(a,b,x) { return x }
     *   function(y) { function() { return y } }
     *
     * x will get (level = 0, slot = 2) and y will get (level = 1, slot = 0).
     */
    JS_ASSERT(!pn->isDefn());
    JS_ASSERT(pn->isUsed());
    JS_ASSERT(pn->pn_lexdef);
    JS_ASSERT(pn->pn_cookie.isFree());

    /*
     * We are compiling a function body and may be able to optimize name
     * to stack slot. Look for an argument or variable in the function and
     * rewrite pn_op and update pn accordingly.
     */
    switch (dn->kind()) {
      case Definition::ARG:
        switch (op) {
          case JSOP_NAME:     op = JSOP_GETARG; break;
          case JSOP_SETNAME:  op = JSOP_SETARG; break;
          default: MOZ_ASSUME_UNREACHABLE("arg");
        }
        JS_ASSERT(!pn->isConst());
        break;

      case Definition::VAR:
      case Definition::CONST:
      case Definition::LET:
        switch (op) {
          case JSOP_NAME:     op = JSOP_GETLOCAL; break;
          case JSOP_SETNAME:  op = JSOP_SETLOCAL; break;
          case JSOP_SETCONST: op = JSOP_SETLOCAL; break;
          default: MOZ_ASSUME_UNREACHABLE("local");
        }
        break;

      case Definition::NAMED_LAMBDA: {
        JS_ASSERT(dn->isOp(JSOP_CALLEE));
        JS_ASSERT(op != JSOP_CALLEE);

        /*
         * Currently, the ALIASEDVAR ops do not support accessing the
         * callee of a DeclEnvObject, so use NAME.
         */
        if (dn->pn_cookie.level() != bce->script->staticLevel())
            return true;

        DebugOnly<JSFunction *> fun = bce->sc->asFunctionBox()->function();
        JS_ASSERT(fun->isLambda());
        JS_ASSERT(pn->pn_atom == fun->atom());

        /*
         * Leave pn->isOp(JSOP_NAME) if bce->fun is heavyweight to
         * address two cases: a new binding introduced by eval, and
         * assignment to the name in strict mode.
         *
         *   var fun = (function f(s) { eval(s); return f; });
         *   assertEq(fun("var f = 42"), 42);
         *
         * ECMAScript specifies that a function expression's name is bound
         * in a lexical environment distinct from that used to bind its
         * named parameters, the arguments object, and its variables. The
         * new binding for "var f = 42" shadows the binding for the
         * function itself, so the name of the function will not refer to
         * the function.
         *
         *   (function f() { "use strict"; f = 12; })();
         *
         * Outside strict mode, assignment to a function expression's name
         * has no effect. But in strict mode, this attempt to mutate an
         * immutable binding must throw a TypeError. We implement this by
         * not optimizing such assignments and by marking such functions as
         * heavyweight, ensuring that the function name is represented in
         * the scope chain so that assignment will throw a TypeError.
         */
        if (!bce->sc->asFunctionBox()->isHeavyweight()) {
            op = JSOP_CALLEE;
            pn->pn_dflags |= PND_CONST;
        }

        pn->setOp(op);
        pn->pn_dflags |= PND_BOUND;
        return true;
      }

      case Definition::PLACEHOLDER:
        return true;

      case Definition::MISSING:
        MOZ_ASSUME_UNREACHABLE("missing");
    }

    /*
     * The difference between the current static level and the static level of
     * the definition is the number of function scopes between the current
     * scope and dn's scope.
     */
    unsigned skip = bce->script->staticLevel() - dn->pn_cookie.level();
    JS_ASSERT_IF(skip, dn->isClosed());

    /*
     * Explicitly disallow accessing var/let bindings in global scope from
     * nested functions. The reason for this limitation is that, since the
     * global script is not included in the static scope chain (1. because it
     * has no object to stand in the static scope chain, 2. to minimize memory
     * bloat where a single live function keeps its whole global script
     * alive.), ScopeCoordinateToTypeSet is not able to find the var/let's
     * associated types::TypeSet.
     */
    if (skip) {
        BytecodeEmitter *bceSkipped = bce;
        for (unsigned i = 0; i < skip; i++)
            bceSkipped = bceSkipped->parent;
        if (!bceSkipped->sc->isFunctionBox())
            return true;
    }

    JS_ASSERT(!pn->isOp(op));
    pn->setOp(op);
    if (!pn->pn_cookie.set(bce->parser->tokenStream, skip, dn->pn_cookie.slot()))
        return false;

    pn->pn_dflags |= PND_BOUND;
    return true;
}

/*
 * Attempts to bind the name, then checks that no dynamic scope lookup ops are
 * emitted in self-hosting mode. NAME ops do lookups off current scope chain,
 * and we do not want to allow self-hosted code to use the dynamic scope.
 */
static bool
BindNameToSlot(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
    if (!BindNameToSlotHelper(cx, bce, pn))
        return false;

    // Self-hosted code must never fall back to dynamic name lookup.
    if (bce->emitterMode == BytecodeEmitter::SelfHosting && !pn->isBound()) {
        bce->reportError(pn, JSMSG_SELFHOSTED_UNBOUND_NAME);
        return false;
    }

    return true;
}

/*
 * If pn contains a useful expression, return true with *answer set to true.
 * If pn contains a useless expression, return true with *answer set to false.
 * Return false on error.
 *
 * The caller should initialize *answer to false and invoke this function on
 * an expression statement or similar subtree to decide whether the tree could
 * produce code that has any side effects. For an expression statement, we
 * define useless code as code with no side effects, because the main effect,
 * the value left on the stack after the code executes, will be discarded by a
 * pop bytecode.
 */
static bool
CheckSideEffects(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, bool *answer)
{
    // Early out: nothing to inspect, or a side effect was already found.
    if (!pn || *answer)
        return true;

    switch (pn->getArity()) {
      case PN_CODE:
        /*
         * A named function, contrary to ES3, is no longer useful, because we
         * bind its name lexically (using JSOP_CALLEE) instead of creating an
         * Object instance and binding a readonly, permanent property in it
         * (the object and binding can be detected and hijacked or captured).
         * This is a bug fix to ES3; it is fixed in ES3.1 drafts.
         */
        MOZ_ASSERT(*answer == false);
        return true;

      case PN_LIST:
        if (pn->isOp(JSOP_NOP) || pn->isOp(JSOP_OR) || pn->isOp(JSOP_AND) ||
            pn->isOp(JSOP_STRICTEQ) || pn->isOp(JSOP_STRICTNE)) {
            /*
             * Non-operators along with ||, &&, ===, and !== never invoke
             * toString or valueOf, so recurse into every child instead.
             */
            bool ok = true;
            for (ParseNode *pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next)
                ok &= CheckSideEffects(cx, bce, pn2, answer);
            return ok;
        }

        if (pn->isKind(PNK_GENEXP)) {
            /* Generator-expressions are harmless if the result is ignored. */
            MOZ_ASSERT(*answer == false);
            return true;
        }

        /*
         * All invocation operations (construct: PNK_NEW, call: PNK_CALL)
         * are presumed to be useful, because they may have side effects
         * even if their main effect (their return value) is discarded.
         *
         * PNK_ELEM binary trees of 3+ nodes are flattened into lists to
         * avoid too much recursion.  All such lists must be presumed to be
         * useful because each index operation could invoke a getter.
         *
         * Likewise, array and object initialisers may call prototype
         * setters (the __defineSetter__ built-in, and writable __proto__
         * on Array.prototype create this hazard).  Initialiser list nodes
         * have JSOP_NEWINIT in their pn_op.
         */
        *answer = true;
        return true;

      case PN_TERNARY:
        // Any of the three children may carry the side effect.
        return CheckSideEffects(cx, bce, pn->pn_kid1, answer) &&
               CheckSideEffects(cx, bce, pn->pn_kid2, answer) &&
               CheckSideEffects(cx, bce, pn->pn_kid3, answer);

      case PN_BINARY:
      case PN_BINARY_OBJ:
        if (pn->isAssignment()) {
            /*
             * Assignment is presumed to be useful, even if the next operation
             * is another assignment overwriting this one's ostensible effect,
             * because the left operand may be a property with a setter that
             * has side effects.
             *
             * The only exception is assignment of a useless value to a const
             * declared in the function currently being compiled.
             */
            ParseNode *pn2 = pn->pn_left;
            if (!pn2->isKind(PNK_NAME)) {
                *answer = true;
            } else {
                if (!BindNameToSlot(cx, bce, pn2))
                    return false;
                if (!CheckSideEffects(cx, bce, pn->pn_right, answer))
                    return false;
                // Useless only for a simple (JSOP_NOP) assignment to a const.
                if (!*answer && (!pn->isOp(JSOP_NOP) || !pn2->isConst()))
                    *answer = true;
            }
            return true;
        }

        if (pn->isOp(JSOP_OR) || pn->isOp(JSOP_AND) || pn->isOp(JSOP_STRICTEQ) ||
            pn->isOp(JSOP_STRICTNE)) {
            /*
             * ||, &&, ===, and !== do not convert their operands via
             * toString or valueOf method calls; check both sides.
             */
            return CheckSideEffects(cx, bce, pn->pn_left, answer) &&
                   CheckSideEffects(cx, bce, pn->pn_right, answer);
        }

        /*
         * We can't easily prove that neither operand ever denotes an
         * object with a toString or valueOf method.
         */
        *answer = true;
        return true;

      case PN_UNARY:
        switch (pn->getKind()) {
          case PNK_DELETE:
          {
            ParseNode *pn2 = pn->pn_kid;
            switch (pn2->getKind()) {
              case PNK_NAME:
                if (!BindNameToSlot(cx, bce, pn2))
                    return false;
                if (pn2->isConst()) {
                    // delete of a const is a no-op; no side effect.
                    MOZ_ASSERT(*answer == false);
                    return true;
                }
                /* FALL THROUGH */
              case PNK_DOT:
              case PNK_CALL:
              case PNK_ELEM:
                /* All these delete addressing modes have effects too. */
                *answer = true;
                return true;
              default:
                return CheckSideEffects(cx, bce, pn2, answer);
            }
            MOZ_ASSUME_UNREACHABLE("We have a returning default case");
          }

          case PNK_TYPEOF:
          case PNK_VOID:
          case PNK_NOT:
          case PNK_BITNOT:
            if (pn->isOp(JSOP_NOT)) {
                /* ! does not convert its operand via toString or valueOf. */
                return CheckSideEffects(cx, bce, pn->pn_kid, answer);
            }
            /* FALL THROUGH */

          default:
            /*
             * All of PNK_INC, PNK_DEC, PNK_THROW, PNK_YIELD, and PNK_YIELD_STAR
             * have direct effects.  Of the remaining unary-arity node types, we
             * can't easily prove that the operand never denotes an object with
             * a toString or valueOf method.
             */
            *answer = true;
            return true;
        }
        MOZ_ASSUME_UNREACHABLE("We have a returning default case");

      case PN_NAME:
        /*
         * Take care to avoid trying to bind a label name (labels, both for
         * statements and property values in object initialisers, have pn_op
         * defaulted to JSOP_NOP).
         */
        if (pn->isKind(PNK_NAME) && !pn->isOp(JSOP_NOP)) {
            if (!BindNameToSlot(cx, bce, pn))
                return false;
            if (!pn->isOp(JSOP_CALLEE) && pn->pn_cookie.isFree()) {
                /*
                 * Not a use of an unshadowed named function expression's given
                 * name, so this expression could invoke a getter that has side
                 * effects.
                 */
                *answer = true;
            }
        }
        if (pn->isKind(PNK_DOT)) {
            /* Dotted property references in general can call getters. */
            *answer = true;
        }
        return CheckSideEffects(cx, bce, pn->maybeExpr(), answer);

      case PN_NULLARY:
        if (pn->isKind(PNK_DEBUGGER))
            *answer = true;
        return true;
    }
    return true;
}

// True if the emitter is currently inside any loop statement.
bool
BytecodeEmitter::isInLoop()
{
    for (StmtInfoBCE *stmt = topStmt; stmt; stmt = stmt->down) {
        if (stmt->isLoop())
            return true;
    }
    return false;
}

// A "singleton context" is a compile-and-go, non-function, non-loop position:
// objects created here run once, so they may be emitted as singletons.
// Side effect: marks the script as containing singletons.
bool
BytecodeEmitter::checkSingletonContext()
{
    if (!script->compileAndGo() || sc->isFunctionBox() || isInLoop())
        return false;
    hasSingletons = true;
    return true;
}

// Whether name/call emission must provide an implicit |this| (JSOP_IMPLICITTHIS)
// because a dynamic scope (|with|) may be on the scope chain.
bool
BytecodeEmitter::needsImplicitThis()
{
    // Non-compile-and-go scripts can't see their scope chain statically.
    if (!script->compileAndGo())
        return true;

    if (sc->isFunctionBox()) {
        if (sc->asFunctionBox()->inWith)
            return true;
    } else {
        JSObject *scope = sc->asGlobalSharedContext()->scopeChain();
        // Scan the static scope chain for any |with| object.
        while (scope) {
            if (scope->is<DynamicWithObject>())
                return true;
            scope = scope->enclosingScope();
        }
    }

    // Also check statements currently open in this emitter.
    for (StmtInfoBCE *stmt = topStmt; stmt; stmt = stmt->down) {
        if (stmt->type == STMT_WITH)
            return true;
    }
    return false;
}

void
BytecodeEmitter::tellDebuggerAboutCompiledScript(ExclusiveContext *cx)
{
    // Note: when parsing off thread the resulting scripts need to be handed to
    // the debugger after rejoining to the main thread.
    if (!cx->isJSContext())
        return;

    RootedFunction function(cx, script->functionNonDelazifying());
    CallNewScriptHook(cx->asJSContext(), script, function);
    // Lazy scripts are never top level (despite always being invoked with a
    // nullptr parent), and so the hook should never be fired.
    if (emitterMode != LazyFunction && !parent) {
        GlobalObject *compileAndGoGlobal = nullptr;
        if (script->compileAndGo())
            compileAndGoGlobal = &script->global();
        Debugger::onNewScript(cx->asJSContext(), script, compileAndGoGlobal);
    }
}

inline TokenStream *
BytecodeEmitter::tokenStream()
{
    return &parser->tokenStream;
}

// Report a hard compile error at |pn|'s position (or the current token if
// pn is null).  Returns the token stream's result (false on error report).
bool
BytecodeEmitter::reportError(ParseNode *pn, unsigned errorNumber, ...)
{
    TokenPos pos = pn ? pn->pn_pos : tokenStream()->currentToken().pos;

    va_list args;
    va_start(args, errorNumber);
    bool result = tokenStream()->reportCompileErrorNumberVA(pos.begin, JSREPORT_ERROR,
                                                            errorNumber, args);
    va_end(args);
    return result;
}

// Report a strict-mode warning (non-fatal) at |pn|'s position.
bool
BytecodeEmitter::reportStrictWarning(ParseNode *pn, unsigned errorNumber, ...)
{
    TokenPos pos = pn ? pn->pn_pos : tokenStream()->currentToken().pos;

    va_list args;
    va_start(args, errorNumber);
    bool result = tokenStream()->reportStrictWarningErrorNumberVA(pos.begin, errorNumber, args);
    va_end(args);
    return result;
}

// Report an error iff the enclosing context is strict (sc->strict); otherwise
// the token stream decides whether to warn.
bool
BytecodeEmitter::reportStrictModeError(ParseNode *pn, unsigned errorNumber, ...)
{
    TokenPos pos = pn ? pn->pn_pos : tokenStream()->currentToken().pos;

    va_list args;
    va_start(args, errorNumber);
    bool result = tokenStream()->reportStrictModeErrorNumberVA(pos.begin, sc->strict,
                                                               errorNumber, args);
    va_end(args);
    return result;
}

// Emit JSOP_NEWINIT with its 4-byte immediate: byte 1 is the proto key,
// bytes 2-4 are zero padding (the op reserves a uint32-sized immediate).
static bool
EmitNewInit(ExclusiveContext *cx, BytecodeEmitter *bce, JSProtoKey key)
{
    const size_t len = 1 + UINT32_INDEX_LEN;
    ptrdiff_t offset = EmitCheck(cx, bce, len);
    if (offset < 0)
        return false;

    jsbytecode *code = bce->code(offset);
    code[0] = JSOP_NEWINIT;
    code[1] = jsbytecode(key);
    code[2] = 0;
    code[3] = 0;
    code[4] = 0;
    UpdateDepth(cx, bce, offset);
    CheckTypeSet(cx, bce, JSOP_NEWINIT);
    return true;
}

// Build a template object with the iterator-result shape ({value, done}) and
// register it in the emitter's object list; *shape receives its index.
// Only valid for compile-and-go scripts (asserted).
static bool
IteratorResultShape(ExclusiveContext *cx, BytecodeEmitter *bce, unsigned *shape)
{
    JS_ASSERT(bce->script->compileAndGo());

    RootedObject obj(cx);
    gc::AllocKind kind = GuessObjectGCKind(2);
    obj = NewBuiltinClassInstance(cx, &JSObject::class_, kind);
    if (!obj)
        return false;

    Rooted<jsid> value_id(cx, AtomToId(cx->names().value));
    Rooted<jsid> done_id(cx, AtomToId(cx->names().done));
    if (!DefineNativeProperty(cx, obj, value_id, UndefinedHandleValue, nullptr, nullptr,
                              JSPROP_ENUMERATE))
        return false;
    if
(!DefineNativeProperty(cx, obj, done_id, UndefinedHandleValue, nullptr, nullptr, 1.2087 + JSPROP_ENUMERATE)) 1.2088 + return false; 1.2089 + 1.2090 + ObjectBox *objbox = bce->parser->newObjectBox(obj); 1.2091 + if (!objbox) 1.2092 + return false; 1.2093 + 1.2094 + *shape = bce->objectList.add(objbox); 1.2095 + 1.2096 + return true; 1.2097 +} 1.2098 + 1.2099 +static bool 1.2100 +EmitPrepareIteratorResult(ExclusiveContext *cx, BytecodeEmitter *bce) 1.2101 +{ 1.2102 + if (bce->script->compileAndGo()) { 1.2103 + unsigned shape; 1.2104 + if (!IteratorResultShape(cx, bce, &shape)) 1.2105 + return false; 1.2106 + return EmitIndex32(cx, JSOP_NEWOBJECT, shape, bce); 1.2107 + } 1.2108 + 1.2109 + return EmitNewInit(cx, bce, JSProto_Object); 1.2110 +} 1.2111 + 1.2112 +static bool 1.2113 +EmitFinishIteratorResult(ExclusiveContext *cx, BytecodeEmitter *bce, bool done) 1.2114 +{ 1.2115 + jsatomid value_id; 1.2116 + if (!bce->makeAtomIndex(cx->names().value, &value_id)) 1.2117 + return UINT_MAX; 1.2118 + jsatomid done_id; 1.2119 + if (!bce->makeAtomIndex(cx->names().done, &done_id)) 1.2120 + return UINT_MAX; 1.2121 + 1.2122 + if (!EmitIndex32(cx, JSOP_INITPROP, value_id, bce)) 1.2123 + return false; 1.2124 + if (Emit1(cx, bce, done ? 
JSOP_TRUE : JSOP_FALSE) < 0) 1.2125 + return false; 1.2126 + if (!EmitIndex32(cx, JSOP_INITPROP, done_id, bce)) 1.2127 + return false; 1.2128 + if (Emit1(cx, bce, JSOP_ENDINIT) < 0) 1.2129 + return false; 1.2130 + return true; 1.2131 +} 1.2132 + 1.2133 +static bool 1.2134 +EmitNameOp(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, bool callContext) 1.2135 +{ 1.2136 + if (!BindNameToSlot(cx, bce, pn)) 1.2137 + return false; 1.2138 + 1.2139 + JSOp op = pn->getOp(); 1.2140 + 1.2141 + if (op == JSOP_CALLEE) { 1.2142 + if (Emit1(cx, bce, op) < 0) 1.2143 + return false; 1.2144 + } else { 1.2145 + if (!pn->pn_cookie.isFree()) { 1.2146 + JS_ASSERT(JOF_OPTYPE(op) != JOF_ATOM); 1.2147 + if (!EmitVarOp(cx, pn, op, bce)) 1.2148 + return false; 1.2149 + } else { 1.2150 + if (!EmitAtomOp(cx, pn, op, bce)) 1.2151 + return false; 1.2152 + } 1.2153 + } 1.2154 + 1.2155 + /* Need to provide |this| value for call */ 1.2156 + if (callContext) { 1.2157 + if (op == JSOP_NAME && bce->needsImplicitThis()) { 1.2158 + if (!EmitAtomOp(cx, pn, JSOP_IMPLICITTHIS, bce)) 1.2159 + return false; 1.2160 + } else { 1.2161 + if (Emit1(cx, bce, JSOP_UNDEFINED) < 0) 1.2162 + return false; 1.2163 + } 1.2164 + } 1.2165 + 1.2166 + return true; 1.2167 +} 1.2168 + 1.2169 +static bool 1.2170 +EmitPropLHS(ExclusiveContext *cx, ParseNode *pn, JSOp op, BytecodeEmitter *bce) 1.2171 +{ 1.2172 + JS_ASSERT(pn->isKind(PNK_DOT)); 1.2173 + ParseNode *pn2 = pn->maybeExpr(); 1.2174 + 1.2175 + /* 1.2176 + * If the object operand is also a dotted property reference, reverse the 1.2177 + * list linked via pn_expr temporarily so we can iterate over it from the 1.2178 + * bottom up (reversing again as we go), to avoid excessive recursion. 1.2179 + */ 1.2180 + if (pn2->isKind(PNK_DOT)) { 1.2181 + ParseNode *pndot = pn2; 1.2182 + ParseNode *pnup = nullptr, *pndown; 1.2183 + ptrdiff_t top = bce->offset(); 1.2184 + for (;;) { 1.2185 + /* Reverse pndot->pn_expr to point up, not down. 
 */
            pndot->pn_offset = top;
            JS_ASSERT(!pndot->isUsed());
            pndown = pndot->pn_expr;
            pndot->pn_expr = pnup;
            if (!pndown->isKind(PNK_DOT))
                break;
            pnup = pndot;
            pndot = pndown;
        }

        /* pndown is a primary expression, not a dotted property reference. */
        if (!EmitTree(cx, bce, pndown))
            return false;

        do {
            /* Walk back up the list, emitting annotated name ops. */
            if (!EmitAtomOp(cx, pndot, JSOP_GETPROP, bce))
                return false;

            /* Reverse the pn_expr link again. */
            pnup = pndot->pn_expr;
            pndot->pn_expr = pndown;
            pndown = pndot;
        } while ((pndot = pnup) != nullptr);
        return true;
    }

    // The non-optimized case.
    return EmitTree(cx, bce, pn2);
}

// Emit a full property access: object, then the atom op.  For JSOP_CALLPROP
// the object is duplicated before and swapped after, leaving |this| under
// the callee on the stack.
static bool
EmitPropOp(ExclusiveContext *cx, ParseNode *pn, JSOp op, BytecodeEmitter *bce)
{
    JS_ASSERT(pn->isArity(PN_NAME));

    if (!EmitPropLHS(cx, pn, op, bce))
        return false;

    if (op == JSOP_CALLPROP && Emit1(cx, bce, JSOP_DUP) < 0)
        return false;

    if (!EmitAtomOp(cx, pn, op, bce))
        return false;

    if (op == JSOP_CALLPROP && Emit1(cx, bce, JSOP_SWAP) < 0)
        return false;

    return true;
}

// Emit ++/-- on a dotted property.  The stack comments on the right track
// the operands after each op; "N?" is the saved original value, present
// only for the postfix forms.
static bool
EmitPropIncDec(ExclusiveContext *cx, ParseNode *pn, BytecodeEmitter *bce)
{
    JS_ASSERT(pn->pn_kid->getKind() == PNK_DOT);

    bool post;
    JSOp binop = GetIncDecInfo(pn->getKind(), &post);

    JSOp get = JSOP_GETPROP;
    if (!EmitPropLHS(cx, pn->pn_kid, get, bce))             // OBJ
        return false;
    if (Emit1(cx, bce, JSOP_DUP) < 0)                       // OBJ OBJ
        return false;
    if (!EmitAtomOp(cx, pn->pn_kid, JSOP_GETPROP, bce))     // OBJ V
        return false;
    if (Emit1(cx, bce, JSOP_POS) < 0)                       // OBJ N
        return false;
    if (post && Emit1(cx, bce, JSOP_DUP) < 0)               // OBJ N? N
        return false;
    if (Emit1(cx, bce, JSOP_ONE) < 0)                       // OBJ N? N 1
        return false;
    if (Emit1(cx, bce, binop) < 0)                          // OBJ N? N+1
        return false;

    if (post) {
        // Rearrange so SETPROP sees OBJ under the new value while the
        // original N stays below as the expression result.
        if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)2) < 0)   // N? N+1 OBJ
            return false;
        if (Emit1(cx, bce, JSOP_SWAP) < 0)                  // N? OBJ N+1
            return false;
    }

    if (!EmitAtomOp(cx, pn->pn_kid, JSOP_SETPROP, bce))     // N? N+1
        return false;
    if (post && Emit1(cx, bce, JSOP_POP) < 0)               // RESULT
        return false;

    return true;
}

// Emit ++/-- on a plain name.  Uses the GNAME op family when the bound op's
// format says the name is global.
static bool
EmitNameIncDec(ExclusiveContext *cx, ParseNode *pn, BytecodeEmitter *bce)
{
    const JSCodeSpec *cs = &js_CodeSpec[pn->pn_kid->getOp()];

    bool global = (cs->format & JOF_GNAME);
    bool post;
    JSOp binop = GetIncDecInfo(pn->getKind(), &post);

    if (!EmitAtomOp(cx, pn->pn_kid, global ? JSOP_BINDGNAME : JSOP_BINDNAME, bce))  // OBJ
        return false;
    if (!EmitAtomOp(cx, pn->pn_kid, global ? JSOP_GETGNAME : JSOP_NAME, bce))       // OBJ V
        return false;
    if (Emit1(cx, bce, JSOP_POS) < 0)                       // OBJ N
        return false;
    if (post && Emit1(cx, bce, JSOP_DUP) < 0)               // OBJ N? N
        return false;
    if (Emit1(cx, bce, JSOP_ONE) < 0)                       // OBJ N? N 1
        return false;
    if (Emit1(cx, bce, binop) < 0)                          // OBJ N? N+1
        return false;

    if (post) {
        if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)2) < 0)   // N? N+1 OBJ
            return false;
        if (Emit1(cx, bce, JSOP_SWAP) < 0)                  // N? OBJ N+1
            return false;
    }

    if (!EmitAtomOp(cx, pn->pn_kid, global ? JSOP_SETGNAME : JSOP_SETNAME, bce))    // N? N+1
        return false;
    if (post && Emit1(cx, bce, JSOP_POP) < 0)               // RESULT
        return false;

    return true;
}

/*
 * Emit bytecode to put operands for a JSOP_GETELEM/CALLELEM/SETELEM/DELELEM
 * opcode onto the stack in the right order. In the case of SETELEM, the
 * value to be assigned must already be pushed.
 */
static bool
EmitElemOperands(ExclusiveContext *cx, ParseNode *pn, JSOp op, BytecodeEmitter *bce)
{
    JS_ASSERT(pn->isArity(PN_BINARY));
    if (!EmitTree(cx, bce, pn->pn_left))
        return false;
    if (op == JSOP_CALLELEM && Emit1(cx, bce, JSOP_DUP) < 0)
        return false;
    if (!EmitTree(cx, bce, pn->pn_right))
        return false;
    if (op == JSOP_SETELEM && Emit2(cx, bce, JSOP_PICK, (jsbytecode)2) < 0)
        return false;
    return true;
}

// Emit the elem opcode itself (operands already on the stack).  CALLELEM
// swaps afterwards so |this| ends up under the callee.
static inline bool
EmitElemOpBase(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp op)
{
    if (Emit1(cx, bce, op) < 0)
        return false;
    CheckTypeSet(cx, bce, op);

    if (op == JSOP_CALLELEM) {
        if (Emit1(cx, bce, JSOP_SWAP) < 0)
            return false;
    }
    return true;
}

static bool
EmitElemOp(ExclusiveContext *cx, ParseNode *pn, JSOp op, BytecodeEmitter *bce)
{
    return EmitElemOperands(cx, pn, op, bce) && EmitElemOpBase(cx, bce, op);
}

// Emit ++/-- on an indexed (obj[key]) expression.
static bool
EmitElemIncDec(ExclusiveContext *cx, ParseNode *pn, BytecodeEmitter *bce)
{
    JS_ASSERT(pn->pn_kid->getKind() == PNK_ELEM);

    if (!EmitElemOperands(cx, pn->pn_kid, JSOP_GETELEM, bce))
        return false;

    bool post;
    JSOp binop = GetIncDecInfo(pn->getKind(), &post);

    /*
     * We need to convert the key to an object id first, so that we do not do
     * it inside both the GETELEM and the SETELEM.
     */
                                                            // OBJ KEY*
    if (Emit1(cx, bce, JSOP_TOID) < 0)                      // OBJ KEY
        return false;
    if (Emit1(cx, bce, JSOP_DUP2) < 0)                      // OBJ KEY OBJ KEY
        return false;
    if (!EmitElemOpBase(cx, bce, JSOP_GETELEM))             // OBJ KEY V
        return false;
    if (Emit1(cx, bce, JSOP_POS) < 0)                       // OBJ KEY N
        return false;
    if (post && Emit1(cx, bce, JSOP_DUP) < 0)               // OBJ KEY N? N
        return false;
    if (Emit1(cx, bce, JSOP_ONE) < 0)                       // OBJ KEY N? N 1
        return false;
    if (Emit1(cx, bce, binop) < 0)                          // OBJ KEY N? N+1
        return false;

    if (post) {
        // Rotate OBJ and KEY above the saved N so SETELEM consumes them,
        // leaving the original value as the expression result.
        if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)3) < 0)   // KEY N N+1 OBJ
            return false;
        if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)3) < 0)   // N N+1 OBJ KEY
            return false;
        if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)2) < 0)   // N OBJ KEY N+1
            return false;
    }

    if (!EmitElemOpBase(cx, bce, JSOP_SETELEM))             // N? N+1
        return false;
    if (post && Emit1(cx, bce, JSOP_POP) < 0)               // RESULT
        return false;

    return true;
}

// Emit the most compact opcode that pushes |dval|: ZERO/ONE/INT8/UINT16/
// UINT24/INT32 for int32-representable values, else a JSOP_DOUBLE constant
// from the const list.
static bool
EmitNumberOp(ExclusiveContext *cx, double dval, BytecodeEmitter *bce)
{
    int32_t ival;
    uint32_t u;
    ptrdiff_t off;
    jsbytecode *pc;

    if (NumberIsInt32(dval, &ival)) {
        if (ival == 0)
            return Emit1(cx, bce, JSOP_ZERO) >= 0;
        if (ival == 1)
            return Emit1(cx, bce, JSOP_ONE) >= 0;
        if ((int)(int8_t)ival == ival)
            return Emit2(cx, bce, JSOP_INT8, (jsbytecode)(int8_t)ival) >= 0;

        u = (uint32_t)ival;
        if (u < JS_BIT(16)) {
            EMIT_UINT16_IMM_OP(JSOP_UINT16, u);
        } else if (u < JS_BIT(24)) {
            off = EmitN(cx, bce, JSOP_UINT24, 3);
            if (off < 0)
                return false;
            pc = bce->code(off);
            SET_UINT24(pc, u);
        } else {
            off = EmitN(cx, bce, JSOP_INT32, 4);
            if (off < 0)
                return false;
            pc = bce->code(off);
            SET_INT32(pc, ival);
        }
        return true;
    }

    // Not an int32: intern the double in the constant list.
    if (!bce->constList.append(DoubleValue(dval)))
        return false;

    return EmitIndex32(cx, JSOP_DOUBLE, bce->constList.length() - 1, bce);
}

// Backpatch the jump at |off| to target the current emission point.
static inline void
SetJumpOffsetAt(BytecodeEmitter *bce, ptrdiff_t off)
{
    SET_JUMP_OFFSET(bce->code(off), bce->offset() - off);
}

// Push |n| undefined values onto the stack.
static bool
PushUndefinedValues(ExclusiveContext *cx, BytecodeEmitter *bce, unsigned n)
{
    for (unsigned i = 0; i < n; ++i) {
        if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
            return false;
    }
    return true;
}

// Pop the block's initial values off the stack into its locals, highest slot
// first (matching push order).  Aliased vars go through SETALIASEDVAR at
// hop 0; unaliased ones use SETLOCAL.
static bool
InitializeBlockScopedLocalsFromStack(ExclusiveContext *cx, BytecodeEmitter *bce,
                                     Handle<StaticBlockObject *> blockObj)
{
    for (unsigned i = blockObj->numVariables(); i > 0; --i) {
        if (blockObj->isAliased(i - 1)) {
            ScopeCoordinate sc;
            sc.setHops(0);
            sc.setSlot(BlockObject::RESERVED_SLOTS + i - 1);
            if (!EmitAliasedVarOp(cx, JSOP_SETALIASEDVAR, sc, bce))
                return false;
        } else {
            unsigned local = blockObj->blockIndexToLocalIndex(i - 1);
            if (!EmitUnaliasedVarOp(cx, JSOP_SETLOCAL, local, bce))
                return false;
        }
        if (Emit1(cx, bce, JSOP_POP) < 0)
            return false;
    }
    return true;
}

// Enter a lexical block scope: push initial (undefined) values for any
// locals not already on the stack, open the nested scope, then move the
// values into their slots.
static bool
EnterBlockScope(ExclusiveContext *cx, BytecodeEmitter *bce, StmtInfoBCE *stmtInfo,
                ObjectBox *objbox, unsigned alreadyPushed = 0)
{
    // Initial values for block-scoped locals.
    Rooted<StaticBlockObject *> blockObj(cx, &objbox->object->as<StaticBlockObject>());
    if (!PushUndefinedValues(cx, bce, blockObj->numVariables() - alreadyPushed))
        return false;

    if (!EnterNestedScope(cx, bce, stmtInfo, objbox, STMT_BLOCK))
        return false;

    if (!InitializeBlockScopedLocalsFromStack(cx, bce, blockObj))
        return false;

    return true;
}

/*
 * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047.
 * LLVM is deciding to inline this function which uses a lot of stack space
 * into EmitTree which is recursive and uses relatively little stack space.
1.2502 + */ 1.2503 +MOZ_NEVER_INLINE static bool 1.2504 +EmitSwitch(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) 1.2505 +{ 1.2506 + JSOp switchOp; 1.2507 + bool hasDefault; 1.2508 + ptrdiff_t top, off, defaultOffset; 1.2509 + ParseNode *pn2, *pn3, *pn4; 1.2510 + int32_t low, high; 1.2511 + int noteIndex; 1.2512 + size_t switchSize; 1.2513 + jsbytecode *pc; 1.2514 + 1.2515 + /* Try for most optimal, fall back if not dense ints. */ 1.2516 + switchOp = JSOP_TABLESWITCH; 1.2517 + hasDefault = false; 1.2518 + defaultOffset = -1; 1.2519 + 1.2520 + pn2 = pn->pn_right; 1.2521 + JS_ASSERT(pn2->isKind(PNK_LEXICALSCOPE) || pn2->isKind(PNK_STATEMENTLIST)); 1.2522 + 1.2523 + /* Push the discriminant. */ 1.2524 + if (!EmitTree(cx, bce, pn->pn_left)) 1.2525 + return false; 1.2526 + 1.2527 + StmtInfoBCE stmtInfo(cx); 1.2528 + if (pn2->isKind(PNK_LEXICALSCOPE)) { 1.2529 + if (!EnterBlockScope(cx, bce, &stmtInfo, pn2->pn_objbox, 0)) 1.2530 + return false; 1.2531 + 1.2532 + stmtInfo.type = STMT_SWITCH; 1.2533 + stmtInfo.update = top = bce->offset(); 1.2534 + /* Advance pn2 to refer to the switch case list. */ 1.2535 + pn2 = pn2->expr(); 1.2536 + } else { 1.2537 + JS_ASSERT(pn2->isKind(PNK_STATEMENTLIST)); 1.2538 + top = bce->offset(); 1.2539 + PushStatementBCE(bce, &stmtInfo, STMT_SWITCH, top); 1.2540 + } 1.2541 + 1.2542 + /* Switch bytecodes run from here till end of final case. 
*/ 1.2543 + uint32_t caseCount = pn2->pn_count; 1.2544 + uint32_t tableLength = 0; 1.2545 + ScopedJSFreePtr<ParseNode*> table(nullptr); 1.2546 + 1.2547 + if (caseCount > JS_BIT(16)) { 1.2548 + bce->parser->tokenStream.reportError(JSMSG_TOO_MANY_CASES); 1.2549 + return false; 1.2550 + } 1.2551 + 1.2552 + if (caseCount == 0 || 1.2553 + (caseCount == 1 && 1.2554 + (hasDefault = (pn2->pn_head->isKind(PNK_DEFAULT))))) { 1.2555 + caseCount = 0; 1.2556 + low = 0; 1.2557 + high = -1; 1.2558 + } else { 1.2559 + bool ok = true; 1.2560 +#define INTMAP_LENGTH 256 1.2561 + jsbitmap intmap_space[INTMAP_LENGTH]; 1.2562 + jsbitmap *intmap = nullptr; 1.2563 + int32_t intmap_bitlen = 0; 1.2564 + 1.2565 + low = JSVAL_INT_MAX; 1.2566 + high = JSVAL_INT_MIN; 1.2567 + 1.2568 + for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) { 1.2569 + if (pn3->isKind(PNK_DEFAULT)) { 1.2570 + hasDefault = true; 1.2571 + caseCount--; /* one of the "cases" was the default */ 1.2572 + continue; 1.2573 + } 1.2574 + 1.2575 + JS_ASSERT(pn3->isKind(PNK_CASE)); 1.2576 + if (switchOp == JSOP_CONDSWITCH) 1.2577 + continue; 1.2578 + 1.2579 + JS_ASSERT(switchOp == JSOP_TABLESWITCH); 1.2580 + 1.2581 + pn4 = pn3->pn_left; 1.2582 + 1.2583 + if (pn4->getKind() != PNK_NUMBER) { 1.2584 + switchOp = JSOP_CONDSWITCH; 1.2585 + continue; 1.2586 + } 1.2587 + 1.2588 + int32_t i; 1.2589 + if (!NumberIsInt32(pn4->pn_dval, &i)) { 1.2590 + switchOp = JSOP_CONDSWITCH; 1.2591 + continue; 1.2592 + } 1.2593 + 1.2594 + if ((unsigned)(i + (int)JS_BIT(15)) >= (unsigned)JS_BIT(16)) { 1.2595 + switchOp = JSOP_CONDSWITCH; 1.2596 + continue; 1.2597 + } 1.2598 + if (i < low) 1.2599 + low = i; 1.2600 + if (high < i) 1.2601 + high = i; 1.2602 + 1.2603 + /* 1.2604 + * Check for duplicates, which require a JSOP_CONDSWITCH. 1.2605 + * We bias i by 65536 if it's negative, and hope that's a rare 1.2606 + * case (because it requires a malloc'd bitmap). 
1.2607 + */ 1.2608 + if (i < 0) 1.2609 + i += JS_BIT(16); 1.2610 + if (i >= intmap_bitlen) { 1.2611 + if (!intmap && 1.2612 + size_t(i) < (INTMAP_LENGTH * JS_BITMAP_NBITS)) { 1.2613 + intmap = intmap_space; 1.2614 + intmap_bitlen = INTMAP_LENGTH * JS_BITMAP_NBITS; 1.2615 + } else { 1.2616 + /* Just grab 8K for the worst-case bitmap. */ 1.2617 + intmap_bitlen = JS_BIT(16); 1.2618 + intmap = cx->pod_malloc<jsbitmap>(JS_BIT(16) / JS_BITMAP_NBITS); 1.2619 + if (!intmap) { 1.2620 + js_ReportOutOfMemory(cx); 1.2621 + return false; 1.2622 + } 1.2623 + } 1.2624 + memset(intmap, 0, size_t(intmap_bitlen) / CHAR_BIT); 1.2625 + } 1.2626 + if (JS_TEST_BIT(intmap, i)) { 1.2627 + switchOp = JSOP_CONDSWITCH; 1.2628 + continue; 1.2629 + } 1.2630 + JS_SET_BIT(intmap, i); 1.2631 + } 1.2632 + 1.2633 + if (intmap && intmap != intmap_space) 1.2634 + js_free(intmap); 1.2635 + if (!ok) 1.2636 + return false; 1.2637 + 1.2638 + /* 1.2639 + * Compute table length and select condswitch instead if overlarge or 1.2640 + * more than half-sparse. 1.2641 + */ 1.2642 + if (switchOp == JSOP_TABLESWITCH) { 1.2643 + tableLength = (uint32_t)(high - low + 1); 1.2644 + if (tableLength >= JS_BIT(16) || tableLength > 2 * caseCount) 1.2645 + switchOp = JSOP_CONDSWITCH; 1.2646 + } 1.2647 + } 1.2648 + 1.2649 + /* 1.2650 + * The note has one or two offsets: first tells total switch code length; 1.2651 + * second (if condswitch) tells offset to first JSOP_CASE. 1.2652 + */ 1.2653 + if (switchOp == JSOP_CONDSWITCH) { 1.2654 + /* 0 bytes of immediate for unoptimized switch. */ 1.2655 + switchSize = 0; 1.2656 + noteIndex = NewSrcNote3(cx, bce, SRC_CONDSWITCH, 0, 0); 1.2657 + } else { 1.2658 + JS_ASSERT(switchOp == JSOP_TABLESWITCH); 1.2659 + 1.2660 + /* 3 offsets (len, low, high) before the table, 1 per entry. 
*/ 1.2661 + switchSize = (size_t)(JUMP_OFFSET_LEN * (3 + tableLength)); 1.2662 + noteIndex = NewSrcNote2(cx, bce, SRC_TABLESWITCH, 0); 1.2663 + } 1.2664 + if (noteIndex < 0) 1.2665 + return false; 1.2666 + 1.2667 + /* Emit switchOp followed by switchSize bytes of jump or lookup table. */ 1.2668 + if (EmitN(cx, bce, switchOp, switchSize) < 0) 1.2669 + return false; 1.2670 + 1.2671 + off = -1; 1.2672 + if (switchOp == JSOP_CONDSWITCH) { 1.2673 + int caseNoteIndex = -1; 1.2674 + bool beforeCases = true; 1.2675 + 1.2676 + /* Emit code for evaluating cases and jumping to case statements. */ 1.2677 + for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) { 1.2678 + pn4 = pn3->pn_left; 1.2679 + if (pn4 && !EmitTree(cx, bce, pn4)) 1.2680 + return false; 1.2681 + if (caseNoteIndex >= 0) { 1.2682 + /* off is the previous JSOP_CASE's bytecode offset. */ 1.2683 + if (!SetSrcNoteOffset(cx, bce, (unsigned)caseNoteIndex, 0, bce->offset() - off)) 1.2684 + return false; 1.2685 + } 1.2686 + if (!pn4) { 1.2687 + JS_ASSERT(pn3->isKind(PNK_DEFAULT)); 1.2688 + continue; 1.2689 + } 1.2690 + caseNoteIndex = NewSrcNote2(cx, bce, SRC_NEXTCASE, 0); 1.2691 + if (caseNoteIndex < 0) 1.2692 + return false; 1.2693 + off = EmitJump(cx, bce, JSOP_CASE, 0); 1.2694 + if (off < 0) 1.2695 + return false; 1.2696 + pn3->pn_offset = off; 1.2697 + if (beforeCases) { 1.2698 + unsigned noteCount, noteCountDelta; 1.2699 + 1.2700 + /* Switch note's second offset is to first JSOP_CASE. 
*/ 1.2701 + noteCount = bce->notes().length(); 1.2702 + if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 1, off - top)) 1.2703 + return false; 1.2704 + noteCountDelta = bce->notes().length() - noteCount; 1.2705 + if (noteCountDelta != 0) 1.2706 + caseNoteIndex += noteCountDelta; 1.2707 + beforeCases = false; 1.2708 + } 1.2709 + } 1.2710 + 1.2711 + /* 1.2712 + * If we didn't have an explicit default (which could fall in between 1.2713 + * cases, preventing us from fusing this SetSrcNoteOffset with the call 1.2714 + * in the loop above), link the last case to the implicit default for 1.2715 + * the benefit of IonBuilder. 1.2716 + */ 1.2717 + if (!hasDefault && 1.2718 + caseNoteIndex >= 0 && 1.2719 + !SetSrcNoteOffset(cx, bce, (unsigned)caseNoteIndex, 0, bce->offset() - off)) 1.2720 + { 1.2721 + return false; 1.2722 + } 1.2723 + 1.2724 + /* Emit default even if no explicit default statement. */ 1.2725 + defaultOffset = EmitJump(cx, bce, JSOP_DEFAULT, 0); 1.2726 + if (defaultOffset < 0) 1.2727 + return false; 1.2728 + } else { 1.2729 + JS_ASSERT(switchOp == JSOP_TABLESWITCH); 1.2730 + pc = bce->code(top + JUMP_OFFSET_LEN); 1.2731 + 1.2732 + /* Fill in switch bounds, which we know fit in 16-bit offsets. */ 1.2733 + SET_JUMP_OFFSET(pc, low); 1.2734 + pc += JUMP_OFFSET_LEN; 1.2735 + SET_JUMP_OFFSET(pc, high); 1.2736 + pc += JUMP_OFFSET_LEN; 1.2737 + 1.2738 + /* 1.2739 + * Use malloc to avoid arena bloat for programs with many switches. 1.2740 + * ScopedJSFreePtr takes care of freeing it on exit. 
1.2741 + */ 1.2742 + if (tableLength != 0) { 1.2743 + table = cx->pod_calloc<ParseNode*>(tableLength); 1.2744 + if (!table) 1.2745 + return false; 1.2746 + for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) { 1.2747 + if (pn3->isKind(PNK_DEFAULT)) 1.2748 + continue; 1.2749 + 1.2750 + JS_ASSERT(pn3->isKind(PNK_CASE)); 1.2751 + 1.2752 + pn4 = pn3->pn_left; 1.2753 + JS_ASSERT(pn4->getKind() == PNK_NUMBER); 1.2754 + 1.2755 + int32_t i = int32_t(pn4->pn_dval); 1.2756 + JS_ASSERT(double(i) == pn4->pn_dval); 1.2757 + 1.2758 + i -= low; 1.2759 + JS_ASSERT(uint32_t(i) < tableLength); 1.2760 + table[i] = pn3; 1.2761 + } 1.2762 + } 1.2763 + } 1.2764 + 1.2765 + /* Emit code for each case's statements, copying pn_offset up to pn3. */ 1.2766 + for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) { 1.2767 + if (switchOp == JSOP_CONDSWITCH && !pn3->isKind(PNK_DEFAULT)) 1.2768 + SetJumpOffsetAt(bce, pn3->pn_offset); 1.2769 + pn4 = pn3->pn_right; 1.2770 + if (!EmitTree(cx, bce, pn4)) 1.2771 + return false; 1.2772 + pn3->pn_offset = pn4->pn_offset; 1.2773 + if (pn3->isKind(PNK_DEFAULT)) 1.2774 + off = pn3->pn_offset - top; 1.2775 + } 1.2776 + 1.2777 + if (!hasDefault) { 1.2778 + /* If no default case, offset for default is to end of switch. */ 1.2779 + off = bce->offset() - top; 1.2780 + } 1.2781 + 1.2782 + /* We better have set "off" by now. */ 1.2783 + JS_ASSERT(off != -1); 1.2784 + 1.2785 + /* Set the default offset (to end of switch if no default). */ 1.2786 + if (switchOp == JSOP_CONDSWITCH) { 1.2787 + pc = nullptr; 1.2788 + JS_ASSERT(defaultOffset != -1); 1.2789 + SET_JUMP_OFFSET(bce->code(defaultOffset), off - (defaultOffset - top)); 1.2790 + } else { 1.2791 + pc = bce->code(top); 1.2792 + SET_JUMP_OFFSET(pc, off); 1.2793 + pc += JUMP_OFFSET_LEN; 1.2794 + } 1.2795 + 1.2796 + /* Set the SRC_SWITCH note's offset operand to tell end of switch. 
*/ 1.2797 + off = bce->offset() - top; 1.2798 + if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 0, off)) 1.2799 + return false; 1.2800 + 1.2801 + if (switchOp == JSOP_TABLESWITCH) { 1.2802 + /* Skip over the already-initialized switch bounds. */ 1.2803 + pc += 2 * JUMP_OFFSET_LEN; 1.2804 + 1.2805 + /* Fill in the jump table, if there is one. */ 1.2806 + for (uint32_t i = 0; i < tableLength; i++) { 1.2807 + pn3 = table[i]; 1.2808 + off = pn3 ? pn3->pn_offset - top : 0; 1.2809 + SET_JUMP_OFFSET(pc, off); 1.2810 + pc += JUMP_OFFSET_LEN; 1.2811 + } 1.2812 + } 1.2813 + 1.2814 + if (pn->pn_right->isKind(PNK_LEXICALSCOPE)) { 1.2815 + if (!LeaveNestedScope(cx, bce, &stmtInfo)) 1.2816 + return false; 1.2817 + } else { 1.2818 + if (!PopStatementBCE(cx, bce)) 1.2819 + return false; 1.2820 + } 1.2821 + 1.2822 + return true; 1.2823 +} 1.2824 + 1.2825 +bool 1.2826 +BytecodeEmitter::isRunOnceLambda() 1.2827 +{ 1.2828 + // The run once lambda flags set by the parser are approximate, and we look 1.2829 + // at properties of the function itself before deciding to emit a function 1.2830 + // as a run once lambda. 1.2831 + 1.2832 + if (!(parent && parent->emittingRunOnceLambda) && !lazyRunOnceLambda) 1.2833 + return false; 1.2834 + 1.2835 + FunctionBox *funbox = sc->asFunctionBox(); 1.2836 + return !funbox->argumentsHasLocalBinding() && 1.2837 + !funbox->isGenerator() && 1.2838 + !funbox->function()->name(); 1.2839 +} 1.2840 + 1.2841 +bool 1.2842 +frontend::EmitFunctionScript(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *body) 1.2843 +{ 1.2844 + /* 1.2845 + * IonBuilder has assumptions about what may occur immediately after 1.2846 + * script->main (e.g., in the case of destructuring params). Thus, put the 1.2847 + * following ops into the range [script->code, script->main). Note: 1.2848 + * execution starts from script->code, so this has no semantic effect. 
1.2849 + */ 1.2850 + 1.2851 + FunctionBox *funbox = bce->sc->asFunctionBox(); 1.2852 + if (funbox->argumentsHasLocalBinding()) { 1.2853 + JS_ASSERT(bce->offset() == 0); /* See JSScript::argumentsBytecode. */ 1.2854 + bce->switchToProlog(); 1.2855 + if (Emit1(cx, bce, JSOP_ARGUMENTS) < 0) 1.2856 + return false; 1.2857 + InternalBindingsHandle bindings(bce->script, &bce->script->bindings); 1.2858 + uint32_t varIndex = Bindings::argumentsVarIndex(cx, bindings); 1.2859 + if (bce->script->varIsAliased(varIndex)) { 1.2860 + ScopeCoordinate sc; 1.2861 + sc.setHops(0); 1.2862 + JS_ALWAYS_TRUE(LookupAliasedNameSlot(bce->script, cx->names().arguments, &sc)); 1.2863 + if (!EmitAliasedVarOp(cx, JSOP_SETALIASEDVAR, sc, bce)) 1.2864 + return false; 1.2865 + } else { 1.2866 + if (!EmitUnaliasedVarOp(cx, JSOP_SETLOCAL, varIndex, bce)) 1.2867 + return false; 1.2868 + } 1.2869 + if (Emit1(cx, bce, JSOP_POP) < 0) 1.2870 + return false; 1.2871 + bce->switchToMain(); 1.2872 + } 1.2873 + 1.2874 + if (funbox->isGenerator()) { 1.2875 + bce->switchToProlog(); 1.2876 + if (Emit1(cx, bce, JSOP_GENERATOR) < 0) 1.2877 + return false; 1.2878 + bce->switchToMain(); 1.2879 + } 1.2880 + 1.2881 + /* 1.2882 + * Emit a prologue for run-once scripts which will deoptimize JIT code if 1.2883 + * the script ends up running multiple times via foo.caller related 1.2884 + * shenanigans. 1.2885 + */ 1.2886 + bool runOnce = bce->isRunOnceLambda(); 1.2887 + if (runOnce) { 1.2888 + bce->switchToProlog(); 1.2889 + if (Emit1(cx, bce, JSOP_RUNONCE) < 0) 1.2890 + return false; 1.2891 + bce->switchToMain(); 1.2892 + } 1.2893 + 1.2894 + if (!EmitTree(cx, bce, body)) 1.2895 + return false; 1.2896 + 1.2897 + // If we fall off the end of an ES6 generator, return a boxed iterator 1.2898 + // result object of the form { value: undefined, done: true }. 
1.2899 + if (bce->sc->isFunctionBox() && bce->sc->asFunctionBox()->isStarGenerator()) { 1.2900 + if (!EmitPrepareIteratorResult(cx, bce)) 1.2901 + return false; 1.2902 + if (Emit1(cx, bce, JSOP_UNDEFINED) < 0) 1.2903 + return false; 1.2904 + if (!EmitFinishIteratorResult(cx, bce, true)) 1.2905 + return false; 1.2906 + 1.2907 + // No need to check for finally blocks, etc as in EmitReturn. 1.2908 + if (Emit1(cx, bce, JSOP_RETURN) < 0) 1.2909 + return false; 1.2910 + } 1.2911 + 1.2912 + /* 1.2913 + * Always end the script with a JSOP_RETRVAL. Some other parts of the codebase 1.2914 + * depend on this opcode, e.g. js_InternalInterpret. 1.2915 + */ 1.2916 + if (Emit1(cx, bce, JSOP_RETRVAL) < 0) 1.2917 + return false; 1.2918 + 1.2919 + if (!JSScript::fullyInitFromEmitter(cx, bce->script, bce)) 1.2920 + return false; 1.2921 + 1.2922 + /* 1.2923 + * If this function is only expected to run once, mark the script so that 1.2924 + * initializers created within it may be given more precise types. 1.2925 + */ 1.2926 + if (runOnce) { 1.2927 + bce->script->setTreatAsRunOnce(); 1.2928 + JS_ASSERT(!bce->script->hasRunOnce()); 1.2929 + } 1.2930 + 1.2931 + /* Initialize fun->script() so that the debugger has a valid fun->script(). 
*/ 1.2932 + RootedFunction fun(cx, bce->script->functionNonDelazifying()); 1.2933 + JS_ASSERT(fun->isInterpreted()); 1.2934 + 1.2935 + if (fun->isInterpretedLazy()) 1.2936 + fun->setUnlazifiedScript(bce->script); 1.2937 + else 1.2938 + fun->setScript(bce->script); 1.2939 + 1.2940 + bce->tellDebuggerAboutCompiledScript(cx); 1.2941 + 1.2942 + return true; 1.2943 +} 1.2944 + 1.2945 +static bool 1.2946 +MaybeEmitVarDecl(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp prologOp, ParseNode *pn, 1.2947 + jsatomid *result) 1.2948 +{ 1.2949 + jsatomid atomIndex; 1.2950 + 1.2951 + if (!pn->pn_cookie.isFree()) { 1.2952 + atomIndex = pn->pn_cookie.slot(); 1.2953 + } else { 1.2954 + if (!bce->makeAtomIndex(pn->pn_atom, &atomIndex)) 1.2955 + return false; 1.2956 + } 1.2957 + 1.2958 + if (JOF_OPTYPE(pn->getOp()) == JOF_ATOM && 1.2959 + (!bce->sc->isFunctionBox() || bce->sc->asFunctionBox()->isHeavyweight())) 1.2960 + { 1.2961 + bce->switchToProlog(); 1.2962 + if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin)) 1.2963 + return false; 1.2964 + if (!EmitIndexOp(cx, prologOp, atomIndex, bce)) 1.2965 + return false; 1.2966 + bce->switchToMain(); 1.2967 + } 1.2968 + 1.2969 + if (result) 1.2970 + *result = atomIndex; 1.2971 + return true; 1.2972 +} 1.2973 + 1.2974 +/* 1.2975 + * This enum tells EmitVariables and the destructuring functions how emit the 1.2976 + * given Parser::variables parse tree. In the base case, DefineVars, the caller 1.2977 + * only wants variables to be defined in the prologue (if necessary). For 1.2978 + * PushInitialValues, variable initializer expressions are evaluated and left 1.2979 + * on the stack. For InitializeVars, the initializer expressions values are 1.2980 + * assigned (to local variables) and popped. 
1.2981 + */ 1.2982 +enum VarEmitOption 1.2983 +{ 1.2984 + DefineVars = 0, 1.2985 + PushInitialValues = 1, 1.2986 + InitializeVars = 2 1.2987 +}; 1.2988 + 1.2989 +typedef bool 1.2990 +(*DestructuringDeclEmitter)(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp prologOp, ParseNode *pn); 1.2991 + 1.2992 +static bool 1.2993 +EmitDestructuringDecl(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp prologOp, ParseNode *pn) 1.2994 +{ 1.2995 + JS_ASSERT(pn->isKind(PNK_NAME)); 1.2996 + if (!BindNameToSlot(cx, bce, pn)) 1.2997 + return false; 1.2998 + 1.2999 + JS_ASSERT(!pn->isOp(JSOP_CALLEE)); 1.3000 + return MaybeEmitVarDecl(cx, bce, prologOp, pn, nullptr); 1.3001 +} 1.3002 + 1.3003 +static bool 1.3004 +EmitDestructuringDecls(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp prologOp, 1.3005 + ParseNode *pattern) 1.3006 +{ 1.3007 + if (pattern->isKind(PNK_ARRAY)) { 1.3008 + for (ParseNode *element = pattern->pn_head; element; element = element->pn_next) { 1.3009 + if (element->isKind(PNK_ELISION)) 1.3010 + continue; 1.3011 + DestructuringDeclEmitter emitter = 1.3012 + element->isKind(PNK_NAME) ? EmitDestructuringDecl : EmitDestructuringDecls; 1.3013 + if (!emitter(cx, bce, prologOp, element)) 1.3014 + return false; 1.3015 + } 1.3016 + return true; 1.3017 + } 1.3018 + 1.3019 + MOZ_ASSERT(pattern->isKind(PNK_OBJECT)); 1.3020 + for (ParseNode *member = pattern->pn_head; member; member = member->pn_next) { 1.3021 + ParseNode *target = member->pn_right; 1.3022 + DestructuringDeclEmitter emitter = 1.3023 + target->isKind(PNK_NAME) ? 
EmitDestructuringDecl : EmitDestructuringDecls; 1.3024 + if (!emitter(cx, bce, prologOp, target)) 1.3025 + return false; 1.3026 + } 1.3027 + return true; 1.3028 +} 1.3029 + 1.3030 +static bool 1.3031 +EmitDestructuringOpsHelper(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, 1.3032 + VarEmitOption emitOption); 1.3033 + 1.3034 +/* 1.3035 + * EmitDestructuringLHS assumes the to-be-destructured value has been pushed on 1.3036 + * the stack and emits code to destructure a single lhs expression (either a 1.3037 + * name or a compound []/{} expression). 1.3038 + * 1.3039 + * If emitOption is InitializeVars, the to-be-destructured value is assigned to 1.3040 + * locals and ultimately the initial slot is popped (-1 total depth change). 1.3041 + * 1.3042 + * If emitOption is PushInitialValues, the to-be-destructured value is replaced 1.3043 + * with the initial values of the N (where 0 <= N) variables assigned in the 1.3044 + * lhs expression. (Same post-condition as EmitDestructuringOpsHelper) 1.3045 + */ 1.3046 +static bool 1.3047 +EmitDestructuringLHS(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, VarEmitOption emitOption) 1.3048 +{ 1.3049 + JS_ASSERT(emitOption != DefineVars); 1.3050 + 1.3051 + // Now emit the lvalue opcode sequence. If the lvalue is a nested 1.3052 + // destructuring initialiser-form, call ourselves to handle it, then pop 1.3053 + // the matched value. Otherwise emit an lvalue bytecode sequence followed 1.3054 + // by an assignment op. 1.3055 + if (pn->isKind(PNK_ARRAY) || pn->isKind(PNK_OBJECT)) { 1.3056 + if (!EmitDestructuringOpsHelper(cx, bce, pn, emitOption)) 1.3057 + return false; 1.3058 + if (emitOption == InitializeVars) { 1.3059 + // Per its post-condition, EmitDestructuringOpsHelper has left the 1.3060 + // to-be-destructured value on top of the stack. 
1.3061 + if (Emit1(cx, bce, JSOP_POP) < 0) 1.3062 + return false; 1.3063 + } 1.3064 + } else if (emitOption == PushInitialValues) { 1.3065 + // The lhs is a simple name so the to-be-destructured value is 1.3066 + // its initial value and there is nothing to do. 1.3067 + JS_ASSERT(pn->getOp() == JSOP_GETLOCAL); 1.3068 + JS_ASSERT(pn->pn_dflags & PND_BOUND); 1.3069 + } else { 1.3070 + switch (pn->getKind()) { 1.3071 + case PNK_NAME: 1.3072 + if (!BindNameToSlot(cx, bce, pn)) 1.3073 + return false; 1.3074 + 1.3075 + // Allow 'const [x,y] = o', make 'const x,y; [x,y] = o' a nop. 1.3076 + if (pn->isConst() && !pn->isDefn()) 1.3077 + return Emit1(cx, bce, JSOP_POP) >= 0; 1.3078 + 1.3079 + switch (pn->getOp()) { 1.3080 + case JSOP_SETNAME: 1.3081 + case JSOP_SETGNAME: 1.3082 + case JSOP_SETCONST: { 1.3083 + // This is like ordinary assignment, but with one difference. 1.3084 + // 1.3085 + // In `a = b`, we first determine a binding for `a` (using 1.3086 + // JSOP_BINDNAME or JSOP_BINDGNAME), then we evaluate `b`, then 1.3087 + // a JSOP_SETNAME instruction. 1.3088 + // 1.3089 + // In `[a] = [b]`, per spec, `b` is evaluated first, then we 1.3090 + // determine a binding for `a`. Then we need to do assignment-- 1.3091 + // but the operands are on the stack in the wrong order for 1.3092 + // JSOP_SETPROP, so we have to add a JSOP_SWAP. 1.3093 + jsatomid atomIndex; 1.3094 + if (!bce->makeAtomIndex(pn->pn_atom, &atomIndex)) 1.3095 + return false; 1.3096 + 1.3097 + if (!pn->isOp(JSOP_SETCONST)) { 1.3098 + JSOp bindOp = pn->isOp(JSOP_SETNAME) ? 
JSOP_BINDNAME : JSOP_BINDGNAME; 1.3099 + if (!EmitIndex32(cx, bindOp, atomIndex, bce)) 1.3100 + return false; 1.3101 + if (Emit1(cx, bce, JSOP_SWAP) < 0) 1.3102 + return false; 1.3103 + } 1.3104 + 1.3105 + if (!EmitIndexOp(cx, pn->getOp(), atomIndex, bce)) 1.3106 + return false; 1.3107 + break; 1.3108 + } 1.3109 + 1.3110 + case JSOP_SETLOCAL: 1.3111 + case JSOP_SETARG: 1.3112 + if (!EmitVarOp(cx, pn, pn->getOp(), bce)) 1.3113 + return false; 1.3114 + break; 1.3115 + 1.3116 + default: 1.3117 + MOZ_ASSUME_UNREACHABLE("EmitDestructuringLHS: bad name op"); 1.3118 + } 1.3119 + break; 1.3120 + 1.3121 + case PNK_DOT: 1.3122 + // See the (PNK_NAME, JSOP_SETNAME) case above. 1.3123 + // 1.3124 + // In `a.x = b`, `a` is evaluated first, then `b`, then a 1.3125 + // JSOP_SETPROP instruction. 1.3126 + // 1.3127 + // In `[a.x] = [b]`, per spec, `b` is evaluated before `a`. Then we 1.3128 + // need a property set -- but the operands are on the stack in the 1.3129 + // wrong order for JSOP_SETPROP, so we have to add a JSOP_SWAP. 1.3130 + if (!EmitTree(cx, bce, pn->pn_expr)) 1.3131 + return false; 1.3132 + if (Emit1(cx, bce, JSOP_SWAP) < 0) 1.3133 + return false; 1.3134 + if (!EmitAtomOp(cx, pn, JSOP_SETPROP, bce)) 1.3135 + return false; 1.3136 + break; 1.3137 + 1.3138 + case PNK_ELEM: 1.3139 + // See the comment at `case PNK_DOT:` above. This case, 1.3140 + // `[a[x]] = [b]`, is handled much the same way. The JSOP_SWAP 1.3141 + // is emitted by EmitElemOperands. 1.3142 + if (!EmitElemOp(cx, pn, JSOP_SETELEM, bce)) 1.3143 + return false; 1.3144 + break; 1.3145 + 1.3146 + case PNK_CALL: 1.3147 + JS_ASSERT(pn->pn_xflags & PNX_SETCALL); 1.3148 + if (!EmitTree(cx, bce, pn)) 1.3149 + return false; 1.3150 + 1.3151 + // Pop the call return value. Below, we pop the RHS too, balancing 1.3152 + // the stack --- presumably for the benefit of bytecode 1.3153 + // analysis. (The interpreter will never reach these instructions 1.3154 + // since we just emitted JSOP_SETCALL, which always throws. 
It's 1.3155 + // possible no analyses actually depend on this either.) 1.3156 + if (Emit1(cx, bce, JSOP_POP) < 0) 1.3157 + return false; 1.3158 + break; 1.3159 + 1.3160 + default: 1.3161 + MOZ_ASSUME_UNREACHABLE("EmitDestructuringLHS: bad lhs kind"); 1.3162 + } 1.3163 + 1.3164 + // Pop the assigned value. 1.3165 + if (Emit1(cx, bce, JSOP_POP) < 0) 1.3166 + return false; 1.3167 + } 1.3168 + 1.3169 + return true; 1.3170 +} 1.3171 + 1.3172 +/* 1.3173 + * Recursive helper for EmitDestructuringOps. 1.3174 + * EmitDestructuringOpsHelper assumes the to-be-destructured value has been 1.3175 + * pushed on the stack and emits code to destructure each part of a [] or {} 1.3176 + * lhs expression. 1.3177 + * 1.3178 + * If emitOption is InitializeVars, the initial to-be-destructured value is 1.3179 + * left untouched on the stack and the overall depth is not changed. 1.3180 + * 1.3181 + * If emitOption is PushInitialValues, the to-be-destructured value is replaced 1.3182 + * with the initial values of the N (where 0 <= N) variables assigned in the 1.3183 + * lhs expression. (Same post-condition as EmitDestructuringLHS) 1.3184 + */ 1.3185 +static bool 1.3186 +EmitDestructuringOpsHelper(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, 1.3187 + VarEmitOption emitOption) 1.3188 +{ 1.3189 + JS_ASSERT(emitOption != DefineVars); 1.3190 + 1.3191 + unsigned index; 1.3192 + ParseNode *pn2, *pn3; 1.3193 + bool doElemOp; 1.3194 + 1.3195 +#ifdef DEBUG 1.3196 + int stackDepth = bce->stackDepth; 1.3197 + JS_ASSERT(stackDepth != 0); 1.3198 + JS_ASSERT(pn->isArity(PN_LIST)); 1.3199 + JS_ASSERT(pn->isKind(PNK_ARRAY) || pn->isKind(PNK_OBJECT)); 1.3200 +#endif 1.3201 + 1.3202 + index = 0; 1.3203 + for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) { 1.3204 + /* Duplicate the value being destructured to use as a reference base. 
*/ 1.3205 + if (Emit1(cx, bce, JSOP_DUP) < 0) 1.3206 + return false; 1.3207 + 1.3208 + /* 1.3209 + * Now push the property name currently being matched, which is either 1.3210 + * the array initialiser's current index, or the current property name 1.3211 + * "label" on the left of a colon in the object initialiser. Set pn3 1.3212 + * to the lvalue node, which is in the value-initializing position. 1.3213 + */ 1.3214 + doElemOp = true; 1.3215 + if (pn->isKind(PNK_ARRAY)) { 1.3216 + if (!EmitNumberOp(cx, index, bce)) 1.3217 + return false; 1.3218 + pn3 = pn2; 1.3219 + } else { 1.3220 + JS_ASSERT(pn->isKind(PNK_OBJECT)); 1.3221 + JS_ASSERT(pn2->isKind(PNK_COLON)); 1.3222 + 1.3223 + ParseNode *key = pn2->pn_left; 1.3224 + if (key->isKind(PNK_NUMBER)) { 1.3225 + if (!EmitNumberOp(cx, key->pn_dval, bce)) 1.3226 + return false; 1.3227 + } else { 1.3228 + MOZ_ASSERT(key->isKind(PNK_STRING) || key->isKind(PNK_NAME)); 1.3229 + PropertyName *name = key->pn_atom->asPropertyName(); 1.3230 + 1.3231 + // The parser already checked for atoms representing indexes and 1.3232 + // used PNK_NUMBER instead, but also watch for ids which TI treats 1.3233 + // as indexes for simplification of downstream analysis. 1.3234 + jsid id = NameToId(name); 1.3235 + if (id != types::IdToTypeId(id)) { 1.3236 + if (!EmitTree(cx, bce, key)) 1.3237 + return false; 1.3238 + } else { 1.3239 + if (!EmitAtomOp(cx, name, JSOP_GETPROP, bce)) 1.3240 + return false; 1.3241 + doElemOp = false; 1.3242 + } 1.3243 + } 1.3244 + 1.3245 + pn3 = pn2->pn_right; 1.3246 + } 1.3247 + 1.3248 + if (doElemOp) { 1.3249 + /* 1.3250 + * Ok, get the value of the matching property name. This leaves 1.3251 + * that value on top of the value being destructured, so the stack 1.3252 + * is one deeper than when we started. 
1.3253 + */ 1.3254 + if (!EmitElemOpBase(cx, bce, JSOP_GETELEM)) 1.3255 + return false; 1.3256 + JS_ASSERT(bce->stackDepth >= stackDepth + 1); 1.3257 + } 1.3258 + 1.3259 + /* Elision node makes a hole in the array destructurer. */ 1.3260 + if (pn3->isKind(PNK_ELISION)) { 1.3261 + JS_ASSERT(pn->isKind(PNK_ARRAY)); 1.3262 + JS_ASSERT(pn2 == pn3); 1.3263 + if (Emit1(cx, bce, JSOP_POP) < 0) 1.3264 + return false; 1.3265 + } else { 1.3266 + int32_t depthBefore = bce->stackDepth; 1.3267 + if (!EmitDestructuringLHS(cx, bce, pn3, emitOption)) 1.3268 + return false; 1.3269 + 1.3270 + if (emitOption == PushInitialValues) { 1.3271 + /* 1.3272 + * After '[x,y]' in 'let ([[x,y], z] = o)', the stack is 1.3273 + * | to-be-destructured-value | x | y | 1.3274 + * The goal is: 1.3275 + * | x | y | z | 1.3276 + * so emit a pick to produce the intermediate state 1.3277 + * | x | y | to-be-destructured-value | 1.3278 + * before destructuring z. This gives the loop invariant that 1.3279 + * the to-be-destructured-value is always on top of the stack. 1.3280 + */ 1.3281 + JS_ASSERT((bce->stackDepth - bce->stackDepth) >= -1); 1.3282 + uint32_t pickDistance = (uint32_t)((bce->stackDepth + 1) - depthBefore); 1.3283 + if (pickDistance > 0) { 1.3284 + if (pickDistance > UINT8_MAX) { 1.3285 + bce->reportError(pn3, JSMSG_TOO_MANY_LOCALS); 1.3286 + return false; 1.3287 + } 1.3288 + if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)pickDistance) < 0) 1.3289 + return false; 1.3290 + } 1.3291 + } 1.3292 + } 1.3293 + 1.3294 + ++index; 1.3295 + } 1.3296 + 1.3297 + if (emitOption == PushInitialValues) { 1.3298 + /* 1.3299 + * Per the above loop invariant, to-be-destructured-value is at the top 1.3300 + * of the stack. To achieve the post-condition, pop it. 
1.3301 + */ 1.3302 + if (Emit1(cx, bce, JSOP_POP) < 0) 1.3303 + return false; 1.3304 + } 1.3305 + 1.3306 + return true; 1.3307 +} 1.3308 + 1.3309 +static bool 1.3310 +EmitDestructuringOps(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, bool isLet = false) 1.3311 +{ 1.3312 + /* 1.3313 + * Call our recursive helper to emit the destructuring assignments and 1.3314 + * related stack manipulations. 1.3315 + */ 1.3316 + VarEmitOption emitOption = isLet ? PushInitialValues : InitializeVars; 1.3317 + return EmitDestructuringOpsHelper(cx, bce, pn, emitOption); 1.3318 +} 1.3319 + 1.3320 +static bool 1.3321 +EmitGroupAssignment(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp prologOp, 1.3322 + ParseNode *lhs, ParseNode *rhs) 1.3323 +{ 1.3324 + uint32_t depth, limit, i, nslots; 1.3325 + ParseNode *pn; 1.3326 + 1.3327 + depth = limit = (uint32_t) bce->stackDepth; 1.3328 + for (pn = rhs->pn_head; pn; pn = pn->pn_next) { 1.3329 + if (limit == JS_BIT(16)) { 1.3330 + bce->reportError(rhs, JSMSG_ARRAY_INIT_TOO_BIG); 1.3331 + return false; 1.3332 + } 1.3333 + 1.3334 + /* MaybeEmitGroupAssignment won't call us if rhs is holey. */ 1.3335 + JS_ASSERT(!pn->isKind(PNK_ELISION)); 1.3336 + if (!EmitTree(cx, bce, pn)) 1.3337 + return false; 1.3338 + ++limit; 1.3339 + } 1.3340 + 1.3341 + i = depth; 1.3342 + for (pn = lhs->pn_head; pn; pn = pn->pn_next, ++i) { 1.3343 + /* MaybeEmitGroupAssignment requires lhs->pn_count <= rhs->pn_count. 
*/ 1.3344 + JS_ASSERT(i < limit); 1.3345 + 1.3346 + if (!EmitDupAt(cx, bce, i)) 1.3347 + return false; 1.3348 + 1.3349 + if (pn->isKind(PNK_ELISION)) { 1.3350 + if (Emit1(cx, bce, JSOP_POP) < 0) 1.3351 + return false; 1.3352 + } else { 1.3353 + if (!EmitDestructuringLHS(cx, bce, pn, InitializeVars)) 1.3354 + return false; 1.3355 + } 1.3356 + } 1.3357 + 1.3358 + nslots = limit - depth; 1.3359 + EMIT_UINT16_IMM_OP(JSOP_POPN, nslots); 1.3360 + bce->stackDepth = (uint32_t) depth; 1.3361 + return true; 1.3362 +} 1.3363 + 1.3364 +enum GroupOption { GroupIsDecl, GroupIsNotDecl }; 1.3365 + 1.3366 +/* 1.3367 + * Helper called with pop out param initialized to a JSOP_POP* opcode. If we 1.3368 + * can emit a group assignment sequence, which results in 0 stack depth delta, 1.3369 + * we set *pop to JSOP_NOP so callers can veto emitting pn followed by a pop. 1.3370 + */ 1.3371 +static bool 1.3372 +MaybeEmitGroupAssignment(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp prologOp, ParseNode *pn, 1.3373 + GroupOption groupOption, JSOp *pop) 1.3374 +{ 1.3375 + JS_ASSERT(pn->isKind(PNK_ASSIGN)); 1.3376 + JS_ASSERT(pn->isOp(JSOP_NOP)); 1.3377 + JS_ASSERT(*pop == JSOP_POP || *pop == JSOP_SETRVAL); 1.3378 + 1.3379 + ParseNode *lhs = pn->pn_left; 1.3380 + ParseNode *rhs = pn->pn_right; 1.3381 + if (lhs->isKind(PNK_ARRAY) && rhs->isKind(PNK_ARRAY) && 1.3382 + !(rhs->pn_xflags & PNX_SPECIALARRAYINIT) && 1.3383 + lhs->pn_count <= rhs->pn_count) 1.3384 + { 1.3385 + if (groupOption == GroupIsDecl && !EmitDestructuringDecls(cx, bce, prologOp, lhs)) 1.3386 + return false; 1.3387 + if (!EmitGroupAssignment(cx, bce, prologOp, lhs, rhs)) 1.3388 + return false; 1.3389 + *pop = JSOP_NOP; 1.3390 + } 1.3391 + return true; 1.3392 +} 1.3393 + 1.3394 +/* 1.3395 + * Like MaybeEmitGroupAssignment, but for 'let ([x,y] = [a,b]) ...'. 
1.3396 + * 1.3397 + * Instead of issuing a sequence |dup|eval-rhs|set-lhs|pop| (which doesn't work 1.3398 + * since the bound vars don't yet have slots), just eval/push each rhs element 1.3399 + * just like what EmitLet would do for 'let (x = a, y = b) ...'. While shorter, 1.3400 + * simpler and more efficient than MaybeEmitGroupAssignment, it is harder to 1.3401 + * decompile so we restrict the ourselves to cases where the lhs and rhs are in 1.3402 + * 1:1 correspondence and lhs elements are simple names. 1.3403 + */ 1.3404 +static bool 1.3405 +MaybeEmitLetGroupDecl(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, JSOp *pop) 1.3406 +{ 1.3407 + JS_ASSERT(pn->isKind(PNK_ASSIGN)); 1.3408 + JS_ASSERT(pn->isOp(JSOP_NOP)); 1.3409 + JS_ASSERT(*pop == JSOP_POP || *pop == JSOP_SETRVAL); 1.3410 + 1.3411 + ParseNode *lhs = pn->pn_left; 1.3412 + ParseNode *rhs = pn->pn_right; 1.3413 + if (lhs->isKind(PNK_ARRAY) && rhs->isKind(PNK_ARRAY) && 1.3414 + !(rhs->pn_xflags & PNX_SPECIALARRAYINIT) && 1.3415 + !(lhs->pn_xflags & PNX_SPECIALARRAYINIT) && 1.3416 + lhs->pn_count == rhs->pn_count) 1.3417 + { 1.3418 + for (ParseNode *l = lhs->pn_head; l; l = l->pn_next) { 1.3419 + if (l->getOp() != JSOP_SETLOCAL) 1.3420 + return true; 1.3421 + } 1.3422 + 1.3423 + for (ParseNode *r = rhs->pn_head; r; r = r->pn_next) { 1.3424 + if (!EmitTree(cx, bce, r)) 1.3425 + return false; 1.3426 + } 1.3427 + 1.3428 + *pop = JSOP_NOP; 1.3429 + } 1.3430 + return true; 1.3431 +} 1.3432 + 1.3433 +static bool 1.3434 +EmitVariables(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, VarEmitOption emitOption, 1.3435 + bool isLet = false) 1.3436 +{ 1.3437 + JS_ASSERT(pn->isArity(PN_LIST)); 1.3438 + JS_ASSERT(isLet == (emitOption == PushInitialValues)); 1.3439 + 1.3440 + ParseNode *next; 1.3441 + for (ParseNode *pn2 = pn->pn_head; ; pn2 = next) { 1.3442 + if (!UpdateSourceCoordNotes(cx, bce, pn2->pn_pos.begin)) 1.3443 + return false; 1.3444 + next = pn2->pn_next; 1.3445 + 1.3446 + ParseNode *pn3; 
1.3447 + if (!pn2->isKind(PNK_NAME)) { 1.3448 + if (pn2->isKind(PNK_ARRAY) || pn2->isKind(PNK_OBJECT)) { 1.3449 + /* 1.3450 + * Emit variable binding ops, but not destructuring ops. The 1.3451 + * parser (see Parser::variables) has ensured that our caller 1.3452 + * will be the PNK_FOR/PNK_FORIN/PNK_FOROF case in EmitTree, and 1.3453 + * that case will emit the destructuring code only after 1.3454 + * emitting an enumerating opcode and a branch that tests 1.3455 + * whether the enumeration ended. 1.3456 + */ 1.3457 + JS_ASSERT(emitOption == DefineVars); 1.3458 + JS_ASSERT(pn->pn_count == 1); 1.3459 + if (!EmitDestructuringDecls(cx, bce, pn->getOp(), pn2)) 1.3460 + return false; 1.3461 + break; 1.3462 + } 1.3463 + 1.3464 + /* 1.3465 + * A destructuring initialiser assignment preceded by var will 1.3466 + * never occur to the left of 'in' in a for-in loop. As with 'for 1.3467 + * (var x = i in o)...', this will cause the entire 'var [a, b] = 1.3468 + * i' to be hoisted out of the loop. 1.3469 + */ 1.3470 + JS_ASSERT(pn2->isKind(PNK_ASSIGN)); 1.3471 + JS_ASSERT(pn2->isOp(JSOP_NOP)); 1.3472 + JS_ASSERT(emitOption != DefineVars); 1.3473 + 1.3474 + /* 1.3475 + * To allow the front end to rewrite var f = x; as f = x; when a 1.3476 + * function f(){} precedes the var, detect simple name assignment 1.3477 + * here and initialize the name. 1.3478 + */ 1.3479 + if (pn2->pn_left->isKind(PNK_NAME)) { 1.3480 + pn3 = pn2->pn_right; 1.3481 + pn2 = pn2->pn_left; 1.3482 + goto do_name; 1.3483 + } 1.3484 + 1.3485 + JSOp op = JSOP_POP; 1.3486 + if (pn->pn_count == 1) { 1.3487 + /* 1.3488 + * If this is the only destructuring assignment in the list, 1.3489 + * try to optimize to a group assignment. If we're in a let 1.3490 + * head, pass JSOP_POP rather than the pseudo-prolog JSOP_NOP 1.3491 + * in pn->pn_op, to suppress a second (and misplaced) 'let'. 
1.3492 + */ 1.3493 + JS_ASSERT(!pn2->pn_next); 1.3494 + if (isLet) { 1.3495 + if (!MaybeEmitLetGroupDecl(cx, bce, pn2, &op)) 1.3496 + return false; 1.3497 + } else { 1.3498 + if (!MaybeEmitGroupAssignment(cx, bce, pn->getOp(), pn2, GroupIsDecl, &op)) 1.3499 + return false; 1.3500 + } 1.3501 + } 1.3502 + if (op == JSOP_NOP) { 1.3503 + pn->pn_xflags = (pn->pn_xflags & ~PNX_POPVAR) | PNX_GROUPINIT; 1.3504 + } else { 1.3505 + pn3 = pn2->pn_left; 1.3506 + if (!EmitDestructuringDecls(cx, bce, pn->getOp(), pn3)) 1.3507 + return false; 1.3508 + 1.3509 + if (!EmitTree(cx, bce, pn2->pn_right)) 1.3510 + return false; 1.3511 + 1.3512 + if (!EmitDestructuringOps(cx, bce, pn3, isLet)) 1.3513 + return false; 1.3514 + } 1.3515 + 1.3516 + /* If we are not initializing, nothing to pop. */ 1.3517 + if (emitOption != InitializeVars) { 1.3518 + if (next) 1.3519 + continue; 1.3520 + break; 1.3521 + } 1.3522 + goto emit_note_pop; 1.3523 + } 1.3524 + 1.3525 + /* 1.3526 + * Load initializer early to share code above that jumps to do_name. 1.3527 + * NB: if this var redeclares an existing binding, then pn2 is linked 1.3528 + * on its definition's use-chain and pn_expr has been overlayed with 1.3529 + * pn_lexdef. 
1.3530 + */ 1.3531 + pn3 = pn2->maybeExpr(); 1.3532 + 1.3533 + do_name: 1.3534 + if (!BindNameToSlot(cx, bce, pn2)) 1.3535 + return false; 1.3536 + 1.3537 + 1.3538 + JSOp op; 1.3539 + op = pn2->getOp(); 1.3540 + JS_ASSERT(op != JSOP_CALLEE); 1.3541 + JS_ASSERT(!pn2->pn_cookie.isFree() || !pn->isOp(JSOP_NOP)); 1.3542 + 1.3543 + jsatomid atomIndex; 1.3544 + if (!MaybeEmitVarDecl(cx, bce, pn->getOp(), pn2, &atomIndex)) 1.3545 + return false; 1.3546 + 1.3547 + if (pn3) { 1.3548 + JS_ASSERT(emitOption != DefineVars); 1.3549 + if (op == JSOP_SETNAME || op == JSOP_SETGNAME || op == JSOP_SETINTRINSIC) { 1.3550 + JS_ASSERT(emitOption != PushInitialValues); 1.3551 + JSOp bindOp; 1.3552 + if (op == JSOP_SETNAME) 1.3553 + bindOp = JSOP_BINDNAME; 1.3554 + else if (op == JSOP_SETGNAME) 1.3555 + bindOp = JSOP_BINDGNAME; 1.3556 + else 1.3557 + bindOp = JSOP_BINDINTRINSIC; 1.3558 + if (!EmitIndex32(cx, bindOp, atomIndex, bce)) 1.3559 + return false; 1.3560 + } 1.3561 + 1.3562 + bool oldEmittingForInit = bce->emittingForInit; 1.3563 + bce->emittingForInit = false; 1.3564 + if (!EmitTree(cx, bce, pn3)) 1.3565 + return false; 1.3566 + bce->emittingForInit = oldEmittingForInit; 1.3567 + } else if (isLet) { 1.3568 + /* JSOP_ENTERLETx expects at least 1 slot to have been pushed. */ 1.3569 + if (Emit1(cx, bce, JSOP_UNDEFINED) < 0) 1.3570 + return false; 1.3571 + } 1.3572 + 1.3573 + /* If we are not initializing, nothing to pop. 
*/ 1.3574 + if (emitOption != InitializeVars) { 1.3575 + if (next) 1.3576 + continue; 1.3577 + break; 1.3578 + } 1.3579 + 1.3580 + JS_ASSERT_IF(pn2->isDefn(), pn3 == pn2->pn_expr); 1.3581 + if (!pn2->pn_cookie.isFree()) { 1.3582 + if (!EmitVarOp(cx, pn2, op, bce)) 1.3583 + return false; 1.3584 + } else { 1.3585 + if (!EmitIndexOp(cx, op, atomIndex, bce)) 1.3586 + return false; 1.3587 + } 1.3588 + 1.3589 + emit_note_pop: 1.3590 + if (!next) 1.3591 + break; 1.3592 + if (Emit1(cx, bce, JSOP_POP) < 0) 1.3593 + return false; 1.3594 + } 1.3595 + 1.3596 + if (pn->pn_xflags & PNX_POPVAR) { 1.3597 + if (Emit1(cx, bce, JSOP_POP) < 0) 1.3598 + return false; 1.3599 + } 1.3600 + 1.3601 + return true; 1.3602 +} 1.3603 + 1.3604 +static bool 1.3605 +EmitAssignment(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *lhs, JSOp op, ParseNode *rhs) 1.3606 +{ 1.3607 + /* 1.3608 + * Check left operand type and generate specialized code for it. 1.3609 + * Specialize to avoid ECMA "reference type" values on the operand 1.3610 + * stack, which impose pervasive runtime "GetValue" costs. 
1.3611 + */ 1.3612 + jsatomid atomIndex = (jsatomid) -1; 1.3613 + jsbytecode offset = 1; 1.3614 + 1.3615 + switch (lhs->getKind()) { 1.3616 + case PNK_NAME: 1.3617 + if (!BindNameToSlot(cx, bce, lhs)) 1.3618 + return false; 1.3619 + if (lhs->pn_cookie.isFree()) { 1.3620 + if (!bce->makeAtomIndex(lhs->pn_atom, &atomIndex)) 1.3621 + return false; 1.3622 + if (!lhs->isConst()) { 1.3623 + JSOp bindOp; 1.3624 + if (lhs->isOp(JSOP_SETNAME)) 1.3625 + bindOp = JSOP_BINDNAME; 1.3626 + else if (lhs->isOp(JSOP_SETGNAME)) 1.3627 + bindOp = JSOP_BINDGNAME; 1.3628 + else 1.3629 + bindOp = JSOP_BINDINTRINSIC; 1.3630 + if (!EmitIndex32(cx, bindOp, atomIndex, bce)) 1.3631 + return false; 1.3632 + offset++; 1.3633 + } 1.3634 + } 1.3635 + break; 1.3636 + case PNK_DOT: 1.3637 + if (!EmitTree(cx, bce, lhs->expr())) 1.3638 + return false; 1.3639 + offset++; 1.3640 + if (!bce->makeAtomIndex(lhs->pn_atom, &atomIndex)) 1.3641 + return false; 1.3642 + break; 1.3643 + case PNK_ELEM: 1.3644 + JS_ASSERT(lhs->isArity(PN_BINARY)); 1.3645 + if (!EmitTree(cx, bce, lhs->pn_left)) 1.3646 + return false; 1.3647 + if (!EmitTree(cx, bce, lhs->pn_right)) 1.3648 + return false; 1.3649 + offset += 2; 1.3650 + break; 1.3651 + case PNK_ARRAY: 1.3652 + case PNK_OBJECT: 1.3653 + break; 1.3654 + case PNK_CALL: 1.3655 + JS_ASSERT(lhs->pn_xflags & PNX_SETCALL); 1.3656 + if (!EmitTree(cx, bce, lhs)) 1.3657 + return false; 1.3658 + if (Emit1(cx, bce, JSOP_POP) < 0) 1.3659 + return false; 1.3660 + break; 1.3661 + default: 1.3662 + JS_ASSERT(0); 1.3663 + } 1.3664 + 1.3665 + if (op != JSOP_NOP) { 1.3666 + JS_ASSERT(rhs); 1.3667 + switch (lhs->getKind()) { 1.3668 + case PNK_NAME: 1.3669 + if (lhs->isConst()) { 1.3670 + if (lhs->isOp(JSOP_CALLEE)) { 1.3671 + if (Emit1(cx, bce, JSOP_CALLEE) < 0) 1.3672 + return false; 1.3673 + } else if (lhs->isOp(JSOP_NAME) || lhs->isOp(JSOP_GETGNAME)) { 1.3674 + if (!EmitIndex32(cx, lhs->getOp(), atomIndex, bce)) 1.3675 + return false; 1.3676 + } else { 1.3677 + 
JS_ASSERT(JOF_OPTYPE(lhs->getOp()) != JOF_ATOM); 1.3678 + if (!EmitVarOp(cx, lhs, lhs->getOp(), bce)) 1.3679 + return false; 1.3680 + } 1.3681 + } else if (lhs->isOp(JSOP_SETNAME)) { 1.3682 + if (Emit1(cx, bce, JSOP_DUP) < 0) 1.3683 + return false; 1.3684 + if (!EmitIndex32(cx, JSOP_GETXPROP, atomIndex, bce)) 1.3685 + return false; 1.3686 + } else if (lhs->isOp(JSOP_SETGNAME)) { 1.3687 + JS_ASSERT(lhs->pn_cookie.isFree()); 1.3688 + if (!EmitAtomOp(cx, lhs, JSOP_GETGNAME, bce)) 1.3689 + return false; 1.3690 + } else if (lhs->isOp(JSOP_SETINTRINSIC)) { 1.3691 + JS_ASSERT(lhs->pn_cookie.isFree()); 1.3692 + if (!EmitAtomOp(cx, lhs, JSOP_GETINTRINSIC, bce)) 1.3693 + return false; 1.3694 + } else { 1.3695 + JSOp op; 1.3696 + switch (lhs->getOp()) { 1.3697 + case JSOP_SETARG: op = JSOP_GETARG; break; 1.3698 + case JSOP_SETLOCAL: op = JSOP_GETLOCAL; break; 1.3699 + case JSOP_SETALIASEDVAR: op = JSOP_GETALIASEDVAR; break; 1.3700 + default: MOZ_ASSUME_UNREACHABLE("Bad op"); 1.3701 + } 1.3702 + if (!EmitVarOp(cx, lhs, op, bce)) 1.3703 + return false; 1.3704 + } 1.3705 + break; 1.3706 + case PNK_DOT: { 1.3707 + if (Emit1(cx, bce, JSOP_DUP) < 0) 1.3708 + return false; 1.3709 + bool isLength = (lhs->pn_atom == cx->names().length); 1.3710 + if (!EmitIndex32(cx, isLength ? JSOP_LENGTH : JSOP_GETPROP, atomIndex, bce)) 1.3711 + return false; 1.3712 + break; 1.3713 + } 1.3714 + case PNK_ELEM: 1.3715 + if (Emit1(cx, bce, JSOP_DUP2) < 0) 1.3716 + return false; 1.3717 + if (!EmitElemOpBase(cx, bce, JSOP_GETELEM)) 1.3718 + return false; 1.3719 + break; 1.3720 + case PNK_CALL: 1.3721 + /* 1.3722 + * We just emitted a JSOP_SETCALL (which will always throw) and 1.3723 + * popped the call's return value. Push a random value to make sure 1.3724 + * the stack depth is correct. 
1.3725 + */ 1.3726 + JS_ASSERT(lhs->pn_xflags & PNX_SETCALL); 1.3727 + if (Emit1(cx, bce, JSOP_NULL) < 0) 1.3728 + return false; 1.3729 + break; 1.3730 + default:; 1.3731 + } 1.3732 + } 1.3733 + 1.3734 + /* Now emit the right operand (it may affect the namespace). */ 1.3735 + if (rhs) { 1.3736 + if (!EmitTree(cx, bce, rhs)) 1.3737 + return false; 1.3738 + } else { 1.3739 + /* 1.3740 + * The value to assign is the next enumeration value in a for-in or 1.3741 + * for-of loop. That value has already been emitted: by JSOP_ITERNEXT 1.3742 + * in the for-in case, or via a GETPROP "value" on the result object in 1.3743 + * the for-of case. If offset == 1, that slot is already at the top of 1.3744 + * the stack. Otherwise, rearrange the stack to put that value on top. 1.3745 + */ 1.3746 + if (offset != 1 && Emit2(cx, bce, JSOP_PICK, offset - 1) < 0) 1.3747 + return false; 1.3748 + } 1.3749 + 1.3750 + /* If += etc., emit the binary operator with a source note. */ 1.3751 + if (op != JSOP_NOP) { 1.3752 + /* 1.3753 + * Take care to avoid SRC_ASSIGNOP if the left-hand side is a const 1.3754 + * declared in the current compilation unit, as in this case (just 1.3755 + * a bit further below) we will avoid emitting the assignment op. 1.3756 + */ 1.3757 + if (!lhs->isKind(PNK_NAME) || !lhs->isConst()) { 1.3758 + if (NewSrcNote(cx, bce, SRC_ASSIGNOP) < 0) 1.3759 + return false; 1.3760 + } 1.3761 + if (Emit1(cx, bce, op) < 0) 1.3762 + return false; 1.3763 + } 1.3764 + 1.3765 + /* Finally, emit the specialized assignment bytecode. 
*/ 1.3766 + switch (lhs->getKind()) { 1.3767 + case PNK_NAME: 1.3768 + if (lhs->isConst()) { 1.3769 + if (!rhs) { 1.3770 + bce->reportError(lhs, JSMSG_BAD_FOR_LEFTSIDE); 1.3771 + return false; 1.3772 + } 1.3773 + break; 1.3774 + } 1.3775 + if (lhs->isOp(JSOP_SETARG) || lhs->isOp(JSOP_SETLOCAL) || lhs->isOp(JSOP_SETALIASEDVAR)) { 1.3776 + if (!EmitVarOp(cx, lhs, lhs->getOp(), bce)) 1.3777 + return false; 1.3778 + } else { 1.3779 + if (!EmitIndexOp(cx, lhs->getOp(), atomIndex, bce)) 1.3780 + return false; 1.3781 + } 1.3782 + break; 1.3783 + case PNK_DOT: 1.3784 + if (!EmitIndexOp(cx, JSOP_SETPROP, atomIndex, bce)) 1.3785 + return false; 1.3786 + break; 1.3787 + case PNK_CALL: 1.3788 + /* Do nothing. The JSOP_SETCALL we emitted will always throw. */ 1.3789 + JS_ASSERT(lhs->pn_xflags & PNX_SETCALL); 1.3790 + break; 1.3791 + case PNK_ELEM: 1.3792 + if (Emit1(cx, bce, JSOP_SETELEM) < 0) 1.3793 + return false; 1.3794 + break; 1.3795 + case PNK_ARRAY: 1.3796 + case PNK_OBJECT: 1.3797 + if (!EmitDestructuringOps(cx, bce, lhs)) 1.3798 + return false; 1.3799 + break; 1.3800 + default: 1.3801 + JS_ASSERT(0); 1.3802 + } 1.3803 + return true; 1.3804 +} 1.3805 + 1.3806 +bool 1.3807 +ParseNode::getConstantValue(ExclusiveContext *cx, bool strictChecks, MutableHandleValue vp) 1.3808 +{ 1.3809 + switch (getKind()) { 1.3810 + case PNK_NUMBER: 1.3811 + vp.setNumber(pn_dval); 1.3812 + return true; 1.3813 + case PNK_STRING: 1.3814 + vp.setString(pn_atom); 1.3815 + return true; 1.3816 + case PNK_TRUE: 1.3817 + vp.setBoolean(true); 1.3818 + return true; 1.3819 + case PNK_FALSE: 1.3820 + vp.setBoolean(false); 1.3821 + return true; 1.3822 + case PNK_NULL: 1.3823 + vp.setNull(); 1.3824 + return true; 1.3825 + case PNK_SPREAD: 1.3826 + return false; 1.3827 + case PNK_ARRAY: { 1.3828 + JS_ASSERT(isOp(JSOP_NEWINIT) && !(pn_xflags & PNX_NONCONST)); 1.3829 + 1.3830 + RootedObject obj(cx, 1.3831 + NewDenseAllocatedArray(cx, pn_count, nullptr, MaybeSingletonObject)); 1.3832 + if (!obj) 1.3833 + 
return false; 1.3834 + 1.3835 + unsigned idx = 0; 1.3836 + RootedId id(cx); 1.3837 + RootedValue value(cx); 1.3838 + for (ParseNode *pn = pn_head; pn; idx++, pn = pn->pn_next) { 1.3839 + if (!pn->getConstantValue(cx, strictChecks, &value)) 1.3840 + return false; 1.3841 + id = INT_TO_JSID(idx); 1.3842 + if (!JSObject::defineGeneric(cx, obj, id, value, nullptr, nullptr, JSPROP_ENUMERATE)) 1.3843 + return false; 1.3844 + } 1.3845 + JS_ASSERT(idx == pn_count); 1.3846 + 1.3847 + types::FixArrayType(cx, obj); 1.3848 + vp.setObject(*obj); 1.3849 + return true; 1.3850 + } 1.3851 + case PNK_OBJECT: { 1.3852 + JS_ASSERT(isOp(JSOP_NEWINIT)); 1.3853 + JS_ASSERT(!(pn_xflags & PNX_NONCONST)); 1.3854 + 1.3855 + gc::AllocKind kind = GuessObjectGCKind(pn_count); 1.3856 + RootedObject obj(cx, NewBuiltinClassInstance(cx, &JSObject::class_, kind, MaybeSingletonObject)); 1.3857 + if (!obj) 1.3858 + return false; 1.3859 + 1.3860 + RootedValue value(cx), idvalue(cx); 1.3861 + for (ParseNode *pn = pn_head; pn; pn = pn->pn_next) { 1.3862 + if (!pn->pn_right->getConstantValue(cx, strictChecks, &value)) 1.3863 + return false; 1.3864 + 1.3865 + ParseNode *pnid = pn->pn_left; 1.3866 + if (pnid->isKind(PNK_NUMBER)) { 1.3867 + idvalue = NumberValue(pnid->pn_dval); 1.3868 + } else { 1.3869 + JS_ASSERT(pnid->isKind(PNK_NAME) || pnid->isKind(PNK_STRING)); 1.3870 + JS_ASSERT(pnid->pn_atom != cx->names().proto); 1.3871 + idvalue = StringValue(pnid->pn_atom); 1.3872 + } 1.3873 + 1.3874 + uint32_t index; 1.3875 + if (IsDefinitelyIndex(idvalue, &index)) { 1.3876 + if (!JSObject::defineElement(cx, obj, index, value, nullptr, nullptr, 1.3877 + JSPROP_ENUMERATE)) 1.3878 + { 1.3879 + return false; 1.3880 + } 1.3881 + 1.3882 + continue; 1.3883 + } 1.3884 + 1.3885 + JSAtom *name = ToAtom<CanGC>(cx, idvalue); 1.3886 + if (!name) 1.3887 + return false; 1.3888 + 1.3889 + if (name->isIndex(&index)) { 1.3890 + if (!JSObject::defineElement(cx, obj, index, value, 1.3891 + nullptr, nullptr, JSPROP_ENUMERATE)) 1.3892 
                    return false;
            } else {
                if (!JSObject::defineProperty(cx, obj, name->asPropertyName(), value,
                                              nullptr, nullptr, JSPROP_ENUMERATE))
                {
                    return false;
                }
            }
        }

        types::FixObjectType(cx, obj);
        vp.setObject(*obj);
        return true;
      }
      default:
        MOZ_ASSUME_UNREACHABLE("Unexpected node");
    }
    return false;
}

/*
 * Evaluate a constant-foldable array/object literal at compile time via
 * ParseNode::getConstantValue, box the resulting singleton object, and emit a
 * single JSOP_OBJECT referencing it instead of emitting per-element init code.
 */
static bool
EmitSingletonInitialiser(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
    RootedValue value(cx);
    if (!pn->getConstantValue(cx, bce->sc->needStrictChecks(), &value))
        return false;

    /* getConstantValue only produces an object for PNK_ARRAY/PNK_OBJECT. */
    JS_ASSERT(value.isObject());
    ObjectBox *objbox = bce->parser->newObjectBox(&value.toObject());
    if (!objbox)
        return false;

    return EmitObjectOp(cx, objbox, JSOP_OBJECT, bce);
}

/* See the SRC_FOR source note offsetBias comments later in this file. */
JS_STATIC_ASSERT(JSOP_NOP_LENGTH == 1);
JS_STATIC_ASSERT(JSOP_POP_LENGTH == 1);

namespace {

/* RAII helper: bumps bce->emitLevel for the lifetime of the scope. */
class EmitLevelManager
{
    BytecodeEmitter *bce;
  public:
    EmitLevelManager(BytecodeEmitter *bce) : bce(bce) { bce->emitLevel++; }
    ~EmitLevelManager() { bce->emitLevel--; }
};

} /* anonymous namespace */

/*
 * Emit a catch block. On entry bce->topStmt must be the catch's own block
 * scope (STMT_BLOCK), which is morphed into STMT_CATCH below; the record
 * above it must be the enclosing TRY or FINALLY.
 */
static bool
EmitCatch(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
    /*
     * Morph STMT_BLOCK to STMT_CATCH, note the block entry code offset,
     * and save the block object atom.
     */
    StmtInfoBCE *stmt = bce->topStmt;
    JS_ASSERT(stmt->type == STMT_BLOCK && stmt->isBlockScope);
    stmt->type = STMT_CATCH;

    /* Go up one statement info record to the TRY or FINALLY record.
*/ 1.3955 + stmt = stmt->down; 1.3956 + JS_ASSERT(stmt->type == STMT_TRY || stmt->type == STMT_FINALLY); 1.3957 + 1.3958 + /* Pick up the pending exception and bind it to the catch variable. */ 1.3959 + if (Emit1(cx, bce, JSOP_EXCEPTION) < 0) 1.3960 + return false; 1.3961 + 1.3962 + /* 1.3963 + * Dup the exception object if there is a guard for rethrowing to use 1.3964 + * it later when rethrowing or in other catches. 1.3965 + */ 1.3966 + if (pn->pn_kid2 && Emit1(cx, bce, JSOP_DUP) < 0) 1.3967 + return false; 1.3968 + 1.3969 + ParseNode *pn2 = pn->pn_kid1; 1.3970 + switch (pn2->getKind()) { 1.3971 + case PNK_ARRAY: 1.3972 + case PNK_OBJECT: 1.3973 + if (!EmitDestructuringOps(cx, bce, pn2)) 1.3974 + return false; 1.3975 + if (Emit1(cx, bce, JSOP_POP) < 0) 1.3976 + return false; 1.3977 + break; 1.3978 + 1.3979 + case PNK_NAME: 1.3980 + /* Inline and specialize BindNameToSlot for pn2. */ 1.3981 + JS_ASSERT(!pn2->pn_cookie.isFree()); 1.3982 + if (!EmitVarOp(cx, pn2, JSOP_SETLOCAL, bce)) 1.3983 + return false; 1.3984 + if (Emit1(cx, bce, JSOP_POP) < 0) 1.3985 + return false; 1.3986 + break; 1.3987 + 1.3988 + default: 1.3989 + JS_ASSERT(0); 1.3990 + } 1.3991 + 1.3992 + // If there is a guard expression, emit it and arrange to jump to the next 1.3993 + // catch block if the guard expression is false. 1.3994 + if (pn->pn_kid2) { 1.3995 + if (!EmitTree(cx, bce, pn->pn_kid2)) 1.3996 + return false; 1.3997 + 1.3998 + // If the guard expression is false, fall through, pop the block scope, 1.3999 + // and jump to the next catch block. Otherwise jump over that code and 1.4000 + // pop the dupped exception. 1.4001 + ptrdiff_t guardCheck = EmitJump(cx, bce, JSOP_IFNE, 0); 1.4002 + if (guardCheck < 0) 1.4003 + return false; 1.4004 + 1.4005 + { 1.4006 + NonLocalExitScope nle(cx, bce); 1.4007 + 1.4008 + // Move exception back to cx->exception to prepare for 1.4009 + // the next catch. 
1.4010 + if (Emit1(cx, bce, JSOP_THROWING) < 0) 1.4011 + return false; 1.4012 + 1.4013 + // Leave the scope for this catch block. 1.4014 + if (!nle.prepareForNonLocalJump(stmt)) 1.4015 + return false; 1.4016 + 1.4017 + // Jump to the next handler. The jump target is backpatched by EmitTry. 1.4018 + ptrdiff_t guardJump = EmitJump(cx, bce, JSOP_GOTO, 0); 1.4019 + if (guardJump < 0) 1.4020 + return false; 1.4021 + stmt->guardJump() = guardJump; 1.4022 + } 1.4023 + 1.4024 + // Back to normal control flow. 1.4025 + SetJumpOffsetAt(bce, guardCheck); 1.4026 + 1.4027 + // Pop duplicated exception object as we no longer need it. 1.4028 + if (Emit1(cx, bce, JSOP_POP) < 0) 1.4029 + return false; 1.4030 + } 1.4031 + 1.4032 + /* Emit the catch body. */ 1.4033 + return EmitTree(cx, bce, pn->pn_kid3); 1.4034 +} 1.4035 + 1.4036 +// Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See the 1.4037 +// comment on EmitSwitch. 1.4038 +// 1.4039 +MOZ_NEVER_INLINE static bool 1.4040 +EmitTry(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) 1.4041 +{ 1.4042 + StmtInfoBCE stmtInfo(cx); 1.4043 + 1.4044 + // Push stmtInfo to track jumps-over-catches and gosubs-to-finally 1.4045 + // for later fixup. 1.4046 + // 1.4047 + // When a finally block is active (STMT_FINALLY in our parse context), 1.4048 + // non-local jumps (including jumps-over-catches) result in a GOSUB 1.4049 + // being written into the bytecode stream and fixed-up later (c.f. 1.4050 + // EmitBackPatchOp and BackPatch). 1.4051 + // 1.4052 + PushStatementBCE(bce, &stmtInfo, pn->pn_kid3 ? STMT_FINALLY : STMT_TRY, bce->offset()); 1.4053 + 1.4054 + // Since an exception can be thrown at any place inside the try block, 1.4055 + // we need to restore the stack and the scope chain before we transfer 1.4056 + // the control to the exception handler. 1.4057 + // 1.4058 + // For that we store in a try note associated with the catch or 1.4059 + // finally block the stack depth upon the try entry. 
The interpreter 1.4060 + // uses this depth to properly unwind the stack and the scope chain. 1.4061 + // 1.4062 + int depth = bce->stackDepth; 1.4063 + 1.4064 + // Record the try location, then emit the try block. 1.4065 + ptrdiff_t noteIndex = NewSrcNote(cx, bce, SRC_TRY); 1.4066 + if (noteIndex < 0 || Emit1(cx, bce, JSOP_TRY) < 0) 1.4067 + return false; 1.4068 + ptrdiff_t tryStart = bce->offset(); 1.4069 + if (!EmitTree(cx, bce, pn->pn_kid1)) 1.4070 + return false; 1.4071 + JS_ASSERT(depth == bce->stackDepth); 1.4072 + 1.4073 + // GOSUB to finally, if present. 1.4074 + if (pn->pn_kid3) { 1.4075 + if (EmitBackPatchOp(cx, bce, &stmtInfo.gosubs()) < 0) 1.4076 + return false; 1.4077 + } 1.4078 + 1.4079 + // Source note points to the jump at the end of the try block. 1.4080 + if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, bce->offset() - tryStart + JSOP_TRY_LENGTH)) 1.4081 + return false; 1.4082 + 1.4083 + // Emit jump over catch and/or finally. 1.4084 + ptrdiff_t catchJump = -1; 1.4085 + if (EmitBackPatchOp(cx, bce, &catchJump) < 0) 1.4086 + return false; 1.4087 + 1.4088 + ptrdiff_t tryEnd = bce->offset(); 1.4089 + 1.4090 + // If this try has a catch block, emit it. 
1.4091 + if (ParseNode *pn2 = pn->pn_kid2) { 1.4092 + // The emitted code for a catch block looks like: 1.4093 + // 1.4094 + // [pushblockscope] only if any local aliased 1.4095 + // exception 1.4096 + // if there is a catchguard: 1.4097 + // dup 1.4098 + // setlocal 0; pop assign or possibly destructure exception 1.4099 + // if there is a catchguard: 1.4100 + // < catchguard code > 1.4101 + // ifne POST 1.4102 + // debugleaveblock 1.4103 + // [popblockscope] only if any local aliased 1.4104 + // throwing pop exception to cx->exception 1.4105 + // goto <next catch block> 1.4106 + // POST: pop 1.4107 + // < catch block contents > 1.4108 + // debugleaveblock 1.4109 + // [popblockscope] only if any local aliased 1.4110 + // goto <end of catch blocks> non-local; finally applies 1.4111 + // 1.4112 + // If there's no catch block without a catchguard, the last <next catch 1.4113 + // block> points to rethrow code. This code will [gosub] to the finally 1.4114 + // code if appropriate, and is also used for the catch-all trynote for 1.4115 + // capturing exceptions thrown from catch{} blocks. 1.4116 + // 1.4117 + for (ParseNode *pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) { 1.4118 + JS_ASSERT(bce->stackDepth == depth); 1.4119 + 1.4120 + // Emit the lexical scope and catch body. 1.4121 + JS_ASSERT(pn3->isKind(PNK_LEXICALSCOPE)); 1.4122 + if (!EmitTree(cx, bce, pn3)) 1.4123 + return false; 1.4124 + 1.4125 + // gosub <finally>, if required. 1.4126 + if (pn->pn_kid3) { 1.4127 + if (EmitBackPatchOp(cx, bce, &stmtInfo.gosubs()) < 0) 1.4128 + return false; 1.4129 + JS_ASSERT(bce->stackDepth == depth); 1.4130 + } 1.4131 + 1.4132 + // Jump over the remaining catch blocks. This will get fixed 1.4133 + // up to jump to after catch/finally. 1.4134 + if (EmitBackPatchOp(cx, bce, &catchJump) < 0) 1.4135 + return false; 1.4136 + 1.4137 + // If this catch block had a guard clause, patch the guard jump to 1.4138 + // come here. 
1.4139 + if (stmtInfo.guardJump() != -1) { 1.4140 + SetJumpOffsetAt(bce, stmtInfo.guardJump()); 1.4141 + stmtInfo.guardJump() = -1; 1.4142 + 1.4143 + // If this catch block is the last one, rethrow, delegating 1.4144 + // execution of any finally block to the exception handler. 1.4145 + if (!pn3->pn_next) { 1.4146 + if (Emit1(cx, bce, JSOP_EXCEPTION) < 0) 1.4147 + return false; 1.4148 + if (Emit1(cx, bce, JSOP_THROW) < 0) 1.4149 + return false; 1.4150 + } 1.4151 + } 1.4152 + } 1.4153 + } 1.4154 + 1.4155 + JS_ASSERT(bce->stackDepth == depth); 1.4156 + 1.4157 + // Emit the finally handler, if there is one. 1.4158 + ptrdiff_t finallyStart = 0; 1.4159 + if (pn->pn_kid3) { 1.4160 + // Fix up the gosubs that might have been emitted before non-local 1.4161 + // jumps to the finally code. 1.4162 + if (!BackPatch(cx, bce, stmtInfo.gosubs(), bce->code().end(), JSOP_GOSUB)) 1.4163 + return false; 1.4164 + 1.4165 + finallyStart = bce->offset(); 1.4166 + 1.4167 + // Indicate that we're emitting a subroutine body. 1.4168 + stmtInfo.type = STMT_SUBROUTINE; 1.4169 + if (!UpdateSourceCoordNotes(cx, bce, pn->pn_kid3->pn_pos.begin)) 1.4170 + return false; 1.4171 + if (Emit1(cx, bce, JSOP_FINALLY) < 0 || 1.4172 + !EmitTree(cx, bce, pn->pn_kid3) || 1.4173 + Emit1(cx, bce, JSOP_RETSUB) < 0) 1.4174 + { 1.4175 + return false; 1.4176 + } 1.4177 + JS_ASSERT(bce->stackDepth == depth); 1.4178 + } 1.4179 + if (!PopStatementBCE(cx, bce)) 1.4180 + return false; 1.4181 + 1.4182 + // ReconstructPCStack needs a NOP here to mark the end of the last catch block. 1.4183 + if (Emit1(cx, bce, JSOP_NOP) < 0) 1.4184 + return false; 1.4185 + 1.4186 + // Fix up the end-of-try/catch jumps to come here. 1.4187 + if (!BackPatch(cx, bce, catchJump, bce->code().end(), JSOP_GOTO)) 1.4188 + return false; 1.4189 + 1.4190 + // Add the try note last, to let post-order give us the right ordering 1.4191 + // (first to last for a given nesting level, inner to outer by level). 
    if (pn->pn_kid2 && !bce->tryNoteList.append(JSTRY_CATCH, depth, tryStart, tryEnd))
        return false;

    // If we've got a finally, mark try+catch region with additional
    // trynote to catch exceptions (re)thrown from a catch block or
    // for the try{}finally{} case.
    if (pn->pn_kid3 && !bce->tryNoteList.append(JSTRY_FINALLY, depth, tryStart, finallyStart))
        return false;

    return true;
}

/*
 * Emit an if/else-if/else statement. Else-if chains are flattened: instead of
 * recursing on EmitIf for 'else if', control jumps back to the if_again label
 * and the single stmtInfo record is reused (STMT_IF <-> STMT_ELSE).
 */
static bool
EmitIf(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
    StmtInfoBCE stmtInfo(cx);

    /* Initialize so we can detect else-if chains and avoid recursion. */
    stmtInfo.type = STMT_IF;
    ptrdiff_t beq = -1;
    ptrdiff_t jmp = -1;
    ptrdiff_t noteIndex = -1;

  if_again:
    /* Emit code for the condition before pushing stmtInfo. */
    if (!EmitTree(cx, bce, pn->pn_kid1))
        return false;
    ptrdiff_t top = bce->offset();
    if (stmtInfo.type == STMT_IF) {
        PushStatementBCE(bce, &stmtInfo, STMT_IF, top);
    } else {
        /*
         * We came here from the goto further below that detects else-if
         * chains, so we must mutate stmtInfo back into a STMT_IF record.
         * Also we need a note offset for SRC_IF_ELSE to help IonMonkey.
         */
        JS_ASSERT(stmtInfo.type == STMT_ELSE);
        stmtInfo.type = STMT_IF;
        stmtInfo.update = top;
        if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, jmp - beq))
            return false;
    }

    /* Emit an annotated branch-if-false around the then part. */
    ParseNode *pn3 = pn->pn_kid3;
    noteIndex = NewSrcNote(cx, bce, pn3 ? SRC_IF_ELSE : SRC_IF);
    if (noteIndex < 0)
        return false;
    beq = EmitJump(cx, bce, JSOP_IFEQ, 0);
    if (beq < 0)
        return false;

    /* Emit code for the then and optional else parts. */
    if (!EmitTree(cx, bce, pn->pn_kid2))
        return false;
    if (pn3) {
        /* Modify stmtInfo so we know we're in the else part. */
        stmtInfo.type = STMT_ELSE;

        /*
         * Emit a JSOP_BACKPATCH op to jump from the end of our then part
         * around the else part. The PopStatementBCE call at the bottom of
         * this function will fix up the backpatch chain linked from
         * stmtInfo.breaks.
         */
        jmp = EmitGoto(cx, bce, &stmtInfo, &stmtInfo.breaks);
        if (jmp < 0)
            return false;

        /* Ensure the branch-if-false comes here, then emit the else. */
        SetJumpOffsetAt(bce, beq);
        if (pn3->isKind(PNK_IF)) {
            /* else-if: loop instead of recursing; stmtInfo is reused. */
            pn = pn3;
            goto if_again;
        }

        if (!EmitTree(cx, bce, pn3))
            return false;

        /*
         * Annotate SRC_IF_ELSE with the offset from branch to jump, for
         * IonMonkey's benefit. We can't just "back up" from the pc
         * of the else clause, because we don't know whether an extended
         * jump was required to leap from the end of the then clause over
         * the else clause.
         */
        if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, jmp - beq))
            return false;
    } else {
        /* No else part, fixup the branch-if-false to come here. */
        SetJumpOffsetAt(bce, beq);
    }
    return PopStatementBCE(cx, bce);
}

/*
 * pnLet represents one of:
 *
 *   let-expression: (let (x = y) EXPR)
 *   let-statement: let (x = y) { ...
} 1.4292 + * 1.4293 + * For a let-expression 'let (x = a, [y,z] = b) e', EmitLet produces: 1.4294 + * 1.4295 + * bytecode stackDepth srcnotes 1.4296 + * evaluate a +1 1.4297 + * evaluate b +1 1.4298 + * dup +1 1.4299 + * destructure y 1.4300 + * pick 1 1.4301 + * dup +1 1.4302 + * destructure z 1.4303 + * pick 1 1.4304 + * pop -1 1.4305 + * setlocal 2 -1 1.4306 + * setlocal 1 -1 1.4307 + * setlocal 0 -1 1.4308 + * pushblockscope (if needed) 1.4309 + * evaluate e +1 1.4310 + * debugleaveblock 1.4311 + * popblockscope (if needed) 1.4312 + * 1.4313 + * Note that, since pushblockscope simply changes fp->scopeChain and does not 1.4314 + * otherwise touch the stack, evaluation of the let-var initializers must leave 1.4315 + * the initial value in the let-var's future slot. 1.4316 + */ 1.4317 +/* 1.4318 + * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See 1.4319 + * the comment on EmitSwitch. 1.4320 + */ 1.4321 +MOZ_NEVER_INLINE static bool 1.4322 +EmitLet(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pnLet) 1.4323 +{ 1.4324 + JS_ASSERT(pnLet->isArity(PN_BINARY)); 1.4325 + ParseNode *varList = pnLet->pn_left; 1.4326 + JS_ASSERT(varList->isArity(PN_LIST)); 1.4327 + ParseNode *letBody = pnLet->pn_right; 1.4328 + JS_ASSERT(letBody->isLet() && letBody->isKind(PNK_LEXICALSCOPE)); 1.4329 + 1.4330 + int letHeadDepth = bce->stackDepth; 1.4331 + 1.4332 + if (!EmitVariables(cx, bce, varList, PushInitialValues, true)) 1.4333 + return false; 1.4334 + 1.4335 + /* Push storage for hoisted let decls (e.g. 'let (x) { let y }'). 
 */
    uint32_t alreadyPushed = bce->stackDepth - letHeadDepth;
    StmtInfoBCE stmtInfo(cx);
    if (!EnterBlockScope(cx, bce, &stmtInfo, letBody->pn_objbox, alreadyPushed))
        return false;

    if (!EmitTree(cx, bce, letBody->pn_expr))
        return false;

    if (!LeaveNestedScope(cx, bce, &stmtInfo))
        return false;

    return true;
}

/*
 * Emit a block statement (PNK_LEXICALSCOPE): enter the block's scope, emit
 * the body, then leave the scope again.
 *
 * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See
 * the comment on EmitSwitch.
 */
MOZ_NEVER_INLINE static bool
EmitLexicalScope(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
    JS_ASSERT(pn->isKind(PNK_LEXICALSCOPE));

    StmtInfoBCE stmtInfo(cx);
    if (!EnterBlockScope(cx, bce, &stmtInfo, pn->pn_objbox, 0))
        return false;

    if (!EmitTree(cx, bce, pn->pn_expr))
        return false;

    if (!LeaveNestedScope(cx, bce, &stmtInfo))
        return false;

    return true;
}

/*
 * Emit a with statement: evaluate the object expression (pn_left), enter a
 * STMT_WITH nested scope, emit the body (pn_right), and leave the scope.
 */
static bool
EmitWith(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
    StmtInfoBCE stmtInfo(cx);
    if (!EmitTree(cx, bce, pn->pn_left))
        return false;
    if (!EnterNestedScope(cx, bce, &stmtInfo, pn->pn_binary_obj, STMT_WITH))
        return false;
    if (!EmitTree(cx, bce, pn->pn_right))
        return false;
    if (!LeaveNestedScope(cx, bce, &stmtInfo))
        return false;
    return true;
}

static bool
EmitForOf(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t top)
{
    ParseNode *forHead = pn->pn_left;
    ParseNode *forBody = pn->pn_right;

    ParseNode *pn1 = forHead->pn_kid1;
    bool letDecl = pn1 && pn1->isKind(PNK_LEXICALSCOPE);
    JS_ASSERT_IF(letDecl, pn1->isLet());

    // If
the left part is 'var x', emit code to define x if necessary using a 1.4398 + // prolog opcode, but do not emit a pop. 1.4399 + if (pn1) { 1.4400 + ParseNode *decl = letDecl ? pn1->pn_expr : pn1; 1.4401 + JS_ASSERT(decl->isKind(PNK_VAR) || decl->isKind(PNK_LET)); 1.4402 + bce->emittingForInit = true; 1.4403 + if (!EmitVariables(cx, bce, decl, DefineVars)) 1.4404 + return false; 1.4405 + bce->emittingForInit = false; 1.4406 + } 1.4407 + 1.4408 + // For-of loops run with two values on the stack: the iterator and the 1.4409 + // current result object. 1.4410 + 1.4411 + // Compile the object expression to the right of 'of'. 1.4412 + if (!EmitTree(cx, bce, forHead->pn_kid3)) 1.4413 + return false; 1.4414 + 1.4415 + // Convert iterable to iterator. 1.4416 + if (Emit1(cx, bce, JSOP_DUP) < 0) // OBJ OBJ 1.4417 + return false; 1.4418 + if (!EmitAtomOp(cx, cx->names().std_iterator, JSOP_CALLPROP, bce)) // OBJ @@ITERATOR 1.4419 + return false; 1.4420 + if (Emit1(cx, bce, JSOP_SWAP) < 0) // @@ITERATOR OBJ 1.4421 + return false; 1.4422 + if (EmitCall(cx, bce, JSOP_CALL, 0) < 0) // ITER 1.4423 + return false; 1.4424 + CheckTypeSet(cx, bce, JSOP_CALL); 1.4425 + 1.4426 + // Push a dummy result so that we properly enter iteration midstream. 1.4427 + if (Emit1(cx, bce, JSOP_UNDEFINED) < 0) // ITER RESULT 1.4428 + return false; 1.4429 + 1.4430 + // Enter the block before the loop body, after evaluating the obj. 1.4431 + StmtInfoBCE letStmt(cx); 1.4432 + if (letDecl) { 1.4433 + if (!EnterBlockScope(cx, bce, &letStmt, pn1->pn_objbox, 0)) 1.4434 + return false; 1.4435 + } 1.4436 + 1.4437 + LoopStmtInfo stmtInfo(cx); 1.4438 + PushLoopStatement(bce, &stmtInfo, STMT_FOR_OF_LOOP, top); 1.4439 + 1.4440 + // Jump down to the loop condition to minimize overhead assuming at least 1.4441 + // one iteration, as the other loop forms do. Annotate so IonMonkey can 1.4442 + // find the loop-closing jump. 
1.4443 + int noteIndex = NewSrcNote(cx, bce, SRC_FOR_OF); 1.4444 + if (noteIndex < 0) 1.4445 + return false; 1.4446 + ptrdiff_t jmp = EmitJump(cx, bce, JSOP_GOTO, 0); 1.4447 + if (jmp < 0) 1.4448 + return false; 1.4449 + 1.4450 + top = bce->offset(); 1.4451 + SET_STATEMENT_TOP(&stmtInfo, top); 1.4452 + if (EmitLoopHead(cx, bce, nullptr) < 0) 1.4453 + return false; 1.4454 + 1.4455 +#ifdef DEBUG 1.4456 + int loopDepth = bce->stackDepth; 1.4457 +#endif 1.4458 + 1.4459 + // Emit code to assign result.value to the iteration variable. 1.4460 + if (Emit1(cx, bce, JSOP_DUP) < 0) // ITER RESULT RESULT 1.4461 + return false; 1.4462 + if (!EmitAtomOp(cx, cx->names().value, JSOP_GETPROP, bce)) // ITER RESULT VALUE 1.4463 + return false; 1.4464 + if (!EmitAssignment(cx, bce, forHead->pn_kid2, JSOP_NOP, nullptr)) // ITER RESULT VALUE 1.4465 + return false; 1.4466 + if (Emit1(cx, bce, JSOP_POP) < 0) // ITER RESULT 1.4467 + return false; 1.4468 + 1.4469 + // The stack should be balanced around the assignment opcode sequence. 1.4470 + JS_ASSERT(bce->stackDepth == loopDepth); 1.4471 + 1.4472 + // Emit code for the loop body. 1.4473 + if (!EmitTree(cx, bce, forBody)) 1.4474 + return false; 1.4475 + 1.4476 + // Set loop and enclosing "update" offsets, for continue. 1.4477 + StmtInfoBCE *stmt = &stmtInfo; 1.4478 + do { 1.4479 + stmt->update = bce->offset(); 1.4480 + } while ((stmt = stmt->down) != nullptr && stmt->type == STMT_LABEL); 1.4481 + 1.4482 + // COME FROM the beginning of the loop to here. 
1.4483 + SetJumpOffsetAt(bce, jmp); 1.4484 + if (!EmitLoopEntry(cx, bce, nullptr)) 1.4485 + return false; 1.4486 + 1.4487 + if (Emit1(cx, bce, JSOP_POP) < 0) // ITER 1.4488 + return false; 1.4489 + if (Emit1(cx, bce, JSOP_DUP) < 0) // ITER ITER 1.4490 + return false; 1.4491 + if (Emit1(cx, bce, JSOP_DUP) < 0) // ITER ITER ITER 1.4492 + return false; 1.4493 + if (!EmitAtomOp(cx, cx->names().next, JSOP_CALLPROP, bce)) // ITER ITER NEXT 1.4494 + return false; 1.4495 + if (Emit1(cx, bce, JSOP_SWAP) < 0) // ITER NEXT ITER 1.4496 + return false; 1.4497 + if (Emit1(cx, bce, JSOP_UNDEFINED) < 0) // ITER NEXT ITER UNDEFINED 1.4498 + return false; 1.4499 + if (EmitCall(cx, bce, JSOP_CALL, 1) < 0) // ITER RESULT 1.4500 + return false; 1.4501 + CheckTypeSet(cx, bce, JSOP_CALL); 1.4502 + if (Emit1(cx, bce, JSOP_DUP) < 0) // ITER RESULT RESULT 1.4503 + return false; 1.4504 + if (!EmitAtomOp(cx, cx->names().done, JSOP_GETPROP, bce)) // ITER RESULT DONE? 1.4505 + return false; 1.4506 + 1.4507 + ptrdiff_t beq = EmitJump(cx, bce, JSOP_IFEQ, top - bce->offset()); // ITER RESULT 1.4508 + if (beq < 0) 1.4509 + return false; 1.4510 + 1.4511 + JS_ASSERT(bce->stackDepth == loopDepth); 1.4512 + 1.4513 + // Let Ion know where the closing jump of this loop is. 1.4514 + if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 0, beq - jmp)) 1.4515 + return false; 1.4516 + 1.4517 + // Fixup breaks and continues. 1.4518 + if (!PopStatementBCE(cx, bce)) 1.4519 + return false; 1.4520 + 1.4521 + if (letDecl) { 1.4522 + if (!LeaveNestedScope(cx, bce, &letStmt)) 1.4523 + return false; 1.4524 + } 1.4525 + 1.4526 + // Pop the result and the iter. 
1.4527 + EMIT_UINT16_IMM_OP(JSOP_POPN, 2); 1.4528 + 1.4529 + return true; 1.4530 +} 1.4531 + 1.4532 +static bool 1.4533 +EmitForIn(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t top) 1.4534 +{ 1.4535 + ParseNode *forHead = pn->pn_left; 1.4536 + ParseNode *forBody = pn->pn_right; 1.4537 + 1.4538 + ParseNode *pn1 = forHead->pn_kid1; 1.4539 + bool letDecl = pn1 && pn1->isKind(PNK_LEXICALSCOPE); 1.4540 + JS_ASSERT_IF(letDecl, pn1->isLet()); 1.4541 + 1.4542 + /* 1.4543 + * If the left part is 'var x', emit code to define x if necessary 1.4544 + * using a prolog opcode, but do not emit a pop. If the left part was 1.4545 + * originally 'var x = i', the parser will have rewritten it; see 1.4546 + * Parser::forStatement. 'for (let x = i in o)' is mercifully banned. 1.4547 + */ 1.4548 + if (pn1) { 1.4549 + ParseNode *decl = letDecl ? pn1->pn_expr : pn1; 1.4550 + JS_ASSERT(decl->isKind(PNK_VAR) || decl->isKind(PNK_LET)); 1.4551 + bce->emittingForInit = true; 1.4552 + if (!EmitVariables(cx, bce, decl, DefineVars)) 1.4553 + return false; 1.4554 + bce->emittingForInit = false; 1.4555 + } 1.4556 + 1.4557 + /* Compile the object expression to the right of 'in'. */ 1.4558 + if (!EmitTree(cx, bce, forHead->pn_kid3)) 1.4559 + return false; 1.4560 + 1.4561 + /* 1.4562 + * Emit a bytecode to convert top of stack value to the iterator 1.4563 + * object depending on the loop variant (for-in, for-each-in, or 1.4564 + * destructuring for-in). 1.4565 + */ 1.4566 + JS_ASSERT(pn->isOp(JSOP_ITER)); 1.4567 + if (Emit2(cx, bce, JSOP_ITER, (uint8_t) pn->pn_iflags) < 0) 1.4568 + return false; 1.4569 + 1.4570 + /* Enter the block before the loop body, after evaluating the obj. 
*/ 1.4571 + StmtInfoBCE letStmt(cx); 1.4572 + if (letDecl) { 1.4573 + if (!EnterBlockScope(cx, bce, &letStmt, pn1->pn_objbox, 0)) 1.4574 + return false; 1.4575 + } 1.4576 + 1.4577 + LoopStmtInfo stmtInfo(cx); 1.4578 + PushLoopStatement(bce, &stmtInfo, STMT_FOR_IN_LOOP, top); 1.4579 + 1.4580 + /* Annotate so IonMonkey can find the loop-closing jump. */ 1.4581 + int noteIndex = NewSrcNote(cx, bce, SRC_FOR_IN); 1.4582 + if (noteIndex < 0) 1.4583 + return false; 1.4584 + 1.4585 + /* 1.4586 + * Jump down to the loop condition to minimize overhead assuming at 1.4587 + * least one iteration, as the other loop forms do. 1.4588 + */ 1.4589 + ptrdiff_t jmp = EmitJump(cx, bce, JSOP_GOTO, 0); 1.4590 + if (jmp < 0) 1.4591 + return false; 1.4592 + 1.4593 + top = bce->offset(); 1.4594 + SET_STATEMENT_TOP(&stmtInfo, top); 1.4595 + if (EmitLoopHead(cx, bce, nullptr) < 0) 1.4596 + return false; 1.4597 + 1.4598 +#ifdef DEBUG 1.4599 + int loopDepth = bce->stackDepth; 1.4600 +#endif 1.4601 + 1.4602 + /* 1.4603 + * Emit code to get the next enumeration value and assign it to the 1.4604 + * left hand side. 1.4605 + */ 1.4606 + if (Emit1(cx, bce, JSOP_ITERNEXT) < 0) 1.4607 + return false; 1.4608 + if (!EmitAssignment(cx, bce, forHead->pn_kid2, JSOP_NOP, nullptr)) 1.4609 + return false; 1.4610 + 1.4611 + if (Emit1(cx, bce, JSOP_POP) < 0) 1.4612 + return false; 1.4613 + 1.4614 + /* The stack should be balanced around the assignment opcode sequence. */ 1.4615 + JS_ASSERT(bce->stackDepth == loopDepth); 1.4616 + 1.4617 + /* Emit code for the loop body. */ 1.4618 + if (!EmitTree(cx, bce, forBody)) 1.4619 + return false; 1.4620 + 1.4621 + /* Set loop and enclosing "update" offsets, for continue. */ 1.4622 + StmtInfoBCE *stmt = &stmtInfo; 1.4623 + do { 1.4624 + stmt->update = bce->offset(); 1.4625 + } while ((stmt = stmt->down) != nullptr && stmt->type == STMT_LABEL); 1.4626 + 1.4627 + /* 1.4628 + * Fixup the goto that starts the loop to jump down to JSOP_MOREITER. 
1.4629 + */ 1.4630 + SetJumpOffsetAt(bce, jmp); 1.4631 + if (!EmitLoopEntry(cx, bce, nullptr)) 1.4632 + return false; 1.4633 + if (Emit1(cx, bce, JSOP_MOREITER) < 0) 1.4634 + return false; 1.4635 + ptrdiff_t beq = EmitJump(cx, bce, JSOP_IFNE, top - bce->offset()); 1.4636 + if (beq < 0) 1.4637 + return false; 1.4638 + 1.4639 + /* Set the srcnote offset so we can find the closing jump. */ 1.4640 + if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 0, beq - jmp)) 1.4641 + return false; 1.4642 + 1.4643 + // Fix up breaks and continues. 1.4644 + if (!PopStatementBCE(cx, bce)) 1.4645 + return false; 1.4646 + 1.4647 + if (!bce->tryNoteList.append(JSTRY_ITER, bce->stackDepth, top, bce->offset())) 1.4648 + return false; 1.4649 + if (Emit1(cx, bce, JSOP_ENDITER) < 0) 1.4650 + return false; 1.4651 + 1.4652 + if (letDecl) { 1.4653 + if (!LeaveNestedScope(cx, bce, &letStmt)) 1.4654 + return false; 1.4655 + } 1.4656 + 1.4657 + return true; 1.4658 +} 1.4659 + 1.4660 +static bool 1.4661 +EmitNormalFor(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t top) 1.4662 +{ 1.4663 + LoopStmtInfo stmtInfo(cx); 1.4664 + PushLoopStatement(bce, &stmtInfo, STMT_FOR_LOOP, top); 1.4665 + 1.4666 + ParseNode *forHead = pn->pn_left; 1.4667 + ParseNode *forBody = pn->pn_right; 1.4668 + 1.4669 + /* C-style for (init; cond; update) ... loop. */ 1.4670 + JSOp op = JSOP_POP; 1.4671 + ParseNode *pn3 = forHead->pn_kid1; 1.4672 + if (!pn3) { 1.4673 + // No initializer, but emit a nop so that there's somewhere to put the 1.4674 + // SRC_FOR annotation that IonBuilder will look for. 
1.4675 + op = JSOP_NOP; 1.4676 + } else { 1.4677 + bce->emittingForInit = true; 1.4678 + if (pn3->isKind(PNK_ASSIGN)) { 1.4679 + JS_ASSERT(pn3->isOp(JSOP_NOP)); 1.4680 + if (!MaybeEmitGroupAssignment(cx, bce, op, pn3, GroupIsNotDecl, &op)) 1.4681 + return false; 1.4682 + } 1.4683 + if (op == JSOP_POP) { 1.4684 + if (!UpdateSourceCoordNotes(cx, bce, pn3->pn_pos.begin)) 1.4685 + return false; 1.4686 + if (!EmitTree(cx, bce, pn3)) 1.4687 + return false; 1.4688 + if (pn3->isKind(PNK_VAR) || pn3->isKind(PNK_CONST) || pn3->isKind(PNK_LET)) { 1.4689 + /* 1.4690 + * Check whether a destructuring-initialized var decl 1.4691 + * was optimized to a group assignment. If so, we do 1.4692 + * not need to emit a pop below, so switch to a nop, 1.4693 + * just for IonBuilder. 1.4694 + */ 1.4695 + JS_ASSERT(pn3->isArity(PN_LIST) || pn3->isArity(PN_BINARY)); 1.4696 + if (pn3->pn_xflags & PNX_GROUPINIT) 1.4697 + op = JSOP_NOP; 1.4698 + } 1.4699 + } 1.4700 + bce->emittingForInit = false; 1.4701 + } 1.4702 + 1.4703 + /* 1.4704 + * NB: the SRC_FOR note has offsetBias 1 (JSOP_{NOP,POP}_LENGTH). 1.4705 + * Use tmp to hold the biased srcnote "top" offset, which differs 1.4706 + * from the top local variable by the length of the JSOP_GOTO 1.4707 + * emitted in between tmp and top if this loop has a condition. 1.4708 + */ 1.4709 + int noteIndex = NewSrcNote(cx, bce, SRC_FOR); 1.4710 + if (noteIndex < 0 || Emit1(cx, bce, op) < 0) 1.4711 + return false; 1.4712 + ptrdiff_t tmp = bce->offset(); 1.4713 + 1.4714 + ptrdiff_t jmp = -1; 1.4715 + if (forHead->pn_kid2) { 1.4716 + /* Goto the loop condition, which branches back to iterate. */ 1.4717 + jmp = EmitJump(cx, bce, JSOP_GOTO, 0); 1.4718 + if (jmp < 0) 1.4719 + return false; 1.4720 + } else { 1.4721 + if (op != JSOP_NOP && Emit1(cx, bce, JSOP_NOP) < 0) 1.4722 + return false; 1.4723 + } 1.4724 + 1.4725 + top = bce->offset(); 1.4726 + SET_STATEMENT_TOP(&stmtInfo, top); 1.4727 + 1.4728 + /* Emit code for the loop body. 
*/ 1.4729 + if (EmitLoopHead(cx, bce, forBody) < 0) 1.4730 + return false; 1.4731 + if (jmp == -1 && !EmitLoopEntry(cx, bce, forBody)) 1.4732 + return false; 1.4733 + if (!EmitTree(cx, bce, forBody)) 1.4734 + return false; 1.4735 + 1.4736 + /* Set the second note offset so we can find the update part. */ 1.4737 + JS_ASSERT(noteIndex != -1); 1.4738 + ptrdiff_t tmp2 = bce->offset(); 1.4739 + 1.4740 + /* Set loop and enclosing "update" offsets, for continue. */ 1.4741 + StmtInfoBCE *stmt = &stmtInfo; 1.4742 + do { 1.4743 + stmt->update = bce->offset(); 1.4744 + } while ((stmt = stmt->down) != nullptr && stmt->type == STMT_LABEL); 1.4745 + 1.4746 + /* Check for update code to do before the condition (if any). */ 1.4747 + pn3 = forHead->pn_kid3; 1.4748 + if (pn3) { 1.4749 + if (!UpdateSourceCoordNotes(cx, bce, pn3->pn_pos.begin)) 1.4750 + return false; 1.4751 + op = JSOP_POP; 1.4752 + if (pn3->isKind(PNK_ASSIGN)) { 1.4753 + JS_ASSERT(pn3->isOp(JSOP_NOP)); 1.4754 + if (!MaybeEmitGroupAssignment(cx, bce, op, pn3, GroupIsNotDecl, &op)) 1.4755 + return false; 1.4756 + } 1.4757 + if (op == JSOP_POP && !EmitTree(cx, bce, pn3)) 1.4758 + return false; 1.4759 + 1.4760 + /* Always emit the POP or NOP to help IonBuilder. */ 1.4761 + if (Emit1(cx, bce, op) < 0) 1.4762 + return false; 1.4763 + 1.4764 + /* Restore the absolute line number for source note readers. */ 1.4765 + uint32_t lineNum = bce->parser->tokenStream.srcCoords.lineNum(pn->pn_pos.end); 1.4766 + if (bce->currentLine() != lineNum) { 1.4767 + if (NewSrcNote2(cx, bce, SRC_SETLINE, ptrdiff_t(lineNum)) < 0) 1.4768 + return false; 1.4769 + bce->current->currentLine = lineNum; 1.4770 + bce->current->lastColumn = 0; 1.4771 + } 1.4772 + } 1.4773 + 1.4774 + ptrdiff_t tmp3 = bce->offset(); 1.4775 + 1.4776 + if (forHead->pn_kid2) { 1.4777 + /* Fix up the goto from top to target the loop condition. 
*/ 1.4778 + JS_ASSERT(jmp >= 0); 1.4779 + SetJumpOffsetAt(bce, jmp); 1.4780 + if (!EmitLoopEntry(cx, bce, forHead->pn_kid2)) 1.4781 + return false; 1.4782 + 1.4783 + if (!EmitTree(cx, bce, forHead->pn_kid2)) 1.4784 + return false; 1.4785 + } 1.4786 + 1.4787 + /* Set the first note offset so we can find the loop condition. */ 1.4788 + if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 0, tmp3 - tmp)) 1.4789 + return false; 1.4790 + if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 1, tmp2 - tmp)) 1.4791 + return false; 1.4792 + /* The third note offset helps us find the loop-closing jump. */ 1.4793 + if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 2, bce->offset() - tmp)) 1.4794 + return false; 1.4795 + 1.4796 + /* If no loop condition, just emit a loop-closing jump. */ 1.4797 + op = forHead->pn_kid2 ? JSOP_IFNE : JSOP_GOTO; 1.4798 + if (EmitJump(cx, bce, op, top - bce->offset()) < 0) 1.4799 + return false; 1.4800 + 1.4801 + if (!bce->tryNoteList.append(JSTRY_LOOP, bce->stackDepth, top, bce->offset())) 1.4802 + return false; 1.4803 + 1.4804 + /* Now fixup all breaks and continues. 
*/ 1.4805 + return PopStatementBCE(cx, bce); 1.4806 +} 1.4807 + 1.4808 +static inline bool 1.4809 +EmitFor(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t top) 1.4810 +{ 1.4811 + if (pn->pn_left->isKind(PNK_FORIN)) 1.4812 + return EmitForIn(cx, bce, pn, top); 1.4813 + 1.4814 + if (pn->pn_left->isKind(PNK_FOROF)) 1.4815 + return EmitForOf(cx, bce, pn, top); 1.4816 + 1.4817 + JS_ASSERT(pn->pn_left->isKind(PNK_FORHEAD)); 1.4818 + return EmitNormalFor(cx, bce, pn, top); 1.4819 +} 1.4820 + 1.4821 +static MOZ_NEVER_INLINE bool 1.4822 +EmitFunc(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) 1.4823 +{ 1.4824 + FunctionBox *funbox = pn->pn_funbox; 1.4825 + RootedFunction fun(cx, funbox->function()); 1.4826 + JS_ASSERT_IF(fun->isInterpretedLazy(), fun->lazyScript()); 1.4827 + 1.4828 + /* 1.4829 + * Set the EMITTEDFUNCTION flag in function definitions once they have been 1.4830 + * emitted. Function definitions that need hoisting to the top of the 1.4831 + * function will be seen by EmitFunc in two places. 1.4832 + */ 1.4833 + if (pn->pn_dflags & PND_EMITTEDFUNCTION) { 1.4834 + JS_ASSERT_IF(fun->hasScript(), fun->nonLazyScript()); 1.4835 + JS_ASSERT(pn->functionIsHoisted()); 1.4836 + JS_ASSERT(bce->sc->isFunctionBox()); 1.4837 + return true; 1.4838 + } 1.4839 + 1.4840 + pn->pn_dflags |= PND_EMITTEDFUNCTION; 1.4841 + 1.4842 + /* 1.4843 + * Mark as singletons any function which will only be executed once, or 1.4844 + * which is inner to a lambda we only expect to run once. In the latter 1.4845 + * case, if the lambda runs multiple times then CloneFunctionObject will 1.4846 + * make a deep clone of its contents. 
1.4847 + */ 1.4848 + if (fun->isInterpreted()) { 1.4849 + bool singleton = 1.4850 + bce->script->compileAndGo() && 1.4851 + fun->isInterpreted() && 1.4852 + (bce->checkSingletonContext() || 1.4853 + (!bce->isInLoop() && bce->isRunOnceLambda())); 1.4854 + if (!JSFunction::setTypeForScriptedFunction(cx, fun, singleton)) 1.4855 + return false; 1.4856 + 1.4857 + if (fun->isInterpretedLazy()) { 1.4858 + if (!fun->lazyScript()->sourceObject()) { 1.4859 + JSObject *scope = bce->staticScope; 1.4860 + if (!scope && bce->sc->isFunctionBox()) 1.4861 + scope = bce->sc->asFunctionBox()->function(); 1.4862 + JSObject *source = bce->script->sourceObject(); 1.4863 + fun->lazyScript()->setParent(scope, &source->as<ScriptSourceObject>()); 1.4864 + } 1.4865 + if (bce->emittingRunOnceLambda) 1.4866 + fun->lazyScript()->setTreatAsRunOnce(); 1.4867 + } else { 1.4868 + SharedContext *outersc = bce->sc; 1.4869 + 1.4870 + if (outersc->isFunctionBox() && outersc->asFunctionBox()->mightAliasLocals()) 1.4871 + funbox->setMightAliasLocals(); // inherit mightAliasLocals from parent 1.4872 + JS_ASSERT_IF(outersc->strict, funbox->strict); 1.4873 + 1.4874 + // Inherit most things (principals, version, etc) from the parent. 
1.4875 + Rooted<JSScript*> parent(cx, bce->script); 1.4876 + CompileOptions options(cx, bce->parser->options()); 1.4877 + options.setOriginPrincipals(parent->originPrincipals()) 1.4878 + .setCompileAndGo(parent->compileAndGo()) 1.4879 + .setSelfHostingMode(parent->selfHosted()) 1.4880 + .setNoScriptRval(false) 1.4881 + .setForEval(false) 1.4882 + .setVersion(parent->getVersion()); 1.4883 + 1.4884 + Rooted<JSObject*> enclosingScope(cx, EnclosingStaticScope(bce)); 1.4885 + Rooted<JSObject*> sourceObject(cx, bce->script->sourceObject()); 1.4886 + Rooted<JSScript*> script(cx, JSScript::Create(cx, enclosingScope, false, options, 1.4887 + parent->staticLevel() + 1, 1.4888 + sourceObject, 1.4889 + funbox->bufStart, funbox->bufEnd)); 1.4890 + if (!script) 1.4891 + return false; 1.4892 + 1.4893 + script->bindings = funbox->bindings; 1.4894 + 1.4895 + uint32_t lineNum = bce->parser->tokenStream.srcCoords.lineNum(pn->pn_pos.begin); 1.4896 + BytecodeEmitter bce2(bce, bce->parser, funbox, script, bce->insideEval, 1.4897 + bce->evalCaller, bce->hasGlobalScope, lineNum, 1.4898 + bce->emitterMode); 1.4899 + if (!bce2.init()) 1.4900 + return false; 1.4901 + 1.4902 + /* We measured the max scope depth when we parsed the function. */ 1.4903 + if (!EmitFunctionScript(cx, &bce2, pn->pn_body)) 1.4904 + return false; 1.4905 + 1.4906 + if (funbox->usesArguments && funbox->usesApply) 1.4907 + script->setUsesArgumentsAndApply(); 1.4908 + } 1.4909 + } else { 1.4910 + JS_ASSERT(IsAsmJSModuleNative(fun->native())); 1.4911 + } 1.4912 + 1.4913 + /* Make the function object a literal in the outer script's pool. */ 1.4914 + unsigned index = bce->objectList.add(pn->pn_funbox); 1.4915 + 1.4916 + /* Non-hoisted functions simply emit their respective op. */ 1.4917 + if (!pn->functionIsHoisted()) { 1.4918 + /* JSOP_LAMBDA_ARROW is always preceded by JSOP_THIS. 
*/ 1.4919 + MOZ_ASSERT(fun->isArrow() == (pn->getOp() == JSOP_LAMBDA_ARROW)); 1.4920 + if (fun->isArrow() && Emit1(cx, bce, JSOP_THIS) < 0) 1.4921 + return false; 1.4922 + return EmitIndex32(cx, pn->getOp(), index, bce); 1.4923 + } 1.4924 + 1.4925 + /* 1.4926 + * For a script we emit the code as we parse. Thus the bytecode for 1.4927 + * top-level functions should go in the prolog to predefine their 1.4928 + * names in the variable object before the already-generated main code 1.4929 + * is executed. This extra work for top-level scripts is not necessary 1.4930 + * when we emit the code for a function. It is fully parsed prior to 1.4931 + * invocation of the emitter and calls to EmitTree for function 1.4932 + * definitions can be scheduled before generating the rest of code. 1.4933 + */ 1.4934 + if (!bce->sc->isFunctionBox()) { 1.4935 + JS_ASSERT(pn->pn_cookie.isFree()); 1.4936 + JS_ASSERT(pn->getOp() == JSOP_NOP); 1.4937 + JS_ASSERT(!bce->topStmt); 1.4938 + bce->switchToProlog(); 1.4939 + if (!EmitIndex32(cx, JSOP_DEFFUN, index, bce)) 1.4940 + return false; 1.4941 + if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin)) 1.4942 + return false; 1.4943 + bce->switchToMain(); 1.4944 + } else { 1.4945 +#ifdef DEBUG 1.4946 + BindingIter bi(bce->script); 1.4947 + while (bi->name() != fun->atom()) 1.4948 + bi++; 1.4949 + JS_ASSERT(bi->kind() == Binding::VARIABLE || bi->kind() == Binding::CONSTANT || 1.4950 + bi->kind() == Binding::ARGUMENT); 1.4951 + JS_ASSERT(bi.frameIndex() < JS_BIT(20)); 1.4952 +#endif 1.4953 + pn->pn_index = index; 1.4954 + if (!EmitIndexOp(cx, JSOP_LAMBDA, index, bce)) 1.4955 + return false; 1.4956 + JS_ASSERT(pn->getOp() == JSOP_GETLOCAL || pn->getOp() == JSOP_GETARG); 1.4957 + JSOp setOp = pn->getOp() == JSOP_GETLOCAL ? 
JSOP_SETLOCAL : JSOP_SETARG; 1.4958 + if (!EmitVarOp(cx, pn, setOp, bce)) 1.4959 + return false; 1.4960 + if (Emit1(cx, bce, JSOP_POP) < 0) 1.4961 + return false; 1.4962 + } 1.4963 + 1.4964 + return true; 1.4965 +} 1.4966 + 1.4967 +static bool 1.4968 +EmitDo(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) 1.4969 +{ 1.4970 + /* Emit an annotated nop so IonBuilder can recognize the 'do' loop. */ 1.4971 + ptrdiff_t noteIndex = NewSrcNote(cx, bce, SRC_WHILE); 1.4972 + if (noteIndex < 0 || Emit1(cx, bce, JSOP_NOP) < 0) 1.4973 + return false; 1.4974 + 1.4975 + ptrdiff_t noteIndex2 = NewSrcNote(cx, bce, SRC_WHILE); 1.4976 + if (noteIndex2 < 0) 1.4977 + return false; 1.4978 + 1.4979 + /* Compile the loop body. */ 1.4980 + ptrdiff_t top = EmitLoopHead(cx, bce, pn->pn_left); 1.4981 + if (top < 0) 1.4982 + return false; 1.4983 + 1.4984 + LoopStmtInfo stmtInfo(cx); 1.4985 + PushLoopStatement(bce, &stmtInfo, STMT_DO_LOOP, top); 1.4986 + 1.4987 + if (!EmitLoopEntry(cx, bce, nullptr)) 1.4988 + return false; 1.4989 + 1.4990 + if (!EmitTree(cx, bce, pn->pn_left)) 1.4991 + return false; 1.4992 + 1.4993 + /* Set loop and enclosing label update offsets, for continue. */ 1.4994 + ptrdiff_t off = bce->offset(); 1.4995 + StmtInfoBCE *stmt = &stmtInfo; 1.4996 + do { 1.4997 + stmt->update = off; 1.4998 + } while ((stmt = stmt->down) != nullptr && stmt->type == STMT_LABEL); 1.4999 + 1.5000 + /* Compile the loop condition, now that continues know where to go. */ 1.5001 + if (!EmitTree(cx, bce, pn->pn_right)) 1.5002 + return false; 1.5003 + 1.5004 + ptrdiff_t beq = EmitJump(cx, bce, JSOP_IFNE, top - bce->offset()); 1.5005 + if (beq < 0) 1.5006 + return false; 1.5007 + 1.5008 + if (!bce->tryNoteList.append(JSTRY_LOOP, bce->stackDepth, top, bce->offset())) 1.5009 + return false; 1.5010 + 1.5011 + /* 1.5012 + * Update the annotations with the update and back edge positions, for 1.5013 + * IonBuilder. 
1.5014 + * 1.5015 + * Be careful: We must set noteIndex2 before noteIndex in case the noteIndex 1.5016 + * note gets bigger. 1.5017 + */ 1.5018 + if (!SetSrcNoteOffset(cx, bce, noteIndex2, 0, beq - top)) 1.5019 + return false; 1.5020 + if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, 1 + (off - top))) 1.5021 + return false; 1.5022 + 1.5023 + return PopStatementBCE(cx, bce); 1.5024 +} 1.5025 + 1.5026 +static bool 1.5027 +EmitWhile(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t top) 1.5028 +{ 1.5029 + /* 1.5030 + * Minimize bytecodes issued for one or more iterations by jumping to 1.5031 + * the condition below the body and closing the loop if the condition 1.5032 + * is true with a backward branch. For iteration count i: 1.5033 + * 1.5034 + * i test at the top test at the bottom 1.5035 + * = =============== ================== 1.5036 + * 0 ifeq-pass goto; ifne-fail 1.5037 + * 1 ifeq-fail; goto; ifne-pass goto; ifne-pass; ifne-fail 1.5038 + * 2 2*(ifeq-fail; goto); ifeq-pass goto; 2*ifne-pass; ifne-fail 1.5039 + * . . . 
1.5040 + * N N*(ifeq-fail; goto); ifeq-pass goto; N*ifne-pass; ifne-fail 1.5041 + */ 1.5042 + LoopStmtInfo stmtInfo(cx); 1.5043 + PushLoopStatement(bce, &stmtInfo, STMT_WHILE_LOOP, top); 1.5044 + 1.5045 + ptrdiff_t noteIndex = NewSrcNote(cx, bce, SRC_WHILE); 1.5046 + if (noteIndex < 0) 1.5047 + return false; 1.5048 + 1.5049 + ptrdiff_t jmp = EmitJump(cx, bce, JSOP_GOTO, 0); 1.5050 + if (jmp < 0) 1.5051 + return false; 1.5052 + 1.5053 + top = EmitLoopHead(cx, bce, pn->pn_right); 1.5054 + if (top < 0) 1.5055 + return false; 1.5056 + 1.5057 + if (!EmitTree(cx, bce, pn->pn_right)) 1.5058 + return false; 1.5059 + 1.5060 + SetJumpOffsetAt(bce, jmp); 1.5061 + if (!EmitLoopEntry(cx, bce, pn->pn_left)) 1.5062 + return false; 1.5063 + if (!EmitTree(cx, bce, pn->pn_left)) 1.5064 + return false; 1.5065 + 1.5066 + ptrdiff_t beq = EmitJump(cx, bce, JSOP_IFNE, top - bce->offset()); 1.5067 + if (beq < 0) 1.5068 + return false; 1.5069 + 1.5070 + if (!bce->tryNoteList.append(JSTRY_LOOP, bce->stackDepth, top, bce->offset())) 1.5071 + return false; 1.5072 + 1.5073 + if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, beq - jmp)) 1.5074 + return false; 1.5075 + 1.5076 + return PopStatementBCE(cx, bce); 1.5077 +} 1.5078 + 1.5079 +static bool 1.5080 +EmitBreak(ExclusiveContext *cx, BytecodeEmitter *bce, PropertyName *label) 1.5081 +{ 1.5082 + StmtInfoBCE *stmt = bce->topStmt; 1.5083 + SrcNoteType noteType; 1.5084 + if (label) { 1.5085 + while (stmt->type != STMT_LABEL || stmt->label != label) 1.5086 + stmt = stmt->down; 1.5087 + noteType = SRC_BREAK2LABEL; 1.5088 + } else { 1.5089 + while (!stmt->isLoop() && stmt->type != STMT_SWITCH) 1.5090 + stmt = stmt->down; 1.5091 + noteType = (stmt->type == STMT_SWITCH) ? 
SRC_SWITCHBREAK : SRC_BREAK; 1.5092 + } 1.5093 + 1.5094 + return EmitGoto(cx, bce, stmt, &stmt->breaks, noteType) >= 0; 1.5095 +} 1.5096 + 1.5097 +static bool 1.5098 +EmitContinue(ExclusiveContext *cx, BytecodeEmitter *bce, PropertyName *label) 1.5099 +{ 1.5100 + StmtInfoBCE *stmt = bce->topStmt; 1.5101 + if (label) { 1.5102 + /* Find the loop statement enclosed by the matching label. */ 1.5103 + StmtInfoBCE *loop = nullptr; 1.5104 + while (stmt->type != STMT_LABEL || stmt->label != label) { 1.5105 + if (stmt->isLoop()) 1.5106 + loop = stmt; 1.5107 + stmt = stmt->down; 1.5108 + } 1.5109 + stmt = loop; 1.5110 + } else { 1.5111 + while (!stmt->isLoop()) 1.5112 + stmt = stmt->down; 1.5113 + } 1.5114 + 1.5115 + return EmitGoto(cx, bce, stmt, &stmt->continues, SRC_CONTINUE) >= 0; 1.5116 +} 1.5117 + 1.5118 +static bool 1.5119 +EmitReturn(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) 1.5120 +{ 1.5121 + if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin)) 1.5122 + return false; 1.5123 + 1.5124 + if (bce->sc->isFunctionBox() && bce->sc->asFunctionBox()->isStarGenerator()) { 1.5125 + if (!EmitPrepareIteratorResult(cx, bce)) 1.5126 + return false; 1.5127 + } 1.5128 + 1.5129 + /* Push a return value */ 1.5130 + if (ParseNode *pn2 = pn->pn_kid) { 1.5131 + if (!EmitTree(cx, bce, pn2)) 1.5132 + return false; 1.5133 + } else { 1.5134 + /* No explicit return value provided */ 1.5135 + if (Emit1(cx, bce, JSOP_UNDEFINED) < 0) 1.5136 + return false; 1.5137 + } 1.5138 + 1.5139 + if (bce->sc->isFunctionBox() && bce->sc->asFunctionBox()->isStarGenerator()) { 1.5140 + if (!EmitFinishIteratorResult(cx, bce, true)) 1.5141 + return false; 1.5142 + } 1.5143 + 1.5144 + /* 1.5145 + * EmitNonLocalJumpFixup may add fixup bytecode to close open try 1.5146 + * blocks having finally clauses and to exit intermingled let blocks. 
1.5147 + * We can't simply transfer control flow to our caller in that case, 1.5148 + * because we must gosub to those finally clauses from inner to outer, 1.5149 + * with the correct stack pointer (i.e., after popping any with, 1.5150 + * for/in, etc., slots nested inside the finally's try). 1.5151 + * 1.5152 + * In this case we mutate JSOP_RETURN into JSOP_SETRVAL and add an 1.5153 + * extra JSOP_RETRVAL after the fixups. 1.5154 + */ 1.5155 + ptrdiff_t top = bce->offset(); 1.5156 + 1.5157 + if (Emit1(cx, bce, JSOP_RETURN) < 0) 1.5158 + return false; 1.5159 + 1.5160 + NonLocalExitScope nle(cx, bce); 1.5161 + 1.5162 + if (!nle.prepareForNonLocalJump(nullptr)) 1.5163 + return false; 1.5164 + 1.5165 + if (top + static_cast<ptrdiff_t>(JSOP_RETURN_LENGTH) != bce->offset()) { 1.5166 + bce->code()[top] = JSOP_SETRVAL; 1.5167 + if (Emit1(cx, bce, JSOP_RETRVAL) < 0) 1.5168 + return false; 1.5169 + } 1.5170 + 1.5171 + return true; 1.5172 +} 1.5173 + 1.5174 +static bool 1.5175 +EmitYieldStar(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *iter) 1.5176 +{ 1.5177 + JS_ASSERT(bce->sc->isFunctionBox()); 1.5178 + JS_ASSERT(bce->sc->asFunctionBox()->isStarGenerator()); 1.5179 + 1.5180 + if (!EmitTree(cx, bce, iter)) // ITERABLE 1.5181 + return false; 1.5182 + 1.5183 + // Convert iterable to iterator. 1.5184 + if (Emit1(cx, bce, JSOP_DUP) < 0) // ITERABLE ITERABLE 1.5185 + return false; 1.5186 + if (!EmitAtomOp(cx, cx->names().std_iterator, JSOP_CALLPROP, bce)) // ITERABLE @@ITERATOR 1.5187 + return false; 1.5188 + if (Emit1(cx, bce, JSOP_SWAP) < 0) // @@ITERATOR ITERABLE 1.5189 + return false; 1.5190 + if (EmitCall(cx, bce, JSOP_CALL, 0) < 0) // ITER 1.5191 + return false; 1.5192 + CheckTypeSet(cx, bce, JSOP_CALL); 1.5193 + 1.5194 + int depth = bce->stackDepth; 1.5195 + JS_ASSERT(depth >= 1); 1.5196 + 1.5197 + // Initial send value is undefined. 
1.5198 + if (Emit1(cx, bce, JSOP_UNDEFINED) < 0) // ITER RECEIVED 1.5199 + return false; 1.5200 + ptrdiff_t initialSend = -1; 1.5201 + if (EmitBackPatchOp(cx, bce, &initialSend) < 0) // goto initialSend 1.5202 + return false; 1.5203 + 1.5204 + // Try prologue. // ITER RESULT 1.5205 + StmtInfoBCE stmtInfo(cx); 1.5206 + PushStatementBCE(bce, &stmtInfo, STMT_TRY, bce->offset()); 1.5207 + ptrdiff_t noteIndex = NewSrcNote(cx, bce, SRC_TRY); 1.5208 + if (noteIndex < 0 || Emit1(cx, bce, JSOP_TRY) < 0) 1.5209 + return false; 1.5210 + ptrdiff_t tryStart = bce->offset(); // tryStart: 1.5211 + JS_ASSERT(bce->stackDepth == depth + 1); 1.5212 + 1.5213 + // Yield RESULT as-is, without re-boxing. 1.5214 + if (Emit1(cx, bce, JSOP_YIELD) < 0) // ITER RECEIVED 1.5215 + return false; 1.5216 + 1.5217 + // Try epilogue. 1.5218 + if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, bce->offset() - tryStart + JSOP_TRY_LENGTH)) 1.5219 + return false; 1.5220 + ptrdiff_t subsequentSend = -1; 1.5221 + if (EmitBackPatchOp(cx, bce, &subsequentSend) < 0) // goto subsequentSend 1.5222 + return false; 1.5223 + ptrdiff_t tryEnd = bce->offset(); // tryEnd: 1.5224 + 1.5225 + // Catch location. 1.5226 + // THROW? = 'throw' in ITER // ITER 1.5227 + bce->stackDepth = (uint32_t) depth; 1.5228 + if (Emit1(cx, bce, JSOP_EXCEPTION) < 0) // ITER EXCEPTION 1.5229 + return false; 1.5230 + if (Emit1(cx, bce, JSOP_SWAP) < 0) // EXCEPTION ITER 1.5231 + return false; 1.5232 + if (Emit1(cx, bce, JSOP_DUP) < 0) // EXCEPTION ITER ITER 1.5233 + return false; 1.5234 + if (!EmitAtomOp(cx, cx->names().throw_, JSOP_STRING, bce)) // EXCEPTION ITER ITER "throw" 1.5235 + return false; 1.5236 + if (Emit1(cx, bce, JSOP_SWAP) < 0) // EXCEPTION ITER "throw" ITER 1.5237 + return false; 1.5238 + if (Emit1(cx, bce, JSOP_IN) < 0) // EXCEPTION ITER THROW? 1.5239 + return false; 1.5240 + // if (THROW?) 
goto delegate 1.5241 + ptrdiff_t checkThrow = EmitJump(cx, bce, JSOP_IFNE, 0); // EXCEPTION ITER 1.5242 + if (checkThrow < 0) 1.5243 + return false; 1.5244 + if (Emit1(cx, bce, JSOP_POP) < 0) // EXCEPTION 1.5245 + return false; 1.5246 + if (Emit1(cx, bce, JSOP_THROW) < 0) // throw EXCEPTION 1.5247 + return false; 1.5248 + 1.5249 + SetJumpOffsetAt(bce, checkThrow); // delegate: 1.5250 + // RESULT = ITER.throw(EXCEPTION) // EXCEPTION ITER 1.5251 + bce->stackDepth = (uint32_t) depth + 1; 1.5252 + if (Emit1(cx, bce, JSOP_DUP) < 0) // EXCEPTION ITER ITER 1.5253 + return false; 1.5254 + if (Emit1(cx, bce, JSOP_DUP) < 0) // EXCEPTION ITER ITER ITER 1.5255 + return false; 1.5256 + if (!EmitAtomOp(cx, cx->names().throw_, JSOP_CALLPROP, bce)) // EXCEPTION ITER ITER THROW 1.5257 + return false; 1.5258 + if (Emit1(cx, bce, JSOP_SWAP) < 0) // EXCEPTION ITER THROW ITER 1.5259 + return false; 1.5260 + if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)3) < 0) // ITER THROW ITER EXCEPTION 1.5261 + return false; 1.5262 + if (EmitCall(cx, bce, JSOP_CALL, 1) < 0) // ITER RESULT 1.5263 + return false; 1.5264 + CheckTypeSet(cx, bce, JSOP_CALL); 1.5265 + JS_ASSERT(bce->stackDepth == depth + 1); 1.5266 + ptrdiff_t checkResult = -1; 1.5267 + if (EmitBackPatchOp(cx, bce, &checkResult) < 0) // goto checkResult 1.5268 + return false; 1.5269 + 1.5270 + // Catch epilogue. 1.5271 + if (!PopStatementBCE(cx, bce)) 1.5272 + return false; 1.5273 + // This is a peace offering to ReconstructPCStack. See the note in EmitTry. 1.5274 + if (Emit1(cx, bce, JSOP_NOP) < 0) 1.5275 + return false; 1.5276 + if (!bce->tryNoteList.append(JSTRY_CATCH, depth, tryStart, tryEnd)) 1.5277 + return false; 1.5278 + 1.5279 + // After the try/catch block: send the received value to the iterator. 
1.5280 + if (!BackPatch(cx, bce, initialSend, bce->code().end(), JSOP_GOTO)) // initialSend: 1.5281 + return false; 1.5282 + if (!BackPatch(cx, bce, subsequentSend, bce->code().end(), JSOP_GOTO)) // subsequentSend: 1.5283 + return false; 1.5284 + 1.5285 + // Send location. 1.5286 + // result = iter.next(received) // ITER RECEIVED 1.5287 + if (Emit1(cx, bce, JSOP_SWAP) < 0) // RECEIVED ITER 1.5288 + return false; 1.5289 + if (Emit1(cx, bce, JSOP_DUP) < 0) // RECEIVED ITER ITER 1.5290 + return false; 1.5291 + if (Emit1(cx, bce, JSOP_DUP) < 0) // RECEIVED ITER ITER ITER 1.5292 + return false; 1.5293 + if (!EmitAtomOp(cx, cx->names().next, JSOP_CALLPROP, bce)) // RECEIVED ITER ITER NEXT 1.5294 + return false; 1.5295 + if (Emit1(cx, bce, JSOP_SWAP) < 0) // RECEIVED ITER NEXT ITER 1.5296 + return false; 1.5297 + if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)3) < 0) // ITER NEXT ITER RECEIVED 1.5298 + return false; 1.5299 + if (EmitCall(cx, bce, JSOP_CALL, 1) < 0) // ITER RESULT 1.5300 + return false; 1.5301 + CheckTypeSet(cx, bce, JSOP_CALL); 1.5302 + JS_ASSERT(bce->stackDepth == depth + 1); 1.5303 + 1.5304 + if (!BackPatch(cx, bce, checkResult, bce->code().end(), JSOP_GOTO)) // checkResult: 1.5305 + return false; 1.5306 + // if (!result.done) goto tryStart; // ITER RESULT 1.5307 + if (Emit1(cx, bce, JSOP_DUP) < 0) // ITER RESULT RESULT 1.5308 + return false; 1.5309 + if (!EmitAtomOp(cx, cx->names().done, JSOP_GETPROP, bce)) // ITER RESULT DONE 1.5310 + return false; 1.5311 + // if (!DONE) goto tryStart; 1.5312 + if (EmitJump(cx, bce, JSOP_IFEQ, tryStart - bce->offset()) < 0) // ITER RESULT 1.5313 + return false; 1.5314 + 1.5315 + // result.value 1.5316 + if (Emit1(cx, bce, JSOP_SWAP) < 0) // RESULT ITER 1.5317 + return false; 1.5318 + if (Emit1(cx, bce, JSOP_POP) < 0) // RESULT 1.5319 + return false; 1.5320 + if (!EmitAtomOp(cx, cx->names().value, JSOP_GETPROP, bce)) // VALUE 1.5321 + return false; 1.5322 + 1.5323 + JS_ASSERT(bce->stackDepth == depth); 1.5324 + 1.5325 + 
return true; 1.5326 +} 1.5327 + 1.5328 +static bool 1.5329 +EmitStatementList(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t top) 1.5330 +{ 1.5331 + JS_ASSERT(pn->isArity(PN_LIST)); 1.5332 + 1.5333 + StmtInfoBCE stmtInfo(cx); 1.5334 + PushStatementBCE(bce, &stmtInfo, STMT_BLOCK, top); 1.5335 + 1.5336 + ParseNode *pnchild = pn->pn_head; 1.5337 + 1.5338 + if (pn->pn_xflags & PNX_DESTRUCT) 1.5339 + pnchild = pnchild->pn_next; 1.5340 + 1.5341 + for (ParseNode *pn2 = pnchild; pn2; pn2 = pn2->pn_next) { 1.5342 + if (!EmitTree(cx, bce, pn2)) 1.5343 + return false; 1.5344 + } 1.5345 + 1.5346 + return PopStatementBCE(cx, bce); 1.5347 +} 1.5348 + 1.5349 +static bool 1.5350 +EmitStatement(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) 1.5351 +{ 1.5352 + JS_ASSERT(pn->isKind(PNK_SEMI)); 1.5353 + 1.5354 + ParseNode *pn2 = pn->pn_kid; 1.5355 + if (!pn2) 1.5356 + return true; 1.5357 + 1.5358 + if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin)) 1.5359 + return false; 1.5360 + 1.5361 + /* 1.5362 + * Top-level or called-from-a-native JS_Execute/EvaluateScript, 1.5363 + * debugger, and eval frames may need the value of the ultimate 1.5364 + * expression statement as the script's result, despite the fact 1.5365 + * that it appears useless to the compiler. 1.5366 + * 1.5367 + * API users may also set the JSOPTION_NO_SCRIPT_RVAL option when 1.5368 + * calling JS_Compile* to suppress JSOP_SETRVAL. 1.5369 + */ 1.5370 + bool wantval = false; 1.5371 + bool useful = false; 1.5372 + if (bce->sc->isFunctionBox()) { 1.5373 + JS_ASSERT(!bce->script->noScriptRval()); 1.5374 + } else { 1.5375 + useful = wantval = !bce->script->noScriptRval(); 1.5376 + } 1.5377 + 1.5378 + /* Don't eliminate expressions with side effects. */ 1.5379 + if (!useful) { 1.5380 + if (!CheckSideEffects(cx, bce, pn2, &useful)) 1.5381 + return false; 1.5382 + 1.5383 + /* 1.5384 + * Don't eliminate apparently useless expressions if they are 1.5385 + * labeled expression statements. 
The pc->topStmt->update test 1.5386 + * catches the case where we are nesting in EmitTree for a labeled 1.5387 + * compound statement. 1.5388 + */ 1.5389 + if (bce->topStmt && 1.5390 + bce->topStmt->type == STMT_LABEL && 1.5391 + bce->topStmt->update >= bce->offset()) 1.5392 + { 1.5393 + useful = true; 1.5394 + } 1.5395 + } 1.5396 + 1.5397 + if (useful) { 1.5398 + JSOp op = wantval ? JSOP_SETRVAL : JSOP_POP; 1.5399 + JS_ASSERT_IF(pn2->isKind(PNK_ASSIGN), pn2->isOp(JSOP_NOP)); 1.5400 + if (!wantval && 1.5401 + pn2->isKind(PNK_ASSIGN) && 1.5402 + !MaybeEmitGroupAssignment(cx, bce, op, pn2, GroupIsNotDecl, &op)) 1.5403 + { 1.5404 + return false; 1.5405 + } 1.5406 + if (op != JSOP_NOP) { 1.5407 + if (!EmitTree(cx, bce, pn2)) 1.5408 + return false; 1.5409 + if (Emit1(cx, bce, op) < 0) 1.5410 + return false; 1.5411 + } 1.5412 + } else if (!pn->isDirectivePrologueMember()) { 1.5413 + /* Don't complain about directive prologue members; just don't emit their code. */ 1.5414 + bce->current->currentLine = bce->parser->tokenStream.srcCoords.lineNum(pn2->pn_pos.begin); 1.5415 + bce->current->lastColumn = 0; 1.5416 + if (!bce->reportStrictWarning(pn2, JSMSG_USELESS_EXPR)) 1.5417 + return false; 1.5418 + } 1.5419 + 1.5420 + return true; 1.5421 +} 1.5422 + 1.5423 +static bool 1.5424 +EmitDelete(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) 1.5425 +{ 1.5426 + /* 1.5427 + * Under ECMA 3, deleting a non-reference returns true -- but alas we 1.5428 + * must evaluate the operand if it appears it might have side effects. 
1.5429 + */ 1.5430 + ParseNode *pn2 = pn->pn_kid; 1.5431 + switch (pn2->getKind()) { 1.5432 + case PNK_NAME: 1.5433 + { 1.5434 + if (!BindNameToSlot(cx, bce, pn2)) 1.5435 + return false; 1.5436 + JSOp op = pn2->getOp(); 1.5437 + if (op == JSOP_FALSE) { 1.5438 + if (Emit1(cx, bce, op) < 0) 1.5439 + return false; 1.5440 + } else { 1.5441 + if (!EmitAtomOp(cx, pn2, op, bce)) 1.5442 + return false; 1.5443 + } 1.5444 + break; 1.5445 + } 1.5446 + case PNK_DOT: 1.5447 + if (!EmitPropOp(cx, pn2, JSOP_DELPROP, bce)) 1.5448 + return false; 1.5449 + break; 1.5450 + case PNK_ELEM: 1.5451 + if (!EmitElemOp(cx, pn2, JSOP_DELELEM, bce)) 1.5452 + return false; 1.5453 + break; 1.5454 + default: 1.5455 + { 1.5456 + /* 1.5457 + * If useless, just emit JSOP_TRUE; otherwise convert delete foo() 1.5458 + * to foo(), true (a comma expression). 1.5459 + */ 1.5460 + bool useful = false; 1.5461 + if (!CheckSideEffects(cx, bce, pn2, &useful)) 1.5462 + return false; 1.5463 + 1.5464 + if (useful) { 1.5465 + JS_ASSERT_IF(pn2->isKind(PNK_CALL), !(pn2->pn_xflags & PNX_SETCALL)); 1.5466 + if (!EmitTree(cx, bce, pn2)) 1.5467 + return false; 1.5468 + if (Emit1(cx, bce, JSOP_POP) < 0) 1.5469 + return false; 1.5470 + } 1.5471 + 1.5472 + if (Emit1(cx, bce, JSOP_TRUE) < 0) 1.5473 + return false; 1.5474 + } 1.5475 + } 1.5476 + 1.5477 + return true; 1.5478 +} 1.5479 + 1.5480 +static bool 1.5481 +EmitArray(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, uint32_t count); 1.5482 + 1.5483 +static bool 1.5484 +EmitCallOrNew(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) 1.5485 +{ 1.5486 + bool callop = pn->isKind(PNK_CALL); 1.5487 + 1.5488 + /* 1.5489 + * Emit callable invocation or operator new (constructor call) code. 1.5490 + * First, emit code for the left operand to evaluate the callable or 1.5491 + * constructable object expression. 1.5492 + * 1.5493 + * For operator new, we emit JSOP_GETPROP instead of JSOP_CALLPROP, etc. 
1.5494 + * This is necessary to interpose the lambda-initialized method read 1.5495 + * barrier -- see the code in jsinterp.cpp for JSOP_LAMBDA followed by 1.5496 + * JSOP_{SET,INIT}PROP. 1.5497 + * 1.5498 + * Then (or in a call case that has no explicit reference-base 1.5499 + * object) we emit JSOP_UNDEFINED to produce the undefined |this| 1.5500 + * value required for calls (which non-strict mode functions 1.5501 + * will box into the global object). 1.5502 + */ 1.5503 + uint32_t argc = pn->pn_count - 1; 1.5504 + 1.5505 + if (argc >= ARGC_LIMIT) { 1.5506 + bce->parser->tokenStream.reportError(callop 1.5507 + ? JSMSG_TOO_MANY_FUN_ARGS 1.5508 + : JSMSG_TOO_MANY_CON_ARGS); 1.5509 + return false; 1.5510 + } 1.5511 + 1.5512 + bool emitArgs = true; 1.5513 + ParseNode *pn2 = pn->pn_head; 1.5514 + bool spread = JOF_OPTYPE(pn->getOp()) == JOF_BYTE; 1.5515 + switch (pn2->getKind()) { 1.5516 + case PNK_NAME: 1.5517 + if (bce->emitterMode == BytecodeEmitter::SelfHosting && 1.5518 + pn2->name() == cx->names().callFunction && 1.5519 + !spread) 1.5520 + { 1.5521 + /* 1.5522 + * Special-casing of callFunction to emit bytecode that directly 1.5523 + * invokes the callee with the correct |this| object and arguments. 1.5524 + * callFunction(fun, thisArg, arg0, arg1) thus becomes: 1.5525 + * - emit lookup for fun 1.5526 + * - emit lookup for thisArg 1.5527 + * - emit lookups for arg0, arg1 1.5528 + * 1.5529 + * argc is set to the amount of actually emitted args and the 1.5530 + * emitting of args below is disabled by setting emitArgs to false. 
1.5531 + */ 1.5532 + if (pn->pn_count < 3) { 1.5533 + bce->reportError(pn, JSMSG_MORE_ARGS_NEEDED, "callFunction", "1", "s"); 1.5534 + return false; 1.5535 + } 1.5536 + ParseNode *funNode = pn2->pn_next; 1.5537 + if (!EmitTree(cx, bce, funNode)) 1.5538 + return false; 1.5539 + ParseNode *thisArg = funNode->pn_next; 1.5540 + if (!EmitTree(cx, bce, thisArg)) 1.5541 + return false; 1.5542 + bool oldEmittingForInit = bce->emittingForInit; 1.5543 + bce->emittingForInit = false; 1.5544 + for (ParseNode *argpn = thisArg->pn_next; argpn; argpn = argpn->pn_next) { 1.5545 + if (!EmitTree(cx, bce, argpn)) 1.5546 + return false; 1.5547 + } 1.5548 + bce->emittingForInit = oldEmittingForInit; 1.5549 + argc -= 2; 1.5550 + emitArgs = false; 1.5551 + break; 1.5552 + } 1.5553 + if (!EmitNameOp(cx, bce, pn2, callop)) 1.5554 + return false; 1.5555 + break; 1.5556 + case PNK_DOT: 1.5557 + if (!EmitPropOp(cx, pn2, callop ? JSOP_CALLPROP : JSOP_GETPROP, bce)) 1.5558 + return false; 1.5559 + break; 1.5560 + case PNK_ELEM: 1.5561 + if (!EmitElemOp(cx, pn2, callop ? JSOP_CALLELEM : JSOP_GETELEM, bce)) 1.5562 + return false; 1.5563 + break; 1.5564 + case PNK_FUNCTION: 1.5565 + /* 1.5566 + * Top level lambdas which are immediately invoked should be 1.5567 + * treated as only running once. Every time they execute we will 1.5568 + * create new types and scripts for their contents, to increase 1.5569 + * the quality of type information within them and enable more 1.5570 + * backend optimizations. Note that this does not depend on the 1.5571 + * lambda being invoked at most once (it may be named or be 1.5572 + * accessed via foo.caller indirection), as multiple executions 1.5573 + * will just cause the inner scripts to be repeatedly cloned. 
1.5574 + */ 1.5575 + JS_ASSERT(!bce->emittingRunOnceLambda); 1.5576 + if (bce->checkSingletonContext() || (!bce->isInLoop() && bce->isRunOnceLambda())) { 1.5577 + bce->emittingRunOnceLambda = true; 1.5578 + if (!EmitTree(cx, bce, pn2)) 1.5579 + return false; 1.5580 + bce->emittingRunOnceLambda = false; 1.5581 + } else { 1.5582 + if (!EmitTree(cx, bce, pn2)) 1.5583 + return false; 1.5584 + } 1.5585 + callop = false; 1.5586 + break; 1.5587 + default: 1.5588 + if (!EmitTree(cx, bce, pn2)) 1.5589 + return false; 1.5590 + callop = false; /* trigger JSOP_UNDEFINED after */ 1.5591 + break; 1.5592 + } 1.5593 + if (!callop) { 1.5594 + JSOp thisop = pn->isKind(PNK_GENEXP) ? JSOP_THIS : JSOP_UNDEFINED; 1.5595 + if (Emit1(cx, bce, thisop) < 0) 1.5596 + return false; 1.5597 + } 1.5598 + 1.5599 + if (emitArgs) { 1.5600 + /* 1.5601 + * Emit code for each argument in order, then emit the JSOP_*CALL or 1.5602 + * JSOP_NEW bytecode with a two-byte immediate telling how many args 1.5603 + * were pushed on the operand stack. 
1.5604 + */ 1.5605 + bool oldEmittingForInit = bce->emittingForInit; 1.5606 + bce->emittingForInit = false; 1.5607 + if (!spread) { 1.5608 + for (ParseNode *pn3 = pn2->pn_next; pn3; pn3 = pn3->pn_next) { 1.5609 + if (!EmitTree(cx, bce, pn3)) 1.5610 + return false; 1.5611 + } 1.5612 + } else { 1.5613 + if (!EmitArray(cx, bce, pn2->pn_next, argc)) 1.5614 + return false; 1.5615 + } 1.5616 + bce->emittingForInit = oldEmittingForInit; 1.5617 + } 1.5618 + 1.5619 + if (!spread) { 1.5620 + if (EmitCall(cx, bce, pn->getOp(), argc) < 0) 1.5621 + return false; 1.5622 + } else { 1.5623 + if (Emit1(cx, bce, pn->getOp()) < 0) 1.5624 + return false; 1.5625 + } 1.5626 + CheckTypeSet(cx, bce, pn->getOp()); 1.5627 + if (pn->isOp(JSOP_EVAL) || pn->isOp(JSOP_SPREADEVAL)) { 1.5628 + uint32_t lineNum = bce->parser->tokenStream.srcCoords.lineNum(pn->pn_pos.begin); 1.5629 + EMIT_UINT16_IMM_OP(JSOP_LINENO, lineNum); 1.5630 + } 1.5631 + if (pn->pn_xflags & PNX_SETCALL) { 1.5632 + if (Emit1(cx, bce, JSOP_SETCALL) < 0) 1.5633 + return false; 1.5634 + } 1.5635 + return true; 1.5636 +} 1.5637 + 1.5638 +static bool 1.5639 +EmitLogical(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) 1.5640 +{ 1.5641 + /* 1.5642 + * JSOP_OR converts the operand on the stack to boolean, leaves the original 1.5643 + * value on the stack and jumps if true; otherwise it falls into the next 1.5644 + * bytecode, which pops the left operand and then evaluates the right operand. 1.5645 + * The jump goes around the right operand evaluation. 1.5646 + * 1.5647 + * JSOP_AND converts the operand on the stack to boolean and jumps if false; 1.5648 + * otherwise it falls into the right operand's bytecode. 
1.5649 + */ 1.5650 + 1.5651 + if (pn->isArity(PN_BINARY)) { 1.5652 + if (!EmitTree(cx, bce, pn->pn_left)) 1.5653 + return false; 1.5654 + ptrdiff_t top = EmitJump(cx, bce, JSOP_BACKPATCH, 0); 1.5655 + if (top < 0) 1.5656 + return false; 1.5657 + if (Emit1(cx, bce, JSOP_POP) < 0) 1.5658 + return false; 1.5659 + if (!EmitTree(cx, bce, pn->pn_right)) 1.5660 + return false; 1.5661 + ptrdiff_t off = bce->offset(); 1.5662 + jsbytecode *pc = bce->code(top); 1.5663 + SET_JUMP_OFFSET(pc, off - top); 1.5664 + *pc = pn->getOp(); 1.5665 + return true; 1.5666 + } 1.5667 + 1.5668 + JS_ASSERT(pn->isArity(PN_LIST)); 1.5669 + JS_ASSERT(pn->pn_head->pn_next->pn_next); 1.5670 + 1.5671 + /* Left-associative operator chain: avoid too much recursion. */ 1.5672 + ParseNode *pn2 = pn->pn_head; 1.5673 + if (!EmitTree(cx, bce, pn2)) 1.5674 + return false; 1.5675 + ptrdiff_t top = EmitJump(cx, bce, JSOP_BACKPATCH, 0); 1.5676 + if (top < 0) 1.5677 + return false; 1.5678 + if (Emit1(cx, bce, JSOP_POP) < 0) 1.5679 + return false; 1.5680 + 1.5681 + /* Emit nodes between the head and the tail. 
*/ 1.5682 + ptrdiff_t jmp = top; 1.5683 + while ((pn2 = pn2->pn_next)->pn_next) { 1.5684 + if (!EmitTree(cx, bce, pn2)) 1.5685 + return false; 1.5686 + ptrdiff_t off = EmitJump(cx, bce, JSOP_BACKPATCH, 0); 1.5687 + if (off < 0) 1.5688 + return false; 1.5689 + if (Emit1(cx, bce, JSOP_POP) < 0) 1.5690 + return false; 1.5691 + SET_JUMP_OFFSET(bce->code(jmp), off - jmp); 1.5692 + jmp = off; 1.5693 + } 1.5694 + if (!EmitTree(cx, bce, pn2)) 1.5695 + return false; 1.5696 + 1.5697 + pn2 = pn->pn_head; 1.5698 + ptrdiff_t off = bce->offset(); 1.5699 + do { 1.5700 + jsbytecode *pc = bce->code(top); 1.5701 + ptrdiff_t tmp = GET_JUMP_OFFSET(pc); 1.5702 + SET_JUMP_OFFSET(pc, off - top); 1.5703 + *pc = pn->getOp(); 1.5704 + top += tmp; 1.5705 + } while ((pn2 = pn2->pn_next)->pn_next); 1.5706 + 1.5707 + return true; 1.5708 +} 1.5709 + 1.5710 +/* 1.5711 + * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See 1.5712 + * the comment on EmitSwitch. 1.5713 + */ 1.5714 +MOZ_NEVER_INLINE static bool 1.5715 +EmitIncOrDec(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) 1.5716 +{ 1.5717 + /* Emit lvalue-specialized code for ++/-- operators. 
*/ 1.5718 + ParseNode *pn2 = pn->pn_kid; 1.5719 + switch (pn2->getKind()) { 1.5720 + case PNK_DOT: 1.5721 + if (!EmitPropIncDec(cx, pn, bce)) 1.5722 + return false; 1.5723 + break; 1.5724 + case PNK_ELEM: 1.5725 + if (!EmitElemIncDec(cx, pn, bce)) 1.5726 + return false; 1.5727 + break; 1.5728 + case PNK_CALL: 1.5729 + JS_ASSERT(pn2->pn_xflags & PNX_SETCALL); 1.5730 + if (!EmitTree(cx, bce, pn2)) 1.5731 + return false; 1.5732 + break; 1.5733 + default: 1.5734 + JS_ASSERT(pn2->isKind(PNK_NAME)); 1.5735 + pn2->setOp(JSOP_SETNAME); 1.5736 + if (!BindNameToSlot(cx, bce, pn2)) 1.5737 + return false; 1.5738 + JSOp op = pn2->getOp(); 1.5739 + bool maySet; 1.5740 + switch (op) { 1.5741 + case JSOP_SETLOCAL: 1.5742 + case JSOP_SETARG: 1.5743 + case JSOP_SETALIASEDVAR: 1.5744 + case JSOP_SETNAME: 1.5745 + case JSOP_SETGNAME: 1.5746 + maySet = true; 1.5747 + break; 1.5748 + default: 1.5749 + maySet = false; 1.5750 + } 1.5751 + if (op == JSOP_CALLEE) { 1.5752 + if (Emit1(cx, bce, op) < 0) 1.5753 + return false; 1.5754 + } else if (!pn2->pn_cookie.isFree()) { 1.5755 + if (maySet) { 1.5756 + if (!EmitVarIncDec(cx, pn, bce)) 1.5757 + return false; 1.5758 + } else { 1.5759 + if (!EmitVarOp(cx, pn2, op, bce)) 1.5760 + return false; 1.5761 + } 1.5762 + } else { 1.5763 + JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM); 1.5764 + if (maySet) { 1.5765 + if (!EmitNameIncDec(cx, pn, bce)) 1.5766 + return false; 1.5767 + } else { 1.5768 + if (!EmitAtomOp(cx, pn2, op, bce)) 1.5769 + return false; 1.5770 + } 1.5771 + break; 1.5772 + } 1.5773 + if (pn2->isConst()) { 1.5774 + if (Emit1(cx, bce, JSOP_POS) < 0) 1.5775 + return false; 1.5776 + bool post; 1.5777 + JSOp binop = GetIncDecInfo(pn->getKind(), &post); 1.5778 + if (!post) { 1.5779 + if (Emit1(cx, bce, JSOP_ONE) < 0) 1.5780 + return false; 1.5781 + if (Emit1(cx, bce, binop) < 0) 1.5782 + return false; 1.5783 + } 1.5784 + } 1.5785 + } 1.5786 + return true; 1.5787 +} 1.5788 + 1.5789 +/* 1.5790 + * Using MOZ_NEVER_INLINE in here is a workaround for 
llvm.org/pr14047. See 1.5791 + * the comment on EmitSwitch. 1.5792 + */ 1.5793 +MOZ_NEVER_INLINE static bool 1.5794 +EmitLabeledStatement(ExclusiveContext *cx, BytecodeEmitter *bce, const LabeledStatement *pn) 1.5795 +{ 1.5796 + /* 1.5797 + * Emit a JSOP_LABEL instruction. The argument is the offset to the statement 1.5798 + * following the labeled statement. 1.5799 + */ 1.5800 + jsatomid index; 1.5801 + if (!bce->makeAtomIndex(pn->label(), &index)) 1.5802 + return false; 1.5803 + 1.5804 + ptrdiff_t top = EmitJump(cx, bce, JSOP_LABEL, 0); 1.5805 + if (top < 0) 1.5806 + return false; 1.5807 + 1.5808 + /* Emit code for the labeled statement. */ 1.5809 + StmtInfoBCE stmtInfo(cx); 1.5810 + PushStatementBCE(bce, &stmtInfo, STMT_LABEL, bce->offset()); 1.5811 + stmtInfo.label = pn->label(); 1.5812 + if (!EmitTree(cx, bce, pn->statement())) 1.5813 + return false; 1.5814 + if (!PopStatementBCE(cx, bce)) 1.5815 + return false; 1.5816 + 1.5817 + /* Patch the JSOP_LABEL offset. */ 1.5818 + SetJumpOffsetAt(bce, top); 1.5819 + return true; 1.5820 +} 1.5821 + 1.5822 +static bool 1.5823 +EmitSyntheticStatements(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t top) 1.5824 +{ 1.5825 + JS_ASSERT(pn->isArity(PN_LIST)); 1.5826 + StmtInfoBCE stmtInfo(cx); 1.5827 + PushStatementBCE(bce, &stmtInfo, STMT_SEQ, top); 1.5828 + ParseNode *pn2 = pn->pn_head; 1.5829 + if (pn->pn_xflags & PNX_DESTRUCT) 1.5830 + pn2 = pn2->pn_next; 1.5831 + for (; pn2; pn2 = pn2->pn_next) { 1.5832 + if (!EmitTree(cx, bce, pn2)) 1.5833 + return false; 1.5834 + } 1.5835 + return PopStatementBCE(cx, bce); 1.5836 +} 1.5837 + 1.5838 +static bool 1.5839 +EmitConditionalExpression(ExclusiveContext *cx, BytecodeEmitter *bce, ConditionalExpression &conditional) 1.5840 +{ 1.5841 + /* Emit the condition, then branch if false to the else part. 
*/ 1.5842 + if (!EmitTree(cx, bce, &conditional.condition())) 1.5843 + return false; 1.5844 + ptrdiff_t noteIndex = NewSrcNote(cx, bce, SRC_COND); 1.5845 + if (noteIndex < 0) 1.5846 + return false; 1.5847 + ptrdiff_t beq = EmitJump(cx, bce, JSOP_IFEQ, 0); 1.5848 + if (beq < 0 || !EmitTree(cx, bce, &conditional.thenExpression())) 1.5849 + return false; 1.5850 + 1.5851 + /* Jump around else, fixup the branch, emit else, fixup jump. */ 1.5852 + ptrdiff_t jmp = EmitJump(cx, bce, JSOP_GOTO, 0); 1.5853 + if (jmp < 0) 1.5854 + return false; 1.5855 + SetJumpOffsetAt(bce, beq); 1.5856 + 1.5857 + /* 1.5858 + * Because each branch pushes a single value, but our stack budgeting 1.5859 + * analysis ignores branches, we now have to adjust bce->stackDepth to 1.5860 + * ignore the value pushed by the first branch. Execution will follow 1.5861 + * only one path, so we must decrement bce->stackDepth. 1.5862 + * 1.5863 + * Failing to do this will foil code, such as let expression and block 1.5864 + * code generation, which must use the stack depth to compute local 1.5865 + * stack indexes correctly. 1.5866 + */ 1.5867 + JS_ASSERT(bce->stackDepth > 0); 1.5868 + bce->stackDepth--; 1.5869 + if (!EmitTree(cx, bce, &conditional.elseExpression())) 1.5870 + return false; 1.5871 + SetJumpOffsetAt(bce, jmp); 1.5872 + return SetSrcNoteOffset(cx, bce, noteIndex, 0, jmp - beq); 1.5873 +} 1.5874 + 1.5875 +/* 1.5876 + * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See 1.5877 + * the comment on EmitSwitch. 
1.5878 + */ 1.5879 +MOZ_NEVER_INLINE static bool 1.5880 +EmitObject(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) 1.5881 +{ 1.5882 + if (pn->pn_xflags & PNX_DESTRUCT) { 1.5883 + bce->reportError(pn, JSMSG_BAD_OBJECT_INIT); 1.5884 + return false; 1.5885 + } 1.5886 + 1.5887 + if (!(pn->pn_xflags & PNX_NONCONST) && pn->pn_head && bce->checkSingletonContext()) 1.5888 + return EmitSingletonInitialiser(cx, bce, pn); 1.5889 + 1.5890 + /* 1.5891 + * Emit code for {p:a, '%q':b, 2:c} that is equivalent to constructing 1.5892 + * a new object and defining (in source order) each property on the object 1.5893 + * (or mutating the object's [[Prototype]], in the case of __proto__). 1.5894 + */ 1.5895 + ptrdiff_t offset = bce->offset(); 1.5896 + if (!EmitNewInit(cx, bce, JSProto_Object)) 1.5897 + return false; 1.5898 + 1.5899 + /* 1.5900 + * Try to construct the shape of the object as we go, so we can emit a 1.5901 + * JSOP_NEWOBJECT with the final shape instead. 1.5902 + */ 1.5903 + RootedObject obj(cx); 1.5904 + if (bce->script->compileAndGo()) { 1.5905 + gc::AllocKind kind = GuessObjectGCKind(pn->pn_count); 1.5906 + obj = NewBuiltinClassInstance(cx, &JSObject::class_, kind, TenuredObject); 1.5907 + if (!obj) 1.5908 + return false; 1.5909 + } 1.5910 + 1.5911 + for (ParseNode *pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) { 1.5912 + if (!UpdateSourceCoordNotes(cx, bce, pn2->pn_pos.begin)) 1.5913 + return false; 1.5914 + 1.5915 + /* Emit an index for t[2] for later consumption by JSOP_INITELEM. */ 1.5916 + ParseNode *pn3 = pn2->pn_left; 1.5917 + bool isIndex = false; 1.5918 + if (pn3->isKind(PNK_NUMBER)) { 1.5919 + if (!EmitNumberOp(cx, pn3->pn_dval, bce)) 1.5920 + return false; 1.5921 + isIndex = true; 1.5922 + } else { 1.5923 + // The parser already checked for atoms representing indexes and 1.5924 + // used PNK_NUMBER instead, but also watch for ids which TI treats 1.5925 + // as indexes for simpliciation of downstream analysis. 
1.5926 + JS_ASSERT(pn3->isKind(PNK_NAME) || pn3->isKind(PNK_STRING)); 1.5927 + jsid id = NameToId(pn3->pn_atom->asPropertyName()); 1.5928 + if (id != types::IdToTypeId(id)) { 1.5929 + if (!EmitTree(cx, bce, pn3)) 1.5930 + return false; 1.5931 + isIndex = true; 1.5932 + } 1.5933 + } 1.5934 + 1.5935 + /* Emit code for the property initializer. */ 1.5936 + if (!EmitTree(cx, bce, pn2->pn_right)) 1.5937 + return false; 1.5938 + 1.5939 + JSOp op = pn2->getOp(); 1.5940 + JS_ASSERT(op == JSOP_INITPROP || 1.5941 + op == JSOP_INITPROP_GETTER || 1.5942 + op == JSOP_INITPROP_SETTER); 1.5943 + 1.5944 + if (op == JSOP_INITPROP_GETTER || op == JSOP_INITPROP_SETTER) 1.5945 + obj = nullptr; 1.5946 + 1.5947 + if (isIndex) { 1.5948 + obj = nullptr; 1.5949 + switch (op) { 1.5950 + case JSOP_INITPROP: op = JSOP_INITELEM; break; 1.5951 + case JSOP_INITPROP_GETTER: op = JSOP_INITELEM_GETTER; break; 1.5952 + case JSOP_INITPROP_SETTER: op = JSOP_INITELEM_SETTER; break; 1.5953 + default: MOZ_ASSUME_UNREACHABLE("Invalid op"); 1.5954 + } 1.5955 + if (Emit1(cx, bce, op) < 0) 1.5956 + return false; 1.5957 + } else { 1.5958 + JS_ASSERT(pn3->isKind(PNK_NAME) || pn3->isKind(PNK_STRING)); 1.5959 + 1.5960 + // If we have { __proto__: expr }, implement prototype mutation. 
1.5961 + if (op == JSOP_INITPROP && pn3->pn_atom == cx->names().proto) { 1.5962 + obj = nullptr; 1.5963 + if (Emit1(cx, bce, JSOP_MUTATEPROTO) < 0) 1.5964 + return false; 1.5965 + continue; 1.5966 + } 1.5967 + 1.5968 + jsatomid index; 1.5969 + if (!bce->makeAtomIndex(pn3->pn_atom, &index)) 1.5970 + return false; 1.5971 + 1.5972 + MOZ_ASSERT(op == JSOP_INITPROP || 1.5973 + op == JSOP_INITPROP_GETTER || 1.5974 + op == JSOP_INITPROP_SETTER); 1.5975 + 1.5976 + if (obj) { 1.5977 + JS_ASSERT(!obj->inDictionaryMode()); 1.5978 + Rooted<jsid> id(cx, AtomToId(pn3->pn_atom)); 1.5979 + RootedValue undefinedValue(cx, UndefinedValue()); 1.5980 + if (!DefineNativeProperty(cx, obj, id, undefinedValue, nullptr, 1.5981 + nullptr, JSPROP_ENUMERATE)) 1.5982 + { 1.5983 + return false; 1.5984 + } 1.5985 + if (obj->inDictionaryMode()) 1.5986 + obj = nullptr; 1.5987 + } 1.5988 + 1.5989 + if (!EmitIndex32(cx, op, index, bce)) 1.5990 + return false; 1.5991 + } 1.5992 + } 1.5993 + 1.5994 + if (Emit1(cx, bce, JSOP_ENDINIT) < 0) 1.5995 + return false; 1.5996 + 1.5997 + if (obj) { 1.5998 + /* 1.5999 + * The object survived and has a predictable shape: update the original 1.6000 + * bytecode. 
1.6001 + */ 1.6002 + ObjectBox *objbox = bce->parser->newObjectBox(obj); 1.6003 + if (!objbox) 1.6004 + return false; 1.6005 + 1.6006 + static_assert(JSOP_NEWINIT_LENGTH == JSOP_NEWOBJECT_LENGTH, 1.6007 + "newinit and newobject must have equal length to edit in-place"); 1.6008 + 1.6009 + uint32_t index = bce->objectList.add(objbox); 1.6010 + jsbytecode *code = bce->code(offset); 1.6011 + code[0] = JSOP_NEWOBJECT; 1.6012 + code[1] = jsbytecode(index >> 24); 1.6013 + code[2] = jsbytecode(index >> 16); 1.6014 + code[3] = jsbytecode(index >> 8); 1.6015 + code[4] = jsbytecode(index); 1.6016 + } 1.6017 + 1.6018 + return true; 1.6019 +} 1.6020 + 1.6021 +static bool 1.6022 +EmitArrayComp(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) 1.6023 +{ 1.6024 + if (!EmitNewInit(cx, bce, JSProto_Array)) 1.6025 + return false; 1.6026 + 1.6027 + /* 1.6028 + * Pass the new array's stack index to the PNK_ARRAYPUSH case via 1.6029 + * bce->arrayCompDepth, then simply traverse the PNK_FOR node and 1.6030 + * its kids under pn2 to generate this comprehension. 1.6031 + */ 1.6032 + JS_ASSERT(bce->stackDepth > 0); 1.6033 + uint32_t saveDepth = bce->arrayCompDepth; 1.6034 + bce->arrayCompDepth = (uint32_t) (bce->stackDepth - 1); 1.6035 + if (!EmitTree(cx, bce, pn->pn_head)) 1.6036 + return false; 1.6037 + bce->arrayCompDepth = saveDepth; 1.6038 + 1.6039 + /* Emit the usual op needed for decompilation. */ 1.6040 + return Emit1(cx, bce, JSOP_ENDINIT) >= 0; 1.6041 +} 1.6042 + 1.6043 +static bool 1.6044 +EmitArray(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, uint32_t count) 1.6045 +{ 1.6046 + /* 1.6047 + * Emit code for [a, b, c] that is equivalent to constructing a new 1.6048 + * array and in source order evaluating each element value and adding 1.6049 + * it to the array, without invoking latent setters. 
We use the 1.6050 + * JSOP_NEWINIT and JSOP_INITELEM_ARRAY bytecodes to ignore setters and 1.6051 + * to avoid dup'ing and popping the array as each element is added, as 1.6052 + * JSOP_SETELEM/JSOP_SETPROP would do. 1.6053 + */ 1.6054 + 1.6055 + int32_t nspread = 0; 1.6056 + for (ParseNode *elt = pn; elt; elt = elt->pn_next) { 1.6057 + if (elt->isKind(PNK_SPREAD)) 1.6058 + nspread++; 1.6059 + } 1.6060 + 1.6061 + ptrdiff_t off = EmitN(cx, bce, JSOP_NEWARRAY, 3); 1.6062 + if (off < 0) 1.6063 + return false; 1.6064 + CheckTypeSet(cx, bce, JSOP_NEWARRAY); 1.6065 + jsbytecode *pc = bce->code(off); 1.6066 + 1.6067 + // For arrays with spread, this is a very pessimistic allocation, the 1.6068 + // minimum possible final size. 1.6069 + SET_UINT24(pc, count - nspread); 1.6070 + 1.6071 + ParseNode *pn2 = pn; 1.6072 + jsatomid atomIndex; 1.6073 + if (nspread && !EmitNumberOp(cx, 0, bce)) 1.6074 + return false; 1.6075 + for (atomIndex = 0; pn2; atomIndex++, pn2 = pn2->pn_next) { 1.6076 + if (!UpdateSourceCoordNotes(cx, bce, pn2->pn_pos.begin)) 1.6077 + return false; 1.6078 + if (pn2->isKind(PNK_ELISION)) { 1.6079 + if (Emit1(cx, bce, JSOP_HOLE) < 0) 1.6080 + return false; 1.6081 + } else { 1.6082 + ParseNode *expr = pn2->isKind(PNK_SPREAD) ? pn2->pn_kid : pn2; 1.6083 + if (!EmitTree(cx, bce, expr)) 1.6084 + return false; 1.6085 + } 1.6086 + if (pn2->isKind(PNK_SPREAD)) { 1.6087 + if (Emit1(cx, bce, JSOP_SPREAD) < 0) 1.6088 + return false; 1.6089 + } else if (nspread) { 1.6090 + if (Emit1(cx, bce, JSOP_INITELEM_INC) < 0) 1.6091 + return false; 1.6092 + } else { 1.6093 + off = EmitN(cx, bce, JSOP_INITELEM_ARRAY, 3); 1.6094 + if (off < 0) 1.6095 + return false; 1.6096 + SET_UINT24(bce->code(off), atomIndex); 1.6097 + } 1.6098 + } 1.6099 + JS_ASSERT(atomIndex == count); 1.6100 + if (nspread) { 1.6101 + if (Emit1(cx, bce, JSOP_POP) < 0) 1.6102 + return false; 1.6103 + } 1.6104 + 1.6105 + /* Emit an op to finish the array and aid in decompilation. 
*/ 1.6106 + return Emit1(cx, bce, JSOP_ENDINIT) >= 0; 1.6107 +} 1.6108 + 1.6109 +static bool 1.6110 +EmitUnary(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) 1.6111 +{ 1.6112 + if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin)) 1.6113 + return false; 1.6114 + /* Unary op, including unary +/-. */ 1.6115 + JSOp op = pn->getOp(); 1.6116 + ParseNode *pn2 = pn->pn_kid; 1.6117 + 1.6118 + if (op == JSOP_TYPEOF && !pn2->isKind(PNK_NAME)) 1.6119 + op = JSOP_TYPEOFEXPR; 1.6120 + 1.6121 + bool oldEmittingForInit = bce->emittingForInit; 1.6122 + bce->emittingForInit = false; 1.6123 + if (!EmitTree(cx, bce, pn2)) 1.6124 + return false; 1.6125 + 1.6126 + bce->emittingForInit = oldEmittingForInit; 1.6127 + return Emit1(cx, bce, op) >= 0; 1.6128 +} 1.6129 + 1.6130 +static bool 1.6131 +EmitDefaults(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) 1.6132 +{ 1.6133 + JS_ASSERT(pn->isKind(PNK_ARGSBODY)); 1.6134 + 1.6135 + ParseNode *arg, *pnlast = pn->last(); 1.6136 + for (arg = pn->pn_head; arg != pnlast; arg = arg->pn_next) { 1.6137 + if (!(arg->pn_dflags & PND_DEFAULT) || !arg->isKind(PNK_NAME)) 1.6138 + continue; 1.6139 + if (!BindNameToSlot(cx, bce, arg)) 1.6140 + return false; 1.6141 + if (!EmitVarOp(cx, arg, JSOP_GETARG, bce)) 1.6142 + return false; 1.6143 + if (Emit1(cx, bce, JSOP_UNDEFINED) < 0) 1.6144 + return false; 1.6145 + if (Emit1(cx, bce, JSOP_STRICTEQ) < 0) 1.6146 + return false; 1.6147 + // Emit source note to enable ion compilation. 
1.6148 + if (NewSrcNote(cx, bce, SRC_IF) < 0) 1.6149 + return false; 1.6150 + ptrdiff_t jump = EmitJump(cx, bce, JSOP_IFEQ, 0); 1.6151 + if (jump < 0) 1.6152 + return false; 1.6153 + if (!EmitTree(cx, bce, arg->expr())) 1.6154 + return false; 1.6155 + if (!EmitVarOp(cx, arg, JSOP_SETARG, bce)) 1.6156 + return false; 1.6157 + if (Emit1(cx, bce, JSOP_POP) < 0) 1.6158 + return false; 1.6159 + SET_JUMP_OFFSET(bce->code(jump), bce->offset() - jump); 1.6160 + } 1.6161 + 1.6162 + return true; 1.6163 +} 1.6164 + 1.6165 +bool 1.6166 +frontend::EmitTree(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) 1.6167 +{ 1.6168 + JS_CHECK_RECURSION(cx, return false); 1.6169 + 1.6170 + EmitLevelManager elm(bce); 1.6171 + 1.6172 + bool ok = true; 1.6173 + ptrdiff_t top = bce->offset(); 1.6174 + pn->pn_offset = top; 1.6175 + 1.6176 + /* Emit notes to tell the current bytecode's source line number. */ 1.6177 + if (!UpdateLineNumberNotes(cx, bce, pn->pn_pos.begin)) 1.6178 + return false; 1.6179 + 1.6180 + switch (pn->getKind()) { 1.6181 + case PNK_FUNCTION: 1.6182 + ok = EmitFunc(cx, bce, pn); 1.6183 + break; 1.6184 + 1.6185 + case PNK_ARGSBODY: 1.6186 + { 1.6187 + RootedFunction fun(cx, bce->sc->asFunctionBox()->function()); 1.6188 + ParseNode *pnlast = pn->last(); 1.6189 + 1.6190 + // Carefully emit everything in the right order: 1.6191 + // 1. Destructuring 1.6192 + // 2. Functions 1.6193 + // 3. Defaults 1.6194 + ParseNode *pnchild = pnlast->pn_head; 1.6195 + if (pnlast->pn_xflags & PNX_DESTRUCT) { 1.6196 + // Assign the destructuring arguments before defining any functions, 1.6197 + // see bug 419662. 1.6198 + JS_ASSERT(pnchild->isKind(PNK_SEMI)); 1.6199 + JS_ASSERT(pnchild->pn_kid->isKind(PNK_VAR) || pnchild->pn_kid->isKind(PNK_CONST)); 1.6200 + if (!EmitTree(cx, bce, pnchild)) 1.6201 + return false; 1.6202 + pnchild = pnchild->pn_next; 1.6203 + } 1.6204 + if (pnlast->pn_xflags & PNX_FUNCDEFS) { 1.6205 + // This block contains top-level function definitions. 
To ensure 1.6206 + // that we emit the bytecode defining them before the rest of code 1.6207 + // in the block we use a separate pass over functions. During the 1.6208 + // main pass later the emitter will add JSOP_NOP with source notes 1.6209 + // for the function to preserve the original functions position 1.6210 + // when decompiling. 1.6211 + // 1.6212 + // Currently this is used only for functions, as compile-as-we go 1.6213 + // mode for scripts does not allow separate emitter passes. 1.6214 + for (ParseNode *pn2 = pnchild; pn2; pn2 = pn2->pn_next) { 1.6215 + if (pn2->isKind(PNK_FUNCTION) && pn2->functionIsHoisted()) { 1.6216 + if (!EmitTree(cx, bce, pn2)) 1.6217 + return false; 1.6218 + } 1.6219 + } 1.6220 + } 1.6221 + bool hasDefaults = bce->sc->asFunctionBox()->hasDefaults(); 1.6222 + if (hasDefaults) { 1.6223 + ParseNode *rest = nullptr; 1.6224 + bool restIsDefn = false; 1.6225 + if (fun->hasRest()) { 1.6226 + JS_ASSERT(!bce->sc->asFunctionBox()->argumentsHasLocalBinding()); 1.6227 + 1.6228 + // Defaults with a rest parameter need special handling. The 1.6229 + // rest parameter needs to be undefined while defaults are being 1.6230 + // processed. To do this, we create the rest argument and let it 1.6231 + // sit on the stack while processing defaults. The rest 1.6232 + // parameter's slot is set to undefined for the course of 1.6233 + // default processing. 1.6234 + rest = pn->pn_head; 1.6235 + while (rest->pn_next != pnlast) 1.6236 + rest = rest->pn_next; 1.6237 + restIsDefn = rest->isDefn(); 1.6238 + if (Emit1(cx, bce, JSOP_REST) < 0) 1.6239 + return false; 1.6240 + CheckTypeSet(cx, bce, JSOP_REST); 1.6241 + 1.6242 + // Only set the rest parameter if it's not aliased by a nested 1.6243 + // function in the body. 
1.6244 + if (restIsDefn) { 1.6245 + if (Emit1(cx, bce, JSOP_UNDEFINED) < 0) 1.6246 + return false; 1.6247 + if (!BindNameToSlot(cx, bce, rest)) 1.6248 + return false; 1.6249 + if (!EmitVarOp(cx, rest, JSOP_SETARG, bce)) 1.6250 + return false; 1.6251 + if (Emit1(cx, bce, JSOP_POP) < 0) 1.6252 + return false; 1.6253 + } 1.6254 + } 1.6255 + if (!EmitDefaults(cx, bce, pn)) 1.6256 + return false; 1.6257 + if (fun->hasRest()) { 1.6258 + if (restIsDefn && !EmitVarOp(cx, rest, JSOP_SETARG, bce)) 1.6259 + return false; 1.6260 + if (Emit1(cx, bce, JSOP_POP) < 0) 1.6261 + return false; 1.6262 + } 1.6263 + } 1.6264 + for (ParseNode *pn2 = pn->pn_head; pn2 != pnlast; pn2 = pn2->pn_next) { 1.6265 + // Only bind the parameter if it's not aliased by a nested function 1.6266 + // in the body. 1.6267 + if (!pn2->isDefn()) 1.6268 + continue; 1.6269 + if (!BindNameToSlot(cx, bce, pn2)) 1.6270 + return false; 1.6271 + if (pn2->pn_next == pnlast && fun->hasRest() && !hasDefaults) { 1.6272 + // Fill rest parameter. We handled the case with defaults above. 
1.6273 + JS_ASSERT(!bce->sc->asFunctionBox()->argumentsHasLocalBinding()); 1.6274 + bce->switchToProlog(); 1.6275 + if (Emit1(cx, bce, JSOP_REST) < 0) 1.6276 + return false; 1.6277 + CheckTypeSet(cx, bce, JSOP_REST); 1.6278 + if (!EmitVarOp(cx, pn2, JSOP_SETARG, bce)) 1.6279 + return false; 1.6280 + if (Emit1(cx, bce, JSOP_POP) < 0) 1.6281 + return false; 1.6282 + bce->switchToMain(); 1.6283 + } 1.6284 + } 1.6285 + ok = EmitTree(cx, bce, pnlast); 1.6286 + break; 1.6287 + } 1.6288 + 1.6289 + case PNK_IF: 1.6290 + ok = EmitIf(cx, bce, pn); 1.6291 + break; 1.6292 + 1.6293 + case PNK_SWITCH: 1.6294 + ok = EmitSwitch(cx, bce, pn); 1.6295 + break; 1.6296 + 1.6297 + case PNK_WHILE: 1.6298 + ok = EmitWhile(cx, bce, pn, top); 1.6299 + break; 1.6300 + 1.6301 + case PNK_DOWHILE: 1.6302 + ok = EmitDo(cx, bce, pn); 1.6303 + break; 1.6304 + 1.6305 + case PNK_FOR: 1.6306 + ok = EmitFor(cx, bce, pn, top); 1.6307 + break; 1.6308 + 1.6309 + case PNK_BREAK: 1.6310 + ok = EmitBreak(cx, bce, pn->as<BreakStatement>().label()); 1.6311 + break; 1.6312 + 1.6313 + case PNK_CONTINUE: 1.6314 + ok = EmitContinue(cx, bce, pn->as<ContinueStatement>().label()); 1.6315 + break; 1.6316 + 1.6317 + case PNK_WITH: 1.6318 + ok = EmitWith(cx, bce, pn); 1.6319 + break; 1.6320 + 1.6321 + case PNK_TRY: 1.6322 + if (!EmitTry(cx, bce, pn)) 1.6323 + return false; 1.6324 + break; 1.6325 + 1.6326 + case PNK_CATCH: 1.6327 + if (!EmitCatch(cx, bce, pn)) 1.6328 + return false; 1.6329 + break; 1.6330 + 1.6331 + case PNK_VAR: 1.6332 + case PNK_CONST: 1.6333 + if (!EmitVariables(cx, bce, pn, InitializeVars)) 1.6334 + return false; 1.6335 + break; 1.6336 + 1.6337 + case PNK_RETURN: 1.6338 + ok = EmitReturn(cx, bce, pn); 1.6339 + break; 1.6340 + 1.6341 + case PNK_YIELD_STAR: 1.6342 + ok = EmitYieldStar(cx, bce, pn->pn_kid); 1.6343 + break; 1.6344 + 1.6345 + case PNK_YIELD: 1.6346 + JS_ASSERT(bce->sc->isFunctionBox()); 1.6347 + if (bce->sc->asFunctionBox()->isStarGenerator()) { 1.6348 + if 
(!EmitPrepareIteratorResult(cx, bce)) 1.6349 + return false; 1.6350 + } 1.6351 + if (pn->pn_kid) { 1.6352 + if (!EmitTree(cx, bce, pn->pn_kid)) 1.6353 + return false; 1.6354 + } else { 1.6355 + if (Emit1(cx, bce, JSOP_UNDEFINED) < 0) 1.6356 + return false; 1.6357 + } 1.6358 + if (bce->sc->asFunctionBox()->isStarGenerator()) { 1.6359 + if (!EmitFinishIteratorResult(cx, bce, false)) 1.6360 + return false; 1.6361 + } 1.6362 + if (Emit1(cx, bce, JSOP_YIELD) < 0) 1.6363 + return false; 1.6364 + break; 1.6365 + 1.6366 + case PNK_STATEMENTLIST: 1.6367 + ok = EmitStatementList(cx, bce, pn, top); 1.6368 + break; 1.6369 + 1.6370 + case PNK_SEQ: 1.6371 + ok = EmitSyntheticStatements(cx, bce, pn, top); 1.6372 + break; 1.6373 + 1.6374 + case PNK_SEMI: 1.6375 + ok = EmitStatement(cx, bce, pn); 1.6376 + break; 1.6377 + 1.6378 + case PNK_LABEL: 1.6379 + ok = EmitLabeledStatement(cx, bce, &pn->as<LabeledStatement>()); 1.6380 + break; 1.6381 + 1.6382 + case PNK_COMMA: 1.6383 + { 1.6384 + for (ParseNode *pn2 = pn->pn_head; ; pn2 = pn2->pn_next) { 1.6385 + if (!UpdateSourceCoordNotes(cx, bce, pn2->pn_pos.begin)) 1.6386 + return false; 1.6387 + if (!EmitTree(cx, bce, pn2)) 1.6388 + return false; 1.6389 + if (!pn2->pn_next) 1.6390 + break; 1.6391 + if (Emit1(cx, bce, JSOP_POP) < 0) 1.6392 + return false; 1.6393 + } 1.6394 + break; 1.6395 + } 1.6396 + 1.6397 + case PNK_ASSIGN: 1.6398 + case PNK_ADDASSIGN: 1.6399 + case PNK_SUBASSIGN: 1.6400 + case PNK_BITORASSIGN: 1.6401 + case PNK_BITXORASSIGN: 1.6402 + case PNK_BITANDASSIGN: 1.6403 + case PNK_LSHASSIGN: 1.6404 + case PNK_RSHASSIGN: 1.6405 + case PNK_URSHASSIGN: 1.6406 + case PNK_MULASSIGN: 1.6407 + case PNK_DIVASSIGN: 1.6408 + case PNK_MODASSIGN: 1.6409 + if (!EmitAssignment(cx, bce, pn->pn_left, pn->getOp(), pn->pn_right)) 1.6410 + return false; 1.6411 + break; 1.6412 + 1.6413 + case PNK_CONDITIONAL: 1.6414 + ok = EmitConditionalExpression(cx, bce, pn->as<ConditionalExpression>()); 1.6415 + break; 1.6416 + 1.6417 + case PNK_OR: 1.6418 
+ case PNK_AND: 1.6419 + ok = EmitLogical(cx, bce, pn); 1.6420 + break; 1.6421 + 1.6422 + case PNK_ADD: 1.6423 + case PNK_SUB: 1.6424 + case PNK_BITOR: 1.6425 + case PNK_BITXOR: 1.6426 + case PNK_BITAND: 1.6427 + case PNK_STRICTEQ: 1.6428 + case PNK_EQ: 1.6429 + case PNK_STRICTNE: 1.6430 + case PNK_NE: 1.6431 + case PNK_LT: 1.6432 + case PNK_LE: 1.6433 + case PNK_GT: 1.6434 + case PNK_GE: 1.6435 + case PNK_IN: 1.6436 + case PNK_INSTANCEOF: 1.6437 + case PNK_LSH: 1.6438 + case PNK_RSH: 1.6439 + case PNK_URSH: 1.6440 + case PNK_STAR: 1.6441 + case PNK_DIV: 1.6442 + case PNK_MOD: 1.6443 + if (pn->isArity(PN_LIST)) { 1.6444 + /* Left-associative operator chain: avoid too much recursion. */ 1.6445 + ParseNode *pn2 = pn->pn_head; 1.6446 + if (!EmitTree(cx, bce, pn2)) 1.6447 + return false; 1.6448 + JSOp op = pn->getOp(); 1.6449 + while ((pn2 = pn2->pn_next) != nullptr) { 1.6450 + if (!EmitTree(cx, bce, pn2)) 1.6451 + return false; 1.6452 + if (Emit1(cx, bce, op) < 0) 1.6453 + return false; 1.6454 + } 1.6455 + } else { 1.6456 + /* Binary operators that evaluate both operands unconditionally. 
*/ 1.6457 + if (!EmitTree(cx, bce, pn->pn_left)) 1.6458 + return false; 1.6459 + if (!EmitTree(cx, bce, pn->pn_right)) 1.6460 + return false; 1.6461 + if (Emit1(cx, bce, pn->getOp()) < 0) 1.6462 + return false; 1.6463 + } 1.6464 + break; 1.6465 + 1.6466 + case PNK_THROW: 1.6467 + case PNK_TYPEOF: 1.6468 + case PNK_VOID: 1.6469 + case PNK_NOT: 1.6470 + case PNK_BITNOT: 1.6471 + case PNK_POS: 1.6472 + case PNK_NEG: 1.6473 + ok = EmitUnary(cx, bce, pn); 1.6474 + break; 1.6475 + 1.6476 + case PNK_PREINCREMENT: 1.6477 + case PNK_PREDECREMENT: 1.6478 + case PNK_POSTINCREMENT: 1.6479 + case PNK_POSTDECREMENT: 1.6480 + ok = EmitIncOrDec(cx, bce, pn); 1.6481 + break; 1.6482 + 1.6483 + case PNK_DELETE: 1.6484 + ok = EmitDelete(cx, bce, pn); 1.6485 + break; 1.6486 + 1.6487 + case PNK_DOT: 1.6488 + ok = EmitPropOp(cx, pn, JSOP_GETPROP, bce); 1.6489 + break; 1.6490 + 1.6491 + case PNK_ELEM: 1.6492 + ok = EmitElemOp(cx, pn, JSOP_GETELEM, bce); 1.6493 + break; 1.6494 + 1.6495 + case PNK_NEW: 1.6496 + case PNK_CALL: 1.6497 + case PNK_GENEXP: 1.6498 + ok = EmitCallOrNew(cx, bce, pn); 1.6499 + break; 1.6500 + 1.6501 + case PNK_LEXICALSCOPE: 1.6502 + ok = EmitLexicalScope(cx, bce, pn); 1.6503 + break; 1.6504 + 1.6505 + case PNK_LET: 1.6506 + ok = pn->isArity(PN_BINARY) 1.6507 + ? EmitLet(cx, bce, pn) 1.6508 + : EmitVariables(cx, bce, pn, InitializeVars); 1.6509 + break; 1.6510 + 1.6511 + case PNK_IMPORT: 1.6512 + case PNK_EXPORT: 1.6513 + // TODO: Implement emitter support for modules 1.6514 + bce->reportError(nullptr, JSMSG_MODULES_NOT_IMPLEMENTED); 1.6515 + return false; 1.6516 + 1.6517 + case PNK_ARRAYPUSH: { 1.6518 + /* 1.6519 + * The array object's stack index is in bce->arrayCompDepth. See below 1.6520 + * under the array initialiser code generator for array comprehension 1.6521 + * special casing. Note that the array object is a pure stack value, 1.6522 + * unaliased by blocks, so we can EmitUnaliasedVarOp. 
1.6523 + */ 1.6524 + if (!EmitTree(cx, bce, pn->pn_kid)) 1.6525 + return false; 1.6526 + if (!EmitDupAt(cx, bce, bce->arrayCompDepth)) 1.6527 + return false; 1.6528 + if (Emit1(cx, bce, JSOP_ARRAYPUSH) < 0) 1.6529 + return false; 1.6530 + break; 1.6531 + } 1.6532 + 1.6533 + case PNK_ARRAY: 1.6534 + if (!(pn->pn_xflags & PNX_NONCONST) && pn->pn_head && bce->checkSingletonContext()) 1.6535 + ok = EmitSingletonInitialiser(cx, bce, pn); 1.6536 + else 1.6537 + ok = EmitArray(cx, bce, pn->pn_head, pn->pn_count); 1.6538 + break; 1.6539 + 1.6540 + case PNK_ARRAYCOMP: 1.6541 + ok = EmitArrayComp(cx, bce, pn); 1.6542 + break; 1.6543 + 1.6544 + case PNK_OBJECT: 1.6545 + ok = EmitObject(cx, bce, pn); 1.6546 + break; 1.6547 + 1.6548 + case PNK_NAME: 1.6549 + if (!EmitNameOp(cx, bce, pn, false)) 1.6550 + return false; 1.6551 + break; 1.6552 + 1.6553 + case PNK_STRING: 1.6554 + ok = EmitAtomOp(cx, pn, pn->getOp(), bce); 1.6555 + break; 1.6556 + 1.6557 + case PNK_NUMBER: 1.6558 + ok = EmitNumberOp(cx, pn->pn_dval, bce); 1.6559 + break; 1.6560 + 1.6561 + case PNK_REGEXP: 1.6562 + ok = EmitRegExp(cx, bce->regexpList.add(pn->as<RegExpLiteral>().objbox()), bce); 1.6563 + break; 1.6564 + 1.6565 + case PNK_TRUE: 1.6566 + case PNK_FALSE: 1.6567 + case PNK_THIS: 1.6568 + case PNK_NULL: 1.6569 + if (Emit1(cx, bce, pn->getOp()) < 0) 1.6570 + return false; 1.6571 + break; 1.6572 + 1.6573 + case PNK_DEBUGGER: 1.6574 + if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin)) 1.6575 + return false; 1.6576 + if (Emit1(cx, bce, JSOP_DEBUGGER) < 0) 1.6577 + return false; 1.6578 + break; 1.6579 + 1.6580 + case PNK_NOP: 1.6581 + JS_ASSERT(pn->getArity() == PN_NULLARY); 1.6582 + break; 1.6583 + 1.6584 + default: 1.6585 + JS_ASSERT(0); 1.6586 + } 1.6587 + 1.6588 + /* bce->emitLevel == 1 means we're last on the stack, so finish up. 
 */
    if (ok && bce->emitLevel == 1) {
        if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.end))
            return false;
    }

    return ok;
}

/*
 * Append one zeroed source-note slot to |notes| and return its index, or -1
 * on OOM (reported against |cx|).
 */
static int
AllocSrcNote(ExclusiveContext *cx, SrcNotesVector &notes)
{
    // Start it off moderately large to avoid repeated resizings early on.
    if (notes.capacity() == 0 && !notes.reserve(1024))
        return -1;

    jssrcnote dummy = 0;
    if (!notes.append(dummy)) {
        js_ReportOutOfMemory(cx);
        return -1;
    }
    return notes.length() - 1;
}

/*
 * Allocate a new source note of the given type in the current (prolog or
 * main) note vector and return its index, or -1 on failure. First emits as
 * many SRC_XDELTA notes as needed so the new note's pc-delta fits in its own
 * delta field.
 */
int
frontend::NewSrcNote(ExclusiveContext *cx, BytecodeEmitter *bce, SrcNoteType type)
{
    SrcNotesVector &notes = bce->notes();
    int index;

    index = AllocSrcNote(cx, notes);
    if (index < 0)
        return -1;

    /*
     * Compute delta from the last annotated bytecode's offset. If it's too
     * big to fit in sn, allocate one or more xdelta notes and reset sn.
     */
    ptrdiff_t offset = bce->offset();
    ptrdiff_t delta = offset - bce->lastNoteOffset();
    bce->current->lastNoteOffset = offset;
    if (delta >= SN_DELTA_LIMIT) {
        do {
            ptrdiff_t xdelta = Min(delta, SN_XDELTA_MASK);
            SN_MAKE_XDELTA(&notes[index], xdelta);
            delta -= xdelta;
            index = AllocSrcNote(cx, notes);
            if (index < 0)
                return -1;
        } while (delta >= SN_DELTA_LIMIT);
    }

    /*
     * Initialize type and delta, then allocate the minimum number of notes
     * needed for type's arity. Usually, we won't need more, but if an offset
     * does take two bytes, SetSrcNoteOffset will grow notes.
     */
    SN_MAKE_NOTE(&notes[index], type, delta);
    for (int n = (int)js_SrcNoteSpec[type].arity; n > 0; n--) {
        if (NewSrcNote(cx, bce, SRC_NULL) < 0)
            return -1;
    }
    return index;
}

/*
 * Like NewSrcNote, but also sets the note's first offset operand to |offset|.
 * Returns the note's index, or -1 on failure.
 */
int
frontend::NewSrcNote2(ExclusiveContext *cx, BytecodeEmitter *bce, SrcNoteType type, ptrdiff_t offset)
{
    int index;

    index = NewSrcNote(cx, bce, type);
    if (index >= 0) {
        if (!SetSrcNoteOffset(cx, bce, index, 0, offset))
            return -1;
    }
    return index;
}

/*
 * Like NewSrcNote, but also sets the note's first two offset operands.
 * Returns the note's index, or -1 on failure.
 */
int
frontend::NewSrcNote3(ExclusiveContext *cx, BytecodeEmitter *bce, SrcNoteType type, ptrdiff_t offset1,
                      ptrdiff_t offset2)
{
    int index;

    index = NewSrcNote(cx, bce, type);
    if (index >= 0) {
        if (!SetSrcNoteOffset(cx, bce, index, 0, offset1))
            return -1;
        if (!SetSrcNoteOffset(cx, bce, index, 1, offset2))
            return -1;
    }
    return index;
}

bool
frontend::AddToSrcNoteDelta(ExclusiveContext *cx, BytecodeEmitter *bce, jssrcnote *sn, ptrdiff_t delta)
{
    /*
     * Called only from FinishTakingSrcNotes to add to main script note
     * deltas, and only by a small positive amount.
     */
    JS_ASSERT(bce->current == &bce->main);
    JS_ASSERT((unsigned) delta < (unsigned) SN_XDELTA_LIMIT);

    ptrdiff_t base = SN_DELTA(sn);
    ptrdiff_t limit = SN_IS_XDELTA(sn) ? SN_XDELTA_LIMIT : SN_DELTA_LIMIT;
    ptrdiff_t newdelta = base + delta;
    if (newdelta < limit) {
        SN_SET_DELTA(sn, newdelta);
    } else {
        /* The grown delta no longer fits; insert an xdelta note carrying it. */
        jssrcnote xdelta;
        SN_MAKE_XDELTA(&xdelta, delta);
        if (!(sn = bce->main.notes.insert(sn, xdelta)))
            return false;
    }
    return true;
}

/*
 * Store |offset| as operand number |which| of the note at |index|, inflating
 * the operand to the 4-byte form when the value doesn't fit in a single byte
 * (or when it was already inflated). Reports and returns false on statement-
 * too-large or OOM.
 */
static bool
SetSrcNoteOffset(ExclusiveContext *cx, BytecodeEmitter *bce, unsigned index, unsigned which,
                 ptrdiff_t offset)
{
    if (size_t(offset) > SN_MAX_OFFSET) {
        ReportStatementTooLarge(bce->parser->tokenStream, bce->topStmt);
        return false;
    }

    SrcNotesVector &notes = bce->notes();

    /* Find the offset numbered which (i.e., skip exactly which offsets). */
    jssrcnote *sn = notes.begin() + index;
    JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA);
    JS_ASSERT((int) which < js_SrcNoteSpec[SN_TYPE(sn)].arity);
    for (sn++; which; sn++, which--) {
        if (*sn & SN_4BYTE_OFFSET_FLAG)
            sn += 3;
    }

    /*
     * See if the new offset requires the 4-byte form, either by being too big
     * or if the offset has already been inflated (in which case, we need to
     * stay big to not break the srcnote encoding if this isn't the last
     * srcnote).
     */
    if (offset > (ptrdiff_t)SN_4BYTE_OFFSET_MASK || (*sn & SN_4BYTE_OFFSET_FLAG)) {
        /* Maybe this offset was already set to a 4-byte value. */
        if (!(*sn & SN_4BYTE_OFFSET_FLAG)) {
            /* Insert three dummy bytes that will be overwritten shortly. */
            jssrcnote dummy = 0;
            if (!(sn = notes.insert(sn, dummy)) ||
                !(sn = notes.insert(sn, dummy)) ||
                !(sn = notes.insert(sn, dummy)))
            {
                js_ReportOutOfMemory(cx);
                return false;
            }
        }
        *sn++ = (jssrcnote)(SN_4BYTE_OFFSET_FLAG | (offset >> 24));
        *sn++ = (jssrcnote)(offset >> 16);
        *sn++ = (jssrcnote)(offset >> 8);
    }
    *sn = (jssrcnote)offset;
    return true;
}

/*
 * Finish taking source notes in cx's notePool.
 * If successful, the final source note count is stored in the |out| outparam.
 */
bool
frontend::FinishTakingSrcNotes(ExclusiveContext *cx, BytecodeEmitter *bce, uint32_t *out)
{
    JS_ASSERT(bce->current == &bce->main);

    unsigned prologCount = bce->prolog.notes.length();
    if (prologCount && bce->prolog.currentLine != bce->firstLine) {
        bce->switchToProlog();
        if (NewSrcNote2(cx, bce, SRC_SETLINE, (ptrdiff_t)bce->firstLine) < 0)
            return false;
        bce->switchToMain();
    } else {
        /*
         * Either no prolog srcnotes, or no line number change over prolog.
         * We don't need a SRC_SETLINE, but we may need to adjust the offset
         * of the first main note, by adding to its delta and possibly even
         * prepending SRC_XDELTA notes to it to account for prolog bytecodes
         * that came at and after the last annotated bytecode.
         */
        ptrdiff_t offset = bce->prologOffset() - bce->prolog.lastNoteOffset;
        JS_ASSERT(offset >= 0);
        if (offset > 0 && bce->main.notes.length() != 0) {
            /* NB: Use as much of the first main note's delta as we can. */
            jssrcnote *sn = bce->main.notes.begin();
            ptrdiff_t delta = SN_IS_XDELTA(sn)
                              ? SN_XDELTA_MASK - (*sn & SN_XDELTA_MASK)
                              : SN_DELTA_MASK - (*sn & SN_DELTA_MASK);
            if (offset < delta)
                delta = offset;
            for (;;) {
                if (!AddToSrcNoteDelta(cx, bce, sn, delta))
                    return false;
                offset -= delta;
                if (offset == 0)
                    break;
                delta = Min(offset, SN_XDELTA_MASK);
                sn = bce->main.notes.begin();
            }
        }
    }

    // The prolog count might have changed, so we can't reuse prologCount.
    // The + 1 is to account for the final SN_MAKE_TERMINATOR that is appended
    // when the notes are copied to their final destination by CopySrcNotes.
    *out = bce->prolog.notes.length() + bce->main.notes.length() + 1;
    return true;
}

/*
 * Copy the prolog notes then the main notes into |destination| and append
 * the terminator. |nsrcnotes| must be the count computed by
 * FinishTakingSrcNotes (all notes + 1 for the terminator).
 */
void
frontend::CopySrcNotes(BytecodeEmitter *bce, jssrcnote *destination, uint32_t nsrcnotes)
{
    unsigned prologCount = bce->prolog.notes.length();
    unsigned mainCount = bce->main.notes.length();
    unsigned totalCount = prologCount + mainCount;
    MOZ_ASSERT(totalCount == nsrcnotes - 1);
    if (prologCount)
        PodCopy(destination, bce->prolog.notes.begin(), prologCount);
    PodCopy(destination + prologCount, bce->main.notes.begin(), mainCount);
    SN_MAKE_TERMINATOR(&destination[totalCount]);
}

/* Copy the accumulated constants into the script's ConstArray. */
void
CGConstList::finish(ConstArray *array)
{
    JS_ASSERT(length() == array->length);

    for (unsigned i = 0; i < length(); i++)
        array->vector[i] = list[i];
}

/*
 * Find the index of the given object for code generator.
 *
 * Since the emitter refers to each parsed object only once, for the index we
 * use the number of already indexed objects.
We also add the object to a list 1.6831 + * to convert the list to a fixed-size array when we complete code generation, 1.6832 + * see js::CGObjectList::finish below. 1.6833 + * 1.6834 + * Most of the objects go to BytecodeEmitter::objectList but for regexp we use 1.6835 + * a separated BytecodeEmitter::regexpList. In this way the emitted index can 1.6836 + * be directly used to store and fetch a reference to a cloned RegExp object 1.6837 + * that shares the same JSRegExp private data created for the object literal in 1.6838 + * objbox. We need a cloned object to hold lastIndex and other direct 1.6839 + * properties that should not be shared among threads sharing a precompiled 1.6840 + * function or script. 1.6841 + * 1.6842 + * If the code being compiled is function code, allocate a reserved slot in 1.6843 + * the cloned function object that shares its precompiled script with other 1.6844 + * cloned function objects and with the compiler-created clone-parent. There 1.6845 + * are nregexps = script->regexps()->length such reserved slots in each 1.6846 + * function object cloned from fun->object. NB: during compilation, a funobj 1.6847 + * slots element must never be allocated, because JSObject::allocSlot could 1.6848 + * hand out one of the slots that should be given to a regexp clone. 1.6849 + * 1.6850 + * If the code being compiled is global code, the cloned regexp are stored in 1.6851 + * fp->vars slot and to protect regexp slots from GC we set fp->nvars to 1.6852 + * nregexps. 1.6853 + * 1.6854 + * The slots initially contain undefined or null. We populate them lazily when 1.6855 + * JSOP_REGEXP is executed for the first time. 1.6856 + * 1.6857 + * Why clone regexp objects? ECMA specifies that when a regular expression 1.6858 + * literal is scanned, a RegExp object is created. 
In the spec, compilation 1.6859 + * and execution happen indivisibly, but in this implementation and many of 1.6860 + * its embeddings, code is precompiled early and re-executed in multiple 1.6861 + * threads, or using multiple global objects, or both, for efficiency. 1.6862 + * 1.6863 + * In such cases, naively following ECMA leads to wrongful sharing of RegExp 1.6864 + * objects, which makes for collisions on the lastIndex property (especially 1.6865 + * for global regexps) and on any ad-hoc properties. Also, __proto__ refers to 1.6866 + * the pre-compilation prototype, a pigeon-hole problem for instanceof tests. 1.6867 + */ 1.6868 +unsigned 1.6869 +CGObjectList::add(ObjectBox *objbox) 1.6870 +{ 1.6871 + JS_ASSERT(!objbox->emitLink); 1.6872 + objbox->emitLink = lastbox; 1.6873 + lastbox = objbox; 1.6874 + return length++; 1.6875 +} 1.6876 + 1.6877 +unsigned 1.6878 +CGObjectList::indexOf(JSObject *obj) 1.6879 +{ 1.6880 + JS_ASSERT(length > 0); 1.6881 + unsigned index = length - 1; 1.6882 + for (ObjectBox *box = lastbox; box->object != obj; box = box->emitLink) 1.6883 + index--; 1.6884 + return index; 1.6885 +} 1.6886 + 1.6887 +void 1.6888 +CGObjectList::finish(ObjectArray *array) 1.6889 +{ 1.6890 + JS_ASSERT(length <= INDEX_LIMIT); 1.6891 + JS_ASSERT(length == array->length); 1.6892 + 1.6893 + js::HeapPtrObject *cursor = array->vector + array->length; 1.6894 + ObjectBox *objbox = lastbox; 1.6895 + do { 1.6896 + --cursor; 1.6897 + JS_ASSERT(!*cursor); 1.6898 + *cursor = objbox->object; 1.6899 + } while ((objbox = objbox->emitLink) != nullptr); 1.6900 + JS_ASSERT(cursor == array->vector); 1.6901 +} 1.6902 + 1.6903 +ObjectBox* 1.6904 +CGObjectList::find(uint32_t index) 1.6905 +{ 1.6906 + JS_ASSERT(index < length); 1.6907 + ObjectBox *box = lastbox; 1.6908 + for (unsigned n = length - 1; n > index; n--) 1.6909 + box = box->emitLink; 1.6910 + return box; 1.6911 +} 1.6912 + 1.6913 +bool 1.6914 +CGTryNoteList::append(JSTryNoteKind kind, uint32_t stackDepth, size_t start, 
size_t end) 1.6915 +{ 1.6916 + JS_ASSERT(start <= end); 1.6917 + JS_ASSERT(size_t(uint32_t(start)) == start); 1.6918 + JS_ASSERT(size_t(uint32_t(end)) == end); 1.6919 + 1.6920 + JSTryNote note; 1.6921 + note.kind = kind; 1.6922 + note.stackDepth = stackDepth; 1.6923 + note.start = uint32_t(start); 1.6924 + note.length = uint32_t(end - start); 1.6925 + 1.6926 + return list.append(note); 1.6927 +} 1.6928 + 1.6929 +void 1.6930 +CGTryNoteList::finish(TryNoteArray *array) 1.6931 +{ 1.6932 + JS_ASSERT(length() == array->length); 1.6933 + 1.6934 + for (unsigned i = 0; i < length(); i++) 1.6935 + array->vector[i] = list[i]; 1.6936 +} 1.6937 + 1.6938 +bool 1.6939 +CGBlockScopeList::append(uint32_t scopeObject, uint32_t offset, uint32_t parent) 1.6940 +{ 1.6941 + BlockScopeNote note; 1.6942 + mozilla::PodZero(¬e); 1.6943 + 1.6944 + note.index = scopeObject; 1.6945 + note.start = offset; 1.6946 + note.parent = parent; 1.6947 + 1.6948 + return list.append(note); 1.6949 +} 1.6950 + 1.6951 +uint32_t 1.6952 +CGBlockScopeList::findEnclosingScope(uint32_t index) 1.6953 +{ 1.6954 + JS_ASSERT(index < length()); 1.6955 + JS_ASSERT(list[index].index != BlockScopeNote::NoBlockScopeIndex); 1.6956 + 1.6957 + DebugOnly<uint32_t> pos = list[index].start; 1.6958 + while (index--) { 1.6959 + JS_ASSERT(list[index].start <= pos); 1.6960 + if (list[index].length == 0) { 1.6961 + // We are looking for the nearest enclosing live scope. If the 1.6962 + // scope contains POS, it should still be open, so its length should 1.6963 + // be zero. 1.6964 + return list[index].index; 1.6965 + } else { 1.6966 + // Conversely, if the length is not zero, it should not contain 1.6967 + // POS. 
1.6968 + JS_ASSERT(list[index].start + list[index].length <= pos); 1.6969 + } 1.6970 + } 1.6971 + 1.6972 + return BlockScopeNote::NoBlockScopeIndex; 1.6973 +} 1.6974 + 1.6975 +void 1.6976 +CGBlockScopeList::recordEnd(uint32_t index, uint32_t offset) 1.6977 +{ 1.6978 + JS_ASSERT(index < length()); 1.6979 + JS_ASSERT(offset >= list[index].start); 1.6980 + JS_ASSERT(list[index].length == 0); 1.6981 + 1.6982 + list[index].length = offset - list[index].start; 1.6983 +} 1.6984 + 1.6985 +void 1.6986 +CGBlockScopeList::finish(BlockScopeArray *array) 1.6987 +{ 1.6988 + JS_ASSERT(length() == array->length); 1.6989 + 1.6990 + for (unsigned i = 0; i < length(); i++) 1.6991 + array->vector[i] = list[i]; 1.6992 +} 1.6993 + 1.6994 +/* 1.6995 + * We should try to get rid of offsetBias (always 0 or 1, where 1 is 1.6996 + * JSOP_{NOP,POP}_LENGTH), which is used only by SRC_FOR. 1.6997 + */ 1.6998 +const JSSrcNoteSpec js_SrcNoteSpec[] = { 1.6999 +#define DEFINE_SRC_NOTE_SPEC(sym, name, arity) { name, arity }, 1.7000 + FOR_EACH_SRC_NOTE_TYPE(DEFINE_SRC_NOTE_SPEC) 1.7001 +#undef DEFINE_SRC_NOTE_SPEC 1.7002 +}; 1.7003 + 1.7004 +static int 1.7005 +SrcNoteArity(jssrcnote *sn) 1.7006 +{ 1.7007 + JS_ASSERT(SN_TYPE(sn) < SRC_LAST); 1.7008 + return js_SrcNoteSpec[SN_TYPE(sn)].arity; 1.7009 +} 1.7010 + 1.7011 +JS_FRIEND_API(unsigned) 1.7012 +js_SrcNoteLength(jssrcnote *sn) 1.7013 +{ 1.7014 + unsigned arity; 1.7015 + jssrcnote *base; 1.7016 + 1.7017 + arity = SrcNoteArity(sn); 1.7018 + for (base = sn++; arity; sn++, arity--) { 1.7019 + if (*sn & SN_4BYTE_OFFSET_FLAG) 1.7020 + sn += 3; 1.7021 + } 1.7022 + return sn - base; 1.7023 +} 1.7024 + 1.7025 +JS_FRIEND_API(ptrdiff_t) 1.7026 +js_GetSrcNoteOffset(jssrcnote *sn, unsigned which) 1.7027 +{ 1.7028 + /* Find the offset numbered which (i.e., skip exactly which offsets). 
*/ 1.7029 + JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA); 1.7030 + JS_ASSERT((int) which < SrcNoteArity(sn)); 1.7031 + for (sn++; which; sn++, which--) { 1.7032 + if (*sn & SN_4BYTE_OFFSET_FLAG) 1.7033 + sn += 3; 1.7034 + } 1.7035 + if (*sn & SN_4BYTE_OFFSET_FLAG) { 1.7036 + return (ptrdiff_t)(((uint32_t)(sn[0] & SN_4BYTE_OFFSET_MASK) << 24) 1.7037 + | (sn[1] << 16) 1.7038 + | (sn[2] << 8) 1.7039 + | sn[3]); 1.7040 + } 1.7041 + return (ptrdiff_t)*sn; 1.7042 +}