|
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- |
|
2 * vim: set ts=8 sts=4 et sw=4 tw=99: |
|
3 * This Source Code Form is subject to the terms of the Mozilla Public |
|
4 * License, v. 2.0. If a copy of the MPL was not distributed with this |
|
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ |
|
6 |
|
7 /* |
|
8 * JS bytecode generation. |
|
9 */ |
|
10 |
|
11 #include "frontend/BytecodeEmitter.h" |
|
12 |
|
13 #include "mozilla/DebugOnly.h" |
|
14 #include "mozilla/FloatingPoint.h" |
|
15 #include "mozilla/PodOperations.h" |
|
16 |
|
17 #include <string.h> |
|
18 |
|
19 #include "jsapi.h" |
|
20 #include "jsatom.h" |
|
21 #include "jscntxt.h" |
|
22 #include "jsfun.h" |
|
23 #include "jsnum.h" |
|
24 #include "jsopcode.h" |
|
25 #include "jsscript.h" |
|
26 #include "jstypes.h" |
|
27 #include "jsutil.h" |
|
28 |
|
29 #include "frontend/Parser.h" |
|
30 #include "frontend/TokenStream.h" |
|
31 #include "jit/AsmJSLink.h" |
|
32 #include "vm/Debugger.h" |
|
33 |
|
34 #include "jsatominlines.h" |
|
35 #include "jsobjinlines.h" |
|
36 #include "jsscriptinlines.h" |
|
37 |
|
38 #include "frontend/ParseMaps-inl.h" |
|
39 #include "frontend/ParseNode-inl.h" |
|
40 #include "vm/ScopeObject-inl.h" |
|
41 |
|
42 using namespace js; |
|
43 using namespace js::gc; |
|
44 using namespace js::frontend; |
|
45 |
|
46 using mozilla::DebugOnly; |
|
47 using mozilla::NumberIsInt32; |
|
48 using mozilla::PodCopy; |
|
49 |
|
50 static bool |
|
51 SetSrcNoteOffset(ExclusiveContext *cx, BytecodeEmitter *bce, unsigned index, unsigned which, ptrdiff_t offset); |
|
52 |
|
/*
 * Bytecode-emitter statement info: extends the parser's StmtInfoBase with the
 * jump-offset bookkeeping needed while emitting code for a statement.
 */
struct frontend::StmtInfoBCE : public StmtInfoBase
{
    StmtInfoBCE     *down;          /* info for enclosing statement */
    StmtInfoBCE     *downScope;     /* next enclosing lexical scope */

    ptrdiff_t       update;         /* loop update offset (top if none) */
    ptrdiff_t       breaks;         /* offset of last break in loop */
    ptrdiff_t       continues;      /* offset of last continue in loop */
    uint32_t        blockScopeIndex; /* index of scope in BlockScopeArray */

    StmtInfoBCE(ExclusiveContext *cx) : StmtInfoBase(cx) {}

    /*
     * To reuse space, alias two of the ptrdiff_t fields for use during
     * try/catch/finally code generation and backpatching.
     *
     * Only a loop, switch, or label statement info record can have breaks and
     * continues, and only a for loop has an update backpatch chain, so it's
     * safe to overlay these for the "trying" StmtTypes.
     */

    // Head of the backpatch chain of JSOP_GOSUBs targeting this finally block
    // (overlays |breaks|, which a finally block never uses).
    ptrdiff_t &gosubs() {
        JS_ASSERT(type == STMT_FINALLY);
        return breaks;
    }

    // Head of the backpatch chain of guard jumps for this try/finally block
    // (overlays |continues|, which these statements never use).
    ptrdiff_t &guardJump() {
        JS_ASSERT(type == STMT_TRY || type == STMT_FINALLY);
        return continues;
    }
};
|
84 |
|
85 |
|
86 namespace { |
|
87 |
|
// Statement info for loops: adds the stack- and loop-depth data used to decide
// whether Ion may perform on-stack replacement (OSR) at this loop.
struct LoopStmtInfo : public StmtInfoBCE
{
    int32_t         stackDepth;     // Stack depth when this loop was pushed.
    uint32_t        loopDepth;      // Loop depth.

    // Can we OSR into Ion from here? True unless there is non-loop state on the stack.
    bool            canIonOsr;

    LoopStmtInfo(ExclusiveContext *cx) : StmtInfoBCE(cx) {}

    // Checked downcast from a StmtInfoBCE known to describe a loop.
    static LoopStmtInfo* fromStmtInfo(StmtInfoBCE *stmt) {
        JS_ASSERT(stmt->isLoop());
        return static_cast<LoopStmtInfo*>(stmt);
    }
};
|
103 |
|
104 } // anonymous namespace |
|
105 |
|
// Construct an emitter for one script. |parent| is the emitter of the
// enclosing script (or null), |prolog| and |main| are the two bytecode
// sections, and emission starts in |main|.
BytecodeEmitter::BytecodeEmitter(BytecodeEmitter *parent,
                                 Parser<FullParseHandler> *parser, SharedContext *sc,
                                 HandleScript script, bool insideEval, HandleScript evalCaller,
                                 bool hasGlobalScope, uint32_t lineNum, EmitterMode emitterMode)
  : sc(sc),
    parent(parent),
    script(sc->context, script),
    prolog(sc->context, lineNum),
    main(sc->context, lineNum),
    current(&main),                 // emit into the main section by default
    parser(parser),
    evalCaller(evalCaller),
    topStmt(nullptr),
    topScopeStmt(nullptr),
    staticScope(sc->context),
    atomIndices(sc->context),
    firstLine(lineNum),
    stackDepth(0), maxStackDepth(0),
    arrayCompDepth(0),
    emitLevel(0),
    constList(sc->context),
    tryNoteList(sc->context),
    blockScopeList(sc->context),
    typesetCount(0),
    hasSingletons(false),
    emittingForInit(false),
    emittingRunOnceLambda(false),
    lazyRunOnceLambda(false),
    insideEval(insideEval),
    hasGlobalScope(hasGlobalScope),
    emitterMode(emitterMode)
{
    // A non-null eval caller only makes sense when compiling eval code.
    JS_ASSERT_IF(evalCaller, insideEval);
}
|
140 |
|
141 bool |
|
142 BytecodeEmitter::init() |
|
143 { |
|
144 return atomIndices.ensureMap(sc->context); |
|
145 } |
|
146 |
|
147 static ptrdiff_t |
|
148 EmitCheck(ExclusiveContext *cx, BytecodeEmitter *bce, ptrdiff_t delta) |
|
149 { |
|
150 ptrdiff_t offset = bce->code().length(); |
|
151 |
|
152 // Start it off moderately large to avoid repeated resizings early on. |
|
153 if (bce->code().capacity() == 0 && !bce->code().reserve(1024)) |
|
154 return -1; |
|
155 |
|
156 jsbytecode dummy = 0; |
|
157 if (!bce->code().appendN(dummy, delta)) { |
|
158 js_ReportOutOfMemory(cx); |
|
159 return -1; |
|
160 } |
|
161 return offset; |
|
162 } |
|
163 |
|
/*
 * Adjust bce->stackDepth for the uses/defs of the opcode just written at
 * offset |target|, keeping maxStackDepth in sync.
 */
static void
UpdateDepth(ExclusiveContext *cx, BytecodeEmitter *bce, ptrdiff_t target)
{
    jsbytecode *pc = bce->code(target);
    JSOp op = (JSOp) *pc;
    const JSCodeSpec *cs = &js_CodeSpec[op];

    if (cs->format & JOF_TMPSLOT_MASK) {
        /*
         * An opcode may temporarily consume stack space during execution.
         * Account for this in maxStackDepth separately from uses/defs here.
         */
        uint32_t depth = (uint32_t) bce->stackDepth +
                         ((cs->format & JOF_TMPSLOT_MASK) >> JOF_TMPSLOT_SHIFT);
        if (depth > bce->maxStackDepth)
            bce->maxStackDepth = depth;
    }

    int nuses = StackUses(nullptr, pc);
    int ndefs = StackDefs(nullptr, pc);

    // Pops happen before pushes, so the depth must stay non-negative in
    // between.
    bce->stackDepth -= nuses;
    JS_ASSERT(bce->stackDepth >= 0);
    bce->stackDepth += ndefs;
    if ((uint32_t)bce->stackDepth > bce->maxStackDepth)
        bce->maxStackDepth = bce->stackDepth;
}
|
191 |
|
192 ptrdiff_t |
|
193 frontend::Emit1(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp op) |
|
194 { |
|
195 ptrdiff_t offset = EmitCheck(cx, bce, 1); |
|
196 if (offset < 0) |
|
197 return -1; |
|
198 |
|
199 jsbytecode *code = bce->code(offset); |
|
200 code[0] = jsbytecode(op); |
|
201 UpdateDepth(cx, bce, offset); |
|
202 return offset; |
|
203 } |
|
204 |
|
205 ptrdiff_t |
|
206 frontend::Emit2(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp op, jsbytecode op1) |
|
207 { |
|
208 ptrdiff_t offset = EmitCheck(cx, bce, 2); |
|
209 if (offset < 0) |
|
210 return -1; |
|
211 |
|
212 jsbytecode *code = bce->code(offset); |
|
213 code[0] = jsbytecode(op); |
|
214 code[1] = op1; |
|
215 UpdateDepth(cx, bce, offset); |
|
216 return offset; |
|
217 } |
|
218 |
|
219 ptrdiff_t |
|
220 frontend::Emit3(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp op, jsbytecode op1, |
|
221 jsbytecode op2) |
|
222 { |
|
223 /* These should filter through EmitVarOp. */ |
|
224 JS_ASSERT(!IsArgOp(op)); |
|
225 JS_ASSERT(!IsLocalOp(op)); |
|
226 |
|
227 ptrdiff_t offset = EmitCheck(cx, bce, 3); |
|
228 if (offset < 0) |
|
229 return -1; |
|
230 |
|
231 jsbytecode *code = bce->code(offset); |
|
232 code[0] = jsbytecode(op); |
|
233 code[1] = op1; |
|
234 code[2] = op2; |
|
235 UpdateDepth(cx, bce, offset); |
|
236 return offset; |
|
237 } |
|
238 |
|
239 ptrdiff_t |
|
240 frontend::EmitN(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp op, size_t extra) |
|
241 { |
|
242 ptrdiff_t length = 1 + (ptrdiff_t)extra; |
|
243 ptrdiff_t offset = EmitCheck(cx, bce, length); |
|
244 if (offset < 0) |
|
245 return -1; |
|
246 |
|
247 jsbytecode *code = bce->code(offset); |
|
248 code[0] = jsbytecode(op); |
|
249 /* The remaining |extra| bytes are set by the caller */ |
|
250 |
|
251 /* |
|
252 * Don't UpdateDepth if op's use-count comes from the immediate |
|
253 * operand yet to be stored in the extra bytes after op. |
|
254 */ |
|
255 if (js_CodeSpec[op].nuses >= 0) |
|
256 UpdateDepth(cx, bce, offset); |
|
257 |
|
258 return offset; |
|
259 } |
|
260 |
|
261 static ptrdiff_t |
|
262 EmitJump(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp op, ptrdiff_t off) |
|
263 { |
|
264 ptrdiff_t offset = EmitCheck(cx, bce, 5); |
|
265 if (offset < 0) |
|
266 return -1; |
|
267 |
|
268 jsbytecode *code = bce->code(offset); |
|
269 code[0] = jsbytecode(op); |
|
270 SET_JUMP_OFFSET(code, off); |
|
271 UpdateDepth(cx, bce, offset); |
|
272 return offset; |
|
273 } |
|
274 |
|
275 static ptrdiff_t |
|
276 EmitCall(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp op, uint16_t argc) |
|
277 { |
|
278 return Emit3(cx, bce, op, ARGC_HI(argc), ARGC_LO(argc)); |
|
279 } |
|
280 |
|
281 // Dup the var in operand stack slot "slot". The first item on the operand |
|
282 // stack is one slot past the last fixed slot. The last (most recent) item is |
|
283 // slot bce->stackDepth - 1. |
|
284 // |
|
285 // The instruction that is written (JSOP_DUPAT) switches the depth around so |
|
286 // that it is addressed from the sp instead of from the fp. This is useful when |
|
287 // you don't know the size of the fixed stack segment (nfixed), as is the case |
|
288 // when compiling scripts (because each statement is parsed and compiled |
|
289 // separately, but they all together form one script with one fixed stack |
|
290 // frame). |
|
291 static bool |
|
292 EmitDupAt(ExclusiveContext *cx, BytecodeEmitter *bce, unsigned slot) |
|
293 { |
|
294 JS_ASSERT(slot < unsigned(bce->stackDepth)); |
|
295 // The slot's position on the operand stack, measured from the top. |
|
296 unsigned slotFromTop = bce->stackDepth - 1 - slot; |
|
297 if (slotFromTop >= JS_BIT(24)) { |
|
298 bce->reportError(nullptr, JSMSG_TOO_MANY_LOCALS); |
|
299 return false; |
|
300 } |
|
301 ptrdiff_t off = EmitN(cx, bce, JSOP_DUPAT, 3); |
|
302 if (off < 0) |
|
303 return false; |
|
304 jsbytecode *pc = bce->code(off); |
|
305 SET_UINT24(pc, slotFromTop); |
|
306 return true; |
|
307 } |
|
308 |
|
/* XXX too many "... statement" L10N gaffes below -- fix via js.msg! */
const char js_with_statement_str[] = "with statement";
const char js_finally_block_str[] = "finally block";
const char js_script_str[] = "script";

// Human-readable statement names, indexed by StmtType; used when reporting
// errors such as "statement too large".
static const char * const statementName[] = {
    "label statement",       /* LABEL */
    "if statement",          /* IF */
    "else statement",        /* ELSE */
    "destructuring body",    /* BODY */
    "switch statement",      /* SWITCH */
    "block",                 /* BLOCK */
    js_with_statement_str,   /* WITH */
    "catch block",           /* CATCH */
    "try block",             /* TRY */
    js_finally_block_str,    /* FINALLY */
    js_finally_block_str,    /* SUBROUTINE */
    "do loop",               /* DO_LOOP */
    "for loop",              /* FOR_LOOP */
    "for/in loop",           /* FOR_IN_LOOP */
    "for/of loop",           /* FOR_OF_LOOP */
    "while loop",            /* WHILE_LOOP */
};

// The table must have exactly one entry per StmtType.
JS_STATIC_ASSERT(JS_ARRAY_LENGTH(statementName) == STMT_LIMIT);
|
334 |
|
335 static const char * |
|
336 StatementName(StmtInfoBCE *topStmt) |
|
337 { |
|
338 if (!topStmt) |
|
339 return js_script_str; |
|
340 return statementName[topStmt->type]; |
|
341 } |
|
342 |
|
343 static void |
|
344 ReportStatementTooLarge(TokenStream &ts, StmtInfoBCE *topStmt) |
|
345 { |
|
346 ts.reportError(JSMSG_NEED_DIET, StatementName(topStmt)); |
|
347 } |
|
348 |
|
349 /* |
|
350 * Emit a backpatch op with offset pointing to the previous jump of this type, |
|
351 * so that we can walk back up the chain fixing up the op and jump offset. |
|
352 */ |
|
353 static ptrdiff_t |
|
354 EmitBackPatchOp(ExclusiveContext *cx, BytecodeEmitter *bce, ptrdiff_t *lastp) |
|
355 { |
|
356 ptrdiff_t offset, delta; |
|
357 |
|
358 offset = bce->offset(); |
|
359 delta = offset - *lastp; |
|
360 *lastp = offset; |
|
361 JS_ASSERT(delta > 0); |
|
362 return EmitJump(cx, bce, JSOP_BACKPATCH, delta); |
|
363 } |
|
364 |
|
365 static inline unsigned |
|
366 LengthOfSetLine(unsigned line) |
|
367 { |
|
368 return 1 /* SN_SETLINE */ + (line > SN_4BYTE_OFFSET_MASK ? 4 : 1); |
|
369 } |
|
370 |
|
/* Updates line number notes, not column notes. */
static inline bool
UpdateLineNumberNotes(ExclusiveContext *cx, BytecodeEmitter *bce, uint32_t offset)
{
    TokenStream *ts = &bce->parser->tokenStream;
    if (!ts->srcCoords.isOnThisLine(offset, bce->currentLine())) {
        unsigned line = ts->srcCoords.lineNum(offset);
        unsigned delta = line - bce->currentLine();

        /*
         * Encode any change in the current source line number by using
         * either several SRC_NEWLINE notes or just one SRC_SETLINE note,
         * whichever consumes less space.
         *
         * NB: We handle backward line number deltas (possible with for
         * loops where the update part is emitted after the body, but its
         * line number is <= any line number in the body) here by letting
         * unsigned delta wrap to a very large number, which triggers a
         * SRC_SETLINE.
         */
        bce->current->currentLine = line;
        bce->current->lastColumn = 0;   // column notes restart on a new line
        if (delta >= LengthOfSetLine(line)) {
            if (NewSrcNote2(cx, bce, SRC_SETLINE, (ptrdiff_t)line) < 0)
                return false;
        } else {
            // A short forward step: one SRC_NEWLINE note per line is smaller
            // than a SETLINE.
            do {
                if (NewSrcNote(cx, bce, SRC_NEWLINE) < 0)
                    return false;
            } while (--delta != 0);
        }
    }
    return true;
}
|
405 |
|
/* A function, so that we avoid macro-bloating all the other callsites. */
static bool
UpdateSourceCoordNotes(ExclusiveContext *cx, BytecodeEmitter *bce, uint32_t offset)
{
    if (!UpdateLineNumberNotes(cx, bce, offset))
        return false;

    // Record the column movement since the last note as a SRC_COLSPAN note.
    uint32_t columnIndex = bce->parser->tokenStream.srcCoords.columnIndex(offset);
    ptrdiff_t colspan = ptrdiff_t(columnIndex) - ptrdiff_t(bce->current->lastColumn);
    if (colspan != 0) {
        if (colspan < 0) {
            // Backward movement is encoded by wrapping into the upper half
            // of the colspan domain.
            colspan += SN_COLSPAN_DOMAIN;
        } else if (colspan >= SN_COLSPAN_DOMAIN / 2) {
            // If the column span is so large that we can't store it, then just
            // discard this information because column information would most
            // likely be useless anyway once the column numbers are ~4000000.
            // This has been known to happen with scripts that have been
            // minimized and put into all one line.
            return true;
        }
        if (NewSrcNote2(cx, bce, SRC_COLSPAN, colspan) < 0)
            return false;
        bce->current->lastColumn = columnIndex;
    }
    return true;
}
|
432 |
|
433 static ptrdiff_t |
|
434 EmitLoopHead(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *nextpn) |
|
435 { |
|
436 if (nextpn) { |
|
437 /* |
|
438 * Try to give the JSOP_LOOPHEAD the same line number as the next |
|
439 * instruction. nextpn is often a block, in which case the next |
|
440 * instruction typically comes from the first statement inside. |
|
441 */ |
|
442 JS_ASSERT_IF(nextpn->isKind(PNK_STATEMENTLIST), nextpn->isArity(PN_LIST)); |
|
443 if (nextpn->isKind(PNK_STATEMENTLIST) && nextpn->pn_head) |
|
444 nextpn = nextpn->pn_head; |
|
445 if (!UpdateSourceCoordNotes(cx, bce, nextpn->pn_pos.begin)) |
|
446 return -1; |
|
447 } |
|
448 |
|
449 return Emit1(cx, bce, JSOP_LOOPHEAD); |
|
450 } |
|
451 |
|
452 static bool |
|
453 EmitLoopEntry(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *nextpn) |
|
454 { |
|
455 if (nextpn) { |
|
456 /* Update the line number, as for LOOPHEAD. */ |
|
457 JS_ASSERT_IF(nextpn->isKind(PNK_STATEMENTLIST), nextpn->isArity(PN_LIST)); |
|
458 if (nextpn->isKind(PNK_STATEMENTLIST) && nextpn->pn_head) |
|
459 nextpn = nextpn->pn_head; |
|
460 if (!UpdateSourceCoordNotes(cx, bce, nextpn->pn_pos.begin)) |
|
461 return false; |
|
462 } |
|
463 |
|
464 LoopStmtInfo *loop = LoopStmtInfo::fromStmtInfo(bce->topStmt); |
|
465 JS_ASSERT(loop->loopDepth > 0); |
|
466 |
|
467 uint8_t loopDepthAndFlags = PackLoopEntryDepthHintAndFlags(loop->loopDepth, loop->canIonOsr); |
|
468 return Emit2(cx, bce, JSOP_LOOPENTRY, loopDepthAndFlags) >= 0; |
|
469 } |
|
470 |
|
471 /* |
|
472 * If op is JOF_TYPESET (see the type barriers comment in jsinfer.h), reserve |
|
473 * a type set to store its result. |
|
474 */ |
|
475 static inline void |
|
476 CheckTypeSet(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp op) |
|
477 { |
|
478 if (js_CodeSpec[op].format & JOF_TYPESET) { |
|
479 if (bce->typesetCount < UINT16_MAX) |
|
480 bce->typesetCount++; |
|
481 } |
|
482 } |
|
483 |
|
/*
 * Macro to emit a bytecode followed by a uint16_t immediate operand stored in
 * big-endian order.
 *
 * NB: We use cx and bce from our caller's lexical environment, and return
 * false on error. This non-local return is why it's a macro, not a function.
 */
#define EMIT_UINT16_IMM_OP(op, i)                                             \
    JS_BEGIN_MACRO                                                            \
        if (Emit3(cx, bce, op, UINT16_HI(i), UINT16_LO(i)) < 0)               \
            return false;                                                     \
        CheckTypeSet(cx, bce, op);                                            \
    JS_END_MACRO
|
497 |
|
// Emit a single JSOP_POPN discarding the |*npops| accumulated stack slots,
// then reset the counter. Returns false on emit failure (the macro returns
// from this function directly on error).
static bool
FlushPops(ExclusiveContext *cx, BytecodeEmitter *bce, int *npops)
{
    JS_ASSERT(*npops != 0);
    EMIT_UINT16_IMM_OP(JSOP_POPN, *npops);
    *npops = 0;
    return true;
}
|
506 |
|
507 static bool |
|
508 PopIterator(ExclusiveContext *cx, BytecodeEmitter *bce) |
|
509 { |
|
510 if (Emit1(cx, bce, JSOP_ENDITER) < 0) |
|
511 return false; |
|
512 return true; |
|
513 } |
|
514 |
|
515 namespace { |
|
516 |
|
// RAII helper used while emitting a non-local jump (e.g. break/continue/
// return crossing enclosing statements). It appends temporary block-scope
// notes for the scopes being exited, then on destruction closes those notes
// and restores the emitter's stack depth.
class NonLocalExitScope {
    ExclusiveContext *cx;
    BytecodeEmitter *bce;
    const uint32_t savedScopeIndex;   // blockScopeList length on entry
    const int savedDepth;             // operand stack depth on entry
    uint32_t openScopeIndex;          // innermost open scope note, or UINT32_MAX

    NonLocalExitScope(const NonLocalExitScope &) MOZ_DELETE;

  public:
    explicit NonLocalExitScope(ExclusiveContext *cx_, BytecodeEmitter *bce_)
      : cx(cx_),
        bce(bce_),
        savedScopeIndex(bce->blockScopeList.length()),
        savedDepth(bce->stackDepth),
        openScopeIndex(UINT32_MAX) {
        if (bce->staticScope) {
            // Find the scope note of the innermost enclosing nested scope.
            StmtInfoBCE *stmt = bce->topStmt;
            while (1) {
                JS_ASSERT(stmt);
                if (stmt->isNestedScope) {
                    openScopeIndex = stmt->blockScopeIndex;
                    break;
                }
                stmt = stmt->down;
            }
        }
    }

    ~NonLocalExitScope() {
        // Close every scope note this jump opened, then restore the depth
        // consumed by the emitted fixup code.
        for (uint32_t n = savedScopeIndex; n < bce->blockScopeList.length(); n++)
            bce->blockScopeList.recordEnd(n, bce->offset());
        bce->stackDepth = savedDepth;
    }

    // Open a scope note for the scope enclosing the block being popped, so
    // the static scope recorded for the fixup code's PC range is correct.
    bool popScopeForNonLocalExit(uint32_t blockScopeIndex) {
        uint32_t scopeObjectIndex = bce->blockScopeList.findEnclosingScope(blockScopeIndex);
        uint32_t parent = openScopeIndex;

        if (!bce->blockScopeList.append(scopeObjectIndex, bce->offset(), parent))
            return false;
        openScopeIndex = bce->blockScopeList.length() - 1;
        return true;
    }

    bool prepareForNonLocalJump(StmtInfoBCE *toStmt);
};
|
564 |
|
/*
 * Emit additional bytecode(s) for non-local jumps: for every statement
 * between the current top and |toStmt| (exclusive), emit whatever cleanup
 * that statement requires before control can leave it.
 */
bool
NonLocalExitScope::prepareForNonLocalJump(StmtInfoBCE *toStmt)
{
    int npops = 0;

// Accumulate plain value pops and flush them as a single JSOP_POPN.
#define FLUSH_POPS() if (npops && !FlushPops(cx, bce, &npops)) return false

    for (StmtInfoBCE *stmt = bce->topStmt; stmt != toStmt; stmt = stmt->down) {
        switch (stmt->type) {
          case STMT_FINALLY:
            // The finally block must run before we leave: emit a backpatched
            // gosub into it.
            FLUSH_POPS();
            if (EmitBackPatchOp(cx, bce, &stmt->gosubs()) < 0)
                return false;
            break;

          case STMT_WITH:
            if (Emit1(cx, bce, JSOP_LEAVEWITH) < 0)
                return false;
            JS_ASSERT(stmt->isNestedScope);
            if (!popScopeForNonLocalExit(stmt->blockScopeIndex))
                return false;
            break;

          case STMT_FOR_OF_LOOP:
            // A for-of loop keeps two values on the operand stack; discard
            // them.
            npops += 2;
            break;

          case STMT_FOR_IN_LOOP:
            // Close the live iterator (JSOP_ENDITER via PopIterator).
            FLUSH_POPS();
            if (!PopIterator(cx, bce))
                return false;
            break;

          case STMT_SUBROUTINE:
            /*
             * There's a [exception or hole, retsub pc-index] pair on the
             * stack that we need to pop.
             */
            npops += 2;
            break;

          default:;
        }

        if (stmt->isBlockScope) {
            JS_ASSERT(stmt->isNestedScope);
            StaticBlockObject &blockObj = stmt->staticBlock();
            if (Emit1(cx, bce, JSOP_DEBUGLEAVEBLOCK) < 0)
                return false;
            if (!popScopeForNonLocalExit(stmt->blockScopeIndex))
                return false;
            if (blockObj.needsClone()) {
                // Only cloned (aliased) blocks have a run-time scope object
                // to pop.
                if (Emit1(cx, bce, JSOP_POPBLOCKSCOPE) < 0)
                    return false;
            }
        }
    }

    FLUSH_POPS();
    return true;

#undef FLUSH_POPS
}
|
631 |
|
632 } // anonymous namespace |
|
633 |
|
634 static ptrdiff_t |
|
635 EmitGoto(ExclusiveContext *cx, BytecodeEmitter *bce, StmtInfoBCE *toStmt, ptrdiff_t *lastp, |
|
636 SrcNoteType noteType = SRC_NULL) |
|
637 { |
|
638 NonLocalExitScope nle(cx, bce); |
|
639 |
|
640 if (!nle.prepareForNonLocalJump(toStmt)) |
|
641 return -1; |
|
642 |
|
643 if (noteType != SRC_NULL) { |
|
644 if (NewSrcNote(cx, bce, noteType) < 0) |
|
645 return -1; |
|
646 } |
|
647 |
|
648 return EmitBackPatchOp(cx, bce, lastp); |
|
649 } |
|
650 |
|
651 static bool |
|
652 BackPatch(ExclusiveContext *cx, BytecodeEmitter *bce, ptrdiff_t last, jsbytecode *target, jsbytecode op) |
|
653 { |
|
654 jsbytecode *pc, *stop; |
|
655 ptrdiff_t delta, span; |
|
656 |
|
657 pc = bce->code(last); |
|
658 stop = bce->code(-1); |
|
659 while (pc != stop) { |
|
660 delta = GET_JUMP_OFFSET(pc); |
|
661 span = target - pc; |
|
662 SET_JUMP_OFFSET(pc, span); |
|
663 *pc = op; |
|
664 pc -= delta; |
|
665 } |
|
666 return true; |
|
667 } |
|
668 |
|
/*
 * Initialize a statement's loop bookkeeping: the update offset starts at
 * |top| and the break/continue backpatch chains start empty (-1).
 */
#define SET_STATEMENT_TOP(stmt, top)                                          \
    ((stmt)->update = (top), (stmt)->breaks = (stmt)->continues = (-1))
|
671 |
|
// Shared helper: initialize |stmt|'s offsets/backpatch chains and link it
// onto the emitter's statement stack.
static void
PushStatementInner(BytecodeEmitter *bce, StmtInfoBCE *stmt, StmtType type, ptrdiff_t top)
{
    SET_STATEMENT_TOP(stmt, top);
    PushStatement(bce, stmt, type);
}
|
678 |
|
// Push a non-loop statement. Loops must use PushLoopStatement so their
// OSR/depth bookkeeping is set up.
static void
PushStatementBCE(BytecodeEmitter *bce, StmtInfoBCE *stmt, StmtType type, ptrdiff_t top)
{
    PushStatementInner(bce, stmt, type, top);
    JS_ASSERT(!stmt->isLoop());
}
|
685 |
|
// Push a loop statement, computing its nesting depth and whether Ion may OSR
// into it.
static void
PushLoopStatement(BytecodeEmitter *bce, LoopStmtInfo *stmt, StmtType type, ptrdiff_t top)
{
    PushStatementInner(bce, stmt, type, top);
    JS_ASSERT(stmt->isLoop());

    // Find the innermost enclosing loop, if any.
    LoopStmtInfo *downLoop = nullptr;
    for (StmtInfoBCE *outer = stmt->down; outer; outer = outer->down) {
        if (outer->isLoop()) {
            downLoop = LoopStmtInfo::fromStmtInfo(outer);
            break;
        }
    }

    stmt->stackDepth = bce->stackDepth;
    stmt->loopDepth = downLoop ? downLoop->loopDepth + 1 : 1;

    // Stack slots the loop itself keeps live: two for for-of, one for for-in
    // (matching the pops in NonLocalExitScope::prepareForNonLocalJump).
    int loopSlots;
    if (type == STMT_FOR_OF_LOOP)
        loopSlots = 2;
    else if (type == STMT_FOR_IN_LOOP)
        loopSlots = 1;
    else
        loopSlots = 0;

    // OSR is allowed only when everything on the operand stack belongs to
    // enclosing loops (no other statement's temporary state is live).
    if (downLoop)
        stmt->canIonOsr = (downLoop->canIonOsr &&
                           stmt->stackDepth == downLoop->stackDepth + loopSlots);
    else
        stmt->canIonOsr = stmt->stackDepth == loopSlots;
}
|
717 |
|
718 /* |
|
719 * Return the enclosing lexical scope, which is the innermost enclosing static |
|
720 * block object or compiler created function. |
|
721 */ |
|
722 static JSObject * |
|
723 EnclosingStaticScope(BytecodeEmitter *bce) |
|
724 { |
|
725 if (bce->staticScope) |
|
726 return bce->staticScope; |
|
727 |
|
728 if (!bce->sc->isFunctionBox()) { |
|
729 JS_ASSERT(!bce->parent); |
|
730 return nullptr; |
|
731 } |
|
732 |
|
733 return bce->sc->asFunctionBox()->function(); |
|
734 } |
|
735 |
|
#ifdef DEBUG
/* Debug-only: true iff every variable in |obj| is marked aliased. */
static bool
AllLocalsAliased(StaticBlockObject &obj)
{
    unsigned count = obj.numVariables();
    for (unsigned i = 0; i < count; i++) {
        if (!obj.isAliased(i))
            return false;
    }
    return true;
}
#endif
|
746 |
|
// For each definition in the block, rewrite its cookie from a block-relative
// slot to a frame-local index and record on the block object whether the
// binding is aliased (and so must live on the scope chain).
static bool
ComputeAliasedSlots(ExclusiveContext *cx, BytecodeEmitter *bce, Handle<StaticBlockObject *> blockObj)
{
    for (unsigned i = 0; i < blockObj->numVariables(); i++) {
        Definition *dn = blockObj->definitionParseNode(i);

        JS_ASSERT(dn->isDefn());
        if (!dn->pn_cookie.set(bce->parser->tokenStream, dn->pn_cookie.level(),
                               blockObj->blockIndexToLocalIndex(dn->frameSlot())))
        {
            return false;
        }

#ifdef DEBUG
        // No use of this definition should have been bound to a slot yet.
        for (ParseNode *pnu = dn->dn_uses; pnu; pnu = pnu->pn_link) {
            JS_ASSERT(pnu->pn_lexdef == dn);
            JS_ASSERT(!(pnu->pn_dflags & PND_BOUND));
            JS_ASSERT(pnu->pn_cookie.isFree());
        }
#endif

        blockObj->setAliased(i, bce->isAliasedName(dn));
    }

    // If the context forces aliasing (e.g. debug mode), everything must have
    // been marked aliased above.
    JS_ASSERT_IF(bce->sc->allLocalsAliased(), AllLocalsAliased(*blockObj));

    return true;
}
|
775 |
|
776 static bool |
|
777 EmitInternedObjectOp(ExclusiveContext *cx, uint32_t index, JSOp op, BytecodeEmitter *bce); |
|
778 |
|
// In a function, block-scoped locals go after the vars, and form part of the
// fixed part of a stack frame.  Outside a function, there are no fixed vars,
// but block-scoped locals still form part of the fixed part of a stack frame
// and are thus addressable via GETLOCAL and friends.
static void
ComputeLocalOffset(ExclusiveContext *cx, BytecodeEmitter *bce, Handle<StaticBlockObject *> blockObj)
{
    unsigned nfixedvars = bce->sc->isFunctionBox() ? bce->script->bindings.numVars() : 0;
    unsigned localOffset = nfixedvars;

    if (bce->staticScope) {
        // Start this block's locals just past those of the innermost
        // enclosing static block, if there is one.
        Rooted<NestedScopeObject *> outer(cx, bce->staticScope);
        for (; outer; outer = outer->enclosingNestedScope()) {
            if (outer->is<StaticBlockObject>()) {
                StaticBlockObject &outerBlock = outer->as<StaticBlockObject>();
                localOffset = outerBlock.localOffset() + outerBlock.numVariables();
                break;
            }
        }
    }

    // Everything must fit within the frame's reserved fixed slots.
    JS_ASSERT(localOffset + blockObj->numVariables()
              <= nfixedvars + bce->script->bindings.numBlockScoped());

    blockObj->setLocalOffset(localOffset);
}
|
805 |
|
806 // ~ Nested Scopes ~ |
|
807 // |
|
808 // A nested scope is a region of a compilation unit (function, script, or eval |
|
809 // code) with an additional node on the scope chain. This node may either be a |
|
810 // "with" object or a "block" object. "With" objects represent "with" scopes. |
|
811 // Block objects represent lexical scopes, and contain named block-scoped |
|
812 // bindings, for example "let" bindings or the exception in a catch block. |
|
813 // Those variables may be local and thus accessible directly from the stack, or |
|
814 // "aliased" (accessed by name from nested functions, or dynamically via nested |
|
815 // "eval" or "with") and only accessible through the scope chain. |
|
816 // |
|
817 // All nested scopes are present on the "static scope chain". A nested scope |
|
818 // that is a "with" scope will be present on the scope chain at run-time as |
|
819 // well. A block scope may or may not have a corresponding link on the run-time |
|
820 // scope chain; if no variable declared in the block scope is "aliased", then no |
|
821 // scope chain node is allocated. |
|
822 // |
|
823 // To help debuggers, the bytecode emitter arranges to record the PC ranges |
|
824 // comprehended by a nested scope, and ultimately attach them to the JSScript. |
|
825 // An element in the "block scope array" specifies the PC range, and links to a |
|
826 // NestedScopeObject in the object list of the script. That scope object is |
|
827 // linked to the previous link in the static scope chain, if any. The static |
|
828 // scope chain at any pre-retire PC can be retrieved using |
|
829 // JSScript::getStaticScope(jsbytecode *pc). |
|
830 // |
|
831 // Block scopes store their locals in the fixed part of a stack frame, after the |
|
832 // "fixed var" bindings. A fixed var binding is a "var" or legacy "const" |
|
833 // binding that occurs in a function (as opposed to a script or in eval code). |
|
834 // Only functions have fixed var bindings. |
|
835 // |
|
836 // To assist the debugger, we emit a DEBUGLEAVEBLOCK opcode before leaving a |
|
837 // block scope, even if the block has no aliased locals. This allows |
|
838 // DebugScopes to invalidate any association between a debugger scope object, |
|
839 // which can proxy access to unaliased stack locals, and the actual live frame. |
|
840 // In normal, non-debug mode, this opcode does not cause any baseline code to be |
|
841 // emitted. |
|
842 // |
|
843 // Enter a nested scope with EnterNestedScope. It will emit |
|
844 // PUSHBLOCKSCOPE/ENTERWITH if needed, and arrange to record the PC bounds of |
|
845 // the scope. Leave a nested scope with LeaveNestedScope, which, for blocks, |
|
846 // will emit DEBUGLEAVEBLOCK and may emit POPBLOCKSCOPE. (For "with" scopes it |
|
847 // emits LEAVEWITH, of course.) Pass EnterNestedScope a fresh StmtInfoBCE |
|
848 // object, and pass that same object to the corresponding LeaveNestedScope. If |
|
849 // the statement is a block scope, pass STMT_BLOCK as stmtType; otherwise for |
|
850 // with scopes pass STMT_WITH. |
|
851 // |
|
// Enter a block or with scope: set up its locals/aliasing (blocks), emit the
// scope-entering op if one is needed, open its note in the block scope array,
// and push it on the statement stack. See the "Nested Scopes" comment above.
static bool
EnterNestedScope(ExclusiveContext *cx, BytecodeEmitter *bce, StmtInfoBCE *stmt, ObjectBox *objbox,
                 StmtType stmtType)
{
    Rooted<NestedScopeObject *> scopeObj(cx, &objbox->object->as<NestedScopeObject>());
    uint32_t scopeObjectIndex = bce->objectList.add(objbox);

    switch (stmtType) {
      case STMT_BLOCK: {
        Rooted<StaticBlockObject *> blockObj(cx, &scopeObj->as<StaticBlockObject>());

        // Assign frame slots for the block's locals and decide which must be
        // aliased via the scope chain.
        ComputeLocalOffset(cx, bce, blockObj);

        if (!ComputeAliasedSlots(cx, bce, blockObj))
            return false;

        // Only blocks with aliased bindings get a run-time scope object.
        if (blockObj->needsClone()) {
            if (!EmitInternedObjectOp(cx, scopeObjectIndex, JSOP_PUSHBLOCKSCOPE, bce))
                return false;
        }
        break;
      }
      case STMT_WITH:
        JS_ASSERT(scopeObj->is<StaticWithObject>());
        if (!EmitInternedObjectOp(cx, scopeObjectIndex, JSOP_ENTERWITH, bce))
            return false;
        break;
      default:
        MOZ_ASSUME_UNREACHABLE();
    }

    // Find the enclosing scope's note index to use as the parent link.
    uint32_t parent = BlockScopeNote::NoBlockScopeIndex;
    if (StmtInfoBCE *stmt = bce->topScopeStmt) {
        for (; stmt->staticScope != bce->staticScope; stmt = stmt->down) {}
        parent = stmt->blockScopeIndex;
    }

    stmt->blockScopeIndex = bce->blockScopeList.length();
    if (!bce->blockScopeList.append(scopeObjectIndex, bce->offset(), parent))
        return false;

    PushStatementBCE(bce, stmt, stmtType, bce->offset());
    scopeObj->initEnclosingNestedScope(EnclosingStaticScope(bce));
    FinishPushNestedScope(bce, stmt, *scopeObj);
    JS_ASSERT(stmt->isNestedScope);
    stmt->isBlockScope = (stmtType == STMT_BLOCK);

    return true;
}
|
901 |
|
// Patches |breaks| and |continues| unless the top statement info record
// represents a try-catch-finally suite. May fail if a jump offset overflows.
static bool
PopStatementBCE(ExclusiveContext *cx, BytecodeEmitter *bce)
{
    StmtInfoBCE *stmt = bce->topStmt;
    // Breaks jump to the current end of code; continues jump back to the
    // statement's update point.
    if (!stmt->isTrying() &&
        (!BackPatch(cx, bce, stmt->breaks, bce->code().end(), JSOP_GOTO) ||
         !BackPatch(cx, bce, stmt->continues, bce->code(stmt->update), JSOP_GOTO)))
    {
        return false;
    }

    FinishPopStatement(bce);
    return true;
}
|
918 |
|
// Leave the innermost nested scope statement (a block scope or a with
// scope): pop the statement record, emit the matching leave op, and close
// out the scope's entry in the block scope note list.
static bool
LeaveNestedScope(ExclusiveContext *cx, BytecodeEmitter *bce, StmtInfoBCE *stmt)
{
    JS_ASSERT(stmt == bce->topStmt);
    JS_ASSERT(stmt->isNestedScope);
    JS_ASSERT(stmt->isBlockScope == !(stmt->type == STMT_WITH));
    uint32_t blockScopeIndex = stmt->blockScopeIndex;

#ifdef DEBUG
    // The scope note must still be open (its length not yet recorded) and
    // must refer to the same static scope object the emitter is tracking.
    JS_ASSERT(bce->blockScopeList.list[blockScopeIndex].length == 0);
    uint32_t blockObjIndex = bce->blockScopeList.list[blockScopeIndex].index;
    ObjectBox *blockObjBox = bce->objectList.find(blockObjIndex);
    NestedScopeObject *staticScope = &blockObjBox->object->as<NestedScopeObject>();
    JS_ASSERT(stmt->staticScope == staticScope);
    JS_ASSERT(staticScope == bce->staticScope);
    JS_ASSERT_IF(!stmt->isBlockScope, staticScope->is<StaticWithObject>());
#endif

    if (!PopStatementBCE(cx, bce))
        return false;

    if (Emit1(cx, bce, stmt->isBlockScope ? JSOP_DEBUGLEAVEBLOCK : JSOP_LEAVEWITH) < 0)
        return false;

    // Close the scope note now that the leave op has been emitted.
    bce->blockScopeList.recordEnd(blockScopeIndex, bce->offset());

    // Only block scopes that were actually cloned onto the runtime scope
    // chain need an explicit pop op.
    if (stmt->isBlockScope && stmt->staticScope->as<StaticBlockObject>().needsClone()) {
        if (Emit1(cx, bce, JSOP_POPBLOCKSCOPE) < 0)
            return false;
    }

    return true;
}
|
952 |
|
953 static bool |
|
954 EmitIndex32(ExclusiveContext *cx, JSOp op, uint32_t index, BytecodeEmitter *bce) |
|
955 { |
|
956 const size_t len = 1 + UINT32_INDEX_LEN; |
|
957 JS_ASSERT(len == size_t(js_CodeSpec[op].length)); |
|
958 ptrdiff_t offset = EmitCheck(cx, bce, len); |
|
959 if (offset < 0) |
|
960 return false; |
|
961 |
|
962 jsbytecode *code = bce->code(offset); |
|
963 code[0] = jsbytecode(op); |
|
964 SET_UINT32_INDEX(code, index); |
|
965 UpdateDepth(cx, bce, offset); |
|
966 CheckTypeSet(cx, bce, op); |
|
967 return true; |
|
968 } |
|
969 |
|
970 static bool |
|
971 EmitIndexOp(ExclusiveContext *cx, JSOp op, uint32_t index, BytecodeEmitter *bce) |
|
972 { |
|
973 const size_t len = js_CodeSpec[op].length; |
|
974 JS_ASSERT(len >= 1 + UINT32_INDEX_LEN); |
|
975 ptrdiff_t offset = EmitCheck(cx, bce, len); |
|
976 if (offset < 0) |
|
977 return false; |
|
978 |
|
979 jsbytecode *code = bce->code(offset); |
|
980 code[0] = jsbytecode(op); |
|
981 SET_UINT32_INDEX(code, index); |
|
982 UpdateDepth(cx, bce, offset); |
|
983 CheckTypeSet(cx, bce, op); |
|
984 return true; |
|
985 } |
|
986 |
|
987 static bool |
|
988 EmitAtomOp(ExclusiveContext *cx, JSAtom *atom, JSOp op, BytecodeEmitter *bce) |
|
989 { |
|
990 JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM); |
|
991 |
|
992 if (op == JSOP_GETPROP && atom == cx->names().length) { |
|
993 /* Specialize length accesses for the interpreter. */ |
|
994 op = JSOP_LENGTH; |
|
995 } |
|
996 |
|
997 jsatomid index; |
|
998 if (!bce->makeAtomIndex(atom, &index)) |
|
999 return false; |
|
1000 |
|
1001 return EmitIndexOp(cx, op, index, bce); |
|
1002 } |
|
1003 |
|
1004 static bool |
|
1005 EmitAtomOp(ExclusiveContext *cx, ParseNode *pn, JSOp op, BytecodeEmitter *bce) |
|
1006 { |
|
1007 JS_ASSERT(pn->pn_atom != nullptr); |
|
1008 return EmitAtomOp(cx, pn->pn_atom, op, bce); |
|
1009 } |
|
1010 |
|
1011 static bool |
|
1012 EmitInternedObjectOp(ExclusiveContext *cx, uint32_t index, JSOp op, BytecodeEmitter *bce) |
|
1013 { |
|
1014 JS_ASSERT(JOF_OPTYPE(op) == JOF_OBJECT); |
|
1015 JS_ASSERT(index < bce->objectList.length); |
|
1016 return EmitIndex32(cx, op, index, bce); |
|
1017 } |
|
1018 |
|
1019 static bool |
|
1020 EmitObjectOp(ExclusiveContext *cx, ObjectBox *objbox, JSOp op, BytecodeEmitter *bce) |
|
1021 { |
|
1022 return EmitInternedObjectOp(cx, bce->objectList.add(objbox), op, bce); |
|
1023 } |
|
1024 |
|
1025 static bool |
|
1026 EmitRegExp(ExclusiveContext *cx, uint32_t index, BytecodeEmitter *bce) |
|
1027 { |
|
1028 return EmitIndex32(cx, JSOP_REGEXP, index, bce); |
|
1029 } |
|
1030 |
|
1031 /* |
|
1032 * To catch accidental misuse, EMIT_UINT16_IMM_OP/Emit3 assert that they are |
|
1033 * not used to unconditionally emit JSOP_GETLOCAL. Variable access should |
|
1034 * instead be emitted using EmitVarOp. In special cases, when the caller |
|
1035 * definitely knows that a given local slot is unaliased, this function may be |
|
1036 * used as a non-asserting version of EMIT_UINT16_IMM_OP. |
|
1037 */ |
|
1038 static bool |
|
1039 EmitUnaliasedVarOp(ExclusiveContext *cx, JSOp op, uint32_t slot, BytecodeEmitter *bce) |
|
1040 { |
|
1041 JS_ASSERT(JOF_OPTYPE(op) != JOF_SCOPECOORD); |
|
1042 |
|
1043 if (IsLocalOp(op)) { |
|
1044 ptrdiff_t off = EmitN(cx, bce, op, LOCALNO_LEN); |
|
1045 if (off < 0) |
|
1046 return false; |
|
1047 |
|
1048 SET_LOCALNO(bce->code(off), slot); |
|
1049 return true; |
|
1050 } |
|
1051 |
|
1052 JS_ASSERT(IsArgOp(op)); |
|
1053 ptrdiff_t off = EmitN(cx, bce, op, ARGNO_LEN); |
|
1054 if (off < 0) |
|
1055 return false; |
|
1056 |
|
1057 SET_ARGNO(bce->code(off), slot); |
|
1058 return true; |
|
1059 } |
|
1060 |
|
1061 static bool |
|
1062 EmitAliasedVarOp(ExclusiveContext *cx, JSOp op, ScopeCoordinate sc, BytecodeEmitter *bce) |
|
1063 { |
|
1064 JS_ASSERT(JOF_OPTYPE(op) == JOF_SCOPECOORD); |
|
1065 |
|
1066 unsigned n = SCOPECOORD_HOPS_LEN + SCOPECOORD_SLOT_LEN; |
|
1067 JS_ASSERT(int(n) + 1 /* op */ == js_CodeSpec[op].length); |
|
1068 |
|
1069 ptrdiff_t off = EmitN(cx, bce, op, n); |
|
1070 if (off < 0) |
|
1071 return false; |
|
1072 |
|
1073 jsbytecode *pc = bce->code(off); |
|
1074 SET_SCOPECOORD_HOPS(pc, sc.hops()); |
|
1075 pc += SCOPECOORD_HOPS_LEN; |
|
1076 SET_SCOPECOORD_SLOT(pc, sc.slot()); |
|
1077 pc += SCOPECOORD_SLOT_LEN; |
|
1078 CheckTypeSet(cx, bce, op); |
|
1079 return true; |
|
1080 } |
|
1081 |
|
1082 // Compute the number of nested scope objects that will actually be on the scope |
|
1083 // chain at runtime, given the BCE's current staticScope. |
|
1084 static unsigned |
|
1085 DynamicNestedScopeDepth(BytecodeEmitter *bce) |
|
1086 { |
|
1087 unsigned depth = 0; |
|
1088 for (NestedScopeObject *b = bce->staticScope; b; b = b->enclosingNestedScope()) { |
|
1089 if (!b->is<StaticBlockObject>() || b->as<StaticBlockObject>().needsClone()) |
|
1090 ++depth; |
|
1091 } |
|
1092 |
|
1093 return depth; |
|
1094 } |
|
1095 |
|
1096 static bool |
|
1097 LookupAliasedName(HandleScript script, PropertyName *name, uint32_t *pslot) |
|
1098 { |
|
1099 /* |
|
1100 * Beware: BindingIter may contain more than one Binding for a given name |
|
1101 * (in the case of |function f(x,x) {}|) but only one will be aliased. |
|
1102 */ |
|
1103 uint32_t slot = CallObject::RESERVED_SLOTS; |
|
1104 for (BindingIter bi(script); !bi.done(); bi++) { |
|
1105 if (bi->aliased()) { |
|
1106 if (bi->name() == name) { |
|
1107 *pslot = slot; |
|
1108 return true; |
|
1109 } |
|
1110 slot++; |
|
1111 } |
|
1112 } |
|
1113 return false; |
|
1114 } |
|
1115 |
|
1116 static bool |
|
1117 LookupAliasedNameSlot(HandleScript script, PropertyName *name, ScopeCoordinate *sc) |
|
1118 { |
|
1119 uint32_t slot; |
|
1120 if (!LookupAliasedName(script, name, &slot)) |
|
1121 return false; |
|
1122 |
|
1123 sc->setSlot(slot); |
|
1124 return true; |
|
1125 } |
|
1126 |
|
1127 /* |
|
1128 * Use this function instead of assigning directly to 'hops' to guard for |
|
1129 * uint8_t overflows. |
|
1130 */ |
|
1131 static bool |
|
1132 AssignHops(BytecodeEmitter *bce, ParseNode *pn, unsigned src, ScopeCoordinate *dst) |
|
1133 { |
|
1134 if (src > UINT8_MAX) { |
|
1135 bce->reportError(pn, JSMSG_TOO_DEEP, js_function_str); |
|
1136 return false; |
|
1137 } |
|
1138 |
|
1139 dst->setHops(src); |
|
1140 return true; |
|
1141 } |
|
1142 |
|
// Emit an aliased-variable op for a bound name node |pn|, first translating
// its (level, slot) cookie into a runtime scope coordinate (hops, slot).
static bool
EmitAliasedVarOp(ExclusiveContext *cx, JSOp op, ParseNode *pn, BytecodeEmitter *bce)
{
    /*
     * While pn->pn_cookie tells us how many function scopes are between the use and the def this
     * is not the same as how many hops up the dynamic scope chain are needed. In particular:
     *  - a lexical function scope only contributes a hop if it is "heavyweight" (has a dynamic
     *    scope object).
     *  - a heavyweight named function scope contributes an extra scope to the scope chain (a
     *    DeclEnvObject that holds just the name).
     *  - all the intervening let/catch blocks must be counted.
     */
    unsigned skippedScopes = 0;
    BytecodeEmitter *bceOfDef = bce;
    if (pn->isUsed()) {
        /*
         * As explained in BindNameToSlot, the 'level' of a use indicates how
         * many function scopes (i.e., BytecodeEmitters) to skip to find the
         * enclosing function scope of the definition being accessed.
         */
        for (unsigned i = pn->pn_cookie.level(); i; i--) {
            skippedScopes += DynamicNestedScopeDepth(bceOfDef);
            FunctionBox *funbox = bceOfDef->sc->asFunctionBox();
            if (funbox->isHeavyweight()) {
                skippedScopes++;
                // A named lambda's DeclEnvObject adds one more dynamic scope.
                if (funbox->function()->isNamedLambda())
                    skippedScopes++;
            }
            bceOfDef = bceOfDef->parent;
        }
    } else {
        // A definition is accessed in its own function scope: level matches.
        JS_ASSERT(pn->isDefn());
        JS_ASSERT(pn->pn_cookie.level() == bce->script->staticLevel());
    }

    /*
     * The final part of the skippedScopes computation depends on the type of
     * variable. An arg or local variable is at the outer scope of a function
     * and so includes the full DynamicNestedScopeDepth. A let/catch-binding
     * requires a search of the block chain to see how many (dynamic) block
     * objects to skip.
     */
    ScopeCoordinate sc;
    if (IsArgOp(pn->getOp())) {
        if (!AssignHops(bce, pn, skippedScopes + DynamicNestedScopeDepth(bceOfDef), &sc))
            return false;
        JS_ALWAYS_TRUE(LookupAliasedNameSlot(bceOfDef->script, pn->name(), &sc));
    } else {
        JS_ASSERT(IsLocalOp(pn->getOp()) || pn->isKind(PNK_FUNCTION));
        uint32_t local = pn->pn_cookie.slot();
        if (local < bceOfDef->script->bindings.numVars()) {
            // An ordinary var/const local, stored in the function's CallObject.
            if (!AssignHops(bce, pn, skippedScopes + DynamicNestedScopeDepth(bceOfDef), &sc))
                return false;
            JS_ALWAYS_TRUE(LookupAliasedNameSlot(bceOfDef->script, pn->name(), &sc));
        } else {
            // A let/catch binding: walk the static block chain to find the
            // block holding |local|, counting cloned blocks as hops.
            JS_ASSERT_IF(bce->sc->isFunctionBox(), local <= bceOfDef->script->bindings.numLocals());
            JS_ASSERT(bceOfDef->staticScope->is<StaticBlockObject>());
            Rooted<StaticBlockObject*> b(cx, &bceOfDef->staticScope->as<StaticBlockObject>());
            while (local < b->localOffset()) {
                if (b->needsClone())
                    skippedScopes++;
                b = &b->enclosingNestedScope()->as<StaticBlockObject>();
            }
            if (!AssignHops(bce, pn, skippedScopes, &sc))
                return false;
            sc.setSlot(b->localIndexToSlot(local));
        }
    }

    return EmitAliasedVarOp(cx, op, sc, bce);
}
|
1214 |
|
// Emit the op for a bound variable reference (a PNK_FUNCTION or PNK_NAME
// node whose cookie is not free), choosing between the aliased and
// unaliased encodings.
static bool
EmitVarOp(ExclusiveContext *cx, ParseNode *pn, JSOp op, BytecodeEmitter *bce)
{
    JS_ASSERT(pn->isKind(PNK_FUNCTION) || pn->isKind(PNK_NAME));
    JS_ASSERT(!pn->pn_cookie.isFree());

    if (IsAliasedVarOp(op)) {
        // Already an aliased op: the cookie directly encodes (hops, slot).
        ScopeCoordinate sc;
        sc.setHops(pn->pn_cookie.level());
        sc.setSlot(pn->pn_cookie.slot());
        return EmitAliasedVarOp(cx, op, sc, bce);
    }

    JS_ASSERT_IF(pn->isKind(PNK_NAME), IsArgOp(op) || IsLocalOp(op));

    if (!bce->isAliasedName(pn)) {
        // Unaliased: the slot in the cookie can be used directly.
        JS_ASSERT(pn->isUsed() || pn->isDefn());
        JS_ASSERT_IF(pn->isUsed(), pn->pn_cookie.level() == 0);
        JS_ASSERT_IF(pn->isDefn(), pn->pn_cookie.level() == bce->script->staticLevel());
        return EmitUnaliasedVarOp(cx, op, pn->pn_cookie.slot(), bce);
    }

    // Aliased name: rewrite the arg/local op into its aliased counterpart.
    switch (op) {
      case JSOP_GETARG: case JSOP_GETLOCAL: op = JSOP_GETALIASEDVAR; break;
      case JSOP_SETARG: case JSOP_SETLOCAL: op = JSOP_SETALIASEDVAR; break;
      default: MOZ_ASSUME_UNREACHABLE("unexpected var op");
    }

    return EmitAliasedVarOp(cx, op, pn, bce);
}
|
1245 |
|
1246 static JSOp |
|
1247 GetIncDecInfo(ParseNodeKind kind, bool *post) |
|
1248 { |
|
1249 JS_ASSERT(kind == PNK_POSTINCREMENT || kind == PNK_PREINCREMENT || |
|
1250 kind == PNK_POSTDECREMENT || kind == PNK_PREDECREMENT); |
|
1251 *post = kind == PNK_POSTINCREMENT || kind == PNK_POSTDECREMENT; |
|
1252 return (kind == PNK_POSTINCREMENT || kind == PNK_PREINCREMENT) ? JSOP_ADD : JSOP_SUB; |
|
1253 } |
|
1254 |
|
// Emit ++/-- applied to an arg, local, or aliased variable. The trailing
// comments track the value stack; 'N?' is the saved original value, present
// only for the postfix forms.
static bool
EmitVarIncDec(ExclusiveContext *cx, ParseNode *pn, BytecodeEmitter *bce)
{
    JSOp op = pn->pn_kid->getOp();
    JS_ASSERT(IsArgOp(op) || IsLocalOp(op) || IsAliasedVarOp(op));
    JS_ASSERT(pn->pn_kid->isKind(PNK_NAME));
    JS_ASSERT(!pn->pn_kid->pn_cookie.isFree());

    bool post;
    JSOp binop = GetIncDecInfo(pn->getKind(), &post);

    // Pick the matching get/set pair for the variable's storage kind.
    JSOp getOp, setOp;
    if (IsLocalOp(op)) {
        getOp = JSOP_GETLOCAL;
        setOp = JSOP_SETLOCAL;
    } else if (IsArgOp(op)) {
        getOp = JSOP_GETARG;
        setOp = JSOP_SETARG;
    } else {
        getOp = JSOP_GETALIASEDVAR;
        setOp = JSOP_SETALIASEDVAR;
    }

    if (!EmitVarOp(cx, pn->pn_kid, getOp, bce))              // V
        return false;
    if (Emit1(cx, bce, JSOP_POS) < 0)                        // N
        return false;
    if (post && Emit1(cx, bce, JSOP_DUP) < 0)                // N? N
        return false;
    if (Emit1(cx, bce, JSOP_ONE) < 0)                        // N? N 1
        return false;
    if (Emit1(cx, bce, binop) < 0)                           // N? N+1
        return false;
    if (!EmitVarOp(cx, pn->pn_kid, setOp, bce))              // N? N+1
        return false;
    if (post && Emit1(cx, bce, JSOP_POP) < 0)                // RESULT
        return false;

    return true;
}
|
1295 |
|
// Return whether the bound name |pn| must be accessed through the scope
// chain (aliased) rather than via a stack slot.
bool
BytecodeEmitter::isAliasedName(ParseNode *pn)
{
    Definition *dn = pn->resolve();
    JS_ASSERT(dn->isDefn());
    JS_ASSERT(!dn->isPlaceholder());
    JS_ASSERT(dn->isBound());

    /* If dn is in an enclosing function, it is definitely aliased. */
    if (dn->pn_cookie.level() != script->staticLevel())
        return true;

    switch (dn->kind()) {
      case Definition::LET:
        /*
         * There are two ways to alias a let variable: nested functions and
         * dynamic scope operations. (This is overly conservative since the
         * bindingsAccessedDynamically flag, checked by allLocalsAliased, is
         * function-wide.)
         *
         * In addition all locals in generators are marked as aliased, to ensure
         * that they are allocated on scope chains instead of on the stack. See
         * the definition of SharedContext::allLocalsAliased.
         */
        return dn->isClosed() || sc->allLocalsAliased();
      case Definition::ARG:
        /*
         * Consult the bindings, since they already record aliasing. We might
         * be tempted to use the same definition as VAR/CONST/LET, but there is
         * a problem caused by duplicate arguments: only the last argument with
         * a given name is aliased. This is necessary to avoid generating a
         * shape for the call object with more than one name for a given
         * slot (which violates internal engine invariants). All this means that
         * the '|| sc->allLocalsAliased()' disjunct is incorrect since it will
         * mark both parameters in function(x,x) as aliased.
         */
        return script->formalIsAliased(pn->pn_cookie.slot());
      case Definition::VAR:
      case Definition::CONST:
        JS_ASSERT_IF(sc->allLocalsAliased(), script->varIsAliased(pn->pn_cookie.slot()));
        return script->varIsAliased(pn->pn_cookie.slot());
      case Definition::PLACEHOLDER:
      case Definition::NAMED_LAMBDA:
      case Definition::MISSING:
        MOZ_ASSUME_UNREACHABLE("unexpected dn->kind");
    }
    // Unreachable: the switch above covers every Definition kind.
    return false;
}
|
1344 |
|
/*
 * Try to convert a *NAME op with a free name to a more specialized GNAME,
 * INTRINSIC or ALIASEDVAR op, which optimize accesses on that name.
 * Return true if a conversion was made.
 */
static bool
TryConvertFreeName(BytecodeEmitter *bce, ParseNode *pn)
{
    /*
     * In self-hosting mode, JSOP_*NAME is unconditionally converted to
     * JSOP_*INTRINSIC. This causes lookups to be redirected to the special
     * intrinsics holder in the global object, into which any missing values are
     * cloned lazily upon first access.
     */
    if (bce->emitterMode == BytecodeEmitter::SelfHosting) {
        JSOp op;
        switch (pn->getOp()) {
          case JSOP_NAME:    op = JSOP_GETINTRINSIC; break;
          case JSOP_SETNAME: op = JSOP_SETINTRINSIC; break;
          /* Other *NAME ops aren't (yet) supported in self-hosted code. */
          default: MOZ_ASSUME_UNREACHABLE("intrinsic");
        }
        pn->setOp(op);
        return true;
    }

    /*
     * When parsing inner functions lazily, parse nodes for outer functions no
     * longer exist and only the function's scope chain is available for
     * resolving upvar accesses within the inner function.
     */
    if (bce->emitterMode == BytecodeEmitter::LazyFunction) {
        // The only statements within a lazy function which can push lexical
        // scopes are try/catch blocks. Use generic ops in this case.
        for (StmtInfoBCE *stmt = bce->topStmt; stmt; stmt = stmt->down) {
            if (stmt->type == STMT_CATCH)
                return true;
        }

        size_t hops = 0;
        FunctionBox *funbox = bce->sc->asFunctionBox();
        if (funbox->hasExtensibleScope())
            return false;
        // A free reference to the lambda's own name must go through the scope
        // chain; don't convert it.
        if (funbox->function()->isNamedLambda() && funbox->function()->atom() == pn->pn_atom)
            return false;
        if (funbox->isHeavyweight()) {
            hops++;
            if (funbox->function()->isNamedLambda())
                hops++;
        }
        if (bce->script->directlyInsideEval())
            return false;
        // Walk the enclosing static scope chain looking for an aliased
        // binding with this name, accumulating dynamic-scope hops.
        RootedObject outerScope(bce->sc->context, bce->script->enclosingStaticScope());
        for (StaticScopeIter<CanGC> ssi(bce->sc->context, outerScope); !ssi.done(); ssi++) {
            if (ssi.type() != StaticScopeIter<CanGC>::FUNCTION) {
                if (ssi.type() == StaticScopeIter<CanGC>::BLOCK) {
                    // Use generic ops if a catch block is encountered.
                    return false;
                }
                if (ssi.hasDynamicScopeObject())
                    hops++;
                continue;
            }
            RootedScript script(bce->sc->context, ssi.funScript());
            if (script->functionNonDelazifying()->atom() == pn->pn_atom)
                return false;
            if (ssi.hasDynamicScopeObject()) {
                uint32_t slot;
                if (LookupAliasedName(script, pn->pn_atom->asPropertyName(), &slot)) {
                    JSOp op;
                    switch (pn->getOp()) {
                      case JSOP_NAME:    op = JSOP_GETALIASEDVAR; break;
                      case JSOP_SETNAME: op = JSOP_SETALIASEDVAR; break;
                      default: return false;
                    }
                    pn->setOp(op);
                    JS_ALWAYS_TRUE(pn->pn_cookie.set(bce->parser->tokenStream, hops, slot));
                    return true;
                }
                hops++;
            }

            if (script->funHasExtensibleScope() || script->directlyInsideEval())
                return false;
        }
    }

    // Unbound names aren't recognizable global-property references if the
    // script isn't running against its global object.
    if (!bce->script->compileAndGo() || !bce->hasGlobalScope)
        return false;

    // Deoptimized names also aren't necessarily globals.
    if (pn->isDeoptimized())
        return false;

    if (bce->sc->isFunctionBox()) {
        // Unbound names in function code may not be globals if new locals can
        // be added to this function (or an enclosing one) to alias a global
        // reference.
        FunctionBox *funbox = bce->sc->asFunctionBox();
        if (funbox->mightAliasLocals())
            return false;
    }

    // If this is eval code, being evaluated inside strict mode eval code,
    // an "unbound" name might be a binding local to that outer eval:
    //
    //   var x = "GLOBAL";
    //   eval('"use strict"; ' +
    //        'var x; ' +
    //        'eval("print(x)");'); // "undefined", not "GLOBAL"
    //
    // Given the enclosing eval code's strictness and its bindings (neither is
    // readily available now), we could exactly check global-ness, but it's not
    // worth the trouble for doubly-nested eval code. So we conservatively
    // approximate. If the outer eval code is strict, then this eval code will
    // be: thus, don't optimize if we're compiling strict code inside an eval.
    if (bce->insideEval && bce->sc->strict)
        return false;

    // Beware: if you change anything here, you might also need to change
    // js::ReportIfUndeclaredVarAssignment.
    JSOp op;
    switch (pn->getOp()) {
      case JSOP_NAME:    op = JSOP_GETGNAME; break;
      case JSOP_SETNAME: op = JSOP_SETGNAME; break;
      case JSOP_SETCONST:
        // Not supported.
        return false;
      default: MOZ_ASSUME_UNREACHABLE("gname");
    }
    pn->setOp(op);
    return true;
}
|
1480 |
|
/*
 * BindNameToSlotHelper attempts to optimize name gets and sets to stack slot
 * loads and stores, given the compile-time information in bce and a PNK_NAME
 * node pn. It returns false on error, true on success.
 *
 * The caller can test pn->pn_cookie.isFree() to tell whether optimization
 * occurred, in which case BindNameToSlotHelper also updated pn->pn_op. If
 * pn->pn_cookie.isFree() is still true on return, pn->pn_op still may have
 * been optimized, e.g., from JSOP_NAME to JSOP_CALLEE. Whether or not
 * pn->pn_op was modified, if this function finds an argument or local variable
 * name, PND_CONST will be set in pn_dflags for read-only properties after a
 * successful return.
 *
 * NB: if you add more opcodes specialized from JSOP_NAME, etc., don't forget
 * to update the special cases in EmitFor (for-in) and EmitAssignment (= and
 * op=, e.g. +=).
 */
static bool
BindNameToSlotHelper(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
    JS_ASSERT(pn->isKind(PNK_NAME));

    JS_ASSERT_IF(pn->isKind(PNK_FUNCTION), pn->isBound());

    /* Don't attempt if 'pn' is already bound or deoptimized or a function. */
    if (pn->isBound() || pn->isDeoptimized())
        return true;

    /* JSOP_CALLEE is pre-bound by definition. */
    JSOp op = pn->getOp();
    JS_ASSERT(op != JSOP_CALLEE);
    JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);

    /*
     * The parser already linked name uses to definitions when (where not
     * prevented by non-lexical constructs like 'with' and 'eval').
     */
    Definition *dn;
    if (pn->isUsed()) {
        JS_ASSERT(pn->pn_cookie.isFree());
        dn = pn->pn_lexdef;
        JS_ASSERT(dn->isDefn());
        pn->pn_dflags |= (dn->pn_dflags & PND_CONST);
    } else if (pn->isDefn()) {
        dn = (Definition *) pn;
    } else {
        return true;
    }

    /*
     * Turn attempts to mutate const-declared bindings into get ops (for
     * pre-increment and pre-decrement ops, our caller will have to emit
     * JSOP_POS, JSOP_ONE, and JSOP_ADD as well).
     *
     * Turn JSOP_DELNAME into JSOP_FALSE if dn is known, as all declared
     * bindings visible to the compiler are permanent in JS unless the
     * declaration originates at top level in eval code.
     */
    switch (op) {
      case JSOP_NAME:
      case JSOP_SETCONST:
        break;
      default:
        if (pn->isConst()) {
            // Mutating a const: report in strict-checking mode, then
            // downgrade the op to a plain read.
            if (bce->sc->needStrictChecks()) {
                JSAutoByteString name;
                if (!AtomToPrintableString(cx, pn->pn_atom, &name) ||
                    !bce->reportStrictModeError(pn, JSMSG_READ_ONLY, name.ptr()))
                {
                    return false;
                }
            }
            pn->setOp(op = JSOP_NAME);
        }
    }

    if (dn->pn_cookie.isFree()) {
        if (HandleScript caller = bce->evalCaller) {
            JS_ASSERT(bce->script->compileAndGo());

            /*
             * Don't generate upvars on the left side of a for loop. See
             * bug 470758.
             */
            if (bce->emittingForInit)
                return true;

            /*
             * If this is an eval in the global scope, then unbound variables
             * must be globals, so try to use GNAME ops.
             */
            if (!caller->functionOrCallerFunction() && TryConvertFreeName(bce, pn)) {
                pn->pn_dflags |= PND_BOUND;
                return true;
            }

            /*
             * Out of tricks, so we must rely on PICs to optimize named
             * accesses from direct eval called from function code.
             */
            return true;
        }

        /* Optimize accesses to undeclared globals. */
        if (!TryConvertFreeName(bce, pn))
            return true;

        pn->pn_dflags |= PND_BOUND;
        return true;
    }

    /*
     * At this point, we are only dealing with uses that have already been
     * bound to definitions via pn_lexdef. The rest of this routine converts
     * the parse node of the use from its initial JSOP_*NAME* op to a LOCAL/ARG
     * op. This requires setting the node's pn_cookie with a pair (level, slot)
     * where 'level' is the number of function scopes between the use and the
     * def and 'slot' is the index to emit as the immediate of the ARG/LOCAL
     * op. For example, in this code:
     *
     *   function(a,b,x) { return x }
     *   function(y) { function() { return y } }
     *
     * x will get (level = 0, slot = 2) and y will get (level = 1, slot = 0).
     */
    JS_ASSERT(!pn->isDefn());
    JS_ASSERT(pn->isUsed());
    JS_ASSERT(pn->pn_lexdef);
    JS_ASSERT(pn->pn_cookie.isFree());

    /*
     * We are compiling a function body and may be able to optimize name
     * to stack slot. Look for an argument or variable in the function and
     * rewrite pn_op and update pn accordingly.
     */
    switch (dn->kind()) {
      case Definition::ARG:
        switch (op) {
          case JSOP_NAME:    op = JSOP_GETARG; break;
          case JSOP_SETNAME: op = JSOP_SETARG; break;
          default: MOZ_ASSUME_UNREACHABLE("arg");
        }
        JS_ASSERT(!pn->isConst());
        break;

      case Definition::VAR:
      case Definition::CONST:
      case Definition::LET:
        switch (op) {
          case JSOP_NAME:     op = JSOP_GETLOCAL; break;
          case JSOP_SETNAME:  op = JSOP_SETLOCAL; break;
          case JSOP_SETCONST: op = JSOP_SETLOCAL; break;
          default: MOZ_ASSUME_UNREACHABLE("local");
        }
        break;

      case Definition::NAMED_LAMBDA: {
        JS_ASSERT(dn->isOp(JSOP_CALLEE));
        JS_ASSERT(op != JSOP_CALLEE);

        /*
         * Currently, the ALIASEDVAR ops do not support accessing the
         * callee of a DeclEnvObject, so use NAME.
         */
        if (dn->pn_cookie.level() != bce->script->staticLevel())
            return true;

        DebugOnly<JSFunction *> fun = bce->sc->asFunctionBox()->function();
        JS_ASSERT(fun->isLambda());
        JS_ASSERT(pn->pn_atom == fun->atom());

        /*
         * Leave pn->isOp(JSOP_NAME) if bce->fun is heavyweight to
         * address two cases: a new binding introduced by eval, and
         * assignment to the name in strict mode.
         *
         *   var fun = (function f(s) { eval(s); return f; });
         *   assertEq(fun("var f = 42"), 42);
         *
         * ECMAScript specifies that a function expression's name is bound
         * in a lexical environment distinct from that used to bind its
         * named parameters, the arguments object, and its variables. The
         * new binding for "var f = 42" shadows the binding for the
         * function itself, so the name of the function will not refer to
         * the function.
         *
         *   (function f() { "use strict"; f = 12; })();
         *
         * Outside strict mode, assignment to a function expression's name
         * has no effect. But in strict mode, this attempt to mutate an
         * immutable binding must throw a TypeError. We implement this by
         * not optimizing such assignments and by marking such functions as
         * heavyweight, ensuring that the function name is represented in
         * the scope chain so that assignment will throw a TypeError.
         */
        if (!bce->sc->asFunctionBox()->isHeavyweight()) {
            op = JSOP_CALLEE;
            pn->pn_dflags |= PND_CONST;
        }

        pn->setOp(op);
        pn->pn_dflags |= PND_BOUND;
        return true;
      }

      case Definition::PLACEHOLDER:
        return true;

      case Definition::MISSING:
        MOZ_ASSUME_UNREACHABLE("missing");
    }

    /*
     * The difference between the current static level and the static level of
     * the definition is the number of function scopes between the current
     * scope and dn's scope.
     */
    unsigned skip = bce->script->staticLevel() - dn->pn_cookie.level();
    JS_ASSERT_IF(skip, dn->isClosed());

    /*
     * Explicitly disallow accessing var/let bindings in global scope from
     * nested functions. The reason for this limitation is that, since the
     * global script is not included in the static scope chain (1. because it
     * has no object to stand in the static scope chain, 2. to minimize memory
     * bloat where a single live function keeps its whole global script
     * alive.), ScopeCoordinateToTypeSet is not able to find the var/let's
     * associated types::TypeSet.
     */
    if (skip) {
        BytecodeEmitter *bceSkipped = bce;
        for (unsigned i = 0; i < skip; i++)
            bceSkipped = bceSkipped->parent;
        if (!bceSkipped->sc->isFunctionBox())
            return true;
    }

    JS_ASSERT(!pn->isOp(op));
    pn->setOp(op);
    if (!pn->pn_cookie.set(bce->parser->tokenStream, skip, dn->pn_cookie.slot()))
        return false;

    pn->pn_dflags |= PND_BOUND;
    return true;
}
|
1726 |
|
1727 /* |
|
1728 * Attempts to bind the name, then checks that no dynamic scope lookup ops are |
|
1729 * emitted in self-hosting mode. NAME ops do lookups off current scope chain, |
|
1730 * and we do not want to allow self-hosted code to use the dynamic scope. |
|
1731 */ |
|
1732 static bool |
|
1733 BindNameToSlot(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) |
|
1734 { |
|
1735 if (!BindNameToSlotHelper(cx, bce, pn)) |
|
1736 return false; |
|
1737 |
|
1738 if (bce->emitterMode == BytecodeEmitter::SelfHosting && !pn->isBound()) { |
|
1739 bce->reportError(pn, JSMSG_SELFHOSTED_UNBOUND_NAME); |
|
1740 return false; |
|
1741 } |
|
1742 |
|
1743 return true; |
|
1744 } |
|
1745 |
|
/*
 * If pn contains a useful expression, return true with *answer set to true.
 * If pn contains a useless expression, return true with *answer set to false.
 * Return false on error.
 *
 * The caller should initialize *answer to false and invoke this function on
 * an expression statement or similar subtree to decide whether the tree could
 * produce code that has any side effects.  For an expression statement, we
 * define useless code as code with no side effects, because the main effect,
 * the value left on the stack after the code executes, will be discarded by a
 * pop bytecode.
 */
static bool
CheckSideEffects(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, bool *answer)
{
    /* Once a subtree is already known useful, there is nothing left to prove. */
    if (!pn || *answer)
        return true;

    switch (pn->getArity()) {
      case PN_CODE:
        /*
         * A named function, contrary to ES3, is no longer useful, because we
         * bind its name lexically (using JSOP_CALLEE) instead of creating an
         * Object instance and binding a readonly, permanent property in it
         * (the object and binding can be detected and hijacked or captured).
         * This is a bug fix to ES3; it is fixed in ES3.1 drafts.
         */
        MOZ_ASSERT(*answer == false);
        return true;

      case PN_LIST:
        if (pn->isOp(JSOP_NOP) || pn->isOp(JSOP_OR) || pn->isOp(JSOP_AND) ||
            pn->isOp(JSOP_STRICTEQ) || pn->isOp(JSOP_STRICTNE)) {
            /*
             * Non-operators along with ||, &&, ===, and !== never invoke
             * toString or valueOf.
             */
            bool ok = true;
            for (ParseNode *pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next)
                ok &= CheckSideEffects(cx, bce, pn2, answer);
            return ok;
        }

        if (pn->isKind(PNK_GENEXP)) {
            /* Generator-expressions are harmless if the result is ignored. */
            MOZ_ASSERT(*answer == false);
            return true;
        }

        /*
         * All invocation operations (construct: PNK_NEW, call: PNK_CALL)
         * are presumed to be useful, because they may have side effects
         * even if their main effect (their return value) is discarded.
         *
         * PNK_ELEM binary trees of 3+ nodes are flattened into lists to
         * avoid too much recursion. All such lists must be presumed to be
         * useful because each index operation could invoke a getter.
         *
         * Likewise, array and object initialisers may call prototype
         * setters (the __defineSetter__ built-in, and writable __proto__
         * on Array.prototype create this hazard). Initialiser list nodes
         * have JSOP_NEWINIT in their pn_op.
         */
        *answer = true;
        return true;

      case PN_TERNARY:
        /* Recurse into all three kids; short-circuits only on error. */
        return CheckSideEffects(cx, bce, pn->pn_kid1, answer) &&
               CheckSideEffects(cx, bce, pn->pn_kid2, answer) &&
               CheckSideEffects(cx, bce, pn->pn_kid3, answer);

      case PN_BINARY:
      case PN_BINARY_OBJ:
        if (pn->isAssignment()) {
            /*
             * Assignment is presumed to be useful, even if the next operation
             * is another assignment overwriting this one's ostensible effect,
             * because the left operand may be a property with a setter that
             * has side effects.
             *
             * The only exception is assignment of a useless value to a const
             * declared in the function currently being compiled.
             */
            ParseNode *pn2 = pn->pn_left;
            if (!pn2->isKind(PNK_NAME)) {
                *answer = true;
            } else {
                if (!BindNameToSlot(cx, bce, pn2))
                    return false;
                if (!CheckSideEffects(cx, bce, pn->pn_right, answer))
                    return false;
                if (!*answer && (!pn->isOp(JSOP_NOP) || !pn2->isConst()))
                    *answer = true;
            }
            return true;
        }

        if (pn->isOp(JSOP_OR) || pn->isOp(JSOP_AND) || pn->isOp(JSOP_STRICTEQ) ||
            pn->isOp(JSOP_STRICTNE)) {
            /*
             * ||, &&, ===, and !== do not convert their operands via
             * toString or valueOf method calls.
             */
            return CheckSideEffects(cx, bce, pn->pn_left, answer) &&
                   CheckSideEffects(cx, bce, pn->pn_right, answer);
        }

        /*
         * We can't easily prove that neither operand ever denotes an
         * object with a toString or valueOf method.
         */
        *answer = true;
        return true;

      case PN_UNARY:
        switch (pn->getKind()) {
          case PNK_DELETE:
          {
            ParseNode *pn2 = pn->pn_kid;
            switch (pn2->getKind()) {
              case PNK_NAME:
                if (!BindNameToSlot(cx, bce, pn2))
                    return false;
                if (pn2->isConst()) {
                    /* Deleting a const binding is a no-op: still useless. */
                    MOZ_ASSERT(*answer == false);
                    return true;
                }
                /* FALL THROUGH */
              case PNK_DOT:
              case PNK_CALL:
              case PNK_ELEM:
                /* All these delete addressing modes have effects too. */
                *answer = true;
                return true;
              default:
                return CheckSideEffects(cx, bce, pn2, answer);
            }
            MOZ_ASSUME_UNREACHABLE("We have a returning default case");
          }

          case PNK_TYPEOF:
          case PNK_VOID:
          case PNK_NOT:
          case PNK_BITNOT:
            if (pn->isOp(JSOP_NOT)) {
                /* ! does not convert its operand via toString or valueOf. */
                return CheckSideEffects(cx, bce, pn->pn_kid, answer);
            }
            /* FALL THROUGH */

          default:
            /*
             * All of PNK_INC, PNK_DEC, PNK_THROW, PNK_YIELD, and PNK_YIELD_STAR
             * have direct effects. Of the remaining unary-arity node types, we
             * can't easily prove that the operand never denotes an object with
             * a toString or valueOf method.
             */
            *answer = true;
            return true;
        }
        MOZ_ASSUME_UNREACHABLE("We have a returning default case");

      case PN_NAME:
        /*
         * Take care to avoid trying to bind a label name (labels, both for
         * statements and property values in object initialisers, have pn_op
         * defaulted to JSOP_NOP).
         */
        if (pn->isKind(PNK_NAME) && !pn->isOp(JSOP_NOP)) {
            if (!BindNameToSlot(cx, bce, pn))
                return false;
            if (!pn->isOp(JSOP_CALLEE) && pn->pn_cookie.isFree()) {
                /*
                 * Not a use of an unshadowed named function expression's given
                 * name, so this expression could invoke a getter that has side
                 * effects.
                 */
                *answer = true;
            }
        }
        if (pn->isKind(PNK_DOT)) {
            /* Dotted property references in general can call getters. */
            *answer = true;
        }
        return CheckSideEffects(cx, bce, pn->maybeExpr(), answer);

      case PN_NULLARY:
        /* The debugger statement has an effect by definition. */
        if (pn->isKind(PNK_DEBUGGER))
            *answer = true;
        return true;
    }
    return true;
}
|
1939 |
|
1940 bool |
|
1941 BytecodeEmitter::isInLoop() |
|
1942 { |
|
1943 for (StmtInfoBCE *stmt = topStmt; stmt; stmt = stmt->down) { |
|
1944 if (stmt->isLoop()) |
|
1945 return true; |
|
1946 } |
|
1947 return false; |
|
1948 } |
|
1949 |
|
1950 bool |
|
1951 BytecodeEmitter::checkSingletonContext() |
|
1952 { |
|
1953 if (!script->compileAndGo() || sc->isFunctionBox() || isInLoop()) |
|
1954 return false; |
|
1955 hasSingletons = true; |
|
1956 return true; |
|
1957 } |
|
1958 |
|
1959 bool |
|
1960 BytecodeEmitter::needsImplicitThis() |
|
1961 { |
|
1962 if (!script->compileAndGo()) |
|
1963 return true; |
|
1964 |
|
1965 if (sc->isFunctionBox()) { |
|
1966 if (sc->asFunctionBox()->inWith) |
|
1967 return true; |
|
1968 } else { |
|
1969 JSObject *scope = sc->asGlobalSharedContext()->scopeChain(); |
|
1970 while (scope) { |
|
1971 if (scope->is<DynamicWithObject>()) |
|
1972 return true; |
|
1973 scope = scope->enclosingScope(); |
|
1974 } |
|
1975 } |
|
1976 |
|
1977 for (StmtInfoBCE *stmt = topStmt; stmt; stmt = stmt->down) { |
|
1978 if (stmt->type == STMT_WITH) |
|
1979 return true; |
|
1980 } |
|
1981 return false; |
|
1982 } |
|
1983 |
|
1984 void |
|
1985 BytecodeEmitter::tellDebuggerAboutCompiledScript(ExclusiveContext *cx) |
|
1986 { |
|
1987 // Note: when parsing off thread the resulting scripts need to be handed to |
|
1988 // the debugger after rejoining to the main thread. |
|
1989 if (!cx->isJSContext()) |
|
1990 return; |
|
1991 |
|
1992 RootedFunction function(cx, script->functionNonDelazifying()); |
|
1993 CallNewScriptHook(cx->asJSContext(), script, function); |
|
1994 // Lazy scripts are never top level (despite always being invoked with a |
|
1995 // nullptr parent), and so the hook should never be fired. |
|
1996 if (emitterMode != LazyFunction && !parent) { |
|
1997 GlobalObject *compileAndGoGlobal = nullptr; |
|
1998 if (script->compileAndGo()) |
|
1999 compileAndGoGlobal = &script->global(); |
|
2000 Debugger::onNewScript(cx->asJSContext(), script, compileAndGoGlobal); |
|
2001 } |
|
2002 } |
|
2003 |
|
2004 inline TokenStream * |
|
2005 BytecodeEmitter::tokenStream() |
|
2006 { |
|
2007 return &parser->tokenStream; |
|
2008 } |
|
2009 |
|
2010 bool |
|
2011 BytecodeEmitter::reportError(ParseNode *pn, unsigned errorNumber, ...) |
|
2012 { |
|
2013 TokenPos pos = pn ? pn->pn_pos : tokenStream()->currentToken().pos; |
|
2014 |
|
2015 va_list args; |
|
2016 va_start(args, errorNumber); |
|
2017 bool result = tokenStream()->reportCompileErrorNumberVA(pos.begin, JSREPORT_ERROR, |
|
2018 errorNumber, args); |
|
2019 va_end(args); |
|
2020 return result; |
|
2021 } |
|
2022 |
|
2023 bool |
|
2024 BytecodeEmitter::reportStrictWarning(ParseNode *pn, unsigned errorNumber, ...) |
|
2025 { |
|
2026 TokenPos pos = pn ? pn->pn_pos : tokenStream()->currentToken().pos; |
|
2027 |
|
2028 va_list args; |
|
2029 va_start(args, errorNumber); |
|
2030 bool result = tokenStream()->reportStrictWarningErrorNumberVA(pos.begin, errorNumber, args); |
|
2031 va_end(args); |
|
2032 return result; |
|
2033 } |
|
2034 |
|
2035 bool |
|
2036 BytecodeEmitter::reportStrictModeError(ParseNode *pn, unsigned errorNumber, ...) |
|
2037 { |
|
2038 TokenPos pos = pn ? pn->pn_pos : tokenStream()->currentToken().pos; |
|
2039 |
|
2040 va_list args; |
|
2041 va_start(args, errorNumber); |
|
2042 bool result = tokenStream()->reportStrictModeErrorNumberVA(pos.begin, sc->strict, |
|
2043 errorNumber, args); |
|
2044 va_end(args); |
|
2045 return result; |
|
2046 } |
|
2047 |
|
2048 static bool |
|
2049 EmitNewInit(ExclusiveContext *cx, BytecodeEmitter *bce, JSProtoKey key) |
|
2050 { |
|
2051 const size_t len = 1 + UINT32_INDEX_LEN; |
|
2052 ptrdiff_t offset = EmitCheck(cx, bce, len); |
|
2053 if (offset < 0) |
|
2054 return false; |
|
2055 |
|
2056 jsbytecode *code = bce->code(offset); |
|
2057 code[0] = JSOP_NEWINIT; |
|
2058 code[1] = jsbytecode(key); |
|
2059 code[2] = 0; |
|
2060 code[3] = 0; |
|
2061 code[4] = 0; |
|
2062 UpdateDepth(cx, bce, offset); |
|
2063 CheckTypeSet(cx, bce, JSOP_NEWINIT); |
|
2064 return true; |
|
2065 } |
|
2066 |
|
2067 static bool |
|
2068 IteratorResultShape(ExclusiveContext *cx, BytecodeEmitter *bce, unsigned *shape) |
|
2069 { |
|
2070 JS_ASSERT(bce->script->compileAndGo()); |
|
2071 |
|
2072 RootedObject obj(cx); |
|
2073 gc::AllocKind kind = GuessObjectGCKind(2); |
|
2074 obj = NewBuiltinClassInstance(cx, &JSObject::class_, kind); |
|
2075 if (!obj) |
|
2076 return false; |
|
2077 |
|
2078 Rooted<jsid> value_id(cx, AtomToId(cx->names().value)); |
|
2079 Rooted<jsid> done_id(cx, AtomToId(cx->names().done)); |
|
2080 if (!DefineNativeProperty(cx, obj, value_id, UndefinedHandleValue, nullptr, nullptr, |
|
2081 JSPROP_ENUMERATE)) |
|
2082 return false; |
|
2083 if (!DefineNativeProperty(cx, obj, done_id, UndefinedHandleValue, nullptr, nullptr, |
|
2084 JSPROP_ENUMERATE)) |
|
2085 return false; |
|
2086 |
|
2087 ObjectBox *objbox = bce->parser->newObjectBox(obj); |
|
2088 if (!objbox) |
|
2089 return false; |
|
2090 |
|
2091 *shape = bce->objectList.add(objbox); |
|
2092 |
|
2093 return true; |
|
2094 } |
|
2095 |
|
2096 static bool |
|
2097 EmitPrepareIteratorResult(ExclusiveContext *cx, BytecodeEmitter *bce) |
|
2098 { |
|
2099 if (bce->script->compileAndGo()) { |
|
2100 unsigned shape; |
|
2101 if (!IteratorResultShape(cx, bce, &shape)) |
|
2102 return false; |
|
2103 return EmitIndex32(cx, JSOP_NEWOBJECT, shape, bce); |
|
2104 } |
|
2105 |
|
2106 return EmitNewInit(cx, bce, JSProto_Object); |
|
2107 } |
|
2108 |
|
2109 static bool |
|
2110 EmitFinishIteratorResult(ExclusiveContext *cx, BytecodeEmitter *bce, bool done) |
|
2111 { |
|
2112 jsatomid value_id; |
|
2113 if (!bce->makeAtomIndex(cx->names().value, &value_id)) |
|
2114 return UINT_MAX; |
|
2115 jsatomid done_id; |
|
2116 if (!bce->makeAtomIndex(cx->names().done, &done_id)) |
|
2117 return UINT_MAX; |
|
2118 |
|
2119 if (!EmitIndex32(cx, JSOP_INITPROP, value_id, bce)) |
|
2120 return false; |
|
2121 if (Emit1(cx, bce, done ? JSOP_TRUE : JSOP_FALSE) < 0) |
|
2122 return false; |
|
2123 if (!EmitIndex32(cx, JSOP_INITPROP, done_id, bce)) |
|
2124 return false; |
|
2125 if (Emit1(cx, bce, JSOP_ENDINIT) < 0) |
|
2126 return false; |
|
2127 return true; |
|
2128 } |
|
2129 |
|
2130 static bool |
|
2131 EmitNameOp(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, bool callContext) |
|
2132 { |
|
2133 if (!BindNameToSlot(cx, bce, pn)) |
|
2134 return false; |
|
2135 |
|
2136 JSOp op = pn->getOp(); |
|
2137 |
|
2138 if (op == JSOP_CALLEE) { |
|
2139 if (Emit1(cx, bce, op) < 0) |
|
2140 return false; |
|
2141 } else { |
|
2142 if (!pn->pn_cookie.isFree()) { |
|
2143 JS_ASSERT(JOF_OPTYPE(op) != JOF_ATOM); |
|
2144 if (!EmitVarOp(cx, pn, op, bce)) |
|
2145 return false; |
|
2146 } else { |
|
2147 if (!EmitAtomOp(cx, pn, op, bce)) |
|
2148 return false; |
|
2149 } |
|
2150 } |
|
2151 |
|
2152 /* Need to provide |this| value for call */ |
|
2153 if (callContext) { |
|
2154 if (op == JSOP_NAME && bce->needsImplicitThis()) { |
|
2155 if (!EmitAtomOp(cx, pn, JSOP_IMPLICITTHIS, bce)) |
|
2156 return false; |
|
2157 } else { |
|
2158 if (Emit1(cx, bce, JSOP_UNDEFINED) < 0) |
|
2159 return false; |
|
2160 } |
|
2161 } |
|
2162 |
|
2163 return true; |
|
2164 } |
|
2165 |
|
/*
 * Emit the object expression (everything to the left of the final '.') of
 * the PNK_DOT node |pn|, leaving that object on the stack.  |op| is the
 * property op the caller will emit afterwards (unused here; kept for
 * signature symmetry with the other Emit*LHS helpers — TODO confirm).
 */
static bool
EmitPropLHS(ExclusiveContext *cx, ParseNode *pn, JSOp op, BytecodeEmitter *bce)
{
    JS_ASSERT(pn->isKind(PNK_DOT));
    ParseNode *pn2 = pn->maybeExpr();

    /*
     * If the object operand is also a dotted property reference, reverse the
     * list linked via pn_expr temporarily so we can iterate over it from the
     * bottom up (reversing again as we go), to avoid excessive recursion.
     */
    if (pn2->isKind(PNK_DOT)) {
        ParseNode *pndot = pn2;
        ParseNode *pnup = nullptr, *pndown;
        ptrdiff_t top = bce->offset();
        for (;;) {
            /* Reverse pndot->pn_expr to point up, not down. */
            pndot->pn_offset = top;
            JS_ASSERT(!pndot->isUsed());
            pndown = pndot->pn_expr;
            pndot->pn_expr = pnup;
            if (!pndown->isKind(PNK_DOT))
                break;
            pnup = pndot;
            pndot = pndown;
        }

        /* pndown is a primary expression, not a dotted property reference. */
        if (!EmitTree(cx, bce, pndown))
            return false;

        do {
            /* Walk back up the list, emitting annotated name ops. */
            if (!EmitAtomOp(cx, pndot, JSOP_GETPROP, bce))
                return false;

            /* Reverse the pn_expr link again, restoring the original list. */
            pnup = pndot->pn_expr;
            pndot->pn_expr = pndown;
            pndown = pndot;
        } while ((pndot = pnup) != nullptr);
        return true;
    }

    // The non-optimized case: just emit the object subexpression.
    return EmitTree(cx, bce, pn2);
}
|
2213 |
|
2214 static bool |
|
2215 EmitPropOp(ExclusiveContext *cx, ParseNode *pn, JSOp op, BytecodeEmitter *bce) |
|
2216 { |
|
2217 JS_ASSERT(pn->isArity(PN_NAME)); |
|
2218 |
|
2219 if (!EmitPropLHS(cx, pn, op, bce)) |
|
2220 return false; |
|
2221 |
|
2222 if (op == JSOP_CALLPROP && Emit1(cx, bce, JSOP_DUP) < 0) |
|
2223 return false; |
|
2224 |
|
2225 if (!EmitAtomOp(cx, pn, op, bce)) |
|
2226 return false; |
|
2227 |
|
2228 if (op == JSOP_CALLPROP && Emit1(cx, bce, JSOP_SWAP) < 0) |
|
2229 return false; |
|
2230 |
|
2231 return true; |
|
2232 } |
|
2233 |
|
/*
 * Emit a pre/post increment or decrement of a dotted property expression.
 * |pn| is the PNK_INC/PNK_DEC node; pn->pn_kid is the PNK_DOT target.
 * The trailing comments show the stack after each instruction; "N?" is the
 * saved original value, present only for the postfix forms.
 */
static bool
EmitPropIncDec(ExclusiveContext *cx, ParseNode *pn, BytecodeEmitter *bce)
{
    JS_ASSERT(pn->pn_kid->getKind() == PNK_DOT);

    bool post;
    JSOp binop = GetIncDecInfo(pn->getKind(), &post);

    JSOp get = JSOP_GETPROP;
    if (!EmitPropLHS(cx, pn->pn_kid, get, bce))     // OBJ
        return false;
    if (Emit1(cx, bce, JSOP_DUP) < 0)               // OBJ OBJ
        return false;
    if (!EmitAtomOp(cx, pn->pn_kid, JSOP_GETPROP, bce)) // OBJ V
        return false;
    if (Emit1(cx, bce, JSOP_POS) < 0)               // OBJ N (coerce to number)
        return false;
    if (post && Emit1(cx, bce, JSOP_DUP) < 0)       // OBJ N? N
        return false;
    if (Emit1(cx, bce, JSOP_ONE) < 0)               // OBJ N? N 1
        return false;
    if (Emit1(cx, bce, binop) < 0)                  // OBJ N? N+1
        return false;

    if (post) {
        if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)2) < 0)   // N? N+1 OBJ
            return false;
        if (Emit1(cx, bce, JSOP_SWAP) < 0)                  // N? OBJ N+1
            return false;
    }

    if (!EmitAtomOp(cx, pn->pn_kid, JSOP_SETPROP, bce))     // N? N+1
        return false;
    if (post && Emit1(cx, bce, JSOP_POP) < 0)               // RESULT
        return false;

    return true;
}
|
2272 |
|
/*
 * Emit a pre/post increment or decrement of a plain name.  The *GNAME op
 * variants are chosen when the name's bound op has the JOF_GNAME format
 * (a global name).  Trailing comments show the stack after each
 * instruction; "N?" is the saved original value, postfix forms only.
 */
static bool
EmitNameIncDec(ExclusiveContext *cx, ParseNode *pn, BytecodeEmitter *bce)
{
    const JSCodeSpec *cs = &js_CodeSpec[pn->pn_kid->getOp()];

    bool global = (cs->format & JOF_GNAME);
    bool post;
    JSOp binop = GetIncDecInfo(pn->getKind(), &post);

    if (!EmitAtomOp(cx, pn->pn_kid, global ? JSOP_BINDGNAME : JSOP_BINDNAME, bce))  // OBJ
        return false;
    if (!EmitAtomOp(cx, pn->pn_kid, global ? JSOP_GETGNAME : JSOP_NAME, bce))       // OBJ V
        return false;
    if (Emit1(cx, bce, JSOP_POS) < 0)               // OBJ N (coerce to number)
        return false;
    if (post && Emit1(cx, bce, JSOP_DUP) < 0)       // OBJ N? N
        return false;
    if (Emit1(cx, bce, JSOP_ONE) < 0)               // OBJ N? N 1
        return false;
    if (Emit1(cx, bce, binop) < 0)                  // OBJ N? N+1
        return false;

    if (post) {
        if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)2) < 0)   // N? N+1 OBJ
            return false;
        if (Emit1(cx, bce, JSOP_SWAP) < 0)                  // N? OBJ N+1
            return false;
    }

    if (!EmitAtomOp(cx, pn->pn_kid, global ? JSOP_SETGNAME : JSOP_SETNAME, bce))    // N? N+1
        return false;
    if (post && Emit1(cx, bce, JSOP_POP) < 0)       // RESULT
        return false;

    return true;
}
|
2309 |
|
2310 /* |
|
2311 * Emit bytecode to put operands for a JSOP_GETELEM/CALLELEM/SETELEM/DELELEM |
|
2312 * opcode onto the stack in the right order. In the case of SETELEM, the |
|
2313 * value to be assigned must already be pushed. |
|
2314 */ |
|
2315 static bool |
|
2316 EmitElemOperands(ExclusiveContext *cx, ParseNode *pn, JSOp op, BytecodeEmitter *bce) |
|
2317 { |
|
2318 JS_ASSERT(pn->isArity(PN_BINARY)); |
|
2319 if (!EmitTree(cx, bce, pn->pn_left)) |
|
2320 return false; |
|
2321 if (op == JSOP_CALLELEM && Emit1(cx, bce, JSOP_DUP) < 0) |
|
2322 return false; |
|
2323 if (!EmitTree(cx, bce, pn->pn_right)) |
|
2324 return false; |
|
2325 if (op == JSOP_SETELEM && Emit2(cx, bce, JSOP_PICK, (jsbytecode)2) < 0) |
|
2326 return false; |
|
2327 return true; |
|
2328 } |
|
2329 |
|
2330 static inline bool |
|
2331 EmitElemOpBase(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp op) |
|
2332 { |
|
2333 if (Emit1(cx, bce, op) < 0) |
|
2334 return false; |
|
2335 CheckTypeSet(cx, bce, op); |
|
2336 |
|
2337 if (op == JSOP_CALLELEM) { |
|
2338 if (Emit1(cx, bce, JSOP_SWAP) < 0) |
|
2339 return false; |
|
2340 } |
|
2341 return true; |
|
2342 } |
|
2343 |
|
2344 static bool |
|
2345 EmitElemOp(ExclusiveContext *cx, ParseNode *pn, JSOp op, BytecodeEmitter *bce) |
|
2346 { |
|
2347 return EmitElemOperands(cx, pn, op, bce) && EmitElemOpBase(cx, bce, op); |
|
2348 } |
|
2349 |
|
/*
 * Emit a pre/post increment or decrement of a bracketed element expression.
 * |pn| is the PNK_INC/PNK_DEC node; pn->pn_kid is the PNK_ELEM target.
 * Trailing comments show the stack after each instruction; "N?" is the
 * saved original value, present only for the postfix forms.
 */
static bool
EmitElemIncDec(ExclusiveContext *cx, ParseNode *pn, BytecodeEmitter *bce)
{
    JS_ASSERT(pn->pn_kid->getKind() == PNK_ELEM);

    if (!EmitElemOperands(cx, pn->pn_kid, JSOP_GETELEM, bce))
        return false;

    bool post;
    JSOp binop = GetIncDecInfo(pn->getKind(), &post);

    /*
     * We need to convert the key to an object id first, so that we do not do
     * it inside both the GETELEM and the SETELEM.
     */
                                                    // OBJ KEY*
    if (Emit1(cx, bce, JSOP_TOID) < 0)              // OBJ KEY
        return false;
    if (Emit1(cx, bce, JSOP_DUP2) < 0)              // OBJ KEY OBJ KEY
        return false;
    if (!EmitElemOpBase(cx, bce, JSOP_GETELEM))     // OBJ KEY V
        return false;
    if (Emit1(cx, bce, JSOP_POS) < 0)               // OBJ KEY N (coerce to number)
        return false;
    if (post && Emit1(cx, bce, JSOP_DUP) < 0)       // OBJ KEY N? N
        return false;
    if (Emit1(cx, bce, JSOP_ONE) < 0)               // OBJ KEY N? N 1
        return false;
    if (Emit1(cx, bce, binop) < 0)                  // OBJ KEY N? N+1
        return false;

    if (post) {
        if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)3) < 0)   // KEY N N+1 OBJ
            return false;
        if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)3) < 0)   // N N+1 OBJ KEY
            return false;
        if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)2) < 0)   // N OBJ KEY N+1
            return false;
    }

    if (!EmitElemOpBase(cx, bce, JSOP_SETELEM))     // N? N+1
        return false;
    if (post && Emit1(cx, bce, JSOP_POP) < 0)       // RESULT
        return false;

    return true;
}
|
2397 |
|
2398 static bool |
|
2399 EmitNumberOp(ExclusiveContext *cx, double dval, BytecodeEmitter *bce) |
|
2400 { |
|
2401 int32_t ival; |
|
2402 uint32_t u; |
|
2403 ptrdiff_t off; |
|
2404 jsbytecode *pc; |
|
2405 |
|
2406 if (NumberIsInt32(dval, &ival)) { |
|
2407 if (ival == 0) |
|
2408 return Emit1(cx, bce, JSOP_ZERO) >= 0; |
|
2409 if (ival == 1) |
|
2410 return Emit1(cx, bce, JSOP_ONE) >= 0; |
|
2411 if ((int)(int8_t)ival == ival) |
|
2412 return Emit2(cx, bce, JSOP_INT8, (jsbytecode)(int8_t)ival) >= 0; |
|
2413 |
|
2414 u = (uint32_t)ival; |
|
2415 if (u < JS_BIT(16)) { |
|
2416 EMIT_UINT16_IMM_OP(JSOP_UINT16, u); |
|
2417 } else if (u < JS_BIT(24)) { |
|
2418 off = EmitN(cx, bce, JSOP_UINT24, 3); |
|
2419 if (off < 0) |
|
2420 return false; |
|
2421 pc = bce->code(off); |
|
2422 SET_UINT24(pc, u); |
|
2423 } else { |
|
2424 off = EmitN(cx, bce, JSOP_INT32, 4); |
|
2425 if (off < 0) |
|
2426 return false; |
|
2427 pc = bce->code(off); |
|
2428 SET_INT32(pc, ival); |
|
2429 } |
|
2430 return true; |
|
2431 } |
|
2432 |
|
2433 if (!bce->constList.append(DoubleValue(dval))) |
|
2434 return false; |
|
2435 |
|
2436 return EmitIndex32(cx, JSOP_DOUBLE, bce->constList.length() - 1, bce); |
|
2437 } |
|
2438 |
|
2439 static inline void |
|
2440 SetJumpOffsetAt(BytecodeEmitter *bce, ptrdiff_t off) |
|
2441 { |
|
2442 SET_JUMP_OFFSET(bce->code(off), bce->offset() - off); |
|
2443 } |
|
2444 |
|
2445 static bool |
|
2446 PushUndefinedValues(ExclusiveContext *cx, BytecodeEmitter *bce, unsigned n) |
|
2447 { |
|
2448 for (unsigned i = 0; i < n; ++i) { |
|
2449 if (Emit1(cx, bce, JSOP_UNDEFINED) < 0) |
|
2450 return false; |
|
2451 } |
|
2452 return true; |
|
2453 } |
|
2454 |
|
2455 static bool |
|
2456 InitializeBlockScopedLocalsFromStack(ExclusiveContext *cx, BytecodeEmitter *bce, |
|
2457 Handle<StaticBlockObject *> blockObj) |
|
2458 { |
|
2459 for (unsigned i = blockObj->numVariables(); i > 0; --i) { |
|
2460 if (blockObj->isAliased(i - 1)) { |
|
2461 ScopeCoordinate sc; |
|
2462 sc.setHops(0); |
|
2463 sc.setSlot(BlockObject::RESERVED_SLOTS + i - 1); |
|
2464 if (!EmitAliasedVarOp(cx, JSOP_SETALIASEDVAR, sc, bce)) |
|
2465 return false; |
|
2466 } else { |
|
2467 unsigned local = blockObj->blockIndexToLocalIndex(i - 1); |
|
2468 if (!EmitUnaliasedVarOp(cx, JSOP_SETLOCAL, local, bce)) |
|
2469 return false; |
|
2470 } |
|
2471 if (Emit1(cx, bce, JSOP_POP) < 0) |
|
2472 return false; |
|
2473 } |
|
2474 return true; |
|
2475 } |
|
2476 |
|
2477 static bool |
|
2478 EnterBlockScope(ExclusiveContext *cx, BytecodeEmitter *bce, StmtInfoBCE *stmtInfo, |
|
2479 ObjectBox *objbox, unsigned alreadyPushed = 0) |
|
2480 { |
|
2481 // Initial values for block-scoped locals. |
|
2482 Rooted<StaticBlockObject *> blockObj(cx, &objbox->object->as<StaticBlockObject>()); |
|
2483 if (!PushUndefinedValues(cx, bce, blockObj->numVariables() - alreadyPushed)) |
|
2484 return false; |
|
2485 |
|
2486 if (!EnterNestedScope(cx, bce, stmtInfo, objbox, STMT_BLOCK)) |
|
2487 return false; |
|
2488 |
|
2489 if (!InitializeBlockScopedLocalsFromStack(cx, bce, blockObj)) |
|
2490 return false; |
|
2491 |
|
2492 return true; |
|
2493 } |
|
2494 |
|
2495 /* |
|
2496 * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. |
|
2497 * LLVM is deciding to inline this function which uses a lot of stack space |
|
2498 * into EmitTree which is recursive and uses relatively little stack space. |
|
2499 */ |
|
2500 MOZ_NEVER_INLINE static bool |
|
2501 EmitSwitch(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) |
|
2502 { |
|
2503 JSOp switchOp; |
|
2504 bool hasDefault; |
|
2505 ptrdiff_t top, off, defaultOffset; |
|
2506 ParseNode *pn2, *pn3, *pn4; |
|
2507 int32_t low, high; |
|
2508 int noteIndex; |
|
2509 size_t switchSize; |
|
2510 jsbytecode *pc; |
|
2511 |
|
2512 /* Try for most optimal, fall back if not dense ints. */ |
|
2513 switchOp = JSOP_TABLESWITCH; |
|
2514 hasDefault = false; |
|
2515 defaultOffset = -1; |
|
2516 |
|
2517 pn2 = pn->pn_right; |
|
2518 JS_ASSERT(pn2->isKind(PNK_LEXICALSCOPE) || pn2->isKind(PNK_STATEMENTLIST)); |
|
2519 |
|
2520 /* Push the discriminant. */ |
|
2521 if (!EmitTree(cx, bce, pn->pn_left)) |
|
2522 return false; |
|
2523 |
|
2524 StmtInfoBCE stmtInfo(cx); |
|
2525 if (pn2->isKind(PNK_LEXICALSCOPE)) { |
|
2526 if (!EnterBlockScope(cx, bce, &stmtInfo, pn2->pn_objbox, 0)) |
|
2527 return false; |
|
2528 |
|
2529 stmtInfo.type = STMT_SWITCH; |
|
2530 stmtInfo.update = top = bce->offset(); |
|
2531 /* Advance pn2 to refer to the switch case list. */ |
|
2532 pn2 = pn2->expr(); |
|
2533 } else { |
|
2534 JS_ASSERT(pn2->isKind(PNK_STATEMENTLIST)); |
|
2535 top = bce->offset(); |
|
2536 PushStatementBCE(bce, &stmtInfo, STMT_SWITCH, top); |
|
2537 } |
|
2538 |
|
2539 /* Switch bytecodes run from here till end of final case. */ |
|
2540 uint32_t caseCount = pn2->pn_count; |
|
2541 uint32_t tableLength = 0; |
|
2542 ScopedJSFreePtr<ParseNode*> table(nullptr); |
|
2543 |
|
2544 if (caseCount > JS_BIT(16)) { |
|
2545 bce->parser->tokenStream.reportError(JSMSG_TOO_MANY_CASES); |
|
2546 return false; |
|
2547 } |
|
2548 |
|
2549 if (caseCount == 0 || |
|
2550 (caseCount == 1 && |
|
2551 (hasDefault = (pn2->pn_head->isKind(PNK_DEFAULT))))) { |
|
2552 caseCount = 0; |
|
2553 low = 0; |
|
2554 high = -1; |
|
2555 } else { |
|
2556 bool ok = true; |
|
2557 #define INTMAP_LENGTH 256 |
|
2558 jsbitmap intmap_space[INTMAP_LENGTH]; |
|
2559 jsbitmap *intmap = nullptr; |
|
2560 int32_t intmap_bitlen = 0; |
|
2561 |
|
2562 low = JSVAL_INT_MAX; |
|
2563 high = JSVAL_INT_MIN; |
|
2564 |
|
2565 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) { |
|
2566 if (pn3->isKind(PNK_DEFAULT)) { |
|
2567 hasDefault = true; |
|
2568 caseCount--; /* one of the "cases" was the default */ |
|
2569 continue; |
|
2570 } |
|
2571 |
|
2572 JS_ASSERT(pn3->isKind(PNK_CASE)); |
|
2573 if (switchOp == JSOP_CONDSWITCH) |
|
2574 continue; |
|
2575 |
|
2576 JS_ASSERT(switchOp == JSOP_TABLESWITCH); |
|
2577 |
|
2578 pn4 = pn3->pn_left; |
|
2579 |
|
2580 if (pn4->getKind() != PNK_NUMBER) { |
|
2581 switchOp = JSOP_CONDSWITCH; |
|
2582 continue; |
|
2583 } |
|
2584 |
|
2585 int32_t i; |
|
2586 if (!NumberIsInt32(pn4->pn_dval, &i)) { |
|
2587 switchOp = JSOP_CONDSWITCH; |
|
2588 continue; |
|
2589 } |
|
2590 |
|
2591 if ((unsigned)(i + (int)JS_BIT(15)) >= (unsigned)JS_BIT(16)) { |
|
2592 switchOp = JSOP_CONDSWITCH; |
|
2593 continue; |
|
2594 } |
|
2595 if (i < low) |
|
2596 low = i; |
|
2597 if (high < i) |
|
2598 high = i; |
|
2599 |
|
2600 /* |
|
2601 * Check for duplicates, which require a JSOP_CONDSWITCH. |
|
2602 * We bias i by 65536 if it's negative, and hope that's a rare |
|
2603 * case (because it requires a malloc'd bitmap). |
|
2604 */ |
|
2605 if (i < 0) |
|
2606 i += JS_BIT(16); |
|
2607 if (i >= intmap_bitlen) { |
|
2608 if (!intmap && |
|
2609 size_t(i) < (INTMAP_LENGTH * JS_BITMAP_NBITS)) { |
|
2610 intmap = intmap_space; |
|
2611 intmap_bitlen = INTMAP_LENGTH * JS_BITMAP_NBITS; |
|
2612 } else { |
|
2613 /* Just grab 8K for the worst-case bitmap. */ |
|
2614 intmap_bitlen = JS_BIT(16); |
|
2615 intmap = cx->pod_malloc<jsbitmap>(JS_BIT(16) / JS_BITMAP_NBITS); |
|
2616 if (!intmap) { |
|
2617 js_ReportOutOfMemory(cx); |
|
2618 return false; |
|
2619 } |
|
2620 } |
|
2621 memset(intmap, 0, size_t(intmap_bitlen) / CHAR_BIT); |
|
2622 } |
|
2623 if (JS_TEST_BIT(intmap, i)) { |
|
2624 switchOp = JSOP_CONDSWITCH; |
|
2625 continue; |
|
2626 } |
|
2627 JS_SET_BIT(intmap, i); |
|
2628 } |
|
2629 |
|
2630 if (intmap && intmap != intmap_space) |
|
2631 js_free(intmap); |
|
2632 if (!ok) |
|
2633 return false; |
|
2634 |
|
2635 /* |
|
2636 * Compute table length and select condswitch instead if overlarge or |
|
2637 * more than half-sparse. |
|
2638 */ |
|
2639 if (switchOp == JSOP_TABLESWITCH) { |
|
2640 tableLength = (uint32_t)(high - low + 1); |
|
2641 if (tableLength >= JS_BIT(16) || tableLength > 2 * caseCount) |
|
2642 switchOp = JSOP_CONDSWITCH; |
|
2643 } |
|
2644 } |
|
2645 |
|
2646 /* |
|
2647 * The note has one or two offsets: first tells total switch code length; |
|
2648 * second (if condswitch) tells offset to first JSOP_CASE. |
|
2649 */ |
|
2650 if (switchOp == JSOP_CONDSWITCH) { |
|
2651 /* 0 bytes of immediate for unoptimized switch. */ |
|
2652 switchSize = 0; |
|
2653 noteIndex = NewSrcNote3(cx, bce, SRC_CONDSWITCH, 0, 0); |
|
2654 } else { |
|
2655 JS_ASSERT(switchOp == JSOP_TABLESWITCH); |
|
2656 |
|
2657 /* 3 offsets (len, low, high) before the table, 1 per entry. */ |
|
2658 switchSize = (size_t)(JUMP_OFFSET_LEN * (3 + tableLength)); |
|
2659 noteIndex = NewSrcNote2(cx, bce, SRC_TABLESWITCH, 0); |
|
2660 } |
|
2661 if (noteIndex < 0) |
|
2662 return false; |
|
2663 |
|
2664 /* Emit switchOp followed by switchSize bytes of jump or lookup table. */ |
|
2665 if (EmitN(cx, bce, switchOp, switchSize) < 0) |
|
2666 return false; |
|
2667 |
|
2668 off = -1; |
|
2669 if (switchOp == JSOP_CONDSWITCH) { |
|
2670 int caseNoteIndex = -1; |
|
2671 bool beforeCases = true; |
|
2672 |
|
2673 /* Emit code for evaluating cases and jumping to case statements. */ |
|
2674 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) { |
|
2675 pn4 = pn3->pn_left; |
|
2676 if (pn4 && !EmitTree(cx, bce, pn4)) |
|
2677 return false; |
|
2678 if (caseNoteIndex >= 0) { |
|
2679 /* off is the previous JSOP_CASE's bytecode offset. */ |
|
2680 if (!SetSrcNoteOffset(cx, bce, (unsigned)caseNoteIndex, 0, bce->offset() - off)) |
|
2681 return false; |
|
2682 } |
|
2683 if (!pn4) { |
|
2684 JS_ASSERT(pn3->isKind(PNK_DEFAULT)); |
|
2685 continue; |
|
2686 } |
|
2687 caseNoteIndex = NewSrcNote2(cx, bce, SRC_NEXTCASE, 0); |
|
2688 if (caseNoteIndex < 0) |
|
2689 return false; |
|
2690 off = EmitJump(cx, bce, JSOP_CASE, 0); |
|
2691 if (off < 0) |
|
2692 return false; |
|
2693 pn3->pn_offset = off; |
|
2694 if (beforeCases) { |
|
2695 unsigned noteCount, noteCountDelta; |
|
2696 |
|
2697 /* Switch note's second offset is to first JSOP_CASE. */ |
|
2698 noteCount = bce->notes().length(); |
|
2699 if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 1, off - top)) |
|
2700 return false; |
|
2701 noteCountDelta = bce->notes().length() - noteCount; |
|
2702 if (noteCountDelta != 0) |
|
2703 caseNoteIndex += noteCountDelta; |
|
2704 beforeCases = false; |
|
2705 } |
|
2706 } |
|
2707 |
|
2708 /* |
|
2709 * If we didn't have an explicit default (which could fall in between |
|
2710 * cases, preventing us from fusing this SetSrcNoteOffset with the call |
|
2711 * in the loop above), link the last case to the implicit default for |
|
2712 * the benefit of IonBuilder. |
|
2713 */ |
|
2714 if (!hasDefault && |
|
2715 caseNoteIndex >= 0 && |
|
2716 !SetSrcNoteOffset(cx, bce, (unsigned)caseNoteIndex, 0, bce->offset() - off)) |
|
2717 { |
|
2718 return false; |
|
2719 } |
|
2720 |
|
2721 /* Emit default even if no explicit default statement. */ |
|
2722 defaultOffset = EmitJump(cx, bce, JSOP_DEFAULT, 0); |
|
2723 if (defaultOffset < 0) |
|
2724 return false; |
|
2725 } else { |
|
2726 JS_ASSERT(switchOp == JSOP_TABLESWITCH); |
|
2727 pc = bce->code(top + JUMP_OFFSET_LEN); |
|
2728 |
|
2729 /* Fill in switch bounds, which we know fit in 16-bit offsets. */ |
|
2730 SET_JUMP_OFFSET(pc, low); |
|
2731 pc += JUMP_OFFSET_LEN; |
|
2732 SET_JUMP_OFFSET(pc, high); |
|
2733 pc += JUMP_OFFSET_LEN; |
|
2734 |
|
2735 /* |
|
2736 * Use malloc to avoid arena bloat for programs with many switches. |
|
2737 * ScopedJSFreePtr takes care of freeing it on exit. |
|
2738 */ |
|
2739 if (tableLength != 0) { |
|
2740 table = cx->pod_calloc<ParseNode*>(tableLength); |
|
2741 if (!table) |
|
2742 return false; |
|
2743 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) { |
|
2744 if (pn3->isKind(PNK_DEFAULT)) |
|
2745 continue; |
|
2746 |
|
2747 JS_ASSERT(pn3->isKind(PNK_CASE)); |
|
2748 |
|
2749 pn4 = pn3->pn_left; |
|
2750 JS_ASSERT(pn4->getKind() == PNK_NUMBER); |
|
2751 |
|
2752 int32_t i = int32_t(pn4->pn_dval); |
|
2753 JS_ASSERT(double(i) == pn4->pn_dval); |
|
2754 |
|
2755 i -= low; |
|
2756 JS_ASSERT(uint32_t(i) < tableLength); |
|
2757 table[i] = pn3; |
|
2758 } |
|
2759 } |
|
2760 } |
|
2761 |
|
2762 /* Emit code for each case's statements, copying pn_offset up to pn3. */ |
|
2763 for (pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) { |
|
2764 if (switchOp == JSOP_CONDSWITCH && !pn3->isKind(PNK_DEFAULT)) |
|
2765 SetJumpOffsetAt(bce, pn3->pn_offset); |
|
2766 pn4 = pn3->pn_right; |
|
2767 if (!EmitTree(cx, bce, pn4)) |
|
2768 return false; |
|
2769 pn3->pn_offset = pn4->pn_offset; |
|
2770 if (pn3->isKind(PNK_DEFAULT)) |
|
2771 off = pn3->pn_offset - top; |
|
2772 } |
|
2773 |
|
2774 if (!hasDefault) { |
|
2775 /* If no default case, offset for default is to end of switch. */ |
|
2776 off = bce->offset() - top; |
|
2777 } |
|
2778 |
|
2779 /* We better have set "off" by now. */ |
|
2780 JS_ASSERT(off != -1); |
|
2781 |
|
2782 /* Set the default offset (to end of switch if no default). */ |
|
2783 if (switchOp == JSOP_CONDSWITCH) { |
|
2784 pc = nullptr; |
|
2785 JS_ASSERT(defaultOffset != -1); |
|
2786 SET_JUMP_OFFSET(bce->code(defaultOffset), off - (defaultOffset - top)); |
|
2787 } else { |
|
2788 pc = bce->code(top); |
|
2789 SET_JUMP_OFFSET(pc, off); |
|
2790 pc += JUMP_OFFSET_LEN; |
|
2791 } |
|
2792 |
|
2793 /* Set the SRC_SWITCH note's offset operand to tell end of switch. */ |
|
2794 off = bce->offset() - top; |
|
2795 if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 0, off)) |
|
2796 return false; |
|
2797 |
|
2798 if (switchOp == JSOP_TABLESWITCH) { |
|
2799 /* Skip over the already-initialized switch bounds. */ |
|
2800 pc += 2 * JUMP_OFFSET_LEN; |
|
2801 |
|
2802 /* Fill in the jump table, if there is one. */ |
|
2803 for (uint32_t i = 0; i < tableLength; i++) { |
|
2804 pn3 = table[i]; |
|
2805 off = pn3 ? pn3->pn_offset - top : 0; |
|
2806 SET_JUMP_OFFSET(pc, off); |
|
2807 pc += JUMP_OFFSET_LEN; |
|
2808 } |
|
2809 } |
|
2810 |
|
2811 if (pn->pn_right->isKind(PNK_LEXICALSCOPE)) { |
|
2812 if (!LeaveNestedScope(cx, bce, &stmtInfo)) |
|
2813 return false; |
|
2814 } else { |
|
2815 if (!PopStatementBCE(cx, bce)) |
|
2816 return false; |
|
2817 } |
|
2818 |
|
2819 return true; |
|
2820 } |
|
2821 |
|
2822 bool |
|
2823 BytecodeEmitter::isRunOnceLambda() |
|
2824 { |
|
2825 // The run once lambda flags set by the parser are approximate, and we look |
|
2826 // at properties of the function itself before deciding to emit a function |
|
2827 // as a run once lambda. |
|
2828 |
|
2829 if (!(parent && parent->emittingRunOnceLambda) && !lazyRunOnceLambda) |
|
2830 return false; |
|
2831 |
|
2832 FunctionBox *funbox = sc->asFunctionBox(); |
|
2833 return !funbox->argumentsHasLocalBinding() && |
|
2834 !funbox->isGenerator() && |
|
2835 !funbox->function()->name(); |
|
2836 } |
|
2837 |
|
bool
frontend::EmitFunctionScript(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *body)
{
    /*
     * IonBuilder has assumptions about what may occur immediately after
     * script->main (e.g., in the case of destructuring params). Thus, put the
     * following ops into the range [script->code, script->main). Note:
     * execution starts from script->code, so this has no semantic effect.
     */

    FunctionBox *funbox = bce->sc->asFunctionBox();

    // If 'arguments' is used, materialize the arguments object in the
    // prologue and store it into its var binding (aliased or local).
    if (funbox->argumentsHasLocalBinding()) {
        JS_ASSERT(bce->offset() == 0);  /* See JSScript::argumentsBytecode. */
        bce->switchToProlog();
        if (Emit1(cx, bce, JSOP_ARGUMENTS) < 0)
            return false;
        InternalBindingsHandle bindings(bce->script, &bce->script->bindings);
        uint32_t varIndex = Bindings::argumentsVarIndex(cx, bindings);
        if (bce->script->varIsAliased(varIndex)) {
            // Closed-over binding: store through a scope coordinate.
            ScopeCoordinate sc;
            sc.setHops(0);
            JS_ALWAYS_TRUE(LookupAliasedNameSlot(bce->script, cx->names().arguments, &sc));
            if (!EmitAliasedVarOp(cx, JSOP_SETALIASEDVAR, sc, bce))
                return false;
        } else {
            if (!EmitUnaliasedVarOp(cx, JSOP_SETLOCAL, varIndex, bce))
                return false;
        }
        if (Emit1(cx, bce, JSOP_POP) < 0)
            return false;
        bce->switchToMain();
    }

    // Generators announce themselves with JSOP_GENERATOR in the prologue.
    if (funbox->isGenerator()) {
        bce->switchToProlog();
        if (Emit1(cx, bce, JSOP_GENERATOR) < 0)
            return false;
        bce->switchToMain();
    }

    /*
     * Emit a prologue for run-once scripts which will deoptimize JIT code if
     * the script ends up running multiple times via foo.caller related
     * shenanigans.
     */
    bool runOnce = bce->isRunOnceLambda();
    if (runOnce) {
        bce->switchToProlog();
        if (Emit1(cx, bce, JSOP_RUNONCE) < 0)
            return false;
        bce->switchToMain();
    }

    // Emit the function body itself into the main section.
    if (!EmitTree(cx, bce, body))
        return false;

    // If we fall off the end of an ES6 generator, return a boxed iterator
    // result object of the form { value: undefined, done: true }.
    if (bce->sc->isFunctionBox() && bce->sc->asFunctionBox()->isStarGenerator()) {
        if (!EmitPrepareIteratorResult(cx, bce))
            return false;
        if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
            return false;
        if (!EmitFinishIteratorResult(cx, bce, true))
            return false;

        // No need to check for finally blocks, etc as in EmitReturn.
        if (Emit1(cx, bce, JSOP_RETURN) < 0)
            return false;
    }

    /*
     * Always end the script with a JSOP_RETRVAL. Some other parts of the codebase
     * depend on this opcode, e.g. js_InternalInterpret.
     */
    if (Emit1(cx, bce, JSOP_RETRVAL) < 0)
        return false;

    if (!JSScript::fullyInitFromEmitter(cx, bce->script, bce))
        return false;

    /*
     * If this function is only expected to run once, mark the script so that
     * initializers created within it may be given more precise types.
     */
    if (runOnce) {
        bce->script->setTreatAsRunOnce();
        JS_ASSERT(!bce->script->hasRunOnce());
    }

    /* Initialize fun->script() so that the debugger has a valid fun->script(). */
    RootedFunction fun(cx, bce->script->functionNonDelazifying());
    JS_ASSERT(fun->isInterpreted());

    if (fun->isInterpretedLazy())
        fun->setUnlazifiedScript(bce->script);
    else
        fun->setScript(bce->script);

    bce->tellDebuggerAboutCompiledScript(cx);

    return true;
}
|
2941 |
|
2942 static bool |
|
2943 MaybeEmitVarDecl(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp prologOp, ParseNode *pn, |
|
2944 jsatomid *result) |
|
2945 { |
|
2946 jsatomid atomIndex; |
|
2947 |
|
2948 if (!pn->pn_cookie.isFree()) { |
|
2949 atomIndex = pn->pn_cookie.slot(); |
|
2950 } else { |
|
2951 if (!bce->makeAtomIndex(pn->pn_atom, &atomIndex)) |
|
2952 return false; |
|
2953 } |
|
2954 |
|
2955 if (JOF_OPTYPE(pn->getOp()) == JOF_ATOM && |
|
2956 (!bce->sc->isFunctionBox() || bce->sc->asFunctionBox()->isHeavyweight())) |
|
2957 { |
|
2958 bce->switchToProlog(); |
|
2959 if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin)) |
|
2960 return false; |
|
2961 if (!EmitIndexOp(cx, prologOp, atomIndex, bce)) |
|
2962 return false; |
|
2963 bce->switchToMain(); |
|
2964 } |
|
2965 |
|
2966 if (result) |
|
2967 *result = atomIndex; |
|
2968 return true; |
|
2969 } |
|
2970 |
|
/*
 * This enum tells EmitVariables and the destructuring functions how emit the
 * given Parser::variables parse tree. In the base case, DefineVars, the caller
 * only wants variables to be defined in the prologue (if necessary). For
 * PushInitialValues, variable initializer expressions are evaluated and left
 * on the stack. For InitializeVars, the initializer expressions values are
 * assigned (to local variables) and popped.
 */
enum VarEmitOption
{
    DefineVars = 0,        /* declare only; emit no initializer code */
    PushInitialValues = 1, /* evaluate initializers and leave them on the stack */
    InitializeVars = 2     /* assign initializers to bindings, then pop */
};

/* Signature shared by EmitDestructuringDecl and EmitDestructuringDecls. */
typedef bool
(*DestructuringDeclEmitter)(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp prologOp, ParseNode *pn);
|
2988 |
|
/*
 * Leaf case of EmitDestructuringDecls: bind a single name appearing in a
 * destructuring pattern to its slot and emit its prologue declaration if one
 * is needed.
 */
static bool
EmitDestructuringDecl(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp prologOp, ParseNode *pn)
{
    JS_ASSERT(pn->isKind(PNK_NAME));
    if (!BindNameToSlot(cx, bce, pn))
        return false;

    JS_ASSERT(!pn->isOp(JSOP_CALLEE));
    return MaybeEmitVarDecl(cx, bce, prologOp, pn, nullptr);
}
|
2999 |
|
3000 static bool |
|
3001 EmitDestructuringDecls(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp prologOp, |
|
3002 ParseNode *pattern) |
|
3003 { |
|
3004 if (pattern->isKind(PNK_ARRAY)) { |
|
3005 for (ParseNode *element = pattern->pn_head; element; element = element->pn_next) { |
|
3006 if (element->isKind(PNK_ELISION)) |
|
3007 continue; |
|
3008 DestructuringDeclEmitter emitter = |
|
3009 element->isKind(PNK_NAME) ? EmitDestructuringDecl : EmitDestructuringDecls; |
|
3010 if (!emitter(cx, bce, prologOp, element)) |
|
3011 return false; |
|
3012 } |
|
3013 return true; |
|
3014 } |
|
3015 |
|
3016 MOZ_ASSERT(pattern->isKind(PNK_OBJECT)); |
|
3017 for (ParseNode *member = pattern->pn_head; member; member = member->pn_next) { |
|
3018 ParseNode *target = member->pn_right; |
|
3019 DestructuringDeclEmitter emitter = |
|
3020 target->isKind(PNK_NAME) ? EmitDestructuringDecl : EmitDestructuringDecls; |
|
3021 if (!emitter(cx, bce, prologOp, target)) |
|
3022 return false; |
|
3023 } |
|
3024 return true; |
|
3025 } |
|
3026 |
|
3027 static bool |
|
3028 EmitDestructuringOpsHelper(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, |
|
3029 VarEmitOption emitOption); |
|
3030 |
|
/*
 * EmitDestructuringLHS assumes the to-be-destructured value has been pushed on
 * the stack and emits code to destructure a single lhs expression (either a
 * name or a compound []/{} expression).
 *
 * If emitOption is InitializeVars, the to-be-destructured value is assigned to
 * locals and ultimately the initial slot is popped (-1 total depth change).
 *
 * If emitOption is PushInitialValues, the to-be-destructured value is replaced
 * with the initial values of the N (where 0 <= N) variables assigned in the
 * lhs expression. (Same post-condition as EmitDestructuringOpsHelper)
 */
static bool
EmitDestructuringLHS(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, VarEmitOption emitOption)
{
    JS_ASSERT(emitOption != DefineVars);

    // Now emit the lvalue opcode sequence. If the lvalue is a nested
    // destructuring initialiser-form, call ourselves to handle it, then pop
    // the matched value. Otherwise emit an lvalue bytecode sequence followed
    // by an assignment op.
    if (pn->isKind(PNK_ARRAY) || pn->isKind(PNK_OBJECT)) {
        if (!EmitDestructuringOpsHelper(cx, bce, pn, emitOption))
            return false;
        if (emitOption == InitializeVars) {
            // Per its post-condition, EmitDestructuringOpsHelper has left the
            // to-be-destructured value on top of the stack.
            if (Emit1(cx, bce, JSOP_POP) < 0)
                return false;
        }
    } else if (emitOption == PushInitialValues) {
        // The lhs is a simple name so the to-be-destructured value is
        // its initial value and there is nothing to do.
        JS_ASSERT(pn->getOp() == JSOP_GETLOCAL);
        JS_ASSERT(pn->pn_dflags & PND_BOUND);
    } else {
        switch (pn->getKind()) {
          case PNK_NAME:
            if (!BindNameToSlot(cx, bce, pn))
                return false;

            // Allow 'const [x,y] = o', make 'const x,y; [x,y] = o' a nop.
            if (pn->isConst() && !pn->isDefn())
                return Emit1(cx, bce, JSOP_POP) >= 0;

            switch (pn->getOp()) {
              case JSOP_SETNAME:
              case JSOP_SETGNAME:
              case JSOP_SETCONST: {
                // This is like ordinary assignment, but with one difference.
                //
                // In `a = b`, we first determine a binding for `a` (using
                // JSOP_BINDNAME or JSOP_BINDGNAME), then we evaluate `b`, then
                // a JSOP_SETNAME instruction.
                //
                // In `[a] = [b]`, per spec, `b` is evaluated first, then we
                // determine a binding for `a`. Then we need to do assignment--
                // but the operands are on the stack in the wrong order for
                // JSOP_SETPROP, so we have to add a JSOP_SWAP.
                jsatomid atomIndex;
                if (!bce->makeAtomIndex(pn->pn_atom, &atomIndex))
                    return false;

                // JSOP_SETCONST takes no binding object, so no swap is needed.
                if (!pn->isOp(JSOP_SETCONST)) {
                    JSOp bindOp = pn->isOp(JSOP_SETNAME) ? JSOP_BINDNAME : JSOP_BINDGNAME;
                    if (!EmitIndex32(cx, bindOp, atomIndex, bce))
                        return false;
                    if (Emit1(cx, bce, JSOP_SWAP) < 0)
                        return false;
                }

                if (!EmitIndexOp(cx, pn->getOp(), atomIndex, bce))
                    return false;
                break;
              }

              case JSOP_SETLOCAL:
              case JSOP_SETARG:
                if (!EmitVarOp(cx, pn, pn->getOp(), bce))
                    return false;
                break;

              default:
                MOZ_ASSUME_UNREACHABLE("EmitDestructuringLHS: bad name op");
            }
            break;

          case PNK_DOT:
            // See the (PNK_NAME, JSOP_SETNAME) case above.
            //
            // In `a.x = b`, `a` is evaluated first, then `b`, then a
            // JSOP_SETPROP instruction.
            //
            // In `[a.x] = [b]`, per spec, `b` is evaluated before `a`. Then we
            // need a property set -- but the operands are on the stack in the
            // wrong order for JSOP_SETPROP, so we have to add a JSOP_SWAP.
            if (!EmitTree(cx, bce, pn->pn_expr))
                return false;
            if (Emit1(cx, bce, JSOP_SWAP) < 0)
                return false;
            if (!EmitAtomOp(cx, pn, JSOP_SETPROP, bce))
                return false;
            break;

          case PNK_ELEM:
            // See the comment at `case PNK_DOT:` above. This case,
            // `[a[x]] = [b]`, is handled much the same way. The JSOP_SWAP
            // is emitted by EmitElemOperands.
            if (!EmitElemOp(cx, pn, JSOP_SETELEM, bce))
                return false;
            break;

          case PNK_CALL:
            JS_ASSERT(pn->pn_xflags & PNX_SETCALL);
            if (!EmitTree(cx, bce, pn))
                return false;

            // Pop the call return value. Below, we pop the RHS too, balancing
            // the stack --- presumably for the benefit of bytecode
            // analysis. (The interpreter will never reach these instructions
            // since we just emitted JSOP_SETCALL, which always throws. It's
            // possible no analyses actually depend on this either.)
            if (Emit1(cx, bce, JSOP_POP) < 0)
                return false;
            break;

          default:
            MOZ_ASSUME_UNREACHABLE("EmitDestructuringLHS: bad lhs kind");
        }

        // Pop the assigned value.
        if (Emit1(cx, bce, JSOP_POP) < 0)
            return false;
    }

    return true;
}
|
3168 |
|
3169 /* |
|
3170 * Recursive helper for EmitDestructuringOps. |
|
3171 * EmitDestructuringOpsHelper assumes the to-be-destructured value has been |
|
3172 * pushed on the stack and emits code to destructure each part of a [] or {} |
|
3173 * lhs expression. |
|
3174 * |
|
3175 * If emitOption is InitializeVars, the initial to-be-destructured value is |
|
3176 * left untouched on the stack and the overall depth is not changed. |
|
3177 * |
|
3178 * If emitOption is PushInitialValues, the to-be-destructured value is replaced |
|
3179 * with the initial values of the N (where 0 <= N) variables assigned in the |
|
3180 * lhs expression. (Same post-condition as EmitDestructuringLHS) |
|
3181 */ |
|
3182 static bool |
|
3183 EmitDestructuringOpsHelper(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, |
|
3184 VarEmitOption emitOption) |
|
3185 { |
|
3186 JS_ASSERT(emitOption != DefineVars); |
|
3187 |
|
3188 unsigned index; |
|
3189 ParseNode *pn2, *pn3; |
|
3190 bool doElemOp; |
|
3191 |
|
3192 #ifdef DEBUG |
|
3193 int stackDepth = bce->stackDepth; |
|
3194 JS_ASSERT(stackDepth != 0); |
|
3195 JS_ASSERT(pn->isArity(PN_LIST)); |
|
3196 JS_ASSERT(pn->isKind(PNK_ARRAY) || pn->isKind(PNK_OBJECT)); |
|
3197 #endif |
|
3198 |
|
3199 index = 0; |
|
3200 for (pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) { |
|
3201 /* Duplicate the value being destructured to use as a reference base. */ |
|
3202 if (Emit1(cx, bce, JSOP_DUP) < 0) |
|
3203 return false; |
|
3204 |
|
3205 /* |
|
3206 * Now push the property name currently being matched, which is either |
|
3207 * the array initialiser's current index, or the current property name |
|
3208 * "label" on the left of a colon in the object initialiser. Set pn3 |
|
3209 * to the lvalue node, which is in the value-initializing position. |
|
3210 */ |
|
3211 doElemOp = true; |
|
3212 if (pn->isKind(PNK_ARRAY)) { |
|
3213 if (!EmitNumberOp(cx, index, bce)) |
|
3214 return false; |
|
3215 pn3 = pn2; |
|
3216 } else { |
|
3217 JS_ASSERT(pn->isKind(PNK_OBJECT)); |
|
3218 JS_ASSERT(pn2->isKind(PNK_COLON)); |
|
3219 |
|
3220 ParseNode *key = pn2->pn_left; |
|
3221 if (key->isKind(PNK_NUMBER)) { |
|
3222 if (!EmitNumberOp(cx, key->pn_dval, bce)) |
|
3223 return false; |
|
3224 } else { |
|
3225 MOZ_ASSERT(key->isKind(PNK_STRING) || key->isKind(PNK_NAME)); |
|
3226 PropertyName *name = key->pn_atom->asPropertyName(); |
|
3227 |
|
3228 // The parser already checked for atoms representing indexes and |
|
3229 // used PNK_NUMBER instead, but also watch for ids which TI treats |
|
3230 // as indexes for simplification of downstream analysis. |
|
3231 jsid id = NameToId(name); |
|
3232 if (id != types::IdToTypeId(id)) { |
|
3233 if (!EmitTree(cx, bce, key)) |
|
3234 return false; |
|
3235 } else { |
|
3236 if (!EmitAtomOp(cx, name, JSOP_GETPROP, bce)) |
|
3237 return false; |
|
3238 doElemOp = false; |
|
3239 } |
|
3240 } |
|
3241 |
|
3242 pn3 = pn2->pn_right; |
|
3243 } |
|
3244 |
|
3245 if (doElemOp) { |
|
3246 /* |
|
3247 * Ok, get the value of the matching property name. This leaves |
|
3248 * that value on top of the value being destructured, so the stack |
|
3249 * is one deeper than when we started. |
|
3250 */ |
|
3251 if (!EmitElemOpBase(cx, bce, JSOP_GETELEM)) |
|
3252 return false; |
|
3253 JS_ASSERT(bce->stackDepth >= stackDepth + 1); |
|
3254 } |
|
3255 |
|
3256 /* Elision node makes a hole in the array destructurer. */ |
|
3257 if (pn3->isKind(PNK_ELISION)) { |
|
3258 JS_ASSERT(pn->isKind(PNK_ARRAY)); |
|
3259 JS_ASSERT(pn2 == pn3); |
|
3260 if (Emit1(cx, bce, JSOP_POP) < 0) |
|
3261 return false; |
|
3262 } else { |
|
3263 int32_t depthBefore = bce->stackDepth; |
|
3264 if (!EmitDestructuringLHS(cx, bce, pn3, emitOption)) |
|
3265 return false; |
|
3266 |
|
3267 if (emitOption == PushInitialValues) { |
|
3268 /* |
|
3269 * After '[x,y]' in 'let ([[x,y], z] = o)', the stack is |
|
3270 * | to-be-destructured-value | x | y | |
|
3271 * The goal is: |
|
3272 * | x | y | z | |
|
3273 * so emit a pick to produce the intermediate state |
|
3274 * | x | y | to-be-destructured-value | |
|
3275 * before destructuring z. This gives the loop invariant that |
|
3276 * the to-be-destructured-value is always on top of the stack. |
|
3277 */ |
|
3278 JS_ASSERT((bce->stackDepth - bce->stackDepth) >= -1); |
|
3279 uint32_t pickDistance = (uint32_t)((bce->stackDepth + 1) - depthBefore); |
|
3280 if (pickDistance > 0) { |
|
3281 if (pickDistance > UINT8_MAX) { |
|
3282 bce->reportError(pn3, JSMSG_TOO_MANY_LOCALS); |
|
3283 return false; |
|
3284 } |
|
3285 if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)pickDistance) < 0) |
|
3286 return false; |
|
3287 } |
|
3288 } |
|
3289 } |
|
3290 |
|
3291 ++index; |
|
3292 } |
|
3293 |
|
3294 if (emitOption == PushInitialValues) { |
|
3295 /* |
|
3296 * Per the above loop invariant, to-be-destructured-value is at the top |
|
3297 * of the stack. To achieve the post-condition, pop it. |
|
3298 */ |
|
3299 if (Emit1(cx, bce, JSOP_POP) < 0) |
|
3300 return false; |
|
3301 } |
|
3302 |
|
3303 return true; |
|
3304 } |
|
3305 |
|
3306 static bool |
|
3307 EmitDestructuringOps(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, bool isLet = false) |
|
3308 { |
|
3309 /* |
|
3310 * Call our recursive helper to emit the destructuring assignments and |
|
3311 * related stack manipulations. |
|
3312 */ |
|
3313 VarEmitOption emitOption = isLet ? PushInitialValues : InitializeVars; |
|
3314 return EmitDestructuringOpsHelper(cx, bce, pn, emitOption); |
|
3315 } |
|
3316 |
|
3317 static bool |
|
3318 EmitGroupAssignment(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp prologOp, |
|
3319 ParseNode *lhs, ParseNode *rhs) |
|
3320 { |
|
3321 uint32_t depth, limit, i, nslots; |
|
3322 ParseNode *pn; |
|
3323 |
|
3324 depth = limit = (uint32_t) bce->stackDepth; |
|
3325 for (pn = rhs->pn_head; pn; pn = pn->pn_next) { |
|
3326 if (limit == JS_BIT(16)) { |
|
3327 bce->reportError(rhs, JSMSG_ARRAY_INIT_TOO_BIG); |
|
3328 return false; |
|
3329 } |
|
3330 |
|
3331 /* MaybeEmitGroupAssignment won't call us if rhs is holey. */ |
|
3332 JS_ASSERT(!pn->isKind(PNK_ELISION)); |
|
3333 if (!EmitTree(cx, bce, pn)) |
|
3334 return false; |
|
3335 ++limit; |
|
3336 } |
|
3337 |
|
3338 i = depth; |
|
3339 for (pn = lhs->pn_head; pn; pn = pn->pn_next, ++i) { |
|
3340 /* MaybeEmitGroupAssignment requires lhs->pn_count <= rhs->pn_count. */ |
|
3341 JS_ASSERT(i < limit); |
|
3342 |
|
3343 if (!EmitDupAt(cx, bce, i)) |
|
3344 return false; |
|
3345 |
|
3346 if (pn->isKind(PNK_ELISION)) { |
|
3347 if (Emit1(cx, bce, JSOP_POP) < 0) |
|
3348 return false; |
|
3349 } else { |
|
3350 if (!EmitDestructuringLHS(cx, bce, pn, InitializeVars)) |
|
3351 return false; |
|
3352 } |
|
3353 } |
|
3354 |
|
3355 nslots = limit - depth; |
|
3356 EMIT_UINT16_IMM_OP(JSOP_POPN, nslots); |
|
3357 bce->stackDepth = (uint32_t) depth; |
|
3358 return true; |
|
3359 } |
|
3360 |
|
/* Whether a group assignment also declares its lhs names (var/let head) or not. */
enum GroupOption { GroupIsDecl, GroupIsNotDecl };
|
3362 |
|
3363 /* |
|
3364 * Helper called with pop out param initialized to a JSOP_POP* opcode. If we |
|
3365 * can emit a group assignment sequence, which results in 0 stack depth delta, |
|
3366 * we set *pop to JSOP_NOP so callers can veto emitting pn followed by a pop. |
|
3367 */ |
|
3368 static bool |
|
3369 MaybeEmitGroupAssignment(ExclusiveContext *cx, BytecodeEmitter *bce, JSOp prologOp, ParseNode *pn, |
|
3370 GroupOption groupOption, JSOp *pop) |
|
3371 { |
|
3372 JS_ASSERT(pn->isKind(PNK_ASSIGN)); |
|
3373 JS_ASSERT(pn->isOp(JSOP_NOP)); |
|
3374 JS_ASSERT(*pop == JSOP_POP || *pop == JSOP_SETRVAL); |
|
3375 |
|
3376 ParseNode *lhs = pn->pn_left; |
|
3377 ParseNode *rhs = pn->pn_right; |
|
3378 if (lhs->isKind(PNK_ARRAY) && rhs->isKind(PNK_ARRAY) && |
|
3379 !(rhs->pn_xflags & PNX_SPECIALARRAYINIT) && |
|
3380 lhs->pn_count <= rhs->pn_count) |
|
3381 { |
|
3382 if (groupOption == GroupIsDecl && !EmitDestructuringDecls(cx, bce, prologOp, lhs)) |
|
3383 return false; |
|
3384 if (!EmitGroupAssignment(cx, bce, prologOp, lhs, rhs)) |
|
3385 return false; |
|
3386 *pop = JSOP_NOP; |
|
3387 } |
|
3388 return true; |
|
3389 } |
|
3390 |
|
3391 /* |
|
3392 * Like MaybeEmitGroupAssignment, but for 'let ([x,y] = [a,b]) ...'. |
|
3393 * |
|
3394 * Instead of issuing a sequence |dup|eval-rhs|set-lhs|pop| (which doesn't work |
|
3395 * since the bound vars don't yet have slots), just eval/push each rhs element |
|
3396 * just like what EmitLet would do for 'let (x = a, y = b) ...'. While shorter, |
|
3397 * simpler and more efficient than MaybeEmitGroupAssignment, it is harder to |
|
3398 * decompile so we restrict the ourselves to cases where the lhs and rhs are in |
|
3399 * 1:1 correspondence and lhs elements are simple names. |
|
3400 */ |
|
3401 static bool |
|
3402 MaybeEmitLetGroupDecl(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, JSOp *pop) |
|
3403 { |
|
3404 JS_ASSERT(pn->isKind(PNK_ASSIGN)); |
|
3405 JS_ASSERT(pn->isOp(JSOP_NOP)); |
|
3406 JS_ASSERT(*pop == JSOP_POP || *pop == JSOP_SETRVAL); |
|
3407 |
|
3408 ParseNode *lhs = pn->pn_left; |
|
3409 ParseNode *rhs = pn->pn_right; |
|
3410 if (lhs->isKind(PNK_ARRAY) && rhs->isKind(PNK_ARRAY) && |
|
3411 !(rhs->pn_xflags & PNX_SPECIALARRAYINIT) && |
|
3412 !(lhs->pn_xflags & PNX_SPECIALARRAYINIT) && |
|
3413 lhs->pn_count == rhs->pn_count) |
|
3414 { |
|
3415 for (ParseNode *l = lhs->pn_head; l; l = l->pn_next) { |
|
3416 if (l->getOp() != JSOP_SETLOCAL) |
|
3417 return true; |
|
3418 } |
|
3419 |
|
3420 for (ParseNode *r = rhs->pn_head; r; r = r->pn_next) { |
|
3421 if (!EmitTree(cx, bce, r)) |
|
3422 return false; |
|
3423 } |
|
3424 |
|
3425 *pop = JSOP_NOP; |
|
3426 } |
|
3427 return true; |
|
3428 } |
|
3429 |
|
/*
 * Emit code for a var/let declaration list per emitOption (see VarEmitOption).
 * Handles simple names, name = init assignments, and destructuring patterns,
 * including the group-assignment fast paths.
 */
static bool
EmitVariables(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, VarEmitOption emitOption,
              bool isLet = false)
{
    JS_ASSERT(pn->isArity(PN_LIST));
    JS_ASSERT(isLet == (emitOption == PushInitialValues));

    ParseNode *next;
    for (ParseNode *pn2 = pn->pn_head; ; pn2 = next) {
        if (!UpdateSourceCoordNotes(cx, bce, pn2->pn_pos.begin))
            return false;
        next = pn2->pn_next;

        ParseNode *pn3;
        if (!pn2->isKind(PNK_NAME)) {
            if (pn2->isKind(PNK_ARRAY) || pn2->isKind(PNK_OBJECT)) {
                /*
                 * Emit variable binding ops, but not destructuring ops. The
                 * parser (see Parser::variables) has ensured that our caller
                 * will be the PNK_FOR/PNK_FORIN/PNK_FOROF case in EmitTree, and
                 * that case will emit the destructuring code only after
                 * emitting an enumerating opcode and a branch that tests
                 * whether the enumeration ended.
                 */
                JS_ASSERT(emitOption == DefineVars);
                JS_ASSERT(pn->pn_count == 1);
                if (!EmitDestructuringDecls(cx, bce, pn->getOp(), pn2))
                    return false;
                break;
            }

            /*
             * A destructuring initialiser assignment preceded by var will
             * never occur to the left of 'in' in a for-in loop. As with 'for
             * (var x = i in o)...', this will cause the entire 'var [a, b] =
             * i' to be hoisted out of the loop.
             */
            JS_ASSERT(pn2->isKind(PNK_ASSIGN));
            JS_ASSERT(pn2->isOp(JSOP_NOP));
            JS_ASSERT(emitOption != DefineVars);

            /*
             * To allow the front end to rewrite var f = x; as f = x; when a
             * function f(){} precedes the var, detect simple name assignment
             * here and initialize the name.
             */
            if (pn2->pn_left->isKind(PNK_NAME)) {
                pn3 = pn2->pn_right;
                pn2 = pn2->pn_left;
                goto do_name;
            }

            JSOp op = JSOP_POP;
            if (pn->pn_count == 1) {
                /*
                 * If this is the only destructuring assignment in the list,
                 * try to optimize to a group assignment. If we're in a let
                 * head, pass JSOP_POP rather than the pseudo-prolog JSOP_NOP
                 * in pn->pn_op, to suppress a second (and misplaced) 'let'.
                 */
                JS_ASSERT(!pn2->pn_next);
                if (isLet) {
                    if (!MaybeEmitLetGroupDecl(cx, bce, pn2, &op))
                        return false;
                } else {
                    if (!MaybeEmitGroupAssignment(cx, bce, pn->getOp(), pn2, GroupIsDecl, &op))
                        return false;
                }
            }
            if (op == JSOP_NOP) {
                // Group assignment succeeded: nothing left on the stack to pop.
                pn->pn_xflags = (pn->pn_xflags & ~PNX_POPVAR) | PNX_GROUPINIT;
            } else {
                // General path: declare the pattern's names, evaluate the rhs,
                // then emit the destructuring assignments.
                pn3 = pn2->pn_left;
                if (!EmitDestructuringDecls(cx, bce, pn->getOp(), pn3))
                    return false;

                if (!EmitTree(cx, bce, pn2->pn_right))
                    return false;

                if (!EmitDestructuringOps(cx, bce, pn3, isLet))
                    return false;
            }

            /* If we are not initializing, nothing to pop. */
            if (emitOption != InitializeVars) {
                if (next)
                    continue;
                break;
            }
            goto emit_note_pop;
        }

        /*
         * Load initializer early to share code above that jumps to do_name.
         * NB: if this var redeclares an existing binding, then pn2 is linked
         * on its definition's use-chain and pn_expr has been overlayed with
         * pn_lexdef.
         */
        pn3 = pn2->maybeExpr();

     do_name:
        if (!BindNameToSlot(cx, bce, pn2))
            return false;


        JSOp op;
        op = pn2->getOp();
        JS_ASSERT(op != JSOP_CALLEE);
        JS_ASSERT(!pn2->pn_cookie.isFree() || !pn->isOp(JSOP_NOP));

        jsatomid atomIndex;
        if (!MaybeEmitVarDecl(cx, bce, pn->getOp(), pn2, &atomIndex))
            return false;

        if (pn3) {
            JS_ASSERT(emitOption != DefineVars);
            // Name ops that assign through a binding object need the binding
            // pushed before the initializer value.
            if (op == JSOP_SETNAME || op == JSOP_SETGNAME || op == JSOP_SETINTRINSIC) {
                JS_ASSERT(emitOption != PushInitialValues);
                JSOp bindOp;
                if (op == JSOP_SETNAME)
                    bindOp = JSOP_BINDNAME;
                else if (op == JSOP_SETGNAME)
                    bindOp = JSOP_BINDGNAME;
                else
                    bindOp = JSOP_BINDINTRINSIC;
                if (!EmitIndex32(cx, bindOp, atomIndex, bce))
                    return false;
            }

            // The initializer is an expression, not a for-head: clear the
            // for-init flag around its emission.
            bool oldEmittingForInit = bce->emittingForInit;
            bce->emittingForInit = false;
            if (!EmitTree(cx, bce, pn3))
                return false;
            bce->emittingForInit = oldEmittingForInit;
        } else if (isLet) {
            /* JSOP_ENTERLETx expects at least 1 slot to have been pushed. */
            if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
                return false;
        }

        /* If we are not initializing, nothing to pop. */
        if (emitOption != InitializeVars) {
            if (next)
                continue;
            break;
        }

        JS_ASSERT_IF(pn2->isDefn(), pn3 == pn2->pn_expr);
        if (!pn2->pn_cookie.isFree()) {
            if (!EmitVarOp(cx, pn2, op, bce))
                return false;
        } else {
            if (!EmitIndexOp(cx, op, atomIndex, bce))
                return false;
        }

     emit_note_pop:
        if (!next)
            break;
        if (Emit1(cx, bce, JSOP_POP) < 0)
            return false;
    }

    if (pn->pn_xflags & PNX_POPVAR) {
        if (Emit1(cx, bce, JSOP_POP) < 0)
            return false;
    }

    return true;
}
|
3600 |
|
/*
 * Emit bytecode for the assignment 'lhs op= rhs'.
 *
 * 'op' is JSOP_NOP for a plain '=' assignment, or the binary opcode (e.g.
 * JSOP_ADD for '+=') for a compound assignment.  'rhs' may be null: that is
 * the for-in/for-of case, where the value to assign has already been pushed
 * on the operand stack by the caller (see the comment at the rhs emission
 * below).
 *
 * Emission proceeds in four phases:
 *   1. push whatever reference components the lhs kind needs (scope object,
 *      base object, base+index, ...), tracking how many in 'offset';
 *   2. for compound ops, fetch the lhs's current value;
 *   3. evaluate (or locate) the rhs, and apply the binary op if compound;
 *   4. emit the specialized store opcode for the lhs kind.
 */
static bool
EmitAssignment(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *lhs, JSOp op, ParseNode *rhs)
{
    /*
     * Check left operand type and generate specialized code for it.
     * Specialize to avoid ECMA "reference type" values on the operand
     * stack, which impose pervasive runtime "GetValue" costs.
     */
    jsatomid atomIndex = (jsatomid) -1;
    /* Count of stack slots pushed for the lhs reference (1 = rhs only). */
    jsbytecode offset = 1;

    /* Phase 1: push the lhs reference components. */
    switch (lhs->getKind()) {
      case PNK_NAME:
        if (!BindNameToSlot(cx, bce, lhs))
            return false;
        if (lhs->pn_cookie.isFree()) {
            /* Free cookie: the name is not a local/arg slot, go by atom. */
            if (!bce->makeAtomIndex(lhs->pn_atom, &atomIndex))
                return false;
            if (!lhs->isConst()) {
                /* Push the scope object that will receive the store. */
                JSOp bindOp;
                if (lhs->isOp(JSOP_SETNAME))
                    bindOp = JSOP_BINDNAME;
                else if (lhs->isOp(JSOP_SETGNAME))
                    bindOp = JSOP_BINDGNAME;
                else
                    bindOp = JSOP_BINDINTRINSIC;
                if (!EmitIndex32(cx, bindOp, atomIndex, bce))
                    return false;
                offset++;
            }
        }
        break;
      case PNK_DOT:
        /* obj.prop: push the base object. */
        if (!EmitTree(cx, bce, lhs->expr()))
            return false;
        offset++;
        if (!bce->makeAtomIndex(lhs->pn_atom, &atomIndex))
            return false;
        break;
      case PNK_ELEM:
        /* obj[index]: push the base object and the index. */
        JS_ASSERT(lhs->isArity(PN_BINARY));
        if (!EmitTree(cx, bce, lhs->pn_left))
            return false;
        if (!EmitTree(cx, bce, lhs->pn_right))
            return false;
        offset += 2;
        break;
      case PNK_ARRAY:
      case PNK_OBJECT:
        /* Destructuring targets are handled wholesale in phase 4. */
        break;
      case PNK_CALL:
        /* A call as assignment target always throws at runtime. */
        JS_ASSERT(lhs->pn_xflags & PNX_SETCALL);
        if (!EmitTree(cx, bce, lhs))
            return false;
        if (Emit1(cx, bce, JSOP_POP) < 0)
            return false;
        break;
      default:
        JS_ASSERT(0);
    }

    /* Phase 2: for compound assignment, fetch the lhs's current value. */
    if (op != JSOP_NOP) {
        JS_ASSERT(rhs);
        switch (lhs->getKind()) {
          case PNK_NAME:
            if (lhs->isConst()) {
                if (lhs->isOp(JSOP_CALLEE)) {
                    if (Emit1(cx, bce, JSOP_CALLEE) < 0)
                        return false;
                } else if (lhs->isOp(JSOP_NAME) || lhs->isOp(JSOP_GETGNAME)) {
                    if (!EmitIndex32(cx, lhs->getOp(), atomIndex, bce))
                        return false;
                } else {
                    JS_ASSERT(JOF_OPTYPE(lhs->getOp()) != JOF_ATOM);
                    if (!EmitVarOp(cx, lhs, lhs->getOp(), bce))
                        return false;
                }
            } else if (lhs->isOp(JSOP_SETNAME)) {
                /* Reuse the bound scope object pushed in phase 1. */
                if (Emit1(cx, bce, JSOP_DUP) < 0)
                    return false;
                if (!EmitIndex32(cx, JSOP_GETXPROP, atomIndex, bce))
                    return false;
            } else if (lhs->isOp(JSOP_SETGNAME)) {
                JS_ASSERT(lhs->pn_cookie.isFree());
                if (!EmitAtomOp(cx, lhs, JSOP_GETGNAME, bce))
                    return false;
            } else if (lhs->isOp(JSOP_SETINTRINSIC)) {
                JS_ASSERT(lhs->pn_cookie.isFree());
                if (!EmitAtomOp(cx, lhs, JSOP_GETINTRINSIC, bce))
                    return false;
            } else {
                /* Slot-addressed name: map the set-op to its get-op. */
                JSOp op;
                switch (lhs->getOp()) {
                  case JSOP_SETARG: op = JSOP_GETARG; break;
                  case JSOP_SETLOCAL: op = JSOP_GETLOCAL; break;
                  case JSOP_SETALIASEDVAR: op = JSOP_GETALIASEDVAR; break;
                  default: MOZ_ASSUME_UNREACHABLE("Bad op");
                }
                if (!EmitVarOp(cx, lhs, op, bce))
                    return false;
            }
            break;
          case PNK_DOT: {
            if (Emit1(cx, bce, JSOP_DUP) < 0)
                return false;
            bool isLength = (lhs->pn_atom == cx->names().length);
            if (!EmitIndex32(cx, isLength ? JSOP_LENGTH : JSOP_GETPROP, atomIndex, bce))
                return false;
            break;
          }
          case PNK_ELEM:
            if (Emit1(cx, bce, JSOP_DUP2) < 0)
                return false;
            if (!EmitElemOpBase(cx, bce, JSOP_GETELEM))
                return false;
            break;
          case PNK_CALL:
            /*
             * We just emitted a JSOP_SETCALL (which will always throw) and
             * popped the call's return value. Push a random value to make sure
             * the stack depth is correct.
             */
            JS_ASSERT(lhs->pn_xflags & PNX_SETCALL);
            if (Emit1(cx, bce, JSOP_NULL) < 0)
                return false;
            break;
          default:;
        }
    }

    /* Phase 3a: now emit the right operand (it may affect the namespace). */
    if (rhs) {
        if (!EmitTree(cx, bce, rhs))
            return false;
    } else {
        /*
         * The value to assign is the next enumeration value in a for-in or
         * for-of loop. That value has already been emitted: by JSOP_ITERNEXT
         * in the for-in case, or via a GETPROP "value" on the result object in
         * the for-of case. If offset == 1, that slot is already at the top of
         * the stack. Otherwise, rearrange the stack to put that value on top.
         */
        if (offset != 1 && Emit2(cx, bce, JSOP_PICK, offset - 1) < 0)
            return false;
    }

    /* Phase 3b: if += etc., emit the binary operator with a source note. */
    if (op != JSOP_NOP) {
        /*
         * Take care to avoid SRC_ASSIGNOP if the left-hand side is a const
         * declared in the current compilation unit, as in this case (just
         * a bit further below) we will avoid emitting the assignment op.
         */
        if (!lhs->isKind(PNK_NAME) || !lhs->isConst()) {
            if (NewSrcNote(cx, bce, SRC_ASSIGNOP) < 0)
                return false;
        }
        if (Emit1(cx, bce, op) < 0)
            return false;
    }

    /* Phase 4: finally, emit the specialized assignment bytecode. */
    switch (lhs->getKind()) {
      case PNK_NAME:
        if (lhs->isConst()) {
            /* A const lhs is never stored; null rhs (for-in/of head) is an error. */
            if (!rhs) {
                bce->reportError(lhs, JSMSG_BAD_FOR_LEFTSIDE);
                return false;
            }
            break;
        }
        if (lhs->isOp(JSOP_SETARG) || lhs->isOp(JSOP_SETLOCAL) || lhs->isOp(JSOP_SETALIASEDVAR)) {
            if (!EmitVarOp(cx, lhs, lhs->getOp(), bce))
                return false;
        } else {
            if (!EmitIndexOp(cx, lhs->getOp(), atomIndex, bce))
                return false;
        }
        break;
      case PNK_DOT:
        if (!EmitIndexOp(cx, JSOP_SETPROP, atomIndex, bce))
            return false;
        break;
      case PNK_CALL:
        /* Do nothing. The JSOP_SETCALL we emitted will always throw. */
        JS_ASSERT(lhs->pn_xflags & PNX_SETCALL);
        break;
      case PNK_ELEM:
        if (Emit1(cx, bce, JSOP_SETELEM) < 0)
            return false;
        break;
      case PNK_ARRAY:
      case PNK_OBJECT:
        if (!EmitDestructuringOps(cx, bce, lhs))
            return false;
        break;
      default:
        JS_ASSERT(0);
    }
    return true;
}
|
3802 |
|
/*
 * Try to fold this parse node to a compile-time constant jsval, storing the
 * result in *vp and returning true on success.  Handles literal numbers,
 * strings, booleans, null, and array/object initialisers whose contents are
 * all themselves constant (the emitter asserts the parser marked them so via
 * the absence of PNX_NONCONST).  Returns false on failure or OOM; PNK_SPREAD
 * returns false without reporting, as spreads can never be constant-folded.
 */
bool
ParseNode::getConstantValue(ExclusiveContext *cx, bool strictChecks, MutableHandleValue vp)
{
    switch (getKind()) {
      case PNK_NUMBER:
        vp.setNumber(pn_dval);
        return true;
      case PNK_STRING:
        vp.setString(pn_atom);
        return true;
      case PNK_TRUE:
        vp.setBoolean(true);
        return true;
      case PNK_FALSE:
        vp.setBoolean(false);
        return true;
      case PNK_NULL:
        vp.setNull();
        return true;
      case PNK_SPREAD:
        /* Spread elements can never produce a compile-time constant. */
        return false;
      case PNK_ARRAY: {
        JS_ASSERT(isOp(JSOP_NEWINIT) && !(pn_xflags & PNX_NONCONST));

        RootedObject obj(cx,
                         NewDenseAllocatedArray(cx, pn_count, nullptr, MaybeSingletonObject));
        if (!obj)
            return false;

        /* Recursively fold each element and define it at its index. */
        unsigned idx = 0;
        RootedId id(cx);
        RootedValue value(cx);
        for (ParseNode *pn = pn_head; pn; idx++, pn = pn->pn_next) {
            if (!pn->getConstantValue(cx, strictChecks, &value))
                return false;
            id = INT_TO_JSID(idx);
            if (!JSObject::defineGeneric(cx, obj, id, value, nullptr, nullptr, JSPROP_ENUMERATE))
                return false;
        }
        JS_ASSERT(idx == pn_count);

        types::FixArrayType(cx, obj);
        vp.setObject(*obj);
        return true;
      }
      case PNK_OBJECT: {
        JS_ASSERT(isOp(JSOP_NEWINIT));
        JS_ASSERT(!(pn_xflags & PNX_NONCONST));

        gc::AllocKind kind = GuessObjectGCKind(pn_count);
        RootedObject obj(cx, NewBuiltinClassInstance(cx, &JSObject::class_, kind, MaybeSingletonObject));
        if (!obj)
            return false;

        /*
         * Each list node is a colon node: pn_left is the property name
         * (number, name, or string), pn_right the value initializer.
         */
        RootedValue value(cx), idvalue(cx);
        for (ParseNode *pn = pn_head; pn; pn = pn->pn_next) {
            if (!pn->pn_right->getConstantValue(cx, strictChecks, &value))
                return false;

            ParseNode *pnid = pn->pn_left;
            if (pnid->isKind(PNK_NUMBER)) {
                idvalue = NumberValue(pnid->pn_dval);
            } else {
                JS_ASSERT(pnid->isKind(PNK_NAME) || pnid->isKind(PNK_STRING));
                JS_ASSERT(pnid->pn_atom != cx->names().proto);
                idvalue = StringValue(pnid->pn_atom);
            }

            /* Fast path: a numeric id that is definitely an array index. */
            uint32_t index;
            if (IsDefinitelyIndex(idvalue, &index)) {
                if (!JSObject::defineElement(cx, obj, index, value, nullptr, nullptr,
                                             JSPROP_ENUMERATE))
                {
                    return false;
                }

                continue;
            }

            /* Otherwise atomize the id; it may still turn out to be an index. */
            JSAtom *name = ToAtom<CanGC>(cx, idvalue);
            if (!name)
                return false;

            if (name->isIndex(&index)) {
                if (!JSObject::defineElement(cx, obj, index, value,
                                             nullptr, nullptr, JSPROP_ENUMERATE))
                    return false;
            } else {
                if (!JSObject::defineProperty(cx, obj, name->asPropertyName(), value,
                                              nullptr, nullptr, JSPROP_ENUMERATE))
                {
                    return false;
                }
            }
        }

        types::FixObjectType(cx, obj);
        vp.setObject(*obj);
        return true;
      }
      default:
        MOZ_ASSUME_UNREACHABLE("Unexpected node");
    }
    return false;
}
|
3908 |
|
3909 static bool |
|
3910 EmitSingletonInitialiser(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) |
|
3911 { |
|
3912 RootedValue value(cx); |
|
3913 if (!pn->getConstantValue(cx, bce->sc->needStrictChecks(), &value)) |
|
3914 return false; |
|
3915 |
|
3916 JS_ASSERT(value.isObject()); |
|
3917 ObjectBox *objbox = bce->parser->newObjectBox(&value.toObject()); |
|
3918 if (!objbox) |
|
3919 return false; |
|
3920 |
|
3921 return EmitObjectOp(cx, objbox, JSOP_OBJECT, bce); |
|
3922 } |
|
3923 |
|
3924 /* See the SRC_FOR source note offsetBias comments later in this file. */ |
|
3925 JS_STATIC_ASSERT(JSOP_NOP_LENGTH == 1); |
|
3926 JS_STATIC_ASSERT(JSOP_POP_LENGTH == 1); |
|
3927 |
|
3928 namespace { |
|
3929 |
|
3930 class EmitLevelManager |
|
3931 { |
|
3932 BytecodeEmitter *bce; |
|
3933 public: |
|
3934 EmitLevelManager(BytecodeEmitter *bce) : bce(bce) { bce->emitLevel++; } |
|
3935 ~EmitLevelManager() { bce->emitLevel--; } |
|
3936 }; |
|
3937 |
|
3938 } /* anonymous namespace */ |
|
3939 |
|
/*
 * Emit one catch block: bind the pending exception to the catch variable
 * (simple name or destructuring pattern), emit the optional catch guard
 * ('catch (e if cond)') with its jump to the next handler, and then emit the
 * catch body.  Must be called with the catch's block scope already entered
 * (bce->topStmt is the STMT_BLOCK that is morphed to STMT_CATCH below); the
 * surrounding try/finally bookkeeping lives in EmitTry.
 */
static bool
EmitCatch(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
    /*
     * Morph STMT_BLOCK to STMT_CATCH, note the block entry code offset,
     * and save the block object atom.
     */
    StmtInfoBCE *stmt = bce->topStmt;
    JS_ASSERT(stmt->type == STMT_BLOCK && stmt->isBlockScope);
    stmt->type = STMT_CATCH;

    /* Go up one statement info record to the TRY or FINALLY record. */
    stmt = stmt->down;
    JS_ASSERT(stmt->type == STMT_TRY || stmt->type == STMT_FINALLY);

    /* Pick up the pending exception and bind it to the catch variable. */
    if (Emit1(cx, bce, JSOP_EXCEPTION) < 0)
        return false;

    /*
     * Dup the exception object if there is a guard for rethrowing to use
     * it later when rethrowing or in other catches.
     */
    if (pn->pn_kid2 && Emit1(cx, bce, JSOP_DUP) < 0)
        return false;

    /* pn_kid1 is the catch parameter: a name or a destructuring pattern. */
    ParseNode *pn2 = pn->pn_kid1;
    switch (pn2->getKind()) {
      case PNK_ARRAY:
      case PNK_OBJECT:
        if (!EmitDestructuringOps(cx, bce, pn2))
            return false;
        if (Emit1(cx, bce, JSOP_POP) < 0)
            return false;
        break;

      case PNK_NAME:
        /* Inline and specialize BindNameToSlot for pn2. */
        JS_ASSERT(!pn2->pn_cookie.isFree());
        if (!EmitVarOp(cx, pn2, JSOP_SETLOCAL, bce))
            return false;
        if (Emit1(cx, bce, JSOP_POP) < 0)
            return false;
        break;

      default:
        JS_ASSERT(0);
    }

    // If there is a guard expression, emit it and arrange to jump to the next
    // catch block if the guard expression is false.
    if (pn->pn_kid2) {
        if (!EmitTree(cx, bce, pn->pn_kid2))
            return false;

        // If the guard expression is false, fall through, pop the block scope,
        // and jump to the next catch block. Otherwise jump over that code and
        // pop the dupped exception.
        ptrdiff_t guardCheck = EmitJump(cx, bce, JSOP_IFNE, 0);
        if (guardCheck < 0)
            return false;

        {
            NonLocalExitScope nle(cx, bce);

            // Move exception back to cx->exception to prepare for
            // the next catch.
            if (Emit1(cx, bce, JSOP_THROWING) < 0)
                return false;

            // Leave the scope for this catch block.
            if (!nle.prepareForNonLocalJump(stmt))
                return false;

            // Jump to the next handler. The jump target is backpatched by EmitTry.
            ptrdiff_t guardJump = EmitJump(cx, bce, JSOP_GOTO, 0);
            if (guardJump < 0)
                return false;
            stmt->guardJump() = guardJump;
        }

        // Back to normal control flow.
        SetJumpOffsetAt(bce, guardCheck);

        // Pop duplicated exception object as we no longer need it.
        if (Emit1(cx, bce, JSOP_POP) < 0)
            return false;
    }

    /* Emit the catch body. */
    return EmitTree(cx, bce, pn->pn_kid3);
}
|
4032 |
|
// Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See the
// comment on EmitSwitch.
//
// Emit a try statement: pn_kid1 is the try block, pn_kid2 the (possibly
// empty) list of catch lexical scopes, pn_kid3 the optional finally block.
// Catch guard jumps emitted by EmitCatch and the end-of-try/catch GOTOs are
// backpatched here, and the try notes that drive runtime unwinding are
// appended last.
MOZ_NEVER_INLINE static bool
EmitTry(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
    StmtInfoBCE stmtInfo(cx);

    // Push stmtInfo to track jumps-over-catches and gosubs-to-finally
    // for later fixup.
    //
    // When a finally block is active (STMT_FINALLY in our parse context),
    // non-local jumps (including jumps-over-catches) result in a GOSUB
    // being written into the bytecode stream and fixed-up later (c.f.
    // EmitBackPatchOp and BackPatch).
    //
    PushStatementBCE(bce, &stmtInfo, pn->pn_kid3 ? STMT_FINALLY : STMT_TRY, bce->offset());

    // Since an exception can be thrown at any place inside the try block,
    // we need to restore the stack and the scope chain before we transfer
    // the control to the exception handler.
    //
    // For that we store in a try note associated with the catch or
    // finally block the stack depth upon the try entry. The interpreter
    // uses this depth to properly unwind the stack and the scope chain.
    //
    int depth = bce->stackDepth;

    // Record the try location, then emit the try block.
    ptrdiff_t noteIndex = NewSrcNote(cx, bce, SRC_TRY);
    if (noteIndex < 0 || Emit1(cx, bce, JSOP_TRY) < 0)
        return false;
    ptrdiff_t tryStart = bce->offset();
    if (!EmitTree(cx, bce, pn->pn_kid1))
        return false;
    JS_ASSERT(depth == bce->stackDepth);

    // GOSUB to finally, if present.
    if (pn->pn_kid3) {
        if (EmitBackPatchOp(cx, bce, &stmtInfo.gosubs()) < 0)
            return false;
    }

    // Source note points to the jump at the end of the try block.
    if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, bce->offset() - tryStart + JSOP_TRY_LENGTH))
        return false;

    // Emit jump over catch and/or finally.
    ptrdiff_t catchJump = -1;
    if (EmitBackPatchOp(cx, bce, &catchJump) < 0)
        return false;

    ptrdiff_t tryEnd = bce->offset();

    // If this try has a catch block, emit it.
    if (ParseNode *pn2 = pn->pn_kid2) {
        // The emitted code for a catch block looks like:
        //
        // [pushblockscope]             only if any local aliased
        // exception
        // if there is a catchguard:
        //   dup
        // setlocal 0; pop              assign or possibly destructure exception
        // if there is a catchguard:
        //   < catchguard code >
        //   ifne POST
        //   debugleaveblock
        //   [popblockscope]            only if any local aliased
        //   throwing                   pop exception to cx->exception
        //   goto <next catch block>
        //   POST: pop
        // < catch block contents >
        // debugleaveblock
        // [popblockscope]              only if any local aliased
        // goto <end of catch blocks>   non-local; finally applies
        //
        // If there's no catch block without a catchguard, the last <next catch
        // block> points to rethrow code. This code will [gosub] to the finally
        // code if appropriate, and is also used for the catch-all trynote for
        // capturing exceptions thrown from catch{} blocks.
        //
        for (ParseNode *pn3 = pn2->pn_head; pn3; pn3 = pn3->pn_next) {
            JS_ASSERT(bce->stackDepth == depth);

            // Emit the lexical scope and catch body.
            JS_ASSERT(pn3->isKind(PNK_LEXICALSCOPE));
            if (!EmitTree(cx, bce, pn3))
                return false;

            // gosub <finally>, if required.
            if (pn->pn_kid3) {
                if (EmitBackPatchOp(cx, bce, &stmtInfo.gosubs()) < 0)
                    return false;
                JS_ASSERT(bce->stackDepth == depth);
            }

            // Jump over the remaining catch blocks. This will get fixed
            // up to jump to after catch/finally.
            if (EmitBackPatchOp(cx, bce, &catchJump) < 0)
                return false;

            // If this catch block had a guard clause, patch the guard jump to
            // come here.
            if (stmtInfo.guardJump() != -1) {
                SetJumpOffsetAt(bce, stmtInfo.guardJump());
                stmtInfo.guardJump() = -1;

                // If this catch block is the last one, rethrow, delegating
                // execution of any finally block to the exception handler.
                if (!pn3->pn_next) {
                    if (Emit1(cx, bce, JSOP_EXCEPTION) < 0)
                        return false;
                    if (Emit1(cx, bce, JSOP_THROW) < 0)
                        return false;
                }
            }
        }
    }

    JS_ASSERT(bce->stackDepth == depth);

    // Emit the finally handler, if there is one.
    ptrdiff_t finallyStart = 0;
    if (pn->pn_kid3) {
        // Fix up the gosubs that might have been emitted before non-local
        // jumps to the finally code.
        if (!BackPatch(cx, bce, stmtInfo.gosubs(), bce->code().end(), JSOP_GOSUB))
            return false;

        finallyStart = bce->offset();

        // Indicate that we're emitting a subroutine body.
        stmtInfo.type = STMT_SUBROUTINE;
        if (!UpdateSourceCoordNotes(cx, bce, pn->pn_kid3->pn_pos.begin))
            return false;
        if (Emit1(cx, bce, JSOP_FINALLY) < 0 ||
            !EmitTree(cx, bce, pn->pn_kid3) ||
            Emit1(cx, bce, JSOP_RETSUB) < 0)
        {
            return false;
        }
        JS_ASSERT(bce->stackDepth == depth);
    }
    if (!PopStatementBCE(cx, bce))
        return false;

    // ReconstructPCStack needs a NOP here to mark the end of the last catch block.
    if (Emit1(cx, bce, JSOP_NOP) < 0)
        return false;

    // Fix up the end-of-try/catch jumps to come here.
    if (!BackPatch(cx, bce, catchJump, bce->code().end(), JSOP_GOTO))
        return false;

    // Add the try note last, to let post-order give us the right ordering
    // (first to last for a given nesting level, inner to outer by level).
    if (pn->pn_kid2 && !bce->tryNoteList.append(JSTRY_CATCH, depth, tryStart, tryEnd))
        return false;

    // If we've got a finally, mark try+catch region with additional
    // trynote to catch exceptions (re)thrown from a catch block or
    // for the try{}finally{} case.
    if (pn->pn_kid3 && !bce->tryNoteList.append(JSTRY_FINALLY, depth, tryStart, finallyStart))
        return false;

    return true;
}
|
4200 |
|
/*
 * Emit an if/else statement: pn_kid1 is the condition, pn_kid2 the then
 * branch, pn_kid3 the optional else branch.  'else if' chains are flattened
 * iteratively via the if_again goto to avoid deep recursion, and SRC_IF_ELSE
 * source notes record the branch-to-jump distance for IonMonkey.
 */
static bool
EmitIf(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
    StmtInfoBCE stmtInfo(cx);

    /* Initialize so we can detect else-if chains and avoid recursion. */
    stmtInfo.type = STMT_IF;
    ptrdiff_t beq = -1;      /* offset of the branch-if-false around the then part */
    ptrdiff_t jmp = -1;      /* offset of the jump over the else part */
    ptrdiff_t noteIndex = -1;

  if_again:
    /* Emit code for the condition before pushing stmtInfo. */
    if (!EmitTree(cx, bce, pn->pn_kid1))
        return false;
    ptrdiff_t top = bce->offset();
    if (stmtInfo.type == STMT_IF) {
        PushStatementBCE(bce, &stmtInfo, STMT_IF, top);
    } else {
        /*
         * We came here from the goto further below that detects else-if
         * chains, so we must mutate stmtInfo back into a STMT_IF record.
         * Also we need a note offset for SRC_IF_ELSE to help IonMonkey.
         */
        JS_ASSERT(stmtInfo.type == STMT_ELSE);
        stmtInfo.type = STMT_IF;
        stmtInfo.update = top;
        if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, jmp - beq))
            return false;
    }

    /* Emit an annotated branch-if-false around the then part. */
    ParseNode *pn3 = pn->pn_kid3;
    noteIndex = NewSrcNote(cx, bce, pn3 ? SRC_IF_ELSE : SRC_IF);
    if (noteIndex < 0)
        return false;
    beq = EmitJump(cx, bce, JSOP_IFEQ, 0);
    if (beq < 0)
        return false;

    /* Emit code for the then and optional else parts. */
    if (!EmitTree(cx, bce, pn->pn_kid2))
        return false;
    if (pn3) {
        /* Modify stmtInfo so we know we're in the else part. */
        stmtInfo.type = STMT_ELSE;

        /*
         * Emit a JSOP_BACKPATCH op to jump from the end of our then part
         * around the else part. The PopStatementBCE call at the bottom of
         * this function will fix up the backpatch chain linked from
         * stmtInfo.breaks.
         */
        jmp = EmitGoto(cx, bce, &stmtInfo, &stmtInfo.breaks);
        if (jmp < 0)
            return false;

        /* Ensure the branch-if-false comes here, then emit the else. */
        SetJumpOffsetAt(bce, beq);
        if (pn3->isKind(PNK_IF)) {
            /* else-if chain: loop instead of recursing into EmitIf. */
            pn = pn3;
            goto if_again;
        }

        if (!EmitTree(cx, bce, pn3))
            return false;

        /*
         * Annotate SRC_IF_ELSE with the offset from branch to jump, for
         * IonMonkey's benefit. We can't just "back up" from the pc
         * of the else clause, because we don't know whether an extended
         * jump was required to leap from the end of the then clause over
         * the else clause.
         */
        if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, jmp - beq))
            return false;
    } else {
        /* No else part, fixup the branch-if-false to come here. */
        SetJumpOffsetAt(bce, beq);
    }
    return PopStatementBCE(cx, bce);
}
|
4283 |
|
4284 /* |
|
4285 * pnLet represents one of: |
|
4286 * |
|
4287 * let-expression: (let (x = y) EXPR) |
|
4288 * let-statement: let (x = y) { ... } |
|
4289 * |
|
4290 * For a let-expression 'let (x = a, [y,z] = b) e', EmitLet produces: |
|
4291 * |
|
4292 * bytecode stackDepth srcnotes |
|
4293 * evaluate a +1 |
|
4294 * evaluate b +1 |
|
4295 * dup +1 |
|
4296 * destructure y |
|
4297 * pick 1 |
|
4298 * dup +1 |
|
4299 * destructure z |
|
4300 * pick 1 |
|
4301 * pop -1 |
|
4302 * setlocal 2 -1 |
|
4303 * setlocal 1 -1 |
|
4304 * setlocal 0 -1 |
|
4305 * pushblockscope (if needed) |
|
4306 * evaluate e +1 |
|
4307 * debugleaveblock |
|
4308 * popblockscope (if needed) |
|
4309 * |
|
4310 * Note that, since pushblockscope simply changes fp->scopeChain and does not |
|
4311 * otherwise touch the stack, evaluation of the let-var initializers must leave |
|
4312 * the initial value in the let-var's future slot. |
|
4313 */ |
|
4314 /* |
|
4315 * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See |
|
4316 * the comment on EmitSwitch. |
|
4317 */ |
|
4318 MOZ_NEVER_INLINE static bool |
|
4319 EmitLet(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pnLet) |
|
4320 { |
|
4321 JS_ASSERT(pnLet->isArity(PN_BINARY)); |
|
4322 ParseNode *varList = pnLet->pn_left; |
|
4323 JS_ASSERT(varList->isArity(PN_LIST)); |
|
4324 ParseNode *letBody = pnLet->pn_right; |
|
4325 JS_ASSERT(letBody->isLet() && letBody->isKind(PNK_LEXICALSCOPE)); |
|
4326 |
|
4327 int letHeadDepth = bce->stackDepth; |
|
4328 |
|
4329 if (!EmitVariables(cx, bce, varList, PushInitialValues, true)) |
|
4330 return false; |
|
4331 |
|
4332 /* Push storage for hoisted let decls (e.g. 'let (x) { let y }'). */ |
|
4333 uint32_t alreadyPushed = bce->stackDepth - letHeadDepth; |
|
4334 StmtInfoBCE stmtInfo(cx); |
|
4335 if (!EnterBlockScope(cx, bce, &stmtInfo, letBody->pn_objbox, alreadyPushed)) |
|
4336 return false; |
|
4337 |
|
4338 if (!EmitTree(cx, bce, letBody->pn_expr)) |
|
4339 return false; |
|
4340 |
|
4341 if (!LeaveNestedScope(cx, bce, &stmtInfo)) |
|
4342 return false; |
|
4343 |
|
4344 return true; |
|
4345 } |
|
4346 |
|
4347 /* |
|
4348 * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See |
|
4349 * the comment on EmitSwitch. |
|
4350 */ |
|
4351 MOZ_NEVER_INLINE static bool |
|
4352 EmitLexicalScope(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) |
|
4353 { |
|
4354 JS_ASSERT(pn->isKind(PNK_LEXICALSCOPE)); |
|
4355 |
|
4356 StmtInfoBCE stmtInfo(cx); |
|
4357 if (!EnterBlockScope(cx, bce, &stmtInfo, pn->pn_objbox, 0)) |
|
4358 return false; |
|
4359 |
|
4360 if (!EmitTree(cx, bce, pn->pn_expr)) |
|
4361 return false; |
|
4362 |
|
4363 if (!LeaveNestedScope(cx, bce, &stmtInfo)) |
|
4364 return false; |
|
4365 |
|
4366 return true; |
|
4367 } |
|
4368 |
|
4369 static bool |
|
4370 EmitWith(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) |
|
4371 { |
|
4372 StmtInfoBCE stmtInfo(cx); |
|
4373 if (!EmitTree(cx, bce, pn->pn_left)) |
|
4374 return false; |
|
4375 if (!EnterNestedScope(cx, bce, &stmtInfo, pn->pn_binary_obj, STMT_WITH)) |
|
4376 return false; |
|
4377 if (!EmitTree(cx, bce, pn->pn_right)) |
|
4378 return false; |
|
4379 if (!LeaveNestedScope(cx, bce, &stmtInfo)) |
|
4380 return false; |
|
4381 return true; |
|
4382 } |
|
4383 |
|
4384 static bool |
|
4385 EmitForOf(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t top) |
|
4386 { |
|
4387 ParseNode *forHead = pn->pn_left; |
|
4388 ParseNode *forBody = pn->pn_right; |
|
4389 |
|
4390 ParseNode *pn1 = forHead->pn_kid1; |
|
4391 bool letDecl = pn1 && pn1->isKind(PNK_LEXICALSCOPE); |
|
4392 JS_ASSERT_IF(letDecl, pn1->isLet()); |
|
4393 |
|
4394 // If the left part is 'var x', emit code to define x if necessary using a |
|
4395 // prolog opcode, but do not emit a pop. |
|
4396 if (pn1) { |
|
4397 ParseNode *decl = letDecl ? pn1->pn_expr : pn1; |
|
4398 JS_ASSERT(decl->isKind(PNK_VAR) || decl->isKind(PNK_LET)); |
|
4399 bce->emittingForInit = true; |
|
4400 if (!EmitVariables(cx, bce, decl, DefineVars)) |
|
4401 return false; |
|
4402 bce->emittingForInit = false; |
|
4403 } |
|
4404 |
|
4405 // For-of loops run with two values on the stack: the iterator and the |
|
4406 // current result object. |
|
4407 |
|
4408 // Compile the object expression to the right of 'of'. |
|
4409 if (!EmitTree(cx, bce, forHead->pn_kid3)) |
|
4410 return false; |
|
4411 |
|
4412 // Convert iterable to iterator. |
|
4413 if (Emit1(cx, bce, JSOP_DUP) < 0) // OBJ OBJ |
|
4414 return false; |
|
4415 if (!EmitAtomOp(cx, cx->names().std_iterator, JSOP_CALLPROP, bce)) // OBJ @@ITERATOR |
|
4416 return false; |
|
4417 if (Emit1(cx, bce, JSOP_SWAP) < 0) // @@ITERATOR OBJ |
|
4418 return false; |
|
4419 if (EmitCall(cx, bce, JSOP_CALL, 0) < 0) // ITER |
|
4420 return false; |
|
4421 CheckTypeSet(cx, bce, JSOP_CALL); |
|
4422 |
|
4423 // Push a dummy result so that we properly enter iteration midstream. |
|
4424 if (Emit1(cx, bce, JSOP_UNDEFINED) < 0) // ITER RESULT |
|
4425 return false; |
|
4426 |
|
4427 // Enter the block before the loop body, after evaluating the obj. |
|
4428 StmtInfoBCE letStmt(cx); |
|
4429 if (letDecl) { |
|
4430 if (!EnterBlockScope(cx, bce, &letStmt, pn1->pn_objbox, 0)) |
|
4431 return false; |
|
4432 } |
|
4433 |
|
4434 LoopStmtInfo stmtInfo(cx); |
|
4435 PushLoopStatement(bce, &stmtInfo, STMT_FOR_OF_LOOP, top); |
|
4436 |
|
4437 // Jump down to the loop condition to minimize overhead assuming at least |
|
4438 // one iteration, as the other loop forms do. Annotate so IonMonkey can |
|
4439 // find the loop-closing jump. |
|
4440 int noteIndex = NewSrcNote(cx, bce, SRC_FOR_OF); |
|
4441 if (noteIndex < 0) |
|
4442 return false; |
|
4443 ptrdiff_t jmp = EmitJump(cx, bce, JSOP_GOTO, 0); |
|
4444 if (jmp < 0) |
|
4445 return false; |
|
4446 |
|
4447 top = bce->offset(); |
|
4448 SET_STATEMENT_TOP(&stmtInfo, top); |
|
4449 if (EmitLoopHead(cx, bce, nullptr) < 0) |
|
4450 return false; |
|
4451 |
|
4452 #ifdef DEBUG |
|
4453 int loopDepth = bce->stackDepth; |
|
4454 #endif |
|
4455 |
|
4456 // Emit code to assign result.value to the iteration variable. |
|
4457 if (Emit1(cx, bce, JSOP_DUP) < 0) // ITER RESULT RESULT |
|
4458 return false; |
|
4459 if (!EmitAtomOp(cx, cx->names().value, JSOP_GETPROP, bce)) // ITER RESULT VALUE |
|
4460 return false; |
|
4461 if (!EmitAssignment(cx, bce, forHead->pn_kid2, JSOP_NOP, nullptr)) // ITER RESULT VALUE |
|
4462 return false; |
|
4463 if (Emit1(cx, bce, JSOP_POP) < 0) // ITER RESULT |
|
4464 return false; |
|
4465 |
|
4466 // The stack should be balanced around the assignment opcode sequence. |
|
4467 JS_ASSERT(bce->stackDepth == loopDepth); |
|
4468 |
|
4469 // Emit code for the loop body. |
|
4470 if (!EmitTree(cx, bce, forBody)) |
|
4471 return false; |
|
4472 |
|
4473 // Set loop and enclosing "update" offsets, for continue. |
|
4474 StmtInfoBCE *stmt = &stmtInfo; |
|
4475 do { |
|
4476 stmt->update = bce->offset(); |
|
4477 } while ((stmt = stmt->down) != nullptr && stmt->type == STMT_LABEL); |
|
4478 |
|
4479 // COME FROM the beginning of the loop to here. |
|
4480 SetJumpOffsetAt(bce, jmp); |
|
4481 if (!EmitLoopEntry(cx, bce, nullptr)) |
|
4482 return false; |
|
4483 |
|
4484 if (Emit1(cx, bce, JSOP_POP) < 0) // ITER |
|
4485 return false; |
|
4486 if (Emit1(cx, bce, JSOP_DUP) < 0) // ITER ITER |
|
4487 return false; |
|
4488 if (Emit1(cx, bce, JSOP_DUP) < 0) // ITER ITER ITER |
|
4489 return false; |
|
4490 if (!EmitAtomOp(cx, cx->names().next, JSOP_CALLPROP, bce)) // ITER ITER NEXT |
|
4491 return false; |
|
4492 if (Emit1(cx, bce, JSOP_SWAP) < 0) // ITER NEXT ITER |
|
4493 return false; |
|
4494 if (Emit1(cx, bce, JSOP_UNDEFINED) < 0) // ITER NEXT ITER UNDEFINED |
|
4495 return false; |
|
4496 if (EmitCall(cx, bce, JSOP_CALL, 1) < 0) // ITER RESULT |
|
4497 return false; |
|
4498 CheckTypeSet(cx, bce, JSOP_CALL); |
|
4499 if (Emit1(cx, bce, JSOP_DUP) < 0) // ITER RESULT RESULT |
|
4500 return false; |
|
4501 if (!EmitAtomOp(cx, cx->names().done, JSOP_GETPROP, bce)) // ITER RESULT DONE? |
|
4502 return false; |
|
4503 |
|
4504 ptrdiff_t beq = EmitJump(cx, bce, JSOP_IFEQ, top - bce->offset()); // ITER RESULT |
|
4505 if (beq < 0) |
|
4506 return false; |
|
4507 |
|
4508 JS_ASSERT(bce->stackDepth == loopDepth); |
|
4509 |
|
4510 // Let Ion know where the closing jump of this loop is. |
|
4511 if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 0, beq - jmp)) |
|
4512 return false; |
|
4513 |
|
4514 // Fixup breaks and continues. |
|
4515 if (!PopStatementBCE(cx, bce)) |
|
4516 return false; |
|
4517 |
|
4518 if (letDecl) { |
|
4519 if (!LeaveNestedScope(cx, bce, &letStmt)) |
|
4520 return false; |
|
4521 } |
|
4522 |
|
4523 // Pop the result and the iter. |
|
4524 EMIT_UINT16_IMM_OP(JSOP_POPN, 2); |
|
4525 |
|
4526 return true; |
|
4527 } |
|
4528 |
|
/*
 * Emit bytecode for a for-in loop: |for (target in obj) body|.
 *
 * The loop condition (JSOP_MOREITER/JSOP_IFNE) is emitted at the bottom of
 * the loop, with an initial JSOP_GOTO down to it, so a zero-iteration loop
 * executes as few bytecodes as possible.  A JSTRY_ITER try note covering
 * the loop is appended so the iterator can be closed on abnormal exit, and
 * JSOP_ENDITER closes it on normal completion.
 */
static bool
EmitForIn(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t top)
{
    ParseNode *forHead = pn->pn_left;
    ParseNode *forBody = pn->pn_right;

    // pn_kid1 is the declaration/target part of the head; a PNK_LEXICALSCOPE
    // node here means the loop was written with a |let| declaration.
    ParseNode *pn1 = forHead->pn_kid1;
    bool letDecl = pn1 && pn1->isKind(PNK_LEXICALSCOPE);
    JS_ASSERT_IF(letDecl, pn1->isLet());

    /*
     * If the left part is 'var x', emit code to define x if necessary
     * using a prolog opcode, but do not emit a pop. If the left part was
     * originally 'var x = i', the parser will have rewritten it; see
     * Parser::forStatement. 'for (let x = i in o)' is mercifully banned.
     */
    if (pn1) {
        ParseNode *decl = letDecl ? pn1->pn_expr : pn1;
        JS_ASSERT(decl->isKind(PNK_VAR) || decl->isKind(PNK_LET));
        bce->emittingForInit = true;
        if (!EmitVariables(cx, bce, decl, DefineVars))
            return false;
        bce->emittingForInit = false;
    }

    /* Compile the object expression to the right of 'in'. */
    if (!EmitTree(cx, bce, forHead->pn_kid3))
        return false;

    /*
     * Emit a bytecode to convert top of stack value to the iterator
     * object depending on the loop variant (for-in, for-each-in, or
     * destructuring for-in).
     */
    JS_ASSERT(pn->isOp(JSOP_ITER));
    if (Emit2(cx, bce, JSOP_ITER, (uint8_t) pn->pn_iflags) < 0)
        return false;

    /* Enter the block before the loop body, after evaluating the obj. */
    StmtInfoBCE letStmt(cx);
    if (letDecl) {
        if (!EnterBlockScope(cx, bce, &letStmt, pn1->pn_objbox, 0))
            return false;
    }

    LoopStmtInfo stmtInfo(cx);
    PushLoopStatement(bce, &stmtInfo, STMT_FOR_IN_LOOP, top);

    /* Annotate so IonMonkey can find the loop-closing jump. */
    int noteIndex = NewSrcNote(cx, bce, SRC_FOR_IN);
    if (noteIndex < 0)
        return false;

    /*
     * Jump down to the loop condition to minimize overhead assuming at
     * least one iteration, as the other loop forms do.
     */
    ptrdiff_t jmp = EmitJump(cx, bce, JSOP_GOTO, 0);
    if (jmp < 0)
        return false;

    top = bce->offset();
    SET_STATEMENT_TOP(&stmtInfo, top);
    if (EmitLoopHead(cx, bce, nullptr) < 0)
        return false;

#ifdef DEBUG
    // Record the depth here so the stack-balance assertions below can verify
    // the assignment sequence and loop body leave the stack unchanged.
    int loopDepth = bce->stackDepth;
#endif

    /*
     * Emit code to get the next enumeration value and assign it to the
     * left hand side.
     */
    if (Emit1(cx, bce, JSOP_ITERNEXT) < 0)
        return false;
    if (!EmitAssignment(cx, bce, forHead->pn_kid2, JSOP_NOP, nullptr))
        return false;

    if (Emit1(cx, bce, JSOP_POP) < 0)
        return false;

    /* The stack should be balanced around the assignment opcode sequence. */
    JS_ASSERT(bce->stackDepth == loopDepth);

    /* Emit code for the loop body. */
    if (!EmitTree(cx, bce, forBody))
        return false;

    /* Set loop and enclosing "update" offsets, for continue. */
    StmtInfoBCE *stmt = &stmtInfo;
    do {
        stmt->update = bce->offset();
    } while ((stmt = stmt->down) != nullptr && stmt->type == STMT_LABEL);

    /*
     * Fixup the goto that starts the loop to jump down to JSOP_MOREITER.
     */
    SetJumpOffsetAt(bce, jmp);
    if (!EmitLoopEntry(cx, bce, nullptr))
        return false;
    if (Emit1(cx, bce, JSOP_MOREITER) < 0)
        return false;
    ptrdiff_t beq = EmitJump(cx, bce, JSOP_IFNE, top - bce->offset());
    if (beq < 0)
        return false;

    /* Set the srcnote offset so we can find the closing jump. */
    if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 0, beq - jmp))
        return false;

    // Fix up breaks and continues.
    if (!PopStatementBCE(cx, bce))
        return false;

    // The JSTRY_ITER note covers [top, here) so unwinding code can find and
    // close the iterator (handled outside this function).
    if (!bce->tryNoteList.append(JSTRY_ITER, bce->stackDepth, top, bce->offset()))
        return false;
    if (Emit1(cx, bce, JSOP_ENDITER) < 0)
        return false;

    if (letDecl) {
        if (!LeaveNestedScope(cx, bce, &letStmt))
            return false;
    }

    return true;
}
|
4656 |
|
/*
 * Emit bytecode for a C-style |for (init; cond; update) body| loop.
 *
 * The condition (if any) is emitted at the bottom of the loop with an
 * initial JSOP_GOTO down to it.  A SRC_FOR source note carries three
 * offsets (condition, update part, loop-closing jump), all biased
 * relative to |tmp|, so that source-note readers and IonBuilder can
 * locate the loop's parts.
 */
static bool
EmitNormalFor(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t top)
{
    LoopStmtInfo stmtInfo(cx);
    PushLoopStatement(bce, &stmtInfo, STMT_FOR_LOOP, top);

    ParseNode *forHead = pn->pn_left;
    ParseNode *forBody = pn->pn_right;

    /* C-style for (init; cond; update) ... loop. */
    JSOp op = JSOP_POP;
    ParseNode *pn3 = forHead->pn_kid1;
    if (!pn3) {
        // No initializer, but emit a nop so that there's somewhere to put the
        // SRC_FOR annotation that IonBuilder will look for.
        op = JSOP_NOP;
    } else {
        bce->emittingForInit = true;
        if (pn3->isKind(PNK_ASSIGN)) {
            JS_ASSERT(pn3->isOp(JSOP_NOP));
            // May emit the init as a group assignment, in which case |op|
            // comes back as JSOP_NOP and the plain emit below is skipped.
            if (!MaybeEmitGroupAssignment(cx, bce, op, pn3, GroupIsNotDecl, &op))
                return false;
        }
        if (op == JSOP_POP) {
            if (!UpdateSourceCoordNotes(cx, bce, pn3->pn_pos.begin))
                return false;
            if (!EmitTree(cx, bce, pn3))
                return false;
            if (pn3->isKind(PNK_VAR) || pn3->isKind(PNK_CONST) || pn3->isKind(PNK_LET)) {
                /*
                 * Check whether a destructuring-initialized var decl
                 * was optimized to a group assignment.  If so, we do
                 * not need to emit a pop below, so switch to a nop,
                 * just for IonBuilder.
                 */
                JS_ASSERT(pn3->isArity(PN_LIST) || pn3->isArity(PN_BINARY));
                if (pn3->pn_xflags & PNX_GROUPINIT)
                    op = JSOP_NOP;
            }
        }
        bce->emittingForInit = false;
    }

    /*
     * NB: the SRC_FOR note has offsetBias 1 (JSOP_{NOP,POP}_LENGTH).
     * Use tmp to hold the biased srcnote "top" offset, which differs
     * from the top local variable by the length of the JSOP_GOTO
     * emitted in between tmp and top if this loop has a condition.
     */
    int noteIndex = NewSrcNote(cx, bce, SRC_FOR);
    if (noteIndex < 0 || Emit1(cx, bce, op) < 0)
        return false;
    ptrdiff_t tmp = bce->offset();

    ptrdiff_t jmp = -1;
    if (forHead->pn_kid2) {
        /* Goto the loop condition, which branches back to iterate. */
        jmp = EmitJump(cx, bce, JSOP_GOTO, 0);
        if (jmp < 0)
            return false;
    } else {
        // No condition means no goto; emit a nop in its place unless |op|
        // was already a nop (see the offset-bias NB comment above).
        if (op != JSOP_NOP && Emit1(cx, bce, JSOP_NOP) < 0)
            return false;
    }

    top = bce->offset();
    SET_STATEMENT_TOP(&stmtInfo, top);

    /* Emit code for the loop body. */
    if (EmitLoopHead(cx, bce, forBody) < 0)
        return false;
    if (jmp == -1 && !EmitLoopEntry(cx, bce, forBody))
        return false;
    if (!EmitTree(cx, bce, forBody))
        return false;

    /* Set the second note offset so we can find the update part. */
    JS_ASSERT(noteIndex != -1);
    ptrdiff_t tmp2 = bce->offset();

    /* Set loop and enclosing "update" offsets, for continue. */
    StmtInfoBCE *stmt = &stmtInfo;
    do {
        stmt->update = bce->offset();
    } while ((stmt = stmt->down) != nullptr && stmt->type == STMT_LABEL);

    /* Check for update code to do before the condition (if any). */
    pn3 = forHead->pn_kid3;
    if (pn3) {
        if (!UpdateSourceCoordNotes(cx, bce, pn3->pn_pos.begin))
            return false;
        op = JSOP_POP;
        if (pn3->isKind(PNK_ASSIGN)) {
            JS_ASSERT(pn3->isOp(JSOP_NOP));
            if (!MaybeEmitGroupAssignment(cx, bce, op, pn3, GroupIsNotDecl, &op))
                return false;
        }
        if (op == JSOP_POP && !EmitTree(cx, bce, pn3))
            return false;

        /* Always emit the POP or NOP to help IonBuilder. */
        if (Emit1(cx, bce, op) < 0)
            return false;

        /* Restore the absolute line number for source note readers. */
        uint32_t lineNum = bce->parser->tokenStream.srcCoords.lineNum(pn->pn_pos.end);
        if (bce->currentLine() != lineNum) {
            if (NewSrcNote2(cx, bce, SRC_SETLINE, ptrdiff_t(lineNum)) < 0)
                return false;
            bce->current->currentLine = lineNum;
            bce->current->lastColumn = 0;
        }
    }

    ptrdiff_t tmp3 = bce->offset();

    if (forHead->pn_kid2) {
        /* Fix up the goto from top to target the loop condition. */
        JS_ASSERT(jmp >= 0);
        SetJumpOffsetAt(bce, jmp);
        if (!EmitLoopEntry(cx, bce, forHead->pn_kid2))
            return false;

        if (!EmitTree(cx, bce, forHead->pn_kid2))
            return false;
    }

    /* Set the first note offset so we can find the loop condition. */
    if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 0, tmp3 - tmp))
        return false;
    if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 1, tmp2 - tmp))
        return false;
    /* The third note offset helps us find the loop-closing jump. */
    if (!SetSrcNoteOffset(cx, bce, (unsigned)noteIndex, 2, bce->offset() - tmp))
        return false;

    /* If no loop condition, just emit a loop-closing jump. */
    op = forHead->pn_kid2 ? JSOP_IFNE : JSOP_GOTO;
    if (EmitJump(cx, bce, op, top - bce->offset()) < 0)
        return false;

    if (!bce->tryNoteList.append(JSTRY_LOOP, bce->stackDepth, top, bce->offset()))
        return false;

    /* Now fixup all breaks and continues. */
    return PopStatementBCE(cx, bce);
}
|
4804 |
|
4805 static inline bool |
|
4806 EmitFor(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t top) |
|
4807 { |
|
4808 if (pn->pn_left->isKind(PNK_FORIN)) |
|
4809 return EmitForIn(cx, bce, pn, top); |
|
4810 |
|
4811 if (pn->pn_left->isKind(PNK_FOROF)) |
|
4812 return EmitForOf(cx, bce, pn, top); |
|
4813 |
|
4814 JS_ASSERT(pn->pn_left->isKind(PNK_FORHEAD)); |
|
4815 return EmitNormalFor(cx, bce, pn, top); |
|
4816 } |
|
4817 |
|
/*
 * Emit bytecode for a function definition or expression.
 *
 * For an interpreted, non-lazy function this recursively compiles the inner
 * function body with its own BytecodeEmitter (bce2) into a fresh JSScript.
 * The function is then added to the outer script's object list and referred
 * to by index:
 *   - non-hoisted functions emit their own op (the node's op, e.g. a lambda);
 *   - hoisted functions at the top level of a script emit JSOP_DEFFUN into
 *     the prolog;
 *   - hoisted functions inside another function emit JSOP_LAMBDA followed by
 *     a store into the function's local or argument binding slot.
 */
static MOZ_NEVER_INLINE bool
EmitFunc(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
    FunctionBox *funbox = pn->pn_funbox;
    RootedFunction fun(cx, funbox->function());
    JS_ASSERT_IF(fun->isInterpretedLazy(), fun->lazyScript());

    /*
     * Set the EMITTEDFUNCTION flag in function definitions once they have been
     * emitted. Function definitions that need hoisting to the top of the
     * function will be seen by EmitFunc in two places.
     */
    if (pn->pn_dflags & PND_EMITTEDFUNCTION) {
        JS_ASSERT_IF(fun->hasScript(), fun->nonLazyScript());
        JS_ASSERT(pn->functionIsHoisted());
        JS_ASSERT(bce->sc->isFunctionBox());
        return true;
    }

    pn->pn_dflags |= PND_EMITTEDFUNCTION;

    /*
     * Mark as singletons any function which will only be executed once, or
     * which is inner to a lambda we only expect to run once. In the latter
     * case, if the lambda runs multiple times then CloneFunctionObject will
     * make a deep clone of its contents.
     */
    if (fun->isInterpreted()) {
        // NOTE(review): the fun->isInterpreted() term below is redundant —
        // we are already inside an isInterpreted() branch.
        bool singleton =
            bce->script->compileAndGo() &&
            fun->isInterpreted() &&
            (bce->checkSingletonContext() ||
             (!bce->isInLoop() && bce->isRunOnceLambda()));
        if (!JSFunction::setTypeForScriptedFunction(cx, fun, singleton))
            return false;

        if (fun->isInterpretedLazy()) {
            // Lazily-parsed function: don't compile it here; just make sure
            // its lazy script knows its enclosing static scope and source.
            if (!fun->lazyScript()->sourceObject()) {
                JSObject *scope = bce->staticScope;
                if (!scope && bce->sc->isFunctionBox())
                    scope = bce->sc->asFunctionBox()->function();
                JSObject *source = bce->script->sourceObject();
                fun->lazyScript()->setParent(scope, &source->as<ScriptSourceObject>());
            }
            if (bce->emittingRunOnceLambda)
                fun->lazyScript()->setTreatAsRunOnce();
        } else {
            SharedContext *outersc = bce->sc;

            if (outersc->isFunctionBox() && outersc->asFunctionBox()->mightAliasLocals())
                funbox->setMightAliasLocals();      // inherit mightAliasLocals from parent
            JS_ASSERT_IF(outersc->strict, funbox->strict);

            // Inherit most things (principals, version, etc) from the parent.
            Rooted<JSScript*> parent(cx, bce->script);
            CompileOptions options(cx, bce->parser->options());
            options.setOriginPrincipals(parent->originPrincipals())
                   .setCompileAndGo(parent->compileAndGo())
                   .setSelfHostingMode(parent->selfHosted())
                   .setNoScriptRval(false)
                   .setForEval(false)
                   .setVersion(parent->getVersion());

            Rooted<JSObject*> enclosingScope(cx, EnclosingStaticScope(bce));
            Rooted<JSObject*> sourceObject(cx, bce->script->sourceObject());
            Rooted<JSScript*> script(cx, JSScript::Create(cx, enclosingScope, false, options,
                                                          parent->staticLevel() + 1,
                                                          sourceObject,
                                                          funbox->bufStart, funbox->bufEnd));
            if (!script)
                return false;

            script->bindings = funbox->bindings;

            // Compile the inner function's body with a nested emitter.
            uint32_t lineNum = bce->parser->tokenStream.srcCoords.lineNum(pn->pn_pos.begin);
            BytecodeEmitter bce2(bce, bce->parser, funbox, script, bce->insideEval,
                                 bce->evalCaller, bce->hasGlobalScope, lineNum,
                                 bce->emitterMode);
            if (!bce2.init())
                return false;

            /* We measured the max scope depth when we parsed the function. */
            if (!EmitFunctionScript(cx, &bce2, pn->pn_body))
                return false;

            if (funbox->usesArguments && funbox->usesApply)
                script->setUsesArgumentsAndApply();
        }
    } else {
        JS_ASSERT(IsAsmJSModuleNative(fun->native()));
    }

    /* Make the function object a literal in the outer script's pool. */
    unsigned index = bce->objectList.add(pn->pn_funbox);

    /* Non-hoisted functions simply emit their respective op. */
    if (!pn->functionIsHoisted()) {
        /* JSOP_LAMBDA_ARROW is always preceded by JSOP_THIS. */
        MOZ_ASSERT(fun->isArrow() == (pn->getOp() == JSOP_LAMBDA_ARROW));
        if (fun->isArrow() && Emit1(cx, bce, JSOP_THIS) < 0)
            return false;
        return EmitIndex32(cx, pn->getOp(), index, bce);
    }

    /*
     * For a script we emit the code as we parse. Thus the bytecode for
     * top-level functions should go in the prolog to predefine their
     * names in the variable object before the already-generated main code
     * is executed. This extra work for top-level scripts is not necessary
     * when we emit the code for a function. It is fully parsed prior to
     * invocation of the emitter and calls to EmitTree for function
     * definitions can be scheduled before generating the rest of code.
     */
    if (!bce->sc->isFunctionBox()) {
        JS_ASSERT(pn->pn_cookie.isFree());
        JS_ASSERT(pn->getOp() == JSOP_NOP);
        JS_ASSERT(!bce->topStmt);
        bce->switchToProlog();
        if (!EmitIndex32(cx, JSOP_DEFFUN, index, bce))
            return false;
        if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin))
            return false;
        bce->switchToMain();
    } else {
#ifdef DEBUG
        // Verify the function name is bound as a variable, constant, or
        // argument of the enclosing script, within addressable range.
        BindingIter bi(bce->script);
        while (bi->name() != fun->atom())
            bi++;
        JS_ASSERT(bi->kind() == Binding::VARIABLE || bi->kind() == Binding::CONSTANT ||
                  bi->kind() == Binding::ARGUMENT);
        JS_ASSERT(bi.frameIndex() < JS_BIT(20));
#endif
        // Hoisted function inside a function: create the closure and store it
        // into its local or argument binding slot.
        pn->pn_index = index;
        if (!EmitIndexOp(cx, JSOP_LAMBDA, index, bce))
            return false;
        JS_ASSERT(pn->getOp() == JSOP_GETLOCAL || pn->getOp() == JSOP_GETARG);
        JSOp setOp = pn->getOp() == JSOP_GETLOCAL ? JSOP_SETLOCAL : JSOP_SETARG;
        if (!EmitVarOp(cx, pn, setOp, bce))
            return false;
        if (Emit1(cx, bce, JSOP_POP) < 0)
            return false;
    }

    return true;
}
|
4963 |
|
/*
 * Emit bytecode for a |do body while (cond)| loop.
 *
 * Two SRC_WHILE notes are emitted up front: the first annotates a nop so
 * IonBuilder can recognize the 'do' loop; the second records the back-edge
 * position.  Their offsets are filled in after the loop — second note
 * first, because updating the first note may make it grow.
 */
static bool
EmitDo(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
    /* Emit an annotated nop so IonBuilder can recognize the 'do' loop. */
    ptrdiff_t noteIndex = NewSrcNote(cx, bce, SRC_WHILE);
    if (noteIndex < 0 || Emit1(cx, bce, JSOP_NOP) < 0)
        return false;

    ptrdiff_t noteIndex2 = NewSrcNote(cx, bce, SRC_WHILE);
    if (noteIndex2 < 0)
        return false;

    /* Compile the loop body. */
    ptrdiff_t top = EmitLoopHead(cx, bce, pn->pn_left);
    if (top < 0)
        return false;

    LoopStmtInfo stmtInfo(cx);
    PushLoopStatement(bce, &stmtInfo, STMT_DO_LOOP, top);

    if (!EmitLoopEntry(cx, bce, nullptr))
        return false;

    // pn_left is the body, pn_right the condition.
    if (!EmitTree(cx, bce, pn->pn_left))
        return false;

    /* Set loop and enclosing label update offsets, for continue. */
    ptrdiff_t off = bce->offset();
    StmtInfoBCE *stmt = &stmtInfo;
    do {
        stmt->update = off;
    } while ((stmt = stmt->down) != nullptr && stmt->type == STMT_LABEL);

    /* Compile the loop condition, now that continues know where to go. */
    if (!EmitTree(cx, bce, pn->pn_right))
        return false;

    // Backward branch to the loop head while the condition holds.
    ptrdiff_t beq = EmitJump(cx, bce, JSOP_IFNE, top - bce->offset());
    if (beq < 0)
        return false;

    if (!bce->tryNoteList.append(JSTRY_LOOP, bce->stackDepth, top, bce->offset()))
        return false;

    /*
     * Update the annotations with the update and back edge positions, for
     * IonBuilder.
     *
     * Be careful: We must set noteIndex2 before noteIndex in case the noteIndex
     * note gets bigger.
     */
    if (!SetSrcNoteOffset(cx, bce, noteIndex2, 0, beq - top))
        return false;
    if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, 1 + (off - top)))
        return false;

    return PopStatementBCE(cx, bce);
}
|
5022 |
|
/*
 * Emit bytecode for a |while (cond) body| loop.  |top| is the bytecode
 * offset at which the statement starts.  The SRC_WHILE note's offset
 * records the distance from the entry goto to the loop-closing branch.
 */
static bool
EmitWhile(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t top)
{
    /*
     * Minimize bytecodes issued for one or more iterations by jumping to
     * the condition below the body and closing the loop if the condition
     * is true with a backward branch. For iteration count i:
     *
     *  i    test at the top                 test at the bottom
     *  =    ===============                 ==================
     *  0    ifeq-pass                       goto; ifne-fail
     *  1    ifeq-fail; goto; ifne-pass      goto; ifne-pass; ifne-fail
     *  2    2*(ifeq-fail; goto); ifeq-pass  goto; 2*ifne-pass; ifne-fail
     *  . . .
     *  N    N*(ifeq-fail; goto); ifeq-pass  goto; N*ifne-pass; ifne-fail
     */
    LoopStmtInfo stmtInfo(cx);
    PushLoopStatement(bce, &stmtInfo, STMT_WHILE_LOOP, top);

    ptrdiff_t noteIndex = NewSrcNote(cx, bce, SRC_WHILE);
    if (noteIndex < 0)
        return false;

    // Jump down to the condition, which is emitted after the body.
    ptrdiff_t jmp = EmitJump(cx, bce, JSOP_GOTO, 0);
    if (jmp < 0)
        return false;

    // pn_right is the body, pn_left the condition.
    top = EmitLoopHead(cx, bce, pn->pn_right);
    if (top < 0)
        return false;

    if (!EmitTree(cx, bce, pn->pn_right))
        return false;

    // Patch the entry goto to land here, then emit the condition.
    SetJumpOffsetAt(bce, jmp);
    if (!EmitLoopEntry(cx, bce, pn->pn_left))
        return false;
    if (!EmitTree(cx, bce, pn->pn_left))
        return false;

    // Backward branch to the loop head while the condition is true.
    ptrdiff_t beq = EmitJump(cx, bce, JSOP_IFNE, top - bce->offset());
    if (beq < 0)
        return false;

    if (!bce->tryNoteList.append(JSTRY_LOOP, bce->stackDepth, top, bce->offset()))
        return false;

    if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, beq - jmp))
        return false;

    return PopStatementBCE(cx, bce);
}
|
5075 |
|
5076 static bool |
|
5077 EmitBreak(ExclusiveContext *cx, BytecodeEmitter *bce, PropertyName *label) |
|
5078 { |
|
5079 StmtInfoBCE *stmt = bce->topStmt; |
|
5080 SrcNoteType noteType; |
|
5081 if (label) { |
|
5082 while (stmt->type != STMT_LABEL || stmt->label != label) |
|
5083 stmt = stmt->down; |
|
5084 noteType = SRC_BREAK2LABEL; |
|
5085 } else { |
|
5086 while (!stmt->isLoop() && stmt->type != STMT_SWITCH) |
|
5087 stmt = stmt->down; |
|
5088 noteType = (stmt->type == STMT_SWITCH) ? SRC_SWITCHBREAK : SRC_BREAK; |
|
5089 } |
|
5090 |
|
5091 return EmitGoto(cx, bce, stmt, &stmt->breaks, noteType) >= 0; |
|
5092 } |
|
5093 |
|
5094 static bool |
|
5095 EmitContinue(ExclusiveContext *cx, BytecodeEmitter *bce, PropertyName *label) |
|
5096 { |
|
5097 StmtInfoBCE *stmt = bce->topStmt; |
|
5098 if (label) { |
|
5099 /* Find the loop statement enclosed by the matching label. */ |
|
5100 StmtInfoBCE *loop = nullptr; |
|
5101 while (stmt->type != STMT_LABEL || stmt->label != label) { |
|
5102 if (stmt->isLoop()) |
|
5103 loop = stmt; |
|
5104 stmt = stmt->down; |
|
5105 } |
|
5106 stmt = loop; |
|
5107 } else { |
|
5108 while (!stmt->isLoop()) |
|
5109 stmt = stmt->down; |
|
5110 } |
|
5111 |
|
5112 return EmitGoto(cx, bce, stmt, &stmt->continues, SRC_CONTINUE) >= 0; |
|
5113 } |
|
5114 |
|
5115 static bool |
|
5116 EmitReturn(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) |
|
5117 { |
|
5118 if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin)) |
|
5119 return false; |
|
5120 |
|
5121 if (bce->sc->isFunctionBox() && bce->sc->asFunctionBox()->isStarGenerator()) { |
|
5122 if (!EmitPrepareIteratorResult(cx, bce)) |
|
5123 return false; |
|
5124 } |
|
5125 |
|
5126 /* Push a return value */ |
|
5127 if (ParseNode *pn2 = pn->pn_kid) { |
|
5128 if (!EmitTree(cx, bce, pn2)) |
|
5129 return false; |
|
5130 } else { |
|
5131 /* No explicit return value provided */ |
|
5132 if (Emit1(cx, bce, JSOP_UNDEFINED) < 0) |
|
5133 return false; |
|
5134 } |
|
5135 |
|
5136 if (bce->sc->isFunctionBox() && bce->sc->asFunctionBox()->isStarGenerator()) { |
|
5137 if (!EmitFinishIteratorResult(cx, bce, true)) |
|
5138 return false; |
|
5139 } |
|
5140 |
|
5141 /* |
|
5142 * EmitNonLocalJumpFixup may add fixup bytecode to close open try |
|
5143 * blocks having finally clauses and to exit intermingled let blocks. |
|
5144 * We can't simply transfer control flow to our caller in that case, |
|
5145 * because we must gosub to those finally clauses from inner to outer, |
|
5146 * with the correct stack pointer (i.e., after popping any with, |
|
5147 * for/in, etc., slots nested inside the finally's try). |
|
5148 * |
|
5149 * In this case we mutate JSOP_RETURN into JSOP_SETRVAL and add an |
|
5150 * extra JSOP_RETRVAL after the fixups. |
|
5151 */ |
|
5152 ptrdiff_t top = bce->offset(); |
|
5153 |
|
5154 if (Emit1(cx, bce, JSOP_RETURN) < 0) |
|
5155 return false; |
|
5156 |
|
5157 NonLocalExitScope nle(cx, bce); |
|
5158 |
|
5159 if (!nle.prepareForNonLocalJump(nullptr)) |
|
5160 return false; |
|
5161 |
|
5162 if (top + static_cast<ptrdiff_t>(JSOP_RETURN_LENGTH) != bce->offset()) { |
|
5163 bce->code()[top] = JSOP_SETRVAL; |
|
5164 if (Emit1(cx, bce, JSOP_RETRVAL) < 0) |
|
5165 return false; |
|
5166 } |
|
5167 |
|
5168 return true; |
|
5169 } |
|
5170 |
|
/*
 * Emit bytecode for |yield* iter| inside a star generator.
 *
 * The iterable is converted to an iterator via its @@iterator method.
 * Each result object the iterator produces is yielded as-is, without
 * re-boxing.  Values sent into this generator are forwarded with
 * iter.next(received); exceptions thrown into it are forwarded with
 * iter.throw(exception) when the iterator has a 'throw' property
 * (tested with JSOP_IN), and re-thrown otherwise.  Delegation ends when
 * result.done is true; the expression's value is then result.value.
 *
 * The trailing // comments track the operand stack at each step.
 */
static bool
EmitYieldStar(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *iter)
{
    JS_ASSERT(bce->sc->isFunctionBox());
    JS_ASSERT(bce->sc->asFunctionBox()->isStarGenerator());

    if (!EmitTree(cx, bce, iter))                                // ITERABLE
        return false;

    // Convert iterable to iterator.
    if (Emit1(cx, bce, JSOP_DUP) < 0)                            // ITERABLE ITERABLE
        return false;
    if (!EmitAtomOp(cx, cx->names().std_iterator, JSOP_CALLPROP, bce)) // ITERABLE @@ITERATOR
        return false;
    if (Emit1(cx, bce, JSOP_SWAP) < 0)                           // @@ITERATOR ITERABLE
        return false;
    if (EmitCall(cx, bce, JSOP_CALL, 0) < 0)                     // ITER
        return false;
    CheckTypeSet(cx, bce, JSOP_CALL);

    int depth = bce->stackDepth;
    JS_ASSERT(depth >= 1);

    // Initial send value is undefined.
    if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)                      // ITER RECEIVED
        return false;
    ptrdiff_t initialSend = -1;
    if (EmitBackPatchOp(cx, bce, &initialSend) < 0)              // goto initialSend
        return false;

    // Try prologue.                                             // ITER RESULT
    StmtInfoBCE stmtInfo(cx);
    PushStatementBCE(bce, &stmtInfo, STMT_TRY, bce->offset());
    ptrdiff_t noteIndex = NewSrcNote(cx, bce, SRC_TRY);
    if (noteIndex < 0 || Emit1(cx, bce, JSOP_TRY) < 0)
        return false;
    ptrdiff_t tryStart = bce->offset();                          // tryStart:
    JS_ASSERT(bce->stackDepth == depth + 1);

    // Yield RESULT as-is, without re-boxing.
    if (Emit1(cx, bce, JSOP_YIELD) < 0)                          // ITER RECEIVED
        return false;

    // Try epilogue.
    if (!SetSrcNoteOffset(cx, bce, noteIndex, 0, bce->offset() - tryStart + JSOP_TRY_LENGTH))
        return false;
    ptrdiff_t subsequentSend = -1;
    if (EmitBackPatchOp(cx, bce, &subsequentSend) < 0)           // goto subsequentSend
        return false;
    ptrdiff_t tryEnd = bce->offset();                            // tryEnd:

    // Catch location.
    // THROW? = 'throw' in ITER                                  // ITER
    bce->stackDepth = (uint32_t) depth;
    if (Emit1(cx, bce, JSOP_EXCEPTION) < 0)                      // ITER EXCEPTION
        return false;
    if (Emit1(cx, bce, JSOP_SWAP) < 0)                           // EXCEPTION ITER
        return false;
    if (Emit1(cx, bce, JSOP_DUP) < 0)                            // EXCEPTION ITER ITER
        return false;
    if (!EmitAtomOp(cx, cx->names().throw_, JSOP_STRING, bce))   // EXCEPTION ITER ITER "throw"
        return false;
    if (Emit1(cx, bce, JSOP_SWAP) < 0)                           // EXCEPTION ITER "throw" ITER
        return false;
    if (Emit1(cx, bce, JSOP_IN) < 0)                             // EXCEPTION ITER THROW?
        return false;
    // if (THROW?) goto delegate
    ptrdiff_t checkThrow = EmitJump(cx, bce, JSOP_IFNE, 0);      // EXCEPTION ITER
    if (checkThrow < 0)
        return false;
    // No 'throw' method on the iterator: re-throw the exception here.
    if (Emit1(cx, bce, JSOP_POP) < 0)                            // EXCEPTION
        return false;
    if (Emit1(cx, bce, JSOP_THROW) < 0)                          // throw EXCEPTION
        return false;

    SetJumpOffsetAt(bce, checkThrow);                            // delegate:
    // RESULT = ITER.throw(EXCEPTION)                            // EXCEPTION ITER
    bce->stackDepth = (uint32_t) depth + 1;
    if (Emit1(cx, bce, JSOP_DUP) < 0)                            // EXCEPTION ITER ITER
        return false;
    if (Emit1(cx, bce, JSOP_DUP) < 0)                            // EXCEPTION ITER ITER ITER
        return false;
    if (!EmitAtomOp(cx, cx->names().throw_, JSOP_CALLPROP, bce)) // EXCEPTION ITER ITER THROW
        return false;
    if (Emit1(cx, bce, JSOP_SWAP) < 0)                           // EXCEPTION ITER THROW ITER
        return false;
    if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)3) < 0)            // ITER THROW ITER EXCEPTION
        return false;
    if (EmitCall(cx, bce, JSOP_CALL, 1) < 0)                     // ITER RESULT
        return false;
    CheckTypeSet(cx, bce, JSOP_CALL);
    JS_ASSERT(bce->stackDepth == depth + 1);
    ptrdiff_t checkResult = -1;
    if (EmitBackPatchOp(cx, bce, &checkResult) < 0)              // goto checkResult
        return false;

    // Catch epilogue.
    if (!PopStatementBCE(cx, bce))
        return false;
    // This is a peace offering to ReconstructPCStack. See the note in EmitTry.
    if (Emit1(cx, bce, JSOP_NOP) < 0)
        return false;
    if (!bce->tryNoteList.append(JSTRY_CATCH, depth, tryStart, tryEnd))
        return false;

    // After the try/catch block: send the received value to the iterator.
    if (!BackPatch(cx, bce, initialSend, bce->code().end(), JSOP_GOTO))    // initialSend:
        return false;
    if (!BackPatch(cx, bce, subsequentSend, bce->code().end(), JSOP_GOTO)) // subsequentSend:
        return false;

    // Send location.
    // result = iter.next(received)                              // ITER RECEIVED
    if (Emit1(cx, bce, JSOP_SWAP) < 0)                           // RECEIVED ITER
        return false;
    if (Emit1(cx, bce, JSOP_DUP) < 0)                            // RECEIVED ITER ITER
        return false;
    if (Emit1(cx, bce, JSOP_DUP) < 0)                            // RECEIVED ITER ITER ITER
        return false;
    if (!EmitAtomOp(cx, cx->names().next, JSOP_CALLPROP, bce))   // RECEIVED ITER ITER NEXT
        return false;
    if (Emit1(cx, bce, JSOP_SWAP) < 0)                           // RECEIVED ITER NEXT ITER
        return false;
    if (Emit2(cx, bce, JSOP_PICK, (jsbytecode)3) < 0)            // ITER NEXT ITER RECEIVED
        return false;
    if (EmitCall(cx, bce, JSOP_CALL, 1) < 0)                     // ITER RESULT
        return false;
    CheckTypeSet(cx, bce, JSOP_CALL);
    JS_ASSERT(bce->stackDepth == depth + 1);

    if (!BackPatch(cx, bce, checkResult, bce->code().end(), JSOP_GOTO))    // checkResult:
        return false;
    // if (!result.done) goto tryStart;                          // ITER RESULT
    if (Emit1(cx, bce, JSOP_DUP) < 0)                            // ITER RESULT RESULT
        return false;
    if (!EmitAtomOp(cx, cx->names().done, JSOP_GETPROP, bce))    // ITER RESULT DONE
        return false;
    // if (!DONE) goto tryStart;
    if (EmitJump(cx, bce, JSOP_IFEQ, tryStart - bce->offset()) < 0) // ITER RESULT
        return false;

    // result.value
    if (Emit1(cx, bce, JSOP_SWAP) < 0)                           // RESULT ITER
        return false;
    if (Emit1(cx, bce, JSOP_POP) < 0)                            // RESULT
        return false;
    if (!EmitAtomOp(cx, cx->names().value, JSOP_GETPROP, bce))   // VALUE
        return false;

    JS_ASSERT(bce->stackDepth == depth);

    return true;
}
|
5324 |
|
5325 static bool |
|
5326 EmitStatementList(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t top) |
|
5327 { |
|
5328 JS_ASSERT(pn->isArity(PN_LIST)); |
|
5329 |
|
5330 StmtInfoBCE stmtInfo(cx); |
|
5331 PushStatementBCE(bce, &stmtInfo, STMT_BLOCK, top); |
|
5332 |
|
5333 ParseNode *pnchild = pn->pn_head; |
|
5334 |
|
5335 if (pn->pn_xflags & PNX_DESTRUCT) |
|
5336 pnchild = pnchild->pn_next; |
|
5337 |
|
5338 for (ParseNode *pn2 = pnchild; pn2; pn2 = pn2->pn_next) { |
|
5339 if (!EmitTree(cx, bce, pn2)) |
|
5340 return false; |
|
5341 } |
|
5342 |
|
5343 return PopStatementBCE(cx, bce); |
|
5344 } |
|
5345 |
|
/*
 * Emit an expression statement (PNK_SEMI).  A useless expression (no side
 * effects, result unwanted) is elided entirely, with a strict-mode warning;
 * otherwise the expression is emitted followed by JSOP_POP, or JSOP_SETRVAL
 * when the statement's value is the script's result.
 */
static bool
EmitStatement(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
    JS_ASSERT(pn->isKind(PNK_SEMI));

    /* An empty statement (bare semicolon) emits nothing. */
    ParseNode *pn2 = pn->pn_kid;
    if (!pn2)
        return true;

    if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin))
        return false;

    /*
     * Top-level or called-from-a-native JS_Execute/EvaluateScript,
     * debugger, and eval frames may need the value of the ultimate
     * expression statement as the script's result, despite the fact
     * that it appears useless to the compiler.
     *
     * API users may also set the JSOPTION_NO_SCRIPT_RVAL option when
     * calling JS_Compile* to suppress JSOP_SETRVAL.
     */
    bool wantval = false;
    bool useful = false;
    if (bce->sc->isFunctionBox()) {
        JS_ASSERT(!bce->script->noScriptRval());
    } else {
        useful = wantval = !bce->script->noScriptRval();
    }

    /* Don't eliminate expressions with side effects. */
    if (!useful) {
        if (!CheckSideEffects(cx, bce, pn2, &useful))
            return false;

        /*
         * Don't eliminate apparently useless expressions if they are
         * labeled expression statements. The pc->topStmt->update test
         * catches the case where we are nesting in EmitTree for a labeled
         * compound statement.
         */
        if (bce->topStmt &&
            bce->topStmt->type == STMT_LABEL &&
            bce->topStmt->update >= bce->offset())
        {
            useful = true;
        }
    }

    if (useful) {
        JSOp op = wantval ? JSOP_SETRVAL : JSOP_POP;
        JS_ASSERT_IF(pn2->isKind(PNK_ASSIGN), pn2->isOp(JSOP_NOP));
        /*
         * A group assignment may emit the whole statement itself; in that
         * case it sets |op| to JSOP_NOP and the generic path below is
         * skipped.
         */
        if (!wantval &&
            pn2->isKind(PNK_ASSIGN) &&
            !MaybeEmitGroupAssignment(cx, bce, op, pn2, GroupIsNotDecl, &op))
        {
            return false;
        }
        if (op != JSOP_NOP) {
            if (!EmitTree(cx, bce, pn2))
                return false;
            if (Emit1(cx, bce, op) < 0)
                return false;
        }
    } else if (!pn->isDirectivePrologueMember()) {
        /* Don't complain about directive prologue members; just don't emit their code. */
        /* Point the warning at the expression, not the whole statement. */
        bce->current->currentLine = bce->parser->tokenStream.srcCoords.lineNum(pn2->pn_pos.begin);
        bce->current->lastColumn = 0;
        if (!bce->reportStrictWarning(pn2, JSMSG_USELESS_EXPR))
            return false;
    }

    return true;
}
|
5419 |
|
5420 static bool |
|
5421 EmitDelete(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) |
|
5422 { |
|
5423 /* |
|
5424 * Under ECMA 3, deleting a non-reference returns true -- but alas we |
|
5425 * must evaluate the operand if it appears it might have side effects. |
|
5426 */ |
|
5427 ParseNode *pn2 = pn->pn_kid; |
|
5428 switch (pn2->getKind()) { |
|
5429 case PNK_NAME: |
|
5430 { |
|
5431 if (!BindNameToSlot(cx, bce, pn2)) |
|
5432 return false; |
|
5433 JSOp op = pn2->getOp(); |
|
5434 if (op == JSOP_FALSE) { |
|
5435 if (Emit1(cx, bce, op) < 0) |
|
5436 return false; |
|
5437 } else { |
|
5438 if (!EmitAtomOp(cx, pn2, op, bce)) |
|
5439 return false; |
|
5440 } |
|
5441 break; |
|
5442 } |
|
5443 case PNK_DOT: |
|
5444 if (!EmitPropOp(cx, pn2, JSOP_DELPROP, bce)) |
|
5445 return false; |
|
5446 break; |
|
5447 case PNK_ELEM: |
|
5448 if (!EmitElemOp(cx, pn2, JSOP_DELELEM, bce)) |
|
5449 return false; |
|
5450 break; |
|
5451 default: |
|
5452 { |
|
5453 /* |
|
5454 * If useless, just emit JSOP_TRUE; otherwise convert delete foo() |
|
5455 * to foo(), true (a comma expression). |
|
5456 */ |
|
5457 bool useful = false; |
|
5458 if (!CheckSideEffects(cx, bce, pn2, &useful)) |
|
5459 return false; |
|
5460 |
|
5461 if (useful) { |
|
5462 JS_ASSERT_IF(pn2->isKind(PNK_CALL), !(pn2->pn_xflags & PNX_SETCALL)); |
|
5463 if (!EmitTree(cx, bce, pn2)) |
|
5464 return false; |
|
5465 if (Emit1(cx, bce, JSOP_POP) < 0) |
|
5466 return false; |
|
5467 } |
|
5468 |
|
5469 if (Emit1(cx, bce, JSOP_TRUE) < 0) |
|
5470 return false; |
|
5471 } |
|
5472 } |
|
5473 |
|
5474 return true; |
|
5475 } |
|
5476 |
|
5477 static bool |
|
5478 EmitArray(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, uint32_t count); |
|
5479 |
|
static bool
EmitCallOrNew(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
    bool callop = pn->isKind(PNK_CALL);

    /*
     * Emit callable invocation or operator new (constructor call) code.
     * First, emit code for the left operand to evaluate the callable or
     * constructable object expression.
     *
     * For operator new, we emit JSOP_GETPROP instead of JSOP_CALLPROP, etc.
     * This is necessary to interpose the lambda-initialized method read
     * barrier -- see the code in jsinterp.cpp for JSOP_LAMBDA followed by
     * JSOP_{SET,INIT}PROP.
     *
     * Then (or in a call case that has no explicit reference-base
     * object) we emit JSOP_UNDEFINED to produce the undefined |this|
     * value required for calls (which non-strict mode functions
     * will box into the global object).
     */
    uint32_t argc = pn->pn_count - 1;

    if (argc >= ARGC_LIMIT) {
        bce->parser->tokenStream.reportError(callop
                                             ? JSMSG_TOO_MANY_FUN_ARGS
                                             : JSMSG_TOO_MANY_CON_ARGS);
        return false;
    }

    bool emitArgs = true;
    ParseNode *pn2 = pn->pn_head;
    /*
     * Spread call/new ops carry no immediate argc operand, so they are
     * JOF_BYTE; ordinary call ops take a two-byte argc immediate.
     */
    bool spread = JOF_OPTYPE(pn->getOp()) == JOF_BYTE;
    switch (pn2->getKind()) {
      case PNK_NAME:
        if (bce->emitterMode == BytecodeEmitter::SelfHosting &&
            pn2->name() == cx->names().callFunction &&
            !spread)
        {
            /*
             * Special-casing of callFunction to emit bytecode that directly
             * invokes the callee with the correct |this| object and arguments.
             * callFunction(fun, thisArg, arg0, arg1) thus becomes:
             * - emit lookup for fun
             * - emit lookup for thisArg
             * - emit lookups for arg0, arg1
             *
             * argc is set to the amount of actually emitted args and the
             * emitting of args below is disabled by setting emitArgs to false.
             */
            if (pn->pn_count < 3) {
                bce->reportError(pn, JSMSG_MORE_ARGS_NEEDED, "callFunction", "1", "s");
                return false;
            }
            ParseNode *funNode = pn2->pn_next;
            if (!EmitTree(cx, bce, funNode))
                return false;
            ParseNode *thisArg = funNode->pn_next;
            if (!EmitTree(cx, bce, thisArg))
                return false;
            bool oldEmittingForInit = bce->emittingForInit;
            bce->emittingForInit = false;
            for (ParseNode *argpn = thisArg->pn_next; argpn; argpn = argpn->pn_next) {
                if (!EmitTree(cx, bce, argpn))
                    return false;
            }
            bce->emittingForInit = oldEmittingForInit;
            /* fun and thisArg were consumed above; they are not call args. */
            argc -= 2;
            emitArgs = false;
            break;
        }
        if (!EmitNameOp(cx, bce, pn2, callop))
            return false;
        break;
      case PNK_DOT:
        if (!EmitPropOp(cx, pn2, callop ? JSOP_CALLPROP : JSOP_GETPROP, bce))
            return false;
        break;
      case PNK_ELEM:
        if (!EmitElemOp(cx, pn2, callop ? JSOP_CALLELEM : JSOP_GETELEM, bce))
            return false;
        break;
      case PNK_FUNCTION:
        /*
         * Top level lambdas which are immediately invoked should be
         * treated as only running once. Every time they execute we will
         * create new types and scripts for their contents, to increase
         * the quality of type information within them and enable more
         * backend optimizations. Note that this does not depend on the
         * lambda being invoked at most once (it may be named or be
         * accessed via foo.caller indirection), as multiple executions
         * will just cause the inner scripts to be repeatedly cloned.
         */
        JS_ASSERT(!bce->emittingRunOnceLambda);
        if (bce->checkSingletonContext() || (!bce->isInLoop() && bce->isRunOnceLambda())) {
            bce->emittingRunOnceLambda = true;
            if (!EmitTree(cx, bce, pn2))
                return false;
            bce->emittingRunOnceLambda = false;
        } else {
            if (!EmitTree(cx, bce, pn2))
                return false;
        }
        callop = false;
        break;
      default:
        if (!EmitTree(cx, bce, pn2))
            return false;
        callop = false; /* trigger JSOP_UNDEFINED after */
        break;
    }
    if (!callop) {
        /* Generator expressions are invoked with the enclosing |this|. */
        JSOp thisop = pn->isKind(PNK_GENEXP) ? JSOP_THIS : JSOP_UNDEFINED;
        if (Emit1(cx, bce, thisop) < 0)
            return false;
    }

    if (emitArgs) {
        /*
         * Emit code for each argument in order, then emit the JSOP_*CALL or
         * JSOP_NEW bytecode with a two-byte immediate telling how many args
         * were pushed on the operand stack.
         */
        bool oldEmittingForInit = bce->emittingForInit;
        bce->emittingForInit = false;
        if (!spread) {
            for (ParseNode *pn3 = pn2->pn_next; pn3; pn3 = pn3->pn_next) {
                if (!EmitTree(cx, bce, pn3))
                    return false;
            }
        } else {
            /* Spread calls pack all arguments into one array on the stack. */
            if (!EmitArray(cx, bce, pn2->pn_next, argc))
                return false;
        }
        bce->emittingForInit = oldEmittingForInit;
    }

    if (!spread) {
        if (EmitCall(cx, bce, pn->getOp(), argc) < 0)
            return false;
    } else {
        if (Emit1(cx, bce, pn->getOp()) < 0)
            return false;
    }
    CheckTypeSet(cx, bce, pn->getOp());
    if (pn->isOp(JSOP_EVAL) || pn->isOp(JSOP_SPREADEVAL)) {
        /* Record the line number of the eval call site. */
        uint32_t lineNum = bce->parser->tokenStream.srcCoords.lineNum(pn->pn_pos.begin);
        EMIT_UINT16_IMM_OP(JSOP_LINENO, lineNum);
    }
    if (pn->pn_xflags & PNX_SETCALL) {
        /* A call result used as an assignment target. */
        if (Emit1(cx, bce, JSOP_SETCALL) < 0)
            return false;
    }
    return true;
}
|
5634 |
|
static bool
EmitLogical(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
    /*
     * JSOP_OR converts the operand on the stack to boolean, leaves the original
     * value on the stack and jumps if true; otherwise it falls into the next
     * bytecode, which pops the left operand and then evaluates the right operand.
     * The jump goes around the right operand evaluation.
     *
     * JSOP_AND converts the operand on the stack to boolean and jumps if false;
     * otherwise it falls into the right operand's bytecode.
     */

    if (pn->isArity(PN_BINARY)) {
        if (!EmitTree(cx, bce, pn->pn_left))
            return false;
        /*
         * Emit a JSOP_BACKPATCH placeholder jump; once the right operand is
         * emitted, rewrite it in place as the real JSOP_AND/JSOP_OR
         * short-circuit jump to the join point.
         */
        ptrdiff_t top = EmitJump(cx, bce, JSOP_BACKPATCH, 0);
        if (top < 0)
            return false;
        if (Emit1(cx, bce, JSOP_POP) < 0)
            return false;
        if (!EmitTree(cx, bce, pn->pn_right))
            return false;
        ptrdiff_t off = bce->offset();
        jsbytecode *pc = bce->code(top);
        SET_JUMP_OFFSET(pc, off - top);
        *pc = pn->getOp();
        return true;
    }

    JS_ASSERT(pn->isArity(PN_LIST));
    JS_ASSERT(pn->pn_head->pn_next->pn_next);

    /* Left-associative operator chain: avoid too much recursion. */
    ParseNode *pn2 = pn->pn_head;
    if (!EmitTree(cx, bce, pn2))
        return false;
    ptrdiff_t top = EmitJump(cx, bce, JSOP_BACKPATCH, 0);
    if (top < 0)
        return false;
    if (Emit1(cx, bce, JSOP_POP) < 0)
        return false;

    /* Emit nodes between the head and the tail. */
    ptrdiff_t jmp = top;
    while ((pn2 = pn2->pn_next)->pn_next) {
        if (!EmitTree(cx, bce, pn2))
            return false;
        ptrdiff_t off = EmitJump(cx, bce, JSOP_BACKPATCH, 0);
        if (off < 0)
            return false;
        if (Emit1(cx, bce, JSOP_POP) < 0)
            return false;
        /* Thread this placeholder onto the chain: operand = delta to next. */
        SET_JUMP_OFFSET(bce->code(jmp), off - jmp);
        jmp = off;
    }
    if (!EmitTree(cx, bce, pn2))
        return false;

    pn2 = pn->pn_head;
    ptrdiff_t off = bce->offset();
    /*
     * Walk the backpatch chain.  Each placeholder's operand currently holds
     * the delta to the next placeholder; save it, then rewrite the
     * instruction as the real JSOP_AND/JSOP_OR jump to the common join
     * point at |off|.
     */
    do {
        jsbytecode *pc = bce->code(top);
        ptrdiff_t tmp = GET_JUMP_OFFSET(pc);
        SET_JUMP_OFFSET(pc, off - top);
        *pc = pn->getOp();
        top += tmp;
    } while ((pn2 = pn2->pn_next)->pn_next);

    return true;
}
|
5706 |
|
/*
 * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See
 * the comment on EmitSwitch.
 */
MOZ_NEVER_INLINE static bool
EmitIncOrDec(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
    /* Emit lvalue-specialized code for ++/-- operators. */
    ParseNode *pn2 = pn->pn_kid;
    switch (pn2->getKind()) {
      case PNK_DOT:
        if (!EmitPropIncDec(cx, pn, bce))
            return false;
        break;
      case PNK_ELEM:
        if (!EmitElemIncDec(cx, pn, bce))
            return false;
        break;
      case PNK_CALL:
        /* The parser flags assignable call results with PNX_SETCALL. */
        JS_ASSERT(pn2->pn_xflags & PNX_SETCALL);
        if (!EmitTree(cx, bce, pn2))
            return false;
        break;
      default:
        JS_ASSERT(pn2->isKind(PNK_NAME));
        pn2->setOp(JSOP_SETNAME);
        if (!BindNameToSlot(cx, bce, pn2))
            return false;
        JSOp op = pn2->getOp();
        /* maySet: did BindNameToSlot leave a set-class (writable) opcode? */
        bool maySet;
        switch (op) {
          case JSOP_SETLOCAL:
          case JSOP_SETARG:
          case JSOP_SETALIASEDVAR:
          case JSOP_SETNAME:
          case JSOP_SETGNAME:
            maySet = true;
            break;
          default:
            maySet = false;
        }
        if (op == JSOP_CALLEE) {
            if (Emit1(cx, bce, op) < 0)
                return false;
        } else if (!pn2->pn_cookie.isFree()) {
            /* The name resolved to a known arg/local slot. */
            if (maySet) {
                if (!EmitVarIncDec(cx, pn, bce))
                    return false;
            } else {
                if (!EmitVarOp(cx, pn2, op, bce))
                    return false;
            }
        } else {
            JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
            if (maySet) {
                if (!EmitNameIncDec(cx, pn, bce))
                    return false;
            } else {
                if (!EmitAtomOp(cx, pn2, op, bce))
                    return false;
            }
            /*
             * NOTE: this break exits the enclosing switch, deliberately
             * skipping the isConst() fixup below; that fixup applies only
             * to the JSOP_CALLEE and known-slot paths above.
             */
            break;
        }
        if (pn2->isConst()) {
            /*
             * For a const operand, coerce the value to a number with
             * JSOP_POS; prefix forms additionally apply the +/-1 so the
             * expression still yields the expected result value.
             */
            if (Emit1(cx, bce, JSOP_POS) < 0)
                return false;
            bool post;
            JSOp binop = GetIncDecInfo(pn->getKind(), &post);
            if (!post) {
                if (Emit1(cx, bce, JSOP_ONE) < 0)
                    return false;
                if (Emit1(cx, bce, binop) < 0)
                    return false;
            }
        }
    }
    return true;
}
|
5785 |
|
5786 /* |
|
5787 * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See |
|
5788 * the comment on EmitSwitch. |
|
5789 */ |
|
5790 MOZ_NEVER_INLINE static bool |
|
5791 EmitLabeledStatement(ExclusiveContext *cx, BytecodeEmitter *bce, const LabeledStatement *pn) |
|
5792 { |
|
5793 /* |
|
5794 * Emit a JSOP_LABEL instruction. The argument is the offset to the statement |
|
5795 * following the labeled statement. |
|
5796 */ |
|
5797 jsatomid index; |
|
5798 if (!bce->makeAtomIndex(pn->label(), &index)) |
|
5799 return false; |
|
5800 |
|
5801 ptrdiff_t top = EmitJump(cx, bce, JSOP_LABEL, 0); |
|
5802 if (top < 0) |
|
5803 return false; |
|
5804 |
|
5805 /* Emit code for the labeled statement. */ |
|
5806 StmtInfoBCE stmtInfo(cx); |
|
5807 PushStatementBCE(bce, &stmtInfo, STMT_LABEL, bce->offset()); |
|
5808 stmtInfo.label = pn->label(); |
|
5809 if (!EmitTree(cx, bce, pn->statement())) |
|
5810 return false; |
|
5811 if (!PopStatementBCE(cx, bce)) |
|
5812 return false; |
|
5813 |
|
5814 /* Patch the JSOP_LABEL offset. */ |
|
5815 SetJumpOffsetAt(bce, top); |
|
5816 return true; |
|
5817 } |
|
5818 |
|
5819 static bool |
|
5820 EmitSyntheticStatements(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, ptrdiff_t top) |
|
5821 { |
|
5822 JS_ASSERT(pn->isArity(PN_LIST)); |
|
5823 StmtInfoBCE stmtInfo(cx); |
|
5824 PushStatementBCE(bce, &stmtInfo, STMT_SEQ, top); |
|
5825 ParseNode *pn2 = pn->pn_head; |
|
5826 if (pn->pn_xflags & PNX_DESTRUCT) |
|
5827 pn2 = pn2->pn_next; |
|
5828 for (; pn2; pn2 = pn2->pn_next) { |
|
5829 if (!EmitTree(cx, bce, pn2)) |
|
5830 return false; |
|
5831 } |
|
5832 return PopStatementBCE(cx, bce); |
|
5833 } |
|
5834 |
|
static bool
EmitConditionalExpression(ExclusiveContext *cx, BytecodeEmitter *bce, ConditionalExpression &conditional)
{
    /* Emit the condition, then branch if false to the else part. */
    if (!EmitTree(cx, bce, &conditional.condition()))
        return false;
    /* SRC_COND marks this IFEQ as a ?: for the decompiler. */
    ptrdiff_t noteIndex = NewSrcNote(cx, bce, SRC_COND);
    if (noteIndex < 0)
        return false;
    ptrdiff_t beq = EmitJump(cx, bce, JSOP_IFEQ, 0);
    if (beq < 0 || !EmitTree(cx, bce, &conditional.thenExpression()))
        return false;

    /* Jump around else, fixup the branch, emit else, fixup jump. */
    ptrdiff_t jmp = EmitJump(cx, bce, JSOP_GOTO, 0);
    if (jmp < 0)
        return false;
    SetJumpOffsetAt(bce, beq);

    /*
     * Because each branch pushes a single value, but our stack budgeting
     * analysis ignores branches, we now have to adjust bce->stackDepth to
     * ignore the value pushed by the first branch. Execution will follow
     * only one path, so we must decrement bce->stackDepth.
     *
     * Failing to do this will foil code, such as let expression and block
     * code generation, which must use the stack depth to compute local
     * stack indexes correctly.
     */
    JS_ASSERT(bce->stackDepth > 0);
    bce->stackDepth--;
    if (!EmitTree(cx, bce, &conditional.elseExpression()))
        return false;
    SetJumpOffsetAt(bce, jmp);
    /* Record the then-branch span in the SRC_COND note's first operand. */
    return SetSrcNoteOffset(cx, bce, noteIndex, 0, jmp - beq);
}
|
5871 |
|
/*
 * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See
 * the comment on EmitSwitch.
 */
MOZ_NEVER_INLINE static bool
EmitObject(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
    if (pn->pn_xflags & PNX_DESTRUCT) {
        bce->reportError(pn, JSMSG_BAD_OBJECT_INIT);
        return false;
    }

    /* A constant, non-empty literal in singleton context becomes one object. */
    if (!(pn->pn_xflags & PNX_NONCONST) && pn->pn_head && bce->checkSingletonContext())
        return EmitSingletonInitialiser(cx, bce, pn);

    /*
     * Emit code for {p:a, '%q':b, 2:c} that is equivalent to constructing
     * a new object and defining (in source order) each property on the object
     * (or mutating the object's [[Prototype]], in the case of __proto__).
     */
    ptrdiff_t offset = bce->offset();
    if (!EmitNewInit(cx, bce, JSProto_Object))
        return false;

    /*
     * Try to construct the shape of the object as we go, so we can emit a
     * JSOP_NEWOBJECT with the final shape instead.
     */
    RootedObject obj(cx);
    if (bce->script->compileAndGo()) {
        gc::AllocKind kind = GuessObjectGCKind(pn->pn_count);
        obj = NewBuiltinClassInstance(cx, &JSObject::class_, kind, TenuredObject);
        if (!obj)
            return false;
    }

    for (ParseNode *pn2 = pn->pn_head; pn2; pn2 = pn2->pn_next) {
        if (!UpdateSourceCoordNotes(cx, bce, pn2->pn_pos.begin))
            return false;

        /* Emit an index for t[2] for later consumption by JSOP_INITELEM. */
        ParseNode *pn3 = pn2->pn_left;
        bool isIndex = false;
        if (pn3->isKind(PNK_NUMBER)) {
            if (!EmitNumberOp(cx, pn3->pn_dval, bce))
                return false;
            isIndex = true;
        } else {
            // The parser already checked for atoms representing indexes and
            // used PNK_NUMBER instead, but also watch for ids which TI treats
            // as indexes for simpliciation of downstream analysis.
            JS_ASSERT(pn3->isKind(PNK_NAME) || pn3->isKind(PNK_STRING));
            jsid id = NameToId(pn3->pn_atom->asPropertyName());
            if (id != types::IdToTypeId(id)) {
                if (!EmitTree(cx, bce, pn3))
                    return false;
                isIndex = true;
            }
        }

        /* Emit code for the property initializer. */
        if (!EmitTree(cx, bce, pn2->pn_right))
            return false;

        JSOp op = pn2->getOp();
        JS_ASSERT(op == JSOP_INITPROP ||
                  op == JSOP_INITPROP_GETTER ||
                  op == JSOP_INITPROP_SETTER);

        /* Accessor properties stop the template-shape tracking. */
        if (op == JSOP_INITPROP_GETTER || op == JSOP_INITPROP_SETTER)
            obj = nullptr;

        if (isIndex) {
            /* Indexed properties also make the shape unpredictable. */
            obj = nullptr;
            switch (op) {
              case JSOP_INITPROP:        op = JSOP_INITELEM;        break;
              case JSOP_INITPROP_GETTER: op = JSOP_INITELEM_GETTER; break;
              case JSOP_INITPROP_SETTER: op = JSOP_INITELEM_SETTER; break;
              default: MOZ_ASSUME_UNREACHABLE("Invalid op");
            }
            if (Emit1(cx, bce, op) < 0)
                return false;
        } else {
            JS_ASSERT(pn3->isKind(PNK_NAME) || pn3->isKind(PNK_STRING));

            // If we have { __proto__: expr }, implement prototype mutation.
            if (op == JSOP_INITPROP && pn3->pn_atom == cx->names().proto) {
                obj = nullptr;
                if (Emit1(cx, bce, JSOP_MUTATEPROTO) < 0)
                    return false;
                continue;
            }

            jsatomid index;
            if (!bce->makeAtomIndex(pn3->pn_atom, &index))
                return false;

            MOZ_ASSERT(op == JSOP_INITPROP ||
                       op == JSOP_INITPROP_GETTER ||
                       op == JSOP_INITPROP_SETTER);

            if (obj) {
                /*
                 * Predefine the property on the template object so the final
                 * shape can be baked into JSOP_NEWOBJECT below; abandon the
                 * template if it falls into dictionary mode.
                 */
                JS_ASSERT(!obj->inDictionaryMode());
                Rooted<jsid> id(cx, AtomToId(pn3->pn_atom));
                RootedValue undefinedValue(cx, UndefinedValue());
                if (!DefineNativeProperty(cx, obj, id, undefinedValue, nullptr,
                                          nullptr, JSPROP_ENUMERATE))
                {
                    return false;
                }
                if (obj->inDictionaryMode())
                    obj = nullptr;
            }

            if (!EmitIndex32(cx, op, index, bce))
                return false;
        }
    }

    if (Emit1(cx, bce, JSOP_ENDINIT) < 0)
        return false;

    if (obj) {
        /*
         * The object survived and has a predictable shape: update the original
         * bytecode.
         */
        ObjectBox *objbox = bce->parser->newObjectBox(obj);
        if (!objbox)
            return false;

        static_assert(JSOP_NEWINIT_LENGTH == JSOP_NEWOBJECT_LENGTH,
                      "newinit and newobject must have equal length to edit in-place");

        /* Rewrite the JSOP_NEWINIT at |offset| into JSOP_NEWOBJECT <index>. */
        uint32_t index = bce->objectList.add(objbox);
        jsbytecode *code = bce->code(offset);
        code[0] = JSOP_NEWOBJECT;
        code[1] = jsbytecode(index >> 24);
        code[2] = jsbytecode(index >> 16);
        code[3] = jsbytecode(index >> 8);
        code[4] = jsbytecode(index);
    }

    return true;
}
|
6017 |
|
6018 static bool |
|
6019 EmitArrayComp(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) |
|
6020 { |
|
6021 if (!EmitNewInit(cx, bce, JSProto_Array)) |
|
6022 return false; |
|
6023 |
|
6024 /* |
|
6025 * Pass the new array's stack index to the PNK_ARRAYPUSH case via |
|
6026 * bce->arrayCompDepth, then simply traverse the PNK_FOR node and |
|
6027 * its kids under pn2 to generate this comprehension. |
|
6028 */ |
|
6029 JS_ASSERT(bce->stackDepth > 0); |
|
6030 uint32_t saveDepth = bce->arrayCompDepth; |
|
6031 bce->arrayCompDepth = (uint32_t) (bce->stackDepth - 1); |
|
6032 if (!EmitTree(cx, bce, pn->pn_head)) |
|
6033 return false; |
|
6034 bce->arrayCompDepth = saveDepth; |
|
6035 |
|
6036 /* Emit the usual op needed for decompilation. */ |
|
6037 return Emit1(cx, bce, JSOP_ENDINIT) >= 0; |
|
6038 } |
|
6039 |
|
static bool
EmitArray(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn, uint32_t count)
{
    /*
     * Emit code for [a, b, c] that is equivalent to constructing a new
     * array and in source order evaluating each element value and adding
     * it to the array, without invoking latent setters. We use the
     * JSOP_NEWINIT and JSOP_INITELEM_ARRAY bytecodes to ignore setters and
     * to avoid dup'ing and popping the array as each element is added, as
     * JSOP_SETELEM/JSOP_SETPROP would do.
     */

    /* Count spread elements; their presence changes the strategy below. */
    int32_t nspread = 0;
    for (ParseNode *elt = pn; elt; elt = elt->pn_next) {
        if (elt->isKind(PNK_SPREAD))
            nspread++;
    }

    ptrdiff_t off = EmitN(cx, bce, JSOP_NEWARRAY, 3);
    if (off < 0)
        return false;
    CheckTypeSet(cx, bce, JSOP_NEWARRAY);
    jsbytecode *pc = bce->code(off);

    // For arrays with spread, this is a very pessimistic allocation, the
    // minimum possible final size.
    SET_UINT24(pc, count - nspread);

    ParseNode *pn2 = pn;
    jsatomid atomIndex;
    /*
     * With spreads, final element indexes are unknown at compile time, so
     * keep a runtime index counter on the stack (starting at 0) for
     * JSOP_INITELEM_INC / JSOP_SPREAD to advance.
     */
    if (nspread && !EmitNumberOp(cx, 0, bce))
        return false;
    for (atomIndex = 0; pn2; atomIndex++, pn2 = pn2->pn_next) {
        if (!UpdateSourceCoordNotes(cx, bce, pn2->pn_pos.begin))
            return false;
        if (pn2->isKind(PNK_ELISION)) {
            /* An elision ([,]) leaves a hole instead of a value. */
            if (Emit1(cx, bce, JSOP_HOLE) < 0)
                return false;
        } else {
            ParseNode *expr = pn2->isKind(PNK_SPREAD) ? pn2->pn_kid : pn2;
            if (!EmitTree(cx, bce, expr))
                return false;
        }
        if (pn2->isKind(PNK_SPREAD)) {
            if (Emit1(cx, bce, JSOP_SPREAD) < 0)
                return false;
        } else if (nspread) {
            if (Emit1(cx, bce, JSOP_INITELEM_INC) < 0)
                return false;
        } else {
            /* No spread anywhere: the element index is an immediate. */
            off = EmitN(cx, bce, JSOP_INITELEM_ARRAY, 3);
            if (off < 0)
                return false;
            SET_UINT24(bce->code(off), atomIndex);
        }
    }
    JS_ASSERT(atomIndex == count);
    if (nspread) {
        /* Pop the runtime index counter. */
        if (Emit1(cx, bce, JSOP_POP) < 0)
            return false;
    }

    /* Emit an op to finish the array and aid in decompilation. */
    return Emit1(cx, bce, JSOP_ENDINIT) >= 0;
}
|
6105 |
|
6106 static bool |
|
6107 EmitUnary(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn) |
|
6108 { |
|
6109 if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin)) |
|
6110 return false; |
|
6111 /* Unary op, including unary +/-. */ |
|
6112 JSOp op = pn->getOp(); |
|
6113 ParseNode *pn2 = pn->pn_kid; |
|
6114 |
|
6115 if (op == JSOP_TYPEOF && !pn2->isKind(PNK_NAME)) |
|
6116 op = JSOP_TYPEOFEXPR; |
|
6117 |
|
6118 bool oldEmittingForInit = bce->emittingForInit; |
|
6119 bce->emittingForInit = false; |
|
6120 if (!EmitTree(cx, bce, pn2)) |
|
6121 return false; |
|
6122 |
|
6123 bce->emittingForInit = oldEmittingForInit; |
|
6124 return Emit1(cx, bce, op) >= 0; |
|
6125 } |
|
6126 |
|
/*
 * Emit initialization of default parameter values: for each formal with a
 * default, emit the equivalent of |if (arg === undefined) arg = <default>|.
 */
static bool
EmitDefaults(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
    JS_ASSERT(pn->isKind(PNK_ARGSBODY));

    ParseNode *arg, *pnlast = pn->last();
    for (arg = pn->pn_head; arg != pnlast; arg = arg->pn_next) {
        /* Only simple named formals flagged PND_DEFAULT carry a default. */
        if (!(arg->pn_dflags & PND_DEFAULT) || !arg->isKind(PNK_NAME))
            continue;
        if (!BindNameToSlot(cx, bce, arg))
            return false;
        /* Test: arg === undefined */
        if (!EmitVarOp(cx, arg, JSOP_GETARG, bce))
            return false;
        if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
            return false;
        if (Emit1(cx, bce, JSOP_STRICTEQ) < 0)
            return false;
        // Emit source note to enable ion compilation.
        if (NewSrcNote(cx, bce, SRC_IF) < 0)
            return false;
        ptrdiff_t jump = EmitJump(cx, bce, JSOP_IFEQ, 0);
        if (jump < 0)
            return false;
        /* Evaluate the default expression and store it in the arg slot. */
        if (!EmitTree(cx, bce, arg->expr()))
            return false;
        if (!EmitVarOp(cx, arg, JSOP_SETARG, bce))
            return false;
        if (Emit1(cx, bce, JSOP_POP) < 0)
            return false;
        /* Patch the IFEQ to skip the default when the arg was provided. */
        SET_JUMP_OFFSET(bce->code(jump), bce->offset() - jump);
    }

    return true;
}
|
6161 |
|
bool
frontend::EmitTree(ExclusiveContext *cx, BytecodeEmitter *bce, ParseNode *pn)
{
    // Emit bytecode for the parse tree rooted at pn, dispatching on node kind.
    // Returns false (with an error already reported) on failure.  Most kinds
    // delegate to a dedicated Emit* helper; the PNK_ARGSBODY case inlines the
    // careful ordering needed for function prologues.
    JS_CHECK_RECURSION(cx, return false);

    // RAII: bumps bce->emitLevel on entry, restores it on every exit path.
    EmitLevelManager elm(bce);

    bool ok = true;
    ptrdiff_t top = bce->offset();
    pn->pn_offset = top;

    /* Emit notes to tell the current bytecode's source line number. */
    if (!UpdateLineNumberNotes(cx, bce, pn->pn_pos.begin))
        return false;

    switch (pn->getKind()) {
      case PNK_FUNCTION:
        ok = EmitFunc(cx, bce, pn);
        break;

      case PNK_ARGSBODY:
      {
        RootedFunction fun(cx, bce->sc->asFunctionBox()->function());
        ParseNode *pnlast = pn->last();

        // Carefully emit everything in the right order:
        // 1. Destructuring
        // 2. Functions
        // 3. Defaults
        ParseNode *pnchild = pnlast->pn_head;
        if (pnlast->pn_xflags & PNX_DESTRUCT) {
            // Assign the destructuring arguments before defining any functions,
            // see bug 419662.
            JS_ASSERT(pnchild->isKind(PNK_SEMI));
            JS_ASSERT(pnchild->pn_kid->isKind(PNK_VAR) || pnchild->pn_kid->isKind(PNK_CONST));
            if (!EmitTree(cx, bce, pnchild))
                return false;
            pnchild = pnchild->pn_next;
        }
        if (pnlast->pn_xflags & PNX_FUNCDEFS) {
            // This block contains top-level function definitions. To ensure
            // that we emit the bytecode defining them before the rest of code
            // in the block we use a separate pass over functions. During the
            // main pass later the emitter will add JSOP_NOP with source notes
            // for the function to preserve the original functions position
            // when decompiling.
            //
            // Currently this is used only for functions, as compile-as-we go
            // mode for scripts does not allow separate emitter passes.
            for (ParseNode *pn2 = pnchild; pn2; pn2 = pn2->pn_next) {
                if (pn2->isKind(PNK_FUNCTION) && pn2->functionIsHoisted()) {
                    if (!EmitTree(cx, bce, pn2))
                        return false;
                }
            }
        }
        bool hasDefaults = bce->sc->asFunctionBox()->hasDefaults();
        if (hasDefaults) {
            ParseNode *rest = nullptr;
            bool restIsDefn = false;
            if (fun->hasRest()) {
                JS_ASSERT(!bce->sc->asFunctionBox()->argumentsHasLocalBinding());

                // Defaults with a rest parameter need special handling. The
                // rest parameter needs to be undefined while defaults are being
                // processed. To do this, we create the rest argument and let it
                // sit on the stack while processing defaults. The rest
                // parameter's slot is set to undefined for the course of
                // default processing.
                rest = pn->pn_head;
                while (rest->pn_next != pnlast)
                    rest = rest->pn_next;
                restIsDefn = rest->isDefn();
                if (Emit1(cx, bce, JSOP_REST) < 0)
                    return false;
                CheckTypeSet(cx, bce, JSOP_REST);

                // Only set the rest parameter if it's not aliased by a nested
                // function in the body.
                if (restIsDefn) {
                    if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
                        return false;
                    if (!BindNameToSlot(cx, bce, rest))
                        return false;
                    if (!EmitVarOp(cx, rest, JSOP_SETARG, bce))
                        return false;
                    if (Emit1(cx, bce, JSOP_POP) < 0)
                        return false;
                }
            }
            if (!EmitDefaults(cx, bce, pn))
                return false;
            if (fun->hasRest()) {
                // The rest array created above is still on the stack; store it
                // into the parameter's slot now that defaults are done.
                if (restIsDefn && !EmitVarOp(cx, rest, JSOP_SETARG, bce))
                    return false;
                if (Emit1(cx, bce, JSOP_POP) < 0)
                    return false;
            }
        }
        for (ParseNode *pn2 = pn->pn_head; pn2 != pnlast; pn2 = pn2->pn_next) {
            // Only bind the parameter if it's not aliased by a nested function
            // in the body.
            if (!pn2->isDefn())
                continue;
            if (!BindNameToSlot(cx, bce, pn2))
                return false;
            if (pn2->pn_next == pnlast && fun->hasRest() && !hasDefaults) {
                // Fill rest parameter. We handled the case with defaults above.
                JS_ASSERT(!bce->sc->asFunctionBox()->argumentsHasLocalBinding());
                bce->switchToProlog();
                if (Emit1(cx, bce, JSOP_REST) < 0)
                    return false;
                CheckTypeSet(cx, bce, JSOP_REST);
                if (!EmitVarOp(cx, pn2, JSOP_SETARG, bce))
                    return false;
                if (Emit1(cx, bce, JSOP_POP) < 0)
                    return false;
                bce->switchToMain();
            }
        }
        ok = EmitTree(cx, bce, pnlast);
        break;
      }

      case PNK_IF:
        ok = EmitIf(cx, bce, pn);
        break;

      case PNK_SWITCH:
        ok = EmitSwitch(cx, bce, pn);
        break;

      case PNK_WHILE:
        ok = EmitWhile(cx, bce, pn, top);
        break;

      case PNK_DOWHILE:
        ok = EmitDo(cx, bce, pn);
        break;

      case PNK_FOR:
        ok = EmitFor(cx, bce, pn, top);
        break;

      case PNK_BREAK:
        ok = EmitBreak(cx, bce, pn->as<BreakStatement>().label());
        break;

      case PNK_CONTINUE:
        ok = EmitContinue(cx, bce, pn->as<ContinueStatement>().label());
        break;

      case PNK_WITH:
        ok = EmitWith(cx, bce, pn);
        break;

      case PNK_TRY:
        if (!EmitTry(cx, bce, pn))
            return false;
        break;

      case PNK_CATCH:
        if (!EmitCatch(cx, bce, pn))
            return false;
        break;

      case PNK_VAR:
      case PNK_CONST:
        if (!EmitVariables(cx, bce, pn, InitializeVars))
            return false;
        break;

      case PNK_RETURN:
        ok = EmitReturn(cx, bce, pn);
        break;

      case PNK_YIELD_STAR:
        ok = EmitYieldStar(cx, bce, pn->pn_kid);
        break;

      case PNK_YIELD:
        // Star generators wrap the yielded value in an iterator-result object
        // ({value, done}); legacy generators yield the raw value.
        JS_ASSERT(bce->sc->isFunctionBox());
        if (bce->sc->asFunctionBox()->isStarGenerator()) {
            if (!EmitPrepareIteratorResult(cx, bce))
                return false;
        }
        if (pn->pn_kid) {
            if (!EmitTree(cx, bce, pn->pn_kid))
                return false;
        } else {
            // `yield;` with no operand yields undefined.
            if (Emit1(cx, bce, JSOP_UNDEFINED) < 0)
                return false;
        }
        if (bce->sc->asFunctionBox()->isStarGenerator()) {
            if (!EmitFinishIteratorResult(cx, bce, false))
                return false;
        }
        if (Emit1(cx, bce, JSOP_YIELD) < 0)
            return false;
        break;

      case PNK_STATEMENTLIST:
        ok = EmitStatementList(cx, bce, pn, top);
        break;

      case PNK_SEQ:
        ok = EmitSyntheticStatements(cx, bce, pn, top);
        break;

      case PNK_SEMI:
        ok = EmitStatement(cx, bce, pn);
        break;

      case PNK_LABEL:
        ok = EmitLabeledStatement(cx, bce, &pn->as<LabeledStatement>());
        break;

      case PNK_COMMA:
      {
        // Evaluate each operand in order, popping every result but the last.
        for (ParseNode *pn2 = pn->pn_head; ; pn2 = pn2->pn_next) {
            if (!UpdateSourceCoordNotes(cx, bce, pn2->pn_pos.begin))
                return false;
            if (!EmitTree(cx, bce, pn2))
                return false;
            if (!pn2->pn_next)
                break;
            if (Emit1(cx, bce, JSOP_POP) < 0)
                return false;
        }
        break;
      }

      case PNK_ASSIGN:
      case PNK_ADDASSIGN:
      case PNK_SUBASSIGN:
      case PNK_BITORASSIGN:
      case PNK_BITXORASSIGN:
      case PNK_BITANDASSIGN:
      case PNK_LSHASSIGN:
      case PNK_RSHASSIGN:
      case PNK_URSHASSIGN:
      case PNK_MULASSIGN:
      case PNK_DIVASSIGN:
      case PNK_MODASSIGN:
        if (!EmitAssignment(cx, bce, pn->pn_left, pn->getOp(), pn->pn_right))
            return false;
        break;

      case PNK_CONDITIONAL:
        ok = EmitConditionalExpression(cx, bce, pn->as<ConditionalExpression>());
        break;

      case PNK_OR:
      case PNK_AND:
        ok = EmitLogical(cx, bce, pn);
        break;

      case PNK_ADD:
      case PNK_SUB:
      case PNK_BITOR:
      case PNK_BITXOR:
      case PNK_BITAND:
      case PNK_STRICTEQ:
      case PNK_EQ:
      case PNK_STRICTNE:
      case PNK_NE:
      case PNK_LT:
      case PNK_LE:
      case PNK_GT:
      case PNK_GE:
      case PNK_IN:
      case PNK_INSTANCEOF:
      case PNK_LSH:
      case PNK_RSH:
      case PNK_URSH:
      case PNK_STAR:
      case PNK_DIV:
      case PNK_MOD:
        if (pn->isArity(PN_LIST)) {
            /* Left-associative operator chain: avoid too much recursion. */
            ParseNode *pn2 = pn->pn_head;
            if (!EmitTree(cx, bce, pn2))
                return false;
            JSOp op = pn->getOp();
            while ((pn2 = pn2->pn_next) != nullptr) {
                if (!EmitTree(cx, bce, pn2))
                    return false;
                if (Emit1(cx, bce, op) < 0)
                    return false;
            }
        } else {
            /* Binary operators that evaluate both operands unconditionally. */
            if (!EmitTree(cx, bce, pn->pn_left))
                return false;
            if (!EmitTree(cx, bce, pn->pn_right))
                return false;
            if (Emit1(cx, bce, pn->getOp()) < 0)
                return false;
        }
        break;

      case PNK_THROW:
      case PNK_TYPEOF:
      case PNK_VOID:
      case PNK_NOT:
      case PNK_BITNOT:
      case PNK_POS:
      case PNK_NEG:
        ok = EmitUnary(cx, bce, pn);
        break;

      case PNK_PREINCREMENT:
      case PNK_PREDECREMENT:
      case PNK_POSTINCREMENT:
      case PNK_POSTDECREMENT:
        ok = EmitIncOrDec(cx, bce, pn);
        break;

      case PNK_DELETE:
        ok = EmitDelete(cx, bce, pn);
        break;

      case PNK_DOT:
        ok = EmitPropOp(cx, pn, JSOP_GETPROP, bce);
        break;

      case PNK_ELEM:
        ok = EmitElemOp(cx, pn, JSOP_GETELEM, bce);
        break;

      case PNK_NEW:
      case PNK_CALL:
      case PNK_GENEXP:
        ok = EmitCallOrNew(cx, bce, pn);
        break;

      case PNK_LEXICALSCOPE:
        ok = EmitLexicalScope(cx, bce, pn);
        break;

      case PNK_LET:
        // Binary arity is a `let` block/expression; otherwise it is a list of
        // let declarations, emitted like var declarations.
        ok = pn->isArity(PN_BINARY)
             ? EmitLet(cx, bce, pn)
             : EmitVariables(cx, bce, pn, InitializeVars);
        break;

      case PNK_IMPORT:
      case PNK_EXPORT:
        // TODO: Implement emitter support for modules
        bce->reportError(nullptr, JSMSG_MODULES_NOT_IMPLEMENTED);
        return false;

      case PNK_ARRAYPUSH: {
        /*
         * The array object's stack index is in bce->arrayCompDepth. See below
         * under the array initialiser code generator for array comprehension
         * special casing. Note that the array object is a pure stack value,
         * unaliased by blocks, so we can EmitUnaliasedVarOp.
         */
        if (!EmitTree(cx, bce, pn->pn_kid))
            return false;
        if (!EmitDupAt(cx, bce, bce->arrayCompDepth))
            return false;
        if (Emit1(cx, bce, JSOP_ARRAYPUSH) < 0)
            return false;
        break;
      }

      case PNK_ARRAY:
        // Constant array literals in singleton contexts can be baked into a
        // single object instead of being rebuilt element by element.
        if (!(pn->pn_xflags & PNX_NONCONST) && pn->pn_head && bce->checkSingletonContext())
            ok = EmitSingletonInitialiser(cx, bce, pn);
        else
            ok = EmitArray(cx, bce, pn->pn_head, pn->pn_count);
        break;

      case PNK_ARRAYCOMP:
        ok = EmitArrayComp(cx, bce, pn);
        break;

      case PNK_OBJECT:
        ok = EmitObject(cx, bce, pn);
        break;

      case PNK_NAME:
        if (!EmitNameOp(cx, bce, pn, false))
            return false;
        break;

      case PNK_STRING:
        ok = EmitAtomOp(cx, pn, pn->getOp(), bce);
        break;

      case PNK_NUMBER:
        ok = EmitNumberOp(cx, pn->pn_dval, bce);
        break;

      case PNK_REGEXP:
        ok = EmitRegExp(cx, bce->regexpList.add(pn->as<RegExpLiteral>().objbox()), bce);
        break;

      case PNK_TRUE:
      case PNK_FALSE:
      case PNK_THIS:
      case PNK_NULL:
        // These nodes carry their opcode directly (JSOP_TRUE, JSOP_NULL, ...).
        if (Emit1(cx, bce, pn->getOp()) < 0)
            return false;
        break;

      case PNK_DEBUGGER:
        if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.begin))
            return false;
        if (Emit1(cx, bce, JSOP_DEBUGGER) < 0)
            return false;
        break;

      case PNK_NOP:
        JS_ASSERT(pn->getArity() == PN_NULLARY);
        break;

      default:
        JS_ASSERT(0);
    }

    /* bce->emitLevel == 1 means we're last on the stack, so finish up. */
    if (ok && bce->emitLevel == 1) {
        if (!UpdateSourceCoordNotes(cx, bce, pn->pn_pos.end))
            return false;
    }

    return ok;
}
|
6593 |
|
6594 static int |
|
6595 AllocSrcNote(ExclusiveContext *cx, SrcNotesVector ¬es) |
|
6596 { |
|
6597 // Start it off moderately large to avoid repeated resizings early on. |
|
6598 if (notes.capacity() == 0 && !notes.reserve(1024)) |
|
6599 return -1; |
|
6600 |
|
6601 jssrcnote dummy = 0; |
|
6602 if (!notes.append(dummy)) { |
|
6603 js_ReportOutOfMemory(cx); |
|
6604 return -1; |
|
6605 } |
|
6606 return notes.length() - 1; |
|
6607 } |
|
6608 |
|
int
frontend::NewSrcNote(ExclusiveContext *cx, BytecodeEmitter *bce, SrcNoteType type)
{
    // Append a source note of |type| annotated with the bytecode delta since
    // the last annotated opcode, preceded by as many extended-delta (xdelta)
    // notes as needed when the delta exceeds the in-note field.  Returns the
    // index of the typed note, or -1 on OOM.
    SrcNotesVector &notes = bce->notes();
    int index;

    index = AllocSrcNote(cx, notes);
    if (index < 0)
        return -1;

    /*
     * Compute delta from the last annotated bytecode's offset.  If it's too
     * big to fit in sn, allocate one or more xdelta notes and reset sn.
     */
    ptrdiff_t offset = bce->offset();
    ptrdiff_t delta = offset - bce->lastNoteOffset();
    bce->current->lastNoteOffset = offset;
    if (delta >= SN_DELTA_LIMIT) {
        do {
            // Each xdelta note absorbs up to SN_XDELTA_MASK of the delta.
            ptrdiff_t xdelta = Min(delta, SN_XDELTA_MASK);
            SN_MAKE_XDELTA(&notes[index], xdelta);
            delta -= xdelta;
            index = AllocSrcNote(cx, notes);
            if (index < 0)
                return -1;
        } while (delta >= SN_DELTA_LIMIT);
    }

    /*
     * Initialize type and delta, then allocate the minimum number of notes
     * needed for type's arity.  Usually, we won't need more, but if an offset
     * does take two bytes, SetSrcNoteOffset will grow notes.
     */
    SN_MAKE_NOTE(&notes[index], type, delta);
    // Recursing with SRC_NULL appends one placeholder slot per offset operand;
    // the recursion terminates because SRC_NULL has arity 0.
    for (int n = (int)js_SrcNoteSpec[type].arity; n > 0; n--) {
        if (NewSrcNote(cx, bce, SRC_NULL) < 0)
            return -1;
    }
    return index;
}
|
6649 |
|
6650 int |
|
6651 frontend::NewSrcNote2(ExclusiveContext *cx, BytecodeEmitter *bce, SrcNoteType type, ptrdiff_t offset) |
|
6652 { |
|
6653 int index; |
|
6654 |
|
6655 index = NewSrcNote(cx, bce, type); |
|
6656 if (index >= 0) { |
|
6657 if (!SetSrcNoteOffset(cx, bce, index, 0, offset)) |
|
6658 return -1; |
|
6659 } |
|
6660 return index; |
|
6661 } |
|
6662 |
|
6663 int |
|
6664 frontend::NewSrcNote3(ExclusiveContext *cx, BytecodeEmitter *bce, SrcNoteType type, ptrdiff_t offset1, |
|
6665 ptrdiff_t offset2) |
|
6666 { |
|
6667 int index; |
|
6668 |
|
6669 index = NewSrcNote(cx, bce, type); |
|
6670 if (index >= 0) { |
|
6671 if (!SetSrcNoteOffset(cx, bce, index, 0, offset1)) |
|
6672 return -1; |
|
6673 if (!SetSrcNoteOffset(cx, bce, index, 1, offset2)) |
|
6674 return -1; |
|
6675 } |
|
6676 return index; |
|
6677 } |
|
6678 |
|
bool
frontend::AddToSrcNoteDelta(ExclusiveContext *cx, BytecodeEmitter *bce, jssrcnote *sn, ptrdiff_t delta)
{
    /*
     * Called only from FinishTakingSrcNotes to add to main script note
     * deltas, and only by a small positive amount.
     */
    JS_ASSERT(bce->current == &bce->main);
    JS_ASSERT((unsigned) delta < (unsigned) SN_XDELTA_LIMIT);

    ptrdiff_t base = SN_DELTA(sn);
    ptrdiff_t limit = SN_IS_XDELTA(sn) ? SN_XDELTA_LIMIT : SN_DELTA_LIMIT;
    ptrdiff_t newdelta = base + delta;
    if (newdelta < limit) {
        // The grown delta still fits in this note's own delta field.
        SN_SET_DELTA(sn, newdelta);
    } else {
        // Field would overflow: insert a fresh xdelta note carrying |delta|
        // immediately before |sn|, leaving sn's own delta unchanged.
        jssrcnote xdelta;
        SN_MAKE_XDELTA(&xdelta, delta);
        if (!(sn = bce->main.notes.insert(sn, xdelta)))
            return false;
    }
    return true;
}
|
6702 |
|
static bool
SetSrcNoteOffset(ExclusiveContext *cx, BytecodeEmitter *bce, unsigned index, unsigned which,
                 ptrdiff_t offset)
{
    // Store the |which|'th offset operand of the note at notes[index],
    // switching that operand to the inflated 4-byte encoding in place when the
    // value doesn't fit in one byte.  Returns false on overflow or OOM.
    if (size_t(offset) > SN_MAX_OFFSET) {
        ReportStatementTooLarge(bce->parser->tokenStream, bce->topStmt);
        return false;
    }

    SrcNotesVector &notes = bce->notes();

    /* Find the offset numbered which (i.e., skip exactly which offsets). */
    jssrcnote *sn = notes.begin() + index;
    JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA);
    JS_ASSERT((int) which < js_SrcNoteSpec[SN_TYPE(sn)].arity);
    for (sn++; which; sn++, which--) {
        // An inflated operand occupies three extra bytes past its first one.
        if (*sn & SN_4BYTE_OFFSET_FLAG)
            sn += 3;
    }

    /*
     * See if the new offset requires three bytes either by being too big or if
     * the offset has already been inflated (in which case, we need to stay big
     * to not break the srcnote encoding if this isn't the last srcnote).
     */
    if (offset > (ptrdiff_t)SN_4BYTE_OFFSET_MASK || (*sn & SN_4BYTE_OFFSET_FLAG)) {
        /* Maybe this offset was already set to a three-byte value. */
        if (!(*sn & SN_4BYTE_OFFSET_FLAG)) {
            /* Insert two dummy bytes that will be overwritten shortly. */
            // NOTE(review): three bytes are actually inserted here; the 4-byte
            // form is the one pre-existing byte plus these three.
            jssrcnote dummy = 0;
            if (!(sn = notes.insert(sn, dummy)) ||
                !(sn = notes.insert(sn, dummy)) ||
                !(sn = notes.insert(sn, dummy)))
            {
                js_ReportOutOfMemory(cx);
                return false;
            }
        }
        // Big-endian layout; the most significant byte carries the flag bit.
        *sn++ = (jssrcnote)(SN_4BYTE_OFFSET_FLAG | (offset >> 24));
        *sn++ = (jssrcnote)(offset >> 16);
        *sn++ = (jssrcnote)(offset >> 8);
    }
    *sn = (jssrcnote)offset;
    return true;
}
|
6748 |
|
/*
 * Finish taking source notes in cx's notePool.
 * If successful, the final source note count is stored in the out outparam.
 */
bool
frontend::FinishTakingSrcNotes(ExclusiveContext *cx, BytecodeEmitter *bce, uint32_t *out)
{
    JS_ASSERT(bce->current == &bce->main);

    unsigned prologCount = bce->prolog.notes.length();
    if (prologCount && bce->prolog.currentLine != bce->firstLine) {
        // Prolog notes moved the line counter; reset it so main notes start
        // from the script's first line.
        bce->switchToProlog();
        if (NewSrcNote2(cx, bce, SRC_SETLINE, (ptrdiff_t)bce->firstLine) < 0)
            return false;
        bce->switchToMain();
    } else {
        /*
         * Either no prolog srcnotes, or no line number change over prolog.
         * We don't need a SRC_SETLINE, but we may need to adjust the offset
         * of the first main note, by adding to its delta and possibly even
         * prepending SRC_XDELTA notes to it to account for prolog bytecodes
         * that came at and after the last annotated bytecode.
         */
        ptrdiff_t offset = bce->prologOffset() - bce->prolog.lastNoteOffset;
        JS_ASSERT(offset >= 0);
        if (offset > 0 && bce->main.notes.length() != 0) {
            /* NB: Use as much of the first main note's delta as we can. */
            jssrcnote *sn = bce->main.notes.begin();
            ptrdiff_t delta = SN_IS_XDELTA(sn)
                              ? SN_XDELTA_MASK - (*sn & SN_XDELTA_MASK)
                              : SN_DELTA_MASK - (*sn & SN_DELTA_MASK);
            if (offset < delta)
                delta = offset;
            for (;;) {
                // AddToSrcNoteDelta may prepend a new xdelta note, so reload
                // the vector's begin() pointer each iteration.
                if (!AddToSrcNoteDelta(cx, bce, sn, delta))
                    return false;
                offset -= delta;
                if (offset == 0)
                    break;
                delta = Min(offset, SN_XDELTA_MASK);
                sn = bce->main.notes.begin();
            }
        }
    }

    // The prolog count might have changed, so we can't reuse prologCount.
    // The + 1 is to account for the final SN_MAKE_TERMINATOR that is appended
    // when the notes are copied to their final destination by CopySrcNotes.
    *out = bce->prolog.notes.length() + bce->main.notes.length() + 1;
    return true;
}
|
6800 |
|
6801 void |
|
6802 frontend::CopySrcNotes(BytecodeEmitter *bce, jssrcnote *destination, uint32_t nsrcnotes) |
|
6803 { |
|
6804 unsigned prologCount = bce->prolog.notes.length(); |
|
6805 unsigned mainCount = bce->main.notes.length(); |
|
6806 unsigned totalCount = prologCount + mainCount; |
|
6807 MOZ_ASSERT(totalCount == nsrcnotes - 1); |
|
6808 if (prologCount) |
|
6809 PodCopy(destination, bce->prolog.notes.begin(), prologCount); |
|
6810 PodCopy(destination + prologCount, bce->main.notes.begin(), mainCount); |
|
6811 SN_MAKE_TERMINATOR(&destination[totalCount]); |
|
6812 } |
|
6813 |
|
6814 void |
|
6815 CGConstList::finish(ConstArray *array) |
|
6816 { |
|
6817 JS_ASSERT(length() == array->length); |
|
6818 |
|
6819 for (unsigned i = 0; i < length(); i++) |
|
6820 array->vector[i] = list[i]; |
|
6821 } |
|
6822 |
|
6823 /* |
|
6824 * Find the index of the given object for code generator. |
|
6825 * |
|
6826 * Since the emitter refers to each parsed object only once, for the index we |
|
 * use the number of already-indexed objects. We also add the object to a list
|
6828 * to convert the list to a fixed-size array when we complete code generation, |
|
6829 * see js::CGObjectList::finish below. |
|
6830 * |
|
6831 * Most of the objects go to BytecodeEmitter::objectList but for regexp we use |
|
 * a separate BytecodeEmitter::regexpList. In this way the emitted index can
|
6833 * be directly used to store and fetch a reference to a cloned RegExp object |
|
6834 * that shares the same JSRegExp private data created for the object literal in |
|
6835 * objbox. We need a cloned object to hold lastIndex and other direct |
|
6836 * properties that should not be shared among threads sharing a precompiled |
|
6837 * function or script. |
|
6838 * |
|
6839 * If the code being compiled is function code, allocate a reserved slot in |
|
6840 * the cloned function object that shares its precompiled script with other |
|
6841 * cloned function objects and with the compiler-created clone-parent. There |
|
6842 * are nregexps = script->regexps()->length such reserved slots in each |
|
6843 * function object cloned from fun->object. NB: during compilation, a funobj |
|
6844 * slots element must never be allocated, because JSObject::allocSlot could |
|
6845 * hand out one of the slots that should be given to a regexp clone. |
|
6846 * |
|
6847 * If the code being compiled is global code, the cloned regexp are stored in |
|
6848 * fp->vars slot and to protect regexp slots from GC we set fp->nvars to |
|
6849 * nregexps. |
|
6850 * |
|
6851 * The slots initially contain undefined or null. We populate them lazily when |
|
6852 * JSOP_REGEXP is executed for the first time. |
|
6853 * |
|
6854 * Why clone regexp objects? ECMA specifies that when a regular expression |
|
6855 * literal is scanned, a RegExp object is created. In the spec, compilation |
|
6856 * and execution happen indivisibly, but in this implementation and many of |
|
6857 * its embeddings, code is precompiled early and re-executed in multiple |
|
6858 * threads, or using multiple global objects, or both, for efficiency. |
|
6859 * |
|
6860 * In such cases, naively following ECMA leads to wrongful sharing of RegExp |
|
6861 * objects, which makes for collisions on the lastIndex property (especially |
|
6862 * for global regexps) and on any ad-hoc properties. Also, __proto__ refers to |
|
6863 * the pre-compilation prototype, a pigeon-hole problem for instanceof tests. |
|
6864 */ |
|
6865 unsigned |
|
6866 CGObjectList::add(ObjectBox *objbox) |
|
6867 { |
|
6868 JS_ASSERT(!objbox->emitLink); |
|
6869 objbox->emitLink = lastbox; |
|
6870 lastbox = objbox; |
|
6871 return length++; |
|
6872 } |
|
6873 |
|
6874 unsigned |
|
6875 CGObjectList::indexOf(JSObject *obj) |
|
6876 { |
|
6877 JS_ASSERT(length > 0); |
|
6878 unsigned index = length - 1; |
|
6879 for (ObjectBox *box = lastbox; box->object != obj; box = box->emitLink) |
|
6880 index--; |
|
6881 return index; |
|
6882 } |
|
6883 |
|
6884 void |
|
6885 CGObjectList::finish(ObjectArray *array) |
|
6886 { |
|
6887 JS_ASSERT(length <= INDEX_LIMIT); |
|
6888 JS_ASSERT(length == array->length); |
|
6889 |
|
6890 js::HeapPtrObject *cursor = array->vector + array->length; |
|
6891 ObjectBox *objbox = lastbox; |
|
6892 do { |
|
6893 --cursor; |
|
6894 JS_ASSERT(!*cursor); |
|
6895 *cursor = objbox->object; |
|
6896 } while ((objbox = objbox->emitLink) != nullptr); |
|
6897 JS_ASSERT(cursor == array->vector); |
|
6898 } |
|
6899 |
|
6900 ObjectBox* |
|
6901 CGObjectList::find(uint32_t index) |
|
6902 { |
|
6903 JS_ASSERT(index < length); |
|
6904 ObjectBox *box = lastbox; |
|
6905 for (unsigned n = length - 1; n > index; n--) |
|
6906 box = box->emitLink; |
|
6907 return box; |
|
6908 } |
|
6909 |
|
6910 bool |
|
6911 CGTryNoteList::append(JSTryNoteKind kind, uint32_t stackDepth, size_t start, size_t end) |
|
6912 { |
|
6913 JS_ASSERT(start <= end); |
|
6914 JS_ASSERT(size_t(uint32_t(start)) == start); |
|
6915 JS_ASSERT(size_t(uint32_t(end)) == end); |
|
6916 |
|
6917 JSTryNote note; |
|
6918 note.kind = kind; |
|
6919 note.stackDepth = stackDepth; |
|
6920 note.start = uint32_t(start); |
|
6921 note.length = uint32_t(end - start); |
|
6922 |
|
6923 return list.append(note); |
|
6924 } |
|
6925 |
|
6926 void |
|
6927 CGTryNoteList::finish(TryNoteArray *array) |
|
6928 { |
|
6929 JS_ASSERT(length() == array->length); |
|
6930 |
|
6931 for (unsigned i = 0; i < length(); i++) |
|
6932 array->vector[i] = list[i]; |
|
6933 } |
|
6934 |
|
6935 bool |
|
6936 CGBlockScopeList::append(uint32_t scopeObject, uint32_t offset, uint32_t parent) |
|
6937 { |
|
6938 BlockScopeNote note; |
|
6939 mozilla::PodZero(¬e); |
|
6940 |
|
6941 note.index = scopeObject; |
|
6942 note.start = offset; |
|
6943 note.parent = parent; |
|
6944 |
|
6945 return list.append(note); |
|
6946 } |
|
6947 |
|
uint32_t
CGBlockScopeList::findEnclosingScope(uint32_t index)
{
    // Return the index of the nearest still-open scope enclosing the scope at
    // |index|, or NoBlockScopeIndex if there is none.  Notes are ordered by
    // start offset, so scan backward from |index|.
    JS_ASSERT(index < length());
    JS_ASSERT(list[index].index != BlockScopeNote::NoBlockScopeIndex);

    DebugOnly<uint32_t> pos = list[index].start;
    while (index--) {
        JS_ASSERT(list[index].start <= pos);
        if (list[index].length == 0) {
            // We are looking for the nearest enclosing live scope.  If the
            // scope contains POS, it should still be open, so its length should
            // be zero.
            return list[index].index;
        } else {
            // Conversely, if the length is not zero, it should not contain
            // POS.
            JS_ASSERT(list[index].start + list[index].length <= pos);
        }
    }

    return BlockScopeNote::NoBlockScopeIndex;
}
|
6971 |
|
6972 void |
|
6973 CGBlockScopeList::recordEnd(uint32_t index, uint32_t offset) |
|
6974 { |
|
6975 JS_ASSERT(index < length()); |
|
6976 JS_ASSERT(offset >= list[index].start); |
|
6977 JS_ASSERT(list[index].length == 0); |
|
6978 |
|
6979 list[index].length = offset - list[index].start; |
|
6980 } |
|
6981 |
|
6982 void |
|
6983 CGBlockScopeList::finish(BlockScopeArray *array) |
|
6984 { |
|
6985 JS_ASSERT(length() == array->length); |
|
6986 |
|
6987 for (unsigned i = 0; i < length(); i++) |
|
6988 array->vector[i] = list[i]; |
|
6989 } |
|
6990 |
|
/*
 * We should try to get rid of offsetBias (always 0 or 1, where 1 is
 * JSOP_{NOP,POP}_LENGTH), which is used only by SRC_FOR.
 */
// Table of {name, arity} descriptors, indexed by SrcNoteType; generated from
// the same macro list that defines the SrcNoteType enum, so the two stay in
// sync.
const JSSrcNoteSpec js_SrcNoteSpec[] = {
#define DEFINE_SRC_NOTE_SPEC(sym, name, arity) { name, arity },
    FOR_EACH_SRC_NOTE_TYPE(DEFINE_SRC_NOTE_SPEC)
#undef DEFINE_SRC_NOTE_SPEC
};
|
7000 |
|
7001 static int |
|
7002 SrcNoteArity(jssrcnote *sn) |
|
7003 { |
|
7004 JS_ASSERT(SN_TYPE(sn) < SRC_LAST); |
|
7005 return js_SrcNoteSpec[SN_TYPE(sn)].arity; |
|
7006 } |
|
7007 |
|
7008 JS_FRIEND_API(unsigned) |
|
7009 js_SrcNoteLength(jssrcnote *sn) |
|
7010 { |
|
7011 unsigned arity; |
|
7012 jssrcnote *base; |
|
7013 |
|
7014 arity = SrcNoteArity(sn); |
|
7015 for (base = sn++; arity; sn++, arity--) { |
|
7016 if (*sn & SN_4BYTE_OFFSET_FLAG) |
|
7017 sn += 3; |
|
7018 } |
|
7019 return sn - base; |
|
7020 } |
|
7021 |
|
JS_FRIEND_API(ptrdiff_t)
js_GetSrcNoteOffset(jssrcnote *sn, unsigned which)
{
    // Decode and return the |which|'th offset operand of the note at |sn|,
    // handling both the 1-byte and the inflated big-endian 4-byte encodings.
    /* Find the offset numbered which (i.e., skip exactly which offsets). */
    JS_ASSERT(SN_TYPE(sn) != SRC_XDELTA);
    JS_ASSERT((int) which < SrcNoteArity(sn));
    for (sn++; which; sn++, which--) {
        // An inflated operand occupies three extra bytes past its first one.
        if (*sn & SN_4BYTE_OFFSET_FLAG)
            sn += 3;
    }
    if (*sn & SN_4BYTE_OFFSET_FLAG) {
        // Big-endian: the first byte holds the flag plus the top offset bits.
        return (ptrdiff_t)(((uint32_t)(sn[0] & SN_4BYTE_OFFSET_MASK) << 24)
                           | (sn[1] << 16)
                           | (sn[2] << 8)
                           | sn[3]);
    }
    return (ptrdiff_t)*sn;
}