js/src/jit/IonBuilder.cpp

author       Michael Schloh von Bennewitz <michael@schloh.com>
date         Wed, 31 Dec 2014 06:09:35 +0100
changeset    0:6474c204b198
permissions  -rw-r--r--

Cloned upstream origin tor-browser at tor-browser-31.3.0esr-4.5-1-build1,
revision ID fc1c9ff7c1b2defdbc039f12214767608f46423f, for hacking purposes.

michael@0 1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
michael@0 2 * vim: set ts=8 sts=4 et sw=4 tw=99:
michael@0 3 * This Source Code Form is subject to the terms of the Mozilla Public
michael@0 4 * License, v. 2.0. If a copy of the MPL was not distributed with this
michael@0 5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
michael@0 6
michael@0 7 #include "jit/IonBuilder.h"
michael@0 8
michael@0 9 #include "mozilla/DebugOnly.h"
michael@0 10
michael@0 11 #include "builtin/Eval.h"
michael@0 12 #include "builtin/TypedObject.h"
michael@0 13 #include "frontend/SourceNotes.h"
michael@0 14 #include "jit/BaselineFrame.h"
michael@0 15 #include "jit/BaselineInspector.h"
michael@0 16 #include "jit/Ion.h"
michael@0 17 #include "jit/IonOptimizationLevels.h"
michael@0 18 #include "jit/IonSpewer.h"
michael@0 19 #include "jit/Lowering.h"
michael@0 20 #include "jit/MIRGraph.h"
michael@0 21 #include "vm/ArgumentsObject.h"
michael@0 22 #include "vm/Opcodes.h"
michael@0 23 #include "vm/RegExpStatics.h"
michael@0 24
michael@0 25 #include "jsinferinlines.h"
michael@0 26 #include "jsobjinlines.h"
michael@0 27 #include "jsopcodeinlines.h"
michael@0 28 #include "jsscriptinlines.h"
michael@0 29
michael@0 30 #include "jit/CompileInfo-inl.h"
michael@0 31 #include "jit/ExecutionMode-inl.h"
michael@0 32
michael@0 33 using namespace js;
michael@0 34 using namespace js::jit;
michael@0 35
michael@0 36 using mozilla::DebugOnly;
michael@0 37 using mozilla::Maybe;
michael@0 38 using mozilla::SafeCast;
michael@0 39
michael@0 40 class jit::BaselineFrameInspector
michael@0 41 {
michael@0 42 public:
michael@0 43 types::Type thisType;
michael@0 44 JSObject *singletonScopeChain;
michael@0 45
michael@0 46 Vector<types::Type, 4, IonAllocPolicy> argTypes;
michael@0 47 Vector<types::Type, 4, IonAllocPolicy> varTypes;
michael@0 48
michael@0 49 BaselineFrameInspector(TempAllocator *temp)
michael@0 50 : thisType(types::Type::UndefinedType()),
michael@0 51 singletonScopeChain(nullptr),
michael@0 52 argTypes(*temp),
michael@0 53 varTypes(*temp)
michael@0 54 {}
michael@0 55 };
michael@0 56
michael@0 57 BaselineFrameInspector *
michael@0 58 jit::NewBaselineFrameInspector(TempAllocator *temp, BaselineFrame *frame, CompileInfo *info)
michael@0 59 {
michael@0 60 JS_ASSERT(frame);
michael@0 61
michael@0 62 BaselineFrameInspector *inspector = temp->lifoAlloc()->new_<BaselineFrameInspector>(temp);
michael@0 63 if (!inspector)
michael@0 64 return nullptr;
michael@0 65
michael@0 66 // Note: copying the actual values into a temporary structure for use
michael@0 67 // during compilation could capture nursery pointers, so the values' types
michael@0 68 // are recorded instead.
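// (Illustrative aside, not from the upstream sources: a js::Value copied off
// the Baseline frame may point at an object still living in the GC nursery;
// if a minor GC moves that object while compilation proceeds off-thread, the
// copied Value would dangle. A types::Type merely describes the value, so it
// remains valid.)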
michael@0 69
michael@0 70 inspector->thisType = types::GetMaybeOptimizedOutValueType(frame->thisValue());
michael@0 71
michael@0 72 if (frame->scopeChain()->hasSingletonType())
michael@0 73 inspector->singletonScopeChain = frame->scopeChain();
michael@0 74
michael@0 75 JSScript *script = frame->script();
michael@0 76
michael@0 77 if (script->functionNonDelazifying()) {
michael@0 78 if (!inspector->argTypes.reserve(frame->numFormalArgs()))
michael@0 79 return nullptr;
michael@0 80 for (size_t i = 0; i < frame->numFormalArgs(); i++) {
michael@0 81 if (script->formalIsAliased(i)) {
michael@0 82 inspector->argTypes.infallibleAppend(types::Type::UndefinedType());
michael@0 83 } else if (!script->argsObjAliasesFormals()) {
michael@0 84 types::Type type = types::GetMaybeOptimizedOutValueType(frame->unaliasedFormal(i));
michael@0 85 inspector->argTypes.infallibleAppend(type);
michael@0 86 } else if (frame->hasArgsObj()) {
michael@0 87 types::Type type = types::GetMaybeOptimizedOutValueType(frame->argsObj().arg(i));
michael@0 88 inspector->argTypes.infallibleAppend(type);
michael@0 89 } else {
michael@0 90 inspector->argTypes.infallibleAppend(types::Type::UndefinedType());
michael@0 91 }
michael@0 92 }
michael@0 93 }
michael@0 94
michael@0 95 if (!inspector->varTypes.reserve(frame->script()->nfixed()))
michael@0 96 return nullptr;
michael@0 97 for (size_t i = 0; i < frame->script()->nfixed(); i++) {
michael@0 98 if (info->isSlotAliasedAtOsr(i + info->firstLocalSlot())) {
michael@0 99 inspector->varTypes.infallibleAppend(types::Type::UndefinedType());
michael@0 100 } else {
michael@0 101 types::Type type = types::GetMaybeOptimizedOutValueType(frame->unaliasedLocal(i));
michael@0 102 inspector->varTypes.infallibleAppend(type);
michael@0 103 }
michael@0 104 }
michael@0 105
michael@0 106 return inspector;
michael@0 107 }
michael@0 108
michael@0 109 IonBuilder::IonBuilder(JSContext *analysisContext, CompileCompartment *comp,
michael@0 110 const JitCompileOptions &options, TempAllocator *temp,
michael@0 111 MIRGraph *graph, types::CompilerConstraintList *constraints,
michael@0 112 BaselineInspector *inspector, CompileInfo *info,
michael@0 113 const OptimizationInfo *optimizationInfo,
michael@0 114 BaselineFrameInspector *baselineFrame, size_t inliningDepth,
michael@0 115 uint32_t loopDepth)
michael@0 116 : MIRGenerator(comp, options, temp, graph, info, optimizationInfo),
michael@0 117 backgroundCodegen_(nullptr),
michael@0 118 analysisContext(analysisContext),
michael@0 119 baselineFrame_(baselineFrame),
michael@0 120 abortReason_(AbortReason_Disable),
michael@0 121 descrSetHash_(nullptr),
michael@0 122 constraints_(constraints),
michael@0 123 analysis_(*temp, info->script()),
michael@0 124 thisTypes(nullptr),
michael@0 125 argTypes(nullptr),
michael@0 126 typeArray(nullptr),
michael@0 127 typeArrayHint(0),
michael@0 128 bytecodeTypeMap(nullptr),
michael@0 129 loopDepth_(loopDepth),
michael@0 130 callerResumePoint_(nullptr),
michael@0 131 callerBuilder_(nullptr),
michael@0 132 cfgStack_(*temp),
michael@0 133 loops_(*temp),
michael@0 134 switches_(*temp),
michael@0 135 labels_(*temp),
michael@0 136 iterators_(*temp),
michael@0 137 loopHeaders_(*temp),
michael@0 138 inspector(inspector),
michael@0 139 inliningDepth_(inliningDepth),
michael@0 140 numLoopRestarts_(0),
michael@0 141 failedBoundsCheck_(info->script()->failedBoundsCheck()),
michael@0 142 failedShapeGuard_(info->script()->failedShapeGuard()),
michael@0 143 nonStringIteration_(false),
michael@0 144 lazyArguments_(nullptr),
michael@0 145 inlineCallInfo_(nullptr)
michael@0 146 {
michael@0 147 script_ = info->script();
michael@0 148 pc = info->startPC();
michael@0 149
michael@0 150 JS_ASSERT(script()->hasBaselineScript() == (info->executionMode() != ArgumentsUsageAnalysis));
michael@0 151 JS_ASSERT(!!analysisContext == (info->executionMode() == DefinitePropertiesAnalysis));
michael@0 152 }
michael@0 153
michael@0 154 void
michael@0 155 IonBuilder::clearForBackEnd()
michael@0 156 {
michael@0 157 JS_ASSERT(!analysisContext);
michael@0 158 baselineFrame_ = nullptr;
michael@0 159
michael@0 160 // The caches below allocate data from the malloc heap. Release this before
michael@0 161 // later phases of compilation to avoid leaks, as the top level IonBuilder
michael@0 162 // is not explicitly destroyed. Note that builders for inner scripts are
michael@0 163 // constructed on the stack and will release this memory on destruction.
michael@0 164 gsn.purge();
michael@0 165 scopeCoordinateNameCache.purge();
michael@0 166 }
michael@0 167
michael@0 168 bool
michael@0 169 IonBuilder::abort(const char *message, ...)
michael@0 170 {
michael@0 171 // Don't call PCToLineNumber in release builds.
michael@0 172 #ifdef DEBUG
michael@0 173 va_list ap;
michael@0 174 va_start(ap, message);
michael@0 175 abortFmt(message, ap);
michael@0 176 va_end(ap);
michael@0 177 IonSpew(IonSpew_Abort, "aborted @ %s:%d", script()->filename(), PCToLineNumber(script(), pc));
michael@0 178 #endif
michael@0 179 return false;
michael@0 180 }
michael@0 181
michael@0 182 void
michael@0 183 IonBuilder::spew(const char *message)
michael@0 184 {
michael@0 185 // Don't call PCToLineNumber in release builds.
michael@0 186 #ifdef DEBUG
michael@0 187 IonSpew(IonSpew_MIR, "%s @ %s:%d", message, script()->filename(), PCToLineNumber(script(), pc));
michael@0 188 #endif
michael@0 189 }
michael@0 190
michael@0 191 static inline int32_t
michael@0 192 GetJumpOffset(jsbytecode *pc)
michael@0 193 {
michael@0 194 JS_ASSERT(js_CodeSpec[JSOp(*pc)].type() == JOF_JUMP);
michael@0 195 return GET_JUMP_OFFSET(pc);
michael@0 196 }
michael@0 197
michael@0 198 IonBuilder::CFGState
michael@0 199 IonBuilder::CFGState::If(jsbytecode *join, MTest *test)
michael@0 200 {
michael@0 201 CFGState state;
michael@0 202 state.state = IF_TRUE;
michael@0 203 state.stopAt = join;
michael@0 204 state.branch.ifFalse = test->ifFalse();
michael@0 205 state.branch.test = test;
michael@0 206 return state;
michael@0 207 }
michael@0 208
michael@0 209 IonBuilder::CFGState
michael@0 210 IonBuilder::CFGState::IfElse(jsbytecode *trueEnd, jsbytecode *falseEnd, MTest *test)
michael@0 211 {
michael@0 212 MBasicBlock *ifFalse = test->ifFalse();
michael@0 213
michael@0 214 CFGState state;
michael@0 215 // If the end of the false path is the same as the start of the
michael@0 216 // false path, then the "else" block is empty and we can devolve
michael@0 217 // this to the IF_TRUE case. We handle this here because there is
michael@0 218 // still an extra GOTO on the true path and we want stopAt to point
michael@0 219 // there, whereas the IF_TRUE case does not have the GOTO.
michael@0 220 state.state = (falseEnd == ifFalse->pc())
michael@0 221 ? IF_TRUE_EMPTY_ELSE
michael@0 222 : IF_ELSE_TRUE;
michael@0 223 state.stopAt = trueEnd;
michael@0 224 state.branch.falseEnd = falseEnd;
michael@0 225 state.branch.ifFalse = ifFalse;
michael@0 226 state.branch.test = test;
michael@0 227 return state;
michael@0 228 }
michael@0 229
michael@0 230 IonBuilder::CFGState
michael@0 231 IonBuilder::CFGState::AndOr(jsbytecode *join, MBasicBlock *joinStart)
michael@0 232 {
michael@0 233 CFGState state;
michael@0 234 state.state = AND_OR;
michael@0 235 state.stopAt = join;
michael@0 236 state.branch.ifFalse = joinStart;
michael@0 237 state.branch.test = nullptr;
michael@0 238 return state;
michael@0 239 }
michael@0 240
michael@0 241 IonBuilder::CFGState
michael@0 242 IonBuilder::CFGState::TableSwitch(jsbytecode *exitpc, MTableSwitch *ins)
michael@0 243 {
michael@0 244 CFGState state;
michael@0 245 state.state = TABLE_SWITCH;
michael@0 246 state.stopAt = exitpc;
michael@0 247 state.tableswitch.exitpc = exitpc;
michael@0 248 state.tableswitch.breaks = nullptr;
michael@0 249 state.tableswitch.ins = ins;
michael@0 250 state.tableswitch.currentBlock = 0;
michael@0 251 return state;
michael@0 252 }
michael@0 253
michael@0 254 JSFunction *
michael@0 255 IonBuilder::getSingleCallTarget(types::TemporaryTypeSet *calleeTypes)
michael@0 256 {
michael@0 257 if (!calleeTypes)
michael@0 258 return nullptr;
michael@0 259
michael@0 260 JSObject *obj = calleeTypes->getSingleton();
michael@0 261 if (!obj || !obj->is<JSFunction>())
michael@0 262 return nullptr;
michael@0 263
michael@0 264 return &obj->as<JSFunction>();
michael@0 265 }
michael@0 266
michael@0 267 bool
michael@0 268 IonBuilder::getPolyCallTargets(types::TemporaryTypeSet *calleeTypes, bool constructing,
michael@0 269 ObjectVector &targets, uint32_t maxTargets, bool *gotLambda)
michael@0 270 {
michael@0 271 JS_ASSERT(targets.empty());
michael@0 272 JS_ASSERT(gotLambda);
michael@0 273 *gotLambda = false;
michael@0 274
michael@0 275 if (!calleeTypes)
michael@0 276 return true;
michael@0 277
michael@0 278 if (calleeTypes->baseFlags() != 0)
michael@0 279 return true;
michael@0 280
michael@0 281 unsigned objCount = calleeTypes->getObjectCount();
michael@0 282
michael@0 283 if (objCount == 0 || objCount > maxTargets)
michael@0 284 return true;
michael@0 285
michael@0 286 if (!targets.reserve(objCount))
michael@0 287 return false;
michael@0 288 for (unsigned i = 0; i < objCount; i++) {
michael@0 289 JSObject *obj = calleeTypes->getSingleObject(i);
michael@0 290 JSFunction *fun;
michael@0 291 if (obj) {
michael@0 292 if (!obj->is<JSFunction>()) {
michael@0 293 targets.clear();
michael@0 294 return true;
michael@0 295 }
michael@0 296 fun = &obj->as<JSFunction>();
michael@0 297 } else {
michael@0 298 types::TypeObject *typeObj = calleeTypes->getTypeObject(i);
michael@0 299 JS_ASSERT(typeObj);
michael@0 300 if (!typeObj->interpretedFunction) {
michael@0 301 targets.clear();
michael@0 302 return true;
michael@0 303 }
michael@0 304
michael@0 305 fun = typeObj->interpretedFunction;
michael@0 306 *gotLambda = true;
michael@0 307 }
michael@0 308
michael@0 309 // Don't optimize if we're constructing and the callee is not a
michael@0 310 // constructor, so that CallKnown does not have to handle this case
michael@0 311 // (it should always throw).
michael@0 312 if (constructing && !fun->isInterpretedConstructor() && !fun->isNativeConstructor()) {
michael@0 313 targets.clear();
michael@0 314 return true;
michael@0 315 }
michael@0 316
michael@0 317 DebugOnly<bool> appendOk = targets.append(fun);
michael@0 318 JS_ASSERT(appendOk);
michael@0 319 }
michael@0 320
michael@0 321 // For now, only inline "singleton" lambda calls
michael@0 322 if (*gotLambda && targets.length() > 1)
michael@0 323 targets.clear();
michael@0 324
michael@0 325 return true;
michael@0 326 }
michael@0 327
michael@0 328 IonBuilder::InliningDecision
michael@0 329 IonBuilder::DontInline(JSScript *targetScript, const char *reason)
michael@0 330 {
michael@0 331 if (targetScript) {
michael@0 332 IonSpew(IonSpew_Inlining, "Cannot inline %s:%u: %s",
michael@0 333 targetScript->filename(), targetScript->lineno(), reason);
michael@0 334 } else {
michael@0 335 IonSpew(IonSpew_Inlining, "Cannot inline: %s", reason);
michael@0 336 }
michael@0 337
michael@0 338 return InliningDecision_DontInline;
michael@0 339 }
michael@0 340
michael@0 341 IonBuilder::InliningDecision
michael@0 342 IonBuilder::canInlineTarget(JSFunction *target, CallInfo &callInfo)
michael@0 343 {
michael@0 344 if (!optimizationInfo().inlineInterpreted())
michael@0 345 return InliningDecision_DontInline;
michael@0 346
michael@0 347 if (!target->isInterpreted())
michael@0 348 return DontInline(nullptr, "Non-interpreted target");
michael@0 349
michael@0 350 // Allow constructing lazy scripts when performing the definite properties
michael@0 351 // analysis, as baseline has not been used to warm the caller up yet.
michael@0 352 if (target->isInterpreted() && info().executionMode() == DefinitePropertiesAnalysis) {
michael@0 353 RootedScript script(analysisContext, target->getOrCreateScript(analysisContext));
michael@0 354 if (!script)
michael@0 355 return InliningDecision_Error;
michael@0 356
michael@0 357 if (!script->hasBaselineScript() && script->canBaselineCompile()) {
michael@0 358 MethodStatus status = BaselineCompile(analysisContext, script);
michael@0 359 if (status == Method_Error)
michael@0 360 return InliningDecision_Error;
michael@0 361 if (status != Method_Compiled)
michael@0 362 return InliningDecision_DontInline;
michael@0 363 }
michael@0 364 }
michael@0 365
michael@0 366 if (!target->hasScript())
michael@0 367 return DontInline(nullptr, "Lazy script");
michael@0 368
michael@0 369 JSScript *inlineScript = target->nonLazyScript();
michael@0 370 if (callInfo.constructing() && !target->isInterpretedConstructor())
michael@0 371 return DontInline(inlineScript, "Callee is not a constructor");
michael@0 372
michael@0 373 ExecutionMode executionMode = info().executionMode();
michael@0 374 if (!CanIonCompile(inlineScript, executionMode))
michael@0 375 return DontInline(inlineScript, "Disabled Ion compilation");
michael@0 376
michael@0 377 // Don't inline functions which don't have baseline scripts.
michael@0 378 if (!inlineScript->hasBaselineScript())
michael@0 379 return DontInline(inlineScript, "No baseline jitcode");
michael@0 380
michael@0 381 if (TooManyArguments(target->nargs()))
michael@0 382 return DontInline(inlineScript, "Too many args");
michael@0 383
michael@0 384 if (TooManyArguments(callInfo.argc()))
michael@0 385 return DontInline(inlineScript, "Too many args");
michael@0 386
michael@0 387 // Allow inlining of recursive calls, but only one level deep.
michael@0 388 IonBuilder *builder = callerBuilder_;
michael@0 389 while (builder) {
michael@0 390 if (builder->script() == inlineScript)
michael@0 391 return DontInline(inlineScript, "Recursive call");
michael@0 392 builder = builder->callerBuilder_;
michael@0 393 }
michael@0 394
michael@0 395 if (target->isHeavyweight())
michael@0 396 return DontInline(inlineScript, "Heavyweight function");
michael@0 397
michael@0 398 if (inlineScript->uninlineable())
michael@0 399 return DontInline(inlineScript, "Uninlineable script");
michael@0 400
michael@0 401 if (inlineScript->needsArgsObj())
michael@0 402 return DontInline(inlineScript, "Script that needs an arguments object");
michael@0 403
michael@0 404 if (!inlineScript->compileAndGo())
michael@0 405 return DontInline(inlineScript, "Non-compileAndGo script");
michael@0 406
michael@0 407 types::TypeObjectKey *targetType = types::TypeObjectKey::get(target);
michael@0 408 if (targetType->unknownProperties())
michael@0 409 return DontInline(inlineScript, "Target type has unknown properties");
michael@0 410
michael@0 411 return InliningDecision_Inline;
michael@0 412 }
michael@0 413
michael@0 414 void
michael@0 415 IonBuilder::popCfgStack()
michael@0 416 {
michael@0 417 if (cfgStack_.back().isLoop())
michael@0 418 loops_.popBack();
michael@0 419 if (cfgStack_.back().state == CFGState::LABEL)
michael@0 420 labels_.popBack();
michael@0 421 cfgStack_.popBack();
michael@0 422 }
michael@0 423
michael@0 424 bool
michael@0 425 IonBuilder::analyzeNewLoopTypes(MBasicBlock *entry, jsbytecode *start, jsbytecode *end)
michael@0 426 {
michael@0 427 // The phi inputs at the loop head only reflect types for variables that
michael@0 428 // were present at the start of the loop. If the variable changes to a new
michael@0 429 // type within the loop body, and that type is carried around to the loop
michael@0 430 // head, then we need to know about the new type up front.
michael@0 431 //
michael@0 432 // Since SSA information hasn't been constructed for the loop body yet, we
michael@0 433 // need a separate analysis to pick out the types that might flow around
michael@0 434 // the loop header. This is a best-effort analysis that may either over-
michael@0 435 // or under-approximate the set of such types.
michael@0 436 //
michael@0 437 // Over-approximating the types may lead to inefficient generated code, and
michael@0 438 // under-approximating the types will cause the loop body to be analyzed
michael@0 439 // multiple times as the correct types are deduced (see finishLoop).
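// Illustrative example (not from the upstream sources):
//   var x = 0;            // the loop-header phi for x starts as Int32
//   while (cond)
//     x = x + 0.5;        // x becomes Double inside the body
// The backedge carries a Double into the header phi, so addBackedgeType must
// learn about Double up front; otherwise finishLoop causes the body to be
// analyzed again once the wider type is discovered.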
michael@0 440
michael@0 441 // If we restarted processing of an outer loop, get the loop header types
michael@0 442 // directly from the last time we processed this loop. This both avoids
michael@0 443 // repeating the bytecode traversal below and picks up types discovered
michael@0 444 // while previously building the loop body.
michael@0 445 for (size_t i = 0; i < loopHeaders_.length(); i++) {
michael@0 446 if (loopHeaders_[i].pc == start) {
michael@0 447 MBasicBlock *oldEntry = loopHeaders_[i].header;
michael@0 448 for (MPhiIterator oldPhi = oldEntry->phisBegin();
michael@0 449 oldPhi != oldEntry->phisEnd();
michael@0 450 oldPhi++)
michael@0 451 {
michael@0 452 MPhi *newPhi = entry->getSlot(oldPhi->slot())->toPhi();
michael@0 453 if (!newPhi->addBackedgeType(oldPhi->type(), oldPhi->resultTypeSet()))
michael@0 454 return false;
michael@0 455 }
michael@0 456 // Update the most recent header for this loop encountered, in case
michael@0 457 // new types flow to the phis and the loop is processed at least
michael@0 458 // three times.
michael@0 459 loopHeaders_[i].header = entry;
michael@0 460 return true;
michael@0 461 }
michael@0 462 }
michael@0 463 loopHeaders_.append(LoopHeader(start, entry));
michael@0 464
michael@0 465 jsbytecode *last = nullptr, *earlier = nullptr;
michael@0 466 for (jsbytecode *pc = start; pc != end; earlier = last, last = pc, pc += GetBytecodeLength(pc)) {
michael@0 467 uint32_t slot;
michael@0 468 if (*pc == JSOP_SETLOCAL)
michael@0 469 slot = info().localSlot(GET_LOCALNO(pc));
michael@0 470 else if (*pc == JSOP_SETARG)
michael@0 471 slot = info().argSlotUnchecked(GET_ARGNO(pc));
michael@0 472 else
michael@0 473 continue;
michael@0 474 if (slot >= info().firstStackSlot())
michael@0 475 continue;
michael@0 476 if (!analysis().maybeInfo(pc))
michael@0 477 continue;
michael@0 478
michael@0 479 MPhi *phi = entry->getSlot(slot)->toPhi();
michael@0 480
michael@0 481 if (*last == JSOP_POS)
michael@0 482 last = earlier;
michael@0 483
michael@0 484 if (js_CodeSpec[*last].format & JOF_TYPESET) {
michael@0 485 types::TemporaryTypeSet *typeSet = bytecodeTypes(last);
michael@0 486 if (!typeSet->empty()) {
michael@0 487 MIRType type = typeSet->getKnownMIRType();
michael@0 488 if (!phi->addBackedgeType(type, typeSet))
michael@0 489 return false;
michael@0 490 }
michael@0 491 } else if (*last == JSOP_GETLOCAL || *last == JSOP_GETARG) {
michael@0 492 uint32_t slot = (*last == JSOP_GETLOCAL)
michael@0 493 ? info().localSlot(GET_LOCALNO(last))
michael@0 494 : info().argSlotUnchecked(GET_ARGNO(last));
michael@0 495 if (slot < info().firstStackSlot()) {
michael@0 496 MPhi *otherPhi = entry->getSlot(slot)->toPhi();
michael@0 497 if (otherPhi->hasBackedgeType()) {
michael@0 498 if (!phi->addBackedgeType(otherPhi->type(), otherPhi->resultTypeSet()))
michael@0 499 return false;
michael@0 500 }
michael@0 501 }
michael@0 502 } else {
michael@0 503 MIRType type = MIRType_None;
michael@0 504 switch (*last) {
michael@0 505 case JSOP_VOID:
michael@0 506 case JSOP_UNDEFINED:
michael@0 507 type = MIRType_Undefined;
michael@0 508 break;
michael@0 509 case JSOP_NULL:
michael@0 510 type = MIRType_Null;
michael@0 511 break;
michael@0 512 case JSOP_ZERO:
michael@0 513 case JSOP_ONE:
michael@0 514 case JSOP_INT8:
michael@0 515 case JSOP_INT32:
michael@0 516 case JSOP_UINT16:
michael@0 517 case JSOP_UINT24:
michael@0 518 case JSOP_BITAND:
michael@0 519 case JSOP_BITOR:
michael@0 520 case JSOP_BITXOR:
michael@0 521 case JSOP_BITNOT:
michael@0 522 case JSOP_RSH:
michael@0 523 case JSOP_LSH:
michael@0 524 case JSOP_URSH:
michael@0 525 type = MIRType_Int32;
michael@0 526 break;
michael@0 527 case JSOP_FALSE:
michael@0 528 case JSOP_TRUE:
michael@0 529 case JSOP_EQ:
michael@0 530 case JSOP_NE:
michael@0 531 case JSOP_LT:
michael@0 532 case JSOP_LE:
michael@0 533 case JSOP_GT:
michael@0 534 case JSOP_GE:
michael@0 535 case JSOP_NOT:
michael@0 536 case JSOP_STRICTEQ:
michael@0 537 case JSOP_STRICTNE:
michael@0 538 case JSOP_IN:
michael@0 539 case JSOP_INSTANCEOF:
michael@0 540 type = MIRType_Boolean;
michael@0 541 break;
michael@0 542 case JSOP_DOUBLE:
michael@0 543 type = MIRType_Double;
michael@0 544 break;
michael@0 545 case JSOP_STRING:
michael@0 546 case JSOP_TYPEOF:
michael@0 547 case JSOP_TYPEOFEXPR:
michael@0 548 case JSOP_ITERNEXT:
michael@0 549 type = MIRType_String;
michael@0 550 break;
michael@0 551 case JSOP_ADD:
michael@0 552 case JSOP_SUB:
michael@0 553 case JSOP_MUL:
michael@0 554 case JSOP_DIV:
michael@0 555 case JSOP_MOD:
michael@0 556 case JSOP_NEG:
michael@0 557 type = inspector->expectedResultType(last);
michael@0 558 default:
michael@0 559 break;
michael@0 560 }
michael@0 561 if (type != MIRType_None) {
michael@0 562 if (!phi->addBackedgeType(type, nullptr))
michael@0 563 return false;
michael@0 564 }
michael@0 565 }
michael@0 566 }
michael@0 567 return true;
michael@0 568 }
michael@0 569
michael@0 570 bool
michael@0 571 IonBuilder::pushLoop(CFGState::State initial, jsbytecode *stopAt, MBasicBlock *entry, bool osr,
michael@0 572 jsbytecode *loopHead, jsbytecode *initialPc,
michael@0 573 jsbytecode *bodyStart, jsbytecode *bodyEnd, jsbytecode *exitpc,
michael@0 574 jsbytecode *continuepc)
michael@0 575 {
michael@0 576 if (!continuepc)
michael@0 577 continuepc = entry->pc();
michael@0 578
michael@0 579 ControlFlowInfo loop(cfgStack_.length(), continuepc);
michael@0 580 if (!loops_.append(loop))
michael@0 581 return false;
michael@0 582
michael@0 583 CFGState state;
michael@0 584 state.state = initial;
michael@0 585 state.stopAt = stopAt;
michael@0 586 state.loop.bodyStart = bodyStart;
michael@0 587 state.loop.bodyEnd = bodyEnd;
michael@0 588 state.loop.exitpc = exitpc;
michael@0 589 state.loop.continuepc = continuepc;
michael@0 590 state.loop.entry = entry;
michael@0 591 state.loop.osr = osr;
michael@0 592 state.loop.successor = nullptr;
michael@0 593 state.loop.breaks = nullptr;
michael@0 594 state.loop.continues = nullptr;
michael@0 595 state.loop.initialState = initial;
michael@0 596 state.loop.initialPc = initialPc;
michael@0 597 state.loop.initialStopAt = stopAt;
michael@0 598 state.loop.loopHead = loopHead;
michael@0 599 return cfgStack_.append(state);
michael@0 600 }
michael@0 601
michael@0 602 bool
michael@0 603 IonBuilder::init()
michael@0 604 {
michael@0 605 if (!types::TypeScript::FreezeTypeSets(constraints(), script(),
michael@0 606 &thisTypes, &argTypes, &typeArray))
michael@0 607 {
michael@0 608 return false;
michael@0 609 }
michael@0 610
michael@0 611 if (!analysis().init(alloc(), gsn))
michael@0 612 return false;
michael@0 613
michael@0 614 // The baseline script normally has the bytecode type map, but compute
michael@0 615 // it ourselves if we do not have a baseline script.
michael@0 616 if (script()->hasBaselineScript()) {
michael@0 617 bytecodeTypeMap = script()->baselineScript()->bytecodeTypeMap();
michael@0 618 } else {
michael@0 619 bytecodeTypeMap = alloc_->lifoAlloc()->newArrayUninitialized<uint32_t>(script()->nTypeSets());
michael@0 620 if (!bytecodeTypeMap)
michael@0 621 return false;
michael@0 622 types::FillBytecodeTypeMap(script(), bytecodeTypeMap);
michael@0 623 }
michael@0 624
michael@0 625 return true;
michael@0 626 }
michael@0 627
michael@0 628 bool
michael@0 629 IonBuilder::build()
michael@0 630 {
michael@0 631 if (!init())
michael@0 632 return false;
michael@0 633
michael@0 634 if (!setCurrentAndSpecializePhis(newBlock(pc)))
michael@0 635 return false;
michael@0 636 if (!current)
michael@0 637 return false;
michael@0 638
michael@0 639 #ifdef DEBUG
michael@0 640 if (info().executionMode() == SequentialExecution && script()->hasIonScript()) {
michael@0 641 IonSpew(IonSpew_Scripts, "Recompiling script %s:%d (%p) (usecount=%d, level=%s)",
michael@0 642 script()->filename(), script()->lineno(), (void *)script(),
michael@0 643 (int)script()->getUseCount(), OptimizationLevelString(optimizationInfo().level()));
michael@0 644 } else {
michael@0 645 IonSpew(IonSpew_Scripts, "Analyzing script %s:%d (%p) (usecount=%d, level=%s)",
michael@0 646 script()->filename(), script()->lineno(), (void *)script(),
michael@0 647 (int)script()->getUseCount(), OptimizationLevelString(optimizationInfo().level()));
michael@0 648 }
michael@0 649 #endif
michael@0 650
michael@0 651 initParameters();
michael@0 652
michael@0 653 // Initialize local variables.
michael@0 654 for (uint32_t i = 0; i < info().nlocals(); i++) {
michael@0 655 MConstant *undef = MConstant::New(alloc(), UndefinedValue());
michael@0 656 current->add(undef);
michael@0 657 current->initSlot(info().localSlot(i), undef);
michael@0 658 }
michael@0 659
michael@0 660 // Initialize something for the scope chain. We can bail out before the
michael@0 661 // start instruction, but the snapshot is encoded *at* the start
michael@0 662 // instruction, which means generating any code that could load into
michael@0 663 // registers is illegal.
michael@0 664 MInstruction *scope = MConstant::New(alloc(), UndefinedValue());
michael@0 665 current->add(scope);
michael@0 666 current->initSlot(info().scopeChainSlot(), scope);
michael@0 667
michael@0 668 // Initialize the return value.
michael@0 669 MInstruction *returnValue = MConstant::New(alloc(), UndefinedValue());
michael@0 670 current->add(returnValue);
michael@0 671 current->initSlot(info().returnValueSlot(), returnValue);
michael@0 672
michael@0 673 // Initialize the arguments object slot to undefined if necessary.
michael@0 674 if (info().hasArguments()) {
michael@0 675 MInstruction *argsObj = MConstant::New(alloc(), UndefinedValue());
michael@0 676 current->add(argsObj);
michael@0 677 current->initSlot(info().argsObjSlot(), argsObj);
michael@0 678 }
michael@0 679
michael@0 680 // Emit the start instruction, so we can begin real instructions.
michael@0 681 current->makeStart(MStart::New(alloc(), MStart::StartType_Default));
michael@0 682 if (instrumentedProfiling())
michael@0 683 current->add(MProfilerStackOp::New(alloc(), script(), MProfilerStackOp::Enter));
michael@0 684
michael@0 685 // Guard against over-recursion. Do this before we start unboxing, since
michael@0 686 // this will create an OSI point that will read the incoming argument
michael@0 687 // values, which is nice to do before their last real use, to minimize
michael@0 688 // register/stack pressure.
michael@0 689 MCheckOverRecursed *check = MCheckOverRecursed::New(alloc());
michael@0 690 current->add(check);
michael@0 691 check->setResumePoint(current->entryResumePoint());
michael@0 692
michael@0 693 // Parameters have been checked to correspond to the typeset; now we unbox
michael@0 694 // what we can in an infallible manner.
michael@0 695 rewriteParameters();
michael@0 696
michael@0 697 // It's safe to start emitting actual IR, so now build the scope chain.
michael@0 698 if (!initScopeChain())
michael@0 699 return false;
michael@0 700
michael@0 701 if (info().needsArgsObj() && !initArgumentsObject())
michael@0 702 return false;
michael@0 703
michael@0 704 // Prevent |this| from being DCE'd: necessary for constructors.
michael@0 705 if (info().funMaybeLazy())
michael@0 706 current->getSlot(info().thisSlot())->setGuard();
michael@0 707
michael@0 708 // The type analysis phase attempts to insert unbox operations near
michael@0 709 // definitions of values. It also attempts to replace uses in resume points
michael@0 710 // with the narrower, unboxed variants. However, we must prevent this
michael@0 711 // replacement from happening on values in the entry snapshot. Otherwise we
michael@0 712 // could get this:
michael@0 713 //
michael@0 714 // v0 = MParameter(0)
michael@0 715 // v1 = MParameter(1)
michael@0 716 // -- ResumePoint(v2, v3)
michael@0 717 // v2 = Unbox(v0, INT32)
michael@0 718 // v3 = Unbox(v1, INT32)
michael@0 719 //
michael@0 720 // So we attach the initial resume point to each parameter, which the type
michael@0 721 // analysis explicitly checks (this is the same mechanism used for
michael@0 722 // effectful operations).
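// (Illustrative: with the resume point attached, the entry snapshot keeps
// referring to v0/v1 above, and only later uses are rewritten to the unboxed
// v2/v3, so a bailout at entry still restores the original boxed arguments.)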
michael@0 723 for (uint32_t i = 0; i < info().endArgSlot(); i++) {
michael@0 724 MInstruction *ins = current->getEntrySlot(i)->toInstruction();
michael@0 725 if (ins->type() == MIRType_Value)
michael@0 726 ins->setResumePoint(current->entryResumePoint());
michael@0 727 }
michael@0 728
michael@0 729 // lazyArguments should never be accessed in |argsObjAliasesFormals| scripts.
michael@0 730 if (info().hasArguments() && !info().argsObjAliasesFormals()) {
michael@0 731 lazyArguments_ = MConstant::New(alloc(), MagicValue(JS_OPTIMIZED_ARGUMENTS));
michael@0 732 current->add(lazyArguments_);
michael@0 733 }
michael@0 734
michael@0 735 insertRecompileCheck();
michael@0 736
michael@0 737 if (!traverseBytecode())
michael@0 738 return false;
michael@0 739
michael@0 740 if (!maybeAddOsrTypeBarriers())
michael@0 741 return false;
michael@0 742
michael@0 743 if (!processIterators())
michael@0 744 return false;
michael@0 745
michael@0 746 JS_ASSERT(loopDepth_ == 0);
michael@0 747 abortReason_ = AbortReason_NoAbort;
michael@0 748 return true;
michael@0 749 }
michael@0 750
michael@0 751 bool
michael@0 752 IonBuilder::processIterators()
michael@0 753 {
michael@0 754 // Find phis that must directly hold an iterator live.
michael@0 755 Vector<MPhi *, 0, SystemAllocPolicy> worklist;
michael@0 756 for (size_t i = 0; i < iterators_.length(); i++) {
michael@0 757 MInstruction *ins = iterators_[i];
michael@0 758 for (MUseDefIterator iter(ins); iter; iter++) {
michael@0 759 if (iter.def()->isPhi()) {
michael@0 760 if (!worklist.append(iter.def()->toPhi()))
michael@0 761 return false;
michael@0 762 }
michael@0 763 }
michael@0 764 }
michael@0 765
michael@0 766 // Propagate the iterator and live status of phis to all other connected
michael@0 767 // phis.
michael@0 768 while (!worklist.empty()) {
michael@0 769 MPhi *phi = worklist.popCopy();
michael@0 770 phi->setIterator();
michael@0 771 phi->setImplicitlyUsedUnchecked();
michael@0 772
michael@0 773 for (MUseDefIterator iter(phi); iter; iter++) {
michael@0 774 if (iter.def()->isPhi()) {
michael@0 775 MPhi *other = iter.def()->toPhi();
michael@0 776 if (!other->isIterator() && !worklist.append(other))
michael@0 777 return false;
michael@0 778 }
michael@0 779 }
michael@0 780 }
michael@0 781
michael@0 782 return true;
michael@0 783 }
michael@0 784
michael@0 785 bool
michael@0 786 IonBuilder::buildInline(IonBuilder *callerBuilder, MResumePoint *callerResumePoint,
michael@0 787 CallInfo &callInfo)
michael@0 788 {
michael@0 789 if (!init())
michael@0 790 return false;
michael@0 791
michael@0 792 inlineCallInfo_ = &callInfo;
michael@0 793
michael@0 794 IonSpew(IonSpew_Scripts, "Inlining script %s:%d (%p)",
michael@0 795 script()->filename(), script()->lineno(), (void *)script());
michael@0 796
michael@0 797 callerBuilder_ = callerBuilder;
michael@0 798 callerResumePoint_ = callerResumePoint;
michael@0 799
michael@0 800 if (callerBuilder->failedBoundsCheck_)
michael@0 801 failedBoundsCheck_ = true;
michael@0 802
michael@0 803 if (callerBuilder->failedShapeGuard_)
michael@0 804 failedShapeGuard_ = true;
michael@0 805
michael@0 806 // Generate single entrance block.
michael@0 807 if (!setCurrentAndSpecializePhis(newBlock(pc)))
michael@0 808 return false;
michael@0 809 if (!current)
michael@0 810 return false;
michael@0 811
michael@0 812 current->setCallerResumePoint(callerResumePoint);
michael@0 813
michael@0 814 // Connect the entrance block to the last block in the caller's graph.
michael@0 815 MBasicBlock *predecessor = callerBuilder->current;
michael@0 816 JS_ASSERT(predecessor == callerResumePoint->block());
michael@0 817
michael@0 818 // All further instructions generated from this scope should be considered
michael@0 819 // part of the function that we're inlining. We also need to keep track of
michael@0 820 // the inlining depth because all scripts inlined contiguously on the same
michael@0 821 // level have only one InlineExit node.
michael@0 822 if (instrumentedProfiling()) {
michael@0 823 predecessor->add(MProfilerStackOp::New(alloc(), script(),
michael@0 824 MProfilerStackOp::InlineEnter,
michael@0 825 inliningDepth_));
michael@0 826 }
michael@0 827
michael@0 828 predecessor->end(MGoto::New(alloc(), current));
michael@0 829 if (!current->addPredecessorWithoutPhis(predecessor))
michael@0 830 return false;
michael@0 831
michael@0 832 // Initialize scope chain slot to Undefined. It's set later by |initScopeChain|.
michael@0 833 MInstruction *scope = MConstant::New(alloc(), UndefinedValue());
michael@0 834 current->add(scope);
michael@0 835 current->initSlot(info().scopeChainSlot(), scope);
michael@0 836
michael@0 837 // Initialize |return value| slot.
michael@0 838 MInstruction *returnValue = MConstant::New(alloc(), UndefinedValue());
michael@0 839 current->add(returnValue);
michael@0 840 current->initSlot(info().returnValueSlot(), returnValue);
michael@0 841
michael@0 842 // Initialize |arguments| slot.
michael@0 843 if (info().hasArguments()) {
michael@0 844 MInstruction *argsObj = MConstant::New(alloc(), UndefinedValue());
michael@0 845 current->add(argsObj);
michael@0 846 current->initSlot(info().argsObjSlot(), argsObj);
michael@0 847 }
michael@0 848
michael@0 849 // Initialize |this| slot.
michael@0 850 current->initSlot(info().thisSlot(), callInfo.thisArg());
michael@0 851
michael@0 852 IonSpew(IonSpew_Inlining, "Initializing %u arg slots", info().nargs());
michael@0 853
michael@0 854 // NB: Ion does not inline functions which |needsArgsObj|. So using argSlot()
michael@0 855 // instead of argSlotUnchecked() below is OK.
michael@0 856 JS_ASSERT(!info().needsArgsObj());
michael@0 857
michael@0 858 // Initialize actually set arguments.
michael@0 859 uint32_t existing_args = Min<uint32_t>(callInfo.argc(), info().nargs());
michael@0 860 for (size_t i = 0; i < existing_args; ++i) {
michael@0 861 MDefinition *arg = callInfo.getArg(i);
michael@0 862 current->initSlot(info().argSlot(i), arg);
michael@0 863 }
michael@0 864
michael@0 865 // Pass Undefined for missing arguments
michael@0 866 for (size_t i = callInfo.argc(); i < info().nargs(); ++i) {
michael@0 867 MConstant *arg = MConstant::New(alloc(), UndefinedValue());
michael@0 868 current->add(arg);
michael@0 869 current->initSlot(info().argSlot(i), arg);
michael@0 870 }
michael@0 871
michael@0 872 // Initialize the scope chain now that args are initialized.
michael@0 873 if (!initScopeChain(callInfo.fun()))
michael@0 874 return false;
michael@0 875
michael@0 876 IonSpew(IonSpew_Inlining, "Initializing %u local slots", info().nlocals());
michael@0 877
michael@0 878 // Initialize local variables.
michael@0 879 for (uint32_t i = 0; i < info().nlocals(); i++) {
michael@0 880 MConstant *undef = MConstant::New(alloc(), UndefinedValue());
michael@0 881 current->add(undef);
michael@0 882 current->initSlot(info().localSlot(i), undef);
michael@0 883 }
michael@0 884
michael@0 885 IonSpew(IonSpew_Inlining, "Inline entry block MResumePoint %p, %u operands",
michael@0 886 (void *) current->entryResumePoint(), current->entryResumePoint()->numOperands());
michael@0 887
michael@0 888 // +2 for the scope chain and |this|, maybe another +1 for arguments object slot.
michael@0 889 JS_ASSERT(current->entryResumePoint()->numOperands() == info().totalSlots());
michael@0 890
michael@0 891 if (script_->argumentsHasVarBinding()) {
michael@0 892 lazyArguments_ = MConstant::New(alloc(), MagicValue(JS_OPTIMIZED_ARGUMENTS));
michael@0 893 current->add(lazyArguments_);
michael@0 894 }
michael@0 895
michael@0 896 insertRecompileCheck();
michael@0 897
michael@0 898 if (!traverseBytecode())
michael@0 899 return false;
michael@0 900
michael@0 901 return true;
michael@0 902 }
michael@0 903
michael@0 904 void
michael@0 905 IonBuilder::rewriteParameter(uint32_t slotIdx, MDefinition *param, int32_t argIndex)
michael@0 906 {
michael@0 907 JS_ASSERT(param->isParameter() || param->isGetArgumentsObjectArg());
michael@0 908
michael@0 909 types::TemporaryTypeSet *types = param->resultTypeSet();
michael@0 910 MDefinition *actual = ensureDefiniteType(param, types->getKnownMIRType());
michael@0 911 if (actual == param)
michael@0 912 return;
michael@0 913
michael@0 914 // Careful! We leave the original MParameter in the entry resume point. The
michael@0 915 // arguments still need to be checked unless proven otherwise at the call
michael@0 916 // site, and these checks can bail out. We can end up with:
michael@0 917 // v0 = Parameter(0)
michael@0 918 // v1 = Unbox(v0, INT32)
michael@0 919 // -- ResumePoint(v0)
michael@0 920 //
michael@0 921 // As usual, it would be invalid for v1 to be captured in the initial
michael@0 922 // resume point, rather than v0.
michael@0 923 current->rewriteSlot(slotIdx, actual);
michael@0 924 }
michael@0 925
michael@0 926 // Apply Type Inference information to parameters early on, unboxing them if
michael@0 927 // they have a definitive type. The actual guards will be emitted by the code
michael@0 928 // generator, explicitly, as part of the function prologue.
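// (Illustrative: if type inference proves an argument is always Int32, its
// slot is rewritten here to the unboxed definition, while the prologue guard
// emitted by the code generator still validates the incoming boxed value.)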
michael@0 929 void
michael@0 930 IonBuilder::rewriteParameters()
michael@0 931 {
michael@0 932 JS_ASSERT(info().scopeChainSlot() == 0);
michael@0 933
michael@0 934 if (!info().funMaybeLazy())
michael@0 935 return;
michael@0 936
michael@0 937 for (uint32_t i = info().startArgSlot(); i < info().endArgSlot(); i++) {
michael@0 938 MDefinition *param = current->getSlot(i);
michael@0 939 rewriteParameter(i, param, param->toParameter()->index());
michael@0 940 }
michael@0 941 }
michael@0 942
michael@0 943 void
michael@0 944 IonBuilder::initParameters()
michael@0 945 {
michael@0 946 if (!info().funMaybeLazy())
michael@0 947 return;
michael@0 948
michael@0 949 // If we are doing OSR on a frame which initially executed in the
michael@0 950 // interpreter and didn't accumulate type information, try to use that OSR
michael@0 951 // frame to determine possible initial types for 'this' and parameters.
michael@0 952
michael@0 953 if (thisTypes->empty() && baselineFrame_)
michael@0 954 thisTypes->addType(baselineFrame_->thisType, alloc_->lifoAlloc());
michael@0 955
michael@0 956 MParameter *param = MParameter::New(alloc(), MParameter::THIS_SLOT, thisTypes);
michael@0 957 current->add(param);
michael@0 958 current->initSlot(info().thisSlot(), param);
michael@0 959
michael@0 960 for (uint32_t i = 0; i < info().nargs(); i++) {
michael@0 961 types::TemporaryTypeSet *types = &argTypes[i];
michael@0 962 if (types->empty() && baselineFrame_ &&
michael@0 963 !script_->baselineScript()->modifiesArguments())
michael@0 964 {
michael@0 965 types->addType(baselineFrame_->argTypes[i], alloc_->lifoAlloc());
michael@0 966 }
michael@0 967
michael@0 968 param = MParameter::New(alloc(), i, types);
michael@0 969 current->add(param);
michael@0 970 current->initSlot(info().argSlotUnchecked(i), param);
michael@0 971 }
michael@0 972 }
michael@0 973
michael@0 974 bool
michael@0 975 IonBuilder::initScopeChain(MDefinition *callee)
michael@0 976 {
michael@0 977 MInstruction *scope = nullptr;
michael@0 978
michael@0 979 // If the script doesn't use the scopechain, then it's already initialized
michael@0 980 // from earlier. However, always make a scope chain when |needsArgsObj| is true
michael@0 981 // for the script, since arguments object construction requires the scope chain
michael@0 982 // to be passed in.
michael@0 983 if (!info().needsArgsObj() && !analysis().usesScopeChain())
michael@0 984 return true;
michael@0 985
michael@0 986 // The scope chain is only tracked in scripts that have NAME opcodes which
michael@0 987 // will try to access the scope. For other scripts, the scope instructions
michael@0 988 // will be held live by resume points and code will still be generated for
michael@0 989 // them, so just use a constant undefined value.
michael@0 990 if (!script()->compileAndGo())
michael@0 991 return abort("non-CNG global scripts are not supported");
michael@0 992
michael@0 993 if (JSFunction *fun = info().funMaybeLazy()) {
michael@0 994 if (!callee) {
michael@0 995 MCallee *calleeIns = MCallee::New(alloc());
michael@0 996 current->add(calleeIns);
michael@0 997 callee = calleeIns;
michael@0 998 }
michael@0 999 scope = MFunctionEnvironment::New(alloc(), callee);
michael@0 1000 current->add(scope);
michael@0 1001
michael@0 1002 // This reproduces what is done in CallObject::createForFunction. Skip
michael@0 1003 // this for analyses, as the script might not have a baseline script
michael@0 1004 // with template objects yet.
michael@0 1005 if (fun->isHeavyweight() && !info().executionModeIsAnalysis()) {
michael@0 1006 if (fun->isNamedLambda()) {
michael@0 1007 scope = createDeclEnvObject(callee, scope);
michael@0 1008 if (!scope)
michael@0 1009 return false;
michael@0 1010 }
michael@0 1011
michael@0 1012 scope = createCallObject(callee, scope);
michael@0 1013 if (!scope)
michael@0 1014 return false;
michael@0 1015 }
michael@0 1016 } else {
michael@0 1017 scope = constant(ObjectValue(script()->global()));
michael@0 1018 }
michael@0 1019
michael@0 1020 current->setScopeChain(scope);
michael@0 1021 return true;
michael@0 1022 }
michael@0 1023
michael@0 1024 bool
michael@0 1025 IonBuilder::initArgumentsObject()
michael@0 1026 {
michael@0 1027 IonSpew(IonSpew_MIR, "%s:%d - Emitting code to initialize arguments object! block=%p",
michael@0 1028 script()->filename(), script()->lineno(), current);
michael@0 1029 JS_ASSERT(info().needsArgsObj());
michael@0 1030 MCreateArgumentsObject *argsObj = MCreateArgumentsObject::New(alloc(), current->scopeChain());
michael@0 1031 current->add(argsObj);
michael@0 1032 current->setArgumentsObject(argsObj);
michael@0 1033 return true;
michael@0 1034 }
michael@0 1035
michael@0 1036 bool
michael@0 1037 IonBuilder::addOsrValueTypeBarrier(uint32_t slot, MInstruction **def_,
michael@0 1038 MIRType type, types::TemporaryTypeSet *typeSet)
michael@0 1039 {
michael@0 1040 MInstruction *&def = *def_;
michael@0 1041 MBasicBlock *osrBlock = def->block();
michael@0 1042
michael@0 1043 // Clear bogus type information added in newOsrPreheader().
michael@0 1044 def->setResultType(MIRType_Value);
michael@0 1045 def->setResultTypeSet(nullptr);
michael@0 1046
michael@0 1047 if (typeSet && !typeSet->unknown()) {
michael@0 1048 MInstruction *barrier = MTypeBarrier::New(alloc(), def, typeSet);
michael@0 1049 osrBlock->insertBefore(osrBlock->lastIns(), barrier);
michael@0 1050 osrBlock->rewriteSlot(slot, barrier);
michael@0 1051 def = barrier;
michael@0 1052 } else if (type == MIRType_Null ||
michael@0 1053 type == MIRType_Undefined ||
michael@0 1054 type == MIRType_MagicOptimizedArguments)
michael@0 1055 {
michael@0 1056 // No unbox instruction will be added below, so check the type by
michael@0 1057 // adding a type barrier for a singleton type set.
michael@0 1058 types::Type ntype = types::Type::PrimitiveType(ValueTypeFromMIRType(type));
michael@0 1059 typeSet = alloc_->lifoAlloc()->new_<types::TemporaryTypeSet>(ntype);
michael@0 1060 if (!typeSet)
michael@0 1061 return false;
michael@0 1062 MInstruction *barrier = MTypeBarrier::New(alloc(), def, typeSet);
michael@0 1063 osrBlock->insertBefore(osrBlock->lastIns(), barrier);
michael@0 1064 osrBlock->rewriteSlot(slot, barrier);
michael@0 1065 def = barrier;
michael@0 1066 }
michael@0 1067
michael@0 1068 switch (type) {
michael@0 1069 case MIRType_Boolean:
michael@0 1070 case MIRType_Int32:
michael@0 1071 case MIRType_Double:
michael@0 1072 case MIRType_String:
michael@0 1073 case MIRType_Object:
michael@0 1074 if (type != def->type()) {
michael@0 1075 MUnbox *unbox = MUnbox::New(alloc(), def, type, MUnbox::Fallible);
michael@0 1076 osrBlock->insertBefore(osrBlock->lastIns(), unbox);
michael@0 1077 osrBlock->rewriteSlot(slot, unbox);
michael@0 1078 def = unbox;
michael@0 1079 }
michael@0 1080 break;
michael@0 1081
michael@0 1082 case MIRType_Null:
michael@0 1083 {
michael@0 1084 MConstant *c = MConstant::New(alloc(), NullValue());
michael@0 1085 osrBlock->insertBefore(osrBlock->lastIns(), c);
michael@0 1086 osrBlock->rewriteSlot(slot, c);
michael@0 1087 def = c;
michael@0 1088 break;
michael@0 1089 }
michael@0 1090
michael@0 1091 case MIRType_Undefined:
michael@0 1092 {
michael@0 1093 MConstant *c = MConstant::New(alloc(), UndefinedValue());
michael@0 1094 osrBlock->insertBefore(osrBlock->lastIns(), c);
michael@0 1095 osrBlock->rewriteSlot(slot, c);
michael@0 1096 def = c;
michael@0 1097 break;
michael@0 1098 }
michael@0 1099
michael@0 1100 case MIRType_MagicOptimizedArguments:
michael@0 1101 JS_ASSERT(lazyArguments_);
michael@0 1102 osrBlock->rewriteSlot(slot, lazyArguments_);
michael@0 1103 def = lazyArguments_;
michael@0 1104 break;
michael@0 1105
michael@0 1106 default:
michael@0 1107 break;
michael@0 1108 }
michael@0 1109
michael@0 1110 JS_ASSERT(def == osrBlock->getSlot(slot));
michael@0 1111 return true;
michael@0 1112 }
michael@0 1113
michael@0 1114 bool
michael@0 1115 IonBuilder::maybeAddOsrTypeBarriers()
michael@0 1116 {
michael@0 1117 if (!info().osrPc())
michael@0 1118 return true;
michael@0 1119
michael@0 1120 // The loop has successfully been processed, and the loop header phis
michael@0 1121 // have their final type. Add unboxes and type barriers in the OSR
michael@0 1122 // block to check that the values have the appropriate type, and update
michael@0 1123 // the types in the preheader.
michael@0 1124
michael@0 1125 MBasicBlock *osrBlock = graph().osrBlock();
michael@0 1126 if (!osrBlock) {
michael@0 1127 // Because IonBuilder does not compile catch blocks, it's possible to
michael@0 1128 // end up without an OSR block if the OSR pc is only reachable via a
michael@0 1129 // break-statement inside the catch block. For instance:
michael@0 1130 //
michael@0 1131 // for (;;) {
michael@0 1132 // try {
michael@0 1133 // throw 3;
michael@0 1134 // } catch(e) {
michael@0 1135 // break;
michael@0 1136 // }
michael@0 1137 // }
michael@0 1138 // while (..) { } // <= OSR here, only reachable via catch block.
michael@0 1139 //
michael@0 1140 // For now we just abort in this case.
michael@0 1141 JS_ASSERT(graph().hasTryBlock());
michael@0 1142 return abort("OSR block only reachable through catch block");
michael@0 1143 }
michael@0 1144
michael@0 1145 MBasicBlock *preheader = osrBlock->getSuccessor(0);
michael@0 1146 MBasicBlock *header = preheader->getSuccessor(0);
michael@0 1147 static const size_t OSR_PHI_POSITION = 1;
michael@0 1148 JS_ASSERT(preheader->getPredecessor(OSR_PHI_POSITION) == osrBlock);
michael@0 1149
michael@0 1150 MPhiIterator headerPhi = header->phisBegin();
michael@0 1151 while (headerPhi != header->phisEnd() && headerPhi->slot() < info().startArgSlot())
michael@0 1152 headerPhi++;
michael@0 1153
michael@0 1154 for (uint32_t i = info().startArgSlot(); i < osrBlock->stackDepth(); i++, headerPhi++) {
michael@0 1155 // Aliased slots are never accessed, since they need to go through
michael@0 1156 // the callobject. The typebarriers are added there and can be
michael@0 1157 // discarded here.
michael@0 1158 if (info().isSlotAliasedAtOsr(i))
michael@0 1159 continue;
michael@0 1160
michael@0 1161 MInstruction *def = osrBlock->getSlot(i)->toInstruction();
michael@0 1162
michael@0 1163 JS_ASSERT(headerPhi->slot() == i);
michael@0 1164 MPhi *preheaderPhi = preheader->getSlot(i)->toPhi();
michael@0 1165
michael@0 1166 MIRType type = headerPhi->type();
michael@0 1167 types::TemporaryTypeSet *typeSet = headerPhi->resultTypeSet();
michael@0 1168
michael@0 1169 if (!addOsrValueTypeBarrier(i, &def, type, typeSet))
michael@0 1170 return false;
michael@0 1171
michael@0 1172 preheaderPhi->replaceOperand(OSR_PHI_POSITION, def);
michael@0 1173 preheaderPhi->setResultType(type);
michael@0 1174 preheaderPhi->setResultTypeSet(typeSet);
michael@0 1175 }
michael@0 1176
michael@0 1177 return true;
michael@0 1178 }
michael@0 1179
michael@0 1180 // We try to build a control-flow graph in the order that it would be built as
michael@0 1181 // if traversing the AST. This leads to a nice ordering and lets us build SSA
michael@0 1182 // in one pass, since the bytecode is structured.
michael@0 1183 //
michael@0 1184 // We traverse the bytecode iteratively, maintaining a current basic block.
michael@0 1185 // Each basic block has a mapping of local slots to instructions, as well as a
michael@0 1186 // stack depth. As we encounter instructions we mutate this mapping in the
michael@0 1187 // current block.
michael@0 1188 //
michael@0 1189 // Things get interesting when we encounter a control structure. This can be
michael@0 1190 // either an IFEQ, downward GOTO, or a decompiler hint stashed away in source
michael@0 1191 // notes. Once we encounter such an opcode, we recover the structure of the
michael@0 1192 // control flow (its branches and bounds), and push it on a stack.
michael@0 1193 //
michael@0 1194 // As we continue traversing the bytecode, we look for points that would
michael@0 1195 // terminate the topmost control flow path pushed on the stack. These are:
michael@0 1196 // (1) The bounds of the current structure (end of a loop or join/edge of a
michael@0 1197 // branch).
michael@0 1198 // (2) A "return", "break", or "continue" statement.
michael@0 1199 //
michael@0 1200 // For (1), we expect that there is a current block in the progress of being
michael@0 1201 // built, and we complete the necessary edges in the CFG. For (2), we expect
michael@0 1202 // that there is no active block.
michael@0 1203 //
michael@0 1204 // For normal diamond join points, we construct Phi nodes as we add
michael@0 1205 // predecessors. For loops, care must be taken to propagate Phi nodes back
michael@0 1206 // through uses in the loop body.
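// Illustrative walk-through (not from the upstream sources): for
//   if (c) { A } else { B }  C
// the IFEQ pushes an IF_ELSE_TRUE state whose stopAt is the end of A. When
// traversal reaches that point the state flips to IF_ELSE_FALSE and B is
// built; reaching the end of B joins both predecessors into the block for C,
// adding phis for any slots the two arms left with different definitions.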
michael@0 1207 bool
michael@0 1208 IonBuilder::traverseBytecode()
michael@0 1209 {
michael@0 1210 for (;;) {
michael@0 1211 JS_ASSERT(pc < info().limitPC());
michael@0 1212
michael@0 1213 for (;;) {
michael@0 1214 if (!alloc().ensureBallast())
michael@0 1215 return false;
michael@0 1216
michael@0 1217 // Check if we've hit an expected join point or edge in the bytecode.
michael@0 1218 // Leaving one control structure could place us at the edge of another,
michael@0 1219 // thus |while| instead of |if| so we don't skip any opcodes.
michael@0 1220 if (!cfgStack_.empty() && cfgStack_.back().stopAt == pc) {
michael@0 1221 ControlStatus status = processCfgStack();
michael@0 1222 if (status == ControlStatus_Error)
michael@0 1223 return false;
michael@0 1224 if (status == ControlStatus_Abort)
michael@0 1225 return abort("Aborted while processing control flow");
michael@0 1226 if (!current)
michael@0 1227 return true;
michael@0 1228 continue;
michael@0 1229 }
michael@0 1230
michael@0 1231 // Some opcodes need to be handled early because they affect control
michael@0 1232 // flow, terminating the current basic block and/or instructing the
michael@0 1233 // traversal algorithm to continue from a new pc.
michael@0 1234 //
michael@0 1235 // (1) If the opcode does not affect control flow, then the opcode
michael@0 1236 // is inspected and transformed to IR. This is the process_opcode
michael@0 1237 // label.
michael@0 1238 // (2) A loop could be detected via a forward GOTO. In this case,
michael@0 1239 // we don't want to process the GOTO, but the following
michael@0 1240 // instruction.
michael@0 1241 // (3) A RETURN, STOP, BREAK, or CONTINUE may require processing the
michael@0 1242 // CFG stack to terminate open branches.
michael@0 1243 //
michael@0 1244 // Similar to above, snooping control flow could land us at another
michael@0 1245 // control flow point, so we iterate until it's time to inspect a real
michael@0 1246 // opcode.
michael@0 1247 ControlStatus status;
michael@0 1248 if ((status = snoopControlFlow(JSOp(*pc))) == ControlStatus_None)
michael@0 1249 break;
michael@0 1250 if (status == ControlStatus_Error)
michael@0 1251 return false;
michael@0 1252 if (status == ControlStatus_Abort)
michael@0 1253 return abort("Aborted while processing control flow");
michael@0 1254 if (!current)
michael@0 1255 return true;
michael@0 1256 }
michael@0 1257
michael@0 1258 #ifdef DEBUG
michael@0 1259 // In debug builds, after compiling this op, check that all values
michael@0 1260 // popped by this opcode either:
michael@0 1261 //
michael@0 1262 // (1) Have the ImplicitlyUsed flag set on them.
michael@0 1263 // (2) Have more uses than before compiling this op (the value is
michael@0 1264 // used as operand of a new MIR instruction).
michael@0 1265 //
michael@0 1266 // This is used to catch problems where IonBuilder pops a value without
michael@0 1267 // adding any SSA uses and doesn't call setImplicitlyUsedUnchecked on it.
michael@0 1268 Vector<MDefinition *, 4, IonAllocPolicy> popped(alloc());
michael@0 1269 Vector<size_t, 4, IonAllocPolicy> poppedUses(alloc());
michael@0 1270 unsigned nuses = GetUseCount(script_, script_->pcToOffset(pc));
michael@0 1271
michael@0 1272 for (unsigned i = 0; i < nuses; i++) {
michael@0 1273 MDefinition *def = current->peek(-int32_t(i + 1));
michael@0 1274 if (!popped.append(def) || !poppedUses.append(def->defUseCount()))
michael@0 1275 return false;
michael@0 1276 }
michael@0 1277 #endif
michael@0 1278
michael@0 1279 // Nothing in inspectOpcode() is allowed to advance the pc.
michael@0 1280 JSOp op = JSOp(*pc);
michael@0 1281 if (!inspectOpcode(op))
michael@0 1282 return false;
michael@0 1283
michael@0 1284 #ifdef DEBUG
michael@0 1285 for (size_t i = 0; i < popped.length(); i++) {
michael@0 1286 switch (op) {
michael@0 1287 case JSOP_POP:
michael@0 1288 case JSOP_POPN:
michael@0 1289 case JSOP_DUPAT:
michael@0 1290 case JSOP_DUP:
michael@0 1291 case JSOP_DUP2:
michael@0 1292 case JSOP_PICK:
michael@0 1293 case JSOP_SWAP:
michael@0 1294 case JSOP_SETARG:
michael@0 1295 case JSOP_SETLOCAL:
michael@0 1296 case JSOP_SETRVAL:
michael@0 1297 case JSOP_VOID:
michael@0 1298 // Don't require SSA uses for values popped by these ops.
michael@0 1299 break;
michael@0 1300
michael@0 1301 case JSOP_POS:
michael@0 1302 case JSOP_TOID:
michael@0 1303 // These ops may leave their input on the stack without setting
michael@0 1304 // the ImplicitlyUsed flag. If this value will be popped immediately,
michael@0 1305 // we may replace it with |undefined|, but the difference is
michael@0 1306 // not observable.
michael@0 1307 JS_ASSERT(i == 0);
michael@0 1308 if (current->peek(-1) == popped[0])
michael@0 1309 break;
michael@0 1310 // FALL THROUGH
michael@0 1311
michael@0 1312 default:
michael@0 1313 JS_ASSERT(popped[i]->isImplicitlyUsed() ||
michael@0 1314
michael@0 1315 // MNewDerivedTypedObject instances are
michael@0 1316 // often dead unless they escape from the
michael@0 1317 // fn. See IonBuilder::loadTypedObjectData()
michael@0 1318 // for more details.
michael@0 1319 popped[i]->isNewDerivedTypedObject() ||
michael@0 1320
michael@0 1321 popped[i]->defUseCount() > poppedUses[i]);
michael@0 1322 break;
michael@0 1323 }
michael@0 1324 }
michael@0 1325 #endif
michael@0 1326
michael@0 1327 pc += js_CodeSpec[op].length;
michael@0 1328 current->updateTrackedPc(pc);
michael@0 1329 }
michael@0 1330
michael@0 1331 return true;
michael@0 1332 }
michael@0 1333
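// Added explanatory note (not part of the upstream sources): snoopControlFlow()
// relies on the source note attached to the opcode to classify it. Roughly:
//
//   while (c) { ... }          // JSOP_GOTO with SRC_WHILE    -> whileOrForInLoop()
//   for (var k in o) { ... }   // JSOP_GOTO with SRC_FOR_IN   -> whileOrForInLoop()
//   ... break; ...             // JSOP_GOTO with SRC_BREAK    -> processBreak()
//   ... continue; ...          // JSOP_GOTO with SRC_CONTINUE -> processContinue()
//
// The exact note kinds handled are those in the switch below.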
michael@0 1334 IonBuilder::ControlStatus
michael@0 1335 IonBuilder::snoopControlFlow(JSOp op)
michael@0 1336 {
michael@0 1337 switch (op) {
michael@0 1338 case JSOP_NOP:
michael@0 1339 return maybeLoop(op, info().getNote(gsn, pc));
michael@0 1340
michael@0 1341 case JSOP_POP:
michael@0 1342 return maybeLoop(op, info().getNote(gsn, pc));
michael@0 1343
michael@0 1344 case JSOP_RETURN:
michael@0 1345 case JSOP_RETRVAL:
michael@0 1346 return processReturn(op);
michael@0 1347
michael@0 1348 case JSOP_THROW:
michael@0 1349 return processThrow();
michael@0 1350
michael@0 1351 case JSOP_GOTO:
michael@0 1352 {
michael@0 1353 jssrcnote *sn = info().getNote(gsn, pc);
michael@0 1354 switch (sn ? SN_TYPE(sn) : SRC_NULL) {
michael@0 1355 case SRC_BREAK:
michael@0 1356 case SRC_BREAK2LABEL:
michael@0 1357 return processBreak(op, sn);
michael@0 1358
michael@0 1359 case SRC_CONTINUE:
michael@0 1360 return processContinue(op);
michael@0 1361
michael@0 1362 case SRC_SWITCHBREAK:
michael@0 1363 return processSwitchBreak(op);
michael@0 1364
michael@0 1365 case SRC_WHILE:
michael@0 1366 case SRC_FOR_IN:
michael@0 1367 case SRC_FOR_OF:
michael@0 1368 // while (cond) { }
michael@0 1369 return whileOrForInLoop(sn);
michael@0 1370
michael@0 1371 default:
michael@0 1372 // Hard assert for now - make an error later.
michael@0 1373 MOZ_ASSUME_UNREACHABLE("unknown goto case");
michael@0 1374 }
michael@0 1375 break;
michael@0 1376 }
michael@0 1377
michael@0 1378 case JSOP_TABLESWITCH:
michael@0 1379 return tableSwitch(op, info().getNote(gsn, pc));
michael@0 1380
michael@0 1381 case JSOP_IFNE:
michael@0 1382 // We should never reach an IFNE; it's a stopAt point, which will
michael@0 1383 // trigger closing the loop.
michael@0 1384 MOZ_ASSUME_UNREACHABLE("we should never reach an ifne!");
michael@0 1385
michael@0 1386 default:
michael@0 1387 break;
michael@0 1388 }
michael@0 1389 return ControlStatus_None;
michael@0 1390 }
michael@0 1391
michael@0 1392 bool
michael@0 1393 IonBuilder::inspectOpcode(JSOp op)
michael@0 1394 {
michael@0 1395 switch (op) {
michael@0 1396 case JSOP_NOP:
michael@0 1397 case JSOP_LINENO:
michael@0 1398 case JSOP_LOOPENTRY:
michael@0 1399 return true;
michael@0 1400
michael@0 1401 case JSOP_LABEL:
michael@0 1402 return jsop_label();
michael@0 1403
michael@0 1404 case JSOP_UNDEFINED:
michael@0 1405 return pushConstant(UndefinedValue());
michael@0 1406
michael@0 1407 case JSOP_IFEQ:
michael@0 1408 return jsop_ifeq(JSOP_IFEQ);
michael@0 1409
michael@0 1410 case JSOP_TRY:
michael@0 1411 return jsop_try();
michael@0 1412
michael@0 1413 case JSOP_CONDSWITCH:
michael@0 1414 return jsop_condswitch();
michael@0 1415
michael@0 1416 case JSOP_BITNOT:
michael@0 1417 return jsop_bitnot();
michael@0 1418
michael@0 1419 case JSOP_BITAND:
michael@0 1420 case JSOP_BITOR:
michael@0 1421 case JSOP_BITXOR:
michael@0 1422 case JSOP_LSH:
michael@0 1423 case JSOP_RSH:
michael@0 1424 case JSOP_URSH:
michael@0 1425 return jsop_bitop(op);
michael@0 1426
michael@0 1427 case JSOP_ADD:
michael@0 1428 case JSOP_SUB:
michael@0 1429 case JSOP_MUL:
michael@0 1430 case JSOP_DIV:
michael@0 1431 case JSOP_MOD:
michael@0 1432 return jsop_binary(op);
michael@0 1433
michael@0 1434 case JSOP_POS:
michael@0 1435 return jsop_pos();
michael@0 1436
michael@0 1437 case JSOP_NEG:
michael@0 1438 return jsop_neg();
michael@0 1439
michael@0 1440 case JSOP_AND:
michael@0 1441 case JSOP_OR:
michael@0 1442 return jsop_andor(op);
michael@0 1443
michael@0 1444 case JSOP_DEFVAR:
michael@0 1445 case JSOP_DEFCONST:
michael@0 1446 return jsop_defvar(GET_UINT32_INDEX(pc));
michael@0 1447
michael@0 1448 case JSOP_DEFFUN:
michael@0 1449 return jsop_deffun(GET_UINT32_INDEX(pc));
michael@0 1450
michael@0 1451 case JSOP_EQ:
michael@0 1452 case JSOP_NE:
michael@0 1453 case JSOP_STRICTEQ:
michael@0 1454 case JSOP_STRICTNE:
michael@0 1455 case JSOP_LT:
michael@0 1456 case JSOP_LE:
michael@0 1457 case JSOP_GT:
michael@0 1458 case JSOP_GE:
michael@0 1459 return jsop_compare(op);
michael@0 1460
michael@0 1461 case JSOP_DOUBLE:
michael@0 1462 return pushConstant(info().getConst(pc));
michael@0 1463
michael@0 1464 case JSOP_STRING:
michael@0 1465 return pushConstant(StringValue(info().getAtom(pc)));
michael@0 1466
michael@0 1467 case JSOP_ZERO:
michael@0 1468 return pushConstant(Int32Value(0));
michael@0 1469
michael@0 1470 case JSOP_ONE:
michael@0 1471 return pushConstant(Int32Value(1));
michael@0 1472
michael@0 1473 case JSOP_NULL:
michael@0 1474 return pushConstant(NullValue());
michael@0 1475
michael@0 1476 case JSOP_VOID:
michael@0 1477 current->pop();
michael@0 1478 return pushConstant(UndefinedValue());
michael@0 1479
michael@0 1480 case JSOP_HOLE:
michael@0 1481 return pushConstant(MagicValue(JS_ELEMENTS_HOLE));
michael@0 1482
michael@0 1483 case JSOP_FALSE:
michael@0 1484 return pushConstant(BooleanValue(false));
michael@0 1485
michael@0 1486 case JSOP_TRUE:
michael@0 1487 return pushConstant(BooleanValue(true));
michael@0 1488
michael@0 1489 case JSOP_ARGUMENTS:
michael@0 1490 return jsop_arguments();
michael@0 1491
michael@0 1492 case JSOP_RUNONCE:
michael@0 1493 return jsop_runonce();
michael@0 1494
michael@0 1495 case JSOP_REST:
michael@0 1496 return jsop_rest();
michael@0 1497
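      // Added note (illustrative, not from the upstream sources): formals are
      // aliased by the arguments object when, e.g., a non-strict function both
      // writes a formal and reads |arguments|:
      //
      //   function f(x) { x = 1; return arguments[0]; }  // f(0) == 1 in non-strict code
      //
      // In that case GETARG must go through MGetArgumentsObjectArg instead of
      // reading the formal's local slot directly.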
michael@0 1498 case JSOP_GETARG:
michael@0 1499 if (info().argsObjAliasesFormals()) {
michael@0 1500 MGetArgumentsObjectArg *getArg = MGetArgumentsObjectArg::New(alloc(),
michael@0 1501 current->argumentsObject(),
michael@0 1502 GET_ARGNO(pc));
michael@0 1503 current->add(getArg);
michael@0 1504 current->push(getArg);
michael@0 1505 } else {
michael@0 1506 current->pushArg(GET_ARGNO(pc));
michael@0 1507 }
michael@0 1508 return true;
michael@0 1509
michael@0 1510 case JSOP_SETARG:
michael@0 1511 return jsop_setarg(GET_ARGNO(pc));
michael@0 1512
michael@0 1513 case JSOP_GETLOCAL:
michael@0 1514 current->pushLocal(GET_LOCALNO(pc));
michael@0 1515 return true;
michael@0 1516
michael@0 1517 case JSOP_SETLOCAL:
michael@0 1518 current->setLocal(GET_LOCALNO(pc));
michael@0 1519 return true;
michael@0 1520
michael@0 1521 case JSOP_POP:
michael@0 1522 current->pop();
michael@0 1523
michael@0 1524 // POP opcodes frequently appear where values are killed, e.g. after
michael@0 1525 // SET* opcodes. Place a resume point afterwards to avoid capturing
michael@0 1526 // the dead value in later snapshots, except in places where that
michael@0 1527 // resume point is obviously unnecessary.
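        // For instance (added, illustrative): |x = f(); y = g();| emits a
        // SETLOCAL/POP pair per statement, and the resume point taken after
        // each POP keeps the dead call result out of later snapshots. The
        // check below skips the resume point when the next op is another POP.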
michael@0 1528 if (pc[JSOP_POP_LENGTH] == JSOP_POP)
michael@0 1529 return true;
michael@0 1530 return maybeInsertResume();
michael@0 1531
michael@0 1532 case JSOP_POPN:
michael@0 1533 for (uint32_t i = 0, n = GET_UINT16(pc); i < n; i++)
michael@0 1534 current->pop();
michael@0 1535 return true;
michael@0 1536
michael@0 1537 case JSOP_DUPAT:
michael@0 1538 current->pushSlot(current->stackDepth() - 1 - GET_UINT24(pc));
michael@0 1539 return true;
michael@0 1540
michael@0 1541 case JSOP_NEWINIT:
michael@0 1542 if (GET_UINT8(pc) == JSProto_Array)
michael@0 1543 return jsop_newarray(0);
michael@0 1544 return jsop_newobject();
michael@0 1545
michael@0 1546 case JSOP_NEWARRAY:
michael@0 1547 return jsop_newarray(GET_UINT24(pc));
michael@0 1548
michael@0 1549 case JSOP_NEWOBJECT:
michael@0 1550 return jsop_newobject();
michael@0 1551
michael@0 1552 case JSOP_INITELEM:
michael@0 1553 return jsop_initelem();
michael@0 1554
michael@0 1555 case JSOP_INITELEM_ARRAY:
michael@0 1556 return jsop_initelem_array();
michael@0 1557
michael@0 1558 case JSOP_INITPROP:
michael@0 1559 {
michael@0 1560 PropertyName *name = info().getAtom(pc)->asPropertyName();
michael@0 1561 return jsop_initprop(name);
michael@0 1562 }
michael@0 1563
michael@0 1564 case JSOP_MUTATEPROTO:
michael@0 1565 {
michael@0 1566 return jsop_mutateproto();
michael@0 1567 }
michael@0 1568
michael@0 1569 case JSOP_INITPROP_GETTER:
michael@0 1570 case JSOP_INITPROP_SETTER: {
michael@0 1571 PropertyName *name = info().getAtom(pc)->asPropertyName();
michael@0 1572 return jsop_initprop_getter_setter(name);
michael@0 1573 }
michael@0 1574
michael@0 1575 case JSOP_INITELEM_GETTER:
michael@0 1576 case JSOP_INITELEM_SETTER:
michael@0 1577 return jsop_initelem_getter_setter();
michael@0 1578
michael@0 1579 case JSOP_ENDINIT:
michael@0 1580 return true;
michael@0 1581
michael@0 1582 case JSOP_FUNCALL:
michael@0 1583 return jsop_funcall(GET_ARGC(pc));
michael@0 1584
michael@0 1585 case JSOP_FUNAPPLY:
michael@0 1586 return jsop_funapply(GET_ARGC(pc));
michael@0 1587
michael@0 1588 case JSOP_CALL:
michael@0 1589 case JSOP_NEW:
michael@0 1590 return jsop_call(GET_ARGC(pc), (JSOp)*pc == JSOP_NEW);
michael@0 1591
michael@0 1592 case JSOP_EVAL:
michael@0 1593 return jsop_eval(GET_ARGC(pc));
michael@0 1594
michael@0 1595 case JSOP_INT8:
michael@0 1596 return pushConstant(Int32Value(GET_INT8(pc)));
michael@0 1597
michael@0 1598 case JSOP_UINT16:
michael@0 1599 return pushConstant(Int32Value(GET_UINT16(pc)));
michael@0 1600
michael@0 1601 case JSOP_GETGNAME:
michael@0 1602 {
michael@0 1603 PropertyName *name = info().getAtom(pc)->asPropertyName();
michael@0 1604 return jsop_getgname(name);
michael@0 1605 }
michael@0 1606
michael@0 1607 case JSOP_BINDGNAME:
michael@0 1608 return pushConstant(ObjectValue(script()->global()));
michael@0 1609
michael@0 1610 case JSOP_SETGNAME:
michael@0 1611 {
michael@0 1612 PropertyName *name = info().getAtom(pc)->asPropertyName();
michael@0 1613 JSObject *obj = &script()->global();
michael@0 1614 return setStaticName(obj, name);
michael@0 1615 }
michael@0 1616
michael@0 1617 case JSOP_NAME:
michael@0 1618 {
michael@0 1619 PropertyName *name = info().getAtom(pc)->asPropertyName();
michael@0 1620 return jsop_getname(name);
michael@0 1621 }
michael@0 1622
michael@0 1623 case JSOP_GETINTRINSIC:
michael@0 1624 {
michael@0 1625 PropertyName *name = info().getAtom(pc)->asPropertyName();
michael@0 1626 return jsop_intrinsic(name);
michael@0 1627 }
michael@0 1628
michael@0 1629 case JSOP_BINDNAME:
michael@0 1630 return jsop_bindname(info().getName(pc));
michael@0 1631
michael@0 1632 case JSOP_DUP:
michael@0 1633 current->pushSlot(current->stackDepth() - 1);
michael@0 1634 return true;
michael@0 1635
michael@0 1636 case JSOP_DUP2:
michael@0 1637 return jsop_dup2();
michael@0 1638
michael@0 1639 case JSOP_SWAP:
michael@0 1640 current->swapAt(-1);
michael@0 1641 return true;
michael@0 1642
michael@0 1643 case JSOP_PICK:
michael@0 1644 current->pick(-GET_INT8(pc));
michael@0 1645 return true;
michael@0 1646
michael@0 1647 case JSOP_GETALIASEDVAR:
michael@0 1648 return jsop_getaliasedvar(ScopeCoordinate(pc));
michael@0 1649
michael@0 1650 case JSOP_SETALIASEDVAR:
michael@0 1651 return jsop_setaliasedvar(ScopeCoordinate(pc));
michael@0 1652
michael@0 1653 case JSOP_UINT24:
michael@0 1654 return pushConstant(Int32Value(GET_UINT24(pc)));
michael@0 1655
michael@0 1656 case JSOP_INT32:
michael@0 1657 return pushConstant(Int32Value(GET_INT32(pc)));
michael@0 1658
michael@0 1659 case JSOP_LOOPHEAD:
michael@0 1660 // JSOP_LOOPHEAD is handled when processing the loop header.
michael@0 1661 MOZ_ASSUME_UNREACHABLE("JSOP_LOOPHEAD outside loop");
michael@0 1662
michael@0 1663 case JSOP_GETELEM:
michael@0 1664 case JSOP_CALLELEM:
michael@0 1665 return jsop_getelem();
michael@0 1666
michael@0 1667 case JSOP_SETELEM:
michael@0 1668 return jsop_setelem();
michael@0 1669
michael@0 1670 case JSOP_LENGTH:
michael@0 1671 return jsop_length();
michael@0 1672
michael@0 1673 case JSOP_NOT:
michael@0 1674 return jsop_not();
michael@0 1675
michael@0 1676 case JSOP_THIS:
michael@0 1677 return jsop_this();
michael@0 1678
michael@0 1679 case JSOP_CALLEE: {
michael@0 1680 MDefinition *callee = getCallee();
michael@0 1681 current->push(callee);
michael@0 1682 return true;
michael@0 1683 }
michael@0 1684
michael@0 1685 case JSOP_GETPROP:
michael@0 1686 case JSOP_CALLPROP:
michael@0 1687 {
michael@0 1688 PropertyName *name = info().getAtom(pc)->asPropertyName();
michael@0 1689 return jsop_getprop(name);
michael@0 1690 }
michael@0 1691
michael@0 1692 case JSOP_SETPROP:
michael@0 1693 case JSOP_SETNAME:
michael@0 1694 {
michael@0 1695 PropertyName *name = info().getAtom(pc)->asPropertyName();
michael@0 1696 return jsop_setprop(name);
michael@0 1697 }
michael@0 1698
michael@0 1699 case JSOP_DELPROP:
michael@0 1700 {
michael@0 1701 PropertyName *name = info().getAtom(pc)->asPropertyName();
michael@0 1702 return jsop_delprop(name);
michael@0 1703 }
michael@0 1704
michael@0 1705 case JSOP_DELELEM:
michael@0 1706 return jsop_delelem();
michael@0 1707
michael@0 1708 case JSOP_REGEXP:
michael@0 1709 return jsop_regexp(info().getRegExp(pc));
michael@0 1710
michael@0 1711 case JSOP_OBJECT:
michael@0 1712 return jsop_object(info().getObject(pc));
michael@0 1713
michael@0 1714 case JSOP_TYPEOF:
michael@0 1715 case JSOP_TYPEOFEXPR:
michael@0 1716 return jsop_typeof();
michael@0 1717
michael@0 1718 case JSOP_TOID:
michael@0 1719 return jsop_toid();
michael@0 1720
michael@0 1721 case JSOP_LAMBDA:
michael@0 1722 return jsop_lambda(info().getFunction(pc));
michael@0 1723
michael@0 1724 case JSOP_LAMBDA_ARROW:
michael@0 1725 return jsop_lambda_arrow(info().getFunction(pc));
michael@0 1726
michael@0 1727 case JSOP_ITER:
michael@0 1728 return jsop_iter(GET_INT8(pc));
michael@0 1729
michael@0 1730 case JSOP_ITERNEXT:
michael@0 1731 return jsop_iternext();
michael@0 1732
michael@0 1733 case JSOP_MOREITER:
michael@0 1734 return jsop_itermore();
michael@0 1735
michael@0 1736 case JSOP_ENDITER:
michael@0 1737 return jsop_iterend();
michael@0 1738
michael@0 1739 case JSOP_IN:
michael@0 1740 return jsop_in();
michael@0 1741
michael@0 1742 case JSOP_SETRVAL:
michael@0 1743 JS_ASSERT(!script()->noScriptRval());
michael@0 1744 current->setSlot(info().returnValueSlot(), current->pop());
michael@0 1745 return true;
michael@0 1746
michael@0 1747 case JSOP_INSTANCEOF:
michael@0 1748 return jsop_instanceof();
michael@0 1749
michael@0 1750 case JSOP_DEBUGLEAVEBLOCK:
michael@0 1751 return true;
michael@0 1752
michael@0 1753 default:
michael@0 1754 #ifdef DEBUG
michael@0 1755 return abort("Unsupported opcode: %s (line %d)", js_CodeName[op], info().lineno(pc));
michael@0 1756 #else
michael@0 1757 return abort("Unsupported opcode: %d (line %d)", op, info().lineno(pc));
michael@0 1758 #endif
michael@0 1759 }
michael@0 1760 }
michael@0 1761
michael@0 1762 // Given that the current control flow structure has ended forcefully,
michael@0 1763 // via a return, break, or continue (rather than joining), propagate the
michael@0 1764 // termination up. For example, a return nested 5 loops deep may terminate
michael@0 1765 // every outer loop at once, if there are no intervening conditionals:
michael@0 1766 //
michael@0 1767 // for (...) {
michael@0 1768 // for (...) {
michael@0 1769 // return x;
michael@0 1770 // }
michael@0 1771 // }
michael@0 1772 //
michael@0 1773 // If |current| is nullptr when this function returns, then there is no more
michael@0 1774 // control flow to be processed.
michael@0 1775 IonBuilder::ControlStatus
michael@0 1776 IonBuilder::processControlEnd()
michael@0 1777 {
michael@0 1778 JS_ASSERT(!current);
michael@0 1779
michael@0 1780 if (cfgStack_.empty()) {
michael@0 1781 // If there is no more control flow to process, then this is the
michael@0 1782 // last return in the function.
michael@0 1783 return ControlStatus_Ended;
michael@0 1784 }
michael@0 1785
michael@0 1786 return processCfgStack();
michael@0 1787 }
michael@0 1788
michael@0 1789 // Processes the top of the CFG stack. This is used from two places:
michael@0 1790 // (1) processControlEnd(), whereby a break, continue, or return may interrupt
michael@0 1791 // an in-progress CFG structure before reaching its actual termination
michael@0 1792 // point in the bytecode.
michael@0 1793 // (2) traverseBytecode(), whereby we reach the last instruction in a CFG
michael@0 1794 // structure.
michael@0 1795 IonBuilder::ControlStatus
michael@0 1796 IonBuilder::processCfgStack()
michael@0 1797 {
michael@0 1798 ControlStatus status = processCfgEntry(cfgStack_.back());
michael@0 1799
michael@0 1800 // If this terminated a CFG structure, act like processControlEnd() and
michael@0 1801 // keep propagating upward.
michael@0 1802 while (status == ControlStatus_Ended) {
michael@0 1803 popCfgStack();
michael@0 1804 if (cfgStack_.empty())
michael@0 1805 return status;
michael@0 1806 status = processCfgEntry(cfgStack_.back());
michael@0 1807 }
michael@0 1808
michael@0 1809 // If some join took place, the current structure is finished.
michael@0 1810 if (status == ControlStatus_Joined)
michael@0 1811 popCfgStack();
michael@0 1812
michael@0 1813 return status;
michael@0 1814 }
michael@0 1815
michael@0 1816 IonBuilder::ControlStatus
michael@0 1817 IonBuilder::processCfgEntry(CFGState &state)
michael@0 1818 {
michael@0 1819 switch (state.state) {
michael@0 1820 case CFGState::IF_TRUE:
michael@0 1821 case CFGState::IF_TRUE_EMPTY_ELSE:
michael@0 1822 return processIfEnd(state);
michael@0 1823
michael@0 1824 case CFGState::IF_ELSE_TRUE:
michael@0 1825 return processIfElseTrueEnd(state);
michael@0 1826
michael@0 1827 case CFGState::IF_ELSE_FALSE:
michael@0 1828 return processIfElseFalseEnd(state);
michael@0 1829
michael@0 1830 case CFGState::DO_WHILE_LOOP_BODY:
michael@0 1831 return processDoWhileBodyEnd(state);
michael@0 1832
michael@0 1833 case CFGState::DO_WHILE_LOOP_COND:
michael@0 1834 return processDoWhileCondEnd(state);
michael@0 1835
michael@0 1836 case CFGState::WHILE_LOOP_COND:
michael@0 1837 return processWhileCondEnd(state);
michael@0 1838
michael@0 1839 case CFGState::WHILE_LOOP_BODY:
michael@0 1840 return processWhileBodyEnd(state);
michael@0 1841
michael@0 1842 case CFGState::FOR_LOOP_COND:
michael@0 1843 return processForCondEnd(state);
michael@0 1844
michael@0 1845 case CFGState::FOR_LOOP_BODY:
michael@0 1846 return processForBodyEnd(state);
michael@0 1847
michael@0 1848 case CFGState::FOR_LOOP_UPDATE:
michael@0 1849 return processForUpdateEnd(state);
michael@0 1850
michael@0 1851 case CFGState::TABLE_SWITCH:
michael@0 1852 return processNextTableSwitchCase(state);
michael@0 1853
michael@0 1854 case CFGState::COND_SWITCH_CASE:
michael@0 1855 return processCondSwitchCase(state);
michael@0 1856
michael@0 1857 case CFGState::COND_SWITCH_BODY:
michael@0 1858 return processCondSwitchBody(state);
michael@0 1859
michael@0 1860 case CFGState::AND_OR:
michael@0 1861 return processAndOrEnd(state);
michael@0 1862
michael@0 1863 case CFGState::LABEL:
michael@0 1864 return processLabelEnd(state);
michael@0 1865
michael@0 1866 case CFGState::TRY:
michael@0 1867 return processTryEnd(state);
michael@0 1868
michael@0 1869 default:
michael@0 1870 MOZ_ASSUME_UNREACHABLE("unknown cfgstate");
michael@0 1871 }
michael@0 1872 }
michael@0 1873
michael@0 1874 IonBuilder::ControlStatus
michael@0 1875 IonBuilder::processIfEnd(CFGState &state)
michael@0 1876 {
michael@0 1877 if (current) {
michael@0 1878 // Here, the false block is the join point. Create an edge from the
michael@0 1879 // current block to the false block. Note that a RETURN opcode
michael@0 1880 // could have already ended the block.
michael@0 1881 current->end(MGoto::New(alloc(), state.branch.ifFalse));
michael@0 1882
michael@0 1883 if (!state.branch.ifFalse->addPredecessor(alloc(), current))
michael@0 1884 return ControlStatus_Error;
michael@0 1885 }
michael@0 1886
michael@0 1887 if (!setCurrentAndSpecializePhis(state.branch.ifFalse))
michael@0 1888 return ControlStatus_Error;
michael@0 1889 graph().moveBlockToEnd(current);
michael@0 1890 pc = current->pc();
michael@0 1891 return ControlStatus_Joined;
michael@0 1892 }
michael@0 1893
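// Added illustrative sketch (not from the upstream sources): for
//
//   if (c) { a(); } else { b(); }
//   rest();
//
// the true arm |a()| is built first; processIfElseTrueEnd() then redirects the
// traversal to |b()| (state IF_ELSE_FALSE), and processIfElseFalseEnd() joins
// both arms into the block starting at |rest()|.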
michael@0 1894 IonBuilder::ControlStatus
michael@0 1895 IonBuilder::processIfElseTrueEnd(CFGState &state)
michael@0 1896 {
michael@0 1897 // We've reached the end of the true branch of an if-else. Don't
michael@0 1898 // create an edge yet, just transition to parsing the false branch.
michael@0 1899 state.state = CFGState::IF_ELSE_FALSE;
michael@0 1900 state.branch.ifTrue = current;
michael@0 1901 state.stopAt = state.branch.falseEnd;
michael@0 1902 pc = state.branch.ifFalse->pc();
michael@0 1903 if (!setCurrentAndSpecializePhis(state.branch.ifFalse))
michael@0 1904 return ControlStatus_Error;
michael@0 1905 graph().moveBlockToEnd(current);
michael@0 1906
michael@0 1907 if (state.branch.test)
michael@0 1908 filterTypesAtTest(state.branch.test);
michael@0 1909
michael@0 1910 return ControlStatus_Jumped;
michael@0 1911 }
michael@0 1912
michael@0 1913 IonBuilder::ControlStatus
michael@0 1914 IonBuilder::processIfElseFalseEnd(CFGState &state)
michael@0 1915 {
michael@0 1916 // Update the state to have the latest block from the false path.
michael@0 1917 state.branch.ifFalse = current;
michael@0 1918
michael@0 1919 // To create the join node, we need an incoming edge that has not been
michael@0 1920 // terminated yet.
michael@0 1921 MBasicBlock *pred = state.branch.ifTrue
michael@0 1922 ? state.branch.ifTrue
michael@0 1923 : state.branch.ifFalse;
michael@0 1924 MBasicBlock *other = (pred == state.branch.ifTrue) ? state.branch.ifFalse : state.branch.ifTrue;
michael@0 1925
michael@0 1926 if (!pred)
michael@0 1927 return ControlStatus_Ended;
michael@0 1928
michael@0 1929 // Create a new block to represent the join.
michael@0 1930 MBasicBlock *join = newBlock(pred, state.branch.falseEnd);
michael@0 1931 if (!join)
michael@0 1932 return ControlStatus_Error;
michael@0 1933
michael@0 1934 // Create edges from the true and false blocks as needed.
michael@0 1935 pred->end(MGoto::New(alloc(), join));
michael@0 1936
michael@0 1937 if (other) {
michael@0 1938 other->end(MGoto::New(alloc(), join));
michael@0 1939 if (!join->addPredecessor(alloc(), other))
michael@0 1940 return ControlStatus_Error;
michael@0 1941 }
michael@0 1942
michael@0 1943 // Ignore the unreachable remainder of the false block, if any.
michael@0 1944 if (!setCurrentAndSpecializePhis(join))
michael@0 1945 return ControlStatus_Error;
michael@0 1946 pc = current->pc();
michael@0 1947 return ControlStatus_Joined;
michael@0 1948 }
michael@0 1949
michael@0 1950 IonBuilder::ControlStatus
michael@0 1951 IonBuilder::processBrokenLoop(CFGState &state)
michael@0 1952 {
michael@0 1953 JS_ASSERT(!current);
michael@0 1954
michael@0 1955 JS_ASSERT(loopDepth_);
michael@0 1956 loopDepth_--;
michael@0 1957
michael@0 1958 // A broken loop is not a real loop (it has no header or backedge), so
michael@0 1959 // reset the loop depth.
michael@0 1960 for (MBasicBlockIterator i(graph().begin(state.loop.entry)); i != graph().end(); i++) {
michael@0 1961 if (i->loopDepth() > loopDepth_)
michael@0 1962 i->setLoopDepth(i->loopDepth() - 1);
michael@0 1963 }
michael@0 1964
michael@0 1965 // If the loop started with a condition (while/for) then even if the
michael@0 1966 // structure never actually loops, the condition itself can still fail and
michael@0 1967 // thus we must resume at the successor, if one exists.
michael@0 1968 if (!setCurrentAndSpecializePhis(state.loop.successor))
michael@0 1969 return ControlStatus_Error;
michael@0 1970 if (current) {
michael@0 1971 JS_ASSERT(current->loopDepth() == loopDepth_);
michael@0 1972 graph().moveBlockToEnd(current);
michael@0 1973 }
michael@0 1974
michael@0 1975 // Join the breaks together and continue parsing.
michael@0 1976 if (state.loop.breaks) {
michael@0 1977 MBasicBlock *block = createBreakCatchBlock(state.loop.breaks, state.loop.exitpc);
michael@0 1978 if (!block)
michael@0 1979 return ControlStatus_Error;
michael@0 1980
michael@0 1981 if (current) {
michael@0 1982 current->end(MGoto::New(alloc(), block));
michael@0 1983 if (!block->addPredecessor(alloc(), current))
michael@0 1984 return ControlStatus_Error;
michael@0 1985 }
michael@0 1986
michael@0 1987 if (!setCurrentAndSpecializePhis(block))
michael@0 1988 return ControlStatus_Error;
michael@0 1989 }
michael@0 1990
michael@0 1991 // If the loop is not gated on a condition, and has only returns, we'll
michael@0 1992 // reach this case. For example:
michael@0 1993 // do { ... return; } while ();
michael@0 1994 if (!current)
michael@0 1995 return ControlStatus_Ended;
michael@0 1996
michael@0 1997 // Otherwise, the loop is gated on a condition and/or has breaks so keep
michael@0 1998 // parsing at the successor.
michael@0 1999 pc = current->pc();
michael@0 2000 return ControlStatus_Joined;
michael@0 2001 }
michael@0 2002
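// Added note (illustrative): finishLoop() is the normal counterpart of
// processBrokenLoop(). For a loop such as
//
//   while (c) { body(); }
//
// the body ends here with a backedge to the header; setBackedge() finalizes
// the header phis, and any deferred |break| edges are merged into the
// successor block before parsing continues after the loop.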
michael@0 2003 IonBuilder::ControlStatus
michael@0 2004 IonBuilder::finishLoop(CFGState &state, MBasicBlock *successor)
michael@0 2005 {
michael@0 2006 JS_ASSERT(current);
michael@0 2007
michael@0 2008 JS_ASSERT(loopDepth_);
michael@0 2009 loopDepth_--;
michael@0 2010 JS_ASSERT_IF(successor, successor->loopDepth() == loopDepth_);
michael@0 2011
michael@0 2012 // Compute phis in the loop header and propagate them throughout the loop,
michael@0 2013 // including the successor.
michael@0 2014 AbortReason r = state.loop.entry->setBackedge(current);
michael@0 2015 if (r == AbortReason_Alloc)
michael@0 2016 return ControlStatus_Error;
michael@0 2017 if (r == AbortReason_Disable) {
michael@0 2018 // If there are types for variables on the backedge that were not
michael@0 2019 // present at the original loop header, then uses of the variables'
michael@0 2020 // phis may have generated incorrect nodes. The new types have been
michael@0 2021 // incorporated into the header phis, so remove all blocks for the
michael@0 2022 // loop body and restart with the new types.
michael@0 2023 return restartLoop(state);
michael@0 2024 }
michael@0 2025
michael@0 2026 if (successor) {
michael@0 2027 graph().moveBlockToEnd(successor);
michael@0 2028 successor->inheritPhis(state.loop.entry);
michael@0 2029 }
michael@0 2030
michael@0 2031 if (state.loop.breaks) {
michael@0 2032 // Propagate phis placed in the header to individual break exit points.
michael@0 2033 DeferredEdge *edge = state.loop.breaks;
michael@0 2034 while (edge) {
michael@0 2035 edge->block->inheritPhis(state.loop.entry);
michael@0 2036 edge = edge->next;
michael@0 2037 }
michael@0 2038
michael@0 2039 // Create a catch block to join all break exits.
michael@0 2040 MBasicBlock *block = createBreakCatchBlock(state.loop.breaks, state.loop.exitpc);
michael@0 2041 if (!block)
michael@0 2042 return ControlStatus_Error;
michael@0 2043
michael@0 2044 if (successor) {
michael@0 2045 // Finally, create an unconditional edge from the successor to the
michael@0 2046 // catch block.
michael@0 2047 successor->end(MGoto::New(alloc(), block));
michael@0 2048 if (!block->addPredecessor(alloc(), successor))
michael@0 2049 return ControlStatus_Error;
michael@0 2050 }
michael@0 2051 successor = block;
michael@0 2052 }
michael@0 2053
michael@0 2054 if (!setCurrentAndSpecializePhis(successor))
michael@0 2055 return ControlStatus_Error;
michael@0 2056
michael@0 2057 // An infinite loop (for (;;) { }) will not have a successor.
michael@0 2058 if (!current)
michael@0 2059 return ControlStatus_Ended;
michael@0 2060
michael@0 2061 pc = current->pc();
michael@0 2062 return ControlStatus_Joined;
michael@0 2063 }
michael@0 2064
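// Added example (an assumption about a typical trigger, not from the upstream
// sources): in
//
//   var x = 0;
//   while (c) { use(x); x = "s"; }
//
// the backedge carries a string type for |x| that the header phi did not see
// when the body was first built, so setBackedge() reports AbortReason_Disable
// and the body is discarded and rebuilt here with the widened phi types.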
michael@0 2065 IonBuilder::ControlStatus
michael@0 2066 IonBuilder::restartLoop(CFGState state)
michael@0 2067 {
michael@0 2068 spew("New types at loop header, restarting loop body");
michael@0 2069
michael@0 2070 if (js_JitOptions.limitScriptSize) {
michael@0 2071 if (++numLoopRestarts_ >= MAX_LOOP_RESTARTS)
michael@0 2072 return ControlStatus_Abort;
michael@0 2073 }
michael@0 2074
michael@0 2075 MBasicBlock *header = state.loop.entry;
michael@0 2076
michael@0 2077 // Remove all blocks in the loop body other than the header, which has phis
michael@0 2078 // of the appropriate type and incoming edges to preserve.
michael@0 2079 graph().removeBlocksAfter(header);
michael@0 2080
michael@0 2081 // Remove all instructions from the header itself, and all resume points
michael@0 2082 // except the entry resume point.
michael@0 2083 header->discardAllInstructions();
michael@0 2084 header->discardAllResumePoints(/* discardEntry = */ false);
michael@0 2085 header->setStackDepth(header->getPredecessor(0)->stackDepth());
michael@0 2086
michael@0 2087 popCfgStack();
michael@0 2088
michael@0 2089 loopDepth_++;
michael@0 2090
michael@0 2091 if (!pushLoop(state.loop.initialState, state.loop.initialStopAt, header, state.loop.osr,
michael@0 2092 state.loop.loopHead, state.loop.initialPc,
michael@0 2093 state.loop.bodyStart, state.loop.bodyEnd,
michael@0 2094 state.loop.exitpc, state.loop.continuepc))
michael@0 2095 {
michael@0 2096 return ControlStatus_Error;
michael@0 2097 }
michael@0 2098
michael@0 2099 CFGState &nstate = cfgStack_.back();
michael@0 2100
michael@0 2101 nstate.loop.condpc = state.loop.condpc;
michael@0 2102 nstate.loop.updatepc = state.loop.updatepc;
michael@0 2103 nstate.loop.updateEnd = state.loop.updateEnd;
michael@0 2104
michael@0 2105 // Don't specializePhis(), as the header has been visited before and the
michael@0 2106 // phis have already had their type set.
michael@0 2107 setCurrent(header);
michael@0 2108
michael@0 2109 if (!jsop_loophead(nstate.loop.loopHead))
michael@0 2110 return ControlStatus_Error;
michael@0 2111
michael@0 2112 pc = nstate.loop.initialPc;
michael@0 2113 return ControlStatus_Jumped;
michael@0 2114 }
michael@0 2115
michael@0 2116 IonBuilder::ControlStatus
michael@0 2117 IonBuilder::processDoWhileBodyEnd(CFGState &state)
michael@0 2118 {
michael@0 2119 if (!processDeferredContinues(state))
michael@0 2120 return ControlStatus_Error;
michael@0 2121
michael@0 2122 // No current means control flow cannot reach the condition, so this will
michael@0 2123 // never loop.
michael@0 2124 if (!current)
michael@0 2125 return processBrokenLoop(state);
michael@0 2126
michael@0 2127 MBasicBlock *header = newBlock(current, state.loop.updatepc);
michael@0 2128 if (!header)
michael@0 2129 return ControlStatus_Error;
michael@0 2130 current->end(MGoto::New(alloc(), header));
michael@0 2131
michael@0 2132 state.state = CFGState::DO_WHILE_LOOP_COND;
michael@0 2133 state.stopAt = state.loop.updateEnd;
michael@0 2134 pc = state.loop.updatepc;
michael@0 2135 if (!setCurrentAndSpecializePhis(header))
michael@0 2136 return ControlStatus_Error;
michael@0 2137 return ControlStatus_Jumped;
michael@0 2138 }
michael@0 2139
michael@0 2140 IonBuilder::ControlStatus
michael@0 2141 IonBuilder::processDoWhileCondEnd(CFGState &state)
michael@0 2142 {
michael@0 2143 JS_ASSERT(JSOp(*pc) == JSOP_IFNE);
michael@0 2144
michael@0 2145 // We're guaranteed a |current|; it's impossible to break or return from
michael@0 2146 // inside the conditional expression.
michael@0 2147 JS_ASSERT(current);
michael@0 2148
michael@0 2149 // Pop the last value, and create the successor block.
michael@0 2150 MDefinition *vins = current->pop();
michael@0 2151 MBasicBlock *successor = newBlock(current, GetNextPc(pc), loopDepth_ - 1);
michael@0 2152 if (!successor)
michael@0 2153 return ControlStatus_Error;
michael@0 2154
michael@0 2155 // Test for do {} while(false) and don't create a loop in that case.
michael@0 2156 if (vins->isConstant()) {
michael@0 2157 MConstant *cte = vins->toConstant();
michael@0 2158 if (cte->value().isBoolean() && !cte->value().toBoolean()) {
michael@0 2159 current->end(MGoto::New(alloc(), successor));
michael@0 2160 current = nullptr;
michael@0 2161
michael@0 2162 state.loop.successor = successor;
michael@0 2163 return processBrokenLoop(state);
michael@0 2164 }
michael@0 2165 }
michael@0 2166
michael@0 2167 // Create the test instruction and end the current block.
michael@0 2168 MTest *test = MTest::New(alloc(), vins, state.loop.entry, successor);
michael@0 2169 current->end(test);
michael@0 2170 return finishLoop(state, successor);
michael@0 2171 }
michael@0 2172
michael@0 2173 IonBuilder::ControlStatus
michael@0 2174 IonBuilder::processWhileCondEnd(CFGState &state)
michael@0 2175 {
michael@0 2176 JS_ASSERT(JSOp(*pc) == JSOP_IFNE || JSOp(*pc) == JSOP_IFEQ);
michael@0 2177
michael@0 2178 // Balance the stack past the IFNE or IFEQ.
michael@0 2179 MDefinition *ins = current->pop();
michael@0 2180
michael@0 2181 // Create the body and successor blocks.
michael@0 2182 MBasicBlock *body = newBlock(current, state.loop.bodyStart);
michael@0 2183 state.loop.successor = newBlock(current, state.loop.exitpc, loopDepth_ - 1);
michael@0 2184 if (!body || !state.loop.successor)
michael@0 2185 return ControlStatus_Error;
michael@0 2186
michael@0 2187 MTest *test;
michael@0 2188 if (JSOp(*pc) == JSOP_IFNE)
michael@0 2189 test = MTest::New(alloc(), ins, body, state.loop.successor);
michael@0 2190 else
michael@0 2191 test = MTest::New(alloc(), ins, state.loop.successor, body);
michael@0 2192 current->end(test);
michael@0 2193
michael@0 2194 state.state = CFGState::WHILE_LOOP_BODY;
michael@0 2195 state.stopAt = state.loop.bodyEnd;
michael@0 2196 pc = state.loop.bodyStart;
michael@0 2197 if (!setCurrentAndSpecializePhis(body))
michael@0 2198 return ControlStatus_Error;
michael@0 2199 return ControlStatus_Jumped;
michael@0 2200 }
michael@0 2201
michael@0 2202 IonBuilder::ControlStatus
michael@0 2203 IonBuilder::processWhileBodyEnd(CFGState &state)
michael@0 2204 {
michael@0 2205 if (!processDeferredContinues(state))
michael@0 2206 return ControlStatus_Error;
michael@0 2207
michael@0 2208 if (!current)
michael@0 2209 return processBrokenLoop(state);
michael@0 2210
michael@0 2211 current->end(MGoto::New(alloc(), state.loop.entry));
michael@0 2212 return finishLoop(state, state.loop.successor);
michael@0 2213 }
michael@0 2214
michael@0 2215 IonBuilder::ControlStatus
michael@0 2216 IonBuilder::processForCondEnd(CFGState &state)
michael@0 2217 {
michael@0 2218 JS_ASSERT(JSOp(*pc) == JSOP_IFNE);
michael@0 2219
michael@0 2220 // Balance the stack past the IFNE.
michael@0 2221 MDefinition *ins = current->pop();
michael@0 2222
michael@0 2223 // Create the body and successor blocks.
michael@0 2224 MBasicBlock *body = newBlock(current, state.loop.bodyStart);
michael@0 2225 state.loop.successor = newBlock(current, state.loop.exitpc, loopDepth_ - 1);
michael@0 2226 if (!body || !state.loop.successor)
michael@0 2227 return ControlStatus_Error;
michael@0 2228
michael@0 2229 MTest *test = MTest::New(alloc(), ins, body, state.loop.successor);
michael@0 2230 current->end(test);
michael@0 2231
michael@0 2232 state.state = CFGState::FOR_LOOP_BODY;
michael@0 2233 state.stopAt = state.loop.bodyEnd;
michael@0 2234 pc = state.loop.bodyStart;
michael@0 2235 if (!setCurrentAndSpecializePhis(body))
michael@0 2236 return ControlStatus_Error;
michael@0 2237 return ControlStatus_Jumped;
michael@0 2238 }
michael@0 2239
michael@0 2240 IonBuilder::ControlStatus
michael@0 2241 IonBuilder::processForBodyEnd(CFGState &state)
michael@0 2242 {
michael@0 2243 if (!processDeferredContinues(state))
michael@0 2244 return ControlStatus_Error;
michael@0 2245
michael@0 2246 // If there is no updatepc, just go right to processing what would be the
michael@0 2247 // end of the update clause. Otherwise, |current| might be nullptr; if this is
michael@0 2248 // the case, the update is unreachable anyway.
michael@0 2249 if (!state.loop.updatepc || !current)
michael@0 2250 return processForUpdateEnd(state);
michael@0 2251
michael@0 2252 pc = state.loop.updatepc;
michael@0 2253
michael@0 2254 state.state = CFGState::FOR_LOOP_UPDATE;
michael@0 2255 state.stopAt = state.loop.updateEnd;
michael@0 2256 return ControlStatus_Jumped;
michael@0 2257 }
michael@0 2258
michael@0 2259 IonBuilder::ControlStatus
michael@0 2260 IonBuilder::processForUpdateEnd(CFGState &state)
michael@0 2261 {
michael@0 2262 // If there is no current, we couldn't reach the loop edge and there was no
michael@0 2263 // update clause.
michael@0 2264 if (!current)
michael@0 2265 return processBrokenLoop(state);
michael@0 2266
michael@0 2267 current->end(MGoto::New(alloc(), state.loop.entry));
michael@0 2268 return finishLoop(state, state.loop.successor);
michael@0 2269 }
michael@0 2270
michael@0 2271 IonBuilder::DeferredEdge *
michael@0 2272 IonBuilder::filterDeadDeferredEdges(DeferredEdge *edge)
michael@0 2273 {
michael@0 2274 DeferredEdge *head = edge, *prev = nullptr;
michael@0 2275
michael@0 2276 while (edge) {
michael@0 2277 if (edge->block->isDead()) {
michael@0 2278 if (prev)
michael@0 2279 prev->next = edge->next;
michael@0 2280 else
michael@0 2281 head = edge->next;
michael@0 2282 } else {
michael@0 2283 prev = edge;
michael@0 2284 }
michael@0 2285 edge = edge->next;
michael@0 2286 }
michael@0 2287
michael@0 2288 // There must be at least one deferred edge from a block that was not
michael@0 2289 // deleted; blocks are deleted when restarting processing of a loop, and
michael@0 2290 // the final version of the loop body will have edges from live blocks.
michael@0 2291 JS_ASSERT(head);
michael@0 2292
michael@0 2293 return head;
michael@0 2294 }
michael@0 2295
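// Added note (illustrative): continues are collected as DeferredEdges and only
// joined here. For
//
//   for (i = 0; i < n; i++) { if (skip(i)) continue; body(i); }
//
// each |continue| ends its block with no successor; this function funnels all
// of those blocks, plus the fallthrough block if any, into a single block at
// the loop's continue point (for a for-loop, its update clause).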
michael@0 2296 bool
michael@0 2297 IonBuilder::processDeferredContinues(CFGState &state)
michael@0 2298 {
michael@0 2299 // If there are any continues for this loop, and there is an update block,
michael@0 2300 // then we need to create a new basic block to house the update.
michael@0 2301 if (state.loop.continues) {
michael@0 2302 DeferredEdge *edge = filterDeadDeferredEdges(state.loop.continues);
michael@0 2303
michael@0 2304 MBasicBlock *update = newBlock(edge->block, loops_.back().continuepc);
michael@0 2305 if (!update)
michael@0 2306 return false;
michael@0 2307
michael@0 2308 if (current) {
michael@0 2309 current->end(MGoto::New(alloc(), update));
michael@0 2310 if (!update->addPredecessor(alloc(), current))
michael@0 2311 return false;
michael@0 2312 }
michael@0 2313
michael@0 2314 // No need to use addPredecessor for first edge,
michael@0 2315 // because it is already a predecessor.
michael@0 2316 edge->block->end(MGoto::New(alloc(), update));
michael@0 2317 edge = edge->next;
michael@0 2318
michael@0 2319 // Remaining edges
michael@0 2320 while (edge) {
michael@0 2321 edge->block->end(MGoto::New(alloc(), update));
michael@0 2322 if (!update->addPredecessor(alloc(), edge->block))
michael@0 2323 return false;
michael@0 2324 edge = edge->next;
michael@0 2325 }
michael@0 2326 state.loop.continues = nullptr;
michael@0 2327
michael@0 2328 if (!setCurrentAndSpecializePhis(update))
michael@0 2329 return false;
michael@0 2330 }
michael@0 2331
michael@0 2332 return true;
michael@0 2333 }
michael@0 2334
michael@0 2335 MBasicBlock *
michael@0 2336 IonBuilder::createBreakCatchBlock(DeferredEdge *edge, jsbytecode *pc)
michael@0 2337 {
michael@0 2338 edge = filterDeadDeferredEdges(edge);
michael@0 2339
michael@0 2340 // Create block, using the first break statement as predecessor
michael@0 2341 MBasicBlock *successor = newBlock(edge->block, pc);
michael@0 2342 if (!successor)
michael@0 2343 return nullptr;
michael@0 2344
michael@0 2345 // No need to use addPredecessor for first edge,
michael@0 2346 // because it is already a predecessor.
michael@0 2347 edge->block->end(MGoto::New(alloc(), successor));
michael@0 2348 edge = edge->next;
michael@0 2349
michael@0 2350 // Finish up remaining breaks.
michael@0 2351 while (edge) {
michael@0 2352 edge->block->end(MGoto::New(alloc(), successor));
michael@0 2353 if (!successor->addPredecessor(alloc(), edge->block))
michael@0 2354 return nullptr;
michael@0 2355 edge = edge->next;
michael@0 2356 }
michael@0 2357
michael@0 2358 return successor;
michael@0 2359 }
michael@0 2360
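// Added note (illustrative): while building
//
//   switch (v) { case 0: a(); case 1: b(); break; default: c(); }
//
// each tableswitch successor is compiled in turn. A case body that does not
// end in a break leaves |current| non-null here, so it is wired up as an extra
// predecessor of the next case block to model the fall-through.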
michael@0 2361 IonBuilder::ControlStatus
michael@0 2362 IonBuilder::processNextTableSwitchCase(CFGState &state)
michael@0 2363 {
michael@0 2364 JS_ASSERT(state.state == CFGState::TABLE_SWITCH);
michael@0 2365
michael@0 2366 state.tableswitch.currentBlock++;
michael@0 2367
michael@0 2368 // Test if there are still unprocessed successors (cases/default)
michael@0 2369 if (state.tableswitch.currentBlock >= state.tableswitch.ins->numBlocks())
michael@0 2370 return processSwitchEnd(state.tableswitch.breaks, state.tableswitch.exitpc);
michael@0 2371
michael@0 2372 // Get the next successor
michael@0 2373 MBasicBlock *successor = state.tableswitch.ins->getBlock(state.tableswitch.currentBlock);
michael@0 2374
michael@0 2375 // Add current block as predecessor if available.
michael@0 2376 // This means the previous case didn't have a break statement.
michael@0 2377 // So flow will continue in this block.
michael@0 2378 if (current) {
michael@0 2379 current->end(MGoto::New(alloc(), successor));
michael@0 2380 if (!successor->addPredecessor(alloc(), current))
michael@0 2381 return ControlStatus_Error;
michael@0 2382 }
michael@0 2383
michael@0 2384 // Insert successor after the current block, to maintain RPO.
michael@0 2385 graph().moveBlockToEnd(successor);
michael@0 2386
michael@0 2387 // If this is the last successor, the block should stop at the end of the tableswitch.
michael@0 2388 // Otherwise it should stop at the start of the next successor.
michael@0 2389 if (state.tableswitch.currentBlock+1 < state.tableswitch.ins->numBlocks())
michael@0 2390 state.stopAt = state.tableswitch.ins->getBlock(state.tableswitch.currentBlock+1)->pc();
michael@0 2391 else
michael@0 2392 state.stopAt = state.tableswitch.exitpc;
michael@0 2393
michael@0 2394 if (!setCurrentAndSpecializePhis(successor))
michael@0 2395 return ControlStatus_Error;
michael@0 2396 pc = current->pc();
michael@0 2397 return ControlStatus_Jumped;
michael@0 2398 }
michael@0 2399
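// Added note (illustrative): for a short-circuit expression such as
//
//   var r = a() && b();
//
// jsop_andor() pushes an AND_OR entry whose |ifFalse| block is the join point.
// Once the RHS |b()| has been built, this handler jumps there so the
// short-circuit edge and the RHS edge meet in a single block.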
michael@0 2400 IonBuilder::ControlStatus
michael@0 2401 IonBuilder::processAndOrEnd(CFGState &state)
michael@0 2402 {
michael@0 2403 // We just processed the RHS of an && or || expression.
michael@0 2404 // Now jump to the join point (the false block).
michael@0 2405 current->end(MGoto::New(alloc(), state.branch.ifFalse));
michael@0 2406
michael@0 2407 if (!state.branch.ifFalse->addPredecessor(alloc(), current))
michael@0 2408 return ControlStatus_Error;
michael@0 2409
michael@0 2410 if (!setCurrentAndSpecializePhis(state.branch.ifFalse))
michael@0 2411 return ControlStatus_Error;
michael@0 2412 graph().moveBlockToEnd(current);
michael@0 2413 pc = current->pc();
michael@0 2414 return ControlStatus_Joined;
michael@0 2415 }
michael@0 2416
michael@0 2417 IonBuilder::ControlStatus
michael@0 2418 IonBuilder::processLabelEnd(CFGState &state)
michael@0 2419 {
michael@0 2420 JS_ASSERT(state.state == CFGState::LABEL);
michael@0 2421
michael@0 2422 // If there are no breaks and no current, control flow is terminated.
michael@0 2423 if (!state.label.breaks && !current)
michael@0 2424 return ControlStatus_Ended;
michael@0 2425
michael@0 2426 // If there are no breaks to this label, there's nothing to do.
michael@0 2427 if (!state.label.breaks)
michael@0 2428 return ControlStatus_Joined;
michael@0 2429
michael@0 2430 MBasicBlock *successor = createBreakCatchBlock(state.label.breaks, state.stopAt);
michael@0 2431 if (!successor)
michael@0 2432 return ControlStatus_Error;
michael@0 2433
michael@0 2434 if (current) {
michael@0 2435 current->end(MGoto::New(alloc(), successor));
michael@0 2436 if (!successor->addPredecessor(alloc(), current))
michael@0 2437 return ControlStatus_Error;
michael@0 2438 }
michael@0 2439
michael@0 2440 pc = state.stopAt;
michael@0 2441 if (!setCurrentAndSpecializePhis(successor))
michael@0 2442 return ControlStatus_Error;
michael@0 2443 return ControlStatus_Joined;
michael@0 2444 }
michael@0 2445
michael@0 2446 IonBuilder::ControlStatus
michael@0 2447 IonBuilder::processTryEnd(CFGState &state)
michael@0 2448 {
michael@0 2449 JS_ASSERT(state.state == CFGState::TRY);
michael@0 2450
michael@0 2451 if (!state.try_.successor) {
michael@0 2452 JS_ASSERT(!current);
michael@0 2453 return ControlStatus_Ended;
michael@0 2454 }
michael@0 2455
michael@0 2456 if (current) {
michael@0 2457 current->end(MGoto::New(alloc(), state.try_.successor));
michael@0 2458
michael@0 2459 if (!state.try_.successor->addPredecessor(alloc(), current))
michael@0 2460 return ControlStatus_Error;
michael@0 2461 }
michael@0 2462
michael@0 2463 // Start parsing the code after this try-catch statement.
michael@0 2464 if (!setCurrentAndSpecializePhis(state.try_.successor))
michael@0 2465 return ControlStatus_Error;
michael@0 2466 graph().moveBlockToEnd(current);
michael@0 2467 pc = current->pc();
michael@0 2468 return ControlStatus_Joined;
michael@0 2469 }
michael@0 2470
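// Added note (illustrative): the two searches below distinguish
//
//   outer: for (;;) { for (;;) { break outer; } }   // SRC_BREAK2LABEL
//   for (;;) { if (c) break; }                      // SRC_BREAK
//
// A labeled break is matched against LABEL entries via labels_, while a plain
// break is matched against the exit pc of the enclosing loops via loops_.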
michael@0 2471 IonBuilder::ControlStatus
michael@0 2472 IonBuilder::processBreak(JSOp op, jssrcnote *sn)
michael@0 2473 {
michael@0 2474 JS_ASSERT(op == JSOP_GOTO);
michael@0 2475
michael@0 2476 JS_ASSERT(SN_TYPE(sn) == SRC_BREAK ||
michael@0 2477 SN_TYPE(sn) == SRC_BREAK2LABEL);
michael@0 2478
michael@0 2479 // Find the break target.
michael@0 2480 jsbytecode *target = pc + GetJumpOffset(pc);
michael@0 2481 DebugOnly<bool> found = false;
michael@0 2482
michael@0 2483 if (SN_TYPE(sn) == SRC_BREAK2LABEL) {
michael@0 2484 for (size_t i = labels_.length() - 1; i < labels_.length(); i--) {
michael@0 2485 CFGState &cfg = cfgStack_[labels_[i].cfgEntry];
michael@0 2486 JS_ASSERT(cfg.state == CFGState::LABEL);
michael@0 2487 if (cfg.stopAt == target) {
michael@0 2488 cfg.label.breaks = new(alloc()) DeferredEdge(current, cfg.label.breaks);
michael@0 2489 found = true;
michael@0 2490 break;
michael@0 2491 }
michael@0 2492 }
michael@0 2493 } else {
michael@0 2494 for (size_t i = loops_.length() - 1; i < loops_.length(); i--) {
michael@0 2495 CFGState &cfg = cfgStack_[loops_[i].cfgEntry];
michael@0 2496 JS_ASSERT(cfg.isLoop());
michael@0 2497 if (cfg.loop.exitpc == target) {
michael@0 2498 cfg.loop.breaks = new(alloc()) DeferredEdge(current, cfg.loop.breaks);
michael@0 2499 found = true;
michael@0 2500 break;
michael@0 2501 }
michael@0 2502 }
michael@0 2503 }
michael@0 2504
michael@0 2505 JS_ASSERT(found);
michael@0 2506
michael@0 2507 setCurrent(nullptr);
michael@0 2508 pc += js_CodeSpec[op].length;
michael@0 2509 return processControlEnd();
michael@0 2510 }
michael@0 2511
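// Added note (an interpretation of the helper below): a loop's recorded
// continue pc may itself be a JSOP_GOTO that only forwards to the real
// continuation point. Following that goto lets processContinue() match a
// continue statement against either form of the target.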
michael@0 2512 static inline jsbytecode *
michael@0 2513 EffectiveContinue(jsbytecode *pc)
michael@0 2514 {
michael@0 2515 if (JSOp(*pc) == JSOP_GOTO)
michael@0 2516 return pc + GetJumpOffset(pc);
michael@0 2517 return pc;
michael@0 2518 }
michael@0 2519
michael@0 2520 IonBuilder::ControlStatus
michael@0 2521 IonBuilder::processContinue(JSOp op)
michael@0 2522 {
michael@0 2523 JS_ASSERT(op == JSOP_GOTO);
michael@0 2524
michael@0 2525 // Find the target loop.
michael@0 2526 CFGState *found = nullptr;
michael@0 2527 jsbytecode *target = pc + GetJumpOffset(pc);
michael@0 2528 for (size_t i = loops_.length() - 1; i < loops_.length(); i--) {
michael@0 2529 if (loops_[i].continuepc == target ||
michael@0 2530 EffectiveContinue(loops_[i].continuepc) == target)
michael@0 2531 {
michael@0 2532 found = &cfgStack_[loops_[i].cfgEntry];
michael@0 2533 break;
michael@0 2534 }
michael@0 2535 }
michael@0 2536
michael@0 2537 // There must always be a valid target loop structure. If not, there's
michael@0 2538 // probably an off-by-something error in which pc we track.
michael@0 2539 JS_ASSERT(found);
michael@0 2540 CFGState &state = *found;
michael@0 2541
michael@0 2542 state.loop.continues = new(alloc()) DeferredEdge(current, state.loop.continues);
michael@0 2543
michael@0 2544 setCurrent(nullptr);
michael@0 2545 pc += js_CodeSpec[op].length;
michael@0 2546 return processControlEnd();
michael@0 2547 }
michael@0 2548
michael@0 2549 IonBuilder::ControlStatus
michael@0 2550 IonBuilder::processSwitchBreak(JSOp op)
michael@0 2551 {
michael@0 2552 JS_ASSERT(op == JSOP_GOTO);
michael@0 2553
michael@0 2554 // Find the target switch.
michael@0 2555 CFGState *found = nullptr;
michael@0 2556 jsbytecode *target = pc + GetJumpOffset(pc);
michael@0 2557 for (size_t i = switches_.length() - 1; i < switches_.length(); i--) {
michael@0 2558 if (switches_[i].continuepc == target) {
michael@0 2559 found = &cfgStack_[switches_[i].cfgEntry];
michael@0 2560 break;
michael@0 2561 }
michael@0 2562 }
michael@0 2563
michael@0 2564 // There must always be a valid target switch structure. If not, there's
michael@0 2565 // probably an off-by-something error in which pc we track.
michael@0 2566 JS_ASSERT(found);
michael@0 2567 CFGState &state = *found;
michael@0 2568
michael@0 2569 DeferredEdge **breaks = nullptr;
michael@0 2570 switch (state.state) {
michael@0 2571 case CFGState::TABLE_SWITCH:
michael@0 2572 breaks = &state.tableswitch.breaks;
michael@0 2573 break;
michael@0 2574 case CFGState::COND_SWITCH_BODY:
michael@0 2575 breaks = &state.condswitch.breaks;
michael@0 2576 break;
michael@0 2577 default:
michael@0 2578 MOZ_ASSUME_UNREACHABLE("Unexpected switch state.");
michael@0 2579 }
michael@0 2580
michael@0 2581 *breaks = new(alloc()) DeferredEdge(current, *breaks);
michael@0 2582
michael@0 2583 setCurrent(nullptr);
michael@0 2584 pc += js_CodeSpec[op].length;
michael@0 2585 return processControlEnd();
michael@0 2586 }
michael@0 2587
michael@0 2588 IonBuilder::ControlStatus
michael@0 2589 IonBuilder::processSwitchEnd(DeferredEdge *breaks, jsbytecode *exitpc)
michael@0 2590 {
michael@0 2591 // No break statements, no current.
michael@0 2592 // This means that control flow is cut off from this point
michael@0 2593 // (e.g. all cases have return statements).
michael@0 2594 if (!breaks && !current)
michael@0 2595 return ControlStatus_Ended;
michael@0 2596
michael@0 2597 // Create successor block.
michael@0 2598 // If there are breaks, create a block with the break edges as predecessors.
michael@0 2599 // Otherwise create a block with current as predecessor.
michael@0 2600 MBasicBlock *successor = nullptr;
michael@0 2601 if (breaks)
michael@0 2602 successor = createBreakCatchBlock(breaks, exitpc);
michael@0 2603 else
michael@0 2604 successor = newBlock(current, exitpc);
michael@0 2605
michael@0 2606 if (!successor)
michael@0 2607 return ControlStatus_Error;
michael@0 2608
michael@0 2609 // If there is a current block, it flows into the successor,
michael@0 2610 // so current is also a predecessor of that block.
michael@0 2611 if (current) {
michael@0 2612 current->end(MGoto::New(alloc(), successor));
michael@0 2613 if (breaks) {
michael@0 2614 if (!successor->addPredecessor(alloc(), current))
michael@0 2615 return ControlStatus_Error;
michael@0 2616 }
michael@0 2617 }
michael@0 2618
michael@0 2619 pc = exitpc;
michael@0 2620 if (!setCurrentAndSpecializePhis(successor))
michael@0 2621 return ControlStatus_Error;
michael@0 2622 return ControlStatus_Joined;
michael@0 2623 }
michael@0 2624
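// Added note (illustrative): the opcode/source-note pairs handled below
// correspond to constructs such as
//
//   for (var i = 0; i < n; i++) { ... }   // init popped; the POP carries SRC_FOR
//   for (; c; i++) { ... }                // leading NOP carries SRC_FOR
//   do { ... } while (c);                 // leading NOP carries SRC_WHILE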
michael@0 2625 IonBuilder::ControlStatus
michael@0 2626 IonBuilder::maybeLoop(JSOp op, jssrcnote *sn)
michael@0 2627 {
michael@0 2628 // This function looks at the opcode and source note and tries to
michael@0 2629 // determine the structure of the loop. For some opcodes, like
michael@0 2630 // POP/NOP which are not explicitly control flow, this source note is
michael@0 2631 // optional. For opcodes with control flow, like GOTO, an unrecognized
michael@0 2632 // or not-present source note is a compilation failure.
michael@0 2633 switch (op) {
michael@0 2634 case JSOP_POP:
michael@0 2635 // for (init; ; update?) ...
michael@0 2636 if (sn && SN_TYPE(sn) == SRC_FOR) {
michael@0 2637 current->pop();
michael@0 2638 return forLoop(op, sn);
michael@0 2639 }
michael@0 2640 break;
michael@0 2641
michael@0 2642 case JSOP_NOP:
michael@0 2643 if (sn) {
michael@0 2644 // do { } while (cond)
michael@0 2645 if (SN_TYPE(sn) == SRC_WHILE)
michael@0 2646 return doWhileLoop(op, sn);
michael@0 2649
michael@0 2650 // for (; ; update?)
michael@0 2651 if (SN_TYPE(sn) == SRC_FOR)
michael@0 2652 return forLoop(op, sn);
michael@0 2653 }
michael@0 2654 break;
michael@0 2655
michael@0 2656 default:
michael@0 2657 MOZ_ASSUME_UNREACHABLE("unexpected opcode");
michael@0 2658 }
michael@0 2659
michael@0 2660 return ControlStatus_None;
michael@0 2661 }
michael@0 2662
michael@0 2663 void
michael@0 2664 IonBuilder::assertValidLoopHeadOp(jsbytecode *pc)
michael@0 2665 {
michael@0 2666 #ifdef DEBUG
michael@0 2667 JS_ASSERT(JSOp(*pc) == JSOP_LOOPHEAD);
michael@0 2668
michael@0 2669 // Make sure this is the next opcode after the loop header,
michael@0 2670 // unless the for loop is unconditional.
michael@0 2671 CFGState &state = cfgStack_.back();
michael@0 2672 JS_ASSERT_IF((JSOp)*(state.loop.entry->pc()) == JSOP_GOTO,
michael@0 2673 GetNextPc(state.loop.entry->pc()) == pc);
michael@0 2674
michael@0 2675 // do-while loops have a source note.
michael@0 2676 jssrcnote *sn = info().getNote(gsn, pc);
michael@0 2677 if (sn) {
michael@0 2678 jsbytecode *ifne = pc + js_GetSrcNoteOffset(sn, 0);
michael@0 2679
michael@0 2680 jsbytecode *expected_ifne;
michael@0 2681 switch (state.state) {
michael@0 2682 case CFGState::DO_WHILE_LOOP_BODY:
michael@0 2683 expected_ifne = state.loop.updateEnd;
michael@0 2684 break;
michael@0 2685
michael@0 2686 default:
michael@0 2687 MOZ_ASSUME_UNREACHABLE("JSOP_LOOPHEAD unexpected source note");
michael@0 2688 }
michael@0 2689
michael@0 2690 // Make sure this loop goes to the same ifne as the loop header's
michael@0 2691 // source notes or GOTO.
michael@0 2692 JS_ASSERT(ifne == expected_ifne);
michael@0 2693 } else {
michael@0 2694 JS_ASSERT(state.state != CFGState::DO_WHILE_LOOP_BODY);
michael@0 2695 }
michael@0 2696 #endif
michael@0 2697 }
michael@0 2698
michael@0 2699 IonBuilder::ControlStatus
michael@0 2700 IonBuilder::doWhileLoop(JSOp op, jssrcnote *sn)
michael@0 2701 {
michael@0 2702 // do { } while() loops have the following structure:
michael@0 2703 // NOP ; SRC_WHILE (offset to COND)
michael@0 2704 // LOOPHEAD ; SRC_WHILE (offset to IFNE)
michael@0 2705 // LOOPENTRY
michael@0 2706 // ... ; body
michael@0 2707 // ...
michael@0 2708 // COND ; start of condition
michael@0 2709 // ...
michael@0 2710 // IFNE -> ; goes to LOOPHEAD
michael@0 2711 int condition_offset = js_GetSrcNoteOffset(sn, 0);
michael@0 2712 jsbytecode *conditionpc = pc + condition_offset;
michael@0 2713
michael@0 2714 jssrcnote *sn2 = info().getNote(gsn, pc+1);
michael@0 2715 int offset = js_GetSrcNoteOffset(sn2, 0);
michael@0 2716 jsbytecode *ifne = pc + offset + 1;
michael@0 2717 JS_ASSERT(ifne > pc);
michael@0 2718
michael@0 2719 // Verify that the IFNE goes back to a loophead op.
michael@0 2720 jsbytecode *loopHead = GetNextPc(pc);
michael@0 2721 JS_ASSERT(JSOp(*loopHead) == JSOP_LOOPHEAD);
michael@0 2722 JS_ASSERT(loopHead == ifne + GetJumpOffset(ifne));
michael@0 2723
michael@0 2724 jsbytecode *loopEntry = GetNextPc(loopHead);
michael@0 2725 bool canOsr = LoopEntryCanIonOsr(loopEntry);
michael@0 2726 bool osr = info().hasOsrAt(loopEntry);
michael@0 2727
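    // Added note (see newOsrPreheader() for the details): if Baseline requested
    // on-stack replacement at this LOOPENTRY, a preheader is spliced in so the
    // OSR entry from the Baseline frame and the normal fall-through edge are
    // merged before the pending loop header is created below.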
michael@0 2728 if (osr) {
michael@0 2729 MBasicBlock *preheader = newOsrPreheader(current, loopEntry);
michael@0 2730 if (!preheader)
michael@0 2731 return ControlStatus_Error;
michael@0 2732 current->end(MGoto::New(alloc(), preheader));
michael@0 2733 if (!setCurrentAndSpecializePhis(preheader))
michael@0 2734 return ControlStatus_Error;
michael@0 2735 }
michael@0 2736
michael@0 2737 unsigned stackPhiCount = 0;
michael@0 2738 MBasicBlock *header = newPendingLoopHeader(current, pc, osr, canOsr, stackPhiCount);
michael@0 2739 if (!header)
michael@0 2740 return ControlStatus_Error;
michael@0 2741 current->end(MGoto::New(alloc(), header));
michael@0 2742
michael@0 2743 jsbytecode *loophead = GetNextPc(pc);
michael@0 2744 jsbytecode *bodyStart = GetNextPc(loophead);
michael@0 2745 jsbytecode *bodyEnd = conditionpc;
michael@0 2746 jsbytecode *exitpc = GetNextPc(ifne);
michael@0 2747 if (!analyzeNewLoopTypes(header, bodyStart, exitpc))
michael@0 2748 return ControlStatus_Error;
michael@0 2749 if (!pushLoop(CFGState::DO_WHILE_LOOP_BODY, conditionpc, header, osr,
michael@0 2750 loopHead, bodyStart, bodyStart, bodyEnd, exitpc, conditionpc))
michael@0 2751 {
michael@0 2752 return ControlStatus_Error;
michael@0 2753 }
michael@0 2754
michael@0 2755 CFGState &state = cfgStack_.back();
michael@0 2756 state.loop.updatepc = conditionpc;
michael@0 2757 state.loop.updateEnd = ifne;
michael@0 2758
michael@0 2759 if (!setCurrentAndSpecializePhis(header))
michael@0 2760 return ControlStatus_Error;
michael@0 2761 if (!jsop_loophead(loophead))
michael@0 2762 return ControlStatus_Error;
michael@0 2763
michael@0 2764 pc = bodyStart;
michael@0 2765 return ControlStatus_Jumped;
michael@0 2766 }
michael@0 2767
michael@0 2768 IonBuilder::ControlStatus
michael@0 2769 IonBuilder::whileOrForInLoop(jssrcnote *sn)
michael@0 2770 {
michael@0 2771 // while (cond) { } loops have the following structure:
michael@0 2772 // GOTO cond ; SRC_WHILE (offset to IFNE)
michael@0 2773 // LOOPHEAD
michael@0 2774 // ...
michael@0 2775 // cond:
michael@0 2776 // LOOPENTRY
michael@0 2777 // ...
michael@0 2778 // IFNE ; goes to LOOPHEAD
michael@0 2779 // for (x in y) { } loops are similar; the cond will be a MOREITER.
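// Editorial note (illustrative sketch): sources along the lines of
//
//   while (f(i))
//       i++;
//
//   for (var x in obj)
//       g(x);
//
// are expected to produce the shape above; the initial GOTO jumps forward to
// the condition, and the IFNE at the end jumps back to LOOPHEAD.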
michael@0 2780 JS_ASSERT(SN_TYPE(sn) == SRC_FOR_OF || SN_TYPE(sn) == SRC_FOR_IN || SN_TYPE(sn) == SRC_WHILE);
michael@0 2781 int ifneOffset = js_GetSrcNoteOffset(sn, 0);
michael@0 2782 jsbytecode *ifne = pc + ifneOffset;
michael@0 2783 JS_ASSERT(ifne > pc);
michael@0 2784
michael@0 2785 // Verify that the IFNE goes back to a loophead op.
michael@0 2786 JS_ASSERT(JSOp(*GetNextPc(pc)) == JSOP_LOOPHEAD);
michael@0 2787 JS_ASSERT(GetNextPc(pc) == ifne + GetJumpOffset(ifne));
michael@0 2788
michael@0 2789 jsbytecode *loopEntry = pc + GetJumpOffset(pc);
michael@0 2790 bool canOsr = LoopEntryCanIonOsr(loopEntry);
michael@0 2791 bool osr = info().hasOsrAt(loopEntry);
michael@0 2792
michael@0 2793 if (osr) {
michael@0 2794 MBasicBlock *preheader = newOsrPreheader(current, loopEntry);
michael@0 2795 if (!preheader)
michael@0 2796 return ControlStatus_Error;
michael@0 2797 current->end(MGoto::New(alloc(), preheader));
michael@0 2798 if (!setCurrentAndSpecializePhis(preheader))
michael@0 2799 return ControlStatus_Error;
michael@0 2800 }
michael@0 2801
michael@0 2802 unsigned stackPhiCount;
michael@0 2803 if (SN_TYPE(sn) == SRC_FOR_OF)
michael@0 2804 stackPhiCount = 2;
michael@0 2805 else if (SN_TYPE(sn) == SRC_FOR_IN)
michael@0 2806 stackPhiCount = 1;
michael@0 2807 else
michael@0 2808 stackPhiCount = 0;
michael@0 2809
michael@0 2810 MBasicBlock *header = newPendingLoopHeader(current, pc, osr, canOsr, stackPhiCount);
michael@0 2811 if (!header)
michael@0 2812 return ControlStatus_Error;
michael@0 2813 current->end(MGoto::New(alloc(), header));
michael@0 2814
michael@0 2815 // Skip past the JSOP_LOOPHEAD for the body start.
michael@0 2816 jsbytecode *loopHead = GetNextPc(pc);
michael@0 2817 jsbytecode *bodyStart = GetNextPc(loopHead);
michael@0 2818 jsbytecode *bodyEnd = pc + GetJumpOffset(pc);
michael@0 2819 jsbytecode *exitpc = GetNextPc(ifne);
michael@0 2820 if (!analyzeNewLoopTypes(header, bodyStart, exitpc))
michael@0 2821 return ControlStatus_Error;
michael@0 2822 if (!pushLoop(CFGState::WHILE_LOOP_COND, ifne, header, osr,
michael@0 2823 loopHead, bodyEnd, bodyStart, bodyEnd, exitpc))
michael@0 2824 {
michael@0 2825 return ControlStatus_Error;
michael@0 2826 }
michael@0 2827
michael@0 2828 // Parse the condition first.
michael@0 2829 if (!setCurrentAndSpecializePhis(header))
michael@0 2830 return ControlStatus_Error;
michael@0 2831 if (!jsop_loophead(loopHead))
michael@0 2832 return ControlStatus_Error;
michael@0 2833
michael@0 2834 pc = bodyEnd;
michael@0 2835 return ControlStatus_Jumped;
michael@0 2836 }
michael@0 2837
michael@0 2838 IonBuilder::ControlStatus
michael@0 2839 IonBuilder::forLoop(JSOp op, jssrcnote *sn)
michael@0 2840 {
michael@0 2841 // Skip the NOP or POP.
michael@0 2842 JS_ASSERT(op == JSOP_POP || op == JSOP_NOP);
michael@0 2843 pc = GetNextPc(pc);
michael@0 2844
michael@0 2845 jsbytecode *condpc = pc + js_GetSrcNoteOffset(sn, 0);
michael@0 2846 jsbytecode *updatepc = pc + js_GetSrcNoteOffset(sn, 1);
michael@0 2847 jsbytecode *ifne = pc + js_GetSrcNoteOffset(sn, 2);
michael@0 2848 jsbytecode *exitpc = GetNextPc(ifne);
michael@0 2849
michael@0 2850 // for loops have the following structures:
michael@0 2851 //
michael@0 2852 // NOP or POP
michael@0 2853 // [GOTO cond | NOP]
michael@0 2854 // LOOPHEAD
michael@0 2855 // body:
michael@0 2856 // ; [body]
michael@0 2857 // [increment:]
michael@0 2858 // ; [increment]
michael@0 2859 // [cond:]
michael@0 2860 // LOOPENTRY
michael@0 2861 // GOTO body
michael@0 2862 //
michael@0 2863 // If there is a condition (condpc != ifne), this acts similarly to a while
michael@0 2864 // loop; otherwise, it acts like a do-while loop.
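// Editorial note (illustrative sketch): a loop such as
//
//   for (var i = 0; i < n; i++)
//       f(i);
//
// has a condition (condpc != ifne) and takes the GOTO-cond shape, whereas
//
//   for (var i = 0; ; i++)
//       f(i);
//
// has no condition (condpc == ifne) and behaves like a do-while loop.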
michael@0 2865 jsbytecode *bodyStart = pc;
michael@0 2866 jsbytecode *bodyEnd = updatepc;
michael@0 2867 jsbytecode *loopEntry = condpc;
michael@0 2868 if (condpc != ifne) {
michael@0 2869 JS_ASSERT(JSOp(*bodyStart) == JSOP_GOTO);
michael@0 2870 JS_ASSERT(bodyStart + GetJumpOffset(bodyStart) == condpc);
michael@0 2871 bodyStart = GetNextPc(bodyStart);
michael@0 2872 } else {
michael@0 2873 // No loop condition, such as for(j = 0; ; j++)
michael@0 2874 if (op != JSOP_NOP) {
michael@0 2875 // If the loop starts with POP, we have to skip a NOP.
michael@0 2876 JS_ASSERT(JSOp(*bodyStart) == JSOP_NOP);
michael@0 2877 bodyStart = GetNextPc(bodyStart);
michael@0 2878 }
michael@0 2879 loopEntry = GetNextPc(bodyStart);
michael@0 2880 }
michael@0 2881 jsbytecode *loopHead = bodyStart;
michael@0 2882 JS_ASSERT(JSOp(*bodyStart) == JSOP_LOOPHEAD);
michael@0 2883 JS_ASSERT(ifne + GetJumpOffset(ifne) == bodyStart);
michael@0 2884 bodyStart = GetNextPc(bodyStart);
michael@0 2885
michael@0 2886 bool osr = info().hasOsrAt(loopEntry);
michael@0 2887 bool canOsr = LoopEntryCanIonOsr(loopEntry);
michael@0 2888
michael@0 2889 if (osr) {
michael@0 2890 MBasicBlock *preheader = newOsrPreheader(current, loopEntry);
michael@0 2891 if (!preheader)
michael@0 2892 return ControlStatus_Error;
michael@0 2893 current->end(MGoto::New(alloc(), preheader));
michael@0 2894 if (!setCurrentAndSpecializePhis(preheader))
michael@0 2895 return ControlStatus_Error;
michael@0 2896 }
michael@0 2897
michael@0 2898 unsigned stackPhiCount = 0;
michael@0 2899 MBasicBlock *header = newPendingLoopHeader(current, pc, osr, canOsr, stackPhiCount);
michael@0 2900 if (!header)
michael@0 2901 return ControlStatus_Error;
michael@0 2902 current->end(MGoto::New(alloc(), header));
michael@0 2903
michael@0 2904 // If there is no condition, we immediately parse the body. Otherwise, we
michael@0 2905 // parse the condition.
michael@0 2906 jsbytecode *stopAt;
michael@0 2907 CFGState::State initial;
michael@0 2908 if (condpc != ifne) {
michael@0 2909 pc = condpc;
michael@0 2910 stopAt = ifne;
michael@0 2911 initial = CFGState::FOR_LOOP_COND;
michael@0 2912 } else {
michael@0 2913 pc = bodyStart;
michael@0 2914 stopAt = bodyEnd;
michael@0 2915 initial = CFGState::FOR_LOOP_BODY;
michael@0 2916 }
michael@0 2917
michael@0 2918 if (!analyzeNewLoopTypes(header, bodyStart, exitpc))
michael@0 2919 return ControlStatus_Error;
michael@0 2920 if (!pushLoop(initial, stopAt, header, osr,
michael@0 2921 loopHead, pc, bodyStart, bodyEnd, exitpc, updatepc))
michael@0 2922 {
michael@0 2923 return ControlStatus_Error;
michael@0 2924 }
michael@0 2925
michael@0 2926 CFGState &state = cfgStack_.back();
michael@0 2927 state.loop.condpc = (condpc != ifne) ? condpc : nullptr;
michael@0 2928 state.loop.updatepc = (updatepc != condpc) ? updatepc : nullptr;
michael@0 2929 if (state.loop.updatepc)
michael@0 2930 state.loop.updateEnd = condpc;
michael@0 2931
michael@0 2932 if (!setCurrentAndSpecializePhis(header))
michael@0 2933 return ControlStatus_Error;
michael@0 2934 if (!jsop_loophead(loopHead))
michael@0 2935 return ControlStatus_Error;
michael@0 2936
michael@0 2937 return ControlStatus_Jumped;
michael@0 2938 }
michael@0 2939
michael@0 2940 int
michael@0 2941 IonBuilder::CmpSuccessors(const void *a, const void *b)
michael@0 2942 {
michael@0 2943 const MBasicBlock *a0 = * (MBasicBlock * const *)a;
michael@0 2944 const MBasicBlock *b0 = * (MBasicBlock * const *)b;
michael@0 2945 if (a0->pc() == b0->pc())
michael@0 2946 return 0;
michael@0 2947
michael@0 2948 return (a0->pc() > b0->pc()) ? 1 : -1;
michael@0 2949 }
michael@0 2950
michael@0 2951 IonBuilder::ControlStatus
michael@0 2952 IonBuilder::tableSwitch(JSOp op, jssrcnote *sn)
michael@0 2953 {
michael@0 2954 // TableSwitch op contains the following data
michael@0 2955 // (each entry below is JUMP_OFFSET_LEN bytes long)
michael@0 2956 //
michael@0 2957 // 0: Offset of default case
michael@0 2958 // 1: Lowest number in tableswitch
michael@0 2959 // 2: Highest number in tableswitch
michael@0 2960 // 3: Offset of case low
michael@0 2961 // 4: Offset of case low+1
michael@0 2962 // .: ...
michael@0 2963 // .: Offset of case high
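// Editorial note (illustrative sketch): a dense integer switch such as
//
//   switch (x) {
//     case 0: f(); break;
//     case 1: g(); break;
//     case 2: h(); break;
//   }
//
// is expected to compile to a tableswitch with low == 0, high == 2 and one
// case offset per value in [low, high], following the default offset.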
michael@0 2964
michael@0 2965 JS_ASSERT(op == JSOP_TABLESWITCH);
michael@0 2966 JS_ASSERT(SN_TYPE(sn) == SRC_TABLESWITCH);
michael@0 2967
michael@0 2968 // Pop input.
michael@0 2969 MDefinition *ins = current->pop();
michael@0 2970
michael@0 2971 // Get the default and exit pc
michael@0 2972 jsbytecode *exitpc = pc + js_GetSrcNoteOffset(sn, 0);
michael@0 2973 jsbytecode *defaultpc = pc + GET_JUMP_OFFSET(pc);
michael@0 2974
michael@0 2975 JS_ASSERT(defaultpc > pc && defaultpc <= exitpc);
michael@0 2976
michael@0 2977 // Get the low and high from the tableswitch
michael@0 2978 jsbytecode *pc2 = pc;
michael@0 2979 pc2 += JUMP_OFFSET_LEN;
michael@0 2980 int low = GET_JUMP_OFFSET(pc2);
michael@0 2981 pc2 += JUMP_OFFSET_LEN;
michael@0 2982 int high = GET_JUMP_OFFSET(pc2);
michael@0 2983 pc2 += JUMP_OFFSET_LEN;
michael@0 2984
michael@0 2985 // Create MIR instruction
michael@0 2986 MTableSwitch *tableswitch = MTableSwitch::New(alloc(), ins, low, high);
michael@0 2987
michael@0 2988 // Create default case
michael@0 2989 MBasicBlock *defaultcase = newBlock(current, defaultpc);
michael@0 2990 if (!defaultcase)
michael@0 2991 return ControlStatus_Error;
michael@0 2992 tableswitch->addDefault(defaultcase);
michael@0 2993 tableswitch->addBlock(defaultcase);
michael@0 2994
michael@0 2995 // Create cases
michael@0 2996 jsbytecode *casepc = nullptr;
michael@0 2997 for (int i = 0; i < high-low+1; i++) {
michael@0 2998 casepc = pc + GET_JUMP_OFFSET(pc2);
michael@0 2999
michael@0 3000 JS_ASSERT(casepc >= pc && casepc <= exitpc);
michael@0 3001
michael@0 3002 MBasicBlock *caseblock = newBlock(current, casepc);
michael@0 3003 if (!caseblock)
michael@0 3004 return ControlStatus_Error;
michael@0 3005
michael@0 3006 // If the casepc equals the current pc, it is not a written case,
michael@0 3007 // but a filled gap. That way we can use a tableswitch instead of a
michael@0 3008 // condswitch, even if not all case numbers are consecutive.
michael@0 3009 // In that case this block goes to the default case.
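// Editorial note (illustrative sketch): e.g. in
//
//   switch (x) { case 0: f(); break; case 2: g(); break; }
//
// case 1 is a filled gap; its block simply jumps to the default case.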
michael@0 3010 if (casepc == pc) {
michael@0 3011 caseblock->end(MGoto::New(alloc(), defaultcase));
michael@0 3012 if (!defaultcase->addPredecessor(alloc(), caseblock))
michael@0 3013 return ControlStatus_Error;
michael@0 3014 }
michael@0 3015
michael@0 3016 tableswitch->addCase(tableswitch->addSuccessor(caseblock));
michael@0 3017
michael@0 3018 // If this is an actual case (not a filled gap),
michael@0 3019 // add this block to the list of blocks that still need to be processed.
michael@0 3020 if (casepc != pc)
michael@0 3021 tableswitch->addBlock(caseblock);
michael@0 3022
michael@0 3023 pc2 += JUMP_OFFSET_LEN;
michael@0 3024 }
michael@0 3025
michael@0 3026 // Move defaultcase to the end, to maintain RPO.
michael@0 3027 graph().moveBlockToEnd(defaultcase);
michael@0 3028
michael@0 3029 JS_ASSERT(tableswitch->numCases() == (uint32_t)(high - low + 1));
michael@0 3030 JS_ASSERT(tableswitch->numSuccessors() > 0);
michael@0 3031
michael@0 3032 // Sort the list of blocks that still need to be processed by pc.
michael@0 3033 qsort(tableswitch->blocks(), tableswitch->numBlocks(),
michael@0 3034 sizeof(MBasicBlock*), CmpSuccessors);
michael@0 3035
michael@0 3036 // Create info
michael@0 3037 ControlFlowInfo switchinfo(cfgStack_.length(), exitpc);
michael@0 3038 if (!switches_.append(switchinfo))
michael@0 3039 return ControlStatus_Error;
michael@0 3040
michael@0 3041 // Create a CFG state to keep track of the switch while its case blocks are processed.
michael@0 3042 CFGState state = CFGState::TableSwitch(exitpc, tableswitch);
michael@0 3043
michael@0 3044 // Save the MIR instruction as last instruction of this block.
michael@0 3045 current->end(tableswitch);
michael@0 3046
michael@0 3047 // If there is only one successor, the block should stop at the end of the switch.
michael@0 3048 // Otherwise it should stop at the start of the next successor.
michael@0 3049 if (tableswitch->numBlocks() > 1)
michael@0 3050 state.stopAt = tableswitch->getBlock(1)->pc();
michael@0 3051 if (!setCurrentAndSpecializePhis(tableswitch->getBlock(0)))
michael@0 3052 return ControlStatus_Error;
michael@0 3053
michael@0 3054 if (!cfgStack_.append(state))
michael@0 3055 return ControlStatus_Error;
michael@0 3056
michael@0 3057 pc = current->pc();
michael@0 3058 return ControlStatus_Jumped;
michael@0 3059 }
michael@0 3060
michael@0 3061 bool
michael@0 3062 IonBuilder::filterTypesAtTest(MTest *test)
michael@0 3063 {
michael@0 3064 JS_ASSERT(test->ifTrue() == current || test->ifFalse() == current);
michael@0 3065
michael@0 3066 bool trueBranch = test->ifTrue() == current;
michael@0 3067
michael@0 3068 MDefinition *subject = nullptr;
michael@0 3069 bool removeUndefined;
michael@0 3070 bool removeNull;
michael@0 3071
michael@0 3072 test->filtersUndefinedOrNull(trueBranch, &subject, &removeUndefined, &removeNull);
michael@0 3073
michael@0 3074 // The test does not filter out undefined or null.
michael@0 3075 if (!subject)
michael@0 3076 return true;
michael@0 3077
michael@0 3078 // There is no TypeSet that can get filtered.
michael@0 3079 if (!subject->resultTypeSet() || subject->resultTypeSet()->unknown())
michael@0 3080 return true;
michael@0 3081
michael@0 3082 // Only do this optimization if the typeset does contain null or undefined.
michael@0 3083 if ((!(removeUndefined && subject->resultTypeSet()->hasType(types::Type::UndefinedType())) &&
michael@0 3084 !(removeNull && subject->resultTypeSet()->hasType(types::Type::NullType()))))
michael@0 3085 {
michael@0 3086 return true;
michael@0 3087 }
michael@0 3088
michael@0 3089 // Find all values on the stack that correspond to the subject
michael@0 3090 // and replace them with a MIR carrying the filtered TypeSet information.
michael@0 3091 // Create the replacement MIR lazily upon first occurrence.
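// Editorial note (illustrative sketch): e.g. in
//
//   if (x != null)
//       use(x);   // inside the branch x cannot be null or undefined
//
// every stack slot holding x in the true branch is remapped below to a
// definition carrying the filtered TypeSet.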
michael@0 3092 MDefinition *replace = nullptr;
michael@0 3093 for (uint32_t i = 0; i < current->stackDepth(); i++) {
michael@0 3094 if (current->getSlot(i) != subject)
michael@0 3095 continue;
michael@0 3096
michael@0 3097 // Create the replacement MIR with the filtered TypeSet.
michael@0 3098 if (!replace) {
michael@0 3099 types::TemporaryTypeSet *type =
michael@0 3100 subject->resultTypeSet()->filter(alloc_->lifoAlloc(), removeUndefined,
michael@0 3101 removeNull);
michael@0 3102 if (!type)
michael@0 3103 return false;
michael@0 3104
michael@0 3105 replace = ensureDefiniteTypeSet(subject, type);
michael@0 3106 // To make sure we don't hoist it above the MTest, we use the
michael@0 3107 // 'dependency' field of an MInstruction. This field is normally used
michael@0 3108 // by Alias Analysis, but it won't get overwritten here, since this
michael@0 3109 // instruction doesn't have an AliasSet.
michael@0 3110 replace->setDependency(test);
michael@0 3111 }
michael@0 3112
michael@0 3113 current->setSlot(i, replace);
michael@0 3114 }
michael@0 3115
michael@0 3116 return true;
michael@0 3117 }
michael@0 3118
michael@0 3119 bool
michael@0 3120 IonBuilder::jsop_label()
michael@0 3121 {
michael@0 3122 JS_ASSERT(JSOp(*pc) == JSOP_LABEL);
michael@0 3123
michael@0 3124 jsbytecode *endpc = pc + GET_JUMP_OFFSET(pc);
michael@0 3125 JS_ASSERT(endpc > pc);
michael@0 3126
michael@0 3127 ControlFlowInfo label(cfgStack_.length(), endpc);
michael@0 3128 if (!labels_.append(label))
michael@0 3129 return false;
michael@0 3130
michael@0 3131 return cfgStack_.append(CFGState::Label(endpc));
michael@0 3132 }
michael@0 3133
michael@0 3134 bool
michael@0 3135 IonBuilder::jsop_condswitch()
michael@0 3136 {
michael@0 3137 // CondSwitch op looks as follows:
michael@0 3138 // condswitch [length +exit_pc; first case offset +next-case ]
michael@0 3139 // {
michael@0 3140 // {
michael@0 3141 // ... any code ...
michael@0 3142 // case (+jump) [pcdelta offset +next-case]
michael@0 3143 // }+
michael@0 3144 // default (+jump)
michael@0 3145 // ... jump targets ...
michael@0 3146 // }
michael@0 3147 //
michael@0 3148 // The default case is always emitted even if there is no default case in
michael@0 3149 // the source. The pcdelta source note of the last case statement might
michael@0 3150 // have a 0 offset (though not always).
michael@0 3151 //
michael@0 3152 // A conditional switch evaluates the condition of each case and compares
michael@0 3153 // it to the switch value with a strict equality. Case conditions are
michael@0 3154 // iterated linearly until one matches. If a case succeeds, the flow jumps
michael@0 3155 // into the corresponding body block. The body block might alias others and
michael@0 3156 // might fall through into the next body block if it is not terminated
michael@0 3157 // with a break.
michael@0 3158 //
michael@0 3159 // Algorithm:
michael@0 3160 // 1/ Loop over the case chain to reach the default target
michael@0 3161 //    & estimate the number of unique bodies.
michael@0 3162 // 2/ Generate code for all cases (see processCondSwitchCase).
michael@0 3163 // 3/ Generate code for all bodies (see processCondSwitchBody).
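// Editorial note (illustrative sketch): a switch that cannot be compiled as
// a table switch, e.g. because its case labels are not a dense range of
// integer constants:
//
//   switch (x) {
//     case a:   f(); break;
//     case "s": g();        // falls through into the default body
//     default:  h();
//   }
//
// is expected to be emitted as JSOP_CONDSWITCH and handled by the code below.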
michael@0 3164
michael@0 3165 JS_ASSERT(JSOp(*pc) == JSOP_CONDSWITCH);
michael@0 3166 jssrcnote *sn = info().getNote(gsn, pc);
michael@0 3167 JS_ASSERT(SN_TYPE(sn) == SRC_CONDSWITCH);
michael@0 3168
michael@0 3169 // Get the exit pc
michael@0 3170 jsbytecode *exitpc = pc + js_GetSrcNoteOffset(sn, 0);
michael@0 3171 jsbytecode *firstCase = pc + js_GetSrcNoteOffset(sn, 1);
michael@0 3172
michael@0 3173 // Iterate all cases in the conditional switch.
michael@0 3174 // - Stop at the default case. (always emitted after the last case)
michael@0 3175 // - Estimate the number of unique bodies. This estimate might be off by 1
michael@0 3176 //   if the default body aliases a case body.
michael@0 3177 jsbytecode *curCase = firstCase;
michael@0 3178 jsbytecode *lastTarget = GetJumpOffset(curCase) + curCase;
michael@0 3179 size_t nbBodies = 2; // default target and the first body.
michael@0 3180
michael@0 3181 JS_ASSERT(pc < curCase && curCase <= exitpc);
michael@0 3182 while (JSOp(*curCase) == JSOP_CASE) {
michael@0 3183 // Fetch the next case.
michael@0 3184 jssrcnote *caseSn = info().getNote(gsn, curCase);
michael@0 3185 JS_ASSERT(caseSn && SN_TYPE(caseSn) == SRC_NEXTCASE);
michael@0 3186 ptrdiff_t off = js_GetSrcNoteOffset(caseSn, 0);
michael@0 3187 curCase = off ? curCase + off : GetNextPc(curCase);
michael@0 3188 JS_ASSERT(pc < curCase && curCase <= exitpc);
michael@0 3189
michael@0 3190 // Count non-aliased cases.
michael@0 3191 jsbytecode *curTarget = GetJumpOffset(curCase) + curCase;
michael@0 3192 if (lastTarget < curTarget)
michael@0 3193 nbBodies++;
michael@0 3194 lastTarget = curTarget;
michael@0 3195 }
michael@0 3196
michael@0 3197 // The current case should now be the default case, which jumps to the body
michael@0 3198 // of the default case; that body might be behind the last target.
michael@0 3199 JS_ASSERT(JSOp(*curCase) == JSOP_DEFAULT);
michael@0 3200 jsbytecode *defaultTarget = GetJumpOffset(curCase) + curCase;
michael@0 3201 JS_ASSERT(curCase < defaultTarget && defaultTarget <= exitpc);
michael@0 3202
michael@0 3203 // Allocate the current graph state.
michael@0 3204 CFGState state = CFGState::CondSwitch(this, exitpc, defaultTarget);
michael@0 3205 if (!state.condswitch.bodies || !state.condswitch.bodies->init(alloc(), nbBodies))
michael@0 3206 return false;
michael@0 3207
michael@0 3208 // We loop on case conditions with processCondSwitchCase.
michael@0 3209 JS_ASSERT(JSOp(*firstCase) == JSOP_CASE);
michael@0 3210 state.stopAt = firstCase;
michael@0 3211 state.state = CFGState::COND_SWITCH_CASE;
michael@0 3212
michael@0 3213 return cfgStack_.append(state);
michael@0 3214 }
michael@0 3215
michael@0 3216 IonBuilder::CFGState
michael@0 3217 IonBuilder::CFGState::CondSwitch(IonBuilder *builder, jsbytecode *exitpc, jsbytecode *defaultTarget)
michael@0 3218 {
michael@0 3219 CFGState state;
michael@0 3220 state.state = COND_SWITCH_CASE;
michael@0 3221 state.stopAt = nullptr;
michael@0 3222 state.condswitch.bodies = (FixedList<MBasicBlock *> *)builder->alloc_->allocate(
michael@0 3223 sizeof(FixedList<MBasicBlock *>));
michael@0 3224 state.condswitch.currentIdx = 0;
michael@0 3225 state.condswitch.defaultTarget = defaultTarget;
michael@0 3226 state.condswitch.defaultIdx = uint32_t(-1);
michael@0 3227 state.condswitch.exitpc = exitpc;
michael@0 3228 state.condswitch.breaks = nullptr;
michael@0 3229 return state;
michael@0 3230 }
michael@0 3231
michael@0 3232 IonBuilder::CFGState
michael@0 3233 IonBuilder::CFGState::Label(jsbytecode *exitpc)
michael@0 3234 {
michael@0 3235 CFGState state;
michael@0 3236 state.state = LABEL;
michael@0 3237 state.stopAt = exitpc;
michael@0 3238 state.label.breaks = nullptr;
michael@0 3239 return state;
michael@0 3240 }
michael@0 3241
michael@0 3242 IonBuilder::CFGState
michael@0 3243 IonBuilder::CFGState::Try(jsbytecode *exitpc, MBasicBlock *successor)
michael@0 3244 {
michael@0 3245 CFGState state;
michael@0 3246 state.state = TRY;
michael@0 3247 state.stopAt = exitpc;
michael@0 3248 state.try_.successor = successor;
michael@0 3249 return state;
michael@0 3250 }
michael@0 3251
michael@0 3252 IonBuilder::ControlStatus
michael@0 3253 IonBuilder::processCondSwitchCase(CFGState &state)
michael@0 3254 {
michael@0 3255 JS_ASSERT(state.state == CFGState::COND_SWITCH_CASE);
michael@0 3256 JS_ASSERT(!state.condswitch.breaks);
michael@0 3257 JS_ASSERT(current);
michael@0 3258 JS_ASSERT(JSOp(*pc) == JSOP_CASE);
michael@0 3259 FixedList<MBasicBlock *> &bodies = *state.condswitch.bodies;
michael@0 3260 jsbytecode *defaultTarget = state.condswitch.defaultTarget;
michael@0 3261 uint32_t &currentIdx = state.condswitch.currentIdx;
michael@0 3262 jsbytecode *lastTarget = currentIdx ? bodies[currentIdx - 1]->pc() : nullptr;
michael@0 3263
michael@0 3264 // Fetch the following case in which we will continue.
michael@0 3265 jssrcnote *sn = info().getNote(gsn, pc);
michael@0 3266 ptrdiff_t off = js_GetSrcNoteOffset(sn, 0);
michael@0 3267 jsbytecode *casePc = off ? pc + off : GetNextPc(pc);
michael@0 3268 bool caseIsDefault = JSOp(*casePc) == JSOP_DEFAULT;
michael@0 3269 JS_ASSERT(JSOp(*casePc) == JSOP_CASE || caseIsDefault);
michael@0 3270
michael@0 3271 // Allocate the block of the matching case.
michael@0 3272 bool bodyIsNew = false;
michael@0 3273 MBasicBlock *bodyBlock = nullptr;
michael@0 3274 jsbytecode *bodyTarget = pc + GetJumpOffset(pc);
michael@0 3275 if (lastTarget < bodyTarget) {
michael@0 3276 // If the default body is in the middle or aliases the current target.
michael@0 3277 if (lastTarget < defaultTarget && defaultTarget <= bodyTarget) {
michael@0 3278 JS_ASSERT(state.condswitch.defaultIdx == uint32_t(-1));
michael@0 3279 state.condswitch.defaultIdx = currentIdx;
michael@0 3280 bodies[currentIdx] = nullptr;
michael@0 3281 // If the default body does not alias any case body, it will be allocated
michael@0 3282 // later and stored in the defaultIdx slot.
michael@0 3283 if (defaultTarget < bodyTarget)
michael@0 3284 currentIdx++;
michael@0 3285 }
michael@0 3286
michael@0 3287 bodyIsNew = true;
michael@0 3288 // Pop switch and case operands.
michael@0 3289 bodyBlock = newBlockPopN(current, bodyTarget, 2);
michael@0 3290 bodies[currentIdx++] = bodyBlock;
michael@0 3291 } else {
michael@0 3292 // This body aliases the previous one.
michael@0 3293 JS_ASSERT(lastTarget == bodyTarget);
michael@0 3294 JS_ASSERT(currentIdx > 0);
michael@0 3295 bodyBlock = bodies[currentIdx - 1];
michael@0 3296 }
michael@0 3297
michael@0 3298 if (!bodyBlock)
michael@0 3299 return ControlStatus_Error;
michael@0 3300
michael@0 3301 lastTarget = bodyTarget;
michael@0 3302
michael@0 3303 // Allocate the block of the non-matching case. This can either be a normal
michael@0 3304 // case or the default case.
michael@0 3305 bool caseIsNew = false;
michael@0 3306 MBasicBlock *caseBlock = nullptr;
michael@0 3307 if (!caseIsDefault) {
michael@0 3308 caseIsNew = true;
michael@0 3309 // Pop the case operand.
michael@0 3310 caseBlock = newBlockPopN(current, GetNextPc(pc), 1);
michael@0 3311 } else {
michael@0 3312 // The non-matching case is the default case, which jumps directly to its
michael@0 3313 // body. Skip the creation of a default case block and directly create
michael@0 3314 // the default body if it does not alias any previous body.
michael@0 3315
michael@0 3316 if (state.condswitch.defaultIdx == uint32_t(-1)) {
michael@0 3317 // The default target is the last target.
michael@0 3318 JS_ASSERT(lastTarget < defaultTarget);
michael@0 3319 state.condswitch.defaultIdx = currentIdx++;
michael@0 3320 caseIsNew = true;
michael@0 3321 } else if (bodies[state.condswitch.defaultIdx] == nullptr) {
michael@0 3322 // The default target is in the middle and it does not alias any
michael@0 3323 // case target.
michael@0 3324 JS_ASSERT(defaultTarget < lastTarget);
michael@0 3325 caseIsNew = true;
michael@0 3326 } else {
michael@0 3327 // The default target is in the middle and it aliases a case target.
michael@0 3328 JS_ASSERT(defaultTarget <= lastTarget);
michael@0 3329 caseBlock = bodies[state.condswitch.defaultIdx];
michael@0 3330 }
michael@0 3331
michael@0 3332 // Allocate and register the default body.
michael@0 3333 if (caseIsNew) {
michael@0 3334 // Pop the case & switch operands.
michael@0 3335 caseBlock = newBlockPopN(current, defaultTarget, 2);
michael@0 3336 bodies[state.condswitch.defaultIdx] = caseBlock;
michael@0 3337 }
michael@0 3338 }
michael@0 3339
michael@0 3340 if (!caseBlock)
michael@0 3341 return ControlStatus_Error;
michael@0 3342
michael@0 3343 // Terminate the last case condition block by emitting the code
michael@0 3344 // corresponding to JSOP_CASE bytecode.
michael@0 3345 if (bodyBlock != caseBlock) {
michael@0 3346 MDefinition *caseOperand = current->pop();
michael@0 3347 MDefinition *switchOperand = current->peek(-1);
michael@0 3348 MCompare *cmpResult = MCompare::New(alloc(), switchOperand, caseOperand, JSOP_STRICTEQ);
michael@0 3349 cmpResult->infer(inspector, pc);
michael@0 3350 JS_ASSERT(!cmpResult->isEffectful());
michael@0 3351 current->add(cmpResult);
michael@0 3352 current->end(MTest::New(alloc(), cmpResult, bodyBlock, caseBlock));
michael@0 3353
michael@0 3354 // Add last case as predecessor of the body if the body is aliasing
michael@0 3355 // the previous case body.
michael@0 3356 if (!bodyIsNew && !bodyBlock->addPredecessorPopN(alloc(), current, 1))
michael@0 3357 return ControlStatus_Error;
michael@0 3358
michael@0 3359 // Add last case as predecessor of the non-matching case if the
michael@0 3360 // non-matching case is an aliased default case. We need to pop the
michael@0 3361 // switch operand as we skip the default case block and use the default
michael@0 3362 // body block directly.
michael@0 3363 JS_ASSERT_IF(!caseIsNew, caseIsDefault);
michael@0 3364 if (!caseIsNew && !caseBlock->addPredecessorPopN(alloc(), current, 1))
michael@0 3365 return ControlStatus_Error;
michael@0 3366 } else {
michael@0 3367 // The default case aliases the last case body.
michael@0 3368 JS_ASSERT(caseIsDefault);
michael@0 3369 current->pop(); // Case operand
michael@0 3370 current->pop(); // Switch operand
michael@0 3371 current->end(MGoto::New(alloc(), bodyBlock));
michael@0 3372 if (!bodyIsNew && !bodyBlock->addPredecessor(alloc(), current))
michael@0 3373 return ControlStatus_Error;
michael@0 3374 }
michael@0 3375
michael@0 3376 if (caseIsDefault) {
michael@0 3377 // The last case condition is finished. Loop in processCondSwitchBody,
michael@0 3378 // with potential stops in processSwitchBreak. Check that the bodies
michael@0 3379 // fixed list is over-estimated by at most 1, and shrink it so that its
michael@0 3380 // length can be used as an upper bound while iterating over the bodies.
michael@0 3381 JS_ASSERT(currentIdx == bodies.length() || currentIdx + 1 == bodies.length());
michael@0 3382 bodies.shrink(bodies.length() - currentIdx);
michael@0 3383
michael@0 3384 // Handle break statements in processSwitchBreak while processing
michael@0 3385 // bodies.
michael@0 3386 ControlFlowInfo breakInfo(cfgStack_.length() - 1, state.condswitch.exitpc);
michael@0 3387 if (!switches_.append(breakInfo))
michael@0 3388 return ControlStatus_Error;
michael@0 3389
michael@0 3390 // Jump into the first body.
michael@0 3391 currentIdx = 0;
michael@0 3392 setCurrent(nullptr);
michael@0 3393 state.state = CFGState::COND_SWITCH_BODY;
michael@0 3394 return processCondSwitchBody(state);
michael@0 3395 }
michael@0 3396
michael@0 3397 // Continue until the next case condition.
michael@0 3398 if (!setCurrentAndSpecializePhis(caseBlock))
michael@0 3399 return ControlStatus_Error;
michael@0 3400 pc = current->pc();
michael@0 3401 state.stopAt = casePc;
michael@0 3402 return ControlStatus_Jumped;
michael@0 3403 }
michael@0 3404
michael@0 3405 IonBuilder::ControlStatus
michael@0 3406 IonBuilder::processCondSwitchBody(CFGState &state)
michael@0 3407 {
michael@0 3408 JS_ASSERT(state.state == CFGState::COND_SWITCH_BODY);
michael@0 3409 JS_ASSERT(pc <= state.condswitch.exitpc);
michael@0 3410 FixedList<MBasicBlock *> &bodies = *state.condswitch.bodies;
michael@0 3411 uint32_t &currentIdx = state.condswitch.currentIdx;
michael@0 3412
michael@0 3413 JS_ASSERT(currentIdx <= bodies.length());
michael@0 3414 if (currentIdx == bodies.length()) {
michael@0 3415 JS_ASSERT_IF(current, pc == state.condswitch.exitpc);
michael@0 3416 return processSwitchEnd(state.condswitch.breaks, state.condswitch.exitpc);
michael@0 3417 }
michael@0 3418
michael@0 3419 // Get the next body
michael@0 3420 MBasicBlock *nextBody = bodies[currentIdx++];
michael@0 3421 JS_ASSERT_IF(current, pc == nextBody->pc());
michael@0 3422
michael@0 3423 // Fix the reverse post-order iteration.
michael@0 3424 graph().moveBlockToEnd(nextBody);
michael@0 3425
michael@0 3426 // The last body continues into the new one.
michael@0 3427 if (current) {
michael@0 3428 current->end(MGoto::New(alloc(), nextBody));
michael@0 3429 if (!nextBody->addPredecessor(alloc(), current))
michael@0 3430 return ControlStatus_Error;
michael@0 3431 }
michael@0 3432
michael@0 3433 // Continue in the next body.
michael@0 3434 if (!setCurrentAndSpecializePhis(nextBody))
michael@0 3435 return ControlStatus_Error;
michael@0 3436 pc = current->pc();
michael@0 3437
michael@0 3438 if (currentIdx < bodies.length())
michael@0 3439 state.stopAt = bodies[currentIdx]->pc();
michael@0 3440 else
michael@0 3441 state.stopAt = state.condswitch.exitpc;
michael@0 3442 return ControlStatus_Jumped;
michael@0 3443 }
michael@0 3444
michael@0 3445 bool
michael@0 3446 IonBuilder::jsop_andor(JSOp op)
michael@0 3447 {
michael@0 3448 JS_ASSERT(op == JSOP_AND || op == JSOP_OR);
michael@0 3449
michael@0 3450 jsbytecode *rhsStart = pc + js_CodeSpec[op].length;
michael@0 3451 jsbytecode *joinStart = pc + GetJumpOffset(pc);
michael@0 3452 JS_ASSERT(joinStart > pc);
michael@0 3453
michael@0 3454 // We have to leave the LHS on the stack.
michael@0 3455 MDefinition *lhs = current->peek(-1);
michael@0 3456
michael@0 3457 MBasicBlock *evalRhs = newBlock(current, rhsStart);
michael@0 3458 MBasicBlock *join = newBlock(current, joinStart);
michael@0 3459 if (!evalRhs || !join)
michael@0 3460 return false;
michael@0 3461
michael@0 3462 MTest *test = (op == JSOP_AND)
michael@0 3463 ? MTest::New(alloc(), lhs, evalRhs, join)
michael@0 3464 : MTest::New(alloc(), lhs, join, evalRhs);
michael@0 3465 test->infer();
michael@0 3466 current->end(test);
michael@0 3467
michael@0 3468 if (!cfgStack_.append(CFGState::AndOr(joinStart, join)))
michael@0 3469 return false;
michael@0 3470
michael@0 3471 return setCurrentAndSpecializePhis(evalRhs);
michael@0 3472 }
michael@0 3473
michael@0 3474 bool
michael@0 3475 IonBuilder::jsop_dup2()
michael@0 3476 {
michael@0 3477 uint32_t lhsSlot = current->stackDepth() - 2;
michael@0 3478 uint32_t rhsSlot = current->stackDepth() - 1;
michael@0 3479 current->pushSlot(lhsSlot);
michael@0 3480 current->pushSlot(rhsSlot);
michael@0 3481 return true;
michael@0 3482 }
michael@0 3483
michael@0 3484 bool
michael@0 3485 IonBuilder::jsop_loophead(jsbytecode *pc)
michael@0 3486 {
michael@0 3487 assertValidLoopHeadOp(pc);
michael@0 3488
michael@0 3489 current->add(MInterruptCheck::New(alloc()));
michael@0 3490 insertRecompileCheck();
michael@0 3491
michael@0 3492 return true;
michael@0 3493 }
michael@0 3494
michael@0 3495 bool
michael@0 3496 IonBuilder::jsop_ifeq(JSOp op)
michael@0 3497 {
michael@0 3498 // IFEQ always has a forward offset.
michael@0 3499 jsbytecode *trueStart = pc + js_CodeSpec[op].length;
michael@0 3500 jsbytecode *falseStart = pc + GetJumpOffset(pc);
michael@0 3501 JS_ASSERT(falseStart > pc);
michael@0 3502
michael@0 3503 // We only handle cases that emit source notes.
michael@0 3504 jssrcnote *sn = info().getNote(gsn, pc);
michael@0 3505 if (!sn)
michael@0 3506 return abort("expected sourcenote");
michael@0 3507
michael@0 3508 MDefinition *ins = current->pop();
michael@0 3509
michael@0 3510 // Create true and false branches.
michael@0 3511 MBasicBlock *ifTrue = newBlock(current, trueStart);
michael@0 3512 MBasicBlock *ifFalse = newBlock(current, falseStart);
michael@0 3513 if (!ifTrue || !ifFalse)
michael@0 3514 return false;
michael@0 3515
michael@0 3516 MTest *test = MTest::New(alloc(), ins, ifTrue, ifFalse);
michael@0 3517 current->end(test);
michael@0 3518
michael@0 3519 // The bytecode for if/ternary gets emitted either like this:
michael@0 3520 //
michael@0 3521 // IFEQ X ; src note (IF_ELSE, COND) points to the GOTO
michael@0 3522 // ...
michael@0 3523 // GOTO Z
michael@0 3524 // X: ... ; else/else if
michael@0 3525 // ...
michael@0 3526 // Z: ; join
michael@0 3527 //
michael@0 3528 // Or like this:
michael@0 3529 //
michael@0 3530 // IFEQ X ; src note (IF) has no offset
michael@0 3531 // ...
michael@0 3532 // Z: ... ; join
michael@0 3533 //
michael@0 3534 // We want to parse the bytecode as if we were parsing the AST, so for the
michael@0 3535 // IF_ELSE/COND cases, we use the source note and follow the GOTO. For the
michael@0 3536 // IF case, the IFEQ offset is the join point.
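// Editorial note (illustrative sketch):
//
//   if (c) { f(); } else { g(); }   // SRC_IF_ELSE: first shape above
//   c ? f() : g();                  // SRC_COND: first shape above
//   if (c) { f(); }                 // SRC_IF: second shape, IFEQ targets the join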
michael@0 3537 switch (SN_TYPE(sn)) {
michael@0 3538 case SRC_IF:
michael@0 3539 if (!cfgStack_.append(CFGState::If(falseStart, test)))
michael@0 3540 return false;
michael@0 3541 break;
michael@0 3542
michael@0 3543 case SRC_IF_ELSE:
michael@0 3544 case SRC_COND:
michael@0 3545 {
michael@0 3546 // Infer the join point from the JSOP_GOTO[X] sitting here, then
michael@0 3547 // assert as much as we can that this is the right GOTO.
michael@0 3548 jsbytecode *trueEnd = pc + js_GetSrcNoteOffset(sn, 0);
michael@0 3549 JS_ASSERT(trueEnd > pc);
michael@0 3550 JS_ASSERT(trueEnd < falseStart);
michael@0 3551 JS_ASSERT(JSOp(*trueEnd) == JSOP_GOTO);
michael@0 3552 JS_ASSERT(!info().getNote(gsn, trueEnd));
michael@0 3553
michael@0 3554 jsbytecode *falseEnd = trueEnd + GetJumpOffset(trueEnd);
michael@0 3555 JS_ASSERT(falseEnd > trueEnd);
michael@0 3556 JS_ASSERT(falseEnd >= falseStart);
michael@0 3557
michael@0 3558 if (!cfgStack_.append(CFGState::IfElse(trueEnd, falseEnd, test)))
michael@0 3559 return false;
michael@0 3560 break;
michael@0 3561 }
michael@0 3562
michael@0 3563 default:
michael@0 3564 MOZ_ASSUME_UNREACHABLE("unexpected source note type");
michael@0 3565 }
michael@0 3566
michael@0 3567 // Switch to parsing the true branch. Note that no PC update is needed;
michael@0 3568 // it's the next instruction.
michael@0 3569 if (!setCurrentAndSpecializePhis(ifTrue))
michael@0 3570 return false;
michael@0 3571
michael@0 3572 // Filter the types in the true branch.
michael@0 3573 if (!filterTypesAtTest(test))
michael@0 3574 return false;
michael@0 3575 return true;
michael@0 3576 }
michael@0 3577
michael@0 3578 bool
michael@0 3579 IonBuilder::jsop_try()
michael@0 3580 {
michael@0 3581 JS_ASSERT(JSOp(*pc) == JSOP_TRY);
michael@0 3582
michael@0 3583 if (!js_JitOptions.compileTryCatch)
michael@0 3584 return abort("Try-catch support disabled");
michael@0 3585
michael@0 3586 // Try-finally is not yet supported.
michael@0 3587 if (analysis().hasTryFinally())
michael@0 3588 return abort("Has try-finally");
michael@0 3589
michael@0 3590 // Try-catch within inline frames is not yet supported.
michael@0 3591 JS_ASSERT(!isInlineBuilder());
michael@0 3592
michael@0 3593 // Try-catch during the arguments usage analysis is not yet supported. Code
michael@0 3594 // accessing the arguments within the 'catch' block is not accounted for.
michael@0 3595 if (info().executionMode() == ArgumentsUsageAnalysis)
michael@0 3596 return abort("Try-catch during arguments usage analysis");
michael@0 3597
michael@0 3598 graph().setHasTryBlock();
michael@0 3599
michael@0 3600 jssrcnote *sn = info().getNote(gsn, pc);
michael@0 3601 JS_ASSERT(SN_TYPE(sn) == SRC_TRY);
michael@0 3602
michael@0 3603 // Get the pc of the last instruction in the try block. It's a JSOP_GOTO to
michael@0 3604 // jump over the catch block.
michael@0 3605 jsbytecode *endpc = pc + js_GetSrcNoteOffset(sn, 0);
michael@0 3606 JS_ASSERT(JSOp(*endpc) == JSOP_GOTO);
michael@0 3607 JS_ASSERT(GetJumpOffset(endpc) > 0);
michael@0 3608
michael@0 3609 jsbytecode *afterTry = endpc + GetJumpOffset(endpc);
michael@0 3610
michael@0 3611 // If controlflow in the try body is terminated (by a return or throw
michael@0 3612 // statement), the code after the try-statement may still be reachable
michael@0 3613 // via the catch block (which we don't compile) and OSR can enter it.
michael@0 3614 // For example:
michael@0 3615 //
michael@0 3616 // try {
michael@0 3617 // throw 3;
michael@0 3618 // } catch(e) { }
michael@0 3619 //
michael@0 3620 // for (var i=0; i<1000; i++) {}
michael@0 3621 //
michael@0 3622 // To handle this, we create two blocks: one for the try block and one
michael@0 3623 // for the code following the try-catch statement. Both blocks are
michael@0 3624 // connected to the graph with an MTest instruction that always jumps to
michael@0 3625 // the try block. This ensures the successor block always has a predecessor
michael@0 3626 // and later passes will optimize this MTest to a no-op.
michael@0 3627 //
michael@0 3628 // If the code after the try block is unreachable (control flow in both the
michael@0 3629 // try and catch blocks is terminated), only create the try block, to avoid
michael@0 3630 // parsing unreachable code.
michael@0 3631
michael@0 3632 MBasicBlock *tryBlock = newBlock(current, GetNextPc(pc));
michael@0 3633 if (!tryBlock)
michael@0 3634 return false;
michael@0 3635
michael@0 3636 MBasicBlock *successor;
michael@0 3637 if (analysis().maybeInfo(afterTry)) {
michael@0 3638 successor = newBlock(current, afterTry);
michael@0 3639 if (!successor)
michael@0 3640 return false;
michael@0 3641
michael@0 3642 // Add MTest(true, tryBlock, successorBlock).
michael@0 3643 MConstant *true_ = MConstant::New(alloc(), BooleanValue(true));
michael@0 3644 current->add(true_);
michael@0 3645 current->end(MTest::New(alloc(), true_, tryBlock, successor));
michael@0 3646 } else {
michael@0 3647 successor = nullptr;
michael@0 3648 current->end(MGoto::New(alloc(), tryBlock));
michael@0 3649 }
michael@0 3650
michael@0 3651 if (!cfgStack_.append(CFGState::Try(endpc, successor)))
michael@0 3652 return false;
michael@0 3653
michael@0 3654 // The baseline compiler should not attempt to enter the catch block
michael@0 3655 // via OSR.
michael@0 3656 JS_ASSERT(info().osrPc() < endpc || info().osrPc() >= afterTry);
michael@0 3657
michael@0 3658 // Start parsing the try block.
michael@0 3659 return setCurrentAndSpecializePhis(tryBlock);
michael@0 3660 }
michael@0 3661
michael@0 3662 IonBuilder::ControlStatus
michael@0 3663 IonBuilder::processReturn(JSOp op)
michael@0 3664 {
michael@0 3665 MDefinition *def;
michael@0 3666 switch (op) {
michael@0 3667 case JSOP_RETURN:
michael@0 3668 // Return the value on top of the stack.
michael@0 3669 def = current->pop();
michael@0 3670 break;
michael@0 3671
michael@0 3672 case JSOP_RETRVAL:
michael@0 3673 // Return undefined eagerly if script doesn't use return value.
michael@0 3674 if (script()->noScriptRval()) {
michael@0 3675 MInstruction *ins = MConstant::New(alloc(), UndefinedValue());
michael@0 3676 current->add(ins);
michael@0 3677 def = ins;
michael@0 3678 break;
michael@0 3679 }
michael@0 3680
michael@0 3681 def = current->getSlot(info().returnValueSlot());
michael@0 3682 break;
michael@0 3683
michael@0 3684 default:
michael@0 3685 def = nullptr;
michael@0 3686 MOZ_ASSUME_UNREACHABLE("unknown return op");
michael@0 3687 }
michael@0 3688
michael@0 3689 if (instrumentedProfiling()) {
michael@0 3690 current->add(MProfilerStackOp::New(alloc(), script(), MProfilerStackOp::Exit,
michael@0 3691 inliningDepth_));
michael@0 3692 }
michael@0 3693 MReturn *ret = MReturn::New(alloc(), def);
michael@0 3694 current->end(ret);
michael@0 3695
michael@0 3696 if (!graph().addReturn(current))
michael@0 3697 return ControlStatus_Error;
michael@0 3698
michael@0 3699 // Make sure no one tries to use this block now.
michael@0 3700 setCurrent(nullptr);
michael@0 3701 return processControlEnd();
michael@0 3702 }
michael@0 3703
michael@0 3704 IonBuilder::ControlStatus
michael@0 3705 IonBuilder::processThrow()
michael@0 3706 {
michael@0 3707 MDefinition *def = current->pop();
michael@0 3708
michael@0 3709 // MThrow is not marked as effectful. This means when it throws and we
michael@0 3710 // are inside a try block, we could use an earlier resume point and this
michael@0 3711 // resume point may not be up-to-date, for example:
michael@0 3712 //
michael@0 3713 // (function() {
michael@0 3714 // try {
michael@0 3715 // var x = 1;
michael@0 3716 // foo(); // resume point
michael@0 3717 // x = 2;
michael@0 3718 // throw foo;
michael@0 3719 // } catch(e) {
michael@0 3720 // print(x);
michael@0 3721 // }
michael@0 3722 // })();
michael@0 3723 //
michael@0 3724 // If we use the resume point after the call, this will print 1 instead
michael@0 3725 // of 2. To fix this, we create a resume point right before the MThrow.
michael@0 3726 //
michael@0 3727 // Note that this is not a problem for instructions other than MThrow
michael@0 3728 // because they are either marked as effectful (have their own resume
michael@0 3729 // point) or cannot throw a catchable exception.
michael@0 3730 //
michael@0 3731 // We always install this resume point (instead of only when the function
michael@0 3732 // has a try block) in order to handle the Debugger onExceptionUnwind
michael@0 3733 // hook. When we need to handle the hook, we bail out to baseline right
michael@0 3734 // after the throw and propagate the exception when debug mode is on. This
michael@0 3735 // is opposed to the normal behavior of resuming directly in the
michael@0 3736 // associated catch block.
michael@0 3737 MNop *nop = MNop::New(alloc());
michael@0 3738 current->add(nop);
michael@0 3739
michael@0 3740 if (!resumeAfter(nop))
michael@0 3741 return ControlStatus_Error;
michael@0 3742
michael@0 3743 MThrow *ins = MThrow::New(alloc(), def);
michael@0 3744 current->end(ins);
michael@0 3745
michael@0 3746 // Make sure no one tries to use this block now.
michael@0 3747 setCurrent(nullptr);
michael@0 3748 return processControlEnd();
michael@0 3749 }
michael@0 3750
michael@0 3751 bool
michael@0 3752 IonBuilder::pushConstant(const Value &v)
michael@0 3753 {
michael@0 3754 current->push(constant(v));
michael@0 3755 return true;
michael@0 3756 }
michael@0 3757
michael@0 3758 bool
michael@0 3759 IonBuilder::jsop_bitnot()
michael@0 3760 {
michael@0 3761 MDefinition *input = current->pop();
michael@0 3762 MBitNot *ins = MBitNot::New(alloc(), input);
michael@0 3763
michael@0 3764 current->add(ins);
michael@0 3765 ins->infer();
michael@0 3766
michael@0 3767 current->push(ins);
michael@0 3768 if (ins->isEffectful() && !resumeAfter(ins))
michael@0 3769 return false;
michael@0 3770 return true;
michael@0 3771 }
michael@0 3772 bool
michael@0 3773 IonBuilder::jsop_bitop(JSOp op)
michael@0 3774 {
michael@0 3775 // Pop inputs.
michael@0 3776 MDefinition *right = current->pop();
michael@0 3777 MDefinition *left = current->pop();
michael@0 3778
michael@0 3779 MBinaryBitwiseInstruction *ins;
michael@0 3780 switch (op) {
michael@0 3781 case JSOP_BITAND:
michael@0 3782 ins = MBitAnd::New(alloc(), left, right);
michael@0 3783 break;
michael@0 3784
michael@0 3785 case JSOP_BITOR:
michael@0 3786 ins = MBitOr::New(alloc(), left, right);
michael@0 3787 break;
michael@0 3788
michael@0 3789 case JSOP_BITXOR:
michael@0 3790 ins = MBitXor::New(alloc(), left, right);
michael@0 3791 break;
michael@0 3792
michael@0 3793 case JSOP_LSH:
michael@0 3794 ins = MLsh::New(alloc(), left, right);
michael@0 3795 break;
michael@0 3796
michael@0 3797 case JSOP_RSH:
michael@0 3798 ins = MRsh::New(alloc(), left, right);
michael@0 3799 break;
michael@0 3800
michael@0 3801 case JSOP_URSH:
michael@0 3802 ins = MUrsh::New(alloc(), left, right);
michael@0 3803 break;
michael@0 3804
michael@0 3805 default:
michael@0 3806 MOZ_ASSUME_UNREACHABLE("unexpected bitop");
michael@0 3807 }
michael@0 3808
michael@0 3809 current->add(ins);
michael@0 3810 ins->infer(inspector, pc);
michael@0 3811
michael@0 3812 current->push(ins);
michael@0 3813 if (ins->isEffectful() && !resumeAfter(ins))
michael@0 3814 return false;
michael@0 3815
michael@0 3816 return true;
michael@0 3817 }
michael@0 3818
michael@0 3819 bool
michael@0 3820 IonBuilder::jsop_binary(JSOp op, MDefinition *left, MDefinition *right)
michael@0 3821 {
michael@0 3822 // Do a string concatenation if adding two inputs that are int, double or
michael@0 3823 // string and at least one of them is a string.
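// Editorial note (illustrative sketch): e.g. '"a" + 1' or '1.5 + "a"' take
// the MConcat path below, while '1 + 2' falls through to the arithmetic case.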
michael@0 3824 if (op == JSOP_ADD &&
michael@0 3825 ((left->type() == MIRType_String &&
michael@0 3826 (right->type() == MIRType_String ||
michael@0 3827 right->type() == MIRType_Int32 ||
michael@0 3828 right->type() == MIRType_Double)) ||
michael@0 3829 (left->type() == MIRType_Int32 &&
michael@0 3830 right->type() == MIRType_String) ||
michael@0 3831 (left->type() == MIRType_Double &&
michael@0 3832 right->type() == MIRType_String)))
michael@0 3833 {
michael@0 3834 MConcat *ins = MConcat::New(alloc(), left, right);
michael@0 3835 current->add(ins);
michael@0 3836 current->push(ins);
michael@0 3837 return maybeInsertResume();
michael@0 3838 }
michael@0 3839
michael@0 3840 MBinaryArithInstruction *ins;
michael@0 3841 switch (op) {
michael@0 3842 case JSOP_ADD:
michael@0 3843 ins = MAdd::New(alloc(), left, right);
michael@0 3844 break;
michael@0 3845
michael@0 3846 case JSOP_SUB:
michael@0 3847 ins = MSub::New(alloc(), left, right);
michael@0 3848 break;
michael@0 3849
michael@0 3850 case JSOP_MUL:
michael@0 3851 ins = MMul::New(alloc(), left, right);
michael@0 3852 break;
michael@0 3853
michael@0 3854 case JSOP_DIV:
michael@0 3855 ins = MDiv::New(alloc(), left, right);
michael@0 3856 break;
michael@0 3857
michael@0 3858 case JSOP_MOD:
michael@0 3859 ins = MMod::New(alloc(), left, right);
michael@0 3860 break;
michael@0 3861
michael@0 3862 default:
michael@0 3863 MOZ_ASSUME_UNREACHABLE("unexpected binary opcode");
michael@0 3864 }
michael@0 3865
michael@0 3866 current->add(ins);
michael@0 3867 ins->infer(alloc(), inspector, pc);
michael@0 3868 current->push(ins);
michael@0 3869
michael@0 3870 if (ins->isEffectful())
michael@0 3871 return resumeAfter(ins);
michael@0 3872 return maybeInsertResume();
michael@0 3873 }
michael@0 3874
michael@0 3875 bool
michael@0 3876 IonBuilder::jsop_binary(JSOp op)
michael@0 3877 {
michael@0 3878 MDefinition *right = current->pop();
michael@0 3879 MDefinition *left = current->pop();
michael@0 3880
michael@0 3881 return jsop_binary(op, left, right);
michael@0 3882 }
michael@0 3883
michael@0 3884 bool
michael@0 3885 IonBuilder::jsop_pos()
michael@0 3886 {
michael@0 3887 if (IsNumberType(current->peek(-1)->type())) {
michael@0 3888 // Already int32 or double. Set the operand as implicitly used so it
michael@0 3889 // doesn't get optimized out if it has no other uses, as we could bail
michael@0 3890 // out.
michael@0 3891 current->peek(-1)->setImplicitlyUsedUnchecked();
michael@0 3892 return true;
michael@0 3893 }
michael@0 3894
michael@0 3895 // Compile +x as x * 1.
michael@0 3896 MDefinition *value = current->pop();
michael@0 3897 MConstant *one = MConstant::New(alloc(), Int32Value(1));
michael@0 3898 current->add(one);
michael@0 3899
michael@0 3900 return jsop_binary(JSOP_MUL, value, one);
michael@0 3901 }
michael@0 3902
michael@0 3903 bool
michael@0 3904 IonBuilder::jsop_neg()
michael@0 3905 {
michael@0 3906 // Since JSOP_NEG does not use a slot, we cannot push the MConstant.
michael@0 3907 // The MConstant is therefore passed to JSOP_MUL without slot traffic.
michael@0 3908 MConstant *negator = MConstant::New(alloc(), Int32Value(-1));
michael@0 3909 current->add(negator);
michael@0 3910
michael@0 3911 MDefinition *right = current->pop();
michael@0 3912
michael@0 3913 if (!jsop_binary(JSOP_MUL, negator, right))
michael@0 3914 return false;
michael@0 3915 return true;
michael@0 3916 }
michael@0 3917
michael@0 3918 class AutoAccumulateReturns
michael@0 3919 {
michael@0 3920 MIRGraph &graph_;
michael@0 3921 MIRGraphReturns *prev_;
michael@0 3922
michael@0 3923 public:
michael@0 3924 AutoAccumulateReturns(MIRGraph &graph, MIRGraphReturns &returns)
michael@0 3925 : graph_(graph)
michael@0 3926 {
michael@0 3927 prev_ = graph_.returnAccumulator();
michael@0 3928 graph_.setReturnAccumulator(&returns);
michael@0 3929 }
michael@0 3930 ~AutoAccumulateReturns() {
michael@0 3931 graph_.setReturnAccumulator(prev_);
michael@0 3932 }
michael@0 3933 };
michael@0 3934
michael@0 3935 bool
michael@0 3936 IonBuilder::inlineScriptedCall(CallInfo &callInfo, JSFunction *target)
michael@0 3937 {
michael@0 3938 JS_ASSERT(target->hasScript());
michael@0 3939 JS_ASSERT(IsIonInlinablePC(pc));
michael@0 3940
michael@0 3941 callInfo.setImplicitlyUsedUnchecked();
michael@0 3942
michael@0 3943 // Ensure sufficient space in the slots: needed for inlining from FUNAPPLY.
michael@0 3944 uint32_t depth = current->stackDepth() + callInfo.numFormals();
michael@0 3945 if (depth > current->nslots()) {
michael@0 3946 if (!current->increaseSlots(depth - current->nslots()))
michael@0 3947 return false;
michael@0 3948 }
michael@0 3949
michael@0 3950 // Create new |this| on the caller-side for inlined constructors.
michael@0 3951 if (callInfo.constructing()) {
michael@0 3952 MDefinition *thisDefn = createThis(target, callInfo.fun());
michael@0 3953 if (!thisDefn)
michael@0 3954 return false;
michael@0 3955 callInfo.setThis(thisDefn);
michael@0 3956 }
michael@0 3957
michael@0 3958 // Capture formals in the outer resume point.
michael@0 3959 callInfo.pushFormals(current);
michael@0 3960
michael@0 3961 MResumePoint *outerResumePoint =
michael@0 3962 MResumePoint::New(alloc(), current, pc, callerResumePoint_, MResumePoint::Outer);
michael@0 3963 if (!outerResumePoint)
michael@0 3964 return false;
michael@0 3965
michael@0 3966 // Pop formals again, except leave |fun| on stack for duration of call.
michael@0 3967 callInfo.popFormals(current);
michael@0 3968 current->push(callInfo.fun());
michael@0 3969
michael@0 3970 JSScript *calleeScript = target->nonLazyScript();
michael@0 3971 BaselineInspector inspector(calleeScript);
michael@0 3972
michael@0 3973 // Improve type information of |this| when not set.
michael@0 3974 if (callInfo.constructing() &&
michael@0 3975 !callInfo.thisArg()->resultTypeSet() &&
michael@0 3976 calleeScript->types)
michael@0 3977 {
michael@0 3978 types::StackTypeSet *types = types::TypeScript::ThisTypes(calleeScript);
michael@0 3979 if (!types->unknown()) {
michael@0 3980 types::TemporaryTypeSet *clonedTypes = types->clone(alloc_->lifoAlloc());
michael@0 3981 if (!clonedTypes)
michael@0 3982 return oom();
michael@0 3983 MTypeBarrier *barrier = MTypeBarrier::New(alloc(), callInfo.thisArg(), clonedTypes);
michael@0 3984 current->add(barrier);
michael@0 3985 callInfo.setThis(barrier);
michael@0 3986 }
michael@0 3987 }
michael@0 3988
michael@0 3989 // Start inlining.
michael@0 3990 LifoAlloc *lifoAlloc = alloc_->lifoAlloc();
michael@0 3991 CompileInfo *info = lifoAlloc->new_<CompileInfo>(calleeScript, target,
michael@0 3992 (jsbytecode *)nullptr, callInfo.constructing(),
michael@0 3993 this->info().executionMode(),
michael@0 3994 /* needsArgsObj = */ false);
michael@0 3995 if (!info)
michael@0 3996 return false;
michael@0 3997
michael@0 3998 MIRGraphReturns returns(alloc());
michael@0 3999 AutoAccumulateReturns aar(graph(), returns);
michael@0 4000
michael@0 4001 // Build the graph.
michael@0 4002 IonBuilder inlineBuilder(analysisContext, compartment, options, &alloc(), &graph(), constraints(),
michael@0 4003 &inspector, info, &optimizationInfo(), nullptr, inliningDepth_ + 1,
michael@0 4004 loopDepth_);
michael@0 4005 if (!inlineBuilder.buildInline(this, outerResumePoint, callInfo)) {
michael@0 4006 if (analysisContext && analysisContext->isExceptionPending()) {
michael@0 4007 IonSpew(IonSpew_Abort, "Inline builder raised exception.");
michael@0 4008 abortReason_ = AbortReason_Error;
michael@0 4009 return false;
michael@0 4010 }
michael@0 4011
michael@0 4012 // Inlining the callee failed. Mark the callee as uninlineable only if
michael@0 4013 // the inlining was aborted for a non-exception reason.
michael@0 4014 if (inlineBuilder.abortReason_ == AbortReason_Disable) {
michael@0 4015 calleeScript->setUninlineable();
michael@0 4016 abortReason_ = AbortReason_Inlining;
michael@0 4017 } else if (inlineBuilder.abortReason_ == AbortReason_Inlining) {
michael@0 4018 abortReason_ = AbortReason_Inlining;
michael@0 4019 }
michael@0 4020
michael@0 4021 return false;
michael@0 4022 }
michael@0 4023
michael@0 4024 // Create return block.
michael@0 4025 jsbytecode *postCall = GetNextPc(pc);
michael@0 4026 MBasicBlock *returnBlock = newBlock(nullptr, postCall);
michael@0 4027 if (!returnBlock)
michael@0 4028 return false;
michael@0 4029 returnBlock->setCallerResumePoint(callerResumePoint_);
michael@0 4030
michael@0 4031 // When profiling, add an InlineExit instruction to indicate the end of the inlined function.
michael@0 4032 if (instrumentedProfiling())
michael@0 4033 returnBlock->add(MProfilerStackOp::New(alloc(), nullptr, MProfilerStackOp::InlineExit));
michael@0 4034
michael@0 4035 // Inherit the slots from current and pop |fun|.
michael@0 4036 returnBlock->inheritSlots(current);
michael@0 4037 returnBlock->pop();
michael@0 4038
michael@0 4039 // Accumulate return values.
michael@0 4040 if (returns.empty()) {
michael@0 4041 // Inlining of functions that have no exit is not supported.
michael@0 4042 calleeScript->setUninlineable();
michael@0 4043 abortReason_ = AbortReason_Inlining;
michael@0 4044 return false;
michael@0 4045 }
michael@0 4046 MDefinition *retvalDefn = patchInlinedReturns(callInfo, returns, returnBlock);
michael@0 4047 if (!retvalDefn)
michael@0 4048 return false;
michael@0 4049 returnBlock->push(retvalDefn);
michael@0 4050
michael@0 4051 // Initialize entry slots now that the stack has been fixed up.
michael@0 4052 if (!returnBlock->initEntrySlots(alloc()))
michael@0 4053 return false;
michael@0 4054
michael@0 4055 return setCurrentAndSpecializePhis(returnBlock);
michael@0 4056 }
michael@0 4057
michael@0 4058 MDefinition *
michael@0 4059 IonBuilder::patchInlinedReturn(CallInfo &callInfo, MBasicBlock *exit, MBasicBlock *bottom)
michael@0 4060 {
michael@0 4061 // Replaces the MReturn in the exit block with an MGoto.
michael@0 4062 MDefinition *rdef = exit->lastIns()->toReturn()->input();
michael@0 4063 exit->discardLastIns();
michael@0 4064
michael@0 4065 // Constructors must be patched by the caller to always return an object.
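// Editorial note (illustrative sketch): given
//
//   function Foo() { return 5; }
//   var o = new Foo();   // o is the freshly created object, not 5
//
// a primitive return value is replaced by |this|, whereas returning an
// object from the constructor would propagate that object instead.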
michael@0 4066 if (callInfo.constructing()) {
michael@0 4067 if (rdef->type() == MIRType_Value) {
michael@0 4068 // Unknown return: dynamically detect objects.
michael@0 4069 MReturnFromCtor *filter = MReturnFromCtor::New(alloc(), rdef, callInfo.thisArg());
michael@0 4070 exit->add(filter);
michael@0 4071 rdef = filter;
michael@0 4072 } else if (rdef->type() != MIRType_Object) {
michael@0 4073 // Known non-object return: force |this|.
michael@0 4074 rdef = callInfo.thisArg();
michael@0 4075 }
michael@0 4076 } else if (callInfo.isSetter()) {
michael@0 4077 // Setters return their argument, not whatever value is returned.
michael@0 4078 rdef = callInfo.getArg(0);
michael@0 4079 }
michael@0 4080
michael@0 4081 MGoto *replacement = MGoto::New(alloc(), bottom);
michael@0 4082 exit->end(replacement);
michael@0 4083 if (!bottom->addPredecessorWithoutPhis(exit))
michael@0 4084 return nullptr;
michael@0 4085
michael@0 4086 return rdef;
michael@0 4087 }
michael@0 4088
michael@0 4089 MDefinition *
michael@0 4090 IonBuilder::patchInlinedReturns(CallInfo &callInfo, MIRGraphReturns &returns, MBasicBlock *bottom)
michael@0 4091 {
michael@0 4092 // Replaces MReturns with MGotos, returning the MDefinition
michael@0 4093 // representing the return value, or nullptr.
michael@0 4094 JS_ASSERT(returns.length() > 0);
michael@0 4095
michael@0 4096 if (returns.length() == 1)
michael@0 4097 return patchInlinedReturn(callInfo, returns[0], bottom);
michael@0 4098
michael@0 4099 // Accumulate multiple returns with a phi.
michael@0 4100 MPhi *phi = MPhi::New(alloc(), bottom->stackDepth());
michael@0 4101 if (!phi->reserveLength(returns.length()))
michael@0 4102 return nullptr;
michael@0 4103
michael@0 4104 for (size_t i = 0; i < returns.length(); i++) {
michael@0 4105 MDefinition *rdef = patchInlinedReturn(callInfo, returns[i], bottom);
michael@0 4106 if (!rdef)
michael@0 4107 return nullptr;
michael@0 4108 phi->addInput(rdef);
michael@0 4109 }
michael@0 4110
michael@0 4111 bottom->addPhi(phi);
michael@0 4112 return phi;
michael@0 4113 }
michael@0 4114
michael@0 4115 IonBuilder::InliningDecision
michael@0 4116 IonBuilder::makeInliningDecision(JSFunction *target, CallInfo &callInfo)
michael@0 4117 {
michael@0 4118 // When there is no target, inlining is impossible.
michael@0 4119 if (target == nullptr)
michael@0 4120 return InliningDecision_DontInline;
michael@0 4121
michael@0 4122 // Never inline during the arguments usage analysis.
michael@0 4123 if (info().executionMode() == ArgumentsUsageAnalysis)
michael@0 4124 return InliningDecision_DontInline;
michael@0 4125
michael@0 4126 // Native functions provide their own detection in inlineNativeCall().
michael@0 4127 if (target->isNative())
michael@0 4128 return InliningDecision_Inline;
michael@0 4129
michael@0 4130 // Determine whether inlining is possible at the callee site.
michael@0 4131 InliningDecision decision = canInlineTarget(target, callInfo);
michael@0 4132 if (decision != InliningDecision_Inline)
michael@0 4133 return decision;
michael@0 4134
michael@0 4135 // Heuristics!
michael@0 4136 JSScript *targetScript = target->nonLazyScript();
michael@0 4137
michael@0 4138 // Skip heuristics if we have an explicit hint to inline.
michael@0 4139 if (!targetScript->shouldInline()) {
michael@0 4140 // Cap the inlining depth.
michael@0 4141 if (js_JitOptions.isSmallFunction(targetScript)) {
michael@0 4142 if (inliningDepth_ >= optimizationInfo().smallFunctionMaxInlineDepth())
michael@0 4143 return DontInline(targetScript, "Vetoed: exceeding allowed inline depth");
michael@0 4144 } else {
michael@0 4145 if (inliningDepth_ >= optimizationInfo().maxInlineDepth())
michael@0 4146 return DontInline(targetScript, "Vetoed: exceeding allowed inline depth");
michael@0 4147
michael@0 4148 if (targetScript->hasLoops())
michael@0 4149 return DontInline(targetScript, "Vetoed: big function that contains a loop");
michael@0 4150
michael@0 4151 // Caller must not be excessively large.
michael@0 4152 if (script()->length() >= optimizationInfo().inliningMaxCallerBytecodeLength())
michael@0 4153 return DontInline(targetScript, "Vetoed: caller excessively large");
michael@0 4154 }
michael@0 4155
michael@0 4156 // Callee must not be excessively large.
michael@0 4157 // This heuristic also applies to the callsite as a whole.
michael@0 4158 if (targetScript->length() > optimizationInfo().inlineMaxTotalBytecodeLength())
michael@0 4159 return DontInline(targetScript, "Vetoed: callee excessively large");
michael@0 4160
michael@0 4161 // Callee must have been called a few times to have somewhat stable
michael@0 4162 // type information, except for definite properties analysis,
michael@0 4163 // as the caller has not run yet.
michael@0 4164 if (targetScript->getUseCount() < optimizationInfo().usesBeforeInlining() &&
michael@0 4165 info().executionMode() != DefinitePropertiesAnalysis)
michael@0 4166 {
michael@0 4167 return DontInline(targetScript, "Vetoed: callee is insufficiently hot.");
michael@0 4168 }
michael@0 4169 }
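// To summarize the heuristics above: unless TI explicitly hints that the
// callee should be inlined, we cap the inlining depth, refuse non-small
// callees that contain loops or whose caller is excessively large, bound the
// callee's bytecode length, and require the callee to be sufficiently hot.
// The concrete thresholds come from the current optimization level.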
michael@0 4170
michael@0 4171 // TI calls ObjectStateChange to trigger invalidation of the caller.
michael@0 4172 types::TypeObjectKey *targetType = types::TypeObjectKey::get(target);
michael@0 4173 targetType->watchStateChangeForInlinedCall(constraints());
michael@0 4174
michael@0 4175 // We mustn't relazify functions that have been inlined, because there's
michael@0 4176 // no way to tell if it is safe to do so.
michael@0 4177 script()->setHasBeenInlined();
michael@0 4178
michael@0 4179 return InliningDecision_Inline;
michael@0 4180 }
michael@0 4181
michael@0 4182 bool
michael@0 4183 IonBuilder::selectInliningTargets(ObjectVector &targets, CallInfo &callInfo, BoolVector &choiceSet,
michael@0 4184 uint32_t *numInlineable)
michael@0 4185 {
michael@0 4186 *numInlineable = 0;
michael@0 4187 uint32_t totalSize = 0;
michael@0 4188
michael@0 4189 // For each target, ask whether it may be inlined.
michael@0 4190 if (!choiceSet.reserve(targets.length()))
michael@0 4191 return false;
michael@0 4192
michael@0 4193 for (size_t i = 0; i < targets.length(); i++) {
michael@0 4194 JSFunction *target = &targets[i]->as<JSFunction>();
michael@0 4195 bool inlineable;
michael@0 4196 InliningDecision decision = makeInliningDecision(target, callInfo);
michael@0 4197 switch (decision) {
michael@0 4198 case InliningDecision_Error:
michael@0 4199 return false;
michael@0 4200 case InliningDecision_DontInline:
michael@0 4201 inlineable = false;
michael@0 4202 break;
michael@0 4203 case InliningDecision_Inline:
michael@0 4204 inlineable = true;
michael@0 4205 break;
michael@0 4206 default:
michael@0 4207 MOZ_ASSUME_UNREACHABLE("Unhandled InliningDecision value!");
michael@0 4208 }
michael@0 4209
michael@0 4210 // Enforce a maximum inlined bytecode limit at the callsite.
michael@0 4211 if (inlineable && target->isInterpreted()) {
michael@0 4212 totalSize += target->nonLazyScript()->length();
michael@0 4213 if (totalSize > optimizationInfo().inlineMaxTotalBytecodeLength())
michael@0 4214 inlineable = false;
michael@0 4215 }
michael@0 4216
michael@0 4217 choiceSet.append(inlineable);
michael@0 4218 if (inlineable)
michael@0 4219 *numInlineable += 1;
michael@0 4220 }
michael@0 4221
michael@0 4222 JS_ASSERT(choiceSet.length() == targets.length());
michael@0 4223 return true;
michael@0 4224 }
michael@0 4225
michael@0 4226 static bool
michael@0 4227 CanInlineGetPropertyCache(MGetPropertyCache *cache, MDefinition *thisDef)
michael@0 4228 {
michael@0 4229 JS_ASSERT(cache->object()->type() == MIRType_Object);
michael@0 4230 if (cache->object() != thisDef)
michael@0 4231 return false;
michael@0 4232
michael@0 4233 InlinePropertyTable *table = cache->propTable();
michael@0 4234 if (!table)
michael@0 4235 return false;
michael@0 4236 if (table->numEntries() == 0)
michael@0 4237 return false;
michael@0 4238 return true;
michael@0 4239 }
michael@0 4240
michael@0 4241 MGetPropertyCache *
michael@0 4242 IonBuilder::getInlineableGetPropertyCache(CallInfo &callInfo)
michael@0 4243 {
michael@0 4244 if (callInfo.constructing())
michael@0 4245 return nullptr;
michael@0 4246
michael@0 4247 MDefinition *thisDef = callInfo.thisArg();
michael@0 4248 if (thisDef->type() != MIRType_Object)
michael@0 4249 return nullptr;
michael@0 4250
michael@0 4251 MDefinition *funcDef = callInfo.fun();
michael@0 4252 if (funcDef->type() != MIRType_Object)
michael@0 4253 return nullptr;
michael@0 4254
michael@0 4255 // MGetPropertyCache with no uses may be optimized away.
michael@0 4256 if (funcDef->isGetPropertyCache()) {
michael@0 4257 MGetPropertyCache *cache = funcDef->toGetPropertyCache();
michael@0 4258 if (cache->hasUses())
michael@0 4259 return nullptr;
michael@0 4260 if (!CanInlineGetPropertyCache(cache, thisDef))
michael@0 4261 return nullptr;
michael@0 4262 return cache;
michael@0 4263 }
michael@0 4264
michael@0 4265 // Optimize away the following common pattern:
michael@0 4266 // MTypeBarrier[MIRType_Object] <- MGetPropertyCache
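// This shape typically arises from a call like |obj.method(...)|, where the
// callee was produced by a property load whose result was narrowed to an
// object by a type barrier (illustrative description, not exhaustive).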
michael@0 4267 if (funcDef->isTypeBarrier()) {
michael@0 4268 MTypeBarrier *barrier = funcDef->toTypeBarrier();
michael@0 4269 if (barrier->hasUses())
michael@0 4270 return nullptr;
michael@0 4271 if (barrier->type() != MIRType_Object)
michael@0 4272 return nullptr;
michael@0 4273 if (!barrier->input()->isGetPropertyCache())
michael@0 4274 return nullptr;
michael@0 4275
michael@0 4276 MGetPropertyCache *cache = barrier->input()->toGetPropertyCache();
michael@0 4277 if (cache->hasUses() && !cache->hasOneUse())
michael@0 4278 return nullptr;
michael@0 4279 if (!CanInlineGetPropertyCache(cache, thisDef))
michael@0 4280 return nullptr;
michael@0 4281 return cache;
michael@0 4282 }
michael@0 4283
michael@0 4284 return nullptr;
michael@0 4285 }
michael@0 4286
michael@0 4287 IonBuilder::InliningStatus
michael@0 4288 IonBuilder::inlineSingleCall(CallInfo &callInfo, JSFunction *target)
michael@0 4289 {
michael@0 4290 // Expects formals to be popped and wrapped.
michael@0 4291 if (target->isNative())
michael@0 4292 return inlineNativeCall(callInfo, target);
michael@0 4293
michael@0 4294 if (!inlineScriptedCall(callInfo, target))
michael@0 4295 return InliningStatus_Error;
michael@0 4296 return InliningStatus_Inlined;
michael@0 4297 }
michael@0 4298
michael@0 4299 IonBuilder::InliningStatus
michael@0 4300 IonBuilder::inlineCallsite(ObjectVector &targets, ObjectVector &originals,
michael@0 4301 bool lambda, CallInfo &callInfo)
michael@0 4302 {
michael@0 4303 if (targets.empty())
michael@0 4304 return InliningStatus_NotInlined;
michael@0 4305
michael@0 4306 // Is the function provided by an MGetPropertyCache?
michael@0 4307 // If so, the cache may be movable to a fallback path, with a dispatch
michael@0 4308 // instruction guarding on the incoming TypeObject.
michael@0 4309 MGetPropertyCache *propCache = getInlineableGetPropertyCache(callInfo);
michael@0 4310
michael@0 4311 // Inline single targets -- unless they derive from a cache, in which case
michael@0 4312 // avoiding the cache and guarding is still faster.
michael@0 4313 if (!propCache && targets.length() == 1) {
michael@0 4314 JSFunction *target = &targets[0]->as<JSFunction>();
michael@0 4315 InliningDecision decision = makeInliningDecision(target, callInfo);
michael@0 4316 switch (decision) {
michael@0 4317 case InliningDecision_Error:
michael@0 4318 return InliningStatus_Error;
michael@0 4319 case InliningDecision_DontInline:
michael@0 4320 return InliningStatus_NotInlined;
michael@0 4321 case InliningDecision_Inline:
michael@0 4322 break;
michael@0 4323 }
michael@0 4324
michael@0 4325 // Inlining will eliminate uses of the original callee, but it needs to
michael@0 4326 // be preserved in phis if we bail out. Mark the old callee definition as
michael@0 4327 // implicitly used to ensure this happens.
michael@0 4328 callInfo.fun()->setImplicitlyUsedUnchecked();
michael@0 4329
michael@0 4330 // If the callee is not going to be a lambda (which may vary across
michael@0 4331 // different invocations), then the callee definition can be replaced by a
michael@0 4332 // constant.
michael@0 4333 if (!lambda) {
michael@0 4334 // Replace the function with an MConstant.
michael@0 4335 MConstant *constFun = constant(ObjectValue(*target));
michael@0 4336 callInfo.setFun(constFun);
michael@0 4337 }
michael@0 4338
michael@0 4339 return inlineSingleCall(callInfo, target);
michael@0 4340 }
michael@0 4341
michael@0 4342 // Choose a subset of the targets for polymorphic inlining.
michael@0 4343 BoolVector choiceSet(alloc());
michael@0 4344 uint32_t numInlined;
michael@0 4345 if (!selectInliningTargets(targets, callInfo, choiceSet, &numInlined))
michael@0 4346 return InliningStatus_Error;
michael@0 4347 if (numInlined == 0)
michael@0 4348 return InliningStatus_NotInlined;
michael@0 4349
michael@0 4350 // Perform a polymorphic dispatch.
michael@0 4351 if (!inlineCalls(callInfo, targets, originals, choiceSet, propCache))
michael@0 4352 return InliningStatus_Error;
michael@0 4353
michael@0 4354 return InliningStatus_Inlined;
michael@0 4355 }
michael@0 4356
michael@0 4357 bool
michael@0 4358 IonBuilder::inlineGenericFallback(JSFunction *target, CallInfo &callInfo, MBasicBlock *dispatchBlock,
michael@0 4359 bool clonedAtCallsite)
michael@0 4360 {
michael@0 4361 // Generate a new block with all arguments on-stack.
michael@0 4362 MBasicBlock *fallbackBlock = newBlock(dispatchBlock, pc);
michael@0 4363 if (!fallbackBlock)
michael@0 4364 return false;
michael@0 4365
michael@0 4366 // Create a new CallInfo to track modified state within this block.
michael@0 4367 CallInfo fallbackInfo(alloc(), callInfo.constructing());
michael@0 4368 if (!fallbackInfo.init(callInfo))
michael@0 4369 return false;
michael@0 4370 fallbackInfo.popFormals(fallbackBlock);
michael@0 4371
michael@0 4372 // Generate an MCall, which uses stateful |current|.
michael@0 4373 if (!setCurrentAndSpecializePhis(fallbackBlock))
michael@0 4374 return false;
michael@0 4375 if (!makeCall(target, fallbackInfo, clonedAtCallsite))
michael@0 4376 return false;
michael@0 4377
michael@0 4378 // Pass return block to caller as |current|.
michael@0 4379 return true;
michael@0 4380 }
michael@0 4381
michael@0 4382 bool
michael@0 4383 IonBuilder::inlineTypeObjectFallback(CallInfo &callInfo, MBasicBlock *dispatchBlock,
michael@0 4384 MTypeObjectDispatch *dispatch, MGetPropertyCache *cache,
michael@0 4385 MBasicBlock **fallbackTarget)
michael@0 4386 {
michael@0 4387 // Getting here implies the following:
michael@0 4388 // 1. The call function is an MGetPropertyCache, or an MGetPropertyCache
michael@0 4389 // followed by an MTypeBarrier.
michael@0 4390 JS_ASSERT(callInfo.fun()->isGetPropertyCache() || callInfo.fun()->isTypeBarrier());
michael@0 4391
michael@0 4392 // 2. The MGetPropertyCache has inlineable cases by guarding on the TypeObject.
michael@0 4393 JS_ASSERT(dispatch->numCases() > 0);
michael@0 4394
michael@0 4395 // 3. The MGetPropertyCache (and, if applicable, MTypeBarrier) only
michael@0 4396 // have at most a single use.
michael@0 4397 JS_ASSERT_IF(callInfo.fun()->isGetPropertyCache(), !cache->hasUses());
michael@0 4398 JS_ASSERT_IF(callInfo.fun()->isTypeBarrier(), cache->hasOneUse());
michael@0 4399
michael@0 4400 // This means that no resume points yet capture the MGetPropertyCache,
michael@0 4401 // so everything from the MGetPropertyCache up until the call is movable.
michael@0 4402 // We now move the MGetPropertyCache and friends into a fallback path.
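// Roughly, the fallback path constructed below looks like this (sketch):
//
//   dispatchBlock --(fallback)--> prepBlock (pop formals)
//                                     |
//                                 getPropBlock (re-does the property load)
//                                     |
//                                 preCallBlock --> generic call fallback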
michael@0 4403
michael@0 4404 // Create a new CallInfo to track modified state within the fallback path.
michael@0 4405 CallInfo fallbackInfo(alloc(), callInfo.constructing());
michael@0 4406 if (!fallbackInfo.init(callInfo))
michael@0 4407 return false;
michael@0 4408
michael@0 4409 // Capture stack prior to the call operation. This captures the function.
michael@0 4410 MResumePoint *preCallResumePoint =
michael@0 4411 MResumePoint::New(alloc(), dispatchBlock, pc, callerResumePoint_, MResumePoint::ResumeAt);
michael@0 4412 if (!preCallResumePoint)
michael@0 4413 return false;
michael@0 4414
michael@0 4415 DebugOnly<size_t> preCallFuncIndex = preCallResumePoint->numOperands() - callInfo.numFormals();
michael@0 4416 JS_ASSERT(preCallResumePoint->getOperand(preCallFuncIndex) == fallbackInfo.fun());
michael@0 4417
michael@0 4418 // In the dispatch block, replace the function's slot entry with Undefined.
michael@0 4419 MConstant *undefined = MConstant::New(alloc(), UndefinedValue());
michael@0 4420 dispatchBlock->add(undefined);
michael@0 4421 dispatchBlock->rewriteAtDepth(-int(callInfo.numFormals()), undefined);
michael@0 4422
michael@0 4423 // Construct a block that does nothing but remove formals from the stack.
michael@0 4424 // This is effectively changing the entry resume point of the later fallback block.
michael@0 4425 MBasicBlock *prepBlock = newBlock(dispatchBlock, pc);
michael@0 4426 if (!prepBlock)
michael@0 4427 return false;
michael@0 4428 fallbackInfo.popFormals(prepBlock);
michael@0 4429
michael@0 4430 // Construct a block into which the MGetPropertyCache can be moved.
michael@0 4431 // This is subtle: the pc and resume point are those of the MGetPropertyCache!
michael@0 4432 InlinePropertyTable *propTable = cache->propTable();
michael@0 4433 JS_ASSERT(propTable->pc() != nullptr);
michael@0 4434 JS_ASSERT(propTable->priorResumePoint() != nullptr);
michael@0 4435 MBasicBlock *getPropBlock = newBlock(prepBlock, propTable->pc(), propTable->priorResumePoint());
michael@0 4436 if (!getPropBlock)
michael@0 4437 return false;
michael@0 4438
michael@0 4439 prepBlock->end(MGoto::New(alloc(), getPropBlock));
michael@0 4440
michael@0 4441 // Since the getPropBlock inherited the stack from right before the MGetPropertyCache,
michael@0 4442 // the target of the MGetPropertyCache is still on the stack.
michael@0 4443 DebugOnly<MDefinition *> checkObject = getPropBlock->pop();
michael@0 4444 JS_ASSERT(checkObject == cache->object());
michael@0 4445
michael@0 4446 // Move the MGetPropertyCache and friends into the getPropBlock.
michael@0 4447 if (fallbackInfo.fun()->isGetPropertyCache()) {
michael@0 4448 JS_ASSERT(fallbackInfo.fun()->toGetPropertyCache() == cache);
michael@0 4449 getPropBlock->addFromElsewhere(cache);
michael@0 4450 getPropBlock->push(cache);
michael@0 4451 } else {
michael@0 4452 MTypeBarrier *barrier = callInfo.fun()->toTypeBarrier();
michael@0 4453 JS_ASSERT(barrier->type() == MIRType_Object);
michael@0 4454 JS_ASSERT(barrier->input()->isGetPropertyCache());
michael@0 4455 JS_ASSERT(barrier->input()->toGetPropertyCache() == cache);
michael@0 4456
michael@0 4457 getPropBlock->addFromElsewhere(cache);
michael@0 4458 getPropBlock->addFromElsewhere(barrier);
michael@0 4459 getPropBlock->push(barrier);
michael@0 4460 }
michael@0 4461
michael@0 4462 // Construct an end block with the correct resume point.
michael@0 4463 MBasicBlock *preCallBlock = newBlock(getPropBlock, pc, preCallResumePoint);
michael@0 4464 if (!preCallBlock)
michael@0 4465 return false;
michael@0 4466 getPropBlock->end(MGoto::New(alloc(), preCallBlock));
michael@0 4467
michael@0 4468 // Now inline the MCallGeneric, using preCallBlock as the dispatch point.
michael@0 4469 if (!inlineGenericFallback(nullptr, fallbackInfo, preCallBlock, false))
michael@0 4470 return false;
michael@0 4471
michael@0 4472 // inlineGenericFallback() set the return block as |current|.
michael@0 4473 preCallBlock->end(MGoto::New(alloc(), current));
michael@0 4474 *fallbackTarget = prepBlock;
michael@0 4475 return true;
michael@0 4476 }
michael@0 4477
michael@0 4478 bool
michael@0 4479 IonBuilder::inlineCalls(CallInfo &callInfo, ObjectVector &targets,
michael@0 4480 ObjectVector &originals, BoolVector &choiceSet,
michael@0 4481 MGetPropertyCache *maybeCache)
michael@0 4482 {
michael@0 4483 // Only handle polymorphic inlining.
michael@0 4484 JS_ASSERT(IsIonInlinablePC(pc));
michael@0 4485 JS_ASSERT(choiceSet.length() == targets.length());
michael@0 4486 JS_ASSERT_IF(!maybeCache, targets.length() >= 2);
michael@0 4487 JS_ASSERT_IF(maybeCache, targets.length() >= 1);
michael@0 4488
michael@0 4489 MBasicBlock *dispatchBlock = current;
michael@0 4490 callInfo.setImplicitlyUsedUnchecked();
michael@0 4491 callInfo.pushFormals(dispatchBlock);
michael@0 4492
michael@0 4493 // Patch any InlinePropertyTable to only contain functions that are inlineable.
michael@0 4494 //
michael@0 4495 // Note that we trim using originals, as callsite clones are not user
michael@0 4496 // visible. We don't patch the entries inside the table with the cloned
michael@0 4497 // targets, as the entries should only be used for comparison.
michael@0 4498 //
michael@0 4499 // The InlinePropertyTable will also be patched at the end to exclude native functions
michael@0 4500 // that vetoed inlining.
michael@0 4501 if (maybeCache) {
michael@0 4502 InlinePropertyTable *propTable = maybeCache->propTable();
michael@0 4503 propTable->trimToTargets(originals);
michael@0 4504 if (propTable->numEntries() == 0)
michael@0 4505 maybeCache = nullptr;
michael@0 4506 }
michael@0 4507
michael@0 4508 // Generate a dispatch based on guard kind.
michael@0 4509 MDispatchInstruction *dispatch;
michael@0 4510 if (maybeCache) {
michael@0 4511 dispatch = MTypeObjectDispatch::New(alloc(), maybeCache->object(), maybeCache->propTable());
michael@0 4512 callInfo.fun()->setImplicitlyUsedUnchecked();
michael@0 4513 } else {
michael@0 4514 dispatch = MFunctionDispatch::New(alloc(), callInfo.fun());
michael@0 4515 }
michael@0 4516
michael@0 4517 // Generate a return block to host the rval-collecting MPhi.
michael@0 4518 jsbytecode *postCall = GetNextPc(pc);
michael@0 4519 MBasicBlock *returnBlock = newBlock(nullptr, postCall);
michael@0 4520 if (!returnBlock)
michael@0 4521 return false;
michael@0 4522 returnBlock->setCallerResumePoint(callerResumePoint_);
michael@0 4523
michael@0 4524 // Set up stack, used to manually create a post-call resume point.
michael@0 4525 returnBlock->inheritSlots(dispatchBlock);
michael@0 4526 callInfo.popFormals(returnBlock);
michael@0 4527
michael@0 4528 MPhi *retPhi = MPhi::New(alloc(), returnBlock->stackDepth());
michael@0 4529 returnBlock->addPhi(retPhi);
michael@0 4530 returnBlock->push(retPhi);
michael@0 4531
michael@0 4532 // Create a resume point from current stack state.
michael@0 4533 returnBlock->initEntrySlots(alloc());
michael@0 4534
michael@0 4535 // Reserve the capacity for the phi.
michael@0 4536 // Note: this is an upper bound. Unreachable targets and uninlineable natives are also counted.
michael@0 4537 uint32_t count = 1; // Possible fallback block.
michael@0 4538 for (uint32_t i = 0; i < targets.length(); i++) {
michael@0 4539 if (choiceSet[i])
michael@0 4540 count++;
michael@0 4541 }
michael@0 4542 retPhi->reserveLength(count);
michael@0 4543
michael@0 4544 // During inlining, the |this| value is given a type set specialized to the
michael@0 4545 // type objects that can generate each inlining target. The original type
michael@0 4546 // set is restored after inlining.
michael@0 4547 types::TemporaryTypeSet *cacheObjectTypeSet =
michael@0 4548 maybeCache ? maybeCache->object()->resultTypeSet() : nullptr;
michael@0 4549
michael@0 4550 // Inline each of the inlineable targets.
michael@0 4551 JS_ASSERT(targets.length() == originals.length());
michael@0 4552 for (uint32_t i = 0; i < targets.length(); i++) {
michael@0 4553 // When original != target, the target is a callsite clone. The
michael@0 4554 // original should be used for guards, and the target should be the
michael@0 4555 // actual function inlined.
michael@0 4556 JSFunction *original = &originals[i]->as<JSFunction>();
michael@0 4557 JSFunction *target = &targets[i]->as<JSFunction>();
michael@0 4558
michael@0 4559 // Target must be inlineable.
michael@0 4560 if (!choiceSet[i])
michael@0 4561 continue;
michael@0 4562
michael@0 4563 // Target must be reachable by the MDispatchInstruction.
michael@0 4564 if (maybeCache && !maybeCache->propTable()->hasFunction(original)) {
michael@0 4565 choiceSet[i] = false;
michael@0 4566 continue;
michael@0 4567 }
michael@0 4568
michael@0 4569 MBasicBlock *inlineBlock = newBlock(dispatchBlock, pc);
michael@0 4570 if (!inlineBlock)
michael@0 4571 return false;
michael@0 4572
michael@0 4573 // Create a function MConstant to use in the entry ResumePoint.
michael@0 4574 MConstant *funcDef = MConstant::New(alloc(), ObjectValue(*target), constraints());
michael@0 4575 funcDef->setImplicitlyUsedUnchecked();
michael@0 4576 dispatchBlock->add(funcDef);
michael@0 4577
michael@0 4578 // Use the MConstant in the inline resume point and on stack.
michael@0 4579 int funIndex = inlineBlock->entryResumePoint()->numOperands() - callInfo.numFormals();
michael@0 4580 inlineBlock->entryResumePoint()->replaceOperand(funIndex, funcDef);
michael@0 4581 inlineBlock->rewriteSlot(funIndex, funcDef);
michael@0 4582
michael@0 4583 // Create a new CallInfo to track modified state within the inline block.
michael@0 4584 CallInfo inlineInfo(alloc(), callInfo.constructing());
michael@0 4585 if (!inlineInfo.init(callInfo))
michael@0 4586 return false;
michael@0 4587 inlineInfo.popFormals(inlineBlock);
michael@0 4588 inlineInfo.setFun(funcDef);
michael@0 4589
michael@0 4590 if (maybeCache) {
michael@0 4591 JS_ASSERT(callInfo.thisArg() == maybeCache->object());
michael@0 4592 types::TemporaryTypeSet *targetThisTypes =
michael@0 4593 maybeCache->propTable()->buildTypeSetForFunction(original);
michael@0 4594 if (!targetThisTypes)
michael@0 4595 return false;
michael@0 4596 maybeCache->object()->setResultTypeSet(targetThisTypes);
michael@0 4597 }
michael@0 4598
michael@0 4599 // Inline the call into the inlineBlock.
michael@0 4600 if (!setCurrentAndSpecializePhis(inlineBlock))
michael@0 4601 return false;
michael@0 4602 InliningStatus status = inlineSingleCall(inlineInfo, target);
michael@0 4603 if (status == InliningStatus_Error)
michael@0 4604 return false;
michael@0 4605
michael@0 4606 // Natives may veto inlining.
michael@0 4607 if (status == InliningStatus_NotInlined) {
michael@0 4608 JS_ASSERT(target->isNative());
michael@0 4609 JS_ASSERT(current == inlineBlock);
michael@0 4610 inlineBlock->discardAllResumePoints();
michael@0 4611 graph().removeBlock(inlineBlock);
michael@0 4612 choiceSet[i] = false;
michael@0 4613 continue;
michael@0 4614 }
michael@0 4615
michael@0 4616 // inlineSingleCall() changed |current| to the inline return block.
michael@0 4617 MBasicBlock *inlineReturnBlock = current;
michael@0 4618 setCurrent(dispatchBlock);
michael@0 4619
michael@0 4620 // Connect the inline path to the returnBlock.
michael@0 4621 //
michael@0 4622 // Note that guarding is on the original function pointer even
michael@0 4623 // if there is a clone, since cloning occurs at the callsite.
michael@0 4624 dispatch->addCase(original, inlineBlock);
michael@0 4625
michael@0 4626 MDefinition *retVal = inlineReturnBlock->peek(-1);
michael@0 4627 retPhi->addInput(retVal);
michael@0 4628 inlineReturnBlock->end(MGoto::New(alloc(), returnBlock));
michael@0 4629 if (!returnBlock->addPredecessorWithoutPhis(inlineReturnBlock))
michael@0 4630 return false;
michael@0 4631 }
michael@0 4632
michael@0 4633 // Patch the InlinePropertyTable to not dispatch to vetoed paths.
michael@0 4634 //
michael@0 4635 // Note that like above, we trim using originals instead of targets.
michael@0 4636 if (maybeCache) {
michael@0 4637 maybeCache->object()->setResultTypeSet(cacheObjectTypeSet);
michael@0 4638
michael@0 4639 InlinePropertyTable *propTable = maybeCache->propTable();
michael@0 4640 propTable->trimTo(originals, choiceSet);
michael@0 4641
michael@0 4642 // If all paths were vetoed, output only a generic fallback path.
michael@0 4643 if (propTable->numEntries() == 0) {
michael@0 4644 JS_ASSERT(dispatch->numCases() == 0);
michael@0 4645 maybeCache = nullptr;
michael@0 4646 }
michael@0 4647 }
michael@0 4648
michael@0 4649 // If necessary, generate a fallback path.
michael@0 4650 // MTypeObjectDispatch always uses a fallback path.
michael@0 4651 if (maybeCache || dispatch->numCases() < targets.length()) {
michael@0 4652 // Generate fallback blocks, and set |current| to the fallback return block.
michael@0 4653 if (maybeCache) {
michael@0 4654 MBasicBlock *fallbackTarget;
michael@0 4655 if (!inlineTypeObjectFallback(callInfo, dispatchBlock, (MTypeObjectDispatch *)dispatch,
michael@0 4656 maybeCache, &fallbackTarget))
michael@0 4657 {
michael@0 4658 return false;
michael@0 4659 }
michael@0 4660 dispatch->addFallback(fallbackTarget);
michael@0 4661 } else {
michael@0 4662 JSFunction *remaining = nullptr;
michael@0 4663 bool clonedAtCallsite = false;
michael@0 4664
michael@0 4665 // If there is only 1 remaining case, we can annotate the fallback call
michael@0 4666 // with the target information.
michael@0 4667 if (dispatch->numCases() + 1 == originals.length()) {
michael@0 4668 for (uint32_t i = 0; i < originals.length(); i++) {
michael@0 4669 if (choiceSet[i])
michael@0 4670 continue;
michael@0 4671
michael@0 4672 remaining = &targets[i]->as<JSFunction>();
michael@0 4673 clonedAtCallsite = targets[i] != originals[i];
michael@0 4674 break;
michael@0 4675 }
michael@0 4676 }
michael@0 4677
michael@0 4678 if (!inlineGenericFallback(remaining, callInfo, dispatchBlock, clonedAtCallsite))
michael@0 4679 return false;
michael@0 4680 dispatch->addFallback(current);
michael@0 4681 }
michael@0 4682
michael@0 4683 MBasicBlock *fallbackReturnBlock = current;
michael@0 4684
michael@0 4685 // Connect fallback case to return infrastructure.
michael@0 4686 MDefinition *retVal = fallbackReturnBlock->peek(-1);
michael@0 4687 retPhi->addInput(retVal);
michael@0 4688 fallbackReturnBlock->end(MGoto::New(alloc(), returnBlock));
michael@0 4689 if (!returnBlock->addPredecessorWithoutPhis(fallbackReturnBlock))
michael@0 4690 return false;
michael@0 4691 }
michael@0 4692
michael@0 4693 // Finally add the dispatch instruction.
michael@0 4694 // This must be done at the end so that add() may be called above.
michael@0 4695 dispatchBlock->end(dispatch);
michael@0 4696
michael@0 4697 // Check the depth change: +1 for retval
michael@0 4698 JS_ASSERT(returnBlock->stackDepth() == dispatchBlock->stackDepth() - callInfo.numFormals() + 1);
michael@0 4699
michael@0 4700 graph().moveBlockToEnd(returnBlock);
michael@0 4701 return setCurrentAndSpecializePhis(returnBlock);
michael@0 4702 }
michael@0 4703
michael@0 4704 MInstruction *
michael@0 4705 IonBuilder::createDeclEnvObject(MDefinition *callee, MDefinition *scope)
michael@0 4706 {
michael@0 4707 // Get a template DeclEnvObject that we'll use to generate inline object
michael@0 4708 // creation.
michael@0 4709 DeclEnvObject *templateObj = inspector->templateDeclEnvObject();
michael@0 4710
michael@0 4711 // One field is added to the function to handle its name. This cannot be a
michael@0 4712 // dynamic slot because there is still plenty of room on the DeclEnv object.
michael@0 4713 JS_ASSERT(!templateObj->hasDynamicSlots());
michael@0 4714
michael@0 4715 // Allocate the actual object. It is important that no intervening
michael@0 4716 // instructions could potentially bail out, thus leaking the dynamic slots
michael@0 4717 // pointer.
michael@0 4718 MInstruction *declEnvObj = MNewDeclEnvObject::New(alloc(), templateObj);
michael@0 4719 current->add(declEnvObj);
michael@0 4720
michael@0 4721 // Initialize the object's reserved slots. No post barrier is needed here:
michael@0 4722 // the object will be allocated in the nursery if possible, and if the
michael@0 4723 // tenured heap is used instead, a minor collection will have been performed
michael@0 4724 // that moved scope/callee to the tenured heap.
michael@0 4725 current->add(MStoreFixedSlot::New(alloc(), declEnvObj, DeclEnvObject::enclosingScopeSlot(), scope));
michael@0 4726 current->add(MStoreFixedSlot::New(alloc(), declEnvObj, DeclEnvObject::lambdaSlot(), callee));
michael@0 4727
michael@0 4728 return declEnvObj;
michael@0 4729 }
michael@0 4730
michael@0 4731 MInstruction *
michael@0 4732 IonBuilder::createCallObject(MDefinition *callee, MDefinition *scope)
michael@0 4733 {
michael@0 4734 // Get a template CallObject that we'll use to generate inline object
michael@0 4735 // creation.
michael@0 4736 CallObject *templateObj = inspector->templateCallObject();
michael@0 4737
michael@0 4738 // If the CallObject needs dynamic slots, allocate those now.
michael@0 4739 MInstruction *slots;
michael@0 4740 if (templateObj->hasDynamicSlots()) {
michael@0 4741 size_t nslots = JSObject::dynamicSlotsCount(templateObj->numFixedSlots(),
michael@0 4742 templateObj->lastProperty()->slotSpan(templateObj->getClass()),
michael@0 4743 templateObj->getClass());
michael@0 4744 slots = MNewSlots::New(alloc(), nslots);
michael@0 4745 } else {
michael@0 4746 slots = MConstant::New(alloc(), NullValue());
michael@0 4747 }
michael@0 4748 current->add(slots);
michael@0 4749
michael@0 4750 // Allocate the actual object. It is important that no intervening
michael@0 4751 // instructions could potentially bailout, thus leaking the dynamic slots
michael@0 4752 // pointer. Run-once scripts need a singleton type, so always do a VM call
michael@0 4753 // in such cases.
michael@0 4754 MUnaryInstruction *callObj;
michael@0 4755 if (script()->treatAsRunOnce())
michael@0 4756 callObj = MNewRunOnceCallObject::New(alloc(), templateObj, slots);
michael@0 4757 else
michael@0 4758 callObj = MNewCallObject::New(alloc(), templateObj, slots);
michael@0 4759 current->add(callObj);
michael@0 4760
michael@0 4761 // Initialize the object's reserved slots. No post barrier is needed here,
michael@0 4762 // for the same reason as in createDeclEnvObject.
michael@0 4763 current->add(MStoreFixedSlot::New(alloc(), callObj, CallObject::enclosingScopeSlot(), scope));
michael@0 4764 current->add(MStoreFixedSlot::New(alloc(), callObj, CallObject::calleeSlot(), callee));
michael@0 4765
michael@0 4766 // Initialize argument slots.
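// Only formals that are aliased by the scope (e.g. closed over by a nested
// function) are visited by AliasedFormalIter; they are stored either in the
// CallObject's fixed slots or in the dynamic slots allocated above.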
michael@0 4767 for (AliasedFormalIter i(script()); i; i++) {
michael@0 4768 unsigned slot = i.scopeSlot();
michael@0 4769 unsigned formal = i.frameIndex();
michael@0 4770 MDefinition *param = current->getSlot(info().argSlotUnchecked(formal));
michael@0 4771 if (slot >= templateObj->numFixedSlots())
michael@0 4772 current->add(MStoreSlot::New(alloc(), slots, slot - templateObj->numFixedSlots(), param));
michael@0 4773 else
michael@0 4774 current->add(MStoreFixedSlot::New(alloc(), callObj, slot, param));
michael@0 4775 }
michael@0 4776
michael@0 4777 return callObj;
michael@0 4778 }
michael@0 4779
michael@0 4780 MDefinition *
michael@0 4781 IonBuilder::createThisScripted(MDefinition *callee)
michael@0 4782 {
michael@0 4783 // Get callee.prototype.
michael@0 4784 //
michael@0 4785 // This instruction MUST be idempotent: since it does not correspond to an
michael@0 4786 // explicit operation in the bytecode, we cannot use resumeAfter().
michael@0 4787 // Getters may not override |prototype| fetching, so this operation is indeed idempotent.
michael@0 4788 // - First try an idempotent property cache.
michael@0 4789 // - Upon failing idempotent property cache, we can't use a non-idempotent cache,
michael@0 4790 // therefore we fall back to CallGetProperty.
michael@0 4791 //
michael@0 4792 // Note: both CallGetProperty and GetPropertyCache can trigger a GC,
michael@0 4793 // and thus invalidation.
michael@0 4794 MInstruction *getProto;
michael@0 4795 if (!invalidatedIdempotentCache()) {
michael@0 4796 MGetPropertyCache *getPropCache = MGetPropertyCache::New(alloc(), callee, names().prototype,
michael@0 4797 /* monitored = */ false);
michael@0 4798 getPropCache->setIdempotent();
michael@0 4799 getProto = getPropCache;
michael@0 4800 } else {
michael@0 4801 MCallGetProperty *callGetProp = MCallGetProperty::New(alloc(), callee, names().prototype,
michael@0 4802 /* callprop = */ false);
michael@0 4803 callGetProp->setIdempotent();
michael@0 4804 getProto = callGetProp;
michael@0 4805 }
michael@0 4806 current->add(getProto);
michael@0 4807
michael@0 4808 // Create this from prototype
michael@0 4809 MCreateThisWithProto *createThis = MCreateThisWithProto::New(alloc(), callee, getProto);
michael@0 4810 current->add(createThis);
michael@0 4811
michael@0 4812 return createThis;
michael@0 4813 }
michael@0 4814
michael@0 4815 JSObject *
michael@0 4816 IonBuilder::getSingletonPrototype(JSFunction *target)
michael@0 4817 {
michael@0 4818 if (!target || !target->hasSingletonType())
michael@0 4819 return nullptr;
michael@0 4820 types::TypeObjectKey *targetType = types::TypeObjectKey::get(target);
michael@0 4821 if (targetType->unknownProperties())
michael@0 4822 return nullptr;
michael@0 4823
michael@0 4824 jsid protoid = NameToId(names().prototype);
michael@0 4825 types::HeapTypeSetKey protoProperty = targetType->property(protoid);
michael@0 4826
michael@0 4827 return protoProperty.singleton(constraints());
michael@0 4828 }
michael@0 4829
michael@0 4830 MDefinition *
michael@0 4831 IonBuilder::createThisScriptedSingleton(JSFunction *target, MDefinition *callee)
michael@0 4832 {
michael@0 4833 // Get the singleton prototype (if it exists).
michael@0 4834 JSObject *proto = getSingletonPrototype(target);
michael@0 4835 if (!proto)
michael@0 4836 return nullptr;
michael@0 4837
michael@0 4838 JSObject *templateObject = inspector->getTemplateObject(pc);
michael@0 4839 if (!templateObject || !templateObject->is<JSObject>())
michael@0 4840 return nullptr;
michael@0 4841 if (!templateObject->hasTenuredProto() || templateObject->getProto() != proto)
michael@0 4842 return nullptr;
michael@0 4843
michael@0 4844 if (!target->nonLazyScript()->types)
michael@0 4845 return nullptr;
michael@0 4846 if (!types::TypeScript::ThisTypes(target->nonLazyScript())->hasType(types::Type::ObjectType(templateObject)))
michael@0 4847 return nullptr;
michael@0 4848
michael@0 4849 // For template objects with NewScript info, the appropriate allocation
michael@0 4850 // kind to use may change due to dynamic property adds. In these cases the
michael@0 4851 // calling Ion code will be invalidated, but any baseline template object
michael@0 4852 // may be stale. Update to the correct template object in this case.
michael@0 4853 types::TypeObject *templateType = templateObject->type();
michael@0 4854 if (templateType->hasNewScript()) {
michael@0 4855 templateObject = templateType->newScript()->templateObject;
michael@0 4856 JS_ASSERT(templateObject->type() == templateType);
michael@0 4857
michael@0 4858 // Trigger recompilation if the templateObject changes.
michael@0 4859 types::TypeObjectKey::get(templateType)->watchStateChangeForNewScriptTemplate(constraints());
michael@0 4860 }
michael@0 4861
michael@0 4862 // Generate an inline path to create a new |this| object with
michael@0 4863 // the given singleton prototype.
michael@0 4864 MCreateThisWithTemplate *createThis =
michael@0 4865 MCreateThisWithTemplate::New(alloc(), constraints(), templateObject,
michael@0 4866 templateObject->type()->initialHeap(constraints()));
michael@0 4867 current->add(createThis);
michael@0 4868
michael@0 4869 return createThis;
michael@0 4870 }
michael@0 4871
michael@0 4872 MDefinition *
michael@0 4873 IonBuilder::createThis(JSFunction *target, MDefinition *callee)
michael@0 4874 {
michael@0 4875 // Create this for unknown target
michael@0 4876 if (!target) {
michael@0 4877 MCreateThis *createThis = MCreateThis::New(alloc(), callee);
michael@0 4878 current->add(createThis);
michael@0 4879 return createThis;
michael@0 4880 }
michael@0 4881
michael@0 4882 // Native constructors build the new Object themselves.
michael@0 4883 if (target->isNative()) {
michael@0 4884 if (!target->isNativeConstructor())
michael@0 4885 return nullptr;
michael@0 4886
michael@0 4887 MConstant *magic = MConstant::New(alloc(), MagicValue(JS_IS_CONSTRUCTING));
michael@0 4888 current->add(magic);
michael@0 4889 return magic;
michael@0 4890 }
michael@0 4891
michael@0 4892 // Try baking in the prototype.
michael@0 4893 MDefinition *createThis = createThisScriptedSingleton(target, callee);
michael@0 4894 if (createThis)
michael@0 4895 return createThis;
michael@0 4896
michael@0 4897 return createThisScripted(callee);
michael@0 4898 }
michael@0 4899
michael@0 4900 bool
michael@0 4901 IonBuilder::jsop_funcall(uint32_t argc)
michael@0 4902 {
michael@0 4903 // Stack for JSOP_FUNCALL:
michael@0 4904 // 1: arg0
michael@0 4905 // ...
michael@0 4906 // argc: argN
michael@0 4907 // argc+1: JSFunction*, the 'f' in |f.call()|, in |this| position.
michael@0 4908 // argc+2: The native 'call' function.
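// For example (illustrative), |f.call(thisv, a, b)| arrives here with
// argc == 3: |thisv| sits in the arg0 slot and |f| in the |this| position,
// so after the shimmy below the call behaves like |f(a, b)| with |thisv|
// as |this|.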
michael@0 4909
michael@0 4910 int calleeDepth = -((int)argc + 2);
michael@0 4911 int funcDepth = -((int)argc + 1);
michael@0 4912
michael@0 4913 // If |Function.prototype.call| may be overridden, don't optimize callsite.
michael@0 4914 types::TemporaryTypeSet *calleeTypes = current->peek(calleeDepth)->resultTypeSet();
michael@0 4915 JSFunction *native = getSingleCallTarget(calleeTypes);
michael@0 4916 if (!native || !native->isNative() || native->native() != &js_fun_call) {
michael@0 4917 CallInfo callInfo(alloc(), false);
michael@0 4918 if (!callInfo.init(current, argc))
michael@0 4919 return false;
michael@0 4920 return makeCall(native, callInfo, false);
michael@0 4921 }
michael@0 4922 current->peek(calleeDepth)->setImplicitlyUsedUnchecked();
michael@0 4923
michael@0 4924 // Extract call target.
michael@0 4925 types::TemporaryTypeSet *funTypes = current->peek(funcDepth)->resultTypeSet();
michael@0 4926 JSFunction *target = getSingleCallTarget(funTypes);
michael@0 4927
michael@0 4928 // Shimmy the slots down to remove the native 'call' function.
michael@0 4929 current->shimmySlots(funcDepth - 1);
michael@0 4930
michael@0 4931 bool zeroArguments = (argc == 0);
michael@0 4932
michael@0 4933 // If no |this| argument was provided, explicitly pass Undefined.
michael@0 4934 // Pushing is safe here, since one stack slot has been removed.
michael@0 4935 if (zeroArguments) {
michael@0 4936 pushConstant(UndefinedValue());
michael@0 4937 } else {
michael@0 4938 // |this| becomes implicit in the call.
michael@0 4939 argc -= 1;
michael@0 4940 }
michael@0 4941
michael@0 4942 CallInfo callInfo(alloc(), false);
michael@0 4943 if (!callInfo.init(current, argc))
michael@0 4944 return false;
michael@0 4945
michael@0 4946 // Try to inline the call.
michael@0 4947 if (!zeroArguments) {
michael@0 4948 InliningDecision decision = makeInliningDecision(target, callInfo);
michael@0 4949 switch (decision) {
michael@0 4950 case InliningDecision_Error:
michael@0 4951 return false;
michael@0 4952 case InliningDecision_DontInline:
michael@0 4953 break;
michael@0 4954 case InliningDecision_Inline:
michael@0 4955 if (target->isInterpreted())
michael@0 4956 return inlineScriptedCall(callInfo, target);
michael@0 4957 break;
michael@0 4958 }
michael@0 4959 }
michael@0 4960
michael@0 4961 // Call without inlining.
michael@0 4962 return makeCall(target, callInfo, false);
michael@0 4963 }
michael@0 4964
michael@0 4965 bool
michael@0 4966 IonBuilder::jsop_funapply(uint32_t argc)
michael@0 4967 {
michael@0 4968 int calleeDepth = -((int)argc + 2);
michael@0 4969
michael@0 4970 types::TemporaryTypeSet *calleeTypes = current->peek(calleeDepth)->resultTypeSet();
michael@0 4971 JSFunction *native = getSingleCallTarget(calleeTypes);
michael@0 4972 if (argc != 2) {
michael@0 4973 CallInfo callInfo(alloc(), false);
michael@0 4974 if (!callInfo.init(current, argc))
michael@0 4975 return false;
michael@0 4976 return makeCall(native, callInfo, false);
michael@0 4977 }
michael@0 4978
michael@0 4979 // Disable compilation if the second argument to |apply| cannot be guaranteed
michael@0 4980 // to be either definitely |arguments| or definitely not |arguments|.
michael@0 4981 MDefinition *argument = current->peek(-1);
michael@0 4982 if (script()->argumentsHasVarBinding() &&
michael@0 4983 argument->mightBeType(MIRType_MagicOptimizedArguments) &&
michael@0 4984 argument->type() != MIRType_MagicOptimizedArguments)
michael@0 4985 {
michael@0 4986 return abort("fun.apply with MaybeArguments");
michael@0 4987 }
michael@0 4988
michael@0 4989 // Fall back to a regular call if arg 2 is not definitely |arguments|.
michael@0 4990 if (argument->type() != MIRType_MagicOptimizedArguments) {
michael@0 4991 CallInfo callInfo(alloc(), false);
michael@0 4992 if (!callInfo.init(current, argc))
michael@0 4993 return false;
michael@0 4994 return makeCall(native, callInfo, false);
michael@0 4995 }
michael@0 4996
michael@0 4997 if (!native ||
michael@0 4998 !native->isNative() ||
michael@0 4999 native->native() != js_fun_apply)
michael@0 5000 {
michael@0 5001 return abort("fun.apply speculation failed");
michael@0 5002 }
michael@0 5003
michael@0 5004 current->peek(calleeDepth)->setImplicitlyUsedUnchecked();
michael@0 5005
michael@0 5006 // Use the fun.apply path that definitely uses |arguments|.
michael@0 5007 return jsop_funapplyarguments(argc);
michael@0 5008 }
michael@0 5009
michael@0 5010 bool
michael@0 5011 IonBuilder::jsop_funapplyarguments(uint32_t argc)
michael@0 5012 {
michael@0 5013 // Stack for JSOP_FUNAPPLY:
michael@0 5014 // 1: Vp
michael@0 5015 // 2: This
michael@0 5016 // argc+1: JSFunction*, the 'f' in |f.apply()|, in |this| position.
michael@0 5017 // argc+2: The native 'apply' function.
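// E.g. (illustrative) |f.apply(thisv, arguments)| reaches this helper with
// argc == 2: the arguments object is Vp, |thisv| is This, |f| occupies the
// |this| position of the outer call, and the native 'apply' is the callee.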
michael@0 5018
michael@0 5019 int funcDepth = -((int)argc + 1);
michael@0 5020
michael@0 5021 // Extract call target.
michael@0 5022 types::TemporaryTypeSet *funTypes = current->peek(funcDepth)->resultTypeSet();
michael@0 5023 JSFunction *target = getSingleCallTarget(funTypes);
michael@0 5024
michael@0 5025 // When this script isn't inlined, use MApplyArgs to copy the arguments
michael@0 5026 // from the stack and call the function.
michael@0 5027 if (inliningDepth_ == 0 && info().executionMode() != DefinitePropertiesAnalysis) {
michael@0 5028 // The array argument corresponds to the arguments object. As the JIT
michael@0 5029 // is implicitly reading the arguments object in the next instruction,
michael@0 5030 // we need to prevent the deletion of the arguments object from resume
michael@0 5031 // points, so that Baseline will behave correctly after a bailout.
michael@0 5032 MDefinition *vp = current->pop();
michael@0 5033 vp->setImplicitlyUsedUnchecked();
michael@0 5034
michael@0 5035 MDefinition *argThis = current->pop();
michael@0 5036
michael@0 5037 // Unwrap the (JSFunction *) parameter.
michael@0 5038 MDefinition *argFunc = current->pop();
michael@0 5039
michael@0 5040 // Pop apply function.
michael@0 5041 current->pop();
michael@0 5042
michael@0 5043 MArgumentsLength *numArgs = MArgumentsLength::New(alloc());
michael@0 5044 current->add(numArgs);
michael@0 5045
michael@0 5046 MApplyArgs *apply = MApplyArgs::New(alloc(), target, argFunc, numArgs, argThis);
michael@0 5047 current->add(apply);
michael@0 5048 current->push(apply);
michael@0 5049 if (!resumeAfter(apply))
michael@0 5050 return false;
michael@0 5051
michael@0 5052 types::TemporaryTypeSet *types = bytecodeTypes(pc);
michael@0 5053 return pushTypeBarrier(apply, types, true);
michael@0 5054 }
michael@0 5055
michael@0 5056 // When inlining, we know the arguments the callee is invoked with and can
michael@0 5057 // optimize further by calling the target directly with those arguments.
michael@0 5058 // We also try this path when doing the definite properties analysis, as we
michael@0 5059 // can inline the apply() target and don't care about the actual arguments
michael@0 5060 // that were passed in.
michael@0 5061
michael@0 5062 CallInfo callInfo(alloc(), false);
michael@0 5063
michael@0 5064 // Vp
michael@0 5065 MDefinition *vp = current->pop();
michael@0 5066 vp->setImplicitlyUsedUnchecked();
michael@0 5067
michael@0 5068 // Arguments
michael@0 5069 MDefinitionVector args(alloc());
michael@0 5070 if (inliningDepth_) {
michael@0 5071 if (!args.appendAll(inlineCallInfo_->argv()))
michael@0 5072 return false;
michael@0 5073 }
michael@0 5074 callInfo.setArgs(&args);
michael@0 5075
michael@0 5076 // This
michael@0 5077 MDefinition *argThis = current->pop();
michael@0 5078 callInfo.setThis(argThis);
michael@0 5079
michael@0 5080 // Pop function parameter.
michael@0 5081 MDefinition *argFunc = current->pop();
michael@0 5082 callInfo.setFun(argFunc);
michael@0 5083
michael@0 5084 // Pop apply function.
michael@0 5085 current->pop();
michael@0 5086
michael@0 5087 // Try to inline the call.
michael@0 5088 InliningDecision decision = makeInliningDecision(target, callInfo);
michael@0 5089 switch (decision) {
michael@0 5090 case InliningDecision_Error:
michael@0 5091 return false;
michael@0 5092 case InliningDecision_DontInline:
michael@0 5093 break;
michael@0 5094 case InliningDecision_Inline:
michael@0 5095 if (target->isInterpreted())
michael@0 5096 return inlineScriptedCall(callInfo, target);
michael@0 5097 }
michael@0 5098
michael@0 5099 return makeCall(target, callInfo, false);
michael@0 5100 }
michael@0 5101
michael@0 5102 bool
michael@0 5103 IonBuilder::jsop_call(uint32_t argc, bool constructing)
michael@0 5104 {
michael@0 5105 // If this call has never executed, try to seed the observed type set
michael@0 5106 // based on how the call result is used.
michael@0 5107 types::TemporaryTypeSet *observed = bytecodeTypes(pc);
michael@0 5108 if (observed->empty()) {
michael@0 5109 if (BytecodeFlowsToBitop(pc)) {
michael@0 5110 observed->addType(types::Type::Int32Type(), alloc_->lifoAlloc());
michael@0 5111 } else if (*GetNextPc(pc) == JSOP_POS) {
michael@0 5112 // Note: this is lame, overspecialized on the code patterns used
michael@0 5113 // by asm.js and should be replaced by a more general mechanism.
michael@0 5114 // See bug 870847.
michael@0 5115 observed->addType(types::Type::DoubleType(), alloc_->lifoAlloc());
michael@0 5116 }
michael@0 5117 }
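// E.g. a result consumed as |f(x) | 0| seeds Int32, while a result consumed
// by unary plus (JSOP_POS, as in |+f(x)|) seeds Double, letting the first
// compilation specialize before any result type has actually been observed.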
michael@0 5118
michael@0 5119 int calleeDepth = -((int)argc + 2);
michael@0 5120
michael@0 5121 // Acquire the known call targets, if any exist.
michael@0 5122 ObjectVector originals(alloc());
michael@0 5123 bool gotLambda = false;
michael@0 5124 types::TemporaryTypeSet *calleeTypes = current->peek(calleeDepth)->resultTypeSet();
michael@0 5125 if (calleeTypes) {
michael@0 5126 if (!getPolyCallTargets(calleeTypes, constructing, originals, 4, &gotLambda))
michael@0 5127 return false;
michael@0 5128 }
michael@0 5129 JS_ASSERT_IF(gotLambda, originals.length() <= 1);
michael@0 5130
michael@0 5131 // If any call targets need to be cloned, look for existing clones to use.
michael@0 5132 // Keep track of the originals, as the polymorphic dispatch cases are keyed on them.
michael@0 5133 bool hasClones = false;
michael@0 5134 ObjectVector targets(alloc());
michael@0 5135 for (uint32_t i = 0; i < originals.length(); i++) {
michael@0 5136 JSFunction *fun = &originals[i]->as<JSFunction>();
michael@0 5137 if (fun->hasScript() && fun->nonLazyScript()->shouldCloneAtCallsite()) {
michael@0 5138 if (JSFunction *clone = ExistingCloneFunctionAtCallsite(compartment->callsiteClones(), fun, script(), pc)) {
michael@0 5139 fun = clone;
michael@0 5140 hasClones = true;
michael@0 5141 }
michael@0 5142 }
michael@0 5143 if (!targets.append(fun))
michael@0 5144 return false;
michael@0 5145 }
michael@0 5146
michael@0 5147 CallInfo callInfo(alloc(), constructing);
michael@0 5148 if (!callInfo.init(current, argc))
michael@0 5149 return false;
michael@0 5150
michael@0 5151 // Try inlining
michael@0 5152 InliningStatus status = inlineCallsite(targets, originals, gotLambda, callInfo);
michael@0 5153 if (status == InliningStatus_Inlined)
michael@0 5154 return true;
michael@0 5155 if (status == InliningStatus_Error)
michael@0 5156 return false;
michael@0 5157
michael@0 5158 // No inline, just make the call.
michael@0 5159 JSFunction *target = nullptr;
michael@0 5160 if (targets.length() == 1)
michael@0 5161 target = &targets[0]->as<JSFunction>();
michael@0 5162
michael@0 5163 return makeCall(target, callInfo, hasClones);
michael@0 5164 }
michael@0 5165
michael@0 5166 MDefinition *
michael@0 5167 IonBuilder::makeCallsiteClone(JSFunction *target, MDefinition *fun)
michael@0 5168 {
michael@0 5169 // Bake in the clone eagerly if we have a known target. We have arrived here
michael@0 5170 // because TI told us that the known target is a should-clone-at-callsite
michael@0 5171 // function, which means that target already is the clone. Ensure that the
michael@0 5172 // old definition remains available in resume points.
michael@0 5173 if (target) {
michael@0 5174 fun->setImplicitlyUsedUnchecked();
michael@0 5175 return constant(ObjectValue(*target));
michael@0 5176 }
michael@0 5177
michael@0 5178 // Add a callsite clone IC if we have multiple targets. Note that we
michael@0 5179 // should have checked already that at least some targets are marked as
michael@0 5180 // should-clone-at-callsite.
michael@0 5181 MCallsiteCloneCache *clone = MCallsiteCloneCache::New(alloc(), fun, pc);
michael@0 5182 current->add(clone);
michael@0 5183 return clone;
michael@0 5184 }
michael@0 5185
michael@0 5186 bool
michael@0 5187 IonBuilder::testShouldDOMCall(types::TypeSet *inTypes,
michael@0 5188 JSFunction *func, JSJitInfo::OpType opType)
michael@0 5189 {
michael@0 5190 if (!func->isNative() || !func->jitInfo())
michael@0 5191 return false;
michael@0 5192
michael@0 5193 // If all the DOM objects flowing through are legal with this
michael@0 5194 // property, we can bake in a call to the bottom half of the DOM
michael@0 5195 // accessor
michael@0 5196 DOMInstanceClassMatchesProto instanceChecker =
michael@0 5197 compartment->runtime()->DOMcallbacks()->instanceClassMatchesProto;
michael@0 5198
michael@0 5199 const JSJitInfo *jinfo = func->jitInfo();
michael@0 5200 if (jinfo->type() != opType)
michael@0 5201 return false;
michael@0 5202
michael@0 5203 for (unsigned i = 0; i < inTypes->getObjectCount(); i++) {
michael@0 5204 types::TypeObjectKey *curType = inTypes->getObject(i);
michael@0 5205 if (!curType)
michael@0 5206 continue;
michael@0 5207
michael@0 5208 if (!curType->hasTenuredProto())
michael@0 5209 return false;
michael@0 5210 JSObject *proto = curType->proto().toObjectOrNull();
michael@0 5211 if (!instanceChecker(proto, jinfo->protoID, jinfo->depth))
michael@0 5212 return false;
michael@0 5213 }
michael@0 5214
michael@0 5215 return true;
michael@0 5216 }
michael@0 5217
michael@0 5218 static bool
michael@0 5219 ArgumentTypesMatch(MDefinition *def, types::StackTypeSet *calleeTypes)
michael@0 5220 {
michael@0 5221 if (def->resultTypeSet()) {
michael@0 5222 JS_ASSERT(def->type() == MIRType_Value || def->mightBeType(def->type()));
michael@0 5223 return def->resultTypeSet()->isSubset(calleeTypes);
michael@0 5224 }
michael@0 5225
michael@0 5226 if (def->type() == MIRType_Value)
michael@0 5227 return false;
michael@0 5228
michael@0 5229 if (def->type() == MIRType_Object)
michael@0 5230 return calleeTypes->unknownObject();
michael@0 5231
michael@0 5232 return calleeTypes->mightBeMIRType(def->type());
michael@0 5233 }
michael@0 5234
michael@0 5235 bool
michael@0 5236 IonBuilder::testNeedsArgumentCheck(JSFunction *target, CallInfo &callInfo)
michael@0 5237 {
michael@0 5238 // If we have a known target, check whether the caller's argument types are a
michael@0 5239 // subset of the callee's observed types. Since type sets only accumulate and
michael@0 5240 // never shrink, the per-call argument type check can then be omitted.
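// Illustrative example: if the callee has already seen Int32 and String for
// its first argument and this caller only ever passes Int32, the subset test
// passes and the argument check can be skipped for this call site.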
michael@0 5241 if (!target->hasScript())
michael@0 5242 return true;
michael@0 5243
michael@0 5244 JSScript *targetScript = target->nonLazyScript();
michael@0 5245
michael@0 5246 if (!targetScript->types)
michael@0 5247 return true;
michael@0 5248
michael@0 5249 if (!ArgumentTypesMatch(callInfo.thisArg(), types::TypeScript::ThisTypes(targetScript)))
michael@0 5250 return true;
michael@0 5251 uint32_t expected_args = Min<uint32_t>(callInfo.argc(), target->nargs());
michael@0 5252 for (size_t i = 0; i < expected_args; i++) {
michael@0 5253 if (!ArgumentTypesMatch(callInfo.getArg(i), types::TypeScript::ArgTypes(targetScript, i)))
michael@0 5254 return true;
michael@0 5255 }
michael@0 5256 for (size_t i = callInfo.argc(); i < target->nargs(); i++) {
michael@0 5257 if (!types::TypeScript::ArgTypes(targetScript, i)->mightBeMIRType(MIRType_Undefined))
michael@0 5258 return true;
michael@0 5259 }
michael@0 5260
michael@0 5261 return false;
michael@0 5262 }
michael@0 5263
michael@0 5264 MCall *
michael@0 5265 IonBuilder::makeCallHelper(JSFunction *target, CallInfo &callInfo, bool cloneAtCallsite)
michael@0 5266 {
michael@0 5267 // This function may be called with a mutated stack.
michael@0 5268 // Querying TI for popped types is invalid.
michael@0 5269
michael@0 5270 uint32_t targetArgs = callInfo.argc();
michael@0 5271
michael@0 5272 // Collect number of missing arguments provided that the target is
michael@0 5273 // scripted. Native functions are passed an explicit 'argc' parameter.
michael@0 5274 if (target && !target->isNative())
michael@0 5275 targetArgs = Max<uint32_t>(target->nargs(), callInfo.argc());
michael@0 5276
michael@0 5277 bool isDOMCall = false;
michael@0 5278 if (target && !callInfo.constructing()) {
michael@0 5279 // We know we have a single call target. Check whether the "this" types
michael@0 5280 // are DOM types and our function a DOM function, and if so flag the
michael@0 5281 // MCall accordingly.
michael@0 5282 types::TemporaryTypeSet *thisTypes = callInfo.thisArg()->resultTypeSet();
michael@0 5283 if (thisTypes &&
michael@0 5284 thisTypes->getKnownMIRType() == MIRType_Object &&
michael@0 5285 thisTypes->isDOMClass() &&
michael@0 5286 testShouldDOMCall(thisTypes, target, JSJitInfo::Method))
michael@0 5287 {
michael@0 5288 isDOMCall = true;
michael@0 5289 }
michael@0 5290 }
michael@0 5291
michael@0 5292 MCall *call = MCall::New(alloc(), target, targetArgs + 1, callInfo.argc(),
michael@0 5293 callInfo.constructing(), isDOMCall);
michael@0 5294 if (!call)
michael@0 5295 return nullptr;
michael@0 5296
michael@0 5297 // Explicitly pad any missing arguments with |undefined|.
michael@0 5298 // This permits skipping the argumentsRectifier.
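// E.g. (illustrative) a scripted target declared as |function f(a, b, c)|
// called with a single actual argument gets |undefined| added here for
// |b| and |c|, so the callee sees its full formal count.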
michael@0 5299 for (int i = targetArgs; i > (int)callInfo.argc(); i--) {
michael@0 5300 JS_ASSERT_IF(target, !target->isNative());
michael@0 5301 MConstant *undef = constant(UndefinedValue());
michael@0 5302 call->addArg(i, undef);
michael@0 5303 }
michael@0 5304
michael@0 5305 // Add explicit arguments.
michael@0 5306 // Skip addArg(0) because that slot is reserved for |this|.
michael@0 5307 for (int32_t i = callInfo.argc() - 1; i >= 0; i--)
michael@0 5308 call->addArg(i + 1, callInfo.getArg(i));
michael@0 5309
michael@0 5310 // Now that we've told it about all the args, compute whether the call is movable.
michael@0 5311 call->computeMovable();
michael@0 5312
michael@0 5313 // Inline the constructor on the caller-side.
michael@0 5314 if (callInfo.constructing()) {
michael@0 5315 MDefinition *create = createThis(target, callInfo.fun());
michael@0 5316 if (!create) {
michael@0 5317 abort("Failure inlining constructor for call.");
michael@0 5318 return nullptr;
michael@0 5319 }
michael@0 5320
michael@0 5321 callInfo.thisArg()->setImplicitlyUsedUnchecked();
michael@0 5322 callInfo.setThis(create);
michael@0 5323 }
michael@0 5324
michael@0 5325 // Pass |this| and function.
michael@0 5326 MDefinition *thisArg = callInfo.thisArg();
michael@0 5327 call->addArg(0, thisArg);
michael@0 5328
michael@0 5329 // Add a callsite clone IC for multiple targets which all should be
michael@0 5330 // callsite cloned, or bake in the clone for a single target.
michael@0 5331 if (cloneAtCallsite) {
michael@0 5332 MDefinition *fun = makeCallsiteClone(target, callInfo.fun());
michael@0 5333 callInfo.setFun(fun);
michael@0 5334 }
michael@0 5335
michael@0 5336 if (target && !testNeedsArgumentCheck(target, callInfo))
michael@0 5337 call->disableArgCheck();
michael@0 5338
michael@0 5339 call->initFunction(callInfo.fun());
michael@0 5340
michael@0 5341 current->add(call);
michael@0 5342 return call;
michael@0 5343 }
michael@0 5344
michael@0 5345 static bool
michael@0 5346 DOMCallNeedsBarrier(const JSJitInfo* jitinfo, types::TemporaryTypeSet *types)
michael@0 5347 {
michael@0 5348 // If the return type of our DOM native is in "types" already, we don't
michael@0 5349 // actually need a barrier.
michael@0 5350 if (jitinfo->returnType() == JSVAL_TYPE_UNKNOWN)
michael@0 5351 return true;
michael@0 5352
michael@0 5353 // JSVAL_TYPE_OBJECT doesn't tell us much; we still have to barrier on the
michael@0 5354 // actual type of the object.
michael@0 5355 if (jitinfo->returnType() == JSVAL_TYPE_OBJECT)
michael@0 5356 return true;
michael@0 5357
michael@0 5358 // No need for a barrier if we're already expecting the type we'll produce.
michael@0 5359 return MIRTypeFromValueType(jitinfo->returnType()) != types->getKnownMIRType();
michael@0 5360 }
michael@0 5361
michael@0 5362 bool
michael@0 5363 IonBuilder::makeCall(JSFunction *target, CallInfo &callInfo, bool cloneAtCallsite)
michael@0 5364 {
michael@0 5365 // Constructor calls to non-constructors should throw. We don't want to use
michael@0 5366 // CallKnown in this case.
michael@0 5367 JS_ASSERT_IF(callInfo.constructing() && target,
michael@0 5368 target->isInterpretedConstructor() || target->isNativeConstructor());
michael@0 5369
michael@0 5370 MCall *call = makeCallHelper(target, callInfo, cloneAtCallsite);
michael@0 5371 if (!call)
michael@0 5372 return false;
michael@0 5373
michael@0 5374 current->push(call);
michael@0 5375 if (call->isEffectful() && !resumeAfter(call))
michael@0 5376 return false;
michael@0 5377
michael@0 5378 types::TemporaryTypeSet *types = bytecodeTypes(pc);
michael@0 5379
michael@0 5380 if (call->isCallDOMNative())
michael@0 5381 return pushDOMTypeBarrier(call, types, call->getSingleTarget());
michael@0 5382
michael@0 5383 return pushTypeBarrier(call, types, true);
michael@0 5384 }
michael@0 5385
michael@0 5386 bool
michael@0 5387 IonBuilder::jsop_eval(uint32_t argc)
michael@0 5388 {
michael@0 5389 int calleeDepth = -((int)argc + 2);
michael@0 5390 types::TemporaryTypeSet *calleeTypes = current->peek(calleeDepth)->resultTypeSet();
michael@0 5391
michael@0 5392 // Emit a normal call if the eval has never executed. This keeps us from
michael@0 5393 // disabling compilation for the script when testing with --ion-eager.
michael@0 5394 if (calleeTypes && calleeTypes->empty())
michael@0 5395 return jsop_call(argc, /* constructing = */ false);
michael@0 5396
michael@0 5397 JSFunction *singleton = getSingleCallTarget(calleeTypes);
michael@0 5398 if (!singleton)
michael@0 5399 return abort("No singleton callee for eval()");
michael@0 5400
michael@0 5401 if (script()->global().valueIsEval(ObjectValue(*singleton))) {
michael@0 5402 if (argc != 1)
michael@0 5403 return abort("Direct eval with more than one argument");
michael@0 5404
michael@0 5405 if (!info().funMaybeLazy())
michael@0 5406 return abort("Direct eval in global code");
michael@0 5407
michael@0 5408 // The 'this' value for the outer and eval scripts must be the
michael@0 5409 // same. This is not guaranteed if a primitive string/number/etc.
michael@0 5410 // is passed through to the eval invoke as the primitive may be
michael@0 5411 // boxed into different objects if accessed via 'this'.
michael@0 5412 MIRType type = thisTypes->getKnownMIRType();
michael@0 5413 if (type != MIRType_Object && type != MIRType_Null && type != MIRType_Undefined)
michael@0 5414 return abort("Direct eval from script with maybe-primitive 'this'");
michael@0 5415
michael@0 5416 CallInfo callInfo(alloc(), /* constructing = */ false);
michael@0 5417 if (!callInfo.init(current, argc))
michael@0 5418 return false;
michael@0 5419 callInfo.setImplicitlyUsedUnchecked();
michael@0 5420
michael@0 5421 callInfo.fun()->setImplicitlyUsedUnchecked();
michael@0 5422
michael@0 5423 MDefinition *scopeChain = current->scopeChain();
michael@0 5424 MDefinition *string = callInfo.getArg(0);
michael@0 5425
michael@0 5426 // Direct eval acts as identity on non-string types according to
michael@0 5427 // ES5 15.1.2.1 step 1.
michael@0 5428 if (!string->mightBeType(MIRType_String)) {
michael@0 5429 current->push(string);
michael@0 5430 types::TemporaryTypeSet *types = bytecodeTypes(pc);
michael@0 5431 return pushTypeBarrier(string, types, true);
michael@0 5432 }
michael@0 5433
michael@0 5434 current->pushSlot(info().thisSlot());
michael@0 5435 MDefinition *thisValue = current->pop();
michael@0 5436
michael@0 5437 // Try to pattern match 'eval(v + "()")'. In this case v is likely a
michael@0 5438 // name on the scope chain and the eval is performing a call on that
michael@0 5439 // value. Use a dynamic scope chain lookup rather than a full eval.
michael@0 5440 if (string->isConcat() &&
michael@0 5441 string->getOperand(1)->isConstant() &&
michael@0 5442 string->getOperand(1)->toConstant()->value().isString())
michael@0 5443 {
michael@0 5444 JSAtom *atom = &string->getOperand(1)->toConstant()->value().toString()->asAtom();
michael@0 5445
michael@0 5446 if (StringEqualsAscii(atom, "()")) {
michael@0 5447 MDefinition *name = string->getOperand(0);
michael@0 5448 MInstruction *dynamicName = MGetDynamicName::New(alloc(), scopeChain, name);
michael@0 5449 current->add(dynamicName);
michael@0 5450
michael@0 5451 current->push(dynamicName);
michael@0 5452 current->push(thisValue);
michael@0 5453
michael@0 5454 CallInfo evalCallInfo(alloc(), /* constructing = */ false);
michael@0 5455 if (!evalCallInfo.init(current, /* argc = */ 0))
michael@0 5456 return false;
michael@0 5457
michael@0 5458 return makeCall(nullptr, evalCallInfo, false);
michael@0 5459 }
michael@0 5460 }
michael@0 5461
michael@0 5462 MInstruction *filterArguments = MFilterArgumentsOrEval::New(alloc(), string);
michael@0 5463 current->add(filterArguments);
michael@0 5464
michael@0 5465 MInstruction *ins = MCallDirectEval::New(alloc(), scopeChain, string, thisValue, pc);
michael@0 5466 current->add(ins);
michael@0 5467 current->push(ins);
michael@0 5468
michael@0 5469 types::TemporaryTypeSet *types = bytecodeTypes(pc);
michael@0 5470 return resumeAfter(ins) && pushTypeBarrier(ins, types, true);
michael@0 5471 }
michael@0 5472
michael@0 5473 return jsop_call(argc, /* constructing = */ false);
michael@0 5474 }
michael@0 5475
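// Pop both operands, build an MCompare, and let it infer its comparison
// specialization from baseline inspector data at this pc. A resume point is
// only attached if the inferred compare turns out to be effectful.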
michael@0 5476 bool
michael@0 5477 IonBuilder::jsop_compare(JSOp op)
michael@0 5478 {
michael@0 5479 MDefinition *right = current->pop();
michael@0 5480 MDefinition *left = current->pop();
michael@0 5481
michael@0 5482 MCompare *ins = MCompare::New(alloc(), left, right, op);
michael@0 5483 current->add(ins);
michael@0 5484 current->push(ins);
michael@0 5485
michael@0 5486 ins->infer(inspector, pc);
michael@0 5487
michael@0 5488 if (ins->isEffectful() && !resumeAfter(ins))
michael@0 5489 return false;
michael@0 5490 return true;
michael@0 5491 }
michael@0 5492
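// Allocate an array from the template object recorded by the baseline
// inspector for this site, and propagate whether its elements should be
// stored as doubles.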
michael@0 5493 bool
michael@0 5494 IonBuilder::jsop_newarray(uint32_t count)
michael@0 5495 {
michael@0 5496 JS_ASSERT(script()->compileAndGo());
michael@0 5497
michael@0 5498 JSObject *templateObject = inspector->getTemplateObject(pc);
michael@0 5499 if (!templateObject)
michael@0 5500 return abort("No template object for NEWARRAY");
michael@0 5501
michael@0 5502 JS_ASSERT(templateObject->is<ArrayObject>());
michael@0 5503 if (templateObject->type()->unknownProperties()) {
michael@0 5504 // We will get confused in jsop_initelem_array if we can't find the
michael@0 5505 // type object being initialized.
michael@0 5506 return abort("New array has unknown properties");
michael@0 5507 }
michael@0 5508
michael@0 5509 MNewArray *ins = MNewArray::New(alloc(), constraints(), count, templateObject,
michael@0 5510 templateObject->type()->initialHeap(constraints()),
michael@0 5511 MNewArray::NewArray_Allocating);
michael@0 5512 current->add(ins);
michael@0 5513 current->push(ins);
michael@0 5514
michael@0 5515 types::TemporaryTypeSet::DoubleConversion conversion =
michael@0 5516 ins->resultTypeSet()->convertDoubleElements(constraints());
michael@0 5517
michael@0 5518 if (conversion == types::TemporaryTypeSet::AlwaysConvertToDoubles)
michael@0 5519 templateObject->setShouldConvertDoubleElements();
michael@0 5520 else
michael@0 5521 templateObject->clearShouldConvertDoubleElements();
michael@0 5522 return true;
michael@0 5523 }
michael@0 5524
michael@0 5525 bool
michael@0 5526 IonBuilder::jsop_newobject()
michael@0 5527 {
michael@0 5528 // Don't bake in the TypeObject for non-compile-and-go scripts.
michael@0 5529 JS_ASSERT(script()->compileAndGo());
michael@0 5530
michael@0 5531 JSObject *templateObject = inspector->getTemplateObject(pc);
michael@0 5532 if (!templateObject)
michael@0 5533 return abort("No template object for NEWOBJECT");
michael@0 5534
michael@0 5535 JS_ASSERT(templateObject->is<JSObject>());
michael@0 5536 MNewObject *ins = MNewObject::New(alloc(), constraints(), templateObject,
michael@0 5537 templateObject->hasSingletonType()
michael@0 5538 ? gc::TenuredHeap
michael@0 5539 : templateObject->type()->initialHeap(constraints()),
michael@0 5540 /* templateObjectIsClassPrototype = */ false);
michael@0 5541
michael@0 5542 current->add(ins);
michael@0 5543 current->push(ins);
michael@0 5544
michael@0 5545 return resumeAfter(ins);
michael@0 5546 }
michael@0 5547
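// Pop the value and the id, keep the target object on the stack, and emit a
// generic MInitElem, which is effectful and therefore gets a resume point.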
michael@0 5548 bool
michael@0 5549 IonBuilder::jsop_initelem()
michael@0 5550 {
michael@0 5551 MDefinition *value = current->pop();
michael@0 5552 MDefinition *id = current->pop();
michael@0 5553 MDefinition *obj = current->peek(-1);
michael@0 5554
michael@0 5555 MInitElem *initElem = MInitElem::New(alloc(), obj, id, value);
michael@0 5556 current->add(initElem);
michael@0 5557
michael@0 5558 return resumeAfter(initElem);
michael@0 5559 }
michael@0 5560
michael@0 5561 bool
michael@0 5562 IonBuilder::jsop_initelem_array()
michael@0 5563 {
michael@0 5564 MDefinition *value = current->pop();
michael@0 5565 MDefinition *obj = current->peek(-1);
michael@0 5566
michael@0 5567 // Make sure that arrays have the type being written to them by the
michael@0 5568 // initializer, and that arrays are marked as non-packed when writing holes
michael@0 5569 // to them during initialization.
michael@0 5570 bool needStub = false;
michael@0 5571 types::TypeObjectKey *initializer = obj->resultTypeSet()->getObject(0);
michael@0 5572 if (value->type() == MIRType_MagicHole) {
michael@0 5573 if (!initializer->hasFlags(constraints(), types::OBJECT_FLAG_NON_PACKED))
michael@0 5574 needStub = true;
michael@0 5575 } else if (!initializer->unknownProperties()) {
michael@0 5576 types::HeapTypeSetKey elemTypes = initializer->property(JSID_VOID);
michael@0 5577 if (!TypeSetIncludes(elemTypes.maybeTypes(), value->type(), value->resultTypeSet())) {
michael@0 5578 elemTypes.freeze(constraints());
michael@0 5579 needStub = true;
michael@0 5580 }
michael@0 5581 }
michael@0 5582
michael@0 5583 if (NeedsPostBarrier(info(), value))
michael@0 5584 current->add(MPostWriteBarrier::New(alloc(), obj, value));
michael@0 5585
michael@0 5586 if (needStub) {
michael@0 5587 MCallInitElementArray *store = MCallInitElementArray::New(alloc(), obj, GET_UINT24(pc), value);
michael@0 5588 current->add(store);
michael@0 5589 return resumeAfter(store);
michael@0 5590 }
michael@0 5591
michael@0 5592 MConstant *id = MConstant::New(alloc(), Int32Value(GET_UINT24(pc)));
michael@0 5593 current->add(id);
michael@0 5594
michael@0 5595 // Get the elements vector.
michael@0 5596 MElements *elements = MElements::New(alloc(), obj);
michael@0 5597 current->add(elements);
michael@0 5598
michael@0 5599 JSObject *templateObject = obj->toNewArray()->templateObject();
michael@0 5600
michael@0 5601 if (templateObject->shouldConvertDoubleElements()) {
michael@0 5602 MInstruction *valueDouble = MToDouble::New(alloc(), value);
michael@0 5603 current->add(valueDouble);
michael@0 5604 value = valueDouble;
michael@0 5605 }
michael@0 5606
michael@0 5607 // Store the value.
michael@0 5608 MStoreElement *store = MStoreElement::New(alloc(), elements, id, value, /* needsHoleCheck = */ false);
michael@0 5609 current->add(store);
michael@0 5610
michael@0 5611 // Update the initialized length. (The template object for this array has
michael@0 5612 // the array's ultimate length, so the length field is already correct: no
michael@0 5613 // updating needed.)
michael@0 5614 MSetInitializedLength *initLength = MSetInitializedLength::New(alloc(), elements, id);
michael@0 5615 current->add(initLength);
michael@0 5616
michael@0 5617 if (!resumeAfter(initLength))
michael@0 5618 return false;
michael@0 5619
michael@0 5620 return true;
michael@0 5621 }
michael@0 5622
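// Pop the new prototype value, keep the object being initialized on the
// stack, and emit an MMutateProto with a resume point after it.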
michael@0 5623 bool
michael@0 5624 IonBuilder::jsop_mutateproto()
michael@0 5625 {
michael@0 5626 MDefinition *value = current->pop();
michael@0 5627 MDefinition *obj = current->peek(-1);
michael@0 5628
michael@0 5629 MMutateProto *mutate = MMutateProto::New(alloc(), obj, value);
michael@0 5630 current->add(mutate);
michael@0 5631 return resumeAfter(mutate);
michael@0 5632 }
michael@0 5633
michael@0 5634 bool
michael@0 5635 IonBuilder::jsop_initprop(PropertyName *name)
michael@0 5636 {
michael@0 5637 MDefinition *value = current->pop();
michael@0 5638 MDefinition *obj = current->peek(-1);
michael@0 5639
michael@0 5640 JSObject *templateObject = obj->toNewObject()->templateObject();
michael@0 5641
michael@0 5642 Shape *shape = templateObject->lastProperty()->searchLinear(NameToId(name));
michael@0 5643
michael@0 5644 if (!shape) {
michael@0 5645 // JSOP_NEWINIT becomes an MNewObject without preconfigured properties.
michael@0 5646 MInitProp *init = MInitProp::New(alloc(), obj, name, value);
michael@0 5647 current->add(init);
michael@0 5648 return resumeAfter(init);
michael@0 5649 }
michael@0 5650
michael@0 5651 if (PropertyWriteNeedsTypeBarrier(alloc(), constraints(), current,
michael@0 5652 &obj, name, &value, /* canModify = */ true))
michael@0 5653 {
michael@0 5654 // JSOP_NEWINIT becomes an MNewObject without preconfigured properties.
michael@0 5655 MInitProp *init = MInitProp::New(alloc(), obj, name, value);
michael@0 5656 current->add(init);
michael@0 5657 return resumeAfter(init);
michael@0 5658 }
michael@0 5659
michael@0 5660 if (NeedsPostBarrier(info(), value))
michael@0 5661 current->add(MPostWriteBarrier::New(alloc(), obj, value));
michael@0 5662
michael@0 5663 bool needsBarrier = true;
michael@0 5664 if (obj->resultTypeSet() &&
michael@0 5665 !obj->resultTypeSet()->propertyNeedsBarrier(constraints(), NameToId(name)))
michael@0 5666 {
michael@0 5667 needsBarrier = false;
michael@0 5668 }
michael@0 5669
michael@0 5670 // In parallel execution, we never require write barriers. See
michael@0 5671 // forkjoin.cpp for more information.
michael@0 5672 if (info().executionMode() == ParallelExecution)
michael@0 5673 needsBarrier = false;
michael@0 5674
michael@0 5675 if (templateObject->isFixedSlot(shape->slot())) {
michael@0 5676 MStoreFixedSlot *store = MStoreFixedSlot::New(alloc(), obj, shape->slot(), value);
michael@0 5677 if (needsBarrier)
michael@0 5678 store->setNeedsBarrier();
michael@0 5679
michael@0 5680 current->add(store);
michael@0 5681 return resumeAfter(store);
michael@0 5682 }
michael@0 5683
michael@0 5684 MSlots *slots = MSlots::New(alloc(), obj);
michael@0 5685 current->add(slots);
michael@0 5686
michael@0 5687 uint32_t slot = templateObject->dynamicSlotIndex(shape->slot());
michael@0 5688 MStoreSlot *store = MStoreSlot::New(alloc(), slots, slot, value);
michael@0 5689 if (needsBarrier)
michael@0 5690 store->setNeedsBarrier();
michael@0 5691
michael@0 5692 current->add(store);
michael@0 5693 return resumeAfter(store);
michael@0 5694 }
michael@0 5695
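// Define the accessor property |name| on the object being initialized via an
// MInitPropGetterSetter, resuming after it.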
michael@0 5696 bool
michael@0 5697 IonBuilder::jsop_initprop_getter_setter(PropertyName *name)
michael@0 5698 {
michael@0 5699 MDefinition *value = current->pop();
michael@0 5700 MDefinition *obj = current->peek(-1);
michael@0 5701
michael@0 5702 MInitPropGetterSetter *init = MInitPropGetterSetter::New(alloc(), obj, name, value);
michael@0 5703 current->add(init);
michael@0 5704 return resumeAfter(init);
michael@0 5705 }
michael@0 5706
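// Same as jsop_initprop_getter_setter, except that the property key is a
// value popped from the stack rather than a property name.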
michael@0 5707 bool
michael@0 5708 IonBuilder::jsop_initelem_getter_setter()
michael@0 5709 {
michael@0 5710 MDefinition *value = current->pop();
michael@0 5711 MDefinition *id = current->pop();
michael@0 5712 MDefinition *obj = current->peek(-1);
michael@0 5713
michael@0 5714 MInitElemGetterSetter *init = MInitElemGetterSetter::New(alloc(), obj, id, value);
michael@0 5715 current->add(init);
michael@0 5716 return resumeAfter(init);
michael@0 5717 }
michael@0 5718
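// Register a newly created block in the MIR graph and tag it with the given
// loop depth; nullptr is passed through if block creation failed.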
michael@0 5719 MBasicBlock *
michael@0 5720 IonBuilder::addBlock(MBasicBlock *block, uint32_t loopDepth)
michael@0 5721 {
michael@0 5722 if (!block)
michael@0 5723 return nullptr;
michael@0 5724 graph().addBlock(block);
michael@0 5725 block->setLoopDepth(loopDepth);
michael@0 5726 return block;
michael@0 5727 }
michael@0 5728
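// The newBlock* helpers below construct an MBasicBlock of the requested kind
// and either hand it to addBlock or insert it directly into the graph.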
michael@0 5729 MBasicBlock *
michael@0 5730 IonBuilder::newBlock(MBasicBlock *predecessor, jsbytecode *pc)
michael@0 5731 {
michael@0 5732 MBasicBlock *block = MBasicBlock::New(graph(), &analysis(), info(),
michael@0 5733 predecessor, pc, MBasicBlock::NORMAL);
michael@0 5734 return addBlock(block, loopDepth_);
michael@0 5735 }
michael@0 5736
michael@0 5737 MBasicBlock *
michael@0 5738 IonBuilder::newBlock(MBasicBlock *predecessor, jsbytecode *pc, MResumePoint *priorResumePoint)
michael@0 5739 {
michael@0 5740 MBasicBlock *block = MBasicBlock::NewWithResumePoint(graph(), info(), predecessor, pc,
michael@0 5741 priorResumePoint);
michael@0 5742 return addBlock(block, loopDepth_);
michael@0 5743 }
michael@0 5744
michael@0 5745 MBasicBlock *
michael@0 5746 IonBuilder::newBlockPopN(MBasicBlock *predecessor, jsbytecode *pc, uint32_t popped)
michael@0 5747 {
michael@0 5748 MBasicBlock *block = MBasicBlock::NewPopN(graph(), info(), predecessor, pc, MBasicBlock::NORMAL, popped);
michael@0 5749 return addBlock(block, loopDepth_);
michael@0 5750 }
michael@0 5751
michael@0 5752 MBasicBlock *
michael@0 5753 IonBuilder::newBlockAfter(MBasicBlock *at, MBasicBlock *predecessor, jsbytecode *pc)
michael@0 5754 {
michael@0 5755 MBasicBlock *block = MBasicBlock::New(graph(), &analysis(), info(),
michael@0 5756 predecessor, pc, MBasicBlock::NORMAL);
michael@0 5757 if (!block)
michael@0 5758 return nullptr;
michael@0 5759 graph().insertBlockAfter(at, block);
michael@0 5760 return block;
michael@0 5761 }
michael@0 5762
michael@0 5763 MBasicBlock *
michael@0 5764 IonBuilder::newBlock(MBasicBlock *predecessor, jsbytecode *pc, uint32_t loopDepth)
michael@0 5765 {
michael@0 5766 MBasicBlock *block = MBasicBlock::New(graph(), &analysis(), info(),
michael@0 5767 predecessor, pc, MBasicBlock::NORMAL);
michael@0 5768 return addBlock(block, loopDepth);
michael@0 5769 }
michael@0 5770
michael@0 5771 MBasicBlock *
michael@0 5772 IonBuilder::newOsrPreheader(MBasicBlock *predecessor, jsbytecode *loopEntry)
michael@0 5773 {
michael@0 5774 JS_ASSERT(LoopEntryCanIonOsr(loopEntry));
michael@0 5775 JS_ASSERT(loopEntry == info().osrPc());
michael@0 5776
michael@0 5777 // Create two blocks: one for the OSR entry with no predecessors, one for
michael@0 5778 // the preheader, which has the OSR entry block as a predecessor. The
michael@0 5779 // OSR block is always the second block (with id 1).
michael@0 5780 MBasicBlock *osrBlock = newBlockAfter(*graph().begin(), loopEntry);
michael@0 5781 MBasicBlock *preheader = newBlock(predecessor, loopEntry);
michael@0 5782 if (!osrBlock || !preheader)
michael@0 5783 return nullptr;
michael@0 5784
michael@0 5785 MOsrEntry *entry = MOsrEntry::New(alloc());
michael@0 5786 osrBlock->add(entry);
michael@0 5787
michael@0 5788 // Initialize |scopeChain|.
michael@0 5789 {
michael@0 5790 uint32_t slot = info().scopeChainSlot();
michael@0 5791
michael@0 5792 MInstruction *scopev;
michael@0 5793 if (analysis().usesScopeChain()) {
michael@0 5794 scopev = MOsrScopeChain::New(alloc(), entry);
michael@0 5795 } else {
michael@0 5796 // Use an undefined value if the script does not need its scope
michael@0 5797 // chain, to match the type that is already being tracked for the
michael@0 5798 // slot.
michael@0 5799 scopev = MConstant::New(alloc(), UndefinedValue());
michael@0 5800 }
michael@0 5801
michael@0 5802 osrBlock->add(scopev);
michael@0 5803 osrBlock->initSlot(slot, scopev);
michael@0 5804 }
michael@0 5805 // Initialize |return value|
michael@0 5806 {
michael@0 5807 MInstruction *returnValue;
michael@0 5808 if (!script()->noScriptRval())
michael@0 5809 returnValue = MOsrReturnValue::New(alloc(), entry);
michael@0 5810 else
michael@0 5811 returnValue = MConstant::New(alloc(), UndefinedValue());
michael@0 5812 osrBlock->add(returnValue);
michael@0 5813 osrBlock->initSlot(info().returnValueSlot(), returnValue);
michael@0 5814 }
michael@0 5815
michael@0 5816 // Initialize arguments object.
michael@0 5817 bool needsArgsObj = info().needsArgsObj();
michael@0 5818 MInstruction *argsObj = nullptr;
michael@0 5819 if (info().hasArguments()) {
michael@0 5820 if (needsArgsObj)
michael@0 5821 argsObj = MOsrArgumentsObject::New(alloc(), entry);
michael@0 5822 else
michael@0 5823 argsObj = MConstant::New(alloc(), UndefinedValue());
michael@0 5824 osrBlock->add(argsObj);
michael@0 5825 osrBlock->initSlot(info().argsObjSlot(), argsObj);
michael@0 5826 }
michael@0 5827
michael@0 5828 if (info().funMaybeLazy()) {
michael@0 5829 // Initialize |this| parameter.
michael@0 5830 MParameter *thisv = MParameter::New(alloc(), MParameter::THIS_SLOT, nullptr);
michael@0 5831 osrBlock->add(thisv);
michael@0 5832 osrBlock->initSlot(info().thisSlot(), thisv);
michael@0 5833
michael@0 5834 // Initialize arguments.
michael@0 5835 for (uint32_t i = 0; i < info().nargs(); i++) {
michael@0 5836 uint32_t slot = needsArgsObj ? info().argSlotUnchecked(i) : info().argSlot(i);
michael@0 5837
michael@0 5838 // Only grab arguments from the arguments object if the arguments object
michael@0 5839 // aliases formals. If the argsobj does not alias formals, then the
michael@0 5840 // formals may have been assigned to during interpretation, and that change
michael@0 5841 // will not be reflected in the argsobj.
michael@0 5842 if (needsArgsObj && info().argsObjAliasesFormals()) {
michael@0 5843 JS_ASSERT(argsObj && argsObj->isOsrArgumentsObject());
michael@0 5844 // If this is an aliased formal, then the arguments object
michael@0 5845 // contains a hole at this index. Any references to this
michael@0 5846 // variable in the jitcode will come from JSOP_*ALIASEDVAR
michael@0 5847 // opcodes, so the slot itself can be set to undefined. If
michael@0 5848 // it's not aliased, it must be retrieved from the arguments
michael@0 5849 // object.
michael@0 5850 MInstruction *osrv;
michael@0 5851 if (script()->formalIsAliased(i))
michael@0 5852 osrv = MConstant::New(alloc(), UndefinedValue());
michael@0 5853 else
michael@0 5854 osrv = MGetArgumentsObjectArg::New(alloc(), argsObj, i);
michael@0 5855
michael@0 5856 osrBlock->add(osrv);
michael@0 5857 osrBlock->initSlot(slot, osrv);
michael@0 5858 } else {
michael@0 5859 MParameter *arg = MParameter::New(alloc(), i, nullptr);
michael@0 5860 osrBlock->add(arg);
michael@0 5861 osrBlock->initSlot(slot, arg);
michael@0 5862 }
michael@0 5863 }
michael@0 5864 }
michael@0 5865
michael@0 5866 // Initialize locals.
michael@0 5867 for (uint32_t i = 0; i < info().nlocals(); i++) {
michael@0 5868 uint32_t slot = info().localSlot(i);
michael@0 5869 ptrdiff_t offset = BaselineFrame::reverseOffsetOfLocal(i);
michael@0 5870
michael@0 5871 MOsrValue *osrv = MOsrValue::New(alloc(), entry, offset);
michael@0 5872 osrBlock->add(osrv);
michael@0 5873 osrBlock->initSlot(slot, osrv);
michael@0 5874 }
michael@0 5875
michael@0 5876 // Initialize stack.
michael@0 5877 uint32_t numStackSlots = preheader->stackDepth() - info().firstStackSlot();
michael@0 5878 for (uint32_t i = 0; i < numStackSlots; i++) {
michael@0 5879 uint32_t slot = info().stackSlot(i);
michael@0 5880 ptrdiff_t offset = BaselineFrame::reverseOffsetOfLocal(info().nlocals() + i);
michael@0 5881
michael@0 5882 MOsrValue *osrv = MOsrValue::New(alloc(), entry, offset);
michael@0 5883 osrBlock->add(osrv);
michael@0 5884 osrBlock->initSlot(slot, osrv);
michael@0 5885 }
michael@0 5886
michael@0 5887 // Create an MStart to hold the first valid MResumePoint.
michael@0 5888 MStart *start = MStart::New(alloc(), MStart::StartType_Osr);
michael@0 5889 osrBlock->add(start);
michael@0 5890 graph().setOsrStart(start);
michael@0 5891
michael@0 5892 // MOsrValue instructions are infallible, so the first MResumePoint must
michael@0 5893 // occur after they execute, at the point of the MStart.
michael@0 5894 if (!resumeAt(start, loopEntry))
michael@0 5895 return nullptr;
michael@0 5896
michael@0 5897 // Link the same MResumePoint from the MStart to each MOsrValue.
michael@0 5898 // This causes logic in ShouldSpecializeInput() to not replace Uses with
michael@0 5899 // Unboxes in the MResumePoint, so that the MStart always sees Values.
michael@0 5900 osrBlock->linkOsrValues(start);
michael@0 5901
michael@0 5902 // Clone the types of the other predecessor of the pre-header into the OSR
michael@0 5903 // block, so that the pre-header phis won't discard the specialized types of
michael@0 5904 // the predecessor.
michael@0 5905 JS_ASSERT(predecessor->stackDepth() == osrBlock->stackDepth());
michael@0 5906 JS_ASSERT(info().scopeChainSlot() == 0);
michael@0 5907
michael@0 5908 // Treat the OSR values as having the same type as the existing values
michael@0 5909 // coming in to the loop. These will be fixed up with appropriate
michael@0 5910 // unboxing and type barriers in finishLoop, once the possible types
michael@0 5911 // at the loop header are known.
michael@0 5912 for (uint32_t i = info().startArgSlot(); i < osrBlock->stackDepth(); i++) {
michael@0 5913 MDefinition *existing = current->getSlot(i);
michael@0 5914 MDefinition *def = osrBlock->getSlot(i);
michael@0 5915 JS_ASSERT_IF(!needsArgsObj || !info().isSlotAliasedAtOsr(i), def->type() == MIRType_Value);
michael@0 5916
michael@0 5917 // Aliased slots are never accessed, since they need to go through
michael@0 5918 // the callobject. No need to type them here.
michael@0 5919 if (info().isSlotAliasedAtOsr(i))
michael@0 5920 continue;
michael@0 5921
michael@0 5922 def->setResultType(existing->type());
michael@0 5923 def->setResultTypeSet(existing->resultTypeSet());
michael@0 5924 }
michael@0 5925
michael@0 5926 // Finish the osrBlock.
michael@0 5927 osrBlock->end(MGoto::New(alloc(), preheader));
michael@0 5928 if (!preheader->addPredecessor(alloc(), osrBlock))
michael@0 5929 return nullptr;
michael@0 5930 graph().setOsrBlock(osrBlock);
michael@0 5931
michael@0 5932 // Wrap |this| with a guaranteed use to prevent it from being eliminated by
michael@0 5933 // DCE; this is necessary for constructors.
michael@0 5934 if (info().funMaybeLazy())
michael@0 5935 preheader->getSlot(info().thisSlot())->setGuard();
michael@0 5936
michael@0 5937 return preheader;
michael@0 5938 }
michael@0 5939
michael@0 5940 MBasicBlock *
michael@0 5941 IonBuilder::newPendingLoopHeader(MBasicBlock *predecessor, jsbytecode *pc, bool osr, bool canOsr,
michael@0 5942 unsigned stackPhiCount)
michael@0 5943 {
michael@0 5944 loopDepth_++;
michael@0 5945 // If this site can OSR, all values on the expression stack are part of the loop.
michael@0 5946 if (canOsr)
michael@0 5947 stackPhiCount = predecessor->stackDepth() - info().firstStackSlot();
michael@0 5948 MBasicBlock *block = MBasicBlock::NewPendingLoopHeader(graph(), info(), predecessor, pc,
michael@0 5949 stackPhiCount);
michael@0 5950 if (!addBlock(block, loopDepth_))
michael@0 5951 return nullptr;
michael@0 5952
michael@0 5953 if (osr) {
michael@0 5954 // Incorporate type information from the OSR frame into the loop
michael@0 5955 // header. The OSR frame may have unexpected types due to type changes
michael@0 5956 // within the loop body or due to incomplete profiling information,
michael@0 5957 // in which case this may avoid restarts of loop analysis or bailouts
michael@0 5958 // during the OSR itself.
michael@0 5959
michael@0 5960 // Unbox the MOsrValue if it is known to be unboxable.
michael@0 5961 for (uint32_t i = info().startArgSlot(); i < block->stackDepth(); i++) {
michael@0 5962
michael@0 5963 // The values of aliased args and slots live in the call object, so we can't
michael@0 5964 // take them from the baseline frame.
michael@0 5965 if (info().isSlotAliasedAtOsr(i))
michael@0 5966 continue;
michael@0 5967
michael@0 5968 // Don't bother with expression stack values. The stack should be
michael@0 5969 // empty except for let variables (not Ion-compiled) or iterators.
michael@0 5970 if (i >= info().firstStackSlot())
michael@0 5971 continue;
michael@0 5972
michael@0 5973 MPhi *phi = block->getSlot(i)->toPhi();
michael@0 5974
michael@0 5975 // Get the type from the baseline frame.
michael@0 5976 types::Type existingType = types::Type::UndefinedType();
michael@0 5977 uint32_t arg = i - info().firstArgSlot();
michael@0 5978 uint32_t var = i - info().firstLocalSlot();
michael@0 5979 if (info().funMaybeLazy() && i == info().thisSlot())
michael@0 5980 existingType = baselineFrame_->thisType;
michael@0 5981 else if (arg < info().nargs())
michael@0 5982 existingType = baselineFrame_->argTypes[arg];
michael@0 5983 else
michael@0 5984 existingType = baselineFrame_->varTypes[var];
michael@0 5985
michael@0 5986 // Extract typeset from value.
michael@0 5987 types::TemporaryTypeSet *typeSet =
michael@0 5988 alloc_->lifoAlloc()->new_<types::TemporaryTypeSet>(existingType);
michael@0 5989 if (!typeSet)
michael@0 5990 return nullptr;
michael@0 5991 MIRType type = typeSet->getKnownMIRType();
michael@0 5992 if (!phi->addBackedgeType(type, typeSet))
michael@0 5993 return nullptr;
michael@0 5994 }
michael@0 5995 }
michael@0 5996
michael@0 5997 return block;
michael@0 5998 }
michael@0 5999
michael@0 6000 // A resume point is a mapping of stack slots to MDefinitions. It is used to
michael@0 6001 // capture the environment such that if a guard fails, and IonMonkey needs
michael@0 6002 // to exit back to the interpreter, the interpreter state can be
michael@0 6003 // reconstructed.
michael@0 6004 //
michael@0 6005 // We capture stack state at critical points:
michael@0 6006 // * (1) At the beginning of every basic block.
michael@0 6007 // * (2) After every effectful operation.
michael@0 6008 //
michael@0 6009 // As long as these two properties are maintained, instructions can
michael@0 6010 // be moved, hoisted, or eliminated without problems, and ops without side
michael@0 6011 // effects do not need to worry about capturing state at precisely the
michael@0 6012 // right point in time.
michael@0 6013 //
michael@0 6014 // Effectful instructions, of course, need to capture state after completion,
michael@0 6015 // where the interpreter will not attempt to repeat the operation. For this,
michael@0 6016 // ResumeAfter must be used. The state is attached directly to the effectful
michael@0 6017 // instruction to ensure that no intermediate instructions could be injected
michael@0 6018 // in between by a future analysis pass.
michael@0 6019 //
michael@0 6020 // During LIR construction, if an instruction can bail back to the interpreter,
michael@0 6021 // we create an LSnapshot, which uses the last known resume point to request
michael@0 6022 // register/stack assignments for every live value.
michael@0 6023 bool
michael@0 6024 IonBuilder::resume(MInstruction *ins, jsbytecode *pc, MResumePoint::Mode mode)
michael@0 6025 {
michael@0 6026 JS_ASSERT(ins->isEffectful() || !ins->isMovable());
michael@0 6027
michael@0 6028 MResumePoint *resumePoint = MResumePoint::New(alloc(), ins->block(), pc, callerResumePoint_,
michael@0 6029 mode);
michael@0 6030 if (!resumePoint)
michael@0 6031 return false;
michael@0 6032 ins->setResumePoint(resumePoint);
michael@0 6033 resumePoint->setInstruction(ins);
michael@0 6034 return true;
michael@0 6035 }
michael@0 6036
michael@0 6037 bool
michael@0 6038 IonBuilder::resumeAt(MInstruction *ins, jsbytecode *pc)
michael@0 6039 {
michael@0 6040 return resume(ins, pc, MResumePoint::ResumeAt);
michael@0 6041 }
michael@0 6042
michael@0 6043 bool
michael@0 6044 IonBuilder::resumeAfter(MInstruction *ins)
michael@0 6045 {
michael@0 6046 return resume(ins, pc, MResumePoint::ResumeAfter);
michael@0 6047 }
michael@0 6048
michael@0 6049 bool
michael@0 6050 IonBuilder::maybeInsertResume()
michael@0 6051 {
michael@0 6052 // Create a resume point at the current position, without an existing
michael@0 6053 // effectful instruction. This resume point is not necessary for correct
michael@0 6054 // behavior (see above), but is added to avoid holding any values from the
michael@0 6055 // previous resume point which are now dead. This shortens the live ranges
michael@0 6056 // of such values and improves register allocation.
michael@0 6057 //
michael@0 6058 // This optimization is not performed outside of loop bodies, where good
michael@0 6059 // register allocation is not as critical, in order to avoid creating
michael@0 6060 // excessive resume points.
michael@0 6061
michael@0 6062 if (loopDepth_ == 0)
michael@0 6063 return true;
michael@0 6064
michael@0 6065 MNop *ins = MNop::New(alloc());
michael@0 6066 current->add(ins);
michael@0 6067
michael@0 6068 return resumeAfter(ins);
michael@0 6069 }
michael@0 6070
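// A property lookup on an object of this class has no side effects when the
// class is native and does not define a custom lookupGeneric hook.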
michael@0 6071 static bool
michael@0 6072 ClassHasEffectlessLookup(const Class *clasp, PropertyName *name)
michael@0 6073 {
michael@0 6074 return clasp->isNative() && !clasp->ops.lookupGeneric;
michael@0 6075 }
michael@0 6076
michael@0 6077 static bool
michael@0 6078 ClassHasResolveHook(CompileCompartment *comp, const Class *clasp, PropertyName *name)
michael@0 6079 {
michael@0 6080 // While arrays do not have resolve hooks, the types of their |length|
michael@0 6081 // properties are not reflected in type information, so pretend there is a
michael@0 6082 // resolve hook for this property.
michael@0 6083 if (clasp == &ArrayObject::class_)
michael@0 6084 return name == comp->runtime()->names().length;
michael@0 6085
michael@0 6086 if (clasp->resolve == JS_ResolveStub)
michael@0 6087 return false;
michael@0 6088
michael@0 6089 if (clasp->resolve == (JSResolveOp)str_resolve) {
michael@0 6090 // str_resolve only resolves integers, not names.
michael@0 6091 return false;
michael@0 6092 }
michael@0 6093
michael@0 6094 if (clasp->resolve == (JSResolveOp)fun_resolve)
michael@0 6095 return FunctionHasResolveHook(comp->runtime()->names(), name);
michael@0 6096
michael@0 6097 return true;
michael@0 6098 }
michael@0 6099
michael@0 6100 void
michael@0 6101 IonBuilder::insertRecompileCheck()
michael@0 6102 {
michael@0 6103 // PJS doesn't recompile and doesn't need recompile checks.
michael@0 6104 if (info().executionMode() != SequentialExecution)
michael@0 6105 return;
michael@0 6106
michael@0 6107 // No need for recompile checks if this is the highest optimization level.
michael@0 6108 OptimizationLevel curLevel = optimizationInfo().level();
michael@0 6109 if (js_IonOptimizations.isLastLevel(curLevel))
michael@0 6110 return;
michael@0 6111
michael@0 6112 // Add recompile check.
michael@0 6113
michael@0 6114 // Get the topmost builder. The topmost script will get recompiled when its
michael@0 6115 // use count is high enough to justify a higher optimization level.
michael@0 6116 IonBuilder *topBuilder = this;
michael@0 6117 while (topBuilder->callerBuilder_)
michael@0 6118 topBuilder = topBuilder->callerBuilder_;
michael@0 6119
michael@0 6120 // Add a recompile check that fires once the use count reaches the threshold
michael@0 6121 // of the next optimization level.
michael@0 6122 OptimizationLevel nextLevel = js_IonOptimizations.nextLevel(curLevel);
michael@0 6123 const OptimizationInfo *info = js_IonOptimizations.get(nextLevel);
michael@0 6124 uint32_t useCount = info->usesBeforeCompile(topBuilder->script());
michael@0 6125 current->add(MRecompileCheck::New(alloc(), topBuilder->script(), useCount));
michael@0 6126 }
michael@0 6127
michael@0 6128 JSObject *
michael@0 6129 IonBuilder::testSingletonProperty(JSObject *obj, PropertyName *name)
michael@0 6130 {
michael@0 6131 // We would like to completely no-op property/global accesses which can
michael@0 6132 // produce only a particular JSObject. When indicating the access result is
michael@0 6133 // definitely an object, type inference does not account for the
michael@0 6134 // possibility that the property is entirely missing from the input object
michael@0 6135 // and its prototypes (if this happens, a semantic trigger would be hit and
michael@0 6136 // the pushed types updated, even if there is no type barrier).
michael@0 6137 //
michael@0 6138 // If the access definitely goes through obj, either directly or on the
michael@0 6139 // prototype chain, and the object has singleton type, then the type
michael@0 6140 // information for that property reflects the value that will definitely be
michael@0 6141 // read on accesses to the object. If the property is later deleted or
michael@0 6142 // reconfigured as a getter/setter then the type information for the
michael@0 6143 // property will change and trigger invalidation.
michael@0 6144
michael@0 6145 while (obj) {
michael@0 6146 if (!ClassHasEffectlessLookup(obj->getClass(), name))
michael@0 6147 return nullptr;
michael@0 6148
michael@0 6149 types::TypeObjectKey *objType = types::TypeObjectKey::get(obj);
michael@0 6150 if (analysisContext)
michael@0 6151 objType->ensureTrackedProperty(analysisContext, NameToId(name));
michael@0 6152
michael@0 6153 if (objType->unknownProperties())
michael@0 6154 return nullptr;
michael@0 6155
michael@0 6156 types::HeapTypeSetKey property = objType->property(NameToId(name));
michael@0 6157 if (property.isOwnProperty(constraints())) {
michael@0 6158 if (obj->hasSingletonType())
michael@0 6159 return property.singleton(constraints());
michael@0 6160 return nullptr;
michael@0 6161 }
michael@0 6162
michael@0 6163 if (ClassHasResolveHook(compartment, obj->getClass(), name))
michael@0 6164 return nullptr;
michael@0 6165
michael@0 6166 if (!obj->hasTenuredProto())
michael@0 6167 return nullptr;
michael@0 6168 obj = obj->getProto();
michael@0 6169 }
michael@0 6170
michael@0 6171 return nullptr;
michael@0 6172 }
michael@0 6173
michael@0 6174 bool
michael@0 6175 IonBuilder::testSingletonPropertyTypes(MDefinition *obj, JSObject *singleton, PropertyName *name,
michael@0 6176 bool *testObject, bool *testString)
michael@0 6177 {
michael@0 6178 // As for testSingletonProperty, but the input is any value in a type set
michael@0 6179 // rather than a specific object. If testObject is set then the constant
michael@0 6180 // result can only be used after ensuring the input is an object.
michael@0 6181
michael@0 6182 *testObject = false;
michael@0 6183 *testString = false;
michael@0 6184
michael@0 6185 types::TemporaryTypeSet *types = obj->resultTypeSet();
michael@0 6186 if (types && types->unknownObject())
michael@0 6187 return false;
michael@0 6188
michael@0 6189 JSObject *objectSingleton = types ? types->getSingleton() : nullptr;
michael@0 6190 if (objectSingleton)
michael@0 6191 return testSingletonProperty(objectSingleton, name) == singleton;
michael@0 6192
michael@0 6193 JSProtoKey key;
michael@0 6194 switch (obj->type()) {
michael@0 6195 case MIRType_String:
michael@0 6196 key = JSProto_String;
michael@0 6197 break;
michael@0 6198
michael@0 6199 case MIRType_Int32:
michael@0 6200 case MIRType_Double:
michael@0 6201 key = JSProto_Number;
michael@0 6202 break;
michael@0 6203
michael@0 6204 case MIRType_Boolean:
michael@0 6205 key = JSProto_Boolean;
michael@0 6206 break;
michael@0 6207
michael@0 6208 case MIRType_Object:
michael@0 6209 case MIRType_Value: {
michael@0 6210 if (!types)
michael@0 6211 return false;
michael@0 6212
michael@0 6213 if (types->hasType(types::Type::StringType())) {
michael@0 6214 key = JSProto_String;
michael@0 6215 *testString = true;
michael@0 6216 break;
michael@0 6217 }
michael@0 6218
michael@0 6219 if (!types->maybeObject())
michael@0 6220 return false;
michael@0 6221
michael@0 6222 // For property accesses which may be on many objects, we just need to
michael@0 6223 // find a prototype common to all the objects; if that prototype
michael@0 6224 // has the singleton property, the access will not be on a missing property.
michael@0 6225 for (unsigned i = 0; i < types->getObjectCount(); i++) {
michael@0 6226 types::TypeObjectKey *object = types->getObject(i);
michael@0 6227 if (!object)
michael@0 6228 continue;
michael@0 6229 if (analysisContext)
michael@0 6230 object->ensureTrackedProperty(analysisContext, NameToId(name));
michael@0 6231
michael@0 6232 const Class *clasp = object->clasp();
michael@0 6233 if (!ClassHasEffectlessLookup(clasp, name) || ClassHasResolveHook(compartment, clasp, name))
michael@0 6234 return false;
michael@0 6235 if (object->unknownProperties())
michael@0 6236 return false;
michael@0 6237 types::HeapTypeSetKey property = object->property(NameToId(name));
michael@0 6238 if (property.isOwnProperty(constraints()))
michael@0 6239 return false;
michael@0 6240
michael@0 6241 if (!object->hasTenuredProto())
michael@0 6242 return false;
michael@0 6243 if (JSObject *proto = object->proto().toObjectOrNull()) {
michael@0 6244 // Test this type.
michael@0 6245 if (testSingletonProperty(proto, name) != singleton)
michael@0 6246 return false;
michael@0 6247 } else {
michael@0 6248 // Can't be on the prototype chain with no prototypes...
michael@0 6249 return false;
michael@0 6250 }
michael@0 6251 }
michael@0 6252 // If this is not a known object, a test will be needed.
michael@0 6253 *testObject = (obj->type() != MIRType_Object);
michael@0 6254 return true;
michael@0 6255 }
michael@0 6256 default:
michael@0 6257 return false;
michael@0 6258 }
michael@0 6259
michael@0 6260 JSObject *proto = GetBuiltinPrototypePure(&script()->global(), key);
michael@0 6261 if (proto)
michael@0 6262 return testSingletonProperty(proto, name) == singleton;
michael@0 6263
michael@0 6264 return false;
michael@0 6265 }
michael@0 6266
michael@0 6267 // Given an observed type set, annotates the IR as much as possible:
michael@0 6268 // (1) If no type information is provided, the value on the top of the stack is
michael@0 6269 // left in place.
michael@0 6270 // (2) If a single type definitely exists, and no type barrier is needed,
michael@0 6271 // then an infallible unbox instruction replaces the value on the top of
michael@0 6272 // the stack.
michael@0 6273 // (3) If a type barrier is needed, but has an unknown type set, leave the
michael@0 6274 // value at the top of the stack.
michael@0 6275 // (4) If a type barrier is needed, and has a single type, an unbox
michael@0 6276 // instruction replaces the top of the stack.
michael@0 6277 // (5) Lastly, a type barrier instruction replaces the top of the stack.
michael@0 6278 bool
michael@0 6279 IonBuilder::pushTypeBarrier(MDefinition *def, types::TemporaryTypeSet *observed, bool needsBarrier)
michael@0 6280 {
michael@0 6281 // Barriers are never needed for instructions whose result will not be used.
michael@0 6282 if (BytecodeIsPopped(pc))
michael@0 6283 return true;
michael@0 6284
michael@0 6285 // If the instruction has no side effects, we'll resume the entire operation.
michael@0 6286 // The actual type barrier will occur in the interpreter. If the
michael@0 6287 // instruction is effectful, even if it has a singleton type, there
michael@0 6288 // must be a resume point capturing the original def, and resuming
michael@0 6289 // to that point will explicitly monitor the new type.
michael@0 6290
michael@0 6291 if (!needsBarrier) {
michael@0 6292 MDefinition *replace = ensureDefiniteType(def, observed->getKnownMIRType());
michael@0 6293 if (replace != def) {
michael@0 6294 current->pop();
michael@0 6295 current->push(replace);
michael@0 6296 }
michael@0 6297 replace->setResultTypeSet(observed);
michael@0 6298 return true;
michael@0 6299 }
michael@0 6300
michael@0 6301 if (observed->unknown())
michael@0 6302 return true;
michael@0 6303
michael@0 6304 current->pop();
michael@0 6305
michael@0 6306 MInstruction *barrier = MTypeBarrier::New(alloc(), def, observed);
michael@0 6307 current->add(barrier);
michael@0 6308
michael@0 6309 if (barrier->type() == MIRType_Undefined)
michael@0 6310 return pushConstant(UndefinedValue());
michael@0 6311 if (barrier->type() == MIRType_Null)
michael@0 6312 return pushConstant(NullValue());
michael@0 6313
michael@0 6314 current->push(barrier);
michael@0 6315 return true;
michael@0 6316 }
michael@0 6317
michael@0 6318 bool
michael@0 6319 IonBuilder::pushDOMTypeBarrier(MInstruction *ins, types::TemporaryTypeSet *observed, JSFunction* func)
michael@0 6320 {
michael@0 6321 JS_ASSERT(func && func->isNative() && func->jitInfo());
michael@0 6322
michael@0 6323 const JSJitInfo *jitinfo = func->jitInfo();
michael@0 6324 bool barrier = DOMCallNeedsBarrier(jitinfo, observed);
michael@0 6325 // Need to be a bit careful: if jitinfo->returnType is JSVAL_TYPE_DOUBLE but
michael@0 6326 // types->getKnownMIRType() is MIRType_Int32, then don't unconditionally
michael@0 6327 // unbox as a double. Instead, go ahead and barrier on having an int type,
michael@0 6328 // since we know we need a barrier anyway due to the type mismatch. This is
michael@0 6329 // the only situation in which TI actually has more information about the
michael@0 6330 // JSValueType than codegen can, short of jitinfo->returnType just being
michael@0 6331 // JSVAL_TYPE_UNKNOWN.
michael@0 6332 MDefinition* replace = ins;
michael@0 6333 if (jitinfo->returnType() != JSVAL_TYPE_DOUBLE ||
michael@0 6334 observed->getKnownMIRType() != MIRType_Int32) {
michael@0 6335 replace = ensureDefiniteType(ins, MIRTypeFromValueType(jitinfo->returnType()));
michael@0 6336 if (replace != ins) {
michael@0 6337 current->pop();
michael@0 6338 current->push(replace);
michael@0 6339 }
michael@0 6340 } else {
michael@0 6341 JS_ASSERT(barrier);
michael@0 6342 }
michael@0 6343
michael@0 6344 return pushTypeBarrier(replace, observed, barrier);
michael@0 6345 }
michael@0 6346
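// Coerce |def| to the requested definite MIRType: undefined and null are
// replaced by constants, boxed values are infallibly unboxed, and definitions
// that already have the right type (or when MIRType_Value is requested) are
// returned unchanged.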
michael@0 6347 MDefinition *
michael@0 6348 IonBuilder::ensureDefiniteType(MDefinition *def, MIRType definiteType)
michael@0 6349 {
michael@0 6350 MInstruction *replace;
michael@0 6351 switch (definiteType) {
michael@0 6352 case MIRType_Undefined:
michael@0 6353 def->setImplicitlyUsedUnchecked();
michael@0 6354 replace = MConstant::New(alloc(), UndefinedValue());
michael@0 6355 break;
michael@0 6356
michael@0 6357 case MIRType_Null:
michael@0 6358 def->setImplicitlyUsedUnchecked();
michael@0 6359 replace = MConstant::New(alloc(), NullValue());
michael@0 6360 break;
michael@0 6361
michael@0 6362 case MIRType_Value:
michael@0 6363 return def;
michael@0 6364
michael@0 6365 default: {
michael@0 6366 if (def->type() != MIRType_Value) {
michael@0 6367 JS_ASSERT(def->type() == definiteType);
michael@0 6368 return def;
michael@0 6369 }
michael@0 6370 replace = MUnbox::New(alloc(), def, definiteType, MUnbox::Infallible);
michael@0 6371 break;
michael@0 6372 }
michael@0 6373 }
michael@0 6374
michael@0 6375 current->add(replace);
michael@0 6376 return replace;
michael@0 6377 }
michael@0 6378
michael@0 6379 MDefinition *
michael@0 6380 IonBuilder::ensureDefiniteTypeSet(MDefinition *def, types::TemporaryTypeSet *types)
michael@0 6381 {
michael@0 6382 // We cannot arbitrarily add a typeset to a definition, as it may be shared
michael@0 6383 // by another path. So we always need to create a new MIR node.
michael@0 6384
michael@0 6385 // Use ensureDefiniteType to do unboxing. If that happened the type can
michael@0 6386 // be added on the newly created unbox operation.
michael@0 6387 MDefinition *replace = ensureDefiniteType(def, types->getKnownMIRType());
michael@0 6388 if (replace != def) {
michael@0 6389 replace->setResultTypeSet(types);
michael@0 6390 return replace;
michael@0 6391 }
michael@0 6392
michael@0 6393 // Create a no-op MIR instruction to filter the typeset.
michael@0 6394 MFilterTypeSet *filter = MFilterTypeSet::New(alloc(), def, types);
michael@0 6395 current->add(filter);
michael@0 6396 return filter;
michael@0 6397 }
michael@0 6398
michael@0 6399 static size_t
michael@0 6400 NumFixedSlots(JSObject *object)
michael@0 6401 {
michael@0 6402 // Note: we can't use object->numFixedSlots() here, as this will read the
michael@0 6403 // shape and can race with the main thread if we are building off thread.
michael@0 6404 // The allocation kind and object class (which goes through the type) can
michael@0 6405 // be read freely, however.
michael@0 6406 gc::AllocKind kind = object->tenuredGetAllocKind();
michael@0 6407 return gc::GetGCKindSlots(kind, object->getClass());
michael@0 6408 }
michael@0 6409
michael@0 6410 bool
michael@0 6411 IonBuilder::getStaticName(JSObject *staticObject, PropertyName *name, bool *psucceeded)
michael@0 6412 {
michael@0 6413 jsid id = NameToId(name);
michael@0 6414
michael@0 6415 JS_ASSERT(staticObject->is<GlobalObject>() || staticObject->is<CallObject>());
michael@0 6416 JS_ASSERT(staticObject->hasSingletonType());
michael@0 6417
michael@0 6418 *psucceeded = true;
michael@0 6419
michael@0 6420 if (staticObject->is<GlobalObject>()) {
michael@0 6421 // Optimize undefined, NaN, and Infinity.
michael@0 6422 if (name == names().undefined)
michael@0 6423 return pushConstant(UndefinedValue());
michael@0 6424 if (name == names().NaN)
michael@0 6425 return pushConstant(compartment->runtime()->NaNValue());
michael@0 6426 if (name == names().Infinity)
michael@0 6427 return pushConstant(compartment->runtime()->positiveInfinityValue());
michael@0 6428 }
michael@0 6429
michael@0 6430 types::TypeObjectKey *staticType = types::TypeObjectKey::get(staticObject);
michael@0 6431 if (analysisContext)
michael@0 6432 staticType->ensureTrackedProperty(analysisContext, NameToId(name));
michael@0 6433
michael@0 6434 if (staticType->unknownProperties()) {
michael@0 6435 *psucceeded = false;
michael@0 6436 return true;
michael@0 6437 }
michael@0 6438
michael@0 6439 types::HeapTypeSetKey property = staticType->property(id);
michael@0 6440 if (!property.maybeTypes() ||
michael@0 6441 !property.maybeTypes()->definiteProperty() ||
michael@0 6442 property.nonData(constraints()))
michael@0 6443 {
michael@0 6444 // The property has been reconfigured as non-configurable, non-enumerable
michael@0 6445 // or non-writable.
michael@0 6446 *psucceeded = false;
michael@0 6447 return true;
michael@0 6448 }
michael@0 6449
michael@0 6450 types::TemporaryTypeSet *types = bytecodeTypes(pc);
michael@0 6451 bool barrier = PropertyReadNeedsTypeBarrier(analysisContext, constraints(), staticType,
michael@0 6452 name, types, /* updateObserved = */ true);
michael@0 6453
michael@0 6454 JSObject *singleton = types->getSingleton();
michael@0 6455
michael@0 6456 MIRType knownType = types->getKnownMIRType();
michael@0 6457 if (!barrier) {
michael@0 6458 if (singleton) {
michael@0 6459 // Try to inline a known constant value.
michael@0 6460 if (testSingletonProperty(staticObject, name) == singleton)
michael@0 6461 return pushConstant(ObjectValue(*singleton));
michael@0 6462 }
michael@0 6463 if (knownType == MIRType_Undefined)
michael@0 6464 return pushConstant(UndefinedValue());
michael@0 6465 if (knownType == MIRType_Null)
michael@0 6466 return pushConstant(NullValue());
michael@0 6467 }
michael@0 6468
michael@0 6469 MInstruction *obj = constant(ObjectValue(*staticObject));
michael@0 6470
michael@0 6471 MIRType rvalType = types->getKnownMIRType();
michael@0 6472 if (barrier)
michael@0 6473 rvalType = MIRType_Value;
michael@0 6474
michael@0 6475 return loadSlot(obj, property.maybeTypes()->definiteSlot(), NumFixedSlots(staticObject),
michael@0 6476 rvalType, barrier, types);
michael@0 6477 }
michael@0 6478
michael@0 6479 // Whether 'types' includes all possible values represented by input/inputTypes.
michael@0 6480 bool
michael@0 6481 jit::TypeSetIncludes(types::TypeSet *types, MIRType input, types::TypeSet *inputTypes)
michael@0 6482 {
michael@0 6483 if (!types)
michael@0 6484 return inputTypes && inputTypes->empty();
michael@0 6485
michael@0 6486 switch (input) {
michael@0 6487 case MIRType_Undefined:
michael@0 6488 case MIRType_Null:
michael@0 6489 case MIRType_Boolean:
michael@0 6490 case MIRType_Int32:
michael@0 6491 case MIRType_Double:
michael@0 6492 case MIRType_Float32:
michael@0 6493 case MIRType_String:
michael@0 6494 case MIRType_MagicOptimizedArguments:
michael@0 6495 return types->hasType(types::Type::PrimitiveType(ValueTypeFromMIRType(input)));
michael@0 6496
michael@0 6497 case MIRType_Object:
michael@0 6498 return types->unknownObject() || (inputTypes && inputTypes->isSubset(types));
michael@0 6499
michael@0 6500 case MIRType_Value:
michael@0 6501 return types->unknown() || (inputTypes && inputTypes->isSubset(types));
michael@0 6502
michael@0 6503 default:
michael@0 6504 MOZ_ASSUME_UNREACHABLE("Bad input type");
michael@0 6505 }
michael@0 6506 }
michael@0 6507
michael@0 6508 // Whether a write of the given value may need a post-write barrier for GC purposes.
michael@0 6509 bool
michael@0 6510 jit::NeedsPostBarrier(CompileInfo &info, MDefinition *value)
michael@0 6511 {
michael@0 6512 return info.executionMode() != ParallelExecution && value->mightBeType(MIRType_Object);
michael@0 6513 }
michael@0 6514
michael@0 6515 bool
michael@0 6516 IonBuilder::setStaticName(JSObject *staticObject, PropertyName *name)
michael@0 6517 {
michael@0 6518 jsid id = NameToId(name);
michael@0 6519
michael@0 6520 JS_ASSERT(staticObject->is<GlobalObject>() || staticObject->is<CallObject>());
michael@0 6521
michael@0 6522 MDefinition *value = current->peek(-1);
michael@0 6523
michael@0 6524 types::TypeObjectKey *staticType = types::TypeObjectKey::get(staticObject);
michael@0 6525 if (staticType->unknownProperties())
michael@0 6526 return jsop_setprop(name);
michael@0 6527
michael@0 6528 types::HeapTypeSetKey property = staticType->property(id);
michael@0 6529 if (!property.maybeTypes() ||
michael@0 6530 !property.maybeTypes()->definiteProperty() ||
michael@0 6531 property.nonData(constraints()) ||
michael@0 6532 property.nonWritable(constraints()))
michael@0 6533 {
michael@0 6534 // The property has been reconfigured as non-configurable, non-enumerable
michael@0 6535 // or non-writable.
michael@0 6536 return jsop_setprop(name);
michael@0 6537 }
michael@0 6538
michael@0 6539 if (!TypeSetIncludes(property.maybeTypes(), value->type(), value->resultTypeSet()))
michael@0 6540 return jsop_setprop(name);
michael@0 6541
michael@0 6542 current->pop();
michael@0 6543
michael@0 6544 // Pop the bound object on the stack.
michael@0 6545 MDefinition *obj = current->pop();
michael@0 6546 JS_ASSERT(&obj->toConstant()->value().toObject() == staticObject);
michael@0 6547
michael@0 6548 if (NeedsPostBarrier(info(), value))
michael@0 6549 current->add(MPostWriteBarrier::New(alloc(), obj, value));
michael@0 6550
michael@0 6551 // If the property has a known type, we may be able to optimize typed stores by not
michael@0 6552 // storing the type tag.
michael@0 6553 MIRType slotType = MIRType_None;
michael@0 6554 MIRType knownType = property.knownMIRType(constraints());
michael@0 6555 if (knownType != MIRType_Value)
michael@0 6556 slotType = knownType;
michael@0 6557
michael@0 6558 bool needsBarrier = property.needsBarrier(constraints());
michael@0 6559 return storeSlot(obj, property.maybeTypes()->definiteSlot(), NumFixedSlots(staticObject),
michael@0 6560 value, needsBarrier, slotType);
michael@0 6561 }
michael@0 6562
michael@0 6563 bool
michael@0 6564 IonBuilder::jsop_getgname(PropertyName *name)
michael@0 6565 {
michael@0 6566 JSObject *obj = &script()->global();
michael@0 6567 bool succeeded;
michael@0 6568 if (!getStaticName(obj, name, &succeeded))
michael@0 6569 return false;
michael@0 6570 if (succeeded)
michael@0 6571 return true;
michael@0 6572
michael@0 6573 types::TemporaryTypeSet *types = bytecodeTypes(pc);
michael@0 6574 // Spoof the stack to call into the getProp path.
michael@0 6575 // First, make sure there's room.
michael@0 6576 if (!current->ensureHasSlots(1))
michael@0 6577 return false;
michael@0 6578 pushConstant(ObjectValue(*obj));
michael@0 6579 if (!getPropTryCommonGetter(&succeeded, name, types))
michael@0 6580 return false;
michael@0 6581 if (succeeded)
michael@0 6582 return true;
michael@0 6583
michael@0 6584 // Clean up the pushed global object if we were not successful.
michael@0 6585 current->pop();
michael@0 6586 return jsop_getname(name);
michael@0 6587 }
michael@0 6588
michael@0 6589 bool
michael@0 6590 IonBuilder::jsop_getname(PropertyName *name)
michael@0 6591 {
michael@0 6592 MDefinition *object;
michael@0 6593 if (js_CodeSpec[*pc].format & JOF_GNAME) {
michael@0 6594 MInstruction *global = constant(ObjectValue(script()->global()));
michael@0 6595 object = global;
michael@0 6596 } else {
michael@0 6597 current->push(current->scopeChain());
michael@0 6598 object = current->pop();
michael@0 6599 }
michael@0 6600
michael@0 6601 MGetNameCache *ins;
michael@0 6602 if (JSOp(*GetNextPc(pc)) == JSOP_TYPEOF)
michael@0 6603 ins = MGetNameCache::New(alloc(), object, name, MGetNameCache::NAMETYPEOF);
michael@0 6604 else
michael@0 6605 ins = MGetNameCache::New(alloc(), object, name, MGetNameCache::NAME);
michael@0 6606
michael@0 6607 current->add(ins);
michael@0 6608 current->push(ins);
michael@0 6609
michael@0 6610 if (!resumeAfter(ins))
michael@0 6611 return false;
michael@0 6612
michael@0 6613 types::TemporaryTypeSet *types = bytecodeTypes(pc);
michael@0 6614 return pushTypeBarrier(ins, types, true);
michael@0 6615 }
michael@0 6616
michael@0 6617 bool
michael@0 6618 IonBuilder::jsop_intrinsic(PropertyName *name)
michael@0 6619 {
michael@0 6620 types::TemporaryTypeSet *types = bytecodeTypes(pc);
michael@0 6621
michael@0 6622 // If we haven't executed this opcode yet, we need to get the intrinsic
michael@0 6623 // value and monitor the result.
michael@0 6624 if (types->empty()) {
michael@0 6625 MCallGetIntrinsicValue *ins = MCallGetIntrinsicValue::New(alloc(), name);
michael@0 6626
michael@0 6627 current->add(ins);
michael@0 6628 current->push(ins);
michael@0 6629
michael@0 6630 if (!resumeAfter(ins))
michael@0 6631 return false;
michael@0 6632
michael@0 6633 return pushTypeBarrier(ins, types, true);
michael@0 6634 }
michael@0 6635
michael@0 6636 // Bake in the intrinsic. Make sure that TI agrees with us on the type.
michael@0 6637 Value vp;
michael@0 6638 JS_ALWAYS_TRUE(script()->global().maybeGetIntrinsicValue(name, &vp));
michael@0 6639 JS_ASSERT(types->hasType(types::GetValueType(vp)));
michael@0 6640
michael@0 6641 pushConstant(vp);
michael@0 6642 return true;
michael@0 6643 }
michael@0 6644
michael@0 6645 bool
michael@0 6646 IonBuilder::jsop_bindname(PropertyName *name)
michael@0 6647 {
michael@0 6648 JS_ASSERT(analysis().usesScopeChain());
michael@0 6649
michael@0 6650 MDefinition *scopeChain = current->scopeChain();
michael@0 6651 MBindNameCache *ins = MBindNameCache::New(alloc(), scopeChain, name, script(), pc);
michael@0 6652
michael@0 6653 current->add(ins);
michael@0 6654 current->push(ins);
michael@0 6655
michael@0 6656 return resumeAfter(ins);
michael@0 6657 }
michael@0 6658
michael@0 6659 static MIRType
michael@0 6660 GetElemKnownType(bool needsHoleCheck, types::TemporaryTypeSet *types)
michael@0 6661 {
michael@0 6662 MIRType knownType = types->getKnownMIRType();
michael@0 6663
michael@0 6664 // Null and undefined have no payload so they can't be specialized.
michael@0 6665 // Since folding null/undefined while building SSA is not safe (see the
michael@0 6666 // comment in IsPhiObservable), we just add an untyped load instruction
michael@0 6667 // and rely on pushTypeBarrier and DCE to replace it with a null/undefined
michael@0 6668 // constant.
michael@0 6669 if (knownType == MIRType_Undefined || knownType == MIRType_Null)
michael@0 6670 knownType = MIRType_Value;
michael@0 6671
michael@0 6672 // Different architectures may want typed element reads which require
michael@0 6673 // hole checks to be done as either value or typed reads.
michael@0 6674 if (needsHoleCheck && !LIRGenerator::allowTypedElementHoleCheck())
michael@0 6675 knownType = MIRType_Value;
michael@0 6676
michael@0 6677 return knownType;
michael@0 6678 }
michael@0 6679
michael@0 6680 bool
michael@0 6681 IonBuilder::jsop_getelem()
michael@0 6682 {
michael@0 6683 MDefinition *index = current->pop();
michael@0 6684 MDefinition *obj = current->pop();
michael@0 6685
michael@0 6686 // Always use a call if we are performing analysis and not actually
michael@0 6687 // emitting code, to simplify later analysis.
michael@0 6688 if (info().executionModeIsAnalysis()) {
michael@0 6689 MInstruction *ins = MCallGetElement::New(alloc(), obj, index);
michael@0 6690
michael@0 6691 current->add(ins);
michael@0 6692 current->push(ins);
michael@0 6693
michael@0 6694 if (!resumeAfter(ins))
michael@0 6695 return false;
michael@0 6696
michael@0 6697 types::TemporaryTypeSet *types = bytecodeTypes(pc);
michael@0 6698 return pushTypeBarrier(ins, types, true);
michael@0 6699 }
michael@0 6700
michael@0 6701 bool emitted = false;
michael@0 6702
michael@0 6703 if (!getElemTryTypedObject(&emitted, obj, index) || emitted)
michael@0 6704 return emitted;
michael@0 6705
michael@0 6706 if (!getElemTryDense(&emitted, obj, index) || emitted)
michael@0 6707 return emitted;
michael@0 6708
michael@0 6709 if (!getElemTryTypedStatic(&emitted, obj, index) || emitted)
michael@0 6710 return emitted;
michael@0 6711
michael@0 6712 if (!getElemTryTypedArray(&emitted, obj, index) || emitted)
michael@0 6713 return emitted;
michael@0 6714
michael@0 6715 if (!getElemTryString(&emitted, obj, index) || emitted)
michael@0 6716 return emitted;
michael@0 6717
michael@0 6718 if (!getElemTryArguments(&emitted, obj, index) || emitted)
michael@0 6719 return emitted;
michael@0 6720
michael@0 6721 if (!getElemTryArgumentsInlined(&emitted, obj, index) || emitted)
michael@0 6722 return emitted;
michael@0 6723
michael@0 6724 if (script()->argumentsHasVarBinding() && obj->mightBeType(MIRType_MagicOptimizedArguments))
michael@0 6725 return abort("Type is not definitely lazy arguments.");
michael@0 6726
michael@0 6727 if (!getElemTryCache(&emitted, obj, index) || emitted)
michael@0 6728 return emitted;
michael@0 6729
michael@0 6730 // Emit call.
michael@0 6731 MInstruction *ins = MCallGetElement::New(alloc(), obj, index);
michael@0 6732
michael@0 6733 current->add(ins);
michael@0 6734 current->push(ins);
michael@0 6735
michael@0 6736 if (!resumeAfter(ins))
michael@0 6737 return false;
michael@0 6738
michael@0 6739 types::TemporaryTypeSet *types = bytecodeTypes(pc);
michael@0 6740 return pushTypeBarrier(ins, types, true);
michael@0 6741 }
michael@0 6742
michael@0 6743 bool
michael@0 6744 IonBuilder::getElemTryTypedObject(bool *emitted, MDefinition *obj, MDefinition *index)
michael@0 6745 {
michael@0 6746 JS_ASSERT(*emitted == false);
michael@0 6747
michael@0 6748 TypeDescrSet objDescrs;
michael@0 6749 if (!lookupTypeDescrSet(obj, &objDescrs))
michael@0 6750 return false;
michael@0 6751
michael@0 6752 if (!objDescrs.allOfArrayKind())
michael@0 6753 return true;
michael@0 6754
michael@0 6755 TypeDescrSet elemDescrs;
michael@0 6756 if (!objDescrs.arrayElementType(*this, &elemDescrs))
michael@0 6757 return false;
michael@0 6758 if (elemDescrs.empty())
michael@0 6759 return true;
michael@0 6760
michael@0 6761 JS_ASSERT(TypeDescr::isSized(elemDescrs.kind()));
michael@0 6762
michael@0 6763 int32_t elemSize;
michael@0 6764 if (!elemDescrs.allHaveSameSize(&elemSize))
michael@0 6765 return true;
michael@0 6766
michael@0 6767 switch (elemDescrs.kind()) {
michael@0 6768 case TypeDescr::X4:
michael@0 6769 // FIXME (bug 894105): load into a MIRType_float32x4 etc
michael@0 6770 return true;
michael@0 6771
michael@0 6772 case TypeDescr::Struct:
michael@0 6773 case TypeDescr::SizedArray:
michael@0 6774 return getElemTryComplexElemOfTypedObject(emitted,
michael@0 6775 obj,
michael@0 6776 index,
michael@0 6777 objDescrs,
michael@0 6778 elemDescrs,
michael@0 6779 elemSize);
michael@0 6780 case TypeDescr::Scalar:
michael@0 6781 return getElemTryScalarElemOfTypedObject(emitted,
michael@0 6782 obj,
michael@0 6783 index,
michael@0 6784 objDescrs,
michael@0 6785 elemDescrs,
michael@0 6786 elemSize);
michael@0 6787
michael@0 6788 case TypeDescr::Reference:
michael@0 6789 return true;
michael@0 6790
michael@0 6791 case TypeDescr::UnsizedArray:
michael@0 6792 MOZ_ASSUME_UNREACHABLE("Unsized arrays cannot be element types");
michael@0 6793 }
michael@0 6794
michael@0 6795 MOZ_ASSUME_UNREACHABLE("Bad kind");
michael@0 6796 }
michael@0 6797
michael@0 6798 static MIRType
michael@0 6799 MIRTypeForTypedArrayRead(ScalarTypeDescr::Type arrayType,
michael@0 6800 bool observedDouble);
michael@0 6801
michael@0 6802 bool
michael@0 6803 IonBuilder::checkTypedObjectIndexInBounds(int32_t elemSize,
michael@0 6804 MDefinition *obj,
michael@0 6805 MDefinition *index,
michael@0 6806 TypeDescrSet objDescrs,
michael@0 6807 MDefinition **indexAsByteOffset,
michael@0 6808 bool *canBeNeutered)
michael@0 6809 {
michael@0 6810 // Ensure index is an integer.
michael@0 6811 MInstruction *idInt32 = MToInt32::New(alloc(), index);
michael@0 6812 current->add(idInt32);
michael@0 6813
michael@0 6814 // If we know the length statically from the type, just embed it.
michael@0 6815 // Otherwise, load it from the appropriate reserved slot on the
michael@0 6816 // typed object. We know it's an int32, so we can convert from
michael@0 6817 // Value to int32 using truncation.
michael@0 6818 int32_t lenOfAll;
michael@0 6819 MDefinition *length;
michael@0 6820 if (objDescrs.hasKnownArrayLength(&lenOfAll)) {
michael@0 6821 length = constantInt(lenOfAll);
michael@0 6822
michael@0 6823 // If we are not loading the length from the object itself,
michael@0 6824 // then we still need to check if the object was neutered.
michael@0 6825 *canBeNeutered = true;
michael@0 6826 } else {
michael@0 6827 MInstruction *lengthValue = MLoadFixedSlot::New(alloc(), obj, JS_TYPEDOBJ_SLOT_LENGTH);
michael@0 6828 current->add(lengthValue);
michael@0 6829
michael@0 6830 MInstruction *length32 = MTruncateToInt32::New(alloc(), lengthValue);
michael@0 6831 current->add(length32);
michael@0 6832
michael@0 6833 length = length32;
michael@0 6834
michael@0 6835 // If we are loading the length from the object itself,
michael@0 6836 // then we do not need an extra neuter check, because the length
michael@0 6837 // will have been set to 0 when the object was neutered.
michael@0 6838 *canBeNeutered = false;
michael@0 6839 }
michael@0 6840
michael@0 6841 index = addBoundsCheck(idInt32, length);
michael@0 6842
michael@0 6843 // Since we passed the bounds check, it is impossible for the
michael@0 6844 // result of multiplication to overflow; so enable imul path.
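// Worked example (hypothetical values): with elemSize == 8 and index == 3 the
// byte offset computed below is 3 * 8 == 24; the bounds check above guarantees
// index < length, which is what keeps this product within int32 range.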
michael@0 6845 MMul *mul = MMul::New(alloc(), index, constantInt(elemSize),
michael@0 6846 MIRType_Int32, MMul::Integer);
michael@0 6847 current->add(mul);
michael@0 6848
michael@0 6849 *indexAsByteOffset = mul;
michael@0 6850 return true;
michael@0 6851 }
michael@0 6852
michael@0 6853 bool
michael@0 6854 IonBuilder::getElemTryScalarElemOfTypedObject(bool *emitted,
michael@0 6855 MDefinition *obj,
michael@0 6856 MDefinition *index,
michael@0 6857 TypeDescrSet objDescrs,
michael@0 6858 TypeDescrSet elemDescrs,
michael@0 6859 int32_t elemSize)
michael@0 6860 {
michael@0 6861 JS_ASSERT(objDescrs.allOfArrayKind());
michael@0 6862
michael@0 6863 // Must always be loading the same scalar type
michael@0 6864 ScalarTypeDescr::Type elemType;
michael@0 6865 if (!elemDescrs.scalarType(&elemType))
michael@0 6866 return true;
michael@0 6867 JS_ASSERT(elemSize == ScalarTypeDescr::alignment(elemType));
michael@0 6868
michael@0 6869 bool canBeNeutered;
michael@0 6870 MDefinition *indexAsByteOffset;
michael@0 6871 if (!checkTypedObjectIndexInBounds(elemSize, obj, index, objDescrs,
michael@0 6872 &indexAsByteOffset, &canBeNeutered))
michael@0 6873 {
michael@0 6874 return false;
michael@0 6875 }
michael@0 6876
michael@0 6877 return pushScalarLoadFromTypedObject(emitted, obj, indexAsByteOffset, elemType, canBeNeutered);
michael@0 6878 }
michael@0 6879
michael@0 6880 bool
michael@0 6881 IonBuilder::pushScalarLoadFromTypedObject(bool *emitted,
michael@0 6882 MDefinition *obj,
michael@0 6883 MDefinition *offset,
michael@0 6884 ScalarTypeDescr::Type elemType,
michael@0 6885 bool canBeNeutered)
michael@0 6886 {
michael@0 6887 int32_t size = ScalarTypeDescr::size(elemType);
michael@0 6888 JS_ASSERT(size == ScalarTypeDescr::alignment(elemType));
michael@0 6889
michael@0 6890 // Find location within the owner object.
michael@0 6891 MDefinition *elements, *scaledOffset;
michael@0 6892 loadTypedObjectElements(obj, offset, size, canBeNeutered,
michael@0 6893 &elements, &scaledOffset);
michael@0 6894
michael@0 6895 // Load the element.
michael@0 6896 MLoadTypedArrayElement *load = MLoadTypedArrayElement::New(alloc(), elements, scaledOffset, elemType);
michael@0 6897 current->add(load);
michael@0 6898 current->push(load);
michael@0 6899
michael@0 6900 // If we are reading in-bounds elements, we can use knowledge about
michael@0 6901 // the array type to determine the result type, even if the opcode has
michael@0 6902 // never executed. The known pushed type is only used to distinguish
michael@0 6903 // uint32 reads that may produce either doubles or integers.
michael@0 6904 types::TemporaryTypeSet *resultTypes = bytecodeTypes(pc);
michael@0 6905 bool allowDouble = resultTypes->hasType(types::Type::DoubleType());
michael@0 6906
michael@0 6907 // Note: knownType is not necessarily in resultTypes; e.g. if we
michael@0 6908 // have only observed integers coming out of a float array.
michael@0 6909 MIRType knownType = MIRTypeForTypedArrayRead(elemType, allowDouble);
michael@0 6910
michael@0 6911 // Note: we can ignore the type barrier here, we know the type must
michael@0 6912 // be valid and unbarriered. Also, need not set resultTypeSet,
michael@0 6913 // because knownType is scalar and a resultTypeSet would provide
michael@0 6914 // no useful additional info.
michael@0 6915 load->setResultType(knownType);
michael@0 6916
michael@0 6917 *emitted = true;
michael@0 6918 return true;
michael@0 6919 }
michael@0 6920
michael@0 6921 bool
michael@0 6922 IonBuilder::getElemTryComplexElemOfTypedObject(bool *emitted,
michael@0 6923 MDefinition *obj,
michael@0 6924 MDefinition *index,
michael@0 6925 TypeDescrSet objDescrs,
michael@0 6926 TypeDescrSet elemDescrs,
michael@0 6927 int32_t elemSize)
michael@0 6928 {
michael@0 6929 JS_ASSERT(objDescrs.allOfArrayKind());
michael@0 6930
michael@0 6931 MDefinition *type = loadTypedObjectType(obj);
michael@0 6932 MDefinition *elemTypeObj = typeObjectForElementFromArrayStructType(type);
michael@0 6933
michael@0 6934 bool canBeNeutered;
michael@0 6935 MDefinition *indexAsByteOffset;
michael@0 6936 if (!checkTypedObjectIndexInBounds(elemSize, obj, index, objDescrs,
michael@0 6937 &indexAsByteOffset, &canBeNeutered))
michael@0 6938 {
michael@0 6939 return false;
michael@0 6940 }
michael@0 6941
michael@0 6942 return pushDerivedTypedObject(emitted, obj, indexAsByteOffset,
michael@0 6943 elemDescrs, elemTypeObj, canBeNeutered);
michael@0 6944 }
michael@0 6945
michael@0 6946 bool
michael@0 6947 IonBuilder::pushDerivedTypedObject(bool *emitted,
michael@0 6948 MDefinition *obj,
michael@0 6949 MDefinition *offset,
michael@0 6950 TypeDescrSet derivedTypeDescrs,
michael@0 6951 MDefinition *derivedTypeObj,
michael@0 6952 bool canBeNeutered)
michael@0 6953 {
michael@0 6954 // Find location within the owner object.
michael@0 6955 MDefinition *owner, *ownerOffset;
michael@0 6956 loadTypedObjectData(obj, offset, canBeNeutered, &owner, &ownerOffset);
michael@0 6957
michael@0 6958 // Create the derived typed object.
michael@0 6959 MInstruction *derivedTypedObj = MNewDerivedTypedObject::New(alloc(),
michael@0 6960 derivedTypeDescrs,
michael@0 6961 derivedTypeObj,
michael@0 6962 owner,
michael@0 6963 ownerOffset);
michael@0 6964 current->add(derivedTypedObj);
michael@0 6965 current->push(derivedTypedObj);
michael@0 6966
michael@0 6967 // Determine (if possible) the class/proto that `derivedTypedObj`
michael@0 6968 // will have. For derived typed objects, the class (transparent vs
michael@0 6969 // opaque) will be the same as the incoming object from which the
michael@0 6970 // derived typed object is, well, derived. The prototype will be
michael@0 6971 // determined based on the type descriptor (and is immutable).
michael@0 6972 types::TemporaryTypeSet *objTypes = obj->resultTypeSet();
michael@0 6973 const Class *expectedClass = objTypes ? objTypes->getKnownClass() : nullptr;
michael@0 6974 JSObject *expectedProto = derivedTypeDescrs.knownPrototype();
michael@0 6975 JS_ASSERT_IF(expectedClass, IsTypedObjectClass(expectedClass));
michael@0 6976
michael@0 6977 // Determine (if possible) the class/proto that the observed type set
michael@0 6978 // describes.
michael@0 6979 types::TemporaryTypeSet *observedTypes = bytecodeTypes(pc);
michael@0 6980 const Class *observedClass = observedTypes->getKnownClass();
michael@0 6981 JSObject *observedProto = observedTypes->getCommonPrototype();
michael@0 6982
michael@0 6983 // If expectedClass/expectedProto are both non-null (and hence
michael@0 6984 // known), we can predict precisely what TI type object
michael@0 6985 // derivedTypedObj will have. Therefore, if we observe that this
michael@0 6986 // TI type object is already contained in the set of
michael@0 6987 // observedTypes, we can skip the barrier.
michael@0 6988 //
michael@0 6989 // Barriers still wind up being needed in some relatively
michael@0 6990 // rare cases:
michael@0 6991 //
michael@0 6992 // - if multiple kinds of typed objects flow into this point,
michael@0 6993 // in which case we will not be able to predict expectedClass
michael@0 6994 // nor expectedProto.
michael@0 6995 //
michael@0 6996 // - if the code has never executed, in which case the set of
michael@0 6997 // observed types will be incomplete.
michael@0 6998 //
michael@0 6999 // Barriers are particularly expensive here because they prevent
michael@0 7000 // us from optimizing the MNewDerivedTypedObject away.
michael@0 7001 if (observedClass && observedProto && observedClass == expectedClass &&
michael@0 7002 observedProto == expectedProto)
michael@0 7003 {
michael@0 7004 derivedTypedObj->setResultTypeSet(observedTypes);
michael@0 7005 } else {
michael@0 7006 if (!pushTypeBarrier(derivedTypedObj, observedTypes, true))
michael@0 7007 return false;
michael@0 7008 }
michael@0 7009
michael@0 7010 *emitted = true;
michael@0 7011 return true;
michael@0 7012 }
michael@0 7013
michael@0 7014 bool
michael@0 7015 IonBuilder::getElemTryDense(bool *emitted, MDefinition *obj, MDefinition *index)
michael@0 7016 {
michael@0 7017 JS_ASSERT(*emitted == false);
michael@0 7018
michael@0 7019 if (!ElementAccessIsDenseNative(obj, index))
michael@0 7020 return true;
michael@0 7021
michael@0 7022 // Don't generate a fast path if there have been bounds check failures
michael@0 7023 // and this access might be on a sparse property.
michael@0 7024 if (ElementAccessHasExtraIndexedProperty(constraints(), obj) && failedBoundsCheck_)
michael@0 7025 return true;
michael@0 7026
michael@0 7027 // Don't generate a fast path if this pc has seen negative indexes accessed,
michael@0 7028 // which will not appear to be extra indexed properties.
michael@0 7029 if (inspector->hasSeenNegativeIndexGetElement(pc))
michael@0 7030 return true;
michael@0 7031
michael@0 7032 // Emit dense getelem variant.
michael@0 7033 if (!jsop_getelem_dense(obj, index))
michael@0 7034 return false;
michael@0 7035
michael@0 7036 *emitted = true;
michael@0 7037 return true;
michael@0 7038 }
michael@0 7039
michael@0 7040 bool
michael@0 7041 IonBuilder::getElemTryTypedStatic(bool *emitted, MDefinition *obj, MDefinition *index)
michael@0 7042 {
michael@0 7043 JS_ASSERT(*emitted == false);
michael@0 7044
michael@0 7045 ScalarTypeDescr::Type arrayType;
michael@0 7046 if (!ElementAccessIsTypedArray(obj, index, &arrayType))
michael@0 7047 return true;
michael@0 7048
michael@0 7049 if (!LIRGenerator::allowStaticTypedArrayAccesses())
michael@0 7050 return true;
michael@0 7051
michael@0 7052 if (ElementAccessHasExtraIndexedProperty(constraints(), obj))
michael@0 7053 return true;
michael@0 7054
michael@0 7055 if (!obj->resultTypeSet())
michael@0 7056 return true;
michael@0 7057
michael@0 7058 JSObject *tarrObj = obj->resultTypeSet()->getSingleton();
michael@0 7059 if (!tarrObj)
michael@0 7060 return true;
michael@0 7061
michael@0 7062 TypedArrayObject *tarr = &tarrObj->as<TypedArrayObject>();
michael@0 7063
michael@0 7064 types::TypeObjectKey *tarrType = types::TypeObjectKey::get(tarr);
michael@0 7065 if (tarrType->unknownProperties())
michael@0 7066 return true;
michael@0 7067
michael@0 7068 // LoadTypedArrayElementStatic currently treats uint32 arrays as int32.
michael@0 7069 ArrayBufferView::ViewType viewType = (ArrayBufferView::ViewType) tarr->type();
michael@0 7070 if (viewType == ArrayBufferView::TYPE_UINT32)
michael@0 7071 return true;
michael@0 7072
michael@0 7073 MDefinition *ptr = convertShiftToMaskForStaticTypedArray(index, viewType);
michael@0 7074 if (!ptr)
michael@0 7075 return true;
michael@0 7076
michael@0 7077 // Emit LoadTypedArrayElementStatic.
michael@0 7078 tarrType->watchStateChangeForTypedArrayData(constraints());
michael@0 7079
michael@0 7080 obj->setImplicitlyUsedUnchecked();
michael@0 7081 index->setImplicitlyUsedUnchecked();
michael@0 7082
michael@0 7083 MLoadTypedArrayElementStatic *load = MLoadTypedArrayElementStatic::New(alloc(), tarr, ptr);
michael@0 7084 current->add(load);
michael@0 7085 current->push(load);
michael@0 7086
michael@0 7087 // The load is infallible if an out-of-bounds read would have its undefined
michael@0 7088 // result coerced to the appropriate numeric type anyway. The truncation
michael@0 7089 // analysis picks up some of these cases, but is incomplete with respect
michael@0 7090 // to others. For now, sniff the bytecode for simple patterns following
michael@0 7091 // the load which guarantee a truncation or numeric conversion.
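// For instance (illustrative JS): `+f64[i]` compiles to the element load
// followed by JSOP_POS, and `i32[i] | 0` to JSOP_ZERO then JSOP_BITOR; both
// coerce an out-of-bounds undefined result to a number, so the load can be
// marked infallible for those bytecode shapes.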
michael@0 7092 if (viewType == ArrayBufferView::TYPE_FLOAT32 || viewType == ArrayBufferView::TYPE_FLOAT64) {
michael@0 7093 jsbytecode *next = pc + JSOP_GETELEM_LENGTH;
michael@0 7094 if (*next == JSOP_POS)
michael@0 7095 load->setInfallible();
michael@0 7096 } else {
michael@0 7097 jsbytecode *next = pc + JSOP_GETELEM_LENGTH;
michael@0 7098 if (*next == JSOP_ZERO && *(next + JSOP_ZERO_LENGTH) == JSOP_BITOR)
michael@0 7099 load->setInfallible();
michael@0 7100 }
michael@0 7101
michael@0 7102 *emitted = true;
michael@0 7103 return true;
michael@0 7104 }
michael@0 7105
michael@0 7106 bool
michael@0 7107 IonBuilder::getElemTryTypedArray(bool *emitted, MDefinition *obj, MDefinition *index)
michael@0 7108 {
michael@0 7109 JS_ASSERT(*emitted == false);
michael@0 7110
michael@0 7111 ScalarTypeDescr::Type arrayType;
michael@0 7112 if (!ElementAccessIsTypedArray(obj, index, &arrayType))
michael@0 7113 return true;
michael@0 7114
michael@0 7115 // Emit typed getelem variant.
michael@0 7116 if (!jsop_getelem_typed(obj, index, arrayType))
michael@0 7117 return false;
michael@0 7118
michael@0 7119 *emitted = true;
michael@0 7120 return true;
michael@0 7121 }
michael@0 7122
michael@0 7123 bool
michael@0 7124 IonBuilder::getElemTryString(bool *emitted, MDefinition *obj, MDefinition *index)
michael@0 7125 {
michael@0 7126 JS_ASSERT(*emitted == false);
michael@0 7127
michael@0 7128 if (obj->type() != MIRType_String || !IsNumberType(index->type()))
michael@0 7129 return true;
michael@0 7130
michael@0 7131 // If the index is expected to be out-of-bounds, don't optimize to avoid
michael@0 7132 // frequent bailouts.
michael@0 7133 if (bytecodeTypes(pc)->hasType(types::Type::UndefinedType()))
michael@0 7134 return true;
michael@0 7135
michael@0 7136 // Emit fast path for string[index].
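// Illustrative JS: for `"hello"[1]` this path bounds-checks the index against
// the string length and lowers the access to char-code-at + from-char-code,
// yielding "e"; an out-of-range index bails out rather than producing
// undefined, which is why the undefined-observed check above disables it.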
michael@0 7137 MInstruction *idInt32 = MToInt32::New(alloc(), index);
michael@0 7138 current->add(idInt32);
michael@0 7139 index = idInt32;
michael@0 7140
michael@0 7141 MStringLength *length = MStringLength::New(alloc(), obj);
michael@0 7142 current->add(length);
michael@0 7143
michael@0 7144 index = addBoundsCheck(index, length);
michael@0 7145
michael@0 7146 MCharCodeAt *charCode = MCharCodeAt::New(alloc(), obj, index);
michael@0 7147 current->add(charCode);
michael@0 7148
michael@0 7149 MFromCharCode *result = MFromCharCode::New(alloc(), charCode);
michael@0 7150 current->add(result);
michael@0 7151 current->push(result);
michael@0 7152
michael@0 7153 *emitted = true;
michael@0 7154 return true;
michael@0 7155 }
michael@0 7156
michael@0 7157 bool
michael@0 7158 IonBuilder::getElemTryArguments(bool *emitted, MDefinition *obj, MDefinition *index)
michael@0 7159 {
michael@0 7160 JS_ASSERT(*emitted == false);
michael@0 7161
michael@0 7162 if (inliningDepth_ > 0)
michael@0 7163 return true;
michael@0 7164
michael@0 7165 if (obj->type() != MIRType_MagicOptimizedArguments)
michael@0 7166 return true;
michael@0 7167
michael@0 7168 // Emit GetFrameArgument.
michael@0 7169
michael@0 7170 JS_ASSERT(!info().argsObjAliasesFormals());
michael@0 7171
michael@0 7172 // Type Inference has guaranteed this is an optimized arguments object.
michael@0 7173 obj->setImplicitlyUsedUnchecked();
michael@0 7174
michael@0 7175 // Get the number of actual arguments so we never read past them.
michael@0 7176 MArgumentsLength *length = MArgumentsLength::New(alloc());
michael@0 7177 current->add(length);
michael@0 7178
michael@0 7179 // Ensure index is an integer.
michael@0 7180 MInstruction *idInt32 = MToInt32::New(alloc(), index);
michael@0 7181 current->add(idInt32);
michael@0 7182 index = idInt32;
michael@0 7183
michael@0 7184 // Bail out if we read past the number of actual arguments.
michael@0 7185 index = addBoundsCheck(index, length);
michael@0 7186
michael@0 7187 // Load the argument from the actual arguments.
michael@0 7188 MGetFrameArgument *load = MGetFrameArgument::New(alloc(), index, analysis_.hasSetArg());
michael@0 7189 current->add(load);
michael@0 7190 current->push(load);
michael@0 7191
michael@0 7192 types::TemporaryTypeSet *types = bytecodeTypes(pc);
michael@0 7193 if (!pushTypeBarrier(load, types, true))
michael@0 7194 return false;
michael@0 7195
michael@0 7196 *emitted = true;
michael@0 7197 return true;
michael@0 7198 }
michael@0 7199
michael@0 7200 bool
michael@0 7201 IonBuilder::getElemTryArgumentsInlined(bool *emitted, MDefinition *obj, MDefinition *index)
michael@0 7202 {
michael@0 7203 JS_ASSERT(*emitted == false);
michael@0 7204
michael@0 7205 if (inliningDepth_ == 0)
michael@0 7206 return true;
michael@0 7207
michael@0 7208 if (obj->type() != MIRType_MagicOptimizedArguments)
michael@0 7209 return true;
michael@0 7210
michael@0 7211 // Emit inlined arguments.
michael@0 7212 obj->setImplicitlyUsedUnchecked();
michael@0 7213
michael@0 7214 JS_ASSERT(!info().argsObjAliasesFormals());
michael@0 7215
michael@0 7216 // When the id is constant, we can just return the corresponding inlined argument
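// Illustrative JS: if `function f() { return arguments[0]; }` is inlined at a
// call site `f(1, 2)`, the constant index 0 simply forwards the MDefinition of
// the first actual argument; reading `arguments[5]` at that site pushes
// undefined instead.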
michael@0 7217 if (index->isConstant() && index->toConstant()->value().isInt32()) {
michael@0 7218 JS_ASSERT(inliningDepth_ > 0);
michael@0 7219
michael@0 7220 int32_t id = index->toConstant()->value().toInt32();
michael@0 7221 index->setImplicitlyUsedUnchecked();
michael@0 7222
michael@0 7223 if (id < (int32_t)inlineCallInfo_->argc() && id >= 0)
michael@0 7224 current->push(inlineCallInfo_->getArg(id));
michael@0 7225 else
michael@0 7226 pushConstant(UndefinedValue());
michael@0 7227
michael@0 7228 *emitted = true;
michael@0 7229 return true;
michael@0 7230 }
michael@0 7231
michael@0 7232 // Inlined arguments with a non-constant index are not supported yet.
michael@0 7233 return abort("NYI inlined not constant get argument element");
michael@0 7234 }
michael@0 7235
michael@0 7236 bool
michael@0 7237 IonBuilder::getElemTryCache(bool *emitted, MDefinition *obj, MDefinition *index)
michael@0 7238 {
michael@0 7239 JS_ASSERT(*emitted == false);
michael@0 7240
michael@0 7241 // Make sure we have at least an object.
michael@0 7242 if (!obj->mightBeType(MIRType_Object))
michael@0 7243 return true;
michael@0 7244
michael@0 7245 // Don't cache for strings.
michael@0 7246 if (obj->mightBeType(MIRType_String))
michael@0 7247 return true;
michael@0 7248
michael@0 7249 // The index should be an integer or a string.
michael@0 7250 if (!index->mightBeType(MIRType_Int32) && !index->mightBeType(MIRType_String))
michael@0 7251 return true;
michael@0 7252
michael@0 7253 // Turn off caching if the index is an int32 and we've seen non-native objects
michael@0 7254 // as the target of this getelem.
michael@0 7255 bool nonNativeGetElement = inspector->hasSeenNonNativeGetElement(pc);
michael@0 7256 if (index->mightBeType(MIRType_Int32) && nonNativeGetElement)
michael@0 7257 return true;
michael@0 7258
michael@0 7259 // Emit GetElementCache.
michael@0 7260
michael@0 7261 types::TemporaryTypeSet *types = bytecodeTypes(pc);
michael@0 7262 bool barrier = PropertyReadNeedsTypeBarrier(analysisContext, constraints(), obj, nullptr, types);
michael@0 7263
michael@0 7264 // Always add a barrier if the index might be a string, so that the cache
michael@0 7265 // can attach stubs for particular properties.
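// Illustrative JS: `obj["foo"]` reaches this cache with a string index, and a
// stub specialized for the "foo" property may return values this site has
// never observed, so the barrier is kept to keep those results monitored.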
michael@0 7266 if (index->mightBeType(MIRType_String))
michael@0 7267 barrier = true;
michael@0 7268
michael@0 7269 // See note about always needing a barrier in jsop_getprop.
michael@0 7270 if (needsToMonitorMissingProperties(types))
michael@0 7271 barrier = true;
michael@0 7272
michael@0 7273 MInstruction *ins = MGetElementCache::New(alloc(), obj, index, barrier);
michael@0 7274
michael@0 7275 current->add(ins);
michael@0 7276 current->push(ins);
michael@0 7277
michael@0 7278 if (!resumeAfter(ins))
michael@0 7279 return false;
michael@0 7280
michael@0 7281 // Spice up type information.
michael@0 7282 if (index->type() == MIRType_Int32 && !barrier) {
michael@0 7283 bool needHoleCheck = !ElementAccessIsPacked(constraints(), obj);
michael@0 7284 MIRType knownType = GetElemKnownType(needHoleCheck, types);
michael@0 7285
michael@0 7286 if (knownType != MIRType_Value && knownType != MIRType_Double)
michael@0 7287 ins->setResultType(knownType);
michael@0 7288 }
michael@0 7289
michael@0 7290 if (!pushTypeBarrier(ins, types, barrier))
michael@0 7291 return false;
michael@0 7292
michael@0 7293 *emitted = true;
michael@0 7294 return true;
michael@0 7295 }
michael@0 7296
michael@0 7297 bool
michael@0 7298 IonBuilder::jsop_getelem_dense(MDefinition *obj, MDefinition *index)
michael@0 7299 {
michael@0 7300 types::TemporaryTypeSet *types = bytecodeTypes(pc);
michael@0 7301
michael@0 7302 if (JSOp(*pc) == JSOP_CALLELEM && !index->mightBeType(MIRType_String)) {
michael@0 7303 // Indexed call on an element of an array. Populate the observed types
michael@0 7304 // with any objects that could be in the array, to avoid extraneous
michael@0 7305 // type barriers.
michael@0 7306 AddObjectsForPropertyRead(obj, nullptr, types);
michael@0 7307 }
michael@0 7308
michael@0 7309 bool barrier = PropertyReadNeedsTypeBarrier(analysisContext, constraints(), obj, nullptr, types);
michael@0 7310 bool needsHoleCheck = !ElementAccessIsPacked(constraints(), obj);
michael@0 7311
michael@0 7312 // Reads which are on holes in the object do not have to bail out if
michael@0 7313 // undefined values have been observed at this access site and the access
michael@0 7314 // cannot hit another indexed property on the object or its prototypes.
michael@0 7315 bool readOutOfBounds =
michael@0 7316 types->hasType(types::Type::UndefinedType()) &&
michael@0 7317 !ElementAccessHasExtraIndexedProperty(constraints(), obj);
michael@0 7318
michael@0 7319 MIRType knownType = MIRType_Value;
michael@0 7320 if (!barrier)
michael@0 7321 knownType = GetElemKnownType(needsHoleCheck, types);
michael@0 7322
michael@0 7323 // Ensure index is an integer.
michael@0 7324 MInstruction *idInt32 = MToInt32::New(alloc(), index);
michael@0 7325 current->add(idInt32);
michael@0 7326 index = idInt32;
michael@0 7327
michael@0 7328 // Get the elements vector.
michael@0 7329 MInstruction *elements = MElements::New(alloc(), obj);
michael@0 7330 current->add(elements);
michael@0 7331
michael@0 7332 // Note: to help GVN, use the original MElements instruction and not
michael@0 7333 // MConvertElementsToDoubles as operand. This is fine because converting
michael@0 7334 // elements to double does not change the initialized length.
michael@0 7335 MInitializedLength *initLength = MInitializedLength::New(alloc(), elements);
michael@0 7336 current->add(initLength);
michael@0 7337
michael@0 7338 // If we can load the element as a definite double, make sure to check that
michael@0 7339 // the array has been converted to homogenous doubles first.
michael@0 7340 //
michael@0 7341 // NB: We disable this optimization in parallel execution mode
michael@0 7342 // because it is inherently not thread-safe (how do you convert the
michael@0 7343 // array atomically when there might be concurrent readers?).
michael@0 7344 types::TemporaryTypeSet *objTypes = obj->resultTypeSet();
michael@0 7345 ExecutionMode executionMode = info().executionMode();
michael@0 7346 bool loadDouble =
michael@0 7347 executionMode == SequentialExecution &&
michael@0 7348 !barrier &&
michael@0 7349 loopDepth_ &&
michael@0 7350 !readOutOfBounds &&
michael@0 7351 !needsHoleCheck &&
michael@0 7352 knownType == MIRType_Double &&
michael@0 7353 objTypes &&
michael@0 7354 objTypes->convertDoubleElements(constraints()) == types::TemporaryTypeSet::AlwaysConvertToDoubles;
michael@0 7355 if (loadDouble)
michael@0 7356 elements = addConvertElementsToDoubles(elements);
michael@0 7357
michael@0 7358 MInstruction *load;
michael@0 7359
michael@0 7360 if (!readOutOfBounds) {
michael@0 7361 // This load should not return undefined, so likely we're reading
michael@0 7362 // in-bounds elements, and the array is packed or its holes are not
michael@0 7363 // read. This is the best case: we can separate the bounds check for
michael@0 7364 // hoisting.
michael@0 7365 index = addBoundsCheck(index, initLength);
michael@0 7366
michael@0 7367 load = MLoadElement::New(alloc(), elements, index, needsHoleCheck, loadDouble);
michael@0 7368 current->add(load);
michael@0 7369 } else {
michael@0 7370 // This load may return undefined, so assume that we *can* read holes,
michael@0 7371 // or that we can read out-of-bounds accesses. In this case, the bounds
michael@0 7372 // check is part of the opcode.
michael@0 7373 load = MLoadElementHole::New(alloc(), elements, index, initLength, needsHoleCheck);
michael@0 7374 current->add(load);
michael@0 7375
michael@0 7376 // If we may read out of bounds, the typeset must have undefined, and
michael@0 7377 // then either additional types or a barrier. This means we should
michael@0 7378 // never have a typed version of LoadElementHole.
michael@0 7379 JS_ASSERT(knownType == MIRType_Value);
michael@0 7380 }
michael@0 7381
michael@0 7382 // If the array is being converted to doubles, but we've observed
michael@0 7383 // just int, substitute a type set of int+double into the observed
michael@0 7384 // type set. The reason for this is that, in the
michael@0 7385 // interpreter+baseline, such arrays may consist of mixed
michael@0 7386 // ints/doubles, but when we enter ion code, we will be coercing
michael@0 7387 // all inputs to doubles. Therefore, the type barrier checking for
michael@0 7388 // just int is highly likely (*almost* guaranteed) to fail sooner
michael@0 7389 // or later. Essentially, by eagerly coercing to double, ion is
michael@0 7390 // making the observed types outdated. To compensate for this, we
michael@0 7391 // substitute a broader observed type set consisting of both ints
michael@0 7392 // and doubles. There is perhaps a tradeoff here, so we limit this
michael@0 7393 // optimization to parallel code, where it is needed to prevent
michael@0 7394 // perpetual bailouts in some extreme cases. (Bug 977853)
michael@0 7395 //
michael@0 7396 // NB: we have not added a MConvertElementsToDoubles MIR, so we
michael@0 7397 // cannot *assume* the result is a double.
michael@0 7398 if (executionMode == ParallelExecution &&
michael@0 7399 barrier &&
michael@0 7400 types->getKnownMIRType() == MIRType_Int32 &&
michael@0 7401 objTypes &&
michael@0 7402 objTypes->convertDoubleElements(constraints()) == types::TemporaryTypeSet::AlwaysConvertToDoubles)
michael@0 7403 {
michael@0 7404 // Note: double implies int32 as well for typesets
michael@0 7405 types = alloc_->lifoAlloc()->new_<types::TemporaryTypeSet>(types::Type::DoubleType());
michael@0 7406 if (!types)
michael@0 7407 return false;
michael@0 7408
michael@0 7409 barrier = false; // Don't need a barrier anymore
michael@0 7410 }
michael@0 7411
michael@0 7412 if (knownType != MIRType_Value)
michael@0 7413 load->setResultType(knownType);
michael@0 7414
michael@0 7415 current->push(load);
michael@0 7416 return pushTypeBarrier(load, types, barrier);
michael@0 7417 }
michael@0 7418
michael@0 7419 void
michael@0 7420 IonBuilder::addTypedArrayLengthAndData(MDefinition *obj,
michael@0 7421 BoundsChecking checking,
michael@0 7422 MDefinition **index,
michael@0 7423 MInstruction **length, MInstruction **elements)
michael@0 7424 {
michael@0 7425 MOZ_ASSERT((index != nullptr) == (elements != nullptr));
michael@0 7426
michael@0 7427 if (obj->isConstant() && obj->toConstant()->value().isObject()) {
michael@0 7428 TypedArrayObject *tarr = &obj->toConstant()->value().toObject().as<TypedArrayObject>();
michael@0 7429 void *data = tarr->viewData();
michael@0 7430 // Bug 979449 - Optimistically embed the elements and use TI to
michael@0 7431 // invalidate if we move them.
michael@0 7432 if (!gc::IsInsideNursery(tarr->runtimeFromMainThread(), data)) {
michael@0 7433 // The 'data' pointer can change in rare circumstances
michael@0 7434 // (ArrayBufferObject::changeContents).
michael@0 7435 types::TypeObjectKey *tarrType = types::TypeObjectKey::get(tarr);
michael@0 7436 if (!tarrType->unknownProperties()) {
michael@0 7437 tarrType->watchStateChangeForTypedArrayData(constraints());
michael@0 7438
michael@0 7439 obj->setImplicitlyUsedUnchecked();
michael@0 7440
michael@0 7441 int32_t len = SafeCast<int32_t>(tarr->length());
michael@0 7442 *length = MConstant::New(alloc(), Int32Value(len));
michael@0 7443 current->add(*length);
michael@0 7444
michael@0 7445 if (index) {
michael@0 7446 if (checking == DoBoundsCheck)
michael@0 7447 *index = addBoundsCheck(*index, *length);
michael@0 7448
michael@0 7449 *elements = MConstantElements::New(alloc(), data);
michael@0 7450 current->add(*elements);
michael@0 7451 }
michael@0 7452 return;
michael@0 7453 }
michael@0 7454 }
michael@0 7455 }
michael@0 7456
michael@0 7457 *length = MTypedArrayLength::New(alloc(), obj);
michael@0 7458 current->add(*length);
michael@0 7459
michael@0 7460 if (index) {
michael@0 7461 if (checking == DoBoundsCheck)
michael@0 7462 *index = addBoundsCheck(*index, *length);
michael@0 7463
michael@0 7464 *elements = MTypedArrayElements::New(alloc(), obj);
michael@0 7465 current->add(*elements);
michael@0 7466 }
michael@0 7467 }
michael@0 7468
michael@0 7469 MDefinition *
michael@0 7470 IonBuilder::convertShiftToMaskForStaticTypedArray(MDefinition *id,
michael@0 7471 ArrayBufferView::ViewType viewType)
michael@0 7472 {
michael@0 7473 // No shifting is necessary if the typed array has single byte elements.
michael@0 7474 if (TypedArrayShift(viewType) == 0)
michael@0 7475 return id;
michael@0 7476
michael@0 7477 // If the index is an already shifted constant, undo the shift to get the
michael@0 7478 // absolute offset being accessed.
michael@0 7479 if (id->isConstant() && id->toConstant()->value().isInt32()) {
michael@0 7480 int32_t index = id->toConstant()->value().toInt32();
michael@0 7481 MConstant *offset = MConstant::New(alloc(), Int32Value(index << TypedArrayShift(viewType)));
michael@0 7482 current->add(offset);
michael@0 7483 return offset;
michael@0 7484 }
michael@0 7485
michael@0 7486 if (!id->isRsh() || id->isEffectful())
michael@0 7487 return nullptr;
michael@0 7488 if (!id->getOperand(1)->isConstant())
michael@0 7489 return nullptr;
michael@0 7490 const Value &value = id->getOperand(1)->toConstant()->value();
michael@0 7491 if (!value.isInt32() || uint32_t(value.toInt32()) != TypedArrayShift(viewType))
michael@0 7492 return nullptr;
michael@0 7493
michael@0 7494 // Instead of shifting, mask off the low bits of the index so that
michael@0 7495 // a non-scaled access on the typed array can be performed.
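// Worked example (hypothetical Int32Array, shift == 2): an access written as
// `i32[i >> 2]` addresses byte offset (i >> 2) * 4, which for non-negative i
// equals i & ~3, so the shift can be dropped and the low bits masked instead.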
michael@0 7496 MConstant *mask = MConstant::New(alloc(), Int32Value(~((1 << value.toInt32()) - 1)));
michael@0 7497 MBitAnd *ptr = MBitAnd::New(alloc(), id->getOperand(0), mask);
michael@0 7498
michael@0 7499 ptr->infer(nullptr, nullptr);
michael@0 7500 JS_ASSERT(!ptr->isEffectful());
michael@0 7501
michael@0 7502 current->add(mask);
michael@0 7503 current->add(ptr);
michael@0 7504
michael@0 7505 return ptr;
michael@0 7506 }
michael@0 7507
michael@0 7508 static MIRType
michael@0 7509 MIRTypeForTypedArrayRead(ScalarTypeDescr::Type arrayType,
michael@0 7510 bool observedDouble)
michael@0 7511 {
michael@0 7512 switch (arrayType) {
michael@0 7513 case ScalarTypeDescr::TYPE_INT8:
michael@0 7514 case ScalarTypeDescr::TYPE_UINT8:
michael@0 7515 case ScalarTypeDescr::TYPE_UINT8_CLAMPED:
michael@0 7516 case ScalarTypeDescr::TYPE_INT16:
michael@0 7517 case ScalarTypeDescr::TYPE_UINT16:
michael@0 7518 case ScalarTypeDescr::TYPE_INT32:
michael@0 7519 return MIRType_Int32;
michael@0 7520 case ScalarTypeDescr::TYPE_UINT32:
michael@0 7521 return observedDouble ? MIRType_Double : MIRType_Int32;
michael@0 7522 case ScalarTypeDescr::TYPE_FLOAT32:
michael@0 7523 return (LIRGenerator::allowFloat32Optimizations()) ? MIRType_Float32 : MIRType_Double;
michael@0 7524 case ScalarTypeDescr::TYPE_FLOAT64:
michael@0 7525 return MIRType_Double;
michael@0 7526 }
michael@0 7527 MOZ_ASSUME_UNREACHABLE("Unknown typed array type");
michael@0 7528 }
michael@0 7529
michael@0 7530 bool
michael@0 7531 IonBuilder::jsop_getelem_typed(MDefinition *obj, MDefinition *index,
michael@0 7532 ScalarTypeDescr::Type arrayType)
michael@0 7533 {
michael@0 7534 types::TemporaryTypeSet *types = bytecodeTypes(pc);
michael@0 7535
michael@0 7536 bool maybeUndefined = types->hasType(types::Type::UndefinedType());
michael@0 7537
michael@0 7538 // Reading from a Uint32Array will result in a double for values
michael@0 7539 // that don't fit in an int32. We have to bail out if this happens
michael@0 7540 // and the instruction is not known to return a double.
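// Illustrative JS: after `u32[0] = 0xffffffff`, reading `u32[0]` yields
// 4294967295, which does not fit in an int32 and comes back as a double; if
// only int32 results were observed here, the typed load bails out when that
// happens.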
michael@0 7541 bool allowDouble = types->hasType(types::Type::DoubleType());
michael@0 7542
michael@0 7543 // Ensure id is an integer.
michael@0 7544 MInstruction *idInt32 = MToInt32::New(alloc(), index);
michael@0 7545 current->add(idInt32);
michael@0 7546 index = idInt32;
michael@0 7547
michael@0 7548 if (!maybeUndefined) {
michael@0 7549 // Assume the index is in range, so that we can hoist the length,
michael@0 7550 // elements vector and bounds check.
michael@0 7551
michael@0 7552 // If we are reading in-bounds elements, we can use knowledge about
michael@0 7553 // the array type to determine the result type, even if the opcode has
michael@0 7554 // never executed. The known pushed type is only used to distinguish
michael@0 7555 // uint32 reads that may produce either doubles or integers.
michael@0 7556 MIRType knownType = MIRTypeForTypedArrayRead(arrayType, allowDouble);
michael@0 7557
michael@0 7558 // Get length, bounds-check, then get elements, and add all instructions.
michael@0 7559 MInstruction *length;
michael@0 7560 MInstruction *elements;
michael@0 7561 addTypedArrayLengthAndData(obj, DoBoundsCheck, &index, &length, &elements);
michael@0 7562
michael@0 7563 // Load the element.
michael@0 7564 MLoadTypedArrayElement *load = MLoadTypedArrayElement::New(alloc(), elements, index, arrayType);
michael@0 7565 current->add(load);
michael@0 7566 current->push(load);
michael@0 7567
michael@0 7568 // Note: we can ignore the type barrier here, we know the type must
michael@0 7569 // be valid and unbarriered.
michael@0 7570 load->setResultType(knownType);
michael@0 7571 return true;
michael@0 7572 } else {
michael@0 7573 // We need a type barrier if the array's element type has never been
michael@0 7574 // observed (we've only read out-of-bounds values). Note that for
michael@0 7575 // Uint32Array, we only check for int32: if allowDouble is false we
michael@0 7576 // will bailout when we read a double.
michael@0 7577 bool needsBarrier = true;
michael@0 7578 switch (arrayType) {
michael@0 7579 case ScalarTypeDescr::TYPE_INT8:
michael@0 7580 case ScalarTypeDescr::TYPE_UINT8:
michael@0 7581 case ScalarTypeDescr::TYPE_UINT8_CLAMPED:
michael@0 7582 case ScalarTypeDescr::TYPE_INT16:
michael@0 7583 case ScalarTypeDescr::TYPE_UINT16:
michael@0 7584 case ScalarTypeDescr::TYPE_INT32:
michael@0 7585 case ScalarTypeDescr::TYPE_UINT32:
michael@0 7586 if (types->hasType(types::Type::Int32Type()))
michael@0 7587 needsBarrier = false;
michael@0 7588 break;
michael@0 7589 case ScalarTypeDescr::TYPE_FLOAT32:
michael@0 7590 case ScalarTypeDescr::TYPE_FLOAT64:
michael@0 7591 if (allowDouble)
michael@0 7592 needsBarrier = false;
michael@0 7593 break;
michael@0 7594 default:
michael@0 7595 MOZ_ASSUME_UNREACHABLE("Unknown typed array type");
michael@0 7596 }
michael@0 7597
michael@0 7598 // Assume we will read out-of-bound values. In this case the
michael@0 7599 // bounds check will be part of the instruction, and the instruction
michael@0 7600 // will always return a Value.
michael@0 7601 MLoadTypedArrayElementHole *load =
michael@0 7602 MLoadTypedArrayElementHole::New(alloc(), obj, index, arrayType, allowDouble);
michael@0 7603 current->add(load);
michael@0 7604 current->push(load);
michael@0 7605
michael@0 7606 return pushTypeBarrier(load, types, needsBarrier);
michael@0 7607 }
michael@0 7608 }
michael@0 7609
michael@0 7610 bool
michael@0 7611 IonBuilder::jsop_setelem()
michael@0 7612 {
michael@0 7613 bool emitted = false;
michael@0 7614
michael@0 7615 MDefinition *value = current->pop();
michael@0 7616 MDefinition *index = current->pop();
michael@0 7617 MDefinition *object = current->pop();
michael@0 7618
michael@0 7619 if (!setElemTryTypedObject(&emitted, object, index, value) || emitted)
michael@0 7620 return emitted;
michael@0 7621
michael@0 7622 if (!setElemTryTypedStatic(&emitted, object, index, value) || emitted)
michael@0 7623 return emitted;
michael@0 7624
michael@0 7625 if (!setElemTryTypedArray(&emitted, object, index, value) || emitted)
michael@0 7626 return emitted;
michael@0 7627
michael@0 7628 if (!setElemTryDense(&emitted, object, index, value) || emitted)
michael@0 7629 return emitted;
michael@0 7630
michael@0 7631 if (!setElemTryArguments(&emitted, object, index, value) || emitted)
michael@0 7632 return emitted;
michael@0 7633
michael@0 7634 if (script()->argumentsHasVarBinding() && object->mightBeType(MIRType_MagicOptimizedArguments))
michael@0 7635 return abort("Type is not definitely lazy arguments.");
michael@0 7636
michael@0 7637 if (!setElemTryCache(&emitted, object, index, value) || emitted)
michael@0 7638 return emitted;
michael@0 7639
michael@0 7640 // Emit call.
michael@0 7641 MInstruction *ins = MCallSetElement::New(alloc(), object, index, value);
michael@0 7642 current->add(ins);
michael@0 7643 current->push(value);
michael@0 7644
michael@0 7645 return resumeAfter(ins);
michael@0 7646 }
michael@0 7647
michael@0 7648 bool
michael@0 7649 IonBuilder::setElemTryTypedObject(bool *emitted, MDefinition *obj,
michael@0 7650 MDefinition *index, MDefinition *value)
michael@0 7651 {
michael@0 7652 JS_ASSERT(*emitted == false);
michael@0 7653
michael@0 7654 TypeDescrSet objTypeDescrs;
michael@0 7655 if (!lookupTypeDescrSet(obj, &objTypeDescrs))
michael@0 7656 return false;
michael@0 7657
michael@0 7658 if (!objTypeDescrs.allOfArrayKind())
michael@0 7659 return true;
michael@0 7660
michael@0 7661 TypeDescrSet elemTypeDescrs;
michael@0 7662 if (!objTypeDescrs.arrayElementType(*this, &elemTypeDescrs))
michael@0 7663 return false;
michael@0 7664 if (elemTypeDescrs.empty())
michael@0 7665 return true;
michael@0 7666
michael@0 7667 JS_ASSERT(TypeDescr::isSized(elemTypeDescrs.kind()));
michael@0 7668
michael@0 7669 int32_t elemSize;
michael@0 7670 if (!elemTypeDescrs.allHaveSameSize(&elemSize))
michael@0 7671 return true;
michael@0 7672
michael@0 7673 switch (elemTypeDescrs.kind()) {
michael@0 7674 case TypeDescr::X4:
michael@0 7675 // FIXME (bug 894105): store a MIRType_float32x4 etc
michael@0 7676 return true;
michael@0 7677
michael@0 7678 case TypeDescr::Reference:
michael@0 7679 case TypeDescr::Struct:
michael@0 7680 case TypeDescr::SizedArray:
michael@0 7681 case TypeDescr::UnsizedArray:
michael@0 7682 // For now, only optimize storing scalars.
michael@0 7683 return true;
michael@0 7684
michael@0 7685 case TypeDescr::Scalar:
michael@0 7686 return setElemTryScalarElemOfTypedObject(emitted,
michael@0 7687 obj,
michael@0 7688 index,
michael@0 7689 objTypeDescrs,
michael@0 7690 value,
michael@0 7691 elemTypeDescrs,
michael@0 7692 elemSize);
michael@0 7693 }
michael@0 7694
michael@0 7695 MOZ_ASSUME_UNREACHABLE("Bad kind");
michael@0 7696 }
michael@0 7697
michael@0 7698 bool
michael@0 7699 IonBuilder::setElemTryScalarElemOfTypedObject(bool *emitted,
michael@0 7700 MDefinition *obj,
michael@0 7701 MDefinition *index,
michael@0 7702 TypeDescrSet objTypeDescrs,
michael@0 7703 MDefinition *value,
michael@0 7704 TypeDescrSet elemTypeDescrs,
michael@0 7705 int32_t elemSize)
michael@0 7706 {
michael@0 7707 // Must always be storing the same scalar type.
michael@0 7708 ScalarTypeDescr::Type elemType;
michael@0 7709 if (!elemTypeDescrs.scalarType(&elemType))
michael@0 7710 return true;
michael@0 7711 JS_ASSERT(elemSize == ScalarTypeDescr::alignment(elemType));
michael@0 7712
michael@0 7713 bool canBeNeutered;
michael@0 7714 MDefinition *indexAsByteOffset;
michael@0 7715 if (!checkTypedObjectIndexInBounds(elemSize, obj, index, objTypeDescrs,
michael@0 7716 &indexAsByteOffset, &canBeNeutered))
michael@0 7717 {
michael@0 7718 return false;
michael@0 7719 }
michael@0 7720
michael@0 7721 // Store the element
michael@0 7722 if (!storeScalarTypedObjectValue(obj, indexAsByteOffset, elemType, canBeNeutered, false, value))
michael@0 7723 return false;
michael@0 7724
michael@0 7725 current->push(value);
michael@0 7726
michael@0 7727 *emitted = true;
michael@0 7728 return true;
michael@0 7729 }
michael@0 7730
michael@0 7731 bool
michael@0 7732 IonBuilder::setElemTryTypedStatic(bool *emitted, MDefinition *object,
michael@0 7733 MDefinition *index, MDefinition *value)
michael@0 7734 {
michael@0 7735 JS_ASSERT(*emitted == false);
michael@0 7736
michael@0 7737 ScalarTypeDescr::Type arrayType;
michael@0 7738 if (!ElementAccessIsTypedArray(object, index, &arrayType))
michael@0 7739 return true;
michael@0 7740
michael@0 7741 if (!LIRGenerator::allowStaticTypedArrayAccesses())
michael@0 7742 return true;
michael@0 7743
michael@0 7744 if (ElementAccessHasExtraIndexedProperty(constraints(), object))
michael@0 7745 return true;
michael@0 7746
michael@0 7747 if (!object->resultTypeSet())
michael@0 7748 return true;
michael@0 7749 JSObject *tarrObj = object->resultTypeSet()->getSingleton();
michael@0 7750 if (!tarrObj)
michael@0 7751 return true;
michael@0 7752
michael@0 7753 TypedArrayObject *tarr = &tarrObj->as<TypedArrayObject>();
michael@0 7754
michael@0 7755 types::TypeObjectKey *tarrType = types::TypeObjectKey::get(tarr);
michael@0 7756 if (tarrType->unknownProperties())
michael@0 7757 return true;
michael@0 7758
michael@0 7759 ArrayBufferView::ViewType viewType = (ArrayBufferView::ViewType) tarr->type();
michael@0 7760 MDefinition *ptr = convertShiftToMaskForStaticTypedArray(index, viewType);
michael@0 7761 if (!ptr)
michael@0 7762 return true;
michael@0 7763
michael@0 7764 // Emit StoreTypedArrayElementStatic.
michael@0 7765 tarrType->watchStateChangeForTypedArrayData(constraints());
michael@0 7766
michael@0 7767 object->setImplicitlyUsedUnchecked();
michael@0 7768 index->setImplicitlyUsedUnchecked();
michael@0 7769
michael@0 7770 // Clamp value to [0, 255] for Uint8ClampedArray.
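// Illustrative JS: `u8c[i] = 300` stores 255 and `u8c[i] = -5` stores 0;
// non-integer values are rounded before clamping, so the stored byte always
// lands in [0, 255].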
michael@0 7771 MDefinition *toWrite = value;
michael@0 7772 if (viewType == ArrayBufferView::TYPE_UINT8_CLAMPED) {
michael@0 7773 toWrite = MClampToUint8::New(alloc(), value);
michael@0 7774 current->add(toWrite->toInstruction());
michael@0 7775 }
michael@0 7776
michael@0 7777 MInstruction *store = MStoreTypedArrayElementStatic::New(alloc(), tarr, ptr, toWrite);
michael@0 7778 current->add(store);
michael@0 7779 current->push(value);
michael@0 7780
michael@0 7781 if (!resumeAfter(store))
michael@0 7782 return false;
michael@0 7783
michael@0 7784 *emitted = true;
michael@0 7785 return true;
michael@0 7786 }
michael@0 7787
michael@0 7788 bool
michael@0 7789 IonBuilder::setElemTryTypedArray(bool *emitted, MDefinition *object,
michael@0 7790 MDefinition *index, MDefinition *value)
michael@0 7791 {
michael@0 7792 JS_ASSERT(*emitted == false);
michael@0 7793
michael@0 7794 ScalarTypeDescr::Type arrayType;
michael@0 7795 if (!ElementAccessIsTypedArray(object, index, &arrayType))
michael@0 7796 return true;
michael@0 7797
michael@0 7798 // Emit typed setelem variant.
michael@0 7799 if (!jsop_setelem_typed(arrayType, SetElem_Normal, object, index, value))
michael@0 7800 return false;
michael@0 7801
michael@0 7802 *emitted = true;
michael@0 7803 return true;
michael@0 7804 }
michael@0 7805
michael@0 7806 bool
michael@0 7807 IonBuilder::setElemTryDense(bool *emitted, MDefinition *object,
michael@0 7808 MDefinition *index, MDefinition *value)
michael@0 7809 {
michael@0 7810 JS_ASSERT(*emitted == false);
michael@0 7811
michael@0 7812 if (!ElementAccessIsDenseNative(object, index))
michael@0 7813 return true;
michael@0 7814 if (PropertyWriteNeedsTypeBarrier(alloc(), constraints(), current,
michael@0 7815 &object, nullptr, &value, /* canModify = */ true))
michael@0 7816 {
michael@0 7817 return true;
michael@0 7818 }
michael@0 7819 if (!object->resultTypeSet())
michael@0 7820 return true;
michael@0 7821
michael@0 7822 types::TemporaryTypeSet::DoubleConversion conversion =
michael@0 7823 object->resultTypeSet()->convertDoubleElements(constraints());
michael@0 7824
michael@0 7825 // If AmbiguousDoubleConversion, only handle int32 values for now.
michael@0 7826 if (conversion == types::TemporaryTypeSet::AmbiguousDoubleConversion &&
michael@0 7827 value->type() != MIRType_Int32)
michael@0 7828 {
michael@0 7829 return true;
michael@0 7830 }
michael@0 7831
michael@0 7832 // Don't generate a fast path if there have been bounds check failures
michael@0 7833 // and this access might be on a sparse property.
michael@0 7834 if (ElementAccessHasExtraIndexedProperty(constraints(), object) && failedBoundsCheck_)
michael@0 7835 return true;
michael@0 7836
michael@0 7837 // Emit dense setelem variant.
michael@0 7838 if (!jsop_setelem_dense(conversion, SetElem_Normal, object, index, value))
michael@0 7839 return false;
michael@0 7840
michael@0 7841 *emitted = true;
michael@0 7842 return true;
michael@0 7843 }
michael@0 7844
michael@0 7845 bool
michael@0 7846 IonBuilder::setElemTryArguments(bool *emitted, MDefinition *object,
michael@0 7847 MDefinition *index, MDefinition *value)
michael@0 7848 {
michael@0 7849 JS_ASSERT(*emitted == false);
michael@0 7850
michael@0 7851 if (object->type() != MIRType_MagicOptimizedArguments)
michael@0 7852 return true;
michael@0 7853
michael@0 7854 // Arguments are not supported yet.
michael@0 7855 return abort("NYI arguments[]=");
michael@0 7856 }
michael@0 7857
michael@0 7858 bool
michael@0 7859 IonBuilder::setElemTryCache(bool *emitted, MDefinition *object,
michael@0 7860 MDefinition *index, MDefinition *value)
michael@0 7861 {
michael@0 7862 JS_ASSERT(*emitted == false);
michael@0 7863
michael@0 7864 if (!object->mightBeType(MIRType_Object))
michael@0 7865 return true;
michael@0 7866
michael@0 7867 if (!index->mightBeType(MIRType_Int32) && !index->mightBeType(MIRType_String))
michael@0 7868 return true;
michael@0 7869
michael@0 7870 // TODO: Bug 876650: remove this check:
michael@0 7871 // Temporarily disable the cache unless a dense or typed-array write has been
michael@0 7872 // seen here, until the cache supports more ICs.
michael@0 7873 SetElemICInspector icInspect(inspector->setElemICInspector(pc));
michael@0 7874 if (!icInspect.sawDenseWrite() && !icInspect.sawTypedArrayWrite())
michael@0 7875 return true;
michael@0 7876
michael@0 7877 if (PropertyWriteNeedsTypeBarrier(alloc(), constraints(), current,
michael@0 7878 &object, nullptr, &value, /* canModify = */ true))
michael@0 7879 {
michael@0 7880 return true;
michael@0 7881 }
michael@0 7882
michael@0 7883 // We can avoid worrying about holes in the IC if we know a priori we are safe
michael@0 7884 // from them. If TI can guard that there are no indexed properties on the prototype
michael@0 7885     // chain, we know that we aren't missing any setters by overwriting the hole with
michael@0 7886 // another value.
michael@0 7887 bool guardHoles = ElementAccessHasExtraIndexedProperty(constraints(), object);
michael@0 7888
michael@0 7889 if (NeedsPostBarrier(info(), value))
michael@0 7890 current->add(MPostWriteBarrier::New(alloc(), object, value));
michael@0 7891
michael@0 7892 // Emit SetElementCache.
michael@0 7893 MInstruction *ins = MSetElementCache::New(alloc(), object, index, value, script()->strict(), guardHoles);
michael@0 7894 current->add(ins);
michael@0 7895 current->push(value);
michael@0 7896
michael@0 7897 if (!resumeAfter(ins))
michael@0 7898 return false;
michael@0 7899
michael@0 7900 *emitted = true;
michael@0 7901 return true;
michael@0 7902 }
michael@0 7903
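// Emit the dense element store: add a post barrier if needed, convert the
// index to int32, apply any double conversion to the value, then use
// MStoreElementHole when out-of-bounds writes were seen and are safe, or a
// (normally bounds-checked) MStoreElement otherwise. Write barrier and
// element type hints are set on the resulting store.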
michael@0 7904 bool
michael@0 7905 IonBuilder::jsop_setelem_dense(types::TemporaryTypeSet::DoubleConversion conversion,
michael@0 7906 SetElemSafety safety,
michael@0 7907 MDefinition *obj, MDefinition *id, MDefinition *value)
michael@0 7908 {
michael@0 7909 MIRType elementType = DenseNativeElementType(constraints(), obj);
michael@0 7910 bool packed = ElementAccessIsPacked(constraints(), obj);
michael@0 7911
michael@0 7912 // Writes which are on holes in the object do not have to bail out if they
michael@0 7913 // cannot hit another indexed property on the object or its prototypes.
michael@0 7914 bool writeOutOfBounds = !ElementAccessHasExtraIndexedProperty(constraints(), obj);
michael@0 7915
michael@0 7916 if (NeedsPostBarrier(info(), value))
michael@0 7917 current->add(MPostWriteBarrier::New(alloc(), obj, value));
michael@0 7918
michael@0 7919 // Ensure id is an integer.
michael@0 7920 MInstruction *idInt32 = MToInt32::New(alloc(), id);
michael@0 7921 current->add(idInt32);
michael@0 7922 id = idInt32;
michael@0 7923
michael@0 7924 // Get the elements vector.
michael@0 7925 MElements *elements = MElements::New(alloc(), obj);
michael@0 7926 current->add(elements);
michael@0 7927
michael@0 7928 // Ensure the value is a double, if double conversion might be needed.
michael@0 7929 MDefinition *newValue = value;
michael@0 7930 switch (conversion) {
michael@0 7931 case types::TemporaryTypeSet::AlwaysConvertToDoubles:
michael@0 7932 case types::TemporaryTypeSet::MaybeConvertToDoubles: {
michael@0 7933 MInstruction *valueDouble = MToDouble::New(alloc(), value);
michael@0 7934 current->add(valueDouble);
michael@0 7935 newValue = valueDouble;
michael@0 7936 break;
michael@0 7937 }
michael@0 7938
michael@0 7939 case types::TemporaryTypeSet::AmbiguousDoubleConversion: {
michael@0 7940 JS_ASSERT(value->type() == MIRType_Int32);
michael@0 7941 MInstruction *maybeDouble = MMaybeToDoubleElement::New(alloc(), elements, value);
michael@0 7942 current->add(maybeDouble);
michael@0 7943 newValue = maybeDouble;
michael@0 7944 break;
michael@0 7945 }
michael@0 7946
michael@0 7947 case types::TemporaryTypeSet::DontConvertToDoubles:
michael@0 7948 break;
michael@0 7949
michael@0 7950 default:
michael@0 7951 MOZ_ASSUME_UNREACHABLE("Unknown double conversion");
michael@0 7952 }
michael@0 7953
michael@0 7954 bool writeHole = false;
michael@0 7955 if (safety == SetElem_Normal) {
michael@0 7956 SetElemICInspector icInspect(inspector->setElemICInspector(pc));
michael@0 7957 writeHole = icInspect.sawOOBDenseWrite();
michael@0 7958 }
michael@0 7959
michael@0 7960 // Use MStoreElementHole if this SETELEM has written to out-of-bounds
michael@0 7961 // indexes in the past. Otherwise, use MStoreElement so that we can hoist
michael@0 7962 // the initialized length and bounds check.
michael@0 7963 MStoreElementCommon *store;
michael@0 7964 if (writeHole && writeOutOfBounds) {
michael@0 7965 JS_ASSERT(safety == SetElem_Normal);
michael@0 7966
michael@0 7967 MStoreElementHole *ins = MStoreElementHole::New(alloc(), obj, elements, id, newValue);
michael@0 7968 store = ins;
michael@0 7969
michael@0 7970 current->add(ins);
michael@0 7971 current->push(value);
michael@0 7972
michael@0 7973 if (!resumeAfter(ins))
michael@0 7974 return false;
michael@0 7975 } else {
michael@0 7976 MInitializedLength *initLength = MInitializedLength::New(alloc(), elements);
michael@0 7977 current->add(initLength);
michael@0 7978
michael@0 7979 bool needsHoleCheck;
michael@0 7980 if (safety == SetElem_Normal) {
michael@0 7981 id = addBoundsCheck(id, initLength);
michael@0 7982 needsHoleCheck = !packed && !writeOutOfBounds;
michael@0 7983 } else {
michael@0 7984 needsHoleCheck = false;
michael@0 7985 }
michael@0 7986
michael@0 7987 MStoreElement *ins = MStoreElement::New(alloc(), elements, id, newValue, needsHoleCheck);
michael@0 7988 store = ins;
michael@0 7989
michael@0 7990 if (safety == SetElem_Unsafe)
michael@0 7991 ins->setRacy();
michael@0 7992
michael@0 7993 current->add(ins);
michael@0 7994
michael@0 7995 if (safety == SetElem_Normal)
michael@0 7996 current->push(value);
michael@0 7997
michael@0 7998 if (!resumeAfter(ins))
michael@0 7999 return false;
michael@0 8000 }
michael@0 8001
michael@0 8002 // Determine whether a write barrier is required.
michael@0 8003 if (obj->resultTypeSet()->propertyNeedsBarrier(constraints(), JSID_VOID))
michael@0 8004 store->setNeedsBarrier();
michael@0 8005
michael@0 8006 if (elementType != MIRType_None && packed)
michael@0 8007 store->setElementType(elementType);
michael@0 8008
michael@0 8009 return true;
michael@0 8010 }
michael@0 8011
michael@0 8012
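// Emit the typed array store: convert the index to int32, clamp values for
// Uint8ClampedArray, and use MStoreTypedArrayElementHole when baseline saw
// out-of-bounds writes, otherwise a (possibly racy) MStoreTypedArrayElement.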
michael@0 8013 bool
michael@0 8014 IonBuilder::jsop_setelem_typed(ScalarTypeDescr::Type arrayType,
michael@0 8015 SetElemSafety safety,
michael@0 8016 MDefinition *obj, MDefinition *id, MDefinition *value)
michael@0 8017 {
michael@0 8018 bool expectOOB;
michael@0 8019 if (safety == SetElem_Normal) {
michael@0 8020 SetElemICInspector icInspect(inspector->setElemICInspector(pc));
michael@0 8021 expectOOB = icInspect.sawOOBTypedArrayWrite();
michael@0 8022 } else {
michael@0 8023 expectOOB = false;
michael@0 8024 }
michael@0 8025
michael@0 8026 if (expectOOB)
michael@0 8027 spew("Emitting OOB TypedArray SetElem");
michael@0 8028
michael@0 8029 // Ensure id is an integer.
michael@0 8030 MInstruction *idInt32 = MToInt32::New(alloc(), id);
michael@0 8031 current->add(idInt32);
michael@0 8032 id = idInt32;
michael@0 8033
michael@0 8034 // Get length, bounds-check, then get elements, and add all instructions.
michael@0 8035 MInstruction *length;
michael@0 8036 MInstruction *elements;
michael@0 8037 BoundsChecking checking = (!expectOOB && safety == SetElem_Normal)
michael@0 8038 ? DoBoundsCheck
michael@0 8039 : SkipBoundsCheck;
michael@0 8040 addTypedArrayLengthAndData(obj, checking, &id, &length, &elements);
michael@0 8041
michael@0 8042 // Clamp value to [0, 255] for Uint8ClampedArray.
michael@0 8043 MDefinition *toWrite = value;
michael@0 8044 if (arrayType == ScalarTypeDescr::TYPE_UINT8_CLAMPED) {
michael@0 8045 toWrite = MClampToUint8::New(alloc(), value);
michael@0 8046 current->add(toWrite->toInstruction());
michael@0 8047 }
michael@0 8048
michael@0 8049 // Store the value.
michael@0 8050 MInstruction *ins;
michael@0 8051 if (expectOOB) {
michael@0 8052 ins = MStoreTypedArrayElementHole::New(alloc(), elements, length, id, toWrite, arrayType);
michael@0 8053 } else {
michael@0 8054 MStoreTypedArrayElement *store =
michael@0 8055 MStoreTypedArrayElement::New(alloc(), elements, id, toWrite, arrayType);
michael@0 8056 if (safety == SetElem_Unsafe)
michael@0 8057 store->setRacy();
michael@0 8058 ins = store;
michael@0 8059 }
michael@0 8060
michael@0 8061 current->add(ins);
michael@0 8062
michael@0 8063 if (safety == SetElem_Normal)
michael@0 8064 current->push(value);
michael@0 8065
michael@0 8066 return resumeAfter(ins);
michael@0 8067 }
michael@0 8068
michael@0 8069 bool
michael@0 8070 IonBuilder::jsop_setelem_typed_object(ScalarTypeDescr::Type arrayType,
michael@0 8071 SetElemSafety safety,
michael@0 8072 bool racy,
michael@0 8073 MDefinition *object, MDefinition *index, MDefinition *value)
michael@0 8074 {
michael@0 8075     JS_ASSERT(safety == SetElem_Unsafe); // Can be fixed, but there's been no reason to yet
michael@0 8076
michael@0 8077 MInstruction *int_index = MToInt32::New(alloc(), index);
michael@0 8078 current->add(int_index);
michael@0 8079
michael@0 8080 size_t elemSize = ScalarTypeDescr::alignment(arrayType);
michael@0 8081 MMul *byteOffset = MMul::New(alloc(), int_index, constantInt(elemSize),
michael@0 8082 MIRType_Int32, MMul::Integer);
michael@0 8083 current->add(byteOffset);
michael@0 8084
michael@0 8085 if (!storeScalarTypedObjectValue(object, byteOffset, arrayType, false, racy, value))
michael@0 8086 return false;
michael@0 8087
michael@0 8088 return true;
michael@0 8089 }
michael@0 8090
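// Emit a .length access: take the fast path for strings, dense arrays without
// length overflow and typed arrays when the observed result type is int32,
// otherwise fall back to a generic property get.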
michael@0 8091 bool
michael@0 8092 IonBuilder::jsop_length()
michael@0 8093 {
michael@0 8094 if (jsop_length_fastPath())
michael@0 8095 return true;
michael@0 8096
michael@0 8097 PropertyName *name = info().getAtom(pc)->asPropertyName();
michael@0 8098 return jsop_getprop(name);
michael@0 8099 }
michael@0 8100
michael@0 8101 bool
michael@0 8102 IonBuilder::jsop_length_fastPath()
michael@0 8103 {
michael@0 8104 types::TemporaryTypeSet *types = bytecodeTypes(pc);
michael@0 8105
michael@0 8106 if (types->getKnownMIRType() != MIRType_Int32)
michael@0 8107 return false;
michael@0 8108
michael@0 8109 MDefinition *obj = current->peek(-1);
michael@0 8110
michael@0 8111 if (obj->mightBeType(MIRType_String)) {
michael@0 8112 if (obj->mightBeType(MIRType_Object))
michael@0 8113 return false;
michael@0 8114 current->pop();
michael@0 8115 MStringLength *ins = MStringLength::New(alloc(), obj);
michael@0 8116 current->add(ins);
michael@0 8117 current->push(ins);
michael@0 8118 return true;
michael@0 8119 }
michael@0 8120
michael@0 8121 if (obj->mightBeType(MIRType_Object)) {
michael@0 8122 types::TemporaryTypeSet *objTypes = obj->resultTypeSet();
michael@0 8123
michael@0 8124 if (objTypes &&
michael@0 8125 objTypes->getKnownClass() == &ArrayObject::class_ &&
michael@0 8126 !objTypes->hasObjectFlags(constraints(), types::OBJECT_FLAG_LENGTH_OVERFLOW))
michael@0 8127 {
michael@0 8128 current->pop();
michael@0 8129 MElements *elements = MElements::New(alloc(), obj);
michael@0 8130 current->add(elements);
michael@0 8131
michael@0 8132 // Read length.
michael@0 8133 MArrayLength *length = MArrayLength::New(alloc(), elements);
michael@0 8134 current->add(length);
michael@0 8135 current->push(length);
michael@0 8136 return true;
michael@0 8137 }
michael@0 8138
michael@0 8139 if (objTypes && objTypes->getTypedArrayType() != ScalarTypeDescr::TYPE_MAX) {
michael@0 8140 current->pop();
michael@0 8141 MInstruction *length = addTypedArrayLength(obj);
michael@0 8142 current->push(length);
michael@0 8143 return true;
michael@0 8144 }
michael@0 8145 }
michael@0 8146
michael@0 8147 return false;
michael@0 8148 }
michael@0 8149
michael@0 8150 bool
michael@0 8151 IonBuilder::jsop_arguments()
michael@0 8152 {
michael@0 8153 if (info().needsArgsObj()) {
michael@0 8154 current->push(current->argumentsObject());
michael@0 8155 return true;
michael@0 8156 }
michael@0 8157 JS_ASSERT(lazyArguments_);
michael@0 8158 current->push(lazyArguments_);
michael@0 8159 return true;
michael@0 8160 }
michael@0 8161
michael@0 8162 bool
michael@0 8163 IonBuilder::jsop_arguments_length()
michael@0 8164 {
michael@0 8165 // Type Inference has guaranteed this is an optimized arguments object.
michael@0 8166 MDefinition *args = current->pop();
michael@0 8167 args->setImplicitlyUsedUnchecked();
michael@0 8168
michael@0 8169     // We don't know anything about the callee.
michael@0 8170 if (inliningDepth_ == 0) {
michael@0 8171 MInstruction *ins = MArgumentsLength::New(alloc());
michael@0 8172 current->add(ins);
michael@0 8173 current->push(ins);
michael@0 8174 return true;
michael@0 8175 }
michael@0 8176
michael@0 8177 // We are inlining and know the number of arguments the callee pushed
michael@0 8178 return pushConstant(Int32Value(inlineCallInfo_->argv().length()));
michael@0 8179 }
michael@0 8180
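// Build the rest-parameter array. Outside of inlining, emit MRest with the
// runtime actuals count; when inlining, the actual count is known, so
// allocate the array directly, copy the trailing arguments into it, and fix
// up its length and initialized length.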
michael@0 8181 bool
michael@0 8182 IonBuilder::jsop_rest()
michael@0 8183 {
michael@0 8184 JSObject *templateObject = inspector->getTemplateObject(pc);
michael@0 8185 JS_ASSERT(templateObject->is<ArrayObject>());
michael@0 8186
michael@0 8187 if (inliningDepth_ == 0) {
michael@0 8188 // We don't know anything about the callee.
michael@0 8189 MArgumentsLength *numActuals = MArgumentsLength::New(alloc());
michael@0 8190 current->add(numActuals);
michael@0 8191
michael@0 8192 // Pass in the number of actual arguments, the number of formals (not
michael@0 8193 // including the rest parameter slot itself), and the template object.
michael@0 8194 MRest *rest = MRest::New(alloc(), constraints(), numActuals, info().nargs() - 1,
michael@0 8195 templateObject);
michael@0 8196 current->add(rest);
michael@0 8197 current->push(rest);
michael@0 8198 return true;
michael@0 8199 }
michael@0 8200
michael@0 8201 // We know the exact number of arguments the callee pushed.
michael@0 8202 unsigned numActuals = inlineCallInfo_->argv().length();
michael@0 8203 unsigned numFormals = info().nargs() - 1;
michael@0 8204 unsigned numRest = numActuals > numFormals ? numActuals - numFormals : 0;
michael@0 8205
michael@0 8206 MNewArray *array = MNewArray::New(alloc(), constraints(), numRest, templateObject,
michael@0 8207 templateObject->type()->initialHeap(constraints()),
michael@0 8208 MNewArray::NewArray_Allocating);
michael@0 8209 current->add(array);
michael@0 8210
michael@0 8211 if (numRest == 0) {
michael@0 8212 // No more updating to do. (Note that in this one case the length from
michael@0 8213 // the template object is already correct.)
michael@0 8214 current->push(array);
michael@0 8215 return true;
michael@0 8216 }
michael@0 8217
michael@0 8218 MElements *elements = MElements::New(alloc(), array);
michael@0 8219 current->add(elements);
michael@0 8220
michael@0 8221 // Unroll the argument copy loop. We don't need to do any bounds or hole
michael@0 8222 // checking here.
michael@0 8223 MConstant *index = nullptr;
michael@0 8224 for (unsigned i = numFormals; i < numActuals; i++) {
michael@0 8225 index = MConstant::New(alloc(), Int32Value(i - numFormals));
michael@0 8226 current->add(index);
michael@0 8227
michael@0 8228 MDefinition *arg = inlineCallInfo_->argv()[i];
michael@0 8229 MStoreElement *store = MStoreElement::New(alloc(), elements, index, arg,
michael@0 8230 /* needsHoleCheck = */ false);
michael@0 8231 current->add(store);
michael@0 8232
michael@0 8233 if (NeedsPostBarrier(info(), arg))
michael@0 8234 current->add(MPostWriteBarrier::New(alloc(), array, arg));
michael@0 8235 }
michael@0 8236
michael@0 8237 // The array's length is incorrectly 0 now, from the template object
michael@0 8238 // created by BaselineCompiler::emit_JSOP_REST() before the actual argument
michael@0 8239 // count was known. Set the correct length now that we know that count.
michael@0 8240 MSetArrayLength *length = MSetArrayLength::New(alloc(), elements, index);
michael@0 8241 current->add(length);
michael@0 8242
michael@0 8243 // Update the initialized length for all the (necessarily non-hole)
michael@0 8244 // elements added.
michael@0 8245 MSetInitializedLength *initLength = MSetInitializedLength::New(alloc(), elements, index);
michael@0 8246 current->add(initLength);
michael@0 8247
michael@0 8248 current->push(array);
michael@0 8249 return true;
michael@0 8250 }
michael@0 8251
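// Check whether |name| is a definite data property of the single type object
// in |types|; if so, return its HeapTypeSetKey through |property|.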
michael@0 8252 bool
michael@0 8253 IonBuilder::getDefiniteSlot(types::TemporaryTypeSet *types, PropertyName *name,
michael@0 8254 types::HeapTypeSetKey *property)
michael@0 8255 {
michael@0 8256 if (!types || types->unknownObject() || types->getObjectCount() != 1)
michael@0 8257 return false;
michael@0 8258
michael@0 8259 types::TypeObjectKey *type = types->getObject(0);
michael@0 8260 if (type->unknownProperties() || type->singleton())
michael@0 8261 return false;
michael@0 8262
michael@0 8263 jsid id = NameToId(name);
michael@0 8264
michael@0 8265 *property = type->property(id);
michael@0 8266 return property->maybeTypes() &&
michael@0 8267 property->maybeTypes()->definiteProperty() &&
michael@0 8268 !property->nonData(constraints());
michael@0 8269 }
michael@0 8270
michael@0 8271 bool
michael@0 8272 IonBuilder::jsop_runonce()
michael@0 8273 {
michael@0 8274 MRunOncePrologue *ins = MRunOncePrologue::New(alloc());
michael@0 8275 current->add(ins);
michael@0 8276 return resumeAfter(ins);
michael@0 8277 }
michael@0 8278
michael@0 8279 bool
michael@0 8280 IonBuilder::jsop_not()
michael@0 8281 {
michael@0 8282 MDefinition *value = current->pop();
michael@0 8283
michael@0 8284 MNot *ins = MNot::New(alloc(), value);
michael@0 8285 current->add(ins);
michael@0 8286 current->push(ins);
michael@0 8287 ins->infer();
michael@0 8288 return true;
michael@0 8289 }
michael@0 8290
michael@0 8291 bool
michael@0 8292 IonBuilder::objectsHaveCommonPrototype(types::TemporaryTypeSet *types, PropertyName *name,
michael@0 8293 bool isGetter, JSObject *foundProto)
michael@0 8294 {
michael@0 8295     // Given that foundProto is a prototype with a getter or setter for |name|,
michael@0 8296     // return whether looking up |name| on any object in |types| will go through
michael@0 8297     // foundProto, i.e. all the objects have foundProto on their prototype
michael@0 8298     // chain and do not have a property for |name| before reaching foundProto.
michael@0 8299
michael@0 8300 // No sense looking if we don't know what's going on.
michael@0 8301 if (!types || types->unknownObject())
michael@0 8302 return false;
michael@0 8303
michael@0 8304 for (unsigned i = 0; i < types->getObjectCount(); i++) {
michael@0 8305 if (types->getSingleObject(i) == foundProto)
michael@0 8306 continue;
michael@0 8307
michael@0 8308 types::TypeObjectKey *type = types->getObject(i);
michael@0 8309 if (!type)
michael@0 8310 continue;
michael@0 8311
michael@0 8312 while (type) {
michael@0 8313 if (type->unknownProperties())
michael@0 8314 return false;
michael@0 8315
michael@0 8316 const Class *clasp = type->clasp();
michael@0 8317 if (!ClassHasEffectlessLookup(clasp, name) || ClassHasResolveHook(compartment, clasp, name))
michael@0 8318 return false;
michael@0 8319
michael@0 8320 // Look for a getter/setter on the class itself which may need
michael@0 8321             // to be called. Ignore the getGeneric hook for typed arrays, since it
michael@0 8322 // only handles integers and forwards names to the prototype.
michael@0 8323 if (isGetter && clasp->ops.getGeneric && !IsTypedArrayClass(clasp))
michael@0 8324 return false;
michael@0 8325 if (!isGetter && clasp->ops.setGeneric)
michael@0 8326 return false;
michael@0 8327
michael@0 8328 // Test for isOwnProperty() without freezing. If we end up
michael@0 8329             // optimizing, freezePropertiesForCommonPrototype will freeze the
michael@0 8330 // property type sets later on.
michael@0 8331 types::HeapTypeSetKey property = type->property(NameToId(name));
michael@0 8332 if (types::TypeSet *types = property.maybeTypes()) {
michael@0 8333 if (!types->empty() || types->nonDataProperty())
michael@0 8334 return false;
michael@0 8335 }
michael@0 8336 if (JSObject *obj = type->singleton()) {
michael@0 8337 if (types::CanHaveEmptyPropertyTypesForOwnProperty(obj))
michael@0 8338 return false;
michael@0 8339 }
michael@0 8340
michael@0 8341 if (!type->hasTenuredProto())
michael@0 8342 return false;
michael@0 8343 JSObject *proto = type->proto().toObjectOrNull();
michael@0 8344 if (proto == foundProto)
michael@0 8345 break;
michael@0 8346 if (!proto) {
michael@0 8347 // The foundProto being searched for did not show up on the
michael@0 8348 // object's prototype chain.
michael@0 8349 return false;
michael@0 8350 }
michael@0 8351 type = types::TypeObjectKey::get(type->proto().toObjectOrNull());
michael@0 8352 }
michael@0 8353 }
michael@0 8354
michael@0 8355 return true;
michael@0 8356 }
michael@0 8357
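// Freeze the |name| property on each object in |types| and on its prototypes
// up to (but not including) foundProto, so that a later shadowing definition
// invalidates the compiled code.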
michael@0 8358 void
michael@0 8359 IonBuilder::freezePropertiesForCommonPrototype(types::TemporaryTypeSet *types, PropertyName *name,
michael@0 8360 JSObject *foundProto)
michael@0 8361 {
michael@0 8362 for (unsigned i = 0; i < types->getObjectCount(); i++) {
michael@0 8363 // If we found a Singleton object's own-property, there's nothing to
michael@0 8364 // freeze.
michael@0 8365 if (types->getSingleObject(i) == foundProto)
michael@0 8366 continue;
michael@0 8367
michael@0 8368 types::TypeObjectKey *type = types->getObject(i);
michael@0 8369 if (!type)
michael@0 8370 continue;
michael@0 8371
michael@0 8372 while (true) {
michael@0 8373 types::HeapTypeSetKey property = type->property(NameToId(name));
michael@0 8374 JS_ALWAYS_TRUE(!property.isOwnProperty(constraints()));
michael@0 8375
michael@0 8376 // Don't mark the proto. It will be held down by the shape
michael@0 8377 // guard. This allows us to use properties found on prototypes
michael@0 8378 // with properties unknown to TI.
michael@0 8379 if (type->proto() == foundProto)
michael@0 8380 break;
michael@0 8381 type = types::TypeObjectKey::get(type->proto().toObjectOrNull());
michael@0 8382 }
michael@0 8383 }
michael@0 8384 }
michael@0 8385
michael@0 8386 inline MDefinition *
michael@0 8387 IonBuilder::testCommonGetterSetter(types::TemporaryTypeSet *types, PropertyName *name,
michael@0 8388 bool isGetter, JSObject *foundProto, Shape *lastProperty)
michael@0 8389 {
michael@0 8390 // Check if all objects being accessed will lookup the name through foundProto.
michael@0 8391 if (!objectsHaveCommonPrototype(types, name, isGetter, foundProto))
michael@0 8392 return nullptr;
michael@0 8393
michael@0 8394 // We can optimize the getter/setter, so freeze all involved properties to
michael@0 8395 // ensure there isn't a lower shadowing getter or setter installed in the
michael@0 8396 // future.
michael@0 8397 freezePropertiesForCommonPrototype(types, name, foundProto);
michael@0 8398
michael@0 8399 // Add a shape guard on the prototype we found the property on. The rest of
michael@0 8400 // the prototype chain is guarded by TI freezes. Note that a shape guard is
michael@0 8401 // good enough here, even in the proxy case, because we have ensured there
michael@0 8402 // are no lookup hooks for this property.
michael@0 8403 MInstruction *wrapper = constant(ObjectValue(*foundProto));
michael@0 8404 return addShapeGuard(wrapper, lastProperty, Bailout_ShapeGuard);
michael@0 8405 }
michael@0 8406
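// For JSOP_CALLPROP, try to attach an InlinePropertyTable to the
// GetPropertyCache, mapping each receiver type object to the singleton
// JSFunction found on its prototype so the upcoming call can be inlined, and
// record a resume point for the dispatch fallback.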
michael@0 8407 bool
michael@0 8408 IonBuilder::annotateGetPropertyCache(MDefinition *obj, MGetPropertyCache *getPropCache,
michael@0 8409 types::TemporaryTypeSet *objTypes,
michael@0 8410 types::TemporaryTypeSet *pushedTypes)
michael@0 8411 {
michael@0 8412 PropertyName *name = getPropCache->name();
michael@0 8413
michael@0 8414 // Ensure every pushed value is a singleton.
michael@0 8415 if (pushedTypes->unknownObject() || pushedTypes->baseFlags() != 0)
michael@0 8416 return true;
michael@0 8417
michael@0 8418 for (unsigned i = 0; i < pushedTypes->getObjectCount(); i++) {
michael@0 8419 if (pushedTypes->getTypeObject(i) != nullptr)
michael@0 8420 return true;
michael@0 8421 }
michael@0 8422
michael@0 8423     // The object's type set must contain only known objects (no primitive types)
michael@0 8424 if (!objTypes || objTypes->baseFlags() || objTypes->unknownObject())
michael@0 8425 return true;
michael@0 8426
michael@0 8427 unsigned int objCount = objTypes->getObjectCount();
michael@0 8428 if (objCount == 0)
michael@0 8429 return true;
michael@0 8430
michael@0 8431 InlinePropertyTable *inlinePropTable = getPropCache->initInlinePropertyTable(alloc(), pc);
michael@0 8432 if (!inlinePropTable)
michael@0 8433 return false;
michael@0 8434
michael@0 8435     // Ensure that the relevant property typeset for each type object
michael@0 8436     // is a single-object typeset containing a JSFunction
michael@0 8437 for (unsigned int i = 0; i < objCount; i++) {
michael@0 8438 types::TypeObject *baseTypeObj = objTypes->getTypeObject(i);
michael@0 8439 if (!baseTypeObj)
michael@0 8440 continue;
michael@0 8441 types::TypeObjectKey *typeObj = types::TypeObjectKey::get(baseTypeObj);
michael@0 8442 if (typeObj->unknownProperties() || !typeObj->hasTenuredProto() || !typeObj->proto().isObject())
michael@0 8443 continue;
michael@0 8444
michael@0 8445 const Class *clasp = typeObj->clasp();
michael@0 8446 if (!ClassHasEffectlessLookup(clasp, name) || ClassHasResolveHook(compartment, clasp, name))
michael@0 8447 continue;
michael@0 8448
michael@0 8449 types::HeapTypeSetKey ownTypes = typeObj->property(NameToId(name));
michael@0 8450 if (ownTypes.isOwnProperty(constraints()))
michael@0 8451 continue;
michael@0 8452
michael@0 8453 JSObject *singleton = testSingletonProperty(typeObj->proto().toObject(), name);
michael@0 8454 if (!singleton || !singleton->is<JSFunction>())
michael@0 8455 continue;
michael@0 8456
michael@0 8457 // Don't add cases corresponding to non-observed pushes
michael@0 8458 if (!pushedTypes->hasType(types::Type::ObjectType(singleton)))
michael@0 8459 continue;
michael@0 8460
michael@0 8461 if (!inlinePropTable->addEntry(alloc(), baseTypeObj, &singleton->as<JSFunction>()))
michael@0 8462 return false;
michael@0 8463 }
michael@0 8464
michael@0 8465 if (inlinePropTable->numEntries() == 0) {
michael@0 8466 getPropCache->clearInlinePropertyTable();
michael@0 8467 return true;
michael@0 8468 }
michael@0 8469
michael@0 8470 #ifdef DEBUG
michael@0 8471 if (inlinePropTable->numEntries() > 0)
michael@0 8472 IonSpew(IonSpew_Inlining, "Annotated GetPropertyCache with %d/%d inline cases",
michael@0 8473 (int) inlinePropTable->numEntries(), (int) objCount);
michael@0 8474 #endif
michael@0 8475
michael@0 8476 // If we successfully annotated the GetPropertyCache and there are inline cases,
michael@0 8477 // then keep a resume point of the state right before this instruction for use
michael@0 8478 // later when we have to bail out to this point in the fallback case of a
michael@0 8479 // PolyInlineDispatch.
michael@0 8480 if (inlinePropTable->numEntries() > 0) {
michael@0 8481 // Push the object back onto the stack temporarily to capture the resume point.
michael@0 8482 current->push(obj);
michael@0 8483 MResumePoint *resumePoint = MResumePoint::New(alloc(), current, pc, callerResumePoint_,
michael@0 8484 MResumePoint::ResumeAt);
michael@0 8485 if (!resumePoint)
michael@0 8486 return false;
michael@0 8487 inlinePropTable->setPriorResumePoint(resumePoint);
michael@0 8488 current->pop();
michael@0 8489 }
michael@0 8490 return true;
michael@0 8491 }
michael@0 8492
michael@0 8493 // Returns true if an idempotent cache has ever invalidated this script
michael@0 8494 // or an outer script.
michael@0 8495 bool
michael@0 8496 IonBuilder::invalidatedIdempotentCache()
michael@0 8497 {
michael@0 8498 IonBuilder *builder = this;
michael@0 8499 do {
michael@0 8500 if (builder->script()->invalidatedIdempotentCache())
michael@0 8501 return true;
michael@0 8502 builder = builder->callerBuilder_;
michael@0 8503 } while (builder);
michael@0 8504
michael@0 8505 return false;
michael@0 8506 }
michael@0 8507
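// Load the slot at |slot| from |obj|: fixed slots use MLoadFixedSlot, dynamic
// slots go through MSlots/MLoadSlot, and the result is run through
// pushTypeBarrier.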
michael@0 8508 bool
michael@0 8509 IonBuilder::loadSlot(MDefinition *obj, size_t slot, size_t nfixed, MIRType rvalType,
michael@0 8510 bool barrier, types::TemporaryTypeSet *types)
michael@0 8511 {
michael@0 8512 if (slot < nfixed) {
michael@0 8513 MLoadFixedSlot *load = MLoadFixedSlot::New(alloc(), obj, slot);
michael@0 8514 current->add(load);
michael@0 8515 current->push(load);
michael@0 8516
michael@0 8517 load->setResultType(rvalType);
michael@0 8518 return pushTypeBarrier(load, types, barrier);
michael@0 8519 }
michael@0 8520
michael@0 8521 MSlots *slots = MSlots::New(alloc(), obj);
michael@0 8522 current->add(slots);
michael@0 8523
michael@0 8524 MLoadSlot *load = MLoadSlot::New(alloc(), slots, slot - nfixed);
michael@0 8525 current->add(load);
michael@0 8526 current->push(load);
michael@0 8527
michael@0 8528 load->setResultType(rvalType);
michael@0 8529 return pushTypeBarrier(load, types, barrier);
michael@0 8530 }
michael@0 8531
michael@0 8532 bool
michael@0 8533 IonBuilder::loadSlot(MDefinition *obj, Shape *shape, MIRType rvalType,
michael@0 8534 bool barrier, types::TemporaryTypeSet *types)
michael@0 8535 {
michael@0 8536 return loadSlot(obj, shape->slot(), shape->numFixedSlots(), rvalType, barrier, types);
michael@0 8537 }
michael@0 8538
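// Store |value| into the slot at |slot| of |obj|, mirroring loadSlot:
// MStoreFixedSlot for fixed slots, MSlots/MStoreSlot for dynamic ones, with
// an optional write barrier and slot type hint.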
michael@0 8539 bool
michael@0 8540 IonBuilder::storeSlot(MDefinition *obj, size_t slot, size_t nfixed,
michael@0 8541 MDefinition *value, bool needsBarrier,
michael@0 8542 MIRType slotType /* = MIRType_None */)
michael@0 8543 {
michael@0 8544 if (slot < nfixed) {
michael@0 8545 MStoreFixedSlot *store = MStoreFixedSlot::New(alloc(), obj, slot, value);
michael@0 8546 current->add(store);
michael@0 8547 current->push(value);
michael@0 8548 if (needsBarrier)
michael@0 8549 store->setNeedsBarrier();
michael@0 8550 return resumeAfter(store);
michael@0 8551 }
michael@0 8552
michael@0 8553 MSlots *slots = MSlots::New(alloc(), obj);
michael@0 8554 current->add(slots);
michael@0 8555
michael@0 8556 MStoreSlot *store = MStoreSlot::New(alloc(), slots, slot - nfixed, value);
michael@0 8557 current->add(store);
michael@0 8558 current->push(value);
michael@0 8559 if (needsBarrier)
michael@0 8560 store->setNeedsBarrier();
michael@0 8561 if (slotType != MIRType_None)
michael@0 8562 store->setSlotType(slotType);
michael@0 8563 return resumeAfter(store);
michael@0 8564 }
michael@0 8565
michael@0 8566 bool
michael@0 8567 IonBuilder::storeSlot(MDefinition *obj, Shape *shape, MDefinition *value, bool needsBarrier,
michael@0 8568 MIRType slotType /* = MIRType_None */)
michael@0 8569 {
michael@0 8570 JS_ASSERT(shape->writable());
michael@0 8571 return storeSlot(obj, shape->slot(), shape->numFixedSlots(), value, needsBarrier, slotType);
michael@0 8572 }
michael@0 8573
michael@0 8574 bool
michael@0 8575 IonBuilder::jsop_getprop(PropertyName *name)
michael@0 8576 {
michael@0 8577 bool emitted = false;
michael@0 8578
michael@0 8579 // Try to optimize arguments.length.
michael@0 8580 if (!getPropTryArgumentsLength(&emitted) || emitted)
michael@0 8581 return emitted;
michael@0 8582
michael@0 8583 types::TemporaryTypeSet *types = bytecodeTypes(pc);
michael@0 8584 bool barrier = PropertyReadNeedsTypeBarrier(analysisContext, constraints(),
michael@0 8585 current->peek(-1), name, types);
michael@0 8586
michael@0 8587 // Always use a call if we are performing analysis and
michael@0 8588 // not actually emitting code, to simplify later analysis. Also skip deeper
michael@0 8589 // analysis if there are no known types for this operation, as it will
michael@0 8590 // always invalidate when executing.
michael@0 8591 if (info().executionModeIsAnalysis() || types->empty()) {
michael@0 8592 MDefinition *obj = current->peek(-1);
michael@0 8593 MCallGetProperty *call = MCallGetProperty::New(alloc(), obj, name, *pc == JSOP_CALLPROP);
michael@0 8594 current->add(call);
michael@0 8595
michael@0 8596 // During the definite properties analysis we can still try to bake in
michael@0 8597 // constants read off the prototype chain, to allow inlining later on.
michael@0 8598 // In this case we still need the getprop call so that the later
michael@0 8599 // analysis knows when the |this| value has been read from.
michael@0 8600 if (info().executionModeIsAnalysis()) {
michael@0 8601 if (!getPropTryConstant(&emitted, name, types) || emitted)
michael@0 8602 return emitted;
michael@0 8603 }
michael@0 8604
michael@0 8605 current->pop();
michael@0 8606 current->push(call);
michael@0 8607 return resumeAfter(call) && pushTypeBarrier(call, types, true);
michael@0 8608 }
michael@0 8609
michael@0 8610 // Try to hardcode known constants.
michael@0 8611 if (!getPropTryConstant(&emitted, name, types) || emitted)
michael@0 8612 return emitted;
michael@0 8613
michael@0 8614 // Try to emit loads from known binary data blocks
michael@0 8615 if (!getPropTryTypedObject(&emitted, name, types) || emitted)
michael@0 8616 return emitted;
michael@0 8617
michael@0 8618 // Try to emit loads from definite slots.
michael@0 8619 if (!getPropTryDefiniteSlot(&emitted, name, barrier, types) || emitted)
michael@0 8620 return emitted;
michael@0 8621
michael@0 8622 // Try to inline a common property getter, or make a call.
michael@0 8623 if (!getPropTryCommonGetter(&emitted, name, types) || emitted)
michael@0 8624 return emitted;
michael@0 8625
michael@0 8626 // Try to emit a monomorphic/polymorphic access based on baseline caches.
michael@0 8627 if (!getPropTryInlineAccess(&emitted, name, barrier, types) || emitted)
michael@0 8628 return emitted;
michael@0 8629
michael@0 8630 // Try to emit a polymorphic cache.
michael@0 8631 if (!getPropTryCache(&emitted, name, barrier, types) || emitted)
michael@0 8632 return emitted;
michael@0 8633
michael@0 8634 // Emit a call.
michael@0 8635 MDefinition *obj = current->pop();
michael@0 8636 MCallGetProperty *call = MCallGetProperty::New(alloc(), obj, name, *pc == JSOP_CALLPROP);
michael@0 8637 current->add(call);
michael@0 8638 current->push(call);
michael@0 8639 if (!resumeAfter(call))
michael@0 8640 return false;
michael@0 8641
michael@0 8642 return pushTypeBarrier(call, types, true);
michael@0 8643 }
michael@0 8644
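// Optimize arguments.length: only applies when the operand is definitely the
// magic optimized-arguments value and the current op is JSOP_LENGTH; aborts
// if lazy arguments cannot be ruled out.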
michael@0 8645 bool
michael@0 8646 IonBuilder::getPropTryArgumentsLength(bool *emitted)
michael@0 8647 {
michael@0 8648 JS_ASSERT(*emitted == false);
michael@0 8649 if (current->peek(-1)->type() != MIRType_MagicOptimizedArguments) {
michael@0 8650 if (script()->argumentsHasVarBinding() &&
michael@0 8651 current->peek(-1)->mightBeType(MIRType_MagicOptimizedArguments))
michael@0 8652 {
michael@0 8653 return abort("Type is not definitely lazy arguments.");
michael@0 8654 }
michael@0 8655 return true;
michael@0 8656 }
michael@0 8657 if (JSOp(*pc) != JSOP_LENGTH)
michael@0 8658 return true;
michael@0 8659
michael@0 8660 *emitted = true;
michael@0 8661 return jsop_arguments_length();
michael@0 8662 }
michael@0 8663
michael@0 8664 bool
michael@0 8665 IonBuilder::getPropTryConstant(bool *emitted, PropertyName *name,
michael@0 8666 types::TemporaryTypeSet *types)
michael@0 8667 {
michael@0 8668 JS_ASSERT(*emitted == false);
michael@0 8669 JSObject *singleton = types ? types->getSingleton() : nullptr;
michael@0 8670 if (!singleton)
michael@0 8671 return true;
michael@0 8672
michael@0 8673 bool testObject, testString;
michael@0 8674 if (!testSingletonPropertyTypes(current->peek(-1), singleton, name, &testObject, &testString))
michael@0 8675 return true;
michael@0 8676
michael@0 8677 MDefinition *obj = current->pop();
michael@0 8678
michael@0 8679 // Property access is a known constant -- safe to emit.
michael@0 8680 JS_ASSERT(!testString || !testObject);
michael@0 8681 if (testObject)
michael@0 8682 current->add(MGuardObject::New(alloc(), obj));
michael@0 8683 else if (testString)
michael@0 8684 current->add(MGuardString::New(alloc(), obj));
michael@0 8685 else
michael@0 8686 obj->setImplicitlyUsedUnchecked();
michael@0 8687
michael@0 8688 pushConstant(ObjectValue(*singleton));
michael@0 8689
michael@0 8690 *emitted = true;
michael@0 8691 return true;
michael@0 8692 }
michael@0 8693
michael@0 8694 bool
michael@0 8695 IonBuilder::getPropTryTypedObject(bool *emitted, PropertyName *name,
michael@0 8696 types::TemporaryTypeSet *resultTypes)
michael@0 8697 {
michael@0 8698 TypeDescrSet fieldDescrs;
michael@0 8699 int32_t fieldOffset;
michael@0 8700 size_t fieldIndex;
michael@0 8701 if (!lookupTypedObjectField(current->peek(-1), name, &fieldOffset,
michael@0 8702 &fieldDescrs, &fieldIndex))
michael@0 8703 return false;
michael@0 8704 if (fieldDescrs.empty())
michael@0 8705 return true;
michael@0 8706
michael@0 8707 switch (fieldDescrs.kind()) {
michael@0 8708 case TypeDescr::Reference:
michael@0 8709 return true;
michael@0 8710
michael@0 8711 case TypeDescr::X4:
michael@0 8712 // FIXME (bug 894104): load into a MIRType_float32x4 etc
michael@0 8713 return true;
michael@0 8714
michael@0 8715 case TypeDescr::Struct:
michael@0 8716 case TypeDescr::SizedArray:
michael@0 8717 return getPropTryComplexPropOfTypedObject(emitted,
michael@0 8718 fieldOffset,
michael@0 8719 fieldDescrs,
michael@0 8720 fieldIndex,
michael@0 8721 resultTypes);
michael@0 8722
michael@0 8723 case TypeDescr::Scalar:
michael@0 8724 return getPropTryScalarPropOfTypedObject(emitted,
michael@0 8725 fieldOffset,
michael@0 8726 fieldDescrs,
michael@0 8727 resultTypes);
michael@0 8728
michael@0 8729 case TypeDescr::UnsizedArray:
michael@0 8730 MOZ_ASSUME_UNREACHABLE("Field of unsized array type");
michael@0 8731 }
michael@0 8732
michael@0 8733 MOZ_ASSUME_UNREACHABLE("Bad kind");
michael@0 8734 }
michael@0 8735
michael@0 8736 bool
michael@0 8737 IonBuilder::getPropTryScalarPropOfTypedObject(bool *emitted,
michael@0 8738 int32_t fieldOffset,
michael@0 8739 TypeDescrSet fieldDescrs,
michael@0 8740 types::TemporaryTypeSet *resultTypes)
michael@0 8741 {
michael@0 8742 // Must always be loading the same scalar type
michael@0 8743 ScalarTypeDescr::Type fieldType;
michael@0 8744 if (!fieldDescrs.scalarType(&fieldType))
michael@0 8745 return true;
michael@0 8746
michael@0 8747 // OK, perform the optimization
michael@0 8748
michael@0 8749 MDefinition *typedObj = current->pop();
michael@0 8750
michael@0 8751 return pushScalarLoadFromTypedObject(emitted, typedObj, constantInt(fieldOffset),
michael@0 8752 fieldType, true);
michael@0 8753 }
michael@0 8754
michael@0 8755 bool
michael@0 8756 IonBuilder::getPropTryComplexPropOfTypedObject(bool *emitted,
michael@0 8757 int32_t fieldOffset,
michael@0 8758 TypeDescrSet fieldDescrs,
michael@0 8759 size_t fieldIndex,
michael@0 8760 types::TemporaryTypeSet *resultTypes)
michael@0 8761 {
michael@0 8762 // Must know the field index so that we can load the new type
michael@0 8763 // object for the derived value
michael@0 8764 if (fieldIndex == SIZE_MAX)
michael@0 8765 return true;
michael@0 8766
michael@0 8767 // OK, perform the optimization
michael@0 8768
michael@0 8769 MDefinition *typedObj = current->pop();
michael@0 8770
michael@0 8771 // Identify the type object for the field.
michael@0 8772 MDefinition *type = loadTypedObjectType(typedObj);
michael@0 8773 MDefinition *fieldTypeObj = typeObjectForFieldFromStructType(type, fieldIndex);
michael@0 8774
michael@0 8775 return pushDerivedTypedObject(emitted, typedObj, constantInt(fieldOffset),
michael@0 8776 fieldDescrs, fieldTypeObj, true);
michael@0 8777 }
michael@0 8778
michael@0 8779 bool
michael@0 8780 IonBuilder::getPropTryDefiniteSlot(bool *emitted, PropertyName *name,
michael@0 8781 bool barrier, types::TemporaryTypeSet *types)
michael@0 8782 {
michael@0 8783 JS_ASSERT(*emitted == false);
michael@0 8784 types::HeapTypeSetKey property;
michael@0 8785 if (!getDefiniteSlot(current->peek(-1)->resultTypeSet(), name, &property))
michael@0 8786 return true;
michael@0 8787
michael@0 8788 MDefinition *obj = current->pop();
michael@0 8789 MDefinition *useObj = obj;
michael@0 8790 if (obj->type() != MIRType_Object) {
michael@0 8791 MGuardObject *guard = MGuardObject::New(alloc(), obj);
michael@0 8792 current->add(guard);
michael@0 8793 useObj = guard;
michael@0 8794 }
michael@0 8795
michael@0 8796 MLoadFixedSlot *fixed = MLoadFixedSlot::New(alloc(), useObj, property.maybeTypes()->definiteSlot());
michael@0 8797 if (!barrier)
michael@0 8798 fixed->setResultType(types->getKnownMIRType());
michael@0 8799
michael@0 8800 current->add(fixed);
michael@0 8801 current->push(fixed);
michael@0 8802
michael@0 8803 if (!pushTypeBarrier(fixed, types, barrier))
michael@0 8804 return false;
michael@0 8805
michael@0 8806 *emitted = true;
michael@0 8807 return true;
michael@0 8808 }
michael@0 8809
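// Try to use a getter located on a prototype shared by every object in the
// receiver's type set: emit a DOM-specialized get when applicable, otherwise
// inline the getter or emit a regular call to it.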
michael@0 8810 bool
michael@0 8811 IonBuilder::getPropTryCommonGetter(bool *emitted, PropertyName *name,
michael@0 8812 types::TemporaryTypeSet *types)
michael@0 8813 {
michael@0 8814 JS_ASSERT(*emitted == false);
michael@0 8815
michael@0 8816 Shape *lastProperty = nullptr;
michael@0 8817 JSFunction *commonGetter = nullptr;
michael@0 8818 JSObject *foundProto = inspector->commonGetPropFunction(pc, &lastProperty, &commonGetter);
michael@0 8819 if (!foundProto)
michael@0 8820 return true;
michael@0 8821
michael@0 8822 types::TemporaryTypeSet *objTypes = current->peek(-1)->resultTypeSet();
michael@0 8823 MDefinition *guard = testCommonGetterSetter(objTypes, name, /* isGetter = */ true,
michael@0 8824 foundProto, lastProperty);
michael@0 8825 if (!guard)
michael@0 8826 return true;
michael@0 8827
michael@0 8828 bool isDOM = objTypes->isDOMClass();
michael@0 8829
michael@0 8830 MDefinition *obj = current->pop();
michael@0 8831
michael@0 8832 if (isDOM && testShouldDOMCall(objTypes, commonGetter, JSJitInfo::Getter)) {
michael@0 8833 const JSJitInfo *jitinfo = commonGetter->jitInfo();
michael@0 8834 MInstruction *get;
michael@0 8835 if (jitinfo->isInSlot) {
michael@0 8836 // We can't use MLoadFixedSlot here because it might not have the
michael@0 8837 // right aliasing behavior; we want to alias DOM setters.
michael@0 8838 get = MGetDOMMember::New(alloc(), jitinfo, obj, guard);
michael@0 8839 } else {
michael@0 8840 get = MGetDOMProperty::New(alloc(), jitinfo, obj, guard);
michael@0 8841 }
michael@0 8842 current->add(get);
michael@0 8843 current->push(get);
michael@0 8844
michael@0 8845 if (get->isEffectful() && !resumeAfter(get))
michael@0 8846 return false;
michael@0 8847
michael@0 8848 if (!pushDOMTypeBarrier(get, types, commonGetter))
michael@0 8849 return false;
michael@0 8850
michael@0 8851 *emitted = true;
michael@0 8852 return true;
michael@0 8853 }
michael@0 8854
michael@0 8855 // Don't call the getter with a primitive value.
michael@0 8856 if (objTypes->getKnownMIRType() != MIRType_Object) {
michael@0 8857 MGuardObject *guardObj = MGuardObject::New(alloc(), obj);
michael@0 8858 current->add(guardObj);
michael@0 8859 obj = guardObj;
michael@0 8860 }
michael@0 8861
michael@0 8862 // Spoof stack to expected state for call.
michael@0 8863
michael@0 8864 // Make sure there's enough room
michael@0 8865 if (!current->ensureHasSlots(2))
michael@0 8866 return false;
michael@0 8867 pushConstant(ObjectValue(*commonGetter));
michael@0 8868
michael@0 8869 current->push(obj);
michael@0 8870
michael@0 8871 CallInfo callInfo(alloc(), false);
michael@0 8872 if (!callInfo.init(current, 0))
michael@0 8873 return false;
michael@0 8874
michael@0 8875 // Inline if we can, otherwise, forget it and just generate a call.
michael@0 8876 bool inlineable = false;
michael@0 8877 if (commonGetter->isInterpreted()) {
michael@0 8878 InliningDecision decision = makeInliningDecision(commonGetter, callInfo);
michael@0 8879 switch (decision) {
michael@0 8880 case InliningDecision_Error:
michael@0 8881 return false;
michael@0 8882 case InliningDecision_DontInline:
michael@0 8883 break;
michael@0 8884 case InliningDecision_Inline:
michael@0 8885 inlineable = true;
michael@0 8886 break;
michael@0 8887 }
michael@0 8888 }
michael@0 8889
michael@0 8890 if (inlineable) {
michael@0 8891 if (!inlineScriptedCall(callInfo, commonGetter))
michael@0 8892 return false;
michael@0 8893 } else {
michael@0 8894 if (!makeCall(commonGetter, callInfo, false))
michael@0 8895 return false;
michael@0 8896 }
michael@0 8897
michael@0 8898 *emitted = true;
michael@0 8899 return true;
michael@0 8900 }
michael@0 8901
michael@0 8902 static bool
michael@0 8903 CanInlinePropertyOpShapes(const BaselineInspector::ShapeVector &shapes)
michael@0 8904 {
michael@0 8905 for (size_t i = 0; i < shapes.length(); i++) {
michael@0 8906 // We inline the property access as long as the shape is not in
michael@0 8907         // dictionary mode. We cannot be sure that the shape is still a
michael@0 8908 // lastProperty, and calling Shape::search() on dictionary mode
michael@0 8909 // shapes that aren't lastProperty is invalid.
michael@0 8910 if (shapes[i]->inDictionary())
michael@0 8911 return false;
michael@0 8912 }
michael@0 8913
michael@0 8914 return true;
michael@0 8915 }
michael@0 8916
michael@0 8917 bool
michael@0 8918 IonBuilder::getPropTryInlineAccess(bool *emitted, PropertyName *name,
michael@0 8919 bool barrier, types::TemporaryTypeSet *types)
michael@0 8920 {
michael@0 8921 JS_ASSERT(*emitted == false);
michael@0 8922 if (current->peek(-1)->type() != MIRType_Object)
michael@0 8923 return true;
michael@0 8924
michael@0 8925 BaselineInspector::ShapeVector shapes(alloc());
michael@0 8926 if (!inspector->maybeShapesForPropertyOp(pc, shapes))
michael@0 8927 return false;
michael@0 8928
michael@0 8929 if (shapes.empty() || !CanInlinePropertyOpShapes(shapes))
michael@0 8930 return true;
michael@0 8931
michael@0 8932 MIRType rvalType = types->getKnownMIRType();
michael@0 8933 if (barrier || IsNullOrUndefined(rvalType))
michael@0 8934 rvalType = MIRType_Value;
michael@0 8935
michael@0 8936 MDefinition *obj = current->pop();
michael@0 8937 if (shapes.length() == 1) {
michael@0 8938 // In the monomorphic case, use separate ShapeGuard and LoadSlot
michael@0 8939 // instructions.
michael@0 8940 spew("Inlining monomorphic GETPROP");
michael@0 8941
michael@0 8942 Shape *objShape = shapes[0];
michael@0 8943 obj = addShapeGuard(obj, objShape, Bailout_ShapeGuard);
michael@0 8944
michael@0 8945 Shape *shape = objShape->searchLinear(NameToId(name));
michael@0 8946 JS_ASSERT(shape);
michael@0 8947
michael@0 8948 if (!loadSlot(obj, shape, rvalType, barrier, types))
michael@0 8949 return false;
michael@0 8950 } else {
michael@0 8951 JS_ASSERT(shapes.length() > 1);
michael@0 8952 spew("Inlining polymorphic GETPROP");
michael@0 8953
michael@0 8954 MGetPropertyPolymorphic *load = MGetPropertyPolymorphic::New(alloc(), obj, name);
michael@0 8955 current->add(load);
michael@0 8956 current->push(load);
michael@0 8957
michael@0 8958 for (size_t i = 0; i < shapes.length(); i++) {
michael@0 8959 Shape *objShape = shapes[i];
michael@0 8960 Shape *shape = objShape->searchLinear(NameToId(name));
michael@0 8961 JS_ASSERT(shape);
michael@0 8962 if (!load->addShape(objShape, shape))
michael@0 8963 return false;
michael@0 8964 }
michael@0 8965
michael@0 8966 if (failedShapeGuard_)
michael@0 8967 load->setNotMovable();
michael@0 8968
michael@0 8969 load->setResultType(rvalType);
michael@0 8970 if (!pushTypeBarrier(load, types, barrier))
michael@0 8971 return false;
michael@0 8972 }
michael@0 8973
michael@0 8974 *emitted = true;
michael@0 8975 return true;
michael@0 8976 }
michael@0 8977
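// Last optimized GETPROP path: emit an MGetPropertyCache, marking it
// idempotent when that is known to be safe and, for JSOP_CALLPROP, annotating
// it with an inline property table.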
michael@0 8978 bool
michael@0 8979 IonBuilder::getPropTryCache(bool *emitted, PropertyName *name,
michael@0 8980 bool barrier, types::TemporaryTypeSet *types)
michael@0 8981 {
michael@0 8982 JS_ASSERT(*emitted == false);
michael@0 8983
michael@0 8984 MDefinition *obj = current->peek(-1);
michael@0 8985
michael@0 8986 // The input value must either be an object, or we should have strong suspicions
michael@0 8987 // that it can be safely unboxed to an object.
michael@0 8988 if (obj->type() != MIRType_Object) {
michael@0 8989 types::TemporaryTypeSet *types = obj->resultTypeSet();
michael@0 8990 if (!types || !types->objectOrSentinel())
michael@0 8991 return true;
michael@0 8992 }
michael@0 8993
michael@0 8994 // Since getters have no guaranteed return values, we must barrier in order to be
michael@0 8995 // able to attach stubs for them.
michael@0 8996 if (inspector->hasSeenAccessedGetter(pc))
michael@0 8997 barrier = true;
michael@0 8998
michael@0 8999 if (needsToMonitorMissingProperties(types))
michael@0 9000 barrier = true;
michael@0 9001
michael@0 9002 // Caches can read values from prototypes, so update the barrier to
michael@0 9003 // reflect such possible values.
michael@0 9004 if (!barrier)
michael@0 9005 barrier = PropertyReadOnPrototypeNeedsTypeBarrier(constraints(), obj, name, types);
michael@0 9006
michael@0 9007 current->pop();
michael@0 9008 MGetPropertyCache *load = MGetPropertyCache::New(alloc(), obj, name, barrier);
michael@0 9009
michael@0 9010 // Try to mark the cache as idempotent.
michael@0 9011 //
michael@0 9012 // In parallel execution, idempotency of caches is ignored, since we
michael@0 9013 // repeat the entire ForkJoin workload if we bail out. Note that it's
michael@0 9014 // overly restrictive to mark everything as idempotent, because we can
michael@0 9015 // treat non-idempotent caches in parallel as repeatable.
michael@0 9016 if (obj->type() == MIRType_Object && !invalidatedIdempotentCache() &&
michael@0 9017 info().executionMode() != ParallelExecution)
michael@0 9018 {
michael@0 9019 if (PropertyReadIsIdempotent(constraints(), obj, name))
michael@0 9020 load->setIdempotent();
michael@0 9021 }
michael@0 9022
michael@0 9023 if (JSOp(*pc) == JSOP_CALLPROP) {
michael@0 9024 if (!annotateGetPropertyCache(obj, load, obj->resultTypeSet(), types))
michael@0 9025 return false;
michael@0 9026 }
michael@0 9027
michael@0 9028 current->add(load);
michael@0 9029 current->push(load);
michael@0 9030
michael@0 9031 if (load->isEffectful() && !resumeAfter(load))
michael@0 9032 return false;
michael@0 9033
michael@0 9034 MIRType rvalType = types->getKnownMIRType();
michael@0 9035 if (barrier || IsNullOrUndefined(rvalType))
michael@0 9036 rvalType = MIRType_Value;
michael@0 9037 load->setResultType(rvalType);
michael@0 9038
michael@0 9039 if (!pushTypeBarrier(load, types, barrier))
michael@0 9040 return false;
michael@0 9041
michael@0 9042 *emitted = true;
michael@0 9043 return true;
michael@0 9044 }
michael@0 9045
michael@0 9046 bool
michael@0 9047 IonBuilder::needsToMonitorMissingProperties(types::TemporaryTypeSet *types)
michael@0 9048 {
michael@0 9049 // GetPropertyParIC and GetElementParIC cannot safely call
michael@0 9050 // TypeScript::Monitor to ensure that the observed type set contains
michael@0 9051 // undefined. To account for possible missing properties, which property
michael@0 9052 // types do not track, we must always insert a type barrier.
michael@0 9053 return (info().executionMode() == ParallelExecution &&
michael@0 9054 !types->hasType(types::Type::UndefinedType()));
michael@0 9055 }
michael@0 9056
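// Emit a property store. During analysis, go straight to MCallSetProperty;
// otherwise try, in order, a common setter, typed object fields, definite
// slots, baseline-shaped inline stores and a polymorphic cache before falling
// back to a call.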
michael@0 9057 bool
michael@0 9058 IonBuilder::jsop_setprop(PropertyName *name)
michael@0 9059 {
michael@0 9060 MDefinition *value = current->pop();
michael@0 9061 MDefinition *obj = current->pop();
michael@0 9062
michael@0 9063 bool emitted = false;
michael@0 9064
michael@0 9065 // Always use a call if we are doing the definite properties analysis and
michael@0 9066 // not actually emitting code, to simplify later analysis.
michael@0 9067 if (info().executionModeIsAnalysis()) {
michael@0 9068 MInstruction *ins = MCallSetProperty::New(alloc(), obj, value, name, script()->strict());
michael@0 9069 current->add(ins);
michael@0 9070 current->push(value);
michael@0 9071 return resumeAfter(ins);
michael@0 9072 }
michael@0 9073
michael@0 9074 // Add post barrier if needed.
michael@0 9075 if (NeedsPostBarrier(info(), value))
michael@0 9076 current->add(MPostWriteBarrier::New(alloc(), obj, value));
michael@0 9077
michael@0 9078 // Try to inline a common property setter, or make a call.
michael@0 9079 if (!setPropTryCommonSetter(&emitted, obj, name, value) || emitted)
michael@0 9080 return emitted;
michael@0 9081
michael@0 9082 types::TemporaryTypeSet *objTypes = obj->resultTypeSet();
michael@0 9083 bool barrier = PropertyWriteNeedsTypeBarrier(alloc(), constraints(), current, &obj, name, &value,
michael@0 9084 /* canModify = */ true);
michael@0 9085
michael@0 9086 // Try to emit stores to known binary data blocks
michael@0 9087 if (!setPropTryTypedObject(&emitted, obj, name, value) || emitted)
michael@0 9088 return emitted;
michael@0 9089
michael@0 9090 // Try to emit store from definite slots.
michael@0 9091 if (!setPropTryDefiniteSlot(&emitted, obj, name, value, barrier, objTypes) || emitted)
michael@0 9092 return emitted;
michael@0 9093
michael@0 9094 // Try to emit a monomorphic/polymorphic store based on baseline caches.
michael@0 9095 if (!setPropTryInlineAccess(&emitted, obj, name, value, barrier, objTypes) || emitted)
michael@0 9096 return emitted;
michael@0 9097
michael@0 9098 // Try to emit a polymorphic cache.
michael@0 9099 if (!setPropTryCache(&emitted, obj, name, value, barrier, objTypes) || emitted)
michael@0 9100 return emitted;
michael@0 9101
michael@0 9102 // Emit call.
michael@0 9103 MInstruction *ins = MCallSetProperty::New(alloc(), obj, value, name, script()->strict());
michael@0 9104 current->add(ins);
michael@0 9105 current->push(value);
michael@0 9106 return resumeAfter(ins);
michael@0 9107 }
michael@0 9108
michael@0 9109 bool
michael@0 9110 IonBuilder::setPropTryCommonSetter(bool *emitted, MDefinition *obj,
michael@0 9111 PropertyName *name, MDefinition *value)
michael@0 9112 {
michael@0 9113 JS_ASSERT(*emitted == false);
michael@0 9114
michael@0 9115 Shape *lastProperty = nullptr;
michael@0 9116 JSFunction *commonSetter = nullptr;
michael@0 9117 JSObject *foundProto = inspector->commonSetPropFunction(pc, &lastProperty, &commonSetter);
michael@0 9118 if (!foundProto)
michael@0 9119 return true;
michael@0 9120
michael@0 9121 types::TemporaryTypeSet *objTypes = obj->resultTypeSet();
michael@0 9122 MDefinition *guard = testCommonGetterSetter(objTypes, name, /* isGetter = */ false,
michael@0 9123 foundProto, lastProperty);
michael@0 9124 if (!guard)
michael@0 9125 return true;
michael@0 9126
michael@0 9127 bool isDOM = objTypes->isDOMClass();
michael@0 9128
michael@0 9129 // Emit common setter.
michael@0 9130
michael@0 9131 // Setters can be called even if the property write needs a type
michael@0 9132 // barrier, as calling the setter does not actually write any data
michael@0 9133 // properties.
michael@0 9134
michael@0 9135 // Try emitting dom call.
michael@0 9136 if (!setPropTryCommonDOMSetter(emitted, obj, value, commonSetter, isDOM))
michael@0 9137 return false;
michael@0 9138
michael@0 9139 if (*emitted)
michael@0 9140 return true;
michael@0 9141
michael@0 9142 // Don't call the setter with a primitive value.
michael@0 9143 if (objTypes->getKnownMIRType() != MIRType_Object) {
michael@0 9144 MGuardObject *guardObj = MGuardObject::New(alloc(), obj);
michael@0 9145 current->add(guardObj);
michael@0 9146 obj = guardObj;
michael@0 9147 }
michael@0 9148
michael@0 9149 // Dummy up the stack, as in getprop. We are pushing an extra value, so
michael@0 9150 // ensure there is enough space.
michael@0 9151 if (!current->ensureHasSlots(3))
michael@0 9152 return false;
michael@0 9153
michael@0 9154 pushConstant(ObjectValue(*commonSetter));
michael@0 9155
michael@0 9156 current->push(obj);
michael@0 9157 current->push(value);
michael@0 9158
michael@0 9159 // Call the setter. Note that we have to push the original value, not
michael@0 9160 // the setter's return value.
michael@0 9161 CallInfo callInfo(alloc(), false);
michael@0 9162 if (!callInfo.init(current, 1))
michael@0 9163 return false;
michael@0 9164
michael@0 9165 // Ensure that we know we are calling a setter in case we inline it.
michael@0 9166 callInfo.markAsSetter();
michael@0 9167
michael@0 9168 // Inline the setter if we can.
michael@0 9169 if (commonSetter->isInterpreted()) {
michael@0 9170 InliningDecision decision = makeInliningDecision(commonSetter, callInfo);
michael@0 9171 switch (decision) {
michael@0 9172 case InliningDecision_Error:
michael@0 9173 return false;
michael@0 9174 case InliningDecision_DontInline:
michael@0 9175 break;
michael@0 9176 case InliningDecision_Inline:
michael@0 9177 if (!inlineScriptedCall(callInfo, commonSetter))
michael@0 9178 return false;
michael@0 9179 *emitted = true;
michael@0 9180 return true;
michael@0 9181 }
michael@0 9182 }
michael@0 9183
michael@0 9184 MCall *call = makeCallHelper(commonSetter, callInfo, false);
michael@0 9185 if (!call)
michael@0 9186 return false;
michael@0 9187
michael@0 9188 current->push(value);
michael@0 9189 if (!resumeAfter(call))
michael@0 9190 return false;
michael@0 9191
michael@0 9192 *emitted = true;
michael@0 9193 return true;
michael@0 9194 }
michael@0 9195
michael@0 9196 bool
michael@0 9197 IonBuilder::setPropTryCommonDOMSetter(bool *emitted, MDefinition *obj,
michael@0 9198 MDefinition *value, JSFunction *setter,
michael@0 9199 bool isDOM)
michael@0 9200 {
michael@0 9201 JS_ASSERT(*emitted == false);
michael@0 9202
michael@0 9203 if (!isDOM)
michael@0 9204 return true;
michael@0 9205
michael@0 9206 types::TemporaryTypeSet *objTypes = obj->resultTypeSet();
michael@0 9207 if (!testShouldDOMCall(objTypes, setter, JSJitInfo::Setter))
michael@0 9208 return true;
michael@0 9209
michael@0 9210 // Emit SetDOMProperty.
michael@0 9211 JS_ASSERT(setter->jitInfo()->type() == JSJitInfo::Setter);
michael@0 9212 MSetDOMProperty *set = MSetDOMProperty::New(alloc(), setter->jitInfo()->setter, obj, value);
michael@0 9213
michael@0 9214 current->add(set);
michael@0 9215 current->push(value);
michael@0 9216
michael@0 9217 if (!resumeAfter(set))
michael@0 9218 return false;
michael@0 9219
michael@0 9220 *emitted = true;
michael@0 9221 return true;
michael@0 9222 }
michael@0 9223
michael@0 9224 bool
michael@0 9225 IonBuilder::setPropTryTypedObject(bool *emitted, MDefinition *obj,
michael@0 9226 PropertyName *name, MDefinition *value)
michael@0 9227 {
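// Roughly, this handles stores like |p.x = 3.14| where |p| is a typed object
// instance (e.g. of |new StructType({x: float64, y: float64})|); when the
// field is a scalar the store can be compiled down to a raw write at the
// field's offset.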
michael@0 9228 TypeDescrSet fieldDescrs;
michael@0 9229 int32_t fieldOffset;
michael@0 9230 size_t fieldIndex;
michael@0 9231 if (!lookupTypedObjectField(obj, name, &fieldOffset, &fieldDescrs,
michael@0 9232 &fieldIndex))
michael@0 9233 return false;
michael@0 9234 if (fieldDescrs.empty())
michael@0 9235 return true;
michael@0 9236
michael@0 9237 switch (fieldDescrs.kind()) {
michael@0 9238 case TypeDescr::X4:
michael@0 9239 // FIXME (bug 894104): store into a MIRType_float32x4 etc
michael@0 9240 return true;
michael@0 9241
michael@0 9242 case TypeDescr::Reference:
michael@0 9243 case TypeDescr::Struct:
michael@0 9244 case TypeDescr::SizedArray:
michael@0 9245 case TypeDescr::UnsizedArray:
michael@0 9246 // For now, only optimize storing scalars.
michael@0 9247 return true;
michael@0 9248
michael@0 9249 case TypeDescr::Scalar:
michael@0 9250 return setPropTryScalarPropOfTypedObject(emitted, obj, fieldOffset,
michael@0 9251 value, fieldDescrs);
michael@0 9252 }
michael@0 9253
michael@0 9254 MOZ_ASSUME_UNREACHABLE("Unknown kind");
michael@0 9255 }
michael@0 9256
michael@0 9257 bool
michael@0 9258 IonBuilder::setPropTryScalarPropOfTypedObject(bool *emitted,
michael@0 9259 MDefinition *obj,
michael@0 9260 int32_t fieldOffset,
michael@0 9261 MDefinition *value,
michael@0 9262 TypeDescrSet fieldDescrs)
michael@0 9263 {
michael@0 9264 // Must always be storing the same scalar type
michael@0 9265 ScalarTypeDescr::Type fieldType;
michael@0 9266 if (!fieldDescrs.scalarType(&fieldType))
michael@0 9267 return true;
michael@0 9268
michael@0 9269 // OK! Perform the optimization.
michael@0 9270
michael@0 9271 if (!storeScalarTypedObjectValue(obj, constantInt(fieldOffset), fieldType, true, false, value))
michael@0 9272 return false;
michael@0 9273
michael@0 9274 current->push(value);
michael@0 9275
michael@0 9276 *emitted = true;
michael@0 9277 return true;
michael@0 9278 }
michael@0 9279
michael@0 9280 bool
michael@0 9281 IonBuilder::setPropTryDefiniteSlot(bool *emitted, MDefinition *obj,
michael@0 9282 PropertyName *name, MDefinition *value,
michael@0 9283 bool barrier, types::TemporaryTypeSet *objTypes)
michael@0 9284 {
michael@0 9285 JS_ASSERT(*emitted == false);
michael@0 9286
michael@0 9287 if (barrier)
michael@0 9288 return true;
michael@0 9289
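// A definite slot is a property that type inference has proven to occupy the
// same fixed slot on every object in |obj|'s type set, e.g. |this.x| assigned
// unconditionally in a constructor. Such a store needs no shape guard.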
michael@0 9290 types::HeapTypeSetKey property;
michael@0 9291 if (!getDefiniteSlot(obj->resultTypeSet(), name, &property))
michael@0 9292 return true;
michael@0 9293
michael@0 9294 if (property.nonWritable(constraints()))
michael@0 9295 return true;
michael@0 9296
michael@0 9297 MStoreFixedSlot *fixed = MStoreFixedSlot::New(alloc(), obj, property.maybeTypes()->definiteSlot(), value);
michael@0 9298 current->add(fixed);
michael@0 9299 current->push(value);
michael@0 9300
michael@0 9301 if (property.needsBarrier(constraints()))
michael@0 9302 fixed->setNeedsBarrier();
michael@0 9303
michael@0 9304 if (!resumeAfter(fixed))
michael@0 9305 return false;
michael@0 9306
michael@0 9307 *emitted = true;
michael@0 9308 return true;
michael@0 9309 }
michael@0 9310
michael@0 9311 bool
michael@0 9312 IonBuilder::setPropTryInlineAccess(bool *emitted, MDefinition *obj,
michael@0 9313 PropertyName *name,
michael@0 9314 MDefinition *value, bool barrier,
michael@0 9315 types::TemporaryTypeSet *objTypes)
michael@0 9316 {
michael@0 9317 JS_ASSERT(*emitted == false);
michael@0 9318
michael@0 9319 if (barrier)
michael@0 9320 return true;
michael@0 9321
michael@0 9322 BaselineInspector::ShapeVector shapes(alloc());
michael@0 9323 if (!inspector->maybeShapesForPropertyOp(pc, shapes))
michael@0 9324 return false;
michael@0 9325
michael@0 9326 if (shapes.empty())
michael@0 9327 return true;
michael@0 9328
michael@0 9329 if (!CanInlinePropertyOpShapes(shapes))
michael@0 9330 return true;
michael@0 9331
michael@0 9332 if (shapes.length() == 1) {
michael@0 9333 spew("Inlining monomorphic SETPROP");
michael@0 9334
michael@0 9335 // The Baseline IC was monomorphic, so we inline the property access as
michael@0 9336 // long as the shape is not in dictionary mode. We cannot be sure
michael@0 9337 // that the shape is still a lastProperty, and calling Shape::search
michael@0 9338 // on dictionary mode shapes that aren't lastProperty is invalid.
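// E.g. if Baseline only ever saw objects of one shape at this |obj.x = v|
// site, a single shape guard followed by a direct slot store suffices.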
michael@0 9339 Shape *objShape = shapes[0];
michael@0 9340 obj = addShapeGuard(obj, objShape, Bailout_ShapeGuard);
michael@0 9341
michael@0 9342 Shape *shape = objShape->searchLinear(NameToId(name));
michael@0 9343 JS_ASSERT(shape);
michael@0 9344
michael@0 9345 bool needsBarrier = objTypes->propertyNeedsBarrier(constraints(), NameToId(name));
michael@0 9346 if (!storeSlot(obj, shape, value, needsBarrier))
michael@0 9347 return false;
michael@0 9348 } else {
michael@0 9349 JS_ASSERT(shapes.length() > 1);
michael@0 9350 spew("Inlining polymorphic SETPROP");
michael@0 9351
michael@0 9352 MSetPropertyPolymorphic *ins = MSetPropertyPolymorphic::New(alloc(), obj, value);
michael@0 9353 current->add(ins);
michael@0 9354 current->push(value);
michael@0 9355
michael@0 9356 for (size_t i = 0; i < shapes.length(); i++) {
michael@0 9357 Shape *objShape = shapes[i];
michael@0 9358 Shape *shape = objShape->searchLinear(NameToId(name));
michael@0 9359 JS_ASSERT(shape);
michael@0 9360 if (!ins->addShape(objShape, shape))
michael@0 9361 return false;
michael@0 9362 }
michael@0 9363
michael@0 9364 if (objTypes->propertyNeedsBarrier(constraints(), NameToId(name)))
michael@0 9365 ins->setNeedsBarrier();
michael@0 9366
michael@0 9367 if (!resumeAfter(ins))
michael@0 9368 return false;
michael@0 9369 }
michael@0 9370
michael@0 9371 *emitted = true;
michael@0 9372 return true;
michael@0 9373 }
michael@0 9374
michael@0 9375 bool
michael@0 9376 IonBuilder::setPropTryCache(bool *emitted, MDefinition *obj,
michael@0 9377 PropertyName *name, MDefinition *value,
michael@0 9378 bool barrier, types::TemporaryTypeSet *objTypes)
michael@0 9379 {
michael@0 9380 JS_ASSERT(*emitted == false);
michael@0 9381
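// This is the generic fallback: an inline cache populated at runtime when
// none of the more specialized paths above applied.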
michael@0 9382 // Emit SetPropertyCache.
michael@0 9383 MSetPropertyCache *ins = MSetPropertyCache::New(alloc(), obj, value, name, script()->strict(), barrier);
michael@0 9384
michael@0 9385 if (!objTypes || objTypes->propertyNeedsBarrier(constraints(), NameToId(name)))
michael@0 9386 ins->setNeedsBarrier();
michael@0 9387
michael@0 9388 current->add(ins);
michael@0 9389 current->push(value);
michael@0 9390
michael@0 9391 if (!resumeAfter(ins))
michael@0 9392 return false;
michael@0 9393
michael@0 9394 *emitted = true;
michael@0 9395 return true;
michael@0 9396 }
michael@0 9397
michael@0 9398 bool
michael@0 9399 IonBuilder::jsop_delprop(PropertyName *name)
michael@0 9400 {
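// |delete obj.prop| is not specialized here; it always goes through a VM call
// (MDeleteProperty).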
michael@0 9401 MDefinition *obj = current->pop();
michael@0 9402
michael@0 9403 MInstruction *ins = MDeleteProperty::New(alloc(), obj, name);
michael@0 9404
michael@0 9405 current->add(ins);
michael@0 9406 current->push(ins);
michael@0 9407
michael@0 9408 return resumeAfter(ins);
michael@0 9409 }
michael@0 9410
michael@0 9411 bool
michael@0 9412 IonBuilder::jsop_delelem()
michael@0 9413 {
michael@0 9414 MDefinition *index = current->pop();
michael@0 9415 MDefinition *obj = current->pop();
michael@0 9416
michael@0 9417 MDeleteElement *ins = MDeleteElement::New(alloc(), obj, index);
michael@0 9418 current->add(ins);
michael@0 9419 current->push(ins);
michael@0 9420
michael@0 9421 return resumeAfter(ins);
michael@0 9422 }
michael@0 9423
michael@0 9424 bool
michael@0 9425 IonBuilder::jsop_regexp(RegExpObject *reobj)
michael@0 9426 {
michael@0 9427 // JS semantics require regular expression literals to create different
michael@0 9428 // objects every time they execute. We only need to do this cloning if the
michael@0 9429 // script could actually observe the effect of such cloning, for instance
michael@0 9430 // by getting or setting properties on it.
michael@0 9431 //
michael@0 9432 // First, make sure the regex is one we can safely optimize. Lowering can
michael@0 9433 // then check if this regex object only flows into known natives and can
michael@0 9434 // avoid cloning in this case.
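// E.g. a function returning the literal |/abc/| must hand out a fresh object
// per execution if callers might set properties such as |lastIndex| on the
// result; global and sticky regexps additionally carry per-object state.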
michael@0 9435
michael@0 9436 bool mustClone = true;
michael@0 9437 types::TypeObjectKey *typeObj = types::TypeObjectKey::get(&script()->global());
michael@0 9438 if (!typeObj->hasFlags(constraints(), types::OBJECT_FLAG_REGEXP_FLAGS_SET)) {
michael@0 9439 RegExpStatics *res = script()->global().getRegExpStatics();
michael@0 9440
michael@0 9441 DebugOnly<uint32_t> origFlags = reobj->getFlags();
michael@0 9442 DebugOnly<uint32_t> staticsFlags = res->getFlags();
michael@0 9443 JS_ASSERT((origFlags & staticsFlags) == staticsFlags);
michael@0 9444
michael@0 9445 if (!reobj->global() && !reobj->sticky())
michael@0 9446 mustClone = false;
michael@0 9447 }
michael@0 9448
michael@0 9449 MRegExp *regexp = MRegExp::New(alloc(), constraints(), reobj, mustClone);
michael@0 9450 current->add(regexp);
michael@0 9451 current->push(regexp);
michael@0 9452
michael@0 9453 return true;
michael@0 9454 }
michael@0 9455
michael@0 9456 bool
michael@0 9457 IonBuilder::jsop_object(JSObject *obj)
michael@0 9458 {
michael@0 9459 if (options.cloneSingletons()) {
michael@0 9460 MCloneLiteral *clone = MCloneLiteral::New(alloc(), constant(ObjectValue(*obj)));
michael@0 9461 current->add(clone);
michael@0 9462 current->push(clone);
michael@0 9463 return resumeAfter(clone);
michael@0 9464 }
michael@0 9465
michael@0 9466 compartment->setSingletonsAsValues();
michael@0 9467 pushConstant(ObjectValue(*obj));
michael@0 9468 return true;
michael@0 9469 }
michael@0 9470
michael@0 9471 bool
michael@0 9472 IonBuilder::jsop_lambda(JSFunction *fun)
michael@0 9473 {
michael@0 9474 MOZ_ASSERT(analysis().usesScopeChain());
michael@0 9475 MOZ_ASSERT(!fun->isArrow());
michael@0 9476
michael@0 9477 if (fun->isNative() && IsAsmJSModuleNative(fun->native()))
michael@0 9478 return abort("asm.js module function");
michael@0 9479
michael@0 9480 MLambda *ins = MLambda::New(alloc(), constraints(), current->scopeChain(), fun);
michael@0 9481 current->add(ins);
michael@0 9482 current->push(ins);
michael@0 9483
michael@0 9484 return resumeAfter(ins);
michael@0 9485 }
michael@0 9486
michael@0 9487 bool
michael@0 9488 IonBuilder::jsop_lambda_arrow(JSFunction *fun)
michael@0 9489 {
michael@0 9490 MOZ_ASSERT(analysis().usesScopeChain());
michael@0 9491 MOZ_ASSERT(fun->isArrow());
michael@0 9492 MOZ_ASSERT(!fun->isNative());
michael@0 9493
michael@0 9494 MDefinition *thisDef = current->pop();
michael@0 9495
michael@0 9496 MLambdaArrow *ins = MLambdaArrow::New(alloc(), constraints(), current->scopeChain(),
michael@0 9497 thisDef, fun);
michael@0 9498 current->add(ins);
michael@0 9499 current->push(ins);
michael@0 9500
michael@0 9501 return resumeAfter(ins);
michael@0 9502 }
michael@0 9503
michael@0 9504 bool
michael@0 9505 IonBuilder::jsop_setarg(uint32_t arg)
michael@0 9506 {
michael@0 9507 // To handle this case, we should spill the arguments to the space where
michael@0 9508 // actual arguments are stored. The tricky part is that if we add a MIR
michael@0 9509 // to wrap the spilling action, we don't want the spilling to be
michael@0 9510 // captured by the GETARG and by the resume point, only by
michael@0 9511 // MGetFrameArgument.
michael@0 9512 JS_ASSERT(analysis_.hasSetArg());
michael@0 9513 MDefinition *val = current->peek(-1);
michael@0 9514
michael@0 9515 // If an arguments object is in use, and it aliases formals, then all SETARGs
michael@0 9516 // must go through the arguments object.
michael@0 9517 if (info().argsObjAliasesFormals()) {
michael@0 9518 if (NeedsPostBarrier(info(), val))
michael@0 9519 current->add(MPostWriteBarrier::New(alloc(), current->argumentsObject(), val));
michael@0 9520 current->add(MSetArgumentsObjectArg::New(alloc(), current->argumentsObject(),
michael@0 9521 GET_ARGNO(pc), val));
michael@0 9522 return true;
michael@0 9523 }
michael@0 9524
michael@0 9525 // :TODO: if hasArguments() is true, and the script has a JSOP_SETARG, then
michael@0 9526 // convert all arg accesses to go through the arguments object. (see Bug 957475)
michael@0 9527 if (info().hasArguments())
michael@0 9528 return abort("NYI: arguments & setarg.");
michael@0 9529
michael@0 9530 // Otherwise, if the magic |arguments| is in use, and it aliases formals, and there exist
michael@0 9531 // arguments[...] GETELEM expressions in the script, then SetFrameArgument must be used.
michael@0 9532 // If no arguments[...] GETELEM expressions are in the script, and an argsobj is not
michael@0 9533 // required, then it means that any aliased argument set can never be observed, and
michael@0 9534 // the frame does not actually need to be updated with the new arg value.
michael@0 9535 if (info().argumentsAliasesFormals()) {
michael@0 9536 // JSOP_SETARG with magic arguments within inline frames is not yet supported.
michael@0 9537 JS_ASSERT(script()->uninlineable() && !isInlineBuilder());
michael@0 9538
michael@0 9539 MSetFrameArgument *store = MSetFrameArgument::New(alloc(), arg, val);
michael@0 9540 modifiesFrameArguments_ = true;
michael@0 9541 current->add(store);
michael@0 9542 current->setArg(arg);
michael@0 9543 return true;
michael@0 9544 }
michael@0 9545
michael@0 9546 // If this assignment is at the start of the function and is coercing
michael@0 9547 // the original value for the argument which was passed in, loosen
michael@0 9548 // the type information for that original argument if it is currently
michael@0 9549 // empty due to originally executing in the interpreter.
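// A typical case is an asm.js-style coercion at the top of the function,
// e.g. |function f(x) { x = x | 0; ... }| or |x = +x| (unary plus is built
// as a multiplication, hence the isMul() check below).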
michael@0 9550 if (graph().numBlocks() == 1 &&
michael@0 9551 (val->isBitOr() || val->isBitAnd() || val->isMul() /* for JSOP_POS */))
michael@0 9552 {
michael@0 9553 for (size_t i = 0; i < val->numOperands(); i++) {
michael@0 9554 MDefinition *op = val->getOperand(i);
michael@0 9555 if (op->isParameter() &&
michael@0 9556 op->toParameter()->index() == (int32_t)arg &&
michael@0 9557 op->resultTypeSet() &&
michael@0 9558 op->resultTypeSet()->empty())
michael@0 9559 {
michael@0 9560 bool otherUses = false;
michael@0 9561 for (MUseDefIterator iter(op); iter; iter++) {
michael@0 9562 MDefinition *def = iter.def();
michael@0 9563 if (def == val)
michael@0 9564 continue;
michael@0 9565 otherUses = true;
michael@0 9566 }
michael@0 9567 if (!otherUses) {
michael@0 9568 JS_ASSERT(op->resultTypeSet() == &argTypes[arg]);
michael@0 9569 argTypes[arg].addType(types::Type::UnknownType(), alloc_->lifoAlloc());
michael@0 9570 if (val->isMul()) {
michael@0 9571 val->setResultType(MIRType_Double);
michael@0 9572 val->toMul()->setSpecialization(MIRType_Double);
michael@0 9573 } else {
michael@0 9574 JS_ASSERT(val->type() == MIRType_Int32);
michael@0 9575 }
michael@0 9576 val->setResultTypeSet(nullptr);
michael@0 9577 }
michael@0 9578 }
michael@0 9579 }
michael@0 9580 }
michael@0 9581
michael@0 9582 current->setArg(arg);
michael@0 9583 return true;
michael@0 9584 }
michael@0 9585
michael@0 9586 bool
michael@0 9587 IonBuilder::jsop_defvar(uint32_t index)
michael@0 9588 {
michael@0 9589 JS_ASSERT(JSOp(*pc) == JSOP_DEFVAR || JSOp(*pc) == JSOP_DEFCONST);
michael@0 9590
michael@0 9591 PropertyName *name = script()->getName(index);
michael@0 9592
michael@0 9593 // Bake in attrs.
michael@0 9594 unsigned attrs = JSPROP_ENUMERATE | JSPROP_PERMANENT;
michael@0 9595 if (JSOp(*pc) == JSOP_DEFCONST)
michael@0 9596 attrs |= JSPROP_READONLY;
michael@0 9597
michael@0 9598 // Pass the ScopeChain.
michael@0 9599 JS_ASSERT(analysis().usesScopeChain());
michael@0 9600
michael@0 9601 // Bake the name pointer into the MDefVar.
michael@0 9602 MDefVar *defvar = MDefVar::New(alloc(), name, attrs, current->scopeChain());
michael@0 9603 current->add(defvar);
michael@0 9604
michael@0 9605 return resumeAfter(defvar);
michael@0 9606 }
michael@0 9607
michael@0 9608 bool
michael@0 9609 IonBuilder::jsop_deffun(uint32_t index)
michael@0 9610 {
michael@0 9611 JSFunction *fun = script()->getFunction(index);
michael@0 9612 if (fun->isNative() && IsAsmJSModuleNative(fun->native()))
michael@0 9613 return abort("asm.js module function");
michael@0 9614
michael@0 9615 JS_ASSERT(analysis().usesScopeChain());
michael@0 9616
michael@0 9617 MDefFun *deffun = MDefFun::New(alloc(), fun, current->scopeChain());
michael@0 9618 current->add(deffun);
michael@0 9619
michael@0 9620 return resumeAfter(deffun);
michael@0 9621 }
michael@0 9622
michael@0 9623 bool
michael@0 9624 IonBuilder::jsop_this()
michael@0 9625 {
michael@0 9626 if (!info().funMaybeLazy())
michael@0 9627 return abort("JSOP_THIS outside of a JSFunction.");
michael@0 9628
michael@0 9629 if (info().funMaybeLazy()->isArrow()) {
michael@0 9630 // Arrow functions store their lexical |this| in an extended slot.
michael@0 9631 MLoadArrowThis *thisObj = MLoadArrowThis::New(alloc(), getCallee());
michael@0 9632 current->add(thisObj);
michael@0 9633 current->push(thisObj);
michael@0 9634 return true;
michael@0 9635 }
michael@0 9636
michael@0 9637 if (script()->strict() || info().funMaybeLazy()->isSelfHostedBuiltin()) {
michael@0 9638 // No need to wrap primitive |this| in strict mode or self-hosted code.
michael@0 9639 current->pushSlot(info().thisSlot());
michael@0 9640 return true;
michael@0 9641 }
michael@0 9642
michael@0 9643 if (thisTypes->getKnownMIRType() == MIRType_Object ||
michael@0 9644 (thisTypes->empty() && baselineFrame_ && baselineFrame_->thisType.isSomeObject()))
michael@0 9645 {
michael@0 9646 // This is safe, because if the entry type of |this| is an object, it
michael@0 9647 // will necessarily be an object throughout the entire function. OSR
michael@0 9648 // can introduce a phi, but this phi will be specialized.
michael@0 9649 current->pushSlot(info().thisSlot());
michael@0 9650 return true;
michael@0 9651 }
michael@0 9652
michael@0 9653 // If we are doing an analysis, we might not yet know the type of |this|.
michael@0 9654 // Instead of bailing out, just push the |this| slot, as this code won't
michael@0 9655 // actually execute and it does not matter whether |this| is primitive.
michael@0 9656 if (info().executionModeIsAnalysis()) {
michael@0 9657 current->pushSlot(info().thisSlot());
michael@0 9658 return true;
michael@0 9659 }
michael@0 9660
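// E.g. |f.call(5)| in non-strict code makes |this| the primitive 5, which
// must be boxed into a Number object before the body can observe it; that
// boxing is what MComputeThis performs at runtime.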
michael@0 9661 // Hard case: |this| may be a primitive we have to wrap.
michael@0 9662 MDefinition *def = current->getSlot(info().thisSlot());
michael@0 9663
michael@0 9664 if (def->type() == MIRType_Object) {
michael@0 9665 // If we already computed a |this| object, we can reuse it.
michael@0 9666 current->push(def);
michael@0 9667 return true;
michael@0 9668 }
michael@0 9669
michael@0 9670 MComputeThis *thisObj = MComputeThis::New(alloc(), def);
michael@0 9671 current->add(thisObj);
michael@0 9672 current->push(thisObj);
michael@0 9673
michael@0 9674 current->setSlot(info().thisSlot(), thisObj);
michael@0 9675
michael@0 9676 return resumeAfter(thisObj);
michael@0 9677 }
michael@0 9678
michael@0 9679 bool
michael@0 9680 IonBuilder::jsop_typeof()
michael@0 9681 {
michael@0 9682 MDefinition *input = current->pop();
michael@0 9683 MTypeOf *ins = MTypeOf::New(alloc(), input, input->type());
michael@0 9684
michael@0 9685 ins->infer();
michael@0 9686
michael@0 9687 current->add(ins);
michael@0 9688 current->push(ins);
michael@0 9689
michael@0 9690 return true;
michael@0 9691 }
michael@0 9692
michael@0 9693 bool
michael@0 9694 IonBuilder::jsop_toid()
michael@0 9695 {
michael@0 9696 // No-op if the index is an integer.
michael@0 9697 if (current->peek(-1)->type() == MIRType_Int32)
michael@0 9698 return true;
michael@0 9699
michael@0 9700 MDefinition *index = current->pop();
michael@0 9701 MToId *ins = MToId::New(alloc(), current->peek(-1), index);
michael@0 9702
michael@0 9703 current->add(ins);
michael@0 9704 current->push(ins);
michael@0 9705
michael@0 9706 return resumeAfter(ins);
michael@0 9707 }
michael@0 9708
michael@0 9709 bool
michael@0 9710 IonBuilder::jsop_iter(uint8_t flags)
michael@0 9711 {
michael@0 9712 if (flags != JSITER_ENUMERATE)
michael@0 9713 nonStringIteration_ = true;
michael@0 9714
michael@0 9715 MDefinition *obj = current->pop();
michael@0 9716 MInstruction *ins = MIteratorStart::New(alloc(), obj, flags);
michael@0 9717
michael@0 9718 if (!iterators_.append(ins))
michael@0 9719 return false;
michael@0 9720
michael@0 9721 current->add(ins);
michael@0 9722 current->push(ins);
michael@0 9723
michael@0 9724 return resumeAfter(ins);
michael@0 9725 }
michael@0 9726
michael@0 9727 bool
michael@0 9728 IonBuilder::jsop_iternext()
michael@0 9729 {
michael@0 9730 MDefinition *iter = current->peek(-1);
michael@0 9731 MInstruction *ins = MIteratorNext::New(alloc(), iter);
michael@0 9732
michael@0 9733 current->add(ins);
michael@0 9734 current->push(ins);
michael@0 9735
michael@0 9736 if (!resumeAfter(ins))
michael@0 9737 return false;
michael@0 9738
michael@0 9739 if (!nonStringIteration_ && !inspector->hasSeenNonStringIterNext(pc)) {
michael@0 9740 ins = MUnbox::New(alloc(), ins, MIRType_String, MUnbox::Fallible, Bailout_BaselineInfo);
michael@0 9741 current->add(ins);
michael@0 9742 current->rewriteAtDepth(-1, ins);
michael@0 9743 }
michael@0 9744
michael@0 9745 return true;
michael@0 9746 }
michael@0 9747
michael@0 9748 bool
michael@0 9749 IonBuilder::jsop_itermore()
michael@0 9750 {
michael@0 9751 MDefinition *iter = current->peek(-1);
michael@0 9752 MInstruction *ins = MIteratorMore::New(alloc(), iter);
michael@0 9753
michael@0 9754 current->add(ins);
michael@0 9755 current->push(ins);
michael@0 9756
michael@0 9757 return resumeAfter(ins);
michael@0 9758 }
michael@0 9759
michael@0 9760 bool
michael@0 9761 IonBuilder::jsop_iterend()
michael@0 9762 {
michael@0 9763 MDefinition *iter = current->pop();
michael@0 9764 MInstruction *ins = MIteratorEnd::New(alloc(), iter);
michael@0 9765
michael@0 9766 current->add(ins);
michael@0 9767
michael@0 9768 return resumeAfter(ins);
michael@0 9769 }
michael@0 9770
michael@0 9771 MDefinition *
michael@0 9772 IonBuilder::walkScopeChain(unsigned hops)
michael@0 9773 {
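// |hops| comes from the ScopeCoordinate: it is the number of enclosing scope
// objects to walk up before reaching the one that holds the aliased slot.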
michael@0 9774 MDefinition *scope = current->getSlot(info().scopeChainSlot());
michael@0 9775
michael@0 9776 for (unsigned i = 0; i < hops; i++) {
michael@0 9777 MInstruction *ins = MEnclosingScope::New(alloc(), scope);
michael@0 9778 current->add(ins);
michael@0 9779 scope = ins;
michael@0 9780 }
michael@0 9781
michael@0 9782 return scope;
michael@0 9783 }
michael@0 9784
michael@0 9785 bool
michael@0 9786 IonBuilder::hasStaticScopeObject(ScopeCoordinate sc, JSObject **pcall)
michael@0 9787 {
michael@0 9788 JSScript *outerScript = ScopeCoordinateFunctionScript(script(), pc);
michael@0 9789 if (!outerScript || !outerScript->treatAsRunOnce())
michael@0 9790 return false;
michael@0 9791
michael@0 9792 types::TypeObjectKey *funType =
michael@0 9793 types::TypeObjectKey::get(outerScript->functionNonDelazifying());
michael@0 9794 if (funType->hasFlags(constraints(), types::OBJECT_FLAG_RUNONCE_INVALIDATED))
michael@0 9795 return false;
michael@0 9796
michael@0 9797 // The script this aliased var operation is accessing will run only once,
michael@0 9798 // so there will be only one call object and the aliased var access can be
michael@0 9799 // compiled in the same manner as a global access. We still need to find
michael@0 9800 // the call object though.
michael@0 9801
michael@0 9802 // Look for the call object on the current script's function's scope chain.
michael@0 9803 // If the current script is inner to the outer script and the function has
michael@0 9804 // singleton type then it should show up here.
michael@0 9805
michael@0 9806 MDefinition *scope = current->getSlot(info().scopeChainSlot());
michael@0 9807 scope->setImplicitlyUsedUnchecked();
michael@0 9808
michael@0 9809 JSObject *environment = script()->functionNonDelazifying()->environment();
michael@0 9810 while (environment && !environment->is<GlobalObject>()) {
michael@0 9811 if (environment->is<CallObject>() &&
michael@0 9812 !environment->as<CallObject>().isForEval() &&
michael@0 9813 environment->as<CallObject>().callee().nonLazyScript() == outerScript)
michael@0 9814 {
michael@0 9815 JS_ASSERT(environment->hasSingletonType());
michael@0 9816 *pcall = environment;
michael@0 9817 return true;
michael@0 9818 }
michael@0 9819 environment = environment->enclosingScope();
michael@0 9820 }
michael@0 9821
michael@0 9822 // Look for the call object on the current frame, if we are compiling the
michael@0 9823 // outer script itself. Don't do this if we are at entry to the outer
michael@0 9824 // script, as the call object we see will not be the real one; after
michael@0 9825 // entering the Ion code a different call object will be created.
michael@0 9826
michael@0 9827 if (script() == outerScript && baselineFrame_ && info().osrPc()) {
michael@0 9828 JSObject *singletonScope = baselineFrame_->singletonScopeChain;
michael@0 9829 if (singletonScope &&
michael@0 9830 singletonScope->is<CallObject>() &&
michael@0 9831 singletonScope->as<CallObject>().callee().nonLazyScript() == outerScript)
michael@0 9832 {
michael@0 9833 JS_ASSERT(singletonScope->hasSingletonType());
michael@0 9834 *pcall = singletonScope;
michael@0 9835 return true;
michael@0 9836 }
michael@0 9837 }
michael@0 9838
michael@0 9839 return true;
michael@0 9840 }
michael@0 9841
michael@0 9842 bool
michael@0 9843 IonBuilder::jsop_getaliasedvar(ScopeCoordinate sc)
michael@0 9844 {
michael@0 9845 JSObject *call = nullptr;
michael@0 9846 if (hasStaticScopeObject(sc, &call) && call) {
michael@0 9847 PropertyName *name = ScopeCoordinateName(scopeCoordinateNameCache, script(), pc);
michael@0 9848 bool succeeded;
michael@0 9849 if (!getStaticName(call, name, &succeeded))
michael@0 9850 return false;
michael@0 9851 if (succeeded)
michael@0 9852 return true;
michael@0 9853 }
michael@0 9854
michael@0 9855 MDefinition *obj = walkScopeChain(sc.hops());
michael@0 9856
michael@0 9857 Shape *shape = ScopeCoordinateToStaticScopeShape(script(), pc);
michael@0 9858
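// Slots below numFixedSlots() live inline in the object; slots past that live
// in the dynamically allocated slots array, which is why the out-of-line case
// loads the slots pointer first.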
michael@0 9859 MInstruction *load;
michael@0 9860 if (shape->numFixedSlots() <= sc.slot()) {
michael@0 9861 MInstruction *slots = MSlots::New(alloc(), obj);
michael@0 9862 current->add(slots);
michael@0 9863
michael@0 9864 load = MLoadSlot::New(alloc(), slots, sc.slot() - shape->numFixedSlots());
michael@0 9865 } else {
michael@0 9866 load = MLoadFixedSlot::New(alloc(), obj, sc.slot());
michael@0 9867 }
michael@0 9868
michael@0 9869 current->add(load);
michael@0 9870 current->push(load);
michael@0 9871
michael@0 9872 types::TemporaryTypeSet *types = bytecodeTypes(pc);
michael@0 9873 return pushTypeBarrier(load, types, true);
michael@0 9874 }
michael@0 9875
michael@0 9876 bool
michael@0 9877 IonBuilder::jsop_setaliasedvar(ScopeCoordinate sc)
michael@0 9878 {
michael@0 9879 JSObject *call = nullptr;
michael@0 9880 if (hasStaticScopeObject(sc, &call)) {
michael@0 9881 uint32_t depth = current->stackDepth() + 1;
michael@0 9882 if (depth > current->nslots()) {
michael@0 9883 if (!current->increaseSlots(depth - current->nslots()))
michael@0 9884 return false;
michael@0 9885 }
michael@0 9886 MDefinition *value = current->pop();
michael@0 9887 PropertyName *name = ScopeCoordinateName(scopeCoordinateNameCache, script(), pc);
michael@0 9888
michael@0 9889 if (call) {
michael@0 9890 // Push the object on the stack to match the bound object expected in
michael@0 9891 // the global and property set cases.
michael@0 9892 pushConstant(ObjectValue(*call));
michael@0 9893 current->push(value);
michael@0 9894 return setStaticName(call, name);
michael@0 9895 }
michael@0 9896
michael@0 9897 // The call object has type information we need to respect but we
michael@0 9898 // couldn't find it. Just do a normal property assign.
michael@0 9899 MDefinition *obj = walkScopeChain(sc.hops());
michael@0 9900 current->push(obj);
michael@0 9901 current->push(value);
michael@0 9902 return jsop_setprop(name);
michael@0 9903 }
michael@0 9904
michael@0 9905 MDefinition *rval = current->peek(-1);
michael@0 9906 MDefinition *obj = walkScopeChain(sc.hops());
michael@0 9907
michael@0 9908 Shape *shape = ScopeCoordinateToStaticScopeShape(script(), pc);
michael@0 9909
michael@0 9910 if (NeedsPostBarrier(info(), rval))
michael@0 9911 current->add(MPostWriteBarrier::New(alloc(), obj, rval));
michael@0 9912
michael@0 9913 MInstruction *store;
michael@0 9914 if (shape->numFixedSlots() <= sc.slot()) {
michael@0 9915 MInstruction *slots = MSlots::New(alloc(), obj);
michael@0 9916 current->add(slots);
michael@0 9917
michael@0 9918 store = MStoreSlot::NewBarriered(alloc(), slots, sc.slot() - shape->numFixedSlots(), rval);
michael@0 9919 } else {
michael@0 9920 store = MStoreFixedSlot::NewBarriered(alloc(), obj, sc.slot(), rval);
michael@0 9921 }
michael@0 9922
michael@0 9923 current->add(store);
michael@0 9924 return resumeAfter(store);
michael@0 9925 }
michael@0 9926
michael@0 9927 bool
michael@0 9928 IonBuilder::jsop_in()
michael@0 9929 {
michael@0 9930 MDefinition *obj = current->peek(-1);
michael@0 9931 MDefinition *id = current->peek(-2);
michael@0 9932
michael@0 9933 if (ElementAccessIsDenseNative(obj, id) &&
michael@0 9934 !ElementAccessHasExtraIndexedProperty(constraints(), obj))
michael@0 9935 {
michael@0 9936 return jsop_in_dense();
michael@0 9937 }
michael@0 9938
michael@0 9939 current->pop();
michael@0 9940 current->pop();
michael@0 9941 MIn *ins = MIn::New(alloc(), id, obj);
michael@0 9942
michael@0 9943 current->add(ins);
michael@0 9944 current->push(ins);
michael@0 9945
michael@0 9946 return resumeAfter(ins);
michael@0 9947 }
michael@0 9948
michael@0 9949 bool
michael@0 9950 IonBuilder::jsop_in_dense()
michael@0 9951 {
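// Fast path for |id in arr| where |arr| is a dense native array and |id| is an
// integer: compare |id| against the initialized length and, unless the array
// is known to be packed, also check that the element is not a hole.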
michael@0 9952 MDefinition *obj = current->pop();
michael@0 9953 MDefinition *id = current->pop();
michael@0 9954
michael@0 9955 bool needsHoleCheck = !ElementAccessIsPacked(constraints(), obj);
michael@0 9956
michael@0 9957 // Ensure id is an integer.
michael@0 9958 MInstruction *idInt32 = MToInt32::New(alloc(), id);
michael@0 9959 current->add(idInt32);
michael@0 9960 id = idInt32;
michael@0 9961
michael@0 9962 // Get the elements vector.
michael@0 9963 MElements *elements = MElements::New(alloc(), obj);
michael@0 9964 current->add(elements);
michael@0 9965
michael@0 9966 MInitializedLength *initLength = MInitializedLength::New(alloc(), elements);
michael@0 9967 current->add(initLength);
michael@0 9968
michael@0 9969 // Check if id < initLength and elem[id] not a hole.
michael@0 9970 MInArray *ins = MInArray::New(alloc(), elements, id, initLength, obj, needsHoleCheck);
michael@0 9971
michael@0 9972 current->add(ins);
michael@0 9973 current->push(ins);
michael@0 9974
michael@0 9975 return true;
michael@0 9976 }
michael@0 9977
michael@0 9978 bool
michael@0 9979 IonBuilder::jsop_instanceof()
michael@0 9980 {
michael@0 9981 MDefinition *rhs = current->pop();
michael@0 9982 MDefinition *obj = current->pop();
michael@0 9983
michael@0 9984 // If this is an 'x instanceof function' operation and we can determine the
michael@0 9985 // exact function and prototype object being tested for, use a typed path.
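// E.g. |x instanceof Foo| where |Foo| is a known singleton function with a
// known |Foo.prototype| object can be compiled as a prototype-chain walk
// against that object (MInstanceOf) instead of the generic MCallInstanceOf
// VM call below.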
michael@0 9986 do {
michael@0 9987 types::TemporaryTypeSet *rhsTypes = rhs->resultTypeSet();
michael@0 9988 JSObject *rhsObject = rhsTypes ? rhsTypes->getSingleton() : nullptr;
michael@0 9989 if (!rhsObject || !rhsObject->is<JSFunction>() || rhsObject->isBoundFunction())
michael@0 9990 break;
michael@0 9991
michael@0 9992 types::TypeObjectKey *rhsType = types::TypeObjectKey::get(rhsObject);
michael@0 9993 if (rhsType->unknownProperties())
michael@0 9994 break;
michael@0 9995
michael@0 9996 types::HeapTypeSetKey protoProperty =
michael@0 9997 rhsType->property(NameToId(names().prototype));
michael@0 9998 JSObject *protoObject = protoProperty.singleton(constraints());
michael@0 9999 if (!protoObject)
michael@0 10000 break;
michael@0 10001
michael@0 10002 rhs->setImplicitlyUsedUnchecked();
michael@0 10003
michael@0 10004 MInstanceOf *ins = MInstanceOf::New(alloc(), obj, protoObject);
michael@0 10005
michael@0 10006 current->add(ins);
michael@0 10007 current->push(ins);
michael@0 10008
michael@0 10009 return resumeAfter(ins);
michael@0 10010 } while (false);
michael@0 10011
michael@0 10012 MCallInstanceOf *ins = MCallInstanceOf::New(alloc(), obj, rhs);
michael@0 10013
michael@0 10014 current->add(ins);
michael@0 10015 current->push(ins);
michael@0 10016
michael@0 10017 return resumeAfter(ins);
michael@0 10018 }
michael@0 10019
michael@0 10020 MInstruction *
michael@0 10021 IonBuilder::addConvertElementsToDoubles(MDefinition *elements)
michael@0 10022 {
michael@0 10023 MInstruction *convert = MConvertElementsToDoubles::New(alloc(), elements);
michael@0 10024 current->add(convert);
michael@0 10025 return convert;
michael@0 10026 }
michael@0 10027
michael@0 10028 MInstruction *
michael@0 10029 IonBuilder::addBoundsCheck(MDefinition *index, MDefinition *length)
michael@0 10030 {
michael@0 10031 MInstruction *check = MBoundsCheck::New(alloc(), index, length);
michael@0 10032 current->add(check);
michael@0 10033
michael@0 10034 // If a bounds check failed in the past, don't optimize bounds checks.
michael@0 10035 if (failedBoundsCheck_)
michael@0 10036 check->setNotMovable();
michael@0 10037
michael@0 10038 return check;
michael@0 10039 }
michael@0 10040
michael@0 10041 MInstruction *
michael@0 10042 IonBuilder::addShapeGuard(MDefinition *obj, Shape *const shape, BailoutKind bailoutKind)
michael@0 10043 {
michael@0 10044 MGuardShape *guard = MGuardShape::New(alloc(), obj, shape, bailoutKind);
michael@0 10045 current->add(guard);
michael@0 10046
michael@0 10047 // If a shape guard failed in the past, don't optimize shape guards.
michael@0 10048 if (failedShapeGuard_)
michael@0 10049 guard->setNotMovable();
michael@0 10050
michael@0 10051 return guard;
michael@0 10052 }
michael@0 10053
michael@0 10054 types::TemporaryTypeSet *
michael@0 10055 IonBuilder::bytecodeTypes(jsbytecode *pc)
michael@0 10056 {
michael@0 10057 return types::TypeScript::BytecodeTypes(script(), pc, bytecodeTypeMap, &typeArrayHint, typeArray);
michael@0 10058 }
michael@0 10059
michael@0 10060 TypeDescrSetHash *
michael@0 10061 IonBuilder::getOrCreateDescrSetHash()
michael@0 10062 {
michael@0 10063 if (!descrSetHash_) {
michael@0 10064 TypeDescrSetHash *hash =
michael@0 10065 alloc_->lifoAlloc()->new_<TypeDescrSetHash>(alloc());
michael@0 10066 if (!hash || !hash->init())
michael@0 10067 return nullptr;
michael@0 10068
michael@0 10069 descrSetHash_ = hash;
michael@0 10070 }
michael@0 10071 return descrSetHash_;
michael@0 10072 }
michael@0 10073
michael@0 10074 bool
michael@0 10075 IonBuilder::lookupTypeDescrSet(MDefinition *typedObj,
michael@0 10076 TypeDescrSet *out)
michael@0 10077 {
michael@0 10078 *out = TypeDescrSet(); // default to unknown
michael@0 10079
michael@0 10080 // Extract TypeDescrSet directly if we can
michael@0 10081 if (typedObj->isNewDerivedTypedObject()) {
michael@0 10082 *out = typedObj->toNewDerivedTypedObject()->set();
michael@0 10083 return true;
michael@0 10084 }
michael@0 10085
michael@0 10086 types::TemporaryTypeSet *types = typedObj->resultTypeSet();
michael@0 10087 return typeSetToTypeDescrSet(types, out);
michael@0 10088 }
michael@0 10089
michael@0 10090 bool
michael@0 10091 IonBuilder::typeSetToTypeDescrSet(types::TemporaryTypeSet *types,
michael@0 10092 TypeDescrSet *out)
michael@0 10093 {
michael@0 10094 // Can only determine the descriptor set when the type is known to be an object.
michael@0 10095 if (!types || types->getKnownMIRType() != MIRType_Object)
michael@0 10096 return true;
michael@0 10097
michael@0 10098 // And only known objects.
michael@0 10099 if (types->unknownObject())
michael@0 10100 return true;
michael@0 10101
michael@0 10102 TypeDescrSetBuilder set;
michael@0 10103 for (uint32_t i = 0; i < types->getObjectCount(); i++) {
michael@0 10104 types::TypeObject *type = types->getTypeObject(i);
michael@0 10105 if (!type || type->unknownProperties())
michael@0 10106 return true;
michael@0 10107
michael@0 10108 if (!type->hasTypedObject())
michael@0 10109 return true;
michael@0 10110
michael@0 10111 TypeDescr &descr = type->typedObject()->descr();
michael@0 10112 if (!set.insert(&descr))
michael@0 10113 return false;
michael@0 10114 }
michael@0 10115
michael@0 10116 return set.build(*this, out);
michael@0 10117 }
michael@0 10118
michael@0 10119 MDefinition *
michael@0 10120 IonBuilder::loadTypedObjectType(MDefinition *typedObj)
michael@0 10121 {
michael@0 10122 // Short-circuit derived type objects, meaning the intermediate
michael@0 10123 // objects created to represent `a.b` in an expression like
michael@0 10124 // `a.b.c`. In that case, the type object can be simply pulled
michael@0 10125 // from the operands of that instruction.
michael@0 10126 if (typedObj->isNewDerivedTypedObject())
michael@0 10127 return typedObj->toNewDerivedTypedObject()->type();
michael@0 10128
michael@0 10129 MInstruction *load = MLoadFixedSlot::New(alloc(), typedObj,
michael@0 10130 JS_TYPEDOBJ_SLOT_TYPE_DESCR);
michael@0 10131 current->add(load);
michael@0 10132 return load;
michael@0 10133 }
michael@0 10134
michael@0 10135 // Given a typed object `typedObj` and an offset `offset` into that
michael@0 10136 // object's data, returns another typed object and an adjusted offset
michael@0 10137 // where the data can be found. Often, these returned values are the
michael@0 10138 // same as the inputs, but in cases where intermediate derived type
michael@0 10139 // objects have been created, the return values will remove
michael@0 10140 // intermediate layers (often rendering those derived type objects
michael@0 10141 // into dead code).
michael@0 10142 void
michael@0 10143 IonBuilder::loadTypedObjectData(MDefinition *typedObj,
michael@0 10144 MDefinition *offset,
michael@0 10145 bool canBeNeutered,
michael@0 10146 MDefinition **owner,
michael@0 10147 MDefinition **ownerOffset)
michael@0 10148 {
michael@0 10149 JS_ASSERT(typedObj->type() == MIRType_Object);
michael@0 10150 JS_ASSERT(offset->type() == MIRType_Int32);
michael@0 10151
michael@0 10152 // Short-circuit derived type objects, meaning the intermediate
michael@0 10153 // objects created to represent `a.b` in an expression like
michael@0 10154 // `a.b.c`. In that case, the owner and a base offset can be
michael@0 10155 // pulled from the operands of the instruction and combined with
michael@0 10156 // `offset`.
michael@0 10157 if (typedObj->isNewDerivedTypedObject()) {
michael@0 10158 MNewDerivedTypedObject *ins = typedObj->toNewDerivedTypedObject();
michael@0 10159
michael@0 10160 // Note: we never need to check for neutering on this path,
michael@0 10161 // because when we create the derived typed object, we check
michael@0 10162 // for neutering there, if needed.
michael@0 10163
michael@0 10164 MAdd *offsetAdd = MAdd::NewAsmJS(alloc(), ins->offset(), offset, MIRType_Int32);
michael@0 10165 current->add(offsetAdd);
michael@0 10166
michael@0 10167 *owner = ins->owner();
michael@0 10168 *ownerOffset = offsetAdd;
michael@0 10169 return;
michael@0 10170 }
michael@0 10171
michael@0 10172 if (canBeNeutered) {
michael@0 10173 MNeuterCheck *chk = MNeuterCheck::New(alloc(), typedObj);
michael@0 10174 current->add(chk);
michael@0 10175 typedObj = chk;
michael@0 10176 }
michael@0 10177
michael@0 10178 *owner = typedObj;
michael@0 10179 *ownerOffset = offset;
michael@0 10180 }
michael@0 10181
michael@0 10182 // Takes as input a typed object, an offset into that typed object's
michael@0 10183 // memory, and the type repr of the data found at that offset. Returns
michael@0 10184 // the elements pointer and a scaled offset. The scaled offset is
michael@0 10185 // expressed in units of `unit`; when working with typed array MIR,
michael@0 10186 // this is typically the alignment.
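// For instance, with float64 data (unit = 8) a byte offset of 24 becomes a
// scaled offset of 3, matching how typed array loads and stores index their
// elements.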
michael@0 10187 void
michael@0 10188 IonBuilder::loadTypedObjectElements(MDefinition *typedObj,
michael@0 10189 MDefinition *offset,
michael@0 10190 int32_t unit,
michael@0 10191 bool canBeNeutered,
michael@0 10192 MDefinition **ownerElements,
michael@0 10193 MDefinition **ownerScaledOffset)
michael@0 10194 {
michael@0 10195 MDefinition *owner, *ownerOffset;
michael@0 10196 loadTypedObjectData(typedObj, offset, canBeNeutered, &owner, &ownerOffset);
michael@0 10197
michael@0 10198 // Load the element data.
michael@0 10199 MTypedObjectElements *elements = MTypedObjectElements::New(alloc(), owner);
michael@0 10200 current->add(elements);
michael@0 10201
michael@0 10202 // Scale to a different unit for compat with typed array MIRs.
michael@0 10203 if (unit != 1) {
michael@0 10204 MDiv *scaledOffset = MDiv::NewAsmJS(alloc(), ownerOffset, constantInt(unit), MIRType_Int32,
michael@0 10205 /* unsignd = */ false);
michael@0 10206 current->add(scaledOffset);
michael@0 10207 *ownerScaledOffset = scaledOffset;
michael@0 10208 } else {
michael@0 10209 *ownerScaledOffset = ownerOffset;
michael@0 10210 }
michael@0 10211
michael@0 10212 *ownerElements = elements;
michael@0 10213 }
michael@0 10214
michael@0 10215 // Looks up the offset/type-descr-set of the field `name`, given the
michael@0 10216 // descriptor set of the typed object `typedObj` that owns the field.
michael@0 10217 // Note that even when true is returned, `*fieldDescrs` might be empty
michael@0 10218 // if no useful type/offset pair could be determined.
michael@0 10219 bool
michael@0 10220 IonBuilder::lookupTypedObjectField(MDefinition *typedObj,
michael@0 10221 PropertyName *name,
michael@0 10222 int32_t *fieldOffset,
michael@0 10223 TypeDescrSet *fieldDescrs,
michael@0 10224 size_t *fieldIndex)
michael@0 10225 {
michael@0 10226 TypeDescrSet objDescrs;
michael@0 10227 if (!lookupTypeDescrSet(typedObj, &objDescrs))
michael@0 10228 return false;
michael@0 10229
michael@0 10230 // Must be accessing a struct.
michael@0 10231 if (!objDescrs.allOfKind(TypeDescr::Struct))
michael@0 10232 return true;
michael@0 10233
michael@0 10234 // Determine the type/offset of the field `name`, if any.
michael@0 10235 int32_t offset;
michael@0 10236 if (!objDescrs.fieldNamed(*this, NameToId(name), &offset,
michael@0 10237 fieldDescrs, fieldIndex))
michael@0 10238 return false;
michael@0 10239 if (fieldDescrs->empty())
michael@0 10240 return true;
michael@0 10241
michael@0 10242 JS_ASSERT(offset >= 0);
michael@0 10243 *fieldOffset = offset;
michael@0 10244
michael@0 10245 return true;
michael@0 10246 }
michael@0 10247
michael@0 10248 MDefinition *
michael@0 10249 IonBuilder::typeObjectForElementFromArrayStructType(MDefinition *typeObj)
michael@0 10250 {
michael@0 10251 MInstruction *elemType = MLoadFixedSlot::New(alloc(), typeObj, JS_DESCR_SLOT_ARRAY_ELEM_TYPE);
michael@0 10252 current->add(elemType);
michael@0 10253
michael@0 10254 MInstruction *unboxElemType = MUnbox::New(alloc(), elemType, MIRType_Object, MUnbox::Infallible);
michael@0 10255 current->add(unboxElemType);
michael@0 10256
michael@0 10257 return unboxElemType;
michael@0 10258 }
michael@0 10259
michael@0 10260 MDefinition *
michael@0 10261 IonBuilder::typeObjectForFieldFromStructType(MDefinition *typeObj,
michael@0 10262 size_t fieldIndex)
michael@0 10263 {
michael@0 10264 // Load list of field type objects.
michael@0 10265
michael@0 10266 MInstruction *fieldTypes = MLoadFixedSlot::New(alloc(), typeObj, JS_DESCR_SLOT_STRUCT_FIELD_TYPES);
michael@0 10267 current->add(fieldTypes);
michael@0 10268
michael@0 10269 MInstruction *unboxFieldTypes = MUnbox::New(alloc(), fieldTypes, MIRType_Object, MUnbox::Infallible);
michael@0 10270 current->add(unboxFieldTypes);
michael@0 10271
michael@0 10272 // Index into list with index of field.
michael@0 10273
michael@0 10274 MInstruction *fieldTypesElements = MElements::New(alloc(), unboxFieldTypes);
michael@0 10275 current->add(fieldTypesElements);
michael@0 10276
michael@0 10277 MConstant *fieldIndexDef = constantInt(fieldIndex);
michael@0 10278
michael@0 10279 MInstruction *fieldType = MLoadElement::New(alloc(), fieldTypesElements, fieldIndexDef, false, false);
michael@0 10280 current->add(fieldType);
michael@0 10281
michael@0 10282 MInstruction *unboxFieldType = MUnbox::New(alloc(), fieldType, MIRType_Object, MUnbox::Infallible);
michael@0 10283 current->add(unboxFieldType);
michael@0 10284
michael@0 10285 return unboxFieldType;
michael@0 10286 }
michael@0 10287
michael@0 10288 bool
michael@0 10289 IonBuilder::storeScalarTypedObjectValue(MDefinition *typedObj,
michael@0 10290 MDefinition *byteOffset,
michael@0 10291 ScalarTypeDescr::Type type,
michael@0 10292 bool canBeNeutered,
michael@0 10293 bool racy,
michael@0 10294 MDefinition *value)
michael@0 10295 {
michael@0 10296 // Find location within the owner object.
michael@0 10297 MDefinition *elements, *scaledOffset;
michael@0 10298 size_t alignment = ScalarTypeDescr::alignment(type);
michael@0 10299 loadTypedObjectElements(typedObj, byteOffset, alignment, canBeNeutered,
michael@0 10300 &elements, &scaledOffset);
michael@0 10301
michael@0 10302 // Clamp value to [0, 255] when type is Uint8Clamped
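// (E.g. storing 300 into a Uint8Clamped field writes 255, and storing -5
// writes 0, following the Uint8ClampedArray clamping rules.)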
michael@0 10303 MDefinition *toWrite = value;
michael@0 10304 if (type == ScalarTypeDescr::TYPE_UINT8_CLAMPED) {
michael@0 10305 toWrite = MClampToUint8::New(alloc(), value);
michael@0 10306 current->add(toWrite->toInstruction());
michael@0 10307 }
michael@0 10308
michael@0 10309 MStoreTypedArrayElement *store =
michael@0 10310 MStoreTypedArrayElement::New(alloc(), elements, scaledOffset, toWrite,
michael@0 10311 type);
michael@0 10312 if (racy)
michael@0 10313 store->setRacy();
michael@0 10314 current->add(store);
michael@0 10315
michael@0 10316 return true;
michael@0 10317 }
michael@0 10318
michael@0 10319 MConstant *
michael@0 10320 IonBuilder::constant(const Value &v)
michael@0 10321 {
michael@0 10322 MConstant *c = MConstant::New(alloc(), v, constraints());
michael@0 10323 current->add(c);
michael@0 10324 return c;
michael@0 10325 }
michael@0 10326
michael@0 10327 MConstant *
michael@0 10328 IonBuilder::constantInt(int32_t i)
michael@0 10329 {
michael@0 10330 return constant(Int32Value(i));
michael@0 10331 }
michael@0 10332
michael@0 10333 MDefinition *
michael@0 10334 IonBuilder::getCallee()
michael@0 10335 {
michael@0 10336 if (inliningDepth_ == 0) {
michael@0 10337 MInstruction *callee = MCallee::New(alloc());
michael@0 10338 current->add(callee);
michael@0 10339 return callee;
michael@0 10340 }
michael@0 10341
michael@0 10342 return inlineCallInfo_->fun();
michael@0 10343 }
