js/src/jit/BaselineIC.cpp

changeset 0
6474c204b198
     1.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
     1.2 +++ b/js/src/jit/BaselineIC.cpp	Wed Dec 31 06:09:35 2014 +0100
     1.3 @@ -0,0 +1,10233 @@
     1.4 +/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
     1.5 + * vim: set ts=8 sts=4 et sw=4 tw=99:
     1.6 + * This Source Code Form is subject to the terms of the Mozilla Public
     1.7 + * License, v. 2.0. If a copy of the MPL was not distributed with this
     1.8 + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
     1.9 +
    1.10 +#include "jit/BaselineIC.h"
    1.11 +
    1.12 +#include "mozilla/DebugOnly.h"
    1.13 +#include "mozilla/TemplateLib.h"
    1.14 +
    1.15 +#include "jslibmath.h"
    1.16 +#include "jstypes.h"
    1.17 +
    1.18 +#include "builtin/Eval.h"
    1.19 +#include "jit/BaselineDebugModeOSR.h"
    1.20 +#include "jit/BaselineHelpers.h"
    1.21 +#include "jit/BaselineJIT.h"
    1.22 +#include "jit/IonLinker.h"
    1.23 +#include "jit/IonSpewer.h"
    1.24 +#include "jit/Lowering.h"
    1.25 +#ifdef JS_ION_PERF
    1.26 +# include "jit/PerfSpewer.h"
    1.27 +#endif
    1.28 +#include "jit/VMFunctions.h"
    1.29 +#include "vm/Opcodes.h"
    1.30 +
    1.31 +#include "jsboolinlines.h"
    1.32 +#include "jsscriptinlines.h"
    1.33 +
    1.34 +#include "jit/IonFrames-inl.h"
    1.35 +#include "vm/Interpreter-inl.h"
    1.36 +#include "vm/ScopeObject-inl.h"
    1.37 +#include "vm/StringObject-inl.h"
    1.38 +
    1.39 +using mozilla::DebugOnly;
    1.40 +
    1.41 +namespace js {
    1.42 +namespace jit {
    1.43 +
    1.44 +#ifdef DEBUG
    1.45 +void
    1.46 +FallbackICSpew(JSContext *cx, ICFallbackStub *stub, const char *fmt, ...)
    1.47 +{
    1.48 +    if (IonSpewEnabled(IonSpew_BaselineICFallback)) {
    1.49 +        RootedScript script(cx, GetTopIonJSScript(cx));
    1.50 +        jsbytecode *pc = stub->icEntry()->pc(script);
    1.51 +
    1.52 +        char fmtbuf[100];
    1.53 +        va_list args;
    1.54 +        va_start(args, fmt);
    1.55 +        vsnprintf(fmtbuf, 100, fmt, args);
    1.56 +        va_end(args);
    1.57 +
    1.58 +        IonSpew(IonSpew_BaselineICFallback,
    1.59 +                "Fallback hit for (%s:%d) (pc=%d,line=%d,uses=%d,stubs=%d): %s",
    1.60 +                script->filename(),
    1.61 +                script->lineno(),
    1.62 +                (int) script->pcToOffset(pc),
    1.63 +                PCToLineNumber(script, pc),
    1.64 +                script->getUseCount(),
    1.65 +                (int) stub->numOptimizedStubs(),
    1.66 +                fmtbuf);
    1.67 +    }
    1.68 +}
    1.69 +
    1.70 +void
    1.71 +TypeFallbackICSpew(JSContext *cx, ICTypeMonitor_Fallback *stub, const char *fmt, ...)
    1.72 +{
    1.73 +    if (IonSpewEnabled(IonSpew_BaselineICFallback)) {
    1.74 +        RootedScript script(cx, GetTopIonJSScript(cx));
    1.75 +        jsbytecode *pc = stub->icEntry()->pc(script);
    1.76 +
    1.77 +        char fmtbuf[100];
    1.78 +        va_list args;
    1.79 +        va_start(args, fmt);
    1.80 +        vsnprintf(fmtbuf, 100, fmt, args);
    1.81 +        va_end(args);
    1.82 +
    1.83 +        IonSpew(IonSpew_BaselineICFallback,
    1.84 +                "Type monitor fallback hit for (%s:%d) (pc=%d,line=%d,uses=%d,stubs=%d): %s",
    1.85 +                script->filename(),
    1.86 +                script->lineno(),
    1.87 +                (int) script->pcToOffset(pc),
    1.88 +                PCToLineNumber(script, pc),
    1.89 +                script->getUseCount(),
    1.90 +                (int) stub->numOptimizedMonitorStubs(),
    1.91 +                fmtbuf);
    1.92 +    }
    1.93 +}
    1.94 +
    1.95 +#else
    1.96 +#define FallbackICSpew(...)
    1.97 +#define TypeFallbackICSpew(...)
    1.98 +#endif
    1.99 +
   1.100 +
   1.101 +ICFallbackStub *
   1.102 +ICEntry::fallbackStub() const
   1.103 +{
   1.104 +    return firstStub()->getChainFallback();
   1.105 +}
   1.106 +
   1.107 +
   1.108 +ICStubConstIterator &
   1.109 +ICStubConstIterator::operator++()
   1.110 +{
   1.111 +    JS_ASSERT(currentStub_ != nullptr);
   1.112 +    currentStub_ = currentStub_->next();
   1.113 +    return *this;
   1.114 +}
   1.115 +
   1.116 +
// Construct an iterator over a fallback stub's chain.  When |end| is true
// the iterator starts positioned at the fallback stub itself (the
// one-past-last position); otherwise it starts at the IC entry's first stub.
ICStubIterator::ICStubIterator(ICFallbackStub *fallbackStub, bool end)
  : icEntry_(fallbackStub->icEntry()),
    fallbackStub_(fallbackStub),
    previousStub_(nullptr),
    currentStub_(end ? fallbackStub : icEntry_->firstStub()),
    unlinked_(false)
{ }
   1.124 +
   1.125 +ICStubIterator &
   1.126 +ICStubIterator::operator++()
   1.127 +{
   1.128 +    JS_ASSERT(currentStub_->next() != nullptr);
   1.129 +    if (!unlinked_)
   1.130 +        previousStub_ = currentStub_;
   1.131 +    currentStub_ = currentStub_->next();
   1.132 +    unlinked_ = false;
   1.133 +    return *this;
   1.134 +}
   1.135 +
// Unlink the stub at the current iterator position from the chain.  Only
// valid for optimized stubs (never the fallback stub itself), and at most
// once per iterator position.
void
ICStubIterator::unlink(JSContext *cx)
{
    JS_ASSERT(currentStub_->next() != nullptr);
    JS_ASSERT(currentStub_ != fallbackStub_);
    JS_ASSERT(!unlinked_);

    // The fallback stub owns the chain bookkeeping; previousStub_ lets it
    // splice currentStub_ out without rescanning the chain.
    fallbackStub_->unlinkStub(cx->zone(), previousStub_, currentStub_);

    // Mark the current iterator position as unlinked, so operator++ works properly.
    unlinked_ = true;
}
   1.148 +
   1.149 +
   1.150 +void
   1.151 +ICStub::markCode(JSTracer *trc, const char *name)
   1.152 +{
   1.153 +    JitCode *stubJitCode = jitCode();
   1.154 +    MarkJitCodeUnbarriered(trc, &stubJitCode, name);
   1.155 +}
   1.156 +
// Replace this stub's entry point with |code|'s raw code pointer.
void
ICStub::updateCode(JitCode *code)
{
    // Write barrier on the old code: incremental GC must see the edge to
    // the previous JitCode before it is overwritten.
#ifdef JSGC_INCREMENTAL
    JitCode::writeBarrierPre(jitCode());
#endif
    stubCode_ = code->raw();
}
   1.166 +
   1.167 +/* static */ void
   1.168 +ICStub::trace(JSTracer *trc)
   1.169 +{
   1.170 +    markCode(trc, "baseline-stub-jitcode");
   1.171 +
   1.172 +    // If the stub is a monitored fallback stub, then mark the monitor ICs hanging
   1.173 +    // off of that stub.  We don't need to worry about the regular monitored stubs,
   1.174 +    // because the regular monitored stubs will always have a monitored fallback stub
   1.175 +    // that references the same stub chain.
   1.176 +    if (isMonitoredFallback()) {
   1.177 +        ICTypeMonitor_Fallback *lastMonStub = toMonitoredFallbackStub()->fallbackMonitorStub();
   1.178 +        for (ICStubConstIterator iter = lastMonStub->firstMonitorStub(); !iter.atEnd(); iter++) {
   1.179 +            JS_ASSERT_IF(iter->next() == nullptr, *iter == lastMonStub);
   1.180 +            iter->trace(trc);
   1.181 +        }
   1.182 +    }
   1.183 +
   1.184 +    if (isUpdated()) {
   1.185 +        for (ICStubConstIterator iter = toUpdatedStub()->firstUpdateStub(); !iter.atEnd(); iter++) {
   1.186 +            JS_ASSERT_IF(iter->next() == nullptr, iter->isTypeUpdate_Fallback());
   1.187 +            iter->trace(trc);
   1.188 +        }
   1.189 +    }
   1.190 +
   1.191 +    switch (kind()) {
   1.192 +      case ICStub::Call_Scripted: {
   1.193 +        ICCall_Scripted *callStub = toCall_Scripted();
   1.194 +        MarkScript(trc, &callStub->calleeScript(), "baseline-callscripted-callee");
   1.195 +        if (callStub->templateObject())
   1.196 +            MarkObject(trc, &callStub->templateObject(), "baseline-callscripted-template");
   1.197 +        break;
   1.198 +      }
   1.199 +      case ICStub::Call_Native: {
   1.200 +        ICCall_Native *callStub = toCall_Native();
   1.201 +        MarkObject(trc, &callStub->callee(), "baseline-callnative-callee");
   1.202 +        if (callStub->templateObject())
   1.203 +            MarkObject(trc, &callStub->templateObject(), "baseline-callnative-template");
   1.204 +        break;
   1.205 +      }
   1.206 +      case ICStub::GetElem_NativeSlot: {
   1.207 +        ICGetElem_NativeSlot *getElemStub = toGetElem_NativeSlot();
   1.208 +        MarkShape(trc, &getElemStub->shape(), "baseline-getelem-native-shape");
   1.209 +        MarkString(trc, &getElemStub->name(), "baseline-getelem-native-name");
   1.210 +        break;
   1.211 +      }
   1.212 +      case ICStub::GetElem_NativePrototypeSlot: {
   1.213 +        ICGetElem_NativePrototypeSlot *getElemStub = toGetElem_NativePrototypeSlot();
   1.214 +        MarkShape(trc, &getElemStub->shape(), "baseline-getelem-nativeproto-shape");
   1.215 +        MarkString(trc, &getElemStub->name(), "baseline-getelem-nativeproto-name");
   1.216 +        MarkObject(trc, &getElemStub->holder(), "baseline-getelem-nativeproto-holder");
   1.217 +        MarkShape(trc, &getElemStub->holderShape(), "baseline-getelem-nativeproto-holdershape");
   1.218 +        break;
   1.219 +      }
   1.220 +      case ICStub::GetElem_NativePrototypeCallNative:
   1.221 +      case ICStub::GetElem_NativePrototypeCallScripted: {
   1.222 +        ICGetElemNativePrototypeCallStub *callStub =
   1.223 +            reinterpret_cast<ICGetElemNativePrototypeCallStub *>(this);
   1.224 +        MarkShape(trc, &callStub->shape(), "baseline-getelem-nativeprotocall-shape");
   1.225 +        MarkString(trc, &callStub->name(), "baseline-getelem-nativeprotocall-name");
   1.226 +        MarkObject(trc, &callStub->getter(), "baseline-getelem-nativeprotocall-getter");
   1.227 +        MarkObject(trc, &callStub->holder(), "baseline-getelem-nativeprotocall-holder");
   1.228 +        MarkShape(trc, &callStub->holderShape(), "baseline-getelem-nativeprotocall-holdershape");
   1.229 +        break;
   1.230 +      }
   1.231 +      case ICStub::GetElem_Dense: {
   1.232 +        ICGetElem_Dense *getElemStub = toGetElem_Dense();
   1.233 +        MarkShape(trc, &getElemStub->shape(), "baseline-getelem-dense-shape");
   1.234 +        break;
   1.235 +      }
   1.236 +      case ICStub::GetElem_TypedArray: {
   1.237 +        ICGetElem_TypedArray *getElemStub = toGetElem_TypedArray();
   1.238 +        MarkShape(trc, &getElemStub->shape(), "baseline-getelem-typedarray-shape");
   1.239 +        break;
   1.240 +      }
   1.241 +      case ICStub::SetElem_Dense: {
   1.242 +        ICSetElem_Dense *setElemStub = toSetElem_Dense();
   1.243 +        MarkShape(trc, &setElemStub->shape(), "baseline-getelem-dense-shape");
   1.244 +        MarkTypeObject(trc, &setElemStub->type(), "baseline-setelem-dense-type");
   1.245 +        break;
   1.246 +      }
   1.247 +      case ICStub::SetElem_DenseAdd: {
   1.248 +        ICSetElem_DenseAdd *setElemStub = toSetElem_DenseAdd();
   1.249 +        MarkTypeObject(trc, &setElemStub->type(), "baseline-setelem-denseadd-type");
   1.250 +
   1.251 +        JS_STATIC_ASSERT(ICSetElem_DenseAdd::MAX_PROTO_CHAIN_DEPTH == 4);
   1.252 +
   1.253 +        switch (setElemStub->protoChainDepth()) {
   1.254 +          case 0: setElemStub->toImpl<0>()->traceShapes(trc); break;
   1.255 +          case 1: setElemStub->toImpl<1>()->traceShapes(trc); break;
   1.256 +          case 2: setElemStub->toImpl<2>()->traceShapes(trc); break;
   1.257 +          case 3: setElemStub->toImpl<3>()->traceShapes(trc); break;
   1.258 +          case 4: setElemStub->toImpl<4>()->traceShapes(trc); break;
   1.259 +          default: MOZ_ASSUME_UNREACHABLE("Invalid proto stub.");
   1.260 +        }
   1.261 +        break;
   1.262 +      }
   1.263 +      case ICStub::SetElem_TypedArray: {
   1.264 +        ICSetElem_TypedArray *setElemStub = toSetElem_TypedArray();
   1.265 +        MarkShape(trc, &setElemStub->shape(), "baseline-setelem-typedarray-shape");
   1.266 +        break;
   1.267 +      }
   1.268 +      case ICStub::TypeMonitor_SingleObject: {
   1.269 +        ICTypeMonitor_SingleObject *monitorStub = toTypeMonitor_SingleObject();
   1.270 +        MarkObject(trc, &monitorStub->object(), "baseline-monitor-singleobject");
   1.271 +        break;
   1.272 +      }
   1.273 +      case ICStub::TypeMonitor_TypeObject: {
   1.274 +        ICTypeMonitor_TypeObject *monitorStub = toTypeMonitor_TypeObject();
   1.275 +        MarkTypeObject(trc, &monitorStub->type(), "baseline-monitor-typeobject");
   1.276 +        break;
   1.277 +      }
   1.278 +      case ICStub::TypeUpdate_SingleObject: {
   1.279 +        ICTypeUpdate_SingleObject *updateStub = toTypeUpdate_SingleObject();
   1.280 +        MarkObject(trc, &updateStub->object(), "baseline-update-singleobject");
   1.281 +        break;
   1.282 +      }
   1.283 +      case ICStub::TypeUpdate_TypeObject: {
   1.284 +        ICTypeUpdate_TypeObject *updateStub = toTypeUpdate_TypeObject();
   1.285 +        MarkTypeObject(trc, &updateStub->type(), "baseline-update-typeobject");
   1.286 +        break;
   1.287 +      }
   1.288 +      case ICStub::Profiler_PushFunction: {
   1.289 +        ICProfiler_PushFunction *pushFunStub = toProfiler_PushFunction();
   1.290 +        MarkScript(trc, &pushFunStub->script(), "baseline-profilerpushfunction-stub-script");
   1.291 +        break;
   1.292 +      }
   1.293 +      case ICStub::GetName_Global: {
   1.294 +        ICGetName_Global *globalStub = toGetName_Global();
   1.295 +        MarkShape(trc, &globalStub->shape(), "baseline-global-stub-shape");
   1.296 +        break;
   1.297 +      }
   1.298 +      case ICStub::GetName_Scope0:
   1.299 +        static_cast<ICGetName_Scope<0>*>(this)->traceScopes(trc);
   1.300 +        break;
   1.301 +      case ICStub::GetName_Scope1:
   1.302 +        static_cast<ICGetName_Scope<1>*>(this)->traceScopes(trc);
   1.303 +        break;
   1.304 +      case ICStub::GetName_Scope2:
   1.305 +        static_cast<ICGetName_Scope<2>*>(this)->traceScopes(trc);
   1.306 +        break;
   1.307 +      case ICStub::GetName_Scope3:
   1.308 +        static_cast<ICGetName_Scope<3>*>(this)->traceScopes(trc);
   1.309 +        break;
   1.310 +      case ICStub::GetName_Scope4:
   1.311 +        static_cast<ICGetName_Scope<4>*>(this)->traceScopes(trc);
   1.312 +        break;
   1.313 +      case ICStub::GetName_Scope5:
   1.314 +        static_cast<ICGetName_Scope<5>*>(this)->traceScopes(trc);
   1.315 +        break;
   1.316 +      case ICStub::GetName_Scope6:
   1.317 +        static_cast<ICGetName_Scope<6>*>(this)->traceScopes(trc);
   1.318 +        break;
   1.319 +      case ICStub::GetIntrinsic_Constant: {
   1.320 +        ICGetIntrinsic_Constant *constantStub = toGetIntrinsic_Constant();
   1.321 +        gc::MarkValue(trc, &constantStub->value(), "baseline-getintrinsic-constant-value");
   1.322 +        break;
   1.323 +      }
   1.324 +      case ICStub::GetProp_Primitive: {
   1.325 +        ICGetProp_Primitive *propStub = toGetProp_Primitive();
   1.326 +        MarkShape(trc, &propStub->protoShape(), "baseline-getprop-primitive-stub-shape");
   1.327 +        break;
   1.328 +      }
   1.329 +      case ICStub::GetProp_Native: {
   1.330 +        ICGetProp_Native *propStub = toGetProp_Native();
   1.331 +        MarkShape(trc, &propStub->shape(), "baseline-getpropnative-stub-shape");
   1.332 +        break;
   1.333 +      }
   1.334 +      case ICStub::GetProp_NativePrototype: {
   1.335 +        ICGetProp_NativePrototype *propStub = toGetProp_NativePrototype();
   1.336 +        MarkShape(trc, &propStub->shape(), "baseline-getpropnativeproto-stub-shape");
   1.337 +        MarkObject(trc, &propStub->holder(), "baseline-getpropnativeproto-stub-holder");
   1.338 +        MarkShape(trc, &propStub->holderShape(), "baseline-getpropnativeproto-stub-holdershape");
   1.339 +        break;
   1.340 +      }
   1.341 +      case ICStub::GetProp_CallDOMProxyNative:
   1.342 +      case ICStub::GetProp_CallDOMProxyWithGenerationNative: {
   1.343 +        ICGetPropCallDOMProxyNativeStub *propStub;
   1.344 +        if (kind() ==  ICStub::GetProp_CallDOMProxyNative)
   1.345 +            propStub = toGetProp_CallDOMProxyNative();
   1.346 +        else
   1.347 +            propStub = toGetProp_CallDOMProxyWithGenerationNative();
   1.348 +        MarkShape(trc, &propStub->shape(), "baseline-getproplistbasenative-stub-shape");
   1.349 +        if (propStub->expandoShape()) {
   1.350 +            MarkShape(trc, &propStub->expandoShape(),
   1.351 +                      "baseline-getproplistbasenative-stub-expandoshape");
   1.352 +        }
   1.353 +        MarkObject(trc, &propStub->holder(), "baseline-getproplistbasenative-stub-holder");
   1.354 +        MarkShape(trc, &propStub->holderShape(), "baseline-getproplistbasenative-stub-holdershape");
   1.355 +        MarkObject(trc, &propStub->getter(), "baseline-getproplistbasenative-stub-getter");
   1.356 +        break;
   1.357 +      }
   1.358 +      case ICStub::GetProp_DOMProxyShadowed: {
   1.359 +        ICGetProp_DOMProxyShadowed *propStub = toGetProp_DOMProxyShadowed();
   1.360 +        MarkShape(trc, &propStub->shape(), "baseline-getproplistbaseshadowed-stub-shape");
   1.361 +        MarkString(trc, &propStub->name(), "baseline-getproplistbaseshadowed-stub-name");
   1.362 +        break;
   1.363 +      }
   1.364 +      case ICStub::GetProp_CallScripted: {
   1.365 +        ICGetProp_CallScripted *callStub = toGetProp_CallScripted();
   1.366 +        MarkShape(trc, &callStub->receiverShape(), "baseline-getpropcallscripted-stub-receivershape");
   1.367 +        MarkObject(trc, &callStub->holder(), "baseline-getpropcallscripted-stub-holder");
   1.368 +        MarkShape(trc, &callStub->holderShape(), "baseline-getpropcallscripted-stub-holdershape");
   1.369 +        MarkObject(trc, &callStub->getter(), "baseline-getpropcallscripted-stub-getter");
   1.370 +        break;
   1.371 +      }
   1.372 +      case ICStub::GetProp_CallNative: {
   1.373 +        ICGetProp_CallNative *callStub = toGetProp_CallNative();
   1.374 +        MarkObject(trc, &callStub->holder(), "baseline-getpropcallnative-stub-holder");
   1.375 +        MarkShape(trc, &callStub->holderShape(), "baseline-getpropcallnative-stub-holdershape");
   1.376 +        MarkObject(trc, &callStub->getter(), "baseline-getpropcallnative-stub-getter");
   1.377 +        break;
   1.378 +      }
   1.379 +      case ICStub::GetProp_CallNativePrototype: {
   1.380 +        ICGetProp_CallNativePrototype *callStub = toGetProp_CallNativePrototype();
   1.381 +        MarkShape(trc, &callStub->receiverShape(), "baseline-getpropcallnativeproto-stub-receivershape");
   1.382 +        MarkObject(trc, &callStub->holder(), "baseline-getpropcallnativeproto-stub-holder");
   1.383 +        MarkShape(trc, &callStub->holderShape(), "baseline-getpropcallnativeproto-stub-holdershape");
   1.384 +        MarkObject(trc, &callStub->getter(), "baseline-getpropcallnativeproto-stub-getter");
   1.385 +        break;
   1.386 +      }
   1.387 +      case ICStub::SetProp_Native: {
   1.388 +        ICSetProp_Native *propStub = toSetProp_Native();
   1.389 +        MarkShape(trc, &propStub->shape(), "baseline-setpropnative-stub-shape");
   1.390 +        MarkTypeObject(trc, &propStub->type(), "baseline-setpropnative-stub-type");
   1.391 +        break;
   1.392 +      }
   1.393 +      case ICStub::SetProp_NativeAdd: {
   1.394 +        ICSetProp_NativeAdd *propStub = toSetProp_NativeAdd();
   1.395 +        MarkTypeObject(trc, &propStub->type(), "baseline-setpropnativeadd-stub-type");
   1.396 +        MarkShape(trc, &propStub->newShape(), "baseline-setpropnativeadd-stub-newshape");
   1.397 +        JS_STATIC_ASSERT(ICSetProp_NativeAdd::MAX_PROTO_CHAIN_DEPTH == 4);
   1.398 +        switch (propStub->protoChainDepth()) {
   1.399 +          case 0: propStub->toImpl<0>()->traceShapes(trc); break;
   1.400 +          case 1: propStub->toImpl<1>()->traceShapes(trc); break;
   1.401 +          case 2: propStub->toImpl<2>()->traceShapes(trc); break;
   1.402 +          case 3: propStub->toImpl<3>()->traceShapes(trc); break;
   1.403 +          case 4: propStub->toImpl<4>()->traceShapes(trc); break;
   1.404 +          default: MOZ_ASSUME_UNREACHABLE("Invalid proto stub.");
   1.405 +        }
   1.406 +        break;
   1.407 +      }
   1.408 +      case ICStub::SetProp_CallScripted: {
   1.409 +        ICSetProp_CallScripted *callStub = toSetProp_CallScripted();
   1.410 +        MarkShape(trc, &callStub->shape(), "baseline-setpropcallscripted-stub-shape");
   1.411 +        MarkObject(trc, &callStub->holder(), "baseline-setpropcallscripted-stub-holder");
   1.412 +        MarkShape(trc, &callStub->holderShape(), "baseline-setpropcallscripted-stub-holdershape");
   1.413 +        MarkObject(trc, &callStub->setter(), "baseline-setpropcallscripted-stub-setter");
   1.414 +        break;
   1.415 +      }
   1.416 +      case ICStub::SetProp_CallNative: {
   1.417 +        ICSetProp_CallNative *callStub = toSetProp_CallNative();
   1.418 +        MarkShape(trc, &callStub->shape(), "baseline-setpropcallnative-stub-shape");
   1.419 +        MarkObject(trc, &callStub->holder(), "baseline-setpropcallnative-stub-holder");
   1.420 +        MarkShape(trc, &callStub->holderShape(), "baseline-setpropcallnative-stub-holdershape");
   1.421 +        MarkObject(trc, &callStub->setter(), "baseline-setpropcallnative-stub-setter");
   1.422 +        break;
   1.423 +      }
   1.424 +      case ICStub::NewArray_Fallback: {
   1.425 +        ICNewArray_Fallback *stub = toNewArray_Fallback();
   1.426 +        MarkObject(trc, &stub->templateObject(), "baseline-newarray-template");
   1.427 +        break;
   1.428 +      }
   1.429 +      case ICStub::NewObject_Fallback: {
   1.430 +        ICNewObject_Fallback *stub = toNewObject_Fallback();
   1.431 +        MarkObject(trc, &stub->templateObject(), "baseline-newobject-template");
   1.432 +        break;
   1.433 +      }
   1.434 +      case ICStub::Rest_Fallback: {
   1.435 +        ICRest_Fallback *stub = toRest_Fallback();
   1.436 +        MarkObject(trc, &stub->templateObject(), "baseline-rest-template");
   1.437 +        break;
   1.438 +      }
   1.439 +      default:
   1.440 +        break;
   1.441 +    }
   1.442 +}
   1.443 +
// Splice |stub| out of this fallback stub's chain.  |prev| is the stub
// immediately preceding |stub|, or nullptr if |stub| is the chain head.
// Also maintains lastStubPtrAddr_, the optimized-stub count, and the GC
// bookkeeping required for incremental collection.
void
ICFallbackStub::unlinkStub(Zone *zone, ICStub *prev, ICStub *stub)
{
    JS_ASSERT(stub->next());

    // If stub is the last optimized stub, update lastStubPtrAddr.
    if (stub->next() == this) {
        JS_ASSERT(lastStubPtrAddr_ == stub->addressOfNext());
        if (prev)
            lastStubPtrAddr_ = prev->addressOfNext();
        else
            lastStubPtrAddr_ = icEntry()->addressOfFirstStub();
        *lastStubPtrAddr_ = this;
    } else {
        if (prev) {
            // Interior stub: bypass it in the singly-linked chain.
            JS_ASSERT(prev->next() == stub);
            prev->setNext(stub->next());
        } else {
            // Chain head: the IC entry's first-stub pointer must be rewritten.
            JS_ASSERT(icEntry()->firstStub() == stub);
            icEntry()->setFirstStub(stub->next());
        }
    }

    JS_ASSERT(numOptimizedStubs_ > 0);
    numOptimizedStubs_--;

    if (zone->needsBarrier()) {
        // We are removing edges from ICStub to gcthings. Perform one final trace
        // of the stub for incremental GC, as it must know about those edges.
        stub->trace(zone->barrierTracer());
    }

    if (ICStub::CanMakeCalls(stub->kind()) && stub->isMonitored()) {
        // This stub can make calls so we can return to it if it's on the stack.
        // We just have to reset its firstMonitorStub_ field to avoid a stale
        // pointer when purgeOptimizedStubs destroys all optimized monitor
        // stubs (unlinked stubs won't be updated).
        ICTypeMonitor_Fallback *monitorFallback = toMonitoredFallbackStub()->fallbackMonitorStub();
        stub->toMonitoredStub()->resetFirstMonitorStub(monitorFallback);
    }

#ifdef DEBUG
    // Poison stub code to ensure we don't call this stub again. However, if this
    // stub can make calls, a pointer to it may be stored in a stub frame on the
    // stack, so we can't touch the stubCode_ or GC will crash when marking this
    // pointer.
    if (!ICStub::CanMakeCalls(stub->kind()))
        stub->stubCode_ = (uint8_t *)0xbad;
#endif
}
   1.494 +
   1.495 +void
   1.496 +ICFallbackStub::unlinkStubsWithKind(JSContext *cx, ICStub::Kind kind)
   1.497 +{
   1.498 +    for (ICStubIterator iter = beginChain(); !iter.atEnd(); iter++) {
   1.499 +        if (iter->kind() == kind)
   1.500 +            iter.unlink(cx);
   1.501 +    }
   1.502 +}
   1.503 +
// Discard all optimized type-monitor stubs, leaving this fallback stub as
// the sole member of the monitor chain, and repoint every stub that
// referenced the old chain head back at |this|.
void
ICTypeMonitor_Fallback::resetMonitorStubChain(Zone *zone)
{
    if (zone->needsBarrier()) {
        // We are removing edges from monitored stubs to gcthings (JitCode).
        // Perform one final trace of all monitor stubs for incremental GC,
        // as it must know about those edges.
        for (ICStub *s = firstMonitorStub_; !s->isTypeMonitor_Fallback(); s = s->next())
            s->trace(zone->barrierTracer());
    }

    firstMonitorStub_ = this;
    numOptimizedMonitorStubs_ = 0;

    if (hasFallbackStub_) {
        // Owned by a main fallback stub: that stub's chain members cache the
        // monitor chain head, so each must be reset to point at |this|.
        lastMonitorStubPtrAddr_ = nullptr;

        // Reset firstMonitorStub_ field of all monitored stubs.
        for (ICStubConstIterator iter = mainFallbackStub_->beginChainConst();
             !iter.atEnd(); iter++)
        {
            if (!iter->isMonitored())
                continue;
            iter->toMonitoredStub()->resetFirstMonitorStub(this);
        }
    } else {
        // Standalone monitor chain: this stub is itself the IC entry's first
        // stub, so the entry's first-stub slot doubles as the chain tail slot.
        icEntry_->setFirstStub(this);
        lastMonitorStubPtrAddr_ = icEntry_->addressOfFirstStub();
    }
}
   1.534 +
// A monitored stub begins its type-monitor chain at |firstMonitorStub|.
ICMonitoredStub::ICMonitoredStub(Kind kind, JitCode *stubCode, ICStub *firstMonitorStub)
  : ICStub(kind, ICStub::Monitored, stubCode),
    firstMonitorStub_(firstMonitorStub)
{
    // If the first monitored stub is a ICTypeMonitor_Fallback stub, then
    // double check that _its_ firstMonitorStub is the same as this one.
    JS_ASSERT_IF(firstMonitorStub_->isTypeMonitor_Fallback(),
                 firstMonitorStub_->toTypeMonitor_Fallback()->firstMonitorStub() ==
                    firstMonitorStub_);
}
   1.545 +
   1.546 +bool
   1.547 +ICMonitoredFallbackStub::initMonitoringChain(JSContext *cx, ICStubSpace *space)
   1.548 +{
   1.549 +    JS_ASSERT(fallbackMonitorStub_ == nullptr);
   1.550 +
   1.551 +    ICTypeMonitor_Fallback::Compiler compiler(cx, this);
   1.552 +    ICTypeMonitor_Fallback *stub = compiler.getStub(space);
   1.553 +    if (!stub)
   1.554 +        return false;
   1.555 +    fallbackMonitorStub_ = stub;
   1.556 +    return true;
   1.557 +}
   1.558 +
   1.559 +bool
   1.560 +ICMonitoredFallbackStub::addMonitorStubForValue(JSContext *cx, JSScript *script, HandleValue val)
   1.561 +{
   1.562 +    return fallbackMonitorStub_->addMonitorStubForValue(cx, script, val);
   1.563 +}
   1.564 +
   1.565 +bool
   1.566 +ICUpdatedStub::initUpdatingChain(JSContext *cx, ICStubSpace *space)
   1.567 +{
   1.568 +    JS_ASSERT(firstUpdateStub_ == nullptr);
   1.569 +
   1.570 +    ICTypeUpdate_Fallback::Compiler compiler(cx);
   1.571 +    ICTypeUpdate_Fallback *stub = compiler.getStub(space);
   1.572 +    if (!stub)
   1.573 +        return false;
   1.574 +
   1.575 +    firstUpdateStub_ = stub;
   1.576 +    return true;
   1.577 +}
   1.578 +
// Return the JitCode for this compiler's stub, compiling and caching it
// per-compartment (keyed on getKey()) on first use.  Returns nullptr on
// OOM or code-generation failure.
JitCode *
ICStubCompiler::getStubCode()
{
    JitCompartment *comp = cx->compartment()->jitCompartment();

    // Check for existing cached stubcode.
    uint32_t stubKey = getKey();
    JitCode *stubCode = comp->getStubCode(stubKey);
    if (stubCode)
        return stubCode;

    // Compile new stubcode.
    IonContext ictx(cx, nullptr);
    MacroAssembler masm;
#ifdef JS_CODEGEN_ARM
    masm.setSecondScratchReg(BaselineSecondScratchReg);
#endif

    if (!generateStubCode(masm))
        return nullptr;
    Linker linker(masm);
    AutoFlushICache afc("getStubCode");
    Rooted<JitCode *> newStubCode(cx, linker.newCode<CanGC>(cx, JSC::BASELINE_CODE));
    if (!newStubCode)
        return nullptr;

    // After generating code, run postGenerateStubCode()
    if (!postGenerateStubCode(masm, newStubCode))
        return nullptr;

    // All barriers are emitted off-by-default, enable them if needed.
    if (cx->zone()->needsBarrier())
        newStubCode->togglePreBarriers(true);

    // Cache newly compiled stubcode.
    if (!comp->putStubCode(stubKey, newStubCode))
        return nullptr;

    // Sanity check: a compiler that emitted a stub frame must correspond to
    // a stub kind that is allowed to make calls, and vice versa.
    JS_ASSERT(entersStubFrame_ == ICStub::CanMakeCalls(kind));

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(newStubCode, "BaselineIC");
#endif

    return newStubCode;
}
   1.625 +
   1.626 +bool
   1.627 +ICStubCompiler::tailCallVM(const VMFunction &fun, MacroAssembler &masm)
   1.628 +{
   1.629 +    JitCode *code = cx->runtime()->jitRuntime()->getVMWrapper(fun);
   1.630 +    if (!code)
   1.631 +        return false;
   1.632 +
   1.633 +    uint32_t argSize = fun.explicitStackSlots() * sizeof(void *);
   1.634 +    EmitTailCallVM(code, masm, argSize);
   1.635 +    return true;
   1.636 +}
   1.637 +
   1.638 +bool
   1.639 +ICStubCompiler::callVM(const VMFunction &fun, MacroAssembler &masm)
   1.640 +{
   1.641 +    JitCode *code = cx->runtime()->jitRuntime()->getVMWrapper(fun);
   1.642 +    if (!code)
   1.643 +        return false;
   1.644 +
   1.645 +    EmitCallVM(code, masm);
   1.646 +    return true;
   1.647 +}
   1.648 +
   1.649 +bool
   1.650 +ICStubCompiler::callTypeUpdateIC(MacroAssembler &masm, uint32_t objectOffset)
   1.651 +{
   1.652 +    JitCode *code = cx->runtime()->jitRuntime()->getVMWrapper(DoTypeUpdateFallbackInfo);
   1.653 +    if (!code)
   1.654 +        return false;
   1.655 +
   1.656 +    EmitCallTypeUpdateIC(masm, code, objectOffset);
   1.657 +    return true;
   1.658 +}
   1.659 +
// Emit code to push a baseline stub frame.  Must be balanced by a
// leaveStubFrame() before the generated stub returns.
void
ICStubCompiler::enterStubFrame(MacroAssembler &masm, Register scratch)
{
    EmitEnterStubFrame(masm, scratch);
#ifdef DEBUG
    // Debug-only bookkeeping: getStubCode() asserts this flag matches
    // ICStub::CanMakeCalls(kind).
    entersStubFrame_ = true;
#endif
}
   1.668 +
// Emit code to pop the baseline stub frame pushed by enterStubFrame().
// |calledIntoIon| must reflect whether the stub called into Ion code.
void
ICStubCompiler::leaveStubFrame(MacroAssembler &masm, bool calledIntoIon)
{
    JS_ASSERT(entersStubFrame_);
    EmitLeaveStubFrame(masm, calledIntoIon);
}
   1.675 +
// Emit only the head portion of the stub-frame teardown; pair with
// leaveStubFrameCommonTail().
void
ICStubCompiler::leaveStubFrameHead(MacroAssembler &masm, bool calledIntoIon)
{
    JS_ASSERT(entersStubFrame_);
    EmitLeaveStubFrameHead(masm, calledIntoIon);
}
   1.682 +
// Emit the shared tail portion of the stub-frame teardown started by
// leaveStubFrameHead().
void
ICStubCompiler::leaveStubFrameCommonTail(MacroAssembler &masm)
{
    JS_ASSERT(entersStubFrame_);
    EmitLeaveStubFrameCommonTail(masm);
}
   1.689 +
// Emit guards that branch to |skip| unless SPS profiling updates are needed:
// i.e. unless this frame pushed an SPS entry AND the profiler is currently
// enabled. Clobbers |scratch|. Only meaningful from within a stub frame.
void
ICStubCompiler::guardProfilingEnabled(MacroAssembler &masm, Register scratch, Label *skip)
{
    // This should only be called from the following stubs.
    JS_ASSERT(kind == ICStub::Call_Scripted                             ||
              kind == ICStub::Call_AnyScripted                          ||
              kind == ICStub::Call_Native                               ||
              kind == ICStub::Call_ScriptedApplyArray                   ||
              kind == ICStub::Call_ScriptedApplyArguments               ||
              kind == ICStub::Call_ScriptedFunCall                      ||
              kind == ICStub::GetProp_CallScripted                      ||
              kind == ICStub::GetProp_CallNative                        ||
              kind == ICStub::GetProp_CallNativePrototype               ||
              kind == ICStub::GetProp_CallDOMProxyNative                ||
              kind == ICStub::GetElem_NativePrototypeCallNative         ||
              kind == ICStub::GetElem_NativePrototypeCallScripted       ||
              kind == ICStub::GetProp_CallDOMProxyWithGenerationNative  ||
              kind == ICStub::GetProp_DOMProxyShadowed                  ||
              kind == ICStub::SetProp_CallScripted                      ||
              kind == ICStub::SetProp_CallNative);

    // Guard on bit in frame that indicates if the SPS frame was pushed in the first
    // place.  This code is expected to be called from within a stub that has already
    // entered a stub frame.  (BaselineFrameReg points at the saved frame pointer
    // inside the stub frame; loading through it recovers the baseline frame.)
    JS_ASSERT(entersStubFrame_);
    masm.loadPtr(Address(BaselineFrameReg, 0), scratch);
    masm.branchTest32(Assembler::Zero,
                      Address(scratch, BaselineFrame::reverseOffsetOfFlags()),
                      Imm32(BaselineFrame::HAS_PUSHED_SPS_FRAME),
                      skip);

    // Check if profiling is enabled, by testing the runtime's enabled-flag word
    // directly (the flag can be toggled at runtime, so this must be a dynamic check).
    uint32_t *enabledAddr = cx->runtime()->spsProfiler.addressOfEnabled();
    masm.branch32(Assembler::Equal, AbsoluteAddress(enabledAddr), Imm32(0), skip);
}
   1.725 +
   1.726 +void
   1.727 +ICStubCompiler::emitProfilingUpdate(MacroAssembler &masm, Register pcIdx, Register scratch,
   1.728 +                                    uint32_t stubPcOffset)
   1.729 +{
   1.730 +    Label skipProfilerUpdate;
   1.731 +
   1.732 +    // Check if profiling is enabled.
   1.733 +    guardProfilingEnabled(masm, scratch, &skipProfilerUpdate);
   1.734 +
   1.735 +    // Update profiling entry before leaving function.
   1.736 +    masm.load32(Address(BaselineStubReg, stubPcOffset), pcIdx);
   1.737 +    masm.spsUpdatePCIdx(&cx->runtime()->spsProfiler, pcIdx, scratch);
   1.738 +
   1.739 +    masm.bind(&skipProfilerUpdate);
   1.740 +}
   1.741 +
   1.742 +void
   1.743 +ICStubCompiler::emitProfilingUpdate(MacroAssembler &masm, GeneralRegisterSet regs,
   1.744 +                                    uint32_t stubPcOffset)
   1.745 +{
   1.746 +    emitProfilingUpdate(masm, regs.takeAny(), regs.takeAny(), stubPcOffset);
   1.747 +}
   1.748 +
   1.749 +#ifdef JSGC_GENERATIONAL
   1.750 +inline bool
   1.751 +ICStubCompiler::emitPostWriteBarrierSlot(MacroAssembler &masm, Register obj, ValueOperand val,
   1.752 +                                         Register scratch, GeneralRegisterSet saveRegs)
   1.753 +{
   1.754 +    Nursery &nursery = cx->runtime()->gcNursery;
   1.755 +
   1.756 +    Label skipBarrier;
   1.757 +    masm.branchTestObject(Assembler::NotEqual, val, &skipBarrier);
   1.758 +
   1.759 +    masm.branchPtrInNurseryRange(obj, scratch, &skipBarrier);
   1.760 +
   1.761 +    Register valReg = masm.extractObject(val, scratch);
   1.762 +    masm.branchPtr(Assembler::Below, valReg, ImmWord(nursery.start()), &skipBarrier);
   1.763 +    masm.branchPtr(Assembler::AboveOrEqual, valReg, ImmWord(nursery.heapEnd()), &skipBarrier);
   1.764 +
   1.765 +    // void PostWriteBarrier(JSRuntime *rt, JSObject *obj);
   1.766 +#if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_MIPS)
   1.767 +    saveRegs.add(BaselineTailCallReg);
   1.768 +#endif
   1.769 +    saveRegs = GeneralRegisterSet::Intersect(saveRegs, GeneralRegisterSet::Volatile());
   1.770 +    masm.PushRegsInMask(saveRegs);
   1.771 +    masm.setupUnalignedABICall(2, scratch);
   1.772 +    masm.movePtr(ImmPtr(cx->runtime()), scratch);
   1.773 +    masm.passABIArg(scratch);
   1.774 +    masm.passABIArg(obj);
   1.775 +    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, PostWriteBarrier));
   1.776 +    masm.PopRegsInMask(saveRegs);
   1.777 +
   1.778 +    masm.bind(&skipBarrier);
   1.779 +    return true;
   1.780 +}
   1.781 +#endif // JSGC_GENERATIONAL
   1.782 +
   1.783 +//
   1.784 +// UseCount_Fallback
   1.785 +//
   1.786 +static bool
   1.787 +IsTopFrameConstructing(JSContext *cx)
   1.788 +{
   1.789 +    JS_ASSERT(cx->currentlyRunningInJit());
   1.790 +    JitActivationIterator activations(cx->runtime());
   1.791 +    JitFrameIterator iter(activations);
   1.792 +    JS_ASSERT(iter.type() == JitFrame_Exit);
   1.793 +
   1.794 +    ++iter;
   1.795 +    JS_ASSERT(iter.type() == JitFrame_BaselineStub);
   1.796 +
   1.797 +    ++iter;
   1.798 +    JS_ASSERT(iter.isBaselineJS());
   1.799 +
   1.800 +    return iter.isConstructing();
   1.801 +}
   1.802 +
   1.803 +static bool
   1.804 +EnsureCanEnterIon(JSContext *cx, ICUseCount_Fallback *stub, BaselineFrame *frame,
   1.805 +                  HandleScript script, jsbytecode *pc, void **jitcodePtr)
   1.806 +{
   1.807 +    JS_ASSERT(jitcodePtr);
   1.808 +    JS_ASSERT(!*jitcodePtr);
   1.809 +
   1.810 +    bool isLoopEntry = (JSOp(*pc) == JSOP_LOOPENTRY);
   1.811 +
   1.812 +    bool isConstructing = IsTopFrameConstructing(cx);
   1.813 +    MethodStatus stat;
   1.814 +    if (isLoopEntry) {
   1.815 +        JS_ASSERT(LoopEntryCanIonOsr(pc));
   1.816 +        IonSpew(IonSpew_BaselineOSR, "  Compile at loop entry!");
   1.817 +        stat = CanEnterAtBranch(cx, script, frame, pc, isConstructing);
   1.818 +    } else if (frame->isFunctionFrame()) {
   1.819 +        IonSpew(IonSpew_BaselineOSR, "  Compile function from top for later entry!");
   1.820 +        stat = CompileFunctionForBaseline(cx, script, frame, isConstructing);
   1.821 +    } else {
   1.822 +        return true;
   1.823 +    }
   1.824 +
   1.825 +    if (stat == Method_Error) {
   1.826 +        IonSpew(IonSpew_BaselineOSR, "  Compile with Ion errored!");
   1.827 +        return false;
   1.828 +    }
   1.829 +
   1.830 +    if (stat == Method_CantCompile)
   1.831 +        IonSpew(IonSpew_BaselineOSR, "  Can't compile with Ion!");
   1.832 +    else if (stat == Method_Skipped)
   1.833 +        IonSpew(IonSpew_BaselineOSR, "  Skipped compile with Ion!");
   1.834 +    else if (stat == Method_Compiled)
   1.835 +        IonSpew(IonSpew_BaselineOSR, "  Compiled with Ion!");
   1.836 +    else
   1.837 +        MOZ_ASSUME_UNREACHABLE("Invalid MethodStatus!");
   1.838 +
   1.839 +    // Failed to compile.  Reset use count and return.
   1.840 +    if (stat != Method_Compiled) {
   1.841 +        // TODO: If stat == Method_CantCompile, insert stub that just skips the useCount
   1.842 +        // entirely, instead of resetting it.
   1.843 +        bool bailoutExpected = script->hasIonScript() && script->ionScript()->bailoutExpected();
   1.844 +        if (stat == Method_CantCompile || bailoutExpected) {
   1.845 +            IonSpew(IonSpew_BaselineOSR, "  Reset UseCount cantCompile=%s bailoutExpected=%s!",
   1.846 +                    stat == Method_CantCompile ? "yes" : "no",
   1.847 +                    bailoutExpected ? "yes" : "no");
   1.848 +            script->resetUseCount();
   1.849 +        }
   1.850 +        return true;
   1.851 +    }
   1.852 +
   1.853 +    if (isLoopEntry) {
   1.854 +        IonScript *ion = script->ionScript();
   1.855 +        JS_ASSERT(cx->runtime()->spsProfiler.enabled() == ion->hasSPSInstrumentation());
   1.856 +        JS_ASSERT(ion->osrPc() == pc);
   1.857 +
   1.858 +        // If the baseline frame's SPS handling doesn't match up with the Ion code's SPS
   1.859 +        // handling, don't OSR.
   1.860 +        if (frame->hasPushedSPSFrame() != ion->hasSPSInstrumentation()) {
   1.861 +            IonSpew(IonSpew_BaselineOSR, "  OSR crosses SPS handling boundaries, skipping!");
   1.862 +            return true;
   1.863 +        }
   1.864 +
   1.865 +        IonSpew(IonSpew_BaselineOSR, "  OSR possible!");
   1.866 +        *jitcodePtr = ion->method()->raw() + ion->osrEntryOffset();
   1.867 +    }
   1.868 +
   1.869 +    return true;
   1.870 +}
   1.871 +
   1.872 +//
   1.873 +// The following data is kept in a temporary heap-allocated buffer, stored in
   1.874 +// JitRuntime (high memory addresses at top, low at bottom):
   1.875 +//
   1.876 +//     +----->+=================================+  --      <---- High Address
   1.877 +//     |      |                                 |   |
   1.878 +//     |      |     ...BaselineFrame...         |   |-- Copy of BaselineFrame + stack values
   1.879 +//     |      |                                 |   |
   1.880 +//     |      +---------------------------------+   |
   1.881 +//     |      |                                 |   |
   1.882 +//     |      |     ...Locals/Stack...          |   |
   1.883 +//     |      |                                 |   |
   1.884 +//     |      +=================================+  --
   1.885 +//     |      |     Padding(Maybe Empty)        |
   1.886 +//     |      +=================================+  --
   1.887 +//     +------|-- baselineFrame                 |   |-- IonOsrTempData
   1.888 +//            |   jitcode                       |   |
   1.889 +//            +=================================+  --      <---- Low Address
   1.890 +//
   1.891 +// A pointer to the IonOsrTempData is returned.
   1.892 +
// Header of the temporary OSR buffer described in the diagram above; lives at
// the buffer's low address.
struct IonOsrTempData
{
    void *jitcode;          // Ion OSR entry point to jump to.
    uint8_t *baselineFrame; // Points to the *end* of the copied frame data,
                            // like the frame pointer in a live baseline frame.
};
   1.898 +
// Allocate and fill the temporary OSR buffer (see diagram above) with a copy
// of |frame| plus its locals/stack values, and record the OSR |jitcode| entry
// point. Returns null on allocation failure.
static IonOsrTempData *
PrepareOsrTempData(JSContext *cx, ICUseCount_Fallback *stub, BaselineFrame *frame,
                   HandleScript script, jsbytecode *pc, void *jitcode)
{
    size_t numLocalsAndStackVals = frame->numValueSlots();

    // Calculate the amount of space to allocate:
    //      BaselineFrame space:
    //          (sizeof(Value) * (numLocals + numStackVals))
    //        + sizeof(BaselineFrame)
    //
    //      IonOsrTempData space:
    //          sizeof(IonOsrTempData)

    size_t frameSpace = sizeof(BaselineFrame) + sizeof(Value) * numLocalsAndStackVals;
    size_t ionOsrTempDataSpace = sizeof(IonOsrTempData);

    // Each region is rounded up to Value alignment so the copied frame's
    // Value slots stay properly aligned.
    size_t totalSpace = AlignBytes(frameSpace, sizeof(Value)) +
                        AlignBytes(ionOsrTempDataSpace, sizeof(Value));

    IonOsrTempData *info = (IonOsrTempData *)cx->runtime()->getJitRuntime(cx)->allocateOsrTempData(totalSpace);
    if (!info)
        return nullptr;

    memset(info, 0, totalSpace);

    info->jitcode = jitcode;

    // Copy the BaselineFrame + local/stack Values to the buffer. Arguments and
    // |this| are not copied but left on the stack: the Baseline and Ion frame
    // share the same frame prefix and Ion won't clobber these values. Note
    // that info->baselineFrame will point to the *end* of the frame data, like
    // the frame pointer register in baseline frames.
    uint8_t *frameStart = (uint8_t *)info + AlignBytes(ionOsrTempDataSpace, sizeof(Value));
    info->baselineFrame = frameStart + frameSpace;

    // Frames grow down, so the value slots sit *below* |frame| in memory.
    memcpy(frameStart, (uint8_t *)frame - numLocalsAndStackVals * sizeof(Value), frameSpace);

    IonSpew(IonSpew_BaselineOSR, "Allocated IonOsrTempData at %p", (void *) info);
    IonSpew(IonSpew_BaselineOSR, "Jitcode is %p", info->jitcode);

    // All done.
    return info;
}
   1.943 +
   1.944 +static bool
   1.945 +DoUseCountFallback(JSContext *cx, ICUseCount_Fallback *stub, BaselineFrame *frame,
   1.946 +                   IonOsrTempData **infoPtr)
   1.947 +{
   1.948 +    JS_ASSERT(infoPtr);
   1.949 +    *infoPtr = nullptr;
   1.950 +
   1.951 +    // A TI OOM will disable TI and Ion.
   1.952 +    if (!jit::IsIonEnabled(cx))
   1.953 +        return true;
   1.954 +
   1.955 +    RootedScript script(cx, frame->script());
   1.956 +    jsbytecode *pc = stub->icEntry()->pc(script);
   1.957 +    bool isLoopEntry = JSOp(*pc) == JSOP_LOOPENTRY;
   1.958 +
   1.959 +    JS_ASSERT(!isLoopEntry || LoopEntryCanIonOsr(pc));
   1.960 +
   1.961 +    FallbackICSpew(cx, stub, "UseCount(%d)", isLoopEntry ? int(script->pcToOffset(pc)) : int(-1));
   1.962 +
   1.963 +    if (!script->canIonCompile()) {
   1.964 +        // TODO: ASSERT that ion-compilation-disabled checker stub doesn't exist.
   1.965 +        // TODO: Clear all optimized stubs.
   1.966 +        // TODO: Add a ion-compilation-disabled checker IC stub
   1.967 +        script->resetUseCount();
   1.968 +        return true;
   1.969 +    }
   1.970 +
   1.971 +    JS_ASSERT(!script->isIonCompilingOffThread());
   1.972 +
   1.973 +    // If Ion script exists, but PC is not at a loop entry, then Ion will be entered for
   1.974 +    // this script at an appropriate LOOPENTRY or the next time this function is called.
   1.975 +    if (script->hasIonScript() && !isLoopEntry) {
   1.976 +        IonSpew(IonSpew_BaselineOSR, "IonScript exists, but not at loop entry!");
   1.977 +        // TODO: ASSERT that a ion-script-already-exists checker stub doesn't exist.
   1.978 +        // TODO: Clear all optimized stubs.
   1.979 +        // TODO: Add a ion-script-already-exists checker stub.
   1.980 +        return true;
   1.981 +    }
   1.982 +
   1.983 +    // Ensure that Ion-compiled code is available.
   1.984 +    IonSpew(IonSpew_BaselineOSR,
   1.985 +            "UseCount for %s:%d reached %d at pc %p, trying to switch to Ion!",
   1.986 +            script->filename(), script->lineno(), (int) script->getUseCount(), (void *) pc);
   1.987 +    void *jitcode = nullptr;
   1.988 +    if (!EnsureCanEnterIon(cx, stub, frame, script, pc, &jitcode))
   1.989 +        return false;
   1.990 +
   1.991 +    // Jitcode should only be set here if not at loop entry.
   1.992 +    JS_ASSERT_IF(!isLoopEntry, !jitcode);
   1.993 +    if (!jitcode)
   1.994 +        return true;
   1.995 +
   1.996 +    // Prepare the temporary heap copy of the fake InterpreterFrame and actual args list.
   1.997 +    IonSpew(IonSpew_BaselineOSR, "Got jitcode.  Preparing for OSR into ion.");
   1.998 +    IonOsrTempData *info = PrepareOsrTempData(cx, stub, frame, script, pc, jitcode);
   1.999 +    if (!info)
  1.1000 +        return false;
  1.1001 +    *infoPtr = info;
  1.1002 +
  1.1003 +    return true;
  1.1004 +}
  1.1005 +
// VM-call wiring for DoUseCountFallback. This is a non-tail VM call because
// the stub may resume by jumping straight into Ion code afterwards.
typedef bool (*DoUseCountFallbackFn)(JSContext *, ICUseCount_Fallback *, BaselineFrame *frame,
                                     IonOsrTempData **infoPtr);
static const VMFunction DoUseCountFallbackInfo =
    FunctionInfo<DoUseCountFallbackFn>(DoUseCountFallback);
  1.1010 +
// Generate the fallback stub: call DoUseCountFallback from a stub frame and,
// when it produces OSR data, unwind the baseline frame and jump into Ion.
bool
ICUseCount_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    // enterStubFrame is going to clobber the BaselineFrameReg, save it in R0.scratchReg()
    // first.
    masm.movePtr(BaselineFrameReg, R0.scratchReg());

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, R1.scratchReg());

    Label noCompiledCode;
    // Call DoUseCountFallback to compile/check-for Ion-compiled function
    {
        // Push IonOsrTempData pointer storage (reserve a stack slot, then push
        // its address as the out-param).
        masm.subPtr(Imm32(sizeof(void *)), BaselineStackReg);
        masm.push(BaselineStackReg);

        // Push IonJSFrameLayout pointer.
        masm.loadBaselineFramePtr(R0.scratchReg(), R0.scratchReg());
        masm.push(R0.scratchReg());

        // Push stub pointer.
        masm.push(BaselineStubReg);

        if (!callVM(DoUseCountFallbackInfo, masm))
            return false;

        // Pop IonOsrTempData pointer (filled in by the VM call).
        masm.pop(R0.scratchReg());

        leaveStubFrame(masm);

        // If no JitCode was found, just exit the IC.
        masm.branchPtr(Assembler::Equal, R0.scratchReg(), ImmPtr(nullptr), &noCompiledCode);
    }

    // Get a scratch register.
    GeneralRegisterSet regs(availableGeneralRegs(0));
    Register osrDataReg = R0.scratchReg();
    regs.take(osrDataReg);
    regs.takeUnchecked(OsrFrameReg);

    Register scratchReg = regs.takeAny();

    // At this point, stack looks like:
    //  +-> [...Calling-Frame...]
    //  |   [...Actual-Args/ThisV/ArgCount/Callee...]
    //  |   [Descriptor]
    //  |   [Return-Addr]
    //  +---[Saved-FramePtr]            <-- BaselineFrameReg points here.
    //      [...Baseline-Frame...]

    // Restore the stack pointer to point to the saved frame pointer.
    masm.movePtr(BaselineFrameReg, BaselineStackReg);

    // Discard saved frame pointer, so that the return address is on top of
    // the stack.
    masm.pop(scratchReg);

    // Jump into Ion: load the OSR entry point and hand the copied frame to
    // Ion via OsrFrameReg.
    masm.loadPtr(Address(osrDataReg, offsetof(IonOsrTempData, jitcode)), scratchReg);
    masm.loadPtr(Address(osrDataReg, offsetof(IonOsrTempData, baselineFrame)), OsrFrameReg);
    masm.jump(scratchReg);

    // No jitcode available, do nothing.
    masm.bind(&noCompiledCode);
    EmitReturnFromIC(masm);
    return true;
}
  1.1080 +
  1.1081 +//
  1.1082 +// ICProfile_Fallback
  1.1083 +//
  1.1084 +
  1.1085 +static bool
  1.1086 +DoProfilerFallback(JSContext *cx, BaselineFrame *frame, ICProfiler_Fallback *stub)
  1.1087 +{
  1.1088 +    RootedScript script(cx, frame->script());
  1.1089 +    RootedFunction func(cx, frame->maybeFun());
  1.1090 +    mozilla::DebugOnly<ICEntry *> icEntry = stub->icEntry();
  1.1091 +
  1.1092 +    FallbackICSpew(cx, stub, "Profiler");
  1.1093 +
  1.1094 +    SPSProfiler *profiler = &cx->runtime()->spsProfiler;
  1.1095 +
  1.1096 +    // Manually enter SPS this time.
  1.1097 +    JS_ASSERT(profiler->enabled());
  1.1098 +    if (!cx->runtime()->spsProfiler.enter(script, func))
  1.1099 +        return false;
  1.1100 +    frame->setPushedSPSFrame();
  1.1101 +
  1.1102 +    // Unlink any existing PushFunction stub (which may hold stale 'const char *' to
  1.1103 +    // the profile string.
  1.1104 +    JS_ASSERT_IF(icEntry->firstStub() != stub,
  1.1105 +                 icEntry->firstStub()->isProfiler_PushFunction() &&
  1.1106 +                 icEntry->firstStub()->next() == stub);
  1.1107 +    stub->unlinkStubsWithKind(cx, ICStub::Profiler_PushFunction);
  1.1108 +    JS_ASSERT(icEntry->firstStub() == stub);
  1.1109 +
  1.1110 +    // Generate the string to use to identify this stack frame.
  1.1111 +    const char *string = profiler->profileString(script, func);
  1.1112 +    if (string == nullptr)
  1.1113 +        return false;
  1.1114 +
  1.1115 +    IonSpew(IonSpew_BaselineIC, "  Generating Profiler_PushFunction stub for %s:%d",
  1.1116 +            script->filename(), script->lineno());
  1.1117 +
  1.1118 +    // Create a new optimized stub.
  1.1119 +    ICProfiler_PushFunction::Compiler compiler(cx, string, script);
  1.1120 +    ICStub *optStub = compiler.getStub(compiler.getStubSpace(script));
  1.1121 +    if (!optStub)
  1.1122 +        return false;
  1.1123 +    stub->addNewStub(optStub);
  1.1124 +
  1.1125 +    return true;
  1.1126 +}
  1.1127 +
// VM-call wiring for DoProfilerFallback (invoked as a tail call below).
typedef bool (*DoProfilerFallbackFn)(JSContext *, BaselineFrame *frame, ICProfiler_Fallback *);
static const VMFunction DoProfilerFallbackInfo =
    FunctionInfo<DoProfilerFallbackFn>(DoProfilerFallback);
  1.1131 +
// Generate the profiler fallback stub: tail-call into DoProfilerFallback with
// (frame, stub) as arguments (pushed in reverse order).
bool
ICProfiler_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    masm.push(BaselineStubReg);         // Push stub.
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg()); // Push frame.

    return tailCallVM(DoProfilerFallbackInfo, masm);
}
  1.1142 +
  1.1143 +bool
  1.1144 +ICProfiler_PushFunction::Compiler::generateStubCode(MacroAssembler &masm)
  1.1145 +{
  1.1146 +
  1.1147 +    Register scratch = R0.scratchReg();
  1.1148 +    Register scratch2 = R1.scratchReg();
  1.1149 +
  1.1150 +    // Profiling should be enabled if we ever reach here.
  1.1151 +#ifdef DEBUG
  1.1152 +    Label spsEnabled;
  1.1153 +    uint32_t *enabledAddr = cx->runtime()->spsProfiler.addressOfEnabled();
  1.1154 +    masm.branch32(Assembler::NotEqual, AbsoluteAddress(enabledAddr), Imm32(0), &spsEnabled);
  1.1155 +    masm.assumeUnreachable("Profiling should have been enabled.");
  1.1156 +    masm.bind(&spsEnabled);
  1.1157 +#endif
  1.1158 +
  1.1159 +    // Push SPS entry.
  1.1160 +    masm.spsPushFrame(&cx->runtime()->spsProfiler,
  1.1161 +                      Address(BaselineStubReg, ICProfiler_PushFunction::offsetOfStr()),
  1.1162 +                      Address(BaselineStubReg, ICProfiler_PushFunction::offsetOfScript()),
  1.1163 +                      scratch,
  1.1164 +                      scratch2);
  1.1165 +
  1.1166 +    // Mark frame as having profiler entry pushed.
  1.1167 +    Address flagsOffset(BaselineFrameReg, BaselineFrame::reverseOffsetOfFlags());
  1.1168 +    masm.or32(Imm32(BaselineFrame::HAS_PUSHED_SPS_FRAME), flagsOffset);
  1.1169 +
  1.1170 +    EmitReturnFromIC(masm);
  1.1171 +
  1.1172 +    return true;
  1.1173 +}
  1.1174 +
  1.1175 +//
  1.1176 +// TypeMonitor_Fallback
  1.1177 +//
  1.1178 +
// Attach an optimized type-monitor stub matching |val|'s type to this
// fallback's monitor chain (primitive set, single object, or type object).
// If this is the first optimized monitor stub, also repoint every monitored
// main stub's firstMonitorStub at the new chain head. Returns false on OOM.
bool
ICTypeMonitor_Fallback::addMonitorStubForValue(JSContext *cx, JSScript *script, HandleValue val)
{
    bool wasDetachedMonitorChain = lastMonitorStubPtrAddr_ == nullptr;
    JS_ASSERT_IF(wasDetachedMonitorChain, numOptimizedMonitorStubs_ == 0);

    if (numOptimizedMonitorStubs_ >= MAX_OPTIMIZED_STUBS) {
        // TODO: if the TypeSet becomes unknown or has the AnyObject type,
        // replace stubs with a single stub to handle these.
        return true;
    }

    if (val.isPrimitive()) {
        JS_ASSERT(!val.isMagic());
        JSValueType type = val.isDouble() ? JSVAL_TYPE_DOUBLE : val.extractNonDoubleType();

        // Check for existing TypeMonitor stub.  All primitives share a single
        // PrimitiveSet stub, which is updated in place when possible.
        ICTypeMonitor_PrimitiveSet *existingStub = nullptr;
        for (ICStubConstIterator iter = firstMonitorStub(); !iter.atEnd(); iter++) {
            if (iter->isTypeMonitor_PrimitiveSet()) {
                existingStub = iter->toTypeMonitor_PrimitiveSet();
                if (existingStub->containsType(type))
                    return true;
            }
        }

        ICTypeMonitor_PrimitiveSet::Compiler compiler(cx, existingStub, type);
        ICStub *stub = existingStub ? compiler.updateStub()
                                    : compiler.getStub(compiler.getStubSpace(script));
        if (!stub) {
            js_ReportOutOfMemory(cx);
            return false;
        }

        IonSpew(IonSpew_BaselineIC, "  %s TypeMonitor stub %p for primitive type %d",
                existingStub ? "Modified existing" : "Created new", stub, type);

        if (!existingStub) {
            JS_ASSERT(!hasStub(TypeMonitor_PrimitiveSet));
            addOptimizedMonitorStub(stub);
        }

    } else if (val.toObject().hasSingletonType()) {
        RootedObject obj(cx, &val.toObject());

        // Check for existing TypeMonitor stub.
        for (ICStubConstIterator iter = firstMonitorStub(); !iter.atEnd(); iter++) {
            if (iter->isTypeMonitor_SingleObject() &&
                iter->toTypeMonitor_SingleObject()->object() == obj)
            {
                return true;
            }
        }

        ICTypeMonitor_SingleObject::Compiler compiler(cx, obj);
        ICStub *stub = compiler.getStub(compiler.getStubSpace(script));
        if (!stub) {
            js_ReportOutOfMemory(cx);
            return false;
        }

        IonSpew(IonSpew_BaselineIC, "  Added TypeMonitor stub %p for singleton %p",
                stub, obj.get());

        addOptimizedMonitorStub(stub);

    } else {
        // Non-singleton object: monitor by TypeObject.
        RootedTypeObject type(cx, val.toObject().type());

        // Check for existing TypeMonitor stub.
        for (ICStubConstIterator iter = firstMonitorStub(); !iter.atEnd(); iter++) {
            if (iter->isTypeMonitor_TypeObject() &&
                iter->toTypeMonitor_TypeObject()->type() == type)
            {
                return true;
            }
        }

        ICTypeMonitor_TypeObject::Compiler compiler(cx, type);
        ICStub *stub = compiler.getStub(compiler.getStubSpace(script));
        if (!stub) {
            js_ReportOutOfMemory(cx);
            return false;
        }

        IonSpew(IonSpew_BaselineIC, "  Added TypeMonitor stub %p for TypeObject %p",
                stub, type.get());

        addOptimizedMonitorStub(stub);
    }

    bool firstMonitorStubAdded = wasDetachedMonitorChain && (numOptimizedMonitorStubs_ > 0);

    if (firstMonitorStubAdded) {
        // Was an empty monitor chain before, but a new stub was added.  This is the
        // only time that any main stubs' firstMonitorStub fields need to be updated to
        // refer to the newly added monitor stub.
        ICStub *firstStub = mainFallbackStub_->icEntry()->firstStub();
        for (ICStubConstIterator iter = firstStub; !iter.atEnd(); iter++) {
            // Non-monitored stubs are used if the result has always the same type,
            // e.g. a StringLength stub will always return int32.
            if (!iter->isMonitored())
                continue;

            // Since we just added the first optimized monitoring stub, any
            // existing main stub's |firstMonitorStub| MUST be pointing to the fallback
            // monitor stub (i.e. this stub).
            JS_ASSERT(iter->toMonitoredStub()->firstMonitorStub() == this);
            iter->toMonitoredStub()->updateFirstMonitorStub(firstMonitorStub_);
        }
    }

    return true;
}
  1.1293 +
  1.1294 +static bool
  1.1295 +DoTypeMonitorFallback(JSContext *cx, BaselineFrame *frame, ICTypeMonitor_Fallback *stub,
  1.1296 +                      HandleValue value, MutableHandleValue res)
  1.1297 +{
  1.1298 +    RootedScript script(cx, frame->script());
  1.1299 +    jsbytecode *pc = stub->icEntry()->pc(script);
  1.1300 +    TypeFallbackICSpew(cx, stub, "TypeMonitor");
  1.1301 +
  1.1302 +    uint32_t argument;
  1.1303 +    if (stub->monitorsThis()) {
  1.1304 +        JS_ASSERT(pc == script->code());
  1.1305 +        types::TypeScript::SetThis(cx, script, value);
  1.1306 +    } else if (stub->monitorsArgument(&argument)) {
  1.1307 +        JS_ASSERT(pc == script->code());
  1.1308 +        types::TypeScript::SetArgument(cx, script, argument, value);
  1.1309 +    } else {
  1.1310 +        types::TypeScript::Monitor(cx, script, pc, value);
  1.1311 +    }
  1.1312 +
  1.1313 +    if (!stub->addMonitorStubForValue(cx, script, value))
  1.1314 +        return false;
  1.1315 +
  1.1316 +    // Copy input value to res.
  1.1317 +    res.set(value);
  1.1318 +    return true;
  1.1319 +}
  1.1320 +
// VM-call wiring for DoTypeMonitorFallback (invoked as a tail call below).
typedef bool (*DoTypeMonitorFallbackFn)(JSContext *, BaselineFrame *, ICTypeMonitor_Fallback *,
                                        HandleValue, MutableHandleValue);
static const VMFunction DoTypeMonitorFallbackInfo =
    FunctionInfo<DoTypeMonitorFallbackFn>(DoTypeMonitorFallback);
  1.1325 +
// Generate the type-monitor fallback stub: tail-call DoTypeMonitorFallback
// with (frame, stub, value) as arguments (pushed in reverse order).
bool
ICTypeMonitor_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    // The monitored value arrives in (and is returned through) R0.
    JS_ASSERT(R0 == JSReturnOperand);

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    masm.pushValue(R0);
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(DoTypeMonitorFallbackInfo, masm);
}
  1.1340 +
// Emit a type-monitor stub that succeeds when the value in R0 has one of the
// primitive types recorded in flags_.  On success the stub simply returns;
// on failure it falls through to the next stub in the chain.
bool
ICTypeMonitor_PrimitiveSet::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label success;
    // When the set contains double, branchTestNumber below accepts both int32
    // and double, so a separate int32 test is only emitted otherwise.
    if ((flags_ & TypeToFlag(JSVAL_TYPE_INT32)) && !(flags_ & TypeToFlag(JSVAL_TYPE_DOUBLE)))
        masm.branchTestInt32(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_DOUBLE))
        masm.branchTestNumber(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_UNDEFINED))
        masm.branchTestUndefined(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_BOOLEAN))
        masm.branchTestBoolean(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_STRING))
        masm.branchTestString(Assembler::Equal, R0, &success);

    // Currently, we will never generate primitive stub checks for object.  However,
    // when we do get to the point where we want to collapse our monitor chains of
    // objects and singletons down (when they get too long) to a generic "any object"
    // in coordination with the typeset doing the same thing, this will need to
    // be re-enabled.
    /*
    if (flags_ & TypeToFlag(JSVAL_TYPE_OBJECT))
        masm.branchTestObject(Assembler::Equal, R0, &success);
    */
    JS_ASSERT(!(flags_ & TypeToFlag(JSVAL_TYPE_OBJECT)));

    if (flags_ & TypeToFlag(JSVAL_TYPE_NULL))
        masm.branchTestNull(Assembler::Equal, R0, &success);

    // No type in the set matched: go to the next stub.
    EmitStubGuardFailure(masm);

    masm.bind(&success);
    EmitReturnFromIC(masm);
    return true;
}
  1.1380 +
// Emit a type-monitor stub that succeeds only when R0 holds exactly the
// singleton object stored in this stub (pointer identity check).
bool
ICTypeMonitor_SingleObject::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    // Guard on the object's identity.
    Register obj = masm.extractObject(R0, ExtractTemp0);
    Address expectedObject(BaselineStubReg, ICTypeMonitor_SingleObject::offsetOfObject());
    masm.branchPtr(Assembler::NotEqual, expectedObject, obj, &failure);

    EmitReturnFromIC(masm);

    // Not an object, or a different object: try the next stub.
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.1398 +
// Emit a type-monitor stub that succeeds when R0 holds an object whose
// TypeObject matches the one recorded in this stub.
bool
ICTypeMonitor_TypeObject::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    // Guard on the object's TypeObject.
    Register obj = masm.extractObject(R0, ExtractTemp0);
    masm.loadPtr(Address(obj, JSObject::offsetOfType()), R1.scratchReg());

    Address expectedType(BaselineStubReg, ICTypeMonitor_TypeObject::offsetOfType());
    masm.branchPtr(Assembler::NotEqual, expectedType, R1.scratchReg(), &failure);

    EmitReturnFromIC(masm);

    // Not an object, or a different TypeObject: try the next stub.
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.1418 +
// Attach (or extend) a TypeUpdate stub covering |val| being written to
// property |id| of |obj|.  Three stub shapes exist: a single shared
// PrimitiveSet stub whose flag set is extended in place, one SingleObject
// stub per singleton object, and one TypeObject stub per TypeObject.
// Returns false only on OOM; hitting the stub limit returns true without
// adding a stub.
bool
ICUpdatedStub::addUpdateStubForValue(JSContext *cx, HandleScript script, HandleObject obj,
                                     HandleId id, HandleValue val)
{
    if (numOptimizedStubs_ >= MAX_OPTIMIZED_STUBS) {
        // TODO: if the TypeSet becomes unknown or has the AnyObject type,
        // replace stubs with a single stub to handle these.
        return true;
    }

    types::EnsureTrackPropertyTypes(cx, obj, id);

    // Make sure that undefined values are explicitly included in the property
    // types for an object if generating a stub to write an undefined value.
    if (val.isUndefined() && types::CanHaveEmptyPropertyTypesForOwnProperty(obj))
        types::AddTypePropertyId(cx, obj, id, val);

    if (val.isPrimitive()) {
        JSValueType type = val.isDouble() ? JSVAL_TYPE_DOUBLE : val.extractNonDoubleType();

        // Check for existing TypeUpdate stub.
        ICTypeUpdate_PrimitiveSet *existingStub = nullptr;
        for (ICStubConstIterator iter = firstUpdateStub_; !iter.atEnd(); iter++) {
            if (iter->isTypeUpdate_PrimitiveSet()) {
                existingStub = iter->toTypeUpdate_PrimitiveSet();
                if (existingStub->containsType(type))
                    return true;
            }
        }

        // Either regenerate the existing PrimitiveSet stub with the new type
        // added to its flags, or create a fresh stub if none exists yet.
        ICTypeUpdate_PrimitiveSet::Compiler compiler(cx, existingStub, type);
        ICStub *stub = existingStub ? compiler.updateStub()
                                    : compiler.getStub(compiler.getStubSpace(script));
        if (!stub)
            return false;
        if (!existingStub) {
            JS_ASSERT(!hasTypeUpdateStub(TypeUpdate_PrimitiveSet));
            addOptimizedUpdateStub(stub);
        }

        IonSpew(IonSpew_BaselineIC, "  %s TypeUpdate stub %p for primitive type %d",
                existingStub ? "Modified existing" : "Created new", stub, type);

    } else if (val.toObject().hasSingletonType()) {
        // NOTE: deliberately shadows the |obj| parameter; below here |obj| is
        // the value being written, not the object being written to.
        RootedObject obj(cx, &val.toObject());

        // Check for existing TypeUpdate stub.
        for (ICStubConstIterator iter = firstUpdateStub_; !iter.atEnd(); iter++) {
            if (iter->isTypeUpdate_SingleObject() &&
                iter->toTypeUpdate_SingleObject()->object() == obj)
            {
                return true;
            }
        }

        ICTypeUpdate_SingleObject::Compiler compiler(cx, obj);
        ICStub *stub = compiler.getStub(compiler.getStubSpace(script));
        if (!stub)
            return false;

        IonSpew(IonSpew_BaselineIC, "  Added TypeUpdate stub %p for singleton %p", stub, obj.get());

        addOptimizedUpdateStub(stub);

    } else {
        // Non-singleton object: guard on its TypeObject instead of identity.
        RootedTypeObject type(cx, val.toObject().type());

        // Check for existing TypeUpdate stub.
        for (ICStubConstIterator iter = firstUpdateStub_; !iter.atEnd(); iter++) {
            if (iter->isTypeUpdate_TypeObject() &&
                iter->toTypeUpdate_TypeObject()->type() == type)
            {
                return true;
            }
        }

        ICTypeUpdate_TypeObject::Compiler compiler(cx, type);
        ICStub *stub = compiler.getStub(compiler.getStubSpace(script));
        if (!stub)
            return false;

        IonSpew(IonSpew_BaselineIC, "  Added TypeUpdate stub %p for TypeObject %p",
                stub, type.get());

        addOptimizedUpdateStub(stub);
    }

    return true;
}
  1.1508 +
  1.1509 +//
  1.1510 +// TypeUpdate_Fallback
  1.1511 +//
// Fallback path for the TypeUpdate IC: record the type of |value| being
// written to |objval|'s property in TypeInference, then attach an optimized
// TypeUpdate stub via addUpdateStubForValue.  The property id is recovered
// from the owning stub's kind (elements use JSID_VOID; named properties read
// the name from the bytecode).
// NOTE(review): objval.toObject() assumes the caller only invokes this with
// an object value — confirm against the stubs that call the update chain.
static bool
DoTypeUpdateFallback(JSContext *cx, BaselineFrame *frame, ICUpdatedStub *stub, HandleValue objval,
                     HandleValue value)
{
    FallbackICSpew(cx, stub->getChainFallback(), "TypeUpdate(%s)",
                   ICStub::KindString(stub->kind()));

    RootedScript script(cx, frame->script());
    RootedObject obj(cx, &objval.toObject());
    RootedId id(cx);

    switch(stub->kind()) {
      case ICStub::SetElem_Dense:
      case ICStub::SetElem_DenseAdd: {
        JS_ASSERT(obj->isNative());
        // Dense element writes are tracked under the "any index" id.
        id = JSID_VOID;
        types::AddTypePropertyId(cx, obj, id, value);
        break;
      }
      case ICStub::SetProp_Native:
      case ICStub::SetProp_NativeAdd: {
        JS_ASSERT(obj->isNative());
        jsbytecode *pc = stub->getChainFallback()->icEntry()->pc(script);
        // JSOP_SETALIASEDVAR has no name operand; resolve the name through
        // the scope-coordinate cache instead.
        if (*pc == JSOP_SETALIASEDVAR)
            id = NameToId(ScopeCoordinateName(cx->runtime()->scopeCoordinateNameCache, script, pc));
        else
            id = NameToId(script->getName(pc));
        types::AddTypePropertyId(cx, obj, id, value);
        break;
      }
      default:
        MOZ_ASSUME_UNREACHABLE("Invalid stub");
    }

    return stub->addUpdateStubForValue(cx, script, obj, id, value);
}
  1.1548 +
// VM-function wrapper so the TypeUpdate fallback stub can tail-call into C++.
typedef bool (*DoTypeUpdateFallbackFn)(JSContext *, BaselineFrame *, ICUpdatedStub *, HandleValue,
                                       HandleValue);
const VMFunction DoTypeUpdateFallbackInfo =
    FunctionInfo<DoTypeUpdateFallbackFn>(DoTypeUpdateFallback);
  1.1553 +
// Terminal TypeUpdate stub: reports "no match" (false in R1.scratchReg())
// back to the stub that invoked the update chain.
bool
ICTypeUpdate_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    // Just store false into R1.scratchReg() and return.
    masm.move32(Imm32(0), R1.scratchReg());
    EmitReturnFromIC(masm);
    return true;
}
  1.1562 +
// Emit a TypeUpdate stub matching the primitive types in flags_.  Unlike the
// TypeMonitor variant, a match reports success by loading true into
// R1.scratchReg() before returning.
bool
ICTypeUpdate_PrimitiveSet::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label success;
    // When the set contains double, branchTestNumber below accepts both int32
    // and double, so a separate int32 test is only emitted otherwise.
    if ((flags_ & TypeToFlag(JSVAL_TYPE_INT32)) && !(flags_ & TypeToFlag(JSVAL_TYPE_DOUBLE)))
        masm.branchTestInt32(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_DOUBLE))
        masm.branchTestNumber(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_UNDEFINED))
        masm.branchTestUndefined(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_BOOLEAN))
        masm.branchTestBoolean(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_STRING))
        masm.branchTestString(Assembler::Equal, R0, &success);

    // Currently, we will never generate primitive stub checks for object.  However,
    // when we do get to the point where we want to collapse our monitor chains of
    // objects and singletons down (when they get too long) to a generic "any object"
    // in coordination with the typeset doing the same thing, this will need to
    // be re-enabled.
    /*
    if (flags_ & TypeToFlag(JSVAL_TYPE_OBJECT))
        masm.branchTestObject(Assembler::Equal, R0, &success);
    */
    JS_ASSERT(!(flags_ & TypeToFlag(JSVAL_TYPE_OBJECT)));

    if (flags_ & TypeToFlag(JSVAL_TYPE_NULL))
        masm.branchTestNull(Assembler::Equal, R0, &success);

    EmitStubGuardFailure(masm);

    // Type matches, load true into R1.scratchReg() and return.
    masm.bind(&success);
    masm.mov(ImmWord(1), R1.scratchReg());
    EmitReturnFromIC(masm);

    return true;
}
  1.1605 +
// Emit a TypeUpdate stub that matches exactly one singleton object (pointer
// identity); on match it signals success via true in R1.scratchReg().
bool
ICTypeUpdate_SingleObject::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    // Guard on the object's identity.
    Register obj = masm.extractObject(R0, R1.scratchReg());
    Address expectedObject(BaselineStubReg, ICTypeUpdate_SingleObject::offsetOfObject());
    masm.branchPtr(Assembler::NotEqual, expectedObject, obj, &failure);

    // Identity matches, load true into R1.scratchReg() and return.
    masm.mov(ImmWord(1), R1.scratchReg());
    EmitReturnFromIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.1625 +
// Emit a TypeUpdate stub that matches any object whose TypeObject equals the
// one stored in the stub; on match it signals success via R1.scratchReg().
bool
ICTypeUpdate_TypeObject::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    // Guard on the object's TypeObject.
    Register obj = masm.extractObject(R0, R1.scratchReg());
    masm.loadPtr(Address(obj, JSObject::offsetOfType()), R1.scratchReg());

    Address expectedType(BaselineStubReg, ICTypeUpdate_TypeObject::offsetOfType());
    masm.branchPtr(Assembler::NotEqual, expectedType, R1.scratchReg(), &failure);

    // Type matches, load true into R1.scratchReg() and return.
    masm.mov(ImmWord(1), R1.scratchReg());
    EmitReturnFromIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.1647 +
  1.1648 +//
  1.1649 +// VM function to help call native getters.
  1.1650 +//
  1.1651 +
  1.1652 +static bool
  1.1653 +DoCallNativeGetter(JSContext *cx, HandleFunction callee, HandleObject obj,
  1.1654 +                   MutableHandleValue result)
  1.1655 +{
  1.1656 +    JS_ASSERT(callee->isNative());
  1.1657 +    JSNative natfun = callee->native();
  1.1658 +
  1.1659 +    JS::AutoValueArray<2> vp(cx);
  1.1660 +    vp[0].setObject(*callee.get());
  1.1661 +    vp[1].setObject(*obj.get());
  1.1662 +
  1.1663 +    if (!natfun(cx, 0, vp.begin()))
  1.1664 +        return false;
  1.1665 +
  1.1666 +    result.set(vp[0]);
  1.1667 +    return true;
  1.1668 +}
  1.1669 +
// VM-function wrapper so generated stub code can call native getters in C++.
typedef bool (*DoCallNativeGetterFn)(JSContext *, HandleFunction, HandleObject, MutableHandleValue);
static const VMFunction DoCallNativeGetterInfo =
    FunctionInfo<DoCallNativeGetterFn>(DoCallNativeGetter);
  1.1673 +
  1.1674 +//
  1.1675 +// This_Fallback
  1.1676 +//
  1.1677 +
  1.1678 +static bool
  1.1679 +DoThisFallback(JSContext *cx, ICThis_Fallback *stub, HandleValue thisv, MutableHandleValue ret)
  1.1680 +{
  1.1681 +    FallbackICSpew(cx, stub, "This");
  1.1682 +
  1.1683 +    JSObject *thisObj = BoxNonStrictThis(cx, thisv);
  1.1684 +    if (!thisObj)
  1.1685 +        return false;
  1.1686 +
  1.1687 +    ret.setObject(*thisObj);
  1.1688 +    return true;
  1.1689 +}
  1.1690 +
// VM-function wrapper used by the This fallback stub's tail call.
typedef bool (*DoThisFallbackFn)(JSContext *, ICThis_Fallback *, HandleValue, MutableHandleValue);
static const VMFunction DoThisFallbackInfo = FunctionInfo<DoThisFallbackFn>(DoThisFallback);
  1.1693 +
// Emit the This fallback stub: tail-call DoThisFallback with the |this|
// value from R0.  Arguments are pushed in reverse order (value, then stub).
bool
ICThis_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(R0 == JSReturnOperand);

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    masm.pushValue(R0);         // thisv argument
    masm.push(BaselineStubReg); // stub argument

    return tailCallVM(DoThisFallbackInfo, masm);
}
  1.1707 +
  1.1708 +//
  1.1709 +// NewArray_Fallback
  1.1710 +//
  1.1711 +
  1.1712 +static bool
  1.1713 +DoNewArray(JSContext *cx, ICNewArray_Fallback *stub, uint32_t length,
  1.1714 +           HandleTypeObject type, MutableHandleValue res)
  1.1715 +{
  1.1716 +    FallbackICSpew(cx, stub, "NewArray");
  1.1717 +
  1.1718 +    JSObject *obj = NewInitArray(cx, length, type);
  1.1719 +    if (!obj)
  1.1720 +        return false;
  1.1721 +
  1.1722 +    res.setObject(*obj);
  1.1723 +    return true;
  1.1724 +}
  1.1725 +
// VM-function wrapper used by the NewArray fallback stub's tail call.
typedef bool(*DoNewArrayFn)(JSContext *, ICNewArray_Fallback *, uint32_t, HandleTypeObject,
                            MutableHandleValue);
static const VMFunction DoNewArrayInfo = FunctionInfo<DoNewArrayFn>(DoNewArray);
  1.1729 +
// Emit the NewArray fallback stub: tail-call DoNewArray.  The type is
// expected in R1.scratchReg() and the length in R0.scratchReg(); arguments
// are pushed in reverse order of the C++ signature.
bool
ICNewArray_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    masm.push(R1.scratchReg()); // type
    masm.push(R0.scratchReg()); // length
    masm.push(BaselineStubReg); // stub.

    return tailCallVM(DoNewArrayInfo, masm);
}
  1.1741 +
  1.1742 +//
  1.1743 +// NewObject_Fallback
  1.1744 +//
  1.1745 +
  1.1746 +static bool
  1.1747 +DoNewObject(JSContext *cx, ICNewObject_Fallback *stub, MutableHandleValue res)
  1.1748 +{
  1.1749 +    FallbackICSpew(cx, stub, "NewObject");
  1.1750 +
  1.1751 +    RootedObject templateObject(cx, stub->templateObject());
  1.1752 +    JSObject *obj = NewInitObject(cx, templateObject);
  1.1753 +    if (!obj)
  1.1754 +        return false;
  1.1755 +
  1.1756 +    res.setObject(*obj);
  1.1757 +    return true;
  1.1758 +}
  1.1759 +
// VM-function wrapper used by the NewObject fallback stub's tail call.
typedef bool(*DoNewObjectFn)(JSContext *, ICNewObject_Fallback *, MutableHandleValue);
static const VMFunction DoNewObjectInfo = FunctionInfo<DoNewObjectFn>(DoNewObject);
  1.1762 +
// Emit the NewObject fallback stub: tail-call DoNewObject with only the stub
// pointer as argument.
bool
ICNewObject_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    masm.push(BaselineStubReg); // stub.

    return tailCallVM(DoNewObjectInfo, masm);
}
  1.1772 +
  1.1773 +//
  1.1774 +// Compare_Fallback
  1.1775 +//
  1.1776 +
// Fallback for all comparison ops (<, <=, >, >=, ==, !=, ===, !==, CASE):
// perform the comparison in the interpreter's C++ helpers, store the boolean
// result in |ret|, then attach a specialized stub for the observed operand
// type pair if one exists and the stub limit has not been reached.
static bool
DoCompareFallback(JSContext *cx, BaselineFrame *frame, ICCompare_Fallback *stub_, HandleValue lhs,
                  HandleValue rhs, MutableHandleValue ret)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICCompare_Fallback *> stub(frame, stub_);

    jsbytecode *pc = stub->icEntry()->pc(frame->script());
    JSOp op = JSOp(*pc);

    FallbackICSpew(cx, stub, "Compare(%s)", js_CodeName[op]);

    // Case operations in a CONDSWITCH are performing strict equality.
    if (op == JSOP_CASE)
        op = JSOP_STRICTEQ;

    // Don't pass lhs/rhs directly, we need the original values when
    // generating stubs.
    RootedValue lhsCopy(cx, lhs);
    RootedValue rhsCopy(cx, rhs);

    // Perform the compare operation.
    bool out;
    switch(op) {
      case JSOP_LT:
        if (!LessThan(cx, &lhsCopy, &rhsCopy, &out))
            return false;
        break;
      case JSOP_LE:
        if (!LessThanOrEqual(cx, &lhsCopy, &rhsCopy, &out))
            return false;
        break;
      case JSOP_GT:
        if (!GreaterThan(cx, &lhsCopy, &rhsCopy, &out))
            return false;
        break;
      case JSOP_GE:
        if (!GreaterThanOrEqual(cx, &lhsCopy, &rhsCopy, &out))
            return false;
        break;
      case JSOP_EQ:
        if (!LooselyEqual<true>(cx, &lhsCopy, &rhsCopy, &out))
            return false;
        break;
      case JSOP_NE:
        if (!LooselyEqual<false>(cx, &lhsCopy, &rhsCopy, &out))
            return false;
        break;
      case JSOP_STRICTEQ:
        if (!StrictlyEqual<true>(cx, &lhsCopy, &rhsCopy, &out))
            return false;
        break;
      case JSOP_STRICTNE:
        if (!StrictlyEqual<false>(cx, &lhsCopy, &rhsCopy, &out))
            return false;
        break;
      default:
        JS_ASSERT(!"Unhandled baseline compare op");
        return false;
    }

    ret.setBoolean(out);

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    // Check to see if a new stub should be generated.
    if (stub->numOptimizedStubs() >= ICCompare_Fallback::MAX_OPTIMIZED_STUBS) {
        // TODO: Discard all stubs in this IC and replace with inert megamorphic stub.
        // But for now we just bail.
        return true;
    }

    JSScript *script = frame->script();

    // Try to generate new stubs.  Each case below attaches at most one stub
    // and returns; falling off the end means no specialization applied.
    if (lhs.isInt32() && rhs.isInt32()) {
        IonSpew(IonSpew_BaselineIC, "  Generating %s(Int32, Int32) stub", js_CodeName[op]);
        ICCompare_Int32::Compiler compiler(cx, op);
        ICStub *int32Stub = compiler.getStub(compiler.getStubSpace(script));
        if (!int32Stub)
            return false;

        stub->addNewStub(int32Stub);
        return true;
    }

    // Number-typed stubs need FP support; without it, don't specialize.
    if (!cx->runtime()->jitSupportsFloatingPoint && (lhs.isNumber() || rhs.isNumber()))
        return true;

    if (lhs.isNumber() && rhs.isNumber()) {
        IonSpew(IonSpew_BaselineIC, "  Generating %s(Number, Number) stub", js_CodeName[op]);

        // Unlink int32 stubs, it's faster to always use the double stub.
        stub->unlinkStubsWithKind(cx, ICStub::Compare_Int32);

        ICCompare_Double::Compiler compiler(cx, op);
        ICStub *doubleStub = compiler.getStub(compiler.getStubSpace(script));
        if (!doubleStub)
            return false;

        stub->addNewStub(doubleStub);
        return true;
    }

    if ((lhs.isNumber() && rhs.isUndefined()) ||
        (lhs.isUndefined() && rhs.isNumber()))
    {
        IonSpew(IonSpew_BaselineIC, "  Generating %s(%s, %s) stub", js_CodeName[op],
                    rhs.isUndefined() ? "Number" : "Undefined",
                    rhs.isUndefined() ? "Undefined" : "Number");
        ICCompare_NumberWithUndefined::Compiler compiler(cx, op, lhs.isUndefined());
        ICStub *doubleStub = compiler.getStub(compiler.getStubSpace(script));
        if (!doubleStub)
            return false;

        stub->addNewStub(doubleStub);
        return true;
    }

    if (lhs.isBoolean() && rhs.isBoolean()) {
        IonSpew(IonSpew_BaselineIC, "  Generating %s(Boolean, Boolean) stub", js_CodeName[op]);
        ICCompare_Boolean::Compiler compiler(cx, op);
        ICStub *booleanStub = compiler.getStub(compiler.getStubSpace(script));
        if (!booleanStub)
            return false;

        stub->addNewStub(booleanStub);
        return true;
    }

    if ((lhs.isBoolean() && rhs.isInt32()) || (lhs.isInt32() && rhs.isBoolean())) {
        IonSpew(IonSpew_BaselineIC, "  Generating %s(%s, %s) stub", js_CodeName[op],
                    rhs.isInt32() ? "Boolean" : "Int32",
                    rhs.isInt32() ? "Int32" : "Boolean");
        ICCompare_Int32WithBoolean::Compiler compiler(cx, op, lhs.isInt32());
        ICStub *optStub = compiler.getStub(compiler.getStubSpace(script));
        if (!optStub)
            return false;

        stub->addNewStub(optStub);
        return true;
    }

    // The remaining specializations only make sense for (in)equality ops.
    if (IsEqualityOp(op)) {
        if (lhs.isString() && rhs.isString() && !stub->hasStub(ICStub::Compare_String)) {
            IonSpew(IonSpew_BaselineIC, "  Generating %s(String, String) stub", js_CodeName[op]);
            ICCompare_String::Compiler compiler(cx, op);
            ICStub *stringStub = compiler.getStub(compiler.getStubSpace(script));
            if (!stringStub)
                return false;

            stub->addNewStub(stringStub);
            return true;
        }

        if (lhs.isObject() && rhs.isObject()) {
            JS_ASSERT(!stub->hasStub(ICStub::Compare_Object));
            IonSpew(IonSpew_BaselineIC, "  Generating %s(Object, Object) stub", js_CodeName[op]);
            ICCompare_Object::Compiler compiler(cx, op);
            ICStub *objectStub = compiler.getStub(compiler.getStubSpace(script));
            if (!objectStub)
                return false;

            stub->addNewStub(objectStub);
            return true;
        }

        if ((lhs.isObject() || lhs.isNull() || lhs.isUndefined()) &&
            (rhs.isObject() || rhs.isNull() || rhs.isUndefined()) &&
            !stub->hasStub(ICStub::Compare_ObjectWithUndefined))
        {
            IonSpew(IonSpew_BaselineIC, "  Generating %s(Obj/Null/Undef, Obj/Null/Undef) stub",
                    js_CodeName[op]);
            bool lhsIsUndefined = lhs.isNull() || lhs.isUndefined();
            bool compareWithNull = lhs.isNull() || rhs.isNull();
            ICCompare_ObjectWithUndefined::Compiler compiler(cx, op,
                                                             lhsIsUndefined, compareWithNull);
            ICStub *objectStub = compiler.getStub(compiler.getStubSpace(script));
            if (!objectStub)
                return false;

            stub->addNewStub(objectStub);
            return true;
        }
    }

    return true;
}
  1.1967 +
// VM-function wrapper for the Compare fallback; PopValues(2) discards the
// two operand values synced on the stack for the expression decompiler.
typedef bool (*DoCompareFallbackFn)(JSContext *, BaselineFrame *, ICCompare_Fallback *,
                                    HandleValue, HandleValue, MutableHandleValue);
static const VMFunction DoCompareFallbackInfo =
    FunctionInfo<DoCompareFallbackFn>(DoCompareFallback, PopValues(2));
  1.1972 +
// Emit the Compare fallback stub: sync both operands to the stack (for the
// expression decompiler), then tail-call DoCompareFallback with the operands
// pushed in reverse order of the C++ signature.
bool
ICCompare_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(R0 == JSReturnOperand);

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    // Ensure stack is fully synced for the expression decompiler.
    masm.pushValue(R0);
    masm.pushValue(R1);

    // Push arguments.
    masm.pushValue(R1);
    masm.pushValue(R0);
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
    return tailCallVM(DoCompareFallbackInfo, masm);
}
  1.1992 +
  1.1993 +//
  1.1994 +// Compare_String
  1.1995 +//
  1.1996 +
// Emit an optimized stub for string (in)equality: guard both operands are
// strings, then attempt an inline comparison via masm.compareStrings.  If
// registers are scarce, BaselineStubReg is saved on the stack and reused as
// the second scratch, then restored on every exit path that needs it.
bool
ICCompare_String::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestString(Assembler::NotEqual, R0, &failure);
    masm.branchTestString(Assembler::NotEqual, R1, &failure);

    // Only attached for == / != / === / !== (see DoCompareFallback).
    JS_ASSERT(IsEqualityOp(op));

    Register left = masm.extractString(R0, ExtractTemp0);
    Register right = masm.extractString(R1, ExtractTemp1);

    GeneralRegisterSet regs(availableGeneralRegs(2));
    Register scratchReg = regs.takeAny();
    // x86 doesn't have the luxury of a second scratch.
    Register scratchReg2;
    if (regs.empty()) {
        scratchReg2 = BaselineStubReg;
        masm.push(BaselineStubReg);
    } else {
        scratchReg2 = regs.takeAny();
    }
    JS_ASSERT(scratchReg2 != scratchReg);

    Label inlineCompareFailed;
    masm.compareStrings(op, left, right, scratchReg2, scratchReg, &inlineCompareFailed);
    masm.tagValue(JSVAL_TYPE_BOOLEAN, scratchReg2, R0);
    // Restore BaselineStubReg if it was borrowed as a scratch above.
    if (scratchReg2 == BaselineStubReg)
        masm.pop(BaselineStubReg);
    EmitReturnFromIC(masm);

    // Inline comparison couldn't decide (e.g. needs the slow path): restore
    // the stub register and fall through to the guard-failure path.
    masm.bind(&inlineCompareFailed);
    if (scratchReg2 == BaselineStubReg)
        masm.pop(BaselineStubReg);
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.2035 +
  1.2036 +//
  1.2037 +// Compare_Boolean
  1.2038 +//
  1.2039 +
// Emit an optimized stub comparing two booleans: guard both operands, then
// compare their int32 payloads directly with the condition derived from op.
bool
ICCompare_Boolean::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestBoolean(Assembler::NotEqual, R0, &failure);
    masm.branchTestBoolean(Assembler::NotEqual, R1, &failure);

    Register left = masm.extractInt32(R0, ExtractTemp0);
    Register right = masm.extractInt32(R1, ExtractTemp1);

    // Compare payload regs of R0 and R1.
    Assembler::Condition cond = JSOpToCondition(op, /* signed = */true);
    masm.cmp32Set(cond, left, right, left);

    // Box the result and return
    masm.tagValue(JSVAL_TYPE_BOOLEAN, left, R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.2063 +
  1.2064 +//
  1.2065 +// Compare_NumberWithUndefined
  1.2066 +//
  1.2067 +
// Emit an optimized stub for comparing a number against undefined.  The
// lhsIsUndefined flag (set at attach time) selects which operand register
// holds which value; the result is a constant depending only on op.
bool
ICCompare_NumberWithUndefined::Compiler::generateStubCode(MacroAssembler &masm)
{
    ValueOperand numberOperand, undefinedOperand;
    if (lhsIsUndefined) {
        numberOperand = R1;
        undefinedOperand = R0;
    } else {
        numberOperand = R0;
        undefinedOperand = R1;
    }

    Label failure;
    masm.branchTestNumber(Assembler::NotEqual, numberOperand, &failure);
    masm.branchTestUndefined(Assembler::NotEqual, undefinedOperand, &failure);

    // Comparing a number with undefined will always be true for NE/STRICTNE,
    // and always be false for other compare ops.
    masm.moveValue(BooleanValue(op == JSOP_NE || op == JSOP_STRICTNE), R0);

    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.2095 +
  1.2096 +//
  1.2097 +// Compare_Object
  1.2098 +//
  1.2099 +
// Emit an optimized stub for object (in)equality: guard both operands are
// objects, then compare their pointers under the condition derived from op.
bool
ICCompare_Object::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);
    masm.branchTestObject(Assembler::NotEqual, R1, &failure);

    // Only attached for == / != / === / !== (see DoCompareFallback).
    JS_ASSERT(IsEqualityOp(op));

    Register left = masm.extractObject(R0, ExtractTemp0);
    Register right = masm.extractObject(R1, ExtractTemp1);

    Label ifTrue;
    masm.branchPtr(JSOpToCondition(op, /* signed = */true), left, right, &ifTrue);

    masm.moveValue(BooleanValue(false), R0);
    EmitReturnFromIC(masm);

    masm.bind(&ifTrue);
    masm.moveValue(BooleanValue(true), R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.2127 +
  1.2128 +//
  1.2129 +// Compare_ObjectWithUndefined
  1.2130 +//
  1.2131 +
bool
ICCompare_ObjectWithUndefined::Compiler::generateStubCode(MacroAssembler &masm)
{
    // Emits the stub for comparing an object against undefined (or null,
    // per |compareWithNull|), with operand order fixed by |lhsIsUndefined|.
    JS_ASSERT(IsEqualityOp(op));

    // Select which value register holds the object and which holds the
    // undefined/null operand.
    ValueOperand objectOperand, undefinedOperand;
    if (lhsIsUndefined) {
        objectOperand = R1;
        undefinedOperand = R0;
    } else {
        objectOperand = R0;
        undefinedOperand = R1;
    }

    // Guard that the non-object operand has the type this stub was
    // specialized for.
    Label failure;
    if (compareWithNull)
        masm.branchTestNull(Assembler::NotEqual, undefinedOperand, &failure);
    else
        masm.branchTestUndefined(Assembler::NotEqual, undefinedOperand, &failure);

    Label notObject;
    masm.branchTestObject(Assembler::NotEqual, objectOperand, &notObject);

    if (op == JSOP_STRICTEQ || op == JSOP_STRICTNE) {
        // obj !== undefined for all objects.
        masm.moveValue(BooleanValue(op == JSOP_STRICTNE), R0);
        EmitReturnFromIC(masm);
    } else {
        // obj != undefined only where !obj->getClass()->emulatesUndefined()
        // Walk obj -> type -> class and test the class flag word.
        Label emulatesUndefined;
        Register obj = masm.extractObject(objectOperand, ExtractTemp0);
        masm.loadPtr(Address(obj, JSObject::offsetOfType()), obj);
        masm.loadPtr(Address(obj, types::TypeObject::offsetOfClasp()), obj);
        masm.branchTest32(Assembler::NonZero,
                          Address(obj, Class::offsetOfFlags()),
                          Imm32(JSCLASS_EMULATES_UNDEFINED),
                          &emulatesUndefined);
        masm.moveValue(BooleanValue(op == JSOP_NE), R0);
        EmitReturnFromIC(masm);
        masm.bind(&emulatesUndefined);
        masm.moveValue(BooleanValue(op == JSOP_EQ), R0);
        EmitReturnFromIC(masm);
    }

    masm.bind(&notObject);

    // Also support null == null or undefined == undefined comparisons.
    if (compareWithNull)
        masm.branchTestNull(Assembler::NotEqual, objectOperand, &failure);
    else
        masm.branchTestUndefined(Assembler::NotEqual, objectOperand, &failure);

    masm.moveValue(BooleanValue(op == JSOP_STRICTEQ || op == JSOP_EQ), R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.2192 +
  1.2193 +//
  1.2194 +// Compare_Int32WithBoolean
  1.2195 +//
  1.2196 +
  1.2197 +bool
  1.2198 +ICCompare_Int32WithBoolean::Compiler::generateStubCode(MacroAssembler &masm)
  1.2199 +{
  1.2200 +    Label failure;
  1.2201 +    ValueOperand int32Val;
  1.2202 +    ValueOperand boolVal;
  1.2203 +    if (lhsIsInt32_) {
  1.2204 +        int32Val = R0;
  1.2205 +        boolVal = R1;
  1.2206 +    } else {
  1.2207 +        boolVal = R0;
  1.2208 +        int32Val = R1;
  1.2209 +    }
  1.2210 +    masm.branchTestBoolean(Assembler::NotEqual, boolVal, &failure);
  1.2211 +    masm.branchTestInt32(Assembler::NotEqual, int32Val, &failure);
  1.2212 +
  1.2213 +    if (op_ == JSOP_STRICTEQ || op_ == JSOP_STRICTNE) {
  1.2214 +        // Ints and booleans are never strictly equal, always strictly not equal.
  1.2215 +        masm.moveValue(BooleanValue(op_ == JSOP_STRICTNE), R0);
  1.2216 +        EmitReturnFromIC(masm);
  1.2217 +    } else {
  1.2218 +        Register boolReg = masm.extractBoolean(boolVal, ExtractTemp0);
  1.2219 +        Register int32Reg = masm.extractInt32(int32Val, ExtractTemp1);
  1.2220 +
  1.2221 +        // Compare payload regs of R0 and R1.
  1.2222 +        Assembler::Condition cond = JSOpToCondition(op_, /* signed = */true);
  1.2223 +        masm.cmp32Set(cond, (lhsIsInt32_ ? int32Reg : boolReg),
  1.2224 +                      (lhsIsInt32_ ? boolReg : int32Reg), R0.scratchReg());
  1.2225 +
  1.2226 +        // Box the result and return
  1.2227 +        masm.tagValue(JSVAL_TYPE_BOOLEAN, R0.scratchReg(), R0);
  1.2228 +        EmitReturnFromIC(masm);
  1.2229 +    }
  1.2230 +
  1.2231 +    // Failure case - jump to next stub
  1.2232 +    masm.bind(&failure);
  1.2233 +    EmitStubGuardFailure(masm);
  1.2234 +    return true;
  1.2235 +}
  1.2236 +
  1.2237 +//
  1.2238 +// ToBool_Fallback
  1.2239 +//
  1.2240 +
  1.2241 +static bool
  1.2242 +DoToBoolFallback(JSContext *cx, BaselineFrame *frame, ICToBool_Fallback *stub, HandleValue arg,
  1.2243 +                 MutableHandleValue ret)
  1.2244 +{
  1.2245 +    FallbackICSpew(cx, stub, "ToBool");
  1.2246 +
  1.2247 +    bool cond = ToBoolean(arg);
  1.2248 +    ret.setBoolean(cond);
  1.2249 +
  1.2250 +    // Check to see if a new stub should be generated.
  1.2251 +    if (stub->numOptimizedStubs() >= ICToBool_Fallback::MAX_OPTIMIZED_STUBS) {
  1.2252 +        // TODO: Discard all stubs in this IC and replace with inert megamorphic stub.
  1.2253 +        // But for now we just bail.
  1.2254 +        return true;
  1.2255 +    }
  1.2256 +
  1.2257 +    JS_ASSERT(!arg.isBoolean());
  1.2258 +
  1.2259 +    JSScript *script = frame->script();
  1.2260 +
  1.2261 +    // Try to generate new stubs.
  1.2262 +    if (arg.isInt32()) {
  1.2263 +        IonSpew(IonSpew_BaselineIC, "  Generating ToBool(Int32) stub.");
  1.2264 +        ICToBool_Int32::Compiler compiler(cx);
  1.2265 +        ICStub *int32Stub = compiler.getStub(compiler.getStubSpace(script));
  1.2266 +        if (!int32Stub)
  1.2267 +            return false;
  1.2268 +
  1.2269 +        stub->addNewStub(int32Stub);
  1.2270 +        return true;
  1.2271 +    }
  1.2272 +
  1.2273 +    if (arg.isDouble() && cx->runtime()->jitSupportsFloatingPoint) {
  1.2274 +        IonSpew(IonSpew_BaselineIC, "  Generating ToBool(Double) stub.");
  1.2275 +        ICToBool_Double::Compiler compiler(cx);
  1.2276 +        ICStub *doubleStub = compiler.getStub(compiler.getStubSpace(script));
  1.2277 +        if (!doubleStub)
  1.2278 +            return false;
  1.2279 +
  1.2280 +        stub->addNewStub(doubleStub);
  1.2281 +        return true;
  1.2282 +    }
  1.2283 +
  1.2284 +    if (arg.isString()) {
  1.2285 +        IonSpew(IonSpew_BaselineIC, "  Generating ToBool(String) stub");
  1.2286 +        ICToBool_String::Compiler compiler(cx);
  1.2287 +        ICStub *stringStub = compiler.getStub(compiler.getStubSpace(script));
  1.2288 +        if (!stringStub)
  1.2289 +            return false;
  1.2290 +
  1.2291 +        stub->addNewStub(stringStub);
  1.2292 +        return true;
  1.2293 +    }
  1.2294 +
  1.2295 +    if (arg.isNull() || arg.isUndefined()) {
  1.2296 +        ICToBool_NullUndefined::Compiler compiler(cx);
  1.2297 +        ICStub *nilStub = compiler.getStub(compiler.getStubSpace(script));
  1.2298 +        if (!nilStub)
  1.2299 +            return false;
  1.2300 +
  1.2301 +        stub->addNewStub(nilStub);
  1.2302 +        return true;
  1.2303 +    }
  1.2304 +
  1.2305 +    if (arg.isObject()) {
  1.2306 +        IonSpew(IonSpew_BaselineIC, "  Generating ToBool(Object) stub.");
  1.2307 +        ICToBool_Object::Compiler compiler(cx);
  1.2308 +        ICStub *objStub = compiler.getStub(compiler.getStubSpace(script));
  1.2309 +        if (!objStub)
  1.2310 +            return false;
  1.2311 +
  1.2312 +        stub->addNewStub(objStub);
  1.2313 +        return true;
  1.2314 +    }
  1.2315 +
  1.2316 +    return true;
  1.2317 +}
  1.2318 +
  1.2319 +typedef bool (*pf)(JSContext *, BaselineFrame *, ICToBool_Fallback *, HandleValue,
  1.2320 +                   MutableHandleValue);
  1.2321 +static const VMFunction fun = FunctionInfo<pf>(DoToBoolFallback);
  1.2322 +
  1.2323 +bool
  1.2324 +ICToBool_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
  1.2325 +{
  1.2326 +    JS_ASSERT(R0 == JSReturnOperand);
  1.2327 +
  1.2328 +    // Restore the tail call register.
  1.2329 +    EmitRestoreTailCallReg(masm);
  1.2330 +
  1.2331 +    // Push arguments.
  1.2332 +    masm.pushValue(R0);
  1.2333 +    masm.push(BaselineStubReg);
  1.2334 +    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
  1.2335 +
  1.2336 +    return tailCallVM(fun, masm);
  1.2337 +}
  1.2338 +
  1.2339 +//
  1.2340 +// ToBool_Int32
  1.2341 +//
  1.2342 +
  1.2343 +bool
  1.2344 +ICToBool_Int32::Compiler::generateStubCode(MacroAssembler &masm)
  1.2345 +{
  1.2346 +    Label failure;
  1.2347 +    masm.branchTestInt32(Assembler::NotEqual, R0, &failure);
  1.2348 +
  1.2349 +    Label ifFalse;
  1.2350 +    masm.branchTestInt32Truthy(false, R0, &ifFalse);
  1.2351 +
  1.2352 +    masm.moveValue(BooleanValue(true), R0);
  1.2353 +    EmitReturnFromIC(masm);
  1.2354 +
  1.2355 +    masm.bind(&ifFalse);
  1.2356 +    masm.moveValue(BooleanValue(false), R0);
  1.2357 +    EmitReturnFromIC(masm);
  1.2358 +
  1.2359 +    // Failure case - jump to next stub
  1.2360 +    masm.bind(&failure);
  1.2361 +    EmitStubGuardFailure(masm);
  1.2362 +    return true;
  1.2363 +}
  1.2364 +
  1.2365 +//
  1.2366 +// ToBool_String
  1.2367 +//
  1.2368 +
  1.2369 +bool
  1.2370 +ICToBool_String::Compiler::generateStubCode(MacroAssembler &masm)
  1.2371 +{
  1.2372 +    Label failure;
  1.2373 +    masm.branchTestString(Assembler::NotEqual, R0, &failure);
  1.2374 +
  1.2375 +    Label ifFalse;
  1.2376 +    masm.branchTestStringTruthy(false, R0, &ifFalse);
  1.2377 +
  1.2378 +    masm.moveValue(BooleanValue(true), R0);
  1.2379 +    EmitReturnFromIC(masm);
  1.2380 +
  1.2381 +    masm.bind(&ifFalse);
  1.2382 +    masm.moveValue(BooleanValue(false), R0);
  1.2383 +    EmitReturnFromIC(masm);
  1.2384 +
  1.2385 +    // Failure case - jump to next stub
  1.2386 +    masm.bind(&failure);
  1.2387 +    EmitStubGuardFailure(masm);
  1.2388 +    return true;
  1.2389 +}
  1.2390 +
  1.2391 +//
  1.2392 +// ToBool_NullUndefined
  1.2393 +//
  1.2394 +
  1.2395 +bool
  1.2396 +ICToBool_NullUndefined::Compiler::generateStubCode(MacroAssembler &masm)
  1.2397 +{
  1.2398 +    Label failure, ifFalse;
  1.2399 +    masm.branchTestNull(Assembler::Equal, R0, &ifFalse);
  1.2400 +    masm.branchTestUndefined(Assembler::NotEqual, R0, &failure);
  1.2401 +
  1.2402 +    masm.bind(&ifFalse);
  1.2403 +    masm.moveValue(BooleanValue(false), R0);
  1.2404 +    EmitReturnFromIC(masm);
  1.2405 +
  1.2406 +    // Failure case - jump to next stub
  1.2407 +    masm.bind(&failure);
  1.2408 +    EmitStubGuardFailure(masm);
  1.2409 +    return true;
  1.2410 +}
  1.2411 +
  1.2412 +//
  1.2413 +// ToBool_Double
  1.2414 +//
  1.2415 +
  1.2416 +bool
  1.2417 +ICToBool_Double::Compiler::generateStubCode(MacroAssembler &masm)
  1.2418 +{
  1.2419 +    Label failure, ifTrue;
  1.2420 +    masm.branchTestDouble(Assembler::NotEqual, R0, &failure);
  1.2421 +    masm.unboxDouble(R0, FloatReg0);
  1.2422 +    masm.branchTestDoubleTruthy(true, FloatReg0, &ifTrue);
  1.2423 +
  1.2424 +    masm.moveValue(BooleanValue(false), R0);
  1.2425 +    EmitReturnFromIC(masm);
  1.2426 +
  1.2427 +    masm.bind(&ifTrue);
  1.2428 +    masm.moveValue(BooleanValue(true), R0);
  1.2429 +    EmitReturnFromIC(masm);
  1.2430 +
  1.2431 +    // Failure case - jump to next stub
  1.2432 +    masm.bind(&failure);
  1.2433 +    EmitStubGuardFailure(masm);
  1.2434 +    return true;
  1.2435 +}
  1.2436 +
  1.2437 +//
  1.2438 +// ToBool_Object
  1.2439 +//
  1.2440 +
bool
ICToBool_Object::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure, ifFalse, slowPath;
    // Guard: operand must be an object.
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    Register objReg = masm.extractObject(R0, ExtractTemp0);
    Register scratch = R1.scratchReg();
    // Inline truthiness test: falls through when the object is truthy,
    // jumps to ifFalse when it is not, and takes slowPath when the answer
    // cannot be determined inline.
    masm.branchTestObjectTruthy(false, objReg, scratch, &slowPath, &ifFalse);

    // If object doesn't emulate undefined, it evaluates to true.
    masm.moveValue(BooleanValue(true), R0);
    EmitReturnFromIC(masm);

    masm.bind(&ifFalse);
    masm.moveValue(BooleanValue(false), R0);
    EmitReturnFromIC(masm);

    // Slow path: ask the VM whether the object emulates undefined.
    masm.bind(&slowPath);
    masm.setupUnalignedABICall(1, scratch);
    masm.passABIArg(objReg);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, js::EmulatesUndefined));
    masm.convertBoolToInt32(ReturnReg, ReturnReg);
    // Invert: an object that emulates undefined is falsy.
    masm.xor32(Imm32(1), ReturnReg);
    masm.tagValue(JSVAL_TYPE_BOOLEAN, ReturnReg, R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.2473 +
  1.2474 +//
  1.2475 +// ToNumber_Fallback
  1.2476 +//
  1.2477 +
static bool
DoToNumberFallback(JSContext *cx, ICToNumber_Fallback *stub, HandleValue arg, MutableHandleValue ret)
{
    // VM call target for the ToNumber fallback stub: delegate to the generic
    // ToNumber, writing the numeric result into ret. Unlike the other
    // fallbacks in this file, no optimized stubs are attached here.
    FallbackICSpew(cx, stub, "ToNumber");
    ret.set(arg);
    return ToNumber(cx, ret);
}
  1.2485 +
typedef bool (*DoToNumberFallbackFn)(JSContext *, ICToNumber_Fallback *, HandleValue, MutableHandleValue);
// PopValues(1): pop the extra value pushed below for decompiler sync once
// the VM call returns.
static const VMFunction DoToNumberFallbackInfo =
    FunctionInfo<DoToNumberFallbackFn>(DoToNumberFallback, PopValues(1));

bool
ICToNumber_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    // Tail-call into DoToNumberFallback with (stub, value).
    JS_ASSERT(R0 == JSReturnOperand);

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    // Ensure stack is fully synced for the expression decompiler.
    masm.pushValue(R0);

    // Push arguments.
    masm.pushValue(R0);
    masm.push(BaselineStubReg);

    return tailCallVM(DoToNumberFallbackInfo, masm);
}
  1.2507 +
  1.2508 +//
  1.2509 +// BinaryArith_Fallback
  1.2510 +//
  1.2511 +
  1.2512 +// Disable PGO (see bug 851490).
  1.2513 +#if defined(_MSC_VER)
  1.2514 +# pragma optimize("g", off)
  1.2515 +#endif
static bool
DoBinaryArithFallback(JSContext *cx, BaselineFrame *frame, ICBinaryArith_Fallback *stub_,
                      HandleValue lhs, HandleValue rhs, MutableHandleValue ret)
{
    // Fallback for binary arithmetic/bitwise ops: compute the result in C++,
    // then try to attach a stub specialized for the observed operand types.
    //
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICBinaryArith_Fallback *> stub(frame, stub_);

    RootedScript script(cx, frame->script());
    jsbytecode *pc = stub->icEntry()->pc(script);
    JSOp op = JSOp(*pc);
    FallbackICSpew(cx, stub, "BinaryArith(%s,%d,%d)", js_CodeName[op],
            int(lhs.isDouble() ? JSVAL_TYPE_DOUBLE : lhs.extractNonDoubleType()),
            int(rhs.isDouble() ? JSVAL_TYPE_DOUBLE : rhs.extractNonDoubleType()));

    // Don't pass lhs/rhs directly, we need the original values when
    // generating stubs.
    RootedValue lhsCopy(cx, lhs);
    RootedValue rhsCopy(cx, rhs);

    // Perform the arithmetic operation.
    switch(op) {
      case JSOP_ADD:
        // Do an add.
        if (!AddValues(cx, &lhsCopy, &rhsCopy, ret))
            return false;
        break;
      case JSOP_SUB:
        if (!SubValues(cx, &lhsCopy, &rhsCopy, ret))
            return false;
        break;
      case JSOP_MUL:
        if (!MulValues(cx, &lhsCopy, &rhsCopy, ret))
            return false;
        break;
      case JSOP_DIV:
        if (!DivValues(cx, &lhsCopy, &rhsCopy, ret))
            return false;
        break;
      case JSOP_MOD:
        if (!ModValues(cx, &lhsCopy, &rhsCopy, ret))
            return false;
        break;
      case JSOP_BITOR: {
        int32_t result;
        if (!BitOr(cx, lhs, rhs, &result))
            return false;
        ret.setInt32(result);
        break;
      }
      case JSOP_BITXOR: {
        int32_t result;
        if (!BitXor(cx, lhs, rhs, &result))
            return false;
        ret.setInt32(result);
        break;
      }
      case JSOP_BITAND: {
        int32_t result;
        if (!BitAnd(cx, lhs, rhs, &result))
            return false;
        ret.setInt32(result);
        break;
      }
      case JSOP_LSH: {
        int32_t result;
        if (!BitLsh(cx, lhs, rhs, &result))
            return false;
        ret.setInt32(result);
        break;
      }
      case JSOP_RSH: {
        int32_t result;
        if (!BitRsh(cx, lhs, rhs, &result))
            return false;
        ret.setInt32(result);
        break;
      }
      case JSOP_URSH: {
        // Unsigned right shift can produce a double (when the result exceeds
        // INT32_MAX), so it writes ret directly.
        if (!UrshOperation(cx, lhs, rhs, ret))
            return false;
        break;
      }
      default:
        MOZ_ASSUME_UNREACHABLE("Unhandled baseline arith op");
    }

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    if (ret.isDouble())
        stub->setSawDoubleResult();

    // Check to see if a new stub should be generated.
    if (stub->numOptimizedStubs() >= ICBinaryArith_Fallback::MAX_OPTIMIZED_STUBS) {
        stub->noteUnoptimizableOperands();
        return true;
    }

    // Handle string concat.
    if (op == JSOP_ADD) {
        if (lhs.isString() && rhs.isString()) {
            IonSpew(IonSpew_BaselineIC, "  Generating %s(String, String) stub", js_CodeName[op]);
            JS_ASSERT(ret.isString());
            ICBinaryArith_StringConcat::Compiler compiler(cx);
            ICStub *strcatStub = compiler.getStub(compiler.getStubSpace(script));
            if (!strcatStub)
                return false;
            stub->addNewStub(strcatStub);
            return true;
        }

        if ((lhs.isString() && rhs.isObject()) || (lhs.isObject() && rhs.isString())) {
            IonSpew(IonSpew_BaselineIC, "  Generating %s(%s, %s) stub", js_CodeName[op],
                    lhs.isString() ? "String" : "Object",
                    lhs.isString() ? "Object" : "String");
            JS_ASSERT(ret.isString());
            ICBinaryArith_StringObjectConcat::Compiler compiler(cx, lhs.isString());
            ICStub *strcatStub = compiler.getStub(compiler.getStubSpace(script));
            if (!strcatStub)
                return false;
            stub->addNewStub(strcatStub);
            return true;
        }
    }

    // Boolean mixed with boolean/int32, for the ops the specialized stub
    // supports.
    if (((lhs.isBoolean() && (rhs.isBoolean() || rhs.isInt32())) ||
         (rhs.isBoolean() && (lhs.isBoolean() || lhs.isInt32()))) &&
        (op == JSOP_ADD || op == JSOP_SUB || op == JSOP_BITOR || op == JSOP_BITAND ||
         op == JSOP_BITXOR))
    {
        IonSpew(IonSpew_BaselineIC, "  Generating %s(%s, %s) stub", js_CodeName[op],
                lhs.isBoolean() ? "Boolean" : "Int32", rhs.isBoolean() ? "Boolean" : "Int32");
        ICBinaryArith_BooleanWithInt32::Compiler compiler(cx, op, lhs.isBoolean(), rhs.isBoolean());
        ICStub *arithStub = compiler.getStub(compiler.getStubSpace(script));
        if (!arithStub)
            return false;
        stub->addNewStub(arithStub);
        return true;
    }

    // Handle only int32 or double.
    if (!lhs.isNumber() || !rhs.isNumber()) {
        stub->noteUnoptimizableOperands();
        return true;
    }

    JS_ASSERT(ret.isNumber());

    if (lhs.isDouble() || rhs.isDouble() || ret.isDouble()) {
        if (!cx->runtime()->jitSupportsFloatingPoint)
            return true;

        switch (op) {
          case JSOP_ADD:
          case JSOP_SUB:
          case JSOP_MUL:
          case JSOP_DIV:
          case JSOP_MOD: {
            // Unlink int32 stubs, it's faster to always use the double stub.
            stub->unlinkStubsWithKind(cx, ICStub::BinaryArith_Int32);
            IonSpew(IonSpew_BaselineIC, "  Generating %s(Double, Double) stub", js_CodeName[op]);

            ICBinaryArith_Double::Compiler compiler(cx, op);
            ICStub *doubleStub = compiler.getStub(compiler.getStubSpace(script));
            if (!doubleStub)
                return false;
            stub->addNewStub(doubleStub);
            return true;
          }
          default:
            break;
        }
    }

    if (lhs.isInt32() && rhs.isInt32()) {
        // allowDouble: an int32 x int32 op whose result overflowed to a
        // double (e.g. INT32_MAX + 1).
        bool allowDouble = ret.isDouble();
        if (allowDouble)
            stub->unlinkStubsWithKind(cx, ICStub::BinaryArith_Int32);
        IonSpew(IonSpew_BaselineIC, "  Generating %s(Int32, Int32%s) stub", js_CodeName[op],
                allowDouble ? " => Double" : "");
        ICBinaryArith_Int32::Compiler compilerInt32(cx, op, allowDouble);
        ICStub *int32Stub = compilerInt32.getStub(compilerInt32.getStubSpace(script));
        if (!int32Stub)
            return false;
        stub->addNewStub(int32Stub);
        return true;
    }

    // Handle Double <BITOP> Int32 or Int32 <BITOP> Double case.
    if (((lhs.isDouble() && rhs.isInt32()) || (lhs.isInt32() && rhs.isDouble())) &&
        ret.isInt32())
    {
        switch(op) {
          case JSOP_BITOR:
          case JSOP_BITXOR:
          case JSOP_BITAND: {
            IonSpew(IonSpew_BaselineIC, "  Generating %s(%s, %s) stub", js_CodeName[op],
                        lhs.isDouble() ? "Double" : "Int32",
                        lhs.isDouble() ? "Int32" : "Double");
            ICBinaryArith_DoubleWithInt32::Compiler compiler(cx, op, lhs.isDouble());
            ICStub *optStub = compiler.getStub(compiler.getStubSpace(script));
            if (!optStub)
                return false;
            stub->addNewStub(optStub);
            return true;
          }
          default:
            break;
        }
    }

    // No specialized stub applies to this operand combination.
    stub->noteUnoptimizableOperands();
    return true;
}
  1.2731 +#if defined(_MSC_VER)
  1.2732 +# pragma optimize("", on)
  1.2733 +#endif
  1.2734 +
typedef bool (*DoBinaryArithFallbackFn)(JSContext *, BaselineFrame *, ICBinaryArith_Fallback *,
                                        HandleValue, HandleValue, MutableHandleValue);
// PopValues(2): pop the two operand values synced below for the decompiler
// once the VM call returns.
static const VMFunction DoBinaryArithFallbackInfo =
    FunctionInfo<DoBinaryArithFallbackFn>(DoBinaryArithFallback, PopValues(2));

bool
ICBinaryArith_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    // Tail-call into DoBinaryArithFallback with (frame, stub, lhs, rhs).
    JS_ASSERT(R0 == JSReturnOperand);

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    // Ensure stack is fully synced for the expression decompiler.
    masm.pushValue(R0);
    masm.pushValue(R1);

    // Push arguments.
    masm.pushValue(R1);
    masm.pushValue(R0);
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(DoBinaryArithFallbackInfo, masm);
}
  1.2760 +
  1.2761 +static bool
  1.2762 +DoConcatStrings(JSContext *cx, HandleValue lhs, HandleValue rhs, MutableHandleValue res)
  1.2763 +{
  1.2764 +    JS_ASSERT(lhs.isString());
  1.2765 +    JS_ASSERT(rhs.isString());
  1.2766 +    JSString *lstr = lhs.toString();
  1.2767 +    JSString *rstr = rhs.toString();
  1.2768 +    JSString *result = ConcatStrings<NoGC>(cx, lstr, rstr);
  1.2769 +    if (result) {
  1.2770 +        res.set(StringValue(result));
  1.2771 +        return true;
  1.2772 +    }
  1.2773 +
  1.2774 +    RootedString rootedl(cx, lstr), rootedr(cx, rstr);
  1.2775 +    result = ConcatStrings<CanGC>(cx, rootedl, rootedr);
  1.2776 +    if (!result)
  1.2777 +        return false;
  1.2778 +
  1.2779 +    res.set(StringValue(result));
  1.2780 +    return true;
  1.2781 +}
  1.2782 +
typedef bool (*DoConcatStringsFn)(JSContext *, HandleValue, HandleValue, MutableHandleValue);
static const VMFunction DoConcatStringsInfo = FunctionInfo<DoConcatStringsFn>(DoConcatStrings);

bool
ICBinaryArith_StringConcat::Compiler::generateStubCode(MacroAssembler &masm)
{
    // Guard: both operands must be strings.
    Label failure;
    masm.branchTestString(Assembler::NotEqual, R0, &failure);
    masm.branchTestString(Assembler::NotEqual, R1, &failure);

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    // Push arguments and tail-call the concatenation VM function.
    masm.pushValue(R1);
    masm.pushValue(R0);
    if (!tailCallVM(DoConcatStringsInfo, masm))
        return false;

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.2806 +
  1.2807 +static JSString *
  1.2808 +ConvertObjectToStringForConcat(JSContext *cx, HandleValue obj)
  1.2809 +{
  1.2810 +    JS_ASSERT(obj.isObject());
  1.2811 +    RootedValue rootedObj(cx, obj);
  1.2812 +    if (!ToPrimitive(cx, &rootedObj))
  1.2813 +        return nullptr;
  1.2814 +    return ToString<CanGC>(cx, rootedObj);
  1.2815 +}
  1.2816 +
static bool
DoConcatStringObject(JSContext *cx, bool lhsIsString, HandleValue lhs, HandleValue rhs,
                     MutableHandleValue res)
{
    // Concatenate a string with an object: the object side is converted to
    // a string first (which can run arbitrary JS), then both strings are
    // concatenated.
    JSString *lstr = nullptr;
    JSString *rstr = nullptr;
    if (lhsIsString) {
        // Convert rhs first.
        JS_ASSERT(lhs.isString() && rhs.isObject());
        rstr = ConvertObjectToStringForConcat(cx, rhs);
        if (!rstr)
            return false;

        // lhs is already string.
        lstr = lhs.toString();
    } else {
        JS_ASSERT(rhs.isString() && lhs.isObject());
        // Convert lhs first.
        lstr = ConvertObjectToStringForConcat(cx, lhs);
        if (!lstr)
            return false;

        // rhs is already string.
        rstr = rhs.toString();
    }

    // Try a GC-free concat first; on failure, root the operands and retry
    // with GC allowed.
    JSString *str = ConcatStrings<NoGC>(cx, lstr, rstr);
    if (!str) {
        RootedString nlstr(cx, lstr), nrstr(cx, rstr);
        str = ConcatStrings<CanGC>(cx, nlstr, nrstr);
        if (!str)
            return false;
    }

    // Technically, we need to call TypeScript::MonitorString for this PC, however
    // it was called when this stub was attached so it's OK.

    res.setString(str);
    return true;
}
  1.2857 +
typedef bool (*DoConcatStringObjectFn)(JSContext *, bool lhsIsString, HandleValue, HandleValue,
                                       MutableHandleValue);
// PopValues(2): pop the two operand values synced below for the decompiler
// once the VM call returns.
static const VMFunction DoConcatStringObjectInfo =
    FunctionInfo<DoConcatStringObjectFn>(DoConcatStringObject, PopValues(2));

bool
ICBinaryArith_StringObjectConcat::Compiler::generateStubCode(MacroAssembler &masm)
{
    // Guard on the string/object operand order this stub was attached for.
    Label failure;
    if (lhsIsString_) {
        masm.branchTestString(Assembler::NotEqual, R0, &failure);
        masm.branchTestObject(Assembler::NotEqual, R1, &failure);
    } else {
        masm.branchTestObject(Assembler::NotEqual, R0, &failure);
        masm.branchTestString(Assembler::NotEqual, R1, &failure);
    }

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    // Sync for the decompiler.
    masm.pushValue(R0);
    masm.pushValue(R1);

    // Push arguments.
    masm.pushValue(R1);
    masm.pushValue(R0);
    masm.push(Imm32(lhsIsString_));
    if (!tailCallVM(DoConcatStringObjectInfo, masm))
        return false;

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.2894 +
bool
ICBinaryArith_Double::Compiler::generateStubCode(MacroAssembler &masm)
{
    // Guard: both operands must be (convertible to) doubles; unbox into
    // FloatReg0/FloatReg1.
    Label failure;
    masm.ensureDouble(R0, FloatReg0, &failure);
    masm.ensureDouble(R1, FloatReg1, &failure);

    switch (op) {
      case JSOP_ADD:
        masm.addDouble(FloatReg1, FloatReg0);
        break;
      case JSOP_SUB:
        masm.subDouble(FloatReg1, FloatReg0);
        break;
      case JSOP_MUL:
        masm.mulDouble(FloatReg1, FloatReg0);
        break;
      case JSOP_DIV:
        masm.divDouble(FloatReg1, FloatReg0);
        break;
      case JSOP_MOD:
        // No inline instruction for modulus; call NumberMod via the ABI.
        masm.setupUnalignedABICall(2, R0.scratchReg());
        masm.passABIArg(FloatReg0, MoveOp::DOUBLE);
        masm.passABIArg(FloatReg1, MoveOp::DOUBLE);
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, NumberMod), MoveOp::DOUBLE);
        // The ABI result must land where the boxDouble below expects it.
        JS_ASSERT(ReturnFloatReg == FloatReg0);
        break;
      default:
        MOZ_ASSUME_UNREACHABLE("Unexpected op");
    }

    // Box the double result and return it in R0.
    masm.boxDouble(FloatReg0, R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.2934 +
// Optimized stub for binary ops where each operand is either a boolean or an
// int32 (the lhsIsBool_/rhsIsBool_ flags record which).  Booleans are unboxed
// to 0/1 int32 payloads and the op is performed on int32 registers.  For
// ADD/SUB, an overflow undoes the operation (restoring lhsReg) and falls
// through to the failure path so the next stub in the chain sees the
// original operands.
bool
ICBinaryArith_BooleanWithInt32::Compiler::generateStubCode(MacroAssembler &masm)
{
    // Type guards: each operand must match the type this stub was built for.
    Label failure;
    if (lhsIsBool_)
        masm.branchTestBoolean(Assembler::NotEqual, R0, &failure);
    else
        masm.branchTestInt32(Assembler::NotEqual, R0, &failure);

    if (rhsIsBool_)
        masm.branchTestBoolean(Assembler::NotEqual, R1, &failure);
    else
        masm.branchTestInt32(Assembler::NotEqual, R1, &failure);

    // Unbox both payloads; a boolean payload is already a 0/1 int32.
    Register lhsReg = lhsIsBool_ ? masm.extractBoolean(R0, ExtractTemp0)
                                 : masm.extractInt32(R0, ExtractTemp0);
    Register rhsReg = rhsIsBool_ ? masm.extractBoolean(R1, ExtractTemp1)
                                 : masm.extractInt32(R1, ExtractTemp1);

    JS_ASSERT(op_ == JSOP_ADD || op_ == JSOP_SUB ||
              op_ == JSOP_BITOR || op_ == JSOP_BITXOR || op_ == JSOP_BITAND);

    switch(op_) {
      case JSOP_ADD: {
        Label fixOverflow;

        masm.branchAdd32(Assembler::Overflow, rhsReg, lhsReg, &fixOverflow);
        masm.tagValue(JSVAL_TYPE_INT32, lhsReg, R0);
        EmitReturnFromIC(masm);

        // Overflow: undo the add so lhsReg holds its original value again.
        masm.bind(&fixOverflow);
        masm.sub32(rhsReg, lhsReg);
        // Proceed to failure below.
        break;
      }
      case JSOP_SUB: {
        Label fixOverflow;

        masm.branchSub32(Assembler::Overflow, rhsReg, lhsReg, &fixOverflow);
        masm.tagValue(JSVAL_TYPE_INT32, lhsReg, R0);
        EmitReturnFromIC(masm);

        // Overflow: undo the sub so lhsReg holds its original value again.
        masm.bind(&fixOverflow);
        masm.add32(rhsReg, lhsReg);
        // Proceed to failure below.
        break;
      }
      case JSOP_BITOR: {
        masm.orPtr(rhsReg, lhsReg);
        masm.tagValue(JSVAL_TYPE_INT32, lhsReg, R0);
        EmitReturnFromIC(masm);
        break;
      }
      case JSOP_BITXOR: {
        masm.xorPtr(rhsReg, lhsReg);
        masm.tagValue(JSVAL_TYPE_INT32, lhsReg, R0);
        EmitReturnFromIC(masm);
        break;
      }
      case JSOP_BITAND: {
        masm.andPtr(rhsReg, lhsReg);
        masm.tagValue(JSVAL_TYPE_INT32, lhsReg, R0);
        EmitReturnFromIC(masm);
        break;
      }
      default:
       MOZ_ASSUME_UNREACHABLE("Unhandled op for BinaryArith_BooleanWithInt32.");
    }

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.3009 +
// Optimized stub for bitwise ops (BITOR/BITAND/BITXOR) where one operand is a
// double and the other an int32 (lhsIsDouble_ records which side is which).
// The double is truncated to int32 — inline via branchTruncateDouble when
// possible, otherwise through an ABI call to js::ToInt32 — and the op is then
// performed on int32 registers.
bool
ICBinaryArith_DoubleWithInt32::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(op == JSOP_BITOR || op == JSOP_BITAND || op == JSOP_BITXOR);

    // Guard operand types and unbox: the double goes to FloatReg0, the int32
    // payload to intReg.  scratchReg is taken from the Value register that
    // held the double, which is dead after the unbox.
    Label failure;
    Register intReg;
    Register scratchReg;
    if (lhsIsDouble_) {
        masm.branchTestDouble(Assembler::NotEqual, R0, &failure);
        masm.branchTestInt32(Assembler::NotEqual, R1, &failure);
        intReg = masm.extractInt32(R1, ExtractTemp0);
        masm.unboxDouble(R0, FloatReg0);
        scratchReg = R0.scratchReg();
    } else {
        masm.branchTestInt32(Assembler::NotEqual, R0, &failure);
        masm.branchTestDouble(Assembler::NotEqual, R1, &failure);
        intReg = masm.extractInt32(R0, ExtractTemp0);
        masm.unboxDouble(R1, FloatReg0);
        scratchReg = R1.scratchReg();
    }

    // Truncate the double to an int32.
    {
        Label doneTruncate;
        Label truncateABICall;
        masm.branchTruncateDouble(FloatReg0, scratchReg, &truncateABICall);
        masm.jump(&doneTruncate);

        // Slow path: the call may clobber intReg, so preserve it on the stack.
        masm.bind(&truncateABICall);
        masm.push(intReg);
        masm.setupUnalignedABICall(1, scratchReg);
        masm.passABIArg(FloatReg0, MoveOp::DOUBLE);
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, js::ToInt32));
        masm.storeCallResult(scratchReg);
        masm.pop(intReg);

        masm.bind(&doneTruncate);
    }

    Register intReg2 = scratchReg;
    // All handled ops commute, so no need to worry about ordering.
    switch(op) {
      case JSOP_BITOR:
        masm.orPtr(intReg, intReg2);
        break;
      case JSOP_BITXOR:
        masm.xorPtr(intReg, intReg2);
        break;
      case JSOP_BITAND:
        masm.andPtr(intReg, intReg2);
        break;
      default:
       MOZ_ASSUME_UNREACHABLE("Unhandled op for BinaryArith_DoubleWithInt32.");
    }
    masm.tagValue(JSVAL_TYPE_INT32, intReg2, R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.3073 +
  1.3074 +//
  1.3075 +// UnaryArith_Fallback
  1.3076 +//
  1.3077 +
// Disable PGO (see bug 851490).
#if defined(_MSC_VER)
# pragma optimize("g", off)
#endif
// Fallback for unary arithmetic ops (JSOP_BITNOT / JSOP_NEG).  Performs the
// operation through the VM, then tries to attach an optimized stub:
// Int32 => Int32 when both operand and result are int32, otherwise a Double
// stub when both are numbers (the double stub subsumes existing int32 stubs,
// which are unlinked first).
static bool
DoUnaryArithFallback(JSContext *cx, BaselineFrame *frame, ICUnaryArith_Fallback *stub_,
                     HandleValue val, MutableHandleValue res)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICUnaryArith_Fallback *> stub(frame, stub_);

    RootedScript script(cx, frame->script());
    jsbytecode *pc = stub->icEntry()->pc(script);
    JSOp op = JSOp(*pc);
    FallbackICSpew(cx, stub, "UnaryArith(%s)", js_CodeName[op]);

    // Perform the actual operation via the VM.
    switch (op) {
      case JSOP_BITNOT: {
        int32_t result;
        if (!BitNot(cx, val, &result))
            return false;
        res.setInt32(result);
        break;
      }
      case JSOP_NEG:
        if (!NegOperation(cx, script, pc, val, res))
            return false;
        break;
      default:
        MOZ_ASSUME_UNREACHABLE("Unexpected op");
    }

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    if (res.isDouble())
        stub->setSawDoubleResult();

    if (stub->numOptimizedStubs() >= ICUnaryArith_Fallback::MAX_OPTIMIZED_STUBS) {
        // TODO: Discard/replace stubs.
        return true;
    }

    if (val.isInt32() && res.isInt32()) {
        IonSpew(IonSpew_BaselineIC, "  Generating %s(Int32 => Int32) stub", js_CodeName[op]);
        ICUnaryArith_Int32::Compiler compiler(cx, op);
        ICStub *int32Stub = compiler.getStub(compiler.getStubSpace(script));
        if (!int32Stub)
            return false;
        stub->addNewStub(int32Stub);
        return true;
    }

    if (val.isNumber() && res.isNumber() && cx->runtime()->jitSupportsFloatingPoint) {
        IonSpew(IonSpew_BaselineIC, "  Generating %s(Number => Number) stub", js_CodeName[op]);

        // Unlink int32 stubs, the double stub handles both cases and TI specializes for both.
        stub->unlinkStubsWithKind(cx, ICStub::UnaryArith_Int32);

        ICUnaryArith_Double::Compiler compiler(cx, op);
        ICStub *doubleStub = compiler.getStub(compiler.getStubSpace(script));
        if (!doubleStub)
            return false;
        stub->addNewStub(doubleStub);
        return true;
    }

    // No stub attached; the fallback keeps handling this case.
    return true;
}
#if defined(_MSC_VER)
# pragma optimize("", on)
#endif
  1.3151 +
// VMFunction wrapper for tail-calling DoUnaryArithFallback from the stub.
// PopValues(1) discards the extra copy of the operand that generateStubCode
// pushes to keep the stack synced for the expression decompiler.
typedef bool (*DoUnaryArithFallbackFn)(JSContext *, BaselineFrame *, ICUnaryArith_Fallback *,
                                       HandleValue, MutableHandleValue);
static const VMFunction DoUnaryArithFallbackInfo =
    FunctionInfo<DoUnaryArithFallbackFn>(DoUnaryArithFallback, PopValues(1));
  1.3156 +
// Fallback stub: sync the operand to the stack for the expression decompiler,
// push the VM call arguments (in reverse order), and tail-call into
// DoUnaryArithFallback.
bool
ICUnaryArith_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(R0 == JSReturnOperand);

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    // Ensure stack is fully synced for the expression decompiler.
    masm.pushValue(R0);

    // Push arguments.
    masm.pushValue(R0);
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(DoUnaryArithFallbackInfo, masm);
}
  1.3175 +
// Optimized stub for JSOP_NEG / JSOP_BITNOT on a double operand.  NEG negates
// FloatReg0 in place and reboxes; BITNOT truncates the double to an int32
// (inline, or via an ABI call to js::ToInt32 on the slow path) and then
// applies not32.
bool
ICUnaryArith_Double::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.ensureDouble(R0, FloatReg0, &failure);

    JS_ASSERT(op == JSOP_NEG || op == JSOP_BITNOT);

    if (op == JSOP_NEG) {
        masm.negateDouble(FloatReg0);
        masm.boxDouble(FloatReg0, R0);
    } else {
        // Truncate the double to an int32.
        Register scratchReg = R1.scratchReg();

        Label doneTruncate;
        Label truncateABICall;
        masm.branchTruncateDouble(FloatReg0, scratchReg, &truncateABICall);
        masm.jump(&doneTruncate);

        masm.bind(&truncateABICall);
        masm.setupUnalignedABICall(1, scratchReg);
        masm.passABIArg(FloatReg0, MoveOp::DOUBLE);
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, js::ToInt32));
        masm.storeCallResult(scratchReg);

        masm.bind(&doneTruncate);
        masm.not32(scratchReg);
        masm.tagValue(JSVAL_TYPE_INT32, scratchReg, R0);
    }

    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.3214 +
  1.3215 +//
  1.3216 +// GetElem_Fallback
  1.3217 +//
  1.3218 +
  1.3219 +static void GetFixedOrDynamicSlotOffset(HandleObject obj, uint32_t slot,
  1.3220 +                                        bool *isFixed, uint32_t *offset)
  1.3221 +{
  1.3222 +    JS_ASSERT(isFixed);
  1.3223 +    JS_ASSERT(offset);
  1.3224 +    *isFixed = obj->isFixedSlot(slot);
  1.3225 +    *offset = *isFixed ? JSObject::getFixedSlotOffset(slot)
  1.3226 +                       : obj->dynamicSlotIndex(slot) * sizeof(Value);
  1.3227 +}
  1.3228 +
  1.3229 +static bool
  1.3230 +IsCacheableDOMProxy(JSObject *obj)
  1.3231 +{
  1.3232 +    if (!obj->is<ProxyObject>())
  1.3233 +        return false;
  1.3234 +
  1.3235 +    BaseProxyHandler *handler = obj->as<ProxyObject>().handler();
  1.3236 +
  1.3237 +    if (handler->family() != GetDOMProxyHandlerFamily())
  1.3238 +        return false;
  1.3239 +
  1.3240 +    if (obj->numFixedSlots() <= GetDOMProxyExpandoSlot())
  1.3241 +        return false;
  1.3242 +
  1.3243 +    return true;
  1.3244 +}
  1.3245 +
// Return the prototype of a cacheable DOM proxy (may be null).
static JSObject *
GetDOMProxyProto(JSObject *obj)
{
    JS_ASSERT(IsCacheableDOMProxy(obj));
    return obj->getTaggedProto().toObjectOrNull();
}
  1.3252 +
// Emit the guards for a DOM-proxy receiver.  Stub-field addresses:
//   - checkProxyHandlerAddr: the expected proxy handler pointer.
//   - checkExpandoShapeAddr: expected expando shape, or null pointer arg to
//     skip the expando check entirely.
//   - expandoAndGenerationAddr/generationAddr: used together when the expando
//     is stored behind an ExpandoAndGeneration (DoesntShadowUnique case).
// On guard failure, control flows to |checkFailed| with all registers
// restored.
static void
GenerateDOMProxyChecks(JSContext *cx, MacroAssembler &masm, Register object,
                       Address checkProxyHandlerAddr,
                       Address *checkExpandoShapeAddr,
                       Address *expandoAndGenerationAddr,
                       Address *generationAddr,
                       Register scratch,
                       GeneralRegisterSet &domProxyRegSet,
                       Label *checkFailed)
{
    // Guard the following:
    //      1. The object is a DOMProxy.
    //      2. The object does not have expando properties, or has an expando
    //          which is known to not have the desired property.
    Address handlerAddr(object, ProxyObject::offsetOfHandler());
    Address expandoAddr(object, JSObject::getFixedSlotOffset(GetDOMProxyExpandoSlot()));

    // Check that object is a DOMProxy.
    masm.loadPtr(checkProxyHandlerAddr, scratch);
    masm.branchPrivatePtr(Assembler::NotEqual, handlerAddr, scratch, checkFailed);

    // At this point, if not checking for an expando object, just return.
    if (!checkExpandoShapeAddr)
        return;

    // For the remaining code, we need to reserve some registers to load a value.
    // This is ugly, but unavoidable.
    ValueOperand tempVal = domProxyRegSet.takeAnyValue();
    masm.pushValue(tempVal);

    Label failDOMProxyCheck;
    Label domProxyOk;

    if (expandoAndGenerationAddr) {
        JS_ASSERT(generationAddr);

        // Guard that the proxy's expando slot still points at the same
        // ExpandoAndGeneration, and that its generation count is unchanged.
        masm.loadPtr(*expandoAndGenerationAddr, tempVal.scratchReg());
        masm.branchPrivatePtr(Assembler::NotEqual, expandoAddr, tempVal.scratchReg(),
                              &failDOMProxyCheck);

        masm.load32(*generationAddr, scratch);
        masm.branch32(Assembler::NotEqual,
                      Address(tempVal.scratchReg(), offsetof(ExpandoAndGeneration, generation)),
                      scratch, &failDOMProxyCheck);

        masm.loadValue(Address(tempVal.scratchReg(), 0), tempVal);
    } else {
        masm.loadValue(expandoAddr, tempVal);
    }

    // If the incoming object does not have an expando object then we're sure we're not
    // shadowing.
    masm.branchTestUndefined(Assembler::Equal, tempVal, &domProxyOk);

    // The reference object used to generate this check may not have had an
    // expando object at all, in which case the presence of a non-undefined
    // expando value in the incoming object is automatically a failure.
    masm.loadPtr(*checkExpandoShapeAddr, scratch);
    masm.branchPtr(Assembler::Equal, scratch, ImmPtr(nullptr), &failDOMProxyCheck);

    // Otherwise, ensure that the incoming object has an object for its expando value and that
    // the shape matches.
    masm.branchTestObject(Assembler::NotEqual, tempVal, &failDOMProxyCheck);
    Register objReg = masm.extractObject(tempVal, tempVal.scratchReg());
    masm.branchTestObjShape(Assembler::Equal, objReg, scratch, &domProxyOk);

    // Failure case: restore the tempVal registers and jump to failures.
    masm.bind(&failDOMProxyCheck);
    masm.popValue(tempVal);
    masm.jump(checkFailed);

    // Success case: restore the tempval and proceed.
    masm.bind(&domProxyOk);
    masm.popValue(tempVal);
}
  1.3328 +
  1.3329 +// Look up a property's shape on an object, being careful never to do any effectful
  1.3330 +// operations.  This procedure not yielding a shape should not be taken as a lack of
  1.3331 +// existence of the property on the object.
// Look up |name| on |obj| without side effects.  On success, |holder|/|shape|
// are set when the property was found (both remain null when nothing was
// found — which does NOT prove the property is absent).  The optional
// DOM-proxy out-params report whether obj is a cacheable DOM proxy, the
// shadowing state of its expando, and whether the expando uses a generation
// counter.  Returns false only on error (e.g. a failed shadows check).
static bool
EffectlesslyLookupProperty(JSContext *cx, HandleObject obj, HandlePropertyName name,
                           MutableHandleObject holder, MutableHandleShape shape,
                           bool *checkDOMProxy=nullptr,
                           DOMProxyShadowsResult *shadowsResult=nullptr,
                           bool *domProxyHasGeneration=nullptr)
{
    // Out-params default to "nothing found".
    shape.set(nullptr);
    holder.set(nullptr);

    if (checkDOMProxy)
        *checkDOMProxy = false;

    // Check for list base if asked to.
    RootedObject checkObj(cx, obj);
    if (checkDOMProxy && IsCacheableDOMProxy(obj)) {
        JS_ASSERT(domProxyHasGeneration);
        JS_ASSERT(shadowsResult);

        *checkDOMProxy = true;
        if (obj->hasUncacheableProto())
            return true;

        RootedId id(cx, NameToId(name));
        *shadowsResult = GetDOMProxyShadowsCheck()(cx, obj, id);
        if (*shadowsResult == ShadowCheckFailed)
            return false;

        // If the expando shadows the property, the proxy itself is the holder
        // and no prototype walk is needed.
        if (*shadowsResult == Shadows) {
            holder.set(obj);
            return true;
        }

        *domProxyHasGeneration = (*shadowsResult == DoesntShadowUnique);

        // Continue the lookup on the proxy's prototype.
        checkObj = GetDOMProxyProto(obj);
        if (!checkObj)
            return true;
    } else if (!obj->isNative()) {
        return true;
    }

    // An idempotent proto chain makes a full lookup effect-free; otherwise
    // restrict ourselves to a pure native lookup on checkObj itself.
    if (checkObj->hasIdempotentProtoChain()) {
        if (!JSObject::lookupProperty(cx, checkObj, name, holder, shape))
            return false;
    } else if (checkObj->isNative()) {
        shape.set(checkObj->nativeLookup(cx, NameToId(name)));
        if (shape)
            holder.set(checkObj);
    }
    return true;
}
  1.3384 +
  1.3385 +static bool
  1.3386 +IsCacheableProtoChain(JSObject *obj, JSObject *holder, bool isDOMProxy=false)
  1.3387 +{
  1.3388 +    JS_ASSERT_IF(isDOMProxy, IsCacheableDOMProxy(obj));
  1.3389 +    JS_ASSERT_IF(!isDOMProxy, obj->isNative());
  1.3390 +
  1.3391 +    // Don't handle objects which require a prototype guard. This should
  1.3392 +    // be uncommon so handling it is likely not worth the complexity.
  1.3393 +    if (obj->hasUncacheableProto())
  1.3394 +        return false;
  1.3395 +
  1.3396 +    JSObject *cur = obj;
  1.3397 +    while (cur != holder) {
  1.3398 +        // We cannot assume that we find the holder object on the prototype
  1.3399 +        // chain and must check for null proto. The prototype chain can be
  1.3400 +        // altered during the lookupProperty call.
  1.3401 +        JSObject *proto;
  1.3402 +        if (isDOMProxy && cur == obj)
  1.3403 +            proto = cur->getTaggedProto().toObjectOrNull();
  1.3404 +        else
  1.3405 +            proto = cur->getProto();
  1.3406 +
  1.3407 +        if (!proto || !proto->isNative())
  1.3408 +            return false;
  1.3409 +
  1.3410 +        if (proto->hasUncacheableProto())
  1.3411 +            return false;
  1.3412 +
  1.3413 +        cur = proto;
  1.3414 +    }
  1.3415 +    return true;
  1.3416 +}
  1.3417 +
  1.3418 +static bool
  1.3419 +IsCacheableGetPropReadSlot(JSObject *obj, JSObject *holder, Shape *shape, bool isDOMProxy=false)
  1.3420 +{
  1.3421 +    if (!shape || !IsCacheableProtoChain(obj, holder, isDOMProxy))
  1.3422 +        return false;
  1.3423 +
  1.3424 +    if (!shape->hasSlot() || !shape->hasDefaultGetter())
  1.3425 +        return false;
  1.3426 +
  1.3427 +    return true;
  1.3428 +}
  1.3429 +
// Check whether obj/holder/shape describe a getter call an IC can inline:
// cacheable proto chain, a JSFunction getter value (no slot, non-default
// getter), and holder/getter not in the GC nursery.  On success *isScripted
// reports whether the getter is a scripted function (which must already have
// JIT code) or a native.
static bool
IsCacheableGetPropCall(JSContext *cx, JSObject *obj, JSObject *holder, Shape *shape, bool *isScripted,
                       bool isDOMProxy=false)
{
    JS_ASSERT(isScripted);

    if (!shape || !IsCacheableProtoChain(obj, holder, isDOMProxy))
        return false;

    // A slot-backed or default-getter property is a read, not a call.
    if (shape->hasSlot() || shape->hasDefaultGetter())
        return false;

    if (!shape->hasGetterValue())
        return false;

    if (!shape->getterValue().isObject() || !shape->getterObject()->is<JSFunction>())
        return false;

    JSFunction *func = &shape->getterObject()->as<JSFunction>();

#ifdef JSGC_GENERATIONAL
    // Information from get prop call ICs may be used directly from Ion code,
    // and should not be nursery allocated.
    if (cx->runtime()->gcNursery.isInside(holder) || cx->runtime()->gcNursery.isInside(func))
        return false;
#endif

    if (func->isNative()) {
        *isScripted = false;
        return true;
    }

    // Scripted getters are only inlined once they have been JIT-compiled.
    if (!func->hasJITCode())
        return false;

    *isScripted = true;
    return true;
}
  1.3468 +
  1.3469 +static bool
  1.3470 +IsCacheableSetPropWriteSlot(JSObject *obj, Shape *oldShape, JSObject *holder, Shape *shape)
  1.3471 +{
  1.3472 +    if (!shape)
  1.3473 +        return false;
  1.3474 +
  1.3475 +    // Object shape must not have changed during the property set.
  1.3476 +    if (obj->lastProperty() != oldShape)
  1.3477 +        return false;
  1.3478 +
  1.3479 +    // Currently we only optimize direct writes.
  1.3480 +    if (obj != holder)
  1.3481 +        return false;
  1.3482 +
  1.3483 +    if (!shape->hasSlot() || !shape->hasDefaultSetter() || !shape->writable())
  1.3484 +        return false;
  1.3485 +
  1.3486 +    return true;
  1.3487 +}
  1.3488 +
// Check whether a property set that ADDED a new slot (taking obj from
// oldShape to shape) can be handled by an optimized add-slot stub.  Requires
// a plain last-added slot property, no resolve hooks or non-default setters
// anywhere on the proto chain, and no dynamic-slot reallocation.  On success
// *protoChainDepth receives the number of prototypes to shape-guard.
static bool
IsCacheableSetPropAddSlot(JSContext *cx, HandleObject obj, HandleShape oldShape, uint32_t oldSlots,
                          HandleId id, HandleObject holder, HandleShape shape,
                          size_t *protoChainDepth)
{
    if (!shape)
        return false;

    // Property must be set directly on object, and be last added property of object.
    if (obj != holder || shape != obj->lastProperty())
        return false;

    // Object must be extensible, oldShape must be immediate parent of curShape.
    if (!obj->nonProxyIsExtensible() || obj->lastProperty()->previous() != oldShape)
        return false;

    // Basic shape checks.
    if (shape->inDictionary() || !shape->hasSlot() || !shape->hasDefaultSetter() ||
        !shape->writable())
    {
        return false;
    }

    // If object has a non-default resolve hook, don't inline
    if (obj->getClass()->resolve != JS_ResolveStub)
        return false;

    size_t chainDepth = 0;
    // walk up the object prototype chain and ensure that all prototypes
    // are native, and that all prototypes have setter defined on the property
    for (JSObject *proto = obj->getProto(); proto; proto = proto->getProto()) {
        chainDepth++;
        // if prototype is non-native, don't optimize
        if (!proto->isNative())
            return false;

        // if prototype defines this property in a non-plain way, don't optimize
        Shape *protoShape = proto->nativeLookup(cx, id);
        if (protoShape && !protoShape->hasDefaultSetter())
            return false;

        // Otherise, if there's no such property, watch out for a resolve hook that would need
        // to be invoked and thus prevent inlining of property addition.
        if (proto->getClass()->resolve != JS_ResolveStub)
             return false;
    }

    // Only add a IC entry if the dynamic slots didn't change when the shapes
    // changed.  Need to ensure that a shape change for a subsequent object
    // won't involve reallocating the slot array.
    if (obj->numDynamicSlots() != oldSlots)
        return false;

    *protoChainDepth = chainDepth;
    return true;
}
  1.3545 +
// Check whether obj/holder/shape describe a setter call an IC can inline.
// Mirrors IsCacheableGetPropCall for setters, with the extra restriction
// that only setters found on a prototype (never on obj itself) are handled.
static bool
IsCacheableSetPropCall(JSContext *cx, JSObject *obj, JSObject *holder, Shape *shape, bool *isScripted)
{
    JS_ASSERT(isScripted);

    // Currently we only optimize setter calls for setters bound on prototypes.
    if (obj == holder)
        return false;

    if (!shape || !IsCacheableProtoChain(obj, holder))
        return false;

    // A slot-backed or default-setter property is a write, not a call.
    if (shape->hasSlot() || shape->hasDefaultSetter())
        return false;

    if (!shape->hasSetterValue())
        return false;

    if (!shape->setterValue().isObject() || !shape->setterObject()->is<JSFunction>())
        return false;

    JSFunction *func = &shape->setterObject()->as<JSFunction>();

#ifdef JSGC_GENERATIONAL
    // Information from set prop call ICs may be used directly from Ion code,
    // and should not be nursery allocated.
    if (cx->runtime()->gcNursery.isInside(holder) || cx->runtime()->gcNursery.isInside(func))
        return false;
#endif

    if (func->isNative()) {
        *isScripted = false;
        return true;
    }

    // Scripted setters are only inlined once they have been JIT-compiled.
    if (!func->hasJITCode())
        return false;

    *isScripted = true;
    return true;
}
  1.3587 +
// Thin VM wrapper around OnUnknownMethod, invoked when a method lookup
// produced no callable value.
static bool
LookupNoSuchMethodHandler(JSContext *cx, HandleObject obj, HandleValue id,
                          MutableHandleValue result)
{
    return OnUnknownMethod(cx, obj, id, result);
}
  1.3594 +
// VMFunction wrapper so stubs can call LookupNoSuchMethodHandler.
typedef bool (*LookupNoSuchMethodHandlerFn)(JSContext *, HandleObject, HandleValue,
                                            MutableHandleValue);
static const VMFunction LookupNoSuchMethodHandlerInfo =
    FunctionInfo<LookupNoSuchMethodHandlerFn>(LookupNoSuchMethodHandler);
  1.3599 +
// Return true if |stub|'s chain already contains a GetElem native-property
// stub matching this (obj shape, holder, holder shape, propName) request.
// A stub that does not atomize cannot satisfy a request that needs
// atomization, so such a stub does not count as a match.
static bool
GetElemNativeStubExists(ICGetElem_Fallback *stub, HandleObject obj, HandleObject holder,
                        HandlePropertyName propName, bool needsAtomize)
{
    // "indirect" means the property lives on a prototype, not on obj itself.
    bool indirect = (obj.get() != holder.get());

    for (ICStubConstIterator iter = stub->beginChainConst(); !iter.atEnd(); iter++) {
        // Only the four GetElem native stub kinds are relevant.
        if (iter->kind() != ICStub::GetElem_NativeSlot &&
            iter->kind() != ICStub::GetElem_NativePrototypeSlot &&
            iter->kind() != ICStub::GetElem_NativePrototypeCallNative &&
            iter->kind() != ICStub::GetElem_NativePrototypeCallScripted)
        {
            continue;
        }

        // An indirect request can only be satisfied by a prototype stub.
        if (indirect && (iter->kind() != ICStub::GetElem_NativePrototypeSlot &&
                         iter->kind() != ICStub::GetElem_NativePrototypeCallNative &&
                         iter->kind() != ICStub::GetElem_NativePrototypeCallScripted))
        {
            continue;
        }

        ICGetElemNativeStub *getElemNativeStub = reinterpret_cast<ICGetElemNativeStub *>(*iter);
        if (propName != getElemNativeStub->name())
            continue;

        if (obj->lastProperty() != getElemNativeStub->shape())
            continue;

        // If the new stub needs atomization, and the old stub doesn't atomize, then
        // an appropriate stub doesn't exist.
        if (needsAtomize && !getElemNativeStub->needsAtomize())
            continue;

        // For prototype gets, check the holder and holder shape.
        if (indirect) {
            if (iter->isGetElem_NativePrototypeSlot()) {
                ICGetElem_NativePrototypeSlot *protoStub = iter->toGetElem_NativePrototypeSlot();

                if (holder != protoStub->holder())
                    continue;

                if (holder->lastProperty() != protoStub->holderShape())
                    continue;
            } else {
                JS_ASSERT(iter->isGetElem_NativePrototypeCallNative() ||
                          iter->isGetElem_NativePrototypeCallScripted());

                ICGetElemNativePrototypeCallStub *protoStub =
                    reinterpret_cast<ICGetElemNativePrototypeCallStub *>(*iter);

                if (holder != protoStub->holder())
                    continue;

                if (holder->lastProperty() != protoStub->holderShape())
                    continue;
            }
        }

        // All checks passed: a matching stub exists.
        return true;
    }
    return false;
}
  1.3663 +
// Unlink GetElem native stubs that are made stale by a new (obj, holder,
// propName, needsAtomize) request: prototype stubs whose holder shape no
// longer matches, and non-atomizing stubs when atomization is now required.
// Must only be called after GetElemNativeStubExists returned false — a fully
// matching stub reaching the end of the loop body is therefore a bug.
static void
RemoveExistingGetElemNativeStubs(JSContext *cx, ICGetElem_Fallback *stub, HandleObject obj,
                                 HandleObject holder, HandlePropertyName propName,
                                 bool needsAtomize)
{
    // "indirect" means the property lives on a prototype, not on obj itself.
    bool indirect = (obj.get() != holder.get());

    for (ICStubIterator iter = stub->beginChain(); !iter.atEnd(); iter++) {
        switch (iter->kind()) {
          case ICStub::GetElem_NativeSlot:
            if (indirect)
                continue;
            // Fall through: a direct-slot stub is checked like the others.
          case ICStub::GetElem_NativePrototypeSlot:
          case ICStub::GetElem_NativePrototypeCallNative:
          case ICStub::GetElem_NativePrototypeCallScripted:
            break;
          default:
            continue;
        }

        ICGetElemNativeStub *getElemNativeStub = reinterpret_cast<ICGetElemNativeStub *>(*iter);
        if (propName != getElemNativeStub->name())
            continue;

        if (obj->lastProperty() != getElemNativeStub->shape())
            continue;

        // For prototype gets, check the holder and holder shape.
        if (indirect) {
            if (iter->isGetElem_NativePrototypeSlot()) {
                ICGetElem_NativePrototypeSlot *protoStub = iter->toGetElem_NativePrototypeSlot();

                if (holder != protoStub->holder())
                    continue;

                // If the holder matches, but the holder's lastProperty doesn't match, then
                // this stub is invalid anyway.  Unlink it.
                if (holder->lastProperty() != protoStub->holderShape()) {
                    iter.unlink(cx);
                    continue;
                }
            } else {
                JS_ASSERT(iter->isGetElem_NativePrototypeCallNative() ||
                          iter->isGetElem_NativePrototypeCallScripted());

                ICGetElemNativePrototypeCallStub *protoStub =
                    reinterpret_cast<ICGetElemNativePrototypeCallStub *>(*iter);

                if (holder != protoStub->holder())
                    continue;

                // If the holder matches, but the holder's lastProperty doesn't match, then
                // this stub is invalid anyway.  Unlink it.
                if (holder->lastProperty() != protoStub->holderShape()) {
                    iter.unlink(cx);
                    continue;
                }
            }
        }

        // If the new stub needs atomization, and the old stub doesn't atomize, then
        // remove the old stub.
        if (needsAtomize && !getElemNativeStub->needsAtomize()) {
            iter.unlink(cx);
            continue;
        }

        // Should never get here, because this means a matching stub exists, and if
        // a matching stub exists, this procedure should never have been called.
        MOZ_ASSUME_UNREACHABLE("Procedure should never have been called.");
    }
}
  1.3736 +
  1.3737 +static bool
  1.3738 +TypedArrayGetElemStubExists(ICGetElem_Fallback *stub, HandleObject obj)
  1.3739 +{
  1.3740 +    for (ICStubConstIterator iter = stub->beginChainConst(); !iter.atEnd(); iter++) {
  1.3741 +        if (!iter->isGetElem_TypedArray())
  1.3742 +            continue;
  1.3743 +        if (obj->lastProperty() == iter->toGetElem_TypedArray()->shape())
  1.3744 +            return true;
  1.3745 +    }
  1.3746 +    return false;
  1.3747 +}
  1.3748 +
  1.3749 +static bool
  1.3750 +ArgumentsGetElemStubExists(ICGetElem_Fallback *stub, ICGetElem_Arguments::Which which)
  1.3751 +{
  1.3752 +    for (ICStubConstIterator iter = stub->beginChainConst(); !iter.atEnd(); iter++) {
  1.3753 +        if (!iter->isGetElem_Arguments())
  1.3754 +            continue;
  1.3755 +        if (iter->toGetElem_Arguments()->which() == which)
  1.3756 +            return true;
  1.3757 +    }
  1.3758 +    return false;
  1.3759 +}
  1.3760 +
  1.3761 +
// Try to attach an optimized stub for obj[key] where |key| is a string that
// names a (non-index) native property.  Two shapes of stub can be attached:
// a slot read (own or prototype) or a prototype getter call (native or
// scripted).  Returning true without attaching anything simply means no stub
// applied; false propagates failure from a fallible call.
static bool TryAttachNativeGetElemStub(JSContext *cx, HandleScript script, jsbytecode *pc,
                                       ICGetElem_Fallback *stub, HandleObject obj,
                                       HandleValue key)
{
    // Native-object GetElem stubs can't deal with non-string keys.
    if (!key.isString())
        return true;

    // Convert to interned property name.
    RootedId id(cx);
    if (!ValueToId<CanGC>(cx, key, &id))
        return false;

    // Index-like names (e.g. "3") are handled by the dense/typed-array paths,
    // not by the named-property stubs.
    uint32_t dummy;
    if (!JSID_IS_ATOM(id) || JSID_TO_ATOM(id)->isIndex(&dummy))
        return true;

    RootedPropertyName propName(cx, JSID_TO_ATOM(id)->asPropertyName());
    // If the incoming key string is not an atom, the generated stub must
    // atomize it before the identity compare against the stored name.
    bool needsAtomize = !key.toString()->isAtom();
    bool isCallElem = (JSOp(*pc) == JSOP_CALLELEM);

    RootedShape shape(cx);
    RootedObject holder(cx);
    if (!EffectlesslyLookupProperty(cx, obj, propName, &holder, &shape))
        return false;

    if (IsCacheableGetPropReadSlot(obj, holder, shape)) {
        // If a suitable stub already exists, nothing else to do.
        if (GetElemNativeStubExists(stub, obj, holder, propName, needsAtomize))
            return true;

        // Remove any existing stubs that may interfere with the new stub being added.
        RemoveExistingGetElemNativeStubs(cx, stub, obj, holder, propName, needsAtomize);

        bool isFixedSlot;
        uint32_t offset;
        GetFixedOrDynamicSlotOffset(holder, shape->slot(), &isFixedSlot, &offset);

        ICStub *monitorStub = stub->fallbackMonitorStub()->firstMonitorStub();
        ICStub::Kind kind = (obj == holder) ? ICStub::GetElem_NativeSlot
                                            : ICStub::GetElem_NativePrototypeSlot;

        IonSpew(IonSpew_BaselineIC, "  Generating GetElem(Native %s%s slot) stub "
                                    "(obj=%p, shape=%p, holder=%p, holderShape=%p)",
                    (obj == holder) ? "direct" : "prototype",
                    needsAtomize ? " atomizing" : "",
                    obj.get(), obj->lastProperty(), holder.get(), holder->lastProperty());

        ICGetElemNativeStub::AccessType acctype = isFixedSlot ? ICGetElemNativeStub::FixedSlot
                                                              : ICGetElemNativeStub::DynamicSlot;
        ICGetElemNativeCompiler compiler(cx, kind, isCallElem, monitorStub, obj, holder, propName,
                                         acctype, needsAtomize, offset);
        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;

        stub->addNewStub(newStub);
        return true;
    }

    bool getterIsScripted = false;
    if (IsCacheableGetPropCall(cx, obj, holder, shape, &getterIsScripted, /*isDOMProxy=*/false)) {
        RootedFunction getter(cx, &shape->getterObject()->as<JSFunction>());

#if JS_HAS_NO_SUCH_METHOD
        // It's unlikely that a getter function will be used in callelem locations.
        // Just don't attach stubs in that case to avoid issues with __noSuchMethod__ handling.
        if (isCallElem)
            return true;
#endif

        // For now, we do not handle own property getters
        if (obj == holder)
            return true;

        // If a suitable stub already exists, nothing else to do.
        if (GetElemNativeStubExists(stub, obj, holder, propName, needsAtomize))
            return true;

        // Remove any existing stubs that may interfere with the new stub being added.
        RemoveExistingGetElemNativeStubs(cx, stub, obj, holder, propName, needsAtomize);

        ICStub *monitorStub = stub->fallbackMonitorStub()->firstMonitorStub();
        ICStub::Kind kind = getterIsScripted ? ICStub::GetElem_NativePrototypeCallScripted
                                             : ICStub::GetElem_NativePrototypeCallNative;

        if (getterIsScripted) {
            IonSpew(IonSpew_BaselineIC,
                    "  Generating GetElem(Native %s%s call scripted %s:%d) stub "
                    "(obj=%p, shape=%p, holder=%p, holderShape=%p)",
                        (obj == holder) ? "direct" : "prototype",
                        needsAtomize ? " atomizing" : "",
                        getter->nonLazyScript()->filename(), getter->nonLazyScript()->lineno(),
                        obj.get(), obj->lastProperty(), holder.get(), holder->lastProperty());
        } else {
            IonSpew(IonSpew_BaselineIC,
                    "  Generating GetElem(Native %s%s call native) stub "
                    "(obj=%p, shape=%p, holder=%p, holderShape=%p)",
                        (obj == holder) ? "direct" : "prototype",
                        needsAtomize ? " atomizing" : "",
                        obj.get(), obj->lastProperty(), holder.get(), holder->lastProperty());
        }

        // NOTE: this ICGetElemNativeCompiler constructor takes its arguments
        // in a different order than the slot-stub constructor above.
        ICGetElemNativeStub::AccessType acctype = getterIsScripted
                                                           ? ICGetElemNativeStub::ScriptedGetter
                                                           : ICGetElemNativeStub::NativeGetter;
        ICGetElemNativeCompiler compiler(cx, kind, monitorStub, obj, holder, propName, acctype,
                                         needsAtomize, getter, script->pcToOffset(pc), isCallElem);
        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;

        stub->addNewStub(newStub);
        return true;
    }

    return true;
}
  1.3880 +
  1.3881 +static bool
  1.3882 +TypedArrayRequiresFloatingPoint(TypedArrayObject *tarr)
  1.3883 +{
  1.3884 +    uint32_t type = tarr->type();
  1.3885 +    return (type == ScalarTypeDescr::TYPE_UINT32 ||
  1.3886 +            type == ScalarTypeDescr::TYPE_FLOAT32 ||
  1.3887 +            type == ScalarTypeDescr::TYPE_FLOAT64);
  1.3888 +}
  1.3889 +
// Dispatch over (lhs, rhs, res) and try to attach the most specific GetElem
// stub that applies: string[int], magic-arguments[int], arguments-object[int],
// dense native[int], named native[string], or typed-array[number].  Returning
// true without attaching means no optimization applied; false propagates
// failure from a fallible call.
static bool
TryAttachGetElemStub(JSContext *cx, JSScript *script, jsbytecode *pc, ICGetElem_Fallback *stub,
                     HandleValue lhs, HandleValue rhs, HandleValue res)
{
    bool isCallElem = (JSOp(*pc) == JSOP_CALLELEM);

    // Check for String[i] => Char accesses.
    if (lhs.isString() && rhs.isInt32() && res.isString() &&
        !stub->hasStub(ICStub::GetElem_String))
    {
        // NoSuchMethod handling doesn't apply to string targets.

        IonSpew(IonSpew_BaselineIC, "  Generating GetElem(String[Int32]) stub");
        ICGetElem_String::Compiler compiler(cx);
        ICStub *stringStub = compiler.getStub(compiler.getStubSpace(script));
        if (!stringStub)
            return false;

        stub->addNewStub(stringStub);
        return true;
    }

    // Magic value: lazy |arguments| access in a frame that never materialized
    // an arguments object.
    if (lhs.isMagic(JS_OPTIMIZED_ARGUMENTS) && rhs.isInt32() &&
        !ArgumentsGetElemStubExists(stub, ICGetElem_Arguments::Magic))
    {
        // Any script with a CALLPROP on arguments (arguments.foo())
        // should not have optimized arguments.
        JS_ASSERT(!isCallElem);

        IonSpew(IonSpew_BaselineIC, "  Generating GetElem(MagicArgs[Int32]) stub");
        ICGetElem_Arguments::Compiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
                                               ICGetElem_Arguments::Magic, false);
        ICStub *argsStub = compiler.getStub(compiler.getStubSpace(script));
        if (!argsStub)
            return false;

        stub->addNewStub(argsStub);
        return true;
    }

    // Otherwise, GetElem is only optimized on objects.
    if (!lhs.isObject())
        return true;
    RootedObject obj(cx, &lhs.toObject());

    // Check for ArgumentsObj[int] accesses
    if (obj->is<ArgumentsObject>() && rhs.isInt32()) {
        ICGetElem_Arguments::Which which = ICGetElem_Arguments::Normal;
        if (obj->is<StrictArgumentsObject>())
            which = ICGetElem_Arguments::Strict;
        if (!ArgumentsGetElemStubExists(stub, which)) {
            IonSpew(IonSpew_BaselineIC, "  Generating GetElem(ArgsObj[Int32]) stub");
            ICGetElem_Arguments::Compiler compiler(
                cx, stub->fallbackMonitorStub()->firstMonitorStub(), which, isCallElem);
            ICStub *argsStub = compiler.getStub(compiler.getStubSpace(script));
            if (!argsStub)
                return false;

            stub->addNewStub(argsStub);
            return true;
        }
    }

    if (obj->isNative()) {
        // Check for NativeObject[int] dense accesses.  Negative indices are
        // excluded here and noted below via noteNegativeIndex().
        if (rhs.isInt32() && rhs.toInt32() >= 0 && !obj->is<TypedArrayObject>()) {
            IonSpew(IonSpew_BaselineIC, "  Generating GetElem(Native[Int32] dense) stub");
            ICGetElem_Dense::Compiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
                                               obj->lastProperty(), isCallElem);
            ICStub *denseStub = compiler.getStub(compiler.getStubSpace(script));
            if (!denseStub)
                return false;

            stub->addNewStub(denseStub);
            return true;
        }

        // Check for NativeObject[id] shape-optimizable accesses.
        if (rhs.isString()) {
            RootedScript rootedScript(cx, script);
            if (!TryAttachNativeGetElemStub(cx, rootedScript, pc, stub, obj, rhs))
                return false;
            // The call above may GC and move the script; restore the raw pointer.
            script = rootedScript;
        }
    }

    // Check for TypedArray[int] => Number accesses.
    if (obj->is<TypedArrayObject>() && rhs.isNumber() && res.isNumber() &&
        !TypedArrayGetElemStubExists(stub, obj))
    {
        // Don't attach CALLELEM stubs for accesses on typed array expected to yield numbers.
#if JS_HAS_NO_SUCH_METHOD
        if (isCallElem)
            return true;
#endif

        // Bail out if the stub would need FP support the runtime lacks.
        TypedArrayObject *tarr = &obj->as<TypedArrayObject>();
        if (!cx->runtime()->jitSupportsFloatingPoint &&
            (TypedArrayRequiresFloatingPoint(tarr) || rhs.isDouble()))
        {
            return true;
        }

        IonSpew(IonSpew_BaselineIC, "  Generating GetElem(TypedArray[Int32]) stub");
        ICGetElem_TypedArray::Compiler compiler(cx, tarr->lastProperty(), tarr->type());
        ICStub *typedArrayStub = compiler.getStub(compiler.getStubSpace(script));
        if (!typedArrayStub)
            return false;

        stub->addNewStub(typedArrayStub);
        return true;
    }

    // GetElem operations on non-native objects cannot be cached by either
    // Baseline or Ion. Indicate this in the cache so that Ion does not
    // generate a cache for this op.
    if (!obj->isNative())
        stub->noteNonNativeAccess();

    // GetElem operations which could access negative indexes generally can't
    // be optimized without the potential for bailouts, as we can't statically
    // determine that an object has no properties on such indexes.
    if (rhs.isNumber() && rhs.toNumber() < 0)
        stub->noteNegativeIndex();

    return true;
}
  1.4017 +
// VM entry point for the GetElem fallback stub.  Performs the generic
// element-get, monitors the result type, then (chain size permitting) tries
// to attach an optimized stub for next time.  Ordering here is deliberate:
// the stub pointer is re-validated after any operation that can trigger
// debug-mode OSR before it is dereferenced again.
static bool
DoGetElemFallback(JSContext *cx, BaselineFrame *frame, ICGetElem_Fallback *stub_, HandleValue lhs,
                  HandleValue rhs, MutableHandleValue res)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICGetElem_Fallback *> stub(frame, stub_);

    RootedScript script(cx, frame->script());
    jsbytecode *pc = stub->icEntry()->pc(frame->script());
    JSOp op = JSOp(*pc);
    FallbackICSpew(cx, stub, "GetElem(%s)", js_CodeName[op]);

    JS_ASSERT(op == JSOP_GETELEM || op == JSOP_CALLELEM);

    // Don't pass lhs directly, we need it when generating stubs.
    RootedValue lhsCopy(cx, lhs);

    bool isOptimizedArgs = false;
    if (lhs.isMagic(JS_OPTIMIZED_ARGUMENTS)) {
        // Handle optimized arguments[i] access.
        if (!GetElemOptimizedArguments(cx, frame, &lhsCopy, rhs, res, &isOptimizedArgs))
            return false;
        if (isOptimizedArgs)
            types::TypeScript::Monitor(cx, frame->script(), pc, res);
    }

    if (!isOptimizedArgs) {
        // Generic path: performs the full JS [[Get]] semantics.
        if (!GetElementOperation(cx, op, &lhsCopy, rhs, res))
            return false;
        types::TypeScript::Monitor(cx, frame->script(), pc, res);
    }

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    // Add a type monitor stub for the resulting value.
    if (!stub->addMonitorStubForValue(cx, frame->script(), res))
        return false;

    if (stub->numOptimizedStubs() >= ICGetElem_Fallback::MAX_OPTIMIZED_STUBS) {
        // TODO: Discard all stubs in this IC and replace with inert megamorphic stub.
        // But for now we just bail.
        return true;
    }

    // Try to attach an optimized stub.
    if (!TryAttachGetElemStub(cx, frame->script(), pc, stub, lhs, rhs, res))
        return false;

    return true;
}
  1.4070 +
// VM-call glue for DoGetElemFallback.  PopValues(2) discards the two values
// the trampoline pushed for the expression decompiler (see generateStubCode).
typedef bool (*DoGetElemFallbackFn)(JSContext *, BaselineFrame *, ICGetElem_Fallback *,
                                    HandleValue, HandleValue, MutableHandleValue);
static const VMFunction DoGetElemFallbackInfo =
    FunctionInfo<DoGetElemFallbackFn>(DoGetElemFallback, PopValues(2));
  1.4075 +
// Emit the fallback trampoline: sync the operands to the stack, then
// tail-call DoGetElemFallback in the VM.
bool
ICGetElem_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(R0 == JSReturnOperand);

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    // Ensure stack is fully synced for the expression decompiler.
    // These two values are popped again by PopValues(2) on the VMFunction.
    masm.pushValue(R0);
    masm.pushValue(R1);

    // Push arguments in reverse C-signature order: rhs (R1), lhs (R0),
    // the stub pointer, then the baseline frame pointer.
    masm.pushValue(R1);
    masm.pushValue(R0);
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(DoGetElemFallbackInfo, masm);
}
  1.4096 +
  1.4097 +//
  1.4098 +// GetElem_NativeSlot
  1.4099 +//
  1.4100 +
  1.4101 +static bool
  1.4102 +DoAtomizeString(JSContext *cx, HandleString string, MutableHandleValue result)
  1.4103 +{
  1.4104 +    IonSpew(IonSpew_BaselineIC, "  AtomizeString called");
  1.4105 +
  1.4106 +    RootedValue key(cx, StringValue(string));
  1.4107 +
  1.4108 +    // Convert to interned property name.
  1.4109 +    RootedId id(cx);
  1.4110 +    if (!ValueToId<CanGC>(cx, key, &id))
  1.4111 +        return false;
  1.4112 +
  1.4113 +    if (!JSID_IS_ATOM(id)) {
  1.4114 +        result.set(key);
  1.4115 +        return true;
  1.4116 +    }
  1.4117 +
  1.4118 +    result.set(StringValue(JSID_TO_ATOM(id)));
  1.4119 +    return true;
  1.4120 +}
  1.4121 +
// VM-call glue for DoAtomizeString (invoked from generated stub code below).
typedef bool (*DoAtomizeStringFn)(JSContext *, HandleString, MutableHandleValue);
static const VMFunction DoAtomizeStringInfo = FunctionInfo<DoAtomizeStringFn>(DoAtomizeString);
  1.4124 +
// Emit a call to a native getter through the DoCallNativeGetter VM function.
// |objReg| holds the receiver; on return the result is in the VM-call return
// convention (JSReturnOperand) per callVM.
bool
ICGetElemNativeCompiler::emitCallNative(MacroAssembler &masm, Register objReg)
{
    GeneralRegisterSet regs = availableGeneralRegs(0);
    regs.takeUnchecked(objReg);
    regs.takeUnchecked(BaselineTailCallReg);

    enterStubFrame(masm, regs.getAny());

    // Push object.
    masm.push(objReg);

    // Push native callee.  objReg is clobbered here; it is no longer needed.
    masm.loadPtr(Address(BaselineStubReg, ICGetElemNativeGetterStub::offsetOfGetter()), objReg);
    masm.push(objReg);

    regs.add(objReg);

    // Profiler hook.
    emitProfilingUpdate(masm, regs, ICGetElemNativeGetterStub::offsetOfPCOffset());

    // Call helper.
    if (!callVM(DoCallNativeGetterInfo, masm))
        return false;

    leaveStubFrame(masm);

    return true;
}
  1.4154 +
// Emit a direct call into a scripted getter's jitcode.  |objReg| holds the
// receiver, which becomes |this| for the getter; the getter is invoked with
// zero actual arguments, going through the arguments rectifier when its
// formal-argument count is non-zero.
bool
ICGetElemNativeCompiler::emitCallScripted(MacroAssembler &masm, Register objReg)
{
    GeneralRegisterSet regs = availableGeneralRegs(0);
    regs.takeUnchecked(objReg);
    regs.takeUnchecked(BaselineTailCallReg);

    // Enter stub frame.
    enterStubFrame(masm, regs.getAny());

    // Push |this| for getter (target object).
    {
        ValueOperand val = regs.takeAnyValue();
        masm.tagValue(JSVAL_TYPE_OBJECT, objReg, val);
        masm.Push(val);
        regs.add(val);
    }

    regs.add(objReg);

    Register callee = regs.takeAny();
    masm.loadPtr(Address(BaselineStubReg, ICGetElemNativeGetterStub::offsetOfGetter()), callee);

    // Push argc, callee, and descriptor.
    {
        Register callScratch = regs.takeAny();
        EmitCreateStubFrameDescriptor(masm, callScratch);
        masm.Push(Imm32(0));  // ActualArgc is 0
        masm.Push(callee);
        masm.Push(callScratch);
        regs.add(callScratch);
    }

    // Load the getter's jit entry point.  ArgumentsRectifierReg is excluded
    // because the rectifier reads the callee's code pointer from it.
    Register code = regs.takeAnyExcluding(ArgumentsRectifierReg);
    masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), code);
    masm.loadBaselineOrIonRaw(code, code, SequentialExecution, nullptr);

    Register scratch = regs.takeAny();

    // Handle arguments underflow.
    Label noUnderflow;
    masm.load16ZeroExtend(Address(callee, JSFunction::offsetOfNargs()), scratch);
    masm.branch32(Assembler::Equal, scratch, Imm32(0), &noUnderflow);
    {
        // Call the arguments rectifier.
        JS_ASSERT(ArgumentsRectifierReg != code);

        JitCode *argumentsRectifier =
            cx->runtime()->jitRuntime()->getArgumentsRectifier(SequentialExecution);

        // Route the call through the rectifier's code; it expects the real
        // callee code pointer in ArgumentsRectifierReg (0 = use script entry).
        masm.movePtr(ImmGCPtr(argumentsRectifier), code);
        masm.loadPtr(Address(code, JitCode::offsetOfCode()), code);
        masm.mov(ImmWord(0), ArgumentsRectifierReg);
    }

    masm.bind(&noUnderflow);

    // If needed, update SPS Profiler frame entry.  At this point, callee and scratch can
    // be clobbered.
    {
        GeneralRegisterSet availRegs = availableGeneralRegs(0);
        availRegs.take(ArgumentsRectifierReg);
        availRegs.take(code);
        emitProfilingUpdate(masm, availRegs, ICGetElemNativeGetterStub::offsetOfPCOffset());
    }

    masm.callIon(code);

    leaveStubFrame(masm, true);

    return true;
}
  1.4227 +
  1.4228 +bool
  1.4229 +ICGetElemNativeCompiler::generateStubCode(MacroAssembler &masm)
  1.4230 +{
  1.4231 +    Label failure;
  1.4232 +    Label failurePopR1;
  1.4233 +    bool popR1 = false;
  1.4234 +
  1.4235 +    masm.branchTestObject(Assembler::NotEqual, R0, &failure);
  1.4236 +    masm.branchTestString(Assembler::NotEqual, R1, &failure);
  1.4237 +
  1.4238 +    GeneralRegisterSet regs(availableGeneralRegs(2));
  1.4239 +    Register scratchReg = regs.takeAny();
  1.4240 +
  1.4241 +    // Unbox object.
  1.4242 +    Register objReg = masm.extractObject(R0, ExtractTemp0);
  1.4243 +
  1.4244 +    // Check object shape.
  1.4245 +    masm.loadPtr(Address(objReg, JSObject::offsetOfShape()), scratchReg);
  1.4246 +    Address shapeAddr(BaselineStubReg, ICGetElemNativeStub::offsetOfShape());
  1.4247 +    masm.branchPtr(Assembler::NotEqual, shapeAddr, scratchReg, &failure);
  1.4248 +
  1.4249 +    // Check key identity.  Don't automatically fail if this fails, since the incoming
  1.4250 +    // key maybe a non-interned string.  Switch to a slowpath vm-call based check.
  1.4251 +    Address nameAddr(BaselineStubReg, ICGetElemNativeStub::offsetOfName());
  1.4252 +    Register strExtract = masm.extractString(R1, ExtractTemp1);
  1.4253 +
  1.4254 +    // If needsAtomize_ is true, and the string is not already an atom, then atomize the
  1.4255 +    // string before proceeding.
  1.4256 +    if (needsAtomize_) {
  1.4257 +        Label skipAtomize;
  1.4258 +
  1.4259 +        // If string is already an atom, skip the atomize.
  1.4260 +        masm.branchTestPtr(Assembler::NonZero,
  1.4261 +                           Address(strExtract, JSString::offsetOfLengthAndFlags()),
  1.4262 +                           Imm32(JSString::ATOM_BIT),
  1.4263 +                           &skipAtomize);
  1.4264 +
  1.4265 +        // Stow R0.
  1.4266 +        EmitStowICValues(masm, 1);
  1.4267 +
  1.4268 +        enterStubFrame(masm, R0.scratchReg());
  1.4269 +
  1.4270 +        // Atomize the string into a new value.
  1.4271 +        masm.push(strExtract);
  1.4272 +        if (!callVM(DoAtomizeStringInfo, masm))
  1.4273 +            return false;
  1.4274 +
  1.4275 +        // Atomized string is now in JSReturnOperand (R0).
  1.4276 +        // Leave stub frame, move atomized string into R1.
  1.4277 +        JS_ASSERT(R0 == JSReturnOperand);
  1.4278 +        leaveStubFrame(masm);
  1.4279 +        masm.moveValue(JSReturnOperand, R1);
  1.4280 +
  1.4281 +        // Unstow R0
  1.4282 +        EmitUnstowICValues(masm, 1);
  1.4283 +
  1.4284 +        // Extract string from R1 again.
  1.4285 +        DebugOnly<Register> strExtract2 = masm.extractString(R1, ExtractTemp1);
  1.4286 +        JS_ASSERT(Register(strExtract2) == strExtract);
  1.4287 +
  1.4288 +        masm.bind(&skipAtomize);
  1.4289 +    }
  1.4290 +
  1.4291 +    // Since this stub sometimes enter a stub frame, we manually set this to true (lie).
  1.4292 +#ifdef DEBUG
  1.4293 +    entersStubFrame_ = true;
  1.4294 +#endif
  1.4295 +
  1.4296 +    // Key has been atomized if necessary.  Do identity check on string pointer.
  1.4297 +    masm.branchPtr(Assembler::NotEqual, nameAddr, strExtract, &failure);
  1.4298 +
  1.4299 +    Register holderReg;
  1.4300 +    if (obj_ == holder_) {
  1.4301 +        holderReg = objReg;
  1.4302 +    } else {
  1.4303 +        // Shape guard holder.
  1.4304 +        if (regs.empty()) {
  1.4305 +            masm.push(R1.scratchReg());
  1.4306 +            popR1 = true;
  1.4307 +            holderReg = R1.scratchReg();
  1.4308 +        } else {
  1.4309 +            holderReg = regs.takeAny();
  1.4310 +        }
  1.4311 +
  1.4312 +        if (kind == ICStub::GetElem_NativePrototypeCallNative ||
  1.4313 +            kind == ICStub::GetElem_NativePrototypeCallScripted)
  1.4314 +        {
  1.4315 +            masm.loadPtr(Address(BaselineStubReg,
  1.4316 +                                 ICGetElemNativePrototypeCallStub::offsetOfHolder()),
  1.4317 +                         holderReg);
  1.4318 +            masm.loadPtr(Address(BaselineStubReg,
  1.4319 +                                 ICGetElemNativePrototypeCallStub::offsetOfHolderShape()),
  1.4320 +                         scratchReg);
  1.4321 +        } else {
  1.4322 +            masm.loadPtr(Address(BaselineStubReg,
  1.4323 +                                 ICGetElem_NativePrototypeSlot::offsetOfHolder()),
  1.4324 +                         holderReg);
  1.4325 +            masm.loadPtr(Address(BaselineStubReg,
  1.4326 +                                 ICGetElem_NativePrototypeSlot::offsetOfHolderShape()),
  1.4327 +                         scratchReg);
  1.4328 +        }
  1.4329 +        masm.branchTestObjShape(Assembler::NotEqual, holderReg, scratchReg,
  1.4330 +                                popR1 ? &failurePopR1 : &failure);
  1.4331 +    }
  1.4332 +
  1.4333 +    if (acctype_ == ICGetElemNativeStub::DynamicSlot ||
  1.4334 +        acctype_ == ICGetElemNativeStub::FixedSlot)
  1.4335 +    {
  1.4336 +        masm.load32(Address(BaselineStubReg, ICGetElemNativeSlotStub::offsetOfOffset()),
  1.4337 +                    scratchReg);
  1.4338 +
  1.4339 +        // Load from object.
  1.4340 +        if (acctype_ == ICGetElemNativeStub::DynamicSlot)
  1.4341 +            masm.addPtr(Address(holderReg, JSObject::offsetOfSlots()), scratchReg);
  1.4342 +        else
  1.4343 +            masm.addPtr(holderReg, scratchReg);
  1.4344 +
  1.4345 +        Address valAddr(scratchReg, 0);
  1.4346 +
  1.4347 +        // Check if __noSuchMethod__ needs to be called.
  1.4348 +#if JS_HAS_NO_SUCH_METHOD
  1.4349 +        if (isCallElem_) {
  1.4350 +            Label afterNoSuchMethod;
  1.4351 +            Label skipNoSuchMethod;
  1.4352 +
  1.4353 +            masm.branchTestUndefined(Assembler::NotEqual, valAddr, &skipNoSuchMethod);
  1.4354 +
  1.4355 +            GeneralRegisterSet regs = availableGeneralRegs(0);
  1.4356 +            regs.take(R1);
  1.4357 +            regs.take(R0);
  1.4358 +            regs.takeUnchecked(objReg);
  1.4359 +            if (popR1)
  1.4360 +                masm.pop(R1.scratchReg());
  1.4361 +
  1.4362 +            // Box and push obj and key onto baseline frame stack for decompiler.
  1.4363 +            masm.tagValue(JSVAL_TYPE_OBJECT, objReg, R0);
  1.4364 +            EmitStowICValues(masm, 2);
  1.4365 +
  1.4366 +            regs.add(R0);
  1.4367 +            regs.takeUnchecked(objReg);
  1.4368 +
  1.4369 +            enterStubFrame(masm, regs.getAnyExcluding(BaselineTailCallReg));
  1.4370 +
  1.4371 +            masm.pushValue(R1);
  1.4372 +            masm.push(objReg);
  1.4373 +            if (!callVM(LookupNoSuchMethodHandlerInfo, masm))
  1.4374 +                return false;
  1.4375 +
  1.4376 +            leaveStubFrame(masm);
  1.4377 +
  1.4378 +            // Pop pushed obj and key from baseline stack.
  1.4379 +            EmitUnstowICValues(masm, 2, /* discard = */ true);
  1.4380 +
  1.4381 +            // Result is already in R0
  1.4382 +            masm.jump(&afterNoSuchMethod);
  1.4383 +            masm.bind(&skipNoSuchMethod);
  1.4384 +
  1.4385 +            if (popR1)
  1.4386 +                masm.pop(R1.scratchReg());
  1.4387 +            masm.loadValue(valAddr, R0);
  1.4388 +            masm.bind(&afterNoSuchMethod);
  1.4389 +        } else {
  1.4390 +            masm.loadValue(valAddr, R0);
  1.4391 +            if (popR1)
  1.4392 +                masm.addPtr(ImmWord(sizeof(size_t)), BaselineStackReg);
  1.4393 +        }
  1.4394 +#else
  1.4395 +        masm.loadValue(valAddr, R0);
  1.4396 +        if (popR1)
  1.4397 +            masm.addPtr(ImmWord(sizeof(size_t)), BaselineStackReg);
  1.4398 +#endif
  1.4399 +
  1.4400 +    } else {
  1.4401 +        JS_ASSERT(acctype_ == ICGetElemNativeStub::NativeGetter ||
  1.4402 +                  acctype_ == ICGetElemNativeStub::ScriptedGetter);
  1.4403 +        JS_ASSERT(kind == ICStub::GetElem_NativePrototypeCallNative ||
  1.4404 +                  kind == ICStub::GetElem_NativePrototypeCallScripted);
  1.4405 +
  1.4406 +        if (acctype_ == ICGetElemNativeStub::NativeGetter) {
  1.4407 +            // If calling a native getter, there is no chance of failure now.
  1.4408 +
  1.4409 +            // GetElem key (R1) is no longer needed.
  1.4410 +            if (popR1)
  1.4411 +                masm.addPtr(ImmWord(sizeof(size_t)), BaselineStackReg);
  1.4412 +
  1.4413 +            emitCallNative(masm, objReg);
  1.4414 +
  1.4415 +        } else {
  1.4416 +            JS_ASSERT(acctype_ == ICGetElemNativeStub::ScriptedGetter);
  1.4417 +
  1.4418 +            // Load function in scratchReg and ensure that it has a jit script.
  1.4419 +            masm.loadPtr(Address(BaselineStubReg, ICGetElemNativeGetterStub::offsetOfGetter()),
  1.4420 +                         scratchReg);
  1.4421 +            masm.branchIfFunctionHasNoScript(scratchReg, popR1 ? &failurePopR1 : &failure);
  1.4422 +            masm.loadPtr(Address(scratchReg, JSFunction::offsetOfNativeOrScript()), scratchReg);
  1.4423 +            masm.loadBaselineOrIonRaw(scratchReg, scratchReg, SequentialExecution,
  1.4424 +                                      popR1 ? &failurePopR1 : &failure);
  1.4425 +
  1.4426 +            // At this point, we are guaranteed to successfully complete.
  1.4427 +            if (popR1)
  1.4428 +                masm.addPtr(Imm32(sizeof(size_t)), BaselineStackReg);
  1.4429 +
  1.4430 +            emitCallScripted(masm, objReg);
  1.4431 +        }
  1.4432 +    }
  1.4433 +
  1.4434 +    // Enter type monitor IC to type-check result.
  1.4435 +    EmitEnterTypeMonitorIC(masm);
  1.4436 +
  1.4437 +    // Failure case - jump to next stub
  1.4438 +    if (popR1) {
  1.4439 +        masm.bind(&failurePopR1);
  1.4440 +        masm.pop(R1.scratchReg());
  1.4441 +    }
  1.4442 +    masm.bind(&failure);
  1.4443 +    EmitStubGuardFailure(masm);
  1.4444 +
  1.4445 +    return true;
  1.4446 +}
  1.4447 +
  1.4448 +//
  1.4449 +// GetElem_String
  1.4450 +//
  1.4451 +
// Optimized stub for getelem on a linear string with an in-bounds int32
// index, returning the preallocated single-character static string. Falls
// through to the next stub for non-string/non-int32 operands, non-linear
// (rope) strings, out-of-bounds indices, or characters with no static
// string entry.
bool
ICGetElem_String::Compiler::generateStubCode(MacroAssembler &masm)
{
    // Type guards: R0 must be a string, R1 an int32 index.
    Label failure;
    masm.branchTestString(Assembler::NotEqual, R0, &failure);
    masm.branchTestInt32(Assembler::NotEqual, R1, &failure);

    GeneralRegisterSet regs(availableGeneralRegs(2));
    Register scratchReg = regs.takeAny();

    // Unbox string in R0.
    Register str = masm.extractString(R0, ExtractTemp0);

    // Load string lengthAndFlags word (length and flag bits share one word).
    Address lengthAndFlagsAddr(str, JSString::offsetOfLengthAndFlags());
    masm.loadPtr(lengthAndFlagsAddr, scratchReg);

    // Check for non-linear strings: only linear strings have directly
    // addressable chars. (Flag-bit encoding per JSString — a zero flags
    // field identifies the non-linear case here.)
    masm.branchTest32(Assembler::Zero, scratchReg, Imm32(JSString::FLAGS_MASK), &failure);

    // Unbox key.
    Register key = masm.extractInt32(R1, ExtractTemp1);

    // Extract length from the lengthAndFlags word and bounds check:
    // fail unless key < length.
    masm.rshiftPtr(Imm32(JSString::LENGTH_SHIFT), scratchReg);
    masm.branch32(Assembler::BelowOrEqual, scratchReg, key, &failure);

    // Get char code (chars are 16-bit units, hence TimesTwo scaling).
    Address charsAddr(str, JSString::offsetOfChars());
    masm.loadPtr(charsAddr, scratchReg);
    masm.load16ZeroExtend(BaseIndex(scratchReg, key, TimesTwo, 0), scratchReg);

    // Check if char code >= UNIT_STATIC_LIMIT: only chars below the limit
    // have a preallocated static string, so bail otherwise.
    masm.branch32(Assembler::AboveOrEqual, scratchReg, Imm32(StaticStrings::UNIT_STATIC_LIMIT),
                  &failure);

    // Load the per-character static string out of the unit table.
    masm.movePtr(ImmPtr(&cx->staticStrings().unitStaticTable), str);
    masm.loadPtr(BaseIndex(str, scratchReg, ScalePointer), str);

    // Box the result string and return.
    masm.tagValue(JSVAL_TYPE_STRING, str, R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.4501 +
  1.4502 +//
  1.4503 +// GetElem_Dense
  1.4504 +//
  1.4505 +
// Optimized stub for getelem on an object with dense elements: shape guard,
// bounds check against the initialized length, hole check, then a direct
// element load. When compiled for a callelem site (isCallElem_) and
// __noSuchMethod__ support is on, an undefined result triggers a VM call to
// the noSuchMethod handler instead of returning undefined.
bool
ICGetElem_Dense::Compiler::generateStubCode(MacroAssembler &masm)
{
    // Type guards: object in R0, int32 index in R1.
    Label failure;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);
    masm.branchTestInt32(Assembler::NotEqual, R1, &failure);

    GeneralRegisterSet regs(availableGeneralRegs(2));
    Register scratchReg = regs.takeAny();

    // Unbox R0 and shape guard against the shape baked into the stub.
    Register obj = masm.extractObject(R0, ExtractTemp0);
    masm.loadPtr(Address(BaselineStubReg, ICGetElem_Dense::offsetOfShape()), scratchReg);
    masm.branchTestObjShape(Assembler::NotEqual, obj, scratchReg, &failure);

    // Load obj->elements.
    masm.loadPtr(Address(obj, JSObject::offsetOfElements()), scratchReg);

    // Unbox key.
    Register key = masm.extractInt32(R1, ExtractTemp1);

    // Bounds check: key must be below the initialized length.
    Address initLength(scratchReg, ObjectElements::offsetOfInitializedLength());
    masm.branch32(Assembler::BelowOrEqual, initLength, key, &failure);

    // Hole check: a magic value in the slot means a hole, which this stub
    // does not handle.
    JS_STATIC_ASSERT(sizeof(Value) == 8);
    BaseIndex element(scratchReg, key, TimesEight);
    masm.branchTestMagic(Assembler::Equal, element, &failure);

    // Check if __noSuchMethod__ should be called.
#if JS_HAS_NO_SUCH_METHOD
#ifdef DEBUG
    // Record (for debug-only assertions) that this stub may enter a stub frame.
    entersStubFrame_ = true;
#endif
    if (isCallElem_) {
        Label afterNoSuchMethod;
        Label skipNoSuchMethod;
        regs = availableGeneralRegs(0);
        regs.takeUnchecked(obj);
        regs.takeUnchecked(key);
        regs.takeUnchecked(BaselineTailCallReg);
        ValueOperand val = regs.takeValueOperand();

        // Only take the slow path when the loaded property is undefined.
        masm.loadValue(element, val);
        masm.branchTestUndefined(Assembler::NotEqual, val, &skipNoSuchMethod);

        // Box and push obj and key onto baseline frame stack for decompiler.
        EmitRestoreTailCallReg(masm);
        masm.tagValue(JSVAL_TYPE_OBJECT, obj, val);
        masm.pushValue(val);
        masm.tagValue(JSVAL_TYPE_INT32, key, val);
        masm.pushValue(val);
        EmitRepushTailCallReg(masm);

        regs.add(val);

        // Call __noSuchMethod__ checker.  Object pointer is in obj.
        enterStubFrame(masm, regs.getAnyExcluding(BaselineTailCallReg));

        regs.take(val);

        // VM-call arguments: boxed key, then the object pointer.
        masm.tagValue(JSVAL_TYPE_INT32, key, val);
        masm.pushValue(val);
        masm.push(obj);
        if (!callVM(LookupNoSuchMethodHandlerInfo, masm))
            return false;

        leaveStubFrame(masm);

        // Pop pushed obj and key from baseline stack.
        EmitUnstowICValues(masm, 2, /* discard = */ true);

        // Result is already in R0
        masm.jump(&afterNoSuchMethod);
        masm.bind(&skipNoSuchMethod);

        // Fast path: the loaded element was not undefined; move it to R0.
        masm.moveValue(val, R0);
        masm.bind(&afterNoSuchMethod);
    } else {
        masm.loadValue(element, R0);
    }
#else
    // Load value from element location.
    masm.loadValue(element, R0);
#endif

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.4601 +
  1.4602 +//
  1.4603 +// GetElem_TypedArray
  1.4604 +//
  1.4605 +
// Optimized stub for getelem on a typed array: shape guard, index
// normalization (int32, or a double converted to int32 when FP support is
// available), bounds check against the array length, then a typed load from
// the data buffer.
bool
ICGetElem_TypedArray::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    GeneralRegisterSet regs(availableGeneralRegs(2));
    Register scratchReg = regs.takeAny();

    // Unbox R0 and shape guard against the shape baked into the stub.
    Register obj = masm.extractObject(R0, ExtractTemp0);
    masm.loadPtr(Address(BaselineStubReg, ICGetElem_TypedArray::offsetOfShape()), scratchReg);
    masm.branchTestObjShape(Assembler::NotEqual, obj, scratchReg, &failure);

    // Ensure the index is an integer.
    if (cx->runtime()->jitSupportsFloatingPoint) {
        Label isInt32;
        masm.branchTestInt32(Assembler::Equal, R1, &isInt32);
        {
            // If the index is a double, try to convert it to int32. It's okay
            // to convert -0 to 0: the shape check ensures the object is a typed
            // array so the difference is not observable.
            masm.branchTestDouble(Assembler::NotEqual, R1, &failure);
            masm.unboxDouble(R1, FloatReg0);
            masm.convertDoubleToInt32(FloatReg0, scratchReg, &failure, /* negZeroCheck = */false);
            masm.tagValue(JSVAL_TYPE_INT32, scratchReg, R1);
        }
        masm.bind(&isInt32);
    } else {
        // Without FP support only int32 indices can be handled.
        masm.branchTestInt32(Assembler::NotEqual, R1, &failure);
    }

    // Unbox key.
    Register key = masm.extractInt32(R1, ExtractTemp1);

    // Bounds check: fail unless key < length.
    masm.unboxInt32(Address(obj, TypedArrayObject::lengthOffset()), scratchReg);
    masm.branch32(Assembler::BelowOrEqual, scratchReg, key, &failure);

    // Load the elements vector.
    masm.loadPtr(Address(obj, TypedArrayObject::dataOffset()), scratchReg);

    // Load the value, scaled by the element width of the array's type.
    BaseIndex source(scratchReg, key, ScaleFromElemWidth(TypedArrayObject::slotWidth(type_)));
    masm.loadFromTypedArray(type_, source, R0, false, scratchReg, &failure);

    // Todo: Allow loading doubles from uint32 arrays, but this requires monitoring.
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.4660 +
  1.4661 +//
// GetElem_Arguments
  1.4663 +//
// Optimized stub for getelem on arguments. Two families of variants:
//  - Magic: reads an argument directly off the baseline frame when the
//    lazy-arguments optimization is in effect (no args object created).
//  - Strict/Normal: reads from a real Strict/NormalArgumentsObject, guarded
//    against length override, deleted elements, and forwarded slots.
// CallElem sites with noSuchMethod support may additionally enter a stub
// frame to dispatch __noSuchMethod__ when the result is undefined.
bool
ICGetElem_Arguments::Compiler::generateStubCode(MacroAssembler &masm)
{
    // Variants of GetElem_Arguments can enter stub frames if entered in CallProp
    // context when noSuchMethod support is on.
#if JS_HAS_NO_SUCH_METHOD
#ifdef DEBUG
    entersStubFrame_ = true;
#endif
#endif

    Label failure;
    if (which_ == ICGetElem_Arguments::Magic) {
        JS_ASSERT(!isCallElem_);

        // Ensure that this is a magic arguments value.
        masm.branchTestMagicValue(Assembler::NotEqual, R0, JS_OPTIMIZED_ARGUMENTS, &failure);

        // Ensure that frame has not loaded different arguments object since.
        masm.branchTest32(Assembler::NonZero,
                          Address(BaselineFrameReg, BaselineFrame::reverseOffsetOfFlags()),
                          Imm32(BaselineFrame::HAS_ARGS_OBJ),
                          &failure);

        // Ensure that index is an integer.
        masm.branchTestInt32(Assembler::NotEqual, R1, &failure);
        Register idx = masm.extractInt32(R1, ExtractTemp1);

        GeneralRegisterSet regs(availableGeneralRegs(2));
        Register scratch = regs.takeAny();

        // Load num actual arguments
        Address actualArgs(BaselineFrameReg, BaselineFrame::offsetOfNumActualArgs());
        masm.loadPtr(actualArgs, scratch);

        // Ensure idx < argc
        masm.branch32(Assembler::AboveOrEqual, idx, scratch, &failure);

        // Load argval directly from the baseline frame's argument slots.
        JS_STATIC_ASSERT(sizeof(Value) == 8);
        masm.movePtr(BaselineFrameReg, scratch);
        masm.addPtr(Imm32(BaselineFrame::offsetOfArg(0)), scratch);
        BaseIndex element(scratch, idx, TimesEight);
        masm.loadValue(element, R0);

        // Enter type monitor IC to type-check result.
        EmitEnterTypeMonitorIC(masm);

        // Failure case - jump to next stub.
        masm.bind(&failure);
        EmitStubGuardFailure(masm);
        return true;
    }

    JS_ASSERT(which_ == ICGetElem_Arguments::Strict ||
              which_ == ICGetElem_Arguments::Normal);

    bool isStrict = which_ == ICGetElem_Arguments::Strict;
    const Class *clasp = isStrict ? &StrictArgumentsObject::class_ : &NormalArgumentsObject::class_;

    GeneralRegisterSet regs(availableGeneralRegs(2));
    Register scratchReg = regs.takeAny();

    // Guard on input being an arguments object.
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);
    Register objReg = masm.extractObject(R0, ExtractTemp0);
    masm.branchTestObjClass(Assembler::NotEqual, objReg, scratchReg, clasp, &failure);

    // Guard on index being int32
    masm.branchTestInt32(Assembler::NotEqual, R1, &failure);
    Register idxReg = masm.extractInt32(R1, ExtractTemp1);

    // Get initial ArgsObj length value.
    masm.unboxInt32(Address(objReg, ArgumentsObject::getInitialLengthSlotOffset()), scratchReg);

    // Test if length has been overridden: if so, the generic path must run.
    masm.branchTest32(Assembler::NonZero,
                      scratchReg,
                      Imm32(ArgumentsObject::LENGTH_OVERRIDDEN_BIT),
                      &failure);

    // Length has not been overridden, ensure that R1 is an integer and is <= length.
    // (The length is packed above the flag bits, hence the shift.)
    masm.rshiftPtr(Imm32(ArgumentsObject::PACKED_BITS_COUNT), scratchReg);
    masm.branch32(Assembler::AboveOrEqual, idxReg, scratchReg, &failure);

    // Length check succeeded, now check the correct bit.  We clobber potential type regs
    // now.  Inputs will have to be reconstructed if we fail after this point, but that's
    // unlikely.
    Label failureReconstructInputs;
    regs = availableGeneralRegs(0);
    regs.takeUnchecked(objReg);
    regs.takeUnchecked(idxReg);
    regs.take(scratchReg);
    Register argData = regs.takeAny();
    Register tempReg = regs.takeAny();

    // Load ArgumentsData
    masm.loadPrivate(Address(objReg, ArgumentsObject::getDataSlotOffset()), argData);

    // Load deletedBits bitArray pointer into scratchReg
    masm.loadPtr(Address(argData, offsetof(ArgumentsData, deletedBits)), scratchReg);

    // In tempReg, calculate index of word containing bit: (idx >> logBitsPerWord)
    masm.movePtr(idxReg, tempReg);
    const uint32_t shift = mozilla::tl::FloorLog2<(sizeof(size_t) * JS_BITS_PER_BYTE)>::value;
    JS_ASSERT(shift == 5 || shift == 6);
    masm.rshiftPtr(Imm32(shift), tempReg);
    masm.loadPtr(BaseIndex(scratchReg, tempReg, ScaleFromElemWidth(sizeof(size_t))), scratchReg);

    // Don't bother testing specific bit, if any bit is set in the word, fail.
    // (Conservative: any deleted element in the same word defeats this stub.)
    masm.branchPtr(Assembler::NotEqual, scratchReg, ImmPtr(nullptr), &failureReconstructInputs);

    // Load the value.  use scratchReg and tempReg to form a ValueOperand to load into.
    masm.addPtr(Imm32(ArgumentsData::offsetOfArgs()), argData);
    regs.add(scratchReg);
    regs.add(tempReg);
    ValueOperand tempVal = regs.takeAnyValue();
    masm.loadValue(BaseIndex(argData, idxReg, ScaleFromElemWidth(sizeof(Value))), tempVal);

    // Make sure that this is not a FORWARD_TO_CALL_SLOT magic value.
    masm.branchTestMagic(Assembler::Equal, tempVal, &failureReconstructInputs);

#if JS_HAS_NO_SUCH_METHOD
    if (isCallElem_) {
        Label afterNoSuchMethod;
        Label skipNoSuchMethod;

        // Only take the slow path when the loaded argument is undefined.
        masm.branchTestUndefined(Assembler::NotEqual, tempVal, &skipNoSuchMethod);

        // Call __noSuchMethod__ checker.  Object pointer is in objReg.
        regs = availableGeneralRegs(0);
        regs.takeUnchecked(objReg);
        regs.takeUnchecked(idxReg);
        regs.takeUnchecked(BaselineTailCallReg);
        ValueOperand val = regs.takeValueOperand();

        // Box and push obj and key onto baseline frame stack for decompiler.
        EmitRestoreTailCallReg(masm);
        masm.tagValue(JSVAL_TYPE_OBJECT, objReg, val);
        masm.pushValue(val);
        masm.tagValue(JSVAL_TYPE_INT32, idxReg, val);
        masm.pushValue(val);
        EmitRepushTailCallReg(masm);

        regs.add(val);
        enterStubFrame(masm, regs.getAnyExcluding(BaselineTailCallReg));
        regs.take(val);

        // VM-call arguments: boxed key (still in val), then the object pointer.
        masm.pushValue(val);
        masm.push(objReg);
        if (!callVM(LookupNoSuchMethodHandlerInfo, masm))
            return false;

        leaveStubFrame(masm);

        // Pop pushed obj and key from baseline stack.
        EmitUnstowICValues(masm, 2, /* discard = */ true);

        // Result is already in R0
        masm.jump(&afterNoSuchMethod);
        masm.bind(&skipNoSuchMethod);

        masm.moveValue(tempVal, R0);
        masm.bind(&afterNoSuchMethod);
    } else {
        masm.moveValue(tempVal, R0);
    }
#else
    // Copy value from temp to R0.
    masm.moveValue(tempVal, R0);
#endif

    // Type-check result
    EmitEnterTypeMonitorIC(masm);

    // Failed, but inputs are deconstructed into object and int, and need to be
    // reconstructed into values.
    masm.bind(&failureReconstructInputs);
    masm.tagValue(JSVAL_TYPE_OBJECT, objReg, R0);
    masm.tagValue(JSVAL_TYPE_INT32, idxReg, R1);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.4848 +
  1.4849 +//
  1.4850 +// SetElem_Fallback
  1.4851 +//
  1.4852 +
  1.4853 +static bool
  1.4854 +SetElemDenseAddHasSameShapes(ICSetElem_DenseAdd *stub, JSObject *obj)
  1.4855 +{
  1.4856 +    size_t numShapes = stub->protoChainDepth() + 1;
  1.4857 +    for (size_t i = 0; i < numShapes; i++) {
  1.4858 +        static const size_t MAX_DEPTH = ICSetElem_DenseAdd::MAX_PROTO_CHAIN_DEPTH;
  1.4859 +        if (obj->lastProperty() != stub->toImplUnchecked<MAX_DEPTH>()->shape(i))
  1.4860 +            return false;
  1.4861 +        obj = obj->getProto();
  1.4862 +        if (!obj && i != numShapes - 1)
  1.4863 +            return false;
  1.4864 +    }
  1.4865 +
  1.4866 +    return true;
  1.4867 +}
  1.4868 +
  1.4869 +static bool
  1.4870 +DenseSetElemStubExists(JSContext *cx, ICStub::Kind kind, ICSetElem_Fallback *stub, HandleObject obj)
  1.4871 +{
  1.4872 +    JS_ASSERT(kind == ICStub::SetElem_Dense || kind == ICStub::SetElem_DenseAdd);
  1.4873 +
  1.4874 +    for (ICStubConstIterator iter = stub->beginChainConst(); !iter.atEnd(); iter++) {
  1.4875 +        if (kind == ICStub::SetElem_Dense && iter->isSetElem_Dense()) {
  1.4876 +            ICSetElem_Dense *dense = iter->toSetElem_Dense();
  1.4877 +            if (obj->lastProperty() == dense->shape() && obj->getType(cx) == dense->type())
  1.4878 +                return true;
  1.4879 +        }
  1.4880 +
  1.4881 +        if (kind == ICStub::SetElem_DenseAdd && iter->isSetElem_DenseAdd()) {
  1.4882 +            ICSetElem_DenseAdd *dense = iter->toSetElem_DenseAdd();
  1.4883 +            if (obj->getType(cx) == dense->type() && SetElemDenseAddHasSameShapes(dense, obj))
  1.4884 +                return true;
  1.4885 +        }
  1.4886 +    }
  1.4887 +    return false;
  1.4888 +}
  1.4889 +
  1.4890 +static bool
  1.4891 +TypedArraySetElemStubExists(ICSetElem_Fallback *stub, HandleObject obj, bool expectOOB)
  1.4892 +{
  1.4893 +    for (ICStubConstIterator iter = stub->beginChainConst(); !iter.atEnd(); iter++) {
  1.4894 +        if (!iter->isSetElem_TypedArray())
  1.4895 +            continue;
  1.4896 +        ICSetElem_TypedArray *taStub = iter->toSetElem_TypedArray();
  1.4897 +        if (obj->lastProperty() == taStub->shape() && taStub->expectOutOfBounds() == expectOOB)
  1.4898 +            return true;
  1.4899 +    }
  1.4900 +    return false;
  1.4901 +}
  1.4902 +
  1.4903 +static bool
  1.4904 +RemoveExistingTypedArraySetElemStub(JSContext *cx, ICSetElem_Fallback *stub, HandleObject obj)
  1.4905 +{
  1.4906 +    for (ICStubIterator iter = stub->beginChain(); !iter.atEnd(); iter++) {
  1.4907 +        if (!iter->isSetElem_TypedArray())
  1.4908 +            continue;
  1.4909 +
  1.4910 +        if (obj->lastProperty() != iter->toSetElem_TypedArray()->shape())
  1.4911 +            continue;
  1.4912 +
  1.4913 +        // TypedArraySetElem stubs are only removed using this procedure if
  1.4914 +        // being replaced with one that expects out of bounds index.
  1.4915 +        JS_ASSERT(!iter->toSetElem_TypedArray()->expectOutOfBounds());
  1.4916 +        iter.unlink(cx);
  1.4917 +        return true;
  1.4918 +    }
  1.4919 +    return false;
  1.4920 +}
  1.4921 +
  1.4922 +static bool
  1.4923 +CanOptimizeDenseSetElem(JSContext *cx, HandleObject obj, uint32_t index,
  1.4924 +                        HandleShape oldShape, uint32_t oldCapacity, uint32_t oldInitLength,
  1.4925 +                        bool *isAddingCaseOut, size_t *protoDepthOut)
  1.4926 +{
  1.4927 +    uint32_t initLength = obj->getDenseInitializedLength();
  1.4928 +    uint32_t capacity = obj->getDenseCapacity();
  1.4929 +
  1.4930 +    *isAddingCaseOut = false;
  1.4931 +    *protoDepthOut = 0;
  1.4932 +
  1.4933 +    // Some initial sanity checks.
  1.4934 +    if (initLength < oldInitLength || capacity < oldCapacity)
  1.4935 +        return false;
  1.4936 +
  1.4937 +    RootedShape shape(cx, obj->lastProperty());
  1.4938 +
  1.4939 +    // Cannot optimize if the shape changed.
  1.4940 +    if (oldShape != shape)
  1.4941 +        return false;
  1.4942 +
  1.4943 +    // Cannot optimize if the capacity changed.
  1.4944 +    if (oldCapacity != capacity)
  1.4945 +        return false;
  1.4946 +
  1.4947 +    // Cannot optimize if the index doesn't fit within the new initialized length.
  1.4948 +    if (index >= initLength)
  1.4949 +        return false;
  1.4950 +
  1.4951 +    // Cannot optimize if the value at position after the set is a hole.
  1.4952 +    if (!obj->containsDenseElement(index))
  1.4953 +        return false;
  1.4954 +
  1.4955 +    // At this point, if we know that the initLength did not change, then
  1.4956 +    // an optimized set is possible.
  1.4957 +    if (oldInitLength == initLength)
  1.4958 +        return true;
  1.4959 +
  1.4960 +    // If it did change, ensure that it changed specifically by incrementing by 1
  1.4961 +    // to accomodate this particular indexed set.
  1.4962 +    if (oldInitLength + 1 != initLength)
  1.4963 +        return false;
  1.4964 +    if (index != oldInitLength)
  1.4965 +        return false;
  1.4966 +
  1.4967 +    // The checks are not complete.  The object may have a setter definition,
  1.4968 +    // either directly, or via a prototype, or via the target object for a prototype
  1.4969 +    // which is a proxy, that handles a particular integer write.
  1.4970 +    // Scan the prototype and shape chain to make sure that this is not the case.
  1.4971 +    RootedObject curObj(cx, obj);
  1.4972 +    while (curObj) {
  1.4973 +        // Ensure object is native.
  1.4974 +        if (!curObj->isNative())
  1.4975 +            return false;
  1.4976 +
  1.4977 +        // Ensure all indexed properties are stored in dense elements.
  1.4978 +        if (curObj->isIndexed())
  1.4979 +            return false;
  1.4980 +
  1.4981 +        curObj = curObj->getProto();
  1.4982 +        if (curObj)
  1.4983 +            ++*protoDepthOut;
  1.4984 +    }
  1.4985 +
  1.4986 +    if (*protoDepthOut > ICSetElem_DenseAdd::MAX_PROTO_CHAIN_DEPTH)
  1.4987 +        return false;
  1.4988 +
  1.4989 +    *isAddingCaseOut = true;
  1.4990 +
  1.4991 +    return true;
  1.4992 +}
  1.4993 +
// Fallback path for JSOP_SETELEM / JSOP_INITELEM / JSOP_INITELEM_ARRAY.
// Performs the set via the generic interpreter helpers, then tries to attach
// an optimized stub (SetElem_Dense, SetElem_DenseAdd, or SetElem_TypedArray)
// so that subsequent executions bypass this path.
// |stack| points into the baseline frame's expression stack: stack[2] holds
// the object value (pushed for the decompiler) and is overwritten with |rhs|,
// the assignment expression's result value.
static bool
DoSetElemFallback(JSContext *cx, BaselineFrame *frame, ICSetElem_Fallback *stub_, Value *stack,
                  HandleValue objv, HandleValue index, HandleValue rhs)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICSetElem_Fallback *> stub(frame, stub_);

    RootedScript script(cx, frame->script());
    jsbytecode *pc = stub->icEntry()->pc(script);
    JSOp op = JSOp(*pc);
    FallbackICSpew(cx, stub, "SetElem(%s)", js_CodeName[JSOp(*pc)]);

    JS_ASSERT(op == JSOP_SETELEM ||
              op == JSOP_INITELEM ||
              op == JSOP_INITELEM_ARRAY);

    RootedObject obj(cx, ToObjectFromStack(cx, objv));
    if (!obj)
        return false;

    RootedShape oldShape(cx, obj->lastProperty());

    // Check the old capacity and initialized length before the set runs, so
    // we can tell afterwards whether it was an overwrite or a dense "add".
    uint32_t oldCapacity = 0;
    uint32_t oldInitLength = 0;
    if (obj->isNative() && index.isInt32() && index.toInt32() >= 0) {
        oldCapacity = obj->getDenseCapacity();
        oldInitLength = obj->getDenseInitializedLength();
    }

    // Perform the actual set via the op-appropriate interpreter helper.
    if (op == JSOP_INITELEM) {
        if (!InitElemOperation(cx, obj, index, rhs))
            return false;
    } else if (op == JSOP_INITELEM_ARRAY) {
        JS_ASSERT(uint32_t(index.toInt32()) == GET_UINT24(pc));
        if (!InitArrayElemOperation(cx, pc, obj, index.toInt32(), rhs))
            return false;
    } else {
        if (!SetObjectElement(cx, obj, index, rhs, script->strict(), script, pc))
            return false;
    }

    // Overwrite the object on the stack (pushed for the decompiler) with the rhs.
    JS_ASSERT(stack[2] == objv);
    stack[2] = rhs;

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    if (stub->numOptimizedStubs() >= ICSetElem_Fallback::MAX_OPTIMIZED_STUBS) {
        // TODO: Discard all stubs in this IC and replace with inert megamorphic stub.
        // But for now we just bail.
        return true;
    }

    // Try to generate new stubs.
    // Dense-element case: native object (but not a typed array), non-negative
    // int32 index, and an rhs that isn't the elements-hole sentinel.
    if (obj->isNative() &&
        !obj->is<TypedArrayObject>() &&
        index.isInt32() && index.toInt32() >= 0 &&
        !rhs.isMagic(JS_ELEMENTS_HOLE))
    {
        bool addingCase;
        size_t protoDepth;

        if (CanOptimizeDenseSetElem(cx, obj, index.toInt32(), oldShape, oldCapacity, oldInitLength,
                                    &addingCase, &protoDepth))
        {
            RootedShape shape(cx, obj->lastProperty());
            RootedTypeObject type(cx, obj->getType(cx));
            if (!type)
                return false;

            if (addingCase && !DenseSetElemStubExists(cx, ICStub::SetElem_DenseAdd, stub, obj)) {
                IonSpew(IonSpew_BaselineIC,
                        "  Generating SetElem_DenseAdd stub "
                        "(shape=%p, type=%p, protoDepth=%u)",
                        obj->lastProperty(), type.get(), protoDepth);
                ICSetElemDenseAddCompiler compiler(cx, obj, protoDepth);
                ICUpdatedStub *denseStub = compiler.getStub(compiler.getStubSpace(script));
                if (!denseStub)
                    return false;
                // Seed the update stub chain with the value just written.
                if (!denseStub->addUpdateStubForValue(cx, script, obj, JSID_VOIDHANDLE, rhs))
                    return false;

                stub->addNewStub(denseStub);
            } else if (!addingCase &&
                       !DenseSetElemStubExists(cx, ICStub::SetElem_Dense, stub, obj))
            {
                IonSpew(IonSpew_BaselineIC,
                        "  Generating SetElem_Dense stub (shape=%p, type=%p)",
                        obj->lastProperty(), type.get());
                ICSetElem_Dense::Compiler compiler(cx, shape, type);
                ICUpdatedStub *denseStub = compiler.getStub(compiler.getStubSpace(script));
                if (!denseStub)
                    return false;
                // Seed the update stub chain with the value just written.
                if (!denseStub->addUpdateStubForValue(cx, script, obj, JSID_VOIDHANDLE, rhs))
                    return false;

                stub->addNewStub(denseStub);
            }
        }

        return true;
    }

    // Typed-array case: numeric index and numeric rhs.
    if (obj->is<TypedArrayObject>() && index.isNumber() && rhs.isNumber()) {
        Rooted<TypedArrayObject*> tarr(cx, &obj->as<TypedArrayObject>());
        // Without FP support, bail on stores that would need doubles.
        if (!cx->runtime()->jitSupportsFloatingPoint &&
            (TypedArrayRequiresFloatingPoint(tarr) || index.isDouble()))
        {
            return true;
        }

        uint32_t len = tarr->length();
        double idx = index.toNumber();
        bool expectOutOfBounds = (idx < 0 || idx >= double(len));

        if (!TypedArraySetElemStubExists(stub, tarr, expectOutOfBounds)) {
            // Remove any existing TypedArraySetElemStub that doesn't handle out-of-bounds
            if (expectOutOfBounds)
                RemoveExistingTypedArraySetElemStub(cx, stub, tarr);

            IonSpew(IonSpew_BaselineIC,
                    "  Generating SetElem_TypedArray stub (shape=%p, type=%u, oob=%s)",
                    tarr->lastProperty(), tarr->type(), expectOutOfBounds ? "yes" : "no");
            ICSetElem_TypedArray::Compiler compiler(cx, tarr->lastProperty(), tarr->type(),
                                                    expectOutOfBounds);
            ICStub *typedArrayStub = compiler.getStub(compiler.getStubSpace(script));
            if (!typedArrayStub)
                return false;

            stub->addNewStub(typedArrayStub);
            return true;
        }
    }

    return true;
}
  1.5133 +
// Glue for calling DoSetElemFallback from jitcode. PopValues(2) discards the
// extra Values the stub synced on the stack for the decompiler (see
// ICSetElem_Fallback::Compiler::generateStubCode) when the call returns.
typedef bool (*DoSetElemFallbackFn)(JSContext *, BaselineFrame *, ICSetElem_Fallback *, Value *,
                                    HandleValue, HandleValue, HandleValue);
static const VMFunction DoSetElemFallbackInfo =
    FunctionInfo<DoSetElemFallbackFn>(DoSetElemFallback, PopValues(2));
  1.5138 +
// Fallback stub for SETELEM: arrange the stack for the decompiler, push the
// (object, index, rhs) arguments plus a pointer to the synced stack Values,
// and tail-call DoSetElemFallback. Arguments are pushed last-to-first, per
// the VM-call convention.
bool
ICSetElem_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(R0 == JSReturnOperand);

    // Restore the tail-call register clobbered by the IC call.
    EmitRestoreTailCallReg(masm);

    // State: R0: object, R1: index, stack: rhs.
    // For the decompiler, the stack has to be: object, index, rhs,
    // so we push the index, then overwrite the rhs Value with R0
    // and push the rhs value.
    masm.pushValue(R1);
    masm.loadValue(Address(BaselineStackReg, sizeof(Value)), R1);
    masm.storeValue(R0, Address(BaselineStackReg, sizeof(Value)));
    masm.pushValue(R1);

    // Push arguments.
    masm.pushValue(R1); // RHS

    // Push index. On x86 and ARM two push instructions are emitted so use a
    // separate register to store the old stack pointer.
    masm.mov(BaselineStackReg, R1.scratchReg());
    masm.pushValue(Address(R1.scratchReg(), 2 * sizeof(Value)));
    masm.pushValue(R0); // Object.

    // Push pointer to stack values, so that the stub can overwrite the object
    // (pushed for the decompiler) with the rhs.
    masm.computeEffectiveAddress(Address(BaselineStackReg, 3 * sizeof(Value)), R0.scratchReg());
    masm.push(R0.scratchReg());

    // Push the stub pointer and the baseline frame pointer, then tail-call.
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(DoSetElemFallbackInfo, masm);
}
  1.5174 +
  1.5175 +void
  1.5176 +BaselineScript::noteArrayWriteHole(uint32_t pcOffset)
  1.5177 +{
  1.5178 +    ICEntry &entry = icEntryFromPCOffset(pcOffset);
  1.5179 +    ICFallbackStub *stub = entry.fallbackStub();
  1.5180 +
  1.5181 +    if (stub->isSetElem_Fallback())
  1.5182 +        stub->toSetElem_Fallback()->noteArrayWriteHole();
  1.5183 +}
  1.5184 +
  1.5185 +//
  1.5186 +// SetElem_Dense
  1.5187 +//
  1.5188 +
// Optimized stub: store to an already-initialized dense element of an object
// with a known shape and type object. Calls the type-update IC so the heap
// typeset stays in sync with the stored value.
bool
ICSetElem_Dense::Compiler::generateStubCode(MacroAssembler &masm)
{
    // R0 = object
    // R1 = key
    // Stack = { ... rhs-value, <return-addr>? }
    Label failure;
    Label failureUnstow;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);
    masm.branchTestInt32(Assembler::NotEqual, R1, &failure);

    GeneralRegisterSet regs(availableGeneralRegs(2));
    Register scratchReg = regs.takeAny();

    // Unbox R0 and guard on its shape.
    Register obj = masm.extractObject(R0, ExtractTemp0);
    masm.loadPtr(Address(BaselineStubReg, ICSetElem_Dense::offsetOfShape()), scratchReg);
    masm.branchTestObjShape(Assembler::NotEqual, obj, scratchReg, &failure);

    // Stow both R0 and R1 (object and key)
    // But R0 and R1 still hold their values.
    EmitStowICValues(masm, 2);

    // We may need to free up some registers.
    regs = availableGeneralRegs(0);
    regs.take(R0);

    // Guard that the type object matches.
    Register typeReg = regs.takeAny();
    masm.loadPtr(Address(BaselineStubReg, ICSetElem_Dense::offsetOfType()), typeReg);
    masm.branchPtr(Assembler::NotEqual, Address(obj, JSObject::offsetOfType()), typeReg,
                   &failureUnstow);
    regs.add(typeReg);

    // Stack is now: { ..., rhs-value, object-value, key-value, maybe?-RET-ADDR }
    // Load rhs-value in to R0
    masm.loadValue(Address(BaselineStackReg, 2 * sizeof(Value) + ICStackValueOffset), R0);

    // Call the type-update stub.
    if (!callTypeUpdateIC(masm, sizeof(Value)))
        return false;

    // Unstow R0 and R1 (object and key)
    EmitUnstowICValues(masm, 2);

    // Reset register set.
    regs = availableGeneralRegs(2);
    scratchReg = regs.takeAny();

    // Unbox object and key.
    obj = masm.extractObject(R0, ExtractTemp0);
    Register key = masm.extractInt32(R1, ExtractTemp1);

    // Load obj->elements in scratchReg.
    masm.loadPtr(Address(obj, JSObject::offsetOfElements()), scratchReg);

    // Bounds check: fail if key >= initializedLength.
    Address initLength(scratchReg, ObjectElements::offsetOfInitializedLength());
    masm.branch32(Assembler::BelowOrEqual, initLength, key, &failure);

    // Hole check: a magic value in the element slot marks a hole.
    BaseIndex element(scratchReg, key, TimesEight);
    masm.branchTestMagic(Assembler::Equal, element, &failure);

    // Failure is not possible now.  Free up registers.
    regs.add(R0);
    regs.add(R1);
    regs.takeUnchecked(obj);
    regs.takeUnchecked(key);
    Address valueAddr(BaselineStackReg, ICStackValueOffset);

    // Convert int32 values to double if convertDoubleElements is set. In this
    // case the heap typeset is guaranteed to contain both int32 and double, so
    // it's okay to store a double.
    Label dontConvertDoubles;
    Address elementsFlags(scratchReg, ObjectElements::offsetOfFlags());
    masm.branchTest32(Assembler::Zero, elementsFlags,
                      Imm32(ObjectElements::CONVERT_DOUBLE_ELEMENTS),
                      &dontConvertDoubles);
    // Note that double arrays are only created by IonMonkey, so if we have no
    // floating-point support Ion is disabled and there should be no double arrays.
    if (cx->runtime()->jitSupportsFloatingPoint)
        masm.convertInt32ValueToDouble(valueAddr, regs.getAny(), &dontConvertDoubles);
    else
        masm.assumeUnreachable("There shouldn't be double arrays when there is no FP support.");
    masm.bind(&dontConvertDoubles);

    // Don't overwrite R0 because |obj| might overlap with it, and it's needed
    // for the post-write barrier later.
    ValueOperand tmpVal = regs.takeAnyValue();
    masm.loadValue(valueAddr, tmpVal);
    EmitPreBarrier(masm, element, MIRType_Value);
    masm.storeValue(tmpVal, element);
    regs.add(key);
#ifdef JSGC_GENERATIONAL
    {
        // Post-write barrier for generational GC.
        Register r = regs.takeAny();
        GeneralRegisterSet saveRegs;
        emitPostWriteBarrierSlot(masm, obj, tmpVal, r, saveRegs);
        regs.add(r);
    }
#endif
    EmitReturnFromIC(masm);


    // Failure case - fail but first unstow R0 and R1
    masm.bind(&failureUnstow);
    EmitUnstowICValues(masm, 2);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.5303 +
  1.5304 +static bool
  1.5305 +GetProtoShapes(JSObject *obj, size_t protoChainDepth, AutoShapeVector *shapes)
  1.5306 +{
  1.5307 +    JS_ASSERT(shapes->length() == 1);
  1.5308 +    JSObject *curProto = obj->getProto();
  1.5309 +    for (size_t i = 0; i < protoChainDepth; i++) {
  1.5310 +        if (!shapes->append(curProto->lastProperty()))
  1.5311 +            return false;
  1.5312 +        curProto = curProto->getProto();
  1.5313 +    }
  1.5314 +    JS_ASSERT(!curProto);
  1.5315 +    return true;
  1.5316 +}
  1.5317 +
  1.5318 +//
  1.5319 +// SetElem_DenseAdd
  1.5320 +//
  1.5321 +
  1.5322 +ICUpdatedStub *
  1.5323 +ICSetElemDenseAddCompiler::getStub(ICStubSpace *space)
  1.5324 +{
  1.5325 +    AutoShapeVector shapes(cx);
  1.5326 +    if (!shapes.append(obj_->lastProperty()))
  1.5327 +        return nullptr;
  1.5328 +
  1.5329 +    if (!GetProtoShapes(obj_, protoChainDepth_, &shapes))
  1.5330 +        return nullptr;
  1.5331 +
  1.5332 +    JS_STATIC_ASSERT(ICSetElem_DenseAdd::MAX_PROTO_CHAIN_DEPTH == 4);
  1.5333 +
  1.5334 +    ICUpdatedStub *stub = nullptr;
  1.5335 +    switch (protoChainDepth_) {
  1.5336 +      case 0: stub = getStubSpecific<0>(space, &shapes); break;
  1.5337 +      case 1: stub = getStubSpecific<1>(space, &shapes); break;
  1.5338 +      case 2: stub = getStubSpecific<2>(space, &shapes); break;
  1.5339 +      case 3: stub = getStubSpecific<3>(space, &shapes); break;
  1.5340 +      case 4: stub = getStubSpecific<4>(space, &shapes); break;
  1.5341 +      default: MOZ_ASSUME_UNREACHABLE("ProtoChainDepth too high.");
  1.5342 +    }
  1.5343 +    if (!stub || !stub->initUpdatingChain(cx, space))
  1.5344 +        return nullptr;
  1.5345 +    return stub;
  1.5346 +}
  1.5347 +
// Optimized stub: append an element to a dense array at exactly
// index == initializedLength (within capacity), growing initializedLength
// and, if needed, length. Guards the receiver's shape and type object and
// the shape of every object on the proto chain.
bool
ICSetElemDenseAddCompiler::generateStubCode(MacroAssembler &masm)
{
    // R0 = object
    // R1 = key
    // Stack = { ... rhs-value, <return-addr>? }
    Label failure;
    Label failureUnstow;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);
    masm.branchTestInt32(Assembler::NotEqual, R1, &failure);

    GeneralRegisterSet regs(availableGeneralRegs(2));
    Register scratchReg = regs.takeAny();

    // Unbox R0 and guard on its shape.
    Register obj = masm.extractObject(R0, ExtractTemp0);
    masm.loadPtr(Address(BaselineStubReg, ICSetElem_DenseAddImpl<0>::offsetOfShape(0)),
                 scratchReg);
    masm.branchTestObjShape(Assembler::NotEqual, obj, scratchReg, &failure);

    // Stow both R0 and R1 (object and key)
    // But R0 and R1 still hold their values.
    EmitStowICValues(masm, 2);

    // We may need to free up some registers.
    regs = availableGeneralRegs(0);
    regs.take(R0);

    // Guard that the type object matches.
    Register typeReg = regs.takeAny();
    masm.loadPtr(Address(BaselineStubReg, ICSetElem_DenseAdd::offsetOfType()), typeReg);
    masm.branchPtr(Assembler::NotEqual, Address(obj, JSObject::offsetOfType()), typeReg,
                   &failureUnstow);
    regs.add(typeReg);

    // Shape guard objects on the proto chain. A null proto ends the chain
    // early and fails the guard.
    scratchReg = regs.takeAny();
    Register protoReg = regs.takeAny();
    for (size_t i = 0; i < protoChainDepth_; i++) {
        masm.loadObjProto(i == 0 ? obj : protoReg, protoReg);
        masm.branchTestPtr(Assembler::Zero, protoReg, protoReg, &failureUnstow);
        masm.loadPtr(Address(BaselineStubReg, ICSetElem_DenseAddImpl<0>::offsetOfShape(i + 1)),
                     scratchReg);
        masm.branchTestObjShape(Assembler::NotEqual, protoReg, scratchReg, &failureUnstow);
    }
    regs.add(protoReg);
    regs.add(scratchReg);

    // Stack is now: { ..., rhs-value, object-value, key-value, maybe?-RET-ADDR }
    // Load rhs-value in to R0
    masm.loadValue(Address(BaselineStackReg, 2 * sizeof(Value) + ICStackValueOffset), R0);

    // Call the type-update stub.
    if (!callTypeUpdateIC(masm, sizeof(Value)))
        return false;

    // Unstow R0 and R1 (object and key)
    EmitUnstowICValues(masm, 2);

    // Reset register set.
    regs = availableGeneralRegs(2);
    scratchReg = regs.takeAny();

    // Unbox obj and key.
    obj = masm.extractObject(R0, ExtractTemp0);
    Register key = masm.extractInt32(R1, ExtractTemp1);

    // Load obj->elements in scratchReg.
    masm.loadPtr(Address(obj, JSObject::offsetOfElements()), scratchReg);

    // Bounds check (key == initLength): this stub only handles appends.
    Address initLength(scratchReg, ObjectElements::offsetOfInitializedLength());
    masm.branch32(Assembler::NotEqual, initLength, key, &failure);

    // Capacity check.
    Address capacity(scratchReg, ObjectElements::offsetOfCapacity());
    masm.branch32(Assembler::BelowOrEqual, capacity, key, &failure);

    // Failure is not possible now.  Free up registers.
    regs.add(R0);
    regs.add(R1);
    regs.takeUnchecked(obj);
    regs.takeUnchecked(key);

    // Increment initLength before write.
    masm.add32(Imm32(1), initLength);

    // If length is now <= key, increment length before write.
    Label skipIncrementLength;
    Address length(scratchReg, ObjectElements::offsetOfLength());
    masm.branch32(Assembler::Above, length, key, &skipIncrementLength);
    masm.add32(Imm32(1), length);
    masm.bind(&skipIncrementLength);

    Address valueAddr(BaselineStackReg, ICStackValueOffset);

    // Convert int32 values to double if convertDoubleElements is set. In this
    // case the heap typeset is guaranteed to contain both int32 and double, so
    // it's okay to store a double.
    Label dontConvertDoubles;
    Address elementsFlags(scratchReg, ObjectElements::offsetOfFlags());
    masm.branchTest32(Assembler::Zero, elementsFlags,
                      Imm32(ObjectElements::CONVERT_DOUBLE_ELEMENTS),
                      &dontConvertDoubles);
    // Note that double arrays are only created by IonMonkey, so if we have no
    // floating-point support Ion is disabled and there should be no double arrays.
    if (cx->runtime()->jitSupportsFloatingPoint)
        masm.convertInt32ValueToDouble(valueAddr, regs.getAny(), &dontConvertDoubles);
    else
        masm.assumeUnreachable("There shouldn't be double arrays when there is no FP support.");
    masm.bind(&dontConvertDoubles);

    // Write the value.  No need for pre-barrier since we're not overwriting an old value.
    ValueOperand tmpVal = regs.takeAnyValue();
    BaseIndex element(scratchReg, key, TimesEight);
    masm.loadValue(valueAddr, tmpVal);
    masm.storeValue(tmpVal, element);
    regs.add(key);
#ifdef JSGC_GENERATIONAL
    {
        // Post-write barrier for generational GC.
        Register r = regs.takeAny();
        GeneralRegisterSet saveRegs;
        emitPostWriteBarrierSlot(masm, obj, tmpVal, r, saveRegs);
        regs.add(r);
    }
#endif
    EmitReturnFromIC(masm);

    // Failure case - fail but first unstow R0 and R1
    masm.bind(&failureUnstow);
    EmitUnstowICValues(masm, 2);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.5485 +
  1.5486 +//
  1.5487 +// SetElem_TypedArray
  1.5488 +//
  1.5489 +
// Optimized stub: store a number to a typed array element, converting the
// value to the array's element type (ensuring a double for float types,
// clamping for Uint8Clamped, truncating for other integer types). If
// expectOutOfBounds_ is set, out-of-bounds writes complete as no-ops
// instead of falling through to the next stub.
bool
ICSetElem_TypedArray::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    GeneralRegisterSet regs(availableGeneralRegs(2));
    Register scratchReg = regs.takeAny();

    // Unbox R0 and shape guard.
    Register obj = masm.extractObject(R0, ExtractTemp0);
    masm.loadPtr(Address(BaselineStubReg, ICSetElem_TypedArray::offsetOfShape()), scratchReg);
    masm.branchTestObjShape(Assembler::NotEqual, obj, scratchReg, &failure);

    // Ensure the index is an integer.
    if (cx->runtime()->jitSupportsFloatingPoint) {
        Label isInt32;
        masm.branchTestInt32(Assembler::Equal, R1, &isInt32);
        {
            // If the index is a double, try to convert it to int32. It's okay
            // to convert -0 to 0: the shape check ensures the object is a typed
            // array so the difference is not observable.
            masm.branchTestDouble(Assembler::NotEqual, R1, &failure);
            masm.unboxDouble(R1, FloatReg0);
            masm.convertDoubleToInt32(FloatReg0, scratchReg, &failure, /* negZeroCheck = */false);
            masm.tagValue(JSVAL_TYPE_INT32, scratchReg, R1);
        }
        masm.bind(&isInt32);
    } else {
        // Without FP support only int32 indexes can be handled.
        masm.branchTestInt32(Assembler::NotEqual, R1, &failure);
    }

    // Unbox key.
    Register key = masm.extractInt32(R1, ExtractTemp1);

    // Bounds check. Out-of-bounds is either a no-op (oobWrite) or a guard
    // failure, depending on expectOutOfBounds_.
    Label oobWrite;
    masm.unboxInt32(Address(obj, TypedArrayObject::lengthOffset()), scratchReg);
    masm.branch32(Assembler::BelowOrEqual, scratchReg, key,
                  expectOutOfBounds_ ? &oobWrite : &failure);

    // Load the elements vector.
    masm.loadPtr(Address(obj, TypedArrayObject::dataOffset()), scratchReg);

    BaseIndex dest(scratchReg, key, ScaleFromElemWidth(TypedArrayObject::slotWidth(type_)));
    Address value(BaselineStackReg, ICStackValueOffset);

    // We need a second scratch register. It's okay to clobber the type tag of
    // R0 or R1, as long as it's restored before jumping to the next stub.
    regs = availableGeneralRegs(0);
    regs.takeUnchecked(obj);
    regs.takeUnchecked(key);
    regs.take(scratchReg);
    Register secondScratch = regs.takeAny();

    if (type_ == ScalarTypeDescr::TYPE_FLOAT32 || type_ == ScalarTypeDescr::TYPE_FLOAT64) {
        // Float element types: coerce the value to double, narrowing to
        // float32 first when float32 optimizations are enabled.
        masm.ensureDouble(value, FloatReg0, &failure);
        if (LIRGenerator::allowFloat32Optimizations() &&
            type_ == ScalarTypeDescr::TYPE_FLOAT32)
        {
            masm.convertDoubleToFloat32(FloatReg0, ScratchFloatReg);
            masm.storeToTypedFloatArray(type_, ScratchFloatReg, dest);
        } else {
            masm.storeToTypedFloatArray(type_, FloatReg0, dest);
        }
        EmitReturnFromIC(masm);
    } else if (type_ == ScalarTypeDescr::TYPE_UINT8_CLAMPED) {
        Label notInt32;
        masm.branchTestInt32(Assembler::NotEqual, value, &notInt32);
        masm.unboxInt32(value, secondScratch);
        masm.clampIntToUint8(secondScratch);

        Label clamped;
        masm.bind(&clamped);
        masm.storeToTypedIntArray(type_, secondScratch, dest);
        EmitReturnFromIC(masm);

        // If the value is a double, clamp to uint8 and jump back.
        // Else, jump to failure.
        masm.bind(&notInt32);
        if (cx->runtime()->jitSupportsFloatingPoint) {
            masm.branchTestDouble(Assembler::NotEqual, value, &failure);
            masm.unboxDouble(value, FloatReg0);
            masm.clampDoubleToUint8(FloatReg0, secondScratch);
            masm.jump(&clamped);
        } else {
            masm.jump(&failure);
        }
    } else {
        // Remaining integer element types: store the int32, truncating
        // doubles when possible.
        Label notInt32;
        masm.branchTestInt32(Assembler::NotEqual, value, &notInt32);
        masm.unboxInt32(value, secondScratch);

        Label isInt32;
        masm.bind(&isInt32);
        masm.storeToTypedIntArray(type_, secondScratch, dest);
        EmitReturnFromIC(masm);

        // If the value is a double, truncate and jump back.
        // Else, jump to failure.
        Label failureRestoreRegs;
        masm.bind(&notInt32);
        if (cx->runtime()->jitSupportsFloatingPoint) {
            masm.branchTestDouble(Assembler::NotEqual, value, &failure);
            masm.unboxDouble(value, FloatReg0);
            masm.branchTruncateDouble(FloatReg0, secondScratch, &failureRestoreRegs);
            masm.jump(&isInt32);
        } else {
            masm.jump(&failure);
        }

        // Writing to secondScratch may have clobbered R0 or R1, restore them
        // first.
        masm.bind(&failureRestoreRegs);
        masm.tagValue(JSVAL_TYPE_OBJECT, obj, R0);
        masm.tagValue(JSVAL_TYPE_INT32, key, R1);
    }

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);

    if (expectOutOfBounds_) {
        // Out-of-bounds writes are ignored: return without storing.
        masm.bind(&oobWrite);
        EmitReturnFromIC(masm);
    }
    return true;
}
  1.5618 +
  1.5619 +//
  1.5620 +// In_Fallback
  1.5621 +//
  1.5622 +
  1.5623 +static bool
  1.5624 +DoInFallback(JSContext *cx, ICIn_Fallback *stub, HandleValue key, HandleValue objValue,
  1.5625 +             MutableHandleValue res)
  1.5626 +{
  1.5627 +    FallbackICSpew(cx, stub, "In");
  1.5628 +
  1.5629 +    if (!objValue.isObject()) {
  1.5630 +        js_ReportValueError(cx, JSMSG_IN_NOT_OBJECT, -1, objValue, NullPtr());
  1.5631 +        return false;
  1.5632 +    }
  1.5633 +
  1.5634 +    RootedObject obj(cx, &objValue.toObject());
  1.5635 +
  1.5636 +    bool cond = false;
  1.5637 +    if (!OperatorIn(cx, key, obj, &cond))
  1.5638 +        return false;
  1.5639 +
  1.5640 +    res.setBoolean(cond);
  1.5641 +    return true;
  1.5642 +}
  1.5643 +
// Glue for calling DoInFallback from jitcode. PopValues(2) discards the two
// Values the stub synced on the stack for the decompiler.
typedef bool (*DoInFallbackFn)(JSContext *, ICIn_Fallback *, HandleValue, HandleValue,
                               MutableHandleValue);
static const VMFunction DoInFallbackInfo =
    FunctionInfo<DoInFallbackFn>(DoInFallback, PopValues(2));
  1.5648 +
// Fallback stub for JSOP_IN: sync the operands for the decompiler, then push
// the arguments (last-to-first: object, key) and tail-call DoInFallback.
bool
ICIn_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    // Sync for the decompiler.
    masm.pushValue(R0);
    masm.pushValue(R1);

    // Push arguments.
    masm.pushValue(R1); // object
    masm.pushValue(R0); // key
    masm.push(BaselineStubReg);

    return tailCallVM(DoInFallbackInfo, masm);
}
  1.5665 +
  1.5666 +// Attach an optimized stub for a GETGNAME/CALLGNAME op.
  1.5667 +static bool
  1.5668 +TryAttachGlobalNameStub(JSContext *cx, HandleScript script, jsbytecode *pc,
  1.5669 +                        ICGetName_Fallback *stub, HandleObject global,
  1.5670 +                        HandlePropertyName name)
  1.5671 +{
  1.5672 +    JS_ASSERT(global->is<GlobalObject>());
  1.5673 +
  1.5674 +    RootedId id(cx, NameToId(name));
  1.5675 +
  1.5676 +    // Instantiate this global property, for use during Ion compilation.
  1.5677 +    if (IsIonEnabled(cx))
  1.5678 +        types::EnsureTrackPropertyTypes(cx, global, NameToId(name));
  1.5679 +
  1.5680 +    // The property must be found, and it must be found as a normal data property.
  1.5681 +    RootedShape shape(cx, global->nativeLookup(cx, id));
  1.5682 +    if (!shape)
  1.5683 +        return true;
  1.5684 +
  1.5685 +    if (shape->hasDefaultGetter() && shape->hasSlot()) {
  1.5686 +
  1.5687 +        JS_ASSERT(shape->slot() >= global->numFixedSlots());
  1.5688 +        uint32_t slot = shape->slot() - global->numFixedSlots();
  1.5689 +
  1.5690 +        // TODO: if there's a previous stub discard it, or just update its Shape + slot?
  1.5691 +
  1.5692 +        ICStub *monitorStub = stub->fallbackMonitorStub()->firstMonitorStub();
  1.5693 +        IonSpew(IonSpew_BaselineIC, "  Generating GetName(GlobalName) stub");
  1.5694 +        ICGetName_Global::Compiler compiler(cx, monitorStub, global->lastProperty(), slot);
  1.5695 +        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
  1.5696 +        if (!newStub)
  1.5697 +            return false;
  1.5698 +
  1.5699 +        stub->addNewStub(newStub);
  1.5700 +        return true;
  1.5701 +    }
  1.5702 +
  1.5703 +    bool isScripted;
  1.5704 +    if (IsCacheableGetPropCall(cx, global, global, shape, &isScripted) && !isScripted)
  1.5705 +    {
  1.5706 +        ICStub *monitorStub = stub->fallbackMonitorStub()->firstMonitorStub();
  1.5707 +        IonSpew(IonSpew_BaselineIC, "  Generating GetName(GlobalName/NativeGetter) stub");
  1.5708 +        RootedFunction getter(cx, &shape->getterObject()->as<JSFunction>());
  1.5709 +        ICGetProp_CallNative::Compiler compiler(cx, monitorStub, global,
  1.5710 +                                                getter, script->pcToOffset(pc),
  1.5711 +                                                /* inputDefinitelyObject = */ true);
  1.5712 +        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
  1.5713 +        if (!newStub)
  1.5714 +            return false;
  1.5715 +
  1.5716 +        stub->addNewStub(newStub);
  1.5717 +        return true;
  1.5718 +    }
  1.5719 +
  1.5720 +    return true;
  1.5721 +}
  1.5722 +
// Attach an optimized stub for a scope-chain name lookup (non-GNAME case).
// Walks the scope chain from |initialScopeChain|, collecting each scope's
// shape. If the name resolves to a cacheable data slot, attaches an
// ICGetName_Scope stub templated on the number of hops, guarding every
// collected shape. Returns false only on OOM; attaching nothing is not an
// error.
static bool
TryAttachScopeNameStub(JSContext *cx, HandleScript script, ICGetName_Fallback *stub,
                       HandleObject initialScopeChain, HandlePropertyName name)
{
    AutoShapeVector shapes(cx);
    RootedId id(cx, NameToId(name));
    RootedObject scopeChain(cx, initialScopeChain);

    Shape *shape = nullptr;
    while (scopeChain) {
        if (!shapes.append(scopeChain->lastProperty()))
            return false;

        if (scopeChain->is<GlobalObject>()) {
            // The global terminates the walk: either the name is found here
            // or it is not optimizable by this stub.
            shape = scopeChain->nativeLookup(cx, id);
            if (shape)
                break;
            return true;
        }

        // Only plain scope objects are optimized; 'with' scopes are not.
        if (!scopeChain->is<ScopeObject>() || scopeChain->is<DynamicWithObject>())
            return true;

        // Check for an 'own' property on the scope. There is no need to
        // check the prototype as non-with scopes do not inherit properties
        // from any prototype.
        shape = scopeChain->nativeLookup(cx, id);
        if (shape)
            break;

        scopeChain = scopeChain->enclosingScope();
    }

    if (!IsCacheableGetPropReadSlot(scopeChain, scopeChain, shape))
        return true;

    bool isFixedSlot;
    uint32_t offset;
    GetFixedOrDynamicSlotOffset(scopeChain, shape->slot(), &isFixedSlot, &offset);

    ICStub *monitorStub = stub->fallbackMonitorStub()->firstMonitorStub();
    ICStub *newStub;

    // One template instantiation per supported hop count (0..6); deeper
    // chains are left to the fallback.
    switch (shapes.length()) {
      case 1: {
        ICGetName_Scope<0>::Compiler compiler(cx, monitorStub, &shapes, isFixedSlot, offset);
        newStub = compiler.getStub(compiler.getStubSpace(script));
        break;
      }
      case 2: {
        ICGetName_Scope<1>::Compiler compiler(cx, monitorStub, &shapes, isFixedSlot, offset);
        newStub = compiler.getStub(compiler.getStubSpace(script));
        break;
      }
      case 3: {
        ICGetName_Scope<2>::Compiler compiler(cx, monitorStub, &shapes, isFixedSlot, offset);
        newStub = compiler.getStub(compiler.getStubSpace(script));
        break;
      }
      case 4: {
        ICGetName_Scope<3>::Compiler compiler(cx, monitorStub, &shapes, isFixedSlot, offset);
        newStub = compiler.getStub(compiler.getStubSpace(script));
        break;
      }
      case 5: {
        ICGetName_Scope<4>::Compiler compiler(cx, monitorStub, &shapes, isFixedSlot, offset);
        newStub = compiler.getStub(compiler.getStubSpace(script));
        break;
      }
      case 6: {
        ICGetName_Scope<5>::Compiler compiler(cx, monitorStub, &shapes, isFixedSlot, offset);
        newStub = compiler.getStub(compiler.getStubSpace(script));
        break;
      }
      case 7: {
        ICGetName_Scope<6>::Compiler compiler(cx, monitorStub, &shapes, isFixedSlot, offset);
        newStub = compiler.getStub(compiler.getStubSpace(script));
        break;
      }
      default:
        return true;
    }

    if (!newStub)
        return false;

    stub->addNewStub(newStub);
    return true;
}
  1.5812 +
// Fallback path for JSOP_NAME / JSOP_GETGNAME: perform the full scope-chain
// name lookup in the VM, monitor the result type, and then try to attach an
// optimized stub (global-name or scope-chain walk) for future executions.
static bool
DoGetNameFallback(JSContext *cx, BaselineFrame *frame, ICGetName_Fallback *stub_,
                  HandleObject scopeChain, MutableHandleValue res)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICGetName_Fallback *> stub(frame, stub_);

    RootedScript script(cx, frame->script());
    jsbytecode *pc = stub->icEntry()->pc(script);
    mozilla::DebugOnly<JSOp> op = JSOp(*pc);
    FallbackICSpew(cx, stub, "GetName(%s)", js_CodeName[JSOp(*pc)]);

    JS_ASSERT(op == JSOP_NAME || op == JSOP_GETGNAME);

    RootedPropertyName name(cx, script->getName(pc));

    // Peek at the op following this one: a name lookup feeding JSOP_TYPEOF
    // uses the non-throwing lookup variant for missing bindings.
    // NOTE(review): this steps over the current op with JSOP_GETGNAME_LENGTH
    // even when the current op is JSOP_NAME — assumes both opcodes have the
    // same bytecode length; confirm against Opcodes.h.
    if (JSOp(pc[JSOP_GETGNAME_LENGTH]) == JSOP_TYPEOF) {
        if (!GetScopeNameForTypeOf(cx, scopeChain, name, res))
            return false;
    } else {
        if (!GetScopeName(cx, scopeChain, name, res))
            return false;
    }

    types::TypeScript::Monitor(cx, script, pc, res);

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    // Add a type monitor stub for the resulting value.
    if (!stub->addMonitorStubForValue(cx, script, res))
        return false;

    // Attach new stub.
    if (stub->numOptimizedStubs() >= ICGetName_Fallback::MAX_OPTIMIZED_STUBS) {
        // TODO: Discard all stubs in this IC and replace with generic stub.
        return true;
    }

    // JOF_GNAME ops read directly off the global object; everything else
    // walks the scope chain.
    if (js_CodeSpec[*pc].format & JOF_GNAME) {
        if (!TryAttachGlobalNameStub(cx, script, pc, stub, scopeChain, name))
            return false;
    } else {
        if (!TryAttachScopeNameStub(cx, script, stub, scopeChain, name))
            return false;
    }

    return true;
}
  1.5863 +
// VM trampoline descriptor so generated stub code can tail-call
// DoGetNameFallback.
typedef bool (*DoGetNameFallbackFn)(JSContext *, BaselineFrame *, ICGetName_Fallback *,
                                    HandleObject, MutableHandleValue);
static const VMFunction DoGetNameFallbackInfo = FunctionInfo<DoGetNameFallbackFn>(DoGetNameFallback);
  1.5867 +
// Emit the GetName fallback stub: push the VM-call arguments (scope chain,
// stub pointer, baseline frame pointer) and tail-call DoGetNameFallback.
bool
ICGetName_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(R0 == JSReturnOperand);

    EmitRestoreTailCallReg(masm);

    // Arguments are pushed in reverse order of the C++ signature:
    // scope chain object (held in R0's scratch register), then stub,
    // then the baseline frame pointer.
    masm.push(R0.scratchReg());
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(DoGetNameFallbackInfo, masm);
}
  1.5881 +
// Optimized GetName stub for a global property stored in a dynamic slot,
// guarded on the global object's shape.
bool
ICGetName_Global::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    Register obj = R0.scratchReg();
    Register scratch = R1.scratchReg();

    // Shape guard.
    masm.loadPtr(Address(BaselineStubReg, ICGetName_Global::offsetOfShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, obj, scratch, &failure);

    // Load dynamic slot: indirect through the slots pointer, then index by
    // the stub's slot number scaled by 8 (sizeof(Value)).
    masm.loadPtr(Address(obj, JSObject::offsetOfSlots()), obj);
    masm.load32(Address(BaselineStubReg, ICGetName_Global::offsetOfSlot()), scratch);
    masm.loadValue(BaseIndex(obj, scratch, TimesEight), R0);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.5906 +
// Optimized GetName stub: read a slot from an object NumHops levels up the
// scope chain, shape-guarding every object walked (NumHops + 1 guards total).
template <size_t NumHops>
bool
ICGetName_Scope<NumHops>::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    GeneralRegisterSet regs(availableGeneralRegs(1));
    Register obj = R0.scratchReg();
    Register walker = regs.takeAny();
    Register scratch = regs.takeAny();

    // Use a local to silence Clang tautological-compare warning if NumHops is 0.
    size_t numHops = NumHops;

    for (size_t index = 0; index < NumHops + 1; index++) {
        // First iteration guards the starting object; later iterations guard
        // the walked-to enclosing scope held in |walker|.
        Register scope = index ? walker : obj;

        // Shape guard.
        masm.loadPtr(Address(BaselineStubReg, ICGetName_Scope::offsetOfShape(index)), scratch);
        masm.branchTestObjShape(Assembler::NotEqual, scope, scratch, &failure);

        // Hop to the enclosing scope, except after the final guard.
        if (index < numHops)
            masm.extractObject(Address(scope, ScopeObject::offsetOfEnclosingScope()), walker);
    }

    Register scope = NumHops ? walker : obj;

    // For dynamic slots, indirect through the slots pointer first.
    if (!isFixedSlot_) {
        masm.loadPtr(Address(scope, JSObject::offsetOfSlots()), walker);
        scope = walker;
    }

    // The stub's offset is used with TimesOne scaling, i.e. it is already a
    // byte offset (contrast with the slot index in ICGetName_Global).
    masm.load32(Address(BaselineStubReg, ICGetName_Scope::offsetOfOffset()), scratch);
    masm.loadValue(BaseIndex(scope, scratch, TimesOne), R0);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.5949 +
  1.5950 +//
  1.5951 +// BindName_Fallback
  1.5952 +//
  1.5953 +
  1.5954 +static bool
  1.5955 +DoBindNameFallback(JSContext *cx, BaselineFrame *frame, ICBindName_Fallback *stub,
  1.5956 +                   HandleObject scopeChain, MutableHandleValue res)
  1.5957 +{
  1.5958 +    jsbytecode *pc = stub->icEntry()->pc(frame->script());
  1.5959 +    mozilla::DebugOnly<JSOp> op = JSOp(*pc);
  1.5960 +    FallbackICSpew(cx, stub, "BindName(%s)", js_CodeName[JSOp(*pc)]);
  1.5961 +
  1.5962 +    JS_ASSERT(op == JSOP_BINDNAME);
  1.5963 +
  1.5964 +    RootedPropertyName name(cx, frame->script()->getName(pc));
  1.5965 +
  1.5966 +    RootedObject scope(cx);
  1.5967 +    if (!LookupNameWithGlobalDefault(cx, name, scopeChain, &scope))
  1.5968 +        return false;
  1.5969 +
  1.5970 +    res.setObject(*scope);
  1.5971 +    return true;
  1.5972 +}
  1.5973 +
// VM trampoline descriptor so generated stub code can tail-call
// DoBindNameFallback.
typedef bool (*DoBindNameFallbackFn)(JSContext *, BaselineFrame *, ICBindName_Fallback *,
                                     HandleObject, MutableHandleValue);
static const VMFunction DoBindNameFallbackInfo =
    FunctionInfo<DoBindNameFallbackFn>(DoBindNameFallback);
  1.5978 +
// Emit the BindName fallback stub: push the scope chain, stub and frame
// pointer, then tail-call DoBindNameFallback.
bool
ICBindName_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(R0 == JSReturnOperand);

    EmitRestoreTailCallReg(masm);

    // Push VM-call arguments in reverse order of the C++ signature.
    masm.push(R0.scratchReg());
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(DoBindNameFallbackInfo, masm);
}
  1.5992 +
  1.5993 +//
  1.5994 +// GetIntrinsic_Fallback
  1.5995 +//
  1.5996 +
// Fallback for JSOP_GETINTRINSIC: fetch the intrinsic's value, then attach a
// constant-loading stub, since the same intrinsic always yields the same
// value.
static bool
DoGetIntrinsicFallback(JSContext *cx, BaselineFrame *frame, ICGetIntrinsic_Fallback *stub_,
                       MutableHandleValue res)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICGetIntrinsic_Fallback *> stub(frame, stub_);

    RootedScript script(cx, frame->script());
    jsbytecode *pc = stub->icEntry()->pc(script);
    mozilla::DebugOnly<JSOp> op = JSOp(*pc);
    FallbackICSpew(cx, stub, "GetIntrinsic(%s)", js_CodeName[JSOp(*pc)]);

    JS_ASSERT(op == JSOP_GETINTRINSIC);

    if (!GetIntrinsicOperation(cx, pc, res))
        return false;

    // An intrinsic operation will always produce the same result, so only
    // needs to be monitored once. Attach a stub to load the resulting constant
    // directly.

    types::TypeScript::Monitor(cx, script, pc, res);

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    IonSpew(IonSpew_BaselineIC, "  Generating GetIntrinsic optimized stub");
    ICGetIntrinsic_Constant::Compiler compiler(cx, res);
    ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
    if (!newStub)
        return false;

    stub->addNewStub(newStub);
    return true;
}
  1.6033 +
// VM trampoline descriptor so generated stub code can tail-call
// DoGetIntrinsicFallback.
typedef bool (*DoGetIntrinsicFallbackFn)(JSContext *, BaselineFrame *, ICGetIntrinsic_Fallback *,
                                         MutableHandleValue);
static const VMFunction DoGetIntrinsicFallbackInfo =
    FunctionInfo<DoGetIntrinsicFallbackFn>(DoGetIntrinsicFallback);
  1.6038 +
// Emit the GetIntrinsic fallback stub. Unlike GetName/BindName, no value
// operand is pushed — only the stub and the baseline frame pointer.
bool
ICGetIntrinsic_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(DoGetIntrinsicFallbackInfo, masm);
}
  1.6049 +
// Optimized GetIntrinsic stub: simply reload the constant value cached in
// the stub itself and return it.
bool
ICGetIntrinsic_Constant::Compiler::generateStubCode(MacroAssembler &masm)
{
    masm.loadValue(Address(BaselineStubReg, ICGetIntrinsic_Constant::offsetOfValue()), R0);

    EmitReturnFromIC(masm);
    return true;
}
  1.6058 +
  1.6059 +//
  1.6060 +// GetProp_Fallback
  1.6061 +//
  1.6062 +
  1.6063 +static bool
  1.6064 +TryAttachLengthStub(JSContext *cx, JSScript *script, ICGetProp_Fallback *stub, HandleValue val,
  1.6065 +                    HandleValue res, bool *attached)
  1.6066 +{
  1.6067 +    JS_ASSERT(!*attached);
  1.6068 +
  1.6069 +    if (val.isString()) {
  1.6070 +        JS_ASSERT(res.isInt32());
  1.6071 +        IonSpew(IonSpew_BaselineIC, "  Generating GetProp(String.length) stub");
  1.6072 +        ICGetProp_StringLength::Compiler compiler(cx);
  1.6073 +        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
  1.6074 +        if (!newStub)
  1.6075 +            return false;
  1.6076 +
  1.6077 +        *attached = true;
  1.6078 +        stub->addNewStub(newStub);
  1.6079 +        return true;
  1.6080 +    }
  1.6081 +
  1.6082 +    if (val.isMagic(JS_OPTIMIZED_ARGUMENTS) && res.isInt32()) {
  1.6083 +        IonSpew(IonSpew_BaselineIC, "  Generating GetProp(MagicArgs.length) stub");
  1.6084 +        ICGetProp_ArgumentsLength::Compiler compiler(cx, ICGetProp_ArgumentsLength::Magic);
  1.6085 +        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
  1.6086 +        if (!newStub)
  1.6087 +            return false;
  1.6088 +
  1.6089 +        *attached = true;
  1.6090 +        stub->addNewStub(newStub);
  1.6091 +        return true;
  1.6092 +    }
  1.6093 +
  1.6094 +    if (!val.isObject())
  1.6095 +        return true;
  1.6096 +
  1.6097 +    RootedObject obj(cx, &val.toObject());
  1.6098 +
  1.6099 +    if (obj->is<ArrayObject>() && res.isInt32()) {
  1.6100 +        IonSpew(IonSpew_BaselineIC, "  Generating GetProp(Array.length) stub");
  1.6101 +        ICGetProp_ArrayLength::Compiler compiler(cx);
  1.6102 +        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
  1.6103 +        if (!newStub)
  1.6104 +            return false;
  1.6105 +
  1.6106 +        *attached = true;
  1.6107 +        stub->addNewStub(newStub);
  1.6108 +        return true;
  1.6109 +    }
  1.6110 +
  1.6111 +    if (obj->is<TypedArrayObject>() && res.isInt32()) {
  1.6112 +        IonSpew(IonSpew_BaselineIC, "  Generating GetProp(TypedArray.length) stub");
  1.6113 +        ICGetProp_TypedArrayLength::Compiler compiler(cx);
  1.6114 +        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
  1.6115 +        if (!newStub)
  1.6116 +            return false;
  1.6117 +
  1.6118 +        *attached = true;
  1.6119 +        stub->addNewStub(newStub);
  1.6120 +        return true;
  1.6121 +    }
  1.6122 +
  1.6123 +    if (obj->is<ArgumentsObject>() && res.isInt32()) {
  1.6124 +        IonSpew(IonSpew_BaselineIC, "  Generating GetProp(ArgsObj.length %s) stub",
  1.6125 +                obj->is<StrictArgumentsObject>() ? "Strict" : "Normal");
  1.6126 +        ICGetProp_ArgumentsLength::Which which = ICGetProp_ArgumentsLength::Normal;
  1.6127 +        if (obj->is<StrictArgumentsObject>())
  1.6128 +            which = ICGetProp_ArgumentsLength::Strict;
  1.6129 +        ICGetProp_ArgumentsLength::Compiler compiler(cx, which);
  1.6130 +        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
  1.6131 +        if (!newStub)
  1.6132 +            return false;
  1.6133 +
  1.6134 +        *attached = true;
  1.6135 +        stub->addNewStub(newStub);
  1.6136 +        return true;
  1.6137 +    }
  1.6138 +
  1.6139 +    return true;
  1.6140 +}
  1.6141 +
  1.6142 +static bool
  1.6143 +UpdateExistingGenerationalDOMProxyStub(ICGetProp_Fallback *stub,
  1.6144 +                                       HandleObject obj)
  1.6145 +{
  1.6146 +    Value expandoSlot = obj->getFixedSlot(GetDOMProxyExpandoSlot());
  1.6147 +    JS_ASSERT(!expandoSlot.isObject() && !expandoSlot.isUndefined());
  1.6148 +    ExpandoAndGeneration *expandoAndGeneration = (ExpandoAndGeneration*)expandoSlot.toPrivate();
  1.6149 +    for (ICStubConstIterator iter = stub->beginChainConst(); !iter.atEnd(); iter++) {
  1.6150 +        if (iter->isGetProp_CallDOMProxyWithGenerationNative()) {
  1.6151 +            ICGetProp_CallDOMProxyWithGenerationNative* updateStub =
  1.6152 +                iter->toGetProp_CallDOMProxyWithGenerationNative();
  1.6153 +            if (updateStub->expandoAndGeneration() == expandoAndGeneration) {
  1.6154 +                // Update generation
  1.6155 +                uint32_t generation = expandoAndGeneration->generation;
  1.6156 +                IonSpew(IonSpew_BaselineIC,
  1.6157 +                        "  Updating existing stub with generation, old value: %i, "
  1.6158 +                        "new value: %i", updateStub->generation(),
  1.6159 +                        generation);
  1.6160 +                updateStub->setGeneration(generation);
  1.6161 +                return true;
  1.6162 +            }
  1.6163 +        }
  1.6164 +    }
  1.6165 +    return false;
  1.6166 +}
  1.6167 +
// Try to attach an optimized GetProp stub for a native object or DOM proxy:
// a direct/prototype slot read, a scripted or native getter call, or a
// Proxy::get call for shadowed DOM proxy properties. On success *attached is
// set; returning true with *attached false means "nothing applied".
static bool
TryAttachNativeGetPropStub(JSContext *cx, HandleScript script, jsbytecode *pc,
                           ICGetProp_Fallback *stub, HandlePropertyName name,
                           HandleValue val, HandleValue res, bool *attached)
{
    JS_ASSERT(!*attached);

    if (!val.isObject())
        return true;

    RootedObject obj(cx, &val.toObject());

    // Look the property up without observable side effects; this also
    // classifies DOM proxies and whether their expando uses generations.
    bool isDOMProxy;
    bool domProxyHasGeneration;
    DOMProxyShadowsResult domProxyShadowsResult;
    RootedShape shape(cx);
    RootedObject holder(cx);
    if (!EffectlesslyLookupProperty(cx, obj, name, &holder, &shape, &isDOMProxy,
                                    &domProxyShadowsResult, &domProxyHasGeneration))
    {
        return false;
    }

    // Non-native, non-DOM-proxy receivers are not handled here.
    if (!isDOMProxy && !obj->isNative())
        return true;

    bool isCallProp = (JSOp(*pc) == JSOP_CALLPROP);

    ICStub *monitorStub = stub->fallbackMonitorStub()->firstMonitorStub();

    // Case 1: plain data-slot read on the object itself or its prototype.
    if (!isDOMProxy && IsCacheableGetPropReadSlot(obj, holder, shape)) {
        bool isFixedSlot;
        uint32_t offset;
        GetFixedOrDynamicSlotOffset(holder, shape->slot(), &isFixedSlot, &offset);

        // Instantiate this property for singleton holders, for use during Ion compilation.
        if (IsIonEnabled(cx))
            types::EnsureTrackPropertyTypes(cx, holder, NameToId(name));

        ICStub::Kind kind = (obj == holder) ? ICStub::GetProp_Native
                                            : ICStub::GetProp_NativePrototype;

        IonSpew(IonSpew_BaselineIC, "  Generating GetProp(%s %s) stub",
                    isDOMProxy ? "DOMProxy" : "Native",
                    (obj == holder) ? "direct" : "prototype");
        ICGetPropNativeCompiler compiler(cx, kind, isCallProp, monitorStub, obj, holder,
                                         name, isFixedSlot, offset);
        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;

        stub->addNewStub(newStub);
        *attached = true;
        return true;
    }

    bool isScripted = false;
    bool cacheableCall = IsCacheableGetPropCall(cx, obj, holder, shape, &isScripted, isDOMProxy);

    // Case 2: scripted getter on a prototype (not on DOM proxies).
    if (cacheableCall && isScripted && !isDOMProxy) {
#if JS_HAS_NO_SUCH_METHOD
        // It's hard to keep the original object alive through a call, and it's unlikely
        // that a getter will be used to generate functions for calling in CALLPROP locations.
        // Just don't attach stubs in that case.
        if (isCallProp)
            return true;
#endif

        // Don't handle scripted own property getters
        if (obj == holder)
            return true;

        RootedFunction callee(cx, &shape->getterObject()->as<JSFunction>());
        JS_ASSERT(obj != holder);
        JS_ASSERT(callee->hasScript());

        IonSpew(IonSpew_BaselineIC, "  Generating GetProp(NativeObj/ScriptedGetter %s:%d) stub",
                    callee->nonLazyScript()->filename(), callee->nonLazyScript()->lineno());

        ICGetProp_CallScripted::Compiler compiler(cx, monitorStub, obj, holder, callee,
                                                  script->pcToOffset(pc));
        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;

        stub->addNewStub(newStub);
        *attached = true;
        return true;
    }

    // Case 3: JSNative getter, including the DOM proxy variants.
    if (cacheableCall && !isScripted) {
#if JS_HAS_NO_SUCH_METHOD
        // It's unlikely that a getter function will be used to generate functions for calling
        // in CALLPROP locations.  Just don't attach stubs in that case to avoid issues with
        // __noSuchMethod__ handling.
        if (isCallProp)
            return true;
#endif

        RootedFunction callee(cx, &shape->getterObject()->as<JSFunction>());
        JS_ASSERT(callee->isNative());

        IonSpew(IonSpew_BaselineIC, "  Generating GetProp(%s%s/NativeGetter %p) stub",
                isDOMProxy ? "DOMProxyObj" : "NativeObj",
                isDOMProxy && domProxyHasGeneration ? "WithGeneration" : "",
                callee->native());

        ICStub *newStub = nullptr;
        if (isDOMProxy) {
            JS_ASSERT(obj != holder);
            ICStub::Kind kind;
            if (domProxyHasGeneration) {
                // Prefer refreshing an existing generational stub over
                // attaching a duplicate for the same ExpandoAndGeneration.
                if (UpdateExistingGenerationalDOMProxyStub(stub, obj)) {
                    *attached = true;
                    return true;
                }
                kind = ICStub::GetProp_CallDOMProxyWithGenerationNative;
            } else {
                kind = ICStub::GetProp_CallDOMProxyNative;
            }
            Rooted<ProxyObject*> proxy(cx, &obj->as<ProxyObject>());
            ICGetPropCallDOMProxyNativeCompiler
                compiler(cx, kind, monitorStub, proxy, holder, callee, script->pcToOffset(pc));
            newStub = compiler.getStub(compiler.getStubSpace(script));
        } else if (obj == holder) {
            ICGetProp_CallNative::Compiler compiler(cx, monitorStub, obj, callee,
                                                    script->pcToOffset(pc));
            newStub = compiler.getStub(compiler.getStubSpace(script));
        } else {
            ICGetProp_CallNativePrototype::Compiler compiler(cx, monitorStub, obj, holder, callee,
                                                             script->pcToOffset(pc));
            newStub = compiler.getStub(compiler.getStubSpace(script));
        }
        if (!newStub)
            return false;
        stub->addNewStub(newStub);
        *attached = true;
        return true;
    }

    // If it's a shadowed listbase proxy property, attach stub to call Proxy::get instead.
    if (isDOMProxy && domProxyShadowsResult == Shadows) {
        JS_ASSERT(obj == holder);
#if JS_HAS_NO_SUCH_METHOD
        if (isCallProp)
            return true;
#endif

        IonSpew(IonSpew_BaselineIC, "  Generating GetProp(DOMProxyProxy) stub");
        Rooted<ProxyObject*> proxy(cx, &obj->as<ProxyObject>());
        ICGetProp_DOMProxyShadowed::Compiler compiler(cx, monitorStub, proxy, name,
                                                      script->pcToOffset(pc));
        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;
        stub->addNewStub(newStub);
        *attached = true;
        return true;
    }

    return true;
}
  1.6331 +
  1.6332 +static bool
  1.6333 +TryAttachPrimitiveGetPropStub(JSContext *cx, HandleScript script, jsbytecode *pc,
  1.6334 +                              ICGetProp_Fallback *stub, HandlePropertyName name, HandleValue val,
  1.6335 +                              HandleValue res, bool *attached)
  1.6336 +{
  1.6337 +    JS_ASSERT(!*attached);
  1.6338 +
  1.6339 +    JSValueType primitiveType;
  1.6340 +    RootedObject proto(cx);
  1.6341 +    Rooted<GlobalObject*> global(cx, &script->global());
  1.6342 +    if (val.isString()) {
  1.6343 +        primitiveType = JSVAL_TYPE_STRING;
  1.6344 +        proto = GlobalObject::getOrCreateStringPrototype(cx, global);
  1.6345 +    } else if (val.isNumber()) {
  1.6346 +        primitiveType = JSVAL_TYPE_DOUBLE;
  1.6347 +        proto = GlobalObject::getOrCreateNumberPrototype(cx, global);
  1.6348 +    } else {
  1.6349 +        JS_ASSERT(val.isBoolean());
  1.6350 +        primitiveType = JSVAL_TYPE_BOOLEAN;
  1.6351 +        proto = GlobalObject::getOrCreateBooleanPrototype(cx, global);
  1.6352 +    }
  1.6353 +    if (!proto)
  1.6354 +        return false;
  1.6355 +
  1.6356 +    // Instantiate this property, for use during Ion compilation.
  1.6357 +    RootedId id(cx, NameToId(name));
  1.6358 +    if (IsIonEnabled(cx))
  1.6359 +        types::EnsureTrackPropertyTypes(cx, proto, id);
  1.6360 +
  1.6361 +    // For now, only look for properties directly set on the prototype.
  1.6362 +    RootedShape shape(cx, proto->nativeLookup(cx, id));
  1.6363 +    if (!shape || !shape->hasSlot() || !shape->hasDefaultGetter())
  1.6364 +        return true;
  1.6365 +
  1.6366 +    bool isFixedSlot;
  1.6367 +    uint32_t offset;
  1.6368 +    GetFixedOrDynamicSlotOffset(proto, shape->slot(), &isFixedSlot, &offset);
  1.6369 +
  1.6370 +    ICStub *monitorStub = stub->fallbackMonitorStub()->firstMonitorStub();
  1.6371 +
  1.6372 +    IonSpew(IonSpew_BaselineIC, "  Generating GetProp_Primitive stub");
  1.6373 +    ICGetProp_Primitive::Compiler compiler(cx, monitorStub, primitiveType, proto,
  1.6374 +                                           isFixedSlot, offset);
  1.6375 +    ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
  1.6376 +    if (!newStub)
  1.6377 +        return false;
  1.6378 +
  1.6379 +    stub->addNewStub(newStub);
  1.6380 +    *attached = true;
  1.6381 +    return true;
  1.6382 +}
  1.6383 +
// Fallback for JSOP_GETPROP / CALLPROP / LENGTH / GETXPROP: perform the
// generic property get in the VM, monitor the result, then try to attach a
// specialized stub (length, native slot/getter, DOM proxy, or
// primitive-prototype read).
static bool
DoGetPropFallback(JSContext *cx, BaselineFrame *frame, ICGetProp_Fallback *stub_,
                  MutableHandleValue val, MutableHandleValue res)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICGetProp_Fallback *> stub(frame, stub_);

    jsbytecode *pc = stub->icEntry()->pc(frame->script());
    JSOp op = JSOp(*pc);
    FallbackICSpew(cx, stub, "GetProp(%s)", js_CodeName[op]);

    JS_ASSERT(op == JSOP_GETPROP || op == JSOP_CALLPROP || op == JSOP_LENGTH || op == JSOP_GETXPROP);

    RootedPropertyName name(cx, frame->script()->getName(pc));

    if (op == JSOP_LENGTH && val.isMagic(JS_OPTIMIZED_ARGUMENTS)) {
        // Handle arguments.length access.
        if (IsOptimizedArguments(frame, val.address())) {
            // The frame knows the actual argument count; no object get needed.
            res.setInt32(frame->numActualArgs());

            // Monitor result
            types::TypeScript::Monitor(cx, frame->script(), pc, res);
            // NOTE(review): unlike the main path below, this path does not
            // check stub.invalid() before using the stub — presumably nothing
            // above here can toggle debug mode; confirm.
            if (!stub->addMonitorStubForValue(cx, frame->script(), res))
                return false;

            bool attached = false;
            if (!TryAttachLengthStub(cx, frame->script(), stub, val, res, &attached))
                return false;
            JS_ASSERT(attached);

            return true;
        }
    }

    RootedObject obj(cx, ToObjectFromStack(cx, val));
    if (!obj)
        return false;

    RootedId id(cx, NameToId(name));
    if (!JSObject::getGeneric(cx, obj, obj, id, res))
        return false;

#if JS_HAS_NO_SUCH_METHOD
    // Handle objects with __noSuchMethod__.
    if (op == JSOP_CALLPROP && MOZ_UNLIKELY(res.isUndefined()) && val.isObject()) {
        if (!OnUnknownMethod(cx, obj, IdToValue(id), res))
            return false;
    }
#endif

    types::TypeScript::Monitor(cx, frame->script(), pc, res);

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    // Add a type monitor stub for the resulting value.
    if (!stub->addMonitorStubForValue(cx, frame->script(), res))
        return false;

    if (stub->numOptimizedStubs() >= ICGetProp_Fallback::MAX_OPTIMIZED_STUBS) {
        // TODO: Discard all stubs in this IC and replace with generic getprop stub.
        return true;
    }

    bool attached = false;

    if (op == JSOP_LENGTH) {
        if (!TryAttachLengthStub(cx, frame->script(), stub, val, res, &attached))
            return false;
        if (attached)
            return true;
    }

    RootedScript script(cx, frame->script());

    if (!TryAttachNativeGetPropStub(cx, script, pc, stub, name, val, res, &attached))
        return false;
    if (attached)
        return true;

    if (val.isString() || val.isNumber() || val.isBoolean()) {
        if (!TryAttachPrimitiveGetPropStub(cx, script, pc, stub, name, val, res, &attached))
            return false;
        if (attached)
            return true;
    }

    // No specialized stub could be attached; record the unoptimizable access
    // on the fallback stub.
    JS_ASSERT(!attached);
    stub->noteUnoptimizableAccess();

    return true;
}
  1.6477 +
// VM trampoline descriptor for DoGetPropFallback. PopValues(1) discards the
// extra copy of the value pushed by generateStubCode for the expression
// decompiler.
typedef bool (*DoGetPropFallbackFn)(JSContext *, BaselineFrame *, ICGetProp_Fallback *,
                                    MutableHandleValue, MutableHandleValue);
static const VMFunction DoGetPropFallbackInfo =
    FunctionInfo<DoGetPropFallbackFn>(DoGetPropFallback, PopValues(1));
  1.6482 +
// Emit the GetProp fallback stub: tail-call DoGetPropFallback, then emit the
// two bailout-return entry points (return-from-Ion and return-from-stub)
// whose code offsets are recorded for postGenerateStubCode.
bool
ICGetProp_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(R0 == JSReturnOperand);

    EmitRestoreTailCallReg(masm);

    // Ensure stack is fully synced for the expression decompiler.
    masm.pushValue(R0);

    // Push arguments.
    masm.pushValue(R0);
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    if (!tailCallVM(DoGetPropFallbackInfo, masm))
        return false;

    // What follows is bailout for inlined scripted getters or for on-stack
    // debug mode recompile. The return address pointed to by the baseline
    // stack points here.
    //
    // Even though the fallback frame doesn't enter a stub frame, the CallScripted
    // frame that we are emulating does. Again, we lie.
#ifdef DEBUG
    entersStubFrame_ = true;
#endif

    Label leaveStubCommon;

    // Entry point used when returning from the VM/stub path.
    returnFromStubOffset_ = masm.currentOffset();
    leaveStubFrameHead(masm, false);
    masm.jump(&leaveStubCommon);

    // Entry point used when returning from Ion.
    returnFromIonOffset_ = masm.currentOffset();
    leaveStubFrameHead(masm, true);

    // Shared tail for both entry points.
    masm.bind(&leaveStubCommon);
    leaveStubFrameCommonTail(masm);

    // When we get here, BaselineStubReg contains the ICGetProp_Fallback stub,
    // which we can't use to enter the TypeMonitor IC, because it's a MonitoredFallbackStub
    // instead of a MonitoredStub. So, we cheat.
    masm.loadPtr(Address(BaselineStubReg, ICMonitoredFallbackStub::offsetOfFallbackMonitorStub()),
                 BaselineStubReg);
    EmitEnterTypeMonitorIC(masm, ICTypeMonitor_Fallback::offsetOfFirstMonitorStub());

    return true;
}
  1.6532 +
  1.6533 +bool
  1.6534 +ICGetProp_Fallback::Compiler::postGenerateStubCode(MacroAssembler &masm, Handle<JitCode *> code)
  1.6535 +{
  1.6536 +    JitCompartment *comp = cx->compartment()->jitCompartment();
  1.6537 +
  1.6538 +    CodeOffsetLabel fromIon(returnFromIonOffset_);
  1.6539 +    fromIon.fixup(&masm);
  1.6540 +    comp->initBaselineGetPropReturnFromIonAddr(code->raw() + fromIon.offset());
  1.6541 +
  1.6542 +    CodeOffsetLabel fromVM(returnFromStubOffset_);
  1.6543 +    fromVM.fixup(&masm);
  1.6544 +    comp->initBaselineGetPropReturnFromStubAddr(code->raw() + fromVM.offset());
  1.6545 +
  1.6546 +    return true;
  1.6547 +}
  1.6548 +
  1.6549 +bool
  1.6550 +ICGetProp_ArrayLength::Compiler::generateStubCode(MacroAssembler &masm)
  1.6551 +{
  1.6552 +    Label failure;
  1.6553 +    masm.branchTestObject(Assembler::NotEqual, R0, &failure);
  1.6554 +
  1.6555 +    Register scratch = R1.scratchReg();
  1.6556 +
  1.6557 +    // Unbox R0 and guard it's an array.
  1.6558 +    Register obj = masm.extractObject(R0, ExtractTemp0);
  1.6559 +    masm.branchTestObjClass(Assembler::NotEqual, obj, scratch, &ArrayObject::class_, &failure);
  1.6560 +
  1.6561 +    // Load obj->elements->length.
  1.6562 +    masm.loadPtr(Address(obj, JSObject::offsetOfElements()), scratch);
  1.6563 +    masm.load32(Address(scratch, ObjectElements::offsetOfLength()), scratch);
  1.6564 +
  1.6565 +    // Guard length fits in an int32.
  1.6566 +    masm.branchTest32(Assembler::Signed, scratch, scratch, &failure);
  1.6567 +
  1.6568 +    masm.tagValue(JSVAL_TYPE_INT32, scratch, R0);
  1.6569 +    EmitReturnFromIC(masm);
  1.6570 +
  1.6571 +    // Failure case - jump to next stub
  1.6572 +    masm.bind(&failure);
  1.6573 +    EmitStubGuardFailure(masm);
  1.6574 +    return true;
  1.6575 +}
  1.6576 +
  1.6577 +bool
  1.6578 +ICGetProp_TypedArrayLength::Compiler::generateStubCode(MacroAssembler &masm)
  1.6579 +{
  1.6580 +    Label failure;
  1.6581 +    masm.branchTestObject(Assembler::NotEqual, R0, &failure);
  1.6582 +
  1.6583 +    Register scratch = R1.scratchReg();
  1.6584 +
  1.6585 +    // Unbox R0.
  1.6586 +    Register obj = masm.extractObject(R0, ExtractTemp0);
  1.6587 +
  1.6588 +    // Implement the negated version of JSObject::isTypedArray predicate.
  1.6589 +    masm.loadObjClass(obj, scratch);
  1.6590 +    masm.branchPtr(Assembler::Below, scratch, ImmPtr(&TypedArrayObject::classes[0]),
  1.6591 +                   &failure);
  1.6592 +    masm.branchPtr(Assembler::AboveOrEqual, scratch,
  1.6593 +                   ImmPtr(&TypedArrayObject::classes[ScalarTypeDescr::TYPE_MAX]),
  1.6594 +                   &failure);
  1.6595 +
  1.6596 +    // Load length from fixed slot.
  1.6597 +    masm.loadValue(Address(obj, TypedArrayObject::lengthOffset()), R0);
  1.6598 +    EmitReturnFromIC(masm);
  1.6599 +
  1.6600 +    // Failure case - jump to next stub
  1.6601 +    masm.bind(&failure);
  1.6602 +    EmitStubGuardFailure(masm);
  1.6603 +    return true;
  1.6604 +}
  1.6605 +
  1.6606 +bool
  1.6607 +ICGetProp_StringLength::Compiler::generateStubCode(MacroAssembler &masm)
  1.6608 +{
  1.6609 +    Label failure;
  1.6610 +    masm.branchTestString(Assembler::NotEqual, R0, &failure);
  1.6611 +
  1.6612 +    // Unbox string and load its length.
  1.6613 +    Register string = masm.extractString(R0, ExtractTemp0);
  1.6614 +    masm.loadStringLength(string, string);
  1.6615 +
  1.6616 +    masm.tagValue(JSVAL_TYPE_INT32, string, R0);
  1.6617 +    EmitReturnFromIC(masm);
  1.6618 +
  1.6619 +    // Failure case - jump to next stub
  1.6620 +    masm.bind(&failure);
  1.6621 +    EmitStubGuardFailure(masm);
  1.6622 +    return true;
  1.6623 +}
  1.6624 +
  1.6625 +bool
  1.6626 +ICGetProp_Primitive::Compiler::generateStubCode(MacroAssembler &masm)
  1.6627 +{
  1.6628 +    Label failure;
  1.6629 +    switch (primitiveType_) {
  1.6630 +      case JSVAL_TYPE_STRING:
  1.6631 +        masm.branchTestString(Assembler::NotEqual, R0, &failure);
  1.6632 +        break;
  1.6633 +      case JSVAL_TYPE_DOUBLE: // Also used for int32.
  1.6634 +        masm.branchTestNumber(Assembler::NotEqual, R0, &failure);
  1.6635 +        break;
  1.6636 +      case JSVAL_TYPE_BOOLEAN:
  1.6637 +        masm.branchTestBoolean(Assembler::NotEqual, R0, &failure);
  1.6638 +        break;
  1.6639 +      default:
  1.6640 +        MOZ_ASSUME_UNREACHABLE("unexpected type");
  1.6641 +    }
  1.6642 +
  1.6643 +    GeneralRegisterSet regs(availableGeneralRegs(1));
  1.6644 +    Register holderReg = regs.takeAny();
  1.6645 +    Register scratchReg = regs.takeAny();
  1.6646 +
  1.6647 +    // Verify the shape of the prototype.
  1.6648 +    masm.movePtr(ImmGCPtr(prototype_.get()), holderReg);
  1.6649 +
  1.6650 +    Address shapeAddr(BaselineStubReg, ICGetProp_Primitive::offsetOfProtoShape());
  1.6651 +    masm.loadPtr(Address(holderReg, JSObject::offsetOfShape()), scratchReg);
  1.6652 +    masm.branchPtr(Assembler::NotEqual, shapeAddr, scratchReg, &failure);
  1.6653 +
  1.6654 +    if (!isFixedSlot_)
  1.6655 +        masm.loadPtr(Address(holderReg, JSObject::offsetOfSlots()), holderReg);
  1.6656 +
  1.6657 +    masm.load32(Address(BaselineStubReg, ICGetPropNativeStub::offsetOfOffset()), scratchReg);
  1.6658 +    masm.loadValue(BaseIndex(holderReg, scratchReg, TimesOne), R0);
  1.6659 +
  1.6660 +    // Enter type monitor IC to type-check result.
  1.6661 +    EmitEnterTypeMonitorIC(masm);
  1.6662 +
  1.6663 +    // Failure case - jump to next stub
  1.6664 +    masm.bind(&failure);
  1.6665 +    EmitStubGuardFailure(masm);
  1.6666 +    return true;
  1.6667 +}
  1.6668 +
bool
ICGetPropNativeCompiler::generateStubCode(MacroAssembler &masm)
{
    // Optimized stub reading a data slot off a native object, either an own
    // property (obj_ == holder_) or one found on a prototype.  Shape guards
    // the receiver (and the holder, in the prototype case), then loads the
    // slot at the offset baked into the stub.  The result is type-monitored.
    Label failure;
    GeneralRegisterSet regs(availableGeneralRegs(1));

    // Guard input is an object.
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    // BaselineTailCallReg is excluded here; it is restored/repushed explicitly
    // in the noSuchMethod path below.
    Register scratch = regs.takeAnyExcluding(BaselineTailCallReg);

    // Unbox and shape guard.
    Register objReg = masm.extractObject(R0, ExtractTemp0);
    masm.loadPtr(Address(BaselineStubReg, ICGetPropNativeStub::offsetOfShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failure);

    Register holderReg;
    if (obj_ == holder_) {
        // Own-property case: the receiver is also the holder.
        holderReg = objReg;
    } else {
        // Prototype case: load the holder from the stub and shape guard it.
        holderReg = regs.takeAny();
        masm.loadPtr(Address(BaselineStubReg, ICGetProp_NativePrototype::offsetOfHolder()),
                     holderReg);
        masm.loadPtr(Address(BaselineStubReg, ICGetProp_NativePrototype::offsetOfHolderShape()),
                     scratch);
        masm.branchTestObjShape(Assembler::NotEqual, holderReg, scratch, &failure);
    }

    if (!isFixedSlot_) {
        // Don't overwrite actual holderReg if we need to load a dynamic slots object.
        // May need to preserve object for noSuchMethod check later.
        Register nextHolder = regs.takeAny();
        masm.loadPtr(Address(holderReg, JSObject::offsetOfSlots()), nextHolder);
        holderReg = nextHolder;
    }

    // Address of the slot: base (fixed slots or dynamic slots array) plus the
    // byte offset stored in the stub.
    masm.load32(Address(BaselineStubReg, ICGetPropNativeStub::offsetOfOffset()), scratch);
    BaseIndex result(holderReg, scratch, TimesOne);

#if JS_HAS_NO_SUCH_METHOD
#ifdef DEBUG
    entersStubFrame_ = true;
#endif
    if (isCallProp_) {
        // Check for __noSuchMethod__ invocation: for a callprop access whose
        // slot holds |undefined|, call the VM lookup helper instead of
        // returning undefined.
        Label afterNoSuchMethod;
        Label skipNoSuchMethod;

        // Keep the raw receiver alive across the loadValue; the slot load may
        // clobber the register file we later need it in.
        masm.push(objReg);
        masm.loadValue(result, R0);
        masm.branchTestUndefined(Assembler::NotEqual, R0, &skipNoSuchMethod);

        masm.pop(objReg);

        // Call __noSuchMethod__ checker.  Object pointer is in objReg.
        regs = availableGeneralRegs(0);
        regs.takeUnchecked(objReg);
        regs.takeUnchecked(BaselineTailCallReg);
        ValueOperand val = regs.takeValueOperand();

        // Box and push obj onto baseline frame stack for decompiler.
        EmitRestoreTailCallReg(masm);
        masm.tagValue(JSVAL_TYPE_OBJECT, objReg, val);
        masm.pushValue(val);
        EmitRepushTailCallReg(masm);

        enterStubFrame(masm, regs.getAnyExcluding(BaselineTailCallReg));

        // VM-call arguments: boxed property name, then the receiver object.
        masm.movePtr(ImmGCPtr(propName_.get()), val.scratchReg());
        masm.tagValue(JSVAL_TYPE_STRING, val.scratchReg(), val);
        masm.pushValue(val);
        masm.push(objReg);
        if (!callVM(LookupNoSuchMethodHandlerInfo, masm))
            return false;

        leaveStubFrame(masm);

        // Discard the boxed obj pushed for the decompiler above.
        EmitUnstowICValues(masm, 1, /* discard = */ true);

        masm.jump(&afterNoSuchMethod);
        masm.bind(&skipNoSuchMethod);

        // Slot value was not undefined: discard the raw objReg pushed above.
        masm.addPtr(Imm32(sizeof(void *)), BaselineStackReg);
        masm.bind(&afterNoSuchMethod);
    } else {
        masm.loadValue(result, R0);
    }
#else
    masm.loadValue(result, R0);
#endif

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.6771 +
bool
ICGetProp_CallScripted::Compiler::generateStubCode(MacroAssembler &masm)
{
    // Optimized stub invoking a scripted getter.  Guards the receiver and
    // holder shapes, then calls the getter's JIT code directly (via the
    // arguments rectifier when the getter declares formals).  The result is
    // type-monitored.
    Label failure;
    Label failureLeaveStubFrame;
    GeneralRegisterSet regs(availableGeneralRegs(1));
    Register scratch = regs.takeAnyExcluding(BaselineTailCallReg);

    // Guard input is an object.
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    // Unbox and shape guard.
    Register objReg = masm.extractObject(R0, ExtractTemp0);
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallScripted::offsetOfReceiverShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failure);

    // Shape guard the holder (the object the getter was found on).
    Register holderReg = regs.takeAny();
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallScripted::offsetOfHolder()), holderReg);
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallScripted::offsetOfHolderShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, holderReg, scratch, &failure);
    regs.add(holderReg);

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, scratch);

    // Load callee function and code.  To ensure that |code| doesn't end up being
    // ArgumentsRectifierReg, if it's available we assign it to |callee| instead.
    Register callee;
    if (regs.has(ArgumentsRectifierReg)) {
        callee = ArgumentsRectifierReg;
        regs.take(callee);
    } else {
        callee = regs.takeAny();
    }
    Register code = regs.takeAny();
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallScripted::offsetOfGetter()), callee);
    // If the getter has no script or no jitcode yet, bail out to the next
    // stub — but through failureLeaveStubFrame, since a stub frame has
    // already been pushed and must be popped first.
    masm.branchIfFunctionHasNoScript(callee, &failureLeaveStubFrame);
    masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), code);
    masm.loadBaselineOrIonRaw(code, code, SequentialExecution, &failureLeaveStubFrame);

    // Getter is called with 0 arguments, just |obj| as thisv.
    // Note that we use Push, not push, so that callIon will align the stack
    // properly on ARM.
    masm.Push(R0);
    EmitCreateStubFrameDescriptor(masm, scratch);
    masm.Push(Imm32(0));  // ActualArgc is 0
    masm.Push(callee);
    masm.Push(scratch);

    // Handle arguments underflow: if the getter declares formals, go through
    // the arguments rectifier so missing arguments are filled in.
    Label noUnderflow;
    masm.load16ZeroExtend(Address(callee, JSFunction::offsetOfNargs()), scratch);
    masm.branch32(Assembler::Equal, scratch, Imm32(0), &noUnderflow);
    {
        // Call the arguments rectifier.
        JS_ASSERT(ArgumentsRectifierReg != code);

        JitCode *argumentsRectifier =
            cx->runtime()->jitRuntime()->getArgumentsRectifier(SequentialExecution);

        masm.movePtr(ImmGCPtr(argumentsRectifier), code);
        masm.loadPtr(Address(code, JitCode::offsetOfCode()), code);
        masm.mov(ImmWord(0), ArgumentsRectifierReg);
    }

    masm.bind(&noUnderflow);

    // If needed, update SPS Profiler frame entry.  At this point, callee and scratch can
    // be clobbered.
    {
        GeneralRegisterSet availRegs = availableGeneralRegs(0);
        availRegs.take(ArgumentsRectifierReg);
        availRegs.take(code);
        emitProfilingUpdate(masm, availRegs, ICGetProp_CallScripted::offsetOfPCOffset());
    }

    masm.callIon(code);

    leaveStubFrame(masm, true);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    // Leave stub frame and go to next stub.
    masm.bind(&failureLeaveStubFrame);
    leaveStubFrame(masm, false);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.6864 +
bool
ICGetProp_CallNative::Compiler::generateStubCode(MacroAssembler &masm)
{
    // Optimized stub invoking a native getter found on the receiver itself.
    // Shape guards the receiver, then calls the getter through the
    // DoCallNativeGetter VM wrapper.  The result is type-monitored.
    Label failure;

    GeneralRegisterSet regs(availableGeneralRegs(0));
    Register obj = InvalidReg;
    if (inputDefinitelyObject_) {
        // The input is statically known to be an object: skip the type guard
        // and use R0's payload register directly.
        obj = R0.scratchReg();
    } else {
        regs.take(R0);
        masm.branchTestObject(Assembler::NotEqual, R0, &failure);
        obj = masm.extractObject(R0, ExtractTemp0);
    }
    regs.takeUnchecked(obj);

    Register scratch = regs.takeAnyExcluding(BaselineTailCallReg);

    // Shape guard the receiver.
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallNative::offsetOfHolderShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, obj, scratch, &failure);

    // Push a stub frame so that we can perform a non-tail VM call.
    enterStubFrame(masm, scratch);

    // Push args for the DoCallNativeGetter VM call: receiver, then getter.
    masm.Push(obj);

    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallNative::offsetOfGetter()), scratch);
    masm.Push(scratch);

    // The pushed values keep these alive; release their registers so the
    // profiler update below can use them.
    regs.add(scratch);
    if (!inputDefinitelyObject_)
        regs.add(R0);

    // If needed, update SPS Profiler frame entry.
    emitProfilingUpdate(masm, regs, ICGetProp_CallNative::offsetOfPCOffset());

    if (!callVM(DoCallNativeGetterInfo, masm))
        return false;
    leaveStubFrame(masm);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    // Failure case - jump to next stub.
    masm.bind(&failure);
    EmitStubGuardFailure(masm);

    return true;
}
  1.6911 +
bool
ICGetProp_CallNativePrototype::Compiler::generateStubCode(MacroAssembler &masm)
{
    // Optimized stub invoking a native getter found on the receiver's
    // prototype chain.  Guards both the receiver's and the holder's shapes,
    // then calls the getter through the DoCallNativeGetter VM wrapper.
    Label failure;
    GeneralRegisterSet regs(availableGeneralRegs(1));
    Register scratch = regs.takeAnyExcluding(BaselineTailCallReg);

    // Guard input is an object.
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    // Unbox and shape guard.
    Register objReg = masm.extractObject(R0, ExtractTemp0);
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallNativePrototype::offsetOfReceiverShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failure);

    // Shape guard the holder (the prototype object owning the getter).
    Register holderReg = regs.takeAny();
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallNativePrototype::offsetOfHolder()), holderReg);
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallNativePrototype::offsetOfHolderShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, holderReg, scratch, &failure);
    regs.add(holderReg);

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, scratch);

    // Load callee function.
    Register callee = regs.takeAny();
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallNativePrototype::offsetOfGetter()), callee);

    // Push args for vm call: receiver object, then the getter function.
    masm.push(objReg);
    masm.push(callee);

    // Don't need to preserve R0 anymore.
    regs.add(R0);

    // If needed, update SPS Profiler frame entry.
    emitProfilingUpdate(masm, regs, ICGetProp_CallNativePrototype::offsetOfPCOffset());

    if (!callVM(DoCallNativeGetterInfo, masm))
        return false;
    leaveStubFrame(masm);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.6962 +
bool
ICGetPropCallDOMProxyNativeCompiler::generateStubCode(MacroAssembler &masm,
                                                      Address* expandoAndGenerationAddr,
                                                      Address* generationAddr)
{
    // Optimized stub invoking a native getter behind a DOM proxy.  Guards the
    // proxy's shape, its handler, and its expando state (the two address
    // arguments are null for the non-generational variant), then calls the
    // getter through the DoCallNativeGetter VM wrapper.
    Label failure;
    GeneralRegisterSet regs(availableGeneralRegs(1));
    Register scratch = regs.takeAnyExcluding(BaselineTailCallReg);

    // Guard input is an object.
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    // Unbox.
    Register objReg = masm.extractObject(R0, ExtractTemp0);

    // Shape guard.
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallDOMProxyNative::offsetOfShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failure);

    // Guard the proxy handler and expando/generation state.
    {
        // GenerateDOMProxyChecks may need scratch registers; hand it every
        // register except those holding live state.
        GeneralRegisterSet domProxyRegSet(GeneralRegisterSet::All());
        domProxyRegSet.take(BaselineStubReg);
        domProxyRegSet.take(objReg);
        domProxyRegSet.take(scratch);
        Address expandoShapeAddr(BaselineStubReg, ICGetProp_CallDOMProxyNative::offsetOfExpandoShape());
        GenerateDOMProxyChecks(
                cx, masm, objReg,
                Address(BaselineStubReg, ICGetProp_CallDOMProxyNative::offsetOfProxyHandler()),
                &expandoShapeAddr, expandoAndGenerationAddr, generationAddr,
                scratch,
                domProxyRegSet,
                &failure);
    }

    // Shape guard the holder object owning the getter.
    Register holderReg = regs.takeAny();
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallDOMProxyNative::offsetOfHolder()),
                 holderReg);
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallDOMProxyNative::offsetOfHolderShape()),
                 scratch);
    masm.branchTestObjShape(Assembler::NotEqual, holderReg, scratch, &failure);
    regs.add(holderReg);

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, scratch);

    // Load callee function.
    Register callee = regs.takeAny();
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallDOMProxyNative::offsetOfGetter()), callee);

    // Push args for vm call: receiver object, then the getter function.
    masm.push(objReg);
    masm.push(callee);

    // Don't have to preserve R0 anymore.
    regs.add(R0);

    // If needed, update SPS Profiler frame entry.
    emitProfilingUpdate(masm, regs, ICGetProp_CallDOMProxyNative::offsetOfPCOffset());

    if (!callVM(DoCallNativeGetterInfo, masm))
        return false;
    leaveStubFrame(masm);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.7035 +
  1.7036 +bool
  1.7037 +ICGetPropCallDOMProxyNativeCompiler::generateStubCode(MacroAssembler &masm)
  1.7038 +{
  1.7039 +    if (kind == ICStub::GetProp_CallDOMProxyNative)
  1.7040 +        return generateStubCode(masm, nullptr, nullptr);
  1.7041 +
  1.7042 +    Address internalStructAddress(BaselineStubReg,
  1.7043 +        ICGetProp_CallDOMProxyWithGenerationNative::offsetOfInternalStruct());
  1.7044 +    Address generationAddress(BaselineStubReg,
  1.7045 +        ICGetProp_CallDOMProxyWithGenerationNative::offsetOfGeneration());
  1.7046 +    return generateStubCode(masm, &internalStructAddress, &generationAddress);
  1.7047 +}
  1.7048 +
  1.7049 +ICStub *
  1.7050 +ICGetPropCallDOMProxyNativeCompiler::getStub(ICStubSpace *space)
  1.7051 +{
  1.7052 +    RootedShape shape(cx, proxy_->lastProperty());
  1.7053 +    RootedShape holderShape(cx, holder_->lastProperty());
  1.7054 +
  1.7055 +    Value expandoSlot = proxy_->getFixedSlot(GetDOMProxyExpandoSlot());
  1.7056 +    RootedShape expandoShape(cx, nullptr);
  1.7057 +    ExpandoAndGeneration *expandoAndGeneration;
  1.7058 +    int32_t generation;
  1.7059 +    Value expandoVal;
  1.7060 +    if (kind == ICStub::GetProp_CallDOMProxyNative) {
  1.7061 +        expandoVal = expandoSlot;
  1.7062 +    } else {
  1.7063 +        JS_ASSERT(kind == ICStub::GetProp_CallDOMProxyWithGenerationNative);
  1.7064 +        JS_ASSERT(!expandoSlot.isObject() && !expandoSlot.isUndefined());
  1.7065 +        expandoAndGeneration = (ExpandoAndGeneration*)expandoSlot.toPrivate();
  1.7066 +        expandoVal = expandoAndGeneration->expando;
  1.7067 +        generation = expandoAndGeneration->generation;
  1.7068 +    }
  1.7069 +
  1.7070 +    if (expandoVal.isObject())
  1.7071 +        expandoShape = expandoVal.toObject().lastProperty();
  1.7072 +
  1.7073 +    if (kind == ICStub::GetProp_CallDOMProxyNative) {
  1.7074 +        return ICGetProp_CallDOMProxyNative::New(
  1.7075 +            space, getStubCode(), firstMonitorStub_, shape, proxy_->handler(),
  1.7076 +            expandoShape, holder_, holderShape, getter_, pcOffset_);
  1.7077 +    }
  1.7078 +
  1.7079 +    return ICGetProp_CallDOMProxyWithGenerationNative::New(
  1.7080 +        space, getStubCode(), firstMonitorStub_, shape, proxy_->handler(),
  1.7081 +        expandoAndGeneration, generation, expandoShape, holder_, holderShape, getter_,
  1.7082 +        pcOffset_);
  1.7083 +}
  1.7084 +
  1.7085 +ICStub *
  1.7086 +ICGetProp_DOMProxyShadowed::Compiler::getStub(ICStubSpace *space)
  1.7087 +{
  1.7088 +    RootedShape shape(cx, proxy_->lastProperty());
  1.7089 +    return ICGetProp_DOMProxyShadowed::New(space, getStubCode(), firstMonitorStub_, shape,
  1.7090 +                                           proxy_->handler(), name_, pcOffset_);
  1.7091 +}
  1.7092 +
  1.7093 +static bool
  1.7094 +ProxyGet(JSContext *cx, HandleObject proxy, HandlePropertyName name, MutableHandleValue vp)
  1.7095 +{
  1.7096 +    RootedId id(cx, NameToId(name));
  1.7097 +    return Proxy::get(cx, proxy, proxy, id, vp);
  1.7098 +}
  1.7099 +
// VM-call wrapper for ProxyGet, used by the DOMProxyShadowed stub below.
typedef bool (*ProxyGetFn)(JSContext *cx, HandleObject proxy, HandlePropertyName name,
                           MutableHandleValue vp);
static const VMFunction ProxyGetInfo = FunctionInfo<ProxyGetFn>(ProxyGet);
  1.7103 +
bool
ICGetProp_DOMProxyShadowed::Compiler::generateStubCode(MacroAssembler &masm)
{
    // Stub for a property access on a DOM proxy that cannot be optimized
    // further: after guarding the proxy's shape and handler, the get is
    // forwarded to Proxy::get through the ProxyGet VM wrapper.
    Label failure;

    GeneralRegisterSet regs(availableGeneralRegs(1));
    // Need to reserve a scratch register, but the scratch register should not be
    // BaselineTailCallReg, because it's used for |enterStubFrame| which needs a
    // non-BaselineTailCallReg scratch reg.
    Register scratch = regs.takeAnyExcluding(BaselineTailCallReg);

    // Guard input is an object.
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    // Unbox.
    Register objReg = masm.extractObject(R0, ExtractTemp0);

    // Shape guard.
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_DOMProxyShadowed::offsetOfShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failure);

    // Guard the proxy handler; the null arguments skip the expando and
    // generation checks entirely.
    {
        GeneralRegisterSet domProxyRegSet(GeneralRegisterSet::All());
        domProxyRegSet.take(BaselineStubReg);
        domProxyRegSet.take(objReg);
        domProxyRegSet.take(scratch);
        GenerateDOMProxyChecks(
                cx, masm, objReg,
                Address(BaselineStubReg, ICGetProp_DOMProxyShadowed::offsetOfProxyHandler()),
                /*expandoShapeAddr=*/nullptr,
                /*expandoAndGenerationAddr=*/nullptr,
                /*generationAddr=*/nullptr,
                scratch,
                domProxyRegSet,
                &failure);
    }

    // Call ProxyGet(JSContext *cx, HandleObject proxy, HandlePropertyName name, MutableHandleValue vp);

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, scratch);

    // Push property name and proxy object.
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_DOMProxyShadowed::offsetOfName()), scratch);
    masm.push(scratch);
    masm.push(objReg);

    // Don't have to preserve R0 anymore.
    regs.add(R0);

    // If needed, update SPS Profiler frame entry.
    emitProfilingUpdate(masm, regs, ICGetProp_DOMProxyShadowed::offsetOfPCOffset());

    if (!callVM(ProxyGetInfo, masm))
        return false;
    leaveStubFrame(masm);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.7170 +
bool
ICGetProp_ArgumentsLength::Compiler::generateStubCode(MacroAssembler &masm)
{
    // Optimized stub for |arguments.length|.  The Magic variant handles the
    // lazy-arguments magic value by reading the frame's actual-argument count;
    // the Strict/Normal variants handle materialized arguments objects whose
    // length has not been overridden.  Always returns an int32, so no type
    // monitoring is entered.
    Label failure;
    if (which_ == ICGetProp_ArgumentsLength::Magic) {
        // Ensure that this is lazy arguments.
        masm.branchTestMagicValue(Assembler::NotEqual, R0, JS_OPTIMIZED_ARGUMENTS, &failure);

        // Ensure that frame has not loaded different arguments object since.
        masm.branchTest32(Assembler::NonZero,
                          Address(BaselineFrameReg, BaselineFrame::reverseOffsetOfFlags()),
                          Imm32(BaselineFrame::HAS_ARGS_OBJ),
                          &failure);

        // Read the actual argument count straight off the baseline frame.
        Address actualArgs(BaselineFrameReg, BaselineFrame::offsetOfNumActualArgs());
        masm.loadPtr(actualArgs, R0.scratchReg());
        masm.tagValue(JSVAL_TYPE_INT32, R0.scratchReg(), R0);
        EmitReturnFromIC(masm);

        masm.bind(&failure);
        EmitStubGuardFailure(masm);
        return true;
    }
    JS_ASSERT(which_ == ICGetProp_ArgumentsLength::Strict ||
              which_ == ICGetProp_ArgumentsLength::Normal);

    bool isStrict = which_ == ICGetProp_ArgumentsLength::Strict;
    const Class *clasp = isStrict ? &StrictArgumentsObject::class_ : &NormalArgumentsObject::class_;

    Register scratchReg = R1.scratchReg();

    // Guard on input being an arguments object.
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);
    Register objReg = masm.extractObject(R0, ExtractTemp0);
    masm.branchTestObjClass(Assembler::NotEqual, objReg, scratchReg, clasp, &failure);

    // Get initial length value.
    masm.unboxInt32(Address(objReg, ArgumentsObject::getInitialLengthSlotOffset()), scratchReg);

    // Test if length has been overridden.
    masm.branchTest32(Assembler::NonZero,
                      scratchReg,
                      Imm32(ArgumentsObject::LENGTH_OVERRIDDEN_BIT),
                      &failure);

    // Nope, shift out arguments length and return it.
    // No need to type monitor because this stub always returns Int32.
    masm.rshiftPtr(Imm32(ArgumentsObject::PACKED_BITS_COUNT), scratchReg);
    masm.tagValue(JSVAL_TYPE_INT32, scratchReg, R0);
    EmitReturnFromIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.7226 +
  1.7227 +void
  1.7228 +BaselineScript::noteAccessedGetter(uint32_t pcOffset)
  1.7229 +{
  1.7230 +    ICEntry &entry = icEntryFromPCOffset(pcOffset);
  1.7231 +    ICFallbackStub *stub = entry.fallbackStub();
  1.7232 +
  1.7233 +    if (stub->isGetProp_Fallback())
  1.7234 +        stub->toGetProp_Fallback()->noteAccessedGetter();
  1.7235 +}
  1.7236 +
  1.7237 +//
  1.7238 +// SetProp_Fallback
  1.7239 +//
  1.7240 +
// Attach an optimized stub for a SETPROP/SETGNAME/SETNAME op.
//
// Candidate stubs, tried in priority order:
//   1. NativeAdd    - the write adds a new slot (shape-changing write),
//                     guarded on the old shape plus the proto chain shapes.
//   2. Native       - the write hits an existing own data slot.
//   3. CallScripted - the property is an accessor with a scripted setter.
//   4. CallNative   - the property is an accessor with a JSNative setter.
//
// Sets *attached on success.  Returning true with *attached still false means
// "nothing attachable", which is not an error; false is returned only on OOM
// or other engine failure.
static bool
TryAttachSetPropStub(JSContext *cx, HandleScript script, jsbytecode *pc, ICSetProp_Fallback *stub,
                     HandleObject obj, HandleShape oldShape, uint32_t oldSlots,
                     HandlePropertyName name, HandleId id, HandleValue rhs, bool *attached)
{
    JS_ASSERT(!*attached);

    // Only plain native, unwatched objects are cacheable.
    if (!obj->isNative() || obj->watched())
        return true;

    RootedShape shape(cx);
    RootedObject holder(cx);
    if (!EffectlesslyLookupProperty(cx, obj, name, &holder, &shape))
        return false;

    size_t chainDepth;
    if (IsCacheableSetPropAddSlot(cx, obj, oldShape, oldSlots, id, holder, shape, &chainDepth)) {
        // Don't attach if proto chain depth is too high.
        if (chainDepth > ICSetProp_NativeAdd::MAX_PROTO_CHAIN_DEPTH)
            return true;

        bool isFixedSlot;
        uint32_t offset;
        GetFixedOrDynamicSlotOffset(obj, shape->slot(), &isFixedSlot, &offset);

        IonSpew(IonSpew_BaselineIC, "  Generating SetProp(NativeObject.ADD) stub");
        ICSetPropNativeAddCompiler compiler(cx, obj, oldShape, chainDepth, isFixedSlot, offset);
        ICUpdatedStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;
        // Seed the stub's type-update chain with the RHS value being written.
        if (!newStub->addUpdateStubForValue(cx, script, obj, id, rhs))
            return false;

        stub->addNewStub(newStub);
        *attached = true;
        return true;
    }

    if (IsCacheableSetPropWriteSlot(obj, oldShape, holder, shape)) {
        bool isFixedSlot;
        uint32_t offset;
        GetFixedOrDynamicSlotOffset(obj, shape->slot(), &isFixedSlot, &offset);

        IonSpew(IonSpew_BaselineIC, "  Generating SetProp(NativeObject.PROP) stub");
        ICSetProp_Native::Compiler compiler(cx, obj, isFixedSlot, offset);
        ICUpdatedStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;
        // Seed the stub's type-update chain with the RHS value being written.
        if (!newStub->addUpdateStubForValue(cx, script, obj, id, rhs))
            return false;

        stub->addNewStub(newStub);
        *attached = true;
        return true;
    }

    bool isScripted = false;
    bool cacheableCall = IsCacheableSetPropCall(cx, obj, holder, shape, &isScripted);

    // Try handling scripted setters.
    if (cacheableCall && isScripted) {
        RootedFunction callee(cx, &shape->setterObject()->as<JSFunction>());
        // The cacheable-call check only succeeds for a setter found on a
        // proto object, so |obj| and |holder| must differ here.
        JS_ASSERT(obj != holder);
        JS_ASSERT(callee->hasScript());

        IonSpew(IonSpew_BaselineIC, "  Generating SetProp(NativeObj/ScriptedSetter %s:%d) stub",
                    callee->nonLazyScript()->filename(), callee->nonLazyScript()->lineno());

        ICSetProp_CallScripted::Compiler compiler(cx, obj, holder, callee, script->pcToOffset(pc));
        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;

        stub->addNewStub(newStub);
        *attached = true;
        return true;
    }

    // Try handling JSNative setters.
    if (cacheableCall && !isScripted) {
        RootedFunction callee(cx, &shape->setterObject()->as<JSFunction>());
        JS_ASSERT(obj != holder);
        JS_ASSERT(callee->isNative());

        IonSpew(IonSpew_BaselineIC, "  Generating SetProp(NativeObj/NativeSetter %p) stub",
                    callee->native());

        ICSetProp_CallNative::Compiler compiler(cx, obj, holder, callee, script->pcToOffset(pc));
        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;

        stub->addNewStub(newStub);
        *attached = true;
        return true;
    }

    return true;
}
  1.7341 +
// Fallback VM path for the SETPROP family of ops (SETPROP, SETNAME, SETGNAME,
// INITPROP, SETALIASEDVAR).  Performs the property write through the generic
// interpreter helpers, then attempts to attach an optimized stub for future
// hits.  |res| is set to |rhs| because the RHS remains on the stack as the
// result of the operation.
static bool
DoSetPropFallback(JSContext *cx, BaselineFrame *frame, ICSetProp_Fallback *stub_,
                  HandleValue lhs, HandleValue rhs, MutableHandleValue res)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICSetProp_Fallback *> stub(frame, stub_);

    RootedScript script(cx, frame->script());
    jsbytecode *pc = stub->icEntry()->pc(script);
    JSOp op = JSOp(*pc);
    FallbackICSpew(cx, stub, "SetProp(%s)", js_CodeName[op]);

    JS_ASSERT(op == JSOP_SETPROP ||
              op == JSOP_SETNAME ||
              op == JSOP_SETGNAME ||
              op == JSOP_INITPROP ||
              op == JSOP_SETALIASEDVAR);

    // Aliased vars carry their name via scope coordinates rather than an
    // atom index in the bytecode.
    RootedPropertyName name(cx);
    if (op == JSOP_SETALIASEDVAR)
        name = ScopeCoordinateName(cx->runtime()->scopeCoordinateNameCache, script, pc);
    else
        name = script->getName(pc);
    RootedId id(cx, NameToId(name));

    RootedObject obj(cx, ToObjectFromStack(cx, lhs));
    if (!obj)
        return false;
    // Snapshot the shape and dynamic-slot count *before* the write, so
    // TryAttachSetPropStub can recognize a shape-changing add-slot write.
    RootedShape oldShape(cx, obj->lastProperty());
    uint32_t oldSlots = obj->numDynamicSlots();

    if (op == JSOP_INITPROP) {
        MOZ_ASSERT(name != cx->names().proto, "should have used JSOP_MUTATEPROTO");
        MOZ_ASSERT(obj->is<JSObject>());
        if (!DefineNativeProperty(cx, obj, id, rhs, nullptr, nullptr, JSPROP_ENUMERATE))
            return false;
    } else if (op == JSOP_SETNAME || op == JSOP_SETGNAME) {
        if (!SetNameOperation(cx, script, pc, obj, rhs))
            return false;
    } else if (op == JSOP_SETALIASEDVAR) {
        obj->as<ScopeObject>().setAliasedVar(cx, pc, name, rhs);
    } else {
        MOZ_ASSERT(op == JSOP_SETPROP);
        // Strictness is a compile-time template parameter of SetProperty.
        if (script->strict()) {
            if (!js::SetProperty<true>(cx, obj, id, rhs))
                return false;
        } else {
            if (!js::SetProperty<false>(cx, obj, id, rhs))
                return false;
        }
    }

    // Leave the RHS on the stack.
    res.set(rhs);

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    if (stub->numOptimizedStubs() >= ICSetProp_Fallback::MAX_OPTIMIZED_STUBS) {
        // TODO: Discard all stubs in this IC and replace with generic setprop stub.
        return true;
    }

    bool attached = false;
    if (!TryAttachSetPropStub(cx, script, pc, stub, obj, oldShape, oldSlots, name, id, rhs,
         &attached))
    {
        return false;
    }
    if (attached)
        return true;

    // Nothing could be attached; remember that so the IC can stop trying.
    JS_ASSERT(!attached);
    stub->noteUnoptimizableAccess();

    return true;
}
  1.7420 +
typedef bool (*DoSetPropFallbackFn)(JSContext *, BaselineFrame *, ICSetProp_Fallback *,
                                    HandleValue, HandleValue, MutableHandleValue);
// PopValues(2) discards the two values the stub pushes to keep the stack
// synced for the expression decompiler (see generateStubCode below).
static const VMFunction DoSetPropFallbackInfo =
    FunctionInfo<DoSetPropFallbackFn>(DoSetPropFallback, PopValues(2));
  1.7425 +
// Fallback stub: tail-call into DoSetPropFallback, and lay down the special
// return points used when debug-mode recompilation bails out of an inlined
// scripted setter call.
bool
ICSetProp_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(R0 == JSReturnOperand);

    EmitRestoreTailCallReg(masm);

    // Ensure stack is fully synced for the expression decompiler.
    masm.pushValue(R0);
    masm.pushValue(R1);

    // Push arguments.
    masm.pushValue(R1);
    masm.pushValue(R0);
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    if (!tailCallVM(DoSetPropFallbackInfo, masm))
        return false;

    // What follows is bailout debug-mode recompile code for inlined scripted
    // setters.  The return address pointed to by the baseline stack points
    // here.
    //
    // Even though the fallback frame doesn't enter a stub frame, the CallScripted
    // frame that we are emulating does. Again, we lie.
#ifdef DEBUG
    entersStubFrame_ = true;
#endif

    Label leaveStubCommon;

    // Two entry points — return-from-VM-stub and return-from-Ion — that
    // converge on a common stub-frame teardown.  Their offsets are published
    // to the JitCompartment in postGenerateStubCode.
    returnFromStubOffset_ = masm.currentOffset();
    leaveStubFrameHead(masm, false);
    masm.jump(&leaveStubCommon);

    returnFromIonOffset_ = masm.currentOffset();
    leaveStubFrameHead(masm, true);

    masm.bind(&leaveStubCommon);
    leaveStubFrameCommonTail(masm);

    // Retrieve the stashed initial argument from the caller's frame before returning
    EmitUnstowICValues(masm, 1);
    EmitReturnFromIC(masm);

    return true;
}
  1.7474 +
  1.7475 +bool
  1.7476 +ICSetProp_Fallback::Compiler::postGenerateStubCode(MacroAssembler &masm, Handle<JitCode *> code)
  1.7477 +{
  1.7478 +    JitCompartment *comp = cx->compartment()->jitCompartment();
  1.7479 +
  1.7480 +    CodeOffsetLabel fromIon(returnFromIonOffset_);
  1.7481 +    fromIon.fixup(&masm);
  1.7482 +    comp->initBaselineSetPropReturnFromIonAddr(code->raw() + fromIon.offset());
  1.7483 +
  1.7484 +    CodeOffsetLabel fromVM(returnFromStubOffset_);
  1.7485 +    fromVM.fixup(&masm);
  1.7486 +    comp->initBaselineSetPropReturnFromStubAddr(code->raw() + fromVM.offset());
  1.7487 +
  1.7488 +    return true;
  1.7489 +}
  1.7490 +
// Optimized stub: store to an existing own data slot of a native object,
// guarded on the object's shape and type object.  Runs the type-update IC
// on the RHS before performing the raw slot store so TI stays in sync.
bool
ICSetProp_Native::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;

    // Guard input is an object.
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    GeneralRegisterSet regs(availableGeneralRegs(2));
    Register scratch = regs.takeAny();

    // Unbox and shape guard.
    Register objReg = masm.extractObject(R0, ExtractTemp0);
    masm.loadPtr(Address(BaselineStubReg, ICSetProp_Native::offsetOfShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failure);

    // Guard that the type object matches.
    masm.loadPtr(Address(BaselineStubReg, ICSetProp_Native::offsetOfType()), scratch);
    masm.branchPtr(Assembler::NotEqual, Address(objReg, JSObject::offsetOfType()), scratch,
                   &failure);

    // Stow both R0 and R1 (object and value).
    EmitStowICValues(masm, 2);

    // Type update stub expects the value to check in R0.
    masm.moveValue(R1, R0);

    // Call the type-update stub.
    if (!callTypeUpdateIC(masm, sizeof(Value)))
        return false;

    // Unstow R0 and R1 (object and key)
    EmitUnstowICValues(masm, 2);

    regs.add(R0);
    regs.takeUnchecked(objReg);

    // For a fixed slot the value lives inline in the object; otherwise it is
    // in the dynamically allocated slots array.
    Register holderReg;
    if (isFixedSlot_) {
        holderReg = objReg;
    } else {
        holderReg = regs.takeAny();
        masm.loadPtr(Address(objReg, JSObject::offsetOfSlots()), holderReg);
    }

    // Perform the store.
    // The stub stores a byte offset, hence the TimesOne scaling.
    masm.load32(Address(BaselineStubReg, ICSetProp_Native::offsetOfOffset()), scratch);
    EmitPreBarrier(masm, BaseIndex(holderReg, scratch, TimesOne), MIRType_Value);
    masm.storeValue(R1, BaseIndex(holderReg, scratch, TimesOne));
    if (holderReg != objReg)
        regs.add(holderReg);
#ifdef JSGC_GENERATIONAL
    {
        // Post-barrier for a possible tenured -> nursery edge.
        Register scr = regs.takeAny();
        GeneralRegisterSet saveRegs;
        saveRegs.add(R1);
        emitPostWriteBarrierSlot(masm, objReg, R1, scr, saveRegs);
        regs.add(scr);
    }
#endif

    // The RHS has to be in R0.
    masm.moveValue(R1, R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.7561 +
// Instantiate an ICSetProp_NativeAddImpl<N> specialized on the observed
// proto-chain depth.  The shape vector holds the receiver's old shape at
// index 0 followed by one shape per proto object; the generated stub guards
// on each in turn.  Returns nullptr on OOM.
ICUpdatedStub *
ICSetPropNativeAddCompiler::getStub(ICStubSpace *space)
{
    AutoShapeVector shapes(cx);
    if (!shapes.append(oldShape_))
        return nullptr;

    if (!GetProtoShapes(obj_, protoChainDepth_, &shapes))
        return nullptr;

    // If the max depth changes, the switch below needs a new case.
    JS_STATIC_ASSERT(ICSetProp_NativeAdd::MAX_PROTO_CHAIN_DEPTH == 4);

    ICUpdatedStub *stub = nullptr;
    switch(protoChainDepth_) {
      case 0: stub = getStubSpecific<0>(space, &shapes); break;
      case 1: stub = getStubSpecific<1>(space, &shapes); break;
      case 2: stub = getStubSpecific<2>(space, &shapes); break;
      case 3: stub = getStubSpecific<3>(space, &shapes); break;
      case 4: stub = getStubSpecific<4>(space, &shapes); break;
      default: MOZ_ASSUME_UNREACHABLE("ProtoChainDepth too high.");
    }
    if (!stub || !stub->initUpdatingChain(cx, space))
        return nullptr;
    return stub;
}
  1.7587 +
// Optimized stub: shape-changing write that adds a new slot to a native
// object.  Guards the receiver's old shape, its type object, and the shape
// of every object on the proto chain up to protoChainDepth_, then swaps in
// the new shape and stores the value.
bool
ICSetPropNativeAddCompiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    Label failureUnstow;

    // Guard input is an object.
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    GeneralRegisterSet regs(availableGeneralRegs(2));
    Register scratch = regs.takeAny();

    // Unbox and guard against old shape.
    Register objReg = masm.extractObject(R0, ExtractTemp0);
    masm.loadPtr(Address(BaselineStubReg, ICSetProp_NativeAddImpl<0>::offsetOfShape(0)), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failure);

    // Guard that the type object matches.
    masm.loadPtr(Address(BaselineStubReg, ICSetProp_NativeAdd::offsetOfType()), scratch);
    masm.branchPtr(Assembler::NotEqual, Address(objReg, JSObject::offsetOfType()), scratch,
                   &failure);

    // Stow both R0 and R1 (object and value).
    EmitStowICValues(masm, 2);

    // R0/R1 are stowed, so all general registers but one are free again.
    regs = availableGeneralRegs(1);
    scratch = regs.takeAny();
    Register protoReg = regs.takeAny();
    // Check the proto chain.
    for (size_t i = 0; i < protoChainDepth_; i++) {
        masm.loadObjProto(i == 0 ? objReg : protoReg, protoReg);
        // A null proto terminates the chain early: guard fails.
        masm.branchTestPtr(Assembler::Zero, protoReg, protoReg, &failureUnstow);
        masm.loadPtr(Address(BaselineStubReg, ICSetProp_NativeAddImpl<0>::offsetOfShape(i + 1)),
                     scratch);
        masm.branchTestObjShape(Assembler::NotEqual, protoReg, scratch, &failureUnstow);
    }

    // Shape and type checks succeeded, ok to proceed.

    // Load RHS into R0 for TypeUpdate check.
    // Stack is currently: [..., ObjValue, RHSValue, MaybeReturnAddr? ]
    masm.loadValue(Address(BaselineStackReg, ICStackValueOffset), R0);

    // Call the type-update stub.
    if (!callTypeUpdateIC(masm, sizeof(Value)))
        return false;

    // Unstow R0 and R1 (object and key)
    EmitUnstowICValues(masm, 2);
    regs = availableGeneralRegs(2);
    scratch = regs.takeAny();

    // Changing object shape.  Write the object's new shape.
    Address shapeAddr(objReg, JSObject::offsetOfShape());
    EmitPreBarrier(masm, shapeAddr, MIRType_Shape);
    masm.loadPtr(Address(BaselineStubReg, ICSetProp_NativeAdd::offsetOfNewShape()), scratch);
    masm.storePtr(scratch, shapeAddr);

    // For a fixed slot the value lives inline in the object; otherwise it is
    // in the dynamically allocated slots array.
    Register holderReg;
    regs.add(R0);
    regs.takeUnchecked(objReg);
    if (isFixedSlot_) {
        holderReg = objReg;
    } else {
        holderReg = regs.takeAny();
        masm.loadPtr(Address(objReg, JSObject::offsetOfSlots()), holderReg);
    }

    // Perform the store.  No write barrier required since this is a new
    // initialization.
    masm.load32(Address(BaselineStubReg, ICSetProp_NativeAdd::offsetOfOffset()), scratch);
    masm.storeValue(R1, BaseIndex(holderReg, scratch, TimesOne));

    if (holderReg != objReg)
        regs.add(holderReg);

#ifdef JSGC_GENERATIONAL
    {
        // Post-barrier for a possible tenured -> nursery edge.
        Register scr = regs.takeAny();
        GeneralRegisterSet saveRegs;
        saveRegs.add(R1);
        emitPostWriteBarrierSlot(masm, objReg, R1, scr, saveRegs);
    }
#endif

    // The RHS has to be in R0.
    masm.moveValue(R1, R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failureUnstow);
    EmitUnstowICValues(masm, 2);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.7685 +
// Optimized stub: invoke a scripted setter found on a holder object.
// Guards the receiver's shape and the holder's shape, then calls the setter
// with |obj| as thisv and the RHS as the single argument, going through the
// arguments rectifier if the setter declares more than one formal.
bool
ICSetProp_CallScripted::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    Label failureUnstow;
    Label failureLeaveStubFrame;

    // Guard input is an object.
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    // Stow R0 and R1 to free up registers.
    EmitStowICValues(masm, 2);

    GeneralRegisterSet regs(availableGeneralRegs(1));
    Register scratch = regs.takeAnyExcluding(BaselineTailCallReg);

    // Unbox and shape guard.
    Register objReg = masm.extractObject(R0, ExtractTemp0);
    masm.loadPtr(Address(BaselineStubReg, ICSetProp_CallScripted::offsetOfShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failureUnstow);

    // Guard the holder's shape: the setter must still live where we found it.
    Register holderReg = regs.takeAny();
    masm.loadPtr(Address(BaselineStubReg, ICSetProp_CallScripted::offsetOfHolder()), holderReg);
    masm.loadPtr(Address(BaselineStubReg, ICSetProp_CallScripted::offsetOfHolderShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, holderReg, scratch, &failureUnstow);
    regs.add(holderReg);

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, scratch);

    // Load callee function and code.  To ensure that |code| doesn't end up being
    // ArgumentsRectifierReg, if it's available we assign it to |callee| instead.
    Register callee;
    if (regs.has(ArgumentsRectifierReg)) {
        callee = ArgumentsRectifierReg;
        regs.take(callee);
    } else {
        callee = regs.takeAny();
    }
    Register code = regs.takeAny();
    masm.loadPtr(Address(BaselineStubReg, ICSetProp_CallScripted::offsetOfSetter()), callee);
    masm.branchIfFunctionHasNoScript(callee, &failureLeaveStubFrame);
    masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), code);
    masm.loadBaselineOrIonRaw(code, code, SequentialExecution, &failureLeaveStubFrame);

    // Setter is called with the new value as the only argument, and |obj| as thisv.
    // Note that we use Push, not push, so that callIon will align the stack
    // properly on ARM.

    // To Push R1, read it off of the stowed values on stack.
    // Stack: [ ..., R0, R1, ..STUBFRAME-HEADER.. ]
    masm.movePtr(BaselineStackReg, scratch);
    masm.PushValue(Address(scratch, STUB_FRAME_SIZE));
    masm.Push(R0);
    EmitCreateStubFrameDescriptor(masm, scratch);
    masm.Push(Imm32(1));  // ActualArgc is 1
    masm.Push(callee);
    masm.Push(scratch);

    // Handle arguments underflow.
    // If the setter declares more than one formal, go through the arguments
    // rectifier to pad the missing formals with |undefined|.
    Label noUnderflow;
    masm.load16ZeroExtend(Address(callee, JSFunction::offsetOfNargs()), scratch);
    masm.branch32(Assembler::BelowOrEqual, scratch, Imm32(1), &noUnderflow);
    {
        // Call the arguments rectifier.
        JS_ASSERT(ArgumentsRectifierReg != code);

        JitCode *argumentsRectifier =
            cx->runtime()->jitRuntime()->getArgumentsRectifier(SequentialExecution);

        masm.movePtr(ImmGCPtr(argumentsRectifier), code);
        masm.loadPtr(Address(code, JitCode::offsetOfCode()), code);
        masm.mov(ImmWord(1), ArgumentsRectifierReg);
    }

    masm.bind(&noUnderflow);

    // If needed, update SPS Profiler frame entry.  At this point, callee and scratch can
    // be clobbered.
    {
        GeneralRegisterSet availRegs = availableGeneralRegs(0);
        availRegs.take(ArgumentsRectifierReg);
        availRegs.take(code);
        emitProfilingUpdate(masm, availRegs, ICSetProp_CallScripted::offsetOfPCOffset());
    }

    masm.callIon(code);

    leaveStubFrame(masm, true);
    // Do not care about return value from function. The original RHS should be returned
    // as the result of this operation.
    EmitUnstowICValues(masm, 2);
    masm.moveValue(R1, R0);
    EmitReturnFromIC(masm);

    // Leave stub frame and go to next stub.
    masm.bind(&failureLeaveStubFrame);
    leaveStubFrame(masm, false);

    // Unstow R0 and R1
    masm.bind(&failureUnstow);
    EmitUnstowICValues(masm, 2);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.7794 +
  1.7795 +static bool
  1.7796 +DoCallNativeSetter(JSContext *cx, HandleFunction callee, HandleObject obj, HandleValue val)
  1.7797 +{
  1.7798 +    JS_ASSERT(callee->isNative());
  1.7799 +    JSNative natfun = callee->native();
  1.7800 +
  1.7801 +    JS::AutoValueArray<3> vp(cx);
  1.7802 +    vp[0].setObject(*callee.get());
  1.7803 +    vp[1].setObject(*obj.get());
  1.7804 +    vp[2].set(val);
  1.7805 +
  1.7806 +    return natfun(cx, 1, vp.begin());
  1.7807 +}
  1.7808 +
// VM-call trampoline used by ICSetProp_CallNative stubs below.
typedef bool (*DoCallNativeSetterFn)(JSContext *, HandleFunction, HandleObject, HandleValue);
static const VMFunction DoCallNativeSetterInfo =
    FunctionInfo<DoCallNativeSetterFn>(DoCallNativeSetter);
  1.7812 +
// Optimized stub: invoke a JSNative setter found on a holder object.
// Guards the receiver's shape and the holder's shape, then calls through
// the DoCallNativeSetter VM wrapper with |obj| as thisv and the RHS value.
bool
ICSetProp_CallNative::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    Label failureUnstow;

    // Guard input is an object.
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    // Stow R0 and R1 to free up registers.
    EmitStowICValues(masm, 2);

    GeneralRegisterSet regs(availableGeneralRegs(1));
    Register scratch = regs.takeAnyExcluding(BaselineTailCallReg);

    // Unbox and shape guard.
    Register objReg = masm.extractObject(R0, ExtractTemp0);
    masm.loadPtr(Address(BaselineStubReg, ICSetProp_CallNative::offsetOfShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failureUnstow);

    // Guard the holder's shape: the setter must still live where we found it.
    Register holderReg = regs.takeAny();
    masm.loadPtr(Address(BaselineStubReg, ICSetProp_CallNative::offsetOfHolder()), holderReg);
    masm.loadPtr(Address(BaselineStubReg, ICSetProp_CallNative::offsetOfHolderShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, holderReg, scratch, &failureUnstow);
    regs.add(holderReg);

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, scratch);

    // Load the callee function.  (Unlike the scripted-setter stub, no code
    // pointer is needed: the call goes through the DoCallNativeSetter VM
    // wrapper.)
    Register callee = regs.takeAny();
    masm.loadPtr(Address(BaselineStubReg, ICSetProp_CallNative::offsetOfSetter()), callee);

    // Push DoCallNativeSetter's arguments, last first: the RHS value (read
    // off the stowed values on the stack), |obj|, then the callee.
    // Stack: [ ..., R0, R1, ..STUBFRAME-HEADER.. ]
    masm.movePtr(BaselineStackReg, scratch);
    masm.pushValue(Address(scratch, STUB_FRAME_SIZE));
    masm.push(objReg);
    masm.push(callee);

    // Don't need to preserve R0 anymore.
    regs.add(R0);

    // If needed, update SPS Profiler frame entry.
    emitProfilingUpdate(masm, regs, ICSetProp_CallNative::offsetOfPCOffset());

    if (!callVM(DoCallNativeSetterInfo, masm))
        return false;
    leaveStubFrame(masm);

    // Do not care about return value from function. The original RHS should be returned
    // as the result of this operation.
    EmitUnstowICValues(masm, 2);
    masm.moveValue(R1, R0);
    EmitReturnFromIC(masm);

    // Unstow R0 and R1
    masm.bind(&failureUnstow);
    EmitUnstowICValues(masm, 2);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.7879 +
  1.7880 +//
  1.7881 +// Call_Fallback
  1.7882 +//
  1.7883 +
// Try to attach an optimized stub for fun.apply(thisArg, args) where |thisv|
// is the function being applied.  Only the exact two-argument forms with
// |arguments| or a dense array as the second argument are handled, and only
// for scripted targets.  "Nothing attached" is a successful no-op; false is
// returned only on OOM.
static bool
TryAttachFunApplyStub(JSContext *cx, ICCall_Fallback *stub, HandleScript script, jsbytecode *pc,
                      HandleValue thisv, uint32_t argc, Value *argv)
{
    if (argc != 2)
        return true;

    if (!thisv.isObject() || !thisv.toObject().is<JSFunction>())
        return true;
    RootedFunction target(cx, &thisv.toObject().as<JSFunction>());

    bool isScripted = target->hasJITCode();

    // right now, only handle situation where second argument is |arguments|
    // (the lazy-arguments optimization requires the script to not need a
    // real arguments object).
    if (argv[1].isMagic(JS_OPTIMIZED_ARGUMENTS) && !script->needsArgsObj()) {
        if (isScripted && !stub->hasStub(ICStub::Call_ScriptedApplyArguments)) {
            IonSpew(IonSpew_BaselineIC, "  Generating Call_ScriptedApplyArguments stub");

            ICCall_ScriptedApplyArguments::Compiler compiler(
                cx, stub->fallbackMonitorStub()->firstMonitorStub(), script->pcToOffset(pc));
            ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
            if (!newStub)
                return false;

            stub->addNewStub(newStub);
            return true;
        }

        // TODO: handle FUNAPPLY for native targets.
    }

    if (argv[1].isObject() && argv[1].toObject().is<ArrayObject>()) {
        if (isScripted && !stub->hasStub(ICStub::Call_ScriptedApplyArray)) {
            IonSpew(IonSpew_BaselineIC, "  Generating Call_ScriptedApplyArray stub");

            ICCall_ScriptedApplyArray::Compiler compiler(
                cx, stub->fallbackMonitorStub()->firstMonitorStub(), script->pcToOffset(pc));
            ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
            if (!newStub)
                return false;

            stub->addNewStub(newStub);
            return true;
        }
    }
    return true;
}
  1.7931 +
// Try to attach a stub for Function.prototype.call with a scripted |this|
// function.  Sets *attached on success; returning true with *attached false
// means nothing was attachable (not an error).
static bool
TryAttachFunCallStub(JSContext *cx, ICCall_Fallback *stub, HandleScript script, jsbytecode *pc,
                     HandleValue thisv, bool *attached)
{
    // Try to attach a stub for Function.prototype.call with scripted |this|.

    *attached = false;
    if (!thisv.isObject() || !thisv.toObject().is<JSFunction>())
        return true;
    RootedFunction target(cx, &thisv.toObject().as<JSFunction>());

    // Attach a stub if the script can be Baseline-compiled. We do this also
    // if the script is not yet compiled to avoid attaching a CallNative stub
    // that handles everything, even after the callee becomes hot.
    if (target->hasScript() && target->nonLazyScript()->canBaselineCompile() &&
        !stub->hasStub(ICStub::Call_ScriptedFunCall))
    {
        IonSpew(IonSpew_BaselineIC, "  Generating Call_ScriptedFunCall stub");

        ICCall_ScriptedFunCall::Compiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
                                                  script->pcToOffset(pc));
        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;

        *attached = true;
        stub->addNewStub(newStub);
        return true;
    }

    return true;
}
  1.7964 +
  1.7965 +static bool
  1.7966 +GetTemplateObjectForNative(JSContext *cx, HandleScript script, jsbytecode *pc,
  1.7967 +                           Native native, const CallArgs &args, MutableHandleObject res)
  1.7968 +{
  1.7969 +    // Check for natives to which template objects can be attached. This is
  1.7970 +    // done to provide templates to Ion for inlining these natives later on.
  1.7971 +
  1.7972 +    if (native == js_Array) {
  1.7973 +        // Note: the template array won't be used if its length is inaccurately
  1.7974 +        // computed here.  (We allocate here because compilation may occur on a
  1.7975 +        // separate thread where allocation is impossible.)
  1.7976 +        size_t count = 0;
  1.7977 +        if (args.length() != 1)
  1.7978 +            count = args.length();
  1.7979 +        else if (args.length() == 1 && args[0].isInt32() && args[0].toInt32() >= 0)
  1.7980 +            count = args[0].toInt32();
  1.7981 +        res.set(NewDenseUnallocatedArray(cx, count, nullptr, TenuredObject));
  1.7982 +        if (!res)
  1.7983 +            return false;
  1.7984 +
  1.7985 +        types::TypeObject *type = types::TypeScript::InitObject(cx, script, pc, JSProto_Array);
  1.7986 +        if (!type)
  1.7987 +            return false;
  1.7988 +        res->setType(type);
  1.7989 +        return true;
  1.7990 +    }
  1.7991 +
  1.7992 +    if (native == intrinsic_NewDenseArray) {
  1.7993 +        res.set(NewDenseUnallocatedArray(cx, 0, nullptr, TenuredObject));
  1.7994 +        if (!res)
  1.7995 +            return false;
  1.7996 +
  1.7997 +        types::TypeObject *type = types::TypeScript::InitObject(cx, script, pc, JSProto_Array);
  1.7998 +        if (!type)
  1.7999 +            return false;
  1.8000 +        res->setType(type);
  1.8001 +        return true;
  1.8002 +    }
  1.8003 +
  1.8004 +    if (native == js::array_concat) {
  1.8005 +        if (args.thisv().isObject() && args.thisv().toObject().is<ArrayObject>() &&
  1.8006 +            !args.thisv().toObject().hasSingletonType())
  1.8007 +        {
  1.8008 +            res.set(NewDenseEmptyArray(cx, args.thisv().toObject().getProto(), TenuredObject));
  1.8009 +            if (!res)
  1.8010 +                return false;
  1.8011 +            res->setType(args.thisv().toObject().type());
  1.8012 +            return true;
  1.8013 +        }
  1.8014 +    }
  1.8015 +
  1.8016 +    if (native == js::str_split && args.length() == 1 && args[0].isString()) {
  1.8017 +        res.set(NewDenseUnallocatedArray(cx, 0, nullptr, TenuredObject));
  1.8018 +        if (!res)
  1.8019 +            return false;
  1.8020 +
  1.8021 +        types::TypeObject *type = types::TypeScript::InitObject(cx, script, pc, JSProto_Array);
  1.8022 +        if (!type)
  1.8023 +            return false;
  1.8024 +        res->setType(type);
  1.8025 +        return true;
  1.8026 +    }
  1.8027 +
  1.8028 +    if (native == js_String) {
  1.8029 +        RootedString emptyString(cx, cx->runtime()->emptyString);
  1.8030 +        res.set(StringObject::create(cx, emptyString, TenuredObject));
  1.8031 +        if (!res)
  1.8032 +            return false;
  1.8033 +        return true;
  1.8034 +    }
  1.8035 +
  1.8036 +    return true;
  1.8037 +}
  1.8038 +
  1.8039 +static bool
  1.8040 +TryAttachCallStub(JSContext *cx, ICCall_Fallback *stub, HandleScript script, jsbytecode *pc,
  1.8041 +                  JSOp op, uint32_t argc, Value *vp, bool constructing, bool useNewType)
  1.8042 +{
  1.8043 +    if (useNewType || op == JSOP_EVAL)
  1.8044 +        return true;
  1.8045 +
  1.8046 +    if (stub->numOptimizedStubs() >= ICCall_Fallback::MAX_OPTIMIZED_STUBS) {
  1.8047 +        // TODO: Discard all stubs in this IC and replace with inert megamorphic stub.
  1.8048 +        // But for now we just bail.
  1.8049 +        return true;
  1.8050 +    }
  1.8051 +
  1.8052 +    RootedValue callee(cx, vp[0]);
  1.8053 +    RootedValue thisv(cx, vp[1]);
  1.8054 +
  1.8055 +    if (!callee.isObject())
  1.8056 +        return true;
  1.8057 +
  1.8058 +    RootedObject obj(cx, &callee.toObject());
  1.8059 +    if (!obj->is<JSFunction>())
  1.8060 +        return true;
  1.8061 +
  1.8062 +    RootedFunction fun(cx, &obj->as<JSFunction>());
  1.8063 +
  1.8064 +    if (fun->hasScript()) {
  1.8065 +        // Never attach optimized scripted call stubs for JSOP_FUNAPPLY.
  1.8066 +        // MagicArguments may escape the frame through them.
  1.8067 +        if (op == JSOP_FUNAPPLY)
  1.8068 +            return true;
  1.8069 +
  1.8070 +        // If callee is not an interpreted constructor, we have to throw.
  1.8071 +        if (constructing && !fun->isInterpretedConstructor())
  1.8072 +            return true;
  1.8073 +
  1.8074 +        RootedScript calleeScript(cx, fun->nonLazyScript());
  1.8075 +        if (!calleeScript->hasBaselineScript() && !calleeScript->hasIonScript())
  1.8076 +            return true;
  1.8077 +
  1.8078 +        if (calleeScript->shouldCloneAtCallsite())
  1.8079 +            return true;
  1.8080 +
  1.8081 +        // Check if this stub chain has already generalized scripted calls.
  1.8082 +        if (stub->scriptedStubsAreGeneralized()) {
  1.8083 +            IonSpew(IonSpew_BaselineIC, "  Chain already has generalized scripted call stub!");
  1.8084 +            return true;
  1.8085 +        }
  1.8086 +
  1.8087 +        if (stub->scriptedStubCount() >= ICCall_Fallback::MAX_SCRIPTED_STUBS) {
  1.8088 +            // Create a Call_AnyScripted stub.
  1.8089 +            IonSpew(IonSpew_BaselineIC, "  Generating Call_AnyScripted stub (cons=%s)",
  1.8090 +                    constructing ? "yes" : "no");
  1.8091 +
  1.8092 +            ICCallScriptedCompiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
  1.8093 +                                            constructing, script->pcToOffset(pc));
  1.8094 +            ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
  1.8095 +            if (!newStub)
  1.8096 +                return false;
  1.8097 +
  1.8098 +            // Before adding new stub, unlink all previous Call_Scripted.
  1.8099 +            stub->unlinkStubsWithKind(cx, ICStub::Call_Scripted);
  1.8100 +
  1.8101 +            // Add new generalized stub.
  1.8102 +            stub->addNewStub(newStub);
  1.8103 +            return true;
  1.8104 +        }
  1.8105 +
  1.8106 +        // Keep track of the function's |prototype| property in type
  1.8107 +        // information, for use during Ion compilation.
  1.8108 +        if (IsIonEnabled(cx))
  1.8109 +            types::EnsureTrackPropertyTypes(cx, fun, NameToId(cx->names().prototype));
  1.8110 +
  1.8111 +        // Remember the template object associated with any script being called
  1.8112 +        // as a constructor, for later use during Ion compilation.
  1.8113 +        RootedObject templateObject(cx);
  1.8114 +        if (constructing) {
  1.8115 +            templateObject = CreateThisForFunction(cx, fun, MaybeSingletonObject);
  1.8116 +            if (!templateObject)
  1.8117 +                return false;
  1.8118 +        }
  1.8119 +
  1.8120 +        IonSpew(IonSpew_BaselineIC,
  1.8121 +                "  Generating Call_Scripted stub (fun=%p, %s:%d, cons=%s)",
  1.8122 +                fun.get(), fun->nonLazyScript()->filename(), fun->nonLazyScript()->lineno(),
  1.8123 +                constructing ? "yes" : "no");
  1.8124 +        ICCallScriptedCompiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
  1.8125 +                                        calleeScript, templateObject,
  1.8126 +                                        constructing, script->pcToOffset(pc));
  1.8127 +        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
  1.8128 +        if (!newStub)
  1.8129 +            return false;
  1.8130 +
  1.8131 +        stub->addNewStub(newStub);
  1.8132 +        return true;
  1.8133 +    }
  1.8134 +
  1.8135 +    if (fun->isNative() && (!constructing || (constructing && fun->isNativeConstructor()))) {
  1.8136 +        // Generalized native call stubs are not here yet!
  1.8137 +        JS_ASSERT(!stub->nativeStubsAreGeneralized());
  1.8138 +
  1.8139 +        // Check for JSOP_FUNAPPLY
  1.8140 +        if (op == JSOP_FUNAPPLY) {
  1.8141 +            if (fun->native() == js_fun_apply)
  1.8142 +                return TryAttachFunApplyStub(cx, stub, script, pc, thisv, argc, vp + 2);
  1.8143 +
  1.8144 +            // Don't try to attach a "regular" optimized call stubs for FUNAPPLY ops,
  1.8145 +            // since MagicArguments may escape through them.
  1.8146 +            return true;
  1.8147 +        }
  1.8148 +
  1.8149 +        if (op == JSOP_FUNCALL && fun->native() == js_fun_call) {
  1.8150 +            bool attached;
  1.8151 +            if (!TryAttachFunCallStub(cx, stub, script, pc, thisv, &attached))
  1.8152 +                return false;
  1.8153 +            if (attached)
  1.8154 +                return true;
  1.8155 +        }
  1.8156 +
  1.8157 +        if (stub->nativeStubCount() >= ICCall_Fallback::MAX_NATIVE_STUBS) {
  1.8158 +            IonSpew(IonSpew_BaselineIC,
  1.8159 +                    "  Too many Call_Native stubs. TODO: add Call_AnyNative!");
  1.8160 +            return true;
  1.8161 +        }
  1.8162 +
  1.8163 +        CallArgs args = CallArgsFromVp(argc, vp);
  1.8164 +        RootedObject templateObject(cx);
  1.8165 +        if (!GetTemplateObjectForNative(cx, script, pc, fun->native(), args, &templateObject))
  1.8166 +            return false;
  1.8167 +
  1.8168 +        IonSpew(IonSpew_BaselineIC, "  Generating Call_Native stub (fun=%p, cons=%s)",
  1.8169 +                fun.get(), constructing ? "yes" : "no");
  1.8170 +        ICCall_Native::Compiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
  1.8171 +                                         fun, templateObject, constructing, script->pcToOffset(pc));
  1.8172 +        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
  1.8173 +        if (!newStub)
  1.8174 +            return false;
  1.8175 +
  1.8176 +        stub->addNewStub(newStub);
  1.8177 +        return true;
  1.8178 +    }
  1.8179 +
  1.8180 +    return true;
  1.8181 +}
  1.8182 +
  1.8183 +static bool
  1.8184 +MaybeCloneFunctionAtCallsite(JSContext *cx, MutableHandleValue callee, HandleScript script,
  1.8185 +                             jsbytecode *pc)
  1.8186 +{
  1.8187 +    RootedFunction fun(cx);
  1.8188 +    if (!IsFunctionObject(callee, fun.address()))
  1.8189 +        return true;
  1.8190 +
  1.8191 +    if (!fun->hasScript() || !fun->nonLazyScript()->shouldCloneAtCallsite())
  1.8192 +        return true;
  1.8193 +
  1.8194 +    fun = CloneFunctionAtCallsite(cx, fun, script, pc);
  1.8195 +    if (!fun)
  1.8196 +        return false;
  1.8197 +
  1.8198 +    callee.setObject(*fun);
  1.8199 +    return true;
  1.8200 +}
  1.8201 +
// Fallback VM call for the Call IC: tries to attach an optimized stub, then
// performs the actual call (construct / direct-eval / ordinary invoke) and
// type-monitors the result.  |vp| holds [callee, this, arg0..argN-1]; |res|
// receives the call's return value.  Returns false on exception/OOM.
static bool
DoCallFallback(JSContext *cx, BaselineFrame *frame, ICCall_Fallback *stub_, uint32_t argc,
               Value *vp, MutableHandleValue res)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICCall_Fallback *> stub(frame, stub_);

    // Ensure vp array is rooted - we may GC in here.
    AutoArrayRooter vpRoot(cx, argc + 2, vp);

    RootedScript script(cx, frame->script());
    jsbytecode *pc = stub->icEntry()->pc(script);
    JSOp op = JSOp(*pc);
    FallbackICSpew(cx, stub, "Call(%s)", js_CodeName[op]);

    JS_ASSERT(argc == GET_ARGC(pc));

    RootedValue callee(cx, vp[0]);
    RootedValue thisv(cx, vp[1]);

    // Actual arguments start after callee and |this|.
    Value *args = vp + 2;

    // Handle funapply with JSOP_ARGUMENTS: verify the lazy-arguments
    // optimization is still valid before letting magic arguments flow through.
    if (op == JSOP_FUNAPPLY && argc == 2 && args[1].isMagic(JS_OPTIMIZED_ARGUMENTS)) {
        if (!GuardFunApplyArgumentsOptimization(cx, frame, callee, args, argc))
            return false;
    }

    // Compute constructing and useNewType flags.
    bool constructing = (op == JSOP_NEW);
    bool newType = types::UseNewType(cx, script, pc);

    // Try attaching a call stub.
    if (!TryAttachCallStub(cx, stub, script, pc, op, argc, vp, constructing, newType))
        return false;

    // Maybe update PC in profiler entry before leaving this script by call.
    if (cx->runtime()->spsProfiler.enabled() && frame->hasPushedSPSFrame())
        cx->runtime()->spsProfiler.updatePC(script, pc);

    if (!MaybeCloneFunctionAtCallsite(cx, &callee, script, pc))
        return false;

    if (op == JSOP_NEW) {
        if (!InvokeConstructor(cx, callee, argc, args, res.address()))
            return false;
    } else if (op == JSOP_EVAL && frame->scopeChain()->global().valueIsEval(callee)) {
        // Direct eval: result is left in vp[0] by DirectEval.
        if (!DirectEval(cx, CallArgsFromVp(argc, vp)))
            return false;
        res.set(vp[0]);
    } else {
        JS_ASSERT(op == JSOP_CALL || op == JSOP_FUNCALL || op == JSOP_FUNAPPLY || op == JSOP_EVAL);
        if (!Invoke(cx, thisv, callee, argc, args, res))
            return false;
    }

    types::TypeScript::Monitor(cx, script, pc, res);

    // Check if debug mode toggling made the stub invalid.  If so, the stub
    // has been unlinked and must not be written to.
    if (stub.invalid())
        return true;

    // Attach a new TypeMonitor stub for this value.
    ICTypeMonitor_Fallback *typeMonFbStub = stub->fallbackMonitorStub();
    if (!typeMonFbStub->addMonitorStubForValue(cx, script, res))
        return false;
    // Add a type monitor stub for the resulting value.
    if (!stub->addMonitorStubForValue(cx, script, res))
        return false;

    return true;
}
  1.8274 +
  1.8275 +void
  1.8276 +ICCallStubCompiler::pushCallArguments(MacroAssembler &masm, GeneralRegisterSet regs, Register argcReg)
  1.8277 +{
  1.8278 +    JS_ASSERT(!regs.has(argcReg));
  1.8279 +
  1.8280 +    // Push the callee and |this| too.
  1.8281 +    Register count = regs.takeAny();
  1.8282 +    masm.mov(argcReg, count);
  1.8283 +    masm.add32(Imm32(2), count);
  1.8284 +
  1.8285 +    // argPtr initially points to the last argument.
  1.8286 +    Register argPtr = regs.takeAny();
  1.8287 +    masm.mov(BaselineStackReg, argPtr);
  1.8288 +
  1.8289 +    // Skip 4 pointers pushed on top of the arguments: the frame descriptor,
  1.8290 +    // return address, old frame pointer and stub reg.
  1.8291 +    masm.addPtr(Imm32(STUB_FRAME_SIZE), argPtr);
  1.8292 +
  1.8293 +    // Push all values, starting at the last one.
  1.8294 +    Label loop, done;
  1.8295 +    masm.bind(&loop);
  1.8296 +    masm.branchTest32(Assembler::Zero, count, count, &done);
  1.8297 +    {
  1.8298 +        masm.pushValue(Address(argPtr, 0));
  1.8299 +        masm.addPtr(Imm32(sizeof(Value)), argPtr);
  1.8300 +
  1.8301 +        masm.sub32(Imm32(1), count);
  1.8302 +        masm.jump(&loop);
  1.8303 +    }
  1.8304 +    masm.bind(&done);
  1.8305 +}
  1.8306 +
// Emit guards for an optimized fun.apply(thisFn, args) call: checks argc == 2,
// checks the second argument (either magic lazy-arguments or a packed, hole-free
// array within the length limit), checks the callee is js_fun_apply, and checks
// the |this| function (the apply target) is native or has compiled jitcode,
// depending on |checkNative|.  Returns the register holding the target function;
// jumps to |failure| if any guard fails.
Register
ICCallStubCompiler::guardFunApply(MacroAssembler &masm, GeneralRegisterSet regs, Register argcReg,
                                  bool checkNative, FunApplyThing applyThing, Label *failure)
{
    // Ensure argc == 2
    masm.branch32(Assembler::NotEqual, argcReg, Imm32(2), failure);

    // Stack looks like:
    //      [..., CalleeV, ThisV, Arg0V, Arg1V <MaybeReturnReg>]

    Address secondArgSlot(BaselineStackReg, ICStackValueOffset);
    if (applyThing == FunApply_MagicArgs) {
        // Ensure that the second arg is magic arguments.
        masm.branchTestMagic(Assembler::NotEqual, secondArgSlot, failure);

        // Ensure that this frame doesn't have an arguments object.
        masm.branchTest32(Assembler::NonZero,
                          Address(BaselineFrameReg, BaselineFrame::reverseOffsetOfFlags()),
                          Imm32(BaselineFrame::HAS_ARGS_OBJ),
                          failure);
    } else {
        JS_ASSERT(applyThing == FunApply_Array);

        // Work on a copy of the register set so registers used for the array
        // checks are freed again afterwards.
        GeneralRegisterSet regsx = regs;

        // Ensure that the second arg is an array.
        ValueOperand secondArgVal = regsx.takeAnyValue();
        masm.loadValue(secondArgSlot, secondArgVal);

        masm.branchTestObject(Assembler::NotEqual, secondArgVal, failure);
        Register secondArgObj = masm.extractObject(secondArgVal, ExtractTemp1);

        regsx.add(secondArgVal);
        regsx.takeUnchecked(secondArgObj);

        masm.branchTestObjClass(Assembler::NotEqual, secondArgObj, regsx.getAny(),
                                &ArrayObject::class_, failure);

        // Get the array elements and ensure that initializedLength == length
        masm.loadPtr(Address(secondArgObj, JSObject::offsetOfElements()), secondArgObj);

        Register lenReg = regsx.takeAny();
        masm.load32(Address(secondArgObj, ObjectElements::offsetOfLength()), lenReg);

        masm.branch32(Assembler::NotEqual,
                      Address(secondArgObj, ObjectElements::offsetOfInitializedLength()),
                      lenReg, failure);

        // Limit the length to something reasonable (huge number of arguments can
        // blow the stack limit).
        masm.branch32(Assembler::Above, lenReg,
                      Imm32(ICCall_ScriptedApplyArray::MAX_ARGS_ARRAY_LENGTH),
                      failure);

        // Ensure no holes.  Loop through values in array and make sure none are magic.
        // Start address is secondArgObj, end address is secondArgObj + (lenReg * sizeof(Value))
        JS_STATIC_ASSERT(sizeof(Value) == 8);
        masm.lshiftPtr(Imm32(3), lenReg);
        masm.addPtr(secondArgObj, lenReg);

        Register start = secondArgObj;
        Register end = lenReg;
        Label loop;
        Label endLoop;
        masm.bind(&loop);
        masm.branchPtr(Assembler::AboveOrEqual, start, end, &endLoop);
        masm.branchTestMagic(Assembler::Equal, Address(start, 0), failure);
        masm.addPtr(Imm32(sizeof(Value)), start);
        masm.jump(&loop);
        masm.bind(&endLoop);
    }

    // Stack now confirmed to be like:
    //      [..., CalleeV, ThisV, Arg0V, MagicValue(Arguments), <MaybeReturnAddr>]

    // Load the callee, ensure that it's js_fun_apply
    ValueOperand val = regs.takeAnyValue();
    Address calleeSlot(BaselineStackReg, ICStackValueOffset + (3 * sizeof(Value)));
    masm.loadValue(calleeSlot, val);

    masm.branchTestObject(Assembler::NotEqual, val, failure);
    Register callee = masm.extractObject(val, ExtractTemp1);

    masm.branchTestObjClass(Assembler::NotEqual, callee, regs.getAny(), &JSFunction::class_,
                            failure);
    masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), callee);

    masm.branchPtr(Assembler::NotEqual, callee, ImmPtr(js_fun_apply), failure);

    // Load the |thisv|, ensure that it's a scripted function with a valid baseline or ion
    // script, or a native function.
    Address thisSlot(BaselineStackReg, ICStackValueOffset + (2 * sizeof(Value)));
    masm.loadValue(thisSlot, val);

    masm.branchTestObject(Assembler::NotEqual, val, failure);
    Register target = masm.extractObject(val, ExtractTemp1);
    regs.add(val);
    regs.takeUnchecked(target);

    masm.branchTestObjClass(Assembler::NotEqual, target, regs.getAny(), &JSFunction::class_,
                            failure);

    if (checkNative) {
        masm.branchIfInterpreted(target, failure);
    } else {
        // Target must be scripted and already have baseline or ion jitcode.
        masm.branchIfFunctionHasNoScript(target, failure);
        Register temp = regs.takeAny();
        masm.loadPtr(Address(target, JSFunction::offsetOfNativeOrScript()), temp);
        masm.loadBaselineOrIonRaw(temp, temp, SequentialExecution, failure);
        regs.add(temp);
    }
    return target;
}
  1.8420 +
// Emit code that pushes a copy of the caller frame's actual arguments onto
// the stack (used by the scripted fun.apply(..., arguments) path).
void
ICCallStubCompiler::pushCallerArguments(MacroAssembler &masm, GeneralRegisterSet regs)
{
    // Initialize startReg to point to the start of the caller's arguments
    // vector, and endReg to point just past its end.
    Register startReg = regs.takeAny();
    Register endReg = regs.takeAny();
    masm.loadPtr(Address(BaselineFrameReg, 0), startReg);
    masm.loadPtr(Address(startReg, BaselineFrame::offsetOfNumActualArgs()), endReg);
    masm.addPtr(Imm32(BaselineFrame::offsetOfArg(0)), startReg);
    // endReg = startReg + numActualArgs * sizeof(Value); shift by 3 == * 8.
    JS_STATIC_ASSERT(sizeof(Value) == 8);
    masm.lshiftPtr(Imm32(3), endReg);
    masm.addPtr(startReg, endReg);

    // Copying pre-decrements endReg by 8 until startReg is reached
    Label copyDone;
    Label copyStart;
    masm.bind(&copyStart);
    masm.branchPtr(Assembler::Equal, endReg, startReg, &copyDone);
    masm.subPtr(Imm32(sizeof(Value)), endReg);
    masm.pushValue(Address(endReg, 0));
    masm.jump(&copyStart);
    masm.bind(&copyDone);
}
  1.8445 +
// Emit code that pushes all elements of the array value at |arrayVal| onto
// the stack, last-to-first (used by the scripted fun.apply(..., array) path).
void
ICCallStubCompiler::pushArrayArguments(MacroAssembler &masm, Address arrayVal,
                                       GeneralRegisterSet regs)
{
    // Load start and end address of values to copy.
    // guardFunApply has already guaranteed that the array is packed and contains
    // no holes.
    Register startReg = regs.takeAny();
    Register endReg = regs.takeAny();
    masm.extractObject(arrayVal, startReg);
    masm.loadPtr(Address(startReg, JSObject::offsetOfElements()), startReg);
    masm.load32(Address(startReg, ObjectElements::offsetOfInitializedLength()), endReg);
    // endReg = startReg + initializedLength * sizeof(Value); shift by 3 == * 8.
    JS_STATIC_ASSERT(sizeof(Value) == 8);
    masm.lshiftPtr(Imm32(3), endReg);
    masm.addPtr(startReg, endReg);

    // Copying pre-decrements endReg by 8 until startReg is reached
    Label copyDone;
    Label copyStart;
    masm.bind(&copyStart);
    masm.branchPtr(Assembler::Equal, endReg, startReg, &copyDone);
    masm.subPtr(Imm32(sizeof(Value)), endReg);
    masm.pushValue(Address(endReg, 0));
    masm.jump(&copyStart);
    masm.bind(&copyDone);
}
  1.8472 +
// VM-function glue so generated stub code can call DoCallFallback.
typedef bool (*DoCallFallbackFn)(JSContext *, BaselineFrame *, ICCall_Fallback *,
                                 uint32_t, Value *, MutableHandleValue);
static const VMFunction DoCallFallbackInfo = FunctionInfo<DoCallFallbackFn>(DoCallFallback);
  1.8476 +
// Generate the Call fallback stub: re-pushes the call arguments, calls into
// the VM (DoCallFallback), and returns the monitored result.  Also emits two
// extra entry points (returnFromStubOffset_ / returnFromIonOffset_) used when
// returning into a reconstructed baseline frame after an Ion bailout or a
// debug-mode on-stack recompile.
bool
ICCall_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(R0 == JSReturnOperand);

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, R1.scratchReg());

    // Values are on the stack left-to-right. Calling convention wants them
    // right-to-left so duplicate them on the stack in reverse order.
    // |this| and callee are pushed last.

    GeneralRegisterSet regs(availableGeneralRegs(0));
    regs.take(R0.scratchReg()); // argc.

    pushCallArguments(masm, regs, R0.scratchReg());

    // Push the VM-call arguments: vp (current stack pointer), argc, stub.
    masm.push(BaselineStackReg);
    masm.push(R0.scratchReg());
    masm.push(BaselineStubReg);

    // Load previous frame pointer, push BaselineFrame *.
    masm.loadPtr(Address(BaselineFrameReg, 0), R0.scratchReg());
    masm.pushBaselineFramePtr(R0.scratchReg(), R0.scratchReg());

    if (!callVM(DoCallFallbackInfo, masm))
        return false;

    leaveStubFrame(masm);
    EmitReturnFromIC(masm);

    // The following asmcode is only used either when an Ion inlined frame
    // bails out into baseline jitcode or we need to do on-stack script
    // replacement for debug mode recompile.
    Label leaveStubCommon;
    returnFromStubOffset_ = masm.currentOffset();

    // Load passed-in ThisV into R1 just in case it's needed.  Need to do this before
    // we leave the stub frame since that info will be lost.
    // Current stack:  [...., ThisV, ActualArgc, CalleeToken, Descriptor ]
    masm.loadValue(Address(BaselineStackReg, 3 * sizeof(size_t)), R1);

    // Emit the coming-from-VM specific part of the stub-leaving code.
    leaveStubFrameHead(masm, /* calledIntoIon = */ false);

    // Jump to the common leave stub tail.
    masm.jump(&leaveStubCommon);

    // For Ion bailouts, the return address pushed onto the reconstructed
    // baseline stack points here.
    returnFromIonOffset_ = masm.currentOffset();

    // Same ThisV reload as above, for the from-Ion entry point.
    masm.loadValue(Address(BaselineStackReg, 3 * sizeof(size_t)), R1);

    // Emit the coming-from-Ion specific part of the stub-leaving code.
    leaveStubFrameHead(masm, /* calledIntoIon = */ true);

    // Emit the common stub-leaving tail.
    masm.bind(&leaveStubCommon);
    leaveStubFrameCommonTail(masm);

    // R1 and R0 are taken.
    regs = availableGeneralRegs(2);
    Register scratch = regs.takeAny();

    // If this is a |constructing| call, if the callee returns a non-object, we replace it with
    // the |this| object passed in.
    JS_ASSERT(JSReturnOperand == R0);
    Label skipThisReplace;
    masm.load16ZeroExtend(Address(BaselineStubReg, ICStub::offsetOfExtra()), scratch);
    masm.branchTest32(Assembler::Zero, scratch, Imm32(ICCall_Fallback::CONSTRUCTING_FLAG),
                      &skipThisReplace);
    masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);
    masm.moveValue(R1, R0);
#ifdef DEBUG
    masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);
    masm.assumeUnreachable("Failed to return object in constructing call.");
#endif
    masm.bind(&skipThisReplace);

    // At this point, BaselineStubReg points to the ICCall_Fallback stub, which is NOT
    // a MonitoredStub, but rather a MonitoredFallbackStub.  To use EmitEnterTypeMonitorIC,
    // first load the ICTypeMonitor_Fallback stub into BaselineStubReg.  Then, use
    // EmitEnterTypeMonitorIC with a custom struct offset.
    masm.loadPtr(Address(BaselineStubReg, ICMonitoredFallbackStub::offsetOfFallbackMonitorStub()),
                 BaselineStubReg);
    EmitEnterTypeMonitorIC(masm, ICTypeMonitor_Fallback::offsetOfFirstMonitorStub());

    return true;
}
  1.8567 +
  1.8568 +bool
  1.8569 +ICCall_Fallback::Compiler::postGenerateStubCode(MacroAssembler &masm, Handle<JitCode *> code)
  1.8570 +{
  1.8571 +    JitCompartment *comp = cx->compartment()->jitCompartment();
  1.8572 +
  1.8573 +    CodeOffsetLabel fromIon(returnFromIonOffset_);
  1.8574 +    fromIon.fixup(&masm);
  1.8575 +    comp->initBaselineCallReturnFromIonAddr(code->raw() + fromIon.offset());
  1.8576 +
  1.8577 +    CodeOffsetLabel fromVM(returnFromStubOffset_);
  1.8578 +    fromVM.fixup(&masm);
  1.8579 +    comp->initBaselineCallReturnFromStubAddr(code->raw() + fromVM.offset());
  1.8580 +
  1.8581 +    return true;
  1.8582 +}
  1.8583 +
// VM-function glue so scripted-call stub code can call CreateThis when constructing.
typedef bool (*CreateThisFn)(JSContext *cx, HandleObject callee, MutableHandleValue rval);
static const VMFunction CreateThisInfoBaseline = FunctionInfo<CreateThisFn>(CreateThis);
  1.8586 +
  1.8587 +bool
  1.8588 +ICCallScriptedCompiler::generateStubCode(MacroAssembler &masm)
  1.8589 +{
  1.8590 +    Label failure;
  1.8591 +    GeneralRegisterSet regs(availableGeneralRegs(0));
  1.8592 +    bool canUseTailCallReg = regs.has(BaselineTailCallReg);
  1.8593 +
  1.8594 +    Register argcReg = R0.scratchReg();
  1.8595 +    JS_ASSERT(argcReg != ArgumentsRectifierReg);
  1.8596 +
  1.8597 +    regs.take(argcReg);
  1.8598 +    regs.take(ArgumentsRectifierReg);
  1.8599 +    regs.takeUnchecked(BaselineTailCallReg);
  1.8600 +
  1.8601 +    // Load the callee in R1.
  1.8602 +    // Stack Layout: [ ..., CalleeVal, ThisVal, Arg0Val, ..., ArgNVal, +ICStackValueOffset+ ]
  1.8603 +    BaseIndex calleeSlot(BaselineStackReg, argcReg, TimesEight, ICStackValueOffset + sizeof(Value));
  1.8604 +    masm.loadValue(calleeSlot, R1);
  1.8605 +    regs.take(R1);
  1.8606 +
  1.8607 +    // Ensure callee is an object.
  1.8608 +    masm.branchTestObject(Assembler::NotEqual, R1, &failure);
  1.8609 +
  1.8610 +    // Ensure callee is a function.
  1.8611 +    Register callee = masm.extractObject(R1, ExtractTemp0);
  1.8612 +    masm.branchTestObjClass(Assembler::NotEqual, callee, regs.getAny(), &JSFunction::class_,
  1.8613 +                            &failure);
  1.8614 +
  1.8615 +    // If calling a specific script, check if the script matches.  Otherwise, ensure that
  1.8616 +    // callee function is scripted.  Leave calleeScript in |callee| reg.
  1.8617 +    if (calleeScript_) {
  1.8618 +        JS_ASSERT(kind == ICStub::Call_Scripted);
  1.8619 +
  1.8620 +        // Callee is a function.  Check if script matches.
  1.8621 +        masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), callee);
  1.8622 +        Address expectedScript(BaselineStubReg, ICCall_Scripted::offsetOfCalleeScript());
  1.8623 +        masm.branchPtr(Assembler::NotEqual, expectedScript, callee, &failure);
  1.8624 +    } else {
  1.8625 +        if (isConstructing_)
  1.8626 +            masm.branchIfNotInterpretedConstructor(callee, regs.getAny(), &failure);
  1.8627 +        else
  1.8628 +            masm.branchIfFunctionHasNoScript(callee, &failure);
  1.8629 +        masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), callee);
  1.8630 +    }
  1.8631 +
  1.8632 +    // Load the start of the target JitCode.
  1.8633 +    Register code;
  1.8634 +    if (!isConstructing_) {
  1.8635 +        code = regs.takeAny();
  1.8636 +        masm.loadBaselineOrIonRaw(callee, code, SequentialExecution, &failure);
  1.8637 +    } else {
  1.8638 +        Address scriptCode(callee, JSScript::offsetOfBaselineOrIonRaw());
  1.8639 +        masm.branchPtr(Assembler::Equal, scriptCode, ImmPtr(nullptr), &failure);
  1.8640 +    }
  1.8641 +
  1.8642 +    // We no longer need R1.
  1.8643 +    regs.add(R1);
  1.8644 +
  1.8645 +    // Push a stub frame so that we can perform a non-tail call.
  1.8646 +    enterStubFrame(masm, regs.getAny());
  1.8647 +    if (canUseTailCallReg)
  1.8648 +        regs.add(BaselineTailCallReg);
  1.8649 +
  1.8650 +    Label failureLeaveStubFrame;
  1.8651 +
  1.8652 +    if (isConstructing_) {
  1.8653 +        // Save argc before call.
  1.8654 +        masm.push(argcReg);
  1.8655 +
  1.8656 +        // Stack now looks like:
  1.8657 +        //      [..., Callee, ThisV, Arg0V, ..., ArgNV, StubFrameHeader, ArgC ]
  1.8658 +        BaseIndex calleeSlot2(BaselineStackReg, argcReg, TimesEight,
  1.8659 +                               sizeof(Value) + STUB_FRAME_SIZE + sizeof(size_t));
  1.8660 +        masm.loadValue(calleeSlot2, R1);
  1.8661 +        masm.push(masm.extractObject(R1, ExtractTemp0));
  1.8662 +        if (!callVM(CreateThisInfoBaseline, masm))
  1.8663 +            return false;
  1.8664 +
  1.8665 +        // Return of CreateThis must be an object.
  1.8666 +#ifdef DEBUG
  1.8667 +        Label createdThisIsObject;
  1.8668 +        masm.branchTestObject(Assembler::Equal, JSReturnOperand, &createdThisIsObject);
  1.8669 +        masm.assumeUnreachable("The return of CreateThis must be an object.");
  1.8670 +        masm.bind(&createdThisIsObject);
  1.8671 +#endif
  1.8672 +
  1.8673 +        // Reset the register set from here on in.
  1.8674 +        JS_ASSERT(JSReturnOperand == R0);
  1.8675 +        regs = availableGeneralRegs(0);
  1.8676 +        regs.take(R0);
  1.8677 +        regs.take(ArgumentsRectifierReg);
  1.8678 +        argcReg = regs.takeAny();
  1.8679 +
  1.8680 +        // Restore saved argc so we can use it to calculate the address to save
  1.8681 +        // the resulting this object to.
  1.8682 +        masm.pop(argcReg);
  1.8683 +
  1.8684 +        // Save "this" value back into pushed arguments on stack.  R0 can be clobbered after that.
  1.8685 +        // Stack now looks like:
  1.8686 +        //      [..., Callee, ThisV, Arg0V, ..., ArgNV, StubFrameHeader ]
  1.8687 +        BaseIndex thisSlot(BaselineStackReg, argcReg, TimesEight, STUB_FRAME_SIZE);
  1.8688 +        masm.storeValue(R0, thisSlot);
  1.8689 +
  1.8690 +        // Restore the stub register from the baseline stub frame.
  1.8691 +        masm.loadPtr(Address(BaselineStackReg, STUB_FRAME_SAVED_STUB_OFFSET), BaselineStubReg);
  1.8692 +
  1.8693 +        // Reload callee script. Note that a GC triggered by CreateThis may
  1.8694 +        // have destroyed the callee BaselineScript and IonScript. CreateThis is
  1.8695 +        // safely repeatable though, so in this case we just leave the stub frame
  1.8696 +        // and jump to the next stub.
  1.8697 +
  1.8698 +        // Just need to load the script now.
  1.8699 +        BaseIndex calleeSlot3(BaselineStackReg, argcReg, TimesEight,
  1.8700 +                               sizeof(Value) + STUB_FRAME_SIZE);
  1.8701 +        masm.loadValue(calleeSlot3, R0);
  1.8702 +        callee = masm.extractObject(R0, ExtractTemp0);
  1.8703 +        regs.add(R0);
  1.8704 +        regs.takeUnchecked(callee);
  1.8705 +        masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), callee);
  1.8706 +
  1.8707 +        code = regs.takeAny();
  1.8708 +        masm.loadBaselineOrIonRaw(callee, code, SequentialExecution, &failureLeaveStubFrame);
  1.8709 +
  1.8710 +        // Release callee register, but don't add ExtractTemp0 back into the pool
  1.8711 +        // ExtractTemp0 is used later, and if it's allocated to some other register at that
  1.8712 +        // point, it will get clobbered when used.
  1.8713 +        if (callee != ExtractTemp0)
  1.8714 +            regs.add(callee);
  1.8715 +
  1.8716 +        if (canUseTailCallReg)
  1.8717 +            regs.addUnchecked(BaselineTailCallReg);
  1.8718 +    }
  1.8719 +    Register scratch = regs.takeAny();
  1.8720 +
  1.8721 +    // Values are on the stack left-to-right. Calling convention wants them
  1.8722 +    // right-to-left so duplicate them on the stack in reverse order.
  1.8723 +    // |this| and callee are pushed last.
  1.8724 +    pushCallArguments(masm, regs, argcReg);
  1.8725 +
  1.8726 +    // The callee is on top of the stack. Pop and unbox it.
  1.8727 +    ValueOperand val = regs.takeAnyValue();
  1.8728 +    masm.popValue(val);
  1.8729 +    callee = masm.extractObject(val, ExtractTemp0);
  1.8730 +
  1.8731 +    EmitCreateStubFrameDescriptor(masm, scratch);
  1.8732 +
  1.8733 +    // Note that we use Push, not push, so that callIon will align the stack
  1.8734 +    // properly on ARM.
  1.8735 +    masm.Push(argcReg);
  1.8736 +    masm.Push(callee);
  1.8737 +    masm.Push(scratch);
  1.8738 +
  1.8739 +    // Handle arguments underflow.
  1.8740 +    Label noUnderflow;
  1.8741 +    masm.load16ZeroExtend(Address(callee, JSFunction::offsetOfNargs()), callee);
  1.8742 +    masm.branch32(Assembler::AboveOrEqual, argcReg, callee, &noUnderflow);
  1.8743 +    {
  1.8744 +        // Call the arguments rectifier.
  1.8745 +        JS_ASSERT(ArgumentsRectifierReg != code);
  1.8746 +        JS_ASSERT(ArgumentsRectifierReg != argcReg);
  1.8747 +
  1.8748 +        JitCode *argumentsRectifier =
  1.8749 +            cx->runtime()->jitRuntime()->getArgumentsRectifier(SequentialExecution);
  1.8750 +
  1.8751 +        masm.movePtr(ImmGCPtr(argumentsRectifier), code);
  1.8752 +        masm.loadPtr(Address(code, JitCode::offsetOfCode()), code);
  1.8753 +        masm.mov(argcReg, ArgumentsRectifierReg);
  1.8754 +    }
  1.8755 +
  1.8756 +    masm.bind(&noUnderflow);
  1.8757 +
  1.8758 +    // If needed, update SPS Profiler frame entry before and after call.
  1.8759 +    {
  1.8760 +        JS_ASSERT(kind == ICStub::Call_Scripted || kind == ICStub::Call_AnyScripted);
  1.8761 +        GeneralRegisterSet availRegs = availableGeneralRegs(0);
  1.8762 +        availRegs.take(ArgumentsRectifierReg);
  1.8763 +        availRegs.take(code);
  1.8764 +        emitProfilingUpdate(masm, availRegs, kind == ICStub::Call_Scripted ?
  1.8765 +                                                ICCall_Scripted::offsetOfPCOffset()
  1.8766 +                                              : ICCall_AnyScripted::offsetOfPCOffset());
  1.8767 +    }
  1.8768 +
  1.8769 +    masm.callIon(code);
  1.8770 +
  1.8771 +    // If this is a constructing call, and the callee returns a non-object, replace it with
  1.8772 +    // the |this| object passed in.
  1.8773 +    if (isConstructing_) {
  1.8774 +        Label skipThisReplace;
  1.8775 +        masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);
  1.8776 +
  1.8777 +        Register scratchReg = JSReturnOperand.scratchReg();
  1.8778 +
  1.8779 +        // Current stack: [ ARGVALS..., ThisVal, ActualArgc, Callee, Descriptor ]
  1.8780 +        // However, we can't use this ThisVal, because it hasn't been traced.  We need to use
  1.8781 +        // The ThisVal higher up the stack:
  1.8782 +        // Current stack: [ ThisVal, ARGVALS..., ...STUB FRAME...,
  1.8783 +        //                  ARGVALS..., ThisVal, ActualArgc, Callee, Descriptor ]
  1.8784 +        masm.loadPtr(Address(BaselineStackReg, 2*sizeof(size_t)), scratchReg);
  1.8785 +
  1.8786 +        // scratchReg now contains actualArgCount.  Double it to account for skipping past two
  1.8787 +        // pushed copies of argument values.  Additionally, we need to add:
  1.8788 +        // STUB_FRAME_SIZE + sizeof(ThisVal) + sizeof(size_t) + sizeof(void *) + sizoef(size_t)
  1.8789 +        // for: stub frame, this value, actual argc, callee, and descriptor
  1.8790 +        masm.lshiftPtr(Imm32(1), scratchReg);
  1.8791 +        BaseIndex reloadThisSlot(BaselineStackReg, scratchReg, TimesEight,
  1.8792 +                                 STUB_FRAME_SIZE + sizeof(Value) + 3*sizeof(size_t));
  1.8793 +        masm.loadValue(reloadThisSlot, JSReturnOperand);
  1.8794 +#ifdef DEBUG
  1.8795 +        masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);
  1.8796 +        masm.assumeUnreachable("Return of constructing call should be an object.");
  1.8797 +#endif
  1.8798 +        masm.bind(&skipThisReplace);
  1.8799 +    }
  1.8800 +
  1.8801 +    leaveStubFrame(masm, true);
  1.8802 +
  1.8803 +    // Enter type monitor IC to type-check result.
  1.8804 +    EmitEnterTypeMonitorIC(masm);
  1.8805 +
  1.8806 +    // Leave stub frame and restore argc for the next stub.
  1.8807 +    masm.bind(&failureLeaveStubFrame);
  1.8808 +    leaveStubFrame(masm, false);
  1.8809 +    if (argcReg != R0.scratchReg())
  1.8810 +        masm.mov(argcReg, R0.scratchReg());
  1.8811 +
  1.8812 +    masm.bind(&failure);
  1.8813 +    EmitStubGuardFailure(masm);
  1.8814 +    return true;
  1.8815 +}
  1.8816 +
// Generate the guarded fast path for calling a known native function.
//
// Guards that the callee Value on the baseline stack is an object and is
// exactly the JSFunction recorded in this stub (offsetOfCallee), then builds
// a fake exit frame and invokes the native through the JSNative ABI:
//     bool (*)(JSContext *, unsigned argc, Value *vp)
// On guard failure, control falls through to the next stub in the chain.
bool
ICCall_Native::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    GeneralRegisterSet regs(availableGeneralRegs(0));

    Register argcReg = R0.scratchReg();
    regs.take(argcReg);
    regs.takeUnchecked(BaselineTailCallReg);

    // Load the callee in R1.
    // Stack layout: [ ..., CalleeVal, ThisVal, Arg0V, ..., ArgNV ], so the
    // callee sits argc+1 Values above ICStackValueOffset.
    BaseIndex calleeSlot(BaselineStackReg, argcReg, TimesEight, ICStackValueOffset + sizeof(Value));
    masm.loadValue(calleeSlot, R1);
    regs.take(R1);

    masm.branchTestObject(Assembler::NotEqual, R1, &failure);

    // Ensure callee matches this stub's callee.
    Register callee = masm.extractObject(R1, ExtractTemp0);
    Address expectedCallee(BaselineStubReg, ICCall_Native::offsetOfCallee());
    masm.branchPtr(Assembler::NotEqual, expectedCallee, callee, &failure);

    // R1 is free again, but |callee| (ExtractTemp0) must stay live for the
    // call below, so reserve it without requiring it to be in the free set.
    regs.add(R1);
    regs.takeUnchecked(callee);

    // Push a stub frame so that we can perform a non-tail call.
    // Note that this leaves the return address in TailCallReg.
    enterStubFrame(masm, regs.getAny());

    // Values are on the stack left-to-right. Calling convention wants them
    // right-to-left so duplicate them on the stack in reverse order.
    // |this| and callee are pushed last.
    pushCallArguments(masm, regs, argcReg);

    if (isConstructing_) {
        // Stack looks like: [ ..., Arg0Val, ThisVal, CalleeVal ]
        // Replace ThisVal with MagicValue(JS_IS_CONSTRUCTING)
        masm.storeValue(MagicValue(JS_IS_CONSTRUCTING), Address(BaselineStackReg, sizeof(Value)));
    }

    masm.checkStackAlignment();

    // Native functions have the signature:
    //
    //    bool (*)(JSContext *, unsigned, Value *vp)
    //
    // Where vp[0] is space for callee/return value, vp[1] is |this|, and vp[2] onward
    // are the function arguments.

    // Initialize vp: the stack pointer currently addresses the duplicated
    // callee slot, i.e. vp[0].
    Register vpReg = regs.takeAny();
    masm.movePtr(StackPointer, vpReg);

    // Construct a native exit frame.
    masm.push(argcReg);

    Register scratch = regs.takeAny();
    EmitCreateStubFrameDescriptor(masm, scratch);
    masm.push(scratch);
    masm.push(BaselineTailCallReg);
    masm.enterFakeExitFrame();

    // If needed, update SPS Profiler frame entry.  At this point, BaselineTailCallReg
    // and scratch can be clobbered.
    emitProfilingUpdate(masm, BaselineTailCallReg, scratch, ICCall_Native::offsetOfPCOffset());

    // Execute call.
    masm.setupUnalignedABICall(3, scratch);
    masm.loadJSContext(scratch);
    masm.passABIArg(scratch);
    masm.passABIArg(argcReg);
    masm.passABIArg(vpReg);

#ifdef JS_ARM_SIMULATOR
    // The simulator requires VM calls to be redirected to a special swi
    // instruction to handle them, so we store the redirected pointer in the
    // stub and use that instead of the original one.
    masm.callWithABI(Address(BaselineStubReg, ICCall_Native::offsetOfNative()));
#else
    masm.callWithABI(Address(callee, JSFunction::offsetOfNativeOrScript()));
#endif

    // Test for failure: a native returning false signals an exception.
    masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());

    // Load the return value into R0.
    masm.loadValue(Address(StackPointer, IonNativeExitFrameLayout::offsetOfResult()), R0);

    leaveStubFrame(masm);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.8914 +
  1.8915 +bool
  1.8916 +ICCall_ScriptedApplyArray::Compiler::generateStubCode(MacroAssembler &masm)
  1.8917 +{
  1.8918 +    Label failure;
  1.8919 +    GeneralRegisterSet regs(availableGeneralRegs(0));
  1.8920 +
  1.8921 +    Register argcReg = R0.scratchReg();
  1.8922 +    regs.take(argcReg);
  1.8923 +    regs.takeUnchecked(BaselineTailCallReg);
  1.8924 +    regs.takeUnchecked(ArgumentsRectifierReg);
  1.8925 +
  1.8926 +    //
  1.8927 +    // Validate inputs
  1.8928 +    //
  1.8929 +
  1.8930 +    Register target = guardFunApply(masm, regs, argcReg, /*checkNative=*/false,
  1.8931 +                                    FunApply_Array, &failure);
  1.8932 +    if (regs.has(target)) {
  1.8933 +        regs.take(target);
  1.8934 +    } else {
  1.8935 +        // If target is already a reserved reg, take another register for it, because it's
  1.8936 +        // probably currently an ExtractTemp, which might get clobbered later.
  1.8937 +        Register targetTemp = regs.takeAny();
  1.8938 +        masm.movePtr(target, targetTemp);
  1.8939 +        target = targetTemp;
  1.8940 +    }
  1.8941 +
  1.8942 +    // Push a stub frame so that we can perform a non-tail call.
  1.8943 +    enterStubFrame(masm, regs.getAny());
  1.8944 +
  1.8945 +    //
  1.8946 +    // Push arguments
  1.8947 +    //
  1.8948 +
  1.8949 +    // Stack now looks like:
  1.8950 +    //                                      BaselineFrameReg -------------------.
  1.8951 +    //                                                                          v
  1.8952 +    //      [..., js_fun_apply, TargetV, TargetThisV, ArgsArrayV, StubFrameHeader]
  1.8953 +
  1.8954 +    // Push all array elements onto the stack:
  1.8955 +    Address arrayVal(BaselineFrameReg, STUB_FRAME_SIZE);
  1.8956 +    pushArrayArguments(masm, arrayVal, regs);
  1.8957 +
  1.8958 +    // Stack now looks like:
  1.8959 +    //                                      BaselineFrameReg -------------------.
  1.8960 +    //                                                                          v
  1.8961 +    //      [..., js_fun_apply, TargetV, TargetThisV, ArgsArrayV, StubFrameHeader,
  1.8962 +    //       PushedArgN, ..., PushedArg0]
  1.8963 +    // Can't fail after this, so it's ok to clobber argcReg.
  1.8964 +
  1.8965 +    // Push actual argument 0 as |thisv| for call.
  1.8966 +    masm.pushValue(Address(BaselineFrameReg, STUB_FRAME_SIZE + sizeof(Value)));
  1.8967 +
  1.8968 +    // All pushes after this use Push instead of push to make sure ARM can align
  1.8969 +    // stack properly for call.
  1.8970 +    Register scratch = regs.takeAny();
  1.8971 +    EmitCreateStubFrameDescriptor(masm, scratch);
  1.8972 +
  1.8973 +    // Reload argc from length of array.
  1.8974 +    masm.extractObject(arrayVal, argcReg);
  1.8975 +    masm.loadPtr(Address(argcReg, JSObject::offsetOfElements()), argcReg);
  1.8976 +    masm.load32(Address(argcReg, ObjectElements::offsetOfInitializedLength()), argcReg);
  1.8977 +
  1.8978 +    masm.Push(argcReg);
  1.8979 +    masm.Push(target);
  1.8980 +    masm.Push(scratch);
  1.8981 +
  1.8982 +    // Load nargs into scratch for underflow check, and then load jitcode pointer into target.
  1.8983 +    masm.load16ZeroExtend(Address(target, JSFunction::offsetOfNargs()), scratch);
  1.8984 +    masm.loadPtr(Address(target, JSFunction::offsetOfNativeOrScript()), target);
  1.8985 +    masm.loadBaselineOrIonRaw(target, target, SequentialExecution, nullptr);
  1.8986 +
  1.8987 +    // Handle arguments underflow.
  1.8988 +    Label noUnderflow;
  1.8989 +    masm.branch32(Assembler::AboveOrEqual, argcReg, scratch, &noUnderflow);
  1.8990 +    {
  1.8991 +        // Call the arguments rectifier.
  1.8992 +        JS_ASSERT(ArgumentsRectifierReg != target);
  1.8993 +        JS_ASSERT(ArgumentsRectifierReg != argcReg);
  1.8994 +
  1.8995 +        JitCode *argumentsRectifier =
  1.8996 +            cx->runtime()->jitRuntime()->getArgumentsRectifier(SequentialExecution);
  1.8997 +
  1.8998 +        masm.movePtr(ImmGCPtr(argumentsRectifier), target);
  1.8999 +        masm.loadPtr(Address(target, JitCode::offsetOfCode()), target);
  1.9000 +        masm.mov(argcReg, ArgumentsRectifierReg);
  1.9001 +    }
  1.9002 +    masm.bind(&noUnderflow);
  1.9003 +    regs.add(argcReg);
  1.9004 +
  1.9005 +    // If needed, update SPS Profiler frame entry.  At this point, BaselineTailCallReg
  1.9006 +    // and scratch can be clobbered.
  1.9007 +    emitProfilingUpdate(masm, regs.getAny(), scratch,
  1.9008 +                        ICCall_ScriptedApplyArguments::offsetOfPCOffset());
  1.9009 +
  1.9010 +    // Do call
  1.9011 +    masm.callIon(target);
  1.9012 +    leaveStubFrame(masm, true);
  1.9013 +
  1.9014 +    // Enter type monitor IC to type-check result.
  1.9015 +    EmitEnterTypeMonitorIC(masm);
  1.9016 +
  1.9017 +    masm.bind(&failure);
  1.9018 +    EmitStubGuardFailure(masm);
  1.9019 +    return true;
  1.9020 +}
  1.9021 +
// Generate the fast path for |f.apply(thisv, arguments)| where |f| is a
// scripted function and the second argument is the magic |arguments| value.
//
// guardFunApply validates the pattern and returns the target function in a
// register; the caller frame's actual arguments are then forwarded onto the
// stack and the target's JIT code is invoked, going through the arguments
// rectifier on underflow.  Guard failures fall through to the next stub.
bool
ICCall_ScriptedApplyArguments::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    GeneralRegisterSet regs(availableGeneralRegs(0));

    Register argcReg = R0.scratchReg();
    regs.take(argcReg);
    regs.takeUnchecked(BaselineTailCallReg);
    regs.takeUnchecked(ArgumentsRectifierReg);

    //
    // Validate inputs
    //

    Register target = guardFunApply(masm, regs, argcReg, /*checkNative=*/false,
                                    FunApply_MagicArgs, &failure);
    if (regs.has(target)) {
        regs.take(target);
    } else {
        // If target is already a reserved reg, take another register for it, because it's
        // probably currently an ExtractTemp, which might get clobbered later.
        Register targetTemp = regs.takeAny();
        masm.movePtr(target, targetTemp);
        target = targetTemp;
    }

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, regs.getAny());

    //
    // Push arguments
    //

    // Stack now looks like:
    //      [..., js_fun_apply, TargetV, TargetThisV, MagicArgsV, StubFrameHeader]

    // Push all arguments supplied to caller function onto the stack.
    pushCallerArguments(masm, regs);

    // Stack now looks like:
    //                                      BaselineFrameReg -------------------.
    //                                                                          v
    //      [..., js_fun_apply, TargetV, TargetThisV, MagicArgsV, StubFrameHeader,
    //       PushedArgN, ..., PushedArg0]
    // Can't fail after this, so it's ok to clobber argcReg.

    // Push actual argument 0 as |thisv| for call.
    masm.pushValue(Address(BaselineFrameReg, STUB_FRAME_SIZE + sizeof(Value)));

    // All pushes after this use Push instead of push to make sure ARM can align
    // stack properly for call.
    Register scratch = regs.takeAny();
    EmitCreateStubFrameDescriptor(masm, scratch);

    // Reload argc from the caller frame's numActualArgs: first load the
    // caller BaselineFrame pointer, then its actual-argument count.
    masm.loadPtr(Address(BaselineFrameReg, 0), argcReg);
    masm.loadPtr(Address(argcReg, BaselineFrame::offsetOfNumActualArgs()), argcReg);
    masm.Push(argcReg);
    masm.Push(target);
    masm.Push(scratch);

    // Load nargs into scratch for underflow check, and then load jitcode pointer into target.
    masm.load16ZeroExtend(Address(target, JSFunction::offsetOfNargs()), scratch);
    masm.loadPtr(Address(target, JSFunction::offsetOfNativeOrScript()), target);
    masm.loadBaselineOrIonRaw(target, target, SequentialExecution, nullptr);

    // Handle arguments underflow.
    Label noUnderflow;
    masm.branch32(Assembler::AboveOrEqual, argcReg, scratch, &noUnderflow);
    {
        // Call the arguments rectifier.
        JS_ASSERT(ArgumentsRectifierReg != target);
        JS_ASSERT(ArgumentsRectifierReg != argcReg);

        JitCode *argumentsRectifier =
            cx->runtime()->jitRuntime()->getArgumentsRectifier(SequentialExecution);

        masm.movePtr(ImmGCPtr(argumentsRectifier), target);
        masm.loadPtr(Address(target, JitCode::offsetOfCode()), target);
        masm.mov(argcReg, ArgumentsRectifierReg);
    }
    masm.bind(&noUnderflow);
    regs.add(argcReg);

    // If needed, update SPS Profiler frame entry.  At this point, BaselineTailCallReg
    // and scratch can be clobbered.
    emitProfilingUpdate(masm, regs.getAny(), scratch,
                        ICCall_ScriptedApplyArguments::offsetOfPCOffset());

    // Do call
    masm.callIon(target);
    leaveStubFrame(masm, true);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.9122 +
// Generate the fast path for |f.call(...)| where |f| is a scripted function.
//
// Guards that the callee on the stack is the js_fun_call native and that the
// call's |this| is a scripted function with JIT code.  The call is then
// re-shaped in place: the function.call entry is discarded, the original
// |this| becomes the callee, argc is decremented (or |undefined| is pushed
// as |this| when there are no arguments), and the target is invoked through
// the usual scripted-call path, rectifying argument underflow as needed.
bool
ICCall_ScriptedFunCall::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    GeneralRegisterSet regs(availableGeneralRegs(0));
    bool canUseTailCallReg = regs.has(BaselineTailCallReg);

    Register argcReg = R0.scratchReg();
    JS_ASSERT(argcReg != ArgumentsRectifierReg);

    regs.take(argcReg);
    regs.take(ArgumentsRectifierReg);
    regs.takeUnchecked(BaselineTailCallReg);

    // Load the callee in R1.
    // Stack Layout: [ ..., CalleeVal, ThisVal, Arg0Val, ..., ArgNVal, +ICStackValueOffset+ ]
    BaseIndex calleeSlot(BaselineStackReg, argcReg, TimesEight, ICStackValueOffset + sizeof(Value));
    masm.loadValue(calleeSlot, R1);
    regs.take(R1);

    // Ensure callee is js_fun_call.
    masm.branchTestObject(Assembler::NotEqual, R1, &failure);

    Register callee = masm.extractObject(R1, ExtractTemp0);
    masm.branchTestObjClass(Assembler::NotEqual, callee, regs.getAny(), &JSFunction::class_,
                            &failure);
    masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), callee);
    masm.branchPtr(Assembler::NotEqual, callee, ImmPtr(js_fun_call), &failure);

    // Ensure |this| is a scripted function with JIT code.
    BaseIndex thisSlot(BaselineStackReg, argcReg, TimesEight, ICStackValueOffset);
    masm.loadValue(thisSlot, R1);

    masm.branchTestObject(Assembler::NotEqual, R1, &failure);
    callee = masm.extractObject(R1, ExtractTemp0);

    masm.branchTestObjClass(Assembler::NotEqual, callee, regs.getAny(), &JSFunction::class_,
                            &failure);
    masm.branchIfFunctionHasNoScript(callee, &failure);
    masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), callee);

    // Load the start of the target JitCode.
    Register code = regs.takeAny();
    masm.loadBaselineOrIonRaw(callee, code, SequentialExecution, &failure);

    // We no longer need R1.
    regs.add(R1);

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, regs.getAny());
    if (canUseTailCallReg)
        regs.add(BaselineTailCallReg);

    // Values are on the stack left-to-right. Calling convention wants them
    // right-to-left so duplicate them on the stack in reverse order.
    pushCallArguments(masm, regs, argcReg);

    // Discard callee (function.call).
    masm.addPtr(Imm32(sizeof(Value)), StackPointer);

    // Pop scripted callee (the original |this|).
    ValueOperand val = regs.takeAnyValue();
    masm.popValue(val);

    // Decrement argc if argc > 0. If argc == 0, push |undefined| as |this|.
    Label zeroArgs, done;
    masm.branchTest32(Assembler::Zero, argcReg, argcReg, &zeroArgs);
    masm.sub32(Imm32(1), argcReg);
    masm.jump(&done);

    masm.bind(&zeroArgs);
    masm.pushValue(UndefinedValue());
    masm.bind(&done);

    // Unbox scripted callee.
    callee = masm.extractObject(val, ExtractTemp0);

    Register scratch = regs.takeAny();
    EmitCreateStubFrameDescriptor(masm, scratch);

    // Note that we use Push, not push, so that callIon will align the stack
    // properly on ARM.
    masm.Push(argcReg);
    masm.Push(callee);
    masm.Push(scratch);

    // Handle arguments underflow.  |callee| can be clobbered here: the
    // unboxed function object is no longer needed once nargs is loaded.
    Label noUnderflow;
    masm.load16ZeroExtend(Address(callee, JSFunction::offsetOfNargs()), callee);
    masm.branch32(Assembler::AboveOrEqual, argcReg, callee, &noUnderflow);
    {
        // Call the arguments rectifier.
        JS_ASSERT(ArgumentsRectifierReg != code);
        JS_ASSERT(ArgumentsRectifierReg != argcReg);

        JitCode *argumentsRectifier =
            cx->runtime()->jitRuntime()->getArgumentsRectifier(SequentialExecution);

        masm.movePtr(ImmGCPtr(argumentsRectifier), code);
        masm.loadPtr(Address(code, JitCode::offsetOfCode()), code);
        masm.mov(argcReg, ArgumentsRectifierReg);
    }

    masm.bind(&noUnderflow);

    // If needed, update SPS Profiler frame entry.
    {
        // Need to avoid using ArgumentsRectifierReg and code register.
        GeneralRegisterSet availRegs = availableGeneralRegs(0);
        availRegs.take(ArgumentsRectifierReg);
        availRegs.take(code);
        emitProfilingUpdate(masm, availRegs, ICCall_ScriptedFunCall::offsetOfPCOffset());
    }

    masm.callIon(code);

    leaveStubFrame(masm, true);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.9248 +
  1.9249 +static bool
  1.9250 +DoubleValueToInt32ForSwitch(Value *v)
  1.9251 +{
  1.9252 +    double d = v->toDouble();
  1.9253 +    int32_t truncated = int32_t(d);
  1.9254 +    if (d != double(truncated))
  1.9255 +        return false;
  1.9256 +
  1.9257 +    v->setInt32(truncated);
  1.9258 +    return true;
  1.9259 +}
  1.9260 +
// Generate the dispatch code for a table switch.
//
// The switch operand is expected in R0.  Int32 keys index directly into the
// stub's precomputed jump table; doubles with exact int32 values are
// converted first; anything else goes to the default target.  Instead of
// returning to the IC caller normally, this stub rewrites the IC return
// address so that execution resumes at the selected target.
bool
ICTableSwitch::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label isInt32, notInt32, outOfRange;
    Register scratch = R1.scratchReg();

    masm.branchTestInt32(Assembler::NotEqual, R0, &notInt32);

    Register key = masm.extractInt32(R0, ExtractTemp0);

    masm.bind(&isInt32);

    // Rebase the key on min_ and bounds-check against the table length.
    // BelowOrEqual is an unsigned condition, so keys below min_ (negative
    // after the subtraction) also land out of range.
    masm.load32(Address(BaselineStubReg, offsetof(ICTableSwitch, min_)), scratch);
    masm.sub32(scratch, key);
    masm.branch32(Assembler::BelowOrEqual,
                  Address(BaselineStubReg, offsetof(ICTableSwitch, length_)), key, &outOfRange);

    // Load the target native code address from table_[key].
    masm.loadPtr(Address(BaselineStubReg, offsetof(ICTableSwitch, table_)), scratch);
    masm.loadPtr(BaseIndex(scratch, key, ScalePointer), scratch);

    // Return from the IC directly into the selected case's code.
    EmitChangeICReturnAddress(masm, scratch);
    EmitReturnFromIC(masm);

    masm.bind(&notInt32);

    masm.branchTestDouble(Assembler::NotEqual, R0, &outOfRange);
    if (cx->runtime()->jitSupportsFloatingPoint) {
        masm.unboxDouble(R0, FloatReg0);

        // N.B. -0 === 0, so convert -0 to a 0 int32.
        masm.convertDoubleToInt32(FloatReg0, key, &outOfRange, /* negativeZeroCheck = */ false);
    } else {
        // No FP support: do the conversion in C++.
        // Pass pointer to double value.
        masm.pushValue(R0);
        masm.movePtr(StackPointer, R0.scratchReg());

        masm.setupUnalignedABICall(1, scratch);
        masm.passABIArg(R0.scratchReg());
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, DoubleValueToInt32ForSwitch));

        // If the function returns |true|, the value has been converted to
        // int32.
        masm.mov(ReturnReg, scratch);
        masm.popValue(R0);
        masm.branchIfFalseBool(scratch, &outOfRange);
        masm.unboxInt32(R0, key);
    }
    masm.jump(&isInt32);

    masm.bind(&outOfRange);

    masm.loadPtr(Address(BaselineStubReg, offsetof(ICTableSwitch, defaultTarget_)), scratch);

    EmitChangeICReturnAddress(masm, scratch);
    EmitReturnFromIC(masm);
    return true;
}
  1.9318 +
// Build the ICTableSwitch stub for the tableswitch op at |pc_|.
//
// Decodes the low/high case bounds and the per-case jump offsets from the
// bytecode and materializes a jump table.  The table initially holds
// jsbytecode addresses; fixupJumpTable() later rewrites them to native code
// addresses once the BaselineScript exists.  Returns nullptr on OOM.
ICStub *
ICTableSwitch::Compiler::getStub(ICStubSpace *space)
{
    JitCode *code = getStubCode();
    if (!code)
        return nullptr;

    // Skip the leading (default) jump offset, then read [low, high].
    jsbytecode *pc = pc_;
    pc += JUMP_OFFSET_LEN;
    int32_t low = GET_JUMP_OFFSET(pc);
    pc += JUMP_OFFSET_LEN;
    int32_t high = GET_JUMP_OFFSET(pc);
    int32_t length = high - low + 1;
    pc += JUMP_OFFSET_LEN;

    void **table = (void**) space->alloc(sizeof(void*) * length);
    if (!table)
        return nullptr;

    // The default target offset is the first jump offset of the op itself.
    jsbytecode *defaultpc = pc_ + GET_JUMP_OFFSET(pc_);

    // |pc| now points at the per-case offset list.  A zero offset means the
    // case has no explicit target and falls back to the default target.
    for (int32_t i = 0; i < length; i++) {
        int32_t off = GET_JUMP_OFFSET(pc);
        if (off)
            table[i] = pc_ + off;
        else
            table[i] = defaultpc;
        pc += JUMP_OFFSET_LEN;
    }

    return ICTableSwitch::New(space, code, table, low, length, defaultpc);
}
  1.9351 +
  1.9352 +void
  1.9353 +ICTableSwitch::fixupJumpTable(JSScript *script, BaselineScript *baseline)
  1.9354 +{
  1.9355 +    defaultTarget_ = baseline->nativeCodeForPC(script, (jsbytecode *) defaultTarget_);
  1.9356 +
  1.9357 +    for (int32_t i = 0; i < length_; i++)
  1.9358 +        table_[i] = baseline->nativeCodeForPC(script, (jsbytecode *) table_[i]);
  1.9359 +}
  1.9360 +
  1.9361 +//
  1.9362 +// IteratorNew_Fallback
  1.9363 +//
  1.9364 +
  1.9365 +static bool
  1.9366 +DoIteratorNewFallback(JSContext *cx, BaselineFrame *frame, ICIteratorNew_Fallback *stub,
  1.9367 +                      HandleValue value, MutableHandleValue res)
  1.9368 +{
  1.9369 +    jsbytecode *pc = stub->icEntry()->pc(frame->script());
  1.9370 +    FallbackICSpew(cx, stub, "IteratorNew");
  1.9371 +
  1.9372 +    uint8_t flags = GET_UINT8(pc);
  1.9373 +    res.set(value);
  1.9374 +    return ValueToIterator(cx, flags, res);
  1.9375 +}
  1.9376 +
  1.9377 +typedef bool (*DoIteratorNewFallbackFn)(JSContext *, BaselineFrame *, ICIteratorNew_Fallback *,
  1.9378 +                                        HandleValue, MutableHandleValue);
  1.9379 +static const VMFunction DoIteratorNewFallbackInfo =
  1.9380 +    FunctionInfo<DoIteratorNewFallbackFn>(DoIteratorNewFallback, PopValues(1));
  1.9381 +
// Emit the ITERATOR-new fallback stub: tail-call into DoIteratorNewFallback.
bool
ICIteratorNew_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    // Sync stack for the decompiler.
    masm.pushValue(R0);

    // Push VM call arguments in reverse order of the C++ signature:
    // value, stub, then the baseline frame pointer.
    masm.pushValue(R0);
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(DoIteratorNewFallbackInfo, masm);
}
  1.9396 +
  1.9397 +//
  1.9398 +// IteratorMore_Fallback
  1.9399 +//
  1.9400 +
  1.9401 +static bool
  1.9402 +DoIteratorMoreFallback(JSContext *cx, BaselineFrame *frame, ICIteratorMore_Fallback *stub_,
  1.9403 +                       HandleValue iterValue, MutableHandleValue res)
  1.9404 +{
  1.9405 +    // This fallback stub may trigger debug mode toggling.
  1.9406 +    DebugModeOSRVolatileStub<ICIteratorMore_Fallback *> stub(frame, stub_);
  1.9407 +
  1.9408 +    FallbackICSpew(cx, stub, "IteratorMore");
  1.9409 +
  1.9410 +    bool cond;
  1.9411 +    if (!IteratorMore(cx, &iterValue.toObject(), &cond, res))
  1.9412 +        return false;
  1.9413 +    res.setBoolean(cond);
  1.9414 +
  1.9415 +    // Check if debug mode toggling made the stub invalid.
  1.9416 +    if (stub.invalid())
  1.9417 +        return true;
  1.9418 +
  1.9419 +    if (iterValue.toObject().is<PropertyIteratorObject>() &&
  1.9420 +        !stub->hasStub(ICStub::IteratorMore_Native))
  1.9421 +    {
  1.9422 +        ICIteratorMore_Native::Compiler compiler(cx);
  1.9423 +        ICStub *newStub = compiler.getStub(compiler.getStubSpace(frame->script()));
  1.9424 +        if (!newStub)
  1.9425 +            return false;
  1.9426 +        stub->addNewStub(newStub);
  1.9427 +    }
  1.9428 +
  1.9429 +    return true;
  1.9430 +}
  1.9431 +
  1.9432 +typedef bool (*DoIteratorMoreFallbackFn)(JSContext *, BaselineFrame *, ICIteratorMore_Fallback *,
  1.9433 +                                         HandleValue, MutableHandleValue);
  1.9434 +static const VMFunction DoIteratorMoreFallbackInfo =
  1.9435 +    FunctionInfo<DoIteratorMoreFallbackFn>(DoIteratorMoreFallback);
  1.9436 +
// Emit the ITERATOR-more fallback stub: tail-call into DoIteratorMoreFallback.
bool
ICIteratorMore_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    // Push VM call arguments in reverse order of the C++ signature:
    // iterator value, stub, then the baseline frame pointer.
    masm.pushValue(R0);
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(DoIteratorMoreFallbackInfo, masm);
}
  1.9448 +
  1.9449 +//
  1.9450 +// IteratorMore_Native
  1.9451 +//
  1.9452 +
// Optimized ITERATOR-more stub for native (for-in style) property iterators:
// answers "are there more properties?" without calling into the VM.
bool
ICIteratorMore_Native::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;

    Register obj = masm.extractObject(R0, ExtractTemp0);

    GeneralRegisterSet regs(availableGeneralRegs(1));
    Register nativeIterator = regs.takeAny();
    Register scratch = regs.takeAny();

    // Guard: only PropertyIteratorObject instances are handled here.
    masm.branchTestObjClass(Assembler::NotEqual, obj, scratch,
                            &PropertyIteratorObject::class_, &failure);
    masm.loadObjPrivate(obj, JSObject::ITER_CLASS_NFIXED_SLOTS, nativeIterator);

    // Guard: for-each (JSITER_FOREACH) iteration is not supported by this
    // fast path; fall through to the next stub.
    masm.branchTest32(Assembler::NonZero, Address(nativeIterator, offsetof(NativeIterator, flags)),
                      Imm32(JSITER_FOREACH), &failure);

    // Set output to true if props_cursor < props_end.
    masm.loadPtr(Address(nativeIterator, offsetof(NativeIterator, props_end)), scratch);
    Address cursorAddr = Address(nativeIterator, offsetof(NativeIterator, props_cursor));
    masm.cmpPtrSet(Assembler::LessThan, cursorAddr, scratch, scratch);

    masm.tagValue(JSVAL_TYPE_BOOLEAN, scratch, R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.9484 +
  1.9485 +//
  1.9486 +// IteratorNext_Fallback
  1.9487 +//
  1.9488 +
  1.9489 +static bool
  1.9490 +DoIteratorNextFallback(JSContext *cx, BaselineFrame *frame, ICIteratorNext_Fallback *stub_,
  1.9491 +                       HandleValue iterValue, MutableHandleValue res)
  1.9492 +{
  1.9493 +    // This fallback stub may trigger debug mode toggling.
  1.9494 +    DebugModeOSRVolatileStub<ICIteratorNext_Fallback *> stub(frame, stub_);
  1.9495 +
  1.9496 +    FallbackICSpew(cx, stub, "IteratorNext");
  1.9497 +
  1.9498 +    RootedObject iteratorObject(cx, &iterValue.toObject());
  1.9499 +    if (!IteratorNext(cx, iteratorObject, res))
  1.9500 +        return false;
  1.9501 +
  1.9502 +    // Check if debug mode toggling made the stub invalid.
  1.9503 +    if (stub.invalid())
  1.9504 +        return true;
  1.9505 +
  1.9506 +    if (!res.isString() && !stub->hasNonStringResult())
  1.9507 +        stub->setHasNonStringResult();
  1.9508 +
  1.9509 +    if (iteratorObject->is<PropertyIteratorObject>() &&
  1.9510 +        !stub->hasStub(ICStub::IteratorNext_Native))
  1.9511 +    {
  1.9512 +        ICIteratorNext_Native::Compiler compiler(cx);
  1.9513 +        ICStub *newStub = compiler.getStub(compiler.getStubSpace(frame->script()));
  1.9514 +        if (!newStub)
  1.9515 +            return false;
  1.9516 +        stub->addNewStub(newStub);
  1.9517 +    }
  1.9518 +
  1.9519 +    return true;
  1.9520 +}
  1.9521 +
  1.9522 +typedef bool (*DoIteratorNextFallbackFn)(JSContext *, BaselineFrame *, ICIteratorNext_Fallback *,
  1.9523 +                                         HandleValue, MutableHandleValue);
  1.9524 +static const VMFunction DoIteratorNextFallbackInfo =
  1.9525 +    FunctionInfo<DoIteratorNextFallbackFn>(DoIteratorNextFallback);
  1.9526 +
// Emit the ITERATOR-next fallback stub: tail-call into DoIteratorNextFallback.
bool
ICIteratorNext_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    // Push VM call arguments in reverse order of the C++ signature:
    // iterator value, stub, then the baseline frame pointer.
    masm.pushValue(R0);
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(DoIteratorNextFallbackInfo, masm);
}
  1.9538 +
  1.9539 +//
  1.9540 +// IteratorNext_Native
  1.9541 +//
  1.9542 +
// Optimized ITERATOR-next stub for native property iterators: loads the
// property name at the cursor and bumps the cursor, without a VM call.
// The cursor is assumed to be in range here; presumably the bytecode only
// emits JSOP_ITERNEXT after a successful "more" check — TODO confirm.
bool
ICIteratorNext_Native::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;

    Register obj = masm.extractObject(R0, ExtractTemp0);

    GeneralRegisterSet regs(availableGeneralRegs(1));
    Register nativeIterator = regs.takeAny();
    Register scratch = regs.takeAny();

    // Guard: only PropertyIteratorObject instances are handled here.
    masm.branchTestObjClass(Assembler::NotEqual, obj, scratch,
                            &PropertyIteratorObject::class_, &failure);
    masm.loadObjPrivate(obj, JSObject::ITER_CLASS_NFIXED_SLOTS, nativeIterator);

    // Guard: for-each iteration produces values, not names; bail to the
    // next stub in that case.
    masm.branchTest32(Assembler::NonZero, Address(nativeIterator, offsetof(NativeIterator, flags)),
                      Imm32(JSITER_FOREACH), &failure);

    // Get cursor, next string.
    masm.loadPtr(Address(nativeIterator, offsetof(NativeIterator, props_cursor)), scratch);
    masm.loadPtr(Address(scratch, 0), scratch);

    // Increase the cursor.
    masm.addPtr(Imm32(sizeof(JSString *)),
                Address(nativeIterator, offsetof(NativeIterator, props_cursor)));

    masm.tagValue(JSVAL_TYPE_STRING, scratch, R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.9577 +
  1.9578 +//
  1.9579 +// IteratorClose_Fallback
  1.9580 +//
  1.9581 +
  1.9582 +static bool
  1.9583 +DoIteratorCloseFallback(JSContext *cx, ICIteratorClose_Fallback *stub, HandleValue iterValue)
  1.9584 +{
  1.9585 +    FallbackICSpew(cx, stub, "IteratorClose");
  1.9586 +
  1.9587 +    RootedObject iteratorObject(cx, &iterValue.toObject());
  1.9588 +    return CloseIterator(cx, iteratorObject);
  1.9589 +}
  1.9590 +
  1.9591 +typedef bool (*DoIteratorCloseFallbackFn)(JSContext *, ICIteratorClose_Fallback *, HandleValue);
  1.9592 +static const VMFunction DoIteratorCloseFallbackInfo =
  1.9593 +    FunctionInfo<DoIteratorCloseFallbackFn>(DoIteratorCloseFallback);
  1.9594 +
// Emit the ITERATOR-close fallback stub: tail-call into
// DoIteratorCloseFallback. No frame pointer is needed by the callee.
bool
ICIteratorClose_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    // Push VM call arguments: iterator value, then the stub.
    masm.pushValue(R0);
    masm.push(BaselineStubReg);

    return tailCallVM(DoIteratorCloseFallbackInfo, masm);
}
  1.9605 +
  1.9606 +//
  1.9607 +// InstanceOf_Fallback
  1.9608 +//
  1.9609 +
  1.9610 +static bool
  1.9611 +DoInstanceOfFallback(JSContext *cx, ICInstanceOf_Fallback *stub,
  1.9612 +                     HandleValue lhs, HandleValue rhs,
  1.9613 +                     MutableHandleValue res)
  1.9614 +{
  1.9615 +    FallbackICSpew(cx, stub, "InstanceOf");
  1.9616 +
  1.9617 +    if (!rhs.isObject()) {
  1.9618 +        js_ReportValueError(cx, JSMSG_BAD_INSTANCEOF_RHS, -1, rhs, NullPtr());
  1.9619 +        return false;
  1.9620 +    }
  1.9621 +
  1.9622 +    RootedObject obj(cx, &rhs.toObject());
  1.9623 +
  1.9624 +    // For functions, keep track of the |prototype| property in type information,
  1.9625 +    // for use during Ion compilation.
  1.9626 +    if (obj->is<JSFunction>() && IsIonEnabled(cx))
  1.9627 +        types::EnsureTrackPropertyTypes(cx, obj, NameToId(cx->names().prototype));
  1.9628 +
  1.9629 +    bool cond = false;
  1.9630 +    if (!HasInstance(cx, obj, lhs, &cond))
  1.9631 +        return false;
  1.9632 +
  1.9633 +    res.setBoolean(cond);
  1.9634 +    return true;
  1.9635 +}
  1.9636 +
  1.9637 +typedef bool (*DoInstanceOfFallbackFn)(JSContext *, ICInstanceOf_Fallback *, HandleValue, HandleValue,
  1.9638 +                                       MutableHandleValue);
  1.9639 +static const VMFunction DoInstanceOfFallbackInfo =
  1.9640 +    FunctionInfo<DoInstanceOfFallbackFn>(DoInstanceOfFallback, PopValues(2));
  1.9641 +
// Emit the INSTANCEOF fallback stub: tail-call into DoInstanceOfFallback.
bool
ICInstanceOf_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    // Sync stack for the decompiler.
    masm.pushValue(R0);
    masm.pushValue(R1);

    // Push VM call arguments in reverse order of the C++ signature:
    // rhs, lhs, then the stub.
    masm.pushValue(R1);
    masm.pushValue(R0);
    masm.push(BaselineStubReg);

    return tailCallVM(DoInstanceOfFallbackInfo, masm);
}
  1.9657 +
  1.9658 +//
  1.9659 +// TypeOf_Fallback
  1.9660 +//
  1.9661 +
  1.9662 +static bool
  1.9663 +DoTypeOfFallback(JSContext *cx, BaselineFrame *frame, ICTypeOf_Fallback *stub, HandleValue val,
  1.9664 +                 MutableHandleValue res)
  1.9665 +{
  1.9666 +    FallbackICSpew(cx, stub, "TypeOf");
  1.9667 +    JSType type = js::TypeOfValue(val);
  1.9668 +    RootedString string(cx, TypeName(type, cx->names()));
  1.9669 +
  1.9670 +    res.setString(string);
  1.9671 +
  1.9672 +    JS_ASSERT(type != JSTYPE_NULL);
  1.9673 +    if (type != JSTYPE_OBJECT && type != JSTYPE_FUNCTION) {
  1.9674 +        // Create a new TypeOf stub.
  1.9675 +        IonSpew(IonSpew_BaselineIC, "  Generating TypeOf stub for JSType (%d)", (int) type);
  1.9676 +        ICTypeOf_Typed::Compiler compiler(cx, type, string);
  1.9677 +        ICStub *typeOfStub = compiler.getStub(compiler.getStubSpace(frame->script()));
  1.9678 +        if (!typeOfStub)
  1.9679 +            return false;
  1.9680 +        stub->addNewStub(typeOfStub);
  1.9681 +    }
  1.9682 +
  1.9683 +    return true;
  1.9684 +}
  1.9685 +
  1.9686 +typedef bool (*DoTypeOfFallbackFn)(JSContext *, BaselineFrame *frame, ICTypeOf_Fallback *,
  1.9687 +                                   HandleValue, MutableHandleValue);
  1.9688 +static const VMFunction DoTypeOfFallbackInfo =
  1.9689 +    FunctionInfo<DoTypeOfFallbackFn>(DoTypeOfFallback);
  1.9690 +
// Emit the TYPEOF fallback stub: tail-call into DoTypeOfFallback.
bool
ICTypeOf_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    // Push VM call arguments in reverse order of the C++ signature:
    // value, stub, then the baseline frame pointer.
    masm.pushValue(R0);
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(DoTypeOfFallbackInfo, masm);
}
  1.9702 +
// Specialized TYPEOF stub: guards that R0 has the expected JSType and, if
// so, returns the pre-computed typeof string without a VM call.
bool
ICTypeOf_Typed::Compiler::generateStubCode(MacroAssembler &masm)
{
    // Object-like types are never specialized (see DoTypeOfFallback).
    JS_ASSERT(type_ != JSTYPE_NULL);
    JS_ASSERT(type_ != JSTYPE_FUNCTION);
    JS_ASSERT(type_ != JSTYPE_OBJECT);

    Label failure;
    switch(type_) {
      case JSTYPE_VOID:
        masm.branchTestUndefined(Assembler::NotEqual, R0, &failure);
        break;

      case JSTYPE_STRING:
        masm.branchTestString(Assembler::NotEqual, R0, &failure);
        break;

      case JSTYPE_NUMBER:
        masm.branchTestNumber(Assembler::NotEqual, R0, &failure);
        break;

      case JSTYPE_BOOLEAN:
        masm.branchTestBoolean(Assembler::NotEqual, R0, &failure);
        break;

      default:
        MOZ_ASSUME_UNREACHABLE("Unexpected type");
    }

    // Guard passed: return the interned typeof string for this type.
    masm.movePtr(ImmGCPtr(typeString_), R0.scratchReg());
    masm.tagValue(JSVAL_TYPE_STRING, R0.scratchReg(), R0);
    EmitReturnFromIC(masm);

    // Type guard failed - jump to next stub.
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
  1.9740 +
  1.9741 +static bool
  1.9742 +DoRetSubFallback(JSContext *cx, BaselineFrame *frame, ICRetSub_Fallback *stub,
  1.9743 +                 HandleValue val, uint8_t **resumeAddr)
  1.9744 +{
  1.9745 +    FallbackICSpew(cx, stub, "RetSub");
  1.9746 +
  1.9747 +    // |val| is the bytecode offset where we should resume.
  1.9748 +
  1.9749 +    JS_ASSERT(val.isInt32());
  1.9750 +    JS_ASSERT(val.toInt32() >= 0);
  1.9751 +
  1.9752 +    JSScript *script = frame->script();
  1.9753 +    uint32_t offset = uint32_t(val.toInt32());
  1.9754 +
  1.9755 +    *resumeAddr = script->baselineScript()->nativeCodeForPC(script, script->offsetToPC(offset));
  1.9756 +
  1.9757 +    if (stub->numOptimizedStubs() >= ICRetSub_Fallback::MAX_OPTIMIZED_STUBS)
  1.9758 +        return true;
  1.9759 +
  1.9760 +    // Attach an optimized stub for this pc offset.
  1.9761 +    IonSpew(IonSpew_BaselineIC, "  Generating RetSub stub for pc offset %u", offset);
  1.9762 +    ICRetSub_Resume::Compiler compiler(cx, offset, *resumeAddr);
  1.9763 +    ICStub *optStub = compiler.getStub(compiler.getStubSpace(script));
  1.9764 +    if (!optStub)
  1.9765 +        return false;
  1.9766 +
  1.9767 +    stub->addNewStub(optStub);
  1.9768 +    return true;
  1.9769 +}
  1.9770 +
  1.9771 +typedef bool(*DoRetSubFallbackFn)(JSContext *cx, BaselineFrame *, ICRetSub_Fallback *,
  1.9772 +                                  HandleValue, uint8_t **);
  1.9773 +static const VMFunction DoRetSubFallbackInfo = FunctionInfo<DoRetSubFallbackFn>(DoRetSubFallback);
  1.9774 +
typedef bool (*ThrowFn)(JSContext *, HandleValue);
static const VMFunction ThrowInfoBaseline = FunctionInfo<ThrowFn>(js::Throw);

// RETSUB fallback stub. R0 indicates whether we are resuming normally
// (false: R1 holds the bytecode resume offset) or rethrowing a pending
// exception (true: R1 holds the exception value).
bool
ICRetSub_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    // If R0 is BooleanValue(true), rethrow R1.
    Label rethrow;
    masm.branchTestBooleanTruthy(true, R0, &rethrow);
    {
        // Call a stub to get the native code address for the pc offset in R1.
        GeneralRegisterSet regs(availableGeneralRegs(0));
        regs.take(R1);
        regs.takeUnchecked(BaselineTailCallReg);

        // Keep the frame pointer in a register that survives entering the
        // stub frame, so it can be pushed as a VM call argument below.
        Register frame = regs.takeAny();
        masm.movePtr(BaselineFrameReg, frame);

        enterStubFrame(masm, regs.getAny());

        // Push VM call arguments in reverse order of the C++ signature:
        // resume-offset value, stub, then the baseline frame pointer.
        masm.pushValue(R1);
        masm.push(BaselineStubReg);
        masm.pushBaselineFramePtr(frame, frame);

        if (!callVM(DoRetSubFallbackInfo, masm))
            return false;

        leaveStubFrame(masm);

        // The VM call returned the native resume address; continue there.
        EmitChangeICReturnAddress(masm, ReturnReg);
        EmitReturnFromIC(masm);
    }

    masm.bind(&rethrow);
    EmitRestoreTailCallReg(masm);
    masm.pushValue(R1);
    return tailCallVM(ThrowInfoBaseline, masm);
}
  1.9813 +
// Optimized RETSUB stub: caches the native resume address for one specific
// pc offset, avoiding the VM call in ICRetSub_Fallback.
bool
ICRetSub_Resume::Compiler::generateStubCode(MacroAssembler &masm)
{
    // If R0 is BooleanValue(true), rethrow R1.
    Label fail, rethrow;
    masm.branchTestBooleanTruthy(true, R0, &rethrow);

    // R1 is the pc offset. Ensure it matches this stub's offset.
    Register offset = masm.extractInt32(R1, ExtractTemp0);
    masm.branch32(Assembler::NotEqual,
                  Address(BaselineStubReg, ICRetSub_Resume::offsetOfPCOffset()),
                  offset,
                  &fail);

    // pc offset matches, resume at the target pc.
    masm.loadPtr(Address(BaselineStubReg, ICRetSub_Resume::offsetOfAddr()), R0.scratchReg());
    EmitChangeICReturnAddress(masm, R0.scratchReg());
    EmitReturnFromIC(masm);

    // Rethrow the Value stored in R1.
    masm.bind(&rethrow);
    EmitRestoreTailCallReg(masm);
    masm.pushValue(R1);
    if (!tailCallVM(ThrowInfoBaseline, masm))
        return false;

    // Offset mismatch - jump to next stub.
    masm.bind(&fail);
    EmitStubGuardFailure(masm);
    return true;
}
  1.9844 +
// Constructor: profiler stub recording the label string and script whose
// entry it instruments.
ICProfiler_PushFunction::ICProfiler_PushFunction(JitCode *stubCode, const char *str,
                                                 HandleScript script)
  : ICStub(ICStub::Profiler_PushFunction, stubCode),
    str_(str),
    script_(script)
{ }

// Constructor: type-monitor stub guarding on one specific object identity.
ICTypeMonitor_SingleObject::ICTypeMonitor_SingleObject(JitCode *stubCode, HandleObject obj)
  : ICStub(TypeMonitor_SingleObject, stubCode),
    obj_(obj)
{ }

// Constructor: type-monitor stub guarding on a TypeObject.
ICTypeMonitor_TypeObject::ICTypeMonitor_TypeObject(JitCode *stubCode, HandleTypeObject type)
  : ICStub(TypeMonitor_TypeObject, stubCode),
    type_(type)
{ }

// Constructor: type-update stub guarding on one specific object identity.
ICTypeUpdate_SingleObject::ICTypeUpdate_SingleObject(JitCode *stubCode, HandleObject obj)
  : ICStub(TypeUpdate_SingleObject, stubCode),
    obj_(obj)
{ }

// Constructor: type-update stub guarding on a TypeObject.
ICTypeUpdate_TypeObject::ICTypeUpdate_TypeObject(JitCode *stubCode, HandleTypeObject type)
  : ICStub(TypeUpdate_TypeObject, stubCode),
    type_(type)
{ }
  1.9871 +
// Constructor: base class for native GETELEM stubs. The access type and
// needs-atomize flag are packed into the shared |extra_| bitfield.
ICGetElemNativeStub::ICGetElemNativeStub(ICStub::Kind kind, JitCode *stubCode,
                                         ICStub *firstMonitorStub,
                                         HandleShape shape, HandlePropertyName name,
                                         AccessType acctype, bool needsAtomize)
  : ICMonitoredStub(kind, stubCode, firstMonitorStub),
    shape_(shape),
    name_(name)
{
    extra_ = (static_cast<uint16_t>(acctype) << ACCESSTYPE_SHIFT) |
             (static_cast<uint16_t>(needsAtomize) << NEEDS_ATOMIZE_SHIFT);
}

ICGetElemNativeStub::~ICGetElemNativeStub()
{ }

// Constructor: native GETELEM stub that invokes a getter function; only
// valid for the prototype-call kinds and getter access types (asserted).
ICGetElemNativeGetterStub::ICGetElemNativeGetterStub(
                        ICStub::Kind kind, JitCode *stubCode, ICStub *firstMonitorStub,
                        HandleShape shape, HandlePropertyName name, AccessType acctype,
                        bool needsAtomize, HandleFunction getter, uint32_t pcOffset)
  : ICGetElemNativeStub(kind, stubCode, firstMonitorStub, shape, name, acctype, needsAtomize),
    getter_(getter),
    pcOffset_(pcOffset)
{
    JS_ASSERT(kind == GetElem_NativePrototypeCallNative ||
              kind == GetElem_NativePrototypeCallScripted);
    JS_ASSERT(acctype == NativeGetter || acctype == ScriptedGetter);
}
  1.9899 +
// Constructor: GETELEM stub reading a slot from a prototype; also guards
// the holder object and its shape.
ICGetElem_NativePrototypeSlot::ICGetElem_NativePrototypeSlot(
                            JitCode *stubCode, ICStub *firstMonitorStub,
                            HandleShape shape, HandlePropertyName name,
                            AccessType acctype, bool needsAtomize, uint32_t offset,
                            HandleObject holder, HandleShape holderShape)
  : ICGetElemNativeSlotStub(ICStub::GetElem_NativePrototypeSlot, stubCode, firstMonitorStub, shape,
                            name, acctype, needsAtomize, offset),
    holder_(holder),
    holderShape_(holderShape)
{ }

// Constructor: GETELEM stub calling a getter found on a prototype.
ICGetElemNativePrototypeCallStub::ICGetElemNativePrototypeCallStub(
                                ICStub::Kind kind, JitCode *stubCode, ICStub *firstMonitorStub,
                                HandleShape shape, HandlePropertyName name,
                                AccessType acctype, bool needsAtomize, HandleFunction getter,
                                uint32_t pcOffset, HandleObject holder, HandleShape holderShape)
  : ICGetElemNativeGetterStub(kind, stubCode, firstMonitorStub, shape, name, acctype, needsAtomize,
                              getter, pcOffset),
    holder_(holder),
    holderShape_(holderShape)
{}

// Constructor: GETELEM stub for dense-element access, guarded on shape.
ICGetElem_Dense::ICGetElem_Dense(JitCode *stubCode, ICStub *firstMonitorStub, HandleShape shape)
    : ICMonitoredStub(GetElem_Dense, stubCode, firstMonitorStub),
      shape_(shape)
{ }

// Constructor: GETELEM stub for typed arrays. The element type is packed
// into |extra_|; the assert catches truncation of out-of-range values.
ICGetElem_TypedArray::ICGetElem_TypedArray(JitCode *stubCode, HandleShape shape, uint32_t type)
  : ICStub(GetElem_TypedArray, stubCode),
    shape_(shape)
{
    extra_ = uint16_t(type);
    JS_ASSERT(extra_ == type);
}

// Constructor: SETELEM stub for dense-element stores, guarded on shape and
// TypeObject.
ICSetElem_Dense::ICSetElem_Dense(JitCode *stubCode, HandleShape shape, HandleTypeObject type)
  : ICUpdatedStub(SetElem_Dense, stubCode),
    shape_(shape),
    type_(type)
{ }
  1.9940 +
// Constructor: SETELEM stub that adds a new dense element. The (bounded)
// prototype chain depth is stored in |extra_|.
ICSetElem_DenseAdd::ICSetElem_DenseAdd(JitCode *stubCode, types::TypeObject *type,
                                       size_t protoChainDepth)
  : ICUpdatedStub(SetElem_DenseAdd, stubCode),
    type_(type)
{
    JS_ASSERT(protoChainDepth <= MAX_PROTO_CHAIN_DEPTH);
    extra_ = protoChainDepth;
}

// Instantiate the depth-specialized SetElem_DenseAdd stub for a concrete
// prototype chain depth. Returns nullptr on OOM or if the object's type
// cannot be obtained.
template <size_t ProtoChainDepth>
ICUpdatedStub *
ICSetElemDenseAddCompiler::getStubSpecific(ICStubSpace *space, const AutoShapeVector *shapes)
{
    RootedTypeObject objType(cx, obj_->getType(cx));
    if (!objType)
        return nullptr;
    Rooted<JitCode *> stubCode(cx, getStubCode());
    return ICSetElem_DenseAddImpl<ProtoChainDepth>::New(space, stubCode, objType, shapes);
}
  1.9960 +
// Constructor: SETELEM stub for typed arrays. The low byte of |extra_|
// holds the element type (assert catches truncation); bit 8 records
// whether out-of-bounds writes are expected.
ICSetElem_TypedArray::ICSetElem_TypedArray(JitCode *stubCode, HandleShape shape, uint32_t type,
                                           bool expectOutOfBounds)
  : ICStub(SetElem_TypedArray, stubCode),
    shape_(shape)
{
    extra_ = uint8_t(type);
    JS_ASSERT(extra_ == type);
    extra_ |= (static_cast<uint16_t>(expectOutOfBounds) << 8);
}

// Constructor: GETNAME stub reading a global's slot, guarded on shape.
ICGetName_Global::ICGetName_Global(JitCode *stubCode, ICStub *firstMonitorStub, HandleShape shape,
                                   uint32_t slot)
  : ICMonitoredStub(GetName_Global, stubCode, firstMonitorStub),
    shape_(shape),
    slot_(slot)
{ }

// Constructor: GETNAME stub walking NumHops scope-chain links; stores one
// guard shape per hop plus one for the final scope (NumHops + 1 total).
template <size_t NumHops>
ICGetName_Scope<NumHops>::ICGetName_Scope(JitCode *stubCode, ICStub *firstMonitorStub,
                                          AutoShapeVector *shapes, uint32_t offset)
  : ICMonitoredStub(GetStubKind(), stubCode, firstMonitorStub),
    offset_(offset)
{
    JS_STATIC_ASSERT(NumHops <= MAX_HOPS);
    JS_ASSERT(shapes->length() == NumHops + 1);
    for (size_t i = 0; i < NumHops + 1; i++)
        shapes_[i].init((*shapes)[i]);
}

// Constructor: stub returning a cached constant for an intrinsic.
ICGetIntrinsic_Constant::ICGetIntrinsic_Constant(JitCode *stubCode, HandleValue value)
  : ICStub(GetIntrinsic_Constant, stubCode),
    value_(value)
{ }

ICGetIntrinsic_Constant::~ICGetIntrinsic_Constant()
{ }
  1.9997 +
// Constructor: GETPROP stub for primitive receivers, reading a slot from
// the (shape-guarded) wrapper prototype.
ICGetProp_Primitive::ICGetProp_Primitive(JitCode *stubCode, ICStub *firstMonitorStub,
                                         HandleShape protoShape, uint32_t offset)
  : ICMonitoredStub(GetProp_Primitive, stubCode, firstMonitorStub),
    protoShape_(protoShape),
    offset_(offset)
{ }

// Constructor: base class for native GETPROP slot-read stubs.
ICGetPropNativeStub::ICGetPropNativeStub(ICStub::Kind kind, JitCode *stubCode,
                                         ICStub *firstMonitorStub,
                                         HandleShape shape, uint32_t offset)
  : ICMonitoredStub(kind, stubCode, firstMonitorStub),
    shape_(shape),
    offset_(offset)
{ }

// Constructor: GETPROP stub reading a slot from a prototype holder, with
// holder identity and shape guards.
ICGetProp_NativePrototype::ICGetProp_NativePrototype(JitCode *stubCode, ICStub *firstMonitorStub,
                                                     HandleShape shape, uint32_t offset,
                                                     HandleObject holder, HandleShape holderShape)
  : ICGetPropNativeStub(GetProp_NativePrototype, stubCode, firstMonitorStub, shape, offset),
    holder_(holder),
    holderShape_(holderShape)
{ }

// Constructor: base class for GETPROP stubs that call a getter; restricted
// to the call-getter stub kinds (asserted).
ICGetPropCallGetter::ICGetPropCallGetter(Kind kind, JitCode *stubCode, ICStub *firstMonitorStub,
                                         HandleObject holder, HandleShape holderShape, HandleFunction getter,
                                         uint32_t pcOffset)
  : ICMonitoredStub(kind, stubCode, firstMonitorStub),
    holder_(holder),
    holderShape_(holderShape),
    getter_(getter),
    pcOffset_(pcOffset)
{
    JS_ASSERT(kind == ICStub::GetProp_CallScripted  ||
              kind == ICStub::GetProp_CallNative    ||
              kind == ICStub::GetProp_CallNativePrototype);
}

// Constructor: call-getter stub where the getter lives on a prototype;
// additionally guards the receiver's own shape.
ICGetPropCallPrototypeGetter::ICGetPropCallPrototypeGetter(Kind kind, JitCode *stubCode,
                                                           ICStub *firstMonitorStub,
                                                           HandleShape receiverShape, HandleObject holder,
                                                           HandleShape holderShape,
                                                           HandleFunction getter, uint32_t pcOffset)
  : ICGetPropCallGetter(kind, stubCode, firstMonitorStub, holder, holderShape, getter, pcOffset),
    receiverShape_(receiverShape)
{
    JS_ASSERT(kind == ICStub::GetProp_CallScripted || kind == ICStub::GetProp_CallNativePrototype);
}

// Constructor: SETPROP stub storing to an existing slot, guarded on
// TypeObject and shape.
ICSetProp_Native::ICSetProp_Native(JitCode *stubCode, HandleTypeObject type, HandleShape shape,
                                   uint32_t offset)
  : ICUpdatedStub(SetProp_Native, stubCode),
    type_(type),
    shape_(shape),
    offset_(offset)
{ }
 1.10053 +
 1.10054 +ICUpdatedStub *
 1.10055 +ICSetProp_Native::Compiler::getStub(ICStubSpace *space)
 1.10056 +{
 1.10057 +    RootedTypeObject type(cx, obj_->getType(cx));
 1.10058 +    if (!type)
 1.10059 +        return nullptr;
 1.10060 +
 1.10061 +    RootedShape shape(cx, obj_->lastProperty());
 1.10062 +    ICUpdatedStub *stub = ICSetProp_Native::New(space, getStubCode(), type, shape, offset_);
 1.10063 +    if (!stub || !stub->initUpdatingChain(cx, space))
 1.10064 +        return nullptr;
 1.10065 +    return stub;
 1.10066 +}
 1.10067 +
// SETPROP stub for the case where the write *adds* a new property, changing
// the object's shape to |newShape|. The prototype-chain depth is packed into
// the generic extra_ field and must fit under MAX_PROTO_CHAIN_DEPTH; the
// per-depth shape guards themselves live in the ICSetProp_NativeAddImpl
// subclass below.
ICSetProp_NativeAdd::ICSetProp_NativeAdd(JitCode *stubCode, HandleTypeObject type,
                                         size_t protoChainDepth,
                                         HandleShape newShape,
                                         uint32_t offset)
  : ICUpdatedStub(SetProp_NativeAdd, stubCode),
    type_(type),
    newShape_(newShape),
    offset_(offset)
{
    JS_ASSERT(protoChainDepth <= MAX_PROTO_CHAIN_DEPTH);
    extra_ = protoChainDepth;
}
 1.10080 +
// Depth-templated variant of ICSetProp_NativeAdd: stores one shape guard per
// object on the prototype chain (NumShapes entries, derived from
// ProtoChainDepth). The shapes vector must match that count exactly.
template <size_t ProtoChainDepth>
ICSetProp_NativeAddImpl<ProtoChainDepth>::ICSetProp_NativeAddImpl(JitCode *stubCode,
                                                                  HandleTypeObject type,
                                                                  const AutoShapeVector *shapes,
                                                                  HandleShape newShape,
                                                                  uint32_t offset)
  : ICSetProp_NativeAdd(stubCode, type, ProtoChainDepth, newShape, offset)
{
    JS_ASSERT(shapes->length() == NumShapes);
    // Copy the rooted shapes into the stub's own (barriered) storage.
    for (size_t i = 0; i < NumShapes; i++)
        shapes_[i].init((*shapes)[i]);
}
 1.10093 +
// Compiler for SetProp_NativeAdd stubs. Captures the target object, its shape
// *before* the property add (oldShape), the prototype-chain depth to guard,
// whether the new slot is fixed or dynamic, and the slot offset to write.
ICSetPropNativeAddCompiler::ICSetPropNativeAddCompiler(JSContext *cx, HandleObject obj,
                                                       HandleShape oldShape,
                                                       size_t protoChainDepth,
                                                       bool isFixedSlot,
                                                       uint32_t offset)
  : ICStubCompiler(cx, ICStub::SetProp_NativeAdd),
    obj_(cx, obj),
    oldShape_(cx, oldShape),
    protoChainDepth_(protoChainDepth),
    isFixedSlot_(isFixedSlot),
    offset_(offset)
{
    JS_ASSERT(protoChainDepth_ <= ICSetProp_NativeAdd::MAX_PROTO_CHAIN_DEPTH);
}
 1.10108 +
// Common base-state for SETPROP stubs that invoke an accessor: guards the
// receiver's shape and the holder's shape, and remembers the setter function
// plus the pc offset of the SETPROP op. Only the scripted- and native-setter
// stub kinds may use this base.
ICSetPropCallSetter::ICSetPropCallSetter(Kind kind, JitCode *stubCode, HandleShape shape,
                                         HandleObject holder, HandleShape holderShape,
                                         HandleFunction setter, uint32_t pcOffset)
  : ICStub(kind, stubCode),
    shape_(shape),
    holder_(holder),
    holderShape_(holderShape),
    setter_(setter),
    pcOffset_(pcOffset)
{
    JS_ASSERT(kind == ICStub::SetProp_CallScripted || kind == ICStub::SetProp_CallNative);
}
 1.10121 +
// Optimized call stub for a known scripted callee. Holds the callee's script,
// a template object (presumably used for fast this/return-object allocation —
// see the call IC's codegen), and the pc offset of the call op. Monitored so
// the return value flows through the type-monitor chain.
ICCall_Scripted::ICCall_Scripted(JitCode *stubCode, ICStub *firstMonitorStub,
                                 HandleScript calleeScript, HandleObject templateObject,
                                 uint32_t pcOffset)
  : ICMonitoredStub(ICStub::Call_Scripted, stubCode, firstMonitorStub),
    calleeScript_(calleeScript),
    templateObject_(templateObject),
    pcOffset_(pcOffset)
{ }
 1.10130 +
// Optimized call stub for a known native-function callee. Like
// ICCall_Scripted, but stores the callee function itself; on the ARM
// simulator it additionally caches a redirected native entry point.
ICCall_Native::ICCall_Native(JitCode *stubCode, ICStub *firstMonitorStub,
                             HandleFunction callee, HandleObject templateObject,
                             uint32_t pcOffset)
  : ICMonitoredStub(ICStub::Call_Native, stubCode, firstMonitorStub),
    callee_(callee),
    templateObject_(templateObject),
    pcOffset_(pcOffset)
{
#ifdef JS_ARM_SIMULATOR
    // The simulator requires VM calls to be redirected to a special swi
    // instruction to handle them. To make this work, we store the redirected
    // pointer in the stub.
    native_ = Simulator::RedirectNativeFunction(JS_FUNC_TO_DATA_PTR(void *, callee->native()),
                                                Args_General3);
#endif
}
 1.10147 +
// Base state for GETPROP stubs that call a native getter through a DOM proxy:
// guards the proxy's shape and handler identity, the expando object's shape
// (may be null — TODO confirm against the compiler that builds these), and the
// holder's shape, and records the getter plus the GETPROP pc offset.
ICGetPropCallDOMProxyNativeStub::ICGetPropCallDOMProxyNativeStub(Kind kind, JitCode *stubCode,
                                                                 ICStub *firstMonitorStub,
                                                                 HandleShape shape,
                                                                 BaseProxyHandler *proxyHandler,
                                                                 HandleShape expandoShape,
                                                                 HandleObject holder,
                                                                 HandleShape holderShape,
                                                                 HandleFunction getter,
                                                                 uint32_t pcOffset)
  : ICMonitoredStub(kind, stubCode, firstMonitorStub),
    shape_(shape),
    proxyHandler_(proxyHandler),
    expandoShape_(expandoShape),
    holder_(holder),
    holderShape_(holderShape),
    getter_(getter),
    pcOffset_(pcOffset)
{ }
 1.10166 +
// Compiler for DOM-proxy native-getter GETPROP stubs. Accepts either the
// plain or the generation-guarded stub kind, and requires the proxy to be a
// genuine DOM proxy (checked against the DOM proxy handler family).
ICGetPropCallDOMProxyNativeCompiler::ICGetPropCallDOMProxyNativeCompiler(JSContext *cx,
                                                                         ICStub::Kind kind,
                                                                         ICStub *firstMonitorStub,
                                                                         Handle<ProxyObject*> proxy,
                                                                         HandleObject holder,
                                                                         HandleFunction getter,
                                                                         uint32_t pcOffset)
  : ICStubCompiler(cx, kind),
    firstMonitorStub_(firstMonitorStub),
    proxy_(cx, proxy),
    holder_(cx, holder),
    getter_(cx, getter),
    pcOffset_(pcOffset)
{
    JS_ASSERT(kind == ICStub::GetProp_CallDOMProxyNative ||
              kind == ICStub::GetProp_CallDOMProxyWithGenerationNative);
    JS_ASSERT(proxy_->handler()->family() == GetDOMProxyHandlerFamily());
}
 1.10185 +
// GETPROP stub for a DOM proxy whose property is shadowed (so the get must go
// through the proxy handler rather than a cached holder). Guards the proxy's
// shape and handler identity and remembers the property name and pc offset.
ICGetProp_DOMProxyShadowed::ICGetProp_DOMProxyShadowed(JitCode *stubCode,
                                                       ICStub *firstMonitorStub,
                                                       HandleShape shape,
                                                       BaseProxyHandler *proxyHandler,
                                                       HandlePropertyName name,
                                                       uint32_t pcOffset)
  : ICMonitoredStub(ICStub::GetProp_DOMProxyShadowed, stubCode, firstMonitorStub),
    shape_(shape),
    proxyHandler_(proxyHandler),
    name_(name),
    pcOffset_(pcOffset)
{ }
 1.10198 +
 1.10199 +//
 1.10200 +// Rest_Fallback
 1.10201 +//
 1.10202 +
 1.10203 +static bool DoRestFallback(JSContext *cx, ICRest_Fallback *stub,
 1.10204 +                           BaselineFrame *frame, MutableHandleValue res)
 1.10205 +{
 1.10206 +    unsigned numFormals = frame->numFormalArgs() - 1;
 1.10207 +    unsigned numActuals = frame->numActualArgs();
 1.10208 +    unsigned numRest = numActuals > numFormals ? numActuals - numFormals : 0;
 1.10209 +    Value *rest = frame->argv() + numFormals;
 1.10210 +
 1.10211 +    JSObject *obj = NewDenseCopiedArray(cx, numRest, rest, nullptr);
 1.10212 +    if (!obj)
 1.10213 +        return false;
 1.10214 +    types::FixRestArgumentsType(cx, obj);
 1.10215 +    res.setObject(*obj);
 1.10216 +    return true;
 1.10217 +}
 1.10218 +
// VMFunction glue: lets the generated fallback stub tail-call into the C++
// DoRestFallback above through the VM-call machinery.
typedef bool (*DoRestFallbackFn)(JSContext *, ICRest_Fallback *, BaselineFrame *,
                                 MutableHandleValue);
static const VMFunction DoRestFallbackInfo =
    FunctionInfo<DoRestFallbackFn>(DoRestFallback);
 1.10223 +
// Emit the Rest_Fallback stub: restore the tail-call register, push the
// arguments DoRestFallback expects (baseline frame pointer, then the stub
// pointer — presumably last-to-first relative to the C++ signature, as with
// the other fallback stubs; confirm against the VM-call convention), and
// tail-call through DoRestFallbackInfo.
bool
ICRest_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
    masm.push(BaselineStubReg);

    return tailCallVM(DoRestFallbackInfo, masm);
}
 1.10234 +
 1.10235 +} // namespace jit
 1.10236 +} // namespace js

mercurial