js/src/jit/IonCaches.cpp

changeset 0 6474c204b198
     1.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
     1.2 +++ b/js/src/jit/IonCaches.cpp	Wed Dec 31 06:09:35 2014 +0100
     1.3 @@ -0,0 +1,4357 @@
     1.4 +/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
     1.5 + * vim: set ts=8 sts=4 et sw=4 tw=99:
     1.6 + * This Source Code Form is subject to the terms of the Mozilla Public
     1.7 + * License, v. 2.0. If a copy of the MPL was not distributed with this
     1.8 + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
     1.9 +
    1.10 +#include "jit/IonCaches.h"
    1.11 +
    1.12 +#include "mozilla/TemplateLib.h"
    1.13 +
    1.14 +#include "jsproxy.h"
    1.15 +#include "jstypes.h"
    1.16 +
    1.17 +#include "builtin/TypedObject.h"
    1.18 +#include "jit/Ion.h"
    1.19 +#include "jit/IonLinker.h"
    1.20 +#include "jit/IonSpewer.h"
    1.21 +#include "jit/Lowering.h"
    1.22 +#ifdef JS_ION_PERF
    1.23 +# include "jit/PerfSpewer.h"
    1.24 +#endif
    1.25 +#include "jit/ParallelFunctions.h"
    1.26 +#include "jit/VMFunctions.h"
    1.27 +#include "vm/Shape.h"
    1.28 +
    1.29 +#include "jit/IonFrames-inl.h"
    1.30 +#include "vm/Interpreter-inl.h"
    1.31 +#include "vm/Shape-inl.h"
    1.32 +
    1.33 +using namespace js;
    1.34 +using namespace js::jit;
    1.35 +
    1.36 +using mozilla::tl::FloorLog2;
    1.37 +
    1.38 +void
    1.39 +CodeLocationJump::repoint(JitCode *code, MacroAssembler *masm)
    1.40 +{
    1.41 +    JS_ASSERT(state_ == Relative);
    1.42 +    size_t new_off = (size_t)raw_;
    1.43 +#ifdef JS_SMALL_BRANCH
    1.44 +    size_t jumpTableEntryOffset = reinterpret_cast<size_t>(jumpTableEntry_);
    1.45 +#endif
    1.46 +    if (masm != nullptr) {
    1.47 +#ifdef JS_CODEGEN_X64
    1.48 +        JS_ASSERT((uint64_t)raw_ <= UINT32_MAX);
    1.49 +#endif
    1.50 +        new_off = masm->actualOffset((uintptr_t)raw_);
    1.51 +#ifdef JS_SMALL_BRANCH
    1.52 +        jumpTableEntryOffset = masm->actualIndex(jumpTableEntryOffset);
    1.53 +#endif
    1.54 +    }
    1.55 +    raw_ = code->raw() + new_off;
    1.56 +#ifdef JS_SMALL_BRANCH
    1.57 +    jumpTableEntry_ = Assembler::PatchableJumpAddress(code, (size_t) jumpTableEntryOffset);
    1.58 +#endif
    1.59 +    setAbsolute();
    1.60 +}
    1.61 +
    1.62 +void
    1.63 +CodeLocationLabel::repoint(JitCode *code, MacroAssembler *masm)
    1.64 +{
     1.65 +    JS_ASSERT(state_ == Relative);
     1.66 +    size_t new_off = (size_t)raw_;
     1.67 +    if (masm != nullptr) {
     1.68 +#ifdef JS_CODEGEN_X64
     1.69 +        JS_ASSERT((uint64_t)raw_ <= UINT32_MAX);
     1.70 +#endif
     1.71 +        new_off = masm->actualOffset((uintptr_t)raw_);
     1.72 +    }
     1.73 +    JS_ASSERT(new_off < code->instructionsSize());
     1.74 +
     1.75 +    raw_ = code->raw() + new_off;
     1.76 +    setAbsolute();
    1.77 +}
    1.78 +
    1.79 +void
    1.80 +CodeOffsetLabel::fixup(MacroAssembler *masm)
    1.81 +{
     1.82 +    offset_ = masm->actualOffset(offset_);
    1.83 +}
    1.84 +
    1.85 +void
    1.86 +CodeOffsetJump::fixup(MacroAssembler *masm)
    1.87 +{
     1.88 +    offset_ = masm->actualOffset(offset_);
     1.89 +#ifdef JS_SMALL_BRANCH
     1.90 +    jumpTableIndex_ = masm->actualIndex(jumpTableIndex_);
    1.91 +#endif
    1.92 +}
    1.93 +
    1.94 +const char *
    1.95 +IonCache::CacheName(IonCache::Kind kind)
    1.96 +{
    1.97 +    static const char * const names[] =
    1.98 +    {
    1.99 +#define NAME(x) #x,
   1.100 +        IONCACHE_KIND_LIST(NAME)
   1.101 +#undef NAME
   1.102 +    };
   1.103 +    return names[kind];
   1.104 +}
   1.105 +
   1.106 +IonCache::LinkStatus
   1.107 +IonCache::linkCode(JSContext *cx, MacroAssembler &masm, IonScript *ion, JitCode **code)
   1.108 +{
   1.109 +    Linker linker(masm);
   1.110 +    *code = linker.newCode<CanGC>(cx, JSC::ION_CODE);
   1.111 +    if (!*code)
   1.112 +        return LINK_ERROR;
   1.113 +
   1.114 +    if (ion->invalidated())
   1.115 +        return CACHE_FLUSHED;
   1.116 +
   1.117 +    return LINK_GOOD;
   1.118 +}
   1.119 +
   1.120 +const size_t IonCache::MAX_STUBS = 16;
   1.121 +
   1.122 +// Helper class which encapsulates logic to attach a stub to an IC by hooking
   1.123 +// up rejoins and next stub jumps.
   1.124 +//
   1.125 +// The simplest stubs have a single jump to the next stub and look like the
   1.126 +// following:
   1.127 +//
   1.128 +//    branch guard NEXTSTUB
   1.129 +//    ... IC-specific code ...
   1.130 +//    jump REJOIN
   1.131 +//
   1.132 +// This corresponds to:
   1.133 +//
   1.134 +//    attacher.branchNextStub(masm, ...);
   1.135 +//    ... emit IC-specific code ...
   1.136 +//    attacher.jumpRejoin(masm);
   1.137 +//
    1.138 +// When the stub needs multiple next stub jumps, it looks like the following:
   1.139 +//
   1.140 +//   branch guard FAILURES
   1.141 +//   ... IC-specific code ...
   1.142 +//   branch another-guard FAILURES
   1.143 +//   ... IC-specific code ...
   1.144 +//   jump REJOIN
   1.145 +//   FAILURES:
   1.146 +//   jump NEXTSTUB
   1.147 +//
   1.148 +// This corresponds to:
   1.149 +//
   1.150 +//   Label failures;
   1.151 +//   masm.branchX(..., &failures);
   1.152 +//   ... emit IC-specific code ...
    1.153 +//   masm.branchY(..., &failures);
   1.154 +//   ... emit more IC-specific code ...
   1.155 +//   attacher.jumpRejoin(masm);
   1.156 +//   masm.bind(&failures);
   1.157 +//   attacher.jumpNextStub(masm);
   1.158 +//
    1.159 +// A convenience function |branchNextStubOrLabel| is provided for the case where
    1.160 +// the stub sometimes has multiple next stub jumps and sometimes a single
    1.161 +// one. If a non-nullptr label is passed in, a |branchPtr| to that label is
    1.162 +// emitted instead of a |branchPtrWithPatch| to the next stub.
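          +//
          +// For illustration only, a sketch of the shape-guard pattern this enables
          +// (mirroring how GenerateReadSlot below uses it; |failures| is either nullptr,
          +// for a single failure jump, or a shared label for multiple failure jumps):
          +//
          +//   attacher.branchNextStubOrLabel(masm, Assembler::NotEqual,
          +//                                  Address(object, JSObject::offsetOfShape()),
          +//                                  ImmGCPtr(obj->lastProperty()),
          +//                                  failures);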
   1.163 +class IonCache::StubAttacher
   1.164 +{
   1.165 +  protected:
   1.166 +    bool hasNextStubOffset_ : 1;
   1.167 +    bool hasStubCodePatchOffset_ : 1;
   1.168 +
   1.169 +    CodeLocationLabel rejoinLabel_;
   1.170 +    CodeOffsetJump nextStubOffset_;
   1.171 +    CodeOffsetJump rejoinOffset_;
   1.172 +    CodeOffsetLabel stubCodePatchOffset_;
   1.173 +
   1.174 +  public:
   1.175 +    StubAttacher(CodeLocationLabel rejoinLabel)
   1.176 +      : hasNextStubOffset_(false),
   1.177 +        hasStubCodePatchOffset_(false),
   1.178 +        rejoinLabel_(rejoinLabel),
   1.179 +        nextStubOffset_(),
   1.180 +        rejoinOffset_(),
   1.181 +        stubCodePatchOffset_()
   1.182 +    { }
   1.183 +
    1.184 +    // Value used instead of the JitCode self-reference of generated
    1.185 +    // stubs. This value is needed for marking calls made inside stubs. It
    1.186 +    // is replaced by the attachStub function once the JitCode has been
    1.187 +    // allocated. The self-reference is used to keep the stub path alive
    1.188 +    // even if the IonScript is invalidated or if the IC is flushed.
   1.189 +    static const ImmPtr STUB_ADDR;
   1.190 +
   1.191 +    template <class T1, class T2>
   1.192 +    void branchNextStub(MacroAssembler &masm, Assembler::Condition cond, T1 op1, T2 op2) {
   1.193 +        JS_ASSERT(!hasNextStubOffset_);
   1.194 +        RepatchLabel nextStub;
   1.195 +        nextStubOffset_ = masm.branchPtrWithPatch(cond, op1, op2, &nextStub);
   1.196 +        hasNextStubOffset_ = true;
   1.197 +        masm.bind(&nextStub);
   1.198 +    }
   1.199 +
   1.200 +    template <class T1, class T2>
   1.201 +    void branchNextStubOrLabel(MacroAssembler &masm, Assembler::Condition cond, T1 op1, T2 op2,
   1.202 +                               Label *label)
   1.203 +    {
   1.204 +        if (label != nullptr)
   1.205 +            masm.branchPtr(cond, op1, op2, label);
   1.206 +        else
   1.207 +            branchNextStub(masm, cond, op1, op2);
   1.208 +    }
   1.209 +
   1.210 +    void jumpRejoin(MacroAssembler &masm) {
   1.211 +        RepatchLabel rejoin;
   1.212 +        rejoinOffset_ = masm.jumpWithPatch(&rejoin);
   1.213 +        masm.bind(&rejoin);
   1.214 +    }
   1.215 +
   1.216 +    void jumpNextStub(MacroAssembler &masm) {
   1.217 +        JS_ASSERT(!hasNextStubOffset_);
   1.218 +        RepatchLabel nextStub;
   1.219 +        nextStubOffset_ = masm.jumpWithPatch(&nextStub);
   1.220 +        hasNextStubOffset_ = true;
   1.221 +        masm.bind(&nextStub);
   1.222 +    }
   1.223 +
   1.224 +    void pushStubCodePointer(MacroAssembler &masm) {
   1.225 +        // Push the JitCode pointer for the stub we're generating.
   1.226 +        // WARNING:
   1.227 +        // WARNING: If JitCode ever becomes relocatable, the following code is incorrect.
   1.228 +        // WARNING: Note that we're not marking the pointer being pushed as an ImmGCPtr.
    1.229 +        // WARNING: This location will be patched with the pointer of the generated stub,
    1.230 +        // WARNING: so that it can be marked when a call is made with this stub. Be aware
    1.231 +        // WARNING: that ICs are not marked, so this stub is kept alive only if it is on
    1.232 +        // WARNING: the stack at the time of the GC. No ImmGCPtr is needed as the stubs
    1.233 +        // WARNING: are flushed on GC.
   1.234 +        // WARNING:
   1.235 +        JS_ASSERT(!hasStubCodePatchOffset_);
   1.236 +        stubCodePatchOffset_ = masm.PushWithPatch(STUB_ADDR);
   1.237 +        hasStubCodePatchOffset_ = true;
   1.238 +    }
   1.239 +
   1.240 +    void patchRejoinJump(MacroAssembler &masm, JitCode *code) {
   1.241 +        rejoinOffset_.fixup(&masm);
   1.242 +        CodeLocationJump rejoinJump(code, rejoinOffset_);
   1.243 +        PatchJump(rejoinJump, rejoinLabel_);
   1.244 +    }
   1.245 +
   1.246 +    void patchStubCodePointer(MacroAssembler &masm, JitCode *code) {
   1.247 +        if (hasStubCodePatchOffset_) {
   1.248 +            stubCodePatchOffset_.fixup(&masm);
   1.249 +            Assembler::patchDataWithValueCheck(CodeLocationLabel(code, stubCodePatchOffset_),
   1.250 +                                               ImmPtr(code), STUB_ADDR);
   1.251 +        }
   1.252 +    }
   1.253 +
   1.254 +    virtual void patchNextStubJump(MacroAssembler &masm, JitCode *code) = 0;
   1.255 +};
   1.256 +
   1.257 +const ImmPtr IonCache::StubAttacher::STUB_ADDR = ImmPtr((void*)0xdeadc0de);
   1.258 +
   1.259 +class RepatchIonCache::RepatchStubAppender : public IonCache::StubAttacher
   1.260 +{
   1.261 +    RepatchIonCache &cache_;
   1.262 +
   1.263 +  public:
   1.264 +    RepatchStubAppender(RepatchIonCache &cache)
   1.265 +      : StubAttacher(cache.rejoinLabel()),
   1.266 +        cache_(cache)
   1.267 +    {
   1.268 +    }
   1.269 +
   1.270 +    void patchNextStubJump(MacroAssembler &masm, JitCode *code) {
   1.271 +        // Patch the previous nextStubJump of the last stub, or the jump from the
   1.272 +        // codeGen, to jump into the newly allocated code.
   1.273 +        PatchJump(cache_.lastJump_, CodeLocationLabel(code));
   1.274 +
   1.275 +        // If this path is not taken, we are producing an entry which can no
   1.276 +        // longer go back into the update function.
   1.277 +        if (hasNextStubOffset_) {
   1.278 +            nextStubOffset_.fixup(&masm);
   1.279 +            CodeLocationJump nextStubJump(code, nextStubOffset_);
   1.280 +            PatchJump(nextStubJump, cache_.fallbackLabel_);
   1.281 +
    1.282 +            // When the last stub fails, it falls back to the OOL call, which can
    1.283 +            // produce a new stub. The next time we generate a stub, we will patch
    1.284 +            // this nextStub jump to try the new stub.
   1.285 +            cache_.lastJump_ = nextStubJump;
   1.286 +        }
   1.287 +    }
   1.288 +};
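          +
          +// A sketch of the chain of patched jumps RepatchStubAppender builds up once two
          +// stubs have been attached (illustrative only; see patchNextStubJump above):
          +//
          +//   initialJump_         -> stub 1
          +//   stub 1 nextStub jump -> stub 2
          +//   stub 2 nextStub jump -> fallbackLabel_ (the out-of-line update call)
          +//
          +// lastJump_ always names the most recently patched nextStub jump, so the next
          +// stub is spliced onto the end of the chain.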
   1.289 +
   1.290 +void
   1.291 +RepatchIonCache::reset()
   1.292 +{
   1.293 +    IonCache::reset();
   1.294 +    PatchJump(initialJump_, fallbackLabel_);
   1.295 +    lastJump_ = initialJump_;
   1.296 +}
   1.297 +
   1.298 +void
   1.299 +RepatchIonCache::emitInitialJump(MacroAssembler &masm, AddCacheState &addState)
   1.300 +{
   1.301 +    initialJump_ = masm.jumpWithPatch(&addState.repatchEntry);
   1.302 +    lastJump_ = initialJump_;
   1.303 +}
   1.304 +
   1.305 +void
   1.306 +RepatchIonCache::bindInitialJump(MacroAssembler &masm, AddCacheState &addState)
   1.307 +{
   1.308 +    masm.bind(&addState.repatchEntry);
   1.309 +}
   1.310 +
   1.311 +void
   1.312 +RepatchIonCache::updateBaseAddress(JitCode *code, MacroAssembler &masm)
   1.313 +{
   1.314 +    IonCache::updateBaseAddress(code, masm);
   1.315 +    initialJump_.repoint(code, &masm);
   1.316 +    lastJump_.repoint(code, &masm);
   1.317 +}
   1.318 +
   1.319 +class DispatchIonCache::DispatchStubPrepender : public IonCache::StubAttacher
   1.320 +{
   1.321 +    DispatchIonCache &cache_;
   1.322 +
   1.323 +  public:
   1.324 +    DispatchStubPrepender(DispatchIonCache &cache)
   1.325 +      : StubAttacher(cache.rejoinLabel_),
   1.326 +        cache_(cache)
   1.327 +    {
   1.328 +    }
   1.329 +
   1.330 +    void patchNextStubJump(MacroAssembler &masm, JitCode *code) {
   1.331 +        JS_ASSERT(hasNextStubOffset_);
   1.332 +
    1.333 +        // Jump to the previous entry in the stub dispatch table. We
    1.334 +        // have not yet executed the code we are patching the jump into.
   1.335 +        nextStubOffset_.fixup(&masm);
   1.336 +        CodeLocationJump nextStubJump(code, nextStubOffset_);
   1.337 +        PatchJump(nextStubJump, CodeLocationLabel(cache_.firstStub_));
   1.338 +
    1.339 +        // Update the dispatch table. Modifying jumps after the dispatch table
    1.340 +        // has been updated is disallowed, lest we race on entry into an
    1.341 +        // unfinalized stub.
   1.342 +        cache_.firstStub_ = code->raw();
   1.343 +    }
   1.344 +};
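          +
          +// A sketch of the dispatch chain once two stubs have been prepended
          +// (illustrative only): firstStub_ points at the newest stub, each stub's
          +// nextStub jump points at the previously newest stub, and the oldest stub
          +// jumps to the fallback:
          +//
          +//   firstStub_ -> stub 2 -> stub 1 -> fallbackLabel_
          +//
          +// The IC entry code loads firstStub_ from the dispatch table and jumps to it,
          +// so updating that single pointer (last, in patchNextStubJump) publishes the
          +// new stub.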
   1.345 +
   1.346 +void
   1.347 +DispatchIonCache::reset()
   1.348 +{
   1.349 +    IonCache::reset();
   1.350 +    firstStub_ = fallbackLabel_.raw();
   1.351 +}
   1.352 +void
   1.353 +DispatchIonCache::emitInitialJump(MacroAssembler &masm, AddCacheState &addState)
   1.354 +{
   1.355 +    Register scratch = addState.dispatchScratch;
   1.356 +    dispatchLabel_ = masm.movWithPatch(ImmPtr((void*)-1), scratch);
   1.357 +    masm.loadPtr(Address(scratch, 0), scratch);
   1.358 +    masm.jump(scratch);
   1.359 +    rejoinLabel_ = masm.labelForPatch();
   1.360 +}
   1.361 +
   1.362 +void
   1.363 +DispatchIonCache::bindInitialJump(MacroAssembler &masm, AddCacheState &addState)
   1.364 +{
   1.365 +    // Do nothing.
   1.366 +}
   1.367 +
   1.368 +void
   1.369 +DispatchIonCache::updateBaseAddress(JitCode *code, MacroAssembler &masm)
   1.370 +{
   1.371 +    // The address of firstStub_ should be pointer aligned.
   1.372 +    JS_ASSERT(uintptr_t(&firstStub_) % sizeof(uintptr_t) == 0);
   1.373 +
   1.374 +    IonCache::updateBaseAddress(code, masm);
   1.375 +    dispatchLabel_.fixup(&masm);
   1.376 +    Assembler::patchDataWithValueCheck(CodeLocationLabel(code, dispatchLabel_),
   1.377 +                                       ImmPtr(&firstStub_),
   1.378 +                                       ImmPtr((void*)-1));
   1.379 +    firstStub_ = fallbackLabel_.raw();
   1.380 +    rejoinLabel_.repoint(code, &masm);
   1.381 +}
   1.382 +
   1.383 +void
   1.384 +IonCache::attachStub(MacroAssembler &masm, StubAttacher &attacher, Handle<JitCode *> code)
   1.385 +{
   1.386 +    JS_ASSERT(canAttachStub());
   1.387 +    incrementStubCount();
   1.388 +
   1.389 +    // Update the success path to continue after the IC initial jump.
   1.390 +    attacher.patchRejoinJump(masm, code);
   1.391 +
    1.392 +    // Replace the STUB_ADDR constant with the address of the generated stub, so
    1.393 +    // that it can be kept alive even if the cache is flushed (see
    1.394 +    // MarkJitExitFrame).
   1.395 +    attacher.patchStubCodePointer(masm, code);
   1.396 +
    1.397 +    // Update the failure path. Note that it is this patch that makes the stub
    1.398 +    // accessible to parallel ICs, so it should not be moved unless you really
    1.399 +    // know what is going on.
   1.400 +    attacher.patchNextStubJump(masm, code);
   1.401 +}
   1.402 +
   1.403 +bool
   1.404 +IonCache::linkAndAttachStub(JSContext *cx, MacroAssembler &masm, StubAttacher &attacher,
   1.405 +                            IonScript *ion, const char *attachKind)
   1.406 +{
   1.407 +    Rooted<JitCode *> code(cx);
   1.408 +    {
   1.409 +        // Need to exit the AutoFlushICache context to flush the cache
   1.410 +        // before attaching the stub below.
   1.411 +        AutoFlushICache afc("IonCache");
   1.412 +        LinkStatus status = linkCode(cx, masm, ion, code.address());
   1.413 +        if (status != LINK_GOOD)
   1.414 +            return status != LINK_ERROR;
   1.415 +    }
   1.416 +
   1.417 +    if (pc_) {
   1.418 +        IonSpew(IonSpew_InlineCaches, "Cache %p(%s:%d/%d) generated %s %s stub at %p",
   1.419 +                this, script_->filename(), script_->lineno(), script_->pcToOffset(pc_),
   1.420 +                attachKind, CacheName(kind()), code->raw());
   1.421 +    } else {
   1.422 +        IonSpew(IonSpew_InlineCaches, "Cache %p generated %s %s stub at %p",
   1.423 +                this, attachKind, CacheName(kind()), code->raw());
   1.424 +    }
   1.425 +
   1.426 +#ifdef JS_ION_PERF
   1.427 +    writePerfSpewerJitCodeProfile(code, "IonCache");
   1.428 +#endif
   1.429 +
   1.430 +    attachStub(masm, attacher, code);
   1.431 +
   1.432 +    return true;
   1.433 +}
   1.434 +
   1.435 +void
   1.436 +IonCache::updateBaseAddress(JitCode *code, MacroAssembler &masm)
   1.437 +{
   1.438 +    fallbackLabel_.repoint(code, &masm);
   1.439 +}
   1.440 +
   1.441 +void
   1.442 +IonCache::initializeAddCacheState(LInstruction *ins, AddCacheState *addState)
   1.443 +{
   1.444 +}
   1.445 +
   1.446 +static bool
   1.447 +IsCacheableDOMProxy(JSObject *obj)
   1.448 +{
   1.449 +    if (!obj->is<ProxyObject>())
   1.450 +        return false;
   1.451 +
   1.452 +    BaseProxyHandler *handler = obj->as<ProxyObject>().handler();
   1.453 +
   1.454 +    if (handler->family() != GetDOMProxyHandlerFamily())
   1.455 +        return false;
   1.456 +
   1.457 +    if (obj->numFixedSlots() <= GetDOMProxyExpandoSlot())
   1.458 +        return false;
   1.459 +
   1.460 +    return true;
   1.461 +}
   1.462 +
   1.463 +static void
   1.464 +GeneratePrototypeGuards(JSContext *cx, IonScript *ion, MacroAssembler &masm, JSObject *obj,
   1.465 +                        JSObject *holder, Register objectReg, Register scratchReg,
   1.466 +                        Label *failures)
   1.467 +{
    1.468 +    /* The guards here protect against the effects of TradeGuts(). If the prototype
    1.469 +     * chain is directly altered, TI will discard the jitcode, so we don't have to
    1.470 +     * worry about that case. Any other change to the holder, or the addition of a
    1.471 +     * shadowing property, will reshape the holder and thus fail the shape guard.
    1.472 +     */
   1.473 +    JS_ASSERT(obj != holder);
   1.474 +
   1.475 +    if (obj->hasUncacheableProto()) {
   1.476 +        // Note: objectReg and scratchReg may be the same register, so we cannot
   1.477 +        // use objectReg in the rest of this function.
   1.478 +        masm.loadPtr(Address(objectReg, JSObject::offsetOfType()), scratchReg);
   1.479 +        Address proto(scratchReg, types::TypeObject::offsetOfProto());
   1.480 +        masm.branchNurseryPtr(Assembler::NotEqual, proto,
   1.481 +                              ImmMaybeNurseryPtr(obj->getProto()), failures);
   1.482 +    }
   1.483 +
   1.484 +    JSObject *pobj = IsCacheableDOMProxy(obj)
   1.485 +                     ? obj->getTaggedProto().toObjectOrNull()
   1.486 +                     : obj->getProto();
   1.487 +    if (!pobj)
   1.488 +        return;
   1.489 +    while (pobj != holder) {
   1.490 +        if (pobj->hasUncacheableProto()) {
   1.491 +            JS_ASSERT(!pobj->hasSingletonType());
   1.492 +            masm.moveNurseryPtr(ImmMaybeNurseryPtr(pobj), scratchReg);
   1.493 +            Address objType(scratchReg, JSObject::offsetOfType());
   1.494 +            masm.branchPtr(Assembler::NotEqual, objType, ImmGCPtr(pobj->type()), failures);
   1.495 +        }
   1.496 +        pobj = pobj->getProto();
   1.497 +    }
   1.498 +}
   1.499 +
   1.500 +static bool
   1.501 +IsCacheableProtoChain(JSObject *obj, JSObject *holder)
   1.502 +{
   1.503 +    while (obj != holder) {
   1.504 +        /*
   1.505 +         * We cannot assume that we find the holder object on the prototype
   1.506 +         * chain and must check for null proto. The prototype chain can be
   1.507 +         * altered during the lookupProperty call.
   1.508 +         */
   1.509 +        JSObject *proto = obj->getProto();
   1.510 +        if (!proto || !proto->isNative())
   1.511 +            return false;
   1.512 +        obj = proto;
   1.513 +    }
   1.514 +    return true;
   1.515 +}
   1.516 +
   1.517 +static bool
   1.518 +IsCacheableGetPropReadSlot(JSObject *obj, JSObject *holder, Shape *shape)
   1.519 +{
   1.520 +    if (!shape || !IsCacheableProtoChain(obj, holder))
   1.521 +        return false;
   1.522 +
   1.523 +    if (!shape->hasSlot() || !shape->hasDefaultGetter())
   1.524 +        return false;
   1.525 +
   1.526 +    return true;
   1.527 +}
   1.528 +
   1.529 +static bool
   1.530 +IsCacheableNoProperty(JSObject *obj, JSObject *holder, Shape *shape, jsbytecode *pc,
   1.531 +                      const TypedOrValueRegister &output)
   1.532 +{
   1.533 +    if (shape)
   1.534 +        return false;
   1.535 +
   1.536 +    JS_ASSERT(!holder);
   1.537 +
   1.538 +    // Just because we didn't find the property on the object doesn't mean it
   1.539 +    // won't magically appear through various engine hacks:
   1.540 +    if (obj->getClass()->getProperty && obj->getClass()->getProperty != JS_PropertyStub)
   1.541 +        return false;
   1.542 +
    1.543 +    // Don't generate missing-property ICs if we skipped a non-native object, as
    1.544 +    // lookups may extend beyond the prototype chain (e.g. for DOM
    1.545 +    // proxies).
   1.546 +    JSObject *obj2 = obj;
   1.547 +    while (obj2) {
   1.548 +        if (!obj2->isNative())
   1.549 +            return false;
   1.550 +        obj2 = obj2->getProto();
   1.551 +    }
   1.552 +
    1.553 +    // The pc is nullptr if the cache is idempotent. We cannot share missing
    1.554 +    // properties between caches because TI can only try to prove that a type is
    1.555 +    // contained; it does not attempt to check whether something does not exist.
    1.556 +    // So the inferred type of the getprop would be missing and would not contain
    1.557 +    // undefined, as expected for missing properties.
   1.558 +    if (!pc)
   1.559 +        return false;
   1.560 +
   1.561 +#if JS_HAS_NO_SUCH_METHOD
    1.562 +    // The __noSuchMethod__ hook may substitute a valid method. Since o.m()
    1.563 +    // will probably be an error if o.m is missing, just mark all missing
    1.564 +    // callprops as uncacheable.
   1.565 +    if (JSOp(*pc) == JSOP_CALLPROP ||
   1.566 +        JSOp(*pc) == JSOP_CALLELEM)
   1.567 +    {
   1.568 +        return false;
   1.569 +    }
   1.570 +#endif
   1.571 +
   1.572 +    // TI has not yet monitored an Undefined value. The fallback path will
   1.573 +    // monitor and invalidate the script.
   1.574 +    if (!output.hasValue())
   1.575 +        return false;
   1.576 +
   1.577 +    return true;
   1.578 +}
   1.579 +
   1.580 +static bool
   1.581 +IsOptimizableArgumentsObjectForLength(JSObject *obj)
   1.582 +{
   1.583 +    if (!obj->is<ArgumentsObject>())
   1.584 +        return false;
   1.585 +
   1.586 +    if (obj->as<ArgumentsObject>().hasOverriddenLength())
   1.587 +        return false;
   1.588 +
   1.589 +    return true;
   1.590 +}
   1.591 +
   1.592 +static bool
   1.593 +IsOptimizableArgumentsObjectForGetElem(JSObject *obj, Value idval)
   1.594 +{
   1.595 +    if (!IsOptimizableArgumentsObjectForLength(obj))
   1.596 +        return false;
   1.597 +
   1.598 +    ArgumentsObject &argsObj = obj->as<ArgumentsObject>();
   1.599 +
   1.600 +    if (argsObj.isAnyElementDeleted())
   1.601 +        return false;
   1.602 +
   1.603 +    if (!idval.isInt32())
   1.604 +        return false;
   1.605 +
   1.606 +    int32_t idint = idval.toInt32();
   1.607 +    if (idint < 0 || static_cast<uint32_t>(idint) >= argsObj.initialLength())
   1.608 +        return false;
   1.609 +
   1.610 +    return true;
   1.611 +}
   1.612 +
   1.613 +static bool
   1.614 +IsCacheableGetPropCallNative(JSObject *obj, JSObject *holder, Shape *shape)
   1.615 +{
   1.616 +    if (!shape || !IsCacheableProtoChain(obj, holder))
   1.617 +        return false;
   1.618 +
   1.619 +    if (!shape->hasGetterValue() || !shape->getterValue().isObject())
   1.620 +        return false;
   1.621 +
   1.622 +    if (!shape->getterValue().toObject().is<JSFunction>())
   1.623 +        return false;
   1.624 +
   1.625 +    JSFunction& getter = shape->getterValue().toObject().as<JSFunction>();
   1.626 +    if (!getter.isNative())
   1.627 +        return false;
   1.628 +
   1.629 +    // Check for a getter that has jitinfo and whose jitinfo says it's
   1.630 +    // OK with both inner and outer objects.
   1.631 +    if (getter.jitInfo() && !getter.jitInfo()->needsOuterizedThisObject())
   1.632 +        return true;
   1.633 +
   1.634 +    // For getters that need an outerized this object, don't cache if
   1.635 +    // obj has an outerObject hook, since our cache will pass obj
   1.636 +    // itself without outerizing.
   1.637 +    return !obj->getClass()->ext.outerObject;
   1.638 +}
   1.639 +
   1.640 +static bool
   1.641 +IsCacheableGetPropCallPropertyOp(JSObject *obj, JSObject *holder, Shape *shape)
   1.642 +{
   1.643 +    if (!shape || !IsCacheableProtoChain(obj, holder))
   1.644 +        return false;
   1.645 +
   1.646 +    if (shape->hasSlot() || shape->hasGetterValue() || shape->hasDefaultGetter())
   1.647 +        return false;
   1.648 +
   1.649 +    return true;
   1.650 +}
   1.651 +
   1.652 +static inline void
   1.653 +EmitLoadSlot(MacroAssembler &masm, JSObject *holder, Shape *shape, Register holderReg,
   1.654 +             TypedOrValueRegister output, Register scratchReg)
   1.655 +{
   1.656 +    JS_ASSERT(holder);
   1.657 +    if (holder->isFixedSlot(shape->slot())) {
   1.658 +        Address addr(holderReg, JSObject::getFixedSlotOffset(shape->slot()));
   1.659 +        masm.loadTypedOrValue(addr, output);
   1.660 +    } else {
   1.661 +        masm.loadPtr(Address(holderReg, JSObject::offsetOfSlots()), scratchReg);
   1.662 +
   1.663 +        Address addr(scratchReg, holder->dynamicSlotIndex(shape->slot()) * sizeof(Value));
   1.664 +        masm.loadTypedOrValue(addr, output);
   1.665 +    }
   1.666 +}
   1.667 +
   1.668 +static void
   1.669 +GenerateDOMProxyChecks(JSContext *cx, MacroAssembler &masm, JSObject *obj,
   1.670 +                       PropertyName *name, Register object, Label *stubFailure,
   1.671 +                       bool skipExpandoCheck = false)
   1.672 +{
   1.673 +    JS_ASSERT(IsCacheableDOMProxy(obj));
   1.674 +
   1.675 +    // Guard the following:
   1.676 +    //      1. The object is a DOMProxy.
   1.677 +    //      2. The object does not have expando properties, or has an expando
   1.678 +    //          which is known to not have the desired property.
   1.679 +    Address handlerAddr(object, ProxyObject::offsetOfHandler());
   1.680 +    Address expandoSlotAddr(object, JSObject::getFixedSlotOffset(GetDOMProxyExpandoSlot()));
   1.681 +
   1.682 +    // Check that object is a DOMProxy.
   1.683 +    masm.branchPrivatePtr(Assembler::NotEqual, handlerAddr,
   1.684 +                          ImmPtr(obj->as<ProxyObject>().handler()), stubFailure);
   1.685 +
   1.686 +    if (skipExpandoCheck)
   1.687 +        return;
   1.688 +
    1.689 +    // For the remaining code, we need to reserve some registers to load a value.
    1.690 +    // This is ugly, but unavoidable.
   1.691 +    RegisterSet domProxyRegSet(RegisterSet::All());
   1.692 +    domProxyRegSet.take(AnyRegister(object));
   1.693 +    ValueOperand tempVal = domProxyRegSet.takeValueOperand();
   1.694 +    masm.pushValue(tempVal);
   1.695 +
   1.696 +    Label failDOMProxyCheck;
   1.697 +    Label domProxyOk;
   1.698 +
   1.699 +    Value expandoVal = obj->getFixedSlot(GetDOMProxyExpandoSlot());
   1.700 +    masm.loadValue(expandoSlotAddr, tempVal);
   1.701 +
   1.702 +    if (!expandoVal.isObject() && !expandoVal.isUndefined()) {
   1.703 +        masm.branchTestValue(Assembler::NotEqual, tempVal, expandoVal, &failDOMProxyCheck);
   1.704 +
   1.705 +        ExpandoAndGeneration *expandoAndGeneration = (ExpandoAndGeneration*)expandoVal.toPrivate();
   1.706 +        masm.movePtr(ImmPtr(expandoAndGeneration), tempVal.scratchReg());
   1.707 +
   1.708 +        masm.branch32(Assembler::NotEqual,
   1.709 +                      Address(tempVal.scratchReg(),
   1.710 +                              ExpandoAndGeneration::offsetOfGeneration()),
   1.711 +                      Imm32(expandoAndGeneration->generation),
   1.712 +                      &failDOMProxyCheck);
   1.713 +
   1.714 +        expandoVal = expandoAndGeneration->expando;
   1.715 +        masm.loadValue(Address(tempVal.scratchReg(),
   1.716 +                               ExpandoAndGeneration::offsetOfExpando()),
   1.717 +                       tempVal);
   1.718 +    }
   1.719 +
   1.720 +    // If the incoming object does not have an expando object then we're sure we're not
   1.721 +    // shadowing.
   1.722 +    masm.branchTestUndefined(Assembler::Equal, tempVal, &domProxyOk);
   1.723 +
   1.724 +    if (expandoVal.isObject()) {
   1.725 +        JS_ASSERT(!expandoVal.toObject().nativeContains(cx, name));
   1.726 +
   1.727 +        // Reference object has an expando object that doesn't define the name. Check that
   1.728 +        // the incoming object has an expando object with the same shape.
   1.729 +        masm.branchTestObject(Assembler::NotEqual, tempVal, &failDOMProxyCheck);
   1.730 +        masm.extractObject(tempVal, tempVal.scratchReg());
   1.731 +        masm.branchPtr(Assembler::Equal,
   1.732 +                       Address(tempVal.scratchReg(), JSObject::offsetOfShape()),
   1.733 +                       ImmGCPtr(expandoVal.toObject().lastProperty()),
   1.734 +                       &domProxyOk);
   1.735 +    }
   1.736 +
   1.737 +    // Failure case: restore the tempVal registers and jump to failures.
   1.738 +    masm.bind(&failDOMProxyCheck);
   1.739 +    masm.popValue(tempVal);
   1.740 +    masm.jump(stubFailure);
   1.741 +
   1.742 +    // Success case: restore the tempval and proceed.
   1.743 +    masm.bind(&domProxyOk);
   1.744 +    masm.popValue(tempVal);
   1.745 +}
   1.746 +
   1.747 +static void
   1.748 +GenerateReadSlot(JSContext *cx, IonScript *ion, MacroAssembler &masm,
   1.749 +                 IonCache::StubAttacher &attacher, JSObject *obj, JSObject *holder,
   1.750 +                 Shape *shape, Register object, TypedOrValueRegister output,
   1.751 +                 Label *failures = nullptr)
   1.752 +{
   1.753 +    JS_ASSERT(obj->isNative());
   1.754 +    // If there's a single jump to |failures|, we can patch the shape guard
   1.755 +    // jump directly. Otherwise, jump to the end of the stub, so there's a
   1.756 +    // common point to patch.
   1.757 +    bool multipleFailureJumps = (obj != holder) || (failures != nullptr && failures->used());
   1.758 +
   1.759 +    // If we have multiple failure jumps but didn't get a label from the
   1.760 +    // outside, make one ourselves.
   1.761 +    Label failures_;
   1.762 +    if (multipleFailureJumps && !failures)
   1.763 +        failures = &failures_;
   1.764 +
   1.765 +    // Guard on the shape of the object.
   1.766 +    attacher.branchNextStubOrLabel(masm, Assembler::NotEqual,
   1.767 +                                   Address(object, JSObject::offsetOfShape()),
   1.768 +                                   ImmGCPtr(obj->lastProperty()),
   1.769 +                                   failures);
   1.770 +
   1.771 +    // If we need a scratch register, use either an output register or the
   1.772 +    // object register. After this point, we cannot jump directly to
   1.773 +    // |failures| since we may still have to pop the object register.
   1.774 +    bool restoreScratch = false;
   1.775 +    Register scratchReg = Register::FromCode(0); // Quell compiler warning.
   1.776 +
   1.777 +    if (obj != holder || !holder->isFixedSlot(shape->slot())) {
   1.778 +        if (output.hasValue()) {
   1.779 +            scratchReg = output.valueReg().scratchReg();
   1.780 +        } else if (output.type() == MIRType_Double) {
   1.781 +            scratchReg = object;
   1.782 +            masm.push(scratchReg);
   1.783 +            restoreScratch = true;
   1.784 +        } else {
   1.785 +            scratchReg = output.typedReg().gpr();
   1.786 +        }
   1.787 +    }
   1.788 +
   1.789 +    // Fast path: single failure jump, no prototype guards.
   1.790 +    if (!multipleFailureJumps) {
   1.791 +        EmitLoadSlot(masm, holder, shape, object, output, scratchReg);
   1.792 +        if (restoreScratch)
   1.793 +            masm.pop(scratchReg);
   1.794 +        attacher.jumpRejoin(masm);
   1.795 +        return;
   1.796 +    }
   1.797 +
   1.798 +    // Slow path: multiple jumps; generate prototype guards.
   1.799 +    Label prototypeFailures;
   1.800 +    Register holderReg;
   1.801 +    if (obj != holder) {
   1.802 +        // Note: this may clobber the object register if it's used as scratch.
   1.803 +        GeneratePrototypeGuards(cx, ion, masm, obj, holder, object, scratchReg,
   1.804 +                                &prototypeFailures);
   1.805 +
   1.806 +        if (holder) {
   1.807 +            // Guard on the holder's shape.
   1.808 +            holderReg = scratchReg;
   1.809 +            masm.moveNurseryPtr(ImmMaybeNurseryPtr(holder), holderReg);
   1.810 +            masm.branchPtr(Assembler::NotEqual,
   1.811 +                           Address(holderReg, JSObject::offsetOfShape()),
   1.812 +                           ImmGCPtr(holder->lastProperty()),
   1.813 +                           &prototypeFailures);
   1.814 +        } else {
   1.815 +            // The property does not exist. Guard on everything in the
   1.816 +            // prototype chain.
   1.817 +            JSObject *proto = obj->getTaggedProto().toObjectOrNull();
   1.818 +            Register lastReg = object;
   1.819 +            JS_ASSERT(scratchReg != object);
   1.820 +            while (proto) {
   1.821 +                masm.loadObjProto(lastReg, scratchReg);
   1.822 +
   1.823 +                // Guard the shape of the current prototype.
   1.824 +                masm.branchPtr(Assembler::NotEqual,
   1.825 +                               Address(scratchReg, JSObject::offsetOfShape()),
   1.826 +                               ImmGCPtr(proto->lastProperty()),
   1.827 +                               &prototypeFailures);
   1.828 +
   1.829 +                proto = proto->getProto();
   1.830 +                lastReg = scratchReg;
   1.831 +            }
   1.832 +
   1.833 +            holderReg = InvalidReg;
   1.834 +        }
   1.835 +    } else {
   1.836 +        holderReg = object;
   1.837 +    }
   1.838 +
   1.839 +    // Slot access.
   1.840 +    if (holder)
   1.841 +        EmitLoadSlot(masm, holder, shape, holderReg, output, scratchReg);
   1.842 +    else
   1.843 +        masm.moveValue(UndefinedValue(), output.valueReg());
   1.844 +
   1.845 +    // Restore scratch on success.
   1.846 +    if (restoreScratch)
   1.847 +        masm.pop(scratchReg);
   1.848 +
   1.849 +    attacher.jumpRejoin(masm);
   1.850 +
   1.851 +    masm.bind(&prototypeFailures);
   1.852 +    if (restoreScratch)
   1.853 +        masm.pop(scratchReg);
   1.854 +    masm.bind(failures);
   1.855 +
   1.856 +    attacher.jumpNextStub(masm);
   1.858 +}
   1.859 +
   1.860 +static bool
   1.861 +EmitGetterCall(JSContext *cx, MacroAssembler &masm,
   1.862 +               IonCache::StubAttacher &attacher, JSObject *obj,
   1.863 +               JSObject *holder, HandleShape shape,
   1.864 +               RegisterSet liveRegs, Register object,
   1.865 +               Register scratchReg, TypedOrValueRegister output,
   1.866 +               void *returnAddr)
   1.867 +{
   1.868 +    JS_ASSERT(output.hasValue());
   1.869 +    MacroAssembler::AfterICSaveLive aic = masm.icSaveLive(liveRegs);
   1.870 +
    1.871 +    // Remaining registers should basically be free, but we still need to use
    1.872 +    // |object|, so leave it alone.
   1.873 +    RegisterSet regSet(RegisterSet::All());
   1.874 +    regSet.take(AnyRegister(object));
   1.875 +
    1.876 +    // This is a slower stub path, and we're going to be doing a call anyway, so we
    1.877 +    // don't need to try so hard to avoid using the stack. Scratch regs are simply
    1.878 +    // taken from the register set excluding the input; their current values are
    1.879 +    // saved on the stack and restored when we're done with them.
   1.880 +    scratchReg               = regSet.takeGeneral();
   1.881 +    Register argJSContextReg = regSet.takeGeneral();
   1.882 +    Register argUintNReg     = regSet.takeGeneral();
   1.883 +    Register argVpReg        = regSet.takeGeneral();
   1.884 +
   1.885 +    // Shape has a getter function.
   1.886 +    bool callNative = IsCacheableGetPropCallNative(obj, holder, shape);
   1.887 +    JS_ASSERT_IF(!callNative, IsCacheableGetPropCallPropertyOp(obj, holder, shape));
   1.888 +
   1.889 +    if (callNative) {
   1.890 +        JS_ASSERT(shape->hasGetterValue() && shape->getterValue().isObject() &&
   1.891 +                  shape->getterValue().toObject().is<JSFunction>());
   1.892 +        JSFunction *target = &shape->getterValue().toObject().as<JSFunction>();
   1.893 +
   1.894 +        JS_ASSERT(target);
   1.895 +        JS_ASSERT(target->isNative());
   1.896 +
   1.897 +        // Native functions have the signature:
   1.898 +        //  bool (*)(JSContext *, unsigned, Value *vp)
   1.899 +        // Where vp[0] is space for an outparam, vp[1] is |this|, and vp[2] onward
   1.900 +        // are the function arguments.
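          +        //
          +        // A sketch of the vp array built on the stack below (argc == 0, so there
          +        // are no argument slots; shown here for illustration only):
          +        //
          +        //   vp[0] <- ObjectValue(*target)   (callee; doubles as the outparam slot)
          +        //   vp[1] <- |this| (the object)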
   1.901 +
   1.902 +        // Construct vp array:
   1.903 +        // Push object value for |this|
   1.904 +        masm.Push(TypedOrValueRegister(MIRType_Object, AnyRegister(object)));
   1.905 +        // Push callee/outparam.
   1.906 +        masm.Push(ObjectValue(*target));
   1.907 +
   1.908 +        // Preload arguments into registers.
   1.909 +        masm.loadJSContext(argJSContextReg);
   1.910 +        masm.move32(Imm32(0), argUintNReg);
   1.911 +        masm.movePtr(StackPointer, argVpReg);
   1.912 +
   1.913 +        // Push marking data for later use.
   1.914 +        masm.Push(argUintNReg);
   1.915 +        attacher.pushStubCodePointer(masm);
   1.916 +
   1.917 +        if (!masm.icBuildOOLFakeExitFrame(returnAddr, aic))
   1.918 +            return false;
   1.919 +        masm.enterFakeExitFrame(ION_FRAME_OOL_NATIVE);
   1.920 +
   1.921 +        // Construct and execute call.
   1.922 +        masm.setupUnalignedABICall(3, scratchReg);
   1.923 +        masm.passABIArg(argJSContextReg);
   1.924 +        masm.passABIArg(argUintNReg);
   1.925 +        masm.passABIArg(argVpReg);
   1.926 +        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, target->native()));
   1.927 +
   1.928 +        // Test for failure.
   1.929 +        masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());
   1.930 +
   1.931 +        // Load the outparam vp[0] into output register(s).
   1.932 +        Address outparam(StackPointer, IonOOLNativeExitFrameLayout::offsetOfResult());
   1.933 +        masm.loadTypedOrValue(outparam, output);
   1.934 +
   1.935 +        // masm.leaveExitFrame & pop locals
   1.936 +        masm.adjustStack(IonOOLNativeExitFrameLayout::Size(0));
   1.937 +    } else {
   1.938 +        Register argObjReg       = argUintNReg;
   1.939 +        Register argIdReg        = regSet.takeGeneral();
   1.940 +
   1.941 +        PropertyOp target = shape->getterOp();
   1.942 +        JS_ASSERT(target);
   1.943 +
   1.944 +        // Push stubCode for marking.
   1.945 +        attacher.pushStubCodePointer(masm);
   1.946 +
   1.947 +        // JSPropertyOp: bool fn(JSContext *cx, HandleObject obj, HandleId id, MutableHandleValue vp)
   1.948 +
   1.949 +        // Push args on stack first so we can take pointers to make handles.
   1.950 +        masm.Push(UndefinedValue());
   1.951 +        masm.movePtr(StackPointer, argVpReg);
   1.952 +
    1.953 +        // Push the canonical jsid from the shape instead of the PropertyName.
   1.954 +        masm.Push(shape->propid(), scratchReg);
   1.955 +        masm.movePtr(StackPointer, argIdReg);
   1.956 +
   1.957 +        masm.Push(object);
   1.958 +        masm.movePtr(StackPointer, argObjReg);
   1.959 +
   1.960 +        masm.loadJSContext(argJSContextReg);
   1.961 +
   1.962 +        if (!masm.icBuildOOLFakeExitFrame(returnAddr, aic))
   1.963 +            return false;
   1.964 +        masm.enterFakeExitFrame(ION_FRAME_OOL_PROPERTY_OP);
   1.965 +
   1.966 +        // Make the call.
   1.967 +        masm.setupUnalignedABICall(4, scratchReg);
   1.968 +        masm.passABIArg(argJSContextReg);
   1.969 +        masm.passABIArg(argObjReg);
   1.970 +        masm.passABIArg(argIdReg);
   1.971 +        masm.passABIArg(argVpReg);
   1.972 +        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, target));
   1.973 +
   1.974 +        // Test for failure.
   1.975 +        masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());
   1.976 +
   1.977 +        // Load the outparam vp[0] into output register(s).
   1.978 +        Address outparam(StackPointer, IonOOLPropertyOpExitFrameLayout::offsetOfResult());
   1.979 +        masm.loadTypedOrValue(outparam, output);
   1.980 +
   1.981 +        // masm.leaveExitFrame & pop locals.
   1.982 +        masm.adjustStack(IonOOLPropertyOpExitFrameLayout::Size());
   1.983 +    }
   1.984 +
   1.985 +    masm.icRestoreLive(liveRegs, aic);
   1.986 +    return true;
   1.987 +}
   1.988 +
   1.989 +static bool
   1.990 +GenerateCallGetter(JSContext *cx, IonScript *ion, MacroAssembler &masm,
   1.991 +                   IonCache::StubAttacher &attacher, JSObject *obj, PropertyName *name,
   1.992 +                   JSObject *holder, HandleShape shape, RegisterSet &liveRegs, Register object,
   1.993 +                   TypedOrValueRegister output, void *returnAddr, Label *failures = nullptr)
   1.994 +{
   1.995 +    JS_ASSERT(obj->isNative());
   1.996 +    JS_ASSERT(output.hasValue());
   1.997 +
    1.998 +    // Use the passed-in label if there was one. Otherwise, we'll have to make our own.
   1.999 +    Label stubFailure;
  1.1000 +    failures = failures ? failures : &stubFailure;
  1.1001 +
  1.1002 +    // Initial shape check.
  1.1003 +    masm.branchPtr(Assembler::NotEqual, Address(object, JSObject::offsetOfShape()),
  1.1004 +                   ImmGCPtr(obj->lastProperty()), failures);
  1.1005 +
  1.1006 +    Register scratchReg = output.valueReg().scratchReg();
  1.1007 +
  1.1008 +    // Note: this may clobber the object register if it's used as scratch.
  1.1009 +    if (obj != holder)
  1.1010 +        GeneratePrototypeGuards(cx, ion, masm, obj, holder, object, scratchReg, failures);
  1.1011 +
  1.1012 +    // Guard on the holder's shape.
  1.1013 +    Register holderReg = scratchReg;
  1.1014 +    masm.moveNurseryPtr(ImmMaybeNurseryPtr(holder), holderReg);
  1.1015 +    masm.branchPtr(Assembler::NotEqual,
  1.1016 +                   Address(holderReg, JSObject::offsetOfShape()),
  1.1017 +                   ImmGCPtr(holder->lastProperty()),
  1.1018 +                   failures);
  1.1019 +
   1.1020 +    // Now we're ready to make the getter call.
  1.1021 +    if (!EmitGetterCall(cx, masm, attacher, obj, holder, shape, liveRegs, object,
  1.1022 +                        scratchReg, output, returnAddr))
  1.1023 +        return false;
  1.1024 +
  1.1025 +    // Rejoin jump.
  1.1026 +    attacher.jumpRejoin(masm);
  1.1027 +
  1.1028 +    // Jump to next stub.
  1.1029 +    masm.bind(failures);
  1.1030 +    attacher.jumpNextStub(masm);
  1.1031 +
  1.1032 +    return true;
  1.1033 +}
  1.1034 +
  1.1035 +static bool
  1.1036 +GenerateArrayLength(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &attacher,
  1.1037 +                    JSObject *obj, Register object, TypedOrValueRegister output)
  1.1038 +{
  1.1039 +    JS_ASSERT(obj->is<ArrayObject>());
  1.1040 +
  1.1041 +    Label failures;
  1.1042 +
  1.1043 +    // Guard object is a dense array.
  1.1044 +    RootedShape shape(cx, obj->lastProperty());
  1.1045 +    if (!shape)
  1.1046 +        return false;
  1.1047 +    masm.branchTestObjShape(Assembler::NotEqual, object, shape, &failures);
  1.1048 +
  1.1049 +    // Load length.
  1.1050 +    Register outReg;
  1.1051 +    if (output.hasValue()) {
  1.1052 +        outReg = output.valueReg().scratchReg();
  1.1053 +    } else {
  1.1054 +        JS_ASSERT(output.type() == MIRType_Int32);
  1.1055 +        outReg = output.typedReg().gpr();
  1.1056 +    }
  1.1057 +
  1.1058 +    masm.loadPtr(Address(object, JSObject::offsetOfElements()), outReg);
  1.1059 +    masm.load32(Address(outReg, ObjectElements::offsetOfLength()), outReg);
  1.1060 +
  1.1061 +    // The length is an unsigned int, but the value encodes a signed int.
  1.1062 +    JS_ASSERT(object != outReg);
  1.1063 +    masm.branchTest32(Assembler::Signed, outReg, outReg, &failures);
  1.1064 +
  1.1065 +    if (output.hasValue())
  1.1066 +        masm.tagValue(JSVAL_TYPE_INT32, outReg, output.valueReg());
  1.1067 +
  1.1068 +    /* Success. */
  1.1069 +    attacher.jumpRejoin(masm);
  1.1070 +
  1.1071 +    /* Failure. */
  1.1072 +    masm.bind(&failures);
  1.1073 +    attacher.jumpNextStub(masm);
  1.1074 +
  1.1075 +    return true;
  1.1076 +}
  1.1077 +
  1.1078 +static void
  1.1079 +GenerateTypedArrayLength(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &attacher,
  1.1080 +                         JSObject *obj, Register object, TypedOrValueRegister output)
  1.1081 +{
  1.1082 +    JS_ASSERT(obj->is<TypedArrayObject>());
  1.1083 +
  1.1084 +    Label failures;
  1.1085 +
  1.1086 +    Register tmpReg;
  1.1087 +    if (output.hasValue()) {
  1.1088 +        tmpReg = output.valueReg().scratchReg();
  1.1089 +    } else {
  1.1090 +        JS_ASSERT(output.type() == MIRType_Int32);
  1.1091 +        tmpReg = output.typedReg().gpr();
  1.1092 +    }
  1.1093 +    JS_ASSERT(object != tmpReg);
  1.1094 +
  1.1095 +    // Implement the negated version of JSObject::isTypedArray predicate.
  1.1096 +    masm.loadObjClass(object, tmpReg);
  1.1097 +    masm.branchPtr(Assembler::Below, tmpReg, ImmPtr(&TypedArrayObject::classes[0]),
  1.1098 +                   &failures);
  1.1099 +    masm.branchPtr(Assembler::AboveOrEqual, tmpReg,
  1.1100 +                   ImmPtr(&TypedArrayObject::classes[ScalarTypeDescr::TYPE_MAX]),
  1.1101 +                   &failures);
  1.1102 +
  1.1103 +    // Load length.
  1.1104 +    masm.loadTypedOrValue(Address(object, TypedArrayObject::lengthOffset()), output);
  1.1105 +
  1.1106 +    /* Success. */
  1.1107 +    attacher.jumpRejoin(masm);
  1.1108 +
  1.1109 +    /* Failure. */
  1.1110 +    masm.bind(&failures);
  1.1111 +    attacher.jumpNextStub(masm);
  1.1112 +}
  1.1113 +
  1.1114 +static bool
  1.1115 +IsCacheableArrayLength(JSContext *cx, HandleObject obj, HandlePropertyName name,
  1.1116 +                       TypedOrValueRegister output)
  1.1117 +{
  1.1118 +    if (!obj->is<ArrayObject>())
  1.1119 +        return false;
  1.1120 +
  1.1121 +    if (output.type() != MIRType_Value && output.type() != MIRType_Int32) {
  1.1122 +        // The stub assumes that we always output Int32, so make sure our output
  1.1123 +        // is equipped to handle that.
  1.1124 +        return false;
  1.1125 +    }
  1.1126 +
  1.1127 +    return true;
  1.1128 +}
  1.1129 +
  1.1130 +template <class GetPropCache>
  1.1131 +static GetPropertyIC::NativeGetPropCacheability
  1.1132 +CanAttachNativeGetProp(typename GetPropCache::Context cx, const GetPropCache &cache,
  1.1133 +                       HandleObject obj, HandlePropertyName name,
  1.1134 +                       MutableHandleObject holder, MutableHandleShape shape,
  1.1135 +                       bool skipArrayLen = false)
  1.1136 +{
  1.1137 +    if (!obj || !obj->isNative())
  1.1138 +        return GetPropertyIC::CanAttachNone;
  1.1139 +
   1.1140 +    // The lookup needs to be universally pure; otherwise we risk calling hooks out
   1.1141 +    // of turn. We don't mind doing this even when purity isn't required, because we
   1.1142 +    // only miss out on shape hashification, which is only a temporary perf cost.
   1.1143 +    // The limits were arbitrarily set, anyway.
  1.1144 +    if (!LookupPropertyPure(obj, NameToId(name), holder.address(), shape.address()))
  1.1145 +        return GetPropertyIC::CanAttachNone;
  1.1146 +
  1.1147 +    RootedScript script(cx);
  1.1148 +    jsbytecode *pc;
  1.1149 +    cache.getScriptedLocation(&script, &pc);
  1.1150 +    if (IsCacheableGetPropReadSlot(obj, holder, shape) ||
  1.1151 +        IsCacheableNoProperty(obj, holder, shape, pc, cache.output()))
  1.1152 +    {
  1.1153 +        return GetPropertyIC::CanAttachReadSlot;
  1.1154 +    }
  1.1155 +
   1.1156 +    // |length| is a non-configurable getter property on ArrayObjects. Any time this
   1.1157 +    // check would have passed, we can install a getter stub instead. Allow callers
   1.1158 +    // to make that decision themselves with skipArrayLen.
  1.1159 +    if (!skipArrayLen && cx->names().length == name && cache.allowArrayLength(cx, obj) &&
  1.1160 +        IsCacheableArrayLength(cx, obj, name, cache.output()))
  1.1161 +    {
   1.1162 +        // The array length property is non-configurable, which means that checking
   1.1163 +        // the class of the object and the name of the property is enough, and that
   1.1164 +        // we don't need to worry about monitoring, since we know the return type
   1.1165 +        // statically.
  1.1166 +        return GetPropertyIC::CanAttachArrayLength;
  1.1167 +    }
  1.1168 +
  1.1169 +    // IonBuilder guarantees that it's impossible to generate a GetPropertyIC with
  1.1170 +    // allowGetters() true and cache.output().hasValue() false. If this isn't true,
  1.1171 +    // we will quickly assert during stub generation.
  1.1172 +    if (cache.allowGetters() &&
  1.1173 +        (IsCacheableGetPropCallNative(obj, holder, shape) ||
  1.1174 +         IsCacheableGetPropCallPropertyOp(obj, holder, shape)))
  1.1175 +    {
   1.1176 +        // Don't enable the getter call if the cache is parallel or idempotent,
   1.1177 +        // since getters can be effectful. This is handled by allowGetters().
  1.1178 +        return GetPropertyIC::CanAttachCallGetter;
  1.1179 +    }
  1.1180 +
  1.1181 +    return GetPropertyIC::CanAttachNone;
  1.1182 +}
  1.1183 +
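          +// An idempotent cache may cover several bytecode locations. Only allow the
          +// array-length stub, which produces an Int32, if every covered location has
          +// already observed Int32 in its bytecode type set; otherwise the stub could
          +// produce a result those type sets do not account for.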
  1.1184 +bool
  1.1185 +GetPropertyIC::allowArrayLength(Context cx, HandleObject obj) const
  1.1186 +{
  1.1187 +    if (!idempotent())
  1.1188 +        return true;
  1.1189 +
  1.1190 +    uint32_t locationIndex, numLocations;
  1.1191 +    getLocationInfo(&locationIndex, &numLocations);
  1.1192 +
  1.1193 +    IonScript *ion = GetTopIonJSScript(cx)->ionScript();
  1.1194 +    CacheLocation *locs = ion->getCacheLocs(locationIndex);
  1.1195 +    for (size_t i = 0; i < numLocations; i++) {
  1.1196 +        CacheLocation &curLoc = locs[i];
  1.1197 +        types::StackTypeSet *bcTypes =
  1.1198 +            types::TypeScript::BytecodeTypes(curLoc.script, curLoc.pc);
  1.1199 +
  1.1200 +        if (!bcTypes->hasType(types::Type::Int32Type()))
  1.1201 +            return false;
  1.1202 +    }
  1.1203 +
  1.1204 +    return true;
  1.1205 +}
  1.1206 +
  1.1207 +bool
  1.1208 +GetPropertyIC::tryAttachNative(JSContext *cx, IonScript *ion, HandleObject obj,
  1.1209 +                               HandlePropertyName name, void *returnAddr, bool *emitted)
  1.1210 +{
  1.1211 +    JS_ASSERT(canAttachStub());
  1.1212 +    JS_ASSERT(!*emitted);
  1.1213 +
  1.1214 +    RootedShape shape(cx);
  1.1215 +    RootedObject holder(cx);
  1.1216 +
  1.1217 +    NativeGetPropCacheability type =
  1.1218 +        CanAttachNativeGetProp(cx, *this, obj, name, &holder, &shape);
  1.1219 +    if (type == CanAttachNone)
  1.1220 +        return true;
  1.1221 +
  1.1222 +    *emitted = true;
  1.1223 +
  1.1224 +    MacroAssembler masm(cx, ion, script_, pc_);
  1.1225 +
  1.1226 +    RepatchStubAppender attacher(*this);
  1.1227 +    const char *attachKind;
  1.1228 +
  1.1229 +    switch (type) {
  1.1230 +      case CanAttachReadSlot:
  1.1231 +        GenerateReadSlot(cx, ion, masm, attacher, obj, holder,
  1.1232 +                         shape, object(), output());
  1.1233 +        attachKind = idempotent() ? "idempotent reading"
  1.1234 +                                    : "non idempotent reading";
  1.1235 +        break;
  1.1236 +      case CanAttachCallGetter:
  1.1237 +        if (!GenerateCallGetter(cx, ion, masm, attacher, obj, name, holder, shape,
  1.1238 +                                liveRegs_, object(), output(), returnAddr))
  1.1239 +        {
  1.1240 +            return false;
  1.1241 +        }
  1.1242 +        attachKind = "getter call";
  1.1243 +        break;
  1.1244 +      case CanAttachArrayLength:
  1.1245 +        if (!GenerateArrayLength(cx, masm, attacher, obj, object(), output()))
  1.1246 +            return false;
  1.1247 +
  1.1248 +        attachKind = "array length";
  1.1249 +        break;
  1.1250 +      default:
  1.1251 +        MOZ_ASSUME_UNREACHABLE("Bad NativeGetPropCacheability");
  1.1252 +    }
  1.1253 +    return linkAndAttachStub(cx, masm, attacher, ion, attachKind);
  1.1254 +}
  1.1255 +
  1.1256 +bool
  1.1257 +GetPropertyIC::tryAttachTypedArrayLength(JSContext *cx, IonScript *ion, HandleObject obj,
  1.1258 +                                         HandlePropertyName name, bool *emitted)
  1.1259 +{
  1.1260 +    JS_ASSERT(canAttachStub());
  1.1261 +    JS_ASSERT(!*emitted);
  1.1262 +
  1.1263 +    if (!obj->is<TypedArrayObject>())
  1.1264 +        return true;
  1.1265 +
  1.1266 +    if (cx->names().length != name)
  1.1267 +        return true;
  1.1268 +
  1.1269 +    if (hasTypedArrayLengthStub())
  1.1270 +        return true;
  1.1271 +
  1.1272 +    if (output().type() != MIRType_Value && output().type() != MIRType_Int32) {
  1.1273 +        // The next execution should cause an invalidation because the type
  1.1274 +        // does not fit.
  1.1275 +        return true;
  1.1276 +    }
  1.1277 +
  1.1278 +    if (idempotent())
  1.1279 +        return true;
  1.1280 +
  1.1281 +    *emitted = true;
  1.1282 +
  1.1283 +    MacroAssembler masm(cx, ion);
  1.1284 +    RepatchStubAppender attacher(*this);
  1.1285 +    GenerateTypedArrayLength(cx, masm, attacher, obj, object(), output());
  1.1286 +
  1.1287 +    JS_ASSERT(!hasTypedArrayLengthStub_);
  1.1288 +    hasTypedArrayLengthStub_ = true;
  1.1289 +    return linkAndAttachStub(cx, masm, attacher, ion, "typed array length");
  1.1290 +}
   1.1291 +
  1.1293 +static bool
  1.1294 +EmitCallProxyGet(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &attacher,
  1.1295 +                 PropertyName *name, RegisterSet liveRegs, Register object,
  1.1296 +                 TypedOrValueRegister output, jsbytecode *pc, void *returnAddr)
  1.1297 +{
  1.1298 +    JS_ASSERT(output.hasValue());
  1.1299 +    MacroAssembler::AfterICSaveLive aic = masm.icSaveLive(liveRegs);
  1.1300 +
   1.1301 +    // Remaining registers should be free, but we still need to use |object|,
   1.1302 +    // so leave it alone.
  1.1303 +    RegisterSet regSet(RegisterSet::All());
  1.1304 +    regSet.take(AnyRegister(object));
  1.1305 +
  1.1306 +    // Proxy::get(JSContext *cx, HandleObject proxy, HandleObject receiver, HandleId id,
  1.1307 +    //            MutableHandleValue vp)
  1.1308 +    Register argJSContextReg = regSet.takeGeneral();
  1.1309 +    Register argProxyReg     = regSet.takeGeneral();
  1.1310 +    Register argIdReg        = regSet.takeGeneral();
  1.1311 +    Register argVpReg        = regSet.takeGeneral();
  1.1312 +
  1.1313 +    Register scratch         = regSet.takeGeneral();
  1.1314 +
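          +    // JSOP_CALLPROP property reads go through Proxy::callProp rather than
          +    // Proxy::get, presumably so that call-specific handling (such as
          +    // __noSuchMethod__ lookup on the fetched callee) can be applied.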
  1.1315 +    void *getFunction = JSOp(*pc) == JSOP_CALLPROP                      ?
  1.1316 +                            JS_FUNC_TO_DATA_PTR(void *, Proxy::callProp) :
  1.1317 +                            JS_FUNC_TO_DATA_PTR(void *, Proxy::get);
  1.1318 +
  1.1319 +    // Push stubCode for marking.
  1.1320 +    attacher.pushStubCodePointer(masm);
  1.1321 +
  1.1322 +    // Push args on stack first so we can take pointers to make handles.
  1.1323 +    masm.Push(UndefinedValue());
  1.1324 +    masm.movePtr(StackPointer, argVpReg);
  1.1325 +
  1.1326 +    RootedId propId(cx, AtomToId(name));
  1.1327 +    masm.Push(propId, scratch);
  1.1328 +    masm.movePtr(StackPointer, argIdReg);
  1.1329 +
   1.1330 +    // Push the object and the receiver.  Both are the same, so a Handle to one
   1.1331 +    // is equivalent to a handle to the other.
  1.1332 +    masm.Push(object);
  1.1333 +    masm.Push(object);
  1.1334 +    masm.movePtr(StackPointer, argProxyReg);
  1.1335 +
  1.1336 +    masm.loadJSContext(argJSContextReg);
  1.1337 +
  1.1338 +    if (!masm.icBuildOOLFakeExitFrame(returnAddr, aic))
  1.1339 +        return false;
  1.1340 +    masm.enterFakeExitFrame(ION_FRAME_OOL_PROXY);
  1.1341 +
  1.1342 +    // Make the call.
  1.1343 +    masm.setupUnalignedABICall(5, scratch);
  1.1344 +    masm.passABIArg(argJSContextReg);
  1.1345 +    masm.passABIArg(argProxyReg);
  1.1346 +    masm.passABIArg(argProxyReg);
  1.1347 +    masm.passABIArg(argIdReg);
  1.1348 +    masm.passABIArg(argVpReg);
  1.1349 +    masm.callWithABI(getFunction);
  1.1350 +
  1.1351 +    // Test for failure.
  1.1352 +    masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());
  1.1353 +
  1.1354 +    // Load the outparam vp[0] into output register(s).
  1.1355 +    Address outparam(StackPointer, IonOOLProxyExitFrameLayout::offsetOfResult());
  1.1356 +    masm.loadTypedOrValue(outparam, output);
  1.1357 +
  1.1358 +    // masm.leaveExitFrame & pop locals
  1.1359 +    masm.adjustStack(IonOOLProxyExitFrameLayout::Size());
  1.1360 +
  1.1361 +    masm.icRestoreLive(liveRegs, aic);
  1.1362 +    return true;
  1.1363 +}
  1.1364 +
  1.1365 +bool
  1.1366 +GetPropertyIC::tryAttachDOMProxyShadowed(JSContext *cx, IonScript *ion,
  1.1367 +                                         HandleObject obj, void *returnAddr,
  1.1368 +                                         bool *emitted)
  1.1369 +{
  1.1370 +    JS_ASSERT(canAttachStub());
  1.1371 +    JS_ASSERT(!*emitted);
  1.1372 +    JS_ASSERT(IsCacheableDOMProxy(obj));
  1.1373 +    JS_ASSERT(monitoredResult());
  1.1374 +    JS_ASSERT(output().hasValue());
  1.1375 +
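          +    // An idempotent cache must not have observable side effects, but a proxy
          +    // trap can run arbitrary JS, so don't attach a proxy stub in that case.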
  1.1376 +    if (idempotent())
  1.1377 +        return true;
  1.1378 +
  1.1379 +    *emitted = true;
  1.1380 +
  1.1381 +    Label failures;
  1.1382 +    MacroAssembler masm(cx, ion, script_, pc_);
  1.1383 +    RepatchStubAppender attacher(*this);
  1.1384 +
  1.1385 +    // Guard on the shape of the object.
  1.1386 +    attacher.branchNextStubOrLabel(masm, Assembler::NotEqual,
  1.1387 +                                   Address(object(), JSObject::offsetOfShape()),
  1.1388 +                                   ImmGCPtr(obj->lastProperty()),
  1.1389 +                                   &failures);
  1.1390 +
  1.1391 +    // Make sure object is a DOMProxy
  1.1392 +    GenerateDOMProxyChecks(cx, masm, obj, name(), object(), &failures,
  1.1393 +                           /*skipExpandoCheck=*/true);
  1.1394 +
  1.1395 +    if (!EmitCallProxyGet(cx, masm, attacher, name(), liveRegs_, object(), output(),
  1.1396 +                          pc(), returnAddr))
  1.1397 +    {
  1.1398 +        return false;
  1.1399 +    }
  1.1400 +
  1.1401 +    // Success.
  1.1402 +    attacher.jumpRejoin(masm);
  1.1403 +
  1.1404 +    // Failure.
  1.1405 +    masm.bind(&failures);
  1.1406 +    attacher.jumpNextStub(masm);
  1.1407 +
   1.1408 +    return linkAndAttachStub(cx, masm, attacher, ion, "DOM proxy shadowed get");
  1.1409 +}
  1.1410 +
  1.1411 +bool
  1.1412 +GetPropertyIC::tryAttachDOMProxyUnshadowed(JSContext *cx, IonScript *ion, HandleObject obj,
  1.1413 +                                           HandlePropertyName name, bool resetNeeded,
  1.1414 +                                           void *returnAddr, bool *emitted)
  1.1415 +{
  1.1416 +    JS_ASSERT(canAttachStub());
  1.1417 +    JS_ASSERT(!*emitted);
  1.1418 +    JS_ASSERT(IsCacheableDOMProxy(obj));
  1.1419 +    JS_ASSERT(monitoredResult());
  1.1420 +    JS_ASSERT(output().hasValue());
  1.1421 +
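          +    // The property is not shadowed on the proxy itself, so look it up on the
          +    // proxy's prototype chain and either treat it like a native getprop or
          +    // fall back to a plain Proxy::get call.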
  1.1422 +    RootedObject checkObj(cx, obj->getTaggedProto().toObjectOrNull());
  1.1423 +    RootedObject holder(cx);
  1.1424 +    RootedShape shape(cx);
  1.1425 +
  1.1426 +    NativeGetPropCacheability canCache =
  1.1427 +        CanAttachNativeGetProp(cx, *this, checkObj, name, &holder, &shape,
  1.1428 +                               /* skipArrayLen = */true);
  1.1429 +    JS_ASSERT(canCache != CanAttachArrayLength);
  1.1430 +
  1.1431 +    if (canCache == CanAttachNone)
  1.1432 +        return true;
  1.1433 +
   1.1434 +    // Make sure we observe our invariants if we're going to deoptimize.
  1.1435 +    if (!holder && idempotent())
  1.1436 +        return true;
  1.1437 +
  1.1438 +    *emitted = true;
  1.1439 +
  1.1440 +    if (resetNeeded) {
  1.1441 +        // If we know that we have a DoesntShadowUnique object, then
  1.1442 +        // we reset the cache to clear out an existing IC for the object
  1.1443 +        // (if there is one). The generation is a constant in the generated
  1.1444 +        // code and we will not have the same generation again for this
  1.1445 +        // object, so the generation check in the existing IC would always
  1.1446 +        // fail anyway.
  1.1447 +        reset();
  1.1448 +    }
  1.1449 +
  1.1450 +    Label failures;
  1.1451 +    MacroAssembler masm(cx, ion, script_, pc_);
  1.1452 +    RepatchStubAppender attacher(*this);
  1.1453 +
  1.1454 +    // Guard on the shape of the object.
  1.1455 +    attacher.branchNextStubOrLabel(masm, Assembler::NotEqual,
  1.1456 +                                   Address(object(), JSObject::offsetOfShape()),
  1.1457 +                                   ImmGCPtr(obj->lastProperty()),
  1.1458 +                                   &failures);
  1.1459 +
   1.1460 +    // Make sure the object is a DOM proxy.
  1.1461 +    GenerateDOMProxyChecks(cx, masm, obj, name, object(), &failures);
  1.1462 +
  1.1463 +    if (holder) {
  1.1464 +        // Found the property on the prototype chain. Treat it like a native
  1.1465 +        // getprop.
  1.1466 +        Register scratchReg = output().valueReg().scratchReg();
  1.1467 +        GeneratePrototypeGuards(cx, ion, masm, obj, holder, object(), scratchReg, &failures);
  1.1468 +
  1.1469 +        // Rename scratch for clarity.
  1.1470 +        Register holderReg = scratchReg;
  1.1471 +
  1.1472 +        // Guard on the holder of the property
  1.1473 +        masm.moveNurseryPtr(ImmMaybeNurseryPtr(holder), holderReg);
  1.1474 +        masm.branchPtr(Assembler::NotEqual,
   1.1475 +                       Address(holderReg, JSObject::offsetOfShape()),
   1.1476 +                       ImmGCPtr(holder->lastProperty()),
   1.1477 +                       &failures);
  1.1478 +
  1.1479 +        if (canCache == CanAttachReadSlot) {
  1.1480 +            EmitLoadSlot(masm, holder, shape, holderReg, output(), scratchReg);
  1.1481 +        } else {
  1.1482 +            // EmitGetterCall() expects |obj| to be the object the property is
  1.1483 +            // on to do some checks. Since we actually looked at checkObj, and
  1.1484 +            // no extra guards will be generated, we can just pass that instead.
  1.1485 +            JS_ASSERT(canCache == CanAttachCallGetter);
  1.1486 +            JS_ASSERT(!idempotent());
  1.1487 +            if (!EmitGetterCall(cx, masm, attacher, checkObj, holder, shape, liveRegs_,
  1.1488 +                                object(), scratchReg, output(), returnAddr))
  1.1489 +            {
  1.1490 +                return false;
  1.1491 +            }
  1.1492 +        }
  1.1493 +    } else {
   1.1494 +        // The property was not found on the prototype chain. Deoptimize down to
   1.1495 +        // a proxy get call.
  1.1496 +        JS_ASSERT(!idempotent());
  1.1497 +        if (!EmitCallProxyGet(cx, masm, attacher, name, liveRegs_, object(), output(),
  1.1498 +                              pc(), returnAddr))
  1.1499 +        {
  1.1500 +            return false;
  1.1501 +        }
  1.1502 +    }
  1.1503 +
  1.1504 +    attacher.jumpRejoin(masm);
  1.1505 +    masm.bind(&failures);
  1.1506 +    attacher.jumpNextStub(masm);
  1.1507 +
  1.1508 +    return linkAndAttachStub(cx, masm, attacher, ion, "unshadowed proxy get");
  1.1509 +}
  1.1510 +
  1.1511 +bool
  1.1512 +GetPropertyIC::tryAttachProxy(JSContext *cx, IonScript *ion, HandleObject obj,
  1.1513 +                              HandlePropertyName name, void *returnAddr,
  1.1514 +                              bool *emitted)
  1.1515 +{
  1.1516 +    JS_ASSERT(canAttachStub());
  1.1517 +    JS_ASSERT(!*emitted);
  1.1518 +
  1.1519 +    if (!obj->is<ProxyObject>())
  1.1520 +        return true;
  1.1521 +
  1.1522 +    // TI can't be sure about our properties, so make sure anything
  1.1523 +    // we return can be monitored directly.
  1.1524 +    if (!monitoredResult())
  1.1525 +        return true;
  1.1526 +
  1.1527 +    // Skim off DOM proxies.
  1.1528 +    if (IsCacheableDOMProxy(obj)) {
  1.1529 +        RootedId id(cx, NameToId(name));
  1.1530 +        DOMProxyShadowsResult shadows = GetDOMProxyShadowsCheck()(cx, obj, id);
  1.1531 +        if (shadows == ShadowCheckFailed)
  1.1532 +            return false;
  1.1533 +        if (shadows == Shadows)
  1.1534 +            return tryAttachDOMProxyShadowed(cx, ion, obj, returnAddr, emitted);
  1.1535 +
  1.1536 +        return tryAttachDOMProxyUnshadowed(cx, ion, obj, name, shadows == DoesntShadowUnique,
  1.1537 +                                           returnAddr, emitted);
  1.1538 +    }
  1.1539 +
  1.1540 +    return tryAttachGenericProxy(cx, ion, obj, name, returnAddr, emitted);
  1.1541 +}
  1.1542 +
  1.1543 +static void
  1.1544 +GenerateProxyClassGuards(MacroAssembler &masm, Register object, Register scratchReg,
  1.1545 +                         Label *failures)
  1.1546 +{
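          +    // Branch to |failures| unless the object's Class has the JSCLASS_IS_PROXY
          +    // flag set, i.e. unless the object is some kind of proxy.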
  1.1547 +    masm.loadObjClass(object, scratchReg);
  1.1548 +    masm.branchTest32(Assembler::Zero,
  1.1549 +                      Address(scratchReg, Class::offsetOfFlags()),
  1.1550 +                      Imm32(JSCLASS_IS_PROXY), failures);
  1.1551 +}
  1.1552 +
  1.1553 +bool
  1.1554 +GetPropertyIC::tryAttachGenericProxy(JSContext *cx, IonScript *ion, HandleObject obj,
  1.1555 +                                     HandlePropertyName name, void *returnAddr,
  1.1556 +                                     bool *emitted)
  1.1557 +{
  1.1558 +    JS_ASSERT(canAttachStub());
  1.1559 +    JS_ASSERT(!*emitted);
  1.1560 +    JS_ASSERT(obj->is<ProxyObject>());
  1.1561 +    JS_ASSERT(monitoredResult());
  1.1562 +    JS_ASSERT(output().hasValue());
  1.1563 +
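          +    // A single generic stub per cache handles all remaining (non-DOM) proxies
          +    // by calling into the proxy trap via EmitCallProxyGet.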
  1.1564 +    if (hasGenericProxyStub())
  1.1565 +        return true;
  1.1566 +
  1.1567 +    if (idempotent())
  1.1568 +        return true;
  1.1569 +
  1.1570 +    *emitted = true;
  1.1571 +
  1.1572 +    Label failures;
  1.1573 +    MacroAssembler masm(cx, ion, script_, pc_);
  1.1574 +    RepatchStubAppender attacher(*this);
  1.1575 +
  1.1576 +    Register scratchReg = output().valueReg().scratchReg();
  1.1577 +
  1.1578 +    GenerateProxyClassGuards(masm, object(), scratchReg, &failures);
  1.1579 +
  1.1580 +    // Ensure that the incoming object is not a DOM proxy, so that we can get to
   1.1581 +    // the specialized stubs.
  1.1582 +    masm.branchTestProxyHandlerFamily(Assembler::Equal, object(), scratchReg,
  1.1583 +                                      GetDOMProxyHandlerFamily(), &failures);
  1.1584 +
  1.1585 +    if (!EmitCallProxyGet(cx, masm, attacher, name, liveRegs_, object(), output(),
  1.1586 +                          pc(), returnAddr))
  1.1587 +    {
  1.1588 +        return false;
  1.1589 +    }
  1.1590 +
  1.1591 +    attacher.jumpRejoin(masm);
  1.1592 +
  1.1593 +    masm.bind(&failures);
  1.1594 +    attacher.jumpNextStub(masm);
  1.1595 +
  1.1596 +    JS_ASSERT(!hasGenericProxyStub_);
  1.1597 +    hasGenericProxyStub_ = true;
  1.1598 +
   1.1599 +    return linkAndAttachStub(cx, masm, attacher, ion, "generic proxy get");
  1.1600 +}
  1.1601 +
  1.1602 +bool
  1.1603 +GetPropertyIC::tryAttachArgumentsLength(JSContext *cx, IonScript *ion, HandleObject obj,
  1.1604 +                                        HandlePropertyName name, bool *emitted)
  1.1605 +{
  1.1606 +    JS_ASSERT(canAttachStub());
  1.1607 +    JS_ASSERT(!*emitted);
  1.1608 +
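          +    // Specializes reads such as |arguments.length| on normal and strict
          +    // arguments objects, as long as the length property has not been overridden.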
  1.1609 +    if (name != cx->names().length)
  1.1610 +        return true;
  1.1611 +    if (!IsOptimizableArgumentsObjectForLength(obj))
  1.1612 +        return true;
  1.1613 +
  1.1614 +    MIRType outputType = output().type();
  1.1615 +    if (!(outputType == MIRType_Value || outputType == MIRType_Int32))
  1.1616 +        return true;
  1.1617 +
  1.1618 +    if (hasArgumentsLengthStub(obj->is<StrictArgumentsObject>()))
  1.1619 +        return true;
  1.1620 +
  1.1621 +    *emitted = true;
  1.1622 +
  1.1623 +    JS_ASSERT(!idempotent());
  1.1624 +
  1.1625 +    Label failures;
  1.1626 +    MacroAssembler masm(cx, ion);
  1.1627 +    RepatchStubAppender attacher(*this);
  1.1628 +
  1.1629 +    Register tmpReg;
  1.1630 +    if (output().hasValue()) {
  1.1631 +        tmpReg = output().valueReg().scratchReg();
  1.1632 +    } else {
  1.1633 +        JS_ASSERT(output().type() == MIRType_Int32);
  1.1634 +        tmpReg = output().typedReg().gpr();
  1.1635 +    }
  1.1636 +    JS_ASSERT(object() != tmpReg);
  1.1637 +
  1.1638 +    const Class *clasp = obj->is<StrictArgumentsObject>() ? &StrictArgumentsObject::class_
  1.1639 +                                                          : &NormalArgumentsObject::class_;
  1.1640 +
  1.1641 +    masm.branchTestObjClass(Assembler::NotEqual, object(), tmpReg, clasp, &failures);
  1.1642 +
  1.1643 +    // Get initial ArgsObj length value, test if length has been overridden.
  1.1644 +    masm.unboxInt32(Address(object(), ArgumentsObject::getInitialLengthSlotOffset()), tmpReg);
  1.1645 +    masm.branchTest32(Assembler::NonZero, tmpReg, Imm32(ArgumentsObject::LENGTH_OVERRIDDEN_BIT),
  1.1646 +                      &failures);
  1.1647 +
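          +    // The initial length slot stores the length shifted left by
          +    // PACKED_BITS_COUNT, with flag bits such as LENGTH_OVERRIDDEN_BIT packed
          +    // into the low bits; shift those out to recover the actual length.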
  1.1648 +    masm.rshiftPtr(Imm32(ArgumentsObject::PACKED_BITS_COUNT), tmpReg);
  1.1649 +
   1.1650 +    // If the output is Int32, the result is already in the right place; otherwise box it into the output.
  1.1651 +    if (output().hasValue())
  1.1652 +        masm.tagValue(JSVAL_TYPE_INT32, tmpReg, output().valueReg());
  1.1653 +
  1.1654 +    // Success.
  1.1655 +    attacher.jumpRejoin(masm);
  1.1656 +
  1.1657 +    // Failure.
  1.1658 +    masm.bind(&failures);
  1.1659 +    attacher.jumpNextStub(masm);
  1.1660 +
  1.1661 +    if (obj->is<StrictArgumentsObject>()) {
  1.1662 +        JS_ASSERT(!hasStrictArgumentsLengthStub_);
  1.1663 +        hasStrictArgumentsLengthStub_ = true;
  1.1664 +        return linkAndAttachStub(cx, masm, attacher, ion, "ArgsObj length (strict)");
  1.1665 +    }
  1.1666 +
  1.1667 +    JS_ASSERT(!hasNormalArgumentsLengthStub_);
  1.1668 +    hasNormalArgumentsLengthStub_ = true;
  1.1669 +    return linkAndAttachStub(cx, masm, attacher, ion, "ArgsObj length (normal)");
  1.1670 +}
  1.1671 +
  1.1672 +bool
  1.1673 +GetPropertyIC::tryAttachStub(JSContext *cx, IonScript *ion, HandleObject obj,
  1.1674 +                             HandlePropertyName name, void *returnAddr, bool *emitted)
  1.1675 +{
  1.1676 +    JS_ASSERT(!*emitted);
  1.1677 +
  1.1678 +    if (!canAttachStub())
  1.1679 +        return true;
  1.1680 +
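          +    // Try the most specialized attachers first; each one sets *emitted when it
          +    // attaches a stub, and the |!*emitted| guards below skip the rest once one
          +    // has been attached.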
  1.1681 +    if (!*emitted && !tryAttachArgumentsLength(cx, ion, obj, name, emitted))
  1.1682 +        return false;
  1.1683 +
  1.1684 +    if (!*emitted && !tryAttachProxy(cx, ion, obj, name, returnAddr, emitted))
  1.1685 +        return false;
  1.1686 +
  1.1687 +    if (!*emitted && !tryAttachNative(cx, ion, obj, name, returnAddr, emitted))
  1.1688 +        return false;
  1.1689 +
  1.1690 +    if (!*emitted && !tryAttachTypedArrayLength(cx, ion, obj, name, emitted))
  1.1691 +        return false;
  1.1692 +
  1.1693 +    return true;
  1.1694 +}
  1.1695 +
  1.1696 +/* static */ bool
  1.1697 +GetPropertyIC::update(JSContext *cx, size_t cacheIndex,
  1.1698 +                      HandleObject obj, MutableHandleValue vp)
  1.1699 +{
  1.1700 +    void *returnAddr;
  1.1701 +    RootedScript topScript(cx, GetTopIonJSScript(cx, &returnAddr));
  1.1702 +    IonScript *ion = topScript->ionScript();
  1.1703 +
  1.1704 +    GetPropertyIC &cache = ion->getCache(cacheIndex).toGetProperty();
  1.1705 +    RootedPropertyName name(cx, cache.name());
  1.1706 +
  1.1707 +    // Override the return value if we are invalidated (bug 728188).
  1.1708 +    AutoDetectInvalidation adi(cx, vp.address(), ion);
  1.1709 +
  1.1710 +    // If the cache is idempotent, we will redo the op in the interpreter.
  1.1711 +    if (cache.idempotent())
  1.1712 +        adi.disable();
  1.1713 +
  1.1714 +    // For now, just stop generating new stubs once we hit the stub count
  1.1715 +    // limit. Once we can make calls from within generated stubs, a new call
  1.1716 +    // stub will be generated instead and the previous stubs unlinked.
  1.1717 +    bool emitted = false;
  1.1718 +    if (!cache.tryAttachStub(cx, ion, obj, name, returnAddr, &emitted))
  1.1719 +        return false;
  1.1720 +
  1.1721 +    if (cache.idempotent() && !emitted) {
  1.1722 +        // Invalidate the cache if the property was not found, or was found on
  1.1723 +        // a non-native object. This ensures:
  1.1724 +        // 1) The property read has no observable side-effects.
  1.1725 +        // 2) There's no need to dynamically monitor the return type. This would
  1.1726 +        //    be complicated since (due to GVN) there can be multiple pc's
  1.1727 +        //    associated with a single idempotent cache.
  1.1728 +        IonSpew(IonSpew_InlineCaches, "Invalidating from idempotent cache %s:%d",
  1.1729 +                topScript->filename(), topScript->lineno());
  1.1730 +
  1.1731 +        topScript->setInvalidatedIdempotentCache();
  1.1732 +
  1.1733 +        // Do not re-invalidate if the lookup already caused invalidation.
  1.1734 +        if (!topScript->hasIonScript())
  1.1735 +            return true;
  1.1736 +
  1.1737 +        return Invalidate(cx, topScript);
  1.1738 +    }
  1.1739 +
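          +    // Whether or not a stub was attached above, finish this get in C++ so the
          +    // current invocation still produces the correct value.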
  1.1740 +    RootedId id(cx, NameToId(name));
  1.1741 +    if (!JSObject::getGeneric(cx, obj, obj, id, vp))
  1.1742 +        return false;
  1.1743 +
  1.1744 +    if (!cache.idempotent()) {
  1.1745 +        RootedScript script(cx);
  1.1746 +        jsbytecode *pc;
  1.1747 +        cache.getScriptedLocation(&script, &pc);
  1.1748 +
   1.1749 +        // If the cache is idempotent, the property is known to exist, so we don't
   1.1750 +        // have to call __noSuchMethod__.
  1.1751 +
  1.1752 +#if JS_HAS_NO_SUCH_METHOD
  1.1753 +        // Handle objects with __noSuchMethod__.
  1.1754 +        if (JSOp(*pc) == JSOP_CALLPROP && MOZ_UNLIKELY(vp.isUndefined())) {
  1.1755 +            if (!OnUnknownMethod(cx, obj, IdToValue(id), vp))
  1.1756 +                return false;
  1.1757 +        }
  1.1758 +#endif
  1.1759 +
  1.1760 +        // Monitor changes to cache entry.
  1.1761 +        if (!cache.monitoredResult())
  1.1762 +            types::TypeScript::Monitor(cx, script, pc, vp);
  1.1763 +    }
  1.1764 +
  1.1765 +    return true;
  1.1766 +}
  1.1767 +
  1.1768 +void
  1.1769 +GetPropertyIC::reset()
  1.1770 +{
  1.1771 +    RepatchIonCache::reset();
  1.1772 +    hasTypedArrayLengthStub_ = false;
  1.1773 +    hasStrictArgumentsLengthStub_ = false;
  1.1774 +    hasNormalArgumentsLengthStub_ = false;
  1.1775 +    hasGenericProxyStub_ = false;
  1.1776 +}
  1.1777 +
  1.1778 +bool
  1.1779 +ParallelIonCache::initStubbedShapes(JSContext *cx)
  1.1780 +{
  1.1781 +    JS_ASSERT(isAllocated());
  1.1782 +    if (!stubbedShapes_) {
  1.1783 +        stubbedShapes_ = cx->new_<ShapeSet>(cx);
  1.1784 +        return stubbedShapes_ && stubbedShapes_->init();
  1.1785 +    }
  1.1786 +    return true;
  1.1787 +}
  1.1788 +
  1.1789 +bool
  1.1790 +ParallelIonCache::hasOrAddStubbedShape(LockedJSContext &cx, Shape *shape, bool *alreadyStubbed)
  1.1791 +{
  1.1792 +    // Check if we have already stubbed the current object to avoid
  1.1793 +    // attaching a duplicate stub.
  1.1794 +    if (!initStubbedShapes(cx))
  1.1795 +        return false;
  1.1796 +    ShapeSet::AddPtr p = stubbedShapes_->lookupForAdd(shape);
  1.1797 +    if ((*alreadyStubbed = !!p))
  1.1798 +        return true;
  1.1799 +    return stubbedShapes_->add(p, shape);
  1.1800 +}
  1.1801 +
  1.1802 +void
  1.1803 +ParallelIonCache::reset()
  1.1804 +{
  1.1805 +    DispatchIonCache::reset();
  1.1806 +    if (stubbedShapes_)
  1.1807 +        stubbedShapes_->clear();
  1.1808 +}
  1.1809 +
  1.1810 +void
  1.1811 +ParallelIonCache::destroy()
  1.1812 +{
  1.1813 +    DispatchIonCache::destroy();
  1.1814 +    js_delete(stubbedShapes_);
  1.1815 +}
  1.1816 +
  1.1817 +void
  1.1818 +GetPropertyParIC::reset()
  1.1819 +{
  1.1820 +    ParallelIonCache::reset();
  1.1821 +    hasTypedArrayLengthStub_ = false;
  1.1822 +}
  1.1823 +
  1.1824 +bool
  1.1825 +GetPropertyParIC::attachReadSlot(LockedJSContext &cx, IonScript *ion, JSObject *obj,
  1.1826 +                                 JSObject *holder, Shape *shape)
  1.1827 +{
  1.1828 +    // Ready to generate the read slot stub.
  1.1829 +    DispatchStubPrepender attacher(*this);
  1.1830 +    MacroAssembler masm(cx, ion);
  1.1831 +    GenerateReadSlot(cx, ion, masm, attacher, obj, holder, shape, object(), output());
  1.1832 +
  1.1833 +    return linkAndAttachStub(cx, masm, attacher, ion, "parallel reading");
  1.1834 +}
  1.1835 +
  1.1836 +bool
  1.1837 +GetPropertyParIC::attachArrayLength(LockedJSContext &cx, IonScript *ion, JSObject *obj)
  1.1838 +{
  1.1839 +    MacroAssembler masm(cx, ion);
  1.1840 +    DispatchStubPrepender attacher(*this);
  1.1841 +    if (!GenerateArrayLength(cx, masm, attacher, obj, object(), output()))
  1.1842 +        return false;
  1.1843 +
  1.1844 +    return linkAndAttachStub(cx, masm, attacher, ion, "parallel array length");
  1.1845 +}
  1.1846 +
  1.1847 +bool
  1.1848 +GetPropertyParIC::attachTypedArrayLength(LockedJSContext &cx, IonScript *ion, JSObject *obj)
  1.1849 +{
  1.1850 +    MacroAssembler masm(cx, ion);
  1.1851 +    DispatchStubPrepender attacher(*this);
  1.1852 +    GenerateTypedArrayLength(cx, masm, attacher, obj, object(), output());
  1.1853 +
  1.1854 +    JS_ASSERT(!hasTypedArrayLengthStub_);
  1.1855 +    hasTypedArrayLengthStub_ = true;
  1.1856 +    return linkAndAttachStub(cx, masm, attacher, ion, "parallel typed array length");
  1.1857 +}
  1.1858 +
  1.1859 +bool
  1.1860 +GetPropertyParIC::update(ForkJoinContext *cx, size_t cacheIndex,
  1.1861 +                         HandleObject obj, MutableHandleValue vp)
  1.1862 +{
  1.1863 +    IonScript *ion = GetTopIonJSScript(cx)->parallelIonScript();
  1.1864 +    GetPropertyParIC &cache = ion->getCache(cacheIndex).toGetPropertyPar();
  1.1865 +
   1.1866 +    // Grab the property early, as the pure path is fast anyway and doesn't
  1.1867 +    // need a lock. If we can't do it purely, bail out of parallel execution.
  1.1868 +    if (!GetPropertyPure(cx, obj, NameToId(cache.name()), vp.address()))
  1.1869 +        return false;
  1.1870 +
   1.1871 +    // Avoid unnecessary locking if we cannot attach stubs.
  1.1872 +    if (!cache.canAttachStub())
  1.1873 +        return true;
  1.1874 +
  1.1875 +    {
  1.1876 +        // Lock the context before mutating the cache. Ideally we'd like to do
  1.1877 +        // finer-grained locking, with one lock per cache. However, generating
  1.1878 +        // new jitcode uses a global ExecutableAllocator tied to the runtime.
  1.1879 +        LockedJSContext ncx(cx);
  1.1880 +
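          +        // Re-check canAttachStub() now that we hold the lock; other worker
          +        // threads may have attached stubs in the meantime.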
  1.1881 +        if (cache.canAttachStub()) {
  1.1882 +            bool alreadyStubbed;
  1.1883 +            if (!cache.hasOrAddStubbedShape(ncx, obj->lastProperty(), &alreadyStubbed))
  1.1884 +                return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
  1.1885 +            if (alreadyStubbed)
  1.1886 +                return true;
  1.1887 +
   1.1888 +            // See the note about the stub limit in GetPropertyIC::update.
  1.1889 +            bool attachedStub = false;
  1.1890 +
  1.1891 +            {
  1.1892 +                RootedShape shape(ncx);
  1.1893 +                RootedObject holder(ncx);
  1.1894 +                RootedPropertyName name(ncx, cache.name());
  1.1895 +
  1.1896 +                GetPropertyIC::NativeGetPropCacheability canCache =
  1.1897 +                    CanAttachNativeGetProp(ncx, cache, obj, name, &holder, &shape);
  1.1898 +
  1.1899 +                if (canCache == GetPropertyIC::CanAttachReadSlot) {
  1.1900 +                    if (!cache.attachReadSlot(ncx, ion, obj, holder, shape))
  1.1901 +                        return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
  1.1902 +                    attachedStub = true;
  1.1903 +                }
  1.1904 +
  1.1905 +                if (!attachedStub && canCache == GetPropertyIC::CanAttachArrayLength) {
  1.1906 +                    if (!cache.attachArrayLength(ncx, ion, obj))
  1.1907 +                        return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
  1.1908 +                    attachedStub = true;
  1.1909 +                }
  1.1910 +            }
  1.1911 +
  1.1912 +            if (!attachedStub && !cache.hasTypedArrayLengthStub() &&
  1.1913 +                obj->is<TypedArrayObject>() && cx->names().length == cache.name() &&
  1.1914 +                (cache.output().type() == MIRType_Value || cache.output().type() == MIRType_Int32))
  1.1915 +            {
  1.1916 +                if (!cache.attachTypedArrayLength(ncx, ion, obj))
  1.1917 +                    return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
  1.1918 +                attachedStub = true;
  1.1919 +            }
  1.1920 +        }
  1.1921 +    }
  1.1922 +
  1.1923 +    return true;
  1.1924 +}
  1.1925 +
  1.1926 +void
  1.1927 +IonCache::disable()
  1.1928 +{
  1.1929 +    reset();
  1.1930 +    this->disabled_ = 1;
  1.1931 +}
  1.1932 +
  1.1933 +void
  1.1934 +IonCache::reset()
  1.1935 +{
  1.1936 +    this->stubCount_ = 0;
  1.1937 +}
  1.1938 +
  1.1939 +void
  1.1940 +IonCache::destroy()
  1.1941 +{
  1.1942 +}
  1.1943 +
  1.1944 +static void
  1.1945 +GenerateSetSlot(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &attacher,
  1.1946 +                JSObject *obj, Shape *shape, Register object, ConstantOrRegister value,
  1.1947 +                bool needsTypeBarrier, bool checkTypeset)
  1.1948 +{
  1.1949 +    JS_ASSERT(obj->isNative());
  1.1950 +
  1.1951 +    Label failures, barrierFailure;
  1.1952 +    masm.branchPtr(Assembler::NotEqual,
  1.1953 +                   Address(object, JSObject::offsetOfShape()),
  1.1954 +                   ImmGCPtr(obj->lastProperty()), &failures);
  1.1955 +
  1.1956 +    // Guard that the incoming value is in the type set for the property
  1.1957 +    // if a type barrier is required.
  1.1958 +    if (needsTypeBarrier) {
   1.1959 +        // We can't do anything that would change the HeapTypeSet, so just guard
   1.1960 +        // that the incoming value's type is already in it.
  1.1961 +
  1.1962 +        // Obtain and guard on the TypeObject of the object.
  1.1963 +        types::TypeObject *type = obj->type();
  1.1964 +        masm.branchPtr(Assembler::NotEqual,
  1.1965 +                       Address(object, JSObject::offsetOfType()),
  1.1966 +                       ImmGCPtr(type), &failures);
  1.1967 +
  1.1968 +        if (checkTypeset) {
  1.1969 +            TypedOrValueRegister valReg = value.reg();
  1.1970 +            types::HeapTypeSet *propTypes = type->maybeGetProperty(shape->propid());
  1.1971 +            JS_ASSERT(propTypes);
  1.1972 +            JS_ASSERT(!propTypes->unknown());
  1.1973 +
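          +            // Reuse |object| as the scratch register for the typeset guard; its
          +            // value is saved on the stack here and restored on both the success
          +            // and the barrier-failure paths.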
  1.1974 +            Register scratchReg = object;
  1.1975 +            masm.push(scratchReg);
  1.1976 +
  1.1977 +            masm.guardTypeSet(valReg, propTypes, scratchReg, &barrierFailure);
  1.1978 +            masm.pop(object);
  1.1979 +        }
  1.1980 +    }
  1.1981 +
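          +    // Store the value into either a fixed slot on the object or its dynamic
          +    // slots array, emitting a pre-barrier first if the zone currently needs
          +    // write barriers.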
  1.1982 +    if (obj->isFixedSlot(shape->slot())) {
  1.1983 +        Address addr(object, JSObject::getFixedSlotOffset(shape->slot()));
  1.1984 +
  1.1985 +        if (cx->zone()->needsBarrier())
  1.1986 +            masm.callPreBarrier(addr, MIRType_Value);
  1.1987 +
  1.1988 +        masm.storeConstantOrRegister(value, addr);
  1.1989 +    } else {
  1.1990 +        Register slotsReg = object;
  1.1991 +        masm.loadPtr(Address(object, JSObject::offsetOfSlots()), slotsReg);
  1.1992 +
  1.1993 +        Address addr(slotsReg, obj->dynamicSlotIndex(shape->slot()) * sizeof(Value));
  1.1994 +
  1.1995 +        if (cx->zone()->needsBarrier())
  1.1996 +            masm.callPreBarrier(addr, MIRType_Value);
  1.1997 +
  1.1998 +        masm.storeConstantOrRegister(value, addr);
  1.1999 +    }
  1.2000 +
  1.2001 +    attacher.jumpRejoin(masm);
  1.2002 +
  1.2003 +    if (barrierFailure.used()) {
  1.2004 +        masm.bind(&barrierFailure);
  1.2005 +        masm.pop(object);
  1.2006 +    }
  1.2007 +
  1.2008 +    masm.bind(&failures);
  1.2009 +    attacher.jumpNextStub(masm);
  1.2010 +}
  1.2011 +
  1.2012 +bool
  1.2013 +SetPropertyIC::attachSetSlot(JSContext *cx, IonScript *ion, HandleObject obj,
  1.2014 +                             HandleShape shape, bool checkTypeset)
  1.2015 +{
  1.2016 +    MacroAssembler masm(cx, ion);
  1.2017 +    RepatchStubAppender attacher(*this);
  1.2018 +    GenerateSetSlot(cx, masm, attacher, obj, shape, object(), value(), needsTypeBarrier(),
  1.2019 +                    checkTypeset);
  1.2020 +    return linkAndAttachStub(cx, masm, attacher, ion, "setting");
  1.2021 +}
  1.2022 +
  1.2023 +static bool
  1.2024 +IsCacheableSetPropCallNative(HandleObject obj, HandleObject holder, HandleShape shape)
  1.2025 +{
  1.2026 +    JS_ASSERT(obj->isNative());
  1.2027 +
  1.2028 +    if (!shape || !IsCacheableProtoChain(obj, holder))
  1.2029 +        return false;
  1.2030 +
  1.2031 +    return shape->hasSetterValue() && shape->setterObject() &&
  1.2032 +           shape->setterObject()->is<JSFunction>() &&
  1.2033 +           shape->setterObject()->as<JSFunction>().isNative();
  1.2034 +}
  1.2035 +
  1.2036 +static bool
  1.2037 +IsCacheableSetPropCallPropertyOp(HandleObject obj, HandleObject holder, HandleShape shape)
  1.2038 +{
  1.2039 +    JS_ASSERT(obj->isNative());
  1.2040 +
  1.2041 +    if (!shape)
  1.2042 +        return false;
  1.2043 +
  1.2044 +    if (!IsCacheableProtoChain(obj, holder))
  1.2045 +        return false;
  1.2046 +
  1.2047 +    if (shape->hasSlot())
  1.2048 +        return false;
  1.2049 +
  1.2050 +    if (shape->hasDefaultSetter())
  1.2051 +        return false;
  1.2052 +
  1.2053 +    if (shape->hasSetterValue())
  1.2054 +        return false;
  1.2055 +
  1.2056 +    // Despite the vehement claims of Shape.h that writable() is only
  1.2057 +    // relevant for data descriptors, some PropertyOp setters care
   1.2058 +    // desperately about its value. The flag should always be true, apart
  1.2059 +    // from these rare instances.
  1.2060 +    if (!shape->writable())
  1.2061 +        return false;
  1.2062 +
  1.2063 +    return true;
  1.2064 +}
  1.2065 +
  1.2066 +static bool
  1.2067 +EmitCallProxySet(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &attacher,
  1.2068 +                 HandleId propId, RegisterSet liveRegs, Register object,
  1.2069 +                 ConstantOrRegister value, void *returnAddr, bool strict)
  1.2070 +{
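          +    // This mirrors EmitCallProxyGet above, except that the strictness flag is
          +    // passed along and there is no result to load back: the pushed value is
          +    // purely an input to Proxy::set.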
  1.2071 +    MacroAssembler::AfterICSaveLive aic = masm.icSaveLive(liveRegs);
  1.2072 +
  1.2073 +    // Remaining registers should be free, but we need to use |object| still
  1.2074 +    // so leave it alone.
  1.2075 +    RegisterSet regSet(RegisterSet::All());
  1.2076 +    regSet.take(AnyRegister(object));
  1.2077 +
  1.2078 +    // Proxy::set(JSContext *cx, HandleObject proxy, HandleObject receiver, HandleId id,
  1.2079 +    //            bool strict, MutableHandleValue vp)
  1.2080 +    Register argJSContextReg = regSet.takeGeneral();
  1.2081 +    Register argProxyReg     = regSet.takeGeneral();
  1.2082 +    Register argIdReg        = regSet.takeGeneral();
  1.2083 +    Register argVpReg        = regSet.takeGeneral();
  1.2084 +    Register argStrictReg    = regSet.takeGeneral();
  1.2085 +
  1.2086 +    Register scratch         = regSet.takeGeneral();
  1.2087 +
  1.2088 +    // Push stubCode for marking.
  1.2089 +    attacher.pushStubCodePointer(masm);
  1.2090 +
  1.2091 +    // Push args on stack first so we can take pointers to make handles.
  1.2092 +    masm.Push(value);
  1.2093 +    masm.movePtr(StackPointer, argVpReg);
  1.2094 +
  1.2095 +    masm.Push(propId, scratch);
  1.2096 +    masm.movePtr(StackPointer, argIdReg);
  1.2097 +
   1.2098 +    // Push the object and the receiver.  Both are the same, so a Handle to one
   1.2099 +    // is equivalent to a handle to the other.
  1.2100 +    masm.Push(object);
  1.2101 +    masm.Push(object);
  1.2102 +    masm.movePtr(StackPointer, argProxyReg);
  1.2103 +
  1.2104 +    masm.loadJSContext(argJSContextReg);
   1.2105 +    masm.move32(Imm32(strict ? 1 : 0), argStrictReg);
  1.2106 +
  1.2107 +    if (!masm.icBuildOOLFakeExitFrame(returnAddr, aic))
  1.2108 +        return false;
  1.2109 +    masm.enterFakeExitFrame(ION_FRAME_OOL_PROXY);
  1.2110 +
  1.2111 +    // Make the call.
  1.2112 +    masm.setupUnalignedABICall(6, scratch);
  1.2113 +    masm.passABIArg(argJSContextReg);
  1.2114 +    masm.passABIArg(argProxyReg);
  1.2115 +    masm.passABIArg(argProxyReg);
  1.2116 +    masm.passABIArg(argIdReg);
  1.2117 +    masm.passABIArg(argStrictReg);
  1.2118 +    masm.passABIArg(argVpReg);
  1.2119 +    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, Proxy::set));
  1.2120 +
  1.2121 +    // Test for failure.
  1.2122 +    masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());
  1.2123 +
  1.2124 +    // masm.leaveExitFrame & pop locals
  1.2125 +    masm.adjustStack(IonOOLProxyExitFrameLayout::Size());
  1.2126 +
  1.2127 +    masm.icRestoreLive(liveRegs, aic);
  1.2128 +    return true;
  1.2129 +}
  1.2130 +
  1.2131 +bool
  1.2132 +SetPropertyIC::attachGenericProxy(JSContext *cx, IonScript *ion, void *returnAddr)
  1.2133 +{
  1.2134 +    JS_ASSERT(!hasGenericProxyStub());
  1.2135 +
  1.2136 +    MacroAssembler masm(cx, ion, script_, pc_);
  1.2137 +    RepatchStubAppender attacher(*this);
  1.2138 +
  1.2139 +    Label failures;
  1.2140 +    {
  1.2141 +        Label proxyFailures;
  1.2142 +        Label proxySuccess;
  1.2143 +
  1.2144 +        RegisterSet regSet(RegisterSet::All());
  1.2145 +        regSet.take(AnyRegister(object()));
  1.2146 +        if (!value().constant())
  1.2147 +            regSet.takeUnchecked(value().reg());
  1.2148 +
  1.2149 +        Register scratch = regSet.takeGeneral();
  1.2150 +        masm.push(scratch);
  1.2151 +
  1.2152 +        GenerateProxyClassGuards(masm, object(), scratch, &proxyFailures);
  1.2153 +
   1.2154 +        // Reject DOM proxies; they have their own stubs, so this generic stub shouldn't
   1.2155 +        // catch too much. The failure case here is actually Equal: fall through to the failure code.
  1.2156 +        masm.branchTestProxyHandlerFamily(Assembler::NotEqual, object(), scratch,
  1.2157 +                                          GetDOMProxyHandlerFamily(), &proxySuccess);
  1.2158 +
  1.2159 +        masm.bind(&proxyFailures);
  1.2160 +        masm.pop(scratch);
  1.2161 +        // Unify the point of failure to allow for later DOM proxy handling.
  1.2162 +        masm.jump(&failures);
  1.2163 +
  1.2164 +        masm.bind(&proxySuccess);
  1.2165 +        masm.pop(scratch);
  1.2166 +    }
  1.2167 +
  1.2168 +    RootedId propId(cx, AtomToId(name()));
  1.2169 +    if (!EmitCallProxySet(cx, masm, attacher, propId, liveRegs_, object(), value(),
  1.2170 +                          returnAddr, strict()))
  1.2171 +    {
  1.2172 +        return false;
  1.2173 +    }
  1.2174 +
  1.2175 +    attacher.jumpRejoin(masm);
  1.2176 +
  1.2177 +    masm.bind(&failures);
  1.2178 +    attacher.jumpNextStub(masm);
  1.2179 +
  1.2180 +    JS_ASSERT(!hasGenericProxyStub_);
  1.2181 +    hasGenericProxyStub_ = true;
  1.2182 +
  1.2183 +    return linkAndAttachStub(cx, masm, attacher, ion, "generic proxy set");
  1.2184 +}
  1.2185 +
  1.2186 +bool
  1.2187 +SetPropertyIC::attachDOMProxyShadowed(JSContext *cx, IonScript *ion, HandleObject obj,
   1.2188 +                                      void *returnAddr)
  1.2189 +{
  1.2190 +    JS_ASSERT(IsCacheableDOMProxy(obj));
  1.2191 +
  1.2192 +    Label failures;
  1.2193 +    MacroAssembler masm(cx, ion, script_, pc_);
  1.2194 +    RepatchStubAppender attacher(*this);
  1.2195 +
  1.2196 +    // Guard on the shape of the object.
  1.2197 +    masm.branchPtr(Assembler::NotEqual,
  1.2198 +                   Address(object(), JSObject::offsetOfShape()),
  1.2199 +                   ImmGCPtr(obj->lastProperty()), &failures);
  1.2200 +
  1.2201 +    // Make sure object is a DOMProxy
  1.2202 +    GenerateDOMProxyChecks(cx, masm, obj, name(), object(), &failures,
  1.2203 +                           /*skipExpandoCheck=*/true);
  1.2204 +
  1.2205 +    RootedId propId(cx, AtomToId(name()));
  1.2206 +    if (!EmitCallProxySet(cx, masm, attacher, propId, liveRegs_, object(),
  1.2207 +                          value(), returnAddr, strict()))
  1.2208 +    {
  1.2209 +        return false;
  1.2210 +    }
  1.2211 +
  1.2212 +    // Success.
  1.2213 +    attacher.jumpRejoin(masm);
  1.2214 +
  1.2215 +    // Failure.
  1.2216 +    masm.bind(&failures);
  1.2217 +    attacher.jumpNextStub(masm);
  1.2218 +
  1.2219 +    return linkAndAttachStub(cx, masm, attacher, ion, "DOM proxy shadowed set");
  1.2220 +}
  1.2221 +
  1.2222 +static bool
  1.2223 +GenerateCallSetter(JSContext *cx, IonScript *ion, MacroAssembler &masm,
  1.2224 +                   IonCache::StubAttacher &attacher, HandleObject obj,
  1.2225 +                   HandleObject holder, HandleShape shape, bool strict, Register object,
  1.2226 +                   ConstantOrRegister value, Label *failure, RegisterSet liveRegs,
  1.2227 +                   void *returnAddr)
  1.2228 +{
  1.2229 +    // Generate prototype guards if needed.
  1.2230 +    // Take a scratch register for use, save on stack.
  1.2231 +    {
  1.2232 +        RegisterSet regSet(RegisterSet::All());
  1.2233 +        regSet.take(AnyRegister(object));
  1.2234 +        if (!value.constant())
  1.2235 +            regSet.takeUnchecked(value.reg());
  1.2236 +        Register scratchReg = regSet.takeGeneral();
  1.2237 +        masm.push(scratchReg);
  1.2238 +
  1.2239 +        Label protoFailure;
  1.2240 +        Label protoSuccess;
  1.2241 +
  1.2242 +        // Generate prototype/shape guards.
  1.2243 +        if (obj != holder)
  1.2244 +            GeneratePrototypeGuards(cx, ion, masm, obj, holder, object, scratchReg, &protoFailure);
  1.2245 +
  1.2246 +        masm.moveNurseryPtr(ImmMaybeNurseryPtr(holder), scratchReg);
  1.2247 +        masm.branchPtr(Assembler::NotEqual,
  1.2248 +                       Address(scratchReg, JSObject::offsetOfShape()),
  1.2249 +                       ImmGCPtr(holder->lastProperty()),
  1.2250 +                       &protoFailure);
  1.2251 +
  1.2252 +        masm.jump(&protoSuccess);
  1.2253 +
  1.2254 +        masm.bind(&protoFailure);
  1.2255 +        masm.pop(scratchReg);
  1.2256 +        masm.jump(failure);
  1.2257 +
  1.2258 +        masm.bind(&protoSuccess);
  1.2259 +        masm.pop(scratchReg);
  1.2260 +    }
  1.2261 +
  1.2262 +    // Good to go for invoking setter.
  1.2263 +
  1.2264 +    MacroAssembler::AfterICSaveLive aic = masm.icSaveLive(liveRegs);
  1.2265 +
  1.2266 +    // Remaining registers should basically be free, but we need to use |object| still
  1.2267 +    // so leave it alone.
  1.2268 +    RegisterSet regSet(RegisterSet::All());
  1.2269 +    regSet.take(AnyRegister(object));
  1.2270 +
  1.2271 +    // This is a slower stub path, and we're going to be doing a call anyway.  Don't need
  1.2272 +    // to try so hard to not use the stack.  Scratch regs are just taken from the register
  1.2273 +    // set not including the input, current value saved on the stack, and restored when
  1.2274 +    // we're done with it.
  1.2275 +    //
   1.2276 +    // Be very careful not to use any of these registers before |value| is
   1.2277 +    // pushed, since they might shadow it.
   1.2278 +    Register scratchReg      = regSet.takeGeneral();
  1.2279 +    Register argJSContextReg = regSet.takeGeneral();
  1.2280 +    Register argVpReg        = regSet.takeGeneral();
  1.2281 +
  1.2282 +    bool callNative = IsCacheableSetPropCallNative(obj, holder, shape);
  1.2283 +    JS_ASSERT_IF(!callNative, IsCacheableSetPropCallPropertyOp(obj, holder, shape));
  1.2284 +
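          +    // Two setter flavors are supported: a native JSFunction setter, called
          +    // through its JSNative entry point, and a raw StrictPropertyOp; the two
          +    // take different argument layouts, hence the two paths below.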
  1.2285 +    if (callNative) {
  1.2286 +        JS_ASSERT(shape->hasSetterValue() && shape->setterObject() &&
  1.2287 +                  shape->setterObject()->is<JSFunction>());
  1.2288 +        JSFunction *target = &shape->setterObject()->as<JSFunction>();
  1.2289 +
  1.2290 +        JS_ASSERT(target->isNative());
  1.2291 +
  1.2292 +        Register argUintNReg = regSet.takeGeneral();
  1.2293 +
  1.2294 +        // Set up the call:
  1.2295 +        //  bool (*)(JSContext *, unsigned, Value *vp)
  1.2296 +        // vp[0] is callee/outparam
  1.2297 +        // vp[1] is |this|
  1.2298 +        // vp[2] is the value
  1.2299 +
  1.2300 +        // Build vp and move the base into argVpReg.
  1.2301 +        masm.Push(value);
  1.2302 +        masm.Push(TypedOrValueRegister(MIRType_Object, AnyRegister(object)));
  1.2303 +        masm.Push(ObjectValue(*target));
  1.2304 +        masm.movePtr(StackPointer, argVpReg);
  1.2305 +
  1.2306 +        // Preload other regs
  1.2307 +        masm.loadJSContext(argJSContextReg);
  1.2308 +        masm.move32(Imm32(1), argUintNReg);
  1.2309 +
  1.2310 +        // Push data for GC marking
  1.2311 +        masm.Push(argUintNReg);
  1.2312 +        attacher.pushStubCodePointer(masm);
  1.2313 +
  1.2314 +        if (!masm.icBuildOOLFakeExitFrame(returnAddr, aic))
  1.2315 +            return false;
  1.2316 +        masm.enterFakeExitFrame(ION_FRAME_OOL_NATIVE);
  1.2317 +
  1.2318 +        // Make the call
  1.2319 +        masm.setupUnalignedABICall(3, scratchReg);
  1.2320 +        masm.passABIArg(argJSContextReg);
  1.2321 +        masm.passABIArg(argUintNReg);
  1.2322 +        masm.passABIArg(argVpReg);
  1.2323 +        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, target->native()));
  1.2324 +
  1.2325 +        // Test for failure.
  1.2326 +        masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());
  1.2327 +
  1.2328 +        // masm.leaveExitFrame & pop locals.
  1.2329 +        masm.adjustStack(IonOOLNativeExitFrameLayout::Size(1));
  1.2330 +    } else {
  1.2331 +        Register argObjReg       = regSet.takeGeneral();
  1.2332 +        Register argIdReg        = regSet.takeGeneral();
  1.2333 +        Register argStrictReg    = regSet.takeGeneral();
  1.2334 +
  1.2335 +        attacher.pushStubCodePointer(masm);
  1.2336 +
  1.2337 +        StrictPropertyOp target = shape->setterOp();
  1.2338 +        JS_ASSERT(target);
  1.2339 +        // JSStrictPropertyOp: bool fn(JSContext *cx, HandleObject obj,
  1.2340 +        //                               HandleId id, bool strict, MutableHandleValue vp);
  1.2341 +
  1.2342 +        // Push args on stack first so we can take pointers to make handles.
  1.2343 +        if (value.constant())
  1.2344 +            masm.Push(value.value());
  1.2345 +        else
  1.2346 +            masm.Push(value.reg());
  1.2347 +        masm.movePtr(StackPointer, argVpReg);
  1.2348 +
  1.2349 +        masm.move32(Imm32(strict ? 1 : 0), argStrictReg);
  1.2350 +
   1.2351 +        // Push the canonical jsid from the shape instead of the PropertyName.
  1.2352 +        masm.Push(shape->propid(), argIdReg);
  1.2353 +        masm.movePtr(StackPointer, argIdReg);
  1.2354 +
  1.2355 +        masm.Push(object);
  1.2356 +        masm.movePtr(StackPointer, argObjReg);
  1.2357 +
  1.2358 +        masm.loadJSContext(argJSContextReg);
  1.2359 +
  1.2360 +        if (!masm.icBuildOOLFakeExitFrame(returnAddr, aic))
  1.2361 +            return false;
  1.2362 +        masm.enterFakeExitFrame(ION_FRAME_OOL_PROPERTY_OP);
  1.2363 +
  1.2364 +        // Make the call.
  1.2365 +        masm.setupUnalignedABICall(5, scratchReg);
  1.2366 +        masm.passABIArg(argJSContextReg);
  1.2367 +        masm.passABIArg(argObjReg);
  1.2368 +        masm.passABIArg(argIdReg);
  1.2369 +        masm.passABIArg(argStrictReg);
  1.2370 +        masm.passABIArg(argVpReg);
  1.2371 +        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, target));
  1.2372 +
  1.2373 +        // Test for failure.
  1.2374 +        masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());
  1.2375 +
  1.2376 +        // masm.leaveExitFrame & pop locals.
  1.2377 +        masm.adjustStack(IonOOLPropertyOpExitFrameLayout::Size());
  1.2378 +    }
  1.2379 +
  1.2380 +    masm.icRestoreLive(liveRegs, aic);
  1.2381 +    return true;
  1.2382 +}
  1.2383 +
  1.2384 +static bool
  1.2385 +IsCacheableDOMProxyUnshadowedSetterCall(JSContext *cx, HandleObject obj, HandlePropertyName name,
  1.2386 +                                        MutableHandleObject holder, MutableHandleShape shape,
  1.2387 +                                        bool *isSetter)
  1.2388 +{
  1.2389 +    JS_ASSERT(IsCacheableDOMProxy(obj));
  1.2390 +
  1.2391 +    *isSetter = false;
  1.2392 +
  1.2393 +    RootedObject checkObj(cx, obj->getTaggedProto().toObjectOrNull());
  1.2394 +    if (!checkObj)
  1.2395 +        return true;
  1.2396 +
  1.2397 +    if (!JSObject::lookupProperty(cx, obj, name, holder, shape))
  1.2398 +        return false;
  1.2399 +
  1.2400 +    if (!holder)
  1.2401 +        return true;
  1.2402 +
  1.2403 +    if (!IsCacheableSetPropCallNative(checkObj, holder, shape) &&
  1.2404 +        !IsCacheableSetPropCallPropertyOp(checkObj, holder, shape))
  1.2405 +    {
  1.2406 +        return true;
  1.2407 +    }
  1.2408 +
  1.2409 +    *isSetter = true;
  1.2410 +    return true;
  1.2411 +}
  1.2412 +
  1.2413 +bool
  1.2414 +SetPropertyIC::attachDOMProxyUnshadowed(JSContext *cx, IonScript *ion, HandleObject obj,
  1.2415 +                                        void *returnAddr)
  1.2416 +{
  1.2417 +    JS_ASSERT(IsCacheableDOMProxy(obj));
  1.2418 +
  1.2419 +    Label failures;
  1.2420 +    MacroAssembler masm(cx, ion, script_, pc_);
  1.2421 +    RepatchStubAppender attacher(*this);
  1.2422 +
  1.2423 +    // Guard on the shape of the object.
  1.2424 +    masm.branchPtr(Assembler::NotEqual,
  1.2425 +                   Address(object(), JSObject::offsetOfShape()),
  1.2426 +                   ImmGCPtr(obj->lastProperty()), &failures);
  1.2427 +
  1.2428 +    // Make sure object is a DOMProxy
  1.2429 +    GenerateDOMProxyChecks(cx, masm, obj, name(), object(), &failures);
  1.2430 +
  1.2431 +    RootedPropertyName propName(cx, name());
  1.2432 +    RootedObject holder(cx);
  1.2433 +    RootedShape shape(cx);
  1.2434 +    bool isSetter;
  1.2435 +    if (!IsCacheableDOMProxyUnshadowedSetterCall(cx, obj, propName, &holder,
  1.2436 +                                                 &shape, &isSetter))
  1.2437 +    {
  1.2438 +        return false;
  1.2439 +    }
  1.2440 +
  1.2441 +    if (isSetter) {
  1.2442 +        if (!GenerateCallSetter(cx, ion, masm, attacher, obj, holder, shape, strict(),
  1.2443 +                                object(), value(), &failures, liveRegs_, returnAddr))
  1.2444 +        {
  1.2445 +            return false;
  1.2446 +        }
  1.2447 +    } else {
  1.2448 +        // Either there was no proto, or the property wasn't appropriately found on it.
  1.2449 +        // Drop back to just a call to Proxy::set().
  1.2450 +        RootedId propId(cx, AtomToId(name()));
  1.2451 +        if (!EmitCallProxySet(cx, masm, attacher, propId, liveRegs_, object(),
  1.2452 +                            value(), returnAddr, strict()))
  1.2453 +        {
  1.2454 +            return false;
  1.2455 +        }
  1.2456 +    }
  1.2457 +
  1.2458 +    // Success.
  1.2459 +    attacher.jumpRejoin(masm);
  1.2460 +
  1.2461 +    // Failure.
  1.2462 +    masm.bind(&failures);
  1.2463 +    attacher.jumpNextStub(masm);
  1.2464 +
  1.2465 +    return linkAndAttachStub(cx, masm, attacher, ion, "DOM proxy unshadowed set");
  1.2466 +}
  1.2467 +
  1.2468 +bool
  1.2469 +SetPropertyIC::attachCallSetter(JSContext *cx, IonScript *ion,
  1.2470 +                                HandleObject obj, HandleObject holder, HandleShape shape,
  1.2471 +                                void *returnAddr)
  1.2472 +{
  1.2473 +    JS_ASSERT(obj->isNative());
  1.2474 +
  1.2475 +    MacroAssembler masm(cx, ion, script_, pc_);
  1.2476 +    RepatchStubAppender attacher(*this);
  1.2477 +
  1.2478 +    Label failure;
  1.2479 +    masm.branchPtr(Assembler::NotEqual,
  1.2480 +                   Address(object(), JSObject::offsetOfShape()),
  1.2481 +                   ImmGCPtr(obj->lastProperty()),
  1.2482 +                   &failure);
  1.2483 +
  1.2484 +    if (!GenerateCallSetter(cx, ion, masm, attacher, obj, holder, shape, strict(),
  1.2485 +                            object(), value(), &failure, liveRegs_, returnAddr))
  1.2486 +    {
  1.2487 +        return false;
  1.2488 +    }
  1.2489 +
  1.2490 +    // Rejoin jump.
  1.2491 +    attacher.jumpRejoin(masm);
  1.2492 +
  1.2493 +    // Jump to next stub.
  1.2494 +    masm.bind(&failure);
  1.2495 +    attacher.jumpNextStub(masm);
  1.2496 +
  1.2497 +    return linkAndAttachStub(cx, masm, attacher, ion, "setter call");
  1.2498 +}
  1.2499 +
  1.2500 +static void
  1.2501 +GenerateAddSlot(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &attacher,
  1.2502 +                JSObject *obj, Shape *oldShape, Register object, ConstantOrRegister value,
  1.2503 +                bool checkTypeset)
  1.2504 +{
  1.2505 +    JS_ASSERT(obj->isNative());
  1.2506 +
  1.2507 +    Label failures;
  1.2508 +
  1.2509 +    // Guard the type of the object
  1.2510 +    masm.branchPtr(Assembler::NotEqual, Address(object, JSObject::offsetOfType()),
  1.2511 +                   ImmGCPtr(obj->type()), &failures);
  1.2512 +
  1.2513 +    // Guard shapes along prototype chain.
  1.2514 +    masm.branchTestObjShape(Assembler::NotEqual, object, oldShape, &failures);
  1.2515 +
  1.2516 +    Label failuresPopObject;
  1.2517 +    masm.push(object);    // save object reg because we clobber it
  1.2518 +
  1.2519 +    // Guard that the incoming value is in the type set for the property
  1.2520 +    // if a type barrier is required.
  1.2521 +    if (checkTypeset) {
  1.2522 +        TypedOrValueRegister valReg = value.reg();
  1.2523 +        types::TypeObject *type = obj->type();
  1.2524 +        types::HeapTypeSet *propTypes = type->maybeGetProperty(obj->lastProperty()->propid());
  1.2525 +        JS_ASSERT(propTypes);
  1.2526 +        JS_ASSERT(!propTypes->unknown());
  1.2527 +
  1.2528 +        Register scratchReg = object;
  1.2529 +        masm.guardTypeSet(valReg, propTypes, scratchReg, &failuresPopObject);
  1.2530 +        masm.loadPtr(Address(StackPointer, 0), object);
  1.2531 +    }
  1.2532 +
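          +    // Guard the shape of every object on the prototype chain. If a prototype
          +    // later acquires this property (e.g. gains a setter), its shape changes
          +    // and these guards reject the stub.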
  1.2533 +    JSObject *proto = obj->getProto();
  1.2534 +    Register protoReg = object;
  1.2535 +    while (proto) {
  1.2536 +        Shape *protoShape = proto->lastProperty();
  1.2537 +
  1.2538 +        // load next prototype
  1.2539 +        masm.loadObjProto(protoReg, protoReg);
  1.2540 +
  1.2541 +        // Ensure that its shape matches.
  1.2542 +        masm.branchTestObjShape(Assembler::NotEqual, protoReg, protoShape, &failuresPopObject);
  1.2543 +
  1.2544 +        proto = proto->getProto();
  1.2545 +    }
  1.2546 +
  1.2547 +    masm.pop(object);     // restore object reg
  1.2548 +
  1.2549 +    // Changing object shape.  Write the object's new shape.
  1.2550 +    Shape *newShape = obj->lastProperty();
  1.2551 +    Address shapeAddr(object, JSObject::offsetOfShape());
  1.2552 +    if (cx->zone()->needsBarrier())
  1.2553 +        masm.callPreBarrier(shapeAddr, MIRType_Shape);
  1.2554 +    masm.storePtr(ImmGCPtr(newShape), shapeAddr);
  1.2555 +
  1.2556 +    // Set the value on the object. Since this is an add, obj->lastProperty()
  1.2557 +    // must be the shape of the property we are adding.
  1.2558 +    if (obj->isFixedSlot(newShape->slot())) {
  1.2559 +        Address addr(object, JSObject::getFixedSlotOffset(newShape->slot()));
  1.2560 +        masm.storeConstantOrRegister(value, addr);
  1.2561 +    } else {
  1.2562 +        Register slotsReg = object;
  1.2563 +
  1.2564 +        masm.loadPtr(Address(object, JSObject::offsetOfSlots()), slotsReg);
  1.2565 +
  1.2566 +        Address addr(slotsReg, obj->dynamicSlotIndex(newShape->slot()) * sizeof(Value));
  1.2567 +        masm.storeConstantOrRegister(value, addr);
  1.2568 +    }
  1.2569 +
  1.2570 +    // Success.
  1.2571 +    attacher.jumpRejoin(masm);
  1.2572 +
  1.2573 +    // Failure.
  1.2574 +    masm.bind(&failuresPopObject);
  1.2575 +    masm.pop(object);
  1.2576 +    masm.bind(&failures);
  1.2577 +
  1.2578 +    attacher.jumpNextStub(masm);
  1.2579 +}
  1.2580 +
  1.2581 +bool
  1.2582 +SetPropertyIC::attachAddSlot(JSContext *cx, IonScript *ion, JSObject *obj, HandleShape oldShape,
  1.2583 +                             bool checkTypeset)
  1.2584 +{
  1.2585 +    JS_ASSERT_IF(!needsTypeBarrier(), !checkTypeset);
  1.2586 +
  1.2587 +    MacroAssembler masm(cx, ion);
  1.2588 +    RepatchStubAppender attacher(*this);
  1.2589 +    GenerateAddSlot(cx, masm, attacher, obj, oldShape, object(), value(), checkTypeset);
  1.2590 +    return linkAndAttachStub(cx, masm, attacher, ion, "adding");
  1.2591 +}
  1.2592 +
  1.2593 +static bool
  1.2594 +CanInlineSetPropTypeCheck(JSObject *obj, jsid id, ConstantOrRegister val, bool *checkTypeset)
  1.2595 +{
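          +    // Decide how the type barrier for this property write can be handled:
          +    // return false if no stub should be attached at all, otherwise set
          +    // *checkTypeset to whether the stub needs a runtime typeset guard.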
  1.2596 +    bool shouldCheck = false;
  1.2597 +    types::TypeObject *type = obj->type();
  1.2598 +    if (!type->unknownProperties()) {
  1.2599 +        types::HeapTypeSet *propTypes = type->maybeGetProperty(id);
  1.2600 +        if (!propTypes)
  1.2601 +            return false;
  1.2602 +        if (!propTypes->unknown()) {
  1.2603 +            shouldCheck = true;
  1.2604 +            if (val.constant()) {
   1.2605 +                // If the input is a constant, don't bother attaching if the barrier would always fail.
  1.2606 +                if (!propTypes->hasType(types::GetValueType(val.value())))
  1.2607 +                    return false;
  1.2608 +                shouldCheck = false;
  1.2609 +            } else {
  1.2610 +                TypedOrValueRegister reg = val.reg();
   1.2611 +                // We can do the same trick as above for specialized registers holding
   1.2612 +                // primitive types. TI's handling of objects is complicated enough to
   1.2613 +                // warrant a runtime check, as we can't statically handle the case where
   1.2614 +                // the typeset contains the specific object but doesn't have ANYOBJECT set.
  1.2615 +                if (reg.hasTyped() && reg.type() != MIRType_Object) {
  1.2616 +                    JSValueType valType = ValueTypeFromMIRType(reg.type());
  1.2617 +                    if (!propTypes->hasType(types::Type::PrimitiveType(valType)))
  1.2618 +                        return false;
  1.2619 +                    shouldCheck = false;
  1.2620 +                }
  1.2621 +            }
  1.2622 +        }
  1.2623 +    }
  1.2624 +
  1.2625 +    *checkTypeset = shouldCheck;
  1.2626 +    return true;
  1.2627 +}
  1.2628 +
  1.2629 +static bool
  1.2630 +IsPropertySetInlineable(HandleObject obj, HandleId id, MutableHandleShape pshape,
  1.2631 +                        ConstantOrRegister val, bool needsTypeBarrier, bool *checkTypeset)
  1.2632 +{
  1.2633 +    JS_ASSERT(obj->isNative());
  1.2634 +
  1.2635 +    // Do a pure non-proto chain climbing lookup. See note in
  1.2636 +    // CanAttachNativeGetProp.
  1.2637 +    pshape.set(obj->nativeLookupPure(id));
  1.2638 +
  1.2639 +    if (!pshape)
  1.2640 +        return false;
  1.2641 +
  1.2642 +    if (!pshape->hasSlot())
  1.2643 +        return false;
  1.2644 +
  1.2645 +    if (!pshape->hasDefaultSetter())
  1.2646 +        return false;
  1.2647 +
  1.2648 +    if (!pshape->writable())
  1.2649 +        return false;
  1.2650 +
  1.2651 +    if (needsTypeBarrier)
  1.2652 +        return CanInlineSetPropTypeCheck(obj, id, val, checkTypeset);
  1.2653 +
  1.2654 +    return true;
  1.2655 +}
  1.2656 +
  1.2657 +static bool
  1.2658 +IsPropertyAddInlineable(HandleObject obj, HandleId id, ConstantOrRegister val, uint32_t oldSlots,
  1.2659 +                        HandleShape oldShape, bool needsTypeBarrier, bool *checkTypeset)
  1.2660 +{
  1.2661 +    JS_ASSERT(obj->isNative());
  1.2662 +
  1.2663 +    // If the shape of the object did not change, then this was not an add.
  1.2664 +    if (obj->lastProperty() == oldShape)
  1.2665 +        return false;
  1.2666 +
  1.2667 +    Shape *shape = obj->nativeLookupPure(id);
  1.2668 +    if (!shape || shape->inDictionary() || !shape->hasSlot() || !shape->hasDefaultSetter())
  1.2669 +        return false;
  1.2670 +
  1.2671 +    // If we have a shape at this point and the object's shape changed, then
  1.2672 +    // the shape must be the one we just added.
  1.2673 +    JS_ASSERT(shape == obj->lastProperty());
  1.2674 +
   1.2675 +    // If the object has a non-default resolve hook, don't inline.
  1.2676 +    if (obj->getClass()->resolve != JS_ResolveStub)
  1.2677 +        return false;
  1.2678 +
  1.2679 +    // Likewise for a non-default addProperty hook, since we'll need
  1.2680 +    // to invoke it.
  1.2681 +    if (obj->getClass()->addProperty != JS_PropertyStub)
  1.2682 +        return false;
  1.2683 +
  1.2684 +    if (!obj->nonProxyIsExtensible() || !shape->writable())
  1.2685 +        return false;
  1.2686 +
   1.2687 +    // Walk up the object's prototype chain and ensure that all prototypes
   1.2688 +    // are native, and that none of them defines a getter or setter
   1.2689 +    // on the property.
  1.2690 +    for (JSObject *proto = obj->getProto(); proto; proto = proto->getProto()) {
   1.2691 +        // If the prototype is non-native, don't optimize.
  1.2692 +        if (!proto->isNative())
  1.2693 +            return false;
  1.2694 +
   1.2695 +        // If the prototype defines this property in a non-plain way, don't optimize.
  1.2696 +        Shape *protoShape = proto->nativeLookupPure(id);
  1.2697 +        if (protoShape && !protoShape->hasDefaultSetter())
  1.2698 +            return false;
  1.2699 +
  1.2700 +        // Otherwise, if there's no such property, watch out for a resolve
  1.2701 +        // hook that would need to be invoked and thus prevent inlining of
  1.2702 +        // property addition.
  1.2703 +        if (proto->getClass()->resolve != JS_ResolveStub)
   1.2704 +            return false;
  1.2705 +    }
  1.2706 +
   1.2707 +    // Only add an IC entry if the number of dynamic slots didn't change when
   1.2708 +    // the shape changed. We need to ensure that a shape change for a subsequent
   1.2709 +    // object won't involve reallocating the slot array.
  1.2710 +    if (obj->numDynamicSlots() != oldSlots)
  1.2711 +        return false;
  1.2712 +
  1.2713 +    if (needsTypeBarrier)
  1.2714 +        return CanInlineSetPropTypeCheck(obj, id, val, checkTypeset);
  1.2715 +
  1.2716 +    *checkTypeset = false;
  1.2717 +    return true;
  1.2718 +}
  1.2719 +
  1.2720 +static SetPropertyIC::NativeSetPropCacheability
  1.2721 +CanAttachNativeSetProp(HandleObject obj, HandleId id, ConstantOrRegister val,
  1.2722 +                       bool needsTypeBarrier, MutableHandleObject holder,
  1.2723 +                       MutableHandleShape shape, bool *checkTypeset)
  1.2724 +{
  1.2725 +    if (!obj->isNative())
  1.2726 +        return SetPropertyIC::CanAttachNone;
  1.2727 +
  1.2728 +    // See if the property exists on the object.
  1.2729 +    if (IsPropertySetInlineable(obj, id, shape, val, needsTypeBarrier, checkTypeset))
  1.2730 +        return SetPropertyIC::CanAttachSetSlot;
  1.2731 +
  1.2732 +    // If we couldn't find the property on the object itself, do a full, but
  1.2733 +    // still pure lookup for setters.
  1.2734 +    if (!LookupPropertyPure(obj, id, holder.address(), shape.address()))
  1.2735 +        return SetPropertyIC::CanAttachNone;
  1.2736 +
  1.2737 +    // If the object doesn't have the property, we don't know if we can attach
  1.2738 +    // a stub to add the property until we do the VM call to add. If the
  1.2739 +    // property exists as a data property on the prototype, we should add
  1.2740 +    // a new, shadowing property.
  1.2741 +    if (!shape || (obj != holder && shape->hasDefaultSetter() && shape->hasSlot()))
  1.2742 +        return SetPropertyIC::MaybeCanAttachAddSlot;
  1.2743 +
  1.2744 +    if (IsCacheableSetPropCallPropertyOp(obj, holder, shape) ||
  1.2745 +        IsCacheableSetPropCallNative(obj, holder, shape))
  1.2746 +    {
  1.2747 +        return SetPropertyIC::CanAttachCallSetter;
  1.2748 +    }
  1.2749 +
  1.2750 +    return SetPropertyIC::CanAttachNone;
  1.2751 +}
  1.2752 +
  1.2753 +bool
  1.2754 +SetPropertyIC::update(JSContext *cx, size_t cacheIndex, HandleObject obj,
  1.2755 +                      HandleValue value)
  1.2756 +{
  1.2757 +    void *returnAddr;
  1.2758 +    RootedScript script(cx, GetTopIonJSScript(cx, &returnAddr));
  1.2759 +    IonScript *ion = script->ionScript();
  1.2760 +    SetPropertyIC &cache = ion->getCache(cacheIndex).toSetProperty();
  1.2761 +    RootedPropertyName name(cx, cache.name());
  1.2762 +    RootedId id(cx, AtomToId(name));
  1.2763 +
   1.2764 +    // Stop generating new stubs once we hit the stub count limit; see
   1.2765 +    // GetPropertyCache.
  1.2766 +    bool inlinable = cache.canAttachStub() && !obj->watched();
  1.2767 +    NativeSetPropCacheability canCache = CanAttachNone;
  1.2768 +    bool addedSetterStub = false;
  1.2769 +    if (inlinable) {
  1.2770 +        if (!addedSetterStub && obj->is<ProxyObject>()) {
  1.2771 +            if (IsCacheableDOMProxy(obj)) {
  1.2772 +                DOMProxyShadowsResult shadows = GetDOMProxyShadowsCheck()(cx, obj, id);
  1.2773 +                if (shadows == ShadowCheckFailed)
  1.2774 +                    return false;
  1.2775 +                if (shadows == Shadows) {
  1.2776 +                    if (!cache.attachDOMProxyShadowed(cx, ion, obj, returnAddr))
  1.2777 +                        return false;
  1.2778 +                    addedSetterStub = true;
  1.2779 +                } else {
  1.2780 +                    JS_ASSERT(shadows == DoesntShadow || shadows == DoesntShadowUnique);
  1.2781 +                    if (shadows == DoesntShadowUnique)
  1.2782 +                        cache.reset();
  1.2783 +                    if (!cache.attachDOMProxyUnshadowed(cx, ion, obj, returnAddr))
  1.2784 +                        return false;
  1.2785 +                    addedSetterStub = true;
  1.2786 +                }
  1.2787 +            }
  1.2788 +
  1.2789 +            if (!addedSetterStub && !cache.hasGenericProxyStub()) {
  1.2790 +                if (!cache.attachGenericProxy(cx, ion, returnAddr))
  1.2791 +                    return false;
  1.2792 +                addedSetterStub = true;
  1.2793 +            }
  1.2794 +        }
  1.2795 +
  1.2796 +        // Make sure the object de-lazifies its type. We do this here so that
  1.2797 +        // the parallel IC can share code that assumes that native objects all
  1.2798 +        // have a type object.
  1.2799 +        if (obj->isNative() && !obj->getType(cx))
  1.2800 +            return false;
  1.2801 +
  1.2802 +        RootedShape shape(cx);
  1.2803 +        RootedObject holder(cx);
  1.2804 +        bool checkTypeset;
  1.2805 +        canCache = CanAttachNativeSetProp(obj, id, cache.value(), cache.needsTypeBarrier(),
  1.2806 +                                          &holder, &shape, &checkTypeset);
  1.2807 +
  1.2808 +        if (!addedSetterStub && canCache == CanAttachSetSlot) {
  1.2809 +            if (!cache.attachSetSlot(cx, ion, obj, shape, checkTypeset))
  1.2810 +                return false;
  1.2811 +            addedSetterStub = true;
  1.2812 +        }
  1.2813 +
  1.2814 +        if (!addedSetterStub && canCache == CanAttachCallSetter) {
  1.2815 +            if (!cache.attachCallSetter(cx, ion, obj, holder, shape, returnAddr))
  1.2816 +                return false;
  1.2817 +            addedSetterStub = true;
  1.2818 +        }
  1.2819 +    }
  1.2820 +
  1.2821 +    uint32_t oldSlots = obj->numDynamicSlots();
  1.2822 +    RootedShape oldShape(cx, obj->lastProperty());
  1.2823 +
   1.2824 +    // Set/Add the property on the object. The inline caches are now set up for the next execution.
  1.2825 +    if (!SetProperty(cx, obj, name, value, cache.strict(), cache.pc()))
  1.2826 +        return false;
  1.2827 +
   1.2828 +    // If the property did not exist before, we can now try to inline the property add.
  1.2829 +    bool checkTypeset;
  1.2830 +    if (!addedSetterStub && canCache == MaybeCanAttachAddSlot &&
  1.2831 +        IsPropertyAddInlineable(obj, id, cache.value(), oldSlots, oldShape, cache.needsTypeBarrier(),
  1.2832 +                                &checkTypeset))
  1.2833 +    {
  1.2834 +        if (!cache.attachAddSlot(cx, ion, obj, oldShape, checkTypeset))
  1.2835 +            return false;
  1.2836 +    }
  1.2837 +
  1.2838 +    return true;
  1.2839 +}
  1.2840 +
  1.2841 +void
  1.2842 +SetPropertyIC::reset()
  1.2843 +{
  1.2844 +    RepatchIonCache::reset();
  1.2845 +    hasGenericProxyStub_ = false;
  1.2846 +}
  1.2847 +
  1.2848 +bool
  1.2849 +SetPropertyParIC::update(ForkJoinContext *cx, size_t cacheIndex, HandleObject obj,
  1.2850 +                         HandleValue value)
  1.2851 +{
  1.2852 +    JS_ASSERT(cx->isThreadLocal(obj));
  1.2853 +
  1.2854 +    IonScript *ion = GetTopIonJSScript(cx)->parallelIonScript();
  1.2855 +    SetPropertyParIC &cache = ion->getCache(cacheIndex).toSetPropertyPar();
  1.2856 +
  1.2857 +    RootedValue v(cx, value);
  1.2858 +    RootedId id(cx, AtomToId(cache.name()));
  1.2859 +
   1.2860 +    // Avoid unnecessary locking if we cannot attach stubs.
  1.2861 +    if (!cache.canAttachStub()) {
  1.2862 +        return baseops::SetPropertyHelper<ParallelExecution>(
  1.2863 +            cx, obj, obj, id, baseops::Qualified, &v, cache.strict());
  1.2864 +    }
  1.2865 +
  1.2866 +    SetPropertyIC::NativeSetPropCacheability canCache = SetPropertyIC::CanAttachNone;
  1.2867 +    bool attachedStub = false;
  1.2868 +
  1.2869 +    {
  1.2870 +        // See note about locking context in GetPropertyParIC::update.
  1.2871 +        LockedJSContext ncx(cx);
  1.2872 +
  1.2873 +        if (cache.canAttachStub()) {
  1.2874 +            bool alreadyStubbed;
  1.2875 +            if (!cache.hasOrAddStubbedShape(ncx, obj->lastProperty(), &alreadyStubbed))
  1.2876 +                return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
  1.2877 +            if (alreadyStubbed) {
  1.2878 +                return baseops::SetPropertyHelper<ParallelExecution>(
  1.2879 +                    cx, obj, obj, id, baseops::Qualified, &v, cache.strict());
  1.2880 +            }
  1.2881 +
  1.2882 +            // If the object has a lazy type, we need to de-lazify it, but
  1.2883 +            // this is not safe in parallel.
  1.2884 +            if (obj->hasLazyType())
  1.2885 +                return false;
  1.2886 +
  1.2887 +            {
  1.2888 +                RootedShape shape(cx);
  1.2889 +                RootedObject holder(cx);
  1.2890 +                bool checkTypeset;
  1.2891 +                canCache = CanAttachNativeSetProp(obj, id, cache.value(), cache.needsTypeBarrier(),
  1.2892 +                                                  &holder, &shape, &checkTypeset);
  1.2893 +
  1.2894 +                if (canCache == SetPropertyIC::CanAttachSetSlot) {
  1.2895 +                    if (!cache.attachSetSlot(ncx, ion, obj, shape, checkTypeset))
  1.2896 +                        return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
  1.2897 +                    attachedStub = true;
  1.2898 +                }
  1.2899 +            }
  1.2900 +        }
  1.2901 +    }
  1.2902 +
  1.2903 +    uint32_t oldSlots = obj->numDynamicSlots();
  1.2904 +    RootedShape oldShape(cx, obj->lastProperty());
  1.2905 +
  1.2906 +    if (!baseops::SetPropertyHelper<ParallelExecution>(cx, obj, obj, id, baseops::Qualified, &v,
  1.2907 +                                                       cache.strict()))
  1.2908 +    {
  1.2909 +        return false;
  1.2910 +    }
  1.2911 +
  1.2912 +    bool checkTypeset;
  1.2913 +    if (!attachedStub && canCache == SetPropertyIC::MaybeCanAttachAddSlot &&
  1.2914 +        IsPropertyAddInlineable(obj, id, cache.value(), oldSlots, oldShape, cache.needsTypeBarrier(),
  1.2915 +                                &checkTypeset))
  1.2916 +    {
  1.2917 +        LockedJSContext ncx(cx);
  1.2918 +        if (cache.canAttachStub() && !cache.attachAddSlot(ncx, ion, obj, oldShape, checkTypeset))
  1.2919 +            return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
  1.2920 +    }
  1.2921 +
  1.2922 +    return true;
  1.2923 +}
  1.2924 +
  1.2925 +bool
  1.2926 +SetPropertyParIC::attachSetSlot(LockedJSContext &cx, IonScript *ion, JSObject *obj, Shape *shape,
  1.2927 +                                bool checkTypeset)
  1.2928 +{
  1.2929 +    MacroAssembler masm(cx, ion);
  1.2930 +    DispatchStubPrepender attacher(*this);
  1.2931 +    GenerateSetSlot(cx, masm, attacher, obj, shape, object(), value(), needsTypeBarrier(),
  1.2932 +                    checkTypeset);
  1.2933 +    return linkAndAttachStub(cx, masm, attacher, ion, "parallel setting");
  1.2934 +}
  1.2935 +
  1.2936 +bool
  1.2937 +SetPropertyParIC::attachAddSlot(LockedJSContext &cx, IonScript *ion, JSObject *obj, Shape *oldShape,
  1.2938 +                                bool checkTypeset)
  1.2939 +{
  1.2940 +    JS_ASSERT_IF(!needsTypeBarrier(), !checkTypeset);
  1.2941 +
  1.2942 +    MacroAssembler masm(cx, ion);
  1.2943 +    DispatchStubPrepender attacher(*this);
  1.2944 +    GenerateAddSlot(cx, masm, attacher, obj, oldShape, object(), value(), checkTypeset);
  1.2945 +    return linkAndAttachStub(cx, masm, attacher, ion, "parallel adding");
  1.2946 +}
  1.2947 +
  1.2948 +const size_t GetElementIC::MAX_FAILED_UPDATES = 16;
  1.2949 +
  1.2950 +/* static */ bool
  1.2951 +GetElementIC::canAttachGetProp(JSObject *obj, const Value &idval, jsid id)
  1.2952 +{
  1.2953 +    uint32_t dummy;
  1.2954 +    return (obj->isNative() &&
  1.2955 +            idval.isString() &&
  1.2956 +            JSID_IS_ATOM(id) &&
  1.2957 +            !JSID_TO_ATOM(id)->isIndex(&dummy));
  1.2958 +}
  1.2959 +
  1.2960 +static bool
  1.2961 +EqualStringsHelper(JSString *str1, JSString *str2)
  1.2962 +{
  1.2963 +    JS_ASSERT(str1->isAtom());
  1.2964 +    JS_ASSERT(!str2->isAtom());
  1.2965 +    JS_ASSERT(str1->length() == str2->length());
  1.2966 +
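          +    // Editor's note: with no context available here, getChars can fail (e.g. when
          +    // the input is a rope that cannot be flattened); returning false is safe, since
          +    // the stub treats it as "not equal" and falls through to its failure path.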
  1.2967 +    const jschar *chars = str2->getChars(nullptr);
  1.2968 +    if (!chars)
  1.2969 +        return false;
  1.2970 +    return mozilla::PodEqual(str1->asAtom().chars(), chars, str1->length());
  1.2971 +}
  1.2972 +
  1.2973 +bool
  1.2974 +GetElementIC::attachGetProp(JSContext *cx, IonScript *ion, HandleObject obj,
  1.2975 +                            const Value &idval, HandlePropertyName name,
  1.2976 +                            void *returnAddr)
  1.2977 +{
  1.2978 +    JS_ASSERT(index().reg().hasValue());
  1.2979 +
  1.2980 +    RootedObject holder(cx);
  1.2981 +    RootedShape shape(cx);
  1.2982 +
  1.2983 +    GetPropertyIC::NativeGetPropCacheability canCache =
  1.2984 +        CanAttachNativeGetProp(cx, *this, obj, name, &holder, &shape,
   1.2985 +                               /* skipArrayLen = */ true);
  1.2986 +
  1.2987 +    bool cacheable = canCache == GetPropertyIC::CanAttachReadSlot ||
  1.2988 +                     (canCache == GetPropertyIC::CanAttachCallGetter &&
  1.2989 +                      output().hasValue());
  1.2990 +
  1.2991 +    if (!cacheable) {
  1.2992 +        IonSpew(IonSpew_InlineCaches, "GETELEM uncacheable property");
  1.2993 +        return true;
  1.2994 +    }
  1.2995 +
  1.2996 +    JS_ASSERT(idval.isString());
  1.2997 +    JS_ASSERT(idval.toString()->length() == name->length());
  1.2998 +
  1.2999 +    Label failures;
  1.3000 +    MacroAssembler masm(cx, ion);
  1.3001 +
  1.3002 +    // Ensure the index is a string.
  1.3003 +    ValueOperand val = index().reg().valueReg();
  1.3004 +    masm.branchTestString(Assembler::NotEqual, val, &failures);
  1.3005 +
  1.3006 +    Register scratch = output().valueReg().scratchReg();
  1.3007 +    masm.unboxString(val, scratch);
  1.3008 +
  1.3009 +    Label equal;
  1.3010 +    masm.branchPtr(Assembler::Equal, scratch, ImmGCPtr(name), &equal);
  1.3011 +
  1.3012 +    // The pointers are not equal, so if the input string is also an atom it
  1.3013 +    // must be a different string.
  1.3014 +    masm.loadPtr(Address(scratch, JSString::offsetOfLengthAndFlags()), scratch);
  1.3015 +    masm.branchTest32(Assembler::NonZero, scratch, Imm32(JSString::ATOM_BIT), &failures);
  1.3016 +
  1.3017 +    // Check the length.
  1.3018 +    masm.rshiftPtr(Imm32(JSString::LENGTH_SHIFT), scratch);
  1.3019 +    masm.branch32(Assembler::NotEqual, scratch, Imm32(name->length()), &failures);
  1.3020 +
  1.3021 +    // We have a non-atomized string with the same length. For now call a helper
  1.3022 +    // function to do the comparison.
  1.3023 +    RegisterSet volatileRegs = RegisterSet::Volatile();
  1.3024 +    masm.PushRegsInMask(volatileRegs);
  1.3025 +
  1.3026 +    Register objReg = object();
  1.3027 +    JS_ASSERT(objReg != scratch);
  1.3028 +
  1.3029 +    if (!volatileRegs.has(objReg))
  1.3030 +        masm.push(objReg);
  1.3031 +
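          +    // Set up the ABI call: the atom goes in the first argument (clobbering objReg,
          +    // which was saved above if it was not already pushed with the volatile set), and
          +    // the input string is re-unboxed into scratch for the second argument.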
  1.3032 +    masm.setupUnalignedABICall(2, scratch);
  1.3033 +    masm.movePtr(ImmGCPtr(name), objReg);
  1.3034 +    masm.passABIArg(objReg);
  1.3035 +    masm.unboxString(val, scratch);
  1.3036 +    masm.passABIArg(scratch);
  1.3037 +    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, EqualStringsHelper));
  1.3038 +    masm.mov(ReturnReg, scratch);
  1.3039 +
  1.3040 +    if (!volatileRegs.has(objReg))
  1.3041 +        masm.pop(objReg);
  1.3042 +
  1.3043 +    RegisterSet ignore = RegisterSet();
  1.3044 +    ignore.add(scratch);
  1.3045 +    masm.PopRegsInMaskIgnore(volatileRegs, ignore);
  1.3046 +
  1.3047 +    masm.branchIfFalseBool(scratch, &failures);
  1.3048 +    masm.bind(&equal);
  1.3049 +
  1.3050 +    RepatchStubAppender attacher(*this);
  1.3051 +    if (canCache == GetPropertyIC::CanAttachReadSlot) {
  1.3052 +        GenerateReadSlot(cx, ion, masm, attacher, obj, holder, shape, object(), output(),
  1.3053 +                         &failures);
  1.3054 +    } else {
  1.3055 +        JS_ASSERT(canCache == GetPropertyIC::CanAttachCallGetter);
  1.3056 +        // Set the frame for bailout safety of the OOL call.
  1.3057 +        if (!GenerateCallGetter(cx, ion, masm, attacher, obj, name, holder, shape, liveRegs_,
  1.3058 +                                object(), output(), returnAddr, &failures))
  1.3059 +        {
  1.3060 +            return false;
  1.3061 +        }
  1.3062 +    }
  1.3063 +
  1.3064 +    return linkAndAttachStub(cx, masm, attacher, ion, "property");
  1.3065 +}
  1.3066 +
  1.3067 +/* static */ bool
  1.3068 +GetElementIC::canAttachDenseElement(JSObject *obj, const Value &idval)
  1.3069 +{
  1.3070 +    return obj->isNative() && idval.isInt32();
  1.3071 +}
  1.3072 +
  1.3073 +static bool
  1.3074 +GenerateDenseElement(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &attacher,
  1.3075 +                     JSObject *obj, const Value &idval, Register object,
  1.3076 +                     ConstantOrRegister index, TypedOrValueRegister output)
  1.3077 +{
  1.3078 +    JS_ASSERT(GetElementIC::canAttachDenseElement(obj, idval));
  1.3079 +
  1.3080 +    Label failures;
  1.3081 +
  1.3082 +    // Guard object's shape.
  1.3083 +    RootedShape shape(cx, obj->lastProperty());
  1.3084 +    if (!shape)
  1.3085 +        return false;
  1.3086 +    masm.branchTestObjShape(Assembler::NotEqual, object, shape, &failures);
  1.3087 +
  1.3088 +    // Ensure the index is an int32 value.
  1.3089 +    Register indexReg = InvalidReg;
  1.3090 +
  1.3091 +    if (index.reg().hasValue()) {
  1.3092 +        indexReg = output.scratchReg().gpr();
  1.3093 +        JS_ASSERT(indexReg != InvalidReg);
  1.3094 +        ValueOperand val = index.reg().valueReg();
  1.3095 +
  1.3096 +        masm.branchTestInt32(Assembler::NotEqual, val, &failures);
  1.3097 +
  1.3098 +        // Unbox the index.
  1.3099 +        masm.unboxInt32(val, indexReg);
  1.3100 +    } else {
  1.3101 +        JS_ASSERT(!index.reg().typedReg().isFloat());
  1.3102 +        indexReg = index.reg().typedReg().gpr();
  1.3103 +    }
  1.3104 +
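          +    // The object register is reused below to hold the elements pointer, so push it
          +    // first; it is restored on both the rejoin and failure paths.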
  1.3105 +    // Load elements vector.
  1.3106 +    masm.push(object);
  1.3107 +    masm.loadPtr(Address(object, JSObject::offsetOfElements()), object);
  1.3108 +
  1.3109 +    Label hole;
  1.3110 +
  1.3111 +    // Guard on the initialized length.
  1.3112 +    Address initLength(object, ObjectElements::offsetOfInitializedLength());
  1.3113 +    masm.branch32(Assembler::BelowOrEqual, initLength, indexReg, &hole);
  1.3114 +
  1.3115 +    // Check for holes & load the value.
  1.3116 +    masm.loadElementTypedOrValue(BaseIndex(object, indexReg, TimesEight),
  1.3117 +                                 output, true, &hole);
  1.3118 +
  1.3119 +    masm.pop(object);
  1.3120 +    attacher.jumpRejoin(masm);
  1.3121 +
  1.3122 +    // All failures flow to here.
  1.3123 +    masm.bind(&hole);
  1.3124 +    masm.pop(object);
  1.3125 +    masm.bind(&failures);
  1.3126 +
  1.3127 +    attacher.jumpNextStub(masm);
  1.3128 +
  1.3129 +    return true;
  1.3130 +}
  1.3131 +
  1.3132 +bool
  1.3133 +GetElementIC::attachDenseElement(JSContext *cx, IonScript *ion, JSObject *obj, const Value &idval)
  1.3134 +{
  1.3135 +    MacroAssembler masm(cx, ion);
  1.3136 +    RepatchStubAppender attacher(*this);
  1.3137 +    if (!GenerateDenseElement(cx, masm, attacher, obj, idval, object(), index(), output()))
  1.3138 +        return false;
  1.3139 +
  1.3140 +    setHasDenseStub();
  1.3141 +    return linkAndAttachStub(cx, masm, attacher, ion, "dense array");
  1.3142 +}
  1.3143 +
  1.3144 +/* static */ bool
  1.3145 +GetElementIC::canAttachTypedArrayElement(JSObject *obj, const Value &idval,
  1.3146 +                                         TypedOrValueRegister output)
  1.3147 +{
  1.3148 +    if (!obj->is<TypedArrayObject>())
  1.3149 +        return false;
  1.3150 +
  1.3151 +    if (!idval.isInt32() && !idval.isString())
  1.3152 +        return false;
  1.3153 +
  1.3154 +
   1.3155 +    // Don't emit a stub if the access is out of bounds. We want to make
   1.3156 +    // certain that we monitor the type coming out of the typed array when
   1.3157 +    // we generate the stub. Out-of-bounds accesses will hit the fallback
   1.3158 +    // path.
  1.3159 +    uint32_t index;
  1.3160 +    if (idval.isInt32()) {
  1.3161 +        index = idval.toInt32();
  1.3162 +    } else {
  1.3163 +        index = GetIndexFromString(idval.toString());
  1.3164 +        if (index == UINT32_MAX)
  1.3165 +            return false;
  1.3166 +    }
  1.3167 +    if (index >= obj->as<TypedArrayObject>().length())
  1.3168 +        return false;
  1.3169 +
   1.3170 +    // The output register is not yet specialized as a float register; the only
   1.3171 +    // way to accept float typed arrays for now is to return a Value type.
  1.3172 +    uint32_t arrayType = obj->as<TypedArrayObject>().type();
  1.3173 +    if (arrayType == ScalarTypeDescr::TYPE_FLOAT32 ||
  1.3174 +        arrayType == ScalarTypeDescr::TYPE_FLOAT64)
  1.3175 +    {
  1.3176 +        return output.hasValue();
  1.3177 +    }
  1.3178 +
  1.3179 +    return output.hasValue() || !output.typedReg().isFloat();
  1.3180 +}
  1.3181 +
  1.3182 +static void
  1.3183 +GenerateGetTypedArrayElement(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &attacher,
  1.3184 +                             TypedArrayObject *tarr, const Value &idval, Register object,
  1.3185 +                             ConstantOrRegister index, TypedOrValueRegister output,
  1.3186 +                             bool allowDoubleResult)
  1.3187 +{
  1.3188 +    JS_ASSERT(GetElementIC::canAttachTypedArrayElement(tarr, idval, output));
  1.3189 +
  1.3190 +    Label failures;
  1.3191 +
  1.3192 +    // The array type is the object within the table of typed array classes.
  1.3193 +    int arrayType = tarr->type();
  1.3194 +
  1.3195 +    // Guard on the shape.
  1.3196 +    Shape *shape = tarr->lastProperty();
  1.3197 +    masm.branchTestObjShape(Assembler::NotEqual, object, shape, &failures);
  1.3198 +
   1.3199 +    // Decide which kind of index the stub should be optimized for.
  1.3200 +    Register tmpReg = output.scratchReg().gpr();
  1.3201 +    JS_ASSERT(tmpReg != InvalidReg);
  1.3202 +    Register indexReg = tmpReg;
  1.3203 +    JS_ASSERT(!index.constant());
  1.3204 +    if (idval.isString()) {
  1.3205 +        JS_ASSERT(GetIndexFromString(idval.toString()) != UINT32_MAX);
  1.3206 +
  1.3207 +        // Part 1: Get the string into a register
  1.3208 +        Register str;
  1.3209 +        if (index.reg().hasValue()) {
  1.3210 +            ValueOperand val = index.reg().valueReg();
  1.3211 +            masm.branchTestString(Assembler::NotEqual, val, &failures);
  1.3212 +
  1.3213 +            str = masm.extractString(val, indexReg);
  1.3214 +        } else {
  1.3215 +            JS_ASSERT(!index.reg().typedReg().isFloat());
  1.3216 +            str = index.reg().typedReg().gpr();
  1.3217 +        }
  1.3218 +
   1.3219 +        // Part 2: Call a helper to translate the string into an index.
  1.3220 +        RegisterSet regs = RegisterSet::Volatile();
  1.3221 +        masm.PushRegsInMask(regs);
  1.3222 +        regs.takeUnchecked(str);
  1.3223 +
  1.3224 +        Register temp = regs.takeGeneral();
  1.3225 +
  1.3226 +        masm.setupUnalignedABICall(1, temp);
  1.3227 +        masm.passABIArg(str);
  1.3228 +        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, GetIndexFromString));
  1.3229 +        masm.mov(ReturnReg, indexReg);
  1.3230 +
  1.3231 +        RegisterSet ignore = RegisterSet();
  1.3232 +        ignore.add(indexReg);
  1.3233 +        masm.PopRegsInMaskIgnore(RegisterSet::Volatile(), ignore);
  1.3234 +
  1.3235 +        masm.branch32(Assembler::Equal, indexReg, Imm32(UINT32_MAX), &failures);
  1.3236 +
  1.3237 +    } else {
  1.3238 +        JS_ASSERT(idval.isInt32());
  1.3239 +
  1.3240 +        if (index.reg().hasValue()) {
  1.3241 +            ValueOperand val = index.reg().valueReg();
  1.3242 +            masm.branchTestInt32(Assembler::NotEqual, val, &failures);
  1.3243 +
  1.3244 +            // Unbox the index.
  1.3245 +            masm.unboxInt32(val, indexReg);
  1.3246 +        } else {
  1.3247 +            JS_ASSERT(!index.reg().typedReg().isFloat());
  1.3248 +            indexReg = index.reg().typedReg().gpr();
  1.3249 +        }
  1.3250 +    }
  1.3251 +
   1.3252 +    // Guard on the typed array's length.
  1.3253 +    Address length(object, TypedArrayObject::lengthOffset());
  1.3254 +    masm.branch32(Assembler::BelowOrEqual, length, indexReg, &failures);
  1.3255 +
  1.3256 +    // Save the object register on the stack in case of failure.
  1.3257 +    Label popAndFail;
  1.3258 +    Register elementReg = object;
  1.3259 +    masm.push(object);
  1.3260 +
  1.3261 +    // Load elements vector.
  1.3262 +    masm.loadPtr(Address(object, TypedArrayObject::dataOffset()), elementReg);
  1.3263 +
   1.3264 +    // Load the value. We use an invalid register because the destination
   1.3265 +    // register is necessarily a non-double register.
  1.3266 +    int width = TypedArrayObject::slotWidth(arrayType);
  1.3267 +    BaseIndex source(elementReg, indexReg, ScaleFromElemWidth(width));
  1.3268 +    if (output.hasValue()) {
  1.3269 +        masm.loadFromTypedArray(arrayType, source, output.valueReg(), allowDoubleResult,
  1.3270 +                                elementReg, &popAndFail);
  1.3271 +    } else {
  1.3272 +        masm.loadFromTypedArray(arrayType, source, output.typedReg(), elementReg, &popAndFail);
  1.3273 +    }
  1.3274 +
  1.3275 +    masm.pop(object);
  1.3276 +    attacher.jumpRejoin(masm);
  1.3277 +
  1.3278 +    // Restore the object before continuing to the next stub.
  1.3279 +    masm.bind(&popAndFail);
  1.3280 +    masm.pop(object);
  1.3281 +    masm.bind(&failures);
  1.3282 +
  1.3283 +    attacher.jumpNextStub(masm);
  1.3284 +}
  1.3285 +
  1.3286 +bool
  1.3287 +GetElementIC::attachTypedArrayElement(JSContext *cx, IonScript *ion, TypedArrayObject *tarr,
  1.3288 +                                      const Value &idval)
  1.3289 +{
  1.3290 +    MacroAssembler masm(cx, ion);
  1.3291 +    RepatchStubAppender attacher(*this);
  1.3292 +    GenerateGetTypedArrayElement(cx, masm, attacher, tarr, idval, object(), index(), output(),
  1.3293 +                                 allowDoubleResult());
  1.3294 +    return linkAndAttachStub(cx, masm, attacher, ion, "typed array");
  1.3295 +}
  1.3296 +
  1.3297 +bool
  1.3298 +GetElementIC::attachArgumentsElement(JSContext *cx, IonScript *ion, JSObject *obj)
  1.3299 +{
  1.3300 +    JS_ASSERT(obj->is<ArgumentsObject>());
  1.3301 +
  1.3302 +    Label failures;
  1.3303 +    MacroAssembler masm(cx, ion);
  1.3304 +    RepatchStubAppender attacher(*this);
  1.3305 +
  1.3306 +    Register tmpReg = output().scratchReg().gpr();
  1.3307 +    JS_ASSERT(tmpReg != InvalidReg);
  1.3308 +
  1.3309 +    const Class *clasp = obj->is<StrictArgumentsObject>() ? &StrictArgumentsObject::class_
  1.3310 +                                                          : &NormalArgumentsObject::class_;
  1.3311 +
  1.3312 +    masm.branchTestObjClass(Assembler::NotEqual, object(), tmpReg, clasp, &failures);
  1.3313 +
   1.3314 +    // Get the initial ArgsObj length value and test whether the length has been overridden.
  1.3315 +    masm.unboxInt32(Address(object(), ArgumentsObject::getInitialLengthSlotOffset()), tmpReg);
  1.3316 +    masm.branchTest32(Assembler::NonZero, tmpReg, Imm32(ArgumentsObject::LENGTH_OVERRIDDEN_BIT),
  1.3317 +                      &failures);
  1.3318 +    masm.rshiftPtr(Imm32(ArgumentsObject::PACKED_BITS_COUNT), tmpReg);
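          +    // The initial-length slot packs (length << PACKED_BITS_COUNT) together with the
          +    // packed bits, so the shift above recovers the actual argument count.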
  1.3319 +
   1.3320 +    // Decide which kind of index the stub should be optimized for.
  1.3321 +    Register indexReg;
  1.3322 +    JS_ASSERT(!index().constant());
  1.3323 +
  1.3324 +    // Check index against length.
  1.3325 +    Label failureRestoreIndex;
  1.3326 +    if (index().reg().hasValue()) {
  1.3327 +        ValueOperand val = index().reg().valueReg();
  1.3328 +        masm.branchTestInt32(Assembler::NotEqual, val, &failures);
  1.3329 +        indexReg = val.scratchReg();
  1.3330 +
  1.3331 +        masm.unboxInt32(val, indexReg);
  1.3332 +        masm.branch32(Assembler::AboveOrEqual, indexReg, tmpReg, &failureRestoreIndex);
  1.3333 +    } else {
  1.3334 +        JS_ASSERT(index().reg().type() == MIRType_Int32);
  1.3335 +        indexReg = index().reg().typedReg().gpr();
  1.3336 +        masm.branch32(Assembler::AboveOrEqual, indexReg, tmpReg, &failures);
  1.3337 +    }
   1.3338 +    // Save indexReg because it needs to be clobbered to check the deleted bit.
  1.3339 +    Label failurePopIndex;
  1.3340 +    masm.push(indexReg);
  1.3341 +
  1.3342 +    // Check if property was deleted on arguments object.
  1.3343 +    masm.loadPrivate(Address(object(), ArgumentsObject::getDataSlotOffset()), tmpReg);
  1.3344 +    masm.loadPtr(Address(tmpReg, offsetof(ArgumentsData, deletedBits)), tmpReg);
  1.3345 +
   1.3346 +    // In indexReg, calculate the index of the word containing the bit: (idx >> logBitsPerWord).
  1.3347 +    const uint32_t shift = FloorLog2<(sizeof(size_t) * JS_BITS_PER_BYTE)>::value;
  1.3348 +    JS_ASSERT(shift == 5 || shift == 6);
  1.3349 +    masm.rshiftPtr(Imm32(shift), indexReg);
  1.3350 +    masm.loadPtr(BaseIndex(tmpReg, indexReg, ScaleFromElemWidth(sizeof(size_t))), tmpReg);
  1.3351 +
   1.3352 +    // Don't bother testing the specific bit; if any bit in the word is set, fail.
  1.3353 +    masm.branchPtr(Assembler::NotEqual, tmpReg, ImmPtr(nullptr), &failurePopIndex);
  1.3354 +
   1.3355 +    // Compute the base address of the arguments data in tmpReg.
  1.3356 +    masm.loadPrivate(Address(object(), ArgumentsObject::getDataSlotOffset()), tmpReg);
  1.3357 +    masm.addPtr(Imm32(ArgumentsData::offsetOfArgs()), tmpReg);
  1.3358 +
  1.3359 +    // Restore original index register value, to use for indexing element.
  1.3360 +    masm.pop(indexReg);
  1.3361 +    BaseIndex elemIdx(tmpReg, indexReg, ScaleFromElemWidth(sizeof(Value)));
  1.3362 +
   1.3363 +    // Ensure the result is not a magic value, and type-check the result.
  1.3364 +    masm.branchTestMagic(Assembler::Equal, elemIdx, &failureRestoreIndex);
  1.3365 +
  1.3366 +    if (output().hasTyped()) {
  1.3367 +        JS_ASSERT(!output().typedReg().isFloat());
  1.3368 +        JS_ASSERT(index().reg().type() == MIRType_Boolean ||
  1.3369 +                  index().reg().type() == MIRType_Int32 ||
  1.3370 +                  index().reg().type() == MIRType_String ||
  1.3371 +                  index().reg().type() == MIRType_Object);
  1.3372 +        masm.branchTestMIRType(Assembler::NotEqual, elemIdx, index().reg().type(),
  1.3373 +                               &failureRestoreIndex);
  1.3374 +    }
  1.3375 +
  1.3376 +    masm.loadTypedOrValue(elemIdx, output());
  1.3377 +
  1.3378 +    // indexReg may need to be reconstructed if it was originally a value.
  1.3379 +    if (index().reg().hasValue())
  1.3380 +        masm.tagValue(JSVAL_TYPE_INT32, indexReg, index().reg().valueReg());
  1.3381 +
  1.3382 +    // Success.
  1.3383 +    attacher.jumpRejoin(masm);
  1.3384 +
  1.3385 +    // Restore the object before continuing to the next stub.
  1.3386 +    masm.bind(&failurePopIndex);
  1.3387 +    masm.pop(indexReg);
  1.3388 +    masm.bind(&failureRestoreIndex);
  1.3389 +    if (index().reg().hasValue())
  1.3390 +        masm.tagValue(JSVAL_TYPE_INT32, indexReg, index().reg().valueReg());
  1.3391 +    masm.bind(&failures);
  1.3392 +    attacher.jumpNextStub(masm);
  1.3393 +
  1.3394 +
  1.3395 +    if (obj->is<StrictArgumentsObject>()) {
  1.3396 +        JS_ASSERT(!hasStrictArgumentsStub_);
  1.3397 +        hasStrictArgumentsStub_ = true;
  1.3398 +        return linkAndAttachStub(cx, masm, attacher, ion, "ArgsObj element (strict)");
  1.3399 +    }
  1.3400 +
  1.3401 +    JS_ASSERT(!hasNormalArgumentsStub_);
  1.3402 +    hasNormalArgumentsStub_ = true;
  1.3403 +    return linkAndAttachStub(cx, masm, attacher, ion, "ArgsObj element (normal)");
  1.3404 +}
  1.3405 +
  1.3406 +bool
  1.3407 +GetElementIC::update(JSContext *cx, size_t cacheIndex, HandleObject obj,
  1.3408 +                     HandleValue idval, MutableHandleValue res)
  1.3409 +{
  1.3410 +    void *returnAddr;
  1.3411 +    IonScript *ion = GetTopIonJSScript(cx, &returnAddr)->ionScript();
  1.3412 +    GetElementIC &cache = ion->getCache(cacheIndex).toGetElement();
  1.3413 +    RootedScript script(cx);
  1.3414 +    jsbytecode *pc;
  1.3415 +    cache.getScriptedLocation(&script, &pc);
  1.3416 +
  1.3417 +    // Override the return value when the script is invalidated (bug 728188).
  1.3418 +    AutoDetectInvalidation adi(cx, res.address(), ion);
  1.3419 +
  1.3420 +    if (cache.isDisabled()) {
  1.3421 +        if (!GetObjectElementOperation(cx, JSOp(*pc), obj, /* wasObject = */true, idval, res))
  1.3422 +            return false;
  1.3423 +        if (!cache.monitoredResult())
  1.3424 +            types::TypeScript::Monitor(cx, script, pc, res);
  1.3425 +        return true;
  1.3426 +    }
  1.3427 +
  1.3428 +    RootedId id(cx);
  1.3429 +    if (!ValueToId<CanGC>(cx, idval, &id))
  1.3430 +        return false;
  1.3431 +
  1.3432 +    bool attachedStub = false;
  1.3433 +    if (cache.canAttachStub()) {
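          +        // Try an arguments-object stub first; per the checks below it requires a
          +        // non-constant, int32-compatible index, a non-float output, and at most
          +        // one stub per arguments flavor (strict or normal).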
  1.3434 +        if (IsOptimizableArgumentsObjectForGetElem(obj, idval) &&
  1.3435 +            !cache.hasArgumentsStub(obj->is<StrictArgumentsObject>()) &&
  1.3436 +            !cache.index().constant() &&
  1.3437 +            (cache.index().reg().hasValue() ||
  1.3438 +             cache.index().reg().type() == MIRType_Int32) &&
  1.3439 +            (cache.output().hasValue() || !cache.output().typedReg().isFloat()))
  1.3440 +        {
  1.3441 +            if (!cache.attachArgumentsElement(cx, ion, obj))
  1.3442 +                return false;
  1.3443 +            attachedStub = true;
  1.3444 +        }
  1.3445 +        if (!attachedStub && cache.monitoredResult() && canAttachGetProp(obj, idval, id)) {
  1.3446 +            RootedPropertyName name(cx, JSID_TO_ATOM(id)->asPropertyName());
  1.3447 +            if (!cache.attachGetProp(cx, ion, obj, idval, name, returnAddr))
  1.3448 +                return false;
  1.3449 +            attachedStub = true;
  1.3450 +        }
  1.3451 +        if (!attachedStub && !cache.hasDenseStub() && canAttachDenseElement(obj, idval)) {
  1.3452 +            if (!cache.attachDenseElement(cx, ion, obj, idval))
  1.3453 +                return false;
  1.3454 +            attachedStub = true;
  1.3455 +        }
  1.3456 +        if (!attachedStub && canAttachTypedArrayElement(obj, idval, cache.output())) {
  1.3457 +            Rooted<TypedArrayObject*> tarr(cx, &obj->as<TypedArrayObject>());
  1.3458 +            if (!cache.attachTypedArrayElement(cx, ion, tarr, idval))
  1.3459 +                return false;
  1.3460 +            attachedStub = true;
  1.3461 +        }
  1.3462 +    }
  1.3463 +
  1.3464 +    if (!GetObjectElementOperation(cx, JSOp(*pc), obj, /* wasObject = */true, idval, res))
  1.3465 +        return false;
  1.3466 +
  1.3467 +    // Disable cache when we reach max stubs or update failed too much.
  1.3468 +    if (!attachedStub) {
  1.3469 +        cache.incFailedUpdates();
  1.3470 +        if (cache.shouldDisable()) {
  1.3471 +            IonSpew(IonSpew_InlineCaches, "Disable inline cache");
  1.3472 +            cache.disable();
  1.3473 +        }
  1.3474 +    } else {
  1.3475 +        cache.resetFailedUpdates();
  1.3476 +    }
  1.3477 +
  1.3478 +    if (!cache.monitoredResult())
  1.3479 +        types::TypeScript::Monitor(cx, script, pc, res);
  1.3480 +    return true;
  1.3481 +}
  1.3482 +
  1.3483 +void
  1.3484 +GetElementIC::reset()
  1.3485 +{
  1.3486 +    RepatchIonCache::reset();
  1.3487 +    hasDenseStub_ = false;
  1.3488 +    hasStrictArgumentsStub_ = false;
  1.3489 +    hasNormalArgumentsStub_ = false;
  1.3490 +}
  1.3491 +
  1.3492 +static bool
  1.3493 +IsDenseElementSetInlineable(JSObject *obj, const Value &idval)
  1.3494 +{
  1.3495 +    if (!obj->is<ArrayObject>())
  1.3496 +        return false;
  1.3497 +
  1.3498 +    if (obj->watched())
  1.3499 +        return false;
  1.3500 +
  1.3501 +    if (!idval.isInt32())
  1.3502 +        return false;
  1.3503 +
   1.3504 +    // The object may have a setter definition that handles a particular integer
   1.3505 +    // write, either directly, via a prototype, or via the target object of a
   1.3506 +    // prototype which is a proxy.
   1.3507 +    // Scan the prototype and shape chain to make sure that this is not the case.
  1.3508 +    JSObject *curObj = obj;
  1.3509 +    while (curObj) {
  1.3510 +        // Ensure object is native.
  1.3511 +        if (!curObj->isNative())
  1.3512 +            return false;
  1.3513 +
  1.3514 +        // Ensure all indexed properties are stored in dense elements.
  1.3515 +        if (curObj->isIndexed())
  1.3516 +            return false;
  1.3517 +
  1.3518 +        curObj = curObj->getProto();
  1.3519 +    }
  1.3520 +
  1.3521 +    return true;
  1.3522 +}
  1.3523 +
  1.3524 +static bool
  1.3525 +IsTypedArrayElementSetInlineable(JSObject *obj, const Value &idval, const Value &value)
  1.3526 +{
  1.3527 +    // Don't bother attaching stubs for assigning strings and objects.
  1.3528 +    return (obj->is<TypedArrayObject>() && idval.isInt32() &&
  1.3529 +            !value.isString() && !value.isObject());
  1.3530 +}
  1.3531 +
  1.3532 +static void
  1.3533 +StoreDenseElement(MacroAssembler &masm, ConstantOrRegister value, Register elements,
  1.3534 +                  BaseIndex target)
  1.3535 +{
  1.3536 +    // If the ObjectElements::CONVERT_DOUBLE_ELEMENTS flag is set, int32 values
  1.3537 +    // have to be converted to double first. If the value is not int32, it can
  1.3538 +    // always be stored directly.
  1.3539 +
  1.3540 +    Address elementsFlags(elements, ObjectElements::offsetOfFlags());
  1.3541 +    if (value.constant()) {
  1.3542 +        Value v = value.value();
  1.3543 +        Label done;
  1.3544 +        if (v.isInt32()) {
  1.3545 +            Label dontConvert;
  1.3546 +            masm.branchTest32(Assembler::Zero, elementsFlags,
  1.3547 +                              Imm32(ObjectElements::CONVERT_DOUBLE_ELEMENTS),
  1.3548 +                              &dontConvert);
  1.3549 +            masm.storeValue(DoubleValue(v.toInt32()), target);
  1.3550 +            masm.jump(&done);
  1.3551 +            masm.bind(&dontConvert);
  1.3552 +        }
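          +        // Reached directly for non-int32 constants, or via dontConvert when no
          +        // double conversion is required: store the constant unchanged.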
  1.3553 +        masm.storeValue(v, target);
  1.3554 +        masm.bind(&done);
  1.3555 +        return;
  1.3556 +    }
  1.3557 +
  1.3558 +    TypedOrValueRegister reg = value.reg();
  1.3559 +    if (reg.hasTyped() && reg.type() != MIRType_Int32) {
  1.3560 +        masm.storeTypedOrValue(reg, target);
  1.3561 +        return;
  1.3562 +    }
  1.3563 +
  1.3564 +    Label convert, storeValue, done;
  1.3565 +    masm.branchTest32(Assembler::NonZero, elementsFlags,
  1.3566 +                      Imm32(ObjectElements::CONVERT_DOUBLE_ELEMENTS),
  1.3567 +                      &convert);
  1.3568 +    masm.bind(&storeValue);
  1.3569 +    masm.storeTypedOrValue(reg, target);
  1.3570 +    masm.jump(&done);
  1.3571 +
  1.3572 +    masm.bind(&convert);
  1.3573 +    if (reg.hasValue()) {
  1.3574 +        masm.branchTestInt32(Assembler::NotEqual, reg.valueReg(), &storeValue);
  1.3575 +        masm.int32ValueToDouble(reg.valueReg(), ScratchFloatReg);
  1.3576 +        masm.storeDouble(ScratchFloatReg, target);
  1.3577 +    } else {
  1.3578 +        JS_ASSERT(reg.type() == MIRType_Int32);
  1.3579 +        masm.convertInt32ToDouble(reg.typedReg().gpr(), ScratchFloatReg);
  1.3580 +        masm.storeDouble(ScratchFloatReg, target);
  1.3581 +    }
  1.3582 +
  1.3583 +    masm.bind(&done);
  1.3584 +}
  1.3585 +
  1.3586 +static bool
  1.3587 +GenerateSetDenseElement(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &attacher,
  1.3588 +                        JSObject *obj, const Value &idval, bool guardHoles, Register object,
  1.3589 +                        ValueOperand indexVal, ConstantOrRegister value, Register tempToUnboxIndex,
  1.3590 +                        Register temp)
  1.3591 +{
  1.3592 +    JS_ASSERT(obj->isNative());
  1.3593 +    JS_ASSERT(idval.isInt32());
  1.3594 +
  1.3595 +    Label failures;
  1.3596 +    Label outOfBounds; // index represents a known hole, or an illegal append
  1.3597 +
  1.3598 +    Label markElem, storeElement; // used if TI protects us from worrying about holes.
  1.3599 +
  1.3600 +    // Guard object is a dense array.
  1.3601 +    Shape *shape = obj->lastProperty();
  1.3602 +    if (!shape)
  1.3603 +        return false;
  1.3604 +    masm.branchTestObjShape(Assembler::NotEqual, object, shape, &failures);
  1.3605 +
  1.3606 +    // Ensure the index is an int32 value.
  1.3607 +    masm.branchTestInt32(Assembler::NotEqual, indexVal, &failures);
  1.3608 +
  1.3609 +    // Unbox the index.
  1.3610 +    Register index = masm.extractInt32(indexVal, tempToUnboxIndex);
  1.3611 +
  1.3612 +    {
  1.3613 +        // Load obj->elements.
  1.3614 +        Register elements = temp;
  1.3615 +        masm.loadPtr(Address(object, JSObject::offsetOfElements()), elements);
  1.3616 +
  1.3617 +        // Compute the location of the element.
  1.3618 +        BaseIndex target(elements, index, TimesEight);
  1.3619 +
  1.3620 +        // If TI cannot help us deal with HOLES by preventing indexed properties
  1.3621 +        // on the prototype chain, we have to be very careful to check for ourselves
  1.3622 +        // to avoid stomping on what should be a setter call. Start by only allowing things
  1.3623 +        // within the initialized length.
  1.3624 +        if (guardHoles) {
  1.3625 +            Address initLength(elements, ObjectElements::offsetOfInitializedLength());
  1.3626 +            masm.branch32(Assembler::BelowOrEqual, initLength, index, &outOfBounds);
  1.3627 +        } else {
  1.3628 +            // Guard that we can increase the initialized length.
  1.3629 +            Address capacity(elements, ObjectElements::offsetOfCapacity());
  1.3630 +            masm.branch32(Assembler::BelowOrEqual, capacity, index, &outOfBounds);
  1.3631 +
  1.3632 +            // Guard on the initialized length.
  1.3633 +            Address initLength(elements, ObjectElements::offsetOfInitializedLength());
  1.3634 +            masm.branch32(Assembler::Below, initLength, index, &outOfBounds);
  1.3635 +
  1.3636 +            // if (initLength == index)
  1.3637 +            masm.branch32(Assembler::NotEqual, initLength, index, &markElem);
  1.3638 +            {
   1.3639 +                // Increase the initialized length.
  1.3640 +                Int32Key newLength(index);
  1.3641 +                masm.bumpKey(&newLength, 1);
  1.3642 +                masm.storeKey(newLength, initLength);
  1.3643 +
  1.3644 +                // Increase length if needed.
  1.3645 +                Label bumpedLength;
  1.3646 +                Address length(elements, ObjectElements::offsetOfLength());
  1.3647 +                masm.branch32(Assembler::AboveOrEqual, length, index, &bumpedLength);
  1.3648 +                masm.storeKey(newLength, length);
  1.3649 +                masm.bind(&bumpedLength);
  1.3650 +
  1.3651 +                // Restore the index.
  1.3652 +                masm.bumpKey(&newLength, -1);
  1.3653 +                masm.jump(&storeElement);
  1.3654 +            }
  1.3655 +            // else
  1.3656 +            masm.bind(&markElem);
  1.3657 +        }
  1.3658 +
  1.3659 +        if (cx->zone()->needsBarrier())
  1.3660 +            masm.callPreBarrier(target, MIRType_Value);
  1.3661 +
  1.3662 +        // Store the value.
  1.3663 +        if (guardHoles)
  1.3664 +            masm.branchTestMagic(Assembler::Equal, target, &failures);
  1.3665 +        else
  1.3666 +            masm.bind(&storeElement);
  1.3667 +        StoreDenseElement(masm, value, elements, target);
  1.3668 +    }
  1.3669 +    attacher.jumpRejoin(masm);
  1.3670 +
  1.3671 +    // All failures flow to here.
  1.3672 +    masm.bind(&outOfBounds);
  1.3673 +    masm.bind(&failures);
  1.3674 +    attacher.jumpNextStub(masm);
  1.3675 +
  1.3676 +    return true;
  1.3677 +}
  1.3678 +
  1.3679 +bool
  1.3680 +SetElementIC::attachDenseElement(JSContext *cx, IonScript *ion, JSObject *obj, const Value &idval)
  1.3681 +{
  1.3682 +    MacroAssembler masm(cx, ion);
  1.3683 +    RepatchStubAppender attacher(*this);
  1.3684 +    if (!GenerateSetDenseElement(cx, masm, attacher, obj, idval,
  1.3685 +                                 guardHoles(), object(), index(),
  1.3686 +                                 value(), tempToUnboxIndex(),
  1.3687 +                                 temp()))
  1.3688 +    {
  1.3689 +        return false;
  1.3690 +    }
  1.3691 +
  1.3692 +    setHasDenseStub();
  1.3693 +    const char *message = guardHoles()            ?
  1.3694 +                            "dense array (holes)" :
  1.3695 +                            "dense array";
  1.3696 +    return linkAndAttachStub(cx, masm, attacher, ion, message);
  1.3697 +}
  1.3698 +
  1.3699 +static bool
  1.3700 +GenerateSetTypedArrayElement(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &attacher,
  1.3701 +                             TypedArrayObject *tarr, Register object,
  1.3702 +                             ValueOperand indexVal, ConstantOrRegister value,
  1.3703 +                             Register tempUnbox, Register temp, FloatRegister tempFloat)
  1.3704 +{
  1.3705 +    Label failures, done, popObjectAndFail;
  1.3706 +
  1.3707 +    // Guard on the shape.
  1.3708 +    Shape *shape = tarr->lastProperty();
  1.3709 +    if (!shape)
  1.3710 +        return false;
  1.3711 +    masm.branchTestObjShape(Assembler::NotEqual, object, shape, &failures);
  1.3712 +
  1.3713 +    // Ensure the index is an int32.
  1.3714 +    masm.branchTestInt32(Assembler::NotEqual, indexVal, &failures);
  1.3715 +    Register index = masm.extractInt32(indexVal, tempUnbox);
  1.3716 +
  1.3717 +    // Guard on the length.
  1.3718 +    Address length(object, TypedArrayObject::lengthOffset());
  1.3719 +    masm.unboxInt32(length, temp);
  1.3720 +    masm.branch32(Assembler::BelowOrEqual, temp, index, &done);
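          +    // An out-of-bounds index branches straight to the rejoin path below; the
          +    // write is silently dropped, matching typed array semantics.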
  1.3721 +
  1.3722 +    // Load the elements vector.
  1.3723 +    Register elements = temp;
  1.3724 +    masm.loadPtr(Address(object, TypedArrayObject::dataOffset()), elements);
  1.3725 +
  1.3726 +    // Set the value.
  1.3727 +    int arrayType = tarr->type();
  1.3728 +    int width = TypedArrayObject::slotWidth(arrayType);
  1.3729 +    BaseIndex target(elements, index, ScaleFromElemWidth(width));
  1.3730 +
  1.3731 +    if (arrayType == ScalarTypeDescr::TYPE_FLOAT32) {
  1.3732 +        if (LIRGenerator::allowFloat32Optimizations()) {
  1.3733 +            if (!masm.convertConstantOrRegisterToFloat(cx, value, tempFloat, &failures))
  1.3734 +                return false;
  1.3735 +        } else {
  1.3736 +            if (!masm.convertConstantOrRegisterToDouble(cx, value, tempFloat, &failures))
  1.3737 +                return false;
  1.3738 +        }
  1.3739 +        masm.storeToTypedFloatArray(arrayType, tempFloat, target);
  1.3740 +    } else if (arrayType == ScalarTypeDescr::TYPE_FLOAT64) {
  1.3741 +        if (!masm.convertConstantOrRegisterToDouble(cx, value, tempFloat, &failures))
  1.3742 +            return false;
  1.3743 +        masm.storeToTypedFloatArray(arrayType, tempFloat, target);
  1.3744 +    } else {
  1.3745 +        // On x86 we only have 6 registers available to use, so reuse the object
  1.3746 +        // register to compute the intermediate value to store and restore it
  1.3747 +        // afterwards.
  1.3748 +        masm.push(object);
  1.3749 +
  1.3750 +        if (arrayType == ScalarTypeDescr::TYPE_UINT8_CLAMPED) {
  1.3751 +            if (!masm.clampConstantOrRegisterToUint8(cx, value, tempFloat, object,
  1.3752 +                                                     &popObjectAndFail))
  1.3753 +            {
  1.3754 +                return false;
  1.3755 +            }
  1.3756 +        } else {
  1.3757 +            if (!masm.truncateConstantOrRegisterToInt32(cx, value, tempFloat, object,
  1.3758 +                                                        &popObjectAndFail))
  1.3759 +            {
  1.3760 +                return false;
  1.3761 +            }
  1.3762 +        }
  1.3763 +        masm.storeToTypedIntArray(arrayType, object, target);
  1.3764 +
  1.3765 +        masm.pop(object);
  1.3766 +    }
  1.3767 +
   1.3768 +    // Out-of-bounds writes jump here as they are no-ops.
  1.3769 +    masm.bind(&done);
  1.3770 +    attacher.jumpRejoin(masm);
  1.3771 +
  1.3772 +    if (popObjectAndFail.used()) {
  1.3773 +        masm.bind(&popObjectAndFail);
  1.3774 +        masm.pop(object);
  1.3775 +    }
  1.3776 +
  1.3777 +    masm.bind(&failures);
  1.3778 +    attacher.jumpNextStub(masm);
  1.3779 +    return true;
  1.3780 +}
  1.3781 +
  1.3782 +bool
  1.3783 +SetElementIC::attachTypedArrayElement(JSContext *cx, IonScript *ion, TypedArrayObject *tarr)
  1.3784 +{
  1.3785 +    MacroAssembler masm(cx, ion);
  1.3786 +    RepatchStubAppender attacher(*this);
  1.3787 +    if (!GenerateSetTypedArrayElement(cx, masm, attacher, tarr,
  1.3788 +                                      object(), index(), value(),
  1.3789 +                                      tempToUnboxIndex(), temp(), tempFloat()))
  1.3790 +    {
  1.3791 +        return false;
  1.3792 +    }
  1.3793 +
  1.3794 +    return linkAndAttachStub(cx, masm, attacher, ion, "typed array");
  1.3795 +}
  1.3796 +
  1.3797 +bool
  1.3798 +SetElementIC::update(JSContext *cx, size_t cacheIndex, HandleObject obj,
  1.3799 +                     HandleValue idval, HandleValue value)
  1.3800 +{
  1.3801 +    IonScript *ion = GetTopIonJSScript(cx)->ionScript();
  1.3802 +    SetElementIC &cache = ion->getCache(cacheIndex).toSetElement();
  1.3803 +
  1.3804 +    bool attachedStub = false;
  1.3805 +    if (cache.canAttachStub()) {
  1.3806 +        if (!cache.hasDenseStub() && IsDenseElementSetInlineable(obj, idval)) {
  1.3807 +            if (!cache.attachDenseElement(cx, ion, obj, idval))
  1.3808 +                return false;
  1.3809 +            attachedStub = true;
  1.3810 +        }
  1.3811 +        if (!attachedStub && IsTypedArrayElementSetInlineable(obj, idval, value)) {
  1.3812 +            TypedArrayObject *tarr = &obj->as<TypedArrayObject>();
  1.3813 +            if (!cache.attachTypedArrayElement(cx, ion, tarr))
  1.3814 +                return false;
  1.3815 +        }
  1.3816 +    }
  1.3817 +
  1.3818 +    if (!SetObjectElement(cx, obj, idval, value, cache.strict()))
  1.3819 +        return false;
  1.3820 +    return true;
  1.3821 +}
  1.3822 +
  1.3823 +void
  1.3824 +SetElementIC::reset()
  1.3825 +{
  1.3826 +    RepatchIonCache::reset();
  1.3827 +    hasDenseStub_ = false;
  1.3828 +}
  1.3829 +
  1.3830 +bool
  1.3831 +SetElementParIC::attachDenseElement(LockedJSContext &cx, IonScript *ion, JSObject *obj,
  1.3832 +                                    const Value &idval)
  1.3833 +{
  1.3834 +    MacroAssembler masm(cx, ion);
  1.3835 +    DispatchStubPrepender attacher(*this);
  1.3836 +    if (!GenerateSetDenseElement(cx, masm, attacher, obj, idval,
  1.3837 +                                 guardHoles(), object(), index(),
  1.3838 +                                 value(), tempToUnboxIndex(),
  1.3839 +                                 temp()))
  1.3840 +    {
  1.3841 +        return false;
  1.3842 +    }
  1.3843 +
  1.3844 +    const char *message = guardHoles()                     ?
  1.3845 +                            "parallel dense array (holes)" :
  1.3846 +                            "parallel dense array";
  1.3847 +
  1.3848 +    return linkAndAttachStub(cx, masm, attacher, ion, message);
  1.3849 +}
  1.3850 +
  1.3851 +bool
  1.3852 +SetElementParIC::attachTypedArrayElement(LockedJSContext &cx, IonScript *ion,
  1.3853 +                                         TypedArrayObject *tarr)
  1.3854 +{
  1.3855 +    MacroAssembler masm(cx, ion);
  1.3856 +    DispatchStubPrepender attacher(*this);
  1.3857 +    if (!GenerateSetTypedArrayElement(cx, masm, attacher, tarr,
  1.3858 +                                      object(), index(), value(),
  1.3859 +                                      tempToUnboxIndex(), temp(), tempFloat()))
  1.3860 +    {
  1.3861 +        return false;
  1.3862 +    }
  1.3863 +
  1.3864 +    return linkAndAttachStub(cx, masm, attacher, ion, "parallel typed array");
  1.3865 +}
  1.3866 +
  1.3867 +bool
  1.3868 +SetElementParIC::update(ForkJoinContext *cx, size_t cacheIndex, HandleObject obj,
  1.3869 +                        HandleValue idval, HandleValue value)
  1.3870 +{
  1.3871 +    IonScript *ion = GetTopIonJSScript(cx)->parallelIonScript();
  1.3872 +    SetElementParIC &cache = ion->getCache(cacheIndex).toSetElementPar();
  1.3873 +
  1.3874 +    // Avoid unnecessary locking if we cannot attach stubs.
  1.3875 +    if (!cache.canAttachStub())
  1.3876 +        return SetElementPar(cx, obj, idval, value, cache.strict());
  1.3877 +
  1.3878 +    {
  1.3879 +        LockedJSContext ncx(cx);
  1.3880 +
  1.3881 +        if (cache.canAttachStub()) {
  1.3882 +            bool alreadyStubbed;
  1.3883 +            if (!cache.hasOrAddStubbedShape(ncx, obj->lastProperty(), &alreadyStubbed))
  1.3884 +                return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
  1.3885 +            if (alreadyStubbed)
  1.3886 +                return SetElementPar(cx, obj, idval, value, cache.strict());
  1.3887 +
  1.3888 +            bool attachedStub = false;
  1.3889 +            if (IsDenseElementSetInlineable(obj, idval)) {
  1.3890 +                if (!cache.attachDenseElement(ncx, ion, obj, idval))
  1.3891 +                    return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
  1.3892 +                attachedStub = true;
  1.3893 +            }
  1.3894 +            if (!attachedStub && IsTypedArrayElementSetInlineable(obj, idval, value)) {
  1.3895 +                TypedArrayObject *tarr = &obj->as<TypedArrayObject>();
  1.3896 +                if (!cache.attachTypedArrayElement(ncx, ion, tarr))
  1.3897 +                    return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
  1.3898 +            }
  1.3899 +        }
  1.3900 +    }
  1.3901 +
  1.3902 +    return SetElementPar(cx, obj, idval, value, cache.strict());
  1.3903 +}
  1.3904 +
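         +// Parallel GETELEM stub for a named property: guard that the index operand
         +// equals |idval|, then emit the ordinary read-slot code for |holder|/|shape|.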
  1.3905 +bool
  1.3906 +GetElementParIC::attachReadSlot(LockedJSContext &cx, IonScript *ion, JSObject *obj,
  1.3907 +                                const Value &idval, PropertyName *name, JSObject *holder,
  1.3908 +                                Shape *shape)
  1.3909 +{
  1.3910 +    MacroAssembler masm(cx, ion);
  1.3911 +    DispatchStubPrepender attacher(*this);
  1.3912 +
  1.3913 +    // Guard on the index value.
  1.3914 +    Label failures;
  1.3915 +    ValueOperand val = index().reg().valueReg();
  1.3916 +    masm.branchTestValue(Assembler::NotEqual, val, idval, &failures);
  1.3917 +
  1.3918 +    GenerateReadSlot(cx, ion, masm, attacher, obj, holder, shape, object(), output(),
  1.3919 +                     &failures);
  1.3920 +
  1.3921 +    return linkAndAttachStub(cx, masm, attacher, ion, "parallel getelem reading");
  1.3922 +}
  1.3923 +
  1.3924 +bool
  1.3925 +GetElementParIC::attachDenseElement(LockedJSContext &cx, IonScript *ion, JSObject *obj,
  1.3926 +                                    const Value &idval)
  1.3927 +{
  1.3928 +    MacroAssembler masm(cx, ion);
  1.3929 +    DispatchStubPrepender attacher(*this);
  1.3930 +    if (!GenerateDenseElement(cx, masm, attacher, obj, idval, object(), index(), output()))
  1.3931 +        return false;
  1.3932 +
  1.3933 +    return linkAndAttachStub(cx, masm, attacher, ion, "parallel dense element");
  1.3934 +}
  1.3935 +
  1.3936 +bool
  1.3937 +GetElementParIC::attachTypedArrayElement(LockedJSContext &cx, IonScript *ion,
  1.3938 +                                         TypedArrayObject *tarr, const Value &idval)
  1.3939 +{
  1.3940 +    MacroAssembler masm(cx, ion);
  1.3941 +    DispatchStubPrepender attacher(*this);
  1.3942 +    GenerateGetTypedArrayElement(cx, masm, attacher, tarr, idval, object(), index(), output(),
  1.3943 +                                 allowDoubleResult());
  1.3944 +    return linkAndAttachStub(cx, masm, attacher, ion, "parallel typed array");
  1.3945 +}
  1.3946 +
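         +// Parallel GETELEM path. The element is fetched on the pure, lock-free path
         +// first; the context is locked only to attach read-slot, dense-element, or
         +// typed-array stubs.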
  1.3947 +bool
  1.3948 +GetElementParIC::update(ForkJoinContext *cx, size_t cacheIndex, HandleObject obj,
  1.3949 +                        HandleValue idval, MutableHandleValue vp)
  1.3950 +{
  1.3951 +    IonScript *ion = GetTopIonJSScript(cx)->parallelIonScript();
  1.3952 +    GetElementParIC &cache = ion->getCache(cacheIndex).toGetElementPar();
  1.3953 +
  1.3954 +    // Try to get the element early, as the pure path doesn't need a lock. If
  1.3955 +    // we can't do it purely, bail out of parallel execution.
  1.3956 +    if (!GetObjectElementOperationPure(cx, obj, idval, vp.address()))
  1.3957 +        return false;
  1.3958 +
  1.3959 +    // Avoid unnecessary locking if we cannot attach stubs.
  1.3960 +    if (!cache.canAttachStub())
  1.3961 +        return true;
  1.3962 +
  1.3963 +    {
  1.3964 +        // See note about locking context in GetPropertyParIC::update.
  1.3965 +        LockedJSContext ncx(cx);
  1.3966 +
  1.3967 +        if (cache.canAttachStub()) {
  1.3968 +            bool alreadyStubbed;
  1.3969 +            if (!cache.hasOrAddStubbedShape(ncx, obj->lastProperty(), &alreadyStubbed))
  1.3970 +                return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
  1.3971 +            if (alreadyStubbed)
  1.3972 +                return true;
  1.3973 +
  1.3974 +            jsid id;
  1.3975 +            if (!ValueToIdPure(idval, &id))
  1.3976 +                return false;
  1.3977 +
  1.3978 +            bool attachedStub = false;
  1.3979 +            if (cache.monitoredResult() &&
  1.3980 +                GetElementIC::canAttachGetProp(obj, idval, id))
  1.3981 +            {
  1.3982 +                RootedShape shape(ncx);
  1.3983 +                RootedObject holder(ncx);
  1.3984 +                RootedPropertyName name(ncx, JSID_TO_ATOM(id)->asPropertyName());
  1.3985 +
  1.3986 +                GetPropertyIC::NativeGetPropCacheability canCache =
  1.3987 +                    CanAttachNativeGetProp(ncx, cache, obj, name, &holder, &shape);
  1.3988 +
  1.3989 +                if (canCache == GetPropertyIC::CanAttachReadSlot) {
  1.3991 +                    if (!cache.attachReadSlot(ncx, ion, obj, idval, name, holder, shape))
  1.3992 +                        return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
  1.3993 +                    attachedStub = true;
  1.3994 +                }
  1.3995 +            }
  1.3996 +            if (!attachedStub &&
  1.3997 +                GetElementIC::canAttachDenseElement(obj, idval))
  1.3998 +            {
  1.3999 +                if (!cache.attachDenseElement(ncx, ion, obj, idval))
  1.4000 +                    return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
  1.4001 +                attachedStub = true;
  1.4002 +            }
  1.4003 +            if (!attachedStub &&
  1.4004 +                GetElementIC::canAttachTypedArrayElement(obj, idval, cache.output()))
  1.4005 +            {
  1.4006 +                if (!cache.attachTypedArrayElement(ncx, ion, &obj->as<TypedArrayObject>(), idval))
  1.4007 +                    return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
  1.4008 +                attachedStub = true;
  1.4009 +            }
  1.4010 +        }
  1.4011 +    }
  1.4012 +
  1.4013 +    return true;
  1.4014 +}
  1.4015 +
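         +// BINDNAME stub for a global scope chain: guard on the identity of the global
         +// and return it as the bound object.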
  1.4016 +bool
  1.4017 +BindNameIC::attachGlobal(JSContext *cx, IonScript *ion, JSObject *scopeChain)
  1.4018 +{
  1.4019 +    JS_ASSERT(scopeChain->is<GlobalObject>());
  1.4020 +
  1.4021 +    MacroAssembler masm(cx, ion);
  1.4022 +    RepatchStubAppender attacher(*this);
  1.4023 +
  1.4024 +    // Guard on the scope chain.
  1.4025 +    attacher.branchNextStub(masm, Assembler::NotEqual, scopeChainReg(),
  1.4026 +                            ImmGCPtr(scopeChain));
  1.4027 +    masm.movePtr(ImmGCPtr(scopeChain), outputReg());
  1.4028 +
  1.4029 +    attacher.jumpRejoin(masm);
  1.4030 +
  1.4031 +    return linkAndAttachStub(cx, masm, attacher, ion, "global");
  1.4032 +}
  1.4033 +
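         +// Emit a shape guard for a single scope object. The guard is omitted when the
         +// shape cannot change in a way that affects the lookup: a call object whose
         +// script has no extensible scope, or a global holding a non-configurable
         +// property.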
  1.4034 +static inline void
  1.4035 +GenerateScopeChainGuard(MacroAssembler &masm, JSObject *scopeObj,
  1.4036 +                        Register scopeObjReg, Shape *shape, Label *failures)
  1.4037 +{
  1.4038 +    if (scopeObj->is<CallObject>()) {
  1.4039 +        // We can skip a guard on the call object if the script's bindings are
  1.4040 +        // guaranteed to be immutable (and thus cannot introduce shadowing
  1.4041 +        // variables).
  1.4042 +        CallObject *callObj = &scopeObj->as<CallObject>();
  1.4043 +        if (!callObj->isForEval()) {
  1.4044 +            JSFunction *fun = &callObj->callee();
  1.4045 +            // The function might have been relazified under rare conditions.
  1.4046 +            // In that case, we pessimistically create the guard, as we'd
  1.4047 +            // need to root various pointers in order to delazify it.
  1.4048 +            if (fun->hasScript()) {
  1.4049 +                JSScript *script = fun->nonLazyScript();
  1.4050 +                if (!script->funHasExtensibleScope())
  1.4051 +                    return;
  1.4052 +            }
  1.4053 +        }
  1.4054 +    } else if (scopeObj->is<GlobalObject>()) {
  1.4055 +        // If this is the last object on the scope walk, and the property we've
  1.4056 +        // found is not configurable, then we don't need a shape guard because
  1.4057 +        // the shape cannot be removed.
  1.4058 +        if (shape && !shape->configurable())
  1.4059 +            return;
  1.4060 +    }
  1.4061 +
  1.4062 +    Address shapeAddr(scopeObjReg, JSObject::offsetOfShape());
  1.4063 +    masm.branchPtr(Assembler::NotEqual, shapeAddr, ImmGCPtr(scopeObj->lastProperty()), failures);
  1.4064 +}
  1.4065 +
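         +// Walk the scope chain from |scopeChain| to |holder|, guarding each link and
         +// leaving the current scope object in |outputReg|.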
  1.4066 +static void
  1.4067 +GenerateScopeChainGuards(MacroAssembler &masm, JSObject *scopeChain, JSObject *holder,
  1.4068 +                         Register outputReg, Label *failures, bool skipLastGuard = false)
  1.4069 +{
  1.4070 +    JSObject *tobj = scopeChain;
  1.4071 +
  1.4072 +    // Walk up the scope chain. Note that IsCacheableScopeChain guarantees the
  1.4073 +    // |tobj == holder| condition terminates the loop.
  1.4074 +    while (true) {
  1.4075 +        JS_ASSERT(IsCacheableNonGlobalScope(tobj) || tobj->is<GlobalObject>());
  1.4076 +
  1.4077 +        if (skipLastGuard && tobj == holder)
  1.4078 +            break;
  1.4079 +
  1.4080 +        GenerateScopeChainGuard(masm, tobj, outputReg, nullptr, failures);
  1.4081 +
  1.4082 +        if (tobj == holder)
  1.4083 +            break;
  1.4084 +
  1.4085 +        // Load the next link.
  1.4086 +        tobj = &tobj->as<ScopeObject>().enclosingScope();
  1.4087 +        masm.extractObject(Address(outputReg, ScopeObject::offsetOfEnclosingScope()), outputReg);
  1.4088 +    }
  1.4089 +}
  1.4090 +
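         +// BINDNAME stub for a non-global scope chain: guard each hop from the current
         +// scope up to |holder|, which ends up in the output register.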
  1.4091 +bool
  1.4092 +BindNameIC::attachNonGlobal(JSContext *cx, IonScript *ion, JSObject *scopeChain, JSObject *holder)
  1.4093 +{
  1.4094 +    JS_ASSERT(IsCacheableNonGlobalScope(scopeChain));
  1.4095 +
  1.4096 +    MacroAssembler masm(cx, ion);
  1.4097 +    RepatchStubAppender attacher(*this);
  1.4098 +
  1.4099 +    // Guard on the shape of the scope chain.
  1.4100 +    Label failures;
  1.4101 +    attacher.branchNextStubOrLabel(masm, Assembler::NotEqual,
  1.4102 +                                   Address(scopeChainReg(), JSObject::offsetOfShape()),
  1.4103 +                                   ImmGCPtr(scopeChain->lastProperty()),
  1.4104 +                                   holder != scopeChain ? &failures : nullptr);
  1.4105 +
  1.4106 +    if (holder != scopeChain) {
  1.4107 +        JSObject *parent = &scopeChain->as<ScopeObject>().enclosingScope();
  1.4108 +        masm.extractObject(Address(scopeChainReg(), ScopeObject::offsetOfEnclosingScope()), outputReg());
  1.4109 +
  1.4110 +        GenerateScopeChainGuards(masm, parent, holder, outputReg(), &failures);
  1.4111 +    } else {
  1.4112 +        masm.movePtr(scopeChainReg(), outputReg());
  1.4113 +    }
  1.4114 +
  1.4115 +    // At this point outputReg holds the object on which the property
  1.4116 +    // was found, so we're done.
  1.4117 +    attacher.jumpRejoin(masm);
  1.4118 +
  1.4119 +    // All failures flow to here, so there is a common point to patch.
  1.4120 +    if (holder != scopeChain) {
  1.4121 +        masm.bind(&failures);
  1.4122 +        attacher.jumpNextStub(masm);
  1.4123 +    }
  1.4124 +
  1.4125 +    return linkAndAttachStub(cx, masm, attacher, ion, "non-global");
  1.4126 +}
  1.4127 +
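         +// A BINDNAME scope chain is cacheable only if every object from |scopeChain|
         +// up to |holder| is a cacheable non-global scope object.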
  1.4128 +static bool
  1.4129 +IsCacheableScopeChain(JSObject *scopeChain, JSObject *holder)
  1.4130 +{
  1.4131 +    while (true) {
  1.4132 +        if (!IsCacheableNonGlobalScope(scopeChain)) {
  1.4133 +            IonSpew(IonSpew_InlineCaches, "Non-cacheable object on scope chain");
  1.4134 +            return false;
  1.4135 +        }
  1.4136 +
  1.4137 +        if (scopeChain == holder)
  1.4138 +            return true;
  1.4139 +
  1.4140 +        scopeChain = &scopeChain->as<ScopeObject>().enclosingScope();
  1.4141 +        if (!scopeChain) {
  1.4142 +            IonSpew(IonSpew_InlineCaches, "Scope chain indirect hit");
  1.4143 +            return false;
  1.4144 +        }
  1.4145 +    }
  1.4146 +
  1.4147 +    MOZ_ASSUME_UNREACHABLE("Invalid scope chain");
  1.4148 +}
  1.4149 +
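         +// Out-of-line BINDNAME path: find the object the name binds to, then attach a
         +// global or scope-walking stub if the chain is cacheable.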
  1.4150 +JSObject *
  1.4151 +BindNameIC::update(JSContext *cx, size_t cacheIndex, HandleObject scopeChain)
  1.4152 +{
  1.4153 +    IonScript *ion = GetTopIonJSScript(cx)->ionScript();
  1.4154 +    BindNameIC &cache = ion->getCache(cacheIndex).toBindName();
  1.4155 +    HandlePropertyName name = cache.name();
  1.4156 +
  1.4157 +    RootedObject holder(cx);
  1.4158 +    if (scopeChain->is<GlobalObject>()) {
  1.4159 +        holder = scopeChain;
  1.4160 +    } else {
  1.4161 +        if (!LookupNameWithGlobalDefault(cx, name, scopeChain, &holder))
  1.4162 +            return nullptr;
  1.4163 +    }
  1.4164 +
  1.4165 +    // Stop generating new stubs once we hit the stub count limit; see
  1.4166 +    // GetPropertyCache.
  1.4167 +    if (cache.canAttachStub()) {
  1.4168 +        if (scopeChain->is<GlobalObject>()) {
  1.4169 +            if (!cache.attachGlobal(cx, ion, scopeChain))
  1.4170 +                return nullptr;
  1.4171 +        } else if (IsCacheableScopeChain(scopeChain, holder)) {
  1.4172 +            if (!cache.attachNonGlobal(cx, ion, scopeChain, holder))
  1.4173 +                return nullptr;
  1.4174 +        } else {
  1.4175 +            IonSpew(IonSpew_InlineCaches, "BINDNAME uncacheable scope chain");
  1.4176 +        }
  1.4177 +    }
  1.4178 +
  1.4179 +    return holder;
  1.4180 +}
  1.4181 +
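         +// NAME stub that reads a slot: guard the scope chain up to the object the name
         +// was found on, then reuse the ordinary read-slot code generator.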
  1.4182 +bool
  1.4183 +NameIC::attachReadSlot(JSContext *cx, IonScript *ion, HandleObject scopeChain,
  1.4184 +                       HandleObject holderBase, HandleObject holder,
  1.4185 +                       HandleShape shape)
  1.4186 +{
  1.4187 +    MacroAssembler masm(cx, ion);
  1.4188 +    Label failures;
  1.4189 +    RepatchStubAppender attacher(*this);
  1.4190 +
  1.4191 +    Register scratchReg = outputReg().valueReg().scratchReg();
  1.4192 +
  1.4193 +    // Don't guard the object at the base of the prototype chain the name was found on;
  1.4194 +    // GenerateReadSlot() emits that guard itself.
  1.4195 +    masm.mov(scopeChainReg(), scratchReg);
  1.4196 +    GenerateScopeChainGuards(masm, scopeChain, holderBase, scratchReg, &failures,
  1.4197 +                             /* skipLastGuard = */true);
  1.4198 +
  1.4199 +    // GenerateScopeChainGuards leaves the last scope object in scratchReg, even
  1.4200 +    // though it doesn't generate the extra guard for it.
  1.4201 +    GenerateReadSlot(cx, ion, masm, attacher, holderBase, holder, shape, scratchReg,
  1.4202 +                     outputReg(), failures.used() ? &failures : nullptr);
  1.4203 +
  1.4204 +    return linkAndAttachStub(cx, masm, attacher, ion, "generic");
  1.4205 +}
  1.4206 +
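         +// A NAME lookup can be cached as a slot read only for simple slot lookups on
         +// the global object or on a call object, and only when every scope object on
         +// the way there is cacheable.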
  1.4207 +static bool
  1.4208 +IsCacheableNameReadSlot(JSContext *cx, HandleObject scopeChain, HandleObject obj,
  1.4209 +                        HandleObject holder, HandleShape shape, jsbytecode *pc,
  1.4210 +                        const TypedOrValueRegister &output)
  1.4211 +{
  1.4212 +    if (!shape)
  1.4213 +        return false;
  1.4214 +    if (!obj->isNative())
  1.4215 +        return false;
  1.4216 +
  1.4217 +    if (obj->is<GlobalObject>()) {
  1.4218 +        // Support only simple property lookups.
  1.4219 +        if (!IsCacheableGetPropReadSlot(obj, holder, shape) &&
  1.4220 +            !IsCacheableNoProperty(obj, holder, shape, pc, output))
  1.4221 +            return false;
  1.4222 +    } else if (obj->is<CallObject>()) {
  1.4223 +        JS_ASSERT(obj == holder);
  1.4224 +        if (!shape->hasDefaultGetter())
  1.4225 +            return false;
  1.4226 +    } else {
  1.4227 +        // We don't yet support lookups on Block or DeclEnv objects.
  1.4228 +        return false;
  1.4229 +    }
  1.4230 +
  1.4231 +    RootedObject obj2(cx, scopeChain);
  1.4232 +    while (obj2) {
  1.4233 +        if (!IsCacheableNonGlobalScope(obj2) && !obj2->is<GlobalObject>())
  1.4234 +            return false;
  1.4235 +
  1.4236 +        // Stop once we hit the global or target obj.
  1.4237 +        if (obj2->is<GlobalObject>() || obj2 == obj)
  1.4238 +            break;
  1.4239 +
  1.4240 +        obj2 = obj2->enclosingScope();
  1.4241 +    }
  1.4242 +
  1.4243 +    return obj == obj2;
  1.4244 +}
  1.4245 +
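         +// NAME stub that invokes a getter found through the scope chain.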
  1.4246 +bool
  1.4247 +NameIC::attachCallGetter(JSContext *cx, IonScript *ion, JSObject *obj, JSObject *holder,
  1.4248 +                         HandleShape shape, void *returnAddr)
  1.4249 +{
  1.4250 +    MacroAssembler masm(cx, ion, script_, pc_);
  1.4251 +
  1.4252 +    RepatchStubAppender attacher(*this);
  1.4253 +    if (!GenerateCallGetter(cx, ion, masm, attacher, obj, name(), holder, shape, liveRegs_,
  1.4254 +                            scopeChainReg(), outputReg(), returnAddr))
  1.4255 +    {
  1.4256 +         return false;
  1.4257 +    }
  1.4258 +
  1.4259 +    const char *attachKind = "name getter";
  1.4260 +    return linkAndAttachStub(cx, masm, attacher, ion, attachKind);
  1.4261 +}
  1.4262 +
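         +// A NAME getter is cacheable only when the name resolves directly on a global
         +// scope chain to a native getter or a property op.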
  1.4263 +static bool
  1.4264 +IsCacheableNameCallGetter(JSObject *scopeChain, JSObject *obj, JSObject *holder, Shape *shape)
  1.4265 +{
  1.4266 +    if (obj != scopeChain)
  1.4267 +        return false;
  1.4268 +
  1.4269 +    if (!obj->is<GlobalObject>())
  1.4270 +        return false;
  1.4271 +
  1.4272 +    return IsCacheableGetPropCallNative(obj, holder, shape) ||
  1.4273 +        IsCacheableGetPropCallPropertyOp(obj, holder, shape);
  1.4274 +}
  1.4275 +
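         +// Out-of-line NAME path: look the name up on the scope chain, attach a
         +// read-slot or call-getter stub when possible, fetch the value, and monitor
         +// its type.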
  1.4276 +bool
  1.4277 +NameIC::update(JSContext *cx, size_t cacheIndex, HandleObject scopeChain,
  1.4278 +               MutableHandleValue vp)
  1.4279 +{
  1.4280 +    void *returnAddr;
  1.4281 +    IonScript *ion = GetTopIonJSScript(cx, &returnAddr)->ionScript();
  1.4282 +
  1.4283 +    NameIC &cache = ion->getCache(cacheIndex).toName();
  1.4284 +    RootedPropertyName name(cx, cache.name());
  1.4285 +
  1.4286 +    RootedScript script(cx);
  1.4287 +    jsbytecode *pc;
  1.4288 +    cache.getScriptedLocation(&script, &pc);
  1.4289 +
  1.4290 +    RootedObject obj(cx);
  1.4291 +    RootedObject holder(cx);
  1.4292 +    RootedShape shape(cx);
  1.4293 +    if (!LookupName(cx, name, scopeChain, &obj, &holder, &shape))
  1.4294 +        return false;
  1.4295 +
  1.4296 +    if (cache.canAttachStub()) {
  1.4297 +        if (IsCacheableNameReadSlot(cx, scopeChain, obj, holder, shape, pc, cache.outputReg())) {
  1.4298 +            if (!cache.attachReadSlot(cx, ion, scopeChain, obj, holder, shape))
  1.4299 +                return false;
  1.4300 +        } else if (IsCacheableNameCallGetter(scopeChain, obj, holder, shape)) {
  1.4301 +            if (!cache.attachCallGetter(cx, ion, obj, holder, shape, returnAddr))
  1.4302 +                return false;
  1.4303 +        }
  1.4304 +    }
  1.4305 +
  1.4306 +    if (cache.isTypeOf()) {
  1.4307 +        if (!FetchName<true>(cx, obj, holder, name, shape, vp))
  1.4308 +            return false;
  1.4309 +    } else {
  1.4310 +        if (!FetchName<false>(cx, obj, holder, name, shape, vp))
  1.4311 +            return false;
  1.4312 +    }
  1.4313 +
  1.4314 +    // Monitor changes to cache entry.
  1.4315 +    types::TypeScript::Monitor(cx, script, pc, vp);
  1.4316 +
  1.4317 +    return true;
  1.4318 +}
  1.4319 +
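         +// Attach a stub mapping one particular callee to its callsite clone.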
  1.4320 +bool
  1.4321 +CallsiteCloneIC::attach(JSContext *cx, IonScript *ion, HandleFunction original,
  1.4322 +                        HandleFunction clone)
  1.4323 +{
  1.4324 +    MacroAssembler masm(cx, ion);
  1.4325 +    RepatchStubAppender attacher(*this);
  1.4326 +
  1.4327 +    // Guard on the identity of the original function.
  1.4328 +    attacher.branchNextStub(masm, Assembler::NotEqual, calleeReg(), ImmGCPtr(original));
  1.4329 +
  1.4330 +    // Load the clone.
  1.4331 +    masm.movePtr(ImmGCPtr(clone), outputReg());
  1.4332 +
  1.4333 +    attacher.jumpRejoin(masm);
  1.4334 +
  1.4335 +    return linkAndAttachStub(cx, masm, attacher, ion, "generic");
  1.4336 +}
  1.4337 +
  1.4338 +JSObject *
  1.4339 +CallsiteCloneIC::update(JSContext *cx, size_t cacheIndex, HandleObject callee)
  1.4340 +{
  1.4341 +    // Act as the identity function for callees that are not clone-at-callsite; the
  1.4342 +    // cache is generated whenever at least one callee is clone-at-callsite.
  1.4343 +    RootedFunction fun(cx, &callee->as<JSFunction>());
  1.4344 +    if (!fun->hasScript() || !fun->nonLazyScript()->shouldCloneAtCallsite())
  1.4345 +        return fun;
  1.4346 +
  1.4347 +    IonScript *ion = GetTopIonJSScript(cx)->ionScript();
  1.4348 +    CallsiteCloneIC &cache = ion->getCache(cacheIndex).toCallsiteClone();
  1.4349 +
  1.4350 +    RootedFunction clone(cx, CloneFunctionAtCallsite(cx, fun, cache.callScript(), cache.callPc()));
  1.4351 +    if (!clone)
  1.4352 +        return nullptr;
  1.4353 +
  1.4354 +    if (cache.canAttachStub()) {
  1.4355 +        if (!cache.attach(cx, ion, fun, clone))
  1.4356 +            return nullptr;
  1.4357 +    }
  1.4358 +
  1.4359 +    return clone;
  1.4360 +}
