js/src/jit/IonCaches.cpp

Sat, 03 Jan 2015 20:18:00 +0100

author
Michael Schloh von Bennewitz <michael@schloh.com>
date
Sat, 03 Jan 2015 20:18:00 +0100
branch
TOR_BUG_3246
changeset 7
129ffea94266
permissions
-rw-r--r--

Conditionally enable double-key logic according to private browsing mode or
the privacy.thirdparty.isolate preference, and implement it in
GetCookieStringCommon and FindCookie, where it counts. Some reservations
remain about how to convince FindCookie callers to test the condition and
pass a nullptr when double-key logic is disabled.

michael@0 1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
michael@0 2 * vim: set ts=8 sts=4 et sw=4 tw=99:
michael@0 3 * This Source Code Form is subject to the terms of the Mozilla Public
michael@0 4 * License, v. 2.0. If a copy of the MPL was not distributed with this
michael@0 5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
michael@0 6
michael@0 7 #include "jit/IonCaches.h"
michael@0 8
michael@0 9 #include "mozilla/TemplateLib.h"
michael@0 10
michael@0 11 #include "jsproxy.h"
michael@0 12 #include "jstypes.h"
michael@0 13
michael@0 14 #include "builtin/TypedObject.h"
michael@0 15 #include "jit/Ion.h"
michael@0 16 #include "jit/IonLinker.h"
michael@0 17 #include "jit/IonSpewer.h"
michael@0 18 #include "jit/Lowering.h"
michael@0 19 #ifdef JS_ION_PERF
michael@0 20 # include "jit/PerfSpewer.h"
michael@0 21 #endif
michael@0 22 #include "jit/ParallelFunctions.h"
michael@0 23 #include "jit/VMFunctions.h"
michael@0 24 #include "vm/Shape.h"
michael@0 25
michael@0 26 #include "jit/IonFrames-inl.h"
michael@0 27 #include "vm/Interpreter-inl.h"
michael@0 28 #include "vm/Shape-inl.h"
michael@0 29
michael@0 30 using namespace js;
michael@0 31 using namespace js::jit;
michael@0 32
michael@0 33 using mozilla::tl::FloorLog2;
michael@0 34
// Convert this jump's compile-time-relative offset (stashed in raw_) into an
// absolute address inside the finalized |code| block.  When |masm| is
// provided, the offset is first translated through actualOffset() to account
// for any buffer relocation the assembler performed.
void
CodeLocationJump::repoint(JitCode *code, MacroAssembler *masm)
{
    JS_ASSERT(state_ == Relative);
    size_t new_off = (size_t)raw_;
#ifdef JS_SMALL_BRANCH
    size_t jumpTableEntryOffset = reinterpret_cast<size_t>(jumpTableEntry_);
#endif
    if (masm != nullptr) {
#ifdef JS_CODEGEN_X64
        // On x64 the stored relative offset must fit in 32 bits.
        JS_ASSERT((uint64_t)raw_ <= UINT32_MAX);
#endif
        new_off = masm->actualOffset((uintptr_t)raw_);
#ifdef JS_SMALL_BRANCH
        jumpTableEntryOffset = masm->actualIndex(jumpTableEntryOffset);
#endif
    }
    raw_ = code->raw() + new_off;
#ifdef JS_SMALL_BRANCH
    // Architectures with short branches route patchable jumps through a
    // jump table; resolve our entry in the finalized code.
    jumpTableEntry_ = Assembler::PatchableJumpAddress(code, (size_t) jumpTableEntryOffset);
#endif
    setAbsolute();
}
michael@0 58
michael@0 59 void
michael@0 60 CodeLocationLabel::repoint(JitCode *code, MacroAssembler *masm)
michael@0 61 {
michael@0 62 JS_ASSERT(state_ == Relative);
michael@0 63 size_t new_off = (size_t)raw_;
michael@0 64 if (masm != nullptr) {
michael@0 65 #ifdef JS_CODEGEN_X64
michael@0 66 JS_ASSERT((uint64_t)raw_ <= UINT32_MAX);
michael@0 67 #endif
michael@0 68 new_off = masm->actualOffset((uintptr_t)raw_);
michael@0 69 }
michael@0 70 JS_ASSERT(new_off < code->instructionsSize());
michael@0 71
michael@0 72 raw_ = code->raw() + new_off;
michael@0 73 setAbsolute();
michael@0 74 }
michael@0 75
// Translate this label's buffer offset through the assembler's relocation map.
void
CodeOffsetLabel::fixup(MacroAssembler *masm)
{
    offset_ = masm->actualOffset(offset_);
}
michael@0 81
// Translate this jump's buffer offset (and, on short-branch architectures,
// its jump-table index) through the assembler's relocation map.
void
CodeOffsetJump::fixup(MacroAssembler *masm)
{
    offset_ = masm->actualOffset(offset_);
#ifdef JS_SMALL_BRANCH
    jumpTableIndex_ = masm->actualIndex(jumpTableIndex_);
#endif
}
michael@0 90
michael@0 91 const char *
michael@0 92 IonCache::CacheName(IonCache::Kind kind)
michael@0 93 {
michael@0 94 static const char * const names[] =
michael@0 95 {
michael@0 96 #define NAME(x) #x,
michael@0 97 IONCACHE_KIND_LIST(NAME)
michael@0 98 #undef NAME
michael@0 99 };
michael@0 100 return names[kind];
michael@0 101 }
michael@0 102
michael@0 103 IonCache::LinkStatus
michael@0 104 IonCache::linkCode(JSContext *cx, MacroAssembler &masm, IonScript *ion, JitCode **code)
michael@0 105 {
michael@0 106 Linker linker(masm);
michael@0 107 *code = linker.newCode<CanGC>(cx, JSC::ION_CODE);
michael@0 108 if (!*code)
michael@0 109 return LINK_ERROR;
michael@0 110
michael@0 111 if (ion->invalidated())
michael@0 112 return CACHE_FLUSHED;
michael@0 113
michael@0 114 return LINK_GOOD;
michael@0 115 }
michael@0 116
// Cap on the number of stubs attached to a single IC before it stops growing.
const size_t IonCache::MAX_STUBS = 16;
michael@0 118
// Helper class which encapsulates logic to attach a stub to an IC by hooking
// up rejoins and next stub jumps.
//
// The simplest stubs have a single jump to the next stub and look like the
// following:
//
//    branch guard NEXTSTUB
//    ... IC-specific code ...
//    jump REJOIN
//
// This corresponds to:
//
//    attacher.branchNextStub(masm, ...);
//    ... emit IC-specific code ...
//    attacher.jumpRejoin(masm);
//
// When the stub needs multiple next-stub jumps, it looks like:
//
//    branch guard FAILURES
//    ... IC-specific code ...
//    branch another-guard FAILURES
//    ... IC-specific code ...
//    jump REJOIN
//    FAILURES:
//    jump NEXTSTUB
//
// This corresponds to:
//
//    Label failures;
//    masm.branchX(..., &failures);
//    ... emit IC-specific code ...
//    masm.branchY(..., &failures);
//    ... emit more IC-specific code ...
//    attacher.jumpRejoin(masm);
//    masm.bind(&failures);
//    attacher.jumpNextStub(masm);
//
// A convenience function |branchNextStubOrLabel| is provided in the case that
// the stub sometimes has multiple next stub jumps and sometimes a single
// one. If a non-nullptr label is passed in, a |branchPtr| will be made to
// that label instead of a |branchPtrWithPatch| to the next stub.
class IonCache::StubAttacher
{
  protected:
    // Flags recording whether the corresponding patchable jump/pointer has
    // already been emitted (each may be emitted at most once per stub).
    bool hasNextStubOffset_ : 1;
    bool hasStubCodePatchOffset_ : 1;

    // Where a successful stub rejoins the Ion code.
    CodeLocationLabel rejoinLabel_;
    // Buffer-relative offsets of the patchable jumps/pointer; made absolute
    // by fixup() once the stub code is finalized.
    CodeOffsetJump nextStubOffset_;
    CodeOffsetJump rejoinOffset_;
    CodeOffsetLabel stubCodePatchOffset_;

  public:
    StubAttacher(CodeLocationLabel rejoinLabel)
      : hasNextStubOffset_(false),
        hasStubCodePatchOffset_(false),
        rejoinLabel_(rejoinLabel),
        nextStubOffset_(),
        rejoinOffset_(),
        stubCodePatchOffset_()
    { }

    // Value used instead of the JitCode self-reference of generated
    // stubs. This value is needed for marking calls made inside stubs. This
    // value would be replaced by the attachStub function after the allocation
    // of the JitCode. The self-reference is used to keep the stub path alive
    // even if the IonScript is invalidated or if the IC is flushed.
    static const ImmPtr STUB_ADDR;

    // Emit a patchable guard branch that, on failure, falls through to the
    // next stub in the chain.
    template <class T1, class T2>
    void branchNextStub(MacroAssembler &masm, Assembler::Condition cond, T1 op1, T2 op2) {
        JS_ASSERT(!hasNextStubOffset_);
        RepatchLabel nextStub;
        nextStubOffset_ = masm.branchPtrWithPatch(cond, op1, op2, &nextStub);
        hasNextStubOffset_ = true;
        masm.bind(&nextStub);
    }

    // Branch to |label| when one is given, otherwise emit a patchable
    // next-stub branch (see the class comment).
    template <class T1, class T2>
    void branchNextStubOrLabel(MacroAssembler &masm, Assembler::Condition cond, T1 op1, T2 op2,
                               Label *label)
    {
        if (label != nullptr)
            masm.branchPtr(cond, op1, op2, label);
        else
            branchNextStub(masm, cond, op1, op2);
    }

    // Emit the patchable jump taken when the stub succeeds.
    void jumpRejoin(MacroAssembler &masm) {
        RepatchLabel rejoin;
        rejoinOffset_ = masm.jumpWithPatch(&rejoin);
        masm.bind(&rejoin);
    }

    // Emit an unconditional patchable jump to the next stub (used as the
    // shared failure exit when a stub has several guards).
    void jumpNextStub(MacroAssembler &masm) {
        JS_ASSERT(!hasNextStubOffset_);
        RepatchLabel nextStub;
        nextStubOffset_ = masm.jumpWithPatch(&nextStub);
        hasNextStubOffset_ = true;
        masm.bind(&nextStub);
    }

    void pushStubCodePointer(MacroAssembler &masm) {
        // Push the JitCode pointer for the stub we're generating.
        // WARNING:
        // WARNING: If JitCode ever becomes relocatable, the following code is incorrect.
        // WARNING: Note that we're not marking the pointer being pushed as an ImmGCPtr.
        // WARNING: This location will be patched with the pointer of the generated stub,
        // WARNING: such as it can be marked when a call is made with this stub. Be aware
        // WARNING: that ICs are not marked and so this stub will only be kept alive iff
        // WARNING: it is on the stack at the time of the GC. No ImmGCPtr is needed as the
        // WARNING: stubs are flushed on GC.
        // WARNING:
        JS_ASSERT(!hasStubCodePatchOffset_);
        stubCodePatchOffset_ = masm.PushWithPatch(STUB_ADDR);
        hasStubCodePatchOffset_ = true;
    }

    // Point the success-path jump at the IC's rejoin location.
    void patchRejoinJump(MacroAssembler &masm, JitCode *code) {
        rejoinOffset_.fixup(&masm);
        CodeLocationJump rejoinJump(code, rejoinOffset_);
        PatchJump(rejoinJump, rejoinLabel_);
    }

    // Replace the STUB_ADDR placeholder with the stub's own JitCode pointer,
    // if one was pushed.
    void patchStubCodePointer(MacroAssembler &masm, JitCode *code) {
        if (hasStubCodePatchOffset_) {
            stubCodePatchOffset_.fixup(&masm);
            Assembler::patchDataWithValueCheck(CodeLocationLabel(code, stubCodePatchOffset_),
                                               ImmPtr(code), STUB_ADDR);
        }
    }

    // Subclasses decide how the failure path links into the stub chain
    // (repatch caches append; dispatch caches prepend).
    virtual void patchNextStubJump(MacroAssembler &masm, JitCode *code) = 0;
};
michael@0 253
// Recognizable placeholder patched with the stub's own JitCode pointer by
// patchStubCodePointer().
const ImmPtr IonCache::StubAttacher::STUB_ADDR = ImmPtr((void*)0xdeadc0de);
michael@0 255
// StubAttacher for RepatchIonCache: new stubs are appended to the chain by
// repatching the previous stub's (or the inline site's) next-stub jump.
class RepatchIonCache::RepatchStubAppender : public IonCache::StubAttacher
{
    RepatchIonCache &cache_;

  public:
    RepatchStubAppender(RepatchIonCache &cache)
      : StubAttacher(cache.rejoinLabel()),
        cache_(cache)
    {
    }

    void patchNextStubJump(MacroAssembler &masm, JitCode *code) {
        // Patch the previous nextStubJump of the last stub, or the jump from the
        // codeGen, to jump into the newly allocated code.
        PatchJump(cache_.lastJump_, CodeLocationLabel(code));

        // If this path is not taken, we are producing an entry which can no
        // longer go back into the update function.
        if (hasNextStubOffset_) {
            nextStubOffset_.fixup(&masm);
            CodeLocationJump nextStubJump(code, nextStubOffset_);
            PatchJump(nextStubJump, cache_.fallbackLabel_);

            // When the last stub fails, it falls back to the OOL call which can
            // produce a stub. Next time we generate a stub, we will patch the
            // nextStub jump to try the new stub.
            cache_.lastJump_ = nextStubJump;
        }
    }
};
michael@0 286
// Restore the cache to its initial state: the inline jump targets the
// out-of-line fallback path again, forgetting all attached stubs.
void
RepatchIonCache::reset()
{
    IonCache::reset();
    PatchJump(initialJump_, fallbackLabel_);
    lastJump_ = initialJump_;
}
michael@0 294
// Emit the patchable inline jump through which stubs are entered; until a
// stub is attached it targets the repatch entry.
void
RepatchIonCache::emitInitialJump(MacroAssembler &masm, AddCacheState &addState)
{
    initialJump_ = masm.jumpWithPatch(&addState.repatchEntry);
    lastJump_ = initialJump_;
}
michael@0 301
// Bind the label targeted by the initial jump emitted in emitInitialJump.
void
RepatchIonCache::bindInitialJump(MacroAssembler &masm, AddCacheState &addState)
{
    masm.bind(&addState.repatchEntry);
}
michael@0 307
// Translate the cache's recorded jump locations into absolute addresses in
// the finalized Ion code.
void
RepatchIonCache::updateBaseAddress(JitCode *code, MacroAssembler &masm)
{
    IonCache::updateBaseAddress(code, masm);
    initialJump_.repoint(code, &masm);
    lastJump_.repoint(code, &masm);
}
michael@0 315
// StubAttacher for DispatchIonCache: new stubs are prepended by swapping the
// dispatch-table entry, so the newest stub is entered first and falls through
// to the previous chain on failure.
class DispatchIonCache::DispatchStubPrepender : public IonCache::StubAttacher
{
    DispatchIonCache &cache_;

  public:
    DispatchStubPrepender(DispatchIonCache &cache)
      : StubAttacher(cache.rejoinLabel_),
        cache_(cache)
    {
    }

    void patchNextStubJump(MacroAssembler &masm, JitCode *code) {
        JS_ASSERT(hasNextStubOffset_);

        // Jump to the previous entry in the stub dispatch table. We
        // have not yet executed the code we're patching the jump in.
        nextStubOffset_.fixup(&masm);
        CodeLocationJump nextStubJump(code, nextStubOffset_);
        PatchJump(nextStubJump, CodeLocationLabel(cache_.firstStub_));

        // Update the dispatch table. Modification to jumps after the dispatch
        // table is updated is disallowed, lest we race on entry into an
        // unfinalized stub.
        cache_.firstStub_ = code->raw();
    }
};
michael@0 342
// Reset the dispatch-table entry so the next lookup goes straight to the
// out-of-line fallback path, forgetting all attached stubs.
void
DispatchIonCache::reset()
{
    IonCache::reset();
    firstStub_ = fallbackLabel_.raw();
}
michael@0 349 void
michael@0 350 DispatchIonCache::emitInitialJump(MacroAssembler &masm, AddCacheState &addState)
michael@0 351 {
michael@0 352 Register scratch = addState.dispatchScratch;
michael@0 353 dispatchLabel_ = masm.movWithPatch(ImmPtr((void*)-1), scratch);
michael@0 354 masm.loadPtr(Address(scratch, 0), scratch);
michael@0 355 masm.jump(scratch);
michael@0 356 rejoinLabel_ = masm.labelForPatch();
michael@0 357 }
michael@0 358
// Nothing to bind: dispatch caches enter stubs through the dispatch table
// rather than a patchable inline jump.
void
DispatchIonCache::bindInitialJump(MacroAssembler &masm, AddCacheState &addState)
{
    // Do nothing.
}
michael@0 364
// Patch the dispatch sequence with the real address of the dispatch-table
// slot and initialize the slot to the fallback path.
void
DispatchIonCache::updateBaseAddress(JitCode *code, MacroAssembler &masm)
{
    // The address of firstStub_ should be pointer aligned.
    JS_ASSERT(uintptr_t(&firstStub_) % sizeof(uintptr_t) == 0);

    IonCache::updateBaseAddress(code, masm);
    dispatchLabel_.fixup(&masm);
    // Replace the (void*)-1 placeholder emitted in emitInitialJump.
    Assembler::patchDataWithValueCheck(CodeLocationLabel(code, dispatchLabel_),
                                       ImmPtr(&firstStub_),
                                       ImmPtr((void*)-1));
    firstStub_ = fallbackLabel_.raw();
    rejoinLabel_.repoint(code, &masm);
}
michael@0 379
// Finalize a freshly generated stub: wire its success and failure paths into
// the IC and patch in its self-pointer.  The patch order below is deliberate.
void
IonCache::attachStub(MacroAssembler &masm, StubAttacher &attacher, Handle<JitCode *> code)
{
    JS_ASSERT(canAttachStub());
    incrementStubCount();

    // Update the success path to continue after the IC initial jump.
    attacher.patchRejoinJump(masm, code);

    // Replace the STUB_ADDR constant by the address of the generated stub, such
    // as it can be kept alive even if the cache is flushed (see
    // MarkJitExitFrame).
    attacher.patchStubCodePointer(masm, code);

    // Update the failure path. Note it is this patch that makes the stub
    // accessible for parallel ICs so it should not be moved unless you really
    // know what is going on.
    attacher.patchNextStubJump(masm, code);
}
michael@0 399
michael@0 400 bool
michael@0 401 IonCache::linkAndAttachStub(JSContext *cx, MacroAssembler &masm, StubAttacher &attacher,
michael@0 402 IonScript *ion, const char *attachKind)
michael@0 403 {
michael@0 404 Rooted<JitCode *> code(cx);
michael@0 405 {
michael@0 406 // Need to exit the AutoFlushICache context to flush the cache
michael@0 407 // before attaching the stub below.
michael@0 408 AutoFlushICache afc("IonCache");
michael@0 409 LinkStatus status = linkCode(cx, masm, ion, code.address());
michael@0 410 if (status != LINK_GOOD)
michael@0 411 return status != LINK_ERROR;
michael@0 412 }
michael@0 413
michael@0 414 if (pc_) {
michael@0 415 IonSpew(IonSpew_InlineCaches, "Cache %p(%s:%d/%d) generated %s %s stub at %p",
michael@0 416 this, script_->filename(), script_->lineno(), script_->pcToOffset(pc_),
michael@0 417 attachKind, CacheName(kind()), code->raw());
michael@0 418 } else {
michael@0 419 IonSpew(IonSpew_InlineCaches, "Cache %p generated %s %s stub at %p",
michael@0 420 this, attachKind, CacheName(kind()), code->raw());
michael@0 421 }
michael@0 422
michael@0 423 #ifdef JS_ION_PERF
michael@0 424 writePerfSpewerJitCodeProfile(code, "IonCache");
michael@0 425 #endif
michael@0 426
michael@0 427 attachStub(masm, attacher, code);
michael@0 428
michael@0 429 return true;
michael@0 430 }
michael@0 431
// Repoint the fallback label into the finalized Ion code.
void
IonCache::updateBaseAddress(JitCode *code, MacroAssembler &masm)
{
    fallbackLabel_.repoint(code, &masm);
}
michael@0 437
// Default implementation: no extra state is needed when adding this cache.
void
IonCache::initializeAddCacheState(LInstruction *ins, AddCacheState *addState)
{
}
michael@0 442
michael@0 443 static bool
michael@0 444 IsCacheableDOMProxy(JSObject *obj)
michael@0 445 {
michael@0 446 if (!obj->is<ProxyObject>())
michael@0 447 return false;
michael@0 448
michael@0 449 BaseProxyHandler *handler = obj->as<ProxyObject>().handler();
michael@0 450
michael@0 451 if (handler->family() != GetDOMProxyHandlerFamily())
michael@0 452 return false;
michael@0 453
michael@0 454 if (obj->numFixedSlots() <= GetDOMProxyExpandoSlot())
michael@0 455 return false;
michael@0 456
michael@0 457 return true;
michael@0 458 }
michael@0 459
michael@0 460 static void
michael@0 461 GeneratePrototypeGuards(JSContext *cx, IonScript *ion, MacroAssembler &masm, JSObject *obj,
michael@0 462 JSObject *holder, Register objectReg, Register scratchReg,
michael@0 463 Label *failures)
michael@0 464 {
michael@0 465 /* The guards here protect against the effects of TradeGuts(). If the prototype chain
michael@0 466 * is directly altered, then TI will toss the jitcode, so we don't have to worry about
michael@0 467 * it, and any other change to the holder, or adding a shadowing property will result
michael@0 468 * in reshaping the holder, and thus the failure of the shape guard.
michael@0 469 */
michael@0 470 JS_ASSERT(obj != holder);
michael@0 471
michael@0 472 if (obj->hasUncacheableProto()) {
michael@0 473 // Note: objectReg and scratchReg may be the same register, so we cannot
michael@0 474 // use objectReg in the rest of this function.
michael@0 475 masm.loadPtr(Address(objectReg, JSObject::offsetOfType()), scratchReg);
michael@0 476 Address proto(scratchReg, types::TypeObject::offsetOfProto());
michael@0 477 masm.branchNurseryPtr(Assembler::NotEqual, proto,
michael@0 478 ImmMaybeNurseryPtr(obj->getProto()), failures);
michael@0 479 }
michael@0 480
michael@0 481 JSObject *pobj = IsCacheableDOMProxy(obj)
michael@0 482 ? obj->getTaggedProto().toObjectOrNull()
michael@0 483 : obj->getProto();
michael@0 484 if (!pobj)
michael@0 485 return;
michael@0 486 while (pobj != holder) {
michael@0 487 if (pobj->hasUncacheableProto()) {
michael@0 488 JS_ASSERT(!pobj->hasSingletonType());
michael@0 489 masm.moveNurseryPtr(ImmMaybeNurseryPtr(pobj), scratchReg);
michael@0 490 Address objType(scratchReg, JSObject::offsetOfType());
michael@0 491 masm.branchPtr(Assembler::NotEqual, objType, ImmGCPtr(pobj->type()), failures);
michael@0 492 }
michael@0 493 pobj = pobj->getProto();
michael@0 494 }
michael@0 495 }
michael@0 496
michael@0 497 static bool
michael@0 498 IsCacheableProtoChain(JSObject *obj, JSObject *holder)
michael@0 499 {
michael@0 500 while (obj != holder) {
michael@0 501 /*
michael@0 502 * We cannot assume that we find the holder object on the prototype
michael@0 503 * chain and must check for null proto. The prototype chain can be
michael@0 504 * altered during the lookupProperty call.
michael@0 505 */
michael@0 506 JSObject *proto = obj->getProto();
michael@0 507 if (!proto || !proto->isNative())
michael@0 508 return false;
michael@0 509 obj = proto;
michael@0 510 }
michael@0 511 return true;
michael@0 512 }
michael@0 513
michael@0 514 static bool
michael@0 515 IsCacheableGetPropReadSlot(JSObject *obj, JSObject *holder, Shape *shape)
michael@0 516 {
michael@0 517 if (!shape || !IsCacheableProtoChain(obj, holder))
michael@0 518 return false;
michael@0 519
michael@0 520 if (!shape->hasSlot() || !shape->hasDefaultGetter())
michael@0 521 return false;
michael@0 522
michael@0 523 return true;
michael@0 524 }
michael@0 525
// Decide whether a failed lookup (no shape found) can be cached as a
// missing-property IC that simply produces |undefined|.
static bool
IsCacheableNoProperty(JSObject *obj, JSObject *holder, Shape *shape, jsbytecode *pc,
                      const TypedOrValueRegister &output)
{
    if (shape)
        return false;

    JS_ASSERT(!holder);

    // Just because we didn't find the property on the object doesn't mean it
    // won't magically appear through various engine hacks:
    if (obj->getClass()->getProperty && obj->getClass()->getProperty != JS_PropertyStub)
        return false;

    // Don't generate missing property ICs if we skipped a non-native object, as
    // lookups may extend beyond the prototype chain (e.g. for DOMProxy
    // proxies).
    JSObject *obj2 = obj;
    while (obj2) {
        if (!obj2->isNative())
            return false;
        obj2 = obj2->getProto();
    }

    // The pc is nullptr if the cache is idempotent. We cannot share missing
    // properties between caches because TI can only try to prove that a type
    // is contained, but does not attempt to check if something does not exist.
    // So the inferred type of the getprop would be missing and would not
    // contain undefined, as expected for missing properties.
    if (!pc)
        return false;

#if JS_HAS_NO_SUCH_METHOD
    // The __noSuchMethod__ hook may substitute in a valid method. Since,
    // if o.m is missing, o.m() will probably be an error, just mark all
    // missing callprops as uncacheable.
    if (JSOp(*pc) == JSOP_CALLPROP ||
        JSOp(*pc) == JSOP_CALLELEM)
    {
        return false;
    }
#endif

    // TI has not yet monitored an Undefined value. The fallback path will
    // monitor and invalidate the script.
    if (!output.hasValue())
        return false;

    return true;
}
michael@0 576
michael@0 577 static bool
michael@0 578 IsOptimizableArgumentsObjectForLength(JSObject *obj)
michael@0 579 {
michael@0 580 if (!obj->is<ArgumentsObject>())
michael@0 581 return false;
michael@0 582
michael@0 583 if (obj->as<ArgumentsObject>().hasOverriddenLength())
michael@0 584 return false;
michael@0 585
michael@0 586 return true;
michael@0 587 }
michael@0 588
michael@0 589 static bool
michael@0 590 IsOptimizableArgumentsObjectForGetElem(JSObject *obj, Value idval)
michael@0 591 {
michael@0 592 if (!IsOptimizableArgumentsObjectForLength(obj))
michael@0 593 return false;
michael@0 594
michael@0 595 ArgumentsObject &argsObj = obj->as<ArgumentsObject>();
michael@0 596
michael@0 597 if (argsObj.isAnyElementDeleted())
michael@0 598 return false;
michael@0 599
michael@0 600 if (!idval.isInt32())
michael@0 601 return false;
michael@0 602
michael@0 603 int32_t idint = idval.toInt32();
michael@0 604 if (idint < 0 || static_cast<uint32_t>(idint) >= argsObj.initialLength())
michael@0 605 return false;
michael@0 606
michael@0 607 return true;
michael@0 608 }
michael@0 609
michael@0 610 static bool
michael@0 611 IsCacheableGetPropCallNative(JSObject *obj, JSObject *holder, Shape *shape)
michael@0 612 {
michael@0 613 if (!shape || !IsCacheableProtoChain(obj, holder))
michael@0 614 return false;
michael@0 615
michael@0 616 if (!shape->hasGetterValue() || !shape->getterValue().isObject())
michael@0 617 return false;
michael@0 618
michael@0 619 if (!shape->getterValue().toObject().is<JSFunction>())
michael@0 620 return false;
michael@0 621
michael@0 622 JSFunction& getter = shape->getterValue().toObject().as<JSFunction>();
michael@0 623 if (!getter.isNative())
michael@0 624 return false;
michael@0 625
michael@0 626 // Check for a getter that has jitinfo and whose jitinfo says it's
michael@0 627 // OK with both inner and outer objects.
michael@0 628 if (getter.jitInfo() && !getter.jitInfo()->needsOuterizedThisObject())
michael@0 629 return true;
michael@0 630
michael@0 631 // For getters that need an outerized this object, don't cache if
michael@0 632 // obj has an outerObject hook, since our cache will pass obj
michael@0 633 // itself without outerizing.
michael@0 634 return !obj->getClass()->ext.outerObject;
michael@0 635 }
michael@0 636
michael@0 637 static bool
michael@0 638 IsCacheableGetPropCallPropertyOp(JSObject *obj, JSObject *holder, Shape *shape)
michael@0 639 {
michael@0 640 if (!shape || !IsCacheableProtoChain(obj, holder))
michael@0 641 return false;
michael@0 642
michael@0 643 if (shape->hasSlot() || shape->hasGetterValue() || shape->hasDefaultGetter())
michael@0 644 return false;
michael@0 645
michael@0 646 return true;
michael@0 647 }
michael@0 648
michael@0 649 static inline void
michael@0 650 EmitLoadSlot(MacroAssembler &masm, JSObject *holder, Shape *shape, Register holderReg,
michael@0 651 TypedOrValueRegister output, Register scratchReg)
michael@0 652 {
michael@0 653 JS_ASSERT(holder);
michael@0 654 if (holder->isFixedSlot(shape->slot())) {
michael@0 655 Address addr(holderReg, JSObject::getFixedSlotOffset(shape->slot()));
michael@0 656 masm.loadTypedOrValue(addr, output);
michael@0 657 } else {
michael@0 658 masm.loadPtr(Address(holderReg, JSObject::offsetOfSlots()), scratchReg);
michael@0 659
michael@0 660 Address addr(scratchReg, holder->dynamicSlotIndex(shape->slot()) * sizeof(Value));
michael@0 661 masm.loadTypedOrValue(addr, output);
michael@0 662 }
michael@0 663 }
michael@0 664
// Emit the guards required before caching a property access on a DOM proxy.
static void
GenerateDOMProxyChecks(JSContext *cx, MacroAssembler &masm, JSObject *obj,
                       PropertyName *name, Register object, Label *stubFailure,
                       bool skipExpandoCheck = false)
{
    JS_ASSERT(IsCacheableDOMProxy(obj));

    // Guard the following:
    //      1. The object is a DOMProxy.
    //      2. The object does not have expando properties, or has an expando
    //          which is known to not have the desired property.
    Address handlerAddr(object, ProxyObject::offsetOfHandler());
    Address expandoSlotAddr(object, JSObject::getFixedSlotOffset(GetDOMProxyExpandoSlot()));

    // Check that object is a DOMProxy.
    masm.branchPrivatePtr(Assembler::NotEqual, handlerAddr,
                          ImmPtr(obj->as<ProxyObject>().handler()), stubFailure);

    if (skipExpandoCheck)
        return;

    // For the remaining code, we need to reserve some registers to load a value.
    // This is ugly, but unavoidable.
    RegisterSet domProxyRegSet(RegisterSet::All());
    domProxyRegSet.take(AnyRegister(object));
    ValueOperand tempVal = domProxyRegSet.takeValueOperand();
    masm.pushValue(tempVal);

    Label failDOMProxyCheck;
    Label domProxyOk;

    Value expandoVal = obj->getFixedSlot(GetDOMProxyExpandoSlot());
    masm.loadValue(expandoSlotAddr, tempVal);

    if (!expandoVal.isObject() && !expandoVal.isUndefined()) {
        // The expando slot holds an ExpandoAndGeneration private; guard the
        // generation has not changed and then load the actual expando value.
        masm.branchTestValue(Assembler::NotEqual, tempVal, expandoVal, &failDOMProxyCheck);

        ExpandoAndGeneration *expandoAndGeneration = (ExpandoAndGeneration*)expandoVal.toPrivate();
        masm.movePtr(ImmPtr(expandoAndGeneration), tempVal.scratchReg());

        masm.branch32(Assembler::NotEqual,
                      Address(tempVal.scratchReg(),
                              ExpandoAndGeneration::offsetOfGeneration()),
                      Imm32(expandoAndGeneration->generation),
                      &failDOMProxyCheck);

        expandoVal = expandoAndGeneration->expando;
        masm.loadValue(Address(tempVal.scratchReg(),
                               ExpandoAndGeneration::offsetOfExpando()),
                       tempVal);
    }

    // If the incoming object does not have an expando object then we're sure we're not
    // shadowing.
    masm.branchTestUndefined(Assembler::Equal, tempVal, &domProxyOk);

    if (expandoVal.isObject()) {
        JS_ASSERT(!expandoVal.toObject().nativeContains(cx, name));

        // Reference object has an expando object that doesn't define the name. Check that
        // the incoming object has an expando object with the same shape.
        masm.branchTestObject(Assembler::NotEqual, tempVal, &failDOMProxyCheck);
        masm.extractObject(tempVal, tempVal.scratchReg());
        masm.branchPtr(Assembler::Equal,
                       Address(tempVal.scratchReg(), JSObject::offsetOfShape()),
                       ImmGCPtr(expandoVal.toObject().lastProperty()),
                       &domProxyOk);
    }

    // Failure case: restore the tempVal registers and jump to failures.
    masm.bind(&failDOMProxyCheck);
    masm.popValue(tempVal);
    masm.jump(stubFailure);

    // Success case: restore the tempVal and proceed.
    masm.bind(&domProxyOk);
    masm.popValue(tempVal);
}
michael@0 743
// Emit an IC stub that loads a property's slot value into |output|.
//
// Guards that |object| still has the shape of |obj|; when obj != holder, the
// prototype chain (and the holder's shape) is guarded as well.  A null
// |holder| means the property was proven absent, so |undefined| is loaded
// instead of a slot.
//
//   cx, ion   - current context and the IonScript the stub attaches to
//   attacher  - patches the stub's rejoin / next-stub jump targets
//   obj       - receiver observed at the cache site (its shape is guarded)
//   holder    - object actually holding the property, or nullptr if the
//               property is known to be missing
//   shape     - property's shape on |holder| (unused when holder is null)
//   object    - register containing the receiver object
//   output    - destination register(s) for the loaded value
//   failures  - optional external failure label; a local label is created
//               when one is needed and none was supplied
static void
GenerateReadSlot(JSContext *cx, IonScript *ion, MacroAssembler &masm,
                 IonCache::StubAttacher &attacher, JSObject *obj, JSObject *holder,
                 Shape *shape, Register object, TypedOrValueRegister output,
                 Label *failures = nullptr)
{
    JS_ASSERT(obj->isNative());
    // If there's a single jump to |failures|, we can patch the shape guard
    // jump directly. Otherwise, jump to the end of the stub, so there's a
    // common point to patch.
    bool multipleFailureJumps = (obj != holder) || (failures != nullptr && failures->used());

    // If we have multiple failure jumps but didn't get a label from the
    // outside, make one ourselves.
    Label failures_;
    if (multipleFailureJumps && !failures)
        failures = &failures_;

    // Guard on the shape of the object.
    attacher.branchNextStubOrLabel(masm, Assembler::NotEqual,
                                   Address(object, JSObject::offsetOfShape()),
                                   ImmGCPtr(obj->lastProperty()),
                                   failures);

    // If we need a scratch register, use either an output register or the
    // object register. After this point, we cannot jump directly to
    // |failures| since we may still have to pop the object register.
    bool restoreScratch = false;
    Register scratchReg = Register::FromCode(0); // Quell compiler warning.

    // A scratch register is only needed when the value is not in a fixed
    // slot of the receiver itself.
    if (obj != holder || !holder->isFixedSlot(shape->slot())) {
        if (output.hasValue()) {
            scratchReg = output.valueReg().scratchReg();
        } else if (output.type() == MIRType_Double) {
            // No spare GPR in a double output: borrow the object register
            // and restore it before rejoining or failing.
            scratchReg = object;
            masm.push(scratchReg);
            restoreScratch = true;
        } else {
            scratchReg = output.typedReg().gpr();
        }
    }

    // Fast path: single failure jump, no prototype guards.
    if (!multipleFailureJumps) {
        EmitLoadSlot(masm, holder, shape, object, output, scratchReg);
        if (restoreScratch)
            masm.pop(scratchReg);
        attacher.jumpRejoin(masm);
        return;
    }

    // Slow path: multiple jumps; generate prototype guards.
    Label prototypeFailures;
    Register holderReg;
    if (obj != holder) {
        // Note: this may clobber the object register if it's used as scratch.
        GeneratePrototypeGuards(cx, ion, masm, obj, holder, object, scratchReg,
                                &prototypeFailures);

        if (holder) {
            // Guard on the holder's shape.
            holderReg = scratchReg;
            masm.moveNurseryPtr(ImmMaybeNurseryPtr(holder), holderReg);
            masm.branchPtr(Assembler::NotEqual,
                           Address(holderReg, JSObject::offsetOfShape()),
                           ImmGCPtr(holder->lastProperty()),
                           &prototypeFailures);
        } else {
            // The property does not exist. Guard on everything in the
            // prototype chain.
            JSObject *proto = obj->getTaggedProto().toObjectOrNull();
            Register lastReg = object;
            JS_ASSERT(scratchReg != object);
            while (proto) {
                masm.loadObjProto(lastReg, scratchReg);

                // Guard the shape of the current prototype.
                masm.branchPtr(Assembler::NotEqual,
                               Address(scratchReg, JSObject::offsetOfShape()),
                               ImmGCPtr(proto->lastProperty()),
                               &prototypeFailures);

                proto = proto->getProto();
                lastReg = scratchReg;
            }

            // No holder: undefined is loaded below, so no register needed.
            holderReg = InvalidReg;
        }
    } else {
        holderReg = object;
    }

    // Slot access.
    if (holder)
        EmitLoadSlot(masm, holder, shape, holderReg, output, scratchReg);
    else
        masm.moveValue(UndefinedValue(), output.valueReg());

    // Restore scratch on success.
    if (restoreScratch)
        masm.pop(scratchReg);

    attacher.jumpRejoin(masm);

    masm.bind(&prototypeFailures);
    if (restoreScratch)
        masm.pop(scratchReg);
    masm.bind(failures);

    attacher.jumpNextStub(masm);

}
michael@0 856
// Emit the actual getter invocation for a cacheable getter property.
//
// Saves the live register set, pushes the arguments, builds a fake exit
// frame and performs an unaligned ABI call to either the native getter
// (JSNative signature) or the property op (JSPropertyOp signature), then
// loads the vp[0] outparam into |output| and restores live registers.
//
// Returns false only when building the OOL fake exit frame fails.
// Precondition (asserted): |shape| is a cacheable native getter or a
// cacheable PropertyOp getter on |holder|, and |output| holds a Value.
static bool
EmitGetterCall(JSContext *cx, MacroAssembler &masm,
               IonCache::StubAttacher &attacher, JSObject *obj,
               JSObject *holder, HandleShape shape,
               RegisterSet liveRegs, Register object,
               Register scratchReg, TypedOrValueRegister output,
               void *returnAddr)
{
    JS_ASSERT(output.hasValue());
    // Paired with masm.icRestoreLive at the end of this function.
    MacroAssembler::AfterICSaveLive aic = masm.icSaveLive(liveRegs);

    // Remaining registers should basically be free, but we need to use |object| still
    // so leave it alone.
    RegisterSet regSet(RegisterSet::All());
    regSet.take(AnyRegister(object));

    // This is a slower stub path, and we're going to be doing a call anyway. Don't need
    // to try so hard to not use the stack. Scratch regs are just taken from the register
    // set not including the input, current value saved on the stack, and restored when
    // we're done with it.
    scratchReg               = regSet.takeGeneral();
    Register argJSContextReg = regSet.takeGeneral();
    Register argUintNReg     = regSet.takeGeneral();
    Register argVpReg        = regSet.takeGeneral();

    // Shape has a getter function.
    bool callNative = IsCacheableGetPropCallNative(obj, holder, shape);
    JS_ASSERT_IF(!callNative, IsCacheableGetPropCallPropertyOp(obj, holder, shape));

    if (callNative) {
        JS_ASSERT(shape->hasGetterValue() && shape->getterValue().isObject() &&
                  shape->getterValue().toObject().is<JSFunction>());
        JSFunction *target = &shape->getterValue().toObject().as<JSFunction>();

        JS_ASSERT(target);
        JS_ASSERT(target->isNative());

        // Native functions have the signature:
        //  bool (*)(JSContext *, unsigned, Value *vp)
        // Where vp[0] is space for an outparam, vp[1] is |this|, and vp[2] onward
        // are the function arguments.

        // Construct vp array:
        // Push object value for |this|
        masm.Push(TypedOrValueRegister(MIRType_Object, AnyRegister(object)));
        // Push callee/outparam.
        masm.Push(ObjectValue(*target));

        // Preload arguments into registers.
        masm.loadJSContext(argJSContextReg);
        masm.move32(Imm32(0), argUintNReg);
        masm.movePtr(StackPointer, argVpReg);

        // Push marking data for later use.
        masm.Push(argUintNReg);
        attacher.pushStubCodePointer(masm);

        if (!masm.icBuildOOLFakeExitFrame(returnAddr, aic))
            return false;
        masm.enterFakeExitFrame(ION_FRAME_OOL_NATIVE);

        // Construct and execute call.
        masm.setupUnalignedABICall(3, scratchReg);
        masm.passABIArg(argJSContextReg);
        masm.passABIArg(argUintNReg);
        masm.passABIArg(argVpReg);
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, target->native()));

        // Test for failure.
        masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());

        // Load the outparam vp[0] into output register(s).
        Address outparam(StackPointer, IonOOLNativeExitFrameLayout::offsetOfResult());
        masm.loadTypedOrValue(outparam, output);

        // masm.leaveExitFrame & pop locals
        masm.adjustStack(IonOOLNativeExitFrameLayout::Size(0));
    } else {
        Register argObjReg       = argUintNReg;
        Register argIdReg        = regSet.takeGeneral();

        PropertyOp target = shape->getterOp();
        JS_ASSERT(target);

        // Push stubCode for marking.
        attacher.pushStubCodePointer(masm);

        // JSPropertyOp: bool fn(JSContext *cx, HandleObject obj, HandleId id, MutableHandleValue vp)

        // Push args on stack first so we can take pointers to make handles.
        masm.Push(UndefinedValue());
        masm.movePtr(StackPointer, argVpReg);

        // push canonical jsid from shape instead of propertyname.
        masm.Push(shape->propid(), scratchReg);
        masm.movePtr(StackPointer, argIdReg);

        masm.Push(object);
        masm.movePtr(StackPointer, argObjReg);

        masm.loadJSContext(argJSContextReg);

        if (!masm.icBuildOOLFakeExitFrame(returnAddr, aic))
            return false;
        masm.enterFakeExitFrame(ION_FRAME_OOL_PROPERTY_OP);

        // Make the call.
        masm.setupUnalignedABICall(4, scratchReg);
        masm.passABIArg(argJSContextReg);
        masm.passABIArg(argObjReg);
        masm.passABIArg(argIdReg);
        masm.passABIArg(argVpReg);
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, target));

        // Test for failure.
        masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());

        // Load the outparam vp[0] into output register(s).
        Address outparam(StackPointer, IonOOLPropertyOpExitFrameLayout::offsetOfResult());
        masm.loadTypedOrValue(outparam, output);

        // masm.leaveExitFrame & pop locals.
        masm.adjustStack(IonOOLPropertyOpExitFrameLayout::Size());
    }

    masm.icRestoreLive(liveRegs, aic);
    return true;
}
michael@0 985
// Emit a full getter-call stub: shape guard on |obj|, prototype guards up to
// |holder| (when they differ), a holder shape guard, then the getter
// invocation itself via EmitGetterCall.  Returns false on OOM while building
// the call's fake exit frame.
static bool
GenerateCallGetter(JSContext *cx, IonScript *ion, MacroAssembler &masm,
                   IonCache::StubAttacher &attacher, JSObject *obj, PropertyName *name,
                   JSObject *holder, HandleShape shape, RegisterSet &liveRegs, Register object,
                   TypedOrValueRegister output, void *returnAddr, Label *failures = nullptr)
{
    JS_ASSERT(obj->isNative());
    JS_ASSERT(output.hasValue());

    // Use the passed in label if there was one. Otherwise, we'll have to make our own.
    Label stubFailure;
    failures = failures ? failures : &stubFailure;

    // Initial shape check.
    masm.branchPtr(Assembler::NotEqual, Address(object, JSObject::offsetOfShape()),
                   ImmGCPtr(obj->lastProperty()), failures);

    Register scratchReg = output.valueReg().scratchReg();

    // Note: this may clobber the object register if it's used as scratch.
    if (obj != holder)
        GeneratePrototypeGuards(cx, ion, masm, obj, holder, object, scratchReg, failures);

    // Guard on the holder's shape.
    Register holderReg = scratchReg;
    masm.moveNurseryPtr(ImmMaybeNurseryPtr(holder), holderReg);
    masm.branchPtr(Assembler::NotEqual,
                   Address(holderReg, JSObject::offsetOfShape()),
                   ImmGCPtr(holder->lastProperty()),
                   failures);

    // Now we're good to go to invoke the native call.
    if (!EmitGetterCall(cx, masm, attacher, obj, holder, shape, liveRegs, object,
                        scratchReg, output, returnAddr))
        return false;

    // Rejoin jump.
    attacher.jumpRejoin(masm);

    // Jump to next stub.
    masm.bind(failures);
    attacher.jumpNextStub(masm);

    return true;
}
michael@0 1031
// Emit a stub that reads |array.length| as an Int32.  Guards the array's
// shape, loads the elements header's length, and bails to the next stub if
// the length does not fit in a signed int32.  Returns false only when the
// object has no last-property shape to guard on.
static bool
GenerateArrayLength(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &attacher,
                    JSObject *obj, Register object, TypedOrValueRegister output)
{
    JS_ASSERT(obj->is<ArrayObject>());

    Label failures;

    // Guard object is a dense array.
    RootedShape shape(cx, obj->lastProperty());
    if (!shape)
        return false;
    masm.branchTestObjShape(Assembler::NotEqual, object, shape, &failures);

    // Load length.
    Register outReg;
    if (output.hasValue()) {
        outReg = output.valueReg().scratchReg();
    } else {
        JS_ASSERT(output.type() == MIRType_Int32);
        outReg = output.typedReg().gpr();
    }

    masm.loadPtr(Address(object, JSObject::offsetOfElements()), outReg);
    masm.load32(Address(outReg, ObjectElements::offsetOfLength()), outReg);

    // The length is an unsigned int, but the value encodes a signed int.
    JS_ASSERT(object != outReg);
    masm.branchTest32(Assembler::Signed, outReg, outReg, &failures);

    // Box the raw int32 when the cache produces a Value.
    if (output.hasValue())
        masm.tagValue(JSVAL_TYPE_INT32, outReg, output.valueReg());

    /* Success. */
    attacher.jumpRejoin(masm);

    /* Failure. */
    masm.bind(&failures);
    attacher.jumpNextStub(masm);

    return true;
}
michael@0 1074
// Emit a stub that reads |typedArray.length|.  Instead of a shape guard, it
// checks the object's class pointer against the typed-array class range
// (the negated form of JSObject's typed-array predicate), then loads the
// length directly from the object's length offset.
static void
GenerateTypedArrayLength(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &attacher,
                         JSObject *obj, Register object, TypedOrValueRegister output)
{
    JS_ASSERT(obj->is<TypedArrayObject>());

    Label failures;

    Register tmpReg;
    if (output.hasValue()) {
        tmpReg = output.valueReg().scratchReg();
    } else {
        JS_ASSERT(output.type() == MIRType_Int32);
        tmpReg = output.typedReg().gpr();
    }
    JS_ASSERT(object != tmpReg);

    // Implement the negated version of JSObject::isTypedArray predicate.
    masm.loadObjClass(object, tmpReg);
    masm.branchPtr(Assembler::Below, tmpReg, ImmPtr(&TypedArrayObject::classes[0]),
                   &failures);
    masm.branchPtr(Assembler::AboveOrEqual, tmpReg,
                   ImmPtr(&TypedArrayObject::classes[ScalarTypeDescr::TYPE_MAX]),
                   &failures);

    // Load length.
    masm.loadTypedOrValue(Address(object, TypedArrayObject::lengthOffset()), output);

    /* Success. */
    attacher.jumpRejoin(masm);

    /* Failure. */
    masm.bind(&failures);
    attacher.jumpNextStub(masm);
}
michael@0 1110
michael@0 1111 static bool
michael@0 1112 IsCacheableArrayLength(JSContext *cx, HandleObject obj, HandlePropertyName name,
michael@0 1113 TypedOrValueRegister output)
michael@0 1114 {
michael@0 1115 if (!obj->is<ArrayObject>())
michael@0 1116 return false;
michael@0 1117
michael@0 1118 if (output.type() != MIRType_Value && output.type() != MIRType_Int32) {
michael@0 1119 // The stub assumes that we always output Int32, so make sure our output
michael@0 1120 // is equipped to handle that.
michael@0 1121 return false;
michael@0 1122 }
michael@0 1123
michael@0 1124 return true;
michael@0 1125 }
michael@0 1126
michael@0 1127 template <class GetPropCache>
michael@0 1128 static GetPropertyIC::NativeGetPropCacheability
michael@0 1129 CanAttachNativeGetProp(typename GetPropCache::Context cx, const GetPropCache &cache,
michael@0 1130 HandleObject obj, HandlePropertyName name,
michael@0 1131 MutableHandleObject holder, MutableHandleShape shape,
michael@0 1132 bool skipArrayLen = false)
michael@0 1133 {
michael@0 1134 if (!obj || !obj->isNative())
michael@0 1135 return GetPropertyIC::CanAttachNone;
michael@0 1136
michael@0 1137 // The lookup needs to be universally pure, otherwise we risk calling hooks out
michael@0 1138 // of turn. We don't mind doing this even when purity isn't required, because we
michael@0 1139 // only miss out on shape hashification, which is only a temporary perf cost.
michael@0 1140 // The limits were arbitrarily set, anyways.
michael@0 1141 if (!LookupPropertyPure(obj, NameToId(name), holder.address(), shape.address()))
michael@0 1142 return GetPropertyIC::CanAttachNone;
michael@0 1143
michael@0 1144 RootedScript script(cx);
michael@0 1145 jsbytecode *pc;
michael@0 1146 cache.getScriptedLocation(&script, &pc);
michael@0 1147 if (IsCacheableGetPropReadSlot(obj, holder, shape) ||
michael@0 1148 IsCacheableNoProperty(obj, holder, shape, pc, cache.output()))
michael@0 1149 {
michael@0 1150 return GetPropertyIC::CanAttachReadSlot;
michael@0 1151 }
michael@0 1152
michael@0 1153 // |length| is a non-configurable getter property on ArrayObjects. Any time this
michael@0 1154 // check would have passed, we can install a getter stub instead. Allow people to
michael@0 1155 // make that decision themselves with skipArrayLen
michael@0 1156 if (!skipArrayLen && cx->names().length == name && cache.allowArrayLength(cx, obj) &&
michael@0 1157 IsCacheableArrayLength(cx, obj, name, cache.output()))
michael@0 1158 {
michael@0 1159 // The array length property is non-configurable, which means both that
michael@0 1160 // checking the class of the object and the name of the property is enough
michael@0 1161 // and that we don't need to worry about monitoring, since we know the
michael@0 1162 // return type statically.
michael@0 1163 return GetPropertyIC::CanAttachArrayLength;
michael@0 1164 }
michael@0 1165
michael@0 1166 // IonBuilder guarantees that it's impossible to generate a GetPropertyIC with
michael@0 1167 // allowGetters() true and cache.output().hasValue() false. If this isn't true,
michael@0 1168 // we will quickly assert during stub generation.
michael@0 1169 if (cache.allowGetters() &&
michael@0 1170 (IsCacheableGetPropCallNative(obj, holder, shape) ||
michael@0 1171 IsCacheableGetPropCallPropertyOp(obj, holder, shape)))
michael@0 1172 {
michael@0 1173 // Don't enable getter call if cache is parallel or idempotent, since
michael@0 1174 // they can be effectful. This is handled by allowGetters()
michael@0 1175 return GetPropertyIC::CanAttachCallGetter;
michael@0 1176 }
michael@0 1177
michael@0 1178 return GetPropertyIC::CanAttachNone;
michael@0 1179 }
michael@0 1180
michael@0 1181 bool
michael@0 1182 GetPropertyIC::allowArrayLength(Context cx, HandleObject obj) const
michael@0 1183 {
michael@0 1184 if (!idempotent())
michael@0 1185 return true;
michael@0 1186
michael@0 1187 uint32_t locationIndex, numLocations;
michael@0 1188 getLocationInfo(&locationIndex, &numLocations);
michael@0 1189
michael@0 1190 IonScript *ion = GetTopIonJSScript(cx)->ionScript();
michael@0 1191 CacheLocation *locs = ion->getCacheLocs(locationIndex);
michael@0 1192 for (size_t i = 0; i < numLocations; i++) {
michael@0 1193 CacheLocation &curLoc = locs[i];
michael@0 1194 types::StackTypeSet *bcTypes =
michael@0 1195 types::TypeScript::BytecodeTypes(curLoc.script, curLoc.pc);
michael@0 1196
michael@0 1197 if (!bcTypes->hasType(types::Type::Int32Type()))
michael@0 1198 return false;
michael@0 1199 }
michael@0 1200
michael@0 1201 return true;
michael@0 1202 }
michael@0 1203
// Try to attach a stub for a native object: a plain slot read, a getter
// call, or the array-length special case, depending on what
// CanAttachNativeGetProp reports.  Sets *emitted when a stub was generated;
// returns false only on stub-generation or linking failure.
bool
GetPropertyIC::tryAttachNative(JSContext *cx, IonScript *ion, HandleObject obj,
                               HandlePropertyName name, void *returnAddr, bool *emitted)
{
    JS_ASSERT(canAttachStub());
    JS_ASSERT(!*emitted);

    RootedShape shape(cx);
    RootedObject holder(cx);

    NativeGetPropCacheability type =
        CanAttachNativeGetProp(cx, *this, obj, name, &holder, &shape);
    if (type == CanAttachNone)
        return true;

    *emitted = true;

    MacroAssembler masm(cx, ion, script_, pc_);

    RepatchStubAppender attacher(*this);
    const char *attachKind;

    switch (type) {
      case CanAttachReadSlot:
        GenerateReadSlot(cx, ion, masm, attacher, obj, holder,
                         shape, object(), output());
        attachKind = idempotent() ? "idempotent reading"
                                    : "non idempotent reading";
        break;
      case CanAttachCallGetter:
        if (!GenerateCallGetter(cx, ion, masm, attacher, obj, name, holder, shape,
                                liveRegs_, object(), output(), returnAddr))
        {
            return false;
        }
        attachKind = "getter call";
        break;
      case CanAttachArrayLength:
        if (!GenerateArrayLength(cx, masm, attacher, obj, object(), output()))
            return false;

        attachKind = "array length";
        break;
      default:
        MOZ_ASSUME_UNREACHABLE("Bad NativeGetPropCacheability");
    }
    return linkAndAttachStub(cx, masm, attacher, ion, attachKind);
}
michael@0 1252
michael@0 1253 bool
michael@0 1254 GetPropertyIC::tryAttachTypedArrayLength(JSContext *cx, IonScript *ion, HandleObject obj,
michael@0 1255 HandlePropertyName name, bool *emitted)
michael@0 1256 {
michael@0 1257 JS_ASSERT(canAttachStub());
michael@0 1258 JS_ASSERT(!*emitted);
michael@0 1259
michael@0 1260 if (!obj->is<TypedArrayObject>())
michael@0 1261 return true;
michael@0 1262
michael@0 1263 if (cx->names().length != name)
michael@0 1264 return true;
michael@0 1265
michael@0 1266 if (hasTypedArrayLengthStub())
michael@0 1267 return true;
michael@0 1268
michael@0 1269 if (output().type() != MIRType_Value && output().type() != MIRType_Int32) {
michael@0 1270 // The next execution should cause an invalidation because the type
michael@0 1271 // does not fit.
michael@0 1272 return true;
michael@0 1273 }
michael@0 1274
michael@0 1275 if (idempotent())
michael@0 1276 return true;
michael@0 1277
michael@0 1278 *emitted = true;
michael@0 1279
michael@0 1280 MacroAssembler masm(cx, ion);
michael@0 1281 RepatchStubAppender attacher(*this);
michael@0 1282 GenerateTypedArrayLength(cx, masm, attacher, obj, object(), output());
michael@0 1283
michael@0 1284 JS_ASSERT(!hasTypedArrayLengthStub_);
michael@0 1285 hasTypedArrayLengthStub_ = true;
michael@0 1286 return linkAndAttachStub(cx, masm, attacher, ion, "typed array length");
michael@0 1287 }
michael@0 1288
michael@0 1289
// Emit a call to Proxy::get (or Proxy::callProp when the bytecode op is
// JSOP_CALLPROP) to fetch |name| from a proxy object.  Mirrors
// EmitGetterCall: saves live registers, pushes the arguments on the stack so
// handles can point at them, builds a fake exit frame, performs the ABI call
// and loads the vp[0] outparam into |output|.  Returns false on OOM while
// building the fake exit frame.
static bool
EmitCallProxyGet(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &attacher,
                 PropertyName *name, RegisterSet liveRegs, Register object,
                 TypedOrValueRegister output, jsbytecode *pc, void *returnAddr)
{
    JS_ASSERT(output.hasValue());
    // Paired with masm.icRestoreLive at the end of this function.
    MacroAssembler::AfterICSaveLive aic = masm.icSaveLive(liveRegs);

    // Remaining registers should be free, but we need to use |object| still
    // so leave it alone.
    RegisterSet regSet(RegisterSet::All());
    regSet.take(AnyRegister(object));

    // Proxy::get(JSContext *cx, HandleObject proxy, HandleObject receiver, HandleId id,
    //            MutableHandleValue vp)
    Register argJSContextReg = regSet.takeGeneral();
    Register argProxyReg     = regSet.takeGeneral();
    Register argIdReg        = regSet.takeGeneral();
    Register argVpReg        = regSet.takeGeneral();

    Register scratch         = regSet.takeGeneral();

    // JSOP_CALLPROP needs callProp semantics; every other op uses plain get.
    void *getFunction = JSOp(*pc) == JSOP_CALLPROP                      ?
                            JS_FUNC_TO_DATA_PTR(void *, Proxy::callProp) :
                            JS_FUNC_TO_DATA_PTR(void *, Proxy::get);

    // Push stubCode for marking.
    attacher.pushStubCodePointer(masm);

    // Push args on stack first so we can take pointers to make handles.
    masm.Push(UndefinedValue());
    masm.movePtr(StackPointer, argVpReg);

    RootedId propId(cx, AtomToId(name));
    masm.Push(propId, scratch);
    masm.movePtr(StackPointer, argIdReg);

    // Pushing object and receiver.  Both are the same, so Handle to one is equivalent to
    // handle to other.
    masm.Push(object);
    masm.Push(object);
    masm.movePtr(StackPointer, argProxyReg);

    masm.loadJSContext(argJSContextReg);

    if (!masm.icBuildOOLFakeExitFrame(returnAddr, aic))
        return false;
    masm.enterFakeExitFrame(ION_FRAME_OOL_PROXY);

    // Make the call.
    masm.setupUnalignedABICall(5, scratch);
    masm.passABIArg(argJSContextReg);
    masm.passABIArg(argProxyReg);
    masm.passABIArg(argProxyReg);
    masm.passABIArg(argIdReg);
    masm.passABIArg(argVpReg);
    masm.callWithABI(getFunction);

    // Test for failure.
    masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());

    // Load the outparam vp[0] into output register(s).
    Address outparam(StackPointer, IonOOLProxyExitFrameLayout::offsetOfResult());
    masm.loadTypedOrValue(outparam, output);

    // masm.leaveExitFrame & pop locals
    masm.adjustStack(IonOOLProxyExitFrameLayout::Size());

    masm.icRestoreLive(liveRegs, aic);
    return true;
}
michael@0 1361
// Attach a stub for a DOM proxy where the property is shadowed (e.g. by the
// expando): guard the shape and the DOM-proxy structure, then always call
// through Proxy::get.  Skipped for idempotent caches because the proxy call
// can be effectful.  Sets *emitted when a stub was generated.
bool
GetPropertyIC::tryAttachDOMProxyShadowed(JSContext *cx, IonScript *ion,
                                         HandleObject obj, void *returnAddr,
                                         bool *emitted)
{
    JS_ASSERT(canAttachStub());
    JS_ASSERT(!*emitted);
    JS_ASSERT(IsCacheableDOMProxy(obj));
    JS_ASSERT(monitoredResult());
    JS_ASSERT(output().hasValue());

    if (idempotent())
        return true;

    *emitted = true;

    Label failures;
    MacroAssembler masm(cx, ion, script_, pc_);
    RepatchStubAppender attacher(*this);

    // Guard on the shape of the object.
    attacher.branchNextStubOrLabel(masm, Assembler::NotEqual,
                                   Address(object(), JSObject::offsetOfShape()),
                                   ImmGCPtr(obj->lastProperty()),
                                   &failures);

    // Make sure object is a DOMProxy
    GenerateDOMProxyChecks(cx, masm, obj, name(), object(), &failures,
                           /*skipExpandoCheck=*/true);

    if (!EmitCallProxyGet(cx, masm, attacher, name(), liveRegs_, object(), output(),
                          pc(), returnAddr))
    {
        return false;
    }

    // Success.
    attacher.jumpRejoin(masm);

    // Failure.
    masm.bind(&failures);
    attacher.jumpNextStub(masm);

    return linkAndAttachStub(cx, masm, attacher, ion, "list base shadowed get");
}
michael@0 1407
// Attach a stub for a DOM proxy whose handler does not shadow the property,
// so the lookup proceeds on the proxy's prototype chain.  Depending on what
// CanAttachNativeGetProp finds on the prototype, this emits either a slot
// load, a getter call, or (when the property was not found at all) a
// deoptimized call through Proxy::get.  |resetNeeded| is set for
// DoesntShadowUnique proxies, whose generation-checked stubs can never match
// again, so the cache is reset first.  Sets *emitted when a stub was
// generated.
bool
GetPropertyIC::tryAttachDOMProxyUnshadowed(JSContext *cx, IonScript *ion, HandleObject obj,
                                           HandlePropertyName name, bool resetNeeded,
                                           void *returnAddr, bool *emitted)
{
    JS_ASSERT(canAttachStub());
    JS_ASSERT(!*emitted);
    JS_ASSERT(IsCacheableDOMProxy(obj));
    JS_ASSERT(monitoredResult());
    JS_ASSERT(output().hasValue());

    // The lookup starts on the proxy's prototype, not the proxy itself.
    RootedObject checkObj(cx, obj->getTaggedProto().toObjectOrNull());
    RootedObject holder(cx);
    RootedShape shape(cx);

    NativeGetPropCacheability canCache =
        CanAttachNativeGetProp(cx, *this, checkObj, name, &holder, &shape,
                               /* skipArrayLen = */true);
    JS_ASSERT(canCache != CanAttachArrayLength);

    if (canCache == CanAttachNone)
        return true;

    // Make sure we observe our invariants if we're gonna deoptimize.
    if (!holder && idempotent())
        return true;

    *emitted = true;

    if (resetNeeded) {
        // If we know that we have a DoesntShadowUnique object, then
        // we reset the cache to clear out an existing IC for the object
        // (if there is one). The generation is a constant in the generated
        // code and we will not have the same generation again for this
        // object, so the generation check in the existing IC would always
        // fail anyway.
        reset();
    }

    Label failures;
    MacroAssembler masm(cx, ion, script_, pc_);
    RepatchStubAppender attacher(*this);

    // Guard on the shape of the object.
    attacher.branchNextStubOrLabel(masm, Assembler::NotEqual,
                                   Address(object(), JSObject::offsetOfShape()),
                                   ImmGCPtr(obj->lastProperty()),
                                   &failures);

    // Make sure object is a DOMProxy proxy
    GenerateDOMProxyChecks(cx, masm, obj, name, object(), &failures);

    if (holder) {
        // Found the property on the prototype chain. Treat it like a native
        // getprop.
        Register scratchReg = output().valueReg().scratchReg();
        GeneratePrototypeGuards(cx, ion, masm, obj, holder, object(), scratchReg, &failures);

        // Rename scratch for clarity.
        Register holderReg = scratchReg;

        // Guard on the holder of the property
        masm.moveNurseryPtr(ImmMaybeNurseryPtr(holder), holderReg);
        masm.branchPtr(Assembler::NotEqual,
                       Address(holderReg, JSObject::offsetOfShape()),
                       ImmGCPtr(holder->lastProperty()),
                       &failures);

        if (canCache == CanAttachReadSlot) {
            EmitLoadSlot(masm, holder, shape, holderReg, output(), scratchReg);
        } else {
            // EmitGetterCall() expects |obj| to be the object the property is
            // on to do some checks. Since we actually looked at checkObj, and
            // no extra guards will be generated, we can just pass that instead.
            JS_ASSERT(canCache == CanAttachCallGetter);
            JS_ASSERT(!idempotent());
            if (!EmitGetterCall(cx, masm, attacher, checkObj, holder, shape, liveRegs_,
                                object(), scratchReg, output(), returnAddr))
            {
                return false;
            }
        }
    } else {
        // Property was not found on the prototype chain. Deoptimize down to
        // proxy get call
        JS_ASSERT(!idempotent());
        if (!EmitCallProxyGet(cx, masm, attacher, name, liveRegs_, object(), output(),
                              pc(), returnAddr))
        {
            return false;
        }
    }

    attacher.jumpRejoin(masm);
    masm.bind(&failures);
    attacher.jumpNextStub(masm);

    return linkAndAttachStub(cx, masm, attacher, ion, "unshadowed proxy get");
}
michael@0 1507
michael@0 1508 bool
michael@0 1509 GetPropertyIC::tryAttachProxy(JSContext *cx, IonScript *ion, HandleObject obj,
michael@0 1510 HandlePropertyName name, void *returnAddr,
michael@0 1511 bool *emitted)
michael@0 1512 {
michael@0 1513 JS_ASSERT(canAttachStub());
michael@0 1514 JS_ASSERT(!*emitted);
michael@0 1515
michael@0 1516 if (!obj->is<ProxyObject>())
michael@0 1517 return true;
michael@0 1518
michael@0 1519 // TI can't be sure about our properties, so make sure anything
michael@0 1520 // we return can be monitored directly.
michael@0 1521 if (!monitoredResult())
michael@0 1522 return true;
michael@0 1523
michael@0 1524 // Skim off DOM proxies.
michael@0 1525 if (IsCacheableDOMProxy(obj)) {
michael@0 1526 RootedId id(cx, NameToId(name));
michael@0 1527 DOMProxyShadowsResult shadows = GetDOMProxyShadowsCheck()(cx, obj, id);
michael@0 1528 if (shadows == ShadowCheckFailed)
michael@0 1529 return false;
michael@0 1530 if (shadows == Shadows)
michael@0 1531 return tryAttachDOMProxyShadowed(cx, ion, obj, returnAddr, emitted);
michael@0 1532
michael@0 1533 return tryAttachDOMProxyUnshadowed(cx, ion, obj, name, shadows == DoesntShadowUnique,
michael@0 1534 returnAddr, emitted);
michael@0 1535 }
michael@0 1536
michael@0 1537 return tryAttachGenericProxy(cx, ion, obj, name, returnAddr, emitted);
michael@0 1538 }
michael@0 1539
// Emit a guard that |object| is a proxy: load its Class pointer into
// |scratchReg| and branch to |failures| unless the class flags contain
// JSCLASS_IS_PROXY. Clobbers |scratchReg|.
static void
GenerateProxyClassGuards(MacroAssembler &masm, Register object, Register scratchReg,
                         Label *failures)
{
    masm.loadObjClass(object, scratchReg);
    // branchTest32 with Zero: jump if (flags & JSCLASS_IS_PROXY) == 0.
    masm.branchTest32(Assembler::Zero,
                      Address(scratchReg, Class::offsetOfFlags()),
                      Imm32(JSCLASS_IS_PROXY), failures);
}
michael@0 1549
// Attach a stub that handles property gets on arbitrary (non-DOM) proxies
// by calling into Proxy::get. At most one generic proxy stub is ever
// attached per cache (tracked by hasGenericProxyStub_). Returns false only
// on failure to emit or link the stub.
bool
GetPropertyIC::tryAttachGenericProxy(JSContext *cx, IonScript *ion, HandleObject obj,
                                     HandlePropertyName name, void *returnAddr,
                                     bool *emitted)
{
    JS_ASSERT(canAttachStub());
    JS_ASSERT(!*emitted);
    JS_ASSERT(obj->is<ProxyObject>());
    JS_ASSERT(monitoredResult());
    JS_ASSERT(output().hasValue());

    // One generic proxy stub covers all proxies; don't attach a second.
    if (hasGenericProxyStub())
        return true;

    // The stub performs an observable call, so it is unusable for
    // idempotent caches.
    if (idempotent())
        return true;

    *emitted = true;

    Label failures;
    MacroAssembler masm(cx, ion, script_, pc_);
    RepatchStubAppender attacher(*this);

    Register scratchReg = output().valueReg().scratchReg();

    // Guard that the input object is some kind of proxy.
    GenerateProxyClassGuards(masm, object(), scratchReg, &failures);

    // Ensure that the incoming object is not a DOM proxy, so that we can get to
    // the specialized stubs
    masm.branchTestProxyHandlerFamily(Assembler::Equal, object(), scratchReg,
                                      GetDOMProxyHandlerFamily(), &failures);

    // Emit the out-of-line call to Proxy::get.
    if (!EmitCallProxyGet(cx, masm, attacher, name, liveRegs_, object(), output(),
                          pc(), returnAddr))
    {
        return false;
    }

    // Success path rejoins the main code; failures chain to the next stub.
    attacher.jumpRejoin(masm);

    masm.bind(&failures);
    attacher.jumpNextStub(masm);

    JS_ASSERT(!hasGenericProxyStub_);
    hasGenericProxyStub_ = true;

    return linkAndAttachStub(cx, masm, attacher, ion, "Generic Proxy get");
}
michael@0 1598
// Attach a stub that reads the .length of an (unmodified) arguments object
// by unpacking the initial-length slot, bypassing the usual property
// lookup. Separate stubs are kept for strict and normal arguments objects,
// at most one of each.
bool
GetPropertyIC::tryAttachArgumentsLength(JSContext *cx, IonScript *ion, HandleObject obj,
                                        HandlePropertyName name, bool *emitted)
{
    JS_ASSERT(canAttachStub());
    JS_ASSERT(!*emitted);

    if (name != cx->names().length)
        return true;
    if (!IsOptimizableArgumentsObjectForLength(obj))
        return true;

    // The result is an int32, so only an int32 or boxed-Value output works.
    MIRType outputType = output().type();
    if (!(outputType == MIRType_Value || outputType == MIRType_Int32))
        return true;

    // Only one stub per arguments-object flavor.
    if (hasArgumentsLengthStub(obj->is<StrictArgumentsObject>()))
        return true;

    *emitted = true;

    JS_ASSERT(!idempotent());

    Label failures;
    MacroAssembler masm(cx, ion);
    RepatchStubAppender attacher(*this);

    // Scratch register: reuse part of the output so nothing live is clobbered.
    Register tmpReg;
    if (output().hasValue()) {
        tmpReg = output().valueReg().scratchReg();
    } else {
        JS_ASSERT(output().type() == MIRType_Int32);
        tmpReg = output().typedReg().gpr();
    }
    JS_ASSERT(object() != tmpReg);

    // Guard on the exact arguments-object class (strict vs. normal).
    const Class *clasp = obj->is<StrictArgumentsObject>() ? &StrictArgumentsObject::class_
                                                          : &NormalArgumentsObject::class_;

    masm.branchTestObjClass(Assembler::NotEqual, object(), tmpReg, clasp, &failures);

    // Get initial ArgsObj length value, test if length has been overridden.
    masm.unboxInt32(Address(object(), ArgumentsObject::getInitialLengthSlotOffset()), tmpReg);
    masm.branchTest32(Assembler::NonZero, tmpReg, Imm32(ArgumentsObject::LENGTH_OVERRIDDEN_BIT),
                      &failures);

    // The slot packs the length with flag bits; shift them out.
    masm.rshiftPtr(Imm32(ArgumentsObject::PACKED_BITS_COUNT), tmpReg);

    // If output is Int32, result is already in right place, otherwise box it into output.
    if (output().hasValue())
        masm.tagValue(JSVAL_TYPE_INT32, tmpReg, output().valueReg());

    // Success.
    attacher.jumpRejoin(masm);

    // Failure.
    masm.bind(&failures);
    attacher.jumpNextStub(masm);

    if (obj->is<StrictArgumentsObject>()) {
        JS_ASSERT(!hasStrictArgumentsLengthStub_);
        hasStrictArgumentsLengthStub_ = true;
        return linkAndAttachStub(cx, masm, attacher, ion, "ArgsObj length (strict)");
    }

    JS_ASSERT(!hasNormalArgumentsLengthStub_);
    hasNormalArgumentsLengthStub_ = true;
    return linkAndAttachStub(cx, masm, attacher, ion, "ArgsObj length (normal)");
}
michael@0 1668
michael@0 1669 bool
michael@0 1670 GetPropertyIC::tryAttachStub(JSContext *cx, IonScript *ion, HandleObject obj,
michael@0 1671 HandlePropertyName name, void *returnAddr, bool *emitted)
michael@0 1672 {
michael@0 1673 JS_ASSERT(!*emitted);
michael@0 1674
michael@0 1675 if (!canAttachStub())
michael@0 1676 return true;
michael@0 1677
michael@0 1678 if (!*emitted && !tryAttachArgumentsLength(cx, ion, obj, name, emitted))
michael@0 1679 return false;
michael@0 1680
michael@0 1681 if (!*emitted && !tryAttachProxy(cx, ion, obj, name, returnAddr, emitted))
michael@0 1682 return false;
michael@0 1683
michael@0 1684 if (!*emitted && !tryAttachNative(cx, ion, obj, name, returnAddr, emitted))
michael@0 1685 return false;
michael@0 1686
michael@0 1687 if (!*emitted && !tryAttachTypedArrayLength(cx, ion, obj, name, emitted))
michael@0 1688 return false;
michael@0 1689
michael@0 1690 return true;
michael@0 1691 }
michael@0 1692
// VM entry point for a GETPROP cache miss: try to attach a new stub, then
// perform the actual property get in the interpreter/VM. Idempotent caches
// that fail to attach invalidate the script instead, since their result is
// not type-monitored.
/* static */ bool
GetPropertyIC::update(JSContext *cx, size_t cacheIndex,
                      HandleObject obj, MutableHandleValue vp)
{
    void *returnAddr;
    RootedScript topScript(cx, GetTopIonJSScript(cx, &returnAddr));
    IonScript *ion = topScript->ionScript();

    GetPropertyIC &cache = ion->getCache(cacheIndex).toGetProperty();
    RootedPropertyName name(cx, cache.name());

    // Override the return value if we are invalidated (bug 728188).
    AutoDetectInvalidation adi(cx, vp.address(), ion);

    // If the cache is idempotent, we will redo the op in the interpreter.
    if (cache.idempotent())
        adi.disable();

    // For now, just stop generating new stubs once we hit the stub count
    // limit. Once we can make calls from within generated stubs, a new call
    // stub will be generated instead and the previous stubs unlinked.
    bool emitted = false;
    if (!cache.tryAttachStub(cx, ion, obj, name, returnAddr, &emitted))
        return false;

    if (cache.idempotent() && !emitted) {
        // Invalidate the cache if the property was not found, or was found on
        // a non-native object. This ensures:
        // 1) The property read has no observable side-effects.
        // 2) There's no need to dynamically monitor the return type. This would
        //    be complicated since (due to GVN) there can be multiple pc's
        //    associated with a single idempotent cache.
        IonSpew(IonSpew_InlineCaches, "Invalidating from idempotent cache %s:%d",
                topScript->filename(), topScript->lineno());

        topScript->setInvalidatedIdempotentCache();

        // Do not re-invalidate if the lookup already caused invalidation.
        if (!topScript->hasIonScript())
            return true;

        return Invalidate(cx, topScript);
    }

    // Perform the get itself; this is what actually produces vp.
    RootedId id(cx, NameToId(name));
    if (!JSObject::getGeneric(cx, obj, obj, id, vp))
        return false;

    if (!cache.idempotent()) {
        RootedScript script(cx);
        jsbytecode *pc;
        cache.getScriptedLocation(&script, &pc);

        // If the cache is idempotent, the property exists so we don't have to
        // call __noSuchMethod__.

#if JS_HAS_NO_SUCH_METHOD
        // Handle objects with __noSuchMethod__.
        if (JSOp(*pc) == JSOP_CALLPROP && MOZ_UNLIKELY(vp.isUndefined())) {
            if (!OnUnknownMethod(cx, obj, IdToValue(id), vp))
                return false;
        }
#endif

        // Monitor changes to cache entry.
        if (!cache.monitoredResult())
            types::TypeScript::Monitor(cx, script, pc, vp);
    }

    return true;
}
michael@0 1764
michael@0 1765 void
michael@0 1766 GetPropertyIC::reset()
michael@0 1767 {
michael@0 1768 RepatchIonCache::reset();
michael@0 1769 hasTypedArrayLengthStub_ = false;
michael@0 1770 hasStrictArgumentsLengthStub_ = false;
michael@0 1771 hasNormalArgumentsLengthStub_ = false;
michael@0 1772 hasGenericProxyStub_ = false;
michael@0 1773 }
michael@0 1774
michael@0 1775 bool
michael@0 1776 ParallelIonCache::initStubbedShapes(JSContext *cx)
michael@0 1777 {
michael@0 1778 JS_ASSERT(isAllocated());
michael@0 1779 if (!stubbedShapes_) {
michael@0 1780 stubbedShapes_ = cx->new_<ShapeSet>(cx);
michael@0 1781 return stubbedShapes_ && stubbedShapes_->init();
michael@0 1782 }
michael@0 1783 return true;
michael@0 1784 }
michael@0 1785
michael@0 1786 bool
michael@0 1787 ParallelIonCache::hasOrAddStubbedShape(LockedJSContext &cx, Shape *shape, bool *alreadyStubbed)
michael@0 1788 {
michael@0 1789 // Check if we have already stubbed the current object to avoid
michael@0 1790 // attaching a duplicate stub.
michael@0 1791 if (!initStubbedShapes(cx))
michael@0 1792 return false;
michael@0 1793 ShapeSet::AddPtr p = stubbedShapes_->lookupForAdd(shape);
michael@0 1794 if ((*alreadyStubbed = !!p))
michael@0 1795 return true;
michael@0 1796 return stubbedShapes_->add(p, shape);
michael@0 1797 }
michael@0 1798
michael@0 1799 void
michael@0 1800 ParallelIonCache::reset()
michael@0 1801 {
michael@0 1802 DispatchIonCache::reset();
michael@0 1803 if (stubbedShapes_)
michael@0 1804 stubbedShapes_->clear();
michael@0 1805 }
michael@0 1806
// Tear down base-class state, then free the lazily-created stubbed-shape
// set (js_delete is a no-op on a null pointer).
void
ParallelIonCache::destroy()
{
    DispatchIonCache::destroy();
    js_delete(stubbedShapes_);
}
michael@0 1813
michael@0 1814 void
michael@0 1815 GetPropertyParIC::reset()
michael@0 1816 {
michael@0 1817 ParallelIonCache::reset();
michael@0 1818 hasTypedArrayLengthStub_ = false;
michael@0 1819 }
michael@0 1820
michael@0 1821 bool
michael@0 1822 GetPropertyParIC::attachReadSlot(LockedJSContext &cx, IonScript *ion, JSObject *obj,
michael@0 1823 JSObject *holder, Shape *shape)
michael@0 1824 {
michael@0 1825 // Ready to generate the read slot stub.
michael@0 1826 DispatchStubPrepender attacher(*this);
michael@0 1827 MacroAssembler masm(cx, ion);
michael@0 1828 GenerateReadSlot(cx, ion, masm, attacher, obj, holder, shape, object(), output());
michael@0 1829
michael@0 1830 return linkAndAttachStub(cx, masm, attacher, ion, "parallel reading");
michael@0 1831 }
michael@0 1832
michael@0 1833 bool
michael@0 1834 GetPropertyParIC::attachArrayLength(LockedJSContext &cx, IonScript *ion, JSObject *obj)
michael@0 1835 {
michael@0 1836 MacroAssembler masm(cx, ion);
michael@0 1837 DispatchStubPrepender attacher(*this);
michael@0 1838 if (!GenerateArrayLength(cx, masm, attacher, obj, object(), output()))
michael@0 1839 return false;
michael@0 1840
michael@0 1841 return linkAndAttachStub(cx, masm, attacher, ion, "parallel array length");
michael@0 1842 }
michael@0 1843
michael@0 1844 bool
michael@0 1845 GetPropertyParIC::attachTypedArrayLength(LockedJSContext &cx, IonScript *ion, JSObject *obj)
michael@0 1846 {
michael@0 1847 MacroAssembler masm(cx, ion);
michael@0 1848 DispatchStubPrepender attacher(*this);
michael@0 1849 GenerateTypedArrayLength(cx, masm, attacher, obj, object(), output());
michael@0 1850
michael@0 1851 JS_ASSERT(!hasTypedArrayLengthStub_);
michael@0 1852 hasTypedArrayLengthStub_ = true;
michael@0 1853 return linkAndAttachStub(cx, masm, attacher, ion, "parallel typed array length");
michael@0 1854 }
michael@0 1855
// Parallel-execution entry point for a GETPROP cache miss. The property is
// first fetched with the lock-free pure path (bailing out of parallel
// execution if that is impossible); only then, under the context lock, is a
// new stub considered.
bool
GetPropertyParIC::update(ForkJoinContext *cx, size_t cacheIndex,
                         HandleObject obj, MutableHandleValue vp)
{
    IonScript *ion = GetTopIonJSScript(cx)->parallelIonScript();
    GetPropertyParIC &cache = ion->getCache(cacheIndex).toGetPropertyPar();

    // Grab the property early, as the pure path is fast anyways and doesn't
    // need a lock. If we can't do it purely, bail out of parallel execution.
    if (!GetPropertyPure(cx, obj, NameToId(cache.name()), vp.address()))
        return false;

    // Avoid unnecessary locking if cannot attach stubs.
    if (!cache.canAttachStub())
        return true;

    {
        // Lock the context before mutating the cache. Ideally we'd like to do
        // finer-grained locking, with one lock per cache. However, generating
        // new jitcode uses a global ExecutableAllocator tied to the runtime.
        LockedJSContext ncx(cx);

        // Re-check under the lock: another thread may have filled the cache.
        if (cache.canAttachStub()) {
            bool alreadyStubbed;
            if (!cache.hasOrAddStubbedShape(ncx, obj->lastProperty(), &alreadyStubbed))
                return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
            if (alreadyStubbed)
                return true;

            // See note about the stub limit in GetPropertyCache.
            bool attachedStub = false;

            {
                RootedShape shape(ncx);
                RootedObject holder(ncx);
                RootedPropertyName name(ncx, cache.name());

                GetPropertyIC::NativeGetPropCacheability canCache =
                    CanAttachNativeGetProp(ncx, cache, obj, name, &holder, &shape);

                if (canCache == GetPropertyIC::CanAttachReadSlot) {
                    if (!cache.attachReadSlot(ncx, ion, obj, holder, shape))
                        return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
                    attachedStub = true;
                }

                if (!attachedStub && canCache == GetPropertyIC::CanAttachArrayLength) {
                    if (!cache.attachArrayLength(ncx, ion, obj))
                        return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
                    attachedStub = true;
                }
            }

            // Typed-array length: only when nothing else attached, the unique
            // stub does not exist yet, and the output can hold an int32.
            if (!attachedStub && !cache.hasTypedArrayLengthStub() &&
                obj->is<TypedArrayObject>() && cx->names().length == cache.name() &&
                (cache.output().type() == MIRType_Value || cache.output().type() == MIRType_Int32))
            {
                if (!cache.attachTypedArrayLength(ncx, ion, obj))
                    return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
                attachedStub = true;
            }
        }
    }

    return true;
}
michael@0 1922
michael@0 1923 void
michael@0 1924 IonCache::disable()
michael@0 1925 {
michael@0 1926 reset();
michael@0 1927 this->disabled_ = 1;
michael@0 1928 }
michael@0 1929
michael@0 1930 void
michael@0 1931 IonCache::reset()
michael@0 1932 {
michael@0 1933 this->stubCount_ = 0;
michael@0 1934 }
michael@0 1935
// Intentionally empty: the base cache owns no heap resources. Subclasses
// (e.g. ParallelIonCache) override this to free what they allocate.
void
IonCache::destroy()
{
}
michael@0 1940
// Emit a stub that stores |value| into the slot described by |shape| on a
// native object. Guards on the object's shape and, when a type barrier is
// needed, on its TypeObject plus (optionally) the property's HeapTypeSet.
// |object| is temporarily reused as scratch while checking the type set and
// while loading the dynamic-slots pointer.
static void
GenerateSetSlot(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &attacher,
                JSObject *obj, Shape *shape, Register object, ConstantOrRegister value,
                bool needsTypeBarrier, bool checkTypeset)
{
    JS_ASSERT(obj->isNative());

    Label failures, barrierFailure;
    masm.branchPtr(Assembler::NotEqual,
                   Address(object, JSObject::offsetOfShape()),
                   ImmGCPtr(obj->lastProperty()), &failures);

    // Guard that the incoming value is in the type set for the property
    // if a type barrier is required.
    if (needsTypeBarrier) {
        // We can't do anything that would change the HeapTypeSet, so
        // just guard that it's already there.

        // Obtain and guard on the TypeObject of the object.
        types::TypeObject *type = obj->type();
        masm.branchPtr(Assembler::NotEqual,
                       Address(object, JSObject::offsetOfType()),
                       ImmGCPtr(type), &failures);

        if (checkTypeset) {
            TypedOrValueRegister valReg = value.reg();
            types::HeapTypeSet *propTypes = type->maybeGetProperty(shape->propid());
            JS_ASSERT(propTypes);
            JS_ASSERT(!propTypes->unknown());

            // Reuse |object| as scratch; its value is saved on the stack and
            // restored on both the success and barrierFailure paths.
            Register scratchReg = object;
            masm.push(scratchReg);

            masm.guardTypeSet(valReg, propTypes, scratchReg, &barrierFailure);
            masm.pop(object);
        }
    }

    if (obj->isFixedSlot(shape->slot())) {
        // Fixed slot: store directly into the object.
        Address addr(object, JSObject::getFixedSlotOffset(shape->slot()));

        if (cx->zone()->needsBarrier())
            masm.callPreBarrier(addr, MIRType_Value);

        masm.storeConstantOrRegister(value, addr);
    } else {
        // Dynamic slot: load the slots array first (clobbers |object|,
        // which is no longer needed after this point).
        Register slotsReg = object;
        masm.loadPtr(Address(object, JSObject::offsetOfSlots()), slotsReg);

        Address addr(slotsReg, obj->dynamicSlotIndex(shape->slot()) * sizeof(Value));

        if (cx->zone()->needsBarrier())
            masm.callPreBarrier(addr, MIRType_Value);

        masm.storeConstantOrRegister(value, addr);
    }

    attacher.jumpRejoin(masm);

    // Restore |object| if the type-set guard failed with it still pushed.
    if (barrierFailure.used()) {
        masm.bind(&barrierFailure);
        masm.pop(object);
    }

    masm.bind(&failures);
    attacher.jumpNextStub(masm);
}
michael@0 2008
michael@0 2009 bool
michael@0 2010 SetPropertyIC::attachSetSlot(JSContext *cx, IonScript *ion, HandleObject obj,
michael@0 2011 HandleShape shape, bool checkTypeset)
michael@0 2012 {
michael@0 2013 MacroAssembler masm(cx, ion);
michael@0 2014 RepatchStubAppender attacher(*this);
michael@0 2015 GenerateSetSlot(cx, masm, attacher, obj, shape, object(), value(), needsTypeBarrier(),
michael@0 2016 checkTypeset);
michael@0 2017 return linkAndAttachStub(cx, masm, attacher, ion, "setting");
michael@0 2018 }
michael@0 2019
michael@0 2020 static bool
michael@0 2021 IsCacheableSetPropCallNative(HandleObject obj, HandleObject holder, HandleShape shape)
michael@0 2022 {
michael@0 2023 JS_ASSERT(obj->isNative());
michael@0 2024
michael@0 2025 if (!shape || !IsCacheableProtoChain(obj, holder))
michael@0 2026 return false;
michael@0 2027
michael@0 2028 return shape->hasSetterValue() && shape->setterObject() &&
michael@0 2029 shape->setterObject()->is<JSFunction>() &&
michael@0 2030 shape->setterObject()->as<JSFunction>().isNative();
michael@0 2031 }
michael@0 2032
michael@0 2033 static bool
michael@0 2034 IsCacheableSetPropCallPropertyOp(HandleObject obj, HandleObject holder, HandleShape shape)
michael@0 2035 {
michael@0 2036 JS_ASSERT(obj->isNative());
michael@0 2037
michael@0 2038 if (!shape)
michael@0 2039 return false;
michael@0 2040
michael@0 2041 if (!IsCacheableProtoChain(obj, holder))
michael@0 2042 return false;
michael@0 2043
michael@0 2044 if (shape->hasSlot())
michael@0 2045 return false;
michael@0 2046
michael@0 2047 if (shape->hasDefaultSetter())
michael@0 2048 return false;
michael@0 2049
michael@0 2050 if (shape->hasSetterValue())
michael@0 2051 return false;
michael@0 2052
michael@0 2053 // Despite the vehement claims of Shape.h that writable() is only
michael@0 2054 // relevant for data descriptors, some PropertyOp setters care
michael@0 2055 // desperately about its value. The flag should be always true, apart
michael@0 2056 // from these rare instances.
michael@0 2057 if (!shape->writable())
michael@0 2058 return false;
michael@0 2059
michael@0 2060 return true;
michael@0 2061 }
michael@0 2062
// Emit an out-of-line call to Proxy::set from within a stub. Saves live
// registers, builds stack-allocated Handles for the arguments, enters a
// fake exit frame for the OOL call, performs the ABI call, and restores
// state. Returns false if the fake exit frame cannot be built.
static bool
EmitCallProxySet(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &attacher,
                 HandleId propId, RegisterSet liveRegs, Register object,
                 ConstantOrRegister value, void *returnAddr, bool strict)
{
    MacroAssembler::AfterICSaveLive aic = masm.icSaveLive(liveRegs);

    // Remaining registers should be free, but we need to use |object| still
    // so leave it alone.
    RegisterSet regSet(RegisterSet::All());
    regSet.take(AnyRegister(object));

    // Proxy::set(JSContext *cx, HandleObject proxy, HandleObject receiver, HandleId id,
    //            bool strict, MutableHandleValue vp)
    Register argJSContextReg = regSet.takeGeneral();
    Register argProxyReg     = regSet.takeGeneral();
    Register argIdReg        = regSet.takeGeneral();
    Register argVpReg        = regSet.takeGeneral();
    Register argStrictReg    = regSet.takeGeneral();

    Register scratch         = regSet.takeGeneral();

    // Push stubCode for marking.
    attacher.pushStubCodePointer(masm);

    // Push args on stack first so we can take pointers to make handles.
    masm.Push(value);
    masm.movePtr(StackPointer, argVpReg);

    masm.Push(propId, scratch);
    masm.movePtr(StackPointer, argIdReg);

    // Pushing object and receiver.  Both are the same, so Handle to one is equivalent to
    // handle to other.
    masm.Push(object);
    masm.Push(object);
    masm.movePtr(StackPointer, argProxyReg);

    masm.loadJSContext(argJSContextReg);
    masm.move32(Imm32(strict? 1 : 0), argStrictReg);

    // Record the return address so the frame can be unwound, then enter the
    // fake exit frame for the out-of-line proxy call.
    if (!masm.icBuildOOLFakeExitFrame(returnAddr, aic))
        return false;
    masm.enterFakeExitFrame(ION_FRAME_OOL_PROXY);

    // Make the call.
    masm.setupUnalignedABICall(6, scratch);
    masm.passABIArg(argJSContextReg);
    masm.passABIArg(argProxyReg);
    masm.passABIArg(argProxyReg);
    masm.passABIArg(argIdReg);
    masm.passABIArg(argStrictReg);
    masm.passABIArg(argVpReg);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, Proxy::set));

    // Test for failure.
    masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());

    // masm.leaveExitFrame & pop locals
    masm.adjustStack(IonOOLProxyExitFrameLayout::Size());

    masm.icRestoreLive(liveRegs, aic);
    return true;
}
michael@0 2127
// Attach a stub that handles property sets on arbitrary (non-DOM) proxies
// by calling Proxy::set. At most one generic proxy stub is ever attached
// per cache (hasGenericProxyStub_).
bool
SetPropertyIC::attachGenericProxy(JSContext *cx, IonScript *ion, void *returnAddr)
{
    JS_ASSERT(!hasGenericProxyStub());

    MacroAssembler masm(cx, ion, script_, pc_);
    RepatchStubAppender attacher(*this);

    Label failures;
    {
        // Scoped scratch register: pushed before the guards, popped on both
        // the failure and success paths before leaving the scope.
        Label proxyFailures;
        Label proxySuccess;

        RegisterSet regSet(RegisterSet::All());
        regSet.take(AnyRegister(object()));
        if (!value().constant())
            regSet.takeUnchecked(value().reg());

        Register scratch = regSet.takeGeneral();
        masm.push(scratch);

        GenerateProxyClassGuards(masm, object(), scratch, &proxyFailures);

        // Remove the DOM proxies. They'll take care of themselves so this stub doesn't
        // catch too much. The failure case is actually Equal. Fall through to the failure code.
        masm.branchTestProxyHandlerFamily(Assembler::NotEqual, object(), scratch,
                                          GetDOMProxyHandlerFamily(), &proxySuccess);

        masm.bind(&proxyFailures);
        masm.pop(scratch);
        // Unify the point of failure to allow for later DOM proxy handling.
        masm.jump(&failures);

        masm.bind(&proxySuccess);
        masm.pop(scratch);
    }

    // Emit the out-of-line call to Proxy::set.
    RootedId propId(cx, AtomToId(name()));
    if (!EmitCallProxySet(cx, masm, attacher, propId, liveRegs_, object(), value(),
                          returnAddr, strict()))
    {
        return false;
    }

    attacher.jumpRejoin(masm);

    masm.bind(&failures);
    attacher.jumpNextStub(masm);

    JS_ASSERT(!hasGenericProxyStub_);
    hasGenericProxyStub_ = true;

    return linkAndAttachStub(cx, masm, attacher, ion, "generic proxy set");
}
michael@0 2182
// Attach a stub for sets on a cacheable DOM proxy whose own handler shadows
// the property: guard on the proxy's shape and handler family, then call
// straight into Proxy::set (the expando check is skipped since the proxy
// shadows regardless).
bool
SetPropertyIC::attachDOMProxyShadowed(JSContext *cx, IonScript *ion, HandleObject obj,
                                      void *returnAddr)
{
    JS_ASSERT(IsCacheableDOMProxy(obj));

    Label failures;
    MacroAssembler masm(cx, ion, script_, pc_);
    RepatchStubAppender attacher(*this);

    // Guard on the shape of the object.
    masm.branchPtr(Assembler::NotEqual,
                   Address(object(), JSObject::offsetOfShape()),
                   ImmGCPtr(obj->lastProperty()), &failures);

    // Make sure object is a DOMProxy
    GenerateDOMProxyChecks(cx, masm, obj, name(), object(), &failures,
                           /*skipExpandoCheck=*/true);

    RootedId propId(cx, AtomToId(name()));
    if (!EmitCallProxySet(cx, masm, attacher, propId, liveRegs_, object(),
                          value(), returnAddr, strict()))
    {
        return false;
    }

    // Success.
    attacher.jumpRejoin(masm);

    // Failure.
    masm.bind(&failures);
    attacher.jumpNextStub(masm);

    return linkAndAttachStub(cx, masm, attacher, ion, "DOM proxy shadowed set");
}
michael@0 2218
michael@0 2219 static bool
michael@0 2220 GenerateCallSetter(JSContext *cx, IonScript *ion, MacroAssembler &masm,
michael@0 2221 IonCache::StubAttacher &attacher, HandleObject obj,
michael@0 2222 HandleObject holder, HandleShape shape, bool strict, Register object,
michael@0 2223 ConstantOrRegister value, Label *failure, RegisterSet liveRegs,
michael@0 2224 void *returnAddr)
michael@0 2225 {
michael@0 2226 // Generate prototype guards if needed.
michael@0 2227 // Take a scratch register for use, save on stack.
michael@0 2228 {
michael@0 2229 RegisterSet regSet(RegisterSet::All());
michael@0 2230 regSet.take(AnyRegister(object));
michael@0 2231 if (!value.constant())
michael@0 2232 regSet.takeUnchecked(value.reg());
michael@0 2233 Register scratchReg = regSet.takeGeneral();
michael@0 2234 masm.push(scratchReg);
michael@0 2235
michael@0 2236 Label protoFailure;
michael@0 2237 Label protoSuccess;
michael@0 2238
michael@0 2239 // Generate prototype/shape guards.
michael@0 2240 if (obj != holder)
michael@0 2241 GeneratePrototypeGuards(cx, ion, masm, obj, holder, object, scratchReg, &protoFailure);
michael@0 2242
michael@0 2243 masm.moveNurseryPtr(ImmMaybeNurseryPtr(holder), scratchReg);
michael@0 2244 masm.branchPtr(Assembler::NotEqual,
michael@0 2245 Address(scratchReg, JSObject::offsetOfShape()),
michael@0 2246 ImmGCPtr(holder->lastProperty()),
michael@0 2247 &protoFailure);
michael@0 2248
michael@0 2249 masm.jump(&protoSuccess);
michael@0 2250
michael@0 2251 masm.bind(&protoFailure);
michael@0 2252 masm.pop(scratchReg);
michael@0 2253 masm.jump(failure);
michael@0 2254
michael@0 2255 masm.bind(&protoSuccess);
michael@0 2256 masm.pop(scratchReg);
michael@0 2257 }
michael@0 2258
michael@0 2259 // Good to go for invoking setter.
michael@0 2260
michael@0 2261 MacroAssembler::AfterICSaveLive aic = masm.icSaveLive(liveRegs);
michael@0 2262
michael@0 2263 // Remaining registers should basically be free, but we need to use |object| still
michael@0 2264 // so leave it alone.
michael@0 2265 RegisterSet regSet(RegisterSet::All());
michael@0 2266 regSet.take(AnyRegister(object));
michael@0 2267
michael@0 2268 // This is a slower stub path, and we're going to be doing a call anyway. Don't need
michael@0 2269 // to try so hard to not use the stack. Scratch regs are just taken from the register
michael@0 2270 // set not including the input, current value saved on the stack, and restored when
michael@0 2271 // we're done with it.
michael@0 2272 //
michael@0 2273 // Be very careful not to use any of these before value is pushed, since they
michael@0 2274 // might shadow.
michael@0 2275 Register scratchReg = regSet.takeGeneral();
michael@0 2276 Register argJSContextReg = regSet.takeGeneral();
michael@0 2277 Register argVpReg = regSet.takeGeneral();
michael@0 2278
michael@0 2279 bool callNative = IsCacheableSetPropCallNative(obj, holder, shape);
michael@0 2280 JS_ASSERT_IF(!callNative, IsCacheableSetPropCallPropertyOp(obj, holder, shape));
michael@0 2281
michael@0 2282 if (callNative) {
michael@0 2283 JS_ASSERT(shape->hasSetterValue() && shape->setterObject() &&
michael@0 2284 shape->setterObject()->is<JSFunction>());
michael@0 2285 JSFunction *target = &shape->setterObject()->as<JSFunction>();
michael@0 2286
michael@0 2287 JS_ASSERT(target->isNative());
michael@0 2288
michael@0 2289 Register argUintNReg = regSet.takeGeneral();
michael@0 2290
michael@0 2291 // Set up the call:
michael@0 2292 // bool (*)(JSContext *, unsigned, Value *vp)
michael@0 2293 // vp[0] is callee/outparam
michael@0 2294 // vp[1] is |this|
michael@0 2295 // vp[2] is the value
michael@0 2296
michael@0 2297 // Build vp and move the base into argVpReg.
michael@0 2298 masm.Push(value);
michael@0 2299 masm.Push(TypedOrValueRegister(MIRType_Object, AnyRegister(object)));
michael@0 2300 masm.Push(ObjectValue(*target));
michael@0 2301 masm.movePtr(StackPointer, argVpReg);
michael@0 2302
michael@0 2303 // Preload other regs
michael@0 2304 masm.loadJSContext(argJSContextReg);
michael@0 2305 masm.move32(Imm32(1), argUintNReg);
michael@0 2306
michael@0 2307 // Push data for GC marking
michael@0 2308 masm.Push(argUintNReg);
michael@0 2309 attacher.pushStubCodePointer(masm);
michael@0 2310
michael@0 2311 if (!masm.icBuildOOLFakeExitFrame(returnAddr, aic))
michael@0 2312 return false;
michael@0 2313 masm.enterFakeExitFrame(ION_FRAME_OOL_NATIVE);
michael@0 2314
michael@0 2315 // Make the call
michael@0 2316 masm.setupUnalignedABICall(3, scratchReg);
michael@0 2317 masm.passABIArg(argJSContextReg);
michael@0 2318 masm.passABIArg(argUintNReg);
michael@0 2319 masm.passABIArg(argVpReg);
michael@0 2320 masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, target->native()));
michael@0 2321
michael@0 2322 // Test for failure.
michael@0 2323 masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());
michael@0 2324
michael@0 2325 // masm.leaveExitFrame & pop locals.
michael@0 2326 masm.adjustStack(IonOOLNativeExitFrameLayout::Size(1));
michael@0 2327 } else {
michael@0 2328 Register argObjReg = regSet.takeGeneral();
michael@0 2329 Register argIdReg = regSet.takeGeneral();
michael@0 2330 Register argStrictReg = regSet.takeGeneral();
michael@0 2331
michael@0 2332 attacher.pushStubCodePointer(masm);
michael@0 2333
michael@0 2334 StrictPropertyOp target = shape->setterOp();
michael@0 2335 JS_ASSERT(target);
michael@0 2336 // JSStrictPropertyOp: bool fn(JSContext *cx, HandleObject obj,
michael@0 2337 // HandleId id, bool strict, MutableHandleValue vp);
michael@0 2338
michael@0 2339 // Push args on stack first so we can take pointers to make handles.
michael@0 2340 if (value.constant())
michael@0 2341 masm.Push(value.value());
michael@0 2342 else
michael@0 2343 masm.Push(value.reg());
michael@0 2344 masm.movePtr(StackPointer, argVpReg);
michael@0 2345
michael@0 2346 masm.move32(Imm32(strict ? 1 : 0), argStrictReg);
michael@0 2347
michael@0 2348 // push canonical jsid from shape instead of propertyname.
michael@0 2349 masm.Push(shape->propid(), argIdReg);
michael@0 2350 masm.movePtr(StackPointer, argIdReg);
michael@0 2351
michael@0 2352 masm.Push(object);
michael@0 2353 masm.movePtr(StackPointer, argObjReg);
michael@0 2354
michael@0 2355 masm.loadJSContext(argJSContextReg);
michael@0 2356
michael@0 2357 if (!masm.icBuildOOLFakeExitFrame(returnAddr, aic))
michael@0 2358 return false;
michael@0 2359 masm.enterFakeExitFrame(ION_FRAME_OOL_PROPERTY_OP);
michael@0 2360
michael@0 2361 // Make the call.
michael@0 2362 masm.setupUnalignedABICall(5, scratchReg);
michael@0 2363 masm.passABIArg(argJSContextReg);
michael@0 2364 masm.passABIArg(argObjReg);
michael@0 2365 masm.passABIArg(argIdReg);
michael@0 2366 masm.passABIArg(argStrictReg);
michael@0 2367 masm.passABIArg(argVpReg);
michael@0 2368 masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, target));
michael@0 2369
michael@0 2370 // Test for failure.
michael@0 2371 masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());
michael@0 2372
michael@0 2373 // masm.leaveExitFrame & pop locals.
michael@0 2374 masm.adjustStack(IonOOLPropertyOpExitFrameLayout::Size());
michael@0 2375 }
michael@0 2376
michael@0 2377 masm.icRestoreLive(liveRegs, aic);
michael@0 2378 return true;
michael@0 2379 }
michael@0 2380
michael@0 2381 static bool
michael@0 2382 IsCacheableDOMProxyUnshadowedSetterCall(JSContext *cx, HandleObject obj, HandlePropertyName name,
michael@0 2383 MutableHandleObject holder, MutableHandleShape shape,
michael@0 2384 bool *isSetter)
michael@0 2385 {
michael@0 2386 JS_ASSERT(IsCacheableDOMProxy(obj));
michael@0 2387
michael@0 2388 *isSetter = false;
michael@0 2389
michael@0 2390 RootedObject checkObj(cx, obj->getTaggedProto().toObjectOrNull());
michael@0 2391 if (!checkObj)
michael@0 2392 return true;
michael@0 2393
michael@0 2394 if (!JSObject::lookupProperty(cx, obj, name, holder, shape))
michael@0 2395 return false;
michael@0 2396
michael@0 2397 if (!holder)
michael@0 2398 return true;
michael@0 2399
michael@0 2400 if (!IsCacheableSetPropCallNative(checkObj, holder, shape) &&
michael@0 2401 !IsCacheableSetPropCallPropertyOp(checkObj, holder, shape))
michael@0 2402 {
michael@0 2403 return true;
michael@0 2404 }
michael@0 2405
michael@0 2406 *isSetter = true;
michael@0 2407 return true;
michael@0 2408 }
michael@0 2409
michael@0 2410 bool
michael@0 2411 SetPropertyIC::attachDOMProxyUnshadowed(JSContext *cx, IonScript *ion, HandleObject obj,
michael@0 2412 void *returnAddr)
michael@0 2413 {
michael@0 2414 JS_ASSERT(IsCacheableDOMProxy(obj));
michael@0 2415
michael@0 2416 Label failures;
michael@0 2417 MacroAssembler masm(cx, ion, script_, pc_);
michael@0 2418 RepatchStubAppender attacher(*this);
michael@0 2419
michael@0 2420 // Guard on the shape of the object.
michael@0 2421 masm.branchPtr(Assembler::NotEqual,
michael@0 2422 Address(object(), JSObject::offsetOfShape()),
michael@0 2423 ImmGCPtr(obj->lastProperty()), &failures);
michael@0 2424
michael@0 2425 // Make sure object is a DOMProxy
michael@0 2426 GenerateDOMProxyChecks(cx, masm, obj, name(), object(), &failures);
michael@0 2427
michael@0 2428 RootedPropertyName propName(cx, name());
michael@0 2429 RootedObject holder(cx);
michael@0 2430 RootedShape shape(cx);
michael@0 2431 bool isSetter;
michael@0 2432 if (!IsCacheableDOMProxyUnshadowedSetterCall(cx, obj, propName, &holder,
michael@0 2433 &shape, &isSetter))
michael@0 2434 {
michael@0 2435 return false;
michael@0 2436 }
michael@0 2437
michael@0 2438 if (isSetter) {
michael@0 2439 if (!GenerateCallSetter(cx, ion, masm, attacher, obj, holder, shape, strict(),
michael@0 2440 object(), value(), &failures, liveRegs_, returnAddr))
michael@0 2441 {
michael@0 2442 return false;
michael@0 2443 }
michael@0 2444 } else {
michael@0 2445 // Either there was no proto, or the property wasn't appropriately found on it.
michael@0 2446 // Drop back to just a call to Proxy::set().
michael@0 2447 RootedId propId(cx, AtomToId(name()));
michael@0 2448 if (!EmitCallProxySet(cx, masm, attacher, propId, liveRegs_, object(),
michael@0 2449 value(), returnAddr, strict()))
michael@0 2450 {
michael@0 2451 return false;
michael@0 2452 }
michael@0 2453 }
michael@0 2454
michael@0 2455 // Success.
michael@0 2456 attacher.jumpRejoin(masm);
michael@0 2457
michael@0 2458 // Failure.
michael@0 2459 masm.bind(&failures);
michael@0 2460 attacher.jumpNextStub(masm);
michael@0 2461
michael@0 2462 return linkAndAttachStub(cx, masm, attacher, ion, "DOM proxy unshadowed set");
michael@0 2463 }
michael@0 2464
michael@0 2465 bool
michael@0 2466 SetPropertyIC::attachCallSetter(JSContext *cx, IonScript *ion,
michael@0 2467 HandleObject obj, HandleObject holder, HandleShape shape,
michael@0 2468 void *returnAddr)
michael@0 2469 {
michael@0 2470 JS_ASSERT(obj->isNative());
michael@0 2471
michael@0 2472 MacroAssembler masm(cx, ion, script_, pc_);
michael@0 2473 RepatchStubAppender attacher(*this);
michael@0 2474
michael@0 2475 Label failure;
michael@0 2476 masm.branchPtr(Assembler::NotEqual,
michael@0 2477 Address(object(), JSObject::offsetOfShape()),
michael@0 2478 ImmGCPtr(obj->lastProperty()),
michael@0 2479 &failure);
michael@0 2480
michael@0 2481 if (!GenerateCallSetter(cx, ion, masm, attacher, obj, holder, shape, strict(),
michael@0 2482 object(), value(), &failure, liveRegs_, returnAddr))
michael@0 2483 {
michael@0 2484 return false;
michael@0 2485 }
michael@0 2486
michael@0 2487 // Rejoin jump.
michael@0 2488 attacher.jumpRejoin(masm);
michael@0 2489
michael@0 2490 // Jump to next stub.
michael@0 2491 masm.bind(&failure);
michael@0 2492 attacher.jumpNextStub(masm);
michael@0 2493
michael@0 2494 return linkAndAttachStub(cx, masm, attacher, ion, "setter call");
michael@0 2495 }
michael@0 2496
michael@0 2497 static void
michael@0 2498 GenerateAddSlot(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &attacher,
michael@0 2499 JSObject *obj, Shape *oldShape, Register object, ConstantOrRegister value,
michael@0 2500 bool checkTypeset)
michael@0 2501 {
michael@0 2502 JS_ASSERT(obj->isNative());
michael@0 2503
michael@0 2504 Label failures;
michael@0 2505
michael@0 2506 // Guard the type of the object
michael@0 2507 masm.branchPtr(Assembler::NotEqual, Address(object, JSObject::offsetOfType()),
michael@0 2508 ImmGCPtr(obj->type()), &failures);
michael@0 2509
michael@0 2510 // Guard shapes along prototype chain.
michael@0 2511 masm.branchTestObjShape(Assembler::NotEqual, object, oldShape, &failures);
michael@0 2512
michael@0 2513 Label failuresPopObject;
michael@0 2514 masm.push(object); // save object reg because we clobber it
michael@0 2515
michael@0 2516 // Guard that the incoming value is in the type set for the property
michael@0 2517 // if a type barrier is required.
michael@0 2518 if (checkTypeset) {
michael@0 2519 TypedOrValueRegister valReg = value.reg();
michael@0 2520 types::TypeObject *type = obj->type();
michael@0 2521 types::HeapTypeSet *propTypes = type->maybeGetProperty(obj->lastProperty()->propid());
michael@0 2522 JS_ASSERT(propTypes);
michael@0 2523 JS_ASSERT(!propTypes->unknown());
michael@0 2524
michael@0 2525 Register scratchReg = object;
michael@0 2526 masm.guardTypeSet(valReg, propTypes, scratchReg, &failuresPopObject);
michael@0 2527 masm.loadPtr(Address(StackPointer, 0), object);
michael@0 2528 }
michael@0 2529
michael@0 2530 JSObject *proto = obj->getProto();
michael@0 2531 Register protoReg = object;
michael@0 2532 while (proto) {
michael@0 2533 Shape *protoShape = proto->lastProperty();
michael@0 2534
michael@0 2535 // load next prototype
michael@0 2536 masm.loadObjProto(protoReg, protoReg);
michael@0 2537
michael@0 2538 // Ensure that its shape matches.
michael@0 2539 masm.branchTestObjShape(Assembler::NotEqual, protoReg, protoShape, &failuresPopObject);
michael@0 2540
michael@0 2541 proto = proto->getProto();
michael@0 2542 }
michael@0 2543
michael@0 2544 masm.pop(object); // restore object reg
michael@0 2545
michael@0 2546 // Changing object shape. Write the object's new shape.
michael@0 2547 Shape *newShape = obj->lastProperty();
michael@0 2548 Address shapeAddr(object, JSObject::offsetOfShape());
michael@0 2549 if (cx->zone()->needsBarrier())
michael@0 2550 masm.callPreBarrier(shapeAddr, MIRType_Shape);
michael@0 2551 masm.storePtr(ImmGCPtr(newShape), shapeAddr);
michael@0 2552
michael@0 2553 // Set the value on the object. Since this is an add, obj->lastProperty()
michael@0 2554 // must be the shape of the property we are adding.
michael@0 2555 if (obj->isFixedSlot(newShape->slot())) {
michael@0 2556 Address addr(object, JSObject::getFixedSlotOffset(newShape->slot()));
michael@0 2557 masm.storeConstantOrRegister(value, addr);
michael@0 2558 } else {
michael@0 2559 Register slotsReg = object;
michael@0 2560
michael@0 2561 masm.loadPtr(Address(object, JSObject::offsetOfSlots()), slotsReg);
michael@0 2562
michael@0 2563 Address addr(slotsReg, obj->dynamicSlotIndex(newShape->slot()) * sizeof(Value));
michael@0 2564 masm.storeConstantOrRegister(value, addr);
michael@0 2565 }
michael@0 2566
michael@0 2567 // Success.
michael@0 2568 attacher.jumpRejoin(masm);
michael@0 2569
michael@0 2570 // Failure.
michael@0 2571 masm.bind(&failuresPopObject);
michael@0 2572 masm.pop(object);
michael@0 2573 masm.bind(&failures);
michael@0 2574
michael@0 2575 attacher.jumpNextStub(masm);
michael@0 2576 }
michael@0 2577
michael@0 2578 bool
michael@0 2579 SetPropertyIC::attachAddSlot(JSContext *cx, IonScript *ion, JSObject *obj, HandleShape oldShape,
michael@0 2580 bool checkTypeset)
michael@0 2581 {
michael@0 2582 JS_ASSERT_IF(!needsTypeBarrier(), !checkTypeset);
michael@0 2583
michael@0 2584 MacroAssembler masm(cx, ion);
michael@0 2585 RepatchStubAppender attacher(*this);
michael@0 2586 GenerateAddSlot(cx, masm, attacher, obj, oldShape, object(), value(), checkTypeset);
michael@0 2587 return linkAndAttachStub(cx, masm, attacher, ion, "adding");
michael@0 2588 }
michael@0 2589
michael@0 2590 static bool
michael@0 2591 CanInlineSetPropTypeCheck(JSObject *obj, jsid id, ConstantOrRegister val, bool *checkTypeset)
michael@0 2592 {
michael@0 2593 bool shouldCheck = false;
michael@0 2594 types::TypeObject *type = obj->type();
michael@0 2595 if (!type->unknownProperties()) {
michael@0 2596 types::HeapTypeSet *propTypes = type->maybeGetProperty(id);
michael@0 2597 if (!propTypes)
michael@0 2598 return false;
michael@0 2599 if (!propTypes->unknown()) {
michael@0 2600 shouldCheck = true;
michael@0 2601 if (val.constant()) {
michael@0 2602 // If the input is a constant, then don't bother if the barrier will always fail.
michael@0 2603 if (!propTypes->hasType(types::GetValueType(val.value())))
michael@0 2604 return false;
michael@0 2605 shouldCheck = false;
michael@0 2606 } else {
michael@0 2607 TypedOrValueRegister reg = val.reg();
michael@0 2608 // We can do the same trick as above for primitive types of specialized registers.
michael@0 2609 // TIs handling of objects is complicated enough to warrant a runtime
michael@0 2610 // check, as we can't statically handle the case where the typeset
michael@0 2611 // contains the specific object, but doesn't have ANYOBJECT set.
michael@0 2612 if (reg.hasTyped() && reg.type() != MIRType_Object) {
michael@0 2613 JSValueType valType = ValueTypeFromMIRType(reg.type());
michael@0 2614 if (!propTypes->hasType(types::Type::PrimitiveType(valType)))
michael@0 2615 return false;
michael@0 2616 shouldCheck = false;
michael@0 2617 }
michael@0 2618 }
michael@0 2619 }
michael@0 2620 }
michael@0 2621
michael@0 2622 *checkTypeset = shouldCheck;
michael@0 2623 return true;
michael@0 2624 }
michael@0 2625
michael@0 2626 static bool
michael@0 2627 IsPropertySetInlineable(HandleObject obj, HandleId id, MutableHandleShape pshape,
michael@0 2628 ConstantOrRegister val, bool needsTypeBarrier, bool *checkTypeset)
michael@0 2629 {
michael@0 2630 JS_ASSERT(obj->isNative());
michael@0 2631
michael@0 2632 // Do a pure non-proto chain climbing lookup. See note in
michael@0 2633 // CanAttachNativeGetProp.
michael@0 2634 pshape.set(obj->nativeLookupPure(id));
michael@0 2635
michael@0 2636 if (!pshape)
michael@0 2637 return false;
michael@0 2638
michael@0 2639 if (!pshape->hasSlot())
michael@0 2640 return false;
michael@0 2641
michael@0 2642 if (!pshape->hasDefaultSetter())
michael@0 2643 return false;
michael@0 2644
michael@0 2645 if (!pshape->writable())
michael@0 2646 return false;
michael@0 2647
michael@0 2648 if (needsTypeBarrier)
michael@0 2649 return CanInlineSetPropTypeCheck(obj, id, val, checkTypeset);
michael@0 2650
michael@0 2651 return true;
michael@0 2652 }
michael@0 2653
michael@0 2654 static bool
michael@0 2655 IsPropertyAddInlineable(HandleObject obj, HandleId id, ConstantOrRegister val, uint32_t oldSlots,
michael@0 2656 HandleShape oldShape, bool needsTypeBarrier, bool *checkTypeset)
michael@0 2657 {
michael@0 2658 JS_ASSERT(obj->isNative());
michael@0 2659
michael@0 2660 // If the shape of the object did not change, then this was not an add.
michael@0 2661 if (obj->lastProperty() == oldShape)
michael@0 2662 return false;
michael@0 2663
michael@0 2664 Shape *shape = obj->nativeLookupPure(id);
michael@0 2665 if (!shape || shape->inDictionary() || !shape->hasSlot() || !shape->hasDefaultSetter())
michael@0 2666 return false;
michael@0 2667
michael@0 2668 // If we have a shape at this point and the object's shape changed, then
michael@0 2669 // the shape must be the one we just added.
michael@0 2670 JS_ASSERT(shape == obj->lastProperty());
michael@0 2671
michael@0 2672 // If object has a non-default resolve hook, don't inline
michael@0 2673 if (obj->getClass()->resolve != JS_ResolveStub)
michael@0 2674 return false;
michael@0 2675
michael@0 2676 // Likewise for a non-default addProperty hook, since we'll need
michael@0 2677 // to invoke it.
michael@0 2678 if (obj->getClass()->addProperty != JS_PropertyStub)
michael@0 2679 return false;
michael@0 2680
michael@0 2681 if (!obj->nonProxyIsExtensible() || !shape->writable())
michael@0 2682 return false;
michael@0 2683
michael@0 2684 // Walk up the object prototype chain and ensure that all prototypes
michael@0 2685 // are native, and that all prototypes have no getter or setter
michael@0 2686 // defined on the property
michael@0 2687 for (JSObject *proto = obj->getProto(); proto; proto = proto->getProto()) {
michael@0 2688 // If prototype is non-native, don't optimize
michael@0 2689 if (!proto->isNative())
michael@0 2690 return false;
michael@0 2691
michael@0 2692 // If prototype defines this property in a non-plain way, don't optimize
michael@0 2693 Shape *protoShape = proto->nativeLookupPure(id);
michael@0 2694 if (protoShape && !protoShape->hasDefaultSetter())
michael@0 2695 return false;
michael@0 2696
michael@0 2697 // Otherwise, if there's no such property, watch out for a resolve
michael@0 2698 // hook that would need to be invoked and thus prevent inlining of
michael@0 2699 // property addition.
michael@0 2700 if (proto->getClass()->resolve != JS_ResolveStub)
michael@0 2701 return false;
michael@0 2702 }
michael@0 2703
michael@0 2704 // Only add a IC entry if the dynamic slots didn't change when the shapes
michael@0 2705 // changed. Need to ensure that a shape change for a subsequent object
michael@0 2706 // won't involve reallocating the slot array.
michael@0 2707 if (obj->numDynamicSlots() != oldSlots)
michael@0 2708 return false;
michael@0 2709
michael@0 2710 if (needsTypeBarrier)
michael@0 2711 return CanInlineSetPropTypeCheck(obj, id, val, checkTypeset);
michael@0 2712
michael@0 2713 *checkTypeset = false;
michael@0 2714 return true;
michael@0 2715 }
michael@0 2716
michael@0 2717 static SetPropertyIC::NativeSetPropCacheability
michael@0 2718 CanAttachNativeSetProp(HandleObject obj, HandleId id, ConstantOrRegister val,
michael@0 2719 bool needsTypeBarrier, MutableHandleObject holder,
michael@0 2720 MutableHandleShape shape, bool *checkTypeset)
michael@0 2721 {
michael@0 2722 if (!obj->isNative())
michael@0 2723 return SetPropertyIC::CanAttachNone;
michael@0 2724
michael@0 2725 // See if the property exists on the object.
michael@0 2726 if (IsPropertySetInlineable(obj, id, shape, val, needsTypeBarrier, checkTypeset))
michael@0 2727 return SetPropertyIC::CanAttachSetSlot;
michael@0 2728
michael@0 2729 // If we couldn't find the property on the object itself, do a full, but
michael@0 2730 // still pure lookup for setters.
michael@0 2731 if (!LookupPropertyPure(obj, id, holder.address(), shape.address()))
michael@0 2732 return SetPropertyIC::CanAttachNone;
michael@0 2733
michael@0 2734 // If the object doesn't have the property, we don't know if we can attach
michael@0 2735 // a stub to add the property until we do the VM call to add. If the
michael@0 2736 // property exists as a data property on the prototype, we should add
michael@0 2737 // a new, shadowing property.
michael@0 2738 if (!shape || (obj != holder && shape->hasDefaultSetter() && shape->hasSlot()))
michael@0 2739 return SetPropertyIC::MaybeCanAttachAddSlot;
michael@0 2740
michael@0 2741 if (IsCacheableSetPropCallPropertyOp(obj, holder, shape) ||
michael@0 2742 IsCacheableSetPropCallNative(obj, holder, shape))
michael@0 2743 {
michael@0 2744 return SetPropertyIC::CanAttachCallSetter;
michael@0 2745 }
michael@0 2746
michael@0 2747 return SetPropertyIC::CanAttachNone;
michael@0 2748 }
michael@0 2749
michael@0 2750 bool
michael@0 2751 SetPropertyIC::update(JSContext *cx, size_t cacheIndex, HandleObject obj,
michael@0 2752 HandleValue value)
michael@0 2753 {
michael@0 2754 void *returnAddr;
michael@0 2755 RootedScript script(cx, GetTopIonJSScript(cx, &returnAddr));
michael@0 2756 IonScript *ion = script->ionScript();
michael@0 2757 SetPropertyIC &cache = ion->getCache(cacheIndex).toSetProperty();
michael@0 2758 RootedPropertyName name(cx, cache.name());
michael@0 2759 RootedId id(cx, AtomToId(name));
michael@0 2760
michael@0 2761 // Stop generating new stubs once we hit the stub count limit, see
michael@0 2762 // GetPropertyCache.
michael@0 2763 bool inlinable = cache.canAttachStub() && !obj->watched();
michael@0 2764 NativeSetPropCacheability canCache = CanAttachNone;
michael@0 2765 bool addedSetterStub = false;
michael@0 2766 if (inlinable) {
michael@0 2767 if (!addedSetterStub && obj->is<ProxyObject>()) {
michael@0 2768 if (IsCacheableDOMProxy(obj)) {
michael@0 2769 DOMProxyShadowsResult shadows = GetDOMProxyShadowsCheck()(cx, obj, id);
michael@0 2770 if (shadows == ShadowCheckFailed)
michael@0 2771 return false;
michael@0 2772 if (shadows == Shadows) {
michael@0 2773 if (!cache.attachDOMProxyShadowed(cx, ion, obj, returnAddr))
michael@0 2774 return false;
michael@0 2775 addedSetterStub = true;
michael@0 2776 } else {
michael@0 2777 JS_ASSERT(shadows == DoesntShadow || shadows == DoesntShadowUnique);
michael@0 2778 if (shadows == DoesntShadowUnique)
michael@0 2779 cache.reset();
michael@0 2780 if (!cache.attachDOMProxyUnshadowed(cx, ion, obj, returnAddr))
michael@0 2781 return false;
michael@0 2782 addedSetterStub = true;
michael@0 2783 }
michael@0 2784 }
michael@0 2785
michael@0 2786 if (!addedSetterStub && !cache.hasGenericProxyStub()) {
michael@0 2787 if (!cache.attachGenericProxy(cx, ion, returnAddr))
michael@0 2788 return false;
michael@0 2789 addedSetterStub = true;
michael@0 2790 }
michael@0 2791 }
michael@0 2792
michael@0 2793 // Make sure the object de-lazifies its type. We do this here so that
michael@0 2794 // the parallel IC can share code that assumes that native objects all
michael@0 2795 // have a type object.
michael@0 2796 if (obj->isNative() && !obj->getType(cx))
michael@0 2797 return false;
michael@0 2798
michael@0 2799 RootedShape shape(cx);
michael@0 2800 RootedObject holder(cx);
michael@0 2801 bool checkTypeset;
michael@0 2802 canCache = CanAttachNativeSetProp(obj, id, cache.value(), cache.needsTypeBarrier(),
michael@0 2803 &holder, &shape, &checkTypeset);
michael@0 2804
michael@0 2805 if (!addedSetterStub && canCache == CanAttachSetSlot) {
michael@0 2806 if (!cache.attachSetSlot(cx, ion, obj, shape, checkTypeset))
michael@0 2807 return false;
michael@0 2808 addedSetterStub = true;
michael@0 2809 }
michael@0 2810
michael@0 2811 if (!addedSetterStub && canCache == CanAttachCallSetter) {
michael@0 2812 if (!cache.attachCallSetter(cx, ion, obj, holder, shape, returnAddr))
michael@0 2813 return false;
michael@0 2814 addedSetterStub = true;
michael@0 2815 }
michael@0 2816 }
michael@0 2817
michael@0 2818 uint32_t oldSlots = obj->numDynamicSlots();
michael@0 2819 RootedShape oldShape(cx, obj->lastProperty());
michael@0 2820
michael@0 2821 // Set/Add the property on the object, the inlined cache are setup for the next execution.
michael@0 2822 if (!SetProperty(cx, obj, name, value, cache.strict(), cache.pc()))
michael@0 2823 return false;
michael@0 2824
michael@0 2825 // The property did not exist before, now we can try to inline the property add.
michael@0 2826 bool checkTypeset;
michael@0 2827 if (!addedSetterStub && canCache == MaybeCanAttachAddSlot &&
michael@0 2828 IsPropertyAddInlineable(obj, id, cache.value(), oldSlots, oldShape, cache.needsTypeBarrier(),
michael@0 2829 &checkTypeset))
michael@0 2830 {
michael@0 2831 if (!cache.attachAddSlot(cx, ion, obj, oldShape, checkTypeset))
michael@0 2832 return false;
michael@0 2833 }
michael@0 2834
michael@0 2835 return true;
michael@0 2836 }
michael@0 2837
michael@0 2838 void
michael@0 2839 SetPropertyIC::reset()
michael@0 2840 {
michael@0 2841 RepatchIonCache::reset();
michael@0 2842 hasGenericProxyStub_ = false;
michael@0 2843 }
michael@0 2844
michael@0 2845 bool
michael@0 2846 SetPropertyParIC::update(ForkJoinContext *cx, size_t cacheIndex, HandleObject obj,
michael@0 2847 HandleValue value)
michael@0 2848 {
michael@0 2849 JS_ASSERT(cx->isThreadLocal(obj));
michael@0 2850
michael@0 2851 IonScript *ion = GetTopIonJSScript(cx)->parallelIonScript();
michael@0 2852 SetPropertyParIC &cache = ion->getCache(cacheIndex).toSetPropertyPar();
michael@0 2853
michael@0 2854 RootedValue v(cx, value);
michael@0 2855 RootedId id(cx, AtomToId(cache.name()));
michael@0 2856
michael@0 2857 // Avoid unnecessary locking if cannot attach stubs.
michael@0 2858 if (!cache.canAttachStub()) {
michael@0 2859 return baseops::SetPropertyHelper<ParallelExecution>(
michael@0 2860 cx, obj, obj, id, baseops::Qualified, &v, cache.strict());
michael@0 2861 }
michael@0 2862
michael@0 2863 SetPropertyIC::NativeSetPropCacheability canCache = SetPropertyIC::CanAttachNone;
michael@0 2864 bool attachedStub = false;
michael@0 2865
michael@0 2866 {
michael@0 2867 // See note about locking context in GetPropertyParIC::update.
michael@0 2868 LockedJSContext ncx(cx);
michael@0 2869
michael@0 2870 if (cache.canAttachStub()) {
michael@0 2871 bool alreadyStubbed;
michael@0 2872 if (!cache.hasOrAddStubbedShape(ncx, obj->lastProperty(), &alreadyStubbed))
michael@0 2873 return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
michael@0 2874 if (alreadyStubbed) {
michael@0 2875 return baseops::SetPropertyHelper<ParallelExecution>(
michael@0 2876 cx, obj, obj, id, baseops::Qualified, &v, cache.strict());
michael@0 2877 }
michael@0 2878
michael@0 2879 // If the object has a lazy type, we need to de-lazify it, but
michael@0 2880 // this is not safe in parallel.
michael@0 2881 if (obj->hasLazyType())
michael@0 2882 return false;
michael@0 2883
michael@0 2884 {
michael@0 2885 RootedShape shape(cx);
michael@0 2886 RootedObject holder(cx);
michael@0 2887 bool checkTypeset;
michael@0 2888 canCache = CanAttachNativeSetProp(obj, id, cache.value(), cache.needsTypeBarrier(),
michael@0 2889 &holder, &shape, &checkTypeset);
michael@0 2890
michael@0 2891 if (canCache == SetPropertyIC::CanAttachSetSlot) {
michael@0 2892 if (!cache.attachSetSlot(ncx, ion, obj, shape, checkTypeset))
michael@0 2893 return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
michael@0 2894 attachedStub = true;
michael@0 2895 }
michael@0 2896 }
michael@0 2897 }
michael@0 2898 }
michael@0 2899
michael@0 2900 uint32_t oldSlots = obj->numDynamicSlots();
michael@0 2901 RootedShape oldShape(cx, obj->lastProperty());
michael@0 2902
michael@0 2903 if (!baseops::SetPropertyHelper<ParallelExecution>(cx, obj, obj, id, baseops::Qualified, &v,
michael@0 2904 cache.strict()))
michael@0 2905 {
michael@0 2906 return false;
michael@0 2907 }
michael@0 2908
michael@0 2909 bool checkTypeset;
michael@0 2910 if (!attachedStub && canCache == SetPropertyIC::MaybeCanAttachAddSlot &&
michael@0 2911 IsPropertyAddInlineable(obj, id, cache.value(), oldSlots, oldShape, cache.needsTypeBarrier(),
michael@0 2912 &checkTypeset))
michael@0 2913 {
michael@0 2914 LockedJSContext ncx(cx);
michael@0 2915 if (cache.canAttachStub() && !cache.attachAddSlot(ncx, ion, obj, oldShape, checkTypeset))
michael@0 2916 return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
michael@0 2917 }
michael@0 2918
michael@0 2919 return true;
michael@0 2920 }
michael@0 2921
michael@0 2922 bool
michael@0 2923 SetPropertyParIC::attachSetSlot(LockedJSContext &cx, IonScript *ion, JSObject *obj, Shape *shape,
michael@0 2924 bool checkTypeset)
michael@0 2925 {
michael@0 2926 MacroAssembler masm(cx, ion);
michael@0 2927 DispatchStubPrepender attacher(*this);
michael@0 2928 GenerateSetSlot(cx, masm, attacher, obj, shape, object(), value(), needsTypeBarrier(),
michael@0 2929 checkTypeset);
michael@0 2930 return linkAndAttachStub(cx, masm, attacher, ion, "parallel setting");
michael@0 2931 }
michael@0 2932
michael@0 2933 bool
michael@0 2934 SetPropertyParIC::attachAddSlot(LockedJSContext &cx, IonScript *ion, JSObject *obj, Shape *oldShape,
michael@0 2935 bool checkTypeset)
michael@0 2936 {
michael@0 2937 JS_ASSERT_IF(!needsTypeBarrier(), !checkTypeset);
michael@0 2938
michael@0 2939 MacroAssembler masm(cx, ion);
michael@0 2940 DispatchStubPrepender attacher(*this);
michael@0 2941 GenerateAddSlot(cx, masm, attacher, obj, oldShape, object(), value(), checkTypeset);
michael@0 2942 return linkAndAttachStub(cx, masm, attacher, ion, "parallel adding");
michael@0 2943 }
michael@0 2944
michael@0 2945 const size_t GetElementIC::MAX_FAILED_UPDATES = 16;
michael@0 2946
michael@0 2947 /* static */ bool
michael@0 2948 GetElementIC::canAttachGetProp(JSObject *obj, const Value &idval, jsid id)
michael@0 2949 {
michael@0 2950 uint32_t dummy;
michael@0 2951 return (obj->isNative() &&
michael@0 2952 idval.isString() &&
michael@0 2953 JSID_IS_ATOM(id) &&
michael@0 2954 !JSID_TO_ATOM(id)->isIndex(&dummy));
michael@0 2955 }
michael@0 2956
michael@0 2957 static bool
michael@0 2958 EqualStringsHelper(JSString *str1, JSString *str2)
michael@0 2959 {
michael@0 2960 JS_ASSERT(str1->isAtom());
michael@0 2961 JS_ASSERT(!str2->isAtom());
michael@0 2962 JS_ASSERT(str1->length() == str2->length());
michael@0 2963
michael@0 2964 const jschar *chars = str2->getChars(nullptr);
michael@0 2965 if (!chars)
michael@0 2966 return false;
michael@0 2967 return mozilla::PodEqual(str1->asAtom().chars(), chars, str1->length());
michael@0 2968 }
michael@0 2969
michael@0 2970 bool
michael@0 2971 GetElementIC::attachGetProp(JSContext *cx, IonScript *ion, HandleObject obj,
michael@0 2972 const Value &idval, HandlePropertyName name,
michael@0 2973 void *returnAddr)
michael@0 2974 {
michael@0 2975 JS_ASSERT(index().reg().hasValue());
michael@0 2976
michael@0 2977 RootedObject holder(cx);
michael@0 2978 RootedShape shape(cx);
michael@0 2979
michael@0 2980 GetPropertyIC::NativeGetPropCacheability canCache =
michael@0 2981 CanAttachNativeGetProp(cx, *this, obj, name, &holder, &shape,
michael@0 2982 /* skipArrayLen =*/true);
michael@0 2983
michael@0 2984 bool cacheable = canCache == GetPropertyIC::CanAttachReadSlot ||
michael@0 2985 (canCache == GetPropertyIC::CanAttachCallGetter &&
michael@0 2986 output().hasValue());
michael@0 2987
michael@0 2988 if (!cacheable) {
michael@0 2989 IonSpew(IonSpew_InlineCaches, "GETELEM uncacheable property");
michael@0 2990 return true;
michael@0 2991 }
michael@0 2992
michael@0 2993 JS_ASSERT(idval.isString());
michael@0 2994 JS_ASSERT(idval.toString()->length() == name->length());
michael@0 2995
michael@0 2996 Label failures;
michael@0 2997 MacroAssembler masm(cx, ion);
michael@0 2998
michael@0 2999 // Ensure the index is a string.
michael@0 3000 ValueOperand val = index().reg().valueReg();
michael@0 3001 masm.branchTestString(Assembler::NotEqual, val, &failures);
michael@0 3002
michael@0 3003 Register scratch = output().valueReg().scratchReg();
michael@0 3004 masm.unboxString(val, scratch);
michael@0 3005
michael@0 3006 Label equal;
michael@0 3007 masm.branchPtr(Assembler::Equal, scratch, ImmGCPtr(name), &equal);
michael@0 3008
michael@0 3009 // The pointers are not equal, so if the input string is also an atom it
michael@0 3010 // must be a different string.
michael@0 3011 masm.loadPtr(Address(scratch, JSString::offsetOfLengthAndFlags()), scratch);
michael@0 3012 masm.branchTest32(Assembler::NonZero, scratch, Imm32(JSString::ATOM_BIT), &failures);
michael@0 3013
michael@0 3014 // Check the length.
michael@0 3015 masm.rshiftPtr(Imm32(JSString::LENGTH_SHIFT), scratch);
michael@0 3016 masm.branch32(Assembler::NotEqual, scratch, Imm32(name->length()), &failures);
michael@0 3017
michael@0 3018 // We have a non-atomized string with the same length. For now call a helper
michael@0 3019 // function to do the comparison.
michael@0 3020 RegisterSet volatileRegs = RegisterSet::Volatile();
michael@0 3021 masm.PushRegsInMask(volatileRegs);
michael@0 3022
michael@0 3023 Register objReg = object();
michael@0 3024 JS_ASSERT(objReg != scratch);
michael@0 3025
michael@0 3026 if (!volatileRegs.has(objReg))
michael@0 3027 masm.push(objReg);
michael@0 3028
michael@0 3029 masm.setupUnalignedABICall(2, scratch);
michael@0 3030 masm.movePtr(ImmGCPtr(name), objReg);
michael@0 3031 masm.passABIArg(objReg);
michael@0 3032 masm.unboxString(val, scratch);
michael@0 3033 masm.passABIArg(scratch);
michael@0 3034 masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, EqualStringsHelper));
michael@0 3035 masm.mov(ReturnReg, scratch);
michael@0 3036
michael@0 3037 if (!volatileRegs.has(objReg))
michael@0 3038 masm.pop(objReg);
michael@0 3039
michael@0 3040 RegisterSet ignore = RegisterSet();
michael@0 3041 ignore.add(scratch);
michael@0 3042 masm.PopRegsInMaskIgnore(volatileRegs, ignore);
michael@0 3043
michael@0 3044 masm.branchIfFalseBool(scratch, &failures);
michael@0 3045 masm.bind(&equal);
michael@0 3046
michael@0 3047 RepatchStubAppender attacher(*this);
michael@0 3048 if (canCache == GetPropertyIC::CanAttachReadSlot) {
michael@0 3049 GenerateReadSlot(cx, ion, masm, attacher, obj, holder, shape, object(), output(),
michael@0 3050 &failures);
michael@0 3051 } else {
michael@0 3052 JS_ASSERT(canCache == GetPropertyIC::CanAttachCallGetter);
michael@0 3053 // Set the frame for bailout safety of the OOL call.
michael@0 3054 if (!GenerateCallGetter(cx, ion, masm, attacher, obj, name, holder, shape, liveRegs_,
michael@0 3055 object(), output(), returnAddr, &failures))
michael@0 3056 {
michael@0 3057 return false;
michael@0 3058 }
michael@0 3059 }
michael@0 3060
michael@0 3061 return linkAndAttachStub(cx, masm, attacher, ion, "property");
michael@0 3062 }
michael@0 3063
michael@0 3064 /* static */ bool
michael@0 3065 GetElementIC::canAttachDenseElement(JSObject *obj, const Value &idval)
michael@0 3066 {
michael@0 3067 return obj->isNative() && idval.isInt32();
michael@0 3068 }
michael@0 3069
michael@0 3070 static bool
michael@0 3071 GenerateDenseElement(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &attacher,
michael@0 3072 JSObject *obj, const Value &idval, Register object,
michael@0 3073 ConstantOrRegister index, TypedOrValueRegister output)
michael@0 3074 {
michael@0 3075 JS_ASSERT(GetElementIC::canAttachDenseElement(obj, idval));
michael@0 3076
michael@0 3077 Label failures;
michael@0 3078
michael@0 3079 // Guard object's shape.
michael@0 3080 RootedShape shape(cx, obj->lastProperty());
michael@0 3081 if (!shape)
michael@0 3082 return false;
michael@0 3083 masm.branchTestObjShape(Assembler::NotEqual, object, shape, &failures);
michael@0 3084
michael@0 3085 // Ensure the index is an int32 value.
michael@0 3086 Register indexReg = InvalidReg;
michael@0 3087
michael@0 3088 if (index.reg().hasValue()) {
michael@0 3089 indexReg = output.scratchReg().gpr();
michael@0 3090 JS_ASSERT(indexReg != InvalidReg);
michael@0 3091 ValueOperand val = index.reg().valueReg();
michael@0 3092
michael@0 3093 masm.branchTestInt32(Assembler::NotEqual, val, &failures);
michael@0 3094
michael@0 3095 // Unbox the index.
michael@0 3096 masm.unboxInt32(val, indexReg);
michael@0 3097 } else {
michael@0 3098 JS_ASSERT(!index.reg().typedReg().isFloat());
michael@0 3099 indexReg = index.reg().typedReg().gpr();
michael@0 3100 }
michael@0 3101
michael@0 3102 // Load elements vector.
michael@0 3103 masm.push(object);
michael@0 3104 masm.loadPtr(Address(object, JSObject::offsetOfElements()), object);
michael@0 3105
michael@0 3106 Label hole;
michael@0 3107
michael@0 3108 // Guard on the initialized length.
michael@0 3109 Address initLength(object, ObjectElements::offsetOfInitializedLength());
michael@0 3110 masm.branch32(Assembler::BelowOrEqual, initLength, indexReg, &hole);
michael@0 3111
michael@0 3112 // Check for holes & load the value.
michael@0 3113 masm.loadElementTypedOrValue(BaseIndex(object, indexReg, TimesEight),
michael@0 3114 output, true, &hole);
michael@0 3115
michael@0 3116 masm.pop(object);
michael@0 3117 attacher.jumpRejoin(masm);
michael@0 3118
michael@0 3119 // All failures flow to here.
michael@0 3120 masm.bind(&hole);
michael@0 3121 masm.pop(object);
michael@0 3122 masm.bind(&failures);
michael@0 3123
michael@0 3124 attacher.jumpNextStub(masm);
michael@0 3125
michael@0 3126 return true;
michael@0 3127 }
michael@0 3128
michael@0 3129 bool
michael@0 3130 GetElementIC::attachDenseElement(JSContext *cx, IonScript *ion, JSObject *obj, const Value &idval)
michael@0 3131 {
michael@0 3132 MacroAssembler masm(cx, ion);
michael@0 3133 RepatchStubAppender attacher(*this);
michael@0 3134 if (!GenerateDenseElement(cx, masm, attacher, obj, idval, object(), index(), output()))
michael@0 3135 return false;
michael@0 3136
michael@0 3137 setHasDenseStub();
michael@0 3138 return linkAndAttachStub(cx, masm, attacher, ion, "dense array");
michael@0 3139 }
michael@0 3140
michael@0 3141 /* static */ bool
michael@0 3142 GetElementIC::canAttachTypedArrayElement(JSObject *obj, const Value &idval,
michael@0 3143 TypedOrValueRegister output)
michael@0 3144 {
michael@0 3145 if (!obj->is<TypedArrayObject>())
michael@0 3146 return false;
michael@0 3147
michael@0 3148 if (!idval.isInt32() && !idval.isString())
michael@0 3149 return false;
michael@0 3150
michael@0 3151
michael@0 3152 // Don't emit a stub if the access is out of bounds. We make to make
michael@0 3153 // certain that we monitor the type coming out of the typed array when
michael@0 3154 // we generate the stub. Out of bounds accesses will hit the fallback
michael@0 3155 // path.
michael@0 3156 uint32_t index;
michael@0 3157 if (idval.isInt32()) {
michael@0 3158 index = idval.toInt32();
michael@0 3159 } else {
michael@0 3160 index = GetIndexFromString(idval.toString());
michael@0 3161 if (index == UINT32_MAX)
michael@0 3162 return false;
michael@0 3163 }
michael@0 3164 if (index >= obj->as<TypedArrayObject>().length())
michael@0 3165 return false;
michael@0 3166
michael@0 3167 // The output register is not yet specialized as a float register, the only
michael@0 3168 // way to accept float typed arrays for now is to return a Value type.
michael@0 3169 uint32_t arrayType = obj->as<TypedArrayObject>().type();
michael@0 3170 if (arrayType == ScalarTypeDescr::TYPE_FLOAT32 ||
michael@0 3171 arrayType == ScalarTypeDescr::TYPE_FLOAT64)
michael@0 3172 {
michael@0 3173 return output.hasValue();
michael@0 3174 }
michael@0 3175
michael@0 3176 return output.hasValue() || !output.typedReg().isFloat();
michael@0 3177 }
michael@0 3178
michael@0 3179 static void
michael@0 3180 GenerateGetTypedArrayElement(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &attacher,
michael@0 3181 TypedArrayObject *tarr, const Value &idval, Register object,
michael@0 3182 ConstantOrRegister index, TypedOrValueRegister output,
michael@0 3183 bool allowDoubleResult)
michael@0 3184 {
michael@0 3185 JS_ASSERT(GetElementIC::canAttachTypedArrayElement(tarr, idval, output));
michael@0 3186
michael@0 3187 Label failures;
michael@0 3188
michael@0 3189 // The array type is the object within the table of typed array classes.
michael@0 3190 int arrayType = tarr->type();
michael@0 3191
michael@0 3192 // Guard on the shape.
michael@0 3193 Shape *shape = tarr->lastProperty();
michael@0 3194 masm.branchTestObjShape(Assembler::NotEqual, object, shape, &failures);
michael@0 3195
michael@0 3196 // Decide to what type index the stub should be optimized
michael@0 3197 Register tmpReg = output.scratchReg().gpr();
michael@0 3198 JS_ASSERT(tmpReg != InvalidReg);
michael@0 3199 Register indexReg = tmpReg;
michael@0 3200 JS_ASSERT(!index.constant());
michael@0 3201 if (idval.isString()) {
michael@0 3202 JS_ASSERT(GetIndexFromString(idval.toString()) != UINT32_MAX);
michael@0 3203
michael@0 3204 // Part 1: Get the string into a register
michael@0 3205 Register str;
michael@0 3206 if (index.reg().hasValue()) {
michael@0 3207 ValueOperand val = index.reg().valueReg();
michael@0 3208 masm.branchTestString(Assembler::NotEqual, val, &failures);
michael@0 3209
michael@0 3210 str = masm.extractString(val, indexReg);
michael@0 3211 } else {
michael@0 3212 JS_ASSERT(!index.reg().typedReg().isFloat());
michael@0 3213 str = index.reg().typedReg().gpr();
michael@0 3214 }
michael@0 3215
michael@0 3216 // Part 2: Call to translate the str into index
michael@0 3217 RegisterSet regs = RegisterSet::Volatile();
michael@0 3218 masm.PushRegsInMask(regs);
michael@0 3219 regs.takeUnchecked(str);
michael@0 3220
michael@0 3221 Register temp = regs.takeGeneral();
michael@0 3222
michael@0 3223 masm.setupUnalignedABICall(1, temp);
michael@0 3224 masm.passABIArg(str);
michael@0 3225 masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, GetIndexFromString));
michael@0 3226 masm.mov(ReturnReg, indexReg);
michael@0 3227
michael@0 3228 RegisterSet ignore = RegisterSet();
michael@0 3229 ignore.add(indexReg);
michael@0 3230 masm.PopRegsInMaskIgnore(RegisterSet::Volatile(), ignore);
michael@0 3231
michael@0 3232 masm.branch32(Assembler::Equal, indexReg, Imm32(UINT32_MAX), &failures);
michael@0 3233
michael@0 3234 } else {
michael@0 3235 JS_ASSERT(idval.isInt32());
michael@0 3236
michael@0 3237 if (index.reg().hasValue()) {
michael@0 3238 ValueOperand val = index.reg().valueReg();
michael@0 3239 masm.branchTestInt32(Assembler::NotEqual, val, &failures);
michael@0 3240
michael@0 3241 // Unbox the index.
michael@0 3242 masm.unboxInt32(val, indexReg);
michael@0 3243 } else {
michael@0 3244 JS_ASSERT(!index.reg().typedReg().isFloat());
michael@0 3245 indexReg = index.reg().typedReg().gpr();
michael@0 3246 }
michael@0 3247 }
michael@0 3248
michael@0 3249 // Guard on the initialized length.
michael@0 3250 Address length(object, TypedArrayObject::lengthOffset());
michael@0 3251 masm.branch32(Assembler::BelowOrEqual, length, indexReg, &failures);
michael@0 3252
michael@0 3253 // Save the object register on the stack in case of failure.
michael@0 3254 Label popAndFail;
michael@0 3255 Register elementReg = object;
michael@0 3256 masm.push(object);
michael@0 3257
michael@0 3258 // Load elements vector.
michael@0 3259 masm.loadPtr(Address(object, TypedArrayObject::dataOffset()), elementReg);
michael@0 3260
michael@0 3261 // Load the value. We use an invalid register because the destination
michael@0 3262 // register is necessary a non double register.
michael@0 3263 int width = TypedArrayObject::slotWidth(arrayType);
michael@0 3264 BaseIndex source(elementReg, indexReg, ScaleFromElemWidth(width));
michael@0 3265 if (output.hasValue()) {
michael@0 3266 masm.loadFromTypedArray(arrayType, source, output.valueReg(), allowDoubleResult,
michael@0 3267 elementReg, &popAndFail);
michael@0 3268 } else {
michael@0 3269 masm.loadFromTypedArray(arrayType, source, output.typedReg(), elementReg, &popAndFail);
michael@0 3270 }
michael@0 3271
michael@0 3272 masm.pop(object);
michael@0 3273 attacher.jumpRejoin(masm);
michael@0 3274
michael@0 3275 // Restore the object before continuing to the next stub.
michael@0 3276 masm.bind(&popAndFail);
michael@0 3277 masm.pop(object);
michael@0 3278 masm.bind(&failures);
michael@0 3279
michael@0 3280 attacher.jumpNextStub(masm);
michael@0 3281 }
michael@0 3282
michael@0 3283 bool
michael@0 3284 GetElementIC::attachTypedArrayElement(JSContext *cx, IonScript *ion, TypedArrayObject *tarr,
michael@0 3285 const Value &idval)
michael@0 3286 {
michael@0 3287 MacroAssembler masm(cx, ion);
michael@0 3288 RepatchStubAppender attacher(*this);
michael@0 3289 GenerateGetTypedArrayElement(cx, masm, attacher, tarr, idval, object(), index(), output(),
michael@0 3290 allowDoubleResult());
michael@0 3291 return linkAndAttachStub(cx, masm, attacher, ion, "typed array");
michael@0 3292 }
michael@0 3293
michael@0 3294 bool
michael@0 3295 GetElementIC::attachArgumentsElement(JSContext *cx, IonScript *ion, JSObject *obj)
michael@0 3296 {
michael@0 3297 JS_ASSERT(obj->is<ArgumentsObject>());
michael@0 3298
michael@0 3299 Label failures;
michael@0 3300 MacroAssembler masm(cx, ion);
michael@0 3301 RepatchStubAppender attacher(*this);
michael@0 3302
michael@0 3303 Register tmpReg = output().scratchReg().gpr();
michael@0 3304 JS_ASSERT(tmpReg != InvalidReg);
michael@0 3305
michael@0 3306 const Class *clasp = obj->is<StrictArgumentsObject>() ? &StrictArgumentsObject::class_
michael@0 3307 : &NormalArgumentsObject::class_;
michael@0 3308
michael@0 3309 masm.branchTestObjClass(Assembler::NotEqual, object(), tmpReg, clasp, &failures);
michael@0 3310
michael@0 3311 // Get initial ArgsObj length value, test if length has been overridden.
michael@0 3312 masm.unboxInt32(Address(object(), ArgumentsObject::getInitialLengthSlotOffset()), tmpReg);
michael@0 3313 masm.branchTest32(Assembler::NonZero, tmpReg, Imm32(ArgumentsObject::LENGTH_OVERRIDDEN_BIT),
michael@0 3314 &failures);
michael@0 3315 masm.rshiftPtr(Imm32(ArgumentsObject::PACKED_BITS_COUNT), tmpReg);
michael@0 3316
michael@0 3317 // Decide to what type index the stub should be optimized
michael@0 3318 Register indexReg;
michael@0 3319 JS_ASSERT(!index().constant());
michael@0 3320
michael@0 3321 // Check index against length.
michael@0 3322 Label failureRestoreIndex;
michael@0 3323 if (index().reg().hasValue()) {
michael@0 3324 ValueOperand val = index().reg().valueReg();
michael@0 3325 masm.branchTestInt32(Assembler::NotEqual, val, &failures);
michael@0 3326 indexReg = val.scratchReg();
michael@0 3327
michael@0 3328 masm.unboxInt32(val, indexReg);
michael@0 3329 masm.branch32(Assembler::AboveOrEqual, indexReg, tmpReg, &failureRestoreIndex);
michael@0 3330 } else {
michael@0 3331 JS_ASSERT(index().reg().type() == MIRType_Int32);
michael@0 3332 indexReg = index().reg().typedReg().gpr();
michael@0 3333 masm.branch32(Assembler::AboveOrEqual, indexReg, tmpReg, &failures);
michael@0 3334 }
michael@0 3335 // Save indexReg because it needs to be clobbered to check deleted bit.
michael@0 3336 Label failurePopIndex;
michael@0 3337 masm.push(indexReg);
michael@0 3338
michael@0 3339 // Check if property was deleted on arguments object.
michael@0 3340 masm.loadPrivate(Address(object(), ArgumentsObject::getDataSlotOffset()), tmpReg);
michael@0 3341 masm.loadPtr(Address(tmpReg, offsetof(ArgumentsData, deletedBits)), tmpReg);
michael@0 3342
michael@0 3343 // In tempReg, calculate index of word containing bit: (idx >> logBitsPerWord)
michael@0 3344 const uint32_t shift = FloorLog2<(sizeof(size_t) * JS_BITS_PER_BYTE)>::value;
michael@0 3345 JS_ASSERT(shift == 5 || shift == 6);
michael@0 3346 masm.rshiftPtr(Imm32(shift), indexReg);
michael@0 3347 masm.loadPtr(BaseIndex(tmpReg, indexReg, ScaleFromElemWidth(sizeof(size_t))), tmpReg);
michael@0 3348
michael@0 3349 // Don't bother testing specific bit, if any bit is set in the word, fail.
michael@0 3350 masm.branchPtr(Assembler::NotEqual, tmpReg, ImmPtr(nullptr), &failurePopIndex);
michael@0 3351
michael@0 3352 // Get the address to load from into tmpReg
michael@0 3353 masm.loadPrivate(Address(object(), ArgumentsObject::getDataSlotOffset()), tmpReg);
michael@0 3354 masm.addPtr(Imm32(ArgumentsData::offsetOfArgs()), tmpReg);
michael@0 3355
michael@0 3356 // Restore original index register value, to use for indexing element.
michael@0 3357 masm.pop(indexReg);
michael@0 3358 BaseIndex elemIdx(tmpReg, indexReg, ScaleFromElemWidth(sizeof(Value)));
michael@0 3359
michael@0 3360 // Ensure result is not magic value, and type-check result.
michael@0 3361 masm.branchTestMagic(Assembler::Equal, elemIdx, &failureRestoreIndex);
michael@0 3362
michael@0 3363 if (output().hasTyped()) {
michael@0 3364 JS_ASSERT(!output().typedReg().isFloat());
michael@0 3365 JS_ASSERT(index().reg().type() == MIRType_Boolean ||
michael@0 3366 index().reg().type() == MIRType_Int32 ||
michael@0 3367 index().reg().type() == MIRType_String ||
michael@0 3368 index().reg().type() == MIRType_Object);
michael@0 3369 masm.branchTestMIRType(Assembler::NotEqual, elemIdx, index().reg().type(),
michael@0 3370 &failureRestoreIndex);
michael@0 3371 }
michael@0 3372
michael@0 3373 masm.loadTypedOrValue(elemIdx, output());
michael@0 3374
michael@0 3375 // indexReg may need to be reconstructed if it was originally a value.
michael@0 3376 if (index().reg().hasValue())
michael@0 3377 masm.tagValue(JSVAL_TYPE_INT32, indexReg, index().reg().valueReg());
michael@0 3378
michael@0 3379 // Success.
michael@0 3380 attacher.jumpRejoin(masm);
michael@0 3381
michael@0 3382 // Restore the object before continuing to the next stub.
michael@0 3383 masm.bind(&failurePopIndex);
michael@0 3384 masm.pop(indexReg);
michael@0 3385 masm.bind(&failureRestoreIndex);
michael@0 3386 if (index().reg().hasValue())
michael@0 3387 masm.tagValue(JSVAL_TYPE_INT32, indexReg, index().reg().valueReg());
michael@0 3388 masm.bind(&failures);
michael@0 3389 attacher.jumpNextStub(masm);
michael@0 3390
michael@0 3391
michael@0 3392 if (obj->is<StrictArgumentsObject>()) {
michael@0 3393 JS_ASSERT(!hasStrictArgumentsStub_);
michael@0 3394 hasStrictArgumentsStub_ = true;
michael@0 3395 return linkAndAttachStub(cx, masm, attacher, ion, "ArgsObj element (strict)");
michael@0 3396 }
michael@0 3397
michael@0 3398 JS_ASSERT(!hasNormalArgumentsStub_);
michael@0 3399 hasNormalArgumentsStub_ = true;
michael@0 3400 return linkAndAttachStub(cx, masm, attacher, ion, "ArgsObj element (normal)");
michael@0 3401 }
michael@0 3402
michael@0 3403 bool
michael@0 3404 GetElementIC::update(JSContext *cx, size_t cacheIndex, HandleObject obj,
michael@0 3405 HandleValue idval, MutableHandleValue res)
michael@0 3406 {
michael@0 3407 void *returnAddr;
michael@0 3408 IonScript *ion = GetTopIonJSScript(cx, &returnAddr)->ionScript();
michael@0 3409 GetElementIC &cache = ion->getCache(cacheIndex).toGetElement();
michael@0 3410 RootedScript script(cx);
michael@0 3411 jsbytecode *pc;
michael@0 3412 cache.getScriptedLocation(&script, &pc);
michael@0 3413
michael@0 3414 // Override the return value when the script is invalidated (bug 728188).
michael@0 3415 AutoDetectInvalidation adi(cx, res.address(), ion);
michael@0 3416
michael@0 3417 if (cache.isDisabled()) {
michael@0 3418 if (!GetObjectElementOperation(cx, JSOp(*pc), obj, /* wasObject = */true, idval, res))
michael@0 3419 return false;
michael@0 3420 if (!cache.monitoredResult())
michael@0 3421 types::TypeScript::Monitor(cx, script, pc, res);
michael@0 3422 return true;
michael@0 3423 }
michael@0 3424
michael@0 3425 RootedId id(cx);
michael@0 3426 if (!ValueToId<CanGC>(cx, idval, &id))
michael@0 3427 return false;
michael@0 3428
michael@0 3429 bool attachedStub = false;
michael@0 3430 if (cache.canAttachStub()) {
michael@0 3431 if (IsOptimizableArgumentsObjectForGetElem(obj, idval) &&
michael@0 3432 !cache.hasArgumentsStub(obj->is<StrictArgumentsObject>()) &&
michael@0 3433 !cache.index().constant() &&
michael@0 3434 (cache.index().reg().hasValue() ||
michael@0 3435 cache.index().reg().type() == MIRType_Int32) &&
michael@0 3436 (cache.output().hasValue() || !cache.output().typedReg().isFloat()))
michael@0 3437 {
michael@0 3438 if (!cache.attachArgumentsElement(cx, ion, obj))
michael@0 3439 return false;
michael@0 3440 attachedStub = true;
michael@0 3441 }
michael@0 3442 if (!attachedStub && cache.monitoredResult() && canAttachGetProp(obj, idval, id)) {
michael@0 3443 RootedPropertyName name(cx, JSID_TO_ATOM(id)->asPropertyName());
michael@0 3444 if (!cache.attachGetProp(cx, ion, obj, idval, name, returnAddr))
michael@0 3445 return false;
michael@0 3446 attachedStub = true;
michael@0 3447 }
michael@0 3448 if (!attachedStub && !cache.hasDenseStub() && canAttachDenseElement(obj, idval)) {
michael@0 3449 if (!cache.attachDenseElement(cx, ion, obj, idval))
michael@0 3450 return false;
michael@0 3451 attachedStub = true;
michael@0 3452 }
michael@0 3453 if (!attachedStub && canAttachTypedArrayElement(obj, idval, cache.output())) {
michael@0 3454 Rooted<TypedArrayObject*> tarr(cx, &obj->as<TypedArrayObject>());
michael@0 3455 if (!cache.attachTypedArrayElement(cx, ion, tarr, idval))
michael@0 3456 return false;
michael@0 3457 attachedStub = true;
michael@0 3458 }
michael@0 3459 }
michael@0 3460
michael@0 3461 if (!GetObjectElementOperation(cx, JSOp(*pc), obj, /* wasObject = */true, idval, res))
michael@0 3462 return false;
michael@0 3463
michael@0 3464 // Disable cache when we reach max stubs or update failed too much.
michael@0 3465 if (!attachedStub) {
michael@0 3466 cache.incFailedUpdates();
michael@0 3467 if (cache.shouldDisable()) {
michael@0 3468 IonSpew(IonSpew_InlineCaches, "Disable inline cache");
michael@0 3469 cache.disable();
michael@0 3470 }
michael@0 3471 } else {
michael@0 3472 cache.resetFailedUpdates();
michael@0 3473 }
michael@0 3474
michael@0 3475 if (!cache.monitoredResult())
michael@0 3476 types::TypeScript::Monitor(cx, script, pc, res);
michael@0 3477 return true;
michael@0 3478 }
michael@0 3479
michael@0 3480 void
michael@0 3481 GetElementIC::reset()
michael@0 3482 {
michael@0 3483 RepatchIonCache::reset();
michael@0 3484 hasDenseStub_ = false;
michael@0 3485 hasStrictArgumentsStub_ = false;
michael@0 3486 hasNormalArgumentsStub_ = false;
michael@0 3487 }
michael@0 3488
michael@0 3489 static bool
michael@0 3490 IsDenseElementSetInlineable(JSObject *obj, const Value &idval)
michael@0 3491 {
michael@0 3492 if (!obj->is<ArrayObject>())
michael@0 3493 return false;
michael@0 3494
michael@0 3495 if (obj->watched())
michael@0 3496 return false;
michael@0 3497
michael@0 3498 if (!idval.isInt32())
michael@0 3499 return false;
michael@0 3500
michael@0 3501 // The object may have a setter definition,
michael@0 3502 // either directly, or via a prototype, or via the target object for a prototype
michael@0 3503 // which is a proxy, that handles a particular integer write.
michael@0 3504 // Scan the prototype and shape chain to make sure that this is not the case.
michael@0 3505 JSObject *curObj = obj;
michael@0 3506 while (curObj) {
michael@0 3507 // Ensure object is native.
michael@0 3508 if (!curObj->isNative())
michael@0 3509 return false;
michael@0 3510
michael@0 3511 // Ensure all indexed properties are stored in dense elements.
michael@0 3512 if (curObj->isIndexed())
michael@0 3513 return false;
michael@0 3514
michael@0 3515 curObj = curObj->getProto();
michael@0 3516 }
michael@0 3517
michael@0 3518 return true;
michael@0 3519 }
michael@0 3520
michael@0 3521 static bool
michael@0 3522 IsTypedArrayElementSetInlineable(JSObject *obj, const Value &idval, const Value &value)
michael@0 3523 {
michael@0 3524 // Don't bother attaching stubs for assigning strings and objects.
michael@0 3525 return (obj->is<TypedArrayObject>() && idval.isInt32() &&
michael@0 3526 !value.isString() && !value.isObject());
michael@0 3527 }
michael@0 3528
michael@0 3529 static void
michael@0 3530 StoreDenseElement(MacroAssembler &masm, ConstantOrRegister value, Register elements,
michael@0 3531 BaseIndex target)
michael@0 3532 {
michael@0 3533 // If the ObjectElements::CONVERT_DOUBLE_ELEMENTS flag is set, int32 values
michael@0 3534 // have to be converted to double first. If the value is not int32, it can
michael@0 3535 // always be stored directly.
michael@0 3536
michael@0 3537 Address elementsFlags(elements, ObjectElements::offsetOfFlags());
michael@0 3538 if (value.constant()) {
michael@0 3539 Value v = value.value();
michael@0 3540 Label done;
michael@0 3541 if (v.isInt32()) {
michael@0 3542 Label dontConvert;
michael@0 3543 masm.branchTest32(Assembler::Zero, elementsFlags,
michael@0 3544 Imm32(ObjectElements::CONVERT_DOUBLE_ELEMENTS),
michael@0 3545 &dontConvert);
michael@0 3546 masm.storeValue(DoubleValue(v.toInt32()), target);
michael@0 3547 masm.jump(&done);
michael@0 3548 masm.bind(&dontConvert);
michael@0 3549 }
michael@0 3550 masm.storeValue(v, target);
michael@0 3551 masm.bind(&done);
michael@0 3552 return;
michael@0 3553 }
michael@0 3554
michael@0 3555 TypedOrValueRegister reg = value.reg();
michael@0 3556 if (reg.hasTyped() && reg.type() != MIRType_Int32) {
michael@0 3557 masm.storeTypedOrValue(reg, target);
michael@0 3558 return;
michael@0 3559 }
michael@0 3560
michael@0 3561 Label convert, storeValue, done;
michael@0 3562 masm.branchTest32(Assembler::NonZero, elementsFlags,
michael@0 3563 Imm32(ObjectElements::CONVERT_DOUBLE_ELEMENTS),
michael@0 3564 &convert);
michael@0 3565 masm.bind(&storeValue);
michael@0 3566 masm.storeTypedOrValue(reg, target);
michael@0 3567 masm.jump(&done);
michael@0 3568
michael@0 3569 masm.bind(&convert);
michael@0 3570 if (reg.hasValue()) {
michael@0 3571 masm.branchTestInt32(Assembler::NotEqual, reg.valueReg(), &storeValue);
michael@0 3572 masm.int32ValueToDouble(reg.valueReg(), ScratchFloatReg);
michael@0 3573 masm.storeDouble(ScratchFloatReg, target);
michael@0 3574 } else {
michael@0 3575 JS_ASSERT(reg.type() == MIRType_Int32);
michael@0 3576 masm.convertInt32ToDouble(reg.typedReg().gpr(), ScratchFloatReg);
michael@0 3577 masm.storeDouble(ScratchFloatReg, target);
michael@0 3578 }
michael@0 3579
michael@0 3580 masm.bind(&done);
michael@0 3581 }
michael@0 3582
michael@0 3583 static bool
michael@0 3584 GenerateSetDenseElement(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &attacher,
michael@0 3585 JSObject *obj, const Value &idval, bool guardHoles, Register object,
michael@0 3586 ValueOperand indexVal, ConstantOrRegister value, Register tempToUnboxIndex,
michael@0 3587 Register temp)
michael@0 3588 {
michael@0 3589 JS_ASSERT(obj->isNative());
michael@0 3590 JS_ASSERT(idval.isInt32());
michael@0 3591
michael@0 3592 Label failures;
michael@0 3593 Label outOfBounds; // index represents a known hole, or an illegal append
michael@0 3594
michael@0 3595 Label markElem, storeElement; // used if TI protects us from worrying about holes.
michael@0 3596
michael@0 3597 // Guard object is a dense array.
michael@0 3598 Shape *shape = obj->lastProperty();
michael@0 3599 if (!shape)
michael@0 3600 return false;
michael@0 3601 masm.branchTestObjShape(Assembler::NotEqual, object, shape, &failures);
michael@0 3602
michael@0 3603 // Ensure the index is an int32 value.
michael@0 3604 masm.branchTestInt32(Assembler::NotEqual, indexVal, &failures);
michael@0 3605
michael@0 3606 // Unbox the index.
michael@0 3607 Register index = masm.extractInt32(indexVal, tempToUnboxIndex);
michael@0 3608
michael@0 3609 {
michael@0 3610 // Load obj->elements.
michael@0 3611 Register elements = temp;
michael@0 3612 masm.loadPtr(Address(object, JSObject::offsetOfElements()), elements);
michael@0 3613
michael@0 3614 // Compute the location of the element.
michael@0 3615 BaseIndex target(elements, index, TimesEight);
michael@0 3616
michael@0 3617 // If TI cannot help us deal with HOLES by preventing indexed properties
michael@0 3618 // on the prototype chain, we have to be very careful to check for ourselves
michael@0 3619 // to avoid stomping on what should be a setter call. Start by only allowing things
michael@0 3620 // within the initialized length.
michael@0 3621 if (guardHoles) {
michael@0 3622 Address initLength(elements, ObjectElements::offsetOfInitializedLength());
michael@0 3623 masm.branch32(Assembler::BelowOrEqual, initLength, index, &outOfBounds);
michael@0 3624 } else {
michael@0 3625 // Guard that we can increase the initialized length.
michael@0 3626 Address capacity(elements, ObjectElements::offsetOfCapacity());
michael@0 3627 masm.branch32(Assembler::BelowOrEqual, capacity, index, &outOfBounds);
michael@0 3628
michael@0 3629 // Guard on the initialized length.
michael@0 3630 Address initLength(elements, ObjectElements::offsetOfInitializedLength());
michael@0 3631 masm.branch32(Assembler::Below, initLength, index, &outOfBounds);
michael@0 3632
michael@0 3633 // if (initLength == index)
michael@0 3634 masm.branch32(Assembler::NotEqual, initLength, index, &markElem);
michael@0 3635 {
michael@0 3636 // Increase initialize length.
michael@0 3637 Int32Key newLength(index);
michael@0 3638 masm.bumpKey(&newLength, 1);
michael@0 3639 masm.storeKey(newLength, initLength);
michael@0 3640
michael@0 3641 // Increase length if needed.
michael@0 3642 Label bumpedLength;
michael@0 3643 Address length(elements, ObjectElements::offsetOfLength());
michael@0 3644 masm.branch32(Assembler::AboveOrEqual, length, index, &bumpedLength);
michael@0 3645 masm.storeKey(newLength, length);
michael@0 3646 masm.bind(&bumpedLength);
michael@0 3647
michael@0 3648 // Restore the index.
michael@0 3649 masm.bumpKey(&newLength, -1);
michael@0 3650 masm.jump(&storeElement);
michael@0 3651 }
michael@0 3652 // else
michael@0 3653 masm.bind(&markElem);
michael@0 3654 }
michael@0 3655
michael@0 3656 if (cx->zone()->needsBarrier())
michael@0 3657 masm.callPreBarrier(target, MIRType_Value);
michael@0 3658
michael@0 3659 // Store the value.
michael@0 3660 if (guardHoles)
michael@0 3661 masm.branchTestMagic(Assembler::Equal, target, &failures);
michael@0 3662 else
michael@0 3663 masm.bind(&storeElement);
michael@0 3664 StoreDenseElement(masm, value, elements, target);
michael@0 3665 }
michael@0 3666 attacher.jumpRejoin(masm);
michael@0 3667
michael@0 3668 // All failures flow to here.
michael@0 3669 masm.bind(&outOfBounds);
michael@0 3670 masm.bind(&failures);
michael@0 3671 attacher.jumpNextStub(masm);
michael@0 3672
michael@0 3673 return true;
michael@0 3674 }
michael@0 3675
michael@0 3676 bool
michael@0 3677 SetElementIC::attachDenseElement(JSContext *cx, IonScript *ion, JSObject *obj, const Value &idval)
michael@0 3678 {
michael@0 3679 MacroAssembler masm(cx, ion);
michael@0 3680 RepatchStubAppender attacher(*this);
michael@0 3681 if (!GenerateSetDenseElement(cx, masm, attacher, obj, idval,
michael@0 3682 guardHoles(), object(), index(),
michael@0 3683 value(), tempToUnboxIndex(),
michael@0 3684 temp()))
michael@0 3685 {
michael@0 3686 return false;
michael@0 3687 }
michael@0 3688
michael@0 3689 setHasDenseStub();
michael@0 3690 const char *message = guardHoles() ?
michael@0 3691 "dense array (holes)" :
michael@0 3692 "dense array";
michael@0 3693 return linkAndAttachStub(cx, masm, attacher, ion, message);
michael@0 3694 }
michael@0 3695
// Emit the code for a stub that stores |value| into an element of the typed
// array |tarr|. Out-of-bounds writes are no-ops for typed arrays, so they
// jump to the rejoin point (|done|) rather than to the failure path.
// |tempUnbox|, |temp| and |tempFloat| are scratch registers; |object| holds
// the array and is preserved across the stub.
static bool
GenerateSetTypedArrayElement(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &attacher,
                             TypedArrayObject *tarr, Register object,
                             ValueOperand indexVal, ConstantOrRegister value,
                             Register tempUnbox, Register temp, FloatRegister tempFloat)
{
    Label failures, done, popObjectAndFail;

    // Guard on the shape.
    Shape *shape = tarr->lastProperty();
    if (!shape)
        return false;
    masm.branchTestObjShape(Assembler::NotEqual, object, shape, &failures);

    // Ensure the index is an int32.
    masm.branchTestInt32(Assembler::NotEqual, indexVal, &failures);
    Register index = masm.extractInt32(indexVal, tempUnbox);

    // Guard on the length. Out-of-bounds indexes branch to |done|, not to
    // |failures|, because the store is then simply ignored.
    Address length(object, TypedArrayObject::lengthOffset());
    masm.unboxInt32(length, temp);
    masm.branch32(Assembler::BelowOrEqual, temp, index, &done);

    // Load the elements vector. |temp| is dead after the length check, so
    // it can be reused to hold the data pointer.
    Register elements = temp;
    masm.loadPtr(Address(object, TypedArrayObject::dataOffset()), elements);

    // Set the value.
    int arrayType = tarr->type();
    int width = TypedArrayObject::slotWidth(arrayType);
    BaseIndex target(elements, index, ScaleFromElemWidth(width));

    if (arrayType == ScalarTypeDescr::TYPE_FLOAT32) {
        // Depending on the platform, Float32 stores may be done either at
        // float32 precision or via an intermediate double.
        if (LIRGenerator::allowFloat32Optimizations()) {
            if (!masm.convertConstantOrRegisterToFloat(cx, value, tempFloat, &failures))
                return false;
        } else {
            if (!masm.convertConstantOrRegisterToDouble(cx, value, tempFloat, &failures))
                return false;
        }
        masm.storeToTypedFloatArray(arrayType, tempFloat, target);
    } else if (arrayType == ScalarTypeDescr::TYPE_FLOAT64) {
        if (!masm.convertConstantOrRegisterToDouble(cx, value, tempFloat, &failures))
            return false;
        masm.storeToTypedFloatArray(arrayType, tempFloat, target);
    } else {
        // On x86 we only have 6 registers available to use, so reuse the object
        // register to compute the intermediate value to store and restore it
        // afterwards.
        masm.push(object);

        if (arrayType == ScalarTypeDescr::TYPE_UINT8_CLAMPED) {
            if (!masm.clampConstantOrRegisterToUint8(cx, value, tempFloat, object,
                                                     &popObjectAndFail))
            {
                return false;
            }
        } else {
            if (!masm.truncateConstantOrRegisterToInt32(cx, value, tempFloat, object,
                                                        &popObjectAndFail))
            {
                return false;
            }
        }
        masm.storeToTypedIntArray(arrayType, object, target);

        // Restore the clobbered object register (pushed above).
        masm.pop(object);
    }

    // Out-of-bound writes jump here as they are no-ops.
    masm.bind(&done);
    attacher.jumpRejoin(masm);

    // Failures taken while |object| was pushed must rebalance the stack
    // before falling into the common failure path.
    if (popObjectAndFail.used()) {
        masm.bind(&popObjectAndFail);
        masm.pop(object);
    }

    masm.bind(&failures);
    attacher.jumpNextStub(masm);
    return true;
}
michael@0 3778
michael@0 3779 bool
michael@0 3780 SetElementIC::attachTypedArrayElement(JSContext *cx, IonScript *ion, TypedArrayObject *tarr)
michael@0 3781 {
michael@0 3782 MacroAssembler masm(cx, ion);
michael@0 3783 RepatchStubAppender attacher(*this);
michael@0 3784 if (!GenerateSetTypedArrayElement(cx, masm, attacher, tarr,
michael@0 3785 object(), index(), value(),
michael@0 3786 tempToUnboxIndex(), temp(), tempFloat()))
michael@0 3787 {
michael@0 3788 return false;
michael@0 3789 }
michael@0 3790
michael@0 3791 return linkAndAttachStub(cx, masm, attacher, ion, "typed array");
michael@0 3792 }
michael@0 3793
michael@0 3794 bool
michael@0 3795 SetElementIC::update(JSContext *cx, size_t cacheIndex, HandleObject obj,
michael@0 3796 HandleValue idval, HandleValue value)
michael@0 3797 {
michael@0 3798 IonScript *ion = GetTopIonJSScript(cx)->ionScript();
michael@0 3799 SetElementIC &cache = ion->getCache(cacheIndex).toSetElement();
michael@0 3800
michael@0 3801 bool attachedStub = false;
michael@0 3802 if (cache.canAttachStub()) {
michael@0 3803 if (!cache.hasDenseStub() && IsDenseElementSetInlineable(obj, idval)) {
michael@0 3804 if (!cache.attachDenseElement(cx, ion, obj, idval))
michael@0 3805 return false;
michael@0 3806 attachedStub = true;
michael@0 3807 }
michael@0 3808 if (!attachedStub && IsTypedArrayElementSetInlineable(obj, idval, value)) {
michael@0 3809 TypedArrayObject *tarr = &obj->as<TypedArrayObject>();
michael@0 3810 if (!cache.attachTypedArrayElement(cx, ion, tarr))
michael@0 3811 return false;
michael@0 3812 }
michael@0 3813 }
michael@0 3814
michael@0 3815 if (!SetObjectElement(cx, obj, idval, value, cache.strict()))
michael@0 3816 return false;
michael@0 3817 return true;
michael@0 3818 }
michael@0 3819
// Reset the cache to its initial state, discarding all attached stubs and
// forgetting that a dense-element stub was ever attached (so one may be
// attached again later).
void
SetElementIC::reset()
{
    RepatchIonCache::reset();
    hasDenseStub_ = false;
}
michael@0 3826
michael@0 3827 bool
michael@0 3828 SetElementParIC::attachDenseElement(LockedJSContext &cx, IonScript *ion, JSObject *obj,
michael@0 3829 const Value &idval)
michael@0 3830 {
michael@0 3831 MacroAssembler masm(cx, ion);
michael@0 3832 DispatchStubPrepender attacher(*this);
michael@0 3833 if (!GenerateSetDenseElement(cx, masm, attacher, obj, idval,
michael@0 3834 guardHoles(), object(), index(),
michael@0 3835 value(), tempToUnboxIndex(),
michael@0 3836 temp()))
michael@0 3837 {
michael@0 3838 return false;
michael@0 3839 }
michael@0 3840
michael@0 3841 const char *message = guardHoles() ?
michael@0 3842 "parallel dense array (holes)" :
michael@0 3843 "parallel dense array";
michael@0 3844
michael@0 3845 return linkAndAttachStub(cx, masm, attacher, ion, message);
michael@0 3846 }
michael@0 3847
michael@0 3848 bool
michael@0 3849 SetElementParIC::attachTypedArrayElement(LockedJSContext &cx, IonScript *ion,
michael@0 3850 TypedArrayObject *tarr)
michael@0 3851 {
michael@0 3852 MacroAssembler masm(cx, ion);
michael@0 3853 DispatchStubPrepender attacher(*this);
michael@0 3854 if (!GenerateSetTypedArrayElement(cx, masm, attacher, tarr,
michael@0 3855 object(), index(), value(),
michael@0 3856 tempToUnboxIndex(), temp(), tempFloat()))
michael@0 3857 {
michael@0 3858 return false;
michael@0 3859 }
michael@0 3860
michael@0 3861 return linkAndAttachStub(cx, masm, attacher, ion, "parallel typed array");
michael@0 3862 }
michael@0 3863
// Slow path for parallel SETELEM: possibly attach a stub while holding the
// locked context, then always perform the actual store via SetElementPar.
bool
SetElementParIC::update(ForkJoinContext *cx, size_t cacheIndex, HandleObject obj,
                        HandleValue idval, HandleValue value)
{
    IonScript *ion = GetTopIonJSScript(cx)->parallelIonScript();
    SetElementParIC &cache = ion->getCache(cacheIndex).toSetElementPar();

    // Avoid unnecessary locking if cannot attach stubs.
    if (!cache.canAttachStub())
        return SetElementPar(cx, obj, idval, value, cache.strict());

    {
        LockedJSContext ncx(cx);

        // canAttachStub() is re-tested while holding the lock; presumably
        // another worker may have attached stubs in the meantime (see the
        // locking note referenced in GetElementParIC::update) — verify.
        if (cache.canAttachStub()) {
            bool alreadyStubbed;
            if (!cache.hasOrAddStubbedShape(ncx, obj->lastProperty(), &alreadyStubbed))
                return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
            // A stub for this shape already exists: just do the store.
            if (alreadyStubbed)
                return SetElementPar(cx, obj, idval, value, cache.strict());

            // Try the dense-array stub first, then the typed-array stub.
            bool attachedStub = false;
            if (IsDenseElementSetInlineable(obj, idval)) {
                if (!cache.attachDenseElement(ncx, ion, obj, idval))
                    return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
                attachedStub = true;
            }
            if (!attachedStub && IsTypedArrayElementSetInlineable(obj, idval, value)) {
                TypedArrayObject *tarr = &obj->as<TypedArrayObject>();
                if (!cache.attachTypedArrayElement(ncx, ion, tarr))
                    return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
            }
        }
    }

    return SetElementPar(cx, obj, idval, value, cache.strict());
}
michael@0 3901
michael@0 3902 bool
michael@0 3903 GetElementParIC::attachReadSlot(LockedJSContext &cx, IonScript *ion, JSObject *obj,
michael@0 3904 const Value &idval, PropertyName *name, JSObject *holder,
michael@0 3905 Shape *shape)
michael@0 3906 {
michael@0 3907 MacroAssembler masm(cx, ion);
michael@0 3908 DispatchStubPrepender attacher(*this);
michael@0 3909
michael@0 3910 // Guard on the index value.
michael@0 3911 Label failures;
michael@0 3912 ValueOperand val = index().reg().valueReg();
michael@0 3913 masm.branchTestValue(Assembler::NotEqual, val, idval, &failures);
michael@0 3914
michael@0 3915 GenerateReadSlot(cx, ion, masm, attacher, obj, holder, shape, object(), output(),
michael@0 3916 &failures);
michael@0 3917
michael@0 3918 return linkAndAttachStub(cx, masm, attacher, ion, "parallel getelem reading");
michael@0 3919 }
michael@0 3920
michael@0 3921 bool
michael@0 3922 GetElementParIC::attachDenseElement(LockedJSContext &cx, IonScript *ion, JSObject *obj,
michael@0 3923 const Value &idval)
michael@0 3924 {
michael@0 3925 MacroAssembler masm(cx, ion);
michael@0 3926 DispatchStubPrepender attacher(*this);
michael@0 3927 if (!GenerateDenseElement(cx, masm, attacher, obj, idval, object(), index(), output()))
michael@0 3928 return false;
michael@0 3929
michael@0 3930 return linkAndAttachStub(cx, masm, attacher, ion, "parallel dense element");
michael@0 3931 }
michael@0 3932
michael@0 3933 bool
michael@0 3934 GetElementParIC::attachTypedArrayElement(LockedJSContext &cx, IonScript *ion,
michael@0 3935 TypedArrayObject *tarr, const Value &idval)
michael@0 3936 {
michael@0 3937 MacroAssembler masm(cx, ion);
michael@0 3938 DispatchStubPrepender attacher(*this);
michael@0 3939 GenerateGetTypedArrayElement(cx, masm, attacher, tarr, idval, object(), index(), output(),
michael@0 3940 allowDoubleResult());
michael@0 3941 return linkAndAttachStub(cx, masm, attacher, ion, "parallel typed array");
michael@0 3942 }
michael@0 3943
// Slow path for parallel GETELEM. First performs the element read on the
// lock-free pure path (bailing out of parallel execution if that is not
// possible), then, under the locked context, attempts to attach one of
// three stub kinds: read-slot, dense element, or typed-array element.
bool
GetElementParIC::update(ForkJoinContext *cx, size_t cacheIndex, HandleObject obj,
                        HandleValue idval, MutableHandleValue vp)
{
    IonScript *ion = GetTopIonJSScript(cx)->parallelIonScript();
    GetElementParIC &cache = ion->getCache(cacheIndex).toGetElementPar();

    // Try to get the element early, as the pure path doesn't need a lock. If
    // we can't do it purely, bail out of parallel execution.
    if (!GetObjectElementOperationPure(cx, obj, idval, vp.address()))
        return false;

    // Avoid unnecessary locking if cannot attach stubs.
    if (!cache.canAttachStub())
        return true;

    {
        // See note about locking context in GetPropertyParIC::update.
        LockedJSContext ncx(cx);

        // Re-checked under the lock; see the note referenced above.
        if (cache.canAttachStub()) {
            bool alreadyStubbed;
            if (!cache.hasOrAddStubbedShape(ncx, obj->lastProperty(), &alreadyStubbed))
                return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
            // A stub for this shape already exists; the value was already
            // fetched above, so we are done.
            if (alreadyStubbed)
                return true;

            jsid id;
            if (!ValueToIdPure(idval, &id))
                return false;

            // Attach at most one stub, in decreasing order of specificity:
            // named-property read slot, dense element, typed-array element.
            bool attachedStub = false;
            if (cache.monitoredResult() &&
                GetElementIC::canAttachGetProp(obj, idval, id))
            {
                RootedShape shape(ncx);
                RootedObject holder(ncx);
                RootedPropertyName name(ncx, JSID_TO_ATOM(id)->asPropertyName());

                GetPropertyIC::NativeGetPropCacheability canCache =
                    CanAttachNativeGetProp(ncx, cache, obj, name, &holder, &shape);

                if (canCache == GetPropertyIC::CanAttachReadSlot)
                {
                    if (!cache.attachReadSlot(ncx, ion, obj, idval, name, holder, shape))
                        return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
                    attachedStub = true;
                }
            }
            if (!attachedStub &&
                GetElementIC::canAttachDenseElement(obj, idval))
            {
                if (!cache.attachDenseElement(ncx, ion, obj, idval))
                    return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
                attachedStub = true;
            }
            if (!attachedStub &&
                GetElementIC::canAttachTypedArrayElement(obj, idval, cache.output()))
            {
                if (!cache.attachTypedArrayElement(ncx, ion, &obj->as<TypedArrayObject>(), idval))
                    return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
                attachedStub = true;
            }
        }
    }

    return true;
}
michael@0 4012
michael@0 4013 bool
michael@0 4014 BindNameIC::attachGlobal(JSContext *cx, IonScript *ion, JSObject *scopeChain)
michael@0 4015 {
michael@0 4016 JS_ASSERT(scopeChain->is<GlobalObject>());
michael@0 4017
michael@0 4018 MacroAssembler masm(cx, ion);
michael@0 4019 RepatchStubAppender attacher(*this);
michael@0 4020
michael@0 4021 // Guard on the scope chain.
michael@0 4022 attacher.branchNextStub(masm, Assembler::NotEqual, scopeChainReg(),
michael@0 4023 ImmGCPtr(scopeChain));
michael@0 4024 masm.movePtr(ImmGCPtr(scopeChain), outputReg());
michael@0 4025
michael@0 4026 attacher.jumpRejoin(masm);
michael@0 4027
michael@0 4028 return linkAndAttachStub(cx, masm, attacher, ion, "global");
michael@0 4029 }
michael@0 4030
// Emit a shape guard for a single object on the scope chain, or elide it
// when the object's shape provably cannot change in a way that would
// invalidate the lookup. |scopeObjReg| holds |scopeObj| at runtime; |shape|
// is the shape of the found property (may be null for intermediate scopes).
static inline void
GenerateScopeChainGuard(MacroAssembler &masm, JSObject *scopeObj,
                        Register scopeObjReg, Shape *shape, Label *failures)
{
    if (scopeObj->is<CallObject>()) {
        // We can skip a guard on the call object if the script's bindings are
        // guaranteed to be immutable (and thus cannot introduce shadowing
        // variables).
        CallObject *callObj = &scopeObj->as<CallObject>();
        if (!callObj->isForEval()) {
            JSFunction *fun = &callObj->callee();
            // The function might have been relazified under rare conditions.
            // In that case, we pessimistically create the guard, as we'd
            // need to root various pointers to delazify,
            if (fun->hasScript()) {
                JSScript *script = fun->nonLazyScript();
                if (!script->funHasExtensibleScope())
                    return;
            }
        }
    } else if (scopeObj->is<GlobalObject>()) {
        // If this is the last object on the scope walk, and the property we've
        // found is not configurable, then we don't need a shape guard because
        // the shape cannot be removed.
        if (shape && !shape->configurable())
            return;
    }

    // Fall-through: emit the actual shape guard.
    Address shapeAddr(scopeObjReg, JSObject::offsetOfShape());
    masm.branchPtr(Assembler::NotEqual, shapeAddr, ImmGCPtr(scopeObj->lastProperty()), failures);
}
michael@0 4062
// Emit shape guards for every object from |scopeChain| up to (and normally
// including) |holder|. |outputReg| is used as the cursor register: after
// this returns it holds |holder|. With |skipLastGuard|, the guard for
// |holder| itself is omitted (the caller emits its own guard), but
// |outputReg| still ends up holding |holder|.
static void
GenerateScopeChainGuards(MacroAssembler &masm, JSObject *scopeChain, JSObject *holder,
                         Register outputReg, Label *failures, bool skipLastGuard = false)
{
    JSObject *tobj = scopeChain;

    // Walk up the scope chain. Note that IsCacheableScopeChain guarantees the
    // |tobj == holder| condition terminates the loop.
    while (true) {
        JS_ASSERT(IsCacheableNonGlobalScope(tobj) || tobj->is<GlobalObject>());

        if (skipLastGuard && tobj == holder)
            break;

        GenerateScopeChainGuard(masm, tobj, outputReg, nullptr, failures);

        if (tobj == holder)
            break;

        // Load the next link.
        tobj = &tobj->as<ScopeObject>().enclosingScope();
        masm.extractObject(Address(outputReg, ScopeObject::offsetOfEnclosingScope()), outputReg);
    }
}
michael@0 4087
// Attach a BINDNAME stub for a non-global scope chain head: guard the shape
// of each scope object from |scopeChain| up to |holder|, then produce
// |holder| in the output register. When |holder == scopeChain| there is
// only a single guard and no separate failure label is needed.
bool
BindNameIC::attachNonGlobal(JSContext *cx, IonScript *ion, JSObject *scopeChain, JSObject *holder)
{
    JS_ASSERT(IsCacheableNonGlobalScope(scopeChain));

    MacroAssembler masm(cx, ion);
    RepatchStubAppender attacher(*this);

    // Guard on the shape of the scope chain. If more guards follow
    // (holder != scopeChain), failures branch to a shared label; otherwise
    // this single guard can jump straight to the next stub.
    Label failures;
    attacher.branchNextStubOrLabel(masm, Assembler::NotEqual,
                                   Address(scopeChainReg(), JSObject::offsetOfShape()),
                                   ImmGCPtr(scopeChain->lastProperty()),
                                   holder != scopeChain ? &failures : nullptr);

    if (holder != scopeChain) {
        // Walk from the enclosing scope up to the holder, guarding each
        // link; outputReg() serves as the walking cursor.
        JSObject *parent = &scopeChain->as<ScopeObject>().enclosingScope();
        masm.extractObject(Address(scopeChainReg(), ScopeObject::offsetOfEnclosingScope()), outputReg());

        GenerateScopeChainGuards(masm, parent, holder, outputReg(), &failures);
    } else {
        masm.movePtr(scopeChainReg(), outputReg());
    }

    // At this point outputReg holds the object on which the property
    // was found, so we're done.
    attacher.jumpRejoin(masm);

    // All failures flow to here, so there is a common point to patch.
    if (holder != scopeChain) {
        masm.bind(&failures);
        attacher.jumpNextStub(masm);
    }

    return linkAndAttachStub(cx, masm, attacher, ion, "non-global");
}
michael@0 4124
michael@0 4125 static bool
michael@0 4126 IsCacheableScopeChain(JSObject *scopeChain, JSObject *holder)
michael@0 4127 {
michael@0 4128 while (true) {
michael@0 4129 if (!IsCacheableNonGlobalScope(scopeChain)) {
michael@0 4130 IonSpew(IonSpew_InlineCaches, "Non-cacheable object on scope chain");
michael@0 4131 return false;
michael@0 4132 }
michael@0 4133
michael@0 4134 if (scopeChain == holder)
michael@0 4135 return true;
michael@0 4136
michael@0 4137 scopeChain = &scopeChain->as<ScopeObject>().enclosingScope();
michael@0 4138 if (!scopeChain) {
michael@0 4139 IonSpew(IonSpew_InlineCaches, "Scope chain indirect hit");
michael@0 4140 return false;
michael@0 4141 }
michael@0 4142 }
michael@0 4143
michael@0 4144 MOZ_ASSUME_UNREACHABLE("Invalid scope chain");
michael@0 4145 }
michael@0 4146
michael@0 4147 JSObject *
michael@0 4148 BindNameIC::update(JSContext *cx, size_t cacheIndex, HandleObject scopeChain)
michael@0 4149 {
michael@0 4150 IonScript *ion = GetTopIonJSScript(cx)->ionScript();
michael@0 4151 BindNameIC &cache = ion->getCache(cacheIndex).toBindName();
michael@0 4152 HandlePropertyName name = cache.name();
michael@0 4153
michael@0 4154 RootedObject holder(cx);
michael@0 4155 if (scopeChain->is<GlobalObject>()) {
michael@0 4156 holder = scopeChain;
michael@0 4157 } else {
michael@0 4158 if (!LookupNameWithGlobalDefault(cx, name, scopeChain, &holder))
michael@0 4159 return nullptr;
michael@0 4160 }
michael@0 4161
michael@0 4162 // Stop generating new stubs once we hit the stub count limit, see
michael@0 4163 // GetPropertyCache.
michael@0 4164 if (cache.canAttachStub()) {
michael@0 4165 if (scopeChain->is<GlobalObject>()) {
michael@0 4166 if (!cache.attachGlobal(cx, ion, scopeChain))
michael@0 4167 return nullptr;
michael@0 4168 } else if (IsCacheableScopeChain(scopeChain, holder)) {
michael@0 4169 if (!cache.attachNonGlobal(cx, ion, scopeChain, holder))
michael@0 4170 return nullptr;
michael@0 4171 } else {
michael@0 4172 IonSpew(IonSpew_InlineCaches, "BINDNAME uncacheable scope chain");
michael@0 4173 }
michael@0 4174 }
michael@0 4175
michael@0 4176 return holder;
michael@0 4177 }
michael@0 4178
// Attach a NAME stub that walks the scope chain from |scopeChain| to
// |holderBase|, guarding each scope object, and then reads the slot for the
// name found on |holder| (on |holderBase|'s prototype chain).
bool
NameIC::attachReadSlot(JSContext *cx, IonScript *ion, HandleObject scopeChain,
                       HandleObject holderBase, HandleObject holder,
                       HandleShape shape)
{
    MacroAssembler masm(cx, ion);
    Label failures;
    RepatchStubAppender attacher(*this);

    Register scratchReg = outputReg().valueReg().scratchReg();

    // Don't guard the base of the proto chain the name was found on. It will be guarded
    // by GenerateReadSlot().
    masm.mov(scopeChainReg(), scratchReg);
    GenerateScopeChainGuards(masm, scopeChain, holderBase, scratchReg, &failures,
                             /* skipLastGuard = */true);

    // GenerateScopeChainGuards leaves the last scope chain object in scratchReg,
    // even though it doesn't generate the extra guard.
    GenerateReadSlot(cx, ion, masm, attacher, holderBase, holder, shape, scratchReg,
                     outputReg(), failures.used() ? &failures : nullptr);

    return linkAndAttachStub(cx, masm, attacher, ion, "generic");
}
michael@0 4203
michael@0 4204 static bool
michael@0 4205 IsCacheableNameReadSlot(JSContext *cx, HandleObject scopeChain, HandleObject obj,
michael@0 4206 HandleObject holder, HandleShape shape, jsbytecode *pc,
michael@0 4207 const TypedOrValueRegister &output)
michael@0 4208 {
michael@0 4209 if (!shape)
michael@0 4210 return false;
michael@0 4211 if (!obj->isNative())
michael@0 4212 return false;
michael@0 4213
michael@0 4214 if (obj->is<GlobalObject>()) {
michael@0 4215 // Support only simple property lookups.
michael@0 4216 if (!IsCacheableGetPropReadSlot(obj, holder, shape) &&
michael@0 4217 !IsCacheableNoProperty(obj, holder, shape, pc, output))
michael@0 4218 return false;
michael@0 4219 } else if (obj->is<CallObject>()) {
michael@0 4220 JS_ASSERT(obj == holder);
michael@0 4221 if (!shape->hasDefaultGetter())
michael@0 4222 return false;
michael@0 4223 } else {
michael@0 4224 // We don't yet support lookups on Block or DeclEnv objects.
michael@0 4225 return false;
michael@0 4226 }
michael@0 4227
michael@0 4228 RootedObject obj2(cx, scopeChain);
michael@0 4229 while (obj2) {
michael@0 4230 if (!IsCacheableNonGlobalScope(obj2) && !obj2->is<GlobalObject>())
michael@0 4231 return false;
michael@0 4232
michael@0 4233 // Stop once we hit the global or target obj.
michael@0 4234 if (obj2->is<GlobalObject>() || obj2 == obj)
michael@0 4235 break;
michael@0 4236
michael@0 4237 obj2 = obj2->enclosingScope();
michael@0 4238 }
michael@0 4239
michael@0 4240 return obj == obj2;
michael@0 4241 }
michael@0 4242
michael@0 4243 bool
michael@0 4244 NameIC::attachCallGetter(JSContext *cx, IonScript *ion, JSObject *obj, JSObject *holder,
michael@0 4245 HandleShape shape, void *returnAddr)
michael@0 4246 {
michael@0 4247 MacroAssembler masm(cx, ion, script_, pc_);
michael@0 4248
michael@0 4249 RepatchStubAppender attacher(*this);
michael@0 4250 if (!GenerateCallGetter(cx, ion, masm, attacher, obj, name(), holder, shape, liveRegs_,
michael@0 4251 scopeChainReg(), outputReg(), returnAddr))
michael@0 4252 {
michael@0 4253 return false;
michael@0 4254 }
michael@0 4255
michael@0 4256 const char *attachKind = "name getter";
michael@0 4257 return linkAndAttachStub(cx, masm, attacher, ion, attachKind);
michael@0 4258 }
michael@0 4259
michael@0 4260 static bool
michael@0 4261 IsCacheableNameCallGetter(JSObject *scopeChain, JSObject *obj, JSObject *holder, Shape *shape)
michael@0 4262 {
michael@0 4263 if (obj != scopeChain)
michael@0 4264 return false;
michael@0 4265
michael@0 4266 if (!obj->is<GlobalObject>())
michael@0 4267 return false;
michael@0 4268
michael@0 4269 return IsCacheableGetPropCallNative(obj, holder, shape) ||
michael@0 4270 IsCacheableGetPropCallPropertyOp(obj, holder, shape);
michael@0 4271 }
michael@0 4272
michael@0 4273 bool
michael@0 4274 NameIC::update(JSContext *cx, size_t cacheIndex, HandleObject scopeChain,
michael@0 4275 MutableHandleValue vp)
michael@0 4276 {
michael@0 4277 void *returnAddr;
michael@0 4278 IonScript *ion = GetTopIonJSScript(cx, &returnAddr)->ionScript();
michael@0 4279
michael@0 4280 NameIC &cache = ion->getCache(cacheIndex).toName();
michael@0 4281 RootedPropertyName name(cx, cache.name());
michael@0 4282
michael@0 4283 RootedScript script(cx);
michael@0 4284 jsbytecode *pc;
michael@0 4285 cache.getScriptedLocation(&script, &pc);
michael@0 4286
michael@0 4287 RootedObject obj(cx);
michael@0 4288 RootedObject holder(cx);
michael@0 4289 RootedShape shape(cx);
michael@0 4290 if (!LookupName(cx, name, scopeChain, &obj, &holder, &shape))
michael@0 4291 return false;
michael@0 4292
michael@0 4293 if (cache.canAttachStub()) {
michael@0 4294 if (IsCacheableNameReadSlot(cx, scopeChain, obj, holder, shape, pc, cache.outputReg())) {
michael@0 4295 if (!cache.attachReadSlot(cx, ion, scopeChain, obj, holder, shape))
michael@0 4296 return false;
michael@0 4297 } else if (IsCacheableNameCallGetter(scopeChain, obj, holder, shape)) {
michael@0 4298 if (!cache.attachCallGetter(cx, ion, obj, holder, shape, returnAddr))
michael@0 4299 return false;
michael@0 4300 }
michael@0 4301 }
michael@0 4302
michael@0 4303 if (cache.isTypeOf()) {
michael@0 4304 if (!FetchName<true>(cx, obj, holder, name, shape, vp))
michael@0 4305 return false;
michael@0 4306 } else {
michael@0 4307 if (!FetchName<false>(cx, obj, holder, name, shape, vp))
michael@0 4308 return false;
michael@0 4309 }
michael@0 4310
michael@0 4311 // Monitor changes to cache entry.
michael@0 4312 types::TypeScript::Monitor(cx, script, pc, vp);
michael@0 4313
michael@0 4314 return true;
michael@0 4315 }
michael@0 4316
michael@0 4317 bool
michael@0 4318 CallsiteCloneIC::attach(JSContext *cx, IonScript *ion, HandleFunction original,
michael@0 4319 HandleFunction clone)
michael@0 4320 {
michael@0 4321 MacroAssembler masm(cx, ion);
michael@0 4322 RepatchStubAppender attacher(*this);
michael@0 4323
michael@0 4324 // Guard against object identity on the original.
michael@0 4325 attacher.branchNextStub(masm, Assembler::NotEqual, calleeReg(), ImmGCPtr(original));
michael@0 4326
michael@0 4327 // Load the clone.
michael@0 4328 masm.movePtr(ImmGCPtr(clone), outputReg());
michael@0 4329
michael@0 4330 attacher.jumpRejoin(masm);
michael@0 4331
michael@0 4332 return linkAndAttachStub(cx, masm, attacher, ion, "generic");
michael@0 4333 }
michael@0 4334
michael@0 4335 JSObject *
michael@0 4336 CallsiteCloneIC::update(JSContext *cx, size_t cacheIndex, HandleObject callee)
michael@0 4337 {
michael@0 4338 // Act as the identity for functions that are not clone-at-callsite, as we
michael@0 4339 // generate this cache as long as some callees are clone-at-callsite.
michael@0 4340 RootedFunction fun(cx, &callee->as<JSFunction>());
michael@0 4341 if (!fun->hasScript() || !fun->nonLazyScript()->shouldCloneAtCallsite())
michael@0 4342 return fun;
michael@0 4343
michael@0 4344 IonScript *ion = GetTopIonJSScript(cx)->ionScript();
michael@0 4345 CallsiteCloneIC &cache = ion->getCache(cacheIndex).toCallsiteClone();
michael@0 4346
michael@0 4347 RootedFunction clone(cx, CloneFunctionAtCallsite(cx, fun, cache.callScript(), cache.callPc()));
michael@0 4348 if (!clone)
michael@0 4349 return nullptr;
michael@0 4350
michael@0 4351 if (cache.canAttachStub()) {
michael@0 4352 if (!cache.attach(cx, ion, fun, clone))
michael@0 4353 return nullptr;
michael@0 4354 }
michael@0 4355
michael@0 4356 return clone;
michael@0 4357 }

mercurial