/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "jit/BaselineIC.h"

#include "mozilla/DebugOnly.h"
#include "mozilla/TemplateLib.h"

#include "jslibmath.h"
#include "jstypes.h"

#include "builtin/Eval.h"
#include "jit/BaselineDebugModeOSR.h"
#include "jit/BaselineHelpers.h"
#include "jit/BaselineJIT.h"
#include "jit/IonLinker.h"
#include "jit/IonSpewer.h"
#include "jit/Lowering.h"
#ifdef JS_ION_PERF
# include "jit/PerfSpewer.h"
#endif
#include "jit/VMFunctions.h"
#include "vm/Opcodes.h"

#include "jsboolinlines.h"
#include "jsscriptinlines.h"

#include "jit/IonFrames-inl.h"
#include "vm/Interpreter-inl.h"
#include "vm/ScopeObject-inl.h"
#include "vm/StringObject-inl.h"

using mozilla::DebugOnly;

namespace js {
namespace jit {

#ifdef DEBUG
void
FallbackICSpew(JSContext *cx, ICFallbackStub *stub, const char *fmt, ...)
{
    if (IonSpewEnabled(IonSpew_BaselineICFallback)) {
        RootedScript script(cx, GetTopIonJSScript(cx));
        jsbytecode *pc = stub->icEntry()->pc(script);

        char fmtbuf[100];
        va_list args;
        va_start(args, fmt);
        vsnprintf(fmtbuf, 100, fmt, args);
        va_end(args);

        IonSpew(IonSpew_BaselineICFallback,
                "Fallback hit for (%s:%d) (pc=%d,line=%d,uses=%d,stubs=%d): %s",
                script->filename(),
                script->lineno(),
                (int) script->pcToOffset(pc),
                PCToLineNumber(script, pc),
                script->getUseCount(),
                (int) stub->numOptimizedStubs(),
                fmtbuf);
    }
}

void
TypeFallbackICSpew(JSContext *cx, ICTypeMonitor_Fallback *stub, const char *fmt, ...)
{
    if (IonSpewEnabled(IonSpew_BaselineICFallback)) {
        RootedScript script(cx, GetTopIonJSScript(cx));
        jsbytecode *pc = stub->icEntry()->pc(script);

        char fmtbuf[100];
        va_list args;
        va_start(args, fmt);
        vsnprintf(fmtbuf, 100, fmt, args);
        va_end(args);

        IonSpew(IonSpew_BaselineICFallback,
                "Type monitor fallback hit for (%s:%d) (pc=%d,line=%d,uses=%d,stubs=%d): %s",
                script->filename(),
                script->lineno(),
                (int) script->pcToOffset(pc),
                PCToLineNumber(script, pc),
                script->getUseCount(),
                (int) stub->numOptimizedMonitorStubs(),
                fmtbuf);
    }
}

#else
#define FallbackICSpew(...)
#define TypeFallbackICSpew(...)
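// In non-DEBUG builds the spew helpers expand to nothing, so callers can
// invoke them unconditionally at no release-build cost.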
#endif


ICFallbackStub *
ICEntry::fallbackStub() const
{
    return firstStub()->getChainFallback();
}


ICStubConstIterator &
ICStubConstIterator::operator++()
{
    JS_ASSERT(currentStub_ != nullptr);
    currentStub_ = currentStub_->next();
    return *this;
}


ICStubIterator::ICStubIterator(ICFallbackStub *fallbackStub, bool end)
  : icEntry_(fallbackStub->icEntry()),
    fallbackStub_(fallbackStub),
    previousStub_(nullptr),
    currentStub_(end ? fallbackStub : icEntry_->firstStub()),
    unlinked_(false)
{ }

ICStubIterator &
ICStubIterator::operator++()
{
    JS_ASSERT(currentStub_->next() != nullptr);
    if (!unlinked_)
        previousStub_ = currentStub_;
    currentStub_ = currentStub_->next();
    unlinked_ = false;
    return *this;
}

void
ICStubIterator::unlink(JSContext *cx)
{
    JS_ASSERT(currentStub_->next() != nullptr);
    JS_ASSERT(currentStub_ != fallbackStub_);
    JS_ASSERT(!unlinked_);

    fallbackStub_->unlinkStub(cx->zone(), previousStub_, currentStub_);

    // Mark the current iterator position as unlinked, so operator++ works properly.
    unlinked_ = true;
}


void
ICStub::markCode(JSTracer *trc, const char *name)
{
    JitCode *stubJitCode = jitCode();
    MarkJitCodeUnbarriered(trc, &stubJitCode, name);
}

void
ICStub::updateCode(JitCode *code)
{
    // Write barrier on the old code.
#ifdef JSGC_INCREMENTAL
    JitCode::writeBarrierPre(jitCode());
#endif
    stubCode_ = code->raw();
}

/* static */ void
ICStub::trace(JSTracer *trc)
{
    markCode(trc, "baseline-stub-jitcode");

    // If the stub is a monitored fallback stub, then mark the monitor ICs hanging
    // off of that stub. We don't need to worry about the regular monitored stubs,
    // because the regular monitored stubs will always have a monitored fallback stub
    // that references the same stub chain.
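    //
    // For orientation, a sketch (not to scale) of the chains involved: the
    // ICEntry's main chain is
    //
    //   firstStub() -> [optimized stubs...] -> [fallback stub]
    //
    // and a monitored fallback stub additionally owns a monitor chain that
    // always terminates in an ICTypeMonitor_Fallback.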
    if (isMonitoredFallback()) {
        ICTypeMonitor_Fallback *lastMonStub = toMonitoredFallbackStub()->fallbackMonitorStub();
        for (ICStubConstIterator iter = lastMonStub->firstMonitorStub(); !iter.atEnd(); iter++) {
            JS_ASSERT_IF(iter->next() == nullptr, *iter == lastMonStub);
            iter->trace(trc);
        }
    }

    if (isUpdated()) {
        for (ICStubConstIterator iter = toUpdatedStub()->firstUpdateStub(); !iter.atEnd(); iter++) {
            JS_ASSERT_IF(iter->next() == nullptr, iter->isTypeUpdate_Fallback());
            iter->trace(trc);
        }
    }

    switch (kind()) {
      case ICStub::Call_Scripted: {
        ICCall_Scripted *callStub = toCall_Scripted();
        MarkScript(trc, &callStub->calleeScript(), "baseline-callscripted-callee");
        if (callStub->templateObject())
            MarkObject(trc, &callStub->templateObject(), "baseline-callscripted-template");
        break;
      }
      case ICStub::Call_Native: {
        ICCall_Native *callStub = toCall_Native();
        MarkObject(trc, &callStub->callee(), "baseline-callnative-callee");
        if (callStub->templateObject())
            MarkObject(trc, &callStub->templateObject(), "baseline-callnative-template");
        break;
      }
      case ICStub::GetElem_NativeSlot: {
        ICGetElem_NativeSlot *getElemStub = toGetElem_NativeSlot();
        MarkShape(trc, &getElemStub->shape(), "baseline-getelem-native-shape");
        MarkString(trc, &getElemStub->name(), "baseline-getelem-native-name");
        break;
      }
      case ICStub::GetElem_NativePrototypeSlot: {
        ICGetElem_NativePrototypeSlot *getElemStub = toGetElem_NativePrototypeSlot();
        MarkShape(trc, &getElemStub->shape(), "baseline-getelem-nativeproto-shape");
        MarkString(trc, &getElemStub->name(), "baseline-getelem-nativeproto-name");
        MarkObject(trc, &getElemStub->holder(), "baseline-getelem-nativeproto-holder");
        MarkShape(trc, &getElemStub->holderShape(), "baseline-getelem-nativeproto-holdershape");
        break;
      }
      case ICStub::GetElem_NativePrototypeCallNative:
      case ICStub::GetElem_NativePrototypeCallScripted: {
        ICGetElemNativePrototypeCallStub *callStub =
            reinterpret_cast<ICGetElemNativePrototypeCallStub *>(this);
        MarkShape(trc, &callStub->shape(), "baseline-getelem-nativeprotocall-shape");
        MarkString(trc, &callStub->name(), "baseline-getelem-nativeprotocall-name");
        MarkObject(trc, &callStub->getter(), "baseline-getelem-nativeprotocall-getter");
        MarkObject(trc, &callStub->holder(), "baseline-getelem-nativeprotocall-holder");
        MarkShape(trc, &callStub->holderShape(), "baseline-getelem-nativeprotocall-holdershape");
        break;
      }
      case ICStub::GetElem_Dense: {
        ICGetElem_Dense *getElemStub = toGetElem_Dense();
        MarkShape(trc, &getElemStub->shape(), "baseline-getelem-dense-shape");
        break;
      }
      case ICStub::GetElem_TypedArray: {
        ICGetElem_TypedArray *getElemStub = toGetElem_TypedArray();
        MarkShape(trc, &getElemStub->shape(), "baseline-getelem-typedarray-shape");
        break;
      }
      case ICStub::SetElem_Dense: {
        ICSetElem_Dense *setElemStub = toSetElem_Dense();
"baseline-getelem-dense-shape"); michael@0: MarkTypeObject(trc, &setElemStub->type(), "baseline-setelem-dense-type"); michael@0: break; michael@0: } michael@0: case ICStub::SetElem_DenseAdd: { michael@0: ICSetElem_DenseAdd *setElemStub = toSetElem_DenseAdd(); michael@0: MarkTypeObject(trc, &setElemStub->type(), "baseline-setelem-denseadd-type"); michael@0: michael@0: JS_STATIC_ASSERT(ICSetElem_DenseAdd::MAX_PROTO_CHAIN_DEPTH == 4); michael@0: michael@0: switch (setElemStub->protoChainDepth()) { michael@0: case 0: setElemStub->toImpl<0>()->traceShapes(trc); break; michael@0: case 1: setElemStub->toImpl<1>()->traceShapes(trc); break; michael@0: case 2: setElemStub->toImpl<2>()->traceShapes(trc); break; michael@0: case 3: setElemStub->toImpl<3>()->traceShapes(trc); break; michael@0: case 4: setElemStub->toImpl<4>()->traceShapes(trc); break; michael@0: default: MOZ_ASSUME_UNREACHABLE("Invalid proto stub."); michael@0: } michael@0: break; michael@0: } michael@0: case ICStub::SetElem_TypedArray: { michael@0: ICSetElem_TypedArray *setElemStub = toSetElem_TypedArray(); michael@0: MarkShape(trc, &setElemStub->shape(), "baseline-setelem-typedarray-shape"); michael@0: break; michael@0: } michael@0: case ICStub::TypeMonitor_SingleObject: { michael@0: ICTypeMonitor_SingleObject *monitorStub = toTypeMonitor_SingleObject(); michael@0: MarkObject(trc, &monitorStub->object(), "baseline-monitor-singleobject"); michael@0: break; michael@0: } michael@0: case ICStub::TypeMonitor_TypeObject: { michael@0: ICTypeMonitor_TypeObject *monitorStub = toTypeMonitor_TypeObject(); michael@0: MarkTypeObject(trc, &monitorStub->type(), "baseline-monitor-typeobject"); michael@0: break; michael@0: } michael@0: case ICStub::TypeUpdate_SingleObject: { michael@0: ICTypeUpdate_SingleObject *updateStub = toTypeUpdate_SingleObject(); michael@0: MarkObject(trc, &updateStub->object(), "baseline-update-singleobject"); michael@0: break; michael@0: } michael@0: case ICStub::TypeUpdate_TypeObject: { michael@0: ICTypeUpdate_TypeObject *updateStub = toTypeUpdate_TypeObject(); michael@0: MarkTypeObject(trc, &updateStub->type(), "baseline-update-typeobject"); michael@0: break; michael@0: } michael@0: case ICStub::Profiler_PushFunction: { michael@0: ICProfiler_PushFunction *pushFunStub = toProfiler_PushFunction(); michael@0: MarkScript(trc, &pushFunStub->script(), "baseline-profilerpushfunction-stub-script"); michael@0: break; michael@0: } michael@0: case ICStub::GetName_Global: { michael@0: ICGetName_Global *globalStub = toGetName_Global(); michael@0: MarkShape(trc, &globalStub->shape(), "baseline-global-stub-shape"); michael@0: break; michael@0: } michael@0: case ICStub::GetName_Scope0: michael@0: static_cast*>(this)->traceScopes(trc); michael@0: break; michael@0: case ICStub::GetName_Scope1: michael@0: static_cast*>(this)->traceScopes(trc); michael@0: break; michael@0: case ICStub::GetName_Scope2: michael@0: static_cast*>(this)->traceScopes(trc); michael@0: break; michael@0: case ICStub::GetName_Scope3: michael@0: static_cast*>(this)->traceScopes(trc); michael@0: break; michael@0: case ICStub::GetName_Scope4: michael@0: static_cast*>(this)->traceScopes(trc); michael@0: break; michael@0: case ICStub::GetName_Scope5: michael@0: static_cast*>(this)->traceScopes(trc); michael@0: break; michael@0: case ICStub::GetName_Scope6: michael@0: static_cast*>(this)->traceScopes(trc); michael@0: break; michael@0: case ICStub::GetIntrinsic_Constant: { michael@0: ICGetIntrinsic_Constant *constantStub = toGetIntrinsic_Constant(); michael@0: 
        gc::MarkValue(trc, &constantStub->value(), "baseline-getintrinsic-constant-value");
        break;
      }
      case ICStub::GetProp_Primitive: {
        ICGetProp_Primitive *propStub = toGetProp_Primitive();
        MarkShape(trc, &propStub->protoShape(), "baseline-getprop-primitive-stub-shape");
        break;
      }
      case ICStub::GetProp_Native: {
        ICGetProp_Native *propStub = toGetProp_Native();
        MarkShape(trc, &propStub->shape(), "baseline-getpropnative-stub-shape");
        break;
      }
      case ICStub::GetProp_NativePrototype: {
        ICGetProp_NativePrototype *propStub = toGetProp_NativePrototype();
        MarkShape(trc, &propStub->shape(), "baseline-getpropnativeproto-stub-shape");
        MarkObject(trc, &propStub->holder(), "baseline-getpropnativeproto-stub-holder");
        MarkShape(trc, &propStub->holderShape(), "baseline-getpropnativeproto-stub-holdershape");
        break;
      }
      case ICStub::GetProp_CallDOMProxyNative:
      case ICStub::GetProp_CallDOMProxyWithGenerationNative: {
        ICGetPropCallDOMProxyNativeStub *propStub;
        if (kind() == ICStub::GetProp_CallDOMProxyNative)
            propStub = toGetProp_CallDOMProxyNative();
        else
            propStub = toGetProp_CallDOMProxyWithGenerationNative();
        MarkShape(trc, &propStub->shape(), "baseline-getproplistbasenative-stub-shape");
        if (propStub->expandoShape()) {
            MarkShape(trc, &propStub->expandoShape(),
                      "baseline-getproplistbasenative-stub-expandoshape");
        }
        MarkObject(trc, &propStub->holder(), "baseline-getproplistbasenative-stub-holder");
        MarkShape(trc, &propStub->holderShape(), "baseline-getproplistbasenative-stub-holdershape");
        MarkObject(trc, &propStub->getter(), "baseline-getproplistbasenative-stub-getter");
        break;
      }
      case ICStub::GetProp_DOMProxyShadowed: {
        ICGetProp_DOMProxyShadowed *propStub = toGetProp_DOMProxyShadowed();
        MarkShape(trc, &propStub->shape(), "baseline-getproplistbaseshadowed-stub-shape");
        MarkString(trc, &propStub->name(), "baseline-getproplistbaseshadowed-stub-name");
        break;
      }
      case ICStub::GetProp_CallScripted: {
        ICGetProp_CallScripted *callStub = toGetProp_CallScripted();
        MarkShape(trc, &callStub->receiverShape(), "baseline-getpropcallscripted-stub-receivershape");
        MarkObject(trc, &callStub->holder(), "baseline-getpropcallscripted-stub-holder");
        MarkShape(trc, &callStub->holderShape(), "baseline-getpropcallscripted-stub-holdershape");
        MarkObject(trc, &callStub->getter(), "baseline-getpropcallscripted-stub-getter");
        break;
      }
      case ICStub::GetProp_CallNative: {
        ICGetProp_CallNative *callStub = toGetProp_CallNative();
        MarkObject(trc, &callStub->holder(), "baseline-getpropcallnative-stub-holder");
        MarkShape(trc, &callStub->holderShape(), "baseline-getpropcallnative-stub-holdershape");
        MarkObject(trc, &callStub->getter(), "baseline-getpropcallnative-stub-getter");
        break;
      }
      case ICStub::GetProp_CallNativePrototype: {
        ICGetProp_CallNativePrototype *callStub = toGetProp_CallNativePrototype();
        MarkShape(trc, &callStub->receiverShape(), "baseline-getpropcallnativeproto-stub-receivershape");
        MarkObject(trc, &callStub->holder(), "baseline-getpropcallnativeproto-stub-holder");
        MarkShape(trc, &callStub->holderShape(), "baseline-getpropcallnativeproto-stub-holdershape");
        MarkObject(trc, &callStub->getter(), "baseline-getpropcallnativeproto-stub-getter");
        break;
      }
      case ICStub::SetProp_Native: {
        ICSetProp_Native *propStub = toSetProp_Native();
        MarkShape(trc, &propStub->shape(), "baseline-setpropnative-stub-shape");
        MarkTypeObject(trc, &propStub->type(), "baseline-setpropnative-stub-type");
        break;
      }
      case ICStub::SetProp_NativeAdd: {
        ICSetProp_NativeAdd *propStub = toSetProp_NativeAdd();
        MarkTypeObject(trc, &propStub->type(), "baseline-setpropnativeadd-stub-type");
        MarkShape(trc, &propStub->newShape(), "baseline-setpropnativeadd-stub-newshape");
        JS_STATIC_ASSERT(ICSetProp_NativeAdd::MAX_PROTO_CHAIN_DEPTH == 4);
        switch (propStub->protoChainDepth()) {
          case 0: propStub->toImpl<0>()->traceShapes(trc); break;
          case 1: propStub->toImpl<1>()->traceShapes(trc); break;
          case 2: propStub->toImpl<2>()->traceShapes(trc); break;
          case 3: propStub->toImpl<3>()->traceShapes(trc); break;
          case 4: propStub->toImpl<4>()->traceShapes(trc); break;
          default: MOZ_ASSUME_UNREACHABLE("Invalid proto stub.");
        }
        break;
      }
      case ICStub::SetProp_CallScripted: {
        ICSetProp_CallScripted *callStub = toSetProp_CallScripted();
        MarkShape(trc, &callStub->shape(), "baseline-setpropcallscripted-stub-shape");
        MarkObject(trc, &callStub->holder(), "baseline-setpropcallscripted-stub-holder");
        MarkShape(trc, &callStub->holderShape(), "baseline-setpropcallscripted-stub-holdershape");
        MarkObject(trc, &callStub->setter(), "baseline-setpropcallscripted-stub-setter");
        break;
      }
      case ICStub::SetProp_CallNative: {
        ICSetProp_CallNative *callStub = toSetProp_CallNative();
        MarkShape(trc, &callStub->shape(), "baseline-setpropcallnative-stub-shape");
        MarkObject(trc, &callStub->holder(), "baseline-setpropcallnative-stub-holder");
        MarkShape(trc, &callStub->holderShape(), "baseline-setpropcallnative-stub-holdershape");
        MarkObject(trc, &callStub->setter(), "baseline-setpropcallnative-stub-setter");
        break;
      }
      case ICStub::NewArray_Fallback: {
        ICNewArray_Fallback *stub = toNewArray_Fallback();
        MarkObject(trc, &stub->templateObject(), "baseline-newarray-template");
        break;
      }
      case ICStub::NewObject_Fallback: {
        ICNewObject_Fallback *stub = toNewObject_Fallback();
        MarkObject(trc, &stub->templateObject(), "baseline-newobject-template");
        break;
      }
      case ICStub::Rest_Fallback: {
        ICRest_Fallback *stub = toRest_Fallback();
        MarkObject(trc, &stub->templateObject(), "baseline-rest-template");
        break;
      }
      default:
        break;
    }
}

void
ICFallbackStub::unlinkStub(Zone *zone, ICStub *prev, ICStub *stub)
{
    JS_ASSERT(stub->next());

    // If stub is the last optimized stub, update lastStubPtrAddr.
    if (stub->next() == this) {
        JS_ASSERT(lastStubPtrAddr_ == stub->addressOfNext());
        if (prev)
            lastStubPtrAddr_ = prev->addressOfNext();
        else
            lastStubPtrAddr_ = icEntry()->addressOfFirstStub();
        *lastStubPtrAddr_ = this;
    } else {
        if (prev) {
            JS_ASSERT(prev->next() == stub);
            prev->setNext(stub->next());
        } else {
            JS_ASSERT(icEntry()->firstStub() == stub);
            icEntry()->setFirstStub(stub->next());
        }
    }

    JS_ASSERT(numOptimizedStubs_ > 0);
    numOptimizedStubs_--;

    if (zone->needsBarrier()) {
        // We are removing edges from ICStub to gcthings. Perform one final trace
        // of the stub for incremental GC, as it must know about those edges.
        stub->trace(zone->barrierTracer());
    }

    if (ICStub::CanMakeCalls(stub->kind()) && stub->isMonitored()) {
        // This stub can make calls so we can return to it if it's on the stack.
        // We just have to reset its firstMonitorStub_ field to avoid a stale
        // pointer when purgeOptimizedStubs destroys all optimized monitor
        // stubs (unlinked stubs won't be updated).
        ICTypeMonitor_Fallback *monitorFallback = toMonitoredFallbackStub()->fallbackMonitorStub();
        stub->toMonitoredStub()->resetFirstMonitorStub(monitorFallback);
    }

#ifdef DEBUG
    // Poison stub code to ensure we don't call this stub again. However, if this
    // stub can make calls, a pointer to it may be stored in a stub frame on the
    // stack, so we can't touch the stubCode_ or GC will crash when marking this
    // pointer.
    if (!ICStub::CanMakeCalls(stub->kind()))
        stub->stubCode_ = (uint8_t *)0xbad;
#endif
}

void
ICFallbackStub::unlinkStubsWithKind(JSContext *cx, ICStub::Kind kind)
{
    for (ICStubIterator iter = beginChain(); !iter.atEnd(); iter++) {
        if (iter->kind() == kind)
            iter.unlink(cx);
    }
}

void
ICTypeMonitor_Fallback::resetMonitorStubChain(Zone *zone)
{
    if (zone->needsBarrier()) {
        // We are removing edges from monitored stubs to gcthings (JitCode).
        // Perform one final trace of all monitor stubs for incremental GC,
        // as it must know about those edges.
        for (ICStub *s = firstMonitorStub_; !s->isTypeMonitor_Fallback(); s = s->next())
            s->trace(zone->barrierTracer());
    }

    firstMonitorStub_ = this;
    numOptimizedMonitorStubs_ = 0;

    if (hasFallbackStub_) {
        lastMonitorStubPtrAddr_ = nullptr;

        // Reset firstMonitorStub_ field of all monitored stubs.
        for (ICStubConstIterator iter = mainFallbackStub_->beginChainConst();
             !iter.atEnd(); iter++)
        {
            if (!iter->isMonitored())
                continue;
            iter->toMonitoredStub()->resetFirstMonitorStub(this);
        }
    } else {
        icEntry_->setFirstStub(this);
        lastMonitorStubPtrAddr_ = icEntry_->addressOfFirstStub();
    }
}

ICMonitoredStub::ICMonitoredStub(Kind kind, JitCode *stubCode, ICStub *firstMonitorStub)
  : ICStub(kind, ICStub::Monitored, stubCode),
    firstMonitorStub_(firstMonitorStub)
{
    // If the first monitored stub is a ICTypeMonitor_Fallback stub, then
    // double check that _its_ firstMonitorStub is the same as this one.
    JS_ASSERT_IF(firstMonitorStub_->isTypeMonitor_Fallback(),
                 firstMonitorStub_->toTypeMonitor_Fallback()->firstMonitorStub() ==
                     firstMonitorStub_);
}

bool
ICMonitoredFallbackStub::initMonitoringChain(JSContext *cx, ICStubSpace *space)
{
    JS_ASSERT(fallbackMonitorStub_ == nullptr);

    ICTypeMonitor_Fallback::Compiler compiler(cx, this);
    ICTypeMonitor_Fallback *stub = compiler.getStub(space);
    if (!stub)
        return false;
    fallbackMonitorStub_ = stub;
    return true;
}

bool
ICMonitoredFallbackStub::addMonitorStubForValue(JSContext *cx, JSScript *script, HandleValue val)
{
    return fallbackMonitorStub_->addMonitorStubForValue(cx, script, val);
}

bool
ICUpdatedStub::initUpdatingChain(JSContext *cx, ICStubSpace *space)
{
    JS_ASSERT(firstUpdateStub_ == nullptr);

    ICTypeUpdate_Fallback::Compiler compiler(cx);
    ICTypeUpdate_Fallback *stub = compiler.getStub(space);
    if (!stub)
        return false;

    firstUpdateStub_ = stub;
    return true;
}

JitCode *
ICStubCompiler::getStubCode()
{
    JitCompartment *comp = cx->compartment()->jitCompartment();

    // Check for existing cached stubcode.
    uint32_t stubKey = getKey();
    JitCode *stubCode = comp->getStubCode(stubKey);
    if (stubCode)
        return stubCode;

    // Compile new stubcode.
    IonContext ictx(cx, nullptr);
    MacroAssembler masm;
#ifdef JS_CODEGEN_ARM
    masm.setSecondScratchReg(BaselineSecondScratchReg);
#endif

    if (!generateStubCode(masm))
        return nullptr;
    Linker linker(masm);
    AutoFlushICache afc("getStubCode");
    Rooted<JitCode *> newStubCode(cx, linker.newCode<CanGC>(cx, JSC::BASELINE_CODE));
    if (!newStubCode)
        return nullptr;

    // After generating code, run postGenerateStubCode()
    if (!postGenerateStubCode(masm, newStubCode))
        return nullptr;

    // All barriers are emitted off-by-default, enable them if needed.
    if (cx->zone()->needsBarrier())
        newStubCode->togglePreBarriers(true);

    // Cache newly compiled stubcode.
    if (!comp->putStubCode(stubKey, newStubCode))
        return nullptr;

    JS_ASSERT(entersStubFrame_ == ICStub::CanMakeCalls(kind));

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(newStubCode, "BaselineIC");
#endif

    return newStubCode;
}

bool
ICStubCompiler::tailCallVM(const VMFunction &fun, MacroAssembler &masm)
{
    JitCode *code = cx->runtime()->jitRuntime()->getVMWrapper(fun);
    if (!code)
        return false;

    uint32_t argSize = fun.explicitStackSlots() * sizeof(void *);
    EmitTailCallVM(code, masm, argSize);
    return true;
}

bool
ICStubCompiler::callVM(const VMFunction &fun, MacroAssembler &masm)
{
    JitCode *code = cx->runtime()->jitRuntime()->getVMWrapper(fun);
    if (!code)
        return false;

    EmitCallVM(code, masm);
    return true;
}

bool
ICStubCompiler::callTypeUpdateIC(MacroAssembler &masm, uint32_t objectOffset)
{
    JitCode *code = cx->runtime()->jitRuntime()->getVMWrapper(DoTypeUpdateFallbackInfo);
    if (!code)
        return false;

    EmitCallTypeUpdateIC(masm, code, objectOffset);
    return true;
}

void
ICStubCompiler::enterStubFrame(MacroAssembler &masm, Register scratch)
{
    EmitEnterStubFrame(masm, scratch);
#ifdef DEBUG
    entersStubFrame_ = true;
#endif
}

void
ICStubCompiler::leaveStubFrame(MacroAssembler &masm, bool calledIntoIon)
{
    JS_ASSERT(entersStubFrame_);
    EmitLeaveStubFrame(masm, calledIntoIon);
}

void
ICStubCompiler::leaveStubFrameHead(MacroAssembler &masm, bool calledIntoIon)
{
    JS_ASSERT(entersStubFrame_);
    EmitLeaveStubFrameHead(masm, calledIntoIon);
}

void
ICStubCompiler::leaveStubFrameCommonTail(MacroAssembler &masm)
{
    JS_ASSERT(entersStubFrame_);
    EmitLeaveStubFrameCommonTail(masm);
}

void
ICStubCompiler::guardProfilingEnabled(MacroAssembler &masm, Register scratch, Label *skip)
{
    // This should only be called from the following stubs.
    JS_ASSERT(kind == ICStub::Call_Scripted ||
              kind == ICStub::Call_AnyScripted ||
              kind == ICStub::Call_Native ||
              kind == ICStub::Call_ScriptedApplyArray ||
              kind == ICStub::Call_ScriptedApplyArguments ||
              kind == ICStub::Call_ScriptedFunCall ||
              kind == ICStub::GetProp_CallScripted ||
              kind == ICStub::GetProp_CallNative ||
              kind == ICStub::GetProp_CallNativePrototype ||
              kind == ICStub::GetProp_CallDOMProxyNative ||
              kind == ICStub::GetElem_NativePrototypeCallNative ||
              kind == ICStub::GetElem_NativePrototypeCallScripted ||
              kind == ICStub::GetProp_CallDOMProxyWithGenerationNative ||
              kind == ICStub::GetProp_DOMProxyShadowed ||
              kind == ICStub::SetProp_CallScripted ||
              kind == ICStub::SetProp_CallNative);

    // Guard on bit in frame that indicates if the SPS frame was pushed in the first
    // place. This code is expected to be called from within a stub that has already
    // entered a stub frame.
    JS_ASSERT(entersStubFrame_);
    masm.loadPtr(Address(BaselineFrameReg, 0), scratch);
    masm.branchTest32(Assembler::Zero,
                      Address(scratch, BaselineFrame::reverseOffsetOfFlags()),
                      Imm32(BaselineFrame::HAS_PUSHED_SPS_FRAME),
                      skip);

    // Check if profiling is enabled.
    uint32_t *enabledAddr = cx->runtime()->spsProfiler.addressOfEnabled();
    masm.branch32(Assembler::Equal, AbsoluteAddress(enabledAddr), Imm32(0), skip);
}

void
ICStubCompiler::emitProfilingUpdate(MacroAssembler &masm, Register pcIdx, Register scratch,
                                    uint32_t stubPcOffset)
{
    Label skipProfilerUpdate;

    // Check if profiling is enabled.
    guardProfilingEnabled(masm, scratch, &skipProfilerUpdate);

    // Update profiling entry before leaving function.
    masm.load32(Address(BaselineStubReg, stubPcOffset), pcIdx);
    masm.spsUpdatePCIdx(&cx->runtime()->spsProfiler, pcIdx, scratch);

    masm.bind(&skipProfilerUpdate);
}

void
ICStubCompiler::emitProfilingUpdate(MacroAssembler &masm, GeneralRegisterSet regs,
                                    uint32_t stubPcOffset)
{
    emitProfilingUpdate(masm, regs.takeAny(), regs.takeAny(), stubPcOffset);
}

#ifdef JSGC_GENERATIONAL
inline bool
ICStubCompiler::emitPostWriteBarrierSlot(MacroAssembler &masm, Register obj, ValueOperand val,
                                         Register scratch, GeneralRegisterSet saveRegs)
{
    Nursery &nursery = cx->runtime()->gcNursery;

    Label skipBarrier;
    masm.branchTestObject(Assembler::NotEqual, val, &skipBarrier);

    masm.branchPtrInNurseryRange(obj, scratch, &skipBarrier);

    Register valReg = masm.extractObject(val, scratch);
    masm.branchPtr(Assembler::Below, valReg, ImmWord(nursery.start()), &skipBarrier);
    masm.branchPtr(Assembler::AboveOrEqual, valReg, ImmWord(nursery.heapEnd()), &skipBarrier);

    // void PostWriteBarrier(JSRuntime *rt, JSObject *obj);
#if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_MIPS)
    saveRegs.add(BaselineTailCallReg);
#endif
    saveRegs = GeneralRegisterSet::Intersect(saveRegs, GeneralRegisterSet::Volatile());
    masm.PushRegsInMask(saveRegs);
    masm.setupUnalignedABICall(2, scratch);
    masm.movePtr(ImmPtr(cx->runtime()), scratch);
    masm.passABIArg(scratch);
    masm.passABIArg(obj);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, PostWriteBarrier));
    masm.PopRegsInMask(saveRegs);

    masm.bind(&skipBarrier);
    return true;
}
#endif // JSGC_GENERATIONAL

//
// UseCount_Fallback
//
static bool
IsTopFrameConstructing(JSContext *cx)
{
    JS_ASSERT(cx->currentlyRunningInJit());
    JitActivationIterator activations(cx->runtime());
    JitFrameIterator iter(activations);
    JS_ASSERT(iter.type() == JitFrame_Exit);

    ++iter;
    JS_ASSERT(iter.type() == JitFrame_BaselineStub);

    ++iter;
    JS_ASSERT(iter.isBaselineJS());

    return iter.isConstructing();
}

static bool
EnsureCanEnterIon(JSContext *cx, ICUseCount_Fallback *stub, BaselineFrame *frame,
                  HandleScript script, jsbytecode *pc, void **jitcodePtr)
{
    JS_ASSERT(jitcodePtr);
    JS_ASSERT(!*jitcodePtr);

    bool isLoopEntry = (JSOp(*pc) == JSOP_LOOPENTRY);

    bool isConstructing = IsTopFrameConstructing(cx);
    MethodStatus stat;
    if (isLoopEntry) {
        JS_ASSERT(LoopEntryCanIonOsr(pc));
        IonSpew(IonSpew_BaselineOSR, "  Compile at loop entry!");
        stat = CanEnterAtBranch(cx, script, frame, pc, isConstructing);
    } else if (frame->isFunctionFrame()) {
        IonSpew(IonSpew_BaselineOSR, "  Compile function from top for later entry!");
        stat = CompileFunctionForBaseline(cx, script, frame, isConstructing);
    } else {
        return true;
    }

    if (stat == Method_Error) {
        IonSpew(IonSpew_BaselineOSR, "  Compile with Ion errored!");
        return false;
    }

    if (stat == Method_CantCompile)
        IonSpew(IonSpew_BaselineOSR, "  Can't compile with Ion!");
    else if (stat == Method_Skipped)
        IonSpew(IonSpew_BaselineOSR, "  Skipped compile with Ion!");
    else if (stat == Method_Compiled)
        IonSpew(IonSpew_BaselineOSR, "  Compiled with Ion!");
    else
        MOZ_ASSUME_UNREACHABLE("Invalid MethodStatus!");

    // Failed to compile. Reset use count and return.
    if (stat != Method_Compiled) {
        // TODO: If stat == Method_CantCompile, insert stub that just skips the useCount
        // entirely, instead of resetting it.
        bool bailoutExpected = script->hasIonScript() && script->ionScript()->bailoutExpected();
        if (stat == Method_CantCompile || bailoutExpected) {
            IonSpew(IonSpew_BaselineOSR, "  Reset UseCount cantCompile=%s bailoutExpected=%s!",
                    stat == Method_CantCompile ? "yes" : "no",
                    bailoutExpected ? "yes" : "no");
            script->resetUseCount();
        }
        return true;
    }

    if (isLoopEntry) {
        IonScript *ion = script->ionScript();
        JS_ASSERT(cx->runtime()->spsProfiler.enabled() == ion->hasSPSInstrumentation());
        JS_ASSERT(ion->osrPc() == pc);

        // If the baseline frame's SPS handling doesn't match up with the Ion code's SPS
        // handling, don't OSR.
        if (frame->hasPushedSPSFrame() != ion->hasSPSInstrumentation()) {
            IonSpew(IonSpew_BaselineOSR, "  OSR crosses SPS handling boundaries, skipping!");
            return true;
        }

        IonSpew(IonSpew_BaselineOSR, "  OSR possible!");
        *jitcodePtr = ion->method()->raw() + ion->osrEntryOffset();
    }

    return true;
}

//
// The following data is kept in a temporary heap-allocated buffer, stored in
// JitRuntime (high memory addresses at top, low at bottom):
//
//            +----->+=================================+  --      <---- High Address
//            |      |                                 |   |
//            |      |     ...BaselineFrame...         |   |-- Copy of BaselineFrame + stack values
//            |      |                                 |   |
//            |      +---------------------------------+   |
//            |      |                                 |   |
//            |      |     ...Locals/Stack...          |   |
//            |      |                                 |   |
//            |      +=================================+  --
//            |      |     Padding(Maybe Empty)        |
//            |      +=================================+  --
//            +------|-- baselineFrame                 |   |-- IonOsrTempData
//                   |   jitcode                       |   |
//                   +=================================+  --      <---- Low Address
//
// A pointer to the IonOsrTempData is returned.
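//
// As a worked example (numbers illustrative only): on a 64-bit build with
// 8 local/stack Values, frameSpace in PrepareOsrTempData below is
// sizeof(BaselineFrame) + 8 * sizeof(Value); frameSpace and
// sizeof(IonOsrTempData) are each rounded up to Value alignment with
// AlignBytes before being summed into the single allocation.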

struct IonOsrTempData
{
    void *jitcode;
    uint8_t *baselineFrame;
};

static IonOsrTempData *
PrepareOsrTempData(JSContext *cx, ICUseCount_Fallback *stub, BaselineFrame *frame,
                   HandleScript script, jsbytecode *pc, void *jitcode)
{
    size_t numLocalsAndStackVals = frame->numValueSlots();

    // Calculate the amount of space to allocate:
    //      BaselineFrame space:
    //          (sizeof(Value) * (numLocals + numStackVals))
    //        + sizeof(BaselineFrame)
    //
    //      IonOsrTempData space:
    //          sizeof(IonOsrTempData)

    size_t frameSpace = sizeof(BaselineFrame) + sizeof(Value) * numLocalsAndStackVals;
    size_t ionOsrTempDataSpace = sizeof(IonOsrTempData);

    size_t totalSpace = AlignBytes(frameSpace, sizeof(Value)) +
                        AlignBytes(ionOsrTempDataSpace, sizeof(Value));

    IonOsrTempData *info = (IonOsrTempData *)cx->runtime()->getJitRuntime(cx)->allocateOsrTempData(totalSpace);
    if (!info)
        return nullptr;

    memset(info, 0, totalSpace);

    info->jitcode = jitcode;

    // Copy the BaselineFrame + local/stack Values to the buffer. Arguments and
    // |this| are not copied but left on the stack: the Baseline and Ion frame
    // share the same frame prefix and Ion won't clobber these values. Note
    // that info->baselineFrame will point to the *end* of the frame data, like
    // the frame pointer register in baseline frames.
    uint8_t *frameStart = (uint8_t *)info + AlignBytes(ionOsrTempDataSpace, sizeof(Value));
    info->baselineFrame = frameStart + frameSpace;

    memcpy(frameStart, (uint8_t *)frame - numLocalsAndStackVals * sizeof(Value), frameSpace);

    IonSpew(IonSpew_BaselineOSR, "Allocated IonOsrTempData at %p", (void *) info);
    IonSpew(IonSpew_BaselineOSR, "Jitcode is %p", info->jitcode);

    // All done.
    return info;
}

static bool
DoUseCountFallback(JSContext *cx, ICUseCount_Fallback *stub, BaselineFrame *frame,
                   IonOsrTempData **infoPtr)
{
    JS_ASSERT(infoPtr);
    *infoPtr = nullptr;

    // A TI OOM will disable TI and Ion.
    if (!jit::IsIonEnabled(cx))
        return true;

    RootedScript script(cx, frame->script());
    jsbytecode *pc = stub->icEntry()->pc(script);
    bool isLoopEntry = JSOp(*pc) == JSOP_LOOPENTRY;

    JS_ASSERT(!isLoopEntry || LoopEntryCanIonOsr(pc));

    FallbackICSpew(cx, stub, "UseCount(%d)", isLoopEntry ? int(script->pcToOffset(pc)) : int(-1));

    if (!script->canIonCompile()) {
        // TODO: ASSERT that ion-compilation-disabled checker stub doesn't exist.
        // TODO: Clear all optimized stubs.
        // TODO: Add a ion-compilation-disabled checker IC stub
        script->resetUseCount();
        return true;
    }

    JS_ASSERT(!script->isIonCompilingOffThread());

    // If Ion script exists, but PC is not at a loop entry, then Ion will be entered for
    // this script at an appropriate LOOPENTRY or the next time this function is called.
    if (script->hasIonScript() && !isLoopEntry) {
        IonSpew(IonSpew_BaselineOSR, "IonScript exists, but not at loop entry!");
        // TODO: ASSERT that a ion-script-already-exists checker stub doesn't exist.
        // TODO: Clear all optimized stubs.
        // TODO: Add a ion-script-already-exists checker stub.
        return true;
    }

    // Ensure that Ion-compiled code is available.
    IonSpew(IonSpew_BaselineOSR,
            "UseCount for %s:%d reached %d at pc %p, trying to switch to Ion!",
            script->filename(), script->lineno(), (int) script->getUseCount(), (void *) pc);
    void *jitcode = nullptr;
    if (!EnsureCanEnterIon(cx, stub, frame, script, pc, &jitcode))
        return false;

    // Jitcode should only be set here if not at loop entry.
    JS_ASSERT_IF(!isLoopEntry, !jitcode);
    if (!jitcode)
        return true;

    // Prepare the temporary heap copy of the fake InterpreterFrame and actual args list.
    IonSpew(IonSpew_BaselineOSR, "Got jitcode.  Preparing for OSR into ion.");
    IonOsrTempData *info = PrepareOsrTempData(cx, stub, frame, script, pc, jitcode);
    if (!info)
        return false;
    *infoPtr = info;

    return true;
}

typedef bool (*DoUseCountFallbackFn)(JSContext *, ICUseCount_Fallback *, BaselineFrame *frame,
                                     IonOsrTempData **infoPtr);
static const VMFunction DoUseCountFallbackInfo =
    FunctionInfo<DoUseCountFallbackFn>(DoUseCountFallback);

bool
ICUseCount_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    // enterStubFrame is going to clobber the BaselineFrameReg, save it in R0.scratchReg()
    // first.
    masm.movePtr(BaselineFrameReg, R0.scratchReg());

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, R1.scratchReg());

    Label noCompiledCode;
    // Call DoUseCountFallback to compile/check-for Ion-compiled function
    {
        // Push IonOsrTempData pointer storage
        masm.subPtr(Imm32(sizeof(void *)), BaselineStackReg);
        masm.push(BaselineStackReg);

        // Push BaselineFrame pointer.
        masm.loadBaselineFramePtr(R0.scratchReg(), R0.scratchReg());
        masm.push(R0.scratchReg());

        // Push stub pointer.
        masm.push(BaselineStubReg);

        if (!callVM(DoUseCountFallbackInfo, masm))
            return false;

        // Pop IonOsrTempData pointer.
        masm.pop(R0.scratchReg());

        leaveStubFrame(masm);

        // If no JitCode was found, just exit the IC.
        masm.branchPtr(Assembler::Equal, R0.scratchReg(), ImmPtr(nullptr), &noCompiledCode);
    }

    // Get a scratch register.
    GeneralRegisterSet regs(availableGeneralRegs(0));
    Register osrDataReg = R0.scratchReg();
    regs.take(osrDataReg);
    regs.takeUnchecked(OsrFrameReg);

    Register scratchReg = regs.takeAny();

    // At this point, stack looks like:
    //      +-> [...Calling-Frame...]
    //      |   [...Actual-Args/ThisV/ArgCount/Callee...]
michael@0: // | [Descriptor] michael@0: // | [Return-Addr] michael@0: // +---[Saved-FramePtr] <-- BaselineFrameReg points here. michael@0: // [...Baseline-Frame...] michael@0: michael@0: // Restore the stack pointer to point to the saved frame pointer. michael@0: masm.movePtr(BaselineFrameReg, BaselineStackReg); michael@0: michael@0: // Discard saved frame pointer, so that the return address is on top of michael@0: // the stack. michael@0: masm.pop(scratchReg); michael@0: michael@0: // Jump into Ion. michael@0: masm.loadPtr(Address(osrDataReg, offsetof(IonOsrTempData, jitcode)), scratchReg); michael@0: masm.loadPtr(Address(osrDataReg, offsetof(IonOsrTempData, baselineFrame)), OsrFrameReg); michael@0: masm.jump(scratchReg); michael@0: michael@0: // No jitcode available, do nothing. michael@0: masm.bind(&noCompiledCode); michael@0: EmitReturnFromIC(masm); michael@0: return true; michael@0: } michael@0: michael@0: // michael@0: // ICProfile_Fallback michael@0: // michael@0: michael@0: static bool michael@0: DoProfilerFallback(JSContext *cx, BaselineFrame *frame, ICProfiler_Fallback *stub) michael@0: { michael@0: RootedScript script(cx, frame->script()); michael@0: RootedFunction func(cx, frame->maybeFun()); michael@0: mozilla::DebugOnly icEntry = stub->icEntry(); michael@0: michael@0: FallbackICSpew(cx, stub, "Profiler"); michael@0: michael@0: SPSProfiler *profiler = &cx->runtime()->spsProfiler; michael@0: michael@0: // Manually enter SPS this time. michael@0: JS_ASSERT(profiler->enabled()); michael@0: if (!cx->runtime()->spsProfiler.enter(script, func)) michael@0: return false; michael@0: frame->setPushedSPSFrame(); michael@0: michael@0: // Unlink any existing PushFunction stub (which may hold stale 'const char *' to michael@0: // the profile string. michael@0: JS_ASSERT_IF(icEntry->firstStub() != stub, michael@0: icEntry->firstStub()->isProfiler_PushFunction() && michael@0: icEntry->firstStub()->next() == stub); michael@0: stub->unlinkStubsWithKind(cx, ICStub::Profiler_PushFunction); michael@0: JS_ASSERT(icEntry->firstStub() == stub); michael@0: michael@0: // Generate the string to use to identify this stack frame. michael@0: const char *string = profiler->profileString(script, func); michael@0: if (string == nullptr) michael@0: return false; michael@0: michael@0: IonSpew(IonSpew_BaselineIC, " Generating Profiler_PushFunction stub for %s:%d", michael@0: script->filename(), script->lineno()); michael@0: michael@0: // Create a new optimized stub. michael@0: ICProfiler_PushFunction::Compiler compiler(cx, string, script); michael@0: ICStub *optStub = compiler.getStub(compiler.getStubSpace(script)); michael@0: if (!optStub) michael@0: return false; michael@0: stub->addNewStub(optStub); michael@0: michael@0: return true; michael@0: } michael@0: michael@0: typedef bool (*DoProfilerFallbackFn)(JSContext *, BaselineFrame *frame, ICProfiler_Fallback *); michael@0: static const VMFunction DoProfilerFallbackInfo = michael@0: FunctionInfo(DoProfilerFallback); michael@0: michael@0: bool michael@0: ICProfiler_Fallback::Compiler::generateStubCode(MacroAssembler &masm) michael@0: { michael@0: EmitRestoreTailCallReg(masm); michael@0: michael@0: masm.push(BaselineStubReg); // Push stub. michael@0: masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg()); // Push frame. 

    return tailCallVM(DoProfilerFallbackInfo, masm);
}

bool
ICProfiler_PushFunction::Compiler::generateStubCode(MacroAssembler &masm)
{

    Register scratch = R0.scratchReg();
    Register scratch2 = R1.scratchReg();

    // Profiling should be enabled if we ever reach here.
#ifdef DEBUG
    Label spsEnabled;
    uint32_t *enabledAddr = cx->runtime()->spsProfiler.addressOfEnabled();
    masm.branch32(Assembler::NotEqual, AbsoluteAddress(enabledAddr), Imm32(0), &spsEnabled);
    masm.assumeUnreachable("Profiling should have been enabled.");
    masm.bind(&spsEnabled);
#endif

    // Push SPS entry.
    masm.spsPushFrame(&cx->runtime()->spsProfiler,
                      Address(BaselineStubReg, ICProfiler_PushFunction::offsetOfStr()),
                      Address(BaselineStubReg, ICProfiler_PushFunction::offsetOfScript()),
                      scratch,
                      scratch2);

    // Mark frame as having profiler entry pushed.
    Address flagsOffset(BaselineFrameReg, BaselineFrame::reverseOffsetOfFlags());
    masm.or32(Imm32(BaselineFrame::HAS_PUSHED_SPS_FRAME), flagsOffset);

    EmitReturnFromIC(masm);

    return true;
}

//
// TypeMonitor_Fallback
//

bool
ICTypeMonitor_Fallback::addMonitorStubForValue(JSContext *cx, JSScript *script, HandleValue val)
{
    bool wasDetachedMonitorChain = lastMonitorStubPtrAddr_ == nullptr;
    JS_ASSERT_IF(wasDetachedMonitorChain, numOptimizedMonitorStubs_ == 0);

    if (numOptimizedMonitorStubs_ >= MAX_OPTIMIZED_STUBS) {
        // TODO: if the TypeSet becomes unknown or has the AnyObject type,
        // replace stubs with a single stub to handle these.
        return true;
    }

    if (val.isPrimitive()) {
        JS_ASSERT(!val.isMagic());
        JSValueType type = val.isDouble() ? JSVAL_TYPE_DOUBLE : val.extractNonDoubleType();

        // Check for existing TypeMonitor stub.
        ICTypeMonitor_PrimitiveSet *existingStub = nullptr;
        for (ICStubConstIterator iter = firstMonitorStub(); !iter.atEnd(); iter++) {
            if (iter->isTypeMonitor_PrimitiveSet()) {
                existingStub = iter->toTypeMonitor_PrimitiveSet();
                if (existingStub->containsType(type))
                    return true;
            }
        }

        ICTypeMonitor_PrimitiveSet::Compiler compiler(cx, existingStub, type);
        ICStub *stub = existingStub ? compiler.updateStub()
                                    : compiler.getStub(compiler.getStubSpace(script));
        if (!stub) {
            js_ReportOutOfMemory(cx);
            return false;
        }

        IonSpew(IonSpew_BaselineIC, "  %s TypeMonitor stub %p for primitive type %d",
                existingStub ? "Modified existing" : "Created new", stub, type);

        if (!existingStub) {
            JS_ASSERT(!hasStub(TypeMonitor_PrimitiveSet));
            addOptimizedMonitorStub(stub);
        }

    } else if (val.toObject().hasSingletonType()) {
        RootedObject obj(cx, &val.toObject());

        // Check for existing TypeMonitor stub.
        for (ICStubConstIterator iter = firstMonitorStub(); !iter.atEnd(); iter++) {
            if (iter->isTypeMonitor_SingleObject() &&
                iter->toTypeMonitor_SingleObject()->object() == obj)
            {
                return true;
            }
        }

        ICTypeMonitor_SingleObject::Compiler compiler(cx, obj);
        ICStub *stub = compiler.getStub(compiler.getStubSpace(script));
        if (!stub) {
            js_ReportOutOfMemory(cx);
            return false;
        }

        IonSpew(IonSpew_BaselineIC, "  Added TypeMonitor stub %p for singleton %p",
                stub, obj.get());

        addOptimizedMonitorStub(stub);

    } else {
        RootedTypeObject type(cx, val.toObject().type());

        // Check for existing TypeMonitor stub.
        for (ICStubConstIterator iter = firstMonitorStub(); !iter.atEnd(); iter++) {
            if (iter->isTypeMonitor_TypeObject() &&
                iter->toTypeMonitor_TypeObject()->type() == type)
            {
                return true;
            }
        }

        ICTypeMonitor_TypeObject::Compiler compiler(cx, type);
        ICStub *stub = compiler.getStub(compiler.getStubSpace(script));
        if (!stub) {
            js_ReportOutOfMemory(cx);
            return false;
        }

        IonSpew(IonSpew_BaselineIC, "  Added TypeMonitor stub %p for TypeObject %p",
                stub, type.get());

        addOptimizedMonitorStub(stub);
    }

    bool firstMonitorStubAdded = wasDetachedMonitorChain && (numOptimizedMonitorStubs_ > 0);

    if (firstMonitorStubAdded) {
        // Was an empty monitor chain before, but a new stub was added. This is the
        // only time that any main stubs' firstMonitorStub fields need to be updated to
        // refer to the newly added monitor stub.
        ICStub *firstStub = mainFallbackStub_->icEntry()->firstStub();
        for (ICStubConstIterator iter = firstStub; !iter.atEnd(); iter++) {
            // Non-monitored stubs are used if the result always has the same type,
            // e.g. a StringLength stub will always return int32.
            if (!iter->isMonitored())
                continue;

            // Since we just added the first optimized monitoring stub, any
            // existing main stub's |firstMonitorStub| MUST be pointing to the fallback
            // monitor stub (i.e. this stub).
            JS_ASSERT(iter->toMonitoredStub()->firstMonitorStub() == this);
            iter->toMonitoredStub()->updateFirstMonitorStub(firstMonitorStub_);
        }
    }

    return true;
}

static bool
DoTypeMonitorFallback(JSContext *cx, BaselineFrame *frame, ICTypeMonitor_Fallback *stub,
                      HandleValue value, MutableHandleValue res)
{
    RootedScript script(cx, frame->script());
    jsbytecode *pc = stub->icEntry()->pc(script);
    TypeFallbackICSpew(cx, stub, "TypeMonitor");

    uint32_t argument;
    if (stub->monitorsThis()) {
        JS_ASSERT(pc == script->code());
        types::TypeScript::SetThis(cx, script, value);
    } else if (stub->monitorsArgument(&argument)) {
        JS_ASSERT(pc == script->code());
        types::TypeScript::SetArgument(cx, script, argument, value);
    } else {
        types::TypeScript::Monitor(cx, script, pc, value);
    }

    if (!stub->addMonitorStubForValue(cx, script, value))
        return false;

    // Copy input value to res.
    res.set(value);
    return true;
}

typedef bool (*DoTypeMonitorFallbackFn)(JSContext *, BaselineFrame *, ICTypeMonitor_Fallback *,
                                        HandleValue, MutableHandleValue);
static const VMFunction DoTypeMonitorFallbackInfo =
    FunctionInfo<DoTypeMonitorFallbackFn>(DoTypeMonitorFallback);

bool
ICTypeMonitor_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(R0 == JSReturnOperand);

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    masm.pushValue(R0);
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(DoTypeMonitorFallbackInfo, masm);
}

bool
ICTypeMonitor_PrimitiveSet::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label success;
    if ((flags_ & TypeToFlag(JSVAL_TYPE_INT32)) && !(flags_ & TypeToFlag(JSVAL_TYPE_DOUBLE)))
        masm.branchTestInt32(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_DOUBLE))
        masm.branchTestNumber(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_UNDEFINED))
        masm.branchTestUndefined(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_BOOLEAN))
        masm.branchTestBoolean(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_STRING))
        masm.branchTestString(Assembler::Equal, R0, &success);

    // Currently, we will never generate primitive stub checks for object.  However,
    // when we do get to the point where we want to collapse our monitor chains of
    // objects and singletons down (when they get too long) to a generic "any object"
    // in coordination with the typeset doing the same thing, this will need to
    // be re-enabled.

bool
ICTypeMonitor_PrimitiveSet::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label success;
    if ((flags_ & TypeToFlag(JSVAL_TYPE_INT32)) && !(flags_ & TypeToFlag(JSVAL_TYPE_DOUBLE)))
        masm.branchTestInt32(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_DOUBLE))
        masm.branchTestNumber(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_UNDEFINED))
        masm.branchTestUndefined(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_BOOLEAN))
        masm.branchTestBoolean(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_STRING))
        masm.branchTestString(Assembler::Equal, R0, &success);

    // Currently, we will never generate primitive stub checks for object.  However,
    // when we do get to the point where we want to collapse our monitor chains of
    // objects and singletons down (when they get too long) to a generic "any object"
    // in coordination with the typeset doing the same thing, this will need to
    // be re-enabled.
    /*
    if (flags_ & TypeToFlag(JSVAL_TYPE_OBJECT))
        masm.branchTestObject(Assembler::Equal, R0, &success);
    */
    JS_ASSERT(!(flags_ & TypeToFlag(JSVAL_TYPE_OBJECT)));

    if (flags_ & TypeToFlag(JSVAL_TYPE_NULL))
        masm.branchTestNull(Assembler::Equal, R0, &success);

    EmitStubGuardFailure(masm);

    masm.bind(&success);
    EmitReturnFromIC(masm);
    return true;
}

bool
ICTypeMonitor_SingleObject::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    // Guard on the object's identity.
    Register obj = masm.extractObject(R0, ExtractTemp0);
    Address expectedObject(BaselineStubReg, ICTypeMonitor_SingleObject::offsetOfObject());
    masm.branchPtr(Assembler::NotEqual, expectedObject, obj, &failure);

    EmitReturnFromIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}

bool
ICTypeMonitor_TypeObject::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    // Guard on the object's TypeObject.
    Register obj = masm.extractObject(R0, ExtractTemp0);
    masm.loadPtr(Address(obj, JSObject::offsetOfType()), R1.scratchReg());

    Address expectedType(BaselineStubReg, ICTypeMonitor_TypeObject::offsetOfType());
    masm.branchPtr(Assembler::NotEqual, expectedType, R1.scratchReg(), &failure);

    EmitReturnFromIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
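
// Update stubs are the write-side counterpart of the monitor stubs above: a
// SetProp/SetElem stub runs its update chain on the value being stored before
// committing the store. When every guard in the chain misses, the stub falls
// back to DoTypeUpdateFallback (further below), which records the new type and
// extends the chain here.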

bool
ICUpdatedStub::addUpdateStubForValue(JSContext *cx, HandleScript script, HandleObject obj,
                                     HandleId id, HandleValue val)
{
    if (numOptimizedStubs_ >= MAX_OPTIMIZED_STUBS) {
        // TODO: if the TypeSet becomes unknown or has the AnyObject type,
        // replace stubs with a single stub to handle these.
        return true;
    }

    types::EnsureTrackPropertyTypes(cx, obj, id);

    // Make sure that undefined values are explicitly included in the property
    // types for an object if generating a stub to write an undefined value.
    if (val.isUndefined() && types::CanHaveEmptyPropertyTypesForOwnProperty(obj))
        types::AddTypePropertyId(cx, obj, id, val);

    if (val.isPrimitive()) {
        JSValueType type = val.isDouble() ? JSVAL_TYPE_DOUBLE : val.extractNonDoubleType();

        // Check for existing TypeUpdate stub.
        ICTypeUpdate_PrimitiveSet *existingStub = nullptr;
        for (ICStubConstIterator iter = firstUpdateStub_; !iter.atEnd(); iter++) {
            if (iter->isTypeUpdate_PrimitiveSet()) {
                existingStub = iter->toTypeUpdate_PrimitiveSet();
                if (existingStub->containsType(type))
                    return true;
            }
        }

        ICTypeUpdate_PrimitiveSet::Compiler compiler(cx, existingStub, type);
        ICStub *stub = existingStub ? compiler.updateStub()
                                    : compiler.getStub(compiler.getStubSpace(script));
        if (!stub)
            return false;
        if (!existingStub) {
            JS_ASSERT(!hasTypeUpdateStub(TypeUpdate_PrimitiveSet));
            addOptimizedUpdateStub(stub);
        }

        IonSpew(IonSpew_BaselineIC, "  %s TypeUpdate stub %p for primitive type %d",
                existingStub ? "Modified existing" : "Created new", stub, type);

    } else if (val.toObject().hasSingletonType()) {
        RootedObject obj(cx, &val.toObject());

        // Check for existing TypeUpdate stub.
        for (ICStubConstIterator iter = firstUpdateStub_; !iter.atEnd(); iter++) {
            if (iter->isTypeUpdate_SingleObject() &&
                iter->toTypeUpdate_SingleObject()->object() == obj)
            {
                return true;
            }
        }

        ICTypeUpdate_SingleObject::Compiler compiler(cx, obj);
        ICStub *stub = compiler.getStub(compiler.getStubSpace(script));
        if (!stub)
            return false;

        IonSpew(IonSpew_BaselineIC, "  Added TypeUpdate stub %p for singleton %p", stub, obj.get());

        addOptimizedUpdateStub(stub);

    } else {
        RootedTypeObject type(cx, val.toObject().type());

        // Check for existing TypeUpdate stub.
        for (ICStubConstIterator iter = firstUpdateStub_; !iter.atEnd(); iter++) {
            if (iter->isTypeUpdate_TypeObject() &&
                iter->toTypeUpdate_TypeObject()->type() == type)
            {
                return true;
            }
        }

        ICTypeUpdate_TypeObject::Compiler compiler(cx, type);
        ICStub *stub = compiler.getStub(compiler.getStubSpace(script));
        if (!stub)
            return false;

        IonSpew(IonSpew_BaselineIC, "  Added TypeUpdate stub %p for TypeObject %p",
                stub, type.get());

        addOptimizedUpdateStub(stub);
    }

    return true;
}

//
// TypeUpdate_Fallback
//
static bool
DoTypeUpdateFallback(JSContext *cx, BaselineFrame *frame, ICUpdatedStub *stub, HandleValue objval,
                     HandleValue value)
{
    FallbackICSpew(cx, stub->getChainFallback(), "TypeUpdate(%s)",
                   ICStub::KindString(stub->kind()));

    RootedScript script(cx, frame->script());
    RootedObject obj(cx, &objval.toObject());
    RootedId id(cx);

    switch(stub->kind()) {
      case ICStub::SetElem_Dense:
      case ICStub::SetElem_DenseAdd: {
        JS_ASSERT(obj->isNative());
        id = JSID_VOID;
        types::AddTypePropertyId(cx, obj, id, value);
        break;
      }
      case ICStub::SetProp_Native:
      case ICStub::SetProp_NativeAdd: {
        JS_ASSERT(obj->isNative());
        jsbytecode *pc = stub->getChainFallback()->icEntry()->pc(script);
        if (*pc == JSOP_SETALIASEDVAR)
            id = NameToId(ScopeCoordinateName(cx->runtime()->scopeCoordinateNameCache, script, pc));
        else
            id = NameToId(script->getName(pc));
        types::AddTypePropertyId(cx, obj, id, value);
        break;
      }
      default:
        MOZ_ASSUME_UNREACHABLE("Invalid stub");
    }

    return stub->addUpdateStubForValue(cx, script, obj, id, value);
}
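
// TypeUpdate stubs use a different result convention from other ICs: rather
// than boxing a result in R0, each stub reports whether its guard matched in
// R1.scratchReg(). The terminal fallback stub below loads false (0), telling
// the calling stub to take the DoTypeUpdateFallback VM path; the optimized
// stubs load true (1) when their guard matched.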

typedef bool (*DoTypeUpdateFallbackFn)(JSContext *, BaselineFrame *, ICUpdatedStub *, HandleValue,
                                       HandleValue);
const VMFunction DoTypeUpdateFallbackInfo =
    FunctionInfo<DoTypeUpdateFallbackFn>(DoTypeUpdateFallback);

bool
ICTypeUpdate_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    // Just store false into R1.scratchReg() and return.
    masm.move32(Imm32(0), R1.scratchReg());
    EmitReturnFromIC(masm);
    return true;
}

bool
ICTypeUpdate_PrimitiveSet::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label success;
    if ((flags_ & TypeToFlag(JSVAL_TYPE_INT32)) && !(flags_ & TypeToFlag(JSVAL_TYPE_DOUBLE)))
        masm.branchTestInt32(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_DOUBLE))
        masm.branchTestNumber(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_UNDEFINED))
        masm.branchTestUndefined(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_BOOLEAN))
        masm.branchTestBoolean(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_STRING))
        masm.branchTestString(Assembler::Equal, R0, &success);

    // Currently, we will never generate primitive stub checks for object.  However,
    // when we do get to the point where we want to collapse our monitor chains of
    // objects and singletons down (when they get too long) to a generic "any object"
    // in coordination with the typeset doing the same thing, this will need to
    // be re-enabled.
    /*
    if (flags_ & TypeToFlag(JSVAL_TYPE_OBJECT))
        masm.branchTestObject(Assembler::Equal, R0, &success);
    */
    JS_ASSERT(!(flags_ & TypeToFlag(JSVAL_TYPE_OBJECT)));

    if (flags_ & TypeToFlag(JSVAL_TYPE_NULL))
        masm.branchTestNull(Assembler::Equal, R0, &success);

    EmitStubGuardFailure(masm);

    // Type matches, load true into R1.scratchReg() and return.
    masm.bind(&success);
    masm.mov(ImmWord(1), R1.scratchReg());
    EmitReturnFromIC(masm);

    return true;
}

bool
ICTypeUpdate_SingleObject::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    // Guard on the object's identity.
    Register obj = masm.extractObject(R0, R1.scratchReg());
    Address expectedObject(BaselineStubReg, ICTypeUpdate_SingleObject::offsetOfObject());
    masm.branchPtr(Assembler::NotEqual, expectedObject, obj, &failure);

    // Identity matches, load true into R1.scratchReg() and return.
    masm.mov(ImmWord(1), R1.scratchReg());
    EmitReturnFromIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}

bool
ICTypeUpdate_TypeObject::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    // Guard on the object's TypeObject.
    Register obj = masm.extractObject(R0, R1.scratchReg());
    masm.loadPtr(Address(obj, JSObject::offsetOfType()), R1.scratchReg());

    Address expectedType(BaselineStubReg, ICTypeUpdate_TypeObject::offsetOfType());
    masm.branchPtr(Assembler::NotEqual, expectedType, R1.scratchReg(), &failure);

    // Type matches, load true into R1.scratchReg() and return.
    masm.mov(ImmWord(1), R1.scratchReg());
    EmitReturnFromIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}

//
// VM function to help call native getters.
//

static bool
DoCallNativeGetter(JSContext *cx, HandleFunction callee, HandleObject obj,
                   MutableHandleValue result)
{
    JS_ASSERT(callee->isNative());
    JSNative natfun = callee->native();

    JS::AutoValueArray<2> vp(cx);
    vp[0].setObject(*callee.get());
    vp[1].setObject(*obj.get());

    if (!natfun(cx, 0, vp.begin()))
        return false;

    result.set(vp[0]);
    return true;
}

typedef bool (*DoCallNativeGetterFn)(JSContext *, HandleFunction, HandleObject, MutableHandleValue);
static const VMFunction DoCallNativeGetterInfo =
    FunctionInfo<DoCallNativeGetterFn>(DoCallNativeGetter);

//
// This_Fallback
//

static bool
DoThisFallback(JSContext *cx, ICThis_Fallback *stub, HandleValue thisv, MutableHandleValue ret)
{
    FallbackICSpew(cx, stub, "This");

    JSObject *thisObj = BoxNonStrictThis(cx, thisv);
    if (!thisObj)
        return false;

    ret.setObject(*thisObj);
    return true;
}

typedef bool (*DoThisFallbackFn)(JSContext *, ICThis_Fallback *, HandleValue, MutableHandleValue);
static const VMFunction DoThisFallbackInfo = FunctionInfo<DoThisFallbackFn>(DoThisFallback);

bool
ICThis_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(R0 == JSReturnOperand);

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    masm.pushValue(R0);
    masm.push(BaselineStubReg);

    return tailCallVM(DoThisFallbackInfo, masm);
}
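
// The two allocation fallbacks below never attach optimized stubs; every hit
// tail-calls into the VM. As a rough illustration (hypothetical script), an
// array literal like |var a = [1, 2, 3]| would land in NewArray_Fallback with
// length and type operands, while an object literal like |var o = {}| would
// land in NewObject_Fallback and allocate from the stub's template object.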

//
// NewArray_Fallback
//

static bool
DoNewArray(JSContext *cx, ICNewArray_Fallback *stub, uint32_t length,
           HandleTypeObject type, MutableHandleValue res)
{
    FallbackICSpew(cx, stub, "NewArray");

    JSObject *obj = NewInitArray(cx, length, type);
    if (!obj)
        return false;

    res.setObject(*obj);
    return true;
}

typedef bool(*DoNewArrayFn)(JSContext *, ICNewArray_Fallback *, uint32_t, HandleTypeObject,
                            MutableHandleValue);
static const VMFunction DoNewArrayInfo = FunctionInfo<DoNewArrayFn>(DoNewArray);

bool
ICNewArray_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    masm.push(R1.scratchReg()); // type
    masm.push(R0.scratchReg()); // length
    masm.push(BaselineStubReg); // stub.

    return tailCallVM(DoNewArrayInfo, masm);
}

//
// NewObject_Fallback
//

static bool
DoNewObject(JSContext *cx, ICNewObject_Fallback *stub, MutableHandleValue res)
{
    FallbackICSpew(cx, stub, "NewObject");

    RootedObject templateObject(cx, stub->templateObject());
    JSObject *obj = NewInitObject(cx, templateObject);
    if (!obj)
        return false;

    res.setObject(*obj);
    return true;
}

typedef bool(*DoNewObjectFn)(JSContext *, ICNewObject_Fallback *, MutableHandleValue);
static const VMFunction DoNewObjectInfo = FunctionInfo<DoNewObjectFn>(DoNewObject);

bool
ICNewObject_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    masm.push(BaselineStubReg); // stub.

    return tailCallVM(DoNewObjectInfo, masm);
}
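
// The compare fallback follows the standard baseline recipe: compute the
// result with the generic VM helpers first, then inspect the operand types
// that were just seen and attach a specialized stub so the next hit stays in
// jitcode. E.g. (hypothetical) an |x < y| site with int32 operands attaches
// ICCompare_Int32; if doubles show up later, the int32 stubs are unlinked in
// favor of ICCompare_Double. Note also that the fallback's own stub code
// pushes R0/R1 twice: once to re-sync the interpreter stack for the
// expression decompiler, once as the actual VM call arguments, with the sync
// copies popped again via PopValues(2).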

//
// Compare_Fallback
//

static bool
DoCompareFallback(JSContext *cx, BaselineFrame *frame, ICCompare_Fallback *stub_, HandleValue lhs,
                  HandleValue rhs, MutableHandleValue ret)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICCompare_Fallback *> stub(frame, stub_);

    jsbytecode *pc = stub->icEntry()->pc(frame->script());
    JSOp op = JSOp(*pc);

    FallbackICSpew(cx, stub, "Compare(%s)", js_CodeName[op]);

    // Case operations in a CONDSWITCH are performing strict equality.
    if (op == JSOP_CASE)
        op = JSOP_STRICTEQ;

    // Don't pass lhs/rhs directly, we need the original values when
    // generating stubs.
    RootedValue lhsCopy(cx, lhs);
    RootedValue rhsCopy(cx, rhs);

    // Perform the compare operation.
    bool out;
    switch(op) {
      case JSOP_LT:
        if (!LessThan(cx, &lhsCopy, &rhsCopy, &out))
            return false;
        break;
      case JSOP_LE:
        if (!LessThanOrEqual(cx, &lhsCopy, &rhsCopy, &out))
            return false;
        break;
      case JSOP_GT:
        if (!GreaterThan(cx, &lhsCopy, &rhsCopy, &out))
            return false;
        break;
      case JSOP_GE:
        if (!GreaterThanOrEqual(cx, &lhsCopy, &rhsCopy, &out))
            return false;
        break;
      case JSOP_EQ:
        if (!LooselyEqual<true>(cx, &lhsCopy, &rhsCopy, &out))
            return false;
        break;
      case JSOP_NE:
        if (!LooselyEqual<false>(cx, &lhsCopy, &rhsCopy, &out))
            return false;
        break;
      case JSOP_STRICTEQ:
        if (!StrictlyEqual<true>(cx, &lhsCopy, &rhsCopy, &out))
            return false;
        break;
      case JSOP_STRICTNE:
        if (!StrictlyEqual<false>(cx, &lhsCopy, &rhsCopy, &out))
            return false;
        break;
      default:
        JS_ASSERT(!"Unhandled baseline compare op");
        return false;
    }

    ret.setBoolean(out);

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    // Check to see if a new stub should be generated.
    if (stub->numOptimizedStubs() >= ICCompare_Fallback::MAX_OPTIMIZED_STUBS) {
        // TODO: Discard all stubs in this IC and replace with inert megamorphic stub.
        // But for now we just bail.
        return true;
    }

    JSScript *script = frame->script();

    // Try to generate new stubs.
    if (lhs.isInt32() && rhs.isInt32()) {
        IonSpew(IonSpew_BaselineIC, "  Generating %s(Int32, Int32) stub", js_CodeName[op]);
        ICCompare_Int32::Compiler compiler(cx, op);
        ICStub *int32Stub = compiler.getStub(compiler.getStubSpace(script));
        if (!int32Stub)
            return false;

        stub->addNewStub(int32Stub);
        return true;
    }

    if (!cx->runtime()->jitSupportsFloatingPoint && (lhs.isNumber() || rhs.isNumber()))
        return true;

    if (lhs.isNumber() && rhs.isNumber()) {
        IonSpew(IonSpew_BaselineIC, "  Generating %s(Number, Number) stub", js_CodeName[op]);

        // Unlink int32 stubs, it's faster to always use the double stub.
        stub->unlinkStubsWithKind(cx, ICStub::Compare_Int32);

        ICCompare_Double::Compiler compiler(cx, op);
        ICStub *doubleStub = compiler.getStub(compiler.getStubSpace(script));
        if (!doubleStub)
            return false;

        stub->addNewStub(doubleStub);
        return true;
    }

    if ((lhs.isNumber() && rhs.isUndefined()) ||
        (lhs.isUndefined() && rhs.isNumber()))
    {
        IonSpew(IonSpew_BaselineIC, "  Generating %s(%s, %s) stub", js_CodeName[op],
                rhs.isUndefined() ? "Number" : "Undefined",
                rhs.isUndefined() ? "Undefined" : "Number");
        ICCompare_NumberWithUndefined::Compiler compiler(cx, op, lhs.isUndefined());
        ICStub *doubleStub = compiler.getStub(compiler.getStubSpace(script));
        if (!doubleStub)
            return false;

        stub->addNewStub(doubleStub);
        return true;
    }

    if (lhs.isBoolean() && rhs.isBoolean()) {
        IonSpew(IonSpew_BaselineIC, "  Generating %s(Boolean, Boolean) stub", js_CodeName[op]);
        ICCompare_Boolean::Compiler compiler(cx, op);
        ICStub *booleanStub = compiler.getStub(compiler.getStubSpace(script));
        if (!booleanStub)
            return false;

        stub->addNewStub(booleanStub);
        return true;
    }

    if ((lhs.isBoolean() && rhs.isInt32()) || (lhs.isInt32() && rhs.isBoolean())) {
        IonSpew(IonSpew_BaselineIC, "  Generating %s(%s, %s) stub", js_CodeName[op],
                rhs.isInt32() ? "Boolean" : "Int32",
                rhs.isInt32() ? "Int32" : "Boolean");
        ICCompare_Int32WithBoolean::Compiler compiler(cx, op, lhs.isInt32());
        ICStub *optStub = compiler.getStub(compiler.getStubSpace(script));
        if (!optStub)
            return false;

        stub->addNewStub(optStub);
        return true;
    }

    if (IsEqualityOp(op)) {
        if (lhs.isString() && rhs.isString() && !stub->hasStub(ICStub::Compare_String)) {
            IonSpew(IonSpew_BaselineIC, "  Generating %s(String, String) stub", js_CodeName[op]);
            ICCompare_String::Compiler compiler(cx, op);
            ICStub *stringStub = compiler.getStub(compiler.getStubSpace(script));
            if (!stringStub)
                return false;

            stub->addNewStub(stringStub);
            return true;
        }

        if (lhs.isObject() && rhs.isObject()) {
            JS_ASSERT(!stub->hasStub(ICStub::Compare_Object));
            IonSpew(IonSpew_BaselineIC, "  Generating %s(Object, Object) stub", js_CodeName[op]);
            ICCompare_Object::Compiler compiler(cx, op);
            ICStub *objectStub = compiler.getStub(compiler.getStubSpace(script));
            if (!objectStub)
                return false;

            stub->addNewStub(objectStub);
            return true;
        }

        if ((lhs.isObject() || lhs.isNull() || lhs.isUndefined()) &&
            (rhs.isObject() || rhs.isNull() || rhs.isUndefined()) &&
            !stub->hasStub(ICStub::Compare_ObjectWithUndefined))
        {
            IonSpew(IonSpew_BaselineIC, "  Generating %s(Obj/Null/Undef, Obj/Null/Undef) stub",
                    js_CodeName[op]);
            bool lhsIsUndefined = lhs.isNull() || lhs.isUndefined();
            bool compareWithNull = lhs.isNull() || rhs.isNull();
            ICCompare_ObjectWithUndefined::Compiler compiler(cx, op,
                                                             lhsIsUndefined, compareWithNull);
            ICStub *objectStub = compiler.getStub(compiler.getStubSpace(script));
            if (!objectStub)
                return false;

            stub->addNewStub(objectStub);
            return true;
        }
    }

    return true;
}

typedef bool (*DoCompareFallbackFn)(JSContext *, BaselineFrame *, ICCompare_Fallback *,
                                    HandleValue, HandleValue, MutableHandleValue);
static const VMFunction DoCompareFallbackInfo =
    FunctionInfo<DoCompareFallbackFn>(DoCompareFallback, PopValues(2));

bool
ICCompare_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(R0 == JSReturnOperand);

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    // Ensure stack is fully synced for the expression decompiler.
    masm.pushValue(R0);
    masm.pushValue(R1);

    // Push arguments.
    masm.pushValue(R1);
    masm.pushValue(R0);
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
    return tailCallVM(DoCompareFallbackInfo, masm);
}

//
// Compare_String
//

bool
ICCompare_String::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestString(Assembler::NotEqual, R0, &failure);
    masm.branchTestString(Assembler::NotEqual, R1, &failure);

    JS_ASSERT(IsEqualityOp(op));

    Register left = masm.extractString(R0, ExtractTemp0);
    Register right = masm.extractString(R1, ExtractTemp1);

    GeneralRegisterSet regs(availableGeneralRegs(2));
    Register scratchReg = regs.takeAny();
    // x86 doesn't have the luxury of a second scratch.
    Register scratchReg2;
    if (regs.empty()) {
        scratchReg2 = BaselineStubReg;
        masm.push(BaselineStubReg);
    } else {
        scratchReg2 = regs.takeAny();
    }
    JS_ASSERT(scratchReg2 != scratchReg);

    Label inlineCompareFailed;
    masm.compareStrings(op, left, right, scratchReg2, scratchReg, &inlineCompareFailed);
    masm.tagValue(JSVAL_TYPE_BOOLEAN, scratchReg2, R0);
    if (scratchReg2 == BaselineStubReg)
        masm.pop(BaselineStubReg);
    EmitReturnFromIC(masm);

    masm.bind(&inlineCompareFailed);
    if (scratchReg2 == BaselineStubReg)
        masm.pop(BaselineStubReg);
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}

//
// Compare_Boolean
//

bool
ICCompare_Boolean::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestBoolean(Assembler::NotEqual, R0, &failure);
    masm.branchTestBoolean(Assembler::NotEqual, R1, &failure);

    Register left = masm.extractInt32(R0, ExtractTemp0);
    Register right = masm.extractInt32(R1, ExtractTemp1);

    // Compare payload regs of R0 and R1.
    Assembler::Condition cond = JSOpToCondition(op, /* signed = */true);
    masm.cmp32Set(cond, left, right, left);

    // Box the result and return
    masm.tagValue(JSVAL_TYPE_BOOLEAN, left, R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
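
// Compare_NumberWithUndefined exploits that undefined coerces to NaN for
// relational and loose-equality compares, so the answer against any number is
// a constant: false for everything except != and !==, which are always true.
// In JS terms, (1 < undefined), (1 >= undefined) and (1 == undefined) are all
// false, while (1 != undefined) is true.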

//
// Compare_NumberWithUndefined
//

bool
ICCompare_NumberWithUndefined::Compiler::generateStubCode(MacroAssembler &masm)
{
    ValueOperand numberOperand, undefinedOperand;
    if (lhsIsUndefined) {
        numberOperand = R1;
        undefinedOperand = R0;
    } else {
        numberOperand = R0;
        undefinedOperand = R1;
    }

    Label failure;
    masm.branchTestNumber(Assembler::NotEqual, numberOperand, &failure);
    masm.branchTestUndefined(Assembler::NotEqual, undefinedOperand, &failure);

    // Comparing a number with undefined will always be true for NE/STRICTNE,
    // and always be false for other compare ops.
    masm.moveValue(BooleanValue(op == JSOP_NE || op == JSOP_STRICTNE), R0);

    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}

//
// Compare_Object
//

bool
ICCompare_Object::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);
    masm.branchTestObject(Assembler::NotEqual, R1, &failure);

    JS_ASSERT(IsEqualityOp(op));

    Register left = masm.extractObject(R0, ExtractTemp0);
    Register right = masm.extractObject(R1, ExtractTemp1);

    Label ifTrue;
    masm.branchPtr(JSOpToCondition(op, /* signed = */true), left, right, &ifTrue);

    masm.moveValue(BooleanValue(false), R0);
    EmitReturnFromIC(masm);

    masm.bind(&ifTrue);
    masm.moveValue(BooleanValue(true), R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}

//
// Compare_ObjectWithUndefined
//

bool
ICCompare_ObjectWithUndefined::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(IsEqualityOp(op));

    ValueOperand objectOperand, undefinedOperand;
    if (lhsIsUndefined) {
        objectOperand = R1;
        undefinedOperand = R0;
    } else {
        objectOperand = R0;
        undefinedOperand = R1;
    }

    Label failure;
    if (compareWithNull)
        masm.branchTestNull(Assembler::NotEqual, undefinedOperand, &failure);
    else
        masm.branchTestUndefined(Assembler::NotEqual, undefinedOperand, &failure);

    Label notObject;
    masm.branchTestObject(Assembler::NotEqual, objectOperand, &notObject);

    if (op == JSOP_STRICTEQ || op == JSOP_STRICTNE) {
        // obj !== undefined for all objects.
        masm.moveValue(BooleanValue(op == JSOP_STRICTNE), R0);
        EmitReturnFromIC(masm);
    } else {
        // obj != undefined only where !obj->getClass()->emulatesUndefined()
        Label emulatesUndefined;
        Register obj = masm.extractObject(objectOperand, ExtractTemp0);
        masm.loadPtr(Address(obj, JSObject::offsetOfType()), obj);
        masm.loadPtr(Address(obj, types::TypeObject::offsetOfClasp()), obj);
        masm.branchTest32(Assembler::NonZero,
                          Address(obj, Class::offsetOfFlags()),
                          Imm32(JSCLASS_EMULATES_UNDEFINED),
                          &emulatesUndefined);
        masm.moveValue(BooleanValue(op == JSOP_NE), R0);
        EmitReturnFromIC(masm);
        masm.bind(&emulatesUndefined);
        masm.moveValue(BooleanValue(op == JSOP_EQ), R0);
        EmitReturnFromIC(masm);
    }

    masm.bind(&notObject);

    // Also support null == null or undefined == undefined comparisons.
    if (compareWithNull)
        masm.branchTestNull(Assembler::NotEqual, objectOperand, &failure);
    else
        masm.branchTestUndefined(Assembler::NotEqual, objectOperand, &failure);

    masm.moveValue(BooleanValue(op == JSOP_STRICTEQ || op == JSOP_EQ), R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}

//
// Compare_Int32WithBoolean
//

bool
ICCompare_Int32WithBoolean::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    ValueOperand int32Val;
    ValueOperand boolVal;
    if (lhsIsInt32_) {
        int32Val = R0;
        boolVal = R1;
    } else {
        boolVal = R0;
        int32Val = R1;
    }
    masm.branchTestBoolean(Assembler::NotEqual, boolVal, &failure);
    masm.branchTestInt32(Assembler::NotEqual, int32Val, &failure);

    if (op_ == JSOP_STRICTEQ || op_ == JSOP_STRICTNE) {
        // Ints and booleans are never strictly equal, always strictly not equal.
        masm.moveValue(BooleanValue(op_ == JSOP_STRICTNE), R0);
        EmitReturnFromIC(masm);
    } else {
        Register boolReg = masm.extractBoolean(boolVal, ExtractTemp0);
        Register int32Reg = masm.extractInt32(int32Val, ExtractTemp1);

        // Compare payload regs of R0 and R1.
        Assembler::Condition cond = JSOpToCondition(op_, /* signed = */true);
        masm.cmp32Set(cond, (lhsIsInt32_ ? int32Reg : boolReg),
                      (lhsIsInt32_ ? boolReg : int32Reg), R0.scratchReg());

        // Box the result and return
        masm.tagValue(JSVAL_TYPE_BOOLEAN, R0.scratchReg(), R0);
        EmitReturnFromIC(masm);
    }

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
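
// The ToBool ICs below specialize truthiness tests on whichever operand type
// a site actually sees; e.g. (hypothetical) a |while (n)| loop over an int32
// counter would attach ICToBool_Int32. Objects are the only case that may
// need to leave jitcode, since a class can emulate undefined (see
// ICToBool_Object's slow path further below).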

//
// ToBool_Fallback
//

static bool
DoToBoolFallback(JSContext *cx, BaselineFrame *frame, ICToBool_Fallback *stub, HandleValue arg,
                 MutableHandleValue ret)
{
    FallbackICSpew(cx, stub, "ToBool");

    bool cond = ToBoolean(arg);
    ret.setBoolean(cond);

    // Check to see if a new stub should be generated.
    if (stub->numOptimizedStubs() >= ICToBool_Fallback::MAX_OPTIMIZED_STUBS) {
        // TODO: Discard all stubs in this IC and replace with inert megamorphic stub.
        // But for now we just bail.
        return true;
    }

    JS_ASSERT(!arg.isBoolean());

    JSScript *script = frame->script();

    // Try to generate new stubs.
    if (arg.isInt32()) {
        IonSpew(IonSpew_BaselineIC, "  Generating ToBool(Int32) stub.");
        ICToBool_Int32::Compiler compiler(cx);
        ICStub *int32Stub = compiler.getStub(compiler.getStubSpace(script));
        if (!int32Stub)
            return false;

        stub->addNewStub(int32Stub);
        return true;
    }

    if (arg.isDouble() && cx->runtime()->jitSupportsFloatingPoint) {
        IonSpew(IonSpew_BaselineIC, "  Generating ToBool(Double) stub.");
        ICToBool_Double::Compiler compiler(cx);
        ICStub *doubleStub = compiler.getStub(compiler.getStubSpace(script));
        if (!doubleStub)
            return false;

        stub->addNewStub(doubleStub);
        return true;
    }

    if (arg.isString()) {
        IonSpew(IonSpew_BaselineIC, "  Generating ToBool(String) stub");
        ICToBool_String::Compiler compiler(cx);
        ICStub *stringStub = compiler.getStub(compiler.getStubSpace(script));
        if (!stringStub)
            return false;

        stub->addNewStub(stringStub);
        return true;
    }

    if (arg.isNull() || arg.isUndefined()) {
        ICToBool_NullUndefined::Compiler compiler(cx);
        ICStub *nilStub = compiler.getStub(compiler.getStubSpace(script));
        if (!nilStub)
            return false;

        stub->addNewStub(nilStub);
        return true;
    }

    if (arg.isObject()) {
        IonSpew(IonSpew_BaselineIC, "  Generating ToBool(Object) stub.");
        ICToBool_Object::Compiler compiler(cx);
        ICStub *objStub = compiler.getStub(compiler.getStubSpace(script));
        if (!objStub)
            return false;

        stub->addNewStub(objStub);
        return true;
    }

    return true;
}

typedef bool (*pf)(JSContext *, BaselineFrame *, ICToBool_Fallback *, HandleValue,
                   MutableHandleValue);
static const VMFunction fun = FunctionInfo<pf>(DoToBoolFallback);

bool
ICToBool_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(R0 == JSReturnOperand);

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    // Push arguments.
    masm.pushValue(R0);
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(fun, masm);
}

//
// ToBool_Int32
//

bool
ICToBool_Int32::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestInt32(Assembler::NotEqual, R0, &failure);

    Label ifFalse;
    masm.branchTestInt32Truthy(false, R0, &ifFalse);

    masm.moveValue(BooleanValue(true), R0);
    EmitReturnFromIC(masm);

    masm.bind(&ifFalse);
    masm.moveValue(BooleanValue(false), R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}

//
// ToBool_String
//

bool
ICToBool_String::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestString(Assembler::NotEqual, R0, &failure);

    Label ifFalse;
    masm.branchTestStringTruthy(false, R0, &ifFalse);

    masm.moveValue(BooleanValue(true), R0);
    EmitReturnFromIC(masm);

    masm.bind(&ifFalse);
    masm.moveValue(BooleanValue(false), R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}

//
// ToBool_NullUndefined
//

bool
ICToBool_NullUndefined::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure, ifFalse;
    masm.branchTestNull(Assembler::Equal, R0, &ifFalse);
    masm.branchTestUndefined(Assembler::NotEqual, R0, &failure);

    masm.bind(&ifFalse);
    masm.moveValue(BooleanValue(false), R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}

//
// ToBool_Double
//

bool
ICToBool_Double::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure, ifTrue;
    masm.branchTestDouble(Assembler::NotEqual, R0, &failure);
    masm.unboxDouble(R0, FloatReg0);
    masm.branchTestDoubleTruthy(true, FloatReg0, &ifTrue);

    masm.moveValue(BooleanValue(false), R0);
    EmitReturnFromIC(masm);

    masm.bind(&ifTrue);
    masm.moveValue(BooleanValue(true), R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
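
// ToBool_Object is the only ToBool stub with an out-of-line path: most
// objects are trivially truthy, but a class can emulate undefined (the
// classic web-compat example being document.all), which
// branchTestObjectTruthy cannot always decide inline. The slow path calls
// js::EmulatesUndefined through the ABI, then inverts the answer (xor 1)
// before boxing it, since an undefined-emulating object is falsy.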

//
// ToBool_Object
//

bool
ICToBool_Object::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure, ifFalse, slowPath;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    Register objReg = masm.extractObject(R0, ExtractTemp0);
    Register scratch = R1.scratchReg();
    masm.branchTestObjectTruthy(false, objReg, scratch, &slowPath, &ifFalse);

    // If object doesn't emulate undefined, it evaluates to true.
    masm.moveValue(BooleanValue(true), R0);
    EmitReturnFromIC(masm);

    masm.bind(&ifFalse);
    masm.moveValue(BooleanValue(false), R0);
    EmitReturnFromIC(masm);

    masm.bind(&slowPath);
    masm.setupUnalignedABICall(1, scratch);
    masm.passABIArg(objReg);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, js::EmulatesUndefined));
    masm.convertBoolToInt32(ReturnReg, ReturnReg);
    masm.xor32(Imm32(1), ReturnReg);
    masm.tagValue(JSVAL_TYPE_BOOLEAN, ReturnReg, R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}

//
// ToNumber_Fallback
//

static bool
DoToNumberFallback(JSContext *cx, ICToNumber_Fallback *stub, HandleValue arg, MutableHandleValue ret)
{
    FallbackICSpew(cx, stub, "ToNumber");
    ret.set(arg);
    return ToNumber(cx, ret);
}

typedef bool (*DoToNumberFallbackFn)(JSContext *, ICToNumber_Fallback *, HandleValue, MutableHandleValue);
static const VMFunction DoToNumberFallbackInfo =
    FunctionInfo<DoToNumberFallbackFn>(DoToNumberFallback, PopValues(1));

bool
ICToNumber_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(R0 == JSReturnOperand);

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    // Ensure stack is fully synced for the expression decompiler.
    masm.pushValue(R0);

    // Push arguments.
    masm.pushValue(R0);
    masm.push(BaselineStubReg);

    return tailCallVM(DoToNumberFallbackInfo, masm);
}
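
// The binary-arith fallback below covers the whole JSOP_ADD..JSOP_URSH
// family: it evaluates the op with the generic VM helpers, then picks a
// specialized stub from the operand and result types. A rough sketch of how
// one add site might evolve (hypothetical script):
//
//   s = s + x;   // Int32 + Int32   => ICBinaryArith_Int32
//                // Double operands later unlink it for ICBinaryArith_Double
//                // String + String => ICBinaryArith_StringConcat instead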

//
// BinaryArith_Fallback
//

// Disable PGO (see bug 851490).
#if defined(_MSC_VER)
# pragma optimize("g", off)
#endif
static bool
DoBinaryArithFallback(JSContext *cx, BaselineFrame *frame, ICBinaryArith_Fallback *stub_,
                      HandleValue lhs, HandleValue rhs, MutableHandleValue ret)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICBinaryArith_Fallback *> stub(frame, stub_);

    RootedScript script(cx, frame->script());
    jsbytecode *pc = stub->icEntry()->pc(script);
    JSOp op = JSOp(*pc);
    FallbackICSpew(cx, stub, "BinaryArith(%s,%d,%d)", js_CodeName[op],
                   int(lhs.isDouble() ? JSVAL_TYPE_DOUBLE : lhs.extractNonDoubleType()),
                   int(rhs.isDouble() ? JSVAL_TYPE_DOUBLE : rhs.extractNonDoubleType()));

    // Don't pass lhs/rhs directly, we need the original values when
    // generating stubs.
    RootedValue lhsCopy(cx, lhs);
    RootedValue rhsCopy(cx, rhs);

    // Perform the arith operation.
    switch(op) {
      case JSOP_ADD:
        // Do an add.
        if (!AddValues(cx, &lhsCopy, &rhsCopy, ret))
            return false;
        break;
      case JSOP_SUB:
        if (!SubValues(cx, &lhsCopy, &rhsCopy, ret))
            return false;
        break;
      case JSOP_MUL:
        if (!MulValues(cx, &lhsCopy, &rhsCopy, ret))
            return false;
        break;
      case JSOP_DIV:
        if (!DivValues(cx, &lhsCopy, &rhsCopy, ret))
            return false;
        break;
      case JSOP_MOD:
        if (!ModValues(cx, &lhsCopy, &rhsCopy, ret))
            return false;
        break;
      case JSOP_BITOR: {
        int32_t result;
        if (!BitOr(cx, lhs, rhs, &result))
            return false;
        ret.setInt32(result);
        break;
      }
      case JSOP_BITXOR: {
        int32_t result;
        if (!BitXor(cx, lhs, rhs, &result))
            return false;
        ret.setInt32(result);
        break;
      }
      case JSOP_BITAND: {
        int32_t result;
        if (!BitAnd(cx, lhs, rhs, &result))
            return false;
        ret.setInt32(result);
        break;
      }
      case JSOP_LSH: {
        int32_t result;
        if (!BitLsh(cx, lhs, rhs, &result))
            return false;
        ret.setInt32(result);
        break;
      }
      case JSOP_RSH: {
        int32_t result;
        if (!BitRsh(cx, lhs, rhs, &result))
            return false;
        ret.setInt32(result);
        break;
      }
      case JSOP_URSH: {
        if (!UrshOperation(cx, lhs, rhs, ret))
            return false;
        break;
      }
      default:
        MOZ_ASSUME_UNREACHABLE("Unhandled baseline arith op");
    }

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    if (ret.isDouble())
        stub->setSawDoubleResult();

    // Check to see if a new stub should be generated.
    if (stub->numOptimizedStubs() >= ICBinaryArith_Fallback::MAX_OPTIMIZED_STUBS) {
        stub->noteUnoptimizableOperands();
        return true;
    }

    // Handle string concat.
    if (op == JSOP_ADD) {
        if (lhs.isString() && rhs.isString()) {
            IonSpew(IonSpew_BaselineIC, "  Generating %s(String, String) stub", js_CodeName[op]);
            JS_ASSERT(ret.isString());
            ICBinaryArith_StringConcat::Compiler compiler(cx);
            ICStub *strcatStub = compiler.getStub(compiler.getStubSpace(script));
            if (!strcatStub)
                return false;
            stub->addNewStub(strcatStub);
            return true;
        }

        if ((lhs.isString() && rhs.isObject()) || (lhs.isObject() && rhs.isString())) {
            IonSpew(IonSpew_BaselineIC, "  Generating %s(%s, %s) stub", js_CodeName[op],
                    lhs.isString() ? "String" : "Object",
                    lhs.isString() ? "Object" : "String");
            JS_ASSERT(ret.isString());
            ICBinaryArith_StringObjectConcat::Compiler compiler(cx, lhs.isString());
            ICStub *strcatStub = compiler.getStub(compiler.getStubSpace(script));
            if (!strcatStub)
                return false;
            stub->addNewStub(strcatStub);
            return true;
        }
    }

    if (((lhs.isBoolean() && (rhs.isBoolean() || rhs.isInt32())) ||
         (rhs.isBoolean() && (lhs.isBoolean() || lhs.isInt32()))) &&
        (op == JSOP_ADD || op == JSOP_SUB || op == JSOP_BITOR || op == JSOP_BITAND ||
         op == JSOP_BITXOR))
    {
        IonSpew(IonSpew_BaselineIC, "  Generating %s(%s, %s) stub", js_CodeName[op],
                lhs.isBoolean() ? "Boolean" : "Int32", rhs.isBoolean() ? "Boolean" : "Int32");
        ICBinaryArith_BooleanWithInt32::Compiler compiler(cx, op, lhs.isBoolean(), rhs.isBoolean());
        ICStub *arithStub = compiler.getStub(compiler.getStubSpace(script));
        if (!arithStub)
            return false;
        stub->addNewStub(arithStub);
        return true;
    }

    // Handle only int32 or double.
    if (!lhs.isNumber() || !rhs.isNumber()) {
        stub->noteUnoptimizableOperands();
        return true;
    }

    JS_ASSERT(ret.isNumber());

    if (lhs.isDouble() || rhs.isDouble() || ret.isDouble()) {
        if (!cx->runtime()->jitSupportsFloatingPoint)
            return true;

        switch (op) {
          case JSOP_ADD:
          case JSOP_SUB:
          case JSOP_MUL:
          case JSOP_DIV:
          case JSOP_MOD: {
            // Unlink int32 stubs, it's faster to always use the double stub.
            stub->unlinkStubsWithKind(cx, ICStub::BinaryArith_Int32);
            IonSpew(IonSpew_BaselineIC, "  Generating %s(Double, Double) stub", js_CodeName[op]);

            ICBinaryArith_Double::Compiler compiler(cx, op);
            ICStub *doubleStub = compiler.getStub(compiler.getStubSpace(script));
            if (!doubleStub)
                return false;
            stub->addNewStub(doubleStub);
            return true;
          }
          default:
            break;
        }
    }

    if (lhs.isInt32() && rhs.isInt32()) {
        bool allowDouble = ret.isDouble();
        if (allowDouble)
            stub->unlinkStubsWithKind(cx, ICStub::BinaryArith_Int32);
        IonSpew(IonSpew_BaselineIC, "  Generating %s(Int32, Int32%s) stub", js_CodeName[op],
                allowDouble ? " => Double" : "");
        ICBinaryArith_Int32::Compiler compilerInt32(cx, op, allowDouble);
        ICStub *int32Stub = compilerInt32.getStub(compilerInt32.getStubSpace(script));
        if (!int32Stub)
            return false;
        stub->addNewStub(int32Stub);
        return true;
    }

    // Handle Double <BITOP> Int32 or Int32 <BITOP> Double case.
    if (((lhs.isDouble() && rhs.isInt32()) || (lhs.isInt32() && rhs.isDouble())) &&
        ret.isInt32())
    {
        switch(op) {
          case JSOP_BITOR:
          case JSOP_BITXOR:
          case JSOP_BITAND: {
            IonSpew(IonSpew_BaselineIC, "  Generating %s(%s, %s) stub", js_CodeName[op],
                    lhs.isDouble() ? "Double" : "Int32",
                    lhs.isDouble() ? "Int32" : "Double");
            ICBinaryArith_DoubleWithInt32::Compiler compiler(cx, op, lhs.isDouble());
            ICStub *optStub = compiler.getStub(compiler.getStubSpace(script));
            if (!optStub)
                return false;
            stub->addNewStub(optStub);
            return true;
          }
          default:
            break;
        }
    }

    stub->noteUnoptimizableOperands();
    return true;
}
#if defined(_MSC_VER)
# pragma optimize("", on)
#endif

typedef bool (*DoBinaryArithFallbackFn)(JSContext *, BaselineFrame *, ICBinaryArith_Fallback *,
                                        HandleValue, HandleValue, MutableHandleValue);
static const VMFunction DoBinaryArithFallbackInfo =
    FunctionInfo<DoBinaryArithFallbackFn>(DoBinaryArithFallback, PopValues(2));

bool
ICBinaryArith_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(R0 == JSReturnOperand);

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    // Ensure stack is fully synced for the expression decompiler.
    masm.pushValue(R0);
    masm.pushValue(R1);

    // Push arguments.
    masm.pushValue(R1);
    masm.pushValue(R0);
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(DoBinaryArithFallbackInfo, masm);
}

static bool
DoConcatStrings(JSContext *cx, HandleValue lhs, HandleValue rhs, MutableHandleValue res)
{
    JS_ASSERT(lhs.isString());
    JS_ASSERT(rhs.isString());
    JSString *lstr = lhs.toString();
    JSString *rstr = rhs.toString();
    JSString *result = ConcatStrings<NoGC>(cx, lstr, rstr);
    if (result) {
        res.set(StringValue(result));
        return true;
    }

    RootedString rootedl(cx, lstr), rootedr(cx, rstr);
    result = ConcatStrings<CanGC>(cx, rootedl, rootedr);
    if (!result)
        return false;

    res.set(StringValue(result));
    return true;
}

typedef bool (*DoConcatStringsFn)(JSContext *, HandleValue, HandleValue, MutableHandleValue);
static const VMFunction DoConcatStringsInfo = FunctionInfo<DoConcatStringsFn>(DoConcatStrings);

bool
ICBinaryArith_StringConcat::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestString(Assembler::NotEqual, R0, &failure);
    masm.branchTestString(Assembler::NotEqual, R1, &failure);

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    masm.pushValue(R1);
    masm.pushValue(R0);
    if (!tailCallVM(DoConcatStringsInfo, masm))
        return false;

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}

static JSString *
ConvertObjectToStringForConcat(JSContext *cx, HandleValue obj)
{
    JS_ASSERT(obj.isObject());
    RootedValue rootedObj(cx, obj);
    if (!ToPrimitive(cx, &rootedObj))
        return nullptr;
    return ToString<CanGC>(cx, rootedObj);
}

static bool
DoConcatStringObject(JSContext *cx, bool lhsIsString, HandleValue lhs, HandleValue rhs,
                     MutableHandleValue res)
{
    JSString *lstr = nullptr;
    JSString *rstr = nullptr;
    if (lhsIsString) {
        // Convert rhs first.
        JS_ASSERT(lhs.isString() && rhs.isObject());
        rstr = ConvertObjectToStringForConcat(cx, rhs);
        if (!rstr)
            return false;

        // lhs is already string.
        lstr = lhs.toString();
    } else {
        JS_ASSERT(rhs.isString() && lhs.isObject());
        // Convert lhs first.
        lstr = ConvertObjectToStringForConcat(cx, lhs);
        if (!lstr)
            return false;

        // rhs is already string.
        rstr = rhs.toString();
    }

    JSString *str = ConcatStrings<NoGC>(cx, lstr, rstr);
    if (!str) {
        RootedString nlstr(cx, lstr), nrstr(cx, rstr);
        str = ConcatStrings<CanGC>(cx, nlstr, nrstr);
        if (!str)
            return false;
    }

    // Technically, we need to call TypeScript::MonitorString for this PC, however
    // it was called when this stub was attached so it's OK.

    res.setString(str);
    return true;
}

typedef bool (*DoConcatStringObjectFn)(JSContext *, bool lhsIsString, HandleValue, HandleValue,
                                       MutableHandleValue);
static const VMFunction DoConcatStringObjectInfo =
    FunctionInfo<DoConcatStringObjectFn>(DoConcatStringObject, PopValues(2));

bool
ICBinaryArith_StringObjectConcat::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    if (lhsIsString_) {
        masm.branchTestString(Assembler::NotEqual, R0, &failure);
        masm.branchTestObject(Assembler::NotEqual, R1, &failure);
    } else {
        masm.branchTestObject(Assembler::NotEqual, R0, &failure);
        masm.branchTestString(Assembler::NotEqual, R1, &failure);
    }

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    // Sync for the decompiler.
    masm.pushValue(R0);
    masm.pushValue(R1);

    // Push arguments.
    masm.pushValue(R1);
    masm.pushValue(R0);
    masm.push(Imm32(lhsIsString_));
    if (!tailCallVM(DoConcatStringObjectInfo, masm))
        return false;

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
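
// Both concat helpers above use the same two-step allocation pattern: try the
// no-GC path on the unrooted strings first, and only root the operands and
// retry with a GC-capable allocation when that fails. ICBinaryArith_Double
// below handles +, -, * and / with inline FP instructions; JSOP_MOD has no
// portable machine instruction, so it becomes an ABI call to NumberMod, with
// the result coming back in ReturnFloatReg (asserted to be FloatReg0).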

bool
ICBinaryArith_Double::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.ensureDouble(R0, FloatReg0, &failure);
    masm.ensureDouble(R1, FloatReg1, &failure);

    switch (op) {
      case JSOP_ADD:
        masm.addDouble(FloatReg1, FloatReg0);
        break;
      case JSOP_SUB:
        masm.subDouble(FloatReg1, FloatReg0);
        break;
      case JSOP_MUL:
        masm.mulDouble(FloatReg1, FloatReg0);
        break;
      case JSOP_DIV:
        masm.divDouble(FloatReg1, FloatReg0);
        break;
      case JSOP_MOD:
        masm.setupUnalignedABICall(2, R0.scratchReg());
        masm.passABIArg(FloatReg0, MoveOp::DOUBLE);
        masm.passABIArg(FloatReg1, MoveOp::DOUBLE);
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, NumberMod), MoveOp::DOUBLE);
        JS_ASSERT(ReturnFloatReg == FloatReg0);
        break;
      default:
        MOZ_ASSUME_UNREACHABLE("Unexpected op");
    }

    masm.boxDouble(FloatReg0, R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
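
// ICBinaryArith_BooleanWithInt32 relies on booleans unboxing to 0 or 1, so
// plain int32 ALU ops apply. Note the overflow handling for add/sub: the
// operation mutates lhsReg in place, so on overflow it is undone with the
// inverse op to restore the register before falling through to the next
// stub in the chain.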
michael@0: break; michael@0: } michael@0: case JSOP_BITOR: { michael@0: masm.orPtr(rhsReg, lhsReg); michael@0: masm.tagValue(JSVAL_TYPE_INT32, lhsReg, R0); michael@0: EmitReturnFromIC(masm); michael@0: break; michael@0: } michael@0: case JSOP_BITXOR: { michael@0: masm.xorPtr(rhsReg, lhsReg); michael@0: masm.tagValue(JSVAL_TYPE_INT32, lhsReg, R0); michael@0: EmitReturnFromIC(masm); michael@0: break; michael@0: } michael@0: case JSOP_BITAND: { michael@0: masm.andPtr(rhsReg, lhsReg); michael@0: masm.tagValue(JSVAL_TYPE_INT32, lhsReg, R0); michael@0: EmitReturnFromIC(masm); michael@0: break; michael@0: } michael@0: default: michael@0: MOZ_ASSUME_UNREACHABLE("Unhandled op for BinaryArith_BooleanWithInt32."); michael@0: } michael@0: michael@0: // Failure case - jump to next stub michael@0: masm.bind(&failure); michael@0: EmitStubGuardFailure(masm); michael@0: return true; michael@0: } michael@0: michael@0: bool michael@0: ICBinaryArith_DoubleWithInt32::Compiler::generateStubCode(MacroAssembler &masm) michael@0: { michael@0: JS_ASSERT(op == JSOP_BITOR || op == JSOP_BITAND || op == JSOP_BITXOR); michael@0: michael@0: Label failure; michael@0: Register intReg; michael@0: Register scratchReg; michael@0: if (lhsIsDouble_) { michael@0: masm.branchTestDouble(Assembler::NotEqual, R0, &failure); michael@0: masm.branchTestInt32(Assembler::NotEqual, R1, &failure); michael@0: intReg = masm.extractInt32(R1, ExtractTemp0); michael@0: masm.unboxDouble(R0, FloatReg0); michael@0: scratchReg = R0.scratchReg(); michael@0: } else { michael@0: masm.branchTestInt32(Assembler::NotEqual, R0, &failure); michael@0: masm.branchTestDouble(Assembler::NotEqual, R1, &failure); michael@0: intReg = masm.extractInt32(R0, ExtractTemp0); michael@0: masm.unboxDouble(R1, FloatReg0); michael@0: scratchReg = R1.scratchReg(); michael@0: } michael@0: michael@0: // Truncate the double to an int32. michael@0: { michael@0: Label doneTruncate; michael@0: Label truncateABICall; michael@0: masm.branchTruncateDouble(FloatReg0, scratchReg, &truncateABICall); michael@0: masm.jump(&doneTruncate); michael@0: michael@0: masm.bind(&truncateABICall); michael@0: masm.push(intReg); michael@0: masm.setupUnalignedABICall(1, scratchReg); michael@0: masm.passABIArg(FloatReg0, MoveOp::DOUBLE); michael@0: masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, js::ToInt32)); michael@0: masm.storeCallResult(scratchReg); michael@0: masm.pop(intReg); michael@0: michael@0: masm.bind(&doneTruncate); michael@0: } michael@0: michael@0: Register intReg2 = scratchReg; michael@0: // All handled ops commute, so no need to worry about ordering. michael@0: switch(op) { michael@0: case JSOP_BITOR: michael@0: masm.orPtr(intReg, intReg2); michael@0: break; michael@0: case JSOP_BITXOR: michael@0: masm.xorPtr(intReg, intReg2); michael@0: break; michael@0: case JSOP_BITAND: michael@0: masm.andPtr(intReg, intReg2); michael@0: break; michael@0: default: michael@0: MOZ_ASSUME_UNREACHABLE("Unhandled op for BinaryArith_DoubleWithInt32."); michael@0: } michael@0: masm.tagValue(JSVAL_TYPE_INT32, intReg2, R0); michael@0: EmitReturnFromIC(masm); michael@0: michael@0: // Failure case - jump to next stub michael@0: masm.bind(&failure); michael@0: EmitStubGuardFailure(masm); michael@0: return true; michael@0: } michael@0: michael@0: // michael@0: // UnaryArith_Fallback michael@0: // michael@0: michael@0: // Disable PGO (see bug 851490). 
michael@0: #if defined(_MSC_VER) michael@0: # pragma optimize("g", off) michael@0: #endif michael@0: static bool michael@0: DoUnaryArithFallback(JSContext *cx, BaselineFrame *frame, ICUnaryArith_Fallback *stub_, michael@0: HandleValue val, MutableHandleValue res) michael@0: { michael@0: // This fallback stub may trigger debug mode toggling. michael@0: DebugModeOSRVolatileStub<ICUnaryArith_Fallback *> stub(frame, stub_); michael@0: michael@0: RootedScript script(cx, frame->script()); michael@0: jsbytecode *pc = stub->icEntry()->pc(script); michael@0: JSOp op = JSOp(*pc); michael@0: FallbackICSpew(cx, stub, "UnaryArith(%s)", js_CodeName[op]); michael@0: michael@0: switch (op) { michael@0: case JSOP_BITNOT: { michael@0: int32_t result; michael@0: if (!BitNot(cx, val, &result)) michael@0: return false; michael@0: res.setInt32(result); michael@0: break; michael@0: } michael@0: case JSOP_NEG: michael@0: if (!NegOperation(cx, script, pc, val, res)) michael@0: return false; michael@0: break; michael@0: default: michael@0: MOZ_ASSUME_UNREACHABLE("Unexpected op"); michael@0: } michael@0: michael@0: // Check if debug mode toggling made the stub invalid. michael@0: if (stub.invalid()) michael@0: return true; michael@0: michael@0: if (res.isDouble()) michael@0: stub->setSawDoubleResult(); michael@0: michael@0: if (stub->numOptimizedStubs() >= ICUnaryArith_Fallback::MAX_OPTIMIZED_STUBS) { michael@0: // TODO: Discard/replace stubs. michael@0: return true; michael@0: } michael@0: michael@0: if (val.isInt32() && res.isInt32()) { michael@0: IonSpew(IonSpew_BaselineIC, " Generating %s(Int32 => Int32) stub", js_CodeName[op]); michael@0: ICUnaryArith_Int32::Compiler compiler(cx, op); michael@0: ICStub *int32Stub = compiler.getStub(compiler.getStubSpace(script)); michael@0: if (!int32Stub) michael@0: return false; michael@0: stub->addNewStub(int32Stub); michael@0: return true; michael@0: } michael@0: michael@0: if (val.isNumber() && res.isNumber() && cx->runtime()->jitSupportsFloatingPoint) { michael@0: IonSpew(IonSpew_BaselineIC, " Generating %s(Number => Number) stub", js_CodeName[op]); michael@0: michael@0: // Unlink int32 stubs; the double stub handles both cases and TI specializes for both. michael@0: stub->unlinkStubsWithKind(cx, ICStub::UnaryArith_Int32); michael@0: michael@0: ICUnaryArith_Double::Compiler compiler(cx, op); michael@0: ICStub *doubleStub = compiler.getStub(compiler.getStubSpace(script)); michael@0: if (!doubleStub) michael@0: return false; michael@0: stub->addNewStub(doubleStub); michael@0: return true; michael@0: } michael@0: michael@0: return true; michael@0: } michael@0: #if defined(_MSC_VER) michael@0: # pragma optimize("", on) michael@0: #endif michael@0: michael@0: typedef bool (*DoUnaryArithFallbackFn)(JSContext *, BaselineFrame *, ICUnaryArith_Fallback *, michael@0: HandleValue, MutableHandleValue); michael@0: static const VMFunction DoUnaryArithFallbackInfo = michael@0: FunctionInfo<DoUnaryArithFallbackFn>(DoUnaryArithFallback, PopValues(1)); michael@0: michael@0: bool michael@0: ICUnaryArith_Fallback::Compiler::generateStubCode(MacroAssembler &masm) michael@0: { michael@0: JS_ASSERT(R0 == JSReturnOperand); michael@0: michael@0: // Restore the tail call register. michael@0: EmitRestoreTailCallReg(masm); michael@0: michael@0: // Ensure stack is fully synced for the expression decompiler. michael@0: masm.pushValue(R0); michael@0: michael@0: // Push arguments.
michael@0: masm.pushValue(R0); michael@0: masm.push(BaselineStubReg); michael@0: masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg()); michael@0: michael@0: return tailCallVM(DoUnaryArithFallbackInfo, masm); michael@0: } michael@0: michael@0: bool michael@0: ICUnaryArith_Double::Compiler::generateStubCode(MacroAssembler &masm) michael@0: { michael@0: Label failure; michael@0: masm.ensureDouble(R0, FloatReg0, &failure); michael@0: michael@0: JS_ASSERT(op == JSOP_NEG || op == JSOP_BITNOT); michael@0: michael@0: if (op == JSOP_NEG) { michael@0: masm.negateDouble(FloatReg0); michael@0: masm.boxDouble(FloatReg0, R0); michael@0: } else { michael@0: // Truncate the double to an int32. michael@0: Register scratchReg = R1.scratchReg(); michael@0: michael@0: Label doneTruncate; michael@0: Label truncateABICall; michael@0: masm.branchTruncateDouble(FloatReg0, scratchReg, &truncateABICall); michael@0: masm.jump(&doneTruncate); michael@0: michael@0: masm.bind(&truncateABICall); michael@0: masm.setupUnalignedABICall(1, scratchReg); michael@0: masm.passABIArg(FloatReg0, MoveOp::DOUBLE); michael@0: masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, js::ToInt32)); michael@0: masm.storeCallResult(scratchReg); michael@0: michael@0: masm.bind(&doneTruncate); michael@0: masm.not32(scratchReg); michael@0: masm.tagValue(JSVAL_TYPE_INT32, scratchReg, R0); michael@0: } michael@0: michael@0: EmitReturnFromIC(masm); michael@0: michael@0: // Failure case - jump to next stub michael@0: masm.bind(&failure); michael@0: EmitStubGuardFailure(masm); michael@0: return true; michael@0: } michael@0: michael@0: // michael@0: // GetElem_Fallback michael@0: // michael@0: michael@0: static void GetFixedOrDynamicSlotOffset(HandleObject obj, uint32_t slot, michael@0: bool *isFixed, uint32_t *offset) michael@0: { michael@0: JS_ASSERT(isFixed); michael@0: JS_ASSERT(offset); michael@0: *isFixed = obj->isFixedSlot(slot); michael@0: *offset = *isFixed ? JSObject::getFixedSlotOffset(slot) michael@0: : obj->dynamicSlotIndex(slot) * sizeof(Value); michael@0: } michael@0: michael@0: static bool michael@0: IsCacheableDOMProxy(JSObject *obj) michael@0: { michael@0: if (!obj->is<ProxyObject>()) michael@0: return false; michael@0: michael@0: BaseProxyHandler *handler = obj->as<ProxyObject>().handler(); michael@0: michael@0: if (handler->family() != GetDOMProxyHandlerFamily()) michael@0: return false; michael@0: michael@0: if (obj->numFixedSlots() <= GetDOMProxyExpandoSlot()) michael@0: return false; michael@0: michael@0: return true; michael@0: } michael@0: michael@0: static JSObject * michael@0: GetDOMProxyProto(JSObject *obj) michael@0: { michael@0: JS_ASSERT(IsCacheableDOMProxy(obj)); michael@0: return obj->getTaggedProto().toObjectOrNull(); michael@0: } michael@0: michael@0: static void michael@0: GenerateDOMProxyChecks(JSContext *cx, MacroAssembler &masm, Register object, michael@0: Address checkProxyHandlerAddr, michael@0: Address *checkExpandoShapeAddr, michael@0: Address *expandoAndGenerationAddr, michael@0: Address *generationAddr, michael@0: Register scratch, michael@0: GeneralRegisterSet &domProxyRegSet, michael@0: Label *checkFailed) michael@0: { michael@0: // Guard the following: michael@0: // 1. The object is a DOMProxy. michael@0: // 2. The object does not have expando properties, or has an expando michael@0: // which is known to not have the desired property.
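michael@0: // Illustrative note (editorial, not from the original sources): a page script michael@0: // may add an expando to a DOM node, e.g. |elem.foo = 17|, and such an expando michael@0: // can shadow the property this stub was specialized for. The guards below michael@0: // therefore send any object whose expando state differs from the reference michael@0: // object back to the fallback path.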
michael@0: Address handlerAddr(object, ProxyObject::offsetOfHandler()); michael@0: Address expandoAddr(object, JSObject::getFixedSlotOffset(GetDOMProxyExpandoSlot())); michael@0: michael@0: // Check that object is a DOMProxy. michael@0: masm.loadPtr(checkProxyHandlerAddr, scratch); michael@0: masm.branchPrivatePtr(Assembler::NotEqual, handlerAddr, scratch, checkFailed); michael@0: michael@0: // At this point, if not checking for an expando object, just return. michael@0: if (!checkExpandoShapeAddr) michael@0: return; michael@0: michael@0: // For the remaining code, we need to reserve some registers to load a value. michael@0: // This is ugly, but unavoidable. michael@0: ValueOperand tempVal = domProxyRegSet.takeAnyValue(); michael@0: masm.pushValue(tempVal); michael@0: michael@0: Label failDOMProxyCheck; michael@0: Label domProxyOk; michael@0: michael@0: if (expandoAndGenerationAddr) { michael@0: JS_ASSERT(generationAddr); michael@0: michael@0: masm.loadPtr(*expandoAndGenerationAddr, tempVal.scratchReg()); michael@0: masm.branchPrivatePtr(Assembler::NotEqual, expandoAddr, tempVal.scratchReg(), michael@0: &failDOMProxyCheck); michael@0: michael@0: masm.load32(*generationAddr, scratch); michael@0: masm.branch32(Assembler::NotEqual, michael@0: Address(tempVal.scratchReg(), offsetof(ExpandoAndGeneration, generation)), michael@0: scratch, &failDOMProxyCheck); michael@0: michael@0: masm.loadValue(Address(tempVal.scratchReg(), 0), tempVal); michael@0: } else { michael@0: masm.loadValue(expandoAddr, tempVal); michael@0: } michael@0: michael@0: // If the incoming object does not have an expando object then we're sure we're not michael@0: // shadowing. michael@0: masm.branchTestUndefined(Assembler::Equal, tempVal, &domProxyOk); michael@0: michael@0: // The reference object used to generate this check may not have had an michael@0: // expando object at all, in which case the presence of a non-undefined michael@0: // expando value in the incoming object is automatically a failure. michael@0: masm.loadPtr(*checkExpandoShapeAddr, scratch); michael@0: masm.branchPtr(Assembler::Equal, scratch, ImmPtr(nullptr), &failDOMProxyCheck); michael@0: michael@0: // Otherwise, ensure that the incoming object has an object for its expando value and that michael@0: // the shape matches. michael@0: masm.branchTestObject(Assembler::NotEqual, tempVal, &failDOMProxyCheck); michael@0: Register objReg = masm.extractObject(tempVal, tempVal.scratchReg()); michael@0: masm.branchTestObjShape(Assembler::Equal, objReg, scratch, &domProxyOk); michael@0: michael@0: // Failure case: restore the tempVal registers and jump to failures. michael@0: masm.bind(&failDOMProxyCheck); michael@0: masm.popValue(tempVal); michael@0: masm.jump(checkFailed); michael@0: michael@0: // Success case: restore tempVal and proceed. michael@0: masm.bind(&domProxyOk); michael@0: masm.popValue(tempVal); michael@0: } michael@0: michael@0: // Look up a property's shape on an object, being careful never to do any effectful michael@0: // operations. If this procedure yields no shape, that must not be taken to mean the michael@0: // property does not exist on the object.
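michael@0: // Illustrative example (editorial, not from the original sources): given michael@0: // |var p = new Proxy({}, { get: function () { poke(); } });|, a lookup on |p| michael@0: // must not run the trap, so this helper simply yields no shape; callers must michael@0: // treat that as "unknown", never as "property absent".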
michael@0: static bool michael@0: EffectlesslyLookupProperty(JSContext *cx, HandleObject obj, HandlePropertyName name, michael@0: MutableHandleObject holder, MutableHandleShape shape, michael@0: bool *checkDOMProxy=nullptr, michael@0: DOMProxyShadowsResult *shadowsResult=nullptr, michael@0: bool *domProxyHasGeneration=nullptr) michael@0: { michael@0: shape.set(nullptr); michael@0: holder.set(nullptr); michael@0: michael@0: if (checkDOMProxy) michael@0: *checkDOMProxy = false; michael@0: michael@0: // Check for a DOM proxy if asked to. michael@0: RootedObject checkObj(cx, obj); michael@0: if (checkDOMProxy && IsCacheableDOMProxy(obj)) { michael@0: JS_ASSERT(domProxyHasGeneration); michael@0: JS_ASSERT(shadowsResult); michael@0: michael@0: *checkDOMProxy = true; michael@0: if (obj->hasUncacheableProto()) michael@0: return true; michael@0: michael@0: RootedId id(cx, NameToId(name)); michael@0: *shadowsResult = GetDOMProxyShadowsCheck()(cx, obj, id); michael@0: if (*shadowsResult == ShadowCheckFailed) michael@0: return false; michael@0: michael@0: if (*shadowsResult == Shadows) { michael@0: holder.set(obj); michael@0: return true; michael@0: } michael@0: michael@0: *domProxyHasGeneration = (*shadowsResult == DoesntShadowUnique); michael@0: michael@0: checkObj = GetDOMProxyProto(obj); michael@0: if (!checkObj) michael@0: return true; michael@0: } else if (!obj->isNative()) { michael@0: return true; michael@0: } michael@0: michael@0: if (checkObj->hasIdempotentProtoChain()) { michael@0: if (!JSObject::lookupProperty(cx, checkObj, name, holder, shape)) michael@0: return false; michael@0: } else if (checkObj->isNative()) { michael@0: shape.set(checkObj->nativeLookup(cx, NameToId(name))); michael@0: if (shape) michael@0: holder.set(checkObj); michael@0: } michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: IsCacheableProtoChain(JSObject *obj, JSObject *holder, bool isDOMProxy=false) michael@0: { michael@0: JS_ASSERT_IF(isDOMProxy, IsCacheableDOMProxy(obj)); michael@0: JS_ASSERT_IF(!isDOMProxy, obj->isNative()); michael@0: michael@0: // Don't handle objects which require a prototype guard. This should michael@0: // be uncommon so handling it is likely not worth the complexity. michael@0: if (obj->hasUncacheableProto()) michael@0: return false; michael@0: michael@0: JSObject *cur = obj; michael@0: while (cur != holder) { michael@0: // We cannot assume that we find the holder object on the prototype michael@0: // chain and must check for null proto. The prototype chain can be michael@0: // altered during the lookupProperty call.
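michael@0: // (E.g. a resolve hook or accessor running during the lookup could have done michael@0: // |obj.__proto__ = null| from script, so a null proto here is a real possibility.)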
michael@0: JSObject *proto; michael@0: if (isDOMProxy && cur == obj) michael@0: proto = cur->getTaggedProto().toObjectOrNull(); michael@0: else michael@0: proto = cur->getProto(); michael@0: michael@0: if (!proto || !proto->isNative()) michael@0: return false; michael@0: michael@0: if (proto->hasUncacheableProto()) michael@0: return false; michael@0: michael@0: cur = proto; michael@0: } michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: IsCacheableGetPropReadSlot(JSObject *obj, JSObject *holder, Shape *shape, bool isDOMProxy=false) michael@0: { michael@0: if (!shape || !IsCacheableProtoChain(obj, holder, isDOMProxy)) michael@0: return false; michael@0: michael@0: if (!shape->hasSlot() || !shape->hasDefaultGetter()) michael@0: return false; michael@0: michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: IsCacheableGetPropCall(JSContext *cx, JSObject *obj, JSObject *holder, Shape *shape, bool *isScripted, michael@0: bool isDOMProxy=false) michael@0: { michael@0: JS_ASSERT(isScripted); michael@0: michael@0: if (!shape || !IsCacheableProtoChain(obj, holder, isDOMProxy)) michael@0: return false; michael@0: michael@0: if (shape->hasSlot() || shape->hasDefaultGetter()) michael@0: return false; michael@0: michael@0: if (!shape->hasGetterValue()) michael@0: return false; michael@0: michael@0: if (!shape->getterValue().isObject() || !shape->getterObject()->is<JSFunction>()) michael@0: return false; michael@0: michael@0: JSFunction *func = &shape->getterObject()->as<JSFunction>(); michael@0: michael@0: #ifdef JSGC_GENERATIONAL michael@0: // Information from get prop call ICs may be used directly from Ion code, michael@0: // and should not be nursery allocated. michael@0: if (cx->runtime()->gcNursery.isInside(holder) || cx->runtime()->gcNursery.isInside(func)) michael@0: return false; michael@0: #endif michael@0: michael@0: if (func->isNative()) { michael@0: *isScripted = false; michael@0: return true; michael@0: } michael@0: michael@0: if (!func->hasJITCode()) michael@0: return false; michael@0: michael@0: *isScripted = true; michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: IsCacheableSetPropWriteSlot(JSObject *obj, Shape *oldShape, JSObject *holder, Shape *shape) michael@0: { michael@0: if (!shape) michael@0: return false; michael@0: michael@0: // Object shape must not have changed during the property set. michael@0: if (obj->lastProperty() != oldShape) michael@0: return false; michael@0: michael@0: // Currently we only optimize direct writes. michael@0: if (obj != holder) michael@0: return false; michael@0: michael@0: if (!shape->hasSlot() || !shape->hasDefaultSetter() || !shape->writable()) michael@0: return false; michael@0: michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: IsCacheableSetPropAddSlot(JSContext *cx, HandleObject obj, HandleShape oldShape, uint32_t oldSlots, michael@0: HandleId id, HandleObject holder, HandleShape shape, michael@0: size_t *protoChainDepth) michael@0: { michael@0: if (!shape) michael@0: return false; michael@0: michael@0: // Property must be set directly on the object, and be its last-added property. michael@0: if (obj != holder || shape != obj->lastProperty()) michael@0: return false; michael@0: michael@0: // Object must be extensible, and oldShape must be the immediate parent of the michael@0: // current shape. michael@0: if (!obj->nonProxyIsExtensible() || obj->lastProperty()->previous() != oldShape) michael@0: return false; michael@0: michael@0: // Basic shape checks.
michael@0: if (shape->inDictionary() || !shape->hasSlot() || !shape->hasDefaultSetter() || michael@0: !shape->writable()) michael@0: { michael@0: return false; michael@0: } michael@0: michael@0: // If the object has a non-default resolve hook, don't inline. michael@0: if (obj->getClass()->resolve != JS_ResolveStub) michael@0: return false; michael@0: michael@0: size_t chainDepth = 0; michael@0: // Walk up the object's prototype chain and ensure that all prototypes are michael@0: // native and that none defines this property with a non-default setter. michael@0: for (JSObject *proto = obj->getProto(); proto; proto = proto->getProto()) { michael@0: chainDepth++; michael@0: // If the prototype is non-native, don't optimize. michael@0: if (!proto->isNative()) michael@0: return false; michael@0: michael@0: // If the prototype defines this property in a non-plain way, don't optimize. michael@0: Shape *protoShape = proto->nativeLookup(cx, id); michael@0: if (protoShape && !protoShape->hasDefaultSetter()) michael@0: return false; michael@0: michael@0: // Otherwise, if there's no such property, watch out for a resolve hook that would need michael@0: // to be invoked and thus prevent inlining of property addition. michael@0: if (proto->getClass()->resolve != JS_ResolveStub) michael@0: return false; michael@0: } michael@0: michael@0: // Only add an IC entry if the dynamic slots didn't change when the shapes michael@0: // changed. Need to ensure that a shape change for a subsequent object michael@0: // won't involve reallocating the slot array. michael@0: if (obj->numDynamicSlots() != oldSlots) michael@0: return false; michael@0: michael@0: *protoChainDepth = chainDepth; michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: IsCacheableSetPropCall(JSContext *cx, JSObject *obj, JSObject *holder, Shape *shape, bool *isScripted) michael@0: { michael@0: JS_ASSERT(isScripted); michael@0: michael@0: // Currently we only optimize setter calls for setters bound on prototypes. michael@0: if (obj == holder) michael@0: return false; michael@0: michael@0: if (!shape || !IsCacheableProtoChain(obj, holder)) michael@0: return false; michael@0: michael@0: if (shape->hasSlot() || shape->hasDefaultSetter()) michael@0: return false; michael@0: michael@0: if (!shape->hasSetterValue()) michael@0: return false; michael@0: michael@0: if (!shape->setterValue().isObject() || !shape->setterObject()->is<JSFunction>()) michael@0: return false; michael@0: michael@0: JSFunction *func = &shape->setterObject()->as<JSFunction>(); michael@0: michael@0: #ifdef JSGC_GENERATIONAL michael@0: // Information from set prop call ICs may be used directly from Ion code, michael@0: // and should not be nursery allocated.
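michael@0: // (Editorial note: the concern is that Ion may bake these pointers directly michael@0: // into jitcode, and a minor GC can move nursery-allocated things, which would michael@0: // leave such a baked-in pointer dangling.)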
michael@0: if (cx->runtime()->gcNursery.isInside(holder) || cx->runtime()->gcNursery.isInside(func)) michael@0: return false; michael@0: #endif michael@0: michael@0: if (func->isNative()) { michael@0: *isScripted = false; michael@0: return true; michael@0: } michael@0: michael@0: if (!func->hasJITCode()) michael@0: return false; michael@0: michael@0: *isScripted = true; michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: LookupNoSuchMethodHandler(JSContext *cx, HandleObject obj, HandleValue id, michael@0: MutableHandleValue result) michael@0: { michael@0: return OnUnknownMethod(cx, obj, id, result); michael@0: } michael@0: michael@0: typedef bool (*LookupNoSuchMethodHandlerFn)(JSContext *, HandleObject, HandleValue, michael@0: MutableHandleValue); michael@0: static const VMFunction LookupNoSuchMethodHandlerInfo = michael@0: FunctionInfo<LookupNoSuchMethodHandlerFn>(LookupNoSuchMethodHandler); michael@0: michael@0: static bool michael@0: GetElemNativeStubExists(ICGetElem_Fallback *stub, HandleObject obj, HandleObject holder, michael@0: HandlePropertyName propName, bool needsAtomize) michael@0: { michael@0: bool indirect = (obj.get() != holder.get()); michael@0: michael@0: for (ICStubConstIterator iter = stub->beginChainConst(); !iter.atEnd(); iter++) { michael@0: if (iter->kind() != ICStub::GetElem_NativeSlot && michael@0: iter->kind() != ICStub::GetElem_NativePrototypeSlot && michael@0: iter->kind() != ICStub::GetElem_NativePrototypeCallNative && michael@0: iter->kind() != ICStub::GetElem_NativePrototypeCallScripted) michael@0: { michael@0: continue; michael@0: } michael@0: michael@0: if (indirect && (iter->kind() != ICStub::GetElem_NativePrototypeSlot && michael@0: iter->kind() != ICStub::GetElem_NativePrototypeCallNative && michael@0: iter->kind() != ICStub::GetElem_NativePrototypeCallScripted)) michael@0: { michael@0: continue; michael@0: } michael@0: michael@0: ICGetElemNativeStub *getElemNativeStub = reinterpret_cast<ICGetElemNativeStub *>(*iter); michael@0: if (propName != getElemNativeStub->name()) michael@0: continue; michael@0: michael@0: if (obj->lastProperty() != getElemNativeStub->shape()) michael@0: continue; michael@0: michael@0: // If the new stub needs atomization, and the old stub doesn't atomize, then michael@0: // an appropriate stub doesn't exist. michael@0: if (needsAtomize && !getElemNativeStub->needsAtomize()) michael@0: continue; michael@0: michael@0: // For prototype gets, check the holder and holder shape.
michael@0: if (indirect) { michael@0: if (iter->isGetElem_NativePrototypeSlot()) { michael@0: ICGetElem_NativePrototypeSlot *protoStub = iter->toGetElem_NativePrototypeSlot(); michael@0: michael@0: if (holder != protoStub->holder()) michael@0: continue; michael@0: michael@0: if (holder->lastProperty() != protoStub->holderShape()) michael@0: continue; michael@0: } else { michael@0: JS_ASSERT(iter->isGetElem_NativePrototypeCallNative() || michael@0: iter->isGetElem_NativePrototypeCallScripted()); michael@0: michael@0: ICGetElemNativePrototypeCallStub *protoStub = michael@0: reinterpret_cast<ICGetElemNativePrototypeCallStub *>(*iter); michael@0: michael@0: if (holder != protoStub->holder()) michael@0: continue; michael@0: michael@0: if (holder->lastProperty() != protoStub->holderShape()) michael@0: continue; michael@0: } michael@0: } michael@0: michael@0: return true; michael@0: } michael@0: return false; michael@0: } michael@0: michael@0: static void michael@0: RemoveExistingGetElemNativeStubs(JSContext *cx, ICGetElem_Fallback *stub, HandleObject obj, michael@0: HandleObject holder, HandlePropertyName propName, michael@0: bool needsAtomize) michael@0: { michael@0: bool indirect = (obj.get() != holder.get()); michael@0: michael@0: for (ICStubIterator iter = stub->beginChain(); !iter.atEnd(); iter++) { michael@0: switch (iter->kind()) { michael@0: case ICStub::GetElem_NativeSlot: michael@0: if (indirect) michael@0: continue; michael@0: // Fall through: a direct-slot stub is also a removal candidate. michael@0: case ICStub::GetElem_NativePrototypeSlot: michael@0: case ICStub::GetElem_NativePrototypeCallNative: michael@0: case ICStub::GetElem_NativePrototypeCallScripted: michael@0: break; michael@0: default: michael@0: continue; michael@0: } michael@0: michael@0: ICGetElemNativeStub *getElemNativeStub = reinterpret_cast<ICGetElemNativeStub *>(*iter); michael@0: if (propName != getElemNativeStub->name()) michael@0: continue; michael@0: michael@0: if (obj->lastProperty() != getElemNativeStub->shape()) michael@0: continue; michael@0: michael@0: // For prototype gets, check the holder and holder shape. michael@0: if (indirect) { michael@0: if (iter->isGetElem_NativePrototypeSlot()) { michael@0: ICGetElem_NativePrototypeSlot *protoStub = iter->toGetElem_NativePrototypeSlot(); michael@0: michael@0: if (holder != protoStub->holder()) michael@0: continue; michael@0: michael@0: // If the holder matches, but the holder's lastProperty doesn't match, then michael@0: // this stub is invalid anyway. Unlink it. michael@0: if (holder->lastProperty() != protoStub->holderShape()) { michael@0: iter.unlink(cx); michael@0: continue; michael@0: } michael@0: } else { michael@0: JS_ASSERT(iter->isGetElem_NativePrototypeCallNative() || michael@0: iter->isGetElem_NativePrototypeCallScripted()); michael@0: michael@0: ICGetElemNativePrototypeCallStub *protoStub = michael@0: reinterpret_cast<ICGetElemNativePrototypeCallStub *>(*iter); michael@0: michael@0: if (holder != protoStub->holder()) michael@0: continue; michael@0: michael@0: // If the holder matches, but the holder's lastProperty doesn't match, then michael@0: // this stub is invalid anyway. Unlink it. michael@0: if (holder->lastProperty() != protoStub->holderShape()) { michael@0: iter.unlink(cx); michael@0: continue; michael@0: } michael@0: } michael@0: } michael@0: michael@0: // If the new stub needs atomization, and the old stub doesn't atomize, then michael@0: // remove the old stub.
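michael@0: // (Editorial note: an atomizing stub appears to subsume a non-atomizing one michael@0: // for the same property, since it atomizes the key first and then performs michael@0: // the same identity check, so the older stub is redundant.)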
michael@0: if (needsAtomize && !getElemNativeStub->needsAtomize()) { michael@0: iter.unlink(cx); michael@0: continue; michael@0: } michael@0: michael@0: // Should never get here, because this means a matching stub exists, and if michael@0: // a matching stub exists, this procedure should never have been called. michael@0: MOZ_ASSUME_UNREACHABLE("Procedure should never have been called."); michael@0: } michael@0: } michael@0: michael@0: static bool michael@0: TypedArrayGetElemStubExists(ICGetElem_Fallback *stub, HandleObject obj) michael@0: { michael@0: for (ICStubConstIterator iter = stub->beginChainConst(); !iter.atEnd(); iter++) { michael@0: if (!iter->isGetElem_TypedArray()) michael@0: continue; michael@0: if (obj->lastProperty() == iter->toGetElem_TypedArray()->shape()) michael@0: return true; michael@0: } michael@0: return false; michael@0: } michael@0: michael@0: static bool michael@0: ArgumentsGetElemStubExists(ICGetElem_Fallback *stub, ICGetElem_Arguments::Which which) michael@0: { michael@0: for (ICStubConstIterator iter = stub->beginChainConst(); !iter.atEnd(); iter++) { michael@0: if (!iter->isGetElem_Arguments()) michael@0: continue; michael@0: if (iter->toGetElem_Arguments()->which() == which) michael@0: return true; michael@0: } michael@0: return false; michael@0: } michael@0: michael@0: michael@0: static bool TryAttachNativeGetElemStub(JSContext *cx, HandleScript script, jsbytecode *pc, michael@0: ICGetElem_Fallback *stub, HandleObject obj, michael@0: HandleValue key) michael@0: { michael@0: // Native-object GetElem stubs can't deal with non-string keys. michael@0: if (!key.isString()) michael@0: return true; michael@0: michael@0: // Convert to interned property name. michael@0: RootedId id(cx); michael@0: if (!ValueToId<CanGC>(cx, key, &id)) michael@0: return false; michael@0: michael@0: uint32_t dummy; michael@0: if (!JSID_IS_ATOM(id) || JSID_TO_ATOM(id)->isIndex(&dummy)) michael@0: return true; michael@0: michael@0: RootedPropertyName propName(cx, JSID_TO_ATOM(id)->asPropertyName()); michael@0: bool needsAtomize = !key.toString()->isAtom(); michael@0: bool isCallElem = (JSOp(*pc) == JSOP_CALLELEM); michael@0: michael@0: RootedShape shape(cx); michael@0: RootedObject holder(cx); michael@0: if (!EffectlesslyLookupProperty(cx, obj, propName, &holder, &shape)) michael@0: return false; michael@0: michael@0: if (IsCacheableGetPropReadSlot(obj, holder, shape)) { michael@0: // If a suitable stub already exists, nothing else to do. michael@0: if (GetElemNativeStubExists(stub, obj, holder, propName, needsAtomize)) michael@0: return true; michael@0: michael@0: // Remove any existing stubs that may interfere with the new stub being added. michael@0: RemoveExistingGetElemNativeStubs(cx, stub, obj, holder, propName, needsAtomize); michael@0: michael@0: bool isFixedSlot; michael@0: uint32_t offset; michael@0: GetFixedOrDynamicSlotOffset(holder, shape->slot(), &isFixedSlot, &offset); michael@0: michael@0: ICStub *monitorStub = stub->fallbackMonitorStub()->firstMonitorStub(); michael@0: ICStub::Kind kind = (obj == holder) ? ICStub::GetElem_NativeSlot michael@0: : ICStub::GetElem_NativePrototypeSlot; michael@0: michael@0: IonSpew(IonSpew_BaselineIC, " Generating GetElem(Native %s%s slot) stub " michael@0: "(obj=%p, shape=%p, holder=%p, holderShape=%p)", michael@0: (obj == holder) ? "direct" : "prototype", michael@0: needsAtomize ?
" atomizing" : "", michael@0: obj.get(), obj->lastProperty(), holder.get(), holder->lastProperty()); michael@0: michael@0: ICGetElemNativeStub::AccessType acctype = isFixedSlot ? ICGetElemNativeStub::FixedSlot michael@0: : ICGetElemNativeStub::DynamicSlot; michael@0: ICGetElemNativeCompiler compiler(cx, kind, isCallElem, monitorStub, obj, holder, propName, michael@0: acctype, needsAtomize, offset); michael@0: ICStub *newStub = compiler.getStub(compiler.getStubSpace(script)); michael@0: if (!newStub) michael@0: return false; michael@0: michael@0: stub->addNewStub(newStub); michael@0: return true; michael@0: } michael@0: michael@0: bool getterIsScripted = false; michael@0: if (IsCacheableGetPropCall(cx, obj, holder, shape, &getterIsScripted, /*isDOMProxy=*/false)) { michael@0: RootedFunction getter(cx, &shape->getterObject()->as<JSFunction>()); michael@0: michael@0: #if JS_HAS_NO_SUCH_METHOD michael@0: // It's unlikely that a getter function will be used in callelem locations. michael@0: // Just don't attach stubs in that case to avoid issues with __noSuchMethod__ handling. michael@0: if (isCallElem) michael@0: return true; michael@0: #endif michael@0: michael@0: // For now, we do not handle own-property getters. michael@0: if (obj == holder) michael@0: return true; michael@0: michael@0: // If a suitable stub already exists, nothing else to do. michael@0: if (GetElemNativeStubExists(stub, obj, holder, propName, needsAtomize)) michael@0: return true; michael@0: michael@0: // Remove any existing stubs that may interfere with the new stub being added. michael@0: RemoveExistingGetElemNativeStubs(cx, stub, obj, holder, propName, needsAtomize); michael@0: michael@0: ICStub *monitorStub = stub->fallbackMonitorStub()->firstMonitorStub(); michael@0: ICStub::Kind kind = getterIsScripted ? ICStub::GetElem_NativePrototypeCallScripted michael@0: : ICStub::GetElem_NativePrototypeCallNative; michael@0: michael@0: if (getterIsScripted) { michael@0: IonSpew(IonSpew_BaselineIC, michael@0: " Generating GetElem(Native %s%s call scripted %s:%d) stub " michael@0: "(obj=%p, shape=%p, holder=%p, holderShape=%p)", michael@0: (obj == holder) ? "direct" : "prototype", michael@0: needsAtomize ? " atomizing" : "", michael@0: getter->nonLazyScript()->filename(), getter->nonLazyScript()->lineno(), michael@0: obj.get(), obj->lastProperty(), holder.get(), holder->lastProperty()); michael@0: } else { michael@0: IonSpew(IonSpew_BaselineIC, michael@0: " Generating GetElem(Native %s%s call native) stub " michael@0: "(obj=%p, shape=%p, holder=%p, holderShape=%p)", michael@0: (obj == holder) ? "direct" : "prototype", michael@0: needsAtomize ? " atomizing" : "", michael@0: obj.get(), obj->lastProperty(), holder.get(), holder->lastProperty()); michael@0: } michael@0: michael@0: ICGetElemNativeStub::AccessType acctype = getterIsScripted michael@0: ?
ICGetElemNativeStub::ScriptedGetter michael@0: : ICGetElemNativeStub::NativeGetter; michael@0: ICGetElemNativeCompiler compiler(cx, kind, monitorStub, obj, holder, propName, acctype, michael@0: needsAtomize, getter, script->pcToOffset(pc), isCallElem); michael@0: ICStub *newStub = compiler.getStub(compiler.getStubSpace(script)); michael@0: if (!newStub) michael@0: return false; michael@0: michael@0: stub->addNewStub(newStub); michael@0: return true; michael@0: } michael@0: michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: TypedArrayRequiresFloatingPoint(TypedArrayObject *tarr) michael@0: { michael@0: uint32_t type = tarr->type(); michael@0: return (type == ScalarTypeDescr::TYPE_UINT32 || michael@0: type == ScalarTypeDescr::TYPE_FLOAT32 || michael@0: type == ScalarTypeDescr::TYPE_FLOAT64); michael@0: } michael@0: michael@0: static bool michael@0: TryAttachGetElemStub(JSContext *cx, JSScript *script, jsbytecode *pc, ICGetElem_Fallback *stub, michael@0: HandleValue lhs, HandleValue rhs, HandleValue res) michael@0: { michael@0: bool isCallElem = (JSOp(*pc) == JSOP_CALLELEM); michael@0: michael@0: // Check for String[i] => Char accesses. michael@0: if (lhs.isString() && rhs.isInt32() && res.isString() && michael@0: !stub->hasStub(ICStub::GetElem_String)) michael@0: { michael@0: // NoSuchMethod handling doesn't apply to string targets. michael@0: michael@0: IonSpew(IonSpew_BaselineIC, " Generating GetElem(String[Int32]) stub"); michael@0: ICGetElem_String::Compiler compiler(cx); michael@0: ICStub *stringStub = compiler.getStub(compiler.getStubSpace(script)); michael@0: if (!stringStub) michael@0: return false; michael@0: michael@0: stub->addNewStub(stringStub); michael@0: return true; michael@0: } michael@0: michael@0: if (lhs.isMagic(JS_OPTIMIZED_ARGUMENTS) && rhs.isInt32() && michael@0: !ArgumentsGetElemStubExists(stub, ICGetElem_Arguments::Magic)) michael@0: { michael@0: // Any script with a CALLPROP on arguments (arguments.foo()) michael@0: // should not have optimized arguments. michael@0: JS_ASSERT(!isCallElem); michael@0: michael@0: IonSpew(IonSpew_BaselineIC, " Generating GetElem(MagicArgs[Int32]) stub"); michael@0: ICGetElem_Arguments::Compiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(), michael@0: ICGetElem_Arguments::Magic, false); michael@0: ICStub *argsStub = compiler.getStub(compiler.getStubSpace(script)); michael@0: if (!argsStub) michael@0: return false; michael@0: michael@0: stub->addNewStub(argsStub); michael@0: return true; michael@0: } michael@0: michael@0: // Otherwise, GetElem is only optimized on objects. 
michael@0: if (!lhs.isObject()) michael@0: return true; michael@0: RootedObject obj(cx, &lhs.toObject()); michael@0: michael@0: // Check for ArgumentsObj[int] accesses michael@0: if (obj->is<ArgumentsObject>() && rhs.isInt32()) { michael@0: ICGetElem_Arguments::Which which = ICGetElem_Arguments::Normal; michael@0: if (obj->is<StrictArgumentsObject>()) michael@0: which = ICGetElem_Arguments::Strict; michael@0: if (!ArgumentsGetElemStubExists(stub, which)) { michael@0: IonSpew(IonSpew_BaselineIC, " Generating GetElem(ArgsObj[Int32]) stub"); michael@0: ICGetElem_Arguments::Compiler compiler( michael@0: cx, stub->fallbackMonitorStub()->firstMonitorStub(), which, isCallElem); michael@0: ICStub *argsStub = compiler.getStub(compiler.getStubSpace(script)); michael@0: if (!argsStub) michael@0: return false; michael@0: michael@0: stub->addNewStub(argsStub); michael@0: return true; michael@0: } michael@0: } michael@0: michael@0: if (obj->isNative()) { michael@0: // Check for NativeObject[int] dense accesses. michael@0: if (rhs.isInt32() && rhs.toInt32() >= 0 && !obj->is<TypedArrayObject>()) { michael@0: IonSpew(IonSpew_BaselineIC, " Generating GetElem(Native[Int32] dense) stub"); michael@0: ICGetElem_Dense::Compiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(), michael@0: obj->lastProperty(), isCallElem); michael@0: ICStub *denseStub = compiler.getStub(compiler.getStubSpace(script)); michael@0: if (!denseStub) michael@0: return false; michael@0: michael@0: stub->addNewStub(denseStub); michael@0: return true; michael@0: } michael@0: michael@0: // Check for NativeObject[id] shape-optimizable accesses. michael@0: if (rhs.isString()) { michael@0: RootedScript rootedScript(cx, script); michael@0: if (!TryAttachNativeGetElemStub(cx, rootedScript, pc, stub, obj, rhs)) michael@0: return false; michael@0: script = rootedScript; michael@0: } michael@0: } michael@0: michael@0: // Check for TypedArray[int] => Number accesses. michael@0: if (obj->is<TypedArrayObject>() && rhs.isNumber() && res.isNumber() && michael@0: !TypedArrayGetElemStubExists(stub, obj)) michael@0: { michael@0: // Don't attach CALLELEM stubs for accesses on typed arrays expected to yield numbers. michael@0: #if JS_HAS_NO_SUCH_METHOD michael@0: if (isCallElem) michael@0: return true; michael@0: #endif michael@0: michael@0: TypedArrayObject *tarr = &obj->as<TypedArrayObject>(); michael@0: if (!cx->runtime()->jitSupportsFloatingPoint && michael@0: (TypedArrayRequiresFloatingPoint(tarr) || rhs.isDouble())) michael@0: { michael@0: return true; michael@0: } michael@0: michael@0: IonSpew(IonSpew_BaselineIC, " Generating GetElem(TypedArray[Int32]) stub"); michael@0: ICGetElem_TypedArray::Compiler compiler(cx, tarr->lastProperty(), tarr->type()); michael@0: ICStub *typedArrayStub = compiler.getStub(compiler.getStubSpace(script)); michael@0: if (!typedArrayStub) michael@0: return false; michael@0: michael@0: stub->addNewStub(typedArrayStub); michael@0: return true; michael@0: } michael@0: michael@0: // GetElem operations on non-native objects cannot be cached by either michael@0: // Baseline or Ion. Indicate this in the cache so that Ion does not michael@0: // generate a cache for this op. michael@0: if (!obj->isNative()) michael@0: stub->noteNonNativeAccess(); michael@0: michael@0: // GetElem operations which could access negative indexes generally can't michael@0: // be optimized without the potential for bailouts, as we can't statically michael@0: // determine that an object has no properties on such indexes.
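michael@0: // Illustrative example (editorial): after |Object.prototype["-1"] = "x"|, the michael@0: // access |arr[-1]| must find that property, so a stub that assumed negative michael@0: // indexes always miss would return the wrong result.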
michael@0: if (rhs.isNumber() && rhs.toNumber() < 0) michael@0: stub->noteNegativeIndex(); michael@0: michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: DoGetElemFallback(JSContext *cx, BaselineFrame *frame, ICGetElem_Fallback *stub_, HandleValue lhs, michael@0: HandleValue rhs, MutableHandleValue res) michael@0: { michael@0: // This fallback stub may trigger debug mode toggling. michael@0: DebugModeOSRVolatileStub<ICGetElem_Fallback *> stub(frame, stub_); michael@0: michael@0: RootedScript script(cx, frame->script()); michael@0: jsbytecode *pc = stub->icEntry()->pc(frame->script()); michael@0: JSOp op = JSOp(*pc); michael@0: FallbackICSpew(cx, stub, "GetElem(%s)", js_CodeName[op]); michael@0: michael@0: JS_ASSERT(op == JSOP_GETELEM || op == JSOP_CALLELEM); michael@0: michael@0: // Don't pass lhs directly; we need it when generating stubs. michael@0: RootedValue lhsCopy(cx, lhs); michael@0: michael@0: bool isOptimizedArgs = false; michael@0: if (lhs.isMagic(JS_OPTIMIZED_ARGUMENTS)) { michael@0: // Handle optimized arguments[i] access. michael@0: if (!GetElemOptimizedArguments(cx, frame, &lhsCopy, rhs, res, &isOptimizedArgs)) michael@0: return false; michael@0: if (isOptimizedArgs) michael@0: types::TypeScript::Monitor(cx, frame->script(), pc, res); michael@0: } michael@0: michael@0: if (!isOptimizedArgs) { michael@0: if (!GetElementOperation(cx, op, &lhsCopy, rhs, res)) michael@0: return false; michael@0: types::TypeScript::Monitor(cx, frame->script(), pc, res); michael@0: } michael@0: michael@0: // Check if debug mode toggling made the stub invalid. michael@0: if (stub.invalid()) michael@0: return true; michael@0: michael@0: // Add a type monitor stub for the resulting value. michael@0: if (!stub->addMonitorStubForValue(cx, frame->script(), res)) michael@0: return false; michael@0: michael@0: if (stub->numOptimizedStubs() >= ICGetElem_Fallback::MAX_OPTIMIZED_STUBS) { michael@0: // TODO: Discard all stubs in this IC and replace with inert megamorphic stub. michael@0: // But for now we just bail. michael@0: return true; michael@0: } michael@0: michael@0: // Try to attach an optimized stub. michael@0: if (!TryAttachGetElemStub(cx, frame->script(), pc, stub, lhs, rhs, res)) michael@0: return false; michael@0: michael@0: return true; michael@0: } michael@0: michael@0: typedef bool (*DoGetElemFallbackFn)(JSContext *, BaselineFrame *, ICGetElem_Fallback *, michael@0: HandleValue, HandleValue, MutableHandleValue); michael@0: static const VMFunction DoGetElemFallbackInfo = michael@0: FunctionInfo<DoGetElemFallbackFn>(DoGetElemFallback, PopValues(2)); michael@0: michael@0: bool michael@0: ICGetElem_Fallback::Compiler::generateStubCode(MacroAssembler &masm) michael@0: { michael@0: JS_ASSERT(R0 == JSReturnOperand); michael@0: michael@0: // Restore the tail call register. michael@0: EmitRestoreTailCallReg(masm); michael@0: michael@0: // Ensure stack is fully synced for the expression decompiler. michael@0: masm.pushValue(R0); michael@0: masm.pushValue(R1); michael@0: michael@0: // Push arguments.
michael@0: masm.pushValue(R1); michael@0: masm.pushValue(R0); michael@0: masm.push(BaselineStubReg); michael@0: masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg()); michael@0: michael@0: return tailCallVM(DoGetElemFallbackInfo, masm); michael@0: } michael@0: michael@0: // michael@0: // GetElem_NativeSlot michael@0: // michael@0: michael@0: static bool michael@0: DoAtomizeString(JSContext *cx, HandleString string, MutableHandleValue result) michael@0: { michael@0: IonSpew(IonSpew_BaselineIC, " AtomizeString called"); michael@0: michael@0: RootedValue key(cx, StringValue(string)); michael@0: michael@0: // Convert to interned property name. michael@0: RootedId id(cx); michael@0: if (!ValueToId<CanGC>(cx, key, &id)) michael@0: return false; michael@0: michael@0: if (!JSID_IS_ATOM(id)) { michael@0: result.set(key); michael@0: return true; michael@0: } michael@0: michael@0: result.set(StringValue(JSID_TO_ATOM(id))); michael@0: return true; michael@0: } michael@0: michael@0: typedef bool (*DoAtomizeStringFn)(JSContext *, HandleString, MutableHandleValue); michael@0: static const VMFunction DoAtomizeStringInfo = FunctionInfo<DoAtomizeStringFn>(DoAtomizeString); michael@0: michael@0: bool michael@0: ICGetElemNativeCompiler::emitCallNative(MacroAssembler &masm, Register objReg) michael@0: { michael@0: GeneralRegisterSet regs = availableGeneralRegs(0); michael@0: regs.takeUnchecked(objReg); michael@0: regs.takeUnchecked(BaselineTailCallReg); michael@0: michael@0: enterStubFrame(masm, regs.getAny()); michael@0: michael@0: // Push object. michael@0: masm.push(objReg); michael@0: michael@0: // Push native callee. michael@0: masm.loadPtr(Address(BaselineStubReg, ICGetElemNativeGetterStub::offsetOfGetter()), objReg); michael@0: masm.push(objReg); michael@0: michael@0: regs.add(objReg); michael@0: michael@0: // Profiler hook. michael@0: emitProfilingUpdate(masm, regs, ICGetElemNativeGetterStub::offsetOfPCOffset()); michael@0: michael@0: // Call helper. michael@0: if (!callVM(DoCallNativeGetterInfo, masm)) michael@0: return false; michael@0: michael@0: leaveStubFrame(masm); michael@0: michael@0: return true; michael@0: } michael@0: michael@0: bool michael@0: ICGetElemNativeCompiler::emitCallScripted(MacroAssembler &masm, Register objReg) michael@0: { michael@0: GeneralRegisterSet regs = availableGeneralRegs(0); michael@0: regs.takeUnchecked(objReg); michael@0: regs.takeUnchecked(BaselineTailCallReg); michael@0: michael@0: // Enter stub frame. michael@0: enterStubFrame(masm, regs.getAny()); michael@0: michael@0: // Push |this| for getter (target object). michael@0: { michael@0: ValueOperand val = regs.takeAnyValue(); michael@0: masm.tagValue(JSVAL_TYPE_OBJECT, objReg, val); michael@0: masm.Push(val); michael@0: regs.add(val); michael@0: } michael@0: michael@0: regs.add(objReg); michael@0: michael@0: Register callee = regs.takeAny(); michael@0: masm.loadPtr(Address(BaselineStubReg, ICGetElemNativeGetterStub::offsetOfGetter()), callee); michael@0: michael@0: // Push argc, callee, and descriptor.
michael@0: { michael@0: Register callScratch = regs.takeAny(); michael@0: EmitCreateStubFrameDescriptor(masm, callScratch); michael@0: masm.Push(Imm32(0)); // ActualArgc is 0 michael@0: masm.Push(callee); michael@0: masm.Push(callScratch); michael@0: regs.add(callScratch); michael@0: } michael@0: michael@0: Register code = regs.takeAnyExcluding(ArgumentsRectifierReg); michael@0: masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), code); michael@0: masm.loadBaselineOrIonRaw(code, code, SequentialExecution, nullptr); michael@0: michael@0: Register scratch = regs.takeAny(); michael@0: michael@0: // Handle arguments underflow. michael@0: Label noUnderflow; michael@0: masm.load16ZeroExtend(Address(callee, JSFunction::offsetOfNargs()), scratch); michael@0: masm.branch32(Assembler::Equal, scratch, Imm32(0), &noUnderflow); michael@0: { michael@0: // Call the arguments rectifier. michael@0: JS_ASSERT(ArgumentsRectifierReg != code); michael@0: michael@0: JitCode *argumentsRectifier = michael@0: cx->runtime()->jitRuntime()->getArgumentsRectifier(SequentialExecution); michael@0: michael@0: masm.movePtr(ImmGCPtr(argumentsRectifier), code); michael@0: masm.loadPtr(Address(code, JitCode::offsetOfCode()), code); michael@0: masm.mov(ImmWord(0), ArgumentsRectifierReg); michael@0: } michael@0: michael@0: masm.bind(&noUnderflow); michael@0: michael@0: // If needed, update SPS Profiler frame entry. At this point, callee and scratch can michael@0: // be clobbered. michael@0: { michael@0: GeneralRegisterSet availRegs = availableGeneralRegs(0); michael@0: availRegs.take(ArgumentsRectifierReg); michael@0: availRegs.take(code); michael@0: emitProfilingUpdate(masm, availRegs, ICGetElemNativeGetterStub::offsetOfPCOffset()); michael@0: } michael@0: michael@0: masm.callIon(code); michael@0: michael@0: leaveStubFrame(masm, true); michael@0: michael@0: return true; michael@0: } michael@0: michael@0: bool michael@0: ICGetElemNativeCompiler::generateStubCode(MacroAssembler &masm) michael@0: { michael@0: Label failure; michael@0: Label failurePopR1; michael@0: bool popR1 = false; michael@0: michael@0: masm.branchTestObject(Assembler::NotEqual, R0, &failure); michael@0: masm.branchTestString(Assembler::NotEqual, R1, &failure); michael@0: michael@0: GeneralRegisterSet regs(availableGeneralRegs(2)); michael@0: Register scratchReg = regs.takeAny(); michael@0: michael@0: // Unbox object. michael@0: Register objReg = masm.extractObject(R0, ExtractTemp0); michael@0: michael@0: // Check object shape. michael@0: masm.loadPtr(Address(objReg, JSObject::offsetOfShape()), scratchReg); michael@0: Address shapeAddr(BaselineStubReg, ICGetElemNativeStub::offsetOfShape()); michael@0: masm.branchPtr(Assembler::NotEqual, shapeAddr, scratchReg, &failure); michael@0: michael@0: // Check key identity. Don't automatically fail if this fails, since the incoming michael@0: // key may be a non-interned string. Switch to a slow-path VM-call based check. michael@0: Address nameAddr(BaselineStubReg, ICGetElemNativeStub::offsetOfName()); michael@0: Register strExtract = masm.extractString(R1, ExtractTemp1); michael@0: michael@0: // If needsAtomize_ is true, and the string is not already an atom, then atomize the michael@0: // string before proceeding. michael@0: if (needsAtomize_) { michael@0: Label skipAtomize; michael@0: michael@0: // If string is already an atom, skip the atomize.
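michael@0: // (Atoms are flagged in their lengthAndFlags word, so testing ATOM_BIT inline michael@0: // lets already-atomized keys skip the DoAtomizeString VM call below.)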
michael@0: masm.branchTestPtr(Assembler::NonZero, michael@0: Address(strExtract, JSString::offsetOfLengthAndFlags()), michael@0: Imm32(JSString::ATOM_BIT), michael@0: &skipAtomize); michael@0: michael@0: // Stow R0. michael@0: EmitStowICValues(masm, 1); michael@0: michael@0: enterStubFrame(masm, R0.scratchReg()); michael@0: michael@0: // Atomize the string into a new value. michael@0: masm.push(strExtract); michael@0: if (!callVM(DoAtomizeStringInfo, masm)) michael@0: return false; michael@0: michael@0: // Atomized string is now in JSReturnOperand (R0). michael@0: // Leave stub frame, move atomized string into R1. michael@0: JS_ASSERT(R0 == JSReturnOperand); michael@0: leaveStubFrame(masm); michael@0: masm.moveValue(JSReturnOperand, R1); michael@0: michael@0: // Unstow R0. michael@0: EmitUnstowICValues(masm, 1); michael@0: michael@0: // Extract string from R1 again. michael@0: DebugOnly<Register> strExtract2 = masm.extractString(R1, ExtractTemp1); michael@0: JS_ASSERT(Register(strExtract2) == strExtract); michael@0: michael@0: masm.bind(&skipAtomize); michael@0: } michael@0: michael@0: // Since this stub sometimes enters a stub frame, we manually set this to true (lie). michael@0: #ifdef DEBUG michael@0: entersStubFrame_ = true; michael@0: #endif michael@0: michael@0: // Key has been atomized if necessary. Do identity check on string pointer. michael@0: masm.branchPtr(Assembler::NotEqual, nameAddr, strExtract, &failure); michael@0: michael@0: Register holderReg; michael@0: if (obj_ == holder_) { michael@0: holderReg = objReg; michael@0: } else { michael@0: // Shape guard holder. michael@0: if (regs.empty()) { michael@0: masm.push(R1.scratchReg()); michael@0: popR1 = true; michael@0: holderReg = R1.scratchReg(); michael@0: } else { michael@0: holderReg = regs.takeAny(); michael@0: } michael@0: michael@0: if (kind == ICStub::GetElem_NativePrototypeCallNative || michael@0: kind == ICStub::GetElem_NativePrototypeCallScripted) michael@0: { michael@0: masm.loadPtr(Address(BaselineStubReg, michael@0: ICGetElemNativePrototypeCallStub::offsetOfHolder()), michael@0: holderReg); michael@0: masm.loadPtr(Address(BaselineStubReg, michael@0: ICGetElemNativePrototypeCallStub::offsetOfHolderShape()), michael@0: scratchReg); michael@0: } else { michael@0: masm.loadPtr(Address(BaselineStubReg, michael@0: ICGetElem_NativePrototypeSlot::offsetOfHolder()), michael@0: holderReg); michael@0: masm.loadPtr(Address(BaselineStubReg, michael@0: ICGetElem_NativePrototypeSlot::offsetOfHolderShape()), michael@0: scratchReg); michael@0: } michael@0: masm.branchTestObjShape(Assembler::NotEqual, holderReg, scratchReg, michael@0: popR1 ? &failurePopR1 : &failure); michael@0: } michael@0: michael@0: if (acctype_ == ICGetElemNativeStub::DynamicSlot || michael@0: acctype_ == ICGetElemNativeStub::FixedSlot) michael@0: { michael@0: masm.load32(Address(BaselineStubReg, ICGetElemNativeSlotStub::offsetOfOffset()), michael@0: scratchReg); michael@0: michael@0: // Load from object. michael@0: if (acctype_ == ICGetElemNativeStub::DynamicSlot) michael@0: masm.addPtr(Address(holderReg, JSObject::offsetOfSlots()), scratchReg); michael@0: else michael@0: masm.addPtr(holderReg, scratchReg); michael@0: michael@0: Address valAddr(scratchReg, 0); michael@0: michael@0: // Check if __noSuchMethod__ needs to be called.
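michael@0: // (Editorial summary: for JSOP_CALLELEM, fetching an undefined callee may need michael@0: // to invoke the object's __noSuchMethod__ hook, so undefined slot values take michael@0: // a VM call here instead of being returned directly.)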
michael@0: #if JS_HAS_NO_SUCH_METHOD michael@0: if (isCallElem_) { michael@0: Label afterNoSuchMethod; michael@0: Label skipNoSuchMethod; michael@0: michael@0: masm.branchTestUndefined(Assembler::NotEqual, valAddr, &skipNoSuchMethod); michael@0: michael@0: GeneralRegisterSet regs = availableGeneralRegs(0); michael@0: regs.take(R1); michael@0: regs.take(R0); michael@0: regs.takeUnchecked(objReg); michael@0: if (popR1) michael@0: masm.pop(R1.scratchReg()); michael@0: michael@0: // Box and push obj and key onto baseline frame stack for decompiler. michael@0: masm.tagValue(JSVAL_TYPE_OBJECT, objReg, R0); michael@0: EmitStowICValues(masm, 2); michael@0: michael@0: regs.add(R0); michael@0: regs.takeUnchecked(objReg); michael@0: michael@0: enterStubFrame(masm, regs.getAnyExcluding(BaselineTailCallReg)); michael@0: michael@0: masm.pushValue(R1); michael@0: masm.push(objReg); michael@0: if (!callVM(LookupNoSuchMethodHandlerInfo, masm)) michael@0: return false; michael@0: michael@0: leaveStubFrame(masm); michael@0: michael@0: // Pop pushed obj and key from baseline stack. michael@0: EmitUnstowICValues(masm, 2, /* discard = */ true); michael@0: michael@0: // Result is already in R0 michael@0: masm.jump(&afterNoSuchMethod); michael@0: masm.bind(&skipNoSuchMethod); michael@0: michael@0: if (popR1) michael@0: masm.pop(R1.scratchReg()); michael@0: masm.loadValue(valAddr, R0); michael@0: masm.bind(&afterNoSuchMethod); michael@0: } else { michael@0: masm.loadValue(valAddr, R0); michael@0: if (popR1) michael@0: masm.addPtr(ImmWord(sizeof(size_t)), BaselineStackReg); michael@0: } michael@0: #else michael@0: masm.loadValue(valAddr, R0); michael@0: if (popR1) michael@0: masm.addPtr(ImmWord(sizeof(size_t)), BaselineStackReg); michael@0: #endif michael@0: michael@0: } else { michael@0: JS_ASSERT(acctype_ == ICGetElemNativeStub::NativeGetter || michael@0: acctype_ == ICGetElemNativeStub::ScriptedGetter); michael@0: JS_ASSERT(kind == ICStub::GetElem_NativePrototypeCallNative || michael@0: kind == ICStub::GetElem_NativePrototypeCallScripted); michael@0: michael@0: if (acctype_ == ICGetElemNativeStub::NativeGetter) { michael@0: // If calling a native getter, there is no chance of failure now. michael@0: michael@0: // GetElem key (R1) is no longer needed. michael@0: if (popR1) michael@0: masm.addPtr(ImmWord(sizeof(size_t)), BaselineStackReg); michael@0: michael@0: emitCallNative(masm, objReg); michael@0: michael@0: } else { michael@0: JS_ASSERT(acctype_ == ICGetElemNativeStub::ScriptedGetter); michael@0: michael@0: // Load function in scratchReg and ensure that it has a jit script. michael@0: masm.loadPtr(Address(BaselineStubReg, ICGetElemNativeGetterStub::offsetOfGetter()), michael@0: scratchReg); michael@0: masm.branchIfFunctionHasNoScript(scratchReg, popR1 ? &failurePopR1 : &failure); michael@0: masm.loadPtr(Address(scratchReg, JSFunction::offsetOfNativeOrScript()), scratchReg); michael@0: masm.loadBaselineOrIonRaw(scratchReg, scratchReg, SequentialExecution, michael@0: popR1 ? &failurePopR1 : &failure); michael@0: michael@0: // At this point, we are guaranteed to successfully complete. michael@0: if (popR1) michael@0: masm.addPtr(Imm32(sizeof(size_t)), BaselineStackReg); michael@0: michael@0: emitCallScripted(masm, objReg); michael@0: } michael@0: } michael@0: michael@0: // Enter type monitor IC to type-check result. 
michael@0: EmitEnterTypeMonitorIC(masm); michael@0: michael@0: // Failure case - jump to next stub michael@0: if (popR1) { michael@0: masm.bind(&failurePopR1); michael@0: masm.pop(R1.scratchReg()); michael@0: } michael@0: masm.bind(&failure); michael@0: EmitStubGuardFailure(masm); michael@0: michael@0: return true; michael@0: } michael@0: michael@0: // michael@0: // GetElem_String michael@0: // michael@0: michael@0: bool michael@0: ICGetElem_String::Compiler::generateStubCode(MacroAssembler &masm) michael@0: { michael@0: Label failure; michael@0: masm.branchTestString(Assembler::NotEqual, R0, &failure); michael@0: masm.branchTestInt32(Assembler::NotEqual, R1, &failure); michael@0: michael@0: GeneralRegisterSet regs(availableGeneralRegs(2)); michael@0: Register scratchReg = regs.takeAny(); michael@0: michael@0: // Unbox string in R0. michael@0: Register str = masm.extractString(R0, ExtractTemp0); michael@0: michael@0: // Load string lengthAndFlags michael@0: Address lengthAndFlagsAddr(str, JSString::offsetOfLengthAndFlags()); michael@0: masm.loadPtr(lengthAndFlagsAddr, scratchReg); michael@0: michael@0: // Check for non-linear strings. michael@0: masm.branchTest32(Assembler::Zero, scratchReg, Imm32(JSString::FLAGS_MASK), &failure); michael@0: michael@0: // Unbox key. michael@0: Register key = masm.extractInt32(R1, ExtractTemp1); michael@0: michael@0: // Extract length and bounds check. michael@0: masm.rshiftPtr(Imm32(JSString::LENGTH_SHIFT), scratchReg); michael@0: masm.branch32(Assembler::BelowOrEqual, scratchReg, key, &failure); michael@0: michael@0: // Get char code. michael@0: Address charsAddr(str, JSString::offsetOfChars()); michael@0: masm.loadPtr(charsAddr, scratchReg); michael@0: masm.load16ZeroExtend(BaseIndex(scratchReg, key, TimesTwo, 0), scratchReg); michael@0: michael@0: // Check if char code >= UNIT_STATIC_LIMIT. michael@0: masm.branch32(Assembler::AboveOrEqual, scratchReg, Imm32(StaticStrings::UNIT_STATIC_LIMIT), michael@0: &failure); michael@0: michael@0: // Load static string. michael@0: masm.movePtr(ImmPtr(&cx->staticStrings().unitStaticTable), str); michael@0: masm.loadPtr(BaseIndex(str, scratchReg, ScalePointer), str); michael@0: michael@0: // Return. michael@0: masm.tagValue(JSVAL_TYPE_STRING, str, R0); michael@0: EmitReturnFromIC(masm); michael@0: michael@0: // Failure case - jump to next stub michael@0: masm.bind(&failure); michael@0: EmitStubGuardFailure(masm); michael@0: return true; michael@0: } michael@0: michael@0: // michael@0: // GetElem_Dense michael@0: // michael@0: michael@0: bool michael@0: ICGetElem_Dense::Compiler::generateStubCode(MacroAssembler &masm) michael@0: { michael@0: Label failure; michael@0: masm.branchTestObject(Assembler::NotEqual, R0, &failure); michael@0: masm.branchTestInt32(Assembler::NotEqual, R1, &failure); michael@0: michael@0: GeneralRegisterSet regs(availableGeneralRegs(2)); michael@0: Register scratchReg = regs.takeAny(); michael@0: michael@0: // Unbox R0 and shape guard. michael@0: Register obj = masm.extractObject(R0, ExtractTemp0); michael@0: masm.loadPtr(Address(BaselineStubReg, ICGetElem_Dense::offsetOfShape()), scratchReg); michael@0: masm.branchTestObjShape(Assembler::NotEqual, obj, scratchReg, &failure); michael@0: michael@0: // Load obj->elements. michael@0: masm.loadPtr(Address(obj, JSObject::offsetOfElements()), scratchReg); michael@0: michael@0: // Unbox key. michael@0: Register key = masm.extractInt32(R1, ExtractTemp1); michael@0: michael@0: // Bounds check. 
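michael@0: // (Note: dense fast paths compare the key against the initialized length michael@0: // rather than the array length, since elements past initializedLength are michael@0: // not safe to read.)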
michael@0: Address initLength(scratchReg, ObjectElements::offsetOfInitializedLength()); michael@0: masm.branch32(Assembler::BelowOrEqual, initLength, key, &failure); michael@0: michael@0: // Hole check and load value. michael@0: JS_STATIC_ASSERT(sizeof(Value) == 8); michael@0: BaseIndex element(scratchReg, key, TimesEight); michael@0: masm.branchTestMagic(Assembler::Equal, element, &failure); michael@0: michael@0: // Check if __noSuchMethod__ should be called. michael@0: #if JS_HAS_NO_SUCH_METHOD michael@0: #ifdef DEBUG michael@0: entersStubFrame_ = true; michael@0: #endif michael@0: if (isCallElem_) { michael@0: Label afterNoSuchMethod; michael@0: Label skipNoSuchMethod; michael@0: regs = availableGeneralRegs(0); michael@0: regs.takeUnchecked(obj); michael@0: regs.takeUnchecked(key); michael@0: regs.takeUnchecked(BaselineTailCallReg); michael@0: ValueOperand val = regs.takeValueOperand(); michael@0: michael@0: masm.loadValue(element, val); michael@0: masm.branchTestUndefined(Assembler::NotEqual, val, &skipNoSuchMethod); michael@0: michael@0: // Box and push obj and key onto baseline frame stack for decompiler. michael@0: EmitRestoreTailCallReg(masm); michael@0: masm.tagValue(JSVAL_TYPE_OBJECT, obj, val); michael@0: masm.pushValue(val); michael@0: masm.tagValue(JSVAL_TYPE_INT32, key, val); michael@0: masm.pushValue(val); michael@0: EmitRepushTailCallReg(masm); michael@0: michael@0: regs.add(val); michael@0: michael@0: // Call __noSuchMethod__ checker. Object pointer is in objReg. michael@0: enterStubFrame(masm, regs.getAnyExcluding(BaselineTailCallReg)); michael@0: michael@0: regs.take(val); michael@0: michael@0: masm.tagValue(JSVAL_TYPE_INT32, key, val); michael@0: masm.pushValue(val); michael@0: masm.push(obj); michael@0: if (!callVM(LookupNoSuchMethodHandlerInfo, masm)) michael@0: return false; michael@0: michael@0: leaveStubFrame(masm); michael@0: michael@0: // Pop pushed obj and key from baseline stack. michael@0: EmitUnstowICValues(masm, 2, /* discard = */ true); michael@0: michael@0: // Result is already in R0 michael@0: masm.jump(&afterNoSuchMethod); michael@0: masm.bind(&skipNoSuchMethod); michael@0: michael@0: masm.moveValue(val, R0); michael@0: masm.bind(&afterNoSuchMethod); michael@0: } else { michael@0: masm.loadValue(element, R0); michael@0: } michael@0: #else michael@0: // Load value from element location. michael@0: masm.loadValue(element, R0); michael@0: #endif michael@0: michael@0: // Enter type monitor IC to type-check result. michael@0: EmitEnterTypeMonitorIC(masm); michael@0: michael@0: // Failure case - jump to next stub michael@0: masm.bind(&failure); michael@0: EmitStubGuardFailure(masm); michael@0: return true; michael@0: } michael@0: michael@0: // michael@0: // GetElem_TypedArray michael@0: // michael@0: michael@0: bool michael@0: ICGetElem_TypedArray::Compiler::generateStubCode(MacroAssembler &masm) michael@0: { michael@0: Label failure; michael@0: masm.branchTestObject(Assembler::NotEqual, R0, &failure); michael@0: michael@0: GeneralRegisterSet regs(availableGeneralRegs(2)); michael@0: Register scratchReg = regs.takeAny(); michael@0: michael@0: // Unbox R0 and shape guard. michael@0: Register obj = masm.extractObject(R0, ExtractTemp0); michael@0: masm.loadPtr(Address(BaselineStubReg, ICGetElem_TypedArray::offsetOfShape()), scratchReg); michael@0: masm.branchTestObjShape(Assembler::NotEqual, obj, scratchReg, &failure); michael@0: michael@0: // Ensure the index is an integer. 
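michael@0:     // (With floating-point support, a double key such as |ta[1.0]| is
michael@0:     // converted to an int32 below; without it, only int32 keys are accepted.)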
michael@0:     if (cx->runtime()->jitSupportsFloatingPoint) {
michael@0:         Label isInt32;
michael@0:         masm.branchTestInt32(Assembler::Equal, R1, &isInt32);
michael@0:         {
michael@0:             // If the index is a double, try to convert it to int32. It's okay
michael@0:             // to convert -0 to 0: the shape check ensures the object is a typed
michael@0:             // array so the difference is not observable.
michael@0:             masm.branchTestDouble(Assembler::NotEqual, R1, &failure);
michael@0:             masm.unboxDouble(R1, FloatReg0);
michael@0:             masm.convertDoubleToInt32(FloatReg0, scratchReg, &failure, /* negZeroCheck = */false);
michael@0:             masm.tagValue(JSVAL_TYPE_INT32, scratchReg, R1);
michael@0:         }
michael@0:         masm.bind(&isInt32);
michael@0:     } else {
michael@0:         masm.branchTestInt32(Assembler::NotEqual, R1, &failure);
michael@0:     }
michael@0: 
michael@0:     // Unbox key.
michael@0:     Register key = masm.extractInt32(R1, ExtractTemp1);
michael@0: 
michael@0:     // Bounds check.
michael@0:     masm.unboxInt32(Address(obj, TypedArrayObject::lengthOffset()), scratchReg);
michael@0:     masm.branch32(Assembler::BelowOrEqual, scratchReg, key, &failure);
michael@0: 
michael@0:     // Load the elements vector.
michael@0:     masm.loadPtr(Address(obj, TypedArrayObject::dataOffset()), scratchReg);
michael@0: 
michael@0:     // Load the value.
michael@0:     BaseIndex source(scratchReg, key, ScaleFromElemWidth(TypedArrayObject::slotWidth(type_)));
michael@0:     masm.loadFromTypedArray(type_, source, R0, false, scratchReg, &failure);
michael@0: 
michael@0:     // TODO: Allow loading doubles from uint32 arrays, but this requires monitoring.
michael@0:     EmitReturnFromIC(masm);
michael@0: 
michael@0:     // Failure case - jump to next stub
michael@0:     masm.bind(&failure);
michael@0:     EmitStubGuardFailure(masm);
michael@0:     return true;
michael@0: }
michael@0: 
michael@0: //
michael@0: // GetElem_Arguments
michael@0: //
michael@0: bool
michael@0: ICGetElem_Arguments::Compiler::generateStubCode(MacroAssembler &masm)
michael@0: {
michael@0:     // Variants of GetElem_Arguments can enter stub frames if entered in CallProp
michael@0:     // context when noSuchMethod support is on.
michael@0: #if JS_HAS_NO_SUCH_METHOD
michael@0: #ifdef DEBUG
michael@0:     entersStubFrame_ = true;
michael@0: #endif
michael@0: #endif
michael@0: 
michael@0:     Label failure;
michael@0:     if (which_ == ICGetElem_Arguments::Magic) {
michael@0:         JS_ASSERT(!isCallElem_);
michael@0: 
michael@0:         // Ensure that this is a magic arguments value.
michael@0:         masm.branchTestMagicValue(Assembler::NotEqual, R0, JS_OPTIMIZED_ARGUMENTS, &failure);
michael@0: 
michael@0:         // Ensure that the frame has not loaded a different arguments object since.
michael@0:         masm.branchTest32(Assembler::NonZero,
michael@0:                           Address(BaselineFrameReg, BaselineFrame::reverseOffsetOfFlags()),
michael@0:                           Imm32(BaselineFrame::HAS_ARGS_OBJ),
michael@0:                           &failure);
michael@0: 
michael@0:         // Ensure that the index is an integer.
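michael@0:         // (The Magic variant serves reads like |arguments[i]| in scripts
michael@0:         // that never materialize a real arguments object, so the element
michael@0:         // can be loaded straight from the frame's actual arguments.)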
michael@0: masm.branchTestInt32(Assembler::NotEqual, R1, &failure); michael@0: Register idx = masm.extractInt32(R1, ExtractTemp1); michael@0: michael@0: GeneralRegisterSet regs(availableGeneralRegs(2)); michael@0: Register scratch = regs.takeAny(); michael@0: michael@0: // Load num actual arguments michael@0: Address actualArgs(BaselineFrameReg, BaselineFrame::offsetOfNumActualArgs()); michael@0: masm.loadPtr(actualArgs, scratch); michael@0: michael@0: // Ensure idx < argc michael@0: masm.branch32(Assembler::AboveOrEqual, idx, scratch, &failure); michael@0: michael@0: // Load argval michael@0: JS_STATIC_ASSERT(sizeof(Value) == 8); michael@0: masm.movePtr(BaselineFrameReg, scratch); michael@0: masm.addPtr(Imm32(BaselineFrame::offsetOfArg(0)), scratch); michael@0: BaseIndex element(scratch, idx, TimesEight); michael@0: masm.loadValue(element, R0); michael@0: michael@0: // Enter type monitor IC to type-check result. michael@0: EmitEnterTypeMonitorIC(masm); michael@0: michael@0: masm.bind(&failure); michael@0: EmitStubGuardFailure(masm); michael@0: return true; michael@0: } michael@0: michael@0: JS_ASSERT(which_ == ICGetElem_Arguments::Strict || michael@0: which_ == ICGetElem_Arguments::Normal); michael@0: michael@0: bool isStrict = which_ == ICGetElem_Arguments::Strict; michael@0: const Class *clasp = isStrict ? &StrictArgumentsObject::class_ : &NormalArgumentsObject::class_; michael@0: michael@0: GeneralRegisterSet regs(availableGeneralRegs(2)); michael@0: Register scratchReg = regs.takeAny(); michael@0: michael@0: // Guard on input being an arguments object. michael@0: masm.branchTestObject(Assembler::NotEqual, R0, &failure); michael@0: Register objReg = masm.extractObject(R0, ExtractTemp0); michael@0: masm.branchTestObjClass(Assembler::NotEqual, objReg, scratchReg, clasp, &failure); michael@0: michael@0: // Guard on index being int32 michael@0: masm.branchTestInt32(Assembler::NotEqual, R1, &failure); michael@0: Register idxReg = masm.extractInt32(R1, ExtractTemp1); michael@0: michael@0: // Get initial ArgsObj length value. michael@0: masm.unboxInt32(Address(objReg, ArgumentsObject::getInitialLengthSlotOffset()), scratchReg); michael@0: michael@0: // Test if length has been overridden. michael@0: masm.branchTest32(Assembler::NonZero, michael@0: scratchReg, michael@0: Imm32(ArgumentsObject::LENGTH_OVERRIDDEN_BIT), michael@0: &failure); michael@0: michael@0: // Length has not been overridden, ensure that R1 is an integer and is <= length. michael@0: masm.rshiftPtr(Imm32(ArgumentsObject::PACKED_BITS_COUNT), scratchReg); michael@0: masm.branch32(Assembler::AboveOrEqual, idxReg, scratchReg, &failure); michael@0: michael@0: // Length check succeeded, now check the correct bit. We clobber potential type regs michael@0: // now. Inputs will have to be reconstructed if we fail after this point, but that's michael@0: // unlikely. 
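michael@0:     // (The "correct bit" lives in the deletedBits array loaded below: a
michael@0:     // |delete arguments[i]| marks its element there, and such objects must
michael@0:     // not take this fast path.)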
michael@0:     Label failureReconstructInputs;
michael@0:     regs = availableGeneralRegs(0);
michael@0:     regs.takeUnchecked(objReg);
michael@0:     regs.takeUnchecked(idxReg);
michael@0:     regs.take(scratchReg);
michael@0:     Register argData = regs.takeAny();
michael@0:     Register tempReg = regs.takeAny();
michael@0: 
michael@0:     // Load ArgumentsData
michael@0:     masm.loadPrivate(Address(objReg, ArgumentsObject::getDataSlotOffset()), argData);
michael@0: 
michael@0:     // Load deletedBits bitArray pointer into scratchReg
michael@0:     masm.loadPtr(Address(argData, offsetof(ArgumentsData, deletedBits)), scratchReg);
michael@0: 
michael@0:     // In tempReg, calculate index of word containing bit: (idx >> logBitsPerWord)
michael@0:     masm.movePtr(idxReg, tempReg);
michael@0:     const uint32_t shift = mozilla::tl::FloorLog2<(sizeof(size_t) * JS_BITS_PER_BYTE)>::value;
michael@0:     JS_ASSERT(shift == 5 || shift == 6);
michael@0:     masm.rshiftPtr(Imm32(shift), tempReg);
michael@0:     masm.loadPtr(BaseIndex(scratchReg, tempReg, ScaleFromElemWidth(sizeof(size_t))), scratchReg);
michael@0: 
michael@0:     // Don't bother testing the specific bit; if any bit in the word is set, fail.
michael@0:     masm.branchPtr(Assembler::NotEqual, scratchReg, ImmPtr(nullptr), &failureReconstructInputs);
michael@0: 
michael@0:     // Load the value. Use scratchReg and tempReg to form a ValueOperand to load into.
michael@0:     masm.addPtr(Imm32(ArgumentsData::offsetOfArgs()), argData);
michael@0:     regs.add(scratchReg);
michael@0:     regs.add(tempReg);
michael@0:     ValueOperand tempVal = regs.takeAnyValue();
michael@0:     masm.loadValue(BaseIndex(argData, idxReg, ScaleFromElemWidth(sizeof(Value))), tempVal);
michael@0: 
michael@0:     // Make sure that this is not a FORWARD_TO_CALL_SLOT magic value.
michael@0:     masm.branchTestMagic(Assembler::Equal, tempVal, &failureReconstructInputs);
michael@0: 
michael@0: #if JS_HAS_NO_SUCH_METHOD
michael@0:     if (isCallElem_) {
michael@0:         Label afterNoSuchMethod;
michael@0:         Label skipNoSuchMethod;
michael@0: 
michael@0:         masm.branchTestUndefined(Assembler::NotEqual, tempVal, &skipNoSuchMethod);
michael@0: 
michael@0:         // Call __noSuchMethod__ checker. Object pointer is in objReg.
michael@0:         regs = availableGeneralRegs(0);
michael@0:         regs.takeUnchecked(objReg);
michael@0:         regs.takeUnchecked(idxReg);
michael@0:         regs.takeUnchecked(BaselineTailCallReg);
michael@0:         ValueOperand val = regs.takeValueOperand();
michael@0: 
michael@0:         // Box and push obj and key onto baseline frame stack for decompiler.
michael@0:         EmitRestoreTailCallReg(masm);
michael@0:         masm.tagValue(JSVAL_TYPE_OBJECT, objReg, val);
michael@0:         masm.pushValue(val);
michael@0:         masm.tagValue(JSVAL_TYPE_INT32, idxReg, val);
michael@0:         masm.pushValue(val);
michael@0:         EmitRepushTailCallReg(masm);
michael@0: 
michael@0:         regs.add(val);
michael@0:         enterStubFrame(masm, regs.getAnyExcluding(BaselineTailCallReg));
michael@0:         regs.take(val);
michael@0: 
michael@0:         masm.pushValue(val);
michael@0:         masm.push(objReg);
michael@0:         if (!callVM(LookupNoSuchMethodHandlerInfo, masm))
michael@0:             return false;
michael@0: 
michael@0:         leaveStubFrame(masm);
michael@0: 
michael@0:         // Pop pushed obj and key from baseline stack.
michael@0:         EmitUnstowICValues(masm, 2, /* discard = */ true);
michael@0: 
michael@0:         // Result is already in R0
michael@0:         masm.jump(&afterNoSuchMethod);
michael@0:         masm.bind(&skipNoSuchMethod);
michael@0: 
michael@0:         masm.moveValue(tempVal, R0);
michael@0:         masm.bind(&afterNoSuchMethod);
michael@0:     } else {
michael@0:         masm.moveValue(tempVal, R0);
michael@0:     }
michael@0: #else
michael@0:     // Copy value from temp to R0.
michael@0:     masm.moveValue(tempVal, R0);
michael@0: #endif
michael@0: 
michael@0:     // Type-check result
michael@0:     EmitEnterTypeMonitorIC(masm);
michael@0: 
michael@0:     // Failed, but inputs are deconstructed into object and int, and need to be
michael@0:     // reconstructed into values.
michael@0:     masm.bind(&failureReconstructInputs);
michael@0:     masm.tagValue(JSVAL_TYPE_OBJECT, objReg, R0);
michael@0:     masm.tagValue(JSVAL_TYPE_INT32, idxReg, R1);
michael@0: 
michael@0:     masm.bind(&failure);
michael@0:     EmitStubGuardFailure(masm);
michael@0:     return true;
michael@0: }
michael@0: 
michael@0: //
michael@0: // SetElem_Fallback
michael@0: //
michael@0: 
michael@0: static bool
michael@0: SetElemDenseAddHasSameShapes(ICSetElem_DenseAdd *stub, JSObject *obj)
michael@0: {
michael@0:     size_t numShapes = stub->protoChainDepth() + 1;
michael@0:     for (size_t i = 0; i < numShapes; i++) {
michael@0:         static const size_t MAX_DEPTH = ICSetElem_DenseAdd::MAX_PROTO_CHAIN_DEPTH;
michael@0:         if (obj->lastProperty() != stub->toImplUnchecked<MAX_DEPTH>()->shape(i))
michael@0:             return false;
michael@0:         obj = obj->getProto();
michael@0:         if (!obj && i != numShapes - 1)
michael@0:             return false;
michael@0:     }
michael@0: 
michael@0:     return true;
michael@0: }
michael@0: 
michael@0: static bool
michael@0: DenseSetElemStubExists(JSContext *cx, ICStub::Kind kind, ICSetElem_Fallback *stub, HandleObject obj)
michael@0: {
michael@0:     JS_ASSERT(kind == ICStub::SetElem_Dense || kind == ICStub::SetElem_DenseAdd);
michael@0: 
michael@0:     for (ICStubConstIterator iter = stub->beginChainConst(); !iter.atEnd(); iter++) {
michael@0:         if (kind == ICStub::SetElem_Dense && iter->isSetElem_Dense()) {
michael@0:             ICSetElem_Dense *dense = iter->toSetElem_Dense();
michael@0:             if (obj->lastProperty() == dense->shape() && obj->getType(cx) == dense->type())
michael@0:                 return true;
michael@0:         }
michael@0: 
michael@0:         if (kind == ICStub::SetElem_DenseAdd && iter->isSetElem_DenseAdd()) {
michael@0:             ICSetElem_DenseAdd *dense = iter->toSetElem_DenseAdd();
michael@0:             if (obj->getType(cx) == dense->type() && SetElemDenseAddHasSameShapes(dense, obj))
michael@0:                 return true;
michael@0:         }
michael@0:     }
michael@0:     return false;
michael@0: }
michael@0: 
michael@0: static bool
michael@0: TypedArraySetElemStubExists(ICSetElem_Fallback *stub, HandleObject obj, bool expectOOB)
michael@0: {
michael@0:     for (ICStubConstIterator iter = stub->beginChainConst(); !iter.atEnd(); iter++) {
michael@0:         if (!iter->isSetElem_TypedArray())
michael@0:             continue;
michael@0:         ICSetElem_TypedArray *taStub = iter->toSetElem_TypedArray();
michael@0:         if (obj->lastProperty() == taStub->shape() && taStub->expectOutOfBounds() == expectOOB)
michael@0:             return true;
michael@0:     }
michael@0:     return false;
michael@0: }
michael@0: 
michael@0: static bool
michael@0: RemoveExistingTypedArraySetElemStub(JSContext *cx, ICSetElem_Fallback *stub, HandleObject obj)
michael@0: {
michael@0:     for (ICStubIterator iter = stub->beginChain(); !iter.atEnd(); iter++) {
michael@0:         if (!iter->isSetElem_TypedArray())
michael@0:             continue;
michael@0: 
michael@0:         if (obj->lastProperty() != iter->toSetElem_TypedArray()->shape())
michael@0:             continue;
michael@0: 
michael@0:         // TypedArraySetElem stubs are only removed using this procedure if
michael@0:         // being replaced with one that expects an out-of-bounds index.
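michael@0:         // (That is, once an out-of-bounds store such as |ta[ta.length] = v|
michael@0:         // is seen, the old in-bounds-only stub is unlinked before the new
michael@0:         // one is attached, which is why the assert below must hold.)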
michael@0:         JS_ASSERT(!iter->toSetElem_TypedArray()->expectOutOfBounds());
michael@0:         iter.unlink(cx);
michael@0:         return true;
michael@0:     }
michael@0:     return false;
michael@0: }
michael@0: 
michael@0: static bool
michael@0: CanOptimizeDenseSetElem(JSContext *cx, HandleObject obj, uint32_t index,
michael@0:                         HandleShape oldShape, uint32_t oldCapacity, uint32_t oldInitLength,
michael@0:                         bool *isAddingCaseOut, size_t *protoDepthOut)
michael@0: {
michael@0:     uint32_t initLength = obj->getDenseInitializedLength();
michael@0:     uint32_t capacity = obj->getDenseCapacity();
michael@0: 
michael@0:     *isAddingCaseOut = false;
michael@0:     *protoDepthOut = 0;
michael@0: 
michael@0:     // Some initial sanity checks.
michael@0:     if (initLength < oldInitLength || capacity < oldCapacity)
michael@0:         return false;
michael@0: 
michael@0:     RootedShape shape(cx, obj->lastProperty());
michael@0: 
michael@0:     // Cannot optimize if the shape changed.
michael@0:     if (oldShape != shape)
michael@0:         return false;
michael@0: 
michael@0:     // Cannot optimize if the capacity changed.
michael@0:     if (oldCapacity != capacity)
michael@0:         return false;
michael@0: 
michael@0:     // Cannot optimize if the index doesn't fit within the new initialized length.
michael@0:     if (index >= initLength)
michael@0:         return false;
michael@0: 
michael@0:     // Cannot optimize if the value at the position after the set is a hole.
michael@0:     if (!obj->containsDenseElement(index))
michael@0:         return false;
michael@0: 
michael@0:     // At this point, if we know that the initLength did not change, then
michael@0:     // an optimized set is possible.
michael@0:     if (oldInitLength == initLength)
michael@0:         return true;
michael@0: 
michael@0:     // If it did change, ensure that it changed specifically by incrementing by 1
michael@0:     // to accommodate this particular indexed set.
michael@0:     if (oldInitLength + 1 != initLength)
michael@0:         return false;
michael@0:     if (index != oldInitLength)
michael@0:         return false;
michael@0: 
michael@0:     // The checks so far are not complete: the object may still have a setter
michael@0:     // definition, either directly, on a prototype, or on the target object of
michael@0:     // a prototype that is a proxy, that handles this particular integer write.
michael@0:     // Scan the prototype and shape chain to make sure that this is not the case.
michael@0:     RootedObject curObj(cx, obj);
michael@0:     while (curObj) {
michael@0:         // Ensure object is native.
michael@0:         if (!curObj->isNative())
michael@0:             return false;
michael@0: 
michael@0:         // Ensure all indexed properties are stored in dense elements.
michael@0:         if (curObj->isIndexed())
michael@0:             return false;
michael@0: 
michael@0:         curObj = curObj->getProto();
michael@0:         if (curObj)
michael@0:             ++*protoDepthOut;
michael@0:     }
michael@0: 
michael@0:     if (*protoDepthOut > ICSetElem_DenseAdd::MAX_PROTO_CHAIN_DEPTH)
michael@0:         return false;
michael@0: 
michael@0:     *isAddingCaseOut = true;
michael@0: 
michael@0:     return true;
michael@0: }
michael@0: 
michael@0: static bool
michael@0: DoSetElemFallback(JSContext *cx, BaselineFrame *frame, ICSetElem_Fallback *stub_, Value *stack,
michael@0:                   HandleValue objv, HandleValue index, HandleValue rhs)
michael@0: {
michael@0:     // This fallback stub may trigger debug mode toggling.
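michael@0:     // (The volatile wrapper below re-reads the stub pointer across VM calls,
michael@0:     // since toggling debug mode can replace baseline stubs; stub.invalid()
michael@0:     // is checked after any operation that can trigger the toggle.)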
michael@0:     DebugModeOSRVolatileStub<ICSetElem_Fallback *> stub(frame, stub_);
michael@0: 
michael@0:     RootedScript script(cx, frame->script());
michael@0:     jsbytecode *pc = stub->icEntry()->pc(script);
michael@0:     JSOp op = JSOp(*pc);
michael@0:     FallbackICSpew(cx, stub, "SetElem(%s)", js_CodeName[JSOp(*pc)]);
michael@0: 
michael@0:     JS_ASSERT(op == JSOP_SETELEM ||
michael@0:               op == JSOP_INITELEM ||
michael@0:               op == JSOP_INITELEM_ARRAY);
michael@0: 
michael@0:     RootedObject obj(cx, ToObjectFromStack(cx, objv));
michael@0:     if (!obj)
michael@0:         return false;
michael@0: 
michael@0:     RootedShape oldShape(cx, obj->lastProperty());
michael@0: 
michael@0:     // Check the old capacity
michael@0:     uint32_t oldCapacity = 0;
michael@0:     uint32_t oldInitLength = 0;
michael@0:     if (obj->isNative() && index.isInt32() && index.toInt32() >= 0) {
michael@0:         oldCapacity = obj->getDenseCapacity();
michael@0:         oldInitLength = obj->getDenseInitializedLength();
michael@0:     }
michael@0: 
michael@0:     if (op == JSOP_INITELEM) {
michael@0:         if (!InitElemOperation(cx, obj, index, rhs))
michael@0:             return false;
michael@0:     } else if (op == JSOP_INITELEM_ARRAY) {
michael@0:         JS_ASSERT(uint32_t(index.toInt32()) == GET_UINT24(pc));
michael@0:         if (!InitArrayElemOperation(cx, pc, obj, index.toInt32(), rhs))
michael@0:             return false;
michael@0:     } else {
michael@0:         if (!SetObjectElement(cx, obj, index, rhs, script->strict(), script, pc))
michael@0:             return false;
michael@0:     }
michael@0: 
michael@0:     // Overwrite the object on the stack (pushed for the decompiler) with the rhs.
michael@0:     JS_ASSERT(stack[2] == objv);
michael@0:     stack[2] = rhs;
michael@0: 
michael@0:     // Check if debug mode toggling made the stub invalid.
michael@0:     if (stub.invalid())
michael@0:         return true;
michael@0: 
michael@0:     if (stub->numOptimizedStubs() >= ICSetElem_Fallback::MAX_OPTIMIZED_STUBS) {
michael@0:         // TODO: Discard all stubs in this IC and replace with inert megamorphic stub.
michael@0:         // But for now we just bail.
michael@0:         return true;
michael@0:     }
michael@0: 
michael@0:     // Try to generate new stubs.
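michael@0:     // Dense-element stores (e.g. |a[0] = v| on a plain native object) are
michael@0:     // considered first; typed-array stores are handled further below.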
michael@0:     if (obj->isNative() &&
michael@0:         !obj->is<TypedArrayObject>() &&
michael@0:         index.isInt32() && index.toInt32() >= 0 &&
michael@0:         !rhs.isMagic(JS_ELEMENTS_HOLE))
michael@0:     {
michael@0:         bool addingCase;
michael@0:         size_t protoDepth;
michael@0: 
michael@0:         if (CanOptimizeDenseSetElem(cx, obj, index.toInt32(), oldShape, oldCapacity, oldInitLength,
michael@0:                                     &addingCase, &protoDepth))
michael@0:         {
michael@0:             RootedShape shape(cx, obj->lastProperty());
michael@0:             RootedTypeObject type(cx, obj->getType(cx));
michael@0:             if (!type)
michael@0:                 return false;
michael@0: 
michael@0:             if (addingCase && !DenseSetElemStubExists(cx, ICStub::SetElem_DenseAdd, stub, obj)) {
michael@0:                 IonSpew(IonSpew_BaselineIC,
michael@0:                         "  Generating SetElem_DenseAdd stub "
michael@0:                         "(shape=%p, type=%p, protoDepth=%u)",
michael@0:                         obj->lastProperty(), type.get(), protoDepth);
michael@0:                 ICSetElemDenseAddCompiler compiler(cx, obj, protoDepth);
michael@0:                 ICUpdatedStub *denseStub = compiler.getStub(compiler.getStubSpace(script));
michael@0:                 if (!denseStub)
michael@0:                     return false;
michael@0:                 if (!denseStub->addUpdateStubForValue(cx, script, obj, JSID_VOIDHANDLE, rhs))
michael@0:                     return false;
michael@0: 
michael@0:                 stub->addNewStub(denseStub);
michael@0:             } else if (!addingCase &&
michael@0:                        !DenseSetElemStubExists(cx, ICStub::SetElem_Dense, stub, obj))
michael@0:             {
michael@0:                 IonSpew(IonSpew_BaselineIC,
michael@0:                         "  Generating SetElem_Dense stub (shape=%p, type=%p)",
michael@0:                         obj->lastProperty(), type.get());
michael@0:                 ICSetElem_Dense::Compiler compiler(cx, shape, type);
michael@0:                 ICUpdatedStub *denseStub = compiler.getStub(compiler.getStubSpace(script));
michael@0:                 if (!denseStub)
michael@0:                     return false;
michael@0:                 if (!denseStub->addUpdateStubForValue(cx, script, obj, JSID_VOIDHANDLE, rhs))
michael@0:                     return false;
michael@0: 
michael@0:                 stub->addNewStub(denseStub);
michael@0:             }
michael@0:         }
michael@0: 
michael@0:         return true;
michael@0:     }
michael@0: 
michael@0:     if (obj->is<TypedArrayObject>() && index.isNumber() && rhs.isNumber()) {
michael@0:         Rooted<TypedArrayObject*> tarr(cx, &obj->as<TypedArrayObject>());
michael@0:         if (!cx->runtime()->jitSupportsFloatingPoint &&
michael@0:             (TypedArrayRequiresFloatingPoint(tarr) || index.isDouble()))
michael@0:         {
michael@0:             return true;
michael@0:         }
michael@0: 
michael@0:         uint32_t len = tarr->length();
michael@0:         double idx = index.toNumber();
michael@0:         bool expectOutOfBounds = (idx < 0 || idx >= double(len));
michael@0: 
michael@0:         if (!TypedArraySetElemStubExists(stub, tarr, expectOutOfBounds)) {
michael@0:             // Remove any existing TypedArraySetElemStub that doesn't handle out-of-bounds
michael@0:             if (expectOutOfBounds)
michael@0:                 RemoveExistingTypedArraySetElemStub(cx, stub, tarr);
michael@0: 
michael@0:             IonSpew(IonSpew_BaselineIC,
michael@0:                     "  Generating SetElem_TypedArray stub (shape=%p, type=%u, oob=%s)",
michael@0:                     tarr->lastProperty(), tarr->type(), expectOutOfBounds ? "yes" : "no");
michael@0:             ICSetElem_TypedArray::Compiler compiler(cx, tarr->lastProperty(), tarr->type(),
michael@0:                                                     expectOutOfBounds);
michael@0:             ICStub *typedArrayStub = compiler.getStub(compiler.getStubSpace(script));
michael@0:             if (!typedArrayStub)
michael@0:                 return false;
michael@0: 
michael@0:             stub->addNewStub(typedArrayStub);
michael@0:             return true;
michael@0:         }
michael@0:     }
michael@0: 
michael@0:     return true;
michael@0: }
michael@0: 
michael@0: typedef bool (*DoSetElemFallbackFn)(JSContext *, BaselineFrame *, ICSetElem_Fallback *, Value *,
michael@0:                                     HandleValue, HandleValue, HandleValue);
michael@0: static const VMFunction DoSetElemFallbackInfo =
michael@0:     FunctionInfo<DoSetElemFallbackFn>(DoSetElemFallback, PopValues(2));
michael@0: 
michael@0: bool
michael@0: ICSetElem_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
michael@0: {
michael@0:     JS_ASSERT(R0 == JSReturnOperand);
michael@0: 
michael@0:     EmitRestoreTailCallReg(masm);
michael@0: 
michael@0:     // State: R0: object, R1: index, stack: rhs.
michael@0:     // For the decompiler, the stack has to be: object, index, rhs,
michael@0:     // so we push the index, then overwrite the rhs Value with R0
michael@0:     // and push the rhs value.
michael@0:     masm.pushValue(R1);
michael@0:     masm.loadValue(Address(BaselineStackReg, sizeof(Value)), R1);
michael@0:     masm.storeValue(R0, Address(BaselineStackReg, sizeof(Value)));
michael@0:     masm.pushValue(R1);
michael@0: 
michael@0:     // Push arguments.
michael@0:     masm.pushValue(R1); // RHS
michael@0: 
michael@0:     // Push index. On x86 and ARM two push instructions are emitted so use a
michael@0:     // separate register to store the old stack pointer.
michael@0:     masm.mov(BaselineStackReg, R1.scratchReg());
michael@0:     masm.pushValue(Address(R1.scratchReg(), 2 * sizeof(Value)));
michael@0:     masm.pushValue(R0); // Object.
michael@0: 
michael@0:     // Push pointer to stack values, so that the stub can overwrite the object
michael@0:     // (pushed for the decompiler) with the rhs.
michael@0:     masm.computeEffectiveAddress(Address(BaselineStackReg, 3 * sizeof(Value)), R0.scratchReg());
michael@0:     masm.push(R0.scratchReg());
michael@0: 
michael@0:     masm.push(BaselineStubReg);
michael@0:     masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
michael@0: 
michael@0:     return tailCallVM(DoSetElemFallbackInfo, masm);
michael@0: }
michael@0: 
michael@0: void
michael@0: BaselineScript::noteArrayWriteHole(uint32_t pcOffset)
michael@0: {
michael@0:     ICEntry &entry = icEntryFromPCOffset(pcOffset);
michael@0:     ICFallbackStub *stub = entry.fallbackStub();
michael@0: 
michael@0:     if (stub->isSetElem_Fallback())
michael@0:         stub->toSetElem_Fallback()->noteArrayWriteHole();
michael@0: }
michael@0: 
michael@0: //
michael@0: // SetElem_Dense
michael@0: //
michael@0: 
michael@0: bool
michael@0: ICSetElem_Dense::Compiler::generateStubCode(MacroAssembler &masm)
michael@0: {
michael@0:     // R0 = object
michael@0:     // R1 = key
michael@0:     // Stack = { ... rhs-value, ? }
michael@0:     Label failure;
michael@0:     Label failureUnstow;
michael@0:     masm.branchTestObject(Assembler::NotEqual, R0, &failure);
michael@0:     masm.branchTestInt32(Assembler::NotEqual, R1, &failure);
michael@0: 
michael@0:     GeneralRegisterSet regs(availableGeneralRegs(2));
michael@0:     Register scratchReg = regs.takeAny();
michael@0: 
michael@0:     // Unbox R0 and guard on its shape.
michael@0:     Register obj = masm.extractObject(R0, ExtractTemp0);
michael@0:     masm.loadPtr(Address(BaselineStubReg, ICSetElem_Dense::offsetOfShape()), scratchReg);
michael@0:     masm.branchTestObjShape(Assembler::NotEqual, obj, scratchReg, &failure);
michael@0: 
michael@0:     // Stow both R0 and R1 (object and key)
michael@0:     // But R0 and R1 still hold their values.
michael@0:     EmitStowICValues(masm, 2);
michael@0: 
michael@0:     // We may need to free up some registers.
michael@0:     regs = availableGeneralRegs(0);
michael@0:     regs.take(R0);
michael@0: 
michael@0:     // Guard that the type object matches.
michael@0:     Register typeReg = regs.takeAny();
michael@0:     masm.loadPtr(Address(BaselineStubReg, ICSetElem_Dense::offsetOfType()), typeReg);
michael@0:     masm.branchPtr(Assembler::NotEqual, Address(obj, JSObject::offsetOfType()), typeReg,
michael@0:                    &failureUnstow);
michael@0:     regs.add(typeReg);
michael@0: 
michael@0:     // Stack is now: { ..., rhs-value, object-value, key-value, maybe?-RET-ADDR }
michael@0:     // Load rhs-value into R0.
michael@0:     masm.loadValue(Address(BaselineStackReg, 2 * sizeof(Value) + ICStackValueOffset), R0);
michael@0: 
michael@0:     // Call the type-update stub.
michael@0:     if (!callTypeUpdateIC(masm, sizeof(Value)))
michael@0:         return false;
michael@0: 
michael@0:     // Unstow R0 and R1 (object and key)
michael@0:     EmitUnstowICValues(masm, 2);
michael@0: 
michael@0:     // Reset register set.
michael@0:     regs = availableGeneralRegs(2);
michael@0:     scratchReg = regs.takeAny();
michael@0: 
michael@0:     // Unbox object and key.
michael@0:     obj = masm.extractObject(R0, ExtractTemp0);
michael@0:     Register key = masm.extractInt32(R1, ExtractTemp1);
michael@0: 
michael@0:     // Load obj->elements in scratchReg.
michael@0:     masm.loadPtr(Address(obj, JSObject::offsetOfElements()), scratchReg);
michael@0: 
michael@0:     // Bounds check.
michael@0:     Address initLength(scratchReg, ObjectElements::offsetOfInitializedLength());
michael@0:     masm.branch32(Assembler::BelowOrEqual, initLength, key, &failure);
michael@0: 
michael@0:     // Hole check.
michael@0:     BaseIndex element(scratchReg, key, TimesEight);
michael@0:     masm.branchTestMagic(Assembler::Equal, element, &failure);
michael@0: 
michael@0:     // Failure is not possible now. Free up registers.
michael@0:     regs.add(R0);
michael@0:     regs.add(R1);
michael@0:     regs.takeUnchecked(obj);
michael@0:     regs.takeUnchecked(key);
michael@0:     Address valueAddr(BaselineStackReg, ICStackValueOffset);
michael@0: 
michael@0:     // Convert int32 values to double if convertDoubleElements is set. In this
michael@0:     // case the heap typeset is guaranteed to contain both int32 and double, so
michael@0:     // it's okay to store a double.
michael@0:     Label dontConvertDoubles;
michael@0:     Address elementsFlags(scratchReg, ObjectElements::offsetOfFlags());
michael@0:     masm.branchTest32(Assembler::Zero, elementsFlags,
michael@0:                       Imm32(ObjectElements::CONVERT_DOUBLE_ELEMENTS),
michael@0:                       &dontConvertDoubles);
michael@0:     // Note that double arrays are only created by IonMonkey, so if we have no
michael@0:     // floating-point support Ion is disabled and there should be no double arrays.
michael@0:     if (cx->runtime()->jitSupportsFloatingPoint)
michael@0:         masm.convertInt32ValueToDouble(valueAddr, regs.getAny(), &dontConvertDoubles);
michael@0:     else
michael@0:         masm.assumeUnreachable("There shouldn't be double arrays when there is no FP support.");
michael@0:     masm.bind(&dontConvertDoubles);
michael@0: 
michael@0:     // Don't overwrite R0 because |obj| might overlap with it, and it's needed
michael@0:     // for the post-write barrier later.
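michael@0:     // (The value is therefore staged through a temporary ValueOperand
michael@0:     // rather than loaded into R0.)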
michael@0: ValueOperand tmpVal = regs.takeAnyValue(); michael@0: masm.loadValue(valueAddr, tmpVal); michael@0: EmitPreBarrier(masm, element, MIRType_Value); michael@0: masm.storeValue(tmpVal, element); michael@0: regs.add(key); michael@0: #ifdef JSGC_GENERATIONAL michael@0: { michael@0: Register r = regs.takeAny(); michael@0: GeneralRegisterSet saveRegs; michael@0: emitPostWriteBarrierSlot(masm, obj, tmpVal, r, saveRegs); michael@0: regs.add(r); michael@0: } michael@0: #endif michael@0: EmitReturnFromIC(masm); michael@0: michael@0: michael@0: // Failure case - fail but first unstow R0 and R1 michael@0: masm.bind(&failureUnstow); michael@0: EmitUnstowICValues(masm, 2); michael@0: michael@0: // Failure case - jump to next stub michael@0: masm.bind(&failure); michael@0: EmitStubGuardFailure(masm); michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: GetProtoShapes(JSObject *obj, size_t protoChainDepth, AutoShapeVector *shapes) michael@0: { michael@0: JS_ASSERT(shapes->length() == 1); michael@0: JSObject *curProto = obj->getProto(); michael@0: for (size_t i = 0; i < protoChainDepth; i++) { michael@0: if (!shapes->append(curProto->lastProperty())) michael@0: return false; michael@0: curProto = curProto->getProto(); michael@0: } michael@0: JS_ASSERT(!curProto); michael@0: return true; michael@0: } michael@0: michael@0: // michael@0: // SetElem_DenseAdd michael@0: // michael@0: michael@0: ICUpdatedStub * michael@0: ICSetElemDenseAddCompiler::getStub(ICStubSpace *space) michael@0: { michael@0: AutoShapeVector shapes(cx); michael@0: if (!shapes.append(obj_->lastProperty())) michael@0: return nullptr; michael@0: michael@0: if (!GetProtoShapes(obj_, protoChainDepth_, &shapes)) michael@0: return nullptr; michael@0: michael@0: JS_STATIC_ASSERT(ICSetElem_DenseAdd::MAX_PROTO_CHAIN_DEPTH == 4); michael@0: michael@0: ICUpdatedStub *stub = nullptr; michael@0: switch (protoChainDepth_) { michael@0: case 0: stub = getStubSpecific<0>(space, &shapes); break; michael@0: case 1: stub = getStubSpecific<1>(space, &shapes); break; michael@0: case 2: stub = getStubSpecific<2>(space, &shapes); break; michael@0: case 3: stub = getStubSpecific<3>(space, &shapes); break; michael@0: case 4: stub = getStubSpecific<4>(space, &shapes); break; michael@0: default: MOZ_ASSUME_UNREACHABLE("ProtoChainDepth too high."); michael@0: } michael@0: if (!stub || !stub->initUpdatingChain(cx, space)) michael@0: return nullptr; michael@0: return stub; michael@0: } michael@0: michael@0: bool michael@0: ICSetElemDenseAddCompiler::generateStubCode(MacroAssembler &masm) michael@0: { michael@0: // R0 = object michael@0: // R1 = key michael@0: // Stack = { ... rhs-value, ? } michael@0: Label failure; michael@0: Label failureUnstow; michael@0: masm.branchTestObject(Assembler::NotEqual, R0, &failure); michael@0: masm.branchTestInt32(Assembler::NotEqual, R1, &failure); michael@0: michael@0: GeneralRegisterSet regs(availableGeneralRegs(2)); michael@0: Register scratchReg = regs.takeAny(); michael@0: michael@0: // Unbox R0 and guard on its shape. michael@0: Register obj = masm.extractObject(R0, ExtractTemp0); michael@0: masm.loadPtr(Address(BaselineStubReg, ICSetElem_DenseAddImpl<0>::offsetOfShape(0)), michael@0: scratchReg); michael@0: masm.branchTestObjShape(Assembler::NotEqual, obj, scratchReg, &failure); michael@0: michael@0: // Stow both R0 and R1 (object and key) michael@0: // But R0 and R1 still hold their values. michael@0: EmitStowICValues(masm, 2); michael@0: michael@0: // We may need to free up some registers. 
michael@0: regs = availableGeneralRegs(0); michael@0: regs.take(R0); michael@0: michael@0: // Guard that the type object matches. michael@0: Register typeReg = regs.takeAny(); michael@0: masm.loadPtr(Address(BaselineStubReg, ICSetElem_DenseAdd::offsetOfType()), typeReg); michael@0: masm.branchPtr(Assembler::NotEqual, Address(obj, JSObject::offsetOfType()), typeReg, michael@0: &failureUnstow); michael@0: regs.add(typeReg); michael@0: michael@0: // Shape guard objects on the proto chain. michael@0: scratchReg = regs.takeAny(); michael@0: Register protoReg = regs.takeAny(); michael@0: for (size_t i = 0; i < protoChainDepth_; i++) { michael@0: masm.loadObjProto(i == 0 ? obj : protoReg, protoReg); michael@0: masm.branchTestPtr(Assembler::Zero, protoReg, protoReg, &failureUnstow); michael@0: masm.loadPtr(Address(BaselineStubReg, ICSetElem_DenseAddImpl<0>::offsetOfShape(i + 1)), michael@0: scratchReg); michael@0: masm.branchTestObjShape(Assembler::NotEqual, protoReg, scratchReg, &failureUnstow); michael@0: } michael@0: regs.add(protoReg); michael@0: regs.add(scratchReg); michael@0: michael@0: // Stack is now: { ..., rhs-value, object-value, key-value, maybe?-RET-ADDR } michael@0: // Load rhs-value in to R0 michael@0: masm.loadValue(Address(BaselineStackReg, 2 * sizeof(Value) + ICStackValueOffset), R0); michael@0: michael@0: // Call the type-update stub. michael@0: if (!callTypeUpdateIC(masm, sizeof(Value))) michael@0: return false; michael@0: michael@0: // Unstow R0 and R1 (object and key) michael@0: EmitUnstowICValues(masm, 2); michael@0: michael@0: // Reset register set. michael@0: regs = availableGeneralRegs(2); michael@0: scratchReg = regs.takeAny(); michael@0: michael@0: // Unbox obj and key. michael@0: obj = masm.extractObject(R0, ExtractTemp0); michael@0: Register key = masm.extractInt32(R1, ExtractTemp1); michael@0: michael@0: // Load obj->elements in scratchReg. michael@0: masm.loadPtr(Address(obj, JSObject::offsetOfElements()), scratchReg); michael@0: michael@0: // Bounds check (key == initLength) michael@0: Address initLength(scratchReg, ObjectElements::offsetOfInitializedLength()); michael@0: masm.branch32(Assembler::NotEqual, initLength, key, &failure); michael@0: michael@0: // Capacity check. michael@0: Address capacity(scratchReg, ObjectElements::offsetOfCapacity()); michael@0: masm.branch32(Assembler::BelowOrEqual, capacity, key, &failure); michael@0: michael@0: // Failure is not possible now. Free up registers. michael@0: regs.add(R0); michael@0: regs.add(R1); michael@0: regs.takeUnchecked(obj); michael@0: regs.takeUnchecked(key); michael@0: michael@0: // Increment initLength before write. michael@0: masm.add32(Imm32(1), initLength); michael@0: michael@0: // If length is now <= key, increment length before write. michael@0: Label skipIncrementLength; michael@0: Address length(scratchReg, ObjectElements::offsetOfLength()); michael@0: masm.branch32(Assembler::Above, length, key, &skipIncrementLength); michael@0: masm.add32(Imm32(1), length); michael@0: masm.bind(&skipIncrementLength); michael@0: michael@0: Address valueAddr(BaselineStackReg, ICStackValueOffset); michael@0: michael@0: // Convert int32 values to double if convertDoubleElements is set. In this michael@0: // case the heap typeset is guaranteed to contain both int32 and double, so michael@0: // it's okay to store a double. 
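michael@0:     // (For example, an array that already contains doubles keeps its
michael@0:     // elements unboxed as doubles, so an int32 store like |a[i] = 1| is
michael@0:     // widened to 1.0 here before being written.)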
michael@0: Label dontConvertDoubles; michael@0: Address elementsFlags(scratchReg, ObjectElements::offsetOfFlags()); michael@0: masm.branchTest32(Assembler::Zero, elementsFlags, michael@0: Imm32(ObjectElements::CONVERT_DOUBLE_ELEMENTS), michael@0: &dontConvertDoubles); michael@0: // Note that double arrays are only created by IonMonkey, so if we have no michael@0: // floating-point support Ion is disabled and there should be no double arrays. michael@0: if (cx->runtime()->jitSupportsFloatingPoint) michael@0: masm.convertInt32ValueToDouble(valueAddr, regs.getAny(), &dontConvertDoubles); michael@0: else michael@0: masm.assumeUnreachable("There shouldn't be double arrays when there is no FP support."); michael@0: masm.bind(&dontConvertDoubles); michael@0: michael@0: // Write the value. No need for pre-barrier since we're not overwriting an old value. michael@0: ValueOperand tmpVal = regs.takeAnyValue(); michael@0: BaseIndex element(scratchReg, key, TimesEight); michael@0: masm.loadValue(valueAddr, tmpVal); michael@0: masm.storeValue(tmpVal, element); michael@0: regs.add(key); michael@0: #ifdef JSGC_GENERATIONAL michael@0: { michael@0: Register r = regs.takeAny(); michael@0: GeneralRegisterSet saveRegs; michael@0: emitPostWriteBarrierSlot(masm, obj, tmpVal, r, saveRegs); michael@0: regs.add(r); michael@0: } michael@0: #endif michael@0: EmitReturnFromIC(masm); michael@0: michael@0: // Failure case - fail but first unstow R0 and R1 michael@0: masm.bind(&failureUnstow); michael@0: EmitUnstowICValues(masm, 2); michael@0: michael@0: // Failure case - jump to next stub michael@0: masm.bind(&failure); michael@0: EmitStubGuardFailure(masm); michael@0: return true; michael@0: } michael@0: michael@0: // michael@0: // SetElem_TypedArray michael@0: // michael@0: michael@0: bool michael@0: ICSetElem_TypedArray::Compiler::generateStubCode(MacroAssembler &masm) michael@0: { michael@0: Label failure; michael@0: masm.branchTestObject(Assembler::NotEqual, R0, &failure); michael@0: michael@0: GeneralRegisterSet regs(availableGeneralRegs(2)); michael@0: Register scratchReg = regs.takeAny(); michael@0: michael@0: // Unbox R0 and shape guard. michael@0: Register obj = masm.extractObject(R0, ExtractTemp0); michael@0: masm.loadPtr(Address(BaselineStubReg, ICSetElem_TypedArray::offsetOfShape()), scratchReg); michael@0: masm.branchTestObjShape(Assembler::NotEqual, obj, scratchReg, &failure); michael@0: michael@0: // Ensure the index is an integer. michael@0: if (cx->runtime()->jitSupportsFloatingPoint) { michael@0: Label isInt32; michael@0: masm.branchTestInt32(Assembler::Equal, R1, &isInt32); michael@0: { michael@0: // If the index is a double, try to convert it to int32. It's okay michael@0: // to convert -0 to 0: the shape check ensures the object is a typed michael@0: // array so the difference is not observable. michael@0: masm.branchTestDouble(Assembler::NotEqual, R1, &failure); michael@0: masm.unboxDouble(R1, FloatReg0); michael@0: masm.convertDoubleToInt32(FloatReg0, scratchReg, &failure, /* negZeroCheck = */false); michael@0: masm.tagValue(JSVAL_TYPE_INT32, scratchReg, R1); michael@0: } michael@0: masm.bind(&isInt32); michael@0: } else { michael@0: masm.branchTestInt32(Assembler::NotEqual, R1, &failure); michael@0: } michael@0: michael@0: // Unbox key. michael@0: Register key = masm.extractInt32(R1, ExtractTemp1); michael@0: michael@0: // Bounds check. 
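michael@0:     // Unlike dense arrays, a typed-array store past the length is not an
michael@0:     // error: when expectOutOfBounds_ is set the write is simply dropped
michael@0:     // (see the oobWrite path at the end of this stub).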
michael@0:     Label oobWrite;
michael@0:     masm.unboxInt32(Address(obj, TypedArrayObject::lengthOffset()), scratchReg);
michael@0:     masm.branch32(Assembler::BelowOrEqual, scratchReg, key,
michael@0:                   expectOutOfBounds_ ? &oobWrite : &failure);
michael@0: 
michael@0:     // Load the elements vector.
michael@0:     masm.loadPtr(Address(obj, TypedArrayObject::dataOffset()), scratchReg);
michael@0: 
michael@0:     BaseIndex dest(scratchReg, key, ScaleFromElemWidth(TypedArrayObject::slotWidth(type_)));
michael@0:     Address value(BaselineStackReg, ICStackValueOffset);
michael@0: 
michael@0:     // We need a second scratch register. It's okay to clobber the type tag of
michael@0:     // R0 or R1, as long as it's restored before jumping to the next stub.
michael@0:     regs = availableGeneralRegs(0);
michael@0:     regs.takeUnchecked(obj);
michael@0:     regs.takeUnchecked(key);
michael@0:     regs.take(scratchReg);
michael@0:     Register secondScratch = regs.takeAny();
michael@0: 
michael@0:     if (type_ == ScalarTypeDescr::TYPE_FLOAT32 || type_ == ScalarTypeDescr::TYPE_FLOAT64) {
michael@0:         masm.ensureDouble(value, FloatReg0, &failure);
michael@0:         if (LIRGenerator::allowFloat32Optimizations() &&
michael@0:             type_ == ScalarTypeDescr::TYPE_FLOAT32)
michael@0:         {
michael@0:             masm.convertDoubleToFloat32(FloatReg0, ScratchFloatReg);
michael@0:             masm.storeToTypedFloatArray(type_, ScratchFloatReg, dest);
michael@0:         } else {
michael@0:             masm.storeToTypedFloatArray(type_, FloatReg0, dest);
michael@0:         }
michael@0:         EmitReturnFromIC(masm);
michael@0:     } else if (type_ == ScalarTypeDescr::TYPE_UINT8_CLAMPED) {
michael@0:         Label notInt32;
michael@0:         masm.branchTestInt32(Assembler::NotEqual, value, &notInt32);
michael@0:         masm.unboxInt32(value, secondScratch);
michael@0:         masm.clampIntToUint8(secondScratch);
michael@0: 
michael@0:         Label clamped;
michael@0:         masm.bind(&clamped);
michael@0:         masm.storeToTypedIntArray(type_, secondScratch, dest);
michael@0:         EmitReturnFromIC(masm);
michael@0: 
michael@0:         // If the value is a double, clamp to uint8 and jump back.
michael@0:         // Else, jump to failure.
michael@0:         masm.bind(&notInt32);
michael@0:         if (cx->runtime()->jitSupportsFloatingPoint) {
michael@0:             masm.branchTestDouble(Assembler::NotEqual, value, &failure);
michael@0:             masm.unboxDouble(value, FloatReg0);
michael@0:             masm.clampDoubleToUint8(FloatReg0, secondScratch);
michael@0:             masm.jump(&clamped);
michael@0:         } else {
michael@0:             masm.jump(&failure);
michael@0:         }
michael@0:     } else {
michael@0:         Label notInt32;
michael@0:         masm.branchTestInt32(Assembler::NotEqual, value, &notInt32);
michael@0:         masm.unboxInt32(value, secondScratch);
michael@0: 
michael@0:         Label isInt32;
michael@0:         masm.bind(&isInt32);
michael@0:         masm.storeToTypedIntArray(type_, secondScratch, dest);
michael@0:         EmitReturnFromIC(masm);
michael@0: 
michael@0:         // If the value is a double, truncate and jump back.
michael@0:         // Else, jump to failure.
michael@0:         Label failureRestoreRegs;
michael@0:         masm.bind(&notInt32);
michael@0:         if (cx->runtime()->jitSupportsFloatingPoint) {
michael@0:             masm.branchTestDouble(Assembler::NotEqual, value, &failure);
michael@0:             masm.unboxDouble(value, FloatReg0);
michael@0:             masm.branchTruncateDouble(FloatReg0, secondScratch, &failureRestoreRegs);
michael@0:             masm.jump(&isInt32);
michael@0:         } else {
michael@0:             masm.jump(&failure);
michael@0:         }
michael@0: 
michael@0:         // Writing to secondScratch may have clobbered R0 or R1, restore them
michael@0:         // first.
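michael@0:         // (secondScratch was taken from the full register set, so it may
michael@0:         // alias R0's or R1's type or payload register; both Values are
michael@0:         // rebuilt from obj and key before jumping to the next stub.)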
michael@0:         masm.bind(&failureRestoreRegs);
michael@0:         masm.tagValue(JSVAL_TYPE_OBJECT, obj, R0);
michael@0:         masm.tagValue(JSVAL_TYPE_INT32, key, R1);
michael@0:     }
michael@0: 
michael@0:     // Failure case - jump to next stub
michael@0:     masm.bind(&failure);
michael@0:     EmitStubGuardFailure(masm);
michael@0: 
michael@0:     if (expectOutOfBounds_) {
michael@0:         masm.bind(&oobWrite);
michael@0:         EmitReturnFromIC(masm);
michael@0:     }
michael@0:     return true;
michael@0: }
michael@0: 
michael@0: //
michael@0: // In_Fallback
michael@0: //
michael@0: 
michael@0: static bool
michael@0: DoInFallback(JSContext *cx, ICIn_Fallback *stub, HandleValue key, HandleValue objValue,
michael@0:              MutableHandleValue res)
michael@0: {
michael@0:     FallbackICSpew(cx, stub, "In");
michael@0: 
michael@0:     if (!objValue.isObject()) {
michael@0:         js_ReportValueError(cx, JSMSG_IN_NOT_OBJECT, -1, objValue, NullPtr());
michael@0:         return false;
michael@0:     }
michael@0: 
michael@0:     RootedObject obj(cx, &objValue.toObject());
michael@0: 
michael@0:     bool cond = false;
michael@0:     if (!OperatorIn(cx, key, obj, &cond))
michael@0:         return false;
michael@0: 
michael@0:     res.setBoolean(cond);
michael@0:     return true;
michael@0: }
michael@0: 
michael@0: typedef bool (*DoInFallbackFn)(JSContext *, ICIn_Fallback *, HandleValue, HandleValue,
michael@0:                                MutableHandleValue);
michael@0: static const VMFunction DoInFallbackInfo =
michael@0:     FunctionInfo<DoInFallbackFn>(DoInFallback, PopValues(2));
michael@0: 
michael@0: bool
michael@0: ICIn_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
michael@0: {
michael@0:     EmitRestoreTailCallReg(masm);
michael@0: 
michael@0:     // Sync for the decompiler.
michael@0:     masm.pushValue(R0);
michael@0:     masm.pushValue(R1);
michael@0: 
michael@0:     // Push arguments.
michael@0:     masm.pushValue(R1);
michael@0:     masm.pushValue(R0);
michael@0:     masm.push(BaselineStubReg);
michael@0: 
michael@0:     return tailCallVM(DoInFallbackInfo, masm);
michael@0: }
michael@0: 
michael@0: // Attach an optimized stub for a GETGNAME/CALLGNAME op.
michael@0: static bool
michael@0: TryAttachGlobalNameStub(JSContext *cx, HandleScript script, jsbytecode *pc,
michael@0:                         ICGetName_Fallback *stub, HandleObject global,
michael@0:                         HandlePropertyName name)
michael@0: {
michael@0:     JS_ASSERT(global->is<GlobalObject>());
michael@0: 
michael@0:     RootedId id(cx, NameToId(name));
michael@0: 
michael@0:     // Instantiate this global property, for use during Ion compilation.
michael@0:     if (IsIonEnabled(cx))
michael@0:         types::EnsureTrackPropertyTypes(cx, global, NameToId(name));
michael@0: 
michael@0:     // The property must be found, and it must be found as a normal data property.
michael@0:     RootedShape shape(cx, global->nativeLookup(cx, id));
michael@0:     if (!shape)
michael@0:         return true;
michael@0: 
michael@0:     if (shape->hasDefaultGetter() && shape->hasSlot()) {
michael@0: 
michael@0:         JS_ASSERT(shape->slot() >= global->numFixedSlots());
michael@0:         uint32_t slot = shape->slot() - global->numFixedSlots();
michael@0: 
michael@0:         // TODO: if there's a previous stub discard it, or just update its Shape + slot?
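michael@0:         // The stub attached below guards on the global object's shape and
michael@0:         // loads the slot directly, so repeated reads of e.g. a global
michael@0:         // |var x| skip the name lookup entirely.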
michael@0: 
michael@0:         ICStub *monitorStub = stub->fallbackMonitorStub()->firstMonitorStub();
michael@0:         IonSpew(IonSpew_BaselineIC, "  Generating GetName(GlobalName) stub");
michael@0:         ICGetName_Global::Compiler compiler(cx, monitorStub, global->lastProperty(), slot);
michael@0:         ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0:         if (!newStub)
michael@0:             return false;
michael@0: 
michael@0:         stub->addNewStub(newStub);
michael@0:         return true;
michael@0:     }
michael@0: 
michael@0:     bool isScripted;
michael@0:     if (IsCacheableGetPropCall(cx, global, global, shape, &isScripted) && !isScripted)
michael@0:     {
michael@0:         ICStub *monitorStub = stub->fallbackMonitorStub()->firstMonitorStub();
michael@0:         IonSpew(IonSpew_BaselineIC, "  Generating GetName(GlobalName/NativeGetter) stub");
michael@0:         RootedFunction getter(cx, &shape->getterObject()->as<JSFunction>());
michael@0:         ICGetProp_CallNative::Compiler compiler(cx, monitorStub, global,
michael@0:                                                 getter, script->pcToOffset(pc),
michael@0:                                                 /* inputDefinitelyObject = */ true);
michael@0:         ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0:         if (!newStub)
michael@0:             return false;
michael@0: 
michael@0:         stub->addNewStub(newStub);
michael@0:         return true;
michael@0:     }
michael@0: 
michael@0:     return true;
michael@0: }
michael@0: 
michael@0: static bool
michael@0: TryAttachScopeNameStub(JSContext *cx, HandleScript script, ICGetName_Fallback *stub,
michael@0:                        HandleObject initialScopeChain, HandlePropertyName name)
michael@0: {
michael@0:     AutoShapeVector shapes(cx);
michael@0:     RootedId id(cx, NameToId(name));
michael@0:     RootedObject scopeChain(cx, initialScopeChain);
michael@0: 
michael@0:     Shape *shape = nullptr;
michael@0:     while (scopeChain) {
michael@0:         if (!shapes.append(scopeChain->lastProperty()))
michael@0:             return false;
michael@0: 
michael@0:         if (scopeChain->is<GlobalObject>()) {
michael@0:             shape = scopeChain->nativeLookup(cx, id);
michael@0:             if (shape)
michael@0:                 break;
michael@0:             return true;
michael@0:         }
michael@0: 
michael@0:         if (!scopeChain->is<ScopeObject>() || scopeChain->is<DynamicWithObject>())
michael@0:             return true;
michael@0: 
michael@0:         // Check for an 'own' property on the scope. There is no need to
michael@0:         // check the prototype as non-with scopes do not inherit properties
michael@0:         // from any prototype.
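michael@0:         // (Each scope's shape was appended to |shapes| above, so an attached
michael@0:         // stub can later re-walk the chain using only shape guards.)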
michael@0:         shape = scopeChain->nativeLookup(cx, id);
michael@0:         if (shape)
michael@0:             break;
michael@0: 
michael@0:         scopeChain = scopeChain->enclosingScope();
michael@0:     }
michael@0: 
michael@0:     if (!IsCacheableGetPropReadSlot(scopeChain, scopeChain, shape))
michael@0:         return true;
michael@0: 
michael@0:     bool isFixedSlot;
michael@0:     uint32_t offset;
michael@0:     GetFixedOrDynamicSlotOffset(scopeChain, shape->slot(), &isFixedSlot, &offset);
michael@0: 
michael@0:     ICStub *monitorStub = stub->fallbackMonitorStub()->firstMonitorStub();
michael@0:     ICStub *newStub;
michael@0: 
michael@0:     switch (shapes.length()) {
michael@0:       case 1: {
michael@0:         ICGetName_Scope<0>::Compiler compiler(cx, monitorStub, &shapes, isFixedSlot, offset);
michael@0:         newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0:         break;
michael@0:       }
michael@0:       case 2: {
michael@0:         ICGetName_Scope<1>::Compiler compiler(cx, monitorStub, &shapes, isFixedSlot, offset);
michael@0:         newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0:         break;
michael@0:       }
michael@0:       case 3: {
michael@0:         ICGetName_Scope<2>::Compiler compiler(cx, monitorStub, &shapes, isFixedSlot, offset);
michael@0:         newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0:         break;
michael@0:       }
michael@0:       case 4: {
michael@0:         ICGetName_Scope<3>::Compiler compiler(cx, monitorStub, &shapes, isFixedSlot, offset);
michael@0:         newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0:         break;
michael@0:       }
michael@0:       case 5: {
michael@0:         ICGetName_Scope<4>::Compiler compiler(cx, monitorStub, &shapes, isFixedSlot, offset);
michael@0:         newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0:         break;
michael@0:       }
michael@0:       case 6: {
michael@0:         ICGetName_Scope<5>::Compiler compiler(cx, monitorStub, &shapes, isFixedSlot, offset);
michael@0:         newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0:         break;
michael@0:       }
michael@0:       case 7: {
michael@0:         ICGetName_Scope<6>::Compiler compiler(cx, monitorStub, &shapes, isFixedSlot, offset);
michael@0:         newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0:         break;
michael@0:       }
michael@0:       default:
michael@0:         return true;
michael@0:     }
michael@0: 
michael@0:     if (!newStub)
michael@0:         return false;
michael@0: 
michael@0:     stub->addNewStub(newStub);
michael@0:     return true;
michael@0: }
michael@0: 
michael@0: static bool
michael@0: DoGetNameFallback(JSContext *cx, BaselineFrame *frame, ICGetName_Fallback *stub_,
michael@0:                   HandleObject scopeChain, MutableHandleValue res)
michael@0: {
michael@0:     // This fallback stub may trigger debug mode toggling.
michael@0:     DebugModeOSRVolatileStub<ICGetName_Fallback *> stub(frame, stub_);
michael@0: 
michael@0:     RootedScript script(cx, frame->script());
michael@0:     jsbytecode *pc = stub->icEntry()->pc(script);
michael@0:     mozilla::DebugOnly<JSOp> op = JSOp(*pc);
michael@0:     FallbackICSpew(cx, stub, "GetName(%s)", js_CodeName[JSOp(*pc)]);
michael@0: 
michael@0:     JS_ASSERT(op == JSOP_NAME || op == JSOP_GETGNAME);
michael@0: 
michael@0:     RootedPropertyName name(cx, script->getName(pc));
michael@0: 
michael@0:     if (JSOp(pc[JSOP_GETGNAME_LENGTH]) == JSOP_TYPEOF) {
michael@0:         if (!GetScopeNameForTypeOf(cx, scopeChain, name, res))
michael@0:             return false;
michael@0:     } else {
michael@0:         if (!GetScopeName(cx, scopeChain, name, res))
michael@0:             return false;
michael@0:     }
michael@0: 
michael@0:     types::TypeScript::Monitor(cx, script, pc, res);
michael@0: 
michael@0:     // Check if debug mode toggling made the stub invalid.
michael@0:     if (stub.invalid())
michael@0:         return true;
michael@0: 
michael@0:     // Add a type monitor stub for the resulting value.
michael@0:     if (!stub->addMonitorStubForValue(cx, script, res))
michael@0:         return false;
michael@0: 
michael@0:     // Attach new stub.
michael@0:     if (stub->numOptimizedStubs() >= ICGetName_Fallback::MAX_OPTIMIZED_STUBS) {
michael@0:         // TODO: Discard all stubs in this IC and replace with generic stub.
michael@0:         return true;
michael@0:     }
michael@0: 
michael@0:     if (js_CodeSpec[*pc].format & JOF_GNAME) {
michael@0:         if (!TryAttachGlobalNameStub(cx, script, pc, stub, scopeChain, name))
michael@0:             return false;
michael@0:     } else {
michael@0:         if (!TryAttachScopeNameStub(cx, script, stub, scopeChain, name))
michael@0:             return false;
michael@0:     }
michael@0: 
michael@0:     return true;
michael@0: }
michael@0: 
michael@0: typedef bool (*DoGetNameFallbackFn)(JSContext *, BaselineFrame *, ICGetName_Fallback *,
michael@0:                                     HandleObject, MutableHandleValue);
michael@0: static const VMFunction DoGetNameFallbackInfo = FunctionInfo<DoGetNameFallbackFn>(DoGetNameFallback);
michael@0: 
michael@0: bool
michael@0: ICGetName_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
michael@0: {
michael@0:     JS_ASSERT(R0 == JSReturnOperand);
michael@0: 
michael@0:     EmitRestoreTailCallReg(masm);
michael@0: 
michael@0:     masm.push(R0.scratchReg());
michael@0:     masm.push(BaselineStubReg);
michael@0:     masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
michael@0: 
michael@0:     return tailCallVM(DoGetNameFallbackInfo, masm);
michael@0: }
michael@0: 
michael@0: bool
michael@0: ICGetName_Global::Compiler::generateStubCode(MacroAssembler &masm)
michael@0: {
michael@0:     Label failure;
michael@0:     Register obj = R0.scratchReg();
michael@0:     Register scratch = R1.scratchReg();
michael@0: 
michael@0:     // Shape guard.
michael@0:     masm.loadPtr(Address(BaselineStubReg, ICGetName_Global::offsetOfShape()), scratch);
michael@0:     masm.branchTestObjShape(Assembler::NotEqual, obj, scratch, &failure);
michael@0: 
michael@0:     // Load dynamic slot.
michael@0:     masm.loadPtr(Address(obj, JSObject::offsetOfSlots()), obj);
michael@0:     masm.load32(Address(BaselineStubReg, ICGetName_Global::offsetOfSlot()), scratch);
michael@0:     masm.loadValue(BaseIndex(obj, scratch, TimesEight), R0);
michael@0: 
michael@0:     // Enter type monitor IC to type-check result.
michael@0:     EmitEnterTypeMonitorIC(masm);
michael@0: 
michael@0:     // Failure case - jump to next stub
michael@0:     masm.bind(&failure);
michael@0:     EmitStubGuardFailure(masm);
michael@0:     return true;
michael@0: }
michael@0: 
michael@0: template <size_t NumHops>
michael@0: bool
michael@0: ICGetName_Scope<NumHops>::Compiler::generateStubCode(MacroAssembler &masm)
michael@0: {
michael@0:     Label failure;
michael@0:     GeneralRegisterSet regs(availableGeneralRegs(1));
michael@0:     Register obj = R0.scratchReg();
michael@0:     Register walker = regs.takeAny();
michael@0:     Register scratch = regs.takeAny();
michael@0: 
michael@0:     // Use a local to silence Clang tautological-compare warning if NumHops is 0.
michael@0:     size_t numHops = NumHops;
michael@0: 
michael@0:     for (size_t index = 0; index < NumHops + 1; index++) {
michael@0:         Register scope = index ? walker : obj;
michael@0: 
michael@0:         // Shape guard.
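michael@0:         // (One guard per hop: index 0 checks the innermost scope held in R0,
michael@0:         // and each later iteration checks the next enclosing scope.)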
michael@0:         masm.loadPtr(Address(BaselineStubReg, ICGetName_Scope::offsetOfShape(index)), scratch);
michael@0:         masm.branchTestObjShape(Assembler::NotEqual, scope, scratch, &failure);
michael@0: 
michael@0:         if (index < numHops)
michael@0:             masm.extractObject(Address(scope, ScopeObject::offsetOfEnclosingScope()), walker);
michael@0:     }
michael@0: 
michael@0:     Register scope = NumHops ? walker : obj;
michael@0: 
michael@0:     if (!isFixedSlot_) {
michael@0:         masm.loadPtr(Address(scope, JSObject::offsetOfSlots()), walker);
michael@0:         scope = walker;
michael@0:     }
michael@0: 
michael@0:     masm.load32(Address(BaselineStubReg, ICGetName_Scope::offsetOfOffset()), scratch);
michael@0:     masm.loadValue(BaseIndex(scope, scratch, TimesOne), R0);
michael@0: 
michael@0:     // Enter type monitor IC to type-check result.
michael@0:     EmitEnterTypeMonitorIC(masm);
michael@0: 
michael@0:     // Failure case - jump to next stub
michael@0:     masm.bind(&failure);
michael@0:     EmitStubGuardFailure(masm);
michael@0:     return true;
michael@0: }
michael@0: 
michael@0: //
michael@0: // BindName_Fallback
michael@0: //
michael@0: 
michael@0: static bool
michael@0: DoBindNameFallback(JSContext *cx, BaselineFrame *frame, ICBindName_Fallback *stub,
michael@0:                    HandleObject scopeChain, MutableHandleValue res)
michael@0: {
michael@0:     jsbytecode *pc = stub->icEntry()->pc(frame->script());
michael@0:     mozilla::DebugOnly<JSOp> op = JSOp(*pc);
michael@0:     FallbackICSpew(cx, stub, "BindName(%s)", js_CodeName[JSOp(*pc)]);
michael@0: 
michael@0:     JS_ASSERT(op == JSOP_BINDNAME);
michael@0: 
michael@0:     RootedPropertyName name(cx, frame->script()->getName(pc));
michael@0: 
michael@0:     RootedObject scope(cx);
michael@0:     if (!LookupNameWithGlobalDefault(cx, name, scopeChain, &scope))
michael@0:         return false;
michael@0: 
michael@0:     res.setObject(*scope);
michael@0:     return true;
michael@0: }
michael@0: 
michael@0: typedef bool (*DoBindNameFallbackFn)(JSContext *, BaselineFrame *, ICBindName_Fallback *,
michael@0:                                      HandleObject, MutableHandleValue);
michael@0: static const VMFunction DoBindNameFallbackInfo =
michael@0:     FunctionInfo<DoBindNameFallbackFn>(DoBindNameFallback);
michael@0: 
michael@0: bool
michael@0: ICBindName_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
michael@0: {
michael@0:     JS_ASSERT(R0 == JSReturnOperand);
michael@0: 
michael@0:     EmitRestoreTailCallReg(masm);
michael@0: 
michael@0:     masm.push(R0.scratchReg());
michael@0:     masm.push(BaselineStubReg);
michael@0:     masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
michael@0: 
michael@0:     return tailCallVM(DoBindNameFallbackInfo, masm);
michael@0: }
michael@0: 
michael@0: //
michael@0: // GetIntrinsic_Fallback
michael@0: //
michael@0: 
michael@0: static bool
michael@0: DoGetIntrinsicFallback(JSContext *cx, BaselineFrame *frame, ICGetIntrinsic_Fallback *stub_,
michael@0:                        MutableHandleValue res)
michael@0: {
michael@0:     // This fallback stub may trigger debug mode toggling.
michael@0:     DebugModeOSRVolatileStub<ICGetIntrinsic_Fallback *> stub(frame, stub_);
michael@0: 
michael@0:     RootedScript script(cx, frame->script());
michael@0:     jsbytecode *pc = stub->icEntry()->pc(script);
michael@0:     mozilla::DebugOnly<JSOp> op = JSOp(*pc);
michael@0:     FallbackICSpew(cx, stub, "GetIntrinsic(%s)", js_CodeName[JSOp(*pc)]);
michael@0: 
michael@0:     JS_ASSERT(op == JSOP_GETINTRINSIC);
michael@0: 
michael@0:     if (!GetIntrinsicOperation(cx, pc, res))
michael@0:         return false;
michael@0: 
michael@0:     // An intrinsic operation will always produce the same result, so it only
michael@0:     // needs to be monitored once. Attach a stub to load the resulting constant
michael@0:     // directly.
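michael@0:     // (In practice this means e.g. a self-hosted intrinsic read ends up
michael@0:     // hitting the one-instruction ICGetIntrinsic_Constant stub generated
michael@0:     // below, which just materializes the cached Value.)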
//
// GetIntrinsic_Fallback
//

static bool
DoGetIntrinsicFallback(JSContext *cx, BaselineFrame *frame, ICGetIntrinsic_Fallback *stub_,
                       MutableHandleValue res)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICGetIntrinsic_Fallback *> stub(frame, stub_);

    RootedScript script(cx, frame->script());
    jsbytecode *pc = stub->icEntry()->pc(script);
    mozilla::DebugOnly<JSOp> op = JSOp(*pc);
    FallbackICSpew(cx, stub, "GetIntrinsic(%s)", js_CodeName[JSOp(*pc)]);

    JS_ASSERT(op == JSOP_GETINTRINSIC);

    if (!GetIntrinsicOperation(cx, pc, res))
        return false;

    // An intrinsic operation will always produce the same result, so only
    // needs to be monitored once. Attach a stub to load the resulting constant
    // directly.

    types::TypeScript::Monitor(cx, script, pc, res);

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    IonSpew(IonSpew_BaselineIC, " Generating GetIntrinsic optimized stub");
    ICGetIntrinsic_Constant::Compiler compiler(cx, res);
    ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
    if (!newStub)
        return false;

    stub->addNewStub(newStub);
    return true;
}

typedef bool (*DoGetIntrinsicFallbackFn)(JSContext *, BaselineFrame *, ICGetIntrinsic_Fallback *,
                                         MutableHandleValue);
static const VMFunction DoGetIntrinsicFallbackInfo =
    FunctionInfo<DoGetIntrinsicFallbackFn>(DoGetIntrinsicFallback);

bool
ICGetIntrinsic_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(DoGetIntrinsicFallbackInfo, masm);
}

bool
ICGetIntrinsic_Constant::Compiler::generateStubCode(MacroAssembler &masm)
{
    masm.loadValue(Address(BaselineStubReg, ICGetIntrinsic_Constant::offsetOfValue()), R0);

    EmitReturnFromIC(masm);
    return true;
}

//
// GetProp_Fallback
//

static bool
TryAttachLengthStub(JSContext *cx, JSScript *script, ICGetProp_Fallback *stub, HandleValue val,
                    HandleValue res, bool *attached)
{
    JS_ASSERT(!*attached);

    if (val.isString()) {
        JS_ASSERT(res.isInt32());
        IonSpew(IonSpew_BaselineIC, " Generating GetProp(String.length) stub");
        ICGetProp_StringLength::Compiler compiler(cx);
        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;

        *attached = true;
        stub->addNewStub(newStub);
        return true;
    }

    if (val.isMagic(JS_OPTIMIZED_ARGUMENTS) && res.isInt32()) {
        IonSpew(IonSpew_BaselineIC, " Generating GetProp(MagicArgs.length) stub");
        ICGetProp_ArgumentsLength::Compiler compiler(cx, ICGetProp_ArgumentsLength::Magic);
        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;

        *attached = true;
        stub->addNewStub(newStub);
        return true;
    }

    if (!val.isObject())
        return true;

    RootedObject obj(cx, &val.toObject());

    if (obj->is<ArrayObject>() && res.isInt32()) {
        IonSpew(IonSpew_BaselineIC, " Generating GetProp(Array.length) stub");
        ICGetProp_ArrayLength::Compiler compiler(cx);
        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;

        *attached = true;
        stub->addNewStub(newStub);
        return true;
    }

    if (obj->is<TypedArrayObject>() && res.isInt32()) {
        IonSpew(IonSpew_BaselineIC, " Generating GetProp(TypedArray.length) stub");
        ICGetProp_TypedArrayLength::Compiler compiler(cx);
        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;

        *attached = true;
        stub->addNewStub(newStub);
        return true;
    }

    if (obj->is<ArgumentsObject>() && res.isInt32()) {
        IonSpew(IonSpew_BaselineIC, " Generating GetProp(ArgsObj.length %s) stub",
                obj->is<StrictArgumentsObject>() ? "Strict" : "Normal");
        ICGetProp_ArgumentsLength::Which which = ICGetProp_ArgumentsLength::Normal;
        if (obj->is<StrictArgumentsObject>())
            which = ICGetProp_ArgumentsLength::Strict;
        ICGetProp_ArgumentsLength::Compiler compiler(cx, which);
        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;

        *attached = true;
        stub->addNewStub(newStub);
        return true;
    }

    return true;
}
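// .length is special-cased for the receivers above: strings, lazy (magic)
// arguments, dense arrays, typed arrays, and materialized arguments objects.
// Each stub produces an Int32 that the fallback has already monitored, so the
// stubs can return directly. Anything else, e.g. |({length: 5}).length|, goes
// through the generic GetProp paths below.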
static bool
UpdateExistingGenerationalDOMProxyStub(ICGetProp_Fallback *stub,
                                       HandleObject obj)
{
    Value expandoSlot = obj->getFixedSlot(GetDOMProxyExpandoSlot());
    JS_ASSERT(!expandoSlot.isObject() && !expandoSlot.isUndefined());
    ExpandoAndGeneration *expandoAndGeneration = (ExpandoAndGeneration*)expandoSlot.toPrivate();
    for (ICStubConstIterator iter = stub->beginChainConst(); !iter.atEnd(); iter++) {
        if (iter->isGetProp_CallDOMProxyWithGenerationNative()) {
            ICGetProp_CallDOMProxyWithGenerationNative* updateStub =
                iter->toGetProp_CallDOMProxyWithGenerationNative();
            if (updateStub->expandoAndGeneration() == expandoAndGeneration) {
                // Update generation
                uint32_t generation = expandoAndGeneration->generation;
                IonSpew(IonSpew_BaselineIC,
                        " Updating existing stub with generation, old value: %i, "
                        "new value: %i", updateStub->generation(),
                        generation);
                updateStub->setGeneration(generation);
                return true;
            }
        }
    }
    return false;
}
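// A generational DOM proxy stub bakes the expando's generation number into its
// guards; when the C++ side replaces the expando structure, the generation
// bumps and the guard starts failing. Rather than piling up a fresh stub each
// time, the helper above finds the existing stub for the same
// ExpandoAndGeneration and rewrites its generation in place.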
static bool
TryAttachNativeGetPropStub(JSContext *cx, HandleScript script, jsbytecode *pc,
                           ICGetProp_Fallback *stub, HandlePropertyName name,
                           HandleValue val, HandleValue res, bool *attached)
{
    JS_ASSERT(!*attached);

    if (!val.isObject())
        return true;

    RootedObject obj(cx, &val.toObject());

    bool isDOMProxy;
    bool domProxyHasGeneration;
    DOMProxyShadowsResult domProxyShadowsResult;
    RootedShape shape(cx);
    RootedObject holder(cx);
    if (!EffectlesslyLookupProperty(cx, obj, name, &holder, &shape, &isDOMProxy,
                                    &domProxyShadowsResult, &domProxyHasGeneration))
    {
        return false;
    }

    if (!isDOMProxy && !obj->isNative())
        return true;

    bool isCallProp = (JSOp(*pc) == JSOP_CALLPROP);

    ICStub *monitorStub = stub->fallbackMonitorStub()->firstMonitorStub();
    if (!isDOMProxy && IsCacheableGetPropReadSlot(obj, holder, shape)) {
        bool isFixedSlot;
        uint32_t offset;
        GetFixedOrDynamicSlotOffset(holder, shape->slot(), &isFixedSlot, &offset);

        // Instantiate this property for singleton holders, for use during Ion compilation.
        if (IsIonEnabled(cx))
            types::EnsureTrackPropertyTypes(cx, holder, NameToId(name));

        ICStub::Kind kind = (obj == holder) ? ICStub::GetProp_Native
                                            : ICStub::GetProp_NativePrototype;

        IonSpew(IonSpew_BaselineIC, " Generating GetProp(%s %s) stub",
                isDOMProxy ? "DOMProxy" : "Native",
                (obj == holder) ? "direct" : "prototype");
        ICGetPropNativeCompiler compiler(cx, kind, isCallProp, monitorStub, obj, holder,
                                         name, isFixedSlot, offset);
        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;

        stub->addNewStub(newStub);
        *attached = true;
        return true;
    }

    bool isScripted = false;
    bool cacheableCall = IsCacheableGetPropCall(cx, obj, holder, shape, &isScripted, isDOMProxy);

    // Try handling scripted getters.
    if (cacheableCall && isScripted && !isDOMProxy) {
#if JS_HAS_NO_SUCH_METHOD
        // It's hard to keep the original object alive through a call, and it's unlikely
        // that a getter will be used to generate functions for calling in CALLPROP locations.
        // Just don't attach stubs in that case.
        if (isCallProp)
            return true;
#endif

        // Don't handle scripted own property getters
        if (obj == holder)
            return true;

        RootedFunction callee(cx, &shape->getterObject()->as<JSFunction>());
        JS_ASSERT(obj != holder);
        JS_ASSERT(callee->hasScript());

        IonSpew(IonSpew_BaselineIC, " Generating GetProp(NativeObj/ScriptedGetter %s:%d) stub",
                callee->nonLazyScript()->filename(), callee->nonLazyScript()->lineno());

        ICGetProp_CallScripted::Compiler compiler(cx, monitorStub, obj, holder, callee,
                                                  script->pcToOffset(pc));
        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;

        stub->addNewStub(newStub);
        *attached = true;
        return true;
    }

    // Try handling JSNative getters.
    if (cacheableCall && !isScripted) {
#if JS_HAS_NO_SUCH_METHOD
        // It's unlikely that a getter function will be used to generate functions for calling
        // in CALLPROP locations. Just don't attach stubs in that case to avoid issues with
        // __noSuchMethod__ handling.
        if (isCallProp)
            return true;
#endif

        RootedFunction callee(cx, &shape->getterObject()->as<JSFunction>());
        JS_ASSERT(callee->isNative());

        IonSpew(IonSpew_BaselineIC, " Generating GetProp(%s%s/NativeGetter %p) stub",
                isDOMProxy ? "DOMProxyObj" : "NativeObj",
                isDOMProxy && domProxyHasGeneration ? "WithGeneration" : "",
                callee->native());

        ICStub *newStub = nullptr;
        if (isDOMProxy) {
            JS_ASSERT(obj != holder);
            ICStub::Kind kind;
            if (domProxyHasGeneration) {
                if (UpdateExistingGenerationalDOMProxyStub(stub, obj)) {
                    *attached = true;
                    return true;
                }
                kind = ICStub::GetProp_CallDOMProxyWithGenerationNative;
            } else {
                kind = ICStub::GetProp_CallDOMProxyNative;
            }
            Rooted<ProxyObject*> proxy(cx, &obj->as<ProxyObject>());
            ICGetPropCallDOMProxyNativeCompiler
                compiler(cx, kind, monitorStub, proxy, holder, callee, script->pcToOffset(pc));
            newStub = compiler.getStub(compiler.getStubSpace(script));
        } else if (obj == holder) {
            ICGetProp_CallNative::Compiler compiler(cx, monitorStub, obj, callee,
                                                    script->pcToOffset(pc));
            newStub = compiler.getStub(compiler.getStubSpace(script));
        } else {
            ICGetProp_CallNativePrototype::Compiler compiler(cx, monitorStub, obj, holder, callee,
                                                             script->pcToOffset(pc));
            newStub = compiler.getStub(compiler.getStubSpace(script));
        }
        if (!newStub)
            return false;
        stub->addNewStub(newStub);
        *attached = true;
        return true;
    }

    // If it's a shadowed listbase proxy property, attach stub to call Proxy::get instead.
    if (isDOMProxy && domProxyShadowsResult == Shadows) {
        JS_ASSERT(obj == holder);
#if JS_HAS_NO_SUCH_METHOD
        if (isCallProp)
            return true;
#endif

        IonSpew(IonSpew_BaselineIC, " Generating GetProp(DOMProxyProxy) stub");
        Rooted<ProxyObject*> proxy(cx, &obj->as<ProxyObject>());
        ICGetProp_DOMProxyShadowed::Compiler compiler(cx, monitorStub, proxy, name,
                                                      script->pcToOffset(pc));
        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;
        stub->addNewStub(newStub);
        *attached = true;
        return true;
    }

    return true;
}
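// Stub selection above, in order of preference:
//   1. GetProp_Native / GetProp_NativePrototype - plain slot read, no call.
//   2. GetProp_CallScripted                     - scripted getter on a
//                                                 non-DOM prototype holder.
//   3. GetProp_CallNative(Prototype)/DOMProxy*  - JSNative getter.
//   4. GetProp_DOMProxyShadowed                 - property shadowed on the
//                                                 proxy itself; call Proxy::get.
// Anything that fits none of these leaves *attached false and stays on the
// fallback path.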
"WithGeneration" : "", michael@0: callee->native()); michael@0: michael@0: ICStub *newStub = nullptr; michael@0: if (isDOMProxy) { michael@0: JS_ASSERT(obj != holder); michael@0: ICStub::Kind kind; michael@0: if (domProxyHasGeneration) { michael@0: if (UpdateExistingGenerationalDOMProxyStub(stub, obj)) { michael@0: *attached = true; michael@0: return true; michael@0: } michael@0: kind = ICStub::GetProp_CallDOMProxyWithGenerationNative; michael@0: } else { michael@0: kind = ICStub::GetProp_CallDOMProxyNative; michael@0: } michael@0: Rooted proxy(cx, &obj->as()); michael@0: ICGetPropCallDOMProxyNativeCompiler michael@0: compiler(cx, kind, monitorStub, proxy, holder, callee, script->pcToOffset(pc)); michael@0: newStub = compiler.getStub(compiler.getStubSpace(script)); michael@0: } else if (obj == holder) { michael@0: ICGetProp_CallNative::Compiler compiler(cx, monitorStub, obj, callee, michael@0: script->pcToOffset(pc)); michael@0: newStub = compiler.getStub(compiler.getStubSpace(script)); michael@0: } else { michael@0: ICGetProp_CallNativePrototype::Compiler compiler(cx, monitorStub, obj, holder, callee, michael@0: script->pcToOffset(pc)); michael@0: newStub = compiler.getStub(compiler.getStubSpace(script)); michael@0: } michael@0: if (!newStub) michael@0: return false; michael@0: stub->addNewStub(newStub); michael@0: *attached = true; michael@0: return true; michael@0: } michael@0: michael@0: // If it's a shadowed listbase proxy property, attach stub to call Proxy::get instead. michael@0: if (isDOMProxy && domProxyShadowsResult == Shadows) { michael@0: JS_ASSERT(obj == holder); michael@0: #if JS_HAS_NO_SUCH_METHOD michael@0: if (isCallProp) michael@0: return true; michael@0: #endif michael@0: michael@0: IonSpew(IonSpew_BaselineIC, " Generating GetProp(DOMProxyProxy) stub"); michael@0: Rooted proxy(cx, &obj->as()); michael@0: ICGetProp_DOMProxyShadowed::Compiler compiler(cx, monitorStub, proxy, name, michael@0: script->pcToOffset(pc)); michael@0: ICStub *newStub = compiler.getStub(compiler.getStubSpace(script)); michael@0: if (!newStub) michael@0: return false; michael@0: stub->addNewStub(newStub); michael@0: *attached = true; michael@0: return true; michael@0: } michael@0: michael@0: return true; michael@0: } michael@0: michael@0: static bool michael@0: TryAttachPrimitiveGetPropStub(JSContext *cx, HandleScript script, jsbytecode *pc, michael@0: ICGetProp_Fallback *stub, HandlePropertyName name, HandleValue val, michael@0: HandleValue res, bool *attached) michael@0: { michael@0: JS_ASSERT(!*attached); michael@0: michael@0: JSValueType primitiveType; michael@0: RootedObject proto(cx); michael@0: Rooted global(cx, &script->global()); michael@0: if (val.isString()) { michael@0: primitiveType = JSVAL_TYPE_STRING; michael@0: proto = GlobalObject::getOrCreateStringPrototype(cx, global); michael@0: } else if (val.isNumber()) { michael@0: primitiveType = JSVAL_TYPE_DOUBLE; michael@0: proto = GlobalObject::getOrCreateNumberPrototype(cx, global); michael@0: } else { michael@0: JS_ASSERT(val.isBoolean()); michael@0: primitiveType = JSVAL_TYPE_BOOLEAN; michael@0: proto = GlobalObject::getOrCreateBooleanPrototype(cx, global); michael@0: } michael@0: if (!proto) michael@0: return false; michael@0: michael@0: // Instantiate this property, for use during Ion compilation. michael@0: RootedId id(cx, NameToId(name)); michael@0: if (IsIonEnabled(cx)) michael@0: types::EnsureTrackPropertyTypes(cx, proto, id); michael@0: michael@0: // For now, only look for properties directly set on the prototype. 
static bool
DoGetPropFallback(JSContext *cx, BaselineFrame *frame, ICGetProp_Fallback *stub_,
                  MutableHandleValue val, MutableHandleValue res)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICGetProp_Fallback *> stub(frame, stub_);

    jsbytecode *pc = stub->icEntry()->pc(frame->script());
    JSOp op = JSOp(*pc);
    FallbackICSpew(cx, stub, "GetProp(%s)", js_CodeName[op]);

    JS_ASSERT(op == JSOP_GETPROP || op == JSOP_CALLPROP || op == JSOP_LENGTH || op == JSOP_GETXPROP);

    RootedPropertyName name(cx, frame->script()->getName(pc));

    if (op == JSOP_LENGTH && val.isMagic(JS_OPTIMIZED_ARGUMENTS)) {
        // Handle arguments.length access.
        if (IsOptimizedArguments(frame, val.address())) {
            res.setInt32(frame->numActualArgs());

            // Monitor result
            types::TypeScript::Monitor(cx, frame->script(), pc, res);
            if (!stub->addMonitorStubForValue(cx, frame->script(), res))
                return false;

            bool attached = false;
            if (!TryAttachLengthStub(cx, frame->script(), stub, val, res, &attached))
                return false;
            JS_ASSERT(attached);

            return true;
        }
    }

    RootedObject obj(cx, ToObjectFromStack(cx, val));
    if (!obj)
        return false;

    RootedId id(cx, NameToId(name));
    if (!JSObject::getGeneric(cx, obj, obj, id, res))
        return false;

#if JS_HAS_NO_SUCH_METHOD
    // Handle objects with __noSuchMethod__.
    if (op == JSOP_CALLPROP && MOZ_UNLIKELY(res.isUndefined()) && val.isObject()) {
        if (!OnUnknownMethod(cx, obj, IdToValue(id), res))
            return false;
    }
#endif

    types::TypeScript::Monitor(cx, frame->script(), pc, res);

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    // Add a type monitor stub for the resulting value.
    if (!stub->addMonitorStubForValue(cx, frame->script(), res))
        return false;

    if (stub->numOptimizedStubs() >= ICGetProp_Fallback::MAX_OPTIMIZED_STUBS) {
        // TODO: Discard all stubs in this IC and replace with generic getprop stub.
        return true;
    }

    bool attached = false;

    if (op == JSOP_LENGTH) {
        if (!TryAttachLengthStub(cx, frame->script(), stub, val, res, &attached))
            return false;
        if (attached)
            return true;
    }

    RootedScript script(cx, frame->script());

    if (!TryAttachNativeGetPropStub(cx, script, pc, stub, name, val, res, &attached))
        return false;
    if (attached)
        return true;

    if (val.isString() || val.isNumber() || val.isBoolean()) {
        if (!TryAttachPrimitiveGetPropStub(cx, script, pc, stub, name, val, res, &attached))
            return false;
        if (attached)
            return true;
    }

    JS_ASSERT(!attached);
    stub->noteUnoptimizableAccess();

    return true;
}
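// The fallback performs the full generic get first, then tries to attach, in
// order: a length stub (JSOP_LENGTH only), a native slot/getter stub, and a
// primitive-receiver stub. If nothing applies, noteUnoptimizableAccess()
// records that this site resists caching.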
typedef bool (*DoGetPropFallbackFn)(JSContext *, BaselineFrame *, ICGetProp_Fallback *,
                                    MutableHandleValue, MutableHandleValue);
static const VMFunction DoGetPropFallbackInfo =
    FunctionInfo<DoGetPropFallbackFn>(DoGetPropFallback, PopValues(1));

bool
ICGetProp_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(R0 == JSReturnOperand);

    EmitRestoreTailCallReg(masm);

    // Ensure stack is fully synced for the expression decompiler.
    masm.pushValue(R0);

    // Push arguments.
    masm.pushValue(R0);
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    if (!tailCallVM(DoGetPropFallbackInfo, masm))
        return false;

    // What follows is bailout for inlined scripted getters or for on-stack
    // debug mode recompile. The return address pointed to by the baseline
    // stack points here.
    //
    // Even though the fallback frame doesn't enter a stub frame, the CallScripted
    // frame that we are emulating does. Again, we lie.
#ifdef DEBUG
    entersStubFrame_ = true;
#endif

    Label leaveStubCommon;

    returnFromStubOffset_ = masm.currentOffset();
    leaveStubFrameHead(masm, false);
    masm.jump(&leaveStubCommon);

    returnFromIonOffset_ = masm.currentOffset();
    leaveStubFrameHead(masm, true);

    masm.bind(&leaveStubCommon);
    leaveStubFrameCommonTail(masm);

    // When we get here, BaselineStubReg contains the ICGetProp_Fallback stub,
    // which we can't use to enter the TypeMonitor IC, because it's a MonitoredFallbackStub
    // instead of a MonitoredStub. So, we cheat.
    masm.loadPtr(Address(BaselineStubReg, ICMonitoredFallbackStub::offsetOfFallbackMonitorStub()),
                 BaselineStubReg);
    EmitEnterTypeMonitorIC(masm, ICTypeMonitor_Fallback::offsetOfFirstMonitorStub());

    return true;
}
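// Two distinct return points are emitted above: one for returning from a VM
// (C++) call and one for returning from Ion-compiled getter code. Their code
// offsets are recorded in returnFromStubOffset_ / returnFromIonOffset_ and
// published to the JitCompartment in postGenerateStubCode below, so that the
// bailout and debug-mode-OSR machinery can redirect return addresses here.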
bool
ICGetProp_Fallback::Compiler::postGenerateStubCode(MacroAssembler &masm, Handle<JitCode *> code)
{
    JitCompartment *comp = cx->compartment()->jitCompartment();

    CodeOffsetLabel fromIon(returnFromIonOffset_);
    fromIon.fixup(&masm);
    comp->initBaselineGetPropReturnFromIonAddr(code->raw() + fromIon.offset());

    CodeOffsetLabel fromVM(returnFromStubOffset_);
    fromVM.fixup(&masm);
    comp->initBaselineGetPropReturnFromStubAddr(code->raw() + fromVM.offset());

    return true;
}

bool
ICGetProp_ArrayLength::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    Register scratch = R1.scratchReg();

    // Unbox R0 and guard it's an array.
    Register obj = masm.extractObject(R0, ExtractTemp0);
    masm.branchTestObjClass(Assembler::NotEqual, obj, scratch, &ArrayObject::class_, &failure);

    // Load obj->elements->length.
    masm.loadPtr(Address(obj, JSObject::offsetOfElements()), scratch);
    masm.load32(Address(scratch, ObjectElements::offsetOfLength()), scratch);

    // Guard length fits in an int32.
    masm.branchTest32(Assembler::Signed, scratch, scratch, &failure);

    masm.tagValue(JSVAL_TYPE_INT32, scratch, R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}

bool
ICGetProp_TypedArrayLength::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    Register scratch = R1.scratchReg();

    // Unbox R0.
    Register obj = masm.extractObject(R0, ExtractTemp0);

    // Implement the negated version of JSObject::isTypedArray predicate.
    masm.loadObjClass(obj, scratch);
    masm.branchPtr(Assembler::Below, scratch, ImmPtr(&TypedArrayObject::classes[0]),
                   &failure);
    masm.branchPtr(Assembler::AboveOrEqual, scratch,
                   ImmPtr(&TypedArrayObject::classes[ScalarTypeDescr::TYPE_MAX]),
                   &failure);

    // Load length from fixed slot.
    masm.loadValue(Address(obj, TypedArrayObject::lengthOffset()), R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
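// The typed array guard above relies on TypedArrayObject::classes being one
// contiguous array, so "is a typed array" reduces to two unsigned pointer
// comparisons of the class pointer against the first and one-past-last Class
// in that range; no per-kind (Int8Array, Float64Array, ...) branching is
// needed.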
bool
ICGetProp_StringLength::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestString(Assembler::NotEqual, R0, &failure);

    // Unbox string and load its length.
    Register string = masm.extractString(R0, ExtractTemp0);
    masm.loadStringLength(string, string);

    masm.tagValue(JSVAL_TYPE_INT32, string, R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}

bool
ICGetProp_Primitive::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    switch (primitiveType_) {
      case JSVAL_TYPE_STRING:
        masm.branchTestString(Assembler::NotEqual, R0, &failure);
        break;
      case JSVAL_TYPE_DOUBLE: // Also used for int32.
        masm.branchTestNumber(Assembler::NotEqual, R0, &failure);
        break;
      case JSVAL_TYPE_BOOLEAN:
        masm.branchTestBoolean(Assembler::NotEqual, R0, &failure);
        break;
      default:
        MOZ_ASSUME_UNREACHABLE("unexpected type");
    }

    GeneralRegisterSet regs(availableGeneralRegs(1));
    Register holderReg = regs.takeAny();
    Register scratchReg = regs.takeAny();

    // Verify the shape of the prototype.
    masm.movePtr(ImmGCPtr(prototype_.get()), holderReg);

    Address shapeAddr(BaselineStubReg, ICGetProp_Primitive::offsetOfProtoShape());
    masm.loadPtr(Address(holderReg, JSObject::offsetOfShape()), scratchReg);
    masm.branchPtr(Assembler::NotEqual, shapeAddr, scratchReg, &failure);

    if (!isFixedSlot_)
        masm.loadPtr(Address(holderReg, JSObject::offsetOfSlots()), holderReg);

    masm.load32(Address(BaselineStubReg, ICGetPropNativeStub::offsetOfOffset()), scratchReg);
    masm.loadValue(BaseIndex(holderReg, scratchReg, TimesOne), R0);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
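// Fixed vs. dynamic slots: offsetOfOffset() holds a byte offset (hence the
// TimesOne scale) that is relative either to the object itself, for a fixed
// slot stored inline, or to its out-of-line slots array, which is why the
// dynamic case first re-bases holderReg through JSObject::offsetOfSlots().
// The same pattern recurs in the native GetProp and SetProp stubs below.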
bool
ICGetPropNativeCompiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    GeneralRegisterSet regs(availableGeneralRegs(1));

    // Guard input is an object.
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    Register scratch = regs.takeAnyExcluding(BaselineTailCallReg);

    // Unbox and shape guard.
    Register objReg = masm.extractObject(R0, ExtractTemp0);
    masm.loadPtr(Address(BaselineStubReg, ICGetPropNativeStub::offsetOfShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failure);

    Register holderReg;
    if (obj_ == holder_) {
        holderReg = objReg;
    } else {
        // Shape guard holder.
        holderReg = regs.takeAny();
        masm.loadPtr(Address(BaselineStubReg, ICGetProp_NativePrototype::offsetOfHolder()),
                     holderReg);
        masm.loadPtr(Address(BaselineStubReg, ICGetProp_NativePrototype::offsetOfHolderShape()),
                     scratch);
        masm.branchTestObjShape(Assembler::NotEqual, holderReg, scratch, &failure);
    }

    if (!isFixedSlot_) {
        // Don't overwrite actual holderReg if we need to load a dynamic slots object.
        // May need to preserve object for noSuchMethod check later.
        Register nextHolder = regs.takeAny();
        masm.loadPtr(Address(holderReg, JSObject::offsetOfSlots()), nextHolder);
        holderReg = nextHolder;
    }

    masm.load32(Address(BaselineStubReg, ICGetPropNativeStub::offsetOfOffset()), scratch);
    BaseIndex result(holderReg, scratch, TimesOne);

#if JS_HAS_NO_SUCH_METHOD
#ifdef DEBUG
    entersStubFrame_ = true;
#endif
    if (isCallProp_) {
        // Check for __noSuchMethod__ invocation.
        Label afterNoSuchMethod;
        Label skipNoSuchMethod;

        masm.push(objReg);
        masm.loadValue(result, R0);
        masm.branchTestUndefined(Assembler::NotEqual, R0, &skipNoSuchMethod);

        masm.pop(objReg);

        // Call __noSuchMethod__ checker. Object pointer is in objReg.
        regs = availableGeneralRegs(0);
        regs.takeUnchecked(objReg);
        regs.takeUnchecked(BaselineTailCallReg);
        ValueOperand val = regs.takeValueOperand();

        // Box and push obj onto baseline frame stack for decompiler.
        EmitRestoreTailCallReg(masm);
        masm.tagValue(JSVAL_TYPE_OBJECT, objReg, val);
        masm.pushValue(val);
        EmitRepushTailCallReg(masm);

        enterStubFrame(masm, regs.getAnyExcluding(BaselineTailCallReg));

        masm.movePtr(ImmGCPtr(propName_.get()), val.scratchReg());
        masm.tagValue(JSVAL_TYPE_STRING, val.scratchReg(), val);
        masm.pushValue(val);
        masm.push(objReg);
        if (!callVM(LookupNoSuchMethodHandlerInfo, masm))
            return false;

        leaveStubFrame(masm);

        // Pop pushed obj from baseline stack.
        EmitUnstowICValues(masm, 1, /* discard = */ true);

        masm.jump(&afterNoSuchMethod);
        masm.bind(&skipNoSuchMethod);

        // Pop pushed objReg.
        masm.addPtr(Imm32(sizeof(void *)), BaselineStackReg);
        masm.bind(&afterNoSuchMethod);
    } else {
        masm.loadValue(result, R0);
    }
#else
    masm.loadValue(result, R0);
#endif

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
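// For CALLPROP sites the stub must detect an |undefined| result and fall over
// to the __noSuchMethod__ hook, e.g. |obj.missing()| on an object whose proto
// chain defines __noSuchMethod__. That path requires a VM call, which is why
// this otherwise call-free slot-read stub conditionally enters a stub frame.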
bool
ICGetProp_CallScripted::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    Label failureLeaveStubFrame;
    GeneralRegisterSet regs(availableGeneralRegs(1));
    Register scratch = regs.takeAnyExcluding(BaselineTailCallReg);

    // Guard input is an object.
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    // Unbox and shape guard.
    Register objReg = masm.extractObject(R0, ExtractTemp0);
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallScripted::offsetOfReceiverShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failure);

    Register holderReg = regs.takeAny();
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallScripted::offsetOfHolder()), holderReg);
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallScripted::offsetOfHolderShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, holderReg, scratch, &failure);
    regs.add(holderReg);

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, scratch);

    // Load callee function and code. To ensure that |code| doesn't end up being
    // ArgumentsRectifierReg, if it's available we assign it to |callee| instead.
    Register callee;
    if (regs.has(ArgumentsRectifierReg)) {
        callee = ArgumentsRectifierReg;
        regs.take(callee);
    } else {
        callee = regs.takeAny();
    }
    Register code = regs.takeAny();
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallScripted::offsetOfGetter()), callee);
    masm.branchIfFunctionHasNoScript(callee, &failureLeaveStubFrame);
    masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), code);
    masm.loadBaselineOrIonRaw(code, code, SequentialExecution, &failureLeaveStubFrame);

    // Getter is called with 0 arguments, just |obj| as thisv.
    // Note that we use Push, not push, so that callIon will align the stack
    // properly on ARM.
    masm.Push(R0);
    EmitCreateStubFrameDescriptor(masm, scratch);
    masm.Push(Imm32(0));  // ActualArgc is 0
    masm.Push(callee);
    masm.Push(scratch);

    // Handle arguments underflow.
    Label noUnderflow;
    masm.load16ZeroExtend(Address(callee, JSFunction::offsetOfNargs()), scratch);
    masm.branch32(Assembler::Equal, scratch, Imm32(0), &noUnderflow);
    {
        // Call the arguments rectifier.
        JS_ASSERT(ArgumentsRectifierReg != code);

        JitCode *argumentsRectifier =
            cx->runtime()->jitRuntime()->getArgumentsRectifier(SequentialExecution);

        masm.movePtr(ImmGCPtr(argumentsRectifier), code);
        masm.loadPtr(Address(code, JitCode::offsetOfCode()), code);
        masm.mov(ImmWord(0), ArgumentsRectifierReg);
    }

    masm.bind(&noUnderflow);

    // If needed, update SPS Profiler frame entry. At this point, callee and scratch can
    // be clobbered.
    {
        GeneralRegisterSet availRegs = availableGeneralRegs(0);
        availRegs.take(ArgumentsRectifierReg);
        availRegs.take(code);
        emitProfilingUpdate(masm, availRegs, ICGetProp_CallScripted::offsetOfPCOffset());
    }

    masm.callIon(code);

    leaveStubFrame(masm, true);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    // Leave stub frame and go to next stub.
    masm.bind(&failureLeaveStubFrame);
    leaveStubFrame(masm, false);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
bool
ICGetProp_CallNative::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;

    GeneralRegisterSet regs(availableGeneralRegs(0));
    Register obj = InvalidReg;
    if (inputDefinitelyObject_) {
        obj = R0.scratchReg();
    } else {
        regs.take(R0);
        masm.branchTestObject(Assembler::NotEqual, R0, &failure);
        obj = masm.extractObject(R0, ExtractTemp0);
    }
    regs.takeUnchecked(obj);

    Register scratch = regs.takeAnyExcluding(BaselineTailCallReg);

    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallNative::offsetOfHolderShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, obj, scratch, &failure);

    enterStubFrame(masm, scratch);

    masm.Push(obj);

    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallNative::offsetOfGetter()), scratch);
    masm.Push(scratch);

    regs.add(scratch);
    if (!inputDefinitelyObject_)
        regs.add(R0);

    // If needed, update SPS Profiler frame entry.
    emitProfilingUpdate(masm, regs, ICGetProp_CallNative::offsetOfPCOffset());

    if (!callVM(DoCallNativeGetterInfo, masm))
        return false;
    leaveStubFrame(masm);

    EmitEnterTypeMonitorIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);

    return true;
}

bool
ICGetProp_CallNativePrototype::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    GeneralRegisterSet regs(availableGeneralRegs(1));
    Register scratch = regs.takeAnyExcluding(BaselineTailCallReg);

    // Guard input is an object.
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    // Unbox and shape guard.
    Register objReg = masm.extractObject(R0, ExtractTemp0);
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallNativePrototype::offsetOfReceiverShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failure);

    Register holderReg = regs.takeAny();
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallNativePrototype::offsetOfHolder()), holderReg);
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallNativePrototype::offsetOfHolderShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, holderReg, scratch, &failure);
    regs.add(holderReg);

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, scratch);

    // Load callee function.
    Register callee = regs.takeAny();
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallNativePrototype::offsetOfGetter()), callee);

    // Push args for vm call.
    masm.push(objReg);
    masm.push(callee);

    // Don't have to preserve R0 anymore.
    regs.add(R0);

    // If needed, update SPS Profiler frame entry.
    emitProfilingUpdate(masm, regs, ICGetProp_CallNativePrototype::offsetOfPCOffset());

    if (!callVM(DoCallNativeGetterInfo, masm))
        return false;
    leaveStubFrame(masm);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
bool
ICGetPropCallDOMProxyNativeCompiler::generateStubCode(MacroAssembler &masm,
                                                      Address* expandoAndGenerationAddr,
                                                      Address* generationAddr)
{
    Label failure;
    GeneralRegisterSet regs(availableGeneralRegs(1));
    Register scratch = regs.takeAnyExcluding(BaselineTailCallReg);

    // Guard input is an object.
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    // Unbox.
    Register objReg = masm.extractObject(R0, ExtractTemp0);

    // Shape guard.
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallDOMProxyNative::offsetOfShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failure);

    // Guard for ListObject.
    {
        GeneralRegisterSet domProxyRegSet(GeneralRegisterSet::All());
        domProxyRegSet.take(BaselineStubReg);
        domProxyRegSet.take(objReg);
        domProxyRegSet.take(scratch);
        Address expandoShapeAddr(BaselineStubReg, ICGetProp_CallDOMProxyNative::offsetOfExpandoShape());
        GenerateDOMProxyChecks(
                cx, masm, objReg,
                Address(BaselineStubReg, ICGetProp_CallDOMProxyNative::offsetOfProxyHandler()),
                &expandoShapeAddr, expandoAndGenerationAddr, generationAddr,
                scratch,
                domProxyRegSet,
                &failure);
    }

    Register holderReg = regs.takeAny();
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallDOMProxyNative::offsetOfHolder()),
                 holderReg);
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallDOMProxyNative::offsetOfHolderShape()),
                 scratch);
    masm.branchTestObjShape(Assembler::NotEqual, holderReg, scratch, &failure);
    regs.add(holderReg);

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, scratch);

    // Load callee function.
    Register callee = regs.takeAny();
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallDOMProxyNative::offsetOfGetter()), callee);

    // Push args for vm call.
    masm.push(objReg);
    masm.push(callee);

    // Don't have to preserve R0 anymore.
    regs.add(R0);

    // If needed, update SPS Profiler frame entry.
    emitProfilingUpdate(masm, regs, ICGetProp_CallDOMProxyNative::offsetOfPCOffset());

    if (!callVM(DoCallNativeGetterInfo, masm))
        return false;
    leaveStubFrame(masm);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
bool
ICGetPropCallDOMProxyNativeCompiler::generateStubCode(MacroAssembler &masm)
{
    if (kind == ICStub::GetProp_CallDOMProxyNative)
        return generateStubCode(masm, nullptr, nullptr);

    Address internalStructAddress(BaselineStubReg,
                                  ICGetProp_CallDOMProxyWithGenerationNative::offsetOfInternalStruct());
    Address generationAddress(BaselineStubReg,
                              ICGetProp_CallDOMProxyWithGenerationNative::offsetOfGeneration());
    return generateStubCode(masm, &internalStructAddress, &generationAddress);
}

ICStub *
ICGetPropCallDOMProxyNativeCompiler::getStub(ICStubSpace *space)
{
    RootedShape shape(cx, proxy_->lastProperty());
    RootedShape holderShape(cx, holder_->lastProperty());

    Value expandoSlot = proxy_->getFixedSlot(GetDOMProxyExpandoSlot());
    RootedShape expandoShape(cx, nullptr);
    ExpandoAndGeneration *expandoAndGeneration;
    int32_t generation;
    Value expandoVal;
    if (kind == ICStub::GetProp_CallDOMProxyNative) {
        expandoVal = expandoSlot;
    } else {
        JS_ASSERT(kind == ICStub::GetProp_CallDOMProxyWithGenerationNative);
        JS_ASSERT(!expandoSlot.isObject() && !expandoSlot.isUndefined());
        expandoAndGeneration = (ExpandoAndGeneration*)expandoSlot.toPrivate();
        expandoVal = expandoAndGeneration->expando;
        generation = expandoAndGeneration->generation;
    }

    if (expandoVal.isObject())
        expandoShape = expandoVal.toObject().lastProperty();

    if (kind == ICStub::GetProp_CallDOMProxyNative) {
        return ICGetProp_CallDOMProxyNative::New(
            space, getStubCode(), firstMonitorStub_, shape, proxy_->handler(),
            expandoShape, holder_, holderShape, getter_, pcOffset_);
    }

    return ICGetProp_CallDOMProxyWithGenerationNative::New(
        space, getStubCode(), firstMonitorStub_, shape, proxy_->handler(),
        expandoAndGeneration, generation, expandoShape, holder_, holderShape, getter_,
        pcOffset_);
}

ICStub *
ICGetProp_DOMProxyShadowed::Compiler::getStub(ICStubSpace *space)
{
    RootedShape shape(cx, proxy_->lastProperty());
    return ICGetProp_DOMProxyShadowed::New(space, getStubCode(), firstMonitorStub_, shape,
                                           proxy_->handler(), name_, pcOffset_);
}

static bool
ProxyGet(JSContext *cx, HandleObject proxy, HandlePropertyName name, MutableHandleValue vp)
{
    RootedId id(cx, NameToId(name));
    return Proxy::get(cx, proxy, proxy, id, vp);
}

typedef bool (*ProxyGetFn)(JSContext *cx, HandleObject proxy, HandlePropertyName name,
                           MutableHandleValue vp);
static const VMFunction ProxyGetInfo = FunctionInfo<ProxyGetFn>(ProxyGet);
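// ProxyGet exists only to give the VM-call mechanism a C++ entry point with
// rooted handle arguments; it simply forwards to Proxy::get with the proxy
// doubling as its own receiver.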
bool
ICGetProp_DOMProxyShadowed::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;

    GeneralRegisterSet regs(availableGeneralRegs(1));
    // Need to reserve a scratch register, but the scratch register should not be
    // BaselineTailCallReg, because it's used for |enterStubFrame| which needs a
    // non-BaselineTailCallReg scratch reg.
    Register scratch = regs.takeAnyExcluding(BaselineTailCallReg);

    // Guard input is an object.
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    // Unbox.
    Register objReg = masm.extractObject(R0, ExtractTemp0);

    // Shape guard.
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_DOMProxyShadowed::offsetOfShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failure);

    // Guard for ListObject.
    {
        GeneralRegisterSet domProxyRegSet(GeneralRegisterSet::All());
        domProxyRegSet.take(BaselineStubReg);
        domProxyRegSet.take(objReg);
        domProxyRegSet.take(scratch);
        GenerateDOMProxyChecks(
                cx, masm, objReg,
                Address(BaselineStubReg, ICGetProp_DOMProxyShadowed::offsetOfProxyHandler()),
                /*expandoShapeAddr=*/nullptr,
                /*expandoAndGenerationAddr=*/nullptr,
                /*generationAddr=*/nullptr,
                scratch,
                domProxyRegSet,
                &failure);
    }

    // Call ProxyGet(JSContext *cx, HandleObject proxy, HandlePropertyName name, MutableHandleValue vp);

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, scratch);

    // Push property name and proxy object.
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_DOMProxyShadowed::offsetOfName()), scratch);
    masm.push(scratch);
    masm.push(objReg);

    // Don't have to preserve R0 anymore.
    regs.add(R0);

    // If needed, update SPS Profiler frame entry.
    emitProfilingUpdate(masm, regs, ICGetProp_DOMProxyShadowed::offsetOfPCOffset());

    if (!callVM(ProxyGetInfo, masm))
        return false;
    leaveStubFrame(masm);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
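// When the property is shadowed on the proxy itself, no holder or slot can be
// cached at all; the best this stub can do is skip the fallback's lookup work
// and call straight into Proxy::get under the usual handler guards.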
bool
ICGetProp_ArgumentsLength::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    if (which_ == ICGetProp_ArgumentsLength::Magic) {
        // Ensure that this is lazy arguments.
        masm.branchTestMagicValue(Assembler::NotEqual, R0, JS_OPTIMIZED_ARGUMENTS, &failure);

        // Ensure that the frame has not created a real arguments object since.
        masm.branchTest32(Assembler::NonZero,
                          Address(BaselineFrameReg, BaselineFrame::reverseOffsetOfFlags()),
                          Imm32(BaselineFrame::HAS_ARGS_OBJ),
                          &failure);

        Address actualArgs(BaselineFrameReg, BaselineFrame::offsetOfNumActualArgs());
        masm.loadPtr(actualArgs, R0.scratchReg());
        masm.tagValue(JSVAL_TYPE_INT32, R0.scratchReg(), R0);
        EmitReturnFromIC(masm);

        masm.bind(&failure);
        EmitStubGuardFailure(masm);
        return true;
    }
    JS_ASSERT(which_ == ICGetProp_ArgumentsLength::Strict ||
              which_ == ICGetProp_ArgumentsLength::Normal);

    bool isStrict = which_ == ICGetProp_ArgumentsLength::Strict;
    const Class *clasp = isStrict ? &StrictArgumentsObject::class_ : &NormalArgumentsObject::class_;

    Register scratchReg = R1.scratchReg();

    // Guard on input being an arguments object.
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);
    Register objReg = masm.extractObject(R0, ExtractTemp0);
    masm.branchTestObjClass(Assembler::NotEqual, objReg, scratchReg, clasp, &failure);

    // Get initial length value.
    masm.unboxInt32(Address(objReg, ArgumentsObject::getInitialLengthSlotOffset()), scratchReg);

    // Test if length has been overridden.
    masm.branchTest32(Assembler::NonZero,
                      scratchReg,
                      Imm32(ArgumentsObject::LENGTH_OVERRIDDEN_BIT),
                      &failure);

    // Nope, shift out arguments length and return it.
    // No need to type monitor because this stub always returns Int32.
    masm.rshiftPtr(Imm32(ArgumentsObject::PACKED_BITS_COUNT), scratchReg);
    masm.tagValue(JSVAL_TYPE_INT32, scratchReg, R0);
    EmitReturnFromIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}

void
BaselineScript::noteAccessedGetter(uint32_t pcOffset)
{
    ICEntry &entry = icEntryFromPCOffset(pcOffset);
    ICFallbackStub *stub = entry.fallbackStub();

    if (stub->isGetProp_Fallback())
        stub->toGetProp_Fallback()->noteAccessedGetter();
}
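// The arguments object packs its length and an "overridden" flag into one
// int32 slot: the low PACKED_BITS_COUNT bits are flags and the real length
// lives in the bits above them. So an assignment like |arguments.length = 7|
// sets LENGTH_OVERRIDDEN_BIT and permanently defeats this stub, while the
// common case is a single unbox, test, and shift.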
//
// SetProp_Fallback
//

// Attach an optimized stub for a SETPROP/SETGNAME/SETNAME op.
static bool
TryAttachSetPropStub(JSContext *cx, HandleScript script, jsbytecode *pc, ICSetProp_Fallback *stub,
                     HandleObject obj, HandleShape oldShape, uint32_t oldSlots,
                     HandlePropertyName name, HandleId id, HandleValue rhs, bool *attached)
{
    JS_ASSERT(!*attached);

    if (!obj->isNative() || obj->watched())
        return true;

    RootedShape shape(cx);
    RootedObject holder(cx);
    if (!EffectlesslyLookupProperty(cx, obj, name, &holder, &shape))
        return false;

    size_t chainDepth;
    if (IsCacheableSetPropAddSlot(cx, obj, oldShape, oldSlots, id, holder, shape, &chainDepth)) {
        // Don't attach if proto chain depth is too high.
        if (chainDepth > ICSetProp_NativeAdd::MAX_PROTO_CHAIN_DEPTH)
            return true;

        bool isFixedSlot;
        uint32_t offset;
        GetFixedOrDynamicSlotOffset(obj, shape->slot(), &isFixedSlot, &offset);

        IonSpew(IonSpew_BaselineIC, " Generating SetProp(NativeObject.ADD) stub");
        ICSetPropNativeAddCompiler compiler(cx, obj, oldShape, chainDepth, isFixedSlot, offset);
        ICUpdatedStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;
        if (!newStub->addUpdateStubForValue(cx, script, obj, id, rhs))
            return false;

        stub->addNewStub(newStub);
        *attached = true;
        return true;
    }

    if (IsCacheableSetPropWriteSlot(obj, oldShape, holder, shape)) {
        bool isFixedSlot;
        uint32_t offset;
        GetFixedOrDynamicSlotOffset(obj, shape->slot(), &isFixedSlot, &offset);

        IonSpew(IonSpew_BaselineIC, " Generating SetProp(NativeObject.PROP) stub");
        ICSetProp_Native::Compiler compiler(cx, obj, isFixedSlot, offset);
        ICUpdatedStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;
        if (!newStub->addUpdateStubForValue(cx, script, obj, id, rhs))
            return false;

        stub->addNewStub(newStub);
        *attached = true;
        return true;
    }

    bool isScripted = false;
    bool cacheableCall = IsCacheableSetPropCall(cx, obj, holder, shape, &isScripted);

    // Try handling scripted setters.
    if (cacheableCall && isScripted) {
        RootedFunction callee(cx, &shape->setterObject()->as<JSFunction>());
        JS_ASSERT(obj != holder);
        JS_ASSERT(callee->hasScript());

        IonSpew(IonSpew_BaselineIC, " Generating SetProp(NativeObj/ScriptedSetter %s:%d) stub",
                callee->nonLazyScript()->filename(), callee->nonLazyScript()->lineno());

        ICSetProp_CallScripted::Compiler compiler(cx, obj, holder, callee, script->pcToOffset(pc));
        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;

        stub->addNewStub(newStub);
        *attached = true;
        return true;
    }

    // Try handling JSNative setters.
    if (cacheableCall && !isScripted) {
        RootedFunction callee(cx, &shape->setterObject()->as<JSFunction>());
        JS_ASSERT(obj != holder);
        JS_ASSERT(callee->isNative());

        IonSpew(IonSpew_BaselineIC, " Generating SetProp(NativeObj/NativeSetter %p) stub",
                callee->native());

        ICSetProp_CallNative::Compiler compiler(cx, obj, holder, callee, script->pcToOffset(pc));
        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;

        stub->addNewStub(newStub);
        *attached = true;
        return true;
    }

    return true;
}
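// The two slot cases differ on whether the property already exists. For
// |obj.x = v| where x is a known slot, SetProp_Native overwrites the slot
// under shape and type guards. For a first-time |obj.x = v| (e.g. in a
// constructor), SetProp_NativeAdd additionally guards the old shape and up to
// MAX_PROTO_CHAIN_DEPTH prototype shapes, then transitions the object to the
// new shape and stores into the freshly added slot.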
static bool
DoSetPropFallback(JSContext *cx, BaselineFrame *frame, ICSetProp_Fallback *stub_,
                  HandleValue lhs, HandleValue rhs, MutableHandleValue res)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICSetProp_Fallback *> stub(frame, stub_);

    RootedScript script(cx, frame->script());
    jsbytecode *pc = stub->icEntry()->pc(script);
    JSOp op = JSOp(*pc);
    FallbackICSpew(cx, stub, "SetProp(%s)", js_CodeName[op]);

    JS_ASSERT(op == JSOP_SETPROP ||
              op == JSOP_SETNAME ||
              op == JSOP_SETGNAME ||
              op == JSOP_INITPROP ||
              op == JSOP_SETALIASEDVAR);

    RootedPropertyName name(cx);
    if (op == JSOP_SETALIASEDVAR)
        name = ScopeCoordinateName(cx->runtime()->scopeCoordinateNameCache, script, pc);
    else
        name = script->getName(pc);
    RootedId id(cx, NameToId(name));

    RootedObject obj(cx, ToObjectFromStack(cx, lhs));
    if (!obj)
        return false;
    RootedShape oldShape(cx, obj->lastProperty());
    uint32_t oldSlots = obj->numDynamicSlots();

    if (op == JSOP_INITPROP) {
        MOZ_ASSERT(name != cx->names().proto, "should have used JSOP_MUTATEPROTO");
        MOZ_ASSERT(obj->is<JSObject>());
        if (!DefineNativeProperty(cx, obj, id, rhs, nullptr, nullptr, JSPROP_ENUMERATE))
            return false;
    } else if (op == JSOP_SETNAME || op == JSOP_SETGNAME) {
        if (!SetNameOperation(cx, script, pc, obj, rhs))
            return false;
    } else if (op == JSOP_SETALIASEDVAR) {
        obj->as<ScopeObject>().setAliasedVar(cx, pc, name, rhs);
    } else {
        MOZ_ASSERT(op == JSOP_SETPROP);
        if (script->strict()) {
            if (!js::SetProperty<true>(cx, obj, id, rhs))
                return false;
        } else {
            if (!js::SetProperty<false>(cx, obj, id, rhs))
                return false;
        }
    }

    // Leave the RHS on the stack.
    res.set(rhs);

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    if (stub->numOptimizedStubs() >= ICSetProp_Fallback::MAX_OPTIMIZED_STUBS) {
        // TODO: Discard all stubs in this IC and replace with generic setprop stub.
        return true;
    }

    bool attached = false;
    if (!TryAttachSetPropStub(cx, script, pc, stub, obj, oldShape, oldSlots, name, id, rhs,
                              &attached))
    {
        return false;
    }
    if (attached)
        return true;

    JS_ASSERT(!attached);
    stub->noteUnoptimizableAccess();

    return true;
}

typedef bool (*DoSetPropFallbackFn)(JSContext *, BaselineFrame *, ICSetProp_Fallback *,
                                    HandleValue, HandleValue, MutableHandleValue);
static const VMFunction DoSetPropFallbackInfo =
    FunctionInfo<DoSetPropFallbackFn>(DoSetPropFallback, PopValues(2));
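// SetProp ICs return the RHS as the IC result: an assignment is an expression
// whose value is the assigned value (e.g. |y = (x.f = 1)|), so the fallback
// sets |res| to |rhs|, and the optimized stubs likewise end by moving the RHS
// back into R0 (see ICSetProp_Native below).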
michael@0:     masm.pushValue(R1);
michael@0:     masm.pushValue(R0);
michael@0:     masm.push(BaselineStubReg);
michael@0:     masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
michael@0: 
michael@0:     if (!tailCallVM(DoSetPropFallbackInfo, masm))
michael@0:         return false;
michael@0: 
michael@0:     // What follows is bailout and debug mode recompile code for inlined scripted
michael@0:     // setters. The return address pointed to by the baseline stack points here.
michael@0:     //
michael@0:     // Even though the fallback frame doesn't enter a stub frame, the CallScripted
michael@0:     // frame that we are emulating does. Again, we lie.
michael@0: #ifdef DEBUG
michael@0:     entersStubFrame_ = true;
michael@0: #endif
michael@0: 
michael@0:     Label leaveStubCommon;
michael@0: 
michael@0:     returnFromStubOffset_ = masm.currentOffset();
michael@0:     leaveStubFrameHead(masm, false);
michael@0:     masm.jump(&leaveStubCommon);
michael@0: 
michael@0:     returnFromIonOffset_ = masm.currentOffset();
michael@0:     leaveStubFrameHead(masm, true);
michael@0: 
michael@0:     masm.bind(&leaveStubCommon);
michael@0:     leaveStubFrameCommonTail(masm);
michael@0: 
michael@0:     // Retrieve the stashed initial argument from the caller's frame before returning.
michael@0:     EmitUnstowICValues(masm, 1);
michael@0:     EmitReturnFromIC(masm);
michael@0: 
michael@0:     return true;
michael@0: }
michael@0: 
michael@0: bool
michael@0: ICSetProp_Fallback::Compiler::postGenerateStubCode(MacroAssembler &masm, Handle<JitCode *> code)
michael@0: {
michael@0:     JitCompartment *comp = cx->compartment()->jitCompartment();
michael@0: 
michael@0:     CodeOffsetLabel fromIon(returnFromIonOffset_);
michael@0:     fromIon.fixup(&masm);
michael@0:     comp->initBaselineSetPropReturnFromIonAddr(code->raw() + fromIon.offset());
michael@0: 
michael@0:     CodeOffsetLabel fromVM(returnFromStubOffset_);
michael@0:     fromVM.fixup(&masm);
michael@0:     comp->initBaselineSetPropReturnFromStubAddr(code->raw() + fromVM.offset());
michael@0: 
michael@0:     return true;
michael@0: }
michael@0: 
michael@0: bool
michael@0: ICSetProp_Native::Compiler::generateStubCode(MacroAssembler &masm)
michael@0: {
michael@0:     Label failure;
michael@0: 
michael@0:     // Guard input is an object.
michael@0:     masm.branchTestObject(Assembler::NotEqual, R0, &failure);
michael@0: 
michael@0:     GeneralRegisterSet regs(availableGeneralRegs(2));
michael@0:     Register scratch = regs.takeAny();
michael@0: 
michael@0:     // Unbox and shape guard.
michael@0:     Register objReg = masm.extractObject(R0, ExtractTemp0);
michael@0:     masm.loadPtr(Address(BaselineStubReg, ICSetProp_Native::offsetOfShape()), scratch);
michael@0:     masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failure);
michael@0: 
michael@0:     // Guard that the type object matches.
michael@0:     masm.loadPtr(Address(BaselineStubReg, ICSetProp_Native::offsetOfType()), scratch);
michael@0:     masm.branchPtr(Assembler::NotEqual, Address(objReg, JSObject::offsetOfType()), scratch,
michael@0:                    &failure);
michael@0: 
michael@0:     // Stow both R0 and R1 (object and value).
michael@0:     EmitStowICValues(masm, 2);
michael@0: 
michael@0:     // Type update stub expects the value to check in R0.
michael@0:     masm.moveValue(R1, R0);
michael@0: 
michael@0:     // Call the type-update stub.
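michael@0:     // The type-update IC chain verifies that type information for this
michael@0:     // property already accounts for the value in R0; if not, its fallback
michael@0:     // records the new type. The store itself only happens once this check
michael@0:     // succeeds.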
michael@0:     if (!callTypeUpdateIC(masm, sizeof(Value)))
michael@0:         return false;
michael@0: 
michael@0:     // Unstow R0 and R1 (object and value)
michael@0:     EmitUnstowICValues(masm, 2);
michael@0: 
michael@0:     regs.add(R0);
michael@0:     regs.takeUnchecked(objReg);
michael@0: 
michael@0:     Register holderReg;
michael@0:     if (isFixedSlot_) {
michael@0:         holderReg = objReg;
michael@0:     } else {
michael@0:         holderReg = regs.takeAny();
michael@0:         masm.loadPtr(Address(objReg, JSObject::offsetOfSlots()), holderReg);
michael@0:     }
michael@0: 
michael@0:     // Perform the store.
michael@0:     masm.load32(Address(BaselineStubReg, ICSetProp_Native::offsetOfOffset()), scratch);
michael@0:     EmitPreBarrier(masm, BaseIndex(holderReg, scratch, TimesOne), MIRType_Value);
michael@0:     masm.storeValue(R1, BaseIndex(holderReg, scratch, TimesOne));
michael@0:     if (holderReg != objReg)
michael@0:         regs.add(holderReg);
michael@0: #ifdef JSGC_GENERATIONAL
michael@0:     {
michael@0:         Register scr = regs.takeAny();
michael@0:         GeneralRegisterSet saveRegs;
michael@0:         saveRegs.add(R1);
michael@0:         emitPostWriteBarrierSlot(masm, objReg, R1, scr, saveRegs);
michael@0:         regs.add(scr);
michael@0:     }
michael@0: #endif
michael@0: 
michael@0:     // The RHS has to be in R0.
michael@0:     masm.moveValue(R1, R0);
michael@0:     EmitReturnFromIC(masm);
michael@0: 
michael@0:     // Failure case - jump to next stub
michael@0:     masm.bind(&failure);
michael@0:     EmitStubGuardFailure(masm);
michael@0:     return true;
michael@0: }
michael@0: 
michael@0: ICUpdatedStub *
michael@0: ICSetPropNativeAddCompiler::getStub(ICStubSpace *space)
michael@0: {
michael@0:     AutoShapeVector shapes(cx);
michael@0:     if (!shapes.append(oldShape_))
michael@0:         return nullptr;
michael@0: 
michael@0:     if (!GetProtoShapes(obj_, protoChainDepth_, &shapes))
michael@0:         return nullptr;
michael@0: 
michael@0:     JS_STATIC_ASSERT(ICSetProp_NativeAdd::MAX_PROTO_CHAIN_DEPTH == 4);
michael@0: 
michael@0:     ICUpdatedStub *stub = nullptr;
michael@0:     switch (protoChainDepth_) {
michael@0:       case 0: stub = getStubSpecific<0>(space, &shapes); break;
michael@0:       case 1: stub = getStubSpecific<1>(space, &shapes); break;
michael@0:       case 2: stub = getStubSpecific<2>(space, &shapes); break;
michael@0:       case 3: stub = getStubSpecific<3>(space, &shapes); break;
michael@0:       case 4: stub = getStubSpecific<4>(space, &shapes); break;
michael@0:       default: MOZ_ASSUME_UNREACHABLE("ProtoChainDepth too high.");
michael@0:     }
michael@0:     if (!stub || !stub->initUpdatingChain(cx, space))
michael@0:         return nullptr;
michael@0:     return stub;
michael@0: }
michael@0: 
michael@0: bool
michael@0: ICSetPropNativeAddCompiler::generateStubCode(MacroAssembler &masm)
michael@0: {
michael@0:     Label failure;
michael@0:     Label failureUnstow;
michael@0: 
michael@0:     // Guard input is an object.
michael@0:     masm.branchTestObject(Assembler::NotEqual, R0, &failure);
michael@0: 
michael@0:     GeneralRegisterSet regs(availableGeneralRegs(2));
michael@0:     Register scratch = regs.takeAny();
michael@0: 
michael@0:     // Unbox and guard against old shape.
michael@0:     Register objReg = masm.extractObject(R0, ExtractTemp0);
michael@0:     masm.loadPtr(Address(BaselineStubReg, ICSetProp_NativeAddImpl<0>::offsetOfShape(0)), scratch);
michael@0:     masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failure);
michael@0: 
michael@0:     // Guard that the type object matches.
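michael@0:     // Both the old shape and the type object are guarded: the shape
michael@0:     // transition and type-update information baked into this stub are only
michael@0:     // known to be valid for objects of this exact type.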
michael@0:     masm.loadPtr(Address(BaselineStubReg, ICSetProp_NativeAdd::offsetOfType()), scratch);
michael@0:     masm.branchPtr(Assembler::NotEqual, Address(objReg, JSObject::offsetOfType()), scratch,
michael@0:                    &failure);
michael@0: 
michael@0:     // Stow both R0 and R1 (object and value).
michael@0:     EmitStowICValues(masm, 2);
michael@0: 
michael@0:     regs = availableGeneralRegs(1);
michael@0:     scratch = regs.takeAny();
michael@0:     Register protoReg = regs.takeAny();
michael@0:     // Check the proto chain.
michael@0:     for (size_t i = 0; i < protoChainDepth_; i++) {
michael@0:         masm.loadObjProto(i == 0 ? objReg : protoReg, protoReg);
michael@0:         masm.branchTestPtr(Assembler::Zero, protoReg, protoReg, &failureUnstow);
michael@0:         masm.loadPtr(Address(BaselineStubReg, ICSetProp_NativeAddImpl<0>::offsetOfShape(i + 1)),
michael@0:                      scratch);
michael@0:         masm.branchTestObjShape(Assembler::NotEqual, protoReg, scratch, &failureUnstow);
michael@0:     }
michael@0: 
michael@0:     // Shape and type checks succeeded, ok to proceed.
michael@0: 
michael@0:     // Load RHS into R0 for TypeUpdate check.
michael@0:     // Stack is currently: [..., ObjValue, RHSValue, MaybeReturnAddr? ]
michael@0:     masm.loadValue(Address(BaselineStackReg, ICStackValueOffset), R0);
michael@0: 
michael@0:     // Call the type-update stub.
michael@0:     if (!callTypeUpdateIC(masm, sizeof(Value)))
michael@0:         return false;
michael@0: 
michael@0:     // Unstow R0 and R1 (object and value)
michael@0:     EmitUnstowICValues(masm, 2);
michael@0:     regs = availableGeneralRegs(2);
michael@0:     scratch = regs.takeAny();
michael@0: 
michael@0:     // Changing object shape. Write the object's new shape.
michael@0:     Address shapeAddr(objReg, JSObject::offsetOfShape());
michael@0:     EmitPreBarrier(masm, shapeAddr, MIRType_Shape);
michael@0:     masm.loadPtr(Address(BaselineStubReg, ICSetProp_NativeAdd::offsetOfNewShape()), scratch);
michael@0:     masm.storePtr(scratch, shapeAddr);
michael@0: 
michael@0:     Register holderReg;
michael@0:     regs.add(R0);
michael@0:     regs.takeUnchecked(objReg);
michael@0:     if (isFixedSlot_) {
michael@0:         holderReg = objReg;
michael@0:     } else {
michael@0:         holderReg = regs.takeAny();
michael@0:         masm.loadPtr(Address(objReg, JSObject::offsetOfSlots()), holderReg);
michael@0:     }
michael@0: 
michael@0:     // Perform the store. No write barrier required since this is a new
michael@0:     // initialization.
michael@0:     masm.load32(Address(BaselineStubReg, ICSetProp_NativeAdd::offsetOfOffset()), scratch);
michael@0:     masm.storeValue(R1, BaseIndex(holderReg, scratch, TimesOne));
michael@0: 
michael@0:     if (holderReg != objReg)
michael@0:         regs.add(holderReg);
michael@0: 
michael@0: #ifdef JSGC_GENERATIONAL
michael@0:     {
michael@0:         Register scr = regs.takeAny();
michael@0:         GeneralRegisterSet saveRegs;
michael@0:         saveRegs.add(R1);
michael@0:         emitPostWriteBarrierSlot(masm, objReg, R1, scr, saveRegs);
michael@0:     }
michael@0: #endif
michael@0: 
michael@0:     // The RHS has to be in R0.
michael@0:     masm.moveValue(R1, R0);
michael@0:     EmitReturnFromIC(masm);
michael@0: 
michael@0:     // Failure case - jump to next stub
michael@0:     masm.bind(&failureUnstow);
michael@0:     EmitUnstowICValues(masm, 2);
michael@0: 
michael@0:     masm.bind(&failure);
michael@0:     EmitStubGuardFailure(masm);
michael@0:     return true;
michael@0: }
michael@0: 
michael@0: bool
michael@0: ICSetProp_CallScripted::Compiler::generateStubCode(MacroAssembler &masm)
michael@0: {
michael@0:     Label failure;
michael@0:     Label failureUnstow;
michael@0:     Label failureLeaveStubFrame;
michael@0: 
michael@0:     // Guard input is an object.
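michael@0:     // Primitive receivers fail this guard and fall through to the next stub
michael@0:     // in the chain.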
michael@0: masm.branchTestObject(Assembler::NotEqual, R0, &failure); michael@0: michael@0: // Stow R0 and R1 to free up registers. michael@0: EmitStowICValues(masm, 2); michael@0: michael@0: GeneralRegisterSet regs(availableGeneralRegs(1)); michael@0: Register scratch = regs.takeAnyExcluding(BaselineTailCallReg); michael@0: michael@0: // Unbox and shape guard. michael@0: Register objReg = masm.extractObject(R0, ExtractTemp0); michael@0: masm.loadPtr(Address(BaselineStubReg, ICSetProp_CallScripted::offsetOfShape()), scratch); michael@0: masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failureUnstow); michael@0: michael@0: Register holderReg = regs.takeAny(); michael@0: masm.loadPtr(Address(BaselineStubReg, ICSetProp_CallScripted::offsetOfHolder()), holderReg); michael@0: masm.loadPtr(Address(BaselineStubReg, ICSetProp_CallScripted::offsetOfHolderShape()), scratch); michael@0: masm.branchTestObjShape(Assembler::NotEqual, holderReg, scratch, &failureUnstow); michael@0: regs.add(holderReg); michael@0: michael@0: // Push a stub frame so that we can perform a non-tail call. michael@0: enterStubFrame(masm, scratch); michael@0: michael@0: // Load callee function and code. To ensure that |code| doesn't end up being michael@0: // ArgumentsRectifierReg, if it's available we assign it to |callee| instead. michael@0: Register callee; michael@0: if (regs.has(ArgumentsRectifierReg)) { michael@0: callee = ArgumentsRectifierReg; michael@0: regs.take(callee); michael@0: } else { michael@0: callee = regs.takeAny(); michael@0: } michael@0: Register code = regs.takeAny(); michael@0: masm.loadPtr(Address(BaselineStubReg, ICSetProp_CallScripted::offsetOfSetter()), callee); michael@0: masm.branchIfFunctionHasNoScript(callee, &failureLeaveStubFrame); michael@0: masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), code); michael@0: masm.loadBaselineOrIonRaw(code, code, SequentialExecution, &failureLeaveStubFrame); michael@0: michael@0: // Setter is called with the new value as the only argument, and |obj| as thisv. michael@0: // Note that we use Push, not push, so that callIon will align the stack michael@0: // properly on ARM. michael@0: michael@0: // To Push R1, read it off of the stowed values on stack. michael@0: // Stack: [ ..., R0, R1, ..STUBFRAME-HEADER.. ] michael@0: masm.movePtr(BaselineStackReg, scratch); michael@0: masm.PushValue(Address(scratch, STUB_FRAME_SIZE)); michael@0: masm.Push(R0); michael@0: EmitCreateStubFrameDescriptor(masm, scratch); michael@0: masm.Push(Imm32(1)); // ActualArgc is 1 michael@0: masm.Push(callee); michael@0: masm.Push(scratch); michael@0: michael@0: // Handle arguments underflow. michael@0: Label noUnderflow; michael@0: masm.load16ZeroExtend(Address(callee, JSFunction::offsetOfNargs()), scratch); michael@0: masm.branch32(Assembler::BelowOrEqual, scratch, Imm32(1), &noUnderflow); michael@0: { michael@0: // Call the arguments rectifier. michael@0: JS_ASSERT(ArgumentsRectifierReg != code); michael@0: michael@0: JitCode *argumentsRectifier = michael@0: cx->runtime()->jitRuntime()->getArgumentsRectifier(SequentialExecution); michael@0: michael@0: masm.movePtr(ImmGCPtr(argumentsRectifier), code); michael@0: masm.loadPtr(Address(code, JitCode::offsetOfCode()), code); michael@0: masm.mov(ImmWord(1), ArgumentsRectifierReg); michael@0: } michael@0: michael@0: masm.bind(&noUnderflow); michael@0: michael@0: // If needed, update SPS Profiler frame entry. At this point, callee and scratch can michael@0: // be clobbered. 
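michael@0:     // The SPS profiler samples a pseudostack asynchronously, so the current
michael@0:     // entry's pc is refreshed here before control enters the setter; samples
michael@0:     // taken while the callee runs then attribute this frame to the right
michael@0:     // bytecode.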
michael@0:     {
michael@0:         GeneralRegisterSet availRegs = availableGeneralRegs(0);
michael@0:         availRegs.take(ArgumentsRectifierReg);
michael@0:         availRegs.take(code);
michael@0:         emitProfilingUpdate(masm, availRegs, ICSetProp_CallScripted::offsetOfPCOffset());
michael@0:     }
michael@0: 
michael@0:     masm.callIon(code);
michael@0: 
michael@0:     leaveStubFrame(masm, true);
michael@0:     // Do not care about return value from function. The original RHS should be returned
michael@0:     // as the result of this operation.
michael@0:     EmitUnstowICValues(masm, 2);
michael@0:     masm.moveValue(R1, R0);
michael@0:     EmitReturnFromIC(masm);
michael@0: 
michael@0:     // Leave stub frame and go to next stub.
michael@0:     masm.bind(&failureLeaveStubFrame);
michael@0:     leaveStubFrame(masm, false);
michael@0: 
michael@0:     // Unstow R0 and R1
michael@0:     masm.bind(&failureUnstow);
michael@0:     EmitUnstowICValues(masm, 2);
michael@0: 
michael@0:     // Failure case - jump to next stub
michael@0:     masm.bind(&failure);
michael@0:     EmitStubGuardFailure(masm);
michael@0:     return true;
michael@0: }
michael@0: 
michael@0: static bool
michael@0: DoCallNativeSetter(JSContext *cx, HandleFunction callee, HandleObject obj, HandleValue val)
michael@0: {
michael@0:     JS_ASSERT(callee->isNative());
michael@0:     JSNative natfun = callee->native();
michael@0: 
michael@0:     JS::AutoValueArray<3> vp(cx);
michael@0:     vp[0].setObject(*callee.get());
michael@0:     vp[1].setObject(*obj.get());
michael@0:     vp[2].set(val);
michael@0: 
michael@0:     return natfun(cx, 1, vp.begin());
michael@0: }
michael@0: 
michael@0: typedef bool (*DoCallNativeSetterFn)(JSContext *, HandleFunction, HandleObject, HandleValue);
michael@0: static const VMFunction DoCallNativeSetterInfo =
michael@0:     FunctionInfo<DoCallNativeSetterFn>(DoCallNativeSetter);
michael@0: 
michael@0: bool
michael@0: ICSetProp_CallNative::Compiler::generateStubCode(MacroAssembler &masm)
michael@0: {
michael@0:     Label failure;
michael@0:     Label failureUnstow;
michael@0: 
michael@0:     // Guard input is an object.
michael@0:     masm.branchTestObject(Assembler::NotEqual, R0, &failure);
michael@0: 
michael@0:     // Stow R0 and R1 to free up registers.
michael@0:     EmitStowICValues(masm, 2);
michael@0: 
michael@0:     GeneralRegisterSet regs(availableGeneralRegs(1));
michael@0:     Register scratch = regs.takeAnyExcluding(BaselineTailCallReg);
michael@0: 
michael@0:     // Unbox and shape guard.
michael@0:     Register objReg = masm.extractObject(R0, ExtractTemp0);
michael@0:     masm.loadPtr(Address(BaselineStubReg, ICSetProp_CallNative::offsetOfShape()), scratch);
michael@0:     masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failureUnstow);
michael@0: 
michael@0:     Register holderReg = regs.takeAny();
michael@0:     masm.loadPtr(Address(BaselineStubReg, ICSetProp_CallNative::offsetOfHolder()), holderReg);
michael@0:     masm.loadPtr(Address(BaselineStubReg, ICSetProp_CallNative::offsetOfHolderShape()), scratch);
michael@0:     masm.branchTestObjShape(Assembler::NotEqual, holderReg, scratch, &failureUnstow);
michael@0:     regs.add(holderReg);
michael@0: 
michael@0:     // Push a stub frame so that we can perform a non-tail call.
michael@0:     enterStubFrame(masm, scratch);
michael@0: 
michael@0:     // Load the setter function into |callee|. No code pointer is needed here,
michael@0:     // since the native is invoked through a VM call.
michael@0:     Register callee = regs.takeAny();
michael@0:     masm.loadPtr(Address(BaselineStubReg, ICSetProp_CallNative::offsetOfSetter()), callee);
michael@0: 
michael@0:     // To Push R1, read it off of the stowed values on stack.
michael@0:     // Stack: [ ..., R0, R1, ..STUBFRAME-HEADER.. ]
michael@0:     masm.movePtr(BaselineStackReg, scratch);
michael@0:     masm.pushValue(Address(scratch, STUB_FRAME_SIZE));
michael@0:     masm.push(objReg);
michael@0:     masm.push(callee);
michael@0: 
michael@0:     // Don't need to preserve R0 anymore.
michael@0:     regs.add(R0);
michael@0: 
michael@0:     // If needed, update SPS Profiler frame entry.
michael@0:     emitProfilingUpdate(masm, regs, ICSetProp_CallNative::offsetOfPCOffset());
michael@0: 
michael@0:     if (!callVM(DoCallNativeSetterInfo, masm))
michael@0:         return false;
michael@0:     leaveStubFrame(masm);
michael@0: 
michael@0:     // Do not care about return value from function. The original RHS should be returned
michael@0:     // as the result of this operation.
michael@0:     EmitUnstowICValues(masm, 2);
michael@0:     masm.moveValue(R1, R0);
michael@0:     EmitReturnFromIC(masm);
michael@0: 
michael@0:     // Unstow R0 and R1
michael@0:     masm.bind(&failureUnstow);
michael@0:     EmitUnstowICValues(masm, 2);
michael@0: 
michael@0:     // Failure case - jump to next stub
michael@0:     masm.bind(&failure);
michael@0:     EmitStubGuardFailure(masm);
michael@0:     return true;
michael@0: }
michael@0: 
michael@0: //
michael@0: // Call_Fallback
michael@0: //
michael@0: 
michael@0: static bool
michael@0: TryAttachFunApplyStub(JSContext *cx, ICCall_Fallback *stub, HandleScript script, jsbytecode *pc,
michael@0:                       HandleValue thisv, uint32_t argc, Value *argv)
michael@0: {
michael@0:     if (argc != 2)
michael@0:         return true;
michael@0: 
michael@0:     if (!thisv.isObject() || !thisv.toObject().is<JSFunction>())
michael@0:         return true;
michael@0:     RootedFunction target(cx, &thisv.toObject().as<JSFunction>());
michael@0: 
michael@0:     bool isScripted = target->hasJITCode();
michael@0: 
michael@0:     // Right now, only handle the situation where the second argument is |arguments|.
michael@0:     if (argv[1].isMagic(JS_OPTIMIZED_ARGUMENTS) && !script->needsArgsObj()) {
michael@0:         if (isScripted && !stub->hasStub(ICStub::Call_ScriptedApplyArguments)) {
michael@0:             IonSpew(IonSpew_BaselineIC, " Generating Call_ScriptedApplyArguments stub");
michael@0: 
michael@0:             ICCall_ScriptedApplyArguments::Compiler compiler(
michael@0:                 cx, stub->fallbackMonitorStub()->firstMonitorStub(), script->pcToOffset(pc));
michael@0:             ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0:             if (!newStub)
michael@0:                 return false;
michael@0: 
michael@0:             stub->addNewStub(newStub);
michael@0:             return true;
michael@0:         }
michael@0: 
michael@0:         // TODO: handle FUNAPPLY for native targets.
michael@0:     }
michael@0: 
michael@0:     if (argv[1].isObject() && argv[1].toObject().is<ArrayObject>()) {
michael@0:         if (isScripted && !stub->hasStub(ICStub::Call_ScriptedApplyArray)) {
michael@0:             IonSpew(IonSpew_BaselineIC, " Generating Call_ScriptedApplyArray stub");
michael@0: 
michael@0:             ICCall_ScriptedApplyArray::Compiler compiler(
michael@0:                 cx, stub->fallbackMonitorStub()->firstMonitorStub(), script->pcToOffset(pc));
michael@0:             ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0:             if (!newStub)
michael@0:                 return false;
michael@0: 
michael@0:             stub->addNewStub(newStub);
michael@0:             return true;
michael@0:         }
michael@0:     }
michael@0:     return true;
michael@0: }
michael@0: 
michael@0: static bool
michael@0: TryAttachFunCallStub(JSContext *cx, ICCall_Fallback *stub, HandleScript script, jsbytecode *pc,
michael@0:                      HandleValue thisv, bool *attached)
michael@0: {
michael@0:     // Try to attach a stub for Function.prototype.call with scripted |this|.
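michael@0:     // That is, for a call like |f.call(obj, x)| where |f| is a scripted
michael@0:     // function, attach a stub that calls |f| directly with |obj| as |this|
michael@0:     // and |x| as its argument, instead of going through the generic
michael@0:     // js_fun_call native.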
michael@0: 
michael@0:     *attached = false;
michael@0:     if (!thisv.isObject() || !thisv.toObject().is<JSFunction>())
michael@0:         return true;
michael@0:     RootedFunction target(cx, &thisv.toObject().as<JSFunction>());
michael@0: 
michael@0:     // Attach a stub if the script can be Baseline-compiled. We do this also
michael@0:     // if the script is not yet compiled to avoid attaching a CallNative stub
michael@0:     // that handles everything, even after the callee becomes hot.
michael@0:     if (target->hasScript() && target->nonLazyScript()->canBaselineCompile() &&
michael@0:         !stub->hasStub(ICStub::Call_ScriptedFunCall))
michael@0:     {
michael@0:         IonSpew(IonSpew_BaselineIC, " Generating Call_ScriptedFunCall stub");
michael@0: 
michael@0:         ICCall_ScriptedFunCall::Compiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
michael@0:                                                   script->pcToOffset(pc));
michael@0:         ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0:         if (!newStub)
michael@0:             return false;
michael@0: 
michael@0:         *attached = true;
michael@0:         stub->addNewStub(newStub);
michael@0:         return true;
michael@0:     }
michael@0: 
michael@0:     return true;
michael@0: }
michael@0: 
michael@0: static bool
michael@0: GetTemplateObjectForNative(JSContext *cx, HandleScript script, jsbytecode *pc,
michael@0:                            Native native, const CallArgs &args, MutableHandleObject res)
michael@0: {
michael@0:     // Check for natives to which template objects can be attached. This is
michael@0:     // done to provide templates to Ion for inlining these natives later on.
michael@0: 
michael@0:     if (native == js_Array) {
michael@0:         // Note: the template array won't be used if its length is inaccurately
michael@0:         // computed here. (We allocate here because compilation may occur on a
michael@0:         // separate thread where allocation is impossible.)
michael@0:         size_t count = 0;
michael@0:         if (args.length() != 1)
michael@0:             count = args.length();
michael@0:         else if (args.length() == 1 && args[0].isInt32() && args[0].toInt32() >= 0)
michael@0:             count = args[0].toInt32();
michael@0:         res.set(NewDenseUnallocatedArray(cx, count, nullptr, TenuredObject));
michael@0:         if (!res)
michael@0:             return false;
michael@0: 
michael@0:         types::TypeObject *type = types::TypeScript::InitObject(cx, script, pc, JSProto_Array);
michael@0:         if (!type)
michael@0:             return false;
michael@0:         res->setType(type);
michael@0:         return true;
michael@0:     }
michael@0: 
michael@0:     if (native == intrinsic_NewDenseArray) {
michael@0:         res.set(NewDenseUnallocatedArray(cx, 0, nullptr, TenuredObject));
michael@0:         if (!res)
michael@0:             return false;
michael@0: 
michael@0:         types::TypeObject *type = types::TypeScript::InitObject(cx, script, pc, JSProto_Array);
michael@0:         if (!type)
michael@0:             return false;
michael@0:         res->setType(type);
michael@0:         return true;
michael@0:     }
michael@0: 
michael@0:     if (native == js::array_concat) {
michael@0:         if (args.thisv().isObject() && args.thisv().toObject().is<ArrayObject>() &&
michael@0:             !args.thisv().toObject().hasSingletonType())
michael@0:         {
michael@0:             res.set(NewDenseEmptyArray(cx, args.thisv().toObject().getProto(), TenuredObject));
michael@0:             if (!res)
michael@0:                 return false;
michael@0:             res->setType(args.thisv().toObject().type());
michael@0:             return true;
michael@0:         }
michael@0:     }
michael@0: 
michael@0:     if (native == js::str_split && args.length() == 1 && args[0].isString()) {
michael@0:         res.set(NewDenseUnallocatedArray(cx, 0, nullptr, TenuredObject));
michael@0:         if (!res)
michael@0:             return false;
michael@0: 
michael@0:         types::TypeObject *type = types::TypeScript::InitObject(cx, script, pc, JSProto_Array);
michael@0:         if (!type)
michael@0:             return false;
michael@0:         res->setType(type);
michael@0:         return true;
michael@0:     }
michael@0: 
michael@0:     if (native == js_String) {
michael@0:         RootedString emptyString(cx, cx->runtime()->emptyString);
michael@0:         res.set(StringObject::create(cx, emptyString, TenuredObject));
michael@0:         if (!res)
michael@0:             return false;
michael@0:         return true;
michael@0:     }
michael@0: 
michael@0:     return true;
michael@0: }
michael@0: 
michael@0: static bool
michael@0: TryAttachCallStub(JSContext *cx, ICCall_Fallback *stub, HandleScript script, jsbytecode *pc,
michael@0:                   JSOp op, uint32_t argc, Value *vp, bool constructing, bool useNewType)
michael@0: {
michael@0:     if (useNewType || op == JSOP_EVAL)
michael@0:         return true;
michael@0: 
michael@0:     if (stub->numOptimizedStubs() >= ICCall_Fallback::MAX_OPTIMIZED_STUBS) {
michael@0:         // TODO: Discard all stubs in this IC and replace with inert megamorphic stub.
michael@0:         // But for now we just bail.
michael@0:         return true;
michael@0:     }
michael@0: 
michael@0:     RootedValue callee(cx, vp[0]);
michael@0:     RootedValue thisv(cx, vp[1]);
michael@0: 
michael@0:     if (!callee.isObject())
michael@0:         return true;
michael@0: 
michael@0:     RootedObject obj(cx, &callee.toObject());
michael@0:     if (!obj->is<JSFunction>())
michael@0:         return true;
michael@0: 
michael@0:     RootedFunction fun(cx, &obj->as<JSFunction>());
michael@0: 
michael@0:     if (fun->hasScript()) {
michael@0:         // Never attach optimized scripted call stubs for JSOP_FUNAPPLY.
michael@0:         // MagicArguments may escape the frame through them.
michael@0:         if (op == JSOP_FUNAPPLY)
michael@0:             return true;
michael@0: 
michael@0:         // If callee is not an interpreted constructor, we have to throw.
michael@0:         if (constructing && !fun->isInterpretedConstructor())
michael@0:             return true;
michael@0: 
michael@0:         RootedScript calleeScript(cx, fun->nonLazyScript());
michael@0:         if (!calleeScript->hasBaselineScript() && !calleeScript->hasIonScript())
michael@0:             return true;
michael@0: 
michael@0:         if (calleeScript->shouldCloneAtCallsite())
michael@0:             return true;
michael@0: 
michael@0:         // Check if this stub chain has already generalized scripted calls.
michael@0:         if (stub->scriptedStubsAreGeneralized()) {
michael@0:             IonSpew(IonSpew_BaselineIC, " Chain already has generalized scripted call stub!");
michael@0:             return true;
michael@0:         }
michael@0: 
michael@0:         if (stub->scriptedStubCount() >= ICCall_Fallback::MAX_SCRIPTED_STUBS) {
michael@0:             // Create a Call_AnyScripted stub.
michael@0:             IonSpew(IonSpew_BaselineIC, " Generating Call_AnyScripted stub (cons=%s)",
michael@0:                     constructing ? "yes" : "no");
michael@0: 
michael@0:             ICCallScriptedCompiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
michael@0:                                             constructing, script->pcToOffset(pc));
michael@0:             ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0:             if (!newStub)
michael@0:                 return false;
michael@0: 
michael@0:             // Before adding new stub, unlink all previous Call_Scripted.
michael@0:             stub->unlinkStubsWithKind(cx, ICStub::Call_Scripted);
michael@0: 
michael@0:             // Add new generalized stub.
michael@0:             stub->addNewStub(newStub);
michael@0:             return true;
michael@0:         }
michael@0: 
michael@0:         // Keep track of the function's |prototype| property in type
michael@0:         // information, for use during Ion compilation.
michael@0:         if (IsIonEnabled(cx))
michael@0:             types::EnsureTrackPropertyTypes(cx, fun, NameToId(cx->names().prototype));
michael@0: 
michael@0:         // Remember the template object associated with any script being called
michael@0:         // as a constructor, for later use during Ion compilation.
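michael@0:         // Ion can use this template to inline the allocation of |this| for
michael@0:         // |new F(...)| when the object's shape still matches, instead of
michael@0:         // calling into the VM to create it.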
michael@0:         RootedObject templateObject(cx);
michael@0:         if (constructing) {
michael@0:             templateObject = CreateThisForFunction(cx, fun, MaybeSingletonObject);
michael@0:             if (!templateObject)
michael@0:                 return false;
michael@0:         }
michael@0: 
michael@0:         IonSpew(IonSpew_BaselineIC,
michael@0:                 " Generating Call_Scripted stub (fun=%p, %s:%d, cons=%s)",
michael@0:                 fun.get(), fun->nonLazyScript()->filename(), fun->nonLazyScript()->lineno(),
michael@0:                 constructing ? "yes" : "no");
michael@0:         ICCallScriptedCompiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
michael@0:                                         calleeScript, templateObject,
michael@0:                                         constructing, script->pcToOffset(pc));
michael@0:         ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0:         if (!newStub)
michael@0:             return false;
michael@0: 
michael@0:         stub->addNewStub(newStub);
michael@0:         return true;
michael@0:     }
michael@0: 
michael@0:     if (fun->isNative() && (!constructing || (constructing && fun->isNativeConstructor()))) {
michael@0:         // Generalized native call stubs are not here yet!
michael@0:         JS_ASSERT(!stub->nativeStubsAreGeneralized());
michael@0: 
michael@0:         // Check for JSOP_FUNAPPLY
michael@0:         if (op == JSOP_FUNAPPLY) {
michael@0:             if (fun->native() == js_fun_apply)
michael@0:                 return TryAttachFunApplyStub(cx, stub, script, pc, thisv, argc, vp + 2);
michael@0: 
michael@0:             // Don't try to attach a "regular" optimized call stub for FUNAPPLY ops,
michael@0:             // since MagicArguments may escape through them.
michael@0:             return true;
michael@0:         }
michael@0: 
michael@0:         if (op == JSOP_FUNCALL && fun->native() == js_fun_call) {
michael@0:             bool attached;
michael@0:             if (!TryAttachFunCallStub(cx, stub, script, pc, thisv, &attached))
michael@0:                 return false;
michael@0:             if (attached)
michael@0:                 return true;
michael@0:         }
michael@0: 
michael@0:         if (stub->nativeStubCount() >= ICCall_Fallback::MAX_NATIVE_STUBS) {
michael@0:             IonSpew(IonSpew_BaselineIC,
michael@0:                     " Too many Call_Native stubs. TODO: add Call_AnyNative!");
michael@0:             return true;
michael@0:         }
michael@0: 
michael@0:         CallArgs args = CallArgsFromVp(argc, vp);
michael@0:         RootedObject templateObject(cx);
michael@0:         if (!GetTemplateObjectForNative(cx, script, pc, fun->native(), args, &templateObject))
michael@0:             return false;
michael@0: 
michael@0:         IonSpew(IonSpew_BaselineIC, " Generating Call_Native stub (fun=%p, cons=%s)",
michael@0:                 fun.get(), constructing ? "yes" : "no");
michael@0:         ICCall_Native::Compiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
michael@0:                                          fun, templateObject, constructing, script->pcToOffset(pc));
michael@0:         ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0:         if (!newStub)
michael@0:             return false;
michael@0: 
michael@0:         stub->addNewStub(newStub);
michael@0:         return true;
michael@0:     }
michael@0: 
michael@0:     return true;
michael@0: }
michael@0: 
michael@0: static bool
michael@0: MaybeCloneFunctionAtCallsite(JSContext *cx, MutableHandleValue callee, HandleScript script,
michael@0:                              jsbytecode *pc)
michael@0: {
michael@0:     RootedFunction fun(cx);
michael@0:     if (!IsFunctionObject(callee, fun.address()))
michael@0:         return true;
michael@0: 
michael@0:     if (!fun->hasScript() || !fun->nonLazyScript()->shouldCloneAtCallsite())
michael@0:         return true;
michael@0: 
michael@0:     fun = CloneFunctionAtCallsite(cx, fun, script, pc);
michael@0:     if (!fun)
michael@0:         return false;
michael@0: 
michael@0:     callee.setObject(*fun);
michael@0:     return true;
michael@0: }
michael@0: 
michael@0: static bool
michael@0: DoCallFallback(JSContext *cx, BaselineFrame *frame, ICCall_Fallback *stub_, uint32_t argc,
michael@0:                Value *vp, MutableHandleValue res)
michael@0: {
michael@0:     // This fallback stub may trigger debug mode toggling.
michael@0:     DebugModeOSRVolatileStub<ICCall_Fallback *> stub(frame, stub_);
michael@0: 
michael@0:     // Ensure vp array is rooted - we may GC in here.
michael@0:     AutoArrayRooter vpRoot(cx, argc + 2, vp);
michael@0: 
michael@0:     RootedScript script(cx, frame->script());
michael@0:     jsbytecode *pc = stub->icEntry()->pc(script);
michael@0:     JSOp op = JSOp(*pc);
michael@0:     FallbackICSpew(cx, stub, "Call(%s)", js_CodeName[op]);
michael@0: 
michael@0:     JS_ASSERT(argc == GET_ARGC(pc));
michael@0: 
michael@0:     RootedValue callee(cx, vp[0]);
michael@0:     RootedValue thisv(cx, vp[1]);
michael@0: 
michael@0:     Value *args = vp + 2;
michael@0: 
michael@0:     // Handle funapply with JSOP_ARGUMENTS
michael@0:     if (op == JSOP_FUNAPPLY && argc == 2 && args[1].isMagic(JS_OPTIMIZED_ARGUMENTS)) {
michael@0:         if (!GuardFunApplyArgumentsOptimization(cx, frame, callee, args, argc))
michael@0:             return false;
michael@0:     }
michael@0: 
michael@0:     // Compute constructing and useNewType flags.
michael@0:     bool constructing = (op == JSOP_NEW);
michael@0:     bool newType = types::UseNewType(cx, script, pc);
michael@0: 
michael@0:     // Try attaching a call stub.
michael@0:     if (!TryAttachCallStub(cx, stub, script, pc, op, argc, vp, constructing, newType))
michael@0:         return false;
michael@0: 
michael@0:     // Maybe update PC in profiler entry before leaving this script by call.
michael@0: if (cx->runtime()->spsProfiler.enabled() && frame->hasPushedSPSFrame()) michael@0: cx->runtime()->spsProfiler.updatePC(script, pc); michael@0: michael@0: if (!MaybeCloneFunctionAtCallsite(cx, &callee, script, pc)) michael@0: return false; michael@0: michael@0: if (op == JSOP_NEW) { michael@0: if (!InvokeConstructor(cx, callee, argc, args, res.address())) michael@0: return false; michael@0: } else if (op == JSOP_EVAL && frame->scopeChain()->global().valueIsEval(callee)) { michael@0: if (!DirectEval(cx, CallArgsFromVp(argc, vp))) michael@0: return false; michael@0: res.set(vp[0]); michael@0: } else { michael@0: JS_ASSERT(op == JSOP_CALL || op == JSOP_FUNCALL || op == JSOP_FUNAPPLY || op == JSOP_EVAL); michael@0: if (!Invoke(cx, thisv, callee, argc, args, res)) michael@0: return false; michael@0: } michael@0: michael@0: types::TypeScript::Monitor(cx, script, pc, res); michael@0: michael@0: // Check if debug mode toggling made the stub invalid. michael@0: if (stub.invalid()) michael@0: return true; michael@0: michael@0: // Attach a new TypeMonitor stub for this value. michael@0: ICTypeMonitor_Fallback *typeMonFbStub = stub->fallbackMonitorStub(); michael@0: if (!typeMonFbStub->addMonitorStubForValue(cx, script, res)) michael@0: return false; michael@0: // Add a type monitor stub for the resulting value. michael@0: if (!stub->addMonitorStubForValue(cx, script, res)) michael@0: return false; michael@0: michael@0: return true; michael@0: } michael@0: michael@0: void michael@0: ICCallStubCompiler::pushCallArguments(MacroAssembler &masm, GeneralRegisterSet regs, Register argcReg) michael@0: { michael@0: JS_ASSERT(!regs.has(argcReg)); michael@0: michael@0: // Push the callee and |this| too. michael@0: Register count = regs.takeAny(); michael@0: masm.mov(argcReg, count); michael@0: masm.add32(Imm32(2), count); michael@0: michael@0: // argPtr initially points to the last argument. michael@0: Register argPtr = regs.takeAny(); michael@0: masm.mov(BaselineStackReg, argPtr); michael@0: michael@0: // Skip 4 pointers pushed on top of the arguments: the frame descriptor, michael@0: // return address, old frame pointer and stub reg. michael@0: masm.addPtr(Imm32(STUB_FRAME_SIZE), argPtr); michael@0: michael@0: // Push all values, starting at the last one. michael@0: Label loop, done; michael@0: masm.bind(&loop); michael@0: masm.branchTest32(Assembler::Zero, count, count, &done); michael@0: { michael@0: masm.pushValue(Address(argPtr, 0)); michael@0: masm.addPtr(Imm32(sizeof(Value)), argPtr); michael@0: michael@0: masm.sub32(Imm32(1), count); michael@0: masm.jump(&loop); michael@0: } michael@0: masm.bind(&done); michael@0: } michael@0: michael@0: Register michael@0: ICCallStubCompiler::guardFunApply(MacroAssembler &masm, GeneralRegisterSet regs, Register argcReg, michael@0: bool checkNative, FunApplyThing applyThing, Label *failure) michael@0: { michael@0: // Ensure argc == 2 michael@0: masm.branch32(Assembler::NotEqual, argcReg, Imm32(2), failure); michael@0: michael@0: // Stack looks like: michael@0: // [..., CalleeV, ThisV, Arg0V, Arg1V ] michael@0: michael@0: Address secondArgSlot(BaselineStackReg, ICStackValueOffset); michael@0: if (applyThing == FunApply_MagicArgs) { michael@0: // Ensure that the second arg is magic arguments. michael@0: masm.branchTestMagic(Assembler::NotEqual, secondArgSlot, failure); michael@0: michael@0: // Ensure that this frame doesn't have an arguments object. 
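michael@0:         // Once an arguments object has been created for the frame, the
michael@0:         // optimized |arguments| magic value may no longer mirror the frame's
michael@0:         // actual arguments, so fail over to the next stub in that case.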
michael@0: masm.branchTest32(Assembler::NonZero, michael@0: Address(BaselineFrameReg, BaselineFrame::reverseOffsetOfFlags()), michael@0: Imm32(BaselineFrame::HAS_ARGS_OBJ), michael@0: failure); michael@0: } else { michael@0: JS_ASSERT(applyThing == FunApply_Array); michael@0: michael@0: GeneralRegisterSet regsx = regs; michael@0: michael@0: // Ensure that the second arg is an array. michael@0: ValueOperand secondArgVal = regsx.takeAnyValue(); michael@0: masm.loadValue(secondArgSlot, secondArgVal); michael@0: michael@0: masm.branchTestObject(Assembler::NotEqual, secondArgVal, failure); michael@0: Register secondArgObj = masm.extractObject(secondArgVal, ExtractTemp1); michael@0: michael@0: regsx.add(secondArgVal); michael@0: regsx.takeUnchecked(secondArgObj); michael@0: michael@0: masm.branchTestObjClass(Assembler::NotEqual, secondArgObj, regsx.getAny(), michael@0: &ArrayObject::class_, failure); michael@0: michael@0: // Get the array elements and ensure that initializedLength == length michael@0: masm.loadPtr(Address(secondArgObj, JSObject::offsetOfElements()), secondArgObj); michael@0: michael@0: Register lenReg = regsx.takeAny(); michael@0: masm.load32(Address(secondArgObj, ObjectElements::offsetOfLength()), lenReg); michael@0: michael@0: masm.branch32(Assembler::NotEqual, michael@0: Address(secondArgObj, ObjectElements::offsetOfInitializedLength()), michael@0: lenReg, failure); michael@0: michael@0: // Limit the length to something reasonable (huge number of arguments can michael@0: // blow the stack limit). michael@0: masm.branch32(Assembler::Above, lenReg, michael@0: Imm32(ICCall_ScriptedApplyArray::MAX_ARGS_ARRAY_LENGTH), michael@0: failure); michael@0: michael@0: // Ensure no holes. Loop through values in array and make sure none are magic. michael@0: // Start address is secondArgObj, end address is secondArgObj + (lenReg * sizeof(Value)) michael@0: JS_STATIC_ASSERT(sizeof(Value) == 8); michael@0: masm.lshiftPtr(Imm32(3), lenReg); michael@0: masm.addPtr(secondArgObj, lenReg); michael@0: michael@0: Register start = secondArgObj; michael@0: Register end = lenReg; michael@0: Label loop; michael@0: Label endLoop; michael@0: masm.bind(&loop); michael@0: masm.branchPtr(Assembler::AboveOrEqual, start, end, &endLoop); michael@0: masm.branchTestMagic(Assembler::Equal, Address(start, 0), failure); michael@0: masm.addPtr(Imm32(sizeof(Value)), start); michael@0: masm.jump(&loop); michael@0: masm.bind(&endLoop); michael@0: } michael@0: michael@0: // Stack now confirmed to be like: michael@0: // [..., CalleeV, ThisV, Arg0V, MagicValue(Arguments), ] michael@0: michael@0: // Load the callee, ensure that it's js_fun_apply michael@0: ValueOperand val = regs.takeAnyValue(); michael@0: Address calleeSlot(BaselineStackReg, ICStackValueOffset + (3 * sizeof(Value))); michael@0: masm.loadValue(calleeSlot, val); michael@0: michael@0: masm.branchTestObject(Assembler::NotEqual, val, failure); michael@0: Register callee = masm.extractObject(val, ExtractTemp1); michael@0: michael@0: masm.branchTestObjClass(Assembler::NotEqual, callee, regs.getAny(), &JSFunction::class_, michael@0: failure); michael@0: masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), callee); michael@0: michael@0: masm.branchPtr(Assembler::NotEqual, callee, ImmPtr(js_fun_apply), failure); michael@0: michael@0: // Load the |thisv|, ensure that it's a scripted function with a valid baseline or ion michael@0: // script, or a native function. 
michael@0:     Address thisSlot(BaselineStackReg, ICStackValueOffset + (2 * sizeof(Value)));
michael@0:     masm.loadValue(thisSlot, val);
michael@0: 
michael@0:     masm.branchTestObject(Assembler::NotEqual, val, failure);
michael@0:     Register target = masm.extractObject(val, ExtractTemp1);
michael@0:     regs.add(val);
michael@0:     regs.takeUnchecked(target);
michael@0: 
michael@0:     masm.branchTestObjClass(Assembler::NotEqual, target, regs.getAny(), &JSFunction::class_,
michael@0:                             failure);
michael@0: 
michael@0:     if (checkNative) {
michael@0:         masm.branchIfInterpreted(target, failure);
michael@0:     } else {
michael@0:         masm.branchIfFunctionHasNoScript(target, failure);
michael@0:         Register temp = regs.takeAny();
michael@0:         masm.loadPtr(Address(target, JSFunction::offsetOfNativeOrScript()), temp);
michael@0:         masm.loadBaselineOrIonRaw(temp, temp, SequentialExecution, failure);
michael@0:         regs.add(temp);
michael@0:     }
michael@0:     return target;
michael@0: }
michael@0: 
michael@0: void
michael@0: ICCallStubCompiler::pushCallerArguments(MacroAssembler &masm, GeneralRegisterSet regs)
michael@0: {
michael@0:     // Initialize startReg to point to the start of the caller's arguments vector,
michael@0:     // and endReg to point to the end of it.
michael@0:     Register startReg = regs.takeAny();
michael@0:     Register endReg = regs.takeAny();
michael@0:     masm.loadPtr(Address(BaselineFrameReg, 0), startReg);
michael@0:     masm.loadPtr(Address(startReg, BaselineFrame::offsetOfNumActualArgs()), endReg);
michael@0:     masm.addPtr(Imm32(BaselineFrame::offsetOfArg(0)), startReg);
michael@0:     JS_STATIC_ASSERT(sizeof(Value) == 8);
michael@0:     masm.lshiftPtr(Imm32(3), endReg);
michael@0:     masm.addPtr(startReg, endReg);
michael@0: 
michael@0:     // Copying pre-decrements endReg by 8 until startReg is reached
michael@0:     Label copyDone;
michael@0:     Label copyStart;
michael@0:     masm.bind(&copyStart);
michael@0:     masm.branchPtr(Assembler::Equal, endReg, startReg, &copyDone);
michael@0:     masm.subPtr(Imm32(sizeof(Value)), endReg);
michael@0:     masm.pushValue(Address(endReg, 0));
michael@0:     masm.jump(&copyStart);
michael@0:     masm.bind(&copyDone);
michael@0: }
michael@0: 
michael@0: void
michael@0: ICCallStubCompiler::pushArrayArguments(MacroAssembler &masm, Address arrayVal,
michael@0:                                        GeneralRegisterSet regs)
michael@0: {
michael@0:     // Load start and end address of values to copy.
michael@0:     // guardFunApply has already guaranteed that the array is packed and contains
michael@0:     // no holes.
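michael@0:     // For example, for |f.apply(obj, [a, b, c])| the loop below walks the
michael@0:     // elements from the end, pushing c, then b, then a, so that the first
michael@0:     // element ends up closest to the top of the stack, matching the
michael@0:     // right-to-left argument layout the call expects.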
michael@0:     Register startReg = regs.takeAny();
michael@0:     Register endReg = regs.takeAny();
michael@0:     masm.extractObject(arrayVal, startReg);
michael@0:     masm.loadPtr(Address(startReg, JSObject::offsetOfElements()), startReg);
michael@0:     masm.load32(Address(startReg, ObjectElements::offsetOfInitializedLength()), endReg);
michael@0:     JS_STATIC_ASSERT(sizeof(Value) == 8);
michael@0:     masm.lshiftPtr(Imm32(3), endReg);
michael@0:     masm.addPtr(startReg, endReg);
michael@0: 
michael@0:     // Copying pre-decrements endReg by 8 until startReg is reached
michael@0:     Label copyDone;
michael@0:     Label copyStart;
michael@0:     masm.bind(&copyStart);
michael@0:     masm.branchPtr(Assembler::Equal, endReg, startReg, &copyDone);
michael@0:     masm.subPtr(Imm32(sizeof(Value)), endReg);
michael@0:     masm.pushValue(Address(endReg, 0));
michael@0:     masm.jump(&copyStart);
michael@0:     masm.bind(&copyDone);
michael@0: }
michael@0: 
michael@0: typedef bool (*DoCallFallbackFn)(JSContext *, BaselineFrame *, ICCall_Fallback *,
michael@0:                                  uint32_t, Value *, MutableHandleValue);
michael@0: static const VMFunction DoCallFallbackInfo = FunctionInfo<DoCallFallbackFn>(DoCallFallback);
michael@0: 
michael@0: bool
michael@0: ICCall_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
michael@0: {
michael@0:     JS_ASSERT(R0 == JSReturnOperand);
michael@0: 
michael@0:     // Push a stub frame so that we can perform a non-tail call.
michael@0:     enterStubFrame(masm, R1.scratchReg());
michael@0: 
michael@0:     // Values are on the stack left-to-right. Calling convention wants them
michael@0:     // right-to-left so duplicate them on the stack in reverse order.
michael@0:     // |this| and callee are pushed last.
michael@0: 
michael@0:     GeneralRegisterSet regs(availableGeneralRegs(0));
michael@0:     regs.take(R0.scratchReg()); // argc.
michael@0: 
michael@0:     pushCallArguments(masm, regs, R0.scratchReg());
michael@0: 
michael@0:     masm.push(BaselineStackReg);
michael@0:     masm.push(R0.scratchReg());
michael@0:     masm.push(BaselineStubReg);
michael@0: 
michael@0:     // Load previous frame pointer, push BaselineFrame *.
michael@0:     masm.loadPtr(Address(BaselineFrameReg, 0), R0.scratchReg());
michael@0:     masm.pushBaselineFramePtr(R0.scratchReg(), R0.scratchReg());
michael@0: 
michael@0:     if (!callVM(DoCallFallbackInfo, masm))
michael@0:         return false;
michael@0: 
michael@0:     leaveStubFrame(masm);
michael@0:     EmitReturnFromIC(masm);
michael@0: 
michael@0:     // The following asmcode is only used either when an Ion inlined frame
michael@0:     // bails out into baseline jitcode or we need to do on-stack script
michael@0:     // replacement for debug mode recompile.
michael@0:     Label leaveStubCommon;
michael@0:     returnFromStubOffset_ = masm.currentOffset();
michael@0: 
michael@0:     // Load passed-in ThisV into R1 just in case it's needed. Need to do this before
michael@0:     // we leave the stub frame since that info will be lost.
michael@0:     // Current stack: [...., ThisV, ActualArgc, CalleeToken, Descriptor ]
michael@0:     masm.loadValue(Address(BaselineStackReg, 3 * sizeof(size_t)), R1);
michael@0: 
michael@0:     // Emit the coming-from-VM specific part of the stub-leaving code.
michael@0:     leaveStubFrameHead(masm, /* calledIntoIon = */ false);
michael@0: 
michael@0:     // Jump to the common leave stub tail.
michael@0:     masm.jump(&leaveStubCommon);
michael@0: 
michael@0:     // For Ion bailouts, the return address pushed onto the reconstructed
michael@0:     // baseline stack points here.
michael@0:     returnFromIonOffset_ = masm.currentOffset();
michael@0: 
michael@0:     masm.loadValue(Address(BaselineStackReg, 3 * sizeof(size_t)), R1);
michael@0: 
michael@0:     // Emit the coming-from-Ion specific part of the stub-leaving code.
michael@0:     leaveStubFrameHead(masm, /* calledIntoIon = */ true);
michael@0: 
michael@0:     // Emit the common stub-leaving tail.
michael@0:     masm.bind(&leaveStubCommon);
michael@0:     leaveStubFrameCommonTail(masm);
michael@0: 
michael@0:     // R1 and R0 are taken.
michael@0:     regs = availableGeneralRegs(2);
michael@0:     Register scratch = regs.takeAny();
michael@0: 
michael@0:     // If this is a |constructing| call, if the callee returns a non-object, we replace it with
michael@0:     // the |this| object passed in.
michael@0:     JS_ASSERT(JSReturnOperand == R0);
michael@0:     Label skipThisReplace;
michael@0:     masm.load16ZeroExtend(Address(BaselineStubReg, ICStub::offsetOfExtra()), scratch);
michael@0:     masm.branchTest32(Assembler::Zero, scratch, Imm32(ICCall_Fallback::CONSTRUCTING_FLAG),
michael@0:                       &skipThisReplace);
michael@0:     masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);
michael@0:     masm.moveValue(R1, R0);
michael@0: #ifdef DEBUG
michael@0:     masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);
michael@0:     masm.assumeUnreachable("Failed to return object in constructing call.");
michael@0: #endif
michael@0:     masm.bind(&skipThisReplace);
michael@0: 
michael@0:     // At this point, BaselineStubReg points to the ICCall_Fallback stub, which is NOT
michael@0:     // a MonitoredStub, but rather a MonitoredFallbackStub. To use EmitEnterTypeMonitorIC,
michael@0:     // first load the ICTypeMonitor_Fallback stub into BaselineStubReg. Then, use
michael@0:     // EmitEnterTypeMonitorIC with a custom struct offset.
michael@0:     masm.loadPtr(Address(BaselineStubReg, ICMonitoredFallbackStub::offsetOfFallbackMonitorStub()),
michael@0:                  BaselineStubReg);
michael@0:     EmitEnterTypeMonitorIC(masm, ICTypeMonitor_Fallback::offsetOfFirstMonitorStub());
michael@0: 
michael@0:     return true;
michael@0: }
michael@0: 
michael@0: bool
michael@0: ICCall_Fallback::Compiler::postGenerateStubCode(MacroAssembler &masm, Handle<JitCode *> code)
michael@0: {
michael@0:     JitCompartment *comp = cx->compartment()->jitCompartment();
michael@0: 
michael@0:     CodeOffsetLabel fromIon(returnFromIonOffset_);
michael@0:     fromIon.fixup(&masm);
michael@0:     comp->initBaselineCallReturnFromIonAddr(code->raw() + fromIon.offset());
michael@0: 
michael@0:     CodeOffsetLabel fromVM(returnFromStubOffset_);
michael@0:     fromVM.fixup(&masm);
michael@0:     comp->initBaselineCallReturnFromStubAddr(code->raw() + fromVM.offset());
michael@0: 
michael@0:     return true;
michael@0: }
michael@0: 
michael@0: typedef bool (*CreateThisFn)(JSContext *cx, HandleObject callee, MutableHandleValue rval);
michael@0: static const VMFunction CreateThisInfoBaseline = FunctionInfo<CreateThisFn>(CreateThis);
michael@0: 
michael@0: bool
michael@0: ICCallScriptedCompiler::generateStubCode(MacroAssembler &masm)
michael@0: {
michael@0:     Label failure;
michael@0:     GeneralRegisterSet regs(availableGeneralRegs(0));
michael@0:     bool canUseTailCallReg = regs.has(BaselineTailCallReg);
michael@0: 
michael@0:     Register argcReg = R0.scratchReg();
michael@0:     JS_ASSERT(argcReg != ArgumentsRectifierReg);
michael@0: 
michael@0:     regs.take(argcReg);
michael@0:     regs.take(ArgumentsRectifierReg);
michael@0:     regs.takeUnchecked(BaselineTailCallReg);
michael@0: 
michael@0:     // Load the callee in R1.
michael@0: // Stack Layout: [ ..., CalleeVal, ThisVal, Arg0Val, ..., ArgNVal, +ICStackValueOffset+ ] michael@0: BaseIndex calleeSlot(BaselineStackReg, argcReg, TimesEight, ICStackValueOffset + sizeof(Value)); michael@0: masm.loadValue(calleeSlot, R1); michael@0: regs.take(R1); michael@0: michael@0: // Ensure callee is an object. michael@0: masm.branchTestObject(Assembler::NotEqual, R1, &failure); michael@0: michael@0: // Ensure callee is a function. michael@0: Register callee = masm.extractObject(R1, ExtractTemp0); michael@0: masm.branchTestObjClass(Assembler::NotEqual, callee, regs.getAny(), &JSFunction::class_, michael@0: &failure); michael@0: michael@0: // If calling a specific script, check if the script matches. Otherwise, ensure that michael@0: // callee function is scripted. Leave calleeScript in |callee| reg. michael@0: if (calleeScript_) { michael@0: JS_ASSERT(kind == ICStub::Call_Scripted); michael@0: michael@0: // Callee is a function. Check if script matches. michael@0: masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), callee); michael@0: Address expectedScript(BaselineStubReg, ICCall_Scripted::offsetOfCalleeScript()); michael@0: masm.branchPtr(Assembler::NotEqual, expectedScript, callee, &failure); michael@0: } else { michael@0: if (isConstructing_) michael@0: masm.branchIfNotInterpretedConstructor(callee, regs.getAny(), &failure); michael@0: else michael@0: masm.branchIfFunctionHasNoScript(callee, &failure); michael@0: masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), callee); michael@0: } michael@0: michael@0: // Load the start of the target JitCode. michael@0: Register code; michael@0: if (!isConstructing_) { michael@0: code = regs.takeAny(); michael@0: masm.loadBaselineOrIonRaw(callee, code, SequentialExecution, &failure); michael@0: } else { michael@0: Address scriptCode(callee, JSScript::offsetOfBaselineOrIonRaw()); michael@0: masm.branchPtr(Assembler::Equal, scriptCode, ImmPtr(nullptr), &failure); michael@0: } michael@0: michael@0: // We no longer need R1. michael@0: regs.add(R1); michael@0: michael@0: // Push a stub frame so that we can perform a non-tail call. michael@0: enterStubFrame(masm, regs.getAny()); michael@0: if (canUseTailCallReg) michael@0: regs.add(BaselineTailCallReg); michael@0: michael@0: Label failureLeaveStubFrame; michael@0: michael@0: if (isConstructing_) { michael@0: // Save argc before call. michael@0: masm.push(argcReg); michael@0: michael@0: // Stack now looks like: michael@0: // [..., Callee, ThisV, Arg0V, ..., ArgNV, StubFrameHeader, ArgC ] michael@0: BaseIndex calleeSlot2(BaselineStackReg, argcReg, TimesEight, michael@0: sizeof(Value) + STUB_FRAME_SIZE + sizeof(size_t)); michael@0: masm.loadValue(calleeSlot2, R1); michael@0: masm.push(masm.extractObject(R1, ExtractTemp0)); michael@0: if (!callVM(CreateThisInfoBaseline, masm)) michael@0: return false; michael@0: michael@0: // Return of CreateThis must be an object. michael@0: #ifdef DEBUG michael@0: Label createdThisIsObject; michael@0: masm.branchTestObject(Assembler::Equal, JSReturnOperand, &createdThisIsObject); michael@0: masm.assumeUnreachable("The return of CreateThis must be an object."); michael@0: masm.bind(&createdThisIsObject); michael@0: #endif michael@0: michael@0: // Reset the register set from here on in. 
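michael@0:         // The CreateThis VM call above may have clobbered any of the
michael@0:         // allocatable registers, so recompute the available set and re-take
michael@0:         // the values that are still live.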
michael@0:         JS_ASSERT(JSReturnOperand == R0);
michael@0:         regs = availableGeneralRegs(0);
michael@0:         regs.take(R0);
michael@0:         regs.take(ArgumentsRectifierReg);
michael@0:         argcReg = regs.takeAny();
michael@0: 
michael@0:         // Restore saved argc so we can use it to calculate the address to save
michael@0:         // the resulting this object to.
michael@0:         masm.pop(argcReg);
michael@0: 
michael@0:         // Save "this" value back into pushed arguments on stack. R0 can be clobbered after that.
michael@0:         // Stack now looks like:
michael@0:         //      [..., Callee, ThisV, Arg0V, ..., ArgNV, StubFrameHeader ]
michael@0:         BaseIndex thisSlot(BaselineStackReg, argcReg, TimesEight, STUB_FRAME_SIZE);
michael@0:         masm.storeValue(R0, thisSlot);
michael@0: 
michael@0:         // Restore the stub register from the baseline stub frame.
michael@0:         masm.loadPtr(Address(BaselineStackReg, STUB_FRAME_SAVED_STUB_OFFSET), BaselineStubReg);
michael@0: 
michael@0:         // Reload callee script. Note that a GC triggered by CreateThis may
michael@0:         // have destroyed the callee BaselineScript and IonScript. CreateThis is
michael@0:         // safely repeatable though, so in this case we just leave the stub frame
michael@0:         // and jump to the next stub.
michael@0: 
michael@0:         // Just need to load the script now.
michael@0:         BaseIndex calleeSlot3(BaselineStackReg, argcReg, TimesEight,
michael@0:                               sizeof(Value) + STUB_FRAME_SIZE);
michael@0:         masm.loadValue(calleeSlot3, R0);
michael@0:         callee = masm.extractObject(R0, ExtractTemp0);
michael@0:         regs.add(R0);
michael@0:         regs.takeUnchecked(callee);
michael@0:         masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), callee);
michael@0: 
michael@0:         code = regs.takeAny();
michael@0:         masm.loadBaselineOrIonRaw(callee, code, SequentialExecution, &failureLeaveStubFrame);
michael@0: 
michael@0:         // Release callee register, but don't add ExtractTemp0 back into the pool.
michael@0:         // ExtractTemp0 is used later, and if it's allocated to some other register at that
michael@0:         // point, it will get clobbered when used.
michael@0:         if (callee != ExtractTemp0)
michael@0:             regs.add(callee);
michael@0: 
michael@0:         if (canUseTailCallReg)
michael@0:             regs.addUnchecked(BaselineTailCallReg);
michael@0:     }
michael@0:     Register scratch = regs.takeAny();
michael@0: 
michael@0:     // Values are on the stack left-to-right. Calling convention wants them
michael@0:     // right-to-left so duplicate them on the stack in reverse order.
michael@0:     // |this| and callee are pushed last.
michael@0:     pushCallArguments(masm, regs, argcReg);
michael@0: 
michael@0:     // The callee is on top of the stack. Pop and unbox it.
michael@0:     ValueOperand val = regs.takeAnyValue();
michael@0:     masm.popValue(val);
michael@0:     callee = masm.extractObject(val, ExtractTemp0);
michael@0: 
michael@0:     EmitCreateStubFrameDescriptor(masm, scratch);
michael@0: 
michael@0:     // Note that we use Push, not push, so that callIon will align the stack
michael@0:     // properly on ARM.
michael@0:     masm.Push(argcReg);
michael@0:     masm.Push(callee);
michael@0:     masm.Push(scratch);
michael@0: 
michael@0:     // Handle arguments underflow.
michael@0:     Label noUnderflow;
michael@0:     masm.load16ZeroExtend(Address(callee, JSFunction::offsetOfNargs()), callee);
michael@0:     masm.branch32(Assembler::AboveOrEqual, argcReg, callee, &noUnderflow);
michael@0:     {
michael@0:         // Call the arguments rectifier.
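michael@0:         // The rectifier pads the missing formals with |undefined| so the
michael@0:         // callee sees at least its declared nargs arguments; the actual argc
michael@0:         // travels in ArgumentsRectifierReg.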
michael@0:         JS_ASSERT(ArgumentsRectifierReg != code);
michael@0:         JS_ASSERT(ArgumentsRectifierReg != argcReg);
michael@0: 
michael@0:         JitCode *argumentsRectifier =
michael@0:             cx->runtime()->jitRuntime()->getArgumentsRectifier(SequentialExecution);
michael@0: 
michael@0:         masm.movePtr(ImmGCPtr(argumentsRectifier), code);
michael@0:         masm.loadPtr(Address(code, JitCode::offsetOfCode()), code);
michael@0:         masm.mov(argcReg, ArgumentsRectifierReg);
michael@0:     }
michael@0: 
michael@0:     masm.bind(&noUnderflow);
michael@0: 
michael@0:     // If needed, update SPS Profiler frame entry before and after call.
michael@0:     {
michael@0:         JS_ASSERT(kind == ICStub::Call_Scripted || kind == ICStub::Call_AnyScripted);
michael@0:         GeneralRegisterSet availRegs = availableGeneralRegs(0);
michael@0:         availRegs.take(ArgumentsRectifierReg);
michael@0:         availRegs.take(code);
michael@0:         emitProfilingUpdate(masm, availRegs, kind == ICStub::Call_Scripted ?
michael@0:                                              ICCall_Scripted::offsetOfPCOffset()
michael@0:                                              : ICCall_AnyScripted::offsetOfPCOffset());
michael@0:     }
michael@0: 
michael@0:     masm.callIon(code);
michael@0: 
michael@0:     // If this is a constructing call, and the callee returns a non-object, replace it with
michael@0:     // the |this| object passed in.
michael@0:     if (isConstructing_) {
michael@0:         Label skipThisReplace;
michael@0:         masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);
michael@0: 
michael@0:         Register scratchReg = JSReturnOperand.scratchReg();
michael@0: 
michael@0:         // Current stack: [ ARGVALS..., ThisVal, ActualArgc, Callee, Descriptor ]
michael@0:         // However, we can't use this ThisVal, because it hasn't been traced. We need to use
michael@0:         // the ThisVal higher up the stack:
michael@0:         // Current stack: [ ThisVal, ARGVALS..., ...STUB FRAME...,
michael@0:         //                  ARGVALS..., ThisVal, ActualArgc, Callee, Descriptor ]
michael@0:         masm.loadPtr(Address(BaselineStackReg, 2*sizeof(size_t)), scratchReg);
michael@0: 
michael@0:         // scratchReg now contains actualArgCount. Double it to account for skipping past two
michael@0:         // pushed copies of argument values. Additionally, we need to add:
michael@0:         // STUB_FRAME_SIZE + sizeof(ThisVal) + sizeof(size_t) + sizeof(void *) + sizeof(size_t)
michael@0:         // for: stub frame, this value, actual argc, callee, and descriptor
michael@0:         masm.lshiftPtr(Imm32(1), scratchReg);
michael@0:         BaseIndex reloadThisSlot(BaselineStackReg, scratchReg, TimesEight,
michael@0:                                  STUB_FRAME_SIZE + sizeof(Value) + 3*sizeof(size_t));
michael@0:         masm.loadValue(reloadThisSlot, JSReturnOperand);
michael@0: #ifdef DEBUG
michael@0:         masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);
michael@0:         masm.assumeUnreachable("Return of constructing call should be an object.");
michael@0: #endif
michael@0:         masm.bind(&skipThisReplace);
michael@0:     }
michael@0: 
michael@0:     leaveStubFrame(masm, true);
michael@0: 
michael@0:     // Enter type monitor IC to type-check result.
michael@0:     EmitEnterTypeMonitorIC(masm);
michael@0: 
michael@0:     // Leave stub frame and restore argc for the next stub.
    masm.bind(&failureLeaveStubFrame);
    leaveStubFrame(masm, false);
    if (argcReg != R0.scratchReg())
        masm.mov(argcReg, R0.scratchReg());

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}

bool
ICCall_Native::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    GeneralRegisterSet regs(availableGeneralRegs(0));

    Register argcReg = R0.scratchReg();
    regs.take(argcReg);
    regs.takeUnchecked(BaselineTailCallReg);

    // Load the callee in R1.
    BaseIndex calleeSlot(BaselineStackReg, argcReg, TimesEight, ICStackValueOffset + sizeof(Value));
    masm.loadValue(calleeSlot, R1);
    regs.take(R1);

    masm.branchTestObject(Assembler::NotEqual, R1, &failure);

    // Ensure callee matches this stub's callee.
    Register callee = masm.extractObject(R1, ExtractTemp0);
    Address expectedCallee(BaselineStubReg, ICCall_Native::offsetOfCallee());
    masm.branchPtr(Assembler::NotEqual, expectedCallee, callee, &failure);

    regs.add(R1);
    regs.takeUnchecked(callee);

    // Push a stub frame so that we can perform a non-tail call.
    // Note that this leaves the return address in TailCallReg.
    enterStubFrame(masm, regs.getAny());

    // Values are on the stack left-to-right. Calling convention wants them
    // right-to-left so duplicate them on the stack in reverse order.
    // |this| and callee are pushed last.
    pushCallArguments(masm, regs, argcReg);

    if (isConstructing_) {
        // Stack looks like: [ ..., Arg0Val, ThisVal, CalleeVal ]
        // Replace ThisVal with MagicValue(JS_IS_CONSTRUCTING).
        masm.storeValue(MagicValue(JS_IS_CONSTRUCTING), Address(BaselineStackReg, sizeof(Value)));
    }

    masm.checkStackAlignment();

    // Native functions have the signature:
    //
    //    bool (*)(JSContext *, unsigned, Value *vp)
    //
    // Where vp[0] is space for callee/return value, vp[1] is |this|, and vp[2] onward
    // are the function arguments.

    // Initialize vp.
    Register vpReg = regs.takeAny();
    masm.movePtr(StackPointer, vpReg);

    // Construct a native exit frame.
    masm.push(argcReg);

    Register scratch = regs.takeAny();
    EmitCreateStubFrameDescriptor(masm, scratch);
    masm.push(scratch);
    masm.push(BaselineTailCallReg);
    masm.enterFakeExitFrame();

    // If needed, update SPS Profiler frame entry. At this point, BaselineTailCallReg
    // and scratch can be clobbered.
    emitProfilingUpdate(masm, BaselineTailCallReg, scratch, ICCall_Native::offsetOfPCOffset());

    // Execute call.
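    // The three ABI arguments set up below line up with the JSNative
    // signature shown above: arg0 = cx, arg1 = argc, arg2 = vp. vpReg points
    // at the callee/this/args Values just pushed, so vp[0] doubles as the
    // return-value slot.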
    masm.setupUnalignedABICall(3, scratch);
    masm.loadJSContext(scratch);
    masm.passABIArg(scratch);
    masm.passABIArg(argcReg);
    masm.passABIArg(vpReg);

#ifdef JS_ARM_SIMULATOR
    // The simulator requires VM calls to be redirected to a special swi
    // instruction to handle them, so we store the redirected pointer in the
    // stub and use that instead of the original one.
    masm.callWithABI(Address(BaselineStubReg, ICCall_Native::offsetOfNative()));
#else
    masm.callWithABI(Address(callee, JSFunction::offsetOfNativeOrScript()));
#endif

    // Test for failure.
    masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());

    // Load the return value into R0.
    masm.loadValue(Address(StackPointer, IonNativeExitFrameLayout::offsetOfResult()), R0);

    leaveStubFrame(masm);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}

bool
ICCall_ScriptedApplyArray::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    GeneralRegisterSet regs(availableGeneralRegs(0));

    Register argcReg = R0.scratchReg();
    regs.take(argcReg);
    regs.takeUnchecked(BaselineTailCallReg);
    regs.takeUnchecked(ArgumentsRectifierReg);

    //
    // Validate inputs
    //

    Register target = guardFunApply(masm, regs, argcReg, /*checkNative=*/false,
                                    FunApply_Array, &failure);
    if (regs.has(target)) {
        regs.take(target);
    } else {
        // If target is already a reserved reg, take another register for it, because it's
        // probably currently an ExtractTemp, which might get clobbered later.
        Register targetTemp = regs.takeAny();
        masm.movePtr(target, targetTemp);
        target = targetTemp;
    }

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, regs.getAny());

    //
    // Push arguments
    //

    // Stack now looks like:
    //                                  BaselineFrameReg -------------------.
    //                                                                      v
    //      [..., js_fun_apply, TargetV, TargetThisV, ArgsArrayV, StubFrameHeader]

    // Push all array elements onto the stack:
    Address arrayVal(BaselineFrameReg, STUB_FRAME_SIZE);
    pushArrayArguments(masm, arrayVal, regs);

    // Stack now looks like:
    //                                  BaselineFrameReg -------------------.
    //                                                                      v
    //      [..., js_fun_apply, TargetV, TargetThisV, ArgsArrayV, StubFrameHeader,
    //       PushedArgN, ..., PushedArg0]
    // Can't fail after this, so it's ok to clobber argcReg.

    // Push actual argument 0 as |thisv| for call.
    masm.pushValue(Address(BaselineFrameReg, STUB_FRAME_SIZE + sizeof(Value)));

    // All pushes after this use Push instead of push to make sure ARM can align
    // stack properly for call.
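    // (Capital-P Push, unlike push, keeps the assembler's framePushed_
    // bookkeeping in sync, which is what lets callIon compute correct
    // alignment padding and a correct frame descriptor.)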
    Register scratch = regs.takeAny();
    EmitCreateStubFrameDescriptor(masm, scratch);

    // Reload argc from length of array.
    masm.extractObject(arrayVal, argcReg);
    masm.loadPtr(Address(argcReg, JSObject::offsetOfElements()), argcReg);
    masm.load32(Address(argcReg, ObjectElements::offsetOfInitializedLength()), argcReg);

    masm.Push(argcReg);
    masm.Push(target);
    masm.Push(scratch);

    // Load nargs into scratch for underflow check, and then load jitcode pointer into target.
    masm.load16ZeroExtend(Address(target, JSFunction::offsetOfNargs()), scratch);
    masm.loadPtr(Address(target, JSFunction::offsetOfNativeOrScript()), target);
    masm.loadBaselineOrIonRaw(target, target, SequentialExecution, nullptr);

    // Handle arguments underflow.
    Label noUnderflow;
    masm.branch32(Assembler::AboveOrEqual, argcReg, scratch, &noUnderflow);
    {
        // Call the arguments rectifier.
        JS_ASSERT(ArgumentsRectifierReg != target);
        JS_ASSERT(ArgumentsRectifierReg != argcReg);

        JitCode *argumentsRectifier =
            cx->runtime()->jitRuntime()->getArgumentsRectifier(SequentialExecution);

        masm.movePtr(ImmGCPtr(argumentsRectifier), target);
        masm.loadPtr(Address(target, JitCode::offsetOfCode()), target);
        masm.mov(argcReg, ArgumentsRectifierReg);
    }
    masm.bind(&noUnderflow);
    regs.add(argcReg);

    // If needed, update SPS Profiler frame entry. At this point, BaselineTailCallReg
    // and scratch can be clobbered.
    emitProfilingUpdate(masm, regs.getAny(), scratch,
                        ICCall_ScriptedApplyArguments::offsetOfPCOffset());

    // Do call.
    masm.callIon(target);
    leaveStubFrame(masm, true);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}

bool
ICCall_ScriptedApplyArguments::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    GeneralRegisterSet regs(availableGeneralRegs(0));

    Register argcReg = R0.scratchReg();
    regs.take(argcReg);
    regs.takeUnchecked(BaselineTailCallReg);
    regs.takeUnchecked(ArgumentsRectifierReg);

    //
    // Validate inputs
    //

    Register target = guardFunApply(masm, regs, argcReg, /*checkNative=*/false,
                                    FunApply_MagicArgs, &failure);
    if (regs.has(target)) {
        regs.take(target);
    } else {
        // If target is already a reserved reg, take another register for it, because it's
        // probably currently an ExtractTemp, which might get clobbered later.
        Register targetTemp = regs.takeAny();
        masm.movePtr(target, targetTemp);
        target = targetTemp;
    }

    // Push a stub frame so that we can perform a non-tail call.
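    // (Conceptually, the stub frame saves enough state -- return address,
    // saved frame pointer, and the current stub pointer, STUB_FRAME_SIZE
    // bytes in total -- for the VM to unwind through this IC if the callee
    // triggers a VM call or GC; the exact layout lives in BaselineHelpers.)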
    enterStubFrame(masm, regs.getAny());

    //
    // Push arguments
    //

    // Stack now looks like:
    //      [..., js_fun_apply, TargetV, TargetThisV, MagicArgsV, StubFrameHeader]

    // Push all arguments supplied to caller function onto the stack.
    pushCallerArguments(masm, regs);

    // Stack now looks like:
    //                                  BaselineFrameReg -------------------.
    //                                                                      v
    //      [..., js_fun_apply, TargetV, TargetThisV, MagicArgsV, StubFrameHeader,
    //       PushedArgN, ..., PushedArg0]
    // Can't fail after this, so it's ok to clobber argcReg.

    // Push actual argument 0 as |thisv| for call.
    masm.pushValue(Address(BaselineFrameReg, STUB_FRAME_SIZE + sizeof(Value)));

    // All pushes after this use Push instead of push to make sure ARM can align
    // stack properly for call.
    Register scratch = regs.takeAny();
    EmitCreateStubFrameDescriptor(masm, scratch);

    masm.loadPtr(Address(BaselineFrameReg, 0), argcReg);
    masm.loadPtr(Address(argcReg, BaselineFrame::offsetOfNumActualArgs()), argcReg);
    masm.Push(argcReg);
    masm.Push(target);
    masm.Push(scratch);

    // Load nargs into scratch for underflow check, and then load jitcode pointer into target.
    masm.load16ZeroExtend(Address(target, JSFunction::offsetOfNargs()), scratch);
    masm.loadPtr(Address(target, JSFunction::offsetOfNativeOrScript()), target);
    masm.loadBaselineOrIonRaw(target, target, SequentialExecution, nullptr);

    // Handle arguments underflow.
    Label noUnderflow;
    masm.branch32(Assembler::AboveOrEqual, argcReg, scratch, &noUnderflow);
    {
        // Call the arguments rectifier.
        JS_ASSERT(ArgumentsRectifierReg != target);
        JS_ASSERT(ArgumentsRectifierReg != argcReg);

        JitCode *argumentsRectifier =
            cx->runtime()->jitRuntime()->getArgumentsRectifier(SequentialExecution);

        masm.movePtr(ImmGCPtr(argumentsRectifier), target);
        masm.loadPtr(Address(target, JitCode::offsetOfCode()), target);
        masm.mov(argcReg, ArgumentsRectifierReg);
    }
    masm.bind(&noUnderflow);
    regs.add(argcReg);

    // If needed, update SPS Profiler frame entry. At this point, BaselineTailCallReg
    // and scratch can be clobbered.
    emitProfilingUpdate(masm, regs.getAny(), scratch,
                        ICCall_ScriptedApplyArguments::offsetOfPCOffset());

    // Do call.
    masm.callIon(target);
    leaveStubFrame(masm, true);

    // Enter type monitor IC to type-check result.
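    // (The type monitor chain records the observed result type into Type
    // Inference data so Ion can later specialize on it.)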
    EmitEnterTypeMonitorIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}

bool
ICCall_ScriptedFunCall::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    GeneralRegisterSet regs(availableGeneralRegs(0));
    bool canUseTailCallReg = regs.has(BaselineTailCallReg);

    Register argcReg = R0.scratchReg();
    JS_ASSERT(argcReg != ArgumentsRectifierReg);

    regs.take(argcReg);
    regs.take(ArgumentsRectifierReg);
    regs.takeUnchecked(BaselineTailCallReg);

    // Load the callee in R1.
    // Stack Layout: [ ..., CalleeVal, ThisVal, Arg0Val, ..., ArgNVal, +ICStackValueOffset+ ]
    BaseIndex calleeSlot(BaselineStackReg, argcReg, TimesEight, ICStackValueOffset + sizeof(Value));
    masm.loadValue(calleeSlot, R1);
    regs.take(R1);

    // Ensure callee is js_fun_call.
    masm.branchTestObject(Assembler::NotEqual, R1, &failure);

    Register callee = masm.extractObject(R1, ExtractTemp0);
    masm.branchTestObjClass(Assembler::NotEqual, callee, regs.getAny(), &JSFunction::class_,
                            &failure);
    masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), callee);
    masm.branchPtr(Assembler::NotEqual, callee, ImmPtr(js_fun_call), &failure);

    // Ensure |this| is a scripted function with JIT code.
    BaseIndex thisSlot(BaselineStackReg, argcReg, TimesEight, ICStackValueOffset);
    masm.loadValue(thisSlot, R1);

    masm.branchTestObject(Assembler::NotEqual, R1, &failure);
    callee = masm.extractObject(R1, ExtractTemp0);

    masm.branchTestObjClass(Assembler::NotEqual, callee, regs.getAny(), &JSFunction::class_,
                            &failure);
    masm.branchIfFunctionHasNoScript(callee, &failure);
    masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), callee);

    // Load the start of the target JitCode.
    Register code = regs.takeAny();
    masm.loadBaselineOrIonRaw(callee, code, SequentialExecution, &failure);

    // We no longer need R1.
    regs.add(R1);

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, regs.getAny());
    if (canUseTailCallReg)
        regs.add(BaselineTailCallReg);

    // Values are on the stack left-to-right. Calling convention wants them
    // right-to-left so duplicate them on the stack in reverse order.
    pushCallArguments(masm, regs, argcReg);

    // Discard callee (function.call).
    masm.addPtr(Imm32(sizeof(Value)), StackPointer);

    // Pop scripted callee (the original |this|).
    ValueOperand val = regs.takeAnyValue();
    masm.popValue(val);

    // Decrement argc if argc > 0. If argc == 0, push |undefined| as |this|.
    Label zeroArgs, done;
    masm.branchTest32(Assembler::Zero, argcReg, argcReg, &zeroArgs);
    masm.sub32(Imm32(1), argcReg);
    masm.jump(&done);

    masm.bind(&zeroArgs);
    masm.pushValue(UndefinedValue());
    masm.bind(&done);

    // Unbox scripted callee.
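    // At this point the f.call(...) invocation has been rewritten into a
    // direct call: e.g. (illustrative only) for f.call(obj, x) we entered
    // with argc == 2 and callee == Function.prototype.call; now obj is
    // |this|, x is the only argument, argc == 1, and the popped |val| holds
    // f itself, which is unboxed below.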
    callee = masm.extractObject(val, ExtractTemp0);

    Register scratch = regs.takeAny();
    EmitCreateStubFrameDescriptor(masm, scratch);

    // Note that we use Push, not push, so that callIon will align the stack
    // properly on ARM.
    masm.Push(argcReg);
    masm.Push(callee);
    masm.Push(scratch);

    // Handle arguments underflow.
    Label noUnderflow;
    masm.load16ZeroExtend(Address(callee, JSFunction::offsetOfNargs()), callee);
    masm.branch32(Assembler::AboveOrEqual, argcReg, callee, &noUnderflow);
    {
        // Call the arguments rectifier.
        JS_ASSERT(ArgumentsRectifierReg != code);
        JS_ASSERT(ArgumentsRectifierReg != argcReg);

        JitCode *argumentsRectifier =
            cx->runtime()->jitRuntime()->getArgumentsRectifier(SequentialExecution);

        masm.movePtr(ImmGCPtr(argumentsRectifier), code);
        masm.loadPtr(Address(code, JitCode::offsetOfCode()), code);
        masm.mov(argcReg, ArgumentsRectifierReg);
    }

    masm.bind(&noUnderflow);

    // If needed, update SPS Profiler frame entry.
    {
        // Need to avoid using ArgumentsRectifierReg and code register.
        GeneralRegisterSet availRegs = availableGeneralRegs(0);
        availRegs.take(ArgumentsRectifierReg);
        availRegs.take(code);
        emitProfilingUpdate(masm, availRegs, ICCall_ScriptedFunCall::offsetOfPCOffset());
    }

    masm.callIon(code);

    leaveStubFrame(masm, true);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}

static bool
DoubleValueToInt32ForSwitch(Value *v)
{
    double d = v->toDouble();
    int32_t truncated = int32_t(d);
    if (d != double(truncated))
        return false;

    v->setInt32(truncated);
    return true;
}

bool
ICTableSwitch::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label isInt32, notInt32, outOfRange;
    Register scratch = R1.scratchReg();

    masm.branchTestInt32(Assembler::NotEqual, R0, &notInt32);

    Register key = masm.extractInt32(R0, ExtractTemp0);

    masm.bind(&isInt32);

    masm.load32(Address(BaselineStubReg, offsetof(ICTableSwitch, min_)), scratch);
    masm.sub32(scratch, key);
    masm.branch32(Assembler::BelowOrEqual,
                  Address(BaselineStubReg, offsetof(ICTableSwitch, length_)), key, &outOfRange);

    masm.loadPtr(Address(BaselineStubReg, offsetof(ICTableSwitch, table_)), scratch);
    masm.loadPtr(BaseIndex(scratch, key, ScalePointer), scratch);

    EmitChangeICReturnAddress(masm, scratch);
    EmitReturnFromIC(masm);

    masm.bind(&notInt32);

    masm.branchTestDouble(Assembler::NotEqual, R0, &outOfRange);
    if (cx->runtime()->jitSupportsFloatingPoint) {
        masm.unboxDouble(R0, FloatReg0);

        // N.B. -0 === 0, so convert -0 to a 0 int32.
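        // The conversion must be exact: e.g. a key of 3.0 becomes int32 3,
        // while 3.5 bails to the default target via &outOfRange. With
        // negativeZeroCheck false, -0.0 converts to 0, matching the N.B.
        // above.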
        masm.convertDoubleToInt32(FloatReg0, key, &outOfRange, /* negativeZeroCheck = */ false);
    } else {
        // Pass pointer to double value.
        masm.pushValue(R0);
        masm.movePtr(StackPointer, R0.scratchReg());

        masm.setupUnalignedABICall(1, scratch);
        masm.passABIArg(R0.scratchReg());
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, DoubleValueToInt32ForSwitch));

        // If the function returns |true|, the value has been converted to
        // int32.
        masm.mov(ReturnReg, scratch);
        masm.popValue(R0);
        masm.branchIfFalseBool(scratch, &outOfRange);
        masm.unboxInt32(R0, key);
    }
    masm.jump(&isInt32);

    masm.bind(&outOfRange);

    masm.loadPtr(Address(BaselineStubReg, offsetof(ICTableSwitch, defaultTarget_)), scratch);

    EmitChangeICReturnAddress(masm, scratch);
    EmitReturnFromIC(masm);
    return true;
}

ICStub *
ICTableSwitch::Compiler::getStub(ICStubSpace *space)
{
    JitCode *code = getStubCode();
    if (!code)
        return nullptr;

    jsbytecode *pc = pc_;
    pc += JUMP_OFFSET_LEN;
    int32_t low = GET_JUMP_OFFSET(pc);
    pc += JUMP_OFFSET_LEN;
    int32_t high = GET_JUMP_OFFSET(pc);
    int32_t length = high - low + 1;
    pc += JUMP_OFFSET_LEN;

    void **table = (void **) space->alloc(sizeof(void *) * length);
    if (!table)
        return nullptr;

    jsbytecode *defaultpc = pc_ + GET_JUMP_OFFSET(pc_);

    for (int32_t i = 0; i < length; i++) {
        int32_t off = GET_JUMP_OFFSET(pc);
        if (off)
            table[i] = pc_ + off;
        else
            table[i] = defaultpc;
        pc += JUMP_OFFSET_LEN;
    }

    return ICTableSwitch::New(space, code, table, low, length, defaultpc);
}

void
ICTableSwitch::fixupJumpTable(JSScript *script, BaselineScript *baseline)
{
    defaultTarget_ = baseline->nativeCodeForPC(script, (jsbytecode *) defaultTarget_);

    for (int32_t i = 0; i < length_; i++)
        table_[i] = baseline->nativeCodeForPC(script, (jsbytecode *) table_[i]);
}

//
// IteratorNew_Fallback
//

static bool
DoIteratorNewFallback(JSContext *cx, BaselineFrame *frame, ICIteratorNew_Fallback *stub,
                      HandleValue value, MutableHandleValue res)
{
    jsbytecode *pc = stub->icEntry()->pc(frame->script());
    FallbackICSpew(cx, stub, "IteratorNew");

    uint8_t flags = GET_UINT8(pc);
    res.set(value);
    return ValueToIterator(cx, flags, res);
}

typedef bool (*DoIteratorNewFallbackFn)(JSContext *, BaselineFrame *, ICIteratorNew_Fallback *,
                                        HandleValue, MutableHandleValue);
static const VMFunction DoIteratorNewFallbackInfo =
    FunctionInfo<DoIteratorNewFallbackFn>(DoIteratorNewFallback, PopValues(1));

bool
ICIteratorNew_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    // Sync stack for the decompiler.
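    // R0 is pushed twice: the first copy re-syncs the expression stack so the
    // decompiler can reconstruct the bytecode state, and it is popped again
    // by the PopValues(1) in DoIteratorNewFallbackInfo; the second copy is
    // the HandleValue argument for the VM call.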
    masm.pushValue(R0);

    masm.pushValue(R0);
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(DoIteratorNewFallbackInfo, masm);
}

//
// IteratorMore_Fallback
//

static bool
DoIteratorMoreFallback(JSContext *cx, BaselineFrame *frame, ICIteratorMore_Fallback *stub_,
                       HandleValue iterValue, MutableHandleValue res)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICIteratorMore_Fallback *> stub(frame, stub_);

    FallbackICSpew(cx, stub, "IteratorMore");

    bool cond;
    if (!IteratorMore(cx, &iterValue.toObject(), &cond, res))
        return false;
    res.setBoolean(cond);

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    if (iterValue.toObject().is<PropertyIteratorObject>() &&
        !stub->hasStub(ICStub::IteratorMore_Native))
    {
        ICIteratorMore_Native::Compiler compiler(cx);
        ICStub *newStub = compiler.getStub(compiler.getStubSpace(frame->script()));
        if (!newStub)
            return false;
        stub->addNewStub(newStub);
    }

    return true;
}

typedef bool (*DoIteratorMoreFallbackFn)(JSContext *, BaselineFrame *, ICIteratorMore_Fallback *,
                                         HandleValue, MutableHandleValue);
static const VMFunction DoIteratorMoreFallbackInfo =
    FunctionInfo<DoIteratorMoreFallbackFn>(DoIteratorMoreFallback);

bool
ICIteratorMore_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    masm.pushValue(R0);
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(DoIteratorMoreFallbackInfo, masm);
}

//
// IteratorMore_Native
//

bool
ICIteratorMore_Native::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;

    Register obj = masm.extractObject(R0, ExtractTemp0);

    GeneralRegisterSet regs(availableGeneralRegs(1));
    Register nativeIterator = regs.takeAny();
    Register scratch = regs.takeAny();

    masm.branchTestObjClass(Assembler::NotEqual, obj, scratch,
                            &PropertyIteratorObject::class_, &failure);
    masm.loadObjPrivate(obj, JSObject::ITER_CLASS_NFIXED_SLOTS, nativeIterator);

    masm.branchTest32(Assembler::NonZero, Address(nativeIterator, offsetof(NativeIterator, flags)),
                      Imm32(JSITER_FOREACH), &failure);

    // Set output to true if props_cursor < props_end.
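    // props_cursor walks an array of JSString * property names inside the
    // NativeIterator; the iteration has more elements exactly while
    // props_cursor < props_end.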
    masm.loadPtr(Address(nativeIterator, offsetof(NativeIterator, props_end)), scratch);
    Address cursorAddr = Address(nativeIterator, offsetof(NativeIterator, props_cursor));
    masm.cmpPtrSet(Assembler::LessThan, cursorAddr, scratch, scratch);

    masm.tagValue(JSVAL_TYPE_BOOLEAN, scratch, R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub.
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}

//
// IteratorNext_Fallback
//

static bool
DoIteratorNextFallback(JSContext *cx, BaselineFrame *frame, ICIteratorNext_Fallback *stub_,
                       HandleValue iterValue, MutableHandleValue res)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICIteratorNext_Fallback *> stub(frame, stub_);

    FallbackICSpew(cx, stub, "IteratorNext");

    RootedObject iteratorObject(cx, &iterValue.toObject());
    if (!IteratorNext(cx, iteratorObject, res))
        return false;

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    if (!res.isString() && !stub->hasNonStringResult())
        stub->setHasNonStringResult();

    if (iteratorObject->is<PropertyIteratorObject>() &&
        !stub->hasStub(ICStub::IteratorNext_Native))
    {
        ICIteratorNext_Native::Compiler compiler(cx);
        ICStub *newStub = compiler.getStub(compiler.getStubSpace(frame->script()));
        if (!newStub)
            return false;
        stub->addNewStub(newStub);
    }

    return true;
}

typedef bool (*DoIteratorNextFallbackFn)(JSContext *, BaselineFrame *, ICIteratorNext_Fallback *,
                                         HandleValue, MutableHandleValue);
static const VMFunction DoIteratorNextFallbackInfo =
    FunctionInfo<DoIteratorNextFallbackFn>(DoIteratorNextFallback);

bool
ICIteratorNext_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    masm.pushValue(R0);
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(DoIteratorNextFallbackInfo, masm);
}

//
// IteratorNext_Native
//

bool
ICIteratorNext_Native::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;

    Register obj = masm.extractObject(R0, ExtractTemp0);

    GeneralRegisterSet regs(availableGeneralRegs(1));
    Register nativeIterator = regs.takeAny();
    Register scratch = regs.takeAny();

    masm.branchTestObjClass(Assembler::NotEqual, obj, scratch,
                            &PropertyIteratorObject::class_, &failure);
    masm.loadObjPrivate(obj, JSObject::ITER_CLASS_NFIXED_SLOTS, nativeIterator);

    masm.branchTest32(Assembler::NonZero, Address(nativeIterator, offsetof(NativeIterator, flags)),
                      Imm32(JSITER_FOREACH), &failure);

    // Get cursor, next string.
    masm.loadPtr(Address(nativeIterator, offsetof(NativeIterator, props_cursor)), scratch);
    masm.loadPtr(Address(scratch, 0), scratch);

    // Increase the cursor.
    masm.addPtr(Imm32(sizeof(JSString *)),
                Address(nativeIterator, offsetof(NativeIterator, props_cursor)));

    masm.tagValue(JSVAL_TYPE_STRING, scratch, R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub.
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}

//
// IteratorClose_Fallback
//

static bool
DoIteratorCloseFallback(JSContext *cx, ICIteratorClose_Fallback *stub, HandleValue iterValue)
{
    FallbackICSpew(cx, stub, "IteratorClose");

    RootedObject iteratorObject(cx, &iterValue.toObject());
    return CloseIterator(cx, iteratorObject);
}

typedef bool (*DoIteratorCloseFallbackFn)(JSContext *, ICIteratorClose_Fallback *, HandleValue);
static const VMFunction DoIteratorCloseFallbackInfo =
    FunctionInfo<DoIteratorCloseFallbackFn>(DoIteratorCloseFallback);

bool
ICIteratorClose_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    masm.pushValue(R0);
    masm.push(BaselineStubReg);

    return tailCallVM(DoIteratorCloseFallbackInfo, masm);
}

//
// InstanceOf_Fallback
//

static bool
DoInstanceOfFallback(JSContext *cx, ICInstanceOf_Fallback *stub,
                     HandleValue lhs, HandleValue rhs,
                     MutableHandleValue res)
{
    FallbackICSpew(cx, stub, "InstanceOf");

    if (!rhs.isObject()) {
        js_ReportValueError(cx, JSMSG_BAD_INSTANCEOF_RHS, -1, rhs, NullPtr());
        return false;
    }

    RootedObject obj(cx, &rhs.toObject());

    // For functions, keep track of the |prototype| property in type information,
    // for use during Ion compilation.
    if (obj->is<JSFunction>() && IsIonEnabled(cx))
        types::EnsureTrackPropertyTypes(cx, obj, NameToId(cx->names().prototype));

    bool cond = false;
    if (!HasInstance(cx, obj, lhs, &cond))
        return false;

    res.setBoolean(cond);
    return true;
}

typedef bool (*DoInstanceOfFallbackFn)(JSContext *, ICInstanceOf_Fallback *, HandleValue, HandleValue,
                                       MutableHandleValue);
static const VMFunction DoInstanceOfFallbackInfo =
    FunctionInfo<DoInstanceOfFallbackFn>(DoInstanceOfFallback, PopValues(2));

bool
ICInstanceOf_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    // Sync stack for the decompiler.
    masm.pushValue(R0);
    masm.pushValue(R1);

    masm.pushValue(R1);
    masm.pushValue(R0);
    masm.push(BaselineStubReg);

    return tailCallVM(DoInstanceOfFallbackInfo, masm);
}

//
// TypeOf_Fallback
//

static bool
DoTypeOfFallback(JSContext *cx, BaselineFrame *frame, ICTypeOf_Fallback *stub, HandleValue val,
                 MutableHandleValue res)
{
    FallbackICSpew(cx, stub, "TypeOf");
    JSType type = js::TypeOfValue(val);
    RootedString string(cx, TypeName(type, cx->names()));

    res.setString(string);

    JS_ASSERT(type != JSTYPE_NULL);
    if (type != JSTYPE_OBJECT && type != JSTYPE_FUNCTION) {
        // Create a new TypeOf stub.
        IonSpew(IonSpew_BaselineIC, "  Generating TypeOf stub for JSType (%d)", (int) type);
        ICTypeOf_Typed::Compiler compiler(cx, type, string);
        ICStub *typeOfStub = compiler.getStub(compiler.getStubSpace(frame->script()));
        if (!typeOfStub)
            return false;
        stub->addNewStub(typeOfStub);
    }

    return true;
}

typedef bool (*DoTypeOfFallbackFn)(JSContext *, BaselineFrame *frame, ICTypeOf_Fallback *,
                                   HandleValue, MutableHandleValue);
static const VMFunction DoTypeOfFallbackInfo =
    FunctionInfo<DoTypeOfFallbackFn>(DoTypeOfFallback);

bool
ICTypeOf_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    masm.pushValue(R0);
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(DoTypeOfFallbackInfo, masm);
}

bool
ICTypeOf_Typed::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(type_ != JSTYPE_NULL);
    JS_ASSERT(type_ != JSTYPE_FUNCTION);
    JS_ASSERT(type_ != JSTYPE_OBJECT);

    Label failure;
    switch (type_) {
      case JSTYPE_VOID:
        masm.branchTestUndefined(Assembler::NotEqual, R0, &failure);
        break;

      case JSTYPE_STRING:
        masm.branchTestString(Assembler::NotEqual, R0, &failure);
        break;

      case JSTYPE_NUMBER:
        masm.branchTestNumber(Assembler::NotEqual, R0, &failure);
        break;

      case JSTYPE_BOOLEAN:
        masm.branchTestBoolean(Assembler::NotEqual, R0, &failure);
        break;

      default:
        MOZ_ASSUME_UNREACHABLE("Unexpected type");
    }

    masm.movePtr(ImmGCPtr(typeString_), R0.scratchReg());
    masm.tagValue(JSVAL_TYPE_STRING, R0.scratchReg(), R0);
    EmitReturnFromIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}

static bool
DoRetSubFallback(JSContext *cx, BaselineFrame *frame, ICRetSub_Fallback *stub,
                 HandleValue val, uint8_t **resumeAddr)
{
    FallbackICSpew(cx, stub, "RetSub");

    // |val| is the bytecode offset where we should resume.
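    // (JSOP_RETSUB resumes execution after a |finally| that completed
    // normally: the offset in |val| is mapped through
    // BaselineScript::nativeCodeForPC below to the native address to jump
    // back to.)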
    JS_ASSERT(val.isInt32());
    JS_ASSERT(val.toInt32() >= 0);

    JSScript *script = frame->script();
    uint32_t offset = uint32_t(val.toInt32());

    *resumeAddr = script->baselineScript()->nativeCodeForPC(script, script->offsetToPC(offset));

    if (stub->numOptimizedStubs() >= ICRetSub_Fallback::MAX_OPTIMIZED_STUBS)
        return true;

    // Attach an optimized stub for this pc offset.
    IonSpew(IonSpew_BaselineIC, "  Generating RetSub stub for pc offset %u", offset);
    ICRetSub_Resume::Compiler compiler(cx, offset, *resumeAddr);
    ICStub *optStub = compiler.getStub(compiler.getStubSpace(script));
    if (!optStub)
        return false;

    stub->addNewStub(optStub);
    return true;
}

typedef bool (*DoRetSubFallbackFn)(JSContext *cx, BaselineFrame *, ICRetSub_Fallback *,
                                   HandleValue, uint8_t **);
static const VMFunction DoRetSubFallbackInfo = FunctionInfo<DoRetSubFallbackFn>(DoRetSubFallback);

typedef bool (*ThrowFn)(JSContext *, HandleValue);
static const VMFunction ThrowInfoBaseline = FunctionInfo<ThrowFn>(js::Throw);

bool
ICRetSub_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    // If R0 is BooleanValue(true), rethrow R1.
    Label rethrow;
    masm.branchTestBooleanTruthy(true, R0, &rethrow);
    {
        // Call a stub to get the native code address for the pc offset in R1.
        GeneralRegisterSet regs(availableGeneralRegs(0));
        regs.take(R1);
        regs.takeUnchecked(BaselineTailCallReg);

        Register frame = regs.takeAny();
        masm.movePtr(BaselineFrameReg, frame);

        enterStubFrame(masm, regs.getAny());

        masm.pushValue(R1);
        masm.push(BaselineStubReg);
        masm.pushBaselineFramePtr(frame, frame);

        if (!callVM(DoRetSubFallbackInfo, masm))
            return false;

        leaveStubFrame(masm);

        EmitChangeICReturnAddress(masm, ReturnReg);
        EmitReturnFromIC(masm);
    }

    masm.bind(&rethrow);
    EmitRestoreTailCallReg(masm);
    masm.pushValue(R1);
    return tailCallVM(ThrowInfoBaseline, masm);
}

bool
ICRetSub_Resume::Compiler::generateStubCode(MacroAssembler &masm)
{
    // If R0 is BooleanValue(true), rethrow R1.
    Label fail, rethrow;
    masm.branchTestBooleanTruthy(true, R0, &rethrow);

    // R1 is the pc offset. Ensure it matches this stub's offset.
    Register offset = masm.extractInt32(R1, ExtractTemp0);
    masm.branch32(Assembler::NotEqual,
                  Address(BaselineStubReg, ICRetSub_Resume::offsetOfPCOffset()),
                  offset,
                  &fail);

    // pc offset matches, resume at the target pc.
    masm.loadPtr(Address(BaselineStubReg, ICRetSub_Resume::offsetOfAddr()), R0.scratchReg());
    EmitChangeICReturnAddress(masm, R0.scratchReg());
    EmitReturnFromIC(masm);

    // Rethrow the Value stored in R1.
    masm.bind(&rethrow);
    EmitRestoreTailCallReg(masm);
    masm.pushValue(R1);
    if (!tailCallVM(ThrowInfoBaseline, masm))
        return false;

    masm.bind(&fail);
    EmitStubGuardFailure(masm);
    return true;
}

ICProfiler_PushFunction::ICProfiler_PushFunction(JitCode *stubCode, const char *str,
                                                 HandleScript script)
  : ICStub(ICStub::Profiler_PushFunction, stubCode),
    str_(str),
    script_(script)
{ }

ICTypeMonitor_SingleObject::ICTypeMonitor_SingleObject(JitCode *stubCode, HandleObject obj)
  : ICStub(TypeMonitor_SingleObject, stubCode),
    obj_(obj)
{ }

ICTypeMonitor_TypeObject::ICTypeMonitor_TypeObject(JitCode *stubCode, HandleTypeObject type)
  : ICStub(TypeMonitor_TypeObject, stubCode),
    type_(type)
{ }

ICTypeUpdate_SingleObject::ICTypeUpdate_SingleObject(JitCode *stubCode, HandleObject obj)
  : ICStub(TypeUpdate_SingleObject, stubCode),
    obj_(obj)
{ }

ICTypeUpdate_TypeObject::ICTypeUpdate_TypeObject(JitCode *stubCode, HandleTypeObject type)
  : ICStub(TypeUpdate_TypeObject, stubCode),
    type_(type)
{ }

ICGetElemNativeStub::ICGetElemNativeStub(ICStub::Kind kind, JitCode *stubCode,
                                         ICStub *firstMonitorStub,
                                         HandleShape shape, HandlePropertyName name,
                                         AccessType acctype, bool needsAtomize)
  : ICMonitoredStub(kind, stubCode, firstMonitorStub),
    shape_(shape),
    name_(name)
{
    extra_ = (static_cast<uint16_t>(acctype) << ACCESSTYPE_SHIFT) |
             (static_cast<uint16_t>(needsAtomize) << NEEDS_ATOMIZE_SHIFT);
}

ICGetElemNativeStub::~ICGetElemNativeStub()
{ }

ICGetElemNativeGetterStub::ICGetElemNativeGetterStub(
        ICStub::Kind kind, JitCode *stubCode, ICStub *firstMonitorStub,
        HandleShape shape, HandlePropertyName name, AccessType acctype,
        bool needsAtomize, HandleFunction getter, uint32_t pcOffset)
  : ICGetElemNativeStub(kind, stubCode, firstMonitorStub, shape, name, acctype, needsAtomize),
    getter_(getter),
    pcOffset_(pcOffset)
{
    JS_ASSERT(kind == GetElem_NativePrototypeCallNative ||
              kind == GetElem_NativePrototypeCallScripted);
    JS_ASSERT(acctype == NativeGetter || acctype == ScriptedGetter);
}

ICGetElem_NativePrototypeSlot::ICGetElem_NativePrototypeSlot(
        JitCode *stubCode, ICStub *firstMonitorStub,
        HandleShape shape, HandlePropertyName name,
        AccessType acctype, bool needsAtomize, uint32_t offset,
        HandleObject holder, HandleShape holderShape)
  : ICGetElemNativeSlotStub(ICStub::GetElem_NativePrototypeSlot, stubCode, firstMonitorStub, shape,
                            name, acctype, needsAtomize, offset),
    holder_(holder),
    holderShape_(holderShape)
{ }

ICGetElemNativePrototypeCallStub::ICGetElemNativePrototypeCallStub(
        ICStub::Kind kind, JitCode *stubCode, ICStub *firstMonitorStub,
        HandleShape shape, HandlePropertyName name,
        AccessType acctype, bool needsAtomize, HandleFunction getter,
        uint32_t pcOffset,
        HandleObject holder, HandleShape holderShape)
  : ICGetElemNativeGetterStub(kind, stubCode, firstMonitorStub, shape, name, acctype, needsAtomize,
                              getter, pcOffset),
    holder_(holder),
    holderShape_(holderShape)
{ }

ICGetElem_Dense::ICGetElem_Dense(JitCode *stubCode, ICStub *firstMonitorStub, HandleShape shape)
  : ICMonitoredStub(GetElem_Dense, stubCode, firstMonitorStub),
    shape_(shape)
{ }

ICGetElem_TypedArray::ICGetElem_TypedArray(JitCode *stubCode, HandleShape shape, uint32_t type)
  : ICStub(GetElem_TypedArray, stubCode),
    shape_(shape)
{
    extra_ = uint16_t(type);
    JS_ASSERT(extra_ == type);
}

ICSetElem_Dense::ICSetElem_Dense(JitCode *stubCode, HandleShape shape, HandleTypeObject type)
  : ICUpdatedStub(SetElem_Dense, stubCode),
    shape_(shape),
    type_(type)
{ }

ICSetElem_DenseAdd::ICSetElem_DenseAdd(JitCode *stubCode, types::TypeObject *type,
                                       size_t protoChainDepth)
  : ICUpdatedStub(SetElem_DenseAdd, stubCode),
    type_(type)
{
    JS_ASSERT(protoChainDepth <= MAX_PROTO_CHAIN_DEPTH);
    extra_ = protoChainDepth;
}

template <size_t ProtoChainDepth>
ICUpdatedStub *
ICSetElemDenseAddCompiler::getStubSpecific(ICStubSpace *space, const AutoShapeVector *shapes)
{
    RootedTypeObject objType(cx, obj_->getType(cx));
    if (!objType)
        return nullptr;
    Rooted<JitCode *> stubCode(cx, getStubCode());
    return ICSetElem_DenseAddImpl<ProtoChainDepth>::New(space, stubCode, objType, shapes);
}

ICSetElem_TypedArray::ICSetElem_TypedArray(JitCode *stubCode, HandleShape shape, uint32_t type,
                                           bool expectOutOfBounds)
  : ICStub(SetElem_TypedArray, stubCode),
    shape_(shape)
{
    extra_ = uint8_t(type);
    JS_ASSERT(extra_ == type);
    extra_ |= (static_cast<uint16_t>(expectOutOfBounds) << 8);
}

ICGetName_Global::ICGetName_Global(JitCode *stubCode, ICStub *firstMonitorStub, HandleShape shape,
                                   uint32_t slot)
  : ICMonitoredStub(GetName_Global, stubCode, firstMonitorStub),
    shape_(shape),
    slot_(slot)
{ }

template <size_t NumHops>
ICGetName_Scope<NumHops>::ICGetName_Scope(JitCode *stubCode, ICStub *firstMonitorStub,
                                          AutoShapeVector *shapes, uint32_t offset)
  : ICMonitoredStub(GetStubKind(), stubCode, firstMonitorStub),
    offset_(offset)
{
    JS_STATIC_ASSERT(NumHops <= MAX_HOPS);
    JS_ASSERT(shapes->length() == NumHops + 1);
    for (size_t i = 0; i < NumHops + 1; i++)
        shapes_[i].init((*shapes)[i]);
}

ICGetIntrinsic_Constant::ICGetIntrinsic_Constant(JitCode *stubCode, HandleValue value)
  : ICStub(GetIntrinsic_Constant, stubCode),
    value_(value)
{ }

ICGetIntrinsic_Constant::~ICGetIntrinsic_Constant()
{ }

ICGetProp_Primitive::ICGetProp_Primitive(JitCode *stubCode, ICStub *firstMonitorStub,
                                         HandleShape protoShape, uint32_t offset)
  : ICMonitoredStub(GetProp_Primitive, stubCode, firstMonitorStub),
    protoShape_(protoShape),
    offset_(offset)
{ }

ICGetPropNativeStub::ICGetPropNativeStub(ICStub::Kind kind, JitCode *stubCode,
                                         ICStub *firstMonitorStub,
                                         HandleShape shape, uint32_t offset)
  : ICMonitoredStub(kind, stubCode, firstMonitorStub),
    shape_(shape),
    offset_(offset)
{ }

ICGetProp_NativePrototype::ICGetProp_NativePrototype(JitCode *stubCode, ICStub *firstMonitorStub,
                                                     HandleShape shape, uint32_t offset,
                                                     HandleObject holder, HandleShape holderShape)
  : ICGetPropNativeStub(GetProp_NativePrototype, stubCode, firstMonitorStub, shape, offset),
    holder_(holder),
    holderShape_(holderShape)
{ }

ICGetPropCallGetter::ICGetPropCallGetter(Kind kind, JitCode *stubCode, ICStub *firstMonitorStub,
                                         HandleObject holder, HandleShape holderShape,
                                         HandleFunction getter, uint32_t pcOffset)
  : ICMonitoredStub(kind, stubCode, firstMonitorStub),
    holder_(holder),
    holderShape_(holderShape),
    getter_(getter),
    pcOffset_(pcOffset)
{
    JS_ASSERT(kind == ICStub::GetProp_CallScripted ||
              kind == ICStub::GetProp_CallNative ||
              kind == ICStub::GetProp_CallNativePrototype);
}

ICGetPropCallPrototypeGetter::ICGetPropCallPrototypeGetter(Kind kind, JitCode *stubCode,
                                                           ICStub *firstMonitorStub,
                                                           HandleShape receiverShape,
                                                           HandleObject holder,
                                                           HandleShape holderShape,
                                                           HandleFunction getter, uint32_t pcOffset)
  : ICGetPropCallGetter(kind, stubCode, firstMonitorStub, holder, holderShape, getter, pcOffset),
    receiverShape_(receiverShape)
{
    JS_ASSERT(kind == ICStub::GetProp_CallScripted || kind == ICStub::GetProp_CallNativePrototype);
}

ICSetProp_Native::ICSetProp_Native(JitCode *stubCode, HandleTypeObject type, HandleShape shape,
                                   uint32_t offset)
  : ICUpdatedStub(SetProp_Native, stubCode),
    type_(type),
    shape_(shape),
    offset_(offset)
{ }

ICUpdatedStub *
ICSetProp_Native::Compiler::getStub(ICStubSpace *space)
{
    RootedTypeObject type(cx, obj_->getType(cx));
    if (!type)
        return nullptr;

    RootedShape shape(cx, obj_->lastProperty());
    ICUpdatedStub *stub = ICSetProp_Native::New(space, getStubCode(), type, shape, offset_);
    if (!stub || !stub->initUpdatingChain(cx, space))
        return nullptr;
    return stub;
}

ICSetProp_NativeAdd::ICSetProp_NativeAdd(JitCode *stubCode, HandleTypeObject type,
                                         size_t protoChainDepth,
                                         HandleShape newShape,
                                         uint32_t offset)
  : ICUpdatedStub(SetProp_NativeAdd, stubCode),
    type_(type),
    newShape_(newShape),
    offset_(offset)
{
    JS_ASSERT(protoChainDepth <= MAX_PROTO_CHAIN_DEPTH);
    extra_ = protoChainDepth;
}

template <size_t ProtoChainDepth>
ICSetProp_NativeAddImpl<ProtoChainDepth>::ICSetProp_NativeAddImpl(JitCode *stubCode,
                                                                  HandleTypeObject type,
                                                                  const AutoShapeVector *shapes,
                                                                  HandleShape newShape,
                                                                  uint32_t offset)
  : ICSetProp_NativeAdd(stubCode, type, ProtoChainDepth, newShape, offset)
{
    JS_ASSERT(shapes->length() == NumShapes);
    for (size_t i = 0; i < NumShapes; i++)
        shapes_[i].init((*shapes)[i]);
}

ICSetPropNativeAddCompiler::ICSetPropNativeAddCompiler(JSContext *cx, HandleObject obj,
                                                       HandleShape oldShape,
                                                       size_t protoChainDepth,
                                                       bool isFixedSlot,
                                                       uint32_t offset)
  : ICStubCompiler(cx, ICStub::SetProp_NativeAdd),
    obj_(cx, obj),
    oldShape_(cx, oldShape),
    protoChainDepth_(protoChainDepth),
    isFixedSlot_(isFixedSlot),
    offset_(offset)
{
    JS_ASSERT(protoChainDepth_ <= ICSetProp_NativeAdd::MAX_PROTO_CHAIN_DEPTH);
}

ICSetPropCallSetter::ICSetPropCallSetter(Kind kind, JitCode *stubCode, HandleShape shape,
                                         HandleObject holder, HandleShape holderShape,
                                         HandleFunction setter, uint32_t pcOffset)
  : ICStub(kind, stubCode),
    shape_(shape),
    holder_(holder),
    holderShape_(holderShape),
    setter_(setter),
    pcOffset_(pcOffset)
{
    JS_ASSERT(kind == ICStub::SetProp_CallScripted || kind == ICStub::SetProp_CallNative);
}

ICCall_Scripted::ICCall_Scripted(JitCode *stubCode, ICStub *firstMonitorStub,
                                 HandleScript calleeScript, HandleObject templateObject,
                                 uint32_t pcOffset)
  : ICMonitoredStub(ICStub::Call_Scripted, stubCode, firstMonitorStub),
    calleeScript_(calleeScript),
    templateObject_(templateObject),
    pcOffset_(pcOffset)
{ }

ICCall_Native::ICCall_Native(JitCode *stubCode, ICStub *firstMonitorStub,
                             HandleFunction callee, HandleObject templateObject,
                             uint32_t pcOffset)
  : ICMonitoredStub(ICStub::Call_Native, stubCode, firstMonitorStub),
    callee_(callee),
    templateObject_(templateObject),
    pcOffset_(pcOffset)
{
#ifdef JS_ARM_SIMULATOR
    // The simulator requires VM calls to be redirected to a special swi
    // instruction to handle them. To make this work, we store the redirected
    // pointer in the stub.
    native_ = Simulator::RedirectNativeFunction(JS_FUNC_TO_DATA_PTR(void *, callee->native()),
                                                Args_General3);
#endif
}

ICGetPropCallDOMProxyNativeStub::ICGetPropCallDOMProxyNativeStub(Kind kind, JitCode *stubCode,
                                                                 ICStub *firstMonitorStub,
                                                                 HandleShape shape,
                                                                 BaseProxyHandler *proxyHandler,
                                                                 HandleShape expandoShape,
                                                                 HandleObject holder,
                                                                 HandleShape holderShape,
                                                                 HandleFunction getter,
                                                                 uint32_t pcOffset)
  : ICMonitoredStub(kind, stubCode, firstMonitorStub),
    shape_(shape),
    proxyHandler_(proxyHandler),
    expandoShape_(expandoShape),
    holder_(holder),
    holderShape_(holderShape),
    getter_(getter),
    pcOffset_(pcOffset)
{ }

ICGetPropCallDOMProxyNativeCompiler::ICGetPropCallDOMProxyNativeCompiler(JSContext *cx,
                                                                         ICStub::Kind kind,
                                                                         ICStub *firstMonitorStub,
                                                                         Handle<ProxyObject *> proxy,
                                                                         HandleObject holder,
                                                                         HandleFunction getter,
                                                                         uint32_t pcOffset)
  : ICStubCompiler(cx, kind),
    firstMonitorStub_(firstMonitorStub),
    proxy_(cx, proxy),
    holder_(cx, holder),
    getter_(cx, getter),
    pcOffset_(pcOffset)
{
    JS_ASSERT(kind == ICStub::GetProp_CallDOMProxyNative ||
              kind == ICStub::GetProp_CallDOMProxyWithGenerationNative);
    JS_ASSERT(proxy_->handler()->family() == GetDOMProxyHandlerFamily());
}

ICGetProp_DOMProxyShadowed::ICGetProp_DOMProxyShadowed(JitCode *stubCode,
                                                       ICStub *firstMonitorStub,
                                                       HandleShape shape,
                                                       BaseProxyHandler *proxyHandler,
                                                       HandlePropertyName name,
                                                       uint32_t pcOffset)
  : ICMonitoredStub(ICStub::GetProp_DOMProxyShadowed, stubCode, firstMonitorStub),
    shape_(shape),
    proxyHandler_(proxyHandler),
    name_(name),
    pcOffset_(pcOffset)
{ }

//
// Rest_Fallback
//

static bool DoRestFallback(JSContext *cx, ICRest_Fallback *stub,
                           BaselineFrame *frame, MutableHandleValue res)
{
    unsigned numFormals = frame->numFormalArgs() - 1;
    unsigned numActuals = frame->numActualArgs();
    unsigned numRest = numActuals > numFormals ? numActuals - numFormals : 0;
    Value *rest = frame->argv() + numFormals;

    JSObject *obj = NewDenseCopiedArray(cx, numRest, rest, nullptr);
    if (!obj)
        return false;
    types::FixRestArgumentsType(cx, obj);
    res.setObject(*obj);
    return true;
}

typedef bool (*DoRestFallbackFn)(JSContext *, ICRest_Fallback *, BaselineFrame *,
                                 MutableHandleValue);
static const VMFunction DoRestFallbackInfo =
    FunctionInfo<DoRestFallbackFn>(DoRestFallback);

bool
ICRest_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
    masm.push(BaselineStubReg);

    return tailCallVM(DoRestFallbackInfo, masm);
}

} // namespace jit
} // namespace js