js/src/jit/BaselineIC.cpp

author       Michael Schloh von Bennewitz <michael@schloh.com>
date         Sat, 03 Jan 2015 20:18:00 +0100
branch       TOR_BUG_3246
changeset    7:129ffea94266
permissions  -rw-r--r--

Conditionally enable double key logic according to private browsing mode or
the privacy.thirdparty.isolate preference, and implement it in
GetCookieStringCommon and FindCookie, where it counts. Some reservations
remain about how to convince FindCookie callers to test the condition and
pass a nullptr when double key logic is disabled.

/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "jit/BaselineIC.h"

#include "mozilla/DebugOnly.h"
#include "mozilla/TemplateLib.h"

#include "jslibmath.h"
#include "jstypes.h"

#include "builtin/Eval.h"
#include "jit/BaselineDebugModeOSR.h"
#include "jit/BaselineHelpers.h"
#include "jit/BaselineJIT.h"
#include "jit/IonLinker.h"
#include "jit/IonSpewer.h"
#include "jit/Lowering.h"
#ifdef JS_ION_PERF
# include "jit/PerfSpewer.h"
#endif
#include "jit/VMFunctions.h"
#include "vm/Opcodes.h"

#include "jsboolinlines.h"
#include "jsscriptinlines.h"

#include "jit/IonFrames-inl.h"
#include "vm/Interpreter-inl.h"
#include "vm/ScopeObject-inl.h"
#include "vm/StringObject-inl.h"

using mozilla::DebugOnly;

namespace js {
namespace jit {

#ifdef DEBUG
void
FallbackICSpew(JSContext *cx, ICFallbackStub *stub, const char *fmt, ...)
{
    if (IonSpewEnabled(IonSpew_BaselineICFallback)) {
        RootedScript script(cx, GetTopIonJSScript(cx));
        jsbytecode *pc = stub->icEntry()->pc(script);

        char fmtbuf[100];
        va_list args;
        va_start(args, fmt);
        vsnprintf(fmtbuf, 100, fmt, args);
        va_end(args);

        IonSpew(IonSpew_BaselineICFallback,
                "Fallback hit for (%s:%d) (pc=%d,line=%d,uses=%d,stubs=%d): %s",
                script->filename(),
                script->lineno(),
                (int) script->pcToOffset(pc),
                PCToLineNumber(script, pc),
                script->getUseCount(),
                (int) stub->numOptimizedStubs(),
                fmtbuf);
    }
}

void
TypeFallbackICSpew(JSContext *cx, ICTypeMonitor_Fallback *stub, const char *fmt, ...)
{
    if (IonSpewEnabled(IonSpew_BaselineICFallback)) {
        RootedScript script(cx, GetTopIonJSScript(cx));
        jsbytecode *pc = stub->icEntry()->pc(script);

        char fmtbuf[100];
        va_list args;
        va_start(args, fmt);
        vsnprintf(fmtbuf, 100, fmt, args);
        va_end(args);

        IonSpew(IonSpew_BaselineICFallback,
                "Type monitor fallback hit for (%s:%d) (pc=%d,line=%d,uses=%d,stubs=%d): %s",
                script->filename(),
                script->lineno(),
                (int) script->pcToOffset(pc),
                PCToLineNumber(script, pc),
                script->getUseCount(),
                (int) stub->numOptimizedMonitorStubs(),
                fmtbuf);
    }
}

#else
#define FallbackICSpew(...)
#define TypeFallbackICSpew(...)
#endif


ICFallbackStub *
ICEntry::fallbackStub() const
{
    return firstStub()->getChainFallback();
}


ICStubConstIterator &
ICStubConstIterator::operator++()
{
    JS_ASSERT(currentStub_ != nullptr);
    currentStub_ = currentStub_->next();
    return *this;
}


ICStubIterator::ICStubIterator(ICFallbackStub *fallbackStub, bool end)
  : icEntry_(fallbackStub->icEntry()),
    fallbackStub_(fallbackStub),
    previousStub_(nullptr),
    currentStub_(end ? fallbackStub : icEntry_->firstStub()),
    unlinked_(false)
{ }

ICStubIterator &
ICStubIterator::operator++()
{
    JS_ASSERT(currentStub_->next() != nullptr);
    if (!unlinked_)
        previousStub_ = currentStub_;
    currentStub_ = currentStub_->next();
    unlinked_ = false;
    return *this;
}

void
ICStubIterator::unlink(JSContext *cx)
{
    JS_ASSERT(currentStub_->next() != nullptr);
    JS_ASSERT(currentStub_ != fallbackStub_);
    JS_ASSERT(!unlinked_);

    fallbackStub_->unlinkStub(cx->zone(), previousStub_, currentStub_);

    // Mark the current iterator position as unlinked, so operator++ works properly.
    unlinked_ = true;
}


void
ICStub::markCode(JSTracer *trc, const char *name)
{
    JitCode *stubJitCode = jitCode();
    MarkJitCodeUnbarriered(trc, &stubJitCode, name);
}

void
ICStub::updateCode(JitCode *code)
{
    // Write barrier on the old code.
#ifdef JSGC_INCREMENTAL
    JitCode::writeBarrierPre(jitCode());
#endif
    stubCode_ = code->raw();
}

/* static */ void
ICStub::trace(JSTracer *trc)
{
    markCode(trc, "baseline-stub-jitcode");

    // If the stub is a monitored fallback stub, then mark the monitor ICs hanging
    // off of that stub. We don't need to worry about the regular monitored stubs,
    // because the regular monitored stubs will always have a monitored fallback stub
    // that references the same stub chain.
    if (isMonitoredFallback()) {
        ICTypeMonitor_Fallback *lastMonStub = toMonitoredFallbackStub()->fallbackMonitorStub();
        for (ICStubConstIterator iter = lastMonStub->firstMonitorStub(); !iter.atEnd(); iter++) {
            JS_ASSERT_IF(iter->next() == nullptr, *iter == lastMonStub);
            iter->trace(trc);
        }
    }

    if (isUpdated()) {
        for (ICStubConstIterator iter = toUpdatedStub()->firstUpdateStub(); !iter.atEnd(); iter++) {
            JS_ASSERT_IF(iter->next() == nullptr, iter->isTypeUpdate_Fallback());
            iter->trace(trc);
        }
    }

    switch (kind()) {
      case ICStub::Call_Scripted: {
        ICCall_Scripted *callStub = toCall_Scripted();
        MarkScript(trc, &callStub->calleeScript(), "baseline-callscripted-callee");
        if (callStub->templateObject())
            MarkObject(trc, &callStub->templateObject(), "baseline-callscripted-template");
        break;
      }
      case ICStub::Call_Native: {
        ICCall_Native *callStub = toCall_Native();
        MarkObject(trc, &callStub->callee(), "baseline-callnative-callee");
        if (callStub->templateObject())
            MarkObject(trc, &callStub->templateObject(), "baseline-callnative-template");
        break;
      }
      case ICStub::GetElem_NativeSlot: {
        ICGetElem_NativeSlot *getElemStub = toGetElem_NativeSlot();
        MarkShape(trc, &getElemStub->shape(), "baseline-getelem-native-shape");
        MarkString(trc, &getElemStub->name(), "baseline-getelem-native-name");
        break;
      }
      case ICStub::GetElem_NativePrototypeSlot: {
        ICGetElem_NativePrototypeSlot *getElemStub = toGetElem_NativePrototypeSlot();
        MarkShape(trc, &getElemStub->shape(), "baseline-getelem-nativeproto-shape");
        MarkString(trc, &getElemStub->name(), "baseline-getelem-nativeproto-name");
        MarkObject(trc, &getElemStub->holder(), "baseline-getelem-nativeproto-holder");
        MarkShape(trc, &getElemStub->holderShape(), "baseline-getelem-nativeproto-holdershape");
        break;
      }
      case ICStub::GetElem_NativePrototypeCallNative:
      case ICStub::GetElem_NativePrototypeCallScripted: {
        ICGetElemNativePrototypeCallStub *callStub =
            reinterpret_cast<ICGetElemNativePrototypeCallStub *>(this);
        MarkShape(trc, &callStub->shape(), "baseline-getelem-nativeprotocall-shape");
        MarkString(trc, &callStub->name(), "baseline-getelem-nativeprotocall-name");
        MarkObject(trc, &callStub->getter(), "baseline-getelem-nativeprotocall-getter");
        MarkObject(trc, &callStub->holder(), "baseline-getelem-nativeprotocall-holder");
        MarkShape(trc, &callStub->holderShape(), "baseline-getelem-nativeprotocall-holdershape");
        break;
      }
      case ICStub::GetElem_Dense: {
        ICGetElem_Dense *getElemStub = toGetElem_Dense();
        MarkShape(trc, &getElemStub->shape(), "baseline-getelem-dense-shape");
        break;
      }
      case ICStub::GetElem_TypedArray: {
        ICGetElem_TypedArray *getElemStub = toGetElem_TypedArray();
        MarkShape(trc, &getElemStub->shape(), "baseline-getelem-typedarray-shape");
        break;
      }
      case ICStub::SetElem_Dense: {
        ICSetElem_Dense *setElemStub = toSetElem_Dense();
        MarkShape(trc, &setElemStub->shape(), "baseline-getelem-dense-shape");
        MarkTypeObject(trc, &setElemStub->type(), "baseline-setelem-dense-type");
        break;
      }
      case ICStub::SetElem_DenseAdd: {
        ICSetElem_DenseAdd *setElemStub = toSetElem_DenseAdd();
        MarkTypeObject(trc, &setElemStub->type(), "baseline-setelem-denseadd-type");

        JS_STATIC_ASSERT(ICSetElem_DenseAdd::MAX_PROTO_CHAIN_DEPTH == 4);

        switch (setElemStub->protoChainDepth()) {
          case 0: setElemStub->toImpl<0>()->traceShapes(trc); break;
          case 1: setElemStub->toImpl<1>()->traceShapes(trc); break;
          case 2: setElemStub->toImpl<2>()->traceShapes(trc); break;
          case 3: setElemStub->toImpl<3>()->traceShapes(trc); break;
          case 4: setElemStub->toImpl<4>()->traceShapes(trc); break;
          default: MOZ_ASSUME_UNREACHABLE("Invalid proto stub.");
        }
        break;
      }
      case ICStub::SetElem_TypedArray: {
        ICSetElem_TypedArray *setElemStub = toSetElem_TypedArray();
        MarkShape(trc, &setElemStub->shape(), "baseline-setelem-typedarray-shape");
        break;
      }
      case ICStub::TypeMonitor_SingleObject: {
        ICTypeMonitor_SingleObject *monitorStub = toTypeMonitor_SingleObject();
        MarkObject(trc, &monitorStub->object(), "baseline-monitor-singleobject");
        break;
      }
      case ICStub::TypeMonitor_TypeObject: {
        ICTypeMonitor_TypeObject *monitorStub = toTypeMonitor_TypeObject();
        MarkTypeObject(trc, &monitorStub->type(), "baseline-monitor-typeobject");
        break;
      }
      case ICStub::TypeUpdate_SingleObject: {
        ICTypeUpdate_SingleObject *updateStub = toTypeUpdate_SingleObject();
        MarkObject(trc, &updateStub->object(), "baseline-update-singleobject");
        break;
      }
      case ICStub::TypeUpdate_TypeObject: {
        ICTypeUpdate_TypeObject *updateStub = toTypeUpdate_TypeObject();
        MarkTypeObject(trc, &updateStub->type(), "baseline-update-typeobject");
        break;
      }
      case ICStub::Profiler_PushFunction: {
        ICProfiler_PushFunction *pushFunStub = toProfiler_PushFunction();
        MarkScript(trc, &pushFunStub->script(), "baseline-profilerpushfunction-stub-script");
        break;
      }
      case ICStub::GetName_Global: {
        ICGetName_Global *globalStub = toGetName_Global();
        MarkShape(trc, &globalStub->shape(), "baseline-global-stub-shape");
        break;
      }
      case ICStub::GetName_Scope0:
        static_cast<ICGetName_Scope<0>*>(this)->traceScopes(trc);
        break;
      case ICStub::GetName_Scope1:
        static_cast<ICGetName_Scope<1>*>(this)->traceScopes(trc);
        break;
      case ICStub::GetName_Scope2:
        static_cast<ICGetName_Scope<2>*>(this)->traceScopes(trc);
        break;
      case ICStub::GetName_Scope3:
        static_cast<ICGetName_Scope<3>*>(this)->traceScopes(trc);
        break;
      case ICStub::GetName_Scope4:
        static_cast<ICGetName_Scope<4>*>(this)->traceScopes(trc);
        break;
      case ICStub::GetName_Scope5:
        static_cast<ICGetName_Scope<5>*>(this)->traceScopes(trc);
        break;
      case ICStub::GetName_Scope6:
        static_cast<ICGetName_Scope<6>*>(this)->traceScopes(trc);
        break;
      case ICStub::GetIntrinsic_Constant: {
        ICGetIntrinsic_Constant *constantStub = toGetIntrinsic_Constant();
        gc::MarkValue(trc, &constantStub->value(), "baseline-getintrinsic-constant-value");
        break;
      }
      case ICStub::GetProp_Primitive: {
        ICGetProp_Primitive *propStub = toGetProp_Primitive();
        MarkShape(trc, &propStub->protoShape(), "baseline-getprop-primitive-stub-shape");
        break;
      }
      case ICStub::GetProp_Native: {
        ICGetProp_Native *propStub = toGetProp_Native();
        MarkShape(trc, &propStub->shape(), "baseline-getpropnative-stub-shape");
        break;
      }
      case ICStub::GetProp_NativePrototype: {
        ICGetProp_NativePrototype *propStub = toGetProp_NativePrototype();
        MarkShape(trc, &propStub->shape(), "baseline-getpropnativeproto-stub-shape");
        MarkObject(trc, &propStub->holder(), "baseline-getpropnativeproto-stub-holder");
        MarkShape(trc, &propStub->holderShape(), "baseline-getpropnativeproto-stub-holdershape");
        break;
      }
      case ICStub::GetProp_CallDOMProxyNative:
      case ICStub::GetProp_CallDOMProxyWithGenerationNative: {
        ICGetPropCallDOMProxyNativeStub *propStub;
        if (kind() == ICStub::GetProp_CallDOMProxyNative)
            propStub = toGetProp_CallDOMProxyNative();
        else
            propStub = toGetProp_CallDOMProxyWithGenerationNative();
        MarkShape(trc, &propStub->shape(), "baseline-getproplistbasenative-stub-shape");
        if (propStub->expandoShape()) {
            MarkShape(trc, &propStub->expandoShape(),
                      "baseline-getproplistbasenative-stub-expandoshape");
        }
        MarkObject(trc, &propStub->holder(), "baseline-getproplistbasenative-stub-holder");
        MarkShape(trc, &propStub->holderShape(), "baseline-getproplistbasenative-stub-holdershape");
        MarkObject(trc, &propStub->getter(), "baseline-getproplistbasenative-stub-getter");
        break;
      }
      case ICStub::GetProp_DOMProxyShadowed: {
        ICGetProp_DOMProxyShadowed *propStub = toGetProp_DOMProxyShadowed();
        MarkShape(trc, &propStub->shape(), "baseline-getproplistbaseshadowed-stub-shape");
        MarkString(trc, &propStub->name(), "baseline-getproplistbaseshadowed-stub-name");
        break;
      }
      case ICStub::GetProp_CallScripted: {
        ICGetProp_CallScripted *callStub = toGetProp_CallScripted();
        MarkShape(trc, &callStub->receiverShape(), "baseline-getpropcallscripted-stub-receivershape");
        MarkObject(trc, &callStub->holder(), "baseline-getpropcallscripted-stub-holder");
        MarkShape(trc, &callStub->holderShape(), "baseline-getpropcallscripted-stub-holdershape");
        MarkObject(trc, &callStub->getter(), "baseline-getpropcallscripted-stub-getter");
        break;
      }
      case ICStub::GetProp_CallNative: {
        ICGetProp_CallNative *callStub = toGetProp_CallNative();
        MarkObject(trc, &callStub->holder(), "baseline-getpropcallnative-stub-holder");
        MarkShape(trc, &callStub->holderShape(), "baseline-getpropcallnative-stub-holdershape");
        MarkObject(trc, &callStub->getter(), "baseline-getpropcallnative-stub-getter");
        break;
      }
      case ICStub::GetProp_CallNativePrototype: {
        ICGetProp_CallNativePrototype *callStub = toGetProp_CallNativePrototype();
        MarkShape(trc, &callStub->receiverShape(), "baseline-getpropcallnativeproto-stub-receivershape");
        MarkObject(trc, &callStub->holder(), "baseline-getpropcallnativeproto-stub-holder");
        MarkShape(trc, &callStub->holderShape(), "baseline-getpropcallnativeproto-stub-holdershape");
        MarkObject(trc, &callStub->getter(), "baseline-getpropcallnativeproto-stub-getter");
        break;
      }
      case ICStub::SetProp_Native: {
        ICSetProp_Native *propStub = toSetProp_Native();
        MarkShape(trc, &propStub->shape(), "baseline-setpropnative-stub-shape");
        MarkTypeObject(trc, &propStub->type(), "baseline-setpropnative-stub-type");
        break;
      }
      case ICStub::SetProp_NativeAdd: {
        ICSetProp_NativeAdd *propStub = toSetProp_NativeAdd();
        MarkTypeObject(trc, &propStub->type(), "baseline-setpropnativeadd-stub-type");
        MarkShape(trc, &propStub->newShape(), "baseline-setpropnativeadd-stub-newshape");
        JS_STATIC_ASSERT(ICSetProp_NativeAdd::MAX_PROTO_CHAIN_DEPTH == 4);
        switch (propStub->protoChainDepth()) {
          case 0: propStub->toImpl<0>()->traceShapes(trc); break;
          case 1: propStub->toImpl<1>()->traceShapes(trc); break;
          case 2: propStub->toImpl<2>()->traceShapes(trc); break;
          case 3: propStub->toImpl<3>()->traceShapes(trc); break;
          case 4: propStub->toImpl<4>()->traceShapes(trc); break;
          default: MOZ_ASSUME_UNREACHABLE("Invalid proto stub.");
        }
        break;
      }
      case ICStub::SetProp_CallScripted: {
        ICSetProp_CallScripted *callStub = toSetProp_CallScripted();
        MarkShape(trc, &callStub->shape(), "baseline-setpropcallscripted-stub-shape");
        MarkObject(trc, &callStub->holder(), "baseline-setpropcallscripted-stub-holder");
        MarkShape(trc, &callStub->holderShape(), "baseline-setpropcallscripted-stub-holdershape");
        MarkObject(trc, &callStub->setter(), "baseline-setpropcallscripted-stub-setter");
        break;
      }
      case ICStub::SetProp_CallNative: {
        ICSetProp_CallNative *callStub = toSetProp_CallNative();
        MarkShape(trc, &callStub->shape(), "baseline-setpropcallnative-stub-shape");
        MarkObject(trc, &callStub->holder(), "baseline-setpropcallnative-stub-holder");
        MarkShape(trc, &callStub->holderShape(), "baseline-setpropcallnative-stub-holdershape");
        MarkObject(trc, &callStub->setter(), "baseline-setpropcallnative-stub-setter");
        break;
      }
      case ICStub::NewArray_Fallback: {
        ICNewArray_Fallback *stub = toNewArray_Fallback();
        MarkObject(trc, &stub->templateObject(), "baseline-newarray-template");
        break;
      }
      case ICStub::NewObject_Fallback: {
        ICNewObject_Fallback *stub = toNewObject_Fallback();
        MarkObject(trc, &stub->templateObject(), "baseline-newobject-template");
        break;
      }
      case ICStub::Rest_Fallback: {
        ICRest_Fallback *stub = toRest_Fallback();
        MarkObject(trc, &stub->templateObject(), "baseline-rest-template");
        break;
      }
      default:
        break;
    }
}

void
ICFallbackStub::unlinkStub(Zone *zone, ICStub *prev, ICStub *stub)
{
    JS_ASSERT(stub->next());

    // If stub is the last optimized stub, update lastStubPtrAddr.
    if (stub->next() == this) {
        JS_ASSERT(lastStubPtrAddr_ == stub->addressOfNext());
        if (prev)
            lastStubPtrAddr_ = prev->addressOfNext();
        else
            lastStubPtrAddr_ = icEntry()->addressOfFirstStub();
        *lastStubPtrAddr_ = this;
    } else {
        if (prev) {
            JS_ASSERT(prev->next() == stub);
            prev->setNext(stub->next());
        } else {
            JS_ASSERT(icEntry()->firstStub() == stub);
            icEntry()->setFirstStub(stub->next());
        }
    }

    JS_ASSERT(numOptimizedStubs_ > 0);
    numOptimizedStubs_--;

    if (zone->needsBarrier()) {
        // We are removing edges from ICStub to gcthings. Perform one final trace
        // of the stub for incremental GC, as it must know about those edges.
        stub->trace(zone->barrierTracer());
    }

    if (ICStub::CanMakeCalls(stub->kind()) && stub->isMonitored()) {
        // This stub can make calls so we can return to it if it's on the stack.
        // We just have to reset its firstMonitorStub_ field to avoid a stale
        // pointer when purgeOptimizedStubs destroys all optimized monitor
        // stubs (unlinked stubs won't be updated).
        ICTypeMonitor_Fallback *monitorFallback = toMonitoredFallbackStub()->fallbackMonitorStub();
        stub->toMonitoredStub()->resetFirstMonitorStub(monitorFallback);
    }

#ifdef DEBUG
    // Poison stub code to ensure we don't call this stub again. However, if this
    // stub can make calls, a pointer to it may be stored in a stub frame on the
    // stack, so we can't touch the stubCode_ or GC will crash when marking this
    // pointer.
    if (!ICStub::CanMakeCalls(stub->kind()))
        stub->stubCode_ = (uint8_t *)0xbad;
#endif
}

void
ICFallbackStub::unlinkStubsWithKind(JSContext *cx, ICStub::Kind kind)
{
    for (ICStubIterator iter = beginChain(); !iter.atEnd(); iter++) {
        if (iter->kind() == kind)
            iter.unlink(cx);
    }
}

void
ICTypeMonitor_Fallback::resetMonitorStubChain(Zone *zone)
{
    if (zone->needsBarrier()) {
        // We are removing edges from monitored stubs to gcthings (JitCode).
        // Perform one final trace of all monitor stubs for incremental GC,
        // as it must know about those edges.
        for (ICStub *s = firstMonitorStub_; !s->isTypeMonitor_Fallback(); s = s->next())
            s->trace(zone->barrierTracer());
    }

    firstMonitorStub_ = this;
    numOptimizedMonitorStubs_ = 0;

    if (hasFallbackStub_) {
        lastMonitorStubPtrAddr_ = nullptr;

        // Reset firstMonitorStub_ field of all monitored stubs.
        for (ICStubConstIterator iter = mainFallbackStub_->beginChainConst();
             !iter.atEnd(); iter++)
        {
            if (!iter->isMonitored())
                continue;
            iter->toMonitoredStub()->resetFirstMonitorStub(this);
        }
    } else {
        icEntry_->setFirstStub(this);
        lastMonitorStubPtrAddr_ = icEntry_->addressOfFirstStub();
    }
}

ICMonitoredStub::ICMonitoredStub(Kind kind, JitCode *stubCode, ICStub *firstMonitorStub)
  : ICStub(kind, ICStub::Monitored, stubCode),
    firstMonitorStub_(firstMonitorStub)
{
    // If the first monitored stub is a ICTypeMonitor_Fallback stub, then
    // double check that _its_ firstMonitorStub is the same as this one.
    JS_ASSERT_IF(firstMonitorStub_->isTypeMonitor_Fallback(),
                 firstMonitorStub_->toTypeMonitor_Fallback()->firstMonitorStub() ==
                     firstMonitorStub_);
}

bool
ICMonitoredFallbackStub::initMonitoringChain(JSContext *cx, ICStubSpace *space)
{
    JS_ASSERT(fallbackMonitorStub_ == nullptr);

    ICTypeMonitor_Fallback::Compiler compiler(cx, this);
    ICTypeMonitor_Fallback *stub = compiler.getStub(space);
    if (!stub)
        return false;
    fallbackMonitorStub_ = stub;
    return true;
}

bool
ICMonitoredFallbackStub::addMonitorStubForValue(JSContext *cx, JSScript *script, HandleValue val)
{
    return fallbackMonitorStub_->addMonitorStubForValue(cx, script, val);
}

bool
ICUpdatedStub::initUpdatingChain(JSContext *cx, ICStubSpace *space)
{
    JS_ASSERT(firstUpdateStub_ == nullptr);

    ICTypeUpdate_Fallback::Compiler compiler(cx);
    ICTypeUpdate_Fallback *stub = compiler.getStub(space);
    if (!stub)
        return false;

    firstUpdateStub_ = stub;
    return true;
}

JitCode *
ICStubCompiler::getStubCode()
{
    JitCompartment *comp = cx->compartment()->jitCompartment();

    // Check for existing cached stubcode.
    uint32_t stubKey = getKey();
    JitCode *stubCode = comp->getStubCode(stubKey);
    if (stubCode)
        return stubCode;

    // Compile new stubcode.
    IonContext ictx(cx, nullptr);
    MacroAssembler masm;
#ifdef JS_CODEGEN_ARM
    masm.setSecondScratchReg(BaselineSecondScratchReg);
#endif

    if (!generateStubCode(masm))
        return nullptr;
    Linker linker(masm);
    AutoFlushICache afc("getStubCode");
    Rooted<JitCode *> newStubCode(cx, linker.newCode<CanGC>(cx, JSC::BASELINE_CODE));
    if (!newStubCode)
        return nullptr;

    // After generating code, run postGenerateStubCode()
    if (!postGenerateStubCode(masm, newStubCode))
        return nullptr;

    // All barriers are emitted off-by-default, enable them if needed.
    if (cx->zone()->needsBarrier())
        newStubCode->togglePreBarriers(true);

    // Cache newly compiled stubcode.
    if (!comp->putStubCode(stubKey, newStubCode))
        return nullptr;

    JS_ASSERT(entersStubFrame_ == ICStub::CanMakeCalls(kind));

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(newStubCode, "BaselineIC");
#endif

    return newStubCode;
}

bool
ICStubCompiler::tailCallVM(const VMFunction &fun, MacroAssembler &masm)
{
    JitCode *code = cx->runtime()->jitRuntime()->getVMWrapper(fun);
    if (!code)
        return false;

    uint32_t argSize = fun.explicitStackSlots() * sizeof(void *);
    EmitTailCallVM(code, masm, argSize);
    return true;
}

bool
ICStubCompiler::callVM(const VMFunction &fun, MacroAssembler &masm)
{
    JitCode *code = cx->runtime()->jitRuntime()->getVMWrapper(fun);
    if (!code)
        return false;

    EmitCallVM(code, masm);
    return true;
}

bool
ICStubCompiler::callTypeUpdateIC(MacroAssembler &masm, uint32_t objectOffset)
{
    JitCode *code = cx->runtime()->jitRuntime()->getVMWrapper(DoTypeUpdateFallbackInfo);
    if (!code)
        return false;

    EmitCallTypeUpdateIC(masm, code, objectOffset);
    return true;
}

void
ICStubCompiler::enterStubFrame(MacroAssembler &masm, Register scratch)
{
    EmitEnterStubFrame(masm, scratch);
#ifdef DEBUG
    entersStubFrame_ = true;
#endif
}

void
ICStubCompiler::leaveStubFrame(MacroAssembler &masm, bool calledIntoIon)
{
    JS_ASSERT(entersStubFrame_);
    EmitLeaveStubFrame(masm, calledIntoIon);
}

void
ICStubCompiler::leaveStubFrameHead(MacroAssembler &masm, bool calledIntoIon)
{
    JS_ASSERT(entersStubFrame_);
    EmitLeaveStubFrameHead(masm, calledIntoIon);
}

void
ICStubCompiler::leaveStubFrameCommonTail(MacroAssembler &masm)
{
    JS_ASSERT(entersStubFrame_);
    EmitLeaveStubFrameCommonTail(masm);
}

void
ICStubCompiler::guardProfilingEnabled(MacroAssembler &masm, Register scratch, Label *skip)
{
    // This should only be called from the following stubs.
    JS_ASSERT(kind == ICStub::Call_Scripted ||
              kind == ICStub::Call_AnyScripted ||
              kind == ICStub::Call_Native ||
              kind == ICStub::Call_ScriptedApplyArray ||
              kind == ICStub::Call_ScriptedApplyArguments ||
              kind == ICStub::Call_ScriptedFunCall ||
              kind == ICStub::GetProp_CallScripted ||
              kind == ICStub::GetProp_CallNative ||
              kind == ICStub::GetProp_CallNativePrototype ||
              kind == ICStub::GetProp_CallDOMProxyNative ||
              kind == ICStub::GetElem_NativePrototypeCallNative ||
              kind == ICStub::GetElem_NativePrototypeCallScripted ||
              kind == ICStub::GetProp_CallDOMProxyWithGenerationNative ||
              kind == ICStub::GetProp_DOMProxyShadowed ||
              kind == ICStub::SetProp_CallScripted ||
              kind == ICStub::SetProp_CallNative);

    // Guard on bit in frame that indicates if the SPS frame was pushed in the first
    // place. This code is expected to be called from within a stub that has already
    // entered a stub frame.
    JS_ASSERT(entersStubFrame_);
    masm.loadPtr(Address(BaselineFrameReg, 0), scratch);
    masm.branchTest32(Assembler::Zero,
                      Address(scratch, BaselineFrame::reverseOffsetOfFlags()),
                      Imm32(BaselineFrame::HAS_PUSHED_SPS_FRAME),
                      skip);

    // Check if profiling is enabled
    uint32_t *enabledAddr = cx->runtime()->spsProfiler.addressOfEnabled();
    masm.branch32(Assembler::Equal, AbsoluteAddress(enabledAddr), Imm32(0), skip);
}

void
ICStubCompiler::emitProfilingUpdate(MacroAssembler &masm, Register pcIdx, Register scratch,
                                    uint32_t stubPcOffset)
{
    Label skipProfilerUpdate;

    // Check if profiling is enabled.
    guardProfilingEnabled(masm, scratch, &skipProfilerUpdate);

    // Update profiling entry before leaving function.
    masm.load32(Address(BaselineStubReg, stubPcOffset), pcIdx);
    masm.spsUpdatePCIdx(&cx->runtime()->spsProfiler, pcIdx, scratch);

    masm.bind(&skipProfilerUpdate);
}

void
ICStubCompiler::emitProfilingUpdate(MacroAssembler &masm, GeneralRegisterSet regs,
                                    uint32_t stubPcOffset)
{
    emitProfilingUpdate(masm, regs.takeAny(), regs.takeAny(), stubPcOffset);
}

#ifdef JSGC_GENERATIONAL
inline bool
ICStubCompiler::emitPostWriteBarrierSlot(MacroAssembler &masm, Register obj, ValueOperand val,
                                         Register scratch, GeneralRegisterSet saveRegs)
{
    Nursery &nursery = cx->runtime()->gcNursery;

    Label skipBarrier;
    masm.branchTestObject(Assembler::NotEqual, val, &skipBarrier);

    masm.branchPtrInNurseryRange(obj, scratch, &skipBarrier);

    Register valReg = masm.extractObject(val, scratch);
    masm.branchPtr(Assembler::Below, valReg, ImmWord(nursery.start()), &skipBarrier);
    masm.branchPtr(Assembler::AboveOrEqual, valReg, ImmWord(nursery.heapEnd()), &skipBarrier);

    // void PostWriteBarrier(JSRuntime *rt, JSObject *obj);
#if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_MIPS)
    saveRegs.add(BaselineTailCallReg);
#endif
    saveRegs = GeneralRegisterSet::Intersect(saveRegs, GeneralRegisterSet::Volatile());
    masm.PushRegsInMask(saveRegs);
    masm.setupUnalignedABICall(2, scratch);
    masm.movePtr(ImmPtr(cx->runtime()), scratch);
    masm.passABIArg(scratch);
    masm.passABIArg(obj);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, PostWriteBarrier));
    masm.PopRegsInMask(saveRegs);

    masm.bind(&skipBarrier);
    return true;
}
#endif // JSGC_GENERATIONAL

//
// UseCount_Fallback
//
static bool
IsTopFrameConstructing(JSContext *cx)
{
    JS_ASSERT(cx->currentlyRunningInJit());
    JitActivationIterator activations(cx->runtime());
    JitFrameIterator iter(activations);
    JS_ASSERT(iter.type() == JitFrame_Exit);

    ++iter;
    JS_ASSERT(iter.type() == JitFrame_BaselineStub);

    ++iter;
    JS_ASSERT(iter.isBaselineJS());

    return iter.isConstructing();
}

static bool
EnsureCanEnterIon(JSContext *cx, ICUseCount_Fallback *stub, BaselineFrame *frame,
                  HandleScript script, jsbytecode *pc, void **jitcodePtr)
{
    JS_ASSERT(jitcodePtr);
    JS_ASSERT(!*jitcodePtr);

    bool isLoopEntry = (JSOp(*pc) == JSOP_LOOPENTRY);

    bool isConstructing = IsTopFrameConstructing(cx);
    MethodStatus stat;
    if (isLoopEntry) {
        JS_ASSERT(LoopEntryCanIonOsr(pc));
        IonSpew(IonSpew_BaselineOSR, " Compile at loop entry!");
        stat = CanEnterAtBranch(cx, script, frame, pc, isConstructing);
    } else if (frame->isFunctionFrame()) {
        IonSpew(IonSpew_BaselineOSR, " Compile function from top for later entry!");
        stat = CompileFunctionForBaseline(cx, script, frame, isConstructing);
    } else {
        return true;
    }

    if (stat == Method_Error) {
        IonSpew(IonSpew_BaselineOSR, " Compile with Ion errored!");
        return false;
    }

    if (stat == Method_CantCompile)
        IonSpew(IonSpew_BaselineOSR, " Can't compile with Ion!");
    else if (stat == Method_Skipped)
        IonSpew(IonSpew_BaselineOSR, " Skipped compile with Ion!");
    else if (stat == Method_Compiled)
        IonSpew(IonSpew_BaselineOSR, " Compiled with Ion!");
    else
        MOZ_ASSUME_UNREACHABLE("Invalid MethodStatus!");

    // Failed to compile. Reset use count and return.
    if (stat != Method_Compiled) {
        // TODO: If stat == Method_CantCompile, insert stub that just skips the useCount
        // entirely, instead of resetting it.
        bool bailoutExpected = script->hasIonScript() && script->ionScript()->bailoutExpected();
        if (stat == Method_CantCompile || bailoutExpected) {
            IonSpew(IonSpew_BaselineOSR, " Reset UseCount cantCompile=%s bailoutExpected=%s!",
                    stat == Method_CantCompile ? "yes" : "no",
                    bailoutExpected ? "yes" : "no");
            script->resetUseCount();
        }
        return true;
    }

    if (isLoopEntry) {
        IonScript *ion = script->ionScript();
        JS_ASSERT(cx->runtime()->spsProfiler.enabled() == ion->hasSPSInstrumentation());
        JS_ASSERT(ion->osrPc() == pc);

        // If the baseline frame's SPS handling doesn't match up with the Ion code's SPS
        // handling, don't OSR.
        if (frame->hasPushedSPSFrame() != ion->hasSPSInstrumentation()) {
            IonSpew(IonSpew_BaselineOSR, " OSR crosses SPS handling boundaries, skipping!");
            return true;
        }

        IonSpew(IonSpew_BaselineOSR, " OSR possible!");
        *jitcodePtr = ion->method()->raw() + ion->osrEntryOffset();
    }

    return true;
}


//
// The following data is kept in a temporary heap-allocated buffer, stored in
// JitRuntime (high memory addresses at top, low at bottom):
//
//     +----->+=================================+  --   <---- High Address
//     |      |                                 |   |
//     |      |     ...BaselineFrame...         |   |-- Copy of BaselineFrame + stack values
//     |      |                                 |   |
//     |      +---------------------------------+   |
//     |      |                                 |   |
//     |      |     ...Locals/Stack...          |   |
//     |      |                                 |   |
//     |      +=================================+  --
//     |      |     Padding(Maybe Empty)        |
//     |      +=================================+  --
//     +------|-- baselineFrame                 |   |-- IonOsrTempData
//            |   jitcode                       |   |
//            +=================================+  --   <---- Low Address
//
// A pointer to the IonOsrTempData is returned.

struct IonOsrTempData
{
    void *jitcode;
    uint8_t *baselineFrame;
};

static IonOsrTempData *
PrepareOsrTempData(JSContext *cx, ICUseCount_Fallback *stub, BaselineFrame *frame,
                   HandleScript script, jsbytecode *pc, void *jitcode)
{
    size_t numLocalsAndStackVals = frame->numValueSlots();

    // Calculate the amount of space to allocate:
    //      BaselineFrame space:
    //          (sizeof(Value) * (numLocals + numStackVals))
    //        + sizeof(BaselineFrame)
    //
    //      IonOsrTempData space:
    //          sizeof(IonOsrTempData)

    size_t frameSpace = sizeof(BaselineFrame) + sizeof(Value) * numLocalsAndStackVals;
    size_t ionOsrTempDataSpace = sizeof(IonOsrTempData);

    size_t totalSpace = AlignBytes(frameSpace, sizeof(Value)) +
                        AlignBytes(ionOsrTempDataSpace, sizeof(Value));

    IonOsrTempData *info = (IonOsrTempData *)cx->runtime()->getJitRuntime(cx)->allocateOsrTempData(totalSpace);
    if (!info)
        return nullptr;

    memset(info, 0, totalSpace);

    info->jitcode = jitcode;

    // Copy the BaselineFrame + local/stack Values to the buffer. Arguments and
    // |this| are not copied but left on the stack: the Baseline and Ion frame
    // share the same frame prefix and Ion won't clobber these values. Note
    // that info->baselineFrame will point to the *end* of the frame data, like
    // the frame pointer register in baseline frames.
    uint8_t *frameStart = (uint8_t *)info + AlignBytes(ionOsrTempDataSpace, sizeof(Value));
    info->baselineFrame = frameStart + frameSpace;

    memcpy(frameStart, (uint8_t *)frame - numLocalsAndStackVals * sizeof(Value), frameSpace);

    IonSpew(IonSpew_BaselineOSR, "Allocated IonOsrTempData at %p", (void *) info);
    IonSpew(IonSpew_BaselineOSR, "Jitcode is %p", info->jitcode);

    // All done.
    return info;
}

static bool
DoUseCountFallback(JSContext *cx, ICUseCount_Fallback *stub, BaselineFrame *frame,
                   IonOsrTempData **infoPtr)
{
    JS_ASSERT(infoPtr);
    *infoPtr = nullptr;

    // A TI OOM will disable TI and Ion.
    if (!jit::IsIonEnabled(cx))
        return true;

    RootedScript script(cx, frame->script());
    jsbytecode *pc = stub->icEntry()->pc(script);
    bool isLoopEntry = JSOp(*pc) == JSOP_LOOPENTRY;

    JS_ASSERT(!isLoopEntry || LoopEntryCanIonOsr(pc));

    FallbackICSpew(cx, stub, "UseCount(%d)", isLoopEntry ? int(script->pcToOffset(pc)) : int(-1));

    if (!script->canIonCompile()) {
        // TODO: ASSERT that an ion-compilation-disabled checker stub doesn't exist.
        // TODO: Clear all optimized stubs.
        // TODO: Add an ion-compilation-disabled checker IC stub.
        script->resetUseCount();
        return true;
    }

    JS_ASSERT(!script->isIonCompilingOffThread());

    // If Ion script exists, but PC is not at a loop entry, then Ion will be entered for
    // this script at an appropriate LOOPENTRY or the next time this function is called.
    if (script->hasIonScript() && !isLoopEntry) {
        IonSpew(IonSpew_BaselineOSR, "IonScript exists, but not at loop entry!");
        // TODO: ASSERT that an ion-script-already-exists checker stub doesn't exist.
        // TODO: Clear all optimized stubs.
        // TODO: Add an ion-script-already-exists checker stub.
        return true;
    }

    // Ensure that Ion-compiled code is available.
    IonSpew(IonSpew_BaselineOSR,
            "UseCount for %s:%d reached %d at pc %p, trying to switch to Ion!",
            script->filename(), script->lineno(), (int) script->getUseCount(), (void *) pc);
    void *jitcode = nullptr;
    if (!EnsureCanEnterIon(cx, stub, frame, script, pc, &jitcode))
        return false;

    // Jitcode should not be set here unless we are at a loop entry.
    JS_ASSERT_IF(!isLoopEntry, !jitcode);
    if (!jitcode)
        return true;

    // Prepare the temporary heap copy of the fake InterpreterFrame and actual args list.
    IonSpew(IonSpew_BaselineOSR, "Got jitcode. Preparing for OSR into ion.");
    IonOsrTempData *info = PrepareOsrTempData(cx, stub, frame, script, pc, jitcode);
    if (!info)
        return false;
    *infoPtr = info;

    return true;
}

typedef bool (*DoUseCountFallbackFn)(JSContext *, ICUseCount_Fallback *, BaselineFrame *frame,
                                     IonOsrTempData **infoPtr);
static const VMFunction DoUseCountFallbackInfo =
    FunctionInfo<DoUseCountFallbackFn>(DoUseCountFallback);

bool
ICUseCount_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    // enterStubFrame is going to clobber the BaselineFrameReg, save it in R0.scratchReg()
    // first.
    masm.movePtr(BaselineFrameReg, R0.scratchReg());

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, R1.scratchReg());

    Label noCompiledCode;
    // Call DoUseCountFallback to compile/check-for Ion-compiled function
    {
        // Push IonOsrTempData pointer storage
        masm.subPtr(Imm32(sizeof(void *)), BaselineStackReg);
        masm.push(BaselineStackReg);

        // Push IonJSFrameLayout pointer.
        masm.loadBaselineFramePtr(R0.scratchReg(), R0.scratchReg());
        masm.push(R0.scratchReg());

        // Push stub pointer.
        masm.push(BaselineStubReg);

        if (!callVM(DoUseCountFallbackInfo, masm))
            return false;

        // Pop IonOsrTempData pointer.
        masm.pop(R0.scratchReg());

        leaveStubFrame(masm);

        // If no JitCode was found, just exit the IC.
        masm.branchPtr(Assembler::Equal, R0.scratchReg(), ImmPtr(nullptr), &noCompiledCode);
    }

    // Get a scratch register.
    GeneralRegisterSet regs(availableGeneralRegs(0));
    Register osrDataReg = R0.scratchReg();
    regs.take(osrDataReg);
    regs.takeUnchecked(OsrFrameReg);

    Register scratchReg = regs.takeAny();

    // At this point, stack looks like:
    //      +-> [...Calling-Frame...]
    //      |   [...Actual-Args/ThisV/ArgCount/Callee...]
    //      |   [Descriptor]
    //      |   [Return-Addr]
    //      +---[Saved-FramePtr]            <-- BaselineFrameReg points here.
    //          [...Baseline-Frame...]

    // Restore the stack pointer to point to the saved frame pointer.
    masm.movePtr(BaselineFrameReg, BaselineStackReg);

    // Discard saved frame pointer, so that the return address is on top of
    // the stack.
    masm.pop(scratchReg);

    // Jump into Ion.
    masm.loadPtr(Address(osrDataReg, offsetof(IonOsrTempData, jitcode)), scratchReg);
    masm.loadPtr(Address(osrDataReg, offsetof(IonOsrTempData, baselineFrame)), OsrFrameReg);
    masm.jump(scratchReg);

    // No jitcode available, do nothing.
    masm.bind(&noCompiledCode);
    EmitReturnFromIC(masm);
    return true;
}

//
// ICProfile_Fallback
//

static bool
DoProfilerFallback(JSContext *cx, BaselineFrame *frame, ICProfiler_Fallback *stub)
{
    RootedScript script(cx, frame->script());
    RootedFunction func(cx, frame->maybeFun());
    mozilla::DebugOnly<ICEntry *> icEntry = stub->icEntry();

    FallbackICSpew(cx, stub, "Profiler");

    SPSProfiler *profiler = &cx->runtime()->spsProfiler;

    // Manually enter SPS this time.
    JS_ASSERT(profiler->enabled());
    if (!cx->runtime()->spsProfiler.enter(script, func))
        return false;
    frame->setPushedSPSFrame();

    // Unlink any existing PushFunction stub (which may hold stale 'const char *' to
    // the profile string).
    JS_ASSERT_IF(icEntry->firstStub() != stub,
                 icEntry->firstStub()->isProfiler_PushFunction() &&
                 icEntry->firstStub()->next() == stub);
    stub->unlinkStubsWithKind(cx, ICStub::Profiler_PushFunction);
    JS_ASSERT(icEntry->firstStub() == stub);

    // Generate the string to use to identify this stack frame.
    const char *string = profiler->profileString(script, func);
    if (string == nullptr)
        return false;

    IonSpew(IonSpew_BaselineIC, " Generating Profiler_PushFunction stub for %s:%d",
            script->filename(), script->lineno());

    // Create a new optimized stub.
    ICProfiler_PushFunction::Compiler compiler(cx, string, script);
    ICStub *optStub = compiler.getStub(compiler.getStubSpace(script));
    if (!optStub)
        return false;
    stub->addNewStub(optStub);

    return true;
}

typedef bool (*DoProfilerFallbackFn)(JSContext *, BaselineFrame *frame, ICProfiler_Fallback *);
static const VMFunction DoProfilerFallbackInfo =
    FunctionInfo<DoProfilerFallbackFn>(DoProfilerFallback);

bool
ICProfiler_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    masm.push(BaselineStubReg);                                    // Push stub.
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());  // Push frame.

    return tailCallVM(DoProfilerFallbackInfo, masm);
}

bool
ICProfiler_PushFunction::Compiler::generateStubCode(MacroAssembler &masm)
{

    Register scratch = R0.scratchReg();
    Register scratch2 = R1.scratchReg();

    // Profiling should be enabled if we ever reach here.
#ifdef DEBUG
    Label spsEnabled;
    uint32_t *enabledAddr = cx->runtime()->spsProfiler.addressOfEnabled();
    masm.branch32(Assembler::NotEqual, AbsoluteAddress(enabledAddr), Imm32(0), &spsEnabled);
    masm.assumeUnreachable("Profiling should have been enabled.");
    masm.bind(&spsEnabled);
#endif

    // Push SPS entry.
    masm.spsPushFrame(&cx->runtime()->spsProfiler,
                      Address(BaselineStubReg, ICProfiler_PushFunction::offsetOfStr()),
                      Address(BaselineStubReg, ICProfiler_PushFunction::offsetOfScript()),
                      scratch,
                      scratch2);

    // Mark frame as having profiler entry pushed.
    Address flagsOffset(BaselineFrameReg, BaselineFrame::reverseOffsetOfFlags());
    masm.or32(Imm32(BaselineFrame::HAS_PUSHED_SPS_FRAME), flagsOffset);

    EmitReturnFromIC(masm);

    return true;
}

michael@0 1172 //
michael@0 1173 // TypeMonitor_Fallback
michael@0 1174 //
michael@0 1175
michael@0 1176 bool
michael@0 1177 ICTypeMonitor_Fallback::addMonitorStubForValue(JSContext *cx, JSScript *script, HandleValue val)
michael@0 1178 {
michael@0 1179 bool wasDetachedMonitorChain = lastMonitorStubPtrAddr_ == nullptr;
michael@0 1180 JS_ASSERT_IF(wasDetachedMonitorChain, numOptimizedMonitorStubs_ == 0);
michael@0 1181
michael@0 1182 if (numOptimizedMonitorStubs_ >= MAX_OPTIMIZED_STUBS) {
michael@0 1183 // TODO: if the TypeSet becomes unknown or has the AnyObject type,
michael@0 1184 // replace stubs with a single stub to handle these.
michael@0 1185 return true;
michael@0 1186 }
michael@0 1187
michael@0 1188 if (val.isPrimitive()) {
michael@0 1189 JS_ASSERT(!val.isMagic());
michael@0 1190 JSValueType type = val.isDouble() ? JSVAL_TYPE_DOUBLE : val.extractNonDoubleType();
michael@0 1191
michael@0 1192 // Check for existing TypeMonitor stub.
michael@0 1193 ICTypeMonitor_PrimitiveSet *existingStub = nullptr;
michael@0 1194 for (ICStubConstIterator iter = firstMonitorStub(); !iter.atEnd(); iter++) {
michael@0 1195 if (iter->isTypeMonitor_PrimitiveSet()) {
michael@0 1196 existingStub = iter->toTypeMonitor_PrimitiveSet();
michael@0 1197 if (existingStub->containsType(type))
michael@0 1198 return true;
michael@0 1199 }
michael@0 1200 }
michael@0 1201
michael@0 1202 ICTypeMonitor_PrimitiveSet::Compiler compiler(cx, existingStub, type);
michael@0 1203 ICStub *stub = existingStub ? compiler.updateStub()
michael@0 1204 : compiler.getStub(compiler.getStubSpace(script));
michael@0 1205 if (!stub) {
michael@0 1206 js_ReportOutOfMemory(cx);
michael@0 1207 return false;
michael@0 1208 }
michael@0 1209
michael@0 1210 IonSpew(IonSpew_BaselineIC, " %s TypeMonitor stub %p for primitive type %d",
michael@0 1211 existingStub ? "Modified existing" : "Created new", stub, type);
michael@0 1212
michael@0 1213 if (!existingStub) {
michael@0 1214 JS_ASSERT(!hasStub(TypeMonitor_PrimitiveSet));
michael@0 1215 addOptimizedMonitorStub(stub);
michael@0 1216 }
michael@0 1217
michael@0 1218 } else if (val.toObject().hasSingletonType()) {
michael@0 1219 RootedObject obj(cx, &val.toObject());
michael@0 1220
michael@0 1221 // Check for existing TypeMonitor stub.
michael@0 1222 for (ICStubConstIterator iter = firstMonitorStub(); !iter.atEnd(); iter++) {
michael@0 1223 if (iter->isTypeMonitor_SingleObject() &&
michael@0 1224 iter->toTypeMonitor_SingleObject()->object() == obj)
michael@0 1225 {
michael@0 1226 return true;
michael@0 1227 }
michael@0 1228 }
michael@0 1229
michael@0 1230 ICTypeMonitor_SingleObject::Compiler compiler(cx, obj);
michael@0 1231 ICStub *stub = compiler.getStub(compiler.getStubSpace(script));
michael@0 1232 if (!stub) {
michael@0 1233 js_ReportOutOfMemory(cx);
michael@0 1234 return false;
michael@0 1235 }
michael@0 1236
michael@0 1237 IonSpew(IonSpew_BaselineIC, " Added TypeMonitor stub %p for singleton %p",
michael@0 1238 stub, obj.get());
michael@0 1239
michael@0 1240 addOptimizedMonitorStub(stub);
michael@0 1241
michael@0 1242 } else {
michael@0 1243 RootedTypeObject type(cx, val.toObject().type());
michael@0 1244
michael@0 1245 // Check for existing TypeMonitor stub.
michael@0 1246 for (ICStubConstIterator iter = firstMonitorStub(); !iter.atEnd(); iter++) {
michael@0 1247 if (iter->isTypeMonitor_TypeObject() &&
michael@0 1248 iter->toTypeMonitor_TypeObject()->type() == type)
michael@0 1249 {
michael@0 1250 return true;
michael@0 1251 }
michael@0 1252 }
michael@0 1253
michael@0 1254 ICTypeMonitor_TypeObject::Compiler compiler(cx, type);
michael@0 1255 ICStub *stub = compiler.getStub(compiler.getStubSpace(script));
michael@0 1256 if (!stub) {
michael@0 1257 js_ReportOutOfMemory(cx);
michael@0 1258 return false;
michael@0 1259 }
michael@0 1260
michael@0 1261 IonSpew(IonSpew_BaselineIC, " Added TypeMonitor stub %p for TypeObject %p",
michael@0 1262 stub, type.get());
michael@0 1263
michael@0 1264 addOptimizedMonitorStub(stub);
michael@0 1265 }
michael@0 1266
michael@0 1267 bool firstMonitorStubAdded = wasDetachedMonitorChain && (numOptimizedMonitorStubs_ > 0);
michael@0 1268
michael@0 1269 if (firstMonitorStubAdded) {
michael@0 1270 // The monitor chain was empty before, but a new stub was added. This is the
michael@0 1271 // only time that any main stubs' firstMonitorStub fields need to be updated to
michael@0 1272 // refer to the newly added monitor stub.
michael@0 1273 ICStub *firstStub = mainFallbackStub_->icEntry()->firstStub();
michael@0 1274 for (ICStubConstIterator iter = firstStub; !iter.atEnd(); iter++) {
michael@0 1275 // Non-monitored stubs are used if the result always has the same type,
michael@0 1276 // e.g. a StringLength stub will always return int32.
michael@0 1277 if (!iter->isMonitored())
michael@0 1278 continue;
michael@0 1279
michael@0 1280 // Since we just added the first optimized monitoring stub, any
michael@0 1281 // existing main stub's |firstMonitorStub| MUST be pointing to the fallback
michael@0 1282 // monitor stub (i.e. this stub).
michael@0 1283 JS_ASSERT(iter->toMonitoredStub()->firstMonitorStub() == this);
michael@0 1284 iter->toMonitoredStub()->updateFirstMonitorStub(firstMonitorStub_);
michael@0 1285 }
michael@0 1286 }
michael@0 1287
michael@0 1288 return true;
michael@0 1289 }
michael@0 1290
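// DoTypeMonitorFallback is the VM entry point taken when no optimized monitor
// stub matched. It records the observed value in the script's type
// information (this, argument, or bytecode result) and then tries to attach
// an optimized monitor stub for it via addMonitorStubForValue.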
michael@0 1291 static bool
michael@0 1292 DoTypeMonitorFallback(JSContext *cx, BaselineFrame *frame, ICTypeMonitor_Fallback *stub,
michael@0 1293 HandleValue value, MutableHandleValue res)
michael@0 1294 {
michael@0 1295 RootedScript script(cx, frame->script());
michael@0 1296 jsbytecode *pc = stub->icEntry()->pc(script);
michael@0 1297 TypeFallbackICSpew(cx, stub, "TypeMonitor");
michael@0 1298
michael@0 1299 uint32_t argument;
michael@0 1300 if (stub->monitorsThis()) {
michael@0 1301 JS_ASSERT(pc == script->code());
michael@0 1302 types::TypeScript::SetThis(cx, script, value);
michael@0 1303 } else if (stub->monitorsArgument(&argument)) {
michael@0 1304 JS_ASSERT(pc == script->code());
michael@0 1305 types::TypeScript::SetArgument(cx, script, argument, value);
michael@0 1306 } else {
michael@0 1307 types::TypeScript::Monitor(cx, script, pc, value);
michael@0 1308 }
michael@0 1309
michael@0 1310 if (!stub->addMonitorStubForValue(cx, script, value))
michael@0 1311 return false;
michael@0 1312
michael@0 1313 // Copy input value to res.
michael@0 1314 res.set(value);
michael@0 1315 return true;
michael@0 1316 }
michael@0 1317
michael@0 1318 typedef bool (*DoTypeMonitorFallbackFn)(JSContext *, BaselineFrame *, ICTypeMonitor_Fallback *,
michael@0 1319 HandleValue, MutableHandleValue);
michael@0 1320 static const VMFunction DoTypeMonitorFallbackInfo =
michael@0 1321 FunctionInfo<DoTypeMonitorFallbackFn>(DoTypeMonitorFallback);
michael@0 1322
michael@0 1323 bool
michael@0 1324 ICTypeMonitor_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 1325 {
michael@0 1326 JS_ASSERT(R0 == JSReturnOperand);
michael@0 1327
michael@0 1328 // Restore the tail call register.
michael@0 1329 EmitRestoreTailCallReg(masm);
michael@0 1330
michael@0 1331 masm.pushValue(R0);
michael@0 1332 masm.push(BaselineStubReg);
michael@0 1333 masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
michael@0 1334
michael@0 1335 return tailCallVM(DoTypeMonitorFallbackInfo, masm);
michael@0 1336 }
michael@0 1337
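// The PrimitiveSet stub tests R0 against every primitive type recorded in
// flags_ and returns on a match; any other value falls through to the next
// stub in the monitor chain.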
michael@0 1338 bool
michael@0 1339 ICTypeMonitor_PrimitiveSet::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 1340 {
michael@0 1341 Label success;
michael@0 1342 if ((flags_ & TypeToFlag(JSVAL_TYPE_INT32)) && !(flags_ & TypeToFlag(JSVAL_TYPE_DOUBLE)))
michael@0 1343 masm.branchTestInt32(Assembler::Equal, R0, &success);
michael@0 1344
michael@0 1345 if (flags_ & TypeToFlag(JSVAL_TYPE_DOUBLE))
michael@0 1346 masm.branchTestNumber(Assembler::Equal, R0, &success);
michael@0 1347
michael@0 1348 if (flags_ & TypeToFlag(JSVAL_TYPE_UNDEFINED))
michael@0 1349 masm.branchTestUndefined(Assembler::Equal, R0, &success);
michael@0 1350
michael@0 1351 if (flags_ & TypeToFlag(JSVAL_TYPE_BOOLEAN))
michael@0 1352 masm.branchTestBoolean(Assembler::Equal, R0, &success);
michael@0 1353
michael@0 1354 if (flags_ & TypeToFlag(JSVAL_TYPE_STRING))
michael@0 1355 masm.branchTestString(Assembler::Equal, R0, &success);
michael@0 1356
michael@0 1357 // Currently, we will never generate primitive stub checks for object. However,
michael@0 1358 // when we do get to the point where we want to collapse our monitor chains of
michael@0 1359 // objects and singletons down (when they get too long) to a generic "any object"
michael@0 1360 // in coordination with the typeset doing the same thing, this will need to
michael@0 1361 // be re-enabled.
michael@0 1362 /*
michael@0 1363 if (flags_ & TypeToFlag(JSVAL_TYPE_OBJECT))
michael@0 1364 masm.branchTestObject(Assembler::Equal, R0, &success);
michael@0 1365 */
michael@0 1366 JS_ASSERT(!(flags_ & TypeToFlag(JSVAL_TYPE_OBJECT)));
michael@0 1367
michael@0 1368 if (flags_ & TypeToFlag(JSVAL_TYPE_NULL))
michael@0 1369 masm.branchTestNull(Assembler::Equal, R0, &success);
michael@0 1370
michael@0 1371 EmitStubGuardFailure(masm);
michael@0 1372
michael@0 1373 masm.bind(&success);
michael@0 1374 EmitReturnFromIC(masm);
michael@0 1375 return true;
michael@0 1376 }
michael@0 1377
michael@0 1378 bool
michael@0 1379 ICTypeMonitor_SingleObject::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 1380 {
michael@0 1381 Label failure;
michael@0 1382 masm.branchTestObject(Assembler::NotEqual, R0, &failure);
michael@0 1383
michael@0 1384 // Guard on the object's identity.
michael@0 1385 Register obj = masm.extractObject(R0, ExtractTemp0);
michael@0 1386 Address expectedObject(BaselineStubReg, ICTypeMonitor_SingleObject::offsetOfObject());
michael@0 1387 masm.branchPtr(Assembler::NotEqual, expectedObject, obj, &failure);
michael@0 1388
michael@0 1389 EmitReturnFromIC(masm);
michael@0 1390
michael@0 1391 masm.bind(&failure);
michael@0 1392 EmitStubGuardFailure(masm);
michael@0 1393 return true;
michael@0 1394 }
michael@0 1395
michael@0 1396 bool
michael@0 1397 ICTypeMonitor_TypeObject::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 1398 {
michael@0 1399 Label failure;
michael@0 1400 masm.branchTestObject(Assembler::NotEqual, R0, &failure);
michael@0 1401
michael@0 1402 // Guard on the object's TypeObject.
michael@0 1403 Register obj = masm.extractObject(R0, ExtractTemp0);
michael@0 1404 masm.loadPtr(Address(obj, JSObject::offsetOfType()), R1.scratchReg());
michael@0 1405
michael@0 1406 Address expectedType(BaselineStubReg, ICTypeMonitor_TypeObject::offsetOfType());
michael@0 1407 masm.branchPtr(Assembler::NotEqual, expectedType, R1.scratchReg(), &failure);
michael@0 1408
michael@0 1409 EmitReturnFromIC(masm);
michael@0 1410
michael@0 1411 masm.bind(&failure);
michael@0 1412 EmitStubGuardFailure(masm);
michael@0 1413 return true;
michael@0 1414 }
michael@0 1415
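// addUpdateStubForValue mirrors the monitor path above, but for property
// writes: based on the written value it attaches a TypeUpdate stub
// (PrimitiveSet, SingleObject, or TypeObject) so that later writes of an
// already-seen type can be accepted without calling into the VM.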
michael@0 1416 bool
michael@0 1417 ICUpdatedStub::addUpdateStubForValue(JSContext *cx, HandleScript script, HandleObject obj,
michael@0 1418 HandleId id, HandleValue val)
michael@0 1419 {
michael@0 1420 if (numOptimizedStubs_ >= MAX_OPTIMIZED_STUBS) {
michael@0 1421 // TODO: if the TypeSet becomes unknown or has the AnyObject type,
michael@0 1422 // replace stubs with a single stub to handle these.
michael@0 1423 return true;
michael@0 1424 }
michael@0 1425
michael@0 1426 types::EnsureTrackPropertyTypes(cx, obj, id);
michael@0 1427
michael@0 1428 // Make sure that undefined values are explicitly included in the property
michael@0 1429 // types for an object if generating a stub to write an undefined value.
michael@0 1430 if (val.isUndefined() && types::CanHaveEmptyPropertyTypesForOwnProperty(obj))
michael@0 1431 types::AddTypePropertyId(cx, obj, id, val);
michael@0 1432
michael@0 1433 if (val.isPrimitive()) {
michael@0 1434 JSValueType type = val.isDouble() ? JSVAL_TYPE_DOUBLE : val.extractNonDoubleType();
michael@0 1435
michael@0 1436 // Check for existing TypeUpdate stub.
michael@0 1437 ICTypeUpdate_PrimitiveSet *existingStub = nullptr;
michael@0 1438 for (ICStubConstIterator iter = firstUpdateStub_; !iter.atEnd(); iter++) {
michael@0 1439 if (iter->isTypeUpdate_PrimitiveSet()) {
michael@0 1440 existingStub = iter->toTypeUpdate_PrimitiveSet();
michael@0 1441 if (existingStub->containsType(type))
michael@0 1442 return true;
michael@0 1443 }
michael@0 1444 }
michael@0 1445
michael@0 1446 ICTypeUpdate_PrimitiveSet::Compiler compiler(cx, existingStub, type);
michael@0 1447 ICStub *stub = existingStub ? compiler.updateStub()
michael@0 1448 : compiler.getStub(compiler.getStubSpace(script));
michael@0 1449 if (!stub)
michael@0 1450 return false;
michael@0 1451 if (!existingStub) {
michael@0 1452 JS_ASSERT(!hasTypeUpdateStub(TypeUpdate_PrimitiveSet));
michael@0 1453 addOptimizedUpdateStub(stub);
michael@0 1454 }
michael@0 1455
michael@0 1456 IonSpew(IonSpew_BaselineIC, " %s TypeUpdate stub %p for primitive type %d",
michael@0 1457 existingStub ? "Modified existing" : "Created new", stub, type);
michael@0 1458
michael@0 1459 } else if (val.toObject().hasSingletonType()) {
michael@0 1460 RootedObject obj(cx, &val.toObject());
michael@0 1461
michael@0 1462 // Check for existing TypeUpdate stub.
michael@0 1463 for (ICStubConstIterator iter = firstUpdateStub_; !iter.atEnd(); iter++) {
michael@0 1464 if (iter->isTypeUpdate_SingleObject() &&
michael@0 1465 iter->toTypeUpdate_SingleObject()->object() == obj)
michael@0 1466 {
michael@0 1467 return true;
michael@0 1468 }
michael@0 1469 }
michael@0 1470
michael@0 1471 ICTypeUpdate_SingleObject::Compiler compiler(cx, obj);
michael@0 1472 ICStub *stub = compiler.getStub(compiler.getStubSpace(script));
michael@0 1473 if (!stub)
michael@0 1474 return false;
michael@0 1475
michael@0 1476 IonSpew(IonSpew_BaselineIC, " Added TypeUpdate stub %p for singleton %p", stub, obj.get());
michael@0 1477
michael@0 1478 addOptimizedUpdateStub(stub);
michael@0 1479
michael@0 1480 } else {
michael@0 1481 RootedTypeObject type(cx, val.toObject().type());
michael@0 1482
michael@0 1483 // Check for existing TypeUpdate stub.
michael@0 1484 for (ICStubConstIterator iter = firstUpdateStub_; !iter.atEnd(); iter++) {
michael@0 1485 if (iter->isTypeUpdate_TypeObject() &&
michael@0 1486 iter->toTypeUpdate_TypeObject()->type() == type)
michael@0 1487 {
michael@0 1488 return true;
michael@0 1489 }
michael@0 1490 }
michael@0 1491
michael@0 1492 ICTypeUpdate_TypeObject::Compiler compiler(cx, type);
michael@0 1493 ICStub *stub = compiler.getStub(compiler.getStubSpace(script));
michael@0 1494 if (!stub)
michael@0 1495 return false;
michael@0 1496
michael@0 1497 IonSpew(IonSpew_BaselineIC, " Added TypeUpdate stub %p for TypeObject %p",
michael@0 1498 stub, type.get());
michael@0 1499
michael@0 1500 addOptimizedUpdateStub(stub);
michael@0 1501 }
michael@0 1502
michael@0 1503 return true;
michael@0 1504 }
michael@0 1505
michael@0 1506 //
michael@0 1507 // TypeUpdate_Fallback
michael@0 1508 //
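// DoTypeUpdateFallback is called when no TypeUpdate stub matched the written
// value. It derives the property id from the stub kind (JSID_VOID for dense
// element writes, the property or aliased-var name at the pc for native
// property writes), records the value's type with AddTypePropertyId, and then
// attaches an update stub.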
michael@0 1509 static bool
michael@0 1510 DoTypeUpdateFallback(JSContext *cx, BaselineFrame *frame, ICUpdatedStub *stub, HandleValue objval,
michael@0 1511 HandleValue value)
michael@0 1512 {
michael@0 1513 FallbackICSpew(cx, stub->getChainFallback(), "TypeUpdate(%s)",
michael@0 1514 ICStub::KindString(stub->kind()));
michael@0 1515
michael@0 1516 RootedScript script(cx, frame->script());
michael@0 1517 RootedObject obj(cx, &objval.toObject());
michael@0 1518 RootedId id(cx);
michael@0 1519
michael@0 1520 switch(stub->kind()) {
michael@0 1521 case ICStub::SetElem_Dense:
michael@0 1522 case ICStub::SetElem_DenseAdd: {
michael@0 1523 JS_ASSERT(obj->isNative());
michael@0 1524 id = JSID_VOID;
michael@0 1525 types::AddTypePropertyId(cx, obj, id, value);
michael@0 1526 break;
michael@0 1527 }
michael@0 1528 case ICStub::SetProp_Native:
michael@0 1529 case ICStub::SetProp_NativeAdd: {
michael@0 1530 JS_ASSERT(obj->isNative());
michael@0 1531 jsbytecode *pc = stub->getChainFallback()->icEntry()->pc(script);
michael@0 1532 if (*pc == JSOP_SETALIASEDVAR)
michael@0 1533 id = NameToId(ScopeCoordinateName(cx->runtime()->scopeCoordinateNameCache, script, pc));
michael@0 1534 else
michael@0 1535 id = NameToId(script->getName(pc));
michael@0 1536 types::AddTypePropertyId(cx, obj, id, value);
michael@0 1537 break;
michael@0 1538 }
michael@0 1539 default:
michael@0 1540 MOZ_ASSUME_UNREACHABLE("Invalid stub");
michael@0 1541 }
michael@0 1542
michael@0 1543 return stub->addUpdateStubForValue(cx, script, obj, id, value);
michael@0 1544 }
michael@0 1545
michael@0 1546 typedef bool (*DoTypeUpdateFallbackFn)(JSContext *, BaselineFrame *, ICUpdatedStub *, HandleValue,
michael@0 1547 HandleValue);
michael@0 1548 const VMFunction DoTypeUpdateFallbackInfo =
michael@0 1549 FunctionInfo<DoTypeUpdateFallbackFn>(DoTypeUpdateFallback);
michael@0 1550
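// The TypeUpdate stubs report their result in R1.scratchReg(): the optimized
// stubs below load 1 when the guarded type matches, while this fallback loads
// 0 to tell the caller to take the slow path through DoTypeUpdateFallback.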
michael@0 1551 bool
michael@0 1552 ICTypeUpdate_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 1553 {
michael@0 1554 // Just store false into R1.scratchReg() and return.
michael@0 1555 masm.move32(Imm32(0), R1.scratchReg());
michael@0 1556 EmitReturnFromIC(masm);
michael@0 1557 return true;
michael@0 1558 }
michael@0 1559
michael@0 1560 bool
michael@0 1561 ICTypeUpdate_PrimitiveSet::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 1562 {
michael@0 1563 Label success;
michael@0 1564 if ((flags_ & TypeToFlag(JSVAL_TYPE_INT32)) && !(flags_ & TypeToFlag(JSVAL_TYPE_DOUBLE)))
michael@0 1565 masm.branchTestInt32(Assembler::Equal, R0, &success);
michael@0 1566
michael@0 1567 if (flags_ & TypeToFlag(JSVAL_TYPE_DOUBLE))
michael@0 1568 masm.branchTestNumber(Assembler::Equal, R0, &success);
michael@0 1569
michael@0 1570 if (flags_ & TypeToFlag(JSVAL_TYPE_UNDEFINED))
michael@0 1571 masm.branchTestUndefined(Assembler::Equal, R0, &success);
michael@0 1572
michael@0 1573 if (flags_ & TypeToFlag(JSVAL_TYPE_BOOLEAN))
michael@0 1574 masm.branchTestBoolean(Assembler::Equal, R0, &success);
michael@0 1575
michael@0 1576 if (flags_ & TypeToFlag(JSVAL_TYPE_STRING))
michael@0 1577 masm.branchTestString(Assembler::Equal, R0, &success);
michael@0 1578
michael@0 1579 // Currently, we will never generate primitive stub checks for object. However,
michael@0 1580 // when we do get to the point where we want to collapse our monitor chains of
michael@0 1581 // objects and singletons down (when they get too long) to a generic "any object"
michael@0 1582 // in coordination with the typeset doing the same thing, this will need to
michael@0 1583 // be re-enabled.
michael@0 1584 /*
michael@0 1585 if (flags_ & TypeToFlag(JSVAL_TYPE_OBJECT))
michael@0 1586 masm.branchTestObject(Assembler::Equal, R0, &success);
michael@0 1587 */
michael@0 1588 JS_ASSERT(!(flags_ & TypeToFlag(JSVAL_TYPE_OBJECT)));
michael@0 1589
michael@0 1590 if (flags_ & TypeToFlag(JSVAL_TYPE_NULL))
michael@0 1591 masm.branchTestNull(Assembler::Equal, R0, &success);
michael@0 1592
michael@0 1593 EmitStubGuardFailure(masm);
michael@0 1594
michael@0 1595 // Type matches, load true into R1.scratchReg() and return.
michael@0 1596 masm.bind(&success);
michael@0 1597 masm.mov(ImmWord(1), R1.scratchReg());
michael@0 1598 EmitReturnFromIC(masm);
michael@0 1599
michael@0 1600 return true;
michael@0 1601 }
michael@0 1602
michael@0 1603 bool
michael@0 1604 ICTypeUpdate_SingleObject::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 1605 {
michael@0 1606 Label failure;
michael@0 1607 masm.branchTestObject(Assembler::NotEqual, R0, &failure);
michael@0 1608
michael@0 1609 // Guard on the object's identity.
michael@0 1610 Register obj = masm.extractObject(R0, R1.scratchReg());
michael@0 1611 Address expectedObject(BaselineStubReg, ICTypeUpdate_SingleObject::offsetOfObject());
michael@0 1612 masm.branchPtr(Assembler::NotEqual, expectedObject, obj, &failure);
michael@0 1613
michael@0 1614 // Identity matches, load true into R1.scratchReg() and return.
michael@0 1615 masm.mov(ImmWord(1), R1.scratchReg());
michael@0 1616 EmitReturnFromIC(masm);
michael@0 1617
michael@0 1618 masm.bind(&failure);
michael@0 1619 EmitStubGuardFailure(masm);
michael@0 1620 return true;
michael@0 1621 }
michael@0 1622
michael@0 1623 bool
michael@0 1624 ICTypeUpdate_TypeObject::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 1625 {
michael@0 1626 Label failure;
michael@0 1627 masm.branchTestObject(Assembler::NotEqual, R0, &failure);
michael@0 1628
michael@0 1629 // Guard on the object's TypeObject.
michael@0 1630 Register obj = masm.extractObject(R0, R1.scratchReg());
michael@0 1631 masm.loadPtr(Address(obj, JSObject::offsetOfType()), R1.scratchReg());
michael@0 1632
michael@0 1633 Address expectedType(BaselineStubReg, ICTypeUpdate_TypeObject::offsetOfType());
michael@0 1634 masm.branchPtr(Assembler::NotEqual, expectedType, R1.scratchReg(), &failure);
michael@0 1635
michael@0 1636 // Type matches, load true into R1.scratchReg() and return.
michael@0 1637 masm.mov(ImmWord(1), R1.scratchReg());
michael@0 1638 EmitReturnFromIC(masm);
michael@0 1639
michael@0 1640 masm.bind(&failure);
michael@0 1641 EmitStubGuardFailure(masm);
michael@0 1642 return true;
michael@0 1643 }
michael@0 1644
michael@0 1645 //
michael@0 1646 // VM function to help call native getters.
michael@0 1647 //
michael@0 1648
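// DoCallNativeGetter invokes a native getter with no arguments: vp[0] holds
// the callee and vp[1] the |this| object, and the native's return value is
// read back from vp[0], per the JSNative calling convention.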
michael@0 1649 static bool
michael@0 1650 DoCallNativeGetter(JSContext *cx, HandleFunction callee, HandleObject obj,
michael@0 1651 MutableHandleValue result)
michael@0 1652 {
michael@0 1653 JS_ASSERT(callee->isNative());
michael@0 1654 JSNative natfun = callee->native();
michael@0 1655
michael@0 1656 JS::AutoValueArray<2> vp(cx);
michael@0 1657 vp[0].setObject(*callee.get());
michael@0 1658 vp[1].setObject(*obj.get());
michael@0 1659
michael@0 1660 if (!natfun(cx, 0, vp.begin()))
michael@0 1661 return false;
michael@0 1662
michael@0 1663 result.set(vp[0]);
michael@0 1664 return true;
michael@0 1665 }
michael@0 1666
michael@0 1667 typedef bool (*DoCallNativeGetterFn)(JSContext *, HandleFunction, HandleObject, MutableHandleValue);
michael@0 1668 static const VMFunction DoCallNativeGetterInfo =
michael@0 1669 FunctionInfo<DoCallNativeGetterFn>(DoCallNativeGetter);
michael@0 1670
michael@0 1671 //
michael@0 1672 // This_Fallback
michael@0 1673 //
michael@0 1674
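// DoThisFallback computes the |this| object for a non-strict callee by boxing
// the incoming this value with BoxNonStrictThis.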
michael@0 1675 static bool
michael@0 1676 DoThisFallback(JSContext *cx, ICThis_Fallback *stub, HandleValue thisv, MutableHandleValue ret)
michael@0 1677 {
michael@0 1678 FallbackICSpew(cx, stub, "This");
michael@0 1679
michael@0 1680 JSObject *thisObj = BoxNonStrictThis(cx, thisv);
michael@0 1681 if (!thisObj)
michael@0 1682 return false;
michael@0 1683
michael@0 1684 ret.setObject(*thisObj);
michael@0 1685 return true;
michael@0 1686 }
michael@0 1687
michael@0 1688 typedef bool (*DoThisFallbackFn)(JSContext *, ICThis_Fallback *, HandleValue, MutableHandleValue);
michael@0 1689 static const VMFunction DoThisFallbackInfo = FunctionInfo<DoThisFallbackFn>(DoThisFallback);
michael@0 1690
michael@0 1691 bool
michael@0 1692 ICThis_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 1693 {
michael@0 1694 JS_ASSERT(R0 == JSReturnOperand);
michael@0 1695
michael@0 1696 // Restore the tail call register.
michael@0 1697 EmitRestoreTailCallReg(masm);
michael@0 1698
michael@0 1699 masm.pushValue(R0);
michael@0 1700 masm.push(BaselineStubReg);
michael@0 1701
michael@0 1702 return tailCallVM(DoThisFallbackInfo, masm);
michael@0 1703 }
michael@0 1704
michael@0 1705 //
michael@0 1706 // NewArray_Fallback
michael@0 1707 //
michael@0 1708
michael@0 1709 static bool
michael@0 1710 DoNewArray(JSContext *cx, ICNewArray_Fallback *stub, uint32_t length,
michael@0 1711 HandleTypeObject type, MutableHandleValue res)
michael@0 1712 {
michael@0 1713 FallbackICSpew(cx, stub, "NewArray");
michael@0 1714
michael@0 1715 JSObject *obj = NewInitArray(cx, length, type);
michael@0 1716 if (!obj)
michael@0 1717 return false;
michael@0 1718
michael@0 1719 res.setObject(*obj);
michael@0 1720 return true;
michael@0 1721 }
michael@0 1722
michael@0 1723 typedef bool(*DoNewArrayFn)(JSContext *, ICNewArray_Fallback *, uint32_t, HandleTypeObject,
michael@0 1724 MutableHandleValue);
michael@0 1725 static const VMFunction DoNewArrayInfo = FunctionInfo<DoNewArrayFn>(DoNewArray);
michael@0 1726
michael@0 1727 bool
michael@0 1728 ICNewArray_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 1729 {
michael@0 1730 EmitRestoreTailCallReg(masm);
michael@0 1731
michael@0 1732 masm.push(R1.scratchReg()); // type
michael@0 1733 masm.push(R0.scratchReg()); // length
michael@0 1734 masm.push(BaselineStubReg); // stub.
michael@0 1735
michael@0 1736 return tailCallVM(DoNewArrayInfo, masm);
michael@0 1737 }
michael@0 1738
michael@0 1739 //
michael@0 1740 // NewObject_Fallback
michael@0 1741 //
michael@0 1742
michael@0 1743 static bool
michael@0 1744 DoNewObject(JSContext *cx, ICNewObject_Fallback *stub, MutableHandleValue res)
michael@0 1745 {
michael@0 1746 FallbackICSpew(cx, stub, "NewObject");
michael@0 1747
michael@0 1748 RootedObject templateObject(cx, stub->templateObject());
michael@0 1749 JSObject *obj = NewInitObject(cx, templateObject);
michael@0 1750 if (!obj)
michael@0 1751 return false;
michael@0 1752
michael@0 1753 res.setObject(*obj);
michael@0 1754 return true;
michael@0 1755 }
michael@0 1756
michael@0 1757 typedef bool(*DoNewObjectFn)(JSContext *, ICNewObject_Fallback *, MutableHandleValue);
michael@0 1758 static const VMFunction DoNewObjectInfo = FunctionInfo<DoNewObjectFn>(DoNewObject);
michael@0 1759
michael@0 1760 bool
michael@0 1761 ICNewObject_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 1762 {
michael@0 1763 EmitRestoreTailCallReg(masm);
michael@0 1764
michael@0 1765 masm.push(BaselineStubReg); // stub.
michael@0 1766
michael@0 1767 return tailCallVM(DoNewObjectInfo, masm);
michael@0 1768 }
michael@0 1769
michael@0 1770 //
michael@0 1771 // Compare_Fallback
michael@0 1772 //
michael@0 1773
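// DoCompareFallback performs the comparison through the generic VM helpers
// and then, based on the operand types actually seen, tries to attach a
// specialized compare stub (Int32, Double, Boolean, String, Object, or one of
// the mixed cases below) so that later hits can be handled in jitcode.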
michael@0 1774 static bool
michael@0 1775 DoCompareFallback(JSContext *cx, BaselineFrame *frame, ICCompare_Fallback *stub_, HandleValue lhs,
michael@0 1776 HandleValue rhs, MutableHandleValue ret)
michael@0 1777 {
michael@0 1778 // This fallback stub may trigger debug mode toggling.
michael@0 1779 DebugModeOSRVolatileStub<ICCompare_Fallback *> stub(frame, stub_);
michael@0 1780
michael@0 1781 jsbytecode *pc = stub->icEntry()->pc(frame->script());
michael@0 1782 JSOp op = JSOp(*pc);
michael@0 1783
michael@0 1784 FallbackICSpew(cx, stub, "Compare(%s)", js_CodeName[op]);
michael@0 1785
michael@0 1786 // Case operations in a CONDSWITCH perform strict equality.
michael@0 1787 if (op == JSOP_CASE)
michael@0 1788 op = JSOP_STRICTEQ;
michael@0 1789
michael@0 1790 // Don't pass lhs/rhs directly, we need the original values when
michael@0 1791 // generating stubs.
michael@0 1792 RootedValue lhsCopy(cx, lhs);
michael@0 1793 RootedValue rhsCopy(cx, rhs);
michael@0 1794
michael@0 1795 // Perform the compare operation.
michael@0 1796 bool out;
michael@0 1797 switch(op) {
michael@0 1798 case JSOP_LT:
michael@0 1799 if (!LessThan(cx, &lhsCopy, &rhsCopy, &out))
michael@0 1800 return false;
michael@0 1801 break;
michael@0 1802 case JSOP_LE:
michael@0 1803 if (!LessThanOrEqual(cx, &lhsCopy, &rhsCopy, &out))
michael@0 1804 return false;
michael@0 1805 break;
michael@0 1806 case JSOP_GT:
michael@0 1807 if (!GreaterThan(cx, &lhsCopy, &rhsCopy, &out))
michael@0 1808 return false;
michael@0 1809 break;
michael@0 1810 case JSOP_GE:
michael@0 1811 if (!GreaterThanOrEqual(cx, &lhsCopy, &rhsCopy, &out))
michael@0 1812 return false;
michael@0 1813 break;
michael@0 1814 case JSOP_EQ:
michael@0 1815 if (!LooselyEqual<true>(cx, &lhsCopy, &rhsCopy, &out))
michael@0 1816 return false;
michael@0 1817 break;
michael@0 1818 case JSOP_NE:
michael@0 1819 if (!LooselyEqual<false>(cx, &lhsCopy, &rhsCopy, &out))
michael@0 1820 return false;
michael@0 1821 break;
michael@0 1822 case JSOP_STRICTEQ:
michael@0 1823 if (!StrictlyEqual<true>(cx, &lhsCopy, &rhsCopy, &out))
michael@0 1824 return false;
michael@0 1825 break;
michael@0 1826 case JSOP_STRICTNE:
michael@0 1827 if (!StrictlyEqual<false>(cx, &lhsCopy, &rhsCopy, &out))
michael@0 1828 return false;
michael@0 1829 break;
michael@0 1830 default:
michael@0 1831 JS_ASSERT(!"Unhandled baseline compare op");
michael@0 1832 return false;
michael@0 1833 }
michael@0 1834
michael@0 1835 ret.setBoolean(out);
michael@0 1836
michael@0 1837 // Check if debug mode toggling made the stub invalid.
michael@0 1838 if (stub.invalid())
michael@0 1839 return true;
michael@0 1840
michael@0 1841 // Check to see if a new stub should be generated.
michael@0 1842 if (stub->numOptimizedStubs() >= ICCompare_Fallback::MAX_OPTIMIZED_STUBS) {
michael@0 1843 // TODO: Discard all stubs in this IC and replace with inert megamorphic stub.
michael@0 1844 // But for now we just bail.
michael@0 1845 return true;
michael@0 1846 }
michael@0 1847
michael@0 1848 JSScript *script = frame->script();
michael@0 1849
michael@0 1850 // Try to generate new stubs.
michael@0 1851 if (lhs.isInt32() && rhs.isInt32()) {
michael@0 1852 IonSpew(IonSpew_BaselineIC, " Generating %s(Int32, Int32) stub", js_CodeName[op]);
michael@0 1853 ICCompare_Int32::Compiler compiler(cx, op);
michael@0 1854 ICStub *int32Stub = compiler.getStub(compiler.getStubSpace(script));
michael@0 1855 if (!int32Stub)
michael@0 1856 return false;
michael@0 1857
michael@0 1858 stub->addNewStub(int32Stub);
michael@0 1859 return true;
michael@0 1860 }
michael@0 1861
michael@0 1862 if (!cx->runtime()->jitSupportsFloatingPoint && (lhs.isNumber() || rhs.isNumber()))
michael@0 1863 return true;
michael@0 1864
michael@0 1865 if (lhs.isNumber() && rhs.isNumber()) {
michael@0 1866 IonSpew(IonSpew_BaselineIC, " Generating %s(Number, Number) stub", js_CodeName[op]);
michael@0 1867
michael@0 1868 // Unlink int32 stubs; it's faster to always use the double stub.
michael@0 1869 stub->unlinkStubsWithKind(cx, ICStub::Compare_Int32);
michael@0 1870
michael@0 1871 ICCompare_Double::Compiler compiler(cx, op);
michael@0 1872 ICStub *doubleStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 1873 if (!doubleStub)
michael@0 1874 return false;
michael@0 1875
michael@0 1876 stub->addNewStub(doubleStub);
michael@0 1877 return true;
michael@0 1878 }
michael@0 1879
michael@0 1880 if ((lhs.isNumber() && rhs.isUndefined()) ||
michael@0 1881 (lhs.isUndefined() && rhs.isNumber()))
michael@0 1882 {
michael@0 1883 IonSpew(IonSpew_BaselineIC, " Generating %s(%s, %s) stub", js_CodeName[op],
michael@0 1884 rhs.isUndefined() ? "Number" : "Undefined",
michael@0 1885 rhs.isUndefined() ? "Undefined" : "Number");
michael@0 1886 ICCompare_NumberWithUndefined::Compiler compiler(cx, op, lhs.isUndefined());
michael@0 1887 ICStub *doubleStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 1888 if (!doubleStub)
michael@0 1889 return false;
michael@0 1890
michael@0 1891 stub->addNewStub(doubleStub);
michael@0 1892 return true;
michael@0 1893 }
michael@0 1894
michael@0 1895 if (lhs.isBoolean() && rhs.isBoolean()) {
michael@0 1896 IonSpew(IonSpew_BaselineIC, " Generating %s(Boolean, Boolean) stub", js_CodeName[op]);
michael@0 1897 ICCompare_Boolean::Compiler compiler(cx, op);
michael@0 1898 ICStub *booleanStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 1899 if (!booleanStub)
michael@0 1900 return false;
michael@0 1901
michael@0 1902 stub->addNewStub(booleanStub);
michael@0 1903 return true;
michael@0 1904 }
michael@0 1905
michael@0 1906 if ((lhs.isBoolean() && rhs.isInt32()) || (lhs.isInt32() && rhs.isBoolean())) {
michael@0 1907 IonSpew(IonSpew_BaselineIC, " Generating %s(%s, %s) stub", js_CodeName[op],
michael@0 1908 rhs.isInt32() ? "Boolean" : "Int32",
michael@0 1909 rhs.isInt32() ? "Int32" : "Boolean");
michael@0 1910 ICCompare_Int32WithBoolean::Compiler compiler(cx, op, lhs.isInt32());
michael@0 1911 ICStub *optStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 1912 if (!optStub)
michael@0 1913 return false;
michael@0 1914
michael@0 1915 stub->addNewStub(optStub);
michael@0 1916 return true;
michael@0 1917 }
michael@0 1918
michael@0 1919 if (IsEqualityOp(op)) {
michael@0 1920 if (lhs.isString() && rhs.isString() && !stub->hasStub(ICStub::Compare_String)) {
michael@0 1921 IonSpew(IonSpew_BaselineIC, " Generating %s(String, String) stub", js_CodeName[op]);
michael@0 1922 ICCompare_String::Compiler compiler(cx, op);
michael@0 1923 ICStub *stringStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 1924 if (!stringStub)
michael@0 1925 return false;
michael@0 1926
michael@0 1927 stub->addNewStub(stringStub);
michael@0 1928 return true;
michael@0 1929 }
michael@0 1930
michael@0 1931 if (lhs.isObject() && rhs.isObject()) {
michael@0 1932 JS_ASSERT(!stub->hasStub(ICStub::Compare_Object));
michael@0 1933 IonSpew(IonSpew_BaselineIC, " Generating %s(Object, Object) stub", js_CodeName[op]);
michael@0 1934 ICCompare_Object::Compiler compiler(cx, op);
michael@0 1935 ICStub *objectStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 1936 if (!objectStub)
michael@0 1937 return false;
michael@0 1938
michael@0 1939 stub->addNewStub(objectStub);
michael@0 1940 return true;
michael@0 1941 }
michael@0 1942
michael@0 1943 if ((lhs.isObject() || lhs.isNull() || lhs.isUndefined()) &&
michael@0 1944 (rhs.isObject() || rhs.isNull() || rhs.isUndefined()) &&
michael@0 1945 !stub->hasStub(ICStub::Compare_ObjectWithUndefined))
michael@0 1946 {
michael@0 1947 IonSpew(IonSpew_BaselineIC, " Generating %s(Obj/Null/Undef, Obj/Null/Undef) stub",
michael@0 1948 js_CodeName[op]);
michael@0 1949 bool lhsIsUndefined = lhs.isNull() || lhs.isUndefined();
michael@0 1950 bool compareWithNull = lhs.isNull() || rhs.isNull();
michael@0 1951 ICCompare_ObjectWithUndefined::Compiler compiler(cx, op,
michael@0 1952 lhsIsUndefined, compareWithNull);
michael@0 1953 ICStub *objectStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 1954 if (!objectStub)
michael@0 1955 return false;
michael@0 1956
michael@0 1957 stub->addNewStub(objectStub);
michael@0 1958 return true;
michael@0 1959 }
michael@0 1960 }
michael@0 1961
michael@0 1962 return true;
michael@0 1963 }
michael@0 1964
michael@0 1965 typedef bool (*DoCompareFallbackFn)(JSContext *, BaselineFrame *, ICCompare_Fallback *,
michael@0 1966 HandleValue, HandleValue, MutableHandleValue);
michael@0 1967 static const VMFunction DoCompareFallbackInfo =
michael@0 1968 FunctionInfo<DoCompareFallbackFn>(DoCompareFallback, PopValues(2));
michael@0 1969
michael@0 1970 bool
michael@0 1971 ICCompare_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 1972 {
michael@0 1973 JS_ASSERT(R0 == JSReturnOperand);
michael@0 1974
michael@0 1975 // Restore the tail call register.
michael@0 1976 EmitRestoreTailCallReg(masm);
michael@0 1977
michael@0 1978 // Ensure stack is fully synced for the expression decompiler.
michael@0 1979 masm.pushValue(R0);
michael@0 1980 masm.pushValue(R1);
michael@0 1981
michael@0 1982 // Push arguments.
michael@0 1983 masm.pushValue(R1);
michael@0 1984 masm.pushValue(R0);
michael@0 1985 masm.push(BaselineStubReg);
michael@0 1986 masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
michael@0 1987 return tailCallVM(DoCompareFallbackInfo, masm);
michael@0 1988 }
michael@0 1989
michael@0 1990 //
michael@0 1991 // Compare_String
michael@0 1992 //
michael@0 1993
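// The string compare stub guards that both operands are strings and attempts
// an inline comparison with compareStrings; if the inline path cannot decide
// the result, the stub fails over to the next stub in the chain.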
michael@0 1994 bool
michael@0 1995 ICCompare_String::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 1996 {
michael@0 1997 Label failure;
michael@0 1998 masm.branchTestString(Assembler::NotEqual, R0, &failure);
michael@0 1999 masm.branchTestString(Assembler::NotEqual, R1, &failure);
michael@0 2000
michael@0 2001 JS_ASSERT(IsEqualityOp(op));
michael@0 2002
michael@0 2003 Register left = masm.extractString(R0, ExtractTemp0);
michael@0 2004 Register right = masm.extractString(R1, ExtractTemp1);
michael@0 2005
michael@0 2006 GeneralRegisterSet regs(availableGeneralRegs(2));
michael@0 2007 Register scratchReg = regs.takeAny();
michael@0 2008 // x86 doesn't have the luxury of a second scratch.
michael@0 2009 Register scratchReg2;
michael@0 2010 if (regs.empty()) {
michael@0 2011 scratchReg2 = BaselineStubReg;
michael@0 2012 masm.push(BaselineStubReg);
michael@0 2013 } else {
michael@0 2014 scratchReg2 = regs.takeAny();
michael@0 2015 }
michael@0 2016 JS_ASSERT(scratchReg2 != scratchReg);
michael@0 2017
michael@0 2018 Label inlineCompareFailed;
michael@0 2019 masm.compareStrings(op, left, right, scratchReg2, scratchReg, &inlineCompareFailed);
michael@0 2020 masm.tagValue(JSVAL_TYPE_BOOLEAN, scratchReg2, R0);
michael@0 2021 if (scratchReg2 == BaselineStubReg)
michael@0 2022 masm.pop(BaselineStubReg);
michael@0 2023 EmitReturnFromIC(masm);
michael@0 2024
michael@0 2025 masm.bind(&inlineCompareFailed);
michael@0 2026 if (scratchReg2 == BaselineStubReg)
michael@0 2027 masm.pop(BaselineStubReg);
michael@0 2028 masm.bind(&failure);
michael@0 2029 EmitStubGuardFailure(masm);
michael@0 2030 return true;
michael@0 2031 }
michael@0 2032
michael@0 2033 //
michael@0 2034 // Compare_Boolean
michael@0 2035 //
michael@0 2036
michael@0 2037 bool
michael@0 2038 ICCompare_Boolean::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 2039 {
michael@0 2040 Label failure;
michael@0 2041 masm.branchTestBoolean(Assembler::NotEqual, R0, &failure);
michael@0 2042 masm.branchTestBoolean(Assembler::NotEqual, R1, &failure);
michael@0 2043
michael@0 2044 Register left = masm.extractInt32(R0, ExtractTemp0);
michael@0 2045 Register right = masm.extractInt32(R1, ExtractTemp1);
michael@0 2046
michael@0 2047 // Compare payload regs of R0 and R1.
michael@0 2048 Assembler::Condition cond = JSOpToCondition(op, /* signed = */true);
michael@0 2049 masm.cmp32Set(cond, left, right, left);
michael@0 2050
michael@0 2051 // Box the result and return
michael@0 2052 masm.tagValue(JSVAL_TYPE_BOOLEAN, left, R0);
michael@0 2053 EmitReturnFromIC(masm);
michael@0 2054
michael@0 2055 // Failure case - jump to next stub
michael@0 2056 masm.bind(&failure);
michael@0 2057 EmitStubGuardFailure(masm);
michael@0 2058 return true;
michael@0 2059 }
michael@0 2060
michael@0 2061 //
michael@0 2062 // Compare_NumberWithUndefined
michael@0 2063 //
michael@0 2064
michael@0 2065 bool
michael@0 2066 ICCompare_NumberWithUndefined::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 2067 {
michael@0 2068 ValueOperand numberOperand, undefinedOperand;
michael@0 2069 if (lhsIsUndefined) {
michael@0 2070 numberOperand = R1;
michael@0 2071 undefinedOperand = R0;
michael@0 2072 } else {
michael@0 2073 numberOperand = R0;
michael@0 2074 undefinedOperand = R1;
michael@0 2075 }
michael@0 2076
michael@0 2077 Label failure;
michael@0 2078 masm.branchTestNumber(Assembler::NotEqual, numberOperand, &failure);
michael@0 2079 masm.branchTestUndefined(Assembler::NotEqual, undefinedOperand, &failure);
michael@0 2080
michael@0 2081 // Comparing a number with undefined will always be true for NE/STRICTNE,
michael@0 2082 // and always be false for other compare ops.
michael@0 2083 masm.moveValue(BooleanValue(op == JSOP_NE || op == JSOP_STRICTNE), R0);
michael@0 2084
michael@0 2085 EmitReturnFromIC(masm);
michael@0 2086
michael@0 2087 // Failure case - jump to next stub
michael@0 2088 masm.bind(&failure);
michael@0 2089 EmitStubGuardFailure(masm);
michael@0 2090 return true;
michael@0 2091 }
michael@0 2092
michael@0 2093 //
michael@0 2094 // Compare_Object
michael@0 2095 //
michael@0 2096
michael@0 2097 bool
michael@0 2098 ICCompare_Object::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 2099 {
michael@0 2100 Label failure;
michael@0 2101 masm.branchTestObject(Assembler::NotEqual, R0, &failure);
michael@0 2102 masm.branchTestObject(Assembler::NotEqual, R1, &failure);
michael@0 2103
michael@0 2104 JS_ASSERT(IsEqualityOp(op));
michael@0 2105
michael@0 2106 Register left = masm.extractObject(R0, ExtractTemp0);
michael@0 2107 Register right = masm.extractObject(R1, ExtractTemp1);
michael@0 2108
michael@0 2109 Label ifTrue;
michael@0 2110 masm.branchPtr(JSOpToCondition(op, /* signed = */true), left, right, &ifTrue);
michael@0 2111
michael@0 2112 masm.moveValue(BooleanValue(false), R0);
michael@0 2113 EmitReturnFromIC(masm);
michael@0 2114
michael@0 2115 masm.bind(&ifTrue);
michael@0 2116 masm.moveValue(BooleanValue(true), R0);
michael@0 2117 EmitReturnFromIC(masm);
michael@0 2118
michael@0 2119 // Failure case - jump to next stub
michael@0 2120 masm.bind(&failure);
michael@0 2121 EmitStubGuardFailure(masm);
michael@0 2122 return true;
michael@0 2123 }
michael@0 2124
michael@0 2125 //
michael@0 2126 // Compare_ObjectWithUndefined
michael@0 2127 //
michael@0 2128
michael@0 2129 bool
michael@0 2130 ICCompare_ObjectWithUndefined::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 2131 {
michael@0 2132 JS_ASSERT(IsEqualityOp(op));
michael@0 2133
michael@0 2134 ValueOperand objectOperand, undefinedOperand;
michael@0 2135 if (lhsIsUndefined) {
michael@0 2136 objectOperand = R1;
michael@0 2137 undefinedOperand = R0;
michael@0 2138 } else {
michael@0 2139 objectOperand = R0;
michael@0 2140 undefinedOperand = R1;
michael@0 2141 }
michael@0 2142
michael@0 2143 Label failure;
michael@0 2144 if (compareWithNull)
michael@0 2145 masm.branchTestNull(Assembler::NotEqual, undefinedOperand, &failure);
michael@0 2146 else
michael@0 2147 masm.branchTestUndefined(Assembler::NotEqual, undefinedOperand, &failure);
michael@0 2148
michael@0 2149 Label notObject;
michael@0 2150 masm.branchTestObject(Assembler::NotEqual, objectOperand, &notObject);
michael@0 2151
michael@0 2152 if (op == JSOP_STRICTEQ || op == JSOP_STRICTNE) {
michael@0 2153 // obj !== undefined for all objects.
michael@0 2154 masm.moveValue(BooleanValue(op == JSOP_STRICTNE), R0);
michael@0 2155 EmitReturnFromIC(masm);
michael@0 2156 } else {
michael@0 2157 // obj != undefined only where !obj->getClass()->emulatesUndefined()
michael@0 2158 Label emulatesUndefined;
michael@0 2159 Register obj = masm.extractObject(objectOperand, ExtractTemp0);
michael@0 2160 masm.loadPtr(Address(obj, JSObject::offsetOfType()), obj);
michael@0 2161 masm.loadPtr(Address(obj, types::TypeObject::offsetOfClasp()), obj);
michael@0 2162 masm.branchTest32(Assembler::NonZero,
michael@0 2163 Address(obj, Class::offsetOfFlags()),
michael@0 2164 Imm32(JSCLASS_EMULATES_UNDEFINED),
michael@0 2165 &emulatesUndefined);
michael@0 2166 masm.moveValue(BooleanValue(op == JSOP_NE), R0);
michael@0 2167 EmitReturnFromIC(masm);
michael@0 2168 masm.bind(&emulatesUndefined);
michael@0 2169 masm.moveValue(BooleanValue(op == JSOP_EQ), R0);
michael@0 2170 EmitReturnFromIC(masm);
michael@0 2171 }
michael@0 2172
michael@0 2173 masm.bind(&notObject);
michael@0 2174
michael@0 2175 // Also support null == null or undefined == undefined comparisons.
michael@0 2176 if (compareWithNull)
michael@0 2177 masm.branchTestNull(Assembler::NotEqual, objectOperand, &failure);
michael@0 2178 else
michael@0 2179 masm.branchTestUndefined(Assembler::NotEqual, objectOperand, &failure);
michael@0 2180
michael@0 2181 masm.moveValue(BooleanValue(op == JSOP_STRICTEQ || op == JSOP_EQ), R0);
michael@0 2182 EmitReturnFromIC(masm);
michael@0 2183
michael@0 2184 // Failure case - jump to next stub
michael@0 2185 masm.bind(&failure);
michael@0 2186 EmitStubGuardFailure(masm);
michael@0 2187 return true;
michael@0 2188 }
michael@0 2189
michael@0 2190 //
michael@0 2191 // Compare_Int32WithBoolean
michael@0 2192 //
michael@0 2193
michael@0 2194 bool
michael@0 2195 ICCompare_Int32WithBoolean::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 2196 {
michael@0 2197 Label failure;
michael@0 2198 ValueOperand int32Val;
michael@0 2199 ValueOperand boolVal;
michael@0 2200 if (lhsIsInt32_) {
michael@0 2201 int32Val = R0;
michael@0 2202 boolVal = R1;
michael@0 2203 } else {
michael@0 2204 boolVal = R0;
michael@0 2205 int32Val = R1;
michael@0 2206 }
michael@0 2207 masm.branchTestBoolean(Assembler::NotEqual, boolVal, &failure);
michael@0 2208 masm.branchTestInt32(Assembler::NotEqual, int32Val, &failure);
michael@0 2209
michael@0 2210 if (op_ == JSOP_STRICTEQ || op_ == JSOP_STRICTNE) {
michael@0 2211 // Ints and booleans are never strictly equal, always strictly not equal.
michael@0 2212 masm.moveValue(BooleanValue(op_ == JSOP_STRICTNE), R0);
michael@0 2213 EmitReturnFromIC(masm);
michael@0 2214 } else {
michael@0 2215 Register boolReg = masm.extractBoolean(boolVal, ExtractTemp0);
michael@0 2216 Register int32Reg = masm.extractInt32(int32Val, ExtractTemp1);
michael@0 2217
michael@0 2218 // Compare payload regs of R0 and R1.
michael@0 2219 Assembler::Condition cond = JSOpToCondition(op_, /* signed = */true);
michael@0 2220 masm.cmp32Set(cond, (lhsIsInt32_ ? int32Reg : boolReg),
michael@0 2221 (lhsIsInt32_ ? boolReg : int32Reg), R0.scratchReg());
michael@0 2222
michael@0 2223 // Box the result and return
michael@0 2224 masm.tagValue(JSVAL_TYPE_BOOLEAN, R0.scratchReg(), R0);
michael@0 2225 EmitReturnFromIC(masm);
michael@0 2226 }
michael@0 2227
michael@0 2228 // Failure case - jump to next stub
michael@0 2229 masm.bind(&failure);
michael@0 2230 EmitStubGuardFailure(masm);
michael@0 2231 return true;
michael@0 2232 }
michael@0 2233
michael@0 2234 //
michael@0 2235 // ToBool_Fallback
michael@0 2236 //
michael@0 2237
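// DoToBoolFallback computes ToBoolean on the operand and then attaches a stub
// specialized for the operand's type (Int32, Double, String, Null/Undefined,
// or Object). A boolean input is not expected here (see the assertion below).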
michael@0 2238 static bool
michael@0 2239 DoToBoolFallback(JSContext *cx, BaselineFrame *frame, ICToBool_Fallback *stub, HandleValue arg,
michael@0 2240 MutableHandleValue ret)
michael@0 2241 {
michael@0 2242 FallbackICSpew(cx, stub, "ToBool");
michael@0 2243
michael@0 2244 bool cond = ToBoolean(arg);
michael@0 2245 ret.setBoolean(cond);
michael@0 2246
michael@0 2247 // Check to see if a new stub should be generated.
michael@0 2248 if (stub->numOptimizedStubs() >= ICToBool_Fallback::MAX_OPTIMIZED_STUBS) {
michael@0 2249 // TODO: Discard all stubs in this IC and replace with inert megamorphic stub.
michael@0 2250 // But for now we just bail.
michael@0 2251 return true;
michael@0 2252 }
michael@0 2253
michael@0 2254 JS_ASSERT(!arg.isBoolean());
michael@0 2255
michael@0 2256 JSScript *script = frame->script();
michael@0 2257
michael@0 2258 // Try to generate new stubs.
michael@0 2259 if (arg.isInt32()) {
michael@0 2260 IonSpew(IonSpew_BaselineIC, " Generating ToBool(Int32) stub.");
michael@0 2261 ICToBool_Int32::Compiler compiler(cx);
michael@0 2262 ICStub *int32Stub = compiler.getStub(compiler.getStubSpace(script));
michael@0 2263 if (!int32Stub)
michael@0 2264 return false;
michael@0 2265
michael@0 2266 stub->addNewStub(int32Stub);
michael@0 2267 return true;
michael@0 2268 }
michael@0 2269
michael@0 2270 if (arg.isDouble() && cx->runtime()->jitSupportsFloatingPoint) {
michael@0 2271 IonSpew(IonSpew_BaselineIC, " Generating ToBool(Double) stub.");
michael@0 2272 ICToBool_Double::Compiler compiler(cx);
michael@0 2273 ICStub *doubleStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 2274 if (!doubleStub)
michael@0 2275 return false;
michael@0 2276
michael@0 2277 stub->addNewStub(doubleStub);
michael@0 2278 return true;
michael@0 2279 }
michael@0 2280
michael@0 2281 if (arg.isString()) {
michael@0 2282 IonSpew(IonSpew_BaselineIC, " Generating ToBool(String) stub");
michael@0 2283 ICToBool_String::Compiler compiler(cx);
michael@0 2284 ICStub *stringStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 2285 if (!stringStub)
michael@0 2286 return false;
michael@0 2287
michael@0 2288 stub->addNewStub(stringStub);
michael@0 2289 return true;
michael@0 2290 }
michael@0 2291
michael@0 2292 if (arg.isNull() || arg.isUndefined()) {
michael@0 2293 ICToBool_NullUndefined::Compiler compiler(cx);
michael@0 2294 ICStub *nilStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 2295 if (!nilStub)
michael@0 2296 return false;
michael@0 2297
michael@0 2298 stub->addNewStub(nilStub);
michael@0 2299 return true;
michael@0 2300 }
michael@0 2301
michael@0 2302 if (arg.isObject()) {
michael@0 2303 IonSpew(IonSpew_BaselineIC, " Generating ToBool(Object) stub.");
michael@0 2304 ICToBool_Object::Compiler compiler(cx);
michael@0 2305 ICStub *objStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 2306 if (!objStub)
michael@0 2307 return false;
michael@0 2308
michael@0 2309 stub->addNewStub(objStub);
michael@0 2310 return true;
michael@0 2311 }
michael@0 2312
michael@0 2313 return true;
michael@0 2314 }
michael@0 2315
michael@0 2316 typedef bool (*DoToBoolFallbackFn)(JSContext *, BaselineFrame *, ICToBool_Fallback *, HandleValue,
michael@0 2317 MutableHandleValue);
michael@0 2318 static const VMFunction DoToBoolFallbackInfo = FunctionInfo<DoToBoolFallbackFn>(DoToBoolFallback);
michael@0 2319
michael@0 2320 bool
michael@0 2321 ICToBool_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 2322 {
michael@0 2323 JS_ASSERT(R0 == JSReturnOperand);
michael@0 2324
michael@0 2325 // Restore the tail call register.
michael@0 2326 EmitRestoreTailCallReg(masm);
michael@0 2327
michael@0 2328 // Push arguments.
michael@0 2329 masm.pushValue(R0);
michael@0 2330 masm.push(BaselineStubReg);
michael@0 2331 masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
michael@0 2332
michael@0 2333 return tailCallVM(DoToBoolFallbackInfo, masm);
michael@0 2334 }
michael@0 2335
michael@0 2336 //
michael@0 2337 // ToBool_Int32
michael@0 2338 //
michael@0 2339
michael@0 2340 bool
michael@0 2341 ICToBool_Int32::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 2342 {
michael@0 2343 Label failure;
michael@0 2344 masm.branchTestInt32(Assembler::NotEqual, R0, &failure);
michael@0 2345
michael@0 2346 Label ifFalse;
michael@0 2347 masm.branchTestInt32Truthy(false, R0, &ifFalse);
michael@0 2348
michael@0 2349 masm.moveValue(BooleanValue(true), R0);
michael@0 2350 EmitReturnFromIC(masm);
michael@0 2351
michael@0 2352 masm.bind(&ifFalse);
michael@0 2353 masm.moveValue(BooleanValue(false), R0);
michael@0 2354 EmitReturnFromIC(masm);
michael@0 2355
michael@0 2356 // Failure case - jump to next stub
michael@0 2357 masm.bind(&failure);
michael@0 2358 EmitStubGuardFailure(masm);
michael@0 2359 return true;
michael@0 2360 }
michael@0 2361
michael@0 2362 //
michael@0 2363 // ToBool_String
michael@0 2364 //
michael@0 2365
michael@0 2366 bool
michael@0 2367 ICToBool_String::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 2368 {
michael@0 2369 Label failure;
michael@0 2370 masm.branchTestString(Assembler::NotEqual, R0, &failure);
michael@0 2371
michael@0 2372 Label ifFalse;
michael@0 2373 masm.branchTestStringTruthy(false, R0, &ifFalse);
michael@0 2374
michael@0 2375 masm.moveValue(BooleanValue(true), R0);
michael@0 2376 EmitReturnFromIC(masm);
michael@0 2377
michael@0 2378 masm.bind(&ifFalse);
michael@0 2379 masm.moveValue(BooleanValue(false), R0);
michael@0 2380 EmitReturnFromIC(masm);
michael@0 2381
michael@0 2382 // Failure case - jump to next stub
michael@0 2383 masm.bind(&failure);
michael@0 2384 EmitStubGuardFailure(masm);
michael@0 2385 return true;
michael@0 2386 }
michael@0 2387
michael@0 2388 //
michael@0 2389 // ToBool_NullUndefined
michael@0 2390 //
michael@0 2391
michael@0 2392 bool
michael@0 2393 ICToBool_NullUndefined::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 2394 {
michael@0 2395 Label failure, ifFalse;
michael@0 2396 masm.branchTestNull(Assembler::Equal, R0, &ifFalse);
michael@0 2397 masm.branchTestUndefined(Assembler::NotEqual, R0, &failure);
michael@0 2398
michael@0 2399 masm.bind(&ifFalse);
michael@0 2400 masm.moveValue(BooleanValue(false), R0);
michael@0 2401 EmitReturnFromIC(masm);
michael@0 2402
michael@0 2403 // Failure case - jump to next stub
michael@0 2404 masm.bind(&failure);
michael@0 2405 EmitStubGuardFailure(masm);
michael@0 2406 return true;
michael@0 2407 }
michael@0 2408
michael@0 2409 //
michael@0 2410 // ToBool_Double
michael@0 2411 //
michael@0 2412
michael@0 2413 bool
michael@0 2414 ICToBool_Double::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 2415 {
michael@0 2416 Label failure, ifTrue;
michael@0 2417 masm.branchTestDouble(Assembler::NotEqual, R0, &failure);
michael@0 2418 masm.unboxDouble(R0, FloatReg0);
michael@0 2419 masm.branchTestDoubleTruthy(true, FloatReg0, &ifTrue);
michael@0 2420
michael@0 2421 masm.moveValue(BooleanValue(false), R0);
michael@0 2422 EmitReturnFromIC(masm);
michael@0 2423
michael@0 2424 masm.bind(&ifTrue);
michael@0 2425 masm.moveValue(BooleanValue(true), R0);
michael@0 2426 EmitReturnFromIC(masm);
michael@0 2427
michael@0 2428 // Failure case - jump to next stub
michael@0 2429 masm.bind(&failure);
michael@0 2430 EmitStubGuardFailure(masm);
michael@0 2431 return true;
michael@0 2432 }
michael@0 2433
michael@0 2434 //
michael@0 2435 // ToBool_Object
michael@0 2436 //
michael@0 2437
michael@0 2438 bool
michael@0 2439 ICToBool_Object::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 2440 {
michael@0 2441 Label failure, ifFalse, slowPath;
michael@0 2442 masm.branchTestObject(Assembler::NotEqual, R0, &failure);
michael@0 2443
michael@0 2444 Register objReg = masm.extractObject(R0, ExtractTemp0);
michael@0 2445 Register scratch = R1.scratchReg();
michael@0 2446 masm.branchTestObjectTruthy(false, objReg, scratch, &slowPath, &ifFalse);
michael@0 2447
michael@0 2448 // If the object doesn't emulate undefined, it evaluates to true.
michael@0 2449 masm.moveValue(BooleanValue(true), R0);
michael@0 2450 EmitReturnFromIC(masm);
michael@0 2451
michael@0 2452 masm.bind(&ifFalse);
michael@0 2453 masm.moveValue(BooleanValue(false), R0);
michael@0 2454 EmitReturnFromIC(masm);
michael@0 2455
michael@0 2456 masm.bind(&slowPath);
michael@0 2457 masm.setupUnalignedABICall(1, scratch);
michael@0 2458 masm.passABIArg(objReg);
michael@0 2459 masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, js::EmulatesUndefined));
michael@0 2460 masm.convertBoolToInt32(ReturnReg, ReturnReg);
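// EmulatesUndefined() reports whether the object is falsy, so invert the
// result to obtain its truthiness before boxing it as a boolean.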
michael@0 2461 masm.xor32(Imm32(1), ReturnReg);
michael@0 2462 masm.tagValue(JSVAL_TYPE_BOOLEAN, ReturnReg, R0);
michael@0 2463 EmitReturnFromIC(masm);
michael@0 2464
michael@0 2465 // Failure case - jump to next stub
michael@0 2466 masm.bind(&failure);
michael@0 2467 EmitStubGuardFailure(masm);
michael@0 2468 return true;
michael@0 2469 }
michael@0 2470
michael@0 2471 //
michael@0 2472 // ToNumber_Fallback
michael@0 2473 //
michael@0 2474
michael@0 2475 static bool
michael@0 2476 DoToNumberFallback(JSContext *cx, ICToNumber_Fallback *stub, HandleValue arg, MutableHandleValue ret)
michael@0 2477 {
michael@0 2478 FallbackICSpew(cx, stub, "ToNumber");
michael@0 2479 ret.set(arg);
michael@0 2480 return ToNumber(cx, ret);
michael@0 2481 }
michael@0 2482
michael@0 2483 typedef bool (*DoToNumberFallbackFn)(JSContext *, ICToNumber_Fallback *, HandleValue, MutableHandleValue);
michael@0 2484 static const VMFunction DoToNumberFallbackInfo =
michael@0 2485 FunctionInfo<DoToNumberFallbackFn>(DoToNumberFallback, PopValues(1));
michael@0 2486
michael@0 2487 bool
michael@0 2488 ICToNumber_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 2489 {
michael@0 2490 JS_ASSERT(R0 == JSReturnOperand);
michael@0 2491
michael@0 2492 // Restore the tail call register.
michael@0 2493 EmitRestoreTailCallReg(masm);
michael@0 2494
michael@0 2495 // Ensure stack is fully synced for the expression decompiler.
michael@0 2496 masm.pushValue(R0);
michael@0 2497
michael@0 2498 // Push arguments.
michael@0 2499 masm.pushValue(R0);
michael@0 2500 masm.push(BaselineStubReg);
michael@0 2501
michael@0 2502 return tailCallVM(DoToNumberFallbackInfo, masm);
michael@0 2503 }
michael@0 2504
michael@0 2505 //
michael@0 2506 // BinaryArith_Fallback
michael@0 2507 //
michael@0 2508
michael@0 2509 // Disable PGO (see bug 851490).
michael@0 2510 #if defined(_MSC_VER)
michael@0 2511 # pragma optimize("g", off)
michael@0 2512 #endif
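// DoBinaryArithFallback evaluates the arithmetic or bitwise op through the
// generic VM helpers, records whether the result was a double, and then tries
// to attach a specialized stub: string concatenation, boolean/int32,
// double/double, int32/int32, or the mixed double/int32 bitop cases below.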
michael@0 2513 static bool
michael@0 2514 DoBinaryArithFallback(JSContext *cx, BaselineFrame *frame, ICBinaryArith_Fallback *stub_,
michael@0 2515 HandleValue lhs, HandleValue rhs, MutableHandleValue ret)
michael@0 2516 {
michael@0 2517 // This fallback stub may trigger debug mode toggling.
michael@0 2518 DebugModeOSRVolatileStub<ICBinaryArith_Fallback *> stub(frame, stub_);
michael@0 2519
michael@0 2520 RootedScript script(cx, frame->script());
michael@0 2521 jsbytecode *pc = stub->icEntry()->pc(script);
michael@0 2522 JSOp op = JSOp(*pc);
michael@0 2523 FallbackICSpew(cx, stub, "BinaryArith(%s,%d,%d)", js_CodeName[op],
michael@0 2524 int(lhs.isDouble() ? JSVAL_TYPE_DOUBLE : lhs.extractNonDoubleType()),
michael@0 2525 int(rhs.isDouble() ? JSVAL_TYPE_DOUBLE : rhs.extractNonDoubleType()));
michael@0 2526
michael@0 2527 // Don't pass lhs/rhs directly, we need the original values when
michael@0 2528 // generating stubs.
michael@0 2529 RootedValue lhsCopy(cx, lhs);
michael@0 2530 RootedValue rhsCopy(cx, rhs);
michael@0 2531
michael@0 2532 // Perform the arith operation.
michael@0 2533 switch(op) {
michael@0 2534 case JSOP_ADD:
michael@0 2535 // Do an add.
michael@0 2536 if (!AddValues(cx, &lhsCopy, &rhsCopy, ret))
michael@0 2537 return false;
michael@0 2538 break;
michael@0 2539 case JSOP_SUB:
michael@0 2540 if (!SubValues(cx, &lhsCopy, &rhsCopy, ret))
michael@0 2541 return false;
michael@0 2542 break;
michael@0 2543 case JSOP_MUL:
michael@0 2544 if (!MulValues(cx, &lhsCopy, &rhsCopy, ret))
michael@0 2545 return false;
michael@0 2546 break;
michael@0 2547 case JSOP_DIV:
michael@0 2548 if (!DivValues(cx, &lhsCopy, &rhsCopy, ret))
michael@0 2549 return false;
michael@0 2550 break;
michael@0 2551 case JSOP_MOD:
michael@0 2552 if (!ModValues(cx, &lhsCopy, &rhsCopy, ret))
michael@0 2553 return false;
michael@0 2554 break;
michael@0 2555 case JSOP_BITOR: {
michael@0 2556 int32_t result;
michael@0 2557 if (!BitOr(cx, lhs, rhs, &result))
michael@0 2558 return false;
michael@0 2559 ret.setInt32(result);
michael@0 2560 break;
michael@0 2561 }
michael@0 2562 case JSOP_BITXOR: {
michael@0 2563 int32_t result;
michael@0 2564 if (!BitXor(cx, lhs, rhs, &result))
michael@0 2565 return false;
michael@0 2566 ret.setInt32(result);
michael@0 2567 break;
michael@0 2568 }
michael@0 2569 case JSOP_BITAND: {
michael@0 2570 int32_t result;
michael@0 2571 if (!BitAnd(cx, lhs, rhs, &result))
michael@0 2572 return false;
michael@0 2573 ret.setInt32(result);
michael@0 2574 break;
michael@0 2575 }
michael@0 2576 case JSOP_LSH: {
michael@0 2577 int32_t result;
michael@0 2578 if (!BitLsh(cx, lhs, rhs, &result))
michael@0 2579 return false;
michael@0 2580 ret.setInt32(result);
michael@0 2581 break;
michael@0 2582 }
michael@0 2583 case JSOP_RSH: {
michael@0 2584 int32_t result;
michael@0 2585 if (!BitRsh(cx, lhs, rhs, &result))
michael@0 2586 return false;
michael@0 2587 ret.setInt32(result);
michael@0 2588 break;
michael@0 2589 }
michael@0 2590 case JSOP_URSH: {
michael@0 2591 if (!UrshOperation(cx, lhs, rhs, ret))
michael@0 2592 return false;
michael@0 2593 break;
michael@0 2594 }
michael@0 2595 default:
michael@0 2596 MOZ_ASSUME_UNREACHABLE("Unhandled baseline arith op");
michael@0 2597 }
michael@0 2598
michael@0 2599 // Check if debug mode toggling made the stub invalid.
michael@0 2600 if (stub.invalid())
michael@0 2601 return true;
michael@0 2602
michael@0 2603 if (ret.isDouble())
michael@0 2604 stub->setSawDoubleResult();
michael@0 2605
michael@0 2606 // Check to see if a new stub should be generated.
michael@0 2607 if (stub->numOptimizedStubs() >= ICBinaryArith_Fallback::MAX_OPTIMIZED_STUBS) {
michael@0 2608 stub->noteUnoptimizableOperands();
michael@0 2609 return true;
michael@0 2610 }
michael@0 2611
michael@0 2612 // Handle string concat.
michael@0 2613 if (op == JSOP_ADD) {
michael@0 2614 if (lhs.isString() && rhs.isString()) {
michael@0 2615 IonSpew(IonSpew_BaselineIC, " Generating %s(String, String) stub", js_CodeName[op]);
michael@0 2616 JS_ASSERT(ret.isString());
michael@0 2617 ICBinaryArith_StringConcat::Compiler compiler(cx);
michael@0 2618 ICStub *strcatStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 2619 if (!strcatStub)
michael@0 2620 return false;
michael@0 2621 stub->addNewStub(strcatStub);
michael@0 2622 return true;
michael@0 2623 }
michael@0 2624
michael@0 2625 if ((lhs.isString() && rhs.isObject()) || (lhs.isObject() && rhs.isString())) {
michael@0 2626 IonSpew(IonSpew_BaselineIC, " Generating %s(%s, %s) stub", js_CodeName[op],
michael@0 2627 lhs.isString() ? "String" : "Object",
michael@0 2628 lhs.isString() ? "Object" : "String");
michael@0 2629 JS_ASSERT(ret.isString());
michael@0 2630 ICBinaryArith_StringObjectConcat::Compiler compiler(cx, lhs.isString());
michael@0 2631 ICStub *strcatStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 2632 if (!strcatStub)
michael@0 2633 return false;
michael@0 2634 stub->addNewStub(strcatStub);
michael@0 2635 return true;
michael@0 2636 }
michael@0 2637 }
michael@0 2638
michael@0 2639 if (((lhs.isBoolean() && (rhs.isBoolean() || rhs.isInt32())) ||
michael@0 2640 (rhs.isBoolean() && (lhs.isBoolean() || lhs.isInt32()))) &&
michael@0 2641 (op == JSOP_ADD || op == JSOP_SUB || op == JSOP_BITOR || op == JSOP_BITAND ||
michael@0 2642 op == JSOP_BITXOR))
michael@0 2643 {
michael@0 2644 IonSpew(IonSpew_BaselineIC, " Generating %s(%s, %s) stub", js_CodeName[op],
michael@0 2645 lhs.isBoolean() ? "Boolean" : "Int32", rhs.isBoolean() ? "Boolean" : "Int32");
michael@0 2646 ICBinaryArith_BooleanWithInt32::Compiler compiler(cx, op, lhs.isBoolean(), rhs.isBoolean());
michael@0 2647 ICStub *arithStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 2648 if (!arithStub)
michael@0 2649 return false;
michael@0 2650 stub->addNewStub(arithStub);
michael@0 2651 return true;
michael@0 2652 }
michael@0 2653
michael@0 2654 // Handle only int32 or double.
michael@0 2655 if (!lhs.isNumber() || !rhs.isNumber()) {
michael@0 2656 stub->noteUnoptimizableOperands();
michael@0 2657 return true;
michael@0 2658 }
michael@0 2659
michael@0 2660 JS_ASSERT(ret.isNumber());
michael@0 2661
michael@0 2662 if (lhs.isDouble() || rhs.isDouble() || ret.isDouble()) {
michael@0 2663 if (!cx->runtime()->jitSupportsFloatingPoint)
michael@0 2664 return true;
michael@0 2665
michael@0 2666 switch (op) {
michael@0 2667 case JSOP_ADD:
michael@0 2668 case JSOP_SUB:
michael@0 2669 case JSOP_MUL:
michael@0 2670 case JSOP_DIV:
michael@0 2671 case JSOP_MOD: {
michael@0 2672 // Unlink int32 stubs; it's faster to always use the double stub.
michael@0 2673 stub->unlinkStubsWithKind(cx, ICStub::BinaryArith_Int32);
michael@0 2674 IonSpew(IonSpew_BaselineIC, " Generating %s(Double, Double) stub", js_CodeName[op]);
michael@0 2675
michael@0 2676 ICBinaryArith_Double::Compiler compiler(cx, op);
michael@0 2677 ICStub *doubleStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 2678 if (!doubleStub)
michael@0 2679 return false;
michael@0 2680 stub->addNewStub(doubleStub);
michael@0 2681 return true;
michael@0 2682 }
michael@0 2683 default:
michael@0 2684 break;
michael@0 2685 }
michael@0 2686 }
michael@0 2687
michael@0 2688 if (lhs.isInt32() && rhs.isInt32()) {
michael@0 2689 bool allowDouble = ret.isDouble();
michael@0 2690 if (allowDouble)
michael@0 2691 stub->unlinkStubsWithKind(cx, ICStub::BinaryArith_Int32);
michael@0 2692 IonSpew(IonSpew_BaselineIC, " Generating %s(Int32, Int32%s) stub", js_CodeName[op],
michael@0 2693 allowDouble ? " => Double" : "");
michael@0 2694 ICBinaryArith_Int32::Compiler compilerInt32(cx, op, allowDouble);
michael@0 2695 ICStub *int32Stub = compilerInt32.getStub(compilerInt32.getStubSpace(script));
michael@0 2696 if (!int32Stub)
michael@0 2697 return false;
michael@0 2698 stub->addNewStub(int32Stub);
michael@0 2699 return true;
michael@0 2700 }
michael@0 2701
michael@0 2702 // Handle the Double <BITOP> Int32 and Int32 <BITOP> Double cases.
michael@0 2703 if (((lhs.isDouble() && rhs.isInt32()) || (lhs.isInt32() && rhs.isDouble())) &&
michael@0 2704 ret.isInt32())
michael@0 2705 {
michael@0 2706 switch (op) {
michael@0 2707 case JSOP_BITOR:
michael@0 2708 case JSOP_BITXOR:
michael@0 2709 case JSOP_BITAND: {
michael@0 2710 IonSpew(IonSpew_BaselineIC, " Generating %s(%s, %s) stub", js_CodeName[op],
michael@0 2711 lhs.isDouble() ? "Double" : "Int32",
michael@0 2712 lhs.isDouble() ? "Int32" : "Double");
michael@0 2713 ICBinaryArith_DoubleWithInt32::Compiler compiler(cx, op, lhs.isDouble());
michael@0 2714 ICStub *optStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 2715 if (!optStub)
michael@0 2716 return false;
michael@0 2717 stub->addNewStub(optStub);
michael@0 2718 return true;
michael@0 2719 }
michael@0 2720 default:
michael@0 2721 break;
michael@0 2722 }
michael@0 2723 }
michael@0 2724
michael@0 2725 stub->noteUnoptimizableOperands();
michael@0 2726 return true;
michael@0 2727 }
michael@0 2728 #if defined(_MSC_VER)
michael@0 2729 # pragma optimize("", on)
michael@0 2730 #endif
michael@0 2731
michael@0 2732 typedef bool (*DoBinaryArithFallbackFn)(JSContext *, BaselineFrame *, ICBinaryArith_Fallback *,
michael@0 2733 HandleValue, HandleValue, MutableHandleValue);
michael@0 2734 static const VMFunction DoBinaryArithFallbackInfo =
michael@0 2735 FunctionInfo<DoBinaryArithFallbackFn>(DoBinaryArithFallback, PopValues(2));
michael@0 2736
michael@0 2737 bool
michael@0 2738 ICBinaryArith_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 2739 {
michael@0 2740 JS_ASSERT(R0 == JSReturnOperand);
michael@0 2741
michael@0 2742 // Restore the tail call register.
michael@0 2743 EmitRestoreTailCallReg(masm);
michael@0 2744
michael@0 2745 // Ensure stack is fully synced for the expression decompiler.
michael@0 2746 masm.pushValue(R0);
michael@0 2747 masm.pushValue(R1);
michael@0 2748
michael@0 2749 // Push arguments.
michael@0 2750 masm.pushValue(R1);
michael@0 2751 masm.pushValue(R0);
michael@0 2752 masm.push(BaselineStubReg);
michael@0 2753 masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
michael@0 2754
michael@0 2755 return tailCallVM(DoBinaryArithFallbackInfo, masm);
michael@0 2756 }
michael@0 2757
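// VM helper for the string-concat stub: try a GC-free ConcatStrings first, and if that
// fails, root the operands and retry with GC allowed.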
michael@0 2758 static bool
michael@0 2759 DoConcatStrings(JSContext *cx, HandleValue lhs, HandleValue rhs, MutableHandleValue res)
michael@0 2760 {
michael@0 2761 JS_ASSERT(lhs.isString());
michael@0 2762 JS_ASSERT(rhs.isString());
michael@0 2763 JSString *lstr = lhs.toString();
michael@0 2764 JSString *rstr = rhs.toString();
michael@0 2765 JSString *result = ConcatStrings<NoGC>(cx, lstr, rstr);
michael@0 2766 if (result) {
michael@0 2767 res.set(StringValue(result));
michael@0 2768 return true;
michael@0 2769 }
michael@0 2770
michael@0 2771 RootedString rootedl(cx, lstr), rootedr(cx, rstr);
michael@0 2772 result = ConcatStrings<CanGC>(cx, rootedl, rootedr);
michael@0 2773 if (!result)
michael@0 2774 return false;
michael@0 2775
michael@0 2776 res.set(StringValue(result));
michael@0 2777 return true;
michael@0 2778 }
michael@0 2779
michael@0 2780 typedef bool (*DoConcatStringsFn)(JSContext *, HandleValue, HandleValue, MutableHandleValue);
michael@0 2781 static const VMFunction DoConcatStringsInfo = FunctionInfo<DoConcatStringsFn>(DoConcatStrings);
michael@0 2782
michael@0 2783 bool
michael@0 2784 ICBinaryArith_StringConcat::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 2785 {
michael@0 2786 Label failure;
michael@0 2787 masm.branchTestString(Assembler::NotEqual, R0, &failure);
michael@0 2788 masm.branchTestString(Assembler::NotEqual, R1, &failure);
michael@0 2789
michael@0 2790 // Restore the tail call register.
michael@0 2791 EmitRestoreTailCallReg(masm);
michael@0 2792
michael@0 2793 masm.pushValue(R1);
michael@0 2794 masm.pushValue(R0);
michael@0 2795 if (!tailCallVM(DoConcatStringsInfo, masm))
michael@0 2796 return false;
michael@0 2797
michael@0 2798 // Failure case - jump to next stub
michael@0 2799 masm.bind(&failure);
michael@0 2800 EmitStubGuardFailure(masm);
michael@0 2801 return true;
michael@0 2802 }
michael@0 2803
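// Convert an object operand to a string for concatenation: ToPrimitive first, then ToString.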
michael@0 2804 static JSString *
michael@0 2805 ConvertObjectToStringForConcat(JSContext *cx, HandleValue obj)
michael@0 2806 {
michael@0 2807 JS_ASSERT(obj.isObject());
michael@0 2808 RootedValue rootedObj(cx, obj);
michael@0 2809 if (!ToPrimitive(cx, &rootedObj))
michael@0 2810 return nullptr;
michael@0 2811 return ToString<CanGC>(cx, rootedObj);
michael@0 2812 }
michael@0 2813
michael@0 2814 static bool
michael@0 2815 DoConcatStringObject(JSContext *cx, bool lhsIsString, HandleValue lhs, HandleValue rhs,
michael@0 2816 MutableHandleValue res)
michael@0 2817 {
michael@0 2818 JSString *lstr = nullptr;
michael@0 2819 JSString *rstr = nullptr;
michael@0 2820 if (lhsIsString) {
michael@0 2821 // Convert rhs first.
michael@0 2822 JS_ASSERT(lhs.isString() && rhs.isObject());
michael@0 2823 rstr = ConvertObjectToStringForConcat(cx, rhs);
michael@0 2824 if (!rstr)
michael@0 2825 return false;
michael@0 2826
michael@0 2827 // lhs is already string.
michael@0 2828 lstr = lhs.toString();
michael@0 2829 } else {
michael@0 2830 JS_ASSERT(rhs.isString() && lhs.isObject());
michael@0 2831 // Convert lhs first.
michael@0 2832 lstr = ConvertObjectToStringForConcat(cx, lhs);
michael@0 2833 if (!lstr)
michael@0 2834 return false;
michael@0 2835
michael@0 2836 // rhs is already string.
michael@0 2837 rstr = rhs.toString();
michael@0 2838 }
michael@0 2839
michael@0 2840 JSString *str = ConcatStrings<NoGC>(cx, lstr, rstr);
michael@0 2841 if (!str) {
michael@0 2842 RootedString nlstr(cx, lstr), nrstr(cx, rstr);
michael@0 2843 str = ConcatStrings<CanGC>(cx, nlstr, nrstr);
michael@0 2844 if (!str)
michael@0 2845 return false;
michael@0 2846 }
michael@0 2847
michael@0 2848 // Technically, we need to call TypeScript::MonitorString for this PC; however,
michael@0 2849 // it was already called when this stub was attached, so it's OK.
michael@0 2850
michael@0 2851 res.setString(str);
michael@0 2852 return true;
michael@0 2853 }
michael@0 2854
michael@0 2855 typedef bool (*DoConcatStringObjectFn)(JSContext *, bool lhsIsString, HandleValue, HandleValue,
michael@0 2856 MutableHandleValue);
michael@0 2857 static const VMFunction DoConcatStringObjectInfo =
michael@0 2858 FunctionInfo<DoConcatStringObjectFn>(DoConcatStringObject, PopValues(2));
michael@0 2859
michael@0 2860 bool
michael@0 2861 ICBinaryArith_StringObjectConcat::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 2862 {
michael@0 2863 Label failure;
michael@0 2864 if (lhsIsString_) {
michael@0 2865 masm.branchTestString(Assembler::NotEqual, R0, &failure);
michael@0 2866 masm.branchTestObject(Assembler::NotEqual, R1, &failure);
michael@0 2867 } else {
michael@0 2868 masm.branchTestObject(Assembler::NotEqual, R0, &failure);
michael@0 2869 masm.branchTestString(Assembler::NotEqual, R1, &failure);
michael@0 2870 }
michael@0 2871
michael@0 2872 // Restore the tail call register.
michael@0 2873 EmitRestoreTailCallReg(masm);
michael@0 2874
michael@0 2875 // Sync for the decompiler.
michael@0 2876 masm.pushValue(R0);
michael@0 2877 masm.pushValue(R1);
michael@0 2878
michael@0 2879 // Push arguments.
michael@0 2880 masm.pushValue(R1);
michael@0 2881 masm.pushValue(R0);
michael@0 2882 masm.push(Imm32(lhsIsString_));
michael@0 2883 if (!tailCallVM(DoConcatStringObjectInfo, masm))
michael@0 2884 return false;
michael@0 2885
michael@0 2886 // Failure case - jump to next stub
michael@0 2887 masm.bind(&failure);
michael@0 2888 EmitStubGuardFailure(masm);
michael@0 2889 return true;
michael@0 2890 }
michael@0 2891
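// Double arith stub: guard that both operands are doubles (int32 operands are converted),
// compute the result in FloatReg0 (JSOP_MOD goes through an ABI call to NumberMod), and
// box the result into R0.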
michael@0 2892 bool
michael@0 2893 ICBinaryArith_Double::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 2894 {
michael@0 2895 Label failure;
michael@0 2896 masm.ensureDouble(R0, FloatReg0, &failure);
michael@0 2897 masm.ensureDouble(R1, FloatReg1, &failure);
michael@0 2898
michael@0 2899 switch (op) {
michael@0 2900 case JSOP_ADD:
michael@0 2901 masm.addDouble(FloatReg1, FloatReg0);
michael@0 2902 break;
michael@0 2903 case JSOP_SUB:
michael@0 2904 masm.subDouble(FloatReg1, FloatReg0);
michael@0 2905 break;
michael@0 2906 case JSOP_MUL:
michael@0 2907 masm.mulDouble(FloatReg1, FloatReg0);
michael@0 2908 break;
michael@0 2909 case JSOP_DIV:
michael@0 2910 masm.divDouble(FloatReg1, FloatReg0);
michael@0 2911 break;
michael@0 2912 case JSOP_MOD:
michael@0 2913 masm.setupUnalignedABICall(2, R0.scratchReg());
michael@0 2914 masm.passABIArg(FloatReg0, MoveOp::DOUBLE);
michael@0 2915 masm.passABIArg(FloatReg1, MoveOp::DOUBLE);
michael@0 2916 masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, NumberMod), MoveOp::DOUBLE);
michael@0 2917 JS_ASSERT(ReturnFloatReg == FloatReg0);
michael@0 2918 break;
michael@0 2919 default:
michael@0 2920 MOZ_ASSUME_UNREACHABLE("Unexpected op");
michael@0 2921 }
michael@0 2922
michael@0 2923 masm.boxDouble(FloatReg0, R0);
michael@0 2924 EmitReturnFromIC(masm);
michael@0 2925
michael@0 2926 // Failure case - jump to next stub
michael@0 2927 masm.bind(&failure);
michael@0 2928 EmitStubGuardFailure(masm);
michael@0 2929 return true;
michael@0 2930 }
michael@0 2931
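// Boolean/Int32 arith stub: unbox both operands into int32 registers and perform the op
// inline; an overflowing add/sub is undone before falling through to the next stub.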
michael@0 2932 bool
michael@0 2933 ICBinaryArith_BooleanWithInt32::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 2934 {
michael@0 2935 Label failure;
michael@0 2936 if (lhsIsBool_)
michael@0 2937 masm.branchTestBoolean(Assembler::NotEqual, R0, &failure);
michael@0 2938 else
michael@0 2939 masm.branchTestInt32(Assembler::NotEqual, R0, &failure);
michael@0 2940
michael@0 2941 if (rhsIsBool_)
michael@0 2942 masm.branchTestBoolean(Assembler::NotEqual, R1, &failure);
michael@0 2943 else
michael@0 2944 masm.branchTestInt32(Assembler::NotEqual, R1, &failure);
michael@0 2945
michael@0 2946 Register lhsReg = lhsIsBool_ ? masm.extractBoolean(R0, ExtractTemp0)
michael@0 2947 : masm.extractInt32(R0, ExtractTemp0);
michael@0 2948 Register rhsReg = rhsIsBool_ ? masm.extractBoolean(R1, ExtractTemp1)
michael@0 2949 : masm.extractInt32(R1, ExtractTemp1);
michael@0 2950
michael@0 2951 JS_ASSERT(op_ == JSOP_ADD || op_ == JSOP_SUB ||
michael@0 2952 op_ == JSOP_BITOR || op_ == JSOP_BITXOR || op_ == JSOP_BITAND);
michael@0 2953
michael@0 2954 switch (op_) {
michael@0 2955 case JSOP_ADD: {
michael@0 2956 Label fixOverflow;
michael@0 2957
michael@0 2958 masm.branchAdd32(Assembler::Overflow, rhsReg, lhsReg, &fixOverflow);
michael@0 2959 masm.tagValue(JSVAL_TYPE_INT32, lhsReg, R0);
michael@0 2960 EmitReturnFromIC(masm);
michael@0 2961
michael@0 2962 masm.bind(&fixOverflow);
michael@0 2963 masm.sub32(rhsReg, lhsReg);
michael@0 2964 // Proceed to failure below.
michael@0 2965 break;
michael@0 2966 }
michael@0 2967 case JSOP_SUB: {
michael@0 2968 Label fixOverflow;
michael@0 2969
michael@0 2970 masm.branchSub32(Assembler::Overflow, rhsReg, lhsReg, &fixOverflow);
michael@0 2971 masm.tagValue(JSVAL_TYPE_INT32, lhsReg, R0);
michael@0 2972 EmitReturnFromIC(masm);
michael@0 2973
michael@0 2974 masm.bind(&fixOverflow);
michael@0 2975 masm.add32(rhsReg, lhsReg);
michael@0 2976 // Proceed to failure below.
michael@0 2977 break;
michael@0 2978 }
michael@0 2979 case JSOP_BITOR: {
michael@0 2980 masm.orPtr(rhsReg, lhsReg);
michael@0 2981 masm.tagValue(JSVAL_TYPE_INT32, lhsReg, R0);
michael@0 2982 EmitReturnFromIC(masm);
michael@0 2983 break;
michael@0 2984 }
michael@0 2985 case JSOP_BITXOR: {
michael@0 2986 masm.xorPtr(rhsReg, lhsReg);
michael@0 2987 masm.tagValue(JSVAL_TYPE_INT32, lhsReg, R0);
michael@0 2988 EmitReturnFromIC(masm);
michael@0 2989 break;
michael@0 2990 }
michael@0 2991 case JSOP_BITAND: {
michael@0 2992 masm.andPtr(rhsReg, lhsReg);
michael@0 2993 masm.tagValue(JSVAL_TYPE_INT32, lhsReg, R0);
michael@0 2994 EmitReturnFromIC(masm);
michael@0 2995 break;
michael@0 2996 }
michael@0 2997 default:
michael@0 2998 MOZ_ASSUME_UNREACHABLE("Unhandled op for BinaryArith_BooleanWithInt32.");
michael@0 2999 }
michael@0 3000
michael@0 3001 // Failure case - jump to next stub
michael@0 3002 masm.bind(&failure);
michael@0 3003 EmitStubGuardFailure(masm);
michael@0 3004 return true;
michael@0 3005 }
michael@0 3006
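// Double <BITOP> Int32 stub: truncate the double operand to an int32 (calling js::ToInt32
// through the ABI when the inline truncation fails) and apply the bitwise op.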
michael@0 3007 bool
michael@0 3008 ICBinaryArith_DoubleWithInt32::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 3009 {
michael@0 3010 JS_ASSERT(op == JSOP_BITOR || op == JSOP_BITAND || op == JSOP_BITXOR);
michael@0 3011
michael@0 3012 Label failure;
michael@0 3013 Register intReg;
michael@0 3014 Register scratchReg;
michael@0 3015 if (lhsIsDouble_) {
michael@0 3016 masm.branchTestDouble(Assembler::NotEqual, R0, &failure);
michael@0 3017 masm.branchTestInt32(Assembler::NotEqual, R1, &failure);
michael@0 3018 intReg = masm.extractInt32(R1, ExtractTemp0);
michael@0 3019 masm.unboxDouble(R0, FloatReg0);
michael@0 3020 scratchReg = R0.scratchReg();
michael@0 3021 } else {
michael@0 3022 masm.branchTestInt32(Assembler::NotEqual, R0, &failure);
michael@0 3023 masm.branchTestDouble(Assembler::NotEqual, R1, &failure);
michael@0 3024 intReg = masm.extractInt32(R0, ExtractTemp0);
michael@0 3025 masm.unboxDouble(R1, FloatReg0);
michael@0 3026 scratchReg = R1.scratchReg();
michael@0 3027 }
michael@0 3028
michael@0 3029 // Truncate the double to an int32.
michael@0 3030 {
michael@0 3031 Label doneTruncate;
michael@0 3032 Label truncateABICall;
michael@0 3033 masm.branchTruncateDouble(FloatReg0, scratchReg, &truncateABICall);
michael@0 3034 masm.jump(&doneTruncate);
michael@0 3035
michael@0 3036 masm.bind(&truncateABICall);
michael@0 3037 masm.push(intReg);
michael@0 3038 masm.setupUnalignedABICall(1, scratchReg);
michael@0 3039 masm.passABIArg(FloatReg0, MoveOp::DOUBLE);
michael@0 3040 masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, js::ToInt32));
michael@0 3041 masm.storeCallResult(scratchReg);
michael@0 3042 masm.pop(intReg);
michael@0 3043
michael@0 3044 masm.bind(&doneTruncate);
michael@0 3045 }
michael@0 3046
michael@0 3047 Register intReg2 = scratchReg;
michael@0 3048 // All handled ops commute, so no need to worry about ordering.
michael@0 3049 switch (op) {
michael@0 3050 case JSOP_BITOR:
michael@0 3051 masm.orPtr(intReg, intReg2);
michael@0 3052 break;
michael@0 3053 case JSOP_BITXOR:
michael@0 3054 masm.xorPtr(intReg, intReg2);
michael@0 3055 break;
michael@0 3056 case JSOP_BITAND:
michael@0 3057 masm.andPtr(intReg, intReg2);
michael@0 3058 break;
michael@0 3059 default:
michael@0 3060 MOZ_ASSUME_UNREACHABLE("Unhandled op for BinaryArith_DoubleWithInt32.");
michael@0 3061 }
michael@0 3062 masm.tagValue(JSVAL_TYPE_INT32, intReg2, R0);
michael@0 3063 EmitReturnFromIC(masm);
michael@0 3064
michael@0 3065 // Failure case - jump to next stub
michael@0 3066 masm.bind(&failure);
michael@0 3067 EmitStubGuardFailure(masm);
michael@0 3068 return true;
michael@0 3069 }
michael@0 3070
michael@0 3071 //
michael@0 3072 // UnaryArith_Fallback
michael@0 3073 //
michael@0 3074
michael@0 3075 // Disable PGO (see bug 851490).
michael@0 3076 #if defined(_MSC_VER)
michael@0 3077 # pragma optimize("g", off)
michael@0 3078 #endif
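// Fallback for JSOP_BITNOT and JSOP_NEG: compute the result in the VM, then try to attach
// an Int32- or Double-specialized stub based on the observed operand and result types.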
michael@0 3079 static bool
michael@0 3080 DoUnaryArithFallback(JSContext *cx, BaselineFrame *frame, ICUnaryArith_Fallback *stub_,
michael@0 3081 HandleValue val, MutableHandleValue res)
michael@0 3082 {
michael@0 3083 // This fallback stub may trigger debug mode toggling.
michael@0 3084 DebugModeOSRVolatileStub<ICUnaryArith_Fallback *> stub(frame, stub_);
michael@0 3085
michael@0 3086 RootedScript script(cx, frame->script());
michael@0 3087 jsbytecode *pc = stub->icEntry()->pc(script);
michael@0 3088 JSOp op = JSOp(*pc);
michael@0 3089 FallbackICSpew(cx, stub, "UnaryArith(%s)", js_CodeName[op]);
michael@0 3090
michael@0 3091 switch (op) {
michael@0 3092 case JSOP_BITNOT: {
michael@0 3093 int32_t result;
michael@0 3094 if (!BitNot(cx, val, &result))
michael@0 3095 return false;
michael@0 3096 res.setInt32(result);
michael@0 3097 break;
michael@0 3098 }
michael@0 3099 case JSOP_NEG:
michael@0 3100 if (!NegOperation(cx, script, pc, val, res))
michael@0 3101 return false;
michael@0 3102 break;
michael@0 3103 default:
michael@0 3104 MOZ_ASSUME_UNREACHABLE("Unexpected op");
michael@0 3105 }
michael@0 3106
michael@0 3107 // Check if debug mode toggling made the stub invalid.
michael@0 3108 if (stub.invalid())
michael@0 3109 return true;
michael@0 3110
michael@0 3111 if (res.isDouble())
michael@0 3112 stub->setSawDoubleResult();
michael@0 3113
michael@0 3114 if (stub->numOptimizedStubs() >= ICUnaryArith_Fallback::MAX_OPTIMIZED_STUBS) {
michael@0 3115 // TODO: Discard/replace stubs.
michael@0 3116 return true;
michael@0 3117 }
michael@0 3118
michael@0 3119 if (val.isInt32() && res.isInt32()) {
michael@0 3120 IonSpew(IonSpew_BaselineIC, " Generating %s(Int32 => Int32) stub", js_CodeName[op]);
michael@0 3121 ICUnaryArith_Int32::Compiler compiler(cx, op);
michael@0 3122 ICStub *int32Stub = compiler.getStub(compiler.getStubSpace(script));
michael@0 3123 if (!int32Stub)
michael@0 3124 return false;
michael@0 3125 stub->addNewStub(int32Stub);
michael@0 3126 return true;
michael@0 3127 }
michael@0 3128
michael@0 3129 if (val.isNumber() && res.isNumber() && cx->runtime()->jitSupportsFloatingPoint) {
michael@0 3130 IonSpew(IonSpew_BaselineIC, " Generating %s(Number => Number) stub", js_CodeName[op]);
michael@0 3131
michael@0 3132 // Unlink int32 stubs; the double stub handles both cases and TI specializes for both.
michael@0 3133 stub->unlinkStubsWithKind(cx, ICStub::UnaryArith_Int32);
michael@0 3134
michael@0 3135 ICUnaryArith_Double::Compiler compiler(cx, op);
michael@0 3136 ICStub *doubleStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 3137 if (!doubleStub)
michael@0 3138 return false;
michael@0 3139 stub->addNewStub(doubleStub);
michael@0 3140 return true;
michael@0 3141 }
michael@0 3142
michael@0 3143 return true;
michael@0 3144 }
michael@0 3145 #if defined(_MSC_VER)
michael@0 3146 # pragma optimize("", on)
michael@0 3147 #endif
michael@0 3148
michael@0 3149 typedef bool (*DoUnaryArithFallbackFn)(JSContext *, BaselineFrame *, ICUnaryArith_Fallback *,
michael@0 3150 HandleValue, MutableHandleValue);
michael@0 3151 static const VMFunction DoUnaryArithFallbackInfo =
michael@0 3152 FunctionInfo<DoUnaryArithFallbackFn>(DoUnaryArithFallback, PopValues(1));
michael@0 3153
michael@0 3154 bool
michael@0 3155 ICUnaryArith_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 3156 {
michael@0 3157 JS_ASSERT(R0 == JSReturnOperand);
michael@0 3158
michael@0 3159 // Restore the tail call register.
michael@0 3160 EmitRestoreTailCallReg(masm);
michael@0 3161
michael@0 3162 // Ensure stack is fully synced for the expression decompiler.
michael@0 3163 masm.pushValue(R0);
michael@0 3164
michael@0 3165 // Push arguments.
michael@0 3166 masm.pushValue(R0);
michael@0 3167 masm.push(BaselineStubReg);
michael@0 3168 masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
michael@0 3169
michael@0 3170 return tailCallVM(DoUnaryArithFallbackInfo, masm);
michael@0 3171 }
michael@0 3172
michael@0 3173 bool
michael@0 3174 ICUnaryArith_Double::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 3175 {
michael@0 3176 Label failure;
michael@0 3177 masm.ensureDouble(R0, FloatReg0, &failure);
michael@0 3178
michael@0 3179 JS_ASSERT(op == JSOP_NEG || op == JSOP_BITNOT);
michael@0 3180
michael@0 3181 if (op == JSOP_NEG) {
michael@0 3182 masm.negateDouble(FloatReg0);
michael@0 3183 masm.boxDouble(FloatReg0, R0);
michael@0 3184 } else {
michael@0 3185 // Truncate the double to an int32.
michael@0 3186 Register scratchReg = R1.scratchReg();
michael@0 3187
michael@0 3188 Label doneTruncate;
michael@0 3189 Label truncateABICall;
michael@0 3190 masm.branchTruncateDouble(FloatReg0, scratchReg, &truncateABICall);
michael@0 3191 masm.jump(&doneTruncate);
michael@0 3192
michael@0 3193 masm.bind(&truncateABICall);
michael@0 3194 masm.setupUnalignedABICall(1, scratchReg);
michael@0 3195 masm.passABIArg(FloatReg0, MoveOp::DOUBLE);
michael@0 3196 masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, js::ToInt32));
michael@0 3197 masm.storeCallResult(scratchReg);
michael@0 3198
michael@0 3199 masm.bind(&doneTruncate);
michael@0 3200 masm.not32(scratchReg);
michael@0 3201 masm.tagValue(JSVAL_TYPE_INT32, scratchReg, R0);
michael@0 3202 }
michael@0 3203
michael@0 3204 EmitReturnFromIC(masm);
michael@0 3205
michael@0 3206 // Failure case - jump to next stub
michael@0 3207 masm.bind(&failure);
michael@0 3208 EmitStubGuardFailure(masm);
michael@0 3209 return true;
michael@0 3210 }
michael@0 3211
michael@0 3212 //
michael@0 3213 // GetElem_Fallback
michael@0 3214 //
michael@0 3215
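// Compute where a slot lives: either a fixed-slot offset inside the object or an index
// into its dynamic slots array.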
michael@0 3216 static void GetFixedOrDynamicSlotOffset(HandleObject obj, uint32_t slot,
michael@0 3217 bool *isFixed, uint32_t *offset)
michael@0 3218 {
michael@0 3219 JS_ASSERT(isFixed);
michael@0 3220 JS_ASSERT(offset);
michael@0 3221 *isFixed = obj->isFixedSlot(slot);
michael@0 3222 *offset = *isFixed ? JSObject::getFixedSlotOffset(slot)
michael@0 3223 : obj->dynamicSlotIndex(slot) * sizeof(Value);
michael@0 3224 }
michael@0 3225
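// A proxy is a cacheable DOM proxy if it uses the DOM proxy handler family and reserves
// a fixed slot for the expando object.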
michael@0 3226 static bool
michael@0 3227 IsCacheableDOMProxy(JSObject *obj)
michael@0 3228 {
michael@0 3229 if (!obj->is<ProxyObject>())
michael@0 3230 return false;
michael@0 3231
michael@0 3232 BaseProxyHandler *handler = obj->as<ProxyObject>().handler();
michael@0 3233
michael@0 3234 if (handler->family() != GetDOMProxyHandlerFamily())
michael@0 3235 return false;
michael@0 3236
michael@0 3237 if (obj->numFixedSlots() <= GetDOMProxyExpandoSlot())
michael@0 3238 return false;
michael@0 3239
michael@0 3240 return true;
michael@0 3241 }
michael@0 3242
michael@0 3243 static JSObject *
michael@0 3244 GetDOMProxyProto(JSObject *obj)
michael@0 3245 {
michael@0 3246 JS_ASSERT(IsCacheableDOMProxy(obj));
michael@0 3247 return obj->getTaggedProto().toObjectOrNull();
michael@0 3248 }
michael@0 3249
michael@0 3250 static void
michael@0 3251 GenerateDOMProxyChecks(JSContext *cx, MacroAssembler &masm, Register object,
michael@0 3252 Address checkProxyHandlerAddr,
michael@0 3253 Address *checkExpandoShapeAddr,
michael@0 3254 Address *expandoAndGenerationAddr,
michael@0 3255 Address *generationAddr,
michael@0 3256 Register scratch,
michael@0 3257 GeneralRegisterSet &domProxyRegSet,
michael@0 3258 Label *checkFailed)
michael@0 3259 {
michael@0 3260 // Guard the following:
michael@0 3261 // 1. The object is a DOMProxy.
michael@0 3262 // 2. The object does not have expando properties, or has an expando
michael@0 3263 // which is known to not have the desired property.
michael@0 3264 Address handlerAddr(object, ProxyObject::offsetOfHandler());
michael@0 3265 Address expandoAddr(object, JSObject::getFixedSlotOffset(GetDOMProxyExpandoSlot()));
michael@0 3266
michael@0 3267 // Check that object is a DOMProxy.
michael@0 3268 masm.loadPtr(checkProxyHandlerAddr, scratch);
michael@0 3269 masm.branchPrivatePtr(Assembler::NotEqual, handlerAddr, scratch, checkFailed);
michael@0 3270
michael@0 3271 // At this point, if not checking for an expando object, just return.
michael@0 3272 if (!checkExpandoShapeAddr)
michael@0 3273 return;
michael@0 3274
michael@0 3275 // For the remaining code, we need to reserve some registers to load a value.
michael@0 3276 // This is ugly, but unavoidable.
michael@0 3277 ValueOperand tempVal = domProxyRegSet.takeAnyValue();
michael@0 3278 masm.pushValue(tempVal);
michael@0 3279
michael@0 3280 Label failDOMProxyCheck;
michael@0 3281 Label domProxyOk;
michael@0 3282
michael@0 3283 if (expandoAndGenerationAddr) {
michael@0 3284 JS_ASSERT(generationAddr);
michael@0 3285
michael@0 3286 masm.loadPtr(*expandoAndGenerationAddr, tempVal.scratchReg());
michael@0 3287 masm.branchPrivatePtr(Assembler::NotEqual, expandoAddr, tempVal.scratchReg(),
michael@0 3288 &failDOMProxyCheck);
michael@0 3289
michael@0 3290 masm.load32(*generationAddr, scratch);
michael@0 3291 masm.branch32(Assembler::NotEqual,
michael@0 3292 Address(tempVal.scratchReg(), offsetof(ExpandoAndGeneration, generation)),
michael@0 3293 scratch, &failDOMProxyCheck);
michael@0 3294
michael@0 3295 masm.loadValue(Address(tempVal.scratchReg(), 0), tempVal);
michael@0 3296 } else {
michael@0 3297 masm.loadValue(expandoAddr, tempVal);
michael@0 3298 }
michael@0 3299
michael@0 3300 // If the incoming object does not have an expando object then we're sure we're not
michael@0 3301 // shadowing.
michael@0 3302 masm.branchTestUndefined(Assembler::Equal, tempVal, &domProxyOk);
michael@0 3303
michael@0 3304 // The reference object used to generate this check may not have had an
michael@0 3305 // expando object at all, in which case the presence of a non-undefined
michael@0 3306 // expando value in the incoming object is automatically a failure.
michael@0 3307 masm.loadPtr(*checkExpandoShapeAddr, scratch);
michael@0 3308 masm.branchPtr(Assembler::Equal, scratch, ImmPtr(nullptr), &failDOMProxyCheck);
michael@0 3309
michael@0 3310 // Otherwise, ensure that the incoming object has an object for its expando value and that
michael@0 3311 // the shape matches.
michael@0 3312 masm.branchTestObject(Assembler::NotEqual, tempVal, &failDOMProxyCheck);
michael@0 3313 Register objReg = masm.extractObject(tempVal, tempVal.scratchReg());
michael@0 3314 masm.branchTestObjShape(Assembler::Equal, objReg, scratch, &domProxyOk);
michael@0 3315
michael@0 3316 // Failure case: restore the tempVal registers and jump to failures.
michael@0 3317 masm.bind(&failDOMProxyCheck);
michael@0 3318 masm.popValue(tempVal);
michael@0 3319 masm.jump(checkFailed);
michael@0 3320
michael@0 3321 // Success case: restore the tempval and proceed.
michael@0 3322 masm.bind(&domProxyOk);
michael@0 3323 masm.popValue(tempVal);
michael@0 3324 }
michael@0 3325
michael@0 3326 // Look up a property's shape on an object, being careful never to perform any
michael@0 3327 // effectful operations. Note that this procedure failing to yield a shape does
michael@0 3328 // not mean the property is absent from the object.
michael@0 3329 static bool
michael@0 3330 EffectlesslyLookupProperty(JSContext *cx, HandleObject obj, HandlePropertyName name,
michael@0 3331 MutableHandleObject holder, MutableHandleShape shape,
michael@0 3332 bool *checkDOMProxy=nullptr,
michael@0 3333 DOMProxyShadowsResult *shadowsResult=nullptr,
michael@0 3334 bool *domProxyHasGeneration=nullptr)
michael@0 3335 {
michael@0 3336 shape.set(nullptr);
michael@0 3337 holder.set(nullptr);
michael@0 3338
michael@0 3339 if (checkDOMProxy)
michael@0 3340 *checkDOMProxy = false;
michael@0 3341
michael@0 3342 // Check for a cacheable DOM proxy if asked to.
michael@0 3343 RootedObject checkObj(cx, obj);
michael@0 3344 if (checkDOMProxy && IsCacheableDOMProxy(obj)) {
michael@0 3345 JS_ASSERT(domProxyHasGeneration);
michael@0 3346 JS_ASSERT(shadowsResult);
michael@0 3347
michael@0 3348 *checkDOMProxy = true;
michael@0 3349 if (obj->hasUncacheableProto())
michael@0 3350 return true;
michael@0 3351
michael@0 3352 RootedId id(cx, NameToId(name));
michael@0 3353 *shadowsResult = GetDOMProxyShadowsCheck()(cx, obj, id);
michael@0 3354 if (*shadowsResult == ShadowCheckFailed)
michael@0 3355 return false;
michael@0 3356
michael@0 3357 if (*shadowsResult == Shadows) {
michael@0 3358 holder.set(obj);
michael@0 3359 return true;
michael@0 3360 }
michael@0 3361
michael@0 3362 *domProxyHasGeneration = (*shadowsResult == DoesntShadowUnique);
michael@0 3363
michael@0 3364 checkObj = GetDOMProxyProto(obj);
michael@0 3365 if (!checkObj)
michael@0 3366 return true;
michael@0 3367 } else if (!obj->isNative()) {
michael@0 3368 return true;
michael@0 3369 }
michael@0 3370
michael@0 3371 if (checkObj->hasIdempotentProtoChain()) {
michael@0 3372 if (!JSObject::lookupProperty(cx, checkObj, name, holder, shape))
michael@0 3373 return false;
michael@0 3374 } else if (checkObj->isNative()) {
michael@0 3375 shape.set(checkObj->nativeLookup(cx, NameToId(name)));
michael@0 3376 if (shape)
michael@0 3377 holder.set(checkObj);
michael@0 3378 }
michael@0 3379 return true;
michael@0 3380 }
michael@0 3381
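// Check that the prototype chain from |obj| up to |holder| consists only of native
// objects whose prototypes are cacheable (no prototype guards required).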
michael@0 3382 static bool
michael@0 3383 IsCacheableProtoChain(JSObject *obj, JSObject *holder, bool isDOMProxy=false)
michael@0 3384 {
michael@0 3385 JS_ASSERT_IF(isDOMProxy, IsCacheableDOMProxy(obj));
michael@0 3386 JS_ASSERT_IF(!isDOMProxy, obj->isNative());
michael@0 3387
michael@0 3388 // Don't handle objects which require a prototype guard. This should
michael@0 3389 // be uncommon so handling it is likely not worth the complexity.
michael@0 3390 if (obj->hasUncacheableProto())
michael@0 3391 return false;
michael@0 3392
michael@0 3393 JSObject *cur = obj;
michael@0 3394 while (cur != holder) {
michael@0 3395 // We cannot assume that we find the holder object on the prototype
michael@0 3396 // chain and must check for null proto. The prototype chain can be
michael@0 3397 // altered during the lookupProperty call.
michael@0 3398 JSObject *proto;
michael@0 3399 if (isDOMProxy && cur == obj)
michael@0 3400 proto = cur->getTaggedProto().toObjectOrNull();
michael@0 3401 else
michael@0 3402 proto = cur->getProto();
michael@0 3403
michael@0 3404 if (!proto || !proto->isNative())
michael@0 3405 return false;
michael@0 3406
michael@0 3407 if (proto->hasUncacheableProto())
michael@0 3408 return false;
michael@0 3409
michael@0 3410 cur = proto;
michael@0 3411 }
michael@0 3412 return true;
michael@0 3413 }
michael@0 3414
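// A property get is compilable as a plain slot read if the shape has a slot, uses the
// default getter, and the prototype chain is cacheable.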
michael@0 3415 static bool
michael@0 3416 IsCacheableGetPropReadSlot(JSObject *obj, JSObject *holder, Shape *shape, bool isDOMProxy=false)
michael@0 3417 {
michael@0 3418 if (!shape || !IsCacheableProtoChain(obj, holder, isDOMProxy))
michael@0 3419 return false;
michael@0 3420
michael@0 3421 if (!shape->hasSlot() || !shape->hasDefaultGetter())
michael@0 3422 return false;
michael@0 3423
michael@0 3424 return true;
michael@0 3425 }
michael@0 3426
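// A property get is compilable as a getter call if the shape holds a getter function
// (native, or scripted with JIT code) and the prototype chain is cacheable; *isScripted
// reports which kind of getter was found.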
michael@0 3427 static bool
michael@0 3428 IsCacheableGetPropCall(JSContext *cx, JSObject *obj, JSObject *holder, Shape *shape, bool *isScripted,
michael@0 3429 bool isDOMProxy=false)
michael@0 3430 {
michael@0 3431 JS_ASSERT(isScripted);
michael@0 3432
michael@0 3433 if (!shape || !IsCacheableProtoChain(obj, holder, isDOMProxy))
michael@0 3434 return false;
michael@0 3435
michael@0 3436 if (shape->hasSlot() || shape->hasDefaultGetter())
michael@0 3437 return false;
michael@0 3438
michael@0 3439 if (!shape->hasGetterValue())
michael@0 3440 return false;
michael@0 3441
michael@0 3442 if (!shape->getterValue().isObject() || !shape->getterObject()->is<JSFunction>())
michael@0 3443 return false;
michael@0 3444
michael@0 3445 JSFunction *func = &shape->getterObject()->as<JSFunction>();
michael@0 3446
michael@0 3447 #ifdef JSGC_GENERATIONAL
michael@0 3448 // Information from get prop call ICs may be used directly from Ion code,
michael@0 3449 // and should not be nursery allocated.
michael@0 3450 if (cx->runtime()->gcNursery.isInside(holder) || cx->runtime()->gcNursery.isInside(func))
michael@0 3451 return false;
michael@0 3452 #endif
michael@0 3453
michael@0 3454 if (func->isNative()) {
michael@0 3455 *isScripted = false;
michael@0 3456 return true;
michael@0 3457 }
michael@0 3458
michael@0 3459 if (!func->hasJITCode())
michael@0 3460 return false;
michael@0 3461
michael@0 3462 *isScripted = true;
michael@0 3463 return true;
michael@0 3464 }
michael@0 3465
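// A property set is compilable as a direct slot write if the object's shape did not
// change, the property lives on |obj| itself, and it is a writable data property with
// the default setter.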
michael@0 3466 static bool
michael@0 3467 IsCacheableSetPropWriteSlot(JSObject *obj, Shape *oldShape, JSObject *holder, Shape *shape)
michael@0 3468 {
michael@0 3469 if (!shape)
michael@0 3470 return false;
michael@0 3471
michael@0 3472 // Object shape must not have changed during the property set.
michael@0 3473 if (obj->lastProperty() != oldShape)
michael@0 3474 return false;
michael@0 3475
michael@0 3476 // Currently we only optimize direct writes.
michael@0 3477 if (obj != holder)
michael@0 3478 return false;
michael@0 3479
michael@0 3480 if (!shape->hasSlot() || !shape->hasDefaultSetter() || !shape->writable())
michael@0 3481 return false;
michael@0 3482
michael@0 3483 return true;
michael@0 3484 }
michael@0 3485
michael@0 3486 static bool
michael@0 3487 IsCacheableSetPropAddSlot(JSContext *cx, HandleObject obj, HandleShape oldShape, uint32_t oldSlots,
michael@0 3488 HandleId id, HandleObject holder, HandleShape shape,
michael@0 3489 size_t *protoChainDepth)
michael@0 3490 {
michael@0 3491 if (!shape)
michael@0 3492 return false;
michael@0 3493
michael@0 3494 // The property must be set directly on the object and be its last added property.
michael@0 3495 if (obj != holder || shape != obj->lastProperty())
michael@0 3496 return false;
michael@0 3497
michael@0 3498 // The object must be extensible, and oldShape must be the immediate parent of the new shape.
michael@0 3499 if (!obj->nonProxyIsExtensible() || obj->lastProperty()->previous() != oldShape)
michael@0 3500 return false;
michael@0 3501
michael@0 3502 // Basic shape checks.
michael@0 3503 if (shape->inDictionary() || !shape->hasSlot() || !shape->hasDefaultSetter() ||
michael@0 3504 !shape->writable())
michael@0 3505 {
michael@0 3506 return false;
michael@0 3507 }
michael@0 3508
michael@0 3509 // If the object has a non-default resolve hook, don't inline.
michael@0 3510 if (obj->getClass()->resolve != JS_ResolveStub)
michael@0 3511 return false;
michael@0 3512
michael@0 3513 size_t chainDepth = 0;
michael@0 3514 // Walk up the object's prototype chain and ensure that all prototypes are
michael@0 3515 // native and that none of them defines a non-default setter for the property.
michael@0 3516 for (JSObject *proto = obj->getProto(); proto; proto = proto->getProto()) {
michael@0 3517 chainDepth++;
michael@0 3518 // If the prototype is non-native, don't optimize.
michael@0 3519 if (!proto->isNative())
michael@0 3520 return false;
michael@0 3521
michael@0 3522 // If the prototype defines this property with a non-default setter, don't optimize.
michael@0 3523 Shape *protoShape = proto->nativeLookup(cx, id);
michael@0 3524 if (protoShape && !protoShape->hasDefaultSetter())
michael@0 3525 return false;
michael@0 3526
michael@0 3527 // Otherwise, if there's no such property, watch out for a resolve hook that would need
michael@0 3528 // to be invoked and thus prevent inlining of property addition.
michael@0 3529 if (proto->getClass()->resolve != JS_ResolveStub)
michael@0 3530 return false;
michael@0 3531 }
michael@0 3532
michael@0 3533 // Only add an IC entry if the dynamic slots didn't change when the shapes
michael@0 3534 // changed. Need to ensure that a shape change for a subsequent object
michael@0 3535 // won't involve reallocating the slot array.
michael@0 3536 if (obj->numDynamicSlots() != oldSlots)
michael@0 3537 return false;
michael@0 3538
michael@0 3539 *protoChainDepth = chainDepth;
michael@0 3540 return true;
michael@0 3541 }
michael@0 3542
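// A property set is compilable as a setter call if the setter function lives on a
// prototype and is native or scripted with JIT code; *isScripted reports which kind.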
michael@0 3543 static bool
michael@0 3544 IsCacheableSetPropCall(JSContext *cx, JSObject *obj, JSObject *holder, Shape *shape, bool *isScripted)
michael@0 3545 {
michael@0 3546 JS_ASSERT(isScripted);
michael@0 3547
michael@0 3548 // Currently we only optimize setter calls for setters bound on prototypes.
michael@0 3549 if (obj == holder)
michael@0 3550 return false;
michael@0 3551
michael@0 3552 if (!shape || !IsCacheableProtoChain(obj, holder))
michael@0 3553 return false;
michael@0 3554
michael@0 3555 if (shape->hasSlot() || shape->hasDefaultSetter())
michael@0 3556 return false;
michael@0 3557
michael@0 3558 if (!shape->hasSetterValue())
michael@0 3559 return false;
michael@0 3560
michael@0 3561 if (!shape->setterValue().isObject() || !shape->setterObject()->is<JSFunction>())
michael@0 3562 return false;
michael@0 3563
michael@0 3564 JSFunction *func = &shape->setterObject()->as<JSFunction>();
michael@0 3565
michael@0 3566 #ifdef JSGC_GENERATIONAL
michael@0 3567 // Information from set prop call ICs may be used directly from Ion code,
michael@0 3568 // and should not be nursery allocated.
michael@0 3569 if (cx->runtime()->gcNursery.isInside(holder) || cx->runtime()->gcNursery.isInside(func))
michael@0 3570 return false;
michael@0 3571 #endif
michael@0 3572
michael@0 3573 if (func->isNative()) {
michael@0 3574 *isScripted = false;
michael@0 3575 return true;
michael@0 3576 }
michael@0 3577
michael@0 3578 if (!func->hasJITCode())
michael@0 3579 return false;
michael@0 3580
michael@0 3581 *isScripted = true;
michael@0 3582 return true;
michael@0 3583 }
michael@0 3584
michael@0 3585 static bool
michael@0 3586 LookupNoSuchMethodHandler(JSContext *cx, HandleObject obj, HandleValue id,
michael@0 3587 MutableHandleValue result)
michael@0 3588 {
michael@0 3589 return OnUnknownMethod(cx, obj, id, result);
michael@0 3590 }
michael@0 3591
michael@0 3592 typedef bool (*LookupNoSuchMethodHandlerFn)(JSContext *, HandleObject, HandleValue,
michael@0 3593 MutableHandleValue);
michael@0 3594 static const VMFunction LookupNoSuchMethodHandlerInfo =
michael@0 3595 FunctionInfo<LookupNoSuchMethodHandlerFn>(LookupNoSuchMethodHandler);
michael@0 3596
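// Return true if the chain already contains a native GetElem stub matching this object
// shape, property name, holder (and holder shape), and atomization requirement.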
michael@0 3597 static bool
michael@0 3598 GetElemNativeStubExists(ICGetElem_Fallback *stub, HandleObject obj, HandleObject holder,
michael@0 3599 HandlePropertyName propName, bool needsAtomize)
michael@0 3600 {
michael@0 3601 bool indirect = (obj.get() != holder.get());
michael@0 3602
michael@0 3603 for (ICStubConstIterator iter = stub->beginChainConst(); !iter.atEnd(); iter++) {
michael@0 3604 if (iter->kind() != ICStub::GetElem_NativeSlot &&
michael@0 3605 iter->kind() != ICStub::GetElem_NativePrototypeSlot &&
michael@0 3606 iter->kind() != ICStub::GetElem_NativePrototypeCallNative &&
michael@0 3607 iter->kind() != ICStub::GetElem_NativePrototypeCallScripted)
michael@0 3608 {
michael@0 3609 continue;
michael@0 3610 }
michael@0 3611
michael@0 3612 if (indirect && (iter->kind() != ICStub::GetElem_NativePrototypeSlot &&
michael@0 3613 iter->kind() != ICStub::GetElem_NativePrototypeCallNative &&
michael@0 3614 iter->kind() != ICStub::GetElem_NativePrototypeCallScripted))
michael@0 3615 {
michael@0 3616 continue;
michael@0 3617 }
michael@0 3618
michael@0 3619 ICGetElemNativeStub *getElemNativeStub = reinterpret_cast<ICGetElemNativeStub *>(*iter);
michael@0 3620 if (propName != getElemNativeStub->name())
michael@0 3621 continue;
michael@0 3622
michael@0 3623 if (obj->lastProperty() != getElemNativeStub->shape())
michael@0 3624 continue;
michael@0 3625
michael@0 3626 // If the new stub needs atomization, and the old stub doesn't atomize, then
michael@0 3627 // an appropriate stub doesn't exist.
michael@0 3628 if (needsAtomize && !getElemNativeStub->needsAtomize())
michael@0 3629 continue;
michael@0 3630
michael@0 3631 // For prototype gets, check the holder and holder shape.
michael@0 3632 if (indirect) {
michael@0 3633 if (iter->isGetElem_NativePrototypeSlot()) {
michael@0 3634 ICGetElem_NativePrototypeSlot *protoStub = iter->toGetElem_NativePrototypeSlot();
michael@0 3635
michael@0 3636 if (holder != protoStub->holder())
michael@0 3637 continue;
michael@0 3638
michael@0 3639 if (holder->lastProperty() != protoStub->holderShape())
michael@0 3640 continue;
michael@0 3641 } else {
michael@0 3642 JS_ASSERT(iter->isGetElem_NativePrototypeCallNative() ||
michael@0 3643 iter->isGetElem_NativePrototypeCallScripted());
michael@0 3644
michael@0 3645 ICGetElemNativePrototypeCallStub *protoStub =
michael@0 3646 reinterpret_cast<ICGetElemNativePrototypeCallStub *>(*iter);
michael@0 3647
michael@0 3648 if (holder != protoStub->holder())
michael@0 3649 continue;
michael@0 3650
michael@0 3651 if (holder->lastProperty() != protoStub->holderShape())
michael@0 3652 continue;
michael@0 3653 }
michael@0 3654 }
michael@0 3655
michael@0 3656 return true;
michael@0 3657 }
michael@0 3658 return false;
michael@0 3659 }
michael@0 3660
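// Unlink native GetElem stubs that the new stub would supersede: stubs with stale holder
// shapes, or stubs that don't atomize when the new stub requires it.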
michael@0 3661 static void
michael@0 3662 RemoveExistingGetElemNativeStubs(JSContext *cx, ICGetElem_Fallback *stub, HandleObject obj,
michael@0 3663 HandleObject holder, HandlePropertyName propName,
michael@0 3664 bool needsAtomize)
michael@0 3665 {
michael@0 3666 bool indirect = (obj.get() != holder.get());
michael@0 3667
michael@0 3668 for (ICStubIterator iter = stub->beginChain(); !iter.atEnd(); iter++) {
michael@0 3669 switch (iter->kind()) {
michael@0 3670 case ICStub::GetElem_NativeSlot:
michael@0 3671 if (indirect)
michael@0 3672 continue;
michael@0 3673 case ICStub::GetElem_NativePrototypeSlot:
michael@0 3674 case ICStub::GetElem_NativePrototypeCallNative:
michael@0 3675 case ICStub::GetElem_NativePrototypeCallScripted:
michael@0 3676 break;
michael@0 3677 default:
michael@0 3678 continue;
michael@0 3679 }
michael@0 3680
michael@0 3681 ICGetElemNativeStub *getElemNativeStub = reinterpret_cast<ICGetElemNativeStub *>(*iter);
michael@0 3682 if (propName != getElemNativeStub->name())
michael@0 3683 continue;
michael@0 3684
michael@0 3685 if (obj->lastProperty() != getElemNativeStub->shape())
michael@0 3686 continue;
michael@0 3687
michael@0 3688 // For prototype gets, check the holder and holder shape.
michael@0 3689 if (indirect) {
michael@0 3690 if (iter->isGetElem_NativePrototypeSlot()) {
michael@0 3691 ICGetElem_NativePrototypeSlot *protoStub = iter->toGetElem_NativePrototypeSlot();
michael@0 3692
michael@0 3693 if (holder != protoStub->holder())
michael@0 3694 continue;
michael@0 3695
michael@0 3696 // If the holder matches, but the holder's lastProperty doesn't match, then
michael@0 3697 // this stub is invalid anyway. Unlink it.
michael@0 3698 if (holder->lastProperty() != protoStub->holderShape()) {
michael@0 3699 iter.unlink(cx);
michael@0 3700 continue;
michael@0 3701 }
michael@0 3702 } else {
michael@0 3703 JS_ASSERT(iter->isGetElem_NativePrototypeCallNative() ||
michael@0 3704 iter->isGetElem_NativePrototypeCallScripted());
michael@0 3705
michael@0 3706 ICGetElemNativePrototypeCallStub *protoStub =
michael@0 3707 reinterpret_cast<ICGetElemNativePrototypeCallStub *>(*iter);
michael@0 3708
michael@0 3709 if (holder != protoStub->holder())
michael@0 3710 continue;
michael@0 3711
michael@0 3712 // If the holder matches, but the holder's lastProperty doesn't match, then
michael@0 3713 // this stub is invalid anyway. Unlink it.
michael@0 3714 if (holder->lastProperty() != protoStub->holderShape()) {
michael@0 3715 iter.unlink(cx);
michael@0 3716 continue;
michael@0 3717 }
michael@0 3718 }
michael@0 3719 }
michael@0 3720
michael@0 3721 // If the new stub needs atomization, and the old stub doesn't atomize, then
michael@0 3722 // remove the old stub.
michael@0 3723 if (needsAtomize && !getElemNativeStub->needsAtomize()) {
michael@0 3724 iter.unlink(cx);
michael@0 3725 continue;
michael@0 3726 }
michael@0 3727
michael@0 3728 // Should never get here, because this means a matching stub exists, and if
michael@0 3729 // a matching stub exists, this procedure should never have been called.
michael@0 3730 MOZ_ASSUME_UNREACHABLE("Procedure should never have been called.");
michael@0 3731 }
michael@0 3732 }
michael@0 3733
michael@0 3734 static bool
michael@0 3735 TypedArrayGetElemStubExists(ICGetElem_Fallback *stub, HandleObject obj)
michael@0 3736 {
michael@0 3737 for (ICStubConstIterator iter = stub->beginChainConst(); !iter.atEnd(); iter++) {
michael@0 3738 if (!iter->isGetElem_TypedArray())
michael@0 3739 continue;
michael@0 3740 if (obj->lastProperty() == iter->toGetElem_TypedArray()->shape())
michael@0 3741 return true;
michael@0 3742 }
michael@0 3743 return false;
michael@0 3744 }
michael@0 3745
michael@0 3746 static bool
michael@0 3747 ArgumentsGetElemStubExists(ICGetElem_Fallback *stub, ICGetElem_Arguments::Which which)
michael@0 3748 {
michael@0 3749 for (ICStubConstIterator iter = stub->beginChainConst(); !iter.atEnd(); iter++) {
michael@0 3750 if (!iter->isGetElem_Arguments())
michael@0 3751 continue;
michael@0 3752 if (iter->toGetElem_Arguments()->which() == which)
michael@0 3753 return true;
michael@0 3754 }
michael@0 3755 return false;
michael@0 3756 }
michael@0 3757
michael@0 3758
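// Try to attach a GetElem stub for a named (string-keyed) property on a native object,
// either as a direct/prototype slot read or as a prototype getter call.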
michael@0 3759 static bool TryAttachNativeGetElemStub(JSContext *cx, HandleScript script, jsbytecode *pc,
michael@0 3760 ICGetElem_Fallback *stub, HandleObject obj,
michael@0 3761 HandleValue key)
michael@0 3762 {
michael@0 3763 // Native-object GetElem stubs can't deal with non-string keys.
michael@0 3764 if (!key.isString())
michael@0 3765 return true;
michael@0 3766
michael@0 3767 // Convert to interned property name.
michael@0 3768 RootedId id(cx);
michael@0 3769 if (!ValueToId<CanGC>(cx, key, &id))
michael@0 3770 return false;
michael@0 3771
michael@0 3772 uint32_t dummy;
michael@0 3773 if (!JSID_IS_ATOM(id) || JSID_TO_ATOM(id)->isIndex(&dummy))
michael@0 3774 return true;
michael@0 3775
michael@0 3776 RootedPropertyName propName(cx, JSID_TO_ATOM(id)->asPropertyName());
michael@0 3777 bool needsAtomize = !key.toString()->isAtom();
michael@0 3778 bool isCallElem = (JSOp(*pc) == JSOP_CALLELEM);
michael@0 3779
michael@0 3780 RootedShape shape(cx);
michael@0 3781 RootedObject holder(cx);
michael@0 3782 if (!EffectlesslyLookupProperty(cx, obj, propName, &holder, &shape))
michael@0 3783 return false;
michael@0 3784
michael@0 3785 if (IsCacheableGetPropReadSlot(obj, holder, shape)) {
michael@0 3786 // If a suitable stub already exists, nothing else to do.
michael@0 3787 if (GetElemNativeStubExists(stub, obj, holder, propName, needsAtomize))
michael@0 3788 return true;
michael@0 3789
michael@0 3790 // Remove any existing stubs that may interfere with the new stub being added.
michael@0 3791 RemoveExistingGetElemNativeStubs(cx, stub, obj, holder, propName, needsAtomize);
michael@0 3792
michael@0 3793 bool isFixedSlot;
michael@0 3794 uint32_t offset;
michael@0 3795 GetFixedOrDynamicSlotOffset(holder, shape->slot(), &isFixedSlot, &offset);
michael@0 3796
michael@0 3797 ICStub *monitorStub = stub->fallbackMonitorStub()->firstMonitorStub();
michael@0 3798 ICStub::Kind kind = (obj == holder) ? ICStub::GetElem_NativeSlot
michael@0 3799 : ICStub::GetElem_NativePrototypeSlot;
michael@0 3800
michael@0 3801 IonSpew(IonSpew_BaselineIC, " Generating GetElem(Native %s%s slot) stub "
michael@0 3802 "(obj=%p, shape=%p, holder=%p, holderShape=%p)",
michael@0 3803 (obj == holder) ? "direct" : "prototype",
michael@0 3804 needsAtomize ? " atomizing" : "",
michael@0 3805 obj.get(), obj->lastProperty(), holder.get(), holder->lastProperty());
michael@0 3806
michael@0 3807 ICGetElemNativeStub::AccessType acctype = isFixedSlot ? ICGetElemNativeStub::FixedSlot
michael@0 3808 : ICGetElemNativeStub::DynamicSlot;
michael@0 3809 ICGetElemNativeCompiler compiler(cx, kind, isCallElem, monitorStub, obj, holder, propName,
michael@0 3810 acctype, needsAtomize, offset);
michael@0 3811 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 3812 if (!newStub)
michael@0 3813 return false;
michael@0 3814
michael@0 3815 stub->addNewStub(newStub);
michael@0 3816 return true;
michael@0 3817 }
michael@0 3818
michael@0 3819 bool getterIsScripted = false;
michael@0 3820 if (IsCacheableGetPropCall(cx, obj, holder, shape, &getterIsScripted, /*isDOMProxy=*/false)) {
michael@0 3821 RootedFunction getter(cx, &shape->getterObject()->as<JSFunction>());
michael@0 3822
michael@0 3823 #if JS_HAS_NO_SUCH_METHOD
michael@0 3824 // It's unlikely that a getter function will be used in callelem locations.
michael@0 3825 // Just don't attach stubs in that case to avoid issues with __noSuchMethod__ handling.
michael@0 3826 if (isCallElem)
michael@0 3827 return true;
michael@0 3828 #endif
michael@0 3829
michael@0 3830 // For now, we do not handle own-property getters.
michael@0 3831 if (obj == holder)
michael@0 3832 return true;
michael@0 3833
michael@0 3834 // If a suitable stub already exists, nothing else to do.
michael@0 3835 if (GetElemNativeStubExists(stub, obj, holder, propName, needsAtomize))
michael@0 3836 return true;
michael@0 3837
michael@0 3838 // Remove any existing stubs that may interfere with the new stub being added.
michael@0 3839 RemoveExistingGetElemNativeStubs(cx, stub, obj, holder, propName, needsAtomize);
michael@0 3840
michael@0 3841 ICStub *monitorStub = stub->fallbackMonitorStub()->firstMonitorStub();
michael@0 3842 ICStub::Kind kind = getterIsScripted ? ICStub::GetElem_NativePrototypeCallScripted
michael@0 3843 : ICStub::GetElem_NativePrototypeCallNative;
michael@0 3844
michael@0 3845 if (getterIsScripted) {
michael@0 3846 IonSpew(IonSpew_BaselineIC,
michael@0 3847 " Generating GetElem(Native %s%s call scripted %s:%d) stub "
michael@0 3848 "(obj=%p, shape=%p, holder=%p, holderShape=%p)",
michael@0 3849 (obj == holder) ? "direct" : "prototype",
michael@0 3850 needsAtomize ? " atomizing" : "",
michael@0 3851 getter->nonLazyScript()->filename(), getter->nonLazyScript()->lineno(),
michael@0 3852 obj.get(), obj->lastProperty(), holder.get(), holder->lastProperty());
michael@0 3853 } else {
michael@0 3854 IonSpew(IonSpew_BaselineIC,
michael@0 3855 " Generating GetElem(Native %s%s call native) stub "
michael@0 3856 "(obj=%p, shape=%p, holder=%p, holderShape=%p)",
michael@0 3857 (obj == holder) ? "direct" : "prototype",
michael@0 3858 needsAtomize ? " atomizing" : "",
michael@0 3859 obj.get(), obj->lastProperty(), holder.get(), holder->lastProperty());
michael@0 3860 }
michael@0 3861
michael@0 3862 ICGetElemNativeStub::AccessType acctype = getterIsScripted
michael@0 3863 ? ICGetElemNativeStub::ScriptedGetter
michael@0 3864 : ICGetElemNativeStub::NativeGetter;
michael@0 3865 ICGetElemNativeCompiler compiler(cx, kind, monitorStub, obj, holder, propName, acctype,
michael@0 3866 needsAtomize, getter, script->pcToOffset(pc), isCallElem);
michael@0 3867 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 3868 if (!newStub)
michael@0 3869 return false;
michael@0 3870
michael@0 3871 stub->addNewStub(newStub);
michael@0 3872 return true;
michael@0 3873 }
michael@0 3874
michael@0 3875 return true;
michael@0 3876 }
michael@0 3877
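// Uint32 element reads can exceed INT32_MAX and produce doubles, so they require
// floating-point support, as do the Float32 and Float64 element types.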
michael@0 3878 static bool
michael@0 3879 TypedArrayRequiresFloatingPoint(TypedArrayObject *tarr)
michael@0 3880 {
michael@0 3881 uint32_t type = tarr->type();
michael@0 3882 return (type == ScalarTypeDescr::TYPE_UINT32 ||
michael@0 3883 type == ScalarTypeDescr::TYPE_FLOAT32 ||
michael@0 3884 type == ScalarTypeDescr::TYPE_FLOAT64);
michael@0 3885 }
michael@0 3886
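// Try to attach an optimized GetElem stub based on the observed operand types: string
// characters, magic or real arguments objects, dense native elements, named native
// properties, or typed array elements.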
michael@0 3887 static bool
michael@0 3888 TryAttachGetElemStub(JSContext *cx, JSScript *script, jsbytecode *pc, ICGetElem_Fallback *stub,
michael@0 3889 HandleValue lhs, HandleValue rhs, HandleValue res)
michael@0 3890 {
michael@0 3891 bool isCallElem = (JSOp(*pc) == JSOP_CALLELEM);
michael@0 3892
michael@0 3893 // Check for String[i] => Char accesses.
michael@0 3894 if (lhs.isString() && rhs.isInt32() && res.isString() &&
michael@0 3895 !stub->hasStub(ICStub::GetElem_String))
michael@0 3896 {
michael@0 3897 // NoSuchMethod handling doesn't apply to string targets.
michael@0 3898
michael@0 3899 IonSpew(IonSpew_BaselineIC, " Generating GetElem(String[Int32]) stub");
michael@0 3900 ICGetElem_String::Compiler compiler(cx);
michael@0 3901 ICStub *stringStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 3902 if (!stringStub)
michael@0 3903 return false;
michael@0 3904
michael@0 3905 stub->addNewStub(stringStub);
michael@0 3906 return true;
michael@0 3907 }
michael@0 3908
michael@0 3909 if (lhs.isMagic(JS_OPTIMIZED_ARGUMENTS) && rhs.isInt32() &&
michael@0 3910 !ArgumentsGetElemStubExists(stub, ICGetElem_Arguments::Magic))
michael@0 3911 {
michael@0 3912 // Any script with a CALLPROP on arguments (arguments.foo())
michael@0 3913 // should not have optimized arguments.
michael@0 3914 JS_ASSERT(!isCallElem);
michael@0 3915
michael@0 3916 IonSpew(IonSpew_BaselineIC, " Generating GetElem(MagicArgs[Int32]) stub");
michael@0 3917 ICGetElem_Arguments::Compiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
michael@0 3918 ICGetElem_Arguments::Magic, false);
michael@0 3919 ICStub *argsStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 3920 if (!argsStub)
michael@0 3921 return false;
michael@0 3922
michael@0 3923 stub->addNewStub(argsStub);
michael@0 3924 return true;
michael@0 3925 }
michael@0 3926
michael@0 3927 // Otherwise, GetElem is only optimized on objects.
michael@0 3928 if (!lhs.isObject())
michael@0 3929 return true;
michael@0 3930 RootedObject obj(cx, &lhs.toObject());
michael@0 3931
michael@0 3932 // Check for ArgumentsObj[int] accesses
michael@0 3933 if (obj->is<ArgumentsObject>() && rhs.isInt32()) {
michael@0 3934 ICGetElem_Arguments::Which which = ICGetElem_Arguments::Normal;
michael@0 3935 if (obj->is<StrictArgumentsObject>())
michael@0 3936 which = ICGetElem_Arguments::Strict;
michael@0 3937 if (!ArgumentsGetElemStubExists(stub, which)) {
michael@0 3938 IonSpew(IonSpew_BaselineIC, " Generating GetElem(ArgsObj[Int32]) stub");
michael@0 3939 ICGetElem_Arguments::Compiler compiler(
michael@0 3940 cx, stub->fallbackMonitorStub()->firstMonitorStub(), which, isCallElem);
michael@0 3941 ICStub *argsStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 3942 if (!argsStub)
michael@0 3943 return false;
michael@0 3944
michael@0 3945 stub->addNewStub(argsStub);
michael@0 3946 return true;
michael@0 3947 }
michael@0 3948 }
michael@0 3949
michael@0 3950 if (obj->isNative()) {
michael@0 3951 // Check for NativeObject[int] dense accesses.
michael@0 3952 if (rhs.isInt32() && rhs.toInt32() >= 0 && !obj->is<TypedArrayObject>()) {
michael@0 3953 IonSpew(IonSpew_BaselineIC, " Generating GetElem(Native[Int32] dense) stub");
michael@0 3954 ICGetElem_Dense::Compiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
michael@0 3955 obj->lastProperty(), isCallElem);
michael@0 3956 ICStub *denseStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 3957 if (!denseStub)
michael@0 3958 return false;
michael@0 3959
michael@0 3960 stub->addNewStub(denseStub);
michael@0 3961 return true;
michael@0 3962 }
michael@0 3963
michael@0 3964 // Check for NativeObject[id] shape-optimizable accesses.
michael@0 3965 if (rhs.isString()) {
michael@0 3966 RootedScript rootedScript(cx, script);
michael@0 3967 if (!TryAttachNativeGetElemStub(cx, rootedScript, pc, stub, obj, rhs))
michael@0 3968 return false;
michael@0 3969 script = rootedScript;
michael@0 3970 }
michael@0 3971 }
michael@0 3972
michael@0 3973 // Check for TypedArray[int] => Number accesses.
michael@0 3974 if (obj->is<TypedArrayObject>() && rhs.isNumber() && res.isNumber() &&
michael@0 3975 !TypedArrayGetElemStubExists(stub, obj))
michael@0 3976 {
michael@0 3977 // Don't attach CALLELEM stubs for typed array accesses expected to yield numbers.
michael@0 3978 #if JS_HAS_NO_SUCH_METHOD
michael@0 3979 if (isCallElem)
michael@0 3980 return true;
michael@0 3981 #endif
michael@0 3982
michael@0 3983 TypedArrayObject *tarr = &obj->as<TypedArrayObject>();
michael@0 3984 if (!cx->runtime()->jitSupportsFloatingPoint &&
michael@0 3985 (TypedArrayRequiresFloatingPoint(tarr) || rhs.isDouble()))
michael@0 3986 {
michael@0 3987 return true;
michael@0 3988 }
michael@0 3989
michael@0 3990 IonSpew(IonSpew_BaselineIC, " Generating GetElem(TypedArray[Int32]) stub");
michael@0 3991 ICGetElem_TypedArray::Compiler compiler(cx, tarr->lastProperty(), tarr->type());
michael@0 3992 ICStub *typedArrayStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 3993 if (!typedArrayStub)
michael@0 3994 return false;
michael@0 3995
michael@0 3996 stub->addNewStub(typedArrayStub);
michael@0 3997 return true;
michael@0 3998 }
michael@0 3999
michael@0 4000 // GetElem operations on non-native objects cannot be cached by either
michael@0 4001 // Baseline or Ion. Indicate this in the cache so that Ion does not
michael@0 4002 // generate a cache for this op.
michael@0 4003 if (!obj->isNative())
michael@0 4004 stub->noteNonNativeAccess();
michael@0 4005
michael@0 4006 // GetElem operations which could access negative indexes generally can't
michael@0 4007 // be optimized without the potential for bailouts, as we can't statically
michael@0 4008 // determine that an object has no properties on such indexes.
michael@0 4009 if (rhs.isNumber() && rhs.toNumber() < 0)
michael@0 4010 stub->noteNegativeIndex();
michael@0 4011
michael@0 4012 return true;
michael@0 4013 }
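// Rough summary of the attach order above (JS-level examples, illustrative
// only and not exhaustive):
//
//   "abc"[1]          -> GetElem_String
//   arguments[i]      -> GetElem_Arguments (Magic for lazy arguments, or
//                        Normal/Strict for a real arguments object)
//   arr[0]            -> GetElem_Dense (native object, non-negative int32 key)
//   obj[someString]   -> GetElem_Native* (string key on a native object)
//   int8Array[i]      -> GetElem_TypedArray (numeric key, numeric result)
//
// Everything else stays on the fallback path; non-native receivers and
// negative indexes are additionally noted on the fallback stub so that Ion
// skips building a cache for the same op.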
michael@0 4014
michael@0 4015 static bool
michael@0 4016 DoGetElemFallback(JSContext *cx, BaselineFrame *frame, ICGetElem_Fallback *stub_, HandleValue lhs,
michael@0 4017 HandleValue rhs, MutableHandleValue res)
michael@0 4018 {
michael@0 4019 // This fallback stub may trigger debug mode toggling.
michael@0 4020 DebugModeOSRVolatileStub<ICGetElem_Fallback *> stub(frame, stub_);
michael@0 4021
michael@0 4022 RootedScript script(cx, frame->script());
michael@0 4023 jsbytecode *pc = stub->icEntry()->pc(frame->script());
michael@0 4024 JSOp op = JSOp(*pc);
michael@0 4025 FallbackICSpew(cx, stub, "GetElem(%s)", js_CodeName[op]);
michael@0 4026
michael@0 4027 JS_ASSERT(op == JSOP_GETELEM || op == JSOP_CALLELEM);
michael@0 4028
michael@0 4029 // Don't pass lhs directly; we need it when generating stubs.
michael@0 4030 RootedValue lhsCopy(cx, lhs);
michael@0 4031
michael@0 4032 bool isOptimizedArgs = false;
michael@0 4033 if (lhs.isMagic(JS_OPTIMIZED_ARGUMENTS)) {
michael@0 4034 // Handle optimized arguments[i] access.
michael@0 4035 if (!GetElemOptimizedArguments(cx, frame, &lhsCopy, rhs, res, &isOptimizedArgs))
michael@0 4036 return false;
michael@0 4037 if (isOptimizedArgs)
michael@0 4038 types::TypeScript::Monitor(cx, frame->script(), pc, res);
michael@0 4039 }
michael@0 4040
michael@0 4041 if (!isOptimizedArgs) {
michael@0 4042 if (!GetElementOperation(cx, op, &lhsCopy, rhs, res))
michael@0 4043 return false;
michael@0 4044 types::TypeScript::Monitor(cx, frame->script(), pc, res);
michael@0 4045 }
michael@0 4046
michael@0 4047 // Check if debug mode toggling made the stub invalid.
michael@0 4048 if (stub.invalid())
michael@0 4049 return true;
michael@0 4050
michael@0 4051 // Add a type monitor stub for the resulting value.
michael@0 4052 if (!stub->addMonitorStubForValue(cx, frame->script(), res))
michael@0 4053 return false;
michael@0 4054
michael@0 4055 if (stub->numOptimizedStubs() >= ICGetElem_Fallback::MAX_OPTIMIZED_STUBS) {
michael@0 4056 // TODO: Discard all stubs in this IC and replace with inert megamorphic stub.
michael@0 4057 // But for now we just bail.
michael@0 4058 return true;
michael@0 4059 }
michael@0 4060
michael@0 4061 // Try to attach an optimized stub.
michael@0 4062 if (!TryAttachGetElemStub(cx, frame->script(), pc, stub, lhs, rhs, res))
michael@0 4063 return false;
michael@0 4064
michael@0 4065 return true;
michael@0 4066 }
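// Note on ordering in the fallback above: the GetElem (or the optimized
// lazy-arguments read) is performed first, the result type is monitored, the
// stub is checked for debug-mode invalidation and the MAX_OPTIMIZED_STUBS
// limit, and only then is a specialized stub attached. Attaching after the
// operation means lhs/rhs/res describe a case that actually occurred at this
// site.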
michael@0 4067
michael@0 4068 typedef bool (*DoGetElemFallbackFn)(JSContext *, BaselineFrame *, ICGetElem_Fallback *,
michael@0 4069 HandleValue, HandleValue, MutableHandleValue);
michael@0 4070 static const VMFunction DoGetElemFallbackInfo =
michael@0 4071 FunctionInfo<DoGetElemFallbackFn>(DoGetElemFallback, PopValues(2));
michael@0 4072
michael@0 4073 bool
michael@0 4074 ICGetElem_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 4075 {
michael@0 4076 JS_ASSERT(R0 == JSReturnOperand);
michael@0 4077
michael@0 4078 // Restore the tail call register.
michael@0 4079 EmitRestoreTailCallReg(masm);
michael@0 4080
michael@0 4081 // Ensure stack is fully synced for the expression decompiler.
michael@0 4082 masm.pushValue(R0);
michael@0 4083 masm.pushValue(R1);
michael@0 4084
michael@0 4085 // Push arguments.
michael@0 4086 masm.pushValue(R1);
michael@0 4087 masm.pushValue(R0);
michael@0 4088 masm.push(BaselineStubReg);
michael@0 4089 masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
michael@0 4090
michael@0 4091 return tailCallVM(DoGetElemFallbackInfo, masm);
michael@0 4092 }
michael@0 4093
michael@0 4094 //
michael@0 4095 // GetElem_NativeSlot
michael@0 4096 //
michael@0 4097
michael@0 4098 static bool
michael@0 4099 DoAtomizeString(JSContext *cx, HandleString string, MutableHandleValue result)
michael@0 4100 {
michael@0 4101 IonSpew(IonSpew_BaselineIC, " AtomizeString called");
michael@0 4102
michael@0 4103 RootedValue key(cx, StringValue(string));
michael@0 4104
michael@0 4105 // Convert to interned property name.
michael@0 4106 RootedId id(cx);
michael@0 4107 if (!ValueToId<CanGC>(cx, key, &id))
michael@0 4108 return false;
michael@0 4109
michael@0 4110 if (!JSID_IS_ATOM(id)) {
michael@0 4111 result.set(key);
michael@0 4112 return true;
michael@0 4113 }
michael@0 4114
michael@0 4115 result.set(StringValue(JSID_TO_ATOM(id)));
michael@0 4116 return true;
michael@0 4117 }
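// DoAtomizeString exists because the native GetElem stubs below compare the
// incoming key against a stored PropertyName by pointer identity. A key such
// as the rope produced by |obj["f" + "oo"]| is not an atom and would never
// match even though it names the same property, so it is atomized first when
// needsAtomize_ is set. (Illustrative example; any non-atom string key takes
// this path.)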
michael@0 4118
michael@0 4119 typedef bool (*DoAtomizeStringFn)(JSContext *, HandleString, MutableHandleValue);
michael@0 4120 static const VMFunction DoAtomizeStringInfo = FunctionInfo<DoAtomizeStringFn>(DoAtomizeString);
michael@0 4121
michael@0 4122 bool
michael@0 4123 ICGetElemNativeCompiler::emitCallNative(MacroAssembler &masm, Register objReg)
michael@0 4124 {
michael@0 4125 GeneralRegisterSet regs = availableGeneralRegs(0);
michael@0 4126 regs.takeUnchecked(objReg);
michael@0 4127 regs.takeUnchecked(BaselineTailCallReg);
michael@0 4128
michael@0 4129 enterStubFrame(masm, regs.getAny());
michael@0 4130
michael@0 4131 // Push object.
michael@0 4132 masm.push(objReg);
michael@0 4133
michael@0 4134 // Push native callee.
michael@0 4135 masm.loadPtr(Address(BaselineStubReg, ICGetElemNativeGetterStub::offsetOfGetter()), objReg);
michael@0 4136 masm.push(objReg);
michael@0 4137
michael@0 4138 regs.add(objReg);
michael@0 4139
michael@0 4140 // Profiler hook.
michael@0 4141 emitProfilingUpdate(masm, regs, ICGetElemNativeGetterStub::offsetOfPCOffset());
michael@0 4142
michael@0 4143 // Call helper.
michael@0 4144 if (!callVM(DoCallNativeGetterInfo, masm))
michael@0 4145 return false;
michael@0 4146
michael@0 4147 leaveStubFrame(masm);
michael@0 4148
michael@0 4149 return true;
michael@0 4150 }
michael@0 4151
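// emitCallScripted below pushes |this| and an ActualArgc of 0, then jumps
// into the getter's Baseline/Ion code. If the callee declares formal
// parameters (nargs > 0), the call is routed through the arguments rectifier,
// which pads the missing actuals with |undefined| before entering the callee.
// This is a rough sketch of the underflow handling, not a full description of
// the calling convention.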
michael@0 4152 bool
michael@0 4153 ICGetElemNativeCompiler::emitCallScripted(MacroAssembler &masm, Register objReg)
michael@0 4154 {
michael@0 4155 GeneralRegisterSet regs = availableGeneralRegs(0);
michael@0 4156 regs.takeUnchecked(objReg);
michael@0 4157 regs.takeUnchecked(BaselineTailCallReg);
michael@0 4158
michael@0 4159 // Enter stub frame.
michael@0 4160 enterStubFrame(masm, regs.getAny());
michael@0 4161
michael@0 4162 // Push |this| for getter (target object).
michael@0 4163 {
michael@0 4164 ValueOperand val = regs.takeAnyValue();
michael@0 4165 masm.tagValue(JSVAL_TYPE_OBJECT, objReg, val);
michael@0 4166 masm.Push(val);
michael@0 4167 regs.add(val);
michael@0 4168 }
michael@0 4169
michael@0 4170 regs.add(objReg);
michael@0 4171
michael@0 4172 Register callee = regs.takeAny();
michael@0 4173 masm.loadPtr(Address(BaselineStubReg, ICGetElemNativeGetterStub::offsetOfGetter()), callee);
michael@0 4174
michael@0 4175 // Push argc, callee, and descriptor.
michael@0 4176 {
michael@0 4177 Register callScratch = regs.takeAny();
michael@0 4178 EmitCreateStubFrameDescriptor(masm, callScratch);
michael@0 4179 masm.Push(Imm32(0)); // ActualArgc is 0
michael@0 4180 masm.Push(callee);
michael@0 4181 masm.Push(callScratch);
michael@0 4182 regs.add(callScratch);
michael@0 4183 }
michael@0 4184
michael@0 4185 Register code = regs.takeAnyExcluding(ArgumentsRectifierReg);
michael@0 4186 masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), code);
michael@0 4187 masm.loadBaselineOrIonRaw(code, code, SequentialExecution, nullptr);
michael@0 4188
michael@0 4189 Register scratch = regs.takeAny();
michael@0 4190
michael@0 4191 // Handle arguments underflow.
michael@0 4192 Label noUnderflow;
michael@0 4193 masm.load16ZeroExtend(Address(callee, JSFunction::offsetOfNargs()), scratch);
michael@0 4194 masm.branch32(Assembler::Equal, scratch, Imm32(0), &noUnderflow);
michael@0 4195 {
michael@0 4196 // Call the arguments rectifier.
michael@0 4197 JS_ASSERT(ArgumentsRectifierReg != code);
michael@0 4198
michael@0 4199 JitCode *argumentsRectifier =
michael@0 4200 cx->runtime()->jitRuntime()->getArgumentsRectifier(SequentialExecution);
michael@0 4201
michael@0 4202 masm.movePtr(ImmGCPtr(argumentsRectifier), code);
michael@0 4203 masm.loadPtr(Address(code, JitCode::offsetOfCode()), code);
michael@0 4204 masm.mov(ImmWord(0), ArgumentsRectifierReg);
michael@0 4205 }
michael@0 4206
michael@0 4207 masm.bind(&noUnderflow);
michael@0 4208
michael@0 4209 // If needed, update SPS Profiler frame entry. At this point, callee and scratch can
michael@0 4210 // be clobbered.
michael@0 4211 {
michael@0 4212 GeneralRegisterSet availRegs = availableGeneralRegs(0);
michael@0 4213 availRegs.take(ArgumentsRectifierReg);
michael@0 4214 availRegs.take(code);
michael@0 4215 emitProfilingUpdate(masm, availRegs, ICGetElemNativeGetterStub::offsetOfPCOffset());
michael@0 4216 }
michael@0 4217
michael@0 4218 masm.callIon(code);
michael@0 4219
michael@0 4220 leaveStubFrame(masm, true);
michael@0 4221
michael@0 4222 return true;
michael@0 4223 }
michael@0 4224
michael@0 4225 bool
michael@0 4226 ICGetElemNativeCompiler::generateStubCode(MacroAssembler &masm)
michael@0 4227 {
michael@0 4228 Label failure;
michael@0 4229 Label failurePopR1;
michael@0 4230 bool popR1 = false;
michael@0 4231
michael@0 4232 masm.branchTestObject(Assembler::NotEqual, R0, &failure);
michael@0 4233 masm.branchTestString(Assembler::NotEqual, R1, &failure);
michael@0 4234
michael@0 4235 GeneralRegisterSet regs(availableGeneralRegs(2));
michael@0 4236 Register scratchReg = regs.takeAny();
michael@0 4237
michael@0 4238 // Unbox object.
michael@0 4239 Register objReg = masm.extractObject(R0, ExtractTemp0);
michael@0 4240
michael@0 4241 // Check object shape.
michael@0 4242 masm.loadPtr(Address(objReg, JSObject::offsetOfShape()), scratchReg);
michael@0 4243 Address shapeAddr(BaselineStubReg, ICGetElemNativeStub::offsetOfShape());
michael@0 4244 masm.branchPtr(Assembler::NotEqual, shapeAddr, scratchReg, &failure);
michael@0 4245
michael@0 4246 // Check key identity. Don't fail outright if the comparison fails, since the
michael@0 4247 // incoming key may be a non-interned string; switch to a slow-path VM-call based check.
michael@0 4248 Address nameAddr(BaselineStubReg, ICGetElemNativeStub::offsetOfName());
michael@0 4249 Register strExtract = masm.extractString(R1, ExtractTemp1);
michael@0 4250
michael@0 4251 // If needsAtomize_ is true, and the string is not already an atom, then atomize the
michael@0 4252 // string before proceeding.
michael@0 4253 if (needsAtomize_) {
michael@0 4254 Label skipAtomize;
michael@0 4255
michael@0 4256 // If string is already an atom, skip the atomize.
michael@0 4257 masm.branchTestPtr(Assembler::NonZero,
michael@0 4258 Address(strExtract, JSString::offsetOfLengthAndFlags()),
michael@0 4259 Imm32(JSString::ATOM_BIT),
michael@0 4260 &skipAtomize);
michael@0 4261
michael@0 4262 // Stow R0.
michael@0 4263 EmitStowICValues(masm, 1);
michael@0 4264
michael@0 4265 enterStubFrame(masm, R0.scratchReg());
michael@0 4266
michael@0 4267 // Atomize the string into a new value.
michael@0 4268 masm.push(strExtract);
michael@0 4269 if (!callVM(DoAtomizeStringInfo, masm))
michael@0 4270 return false;
michael@0 4271
michael@0 4272 // Atomized string is now in JSReturnOperand (R0).
michael@0 4273 // Leave stub frame, move atomized string into R1.
michael@0 4274 JS_ASSERT(R0 == JSReturnOperand);
michael@0 4275 leaveStubFrame(masm);
michael@0 4276 masm.moveValue(JSReturnOperand, R1);
michael@0 4277
michael@0 4278 // Unstow R0
michael@0 4279 EmitUnstowICValues(masm, 1);
michael@0 4280
michael@0 4281 // Extract string from R1 again.
michael@0 4282 DebugOnly<Register> strExtract2 = masm.extractString(R1, ExtractTemp1);
michael@0 4283 JS_ASSERT(Register(strExtract2) == strExtract);
michael@0 4284
michael@0 4285 masm.bind(&skipAtomize);
michael@0 4286 }
michael@0 4287
michael@0 4288 // Since this stub sometimes enters a stub frame, we manually set this to true (a lie).
michael@0 4289 #ifdef DEBUG
michael@0 4290 entersStubFrame_ = true;
michael@0 4291 #endif
michael@0 4292
michael@0 4293 // Key has been atomized if necessary. Do identity check on string pointer.
michael@0 4294 masm.branchPtr(Assembler::NotEqual, nameAddr, strExtract, &failure);
michael@0 4295
michael@0 4296 Register holderReg;
michael@0 4297 if (obj_ == holder_) {
michael@0 4298 holderReg = objReg;
michael@0 4299 } else {
michael@0 4300 // Shape guard holder.
michael@0 4301 if (regs.empty()) {
michael@0 4302 masm.push(R1.scratchReg());
michael@0 4303 popR1 = true;
michael@0 4304 holderReg = R1.scratchReg();
michael@0 4305 } else {
michael@0 4306 holderReg = regs.takeAny();
michael@0 4307 }
michael@0 4308
michael@0 4309 if (kind == ICStub::GetElem_NativePrototypeCallNative ||
michael@0 4310 kind == ICStub::GetElem_NativePrototypeCallScripted)
michael@0 4311 {
michael@0 4312 masm.loadPtr(Address(BaselineStubReg,
michael@0 4313 ICGetElemNativePrototypeCallStub::offsetOfHolder()),
michael@0 4314 holderReg);
michael@0 4315 masm.loadPtr(Address(BaselineStubReg,
michael@0 4316 ICGetElemNativePrototypeCallStub::offsetOfHolderShape()),
michael@0 4317 scratchReg);
michael@0 4318 } else {
michael@0 4319 masm.loadPtr(Address(BaselineStubReg,
michael@0 4320 ICGetElem_NativePrototypeSlot::offsetOfHolder()),
michael@0 4321 holderReg);
michael@0 4322 masm.loadPtr(Address(BaselineStubReg,
michael@0 4323 ICGetElem_NativePrototypeSlot::offsetOfHolderShape()),
michael@0 4324 scratchReg);
michael@0 4325 }
michael@0 4326 masm.branchTestObjShape(Assembler::NotEqual, holderReg, scratchReg,
michael@0 4327 popR1 ? &failurePopR1 : &failure);
michael@0 4328 }
michael@0 4329
michael@0 4330 if (acctype_ == ICGetElemNativeStub::DynamicSlot ||
michael@0 4331 acctype_ == ICGetElemNativeStub::FixedSlot)
michael@0 4332 {
michael@0 4333 masm.load32(Address(BaselineStubReg, ICGetElemNativeSlotStub::offsetOfOffset()),
michael@0 4334 scratchReg);
michael@0 4335
michael@0 4336 // Load from object.
michael@0 4337 if (acctype_ == ICGetElemNativeStub::DynamicSlot)
michael@0 4338 masm.addPtr(Address(holderReg, JSObject::offsetOfSlots()), scratchReg);
michael@0 4339 else
michael@0 4340 masm.addPtr(holderReg, scratchReg);
michael@0 4341
michael@0 4342 Address valAddr(scratchReg, 0);
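// Note: for FixedSlot accesses the offset stored in the stub is a byte
// offset from the object itself (fixed slots live inline in the object),
// while for DynamicSlot accesses it is an offset into the out-of-line
// slots array loaded just above. The offset is computed when the stub is
// attached; scratchReg now points directly at the slot either way.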
michael@0 4343
michael@0 4344 // Check if __noSuchMethod__ needs to be called.
michael@0 4345 #if JS_HAS_NO_SUCH_METHOD
michael@0 4346 if (isCallElem_) {
michael@0 4347 Label afterNoSuchMethod;
michael@0 4348 Label skipNoSuchMethod;
michael@0 4349
michael@0 4350 masm.branchTestUndefined(Assembler::NotEqual, valAddr, &skipNoSuchMethod);
michael@0 4351
michael@0 4352 GeneralRegisterSet regs = availableGeneralRegs(0);
michael@0 4353 regs.take(R1);
michael@0 4354 regs.take(R0);
michael@0 4355 regs.takeUnchecked(objReg);
michael@0 4356 if (popR1)
michael@0 4357 masm.pop(R1.scratchReg());
michael@0 4358
michael@0 4359 // Box and push obj and key onto baseline frame stack for decompiler.
michael@0 4360 masm.tagValue(JSVAL_TYPE_OBJECT, objReg, R0);
michael@0 4361 EmitStowICValues(masm, 2);
michael@0 4362
michael@0 4363 regs.add(R0);
michael@0 4364 regs.takeUnchecked(objReg);
michael@0 4365
michael@0 4366 enterStubFrame(masm, regs.getAnyExcluding(BaselineTailCallReg));
michael@0 4367
michael@0 4368 masm.pushValue(R1);
michael@0 4369 masm.push(objReg);
michael@0 4370 if (!callVM(LookupNoSuchMethodHandlerInfo, masm))
michael@0 4371 return false;
michael@0 4372
michael@0 4373 leaveStubFrame(masm);
michael@0 4374
michael@0 4375 // Pop pushed obj and key from baseline stack.
michael@0 4376 EmitUnstowICValues(masm, 2, /* discard = */ true);
michael@0 4377
michael@0 4378 // Result is already in R0
michael@0 4379 masm.jump(&afterNoSuchMethod);
michael@0 4380 masm.bind(&skipNoSuchMethod);
michael@0 4381
michael@0 4382 if (popR1)
michael@0 4383 masm.pop(R1.scratchReg());
michael@0 4384 masm.loadValue(valAddr, R0);
michael@0 4385 masm.bind(&afterNoSuchMethod);
michael@0 4386 } else {
michael@0 4387 masm.loadValue(valAddr, R0);
michael@0 4388 if (popR1)
michael@0 4389 masm.addPtr(ImmWord(sizeof(size_t)), BaselineStackReg);
michael@0 4390 }
michael@0 4391 #else
michael@0 4392 masm.loadValue(valAddr, R0);
michael@0 4393 if (popR1)
michael@0 4394 masm.addPtr(ImmWord(sizeof(size_t)), BaselineStackReg);
michael@0 4395 #endif
michael@0 4396
michael@0 4397 } else {
michael@0 4398 JS_ASSERT(acctype_ == ICGetElemNativeStub::NativeGetter ||
michael@0 4399 acctype_ == ICGetElemNativeStub::ScriptedGetter);
michael@0 4400 JS_ASSERT(kind == ICStub::GetElem_NativePrototypeCallNative ||
michael@0 4401 kind == ICStub::GetElem_NativePrototypeCallScripted);
michael@0 4402
michael@0 4403 if (acctype_ == ICGetElemNativeStub::NativeGetter) {
michael@0 4404 // If calling a native getter, there is no chance of failure now.
michael@0 4405
michael@0 4406 // GetElem key (R1) is no longer needed.
michael@0 4407 if (popR1)
michael@0 4408 masm.addPtr(ImmWord(sizeof(size_t)), BaselineStackReg);
michael@0 4409
michael@0 4410 emitCallNative(masm, objReg);
michael@0 4411
michael@0 4412 } else {
michael@0 4413 JS_ASSERT(acctype_ == ICGetElemNativeStub::ScriptedGetter);
michael@0 4414
michael@0 4415 // Load function in scratchReg and ensure that it has a jit script.
michael@0 4416 masm.loadPtr(Address(BaselineStubReg, ICGetElemNativeGetterStub::offsetOfGetter()),
michael@0 4417 scratchReg);
michael@0 4418 masm.branchIfFunctionHasNoScript(scratchReg, popR1 ? &failurePopR1 : &failure);
michael@0 4419 masm.loadPtr(Address(scratchReg, JSFunction::offsetOfNativeOrScript()), scratchReg);
michael@0 4420 masm.loadBaselineOrIonRaw(scratchReg, scratchReg, SequentialExecution,
michael@0 4421 popR1 ? &failurePopR1 : &failure);
michael@0 4422
michael@0 4423 // At this point, we are guaranteed to successfully complete.
michael@0 4424 if (popR1)
michael@0 4425 masm.addPtr(Imm32(sizeof(size_t)), BaselineStackReg);
michael@0 4426
michael@0 4427 emitCallScripted(masm, objReg);
michael@0 4428 }
michael@0 4429 }
michael@0 4430
michael@0 4431 // Enter type monitor IC to type-check result.
michael@0 4432 EmitEnterTypeMonitorIC(masm);
michael@0 4433
michael@0 4434 // Failure case - jump to next stub
michael@0 4435 if (popR1) {
michael@0 4436 masm.bind(&failurePopR1);
michael@0 4437 masm.pop(R1.scratchReg());
michael@0 4438 }
michael@0 4439 masm.bind(&failure);
michael@0 4440 EmitStubGuardFailure(masm);
michael@0 4441
michael@0 4442 return true;
michael@0 4443 }
michael@0 4444
michael@0 4445 //
michael@0 4446 // GetElem_String
michael@0 4447 //
michael@0 4448
michael@0 4449 bool
michael@0 4450 ICGetElem_String::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 4451 {
michael@0 4452 Label failure;
michael@0 4453 masm.branchTestString(Assembler::NotEqual, R0, &failure);
michael@0 4454 masm.branchTestInt32(Assembler::NotEqual, R1, &failure);
michael@0 4455
michael@0 4456 GeneralRegisterSet regs(availableGeneralRegs(2));
michael@0 4457 Register scratchReg = regs.takeAny();
michael@0 4458
michael@0 4459 // Unbox string in R0.
michael@0 4460 Register str = masm.extractString(R0, ExtractTemp0);
michael@0 4461
michael@0 4462 // Load string lengthAndFlags
michael@0 4463 Address lengthAndFlagsAddr(str, JSString::offsetOfLengthAndFlags());
michael@0 4464 masm.loadPtr(lengthAndFlagsAddr, scratchReg);
michael@0 4465
michael@0 4466 // Check for non-linear strings.
michael@0 4467 masm.branchTest32(Assembler::Zero, scratchReg, Imm32(JSString::FLAGS_MASK), &failure);
michael@0 4468
michael@0 4469 // Unbox key.
michael@0 4470 Register key = masm.extractInt32(R1, ExtractTemp1);
michael@0 4471
michael@0 4472 // Extract length and bounds check.
michael@0 4473 masm.rshiftPtr(Imm32(JSString::LENGTH_SHIFT), scratchReg);
michael@0 4474 masm.branch32(Assembler::BelowOrEqual, scratchReg, key, &failure);
michael@0 4475
michael@0 4476 // Get char code.
michael@0 4477 Address charsAddr(str, JSString::offsetOfChars());
michael@0 4478 masm.loadPtr(charsAddr, scratchReg);
michael@0 4479 masm.load16ZeroExtend(BaseIndex(scratchReg, key, TimesTwo, 0), scratchReg);
michael@0 4480
michael@0 4481 // Check if char code >= UNIT_STATIC_LIMIT.
michael@0 4482 masm.branch32(Assembler::AboveOrEqual, scratchReg, Imm32(StaticStrings::UNIT_STATIC_LIMIT),
michael@0 4483 &failure);
michael@0 4484
michael@0 4485 // Load static string.
michael@0 4486 masm.movePtr(ImmPtr(&cx->staticStrings().unitStaticTable), str);
michael@0 4487 masm.loadPtr(BaseIndex(str, scratchReg, ScalePointer), str);
michael@0 4488
michael@0 4489 // Return.
michael@0 4490 masm.tagValue(JSVAL_TYPE_STRING, str, R0);
michael@0 4491 EmitReturnFromIC(masm);
michael@0 4492
michael@0 4493 // Failure case - jump to next stub
michael@0 4494 masm.bind(&failure);
michael@0 4495 EmitStubGuardFailure(masm);
michael@0 4496 return true;
michael@0 4497 }
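// Example of what this stub covers: |"hello"[1]| yields the static unit
// string "e". Char codes at or above StaticStrings::UNIT_STATIC_LIMIT have no
// entry in the unit static table, so those accesses, along with non-linear
// strings and out-of-bounds indexes, fall through to the next stub or the
// fallback.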
michael@0 4498
michael@0 4499 //
michael@0 4500 // GetElem_Dense
michael@0 4501 //
michael@0 4502
michael@0 4503 bool
michael@0 4504 ICGetElem_Dense::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 4505 {
michael@0 4506 Label failure;
michael@0 4507 masm.branchTestObject(Assembler::NotEqual, R0, &failure);
michael@0 4508 masm.branchTestInt32(Assembler::NotEqual, R1, &failure);
michael@0 4509
michael@0 4510 GeneralRegisterSet regs(availableGeneralRegs(2));
michael@0 4511 Register scratchReg = regs.takeAny();
michael@0 4512
michael@0 4513 // Unbox R0 and shape guard.
michael@0 4514 Register obj = masm.extractObject(R0, ExtractTemp0);
michael@0 4515 masm.loadPtr(Address(BaselineStubReg, ICGetElem_Dense::offsetOfShape()), scratchReg);
michael@0 4516 masm.branchTestObjShape(Assembler::NotEqual, obj, scratchReg, &failure);
michael@0 4517
michael@0 4518 // Load obj->elements.
michael@0 4519 masm.loadPtr(Address(obj, JSObject::offsetOfElements()), scratchReg);
michael@0 4520
michael@0 4521 // Unbox key.
michael@0 4522 Register key = masm.extractInt32(R1, ExtractTemp1);
michael@0 4523
michael@0 4524 // Bounds check.
michael@0 4525 Address initLength(scratchReg, ObjectElements::offsetOfInitializedLength());
michael@0 4526 masm.branch32(Assembler::BelowOrEqual, initLength, key, &failure);
michael@0 4527
michael@0 4528 // Hole check and load value.
michael@0 4529 JS_STATIC_ASSERT(sizeof(Value) == 8);
michael@0 4530 BaseIndex element(scratchReg, key, TimesEight);
michael@0 4531 masm.branchTestMagic(Assembler::Equal, element, &failure);
michael@0 4532
michael@0 4533 // Check if __noSuchMethod__ should be called.
michael@0 4534 #if JS_HAS_NO_SUCH_METHOD
michael@0 4535 #ifdef DEBUG
michael@0 4536 entersStubFrame_ = true;
michael@0 4537 #endif
michael@0 4538 if (isCallElem_) {
michael@0 4539 Label afterNoSuchMethod;
michael@0 4540 Label skipNoSuchMethod;
michael@0 4541 regs = availableGeneralRegs(0);
michael@0 4542 regs.takeUnchecked(obj);
michael@0 4543 regs.takeUnchecked(key);
michael@0 4544 regs.takeUnchecked(BaselineTailCallReg);
michael@0 4545 ValueOperand val = regs.takeValueOperand();
michael@0 4546
michael@0 4547 masm.loadValue(element, val);
michael@0 4548 masm.branchTestUndefined(Assembler::NotEqual, val, &skipNoSuchMethod);
michael@0 4549
michael@0 4550 // Box and push obj and key onto baseline frame stack for decompiler.
michael@0 4551 EmitRestoreTailCallReg(masm);
michael@0 4552 masm.tagValue(JSVAL_TYPE_OBJECT, obj, val);
michael@0 4553 masm.pushValue(val);
michael@0 4554 masm.tagValue(JSVAL_TYPE_INT32, key, val);
michael@0 4555 masm.pushValue(val);
michael@0 4556 EmitRepushTailCallReg(masm);
michael@0 4557
michael@0 4558 regs.add(val);
michael@0 4559
michael@0 4560 // Call __noSuchMethod__ checker. Object pointer is in |obj|.
michael@0 4561 enterStubFrame(masm, regs.getAnyExcluding(BaselineTailCallReg));
michael@0 4562
michael@0 4563 regs.take(val);
michael@0 4564
michael@0 4565 masm.tagValue(JSVAL_TYPE_INT32, key, val);
michael@0 4566 masm.pushValue(val);
michael@0 4567 masm.push(obj);
michael@0 4568 if (!callVM(LookupNoSuchMethodHandlerInfo, masm))
michael@0 4569 return false;
michael@0 4570
michael@0 4571 leaveStubFrame(masm);
michael@0 4572
michael@0 4573 // Pop pushed obj and key from baseline stack.
michael@0 4574 EmitUnstowICValues(masm, 2, /* discard = */ true);
michael@0 4575
michael@0 4576 // Result is already in R0
michael@0 4577 masm.jump(&afterNoSuchMethod);
michael@0 4578 masm.bind(&skipNoSuchMethod);
michael@0 4579
michael@0 4580 masm.moveValue(val, R0);
michael@0 4581 masm.bind(&afterNoSuchMethod);
michael@0 4582 } else {
michael@0 4583 masm.loadValue(element, R0);
michael@0 4584 }
michael@0 4585 #else
michael@0 4586 // Load value from element location.
michael@0 4587 masm.loadValue(element, R0);
michael@0 4588 #endif
michael@0 4589
michael@0 4590 // Enter type monitor IC to type-check result.
michael@0 4591 EmitEnterTypeMonitorIC(masm);
michael@0 4592
michael@0 4593 // Failure case - jump to next stub
michael@0 4594 masm.bind(&failure);
michael@0 4595 EmitStubGuardFailure(masm);
michael@0 4596 return true;
michael@0 4597 }
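// Example: for |var a = [1, , 3]; a[1]| the element slot holds the magic
// "hole" value, so this stub fails and the fallback consults the prototype
// chain as required. Only in-bounds, non-hole dense reads complete here.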
michael@0 4598
michael@0 4599 //
michael@0 4600 // GetElem_TypedArray
michael@0 4601 //
michael@0 4602
michael@0 4603 bool
michael@0 4604 ICGetElem_TypedArray::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 4605 {
michael@0 4606 Label failure;
michael@0 4607 masm.branchTestObject(Assembler::NotEqual, R0, &failure);
michael@0 4608
michael@0 4609 GeneralRegisterSet regs(availableGeneralRegs(2));
michael@0 4610 Register scratchReg = regs.takeAny();
michael@0 4611
michael@0 4612 // Unbox R0 and shape guard.
michael@0 4613 Register obj = masm.extractObject(R0, ExtractTemp0);
michael@0 4614 masm.loadPtr(Address(BaselineStubReg, ICGetElem_TypedArray::offsetOfShape()), scratchReg);
michael@0 4615 masm.branchTestObjShape(Assembler::NotEqual, obj, scratchReg, &failure);
michael@0 4616
michael@0 4617 // Ensure the index is an integer.
michael@0 4618 if (cx->runtime()->jitSupportsFloatingPoint) {
michael@0 4619 Label isInt32;
michael@0 4620 masm.branchTestInt32(Assembler::Equal, R1, &isInt32);
michael@0 4621 {
michael@0 4622 // If the index is a double, try to convert it to int32. It's okay
michael@0 4623 // to convert -0 to 0: the shape check ensures the object is a typed
michael@0 4624 // array so the difference is not observable.
michael@0 4625 masm.branchTestDouble(Assembler::NotEqual, R1, &failure);
michael@0 4626 masm.unboxDouble(R1, FloatReg0);
michael@0 4627 masm.convertDoubleToInt32(FloatReg0, scratchReg, &failure, /* negZeroCheck = */false);
michael@0 4628 masm.tagValue(JSVAL_TYPE_INT32, scratchReg, R1);
michael@0 4629 }
michael@0 4630 masm.bind(&isInt32);
michael@0 4631 } else {
michael@0 4632 masm.branchTestInt32(Assembler::NotEqual, R1, &failure);
michael@0 4633 }
michael@0 4634
michael@0 4635 // Unbox key.
michael@0 4636 Register key = masm.extractInt32(R1, ExtractTemp1);
michael@0 4637
michael@0 4638 // Bounds check.
michael@0 4639 masm.unboxInt32(Address(obj, TypedArrayObject::lengthOffset()), scratchReg);
michael@0 4640 masm.branch32(Assembler::BelowOrEqual, scratchReg, key, &failure);
michael@0 4641
michael@0 4642 // Load the elements vector.
michael@0 4643 masm.loadPtr(Address(obj, TypedArrayObject::dataOffset()), scratchReg);
michael@0 4644
michael@0 4645 // Load the value.
michael@0 4646 BaseIndex source(scratchReg, key, ScaleFromElemWidth(TypedArrayObject::slotWidth(type_)));
michael@0 4647 masm.loadFromTypedArray(type_, source, R0, false, scratchReg, &failure);
michael@0 4648
michael@0 4649 // TODO: Allow loading doubles from uint32 arrays, but this requires monitoring.
michael@0 4650 EmitReturnFromIC(masm);
michael@0 4651
michael@0 4652 // Failure case - jump to next stub
michael@0 4653 masm.bind(&failure);
michael@0 4654 EmitStubGuardFailure(masm);
michael@0 4655 return true;
michael@0 4656 }
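// Example: |new Int32Array([1, 2, 3])[1]| returns 2 through this stub. An
// out-of-bounds read such as |ta[10]| yields undefined, which fails the
// res.isNumber() condition in TryAttachGetElemStub, so no TypedArray stub is
// attached for such a site in the first place.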
michael@0 4657
michael@0 4658 //
michael@0 4659 // GetElem_Arguments
michael@0 4660 //
michael@0 4661 bool
michael@0 4662 ICGetElem_Arguments::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 4663 {
michael@0 4664 // Variants of GetElem_Arguments can enter stub frames if entered in CallProp
michael@0 4665 // context when noSuchMethod support is on.
michael@0 4666 #if JS_HAS_NO_SUCH_METHOD
michael@0 4667 #ifdef DEBUG
michael@0 4668 entersStubFrame_ = true;
michael@0 4669 #endif
michael@0 4670 #endif
michael@0 4671
michael@0 4672 Label failure;
michael@0 4673 if (which_ == ICGetElem_Arguments::Magic) {
michael@0 4674 JS_ASSERT(!isCallElem_);
michael@0 4675
michael@0 4676 // Ensure that this is a magic arguments value.
michael@0 4677 masm.branchTestMagicValue(Assembler::NotEqual, R0, JS_OPTIMIZED_ARGUMENTS, &failure);
michael@0 4678
michael@0 4679 // Ensure that the frame has not created a real arguments object since entry.
michael@0 4680 masm.branchTest32(Assembler::NonZero,
michael@0 4681 Address(BaselineFrameReg, BaselineFrame::reverseOffsetOfFlags()),
michael@0 4682 Imm32(BaselineFrame::HAS_ARGS_OBJ),
michael@0 4683 &failure);
michael@0 4684
michael@0 4685 // Ensure that index is an integer.
michael@0 4686 masm.branchTestInt32(Assembler::NotEqual, R1, &failure);
michael@0 4687 Register idx = masm.extractInt32(R1, ExtractTemp1);
michael@0 4688
michael@0 4689 GeneralRegisterSet regs(availableGeneralRegs(2));
michael@0 4690 Register scratch = regs.takeAny();
michael@0 4691
michael@0 4692 // Load num actual arguments
michael@0 4693 Address actualArgs(BaselineFrameReg, BaselineFrame::offsetOfNumActualArgs());
michael@0 4694 masm.loadPtr(actualArgs, scratch);
michael@0 4695
michael@0 4696 // Ensure idx < argc
michael@0 4697 masm.branch32(Assembler::AboveOrEqual, idx, scratch, &failure);
michael@0 4698
michael@0 4699 // Load argval
michael@0 4700 JS_STATIC_ASSERT(sizeof(Value) == 8);
michael@0 4701 masm.movePtr(BaselineFrameReg, scratch);
michael@0 4702 masm.addPtr(Imm32(BaselineFrame::offsetOfArg(0)), scratch);
michael@0 4703 BaseIndex element(scratch, idx, TimesEight);
michael@0 4704 masm.loadValue(element, R0);
michael@0 4705
michael@0 4706 // Enter type monitor IC to type-check result.
michael@0 4707 EmitEnterTypeMonitorIC(masm);
michael@0 4708
michael@0 4709 masm.bind(&failure);
michael@0 4710 EmitStubGuardFailure(masm);
michael@0 4711 return true;
michael@0 4712 }
michael@0 4713
michael@0 4714 JS_ASSERT(which_ == ICGetElem_Arguments::Strict ||
michael@0 4715 which_ == ICGetElem_Arguments::Normal);
michael@0 4716
michael@0 4717 bool isStrict = which_ == ICGetElem_Arguments::Strict;
michael@0 4718 const Class *clasp = isStrict ? &StrictArgumentsObject::class_ : &NormalArgumentsObject::class_;
michael@0 4719
michael@0 4720 GeneralRegisterSet regs(availableGeneralRegs(2));
michael@0 4721 Register scratchReg = regs.takeAny();
michael@0 4722
michael@0 4723 // Guard on input being an arguments object.
michael@0 4724 masm.branchTestObject(Assembler::NotEqual, R0, &failure);
michael@0 4725 Register objReg = masm.extractObject(R0, ExtractTemp0);
michael@0 4726 masm.branchTestObjClass(Assembler::NotEqual, objReg, scratchReg, clasp, &failure);
michael@0 4727
michael@0 4728 // Guard on index being int32
michael@0 4729 masm.branchTestInt32(Assembler::NotEqual, R1, &failure);
michael@0 4730 Register idxReg = masm.extractInt32(R1, ExtractTemp1);
michael@0 4731
michael@0 4732 // Get initial ArgsObj length value.
michael@0 4733 masm.unboxInt32(Address(objReg, ArgumentsObject::getInitialLengthSlotOffset()), scratchReg);
michael@0 4734
michael@0 4735 // Test if length has been overridden.
michael@0 4736 masm.branchTest32(Assembler::NonZero,
michael@0 4737 scratchReg,
michael@0 4738 Imm32(ArgumentsObject::LENGTH_OVERRIDDEN_BIT),
michael@0 4739 &failure);
michael@0 4740
michael@0 4741 // Length has not been overridden; ensure that the index is below the length.
michael@0 4742 masm.rshiftPtr(Imm32(ArgumentsObject::PACKED_BITS_COUNT), scratchReg);
michael@0 4743 masm.branch32(Assembler::AboveOrEqual, idxReg, scratchReg, &failure);
michael@0 4744
michael@0 4745 // Length check succeeded; now check the corresponding bit in the deleted-elements
michael@0 4746 // bitmap. We clobber potential type regs from here on. Inputs will have to be
michael@0 4747 // reconstructed if we fail after this point, but that's unlikely.
michael@0 4748 Label failureReconstructInputs;
michael@0 4749 regs = availableGeneralRegs(0);
michael@0 4750 regs.takeUnchecked(objReg);
michael@0 4751 regs.takeUnchecked(idxReg);
michael@0 4752 regs.take(scratchReg);
michael@0 4753 Register argData = regs.takeAny();
michael@0 4754 Register tempReg = regs.takeAny();
michael@0 4755
michael@0 4756 // Load ArgumentsData
michael@0 4757 masm.loadPrivate(Address(objReg, ArgumentsObject::getDataSlotOffset()), argData);
michael@0 4758
michael@0 4759 // Load deletedBits bitArray pointer into scratchReg
michael@0 4760 masm.loadPtr(Address(argData, offsetof(ArgumentsData, deletedBits)), scratchReg);
michael@0 4761
michael@0 4762 // In tempReg, calculate index of word containing bit: (idx >> logBitsPerWord)
michael@0 4763 masm.movePtr(idxReg, tempReg);
michael@0 4764 const uint32_t shift = mozilla::tl::FloorLog2<(sizeof(size_t) * JS_BITS_PER_BYTE)>::value;
michael@0 4765 JS_ASSERT(shift == 5 || shift == 6);
michael@0 4766 masm.rshiftPtr(Imm32(shift), tempReg);
michael@0 4767 masm.loadPtr(BaseIndex(scratchReg, tempReg, ScaleFromElemWidth(sizeof(size_t))), scratchReg);
michael@0 4768
michael@0 4769 // Don't bother testing the specific bit; if any bit is set in the word, fail.
michael@0 4770 masm.branchPtr(Assembler::NotEqual, scratchReg, ImmPtr(nullptr), &failureReconstructInputs);
michael@0 4771
michael@0 4772 // Load the value. Use scratchReg and tempReg to form a ValueOperand to load into.
michael@0 4773 masm.addPtr(Imm32(ArgumentsData::offsetOfArgs()), argData);
michael@0 4774 regs.add(scratchReg);
michael@0 4775 regs.add(tempReg);
michael@0 4776 ValueOperand tempVal = regs.takeAnyValue();
michael@0 4777 masm.loadValue(BaseIndex(argData, idxReg, ScaleFromElemWidth(sizeof(Value))), tempVal);
michael@0 4778
michael@0 4779 // Make sure that this is not a FORWARD_TO_CALL_SLOT magic value.
michael@0 4780 masm.branchTestMagic(Assembler::Equal, tempVal, &failureReconstructInputs);
michael@0 4781
michael@0 4782 #if JS_HAS_NO_SUCH_METHOD
michael@0 4783 if (isCallElem_) {
michael@0 4784 Label afterNoSuchMethod;
michael@0 4785 Label skipNoSuchMethod;
michael@0 4786
michael@0 4787 masm.branchTestUndefined(Assembler::NotEqual, tempVal, &skipNoSuchMethod);
michael@0 4788
michael@0 4789 // Call __noSuchMethod__ checker. Object pointer is in objReg.
michael@0 4790 regs = availableGeneralRegs(0);
michael@0 4791 regs.takeUnchecked(objReg);
michael@0 4792 regs.takeUnchecked(idxReg);
michael@0 4793 regs.takeUnchecked(BaselineTailCallReg);
michael@0 4794 ValueOperand val = regs.takeValueOperand();
michael@0 4795
michael@0 4796 // Box and push obj and key onto baseline frame stack for decompiler.
michael@0 4797 EmitRestoreTailCallReg(masm);
michael@0 4798 masm.tagValue(JSVAL_TYPE_OBJECT, objReg, val);
michael@0 4799 masm.pushValue(val);
michael@0 4800 masm.tagValue(JSVAL_TYPE_INT32, idxReg, val);
michael@0 4801 masm.pushValue(val);
michael@0 4802 EmitRepushTailCallReg(masm);
michael@0 4803
michael@0 4804 regs.add(val);
michael@0 4805 enterStubFrame(masm, regs.getAnyExcluding(BaselineTailCallReg));
michael@0 4806 regs.take(val);
michael@0 4807
michael@0 4808 masm.pushValue(val);
michael@0 4809 masm.push(objReg);
michael@0 4810 if (!callVM(LookupNoSuchMethodHandlerInfo, masm))
michael@0 4811 return false;
michael@0 4812
michael@0 4813 leaveStubFrame(masm);
michael@0 4814
michael@0 4815 // Pop pushed obj and key from baseline stack.
michael@0 4816 EmitUnstowICValues(masm, 2, /* discard = */ true);
michael@0 4817
michael@0 4818 // Result is already in R0
michael@0 4819 masm.jump(&afterNoSuchMethod);
michael@0 4820 masm.bind(&skipNoSuchMethod);
michael@0 4821
michael@0 4822 masm.moveValue(tempVal, R0);
michael@0 4823 masm.bind(&afterNoSuchMethod);
michael@0 4824 } else {
michael@0 4825 masm.moveValue(tempVal, R0);
michael@0 4826 }
michael@0 4827 #else
michael@0 4828 // Copy value from temp to R0.
michael@0 4829 masm.moveValue(tempVal, R0);
michael@0 4830 #endif
michael@0 4831
michael@0 4832 // Type-check result
michael@0 4833 EmitEnterTypeMonitorIC(masm);
michael@0 4834
michael@0 4835 // Failure path where the inputs have been unboxed into an object and an int and
michael@0 4836 // need to be re-boxed into Values.
michael@0 4837 masm.bind(&failureReconstructInputs);
michael@0 4838 masm.tagValue(JSVAL_TYPE_OBJECT, objReg, R0);
michael@0 4839 masm.tagValue(JSVAL_TYPE_INT32, idxReg, R1);
michael@0 4840
michael@0 4841 masm.bind(&failure);
michael@0 4842 EmitStubGuardFailure(masm);
michael@0 4843 return true;
michael@0 4844 }
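// Note on the deletedBits test above: after |delete arguments[0]| the
// corresponding bit is set in ArgumentsData::deletedBits and reads must take
// the slow path. The stub is conservative and fails if any bit in the word
// containing the index is set, which is cheaper than isolating the exact bit
// and only costs a rare fallback.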
michael@0 4845
michael@0 4846 //
michael@0 4847 // SetElem_Fallback
michael@0 4848 //
michael@0 4849
michael@0 4850 static bool
michael@0 4851 SetElemDenseAddHasSameShapes(ICSetElem_DenseAdd *stub, JSObject *obj)
michael@0 4852 {
michael@0 4853 size_t numShapes = stub->protoChainDepth() + 1;
michael@0 4854 for (size_t i = 0; i < numShapes; i++) {
michael@0 4855 static const size_t MAX_DEPTH = ICSetElem_DenseAdd::MAX_PROTO_CHAIN_DEPTH;
michael@0 4856 if (obj->lastProperty() != stub->toImplUnchecked<MAX_DEPTH>()->shape(i))
michael@0 4857 return false;
michael@0 4858 obj = obj->getProto();
michael@0 4859 if (!obj && i != numShapes - 1)
michael@0 4860 return false;
michael@0 4861 }
michael@0 4862
michael@0 4863 return true;
michael@0 4864 }
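// A DenseAdd stub stays valid only while the receiver and every prototype it
// recorded keep their shapes: a shape change anywhere on that chain could
// introduce an indexed property or setter that must intercept the append. The
// helper above re-checks exactly the shapes captured when the stub was
// attached.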
michael@0 4865
michael@0 4866 static bool
michael@0 4867 DenseSetElemStubExists(JSContext *cx, ICStub::Kind kind, ICSetElem_Fallback *stub, HandleObject obj)
michael@0 4868 {
michael@0 4869 JS_ASSERT(kind == ICStub::SetElem_Dense || kind == ICStub::SetElem_DenseAdd);
michael@0 4870
michael@0 4871 for (ICStubConstIterator iter = stub->beginChainConst(); !iter.atEnd(); iter++) {
michael@0 4872 if (kind == ICStub::SetElem_Dense && iter->isSetElem_Dense()) {
michael@0 4873 ICSetElem_Dense *dense = iter->toSetElem_Dense();
michael@0 4874 if (obj->lastProperty() == dense->shape() && obj->getType(cx) == dense->type())
michael@0 4875 return true;
michael@0 4876 }
michael@0 4877
michael@0 4878 if (kind == ICStub::SetElem_DenseAdd && iter->isSetElem_DenseAdd()) {
michael@0 4879 ICSetElem_DenseAdd *dense = iter->toSetElem_DenseAdd();
michael@0 4880 if (obj->getType(cx) == dense->type() && SetElemDenseAddHasSameShapes(dense, obj))
michael@0 4881 return true;
michael@0 4882 }
michael@0 4883 }
michael@0 4884 return false;
michael@0 4885 }
michael@0 4886
michael@0 4887 static bool
michael@0 4888 TypedArraySetElemStubExists(ICSetElem_Fallback *stub, HandleObject obj, bool expectOOB)
michael@0 4889 {
michael@0 4890 for (ICStubConstIterator iter = stub->beginChainConst(); !iter.atEnd(); iter++) {
michael@0 4891 if (!iter->isSetElem_TypedArray())
michael@0 4892 continue;
michael@0 4893 ICSetElem_TypedArray *taStub = iter->toSetElem_TypedArray();
michael@0 4894 if (obj->lastProperty() == taStub->shape() && taStub->expectOutOfBounds() == expectOOB)
michael@0 4895 return true;
michael@0 4896 }
michael@0 4897 return false;
michael@0 4898 }
michael@0 4899
michael@0 4900 static bool
michael@0 4901 RemoveExistingTypedArraySetElemStub(JSContext *cx, ICSetElem_Fallback *stub, HandleObject obj)
michael@0 4902 {
michael@0 4903 for (ICStubIterator iter = stub->beginChain(); !iter.atEnd(); iter++) {
michael@0 4904 if (!iter->isSetElem_TypedArray())
michael@0 4905 continue;
michael@0 4906
michael@0 4907 if (obj->lastProperty() != iter->toSetElem_TypedArray()->shape())
michael@0 4908 continue;
michael@0 4909
michael@0 4910 // TypedArraySetElem stubs are only removed using this procedure if they are
michael@0 4911 // being replaced with one that expects an out-of-bounds index.
michael@0 4912 JS_ASSERT(!iter->toSetElem_TypedArray()->expectOutOfBounds());
michael@0 4913 iter.unlink(cx);
michael@0 4914 return true;
michael@0 4915 }
michael@0 4916 return false;
michael@0 4917 }
michael@0 4918
michael@0 4919 static bool
michael@0 4920 CanOptimizeDenseSetElem(JSContext *cx, HandleObject obj, uint32_t index,
michael@0 4921 HandleShape oldShape, uint32_t oldCapacity, uint32_t oldInitLength,
michael@0 4922 bool *isAddingCaseOut, size_t *protoDepthOut)
michael@0 4923 {
michael@0 4924 uint32_t initLength = obj->getDenseInitializedLength();
michael@0 4925 uint32_t capacity = obj->getDenseCapacity();
michael@0 4926
michael@0 4927 *isAddingCaseOut = false;
michael@0 4928 *protoDepthOut = 0;
michael@0 4929
michael@0 4930 // Some initial sanity checks.
michael@0 4931 if (initLength < oldInitLength || capacity < oldCapacity)
michael@0 4932 return false;
michael@0 4933
michael@0 4934 RootedShape shape(cx, obj->lastProperty());
michael@0 4935
michael@0 4936 // Cannot optimize if the shape changed.
michael@0 4937 if (oldShape != shape)
michael@0 4938 return false;
michael@0 4939
michael@0 4940 // Cannot optimize if the capacity changed.
michael@0 4941 if (oldCapacity != capacity)
michael@0 4942 return false;
michael@0 4943
michael@0 4944 // Cannot optimize if the index doesn't fit within the new initialized length.
michael@0 4945 if (index >= initLength)
michael@0 4946 return false;
michael@0 4947
michael@0 4948 // Cannot optimize if the value at the target position after the set is a hole.
michael@0 4949 if (!obj->containsDenseElement(index))
michael@0 4950 return false;
michael@0 4951
michael@0 4952 // At this point, if we know that the initLength did not change, then
michael@0 4953 // an optimized set is possible.
michael@0 4954 if (oldInitLength == initLength)
michael@0 4955 return true;
michael@0 4956
michael@0 4957 // If it did change, ensure that it changed specifically by incrementing by 1
michael@0 4958 // to accommodate this particular indexed set.
michael@0 4959 if (oldInitLength + 1 != initLength)
michael@0 4960 return false;
michael@0 4961 if (index != oldInitLength)
michael@0 4962 return false;
michael@0 4963
michael@0 4964 // The checks are not complete. The object may have a setter definition that
michael@0 4965 // handles a particular integer write, either directly, via a prototype, or via
michael@0 4966 // the target object of a prototype that is a proxy.
michael@0 4967 // Scan the prototype and shape chain to make sure that this is not the case.
michael@0 4968 RootedObject curObj(cx, obj);
michael@0 4969 while (curObj) {
michael@0 4970 // Ensure object is native.
michael@0 4971 if (!curObj->isNative())
michael@0 4972 return false;
michael@0 4973
michael@0 4974 // Ensure all indexed properties are stored in dense elements.
michael@0 4975 if (curObj->isIndexed())
michael@0 4976 return false;
michael@0 4977
michael@0 4978 curObj = curObj->getProto();
michael@0 4979 if (curObj)
michael@0 4980 ++*protoDepthOut;
michael@0 4981 }
michael@0 4982
michael@0 4983 if (*protoDepthOut > ICSetElem_DenseAdd::MAX_PROTO_CHAIN_DEPTH)
michael@0 4984 return false;
michael@0 4985
michael@0 4986 *isAddingCaseOut = true;
michael@0 4987
michael@0 4988 return true;
michael@0 4989 }
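// Illustrative mapping of the two optimizable cases decided above:
//
//   a[i] = v   with i < initializedLength and no hole at i      -> SetElem_Dense
//   a[n] = v   where n equals the old initializedLength and the
//              store grew the initialized length by exactly one -> SetElem_DenseAdd
//
// Sparse writes, shape or capacity changes, and indexed properties anywhere
// on the prototype chain disqualify both cases and keep the site on the
// fallback path.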
michael@0 4990
michael@0 4991 static bool
michael@0 4992 DoSetElemFallback(JSContext *cx, BaselineFrame *frame, ICSetElem_Fallback *stub_, Value *stack,
michael@0 4993 HandleValue objv, HandleValue index, HandleValue rhs)
michael@0 4994 {
michael@0 4995 // This fallback stub may trigger debug mode toggling.
michael@0 4996 DebugModeOSRVolatileStub<ICSetElem_Fallback *> stub(frame, stub_);
michael@0 4997
michael@0 4998 RootedScript script(cx, frame->script());
michael@0 4999 jsbytecode *pc = stub->icEntry()->pc(script);
michael@0 5000 JSOp op = JSOp(*pc);
michael@0 5001 FallbackICSpew(cx, stub, "SetElem(%s)", js_CodeName[JSOp(*pc)]);
michael@0 5002
michael@0 5003 JS_ASSERT(op == JSOP_SETELEM ||
michael@0 5004 op == JSOP_INITELEM ||
michael@0 5005 op == JSOP_INITELEM_ARRAY);
michael@0 5006
michael@0 5007 RootedObject obj(cx, ToObjectFromStack(cx, objv));
michael@0 5008 if (!obj)
michael@0 5009 return false;
michael@0 5010
michael@0 5011 RootedShape oldShape(cx, obj->lastProperty());
michael@0 5012
michael@0 5013 // Record the old capacity and initialized length for later comparison.
michael@0 5014 uint32_t oldCapacity = 0;
michael@0 5015 uint32_t oldInitLength = 0;
michael@0 5016 if (obj->isNative() && index.isInt32() && index.toInt32() >= 0) {
michael@0 5017 oldCapacity = obj->getDenseCapacity();
michael@0 5018 oldInitLength = obj->getDenseInitializedLength();
michael@0 5019 }
michael@0 5020
michael@0 5021 if (op == JSOP_INITELEM) {
michael@0 5022 if (!InitElemOperation(cx, obj, index, rhs))
michael@0 5023 return false;
michael@0 5024 } else if (op == JSOP_INITELEM_ARRAY) {
michael@0 5025 JS_ASSERT(uint32_t(index.toInt32()) == GET_UINT24(pc));
michael@0 5026 if (!InitArrayElemOperation(cx, pc, obj, index.toInt32(), rhs))
michael@0 5027 return false;
michael@0 5028 } else {
michael@0 5029 if (!SetObjectElement(cx, obj, index, rhs, script->strict(), script, pc))
michael@0 5030 return false;
michael@0 5031 }
michael@0 5032
michael@0 5033 // Overwrite the object on the stack (pushed for the decompiler) with the rhs.
michael@0 5034 JS_ASSERT(stack[2] == objv);
michael@0 5035 stack[2] = rhs;
michael@0 5036
michael@0 5037 // Check if debug mode toggling made the stub invalid.
michael@0 5038 if (stub.invalid())
michael@0 5039 return true;
michael@0 5040
michael@0 5041 if (stub->numOptimizedStubs() >= ICSetElem_Fallback::MAX_OPTIMIZED_STUBS) {
michael@0 5042 // TODO: Discard all stubs in this IC and replace with inert megamorphic stub.
michael@0 5043 // But for now we just bail.
michael@0 5044 return true;
michael@0 5045 }
michael@0 5046
michael@0 5047 // Try to generate new stubs.
michael@0 5048 if (obj->isNative() &&
michael@0 5049 !obj->is<TypedArrayObject>() &&
michael@0 5050 index.isInt32() && index.toInt32() >= 0 &&
michael@0 5051 !rhs.isMagic(JS_ELEMENTS_HOLE))
michael@0 5052 {
michael@0 5053 bool addingCase;
michael@0 5054 size_t protoDepth;
michael@0 5055
michael@0 5056 if (CanOptimizeDenseSetElem(cx, obj, index.toInt32(), oldShape, oldCapacity, oldInitLength,
michael@0 5057 &addingCase, &protoDepth))
michael@0 5058 {
michael@0 5059 RootedShape shape(cx, obj->lastProperty());
michael@0 5060 RootedTypeObject type(cx, obj->getType(cx));
michael@0 5061 if (!type)
michael@0 5062 return false;
michael@0 5063
michael@0 5064 if (addingCase && !DenseSetElemStubExists(cx, ICStub::SetElem_DenseAdd, stub, obj)) {
michael@0 5065 IonSpew(IonSpew_BaselineIC,
michael@0 5066 " Generating SetElem_DenseAdd stub "
michael@0 5067 "(shape=%p, type=%p, protoDepth=%u)",
michael@0 5068 obj->lastProperty(), type.get(), protoDepth);
michael@0 5069 ICSetElemDenseAddCompiler compiler(cx, obj, protoDepth);
michael@0 5070 ICUpdatedStub *denseStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 5071 if (!denseStub)
michael@0 5072 return false;
michael@0 5073 if (!denseStub->addUpdateStubForValue(cx, script, obj, JSID_VOIDHANDLE, rhs))
michael@0 5074 return false;
michael@0 5075
michael@0 5076 stub->addNewStub(denseStub);
michael@0 5077 } else if (!addingCase &&
michael@0 5078 !DenseSetElemStubExists(cx, ICStub::SetElem_Dense, stub, obj))
michael@0 5079 {
michael@0 5080 IonSpew(IonSpew_BaselineIC,
michael@0 5081 " Generating SetElem_Dense stub (shape=%p, type=%p)",
michael@0 5082 obj->lastProperty(), type.get());
michael@0 5083 ICSetElem_Dense::Compiler compiler(cx, shape, type);
michael@0 5084 ICUpdatedStub *denseStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 5085 if (!denseStub)
michael@0 5086 return false;
michael@0 5087 if (!denseStub->addUpdateStubForValue(cx, script, obj, JSID_VOIDHANDLE, rhs))
michael@0 5088 return false;
michael@0 5089
michael@0 5090 stub->addNewStub(denseStub);
michael@0 5091 }
michael@0 5092 }
michael@0 5093
michael@0 5094 return true;
michael@0 5095 }
michael@0 5096
michael@0 5097 if (obj->is<TypedArrayObject>() && index.isNumber() && rhs.isNumber()) {
michael@0 5098 Rooted<TypedArrayObject*> tarr(cx, &obj->as<TypedArrayObject>());
michael@0 5099 if (!cx->runtime()->jitSupportsFloatingPoint &&
michael@0 5100 (TypedArrayRequiresFloatingPoint(tarr) || index.isDouble()))
michael@0 5101 {
michael@0 5102 return true;
michael@0 5103 }
michael@0 5104
michael@0 5105 uint32_t len = tarr->length();
michael@0 5106 double idx = index.toNumber();
michael@0 5107 bool expectOutOfBounds = (idx < 0 || idx >= double(len));
michael@0 5108
michael@0 5109 if (!TypedArraySetElemStubExists(stub, tarr, expectOutOfBounds)) {
michael@0 5110 // Remove any existing TypedArraySetElem stub that doesn't handle out-of-bounds indexes.
michael@0 5111 if (expectOutOfBounds)
michael@0 5112 RemoveExistingTypedArraySetElemStub(cx, stub, tarr);
michael@0 5113
michael@0 5114 IonSpew(IonSpew_BaselineIC,
michael@0 5115 " Generating SetElem_TypedArray stub (shape=%p, type=%u, oob=%s)",
michael@0 5116 tarr->lastProperty(), tarr->type(), expectOutOfBounds ? "yes" : "no");
michael@0 5117 ICSetElem_TypedArray::Compiler compiler(cx, tarr->lastProperty(), tarr->type(),
michael@0 5118 expectOutOfBounds);
michael@0 5119 ICStub *typedArrayStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 5120 if (!typedArrayStub)
michael@0 5121 return false;
michael@0 5122
michael@0 5123 stub->addNewStub(typedArrayStub);
michael@0 5124 return true;
michael@0 5125 }
michael@0 5126 }
michael@0 5127
michael@0 5128 return true;
michael@0 5129 }
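// For typed arrays the fallback attaches one of two stub flavors: a strict
// in-bounds version, or one that also tolerates out-of-bounds indexes (where
// the write is simply ignored, matching typed array semantics). When an
// out-of-bounds store is first seen, the in-bounds stub for the same shape is
// unlinked and replaced by the tolerant flavor rather than chaining both.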
michael@0 5130
michael@0 5131 typedef bool (*DoSetElemFallbackFn)(JSContext *, BaselineFrame *, ICSetElem_Fallback *, Value *,
michael@0 5132 HandleValue, HandleValue, HandleValue);
michael@0 5133 static const VMFunction DoSetElemFallbackInfo =
michael@0 5134 FunctionInfo<DoSetElemFallbackFn>(DoSetElemFallback, PopValues(2));
michael@0 5135
michael@0 5136 bool
michael@0 5137 ICSetElem_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 5138 {
michael@0 5139 JS_ASSERT(R0 == JSReturnOperand);
michael@0 5140
michael@0 5141 EmitRestoreTailCallReg(masm);
michael@0 5142
michael@0 5143 // State: R0: object, R1: index, stack: rhs.
michael@0 5144 // For the decompiler, the stack has to be: object, index, rhs,
michael@0 5145 // so we push the index, then overwrite the rhs Value with R0
michael@0 5146 // and push the rhs value.
michael@0 5147 masm.pushValue(R1);
michael@0 5148 masm.loadValue(Address(BaselineStackReg, sizeof(Value)), R1);
michael@0 5149 masm.storeValue(R0, Address(BaselineStackReg, sizeof(Value)));
michael@0 5150 masm.pushValue(R1);
michael@0 5151
michael@0 5152 // Push arguments.
michael@0 5153 masm.pushValue(R1); // RHS
michael@0 5154
michael@0 5155 // Push index. On x86 and ARM two push instructions are emitted, so use a
michael@0 5156 // separate register to store the old stack pointer.
michael@0 5157 masm.mov(BaselineStackReg, R1.scratchReg());
michael@0 5158 masm.pushValue(Address(R1.scratchReg(), 2 * sizeof(Value)));
michael@0 5159 masm.pushValue(R0); // Object.
michael@0 5160
michael@0 5161 // Push pointer to stack values, so that the stub can overwrite the object
michael@0 5162 // (pushed for the decompiler) with the rhs.
michael@0 5163 masm.computeEffectiveAddress(Address(BaselineStackReg, 3 * sizeof(Value)), R0.scratchReg());
michael@0 5164 masm.push(R0.scratchReg());
michael@0 5165
michael@0 5166 masm.push(BaselineStubReg);
michael@0 5167 masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
michael@0 5168
michael@0 5169 return tailCallVM(DoSetElemFallbackInfo, masm);
michael@0 5170 }
michael@0 5171
michael@0 5172 void
michael@0 5173 BaselineScript::noteArrayWriteHole(uint32_t pcOffset)
michael@0 5174 {
michael@0 5175 ICEntry &entry = icEntryFromPCOffset(pcOffset);
michael@0 5176 ICFallbackStub *stub = entry.fallbackStub();
michael@0 5177
michael@0 5178 if (stub->isSetElem_Fallback())
michael@0 5179 stub->toSetElem_Fallback()->noteArrayWriteHole();
michael@0 5180 }
michael@0 5181
michael@0 5182 //
michael@0 5183 // SetElem_Dense
michael@0 5184 //
michael@0 5185
michael@0 5186 bool
michael@0 5187 ICSetElem_Dense::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 5188 {
michael@0 5189 // R0 = object
michael@0 5190 // R1 = key
michael@0 5191 // Stack = { ... rhs-value, <return-addr>? }
michael@0 5192 Label failure;
michael@0 5193 Label failureUnstow;
michael@0 5194 masm.branchTestObject(Assembler::NotEqual, R0, &failure);
michael@0 5195 masm.branchTestInt32(Assembler::NotEqual, R1, &failure);
michael@0 5196
michael@0 5197 GeneralRegisterSet regs(availableGeneralRegs(2));
michael@0 5198 Register scratchReg = regs.takeAny();
michael@0 5199
michael@0 5200 // Unbox R0 and guard on its shape.
michael@0 5201 Register obj = masm.extractObject(R0, ExtractTemp0);
michael@0 5202 masm.loadPtr(Address(BaselineStubReg, ICSetElem_Dense::offsetOfShape()), scratchReg);
michael@0 5203 masm.branchTestObjShape(Assembler::NotEqual, obj, scratchReg, &failure);
michael@0 5204
michael@0 5205 // Stow both R0 and R1 (object and key)
michael@0 5206 // But R0 and R1 still hold their values.
michael@0 5207 EmitStowICValues(masm, 2);
michael@0 5208
michael@0 5209 // We may need to free up some registers.
michael@0 5210 regs = availableGeneralRegs(0);
michael@0 5211 regs.take(R0);
michael@0 5212
michael@0 5213 // Guard that the type object matches.
michael@0 5214 Register typeReg = regs.takeAny();
michael@0 5215 masm.loadPtr(Address(BaselineStubReg, ICSetElem_Dense::offsetOfType()), typeReg);
michael@0 5216 masm.branchPtr(Assembler::NotEqual, Address(obj, JSObject::offsetOfType()), typeReg,
michael@0 5217 &failureUnstow);
michael@0 5218 regs.add(typeReg);
michael@0 5219
michael@0 5220 // Stack is now: { ..., rhs-value, object-value, key-value, maybe?-RET-ADDR }
michael@0 5221 // Load rhs-value in to R0
michael@0 5222 masm.loadValue(Address(BaselineStackReg, 2 * sizeof(Value) + ICStackValueOffset), R0);
michael@0 5223
michael@0 5224 // Call the type-update stub.
michael@0 5225 if (!callTypeUpdateIC(masm, sizeof(Value)))
michael@0 5226 return false;
michael@0 5227
michael@0 5228 // Unstow R0 and R1 (object and key)
michael@0 5229 EmitUnstowICValues(masm, 2);
michael@0 5230
michael@0 5231 // Reset register set.
michael@0 5232 regs = availableGeneralRegs(2);
michael@0 5233 scratchReg = regs.takeAny();
michael@0 5234
michael@0 5235 // Unbox object and key.
michael@0 5236 obj = masm.extractObject(R0, ExtractTemp0);
michael@0 5237 Register key = masm.extractInt32(R1, ExtractTemp1);
michael@0 5238
michael@0 5239 // Load obj->elements in scratchReg.
michael@0 5240 masm.loadPtr(Address(obj, JSObject::offsetOfElements()), scratchReg);
michael@0 5241
michael@0 5242 // Bounds check.
michael@0 5243 Address initLength(scratchReg, ObjectElements::offsetOfInitializedLength());
michael@0 5244 masm.branch32(Assembler::BelowOrEqual, initLength, key, &failure);
michael@0 5245
michael@0 5246 // Hole check.
michael@0 5247 BaseIndex element(scratchReg, key, TimesEight);
michael@0 5248 masm.branchTestMagic(Assembler::Equal, element, &failure);
michael@0 5249
michael@0 5250 // Failure is not possible now. Free up registers.
michael@0 5251 regs.add(R0);
michael@0 5252 regs.add(R1);
michael@0 5253 regs.takeUnchecked(obj);
michael@0 5254 regs.takeUnchecked(key);
michael@0 5255 Address valueAddr(BaselineStackReg, ICStackValueOffset);
michael@0 5256
michael@0 5257 // Convert int32 values to double if convertDoubleElements is set. In this
michael@0 5258 // case the heap typeset is guaranteed to contain both int32 and double, so
michael@0 5259 // it's okay to store a double.
michael@0 5260 Label dontConvertDoubles;
michael@0 5261 Address elementsFlags(scratchReg, ObjectElements::offsetOfFlags());
michael@0 5262 masm.branchTest32(Assembler::Zero, elementsFlags,
michael@0 5263 Imm32(ObjectElements::CONVERT_DOUBLE_ELEMENTS),
michael@0 5264 &dontConvertDoubles);
michael@0 5265 // Note that double arrays are only created by IonMonkey, so if we have no
michael@0 5266 // floating-point support, Ion is disabled and there should be no double arrays.
michael@0 5267 if (cx->runtime()->jitSupportsFloatingPoint)
michael@0 5268 masm.convertInt32ValueToDouble(valueAddr, regs.getAny(), &dontConvertDoubles);
michael@0 5269 else
michael@0 5270 masm.assumeUnreachable("There shouldn't be double arrays when there is no FP support.");
michael@0 5271 masm.bind(&dontConvertDoubles);
michael@0 5272
michael@0 5273 // Don't overwrite R0 because |obj| might overlap with it, and it's needed
michael@0 5274 // for the post-write barrier later.
michael@0 5275 ValueOperand tmpVal = regs.takeAnyValue();
michael@0 5276 masm.loadValue(valueAddr, tmpVal);
michael@0 5277 EmitPreBarrier(masm, element, MIRType_Value);
michael@0 5278 masm.storeValue(tmpVal, element);
michael@0 5279 regs.add(key);
michael@0 5280 #ifdef JSGC_GENERATIONAL
michael@0 5281 {
michael@0 5282 Register r = regs.takeAny();
michael@0 5283 GeneralRegisterSet saveRegs;
michael@0 5284 emitPostWriteBarrierSlot(masm, obj, tmpVal, r, saveRegs);
michael@0 5285 regs.add(r);
michael@0 5286 }
michael@0 5287 #endif
michael@0 5288 EmitReturnFromIC(masm);
michael@0 5289
michael@0 5290
michael@0 5291 // Failure case - fail but first unstow R0 and R1
michael@0 5292 masm.bind(&failureUnstow);
michael@0 5293 EmitUnstowICValues(masm, 2);
michael@0 5294
michael@0 5295 // Failure case - jump to next stub
michael@0 5296 masm.bind(&failure);
michael@0 5297 EmitStubGuardFailure(masm);
michael@0 5298 return true;
michael@0 5299 }
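
// Illustrative example (hypothetical script, for orientation only): a simple
// in-bounds dense element store such as
//
//     var a = [1, 2, 3];
//     a[1] = 42;          // int32 key below the initialized length, no hole
//
// can be handled by this stub once the array's shape and type object have been
// guarded; holes and out-of-bounds keys branch to the next stub in the chain.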
michael@0 5300
michael@0 5301 static bool
michael@0 5302 GetProtoShapes(JSObject *obj, size_t protoChainDepth, AutoShapeVector *shapes)
michael@0 5303 {
michael@0 5304 JS_ASSERT(shapes->length() == 1);
michael@0 5305 JSObject *curProto = obj->getProto();
michael@0 5306 for (size_t i = 0; i < protoChainDepth; i++) {
michael@0 5307 if (!shapes->append(curProto->lastProperty()))
michael@0 5308 return false;
michael@0 5309 curProto = curProto->getProto();
michael@0 5310 }
michael@0 5311 JS_ASSERT(!curProto);
michael@0 5312 return true;
michael@0 5313 }
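
// For orientation (hypothetical chain): with protoChainDepth == 2 and an object
// whose prototype chain is obj -> protoA -> protoB -> null, the shapes vector
// ends up as [obj->lastProperty(), protoA->lastProperty(), protoB->lastProperty()];
// the chain must terminate in null at exactly that depth, as asserted above.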
michael@0 5314
michael@0 5315 //
michael@0 5316 // SetElem_DenseAdd
michael@0 5317 //
michael@0 5318
michael@0 5319 ICUpdatedStub *
michael@0 5320 ICSetElemDenseAddCompiler::getStub(ICStubSpace *space)
michael@0 5321 {
michael@0 5322 AutoShapeVector shapes(cx);
michael@0 5323 if (!shapes.append(obj_->lastProperty()))
michael@0 5324 return nullptr;
michael@0 5325
michael@0 5326 if (!GetProtoShapes(obj_, protoChainDepth_, &shapes))
michael@0 5327 return nullptr;
michael@0 5328
michael@0 5329 JS_STATIC_ASSERT(ICSetElem_DenseAdd::MAX_PROTO_CHAIN_DEPTH == 4);
michael@0 5330
michael@0 5331 ICUpdatedStub *stub = nullptr;
michael@0 5332 switch (protoChainDepth_) {
michael@0 5333 case 0: stub = getStubSpecific<0>(space, &shapes); break;
michael@0 5334 case 1: stub = getStubSpecific<1>(space, &shapes); break;
michael@0 5335 case 2: stub = getStubSpecific<2>(space, &shapes); break;
michael@0 5336 case 3: stub = getStubSpecific<3>(space, &shapes); break;
michael@0 5337 case 4: stub = getStubSpecific<4>(space, &shapes); break;
michael@0 5338 default: MOZ_ASSUME_UNREACHABLE("ProtoChainDepth too high.");
michael@0 5339 }
michael@0 5340 if (!stub || !stub->initUpdatingChain(cx, space))
michael@0 5341 return nullptr;
michael@0 5342 return stub;
michael@0 5343 }
michael@0 5344
michael@0 5345 bool
michael@0 5346 ICSetElemDenseAddCompiler::generateStubCode(MacroAssembler &masm)
michael@0 5347 {
michael@0 5348 // R0 = object
michael@0 5349 // R1 = key
michael@0 5350 // Stack = { ... rhs-value, <return-addr>? }
michael@0 5351 Label failure;
michael@0 5352 Label failureUnstow;
michael@0 5353 masm.branchTestObject(Assembler::NotEqual, R0, &failure);
michael@0 5354 masm.branchTestInt32(Assembler::NotEqual, R1, &failure);
michael@0 5355
michael@0 5356 GeneralRegisterSet regs(availableGeneralRegs(2));
michael@0 5357 Register scratchReg = regs.takeAny();
michael@0 5358
michael@0 5359 // Unbox R0 and guard on its shape.
michael@0 5360 Register obj = masm.extractObject(R0, ExtractTemp0);
michael@0 5361 masm.loadPtr(Address(BaselineStubReg, ICSetElem_DenseAddImpl<0>::offsetOfShape(0)),
michael@0 5362 scratchReg);
michael@0 5363 masm.branchTestObjShape(Assembler::NotEqual, obj, scratchReg, &failure);
michael@0 5364
michael@0 5365 // Stow both R0 and R1 (object and key)
michael@0 5366 // But R0 and R1 still hold their values.
michael@0 5367 EmitStowICValues(masm, 2);
michael@0 5368
michael@0 5369 // We may need to free up some registers.
michael@0 5370 regs = availableGeneralRegs(0);
michael@0 5371 regs.take(R0);
michael@0 5372
michael@0 5373 // Guard that the type object matches.
michael@0 5374 Register typeReg = regs.takeAny();
michael@0 5375 masm.loadPtr(Address(BaselineStubReg, ICSetElem_DenseAdd::offsetOfType()), typeReg);
michael@0 5376 masm.branchPtr(Assembler::NotEqual, Address(obj, JSObject::offsetOfType()), typeReg,
michael@0 5377 &failureUnstow);
michael@0 5378 regs.add(typeReg);
michael@0 5379
michael@0 5380 // Shape guard objects on the proto chain.
michael@0 5381 scratchReg = regs.takeAny();
michael@0 5382 Register protoReg = regs.takeAny();
michael@0 5383 for (size_t i = 0; i < protoChainDepth_; i++) {
michael@0 5384 masm.loadObjProto(i == 0 ? obj : protoReg, protoReg);
michael@0 5385 masm.branchTestPtr(Assembler::Zero, protoReg, protoReg, &failureUnstow);
michael@0 5386 masm.loadPtr(Address(BaselineStubReg, ICSetElem_DenseAddImpl<0>::offsetOfShape(i + 1)),
michael@0 5387 scratchReg);
michael@0 5388 masm.branchTestObjShape(Assembler::NotEqual, protoReg, scratchReg, &failureUnstow);
michael@0 5389 }
michael@0 5390 regs.add(protoReg);
michael@0 5391 regs.add(scratchReg);
michael@0 5392
michael@0 5393 // Stack is now: { ..., rhs-value, object-value, key-value, maybe?-RET-ADDR }
michael@0 5394 // Load rhs-value into R0
michael@0 5395 masm.loadValue(Address(BaselineStackReg, 2 * sizeof(Value) + ICStackValueOffset), R0);
michael@0 5396
michael@0 5397 // Call the type-update stub.
michael@0 5398 if (!callTypeUpdateIC(masm, sizeof(Value)))
michael@0 5399 return false;
michael@0 5400
michael@0 5401 // Unstow R0 and R1 (object and key)
michael@0 5402 EmitUnstowICValues(masm, 2);
michael@0 5403
michael@0 5404 // Reset register set.
michael@0 5405 regs = availableGeneralRegs(2);
michael@0 5406 scratchReg = regs.takeAny();
michael@0 5407
michael@0 5408 // Unbox obj and key.
michael@0 5409 obj = masm.extractObject(R0, ExtractTemp0);
michael@0 5410 Register key = masm.extractInt32(R1, ExtractTemp1);
michael@0 5411
michael@0 5412 // Load obj->elements in scratchReg.
michael@0 5413 masm.loadPtr(Address(obj, JSObject::offsetOfElements()), scratchReg);
michael@0 5414
michael@0 5415 // Bounds check (key == initLength)
michael@0 5416 Address initLength(scratchReg, ObjectElements::offsetOfInitializedLength());
michael@0 5417 masm.branch32(Assembler::NotEqual, initLength, key, &failure);
michael@0 5418
michael@0 5419 // Capacity check.
michael@0 5420 Address capacity(scratchReg, ObjectElements::offsetOfCapacity());
michael@0 5421 masm.branch32(Assembler::BelowOrEqual, capacity, key, &failure);
michael@0 5422
michael@0 5423 // Failure is not possible now. Free up registers.
michael@0 5424 regs.add(R0);
michael@0 5425 regs.add(R1);
michael@0 5426 regs.takeUnchecked(obj);
michael@0 5427 regs.takeUnchecked(key);
michael@0 5428
michael@0 5429 // Increment initLength before write.
michael@0 5430 masm.add32(Imm32(1), initLength);
michael@0 5431
michael@0 5432 // If length is now <= key, increment length before write.
michael@0 5433 Label skipIncrementLength;
michael@0 5434 Address length(scratchReg, ObjectElements::offsetOfLength());
michael@0 5435 masm.branch32(Assembler::Above, length, key, &skipIncrementLength);
michael@0 5436 masm.add32(Imm32(1), length);
michael@0 5437 masm.bind(&skipIncrementLength);
michael@0 5438
michael@0 5439 Address valueAddr(BaselineStackReg, ICStackValueOffset);
michael@0 5440
michael@0 5441 // Convert int32 values to double if convertDoubleElements is set. In this
michael@0 5442 // case the heap typeset is guaranteed to contain both int32 and double, so
michael@0 5443 // it's okay to store a double.
michael@0 5444 Label dontConvertDoubles;
michael@0 5445 Address elementsFlags(scratchReg, ObjectElements::offsetOfFlags());
michael@0 5446 masm.branchTest32(Assembler::Zero, elementsFlags,
michael@0 5447 Imm32(ObjectElements::CONVERT_DOUBLE_ELEMENTS),
michael@0 5448 &dontConvertDoubles);
michael@0 5449 // Note that double arrays are only created by IonMonkey, so if we have no
michael@0 5450 // floating-point support, Ion is disabled and there should be no double arrays.
michael@0 5451 if (cx->runtime()->jitSupportsFloatingPoint)
michael@0 5452 masm.convertInt32ValueToDouble(valueAddr, regs.getAny(), &dontConvertDoubles);
michael@0 5453 else
michael@0 5454 masm.assumeUnreachable("There shouldn't be double arrays when there is no FP support.");
michael@0 5455 masm.bind(&dontConvertDoubles);
michael@0 5456
michael@0 5457 // Write the value. No need for pre-barrier since we're not overwriting an old value.
michael@0 5458 ValueOperand tmpVal = regs.takeAnyValue();
michael@0 5459 BaseIndex element(scratchReg, key, TimesEight);
michael@0 5460 masm.loadValue(valueAddr, tmpVal);
michael@0 5461 masm.storeValue(tmpVal, element);
michael@0 5462 regs.add(key);
michael@0 5463 #ifdef JSGC_GENERATIONAL
michael@0 5464 {
michael@0 5465 Register r = regs.takeAny();
michael@0 5466 GeneralRegisterSet saveRegs;
michael@0 5467 emitPostWriteBarrierSlot(masm, obj, tmpVal, r, saveRegs);
michael@0 5468 regs.add(r);
michael@0 5469 }
michael@0 5470 #endif
michael@0 5471 EmitReturnFromIC(masm);
michael@0 5472
michael@0 5473 // Failure case - fail but first unstow R0 and R1
michael@0 5474 masm.bind(&failureUnstow);
michael@0 5475 EmitUnstowICValues(masm, 2);
michael@0 5476
michael@0 5477 // Failure case - jump to next stub
michael@0 5478 masm.bind(&failure);
michael@0 5479 EmitStubGuardFailure(masm);
michael@0 5480 return true;
michael@0 5481 }
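
// Illustrative example (hypothetical script): appending exactly one element past
// the initialized length, e.g.
//
//     var a = [1, 2];
//     a[2] = 3;           // key == initializedLength, capacity permitting
//
// takes this path; the stub bumps the initialized length (and the length, if
// needed) before the store. Any other key, or a full elements buffer, branches
// to the next stub.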
michael@0 5482
michael@0 5483 //
michael@0 5484 // SetElem_TypedArray
michael@0 5485 //
michael@0 5486
michael@0 5487 bool
michael@0 5488 ICSetElem_TypedArray::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 5489 {
michael@0 5490 Label failure;
michael@0 5491 masm.branchTestObject(Assembler::NotEqual, R0, &failure);
michael@0 5492
michael@0 5493 GeneralRegisterSet regs(availableGeneralRegs(2));
michael@0 5494 Register scratchReg = regs.takeAny();
michael@0 5495
michael@0 5496 // Unbox R0 and shape guard.
michael@0 5497 Register obj = masm.extractObject(R0, ExtractTemp0);
michael@0 5498 masm.loadPtr(Address(BaselineStubReg, ICSetElem_TypedArray::offsetOfShape()), scratchReg);
michael@0 5499 masm.branchTestObjShape(Assembler::NotEqual, obj, scratchReg, &failure);
michael@0 5500
michael@0 5501 // Ensure the index is an integer.
michael@0 5502 if (cx->runtime()->jitSupportsFloatingPoint) {
michael@0 5503 Label isInt32;
michael@0 5504 masm.branchTestInt32(Assembler::Equal, R1, &isInt32);
michael@0 5505 {
michael@0 5506 // If the index is a double, try to convert it to int32. It's okay
michael@0 5507 // to convert -0 to 0: the shape check ensures the object is a typed
michael@0 5508 // array so the difference is not observable.
michael@0 5509 masm.branchTestDouble(Assembler::NotEqual, R1, &failure);
michael@0 5510 masm.unboxDouble(R1, FloatReg0);
michael@0 5511 masm.convertDoubleToInt32(FloatReg0, scratchReg, &failure, /* negZeroCheck = */false);
michael@0 5512 masm.tagValue(JSVAL_TYPE_INT32, scratchReg, R1);
michael@0 5513 }
michael@0 5514 masm.bind(&isInt32);
michael@0 5515 } else {
michael@0 5516 masm.branchTestInt32(Assembler::NotEqual, R1, &failure);
michael@0 5517 }
michael@0 5518
michael@0 5519 // Unbox key.
michael@0 5520 Register key = masm.extractInt32(R1, ExtractTemp1);
michael@0 5521
michael@0 5522 // Bounds check.
michael@0 5523 Label oobWrite;
michael@0 5524 masm.unboxInt32(Address(obj, TypedArrayObject::lengthOffset()), scratchReg);
michael@0 5525 masm.branch32(Assembler::BelowOrEqual, scratchReg, key,
michael@0 5526 expectOutOfBounds_ ? &oobWrite : &failure);
michael@0 5527
michael@0 5528 // Load the elements vector.
michael@0 5529 masm.loadPtr(Address(obj, TypedArrayObject::dataOffset()), scratchReg);
michael@0 5530
michael@0 5531 BaseIndex dest(scratchReg, key, ScaleFromElemWidth(TypedArrayObject::slotWidth(type_)));
michael@0 5532 Address value(BaselineStackReg, ICStackValueOffset);
michael@0 5533
michael@0 5534 // We need a second scratch register. It's okay to clobber the type tag of
michael@0 5535 // R0 or R1, as long as it's restored before jumping to the next stub.
michael@0 5536 regs = availableGeneralRegs(0);
michael@0 5537 regs.takeUnchecked(obj);
michael@0 5538 regs.takeUnchecked(key);
michael@0 5539 regs.take(scratchReg);
michael@0 5540 Register secondScratch = regs.takeAny();
michael@0 5541
michael@0 5542 if (type_ == ScalarTypeDescr::TYPE_FLOAT32 || type_ == ScalarTypeDescr::TYPE_FLOAT64) {
michael@0 5543 masm.ensureDouble(value, FloatReg0, &failure);
michael@0 5544 if (LIRGenerator::allowFloat32Optimizations() &&
michael@0 5545 type_ == ScalarTypeDescr::TYPE_FLOAT32)
michael@0 5546 {
michael@0 5547 masm.convertDoubleToFloat32(FloatReg0, ScratchFloatReg);
michael@0 5548 masm.storeToTypedFloatArray(type_, ScratchFloatReg, dest);
michael@0 5549 } else {
michael@0 5550 masm.storeToTypedFloatArray(type_, FloatReg0, dest);
michael@0 5551 }
michael@0 5552 EmitReturnFromIC(masm);
michael@0 5553 } else if (type_ == ScalarTypeDescr::TYPE_UINT8_CLAMPED) {
michael@0 5554 Label notInt32;
michael@0 5555 masm.branchTestInt32(Assembler::NotEqual, value, &notInt32);
michael@0 5556 masm.unboxInt32(value, secondScratch);
michael@0 5557 masm.clampIntToUint8(secondScratch);
michael@0 5558
michael@0 5559 Label clamped;
michael@0 5560 masm.bind(&clamped);
michael@0 5561 masm.storeToTypedIntArray(type_, secondScratch, dest);
michael@0 5562 EmitReturnFromIC(masm);
michael@0 5563
michael@0 5564 // If the value is a double, clamp to uint8 and jump back.
michael@0 5565 // Else, jump to failure.
michael@0 5566 masm.bind(&notInt32);
michael@0 5567 if (cx->runtime()->jitSupportsFloatingPoint) {
michael@0 5568 masm.branchTestDouble(Assembler::NotEqual, value, &failure);
michael@0 5569 masm.unboxDouble(value, FloatReg0);
michael@0 5570 masm.clampDoubleToUint8(FloatReg0, secondScratch);
michael@0 5571 masm.jump(&clamped);
michael@0 5572 } else {
michael@0 5573 masm.jump(&failure);
michael@0 5574 }
michael@0 5575 } else {
michael@0 5576 Label notInt32;
michael@0 5577 masm.branchTestInt32(Assembler::NotEqual, value, &notInt32);
michael@0 5578 masm.unboxInt32(value, secondScratch);
michael@0 5579
michael@0 5580 Label isInt32;
michael@0 5581 masm.bind(&isInt32);
michael@0 5582 masm.storeToTypedIntArray(type_, secondScratch, dest);
michael@0 5583 EmitReturnFromIC(masm);
michael@0 5584
michael@0 5585 // If the value is a double, truncate and jump back.
michael@0 5586 // Else, jump to failure.
michael@0 5587 Label failureRestoreRegs;
michael@0 5588 masm.bind(&notInt32);
michael@0 5589 if (cx->runtime()->jitSupportsFloatingPoint) {
michael@0 5590 masm.branchTestDouble(Assembler::NotEqual, value, &failure);
michael@0 5591 masm.unboxDouble(value, FloatReg0);
michael@0 5592 masm.branchTruncateDouble(FloatReg0, secondScratch, &failureRestoreRegs);
michael@0 5593 masm.jump(&isInt32);
michael@0 5594 } else {
michael@0 5595 masm.jump(&failure);
michael@0 5596 }
michael@0 5597
michael@0 5598 // Writing to secondScratch may have clobbered R0 or R1, restore them
michael@0 5599 // first.
michael@0 5600 masm.bind(&failureRestoreRegs);
michael@0 5601 masm.tagValue(JSVAL_TYPE_OBJECT, obj, R0);
michael@0 5602 masm.tagValue(JSVAL_TYPE_INT32, key, R1);
michael@0 5603 }
michael@0 5604
michael@0 5605 // Failure case - jump to next stub
michael@0 5606 masm.bind(&failure);
michael@0 5607 EmitStubGuardFailure(masm);
michael@0 5608
michael@0 5609 if (expectOutOfBounds_) {
michael@0 5610 masm.bind(&oobWrite);
michael@0 5611 EmitReturnFromIC(masm);
michael@0 5612 }
michael@0 5613 return true;
michael@0 5614 }
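
// Illustrative examples (hypothetical scripts):
//
//     var i32 = new Int32Array(4);
//     i32[1] = 5;         // in-bounds store; a double value would be truncated
//     i32[10] = 7;        // with expectOutOfBounds_ set, silently ignored
//
//     var u8c = new Uint8ClampedArray(1);
//     u8c[0] = 300;       // clamped to 255 by the TYPE_UINT8_CLAMPED path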
michael@0 5615
michael@0 5616 //
michael@0 5617 // In_Fallback
michael@0 5618 //
michael@0 5619
michael@0 5620 static bool
michael@0 5621 DoInFallback(JSContext *cx, ICIn_Fallback *stub, HandleValue key, HandleValue objValue,
michael@0 5622 MutableHandleValue res)
michael@0 5623 {
michael@0 5624 FallbackICSpew(cx, stub, "In");
michael@0 5625
michael@0 5626 if (!objValue.isObject()) {
michael@0 5627 js_ReportValueError(cx, JSMSG_IN_NOT_OBJECT, -1, objValue, NullPtr());
michael@0 5628 return false;
michael@0 5629 }
michael@0 5630
michael@0 5631 RootedObject obj(cx, &objValue.toObject());
michael@0 5632
michael@0 5633 bool cond = false;
michael@0 5634 if (!OperatorIn(cx, key, obj, &cond))
michael@0 5635 return false;
michael@0 5636
michael@0 5637 res.setBoolean(cond);
michael@0 5638 return true;
michael@0 5639 }
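
// For example, the cases this fallback evaluates via OperatorIn:
//
//     "a" in { a: 1 }       // true
//     "length" in [1, 2]    // true
//     "x" in "xyz"          // throws TypeError: right operand must be an object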
michael@0 5640
michael@0 5641 typedef bool (*DoInFallbackFn)(JSContext *, ICIn_Fallback *, HandleValue, HandleValue,
michael@0 5642 MutableHandleValue);
michael@0 5643 static const VMFunction DoInFallbackInfo =
michael@0 5644 FunctionInfo<DoInFallbackFn>(DoInFallback, PopValues(2));
michael@0 5645
michael@0 5646 bool
michael@0 5647 ICIn_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 5648 {
michael@0 5649 EmitRestoreTailCallReg(masm);
michael@0 5650
michael@0 5651 // Sync for the decompiler.
michael@0 5652 masm.pushValue(R0);
michael@0 5653 masm.pushValue(R1);
michael@0 5654
michael@0 5655 // Push arguments.
michael@0 5656 masm.pushValue(R1);
michael@0 5657 masm.pushValue(R0);
michael@0 5658 masm.push(BaselineStubReg);
michael@0 5659
michael@0 5660 return tailCallVM(DoInFallbackInfo, masm);
michael@0 5661 }
michael@0 5662
michael@0 5663 // Attach an optimized stub for a GETGNAME/CALLGNAME op.
michael@0 5664 static bool
michael@0 5665 TryAttachGlobalNameStub(JSContext *cx, HandleScript script, jsbytecode *pc,
michael@0 5666 ICGetName_Fallback *stub, HandleObject global,
michael@0 5667 HandlePropertyName name)
michael@0 5668 {
michael@0 5669 JS_ASSERT(global->is<GlobalObject>());
michael@0 5670
michael@0 5671 RootedId id(cx, NameToId(name));
michael@0 5672
michael@0 5673 // Instantiate this global property, for use during Ion compilation.
michael@0 5674 if (IsIonEnabled(cx))
michael@0 5675 types::EnsureTrackPropertyTypes(cx, global, NameToId(name));
michael@0 5676
michael@0 5677 // The property must be found, and it must be found as a normal data property.
michael@0 5678 RootedShape shape(cx, global->nativeLookup(cx, id));
michael@0 5679 if (!shape)
michael@0 5680 return true;
michael@0 5681
michael@0 5682 if (shape->hasDefaultGetter() && shape->hasSlot()) {
michael@0 5683
michael@0 5684 JS_ASSERT(shape->slot() >= global->numFixedSlots());
michael@0 5685 uint32_t slot = shape->slot() - global->numFixedSlots();
michael@0 5686
michael@0 5687 // TODO: if there's a previous stub, discard it or just update its Shape + slot?
michael@0 5688
michael@0 5689 ICStub *monitorStub = stub->fallbackMonitorStub()->firstMonitorStub();
michael@0 5690 IonSpew(IonSpew_BaselineIC, " Generating GetName(GlobalName) stub");
michael@0 5691 ICGetName_Global::Compiler compiler(cx, monitorStub, global->lastProperty(), slot);
michael@0 5692 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 5693 if (!newStub)
michael@0 5694 return false;
michael@0 5695
michael@0 5696 stub->addNewStub(newStub);
michael@0 5697 return true;
michael@0 5698 }
michael@0 5699
michael@0 5700 bool isScripted;
michael@0 5701 if (IsCacheableGetPropCall(cx, global, global, shape, &isScripted) && !isScripted)
michael@0 5702 {
michael@0 5703 ICStub *monitorStub = stub->fallbackMonitorStub()->firstMonitorStub();
michael@0 5704 IonSpew(IonSpew_BaselineIC, " Generating GetName(GlobalName/NativeGetter) stub");
michael@0 5705 RootedFunction getter(cx, &shape->getterObject()->as<JSFunction>());
michael@0 5706 ICGetProp_CallNative::Compiler compiler(cx, monitorStub, global,
michael@0 5707 getter, script->pcToOffset(pc),
michael@0 5708 /* inputDefinitelyObject = */ true);
michael@0 5709 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 5710 if (!newStub)
michael@0 5711 return false;
michael@0 5712
michael@0 5713 stub->addNewStub(newStub);
michael@0 5714 return true;
michael@0 5715 }
michael@0 5716
michael@0 5717 return true;
michael@0 5718 }
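
// Illustrative example (hypothetical script): reading a global binding such as
//
//     var counter = 0;
//     counter;              // typically compiles to a GETGNAME op
//
// attaches an ICGetName_Global stub once the property is found as a plain data
// slot on the global; a global accessor with a native getter gets an
// ICGetProp_CallNative stub instead.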
michael@0 5719
michael@0 5720 static bool
michael@0 5721 TryAttachScopeNameStub(JSContext *cx, HandleScript script, ICGetName_Fallback *stub,
michael@0 5722 HandleObject initialScopeChain, HandlePropertyName name)
michael@0 5723 {
michael@0 5724 AutoShapeVector shapes(cx);
michael@0 5725 RootedId id(cx, NameToId(name));
michael@0 5726 RootedObject scopeChain(cx, initialScopeChain);
michael@0 5727
michael@0 5728 Shape *shape = nullptr;
michael@0 5729 while (scopeChain) {
michael@0 5730 if (!shapes.append(scopeChain->lastProperty()))
michael@0 5731 return false;
michael@0 5732
michael@0 5733 if (scopeChain->is<GlobalObject>()) {
michael@0 5734 shape = scopeChain->nativeLookup(cx, id);
michael@0 5735 if (shape)
michael@0 5736 break;
michael@0 5737 return true;
michael@0 5738 }
michael@0 5739
michael@0 5740 if (!scopeChain->is<ScopeObject>() || scopeChain->is<DynamicWithObject>())
michael@0 5741 return true;
michael@0 5742
michael@0 5743 // Check for an 'own' property on the scope. There is no need to
michael@0 5744 // check the prototype as non-with scopes do not inherit properties
michael@0 5745 // from any prototype.
michael@0 5746 shape = scopeChain->nativeLookup(cx, id);
michael@0 5747 if (shape)
michael@0 5748 break;
michael@0 5749
michael@0 5750 scopeChain = scopeChain->enclosingScope();
michael@0 5751 }
michael@0 5752
michael@0 5753 if (!IsCacheableGetPropReadSlot(scopeChain, scopeChain, shape))
michael@0 5754 return true;
michael@0 5755
michael@0 5756 bool isFixedSlot;
michael@0 5757 uint32_t offset;
michael@0 5758 GetFixedOrDynamicSlotOffset(scopeChain, shape->slot(), &isFixedSlot, &offset);
michael@0 5759
michael@0 5760 ICStub *monitorStub = stub->fallbackMonitorStub()->firstMonitorStub();
michael@0 5761 ICStub *newStub;
michael@0 5762
michael@0 5763 switch (shapes.length()) {
michael@0 5764 case 1: {
michael@0 5765 ICGetName_Scope<0>::Compiler compiler(cx, monitorStub, &shapes, isFixedSlot, offset);
michael@0 5766 newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 5767 break;
michael@0 5768 }
michael@0 5769 case 2: {
michael@0 5770 ICGetName_Scope<1>::Compiler compiler(cx, monitorStub, &shapes, isFixedSlot, offset);
michael@0 5771 newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 5772 break;
michael@0 5773 }
michael@0 5774 case 3: {
michael@0 5775 ICGetName_Scope<2>::Compiler compiler(cx, monitorStub, &shapes, isFixedSlot, offset);
michael@0 5776 newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 5777 break;
michael@0 5778 }
michael@0 5779 case 4: {
michael@0 5780 ICGetName_Scope<3>::Compiler compiler(cx, monitorStub, &shapes, isFixedSlot, offset);
michael@0 5781 newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 5782 break;
michael@0 5783 }
michael@0 5784 case 5: {
michael@0 5785 ICGetName_Scope<4>::Compiler compiler(cx, monitorStub, &shapes, isFixedSlot, offset);
michael@0 5786 newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 5787 break;
michael@0 5788 }
michael@0 5789 case 6: {
michael@0 5790 ICGetName_Scope<5>::Compiler compiler(cx, monitorStub, &shapes, isFixedSlot, offset);
michael@0 5791 newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 5792 break;
michael@0 5793 }
michael@0 5794 case 7: {
michael@0 5795 ICGetName_Scope<6>::Compiler compiler(cx, monitorStub, &shapes, isFixedSlot, offset);
michael@0 5796 newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 5797 break;
michael@0 5798 }
michael@0 5799 default:
michael@0 5800 return true;
michael@0 5801 }
michael@0 5802
michael@0 5803 if (!newStub)
michael@0 5804 return false;
michael@0 5805
michael@0 5806 stub->addNewStub(newStub);
michael@0 5807 return true;
michael@0 5808 }
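
// Note on the switch above: shapes.length() counts every scope object that was
// shape-guarded, including the one holding the property, so a chain of N + 1
// guarded scopes selects ICGetName_Scope<N>, i.e. N hops from the innermost
// scope. Deeper chains simply stay on the fallback stub.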
michael@0 5809
michael@0 5810 static bool
michael@0 5811 DoGetNameFallback(JSContext *cx, BaselineFrame *frame, ICGetName_Fallback *stub_,
michael@0 5812 HandleObject scopeChain, MutableHandleValue res)
michael@0 5813 {
michael@0 5814 // This fallback stub may trigger debug mode toggling.
michael@0 5815 DebugModeOSRVolatileStub<ICGetName_Fallback *> stub(frame, stub_);
michael@0 5816
michael@0 5817 RootedScript script(cx, frame->script());
michael@0 5818 jsbytecode *pc = stub->icEntry()->pc(script);
michael@0 5819 mozilla::DebugOnly<JSOp> op = JSOp(*pc);
michael@0 5820 FallbackICSpew(cx, stub, "GetName(%s)", js_CodeName[JSOp(*pc)]);
michael@0 5821
michael@0 5822 JS_ASSERT(op == JSOP_NAME || op == JSOP_GETGNAME);
michael@0 5823
michael@0 5824 RootedPropertyName name(cx, script->getName(pc));
michael@0 5825
michael@0 5826 if (JSOp(pc[JSOP_GETGNAME_LENGTH]) == JSOP_TYPEOF) {
michael@0 5827 if (!GetScopeNameForTypeOf(cx, scopeChain, name, res))
michael@0 5828 return false;
michael@0 5829 } else {
michael@0 5830 if (!GetScopeName(cx, scopeChain, name, res))
michael@0 5831 return false;
michael@0 5832 }
michael@0 5833
michael@0 5834 types::TypeScript::Monitor(cx, script, pc, res);
michael@0 5835
michael@0 5836 // Check if debug mode toggling made the stub invalid.
michael@0 5837 if (stub.invalid())
michael@0 5838 return true;
michael@0 5839
michael@0 5840 // Add a type monitor stub for the resulting value.
michael@0 5841 if (!stub->addMonitorStubForValue(cx, script, res))
michael@0 5842 return false;
michael@0 5843
michael@0 5844 // Attach new stub.
michael@0 5845 if (stub->numOptimizedStubs() >= ICGetName_Fallback::MAX_OPTIMIZED_STUBS) {
michael@0 5846 // TODO: Discard all stubs in this IC and replace with generic stub.
michael@0 5847 return true;
michael@0 5848 }
michael@0 5849
michael@0 5850 if (js_CodeSpec[*pc].format & JOF_GNAME) {
michael@0 5851 if (!TryAttachGlobalNameStub(cx, script, pc, stub, scopeChain, name))
michael@0 5852 return false;
michael@0 5853 } else {
michael@0 5854 if (!TryAttachScopeNameStub(cx, script, stub, scopeChain, name))
michael@0 5855 return false;
michael@0 5856 }
michael@0 5857
michael@0 5858 return true;
michael@0 5859 }
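
// The JSOP_TYPEOF peek above preserves the usual behaviour that
//
//     typeof notDeclaredAnywhere    // "undefined", no ReferenceError
//
// must not throw for an unresolvable name, whereas a bare reference to
// |notDeclaredAnywhere| does throw.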
michael@0 5860
michael@0 5861 typedef bool (*DoGetNameFallbackFn)(JSContext *, BaselineFrame *, ICGetName_Fallback *,
michael@0 5862 HandleObject, MutableHandleValue);
michael@0 5863 static const VMFunction DoGetNameFallbackInfo = FunctionInfo<DoGetNameFallbackFn>(DoGetNameFallback);
michael@0 5864
michael@0 5865 bool
michael@0 5866 ICGetName_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 5867 {
michael@0 5868 JS_ASSERT(R0 == JSReturnOperand);
michael@0 5869
michael@0 5870 EmitRestoreTailCallReg(masm);
michael@0 5871
michael@0 5872 masm.push(R0.scratchReg());
michael@0 5873 masm.push(BaselineStubReg);
michael@0 5874 masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
michael@0 5875
michael@0 5876 return tailCallVM(DoGetNameFallbackInfo, masm);
michael@0 5877 }
michael@0 5878
michael@0 5879 bool
michael@0 5880 ICGetName_Global::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 5881 {
michael@0 5882 Label failure;
michael@0 5883 Register obj = R0.scratchReg();
michael@0 5884 Register scratch = R1.scratchReg();
michael@0 5885
michael@0 5886 // Shape guard.
michael@0 5887 masm.loadPtr(Address(BaselineStubReg, ICGetName_Global::offsetOfShape()), scratch);
michael@0 5888 masm.branchTestObjShape(Assembler::NotEqual, obj, scratch, &failure);
michael@0 5889
michael@0 5890 // Load dynamic slot.
michael@0 5891 masm.loadPtr(Address(obj, JSObject::offsetOfSlots()), obj);
michael@0 5892 masm.load32(Address(BaselineStubReg, ICGetName_Global::offsetOfSlot()), scratch);
michael@0 5893 masm.loadValue(BaseIndex(obj, scratch, TimesEight), R0);
michael@0 5894
michael@0 5895 // Enter type monitor IC to type-check result.
michael@0 5896 EmitEnterTypeMonitorIC(masm);
michael@0 5897
michael@0 5898 // Failure case - jump to next stub
michael@0 5899 masm.bind(&failure);
michael@0 5900 EmitStubGuardFailure(masm);
michael@0 5901 return true;
michael@0 5902 }
michael@0 5903
michael@0 5904 template <size_t NumHops>
michael@0 5905 bool
michael@0 5906 ICGetName_Scope<NumHops>::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 5907 {
michael@0 5908 Label failure;
michael@0 5909 GeneralRegisterSet regs(availableGeneralRegs(1));
michael@0 5910 Register obj = R0.scratchReg();
michael@0 5911 Register walker = regs.takeAny();
michael@0 5912 Register scratch = regs.takeAny();
michael@0 5913
michael@0 5914 // Use a local to silence Clang tautological-compare warning if NumHops is 0.
michael@0 5915 size_t numHops = NumHops;
michael@0 5916
michael@0 5917 for (size_t index = 0; index < NumHops + 1; index++) {
michael@0 5918 Register scope = index ? walker : obj;
michael@0 5919
michael@0 5920 // Shape guard.
michael@0 5921 masm.loadPtr(Address(BaselineStubReg, ICGetName_Scope::offsetOfShape(index)), scratch);
michael@0 5922 masm.branchTestObjShape(Assembler::NotEqual, scope, scratch, &failure);
michael@0 5923
michael@0 5924 if (index < numHops)
michael@0 5925 masm.extractObject(Address(scope, ScopeObject::offsetOfEnclosingScope()), walker);
michael@0 5926 }
michael@0 5927
michael@0 5928 Register scope = NumHops ? walker : obj;
michael@0 5929
michael@0 5930 if (!isFixedSlot_) {
michael@0 5931 masm.loadPtr(Address(scope, JSObject::offsetOfSlots()), walker);
michael@0 5932 scope = walker;
michael@0 5933 }
michael@0 5934
michael@0 5935 masm.load32(Address(BaselineStubReg, ICGetName_Scope::offsetOfOffset()), scratch);
michael@0 5936 masm.loadValue(BaseIndex(scope, scratch, TimesOne), R0);
michael@0 5937
michael@0 5938 // Enter type monitor IC to type-check result.
michael@0 5939 EmitEnterTypeMonitorIC(masm);
michael@0 5940
michael@0 5941 // Failure case - jump to next stub
michael@0 5942 masm.bind(&failure);
michael@0 5943 EmitStubGuardFailure(masm);
michael@0 5944 return true;
michael@0 5945 }
michael@0 5946
michael@0 5947 //
michael@0 5948 // BindName_Fallback
michael@0 5949 //
michael@0 5950
michael@0 5951 static bool
michael@0 5952 DoBindNameFallback(JSContext *cx, BaselineFrame *frame, ICBindName_Fallback *stub,
michael@0 5953 HandleObject scopeChain, MutableHandleValue res)
michael@0 5954 {
michael@0 5955 jsbytecode *pc = stub->icEntry()->pc(frame->script());
michael@0 5956 mozilla::DebugOnly<JSOp> op = JSOp(*pc);
michael@0 5957 FallbackICSpew(cx, stub, "BindName(%s)", js_CodeName[JSOp(*pc)]);
michael@0 5958
michael@0 5959 JS_ASSERT(op == JSOP_BINDNAME);
michael@0 5960
michael@0 5961 RootedPropertyName name(cx, frame->script()->getName(pc));
michael@0 5962
michael@0 5963 RootedObject scope(cx);
michael@0 5964 if (!LookupNameWithGlobalDefault(cx, name, scopeChain, &scope))
michael@0 5965 return false;
michael@0 5966
michael@0 5967 res.setObject(*scope);
michael@0 5968 return true;
michael@0 5969 }
michael@0 5970
michael@0 5971 typedef bool (*DoBindNameFallbackFn)(JSContext *, BaselineFrame *, ICBindName_Fallback *,
michael@0 5972 HandleObject, MutableHandleValue);
michael@0 5973 static const VMFunction DoBindNameFallbackInfo =
michael@0 5974 FunctionInfo<DoBindNameFallbackFn>(DoBindNameFallback);
michael@0 5975
michael@0 5976 bool
michael@0 5977 ICBindName_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 5978 {
michael@0 5979 JS_ASSERT(R0 == JSReturnOperand);
michael@0 5980
michael@0 5981 EmitRestoreTailCallReg(masm);
michael@0 5982
michael@0 5983 masm.push(R0.scratchReg());
michael@0 5984 masm.push(BaselineStubReg);
michael@0 5985 masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
michael@0 5986
michael@0 5987 return tailCallVM(DoBindNameFallbackInfo, masm);
michael@0 5988 }
michael@0 5989
michael@0 5990 //
michael@0 5991 // GetIntrinsic_Fallback
michael@0 5992 //
michael@0 5993
michael@0 5994 static bool
michael@0 5995 DoGetIntrinsicFallback(JSContext *cx, BaselineFrame *frame, ICGetIntrinsic_Fallback *stub_,
michael@0 5996 MutableHandleValue res)
michael@0 5997 {
michael@0 5998 // This fallback stub may trigger debug mode toggling.
michael@0 5999 DebugModeOSRVolatileStub<ICGetIntrinsic_Fallback *> stub(frame, stub_);
michael@0 6000
michael@0 6001 RootedScript script(cx, frame->script());
michael@0 6002 jsbytecode *pc = stub->icEntry()->pc(script);
michael@0 6003 mozilla::DebugOnly<JSOp> op = JSOp(*pc);
michael@0 6004 FallbackICSpew(cx, stub, "GetIntrinsic(%s)", js_CodeName[JSOp(*pc)]);
michael@0 6005
michael@0 6006 JS_ASSERT(op == JSOP_GETINTRINSIC);
michael@0 6007
michael@0 6008 if (!GetIntrinsicOperation(cx, pc, res))
michael@0 6009 return false;
michael@0 6010
michael@0 6011 // An intrinsic operation will always produce the same result, so it only
michael@0 6012 // needs to be monitored once. Attach a stub to load the resulting constant
michael@0 6013 // directly.
michael@0 6014
michael@0 6015 types::TypeScript::Monitor(cx, script, pc, res);
michael@0 6016
michael@0 6017 // Check if debug mode toggling made the stub invalid.
michael@0 6018 if (stub.invalid())
michael@0 6019 return true;
michael@0 6020
michael@0 6021 IonSpew(IonSpew_BaselineIC, " Generating GetIntrinsic optimized stub");
michael@0 6022 ICGetIntrinsic_Constant::Compiler compiler(cx, res);
michael@0 6023 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 6024 if (!newStub)
michael@0 6025 return false;
michael@0 6026
michael@0 6027 stub->addNewStub(newStub);
michael@0 6028 return true;
michael@0 6029 }
michael@0 6030
michael@0 6031 typedef bool (*DoGetIntrinsicFallbackFn)(JSContext *, BaselineFrame *, ICGetIntrinsic_Fallback *,
michael@0 6032 MutableHandleValue);
michael@0 6033 static const VMFunction DoGetIntrinsicFallbackInfo =
michael@0 6034 FunctionInfo<DoGetIntrinsicFallbackFn>(DoGetIntrinsicFallback);
michael@0 6035
michael@0 6036 bool
michael@0 6037 ICGetIntrinsic_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 6038 {
michael@0 6039 EmitRestoreTailCallReg(masm);
michael@0 6040
michael@0 6041 masm.push(BaselineStubReg);
michael@0 6042 masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
michael@0 6043
michael@0 6044 return tailCallVM(DoGetIntrinsicFallbackInfo, masm);
michael@0 6045 }
michael@0 6046
michael@0 6047 bool
michael@0 6048 ICGetIntrinsic_Constant::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 6049 {
michael@0 6050 masm.loadValue(Address(BaselineStubReg, ICGetIntrinsic_Constant::offsetOfValue()), R0);
michael@0 6051
michael@0 6052 EmitReturnFromIC(masm);
michael@0 6053 return true;
michael@0 6054 }
michael@0 6055
michael@0 6056 //
michael@0 6057 // GetProp_Fallback
michael@0 6058 //
michael@0 6059
michael@0 6060 static bool
michael@0 6061 TryAttachLengthStub(JSContext *cx, JSScript *script, ICGetProp_Fallback *stub, HandleValue val,
michael@0 6062 HandleValue res, bool *attached)
michael@0 6063 {
michael@0 6064 JS_ASSERT(!*attached);
michael@0 6065
michael@0 6066 if (val.isString()) {
michael@0 6067 JS_ASSERT(res.isInt32());
michael@0 6068 IonSpew(IonSpew_BaselineIC, " Generating GetProp(String.length) stub");
michael@0 6069 ICGetProp_StringLength::Compiler compiler(cx);
michael@0 6070 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 6071 if (!newStub)
michael@0 6072 return false;
michael@0 6073
michael@0 6074 *attached = true;
michael@0 6075 stub->addNewStub(newStub);
michael@0 6076 return true;
michael@0 6077 }
michael@0 6078
michael@0 6079 if (val.isMagic(JS_OPTIMIZED_ARGUMENTS) && res.isInt32()) {
michael@0 6080 IonSpew(IonSpew_BaselineIC, " Generating GetProp(MagicArgs.length) stub");
michael@0 6081 ICGetProp_ArgumentsLength::Compiler compiler(cx, ICGetProp_ArgumentsLength::Magic);
michael@0 6082 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 6083 if (!newStub)
michael@0 6084 return false;
michael@0 6085
michael@0 6086 *attached = true;
michael@0 6087 stub->addNewStub(newStub);
michael@0 6088 return true;
michael@0 6089 }
michael@0 6090
michael@0 6091 if (!val.isObject())
michael@0 6092 return true;
michael@0 6093
michael@0 6094 RootedObject obj(cx, &val.toObject());
michael@0 6095
michael@0 6096 if (obj->is<ArrayObject>() && res.isInt32()) {
michael@0 6097 IonSpew(IonSpew_BaselineIC, " Generating GetProp(Array.length) stub");
michael@0 6098 ICGetProp_ArrayLength::Compiler compiler(cx);
michael@0 6099 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 6100 if (!newStub)
michael@0 6101 return false;
michael@0 6102
michael@0 6103 *attached = true;
michael@0 6104 stub->addNewStub(newStub);
michael@0 6105 return true;
michael@0 6106 }
michael@0 6107
michael@0 6108 if (obj->is<TypedArrayObject>() && res.isInt32()) {
michael@0 6109 IonSpew(IonSpew_BaselineIC, " Generating GetProp(TypedArray.length) stub");
michael@0 6110 ICGetProp_TypedArrayLength::Compiler compiler(cx);
michael@0 6111 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 6112 if (!newStub)
michael@0 6113 return false;
michael@0 6114
michael@0 6115 *attached = true;
michael@0 6116 stub->addNewStub(newStub);
michael@0 6117 return true;
michael@0 6118 }
michael@0 6119
michael@0 6120 if (obj->is<ArgumentsObject>() && res.isInt32()) {
michael@0 6121 IonSpew(IonSpew_BaselineIC, " Generating GetProp(ArgsObj.length %s) stub",
michael@0 6122 obj->is<StrictArgumentsObject>() ? "Strict" : "Normal");
michael@0 6123 ICGetProp_ArgumentsLength::Which which = ICGetProp_ArgumentsLength::Normal;
michael@0 6124 if (obj->is<StrictArgumentsObject>())
michael@0 6125 which = ICGetProp_ArgumentsLength::Strict;
michael@0 6126 ICGetProp_ArgumentsLength::Compiler compiler(cx, which);
michael@0 6127 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 6128 if (!newStub)
michael@0 6129 return false;
michael@0 6130
michael@0 6131 *attached = true;
michael@0 6132 stub->addNewStub(newStub);
michael@0 6133 return true;
michael@0 6134 }
michael@0 6135
michael@0 6136 return true;
michael@0 6137 }
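
// Illustrative examples of the cases handled above (hypothetical scripts):
//
//     "abc".length                     // ICGetProp_StringLength
//     [1, 2, 3].length                 // ICGetProp_ArrayLength
//     new Int8Array(8).length          // ICGetProp_TypedArrayLength
//     function f() { return arguments.length; }    // ICGetProp_ArgumentsLength
//
// In each case the observed result is an int32, which is required for a stub
// to attach.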
michael@0 6138
michael@0 6139 static bool
michael@0 6140 UpdateExistingGenerationalDOMProxyStub(ICGetProp_Fallback *stub,
michael@0 6141 HandleObject obj)
michael@0 6142 {
michael@0 6143 Value expandoSlot = obj->getFixedSlot(GetDOMProxyExpandoSlot());
michael@0 6144 JS_ASSERT(!expandoSlot.isObject() && !expandoSlot.isUndefined());
michael@0 6145 ExpandoAndGeneration *expandoAndGeneration = (ExpandoAndGeneration*)expandoSlot.toPrivate();
michael@0 6146 for (ICStubConstIterator iter = stub->beginChainConst(); !iter.atEnd(); iter++) {
michael@0 6147 if (iter->isGetProp_CallDOMProxyWithGenerationNative()) {
michael@0 6148 ICGetProp_CallDOMProxyWithGenerationNative* updateStub =
michael@0 6149 iter->toGetProp_CallDOMProxyWithGenerationNative();
michael@0 6150 if (updateStub->expandoAndGeneration() == expandoAndGeneration) {
michael@0 6151 // Update generation
michael@0 6152 uint32_t generation = expandoAndGeneration->generation;
michael@0 6153 IonSpew(IonSpew_BaselineIC,
michael@0 6154 " Updating existing stub with generation, old value: %i, "
michael@0 6155 "new value: %i", updateStub->generation(),
michael@0 6156 generation);
michael@0 6157 updateStub->setGeneration(generation);
michael@0 6158 return true;
michael@0 6159 }
michael@0 6160 }
michael@0 6161 }
michael@0 6162 return false;
michael@0 6163 }
michael@0 6164
michael@0 6165 static bool
michael@0 6166 TryAttachNativeGetPropStub(JSContext *cx, HandleScript script, jsbytecode *pc,
michael@0 6167 ICGetProp_Fallback *stub, HandlePropertyName name,
michael@0 6168 HandleValue val, HandleValue res, bool *attached)
michael@0 6169 {
michael@0 6170 JS_ASSERT(!*attached);
michael@0 6171
michael@0 6172 if (!val.isObject())
michael@0 6173 return true;
michael@0 6174
michael@0 6175 RootedObject obj(cx, &val.toObject());
michael@0 6176
michael@0 6177 bool isDOMProxy;
michael@0 6178 bool domProxyHasGeneration;
michael@0 6179 DOMProxyShadowsResult domProxyShadowsResult;
michael@0 6180 RootedShape shape(cx);
michael@0 6181 RootedObject holder(cx);
michael@0 6182 if (!EffectlesslyLookupProperty(cx, obj, name, &holder, &shape, &isDOMProxy,
michael@0 6183 &domProxyShadowsResult, &domProxyHasGeneration))
michael@0 6184 {
michael@0 6185 return false;
michael@0 6186 }
michael@0 6187
michael@0 6188 if (!isDOMProxy && !obj->isNative())
michael@0 6189 return true;
michael@0 6190
michael@0 6191 bool isCallProp = (JSOp(*pc) == JSOP_CALLPROP);
michael@0 6192
michael@0 6193 ICStub *monitorStub = stub->fallbackMonitorStub()->firstMonitorStub();
michael@0 6194 if (!isDOMProxy && IsCacheableGetPropReadSlot(obj, holder, shape)) {
michael@0 6195 bool isFixedSlot;
michael@0 6196 uint32_t offset;
michael@0 6197 GetFixedOrDynamicSlotOffset(holder, shape->slot(), &isFixedSlot, &offset);
michael@0 6198
michael@0 6199 // Instantiate this property for singleton holders, for use during Ion compilation.
michael@0 6200 if (IsIonEnabled(cx))
michael@0 6201 types::EnsureTrackPropertyTypes(cx, holder, NameToId(name));
michael@0 6202
michael@0 6203 ICStub::Kind kind = (obj == holder) ? ICStub::GetProp_Native
michael@0 6204 : ICStub::GetProp_NativePrototype;
michael@0 6205
michael@0 6206 IonSpew(IonSpew_BaselineIC, " Generating GetProp(%s %s) stub",
michael@0 6207 isDOMProxy ? "DOMProxy" : "Native",
michael@0 6208 (obj == holder) ? "direct" : "prototype");
michael@0 6209 ICGetPropNativeCompiler compiler(cx, kind, isCallProp, monitorStub, obj, holder,
michael@0 6210 name, isFixedSlot, offset);
michael@0 6211 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 6212 if (!newStub)
michael@0 6213 return false;
michael@0 6214
michael@0 6215 stub->addNewStub(newStub);
michael@0 6216 *attached = true;
michael@0 6217 return true;
michael@0 6218 }
michael@0 6219
michael@0 6220 bool isScripted = false;
michael@0 6221 bool cacheableCall = IsCacheableGetPropCall(cx, obj, holder, shape, &isScripted, isDOMProxy);
michael@0 6222
michael@0 6223 // Try handling scripted getters.
michael@0 6224 if (cacheableCall && isScripted && !isDOMProxy) {
michael@0 6225 #if JS_HAS_NO_SUCH_METHOD
michael@0 6226 // It's hard to keep the original object alive through a call, and it's unlikely
michael@0 6227 // that a getter will be used to generate functions for calling in CALLPROP locations.
michael@0 6228 // Just don't attach stubs in that case.
michael@0 6229 if (isCallProp)
michael@0 6230 return true;
michael@0 6231 #endif
michael@0 6232
michael@0 6233 // Don't handle scripted own property getters
michael@0 6234 if (obj == holder)
michael@0 6235 return true;
michael@0 6236
michael@0 6237 RootedFunction callee(cx, &shape->getterObject()->as<JSFunction>());
michael@0 6238 JS_ASSERT(obj != holder);
michael@0 6239 JS_ASSERT(callee->hasScript());
michael@0 6240
michael@0 6241 IonSpew(IonSpew_BaselineIC, " Generating GetProp(NativeObj/ScriptedGetter %s:%d) stub",
michael@0 6242 callee->nonLazyScript()->filename(), callee->nonLazyScript()->lineno());
michael@0 6243
michael@0 6244 ICGetProp_CallScripted::Compiler compiler(cx, monitorStub, obj, holder, callee,
michael@0 6245 script->pcToOffset(pc));
michael@0 6246 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 6247 if (!newStub)
michael@0 6248 return false;
michael@0 6249
michael@0 6250 stub->addNewStub(newStub);
michael@0 6251 *attached = true;
michael@0 6252 return true;
michael@0 6253 }
michael@0 6254
michael@0 6255 // Try handling JSNative getters.
michael@0 6256 if (cacheableCall && !isScripted) {
michael@0 6257 #if JS_HAS_NO_SUCH_METHOD
michael@0 6258 // It's unlikely that a getter function will be used to generate functions for calling
michael@0 6259 // in CALLPROP locations. Just don't attach stubs in that case to avoid issues with
michael@0 6260 // __noSuchMethod__ handling.
michael@0 6261 if (isCallProp)
michael@0 6262 return true;
michael@0 6263 #endif
michael@0 6264
michael@0 6265 RootedFunction callee(cx, &shape->getterObject()->as<JSFunction>());
michael@0 6266 JS_ASSERT(callee->isNative());
michael@0 6267
michael@0 6268 IonSpew(IonSpew_BaselineIC, " Generating GetProp(%s%s/NativeGetter %p) stub",
michael@0 6269 isDOMProxy ? "DOMProxyObj" : "NativeObj",
michael@0 6270 isDOMProxy && domProxyHasGeneration ? "WithGeneration" : "",
michael@0 6271 callee->native());
michael@0 6272
michael@0 6273 ICStub *newStub = nullptr;
michael@0 6274 if (isDOMProxy) {
michael@0 6275 JS_ASSERT(obj != holder);
michael@0 6276 ICStub::Kind kind;
michael@0 6277 if (domProxyHasGeneration) {
michael@0 6278 if (UpdateExistingGenerationalDOMProxyStub(stub, obj)) {
michael@0 6279 *attached = true;
michael@0 6280 return true;
michael@0 6281 }
michael@0 6282 kind = ICStub::GetProp_CallDOMProxyWithGenerationNative;
michael@0 6283 } else {
michael@0 6284 kind = ICStub::GetProp_CallDOMProxyNative;
michael@0 6285 }
michael@0 6286 Rooted<ProxyObject*> proxy(cx, &obj->as<ProxyObject>());
michael@0 6287 ICGetPropCallDOMProxyNativeCompiler
michael@0 6288 compiler(cx, kind, monitorStub, proxy, holder, callee, script->pcToOffset(pc));
michael@0 6289 newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 6290 } else if (obj == holder) {
michael@0 6291 ICGetProp_CallNative::Compiler compiler(cx, monitorStub, obj, callee,
michael@0 6292 script->pcToOffset(pc));
michael@0 6293 newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 6294 } else {
michael@0 6295 ICGetProp_CallNativePrototype::Compiler compiler(cx, monitorStub, obj, holder, callee,
michael@0 6296 script->pcToOffset(pc));
michael@0 6297 newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 6298 }
michael@0 6299 if (!newStub)
michael@0 6300 return false;
michael@0 6301 stub->addNewStub(newStub);
michael@0 6302 *attached = true;
michael@0 6303 return true;
michael@0 6304 }
michael@0 6305
michael@0 6306 // If it's a shadowed listbase proxy property, attach stub to call Proxy::get instead.
michael@0 6307 if (isDOMProxy && domProxyShadowsResult == Shadows) {
michael@0 6308 JS_ASSERT(obj == holder);
michael@0 6309 #if JS_HAS_NO_SUCH_METHOD
michael@0 6310 if (isCallProp)
michael@0 6311 return true;
michael@0 6312 #endif
michael@0 6313
michael@0 6314 IonSpew(IonSpew_BaselineIC, " Generating GetProp(DOMProxyProxy) stub");
michael@0 6315 Rooted<ProxyObject*> proxy(cx, &obj->as<ProxyObject>());
michael@0 6316 ICGetProp_DOMProxyShadowed::Compiler compiler(cx, monitorStub, proxy, name,
michael@0 6317 script->pcToOffset(pc));
michael@0 6318 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 6319 if (!newStub)
michael@0 6320 return false;
michael@0 6321 stub->addNewStub(newStub);
michael@0 6322 *attached = true;
michael@0 6323 return true;
michael@0 6324 }
michael@0 6325
michael@0 6326 return true;
michael@0 6327 }
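
// Illustrative examples (hypothetical objects):
//
//     var o = { x: 1 };
//     o.x;                                       // own data slot -> ICGetProp_Native
//
//     var p = Object.create({ get y() { return 2; } });
//     p.y;                                       // scripted getter on the prototype
//                                                //   -> ICGetProp_CallScripted
//
// JSNative getters and DOM proxy properties take the CallNative /
// CallNativePrototype and CallDOMProxyNative variants chosen above.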
michael@0 6328
michael@0 6329 static bool
michael@0 6330 TryAttachPrimitiveGetPropStub(JSContext *cx, HandleScript script, jsbytecode *pc,
michael@0 6331 ICGetProp_Fallback *stub, HandlePropertyName name, HandleValue val,
michael@0 6332 HandleValue res, bool *attached)
michael@0 6333 {
michael@0 6334 JS_ASSERT(!*attached);
michael@0 6335
michael@0 6336 JSValueType primitiveType;
michael@0 6337 RootedObject proto(cx);
michael@0 6338 Rooted<GlobalObject*> global(cx, &script->global());
michael@0 6339 if (val.isString()) {
michael@0 6340 primitiveType = JSVAL_TYPE_STRING;
michael@0 6341 proto = GlobalObject::getOrCreateStringPrototype(cx, global);
michael@0 6342 } else if (val.isNumber()) {
michael@0 6343 primitiveType = JSVAL_TYPE_DOUBLE;
michael@0 6344 proto = GlobalObject::getOrCreateNumberPrototype(cx, global);
michael@0 6345 } else {
michael@0 6346 JS_ASSERT(val.isBoolean());
michael@0 6347 primitiveType = JSVAL_TYPE_BOOLEAN;
michael@0 6348 proto = GlobalObject::getOrCreateBooleanPrototype(cx, global);
michael@0 6349 }
michael@0 6350 if (!proto)
michael@0 6351 return false;
michael@0 6352
michael@0 6353 // Instantiate this property, for use during Ion compilation.
michael@0 6354 RootedId id(cx, NameToId(name));
michael@0 6355 if (IsIonEnabled(cx))
michael@0 6356 types::EnsureTrackPropertyTypes(cx, proto, id);
michael@0 6357
michael@0 6358 // For now, only look for properties directly set on the prototype.
michael@0 6359 RootedShape shape(cx, proto->nativeLookup(cx, id));
michael@0 6360 if (!shape || !shape->hasSlot() || !shape->hasDefaultGetter())
michael@0 6361 return true;
michael@0 6362
michael@0 6363 bool isFixedSlot;
michael@0 6364 uint32_t offset;
michael@0 6365 GetFixedOrDynamicSlotOffset(proto, shape->slot(), &isFixedSlot, &offset);
michael@0 6366
michael@0 6367 ICStub *monitorStub = stub->fallbackMonitorStub()->firstMonitorStub();
michael@0 6368
michael@0 6369 IonSpew(IonSpew_BaselineIC, " Generating GetProp_Primitive stub");
michael@0 6370 ICGetProp_Primitive::Compiler compiler(cx, monitorStub, primitiveType, proto,
michael@0 6371 isFixedSlot, offset);
michael@0 6372 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 6373 if (!newStub)
michael@0 6374 return false;
michael@0 6375
michael@0 6376 stub->addNewStub(newStub);
michael@0 6377 *attached = true;
michael@0 6378 return true;
michael@0 6379 }
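
// Illustrative example (hypothetical property): a data property sitting directly
// on a primitive's prototype, e.g.
//
//     String.prototype.greeting = "hi";
//     "abc".greeting;       // slot load from String.prototype
//
// is handled by ICGetProp_Primitive. Accessors, or properties further up the
// chain, are not handled here. Numbers share the JSVAL_TYPE_DOUBLE guard, which
// also covers int32 values.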
michael@0 6380
michael@0 6381 static bool
michael@0 6382 DoGetPropFallback(JSContext *cx, BaselineFrame *frame, ICGetProp_Fallback *stub_,
michael@0 6383 MutableHandleValue val, MutableHandleValue res)
michael@0 6384 {
michael@0 6385 // This fallback stub may trigger debug mode toggling.
michael@0 6386 DebugModeOSRVolatileStub<ICGetProp_Fallback *> stub(frame, stub_);
michael@0 6387
michael@0 6388 jsbytecode *pc = stub->icEntry()->pc(frame->script());
michael@0 6389 JSOp op = JSOp(*pc);
michael@0 6390 FallbackICSpew(cx, stub, "GetProp(%s)", js_CodeName[op]);
michael@0 6391
michael@0 6392 JS_ASSERT(op == JSOP_GETPROP || op == JSOP_CALLPROP || op == JSOP_LENGTH || op == JSOP_GETXPROP);
michael@0 6393
michael@0 6394 RootedPropertyName name(cx, frame->script()->getName(pc));
michael@0 6395
michael@0 6396 if (op == JSOP_LENGTH && val.isMagic(JS_OPTIMIZED_ARGUMENTS)) {
michael@0 6397 // Handle arguments.length access.
michael@0 6398 if (IsOptimizedArguments(frame, val.address())) {
michael@0 6399 res.setInt32(frame->numActualArgs());
michael@0 6400
michael@0 6401 // Monitor result
michael@0 6402 types::TypeScript::Monitor(cx, frame->script(), pc, res);
michael@0 6403 if (!stub->addMonitorStubForValue(cx, frame->script(), res))
michael@0 6404 return false;
michael@0 6405
michael@0 6406 bool attached = false;
michael@0 6407 if (!TryAttachLengthStub(cx, frame->script(), stub, val, res, &attached))
michael@0 6408 return false;
michael@0 6409 JS_ASSERT(attached);
michael@0 6410
michael@0 6411 return true;
michael@0 6412 }
michael@0 6413 }
michael@0 6414
michael@0 6415 RootedObject obj(cx, ToObjectFromStack(cx, val));
michael@0 6416 if (!obj)
michael@0 6417 return false;
michael@0 6418
michael@0 6419 RootedId id(cx, NameToId(name));
michael@0 6420 if (!JSObject::getGeneric(cx, obj, obj, id, res))
michael@0 6421 return false;
michael@0 6422
michael@0 6423 #if JS_HAS_NO_SUCH_METHOD
michael@0 6424 // Handle objects with __noSuchMethod__.
michael@0 6425 if (op == JSOP_CALLPROP && MOZ_UNLIKELY(res.isUndefined()) && val.isObject()) {
michael@0 6426 if (!OnUnknownMethod(cx, obj, IdToValue(id), res))
michael@0 6427 return false;
michael@0 6428 }
michael@0 6429 #endif
michael@0 6430
michael@0 6431 types::TypeScript::Monitor(cx, frame->script(), pc, res);
michael@0 6432
michael@0 6433 // Check if debug mode toggling made the stub invalid.
michael@0 6434 if (stub.invalid())
michael@0 6435 return true;
michael@0 6436
michael@0 6437 // Add a type monitor stub for the resulting value.
michael@0 6438 if (!stub->addMonitorStubForValue(cx, frame->script(), res))
michael@0 6439 return false;
michael@0 6440
michael@0 6441 if (stub->numOptimizedStubs() >= ICGetProp_Fallback::MAX_OPTIMIZED_STUBS) {
michael@0 6442 // TODO: Discard all stubs in this IC and replace with generic getprop stub.
michael@0 6443 return true;
michael@0 6444 }
michael@0 6445
michael@0 6446 bool attached = false;
michael@0 6447
michael@0 6448 if (op == JSOP_LENGTH) {
michael@0 6449 if (!TryAttachLengthStub(cx, frame->script(), stub, val, res, &attached))
michael@0 6450 return false;
michael@0 6451 if (attached)
michael@0 6452 return true;
michael@0 6453 }
michael@0 6454
michael@0 6455 RootedScript script(cx, frame->script());
michael@0 6456
michael@0 6457 if (!TryAttachNativeGetPropStub(cx, script, pc, stub, name, val, res, &attached))
michael@0 6458 return false;
michael@0 6459 if (attached)
michael@0 6460 return true;
michael@0 6461
michael@0 6462 if (val.isString() || val.isNumber() || val.isBoolean()) {
michael@0 6463 if (!TryAttachPrimitiveGetPropStub(cx, script, pc, stub, name, val, res, &attached))
michael@0 6464 return false;
michael@0 6465 if (attached)
michael@0 6466 return true;
michael@0 6467 }
michael@0 6468
michael@0 6469 JS_ASSERT(!attached);
michael@0 6470 stub->noteUnoptimizableAccess();
michael@0 6471
michael@0 6472 return true;
michael@0 6473 }
michael@0 6474
michael@0 6475 typedef bool (*DoGetPropFallbackFn)(JSContext *, BaselineFrame *, ICGetProp_Fallback *,
michael@0 6476 MutableHandleValue, MutableHandleValue);
michael@0 6477 static const VMFunction DoGetPropFallbackInfo =
michael@0 6478 FunctionInfo<DoGetPropFallbackFn>(DoGetPropFallback, PopValues(1));
michael@0 6479
michael@0 6480 bool
michael@0 6481 ICGetProp_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 6482 {
michael@0 6483 JS_ASSERT(R0 == JSReturnOperand);
michael@0 6484
michael@0 6485 EmitRestoreTailCallReg(masm);
michael@0 6486
michael@0 6487 // Ensure stack is fully synced for the expression decompiler.
michael@0 6488 masm.pushValue(R0);
michael@0 6489
michael@0 6490 // Push arguments.
michael@0 6491 masm.pushValue(R0);
michael@0 6492 masm.push(BaselineStubReg);
michael@0 6493 masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
michael@0 6494
michael@0 6495 if (!tailCallVM(DoGetPropFallbackInfo, masm))
michael@0 6496 return false;
michael@0 6497
michael@0 6498 // What follows is the bailout path for inlined scripted getters and for
michael@0 6499 // on-stack debug-mode recompilation. The return address stored on the
michael@0 6500 // baseline stack points here.
michael@0 6501 //
michael@0 6502 // Even though the fallback frame doesn't enter a stub frame, the CallScripted
michael@0 6503 // frame that we are emulating does. Again, we lie.
michael@0 6504 #ifdef DEBUG
michael@0 6505 entersStubFrame_ = true;
michael@0 6506 #endif
michael@0 6507
michael@0 6508 Label leaveStubCommon;
michael@0 6509
michael@0 6510 returnFromStubOffset_ = masm.currentOffset();
michael@0 6511 leaveStubFrameHead(masm, false);
michael@0 6512 masm.jump(&leaveStubCommon);
michael@0 6513
michael@0 6514 returnFromIonOffset_ = masm.currentOffset();
michael@0 6515 leaveStubFrameHead(masm, true);
michael@0 6516
michael@0 6517 masm.bind(&leaveStubCommon);
michael@0 6518 leaveStubFrameCommonTail(masm);
michael@0 6519
michael@0 6520 // When we get here, BaselineStubReg contains the ICGetProp_Fallback stub,
michael@0 6521 // which we can't use to enter the TypeMonitor IC, because it's a MonitoredFallbackStub
michael@0 6522 // instead of a MonitoredStub. So, we cheat.
michael@0 6523 masm.loadPtr(Address(BaselineStubReg, ICMonitoredFallbackStub::offsetOfFallbackMonitorStub()),
michael@0 6524 BaselineStubReg);
michael@0 6525 EmitEnterTypeMonitorIC(masm, ICTypeMonitor_Fallback::offsetOfFirstMonitorStub());
michael@0 6526
michael@0 6527 return true;
michael@0 6528 }
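
// The two offsets recorded above (returnFromStubOffset_ and returnFromIonOffset_)
// are resolved in postGenerateStubCode below and registered with the
// JitCompartment as the addresses control returns to after a bailed-out inlined
// getter or after a VM call following a debug-mode recompile; both paths share
// the common tail and re-enter the type monitor IC.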
michael@0 6529
michael@0 6530 bool
michael@0 6531 ICGetProp_Fallback::Compiler::postGenerateStubCode(MacroAssembler &masm, Handle<JitCode *> code)
michael@0 6532 {
michael@0 6533 JitCompartment *comp = cx->compartment()->jitCompartment();
michael@0 6534
michael@0 6535 CodeOffsetLabel fromIon(returnFromIonOffset_);
michael@0 6536 fromIon.fixup(&masm);
michael@0 6537 comp->initBaselineGetPropReturnFromIonAddr(code->raw() + fromIon.offset());
michael@0 6538
michael@0 6539 CodeOffsetLabel fromVM(returnFromStubOffset_);
michael@0 6540 fromVM.fixup(&masm);
michael@0 6541 comp->initBaselineGetPropReturnFromStubAddr(code->raw() + fromVM.offset());
michael@0 6542
michael@0 6543 return true;
michael@0 6544 }
michael@0 6545
michael@0 6546 bool
michael@0 6547 ICGetProp_ArrayLength::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 6548 {
michael@0 6549 Label failure;
michael@0 6550 masm.branchTestObject(Assembler::NotEqual, R0, &failure);
michael@0 6551
michael@0 6552 Register scratch = R1.scratchReg();
michael@0 6553
michael@0 6554 // Unbox R0 and guard that it's an array.
michael@0 6555 Register obj = masm.extractObject(R0, ExtractTemp0);
michael@0 6556 masm.branchTestObjClass(Assembler::NotEqual, obj, scratch, &ArrayObject::class_, &failure);
michael@0 6557
michael@0 6558 // Load obj->elements->length.
michael@0 6559 masm.loadPtr(Address(obj, JSObject::offsetOfElements()), scratch);
michael@0 6560 masm.load32(Address(scratch, ObjectElements::offsetOfLength()), scratch);
michael@0 6561
michael@0 6562 // Guard length fits in an int32.
michael@0 6563 masm.branchTest32(Assembler::Signed, scratch, scratch, &failure);
michael@0 6564
michael@0 6565 masm.tagValue(JSVAL_TYPE_INT32, scratch, R0);
michael@0 6566 EmitReturnFromIC(masm);
michael@0 6567
michael@0 6568 // Failure case - jump to next stub
michael@0 6569 masm.bind(&failure);
michael@0 6570 EmitStubGuardFailure(masm);
michael@0 6571 return true;
michael@0 6572 }
michael@0 6573
michael@0 6574 bool
michael@0 6575 ICGetProp_TypedArrayLength::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 6576 {
michael@0 6577 Label failure;
michael@0 6578 masm.branchTestObject(Assembler::NotEqual, R0, &failure);
michael@0 6579
michael@0 6580 Register scratch = R1.scratchReg();
michael@0 6581
michael@0 6582 // Unbox R0.
michael@0 6583 Register obj = masm.extractObject(R0, ExtractTemp0);
michael@0 6584
michael@0 6585 // Implement the negated version of the JSObject::isTypedArray predicate.
michael@0 6586 masm.loadObjClass(obj, scratch);
michael@0 6587 masm.branchPtr(Assembler::Below, scratch, ImmPtr(&TypedArrayObject::classes[0]),
michael@0 6588 &failure);
michael@0 6589 masm.branchPtr(Assembler::AboveOrEqual, scratch,
michael@0 6590 ImmPtr(&TypedArrayObject::classes[ScalarTypeDescr::TYPE_MAX]),
michael@0 6591 &failure);
michael@0 6592
michael@0 6593 // Load length from fixed slot.
michael@0 6594 masm.loadValue(Address(obj, TypedArrayObject::lengthOffset()), R0);
michael@0 6595 EmitReturnFromIC(masm);
michael@0 6596
michael@0 6597 // Failure case - jump to next stub
michael@0 6598 masm.bind(&failure);
michael@0 6599 EmitStubGuardFailure(masm);
michael@0 6600 return true;
michael@0 6601 }
michael@0 6602
michael@0 6603 bool
michael@0 6604 ICGetProp_StringLength::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 6605 {
michael@0 6606 Label failure;
michael@0 6607 masm.branchTestString(Assembler::NotEqual, R0, &failure);
michael@0 6608
michael@0 6609 // Unbox string and load its length.
michael@0 6610 Register string = masm.extractString(R0, ExtractTemp0);
michael@0 6611 masm.loadStringLength(string, string);
michael@0 6612
michael@0 6613 masm.tagValue(JSVAL_TYPE_INT32, string, R0);
michael@0 6614 EmitReturnFromIC(masm);
michael@0 6615
michael@0 6616 // Failure case - jump to next stub
michael@0 6617 masm.bind(&failure);
michael@0 6618 EmitStubGuardFailure(masm);
michael@0 6619 return true;
michael@0 6620 }
michael@0 6621
michael@0 6622 bool
michael@0 6623 ICGetProp_Primitive::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 6624 {
michael@0 6625 Label failure;
michael@0 6626 switch (primitiveType_) {
michael@0 6627 case JSVAL_TYPE_STRING:
michael@0 6628 masm.branchTestString(Assembler::NotEqual, R0, &failure);
michael@0 6629 break;
michael@0 6630 case JSVAL_TYPE_DOUBLE: // Also used for int32.
michael@0 6631 masm.branchTestNumber(Assembler::NotEqual, R0, &failure);
michael@0 6632 break;
michael@0 6633 case JSVAL_TYPE_BOOLEAN:
michael@0 6634 masm.branchTestBoolean(Assembler::NotEqual, R0, &failure);
michael@0 6635 break;
michael@0 6636 default:
michael@0 6637 MOZ_ASSUME_UNREACHABLE("unexpected type");
michael@0 6638 }
michael@0 6639
michael@0 6640 GeneralRegisterSet regs(availableGeneralRegs(1));
michael@0 6641 Register holderReg = regs.takeAny();
michael@0 6642 Register scratchReg = regs.takeAny();
michael@0 6643
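// For a primitive receiver the property lives on its wrapper prototype
// (e.g. String.prototype), so guard that object's shape and read the slot
// directly from it.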
michael@0 6644 // Verify the shape of the prototype.
michael@0 6645 masm.movePtr(ImmGCPtr(prototype_.get()), holderReg);
michael@0 6646
michael@0 6647 Address shapeAddr(BaselineStubReg, ICGetProp_Primitive::offsetOfProtoShape());
michael@0 6648 masm.loadPtr(Address(holderReg, JSObject::offsetOfShape()), scratchReg);
michael@0 6649 masm.branchPtr(Assembler::NotEqual, shapeAddr, scratchReg, &failure);
michael@0 6650
michael@0 6651 if (!isFixedSlot_)
michael@0 6652 masm.loadPtr(Address(holderReg, JSObject::offsetOfSlots()), holderReg);
michael@0 6653
michael@0 6654 masm.load32(Address(BaselineStubReg, ICGetPropNativeStub::offsetOfOffset()), scratchReg);
michael@0 6655 masm.loadValue(BaseIndex(holderReg, scratchReg, TimesOne), R0);
michael@0 6656
michael@0 6657 // Enter type monitor IC to type-check result.
michael@0 6658 EmitEnterTypeMonitorIC(masm);
michael@0 6659
michael@0 6660 // Failure case - jump to next stub
michael@0 6661 masm.bind(&failure);
michael@0 6662 EmitStubGuardFailure(masm);
michael@0 6663 return true;
michael@0 6664 }
michael@0 6665
michael@0 6666 bool
michael@0 6667 ICGetPropNativeCompiler::generateStubCode(MacroAssembler &masm)
michael@0 6668 {
michael@0 6669 Label failure;
michael@0 6670 GeneralRegisterSet regs(availableGeneralRegs(1));
michael@0 6671
michael@0 6672 // Guard input is an object.
michael@0 6673 masm.branchTestObject(Assembler::NotEqual, R0, &failure);
michael@0 6674
michael@0 6675 Register scratch = regs.takeAnyExcluding(BaselineTailCallReg);
michael@0 6676
michael@0 6677 // Unbox and shape guard.
michael@0 6678 Register objReg = masm.extractObject(R0, ExtractTemp0);
michael@0 6679 masm.loadPtr(Address(BaselineStubReg, ICGetPropNativeStub::offsetOfShape()), scratch);
michael@0 6680 masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failure);
michael@0 6681
michael@0 6682 Register holderReg;
michael@0 6683 if (obj_ == holder_) {
michael@0 6684 holderReg = objReg;
michael@0 6685 } else {
michael@0 6686 // Shape guard holder.
michael@0 6687 holderReg = regs.takeAny();
michael@0 6688 masm.loadPtr(Address(BaselineStubReg, ICGetProp_NativePrototype::offsetOfHolder()),
michael@0 6689 holderReg);
michael@0 6690 masm.loadPtr(Address(BaselineStubReg, ICGetProp_NativePrototype::offsetOfHolderShape()),
michael@0 6691 scratch);
michael@0 6692 masm.branchTestObjShape(Assembler::NotEqual, holderReg, scratch, &failure);
michael@0 6693 }
michael@0 6694
michael@0 6695 if (!isFixedSlot_) {
michael@0 6696 // Don't overwrite actual holderReg if we need to load a dynamic slots object.
michael@0 6697 // May need to preserve object for noSuchMethod check later.
michael@0 6698 Register nextHolder = regs.takeAny();
michael@0 6699 masm.loadPtr(Address(holderReg, JSObject::offsetOfSlots()), nextHolder);
michael@0 6700 holderReg = nextHolder;
michael@0 6701 }
michael@0 6702
michael@0 6703 masm.load32(Address(BaselineStubReg, ICGetPropNativeStub::offsetOfOffset()), scratch);
michael@0 6704 BaseIndex result(holderReg, scratch, TimesOne);
michael@0 6705
michael@0 6706 #if JS_HAS_NO_SUCH_METHOD
michael@0 6707 #ifdef DEBUG
michael@0 6708 entersStubFrame_ = true;
michael@0 6709 #endif
michael@0 6710 if (isCallProp_) {
michael@0 6711 // Check for __noSuchMethod__ invocation.
michael@0 6712 Label afterNoSuchMethod;
michael@0 6713 Label skipNoSuchMethod;
michael@0 6714
michael@0 6715 masm.push(objReg);
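// objReg stays pushed across the test below: it is popped again on the
// __noSuchMethod__ path, or discarded after skipNoSuchMethod.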
michael@0 6716 masm.loadValue(result, R0);
michael@0 6717 masm.branchTestUndefined(Assembler::NotEqual, R0, &skipNoSuchMethod);
michael@0 6718
michael@0 6719 masm.pop(objReg);
michael@0 6720
michael@0 6721 // Call __noSuchMethod__ checker. Object pointer is in objReg.
michael@0 6722 regs = availableGeneralRegs(0);
michael@0 6723 regs.takeUnchecked(objReg);
michael@0 6724 regs.takeUnchecked(BaselineTailCallReg);
michael@0 6725 ValueOperand val = regs.takeValueOperand();
michael@0 6726
michael@0 6727 // Box and push obj onto baseline frame stack for decompiler.
michael@0 6728 EmitRestoreTailCallReg(masm);
michael@0 6729 masm.tagValue(JSVAL_TYPE_OBJECT, objReg, val);
michael@0 6730 masm.pushValue(val);
michael@0 6731 EmitRepushTailCallReg(masm);
michael@0 6732
michael@0 6733 enterStubFrame(masm, regs.getAnyExcluding(BaselineTailCallReg));
michael@0 6734
michael@0 6735 masm.movePtr(ImmGCPtr(propName_.get()), val.scratchReg());
michael@0 6736 masm.tagValue(JSVAL_TYPE_STRING, val.scratchReg(), val);
michael@0 6737 masm.pushValue(val);
michael@0 6738 masm.push(objReg);
michael@0 6739 if (!callVM(LookupNoSuchMethodHandlerInfo, masm))
michael@0 6740 return false;
michael@0 6741
michael@0 6742 leaveStubFrame(masm);
michael@0 6743
michael@0 6744 // Pop pushed obj from baseline stack.
michael@0 6745 EmitUnstowICValues(masm, 1, /* discard = */ true);
michael@0 6746
michael@0 6747 masm.jump(&afterNoSuchMethod);
michael@0 6748 masm.bind(&skipNoSuchMethod);
michael@0 6749
michael@0 6750 // Pop pushed objReg.
michael@0 6751 masm.addPtr(Imm32(sizeof(void *)), BaselineStackReg);
michael@0 6752 masm.bind(&afterNoSuchMethod);
michael@0 6753 } else {
michael@0 6754 masm.loadValue(result, R0);
michael@0 6755 }
michael@0 6756 #else
michael@0 6757 masm.loadValue(result, R0);
michael@0 6758 #endif
michael@0 6759
michael@0 6760 // Enter type monitor IC to type-check result.
michael@0 6761 EmitEnterTypeMonitorIC(masm);
michael@0 6762
michael@0 6763 // Failure case - jump to next stub
michael@0 6764 masm.bind(&failure);
michael@0 6765 EmitStubGuardFailure(masm);
michael@0 6766 return true;
michael@0 6767 }
michael@0 6768
michael@0 6769 bool
michael@0 6770 ICGetProp_CallScripted::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 6771 {
michael@0 6772 Label failure;
michael@0 6773 Label failureLeaveStubFrame;
michael@0 6774 GeneralRegisterSet regs(availableGeneralRegs(1));
michael@0 6775 Register scratch = regs.takeAnyExcluding(BaselineTailCallReg);
michael@0 6776
michael@0 6777 // Guard input is an object.
michael@0 6778 masm.branchTestObject(Assembler::NotEqual, R0, &failure);
michael@0 6779
michael@0 6780 // Unbox and shape guard.
michael@0 6781 Register objReg = masm.extractObject(R0, ExtractTemp0);
michael@0 6782 masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallScripted::offsetOfReceiverShape()), scratch);
michael@0 6783 masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failure);
michael@0 6784
michael@0 6785 Register holderReg = regs.takeAny();
michael@0 6786 masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallScripted::offsetOfHolder()), holderReg);
michael@0 6787 masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallScripted::offsetOfHolderShape()), scratch);
michael@0 6788 masm.branchTestObjShape(Assembler::NotEqual, holderReg, scratch, &failure);
michael@0 6789 regs.add(holderReg);
michael@0 6790
michael@0 6791 // Push a stub frame so that we can perform a non-tail call.
michael@0 6792 enterStubFrame(masm, scratch);
michael@0 6793
michael@0 6794 // Load callee function and code. To ensure that |code| doesn't end up being
michael@0 6795 // ArgumentsRectifierReg, if it's available we assign it to |callee| instead.
michael@0 6796 Register callee;
michael@0 6797 if (regs.has(ArgumentsRectifierReg)) {
michael@0 6798 callee = ArgumentsRectifierReg;
michael@0 6799 regs.take(callee);
michael@0 6800 } else {
michael@0 6801 callee = regs.takeAny();
michael@0 6802 }
michael@0 6803 Register code = regs.takeAny();
michael@0 6804 masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallScripted::offsetOfGetter()), callee);
michael@0 6805 masm.branchIfFunctionHasNoScript(callee, &failureLeaveStubFrame);
michael@0 6806 masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), code);
michael@0 6807 masm.loadBaselineOrIonRaw(code, code, SequentialExecution, &failureLeaveStubFrame);
michael@0 6808
michael@0 6809 // Getter is called with 0 arguments, just |obj| as thisv.
michael@0 6810 // Note that we use Push, not push, so that callIon will align the stack
michael@0 6811 // properly on ARM.
michael@0 6812 masm.Push(R0);
michael@0 6813 EmitCreateStubFrameDescriptor(masm, scratch);
michael@0 6814 masm.Push(Imm32(0)); // ActualArgc is 0
michael@0 6815 masm.Push(callee);
michael@0 6816 masm.Push(scratch);
michael@0 6817
michael@0 6818 // Handle arguments underflow.
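// The getter is invoked with zero actual arguments, so any declared formal
// (nargs > 0) requires going through the arguments rectifier.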
michael@0 6819 Label noUnderflow;
michael@0 6820 masm.load16ZeroExtend(Address(callee, JSFunction::offsetOfNargs()), scratch);
michael@0 6821 masm.branch32(Assembler::Equal, scratch, Imm32(0), &noUnderflow);
michael@0 6822 {
michael@0 6823 // Call the arguments rectifier.
michael@0 6824 JS_ASSERT(ArgumentsRectifierReg != code);
michael@0 6825
michael@0 6826 JitCode *argumentsRectifier =
michael@0 6827 cx->runtime()->jitRuntime()->getArgumentsRectifier(SequentialExecution);
michael@0 6828
michael@0 6829 masm.movePtr(ImmGCPtr(argumentsRectifier), code);
michael@0 6830 masm.loadPtr(Address(code, JitCode::offsetOfCode()), code);
michael@0 6831 masm.mov(ImmWord(0), ArgumentsRectifierReg);
michael@0 6832 }
michael@0 6833
michael@0 6834 masm.bind(&noUnderflow);
michael@0 6835
michael@0 6836 // If needed, update SPS Profiler frame entry. At this point, callee and scratch can
michael@0 6837 // be clobbered.
michael@0 6838 {
michael@0 6839 GeneralRegisterSet availRegs = availableGeneralRegs(0);
michael@0 6840 availRegs.take(ArgumentsRectifierReg);
michael@0 6841 availRegs.take(code);
michael@0 6842 emitProfilingUpdate(masm, availRegs, ICGetProp_CallScripted::offsetOfPCOffset());
michael@0 6843 }
michael@0 6844
michael@0 6845 masm.callIon(code);
michael@0 6846
michael@0 6847 leaveStubFrame(masm, true);
michael@0 6848
michael@0 6849 // Enter type monitor IC to type-check result.
michael@0 6850 EmitEnterTypeMonitorIC(masm);
michael@0 6851
michael@0 6852 // Leave stub frame and go to next stub.
michael@0 6853 masm.bind(&failureLeaveStubFrame);
michael@0 6854 leaveStubFrame(masm, false);
michael@0 6855
michael@0 6856 // Failure case - jump to next stub
michael@0 6857 masm.bind(&failure);
michael@0 6858 EmitStubGuardFailure(masm);
michael@0 6859 return true;
michael@0 6860 }
michael@0 6861
michael@0 6862 bool
michael@0 6863 ICGetProp_CallNative::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 6864 {
michael@0 6865 Label failure;
michael@0 6866
michael@0 6867 GeneralRegisterSet regs(availableGeneralRegs(0));
michael@0 6868 Register obj = InvalidReg;
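// If the input is known to be an object, its payload register can be used
// directly; otherwise guard on the type tag and unbox the Value in R0.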
michael@0 6869 if (inputDefinitelyObject_) {
michael@0 6870 obj = R0.scratchReg();
michael@0 6871 } else {
michael@0 6872 regs.take(R0);
michael@0 6873 masm.branchTestObject(Assembler::NotEqual, R0, &failure);
michael@0 6874 obj = masm.extractObject(R0, ExtractTemp0);
michael@0 6875 }
michael@0 6876 regs.takeUnchecked(obj);
michael@0 6877
michael@0 6878 Register scratch = regs.takeAnyExcluding(BaselineTailCallReg);
michael@0 6879
michael@0 6880 masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallNative::offsetOfHolderShape()), scratch);
michael@0 6881 masm.branchTestObjShape(Assembler::NotEqual, obj, scratch, &failure);
michael@0 6882
michael@0 6883 enterStubFrame(masm, scratch);
michael@0 6884
michael@0 6885 masm.Push(obj);
michael@0 6886
michael@0 6887 masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallNative::offsetOfGetter()), scratch);
michael@0 6888 masm.Push(scratch);
michael@0 6889
michael@0 6890 regs.add(scratch);
michael@0 6891 if (!inputDefinitelyObject_)
michael@0 6892 regs.add(R0);
michael@0 6893
michael@0 6894 // If needed, update SPS Profiler frame entry.
michael@0 6895 emitProfilingUpdate(masm, regs, ICGetProp_CallNative::offsetOfPCOffset());
michael@0 6896
michael@0 6897 if (!callVM(DoCallNativeGetterInfo, masm))
michael@0 6898 return false;
michael@0 6899 leaveStubFrame(masm);
michael@0 6900
michael@0 6901 EmitEnterTypeMonitorIC(masm);
michael@0 6902
michael@0 6903 masm.bind(&failure);
michael@0 6904 EmitStubGuardFailure(masm);
michael@0 6905
michael@0 6906 return true;
michael@0 6907 }
michael@0 6908
michael@0 6909 bool
michael@0 6910 ICGetProp_CallNativePrototype::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 6911 {
michael@0 6912 Label failure;
michael@0 6913 GeneralRegisterSet regs(availableGeneralRegs(1));
michael@0 6914 Register scratch = regs.takeAnyExcluding(BaselineTailCallReg);
michael@0 6915
michael@0 6916 // Guard input is an object.
michael@0 6917 masm.branchTestObject(Assembler::NotEqual, R0, &failure);
michael@0 6918
michael@0 6919 // Unbox and shape guard.
michael@0 6920 Register objReg = masm.extractObject(R0, ExtractTemp0);
michael@0 6921 masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallNativePrototype::offsetOfReceiverShape()), scratch);
michael@0 6922 masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failure);
michael@0 6923
michael@0 6924 Register holderReg = regs.takeAny();
michael@0 6925 masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallNativePrototype::offsetOfHolder()), holderReg);
michael@0 6926 masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallNativePrototype::offsetOfHolderShape()), scratch);
michael@0 6927 masm.branchTestObjShape(Assembler::NotEqual, holderReg, scratch, &failure);
michael@0 6928 regs.add(holderReg);
michael@0 6929
michael@0 6930 // Push a stub frame so that we can perform a non-tail call.
michael@0 6931 enterStubFrame(masm, scratch);
michael@0 6932
michael@0 6933 // Load callee function.
michael@0 6934 Register callee = regs.takeAny();
michael@0 6935 masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallNativePrototype::offsetOfGetter()), callee);
michael@0 6936
michael@0 6937 // Push args for vm call.
michael@0 6938 masm.push(objReg);
michael@0 6939 masm.push(callee);
michael@0 6940
michael@0 6941 // Don't have to preserve R0 anymore.
michael@0 6942 regs.add(R0);
michael@0 6943
michael@0 6944 // If needed, update SPS Profiler frame entry.
michael@0 6945 emitProfilingUpdate(masm, regs, ICGetProp_CallNativePrototype::offsetOfPCOffset());
michael@0 6946
michael@0 6947 if (!callVM(DoCallNativeGetterInfo, masm))
michael@0 6948 return false;
michael@0 6949 leaveStubFrame(masm);
michael@0 6950
michael@0 6951 // Enter type monitor IC to type-check result.
michael@0 6952 EmitEnterTypeMonitorIC(masm);
michael@0 6953
michael@0 6954 // Failure case - jump to next stub
michael@0 6955 masm.bind(&failure);
michael@0 6956 EmitStubGuardFailure(masm);
michael@0 6957 return true;
michael@0 6958 }
michael@0 6959
michael@0 6960 bool
michael@0 6961 ICGetPropCallDOMProxyNativeCompiler::generateStubCode(MacroAssembler &masm,
michael@0 6962 Address* expandoAndGenerationAddr,
michael@0 6963 Address* generationAddr)
michael@0 6964 {
michael@0 6965 Label failure;
michael@0 6966 GeneralRegisterSet regs(availableGeneralRegs(1));
michael@0 6967 Register scratch = regs.takeAnyExcluding(BaselineTailCallReg);
michael@0 6968
michael@0 6969 // Guard input is an object.
michael@0 6970 masm.branchTestObject(Assembler::NotEqual, R0, &failure);
michael@0 6971
michael@0 6972 // Unbox.
michael@0 6973 Register objReg = masm.extractObject(R0, ExtractTemp0);
michael@0 6974
michael@0 6975 // Shape guard.
michael@0 6976 masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallDOMProxyNative::offsetOfShape()), scratch);
michael@0 6977 masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failure);
michael@0 6978
michael@0 6979 // Guard that this is the expected DOM proxy (handler and expando checks).
michael@0 6980 {
michael@0 6981 GeneralRegisterSet domProxyRegSet(GeneralRegisterSet::All());
michael@0 6982 domProxyRegSet.take(BaselineStubReg);
michael@0 6983 domProxyRegSet.take(objReg);
michael@0 6984 domProxyRegSet.take(scratch);
michael@0 6985 Address expandoShapeAddr(BaselineStubReg, ICGetProp_CallDOMProxyNative::offsetOfExpandoShape());
michael@0 6986 GenerateDOMProxyChecks(
michael@0 6987 cx, masm, objReg,
michael@0 6988 Address(BaselineStubReg, ICGetProp_CallDOMProxyNative::offsetOfProxyHandler()),
michael@0 6989 &expandoShapeAddr, expandoAndGenerationAddr, generationAddr,
michael@0 6990 scratch,
michael@0 6991 domProxyRegSet,
michael@0 6992 &failure);
michael@0 6993 }
michael@0 6994
michael@0 6995 Register holderReg = regs.takeAny();
michael@0 6996 masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallDOMProxyNative::offsetOfHolder()),
michael@0 6997 holderReg);
michael@0 6998 masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallDOMProxyNative::offsetOfHolderShape()),
michael@0 6999 scratch);
michael@0 7000 masm.branchTestObjShape(Assembler::NotEqual, holderReg, scratch, &failure);
michael@0 7001 regs.add(holderReg);
michael@0 7002
michael@0 7003 // Push a stub frame so that we can perform a non-tail call.
michael@0 7004 enterStubFrame(masm, scratch);
michael@0 7005
michael@0 7006 // Load callee function.
michael@0 7007 Register callee = regs.takeAny();
michael@0 7008 masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallDOMProxyNative::offsetOfGetter()), callee);
michael@0 7009
michael@0 7010 // Push args for vm call.
michael@0 7011 masm.push(objReg);
michael@0 7012 masm.push(callee);
michael@0 7013
michael@0 7014 // Don't have to preserve R0 anymore.
michael@0 7015 regs.add(R0);
michael@0 7016
michael@0 7017 // If needed, update SPS Profiler frame entry.
michael@0 7018 emitProfilingUpdate(masm, regs, ICGetProp_CallDOMProxyNative::offsetOfPCOffset());
michael@0 7019
michael@0 7020 if (!callVM(DoCallNativeGetterInfo, masm))
michael@0 7021 return false;
michael@0 7022 leaveStubFrame(masm);
michael@0 7023
michael@0 7024 // Enter type monitor IC to type-check result.
michael@0 7025 EmitEnterTypeMonitorIC(masm);
michael@0 7026
michael@0 7027 // Failure case - jump to next stub
michael@0 7028 masm.bind(&failure);
michael@0 7029 EmitStubGuardFailure(masm);
michael@0 7030 return true;
michael@0 7031 }
michael@0 7032
michael@0 7033 bool
michael@0 7034 ICGetPropCallDOMProxyNativeCompiler::generateStubCode(MacroAssembler &masm)
michael@0 7035 {
michael@0 7036 if (kind == ICStub::GetProp_CallDOMProxyNative)
michael@0 7037 return generateStubCode(masm, nullptr, nullptr);
michael@0 7038
michael@0 7039 Address internalStructAddress(BaselineStubReg,
michael@0 7040 ICGetProp_CallDOMProxyWithGenerationNative::offsetOfInternalStruct());
michael@0 7041 Address generationAddress(BaselineStubReg,
michael@0 7042 ICGetProp_CallDOMProxyWithGenerationNative::offsetOfGeneration());
michael@0 7043 return generateStubCode(masm, &internalStructAddress, &generationAddress);
michael@0 7044 }
michael@0 7045
michael@0 7046 ICStub *
michael@0 7047 ICGetPropCallDOMProxyNativeCompiler::getStub(ICStubSpace *space)
michael@0 7048 {
michael@0 7049 RootedShape shape(cx, proxy_->lastProperty());
michael@0 7050 RootedShape holderShape(cx, holder_->lastProperty());
michael@0 7051
michael@0 7052 Value expandoSlot = proxy_->getFixedSlot(GetDOMProxyExpandoSlot());
michael@0 7053 RootedShape expandoShape(cx, nullptr);
michael@0 7054 ExpandoAndGeneration *expandoAndGeneration;
michael@0 7055 int32_t generation;
michael@0 7056 Value expandoVal;
michael@0 7057 if (kind == ICStub::GetProp_CallDOMProxyNative) {
michael@0 7058 expandoVal = expandoSlot;
michael@0 7059 } else {
michael@0 7060 JS_ASSERT(kind == ICStub::GetProp_CallDOMProxyWithGenerationNative);
michael@0 7061 JS_ASSERT(!expandoSlot.isObject() && !expandoSlot.isUndefined());
michael@0 7062 expandoAndGeneration = (ExpandoAndGeneration*)expandoSlot.toPrivate();
michael@0 7063 expandoVal = expandoAndGeneration->expando;
michael@0 7064 generation = expandoAndGeneration->generation;
michael@0 7065 }
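// The generation is captured so the stub can detect when the proxy's
// ExpandoAndGeneration changes (bumping the generation) and the cached
// expando shape is no longer valid.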
michael@0 7066
michael@0 7067 if (expandoVal.isObject())
michael@0 7068 expandoShape = expandoVal.toObject().lastProperty();
michael@0 7069
michael@0 7070 if (kind == ICStub::GetProp_CallDOMProxyNative) {
michael@0 7071 return ICGetProp_CallDOMProxyNative::New(
michael@0 7072 space, getStubCode(), firstMonitorStub_, shape, proxy_->handler(),
michael@0 7073 expandoShape, holder_, holderShape, getter_, pcOffset_);
michael@0 7074 }
michael@0 7075
michael@0 7076 return ICGetProp_CallDOMProxyWithGenerationNative::New(
michael@0 7077 space, getStubCode(), firstMonitorStub_, shape, proxy_->handler(),
michael@0 7078 expandoAndGeneration, generation, expandoShape, holder_, holderShape, getter_,
michael@0 7079 pcOffset_);
michael@0 7080 }
michael@0 7081
michael@0 7082 ICStub *
michael@0 7083 ICGetProp_DOMProxyShadowed::Compiler::getStub(ICStubSpace *space)
michael@0 7084 {
michael@0 7085 RootedShape shape(cx, proxy_->lastProperty());
michael@0 7086 return ICGetProp_DOMProxyShadowed::New(space, getStubCode(), firstMonitorStub_, shape,
michael@0 7087 proxy_->handler(), name_, pcOffset_);
michael@0 7088 }
michael@0 7089
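// VM helper for the shadowed DOM proxy stub: forward the get through the
// proxy handler, with the proxy itself as the receiver.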
michael@0 7090 static bool
michael@0 7091 ProxyGet(JSContext *cx, HandleObject proxy, HandlePropertyName name, MutableHandleValue vp)
michael@0 7092 {
michael@0 7093 RootedId id(cx, NameToId(name));
michael@0 7094 return Proxy::get(cx, proxy, proxy, id, vp);
michael@0 7095 }
michael@0 7096
michael@0 7097 typedef bool (*ProxyGetFn)(JSContext *cx, HandleObject proxy, HandlePropertyName name,
michael@0 7098 MutableHandleValue vp);
michael@0 7099 static const VMFunction ProxyGetInfo = FunctionInfo<ProxyGetFn>(ProxyGet);
michael@0 7100
michael@0 7101 bool
michael@0 7102 ICGetProp_DOMProxyShadowed::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 7103 {
michael@0 7104 Label failure;
michael@0 7105
michael@0 7106 GeneralRegisterSet regs(availableGeneralRegs(1));
michael@0 7107 // Need to reserve a scratch register; it must not be BaselineTailCallReg,
michael@0 7108 // because it is passed to |enterStubFrame|, which requires a
michael@0 7109 // non-BaselineTailCallReg scratch register.
michael@0 7110 Register scratch = regs.takeAnyExcluding(BaselineTailCallReg);
michael@0 7111
michael@0 7112 // Guard input is an object.
michael@0 7113 masm.branchTestObject(Assembler::NotEqual, R0, &failure);
michael@0 7114
michael@0 7115 // Unbox.
michael@0 7116 Register objReg = masm.extractObject(R0, ExtractTemp0);
michael@0 7117
michael@0 7118 // Shape guard.
michael@0 7119 masm.loadPtr(Address(BaselineStubReg, ICGetProp_DOMProxyShadowed::offsetOfShape()), scratch);
michael@0 7120 masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failure);
michael@0 7121
michael@0 7122 // Guard that this is the expected DOM proxy (handler and expando checks).
michael@0 7123 {
michael@0 7124 GeneralRegisterSet domProxyRegSet(GeneralRegisterSet::All());
michael@0 7125 domProxyRegSet.take(BaselineStubReg);
michael@0 7126 domProxyRegSet.take(objReg);
michael@0 7127 domProxyRegSet.take(scratch);
michael@0 7128 GenerateDOMProxyChecks(
michael@0 7129 cx, masm, objReg,
michael@0 7130 Address(BaselineStubReg, ICGetProp_DOMProxyShadowed::offsetOfProxyHandler()),
michael@0 7131 /*expandoShapeAddr=*/nullptr,
michael@0 7132 /*expandoAndGenerationAddr=*/nullptr,
michael@0 7133 /*generationAddr=*/nullptr,
michael@0 7134 scratch,
michael@0 7135 domProxyRegSet,
michael@0 7136 &failure);
michael@0 7137 }
michael@0 7138
michael@0 7139 // Call ProxyGet(JSContext *cx, HandleObject proxy, HandlePropertyName name, MutableHandleValue vp);
michael@0 7140
michael@0 7141 // Push a stub frame so that we can perform a non-tail call.
michael@0 7142 enterStubFrame(masm, scratch);
michael@0 7143
michael@0 7144 // Push property name and proxy object.
michael@0 7145 masm.loadPtr(Address(BaselineStubReg, ICGetProp_DOMProxyShadowed::offsetOfName()), scratch);
michael@0 7146 masm.push(scratch);
michael@0 7147 masm.push(objReg);
michael@0 7148
michael@0 7149 // Don't have to preserve R0 anymore.
michael@0 7150 regs.add(R0);
michael@0 7151
michael@0 7152 // If needed, update SPS Profiler frame entry.
michael@0 7153 emitProfilingUpdate(masm, regs, ICGetProp_DOMProxyShadowed::offsetOfPCOffset());
michael@0 7154
michael@0 7155 if (!callVM(ProxyGetInfo, masm))
michael@0 7156 return false;
michael@0 7157 leaveStubFrame(masm);
michael@0 7158
michael@0 7159 // Enter type monitor IC to type-check result.
michael@0 7160 EmitEnterTypeMonitorIC(masm);
michael@0 7161
michael@0 7162 // Failure case - jump to next stub
michael@0 7163 masm.bind(&failure);
michael@0 7164 EmitStubGuardFailure(masm);
michael@0 7165 return true;
michael@0 7166 }
michael@0 7167
michael@0 7168 bool
michael@0 7169 ICGetProp_ArgumentsLength::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 7170 {
michael@0 7171 Label failure;
michael@0 7172 if (which_ == ICGetProp_ArgumentsLength::Magic) {
michael@0 7173 // Ensure that this is lazy arguments.
michael@0 7174 masm.branchTestMagicValue(Assembler::NotEqual, R0, JS_OPTIMIZED_ARGUMENTS, &failure);
michael@0 7175
michael@0 7176 // Ensure that the frame has not created an arguments object since.
michael@0 7177 masm.branchTest32(Assembler::NonZero,
michael@0 7178 Address(BaselineFrameReg, BaselineFrame::reverseOffsetOfFlags()),
michael@0 7179 Imm32(BaselineFrame::HAS_ARGS_OBJ),
michael@0 7180 &failure);
michael@0 7181
michael@0 7182 Address actualArgs(BaselineFrameReg, BaselineFrame::offsetOfNumActualArgs());
michael@0 7183 masm.loadPtr(actualArgs, R0.scratchReg());
michael@0 7184 masm.tagValue(JSVAL_TYPE_INT32, R0.scratchReg(), R0);
michael@0 7185 EmitReturnFromIC(masm);
michael@0 7186
michael@0 7187 masm.bind(&failure);
michael@0 7188 EmitStubGuardFailure(masm);
michael@0 7189 return true;
michael@0 7190 }
michael@0 7191 JS_ASSERT(which_ == ICGetProp_ArgumentsLength::Strict ||
michael@0 7192 which_ == ICGetProp_ArgumentsLength::Normal);
michael@0 7193
michael@0 7194 bool isStrict = which_ == ICGetProp_ArgumentsLength::Strict;
michael@0 7195 const Class *clasp = isStrict ? &StrictArgumentsObject::class_ : &NormalArgumentsObject::class_;
michael@0 7196
michael@0 7197 Register scratchReg = R1.scratchReg();
michael@0 7198
michael@0 7199 // Guard on input being an arguments object.
michael@0 7200 masm.branchTestObject(Assembler::NotEqual, R0, &failure);
michael@0 7201 Register objReg = masm.extractObject(R0, ExtractTemp0);
michael@0 7202 masm.branchTestObjClass(Assembler::NotEqual, objReg, scratchReg, clasp, &failure);
michael@0 7203
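// The initial-length slot packs the length above PACKED_BITS_COUNT flag bits
// (including LENGTH_OVERRIDDEN_BIT), hence the bit test and shift below.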
michael@0 7204 // Get initial length value.
michael@0 7205 masm.unboxInt32(Address(objReg, ArgumentsObject::getInitialLengthSlotOffset()), scratchReg);
michael@0 7206
michael@0 7207 // Test if length has been overridden.
michael@0 7208 masm.branchTest32(Assembler::NonZero,
michael@0 7209 scratchReg,
michael@0 7210 Imm32(ArgumentsObject::LENGTH_OVERRIDDEN_BIT),
michael@0 7211 &failure);
michael@0 7212
michael@0 7213 // Nope, shift out arguments length and return it.
michael@0 7214 // No need to type monitor because this stub always returns Int32.
michael@0 7215 masm.rshiftPtr(Imm32(ArgumentsObject::PACKED_BITS_COUNT), scratchReg);
michael@0 7216 masm.tagValue(JSVAL_TYPE_INT32, scratchReg, R0);
michael@0 7217 EmitReturnFromIC(masm);
michael@0 7218
michael@0 7219 masm.bind(&failure);
michael@0 7220 EmitStubGuardFailure(masm);
michael@0 7221 return true;
michael@0 7222 }
michael@0 7223
michael@0 7224 void
michael@0 7225 BaselineScript::noteAccessedGetter(uint32_t pcOffset)
michael@0 7226 {
michael@0 7227 ICEntry &entry = icEntryFromPCOffset(pcOffset);
michael@0 7228 ICFallbackStub *stub = entry.fallbackStub();
michael@0 7229
michael@0 7230 if (stub->isGetProp_Fallback())
michael@0 7231 stub->toGetProp_Fallback()->noteAccessedGetter();
michael@0 7232 }
michael@0 7233
michael@0 7234 //
michael@0 7235 // SetProp_Fallback
michael@0 7236 //
michael@0 7237
michael@0 7238 // Attach an optimized stub for a SETPROP/SETGNAME/SETNAME op.
michael@0 7239 static bool
michael@0 7240 TryAttachSetPropStub(JSContext *cx, HandleScript script, jsbytecode *pc, ICSetProp_Fallback *stub,
michael@0 7241 HandleObject obj, HandleShape oldShape, uint32_t oldSlots,
michael@0 7242 HandlePropertyName name, HandleId id, HandleValue rhs, bool *attached)
michael@0 7243 {
michael@0 7244 JS_ASSERT(!*attached);
michael@0 7245
michael@0 7246 if (!obj->isNative() || obj->watched())
michael@0 7247 return true;
michael@0 7248
michael@0 7249 RootedShape shape(cx);
michael@0 7250 RootedObject holder(cx);
michael@0 7251 if (!EffectlesslyLookupProperty(cx, obj, name, &holder, &shape))
michael@0 7252 return false;
michael@0 7253
michael@0 7254 size_t chainDepth;
michael@0 7255 if (IsCacheableSetPropAddSlot(cx, obj, oldShape, oldSlots, id, holder, shape, &chainDepth)) {
michael@0 7256 // Don't attach if proto chain depth is too high.
michael@0 7257 if (chainDepth > ICSetProp_NativeAdd::MAX_PROTO_CHAIN_DEPTH)
michael@0 7258 return true;
michael@0 7259
michael@0 7260 bool isFixedSlot;
michael@0 7261 uint32_t offset;
michael@0 7262 GetFixedOrDynamicSlotOffset(obj, shape->slot(), &isFixedSlot, &offset);
michael@0 7263
michael@0 7264 IonSpew(IonSpew_BaselineIC, " Generating SetProp(NativeObject.ADD) stub");
michael@0 7265 ICSetPropNativeAddCompiler compiler(cx, obj, oldShape, chainDepth, isFixedSlot, offset);
michael@0 7266 ICUpdatedStub *newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 7267 if (!newStub)
michael@0 7268 return false;
michael@0 7269 if (!newStub->addUpdateStubForValue(cx, script, obj, id, rhs))
michael@0 7270 return false;
michael@0 7271
michael@0 7272 stub->addNewStub(newStub);
michael@0 7273 *attached = true;
michael@0 7274 return true;
michael@0 7275 }
michael@0 7276
michael@0 7277 if (IsCacheableSetPropWriteSlot(obj, oldShape, holder, shape)) {
michael@0 7278 bool isFixedSlot;
michael@0 7279 uint32_t offset;
michael@0 7280 GetFixedOrDynamicSlotOffset(obj, shape->slot(), &isFixedSlot, &offset);
michael@0 7281
michael@0 7282 IonSpew(IonSpew_BaselineIC, " Generating SetProp(NativeObject.PROP) stub");
michael@0 7283 ICSetProp_Native::Compiler compiler(cx, obj, isFixedSlot, offset);
michael@0 7284 ICUpdatedStub *newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 7285 if (!newStub)
michael@0 7286 return false;
michael@0 7287 if (!newStub->addUpdateStubForValue(cx, script, obj, id, rhs))
michael@0 7288 return false;
michael@0 7289
michael@0 7290 stub->addNewStub(newStub);
michael@0 7291 *attached = true;
michael@0 7292 return true;
michael@0 7293 }
michael@0 7294
michael@0 7295 bool isScripted = false;
michael@0 7296 bool cacheableCall = IsCacheableSetPropCall(cx, obj, holder, shape, &isScripted);
michael@0 7297
michael@0 7298 // Try handling scripted setters.
michael@0 7299 if (cacheableCall && isScripted) {
michael@0 7300 RootedFunction callee(cx, &shape->setterObject()->as<JSFunction>());
michael@0 7301 JS_ASSERT(obj != holder);
michael@0 7302 JS_ASSERT(callee->hasScript());
michael@0 7303
michael@0 7304 IonSpew(IonSpew_BaselineIC, " Generating SetProp(NativeObj/ScriptedSetter %s:%d) stub",
michael@0 7305 callee->nonLazyScript()->filename(), callee->nonLazyScript()->lineno());
michael@0 7306
michael@0 7307 ICSetProp_CallScripted::Compiler compiler(cx, obj, holder, callee, script->pcToOffset(pc));
michael@0 7308 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 7309 if (!newStub)
michael@0 7310 return false;
michael@0 7311
michael@0 7312 stub->addNewStub(newStub);
michael@0 7313 *attached = true;
michael@0 7314 return true;
michael@0 7315 }
michael@0 7316
michael@0 7317 // Try handling JSNative setters.
michael@0 7318 if (cacheableCall && !isScripted) {
michael@0 7319 RootedFunction callee(cx, &shape->setterObject()->as<JSFunction>());
michael@0 7320 JS_ASSERT(obj != holder);
michael@0 7321 JS_ASSERT(callee->isNative());
michael@0 7322
michael@0 7323 IonSpew(IonSpew_BaselineIC, " Generating SetProp(NativeObj/NativeSetter %p) stub",
michael@0 7324 callee->native());
michael@0 7325
michael@0 7326 ICSetProp_CallNative::Compiler compiler(cx, obj, holder, callee, script->pcToOffset(pc));
michael@0 7327 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 7328 if (!newStub)
michael@0 7329 return false;
michael@0 7330
michael@0 7331 stub->addNewStub(newStub);
michael@0 7332 *attached = true;
michael@0 7333 return true;
michael@0 7334 }
michael@0 7335
michael@0 7336 return true;
michael@0 7337 }
michael@0 7338
michael@0 7339 static bool
michael@0 7340 DoSetPropFallback(JSContext *cx, BaselineFrame *frame, ICSetProp_Fallback *stub_,
michael@0 7341 HandleValue lhs, HandleValue rhs, MutableHandleValue res)
michael@0 7342 {
michael@0 7343 // This fallback stub may trigger debug mode toggling.
michael@0 7344 DebugModeOSRVolatileStub<ICSetProp_Fallback *> stub(frame, stub_);
michael@0 7345
michael@0 7346 RootedScript script(cx, frame->script());
michael@0 7347 jsbytecode *pc = stub->icEntry()->pc(script);
michael@0 7348 JSOp op = JSOp(*pc);
michael@0 7349 FallbackICSpew(cx, stub, "SetProp(%s)", js_CodeName[op]);
michael@0 7350
michael@0 7351 JS_ASSERT(op == JSOP_SETPROP ||
michael@0 7352 op == JSOP_SETNAME ||
michael@0 7353 op == JSOP_SETGNAME ||
michael@0 7354 op == JSOP_INITPROP ||
michael@0 7355 op == JSOP_SETALIASEDVAR);
michael@0 7356
michael@0 7357 RootedPropertyName name(cx);
michael@0 7358 if (op == JSOP_SETALIASEDVAR)
michael@0 7359 name = ScopeCoordinateName(cx->runtime()->scopeCoordinateNameCache, script, pc);
michael@0 7360 else
michael@0 7361 name = script->getName(pc);
michael@0 7362 RootedId id(cx, NameToId(name));
michael@0 7363
michael@0 7364 RootedObject obj(cx, ToObjectFromStack(cx, lhs));
michael@0 7365 if (!obj)
michael@0 7366 return false;
michael@0 7367 RootedShape oldShape(cx, obj->lastProperty());
michael@0 7368 uint32_t oldSlots = obj->numDynamicSlots();
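// Capture the object's shape and dynamic slot count before performing the
// set, so TryAttachSetPropStub below can recognize an add-property
// transition.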
michael@0 7369
michael@0 7370 if (op == JSOP_INITPROP) {
michael@0 7371 MOZ_ASSERT(name != cx->names().proto, "should have used JSOP_MUTATEPROTO");
michael@0 7372 MOZ_ASSERT(obj->is<JSObject>());
michael@0 7373 if (!DefineNativeProperty(cx, obj, id, rhs, nullptr, nullptr, JSPROP_ENUMERATE))
michael@0 7374 return false;
michael@0 7375 } else if (op == JSOP_SETNAME || op == JSOP_SETGNAME) {
michael@0 7376 if (!SetNameOperation(cx, script, pc, obj, rhs))
michael@0 7377 return false;
michael@0 7378 } else if (op == JSOP_SETALIASEDVAR) {
michael@0 7379 obj->as<ScopeObject>().setAliasedVar(cx, pc, name, rhs);
michael@0 7380 } else {
michael@0 7381 MOZ_ASSERT(op == JSOP_SETPROP);
michael@0 7382 if (script->strict()) {
michael@0 7383 if (!js::SetProperty<true>(cx, obj, id, rhs))
michael@0 7384 return false;
michael@0 7385 } else {
michael@0 7386 if (!js::SetProperty<false>(cx, obj, id, rhs))
michael@0 7387 return false;
michael@0 7388 }
michael@0 7389 }
michael@0 7390
michael@0 7391 // Leave the RHS on the stack.
michael@0 7392 res.set(rhs);
michael@0 7393
michael@0 7394 // Check if debug mode toggling made the stub invalid.
michael@0 7395 if (stub.invalid())
michael@0 7396 return true;
michael@0 7397
michael@0 7398 if (stub->numOptimizedStubs() >= ICSetProp_Fallback::MAX_OPTIMIZED_STUBS) {
michael@0 7399 // TODO: Discard all stubs in this IC and replace with generic setprop stub.
michael@0 7400 return true;
michael@0 7401 }
michael@0 7402
michael@0 7403 bool attached = false;
michael@0 7404 if (!TryAttachSetPropStub(cx, script, pc, stub, obj, oldShape, oldSlots, name, id, rhs,
michael@0 7405 &attached))
michael@0 7406 {
michael@0 7407 return false;
michael@0 7408 }
michael@0 7409 if (attached)
michael@0 7410 return true;
michael@0 7411
michael@0 7412 JS_ASSERT(!attached);
michael@0 7413 stub->noteUnoptimizableAccess();
michael@0 7414
michael@0 7415 return true;
michael@0 7416 }
michael@0 7417
michael@0 7418 typedef bool (*DoSetPropFallbackFn)(JSContext *, BaselineFrame *, ICSetProp_Fallback *,
michael@0 7419 HandleValue, HandleValue, MutableHandleValue);
michael@0 7420 static const VMFunction DoSetPropFallbackInfo =
michael@0 7421 FunctionInfo<DoSetPropFallbackFn>(DoSetPropFallback, PopValues(2));
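// Note: PopValues(2) accounts for the two values pushed (below) to keep the
// stack synced for the expression decompiler.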
michael@0 7422
michael@0 7423 bool
michael@0 7424 ICSetProp_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 7425 {
michael@0 7426 JS_ASSERT(R0 == JSReturnOperand);
michael@0 7427
michael@0 7428 EmitRestoreTailCallReg(masm);
michael@0 7429
michael@0 7430 // Ensure stack is fully synced for the expression decompiler.
michael@0 7431 masm.pushValue(R0);
michael@0 7432 masm.pushValue(R1);
michael@0 7433
michael@0 7434 // Push arguments.
michael@0 7435 masm.pushValue(R1);
michael@0 7436 masm.pushValue(R0);
michael@0 7437 masm.push(BaselineStubReg);
michael@0 7438 masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
michael@0 7439
michael@0 7440 if (!tailCallVM(DoSetPropFallbackInfo, masm))
michael@0 7441 return false;
michael@0 7442
michael@0 7443 // What follows is bailout code for inlined scripted setters or for on-stack
michael@0 7444 // debug mode recompile. The return address pointed to by the baseline stack
michael@0 7445 // points here.
michael@0 7446 //
michael@0 7447 // Even though the fallback frame doesn't enter a stub frame, the CallScripted
michael@0 7448 // frame that we are emulating does. Again, we lie.
michael@0 7449 #ifdef DEBUG
michael@0 7450 entersStubFrame_ = true;
michael@0 7451 #endif
michael@0 7452
michael@0 7453 Label leaveStubCommon;
michael@0 7454
michael@0 7455 returnFromStubOffset_ = masm.currentOffset();
michael@0 7456 leaveStubFrameHead(masm, false);
michael@0 7457 masm.jump(&leaveStubCommon);
michael@0 7458
michael@0 7459 returnFromIonOffset_ = masm.currentOffset();
michael@0 7460 leaveStubFrameHead(masm, true);
michael@0 7461
michael@0 7462 masm.bind(&leaveStubCommon);
michael@0 7463 leaveStubFrameCommonTail(masm);
michael@0 7464
michael@0 7465 // Retrieve the stashed initial argument from the caller's frame before returning.
michael@0 7466 EmitUnstowICValues(masm, 1);
michael@0 7467 EmitReturnFromIC(masm);
michael@0 7468
michael@0 7469 return true;
michael@0 7470 }
michael@0 7471
michael@0 7472 bool
michael@0 7473 ICSetProp_Fallback::Compiler::postGenerateStubCode(MacroAssembler &masm, Handle<JitCode *> code)
michael@0 7474 {
michael@0 7475 JitCompartment *comp = cx->compartment()->jitCompartment();
michael@0 7476
michael@0 7477 CodeOffsetLabel fromIon(returnFromIonOffset_);
michael@0 7478 fromIon.fixup(&masm);
michael@0 7479 comp->initBaselineSetPropReturnFromIonAddr(code->raw() + fromIon.offset());
michael@0 7480
michael@0 7481 CodeOffsetLabel fromVM(returnFromStubOffset_);
michael@0 7482 fromVM.fixup(&masm);
michael@0 7483 comp->initBaselineSetPropReturnFromStubAddr(code->raw() + fromVM.offset());
michael@0 7484
michael@0 7485 return true;
michael@0 7486 }
michael@0 7487
michael@0 7488 bool
michael@0 7489 ICSetProp_Native::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 7490 {
michael@0 7491 Label failure;
michael@0 7492
michael@0 7493 // Guard input is an object.
michael@0 7494 masm.branchTestObject(Assembler::NotEqual, R0, &failure);
michael@0 7495
michael@0 7496 GeneralRegisterSet regs(availableGeneralRegs(2));
michael@0 7497 Register scratch = regs.takeAny();
michael@0 7498
michael@0 7499 // Unbox and shape guard.
michael@0 7500 Register objReg = masm.extractObject(R0, ExtractTemp0);
michael@0 7501 masm.loadPtr(Address(BaselineStubReg, ICSetProp_Native::offsetOfShape()), scratch);
michael@0 7502 masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failure);
michael@0 7503
michael@0 7504 // Guard that the type object matches.
michael@0 7505 masm.loadPtr(Address(BaselineStubReg, ICSetProp_Native::offsetOfType()), scratch);
michael@0 7506 masm.branchPtr(Assembler::NotEqual, Address(objReg, JSObject::offsetOfType()), scratch,
michael@0 7507 &failure);
michael@0 7508
michael@0 7509 // Stow both R0 and R1 (object and value).
michael@0 7510 EmitStowICValues(masm, 2);
michael@0 7511
michael@0 7512 // Type update stub expects the value to check in R0.
michael@0 7513 masm.moveValue(R1, R0);
michael@0 7514
michael@0 7515 // Call the type-update stub.
michael@0 7516 if (!callTypeUpdateIC(masm, sizeof(Value)))
michael@0 7517 return false;
michael@0 7518
michael@0 7519 // Unstow R0 and R1 (object and value).
michael@0 7520 EmitUnstowICValues(masm, 2);
michael@0 7521
michael@0 7522 regs.add(R0);
michael@0 7523 regs.takeUnchecked(objReg);
michael@0 7524
michael@0 7525 Register holderReg;
michael@0 7526 if (isFixedSlot_) {
michael@0 7527 holderReg = objReg;
michael@0 7528 } else {
michael@0 7529 holderReg = regs.takeAny();
michael@0 7530 masm.loadPtr(Address(objReg, JSObject::offsetOfSlots()), holderReg);
michael@0 7531 }
michael@0 7532
michael@0 7533 // Perform the store.
michael@0 7534 masm.load32(Address(BaselineStubReg, ICSetProp_Native::offsetOfOffset()), scratch);
michael@0 7535 EmitPreBarrier(masm, BaseIndex(holderReg, scratch, TimesOne), MIRType_Value);
michael@0 7536 masm.storeValue(R1, BaseIndex(holderReg, scratch, TimesOne));
michael@0 7537 if (holderReg != objReg)
michael@0 7538 regs.add(holderReg);
michael@0 7539 #ifdef JSGC_GENERATIONAL
michael@0 7540 {
michael@0 7541 Register scr = regs.takeAny();
michael@0 7542 GeneralRegisterSet saveRegs;
michael@0 7543 saveRegs.add(R1);
michael@0 7544 emitPostWriteBarrierSlot(masm, objReg, R1, scr, saveRegs);
michael@0 7545 regs.add(scr);
michael@0 7546 }
michael@0 7547 #endif
michael@0 7548
michael@0 7549 // The RHS has to be in R0.
michael@0 7550 masm.moveValue(R1, R0);
michael@0 7551 EmitReturnFromIC(masm);
michael@0 7552
michael@0 7553 // Failure case - jump to next stub
michael@0 7554 masm.bind(&failure);
michael@0 7555 EmitStubGuardFailure(masm);
michael@0 7556 return true;
michael@0 7557 }
michael@0 7558
michael@0 7559 ICUpdatedStub *
michael@0 7560 ICSetPropNativeAddCompiler::getStub(ICStubSpace *space)
michael@0 7561 {
michael@0 7562 AutoShapeVector shapes(cx);
michael@0 7563 if (!shapes.append(oldShape_))
michael@0 7564 return nullptr;
michael@0 7565
michael@0 7566 if (!GetProtoShapes(obj_, protoChainDepth_, &shapes))
michael@0 7567 return nullptr;
michael@0 7568
michael@0 7569 JS_STATIC_ASSERT(ICSetProp_NativeAdd::MAX_PROTO_CHAIN_DEPTH == 4);
michael@0 7570
michael@0 7571 ICUpdatedStub *stub = nullptr;
michael@0 7572 switch(protoChainDepth_) {
michael@0 7573 case 0: stub = getStubSpecific<0>(space, &shapes); break;
michael@0 7574 case 1: stub = getStubSpecific<1>(space, &shapes); break;
michael@0 7575 case 2: stub = getStubSpecific<2>(space, &shapes); break;
michael@0 7576 case 3: stub = getStubSpecific<3>(space, &shapes); break;
michael@0 7577 case 4: stub = getStubSpecific<4>(space, &shapes); break;
michael@0 7578 default: MOZ_ASSUME_UNREACHABLE("ProtoChainDepth too high.");
michael@0 7579 }
michael@0 7580 if (!stub || !stub->initUpdatingChain(cx, space))
michael@0 7581 return nullptr;
michael@0 7582 return stub;
michael@0 7583 }
michael@0 7584
michael@0 7585 bool
michael@0 7586 ICSetPropNativeAddCompiler::generateStubCode(MacroAssembler &masm)
michael@0 7587 {
michael@0 7588 Label failure;
michael@0 7589 Label failureUnstow;
michael@0 7590
michael@0 7591 // Guard input is an object.
michael@0 7592 masm.branchTestObject(Assembler::NotEqual, R0, &failure);
michael@0 7593
michael@0 7594 GeneralRegisterSet regs(availableGeneralRegs(2));
michael@0 7595 Register scratch = regs.takeAny();
michael@0 7596
michael@0 7597 // Unbox and guard against old shape.
michael@0 7598 Register objReg = masm.extractObject(R0, ExtractTemp0);
michael@0 7599 masm.loadPtr(Address(BaselineStubReg, ICSetProp_NativeAddImpl<0>::offsetOfShape(0)), scratch);
michael@0 7600 masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failure);
michael@0 7601
michael@0 7602 // Guard that the type object matches.
michael@0 7603 masm.loadPtr(Address(BaselineStubReg, ICSetProp_NativeAdd::offsetOfType()), scratch);
michael@0 7604 masm.branchPtr(Assembler::NotEqual, Address(objReg, JSObject::offsetOfType()), scratch,
michael@0 7605 &failure);
michael@0 7606
michael@0 7607 // Stow both R0 and R1 (object and value).
michael@0 7608 EmitStowICValues(masm, 2);
michael@0 7609
michael@0 7610 regs = availableGeneralRegs(1);
michael@0 7611 scratch = regs.takeAny();
michael@0 7612 Register protoReg = regs.takeAny();
michael@0 7613 // Check the proto chain.
michael@0 7614 for (size_t i = 0; i < protoChainDepth_; i++) {
michael@0 7615 masm.loadObjProto(i == 0 ? objReg : protoReg, protoReg);
michael@0 7616 masm.branchTestPtr(Assembler::Zero, protoReg, protoReg, &failureUnstow);
michael@0 7617 masm.loadPtr(Address(BaselineStubReg, ICSetProp_NativeAddImpl<0>::offsetOfShape(i + 1)),
michael@0 7618 scratch);
michael@0 7619 masm.branchTestObjShape(Assembler::NotEqual, protoReg, scratch, &failureUnstow);
michael@0 7620 }
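// Guarding every prototype's shape ensures no object on the chain has gained
// a property (or setter) that would shadow the slot being added.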
michael@0 7621
michael@0 7622 // Shape and type checks succeeded, ok to proceed.
michael@0 7623
michael@0 7624 // Load RHS into R0 for TypeUpdate check.
michael@0 7625 // Stack is currently: [..., ObjValue, RHSValue, MaybeReturnAddr? ]
michael@0 7626 masm.loadValue(Address(BaselineStackReg, ICStackValueOffset), R0);
michael@0 7627
michael@0 7628 // Call the type-update stub.
michael@0 7629 if (!callTypeUpdateIC(masm, sizeof(Value)))
michael@0 7630 return false;
michael@0 7631
michael@0 7632 // Unstow R0 and R1 (object and value).
michael@0 7633 EmitUnstowICValues(masm, 2);
michael@0 7634 regs = availableGeneralRegs(2);
michael@0 7635 scratch = regs.takeAny();
michael@0 7636
michael@0 7637 // Changing object shape. Write the object's new shape.
michael@0 7638 Address shapeAddr(objReg, JSObject::offsetOfShape());
michael@0 7639 EmitPreBarrier(masm, shapeAddr, MIRType_Shape);
michael@0 7640 masm.loadPtr(Address(BaselineStubReg, ICSetProp_NativeAdd::offsetOfNewShape()), scratch);
michael@0 7641 masm.storePtr(scratch, shapeAddr);
michael@0 7642
michael@0 7643 Register holderReg;
michael@0 7644 regs.add(R0);
michael@0 7645 regs.takeUnchecked(objReg);
michael@0 7646 if (isFixedSlot_) {
michael@0 7647 holderReg = objReg;
michael@0 7648 } else {
michael@0 7649 holderReg = regs.takeAny();
michael@0 7650 masm.loadPtr(Address(objReg, JSObject::offsetOfSlots()), holderReg);
michael@0 7651 }
michael@0 7652
michael@0 7653 // Perform the store. No pre-barrier is required since this slot is being
michael@0 7654 // newly initialized rather than overwritten.
michael@0 7655 masm.load32(Address(BaselineStubReg, ICSetProp_NativeAdd::offsetOfOffset()), scratch);
michael@0 7656 masm.storeValue(R1, BaseIndex(holderReg, scratch, TimesOne));
michael@0 7657
michael@0 7658 if (holderReg != objReg)
michael@0 7659 regs.add(holderReg);
michael@0 7660
michael@0 7661 #ifdef JSGC_GENERATIONAL
michael@0 7662 {
michael@0 7663 Register scr = regs.takeAny();
michael@0 7664 GeneralRegisterSet saveRegs;
michael@0 7665 saveRegs.add(R1);
michael@0 7666 emitPostWriteBarrierSlot(masm, objReg, R1, scr, saveRegs);
michael@0 7667 }
michael@0 7668 #endif
michael@0 7669
michael@0 7670 // The RHS has to be in R0.
michael@0 7671 masm.moveValue(R1, R0);
michael@0 7672 EmitReturnFromIC(masm);
michael@0 7673
michael@0 7674 // Failure case - jump to next stub
michael@0 7675 masm.bind(&failureUnstow);
michael@0 7676 EmitUnstowICValues(masm, 2);
michael@0 7677
michael@0 7678 masm.bind(&failure);
michael@0 7679 EmitStubGuardFailure(masm);
michael@0 7680 return true;
michael@0 7681 }
michael@0 7682
michael@0 7683 bool
michael@0 7684 ICSetProp_CallScripted::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 7685 {
michael@0 7686 Label failure;
michael@0 7687 Label failureUnstow;
michael@0 7688 Label failureLeaveStubFrame;
michael@0 7689
michael@0 7690 // Guard input is an object.
michael@0 7691 masm.branchTestObject(Assembler::NotEqual, R0, &failure);
michael@0 7692
michael@0 7693 // Stow R0 and R1 to free up registers.
michael@0 7694 EmitStowICValues(masm, 2);
michael@0 7695
michael@0 7696 GeneralRegisterSet regs(availableGeneralRegs(1));
michael@0 7697 Register scratch = regs.takeAnyExcluding(BaselineTailCallReg);
michael@0 7698
michael@0 7699 // Unbox and shape guard.
michael@0 7700 Register objReg = masm.extractObject(R0, ExtractTemp0);
michael@0 7701 masm.loadPtr(Address(BaselineStubReg, ICSetProp_CallScripted::offsetOfShape()), scratch);
michael@0 7702 masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failureUnstow);
michael@0 7703
michael@0 7704 Register holderReg = regs.takeAny();
michael@0 7705 masm.loadPtr(Address(BaselineStubReg, ICSetProp_CallScripted::offsetOfHolder()), holderReg);
michael@0 7706 masm.loadPtr(Address(BaselineStubReg, ICSetProp_CallScripted::offsetOfHolderShape()), scratch);
michael@0 7707 masm.branchTestObjShape(Assembler::NotEqual, holderReg, scratch, &failureUnstow);
michael@0 7708 regs.add(holderReg);
michael@0 7709
michael@0 7710 // Push a stub frame so that we can perform a non-tail call.
michael@0 7711 enterStubFrame(masm, scratch);
michael@0 7712
michael@0 7713 // Load callee function and code. To ensure that |code| doesn't end up being
michael@0 7714 // ArgumentsRectifierReg, if it's available we assign it to |callee| instead.
michael@0 7715 Register callee;
michael@0 7716 if (regs.has(ArgumentsRectifierReg)) {
michael@0 7717 callee = ArgumentsRectifierReg;
michael@0 7718 regs.take(callee);
michael@0 7719 } else {
michael@0 7720 callee = regs.takeAny();
michael@0 7721 }
michael@0 7722 Register code = regs.takeAny();
michael@0 7723 masm.loadPtr(Address(BaselineStubReg, ICSetProp_CallScripted::offsetOfSetter()), callee);
michael@0 7724 masm.branchIfFunctionHasNoScript(callee, &failureLeaveStubFrame);
michael@0 7725 masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), code);
michael@0 7726 masm.loadBaselineOrIonRaw(code, code, SequentialExecution, &failureLeaveStubFrame);
michael@0 7727
michael@0 7728 // Setter is called with the new value as the only argument, and |obj| as thisv.
michael@0 7729 // Note that we use Push, not push, so that callIon will align the stack
michael@0 7730 // properly on ARM.
michael@0 7731
michael@0 7732 // To Push R1, read it off of the stowed values on stack.
michael@0 7733 // Stack: [ ..., R0, R1, ..STUBFRAME-HEADER.. ]
michael@0 7734 masm.movePtr(BaselineStackReg, scratch);
michael@0 7735 masm.PushValue(Address(scratch, STUB_FRAME_SIZE));
michael@0 7736 masm.Push(R0);
michael@0 7737 EmitCreateStubFrameDescriptor(masm, scratch);
michael@0 7738 masm.Push(Imm32(1)); // ActualArgc is 1
michael@0 7739 masm.Push(callee);
michael@0 7740 masm.Push(scratch);
michael@0 7741
michael@0 7742 // Handle arguments underflow.
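// The setter receives exactly one actual argument, so rectification is only
// needed when the callee declares more than one formal.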
michael@0 7743 Label noUnderflow;
michael@0 7744 masm.load16ZeroExtend(Address(callee, JSFunction::offsetOfNargs()), scratch);
michael@0 7745 masm.branch32(Assembler::BelowOrEqual, scratch, Imm32(1), &noUnderflow);
michael@0 7746 {
michael@0 7747 // Call the arguments rectifier.
michael@0 7748 JS_ASSERT(ArgumentsRectifierReg != code);
michael@0 7749
michael@0 7750 JitCode *argumentsRectifier =
michael@0 7751 cx->runtime()->jitRuntime()->getArgumentsRectifier(SequentialExecution);
michael@0 7752
michael@0 7753 masm.movePtr(ImmGCPtr(argumentsRectifier), code);
michael@0 7754 masm.loadPtr(Address(code, JitCode::offsetOfCode()), code);
michael@0 7755 masm.mov(ImmWord(1), ArgumentsRectifierReg);
michael@0 7756 }
michael@0 7757
michael@0 7758 masm.bind(&noUnderflow);
michael@0 7759
michael@0 7760 // If needed, update SPS Profiler frame entry. At this point, callee and scratch can
michael@0 7761 // be clobbered.
michael@0 7762 {
michael@0 7763 GeneralRegisterSet availRegs = availableGeneralRegs(0);
michael@0 7764 availRegs.take(ArgumentsRectifierReg);
michael@0 7765 availRegs.take(code);
michael@0 7766 emitProfilingUpdate(masm, availRegs, ICSetProp_CallScripted::offsetOfPCOffset());
michael@0 7767 }
michael@0 7768
michael@0 7769 masm.callIon(code);
michael@0 7770
michael@0 7771 leaveStubFrame(masm, true);
michael@0 7772 // Do not care about return value from function. The original RHS should be returned
michael@0 7773 // as the result of this operation.
michael@0 7774 EmitUnstowICValues(masm, 2);
michael@0 7775 masm.moveValue(R1, R0);
michael@0 7776 EmitReturnFromIC(masm);
michael@0 7777
michael@0 7778 // Leave stub frame and go to next stub.
michael@0 7779 masm.bind(&failureLeaveStubFrame);
michael@0 7780 leaveStubFrame(masm, false);
michael@0 7781
michael@0 7782 // Unstow R0 and R1
michael@0 7783 masm.bind(&failureUnstow);
michael@0 7784 EmitUnstowICValues(masm, 2);
michael@0 7785
michael@0 7786 // Failure case - jump to next stub
michael@0 7787 masm.bind(&failure);
michael@0 7788 EmitStubGuardFailure(masm);
michael@0 7789 return true;
michael@0 7790 }
michael@0 7791
michael@0 7792 static bool
michael@0 7793 DoCallNativeSetter(JSContext *cx, HandleFunction callee, HandleObject obj, HandleValue val)
michael@0 7794 {
michael@0 7795 JS_ASSERT(callee->isNative());
michael@0 7796 JSNative natfun = callee->native();
michael@0 7797
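// Build the call args following the JSNative convention: vp[0] is the
// callee, vp[1] the |this| object, vp[2] the single argument (argc == 1).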
michael@0 7798 JS::AutoValueArray<3> vp(cx);
michael@0 7799 vp[0].setObject(*callee.get());
michael@0 7800 vp[1].setObject(*obj.get());
michael@0 7801 vp[2].set(val);
michael@0 7802
michael@0 7803 return natfun(cx, 1, vp.begin());
michael@0 7804 }
michael@0 7805
michael@0 7806 typedef bool (*DoCallNativeSetterFn)(JSContext *, HandleFunction, HandleObject, HandleValue);
michael@0 7807 static const VMFunction DoCallNativeSetterInfo =
michael@0 7808 FunctionInfo<DoCallNativeSetterFn>(DoCallNativeSetter);
michael@0 7809
michael@0 7810 bool
michael@0 7811 ICSetProp_CallNative::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 7812 {
michael@0 7813 Label failure;
michael@0 7814 Label failureUnstow;
michael@0 7815
michael@0 7816 // Guard input is an object.
michael@0 7817 masm.branchTestObject(Assembler::NotEqual, R0, &failure);
michael@0 7818
michael@0 7819 // Stow R0 and R1 to free up registers.
michael@0 7820 EmitStowICValues(masm, 2);
michael@0 7821
michael@0 7822 GeneralRegisterSet regs(availableGeneralRegs(1));
michael@0 7823 Register scratch = regs.takeAnyExcluding(BaselineTailCallReg);
michael@0 7824
michael@0 7825 // Unbox and shape guard.
michael@0 7826 Register objReg = masm.extractObject(R0, ExtractTemp0);
michael@0 7827 masm.loadPtr(Address(BaselineStubReg, ICSetProp_CallNative::offsetOfShape()), scratch);
michael@0 7828 masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failureUnstow);
michael@0 7829
michael@0 7830 Register holderReg = regs.takeAny();
michael@0 7831 masm.loadPtr(Address(BaselineStubReg, ICSetProp_CallNative::offsetOfHolder()), holderReg);
michael@0 7832 masm.loadPtr(Address(BaselineStubReg, ICSetProp_CallNative::offsetOfHolderShape()), scratch);
michael@0 7833 masm.branchTestObjShape(Assembler::NotEqual, holderReg, scratch, &failureUnstow);
michael@0 7834 regs.add(holderReg);
michael@0 7835
michael@0 7836 // Push a stub frame so that we can perform a non-tail call.
michael@0 7837 enterStubFrame(masm, scratch);
michael@0 7838
michael@0 7839 // Load the callee (setter) function. Unlike the scripted case, no |code|
michael@0 7840 // register is needed here, since the native setter is invoked through a VM call.
michael@0 7841 Register callee = regs.takeAny();
michael@0 7842 masm.loadPtr(Address(BaselineStubReg, ICSetProp_CallNative::offsetOfSetter()), callee);
michael@0 7843
michael@0 7844 // To push R1, read it off of the stowed values on the stack.
michael@0 7845 // Stack: [ ..., R0, R1, ..STUBFRAME-HEADER.. ]
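// Arguments for DoCallNativeSetter are pushed in reverse order: the value
// (read from the stowed R1 slot), the receiver object, then the setter.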
michael@0 7846 masm.movePtr(BaselineStackReg, scratch);
michael@0 7847 masm.pushValue(Address(scratch, STUB_FRAME_SIZE));
michael@0 7848 masm.push(objReg);
michael@0 7849 masm.push(callee);
michael@0 7850
michael@0 7851 // Don't need to preserve R0 anymore.
michael@0 7852 regs.add(R0);
michael@0 7853
michael@0 7854 // If needed, update SPS Profiler frame entry.
michael@0 7855 emitProfilingUpdate(masm, regs, ICSetProp_CallNative::offsetOfPCOffset());
michael@0 7856
michael@0 7857 if (!callVM(DoCallNativeSetterInfo, masm))
michael@0 7858 return false;
michael@0 7859 leaveStubFrame(masm);
michael@0 7860
michael@0 7861 // Do not care about return value from function. The original RHS should be returned
michael@0 7862 // as the result of this operation.
michael@0 7863 EmitUnstowICValues(masm, 2);
michael@0 7864 masm.moveValue(R1, R0);
michael@0 7865 EmitReturnFromIC(masm);
michael@0 7866
michael@0 7867 // Unstow R0 and R1
michael@0 7868 masm.bind(&failureUnstow);
michael@0 7869 EmitUnstowICValues(masm, 2);
michael@0 7870
michael@0 7871 // Failure case - jump to next stub
michael@0 7872 masm.bind(&failure);
michael@0 7873 EmitStubGuardFailure(masm);
michael@0 7874 return true;
michael@0 7875 }
michael@0 7876
michael@0 7877 //
michael@0 7878 // Call_Fallback
michael@0 7879 //
michael@0 7880
michael@0 7881 static bool
michael@0 7882 TryAttachFunApplyStub(JSContext *cx, ICCall_Fallback *stub, HandleScript script, jsbytecode *pc,
michael@0 7883 HandleValue thisv, uint32_t argc, Value *argv)
michael@0 7884 {
michael@0 7885 if (argc != 2)
michael@0 7886 return true;
michael@0 7887
michael@0 7888 if (!thisv.isObject() || !thisv.toObject().is<JSFunction>())
michael@0 7889 return true;
michael@0 7890 RootedFunction target(cx, &thisv.toObject().as<JSFunction>());
michael@0 7891
michael@0 7892 bool isScripted = target->hasJITCode();
michael@0 7893
michael@0 7894 // Right now, only handle the situation where the second argument is |arguments|.
michael@0 7895 if (argv[1].isMagic(JS_OPTIMIZED_ARGUMENTS) && !script->needsArgsObj()) {
michael@0 7896 if (isScripted && !stub->hasStub(ICStub::Call_ScriptedApplyArguments)) {
michael@0 7897 IonSpew(IonSpew_BaselineIC, " Generating Call_ScriptedApplyArguments stub");
michael@0 7898
michael@0 7899 ICCall_ScriptedApplyArguments::Compiler compiler(
michael@0 7900 cx, stub->fallbackMonitorStub()->firstMonitorStub(), script->pcToOffset(pc));
michael@0 7901 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 7902 if (!newStub)
michael@0 7903 return false;
michael@0 7904
michael@0 7905 stub->addNewStub(newStub);
michael@0 7906 return true;
michael@0 7907 }
michael@0 7908
michael@0 7909 // TODO: handle FUNAPPLY for native targets.
michael@0 7910 }
michael@0 7911
michael@0 7912 if (argv[1].isObject() && argv[1].toObject().is<ArrayObject>()) {
michael@0 7913 if (isScripted && !stub->hasStub(ICStub::Call_ScriptedApplyArray)) {
michael@0 7914 IonSpew(IonSpew_BaselineIC, " Generating Call_ScriptedApplyArray stub");
michael@0 7915
michael@0 7916 ICCall_ScriptedApplyArray::Compiler compiler(
michael@0 7917 cx, stub->fallbackMonitorStub()->firstMonitorStub(), script->pcToOffset(pc));
michael@0 7918 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 7919 if (!newStub)
michael@0 7920 return false;
michael@0 7921
michael@0 7922 stub->addNewStub(newStub);
michael@0 7923 return true;
michael@0 7924 }
michael@0 7925 }
michael@0 7926 return true;
michael@0 7927 }
michael@0 7928
michael@0 7929 static bool
michael@0 7930 TryAttachFunCallStub(JSContext *cx, ICCall_Fallback *stub, HandleScript script, jsbytecode *pc,
michael@0 7931 HandleValue thisv, bool *attached)
michael@0 7932 {
michael@0 7933 // Try to attach a stub for Function.prototype.call with scripted |this|.
michael@0 7934
michael@0 7935 *attached = false;
michael@0 7936 if (!thisv.isObject() || !thisv.toObject().is<JSFunction>())
michael@0 7937 return true;
michael@0 7938 RootedFunction target(cx, &thisv.toObject().as<JSFunction>());
michael@0 7939
michael@0 7940 // Attach a stub if the script can be Baseline-compiled. We do this also
michael@0 7941 // if the script is not yet compiled to avoid attaching a CallNative stub
michael@0 7942 // that handles everything, even after the callee becomes hot.
michael@0 7943 if (target->hasScript() && target->nonLazyScript()->canBaselineCompile() &&
michael@0 7944 !stub->hasStub(ICStub::Call_ScriptedFunCall))
michael@0 7945 {
michael@0 7946 IonSpew(IonSpew_BaselineIC, " Generating Call_ScriptedFunCall stub");
michael@0 7947
michael@0 7948 ICCall_ScriptedFunCall::Compiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
michael@0 7949 script->pcToOffset(pc));
michael@0 7950 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 7951 if (!newStub)
michael@0 7952 return false;
michael@0 7953
michael@0 7954 *attached = true;
michael@0 7955 stub->addNewStub(newStub);
michael@0 7956 return true;
michael@0 7957 }
michael@0 7958
michael@0 7959 return true;
michael@0 7960 }
michael@0 7961
michael@0 7962 static bool
michael@0 7963 GetTemplateObjectForNative(JSContext *cx, HandleScript script, jsbytecode *pc,
michael@0 7964 Native native, const CallArgs &args, MutableHandleObject res)
michael@0 7965 {
michael@0 7966 // Check for natives to which template objects can be attached. This is
michael@0 7967 // done to provide templates to Ion for inlining these natives later on.
michael@0 7968
michael@0 7969 if (native == js_Array) {
michael@0 7970 // Note: the template array won't be used if its length is inaccurately
michael@0 7971 // computed here. (We allocate here because compilation may occur on a
michael@0 7972 // separate thread where allocation is impossible.)
michael@0 7973 size_t count = 0;
michael@0 7974 if (args.length() != 1)
michael@0 7975 count = args.length();
michael@0 7976 else if (args.length() == 1 && args[0].isInt32() && args[0].toInt32() >= 0)
michael@0 7977 count = args[0].toInt32();
michael@0 7978 res.set(NewDenseUnallocatedArray(cx, count, nullptr, TenuredObject));
michael@0 7979 if (!res)
michael@0 7980 return false;
michael@0 7981
michael@0 7982 types::TypeObject *type = types::TypeScript::InitObject(cx, script, pc, JSProto_Array);
michael@0 7983 if (!type)
michael@0 7984 return false;
michael@0 7985 res->setType(type);
michael@0 7986 return true;
michael@0 7987 }
michael@0 7988
michael@0 7989 if (native == intrinsic_NewDenseArray) {
michael@0 7990 res.set(NewDenseUnallocatedArray(cx, 0, nullptr, TenuredObject));
michael@0 7991 if (!res)
michael@0 7992 return false;
michael@0 7993
michael@0 7994 types::TypeObject *type = types::TypeScript::InitObject(cx, script, pc, JSProto_Array);
michael@0 7995 if (!type)
michael@0 7996 return false;
michael@0 7997 res->setType(type);
michael@0 7998 return true;
michael@0 7999 }
michael@0 8000
michael@0 8001 if (native == js::array_concat) {
michael@0 8002 if (args.thisv().isObject() && args.thisv().toObject().is<ArrayObject>() &&
michael@0 8003 !args.thisv().toObject().hasSingletonType())
michael@0 8004 {
michael@0 8005 res.set(NewDenseEmptyArray(cx, args.thisv().toObject().getProto(), TenuredObject));
michael@0 8006 if (!res)
michael@0 8007 return false;
michael@0 8008 res->setType(args.thisv().toObject().type());
michael@0 8009 return true;
michael@0 8010 }
michael@0 8011 }
michael@0 8012
michael@0 8013 if (native == js::str_split && args.length() == 1 && args[0].isString()) {
michael@0 8014 res.set(NewDenseUnallocatedArray(cx, 0, nullptr, TenuredObject));
michael@0 8015 if (!res)
michael@0 8016 return false;
michael@0 8017
michael@0 8018 types::TypeObject *type = types::TypeScript::InitObject(cx, script, pc, JSProto_Array);
michael@0 8019 if (!type)
michael@0 8020 return false;
michael@0 8021 res->setType(type);
michael@0 8022 return true;
michael@0 8023 }
michael@0 8024
michael@0 8025 if (native == js_String) {
michael@0 8026 RootedString emptyString(cx, cx->runtime()->emptyString);
michael@0 8027 res.set(StringObject::create(cx, emptyString, TenuredObject));
michael@0 8028 if (!res)
michael@0 8029 return false;
michael@0 8030 return true;
michael@0 8031 }
michael@0 8032
michael@0 8033 return true;
michael@0 8034 }
michael@0 8035
michael@0 8036 static bool
michael@0 8037 TryAttachCallStub(JSContext *cx, ICCall_Fallback *stub, HandleScript script, jsbytecode *pc,
michael@0 8038 JSOp op, uint32_t argc, Value *vp, bool constructing, bool useNewType)
michael@0 8039 {
michael@0 8040 if (useNewType || op == JSOP_EVAL)
michael@0 8041 return true;
michael@0 8042
michael@0 8043 if (stub->numOptimizedStubs() >= ICCall_Fallback::MAX_OPTIMIZED_STUBS) {
michael@0 8044 // TODO: Discard all stubs in this IC and replace with inert megamorphic stub.
michael@0 8045 // But for now we just bail.
michael@0 8046 return true;
michael@0 8047 }
michael@0 8048
michael@0 8049 RootedValue callee(cx, vp[0]);
michael@0 8050 RootedValue thisv(cx, vp[1]);
michael@0 8051
michael@0 8052 if (!callee.isObject())
michael@0 8053 return true;
michael@0 8054
michael@0 8055 RootedObject obj(cx, &callee.toObject());
michael@0 8056 if (!obj->is<JSFunction>())
michael@0 8057 return true;
michael@0 8058
michael@0 8059 RootedFunction fun(cx, &obj->as<JSFunction>());
michael@0 8060
michael@0 8061 if (fun->hasScript()) {
michael@0 8062 // Never attach optimized scripted call stubs for JSOP_FUNAPPLY.
michael@0 8063 // MagicArguments may escape the frame through them.
michael@0 8064 if (op == JSOP_FUNAPPLY)
michael@0 8065 return true;
michael@0 8066
michael@0 8067 // If callee is not an interpreted constructor, we have to throw.
michael@0 8068 if (constructing && !fun->isInterpretedConstructor())
michael@0 8069 return true;
michael@0 8070
michael@0 8071 RootedScript calleeScript(cx, fun->nonLazyScript());
michael@0 8072 if (!calleeScript->hasBaselineScript() && !calleeScript->hasIonScript())
michael@0 8073 return true;
michael@0 8074
michael@0 8075 if (calleeScript->shouldCloneAtCallsite())
michael@0 8076 return true;
michael@0 8077
michael@0 8078 // Check if this stub chain has already generalized scripted calls.
michael@0 8079 if (stub->scriptedStubsAreGeneralized()) {
michael@0 8080 IonSpew(IonSpew_BaselineIC, " Chain already has generalized scripted call stub!");
michael@0 8081 return true;
michael@0 8082 }
michael@0 8083
michael@0 8084 if (stub->scriptedStubCount() >= ICCall_Fallback::MAX_SCRIPTED_STUBS) {
michael@0 8085 // Create a Call_AnyScripted stub.
michael@0 8086 IonSpew(IonSpew_BaselineIC, " Generating Call_AnyScripted stub (cons=%s)",
michael@0 8087 constructing ? "yes" : "no");
michael@0 8088
michael@0 8089 ICCallScriptedCompiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
michael@0 8090 constructing, script->pcToOffset(pc));
michael@0 8091 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 8092 if (!newStub)
michael@0 8093 return false;
michael@0 8094
michael@0 8095 // Before adding the new stub, unlink all previous Call_Scripted stubs.
michael@0 8096 stub->unlinkStubsWithKind(cx, ICStub::Call_Scripted);
michael@0 8097
michael@0 8098 // Add new generalized stub.
michael@0 8099 stub->addNewStub(newStub);
michael@0 8100 return true;
michael@0 8101 }
michael@0 8102
michael@0 8103 // Keep track of the function's |prototype| property in type
michael@0 8104 // information, for use during Ion compilation.
michael@0 8105 if (IsIonEnabled(cx))
michael@0 8106 types::EnsureTrackPropertyTypes(cx, fun, NameToId(cx->names().prototype));
michael@0 8107
michael@0 8108 // Remember the template object associated with any script being called
michael@0 8109 // as a constructor, for later use during Ion compilation.
michael@0 8110 RootedObject templateObject(cx);
michael@0 8111 if (constructing) {
michael@0 8112 templateObject = CreateThisForFunction(cx, fun, MaybeSingletonObject);
michael@0 8113 if (!templateObject)
michael@0 8114 return false;
michael@0 8115 }
michael@0 8116
michael@0 8117 IonSpew(IonSpew_BaselineIC,
michael@0 8118 " Generating Call_Scripted stub (fun=%p, %s:%d, cons=%s)",
michael@0 8119 fun.get(), fun->nonLazyScript()->filename(), fun->nonLazyScript()->lineno(),
michael@0 8120 constructing ? "yes" : "no");
michael@0 8121 ICCallScriptedCompiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
michael@0 8122 calleeScript, templateObject,
michael@0 8123 constructing, script->pcToOffset(pc));
michael@0 8124 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 8125 if (!newStub)
michael@0 8126 return false;
michael@0 8127
michael@0 8128 stub->addNewStub(newStub);
michael@0 8129 return true;
michael@0 8130 }
michael@0 8131
michael@0 8132 if (fun->isNative() && (!constructing || (constructing && fun->isNativeConstructor()))) {
michael@0 8133 // Generalized native call stubs are not here yet!
michael@0 8134 JS_ASSERT(!stub->nativeStubsAreGeneralized());
michael@0 8135
michael@0 8136 // Check for JSOP_FUNAPPLY
michael@0 8137 if (op == JSOP_FUNAPPLY) {
michael@0 8138 if (fun->native() == js_fun_apply)
michael@0 8139 return TryAttachFunApplyStub(cx, stub, script, pc, thisv, argc, vp + 2);
michael@0 8140
michael@0 8141 // Don't try to attach a "regular" optimized call stub for FUNAPPLY ops,
michael@0 8142 // since MagicArguments may escape through them.
michael@0 8143 return true;
michael@0 8144 }
michael@0 8145
michael@0 8146 if (op == JSOP_FUNCALL && fun->native() == js_fun_call) {
michael@0 8147 bool attached;
michael@0 8148 if (!TryAttachFunCallStub(cx, stub, script, pc, thisv, &attached))
michael@0 8149 return false;
michael@0 8150 if (attached)
michael@0 8151 return true;
michael@0 8152 }
michael@0 8153
michael@0 8154 if (stub->nativeStubCount() >= ICCall_Fallback::MAX_NATIVE_STUBS) {
michael@0 8155 IonSpew(IonSpew_BaselineIC,
michael@0 8156 " Too many Call_Native stubs. TODO: add Call_AnyNative!");
michael@0 8157 return true;
michael@0 8158 }
michael@0 8159
michael@0 8160 CallArgs args = CallArgsFromVp(argc, vp);
michael@0 8161 RootedObject templateObject(cx);
michael@0 8162 if (!GetTemplateObjectForNative(cx, script, pc, fun->native(), args, &templateObject))
michael@0 8163 return false;
michael@0 8164
michael@0 8165 IonSpew(IonSpew_BaselineIC, " Generating Call_Native stub (fun=%p, cons=%s)",
michael@0 8166 fun.get(), constructing ? "yes" : "no");
michael@0 8167 ICCall_Native::Compiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
michael@0 8168 fun, templateObject, constructing, script->pcToOffset(pc));
michael@0 8169 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 8170 if (!newStub)
michael@0 8171 return false;
michael@0 8172
michael@0 8173 stub->addNewStub(newStub);
michael@0 8174 return true;
michael@0 8175 }
michael@0 8176
michael@0 8177 return true;
michael@0 8178 }
michael@0 8179
michael@0 8180 static bool
michael@0 8181 MaybeCloneFunctionAtCallsite(JSContext *cx, MutableHandleValue callee, HandleScript script,
michael@0 8182 jsbytecode *pc)
michael@0 8183 {
michael@0 8184 RootedFunction fun(cx);
michael@0 8185 if (!IsFunctionObject(callee, fun.address()))
michael@0 8186 return true;
michael@0 8187
michael@0 8188 if (!fun->hasScript() || !fun->nonLazyScript()->shouldCloneAtCallsite())
michael@0 8189 return true;
michael@0 8190
michael@0 8191 fun = CloneFunctionAtCallsite(cx, fun, script, pc);
michael@0 8192 if (!fun)
michael@0 8193 return false;
michael@0 8194
michael@0 8195 callee.setObject(*fun);
michael@0 8196 return true;
michael@0 8197 }
michael@0 8198
michael@0 8199 static bool
michael@0 8200 DoCallFallback(JSContext *cx, BaselineFrame *frame, ICCall_Fallback *stub_, uint32_t argc,
michael@0 8201 Value *vp, MutableHandleValue res)
michael@0 8202 {
michael@0 8203 // This fallback stub may trigger debug mode toggling.
michael@0 8204 DebugModeOSRVolatileStub<ICCall_Fallback *> stub(frame, stub_);
michael@0 8205
michael@0 8206 // Ensure vp array is rooted - we may GC in here.
michael@0 8207 AutoArrayRooter vpRoot(cx, argc + 2, vp);
michael@0 8208
michael@0 8209 RootedScript script(cx, frame->script());
michael@0 8210 jsbytecode *pc = stub->icEntry()->pc(script);
michael@0 8211 JSOp op = JSOp(*pc);
michael@0 8212 FallbackICSpew(cx, stub, "Call(%s)", js_CodeName[op]);
michael@0 8213
michael@0 8214 JS_ASSERT(argc == GET_ARGC(pc));
michael@0 8215
michael@0 8216 RootedValue callee(cx, vp[0]);
michael@0 8217 RootedValue thisv(cx, vp[1]);
michael@0 8218
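// vp[0] is the callee and vp[1] is |this|; the actual call arguments begin at vp[2].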
michael@0 8219 Value *args = vp + 2;
michael@0 8220
michael@0 8221 // Handle funapply with JSOP_ARGUMENTS
michael@0 8222 if (op == JSOP_FUNAPPLY && argc == 2 && args[1].isMagic(JS_OPTIMIZED_ARGUMENTS)) {
michael@0 8223 if (!GuardFunApplyArgumentsOptimization(cx, frame, callee, args, argc))
michael@0 8224 return false;
michael@0 8225 }
michael@0 8226
michael@0 8227 // Compute constructing and useNewType flags.
michael@0 8228 bool constructing = (op == JSOP_NEW);
michael@0 8229 bool newType = types::UseNewType(cx, script, pc);
michael@0 8230
michael@0 8231 // Try attaching a call stub.
michael@0 8232 if (!TryAttachCallStub(cx, stub, script, pc, op, argc, vp, constructing, newType))
michael@0 8233 return false;
michael@0 8234
michael@0 8235 // Maybe update PC in profiler entry before leaving this script by call.
michael@0 8236 if (cx->runtime()->spsProfiler.enabled() && frame->hasPushedSPSFrame())
michael@0 8237 cx->runtime()->spsProfiler.updatePC(script, pc);
michael@0 8238
michael@0 8239 if (!MaybeCloneFunctionAtCallsite(cx, &callee, script, pc))
michael@0 8240 return false;
michael@0 8241
michael@0 8242 if (op == JSOP_NEW) {
michael@0 8243 if (!InvokeConstructor(cx, callee, argc, args, res.address()))
michael@0 8244 return false;
michael@0 8245 } else if (op == JSOP_EVAL && frame->scopeChain()->global().valueIsEval(callee)) {
michael@0 8246 if (!DirectEval(cx, CallArgsFromVp(argc, vp)))
michael@0 8247 return false;
michael@0 8248 res.set(vp[0]);
michael@0 8249 } else {
michael@0 8250 JS_ASSERT(op == JSOP_CALL || op == JSOP_FUNCALL || op == JSOP_FUNAPPLY || op == JSOP_EVAL);
michael@0 8251 if (!Invoke(cx, thisv, callee, argc, args, res))
michael@0 8252 return false;
michael@0 8253 }
michael@0 8254
michael@0 8255 types::TypeScript::Monitor(cx, script, pc, res);
michael@0 8256
michael@0 8257 // Check if debug mode toggling made the stub invalid.
michael@0 8258 if (stub.invalid())
michael@0 8259 return true;
michael@0 8260
michael@0 8261 // Attach a new TypeMonitor stub for this value.
michael@0 8262 ICTypeMonitor_Fallback *typeMonFbStub = stub->fallbackMonitorStub();
michael@0 8263 if (!typeMonFbStub->addMonitorStubForValue(cx, script, res))
michael@0 8264 return false;
michael@0 8265 // Add a type monitor stub for the resulting value.
michael@0 8266 if (!stub->addMonitorStubForValue(cx, script, res))
michael@0 8267 return false;
michael@0 8268
michael@0 8269 return true;
michael@0 8270 }
michael@0 8271
michael@0 8272 void
michael@0 8273 ICCallStubCompiler::pushCallArguments(MacroAssembler &masm, GeneralRegisterSet regs, Register argcReg)
michael@0 8274 {
michael@0 8275 JS_ASSERT(!regs.has(argcReg));
michael@0 8276
michael@0 8277 // Push the callee and |this| too.
michael@0 8278 Register count = regs.takeAny();
michael@0 8279 masm.mov(argcReg, count);
michael@0 8280 masm.add32(Imm32(2), count);
michael@0 8281
michael@0 8282 // argPtr initially points to the last argument.
michael@0 8283 Register argPtr = regs.takeAny();
michael@0 8284 masm.mov(BaselineStackReg, argPtr);
michael@0 8285
michael@0 8286 // Skip 4 pointers pushed on top of the arguments: the frame descriptor,
michael@0 8287 // return address, old frame pointer and stub reg.
michael@0 8288 masm.addPtr(Imm32(STUB_FRAME_SIZE), argPtr);
michael@0 8289
michael@0 8290 // Push all values, starting at the last one.
michael@0 8291 Label loop, done;
michael@0 8292 masm.bind(&loop);
michael@0 8293 masm.branchTest32(Assembler::Zero, count, count, &done);
michael@0 8294 {
michael@0 8295 masm.pushValue(Address(argPtr, 0));
michael@0 8296 masm.addPtr(Imm32(sizeof(Value)), argPtr);
michael@0 8297
michael@0 8298 masm.sub32(Imm32(1), count);
michael@0 8299 masm.jump(&loop);
michael@0 8300 }
michael@0 8301 masm.bind(&done);
michael@0 8302 }
michael@0 8303
michael@0 8304 Register
michael@0 8305 ICCallStubCompiler::guardFunApply(MacroAssembler &masm, GeneralRegisterSet regs, Register argcReg,
michael@0 8306 bool checkNative, FunApplyThing applyThing, Label *failure)
michael@0 8307 {
michael@0 8308 // Ensure argc == 2
michael@0 8309 masm.branch32(Assembler::NotEqual, argcReg, Imm32(2), failure);
michael@0 8310
michael@0 8311 // Stack looks like:
michael@0 8312 // [..., CalleeV, ThisV, Arg0V, Arg1V, <MaybeReturnAddr>]
michael@0 8313
michael@0 8314 Address secondArgSlot(BaselineStackReg, ICStackValueOffset);
michael@0 8315 if (applyThing == FunApply_MagicArgs) {
michael@0 8316 // Ensure that the second arg is magic arguments.
michael@0 8317 masm.branchTestMagic(Assembler::NotEqual, secondArgSlot, failure);
michael@0 8318
michael@0 8319 // Ensure that this frame doesn't have an arguments object.
michael@0 8320 masm.branchTest32(Assembler::NonZero,
michael@0 8321 Address(BaselineFrameReg, BaselineFrame::reverseOffsetOfFlags()),
michael@0 8322 Imm32(BaselineFrame::HAS_ARGS_OBJ),
michael@0 8323 failure);
michael@0 8324 } else {
michael@0 8325 JS_ASSERT(applyThing == FunApply_Array);
michael@0 8326
michael@0 8327 GeneralRegisterSet regsx = regs;
michael@0 8328
michael@0 8329 // Ensure that the second arg is an array.
michael@0 8330 ValueOperand secondArgVal = regsx.takeAnyValue();
michael@0 8331 masm.loadValue(secondArgSlot, secondArgVal);
michael@0 8332
michael@0 8333 masm.branchTestObject(Assembler::NotEqual, secondArgVal, failure);
michael@0 8334 Register secondArgObj = masm.extractObject(secondArgVal, ExtractTemp1);
michael@0 8335
michael@0 8336 regsx.add(secondArgVal);
michael@0 8337 regsx.takeUnchecked(secondArgObj);
michael@0 8338
michael@0 8339 masm.branchTestObjClass(Assembler::NotEqual, secondArgObj, regsx.getAny(),
michael@0 8340 &ArrayObject::class_, failure);
michael@0 8341
michael@0 8342 // Get the array elements and ensure that initializedLength == length
michael@0 8343 masm.loadPtr(Address(secondArgObj, JSObject::offsetOfElements()), secondArgObj);
michael@0 8344
michael@0 8345 Register lenReg = regsx.takeAny();
michael@0 8346 masm.load32(Address(secondArgObj, ObjectElements::offsetOfLength()), lenReg);
michael@0 8347
michael@0 8348 masm.branch32(Assembler::NotEqual,
michael@0 8349 Address(secondArgObj, ObjectElements::offsetOfInitializedLength()),
michael@0 8350 lenReg, failure);
michael@0 8351
michael@0 8352 // Limit the length to something reasonable (huge number of arguments can
michael@0 8353 // blow the stack limit).
michael@0 8354 masm.branch32(Assembler::Above, lenReg,
michael@0 8355 Imm32(ICCall_ScriptedApplyArray::MAX_ARGS_ARRAY_LENGTH),
michael@0 8356 failure);
michael@0 8357
michael@0 8358 // Ensure no holes. Loop through values in array and make sure none are magic.
michael@0 8359 // Start address is secondArgObj, end address is secondArgObj + (lenReg * sizeof(Value))
michael@0 8360 JS_STATIC_ASSERT(sizeof(Value) == 8);
michael@0 8361 masm.lshiftPtr(Imm32(3), lenReg);
michael@0 8362 masm.addPtr(secondArgObj, lenReg);
michael@0 8363
michael@0 8364 Register start = secondArgObj;
michael@0 8365 Register end = lenReg;
michael@0 8366 Label loop;
michael@0 8367 Label endLoop;
michael@0 8368 masm.bind(&loop);
michael@0 8369 masm.branchPtr(Assembler::AboveOrEqual, start, end, &endLoop);
michael@0 8370 masm.branchTestMagic(Assembler::Equal, Address(start, 0), failure);
michael@0 8371 masm.addPtr(Imm32(sizeof(Value)), start);
michael@0 8372 masm.jump(&loop);
michael@0 8373 masm.bind(&endLoop);
michael@0 8374 }
michael@0 8375
michael@0 8376 // Stack now confirmed to look like:
michael@0 8377 // [..., CalleeV, ThisV, Arg0V, Arg1V (MagicArgs or array), <MaybeReturnAddr>]
michael@0 8378
michael@0 8379 // Load the callee, ensure that it's js_fun_apply
michael@0 8380 ValueOperand val = regs.takeAnyValue();
michael@0 8381 Address calleeSlot(BaselineStackReg, ICStackValueOffset + (3 * sizeof(Value)));
michael@0 8382 masm.loadValue(calleeSlot, val);
michael@0 8383
michael@0 8384 masm.branchTestObject(Assembler::NotEqual, val, failure);
michael@0 8385 Register callee = masm.extractObject(val, ExtractTemp1);
michael@0 8386
michael@0 8387 masm.branchTestObjClass(Assembler::NotEqual, callee, regs.getAny(), &JSFunction::class_,
michael@0 8388 failure);
michael@0 8389 masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), callee);
michael@0 8390
michael@0 8391 masm.branchPtr(Assembler::NotEqual, callee, ImmPtr(js_fun_apply), failure);
michael@0 8392
michael@0 8393 // Load the |thisv|, ensure that it's a scripted function with a valid baseline or ion
michael@0 8394 // script, or a native function.
michael@0 8395 Address thisSlot(BaselineStackReg, ICStackValueOffset + (2 * sizeof(Value)));
michael@0 8396 masm.loadValue(thisSlot, val);
michael@0 8397
michael@0 8398 masm.branchTestObject(Assembler::NotEqual, val, failure);
michael@0 8399 Register target = masm.extractObject(val, ExtractTemp1);
michael@0 8400 regs.add(val);
michael@0 8401 regs.takeUnchecked(target);
michael@0 8402
michael@0 8403 masm.branchTestObjClass(Assembler::NotEqual, target, regs.getAny(), &JSFunction::class_,
michael@0 8404 failure);
michael@0 8405
michael@0 8406 if (checkNative) {
michael@0 8407 masm.branchIfInterpreted(target, failure);
michael@0 8408 } else {
michael@0 8409 masm.branchIfFunctionHasNoScript(target, failure);
michael@0 8410 Register temp = regs.takeAny();
michael@0 8411 masm.loadPtr(Address(target, JSFunction::offsetOfNativeOrScript()), temp);
michael@0 8412 masm.loadBaselineOrIonRaw(temp, temp, SequentialExecution, failure);
michael@0 8413 regs.add(temp);
michael@0 8414 }
michael@0 8415 return target;
michael@0 8416 }
michael@0 8417
michael@0 8418 void
michael@0 8419 ICCallStubCompiler::pushCallerArguments(MacroAssembler &masm, GeneralRegisterSet regs)
michael@0 8420 {
michael@0 8421 // Initialize startReg to point to the start of the caller's arguments vector,
michael@0 8422 // and endReg to point to the end of it.
michael@0 8423 Register startReg = regs.takeAny();
michael@0 8424 Register endReg = regs.takeAny();
michael@0 8425 masm.loadPtr(Address(BaselineFrameReg, 0), startReg);
michael@0 8426 masm.loadPtr(Address(startReg, BaselineFrame::offsetOfNumActualArgs()), endReg);
michael@0 8427 masm.addPtr(Imm32(BaselineFrame::offsetOfArg(0)), startReg);
michael@0 8428 JS_STATIC_ASSERT(sizeof(Value) == 8);
michael@0 8429 masm.lshiftPtr(Imm32(3), endReg);
michael@0 8430 masm.addPtr(startReg, endReg);
michael@0 8431
michael@0 8432 // Copying pre-decrements endReg by 8 until startReg is reached
michael@0 8433 Label copyDone;
michael@0 8434 Label copyStart;
michael@0 8435 masm.bind(&copyStart);
michael@0 8436 masm.branchPtr(Assembler::Equal, endReg, startReg, &copyDone);
michael@0 8437 masm.subPtr(Imm32(sizeof(Value)), endReg);
michael@0 8438 masm.pushValue(Address(endReg, 0));
michael@0 8439 masm.jump(&copyStart);
michael@0 8440 masm.bind(&copyDone);
michael@0 8441 }
michael@0 8442
michael@0 8443 void
michael@0 8444 ICCallStubCompiler::pushArrayArguments(MacroAssembler &masm, Address arrayVal,
michael@0 8445 GeneralRegisterSet regs)
michael@0 8446 {
michael@0 8447 // Load start and end address of values to copy.
michael@0 8448 // guardFunApply has already guaranteed that the array is packed and contains
michael@0 8449 // no holes.
michael@0 8450 Register startReg = regs.takeAny();
michael@0 8451 Register endReg = regs.takeAny();
michael@0 8452 masm.extractObject(arrayVal, startReg);
michael@0 8453 masm.loadPtr(Address(startReg, JSObject::offsetOfElements()), startReg);
michael@0 8454 masm.load32(Address(startReg, ObjectElements::offsetOfInitializedLength()), endReg);
michael@0 8455 JS_STATIC_ASSERT(sizeof(Value) == 8);
michael@0 8456 masm.lshiftPtr(Imm32(3), endReg);
michael@0 8457 masm.addPtr(startReg, endReg);
michael@0 8458
michael@0 8459 // Copying pre-decrements endReg by 8 until startReg is reached
michael@0 8460 Label copyDone;
michael@0 8461 Label copyStart;
michael@0 8462 masm.bind(&copyStart);
michael@0 8463 masm.branchPtr(Assembler::Equal, endReg, startReg, &copyDone);
michael@0 8464 masm.subPtr(Imm32(sizeof(Value)), endReg);
michael@0 8465 masm.pushValue(Address(endReg, 0));
michael@0 8466 masm.jump(&copyStart);
michael@0 8467 masm.bind(&copyDone);
michael@0 8468 }
michael@0 8469
michael@0 8470 typedef bool (*DoCallFallbackFn)(JSContext *, BaselineFrame *, ICCall_Fallback *,
michael@0 8471 uint32_t, Value *, MutableHandleValue);
michael@0 8472 static const VMFunction DoCallFallbackInfo = FunctionInfo<DoCallFallbackFn>(DoCallFallback);
michael@0 8473
michael@0 8474 bool
michael@0 8475 ICCall_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 8476 {
michael@0 8477 JS_ASSERT(R0 == JSReturnOperand);
michael@0 8478
michael@0 8479 // Push a stub frame so that we can perform a non-tail call.
michael@0 8480 enterStubFrame(masm, R1.scratchReg());
michael@0 8481
michael@0 8482 // Values are on the stack left-to-right. Calling convention wants them
michael@0 8483 // right-to-left so duplicate them on the stack in reverse order.
michael@0 8484 // |this| and callee are pushed last.
michael@0 8485
michael@0 8486 GeneralRegisterSet regs(availableGeneralRegs(0));
michael@0 8487 regs.take(R0.scratchReg()); // argc.
michael@0 8488
michael@0 8489 pushCallArguments(masm, regs, R0.scratchReg());
michael@0 8490
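// Push vp (a pointer to the duplicated values just pushed), argc, and this
// stub for the DoCallFallback VM call.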
michael@0 8491 masm.push(BaselineStackReg);
michael@0 8492 masm.push(R0.scratchReg());
michael@0 8493 masm.push(BaselineStubReg);
michael@0 8494
michael@0 8495 // Load previous frame pointer, push BaselineFrame *.
michael@0 8496 masm.loadPtr(Address(BaselineFrameReg, 0), R0.scratchReg());
michael@0 8497 masm.pushBaselineFramePtr(R0.scratchReg(), R0.scratchReg());
michael@0 8498
michael@0 8499 if (!callVM(DoCallFallbackInfo, masm))
michael@0 8500 return false;
michael@0 8501
michael@0 8502 leaveStubFrame(masm);
michael@0 8503 EmitReturnFromIC(masm);
michael@0 8504
michael@0 8505 // The following asm code is only used either when an Ion inlined frame
michael@0 8506 // bails out into baseline jitcode or when we need to do on-stack script
michael@0 8507 // replacement for a debug mode recompile.
michael@0 8508 Label leaveStubCommon;
michael@0 8509 returnFromStubOffset_ = masm.currentOffset();
michael@0 8510
michael@0 8511 // Load passed-in ThisV into R1 just in case it's needed. Need to do this before
michael@0 8512 // we leave the stub frame since that info will be lost.
michael@0 8513 // Current stack: [...., ThisV, ActualArgc, CalleeToken, Descriptor ]
michael@0 8514 masm.loadValue(Address(BaselineStackReg, 3 * sizeof(size_t)), R1);
michael@0 8515
michael@0 8516 // Emit the coming-from-VM specific part of the stub-leaving code.
michael@0 8517 leaveStubFrameHead(masm, /* calledIntoIon = */ false);
michael@0 8518
michael@0 8519 // Jump to the common leave stub tail.
michael@0 8520 masm.jump(&leaveStubCommon);
michael@0 8521
michael@0 8522 // For Ion bailouts, the return address pushed onto the reconstructed
michael@0 8523 // baseline stack points here.
michael@0 8524 returnFromIonOffset_ = masm.currentOffset();
michael@0 8525
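// As above, load the passed-in ThisV into R1 before the stub frame info is lost.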
michael@0 8526 masm.loadValue(Address(BaselineStackReg, 3 * sizeof(size_t)), R1);
michael@0 8527
michael@0 8528 // Emit the coming-from-Ion specific part of the stub-leaving code.
michael@0 8529 leaveStubFrameHead(masm, /* calledIntoIon = */ true);
michael@0 8530
michael@0 8531 // Emit the common stub-leaving tail.
michael@0 8532 masm.bind(&leaveStubCommon);
michael@0 8533 leaveStubFrameCommonTail(masm);
michael@0 8534
michael@0 8535 // R1 and R0 are taken.
michael@0 8536 regs = availableGeneralRegs(2);
michael@0 8537 Register scratch = regs.takeAny();
michael@0 8538
michael@0 8539 // If this is a |constructing| call, if the callee returns a non-object, we replace it with
michael@0 8540 // the |this| object passed in.
michael@0 8541 JS_ASSERT(JSReturnOperand == R0);
michael@0 8542 Label skipThisReplace;
michael@0 8543 masm.load16ZeroExtend(Address(BaselineStubReg, ICStub::offsetOfExtra()), scratch);
michael@0 8544 masm.branchTest32(Assembler::Zero, scratch, Imm32(ICCall_Fallback::CONSTRUCTING_FLAG),
michael@0 8545 &skipThisReplace);
michael@0 8546 masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);
michael@0 8547 masm.moveValue(R1, R0);
michael@0 8548 #ifdef DEBUG
michael@0 8549 masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);
michael@0 8550 masm.assumeUnreachable("Failed to return object in constructing call.");
michael@0 8551 #endif
michael@0 8552 masm.bind(&skipThisReplace);
michael@0 8553
michael@0 8554 // At this point, BaselineStubReg points to the ICCall_Fallback stub, which is NOT
michael@0 8555 // a MonitoredStub, but rather a MonitoredFallbackStub. To use EmitEnterTypeMonitorIC,
michael@0 8556 // first load the ICTypeMonitor_Fallback stub into BaselineStubReg. Then, use
michael@0 8557 // EmitEnterTypeMonitorIC with a custom struct offset.
michael@0 8558 masm.loadPtr(Address(BaselineStubReg, ICMonitoredFallbackStub::offsetOfFallbackMonitorStub()),
michael@0 8559 BaselineStubReg);
michael@0 8560 EmitEnterTypeMonitorIC(masm, ICTypeMonitor_Fallback::offsetOfFirstMonitorStub());
michael@0 8561
michael@0 8562 return true;
michael@0 8563 }
michael@0 8564
michael@0 8565 bool
michael@0 8566 ICCall_Fallback::Compiler::postGenerateStubCode(MacroAssembler &masm, Handle<JitCode *> code)
michael@0 8567 {
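    // Record the two return addresses generated above (the from-Ion and
    // from-VM resume points) in the JitCompartment so callers can re-enter
    // this stub at the right place.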
michael@0 8568 JitCompartment *comp = cx->compartment()->jitCompartment();
michael@0 8569
michael@0 8570 CodeOffsetLabel fromIon(returnFromIonOffset_);
michael@0 8571 fromIon.fixup(&masm);
michael@0 8572 comp->initBaselineCallReturnFromIonAddr(code->raw() + fromIon.offset());
michael@0 8573
michael@0 8574 CodeOffsetLabel fromVM(returnFromStubOffset_);
michael@0 8575 fromVM.fixup(&masm);
michael@0 8576 comp->initBaselineCallReturnFromStubAddr(code->raw() + fromVM.offset());
michael@0 8577
michael@0 8578 return true;
michael@0 8579 }
michael@0 8580
michael@0 8581 typedef bool (*CreateThisFn)(JSContext *cx, HandleObject callee, MutableHandleValue rval);
michael@0 8582 static const VMFunction CreateThisInfoBaseline = FunctionInfo<CreateThisFn>(CreateThis);
michael@0 8583
michael@0 8584 bool
michael@0 8585 ICCallScriptedCompiler::generateStubCode(MacroAssembler &masm)
michael@0 8586 {
michael@0 8587 Label failure;
michael@0 8588 GeneralRegisterSet regs(availableGeneralRegs(0));
michael@0 8589 bool canUseTailCallReg = regs.has(BaselineTailCallReg);
michael@0 8590
michael@0 8591 Register argcReg = R0.scratchReg();
michael@0 8592 JS_ASSERT(argcReg != ArgumentsRectifierReg);
michael@0 8593
michael@0 8594 regs.take(argcReg);
michael@0 8595 regs.take(ArgumentsRectifierReg);
michael@0 8596 regs.takeUnchecked(BaselineTailCallReg);
michael@0 8597
michael@0 8598 // Load the callee in R1.
michael@0 8599 // Stack Layout: [ ..., CalleeVal, ThisVal, Arg0Val, ..., ArgNVal, +ICStackValueOffset+ ]
michael@0 8600 BaseIndex calleeSlot(BaselineStackReg, argcReg, TimesEight, ICStackValueOffset + sizeof(Value));
michael@0 8601 masm.loadValue(calleeSlot, R1);
michael@0 8602 regs.take(R1);
michael@0 8603
michael@0 8604 // Ensure callee is an object.
michael@0 8605 masm.branchTestObject(Assembler::NotEqual, R1, &failure);
michael@0 8606
michael@0 8607 // Ensure callee is a function.
michael@0 8608 Register callee = masm.extractObject(R1, ExtractTemp0);
michael@0 8609 masm.branchTestObjClass(Assembler::NotEqual, callee, regs.getAny(), &JSFunction::class_,
michael@0 8610 &failure);
michael@0 8611
michael@0 8612 // If calling a specific script, check if the script matches. Otherwise, ensure that
michael@0 8613 // callee function is scripted. Leave calleeScript in |callee| reg.
michael@0 8614 if (calleeScript_) {
michael@0 8615 JS_ASSERT(kind == ICStub::Call_Scripted);
michael@0 8616
michael@0 8617 // Callee is a function. Check if script matches.
michael@0 8618 masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), callee);
michael@0 8619 Address expectedScript(BaselineStubReg, ICCall_Scripted::offsetOfCalleeScript());
michael@0 8620 masm.branchPtr(Assembler::NotEqual, expectedScript, callee, &failure);
michael@0 8621 } else {
michael@0 8622 if (isConstructing_)
michael@0 8623 masm.branchIfNotInterpretedConstructor(callee, regs.getAny(), &failure);
michael@0 8624 else
michael@0 8625 masm.branchIfFunctionHasNoScript(callee, &failure);
michael@0 8626 masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), callee);
michael@0 8627 }
michael@0 8628
michael@0 8629 // Load the start of the target JitCode.
michael@0 8630 Register code;
michael@0 8631 if (!isConstructing_) {
michael@0 8632 code = regs.takeAny();
michael@0 8633 masm.loadBaselineOrIonRaw(callee, code, SequentialExecution, &failure);
michael@0 8634 } else {
michael@0 8635 Address scriptCode(callee, JSScript::offsetOfBaselineOrIonRaw());
michael@0 8636 masm.branchPtr(Assembler::Equal, scriptCode, ImmPtr(nullptr), &failure);
michael@0 8637 }
michael@0 8638
michael@0 8639 // We no longer need R1.
michael@0 8640 regs.add(R1);
michael@0 8641
michael@0 8642 // Push a stub frame so that we can perform a non-tail call.
michael@0 8643 enterStubFrame(masm, regs.getAny());
michael@0 8644 if (canUseTailCallReg)
michael@0 8645 regs.add(BaselineTailCallReg);
michael@0 8646
michael@0 8647 Label failureLeaveStubFrame;
michael@0 8648
michael@0 8649 if (isConstructing_) {
michael@0 8650 // Save argc before call.
michael@0 8651 masm.push(argcReg);
michael@0 8652
michael@0 8653 // Stack now looks like:
michael@0 8654 // [..., Callee, ThisV, Arg0V, ..., ArgNV, StubFrameHeader, ArgC ]
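// Load the callee off the stack and pass it to CreateThis, which allocates
// the |this| object for this construct call.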
michael@0 8655 BaseIndex calleeSlot2(BaselineStackReg, argcReg, TimesEight,
michael@0 8656 sizeof(Value) + STUB_FRAME_SIZE + sizeof(size_t));
michael@0 8657 masm.loadValue(calleeSlot2, R1);
michael@0 8658 masm.push(masm.extractObject(R1, ExtractTemp0));
michael@0 8659 if (!callVM(CreateThisInfoBaseline, masm))
michael@0 8660 return false;
michael@0 8661
michael@0 8662 // Return of CreateThis must be an object.
michael@0 8663 #ifdef DEBUG
michael@0 8664 Label createdThisIsObject;
michael@0 8665 masm.branchTestObject(Assembler::Equal, JSReturnOperand, &createdThisIsObject);
michael@0 8666 masm.assumeUnreachable("The return of CreateThis must be an object.");
michael@0 8667 masm.bind(&createdThisIsObject);
michael@0 8668 #endif
michael@0 8669
michael@0 8670 // Reset the register set from here on in.
michael@0 8671 JS_ASSERT(JSReturnOperand == R0);
michael@0 8672 regs = availableGeneralRegs(0);
michael@0 8673 regs.take(R0);
michael@0 8674 regs.take(ArgumentsRectifierReg);
michael@0 8675 argcReg = regs.takeAny();
michael@0 8676
michael@0 8677 // Restore saved argc so we can use it to calculate the address to save
michael@0 8678 // the resulting this object to.
michael@0 8679 masm.pop(argcReg);
michael@0 8680
michael@0 8681 // Save "this" value back into pushed arguments on stack. R0 can be clobbered after that.
michael@0 8682 // Stack now looks like:
michael@0 8683 // [..., Callee, ThisV, Arg0V, ..., ArgNV, StubFrameHeader ]
michael@0 8684 BaseIndex thisSlot(BaselineStackReg, argcReg, TimesEight, STUB_FRAME_SIZE);
michael@0 8685 masm.storeValue(R0, thisSlot);
michael@0 8686
michael@0 8687 // Restore the stub register from the baseline stub frame.
michael@0 8688 masm.loadPtr(Address(BaselineStackReg, STUB_FRAME_SAVED_STUB_OFFSET), BaselineStubReg);
michael@0 8689
michael@0 8690 // Reload callee script. Note that a GC triggered by CreateThis may
michael@0 8691 // have destroyed the callee BaselineScript and IonScript. CreateThis is
michael@0 8692 // safely repeatable though, so in this case we just leave the stub frame
michael@0 8693 // and jump to the next stub.
michael@0 8694
michael@0 8695 // Just need to load the script now.
michael@0 8696 BaseIndex calleeSlot3(BaselineStackReg, argcReg, TimesEight,
michael@0 8697 sizeof(Value) + STUB_FRAME_SIZE);
michael@0 8698 masm.loadValue(calleeSlot3, R0);
michael@0 8699 callee = masm.extractObject(R0, ExtractTemp0);
michael@0 8700 regs.add(R0);
michael@0 8701 regs.takeUnchecked(callee);
michael@0 8702 masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), callee);
michael@0 8703
michael@0 8704 code = regs.takeAny();
michael@0 8705 masm.loadBaselineOrIonRaw(callee, code, SequentialExecution, &failureLeaveStubFrame);
michael@0 8706
michael@0 8707 // Release callee register, but don't add ExtractTemp0 back into the pool
michael@0 8708 // ExtractTemp0 is used later, and if it's allocated to some other register at that
michael@0 8709 // point, it will get clobbered when used.
michael@0 8710 if (callee != ExtractTemp0)
michael@0 8711 regs.add(callee);
michael@0 8712
michael@0 8713 if (canUseTailCallReg)
michael@0 8714 regs.addUnchecked(BaselineTailCallReg);
michael@0 8715 }
michael@0 8716 Register scratch = regs.takeAny();
michael@0 8717
michael@0 8718 // Values are on the stack left-to-right. Calling convention wants them
michael@0 8719 // right-to-left so duplicate them on the stack in reverse order.
michael@0 8720 // |this| and callee are pushed last.
michael@0 8721 pushCallArguments(masm, regs, argcReg);
michael@0 8722
michael@0 8723 // The callee is on top of the stack. Pop and unbox it.
michael@0 8724 ValueOperand val = regs.takeAnyValue();
michael@0 8725 masm.popValue(val);
michael@0 8726 callee = masm.extractObject(val, ExtractTemp0);
michael@0 8727
michael@0 8728 EmitCreateStubFrameDescriptor(masm, scratch);
michael@0 8729
michael@0 8730 // Note that we use Push, not push, so that callIon will align the stack
michael@0 8731 // properly on ARM.
michael@0 8732 masm.Push(argcReg);
michael@0 8733 masm.Push(callee);
michael@0 8734 masm.Push(scratch);
michael@0 8735
michael@0 8736 // Handle arguments underflow.
michael@0 8737 Label noUnderflow;
michael@0 8738 masm.load16ZeroExtend(Address(callee, JSFunction::offsetOfNargs()), callee);
michael@0 8739 masm.branch32(Assembler::AboveOrEqual, argcReg, callee, &noUnderflow);
michael@0 8740 {
michael@0 8741 // Call the arguments rectifier.
michael@0 8742 JS_ASSERT(ArgumentsRectifierReg != code);
michael@0 8743 JS_ASSERT(ArgumentsRectifierReg != argcReg);
michael@0 8744
michael@0 8745 JitCode *argumentsRectifier =
michael@0 8746 cx->runtime()->jitRuntime()->getArgumentsRectifier(SequentialExecution);
michael@0 8747
michael@0 8748 masm.movePtr(ImmGCPtr(argumentsRectifier), code);
michael@0 8749 masm.loadPtr(Address(code, JitCode::offsetOfCode()), code);
michael@0 8750 masm.mov(argcReg, ArgumentsRectifierReg);
michael@0 8751 }
michael@0 8752
michael@0 8753 masm.bind(&noUnderflow);
michael@0 8754
michael@0 8755 // If needed, update SPS Profiler frame entry before and after call.
michael@0 8756 {
michael@0 8757 JS_ASSERT(kind == ICStub::Call_Scripted || kind == ICStub::Call_AnyScripted);
michael@0 8758 GeneralRegisterSet availRegs = availableGeneralRegs(0);
michael@0 8759 availRegs.take(ArgumentsRectifierReg);
michael@0 8760 availRegs.take(code);
michael@0 8761 emitProfilingUpdate(masm, availRegs, kind == ICStub::Call_Scripted ?
michael@0 8762 ICCall_Scripted::offsetOfPCOffset()
michael@0 8763 : ICCall_AnyScripted::offsetOfPCOffset());
michael@0 8764 }
michael@0 8765
michael@0 8766 masm.callIon(code);
michael@0 8767
michael@0 8768 // If this is a constructing call, and the callee returns a non-object, replace it with
michael@0 8769 // the |this| object passed in.
michael@0 8770 if (isConstructing_) {
michael@0 8771 Label skipThisReplace;
michael@0 8772 masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);
michael@0 8773
michael@0 8774 Register scratchReg = JSReturnOperand.scratchReg();
michael@0 8775
michael@0 8776 // Current stack: [ ARGVALS..., ThisVal, ActualArgc, Callee, Descriptor ]
michael@0 8777 // However, we can't use this ThisVal, because it hasn't been traced. We need to use
michael@0 8778 // the ThisVal higher up the stack:
michael@0 8779 // Current stack: [ ThisVal, ARGVALS..., ...STUB FRAME...,
michael@0 8780 // ARGVALS..., ThisVal, ActualArgc, Callee, Descriptor ]
michael@0 8781 masm.loadPtr(Address(BaselineStackReg, 2*sizeof(size_t)), scratchReg);
michael@0 8782
michael@0 8783 // scratchReg now contains actualArgCount. Double it to account for skipping past two
michael@0 8784 // pushed copies of argument values. Additionally, we need to add:
michael@0 8785 // STUB_FRAME_SIZE + sizeof(ThisVal) + sizeof(size_t) + sizeof(void *) + sizeof(size_t)
michael@0 8786 // for: stub frame, this value, actual argc, callee, and descriptor
michael@0 8787 masm.lshiftPtr(Imm32(1), scratchReg);
michael@0 8788 BaseIndex reloadThisSlot(BaselineStackReg, scratchReg, TimesEight,
michael@0 8789 STUB_FRAME_SIZE + sizeof(Value) + 3*sizeof(size_t));
michael@0 8790 masm.loadValue(reloadThisSlot, JSReturnOperand);
michael@0 8791 #ifdef DEBUG
michael@0 8792 masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);
michael@0 8793 masm.assumeUnreachable("Return of constructing call should be an object.");
michael@0 8794 #endif
michael@0 8795 masm.bind(&skipThisReplace);
michael@0 8796 }
michael@0 8797
michael@0 8798 leaveStubFrame(masm, true);
michael@0 8799
michael@0 8800 // Enter type monitor IC to type-check result.
michael@0 8801 EmitEnterTypeMonitorIC(masm);
michael@0 8802
michael@0 8803 // Leave stub frame and restore argc for the next stub.
michael@0 8804 masm.bind(&failureLeaveStubFrame);
michael@0 8805 leaveStubFrame(masm, false);
michael@0 8806 if (argcReg != R0.scratchReg())
michael@0 8807 masm.mov(argcReg, R0.scratchReg());
michael@0 8808
michael@0 8809 masm.bind(&failure);
michael@0 8810 EmitStubGuardFailure(masm);
michael@0 8811 return true;
michael@0 8812 }
michael@0 8813
michael@0 8814 bool
michael@0 8815 ICCall_Native::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 8816 {
michael@0 8817 Label failure;
michael@0 8818 GeneralRegisterSet regs(availableGeneralRegs(0));
michael@0 8819
michael@0 8820 Register argcReg = R0.scratchReg();
michael@0 8821 regs.take(argcReg);
michael@0 8822 regs.takeUnchecked(BaselineTailCallReg);
michael@0 8823
michael@0 8824 // Load the callee in R1.
michael@0 8825 BaseIndex calleeSlot(BaselineStackReg, argcReg, TimesEight, ICStackValueOffset + sizeof(Value));
michael@0 8826 masm.loadValue(calleeSlot, R1);
michael@0 8827 regs.take(R1);
michael@0 8828
michael@0 8829 masm.branchTestObject(Assembler::NotEqual, R1, &failure);
michael@0 8830
michael@0 8831 // Ensure callee matches this stub's callee.
michael@0 8832 Register callee = masm.extractObject(R1, ExtractTemp0);
michael@0 8833 Address expectedCallee(BaselineStubReg, ICCall_Native::offsetOfCallee());
michael@0 8834 masm.branchPtr(Assembler::NotEqual, expectedCallee, callee, &failure);
michael@0 8835
michael@0 8836 regs.add(R1);
michael@0 8837 regs.takeUnchecked(callee);
michael@0 8838
michael@0 8839 // Push a stub frame so that we can perform a non-tail call.
michael@0 8840 // Note that this leaves the return address in TailCallReg.
michael@0 8841 enterStubFrame(masm, regs.getAny());
michael@0 8842
michael@0 8843 // Values are on the stack left-to-right. Calling convention wants them
michael@0 8844 // right-to-left so duplicate them on the stack in reverse order.
michael@0 8845 // |this| and callee are pushed last.
michael@0 8846 pushCallArguments(masm, regs, argcReg);
michael@0 8847
michael@0 8848 if (isConstructing_) {
michael@0 8849 // Stack looks like: [ ..., Arg0Val, ThisVal, CalleeVal ]
michael@0 8850 // Replace ThisVal with MagicValue(JS_IS_CONSTRUCTING)
michael@0 8851 masm.storeValue(MagicValue(JS_IS_CONSTRUCTING), Address(BaselineStackReg, sizeof(Value)));
michael@0 8852 }
michael@0 8853
michael@0 8854 masm.checkStackAlignment();
michael@0 8855
michael@0 8856 // Native functions have the signature:
michael@0 8857 //
michael@0 8858 // bool (*)(JSContext *, unsigned, Value *vp)
michael@0 8859 //
michael@0 8860 // Where vp[0] is space for callee/return value, vp[1] is |this|, and vp[2] onward
michael@0 8861 // are the function arguments.
michael@0 8862
michael@0 8863 // Initialize vp.
michael@0 8864 Register vpReg = regs.takeAny();
michael@0 8865 masm.movePtr(StackPointer, vpReg);
michael@0 8866
michael@0 8867 // Construct a native exit frame.
michael@0 8868 masm.push(argcReg);
michael@0 8869
michael@0 8870 Register scratch = regs.takeAny();
michael@0 8871 EmitCreateStubFrameDescriptor(masm, scratch);
michael@0 8872 masm.push(scratch);
michael@0 8873 masm.push(BaselineTailCallReg);
michael@0 8874 masm.enterFakeExitFrame();
michael@0 8875
michael@0 8876 // If needed, update SPS Profiler frame entry. At this point, BaselineTailCallReg
michael@0 8877 // and scratch can be clobbered.
michael@0 8878 emitProfilingUpdate(masm, BaselineTailCallReg, scratch, ICCall_Native::offsetOfPCOffset());
michael@0 8879
michael@0 8880 // Execute call.
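// The ABI call takes three arguments: the JSContext, argc, and vp.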
michael@0 8881 masm.setupUnalignedABICall(3, scratch);
michael@0 8882 masm.loadJSContext(scratch);
michael@0 8883 masm.passABIArg(scratch);
michael@0 8884 masm.passABIArg(argcReg);
michael@0 8885 masm.passABIArg(vpReg);
michael@0 8886
michael@0 8887 #ifdef JS_ARM_SIMULATOR
michael@0 8888 // The simulator requires VM calls to be redirected to a special swi
michael@0 8889 // instruction to handle them, so we store the redirected pointer in the
michael@0 8890 // stub and use that instead of the original one.
michael@0 8891 masm.callWithABI(Address(BaselineStubReg, ICCall_Native::offsetOfNative()));
michael@0 8892 #else
michael@0 8893 masm.callWithABI(Address(callee, JSFunction::offsetOfNativeOrScript()));
michael@0 8894 #endif
michael@0 8895
michael@0 8896 // Test for failure.
michael@0 8897 masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());
michael@0 8898
michael@0 8899 // Load the return value into R0.
michael@0 8900 masm.loadValue(Address(StackPointer, IonNativeExitFrameLayout::offsetOfResult()), R0);
michael@0 8901
michael@0 8902 leaveStubFrame(masm);
michael@0 8903
michael@0 8904 // Enter type monitor IC to type-check result.
michael@0 8905 EmitEnterTypeMonitorIC(masm);
michael@0 8906
michael@0 8907 masm.bind(&failure);
michael@0 8908 EmitStubGuardFailure(masm);
michael@0 8909 return true;
michael@0 8910 }
michael@0 8911
michael@0 8912 bool
michael@0 8913 ICCall_ScriptedApplyArray::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 8914 {
michael@0 8915 Label failure;
michael@0 8916 GeneralRegisterSet regs(availableGeneralRegs(0));
michael@0 8917
michael@0 8918 Register argcReg = R0.scratchReg();
michael@0 8919 regs.take(argcReg);
michael@0 8920 regs.takeUnchecked(BaselineTailCallReg);
michael@0 8921 regs.takeUnchecked(ArgumentsRectifierReg);
michael@0 8922
michael@0 8923 //
michael@0 8924 // Validate inputs
michael@0 8925 //
michael@0 8926
michael@0 8927 Register target = guardFunApply(masm, regs, argcReg, /*checkNative=*/false,
michael@0 8928 FunApply_Array, &failure);
michael@0 8929 if (regs.has(target)) {
michael@0 8930 regs.take(target);
michael@0 8931 } else {
michael@0 8932 // If target is already a reserved reg, take another register for it, because it's
michael@0 8933 // probably currently an ExtractTemp, which might get clobbered later.
michael@0 8934 Register targetTemp = regs.takeAny();
michael@0 8935 masm.movePtr(target, targetTemp);
michael@0 8936 target = targetTemp;
michael@0 8937 }
michael@0 8938
michael@0 8939 // Push a stub frame so that we can perform a non-tail call.
michael@0 8940 enterStubFrame(masm, regs.getAny());
michael@0 8941
michael@0 8942 //
michael@0 8943 // Push arguments
michael@0 8944 //
michael@0 8945
michael@0 8946 // Stack now looks like:
michael@0 8947 // BaselineFrameReg -------------------.
michael@0 8948 // v
michael@0 8949 // [..., js_fun_apply, TargetV, TargetThisV, ArgsArrayV, StubFrameHeader]
michael@0 8950
michael@0 8951 // Push all array elements onto the stack:
michael@0 8952 Address arrayVal(BaselineFrameReg, STUB_FRAME_SIZE);
michael@0 8953 pushArrayArguments(masm, arrayVal, regs);
michael@0 8954
michael@0 8955 // Stack now looks like:
michael@0 8956 // BaselineFrameReg -------------------.
michael@0 8957 // v
michael@0 8958 // [..., js_fun_apply, TargetV, TargetThisV, ArgsArrayV, StubFrameHeader,
michael@0 8959 // PushedArgN, ..., PushedArg0]
michael@0 8960 // Can't fail after this, so it's ok to clobber argcReg.
michael@0 8961
michael@0 8962 // Push actual argument 0 as |thisv| for call.
michael@0 8963 masm.pushValue(Address(BaselineFrameReg, STUB_FRAME_SIZE + sizeof(Value)));
michael@0 8964
michael@0 8965 // All pushes after this use Push instead of push to make sure ARM can align
michael@0 8966 // stack properly for call.
michael@0 8967 Register scratch = regs.takeAny();
michael@0 8968 EmitCreateStubFrameDescriptor(masm, scratch);
michael@0 8969
michael@0 8970 // Reload argc from length of array.
michael@0 8971 masm.extractObject(arrayVal, argcReg);
michael@0 8972 masm.loadPtr(Address(argcReg, JSObject::offsetOfElements()), argcReg);
michael@0 8973 masm.load32(Address(argcReg, ObjectElements::offsetOfInitializedLength()), argcReg);
michael@0 8974
michael@0 8975 masm.Push(argcReg);
michael@0 8976 masm.Push(target);
michael@0 8977 masm.Push(scratch);
michael@0 8978
michael@0 8979 // Load nargs into scratch for underflow check, and then load jitcode pointer into target.
michael@0 8980 masm.load16ZeroExtend(Address(target, JSFunction::offsetOfNargs()), scratch);
michael@0 8981 masm.loadPtr(Address(target, JSFunction::offsetOfNativeOrScript()), target);
michael@0 8982 masm.loadBaselineOrIonRaw(target, target, SequentialExecution, nullptr);
michael@0 8983
michael@0 8984 // Handle arguments underflow.
michael@0 8985 Label noUnderflow;
michael@0 8986 masm.branch32(Assembler::AboveOrEqual, argcReg, scratch, &noUnderflow);
michael@0 8987 {
michael@0 8988 // Call the arguments rectifier.
michael@0 8989 JS_ASSERT(ArgumentsRectifierReg != target);
michael@0 8990 JS_ASSERT(ArgumentsRectifierReg != argcReg);
michael@0 8991
michael@0 8992 JitCode *argumentsRectifier =
michael@0 8993 cx->runtime()->jitRuntime()->getArgumentsRectifier(SequentialExecution);
michael@0 8994
michael@0 8995 masm.movePtr(ImmGCPtr(argumentsRectifier), target);
michael@0 8996 masm.loadPtr(Address(target, JitCode::offsetOfCode()), target);
michael@0 8997 masm.mov(argcReg, ArgumentsRectifierReg);
michael@0 8998 }
michael@0 8999 masm.bind(&noUnderflow);
michael@0 9000 regs.add(argcReg);
michael@0 9001
michael@0 9002 // If needed, update SPS Profiler frame entry. At this point, BaselineTailCallReg
michael@0 9003 // and scratch can be clobbered.
michael@0 9004 emitProfilingUpdate(masm, regs.getAny(), scratch,
michael@0 9005 ICCall_ScriptedApplyArguments::offsetOfPCOffset());
michael@0 9006
michael@0 9007 // Do call
michael@0 9008 masm.callIon(target);
michael@0 9009 leaveStubFrame(masm, true);
michael@0 9010
michael@0 9011 // Enter type monitor IC to type-check result.
michael@0 9012 EmitEnterTypeMonitorIC(masm);
michael@0 9013
michael@0 9014 masm.bind(&failure);
michael@0 9015 EmitStubGuardFailure(masm);
michael@0 9016 return true;
michael@0 9017 }
michael@0 9018
michael@0 9019 bool
michael@0 9020 ICCall_ScriptedApplyArguments::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 9021 {
michael@0 9022 Label failure;
michael@0 9023 GeneralRegisterSet regs(availableGeneralRegs(0));
michael@0 9024
michael@0 9025 Register argcReg = R0.scratchReg();
michael@0 9026 regs.take(argcReg);
michael@0 9027 regs.takeUnchecked(BaselineTailCallReg);
michael@0 9028 regs.takeUnchecked(ArgumentsRectifierReg);
michael@0 9029
michael@0 9030 //
michael@0 9031 // Validate inputs
michael@0 9032 //
michael@0 9033
michael@0 9034 Register target = guardFunApply(masm, regs, argcReg, /*checkNative=*/false,
michael@0 9035 FunApply_MagicArgs, &failure);
michael@0 9036 if (regs.has(target)) {
michael@0 9037 regs.take(target);
michael@0 9038 } else {
michael@0 9039 // If target is already a reserved reg, take another register for it, because it's
michael@0 9040 // probably currently an ExtractTemp, which might get clobbered later.
michael@0 9041 Register targetTemp = regs.takeAny();
michael@0 9042 masm.movePtr(target, targetTemp);
michael@0 9043 target = targetTemp;
michael@0 9044 }
michael@0 9045
michael@0 9046 // Push a stub frame so that we can perform a non-tail call.
michael@0 9047 enterStubFrame(masm, regs.getAny());
michael@0 9048
michael@0 9049 //
michael@0 9050 // Push arguments
michael@0 9051 //
michael@0 9052
michael@0 9053 // Stack now looks like:
michael@0 9054 // [..., js_fun_apply, TargetV, TargetThisV, MagicArgsV, StubFrameHeader]
michael@0 9055
michael@0 9056 // Push all arguments supplied to caller function onto the stack.
michael@0 9057 pushCallerArguments(masm, regs);
michael@0 9058
michael@0 9059 // Stack now looks like:
michael@0 9060 // BaselineFrameReg -------------------.
michael@0 9061 // v
michael@0 9062 // [..., js_fun_apply, TargetV, TargetThisV, MagicArgsV, StubFrameHeader,
michael@0 9063 // PushedArgN, ..., PushedArg0]
michael@0 9064 // Can't fail after this, so it's ok to clobber argcReg.
michael@0 9065
michael@0 9066 // Push actual argument 0 as |thisv| for call.
michael@0 9067 masm.pushValue(Address(BaselineFrameReg, STUB_FRAME_SIZE + sizeof(Value)));
michael@0 9068
michael@0 9069 // All pushes after this use Push instead of push, so that the stack stays
michael@0 9070 // properly aligned for the call on ARM.
michael@0 9071 Register scratch = regs.takeAny();
michael@0 9072 EmitCreateStubFrameDescriptor(masm, scratch);
michael@0 9073
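// Reload argc: follow the frame pointer saved at offset 0 of the stub frame back to
// the caller's BaselineFrame and read its numActualArgs. With MagicArgs the apply
// forwards exactly the caller's actual arguments, so this should match the number of
// Values pushCallerArguments pushed above.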
michael@0 9074 masm.loadPtr(Address(BaselineFrameReg, 0), argcReg);
michael@0 9075 masm.loadPtr(Address(argcReg, BaselineFrame::offsetOfNumActualArgs()), argcReg);
michael@0 9076 masm.Push(argcReg);
michael@0 9077 masm.Push(target);
michael@0 9078 masm.Push(scratch);
michael@0 9079
michael@0 9080 // Load nargs into scratch for underflow check, and then load jitcode pointer into target.
michael@0 9081 masm.load16ZeroExtend(Address(target, JSFunction::offsetOfNargs()), scratch);
michael@0 9082 masm.loadPtr(Address(target, JSFunction::offsetOfNativeOrScript()), target);
michael@0 9083 masm.loadBaselineOrIonRaw(target, target, SequentialExecution, nullptr);
michael@0 9084
michael@0 9085 // Handle arguments underflow.
michael@0 9086 Label noUnderflow;
michael@0 9087 masm.branch32(Assembler::AboveOrEqual, argcReg, scratch, &noUnderflow);
michael@0 9088 {
michael@0 9089 // Call the arguments rectifier.
michael@0 9090 JS_ASSERT(ArgumentsRectifierReg != target);
michael@0 9091 JS_ASSERT(ArgumentsRectifierReg != argcReg);
michael@0 9092
michael@0 9093 JitCode *argumentsRectifier =
michael@0 9094 cx->runtime()->jitRuntime()->getArgumentsRectifier(SequentialExecution);
michael@0 9095
michael@0 9096 masm.movePtr(ImmGCPtr(argumentsRectifier), target);
michael@0 9097 masm.loadPtr(Address(target, JitCode::offsetOfCode()), target);
michael@0 9098 masm.mov(argcReg, ArgumentsRectifierReg);
michael@0 9099 }
michael@0 9100 masm.bind(&noUnderflow);
michael@0 9101 regs.add(argcReg);
michael@0 9102
michael@0 9103 // If needed, update SPS Profiler frame entry. At this point, BaselineTailCallReg
michael@0 9104 // and scratch can be clobbered.
michael@0 9105 emitProfilingUpdate(masm, regs.getAny(), scratch,
michael@0 9106 ICCall_ScriptedApplyArguments::offsetOfPCOffset());
michael@0 9107
michael@0 9108 // Do the call.
michael@0 9109 masm.callIon(target);
michael@0 9110 leaveStubFrame(masm, true);
michael@0 9111
michael@0 9112 // Enter type monitor IC to type-check result.
michael@0 9113 EmitEnterTypeMonitorIC(masm);
michael@0 9114
michael@0 9115 masm.bind(&failure);
michael@0 9116 EmitStubGuardFailure(masm);
michael@0 9117 return true;
michael@0 9118 }
michael@0 9119
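// ICCall_ScriptedFunCall targets the pattern |f.call(thisv, a, b)| where the callee
// is js_fun_call and |f| is a scripted function with JIT code: it is turned into a
// direct call of |f| with |thisv| as |this| and (a, b) as the arguments. When no
// arguments are supplied, |undefined| is used as |this| (the zeroArgs path below).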
michael@0 9120 bool
michael@0 9121 ICCall_ScriptedFunCall::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 9122 {
michael@0 9123 Label failure;
michael@0 9124 GeneralRegisterSet regs(availableGeneralRegs(0));
michael@0 9125 bool canUseTailCallReg = regs.has(BaselineTailCallReg);
michael@0 9126
michael@0 9127 Register argcReg = R0.scratchReg();
michael@0 9128 JS_ASSERT(argcReg != ArgumentsRectifierReg);
michael@0 9129
michael@0 9130 regs.take(argcReg);
michael@0 9131 regs.take(ArgumentsRectifierReg);
michael@0 9132 regs.takeUnchecked(BaselineTailCallReg);
michael@0 9133
michael@0 9134 // Load the callee in R1.
michael@0 9135 // Stack Layout: [ ..., CalleeVal, ThisVal, Arg0Val, ..., ArgNVal, +ICStackValueOffset+ ]
michael@0 9136 BaseIndex calleeSlot(BaselineStackReg, argcReg, TimesEight, ICStackValueOffset + sizeof(Value));
michael@0 9137 masm.loadValue(calleeSlot, R1);
michael@0 9138 regs.take(R1);
michael@0 9139
michael@0 9140 // Ensure callee is js_fun_call.
michael@0 9141 masm.branchTestObject(Assembler::NotEqual, R1, &failure);
michael@0 9142
michael@0 9143 Register callee = masm.extractObject(R1, ExtractTemp0);
michael@0 9144 masm.branchTestObjClass(Assembler::NotEqual, callee, regs.getAny(), &JSFunction::class_,
michael@0 9145 &failure);
michael@0 9146 masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), callee);
michael@0 9147 masm.branchPtr(Assembler::NotEqual, callee, ImmPtr(js_fun_call), &failure);
michael@0 9148
michael@0 9149 // Ensure |this| is a scripted function with JIT code.
michael@0 9150 BaseIndex thisSlot(BaselineStackReg, argcReg, TimesEight, ICStackValueOffset);
michael@0 9151 masm.loadValue(thisSlot, R1);
michael@0 9152
michael@0 9153 masm.branchTestObject(Assembler::NotEqual, R1, &failure);
michael@0 9154 callee = masm.extractObject(R1, ExtractTemp0);
michael@0 9155
michael@0 9156 masm.branchTestObjClass(Assembler::NotEqual, callee, regs.getAny(), &JSFunction::class_,
michael@0 9157 &failure);
michael@0 9158 masm.branchIfFunctionHasNoScript(callee, &failure);
michael@0 9159 masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), callee);
michael@0 9160
michael@0 9161 // Load the start of the target JitCode.
michael@0 9162 Register code = regs.takeAny();
michael@0 9163 masm.loadBaselineOrIonRaw(callee, code, SequentialExecution, &failure);
michael@0 9164
michael@0 9165 // We no longer need R1.
michael@0 9166 regs.add(R1);
michael@0 9167
michael@0 9168 // Push a stub frame so that we can perform a non-tail call.
michael@0 9169 enterStubFrame(masm, regs.getAny());
michael@0 9170 if (canUseTailCallReg)
michael@0 9171 regs.add(BaselineTailCallReg);
michael@0 9172
michael@0 9173 // Values are on the stack left-to-right. Calling convention wants them
michael@0 9174 // right-to-left so duplicate them on the stack in reverse order.
michael@0 9175 pushCallArguments(masm, regs, argcReg);
michael@0 9176
michael@0 9177 // Discard callee (function.call).
michael@0 9178 masm.addPtr(Imm32(sizeof(Value)), StackPointer);
michael@0 9179
michael@0 9180 // Pop scripted callee (the original |this|).
michael@0 9181 ValueOperand val = regs.takeAnyValue();
michael@0 9182 masm.popValue(val);
michael@0 9183
michael@0 9184 // Decrement argc if argc > 0. If argc == 0, push |undefined| as |this|.
michael@0 9185 Label zeroArgs, done;
michael@0 9186 masm.branchTest32(Assembler::Zero, argcReg, argcReg, &zeroArgs);
michael@0 9187 masm.sub32(Imm32(1), argcReg);
michael@0 9188 masm.jump(&done);
michael@0 9189
michael@0 9190 masm.bind(&zeroArgs);
michael@0 9191 masm.pushValue(UndefinedValue());
michael@0 9192 masm.bind(&done);
michael@0 9193
michael@0 9194 // Unbox scripted callee.
michael@0 9195 callee = masm.extractObject(val, ExtractTemp0);
michael@0 9196
michael@0 9197 Register scratch = regs.takeAny();
michael@0 9198 EmitCreateStubFrameDescriptor(masm, scratch);
michael@0 9199
michael@0 9200 // Note that we use Push, not push, so that callIon will align the stack
michael@0 9201 // properly on ARM.
michael@0 9202 masm.Push(argcReg);
michael@0 9203 masm.Push(callee);
michael@0 9204 masm.Push(scratch);
michael@0 9205
michael@0 9206 // Handle arguments underflow.
michael@0 9207 Label noUnderflow;
michael@0 9208 masm.load16ZeroExtend(Address(callee, JSFunction::offsetOfNargs()), callee);
michael@0 9209 masm.branch32(Assembler::AboveOrEqual, argcReg, callee, &noUnderflow);
michael@0 9210 {
michael@0 9211 // Call the arguments rectifier.
michael@0 9212 JS_ASSERT(ArgumentsRectifierReg != code);
michael@0 9213 JS_ASSERT(ArgumentsRectifierReg != argcReg);
michael@0 9214
michael@0 9215 JitCode *argumentsRectifier =
michael@0 9216 cx->runtime()->jitRuntime()->getArgumentsRectifier(SequentialExecution);
michael@0 9217
michael@0 9218 masm.movePtr(ImmGCPtr(argumentsRectifier), code);
michael@0 9219 masm.loadPtr(Address(code, JitCode::offsetOfCode()), code);
michael@0 9220 masm.mov(argcReg, ArgumentsRectifierReg);
michael@0 9221 }
michael@0 9222
michael@0 9223 masm.bind(&noUnderflow);
michael@0 9224
michael@0 9225 // If needed, update SPS Profiler frame entry.
michael@0 9226 {
michael@0 9227 // Need to avoid using ArgumentsRectifierReg and code register.
michael@0 9228 GeneralRegisterSet availRegs = availableGeneralRegs(0);
michael@0 9229 availRegs.take(ArgumentsRectifierReg);
michael@0 9230 availRegs.take(code);
michael@0 9231 emitProfilingUpdate(masm, availRegs, ICCall_ScriptedFunCall::offsetOfPCOffset());
michael@0 9232 }
michael@0 9233
michael@0 9234 masm.callIon(code);
michael@0 9235
michael@0 9236 leaveStubFrame(masm, true);
michael@0 9237
michael@0 9238 // Enter type monitor IC to type-check result.
michael@0 9239 EmitEnterTypeMonitorIC(masm);
michael@0 9240
michael@0 9241 masm.bind(&failure);
michael@0 9242 EmitStubGuardFailure(masm);
michael@0 9243 return true;
michael@0 9244 }
michael@0 9245
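// Helper for the no-FPU path of the tableswitch stub below: convert a double Value
// to an int32 in place, but only when the conversion is exact, so that values like
// 2.5 or NaN fall through to the switch's default target.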
michael@0 9246 static bool
michael@0 9247 DoubleValueToInt32ForSwitch(Value *v)
michael@0 9248 {
michael@0 9249 double d = v->toDouble();
michael@0 9250 int32_t truncated = int32_t(d);
michael@0 9251 if (d != double(truncated))
michael@0 9252 return false;
michael@0 9253
michael@0 9254 v->setInt32(truncated);
michael@0 9255 return true;
michael@0 9256 }
michael@0 9257
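// Roughly, the tableswitch stub computes |key - min_|, range-checks the result
// against |length_| and, when in range, overwrites the IC return address with the
// matching entry of |table_|, so returning from the IC lands directly on that case's
// native code. Doubles are accepted only when they convert exactly to an int32.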
michael@0 9258 bool
michael@0 9259 ICTableSwitch::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 9260 {
michael@0 9261 Label isInt32, notInt32, outOfRange;
michael@0 9262 Register scratch = R1.scratchReg();
michael@0 9263
michael@0 9264 masm.branchTestInt32(Assembler::NotEqual, R0, &notInt32);
michael@0 9265
michael@0 9266 Register key = masm.extractInt32(R0, ExtractTemp0);
michael@0 9267
michael@0 9268 masm.bind(&isInt32);
michael@0 9269
michael@0 9270 masm.load32(Address(BaselineStubReg, offsetof(ICTableSwitch, min_)), scratch);
michael@0 9271 masm.sub32(scratch, key);
michael@0 9272 masm.branch32(Assembler::BelowOrEqual,
michael@0 9273 Address(BaselineStubReg, offsetof(ICTableSwitch, length_)), key, &outOfRange);
michael@0 9274
michael@0 9275 masm.loadPtr(Address(BaselineStubReg, offsetof(ICTableSwitch, table_)), scratch);
michael@0 9276 masm.loadPtr(BaseIndex(scratch, key, ScalePointer), scratch);
michael@0 9277
michael@0 9278 EmitChangeICReturnAddress(masm, scratch);
michael@0 9279 EmitReturnFromIC(masm);
michael@0 9280
michael@0 9281 masm.bind(&notInt32);
michael@0 9282
michael@0 9283 masm.branchTestDouble(Assembler::NotEqual, R0, &outOfRange);
michael@0 9284 if (cx->runtime()->jitSupportsFloatingPoint) {
michael@0 9285 masm.unboxDouble(R0, FloatReg0);
michael@0 9286
michael@0 9287 // N.B. -0 === 0, so convert -0 to a 0 int32.
michael@0 9288 masm.convertDoubleToInt32(FloatReg0, key, &outOfRange, /* negativeZeroCheck = */ false);
michael@0 9289 } else {
michael@0 9290 // Pass pointer to double value.
michael@0 9291 masm.pushValue(R0);
michael@0 9292 masm.movePtr(StackPointer, R0.scratchReg());
michael@0 9293
michael@0 9294 masm.setupUnalignedABICall(1, scratch);
michael@0 9295 masm.passABIArg(R0.scratchReg());
michael@0 9296 masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, DoubleValueToInt32ForSwitch));
michael@0 9297
michael@0 9298 // If the function returns |true|, the value has been converted to
michael@0 9299 // int32.
michael@0 9300 masm.mov(ReturnReg, scratch);
michael@0 9301 masm.popValue(R0);
michael@0 9302 masm.branchIfFalseBool(scratch, &outOfRange);
michael@0 9303 masm.unboxInt32(R0, key);
michael@0 9304 }
michael@0 9305 masm.jump(&isInt32);
michael@0 9306
michael@0 9307 masm.bind(&outOfRange);
michael@0 9308
michael@0 9309 masm.loadPtr(Address(BaselineStubReg, offsetof(ICTableSwitch, defaultTarget_)), scratch);
michael@0 9310
michael@0 9311 EmitChangeICReturnAddress(masm, scratch);
michael@0 9312 EmitReturnFromIC(masm);
michael@0 9313 return true;
michael@0 9314 }
michael@0 9315
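// getStub decodes the JSOP_TABLESWITCH operands at pc_. A sketch of the expected
// immediate layout, each operand being JUMP_OFFSET_LEN bytes:
//
//   [TABLESWITCH] [default offset] [low] [high] [offset for low] ... [offset for high]
//
// A case offset of zero means the case has no target of its own and is pointed at
// the default pc. The table initially stores bytecode pcs; fixupJumpTable below
// rewrites them to native code addresses once the BaselineScript exists.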
michael@0 9316 ICStub *
michael@0 9317 ICTableSwitch::Compiler::getStub(ICStubSpace *space)
michael@0 9318 {
michael@0 9319 JitCode *code = getStubCode();
michael@0 9320 if (!code)
michael@0 9321 return nullptr;
michael@0 9322
michael@0 9323 jsbytecode *pc = pc_;
michael@0 9324 pc += JUMP_OFFSET_LEN;
michael@0 9325 int32_t low = GET_JUMP_OFFSET(pc);
michael@0 9326 pc += JUMP_OFFSET_LEN;
michael@0 9327 int32_t high = GET_JUMP_OFFSET(pc);
michael@0 9328 int32_t length = high - low + 1;
michael@0 9329 pc += JUMP_OFFSET_LEN;
michael@0 9330
michael@0 9331 void **table = (void**) space->alloc(sizeof(void*) * length);
michael@0 9332 if (!table)
michael@0 9333 return nullptr;
michael@0 9334
michael@0 9335 jsbytecode *defaultpc = pc_ + GET_JUMP_OFFSET(pc_);
michael@0 9336
michael@0 9337 for (int32_t i = 0; i < length; i++) {
michael@0 9338 int32_t off = GET_JUMP_OFFSET(pc);
michael@0 9339 if (off)
michael@0 9340 table[i] = pc_ + off;
michael@0 9341 else
michael@0 9342 table[i] = defaultpc;
michael@0 9343 pc += JUMP_OFFSET_LEN;
michael@0 9344 }
michael@0 9345
michael@0 9346 return ICTableSwitch::New(space, code, table, low, length, defaultpc);
michael@0 9347 }
michael@0 9348
michael@0 9349 void
michael@0 9350 ICTableSwitch::fixupJumpTable(JSScript *script, BaselineScript *baseline)
michael@0 9351 {
michael@0 9352 defaultTarget_ = baseline->nativeCodeForPC(script, (jsbytecode *) defaultTarget_);
michael@0 9353
michael@0 9354 for (int32_t i = 0; i < length_; i++)
michael@0 9355 table_[i] = baseline->nativeCodeForPC(script, (jsbytecode *) table_[i]);
michael@0 9356 }
michael@0 9357
michael@0 9358 //
michael@0 9359 // IteratorNew_Fallback
michael@0 9360 //
michael@0 9361
michael@0 9362 static bool
michael@0 9363 DoIteratorNewFallback(JSContext *cx, BaselineFrame *frame, ICIteratorNew_Fallback *stub,
michael@0 9364 HandleValue value, MutableHandleValue res)
michael@0 9365 {
michael@0 9366 jsbytecode *pc = stub->icEntry()->pc(frame->script());
michael@0 9367 FallbackICSpew(cx, stub, "IteratorNew");
michael@0 9368
michael@0 9369 uint8_t flags = GET_UINT8(pc);
michael@0 9370 res.set(value);
michael@0 9371 return ValueToIterator(cx, flags, res);
michael@0 9372 }
michael@0 9373
michael@0 9374 typedef bool (*DoIteratorNewFallbackFn)(JSContext *, BaselineFrame *, ICIteratorNew_Fallback *,
michael@0 9375 HandleValue, MutableHandleValue);
michael@0 9376 static const VMFunction DoIteratorNewFallbackInfo =
michael@0 9377 FunctionInfo<DoIteratorNewFallbackFn>(DoIteratorNewFallback, PopValues(1));
michael@0 9378
michael@0 9379 bool
michael@0 9380 ICIteratorNew_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 9381 {
michael@0 9382 EmitRestoreTailCallReg(masm);
michael@0 9383
michael@0 9384 // Sync stack for the decompiler.
michael@0 9385 masm.pushValue(R0);
michael@0 9386
michael@0 9387 masm.pushValue(R0);
michael@0 9388 masm.push(BaselineStubReg);
michael@0 9389 masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
michael@0 9390
michael@0 9391 return tailCallVM(DoIteratorNewFallbackInfo, masm);
michael@0 9392 }
michael@0 9393
michael@0 9394 //
michael@0 9395 // IteratorMore_Fallback
michael@0 9396 //
michael@0 9397
michael@0 9398 static bool
michael@0 9399 DoIteratorMoreFallback(JSContext *cx, BaselineFrame *frame, ICIteratorMore_Fallback *stub_,
michael@0 9400 HandleValue iterValue, MutableHandleValue res)
michael@0 9401 {
michael@0 9402 // This fallback stub may trigger debug mode toggling.
michael@0 9403 DebugModeOSRVolatileStub<ICIteratorMore_Fallback *> stub(frame, stub_);
michael@0 9404
michael@0 9405 FallbackICSpew(cx, stub, "IteratorMore");
michael@0 9406
michael@0 9407 bool cond;
michael@0 9408 if (!IteratorMore(cx, &iterValue.toObject(), &cond, res))
michael@0 9409 return false;
michael@0 9410 res.setBoolean(cond);
michael@0 9411
michael@0 9412 // Check if debug mode toggling made the stub invalid.
michael@0 9413 if (stub.invalid())
michael@0 9414 return true;
michael@0 9415
michael@0 9416 if (iterValue.toObject().is<PropertyIteratorObject>() &&
michael@0 9417 !stub->hasStub(ICStub::IteratorMore_Native))
michael@0 9418 {
michael@0 9419 ICIteratorMore_Native::Compiler compiler(cx);
michael@0 9420 ICStub *newStub = compiler.getStub(compiler.getStubSpace(frame->script()));
michael@0 9421 if (!newStub)
michael@0 9422 return false;
michael@0 9423 stub->addNewStub(newStub);
michael@0 9424 }
michael@0 9425
michael@0 9426 return true;
michael@0 9427 }
michael@0 9428
michael@0 9429 typedef bool (*DoIteratorMoreFallbackFn)(JSContext *, BaselineFrame *, ICIteratorMore_Fallback *,
michael@0 9430 HandleValue, MutableHandleValue);
michael@0 9431 static const VMFunction DoIteratorMoreFallbackInfo =
michael@0 9432 FunctionInfo<DoIteratorMoreFallbackFn>(DoIteratorMoreFallback);
michael@0 9433
michael@0 9434 bool
michael@0 9435 ICIteratorMore_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 9436 {
michael@0 9437 EmitRestoreTailCallReg(masm);
michael@0 9438
michael@0 9439 masm.pushValue(R0);
michael@0 9440 masm.push(BaselineStubReg);
michael@0 9441 masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
michael@0 9442
michael@0 9443 return tailCallVM(DoIteratorMoreFallbackInfo, masm);
michael@0 9444 }
michael@0 9445
michael@0 9446 //
michael@0 9447 // IteratorMore_Native
michael@0 9448 //
michael@0 9449
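// Fast path for iterator-more on a native PropertyIteratorObject: guard the class,
// load the NativeIterator private, leave JSITER_FOREACH iterators to the fallback,
// and answer |props_cursor < props_end| as a boolean without calling into the VM.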
michael@0 9450 bool
michael@0 9451 ICIteratorMore_Native::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 9452 {
michael@0 9453 Label failure;
michael@0 9454
michael@0 9455 Register obj = masm.extractObject(R0, ExtractTemp0);
michael@0 9456
michael@0 9457 GeneralRegisterSet regs(availableGeneralRegs(1));
michael@0 9458 Register nativeIterator = regs.takeAny();
michael@0 9459 Register scratch = regs.takeAny();
michael@0 9460
michael@0 9461 masm.branchTestObjClass(Assembler::NotEqual, obj, scratch,
michael@0 9462 &PropertyIteratorObject::class_, &failure);
michael@0 9463 masm.loadObjPrivate(obj, JSObject::ITER_CLASS_NFIXED_SLOTS, nativeIterator);
michael@0 9464
michael@0 9465 masm.branchTest32(Assembler::NonZero, Address(nativeIterator, offsetof(NativeIterator, flags)),
michael@0 9466 Imm32(JSITER_FOREACH), &failure);
michael@0 9467
michael@0 9468 // Set output to true if props_cursor < props_end.
michael@0 9469 masm.loadPtr(Address(nativeIterator, offsetof(NativeIterator, props_end)), scratch);
michael@0 9470 Address cursorAddr = Address(nativeIterator, offsetof(NativeIterator, props_cursor));
michael@0 9471 masm.cmpPtrSet(Assembler::LessThan, cursorAddr, scratch, scratch);
michael@0 9472
michael@0 9473 masm.tagValue(JSVAL_TYPE_BOOLEAN, scratch, R0);
michael@0 9474 EmitReturnFromIC(masm);
michael@0 9475
michael@0 9476 // Failure case - jump to next stub
michael@0 9477 masm.bind(&failure);
michael@0 9478 EmitStubGuardFailure(masm);
michael@0 9479 return true;
michael@0 9480 }
michael@0 9481
michael@0 9482 //
michael@0 9483 // IteratorNext_Fallback
michael@0 9484 //
michael@0 9485
michael@0 9486 static bool
michael@0 9487 DoIteratorNextFallback(JSContext *cx, BaselineFrame *frame, ICIteratorNext_Fallback *stub_,
michael@0 9488 HandleValue iterValue, MutableHandleValue res)
michael@0 9489 {
michael@0 9490 // This fallback stub may trigger debug mode toggling.
michael@0 9491 DebugModeOSRVolatileStub<ICIteratorNext_Fallback *> stub(frame, stub_);
michael@0 9492
michael@0 9493 FallbackICSpew(cx, stub, "IteratorNext");
michael@0 9494
michael@0 9495 RootedObject iteratorObject(cx, &iterValue.toObject());
michael@0 9496 if (!IteratorNext(cx, iteratorObject, res))
michael@0 9497 return false;
michael@0 9498
michael@0 9499 // Check if debug mode toggling made the stub invalid.
michael@0 9500 if (stub.invalid())
michael@0 9501 return true;
michael@0 9502
michael@0 9503 if (!res.isString() && !stub->hasNonStringResult())
michael@0 9504 stub->setHasNonStringResult();
michael@0 9505
michael@0 9506 if (iteratorObject->is<PropertyIteratorObject>() &&
michael@0 9507 !stub->hasStub(ICStub::IteratorNext_Native))
michael@0 9508 {
michael@0 9509 ICIteratorNext_Native::Compiler compiler(cx);
michael@0 9510 ICStub *newStub = compiler.getStub(compiler.getStubSpace(frame->script()));
michael@0 9511 if (!newStub)
michael@0 9512 return false;
michael@0 9513 stub->addNewStub(newStub);
michael@0 9514 }
michael@0 9515
michael@0 9516 return true;
michael@0 9517 }
michael@0 9518
michael@0 9519 typedef bool (*DoIteratorNextFallbackFn)(JSContext *, BaselineFrame *, ICIteratorNext_Fallback *,
michael@0 9520 HandleValue, MutableHandleValue);
michael@0 9521 static const VMFunction DoIteratorNextFallbackInfo =
michael@0 9522 FunctionInfo<DoIteratorNextFallbackFn>(DoIteratorNextFallback);
michael@0 9523
michael@0 9524 bool
michael@0 9525 ICIteratorNext_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 9526 {
michael@0 9527 EmitRestoreTailCallReg(masm);
michael@0 9528
michael@0 9529 masm.pushValue(R0);
michael@0 9530 masm.push(BaselineStubReg);
michael@0 9531 masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
michael@0 9532
michael@0 9533 return tailCallVM(DoIteratorNextFallbackInfo, masm);
michael@0 9534 }
michael@0 9535
michael@0 9536 //
michael@0 9537 // IteratorNext_Native
michael@0 9538 //
michael@0 9539
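// Fast path for iterator-next on a native PropertyIteratorObject: load the property
// name string at props_cursor, bump the cursor by one pointer, and return the string.
// JSITER_FOREACH iterators still go through the fallback.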
michael@0 9540 bool
michael@0 9541 ICIteratorNext_Native::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 9542 {
michael@0 9543 Label failure;
michael@0 9544
michael@0 9545 Register obj = masm.extractObject(R0, ExtractTemp0);
michael@0 9546
michael@0 9547 GeneralRegisterSet regs(availableGeneralRegs(1));
michael@0 9548 Register nativeIterator = regs.takeAny();
michael@0 9549 Register scratch = regs.takeAny();
michael@0 9550
michael@0 9551 masm.branchTestObjClass(Assembler::NotEqual, obj, scratch,
michael@0 9552 &PropertyIteratorObject::class_, &failure);
michael@0 9553 masm.loadObjPrivate(obj, JSObject::ITER_CLASS_NFIXED_SLOTS, nativeIterator);
michael@0 9554
michael@0 9555 masm.branchTest32(Assembler::NonZero, Address(nativeIterator, offsetof(NativeIterator, flags)),
michael@0 9556 Imm32(JSITER_FOREACH), &failure);
michael@0 9557
michael@0 9558 // Load the cursor, then the next property name string it points at.
michael@0 9559 masm.loadPtr(Address(nativeIterator, offsetof(NativeIterator, props_cursor)), scratch);
michael@0 9560 masm.loadPtr(Address(scratch, 0), scratch);
michael@0 9561
michael@0 9562 // Advance the cursor.
michael@0 9563 masm.addPtr(Imm32(sizeof(JSString *)),
michael@0 9564 Address(nativeIterator, offsetof(NativeIterator, props_cursor)));
michael@0 9565
michael@0 9566 masm.tagValue(JSVAL_TYPE_STRING, scratch, R0);
michael@0 9567 EmitReturnFromIC(masm);
michael@0 9568
michael@0 9569 // Failure case - jump to next stub
michael@0 9570 masm.bind(&failure);
michael@0 9571 EmitStubGuardFailure(masm);
michael@0 9572 return true;
michael@0 9573 }
michael@0 9574
michael@0 9575 //
michael@0 9576 // IteratorClose_Fallback
michael@0 9577 //
michael@0 9578
michael@0 9579 static bool
michael@0 9580 DoIteratorCloseFallback(JSContext *cx, ICIteratorClose_Fallback *stub, HandleValue iterValue)
michael@0 9581 {
michael@0 9582 FallbackICSpew(cx, stub, "IteratorClose");
michael@0 9583
michael@0 9584 RootedObject iteratorObject(cx, &iterValue.toObject());
michael@0 9585 return CloseIterator(cx, iteratorObject);
michael@0 9586 }
michael@0 9587
michael@0 9588 typedef bool (*DoIteratorCloseFallbackFn)(JSContext *, ICIteratorClose_Fallback *, HandleValue);
michael@0 9589 static const VMFunction DoIteratorCloseFallbackInfo =
michael@0 9590 FunctionInfo<DoIteratorCloseFallbackFn>(DoIteratorCloseFallback);
michael@0 9591
michael@0 9592 bool
michael@0 9593 ICIteratorClose_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 9594 {
michael@0 9595 EmitRestoreTailCallReg(masm);
michael@0 9596
michael@0 9597 masm.pushValue(R0);
michael@0 9598 masm.push(BaselineStubReg);
michael@0 9599
michael@0 9600 return tailCallVM(DoIteratorCloseFallbackInfo, masm);
michael@0 9601 }
michael@0 9602
michael@0 9603 //
michael@0 9604 // InstanceOf_Fallback
michael@0 9605 //
michael@0 9606
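// Fallback for |instanceof|, e.g. |x instanceof F|: a non-object right-hand side is
// reported as an error (JSMSG_BAD_INSTANCEOF_RHS); otherwise the generic HasInstance
// path computes the result.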
michael@0 9607 static bool
michael@0 9608 DoInstanceOfFallback(JSContext *cx, ICInstanceOf_Fallback *stub,
michael@0 9609 HandleValue lhs, HandleValue rhs,
michael@0 9610 MutableHandleValue res)
michael@0 9611 {
michael@0 9612 FallbackICSpew(cx, stub, "InstanceOf");
michael@0 9613
michael@0 9614 if (!rhs.isObject()) {
michael@0 9615 js_ReportValueError(cx, JSMSG_BAD_INSTANCEOF_RHS, -1, rhs, NullPtr());
michael@0 9616 return false;
michael@0 9617 }
michael@0 9618
michael@0 9619 RootedObject obj(cx, &rhs.toObject());
michael@0 9620
michael@0 9621 // For functions, keep track of the |prototype| property in type information,
michael@0 9622 // for use during Ion compilation.
michael@0 9623 if (obj->is<JSFunction>() && IsIonEnabled(cx))
michael@0 9624 types::EnsureTrackPropertyTypes(cx, obj, NameToId(cx->names().prototype));
michael@0 9625
michael@0 9626 bool cond = false;
michael@0 9627 if (!HasInstance(cx, obj, lhs, &cond))
michael@0 9628 return false;
michael@0 9629
michael@0 9630 res.setBoolean(cond);
michael@0 9631 return true;
michael@0 9632 }
michael@0 9633
michael@0 9634 typedef bool (*DoInstanceOfFallbackFn)(JSContext *, ICInstanceOf_Fallback *, HandleValue, HandleValue,
michael@0 9635 MutableHandleValue);
michael@0 9636 static const VMFunction DoInstanceOfFallbackInfo =
michael@0 9637 FunctionInfo<DoInstanceOfFallbackFn>(DoInstanceOfFallback, PopValues(2));
michael@0 9638
michael@0 9639 bool
michael@0 9640 ICInstanceOf_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 9641 {
michael@0 9642 EmitRestoreTailCallReg(masm);
michael@0 9643
michael@0 9644 // Sync stack for the decompiler.
michael@0 9645 masm.pushValue(R0);
michael@0 9646 masm.pushValue(R1);
michael@0 9647
michael@0 9648 masm.pushValue(R1);
michael@0 9649 masm.pushValue(R0);
michael@0 9650 masm.push(BaselineStubReg);
michael@0 9651
michael@0 9652 return tailCallVM(DoInstanceOfFallbackInfo, masm);
michael@0 9653 }
michael@0 9654
michael@0 9655 //
michael@0 9656 // TypeOf_Fallback
michael@0 9657 //
michael@0 9658
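// The typeof fallback attaches a small ICTypeOf_Typed stub once it has seen the
// operand, but only for types whose answer follows from the value tag alone
// (undefined, string, number, boolean). Objects and functions keep using the
// fallback, since their typeof result can depend on the object's class.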
michael@0 9659 static bool
michael@0 9660 DoTypeOfFallback(JSContext *cx, BaselineFrame *frame, ICTypeOf_Fallback *stub, HandleValue val,
michael@0 9661 MutableHandleValue res)
michael@0 9662 {
michael@0 9663 FallbackICSpew(cx, stub, "TypeOf");
michael@0 9664 JSType type = js::TypeOfValue(val);
michael@0 9665 RootedString string(cx, TypeName(type, cx->names()));
michael@0 9666
michael@0 9667 res.setString(string);
michael@0 9668
michael@0 9669 JS_ASSERT(type != JSTYPE_NULL);
michael@0 9670 if (type != JSTYPE_OBJECT && type != JSTYPE_FUNCTION) {
michael@0 9671 // Create a new TypeOf stub.
michael@0 9672 IonSpew(IonSpew_BaselineIC, " Generating TypeOf stub for JSType (%d)", (int) type);
michael@0 9673 ICTypeOf_Typed::Compiler compiler(cx, type, string);
michael@0 9674 ICStub *typeOfStub = compiler.getStub(compiler.getStubSpace(frame->script()));
michael@0 9675 if (!typeOfStub)
michael@0 9676 return false;
michael@0 9677 stub->addNewStub(typeOfStub);
michael@0 9678 }
michael@0 9679
michael@0 9680 return true;
michael@0 9681 }
michael@0 9682
michael@0 9683 typedef bool (*DoTypeOfFallbackFn)(JSContext *, BaselineFrame *frame, ICTypeOf_Fallback *,
michael@0 9684 HandleValue, MutableHandleValue);
michael@0 9685 static const VMFunction DoTypeOfFallbackInfo =
michael@0 9686 FunctionInfo<DoTypeOfFallbackFn>(DoTypeOfFallback);
michael@0 9687
michael@0 9688 bool
michael@0 9689 ICTypeOf_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 9690 {
michael@0 9691 EmitRestoreTailCallReg(masm);
michael@0 9692
michael@0 9693 masm.pushValue(R0);
michael@0 9694 masm.push(BaselineStubReg);
michael@0 9695 masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
michael@0 9696
michael@0 9697 return tailCallVM(DoTypeOfFallbackInfo, masm);
michael@0 9698 }
michael@0 9699
michael@0 9700 bool
michael@0 9701 ICTypeOf_Typed::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 9702 {
michael@0 9703 JS_ASSERT(type_ != JSTYPE_NULL);
michael@0 9704 JS_ASSERT(type_ != JSTYPE_FUNCTION);
michael@0 9705 JS_ASSERT(type_ != JSTYPE_OBJECT);
michael@0 9706
michael@0 9707 Label failure;
michael@0 9708 switch (type_) {
michael@0 9709 case JSTYPE_VOID:
michael@0 9710 masm.branchTestUndefined(Assembler::NotEqual, R0, &failure);
michael@0 9711 break;
michael@0 9712
michael@0 9713 case JSTYPE_STRING:
michael@0 9714 masm.branchTestString(Assembler::NotEqual, R0, &failure);
michael@0 9715 break;
michael@0 9716
michael@0 9717 case JSTYPE_NUMBER:
michael@0 9718 masm.branchTestNumber(Assembler::NotEqual, R0, &failure);
michael@0 9719 break;
michael@0 9720
michael@0 9721 case JSTYPE_BOOLEAN:
michael@0 9722 masm.branchTestBoolean(Assembler::NotEqual, R0, &failure);
michael@0 9723 break;
michael@0 9724
michael@0 9725 default:
michael@0 9726 MOZ_ASSUME_UNREACHABLE("Unexpected type");
michael@0 9727 }
michael@0 9728
michael@0 9729 masm.movePtr(ImmGCPtr(typeString_), R0.scratchReg());
michael@0 9730 masm.tagValue(JSVAL_TYPE_STRING, R0.scratchReg(), R0);
michael@0 9731 EmitReturnFromIC(masm);
michael@0 9732
michael@0 9733 masm.bind(&failure);
michael@0 9734 EmitStubGuardFailure(masm);
michael@0 9735 return true;
michael@0 9736 }
michael@0 9737
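// JSOP_RETSUB support (returning from a finally block): the fallback maps the saved
// bytecode offset in |val| to a native resume address and, while there is room,
// attaches an ICRetSub_Resume stub keyed on that offset so later returns through the
// same finally block can resume without a VM call.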
michael@0 9738 static bool
michael@0 9739 DoRetSubFallback(JSContext *cx, BaselineFrame *frame, ICRetSub_Fallback *stub,
michael@0 9740 HandleValue val, uint8_t **resumeAddr)
michael@0 9741 {
michael@0 9742 FallbackICSpew(cx, stub, "RetSub");
michael@0 9743
michael@0 9744 // |val| is the bytecode offset where we should resume.
michael@0 9745
michael@0 9746 JS_ASSERT(val.isInt32());
michael@0 9747 JS_ASSERT(val.toInt32() >= 0);
michael@0 9748
michael@0 9749 JSScript *script = frame->script();
michael@0 9750 uint32_t offset = uint32_t(val.toInt32());
michael@0 9751
michael@0 9752 *resumeAddr = script->baselineScript()->nativeCodeForPC(script, script->offsetToPC(offset));
michael@0 9753
michael@0 9754 if (stub->numOptimizedStubs() >= ICRetSub_Fallback::MAX_OPTIMIZED_STUBS)
michael@0 9755 return true;
michael@0 9756
michael@0 9757 // Attach an optimized stub for this pc offset.
michael@0 9758 IonSpew(IonSpew_BaselineIC, " Generating RetSub stub for pc offset %u", offset);
michael@0 9759 ICRetSub_Resume::Compiler compiler(cx, offset, *resumeAddr);
michael@0 9760 ICStub *optStub = compiler.getStub(compiler.getStubSpace(script));
michael@0 9761 if (!optStub)
michael@0 9762 return false;
michael@0 9763
michael@0 9764 stub->addNewStub(optStub);
michael@0 9765 return true;
michael@0 9766 }
michael@0 9767
michael@0 9768 typedef bool (*DoRetSubFallbackFn)(JSContext *, BaselineFrame *, ICRetSub_Fallback *,
michael@0 9769 HandleValue, uint8_t **);
michael@0 9770 static const VMFunction DoRetSubFallbackInfo = FunctionInfo<DoRetSubFallbackFn>(DoRetSubFallback);
michael@0 9771
michael@0 9772 typedef bool (*ThrowFn)(JSContext *, HandleValue);
michael@0 9773 static const VMFunction ThrowInfoBaseline = FunctionInfo<ThrowFn>(js::Throw);
michael@0 9774
michael@0 9775 bool
michael@0 9776 ICRetSub_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 9777 {
michael@0 9778 // If R0 is BooleanValue(true), rethrow R1.
michael@0 9779 Label rethrow;
michael@0 9780 masm.branchTestBooleanTruthy(true, R0, &rethrow);
michael@0 9781 {
michael@0 9782 // Call a stub to get the native code address for the pc offset in R1.
michael@0 9783 GeneralRegisterSet regs(availableGeneralRegs(0));
michael@0 9784 regs.take(R1);
michael@0 9785 regs.takeUnchecked(BaselineTailCallReg);
michael@0 9786
michael@0 9787 Register frame = regs.takeAny();
michael@0 9788 masm.movePtr(BaselineFrameReg, frame);
michael@0 9789
michael@0 9790 enterStubFrame(masm, regs.getAny());
michael@0 9791
michael@0 9792 masm.pushValue(R1);
michael@0 9793 masm.push(BaselineStubReg);
michael@0 9794 masm.pushBaselineFramePtr(frame, frame);
michael@0 9795
michael@0 9796 if (!callVM(DoRetSubFallbackInfo, masm))
michael@0 9797 return false;
michael@0 9798
michael@0 9799 leaveStubFrame(masm);
michael@0 9800
michael@0 9801 EmitChangeICReturnAddress(masm, ReturnReg);
michael@0 9802 EmitReturnFromIC(masm);
michael@0 9803 }
michael@0 9804
michael@0 9805 masm.bind(&rethrow);
michael@0 9806 EmitRestoreTailCallReg(masm);
michael@0 9807 masm.pushValue(R1);
michael@0 9808 return tailCallVM(ThrowInfoBaseline, masm);
michael@0 9809 }
michael@0 9810
michael@0 9811 bool
michael@0 9812 ICRetSub_Resume::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 9813 {
michael@0 9814 // If R0 is BooleanValue(true), rethrow R1.
michael@0 9815 Label fail, rethrow;
michael@0 9816 masm.branchTestBooleanTruthy(true, R0, &rethrow);
michael@0 9817
michael@0 9818 // R1 is the pc offset. Ensure it matches this stub's offset.
michael@0 9819 Register offset = masm.extractInt32(R1, ExtractTemp0);
michael@0 9820 masm.branch32(Assembler::NotEqual,
michael@0 9821 Address(BaselineStubReg, ICRetSub_Resume::offsetOfPCOffset()),
michael@0 9822 offset,
michael@0 9823 &fail);
michael@0 9824
michael@0 9825 // pc offset matches, resume at the target pc.
michael@0 9826 masm.loadPtr(Address(BaselineStubReg, ICRetSub_Resume::offsetOfAddr()), R0.scratchReg());
michael@0 9827 EmitChangeICReturnAddress(masm, R0.scratchReg());
michael@0 9828 EmitReturnFromIC(masm);
michael@0 9829
michael@0 9830 // Rethrow the Value stored in R1.
michael@0 9831 masm.bind(&rethrow);
michael@0 9832 EmitRestoreTailCallReg(masm);
michael@0 9833 masm.pushValue(R1);
michael@0 9834 if (!tailCallVM(ThrowInfoBaseline, masm))
michael@0 9835 return false;
michael@0 9836
michael@0 9837 masm.bind(&fail);
michael@0 9838 EmitStubGuardFailure(masm);
michael@0 9839 return true;
michael@0 9840 }
michael@0 9841
michael@0 9842 ICProfiler_PushFunction::ICProfiler_PushFunction(JitCode *stubCode, const char *str,
michael@0 9843 HandleScript script)
michael@0 9844 : ICStub(ICStub::Profiler_PushFunction, stubCode),
michael@0 9845 str_(str),
michael@0 9846 script_(script)
michael@0 9847 { }
michael@0 9848
michael@0 9849 ICTypeMonitor_SingleObject::ICTypeMonitor_SingleObject(JitCode *stubCode, HandleObject obj)
michael@0 9850 : ICStub(TypeMonitor_SingleObject, stubCode),
michael@0 9851 obj_(obj)
michael@0 9852 { }
michael@0 9853
michael@0 9854 ICTypeMonitor_TypeObject::ICTypeMonitor_TypeObject(JitCode *stubCode, HandleTypeObject type)
michael@0 9855 : ICStub(TypeMonitor_TypeObject, stubCode),
michael@0 9856 type_(type)
michael@0 9857 { }
michael@0 9858
michael@0 9859 ICTypeUpdate_SingleObject::ICTypeUpdate_SingleObject(JitCode *stubCode, HandleObject obj)
michael@0 9860 : ICStub(TypeUpdate_SingleObject, stubCode),
michael@0 9861 obj_(obj)
michael@0 9862 { }
michael@0 9863
michael@0 9864 ICTypeUpdate_TypeObject::ICTypeUpdate_TypeObject(JitCode *stubCode, HandleTypeObject type)
michael@0 9865 : ICStub(TypeUpdate_TypeObject, stubCode),
michael@0 9866 type_(type)
michael@0 9867 { }
michael@0 9868
michael@0 9869 ICGetElemNativeStub::ICGetElemNativeStub(ICStub::Kind kind, JitCode *stubCode,
michael@0 9870 ICStub *firstMonitorStub,
michael@0 9871 HandleShape shape, HandlePropertyName name,
michael@0 9872 AccessType acctype, bool needsAtomize)
michael@0 9873 : ICMonitoredStub(kind, stubCode, firstMonitorStub),
michael@0 9874 shape_(shape),
michael@0 9875 name_(name)
michael@0 9876 {
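// Pack the access type and the needs-atomize flag into the stub's extra_ word,
// presumably read back by the corresponding accessors declared in the header.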
michael@0 9877 extra_ = (static_cast<uint16_t>(acctype) << ACCESSTYPE_SHIFT) |
michael@0 9878 (static_cast<uint16_t>(needsAtomize) << NEEDS_ATOMIZE_SHIFT);
michael@0 9879 }
michael@0 9880
michael@0 9881 ICGetElemNativeStub::~ICGetElemNativeStub()
michael@0 9882 { }
michael@0 9883
michael@0 9884 ICGetElemNativeGetterStub::ICGetElemNativeGetterStub(
michael@0 9885 ICStub::Kind kind, JitCode *stubCode, ICStub *firstMonitorStub,
michael@0 9886 HandleShape shape, HandlePropertyName name, AccessType acctype,
michael@0 9887 bool needsAtomize, HandleFunction getter, uint32_t pcOffset)
michael@0 9888 : ICGetElemNativeStub(kind, stubCode, firstMonitorStub, shape, name, acctype, needsAtomize),
michael@0 9889 getter_(getter),
michael@0 9890 pcOffset_(pcOffset)
michael@0 9891 {
michael@0 9892 JS_ASSERT(kind == GetElem_NativePrototypeCallNative ||
michael@0 9893 kind == GetElem_NativePrototypeCallScripted);
michael@0 9894 JS_ASSERT(acctype == NativeGetter || acctype == ScriptedGetter);
michael@0 9895 }
michael@0 9896
michael@0 9897 ICGetElem_NativePrototypeSlot::ICGetElem_NativePrototypeSlot(
michael@0 9898 JitCode *stubCode, ICStub *firstMonitorStub,
michael@0 9899 HandleShape shape, HandlePropertyName name,
michael@0 9900 AccessType acctype, bool needsAtomize, uint32_t offset,
michael@0 9901 HandleObject holder, HandleShape holderShape)
michael@0 9902 : ICGetElemNativeSlotStub(ICStub::GetElem_NativePrototypeSlot, stubCode, firstMonitorStub, shape,
michael@0 9903 name, acctype, needsAtomize, offset),
michael@0 9904 holder_(holder),
michael@0 9905 holderShape_(holderShape)
michael@0 9906 { }
michael@0 9907
michael@0 9908 ICGetElemNativePrototypeCallStub::ICGetElemNativePrototypeCallStub(
michael@0 9909 ICStub::Kind kind, JitCode *stubCode, ICStub *firstMonitorStub,
michael@0 9910 HandleShape shape, HandlePropertyName name,
michael@0 9911 AccessType acctype, bool needsAtomize, HandleFunction getter,
michael@0 9912 uint32_t pcOffset, HandleObject holder, HandleShape holderShape)
michael@0 9913 : ICGetElemNativeGetterStub(kind, stubCode, firstMonitorStub, shape, name, acctype, needsAtomize,
michael@0 9914 getter, pcOffset),
michael@0 9915 holder_(holder),
michael@0 9916 holderShape_(holderShape)
michael@0 9917 { }
michael@0 9918
michael@0 9919 ICGetElem_Dense::ICGetElem_Dense(JitCode *stubCode, ICStub *firstMonitorStub, HandleShape shape)
michael@0 9920 : ICMonitoredStub(GetElem_Dense, stubCode, firstMonitorStub),
michael@0 9921 shape_(shape)
michael@0 9922 { }
michael@0 9923
michael@0 9924 ICGetElem_TypedArray::ICGetElem_TypedArray(JitCode *stubCode, HandleShape shape, uint32_t type)
michael@0 9925 : ICStub(GetElem_TypedArray, stubCode),
michael@0 9926 shape_(shape)
michael@0 9927 {
michael@0 9928 extra_ = uint16_t(type);
michael@0 9929 JS_ASSERT(extra_ == type);
michael@0 9930 }
michael@0 9931
michael@0 9932 ICSetElem_Dense::ICSetElem_Dense(JitCode *stubCode, HandleShape shape, HandleTypeObject type)
michael@0 9933 : ICUpdatedStub(SetElem_Dense, stubCode),
michael@0 9934 shape_(shape),
michael@0 9935 type_(type)
michael@0 9936 { }
michael@0 9937
michael@0 9938 ICSetElem_DenseAdd::ICSetElem_DenseAdd(JitCode *stubCode, types::TypeObject *type,
michael@0 9939 size_t protoChainDepth)
michael@0 9940 : ICUpdatedStub(SetElem_DenseAdd, stubCode),
michael@0 9941 type_(type)
michael@0 9942 {
michael@0 9943 JS_ASSERT(protoChainDepth <= MAX_PROTO_CHAIN_DEPTH);
michael@0 9944 extra_ = protoChainDepth;
michael@0 9945 }
michael@0 9946
michael@0 9947 template <size_t ProtoChainDepth>
michael@0 9948 ICUpdatedStub *
michael@0 9949 ICSetElemDenseAddCompiler::getStubSpecific(ICStubSpace *space, const AutoShapeVector *shapes)
michael@0 9950 {
michael@0 9951 RootedTypeObject objType(cx, obj_->getType(cx));
michael@0 9952 if (!objType)
michael@0 9953 return nullptr;
michael@0 9954 Rooted<JitCode *> stubCode(cx, getStubCode());
michael@0 9955 return ICSetElem_DenseAddImpl<ProtoChainDepth>::New(space, stubCode, objType, shapes);
michael@0 9956 }
michael@0 9957
michael@0 9958 ICSetElem_TypedArray::ICSetElem_TypedArray(JitCode *stubCode, HandleShape shape, uint32_t type,
michael@0 9959 bool expectOutOfBounds)
michael@0 9960 : ICStub(SetElem_TypedArray, stubCode),
michael@0 9961 shape_(shape)
michael@0 9962 {
michael@0 9963 extra_ = uint8_t(type);
michael@0 9964 JS_ASSERT(extra_ == type);
michael@0 9965 extra_ |= (static_cast<uint16_t>(expectOutOfBounds) << 8);
michael@0 9966 }
michael@0 9967
michael@0 9968 ICGetName_Global::ICGetName_Global(JitCode *stubCode, ICStub *firstMonitorStub, HandleShape shape,
michael@0 9969 uint32_t slot)
michael@0 9970 : ICMonitoredStub(GetName_Global, stubCode, firstMonitorStub),
michael@0 9971 shape_(shape),
michael@0 9972 slot_(slot)
michael@0 9973 { }
michael@0 9974
michael@0 9975 template <size_t NumHops>
michael@0 9976 ICGetName_Scope<NumHops>::ICGetName_Scope(JitCode *stubCode, ICStub *firstMonitorStub,
michael@0 9977 AutoShapeVector *shapes, uint32_t offset)
michael@0 9978 : ICMonitoredStub(GetStubKind(), stubCode, firstMonitorStub),
michael@0 9979 offset_(offset)
michael@0 9980 {
michael@0 9981 JS_STATIC_ASSERT(NumHops <= MAX_HOPS);
michael@0 9982 JS_ASSERT(shapes->length() == NumHops + 1);
michael@0 9983 for (size_t i = 0; i < NumHops + 1; i++)
michael@0 9984 shapes_[i].init((*shapes)[i]);
michael@0 9985 }
michael@0 9986
michael@0 9987 ICGetIntrinsic_Constant::ICGetIntrinsic_Constant(JitCode *stubCode, HandleValue value)
michael@0 9988 : ICStub(GetIntrinsic_Constant, stubCode),
michael@0 9989 value_(value)
michael@0 9990 { }
michael@0 9991
michael@0 9992 ICGetIntrinsic_Constant::~ICGetIntrinsic_Constant()
michael@0 9993 { }
michael@0 9994
michael@0 9995 ICGetProp_Primitive::ICGetProp_Primitive(JitCode *stubCode, ICStub *firstMonitorStub,
michael@0 9996 HandleShape protoShape, uint32_t offset)
michael@0 9997 : ICMonitoredStub(GetProp_Primitive, stubCode, firstMonitorStub),
michael@0 9998 protoShape_(protoShape),
michael@0 9999 offset_(offset)
michael@0 10000 { }
michael@0 10001
michael@0 10002 ICGetPropNativeStub::ICGetPropNativeStub(ICStub::Kind kind, JitCode *stubCode,
michael@0 10003 ICStub *firstMonitorStub,
michael@0 10004 HandleShape shape, uint32_t offset)
michael@0 10005 : ICMonitoredStub(kind, stubCode, firstMonitorStub),
michael@0 10006 shape_(shape),
michael@0 10007 offset_(offset)
michael@0 10008 { }
michael@0 10009
michael@0 10010 ICGetProp_NativePrototype::ICGetProp_NativePrototype(JitCode *stubCode, ICStub *firstMonitorStub,
michael@0 10011 HandleShape shape, uint32_t offset,
michael@0 10012 HandleObject holder, HandleShape holderShape)
michael@0 10013 : ICGetPropNativeStub(GetProp_NativePrototype, stubCode, firstMonitorStub, shape, offset),
michael@0 10014 holder_(holder),
michael@0 10015 holderShape_(holderShape)
michael@0 10016 { }
michael@0 10017
michael@0 10018 ICGetPropCallGetter::ICGetPropCallGetter(Kind kind, JitCode *stubCode, ICStub *firstMonitorStub,
michael@0 10019 HandleObject holder, HandleShape holderShape, HandleFunction getter,
michael@0 10020 uint32_t pcOffset)
michael@0 10021 : ICMonitoredStub(kind, stubCode, firstMonitorStub),
michael@0 10022 holder_(holder),
michael@0 10023 holderShape_(holderShape),
michael@0 10024 getter_(getter),
michael@0 10025 pcOffset_(pcOffset)
michael@0 10026 {
michael@0 10027 JS_ASSERT(kind == ICStub::GetProp_CallScripted ||
michael@0 10028 kind == ICStub::GetProp_CallNative ||
michael@0 10029 kind == ICStub::GetProp_CallNativePrototype);
michael@0 10030 }
michael@0 10031
michael@0 10032 ICGetPropCallPrototypeGetter::ICGetPropCallPrototypeGetter(Kind kind, JitCode *stubCode,
michael@0 10033 ICStub *firstMonitorStub,
michael@0 10034 HandleShape receiverShape, HandleObject holder,
michael@0 10035 HandleShape holderShape,
michael@0 10036 HandleFunction getter, uint32_t pcOffset)
michael@0 10037 : ICGetPropCallGetter(kind, stubCode, firstMonitorStub, holder, holderShape, getter, pcOffset),
michael@0 10038 receiverShape_(receiverShape)
michael@0 10039 {
michael@0 10040 JS_ASSERT(kind == ICStub::GetProp_CallScripted || kind == ICStub::GetProp_CallNativePrototype);
michael@0 10041 }
michael@0 10042
michael@0 10043 ICSetProp_Native::ICSetProp_Native(JitCode *stubCode, HandleTypeObject type, HandleShape shape,
michael@0 10044 uint32_t offset)
michael@0 10045 : ICUpdatedStub(SetProp_Native, stubCode),
michael@0 10046 type_(type),
michael@0 10047 shape_(shape),
michael@0 10048 offset_(offset)
michael@0 10049 { }
michael@0 10050
michael@0 10051 ICUpdatedStub *
michael@0 10052 ICSetProp_Native::Compiler::getStub(ICStubSpace *space)
michael@0 10053 {
michael@0 10054 RootedTypeObject type(cx, obj_->getType(cx));
michael@0 10055 if (!type)
michael@0 10056 return nullptr;
michael@0 10057
michael@0 10058 RootedShape shape(cx, obj_->lastProperty());
michael@0 10059 ICUpdatedStub *stub = ICSetProp_Native::New(space, getStubCode(), type, shape, offset_);
michael@0 10060 if (!stub || !stub->initUpdatingChain(cx, space))
michael@0 10061 return nullptr;
michael@0 10062 return stub;
michael@0 10063 }
michael@0 10064
michael@0 10065 ICSetProp_NativeAdd::ICSetProp_NativeAdd(JitCode *stubCode, HandleTypeObject type,
michael@0 10066 size_t protoChainDepth,
michael@0 10067 HandleShape newShape,
michael@0 10068 uint32_t offset)
michael@0 10069 : ICUpdatedStub(SetProp_NativeAdd, stubCode),
michael@0 10070 type_(type),
michael@0 10071 newShape_(newShape),
michael@0 10072 offset_(offset)
michael@0 10073 {
michael@0 10074 JS_ASSERT(protoChainDepth <= MAX_PROTO_CHAIN_DEPTH);
michael@0 10075 extra_ = protoChainDepth;
michael@0 10076 }
michael@0 10077
michael@0 10078 template <size_t ProtoChainDepth>
michael@0 10079 ICSetProp_NativeAddImpl<ProtoChainDepth>::ICSetProp_NativeAddImpl(JitCode *stubCode,
michael@0 10080 HandleTypeObject type,
michael@0 10081 const AutoShapeVector *shapes,
michael@0 10082 HandleShape newShape,
michael@0 10083 uint32_t offset)
michael@0 10084 : ICSetProp_NativeAdd(stubCode, type, ProtoChainDepth, newShape, offset)
michael@0 10085 {
michael@0 10086 JS_ASSERT(shapes->length() == NumShapes);
michael@0 10087 for (size_t i = 0; i < NumShapes; i++)
michael@0 10088 shapes_[i].init((*shapes)[i]);
michael@0 10089 }
michael@0 10090
michael@0 10091 ICSetPropNativeAddCompiler::ICSetPropNativeAddCompiler(JSContext *cx, HandleObject obj,
michael@0 10092 HandleShape oldShape,
michael@0 10093 size_t protoChainDepth,
michael@0 10094 bool isFixedSlot,
michael@0 10095 uint32_t offset)
michael@0 10096 : ICStubCompiler(cx, ICStub::SetProp_NativeAdd),
michael@0 10097 obj_(cx, obj),
michael@0 10098 oldShape_(cx, oldShape),
michael@0 10099 protoChainDepth_(protoChainDepth),
michael@0 10100 isFixedSlot_(isFixedSlot),
michael@0 10101 offset_(offset)
michael@0 10102 {
michael@0 10103 JS_ASSERT(protoChainDepth_ <= ICSetProp_NativeAdd::MAX_PROTO_CHAIN_DEPTH);
michael@0 10104 }
michael@0 10105
michael@0 10106 ICSetPropCallSetter::ICSetPropCallSetter(Kind kind, JitCode *stubCode, HandleShape shape,
michael@0 10107 HandleObject holder, HandleShape holderShape,
michael@0 10108 HandleFunction setter, uint32_t pcOffset)
michael@0 10109 : ICStub(kind, stubCode),
michael@0 10110 shape_(shape),
michael@0 10111 holder_(holder),
michael@0 10112 holderShape_(holderShape),
michael@0 10113 setter_(setter),
michael@0 10114 pcOffset_(pcOffset)
michael@0 10115 {
michael@0 10116 JS_ASSERT(kind == ICStub::SetProp_CallScripted || kind == ICStub::SetProp_CallNative);
michael@0 10117 }
michael@0 10118
michael@0 10119 ICCall_Scripted::ICCall_Scripted(JitCode *stubCode, ICStub *firstMonitorStub,
michael@0 10120 HandleScript calleeScript, HandleObject templateObject,
michael@0 10121 uint32_t pcOffset)
michael@0 10122 : ICMonitoredStub(ICStub::Call_Scripted, stubCode, firstMonitorStub),
michael@0 10123 calleeScript_(calleeScript),
michael@0 10124 templateObject_(templateObject),
michael@0 10125 pcOffset_(pcOffset)
michael@0 10126 { }
michael@0 10127
michael@0 10128 ICCall_Native::ICCall_Native(JitCode *stubCode, ICStub *firstMonitorStub,
michael@0 10129 HandleFunction callee, HandleObject templateObject,
michael@0 10130 uint32_t pcOffset)
michael@0 10131 : ICMonitoredStub(ICStub::Call_Native, stubCode, firstMonitorStub),
michael@0 10132 callee_(callee),
michael@0 10133 templateObject_(templateObject),
michael@0 10134 pcOffset_(pcOffset)
michael@0 10135 {
michael@0 10136 #ifdef JS_ARM_SIMULATOR
michael@0 10137 // The simulator requires VM calls to be redirected to a special swi
michael@0 10138 // instruction to handle them. To make this work, we store the redirected
michael@0 10139 // pointer in the stub.
michael@0 10140 native_ = Simulator::RedirectNativeFunction(JS_FUNC_TO_DATA_PTR(void *, callee->native()),
michael@0 10141 Args_General3);
michael@0 10142 #endif
michael@0 10143 }
michael@0 10144
michael@0 10145 ICGetPropCallDOMProxyNativeStub::ICGetPropCallDOMProxyNativeStub(Kind kind, JitCode *stubCode,
michael@0 10146 ICStub *firstMonitorStub,
michael@0 10147 HandleShape shape,
michael@0 10148 BaseProxyHandler *proxyHandler,
michael@0 10149 HandleShape expandoShape,
michael@0 10150 HandleObject holder,
michael@0 10151 HandleShape holderShape,
michael@0 10152 HandleFunction getter,
michael@0 10153 uint32_t pcOffset)
michael@0 10154 : ICMonitoredStub(kind, stubCode, firstMonitorStub),
michael@0 10155 shape_(shape),
michael@0 10156 proxyHandler_(proxyHandler),
michael@0 10157 expandoShape_(expandoShape),
michael@0 10158 holder_(holder),
michael@0 10159 holderShape_(holderShape),
michael@0 10160 getter_(getter),
michael@0 10161 pcOffset_(pcOffset)
michael@0 10162 { }
michael@0 10163
michael@0 10164 ICGetPropCallDOMProxyNativeCompiler::ICGetPropCallDOMProxyNativeCompiler(JSContext *cx,
michael@0 10165 ICStub::Kind kind,
michael@0 10166 ICStub *firstMonitorStub,
michael@0 10167 Handle<ProxyObject*> proxy,
michael@0 10168 HandleObject holder,
michael@0 10169 HandleFunction getter,
michael@0 10170 uint32_t pcOffset)
michael@0 10171 : ICStubCompiler(cx, kind),
michael@0 10172 firstMonitorStub_(firstMonitorStub),
michael@0 10173 proxy_(cx, proxy),
michael@0 10174 holder_(cx, holder),
michael@0 10175 getter_(cx, getter),
michael@0 10176 pcOffset_(pcOffset)
michael@0 10177 {
michael@0 10178 JS_ASSERT(kind == ICStub::GetProp_CallDOMProxyNative ||
michael@0 10179 kind == ICStub::GetProp_CallDOMProxyWithGenerationNative);
michael@0 10180 JS_ASSERT(proxy_->handler()->family() == GetDOMProxyHandlerFamily());
michael@0 10181 }
michael@0 10182
michael@0 10183 ICGetProp_DOMProxyShadowed::ICGetProp_DOMProxyShadowed(JitCode *stubCode,
michael@0 10184 ICStub *firstMonitorStub,
michael@0 10185 HandleShape shape,
michael@0 10186 BaseProxyHandler *proxyHandler,
michael@0 10187 HandlePropertyName name,
michael@0 10188 uint32_t pcOffset)
michael@0 10189 : ICMonitoredStub(ICStub::GetProp_DOMProxyShadowed, stubCode, firstMonitorStub),
michael@0 10190 shape_(shape),
michael@0 10191 proxyHandler_(proxyHandler),
michael@0 10192 name_(name),
michael@0 10193 pcOffset_(pcOffset)
michael@0 10194 { }
michael@0 10195
michael@0 10196 //
michael@0 10197 // Rest_Fallback
michael@0 10198 //
michael@0 10199
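// Builds the rest-argument array: for |function f(a, ...rest)| called as f(1, 2, 3),
// the array is [2, 3]. numFormalArgs() includes the rest binding itself, hence the
// -1 below; with no extra actuals the array is simply empty.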
michael@0 10200 static bool
michael@0 10201 DoRestFallback(JSContext *cx, ICRest_Fallback *stub, BaselineFrame *frame, MutableHandleValue res)
michael@0 10202 {
michael@0 10203 unsigned numFormals = frame->numFormalArgs() - 1;
michael@0 10204 unsigned numActuals = frame->numActualArgs();
michael@0 10205 unsigned numRest = numActuals > numFormals ? numActuals - numFormals : 0;
michael@0 10206 Value *rest = frame->argv() + numFormals;
michael@0 10207
michael@0 10208 JSObject *obj = NewDenseCopiedArray(cx, numRest, rest, nullptr);
michael@0 10209 if (!obj)
michael@0 10210 return false;
michael@0 10211 types::FixRestArgumentsType(cx, obj);
michael@0 10212 res.setObject(*obj);
michael@0 10213 return true;
michael@0 10214 }
michael@0 10215
michael@0 10216 typedef bool (*DoRestFallbackFn)(JSContext *, ICRest_Fallback *, BaselineFrame *,
michael@0 10217 MutableHandleValue);
michael@0 10218 static const VMFunction DoRestFallbackInfo =
michael@0 10219 FunctionInfo<DoRestFallbackFn>(DoRestFallback);
michael@0 10220
michael@0 10221 bool
michael@0 10222 ICRest_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
michael@0 10223 {
michael@0 10224 EmitRestoreTailCallReg(masm);
michael@0 10225
michael@0 10226 masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
michael@0 10227 masm.push(BaselineStubReg);
michael@0 10228
michael@0 10229 return tailCallVM(DoRestFallbackInfo, masm);
michael@0 10230 }
michael@0 10231
michael@0 10232 } // namespace jit
michael@0 10233 } // namespace js
