js/src/jit/BaselineIC.cpp

changeset 0
6474c204b198
equal deleted inserted replaced
-1:000000000000 0:68361c5841e8
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sts=4 et sw=4 tw=99:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6
7 #include "jit/BaselineIC.h"
8
9 #include "mozilla/DebugOnly.h"
10 #include "mozilla/TemplateLib.h"
11
12 #include "jslibmath.h"
13 #include "jstypes.h"
14
15 #include "builtin/Eval.h"
16 #include "jit/BaselineDebugModeOSR.h"
17 #include "jit/BaselineHelpers.h"
18 #include "jit/BaselineJIT.h"
19 #include "jit/IonLinker.h"
20 #include "jit/IonSpewer.h"
21 #include "jit/Lowering.h"
22 #ifdef JS_ION_PERF
23 # include "jit/PerfSpewer.h"
24 #endif
25 #include "jit/VMFunctions.h"
26 #include "vm/Opcodes.h"
27
28 #include "jsboolinlines.h"
29 #include "jsscriptinlines.h"
30
31 #include "jit/IonFrames-inl.h"
32 #include "vm/Interpreter-inl.h"
33 #include "vm/ScopeObject-inl.h"
34 #include "vm/StringObject-inl.h"
35
36 using mozilla::DebugOnly;
37
38 namespace js {
39 namespace jit {
40
41 #ifdef DEBUG
42 void
43 FallbackICSpew(JSContext *cx, ICFallbackStub *stub, const char *fmt, ...)
44 {
45 if (IonSpewEnabled(IonSpew_BaselineICFallback)) {
46 RootedScript script(cx, GetTopIonJSScript(cx));
47 jsbytecode *pc = stub->icEntry()->pc(script);
48
49 char fmtbuf[100];
50 va_list args;
51 va_start(args, fmt);
52 vsnprintf(fmtbuf, 100, fmt, args);
53 va_end(args);
54
55 IonSpew(IonSpew_BaselineICFallback,
56 "Fallback hit for (%s:%d) (pc=%d,line=%d,uses=%d,stubs=%d): %s",
57 script->filename(),
58 script->lineno(),
59 (int) script->pcToOffset(pc),
60 PCToLineNumber(script, pc),
61 script->getUseCount(),
62 (int) stub->numOptimizedStubs(),
63 fmtbuf);
64 }
65 }
66
67 void
68 TypeFallbackICSpew(JSContext *cx, ICTypeMonitor_Fallback *stub, const char *fmt, ...)
69 {
70 if (IonSpewEnabled(IonSpew_BaselineICFallback)) {
71 RootedScript script(cx, GetTopIonJSScript(cx));
72 jsbytecode *pc = stub->icEntry()->pc(script);
73
74 char fmtbuf[100];
75 va_list args;
76 va_start(args, fmt);
77 vsnprintf(fmtbuf, 100, fmt, args);
78 va_end(args);
79
80 IonSpew(IonSpew_BaselineICFallback,
81 "Type monitor fallback hit for (%s:%d) (pc=%d,line=%d,uses=%d,stubs=%d): %s",
82 script->filename(),
83 script->lineno(),
84 (int) script->pcToOffset(pc),
85 PCToLineNumber(script, pc),
86 script->getUseCount(),
87 (int) stub->numOptimizedMonitorStubs(),
88 fmtbuf);
89 }
90 }
91
92 #else
93 #define FallbackICSpew(...)
94 #define TypeFallbackICSpew(...)
95 #endif
96
97
// Return the fallback stub that terminates this entry's stub chain; the
// first stub links forward to it via getChainFallback().
98 ICFallbackStub *
99 ICEntry::fallbackStub() const
100 {
101 return firstStub()->getChainFallback();
102 }
103
104
// Advance to the next stub on the chain. Must not be called after the
// iterator has walked off the end (currentStub_ == nullptr).
105 ICStubConstIterator &
106 ICStubConstIterator::operator++()
107 {
108 JS_ASSERT(currentStub_ != nullptr);
109 currentStub_ = currentStub_->next();
110 return *this;
111 }
112
113
// Construct an iterator over |fallbackStub|'s chain. When |end| is true the
// iterator starts at the fallback stub itself (the chain terminator);
// otherwise it starts at the owning ICEntry's first stub.
114 ICStubIterator::ICStubIterator(ICFallbackStub *fallbackStub, bool end)
115 : icEntry_(fallbackStub->icEntry()),
116 fallbackStub_(fallbackStub),
117 previousStub_(nullptr),
118 currentStub_(end ? fallbackStub : icEntry_->firstStub()),
119 unlinked_(false)
120 { }
121
// Advance the iterator, maintaining previousStub_ so unlink() can splice the
// current stub out of the chain.
122 ICStubIterator &
123 ICStubIterator::operator++()
124 {
125 JS_ASSERT(currentStub_->next() != nullptr);
// If the current stub was just unlinked, previousStub_ already points at the
// stub preceding the removed one and must not be advanced past it.
126 if (!unlinked_)
127 previousStub_ = currentStub_;
128 currentStub_ = currentStub_->next();
129 unlinked_ = false;
130 return *this;
131 }
132
// Remove the stub at the current position from the chain. The fallback stub
// itself can never be unlinked, and unlink() may be called at most once per
// position before advancing.
133 void
134 ICStubIterator::unlink(JSContext *cx)
135 {
136 JS_ASSERT(currentStub_->next() != nullptr);
137 JS_ASSERT(currentStub_ != fallbackStub_);
138 JS_ASSERT(!unlinked_);
139
140 fallbackStub_->unlinkStub(cx->zone(), previousStub_, currentStub_);
141
142 // Mark the current iterator position as unlinked, so operator++ works properly.
143 unlinked_ = true;
144 }
145
146
// Trace this stub's JitCode. The code pointer is marked through a local
// temporary since the stub stores it in raw (unbarriered) form.
147 void
148 ICStub::markCode(JSTracer *trc, const char *name)
149 {
150 JitCode *stubJitCode = jitCode();
151 MarkJitCodeUnbarriered(trc, &stubJitCode, name);
152 }
153
// Replace this stub's code pointer with |code|'s entry point, pre-barriering
// the old code so incremental GC sees the edge being removed.
154 void
155 ICStub::updateCode(JitCode *code)
156 {
157 // Write barrier on the old code.
158 #ifdef JSGC_INCREMENTAL
159 JitCode::writeBarrierPre(jitCode());
160 #endif
161 stubCode_ = code->raw();
162 }
163
// Trace all GC things reachable from this stub: its JitCode, any attached
// monitor/update chains, and the per-kind payload (shapes, objects, scripts,
// type objects, values) held by each specific stub class.
164 /* static */ void
165 ICStub::trace(JSTracer *trc)
166 {
167 markCode(trc, "baseline-stub-jitcode");
168
169 // If the stub is a monitored fallback stub, then mark the monitor ICs hanging
170 // off of that stub. We don't need to worry about the regular monitored stubs,
171 // because the regular monitored stubs will always have a monitored fallback stub
172 // that references the same stub chain.
173 if (isMonitoredFallback()) {
174 ICTypeMonitor_Fallback *lastMonStub = toMonitoredFallbackStub()->fallbackMonitorStub();
175 for (ICStubConstIterator iter = lastMonStub->firstMonitorStub(); !iter.atEnd(); iter++) {
176 JS_ASSERT_IF(iter->next() == nullptr, *iter == lastMonStub);
177 iter->trace(trc);
178 }
179 }
180
// Updated stubs own a type-update chain terminated by a TypeUpdate_Fallback.
181 if (isUpdated()) {
182 for (ICStubConstIterator iter = toUpdatedStub()->firstUpdateStub(); !iter.atEnd(); iter++) {
183 JS_ASSERT_IF(iter->next() == nullptr, iter->isTypeUpdate_Fallback());
184 iter->trace(trc);
185 }
186 }
187
// Per-kind payload tracing. Stub kinds with no GC-thing payload fall through
// to the default case and trace nothing further.
188 switch (kind()) {
189 case ICStub::Call_Scripted: {
190 ICCall_Scripted *callStub = toCall_Scripted();
191 MarkScript(trc, &callStub->calleeScript(), "baseline-callscripted-callee");
192 if (callStub->templateObject())
193 MarkObject(trc, &callStub->templateObject(), "baseline-callscripted-template");
194 break;
195 }
196 case ICStub::Call_Native: {
197 ICCall_Native *callStub = toCall_Native();
198 MarkObject(trc, &callStub->callee(), "baseline-callnative-callee");
199 if (callStub->templateObject())
200 MarkObject(trc, &callStub->templateObject(), "baseline-callnative-template");
201 break;
202 }
203 case ICStub::GetElem_NativeSlot: {
204 ICGetElem_NativeSlot *getElemStub = toGetElem_NativeSlot();
205 MarkShape(trc, &getElemStub->shape(), "baseline-getelem-native-shape");
206 MarkString(trc, &getElemStub->name(), "baseline-getelem-native-name");
207 break;
208 }
209 case ICStub::GetElem_NativePrototypeSlot: {
210 ICGetElem_NativePrototypeSlot *getElemStub = toGetElem_NativePrototypeSlot();
211 MarkShape(trc, &getElemStub->shape(), "baseline-getelem-nativeproto-shape");
212 MarkString(trc, &getElemStub->name(), "baseline-getelem-nativeproto-name");
213 MarkObject(trc, &getElemStub->holder(), "baseline-getelem-nativeproto-holder");
214 MarkShape(trc, &getElemStub->holderShape(), "baseline-getelem-nativeproto-holdershape");
215 break;
216 }
217 case ICStub::GetElem_NativePrototypeCallNative:
218 case ICStub::GetElem_NativePrototypeCallScripted: {
// Two kinds share the same layout; reinterpret_cast to the common base.
219 ICGetElemNativePrototypeCallStub *callStub =
220 reinterpret_cast<ICGetElemNativePrototypeCallStub *>(this);
221 MarkShape(trc, &callStub->shape(), "baseline-getelem-nativeprotocall-shape");
222 MarkString(trc, &callStub->name(), "baseline-getelem-nativeprotocall-name");
223 MarkObject(trc, &callStub->getter(), "baseline-getelem-nativeprotocall-getter");
224 MarkObject(trc, &callStub->holder(), "baseline-getelem-nativeprotocall-holder");
225 MarkShape(trc, &callStub->holderShape(), "baseline-getelem-nativeprotocall-holdershape");
226 break;
227 }
228 case ICStub::GetElem_Dense: {
229 ICGetElem_Dense *getElemStub = toGetElem_Dense();
230 MarkShape(trc, &getElemStub->shape(), "baseline-getelem-dense-shape");
231 break;
232 }
233 case ICStub::GetElem_TypedArray: {
234 ICGetElem_TypedArray *getElemStub = toGetElem_TypedArray();
235 MarkShape(trc, &getElemStub->shape(), "baseline-getelem-typedarray-shape");
236 break;
237 }
238 case ICStub::SetElem_Dense: {
239 ICSetElem_Dense *setElemStub = toSetElem_Dense();
// NOTE(review): this trace label says "getelem" although this is the
// SetElem_Dense case — looks like a copy/paste of the GetElem label.
// Harmless for GC correctness but misleading in trace output; confirm
// before relying on label names in tooling.
240 MarkShape(trc, &setElemStub->shape(), "baseline-getelem-dense-shape");
241 MarkTypeObject(trc, &setElemStub->type(), "baseline-setelem-dense-type");
242 break;
243 }
244 case ICStub::SetElem_DenseAdd: {
245 ICSetElem_DenseAdd *setElemStub = toSetElem_DenseAdd();
246 MarkTypeObject(trc, &setElemStub->type(), "baseline-setelem-denseadd-type");
247
248 JS_STATIC_ASSERT(ICSetElem_DenseAdd::MAX_PROTO_CHAIN_DEPTH == 4);
249
// The impl class is templated on proto-chain depth; dispatch on the stored
// depth to trace the right number of shapes.
250 switch (setElemStub->protoChainDepth()) {
251 case 0: setElemStub->toImpl<0>()->traceShapes(trc); break;
252 case 1: setElemStub->toImpl<1>()->traceShapes(trc); break;
253 case 2: setElemStub->toImpl<2>()->traceShapes(trc); break;
254 case 3: setElemStub->toImpl<3>()->traceShapes(trc); break;
255 case 4: setElemStub->toImpl<4>()->traceShapes(trc); break;
256 default: MOZ_ASSUME_UNREACHABLE("Invalid proto stub.");
257 }
258 break;
259 }
260 case ICStub::SetElem_TypedArray: {
261 ICSetElem_TypedArray *setElemStub = toSetElem_TypedArray();
262 MarkShape(trc, &setElemStub->shape(), "baseline-setelem-typedarray-shape");
263 break;
264 }
265 case ICStub::TypeMonitor_SingleObject: {
266 ICTypeMonitor_SingleObject *monitorStub = toTypeMonitor_SingleObject();
267 MarkObject(trc, &monitorStub->object(), "baseline-monitor-singleobject");
268 break;
269 }
270 case ICStub::TypeMonitor_TypeObject: {
271 ICTypeMonitor_TypeObject *monitorStub = toTypeMonitor_TypeObject();
272 MarkTypeObject(trc, &monitorStub->type(), "baseline-monitor-typeobject");
273 break;
274 }
275 case ICStub::TypeUpdate_SingleObject: {
276 ICTypeUpdate_SingleObject *updateStub = toTypeUpdate_SingleObject();
277 MarkObject(trc, &updateStub->object(), "baseline-update-singleobject");
278 break;
279 }
280 case ICStub::TypeUpdate_TypeObject: {
281 ICTypeUpdate_TypeObject *updateStub = toTypeUpdate_TypeObject();
282 MarkTypeObject(trc, &updateStub->type(), "baseline-update-typeobject");
283 break;
284 }
285 case ICStub::Profiler_PushFunction: {
286 ICProfiler_PushFunction *pushFunStub = toProfiler_PushFunction();
287 MarkScript(trc, &pushFunStub->script(), "baseline-profilerpushfunction-stub-script");
288 break;
289 }
290 case ICStub::GetName_Global: {
291 ICGetName_Global *globalStub = toGetName_Global();
292 MarkShape(trc, &globalStub->shape(), "baseline-global-stub-shape");
293 break;
294 }
// GetName_Scope<N> stubs carry N+1 scope shapes; trace via the templated impl.
295 case ICStub::GetName_Scope0:
296 static_cast<ICGetName_Scope<0>*>(this)->traceScopes(trc);
297 break;
298 case ICStub::GetName_Scope1:
299 static_cast<ICGetName_Scope<1>*>(this)->traceScopes(trc);
300 break;
301 case ICStub::GetName_Scope2:
302 static_cast<ICGetName_Scope<2>*>(this)->traceScopes(trc);
303 break;
304 case ICStub::GetName_Scope3:
305 static_cast<ICGetName_Scope<3>*>(this)->traceScopes(trc);
306 break;
307 case ICStub::GetName_Scope4:
308 static_cast<ICGetName_Scope<4>*>(this)->traceScopes(trc);
309 break;
310 case ICStub::GetName_Scope5:
311 static_cast<ICGetName_Scope<5>*>(this)->traceScopes(trc);
312 break;
313 case ICStub::GetName_Scope6:
314 static_cast<ICGetName_Scope<6>*>(this)->traceScopes(trc);
315 break;
316 case ICStub::GetIntrinsic_Constant: {
317 ICGetIntrinsic_Constant *constantStub = toGetIntrinsic_Constant();
318 gc::MarkValue(trc, &constantStub->value(), "baseline-getintrinsic-constant-value");
319 break;
320 }
321 case ICStub::GetProp_Primitive: {
322 ICGetProp_Primitive *propStub = toGetProp_Primitive();
323 MarkShape(trc, &propStub->protoShape(), "baseline-getprop-primitive-stub-shape");
324 break;
325 }
326 case ICStub::GetProp_Native: {
327 ICGetProp_Native *propStub = toGetProp_Native();
328 MarkShape(trc, &propStub->shape(), "baseline-getpropnative-stub-shape");
329 break;
330 }
331 case ICStub::GetProp_NativePrototype: {
332 ICGetProp_NativePrototype *propStub = toGetProp_NativePrototype();
333 MarkShape(trc, &propStub->shape(), "baseline-getpropnativeproto-stub-shape");
334 MarkObject(trc, &propStub->holder(), "baseline-getpropnativeproto-stub-holder");
335 MarkShape(trc, &propStub->holderShape(), "baseline-getpropnativeproto-stub-holdershape");
336 break;
337 }
338 case ICStub::GetProp_CallDOMProxyNative:
339 case ICStub::GetProp_CallDOMProxyWithGenerationNative: {
340 ICGetPropCallDOMProxyNativeStub *propStub;
341 if (kind() == ICStub::GetProp_CallDOMProxyNative)
342 propStub = toGetProp_CallDOMProxyNative();
343 else
344 propStub = toGetProp_CallDOMProxyWithGenerationNative();
345 MarkShape(trc, &propStub->shape(), "baseline-getproplistbasenative-stub-shape");
// The expando shape is optional; only trace it when present.
346 if (propStub->expandoShape()) {
347 MarkShape(trc, &propStub->expandoShape(),
348 "baseline-getproplistbasenative-stub-expandoshape");
349 }
350 MarkObject(trc, &propStub->holder(), "baseline-getproplistbasenative-stub-holder");
351 MarkShape(trc, &propStub->holderShape(), "baseline-getproplistbasenative-stub-holdershape");
352 MarkObject(trc, &propStub->getter(), "baseline-getproplistbasenative-stub-getter");
353 break;
354 }
355 case ICStub::GetProp_DOMProxyShadowed: {
356 ICGetProp_DOMProxyShadowed *propStub = toGetProp_DOMProxyShadowed();
357 MarkShape(trc, &propStub->shape(), "baseline-getproplistbaseshadowed-stub-shape");
358 MarkString(trc, &propStub->name(), "baseline-getproplistbaseshadowed-stub-name");
359 break;
360 }
361 case ICStub::GetProp_CallScripted: {
362 ICGetProp_CallScripted *callStub = toGetProp_CallScripted();
363 MarkShape(trc, &callStub->receiverShape(), "baseline-getpropcallscripted-stub-receivershape");
364 MarkObject(trc, &callStub->holder(), "baseline-getpropcallscripted-stub-holder");
365 MarkShape(trc, &callStub->holderShape(), "baseline-getpropcallscripted-stub-holdershape");
366 MarkObject(trc, &callStub->getter(), "baseline-getpropcallscripted-stub-getter");
367 break;
368 }
369 case ICStub::GetProp_CallNative: {
370 ICGetProp_CallNative *callStub = toGetProp_CallNative();
371 MarkObject(trc, &callStub->holder(), "baseline-getpropcallnative-stub-holder");
372 MarkShape(trc, &callStub->holderShape(), "baseline-getpropcallnative-stub-holdershape");
373 MarkObject(trc, &callStub->getter(), "baseline-getpropcallnative-stub-getter");
374 break;
375 }
376 case ICStub::GetProp_CallNativePrototype: {
377 ICGetProp_CallNativePrototype *callStub = toGetProp_CallNativePrototype();
378 MarkShape(trc, &callStub->receiverShape(), "baseline-getpropcallnativeproto-stub-receivershape");
379 MarkObject(trc, &callStub->holder(), "baseline-getpropcallnativeproto-stub-holder");
380 MarkShape(trc, &callStub->holderShape(), "baseline-getpropcallnativeproto-stub-holdershape");
381 MarkObject(trc, &callStub->getter(), "baseline-getpropcallnativeproto-stub-getter");
382 break;
383 }
384 case ICStub::SetProp_Native: {
385 ICSetProp_Native *propStub = toSetProp_Native();
386 MarkShape(trc, &propStub->shape(), "baseline-setpropnative-stub-shape");
387 MarkTypeObject(trc, &propStub->type(), "baseline-setpropnative-stub-type");
388 break;
389 }
390 case ICStub::SetProp_NativeAdd: {
391 ICSetProp_NativeAdd *propStub = toSetProp_NativeAdd();
392 MarkTypeObject(trc, &propStub->type(), "baseline-setpropnativeadd-stub-type");
393 MarkShape(trc, &propStub->newShape(), "baseline-setpropnativeadd-stub-newshape");
394 JS_STATIC_ASSERT(ICSetProp_NativeAdd::MAX_PROTO_CHAIN_DEPTH == 4);
// Same depth-templated dispatch as SetElem_DenseAdd above.
395 switch (propStub->protoChainDepth()) {
396 case 0: propStub->toImpl<0>()->traceShapes(trc); break;
397 case 1: propStub->toImpl<1>()->traceShapes(trc); break;
398 case 2: propStub->toImpl<2>()->traceShapes(trc); break;
399 case 3: propStub->toImpl<3>()->traceShapes(trc); break;
400 case 4: propStub->toImpl<4>()->traceShapes(trc); break;
401 default: MOZ_ASSUME_UNREACHABLE("Invalid proto stub.");
402 }
403 break;
404 }
405 case ICStub::SetProp_CallScripted: {
406 ICSetProp_CallScripted *callStub = toSetProp_CallScripted();
407 MarkShape(trc, &callStub->shape(), "baseline-setpropcallscripted-stub-shape");
408 MarkObject(trc, &callStub->holder(), "baseline-setpropcallscripted-stub-holder");
409 MarkShape(trc, &callStub->holderShape(), "baseline-setpropcallscripted-stub-holdershape");
410 MarkObject(trc, &callStub->setter(), "baseline-setpropcallscripted-stub-setter");
411 break;
412 }
413 case ICStub::SetProp_CallNative: {
414 ICSetProp_CallNative *callStub = toSetProp_CallNative();
415 MarkShape(trc, &callStub->shape(), "baseline-setpropcallnative-stub-shape");
416 MarkObject(trc, &callStub->holder(), "baseline-setpropcallnative-stub-holder");
417 MarkShape(trc, &callStub->holderShape(), "baseline-setpropcallnative-stub-holdershape");
418 MarkObject(trc, &callStub->setter(), "baseline-setpropcallnative-stub-setter");
419 break;
420 }
421 case ICStub::NewArray_Fallback: {
422 ICNewArray_Fallback *stub = toNewArray_Fallback();
423 MarkObject(trc, &stub->templateObject(), "baseline-newarray-template");
424 break;
425 }
426 case ICStub::NewObject_Fallback: {
427 ICNewObject_Fallback *stub = toNewObject_Fallback();
428 MarkObject(trc, &stub->templateObject(), "baseline-newobject-template");
429 break;
430 }
431 case ICStub::Rest_Fallback: {
432 ICRest_Fallback *stub = toRest_Fallback();
433 MarkObject(trc, &stub->templateObject(), "baseline-rest-template");
434 break;
435 }
436 default:
437 break;
438 }
439 }
440
// Splice |stub| out of this fallback stub's chain. |prev| is the stub
// immediately preceding it, or nullptr if |stub| is the entry's first stub.
// Also maintains lastStubPtrAddr_, the stub count, incremental-GC barriers,
// and the firstMonitorStub_ pointer of call-capable monitored stubs.
441 void
442 ICFallbackStub::unlinkStub(Zone *zone, ICStub *prev, ICStub *stub)
443 {
444 JS_ASSERT(stub->next());
445
446 // If stub is the last optimized stub, update lastStubPtrAddr.
447 if (stub->next() == this) {
448 JS_ASSERT(lastStubPtrAddr_ == stub->addressOfNext());
449 if (prev)
450 lastStubPtrAddr_ = prev->addressOfNext();
451 else
452 lastStubPtrAddr_ = icEntry()->addressOfFirstStub();
453 *lastStubPtrAddr_ = this;
454 } else {
455 if (prev) {
456 JS_ASSERT(prev->next() == stub);
457 prev->setNext(stub->next());
458 } else {
459 JS_ASSERT(icEntry()->firstStub() == stub);
460 icEntry()->setFirstStub(stub->next());
461 }
462 }
463
464 JS_ASSERT(numOptimizedStubs_ > 0);
465 numOptimizedStubs_--;
466
467 if (zone->needsBarrier()) {
468 // We are removing edges from ICStub to gcthings. Perform one final trace
469 // of the stub for incremental GC, as it must know about those edges.
470 stub->trace(zone->barrierTracer());
471 }
472
473 if (ICStub::CanMakeCalls(stub->kind()) && stub->isMonitored()) {
474 // This stub can make calls so we can return to it if it's on the stack.
475 // We just have to reset its firstMonitorStub_ field to avoid a stale
476 // pointer when purgeOptimizedStubs destroys all optimized monitor
477 // stubs (unlinked stubs won't be updated).
478 ICTypeMonitor_Fallback *monitorFallback = toMonitoredFallbackStub()->fallbackMonitorStub();
479 stub->toMonitoredStub()->resetFirstMonitorStub(monitorFallback);
480 }
481
482 #ifdef DEBUG
483 // Poison stub code to ensure we don't call this stub again. However, if this
484 // stub can make calls, a pointer to it may be stored in a stub frame on the
485 // stack, so we can't touch the stubCode_ or GC will crash when marking this
486 // pointer.
487 if (!ICStub::CanMakeCalls(stub->kind()))
488 stub->stubCode_ = (uint8_t *)0xbad;
489 #endif
490 }
491
// Unlink every optimized stub of the given kind from this chain. The
// iterator's unlink() bookkeeping keeps traversal valid across removals.
492 void
493 ICFallbackStub::unlinkStubsWithKind(JSContext *cx, ICStub::Kind kind)
494 {
495 for (ICStubIterator iter = beginChain(); !iter.atEnd(); iter++) {
496 if (iter->kind() == kind)
497 iter.unlink(cx);
498 }
499 }
500
// Discard all optimized type-monitor stubs so the monitor chain consists of
// just this fallback stub, and repoint every reference to the old chain
// (monitored stubs on the main chain, or the ICEntry when this monitor
// stub has no associated main fallback stub).
501 void
502 ICTypeMonitor_Fallback::resetMonitorStubChain(Zone *zone)
503 {
504 if (zone->needsBarrier()) {
505 // We are removing edges from monitored stubs to gcthings (JitCode).
506 // Perform one final trace of all monitor stubs for incremental GC,
507 // as it must know about those edges.
508 for (ICStub *s = firstMonitorStub_; !s->isTypeMonitor_Fallback(); s = s->next())
509 s->trace(zone->barrierTracer());
510 }
511
512 firstMonitorStub_ = this;
513 numOptimizedMonitorStubs_ = 0;
514
515 if (hasFallbackStub_) {
516 lastMonitorStubPtrAddr_ = nullptr;
517
518 // Reset firstMonitorStub_ field of all monitored stubs.
519 for (ICStubConstIterator iter = mainFallbackStub_->beginChainConst();
520 !iter.atEnd(); iter++)
521 {
522 if (!iter->isMonitored())
523 continue;
524 iter->toMonitoredStub()->resetFirstMonitorStub(this);
525 }
526 } else {
// No main fallback stub: this monitor stub is itself the entry's chain.
527 icEntry_->setFirstStub(this);
528 lastMonitorStubPtrAddr_ = icEntry_->addressOfFirstStub();
529 }
530 }
531
// Construct a monitored stub, recording the head of the type-monitor chain
// it feeds its result values into.
532 ICMonitoredStub::ICMonitoredStub(Kind kind, JitCode *stubCode, ICStub *firstMonitorStub)
533 : ICStub(kind, ICStub::Monitored, stubCode),
534 firstMonitorStub_(firstMonitorStub)
535 {
536 // If the first monitored stub is a ICTypeMonitor_Fallback stub, then
537 // double check that _its_ firstMonitorStub is the same as this one.
538 JS_ASSERT_IF(firstMonitorStub_->isTypeMonitor_Fallback(),
539 firstMonitorStub_->toTypeMonitor_Fallback()->firstMonitorStub() ==
540 firstMonitorStub_);
541 }
542
543 bool
544 ICMonitoredFallbackStub::initMonitoringChain(JSContext *cx, ICStubSpace *space)
545 {
546 JS_ASSERT(fallbackMonitorStub_ == nullptr);
547
548 ICTypeMonitor_Fallback::Compiler compiler(cx, this);
549 ICTypeMonitor_Fallback *stub = compiler.getStub(space);
550 if (!stub)
551 return false;
552 fallbackMonitorStub_ = stub;
553 return true;
554 }
555
// Forward to the fallback monitor stub, which owns the monitor chain for
// this IC.
556 bool
557 ICMonitoredFallbackStub::addMonitorStubForValue(JSContext *cx, JSScript *script, HandleValue val)
558 {
559 return fallbackMonitorStub_->addMonitorStubForValue(cx, script, val);
560 }
561
562 bool
563 ICUpdatedStub::initUpdatingChain(JSContext *cx, ICStubSpace *space)
564 {
565 JS_ASSERT(firstUpdateStub_ == nullptr);
566
567 ICTypeUpdate_Fallback::Compiler compiler(cx);
568 ICTypeUpdate_Fallback *stub = compiler.getStub(space);
569 if (!stub)
570 return false;
571
572 firstUpdateStub_ = stub;
573 return true;
574 }
575
// Return the JitCode for this compiler's stub kind, compiling and caching it
// in the JitCompartment on first use. Stubs with the same key share code.
// Returns nullptr on compilation or caching failure.
576 JitCode *
577 ICStubCompiler::getStubCode()
578 {
579 JitCompartment *comp = cx->compartment()->jitCompartment();
580
581 // Check for existing cached stubcode.
582 uint32_t stubKey = getKey();
583 JitCode *stubCode = comp->getStubCode(stubKey);
584 if (stubCode)
585 return stubCode;
586
587 // Compile new stubcode.
588 IonContext ictx(cx, nullptr);
589 MacroAssembler masm;
590 #ifdef JS_CODEGEN_ARM
591 masm.setSecondScratchReg(BaselineSecondScratchReg);
592 #endif
593
594 if (!generateStubCode(masm))
595 return nullptr;
596 Linker linker(masm);
597 AutoFlushICache afc("getStubCode");
// Rooted: linking can GC, and the code must survive until cached below.
598 Rooted<JitCode *> newStubCode(cx, linker.newCode<CanGC>(cx, JSC::BASELINE_CODE));
599 if (!newStubCode)
600 return nullptr;
601
602 // After generating code, run postGenerateStubCode()
603 if (!postGenerateStubCode(masm, newStubCode))
604 return nullptr;
605
606 // All barriers are emitted off-by-default, enable them if needed.
607 if (cx->zone()->needsBarrier())
608 newStubCode->togglePreBarriers(true);
609
610 // Cache newly compiled stubcode.
611 if (!comp->putStubCode(stubKey, newStubCode))
612 return nullptr;
613
// Sanity: only call-capable stub kinds may have entered a stub frame.
614 JS_ASSERT(entersStubFrame_ == ICStub::CanMakeCalls(kind));
615
616 #ifdef JS_ION_PERF
617 writePerfSpewerJitCodeProfile(newStubCode, "BaselineIC");
618 #endif
619
620 return newStubCode;
621 }
622
623 bool
624 ICStubCompiler::tailCallVM(const VMFunction &fun, MacroAssembler &masm)
625 {
626 JitCode *code = cx->runtime()->jitRuntime()->getVMWrapper(fun);
627 if (!code)
628 return false;
629
630 uint32_t argSize = fun.explicitStackSlots() * sizeof(void *);
631 EmitTailCallVM(code, masm, argSize);
632 return true;
633 }
634
635 bool
636 ICStubCompiler::callVM(const VMFunction &fun, MacroAssembler &masm)
637 {
638 JitCode *code = cx->runtime()->jitRuntime()->getVMWrapper(fun);
639 if (!code)
640 return false;
641
642 EmitCallVM(code, masm);
643 return true;
644 }
645
646 bool
647 ICStubCompiler::callTypeUpdateIC(MacroAssembler &masm, uint32_t objectOffset)
648 {
649 JitCode *code = cx->runtime()->jitRuntime()->getVMWrapper(DoTypeUpdateFallbackInfo);
650 if (!code)
651 return false;
652
653 EmitCallTypeUpdateIC(masm, code, objectOffset);
654 return true;
655 }
656
// Emit code entering a stub frame; records (in DEBUG builds) that this stub
// entered one, which getStubCode() cross-checks against CanMakeCalls.
657 void
658 ICStubCompiler::enterStubFrame(MacroAssembler &masm, Register scratch)
659 {
660 EmitEnterStubFrame(masm, scratch);
661 #ifdef DEBUG
662 entersStubFrame_ = true;
663 #endif
664 }
665
// Emit code leaving the stub frame entered by enterStubFrame().
666 void
667 ICStubCompiler::leaveStubFrame(MacroAssembler &masm, bool calledIntoIon)
668 {
669 JS_ASSERT(entersStubFrame_);
670 EmitLeaveStubFrame(masm, calledIntoIon);
671 }
672
// Emit only the head portion of the stub-frame epilogue (paired with
// leaveStubFrameCommonTail below).
673 void
674 ICStubCompiler::leaveStubFrameHead(MacroAssembler &masm, bool calledIntoIon)
675 {
676 JS_ASSERT(entersStubFrame_);
677 EmitLeaveStubFrameHead(masm, calledIntoIon);
678 }
679
// Emit the shared tail portion of the stub-frame epilogue (paired with
// leaveStubFrameHead above).
680 void
681 ICStubCompiler::leaveStubFrameCommonTail(MacroAssembler &masm)
682 {
683 JS_ASSERT(entersStubFrame_);
684 EmitLeaveStubFrameCommonTail(masm);
685 }
686
// Emit code that branches to |skip| unless SPS profiling is active for the
// current frame: the frame must have pushed an SPS entry and the profiler's
// enabled flag must be nonzero. Clobbers |scratch|.
687 void
688 ICStubCompiler::guardProfilingEnabled(MacroAssembler &masm, Register scratch, Label *skip)
689 {
690 // This should only be called from the following stubs.
691 JS_ASSERT(kind == ICStub::Call_Scripted ||
692 kind == ICStub::Call_AnyScripted ||
693 kind == ICStub::Call_Native ||
694 kind == ICStub::Call_ScriptedApplyArray ||
695 kind == ICStub::Call_ScriptedApplyArguments ||
696 kind == ICStub::Call_ScriptedFunCall ||
697 kind == ICStub::GetProp_CallScripted ||
698 kind == ICStub::GetProp_CallNative ||
699 kind == ICStub::GetProp_CallNativePrototype ||
700 kind == ICStub::GetProp_CallDOMProxyNative ||
701 kind == ICStub::GetElem_NativePrototypeCallNative ||
702 kind == ICStub::GetElem_NativePrototypeCallScripted ||
703 kind == ICStub::GetProp_CallDOMProxyWithGenerationNative ||
704 kind == ICStub::GetProp_DOMProxyShadowed ||
705 kind == ICStub::SetProp_CallScripted ||
706 kind == ICStub::SetProp_CallNative);
707
708 // Guard on bit in frame that indicates if the SPS frame was pushed in the first
709 // place. This code is expected to be called from within a stub that has already
710 // entered a stub frame.
711 JS_ASSERT(entersStubFrame_);
712 masm.loadPtr(Address(BaselineFrameReg, 0), scratch);
713 masm.branchTest32(Assembler::Zero,
714 Address(scratch, BaselineFrame::reverseOffsetOfFlags()),
715 Imm32(BaselineFrame::HAS_PUSHED_SPS_FRAME),
716 skip);
717
718 // Check if profiling is enabled
719 uint32_t *enabledAddr = cx->runtime()->spsProfiler.addressOfEnabled();
720 masm.branch32(Assembler::Equal, AbsoluteAddress(enabledAddr), Imm32(0), skip);
721 }
722
// Emit code updating the SPS profiler's current-PC index from the offset
// stored at |stubPcOffset| in the stub, guarded so it is skipped entirely
// when profiling is inactive. Clobbers |pcIdx| and |scratch|.
723 void
724 ICStubCompiler::emitProfilingUpdate(MacroAssembler &masm, Register pcIdx, Register scratch,
725 uint32_t stubPcOffset)
726 {
727 Label skipProfilerUpdate;
728
729 // Check if profiling is enabled.
730 guardProfilingEnabled(masm, scratch, &skipProfilerUpdate);
731
732 // Update profiling entry before leaving function.
733 masm.load32(Address(BaselineStubReg, stubPcOffset), pcIdx);
734 masm.spsUpdatePCIdx(&cx->runtime()->spsProfiler, pcIdx, scratch);
735
736 masm.bind(&skipProfilerUpdate);
737 }
738
739 void
740 ICStubCompiler::emitProfilingUpdate(MacroAssembler &masm, GeneralRegisterSet regs,
741 uint32_t stubPcOffset)
742 {
743 emitProfilingUpdate(masm, regs.takeAny(), regs.takeAny(), stubPcOffset);
744 }
745
746 #ifdef JSGC_GENERATIONAL
// Emit a generational-GC post-write barrier for storing |val| into |obj|:
// calls PostWriteBarrier(rt, obj) unless the stored value is not an object,
// |obj| itself is in the nursery, or the stored object lies outside the
// nursery range. Clobbers |scratch|; |saveRegs| names live registers to
// preserve across the ABI call.
747 inline bool
748 ICStubCompiler::emitPostWriteBarrierSlot(MacroAssembler &masm, Register obj, ValueOperand val,
749 Register scratch, GeneralRegisterSet saveRegs)
750 {
751 Nursery &nursery = cx->runtime()->gcNursery;
752
753 Label skipBarrier;
754 masm.branchTestObject(Assembler::NotEqual, val, &skipBarrier);
755
// Storing into a nursery object needs no barrier.
756 masm.branchPtrInNurseryRange(obj, scratch, &skipBarrier);
757
// Only nursery-allocated values need to be recorded in the store buffer.
758 Register valReg = masm.extractObject(val, scratch);
759 masm.branchPtr(Assembler::Below, valReg, ImmWord(nursery.start()), &skipBarrier);
760 masm.branchPtr(Assembler::AboveOrEqual, valReg, ImmWord(nursery.heapEnd()), &skipBarrier);
761
762 // void PostWriteBarrier(JSRuntime *rt, JSObject *obj);
763 #if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_MIPS)
764 saveRegs.add(BaselineTailCallReg);
765 #endif
// Only volatile (caller-saved) registers can be clobbered by the call.
766 saveRegs = GeneralRegisterSet::Intersect(saveRegs, GeneralRegisterSet::Volatile());
767 masm.PushRegsInMask(saveRegs);
768 masm.setupUnalignedABICall(2, scratch);
769 masm.movePtr(ImmPtr(cx->runtime()), scratch);
770 masm.passABIArg(scratch);
771 masm.passABIArg(obj);
772 masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, PostWriteBarrier));
773 masm.PopRegsInMask(saveRegs);
774
775 masm.bind(&skipBarrier);
776 return true;
777 }
778 #endif // JSGC_GENERATIONAL
779
780 //
781 // UseCount_Fallback
782 //
// Walk from the current exit frame through the baseline stub frame to the
// topmost baseline JS frame and report whether it is a constructing (|new|)
// frame. Callable only while running in JIT code.
783 static bool
784 IsTopFrameConstructing(JSContext *cx)
785 {
786 JS_ASSERT(cx->currentlyRunningInJit());
787 JitActivationIterator activations(cx->runtime());
788 JitFrameIterator iter(activations);
789 JS_ASSERT(iter.type() == JitFrame_Exit);
790
791 ++iter;
792 JS_ASSERT(iter.type() == JitFrame_BaselineStub);
793
794 ++iter;
795 JS_ASSERT(iter.isBaselineJS());
796
797 return iter.isConstructing();
798 }
799
// Try to Ion-compile |script|: at a JSOP_LOOPENTRY pc, compile for on-stack
// replacement; otherwise, for function frames, compile from the top for a
// later entry. On a successful loop-entry compile, *jitcodePtr is set to the
// OSR entry point; in every other successful outcome it stays nullptr.
// Returns false only on a hard compile error.
800 static bool
801 EnsureCanEnterIon(JSContext *cx, ICUseCount_Fallback *stub, BaselineFrame *frame,
802 HandleScript script, jsbytecode *pc, void **jitcodePtr)
803 {
804 JS_ASSERT(jitcodePtr);
805 JS_ASSERT(!*jitcodePtr);
806
807 bool isLoopEntry = (JSOp(*pc) == JSOP_LOOPENTRY);
808
809 bool isConstructing = IsTopFrameConstructing(cx);
810 MethodStatus stat;
811 if (isLoopEntry) {
812 JS_ASSERT(LoopEntryCanIonOsr(pc));
813 IonSpew(IonSpew_BaselineOSR, " Compile at loop entry!");
814 stat = CanEnterAtBranch(cx, script, frame, pc, isConstructing);
815 } else if (frame->isFunctionFrame()) {
816 IonSpew(IonSpew_BaselineOSR, " Compile function from top for later entry!");
817 stat = CompileFunctionForBaseline(cx, script, frame, isConstructing);
818 } else {
// Global/eval frames at non-loop pcs are not compiled here.
819 return true;
820 }
821
822 if (stat == Method_Error) {
823 IonSpew(IonSpew_BaselineOSR, " Compile with Ion errored!");
824 return false;
825 }
826
827 if (stat == Method_CantCompile)
828 IonSpew(IonSpew_BaselineOSR, " Can't compile with Ion!");
829 else if (stat == Method_Skipped)
830 IonSpew(IonSpew_BaselineOSR, " Skipped compile with Ion!");
831 else if (stat == Method_Compiled)
832 IonSpew(IonSpew_BaselineOSR, " Compiled with Ion!");
833 else
834 MOZ_ASSUME_UNREACHABLE("Invalid MethodStatus!");
835
836 // Failed to compile. Reset use count and return.
837 if (stat != Method_Compiled) {
838 // TODO: If stat == Method_CantCompile, insert stub that just skips the useCount
839 // entirely, instead of resetting it.
840 bool bailoutExpected = script->hasIonScript() && script->ionScript()->bailoutExpected();
841 if (stat == Method_CantCompile || bailoutExpected) {
842 IonSpew(IonSpew_BaselineOSR, " Reset UseCount cantCompile=%s bailoutExpected=%s!",
843 stat == Method_CantCompile ? "yes" : "no",
844 bailoutExpected ? "yes" : "no");
845 script->resetUseCount();
846 }
847 return true;
848 }
849
850 if (isLoopEntry) {
851 IonScript *ion = script->ionScript();
852 JS_ASSERT(cx->runtime()->spsProfiler.enabled() == ion->hasSPSInstrumentation());
853 JS_ASSERT(ion->osrPc() == pc);
854
855 // If the baseline frame's SPS handling doesn't match up with the Ion code's SPS
856 // handling, don't OSR.
857 if (frame->hasPushedSPSFrame() != ion->hasSPSInstrumentation()) {
858 IonSpew(IonSpew_BaselineOSR, " OSR crosses SPS handling boundaries, skipping!");
859 return true;
860 }
861
862 IonSpew(IonSpew_BaselineOSR, " OSR possible!");
863 *jitcodePtr = ion->method()->raw() + ion->osrEntryOffset();
864 }
865
866 return true;
867 }
868
869 //
870 // The following data is kept in a temporary heap-allocated buffer, stored in
871 // JitRuntime (high memory addresses at top, low at bottom):
872 //
873 // +----->+=================================+ -- <---- High Address
874 // | | | |
875 // | | ...BaselineFrame... | |-- Copy of BaselineFrame + stack values
876 // | | | |
877 // | +---------------------------------+ |
878 // | | | |
879 // | | ...Locals/Stack... | |
880 // | | | |
881 // | +=================================+ --
882 // | | Padding(Maybe Empty) |
883 // | +=================================+ --
884 // +------|-- baselineFrame | |-- IonOsrTempData
885 // | jitcode | |
886 // +=================================+ -- <---- Low Address
887 //
888 // A pointer to the IonOsrTempData is returned.
889
// Temporary OSR hand-off data; layout is described in the diagram above.
890 struct IonOsrTempData
891 {
// Ion OSR entry point to jump to.
892 void *jitcode;
// Points to the *end* of the copied BaselineFrame data, like the frame
// pointer register in baseline frames (see PrepareOsrTempData).
893 uint8_t *baselineFrame;
894 };
895
// Allocate and fill the IonOsrTempData buffer handed to Ion at an OSR entry:
// an IonOsrTempData header followed by a copy of the BaselineFrame plus its
// local/stack Values. Returns nullptr on OOM.
896 static IonOsrTempData *
897 PrepareOsrTempData(JSContext *cx, ICUseCount_Fallback *stub, BaselineFrame *frame,
898 HandleScript script, jsbytecode *pc, void *jitcode)
899 {
900 size_t numLocalsAndStackVals = frame->numValueSlots();
901
902 // Calculate the amount of space to allocate:
903 // BaselineFrame space:
904 // (sizeof(Value) * (numLocals + numStackVals))
905 // + sizeof(BaselineFrame)
906 //
907 // IonOsrTempData space:
908 // sizeof(IonOsrTempData)
909
910 size_t frameSpace = sizeof(BaselineFrame) + sizeof(Value) * numLocalsAndStackVals;
911 size_t ionOsrTempDataSpace = sizeof(IonOsrTempData);
912
// Value-align both regions so the copied frame data is properly aligned.
913 size_t totalSpace = AlignBytes(frameSpace, sizeof(Value)) +
914 AlignBytes(ionOsrTempDataSpace, sizeof(Value));
915
916 IonOsrTempData *info = (IonOsrTempData *)cx->runtime()->getJitRuntime(cx)->allocateOsrTempData(totalSpace);
917 if (!info)
918 return nullptr;
919
920 memset(info, 0, totalSpace);
921
922 info->jitcode = jitcode;
923
924 // Copy the BaselineFrame + local/stack Values to the buffer. Arguments and
925 // |this| are not copied but left on the stack: the Baseline and Ion frame
926 // share the same frame prefix and Ion won't clobber these values. Note
927 // that info->baselineFrame will point to the *end* of the frame data, like
928 // the frame pointer register in baseline frames.
929 uint8_t *frameStart = (uint8_t *)info + AlignBytes(ionOsrTempDataSpace, sizeof(Value));
930 info->baselineFrame = frameStart + frameSpace;
931
932 memcpy(frameStart, (uint8_t *)frame - numLocalsAndStackVals * sizeof(Value), frameSpace);
933
934 IonSpew(IonSpew_BaselineOSR, "Allocated IonOsrTempData at %p", (void *) info);
935 IonSpew(IonSpew_BaselineOSR, "Jitcode is %p", info->jitcode);
936
937 // All done.
938 return info;
939 }
940
941 static bool
942 DoUseCountFallback(JSContext *cx, ICUseCount_Fallback *stub, BaselineFrame *frame,
943 IonOsrTempData **infoPtr)
944 {
945 JS_ASSERT(infoPtr);
946 *infoPtr = nullptr;
947
948 // A TI OOM will disable TI and Ion.
949 if (!jit::IsIonEnabled(cx))
950 return true;
951
952 RootedScript script(cx, frame->script());
953 jsbytecode *pc = stub->icEntry()->pc(script);
954 bool isLoopEntry = JSOp(*pc) == JSOP_LOOPENTRY;
955
956 JS_ASSERT(!isLoopEntry || LoopEntryCanIonOsr(pc));
957
958 FallbackICSpew(cx, stub, "UseCount(%d)", isLoopEntry ? int(script->pcToOffset(pc)) : int(-1));
959
960 if (!script->canIonCompile()) {
961 // TODO: ASSERT that ion-compilation-disabled checker stub doesn't exist.
962 // TODO: Clear all optimized stubs.
963 // TODO: Add a ion-compilation-disabled checker IC stub
964 script->resetUseCount();
965 return true;
966 }
967
968 JS_ASSERT(!script->isIonCompilingOffThread());
969
970 // If Ion script exists, but PC is not at a loop entry, then Ion will be entered for
971 // this script at an appropriate LOOPENTRY or the next time this function is called.
972 if (script->hasIonScript() && !isLoopEntry) {
973 IonSpew(IonSpew_BaselineOSR, "IonScript exists, but not at loop entry!");
974 // TODO: ASSERT that a ion-script-already-exists checker stub doesn't exist.
975 // TODO: Clear all optimized stubs.
976 // TODO: Add a ion-script-already-exists checker stub.
977 return true;
978 }
979
980 // Ensure that Ion-compiled code is available.
981 IonSpew(IonSpew_BaselineOSR,
982 "UseCount for %s:%d reached %d at pc %p, trying to switch to Ion!",
983 script->filename(), script->lineno(), (int) script->getUseCount(), (void *) pc);
984 void *jitcode = nullptr;
985 if (!EnsureCanEnterIon(cx, stub, frame, script, pc, &jitcode))
986 return false;
987
988 // Jitcode should only be set here if not at loop entry.
989 JS_ASSERT_IF(!isLoopEntry, !jitcode);
990 if (!jitcode)
991 return true;
992
993 // Prepare the temporary heap copy of the fake InterpreterFrame and actual args list.
994 IonSpew(IonSpew_BaselineOSR, "Got jitcode. Preparing for OSR into ion.");
995 IonOsrTempData *info = PrepareOsrTempData(cx, stub, frame, script, pc, jitcode);
996 if (!info)
997 return false;
998 *infoPtr = info;
999
1000 return true;
1001 }
1002
// VMFunction wrapper for DoUseCountFallback, called from the fallback stub
// code generated below.
typedef bool (*DoUseCountFallbackFn)(JSContext *, ICUseCount_Fallback *, BaselineFrame *frame,
                                     IonOsrTempData **infoPtr);
static const VMFunction DoUseCountFallbackInfo =
    FunctionInfo<DoUseCountFallbackFn>(DoUseCountFallback);
1007
// Stub code for the use-count IC: calls DoUseCountFallback and, if it
// produced IonOsrTempData, tears the baseline frame down to the return
// address and jumps straight into the Ion OSR entry.
bool
ICUseCount_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    // enterStubFrame is going to clobber the BaselineFrameReg, save it in R0.scratchReg()
    // first.
    masm.movePtr(BaselineFrameReg, R0.scratchReg());

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, R1.scratchReg());

    Label noCompiledCode;
    // Call DoUseCountFallback to compile/check-for Ion-compiled function
    {
        // Push IonOsrTempData pointer storage (an outparam slot on the stack,
        // followed by a pointer to that slot).
        masm.subPtr(Imm32(sizeof(void *)), BaselineStackReg);
        masm.push(BaselineStackReg);

        // Push IonJSFrameLayout pointer.
        masm.loadBaselineFramePtr(R0.scratchReg(), R0.scratchReg());
        masm.push(R0.scratchReg());

        // Push stub pointer.
        masm.push(BaselineStubReg);

        if (!callVM(DoUseCountFallbackInfo, masm))
            return false;

        // Pop IonOsrTempData pointer.
        masm.pop(R0.scratchReg());

        leaveStubFrame(masm);

        // If no JitCode was found, then just exit the IC.
        masm.branchPtr(Assembler::Equal, R0.scratchReg(), ImmPtr(nullptr), &noCompiledCode);
    }

    // Get a scratch register. OsrFrameReg is excluded because it must survive
    // into the Ion code we jump to.
    GeneralRegisterSet regs(availableGeneralRegs(0));
    Register osrDataReg = R0.scratchReg();
    regs.take(osrDataReg);
    regs.takeUnchecked(OsrFrameReg);

    Register scratchReg = regs.takeAny();

    // At this point, stack looks like:
    //  +-> [...Calling-Frame...]
    //  |   [...Actual-Args/ThisV/ArgCount/Callee...]
    //  |   [Descriptor]
    //  |   [Return-Addr]
    //  +---[Saved-FramePtr] <-- BaselineFrameReg points here.
    //      [...Baseline-Frame...]

    // Restore the stack pointer to point to the saved frame pointer.
    masm.movePtr(BaselineFrameReg, BaselineStackReg);

    // Discard saved frame pointer, so that the return address is on top of
    // the stack.
    masm.pop(scratchReg);

    // Jump into Ion: load the OSR entry point and hand Ion the copied
    // baseline frame through OsrFrameReg.
    masm.loadPtr(Address(osrDataReg, offsetof(IonOsrTempData, jitcode)), scratchReg);
    masm.loadPtr(Address(osrDataReg, offsetof(IonOsrTempData, baselineFrame)), OsrFrameReg);
    masm.jump(scratchReg);

    // No jitcode available, do nothing.
    masm.bind(&noCompiledCode);
    EmitReturnFromIC(masm);
    return true;
}
1077
1078 //
1079 // ICProfile_Fallback
1080 //
1081
// Fallback for the SPS profiler IC: pushes the SPS frame for this
// script/function manually, then attaches an optimized
// Profiler_PushFunction stub (replacing any stale one) so later hits avoid
// this VM call.
static bool
DoProfilerFallback(JSContext *cx, BaselineFrame *frame, ICProfiler_Fallback *stub)
{
    RootedScript script(cx, frame->script());
    RootedFunction func(cx, frame->maybeFun());
    mozilla::DebugOnly<ICEntry *> icEntry = stub->icEntry();

    FallbackICSpew(cx, stub, "Profiler");

    SPSProfiler *profiler = &cx->runtime()->spsProfiler;

    // Manually enter SPS this time.
    JS_ASSERT(profiler->enabled());
    if (!cx->runtime()->spsProfiler.enter(script, func))
        return false;
    frame->setPushedSPSFrame();

    // Unlink any existing PushFunction stub (which may hold a stale
    // 'const char *' to the profile string). At most one such stub can
    // precede this fallback in the chain.
    JS_ASSERT_IF(icEntry->firstStub() != stub,
                 icEntry->firstStub()->isProfiler_PushFunction() &&
                 icEntry->firstStub()->next() == stub);
    stub->unlinkStubsWithKind(cx, ICStub::Profiler_PushFunction);
    JS_ASSERT(icEntry->firstStub() == stub);

    // Generate the string to use to identify this stack frame.
    const char *string = profiler->profileString(script, func);
    if (string == nullptr)
        return false;

    IonSpew(IonSpew_BaselineIC, " Generating Profiler_PushFunction stub for %s:%d",
            script->filename(), script->lineno());

    // Create a new optimized stub.
    ICProfiler_PushFunction::Compiler compiler(cx, string, script);
    ICStub *optStub = compiler.getStub(compiler.getStubSpace(script));
    if (!optStub)
        return false;
    stub->addNewStub(optStub);

    return true;
}
1124
// VMFunction wrapper for DoProfilerFallback.
typedef bool (*DoProfilerFallbackFn)(JSContext *, BaselineFrame *frame, ICProfiler_Fallback *);
static const VMFunction DoProfilerFallbackInfo =
    FunctionInfo<DoProfilerFallbackFn>(DoProfilerFallback);
1128
// Profiler fallback stub: tail-calls DoProfilerFallback(cx, frame, stub).
// Arguments are pushed in reverse order of the C++ signature.
bool
ICProfiler_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    masm.push(BaselineStubReg); // Push stub.
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg()); // Push frame.

    return tailCallVM(DoProfilerFallbackInfo, masm);
}
1139
// Optimized profiler stub: pushes the precomputed SPS entry (string + script
// stored in the stub) and flags the baseline frame as having an SPS frame,
// all without calling into the VM.
bool
ICProfiler_PushFunction::Compiler::generateStubCode(MacroAssembler &masm)
{

    Register scratch = R0.scratchReg();
    Register scratch2 = R1.scratchReg();

    // Profiling should be enabled if we ever reach here.
#ifdef DEBUG
    Label spsEnabled;
    uint32_t *enabledAddr = cx->runtime()->spsProfiler.addressOfEnabled();
    masm.branch32(Assembler::NotEqual, AbsoluteAddress(enabledAddr), Imm32(0), &spsEnabled);
    masm.assumeUnreachable("Profiling should have been enabled.");
    masm.bind(&spsEnabled);
#endif

    // Push SPS entry.
    masm.spsPushFrame(&cx->runtime()->spsProfiler,
                      Address(BaselineStubReg, ICProfiler_PushFunction::offsetOfStr()),
                      Address(BaselineStubReg, ICProfiler_PushFunction::offsetOfScript()),
                      scratch,
                      scratch2);

    // Mark frame as having profiler entry pushed.
    Address flagsOffset(BaselineFrameReg, BaselineFrame::reverseOffsetOfFlags());
    masm.or32(Imm32(BaselineFrame::HAS_PUSHED_SPS_FRAME), flagsOffset);

    EmitReturnFromIC(masm);

    return true;
}
1171
1172 //
1173 // TypeMonitor_Fallback
1174 //
1175
// Attach (or extend) an optimized type-monitor stub matching |val|'s type.
// Primitives share a single ICTypeMonitor_PrimitiveSet stub whose type-flag
// set grows over time; singleton objects and type objects each get their own
// stub. Returns true without attaching anything once MAX_OPTIMIZED_STUBS is
// reached. If the chain was previously empty, re-points all monitored main
// stubs at the newly added monitor stub.
bool
ICTypeMonitor_Fallback::addMonitorStubForValue(JSContext *cx, JSScript *script, HandleValue val)
{
    bool wasDetachedMonitorChain = lastMonitorStubPtrAddr_ == nullptr;
    JS_ASSERT_IF(wasDetachedMonitorChain, numOptimizedMonitorStubs_ == 0);

    if (numOptimizedMonitorStubs_ >= MAX_OPTIMIZED_STUBS) {
        // TODO: if the TypeSet becomes unknown or has the AnyObject type,
        // replace stubs with a single stub to handle these.
        return true;
    }

    if (val.isPrimitive()) {
        JS_ASSERT(!val.isMagic());
        JSValueType type = val.isDouble() ? JSVAL_TYPE_DOUBLE : val.extractNonDoubleType();

        // Check for existing TypeMonitor stub.
        ICTypeMonitor_PrimitiveSet *existingStub = nullptr;
        for (ICStubConstIterator iter = firstMonitorStub(); !iter.atEnd(); iter++) {
            if (iter->isTypeMonitor_PrimitiveSet()) {
                existingStub = iter->toTypeMonitor_PrimitiveSet();
                if (existingStub->containsType(type))
                    return true;
            }
        }

        // Either extend the existing primitive-set stub with the new type,
        // or create the chain's first one.
        ICTypeMonitor_PrimitiveSet::Compiler compiler(cx, existingStub, type);
        ICStub *stub = existingStub ? compiler.updateStub()
                                    : compiler.getStub(compiler.getStubSpace(script));
        if (!stub) {
            js_ReportOutOfMemory(cx);
            return false;
        }

        IonSpew(IonSpew_BaselineIC, " %s TypeMonitor stub %p for primitive type %d",
                existingStub ? "Modified existing" : "Created new", stub, type);

        if (!existingStub) {
            JS_ASSERT(!hasStub(TypeMonitor_PrimitiveSet));
            addOptimizedMonitorStub(stub);
        }

    } else if (val.toObject().hasSingletonType()) {
        RootedObject obj(cx, &val.toObject());

        // Check for existing TypeMonitor stub.
        for (ICStubConstIterator iter = firstMonitorStub(); !iter.atEnd(); iter++) {
            if (iter->isTypeMonitor_SingleObject() &&
                iter->toTypeMonitor_SingleObject()->object() == obj)
            {
                return true;
            }
        }

        ICTypeMonitor_SingleObject::Compiler compiler(cx, obj);
        ICStub *stub = compiler.getStub(compiler.getStubSpace(script));
        if (!stub) {
            js_ReportOutOfMemory(cx);
            return false;
        }

        IonSpew(IonSpew_BaselineIC, " Added TypeMonitor stub %p for singleton %p",
                stub, obj.get());

        addOptimizedMonitorStub(stub);

    } else {
        RootedTypeObject type(cx, val.toObject().type());

        // Check for existing TypeMonitor stub.
        for (ICStubConstIterator iter = firstMonitorStub(); !iter.atEnd(); iter++) {
            if (iter->isTypeMonitor_TypeObject() &&
                iter->toTypeMonitor_TypeObject()->type() == type)
            {
                return true;
            }
        }

        ICTypeMonitor_TypeObject::Compiler compiler(cx, type);
        ICStub *stub = compiler.getStub(compiler.getStubSpace(script));
        if (!stub) {
            js_ReportOutOfMemory(cx);
            return false;
        }

        IonSpew(IonSpew_BaselineIC, " Added TypeMonitor stub %p for TypeObject %p",
                stub, type.get());

        addOptimizedMonitorStub(stub);
    }

    bool firstMonitorStubAdded = wasDetachedMonitorChain && (numOptimizedMonitorStubs_ > 0);

    if (firstMonitorStubAdded) {
        // Was an empty monitor chain before, but a new stub was added. This is the
        // only time that any main stubs' firstMonitorStub fields need to be updated to
        // refer to the newly added monitor stub.
        ICStub *firstStub = mainFallbackStub_->icEntry()->firstStub();
        for (ICStubConstIterator iter = firstStub; !iter.atEnd(); iter++) {
            // Non-monitored stubs are used if the result has always the same type,
            // e.g. a StringLength stub will always return int32.
            if (!iter->isMonitored())
                continue;

            // Since we just added the first optimized monitoring stub, any
            // existing main stub's |firstMonitorStub| MUST be pointing to the fallback
            // monitor stub (i.e. this stub).
            JS_ASSERT(iter->toMonitoredStub()->firstMonitorStub() == this);
            iter->toMonitoredStub()->updateFirstMonitorStub(firstMonitorStub_);
        }
    }

    return true;
}
1290
1291 static bool
1292 DoTypeMonitorFallback(JSContext *cx, BaselineFrame *frame, ICTypeMonitor_Fallback *stub,
1293 HandleValue value, MutableHandleValue res)
1294 {
1295 RootedScript script(cx, frame->script());
1296 jsbytecode *pc = stub->icEntry()->pc(script);
1297 TypeFallbackICSpew(cx, stub, "TypeMonitor");
1298
1299 uint32_t argument;
1300 if (stub->monitorsThis()) {
1301 JS_ASSERT(pc == script->code());
1302 types::TypeScript::SetThis(cx, script, value);
1303 } else if (stub->monitorsArgument(&argument)) {
1304 JS_ASSERT(pc == script->code());
1305 types::TypeScript::SetArgument(cx, script, argument, value);
1306 } else {
1307 types::TypeScript::Monitor(cx, script, pc, value);
1308 }
1309
1310 if (!stub->addMonitorStubForValue(cx, script, value))
1311 return false;
1312
1313 // Copy input value to res.
1314 res.set(value);
1315 return true;
1316 }
1317
// VMFunction wrapper for DoTypeMonitorFallback.
typedef bool (*DoTypeMonitorFallbackFn)(JSContext *, BaselineFrame *, ICTypeMonitor_Fallback *,
                                        HandleValue, MutableHandleValue);
static const VMFunction DoTypeMonitorFallbackInfo =
    FunctionInfo<DoTypeMonitorFallbackFn>(DoTypeMonitorFallback);
1322
// Type-monitor fallback stub: tail-calls
// DoTypeMonitorFallback(cx, frame, stub, value, res) with the monitored
// value taken from R0 (the JS return operand).
bool
ICTypeMonitor_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(R0 == JSReturnOperand);

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    // Arguments are pushed in reverse order of the C++ signature.
    masm.pushValue(R0);
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(DoTypeMonitorFallbackInfo, masm);
}
1337
// Optimized monitor stub for a set of primitive types: succeed (plain IC
// return) if R0's type is in flags_, otherwise fall through to the next stub.
bool
ICTypeMonitor_PrimitiveSet::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label success;
    // The number test below also accepts int32, so a separate int32 check is
    // only needed when doubles are not in the set.
    if ((flags_ & TypeToFlag(JSVAL_TYPE_INT32)) && !(flags_ & TypeToFlag(JSVAL_TYPE_DOUBLE)))
        masm.branchTestInt32(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_DOUBLE))
        masm.branchTestNumber(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_UNDEFINED))
        masm.branchTestUndefined(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_BOOLEAN))
        masm.branchTestBoolean(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_STRING))
        masm.branchTestString(Assembler::Equal, R0, &success);

    // Currently, we will never generate primitive stub checks for object. However,
    // when we do get to the point where we want to collapse our monitor chains of
    // objects and singletons down (when they get too long) to a generic "any object"
    // in coordination with the typeset doing the same thing, this will need to
    // be re-enabled.
    /*
    if (flags_ & TypeToFlag(JSVAL_TYPE_OBJECT))
        masm.branchTestObject(Assembler::Equal, R0, &success);
    */
    JS_ASSERT(!(flags_ & TypeToFlag(JSVAL_TYPE_OBJECT)));

    if (flags_ & TypeToFlag(JSVAL_TYPE_NULL))
        masm.branchTestNull(Assembler::Equal, R0, &success);

    // No type in the set matched: advance to the next stub in the chain.
    EmitStubGuardFailure(masm);

    masm.bind(&success);
    EmitReturnFromIC(masm);
    return true;
}
1377
// Optimized monitor stub for one singleton object: succeed only if R0 holds
// exactly the object stored in the stub; otherwise fall through to the next
// stub in the chain.
bool
ICTypeMonitor_SingleObject::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    // Guard on the object's identity.
    Register obj = masm.extractObject(R0, ExtractTemp0);
    Address expectedObject(BaselineStubReg, ICTypeMonitor_SingleObject::offsetOfObject());
    masm.branchPtr(Assembler::NotEqual, expectedObject, obj, &failure);

    EmitReturnFromIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
1395
// Optimized monitor stub for a TypeObject: succeed only if R0 holds an
// object whose type matches the TypeObject stored in the stub; otherwise
// fall through to the next stub in the chain.
bool
ICTypeMonitor_TypeObject::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    // Guard on the object's TypeObject.
    Register obj = masm.extractObject(R0, ExtractTemp0);
    masm.loadPtr(Address(obj, JSObject::offsetOfType()), R1.scratchReg());

    Address expectedType(BaselineStubReg, ICTypeMonitor_TypeObject::offsetOfType());
    masm.branchPtr(Assembler::NotEqual, expectedType, R1.scratchReg(), &failure);

    EmitReturnFromIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
1415
// Attach (or extend) an optimized type-update stub for a write of |val| to
// property |id| of |obj|. Mirrors addMonitorStubForValue above: primitives
// share one ICTypeUpdate_PrimitiveSet stub, singleton objects and type
// objects each get their own stub. Returns true without attaching anything
// once MAX_OPTIMIZED_STUBS is reached.
bool
ICUpdatedStub::addUpdateStubForValue(JSContext *cx, HandleScript script, HandleObject obj,
                                     HandleId id, HandleValue val)
{
    if (numOptimizedStubs_ >= MAX_OPTIMIZED_STUBS) {
        // TODO: if the TypeSet becomes unknown or has the AnyObject type,
        // replace stubs with a single stub to handle these.
        return true;
    }

    types::EnsureTrackPropertyTypes(cx, obj, id);

    // Make sure that undefined values are explicitly included in the property
    // types for an object if generating a stub to write an undefined value.
    if (val.isUndefined() && types::CanHaveEmptyPropertyTypesForOwnProperty(obj))
        types::AddTypePropertyId(cx, obj, id, val);

    if (val.isPrimitive()) {
        JSValueType type = val.isDouble() ? JSVAL_TYPE_DOUBLE : val.extractNonDoubleType();

        // Check for existing TypeUpdate stub.
        ICTypeUpdate_PrimitiveSet *existingStub = nullptr;
        for (ICStubConstIterator iter = firstUpdateStub_; !iter.atEnd(); iter++) {
            if (iter->isTypeUpdate_PrimitiveSet()) {
                existingStub = iter->toTypeUpdate_PrimitiveSet();
                if (existingStub->containsType(type))
                    return true;
            }
        }

        // Either extend the existing primitive-set stub with the new type,
        // or create the chain's first one.
        ICTypeUpdate_PrimitiveSet::Compiler compiler(cx, existingStub, type);
        ICStub *stub = existingStub ? compiler.updateStub()
                                    : compiler.getStub(compiler.getStubSpace(script));
        if (!stub)
            return false;
        if (!existingStub) {
            JS_ASSERT(!hasTypeUpdateStub(TypeUpdate_PrimitiveSet));
            addOptimizedUpdateStub(stub);
        }

        IonSpew(IonSpew_BaselineIC, " %s TypeUpdate stub %p for primitive type %d",
                existingStub ? "Modified existing" : "Created new", stub, type);

    } else if (val.toObject().hasSingletonType()) {
        RootedObject obj(cx, &val.toObject());

        // Check for existing TypeUpdate stub.
        for (ICStubConstIterator iter = firstUpdateStub_; !iter.atEnd(); iter++) {
            if (iter->isTypeUpdate_SingleObject() &&
                iter->toTypeUpdate_SingleObject()->object() == obj)
            {
                return true;
            }
        }

        ICTypeUpdate_SingleObject::Compiler compiler(cx, obj);
        ICStub *stub = compiler.getStub(compiler.getStubSpace(script));
        if (!stub)
            return false;

        IonSpew(IonSpew_BaselineIC, " Added TypeUpdate stub %p for singleton %p", stub, obj.get());

        addOptimizedUpdateStub(stub);

    } else {
        RootedTypeObject type(cx, val.toObject().type());

        // Check for existing TypeUpdate stub.
        for (ICStubConstIterator iter = firstUpdateStub_; !iter.atEnd(); iter++) {
            if (iter->isTypeUpdate_TypeObject() &&
                iter->toTypeUpdate_TypeObject()->type() == type)
            {
                return true;
            }
        }

        ICTypeUpdate_TypeObject::Compiler compiler(cx, type);
        ICStub *stub = compiler.getStub(compiler.getStubSpace(script));
        if (!stub)
            return false;

        IonSpew(IonSpew_BaselineIC, " Added TypeUpdate stub %p for TypeObject %p",
                stub, type.get());

        addOptimizedUpdateStub(stub);
    }

    return true;
}
1505
1506 //
1507 // TypeUpdate_Fallback
1508 //
// VM entry for type-update ICs: record the written value's type in TI for
// the property being set, then try to attach an optimized TypeUpdate stub.
// |objval| must hold an object (toObject() below requires it).
static bool
DoTypeUpdateFallback(JSContext *cx, BaselineFrame *frame, ICUpdatedStub *stub, HandleValue objval,
                     HandleValue value)
{
    FallbackICSpew(cx, stub->getChainFallback(), "TypeUpdate(%s)",
                   ICStub::KindString(stub->kind()));

    RootedScript script(cx, frame->script());
    RootedObject obj(cx, &objval.toObject());
    RootedId id(cx);

    // The property id depends on which kind of main stub requested the
    // update.
    switch(stub->kind()) {
      case ICStub::SetElem_Dense:
      case ICStub::SetElem_DenseAdd: {
        JS_ASSERT(obj->isNative());
        // Dense element writes are tracked under the JSID_VOID id.
        id = JSID_VOID;
        types::AddTypePropertyId(cx, obj, id, value);
        break;
      }
      case ICStub::SetProp_Native:
      case ICStub::SetProp_NativeAdd: {
        JS_ASSERT(obj->isNative());
        jsbytecode *pc = stub->getChainFallback()->icEntry()->pc(script);
        // JSOP_SETALIASEDVAR carries the name in a scope coordinate instead
        // of a name operand.
        if (*pc == JSOP_SETALIASEDVAR)
            id = NameToId(ScopeCoordinateName(cx->runtime()->scopeCoordinateNameCache, script, pc));
        else
            id = NameToId(script->getName(pc));
        types::AddTypePropertyId(cx, obj, id, value);
        break;
      }
      default:
        MOZ_ASSUME_UNREACHABLE("Invalid stub");
    }

    return stub->addUpdateStubForValue(cx, script, obj, id, value);
}
1545
// VMFunction wrapper for DoTypeUpdateFallback. NOTE(review): unlike the
// other wrappers in this file it is not static — presumably referenced from
// another translation unit; confirm before changing its linkage.
typedef bool (*DoTypeUpdateFallbackFn)(JSContext *, BaselineFrame *, ICUpdatedStub *, HandleValue,
                                       HandleValue);
const VMFunction DoTypeUpdateFallbackInfo =
    FunctionInfo<DoTypeUpdateFallbackFn>(DoTypeUpdateFallback);
1550
// Type-update fallback stub. TypeUpdate stubs report success in
// R1.scratchReg() (the optimized stubs below load 1 on a match); the
// fallback itself never matches inline, so it just reports false.
bool
ICTypeUpdate_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    // Just store false into R1.scratchReg() and return.
    masm.move32(Imm32(0), R1.scratchReg());
    EmitReturnFromIC(masm);
    return true;
}
1559
// Optimized type-update stub for a set of primitive types: load true into
// R1.scratchReg() if R0's type is in flags_, otherwise fall through to the
// next stub in the chain.
bool
ICTypeUpdate_PrimitiveSet::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label success;
    // The number test below also accepts int32, so a separate int32 check is
    // only needed when doubles are not in the set.
    if ((flags_ & TypeToFlag(JSVAL_TYPE_INT32)) && !(flags_ & TypeToFlag(JSVAL_TYPE_DOUBLE)))
        masm.branchTestInt32(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_DOUBLE))
        masm.branchTestNumber(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_UNDEFINED))
        masm.branchTestUndefined(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_BOOLEAN))
        masm.branchTestBoolean(Assembler::Equal, R0, &success);

    if (flags_ & TypeToFlag(JSVAL_TYPE_STRING))
        masm.branchTestString(Assembler::Equal, R0, &success);

    // Currently, we will never generate primitive stub checks for object. However,
    // when we do get to the point where we want to collapse our monitor chains of
    // objects and singletons down (when they get too long) to a generic "any object"
    // in coordination with the typeset doing the same thing, this will need to
    // be re-enabled.
    /*
    if (flags_ & TypeToFlag(JSVAL_TYPE_OBJECT))
        masm.branchTestObject(Assembler::Equal, R0, &success);
    */
    JS_ASSERT(!(flags_ & TypeToFlag(JSVAL_TYPE_OBJECT)));

    if (flags_ & TypeToFlag(JSVAL_TYPE_NULL))
        masm.branchTestNull(Assembler::Equal, R0, &success);

    // No type in the set matched: advance to the next stub in the chain.
    EmitStubGuardFailure(masm);

    // Type matches, load true into R1.scratchReg() and return.
    masm.bind(&success);
    masm.mov(ImmWord(1), R1.scratchReg());
    EmitReturnFromIC(masm);

    return true;
}
1602
// Optimized type-update stub for one singleton object: load true into
// R1.scratchReg() if R0 holds exactly the expected object, otherwise fall
// through to the next stub in the chain.
bool
ICTypeUpdate_SingleObject::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    // Guard on the object's identity.
    Register obj = masm.extractObject(R0, R1.scratchReg());
    Address expectedObject(BaselineStubReg, ICTypeUpdate_SingleObject::offsetOfObject());
    masm.branchPtr(Assembler::NotEqual, expectedObject, obj, &failure);

    // Identity matches, load true into R1.scratchReg() and return.
    masm.mov(ImmWord(1), R1.scratchReg());
    EmitReturnFromIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
1622
// Optimized type-update stub for a TypeObject: load true into
// R1.scratchReg() if R0 holds an object with the expected type, otherwise
// fall through to the next stub in the chain.
bool
ICTypeUpdate_TypeObject::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    // Guard on the object's TypeObject. R1.scratchReg() doubles as the
    // extraction temp and the loaded type; the object register is not needed
    // after the load.
    Register obj = masm.extractObject(R0, R1.scratchReg());
    masm.loadPtr(Address(obj, JSObject::offsetOfType()), R1.scratchReg());

    Address expectedType(BaselineStubReg, ICTypeUpdate_TypeObject::offsetOfType());
    masm.branchPtr(Assembler::NotEqual, expectedType, R1.scratchReg(), &failure);

    // Type matches, load true into R1.scratchReg() and return.
    masm.mov(ImmWord(1), R1.scratchReg());
    EmitReturnFromIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
1644
1645 //
1646 // VM function to help call native getters.
1647 //
1648
1649 static bool
1650 DoCallNativeGetter(JSContext *cx, HandleFunction callee, HandleObject obj,
1651 MutableHandleValue result)
1652 {
1653 JS_ASSERT(callee->isNative());
1654 JSNative natfun = callee->native();
1655
1656 JS::AutoValueArray<2> vp(cx);
1657 vp[0].setObject(*callee.get());
1658 vp[1].setObject(*obj.get());
1659
1660 if (!natfun(cx, 0, vp.begin()))
1661 return false;
1662
1663 result.set(vp[0]);
1664 return true;
1665 }
1666
// VMFunction wrapper for DoCallNativeGetter.
typedef bool (*DoCallNativeGetterFn)(JSContext *, HandleFunction, HandleObject, MutableHandleValue);
static const VMFunction DoCallNativeGetterInfo =
    FunctionInfo<DoCallNativeGetterFn>(DoCallNativeGetter);
1670
1671 //
1672 // This_Fallback
1673 //
1674
1675 static bool
1676 DoThisFallback(JSContext *cx, ICThis_Fallback *stub, HandleValue thisv, MutableHandleValue ret)
1677 {
1678 FallbackICSpew(cx, stub, "This");
1679
1680 JSObject *thisObj = BoxNonStrictThis(cx, thisv);
1681 if (!thisObj)
1682 return false;
1683
1684 ret.setObject(*thisObj);
1685 return true;
1686 }
1687
// VMFunction wrapper for DoThisFallback.
typedef bool (*DoThisFallbackFn)(JSContext *, ICThis_Fallback *, HandleValue, MutableHandleValue);
static const VMFunction DoThisFallbackInfo = FunctionInfo<DoThisFallbackFn>(DoThisFallback);
1690
// |this| fallback stub: tail-calls DoThisFallback(cx, stub, thisv, ret)
// with the incoming |this| value taken from R0.
bool
ICThis_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(R0 == JSReturnOperand);

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    // Arguments are pushed in reverse order of the C++ signature.
    masm.pushValue(R0);
    masm.push(BaselineStubReg);

    return tailCallVM(DoThisFallbackInfo, masm);
}
1704
1705 //
1706 // NewArray_Fallback
1707 //
1708
1709 static bool
1710 DoNewArray(JSContext *cx, ICNewArray_Fallback *stub, uint32_t length,
1711 HandleTypeObject type, MutableHandleValue res)
1712 {
1713 FallbackICSpew(cx, stub, "NewArray");
1714
1715 JSObject *obj = NewInitArray(cx, length, type);
1716 if (!obj)
1717 return false;
1718
1719 res.setObject(*obj);
1720 return true;
1721 }
1722
// VMFunction wrapper for DoNewArray.
typedef bool(*DoNewArrayFn)(JSContext *, ICNewArray_Fallback *, uint32_t, HandleTypeObject,
                            MutableHandleValue);
static const VMFunction DoNewArrayInfo = FunctionInfo<DoNewArrayFn>(DoNewArray);
1726
// New-array fallback stub: tail-calls DoNewArray(cx, stub, length, type, res)
// with the length in R0.scratchReg() and the type in R1.scratchReg().
bool
ICNewArray_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    masm.push(R1.scratchReg()); // type
    masm.push(R0.scratchReg()); // length
    masm.push(BaselineStubReg); // stub.

    return tailCallVM(DoNewArrayInfo, masm);
}
1738
1739 //
1740 // NewObject_Fallback
1741 //
1742
1743 static bool
1744 DoNewObject(JSContext *cx, ICNewObject_Fallback *stub, MutableHandleValue res)
1745 {
1746 FallbackICSpew(cx, stub, "NewObject");
1747
1748 RootedObject templateObject(cx, stub->templateObject());
1749 JSObject *obj = NewInitObject(cx, templateObject);
1750 if (!obj)
1751 return false;
1752
1753 res.setObject(*obj);
1754 return true;
1755 }
1756
// VMFunction wrapper for DoNewObject.
typedef bool(*DoNewObjectFn)(JSContext *, ICNewObject_Fallback *, MutableHandleValue);
static const VMFunction DoNewObjectInfo = FunctionInfo<DoNewObjectFn>(DoNewObject);
1759
// New-object fallback stub: tail-calls DoNewObject(cx, stub, res).
bool
ICNewObject_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    masm.push(BaselineStubReg); // stub.

    return tailCallVM(DoNewObjectInfo, masm);
}
1769
1770 //
1771 // Compare_Fallback
1772 //
1773
1774 static bool
1775 DoCompareFallback(JSContext *cx, BaselineFrame *frame, ICCompare_Fallback *stub_, HandleValue lhs,
1776 HandleValue rhs, MutableHandleValue ret)
1777 {
1778 // This fallback stub may trigger debug mode toggling.
1779 DebugModeOSRVolatileStub<ICCompare_Fallback *> stub(frame, stub_);
1780
1781 jsbytecode *pc = stub->icEntry()->pc(frame->script());
1782 JSOp op = JSOp(*pc);
1783
1784 FallbackICSpew(cx, stub, "Compare(%s)", js_CodeName[op]);
1785
1786 // Case operations in a CONDSWITCH are performing strict equality.
1787 if (op == JSOP_CASE)
1788 op = JSOP_STRICTEQ;
1789
1790 // Don't pass lhs/rhs directly, we need the original values when
1791 // generating stubs.
1792 RootedValue lhsCopy(cx, lhs);
1793 RootedValue rhsCopy(cx, rhs);
1794
1795 // Perform the compare operation.
1796 bool out;
1797 switch(op) {
1798 case JSOP_LT:
1799 if (!LessThan(cx, &lhsCopy, &rhsCopy, &out))
1800 return false;
1801 break;
1802 case JSOP_LE:
1803 if (!LessThanOrEqual(cx, &lhsCopy, &rhsCopy, &out))
1804 return false;
1805 break;
1806 case JSOP_GT:
1807 if (!GreaterThan(cx, &lhsCopy, &rhsCopy, &out))
1808 return false;
1809 break;
1810 case JSOP_GE:
1811 if (!GreaterThanOrEqual(cx, &lhsCopy, &rhsCopy, &out))
1812 return false;
1813 break;
1814 case JSOP_EQ:
1815 if (!LooselyEqual<true>(cx, &lhsCopy, &rhsCopy, &out))
1816 return false;
1817 break;
1818 case JSOP_NE:
1819 if (!LooselyEqual<false>(cx, &lhsCopy, &rhsCopy, &out))
1820 return false;
1821 break;
1822 case JSOP_STRICTEQ:
1823 if (!StrictlyEqual<true>(cx, &lhsCopy, &rhsCopy, &out))
1824 return false;
1825 break;
1826 case JSOP_STRICTNE:
1827 if (!StrictlyEqual<false>(cx, &lhsCopy, &rhsCopy, &out))
1828 return false;
1829 break;
1830 default:
1831 JS_ASSERT(!"Unhandled baseline compare op");
1832 return false;
1833 }
1834
1835 ret.setBoolean(out);
1836
1837 // Check if debug mode toggling made the stub invalid.
1838 if (stub.invalid())
1839 return true;
1840
1841 // Check to see if a new stub should be generated.
1842 if (stub->numOptimizedStubs() >= ICCompare_Fallback::MAX_OPTIMIZED_STUBS) {
1843 // TODO: Discard all stubs in this IC and replace with inert megamorphic stub.
1844 // But for now we just bail.
1845 return true;
1846 }
1847
1848 JSScript *script = frame->script();
1849
1850 // Try to generate new stubs.
1851 if (lhs.isInt32() && rhs.isInt32()) {
1852 IonSpew(IonSpew_BaselineIC, " Generating %s(Int32, Int32) stub", js_CodeName[op]);
1853 ICCompare_Int32::Compiler compiler(cx, op);
1854 ICStub *int32Stub = compiler.getStub(compiler.getStubSpace(script));
1855 if (!int32Stub)
1856 return false;
1857
1858 stub->addNewStub(int32Stub);
1859 return true;
1860 }
1861
1862 if (!cx->runtime()->jitSupportsFloatingPoint && (lhs.isNumber() || rhs.isNumber()))
1863 return true;
1864
1865 if (lhs.isNumber() && rhs.isNumber()) {
1866 IonSpew(IonSpew_BaselineIC, " Generating %s(Number, Number) stub", js_CodeName[op]);
1867
1868 // Unlink int32 stubs, it's faster to always use the double stub.
1869 stub->unlinkStubsWithKind(cx, ICStub::Compare_Int32);
1870
1871 ICCompare_Double::Compiler compiler(cx, op);
1872 ICStub *doubleStub = compiler.getStub(compiler.getStubSpace(script));
1873 if (!doubleStub)
1874 return false;
1875
1876 stub->addNewStub(doubleStub);
1877 return true;
1878 }
1879
1880 if ((lhs.isNumber() && rhs.isUndefined()) ||
1881 (lhs.isUndefined() && rhs.isNumber()))
1882 {
1883 IonSpew(IonSpew_BaselineIC, " Generating %s(%s, %s) stub", js_CodeName[op],
1884 rhs.isUndefined() ? "Number" : "Undefined",
1885 rhs.isUndefined() ? "Undefined" : "Number");
1886 ICCompare_NumberWithUndefined::Compiler compiler(cx, op, lhs.isUndefined());
1887 ICStub *doubleStub = compiler.getStub(compiler.getStubSpace(script));
1888 if (!doubleStub)
1889 return false;
1890
1891 stub->addNewStub(doubleStub);
1892 return true;
1893 }
1894
1895 if (lhs.isBoolean() && rhs.isBoolean()) {
1896 IonSpew(IonSpew_BaselineIC, " Generating %s(Boolean, Boolean) stub", js_CodeName[op]);
1897 ICCompare_Boolean::Compiler compiler(cx, op);
1898 ICStub *booleanStub = compiler.getStub(compiler.getStubSpace(script));
1899 if (!booleanStub)
1900 return false;
1901
1902 stub->addNewStub(booleanStub);
1903 return true;
1904 }
1905
1906 if ((lhs.isBoolean() && rhs.isInt32()) || (lhs.isInt32() && rhs.isBoolean())) {
1907 IonSpew(IonSpew_BaselineIC, " Generating %s(%s, %s) stub", js_CodeName[op],
1908 rhs.isInt32() ? "Boolean" : "Int32",
1909 rhs.isInt32() ? "Int32" : "Boolean");
1910 ICCompare_Int32WithBoolean::Compiler compiler(cx, op, lhs.isInt32());
1911 ICStub *optStub = compiler.getStub(compiler.getStubSpace(script));
1912 if (!optStub)
1913 return false;
1914
1915 stub->addNewStub(optStub);
1916 return true;
1917 }
1918
1919 if (IsEqualityOp(op)) {
1920 if (lhs.isString() && rhs.isString() && !stub->hasStub(ICStub::Compare_String)) {
1921 IonSpew(IonSpew_BaselineIC, " Generating %s(String, String) stub", js_CodeName[op]);
1922 ICCompare_String::Compiler compiler(cx, op);
1923 ICStub *stringStub = compiler.getStub(compiler.getStubSpace(script));
1924 if (!stringStub)
1925 return false;
1926
1927 stub->addNewStub(stringStub);
1928 return true;
1929 }
1930
1931 if (lhs.isObject() && rhs.isObject()) {
1932 JS_ASSERT(!stub->hasStub(ICStub::Compare_Object));
1933 IonSpew(IonSpew_BaselineIC, " Generating %s(Object, Object) stub", js_CodeName[op]);
1934 ICCompare_Object::Compiler compiler(cx, op);
1935 ICStub *objectStub = compiler.getStub(compiler.getStubSpace(script));
1936 if (!objectStub)
1937 return false;
1938
1939 stub->addNewStub(objectStub);
1940 return true;
1941 }
1942
1943 if ((lhs.isObject() || lhs.isNull() || lhs.isUndefined()) &&
1944 (rhs.isObject() || rhs.isNull() || rhs.isUndefined()) &&
1945 !stub->hasStub(ICStub::Compare_ObjectWithUndefined))
1946 {
1947 IonSpew(IonSpew_BaselineIC, " Generating %s(Obj/Null/Undef, Obj/Null/Undef) stub",
1948 js_CodeName[op]);
1949 bool lhsIsUndefined = lhs.isNull() || lhs.isUndefined();
1950 bool compareWithNull = lhs.isNull() || rhs.isNull();
1951 ICCompare_ObjectWithUndefined::Compiler compiler(cx, op,
1952 lhsIsUndefined, compareWithNull);
1953 ICStub *objectStub = compiler.getStub(compiler.getStubSpace(script));
1954 if (!objectStub)
1955 return false;
1956
1957 stub->addNewStub(objectStub);
1958 return true;
1959 }
1960 }
1961
1962 return true;
1963 }
1964
// VM-call descriptor for DoCompareFallback. PopValues(2) discards the two
// operand values that were synced onto the stack for the expression
// decompiler when the call returns.
typedef bool (*DoCompareFallbackFn)(JSContext *, BaselineFrame *, ICCompare_Fallback *,
                                    HandleValue, HandleValue, MutableHandleValue);
static const VMFunction DoCompareFallbackInfo =
    FunctionInfo<DoCompareFallbackFn>(DoCompareFallback, PopValues(2));
1969
// Fallback stub for compare ops: syncs both operands for the expression
// decompiler, then tail-calls into DoCompareFallback in the VM.
bool
ICCompare_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(R0 == JSReturnOperand);

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    // Ensure stack is fully synced for the expression decompiler.
    masm.pushValue(R0);
    masm.pushValue(R1);

    // Push arguments, in reverse order of the C++ signature.
    masm.pushValue(R1);
    masm.pushValue(R0);
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
    return tailCallVM(DoCompareFallbackInfo, masm);
}
1989
1990 //
1991 // Compare_String
1992 //
1993
// Optimized stub for equality comparison of two strings. Jumps to the next
// stub if either operand is not a string or the inline string comparison
// cannot produce a result.
bool
ICCompare_String::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestString(Assembler::NotEqual, R0, &failure);
    masm.branchTestString(Assembler::NotEqual, R1, &failure);

    JS_ASSERT(IsEqualityOp(op));

    Register left = masm.extractString(R0, ExtractTemp0);
    Register right = masm.extractString(R1, ExtractTemp1);

    GeneralRegisterSet regs(availableGeneralRegs(2));
    Register scratchReg = regs.takeAny();
    // x86 doesn't have the luxury of a second scratch.
    // If no register is free, save BaselineStubReg on the stack and borrow it;
    // it is restored on both the success and inline-failure paths below.
    Register scratchReg2;
    if (regs.empty()) {
        scratchReg2 = BaselineStubReg;
        masm.push(BaselineStubReg);
    } else {
        scratchReg2 = regs.takeAny();
    }
    JS_ASSERT(scratchReg2 != scratchReg);

    Label inlineCompareFailed;
    masm.compareStrings(op, left, right, scratchReg2, scratchReg, &inlineCompareFailed);
    masm.tagValue(JSVAL_TYPE_BOOLEAN, scratchReg2, R0);
    if (scratchReg2 == BaselineStubReg)
        masm.pop(BaselineStubReg);
    EmitReturnFromIC(masm);

    // Inline comparison failed: restore the borrowed stub register (if any)
    // before falling through to the generic failure path.
    masm.bind(&inlineCompareFailed);
    if (scratchReg2 == BaselineStubReg)
        masm.pop(BaselineStubReg);
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
2032
2033 //
2034 // Compare_Boolean
2035 //
2036
// Optimized stub comparing two booleans. Unboxes both payloads as int32 and
// compares them directly; the result overwrites the lhs payload register.
bool
ICCompare_Boolean::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestBoolean(Assembler::NotEqual, R0, &failure);
    masm.branchTestBoolean(Assembler::NotEqual, R1, &failure);

    Register left = masm.extractInt32(R0, ExtractTemp0);
    Register right = masm.extractInt32(R1, ExtractTemp1);

    // Compare payload regs of R0 and R1.
    Assembler::Condition cond = JSOpToCondition(op, /* signed = */true);
    masm.cmp32Set(cond, left, right, left);

    // Box the result and return.
    masm.tagValue(JSVAL_TYPE_BOOLEAN, left, R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub.
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
2060
2061 //
2062 // Compare_NumberWithUndefined
2063 //
2064
// Optimized stub comparing a number against undefined. No unboxing is
// needed: once both type guards pass, the result depends only on the opcode.
bool
ICCompare_NumberWithUndefined::Compiler::generateStubCode(MacroAssembler &masm)
{
    // lhsIsUndefined selects which of R0/R1 holds the undefined side.
    ValueOperand numberOperand, undefinedOperand;
    if (lhsIsUndefined) {
        numberOperand = R1;
        undefinedOperand = R0;
    } else {
        numberOperand = R0;
        undefinedOperand = R1;
    }

    Label failure;
    masm.branchTestNumber(Assembler::NotEqual, numberOperand, &failure);
    masm.branchTestUndefined(Assembler::NotEqual, undefinedOperand, &failure);

    // Comparing a number with undefined will always be true for NE/STRICTNE,
    // and always be false for other compare ops.
    masm.moveValue(BooleanValue(op == JSOP_NE || op == JSOP_STRICTNE), R0);

    EmitReturnFromIC(masm);

    // Failure case - jump to next stub.
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
2092
2093 //
2094 // Compare_Object
2095 //
2096
// Optimized stub for equality comparison of two objects: compares the
// unboxed object pointers directly (identity comparison).
bool
ICCompare_Object::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);
    masm.branchTestObject(Assembler::NotEqual, R1, &failure);

    JS_ASSERT(IsEqualityOp(op));

    Register left = masm.extractObject(R0, ExtractTemp0);
    Register right = masm.extractObject(R1, ExtractTemp1);

    Label ifTrue;
    masm.branchPtr(JSOpToCondition(op, /* signed = */true), left, right, &ifTrue);

    masm.moveValue(BooleanValue(false), R0);
    EmitReturnFromIC(masm);

    masm.bind(&ifTrue);
    masm.moveValue(BooleanValue(true), R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub.
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
2124
2125 //
2126 // Compare_ObjectWithUndefined
2127 //
2128
// Optimized stub for equality comparison of an object/null/undefined value
// against undefined or null. compareWithNull selects whether the guarded
// "undefined side" is actually null; the other side may be an object, or the
// matching null/undefined value (handled at the end).
bool
ICCompare_ObjectWithUndefined::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(IsEqualityOp(op));

    // lhsIsUndefined selects which of R0/R1 holds the null/undefined side.
    ValueOperand objectOperand, undefinedOperand;
    if (lhsIsUndefined) {
        objectOperand = R1;
        undefinedOperand = R0;
    } else {
        objectOperand = R0;
        undefinedOperand = R1;
    }

    Label failure;
    if (compareWithNull)
        masm.branchTestNull(Assembler::NotEqual, undefinedOperand, &failure);
    else
        masm.branchTestUndefined(Assembler::NotEqual, undefinedOperand, &failure);

    Label notObject;
    masm.branchTestObject(Assembler::NotEqual, objectOperand, &notObject);

    if (op == JSOP_STRICTEQ || op == JSOP_STRICTNE) {
        // obj !== undefined for all objects.
        masm.moveValue(BooleanValue(op == JSOP_STRICTNE), R0);
        EmitReturnFromIC(masm);
    } else {
        // obj != undefined only where !obj->getClass()->emulatesUndefined()
        // Load the object's class flags and test JSCLASS_EMULATES_UNDEFINED.
        Label emulatesUndefined;
        Register obj = masm.extractObject(objectOperand, ExtractTemp0);
        masm.loadPtr(Address(obj, JSObject::offsetOfType()), obj);
        masm.loadPtr(Address(obj, types::TypeObject::offsetOfClasp()), obj);
        masm.branchTest32(Assembler::NonZero,
                          Address(obj, Class::offsetOfFlags()),
                          Imm32(JSCLASS_EMULATES_UNDEFINED),
                          &emulatesUndefined);
        masm.moveValue(BooleanValue(op == JSOP_NE), R0);
        EmitReturnFromIC(masm);
        masm.bind(&emulatesUndefined);
        masm.moveValue(BooleanValue(op == JSOP_EQ), R0);
        EmitReturnFromIC(masm);
    }

    masm.bind(&notObject);

    // Also support null == null or undefined == undefined comparisons.
    if (compareWithNull)
        masm.branchTestNull(Assembler::NotEqual, objectOperand, &failure);
    else
        masm.branchTestUndefined(Assembler::NotEqual, objectOperand, &failure);

    masm.moveValue(BooleanValue(op == JSOP_STRICTEQ || op == JSOP_EQ), R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub.
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
2189
2190 //
2191 // Compare_Int32WithBoolean
2192 //
2193
// Optimized stub comparing an int32 against a boolean. Strict (in)equality
// is decided purely by the opcode; loose comparisons unbox both payloads and
// do an int32 comparison.
bool
ICCompare_Int32WithBoolean::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    // lhsIsInt32_ selects which of R0/R1 holds the int32 side.
    ValueOperand int32Val;
    ValueOperand boolVal;
    if (lhsIsInt32_) {
        int32Val = R0;
        boolVal = R1;
    } else {
        boolVal = R0;
        int32Val = R1;
    }
    masm.branchTestBoolean(Assembler::NotEqual, boolVal, &failure);
    masm.branchTestInt32(Assembler::NotEqual, int32Val, &failure);

    if (op_ == JSOP_STRICTEQ || op_ == JSOP_STRICTNE) {
        // Ints and booleans are never strictly equal, always strictly not equal.
        masm.moveValue(BooleanValue(op_ == JSOP_STRICTNE), R0);
        EmitReturnFromIC(masm);
    } else {
        Register boolReg = masm.extractBoolean(boolVal, ExtractTemp0);
        Register int32Reg = masm.extractInt32(int32Val, ExtractTemp1);

        // Compare payload regs of R0 and R1, preserving operand order.
        Assembler::Condition cond = JSOpToCondition(op_, /* signed = */true);
        masm.cmp32Set(cond, (lhsIsInt32_ ? int32Reg : boolReg),
                      (lhsIsInt32_ ? boolReg : int32Reg), R0.scratchReg());

        // Box the result and return.
        masm.tagValue(JSVAL_TYPE_BOOLEAN, R0.scratchReg(), R0);
        EmitReturnFromIC(masm);
    }

    // Failure case - jump to next stub.
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
2233
2234 //
2235 // ToBool_Fallback
2236 //
2237
2238 static bool
2239 DoToBoolFallback(JSContext *cx, BaselineFrame *frame, ICToBool_Fallback *stub, HandleValue arg,
2240 MutableHandleValue ret)
2241 {
2242 FallbackICSpew(cx, stub, "ToBool");
2243
2244 bool cond = ToBoolean(arg);
2245 ret.setBoolean(cond);
2246
2247 // Check to see if a new stub should be generated.
2248 if (stub->numOptimizedStubs() >= ICToBool_Fallback::MAX_OPTIMIZED_STUBS) {
2249 // TODO: Discard all stubs in this IC and replace with inert megamorphic stub.
2250 // But for now we just bail.
2251 return true;
2252 }
2253
2254 JS_ASSERT(!arg.isBoolean());
2255
2256 JSScript *script = frame->script();
2257
2258 // Try to generate new stubs.
2259 if (arg.isInt32()) {
2260 IonSpew(IonSpew_BaselineIC, " Generating ToBool(Int32) stub.");
2261 ICToBool_Int32::Compiler compiler(cx);
2262 ICStub *int32Stub = compiler.getStub(compiler.getStubSpace(script));
2263 if (!int32Stub)
2264 return false;
2265
2266 stub->addNewStub(int32Stub);
2267 return true;
2268 }
2269
2270 if (arg.isDouble() && cx->runtime()->jitSupportsFloatingPoint) {
2271 IonSpew(IonSpew_BaselineIC, " Generating ToBool(Double) stub.");
2272 ICToBool_Double::Compiler compiler(cx);
2273 ICStub *doubleStub = compiler.getStub(compiler.getStubSpace(script));
2274 if (!doubleStub)
2275 return false;
2276
2277 stub->addNewStub(doubleStub);
2278 return true;
2279 }
2280
2281 if (arg.isString()) {
2282 IonSpew(IonSpew_BaselineIC, " Generating ToBool(String) stub");
2283 ICToBool_String::Compiler compiler(cx);
2284 ICStub *stringStub = compiler.getStub(compiler.getStubSpace(script));
2285 if (!stringStub)
2286 return false;
2287
2288 stub->addNewStub(stringStub);
2289 return true;
2290 }
2291
2292 if (arg.isNull() || arg.isUndefined()) {
2293 ICToBool_NullUndefined::Compiler compiler(cx);
2294 ICStub *nilStub = compiler.getStub(compiler.getStubSpace(script));
2295 if (!nilStub)
2296 return false;
2297
2298 stub->addNewStub(nilStub);
2299 return true;
2300 }
2301
2302 if (arg.isObject()) {
2303 IonSpew(IonSpew_BaselineIC, " Generating ToBool(Object) stub.");
2304 ICToBool_Object::Compiler compiler(cx);
2305 ICStub *objStub = compiler.getStub(compiler.getStubSpace(script));
2306 if (!objStub)
2307 return false;
2308
2309 stub->addNewStub(objStub);
2310 return true;
2311 }
2312
2313 return true;
2314 }
2315
2316 typedef bool (*pf)(JSContext *, BaselineFrame *, ICToBool_Fallback *, HandleValue,
2317 MutableHandleValue);
2318 static const VMFunction fun = FunctionInfo<pf>(DoToBoolFallback);
2319
2320 bool
2321 ICToBool_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
2322 {
2323 JS_ASSERT(R0 == JSReturnOperand);
2324
2325 // Restore the tail call register.
2326 EmitRestoreTailCallReg(masm);
2327
2328 // Push arguments.
2329 masm.pushValue(R0);
2330 masm.push(BaselineStubReg);
2331 masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
2332
2333 return tailCallVM(fun, masm);
2334 }
2335
2336 //
2337 // ToBool_Int32
2338 //
2339
// Optimized ToBool stub for int32 operands: truthy iff the payload is non-zero.
bool
ICToBool_Int32::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestInt32(Assembler::NotEqual, R0, &failure);

    Label ifFalse;
    masm.branchTestInt32Truthy(false, R0, &ifFalse);

    masm.moveValue(BooleanValue(true), R0);
    EmitReturnFromIC(masm);

    masm.bind(&ifFalse);
    masm.moveValue(BooleanValue(false), R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub.
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
2361
2362 //
2363 // ToBool_String
2364 //
2365
// Optimized ToBool stub for string operands.
bool
ICToBool_String::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestString(Assembler::NotEqual, R0, &failure);

    Label ifFalse;
    masm.branchTestStringTruthy(false, R0, &ifFalse);

    masm.moveValue(BooleanValue(true), R0);
    EmitReturnFromIC(masm);

    masm.bind(&ifFalse);
    masm.moveValue(BooleanValue(false), R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub.
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
2387
2388 //
2389 // ToBool_NullUndefined
2390 //
2391
// Optimized ToBool stub for null/undefined operands: both are always falsy.
bool
ICToBool_NullUndefined::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure, ifFalse;
    // Null jumps straight to the false result; anything that is neither null
    // nor undefined goes to the next stub. Undefined falls through to ifFalse.
    masm.branchTestNull(Assembler::Equal, R0, &ifFalse);
    masm.branchTestUndefined(Assembler::NotEqual, R0, &failure);

    masm.bind(&ifFalse);
    masm.moveValue(BooleanValue(false), R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub.
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
2408
2409 //
2410 // ToBool_Double
2411 //
2412
// Optimized ToBool stub for double operands: unbox and test truthiness.
bool
ICToBool_Double::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure, ifTrue;
    masm.branchTestDouble(Assembler::NotEqual, R0, &failure);
    masm.unboxDouble(R0, FloatReg0);
    masm.branchTestDoubleTruthy(true, FloatReg0, &ifTrue);

    masm.moveValue(BooleanValue(false), R0);
    EmitReturnFromIC(masm);

    masm.bind(&ifTrue);
    masm.moveValue(BooleanValue(true), R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub.
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
2433
2434 //
2435 // ToBool_Object
2436 //
2437
// Optimized ToBool stub for object operands. Fast path decides truthiness
// inline; objects that may emulate undefined take a slow ABI call to
// js::EmulatesUndefined.
bool
ICToBool_Object::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure, ifFalse, slowPath;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    Register objReg = masm.extractObject(R0, ExtractTemp0);
    Register scratch = R1.scratchReg();
    masm.branchTestObjectTruthy(false, objReg, scratch, &slowPath, &ifFalse);

    // If object doesn't emulate undefined, it evaluates to true.
    masm.moveValue(BooleanValue(true), R0);
    EmitReturnFromIC(masm);

    masm.bind(&ifFalse);
    masm.moveValue(BooleanValue(false), R0);
    EmitReturnFromIC(masm);

    // Slow path: ask the VM whether the object emulates undefined, then
    // invert the answer (emulates-undefined means falsy) and box it.
    masm.bind(&slowPath);
    masm.setupUnalignedABICall(1, scratch);
    masm.passABIArg(objReg);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, js::EmulatesUndefined));
    masm.convertBoolToInt32(ReturnReg, ReturnReg);
    masm.xor32(Imm32(1), ReturnReg);
    masm.tagValue(JSVAL_TYPE_BOOLEAN, ReturnReg, R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub.
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
2470
2471 //
2472 // ToNumber_Fallback
2473 //
2474
// Fallback for ToNumber: no optimized stubs exist for this IC, so just
// delegate to the generic ToNumber conversion.
static bool
DoToNumberFallback(JSContext *cx, ICToNumber_Fallback *stub, HandleValue arg, MutableHandleValue ret)
{
    FallbackICSpew(cx, stub, "ToNumber");
    ret.set(arg);
    return ToNumber(cx, ret);
}
2482
// VM-call descriptor for DoToNumberFallback. PopValues(1) discards the value
// synced onto the stack for the expression decompiler when the call returns.
typedef bool (*DoToNumberFallbackFn)(JSContext *, ICToNumber_Fallback *, HandleValue, MutableHandleValue);
static const VMFunction DoToNumberFallbackInfo =
    FunctionInfo<DoToNumberFallbackFn>(DoToNumberFallback, PopValues(1));
2486
// Fallback stub for ToNumber: syncs the operand for the expression
// decompiler, then tail-calls into DoToNumberFallback in the VM.
bool
ICToNumber_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(R0 == JSReturnOperand);

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    // Ensure stack is fully synced for the expression decompiler.
    masm.pushValue(R0);

    // Push arguments, in reverse order of the C++ signature.
    masm.pushValue(R0);
    masm.push(BaselineStubReg);

    return tailCallVM(DoToNumberFallbackInfo, masm);
}
2504
2505 //
2506 // BinaryArith_Fallback
2507 //
2508
2509 // Disable PGO (see bug 851490).
2510 #if defined(_MSC_VER)
2511 # pragma optimize("g", off)
2512 #endif
// Fallback for binary arithmetic and bitwise ops (JSOP_ADD..JSOP_URSH).
// Performs the operation via the generic VM helpers, then tries to attach an
// optimized stub specialized on the observed operand types. Returns false
// only if the operation itself or stub allocation fails.
static bool
DoBinaryArithFallback(JSContext *cx, BaselineFrame *frame, ICBinaryArith_Fallback *stub_,
                      HandleValue lhs, HandleValue rhs, MutableHandleValue ret)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICBinaryArith_Fallback *> stub(frame, stub_);

    RootedScript script(cx, frame->script());
    jsbytecode *pc = stub->icEntry()->pc(script);
    JSOp op = JSOp(*pc);
    FallbackICSpew(cx, stub, "BinaryArith(%s,%d,%d)", js_CodeName[op],
                   int(lhs.isDouble() ? JSVAL_TYPE_DOUBLE : lhs.extractNonDoubleType()),
                   int(rhs.isDouble() ? JSVAL_TYPE_DOUBLE : rhs.extractNonDoubleType()));

    // Don't pass lhs/rhs directly, we need the original values when
    // generating stubs.
    RootedValue lhsCopy(cx, lhs);
    RootedValue rhsCopy(cx, rhs);

    // Perform the arith operation.
    switch(op) {
      case JSOP_ADD:
        // Do an add.
        if (!AddValues(cx, &lhsCopy, &rhsCopy, ret))
            return false;
        break;
      case JSOP_SUB:
        if (!SubValues(cx, &lhsCopy, &rhsCopy, ret))
            return false;
        break;
      case JSOP_MUL:
        if (!MulValues(cx, &lhsCopy, &rhsCopy, ret))
            return false;
        break;
      case JSOP_DIV:
        if (!DivValues(cx, &lhsCopy, &rhsCopy, ret))
            return false;
        break;
      case JSOP_MOD:
        if (!ModValues(cx, &lhsCopy, &rhsCopy, ret))
            return false;
        break;
      case JSOP_BITOR: {
        int32_t result;
        if (!BitOr(cx, lhs, rhs, &result))
            return false;
        ret.setInt32(result);
        break;
      }
      case JSOP_BITXOR: {
        int32_t result;
        if (!BitXor(cx, lhs, rhs, &result))
            return false;
        ret.setInt32(result);
        break;
      }
      case JSOP_BITAND: {
        int32_t result;
        if (!BitAnd(cx, lhs, rhs, &result))
            return false;
        ret.setInt32(result);
        break;
      }
      case JSOP_LSH: {
        int32_t result;
        if (!BitLsh(cx, lhs, rhs, &result))
            return false;
        ret.setInt32(result);
        break;
      }
      case JSOP_RSH: {
        int32_t result;
        if (!BitRsh(cx, lhs, rhs, &result))
            return false;
        ret.setInt32(result);
        break;
      }
      case JSOP_URSH: {
        if (!UrshOperation(cx, lhs, rhs, ret))
            return false;
        break;
      }
      default:
        MOZ_ASSUME_UNREACHABLE("Unhandled baseline arith op");
    }

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    if (ret.isDouble())
        stub->setSawDoubleResult();

    // Check to see if a new stub should be generated.
    if (stub->numOptimizedStubs() >= ICBinaryArith_Fallback::MAX_OPTIMIZED_STUBS) {
        stub->noteUnoptimizableOperands();
        return true;
    }

    // Handle string concat.
    if (op == JSOP_ADD) {
        if (lhs.isString() && rhs.isString()) {
            IonSpew(IonSpew_BaselineIC, "  Generating %s(String, String) stub", js_CodeName[op]);
            JS_ASSERT(ret.isString());
            ICBinaryArith_StringConcat::Compiler compiler(cx);
            ICStub *strcatStub = compiler.getStub(compiler.getStubSpace(script));
            if (!strcatStub)
                return false;
            stub->addNewStub(strcatStub);
            return true;
        }

        if ((lhs.isString() && rhs.isObject()) || (lhs.isObject() && rhs.isString())) {
            IonSpew(IonSpew_BaselineIC, "  Generating %s(%s, %s) stub", js_CodeName[op],
                    lhs.isString() ? "String" : "Object",
                    lhs.isString() ? "Object" : "String");
            JS_ASSERT(ret.isString());
            ICBinaryArith_StringObjectConcat::Compiler compiler(cx, lhs.isString());
            ICStub *strcatStub = compiler.getStub(compiler.getStubSpace(script));
            if (!strcatStub)
                return false;
            stub->addNewStub(strcatStub);
            return true;
        }
    }

    // Boolean-with-boolean or boolean-with-int32 operands, for the ops that
    // have a specialized stub.
    if (((lhs.isBoolean() && (rhs.isBoolean() || rhs.isInt32())) ||
         (rhs.isBoolean() && (lhs.isBoolean() || lhs.isInt32()))) &&
        (op == JSOP_ADD || op == JSOP_SUB || op == JSOP_BITOR || op == JSOP_BITAND ||
         op == JSOP_BITXOR))
    {
        IonSpew(IonSpew_BaselineIC, "  Generating %s(%s, %s) stub", js_CodeName[op],
                lhs.isBoolean() ? "Boolean" : "Int32", rhs.isBoolean() ? "Boolean" : "Int32");
        ICBinaryArith_BooleanWithInt32::Compiler compiler(cx, op, lhs.isBoolean(), rhs.isBoolean());
        ICStub *arithStub = compiler.getStub(compiler.getStubSpace(script));
        if (!arithStub)
            return false;
        stub->addNewStub(arithStub);
        return true;
    }

    // Handle only int32 or double.
    if (!lhs.isNumber() || !rhs.isNumber()) {
        stub->noteUnoptimizableOperands();
        return true;
    }

    JS_ASSERT(ret.isNumber());

    if (lhs.isDouble() || rhs.isDouble() || ret.isDouble()) {
        if (!cx->runtime()->jitSupportsFloatingPoint)
            return true;

        switch (op) {
          case JSOP_ADD:
          case JSOP_SUB:
          case JSOP_MUL:
          case JSOP_DIV:
          case JSOP_MOD: {
            // Unlink int32 stubs, it's faster to always use the double stub.
            stub->unlinkStubsWithKind(cx, ICStub::BinaryArith_Int32);
            IonSpew(IonSpew_BaselineIC, "  Generating %s(Double, Double) stub", js_CodeName[op]);

            ICBinaryArith_Double::Compiler compiler(cx, op);
            ICStub *doubleStub = compiler.getStub(compiler.getStubSpace(script));
            if (!doubleStub)
                return false;
            stub->addNewStub(doubleStub);
            return true;
          }
          default:
            break;
        }
    }

    if (lhs.isInt32() && rhs.isInt32()) {
        // If an int32 op already overflowed to a double result, allow the
        // int32 stub to produce doubles and drop any stricter int32 stubs.
        bool allowDouble = ret.isDouble();
        if (allowDouble)
            stub->unlinkStubsWithKind(cx, ICStub::BinaryArith_Int32);
        IonSpew(IonSpew_BaselineIC, "  Generating %s(Int32, Int32%s) stub", js_CodeName[op],
                allowDouble ? " => Double" : "");
        ICBinaryArith_Int32::Compiler compilerInt32(cx, op, allowDouble);
        ICStub *int32Stub = compilerInt32.getStub(compilerInt32.getStubSpace(script));
        if (!int32Stub)
            return false;
        stub->addNewStub(int32Stub);
        return true;
    }

    // Handle Double <BITOP> Int32 or Int32 <BITOP> Double case.
    if (((lhs.isDouble() && rhs.isInt32()) || (lhs.isInt32() && rhs.isDouble())) &&
        ret.isInt32())
    {
        switch(op) {
          case JSOP_BITOR:
          case JSOP_BITXOR:
          case JSOP_BITAND: {
            IonSpew(IonSpew_BaselineIC, "  Generating %s(%s, %s) stub", js_CodeName[op],
                    lhs.isDouble() ? "Double" : "Int32",
                    lhs.isDouble() ? "Int32" : "Double");
            ICBinaryArith_DoubleWithInt32::Compiler compiler(cx, op, lhs.isDouble());
            ICStub *optStub = compiler.getStub(compiler.getStubSpace(script));
            if (!optStub)
                return false;
            stub->addNewStub(optStub);
            return true;
          }
          default:
            break;
        }
    }

    stub->noteUnoptimizableOperands();
    return true;
}
2728 #if defined(_MSC_VER)
2729 # pragma optimize("", on)
2730 #endif
2731
// VM-call descriptor for DoBinaryArithFallback. PopValues(2) discards the two
// operand values synced onto the stack for the expression decompiler.
typedef bool (*DoBinaryArithFallbackFn)(JSContext *, BaselineFrame *, ICBinaryArith_Fallback *,
                                        HandleValue, HandleValue, MutableHandleValue);
static const VMFunction DoBinaryArithFallbackInfo =
    FunctionInfo<DoBinaryArithFallbackFn>(DoBinaryArithFallback, PopValues(2));
2736
// Fallback stub for binary arith ops: syncs both operands for the expression
// decompiler, then tail-calls into DoBinaryArithFallback in the VM.
bool
ICBinaryArith_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(R0 == JSReturnOperand);

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    // Ensure stack is fully synced for the expression decompiler.
    masm.pushValue(R0);
    masm.pushValue(R1);

    // Push arguments, in reverse order of the C++ signature.
    masm.pushValue(R1);
    masm.pushValue(R0);
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(DoBinaryArithFallbackInfo, masm);
}
2757
2758 static bool
2759 DoConcatStrings(JSContext *cx, HandleValue lhs, HandleValue rhs, MutableHandleValue res)
2760 {
2761 JS_ASSERT(lhs.isString());
2762 JS_ASSERT(rhs.isString());
2763 JSString *lstr = lhs.toString();
2764 JSString *rstr = rhs.toString();
2765 JSString *result = ConcatStrings<NoGC>(cx, lstr, rstr);
2766 if (result) {
2767 res.set(StringValue(result));
2768 return true;
2769 }
2770
2771 RootedString rootedl(cx, lstr), rootedr(cx, rstr);
2772 result = ConcatStrings<CanGC>(cx, rootedl, rootedr);
2773 if (!result)
2774 return false;
2775
2776 res.set(StringValue(result));
2777 return true;
2778 }
2779
// VM-call descriptor for DoConcatStrings.
typedef bool (*DoConcatStringsFn)(JSContext *, HandleValue, HandleValue, MutableHandleValue);
static const VMFunction DoConcatStringsInfo = FunctionInfo<DoConcatStringsFn>(DoConcatStrings);
2782
// Optimized stub for string + string: guards both operands are strings and
// tail-calls DoConcatStrings in the VM.
bool
ICBinaryArith_StringConcat::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestString(Assembler::NotEqual, R0, &failure);
    masm.branchTestString(Assembler::NotEqual, R1, &failure);

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    // Push arguments, in reverse order of the C++ signature.
    masm.pushValue(R1);
    masm.pushValue(R0);
    if (!tailCallVM(DoConcatStringsInfo, masm))
        return false;

    // Failure case - jump to next stub.
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
2803
// Convert an object value to a string for concatenation, via ToPrimitive then
// ToString. Returns nullptr on failure.
static JSString *
ConvertObjectToStringForConcat(JSContext *cx, HandleValue obj)
{
    JS_ASSERT(obj.isObject());
    RootedValue rootedObj(cx, obj);
    if (!ToPrimitive(cx, &rootedObj))
        return nullptr;
    return ToString<CanGC>(cx, rootedObj);
}
2813
// Concatenate a string with an object (JSOP_ADD where exactly one operand is
// a string). The object operand is converted first, and only then is the raw
// JSString* extracted from the already-string operand.
// NOTE(review): this ordering looks deliberate — the conversion can run
// arbitrary code (ToPrimitive/ToString), so extracting the raw pointer
// earlier would risk it being invalidated; preserve the order.
static bool
DoConcatStringObject(JSContext *cx, bool lhsIsString, HandleValue lhs, HandleValue rhs,
                     MutableHandleValue res)
{
    JSString *lstr = nullptr;
    JSString *rstr = nullptr;
    if (lhsIsString) {
        // Convert rhs first.
        JS_ASSERT(lhs.isString() && rhs.isObject());
        rstr = ConvertObjectToStringForConcat(cx, rhs);
        if (!rstr)
            return false;

        // lhs is already string.
        lstr = lhs.toString();
    } else {
        JS_ASSERT(rhs.isString() && lhs.isObject());
        // Convert lhs first.
        lstr = ConvertObjectToStringForConcat(cx, lhs);
        if (!lstr)
            return false;

        // rhs is already string.
        rstr = rhs.toString();
    }

    // Fast path without GC, then rooted retry on failure.
    JSString *str = ConcatStrings<NoGC>(cx, lstr, rstr);
    if (!str) {
        RootedString nlstr(cx, lstr), nrstr(cx, rstr);
        str = ConcatStrings<CanGC>(cx, nlstr, nrstr);
        if (!str)
            return false;
    }

    // Technically, we need to call TypeScript::MonitorString for this PC, however
    // it was called when this stub was attached so it's OK.

    res.setString(str);
    return true;
}
2854
// VM-call descriptor for DoConcatStringObject. PopValues(2) discards the two
// operand values synced onto the stack for the expression decompiler.
typedef bool (*DoConcatStringObjectFn)(JSContext *, bool lhsIsString, HandleValue, HandleValue,
                                       MutableHandleValue);
static const VMFunction DoConcatStringObjectInfo =
    FunctionInfo<DoConcatStringObjectFn>(DoConcatStringObject, PopValues(2));
2859
// Optimized stub for string + object (either order): guards the operand
// types per lhsIsString_, then tail-calls DoConcatStringObject in the VM.
bool
ICBinaryArith_StringObjectConcat::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    if (lhsIsString_) {
        masm.branchTestString(Assembler::NotEqual, R0, &failure);
        masm.branchTestObject(Assembler::NotEqual, R1, &failure);
    } else {
        masm.branchTestObject(Assembler::NotEqual, R0, &failure);
        masm.branchTestString(Assembler::NotEqual, R1, &failure);
    }

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    // Sync for the decompiler.
    masm.pushValue(R0);
    masm.pushValue(R1);

    // Push arguments, in reverse order of the C++ signature.
    masm.pushValue(R1);
    masm.pushValue(R0);
    masm.push(Imm32(lhsIsString_));
    if (!tailCallVM(DoConcatStringObjectInfo, masm))
        return false;

    // Failure case - jump to next stub.
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
2891
// Optimized stub for arithmetic on two numbers producing a double. Converts
// both operands to doubles (guard fails if not numeric) and performs the op
// inline, except JSOP_MOD which needs an ABI call to NumberMod.
bool
ICBinaryArith_Double::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.ensureDouble(R0, FloatReg0, &failure);
    masm.ensureDouble(R1, FloatReg1, &failure);

    switch (op) {
      case JSOP_ADD:
        masm.addDouble(FloatReg1, FloatReg0);
        break;
      case JSOP_SUB:
        masm.subDouble(FloatReg1, FloatReg0);
        break;
      case JSOP_MUL:
        masm.mulDouble(FloatReg1, FloatReg0);
        break;
      case JSOP_DIV:
        masm.divDouble(FloatReg1, FloatReg0);
        break;
      case JSOP_MOD:
        // No inline double modulus; call out to NumberMod.
        masm.setupUnalignedABICall(2, R0.scratchReg());
        masm.passABIArg(FloatReg0, MoveOp::DOUBLE);
        masm.passABIArg(FloatReg1, MoveOp::DOUBLE);
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, NumberMod), MoveOp::DOUBLE);
        JS_ASSERT(ReturnFloatReg == FloatReg0);
        break;
      default:
        MOZ_ASSUME_UNREACHABLE("Unexpected op");
    }

    masm.boxDouble(FloatReg0, R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub.
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
2931
// Stub for add/sub and the bitwise ops where each operand is either a boolean
// or an int32 (the combination is baked into the stub via lhsIsBool_ and
// rhsIsBool_). Booleans are unboxed to 0/1 and the op is performed on int32
// registers. On int32 overflow of add/sub, the operation is undone so the
// operand registers are intact before falling through to the next stub.
bool
ICBinaryArith_BooleanWithInt32::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    // Guard each operand against the type this stub was specialized for.
    if (lhsIsBool_)
        masm.branchTestBoolean(Assembler::NotEqual, R0, &failure);
    else
        masm.branchTestInt32(Assembler::NotEqual, R0, &failure);

    if (rhsIsBool_)
        masm.branchTestBoolean(Assembler::NotEqual, R1, &failure);
    else
        masm.branchTestInt32(Assembler::NotEqual, R1, &failure);

    Register lhsReg = lhsIsBool_ ? masm.extractBoolean(R0, ExtractTemp0)
                                 : masm.extractInt32(R0, ExtractTemp0);
    Register rhsReg = rhsIsBool_ ? masm.extractBoolean(R1, ExtractTemp1)
                                 : masm.extractInt32(R1, ExtractTemp1);

    JS_ASSERT(op_ == JSOP_ADD || op_ == JSOP_SUB ||
              op_ == JSOP_BITOR || op_ == JSOP_BITXOR || op_ == JSOP_BITAND);

    switch(op_) {
      case JSOP_ADD: {
        Label fixOverflow;

        masm.branchAdd32(Assembler::Overflow, rhsReg, lhsReg, &fixOverflow);
        masm.tagValue(JSVAL_TYPE_INT32, lhsReg, R0);
        EmitReturnFromIC(masm);

        // Overflow: restore lhsReg to its original value before bailing to
        // the next stub, since R0 may alias it.
        masm.bind(&fixOverflow);
        masm.sub32(rhsReg, lhsReg);
        // Proceed to failure below.
        break;
      }
      case JSOP_SUB: {
        Label fixOverflow;

        masm.branchSub32(Assembler::Overflow, rhsReg, lhsReg, &fixOverflow);
        masm.tagValue(JSVAL_TYPE_INT32, lhsReg, R0);
        EmitReturnFromIC(masm);

        // Overflow: undo the subtract, as above.
        masm.bind(&fixOverflow);
        masm.add32(rhsReg, lhsReg);
        // Proceed to failure below.
        break;
      }
      case JSOP_BITOR: {
        masm.orPtr(rhsReg, lhsReg);
        masm.tagValue(JSVAL_TYPE_INT32, lhsReg, R0);
        EmitReturnFromIC(masm);
        break;
      }
      case JSOP_BITXOR: {
        masm.xorPtr(rhsReg, lhsReg);
        masm.tagValue(JSVAL_TYPE_INT32, lhsReg, R0);
        EmitReturnFromIC(masm);
        break;
      }
      case JSOP_BITAND: {
        masm.andPtr(rhsReg, lhsReg);
        masm.tagValue(JSVAL_TYPE_INT32, lhsReg, R0);
        EmitReturnFromIC(masm);
        break;
      }
      default:
        MOZ_ASSUME_UNREACHABLE("Unhandled op for BinaryArith_BooleanWithInt32.");
    }

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
3006
// Stub for the bitwise ops where one operand is a double and the other an
// int32 (side chosen by lhsIsDouble_). The double is truncated to an int32 —
// via the inline fast path when possible, otherwise an ABI call to
// js::ToInt32 — and the bitwise op is applied to the two int32 values.
bool
ICBinaryArith_DoubleWithInt32::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(op == JSOP_BITOR || op == JSOP_BITAND || op == JSOP_BITXOR);

    Label failure;
    Register intReg;
    Register scratchReg;
    // Guard types and unbox: the int32 side into intReg, the double side
    // into FloatReg0. The scratch comes from the register backing the
    // double-side ValueOperand, which is dead after the unbox.
    if (lhsIsDouble_) {
        masm.branchTestDouble(Assembler::NotEqual, R0, &failure);
        masm.branchTestInt32(Assembler::NotEqual, R1, &failure);
        intReg = masm.extractInt32(R1, ExtractTemp0);
        masm.unboxDouble(R0, FloatReg0);
        scratchReg = R0.scratchReg();
    } else {
        masm.branchTestInt32(Assembler::NotEqual, R0, &failure);
        masm.branchTestDouble(Assembler::NotEqual, R1, &failure);
        intReg = masm.extractInt32(R0, ExtractTemp0);
        masm.unboxDouble(R1, FloatReg0);
        scratchReg = R1.scratchReg();
    }

    // Truncate the double to an int32.
    {
        Label doneTruncate;
        Label truncateABICall;
        masm.branchTruncateDouble(FloatReg0, scratchReg, &truncateABICall);
        masm.jump(&doneTruncate);

        // Slow path: js::ToInt32 performs the ECMA ToInt32 conversion.
        // intReg must be preserved across the call, so save/restore it.
        masm.bind(&truncateABICall);
        masm.push(intReg);
        masm.setupUnalignedABICall(1, scratchReg);
        masm.passABIArg(FloatReg0, MoveOp::DOUBLE);
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, js::ToInt32));
        masm.storeCallResult(scratchReg);
        masm.pop(intReg);

        masm.bind(&doneTruncate);
    }

    Register intReg2 = scratchReg;
    // All handled ops commute, so no need to worry about ordering.
    switch(op) {
      case JSOP_BITOR:
        masm.orPtr(intReg, intReg2);
        break;
      case JSOP_BITXOR:
        masm.xorPtr(intReg, intReg2);
        break;
      case JSOP_BITAND:
        masm.andPtr(intReg, intReg2);
        break;
      default:
        MOZ_ASSUME_UNREACHABLE("Unhandled op for BinaryArith_DoubleWithInt32.");
    }
    masm.tagValue(JSVAL_TYPE_INT32, intReg2, R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
3070
3071 //
3072 // UnaryArith_Fallback
3073 //
3074
3075 // Disable PGO (see bug 851490).
3076 #if defined(_MSC_VER)
3077 # pragma optimize("g", off)
3078 #endif
// Fallback path for unary arithmetic (JSOP_BITNOT / JSOP_NEG): performs the
// operation in the interpreter-equivalent way, then tries to attach an
// optimized stub (Int32 or Double) specialized for the observed operand and
// result types. Returns false only on exception.
static bool
DoUnaryArithFallback(JSContext *cx, BaselineFrame *frame, ICUnaryArith_Fallback *stub_,
                     HandleValue val, MutableHandleValue res)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICUnaryArith_Fallback *> stub(frame, stub_);

    RootedScript script(cx, frame->script());
    jsbytecode *pc = stub->icEntry()->pc(script);
    JSOp op = JSOp(*pc);
    FallbackICSpew(cx, stub, "UnaryArith(%s)", js_CodeName[op]);

    // Perform the actual operation first; stub attachment below depends on
    // the result's type.
    switch (op) {
      case JSOP_BITNOT: {
        int32_t result;
        if (!BitNot(cx, val, &result))
            return false;
        res.setInt32(result);
        break;
      }
      case JSOP_NEG:
        if (!NegOperation(cx, script, pc, val, res))
            return false;
        break;
      default:
        MOZ_ASSUME_UNREACHABLE("Unexpected op");
    }

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    if (res.isDouble())
        stub->setSawDoubleResult();

    if (stub->numOptimizedStubs() >= ICUnaryArith_Fallback::MAX_OPTIMIZED_STUBS) {
        // TODO: Discard/replace stubs.
        return true;
    }

    // Int32 in, Int32 out: attach the int32-specialized stub.
    if (val.isInt32() && res.isInt32()) {
        IonSpew(IonSpew_BaselineIC, "  Generating %s(Int32 => Int32) stub", js_CodeName[op]);
        ICUnaryArith_Int32::Compiler compiler(cx, op);
        ICStub *int32Stub = compiler.getStub(compiler.getStubSpace(script));
        if (!int32Stub)
            return false;
        stub->addNewStub(int32Stub);
        return true;
    }

    if (val.isNumber() && res.isNumber() && cx->runtime()->jitSupportsFloatingPoint) {
        IonSpew(IonSpew_BaselineIC, "  Generating %s(Number => Number) stub", js_CodeName[op]);

        // Unlink int32 stubs, the double stub handles both cases and TI specializes for both.
        stub->unlinkStubsWithKind(cx, ICStub::UnaryArith_Int32);

        ICUnaryArith_Double::Compiler compiler(cx, op);
        ICStub *doubleStub = compiler.getStub(compiler.getStubSpace(script));
        if (!doubleStub)
            return false;
        stub->addNewStub(doubleStub);
        return true;
    }

    return true;
}
3145 #if defined(_MSC_VER)
3146 # pragma optimize("", on)
3147 #endif
3148
// VM-call glue for the unary-arith fallback; PopValues(1) discards the Value
// synced for the expression decompiler.
typedef bool (*DoUnaryArithFallbackFn)(JSContext *, BaselineFrame *, ICUnaryArith_Fallback *,
                                       HandleValue, MutableHandleValue);
static const VMFunction DoUnaryArithFallbackInfo =
    FunctionInfo<DoUnaryArithFallbackFn>(DoUnaryArithFallback, PopValues(1));
3153
// Fallback stub: forwards the operand, stub pointer, and frame pointer to
// DoUnaryArithFallback via a tail call.
bool
ICUnaryArith_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(R0 == JSReturnOperand);

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    // Ensure stack is fully synced for the expression decompiler.
    masm.pushValue(R0);

    // Push arguments (consumed right-to-left: frame, stub, val).
    masm.pushValue(R0);
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(DoUnaryArithFallbackInfo, masm);
}
3172
// Double-specialized unary arith stub. JSOP_NEG flips the sign in the FP
// register; JSOP_BITNOT first truncates the double to an int32 (inline fast
// path, ABI call to js::ToInt32 as fallback) and then applies not32.
bool
ICUnaryArith_Double::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.ensureDouble(R0, FloatReg0, &failure);

    JS_ASSERT(op == JSOP_NEG || op == JSOP_BITNOT);

    if (op == JSOP_NEG) {
        masm.negateDouble(FloatReg0);
        masm.boxDouble(FloatReg0, R0);
    } else {
        // Truncate the double to an int32.
        Register scratchReg = R1.scratchReg();

        Label doneTruncate;
        Label truncateABICall;
        masm.branchTruncateDouble(FloatReg0, scratchReg, &truncateABICall);
        masm.jump(&doneTruncate);

        // Slow path: full ECMA ToInt32 conversion in C++.
        masm.bind(&truncateABICall);
        masm.setupUnalignedABICall(1, scratchReg);
        masm.passABIArg(FloatReg0, MoveOp::DOUBLE);
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, js::ToInt32));
        masm.storeCallResult(scratchReg);

        masm.bind(&doneTruncate);
        masm.not32(scratchReg);
        masm.tagValue(JSVAL_TYPE_INT32, scratchReg, R0);
    }

    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
3211
3212 //
3213 // GetElem_Fallback
3214 //
3215
3216 static void GetFixedOrDynamicSlotOffset(HandleObject obj, uint32_t slot,
3217 bool *isFixed, uint32_t *offset)
3218 {
3219 JS_ASSERT(isFixed);
3220 JS_ASSERT(offset);
3221 *isFixed = obj->isFixedSlot(slot);
3222 *offset = *isFixed ? JSObject::getFixedSlotOffset(slot)
3223 : obj->dynamicSlotIndex(slot) * sizeof(Value);
3224 }
3225
3226 static bool
3227 IsCacheableDOMProxy(JSObject *obj)
3228 {
3229 if (!obj->is<ProxyObject>())
3230 return false;
3231
3232 BaseProxyHandler *handler = obj->as<ProxyObject>().handler();
3233
3234 if (handler->family() != GetDOMProxyHandlerFamily())
3235 return false;
3236
3237 if (obj->numFixedSlots() <= GetDOMProxyExpandoSlot())
3238 return false;
3239
3240 return true;
3241 }
3242
// Return the prototype of a cacheable DOM proxy (may be null). Callers have
// already guarded that the proxy's proto is cacheable, so the tagged proto
// can be read directly.
static JSObject *
GetDOMProxyProto(JSObject *obj)
{
    JS_ASSERT(IsCacheableDOMProxy(obj));
    return obj->getTaggedProto().toObjectOrNull();
}
3249
// Emit the guard sequence for stubs specialized on a DOM proxy receiver.
//   object                   - register holding the receiver.
//   checkProxyHandlerAddr    - stub field holding the expected proxy handler.
//   checkExpandoShapeAddr    - stub field with the expected expando shape, or
//                              null if no expando check is wanted.
//   expandoAndGenerationAddr / generationAddr
//                            - stub fields for generation-tracked expandos
//                              (both null otherwise).
//   scratch                  - clobbered scratch register.
//   domProxyRegSet           - registers available to carve a ValueOperand out of.
//   checkFailed              - jump target on any guard failure.
static void
GenerateDOMProxyChecks(JSContext *cx, MacroAssembler &masm, Register object,
                       Address checkProxyHandlerAddr,
                       Address *checkExpandoShapeAddr,
                       Address *expandoAndGenerationAddr,
                       Address *generationAddr,
                       Register scratch,
                       GeneralRegisterSet &domProxyRegSet,
                       Label *checkFailed)
{
    // Guard the following:
    //      1. The object is a DOMProxy.
    //      2. The object does not have expando properties, or has an expando
    //          which is known to not have the desired property.
    Address handlerAddr(object, ProxyObject::offsetOfHandler());
    Address expandoAddr(object, JSObject::getFixedSlotOffset(GetDOMProxyExpandoSlot()));

    // Check that object is a DOMProxy.
    masm.loadPtr(checkProxyHandlerAddr, scratch);
    masm.branchPrivatePtr(Assembler::NotEqual, handlerAddr, scratch, checkFailed);

    // At this point, if not checking for an expando object, just return.
    if (!checkExpandoShapeAddr)
        return;

    // For the remaining code, we need to reserve some registers to load a value.
    // This is ugly, but unavoidable.
    ValueOperand tempVal = domProxyRegSet.takeAnyValue();
    masm.pushValue(tempVal);

    Label failDOMProxyCheck;
    Label domProxyOk;

    if (expandoAndGenerationAddr) {
        JS_ASSERT(generationAddr);

        // Guard that the proxy's expando slot still points at the same
        // ExpandoAndGeneration, and that the generation counter is unchanged.
        masm.loadPtr(*expandoAndGenerationAddr, tempVal.scratchReg());
        masm.branchPrivatePtr(Assembler::NotEqual, expandoAddr, tempVal.scratchReg(),
                              &failDOMProxyCheck);

        masm.load32(*generationAddr, scratch);
        masm.branch32(Assembler::NotEqual,
                      Address(tempVal.scratchReg(), offsetof(ExpandoAndGeneration, generation)),
                      scratch, &failDOMProxyCheck);

        masm.loadValue(Address(tempVal.scratchReg(), 0), tempVal);
    } else {
        masm.loadValue(expandoAddr, tempVal);
    }

    // If the incoming object does not have an expando object then we're sure we're not
    // shadowing.
    masm.branchTestUndefined(Assembler::Equal, tempVal, &domProxyOk);

    // The reference object used to generate this check may not have had an
    // expando object at all, in which case the presence of a non-undefined
    // expando value in the incoming object is automatically a failure.
    masm.loadPtr(*checkExpandoShapeAddr, scratch);
    masm.branchPtr(Assembler::Equal, scratch, ImmPtr(nullptr), &failDOMProxyCheck);

    // Otherwise, ensure that the incoming object has an object for its expando value and that
    // the shape matches.
    masm.branchTestObject(Assembler::NotEqual, tempVal, &failDOMProxyCheck);
    Register objReg = masm.extractObject(tempVal, tempVal.scratchReg());
    masm.branchTestObjShape(Assembler::Equal, objReg, scratch, &domProxyOk);

    // Failure case: restore the tempVal registers and jump to failures.
    masm.bind(&failDOMProxyCheck);
    masm.popValue(tempVal);
    masm.jump(checkFailed);

    // Success case: restore the tempval and proceed.
    masm.bind(&domProxyOk);
    masm.popValue(tempVal);
}
3325
// Look up a property's shape on an object, being careful never to do any effectful
// operations.  This procedure not yielding a shape should not be taken as a lack of
// existence of the property on the object.
//
// If checkDOMProxy is non-null and |obj| is a cacheable DOM proxy, the lookup
// proceeds on the proxy's prototype and the caller is told (via the three
// optional out-params) whether/how the proxy may shadow the property.
static bool
EffectlesslyLookupProperty(JSContext *cx, HandleObject obj, HandlePropertyName name,
                           MutableHandleObject holder, MutableHandleShape shape,
                           bool *checkDOMProxy=nullptr,
                           DOMProxyShadowsResult *shadowsResult=nullptr,
                           bool *domProxyHasGeneration=nullptr)
{
    shape.set(nullptr);
    holder.set(nullptr);

    if (checkDOMProxy)
        *checkDOMProxy = false;

    // Check for list base if asked to.
    RootedObject checkObj(cx, obj);
    if (checkDOMProxy && IsCacheableDOMProxy(obj)) {
        JS_ASSERT(domProxyHasGeneration);
        JS_ASSERT(shadowsResult);

        *checkDOMProxy = true;
        // An uncacheable proto means we can't guard the lookup; bail with no
        // shape (callers treat that as "don't optimize").
        if (obj->hasUncacheableProto())
            return true;

        RootedId id(cx, NameToId(name));
        *shadowsResult = GetDOMProxyShadowsCheck()(cx, obj, id);
        if (*shadowsResult == ShadowCheckFailed)
            return false;

        // If the proxy itself shadows the property, it is its own holder.
        if (*shadowsResult == Shadows) {
            holder.set(obj);
            return true;
        }

        *domProxyHasGeneration = (*shadowsResult == DoesntShadowUnique);

        checkObj = GetDOMProxyProto(obj);
        if (!checkObj)
            return true;
    } else if (!obj->isNative()) {
        return true;
    }

    // Only walk the proto chain when it's known to be side-effect free; an
    // idempotent chain lets us use the generic lookup, otherwise restrict
    // to a pure native lookup on checkObj itself.
    if (checkObj->hasIdempotentProtoChain()) {
        if (!JSObject::lookupProperty(cx, checkObj, name, holder, shape))
            return false;
    } else if (checkObj->isNative()) {
        shape.set(checkObj->nativeLookup(cx, NameToId(name)));
        if (shape)
            holder.set(checkObj);
    }
    return true;
}
3381
3382 static bool
3383 IsCacheableProtoChain(JSObject *obj, JSObject *holder, bool isDOMProxy=false)
3384 {
3385 JS_ASSERT_IF(isDOMProxy, IsCacheableDOMProxy(obj));
3386 JS_ASSERT_IF(!isDOMProxy, obj->isNative());
3387
3388 // Don't handle objects which require a prototype guard. This should
3389 // be uncommon so handling it is likely not worth the complexity.
3390 if (obj->hasUncacheableProto())
3391 return false;
3392
3393 JSObject *cur = obj;
3394 while (cur != holder) {
3395 // We cannot assume that we find the holder object on the prototype
3396 // chain and must check for null proto. The prototype chain can be
3397 // altered during the lookupProperty call.
3398 JSObject *proto;
3399 if (isDOMProxy && cur == obj)
3400 proto = cur->getTaggedProto().toObjectOrNull();
3401 else
3402 proto = cur->getProto();
3403
3404 if (!proto || !proto->isNative())
3405 return false;
3406
3407 if (proto->hasUncacheableProto())
3408 return false;
3409
3410 cur = proto;
3411 }
3412 return true;
3413 }
3414
3415 static bool
3416 IsCacheableGetPropReadSlot(JSObject *obj, JSObject *holder, Shape *shape, bool isDOMProxy=false)
3417 {
3418 if (!shape || !IsCacheableProtoChain(obj, holder, isDOMProxy))
3419 return false;
3420
3421 if (!shape->hasSlot() || !shape->hasDefaultGetter())
3422 return false;
3423
3424 return true;
3425 }
3426
// Check whether a property get can be cached as a getter call. On success,
// *isScripted reports whether the getter is a scripted function (with JIT
// code) as opposed to a native one.
static bool
IsCacheableGetPropCall(JSContext *cx, JSObject *obj, JSObject *holder, Shape *shape, bool *isScripted,
                       bool isDOMProxy=false)
{
    JS_ASSERT(isScripted);

    if (!shape || !IsCacheableProtoChain(obj, holder, isDOMProxy))
        return false;

    // A slot-backed or default-getter property is a read, not a call.
    if (shape->hasSlot() || shape->hasDefaultGetter())
        return false;

    if (!shape->hasGetterValue())
        return false;

    if (!shape->getterValue().isObject() || !shape->getterObject()->is<JSFunction>())
        return false;

    JSFunction *func = &shape->getterObject()->as<JSFunction>();

#ifdef JSGC_GENERATIONAL
    // Information from get prop call ICs may be used directly from Ion code,
    // and should not be nursery allocated.
    if (cx->runtime()->gcNursery.isInside(holder) || cx->runtime()->gcNursery.isInside(func))
        return false;
#endif

    if (func->isNative()) {
        *isScripted = false;
        return true;
    }

    // Scripted getters are only cacheable once they have been compiled.
    if (!func->hasJITCode())
        return false;

    *isScripted = true;
    return true;
}
3465
3466 static bool
3467 IsCacheableSetPropWriteSlot(JSObject *obj, Shape *oldShape, JSObject *holder, Shape *shape)
3468 {
3469 if (!shape)
3470 return false;
3471
3472 // Object shape must not have changed during the property set.
3473 if (obj->lastProperty() != oldShape)
3474 return false;
3475
3476 // Currently we only optimize direct writes.
3477 if (obj != holder)
3478 return false;
3479
3480 if (!shape->hasSlot() || !shape->hasDefaultSetter() || !shape->writable())
3481 return false;
3482
3483 return true;
3484 }
3485
// Check whether a property set that *added* a new property can be cached as
// a shape-transition stub. On success, *protoChainDepth receives the number
// of prototypes that must be shape-guarded.
static bool
IsCacheableSetPropAddSlot(JSContext *cx, HandleObject obj, HandleShape oldShape, uint32_t oldSlots,
                          HandleId id, HandleObject holder, HandleShape shape,
                          size_t *protoChainDepth)
{
    if (!shape)
        return false;

    // Property must be set directly on object, and be last added property of object.
    if (obj != holder || shape != obj->lastProperty())
        return false;

    // Object must be extensible, oldShape must be immediate parent of curShape.
    if (!obj->nonProxyIsExtensible() || obj->lastProperty()->previous() != oldShape)
        return false;

    // Basic shape checks.
    if (shape->inDictionary() || !shape->hasSlot() || !shape->hasDefaultSetter() ||
        !shape->writable())
    {
        return false;
    }

    // If object has a non-default resolve hook, don't inline
    if (obj->getClass()->resolve != JS_ResolveStub)
        return false;

    size_t chainDepth = 0;
    // walk up the object prototype chain and ensure that all prototypes
    // are native, and that all prototypes have setter defined on the property
    for (JSObject *proto = obj->getProto(); proto; proto = proto->getProto()) {
        chainDepth++;
        // if prototype is non-native, don't optimize
        if (!proto->isNative())
            return false;

        // if prototype defines this property in a non-plain way, don't optimize
        Shape *protoShape = proto->nativeLookup(cx, id);
        if (protoShape && !protoShape->hasDefaultSetter())
            return false;

        // Otherise, if there's no such property, watch out for a resolve hook that would need
        // to be invoked and thus prevent inlining of property addition.
        if (proto->getClass()->resolve != JS_ResolveStub)
             return false;
    }

    // Only add a IC entry if the dynamic slots didn't change when the shapes
    // changed.  Need to ensure that a shape change for a subsequent object
    // won't involve reallocating the slot array.
    if (obj->numDynamicSlots() != oldSlots)
        return false;

    *protoChainDepth = chainDepth;
    return true;
}
3542
// Check whether a property set can be cached as a setter call. Mirrors
// IsCacheableGetPropCall; *isScripted reports whether the setter is scripted
// (with JIT code) rather than native.
static bool
IsCacheableSetPropCall(JSContext *cx, JSObject *obj, JSObject *holder, Shape *shape, bool *isScripted)
{
    JS_ASSERT(isScripted);

    // Currently we only optimize setter calls for setters bound on prototypes.
    if (obj == holder)
        return false;

    if (!shape || !IsCacheableProtoChain(obj, holder))
        return false;

    // A slot-backed or default-setter property is a write, not a call.
    if (shape->hasSlot() || shape->hasDefaultSetter())
        return false;

    if (!shape->hasSetterValue())
        return false;

    if (!shape->setterValue().isObject() || !shape->setterObject()->is<JSFunction>())
        return false;

    JSFunction *func = &shape->setterObject()->as<JSFunction>();

#ifdef JSGC_GENERATIONAL
    // Information from set prop call ICs may be used directly from Ion code,
    // and should not be nursery allocated.
    if (cx->runtime()->gcNursery.isInside(holder) || cx->runtime()->gcNursery.isInside(func))
        return false;
#endif

    if (func->isNative()) {
        *isScripted = false;
        return true;
    }

    // Scripted setters are only cacheable once they have been compiled.
    if (!func->hasJITCode())
        return false;

    *isScripted = true;
    return true;
}
3584
// Thin VM wrapper around OnUnknownMethod, used by callelem/callprop stubs
// when JS_HAS_NO_SUCH_METHOD handling applies.
static bool
LookupNoSuchMethodHandler(JSContext *cx, HandleObject obj, HandleValue id,
                          MutableHandleValue result)
{
    return OnUnknownMethod(cx, obj, id, result);
}

typedef bool (*LookupNoSuchMethodHandlerFn)(JSContext *, HandleObject, HandleValue,
                                            MutableHandleValue);
static const VMFunction LookupNoSuchMethodHandlerInfo =
    FunctionInfo<LookupNoSuchMethodHandlerFn>(LookupNoSuchMethodHandler);
3596
// Return true if the fallback stub's chain already contains a native GetElem
// stub equivalent to the one about to be attached: same property name, same
// receiver shape, compatible atomization, and (for prototype gets) the same
// holder and holder shape.
static bool
GetElemNativeStubExists(ICGetElem_Fallback *stub, HandleObject obj, HandleObject holder,
                        HandlePropertyName propName, bool needsAtomize)
{
    bool indirect = (obj.get() != holder.get());

    for (ICStubConstIterator iter = stub->beginChainConst(); !iter.atEnd(); iter++) {
        // Skip anything that isn't a native GetElem stub.
        if (iter->kind() != ICStub::GetElem_NativeSlot &&
            iter->kind() != ICStub::GetElem_NativePrototypeSlot &&
            iter->kind() != ICStub::GetElem_NativePrototypeCallNative &&
            iter->kind() != ICStub::GetElem_NativePrototypeCallScripted)
        {
            continue;
        }

        // For an indirect (prototype) get, only prototype stubs can match.
        if (indirect && (iter->kind() != ICStub::GetElem_NativePrototypeSlot &&
                         iter->kind() != ICStub::GetElem_NativePrototypeCallNative &&
                         iter->kind() != ICStub::GetElem_NativePrototypeCallScripted))
        {
            continue;
        }

        ICGetElemNativeStub *getElemNativeStub = reinterpret_cast<ICGetElemNativeStub *>(*iter);
        if (propName != getElemNativeStub->name())
            continue;

        if (obj->lastProperty() != getElemNativeStub->shape())
            continue;

        // If the new stub needs atomization, and the old stub doesn't atomize, then
        // an appropriate stub doesn't exist.
        if (needsAtomize && !getElemNativeStub->needsAtomize())
            continue;

        // For prototype gets, check the holder and holder shape.
        if (indirect) {
            if (iter->isGetElem_NativePrototypeSlot()) {
                ICGetElem_NativePrototypeSlot *protoStub = iter->toGetElem_NativePrototypeSlot();

                if (holder != protoStub->holder())
                    continue;

                if (holder->lastProperty() != protoStub->holderShape())
                    continue;
            } else {
                JS_ASSERT(iter->isGetElem_NativePrototypeCallNative() ||
                          iter->isGetElem_NativePrototypeCallScripted());

                ICGetElemNativePrototypeCallStub *protoStub =
                    reinterpret_cast<ICGetElemNativePrototypeCallStub *>(*iter);

                if (holder != protoStub->holder())
                    continue;

                if (holder->lastProperty() != protoStub->holderShape())
                    continue;
            }
        }

        return true;
    }
    return false;
}
3660
// Unlink native GetElem stubs that would conflict with the one about to be
// attached (stale holder shape, or a non-atomizing stub being superseded by
// an atomizing one). Must only be called after GetElemNativeStubExists has
// reported that no exact match exists.
static void
RemoveExistingGetElemNativeStubs(JSContext *cx, ICGetElem_Fallback *stub, HandleObject obj,
                                 HandleObject holder, HandlePropertyName propName,
                                 bool needsAtomize)
{
    bool indirect = (obj.get() != holder.get());

    for (ICStubIterator iter = stub->beginChain(); !iter.atEnd(); iter++) {
        switch (iter->kind()) {
          case ICStub::GetElem_NativeSlot:
            // Direct-slot stubs are only relevant for direct gets.
            if (indirect)
                continue;
            // FALLTHROUGH: otherwise treat like the other native kinds.
          case ICStub::GetElem_NativePrototypeSlot:
          case ICStub::GetElem_NativePrototypeCallNative:
          case ICStub::GetElem_NativePrototypeCallScripted:
            break;
          default:
            continue;
        }

        ICGetElemNativeStub *getElemNativeStub = reinterpret_cast<ICGetElemNativeStub *>(*iter);
        if (propName != getElemNativeStub->name())
            continue;

        if (obj->lastProperty() != getElemNativeStub->shape())
            continue;

        // For prototype gets, check the holder and holder shape.
        if (indirect) {
            if (iter->isGetElem_NativePrototypeSlot()) {
                ICGetElem_NativePrototypeSlot *protoStub = iter->toGetElem_NativePrototypeSlot();

                if (holder != protoStub->holder())
                    continue;

                // If the holder matches, but the holder's lastProperty doesn't match, then
                // this stub is invalid anyway.  Unlink it.
                if (holder->lastProperty() != protoStub->holderShape()) {
                    iter.unlink(cx);
                    continue;
                }
            } else {
                JS_ASSERT(iter->isGetElem_NativePrototypeCallNative() ||
                          iter->isGetElem_NativePrototypeCallScripted());

                ICGetElemNativePrototypeCallStub *protoStub =
                    reinterpret_cast<ICGetElemNativePrototypeCallStub *>(*iter);

                if (holder != protoStub->holder())
                    continue;

                // If the holder matches, but the holder's lastProperty doesn't match, then
                // this stub is invalid anyway.  Unlink it.
                if (holder->lastProperty() != protoStub->holderShape()) {
                    iter.unlink(cx);
                    continue;
                }
            }
        }

        // If the new stub needs atomization, and the old stub doesn't atomize, then
        // remove the old stub.
        if (needsAtomize && !getElemNativeStub->needsAtomize()) {
            iter.unlink(cx);
            continue;
        }

        // Should never get here, because this means a matching stub exists, and if
        // a matching stub exists, this procedure should never have been called.
        MOZ_ASSUME_UNREACHABLE("Procedure should never have been called.");
    }
}
3733
3734 static bool
3735 TypedArrayGetElemStubExists(ICGetElem_Fallback *stub, HandleObject obj)
3736 {
3737 for (ICStubConstIterator iter = stub->beginChainConst(); !iter.atEnd(); iter++) {
3738 if (!iter->isGetElem_TypedArray())
3739 continue;
3740 if (obj->lastProperty() == iter->toGetElem_TypedArray()->shape())
3741 return true;
3742 }
3743 return false;
3744 }
3745
3746 static bool
3747 ArgumentsGetElemStubExists(ICGetElem_Fallback *stub, ICGetElem_Arguments::Which which)
3748 {
3749 for (ICStubConstIterator iter = stub->beginChainConst(); !iter.atEnd(); iter++) {
3750 if (!iter->isGetElem_Arguments())
3751 continue;
3752 if (iter->toGetElem_Arguments()->which() == which)
3753 return true;
3754 }
3755 return false;
3756 }
3757
3758
// Try to attach a native-object GetElem stub for obj[key] where key is a
// string naming a property: either a slot read (direct or on the prototype)
// or a prototype getter call. Returns false only on exception; not attaching
// a stub is not an error.
static bool TryAttachNativeGetElemStub(JSContext *cx, HandleScript script, jsbytecode *pc,
                                       ICGetElem_Fallback *stub, HandleObject obj,
                                       HandleValue key)
{
    // Native-object GetElem stubs can't deal with non-string keys.
    if (!key.isString())
        return true;

    // Convert to interned property name.
    RootedId id(cx);
    if (!ValueToId<CanGC>(cx, key, &id))
        return false;

    // Index-like atoms (e.g. "3") must go through the element path, not the
    // named-property path.
    uint32_t dummy;
    if (!JSID_IS_ATOM(id) || JSID_TO_ATOM(id)->isIndex(&dummy))
        return true;

    RootedPropertyName propName(cx, JSID_TO_ATOM(id)->asPropertyName());
    // If the key string isn't already an atom, the stub must atomize it
    // before comparing against the baked-in name.
    bool needsAtomize = !key.toString()->isAtom();
    bool isCallElem = (JSOp(*pc) == JSOP_CALLELEM);

    RootedShape shape(cx);
    RootedObject holder(cx);
    if (!EffectlesslyLookupProperty(cx, obj, propName, &holder, &shape))
        return false;

    if (IsCacheableGetPropReadSlot(obj, holder, shape)) {
        // If a suitable stub already exists, nothing else to do.
        if (GetElemNativeStubExists(stub, obj, holder, propName, needsAtomize))
            return true;

        // Remove any existing stubs that may interfere with the new stub being added.
        RemoveExistingGetElemNativeStubs(cx, stub, obj, holder, propName, needsAtomize);

        bool isFixedSlot;
        uint32_t offset;
        GetFixedOrDynamicSlotOffset(holder, shape->slot(), &isFixedSlot, &offset);

        ICStub *monitorStub = stub->fallbackMonitorStub()->firstMonitorStub();
        ICStub::Kind kind = (obj == holder) ? ICStub::GetElem_NativeSlot
                                            : ICStub::GetElem_NativePrototypeSlot;

        IonSpew(IonSpew_BaselineIC, "  Generating GetElem(Native %s%s slot) stub "
                                    "(obj=%p, shape=%p, holder=%p, holderShape=%p)",
                    (obj == holder) ? "direct" : "prototype",
                    needsAtomize ? " atomizing" : "",
                    obj.get(), obj->lastProperty(), holder.get(), holder->lastProperty());

        ICGetElemNativeStub::AccessType acctype = isFixedSlot ? ICGetElemNativeStub::FixedSlot
                                                              : ICGetElemNativeStub::DynamicSlot;
        ICGetElemNativeCompiler compiler(cx, kind, isCallElem, monitorStub, obj, holder, propName,
                                         acctype, needsAtomize, offset);
        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;

        stub->addNewStub(newStub);
        return true;
    }

    bool getterIsScripted = false;
    if (IsCacheableGetPropCall(cx, obj, holder, shape, &getterIsScripted, /*isDOMProxy=*/false)) {
        RootedFunction getter(cx, &shape->getterObject()->as<JSFunction>());

#if JS_HAS_NO_SUCH_METHOD
        // It's unlikely that a getter function will be used in callelem locations.
        // Just don't attach stubs in that case to avoid issues with __noSuchMethod__ handling.
        if (isCallElem)
            return true;
#endif

        // For now, we do not handle own property getters
        if (obj == holder)
            return true;

        // If a suitable stub already exists, nothing else to do.
        if (GetElemNativeStubExists(stub, obj, holder, propName, needsAtomize))
            return true;

        // Remove any existing stubs that may interfere with the new stub being added.
        RemoveExistingGetElemNativeStubs(cx, stub, obj, holder, propName, needsAtomize);

        ICStub *monitorStub = stub->fallbackMonitorStub()->firstMonitorStub();
        ICStub::Kind kind = getterIsScripted ? ICStub::GetElem_NativePrototypeCallScripted
                                             : ICStub::GetElem_NativePrototypeCallNative;

        if (getterIsScripted) {
            IonSpew(IonSpew_BaselineIC,
                    "  Generating GetElem(Native %s%s call scripted %s:%d) stub "
                    "(obj=%p, shape=%p, holder=%p, holderShape=%p)",
                        (obj == holder) ? "direct" : "prototype",
                        needsAtomize ? " atomizing" : "",
                        getter->nonLazyScript()->filename(), getter->nonLazyScript()->lineno(),
                        obj.get(), obj->lastProperty(), holder.get(), holder->lastProperty());
        } else {
            IonSpew(IonSpew_BaselineIC,
                    "  Generating GetElem(Native %s%s call native) stub "
                    "(obj=%p, shape=%p, holder=%p, holderShape=%p)",
                        (obj == holder) ? "direct" : "prototype",
                        needsAtomize ? " atomizing" : "",
                        obj.get(), obj->lastProperty(), holder.get(), holder->lastProperty());
        }

        ICGetElemNativeStub::AccessType acctype = getterIsScripted
                                                           ? ICGetElemNativeStub::ScriptedGetter
                                                           : ICGetElemNativeStub::NativeGetter;
        ICGetElemNativeCompiler compiler(cx, kind, monitorStub, obj, holder, propName, acctype,
                                         needsAtomize, getter, script->pcToOffset(pc), isCallElem);
        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;

        stub->addNewStub(newStub);
        return true;
    }

    return true;
}
3877
3878 static bool
3879 TypedArrayRequiresFloatingPoint(TypedArrayObject *tarr)
3880 {
3881 uint32_t type = tarr->type();
3882 return (type == ScalarTypeDescr::TYPE_UINT32 ||
3883 type == ScalarTypeDescr::TYPE_FLOAT32 ||
3884 type == ScalarTypeDescr::TYPE_FLOAT64);
3885 }
3886
// Try to attach an optimized (non-fallback) stub for a GETELEM/CALLELEM op,
// chosen by the dynamic types of the lhs/rhs operands and the result value.
// Returns false only on OOM; not finding an applicable stub kind is not an
// error (the fallback stub simply remains in place).
static bool
TryAttachGetElemStub(JSContext *cx, JSScript *script, jsbytecode *pc, ICGetElem_Fallback *stub,
                     HandleValue lhs, HandleValue rhs, HandleValue res)
{
    bool isCallElem = (JSOp(*pc) == JSOP_CALLELEM);

    // Check for String[i] => Char accesses.
    if (lhs.isString() && rhs.isInt32() && res.isString() &&
        !stub->hasStub(ICStub::GetElem_String))
    {
        // NoSuchMethod handling doesn't apply to string targets.

        IonSpew(IonSpew_BaselineIC, "  Generating GetElem(String[Int32]) stub");
        ICGetElem_String::Compiler compiler(cx);
        ICStub *stringStub = compiler.getStub(compiler.getStubSpace(script));
        if (!stringStub)
            return false;

        stub->addNewStub(stringStub);
        return true;
    }

    if (lhs.isMagic(JS_OPTIMIZED_ARGUMENTS) && rhs.isInt32() &&
        !ArgumentsGetElemStubExists(stub, ICGetElem_Arguments::Magic))
    {
        // Any script with a CALLPROP on arguments (arguments.foo())
        // should not have optimized arguments.
        JS_ASSERT(!isCallElem);

        IonSpew(IonSpew_BaselineIC, "  Generating GetElem(MagicArgs[Int32]) stub");
        ICGetElem_Arguments::Compiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
                                               ICGetElem_Arguments::Magic, false);
        ICStub *argsStub = compiler.getStub(compiler.getStubSpace(script));
        if (!argsStub)
            return false;

        stub->addNewStub(argsStub);
        return true;
    }

    // Otherwise, GetElem is only optimized on objects.
    if (!lhs.isObject())
        return true;
    RootedObject obj(cx, &lhs.toObject());

    // Check for ArgumentsObj[int] accesses
    if (obj->is<ArgumentsObject>() && rhs.isInt32()) {
        ICGetElem_Arguments::Which which = ICGetElem_Arguments::Normal;
        if (obj->is<StrictArgumentsObject>())
            which = ICGetElem_Arguments::Strict;
        if (!ArgumentsGetElemStubExists(stub, which)) {
            IonSpew(IonSpew_BaselineIC, "  Generating GetElem(ArgsObj[Int32]) stub");
            ICGetElem_Arguments::Compiler compiler(
                cx, stub->fallbackMonitorStub()->firstMonitorStub(), which, isCallElem);
            ICStub *argsStub = compiler.getStub(compiler.getStubSpace(script));
            if (!argsStub)
                return false;

            stub->addNewStub(argsStub);
            return true;
        }
    }

    if (obj->isNative()) {
        // Check for NativeObject[int] dense accesses.
        if (rhs.isInt32() && rhs.toInt32() >= 0 && !obj->is<TypedArrayObject>()) {
            IonSpew(IonSpew_BaselineIC, "  Generating GetElem(Native[Int32] dense) stub");
            ICGetElem_Dense::Compiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
                                               obj->lastProperty(), isCallElem);
            ICStub *denseStub = compiler.getStub(compiler.getStubSpace(script));
            if (!denseStub)
                return false;

            stub->addNewStub(denseStub);
            return true;
        }

        // Check for NativeObject[id] shape-optimizable accesses.
        if (rhs.isString()) {
            RootedScript rootedScript(cx, script);
            if (!TryAttachNativeGetElemStub(cx, rootedScript, pc, stub, obj, rhs))
                return false;
            // TryAttachNativeGetElemStub can GC; pick up the (possibly moved)
            // script from the root before continuing.
            script = rootedScript;
        }
    }

    // Check for TypedArray[int] => Number accesses.
    if (obj->is<TypedArrayObject>() && rhs.isNumber() && res.isNumber() &&
        !TypedArrayGetElemStubExists(stub, obj))
    {
        // Don't attach CALLELEM stubs for accesses on typed array expected to yield numbers.
#if JS_HAS_NO_SUCH_METHOD
        if (isCallElem)
            return true;
#endif

        TypedArrayObject *tarr = &obj->as<TypedArrayObject>();
        // Without JIT floating-point support we cannot handle double keys or
        // element types whose loads may produce doubles.
        if (!cx->runtime()->jitSupportsFloatingPoint &&
            (TypedArrayRequiresFloatingPoint(tarr) || rhs.isDouble()))
        {
            return true;
        }

        IonSpew(IonSpew_BaselineIC, "  Generating GetElem(TypedArray[Int32]) stub");
        ICGetElem_TypedArray::Compiler compiler(cx, tarr->lastProperty(), tarr->type());
        ICStub *typedArrayStub = compiler.getStub(compiler.getStubSpace(script));
        if (!typedArrayStub)
            return false;

        stub->addNewStub(typedArrayStub);
        return true;
    }

    // GetElem operations on non-native objects cannot be cached by either
    // Baseline or Ion. Indicate this in the cache so that Ion does not
    // generate a cache for this op.
    if (!obj->isNative())
        stub->noteNonNativeAccess();

    // GetElem operations which could access negative indexes generally can't
    // be optimized without the potential for bailouts, as we can't statically
    // determine that an object has no properties on such indexes.
    if (rhs.isNumber() && rhs.toNumber() < 0)
        stub->noteNegativeIndex();

    return true;
}
4014
// Fallback VM entry point for GETELEM/CALLELEM.  Performs the generic element
// get, monitors the result type, then tries to attach an optimized stub for
// future hits.  Returns false on exception/OOM.
static bool
DoGetElemFallback(JSContext *cx, BaselineFrame *frame, ICGetElem_Fallback *stub_, HandleValue lhs,
                  HandleValue rhs, MutableHandleValue res)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICGetElem_Fallback *> stub(frame, stub_);

    RootedScript script(cx, frame->script());
    jsbytecode *pc = stub->icEntry()->pc(frame->script());
    JSOp op = JSOp(*pc);
    FallbackICSpew(cx, stub, "GetElem(%s)", js_CodeName[op]);

    JS_ASSERT(op == JSOP_GETELEM || op == JSOP_CALLELEM);

    // Don't pass lhs directly, we need it when generating stubs.
    RootedValue lhsCopy(cx, lhs);

    bool isOptimizedArgs = false;
    if (lhs.isMagic(JS_OPTIMIZED_ARGUMENTS)) {
        // Handle optimized arguments[i] access.
        if (!GetElemOptimizedArguments(cx, frame, &lhsCopy, rhs, res, &isOptimizedArgs))
            return false;
        if (isOptimizedArgs)
            types::TypeScript::Monitor(cx, frame->script(), pc, res);
    }

    if (!isOptimizedArgs) {
        // Generic slow-path element access.
        if (!GetElementOperation(cx, op, &lhsCopy, rhs, res))
            return false;
        types::TypeScript::Monitor(cx, frame->script(), pc, res);
    }

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    // Add a type monitor stub for the resulting value.
    if (!stub->addMonitorStubForValue(cx, frame->script(), res))
        return false;

    if (stub->numOptimizedStubs() >= ICGetElem_Fallback::MAX_OPTIMIZED_STUBS) {
        // TODO: Discard all stubs in this IC and replace with inert megamorphic stub.
        // But for now we just bail.
        return true;
    }

    // Try to attach an optimized stub.
    if (!TryAttachGetElemStub(cx, frame->script(), pc, stub, lhs, rhs, res))
        return false;

    return true;
}
4067
// VM-call glue for DoGetElemFallback.  PopValues(2) discards the two values
// the stub pushed for the expression decompiler before returning.
typedef bool (*DoGetElemFallbackFn)(JSContext *, BaselineFrame *, ICGetElem_Fallback *,
                                    HandleValue, HandleValue, MutableHandleValue);
static const VMFunction DoGetElemFallbackInfo =
    FunctionInfo<DoGetElemFallbackFn>(DoGetElemFallback, PopValues(2));
4072
// Generate the fallback stub: sync the operand values onto the stack for the
// expression decompiler, then tail-call into DoGetElemFallback.
bool
ICGetElem_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(R0 == JSReturnOperand);

    // Restore the tail call register.
    EmitRestoreTailCallReg(masm);

    // Ensure stack is fully synced for the expression decompiler.
    masm.pushValue(R0);
    masm.pushValue(R1);

    // Push arguments.
    masm.pushValue(R1);
    masm.pushValue(R0);
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(DoGetElemFallbackInfo, masm);
}
4093
4094 //
4095 // GetElem_NativeSlot
4096 //
4097
4098 static bool
4099 DoAtomizeString(JSContext *cx, HandleString string, MutableHandleValue result)
4100 {
4101 IonSpew(IonSpew_BaselineIC, " AtomizeString called");
4102
4103 RootedValue key(cx, StringValue(string));
4104
4105 // Convert to interned property name.
4106 RootedId id(cx);
4107 if (!ValueToId<CanGC>(cx, key, &id))
4108 return false;
4109
4110 if (!JSID_IS_ATOM(id)) {
4111 result.set(key);
4112 return true;
4113 }
4114
4115 result.set(StringValue(JSID_TO_ATOM(id)));
4116 return true;
4117 }
4118
// VM-call glue so generated stub code can atomize a key string at runtime.
typedef bool (*DoAtomizeStringFn)(JSContext *, HandleString, MutableHandleValue);
static const VMFunction DoAtomizeStringInfo = FunctionInfo<DoAtomizeStringFn>(DoAtomizeString);
4121
// Emit a VM call to a native getter for a GetElem IC.  |objReg| holds the
// receiver object; the getter function pointer is loaded from the stub.
// Returns false on OOM while emitting the VM call.
bool
ICGetElemNativeCompiler::emitCallNative(MacroAssembler &masm, Register objReg)
{
    GeneralRegisterSet regs = availableGeneralRegs(0);
    regs.takeUnchecked(objReg);
    regs.takeUnchecked(BaselineTailCallReg);

    enterStubFrame(masm, regs.getAny());

    // Push object.
    masm.push(objReg);

    // Push native callee.
    masm.loadPtr(Address(BaselineStubReg, ICGetElemNativeGetterStub::offsetOfGetter()), objReg);
    masm.push(objReg);

    // objReg has been clobbered by the getter load; return it to the pool so
    // the profiler update below may use it as scratch.
    regs.add(objReg);

    // Profiler hook.
    emitProfilingUpdate(masm, regs, ICGetElemNativeGetterStub::offsetOfPCOffset());

    // Call helper.
    if (!callVM(DoCallNativeGetterInfo, masm))
        return false;

    leaveStubFrame(masm);

    return true;
}
4151
// Emit a call to a scripted getter for a GetElem IC.  |objReg| holds the
// receiver, which becomes |this| for the getter; the callee function is
// loaded from the stub.  Handles argument-count underflow (getter declared
// with formals) via the arguments rectifier trampoline.
bool
ICGetElemNativeCompiler::emitCallScripted(MacroAssembler &masm, Register objReg)
{
    GeneralRegisterSet regs = availableGeneralRegs(0);
    regs.takeUnchecked(objReg);
    regs.takeUnchecked(BaselineTailCallReg);

    // Enter stub frame.
    enterStubFrame(masm, regs.getAny());

    // Push |this| for getter (target object).
    {
        ValueOperand val = regs.takeAnyValue();
        masm.tagValue(JSVAL_TYPE_OBJECT, objReg, val);
        masm.Push(val);
        regs.add(val);
    }

    regs.add(objReg);

    Register callee = regs.takeAny();
    masm.loadPtr(Address(BaselineStubReg, ICGetElemNativeGetterStub::offsetOfGetter()), callee);

    // Push argc, callee, and descriptor.
    {
        Register callScratch = regs.takeAny();
        EmitCreateStubFrameDescriptor(masm, callScratch);
        masm.Push(Imm32(0));  // ActualArgc is 0
        masm.Push(callee);
        masm.Push(callScratch);
        regs.add(callScratch);
    }

    // Load the jit code entry for the getter.
    Register code = regs.takeAnyExcluding(ArgumentsRectifierReg);
    masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), code);
    masm.loadBaselineOrIonRaw(code, code, SequentialExecution, nullptr);

    Register scratch = regs.takeAny();

    // Handle arguments underflow.
    Label noUnderflow;
    masm.load16ZeroExtend(Address(callee, JSFunction::offsetOfNargs()), scratch);
    masm.branch32(Assembler::Equal, scratch, Imm32(0), &noUnderflow);
    {
        // Call the arguments rectifier.
        JS_ASSERT(ArgumentsRectifierReg != code);

        JitCode *argumentsRectifier =
            cx->runtime()->jitRuntime()->getArgumentsRectifier(SequentialExecution);

        masm.movePtr(ImmGCPtr(argumentsRectifier), code);
        masm.loadPtr(Address(code, JitCode::offsetOfCode()), code);
        masm.mov(ImmWord(0), ArgumentsRectifierReg);
    }

    masm.bind(&noUnderflow);

    // If needed, update SPS Profiler frame entry.  At this point, callee and scratch can
    // be clobbered.
    {
        GeneralRegisterSet availRegs = availableGeneralRegs(0);
        availRegs.take(ArgumentsRectifierReg);
        availRegs.take(code);
        emitProfilingUpdate(masm, availRegs, ICGetElemNativeGetterStub::offsetOfPCOffset());
    }

    masm.callIon(code);

    leaveStubFrame(masm, true);

    return true;
}
4224
// Guard code shared by all GetElem native-property stub kinds: check the
// object's shape, (optionally atomize and) identity-check the string key,
// shape-guard the holder when it differs from the receiver, then either load
// the slot value directly or invoke the native/scripted getter.
bool
ICGetElemNativeCompiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    Label failurePopR1;
    // Set when R1's scratch register had to be spilled to the stack because no
    // free register remained to hold the holder object.  Any failure path
    // taken after the spill must go through failurePopR1 to restore it.
    bool popR1 = false;

    masm.branchTestObject(Assembler::NotEqual, R0, &failure);
    masm.branchTestString(Assembler::NotEqual, R1, &failure);

    GeneralRegisterSet regs(availableGeneralRegs(2));
    Register scratchReg = regs.takeAny();

    // Unbox object.
    Register objReg = masm.extractObject(R0, ExtractTemp0);

    // Check object shape.
    masm.loadPtr(Address(objReg, JSObject::offsetOfShape()), scratchReg);
    Address shapeAddr(BaselineStubReg, ICGetElemNativeStub::offsetOfShape());
    masm.branchPtr(Assembler::NotEqual, shapeAddr, scratchReg, &failure);

    // Check key identity.  Don't automatically fail if this fails, since the
    // incoming key may be a non-interned string.  Switch to a slowpath
    // vm-call based check.
    Address nameAddr(BaselineStubReg, ICGetElemNativeStub::offsetOfName());
    Register strExtract = masm.extractString(R1, ExtractTemp1);

    // If needsAtomize_ is true, and the string is not already an atom, then atomize the
    // string before proceeding.
    if (needsAtomize_) {
        Label skipAtomize;

        // If string is already an atom, skip the atomize.
        masm.branchTestPtr(Assembler::NonZero,
                           Address(strExtract, JSString::offsetOfLengthAndFlags()),
                           Imm32(JSString::ATOM_BIT),
                           &skipAtomize);

        // Stow R0.
        EmitStowICValues(masm, 1);

        enterStubFrame(masm, R0.scratchReg());

        // Atomize the string into a new value.
        masm.push(strExtract);
        if (!callVM(DoAtomizeStringInfo, masm))
            return false;

        // Atomized string is now in JSReturnOperand (R0).
        // Leave stub frame, move atomized string into R1.
        JS_ASSERT(R0 == JSReturnOperand);
        leaveStubFrame(masm);
        masm.moveValue(JSReturnOperand, R1);

        // Unstow R0
        EmitUnstowICValues(masm, 1);

        // Extract string from R1 again.
        DebugOnly<Register> strExtract2 = masm.extractString(R1, ExtractTemp1);
        JS_ASSERT(Register(strExtract2) == strExtract);

        masm.bind(&skipAtomize);
    }

    // Since this stub sometimes enters a stub frame (the atomize path above
    // and the noSuchMethod/getter paths below), we manually set this to true.
#ifdef DEBUG
    entersStubFrame_ = true;
#endif

    // Key has been atomized if necessary.  Do identity check on string pointer.
    masm.branchPtr(Assembler::NotEqual, nameAddr, strExtract, &failure);

    Register holderReg;
    if (obj_ == holder_) {
        holderReg = objReg;
    } else {
        // Shape guard holder.
        if (regs.empty()) {
            // No free register: spill R1's scratch register and reuse it.
            masm.push(R1.scratchReg());
            popR1 = true;
            holderReg = R1.scratchReg();
        } else {
            holderReg = regs.takeAny();
        }

        if (kind == ICStub::GetElem_NativePrototypeCallNative ||
            kind == ICStub::GetElem_NativePrototypeCallScripted)
        {
            masm.loadPtr(Address(BaselineStubReg,
                                 ICGetElemNativePrototypeCallStub::offsetOfHolder()),
                         holderReg);
            masm.loadPtr(Address(BaselineStubReg,
                                 ICGetElemNativePrototypeCallStub::offsetOfHolderShape()),
                         scratchReg);
        } else {
            masm.loadPtr(Address(BaselineStubReg,
                                 ICGetElem_NativePrototypeSlot::offsetOfHolder()),
                         holderReg);
            masm.loadPtr(Address(BaselineStubReg,
                                 ICGetElem_NativePrototypeSlot::offsetOfHolderShape()),
                         scratchReg);
        }
        masm.branchTestObjShape(Assembler::NotEqual, holderReg, scratchReg,
                                popR1 ? &failurePopR1 : &failure);
    }

    if (acctype_ == ICGetElemNativeStub::DynamicSlot ||
        acctype_ == ICGetElemNativeStub::FixedSlot)
    {
        masm.load32(Address(BaselineStubReg, ICGetElemNativeSlotStub::offsetOfOffset()),
                    scratchReg);

        // Load from object.
        if (acctype_ == ICGetElemNativeStub::DynamicSlot)
            masm.addPtr(Address(holderReg, JSObject::offsetOfSlots()), scratchReg);
        else
            masm.addPtr(holderReg, scratchReg);

        Address valAddr(scratchReg, 0);

        // Check if __noSuchMethod__ needs to be called.
#if JS_HAS_NO_SUCH_METHOD
        if (isCallElem_) {
            Label afterNoSuchMethod;
            Label skipNoSuchMethod;

            masm.branchTestUndefined(Assembler::NotEqual, valAddr, &skipNoSuchMethod);

            GeneralRegisterSet regs = availableGeneralRegs(0);
            regs.take(R1);
            regs.take(R0);
            regs.takeUnchecked(objReg);
            if (popR1)
                masm.pop(R1.scratchReg());

            // Box and push obj and key onto baseline frame stack for decompiler.
            masm.tagValue(JSVAL_TYPE_OBJECT, objReg, R0);
            EmitStowICValues(masm, 2);

            regs.add(R0);
            regs.takeUnchecked(objReg);

            enterStubFrame(masm, regs.getAnyExcluding(BaselineTailCallReg));

            masm.pushValue(R1);
            masm.push(objReg);
            if (!callVM(LookupNoSuchMethodHandlerInfo, masm))
                return false;

            leaveStubFrame(masm);

            // Pop pushed obj and key from baseline stack.
            EmitUnstowICValues(masm, 2, /* discard = */ true);

            // Result is already in R0
            masm.jump(&afterNoSuchMethod);
            masm.bind(&skipNoSuchMethod);

            if (popR1)
                masm.pop(R1.scratchReg());
            masm.loadValue(valAddr, R0);
            masm.bind(&afterNoSuchMethod);
        } else {
            masm.loadValue(valAddr, R0);
            // Discard the spilled R1 scratch word without restoring it.
            if (popR1)
                masm.addPtr(ImmWord(sizeof(size_t)), BaselineStackReg);
        }
#else
        masm.loadValue(valAddr, R0);
        if (popR1)
            masm.addPtr(ImmWord(sizeof(size_t)), BaselineStackReg);
#endif

    } else {
        JS_ASSERT(acctype_ == ICGetElemNativeStub::NativeGetter ||
                  acctype_ == ICGetElemNativeStub::ScriptedGetter);
        JS_ASSERT(kind == ICStub::GetElem_NativePrototypeCallNative ||
                  kind == ICStub::GetElem_NativePrototypeCallScripted);

        if (acctype_ == ICGetElemNativeStub::NativeGetter) {
            // If calling a native getter, there is no chance of failure now.

            // GetElem key (R1) is no longer needed.
            if (popR1)
                masm.addPtr(ImmWord(sizeof(size_t)), BaselineStackReg);

            emitCallNative(masm, objReg);

        } else {
            JS_ASSERT(acctype_ == ICGetElemNativeStub::ScriptedGetter);

            // Load function in scratchReg and ensure that it has a jit script.
            masm.loadPtr(Address(BaselineStubReg, ICGetElemNativeGetterStub::offsetOfGetter()),
                         scratchReg);
            masm.branchIfFunctionHasNoScript(scratchReg, popR1 ? &failurePopR1 : &failure);
            masm.loadPtr(Address(scratchReg, JSFunction::offsetOfNativeOrScript()), scratchReg);
            masm.loadBaselineOrIonRaw(scratchReg, scratchReg, SequentialExecution,
                                      popR1 ? &failurePopR1 : &failure);

            // At this point, we are guaranteed to successfully complete.
            if (popR1)
                masm.addPtr(Imm32(sizeof(size_t)), BaselineStackReg);

            emitCallScripted(masm, objReg);
        }
    }

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    // Failure case - jump to next stub
    if (popR1) {
        masm.bind(&failurePopR1);
        masm.pop(R1.scratchReg());
    }
    masm.bind(&failure);
    EmitStubGuardFailure(masm);

    return true;
}
4444
4445 //
4446 // GetElem_String
4447 //
4448
// Optimized stub for String[Int32] accesses yielding a one-character string.
// Only handles linear strings whose char code is below the static-strings
// unit table limit; anything else falls through to the next stub.
bool
ICGetElem_String::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestString(Assembler::NotEqual, R0, &failure);
    masm.branchTestInt32(Assembler::NotEqual, R1, &failure);

    GeneralRegisterSet regs(availableGeneralRegs(2));
    Register scratchReg = regs.takeAny();

    // Unbox string in R0.
    Register str = masm.extractString(R0, ExtractTemp0);

    // Load string lengthAndFlags
    Address lengthAndFlagsAddr(str, JSString::offsetOfLengthAndFlags());
    masm.loadPtr(lengthAndFlagsAddr, scratchReg);

    // Check for non-linear strings.
    masm.branchTest32(Assembler::Zero, scratchReg, Imm32(JSString::FLAGS_MASK), &failure);

    // Unbox key.
    Register key = masm.extractInt32(R1, ExtractTemp1);

    // Extract length and bounds check.
    masm.rshiftPtr(Imm32(JSString::LENGTH_SHIFT), scratchReg);
    masm.branch32(Assembler::BelowOrEqual, scratchReg, key, &failure);

    // Get char code (chars are stored as 16-bit units).
    Address charsAddr(str, JSString::offsetOfChars());
    masm.loadPtr(charsAddr, scratchReg);
    masm.load16ZeroExtend(BaseIndex(scratchReg, key, TimesTwo, 0), scratchReg);

    // Check if char code >= UNIT_STATIC_LIMIT.
    masm.branch32(Assembler::AboveOrEqual, scratchReg, Imm32(StaticStrings::UNIT_STATIC_LIMIT),
                  &failure);

    // Load static string.
    masm.movePtr(ImmPtr(&cx->staticStrings().unitStaticTable), str);
    masm.loadPtr(BaseIndex(str, scratchReg, ScalePointer), str);

    // Return.
    masm.tagValue(JSVAL_TYPE_STRING, str, R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
4498
4499 //
4500 // GetElem_Dense
4501 //
4502
// Optimized stub for dense-element reads: shape guard, initialized-length
// bounds check, hole check, then load.  When compiled for CALLELEM with
// noSuchMethod support enabled, an undefined result takes a VM-call slow
// path to look up __noSuchMethod__.
bool
ICGetElem_Dense::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);
    masm.branchTestInt32(Assembler::NotEqual, R1, &failure);

    GeneralRegisterSet regs(availableGeneralRegs(2));
    Register scratchReg = regs.takeAny();

    // Unbox R0 and shape guard.
    Register obj = masm.extractObject(R0, ExtractTemp0);
    masm.loadPtr(Address(BaselineStubReg, ICGetElem_Dense::offsetOfShape()), scratchReg);
    masm.branchTestObjShape(Assembler::NotEqual, obj, scratchReg, &failure);

    // Load obj->elements.
    masm.loadPtr(Address(obj, JSObject::offsetOfElements()), scratchReg);

    // Unbox key.
    Register key = masm.extractInt32(R1, ExtractTemp1);

    // Bounds check.
    Address initLength(scratchReg, ObjectElements::offsetOfInitializedLength());
    masm.branch32(Assembler::BelowOrEqual, initLength, key, &failure);

    // Hole check and load value.
    JS_STATIC_ASSERT(sizeof(Value) == 8);
    BaseIndex element(scratchReg, key, TimesEight);
    masm.branchTestMagic(Assembler::Equal, element, &failure);

    // Check if __noSuchMethod__ should be called.
#if JS_HAS_NO_SUCH_METHOD
#ifdef DEBUG
    entersStubFrame_ = true;
#endif
    if (isCallElem_) {
        Label afterNoSuchMethod;
        Label skipNoSuchMethod;
        regs = availableGeneralRegs(0);
        regs.takeUnchecked(obj);
        regs.takeUnchecked(key);
        regs.takeUnchecked(BaselineTailCallReg);
        ValueOperand val = regs.takeValueOperand();

        masm.loadValue(element, val);
        masm.branchTestUndefined(Assembler::NotEqual, val, &skipNoSuchMethod);

        // Box and push obj and key onto baseline frame stack for decompiler.
        EmitRestoreTailCallReg(masm);
        masm.tagValue(JSVAL_TYPE_OBJECT, obj, val);
        masm.pushValue(val);
        masm.tagValue(JSVAL_TYPE_INT32, key, val);
        masm.pushValue(val);
        EmitRepushTailCallReg(masm);

        regs.add(val);

        // Call __noSuchMethod__ checker.  Object pointer is in objReg.
        enterStubFrame(masm, regs.getAnyExcluding(BaselineTailCallReg));

        regs.take(val);

        masm.tagValue(JSVAL_TYPE_INT32, key, val);
        masm.pushValue(val);
        masm.push(obj);
        if (!callVM(LookupNoSuchMethodHandlerInfo, masm))
            return false;

        leaveStubFrame(masm);

        // Pop pushed obj and key from baseline stack.
        EmitUnstowICValues(masm, 2, /* discard = */ true);

        // Result is already in R0
        masm.jump(&afterNoSuchMethod);
        masm.bind(&skipNoSuchMethod);

        masm.moveValue(val, R0);
        masm.bind(&afterNoSuchMethod);
    } else {
        masm.loadValue(element, R0);
    }
#else
    // Load value from element location.
    masm.loadValue(element, R0);
#endif

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
4598
4599 //
4600 // GetElem_TypedArray
4601 //
4602
// Optimized stub for TypedArray[Int32] reads.  A double key is converted to
// int32 when FP support is available; the element load itself is typed by
// the array element type recorded in the stub (type_).
bool
ICGetElem_TypedArray::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    GeneralRegisterSet regs(availableGeneralRegs(2));
    Register scratchReg = regs.takeAny();

    // Unbox R0 and shape guard.
    Register obj = masm.extractObject(R0, ExtractTemp0);
    masm.loadPtr(Address(BaselineStubReg, ICGetElem_TypedArray::offsetOfShape()), scratchReg);
    masm.branchTestObjShape(Assembler::NotEqual, obj, scratchReg, &failure);

    // Ensure the index is an integer.
    if (cx->runtime()->jitSupportsFloatingPoint) {
        Label isInt32;
        masm.branchTestInt32(Assembler::Equal, R1, &isInt32);
        {
            // If the index is a double, try to convert it to int32. It's okay
            // to convert -0 to 0: the shape check ensures the object is a typed
            // array so the difference is not observable.
            masm.branchTestDouble(Assembler::NotEqual, R1, &failure);
            masm.unboxDouble(R1, FloatReg0);
            masm.convertDoubleToInt32(FloatReg0, scratchReg, &failure, /* negZeroCheck = */false);
            masm.tagValue(JSVAL_TYPE_INT32, scratchReg, R1);
        }
        masm.bind(&isInt32);
    } else {
        masm.branchTestInt32(Assembler::NotEqual, R1, &failure);
    }

    // Unbox key.
    Register key = masm.extractInt32(R1, ExtractTemp1);

    // Bounds check.
    masm.unboxInt32(Address(obj, TypedArrayObject::lengthOffset()), scratchReg);
    masm.branch32(Assembler::BelowOrEqual, scratchReg, key, &failure);

    // Load the elements vector.
    masm.loadPtr(Address(obj, TypedArrayObject::dataOffset()), scratchReg);

    // Load the value.
    BaseIndex source(scratchReg, key, ScaleFromElemWidth(TypedArrayObject::slotWidth(type_)));
    masm.loadFromTypedArray(type_, source, R0, false, scratchReg, &failure);

    // Todo: Allow loading doubles from uint32 arrays, but this requires monitoring.
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
4657
4658 //
4659 // GetEelem_Arguments
4660 //
// Optimized stub for element reads on arguments.  The Magic variant reads
// directly from the baseline frame (for the JS_OPTIMIZED_ARGUMENTS case);
// the Strict/Normal variants guard on a real arguments object with no
// length override and no deleted elements.
bool
ICGetElem_Arguments::Compiler::generateStubCode(MacroAssembler &masm)
{
    // Variants of GetElem_Arguments can enter stub frames if entered in CallProp
    // context when noSuchMethod support is on.
#if JS_HAS_NO_SUCH_METHOD
#ifdef DEBUG
    entersStubFrame_ = true;
#endif
#endif

    Label failure;
    if (which_ == ICGetElem_Arguments::Magic) {
        JS_ASSERT(!isCallElem_);

        // Ensure that this is a magic arguments value.
        masm.branchTestMagicValue(Assembler::NotEqual, R0, JS_OPTIMIZED_ARGUMENTS, &failure);

        // Ensure that frame has not loaded different arguments object since.
        masm.branchTest32(Assembler::NonZero,
                          Address(BaselineFrameReg, BaselineFrame::reverseOffsetOfFlags()),
                          Imm32(BaselineFrame::HAS_ARGS_OBJ),
                          &failure);

        // Ensure that index is an integer.
        masm.branchTestInt32(Assembler::NotEqual, R1, &failure);
        Register idx = masm.extractInt32(R1, ExtractTemp1);

        GeneralRegisterSet regs(availableGeneralRegs(2));
        Register scratch = regs.takeAny();

        // Load num actual arguments
        Address actualArgs(BaselineFrameReg, BaselineFrame::offsetOfNumActualArgs());
        masm.loadPtr(actualArgs, scratch);

        // Ensure idx < argc
        masm.branch32(Assembler::AboveOrEqual, idx, scratch, &failure);

        // Load argval directly from the baseline frame's argument slots.
        JS_STATIC_ASSERT(sizeof(Value) == 8);
        masm.movePtr(BaselineFrameReg, scratch);
        masm.addPtr(Imm32(BaselineFrame::offsetOfArg(0)), scratch);
        BaseIndex element(scratch, idx, TimesEight);
        masm.loadValue(element, R0);

        // Enter type monitor IC to type-check result.
        EmitEnterTypeMonitorIC(masm);

        masm.bind(&failure);
        EmitStubGuardFailure(masm);
        return true;
    }

    JS_ASSERT(which_ == ICGetElem_Arguments::Strict ||
              which_ == ICGetElem_Arguments::Normal);

    bool isStrict = which_ == ICGetElem_Arguments::Strict;
    const Class *clasp = isStrict ? &StrictArgumentsObject::class_ : &NormalArgumentsObject::class_;

    GeneralRegisterSet regs(availableGeneralRegs(2));
    Register scratchReg = regs.takeAny();

    // Guard on input being an arguments object.
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);
    Register objReg = masm.extractObject(R0, ExtractTemp0);
    masm.branchTestObjClass(Assembler::NotEqual, objReg, scratchReg, clasp, &failure);

    // Guard on index being int32
    masm.branchTestInt32(Assembler::NotEqual, R1, &failure);
    Register idxReg = masm.extractInt32(R1, ExtractTemp1);

    // Get initial ArgsObj length value.
    masm.unboxInt32(Address(objReg, ArgumentsObject::getInitialLengthSlotOffset()), scratchReg);

    // Test if length has been overridden.
    masm.branchTest32(Assembler::NonZero,
                      scratchReg,
                      Imm32(ArgumentsObject::LENGTH_OVERRIDDEN_BIT),
                      &failure);

    // Length has not been overridden, ensure that R1 is an integer and is <= length.
    masm.rshiftPtr(Imm32(ArgumentsObject::PACKED_BITS_COUNT), scratchReg);
    masm.branch32(Assembler::AboveOrEqual, idxReg, scratchReg, &failure);

    // Length check succeeded, now check the correct bit.  We clobber potential type regs
    // now.  Inputs will have to be reconstructed if we fail after this point, but that's
    // unlikely.
    Label failureReconstructInputs;
    regs = availableGeneralRegs(0);
    regs.takeUnchecked(objReg);
    regs.takeUnchecked(idxReg);
    regs.take(scratchReg);
    Register argData = regs.takeAny();
    Register tempReg = regs.takeAny();

    // Load ArgumentsData
    masm.loadPrivate(Address(objReg, ArgumentsObject::getDataSlotOffset()), argData);

    // Load deletedBits bitArray pointer into scratchReg
    masm.loadPtr(Address(argData, offsetof(ArgumentsData, deletedBits)), scratchReg);

    // In tempReg, calculate index of word containing bit: (idx >> logBitsPerWord)
    masm.movePtr(idxReg, tempReg);
    const uint32_t shift = mozilla::tl::FloorLog2<(sizeof(size_t) * JS_BITS_PER_BYTE)>::value;
    JS_ASSERT(shift == 5 || shift == 6);
    masm.rshiftPtr(Imm32(shift), tempReg);
    masm.loadPtr(BaseIndex(scratchReg, tempReg, ScaleFromElemWidth(sizeof(size_t))), scratchReg);

    // Don't bother testing specific bit, if any bit is set in the word, fail.
    masm.branchPtr(Assembler::NotEqual, scratchReg, ImmPtr(nullptr), &failureReconstructInputs);

    // Load the value.  Use scratchReg and tempReg to form a ValueOperand to load into.
    masm.addPtr(Imm32(ArgumentsData::offsetOfArgs()), argData);
    regs.add(scratchReg);
    regs.add(tempReg);
    ValueOperand tempVal = regs.takeAnyValue();
    masm.loadValue(BaseIndex(argData, idxReg, ScaleFromElemWidth(sizeof(Value))), tempVal);

    // Make sure that this is not a FORWARD_TO_CALL_SLOT magic value.
    masm.branchTestMagic(Assembler::Equal, tempVal, &failureReconstructInputs);

#if JS_HAS_NO_SUCH_METHOD
    if (isCallElem_) {
        Label afterNoSuchMethod;
        Label skipNoSuchMethod;

        masm.branchTestUndefined(Assembler::NotEqual, tempVal, &skipNoSuchMethod);

        // Call __noSuchMethod__ checker.  Object pointer is in objReg.
        regs = availableGeneralRegs(0);
        regs.takeUnchecked(objReg);
        regs.takeUnchecked(idxReg);
        regs.takeUnchecked(BaselineTailCallReg);
        ValueOperand val = regs.takeValueOperand();

        // Box and push obj and key onto baseline frame stack for decompiler.
        EmitRestoreTailCallReg(masm);
        masm.tagValue(JSVAL_TYPE_OBJECT, objReg, val);
        masm.pushValue(val);
        masm.tagValue(JSVAL_TYPE_INT32, idxReg, val);
        masm.pushValue(val);
        EmitRepushTailCallReg(masm);

        regs.add(val);
        enterStubFrame(masm, regs.getAnyExcluding(BaselineTailCallReg));
        regs.take(val);

        masm.pushValue(val);
        masm.push(objReg);
        if (!callVM(LookupNoSuchMethodHandlerInfo, masm))
            return false;

        leaveStubFrame(masm);

        // Pop pushed obj and key from baseline stack.
        EmitUnstowICValues(masm, 2, /* discard = */ true);

        // Result is already in R0
        masm.jump(&afterNoSuchMethod);
        masm.bind(&skipNoSuchMethod);

        masm.moveValue(tempVal, R0);
        masm.bind(&afterNoSuchMethod);
    } else {
        masm.moveValue(tempVal, R0);
    }
#else
    // Copy value from temp to R0.
    masm.moveValue(tempVal, R0);
#endif

    // Type-check result
    EmitEnterTypeMonitorIC(masm);

    // Failed, but inputs are deconstructed into object and int, and need to be
    // reconstructed into values.
    masm.bind(&failureReconstructInputs);
    masm.tagValue(JSVAL_TYPE_OBJECT, objReg, R0);
    masm.tagValue(JSVAL_TYPE_INT32, idxReg, R1);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
4845
4846 //
4847 // SetElem_Fallback
4848 //
4849
4850 static bool
4851 SetElemDenseAddHasSameShapes(ICSetElem_DenseAdd *stub, JSObject *obj)
4852 {
4853 size_t numShapes = stub->protoChainDepth() + 1;
4854 for (size_t i = 0; i < numShapes; i++) {
4855 static const size_t MAX_DEPTH = ICSetElem_DenseAdd::MAX_PROTO_CHAIN_DEPTH;
4856 if (obj->lastProperty() != stub->toImplUnchecked<MAX_DEPTH>()->shape(i))
4857 return false;
4858 obj = obj->getProto();
4859 if (!obj && i != numShapes - 1)
4860 return false;
4861 }
4862
4863 return true;
4864 }
4865
4866 static bool
4867 DenseSetElemStubExists(JSContext *cx, ICStub::Kind kind, ICSetElem_Fallback *stub, HandleObject obj)
4868 {
4869 JS_ASSERT(kind == ICStub::SetElem_Dense || kind == ICStub::SetElem_DenseAdd);
4870
4871 for (ICStubConstIterator iter = stub->beginChainConst(); !iter.atEnd(); iter++) {
4872 if (kind == ICStub::SetElem_Dense && iter->isSetElem_Dense()) {
4873 ICSetElem_Dense *dense = iter->toSetElem_Dense();
4874 if (obj->lastProperty() == dense->shape() && obj->getType(cx) == dense->type())
4875 return true;
4876 }
4877
4878 if (kind == ICStub::SetElem_DenseAdd && iter->isSetElem_DenseAdd()) {
4879 ICSetElem_DenseAdd *dense = iter->toSetElem_DenseAdd();
4880 if (obj->getType(cx) == dense->type() && SetElemDenseAddHasSameShapes(dense, obj))
4881 return true;
4882 }
4883 }
4884 return false;
4885 }
4886
4887 static bool
4888 TypedArraySetElemStubExists(ICSetElem_Fallback *stub, HandleObject obj, bool expectOOB)
4889 {
4890 for (ICStubConstIterator iter = stub->beginChainConst(); !iter.atEnd(); iter++) {
4891 if (!iter->isSetElem_TypedArray())
4892 continue;
4893 ICSetElem_TypedArray *taStub = iter->toSetElem_TypedArray();
4894 if (obj->lastProperty() == taStub->shape() && taStub->expectOutOfBounds() == expectOOB)
4895 return true;
4896 }
4897 return false;
4898 }
4899
4900 static bool
4901 RemoveExistingTypedArraySetElemStub(JSContext *cx, ICSetElem_Fallback *stub, HandleObject obj)
4902 {
4903 for (ICStubIterator iter = stub->beginChain(); !iter.atEnd(); iter++) {
4904 if (!iter->isSetElem_TypedArray())
4905 continue;
4906
4907 if (obj->lastProperty() != iter->toSetElem_TypedArray()->shape())
4908 continue;
4909
4910 // TypedArraySetElem stubs are only removed using this procedure if
4911 // being replaced with one that expects out of bounds index.
4912 JS_ASSERT(!iter->toSetElem_TypedArray()->expectOutOfBounds());
4913 iter.unlink(cx);
4914 return true;
4915 }
4916 return false;
4917 }
4918
// Decide whether the dense-element store that just executed on |obj| can be
// handled by an optimized stub. |oldShape|, |oldCapacity| and |oldInitLength|
// describe the object's state captured before the store ran.
//
// On returning true, *isAddingCaseOut is set to true if the store appended a
// new element (initialized length grew by exactly one), in which case
// *protoDepthOut holds the prototype-chain depth that a SetElem_DenseAdd stub
// must shape-guard. Otherwise the store was an in-bounds overwrite suitable
// for a SetElem_Dense stub.
static bool
CanOptimizeDenseSetElem(JSContext *cx, HandleObject obj, uint32_t index,
                        HandleShape oldShape, uint32_t oldCapacity, uint32_t oldInitLength,
                        bool *isAddingCaseOut, size_t *protoDepthOut)
{
    uint32_t initLength = obj->getDenseInitializedLength();
    uint32_t capacity = obj->getDenseCapacity();

    *isAddingCaseOut = false;
    *protoDepthOut = 0;

    // Some initial sanity checks: the store can't have shrunk either quantity.
    if (initLength < oldInitLength || capacity < oldCapacity)
        return false;

    RootedShape shape(cx, obj->lastProperty());

    // Cannot optimize if the shape changed.
    if (oldShape != shape)
        return false;

    // Cannot optimize if the capacity changed.
    if (oldCapacity != capacity)
        return false;

    // Cannot optimize if the index doesn't fit within the new initialized length.
    if (index >= initLength)
        return false;

    // Cannot optimize if the value at position after the set is a hole.
    if (!obj->containsDenseElement(index))
        return false;

    // At this point, if we know that the initLength did not change, then
    // an optimized (overwrite-style) set is possible.
    if (oldInitLength == initLength)
        return true;

    // If it did change, ensure that it changed specifically by incrementing by 1
    // to accommodate this particular indexed set.
    if (oldInitLength + 1 != initLength)
        return false;
    if (index != oldInitLength)
        return false;

    // The checks are not complete. The object may have a setter definition,
    // either directly, or via a prototype, or via the target object for a prototype
    // which is a proxy, that handles a particular integer write.
    // Scan the prototype and shape chain to make sure that this is not the case.
    RootedObject curObj(cx, obj);
    while (curObj) {
        // Ensure object is native.
        if (!curObj->isNative())
            return false;

        // Ensure all indexed properties are stored in dense elements.
        if (curObj->isIndexed())
            return false;

        curObj = curObj->getProto();
        if (curObj)
            ++*protoDepthOut;
    }

    // A DenseAdd stub can only guard a bounded number of proto shapes.
    if (*protoDepthOut > ICSetElem_DenseAdd::MAX_PROTO_CHAIN_DEPTH)
        return false;

    *isAddingCaseOut = true;

    return true;
}
4990
// Fallback VM call for JSOP_SETELEM / JSOP_INITELEM / JSOP_INITELEM_ARRAY.
// Performs the store through the generic interpreter helpers, fixes up the
// decompiler stack slot, and then tries to attach an optimized stub
// (SetElem_Dense, SetElem_DenseAdd, or SetElem_TypedArray) for future hits.
// |stack| points at the values pushed for the decompiler (see the fallback
// stub-code generator below); stack[2] is the object slot that gets
// overwritten with the rhs.
static bool
DoSetElemFallback(JSContext *cx, BaselineFrame *frame, ICSetElem_Fallback *stub_, Value *stack,
                  HandleValue objv, HandleValue index, HandleValue rhs)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICSetElem_Fallback *> stub(frame, stub_);

    RootedScript script(cx, frame->script());
    jsbytecode *pc = stub->icEntry()->pc(script);
    JSOp op = JSOp(*pc);
    FallbackICSpew(cx, stub, "SetElem(%s)", js_CodeName[JSOp(*pc)]);

    JS_ASSERT(op == JSOP_SETELEM ||
              op == JSOP_INITELEM ||
              op == JSOP_INITELEM_ARRAY);

    RootedObject obj(cx, ToObjectFromStack(cx, objv));
    if (!obj)
        return false;

    RootedShape oldShape(cx, obj->lastProperty());

    // Capture the old capacity and initialized length so we can tell, after
    // the store, whether this was an in-bounds overwrite or an append.
    uint32_t oldCapacity = 0;
    uint32_t oldInitLength = 0;
    if (obj->isNative() && index.isInt32() && index.toInt32() >= 0) {
        oldCapacity = obj->getDenseCapacity();
        oldInitLength = obj->getDenseInitializedLength();
    }

    // Perform the actual store via the matching interpreter helper.
    if (op == JSOP_INITELEM) {
        if (!InitElemOperation(cx, obj, index, rhs))
            return false;
    } else if (op == JSOP_INITELEM_ARRAY) {
        JS_ASSERT(uint32_t(index.toInt32()) == GET_UINT24(pc));
        if (!InitArrayElemOperation(cx, pc, obj, index.toInt32(), rhs))
            return false;
    } else {
        if (!SetObjectElement(cx, obj, index, rhs, script->strict(), script, pc))
            return false;
    }

    // Overwrite the object on the stack (pushed for the decompiler) with the rhs.
    JS_ASSERT(stack[2] == objv);
    stack[2] = rhs;

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    if (stub->numOptimizedStubs() >= ICSetElem_Fallback::MAX_OPTIMIZED_STUBS) {
        // TODO: Discard all stubs in this IC and replace with inert megamorphic stub.
        // But for now we just bail.
        return true;
    }

    // Try to generate new stubs.
    if (obj->isNative() &&
        !obj->is<TypedArrayObject>() &&
        index.isInt32() && index.toInt32() >= 0 &&
        !rhs.isMagic(JS_ELEMENTS_HOLE))
    {
        bool addingCase;
        size_t protoDepth;

        if (CanOptimizeDenseSetElem(cx, obj, index.toInt32(), oldShape, oldCapacity, oldInitLength,
                                    &addingCase, &protoDepth))
        {
            RootedShape shape(cx, obj->lastProperty());
            RootedTypeObject type(cx, obj->getType(cx));
            if (!type)
                return false;

            if (addingCase && !DenseSetElemStubExists(cx, ICStub::SetElem_DenseAdd, stub, obj)) {
                IonSpew(IonSpew_BaselineIC,
                        " Generating SetElem_DenseAdd stub "
                        "(shape=%p, type=%p, protoDepth=%u)",
                        obj->lastProperty(), type.get(), protoDepth);
                ICSetElemDenseAddCompiler compiler(cx, obj, protoDepth);
                ICUpdatedStub *denseStub = compiler.getStub(compiler.getStubSpace(script));
                if (!denseStub)
                    return false;
                if (!denseStub->addUpdateStubForValue(cx, script, obj, JSID_VOIDHANDLE, rhs))
                    return false;

                stub->addNewStub(denseStub);
            } else if (!addingCase &&
                       !DenseSetElemStubExists(cx, ICStub::SetElem_Dense, stub, obj))
            {
                IonSpew(IonSpew_BaselineIC,
                        " Generating SetElem_Dense stub (shape=%p, type=%p)",
                        obj->lastProperty(), type.get());
                ICSetElem_Dense::Compiler compiler(cx, shape, type);
                ICUpdatedStub *denseStub = compiler.getStub(compiler.getStubSpace(script));
                if (!denseStub)
                    return false;
                if (!denseStub->addUpdateStubForValue(cx, script, obj, JSID_VOIDHANDLE, rhs))
                    return false;

                stub->addNewStub(denseStub);
            }
        }

        return true;
    }

    if (obj->is<TypedArrayObject>() && index.isNumber() && rhs.isNumber()) {
        Rooted<TypedArrayObject*> tarr(cx, &obj->as<TypedArrayObject>());
        // Without FP support we can't compile stubs that need double loads.
        if (!cx->runtime()->jitSupportsFloatingPoint &&
            (TypedArrayRequiresFloatingPoint(tarr) || index.isDouble()))
        {
            return true;
        }

        uint32_t len = tarr->length();
        double idx = index.toNumber();
        bool expectOutOfBounds = (idx < 0 || idx >= double(len));

        if (!TypedArraySetElemStubExists(stub, tarr, expectOutOfBounds)) {
            // Remove any existing TypedArraySetElemStub that doesn't handle out-of-bounds
            if (expectOutOfBounds)
                RemoveExistingTypedArraySetElemStub(cx, stub, tarr);

            IonSpew(IonSpew_BaselineIC,
                    " Generating SetElem_TypedArray stub (shape=%p, type=%u, oob=%s)",
                    tarr->lastProperty(), tarr->type(), expectOutOfBounds ? "yes" : "no");
            ICSetElem_TypedArray::Compiler compiler(cx, tarr->lastProperty(), tarr->type(),
                                                    expectOutOfBounds);
            ICStub *typedArrayStub = compiler.getStub(compiler.getStubSpace(script));
            if (!typedArrayStub)
                return false;

            stub->addNewStub(typedArrayStub);
            return true;
        }
    }

    return true;
}
5130
typedef bool (*DoSetElemFallbackFn)(JSContext *, BaselineFrame *, ICSetElem_Fallback *, Value *,
                                    HandleValue, HandleValue, HandleValue);
// PopValues(2): the index and rhs synced below are popped on VM return.
static const VMFunction DoSetElemFallbackInfo =
    FunctionInfo<DoSetElemFallbackFn>(DoSetElemFallback, PopValues(2));

// Emit the SetElem fallback stub: sync object/index/rhs for the decompiler,
// then tail-call DoSetElemFallback with (frame, stub, stack-ptr, obj, index, rhs).
bool
ICSetElem_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(R0 == JSReturnOperand);

    EmitRestoreTailCallReg(masm);

    // State: R0: object, R1: index, stack: rhs.
    // For the decompiler, the stack has to be: object, index, rhs,
    // so we push the index, then overwrite the rhs Value with R0
    // and push the rhs value.
    masm.pushValue(R1);
    masm.loadValue(Address(BaselineStackReg, sizeof(Value)), R1);
    masm.storeValue(R0, Address(BaselineStackReg, sizeof(Value)));
    masm.pushValue(R1);

    // Push arguments.
    masm.pushValue(R1); // RHS

    // Push index. On x86 and ARM two push instructions are emitted so use a
    // separate register to store the old stack pointer.
    masm.mov(BaselineStackReg, R1.scratchReg());
    masm.pushValue(Address(R1.scratchReg(), 2 * sizeof(Value)));
    masm.pushValue(R0); // Object.

    // Push pointer to stack values, so that the stub can overwrite the object
    // (pushed for the decompiler) with the rhs.
    masm.computeEffectiveAddress(Address(BaselineStackReg, 3 * sizeof(Value)), R0.scratchReg());
    masm.push(R0.scratchReg());

    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(DoSetElemFallbackInfo, masm);
}
5171
5172 void
5173 BaselineScript::noteArrayWriteHole(uint32_t pcOffset)
5174 {
5175 ICEntry &entry = icEntryFromPCOffset(pcOffset);
5176 ICFallbackStub *stub = entry.fallbackStub();
5177
5178 if (stub->isSetElem_Fallback())
5179 stub->toSetElem_Fallback()->noteArrayWriteHole();
5180 }
5181
5182 //
5183 // SetElem_Dense
5184 //
5185
// Emit the SetElem_Dense stub: guard R0 is an object with the stub's shape
// and type and R1 is an int32 index inside the dense initialized length
// (and not a hole), run the type-update IC on the rhs, then store the rhs
// into the dense element slot (with pre/post write barriers).
bool
ICSetElem_Dense::Compiler::generateStubCode(MacroAssembler &masm)
{
    // R0 = object
    // R1 = key
    // Stack = { ... rhs-value, <return-addr>? }
    Label failure;
    Label failureUnstow;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);
    masm.branchTestInt32(Assembler::NotEqual, R1, &failure);

    GeneralRegisterSet regs(availableGeneralRegs(2));
    Register scratchReg = regs.takeAny();

    // Unbox R0 and guard on its shape.
    Register obj = masm.extractObject(R0, ExtractTemp0);
    masm.loadPtr(Address(BaselineStubReg, ICSetElem_Dense::offsetOfShape()), scratchReg);
    masm.branchTestObjShape(Assembler::NotEqual, obj, scratchReg, &failure);

    // Stow both R0 and R1 (object and key).
    // But R0 and R1 still hold their values.
    EmitStowICValues(masm, 2);

    // We may need to free up some registers.
    regs = availableGeneralRegs(0);
    regs.take(R0);

    // Guard that the type object matches.
    Register typeReg = regs.takeAny();
    masm.loadPtr(Address(BaselineStubReg, ICSetElem_Dense::offsetOfType()), typeReg);
    masm.branchPtr(Assembler::NotEqual, Address(obj, JSObject::offsetOfType()), typeReg,
                   &failureUnstow);
    regs.add(typeReg);

    // Stack is now: { ..., rhs-value, object-value, key-value, maybe?-RET-ADDR }
    // Load rhs-value in to R0
    masm.loadValue(Address(BaselineStackReg, 2 * sizeof(Value) + ICStackValueOffset), R0);

    // Call the type-update stub.
    if (!callTypeUpdateIC(masm, sizeof(Value)))
        return false;

    // Unstow R0 and R1 (object and key)
    EmitUnstowICValues(masm, 2);

    // Reset register set.
    regs = availableGeneralRegs(2);
    scratchReg = regs.takeAny();

    // Unbox object and key.
    obj = masm.extractObject(R0, ExtractTemp0);
    Register key = masm.extractInt32(R1, ExtractTemp1);

    // Load obj->elements in scratchReg.
    masm.loadPtr(Address(obj, JSObject::offsetOfElements()), scratchReg);

    // Bounds check: key must be below the initialized length.
    Address initLength(scratchReg, ObjectElements::offsetOfInitializedLength());
    masm.branch32(Assembler::BelowOrEqual, initLength, key, &failure);

    // Hole check.
    BaseIndex element(scratchReg, key, TimesEight);
    masm.branchTestMagic(Assembler::Equal, element, &failure);

    // Failure is not possible now. Free up registers.
    regs.add(R0);
    regs.add(R1);
    regs.takeUnchecked(obj);
    regs.takeUnchecked(key);
    Address valueAddr(BaselineStackReg, ICStackValueOffset);

    // Convert int32 values to double if convertDoubleElements is set. In this
    // case the heap typeset is guaranteed to contain both int32 and double, so
    // it's okay to store a double.
    Label dontConvertDoubles;
    Address elementsFlags(scratchReg, ObjectElements::offsetOfFlags());
    masm.branchTest32(Assembler::Zero, elementsFlags,
                      Imm32(ObjectElements::CONVERT_DOUBLE_ELEMENTS),
                      &dontConvertDoubles);
    // Note that double arrays are only created by IonMonkey, so if we have no
    // floating-point support Ion is disabled and there should be no double arrays.
    if (cx->runtime()->jitSupportsFloatingPoint)
        masm.convertInt32ValueToDouble(valueAddr, regs.getAny(), &dontConvertDoubles);
    else
        masm.assumeUnreachable("There shouldn't be double arrays when there is no FP support.");
    masm.bind(&dontConvertDoubles);

    // Don't overwrite R0 because |obj| might overlap with it, and it's needed
    // for post-write barrier later.
    ValueOperand tmpVal = regs.takeAnyValue();
    masm.loadValue(valueAddr, tmpVal);
    EmitPreBarrier(masm, element, MIRType_Value);
    masm.storeValue(tmpVal, element);
    regs.add(key);
#ifdef JSGC_GENERATIONAL
    {
        Register r = regs.takeAny();
        GeneralRegisterSet saveRegs;
        emitPostWriteBarrierSlot(masm, obj, tmpVal, r, saveRegs);
        regs.add(r);
    }
#endif
    EmitReturnFromIC(masm);


    // Failure case - fail but first unstow R0 and R1
    masm.bind(&failureUnstow);
    EmitUnstowICValues(masm, 2);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
5300
5301 static bool
5302 GetProtoShapes(JSObject *obj, size_t protoChainDepth, AutoShapeVector *shapes)
5303 {
5304 JS_ASSERT(shapes->length() == 1);
5305 JSObject *curProto = obj->getProto();
5306 for (size_t i = 0; i < protoChainDepth; i++) {
5307 if (!shapes->append(curProto->lastProperty()))
5308 return false;
5309 curProto = curProto->getProto();
5310 }
5311 JS_ASSERT(!curProto);
5312 return true;
5313 }
5314
5315 //
5316 // SetElem_DenseAdd
5317 //
5318
// Allocate a SetElem_DenseAdd stub for |obj_|. Collects the shapes of the
// object and its proto chain, then dispatches on the runtime proto-chain
// depth to instantiate the matching ICSetElem_DenseAddImpl<N> template
// (the depth is a compile-time parameter of the impl class).
ICUpdatedStub *
ICSetElemDenseAddCompiler::getStub(ICStubSpace *space)
{
    AutoShapeVector shapes(cx);
    if (!shapes.append(obj_->lastProperty()))
        return nullptr;

    if (!GetProtoShapes(obj_, protoChainDepth_, &shapes))
        return nullptr;

    // Keep the switch below in sync with the supported maximum depth.
    JS_STATIC_ASSERT(ICSetElem_DenseAdd::MAX_PROTO_CHAIN_DEPTH == 4);

    ICUpdatedStub *stub = nullptr;
    switch (protoChainDepth_) {
      case 0: stub = getStubSpecific<0>(space, &shapes); break;
      case 1: stub = getStubSpecific<1>(space, &shapes); break;
      case 2: stub = getStubSpecific<2>(space, &shapes); break;
      case 3: stub = getStubSpecific<3>(space, &shapes); break;
      case 4: stub = getStubSpecific<4>(space, &shapes); break;
      default: MOZ_ASSUME_UNREACHABLE("ProtoChainDepth too high.");
    }
    if (!stub || !stub->initUpdatingChain(cx, space))
        return nullptr;
    return stub;
}
5344
// Emit the SetElem_DenseAdd stub: like SetElem_Dense, but additionally
// shape-guards |protoChainDepth_| objects on the proto chain and handles the
// append case where key == initializedLength, bumping initLength (and length
// when needed) before storing the rhs.
bool
ICSetElemDenseAddCompiler::generateStubCode(MacroAssembler &masm)
{
    // R0 = object
    // R1 = key
    // Stack = { ... rhs-value, <return-addr>? }
    Label failure;
    Label failureUnstow;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);
    masm.branchTestInt32(Assembler::NotEqual, R1, &failure);

    GeneralRegisterSet regs(availableGeneralRegs(2));
    Register scratchReg = regs.takeAny();

    // Unbox R0 and guard on its shape.
    Register obj = masm.extractObject(R0, ExtractTemp0);
    masm.loadPtr(Address(BaselineStubReg, ICSetElem_DenseAddImpl<0>::offsetOfShape(0)),
                 scratchReg);
    masm.branchTestObjShape(Assembler::NotEqual, obj, scratchReg, &failure);

    // Stow both R0 and R1 (object and key).
    // But R0 and R1 still hold their values.
    EmitStowICValues(masm, 2);

    // We may need to free up some registers.
    regs = availableGeneralRegs(0);
    regs.take(R0);

    // Guard that the type object matches.
    Register typeReg = regs.takeAny();
    masm.loadPtr(Address(BaselineStubReg, ICSetElem_DenseAdd::offsetOfType()), typeReg);
    masm.branchPtr(Assembler::NotEqual, Address(obj, JSObject::offsetOfType()), typeReg,
                   &failureUnstow);
    regs.add(typeReg);

    // Shape guard objects on the proto chain.
    scratchReg = regs.takeAny();
    Register protoReg = regs.takeAny();
    for (size_t i = 0; i < protoChainDepth_; i++) {
        masm.loadObjProto(i == 0 ? obj : protoReg, protoReg);
        // A null proto means the recorded chain no longer matches.
        masm.branchTestPtr(Assembler::Zero, protoReg, protoReg, &failureUnstow);
        masm.loadPtr(Address(BaselineStubReg, ICSetElem_DenseAddImpl<0>::offsetOfShape(i + 1)),
                     scratchReg);
        masm.branchTestObjShape(Assembler::NotEqual, protoReg, scratchReg, &failureUnstow);
    }
    regs.add(protoReg);
    regs.add(scratchReg);

    // Stack is now: { ..., rhs-value, object-value, key-value, maybe?-RET-ADDR }
    // Load rhs-value in to R0
    masm.loadValue(Address(BaselineStackReg, 2 * sizeof(Value) + ICStackValueOffset), R0);

    // Call the type-update stub.
    if (!callTypeUpdateIC(masm, sizeof(Value)))
        return false;

    // Unstow R0 and R1 (object and key)
    EmitUnstowICValues(masm, 2);

    // Reset register set.
    regs = availableGeneralRegs(2);
    scratchReg = regs.takeAny();

    // Unbox obj and key.
    obj = masm.extractObject(R0, ExtractTemp0);
    Register key = masm.extractInt32(R1, ExtractTemp1);

    // Load obj->elements in scratchReg.
    masm.loadPtr(Address(obj, JSObject::offsetOfElements()), scratchReg);

    // Bounds check (key == initLength): this stub only handles exact appends.
    Address initLength(scratchReg, ObjectElements::offsetOfInitializedLength());
    masm.branch32(Assembler::NotEqual, initLength, key, &failure);

    // Capacity check.
    Address capacity(scratchReg, ObjectElements::offsetOfCapacity());
    masm.branch32(Assembler::BelowOrEqual, capacity, key, &failure);

    // Failure is not possible now. Free up registers.
    regs.add(R0);
    regs.add(R1);
    regs.takeUnchecked(obj);
    regs.takeUnchecked(key);

    // Increment initLength before write.
    masm.add32(Imm32(1), initLength);

    // If length is now <= key, increment length before write.
    Label skipIncrementLength;
    Address length(scratchReg, ObjectElements::offsetOfLength());
    masm.branch32(Assembler::Above, length, key, &skipIncrementLength);
    masm.add32(Imm32(1), length);
    masm.bind(&skipIncrementLength);

    Address valueAddr(BaselineStackReg, ICStackValueOffset);

    // Convert int32 values to double if convertDoubleElements is set. In this
    // case the heap typeset is guaranteed to contain both int32 and double, so
    // it's okay to store a double.
    Label dontConvertDoubles;
    Address elementsFlags(scratchReg, ObjectElements::offsetOfFlags());
    masm.branchTest32(Assembler::Zero, elementsFlags,
                      Imm32(ObjectElements::CONVERT_DOUBLE_ELEMENTS),
                      &dontConvertDoubles);
    // Note that double arrays are only created by IonMonkey, so if we have no
    // floating-point support Ion is disabled and there should be no double arrays.
    if (cx->runtime()->jitSupportsFloatingPoint)
        masm.convertInt32ValueToDouble(valueAddr, regs.getAny(), &dontConvertDoubles);
    else
        masm.assumeUnreachable("There shouldn't be double arrays when there is no FP support.");
    masm.bind(&dontConvertDoubles);

    // Write the value. No need for pre-barrier since we're not overwriting an old value.
    ValueOperand tmpVal = regs.takeAnyValue();
    BaseIndex element(scratchReg, key, TimesEight);
    masm.loadValue(valueAddr, tmpVal);
    masm.storeValue(tmpVal, element);
    regs.add(key);
#ifdef JSGC_GENERATIONAL
    {
        Register r = regs.takeAny();
        GeneralRegisterSet saveRegs;
        emitPostWriteBarrierSlot(masm, obj, tmpVal, r, saveRegs);
        regs.add(r);
    }
#endif
    EmitReturnFromIC(masm);

    // Failure case - fail but first unstow R0 and R1
    masm.bind(&failureUnstow);
    EmitUnstowICValues(masm, 2);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
5482
5483 //
5484 // SetElem_TypedArray
5485 //
5486
// Emit the SetElem_TypedArray stub: guard R0 is an object with the stub's
// shape, coerce the index to int32, bounds-check against the array length
// (optionally treating out-of-bounds as a silent no-op when
// expectOutOfBounds_ is set), then store the rhs converted to the array's
// element type |type_|.
bool
ICSetElem_TypedArray::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    GeneralRegisterSet regs(availableGeneralRegs(2));
    Register scratchReg = regs.takeAny();

    // Unbox R0 and shape guard.
    Register obj = masm.extractObject(R0, ExtractTemp0);
    masm.loadPtr(Address(BaselineStubReg, ICSetElem_TypedArray::offsetOfShape()), scratchReg);
    masm.branchTestObjShape(Assembler::NotEqual, obj, scratchReg, &failure);

    // Ensure the index is an integer.
    if (cx->runtime()->jitSupportsFloatingPoint) {
        Label isInt32;
        masm.branchTestInt32(Assembler::Equal, R1, &isInt32);
        {
            // If the index is a double, try to convert it to int32. It's okay
            // to convert -0 to 0: the shape check ensures the object is a typed
            // array so the difference is not observable.
            masm.branchTestDouble(Assembler::NotEqual, R1, &failure);
            masm.unboxDouble(R1, FloatReg0);
            masm.convertDoubleToInt32(FloatReg0, scratchReg, &failure, /* negZeroCheck = */false);
            masm.tagValue(JSVAL_TYPE_INT32, scratchReg, R1);
        }
        masm.bind(&isInt32);
    } else {
        masm.branchTestInt32(Assembler::NotEqual, R1, &failure);
    }

    // Unbox key.
    Register key = masm.extractInt32(R1, ExtractTemp1);

    // Bounds check.
    Label oobWrite;
    masm.unboxInt32(Address(obj, TypedArrayObject::lengthOffset()), scratchReg);
    masm.branch32(Assembler::BelowOrEqual, scratchReg, key,
                  expectOutOfBounds_ ? &oobWrite : &failure);

    // Load the elements vector.
    masm.loadPtr(Address(obj, TypedArrayObject::dataOffset()), scratchReg);

    BaseIndex dest(scratchReg, key, ScaleFromElemWidth(TypedArrayObject::slotWidth(type_)));
    Address value(BaselineStackReg, ICStackValueOffset);

    // We need a second scratch register. It's okay to clobber the type tag of
    // R0 or R1, as long as it's restored before jumping to the next stub.
    regs = availableGeneralRegs(0);
    regs.takeUnchecked(obj);
    regs.takeUnchecked(key);
    regs.take(scratchReg);
    Register secondScratch = regs.takeAny();

    if (type_ == ScalarTypeDescr::TYPE_FLOAT32 || type_ == ScalarTypeDescr::TYPE_FLOAT64) {
        // Float element: coerce the rhs to double, narrowing to float32 if
        // the target element type (and backend) call for it.
        masm.ensureDouble(value, FloatReg0, &failure);
        if (LIRGenerator::allowFloat32Optimizations() &&
            type_ == ScalarTypeDescr::TYPE_FLOAT32)
        {
            masm.convertDoubleToFloat32(FloatReg0, ScratchFloatReg);
            masm.storeToTypedFloatArray(type_, ScratchFloatReg, dest);
        } else {
            masm.storeToTypedFloatArray(type_, FloatReg0, dest);
        }
        EmitReturnFromIC(masm);
    } else if (type_ == ScalarTypeDescr::TYPE_UINT8_CLAMPED) {
        Label notInt32;
        masm.branchTestInt32(Assembler::NotEqual, value, &notInt32);
        masm.unboxInt32(value, secondScratch);
        masm.clampIntToUint8(secondScratch);

        Label clamped;
        masm.bind(&clamped);
        masm.storeToTypedIntArray(type_, secondScratch, dest);
        EmitReturnFromIC(masm);

        // If the value is a double, clamp to uint8 and jump back.
        // Else, jump to failure.
        masm.bind(&notInt32);
        if (cx->runtime()->jitSupportsFloatingPoint) {
            masm.branchTestDouble(Assembler::NotEqual, value, &failure);
            masm.unboxDouble(value, FloatReg0);
            masm.clampDoubleToUint8(FloatReg0, secondScratch);
            masm.jump(&clamped);
        } else {
            masm.jump(&failure);
        }
    } else {
        Label notInt32;
        masm.branchTestInt32(Assembler::NotEqual, value, &notInt32);
        masm.unboxInt32(value, secondScratch);

        Label isInt32;
        masm.bind(&isInt32);
        masm.storeToTypedIntArray(type_, secondScratch, dest);
        EmitReturnFromIC(masm);

        // If the value is a double, truncate and jump back.
        // Else, jump to failure.
        Label failureRestoreRegs;
        masm.bind(&notInt32);
        if (cx->runtime()->jitSupportsFloatingPoint) {
            masm.branchTestDouble(Assembler::NotEqual, value, &failure);
            masm.unboxDouble(value, FloatReg0);
            masm.branchTruncateDouble(FloatReg0, secondScratch, &failureRestoreRegs);
            masm.jump(&isInt32);
        } else {
            masm.jump(&failure);
        }

        // Writing to secondScratch may have clobbered R0 or R1, restore them
        // first.
        masm.bind(&failureRestoreRegs);
        masm.tagValue(JSVAL_TYPE_OBJECT, obj, R0);
        masm.tagValue(JSVAL_TYPE_INT32, key, R1);
    }

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);

    // Out-of-bounds writes are silently ignored (per typed-array semantics)
    // when this stub was compiled to expect them.
    if (expectOutOfBounds_) {
        masm.bind(&oobWrite);
        EmitReturnFromIC(masm);
    }
    return true;
}
5615
5616 //
5617 // In_Fallback
5618 //
5619
5620 static bool
5621 DoInFallback(JSContext *cx, ICIn_Fallback *stub, HandleValue key, HandleValue objValue,
5622 MutableHandleValue res)
5623 {
5624 FallbackICSpew(cx, stub, "In");
5625
5626 if (!objValue.isObject()) {
5627 js_ReportValueError(cx, JSMSG_IN_NOT_OBJECT, -1, objValue, NullPtr());
5628 return false;
5629 }
5630
5631 RootedObject obj(cx, &objValue.toObject());
5632
5633 bool cond = false;
5634 if (!OperatorIn(cx, key, obj, &cond))
5635 return false;
5636
5637 res.setBoolean(cond);
5638 return true;
5639 }
5640
typedef bool (*DoInFallbackFn)(JSContext *, ICIn_Fallback *, HandleValue, HandleValue,
                               MutableHandleValue);
// PopValues(2): the two values synced below are popped on VM return.
static const VMFunction DoInFallbackInfo =
    FunctionInfo<DoInFallbackFn>(DoInFallback, PopValues(2));

// Emit the In fallback stub: sync key/object for the decompiler, then
// tail-call DoInFallback with (stub, key, objValue).
bool
ICIn_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    // Sync for the decompiler.
    masm.pushValue(R0);
    masm.pushValue(R1);

    // Push arguments.
    masm.pushValue(R1);
    masm.pushValue(R0);
    masm.push(BaselineStubReg);

    return tailCallVM(DoInFallbackInfo, masm);
}
5662
// Attach an optimized stub for a GETGNAME/CALLGNAME op. Looks the name up
// directly on the global: a plain data property gets a GetName_Global
// slot-read stub; a cacheable native getter gets a GetProp_CallNative stub.
// Returns true (without attaching) when the property is absent or not
// cacheable; returns false only on error.
static bool
TryAttachGlobalNameStub(JSContext *cx, HandleScript script, jsbytecode *pc,
                        ICGetName_Fallback *stub, HandleObject global,
                        HandlePropertyName name)
{
    JS_ASSERT(global->is<GlobalObject>());

    RootedId id(cx, NameToId(name));

    // Instantiate this global property, for use during Ion compilation.
    if (IsIonEnabled(cx))
        types::EnsureTrackPropertyTypes(cx, global, NameToId(name));

    // The property must be found, and it must be found as a normal data property.
    RootedShape shape(cx, global->nativeLookup(cx, id));
    if (!shape)
        return true;

    if (shape->hasDefaultGetter() && shape->hasSlot()) {

        // Global-object slot-read stubs only encode dynamic-slot offsets.
        JS_ASSERT(shape->slot() >= global->numFixedSlots());
        uint32_t slot = shape->slot() - global->numFixedSlots();

        // TODO: if there's a previous stub discard it, or just update its Shape + slot?

        ICStub *monitorStub = stub->fallbackMonitorStub()->firstMonitorStub();
        IonSpew(IonSpew_BaselineIC, " Generating GetName(GlobalName) stub");
        ICGetName_Global::Compiler compiler(cx, monitorStub, global->lastProperty(), slot);
        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;

        stub->addNewStub(newStub);
        return true;
    }

    // Otherwise, try a native (non-scripted) getter call stub.
    bool isScripted;
    if (IsCacheableGetPropCall(cx, global, global, shape, &isScripted) && !isScripted)
    {
        ICStub *monitorStub = stub->fallbackMonitorStub()->firstMonitorStub();
        IonSpew(IonSpew_BaselineIC, " Generating GetName(GlobalName/NativeGetter) stub");
        RootedFunction getter(cx, &shape->getterObject()->as<JSFunction>());
        ICGetProp_CallNative::Compiler compiler(cx, monitorStub, global,
                                                getter, script->pcToOffset(pc),
                                                /* inputDefinitelyObject = */ true);
        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;

        stub->addNewStub(newStub);
        return true;
    }

    return true;
}
5719
// Attach an optimized GetName stub that reads a slot from an object on the
// scope chain. Walks the chain from |initialScopeChain|, collecting the
// shape of each hop, until the name is found as an own property (or on the
// global). Dispatches on the number of collected shapes to instantiate the
// matching ICGetName_Scope<N> template (the hop count is a compile-time
// parameter). Returns true without attaching when the lookup is not
// cacheable; returns false only on error.
static bool
TryAttachScopeNameStub(JSContext *cx, HandleScript script, ICGetName_Fallback *stub,
                       HandleObject initialScopeChain, HandlePropertyName name)
{
    AutoShapeVector shapes(cx);
    RootedId id(cx, NameToId(name));
    RootedObject scopeChain(cx, initialScopeChain);

    Shape *shape = nullptr;
    while (scopeChain) {
        if (!shapes.append(scopeChain->lastProperty()))
            return false;

        if (scopeChain->is<GlobalObject>()) {
            // The global terminates the chain: either the name is here or
            // the lookup misses entirely.
            shape = scopeChain->nativeLookup(cx, id);
            if (shape)
                break;
            return true;
        }

        // Only plain (non-with) scope objects can be shape-guarded.
        if (!scopeChain->is<ScopeObject>() || scopeChain->is<DynamicWithObject>())
            return true;

        // Check for an 'own' property on the scope. There is no need to
        // check the prototype as non-with scopes do not inherit properties
        // from any prototype.
        shape = scopeChain->nativeLookup(cx, id);
        if (shape)
            break;

        scopeChain = scopeChain->enclosingScope();
    }

    // The property must be a cacheable slot read on the holder itself.
    if (!IsCacheableGetPropReadSlot(scopeChain, scopeChain, shape))
        return true;

    bool isFixedSlot;
    uint32_t offset;
    GetFixedOrDynamicSlotOffset(scopeChain, shape->slot(), &isFixedSlot, &offset);

    ICStub *monitorStub = stub->fallbackMonitorStub()->firstMonitorStub();
    ICStub *newStub;

    // shapes.length() == number of objects guarded == hops walked + 1.
    switch (shapes.length()) {
      case 1: {
        ICGetName_Scope<0>::Compiler compiler(cx, monitorStub, &shapes, isFixedSlot, offset);
        newStub = compiler.getStub(compiler.getStubSpace(script));
        break;
      }
      case 2: {
        ICGetName_Scope<1>::Compiler compiler(cx, monitorStub, &shapes, isFixedSlot, offset);
        newStub = compiler.getStub(compiler.getStubSpace(script));
        break;
      }
      case 3: {
        ICGetName_Scope<2>::Compiler compiler(cx, monitorStub, &shapes, isFixedSlot, offset);
        newStub = compiler.getStub(compiler.getStubSpace(script));
        break;
      }
      case 4: {
        ICGetName_Scope<3>::Compiler compiler(cx, monitorStub, &shapes, isFixedSlot, offset);
        newStub = compiler.getStub(compiler.getStubSpace(script));
        break;
      }
      case 5: {
        ICGetName_Scope<4>::Compiler compiler(cx, monitorStub, &shapes, isFixedSlot, offset);
        newStub = compiler.getStub(compiler.getStubSpace(script));
        break;
      }
      case 6: {
        ICGetName_Scope<5>::Compiler compiler(cx, monitorStub, &shapes, isFixedSlot, offset);
        newStub = compiler.getStub(compiler.getStubSpace(script));
        break;
      }
      case 7: {
        ICGetName_Scope<6>::Compiler compiler(cx, monitorStub, &shapes, isFixedSlot, offset);
        newStub = compiler.getStub(compiler.getStubSpace(script));
        break;
      }
      default:
        // Too many hops to encode in a stub.
        return true;
    }

    if (!newStub)
        return false;

    stub->addNewStub(newStub);
    return true;
}
5809
// Fallback path for JSOP_NAME/JSOP_GETGNAME: performs the full name lookup,
// monitors the result type, then tries to attach an optimized stub
// (global-name or scope-chain) for future hits at this site.
static bool
DoGetNameFallback(JSContext *cx, BaselineFrame *frame, ICGetName_Fallback *stub_,
                  HandleObject scopeChain, MutableHandleValue res)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICGetName_Fallback *> stub(frame, stub_);

    RootedScript script(cx, frame->script());
    jsbytecode *pc = stub->icEntry()->pc(script);
    mozilla::DebugOnly<JSOp> op = JSOp(*pc);
    FallbackICSpew(cx, stub, "GetName(%s)", js_CodeName[JSOp(*pc)]);

    JS_ASSERT(op == JSOP_NAME || op == JSOP_GETGNAME);

    RootedPropertyName name(cx, script->getName(pc));

    // |typeof name| must not throw for unresolvable names, so it takes a
    // separate lookup path. NOTE(review): this peeks at the next opcode using
    // JSOP_GETGNAME_LENGTH for both ops — presumably JSOP_NAME has the same
    // length; confirm against vm/Opcodes.h.
    if (JSOp(pc[JSOP_GETGNAME_LENGTH]) == JSOP_TYPEOF) {
        if (!GetScopeNameForTypeOf(cx, scopeChain, name, res))
            return false;
    } else {
        if (!GetScopeName(cx, scopeChain, name, res))
            return false;
    }

    types::TypeScript::Monitor(cx, script, pc, res);

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    // Add a type monitor stub for the resulting value.
    if (!stub->addMonitorStubForValue(cx, script, res))
        return false;

    // Attach new stub.
    if (stub->numOptimizedStubs() >= ICGetName_Fallback::MAX_OPTIMIZED_STUBS) {
        // TODO: Discard all stubs in this IC and replace with generic stub.
        return true;
    }

    // Global-name ops (JOF_GNAME format) get a global-shape stub; everything
    // else goes through the scope-chain-walking stub.
    if (js_CodeSpec[*pc].format & JOF_GNAME) {
        if (!TryAttachGlobalNameStub(cx, script, pc, stub, scopeChain, name))
            return false;
    } else {
        if (!TryAttachScopeNameStub(cx, script, stub, scopeChain, name))
            return false;
    }

    return true;
}
5860
// VM-call glue: the GetName fallback stub tail-calls into DoGetNameFallback
// through this VMFunction wrapper.
typedef bool (*DoGetNameFallbackFn)(JSContext *, BaselineFrame *, ICGetName_Fallback *,
                                    HandleObject, MutableHandleValue);
static const VMFunction DoGetNameFallbackInfo = FunctionInfo<DoGetNameFallbackFn>(DoGetNameFallback);
5864
// Emit the GetName fallback stub: it simply tail-calls the C++ handler
// DoGetNameFallback with the scope chain object that the caller left in
// R0's scratch register.
bool
ICGetName_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(R0 == JSReturnOperand);

    EmitRestoreTailCallReg(masm);

    // Push VM-call arguments in reverse order: scope chain, this stub,
    // then the baseline frame pointer.
    masm.push(R0.scratchReg());
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(DoGetNameFallbackInfo, masm);
}
5878
// Emit the optimized global-name stub: guard the global object's shape and
// load the name's value straight out of its dynamic slots.
bool
ICGetName_Global::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    Register obj = R0.scratchReg();
    Register scratch = R1.scratchReg();

    // Shape guard: bail to the next stub if the global's shape has changed.
    masm.loadPtr(Address(BaselineStubReg, ICGetName_Global::offsetOfShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, obj, scratch, &failure);

    // Load dynamic slot. The stub stores a slot index, not a byte offset,
    // hence the TimesEight (sizeof(Value)) scale.
    masm.loadPtr(Address(obj, JSObject::offsetOfSlots()), obj);
    masm.load32(Address(BaselineStubReg, ICGetName_Global::offsetOfSlot()), scratch);
    masm.loadValue(BaseIndex(obj, scratch, TimesEight), R0);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
5903
// Emit the optimized scope-chain name stub: guard the shape of each of the
// NumHops + 1 objects on the chain, then load the slot from the final scope.
template <size_t NumHops>
bool
ICGetName_Scope<NumHops>::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    GeneralRegisterSet regs(availableGeneralRegs(1));
    Register obj = R0.scratchReg();
    Register walker = regs.takeAny();
    Register scratch = regs.takeAny();

    // Use a local to silence Clang tautological-compare warning if NumHops is 0.
    size_t numHops = NumHops;

    // Shape-guard every object on the chain, hopping to each enclosing
    // scope as we go (no hop needed after the last guard).
    for (size_t index = 0; index < NumHops + 1; index++) {
        Register scope = index ? walker : obj;

        // Shape guard.
        masm.loadPtr(Address(BaselineStubReg, ICGetName_Scope::offsetOfShape(index)), scratch);
        masm.branchTestObjShape(Assembler::NotEqual, scope, scratch, &failure);

        if (index < numHops)
            masm.extractObject(Address(scope, ScopeObject::offsetOfEnclosingScope()), walker);
    }

    // The last scope reached is the one holding the slot we want.
    Register scope = NumHops ? walker : obj;

    if (!isFixedSlot_) {
        masm.loadPtr(Address(scope, JSObject::offsetOfSlots()), walker);
        scope = walker;
    }

    // offsetOfOffset() holds a byte offset, hence the TimesOne scale.
    masm.load32(Address(BaselineStubReg, ICGetName_Scope::offsetOfOffset()), scratch);
    masm.loadValue(BaseIndex(scope, scratch, TimesOne), R0);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
5946
5947 //
5948 // BindName_Fallback
5949 //
5950
5951 static bool
5952 DoBindNameFallback(JSContext *cx, BaselineFrame *frame, ICBindName_Fallback *stub,
5953 HandleObject scopeChain, MutableHandleValue res)
5954 {
5955 jsbytecode *pc = stub->icEntry()->pc(frame->script());
5956 mozilla::DebugOnly<JSOp> op = JSOp(*pc);
5957 FallbackICSpew(cx, stub, "BindName(%s)", js_CodeName[JSOp(*pc)]);
5958
5959 JS_ASSERT(op == JSOP_BINDNAME);
5960
5961 RootedPropertyName name(cx, frame->script()->getName(pc));
5962
5963 RootedObject scope(cx);
5964 if (!LookupNameWithGlobalDefault(cx, name, scopeChain, &scope))
5965 return false;
5966
5967 res.setObject(*scope);
5968 return true;
5969 }
5970
// VM-call glue: the BindName fallback stub tail-calls into DoBindNameFallback
// through this VMFunction wrapper.
typedef bool (*DoBindNameFallbackFn)(JSContext *, BaselineFrame *, ICBindName_Fallback *,
                                     HandleObject, MutableHandleValue);
static const VMFunction DoBindNameFallbackInfo =
    FunctionInfo<DoBindNameFallbackFn>(DoBindNameFallback);
5975
// Emit the BindName fallback stub: tail-call DoBindNameFallback with the
// scope chain object the caller left in R0's scratch register.
bool
ICBindName_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(R0 == JSReturnOperand);

    EmitRestoreTailCallReg(masm);

    // Push VM-call arguments in reverse order: scope chain, this stub,
    // then the baseline frame pointer.
    masm.push(R0.scratchReg());
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(DoBindNameFallbackInfo, masm);
}
5989
5990 //
5991 // GetIntrinsic_Fallback
5992 //
5993
5994 static bool
5995 DoGetIntrinsicFallback(JSContext *cx, BaselineFrame *frame, ICGetIntrinsic_Fallback *stub_,
5996 MutableHandleValue res)
5997 {
5998 // This fallback stub may trigger debug mode toggling.
5999 DebugModeOSRVolatileStub<ICGetIntrinsic_Fallback *> stub(frame, stub_);
6000
6001 RootedScript script(cx, frame->script());
6002 jsbytecode *pc = stub->icEntry()->pc(script);
6003 mozilla::DebugOnly<JSOp> op = JSOp(*pc);
6004 FallbackICSpew(cx, stub, "GetIntrinsic(%s)", js_CodeName[JSOp(*pc)]);
6005
6006 JS_ASSERT(op == JSOP_GETINTRINSIC);
6007
6008 if (!GetIntrinsicOperation(cx, pc, res))
6009 return false;
6010
6011 // An intrinsic operation will always produce the same result, so only
6012 // needs to be monitored once. Attach a stub to load the resulting constant
6013 // directly.
6014
6015 types::TypeScript::Monitor(cx, script, pc, res);
6016
6017 // Check if debug mode toggling made the stub invalid.
6018 if (stub.invalid())
6019 return true;
6020
6021 IonSpew(IonSpew_BaselineIC, " Generating GetIntrinsic optimized stub");
6022 ICGetIntrinsic_Constant::Compiler compiler(cx, res);
6023 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
6024 if (!newStub)
6025 return false;
6026
6027 stub->addNewStub(newStub);
6028 return true;
6029 }
6030
// VM-call glue: the GetIntrinsic fallback stub tail-calls into
// DoGetIntrinsicFallback through this VMFunction wrapper.
typedef bool (*DoGetIntrinsicFallbackFn)(JSContext *, BaselineFrame *, ICGetIntrinsic_Fallback *,
                                         MutableHandleValue);
static const VMFunction DoGetIntrinsicFallbackInfo =
    FunctionInfo<DoGetIntrinsicFallbackFn>(DoGetIntrinsicFallback);
6035
// Emit the GetIntrinsic fallback stub: tail-call DoGetIntrinsicFallback.
bool
ICGetIntrinsic_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    // Push VM-call arguments in reverse order: this stub, then the
    // baseline frame pointer.
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(DoGetIntrinsicFallbackInfo, masm);
}
6046
// Emit the optimized GetIntrinsic stub: load the constant value baked into
// the stub and return it. No guards are needed because an intrinsic always
// resolves to the same value.
bool
ICGetIntrinsic_Constant::Compiler::generateStubCode(MacroAssembler &masm)
{
    masm.loadValue(Address(BaselineStubReg, ICGetIntrinsic_Constant::offsetOfValue()), R0);

    EmitReturnFromIC(masm);
    return true;
}
6055
6056 //
6057 // GetProp_Fallback
6058 //
6059
6060 static bool
6061 TryAttachLengthStub(JSContext *cx, JSScript *script, ICGetProp_Fallback *stub, HandleValue val,
6062 HandleValue res, bool *attached)
6063 {
6064 JS_ASSERT(!*attached);
6065
6066 if (val.isString()) {
6067 JS_ASSERT(res.isInt32());
6068 IonSpew(IonSpew_BaselineIC, " Generating GetProp(String.length) stub");
6069 ICGetProp_StringLength::Compiler compiler(cx);
6070 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
6071 if (!newStub)
6072 return false;
6073
6074 *attached = true;
6075 stub->addNewStub(newStub);
6076 return true;
6077 }
6078
6079 if (val.isMagic(JS_OPTIMIZED_ARGUMENTS) && res.isInt32()) {
6080 IonSpew(IonSpew_BaselineIC, " Generating GetProp(MagicArgs.length) stub");
6081 ICGetProp_ArgumentsLength::Compiler compiler(cx, ICGetProp_ArgumentsLength::Magic);
6082 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
6083 if (!newStub)
6084 return false;
6085
6086 *attached = true;
6087 stub->addNewStub(newStub);
6088 return true;
6089 }
6090
6091 if (!val.isObject())
6092 return true;
6093
6094 RootedObject obj(cx, &val.toObject());
6095
6096 if (obj->is<ArrayObject>() && res.isInt32()) {
6097 IonSpew(IonSpew_BaselineIC, " Generating GetProp(Array.length) stub");
6098 ICGetProp_ArrayLength::Compiler compiler(cx);
6099 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
6100 if (!newStub)
6101 return false;
6102
6103 *attached = true;
6104 stub->addNewStub(newStub);
6105 return true;
6106 }
6107
6108 if (obj->is<TypedArrayObject>() && res.isInt32()) {
6109 IonSpew(IonSpew_BaselineIC, " Generating GetProp(TypedArray.length) stub");
6110 ICGetProp_TypedArrayLength::Compiler compiler(cx);
6111 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
6112 if (!newStub)
6113 return false;
6114
6115 *attached = true;
6116 stub->addNewStub(newStub);
6117 return true;
6118 }
6119
6120 if (obj->is<ArgumentsObject>() && res.isInt32()) {
6121 IonSpew(IonSpew_BaselineIC, " Generating GetProp(ArgsObj.length %s) stub",
6122 obj->is<StrictArgumentsObject>() ? "Strict" : "Normal");
6123 ICGetProp_ArgumentsLength::Which which = ICGetProp_ArgumentsLength::Normal;
6124 if (obj->is<StrictArgumentsObject>())
6125 which = ICGetProp_ArgumentsLength::Strict;
6126 ICGetProp_ArgumentsLength::Compiler compiler(cx, which);
6127 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
6128 if (!newStub)
6129 return false;
6130
6131 *attached = true;
6132 stub->addNewStub(newStub);
6133 return true;
6134 }
6135
6136 return true;
6137 }
6138
6139 static bool
6140 UpdateExistingGenerationalDOMProxyStub(ICGetProp_Fallback *stub,
6141 HandleObject obj)
6142 {
6143 Value expandoSlot = obj->getFixedSlot(GetDOMProxyExpandoSlot());
6144 JS_ASSERT(!expandoSlot.isObject() && !expandoSlot.isUndefined());
6145 ExpandoAndGeneration *expandoAndGeneration = (ExpandoAndGeneration*)expandoSlot.toPrivate();
6146 for (ICStubConstIterator iter = stub->beginChainConst(); !iter.atEnd(); iter++) {
6147 if (iter->isGetProp_CallDOMProxyWithGenerationNative()) {
6148 ICGetProp_CallDOMProxyWithGenerationNative* updateStub =
6149 iter->toGetProp_CallDOMProxyWithGenerationNative();
6150 if (updateStub->expandoAndGeneration() == expandoAndGeneration) {
6151 // Update generation
6152 uint32_t generation = expandoAndGeneration->generation;
6153 IonSpew(IonSpew_BaselineIC,
6154 " Updating existing stub with generation, old value: %i, "
6155 "new value: %i", updateStub->generation(),
6156 generation);
6157 updateStub->setGeneration(generation);
6158 return true;
6159 }
6160 }
6161 }
6162 return false;
6163 }
6164
// Try to attach an optimized stub for a property get on a native object or
// DOM proxy: a direct slot read, a scripted getter call, a native getter
// call, or a Proxy::get shim for shadowed DOM proxy properties. Returns
// false only on OOM; "no stub attached" is still a true return.
static bool
TryAttachNativeGetPropStub(JSContext *cx, HandleScript script, jsbytecode *pc,
                           ICGetProp_Fallback *stub, HandlePropertyName name,
                           HandleValue val, HandleValue res, bool *attached)
{
    JS_ASSERT(!*attached);

    if (!val.isObject())
        return true;

    RootedObject obj(cx, &val.toObject());

    // Look up the property without observable side effects; also classifies
    // DOM proxies (shadowing and generational behavior).
    bool isDOMProxy;
    bool domProxyHasGeneration;
    DOMProxyShadowsResult domProxyShadowsResult;
    RootedShape shape(cx);
    RootedObject holder(cx);
    if (!EffectlesslyLookupProperty(cx, obj, name, &holder, &shape, &isDOMProxy,
                                    &domProxyShadowsResult, &domProxyHasGeneration))
    {
        return false;
    }

    if (!isDOMProxy && !obj->isNative())
        return true;

    bool isCallProp = (JSOp(*pc) == JSOP_CALLPROP);

    ICStub *monitorStub = stub->fallbackMonitorStub()->firstMonitorStub();

    // Case 1: plain data slot on the object itself or on its prototype chain.
    if (!isDOMProxy && IsCacheableGetPropReadSlot(obj, holder, shape)) {
        bool isFixedSlot;
        uint32_t offset;
        GetFixedOrDynamicSlotOffset(holder, shape->slot(), &isFixedSlot, &offset);

        // Instantiate this property for singleton holders, for use during Ion compilation.
        if (IsIonEnabled(cx))
            types::EnsureTrackPropertyTypes(cx, holder, NameToId(name));

        ICStub::Kind kind = (obj == holder) ? ICStub::GetProp_Native
                                            : ICStub::GetProp_NativePrototype;

        IonSpew(IonSpew_BaselineIC, "  Generating GetProp(%s %s) stub",
                    isDOMProxy ? "DOMProxy" : "Native",
                    (obj == holder) ? "direct" : "prototype");
        ICGetPropNativeCompiler compiler(cx, kind, isCallProp, monitorStub, obj, holder,
                                         name, isFixedSlot, offset);
        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;

        stub->addNewStub(newStub);
        *attached = true;
        return true;
    }

    bool isScripted = false;
    bool cacheableCall = IsCacheableGetPropCall(cx, obj, holder, shape, &isScripted, isDOMProxy);

    // Case 2: scripted getter on the prototype chain.
    if (cacheableCall && isScripted && !isDOMProxy) {
#if JS_HAS_NO_SUCH_METHOD
        // It's hard to keep the original object alive through a call, and it's unlikely
        // that a getter will be used to generate functions for calling in CALLPROP locations.
        // Just don't attach stubs in that case.
        if (isCallProp)
            return true;
#endif

        // Don't handle scripted own property getters
        if (obj == holder)
            return true;

        RootedFunction callee(cx, &shape->getterObject()->as<JSFunction>());
        JS_ASSERT(obj != holder);
        JS_ASSERT(callee->hasScript());

        IonSpew(IonSpew_BaselineIC, "  Generating GetProp(NativeObj/ScriptedGetter %s:%d) stub",
                    callee->nonLazyScript()->filename(), callee->nonLazyScript()->lineno());

        ICGetProp_CallScripted::Compiler compiler(cx, monitorStub, obj, holder, callee,
                                                  script->pcToOffset(pc));
        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;

        stub->addNewStub(newStub);
        *attached = true;
        return true;
    }

    // Case 3: native (JSNative) getter, including the DOM proxy variants.
    if (cacheableCall && !isScripted) {
#if JS_HAS_NO_SUCH_METHOD
        // It's unlikely that a getter function will be used to generate functions for calling
        // in CALLPROP locations. Just don't attach stubs in that case to avoid issues with
        // __noSuchMethod__ handling.
        if (isCallProp)
            return true;
#endif

        RootedFunction callee(cx, &shape->getterObject()->as<JSFunction>());
        JS_ASSERT(callee->isNative());

        IonSpew(IonSpew_BaselineIC, "  Generating GetProp(%s%s/NativeGetter %p) stub",
                isDOMProxy ? "DOMProxyObj" : "NativeObj",
                isDOMProxy && domProxyHasGeneration ? "WithGeneration" : "",
                callee->native());

        ICStub *newStub = nullptr;
        if (isDOMProxy) {
            JS_ASSERT(obj != holder);
            ICStub::Kind kind;
            if (domProxyHasGeneration) {
                // Prefer updating an existing generational stub over
                // attaching a duplicate for the same expando record.
                if (UpdateExistingGenerationalDOMProxyStub(stub, obj)) {
                    *attached = true;
                    return true;
                }
                kind = ICStub::GetProp_CallDOMProxyWithGenerationNative;
            } else {
                kind = ICStub::GetProp_CallDOMProxyNative;
            }
            Rooted<ProxyObject*> proxy(cx, &obj->as<ProxyObject>());
            ICGetPropCallDOMProxyNativeCompiler
                compiler(cx, kind, monitorStub, proxy, holder, callee, script->pcToOffset(pc));
            newStub = compiler.getStub(compiler.getStubSpace(script));
        } else if (obj == holder) {
            ICGetProp_CallNative::Compiler compiler(cx, monitorStub, obj, callee,
                                                    script->pcToOffset(pc));
            newStub = compiler.getStub(compiler.getStubSpace(script));
        } else {
            ICGetProp_CallNativePrototype::Compiler compiler(cx, monitorStub, obj, holder, callee,
                                                             script->pcToOffset(pc));
            newStub = compiler.getStub(compiler.getStubSpace(script));
        }
        if (!newStub)
            return false;
        stub->addNewStub(newStub);
        *attached = true;
        return true;
    }

    // If it's a shadowed listbase proxy property, attach stub to call Proxy::get instead.
    if (isDOMProxy && domProxyShadowsResult == Shadows) {
        JS_ASSERT(obj == holder);
#if JS_HAS_NO_SUCH_METHOD
        if (isCallProp)
            return true;
#endif

        IonSpew(IonSpew_BaselineIC, "  Generating GetProp(DOMProxyProxy) stub");
        Rooted<ProxyObject*> proxy(cx, &obj->as<ProxyObject>());
        ICGetProp_DOMProxyShadowed::Compiler compiler(cx, monitorStub, proxy, name,
                                                      script->pcToOffset(pc));
        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;
        stub->addNewStub(newStub);
        *attached = true;
        return true;
    }

    return true;
}
6328
6329 static bool
6330 TryAttachPrimitiveGetPropStub(JSContext *cx, HandleScript script, jsbytecode *pc,
6331 ICGetProp_Fallback *stub, HandlePropertyName name, HandleValue val,
6332 HandleValue res, bool *attached)
6333 {
6334 JS_ASSERT(!*attached);
6335
6336 JSValueType primitiveType;
6337 RootedObject proto(cx);
6338 Rooted<GlobalObject*> global(cx, &script->global());
6339 if (val.isString()) {
6340 primitiveType = JSVAL_TYPE_STRING;
6341 proto = GlobalObject::getOrCreateStringPrototype(cx, global);
6342 } else if (val.isNumber()) {
6343 primitiveType = JSVAL_TYPE_DOUBLE;
6344 proto = GlobalObject::getOrCreateNumberPrototype(cx, global);
6345 } else {
6346 JS_ASSERT(val.isBoolean());
6347 primitiveType = JSVAL_TYPE_BOOLEAN;
6348 proto = GlobalObject::getOrCreateBooleanPrototype(cx, global);
6349 }
6350 if (!proto)
6351 return false;
6352
6353 // Instantiate this property, for use during Ion compilation.
6354 RootedId id(cx, NameToId(name));
6355 if (IsIonEnabled(cx))
6356 types::EnsureTrackPropertyTypes(cx, proto, id);
6357
6358 // For now, only look for properties directly set on the prototype.
6359 RootedShape shape(cx, proto->nativeLookup(cx, id));
6360 if (!shape || !shape->hasSlot() || !shape->hasDefaultGetter())
6361 return true;
6362
6363 bool isFixedSlot;
6364 uint32_t offset;
6365 GetFixedOrDynamicSlotOffset(proto, shape->slot(), &isFixedSlot, &offset);
6366
6367 ICStub *monitorStub = stub->fallbackMonitorStub()->firstMonitorStub();
6368
6369 IonSpew(IonSpew_BaselineIC, " Generating GetProp_Primitive stub");
6370 ICGetProp_Primitive::Compiler compiler(cx, monitorStub, primitiveType, proto,
6371 isFixedSlot, offset);
6372 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
6373 if (!newStub)
6374 return false;
6375
6376 stub->addNewStub(newStub);
6377 *attached = true;
6378 return true;
6379 }
6380
// Fallback path for JSOP_GETPROP/CALLPROP/LENGTH/GETXPROP: performs the
// generic property get (including optimized-arguments .length and
// __noSuchMethod__ handling), monitors the result, then tries to attach a
// specialized stub (length, native slot/getter, or primitive prototype).
static bool
DoGetPropFallback(JSContext *cx, BaselineFrame *frame, ICGetProp_Fallback *stub_,
                  MutableHandleValue val, MutableHandleValue res)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICGetProp_Fallback *> stub(frame, stub_);

    jsbytecode *pc = stub->icEntry()->pc(frame->script());
    JSOp op = JSOp(*pc);
    FallbackICSpew(cx, stub, "GetProp(%s)", js_CodeName[op]);

    JS_ASSERT(op == JSOP_GETPROP || op == JSOP_CALLPROP || op == JSOP_LENGTH || op == JSOP_GETXPROP);

    RootedPropertyName name(cx, frame->script()->getName(pc));

    if (op == JSOP_LENGTH && val.isMagic(JS_OPTIMIZED_ARGUMENTS)) {
        // Handle arguments.length access.
        if (IsOptimizedArguments(frame, val.address())) {
            res.setInt32(frame->numActualArgs());

            // Monitor result
            types::TypeScript::Monitor(cx, frame->script(), pc, res);
            if (!stub->addMonitorStubForValue(cx, frame->script(), res))
                return false;

            // The magic-arguments length stub is always attachable here,
            // hence the assert below.
            bool attached = false;
            if (!TryAttachLengthStub(cx, frame->script(), stub, val, res, &attached))
                return false;
            JS_ASSERT(attached);

            return true;
        }
    }

    RootedObject obj(cx, ToObjectFromStack(cx, val));
    if (!obj)
        return false;

    RootedId id(cx, NameToId(name));
    if (!JSObject::getGeneric(cx, obj, obj, id, res))
        return false;

#if JS_HAS_NO_SUCH_METHOD
    // Handle objects with __noSuchMethod__.
    if (op == JSOP_CALLPROP && MOZ_UNLIKELY(res.isUndefined()) && val.isObject()) {
        if (!OnUnknownMethod(cx, obj, IdToValue(id), res))
            return false;
    }
#endif

    types::TypeScript::Monitor(cx, frame->script(), pc, res);

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    // Add a type monitor stub for the resulting value.
    if (!stub->addMonitorStubForValue(cx, frame->script(), res))
        return false;

    if (stub->numOptimizedStubs() >= ICGetProp_Fallback::MAX_OPTIMIZED_STUBS) {
        // TODO: Discard all stubs in this IC and replace with generic getprop stub.
        return true;
    }

    // Try the specialized attach paths in order: .length, native
    // slot/getter, then primitive-prototype reads.
    bool attached = false;

    if (op == JSOP_LENGTH) {
        if (!TryAttachLengthStub(cx, frame->script(), stub, val, res, &attached))
            return false;
        if (attached)
            return true;
    }

    RootedScript script(cx, frame->script());

    if (!TryAttachNativeGetPropStub(cx, script, pc, stub, name, val, res, &attached))
        return false;
    if (attached)
        return true;

    if (val.isString() || val.isNumber() || val.isBoolean()) {
        if (!TryAttachPrimitiveGetPropStub(cx, script, pc, stub, name, val, res, &attached))
            return false;
        if (attached)
            return true;
    }

    // Nothing cacheable; remember that so Ion can avoid over-specializing.
    JS_ASSERT(!attached);
    stub->noteUnoptimizableAccess();

    return true;
}
6474
// VM-call glue: the GetProp fallback stub tail-calls into DoGetPropFallback.
// PopValues(1) discards the input value synced on the stack for the
// expression decompiler.
typedef bool (*DoGetPropFallbackFn)(JSContext *, BaselineFrame *, ICGetProp_Fallback *,
                                    MutableHandleValue, MutableHandleValue);
static const VMFunction DoGetPropFallbackInfo =
    FunctionInfo<DoGetPropFallbackFn>(DoGetPropFallback, PopValues(1));
6479
// Emit the GetProp fallback stub: tail-call DoGetPropFallback, then lay
// down the return paths used when bailing out of inlined scripted getters
// or during on-stack debug-mode recompilation.
bool
ICGetProp_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(R0 == JSReturnOperand);

    EmitRestoreTailCallReg(masm);

    // Ensure stack is fully synced for the expression decompiler.
    masm.pushValue(R0);

    // Push arguments.
    masm.pushValue(R0);
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    if (!tailCallVM(DoGetPropFallbackInfo, masm))
        return false;

    // What follows is bailout for inlined scripted getters or for on-stack
    // debug mode recompile. The return address pointed to by the baseline
    // stack points here.
    //
    // Even though the fallback frame doesn't enter a stub frame, the CallScripted
    // frame that we are emulating does. Again, we lie.
#ifdef DEBUG
    entersStubFrame_ = true;
#endif

    Label leaveStubCommon;

    // Two entry points share the frame-leaving tail: one for returns from a
    // VM stub call and one for returns from Ion. postGenerateStubCode
    // registers both recorded offsets with the JitCompartment.
    returnFromStubOffset_ = masm.currentOffset();
    leaveStubFrameHead(masm, false);
    masm.jump(&leaveStubCommon);

    returnFromIonOffset_ = masm.currentOffset();
    leaveStubFrameHead(masm, true);

    masm.bind(&leaveStubCommon);
    leaveStubFrameCommonTail(masm);

    // When we get here, BaselineStubReg contains the ICGetProp_Fallback stub,
    // which we can't use to enter the TypeMonitor IC, because it's a MonitoredFallbackStub
    // instead of a MonitoredStub. So, we cheat.
    masm.loadPtr(Address(BaselineStubReg, ICMonitoredFallbackStub::offsetOfFallbackMonitorStub()),
                 BaselineStubReg);
    EmitEnterTypeMonitorIC(masm, ICTypeMonitor_Fallback::offsetOfFirstMonitorStub());

    return true;
}
6529
6530 bool
6531 ICGetProp_Fallback::Compiler::postGenerateStubCode(MacroAssembler &masm, Handle<JitCode *> code)
6532 {
6533 JitCompartment *comp = cx->compartment()->jitCompartment();
6534
6535 CodeOffsetLabel fromIon(returnFromIonOffset_);
6536 fromIon.fixup(&masm);
6537 comp->initBaselineGetPropReturnFromIonAddr(code->raw() + fromIon.offset());
6538
6539 CodeOffsetLabel fromVM(returnFromStubOffset_);
6540 fromVM.fixup(&masm);
6541 comp->initBaselineGetPropReturnFromStubAddr(code->raw() + fromVM.offset());
6542
6543 return true;
6544 }
6545
// Emit the Array .length stub: guard the input is an ArrayObject, then read
// the length out of its elements header.
bool
ICGetProp_ArrayLength::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    Register scratch = R1.scratchReg();

    // Unbox R0 and guard it's an array.
    Register obj = masm.extractObject(R0, ExtractTemp0);
    masm.branchTestObjClass(Assembler::NotEqual, obj, scratch, &ArrayObject::class_, &failure);

    // Load obj->elements->length.
    masm.loadPtr(Address(obj, JSObject::offsetOfElements()), scratch);
    masm.load32(Address(scratch, ObjectElements::offsetOfLength()), scratch);

    // Guard length fits in an int32: a set sign bit means the length cannot
    // be tagged as an int32 value, so defer to the next stub.
    masm.branchTest32(Assembler::Signed, scratch, scratch, &failure);

    masm.tagValue(JSVAL_TYPE_INT32, scratch, R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
6573
// Emit the TypedArray .length stub: identify typed arrays by their class
// pointer falling in the contiguous TypedArrayObject::classes table, then
// read the length from its fixed slot.
bool
ICGetProp_TypedArrayLength::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    Register scratch = R1.scratchReg();

    // Unbox R0.
    Register obj = masm.extractObject(R0, ExtractTemp0);

    // Implement the negated version of JSObject::isTypedArray predicate.
    masm.loadObjClass(obj, scratch);
    masm.branchPtr(Assembler::Below, scratch, ImmPtr(&TypedArrayObject::classes[0]),
                   &failure);
    masm.branchPtr(Assembler::AboveOrEqual, scratch,
                   ImmPtr(&TypedArrayObject::classes[ScalarTypeDescr::TYPE_MAX]),
                   &failure);

    // Load length from fixed slot.
    masm.loadValue(Address(obj, TypedArrayObject::lengthOffset()), R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
6602
// Emit the String .length stub: guard the input is a string, then return
// its length as an int32.
bool
ICGetProp_StringLength::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    masm.branchTestString(Assembler::NotEqual, R0, &failure);

    // Unbox string and load its length.
    Register string = masm.extractString(R0, ExtractTemp0);
    masm.loadStringLength(string, string);

    masm.tagValue(JSVAL_TYPE_INT32, string, R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
6621
// Emit the primitive-prototype GetProp stub: guard the input matches the
// primitive type this stub was compiled for, guard the shape of the wrapper
// prototype, then load the property's slot from that prototype.
bool
ICGetProp_Primitive::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    switch (primitiveType_) {
      case JSVAL_TYPE_STRING:
        masm.branchTestString(Assembler::NotEqual, R0, &failure);
        break;
      case JSVAL_TYPE_DOUBLE: // Also used for int32.
        masm.branchTestNumber(Assembler::NotEqual, R0, &failure);
        break;
      case JSVAL_TYPE_BOOLEAN:
        masm.branchTestBoolean(Assembler::NotEqual, R0, &failure);
        break;
      default:
        MOZ_ASSUME_UNREACHABLE("unexpected type");
    }

    GeneralRegisterSet regs(availableGeneralRegs(1));
    Register holderReg = regs.takeAny();
    Register scratchReg = regs.takeAny();

    // Verify the shape of the prototype.
    masm.movePtr(ImmGCPtr(prototype_.get()), holderReg);

    Address shapeAddr(BaselineStubReg, ICGetProp_Primitive::offsetOfProtoShape());
    masm.loadPtr(Address(holderReg, JSObject::offsetOfShape()), scratchReg);
    masm.branchPtr(Assembler::NotEqual, shapeAddr, scratchReg, &failure);

    if (!isFixedSlot_)
        masm.loadPtr(Address(holderReg, JSObject::offsetOfSlots()), holderReg);

    // offsetOfOffset() holds a byte offset, hence the TimesOne scale.
    masm.load32(Address(BaselineStubReg, ICGetPropNativeStub::offsetOfOffset()), scratchReg);
    masm.loadValue(BaseIndex(holderReg, scratchReg, TimesOne), R0);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
6665
// Emit a GetProp stub for a plain native data property: guard that the input
// is an object with the expected shape (and, for proto-chain properties, that
// the holder still has its expected shape), then load the slot value into R0
// and tail into the type monitor chain.  With JS_HAS_NO_SUCH_METHOD, a
// callprop that loads |undefined| additionally routes through the VM to check
// for a __noSuchMethod__ hook.
bool
ICGetPropNativeCompiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    GeneralRegisterSet regs(availableGeneralRegs(1));

    // Guard input is an object.
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    // BaselineTailCallReg must stay live for EmitEnterTypeMonitorIC below.
    Register scratch = regs.takeAnyExcluding(BaselineTailCallReg);

    // Unbox and shape guard.
    Register objReg = masm.extractObject(R0, ExtractTemp0);
    masm.loadPtr(Address(BaselineStubReg, ICGetPropNativeStub::offsetOfShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failure);

    Register holderReg;
    if (obj_ == holder_) {
        // Own property: the receiver is also the holder.
        holderReg = objReg;
    } else {
        // Shape guard holder.
        holderReg = regs.takeAny();
        masm.loadPtr(Address(BaselineStubReg, ICGetProp_NativePrototype::offsetOfHolder()),
                     holderReg);
        masm.loadPtr(Address(BaselineStubReg, ICGetProp_NativePrototype::offsetOfHolderShape()),
                     scratch);
        masm.branchTestObjShape(Assembler::NotEqual, holderReg, scratch, &failure);
    }

    if (!isFixedSlot_) {
        // Don't overwrite actual holderReg if we need to load a dynamic slots object.
        // May need to preserve object for noSuchMethod check later.
        Register nextHolder = regs.takeAny();
        masm.loadPtr(Address(holderReg, JSObject::offsetOfSlots()), nextHolder);
        holderReg = nextHolder;
    }

    // The byte offset of the slot is baked into the stub.
    masm.load32(Address(BaselineStubReg, ICGetPropNativeStub::offsetOfOffset()), scratch);
    BaseIndex result(holderReg, scratch, TimesOne);

#if JS_HAS_NO_SUCH_METHOD
#ifdef DEBUG
    entersStubFrame_ = true;
#endif
    if (isCallProp_) {
        // Check for __noSuchMethod__ invocation.
        Label afterNoSuchMethod;
        Label skipNoSuchMethod;

        masm.push(objReg);
        masm.loadValue(result, R0);
        masm.branchTestUndefined(Assembler::NotEqual, R0, &skipNoSuchMethod);

        masm.pop(objReg);

        // Call __noSuchMethod__ checker. Object pointer is in objReg.
        regs = availableGeneralRegs(0);
        regs.takeUnchecked(objReg);
        regs.takeUnchecked(BaselineTailCallReg);
        ValueOperand val = regs.takeValueOperand();

        // Box and push obj onto baseline frame stack for decompiler.
        EmitRestoreTailCallReg(masm);
        masm.tagValue(JSVAL_TYPE_OBJECT, objReg, val);
        masm.pushValue(val);
        EmitRepushTailCallReg(masm);

        enterStubFrame(masm, regs.getAnyExcluding(BaselineTailCallReg));

        masm.movePtr(ImmGCPtr(propName_.get()), val.scratchReg());
        masm.tagValue(JSVAL_TYPE_STRING, val.scratchReg(), val);
        masm.pushValue(val);
        masm.push(objReg);
        if (!callVM(LookupNoSuchMethodHandlerInfo, masm))
            return false;

        leaveStubFrame(masm);

        // Pop pushed obj from baseline stack.
        EmitUnstowICValues(masm, 1, /* discard = */ true);

        masm.jump(&afterNoSuchMethod);
        masm.bind(&skipNoSuchMethod);

        // Pop pushed objReg.
        masm.addPtr(Imm32(sizeof(void *)), BaselineStackReg);
        masm.bind(&afterNoSuchMethod);
    } else {
        masm.loadValue(result, R0);
    }
#else
    masm.loadValue(result, R0);
#endif

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
6768
// Emit a GetProp stub that invokes a scripted getter: guard receiver and
// holder shapes, push a stub frame, and call the getter's JIT code directly
// (via the arguments rectifier when the getter declares formals).  Two failure
// paths exist: |failure| before the stub frame is pushed, and
// |failureLeaveStubFrame| after, which must unwind the frame first.
bool
ICGetProp_CallScripted::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    Label failureLeaveStubFrame;
    GeneralRegisterSet regs(availableGeneralRegs(1));
    Register scratch = regs.takeAnyExcluding(BaselineTailCallReg);

    // Guard input is an object.
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    // Unbox and shape guard.
    Register objReg = masm.extractObject(R0, ExtractTemp0);
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallScripted::offsetOfReceiverShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failure);

    Register holderReg = regs.takeAny();
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallScripted::offsetOfHolder()), holderReg);
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallScripted::offsetOfHolderShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, holderReg, scratch, &failure);
    regs.add(holderReg);

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, scratch);

    // Load callee function and code.  To ensure that |code| doesn't end up being
    // ArgumentsRectifierReg, if it's available we assign it to |callee| instead.
    Register callee;
    if (regs.has(ArgumentsRectifierReg)) {
        callee = ArgumentsRectifierReg;
        regs.take(callee);
    } else {
        callee = regs.takeAny();
    }
    Register code = regs.takeAny();
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallScripted::offsetOfGetter()), callee);
    masm.branchIfFunctionHasNoScript(callee, &failureLeaveStubFrame);
    masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), code);
    masm.loadBaselineOrIonRaw(code, code, SequentialExecution, &failureLeaveStubFrame);

    // Getter is called with 0 arguments, just |obj| as thisv.
    // Note that we use Push, not push, so that callIon will align the stack
    // properly on ARM.
    masm.Push(R0);
    EmitCreateStubFrameDescriptor(masm, scratch);
    masm.Push(Imm32(0));  // ActualArgc is 0
    masm.Push(callee);
    masm.Push(scratch);

    // Handle arguments underflow.
    Label noUnderflow;
    masm.load16ZeroExtend(Address(callee, JSFunction::offsetOfNargs()), scratch);
    masm.branch32(Assembler::Equal, scratch, Imm32(0), &noUnderflow);
    {
        // Call the arguments rectifier.
        JS_ASSERT(ArgumentsRectifierReg != code);

        JitCode *argumentsRectifier =
            cx->runtime()->jitRuntime()->getArgumentsRectifier(SequentialExecution);

        masm.movePtr(ImmGCPtr(argumentsRectifier), code);
        masm.loadPtr(Address(code, JitCode::offsetOfCode()), code);
        masm.mov(ImmWord(0), ArgumentsRectifierReg);
    }

    masm.bind(&noUnderflow);

    // If needed, update SPS Profiler frame entry.  At this point, callee and scratch can
    // be clobbered.
    {
        GeneralRegisterSet availRegs = availableGeneralRegs(0);
        availRegs.take(ArgumentsRectifierReg);
        availRegs.take(code);
        emitProfilingUpdate(masm, availRegs, ICGetProp_CallScripted::offsetOfPCOffset());
    }

    masm.callIon(code);

    leaveStubFrame(masm, true);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    // Leave stub frame and go to next stub.
    masm.bind(&failureLeaveStubFrame);
    leaveStubFrame(masm, false);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
6861
// Emit a GetProp stub that calls a native (C++) getter on an own property.
// When type inference has proven the input is an object
// (|inputDefinitelyObject_|), the object test and unbox are skipped and R0's
// payload register is used directly.  The actual getter invocation goes
// through the DoCallNativeGetter VM wrapper.
bool
ICGetProp_CallNative::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;

    GeneralRegisterSet regs(availableGeneralRegs(0));
    Register obj = InvalidReg;
    if (inputDefinitelyObject_) {
        obj = R0.scratchReg();
    } else {
        regs.take(R0);
        masm.branchTestObject(Assembler::NotEqual, R0, &failure);
        obj = masm.extractObject(R0, ExtractTemp0);
    }
    // In the definitely-object case |obj| aliases R0's payload, which is not
    // in |regs|; takeUnchecked covers both cases.
    regs.takeUnchecked(obj);

    Register scratch = regs.takeAnyExcluding(BaselineTailCallReg);

    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallNative::offsetOfHolderShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, obj, scratch, &failure);

    enterStubFrame(masm, scratch);

    // Push args for the DoCallNativeGetter VM call: object, then getter.
    masm.Push(obj);

    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallNative::offsetOfGetter()), scratch);
    masm.Push(scratch);

    regs.add(scratch);
    if (!inputDefinitelyObject_)
        regs.add(R0);

    // If needed, update SPS Profiler frame entry.
    emitProfilingUpdate(masm, regs, ICGetProp_CallNative::offsetOfPCOffset());

    if (!callVM(DoCallNativeGetterInfo, masm))
        return false;
    leaveStubFrame(masm);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);

    return true;
}
6908
// Emit a GetProp stub that calls a native getter found on the prototype
// chain: guard both the receiver's and the holder's shapes, then invoke the
// getter through the DoCallNativeGetter VM wrapper.
bool
ICGetProp_CallNativePrototype::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    GeneralRegisterSet regs(availableGeneralRegs(1));
    Register scratch = regs.takeAnyExcluding(BaselineTailCallReg);

    // Guard input is an object.
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    // Unbox and shape guard.
    Register objReg = masm.extractObject(R0, ExtractTemp0);
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallNativePrototype::offsetOfReceiverShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failure);

    Register holderReg = regs.takeAny();
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallNativePrototype::offsetOfHolder()), holderReg);
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallNativePrototype::offsetOfHolderShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, holderReg, scratch, &failure);
    regs.add(holderReg);

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, scratch);

    // Load callee function.
    Register callee = regs.takeAny();
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallNativePrototype::offsetOfGetter()), callee);

    // Push args for vm call.
    masm.push(objReg);
    masm.push(callee);

    // Don't have to preserve R0 anymore.
    regs.add(R0);

    // If needed, update SPS Profiler frame entry.
    emitProfilingUpdate(masm, regs, ICGetProp_CallNativePrototype::offsetOfPCOffset());

    if (!callVM(DoCallNativeGetterInfo, masm))
        return false;
    leaveStubFrame(masm);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
6959
// Shared emitter for the DOM-proxy native-getter stubs.  Guards the proxy's
// shape, runs the DOM proxy checks (handler identity, expando shape, and —
// when |expandoAndGenerationAddr|/|generationAddr| are non-null — the expando
// generation), guards the holder's shape, then calls the native getter via
// the DoCallNativeGetter VM wrapper.
bool
ICGetPropCallDOMProxyNativeCompiler::generateStubCode(MacroAssembler &masm,
                                                      Address* expandoAndGenerationAddr,
                                                      Address* generationAddr)
{
    Label failure;
    GeneralRegisterSet regs(availableGeneralRegs(1));
    Register scratch = regs.takeAnyExcluding(BaselineTailCallReg);

    // Guard input is an object.
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    // Unbox.
    Register objReg = masm.extractObject(R0, ExtractTemp0);

    // Shape guard.
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallDOMProxyNative::offsetOfShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failure);

    // Guard for ListObject.
    {
        // GenerateDOMProxyChecks manages its own temporaries; hand it every
        // register except the ones holding live state.
        GeneralRegisterSet domProxyRegSet(GeneralRegisterSet::All());
        domProxyRegSet.take(BaselineStubReg);
        domProxyRegSet.take(objReg);
        domProxyRegSet.take(scratch);
        Address expandoShapeAddr(BaselineStubReg, ICGetProp_CallDOMProxyNative::offsetOfExpandoShape());
        GenerateDOMProxyChecks(
            cx, masm, objReg,
            Address(BaselineStubReg, ICGetProp_CallDOMProxyNative::offsetOfProxyHandler()),
            &expandoShapeAddr, expandoAndGenerationAddr, generationAddr,
            scratch,
            domProxyRegSet,
            &failure);
    }

    Register holderReg = regs.takeAny();
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallDOMProxyNative::offsetOfHolder()),
                 holderReg);
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallDOMProxyNative::offsetOfHolderShape()),
                 scratch);
    masm.branchTestObjShape(Assembler::NotEqual, holderReg, scratch, &failure);
    regs.add(holderReg);

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, scratch);

    // Load callee function.
    Register callee = regs.takeAny();
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallDOMProxyNative::offsetOfGetter()), callee);

    // Push args for vm call.
    masm.push(objReg);
    masm.push(callee);

    // Don't have to preserve R0 anymore.
    regs.add(R0);

    // If needed, update SPS Profiler frame entry.
    emitProfilingUpdate(masm, regs, ICGetProp_CallDOMProxyNative::offsetOfPCOffset());

    if (!callVM(DoCallNativeGetterInfo, masm))
        return false;
    leaveStubFrame(masm);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
7032
7033 bool
7034 ICGetPropCallDOMProxyNativeCompiler::generateStubCode(MacroAssembler &masm)
7035 {
7036 if (kind == ICStub::GetProp_CallDOMProxyNative)
7037 return generateStubCode(masm, nullptr, nullptr);
7038
7039 Address internalStructAddress(BaselineStubReg,
7040 ICGetProp_CallDOMProxyWithGenerationNative::offsetOfInternalStruct());
7041 Address generationAddress(BaselineStubReg,
7042 ICGetProp_CallDOMProxyWithGenerationNative::offsetOfGeneration());
7043 return generateStubCode(masm, &internalStructAddress, &generationAddress);
7044 }
7045
7046 ICStub *
7047 ICGetPropCallDOMProxyNativeCompiler::getStub(ICStubSpace *space)
7048 {
7049 RootedShape shape(cx, proxy_->lastProperty());
7050 RootedShape holderShape(cx, holder_->lastProperty());
7051
7052 Value expandoSlot = proxy_->getFixedSlot(GetDOMProxyExpandoSlot());
7053 RootedShape expandoShape(cx, nullptr);
7054 ExpandoAndGeneration *expandoAndGeneration;
7055 int32_t generation;
7056 Value expandoVal;
7057 if (kind == ICStub::GetProp_CallDOMProxyNative) {
7058 expandoVal = expandoSlot;
7059 } else {
7060 JS_ASSERT(kind == ICStub::GetProp_CallDOMProxyWithGenerationNative);
7061 JS_ASSERT(!expandoSlot.isObject() && !expandoSlot.isUndefined());
7062 expandoAndGeneration = (ExpandoAndGeneration*)expandoSlot.toPrivate();
7063 expandoVal = expandoAndGeneration->expando;
7064 generation = expandoAndGeneration->generation;
7065 }
7066
7067 if (expandoVal.isObject())
7068 expandoShape = expandoVal.toObject().lastProperty();
7069
7070 if (kind == ICStub::GetProp_CallDOMProxyNative) {
7071 return ICGetProp_CallDOMProxyNative::New(
7072 space, getStubCode(), firstMonitorStub_, shape, proxy_->handler(),
7073 expandoShape, holder_, holderShape, getter_, pcOffset_);
7074 }
7075
7076 return ICGetProp_CallDOMProxyWithGenerationNative::New(
7077 space, getStubCode(), firstMonitorStub_, shape, proxy_->handler(),
7078 expandoAndGeneration, generation, expandoShape, holder_, holderShape, getter_,
7079 pcOffset_);
7080 }
7081
7082 ICStub *
7083 ICGetProp_DOMProxyShadowed::Compiler::getStub(ICStubSpace *space)
7084 {
7085 RootedShape shape(cx, proxy_->lastProperty());
7086 return ICGetProp_DOMProxyShadowed::New(space, getStubCode(), firstMonitorStub_, shape,
7087 proxy_->handler(), name_, pcOffset_);
7088 }
7089
7090 static bool
7091 ProxyGet(JSContext *cx, HandleObject proxy, HandlePropertyName name, MutableHandleValue vp)
7092 {
7093 RootedId id(cx, NameToId(name));
7094 return Proxy::get(cx, proxy, proxy, id, vp);
7095 }
7096
// VM-function wrapper for ProxyGet, called from the DOMProxyShadowed stub.
typedef bool (*ProxyGetFn)(JSContext *cx, HandleObject proxy, HandlePropertyName name,
                           MutableHandleValue vp);
static const VMFunction ProxyGetInfo = FunctionInfo<ProxyGetFn>(ProxyGet);
7100
// Emit a GetProp stub for a DOM proxy whose expando shadows the property:
// after the shape and DOM-proxy checks, the get cannot be inlined, so the
// stub calls the ProxyGet VM helper with the stub's stored property name.
bool
ICGetProp_DOMProxyShadowed::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;

    GeneralRegisterSet regs(availableGeneralRegs(1));
    // Need to reserve a scratch register, but the scratch register should not be
    // BaselineTailCallReg, because it's used for |enterStubFrame| which needs a
    // non-BaselineTailCallReg scratch reg.
    Register scratch = regs.takeAnyExcluding(BaselineTailCallReg);

    // Guard input is an object.
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    // Unbox.
    Register objReg = masm.extractObject(R0, ExtractTemp0);

    // Shape guard.
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_DOMProxyShadowed::offsetOfShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failure);

    // Guard for ListObject.
    {
        GeneralRegisterSet domProxyRegSet(GeneralRegisterSet::All());
        domProxyRegSet.take(BaselineStubReg);
        domProxyRegSet.take(objReg);
        domProxyRegSet.take(scratch);
        // No expando/generation guards here: a shadowed property means the
        // expando is consulted by ProxyGet itself.
        GenerateDOMProxyChecks(
            cx, masm, objReg,
            Address(BaselineStubReg, ICGetProp_DOMProxyShadowed::offsetOfProxyHandler()),
            /*expandoShapeAddr=*/nullptr,
            /*expandoAndGenerationAddr=*/nullptr,
            /*generationAddr=*/nullptr,
            scratch,
            domProxyRegSet,
            &failure);
    }

    // Call ProxyGet(JSContext *cx, HandleObject proxy, HandlePropertyName name, MutableHandleValue vp);

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, scratch);

    // Push property name and proxy object.
    masm.loadPtr(Address(BaselineStubReg, ICGetProp_DOMProxyShadowed::offsetOfName()), scratch);
    masm.push(scratch);
    masm.push(objReg);

    // Don't have to preserve R0 anymore.
    regs.add(R0);

    // If needed, update SPS Profiler frame entry.
    emitProfilingUpdate(masm, regs, ICGetProp_DOMProxyShadowed::offsetOfPCOffset());

    if (!callVM(ProxyGetInfo, masm))
        return false;
    leaveStubFrame(masm);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
7167
// Emit a stub for |arguments.length|.  The Magic variant handles the lazy
// JS_OPTIMIZED_ARGUMENTS case by reading the actual-argument count straight
// off the baseline frame; the Strict/Normal variants read the packed
// initial-length slot of a real arguments object, bailing if the length has
// been overridden.  Both return an Int32, so no type monitoring is needed.
bool
ICGetProp_ArgumentsLength::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    if (which_ == ICGetProp_ArgumentsLength::Magic) {
        // Ensure that this is lazy arguments.
        masm.branchTestMagicValue(Assembler::NotEqual, R0, JS_OPTIMIZED_ARGUMENTS, &failure);

        // Ensure that frame has not loaded different arguments object since.
        masm.branchTest32(Assembler::NonZero,
                          Address(BaselineFrameReg, BaselineFrame::reverseOffsetOfFlags()),
                          Imm32(BaselineFrame::HAS_ARGS_OBJ),
                          &failure);

        Address actualArgs(BaselineFrameReg, BaselineFrame::offsetOfNumActualArgs());
        masm.loadPtr(actualArgs, R0.scratchReg());
        masm.tagValue(JSVAL_TYPE_INT32, R0.scratchReg(), R0);
        EmitReturnFromIC(masm);

        masm.bind(&failure);
        EmitStubGuardFailure(masm);
        return true;
    }
    JS_ASSERT(which_ == ICGetProp_ArgumentsLength::Strict ||
              which_ == ICGetProp_ArgumentsLength::Normal);

    bool isStrict = which_ == ICGetProp_ArgumentsLength::Strict;
    const Class *clasp = isStrict ? &StrictArgumentsObject::class_ : &NormalArgumentsObject::class_;

    Register scratchReg = R1.scratchReg();

    // Guard on input being an arguments object.
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);
    Register objReg = masm.extractObject(R0, ExtractTemp0);
    masm.branchTestObjClass(Assembler::NotEqual, objReg, scratchReg, clasp, &failure);

    // Get initial length value.
    masm.unboxInt32(Address(objReg, ArgumentsObject::getInitialLengthSlotOffset()), scratchReg);

    // Test if length has been overridden.
    masm.branchTest32(Assembler::NonZero,
                      scratchReg,
                      Imm32(ArgumentsObject::LENGTH_OVERRIDDEN_BIT),
                      &failure);

    // Nope, shift out arguments length and return it.
    // No need to type monitor because this stub always returns Int32.
    masm.rshiftPtr(Imm32(ArgumentsObject::PACKED_BITS_COUNT), scratchReg);
    masm.tagValue(JSVAL_TYPE_INT32, scratchReg, R0);
    EmitReturnFromIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
7223
7224 void
7225 BaselineScript::noteAccessedGetter(uint32_t pcOffset)
7226 {
7227 ICEntry &entry = icEntryFromPCOffset(pcOffset);
7228 ICFallbackStub *stub = entry.fallbackStub();
7229
7230 if (stub->isGetProp_Fallback())
7231 stub->toGetProp_Fallback()->noteAccessedGetter();
7232 }
7233
7234 //
7235 // SetProp_Fallback
7236 //
7237
// Attach an optimized stub for a SETPROP/SETGNAME/SETNAME op.
//
// Attempts are made in priority order: (1) add-a-new-slot, (2) write an
// existing slot, (3) scripted setter call, (4) native setter call.  |attached|
// is set to true if any stub was attached; returning true with
// |*attached == false| simply means no cacheable case applied.  |oldShape| and
// |oldSlots| describe the object *before* the set executed, which is what the
// add-slot check needs.
static bool
TryAttachSetPropStub(JSContext *cx, HandleScript script, jsbytecode *pc, ICSetProp_Fallback *stub,
                     HandleObject obj, HandleShape oldShape, uint32_t oldSlots,
                     HandlePropertyName name, HandleId id, HandleValue rhs, bool *attached)
{
    JS_ASSERT(!*attached);

    // Only plain native, unwatched objects are cacheable.
    if (!obj->isNative() || obj->watched())
        return true;

    RootedShape shape(cx);
    RootedObject holder(cx);
    if (!EffectlesslyLookupProperty(cx, obj, name, &holder, &shape))
        return false;

    size_t chainDepth;
    if (IsCacheableSetPropAddSlot(cx, obj, oldShape, oldSlots, id, holder, shape, &chainDepth)) {
        // Don't attach if proto chain depth is too high.
        if (chainDepth > ICSetProp_NativeAdd::MAX_PROTO_CHAIN_DEPTH)
            return true;

        bool isFixedSlot;
        uint32_t offset;
        GetFixedOrDynamicSlotOffset(obj, shape->slot(), &isFixedSlot, &offset);

        IonSpew(IonSpew_BaselineIC, "  Generating SetProp(NativeObject.ADD) stub");
        ICSetPropNativeAddCompiler compiler(cx, obj, oldShape, chainDepth, isFixedSlot, offset);
        ICUpdatedStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;
        // Seed the type-update chain with the value just written.
        if (!newStub->addUpdateStubForValue(cx, script, obj, id, rhs))
            return false;

        stub->addNewStub(newStub);
        *attached = true;
        return true;
    }

    if (IsCacheableSetPropWriteSlot(obj, oldShape, holder, shape)) {
        bool isFixedSlot;
        uint32_t offset;
        GetFixedOrDynamicSlotOffset(obj, shape->slot(), &isFixedSlot, &offset);

        IonSpew(IonSpew_BaselineIC, "  Generating SetProp(NativeObject.PROP) stub");
        ICSetProp_Native::Compiler compiler(cx, obj, isFixedSlot, offset);
        ICUpdatedStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;
        if (!newStub->addUpdateStubForValue(cx, script, obj, id, rhs))
            return false;

        stub->addNewStub(newStub);
        *attached = true;
        return true;
    }

    bool isScripted = false;
    bool cacheableCall = IsCacheableSetPropCall(cx, obj, holder, shape, &isScripted);

    // Try handling scripted setters.
    if (cacheableCall && isScripted) {
        RootedFunction callee(cx, &shape->setterObject()->as<JSFunction>());
        JS_ASSERT(obj != holder);
        JS_ASSERT(callee->hasScript());

        IonSpew(IonSpew_BaselineIC, "  Generating SetProp(NativeObj/ScriptedSetter %s:%d) stub",
                callee->nonLazyScript()->filename(), callee->nonLazyScript()->lineno());

        ICSetProp_CallScripted::Compiler compiler(cx, obj, holder, callee, script->pcToOffset(pc));
        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;

        stub->addNewStub(newStub);
        *attached = true;
        return true;
    }

    // Try handling JSNative setters.
    if (cacheableCall && !isScripted) {
        RootedFunction callee(cx, &shape->setterObject()->as<JSFunction>());
        JS_ASSERT(obj != holder);
        JS_ASSERT(callee->isNative());

        IonSpew(IonSpew_BaselineIC, "  Generating SetProp(NativeObj/NativeSetter %p) stub",
                callee->native());

        ICSetProp_CallNative::Compiler compiler(cx, obj, holder, callee, script->pcToOffset(pc));
        ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
        if (!newStub)
            return false;

        stub->addNewStub(newStub);
        *attached = true;
        return true;
    }

    return true;
}
7338
// Fallback VM path for SETPROP-family ops: perform the set through the
// interpreter machinery, leave the RHS in |res| (the op's stack result), and
// then try to attach an optimized stub for next time.  The pre-set shape and
// dynamic-slot count are captured first so the add-slot stub can compare
// against them.
static bool
DoSetPropFallback(JSContext *cx, BaselineFrame *frame, ICSetProp_Fallback *stub_,
                  HandleValue lhs, HandleValue rhs, MutableHandleValue res)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICSetProp_Fallback *> stub(frame, stub_);

    RootedScript script(cx, frame->script());
    jsbytecode *pc = stub->icEntry()->pc(script);
    JSOp op = JSOp(*pc);
    FallbackICSpew(cx, stub, "SetProp(%s)", js_CodeName[op]);

    JS_ASSERT(op == JSOP_SETPROP ||
              op == JSOP_SETNAME ||
              op == JSOP_SETGNAME ||
              op == JSOP_INITPROP ||
              op == JSOP_SETALIASEDVAR);

    // SETALIASEDVAR carries a scope coordinate instead of a name atom.
    RootedPropertyName name(cx);
    if (op == JSOP_SETALIASEDVAR)
        name = ScopeCoordinateName(cx->runtime()->scopeCoordinateNameCache, script, pc);
    else
        name = script->getName(pc);
    RootedId id(cx, NameToId(name));

    RootedObject obj(cx, ToObjectFromStack(cx, lhs));
    if (!obj)
        return false;
    // Snapshot pre-set state for the add-slot cacheability check below.
    RootedShape oldShape(cx, obj->lastProperty());
    uint32_t oldSlots = obj->numDynamicSlots();

    if (op == JSOP_INITPROP) {
        MOZ_ASSERT(name != cx->names().proto, "should have used JSOP_MUTATEPROTO");
        MOZ_ASSERT(obj->is<JSObject>());
        if (!DefineNativeProperty(cx, obj, id, rhs, nullptr, nullptr, JSPROP_ENUMERATE))
            return false;
    } else if (op == JSOP_SETNAME || op == JSOP_SETGNAME) {
        if (!SetNameOperation(cx, script, pc, obj, rhs))
            return false;
    } else if (op == JSOP_SETALIASEDVAR) {
        obj->as<ScopeObject>().setAliasedVar(cx, pc, name, rhs);
    } else {
        MOZ_ASSERT(op == JSOP_SETPROP);
        if (script->strict()) {
            if (!js::SetProperty<true>(cx, obj, id, rhs))
                return false;
        } else {
            if (!js::SetProperty<false>(cx, obj, id, rhs))
                return false;
        }
    }

    // Leave the RHS on the stack.
    res.set(rhs);

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    if (stub->numOptimizedStubs() >= ICSetProp_Fallback::MAX_OPTIMIZED_STUBS) {
        // TODO: Discard all stubs in this IC and replace with generic setprop stub.
        return true;
    }

    bool attached = false;
    if (!TryAttachSetPropStub(cx, script, pc, stub, obj, oldShape, oldSlots, name, id, rhs,
                              &attached))
    {
        return false;
    }
    if (attached)
        return true;

    JS_ASSERT(!attached);
    stub->noteUnoptimizableAccess();

    return true;
}
7417
// VM-function wrapper for DoSetPropFallback.  PopValues(2) discards the two
// values the fallback stub pushed to sync the stack for the decompiler.
typedef bool (*DoSetPropFallbackFn)(JSContext *, BaselineFrame *, ICSetProp_Fallback *,
                                    HandleValue, HandleValue, MutableHandleValue);
static const VMFunction DoSetPropFallbackInfo =
    FunctionInfo<DoSetPropFallbackFn>(DoSetPropFallback, PopValues(2));
7422
// Emit the SetProp fallback stub: sync the stack for the decompiler, tail-call
// DoSetPropFallback, and lay down the two return-address targets (from-stub
// and from-Ion) that debug-mode OSR uses to resume inlined scripted setters.
bool
ICSetProp_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(R0 == JSReturnOperand);

    EmitRestoreTailCallReg(masm);

    // Ensure stack is fully synced for the expression decompiler.
    masm.pushValue(R0);
    masm.pushValue(R1);

    // Push arguments.
    masm.pushValue(R1);
    masm.pushValue(R0);
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    if (!tailCallVM(DoSetPropFallbackInfo, masm))
        return false;

    // What follows is bailout debug mode recompile code for inlined scripted
    // getters.  The return address pointed to by the baseline stack points
    // here.
    //
    // Even though the fallback frame doesn't enter a stub frame, the CallScripted
    // frame that we are emulating does.  Again, we lie.
#ifdef DEBUG
    entersStubFrame_ = true;
#endif

    Label leaveStubCommon;

    // Record the code offsets of the two entry points; postGenerateStubCode
    // publishes them on the JitCompartment.
    returnFromStubOffset_ = masm.currentOffset();
    leaveStubFrameHead(masm, false);
    masm.jump(&leaveStubCommon);

    returnFromIonOffset_ = masm.currentOffset();
    leaveStubFrameHead(masm, true);

    masm.bind(&leaveStubCommon);
    leaveStubFrameCommonTail(masm);

    // Retrieve the stashed initial argument from the caller's frame before returning
    EmitUnstowICValues(masm, 1);
    EmitReturnFromIC(masm);

    return true;
}
7471
7472 bool
7473 ICSetProp_Fallback::Compiler::postGenerateStubCode(MacroAssembler &masm, Handle<JitCode *> code)
7474 {
7475 JitCompartment *comp = cx->compartment()->jitCompartment();
7476
7477 CodeOffsetLabel fromIon(returnFromIonOffset_);
7478 fromIon.fixup(&masm);
7479 comp->initBaselineSetPropReturnFromIonAddr(code->raw() + fromIon.offset());
7480
7481 CodeOffsetLabel fromVM(returnFromStubOffset_);
7482 fromVM.fixup(&masm);
7483 comp->initBaselineSetPropReturnFromStubAddr(code->raw() + fromVM.offset());
7484
7485 return true;
7486 }
7487
// Emit a SetProp stub that writes an existing native slot: guard shape and
// type object, run the type-update IC on the RHS, then store the value (with
// pre-barrier, and post-barrier under generational GC).  The RHS is left in
// R0 as the op's result.
bool
ICSetProp_Native::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;

    // Guard input is an object.
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    GeneralRegisterSet regs(availableGeneralRegs(2));
    Register scratch = regs.takeAny();

    // Unbox and shape guard.
    Register objReg = masm.extractObject(R0, ExtractTemp0);
    masm.loadPtr(Address(BaselineStubReg, ICSetProp_Native::offsetOfShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failure);

    // Guard that the type object matches.
    masm.loadPtr(Address(BaselineStubReg, ICSetProp_Native::offsetOfType()), scratch);
    masm.branchPtr(Assembler::NotEqual, Address(objReg, JSObject::offsetOfType()), scratch,
                   &failure);

    // Stow both R0 and R1 (object and value).
    EmitStowICValues(masm, 2);

    // Type update stub expects the value to check in R0.
    masm.moveValue(R1, R0);

    // Call the type-update stub.
    if (!callTypeUpdateIC(masm, sizeof(Value)))
        return false;

    // Unstow R0 and R1 (object and key)
    EmitUnstowICValues(masm, 2);

    regs.add(R0);
    // objReg aliases R0's payload, which was just re-added; re-reserve it.
    regs.takeUnchecked(objReg);

    Register holderReg;
    if (isFixedSlot_) {
        holderReg = objReg;
    } else {
        holderReg = regs.takeAny();
        masm.loadPtr(Address(objReg, JSObject::offsetOfSlots()), holderReg);
    }

    // Perform the store.
    masm.load32(Address(BaselineStubReg, ICSetProp_Native::offsetOfOffset()), scratch);
    EmitPreBarrier(masm, BaseIndex(holderReg, scratch, TimesOne), MIRType_Value);
    masm.storeValue(R1, BaseIndex(holderReg, scratch, TimesOne));
    if (holderReg != objReg)
        regs.add(holderReg);
#ifdef JSGC_GENERATIONAL
    {
        Register scr = regs.takeAny();
        GeneralRegisterSet saveRegs;
        saveRegs.add(R1);
        emitPostWriteBarrierSlot(masm, objReg, R1, scr, saveRegs);
        regs.add(scr);
    }
#endif

    // The RHS has to be in R0.
    masm.moveValue(R1, R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
7558
// Allocate an add-slot SetProp stub.  Collects the old shape plus the shape
// of each prototype up to |protoChainDepth_|, then dispatches to the
// depth-templated stub constructor (the depth must be a compile-time
// constant, hence the switch).  Returns nullptr on OOM.
ICUpdatedStub *
ICSetPropNativeAddCompiler::getStub(ICStubSpace *space)
{
    AutoShapeVector shapes(cx);
    if (!shapes.append(oldShape_))
        return nullptr;

    if (!GetProtoShapes(obj_, protoChainDepth_, &shapes))
        return nullptr;

    JS_STATIC_ASSERT(ICSetProp_NativeAdd::MAX_PROTO_CHAIN_DEPTH == 4);

    ICUpdatedStub *stub = nullptr;
    switch(protoChainDepth_) {
      case 0: stub = getStubSpecific<0>(space, &shapes); break;
      case 1: stub = getStubSpecific<1>(space, &shapes); break;
      case 2: stub = getStubSpecific<2>(space, &shapes); break;
      case 3: stub = getStubSpecific<3>(space, &shapes); break;
      case 4: stub = getStubSpecific<4>(space, &shapes); break;
      default: MOZ_ASSUME_UNREACHABLE("ProtoChainDepth too high.");
    }
    if (!stub || !stub->initUpdatingChain(cx, space))
        return nullptr;
    return stub;
}
7584
// Emit a SetProp stub that adds a new slot: guard the old shape, type object,
// and each prototype's shape, run the type-update IC, then write the
// object's new shape and store the value.  No pre-barrier on the slot is
// needed since the slot is freshly initialized.  |failureUnstow| handles
// guard failures that occur after R0/R1 have been stowed.
bool
ICSetPropNativeAddCompiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    Label failureUnstow;

    // Guard input is an object.
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    GeneralRegisterSet regs(availableGeneralRegs(2));
    Register scratch = regs.takeAny();

    // Unbox and guard against old shape.
    Register objReg = masm.extractObject(R0, ExtractTemp0);
    masm.loadPtr(Address(BaselineStubReg, ICSetProp_NativeAddImpl<0>::offsetOfShape(0)), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failure);

    // Guard that the type object matches.
    masm.loadPtr(Address(BaselineStubReg, ICSetProp_NativeAdd::offsetOfType()), scratch);
    masm.branchPtr(Assembler::NotEqual, Address(objReg, JSObject::offsetOfType()), scratch,
                   &failure);

    // Stow both R0 and R1 (object and value).
    EmitStowICValues(masm, 2);

    // R0/R1 are stowed, so all registers but objReg are free again.
    regs = availableGeneralRegs(1);
    scratch = regs.takeAny();
    Register protoReg = regs.takeAny();
    // Check the proto chain.
    for (size_t i = 0; i < protoChainDepth_; i++) {
        masm.loadObjProto(i == 0 ? objReg : protoReg, protoReg);
        // A null proto means the chain changed shape; bail.
        masm.branchTestPtr(Assembler::Zero, protoReg, protoReg, &failureUnstow);
        masm.loadPtr(Address(BaselineStubReg, ICSetProp_NativeAddImpl<0>::offsetOfShape(i + 1)),
                     scratch);
        masm.branchTestObjShape(Assembler::NotEqual, protoReg, scratch, &failureUnstow);
    }

    // Shape and type checks succeeded, ok to proceed.

    // Load RHS into R0 for TypeUpdate check.
    // Stack is currently: [..., ObjValue, RHSValue, MaybeReturnAddr? ]
    masm.loadValue(Address(BaselineStackReg, ICStackValueOffset), R0);

    // Call the type-update stub.
    if (!callTypeUpdateIC(masm, sizeof(Value)))
        return false;

    // Unstow R0 and R1 (object and key)
    EmitUnstowICValues(masm, 2);
    regs = availableGeneralRegs(2);
    scratch = regs.takeAny();

    // Changing object shape.  Write the object's new shape.
    Address shapeAddr(objReg, JSObject::offsetOfShape());
    EmitPreBarrier(masm, shapeAddr, MIRType_Shape);
    masm.loadPtr(Address(BaselineStubReg, ICSetProp_NativeAdd::offsetOfNewShape()), scratch);
    masm.storePtr(scratch, shapeAddr);

    Register holderReg;
    regs.add(R0);
    regs.takeUnchecked(objReg);
    if (isFixedSlot_) {
        holderReg = objReg;
    } else {
        holderReg = regs.takeAny();
        masm.loadPtr(Address(objReg, JSObject::offsetOfSlots()), holderReg);
    }

    // Perform the store.  No write barrier required since this is a new
    // initialization.
    masm.load32(Address(BaselineStubReg, ICSetProp_NativeAdd::offsetOfOffset()), scratch);
    masm.storeValue(R1, BaseIndex(holderReg, scratch, TimesOne));

    if (holderReg != objReg)
        regs.add(holderReg);

#ifdef JSGC_GENERATIONAL
    {
        Register scr = regs.takeAny();
        GeneralRegisterSet saveRegs;
        saveRegs.add(R1);
        emitPostWriteBarrierSlot(masm, objReg, R1, scr, saveRegs);
    }
#endif

    // The RHS has to be in R0.
    masm.moveValue(R1, R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failureUnstow);
    EmitUnstowICValues(masm, 2);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
7682
// Generate jitcode for an ICSetProp_CallScripted stub: a SETPROP that invokes
// a scripted setter found on a shape-guarded holder object.  The setter is
// called through a stub frame with the RHS as its single argument and the
// receiver as |this|; the original RHS is returned as the operation's result.
bool
ICSetProp_CallScripted::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    Label failureUnstow;
    Label failureLeaveStubFrame;

    // Guard input is an object.
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    // Stow R0 and R1 to free up registers.
    EmitStowICValues(masm, 2);

    GeneralRegisterSet regs(availableGeneralRegs(1));
    Register scratch = regs.takeAnyExcluding(BaselineTailCallReg);

    // Unbox and shape guard.
    Register objReg = masm.extractObject(R0, ExtractTemp0);
    masm.loadPtr(Address(BaselineStubReg, ICSetProp_CallScripted::offsetOfShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failureUnstow);

    // Guard that the holder (where the setter lives) still has its recorded shape.
    Register holderReg = regs.takeAny();
    masm.loadPtr(Address(BaselineStubReg, ICSetProp_CallScripted::offsetOfHolder()), holderReg);
    masm.loadPtr(Address(BaselineStubReg, ICSetProp_CallScripted::offsetOfHolderShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, holderReg, scratch, &failureUnstow);
    regs.add(holderReg);

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, scratch);

    // Load callee function and code. To ensure that |code| doesn't end up being
    // ArgumentsRectifierReg, if it's available we assign it to |callee| instead.
    Register callee;
    if (regs.has(ArgumentsRectifierReg)) {
        callee = ArgumentsRectifierReg;
        regs.take(callee);
    } else {
        callee = regs.takeAny();
    }
    Register code = regs.takeAny();
    masm.loadPtr(Address(BaselineStubReg, ICSetProp_CallScripted::offsetOfSetter()), callee);
    masm.branchIfFunctionHasNoScript(callee, &failureLeaveStubFrame);
    masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), code);
    masm.loadBaselineOrIonRaw(code, code, SequentialExecution, &failureLeaveStubFrame);

    // Setter is called with the new value as the only argument, and |obj| as thisv.
    // Note that we use Push, not push, so that callIon will align the stack
    // properly on ARM.

    // To Push R1, read it off of the stowed values on stack.
    // Stack: [ ..., R0, R1, ..STUBFRAME-HEADER.. ]
    masm.movePtr(BaselineStackReg, scratch);
    masm.PushValue(Address(scratch, STUB_FRAME_SIZE));
    masm.Push(R0);
    EmitCreateStubFrameDescriptor(masm, scratch);
    masm.Push(Imm32(1));  // ActualArgc is 1
    masm.Push(callee);
    masm.Push(scratch);

    // Handle arguments underflow: if the setter declares more than one formal,
    // go through the arguments rectifier to pad with undefined.
    Label noUnderflow;
    masm.load16ZeroExtend(Address(callee, JSFunction::offsetOfNargs()), scratch);
    masm.branch32(Assembler::BelowOrEqual, scratch, Imm32(1), &noUnderflow);
    {
        // Call the arguments rectifier.
        JS_ASSERT(ArgumentsRectifierReg != code);

        JitCode *argumentsRectifier =
            cx->runtime()->jitRuntime()->getArgumentsRectifier(SequentialExecution);

        masm.movePtr(ImmGCPtr(argumentsRectifier), code);
        masm.loadPtr(Address(code, JitCode::offsetOfCode()), code);
        masm.mov(ImmWord(1), ArgumentsRectifierReg);
    }

    masm.bind(&noUnderflow);

    // If needed, update SPS Profiler frame entry. At this point, callee and scratch can
    // be clobbered.
    {
        GeneralRegisterSet availRegs = availableGeneralRegs(0);
        availRegs.take(ArgumentsRectifierReg);
        availRegs.take(code);
        emitProfilingUpdate(masm, availRegs, ICSetProp_CallScripted::offsetOfPCOffset());
    }

    masm.callIon(code);

    // calledIntoIon = true: the stub frame return address must be recovered.
    leaveStubFrame(masm, true);
    // Do not care about return value from function. The original RHS should be returned
    // as the result of this operation.
    EmitUnstowICValues(masm, 2);
    masm.moveValue(R1, R0);
    EmitReturnFromIC(masm);

    // Leave stub frame and go to next stub.
    masm.bind(&failureLeaveStubFrame);
    leaveStubFrame(masm, false);

    // Unstow R0 and R1
    masm.bind(&failureUnstow);
    EmitUnstowICValues(masm, 2);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
7791
7792 static bool
7793 DoCallNativeSetter(JSContext *cx, HandleFunction callee, HandleObject obj, HandleValue val)
7794 {
7795 JS_ASSERT(callee->isNative());
7796 JSNative natfun = callee->native();
7797
7798 JS::AutoValueArray<3> vp(cx);
7799 vp[0].setObject(*callee.get());
7800 vp[1].setObject(*obj.get());
7801 vp[2].set(val);
7802
7803 return natfun(cx, 1, vp.begin());
7804 }
7805
// VM trampoline glue: jitcode cannot invoke a native setter directly, so
// ICSetProp_CallNative stubs call into DoCallNativeSetter through this
// VMFunction descriptor.
typedef bool (*DoCallNativeSetterFn)(JSContext *, HandleFunction, HandleObject, HandleValue);
static const VMFunction DoCallNativeSetterInfo =
    FunctionInfo<DoCallNativeSetterFn>(DoCallNativeSetter);
7809
// Generate jitcode for an ICSetProp_CallNative stub: a SETPROP that invokes a
// native setter found on a shape-guarded holder object.  The setter is called
// through the DoCallNativeSetter VM function; the original RHS is returned as
// the operation's result.
bool
ICSetProp_CallNative::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    Label failureUnstow;

    // Guard input is an object.
    masm.branchTestObject(Assembler::NotEqual, R0, &failure);

    // Stow R0 and R1 to free up registers.
    EmitStowICValues(masm, 2);

    GeneralRegisterSet regs(availableGeneralRegs(1));
    Register scratch = regs.takeAnyExcluding(BaselineTailCallReg);

    // Unbox and shape guard.
    Register objReg = masm.extractObject(R0, ExtractTemp0);
    masm.loadPtr(Address(BaselineStubReg, ICSetProp_CallNative::offsetOfShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, objReg, scratch, &failureUnstow);

    // Guard that the holder (where the setter lives) still has its recorded shape.
    Register holderReg = regs.takeAny();
    masm.loadPtr(Address(BaselineStubReg, ICSetProp_CallNative::offsetOfHolder()), holderReg);
    masm.loadPtr(Address(BaselineStubReg, ICSetProp_CallNative::offsetOfHolderShape()), scratch);
    masm.branchTestObjShape(Assembler::NotEqual, holderReg, scratch, &failureUnstow);
    regs.add(holderReg);

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, scratch);

    // Load the callee function for the VM call below.
    Register callee = regs.takeAny();
    masm.loadPtr(Address(BaselineStubReg, ICSetProp_CallNative::offsetOfSetter()), callee);

    // Push DoCallNativeSetter's arguments in reverse order: val, obj, callee.
    // To Push R1, read it off of the stowed values on stack.
    // Stack: [ ..., R0, R1, ..STUBFRAME-HEADER.. ]
    masm.movePtr(BaselineStackReg, scratch);
    masm.pushValue(Address(scratch, STUB_FRAME_SIZE));
    masm.push(objReg);
    masm.push(callee);

    // Don't need to preserve R0 anymore.
    regs.add(R0);

    // If needed, update SPS Profiler frame entry.
    emitProfilingUpdate(masm, regs, ICSetProp_CallNative::offsetOfPCOffset());

    if (!callVM(DoCallNativeSetterInfo, masm))
        return false;
    leaveStubFrame(masm);

    // Do not care about return value from function. The original RHS should be returned
    // as the result of this operation.
    EmitUnstowICValues(masm, 2);
    masm.moveValue(R1, R0);
    EmitReturnFromIC(masm);

    // Unstow R0 and R1
    masm.bind(&failureUnstow);
    EmitUnstowICValues(masm, 2);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
7876
7877 //
7878 // Call_Fallback
7879 //
7880
7881 static bool
7882 TryAttachFunApplyStub(JSContext *cx, ICCall_Fallback *stub, HandleScript script, jsbytecode *pc,
7883 HandleValue thisv, uint32_t argc, Value *argv)
7884 {
7885 if (argc != 2)
7886 return true;
7887
7888 if (!thisv.isObject() || !thisv.toObject().is<JSFunction>())
7889 return true;
7890 RootedFunction target(cx, &thisv.toObject().as<JSFunction>());
7891
7892 bool isScripted = target->hasJITCode();
7893
7894 // right now, only handle situation where second argument is |arguments|
7895 if (argv[1].isMagic(JS_OPTIMIZED_ARGUMENTS) && !script->needsArgsObj()) {
7896 if (isScripted && !stub->hasStub(ICStub::Call_ScriptedApplyArguments)) {
7897 IonSpew(IonSpew_BaselineIC, " Generating Call_ScriptedApplyArguments stub");
7898
7899 ICCall_ScriptedApplyArguments::Compiler compiler(
7900 cx, stub->fallbackMonitorStub()->firstMonitorStub(), script->pcToOffset(pc));
7901 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
7902 if (!newStub)
7903 return false;
7904
7905 stub->addNewStub(newStub);
7906 return true;
7907 }
7908
7909 // TODO: handle FUNAPPLY for native targets.
7910 }
7911
7912 if (argv[1].isObject() && argv[1].toObject().is<ArrayObject>()) {
7913 if (isScripted && !stub->hasStub(ICStub::Call_ScriptedApplyArray)) {
7914 IonSpew(IonSpew_BaselineIC, " Generating Call_ScriptedApplyArray stub");
7915
7916 ICCall_ScriptedApplyArray::Compiler compiler(
7917 cx, stub->fallbackMonitorStub()->firstMonitorStub(), script->pcToOffset(pc));
7918 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
7919 if (!newStub)
7920 return false;
7921
7922 stub->addNewStub(newStub);
7923 return true;
7924 }
7925 }
7926 return true;
7927 }
7928
7929 static bool
7930 TryAttachFunCallStub(JSContext *cx, ICCall_Fallback *stub, HandleScript script, jsbytecode *pc,
7931 HandleValue thisv, bool *attached)
7932 {
7933 // Try to attach a stub for Function.prototype.call with scripted |this|.
7934
7935 *attached = false;
7936 if (!thisv.isObject() || !thisv.toObject().is<JSFunction>())
7937 return true;
7938 RootedFunction target(cx, &thisv.toObject().as<JSFunction>());
7939
7940 // Attach a stub if the script can be Baseline-compiled. We do this also
7941 // if the script is not yet compiled to avoid attaching a CallNative stub
7942 // that handles everything, even after the callee becomes hot.
7943 if (target->hasScript() && target->nonLazyScript()->canBaselineCompile() &&
7944 !stub->hasStub(ICStub::Call_ScriptedFunCall))
7945 {
7946 IonSpew(IonSpew_BaselineIC, " Generating Call_ScriptedFunCall stub");
7947
7948 ICCall_ScriptedFunCall::Compiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
7949 script->pcToOffset(pc));
7950 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
7951 if (!newStub)
7952 return false;
7953
7954 *attached = true;
7955 stub->addNewStub(newStub);
7956 return true;
7957 }
7958
7959 return true;
7960 }
7961
7962 static bool
7963 GetTemplateObjectForNative(JSContext *cx, HandleScript script, jsbytecode *pc,
7964 Native native, const CallArgs &args, MutableHandleObject res)
7965 {
7966 // Check for natives to which template objects can be attached. This is
7967 // done to provide templates to Ion for inlining these natives later on.
7968
7969 if (native == js_Array) {
7970 // Note: the template array won't be used if its length is inaccurately
7971 // computed here. (We allocate here because compilation may occur on a
7972 // separate thread where allocation is impossible.)
7973 size_t count = 0;
7974 if (args.length() != 1)
7975 count = args.length();
7976 else if (args.length() == 1 && args[0].isInt32() && args[0].toInt32() >= 0)
7977 count = args[0].toInt32();
7978 res.set(NewDenseUnallocatedArray(cx, count, nullptr, TenuredObject));
7979 if (!res)
7980 return false;
7981
7982 types::TypeObject *type = types::TypeScript::InitObject(cx, script, pc, JSProto_Array);
7983 if (!type)
7984 return false;
7985 res->setType(type);
7986 return true;
7987 }
7988
7989 if (native == intrinsic_NewDenseArray) {
7990 res.set(NewDenseUnallocatedArray(cx, 0, nullptr, TenuredObject));
7991 if (!res)
7992 return false;
7993
7994 types::TypeObject *type = types::TypeScript::InitObject(cx, script, pc, JSProto_Array);
7995 if (!type)
7996 return false;
7997 res->setType(type);
7998 return true;
7999 }
8000
8001 if (native == js::array_concat) {
8002 if (args.thisv().isObject() && args.thisv().toObject().is<ArrayObject>() &&
8003 !args.thisv().toObject().hasSingletonType())
8004 {
8005 res.set(NewDenseEmptyArray(cx, args.thisv().toObject().getProto(), TenuredObject));
8006 if (!res)
8007 return false;
8008 res->setType(args.thisv().toObject().type());
8009 return true;
8010 }
8011 }
8012
8013 if (native == js::str_split && args.length() == 1 && args[0].isString()) {
8014 res.set(NewDenseUnallocatedArray(cx, 0, nullptr, TenuredObject));
8015 if (!res)
8016 return false;
8017
8018 types::TypeObject *type = types::TypeScript::InitObject(cx, script, pc, JSProto_Array);
8019 if (!type)
8020 return false;
8021 res->setType(type);
8022 return true;
8023 }
8024
8025 if (native == js_String) {
8026 RootedString emptyString(cx, cx->runtime()->emptyString);
8027 res.set(StringObject::create(cx, emptyString, TenuredObject));
8028 if (!res)
8029 return false;
8030 return true;
8031 }
8032
8033 return true;
8034 }
8035
8036 static bool
8037 TryAttachCallStub(JSContext *cx, ICCall_Fallback *stub, HandleScript script, jsbytecode *pc,
8038 JSOp op, uint32_t argc, Value *vp, bool constructing, bool useNewType)
8039 {
8040 if (useNewType || op == JSOP_EVAL)
8041 return true;
8042
8043 if (stub->numOptimizedStubs() >= ICCall_Fallback::MAX_OPTIMIZED_STUBS) {
8044 // TODO: Discard all stubs in this IC and replace with inert megamorphic stub.
8045 // But for now we just bail.
8046 return true;
8047 }
8048
8049 RootedValue callee(cx, vp[0]);
8050 RootedValue thisv(cx, vp[1]);
8051
8052 if (!callee.isObject())
8053 return true;
8054
8055 RootedObject obj(cx, &callee.toObject());
8056 if (!obj->is<JSFunction>())
8057 return true;
8058
8059 RootedFunction fun(cx, &obj->as<JSFunction>());
8060
8061 if (fun->hasScript()) {
8062 // Never attach optimized scripted call stubs for JSOP_FUNAPPLY.
8063 // MagicArguments may escape the frame through them.
8064 if (op == JSOP_FUNAPPLY)
8065 return true;
8066
8067 // If callee is not an interpreted constructor, we have to throw.
8068 if (constructing && !fun->isInterpretedConstructor())
8069 return true;
8070
8071 RootedScript calleeScript(cx, fun->nonLazyScript());
8072 if (!calleeScript->hasBaselineScript() && !calleeScript->hasIonScript())
8073 return true;
8074
8075 if (calleeScript->shouldCloneAtCallsite())
8076 return true;
8077
8078 // Check if this stub chain has already generalized scripted calls.
8079 if (stub->scriptedStubsAreGeneralized()) {
8080 IonSpew(IonSpew_BaselineIC, " Chain already has generalized scripted call stub!");
8081 return true;
8082 }
8083
8084 if (stub->scriptedStubCount() >= ICCall_Fallback::MAX_SCRIPTED_STUBS) {
8085 // Create a Call_AnyScripted stub.
8086 IonSpew(IonSpew_BaselineIC, " Generating Call_AnyScripted stub (cons=%s)",
8087 constructing ? "yes" : "no");
8088
8089 ICCallScriptedCompiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
8090 constructing, script->pcToOffset(pc));
8091 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
8092 if (!newStub)
8093 return false;
8094
8095 // Before adding new stub, unlink all previous Call_Scripted.
8096 stub->unlinkStubsWithKind(cx, ICStub::Call_Scripted);
8097
8098 // Add new generalized stub.
8099 stub->addNewStub(newStub);
8100 return true;
8101 }
8102
8103 // Keep track of the function's |prototype| property in type
8104 // information, for use during Ion compilation.
8105 if (IsIonEnabled(cx))
8106 types::EnsureTrackPropertyTypes(cx, fun, NameToId(cx->names().prototype));
8107
8108 // Remember the template object associated with any script being called
8109 // as a constructor, for later use during Ion compilation.
8110 RootedObject templateObject(cx);
8111 if (constructing) {
8112 templateObject = CreateThisForFunction(cx, fun, MaybeSingletonObject);
8113 if (!templateObject)
8114 return false;
8115 }
8116
8117 IonSpew(IonSpew_BaselineIC,
8118 " Generating Call_Scripted stub (fun=%p, %s:%d, cons=%s)",
8119 fun.get(), fun->nonLazyScript()->filename(), fun->nonLazyScript()->lineno(),
8120 constructing ? "yes" : "no");
8121 ICCallScriptedCompiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
8122 calleeScript, templateObject,
8123 constructing, script->pcToOffset(pc));
8124 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
8125 if (!newStub)
8126 return false;
8127
8128 stub->addNewStub(newStub);
8129 return true;
8130 }
8131
8132 if (fun->isNative() && (!constructing || (constructing && fun->isNativeConstructor()))) {
8133 // Generalized native call stubs are not here yet!
8134 JS_ASSERT(!stub->nativeStubsAreGeneralized());
8135
8136 // Check for JSOP_FUNAPPLY
8137 if (op == JSOP_FUNAPPLY) {
8138 if (fun->native() == js_fun_apply)
8139 return TryAttachFunApplyStub(cx, stub, script, pc, thisv, argc, vp + 2);
8140
8141 // Don't try to attach a "regular" optimized call stubs for FUNAPPLY ops,
8142 // since MagicArguments may escape through them.
8143 return true;
8144 }
8145
8146 if (op == JSOP_FUNCALL && fun->native() == js_fun_call) {
8147 bool attached;
8148 if (!TryAttachFunCallStub(cx, stub, script, pc, thisv, &attached))
8149 return false;
8150 if (attached)
8151 return true;
8152 }
8153
8154 if (stub->nativeStubCount() >= ICCall_Fallback::MAX_NATIVE_STUBS) {
8155 IonSpew(IonSpew_BaselineIC,
8156 " Too many Call_Native stubs. TODO: add Call_AnyNative!");
8157 return true;
8158 }
8159
8160 CallArgs args = CallArgsFromVp(argc, vp);
8161 RootedObject templateObject(cx);
8162 if (!GetTemplateObjectForNative(cx, script, pc, fun->native(), args, &templateObject))
8163 return false;
8164
8165 IonSpew(IonSpew_BaselineIC, " Generating Call_Native stub (fun=%p, cons=%s)",
8166 fun.get(), constructing ? "yes" : "no");
8167 ICCall_Native::Compiler compiler(cx, stub->fallbackMonitorStub()->firstMonitorStub(),
8168 fun, templateObject, constructing, script->pcToOffset(pc));
8169 ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
8170 if (!newStub)
8171 return false;
8172
8173 stub->addNewStub(newStub);
8174 return true;
8175 }
8176
8177 return true;
8178 }
8179
8180 static bool
8181 MaybeCloneFunctionAtCallsite(JSContext *cx, MutableHandleValue callee, HandleScript script,
8182 jsbytecode *pc)
8183 {
8184 RootedFunction fun(cx);
8185 if (!IsFunctionObject(callee, fun.address()))
8186 return true;
8187
8188 if (!fun->hasScript() || !fun->nonLazyScript()->shouldCloneAtCallsite())
8189 return true;
8190
8191 fun = CloneFunctionAtCallsite(cx, fun, script, pc);
8192 if (!fun)
8193 return false;
8194
8195 callee.setObject(*fun);
8196 return true;
8197 }
8198
// Fallback path for call ops (JSOP_CALL/NEW/FUNCALL/FUNAPPLY/EVAL): tries to
// attach an optimized call stub, then performs the call through the VM and
// type-monitors the result.
static bool
DoCallFallback(JSContext *cx, BaselineFrame *frame, ICCall_Fallback *stub_, uint32_t argc,
               Value *vp, MutableHandleValue res)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICCall_Fallback *> stub(frame, stub_);

    // Ensure vp array is rooted - we may GC in here.
    AutoArrayRooter vpRoot(cx, argc + 2, vp);

    RootedScript script(cx, frame->script());
    jsbytecode *pc = stub->icEntry()->pc(script);
    JSOp op = JSOp(*pc);
    FallbackICSpew(cx, stub, "Call(%s)", js_CodeName[op]);

    JS_ASSERT(argc == GET_ARGC(pc));

    // vp layout: [callee, thisv, args...].
    RootedValue callee(cx, vp[0]);
    RootedValue thisv(cx, vp[1]);

    Value *args = vp + 2;

    // Handle funapply with JSOP_ARGUMENTS
    if (op == JSOP_FUNAPPLY && argc == 2 && args[1].isMagic(JS_OPTIMIZED_ARGUMENTS)) {
        if (!GuardFunApplyArgumentsOptimization(cx, frame, callee, args, argc))
            return false;
    }

    // Compute constructing and useNewType flags.
    bool constructing = (op == JSOP_NEW);
    bool newType = types::UseNewType(cx, script, pc);

    // Try attaching a call stub.
    if (!TryAttachCallStub(cx, stub, script, pc, op, argc, vp, constructing, newType))
        return false;

    // Maybe update PC in profiler entry before leaving this script by call.
    if (cx->runtime()->spsProfiler.enabled() && frame->hasPushedSPSFrame())
        cx->runtime()->spsProfiler.updatePC(script, pc);

    if (!MaybeCloneFunctionAtCallsite(cx, &callee, script, pc))
        return false;

    // Dispatch on the kind of call: construction, direct eval, or a plain
    // invoke (which also covers fun_call/fun_apply and indirect eval).
    if (op == JSOP_NEW) {
        if (!InvokeConstructor(cx, callee, argc, args, res.address()))
            return false;
    } else if (op == JSOP_EVAL && frame->scopeChain()->global().valueIsEval(callee)) {
        if (!DirectEval(cx, CallArgsFromVp(argc, vp)))
            return false;
        res.set(vp[0]);
    } else {
        JS_ASSERT(op == JSOP_CALL || op == JSOP_FUNCALL || op == JSOP_FUNAPPLY || op == JSOP_EVAL);
        if (!Invoke(cx, thisv, callee, argc, args, res))
            return false;
    }

    types::TypeScript::Monitor(cx, script, pc, res);

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    // Attach a new TypeMonitor stub for this value.
    ICTypeMonitor_Fallback *typeMonFbStub = stub->fallbackMonitorStub();
    if (!typeMonFbStub->addMonitorStubForValue(cx, script, res))
        return false;
    // Add a type monitor stub for the resulting value.
    if (!stub->addMonitorStubForValue(cx, script, res))
        return false;

    return true;
}
8271
// Duplicate the |argc| arguments plus callee and |this| from beneath the stub
// frame onto the top of the stack, in reverse order, ready for a call.
// |argcReg| is preserved; registers in |regs| are used as temporaries.
void
ICCallStubCompiler::pushCallArguments(MacroAssembler &masm, GeneralRegisterSet regs, Register argcReg)
{
    JS_ASSERT(!regs.has(argcReg));

    // Push the callee and |this| too.
    Register count = regs.takeAny();
    masm.mov(argcReg, count);
    masm.add32(Imm32(2), count);

    // argPtr initially points to the last argument.
    Register argPtr = regs.takeAny();
    masm.mov(BaselineStackReg, argPtr);

    // Skip 4 pointers pushed on top of the arguments: the frame descriptor,
    // return address, old frame pointer and stub reg.
    masm.addPtr(Imm32(STUB_FRAME_SIZE), argPtr);

    // Push all values, starting at the last one.
    Label loop, done;
    masm.bind(&loop);
    masm.branchTest32(Assembler::Zero, count, count, &done);
    {
        masm.pushValue(Address(argPtr, 0));
        masm.addPtr(Imm32(sizeof(Value)), argPtr);

        masm.sub32(Imm32(1), count);
        masm.jump(&loop);
    }
    masm.bind(&done);
}
8303
// Emit guards verifying that the current call is a two-argument
// |js_fun_apply| invocation whose apply target the stub can handle: either
// f.apply(x, arguments) (FunApply_MagicArgs) or f.apply(x, array)
// (FunApply_Array).  On success returns the register holding the unboxed
// apply target (the function in |thisv|); jumps to |failure| otherwise.
Register
ICCallStubCompiler::guardFunApply(MacroAssembler &masm, GeneralRegisterSet regs, Register argcReg,
                                  bool checkNative, FunApplyThing applyThing, Label *failure)
{
    // Ensure argc == 2
    masm.branch32(Assembler::NotEqual, argcReg, Imm32(2), failure);

    // Stack looks like:
    //      [..., CalleeV, ThisV, Arg0V, Arg1V <MaybeReturnReg>]

    // The topmost IC value is the second argument to apply.
    Address secondArgSlot(BaselineStackReg, ICStackValueOffset);
    if (applyThing == FunApply_MagicArgs) {
        // Ensure that the second arg is magic arguments.
        masm.branchTestMagic(Assembler::NotEqual, secondArgSlot, failure);

        // Ensure that this frame doesn't have an arguments object.
        masm.branchTest32(Assembler::NonZero,
                          Address(BaselineFrameReg, BaselineFrame::reverseOffsetOfFlags()),
                          Imm32(BaselineFrame::HAS_ARGS_OBJ),
                          failure);
    } else {
        JS_ASSERT(applyThing == FunApply_Array);

        // Work on a copy of the register set so the registers consumed below
        // remain available in |regs| for the callee/target checks that follow.
        GeneralRegisterSet regsx = regs;

        // Ensure that the second arg is an array.
        ValueOperand secondArgVal = regsx.takeAnyValue();
        masm.loadValue(secondArgSlot, secondArgVal);

        masm.branchTestObject(Assembler::NotEqual, secondArgVal, failure);
        Register secondArgObj = masm.extractObject(secondArgVal, ExtractTemp1);

        regsx.add(secondArgVal);
        regsx.takeUnchecked(secondArgObj);

        masm.branchTestObjClass(Assembler::NotEqual, secondArgObj, regsx.getAny(),
                                &ArrayObject::class_, failure);

        // Get the array elements and ensure that initializedLength == length
        masm.loadPtr(Address(secondArgObj, JSObject::offsetOfElements()), secondArgObj);

        Register lenReg = regsx.takeAny();
        masm.load32(Address(secondArgObj, ObjectElements::offsetOfLength()), lenReg);

        masm.branch32(Assembler::NotEqual,
                      Address(secondArgObj, ObjectElements::offsetOfInitializedLength()),
                      lenReg, failure);

        // Limit the length to something reasonable (huge number of arguments can
        // blow the stack limit).
        masm.branch32(Assembler::Above, lenReg,
                      Imm32(ICCall_ScriptedApplyArray::MAX_ARGS_ARRAY_LENGTH),
                      failure);

        // Ensure no holes. Loop through values in array and make sure none are magic.
        // Start address is secondArgObj, end address is secondArgObj + (lenReg * sizeof(Value))
        JS_STATIC_ASSERT(sizeof(Value) == 8);
        masm.lshiftPtr(Imm32(3), lenReg);
        masm.addPtr(secondArgObj, lenReg);

        Register start = secondArgObj;
        Register end = lenReg;
        Label loop;
        Label endLoop;
        masm.bind(&loop);
        masm.branchPtr(Assembler::AboveOrEqual, start, end, &endLoop);
        masm.branchTestMagic(Assembler::Equal, Address(start, 0), failure);
        masm.addPtr(Imm32(sizeof(Value)), start);
        masm.jump(&loop);
        masm.bind(&endLoop);
    }

    // Stack now confirmed to be like:
    //      [..., CalleeV, ThisV, Arg0V, Arg1V(MagicArguments or Array), <MaybeReturnAddr>]

    // Load the callee, ensure that it's js_fun_apply
    ValueOperand val = regs.takeAnyValue();
    Address calleeSlot(BaselineStackReg, ICStackValueOffset + (3 * sizeof(Value)));
    masm.loadValue(calleeSlot, val);

    masm.branchTestObject(Assembler::NotEqual, val, failure);
    Register callee = masm.extractObject(val, ExtractTemp1);

    masm.branchTestObjClass(Assembler::NotEqual, callee, regs.getAny(), &JSFunction::class_,
                            failure);
    masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), callee);

    masm.branchPtr(Assembler::NotEqual, callee, ImmPtr(js_fun_apply), failure);

    // Load the |thisv|, ensure that it's a scripted function with a valid baseline or ion
    // script, or a native function.
    Address thisSlot(BaselineStackReg, ICStackValueOffset + (2 * sizeof(Value)));
    masm.loadValue(thisSlot, val);

    masm.branchTestObject(Assembler::NotEqual, val, failure);
    Register target = masm.extractObject(val, ExtractTemp1);
    regs.add(val);
    regs.takeUnchecked(target);

    masm.branchTestObjClass(Assembler::NotEqual, target, regs.getAny(), &JSFunction::class_,
                            failure);

    if (checkNative) {
        // Native targets must not be interpreted functions.
        masm.branchIfInterpreted(target, failure);
    } else {
        // Scripted targets must have a script with usable jitcode.
        masm.branchIfFunctionHasNoScript(target, failure);
        Register temp = regs.takeAny();
        masm.loadPtr(Address(target, JSFunction::offsetOfNativeOrScript()), temp);
        masm.loadBaselineOrIonRaw(temp, temp, SequentialExecution, failure);
        regs.add(temp);
    }
    return target;
}
8417
// Push a copy of the caller frame's actual arguments onto the stack, from
// last to first.  Used by the scripted f.apply(x, arguments) stub.
void
ICCallStubCompiler::pushCallerArguments(MacroAssembler &masm, GeneralRegisterSet regs)
{
    // Initialize startReg to point to the start of the caller's arguments
    // vector, and endReg to point just past the end of it.
    Register startReg = regs.takeAny();
    Register endReg = regs.takeAny();
    masm.loadPtr(Address(BaselineFrameReg, 0), startReg);
    masm.loadPtr(Address(startReg, BaselineFrame::offsetOfNumActualArgs()), endReg);
    masm.addPtr(Imm32(BaselineFrame::offsetOfArg(0)), startReg);
    // endReg = startReg + numActualArgs * sizeof(Value).
    JS_STATIC_ASSERT(sizeof(Value) == 8);
    masm.lshiftPtr(Imm32(3), endReg);
    masm.addPtr(startReg, endReg);

    // Copying pre-decrements endReg by 8 until startReg is reached
    Label copyDone;
    Label copyStart;
    masm.bind(&copyStart);
    masm.branchPtr(Assembler::Equal, endReg, startReg, &copyDone);
    masm.subPtr(Imm32(sizeof(Value)), endReg);
    masm.pushValue(Address(endReg, 0));
    masm.jump(&copyStart);
    masm.bind(&copyDone);
}
8442
// Push a copy of the elements of the array value at |arrayVal| onto the
// stack, from last to first.  Used by the scripted f.apply(x, array) stub.
void
ICCallStubCompiler::pushArrayArguments(MacroAssembler &masm, Address arrayVal,
                                       GeneralRegisterSet regs)
{
    // Load start and end address of values to copy.
    // guardFunApply has already guaranteed that the array is packed and contains
    // no holes.
    Register startReg = regs.takeAny();
    Register endReg = regs.takeAny();
    masm.extractObject(arrayVal, startReg);
    masm.loadPtr(Address(startReg, JSObject::offsetOfElements()), startReg);
    masm.load32(Address(startReg, ObjectElements::offsetOfInitializedLength()), endReg);
    // endReg = startReg + initializedLength * sizeof(Value).
    JS_STATIC_ASSERT(sizeof(Value) == 8);
    masm.lshiftPtr(Imm32(3), endReg);
    masm.addPtr(startReg, endReg);

    // Copying pre-decrements endReg by 8 until startReg is reached
    Label copyDone;
    Label copyStart;
    masm.bind(&copyStart);
    masm.branchPtr(Assembler::Equal, endReg, startReg, &copyDone);
    masm.subPtr(Imm32(sizeof(Value)), endReg);
    masm.pushValue(Address(endReg, 0));
    masm.jump(&copyStart);
    masm.bind(&copyDone);
}
8469
// VM trampoline glue for the Call fallback path: the fallback stub's jitcode
// calls into DoCallFallback through this VMFunction descriptor.
typedef bool (*DoCallFallbackFn)(JSContext *, BaselineFrame *, ICCall_Fallback *,
                                 uint32_t, Value *, MutableHandleValue);
static const VMFunction DoCallFallbackInfo = FunctionInfo<DoCallFallbackFn>(DoCallFallback);
8473
// Generate the fallback call stub.  The main path re-pushes the call
// arguments right-to-left and calls into the VM through DoCallFallback.
// Two additional entry points are also emitted (returnFromStubOffset_ and
// returnFromIonOffset_) so execution can resume inside this stub after a
// debug-mode on-stack recompile or an Ion bailout; postGenerateStubCode
// registers their final addresses with the JitCompartment.
bool
ICCall_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    JS_ASSERT(R0 == JSReturnOperand);

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, R1.scratchReg());

    // Values are on the stack left-to-right. Calling convention wants them
    // right-to-left so duplicate them on the stack in reverse order.
    // |this| and callee are pushed last.

    GeneralRegisterSet regs(availableGeneralRegs(0));
    regs.take(R0.scratchReg()); // argc.

    pushCallArguments(masm, regs, R0.scratchReg());

    // Push the VM-call arguments: Value *vp (current stack pointer, i.e. the
    // start of the duplicated arguments), argc, and this stub.
    masm.push(BaselineStackReg);
    masm.push(R0.scratchReg());
    masm.push(BaselineStubReg);

    // Load previous frame pointer, push BaselineFrame *.
    masm.loadPtr(Address(BaselineFrameReg, 0), R0.scratchReg());
    masm.pushBaselineFramePtr(R0.scratchReg(), R0.scratchReg());

    if (!callVM(DoCallFallbackInfo, masm))
        return false;

    leaveStubFrame(masm);
    EmitReturnFromIC(masm);

    // The following asm code is only used either when an Ion inlined frame
    // bails out into baseline jitcode or we need to do on-stack script
    // replacement for debug mode recompile.
    Label leaveStubCommon;
    returnFromStubOffset_ = masm.currentOffset();

    // Load passed-in ThisV into R1 just in case it's needed.  Need to do this before
    // we leave the stub frame since that info will be lost.
    // Current stack:  [...., ThisV, ActualArgc, CalleeToken, Descriptor ]
    masm.loadValue(Address(BaselineStackReg, 3 * sizeof(size_t)), R1);

    // Emit the coming-from-VM specific part of the stub-leaving code.
    leaveStubFrameHead(masm, /* calledIntoIon = */ false);

    // Jump to the common leave stub tail.
    masm.jump(&leaveStubCommon);

    // For Ion bailouts, the return address pushed onto the reconstructed
    // baseline stack points here.
    returnFromIonOffset_ = masm.currentOffset();

    // Same ThisV reload as above, for the Ion-bailout entry path.
    masm.loadValue(Address(BaselineStackReg, 3 * sizeof(size_t)), R1);

    // Emit the coming-from-Ion specific part of the stub-leaving code.
    leaveStubFrameHead(masm, /* calledIntoIon = */ true);

    // Emit the common stub-leaving tail.
    masm.bind(&leaveStubCommon);
    leaveStubFrameCommonTail(masm);

    // R1 and R0 are taken.
    regs = availableGeneralRegs(2);
    Register scratch = regs.takeAny();

    // If this is a |constructing| call, if the callee returns a non-object, we replace it with
    // the |this| object passed in.
    JS_ASSERT(JSReturnOperand == R0);
    Label skipThisReplace;
    masm.load16ZeroExtend(Address(BaselineStubReg, ICStub::offsetOfExtra()), scratch);
    masm.branchTest32(Assembler::Zero, scratch, Imm32(ICCall_Fallback::CONSTRUCTING_FLAG),
                      &skipThisReplace);
    masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);
    masm.moveValue(R1, R0);
#ifdef DEBUG
    masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);
    masm.assumeUnreachable("Failed to return object in constructing call.");
#endif
    masm.bind(&skipThisReplace);

    // At this point, BaselineStubReg points to the ICCall_Fallback stub, which is NOT
    // a MonitoredStub, but rather a MonitoredFallbackStub.  To use EmitEnterTypeMonitorIC,
    // first load the ICTypeMonitor_Fallback stub into BaselineStubReg.  Then, use
    // EmitEnterTypeMonitorIC with a custom struct offset.
    masm.loadPtr(Address(BaselineStubReg, ICMonitoredFallbackStub::offsetOfFallbackMonitorStub()),
                 BaselineStubReg);
    EmitEnterTypeMonitorIC(masm, ICTypeMonitor_Fallback::offsetOfFirstMonitorStub());

    return true;
}
8564
8565 bool
8566 ICCall_Fallback::Compiler::postGenerateStubCode(MacroAssembler &masm, Handle<JitCode *> code)
8567 {
8568 JitCompartment *comp = cx->compartment()->jitCompartment();
8569
8570 CodeOffsetLabel fromIon(returnFromIonOffset_);
8571 fromIon.fixup(&masm);
8572 comp->initBaselineCallReturnFromIonAddr(code->raw() + fromIon.offset());
8573
8574 CodeOffsetLabel fromVM(returnFromStubOffset_);
8575 fromVM.fixup(&masm);
8576 comp->initBaselineCallReturnFromStubAddr(code->raw() + fromVM.offset());
8577
8578 return true;
8579 }
8580
// VM function used by the constructing path of scripted call stubs to
// allocate the |this| object before invoking the callee.
typedef bool (*CreateThisFn)(JSContext *cx, HandleObject callee, MutableHandleValue rval);
static const VMFunction CreateThisInfoBaseline = FunctionInfo<CreateThisFn>(CreateThis);
8583
// Generate an optimized call stub for a scripted callee: either a specific
// script (calleeScript_ set, kind Call_Scripted) or any scripted function
// (kind Call_AnyScripted).  Guards the callee, optionally allocates |this|
// via CreateThis for constructing calls, re-pushes the arguments
// right-to-left, rectifies argument underflow, calls into jitcode, and
// finally type-monitors the result.
bool
ICCallScriptedCompiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    GeneralRegisterSet regs(availableGeneralRegs(0));
    bool canUseTailCallReg = regs.has(BaselineTailCallReg);

    Register argcReg = R0.scratchReg();
    JS_ASSERT(argcReg != ArgumentsRectifierReg);

    regs.take(argcReg);
    regs.take(ArgumentsRectifierReg);
    regs.takeUnchecked(BaselineTailCallReg);

    // Load the callee in R1.
    // Stack Layout: [ ..., CalleeVal, ThisVal, Arg0Val, ..., ArgNVal, +ICStackValueOffset+ ]
    BaseIndex calleeSlot(BaselineStackReg, argcReg, TimesEight, ICStackValueOffset + sizeof(Value));
    masm.loadValue(calleeSlot, R1);
    regs.take(R1);

    // Ensure callee is an object.
    masm.branchTestObject(Assembler::NotEqual, R1, &failure);

    // Ensure callee is a function.
    Register callee = masm.extractObject(R1, ExtractTemp0);
    masm.branchTestObjClass(Assembler::NotEqual, callee, regs.getAny(), &JSFunction::class_,
                            &failure);

    // If calling a specific script, check if the script matches.  Otherwise, ensure that
    // callee function is scripted.  Leave calleeScript in |callee| reg.
    if (calleeScript_) {
        JS_ASSERT(kind == ICStub::Call_Scripted);

        // Callee is a function.  Check if script matches.
        masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), callee);
        Address expectedScript(BaselineStubReg, ICCall_Scripted::offsetOfCalleeScript());
        masm.branchPtr(Assembler::NotEqual, expectedScript, callee, &failure);
    } else {
        if (isConstructing_)
            masm.branchIfNotInterpretedConstructor(callee, regs.getAny(), &failure);
        else
            masm.branchIfFunctionHasNoScript(callee, &failure);
        masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), callee);
    }

    // Load the start of the target JitCode.  For constructing calls the load
    // is deferred until after CreateThis (which can GC); here we only check
    // that jitcode exists.
    Register code;
    if (!isConstructing_) {
        code = regs.takeAny();
        masm.loadBaselineOrIonRaw(callee, code, SequentialExecution, &failure);
    } else {
        Address scriptCode(callee, JSScript::offsetOfBaselineOrIonRaw());
        masm.branchPtr(Assembler::Equal, scriptCode, ImmPtr(nullptr), &failure);
    }

    // We no longer need R1.
    regs.add(R1);

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, regs.getAny());
    if (canUseTailCallReg)
        regs.add(BaselineTailCallReg);

    Label failureLeaveStubFrame;

    if (isConstructing_) {
        // Save argc before call.
        masm.push(argcReg);

        // Stack now looks like:
        //      [..., Callee, ThisV, Arg0V, ..., ArgNV, StubFrameHeader, ArgC ]
        BaseIndex calleeSlot2(BaselineStackReg, argcReg, TimesEight,
                              sizeof(Value) + STUB_FRAME_SIZE + sizeof(size_t));
        masm.loadValue(calleeSlot2, R1);
        masm.push(masm.extractObject(R1, ExtractTemp0));
        if (!callVM(CreateThisInfoBaseline, masm))
            return false;

        // Return of CreateThis must be an object.
#ifdef DEBUG
        Label createdThisIsObject;
        masm.branchTestObject(Assembler::Equal, JSReturnOperand, &createdThisIsObject);
        masm.assumeUnreachable("The return of CreateThis must be an object.");
        masm.bind(&createdThisIsObject);
#endif

        // Reset the register set from here on in.
        JS_ASSERT(JSReturnOperand == R0);
        regs = availableGeneralRegs(0);
        regs.take(R0);
        regs.take(ArgumentsRectifierReg);
        argcReg = regs.takeAny();

        // Restore saved argc so we can use it to calculate the address to save
        // the resulting this object to.
        masm.pop(argcReg);

        // Save "this" value back into pushed arguments on stack.  R0 can be clobbered after that.
        // Stack now looks like:
        //      [..., Callee, ThisV, Arg0V, ..., ArgNV, StubFrameHeader ]
        BaseIndex thisSlot(BaselineStackReg, argcReg, TimesEight, STUB_FRAME_SIZE);
        masm.storeValue(R0, thisSlot);

        // Restore the stub register from the baseline stub frame.
        masm.loadPtr(Address(BaselineStackReg, STUB_FRAME_SAVED_STUB_OFFSET), BaselineStubReg);

        // Reload callee script. Note that a GC triggered by CreateThis may
        // have destroyed the callee BaselineScript and IonScript. CreateThis is
        // safely repeatable though, so in this case we just leave the stub frame
        // and jump to the next stub.

        // Just need to load the script now.
        BaseIndex calleeSlot3(BaselineStackReg, argcReg, TimesEight,
                              sizeof(Value) + STUB_FRAME_SIZE);
        masm.loadValue(calleeSlot3, R0);
        callee = masm.extractObject(R0, ExtractTemp0);
        regs.add(R0);
        regs.takeUnchecked(callee);
        masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), callee);

        code = regs.takeAny();
        masm.loadBaselineOrIonRaw(callee, code, SequentialExecution, &failureLeaveStubFrame);

        // Release callee register, but don't add ExtractTemp0 back into the pool
        // ExtractTemp0 is used later, and if it's allocated to some other register at that
        // point, it will get clobbered when used.
        if (callee != ExtractTemp0)
            regs.add(callee);

        if (canUseTailCallReg)
            regs.addUnchecked(BaselineTailCallReg);
    }
    Register scratch = regs.takeAny();

    // Values are on the stack left-to-right. Calling convention wants them
    // right-to-left so duplicate them on the stack in reverse order.
    // |this| and callee are pushed last.
    pushCallArguments(masm, regs, argcReg);

    // The callee is on top of the stack. Pop and unbox it.
    ValueOperand val = regs.takeAnyValue();
    masm.popValue(val);
    callee = masm.extractObject(val, ExtractTemp0);

    EmitCreateStubFrameDescriptor(masm, scratch);

    // Note that we use Push, not push, so that callIon will align the stack
    // properly on ARM.
    masm.Push(argcReg);
    masm.Push(callee);
    masm.Push(scratch);

    // Handle arguments underflow: if fewer actual args than formals, route
    // the call through the arguments rectifier trampoline.
    Label noUnderflow;
    masm.load16ZeroExtend(Address(callee, JSFunction::offsetOfNargs()), callee);
    masm.branch32(Assembler::AboveOrEqual, argcReg, callee, &noUnderflow);
    {
        // Call the arguments rectifier.
        JS_ASSERT(ArgumentsRectifierReg != code);
        JS_ASSERT(ArgumentsRectifierReg != argcReg);

        JitCode *argumentsRectifier =
            cx->runtime()->jitRuntime()->getArgumentsRectifier(SequentialExecution);

        masm.movePtr(ImmGCPtr(argumentsRectifier), code);
        masm.loadPtr(Address(code, JitCode::offsetOfCode()), code);
        masm.mov(argcReg, ArgumentsRectifierReg);
    }

    masm.bind(&noUnderflow);

    // If needed, update SPS Profiler frame entry before and after call.
    {
        JS_ASSERT(kind == ICStub::Call_Scripted || kind == ICStub::Call_AnyScripted);
        GeneralRegisterSet availRegs = availableGeneralRegs(0);
        availRegs.take(ArgumentsRectifierReg);
        availRegs.take(code);
        emitProfilingUpdate(masm, availRegs, kind == ICStub::Call_Scripted ?
                                                ICCall_Scripted::offsetOfPCOffset()
                                              : ICCall_AnyScripted::offsetOfPCOffset());
    }

    masm.callIon(code);

    // If this is a constructing call, and the callee returns a non-object, replace it with
    // the |this| object passed in.
    if (isConstructing_) {
        Label skipThisReplace;
        masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);

        Register scratchReg = JSReturnOperand.scratchReg();

        // Current stack: [ ARGVALS..., ThisVal, ActualArgc, Callee, Descriptor ]
        // However, we can't use this ThisVal, because it hasn't been traced.  We need to use
        // The ThisVal higher up the stack:
        // Current stack: [ ThisVal, ARGVALS..., ...STUB FRAME...,
        //                  ARGVALS..., ThisVal, ActualArgc, Callee, Descriptor ]
        masm.loadPtr(Address(BaselineStackReg, 2*sizeof(size_t)), scratchReg);

        // scratchReg now contains actualArgCount.  Double it to account for skipping past two
        // pushed copies of argument values.  Additionally, we need to add:
        // STUB_FRAME_SIZE + sizeof(ThisVal) + sizeof(size_t) + sizeof(void *) + sizeof(size_t)
        // for: stub frame, this value, actual argc, callee, and descriptor
        masm.lshiftPtr(Imm32(1), scratchReg);
        BaseIndex reloadThisSlot(BaselineStackReg, scratchReg, TimesEight,
                                 STUB_FRAME_SIZE + sizeof(Value) + 3*sizeof(size_t));
        masm.loadValue(reloadThisSlot, JSReturnOperand);
#ifdef DEBUG
        masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);
        masm.assumeUnreachable("Return of constructing call should be an object.");
#endif
        masm.bind(&skipThisReplace);
    }

    leaveStubFrame(masm, true);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    // Leave stub frame and restore argc for the next stub.
    masm.bind(&failureLeaveStubFrame);
    leaveStubFrame(masm, false);
    if (argcReg != R0.scratchReg())
        masm.mov(argcReg, R0.scratchReg());

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
8813
// Generate an optimized call stub for a specific native (C++) callee.
// Guards that the callee matches the stub's recorded function object,
// builds the native vp[] layout on the stack, constructs a fake exit
// frame, and calls the native directly via the ABI.  The result in vp[0]
// is loaded into R0 and type-monitored.
bool
ICCall_Native::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    GeneralRegisterSet regs(availableGeneralRegs(0));

    Register argcReg = R0.scratchReg();
    regs.take(argcReg);
    regs.takeUnchecked(BaselineTailCallReg);

    // Load the callee in R1.
    BaseIndex calleeSlot(BaselineStackReg, argcReg, TimesEight, ICStackValueOffset + sizeof(Value));
    masm.loadValue(calleeSlot, R1);
    regs.take(R1);

    masm.branchTestObject(Assembler::NotEqual, R1, &failure);

    // Ensure callee matches this stub's callee.
    Register callee = masm.extractObject(R1, ExtractTemp0);
    Address expectedCallee(BaselineStubReg, ICCall_Native::offsetOfCallee());
    masm.branchPtr(Assembler::NotEqual, expectedCallee, callee, &failure);

    regs.add(R1);
    regs.takeUnchecked(callee);

    // Push a stub frame so that we can perform a non-tail call.
    // Note that this leaves the return address in TailCallReg.
    enterStubFrame(masm, regs.getAny());

    // Values are on the stack left-to-right. Calling convention wants them
    // right-to-left so duplicate them on the stack in reverse order.
    // |this| and callee are pushed last.
    pushCallArguments(masm, regs, argcReg);

    if (isConstructing_) {
        // Stack looks like: [ ..., Arg0Val, ThisVal, CalleeVal ]
        // Replace ThisVal with MagicValue(JS_IS_CONSTRUCTING)
        masm.storeValue(MagicValue(JS_IS_CONSTRUCTING), Address(BaselineStackReg, sizeof(Value)));
    }

    masm.checkStackAlignment();

    // Native functions have the signature:
    //
    //    bool (*)(JSContext *, unsigned, Value *vp)
    //
    // Where vp[0] is space for callee/return value, vp[1] is |this|, and vp[2] onward
    // are the function arguments.

    // Initialize vp.
    Register vpReg = regs.takeAny();
    masm.movePtr(StackPointer, vpReg);

    // Construct a native exit frame.
    masm.push(argcReg);

    Register scratch = regs.takeAny();
    EmitCreateStubFrameDescriptor(masm, scratch);
    masm.push(scratch);
    masm.push(BaselineTailCallReg);
    masm.enterFakeExitFrame();

    // If needed, update SPS Profiler frame entry.  At this point, BaselineTailCallReg
    // and scratch can be clobbered.
    emitProfilingUpdate(masm, BaselineTailCallReg, scratch, ICCall_Native::offsetOfPCOffset());

    // Execute call: native(cx, argc, vp).
    masm.setupUnalignedABICall(3, scratch);
    masm.loadJSContext(scratch);
    masm.passABIArg(scratch);
    masm.passABIArg(argcReg);
    masm.passABIArg(vpReg);

#ifdef JS_ARM_SIMULATOR
    // The simulator requires VM calls to be redirected to a special swi
    // instruction to handle them, so we store the redirected pointer in the
    // stub and use that instead of the original one.
    masm.callWithABI(Address(BaselineStubReg, ICCall_Native::offsetOfNative()));
#else
    masm.callWithABI(Address(callee, JSFunction::offsetOfNativeOrScript()));
#endif

    // Test for failure.
    masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());

    // Load the return value into R0.
    masm.loadValue(Address(StackPointer, IonNativeExitFrameLayout::offsetOfResult()), R0);

    leaveStubFrame(masm);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
8911
// Generate an optimized stub for |fun.apply(thisv, array)| with a dense
// array of arguments.  Guards the whole fun_apply shape via guardFunApply,
// spreads the array elements onto the stack, and calls the target's
// jitcode (rectifying underflow if needed).
bool
ICCall_ScriptedApplyArray::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    GeneralRegisterSet regs(availableGeneralRegs(0));

    Register argcReg = R0.scratchReg();
    regs.take(argcReg);
    regs.takeUnchecked(BaselineTailCallReg);
    regs.takeUnchecked(ArgumentsRectifierReg);

    //
    // Validate inputs
    //

    Register target = guardFunApply(masm, regs, argcReg, /*checkNative=*/false,
                                    FunApply_Array, &failure);
    if (regs.has(target)) {
        regs.take(target);
    } else {
        // If target is already a reserved reg, take another register for it, because it's
        // probably currently an ExtractTemp, which might get clobbered later.
        Register targetTemp = regs.takeAny();
        masm.movePtr(target, targetTemp);
        target = targetTemp;
    }

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, regs.getAny());

    //
    // Push arguments
    //

    // Stack now looks like:
    //                                      BaselineFrameReg -------------------.
    //                                                                          v
    //      [..., js_fun_apply, TargetV, TargetThisV, ArgsArrayV, StubFrameHeader]

    // Push all array elements onto the stack:
    Address arrayVal(BaselineFrameReg, STUB_FRAME_SIZE);
    pushArrayArguments(masm, arrayVal, regs);

    // Stack now looks like:
    //                                      BaselineFrameReg -------------------.
    //                                                                          v
    //      [..., js_fun_apply, TargetV, TargetThisV, ArgsArrayV, StubFrameHeader,
    //       PushedArgN, ..., PushedArg0]
    // Can't fail after this, so it's ok to clobber argcReg.

    // Push actual argument 0 as |thisv| for call.
    masm.pushValue(Address(BaselineFrameReg, STUB_FRAME_SIZE + sizeof(Value)));

    // All pushes after this use Push instead of push to make sure ARM can align
    // stack properly for call.
    Register scratch = regs.takeAny();
    EmitCreateStubFrameDescriptor(masm, scratch);

    // Reload argc from length of array.
    masm.extractObject(arrayVal, argcReg);
    masm.loadPtr(Address(argcReg, JSObject::offsetOfElements()), argcReg);
    masm.load32(Address(argcReg, ObjectElements::offsetOfInitializedLength()), argcReg);

    masm.Push(argcReg);
    masm.Push(target);
    masm.Push(scratch);

    // Load nargs into scratch for underflow check, and then load jitcode pointer into target.
    masm.load16ZeroExtend(Address(target, JSFunction::offsetOfNargs()), scratch);
    masm.loadPtr(Address(target, JSFunction::offsetOfNativeOrScript()), target);
    masm.loadBaselineOrIonRaw(target, target, SequentialExecution, nullptr);

    // Handle arguments underflow.
    Label noUnderflow;
    masm.branch32(Assembler::AboveOrEqual, argcReg, scratch, &noUnderflow);
    {
        // Call the arguments rectifier.
        JS_ASSERT(ArgumentsRectifierReg != target);
        JS_ASSERT(ArgumentsRectifierReg != argcReg);

        JitCode *argumentsRectifier =
            cx->runtime()->jitRuntime()->getArgumentsRectifier(SequentialExecution);

        masm.movePtr(ImmGCPtr(argumentsRectifier), target);
        masm.loadPtr(Address(target, JitCode::offsetOfCode()), target);
        masm.mov(argcReg, ArgumentsRectifierReg);
    }
    masm.bind(&noUnderflow);
    regs.add(argcReg);

    // If needed, update SPS Profiler frame entry.  At this point, BaselineTailCallReg
    // and scratch can be clobbered.
    // NOTE(review): this is the ScriptedApplyArray stub but reads
    // ICCall_ScriptedApplyArguments::offsetOfPCOffset(); this is only correct
    // if both stub classes place pcOffset_ at the same offset — confirm
    // against BaselineIC.h.
    emitProfilingUpdate(masm, regs.getAny(), scratch,
                        ICCall_ScriptedApplyArguments::offsetOfPCOffset());

    // Do call
    masm.callIon(target);
    leaveStubFrame(masm, true);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
9018
// Generate an optimized stub for |fun.apply(thisv, arguments)| where the
// second argument is the magic lazy-arguments value.  Guards the fun_apply
// shape via guardFunApply, re-pushes the caller frame's actual arguments,
// and calls the target's jitcode (rectifying underflow if needed).
bool
ICCall_ScriptedApplyArguments::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    GeneralRegisterSet regs(availableGeneralRegs(0));

    Register argcReg = R0.scratchReg();
    regs.take(argcReg);
    regs.takeUnchecked(BaselineTailCallReg);
    regs.takeUnchecked(ArgumentsRectifierReg);

    //
    // Validate inputs
    //

    Register target = guardFunApply(masm, regs, argcReg, /*checkNative=*/false,
                                    FunApply_MagicArgs, &failure);
    if (regs.has(target)) {
        regs.take(target);
    } else {
        // If target is already a reserved reg, take another register for it, because it's
        // probably currently an ExtractTemp, which might get clobbered later.
        Register targetTemp = regs.takeAny();
        masm.movePtr(target, targetTemp);
        target = targetTemp;
    }

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, regs.getAny());

    //
    // Push arguments
    //

    // Stack now looks like:
    //      [..., js_fun_apply, TargetV, TargetThisV, MagicArgsV, StubFrameHeader]

    // Push all arguments supplied to caller function onto the stack.
    pushCallerArguments(masm, regs);

    // Stack now looks like:
    //                                      BaselineFrameReg -------------------.
    //                                                                          v
    //      [..., js_fun_apply, TargetV, TargetThisV, MagicArgsV, StubFrameHeader,
    //       PushedArgN, ..., PushedArg0]
    // Can't fail after this, so it's ok to clobber argcReg.

    // Push actual argument 0 as |thisv| for call.
    masm.pushValue(Address(BaselineFrameReg, STUB_FRAME_SIZE + sizeof(Value)));

    // All pushes after this use Push instead of push to make sure ARM can align
    // stack properly for call.
    Register scratch = regs.takeAny();
    EmitCreateStubFrameDescriptor(masm, scratch);

    // Reload argc from the caller BaselineFrame's numActualArgs.
    masm.loadPtr(Address(BaselineFrameReg, 0), argcReg);
    masm.loadPtr(Address(argcReg, BaselineFrame::offsetOfNumActualArgs()), argcReg);
    masm.Push(argcReg);
    masm.Push(target);
    masm.Push(scratch);

    // Load nargs into scratch for underflow check, and then load jitcode pointer into target.
    masm.load16ZeroExtend(Address(target, JSFunction::offsetOfNargs()), scratch);
    masm.loadPtr(Address(target, JSFunction::offsetOfNativeOrScript()), target);
    masm.loadBaselineOrIonRaw(target, target, SequentialExecution, nullptr);

    // Handle arguments underflow.
    Label noUnderflow;
    masm.branch32(Assembler::AboveOrEqual, argcReg, scratch, &noUnderflow);
    {
        // Call the arguments rectifier.
        JS_ASSERT(ArgumentsRectifierReg != target);
        JS_ASSERT(ArgumentsRectifierReg != argcReg);

        JitCode *argumentsRectifier =
            cx->runtime()->jitRuntime()->getArgumentsRectifier(SequentialExecution);

        masm.movePtr(ImmGCPtr(argumentsRectifier), target);
        masm.loadPtr(Address(target, JitCode::offsetOfCode()), target);
        masm.mov(argcReg, ArgumentsRectifierReg);
    }
    masm.bind(&noUnderflow);
    regs.add(argcReg);

    // If needed, update SPS Profiler frame entry.  At this point, BaselineTailCallReg
    // and scratch can be clobbered.
    emitProfilingUpdate(masm, regs.getAny(), scratch,
                        ICCall_ScriptedApplyArguments::offsetOfPCOffset());

    // Do call
    masm.callIon(target);
    leaveStubFrame(masm, true);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
9119
// Generate an optimized stub for |fun.call(...)| where the callee is the
// native js_fun_call and |this| is a scripted function with jitcode.
// Discards the fun_call wrapper, shifts argument 0 into the |this| slot
// (or pushes |undefined| when there are no args), and calls the scripted
// target directly.
bool
ICCall_ScriptedFunCall::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;
    GeneralRegisterSet regs(availableGeneralRegs(0));
    bool canUseTailCallReg = regs.has(BaselineTailCallReg);

    Register argcReg = R0.scratchReg();
    JS_ASSERT(argcReg != ArgumentsRectifierReg);

    regs.take(argcReg);
    regs.take(ArgumentsRectifierReg);
    regs.takeUnchecked(BaselineTailCallReg);

    // Load the callee in R1.
    // Stack Layout: [ ..., CalleeVal, ThisVal, Arg0Val, ..., ArgNVal, +ICStackValueOffset+ ]
    BaseIndex calleeSlot(BaselineStackReg, argcReg, TimesEight, ICStackValueOffset + sizeof(Value));
    masm.loadValue(calleeSlot, R1);
    regs.take(R1);

    // Ensure callee is js_fun_call.
    masm.branchTestObject(Assembler::NotEqual, R1, &failure);

    Register callee = masm.extractObject(R1, ExtractTemp0);
    masm.branchTestObjClass(Assembler::NotEqual, callee, regs.getAny(), &JSFunction::class_,
                            &failure);
    masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), callee);
    masm.branchPtr(Assembler::NotEqual, callee, ImmPtr(js_fun_call), &failure);

    // Ensure |this| is a scripted function with JIT code.
    BaseIndex thisSlot(BaselineStackReg, argcReg, TimesEight, ICStackValueOffset);
    masm.loadValue(thisSlot, R1);

    masm.branchTestObject(Assembler::NotEqual, R1, &failure);
    callee = masm.extractObject(R1, ExtractTemp0);

    masm.branchTestObjClass(Assembler::NotEqual, callee, regs.getAny(), &JSFunction::class_,
                            &failure);
    masm.branchIfFunctionHasNoScript(callee, &failure);
    masm.loadPtr(Address(callee, JSFunction::offsetOfNativeOrScript()), callee);

    // Load the start of the target JitCode.
    Register code = regs.takeAny();
    masm.loadBaselineOrIonRaw(callee, code, SequentialExecution, &failure);

    // We no longer need R1.
    regs.add(R1);

    // Push a stub frame so that we can perform a non-tail call.
    enterStubFrame(masm, regs.getAny());
    if (canUseTailCallReg)
        regs.add(BaselineTailCallReg);

    // Values are on the stack left-to-right. Calling convention wants them
    // right-to-left so duplicate them on the stack in reverse order.
    pushCallArguments(masm, regs, argcReg);

    // Discard callee (function.call).
    masm.addPtr(Imm32(sizeof(Value)), StackPointer);

    // Pop scripted callee (the original |this|).
    ValueOperand val = regs.takeAnyValue();
    masm.popValue(val);

    // Decrement argc if argc > 0. If argc == 0, push |undefined| as |this|.
    Label zeroArgs, done;
    masm.branchTest32(Assembler::Zero, argcReg, argcReg, &zeroArgs);
    masm.sub32(Imm32(1), argcReg);
    masm.jump(&done);

    masm.bind(&zeroArgs);
    masm.pushValue(UndefinedValue());
    masm.bind(&done);

    // Unbox scripted callee.
    callee = masm.extractObject(val, ExtractTemp0);

    Register scratch = regs.takeAny();
    EmitCreateStubFrameDescriptor(masm, scratch);

    // Note that we use Push, not push, so that callIon will align the stack
    // properly on ARM.
    masm.Push(argcReg);
    masm.Push(callee);
    masm.Push(scratch);

    // Handle arguments underflow.
    Label noUnderflow;
    masm.load16ZeroExtend(Address(callee, JSFunction::offsetOfNargs()), callee);
    masm.branch32(Assembler::AboveOrEqual, argcReg, callee, &noUnderflow);
    {
        // Call the arguments rectifier.
        JS_ASSERT(ArgumentsRectifierReg != code);
        JS_ASSERT(ArgumentsRectifierReg != argcReg);

        JitCode *argumentsRectifier =
            cx->runtime()->jitRuntime()->getArgumentsRectifier(SequentialExecution);

        masm.movePtr(ImmGCPtr(argumentsRectifier), code);
        masm.loadPtr(Address(code, JitCode::offsetOfCode()), code);
        masm.mov(argcReg, ArgumentsRectifierReg);
    }

    masm.bind(&noUnderflow);

    // If needed, update SPS Profiler frame entry.
    {
        // Need to avoid using ArgumentsRectifierReg and code register.
        GeneralRegisterSet availRegs = availableGeneralRegs(0);
        availRegs.take(ArgumentsRectifierReg);
        availRegs.take(code);
        emitProfilingUpdate(masm, availRegs, ICCall_ScriptedFunCall::offsetOfPCOffset());
    }

    masm.callIon(code);

    leaveStubFrame(masm, true);

    // Enter type monitor IC to type-check result.
    EmitEnterTypeMonitorIC(masm);

    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
9245
9246 static bool
9247 DoubleValueToInt32ForSwitch(Value *v)
9248 {
9249 double d = v->toDouble();
9250 int32_t truncated = int32_t(d);
9251 if (d != double(truncated))
9252 return false;
9253
9254 v->setInt32(truncated);
9255 return true;
9256 }
9257
// Generate the tableswitch stub: range-check the (int32 or exactly-int32
// double) key against [min_, min_ + length_) and jump to the matching
// native-code target from table_, falling back to defaultTarget_ for
// anything else (including non-number inputs).
bool
ICTableSwitch::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label isInt32, notInt32, outOfRange;
    Register scratch = R1.scratchReg();

    masm.branchTestInt32(Assembler::NotEqual, R0, &notInt32);

    Register key = masm.extractInt32(R0, ExtractTemp0);

    masm.bind(&isInt32);

    // Rebase the key to table index space: key -= min_.
    masm.load32(Address(BaselineStubReg, offsetof(ICTableSwitch, min_)), scratch);
    masm.sub32(scratch, key);
    // Unsigned compare: length_ <= key means out of range.  A key below min_
    // wraps to a large unsigned value, so it fails this check too.
    masm.branch32(Assembler::BelowOrEqual,
                  Address(BaselineStubReg, offsetof(ICTableSwitch, length_)), key, &outOfRange);

    // Fetch the target native code address from the jump table.
    masm.loadPtr(Address(BaselineStubReg, offsetof(ICTableSwitch, table_)), scratch);
    masm.loadPtr(BaseIndex(scratch, key, ScalePointer), scratch);

    // Jump to the case target by rewriting the IC return address.
    EmitChangeICReturnAddress(masm, scratch);
    EmitReturnFromIC(masm);

    masm.bind(&notInt32);

    masm.branchTestDouble(Assembler::NotEqual, R0, &outOfRange);
    if (cx->runtime()->jitSupportsFloatingPoint) {
        masm.unboxDouble(R0, FloatReg0);

        // N.B. -0 === 0, so convert -0 to a 0 int32.
        masm.convertDoubleToInt32(FloatReg0, key, &outOfRange, /* negativeZeroCheck = */ false);
    } else {
        // No FP support: convert through the C++ helper.  Pass pointer to
        // double value.
        masm.pushValue(R0);
        masm.movePtr(StackPointer, R0.scratchReg());

        masm.setupUnalignedABICall(1, scratch);
        masm.passABIArg(R0.scratchReg());
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, DoubleValueToInt32ForSwitch));

        // If the function returns |true|, the value has been converted to
        // int32.
        masm.mov(ReturnReg, scratch);
        masm.popValue(R0);
        masm.branchIfFalseBool(scratch, &outOfRange);
        masm.unboxInt32(R0, key);
    }
    masm.jump(&isInt32);

    masm.bind(&outOfRange);

    masm.loadPtr(Address(BaselineStubReg, offsetof(ICTableSwitch, defaultTarget_)), scratch);

    EmitChangeICReturnAddress(masm, scratch);
    EmitReturnFromIC(masm);
    return true;
}
9315
// Build an ICTableSwitch stub for the tableswitch op at pc_.  Walks the
// bytecode operands (default offset, low, high, then one jump offset per
// case) and materializes a jump table of bytecode targets; a zero case
// offset means "use the default target".  The table is later converted to
// native code addresses by fixupJumpTable.
ICStub *
ICTableSwitch::Compiler::getStub(ICStubSpace *space)
{
    JitCode *code = getStubCode();
    if (!code)
        return nullptr;

    jsbytecode *pc = pc_;
    pc += JUMP_OFFSET_LEN;              // skip the default-target offset
    int32_t low = GET_JUMP_OFFSET(pc);
    pc += JUMP_OFFSET_LEN;
    int32_t high = GET_JUMP_OFFSET(pc);
    int32_t length = high - low + 1;    // number of case entries
    pc += JUMP_OFFSET_LEN;              // pc now points at the first case offset

    void **table = (void**) space->alloc(sizeof(void*) * length);
    if (!table)
        return nullptr;

    // The default-target offset is the first operand, read relative to pc_.
    jsbytecode *defaultpc = pc_ + GET_JUMP_OFFSET(pc_);

    for (int32_t i = 0; i < length; i++) {
        int32_t off = GET_JUMP_OFFSET(pc);
        if (off)
            table[i] = pc_ + off;
        else
            table[i] = defaultpc;       // offset 0 marks an absent case
        pc += JUMP_OFFSET_LEN;
    }

    return ICTableSwitch::New(space, code, table, low, length, defaultpc);
}
9348
9349 void
9350 ICTableSwitch::fixupJumpTable(JSScript *script, BaselineScript *baseline)
9351 {
9352 defaultTarget_ = baseline->nativeCodeForPC(script, (jsbytecode *) defaultTarget_);
9353
9354 for (int32_t i = 0; i < length_; i++)
9355 table_[i] = baseline->nativeCodeForPC(script, (jsbytecode *) table_[i]);
9356 }
9357
9358 //
9359 // IteratorNew_Fallback
9360 //
9361
// Fallback for the iterator-new op: turn |value| into an iterator object
// in |res|, using the iteration flags encoded as a uint8 operand in the
// bytecode at this IC's pc.
static bool
DoIteratorNewFallback(JSContext *cx, BaselineFrame *frame, ICIteratorNew_Fallback *stub,
                      HandleValue value, MutableHandleValue res)
{
    jsbytecode *pc = stub->icEntry()->pc(frame->script());
    FallbackICSpew(cx, stub, "IteratorNew");

    uint8_t flags = GET_UINT8(pc);
    // ValueToIterator operates in place on |res|, so seed it with the input.
    res.set(value);
    return ValueToIterator(cx, flags, res);
}
9373
// VM-call descriptor for DoIteratorNewFallback.  PopValues(1) discards the
// extra copy of R0 that the stub pushes to keep the stack synced for the
// decompiler.
typedef bool (*DoIteratorNewFallbackFn)(JSContext *, BaselineFrame *, ICIteratorNew_Fallback *,
                                        HandleValue, MutableHandleValue);
static const VMFunction DoIteratorNewFallbackInfo =
    FunctionInfo<DoIteratorNewFallbackFn>(DoIteratorNewFallback, PopValues(1));
9378
// Tail-call into DoIteratorNewFallback with (frame, stub, value).
bool
ICIteratorNew_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    // Sync stack for the decompiler.
    masm.pushValue(R0);

    // Push the VM-call arguments: the value again, the stub, and the frame.
    masm.pushValue(R0);
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(DoIteratorNewFallbackInfo, masm);
}
9393
9394 //
9395 // IteratorMore_Fallback
9396 //
9397
// Fallback path for "more iterations?" checks. Calls the generic IteratorMore
// and, for native property iterators, attaches an optimized
// IteratorMore_Native stub for subsequent hits.
static bool
DoIteratorMoreFallback(JSContext *cx, BaselineFrame *frame, ICIteratorMore_Fallback *stub_,
                       HandleValue iterValue, MutableHandleValue res)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICIteratorMore_Fallback *> stub(frame, stub_);

    FallbackICSpew(cx, stub, "IteratorMore");

    // NOTE(review): iterValue is assumed to already hold an object here
    // (iterator creation always produces one) — toObject() is unchecked.
    bool cond;
    if (!IteratorMore(cx, &iterValue.toObject(), &cond, res))
        return false;
    res.setBoolean(cond);

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    // Attach the optimized native-iterator stub at most once.
    if (iterValue.toObject().is<PropertyIteratorObject>() &&
        !stub->hasStub(ICStub::IteratorMore_Native))
    {
        ICIteratorMore_Native::Compiler compiler(cx);
        ICStub *newStub = compiler.getStub(compiler.getStubSpace(frame->script()));
        if (!newStub)
            return false;
        stub->addNewStub(newStub);
    }

    return true;
}
9428
// VM-function wrapper for DoIteratorMoreFallback.
typedef bool (*DoIteratorMoreFallbackFn)(JSContext *, BaselineFrame *, ICIteratorMore_Fallback *,
                                         HandleValue, MutableHandleValue);
static const VMFunction DoIteratorMoreFallbackInfo =
    FunctionInfo<DoIteratorMoreFallbackFn>(DoIteratorMoreFallback);
9433
// Emit the fallback stub: tail-call DoIteratorMoreFallback with
// (frame, stub, iterValue), pushed in reverse order.
bool
ICIteratorMore_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    masm.pushValue(R0);
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(DoIteratorMoreFallbackInfo, masm);
}
9445
9446 //
9447 // IteratorMore_Native
9448 //
9449
// Optimized "more iterations?" stub for native property iterators: compares
// the iterator's cursor against its end pointer without a VM call.
bool
ICIteratorMore_Native::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;

    Register obj = masm.extractObject(R0, ExtractTemp0);

    GeneralRegisterSet regs(availableGeneralRegs(1));
    Register nativeIterator = regs.takeAny();
    Register scratch = regs.takeAny();

    // Guard: object must be a PropertyIteratorObject; then load the
    // NativeIterator from its private slot.
    masm.branchTestObjClass(Assembler::NotEqual, obj, scratch,
                            &PropertyIteratorObject::class_, &failure);
    masm.loadObjPrivate(obj, JSObject::ITER_CLASS_NFIXED_SLOTS, nativeIterator);

    // for-each style iteration (JSITER_FOREACH) takes the generic path.
    masm.branchTest32(Assembler::NonZero, Address(nativeIterator, offsetof(NativeIterator, flags)),
                      Imm32(JSITER_FOREACH), &failure);

    // Set output to true if props_cursor < props_end.
    masm.loadPtr(Address(nativeIterator, offsetof(NativeIterator, props_end)), scratch);
    Address cursorAddr = Address(nativeIterator, offsetof(NativeIterator, props_cursor));
    masm.cmpPtrSet(Assembler::LessThan, cursorAddr, scratch, scratch);

    masm.tagValue(JSVAL_TYPE_BOOLEAN, scratch, R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
9481
9482 //
9483 // IteratorNext_Fallback
9484 //
9485
// Fallback path for fetching the next iterator value. Calls the generic
// IteratorNext and, for native property iterators, attaches an optimized
// IteratorNext_Native stub.
static bool
DoIteratorNextFallback(JSContext *cx, BaselineFrame *frame, ICIteratorNext_Fallback *stub_,
                       HandleValue iterValue, MutableHandleValue res)
{
    // This fallback stub may trigger debug mode toggling.
    DebugModeOSRVolatileStub<ICIteratorNext_Fallback *> stub(frame, stub_);

    FallbackICSpew(cx, stub, "IteratorNext");

    RootedObject iteratorObject(cx, &iterValue.toObject());
    if (!IteratorNext(cx, iteratorObject, res))
        return false;

    // Check if debug mode toggling made the stub invalid.
    if (stub.invalid())
        return true;

    // Remember that this site can yield non-string values; the optimized
    // native stub below only produces strings.
    if (!res.isString() && !stub->hasNonStringResult())
        stub->setHasNonStringResult();

    if (iteratorObject->is<PropertyIteratorObject>() &&
        !stub->hasStub(ICStub::IteratorNext_Native))
    {
        ICIteratorNext_Native::Compiler compiler(cx);
        ICStub *newStub = compiler.getStub(compiler.getStubSpace(frame->script()));
        if (!newStub)
            return false;
        stub->addNewStub(newStub);
    }

    return true;
}
9518
// VM-function wrapper for DoIteratorNextFallback.
typedef bool (*DoIteratorNextFallbackFn)(JSContext *, BaselineFrame *, ICIteratorNext_Fallback *,
                                         HandleValue, MutableHandleValue);
static const VMFunction DoIteratorNextFallbackInfo =
    FunctionInfo<DoIteratorNextFallbackFn>(DoIteratorNextFallback);
9523
// Emit the fallback stub: tail-call DoIteratorNextFallback with
// (frame, stub, iterValue), pushed in reverse order.
bool
ICIteratorNext_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    masm.pushValue(R0);
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(DoIteratorNextFallbackInfo, masm);
}
9535
9536 //
9537 // IteratorNext_Native
9538 //
9539
// Optimized "next value" stub for native property iterators: loads the
// current property-name string and bumps the cursor, without a VM call.
bool
ICIteratorNext_Native::Compiler::generateStubCode(MacroAssembler &masm)
{
    Label failure;

    Register obj = masm.extractObject(R0, ExtractTemp0);

    GeneralRegisterSet regs(availableGeneralRegs(1));
    Register nativeIterator = regs.takeAny();
    Register scratch = regs.takeAny();

    // Guard: object must be a PropertyIteratorObject; then load the
    // NativeIterator from its private slot.
    masm.branchTestObjClass(Assembler::NotEqual, obj, scratch,
                            &PropertyIteratorObject::class_, &failure);
    masm.loadObjPrivate(obj, JSObject::ITER_CLASS_NFIXED_SLOTS, nativeIterator);

    // for-each style iteration (JSITER_FOREACH) takes the generic path.
    masm.branchTest32(Assembler::NonZero, Address(nativeIterator, offsetof(NativeIterator, flags)),
                      Imm32(JSITER_FOREACH), &failure);

    // Get cursor, next string.
    masm.loadPtr(Address(nativeIterator, offsetof(NativeIterator, props_cursor)), scratch);
    masm.loadPtr(Address(scratch, 0), scratch);

    // Increase the cursor.
    masm.addPtr(Imm32(sizeof(JSString *)),
                Address(nativeIterator, offsetof(NativeIterator, props_cursor)));

    masm.tagValue(JSVAL_TYPE_STRING, scratch, R0);
    EmitReturnFromIC(masm);

    // Failure case - jump to next stub
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
9574
9575 //
9576 // IteratorClose_Fallback
9577 //
9578
9579 static bool
9580 DoIteratorCloseFallback(JSContext *cx, ICIteratorClose_Fallback *stub, HandleValue iterValue)
9581 {
9582 FallbackICSpew(cx, stub, "IteratorClose");
9583
9584 RootedObject iteratorObject(cx, &iterValue.toObject());
9585 return CloseIterator(cx, iteratorObject);
9586 }
9587
// VM-function wrapper for DoIteratorCloseFallback.
typedef bool (*DoIteratorCloseFallbackFn)(JSContext *, ICIteratorClose_Fallback *, HandleValue);
static const VMFunction DoIteratorCloseFallbackInfo =
    FunctionInfo<DoIteratorCloseFallbackFn>(DoIteratorCloseFallback);
9591
// Emit the fallback stub: tail-call DoIteratorCloseFallback with
// (stub, iterValue). No frame pointer is needed here.
bool
ICIteratorClose_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    masm.pushValue(R0);
    masm.push(BaselineStubReg);

    return tailCallVM(DoIteratorCloseFallbackInfo, masm);
}
9602
9603 //
9604 // InstanceOf_Fallback
9605 //
9606
9607 static bool
9608 DoInstanceOfFallback(JSContext *cx, ICInstanceOf_Fallback *stub,
9609 HandleValue lhs, HandleValue rhs,
9610 MutableHandleValue res)
9611 {
9612 FallbackICSpew(cx, stub, "InstanceOf");
9613
9614 if (!rhs.isObject()) {
9615 js_ReportValueError(cx, JSMSG_BAD_INSTANCEOF_RHS, -1, rhs, NullPtr());
9616 return false;
9617 }
9618
9619 RootedObject obj(cx, &rhs.toObject());
9620
9621 // For functions, keep track of the |prototype| property in type information,
9622 // for use during Ion compilation.
9623 if (obj->is<JSFunction>() && IsIonEnabled(cx))
9624 types::EnsureTrackPropertyTypes(cx, obj, NameToId(cx->names().prototype));
9625
9626 bool cond = false;
9627 if (!HasInstance(cx, obj, lhs, &cond))
9628 return false;
9629
9630 res.setBoolean(cond);
9631 return true;
9632 }
9633
// VM-function wrapper for DoInstanceOfFallback. PopValues(2) discards the
// two values synced for the decompiler before returning.
typedef bool (*DoInstanceOfFallbackFn)(JSContext *, ICInstanceOf_Fallback *, HandleValue, HandleValue,
                                       MutableHandleValue);
static const VMFunction DoInstanceOfFallbackInfo =
    FunctionInfo<DoInstanceOfFallbackFn>(DoInstanceOfFallback, PopValues(2));
9638
// Emit the fallback stub: tail-call DoInstanceOfFallback with
// (stub, lhs, rhs), pushed in reverse order.
bool
ICInstanceOf_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    // Sync stack for the decompiler.
    masm.pushValue(R0);
    masm.pushValue(R1);

    // VM-call arguments, last argument first.
    masm.pushValue(R1);
    masm.pushValue(R0);
    masm.push(BaselineStubReg);

    return tailCallVM(DoInstanceOfFallbackInfo, masm);
}
9654
9655 //
9656 // TypeOf_Fallback
9657 //
9658
9659 static bool
9660 DoTypeOfFallback(JSContext *cx, BaselineFrame *frame, ICTypeOf_Fallback *stub, HandleValue val,
9661 MutableHandleValue res)
9662 {
9663 FallbackICSpew(cx, stub, "TypeOf");
9664 JSType type = js::TypeOfValue(val);
9665 RootedString string(cx, TypeName(type, cx->names()));
9666
9667 res.setString(string);
9668
9669 JS_ASSERT(type != JSTYPE_NULL);
9670 if (type != JSTYPE_OBJECT && type != JSTYPE_FUNCTION) {
9671 // Create a new TypeOf stub.
9672 IonSpew(IonSpew_BaselineIC, " Generating TypeOf stub for JSType (%d)", (int) type);
9673 ICTypeOf_Typed::Compiler compiler(cx, type, string);
9674 ICStub *typeOfStub = compiler.getStub(compiler.getStubSpace(frame->script()));
9675 if (!typeOfStub)
9676 return false;
9677 stub->addNewStub(typeOfStub);
9678 }
9679
9680 return true;
9681 }
9682
// VM-function wrapper for DoTypeOfFallback.
typedef bool (*DoTypeOfFallbackFn)(JSContext *, BaselineFrame *frame, ICTypeOf_Fallback *,
                                   HandleValue, MutableHandleValue);
static const VMFunction DoTypeOfFallbackInfo =
    FunctionInfo<DoTypeOfFallbackFn>(DoTypeOfFallback);
9687
// Emit the fallback stub: tail-call DoTypeOfFallback with
// (frame, stub, val), pushed in reverse order.
bool
ICTypeOf_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    masm.pushValue(R0);
    masm.push(BaselineStubReg);
    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());

    return tailCallVM(DoTypeOfFallbackInfo, masm);
}
9699
// Typed |typeof| stub: guards that the input value matches this stub's
// JSType and, if so, returns the precomputed type-name string.
bool
ICTypeOf_Typed::Compiler::generateStubCode(MacroAssembler &masm)
{
    // Object-like and null answers are never handled by a typed stub.
    JS_ASSERT(type_ != JSTYPE_NULL);
    JS_ASSERT(type_ != JSTYPE_FUNCTION);
    JS_ASSERT(type_ != JSTYPE_OBJECT);

    Label failure;
    switch(type_) {
      case JSTYPE_VOID:
        masm.branchTestUndefined(Assembler::NotEqual, R0, &failure);
        break;

      case JSTYPE_STRING:
        masm.branchTestString(Assembler::NotEqual, R0, &failure);
        break;

      case JSTYPE_NUMBER:
        masm.branchTestNumber(Assembler::NotEqual, R0, &failure);
        break;

      case JSTYPE_BOOLEAN:
        masm.branchTestBoolean(Assembler::NotEqual, R0, &failure);
        break;

      default:
        MOZ_ASSUME_UNREACHABLE("Unexpected type");
    }

    // Type matched: return the interned type-name string.
    masm.movePtr(ImmGCPtr(typeString_), R0.scratchReg());
    masm.tagValue(JSVAL_TYPE_STRING, R0.scratchReg(), R0);
    EmitReturnFromIC(masm);

    // Guard failed - fall through to the next stub in the chain.
    masm.bind(&failure);
    EmitStubGuardFailure(masm);
    return true;
}
9737
9738 static bool
9739 DoRetSubFallback(JSContext *cx, BaselineFrame *frame, ICRetSub_Fallback *stub,
9740 HandleValue val, uint8_t **resumeAddr)
9741 {
9742 FallbackICSpew(cx, stub, "RetSub");
9743
9744 // |val| is the bytecode offset where we should resume.
9745
9746 JS_ASSERT(val.isInt32());
9747 JS_ASSERT(val.toInt32() >= 0);
9748
9749 JSScript *script = frame->script();
9750 uint32_t offset = uint32_t(val.toInt32());
9751
9752 *resumeAddr = script->baselineScript()->nativeCodeForPC(script, script->offsetToPC(offset));
9753
9754 if (stub->numOptimizedStubs() >= ICRetSub_Fallback::MAX_OPTIMIZED_STUBS)
9755 return true;
9756
9757 // Attach an optimized stub for this pc offset.
9758 IonSpew(IonSpew_BaselineIC, " Generating RetSub stub for pc offset %u", offset);
9759 ICRetSub_Resume::Compiler compiler(cx, offset, *resumeAddr);
9760 ICStub *optStub = compiler.getStub(compiler.getStubSpace(script));
9761 if (!optStub)
9762 return false;
9763
9764 stub->addNewStub(optStub);
9765 return true;
9766 }
9767
// VM-function wrapper for DoRetSubFallback.
typedef bool(*DoRetSubFallbackFn)(JSContext *cx, BaselineFrame *, ICRetSub_Fallback *,
                                  HandleValue, uint8_t **);
static const VMFunction DoRetSubFallbackInfo = FunctionInfo<DoRetSubFallbackFn>(DoRetSubFallback);

// Used by the RetSub stubs below to rethrow a pending exception value.
typedef bool (*ThrowFn)(JSContext *, HandleValue);
static const VMFunction ThrowInfoBaseline = FunctionInfo<ThrowFn>(js::Throw);
9774
// Fallback RetSub stub. On entry, R0 tells whether an exception is pending
// (true => rethrow R1); otherwise R1 holds the bytecode offset to resume at,
// which is resolved to native code via DoRetSubFallback.
bool
ICRetSub_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    // If R0 is BooleanValue(true), rethrow R1.
    Label rethrow;
    masm.branchTestBooleanTruthy(true, R0, &rethrow);
    {
        // Call a stub to get the native code address for the pc offset in R1.
        GeneralRegisterSet regs(availableGeneralRegs(0));
        regs.take(R1);
        regs.takeUnchecked(BaselineTailCallReg);

        Register frame = regs.takeAny();
        masm.movePtr(BaselineFrameReg, frame);

        enterStubFrame(masm, regs.getAny());

        // VM-call arguments, last argument first.
        masm.pushValue(R1);
        masm.push(BaselineStubReg);
        masm.pushBaselineFramePtr(frame, frame);

        if (!callVM(DoRetSubFallbackInfo, masm))
            return false;

        leaveStubFrame(masm);

        // Resume at the native address the VM call returned.
        EmitChangeICReturnAddress(masm, ReturnReg);
        EmitReturnFromIC(masm);
    }

    // Rethrow the exception value stored in R1.
    masm.bind(&rethrow);
    EmitRestoreTailCallReg(masm);
    masm.pushValue(R1);
    return tailCallVM(ThrowInfoBaseline, masm);
}
9810
// Optimized RetSub stub specialized for one pc offset: if R1 matches the
// stub's recorded offset, jump straight to the cached native address.
bool
ICRetSub_Resume::Compiler::generateStubCode(MacroAssembler &masm)
{
    // If R0 is BooleanValue(true), rethrow R1.
    Label fail, rethrow;
    masm.branchTestBooleanTruthy(true, R0, &rethrow);

    // R1 is the pc offset. Ensure it matches this stub's offset.
    Register offset = masm.extractInt32(R1, ExtractTemp0);
    masm.branch32(Assembler::NotEqual,
                  Address(BaselineStubReg, ICRetSub_Resume::offsetOfPCOffset()),
                  offset,
                  &fail);

    // pc offset matches, resume at the target pc.
    masm.loadPtr(Address(BaselineStubReg, ICRetSub_Resume::offsetOfAddr()), R0.scratchReg());
    EmitChangeICReturnAddress(masm, R0.scratchReg());
    EmitReturnFromIC(masm);

    // Rethrow the Value stored in R1.
    masm.bind(&rethrow);
    EmitRestoreTailCallReg(masm);
    masm.pushValue(R1);
    if (!tailCallVM(ThrowInfoBaseline, masm))
        return false;

    // Offset mismatch - fall through to the next stub in the chain.
    masm.bind(&fail);
    EmitStubGuardFailure(masm);
    return true;
}
9841
// Trivial stub constructors: each forwards the stub kind and generated code
// to its base class and stores the stub's GC-traced payload.

ICProfiler_PushFunction::ICProfiler_PushFunction(JitCode *stubCode, const char *str,
                                                 HandleScript script)
  : ICStub(ICStub::Profiler_PushFunction, stubCode),
    str_(str),
    script_(script)
{ }

ICTypeMonitor_SingleObject::ICTypeMonitor_SingleObject(JitCode *stubCode, HandleObject obj)
  : ICStub(TypeMonitor_SingleObject, stubCode),
    obj_(obj)
{ }

ICTypeMonitor_TypeObject::ICTypeMonitor_TypeObject(JitCode *stubCode, HandleTypeObject type)
  : ICStub(TypeMonitor_TypeObject, stubCode),
    type_(type)
{ }

ICTypeUpdate_SingleObject::ICTypeUpdate_SingleObject(JitCode *stubCode, HandleObject obj)
  : ICStub(TypeUpdate_SingleObject, stubCode),
    obj_(obj)
{ }

ICTypeUpdate_TypeObject::ICTypeUpdate_TypeObject(JitCode *stubCode, HandleTypeObject type)
  : ICStub(TypeUpdate_TypeObject, stubCode),
    type_(type)
{ }
9868
// GetElem stub constructors. The shared extra_ word packs small per-stub
// flags (access type, atomize requirement, element type).

ICGetElemNativeStub::ICGetElemNativeStub(ICStub::Kind kind, JitCode *stubCode,
                                         ICStub *firstMonitorStub,
                                         HandleShape shape, HandlePropertyName name,
                                         AccessType acctype, bool needsAtomize)
  : ICMonitoredStub(kind, stubCode, firstMonitorStub),
    shape_(shape),
    name_(name)
{
    // Pack access type and needs-atomize flag into extra_.
    extra_ = (static_cast<uint16_t>(acctype) << ACCESSTYPE_SHIFT) |
             (static_cast<uint16_t>(needsAtomize) << NEEDS_ATOMIZE_SHIFT);
}

ICGetElemNativeStub::~ICGetElemNativeStub()
{ }

ICGetElemNativeGetterStub::ICGetElemNativeGetterStub(
                        ICStub::Kind kind, JitCode *stubCode, ICStub *firstMonitorStub,
                        HandleShape shape, HandlePropertyName name, AccessType acctype,
                        bool needsAtomize, HandleFunction getter, uint32_t pcOffset)
  : ICGetElemNativeStub(kind, stubCode, firstMonitorStub, shape, name, acctype, needsAtomize),
    getter_(getter),
    pcOffset_(pcOffset)
{
    // Only the getter-calling stub kinds may use this class.
    JS_ASSERT(kind == GetElem_NativePrototypeCallNative ||
              kind == GetElem_NativePrototypeCallScripted);
    JS_ASSERT(acctype == NativeGetter || acctype == ScriptedGetter);
}

ICGetElem_NativePrototypeSlot::ICGetElem_NativePrototypeSlot(
                            JitCode *stubCode, ICStub *firstMonitorStub,
                            HandleShape shape, HandlePropertyName name,
                            AccessType acctype, bool needsAtomize, uint32_t offset,
                            HandleObject holder, HandleShape holderShape)
  : ICGetElemNativeSlotStub(ICStub::GetElem_NativePrototypeSlot, stubCode, firstMonitorStub, shape,
                            name, acctype, needsAtomize, offset),
    holder_(holder),
    holderShape_(holderShape)
{ }

ICGetElemNativePrototypeCallStub::ICGetElemNativePrototypeCallStub(
                                ICStub::Kind kind, JitCode *stubCode, ICStub *firstMonitorStub,
                                HandleShape shape, HandlePropertyName name,
                                AccessType acctype, bool needsAtomize, HandleFunction getter,
                                uint32_t pcOffset, HandleObject holder, HandleShape holderShape)
  : ICGetElemNativeGetterStub(kind, stubCode, firstMonitorStub, shape, name, acctype, needsAtomize,
                              getter, pcOffset),
    holder_(holder),
    holderShape_(holderShape)
{}

ICGetElem_Dense::ICGetElem_Dense(JitCode *stubCode, ICStub *firstMonitorStub, HandleShape shape)
  : ICMonitoredStub(GetElem_Dense, stubCode, firstMonitorStub),
    shape_(shape)
{ }

ICGetElem_TypedArray::ICGetElem_TypedArray(JitCode *stubCode, HandleShape shape, uint32_t type)
  : ICStub(GetElem_TypedArray, stubCode),
    shape_(shape)
{
    // The typed-array element type must fit in the 16-bit extra_ field.
    extra_ = uint16_t(type);
    JS_ASSERT(extra_ == type);
}
9931
// SetElem stub constructors.

ICSetElem_Dense::ICSetElem_Dense(JitCode *stubCode, HandleShape shape, HandleTypeObject type)
  : ICUpdatedStub(SetElem_Dense, stubCode),
    shape_(shape),
    type_(type)
{ }

ICSetElem_DenseAdd::ICSetElem_DenseAdd(JitCode *stubCode, types::TypeObject *type,
                                       size_t protoChainDepth)
  : ICUpdatedStub(SetElem_DenseAdd, stubCode),
    type_(type)
{
    // The bounded proto-chain depth is stored in the extra_ field.
    JS_ASSERT(protoChainDepth <= MAX_PROTO_CHAIN_DEPTH);
    extra_ = protoChainDepth;
}
9946
// Instantiate a SetElem_DenseAdd stub specialized for a fixed proto-chain
// depth, capturing the object's type and the set of guarded shapes.
template <size_t ProtoChainDepth>
ICUpdatedStub *
ICSetElemDenseAddCompiler::getStubSpecific(ICStubSpace *space, const AutoShapeVector *shapes)
{
    RootedTypeObject objType(cx, obj_->getType(cx));
    if (!objType)
        return nullptr;
    Rooted<JitCode *> stubCode(cx, getStubCode());
    return ICSetElem_DenseAddImpl<ProtoChainDepth>::New(space, stubCode, objType, shapes);
}
9957
ICSetElem_TypedArray::ICSetElem_TypedArray(JitCode *stubCode, HandleShape shape, uint32_t type,
                                           bool expectOutOfBounds)
  : ICStub(SetElem_TypedArray, stubCode),
    shape_(shape)
{
    // Low 8 bits of extra_: element type; bit 8: out-of-bounds expectation.
    extra_ = uint8_t(type);
    JS_ASSERT(extra_ == type);
    extra_ |= (static_cast<uint16_t>(expectOutOfBounds) << 8);
}

ICGetName_Global::ICGetName_Global(JitCode *stubCode, ICStub *firstMonitorStub, HandleShape shape,
                                   uint32_t slot)
  : ICMonitoredStub(GetName_Global, stubCode, firstMonitorStub),
    shape_(shape),
    slot_(slot)
{ }

// GetName stub walking NumHops scope objects: guards one shape per hop plus
// one for the final scope that holds the binding.
template <size_t NumHops>
ICGetName_Scope<NumHops>::ICGetName_Scope(JitCode *stubCode, ICStub *firstMonitorStub,
                                          AutoShapeVector *shapes, uint32_t offset)
  : ICMonitoredStub(GetStubKind(), stubCode, firstMonitorStub),
    offset_(offset)
{
    JS_STATIC_ASSERT(NumHops <= MAX_HOPS);
    JS_ASSERT(shapes->length() == NumHops + 1);
    for (size_t i = 0; i < NumHops + 1; i++)
        shapes_[i].init((*shapes)[i]);
}

ICGetIntrinsic_Constant::ICGetIntrinsic_Constant(JitCode *stubCode, HandleValue value)
  : ICStub(GetIntrinsic_Constant, stubCode),
    value_(value)
{ }

ICGetIntrinsic_Constant::~ICGetIntrinsic_Constant()
{ }
9994
// GetProp / SetProp stub constructors.

ICGetProp_Primitive::ICGetProp_Primitive(JitCode *stubCode, ICStub *firstMonitorStub,
                                         HandleShape protoShape, uint32_t offset)
  : ICMonitoredStub(GetProp_Primitive, stubCode, firstMonitorStub),
    protoShape_(protoShape),
    offset_(offset)
{ }

ICGetPropNativeStub::ICGetPropNativeStub(ICStub::Kind kind, JitCode *stubCode,
                                         ICStub *firstMonitorStub,
                                         HandleShape shape, uint32_t offset)
  : ICMonitoredStub(kind, stubCode, firstMonitorStub),
    shape_(shape),
    offset_(offset)
{ }

ICGetProp_NativePrototype::ICGetProp_NativePrototype(JitCode *stubCode, ICStub *firstMonitorStub,
                                                     HandleShape shape, uint32_t offset,
                                                     HandleObject holder, HandleShape holderShape)
  : ICGetPropNativeStub(GetProp_NativePrototype, stubCode, firstMonitorStub, shape, offset),
    holder_(holder),
    holderShape_(holderShape)
{ }

ICGetPropCallGetter::ICGetPropCallGetter(Kind kind, JitCode *stubCode, ICStub *firstMonitorStub,
                                         HandleObject holder, HandleShape holderShape, HandleFunction getter,
                                         uint32_t pcOffset)
  : ICMonitoredStub(kind, stubCode, firstMonitorStub),
    holder_(holder),
    holderShape_(holderShape),
    getter_(getter),
    pcOffset_(pcOffset)
{
    // Only the getter-calling stub kinds may use this base class.
    JS_ASSERT(kind == ICStub::GetProp_CallScripted ||
              kind == ICStub::GetProp_CallNative ||
              kind == ICStub::GetProp_CallNativePrototype);
}

ICGetPropCallPrototypeGetter::ICGetPropCallPrototypeGetter(Kind kind, JitCode *stubCode,
                                                           ICStub *firstMonitorStub,
                                                           HandleShape receiverShape, HandleObject holder,
                                                           HandleShape holderShape,
                                                           HandleFunction getter, uint32_t pcOffset)
  : ICGetPropCallGetter(kind, stubCode, firstMonitorStub, holder, holderShape, getter, pcOffset),
    receiverShape_(receiverShape)
{
    JS_ASSERT(kind == ICStub::GetProp_CallScripted || kind == ICStub::GetProp_CallNativePrototype);
}

ICSetProp_Native::ICSetProp_Native(JitCode *stubCode, HandleTypeObject type, HandleShape shape,
                                   uint32_t offset)
  : ICUpdatedStub(SetProp_Native, stubCode),
    type_(type),
    shape_(shape),
    offset_(offset)
{ }
10050
10051 ICUpdatedStub *
10052 ICSetProp_Native::Compiler::getStub(ICStubSpace *space)
10053 {
10054 RootedTypeObject type(cx, obj_->getType(cx));
10055 if (!type)
10056 return nullptr;
10057
10058 RootedShape shape(cx, obj_->lastProperty());
10059 ICUpdatedStub *stub = ICSetProp_Native::New(space, getStubCode(), type, shape, offset_);
10060 if (!stub || !stub->initUpdatingChain(cx, space))
10061 return nullptr;
10062 return stub;
10063 }
10064
ICSetProp_NativeAdd::ICSetProp_NativeAdd(JitCode *stubCode, HandleTypeObject type,
                                         size_t protoChainDepth,
                                         HandleShape newShape,
                                         uint32_t offset)
  : ICUpdatedStub(SetProp_NativeAdd, stubCode),
    type_(type),
    newShape_(newShape),
    offset_(offset)
{
    // The bounded proto-chain depth is stored in the extra_ field.
    JS_ASSERT(protoChainDepth <= MAX_PROTO_CHAIN_DEPTH);
    extra_ = protoChainDepth;
}

// Depth-specialized variant: additionally records the guarded shape of each
// object along the proto chain.
template <size_t ProtoChainDepth>
ICSetProp_NativeAddImpl<ProtoChainDepth>::ICSetProp_NativeAddImpl(JitCode *stubCode,
                                                                  HandleTypeObject type,
                                                                  const AutoShapeVector *shapes,
                                                                  HandleShape newShape,
                                                                  uint32_t offset)
  : ICSetProp_NativeAdd(stubCode, type, ProtoChainDepth, newShape, offset)
{
    JS_ASSERT(shapes->length() == NumShapes);
    for (size_t i = 0; i < NumShapes; i++)
        shapes_[i].init((*shapes)[i]);
}

ICSetPropNativeAddCompiler::ICSetPropNativeAddCompiler(JSContext *cx, HandleObject obj,
                                                       HandleShape oldShape,
                                                       size_t protoChainDepth,
                                                       bool isFixedSlot,
                                                       uint32_t offset)
  : ICStubCompiler(cx, ICStub::SetProp_NativeAdd),
    obj_(cx, obj),
    oldShape_(cx, oldShape),
    protoChainDepth_(protoChainDepth),
    isFixedSlot_(isFixedSlot),
    offset_(offset)
{
    JS_ASSERT(protoChainDepth_ <= ICSetProp_NativeAdd::MAX_PROTO_CHAIN_DEPTH);
}

ICSetPropCallSetter::ICSetPropCallSetter(Kind kind, JitCode *stubCode, HandleShape shape,
                                         HandleObject holder, HandleShape holderShape,
                                         HandleFunction setter, uint32_t pcOffset)
  : ICStub(kind, stubCode),
    shape_(shape),
    holder_(holder),
    holderShape_(holderShape),
    setter_(setter),
    pcOffset_(pcOffset)
{
    // Only the setter-calling stub kinds may use this class.
    JS_ASSERT(kind == ICStub::SetProp_CallScripted || kind == ICStub::SetProp_CallNative);
}
10118
// Call stub constructors.

ICCall_Scripted::ICCall_Scripted(JitCode *stubCode, ICStub *firstMonitorStub,
                                 HandleScript calleeScript, HandleObject templateObject,
                                 uint32_t pcOffset)
  : ICMonitoredStub(ICStub::Call_Scripted, stubCode, firstMonitorStub),
    calleeScript_(calleeScript),
    templateObject_(templateObject),
    pcOffset_(pcOffset)
{ }

ICCall_Native::ICCall_Native(JitCode *stubCode, ICStub *firstMonitorStub,
                             HandleFunction callee, HandleObject templateObject,
                             uint32_t pcOffset)
  : ICMonitoredStub(ICStub::Call_Native, stubCode, firstMonitorStub),
    callee_(callee),
    templateObject_(templateObject),
    pcOffset_(pcOffset)
{
#ifdef JS_ARM_SIMULATOR
    // The simulator requires VM calls to be redirected to a special swi
    // instruction to handle them. To make this work, we store the redirected
    // pointer in the stub.
    native_ = Simulator::RedirectNativeFunction(JS_FUNC_TO_DATA_PTR(void *, callee->native()),
                                                Args_General3);
#endif
}
10144
// DOM-proxy GetProp stub constructors.

ICGetPropCallDOMProxyNativeStub::ICGetPropCallDOMProxyNativeStub(Kind kind, JitCode *stubCode,
                                                                 ICStub *firstMonitorStub,
                                                                 HandleShape shape,
                                                                 BaseProxyHandler *proxyHandler,
                                                                 HandleShape expandoShape,
                                                                 HandleObject holder,
                                                                 HandleShape holderShape,
                                                                 HandleFunction getter,
                                                                 uint32_t pcOffset)
  : ICMonitoredStub(kind, stubCode, firstMonitorStub),
    shape_(shape),
    proxyHandler_(proxyHandler),
    expandoShape_(expandoShape),
    holder_(holder),
    holderShape_(holderShape),
    getter_(getter),
    pcOffset_(pcOffset)
{ }

ICGetPropCallDOMProxyNativeCompiler::ICGetPropCallDOMProxyNativeCompiler(JSContext *cx,
                                                                         ICStub::Kind kind,
                                                                         ICStub *firstMonitorStub,
                                                                         Handle<ProxyObject*> proxy,
                                                                         HandleObject holder,
                                                                         HandleFunction getter,
                                                                         uint32_t pcOffset)
  : ICStubCompiler(cx, kind),
    firstMonitorStub_(firstMonitorStub),
    proxy_(cx, proxy),
    holder_(cx, holder),
    getter_(cx, getter),
    pcOffset_(pcOffset)
{
    // This compiler only serves the DOM-proxy getter stub kinds, and the
    // proxy must really be a DOM proxy.
    JS_ASSERT(kind == ICStub::GetProp_CallDOMProxyNative ||
              kind == ICStub::GetProp_CallDOMProxyWithGenerationNative);
    JS_ASSERT(proxy_->handler()->family() == GetDOMProxyHandlerFamily());
}

ICGetProp_DOMProxyShadowed::ICGetProp_DOMProxyShadowed(JitCode *stubCode,
                                                       ICStub *firstMonitorStub,
                                                       HandleShape shape,
                                                       BaseProxyHandler *proxyHandler,
                                                       HandlePropertyName name,
                                                       uint32_t pcOffset)
  : ICMonitoredStub(ICStub::GetProp_DOMProxyShadowed, stubCode, firstMonitorStub),
    shape_(shape),
    proxyHandler_(proxyHandler),
    name_(name),
    pcOffset_(pcOffset)
{ }
10195
10196 //
10197 // Rest_Fallback
10198 //
10199
10200 static bool DoRestFallback(JSContext *cx, ICRest_Fallback *stub,
10201 BaselineFrame *frame, MutableHandleValue res)
10202 {
10203 unsigned numFormals = frame->numFormalArgs() - 1;
10204 unsigned numActuals = frame->numActualArgs();
10205 unsigned numRest = numActuals > numFormals ? numActuals - numFormals : 0;
10206 Value *rest = frame->argv() + numFormals;
10207
10208 JSObject *obj = NewDenseCopiedArray(cx, numRest, rest, nullptr);
10209 if (!obj)
10210 return false;
10211 types::FixRestArgumentsType(cx, obj);
10212 res.setObject(*obj);
10213 return true;
10214 }
10215
// VM-function wrapper for DoRestFallback.
typedef bool (*DoRestFallbackFn)(JSContext *, ICRest_Fallback *, BaselineFrame *,
                                 MutableHandleValue);
static const VMFunction DoRestFallbackInfo =
    FunctionInfo<DoRestFallbackFn>(DoRestFallback);
10220
// Emit the fallback stub: tail-call DoRestFallback with (stub, frame).
// Note the argument order differs from the other fallbacks - frame is
// pushed first here because it is the last parameter of DoRestFallback.
bool
ICRest_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
{
    EmitRestoreTailCallReg(masm);

    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
    masm.push(BaselineStubReg);

    return tailCallVM(DoRestFallbackInfo, masm);
}
10231
10232 } // namespace jit
10233 } // namespace js

mercurial