|
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- |
|
2 * vim: set ts=8 sts=4 et sw=4 tw=99: |
|
3 * This Source Code Form is subject to the terms of the Mozilla Public |
|
4 * License, v. 2.0. If a copy of the MPL was not distributed with this |
|
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ |
|
6 |
|
7 #include "jit/IonCaches.h" |
|
8 |
|
9 #include "mozilla/TemplateLib.h" |
|
10 |
|
11 #include "jsproxy.h" |
|
12 #include "jstypes.h" |
|
13 |
|
14 #include "builtin/TypedObject.h" |
|
15 #include "jit/Ion.h" |
|
16 #include "jit/IonLinker.h" |
|
17 #include "jit/IonSpewer.h" |
|
18 #include "jit/Lowering.h" |
|
19 #ifdef JS_ION_PERF |
|
20 # include "jit/PerfSpewer.h" |
|
21 #endif |
|
22 #include "jit/ParallelFunctions.h" |
|
23 #include "jit/VMFunctions.h" |
|
24 #include "vm/Shape.h" |
|
25 |
|
26 #include "jit/IonFrames-inl.h" |
|
27 #include "vm/Interpreter-inl.h" |
|
28 #include "vm/Shape-inl.h" |
|
29 |
|
30 using namespace js; |
|
31 using namespace js::jit; |
|
32 |
|
33 using mozilla::tl::FloorLog2; |
|
34 |
|
void
CodeLocationJump::repoint(JitCode *code, MacroAssembler *masm)
{
    // Convert this jump location from a relative buffer offset into an
    // absolute address inside the finalized |code| block.
    JS_ASSERT(state_ == Relative);
    size_t new_off = (size_t)raw_;
#ifdef JS_SMALL_BRANCH
    size_t jumpTableEntryOffset = reinterpret_cast<size_t>(jumpTableEntry_);
#endif
    // When a masm is supplied, offsets recorded at assembly time may have
    // shifted during finalization; translate them to their actual values.
    // A null masm means the recorded offset is already final.
    if (masm != nullptr) {
#ifdef JS_CODEGEN_X64
        // On x64 the relative offset is stored in the pointer-sized raw_
        // field; it must still fit in 32 bits for actualOffset().
        JS_ASSERT((uint64_t)raw_ <= UINT32_MAX);
#endif
        new_off = masm->actualOffset((uintptr_t)raw_);
#ifdef JS_SMALL_BRANCH
        jumpTableEntryOffset = masm->actualIndex(jumpTableEntryOffset);
#endif
    }
    raw_ = code->raw() + new_off;
#ifdef JS_SMALL_BRANCH
    // Architectures with short branch ranges route patchable jumps through
    // a jump table; resolve our entry in it as well.
    jumpTableEntry_ = Assembler::PatchableJumpAddress(code, (size_t) jumpTableEntryOffset);
#endif
    setAbsolute();
}
|
58 |
|
void
CodeLocationLabel::repoint(JitCode *code, MacroAssembler *masm)
{
    // Convert this label from a relative buffer offset into an absolute
    // address inside the finalized |code| block. A null masm means the
    // recorded offset needs no translation.
    JS_ASSERT(state_ == Relative);
    size_t new_off = (size_t)raw_;
    if (masm != nullptr) {
#ifdef JS_CODEGEN_X64
        // The offset is stored in the pointer-sized raw_ field; it must
        // still fit in 32 bits for actualOffset().
        JS_ASSERT((uint64_t)raw_ <= UINT32_MAX);
#endif
        new_off = masm->actualOffset((uintptr_t)raw_);
    }
    // Unlike jumps, a label must point inside the code block itself.
    JS_ASSERT(new_off < code->instructionsSize());

    raw_ = code->raw() + new_off;
    setAbsolute();
}
|
75 |
|
void
CodeOffsetLabel::fixup(MacroAssembler *masm)
{
    // Translate the recorded assembly-time offset to its final position
    // after the buffer has been finalized.
    offset_ = masm->actualOffset(offset_);
}
|
81 |
|
void
CodeOffsetJump::fixup(MacroAssembler *masm)
{
    // Translate the recorded assembly-time offset to its final position
    // after the buffer has been finalized.
    offset_ = masm->actualOffset(offset_);
#ifdef JS_SMALL_BRANCH
    // Short-branch architectures also track an index into the patchable
    // jump table; translate it too.
    jumpTableIndex_ = masm->actualIndex(jumpTableIndex_);
#endif
}
|
90 |
|
const char *
IonCache::CacheName(IonCache::Kind kind)
{
    // Human-readable cache-kind names for spew/logging, generated from the
    // same IONCACHE_KIND_LIST macro that defines the Kind enum so the two
    // always stay in sync.
    static const char * const names[] =
    {
#define NAME(x) #x,
        IONCACHE_KIND_LIST(NAME)
#undef NAME
    };
    return names[kind];
}
|
102 |
|
IonCache::LinkStatus
IonCache::linkCode(JSContext *cx, MacroAssembler &masm, IonScript *ion, JitCode **code)
{
    // Finalize the assembled stub into executable JitCode. Returns:
    //   LINK_ERROR    - allocation failed (*code is null);
    //   CACHE_FLUSHED - the owning IonScript was invalidated (e.g. by a GC
    //                   triggered during allocation), so the stub is moot;
    //   LINK_GOOD     - *code holds the newly linked stub.
    Linker linker(masm);
    *code = linker.newCode<CanGC>(cx, JSC::ION_CODE);
    if (!*code)
        return LINK_ERROR;

    if (ion->invalidated())
        return CACHE_FLUSHED;

    return LINK_GOOD;
}
|
116 |
|
// Upper bound on the number of stubs attached to a single IC; once reached,
// canAttachStub() fails and the cache always takes the out-of-line fallback.
const size_t IonCache::MAX_STUBS = 16;
|
118 |
|
119 // Helper class which encapsulates logic to attach a stub to an IC by hooking |
|
120 // up rejoins and next stub jumps. |
|
121 // |
|
122 // The simplest stubs have a single jump to the next stub and look like the |
|
123 // following: |
|
124 // |
|
125 // branch guard NEXTSTUB |
|
126 // ... IC-specific code ... |
|
127 // jump REJOIN |
|
128 // |
|
129 // This corresponds to: |
|
130 // |
|
131 // attacher.branchNextStub(masm, ...); |
|
132 // ... emit IC-specific code ... |
|
133 // attacher.jumpRejoin(masm); |
|
134 // |
|
135 // Whether the stub needs multiple next stub jumps look like: |
|
136 // |
|
137 // branch guard FAILURES |
|
138 // ... IC-specific code ... |
|
139 // branch another-guard FAILURES |
|
140 // ... IC-specific code ... |
|
141 // jump REJOIN |
|
142 // FAILURES: |
|
143 // jump NEXTSTUB |
|
144 // |
|
145 // This corresponds to: |
|
146 // |
|
147 // Label failures; |
|
148 // masm.branchX(..., &failures); |
|
149 // ... emit IC-specific code ... |
|
150 // masm.branchY(..., failures); |
|
151 // ... emit more IC-specific code ... |
|
152 // attacher.jumpRejoin(masm); |
|
153 // masm.bind(&failures); |
|
154 // attacher.jumpNextStub(masm); |
|
155 // |
|
156 // A convenience function |branchNextStubOrLabel| is provided in the case that |
|
157 // the stub sometimes has multiple next stub jumps and sometimes a single |
|
158 // one. If a non-nullptr label is passed in, a |branchPtr| will be made to |
|
159 // that label instead of a |branchPtrWithPatch| to the next stub. |
|
class IonCache::StubAttacher
{
  protected:
    // Flags recording whether the corresponding patchable offset below has
    // been emitted; they guard against emitting twice and against patching
    // an offset that was never produced.
    bool hasNextStubOffset_ : 1;
    bool hasStubCodePatchOffset_ : 1;

    // Where stubs jump back to on success (inside the IC's main code).
    CodeLocationLabel rejoinLabel_;
    // Patchable offsets recorded while emitting the stub; fixed up and
    // patched once the stub's JitCode has been allocated.
    CodeOffsetJump nextStubOffset_;
    CodeOffsetJump rejoinOffset_;
    CodeOffsetLabel stubCodePatchOffset_;

  public:
    StubAttacher(CodeLocationLabel rejoinLabel)
      : hasNextStubOffset_(false),
        hasStubCodePatchOffset_(false),
        rejoinLabel_(rejoinLabel),
        nextStubOffset_(),
        rejoinOffset_(),
        stubCodePatchOffset_()
    { }

    // Value used instead of the JitCode self-reference of generated
    // stubs. This value is needed for marking calls made inside stubs. This
    // value would be replaced by the attachStub function after the allocation
    // of the JitCode. The self-reference is used to keep the stub path alive
    // even if the IonScript is invalidated or if the IC is flushed.
    static const ImmPtr STUB_ADDR;

    // Emit a guard that, on |cond|, branches to the (not yet known) next
    // stub; records the patchable jump for patchNextStubJump.
    template <class T1, class T2>
    void branchNextStub(MacroAssembler &masm, Assembler::Condition cond, T1 op1, T2 op2) {
        JS_ASSERT(!hasNextStubOffset_);
        RepatchLabel nextStub;
        nextStubOffset_ = masm.branchPtrWithPatch(cond, op1, op2, &nextStub);
        hasNextStubOffset_ = true;
        masm.bind(&nextStub);
    }

    // Like branchNextStub, but branches to |label| instead when the caller
    // has a shared failure path (non-null label). See the header comment
    // above this class.
    template <class T1, class T2>
    void branchNextStubOrLabel(MacroAssembler &masm, Assembler::Condition cond, T1 op1, T2 op2,
                               Label *label)
    {
        if (label != nullptr)
            masm.branchPtr(cond, op1, op2, label);
        else
            branchNextStub(masm, cond, op1, op2);
    }

    // Emit the success-path jump back to the IC's rejoin point; patched in
    // patchRejoinJump.
    void jumpRejoin(MacroAssembler &masm) {
        RepatchLabel rejoin;
        rejoinOffset_ = masm.jumpWithPatch(&rejoin);
        masm.bind(&rejoin);
    }

    // Emit an unconditional jump to the next stub (used after a shared
    // failure label); records the patchable jump for patchNextStubJump.
    void jumpNextStub(MacroAssembler &masm) {
        JS_ASSERT(!hasNextStubOffset_);
        RepatchLabel nextStub;
        nextStubOffset_ = masm.jumpWithPatch(&nextStub);
        hasNextStubOffset_ = true;
        masm.bind(&nextStub);
    }

    void pushStubCodePointer(MacroAssembler &masm) {
        // Push the JitCode pointer for the stub we're generating.
        // WARNING:
        // WARNING: If JitCode ever becomes relocatable, the following code is incorrect.
        // WARNING: Note that we're not marking the pointer being pushed as an ImmGCPtr.
        // WARNING: This location will be patched with the pointer of the generated stub,
        // WARNING: such as it can be marked when a call is made with this stub. Be aware
        // WARNING: that ICs are not marked and so this stub will only be kept alive iff
        // WARNING: it is on the stack at the time of the GC. No ImmGCPtr is needed as the
        // WARNING: stubs are flushed on GC.
        // WARNING:
        JS_ASSERT(!hasStubCodePatchOffset_);
        stubCodePatchOffset_ = masm.PushWithPatch(STUB_ADDR);
        hasStubCodePatchOffset_ = true;
    }

    // Patch the recorded rejoin jump so the stub's success path returns to
    // the IC's rejoin label.
    void patchRejoinJump(MacroAssembler &masm, JitCode *code) {
        rejoinOffset_.fixup(&masm);
        CodeLocationJump rejoinJump(code, rejoinOffset_);
        PatchJump(rejoinJump, rejoinLabel_);
    }

    // Replace the STUB_ADDR placeholder (if one was pushed) with the stub's
    // actual JitCode pointer, so the stub keeps itself alive while on-stack.
    void patchStubCodePointer(MacroAssembler &masm, JitCode *code) {
        if (hasStubCodePatchOffset_) {
            stubCodePatchOffset_.fixup(&masm);
            Assembler::patchDataWithValueCheck(CodeLocationLabel(code, stubCodePatchOffset_),
                                               ImmPtr(code), STUB_ADDR);
        }
    }

    // Hook the stub's failure path into the IC's stub chain; strategy
    // differs between repatch-style and dispatch-style caches.
    virtual void patchNextStubJump(MacroAssembler &masm, JitCode *code) = 0;
};
|
253 |
|
// Recognizable placeholder value; replaced with the real JitCode pointer by
// patchStubCodePointer (see the STUB_ADDR declaration above).
const ImmPtr IonCache::StubAttacher::STUB_ADDR = ImmPtr((void*)0xdeadc0de);
|
255 |
|
// StubAttacher for repatch-style caches: new stubs are APPENDED to the chain
// by repatching the previous stub's (or the IC entry's) next-stub jump.
class RepatchIonCache::RepatchStubAppender : public IonCache::StubAttacher
{
    RepatchIonCache &cache_;

  public:
    RepatchStubAppender(RepatchIonCache &cache)
      : StubAttacher(cache.rejoinLabel()),
        cache_(cache)
    {
    }

    void patchNextStubJump(MacroAssembler &masm, JitCode *code) {
        // Patch the previous nextStubJump of the last stub, or the jump from the
        // codeGen, to jump into the newly allocated code.
        PatchJump(cache_.lastJump_, CodeLocationLabel(code));

        // If this path is not taken, we are producing an entry which can no
        // longer go back into the update function.
        if (hasNextStubOffset_) {
            nextStubOffset_.fixup(&masm);
            CodeLocationJump nextStubJump(code, nextStubOffset_);
            PatchJump(nextStubJump, cache_.fallbackLabel_);

            // When the last stub fails, it fallback to the ool call which can
            // produce a stub. Next time we generate a stub, we will patch the
            // nextStub jump to try the new stub.
            cache_.lastJump_ = nextStubJump;
        }
    }
};
|
286 |
|
void
RepatchIonCache::reset()
{
    // Drop all attached stubs: re-route the IC's initial jump straight to
    // the out-of-line fallback and restart the chain from there.
    IonCache::reset();
    PatchJump(initialJump_, fallbackLabel_);
    lastJump_ = initialJump_;
}
|
294 |
|
void
RepatchIonCache::emitInitialJump(MacroAssembler &masm, AddCacheState &addState)
{
    // Emit the patchable entry jump; until a stub is attached it falls
    // through to the repatch entry bound in bindInitialJump.
    initialJump_ = masm.jumpWithPatch(&addState.repatchEntry);
    lastJump_ = initialJump_;
}
|
301 |
|
void
RepatchIonCache::bindInitialJump(MacroAssembler &masm, AddCacheState &addState)
{
    // Bind the target of the initial jump emitted in emitInitialJump.
    masm.bind(&addState.repatchEntry);
}
|
307 |
|
void
RepatchIonCache::updateBaseAddress(JitCode *code, MacroAssembler &masm)
{
    // After the enclosing IonScript's code is finalized, convert our
    // recorded jump locations from relative offsets to absolute addresses.
    IonCache::updateBaseAddress(code, masm);
    initialJump_.repoint(code, &masm);
    lastJump_.repoint(code, &masm);
}
|
315 |
|
// StubAttacher for dispatch-style caches: new stubs are PREPENDED by making
// the dispatch-table slot point at the new stub, which chains to the old one.
class DispatchIonCache::DispatchStubPrepender : public IonCache::StubAttacher
{
    DispatchIonCache &cache_;

  public:
    DispatchStubPrepender(DispatchIonCache &cache)
      : StubAttacher(cache.rejoinLabel_),
        cache_(cache)
    {
    }

    void patchNextStubJump(MacroAssembler &masm, JitCode *code) {
        // Dispatch stubs must always have a next-stub jump: the chain ends
        // at the fallback, never dead-ends.
        JS_ASSERT(hasNextStubOffset_);

        // Jump to the previous entry in the stub dispatch table. We
        // have not yet executed the code we're patching the jump in.
        nextStubOffset_.fixup(&masm);
        CodeLocationJump nextStubJump(code, nextStubOffset_);
        PatchJump(nextStubJump, CodeLocationLabel(cache_.firstStub_));

        // Update the dispatch table. Modification to jumps after the dispatch
        // table is updated is disallowed, lest we race on entry into an
        // unfinalized stub.
        cache_.firstStub_ = code->raw();
    }
};
|
342 |
|
void
DispatchIonCache::reset()
{
    // Drop all attached stubs by pointing the dispatch-table slot back at
    // the out-of-line fallback path.
    IonCache::reset();
    firstStub_ = fallbackLabel_.raw();
}
|
void
DispatchIonCache::emitInitialJump(MacroAssembler &masm, AddCacheState &addState)
{
    // Load the address of the dispatch-table slot (patched with &firstStub_
    // in updateBaseAddress; -1 is a recognizable placeholder), then jump
    // indirectly through it to the current first stub.
    Register scratch = addState.dispatchScratch;
    dispatchLabel_ = masm.movWithPatch(ImmPtr((void*)-1), scratch);
    masm.loadPtr(Address(scratch, 0), scratch);
    masm.jump(scratch);
    // Successful stubs rejoin immediately after the indirect jump.
    rejoinLabel_ = masm.labelForPatch();
}
|
358 |
|
void
DispatchIonCache::bindInitialJump(MacroAssembler &masm, AddCacheState &addState)
{
    // Do nothing. Dispatch caches jump indirectly through the dispatch
    // table, so there is no fall-through entry label to bind.
}
|
364 |
|
void
DispatchIonCache::updateBaseAddress(JitCode *code, MacroAssembler &masm)
{
    // The address of firstStub_ should be pointer aligned.
    JS_ASSERT(uintptr_t(&firstStub_) % sizeof(uintptr_t) == 0);

    IonCache::updateBaseAddress(code, masm);
    // Replace the -1 placeholder from emitInitialJump with the real address
    // of the dispatch-table slot.
    dispatchLabel_.fixup(&masm);
    Assembler::patchDataWithValueCheck(CodeLocationLabel(code, dispatchLabel_),
                                       ImmPtr(&firstStub_),
                                       ImmPtr((void*)-1));
    // No stubs yet: dispatch straight to the fallback path.
    firstStub_ = fallbackLabel_.raw();
    rejoinLabel_.repoint(code, &masm);
}
|
379 |
|
void
IonCache::attachStub(MacroAssembler &masm, StubAttacher &attacher, Handle<JitCode *> code)
{
    // Wire a freshly linked stub (|code|) into this IC's stub chain. The
    // three patches below must happen in this order; see the comment on the
    // final one.
    JS_ASSERT(canAttachStub());
    incrementStubCount();

    // Update the success path to continue after the IC initial jump.
    attacher.patchRejoinJump(masm, code);

    // Replace the STUB_ADDR constant by the address of the generated stub, such
    // as it can be kept alive even if the cache is flushed (see
    // MarkJitExitFrame).
    attacher.patchStubCodePointer(masm, code);

    // Update the failure path. Note it is this patch that makes the stub
    // accessible for parallel ICs so it should not be moved unless you really
    // know what is going on.
    attacher.patchNextStubJump(masm, code);
}
|
399 |
|
bool
IonCache::linkAndAttachStub(JSContext *cx, MacroAssembler &masm, StubAttacher &attacher,
                            IonScript *ion, const char *attachKind)
{
    // Finalize the assembled stub into JitCode and, on success, attach it to
    // this IC. Returns false only on OOM (LINK_ERROR); a flushed cache is
    // reported as success without attaching anything.
    Rooted<JitCode *> code(cx);
    {
        // Need to exit the AutoFlushICache context to flush the cache
        // before attaching the stub below.
        AutoFlushICache afc("IonCache");
        LinkStatus status = linkCode(cx, masm, ion, code.address());
        if (status != LINK_GOOD)
            // CACHE_FLUSHED is not an error: the stub is simply dropped.
            return status != LINK_ERROR;
    }

    if (pc_) {
        IonSpew(IonSpew_InlineCaches, "Cache %p(%s:%d/%d) generated %s %s stub at %p",
                this, script_->filename(), script_->lineno(), script_->pcToOffset(pc_),
                attachKind, CacheName(kind()), code->raw());
    } else {
        // pc_ is null for idempotent caches, which have no single bytecode
        // location to report.
        IonSpew(IonSpew_InlineCaches, "Cache %p generated %s %s stub at %p",
                this, attachKind, CacheName(kind()), code->raw());
    }

#ifdef JS_ION_PERF
    writePerfSpewerJitCodeProfile(code, "IonCache");
#endif

    attachStub(masm, attacher, code);

    return true;
}
|
431 |
|
void
IonCache::updateBaseAddress(JitCode *code, MacroAssembler &masm)
{
    // Resolve the fallback (out-of-line call) label to its absolute address
    // now that the enclosing code block is finalized.
    fallbackLabel_.repoint(code, &masm);
}
|
437 |
|
void
IonCache::initializeAddCacheState(LInstruction *ins, AddCacheState *addState)
{
    // Default: no per-cache state needed. Subclasses that require extra
    // registers (e.g. a dispatch scratch register) override this.
}
|
442 |
|
443 static bool |
|
444 IsCacheableDOMProxy(JSObject *obj) |
|
445 { |
|
446 if (!obj->is<ProxyObject>()) |
|
447 return false; |
|
448 |
|
449 BaseProxyHandler *handler = obj->as<ProxyObject>().handler(); |
|
450 |
|
451 if (handler->family() != GetDOMProxyHandlerFamily()) |
|
452 return false; |
|
453 |
|
454 if (obj->numFixedSlots() <= GetDOMProxyExpandoSlot()) |
|
455 return false; |
|
456 |
|
457 return true; |
|
458 } |
|
459 |
|
// Emit guards ensuring the prototype chain between |obj| (exclusive) and
// |holder| (exclusive) has not been swapped out from under the stub.
static void
GeneratePrototypeGuards(JSContext *cx, IonScript *ion, MacroAssembler &masm, JSObject *obj,
                        JSObject *holder, Register objectReg, Register scratchReg,
                        Label *failures)
{
    /* The guards here protect against the effects of TradeGuts(). If the prototype chain
     * is directly altered, then TI will toss the jitcode, so we don't have to worry about
     * it, and any other change to the holder, or adding a shadowing property will result
     * in reshaping the holder, and thus the failure of the shape guard.
     */
    JS_ASSERT(obj != holder);

    if (obj->hasUncacheableProto()) {
        // Note: objectReg and scratchReg may be the same register, so we cannot
        // use objectReg in the rest of this function.
        masm.loadPtr(Address(objectReg, JSObject::offsetOfType()), scratchReg);
        Address proto(scratchReg, types::TypeObject::offsetOfProto());
        masm.branchNurseryPtr(Assembler::NotEqual, proto,
                              ImmMaybeNurseryPtr(obj->getProto()), failures);
    }

    // For DOM proxies start the walk from the tagged proto; otherwise use
    // the ordinary prototype.
    JSObject *pobj = IsCacheableDOMProxy(obj)
                     ? obj->getTaggedProto().toObjectOrNull()
                     : obj->getProto();
    if (!pobj)
        return;
    while (pobj != holder) {
        if (pobj->hasUncacheableProto()) {
            // Guard the intermediate object's type (which pins its proto);
            // singletons cannot have uncacheable protos here.
            JS_ASSERT(!pobj->hasSingletonType());
            masm.moveNurseryPtr(ImmMaybeNurseryPtr(pobj), scratchReg);
            Address objType(scratchReg, JSObject::offsetOfType());
            masm.branchPtr(Assembler::NotEqual, objType, ImmGCPtr(pobj->type()), failures);
        }
        pobj = pobj->getProto();
    }
}
|
496 |
|
497 static bool |
|
498 IsCacheableProtoChain(JSObject *obj, JSObject *holder) |
|
499 { |
|
500 while (obj != holder) { |
|
501 /* |
|
502 * We cannot assume that we find the holder object on the prototype |
|
503 * chain and must check for null proto. The prototype chain can be |
|
504 * altered during the lookupProperty call. |
|
505 */ |
|
506 JSObject *proto = obj->getProto(); |
|
507 if (!proto || !proto->isNative()) |
|
508 return false; |
|
509 obj = proto; |
|
510 } |
|
511 return true; |
|
512 } |
|
513 |
|
514 static bool |
|
515 IsCacheableGetPropReadSlot(JSObject *obj, JSObject *holder, Shape *shape) |
|
516 { |
|
517 if (!shape || !IsCacheableProtoChain(obj, holder)) |
|
518 return false; |
|
519 |
|
520 if (!shape->hasSlot() || !shape->hasDefaultGetter()) |
|
521 return false; |
|
522 |
|
523 return true; |
|
524 } |
|
525 |
|
// Decide whether a failed lookup (property absent everywhere on the chain)
// can be cached as "missing property returns undefined".
static bool
IsCacheableNoProperty(JSObject *obj, JSObject *holder, Shape *shape, jsbytecode *pc,
                      const TypedOrValueRegister &output)
{
    // A shape means the property was found, so this is not the missing case.
    if (shape)
        return false;

    JS_ASSERT(!holder);

    // Just because we didn't find the property on the object doesn't mean it
    // won't magically appear through various engine hacks:
    if (obj->getClass()->getProperty && obj->getClass()->getProperty != JS_PropertyStub)
        return false;

    // Don't generate missing property ICs if we skipped a non-native object, as
    // lookups may extend beyond the prototype chain (e.g. for DOMProxy
    // proxies).
    JSObject *obj2 = obj;
    while (obj2) {
        if (!obj2->isNative())
            return false;
        obj2 = obj2->getProto();
    }

    // The pc is nullptr if the cache is idempotent. We cannot share missing
    // properties between caches because TI can only try to prove that a type is
    // contained, but does not attempt to check whether something does not
    // exist. So the inferred type of getprop would be missing and would not
    // contain undefined, as expected for missing properties.
    if (!pc)
        return false;

#if JS_HAS_NO_SUCH_METHOD
    // The __noSuchMethod__ hook may substitute in a valid method. Since,
    // if o.m is missing, o.m() will probably be an error, just mark all
    // missing callprops as uncacheable.
    if (JSOp(*pc) == JSOP_CALLPROP ||
        JSOp(*pc) == JSOP_CALLELEM)
    {
        return false;
    }
#endif

    // TI has not yet monitored an Undefined value. The fallback path will
    // monitor and invalidate the script.
    if (!output.hasValue())
        return false;

    return true;
}
|
576 |
|
577 static bool |
|
578 IsOptimizableArgumentsObjectForLength(JSObject *obj) |
|
579 { |
|
580 if (!obj->is<ArgumentsObject>()) |
|
581 return false; |
|
582 |
|
583 if (obj->as<ArgumentsObject>().hasOverriddenLength()) |
|
584 return false; |
|
585 |
|
586 return true; |
|
587 } |
|
588 |
|
589 static bool |
|
590 IsOptimizableArgumentsObjectForGetElem(JSObject *obj, Value idval) |
|
591 { |
|
592 if (!IsOptimizableArgumentsObjectForLength(obj)) |
|
593 return false; |
|
594 |
|
595 ArgumentsObject &argsObj = obj->as<ArgumentsObject>(); |
|
596 |
|
597 if (argsObj.isAnyElementDeleted()) |
|
598 return false; |
|
599 |
|
600 if (!idval.isInt32()) |
|
601 return false; |
|
602 |
|
603 int32_t idint = idval.toInt32(); |
|
604 if (idint < 0 || static_cast<uint32_t>(idint) >= argsObj.initialLength()) |
|
605 return false; |
|
606 |
|
607 return true; |
|
608 } |
|
609 |
|
610 static bool |
|
611 IsCacheableGetPropCallNative(JSObject *obj, JSObject *holder, Shape *shape) |
|
612 { |
|
613 if (!shape || !IsCacheableProtoChain(obj, holder)) |
|
614 return false; |
|
615 |
|
616 if (!shape->hasGetterValue() || !shape->getterValue().isObject()) |
|
617 return false; |
|
618 |
|
619 if (!shape->getterValue().toObject().is<JSFunction>()) |
|
620 return false; |
|
621 |
|
622 JSFunction& getter = shape->getterValue().toObject().as<JSFunction>(); |
|
623 if (!getter.isNative()) |
|
624 return false; |
|
625 |
|
626 // Check for a getter that has jitinfo and whose jitinfo says it's |
|
627 // OK with both inner and outer objects. |
|
628 if (getter.jitInfo() && !getter.jitInfo()->needsOuterizedThisObject()) |
|
629 return true; |
|
630 |
|
631 // For getters that need an outerized this object, don't cache if |
|
632 // obj has an outerObject hook, since our cache will pass obj |
|
633 // itself without outerizing. |
|
634 return !obj->getClass()->ext.outerObject; |
|
635 } |
|
636 |
|
637 static bool |
|
638 IsCacheableGetPropCallPropertyOp(JSObject *obj, JSObject *holder, Shape *shape) |
|
639 { |
|
640 if (!shape || !IsCacheableProtoChain(obj, holder)) |
|
641 return false; |
|
642 |
|
643 if (shape->hasSlot() || shape->hasGetterValue() || shape->hasDefaultGetter()) |
|
644 return false; |
|
645 |
|
646 return true; |
|
647 } |
|
648 |
|
// Emit a load of |shape|'s slot on |holder| (held in holderReg) into
// |output|. |scratchReg| is clobbered only for dynamic-slot loads.
static inline void
EmitLoadSlot(MacroAssembler &masm, JSObject *holder, Shape *shape, Register holderReg,
             TypedOrValueRegister output, Register scratchReg)
{
    JS_ASSERT(holder);
    if (holder->isFixedSlot(shape->slot())) {
        // Fixed slots live inline in the object.
        Address addr(holderReg, JSObject::getFixedSlotOffset(shape->slot()));
        masm.loadTypedOrValue(addr, output);
    } else {
        // Dynamic slots live in a separate array; load its base first.
        masm.loadPtr(Address(holderReg, JSObject::offsetOfSlots()), scratchReg);

        Address addr(scratchReg, holder->dynamicSlotIndex(shape->slot()) * sizeof(Value));
        masm.loadTypedOrValue(addr, output);
    }
}
|
664 |
|
// Emit guards validating that |object| is the same kind of DOM proxy as
// |obj| and that no expando property could shadow |name|. Jumps to
// |stubFailure| when the guards fail.
static void
GenerateDOMProxyChecks(JSContext *cx, MacroAssembler &masm, JSObject *obj,
                       PropertyName *name, Register object, Label *stubFailure,
                       bool skipExpandoCheck = false)
{
    JS_ASSERT(IsCacheableDOMProxy(obj));

    // Guard the following:
    //      1. The object is a DOMProxy.
    //      2. The object does not have expando properties, or has an expando
    //          which is known to not have the desired property.
    Address handlerAddr(object, ProxyObject::offsetOfHandler());
    Address expandoSlotAddr(object, JSObject::getFixedSlotOffset(GetDOMProxyExpandoSlot()));

    // Check that object is a DOMProxy.
    masm.branchPrivatePtr(Assembler::NotEqual, handlerAddr,
                          ImmPtr(obj->as<ProxyObject>().handler()), stubFailure);

    if (skipExpandoCheck)
        return;

    // For the remaining code, we need to reserve some registers to load a value.
    // This is ugly, but unavoidable.
    RegisterSet domProxyRegSet(RegisterSet::All());
    domProxyRegSet.take(AnyRegister(object));
    ValueOperand tempVal = domProxyRegSet.takeValueOperand();
    masm.pushValue(tempVal);

    Label failDOMProxyCheck;
    Label domProxyOk;

    Value expandoVal = obj->getFixedSlot(GetDOMProxyExpandoSlot());
    masm.loadValue(expandoSlotAddr, tempVal);

    if (!expandoVal.isObject() && !expandoVal.isUndefined()) {
        // The slot holds a private ExpandoAndGeneration: guard that the
        // incoming object has the same one and that its generation has not
        // changed since this stub was generated.
        masm.branchTestValue(Assembler::NotEqual, tempVal, expandoVal, &failDOMProxyCheck);

        ExpandoAndGeneration *expandoAndGeneration = (ExpandoAndGeneration*)expandoVal.toPrivate();
        masm.movePtr(ImmPtr(expandoAndGeneration), tempVal.scratchReg());

        masm.branch32(Assembler::NotEqual,
                      Address(tempVal.scratchReg(),
                              ExpandoAndGeneration::offsetOfGeneration()),
                      Imm32(expandoAndGeneration->generation),
                      &failDOMProxyCheck);

        // From here on, check the actual expando stored inside the
        // ExpandoAndGeneration instead of the raw slot value.
        expandoVal = expandoAndGeneration->expando;
        masm.loadValue(Address(tempVal.scratchReg(),
                               ExpandoAndGeneration::offsetOfExpando()),
                       tempVal);
    }

    // If the incoming object does not have an expando object then we're sure we're not
    // shadowing.
    masm.branchTestUndefined(Assembler::Equal, tempVal, &domProxyOk);

    if (expandoVal.isObject()) {
        JS_ASSERT(!expandoVal.toObject().nativeContains(cx, name));

        // Reference object has an expando object that doesn't define the name. Check that
        // the incoming object has an expando object with the same shape.
        masm.branchTestObject(Assembler::NotEqual, tempVal, &failDOMProxyCheck);
        masm.extractObject(tempVal, tempVal.scratchReg());
        masm.branchPtr(Assembler::Equal,
                       Address(tempVal.scratchReg(), JSObject::offsetOfShape()),
                       ImmGCPtr(expandoVal.toObject().lastProperty()),
                       &domProxyOk);
    }

    // Failure case: restore the tempVal registers and jump to failures.
    masm.bind(&failDOMProxyCheck);
    masm.popValue(tempVal);
    masm.jump(stubFailure);

    // Success case: restore the tempval and proceed.
    masm.bind(&domProxyOk);
    masm.popValue(tempVal);
}
|
743 |
|
// Emit a stub that reads |shape|'s slot from |holder| into |output|, after
// guarding the shape of |obj| and (when obj != holder) the prototype chain
// up to the holder. A null |shape|/|holder| pair emits the missing-property
// path that loads undefined.
static void
GenerateReadSlot(JSContext *cx, IonScript *ion, MacroAssembler &masm,
                 IonCache::StubAttacher &attacher, JSObject *obj, JSObject *holder,
                 Shape *shape, Register object, TypedOrValueRegister output,
                 Label *failures = nullptr)
{
    JS_ASSERT(obj->isNative());
    // If there's a single jump to |failures|, we can patch the shape guard
    // jump directly. Otherwise, jump to the end of the stub, so there's a
    // common point to patch.
    bool multipleFailureJumps = (obj != holder) || (failures != nullptr && failures->used());

    // If we have multiple failure jumps but didn't get a label from the
    // outside, make one ourselves.
    Label failures_;
    if (multipleFailureJumps && !failures)
        failures = &failures_;

    // Guard on the shape of the object.
    attacher.branchNextStubOrLabel(masm, Assembler::NotEqual,
                                   Address(object, JSObject::offsetOfShape()),
                                   ImmGCPtr(obj->lastProperty()),
                                   failures);

    // If we need a scratch register, use either an output register or the
    // object register. After this point, we cannot jump directly to
    // |failures| since we may still have to pop the object register.
    bool restoreScratch = false;
    Register scratchReg = Register::FromCode(0); // Quell compiler warning.

    if (obj != holder || !holder->isFixedSlot(shape->slot())) {
        if (output.hasValue()) {
            scratchReg = output.valueReg().scratchReg();
        } else if (output.type() == MIRType_Double) {
            // No integer output register available: borrow the object
            // register and restore it before rejoining/failing.
            scratchReg = object;
            masm.push(scratchReg);
            restoreScratch = true;
        } else {
            scratchReg = output.typedReg().gpr();
        }
    }

    // Fast path: single failure jump, no prototype guards.
    if (!multipleFailureJumps) {
        EmitLoadSlot(masm, holder, shape, object, output, scratchReg);
        if (restoreScratch)
            masm.pop(scratchReg);
        attacher.jumpRejoin(masm);
        return;
    }

    // Slow path: multiple jumps; generate prototype guards.
    Label prototypeFailures;
    Register holderReg;
    if (obj != holder) {
        // Note: this may clobber the object register if it's used as scratch.
        GeneratePrototypeGuards(cx, ion, masm, obj, holder, object, scratchReg,
                                &prototypeFailures);

        if (holder) {
            // Guard on the holder's shape.
            holderReg = scratchReg;
            masm.moveNurseryPtr(ImmMaybeNurseryPtr(holder), holderReg);
            masm.branchPtr(Assembler::NotEqual,
                           Address(holderReg, JSObject::offsetOfShape()),
                           ImmGCPtr(holder->lastProperty()),
                           &prototypeFailures);
        } else {
            // The property does not exist. Guard on everything in the
            // prototype chain.
            JSObject *proto = obj->getTaggedProto().toObjectOrNull();
            Register lastReg = object;
            JS_ASSERT(scratchReg != object);
            while (proto) {
                masm.loadObjProto(lastReg, scratchReg);

                // Guard the shape of the current prototype.
                masm.branchPtr(Assembler::NotEqual,
                               Address(scratchReg, JSObject::offsetOfShape()),
                               ImmGCPtr(proto->lastProperty()),
                               &prototypeFailures);

                proto = proto->getProto();
                lastReg = scratchReg;
            }

            holderReg = InvalidReg;
        }
    } else {
        holderReg = object;
    }

    // Slot access.
    if (holder)
        EmitLoadSlot(masm, holder, shape, holderReg, output, scratchReg);
    else
        // Missing property: the result is undefined.
        masm.moveValue(UndefinedValue(), output.valueReg());

    // Restore scratch on success.
    if (restoreScratch)
        masm.pop(scratchReg);

    attacher.jumpRejoin(masm);

    // Failure paths: undo the scratch push before falling through to the
    // shared failure label and the next-stub jump.
    masm.bind(&prototypeFailures);
    if (restoreScratch)
        masm.pop(scratchReg);
    masm.bind(failures);

    attacher.jumpNextStub(masm);
}
|
856 |
|
// Emit the code to invoke the getter for |shape| (found on |holder|, reached
// from |obj|) and leave the fetched value in |output|. Two getter flavors are
// handled: a native getter function (JSNative) and a raw PropertyOp hook.
// In both cases the live register state is saved around the call and a fake
// exit frame is pushed so the stack remains walkable from within the callee.
// Returns false on OOM while building the fake exit frame.
static bool
EmitGetterCall(JSContext *cx, MacroAssembler &masm,
               IonCache::StubAttacher &attacher, JSObject *obj,
               JSObject *holder, HandleShape shape,
               RegisterSet liveRegs, Register object,
               Register scratchReg, TypedOrValueRegister output,
               void *returnAddr)
{
    JS_ASSERT(output.hasValue());
    // Save live registers; restored by icRestoreLive at the end.
    MacroAssembler::AfterICSaveLive aic = masm.icSaveLive(liveRegs);

    // Remaining registers should basically be free, but we need to use |object| still
    // so leave it alone.
    RegisterSet regSet(RegisterSet::All());
    regSet.take(AnyRegister(object));

    // This is a slower stub path, and we're going to be doing a call anyway. Don't need
    // to try so hard to not use the stack. Scratch regs are just taken from the register
    // set not including the input, current value saved on the stack, and restored when
    // we're done with it.
    scratchReg = regSet.takeGeneral();
    Register argJSContextReg = regSet.takeGeneral();
    Register argUintNReg = regSet.takeGeneral();
    Register argVpReg = regSet.takeGeneral();

    // Shape has a getter function.
    bool callNative = IsCacheableGetPropCallNative(obj, holder, shape);
    JS_ASSERT_IF(!callNative, IsCacheableGetPropCallPropertyOp(obj, holder, shape));

    if (callNative) {
        JS_ASSERT(shape->hasGetterValue() && shape->getterValue().isObject() &&
                  shape->getterValue().toObject().is<JSFunction>());
        JSFunction *target = &shape->getterValue().toObject().as<JSFunction>();

        JS_ASSERT(target);
        JS_ASSERT(target->isNative());

        // Native functions have the signature:
        //  bool (*)(JSContext *, unsigned, Value *vp)
        // Where vp[0] is space for an outparam, vp[1] is |this|, and vp[2] onward
        // are the function arguments.

        // Construct vp array:
        // Push object value for |this|
        masm.Push(TypedOrValueRegister(MIRType_Object, AnyRegister(object)));
        // Push callee/outparam.
        masm.Push(ObjectValue(*target));

        // Preload arguments into registers.
        masm.loadJSContext(argJSContextReg);
        masm.move32(Imm32(0), argUintNReg);
        masm.movePtr(StackPointer, argVpReg);

        // Push marking data for later use.
        masm.Push(argUintNReg);
        attacher.pushStubCodePointer(masm);

        if (!masm.icBuildOOLFakeExitFrame(returnAddr, aic))
            return false;
        masm.enterFakeExitFrame(ION_FRAME_OOL_NATIVE);

        // Construct and execute call.
        masm.setupUnalignedABICall(3, scratchReg);
        masm.passABIArg(argJSContextReg);
        masm.passABIArg(argUintNReg);
        masm.passABIArg(argVpReg);
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, target->native()));

        // Test for failure.
        masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());

        // Load the outparam vp[0] into output register(s).
        Address outparam(StackPointer, IonOOLNativeExitFrameLayout::offsetOfResult());
        masm.loadTypedOrValue(outparam, output);

        // masm.leaveExitFrame & pop locals
        masm.adjustStack(IonOOLNativeExitFrameLayout::Size(0));
    } else {
        // PropertyOp path. The uintN argument register is reused for the
        // object handle since the two calls never coexist.
        Register argObjReg = argUintNReg;
        Register argIdReg = regSet.takeGeneral();

        PropertyOp target = shape->getterOp();
        JS_ASSERT(target);

        // Push stubCode for marking.
        attacher.pushStubCodePointer(masm);

        // JSPropertyOp: bool fn(JSContext *cx, HandleObject obj, HandleId id, MutableHandleValue vp)

        // Push args on stack first so we can take pointers to make handles.
        masm.Push(UndefinedValue());
        masm.movePtr(StackPointer, argVpReg);

        // push canonical jsid from shape instead of propertyname.
        masm.Push(shape->propid(), scratchReg);
        masm.movePtr(StackPointer, argIdReg);

        masm.Push(object);
        masm.movePtr(StackPointer, argObjReg);

        masm.loadJSContext(argJSContextReg);

        if (!masm.icBuildOOLFakeExitFrame(returnAddr, aic))
            return false;
        masm.enterFakeExitFrame(ION_FRAME_OOL_PROPERTY_OP);

        // Make the call.
        masm.setupUnalignedABICall(4, scratchReg);
        masm.passABIArg(argJSContextReg);
        masm.passABIArg(argObjReg);
        masm.passABIArg(argIdReg);
        masm.passABIArg(argVpReg);
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, target));

        // Test for failure.
        masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());

        // Load the outparam vp[0] into output register(s).
        Address outparam(StackPointer, IonOOLPropertyOpExitFrameLayout::offsetOfResult());
        masm.loadTypedOrValue(outparam, output);

        // masm.leaveExitFrame & pop locals.
        masm.adjustStack(IonOOLPropertyOpExitFrameLayout::Size());
    }

    // Restore the register state saved on entry.
    masm.icRestoreLive(liveRegs, aic);
    return true;
}
|
985 |
|
// Generate a getter-call stub: guard on the receiver's shape, guard the
// prototype chain if the property lives on a different holder, guard the
// holder's shape, then emit the actual call via EmitGetterCall. If |failures|
// is null a local label is used for the failure path. Returns false on OOM.
static bool
GenerateCallGetter(JSContext *cx, IonScript *ion, MacroAssembler &masm,
                   IonCache::StubAttacher &attacher, JSObject *obj, PropertyName *name,
                   JSObject *holder, HandleShape shape, RegisterSet &liveRegs, Register object,
                   TypedOrValueRegister output, void *returnAddr, Label *failures = nullptr)
{
    JS_ASSERT(obj->isNative());
    JS_ASSERT(output.hasValue());

    // Use the passed in label if there was one. Otherwise, we'll have to make our own.
    Label stubFailure;
    failures = failures ? failures : &stubFailure;

    // Initial shape check.
    masm.branchPtr(Assembler::NotEqual, Address(object, JSObject::offsetOfShape()),
                   ImmGCPtr(obj->lastProperty()), failures);

    Register scratchReg = output.valueReg().scratchReg();

    // Note: this may clobber the object register if it's used as scratch.
    if (obj != holder)
        GeneratePrototypeGuards(cx, ion, masm, obj, holder, object, scratchReg, failures);

    // Guard on the holder's shape.
    Register holderReg = scratchReg;
    masm.moveNurseryPtr(ImmMaybeNurseryPtr(holder), holderReg);
    masm.branchPtr(Assembler::NotEqual,
                   Address(holderReg, JSObject::offsetOfShape()),
                   ImmGCPtr(holder->lastProperty()),
                   failures);

    // Now we're good to go to invoke the native call.
    if (!EmitGetterCall(cx, masm, attacher, obj, holder, shape, liveRegs, object,
                        scratchReg, output, returnAddr))
        return false;

    // Rejoin jump.
    attacher.jumpRejoin(masm);

    // Jump to next stub.
    masm.bind(failures);
    attacher.jumpNextStub(masm);

    return true;
}
|
1031 |
|
// Emit a stub that reads the length of a dense ArrayObject: guard on the
// object's shape, load the elements vector, load the uint32 length, and bail
// to the next stub if the length does not fit in a signed Int32 (the value
// encoding / typed output require a signed int). Returns false on OOM.
static bool
GenerateArrayLength(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &attacher,
                    JSObject *obj, Register object, TypedOrValueRegister output)
{
    JS_ASSERT(obj->is<ArrayObject>());

    Label failures;

    // Guard object is a dense array.
    RootedShape shape(cx, obj->lastProperty());
    if (!shape)
        return false;
    masm.branchTestObjShape(Assembler::NotEqual, object, shape, &failures);

    // Load length. Pick a register that can hold the intermediate value:
    // the boxed output's scratch register, or the typed Int32 gpr directly.
    Register outReg;
    if (output.hasValue()) {
        outReg = output.valueReg().scratchReg();
    } else {
        JS_ASSERT(output.type() == MIRType_Int32);
        outReg = output.typedReg().gpr();
    }

    masm.loadPtr(Address(object, JSObject::offsetOfElements()), outReg);
    masm.load32(Address(outReg, ObjectElements::offsetOfLength()), outReg);

    // The length is an unsigned int, but the value encodes a signed int.
    JS_ASSERT(object != outReg);
    masm.branchTest32(Assembler::Signed, outReg, outReg, &failures);

    if (output.hasValue())
        masm.tagValue(JSVAL_TYPE_INT32, outReg, output.valueReg());

    /* Success. */
    attacher.jumpRejoin(masm);

    /* Failure. */
    masm.bind(&failures);
    attacher.jumpNextStub(masm);

    return true;
}
|
1074 |
|
// Emit a stub that reads the length of a typed array. Instead of a shape
// guard, the object's Class is range-checked against the table of typed
// array classes, so one stub services every typed array kind.
static void
GenerateTypedArrayLength(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &attacher,
                         JSObject *obj, Register object, TypedOrValueRegister output)
{
    JS_ASSERT(obj->is<TypedArrayObject>());

    Label failures;

    // Pick a temporary: the boxed output's scratch register, or the typed
    // Int32 gpr directly.
    Register tmpReg;
    if (output.hasValue()) {
        tmpReg = output.valueReg().scratchReg();
    } else {
        JS_ASSERT(output.type() == MIRType_Int32);
        tmpReg = output.typedReg().gpr();
    }
    JS_ASSERT(object != tmpReg);

    // Implement the negated version of JSObject::isTypedArray predicate.
    masm.loadObjClass(object, tmpReg);
    masm.branchPtr(Assembler::Below, tmpReg, ImmPtr(&TypedArrayObject::classes[0]),
                   &failures);
    masm.branchPtr(Assembler::AboveOrEqual, tmpReg,
                   ImmPtr(&TypedArrayObject::classes[ScalarTypeDescr::TYPE_MAX]),
                   &failures);

    // Load length.
    masm.loadTypedOrValue(Address(object, TypedArrayObject::lengthOffset()), output);

    /* Success. */
    attacher.jumpRejoin(masm);

    /* Failure. */
    masm.bind(&failures);
    attacher.jumpNextStub(masm);
}
|
1110 |
|
1111 static bool |
|
1112 IsCacheableArrayLength(JSContext *cx, HandleObject obj, HandlePropertyName name, |
|
1113 TypedOrValueRegister output) |
|
1114 { |
|
1115 if (!obj->is<ArrayObject>()) |
|
1116 return false; |
|
1117 |
|
1118 if (output.type() != MIRType_Value && output.type() != MIRType_Int32) { |
|
1119 // The stub assumes that we always output Int32, so make sure our output |
|
1120 // is equipped to handle that. |
|
1121 return false; |
|
1122 } |
|
1123 |
|
1124 return true; |
|
1125 } |
|
1126 |
|
// Decide which kind of stub (if any) can be attached for a native getprop of
// |name| on |obj|. On a positive answer, |holder| and |shape| describe where
// the property was found. Templated on the cache type so it is shared between
// sequential and parallel caches (GetPropCache::Context differs). The
// |skipArrayLen| flag lets callers opt out of the array-length
// specialization and fall through to the getter-call path instead.
template <class GetPropCache>
static GetPropertyIC::NativeGetPropCacheability
CanAttachNativeGetProp(typename GetPropCache::Context cx, const GetPropCache &cache,
                       HandleObject obj, HandlePropertyName name,
                       MutableHandleObject holder, MutableHandleShape shape,
                       bool skipArrayLen = false)
{
    if (!obj || !obj->isNative())
        return GetPropertyIC::CanAttachNone;

    // The lookup needs to be universally pure, otherwise we risk calling hooks out
    // of turn. We don't mind doing this even when purity isn't required, because we
    // only miss out on shape hashification, which is only a temporary perf cost.
    // The limits were arbitrarily set, anyways.
    if (!LookupPropertyPure(obj, NameToId(name), holder.address(), shape.address()))
        return GetPropertyIC::CanAttachNone;

    RootedScript script(cx);
    jsbytecode *pc;
    cache.getScriptedLocation(&script, &pc);
    if (IsCacheableGetPropReadSlot(obj, holder, shape) ||
        IsCacheableNoProperty(obj, holder, shape, pc, cache.output()))
    {
        return GetPropertyIC::CanAttachReadSlot;
    }

    // |length| is a non-configurable getter property on ArrayObjects. Any time this
    // check would have passed, we can install a getter stub instead. Allow people to
    // make that decision themselves with skipArrayLen
    if (!skipArrayLen && cx->names().length == name && cache.allowArrayLength(cx, obj) &&
        IsCacheableArrayLength(cx, obj, name, cache.output()))
    {
        // The array length property is non-configurable, which means both that
        // checking the class of the object and the name of the property is enough
        // and that we don't need to worry about monitoring, since we know the
        // return type statically.
        return GetPropertyIC::CanAttachArrayLength;
    }

    // IonBuilder guarantees that it's impossible to generate a GetPropertyIC with
    // allowGetters() true and cache.output().hasValue() false. If this isn't true,
    // we will quickly assert during stub generation.
    if (cache.allowGetters() &&
        (IsCacheableGetPropCallNative(obj, holder, shape) ||
         IsCacheableGetPropCallPropertyOp(obj, holder, shape)))
    {
        // Don't enable getter call if cache is parallel or idempotent, since
        // they can be effectful. This is handled by allowGetters()
        return GetPropertyIC::CanAttachCallGetter;
    }

    return GetPropertyIC::CanAttachNone;
}
|
1180 |
|
// Whether this cache may attach an array-length stub. Non-idempotent caches
// always may. Idempotent caches cannot monitor results, so the stub is only
// allowed when every bytecode location sharing this cache has already
// observed an Int32 result in its type set.
bool
GetPropertyIC::allowArrayLength(Context cx, HandleObject obj) const
{
    if (!idempotent())
        return true;

    uint32_t locationIndex, numLocations;
    getLocationInfo(&locationIndex, &numLocations);

    // Walk all (script, pc) locations folded into this idempotent cache.
    IonScript *ion = GetTopIonJSScript(cx)->ionScript();
    CacheLocation *locs = ion->getCacheLocs(locationIndex);
    for (size_t i = 0; i < numLocations; i++) {
        CacheLocation &curLoc = locs[i];
        types::StackTypeSet *bcTypes =
            types::TypeScript::BytecodeTypes(curLoc.script, curLoc.pc);

        // Attaching would produce an Int32 this location has never seen.
        if (!bcTypes->hasType(types::Type::Int32Type()))
            return false;
    }

    return true;
}
|
1203 |
|
// Try to attach a stub for a property access on a native object: a direct
// slot read, a getter call, or the dense-array length specialization,
// depending on what CanAttachNativeGetProp determines. Sets *emitted when a
// stub was generated; returns false only on hard failure (e.g. OOM).
bool
GetPropertyIC::tryAttachNative(JSContext *cx, IonScript *ion, HandleObject obj,
                               HandlePropertyName name, void *returnAddr, bool *emitted)
{
    JS_ASSERT(canAttachStub());
    JS_ASSERT(!*emitted);

    RootedShape shape(cx);
    RootedObject holder(cx);

    NativeGetPropCacheability type =
        CanAttachNativeGetProp(cx, *this, obj, name, &holder, &shape);
    if (type == CanAttachNone)
        return true;

    *emitted = true;

    MacroAssembler masm(cx, ion, script_, pc_);

    RepatchStubAppender attacher(*this);
    const char *attachKind;

    switch (type) {
      case CanAttachReadSlot:
        GenerateReadSlot(cx, ion, masm, attacher, obj, holder,
                         shape, object(), output());
        attachKind = idempotent() ? "idempotent reading"
                                  : "non idempotent reading";
        break;
      case CanAttachCallGetter:
        if (!GenerateCallGetter(cx, ion, masm, attacher, obj, name, holder, shape,
                                liveRegs_, object(), output(), returnAddr))
        {
            return false;
        }
        attachKind = "getter call";
        break;
      case CanAttachArrayLength:
        if (!GenerateArrayLength(cx, masm, attacher, obj, object(), output()))
            return false;

        attachKind = "array length";
        break;
      default:
        MOZ_ASSUME_UNREACHABLE("Bad NativeGetPropCacheability");
    }
    return linkAndAttachStub(cx, masm, attacher, ion, attachKind);
}
|
1252 |
|
1253 bool |
|
1254 GetPropertyIC::tryAttachTypedArrayLength(JSContext *cx, IonScript *ion, HandleObject obj, |
|
1255 HandlePropertyName name, bool *emitted) |
|
1256 { |
|
1257 JS_ASSERT(canAttachStub()); |
|
1258 JS_ASSERT(!*emitted); |
|
1259 |
|
1260 if (!obj->is<TypedArrayObject>()) |
|
1261 return true; |
|
1262 |
|
1263 if (cx->names().length != name) |
|
1264 return true; |
|
1265 |
|
1266 if (hasTypedArrayLengthStub()) |
|
1267 return true; |
|
1268 |
|
1269 if (output().type() != MIRType_Value && output().type() != MIRType_Int32) { |
|
1270 // The next execution should cause an invalidation because the type |
|
1271 // does not fit. |
|
1272 return true; |
|
1273 } |
|
1274 |
|
1275 if (idempotent()) |
|
1276 return true; |
|
1277 |
|
1278 *emitted = true; |
|
1279 |
|
1280 MacroAssembler masm(cx, ion); |
|
1281 RepatchStubAppender attacher(*this); |
|
1282 GenerateTypedArrayLength(cx, masm, attacher, obj, object(), output()); |
|
1283 |
|
1284 JS_ASSERT(!hasTypedArrayLengthStub_); |
|
1285 hasTypedArrayLengthStub_ = true; |
|
1286 return linkAndAttachStub(cx, masm, attacher, ion, "typed array length"); |
|
1287 } |
|
1288 |
|
1289 |
|
// Emit a call through the proxy get machinery (Proxy::get, or Proxy::callProp
// at JSOP_CALLPROP sites) for the object in |object|, leaving the fetched
// value in |output|. Live registers are saved/restored around the call and a
// fake exit frame keeps the stack walkable. Returns false on OOM while
// building the fake exit frame.
static bool
EmitCallProxyGet(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &attacher,
                 PropertyName *name, RegisterSet liveRegs, Register object,
                 TypedOrValueRegister output, jsbytecode *pc, void *returnAddr)
{
    JS_ASSERT(output.hasValue());
    MacroAssembler::AfterICSaveLive aic = masm.icSaveLive(liveRegs);

    // Remaining registers should be free, but we need to use |object| still
    // so leave it alone.
    RegisterSet regSet(RegisterSet::All());
    regSet.take(AnyRegister(object));

    // Proxy::get(JSContext *cx, HandleObject proxy, HandleObject receiver, HandleId id,
    //            MutableHandleValue vp)
    Register argJSContextReg = regSet.takeGeneral();
    Register argProxyReg = regSet.takeGeneral();
    Register argIdReg = regSet.takeGeneral();
    Register argVpReg = regSet.takeGeneral();

    Register scratch = regSet.takeGeneral();

    // JSOP_CALLPROP sites go through the callProp entry point; all other gets
    // use Proxy::get.
    void *getFunction = JSOp(*pc) == JSOP_CALLPROP              ?
                            JS_FUNC_TO_DATA_PTR(void *, Proxy::callProp) :
                            JS_FUNC_TO_DATA_PTR(void *, Proxy::get);

    // Push stubCode for marking.
    attacher.pushStubCodePointer(masm);

    // Push args on stack first so we can take pointers to make handles.
    masm.Push(UndefinedValue());
    masm.movePtr(StackPointer, argVpReg);

    RootedId propId(cx, AtomToId(name));
    masm.Push(propId, scratch);
    masm.movePtr(StackPointer, argIdReg);

    // Pushing object and receiver. Both are the same, so Handle to one is equivalent to
    // handle to other.
    masm.Push(object);
    masm.Push(object);
    masm.movePtr(StackPointer, argProxyReg);

    masm.loadJSContext(argJSContextReg);

    if (!masm.icBuildOOLFakeExitFrame(returnAddr, aic))
        return false;
    masm.enterFakeExitFrame(ION_FRAME_OOL_PROXY);

    // Make the call. argProxyReg is passed twice: once as the proxy, once as
    // the receiver (see the note above).
    masm.setupUnalignedABICall(5, scratch);
    masm.passABIArg(argJSContextReg);
    masm.passABIArg(argProxyReg);
    masm.passABIArg(argProxyReg);
    masm.passABIArg(argIdReg);
    masm.passABIArg(argVpReg);
    masm.callWithABI(getFunction);

    // Test for failure.
    masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());

    // Load the outparam vp[0] into output register(s).
    Address outparam(StackPointer, IonOOLProxyExitFrameLayout::offsetOfResult());
    masm.loadTypedOrValue(outparam, output);

    // masm.leaveExitFrame & pop locals
    masm.adjustStack(IonOOLProxyExitFrameLayout::Size());

    masm.icRestoreLive(liveRegs, aic);
    return true;
}
|
1361 |
|
// Attach a stub for a DOM proxy that is known to shadow the property: guard
// on the object's shape, verify it really is the expected DOM proxy, then
// call through the proxy get trap. Skipped for idempotent caches since the
// trap may be effectful. Sets *emitted when a stub was generated.
bool
GetPropertyIC::tryAttachDOMProxyShadowed(JSContext *cx, IonScript *ion,
                                         HandleObject obj, void *returnAddr,
                                         bool *emitted)
{
    JS_ASSERT(canAttachStub());
    JS_ASSERT(!*emitted);
    JS_ASSERT(IsCacheableDOMProxy(obj));
    JS_ASSERT(monitoredResult());
    JS_ASSERT(output().hasValue());

    if (idempotent())
        return true;

    *emitted = true;

    Label failures;
    MacroAssembler masm(cx, ion, script_, pc_);
    RepatchStubAppender attacher(*this);

    // Guard on the shape of the object.
    attacher.branchNextStubOrLabel(masm, Assembler::NotEqual,
                                   Address(object(), JSObject::offsetOfShape()),
                                   ImmGCPtr(obj->lastProperty()),
                                   &failures);

    // Make sure object is a DOMProxy
    GenerateDOMProxyChecks(cx, masm, obj, name(), object(), &failures,
                           /*skipExpandoCheck=*/true);

    if (!EmitCallProxyGet(cx, masm, attacher, name(), liveRegs_, object(), output(),
                          pc(), returnAddr))
    {
        return false;
    }

    // Success.
    attacher.jumpRejoin(masm);

    // Failure.
    masm.bind(&failures);
    attacher.jumpNextStub(masm);

    return linkAndAttachStub(cx, masm, attacher, ion, "list base shadowed get");
}
|
1407 |
|
// Attach a stub for a DOM proxy that does NOT shadow the property, so the
// lookup proceeds on the proxy's prototype chain. If the property is found
// on a native holder, emit a slot load or getter call against it; otherwise
// fall back to a full proxy get trap call. |resetNeeded| indicates a
// DoesntShadowUnique answer, which requires flushing existing stubs first.
// Sets *emitted when a stub was generated; returns false on hard failure.
bool
GetPropertyIC::tryAttachDOMProxyUnshadowed(JSContext *cx, IonScript *ion, HandleObject obj,
                                           HandlePropertyName name, bool resetNeeded,
                                           void *returnAddr, bool *emitted)
{
    JS_ASSERT(canAttachStub());
    JS_ASSERT(!*emitted);
    JS_ASSERT(IsCacheableDOMProxy(obj));
    JS_ASSERT(monitoredResult());
    JS_ASSERT(output().hasValue());

    RootedObject checkObj(cx, obj->getTaggedProto().toObjectOrNull());
    RootedObject holder(cx);
    RootedShape shape(cx);

    // Look the property up starting at the proxy's prototype. Array-length
    // specialization is skipped; it does not apply behind a proxy.
    NativeGetPropCacheability canCache =
        CanAttachNativeGetProp(cx, *this, checkObj, name, &holder, &shape,
                               /* skipArrayLen = */true);
    JS_ASSERT(canCache != CanAttachArrayLength);

    if (canCache == CanAttachNone)
        return true;

    // Make sure we observe our invariants if we're gonna deoptimize.
    if (!holder && idempotent())
        return true;

    *emitted = true;

    if (resetNeeded) {
        // If we know that we have a DoesntShadowUnique object, then
        // we reset the cache to clear out an existing IC for the object
        // (if there is one). The generation is a constant in the generated
        // code and we will not have the same generation again for this
        // object, so the generation check in the existing IC would always
        // fail anyway.
        reset();
    }

    Label failures;
    MacroAssembler masm(cx, ion, script_, pc_);
    RepatchStubAppender attacher(*this);

    // Guard on the shape of the object.
    attacher.branchNextStubOrLabel(masm, Assembler::NotEqual,
                                   Address(object(), JSObject::offsetOfShape()),
                                   ImmGCPtr(obj->lastProperty()),
                                   &failures);

    // Make sure object is a DOMProxy proxy
    GenerateDOMProxyChecks(cx, masm, obj, name, object(), &failures);

    if (holder) {
        // Found the property on the prototype chain. Treat it like a native
        // getprop.
        Register scratchReg = output().valueReg().scratchReg();
        GeneratePrototypeGuards(cx, ion, masm, obj, holder, object(), scratchReg, &failures);

        // Rename scratch for clarity.
        Register holderReg = scratchReg;

        // Guard on the holder of the property
        masm.moveNurseryPtr(ImmMaybeNurseryPtr(holder), holderReg);
        masm.branchPtr(Assembler::NotEqual,
                       Address(holderReg, JSObject::offsetOfShape()),
                       ImmGCPtr(holder->lastProperty()),
                       &failures);

        if (canCache == CanAttachReadSlot) {
            EmitLoadSlot(masm, holder, shape, holderReg, output(), scratchReg);
        } else {
            // EmitGetterCall() expects |obj| to be the object the property is
            // on to do some checks. Since we actually looked at checkObj, and
            // no extra guards will be generated, we can just pass that instead.
            JS_ASSERT(canCache == CanAttachCallGetter);
            JS_ASSERT(!idempotent());
            if (!EmitGetterCall(cx, masm, attacher, checkObj, holder, shape, liveRegs_,
                                object(), scratchReg, output(), returnAddr))
            {
                return false;
            }
        }
    } else {
        // Property was not found on the prototype chain. Deoptimize down to
        // proxy get call
        JS_ASSERT(!idempotent());
        if (!EmitCallProxyGet(cx, masm, attacher, name, liveRegs_, object(), output(),
                              pc(), returnAddr))
        {
            return false;
        }
    }

    attacher.jumpRejoin(masm);
    masm.bind(&failures);
    attacher.jumpNextStub(masm);

    return linkAndAttachStub(cx, masm, attacher, ion, "unshadowed proxy get");
}
|
1507 |
|
1508 bool |
|
1509 GetPropertyIC::tryAttachProxy(JSContext *cx, IonScript *ion, HandleObject obj, |
|
1510 HandlePropertyName name, void *returnAddr, |
|
1511 bool *emitted) |
|
1512 { |
|
1513 JS_ASSERT(canAttachStub()); |
|
1514 JS_ASSERT(!*emitted); |
|
1515 |
|
1516 if (!obj->is<ProxyObject>()) |
|
1517 return true; |
|
1518 |
|
1519 // TI can't be sure about our properties, so make sure anything |
|
1520 // we return can be monitored directly. |
|
1521 if (!monitoredResult()) |
|
1522 return true; |
|
1523 |
|
1524 // Skim off DOM proxies. |
|
1525 if (IsCacheableDOMProxy(obj)) { |
|
1526 RootedId id(cx, NameToId(name)); |
|
1527 DOMProxyShadowsResult shadows = GetDOMProxyShadowsCheck()(cx, obj, id); |
|
1528 if (shadows == ShadowCheckFailed) |
|
1529 return false; |
|
1530 if (shadows == Shadows) |
|
1531 return tryAttachDOMProxyShadowed(cx, ion, obj, returnAddr, emitted); |
|
1532 |
|
1533 return tryAttachDOMProxyUnshadowed(cx, ion, obj, name, shadows == DoesntShadowUnique, |
|
1534 returnAddr, emitted); |
|
1535 } |
|
1536 |
|
1537 return tryAttachGenericProxy(cx, ion, obj, name, returnAddr, emitted); |
|
1538 } |
|
1539 |
|
1540 static void |
|
1541 GenerateProxyClassGuards(MacroAssembler &masm, Register object, Register scratchReg, |
|
1542 Label *failures) |
|
1543 { |
|
1544 masm.loadObjClass(object, scratchReg); |
|
1545 masm.branchTest32(Assembler::Zero, |
|
1546 Address(scratchReg, Class::offsetOfFlags()), |
|
1547 Imm32(JSCLASS_IS_PROXY), failures); |
|
1548 } |
|
1549 |
|
// Attach the one generic proxy stub for this cache: guard the object is a
// proxy but NOT a DOM proxy (DOM proxies have specialized stubs), then call
// through the proxy get trap. Skipped for idempotent caches since the trap
// may be effectful. Sets *emitted when a stub was generated.
bool
GetPropertyIC::tryAttachGenericProxy(JSContext *cx, IonScript *ion, HandleObject obj,
                                     HandlePropertyName name, void *returnAddr,
                                     bool *emitted)
{
    JS_ASSERT(canAttachStub());
    JS_ASSERT(!*emitted);
    JS_ASSERT(obj->is<ProxyObject>());
    JS_ASSERT(monitoredResult());
    JS_ASSERT(output().hasValue());

    // Only one generic proxy stub per cache.
    if (hasGenericProxyStub())
        return true;

    if (idempotent())
        return true;

    *emitted = true;

    Label failures;
    MacroAssembler masm(cx, ion, script_, pc_);
    RepatchStubAppender attacher(*this);

    Register scratchReg = output().valueReg().scratchReg();

    GenerateProxyClassGuards(masm, object(), scratchReg, &failures);

    // Ensure that the incoming object is not a DOM proxy, so that we can get to
    // the specialized stubs
    masm.branchTestProxyHandlerFamily(Assembler::Equal, object(), scratchReg,
                                      GetDOMProxyHandlerFamily(), &failures);

    if (!EmitCallProxyGet(cx, masm, attacher, name, liveRegs_, object(), output(),
                          pc(), returnAddr))
    {
        return false;
    }

    attacher.jumpRejoin(masm);

    masm.bind(&failures);
    attacher.jumpNextStub(masm);

    JS_ASSERT(!hasGenericProxyStub_);
    hasGenericProxyStub_ = true;

    return linkAndAttachStub(cx, masm, attacher, ion, "Generic Proxy get");
}
|
1598 |
|
// Attach a stub for |arguments.length| on an optimizable arguments object:
// guard the class (strict vs. normal arguments), check the length has not
// been overridden, then extract the initial length from its packed slot.
// One stub each is attached for the strict and normal variants. Sets
// *emitted when a stub was generated.
bool
GetPropertyIC::tryAttachArgumentsLength(JSContext *cx, IonScript *ion, HandleObject obj,
                                        HandlePropertyName name, bool *emitted)
{
    JS_ASSERT(canAttachStub());
    JS_ASSERT(!*emitted);

    if (name != cx->names().length)
        return true;
    if (!IsOptimizableArgumentsObjectForLength(obj))
        return true;

    // The stub produces an Int32; the output must be able to hold one.
    MIRType outputType = output().type();
    if (!(outputType == MIRType_Value || outputType == MIRType_Int32))
        return true;

    if (hasArgumentsLengthStub(obj->is<StrictArgumentsObject>()))
        return true;

    *emitted = true;

    JS_ASSERT(!idempotent());

    Label failures;
    MacroAssembler masm(cx, ion);
    RepatchStubAppender attacher(*this);

    // Pick a temporary: the boxed output's scratch register, or the typed
    // Int32 gpr directly.
    Register tmpReg;
    if (output().hasValue()) {
        tmpReg = output().valueReg().scratchReg();
    } else {
        JS_ASSERT(output().type() == MIRType_Int32);
        tmpReg = output().typedReg().gpr();
    }
    JS_ASSERT(object() != tmpReg);

    const Class *clasp = obj->is<StrictArgumentsObject>() ? &StrictArgumentsObject::class_
                                                          : &NormalArgumentsObject::class_;

    masm.branchTestObjClass(Assembler::NotEqual, object(), tmpReg, clasp, &failures);

    // Get initial ArgsObj length value, test if length has been overridden.
    masm.unboxInt32(Address(object(), ArgumentsObject::getInitialLengthSlotOffset()), tmpReg);
    masm.branchTest32(Assembler::NonZero, tmpReg, Imm32(ArgumentsObject::LENGTH_OVERRIDDEN_BIT),
                      &failures);

    // Strip the flag bits to recover the actual length.
    masm.rshiftPtr(Imm32(ArgumentsObject::PACKED_BITS_COUNT), tmpReg);

    // If output is Int32, result is already in right place, otherwise box it into output.
    if (output().hasValue())
        masm.tagValue(JSVAL_TYPE_INT32, tmpReg, output().valueReg());

    // Success.
    attacher.jumpRejoin(masm);

    // Failure.
    masm.bind(&failures);
    attacher.jumpNextStub(masm);

    if (obj->is<StrictArgumentsObject>()) {
        JS_ASSERT(!hasStrictArgumentsLengthStub_);
        hasStrictArgumentsLengthStub_ = true;
        return linkAndAttachStub(cx, masm, attacher, ion, "ArgsObj length (strict)");
    }

    JS_ASSERT(!hasNormalArgumentsLengthStub_);
    hasNormalArgumentsLengthStub_ = true;
    return linkAndAttachStub(cx, masm, attacher, ion, "ArgsObj length (normal)");
}
|
1668 |
|
1669 bool |
|
1670 GetPropertyIC::tryAttachStub(JSContext *cx, IonScript *ion, HandleObject obj, |
|
1671 HandlePropertyName name, void *returnAddr, bool *emitted) |
|
1672 { |
|
1673 JS_ASSERT(!*emitted); |
|
1674 |
|
1675 if (!canAttachStub()) |
|
1676 return true; |
|
1677 |
|
1678 if (!*emitted && !tryAttachArgumentsLength(cx, ion, obj, name, emitted)) |
|
1679 return false; |
|
1680 |
|
1681 if (!*emitted && !tryAttachProxy(cx, ion, obj, name, returnAddr, emitted)) |
|
1682 return false; |
|
1683 |
|
1684 if (!*emitted && !tryAttachNative(cx, ion, obj, name, returnAddr, emitted)) |
|
1685 return false; |
|
1686 |
|
1687 if (!*emitted && !tryAttachTypedArrayLength(cx, ion, obj, name, emitted)) |
|
1688 return false; |
|
1689 |
|
1690 return true; |
|
1691 } |
|
1692 |
|
// Out-of-line cache-miss handler for GetPropertyIC, called from jitcode.
// Attempts to attach a new stub for |obj|, then performs the actual property
// get through the VM and stores the result in |vp|. Idempotent caches that
// fail to attach invalidate the IonScript instead, since they can neither
// re-dispatch correctly nor monitor their result type.
/* static */ bool
GetPropertyIC::update(JSContext *cx, size_t cacheIndex,
                      HandleObject obj, MutableHandleValue vp)
{
    void *returnAddr;
    RootedScript topScript(cx, GetTopIonJSScript(cx, &returnAddr));
    IonScript *ion = topScript->ionScript();

    GetPropertyIC &cache = ion->getCache(cacheIndex).toGetProperty();
    RootedPropertyName name(cx, cache.name());

    // Override the return value if we are invalidated (bug 728188).
    AutoDetectInvalidation adi(cx, vp.address(), ion);

    // If the cache is idempotent, we will redo the op in the interpreter.
    if (cache.idempotent())
        adi.disable();

    // For now, just stop generating new stubs once we hit the stub count
    // limit. Once we can make calls from within generated stubs, a new call
    // stub will be generated instead and the previous stubs unlinked.
    bool emitted = false;
    if (!cache.tryAttachStub(cx, ion, obj, name, returnAddr, &emitted))
        return false;

    if (cache.idempotent() && !emitted) {
        // Invalidate the cache if the property was not found, or was found on
        // a non-native object. This ensures:
        // 1) The property read has no observable side-effects.
        // 2) There's no need to dynamically monitor the return type. This would
        //    be complicated since (due to GVN) there can be multiple pc's
        //    associated with a single idempotent cache.
        IonSpew(IonSpew_InlineCaches, "Invalidating from idempotent cache %s:%d",
                topScript->filename(), topScript->lineno());

        topScript->setInvalidatedIdempotentCache();

        // Do not re-invalidate if the lookup already caused invalidation.
        if (!topScript->hasIonScript())
            return true;

        return Invalidate(cx, topScript);
    }

    // Perform the actual property get.
    RootedId id(cx, NameToId(name));
    if (!JSObject::getGeneric(cx, obj, obj, id, vp))
        return false;

    if (!cache.idempotent()) {
        RootedScript script(cx);
        jsbytecode *pc;
        cache.getScriptedLocation(&script, &pc);

        // If the cache is idempotent, the property exists so we don't have to
        // call __noSuchMethod__.

#if JS_HAS_NO_SUCH_METHOD
        // Handle objects with __noSuchMethod__.
        if (JSOp(*pc) == JSOP_CALLPROP && MOZ_UNLIKELY(vp.isUndefined())) {
            if (!OnUnknownMethod(cx, obj, IdToValue(id), vp))
                return false;
        }
#endif

        // Monitor changes to cache entry.
        if (!cache.monitoredResult())
            types::TypeScript::Monitor(cx, script, pc, vp);
    }

    return true;
}
|
1764 |
|
1765 void |
|
1766 GetPropertyIC::reset() |
|
1767 { |
|
1768 RepatchIonCache::reset(); |
|
1769 hasTypedArrayLengthStub_ = false; |
|
1770 hasStrictArgumentsLengthStub_ = false; |
|
1771 hasNormalArgumentsLengthStub_ = false; |
|
1772 hasGenericProxyStub_ = false; |
|
1773 } |
|
1774 |
|
1775 bool |
|
1776 ParallelIonCache::initStubbedShapes(JSContext *cx) |
|
1777 { |
|
1778 JS_ASSERT(isAllocated()); |
|
1779 if (!stubbedShapes_) { |
|
1780 stubbedShapes_ = cx->new_<ShapeSet>(cx); |
|
1781 return stubbedShapes_ && stubbedShapes_->init(); |
|
1782 } |
|
1783 return true; |
|
1784 } |
|
1785 |
|
bool
ParallelIonCache::hasOrAddStubbedShape(LockedJSContext &cx, Shape *shape, bool *alreadyStubbed)
{
    // Check if we have already stubbed the current object to avoid
    // attaching a duplicate stub. On success, *alreadyStubbed tells the
    // caller whether |shape| was already present; returns false on OOM.
    if (!initStubbedShapes(cx))
        return false;
    ShapeSet::AddPtr p = stubbedShapes_->lookupForAdd(shape);
    if ((*alreadyStubbed = !!p))
        return true;
    // Not present: record it using the AddPtr from the lookup above.
    return stubbedShapes_->add(p, shape);
}
|
1798 |
|
1799 void |
|
1800 ParallelIonCache::reset() |
|
1801 { |
|
1802 DispatchIonCache::reset(); |
|
1803 if (stubbedShapes_) |
|
1804 stubbedShapes_->clear(); |
|
1805 } |
|
1806 |
|
void
ParallelIonCache::destroy()
{
    // Tear down the base cache first, then free the (possibly null) shape
    // set; js_delete tolerates nullptr.
    DispatchIonCache::destroy();
    js_delete(stubbedShapes_);
}
|
1813 |
|
void
GetPropertyParIC::reset()
{
    // Drop attached stubs and the stubbed-shape set, and allow a new typed
    // array length stub to be attached.
    ParallelIonCache::reset();
    hasTypedArrayLengthStub_ = false;
}
|
1820 |
|
1821 bool |
|
1822 GetPropertyParIC::attachReadSlot(LockedJSContext &cx, IonScript *ion, JSObject *obj, |
|
1823 JSObject *holder, Shape *shape) |
|
1824 { |
|
1825 // Ready to generate the read slot stub. |
|
1826 DispatchStubPrepender attacher(*this); |
|
1827 MacroAssembler masm(cx, ion); |
|
1828 GenerateReadSlot(cx, ion, masm, attacher, obj, holder, shape, object(), output()); |
|
1829 |
|
1830 return linkAndAttachStub(cx, masm, attacher, ion, "parallel reading"); |
|
1831 } |
|
1832 |
|
1833 bool |
|
1834 GetPropertyParIC::attachArrayLength(LockedJSContext &cx, IonScript *ion, JSObject *obj) |
|
1835 { |
|
1836 MacroAssembler masm(cx, ion); |
|
1837 DispatchStubPrepender attacher(*this); |
|
1838 if (!GenerateArrayLength(cx, masm, attacher, obj, object(), output())) |
|
1839 return false; |
|
1840 |
|
1841 return linkAndAttachStub(cx, masm, attacher, ion, "parallel array length"); |
|
1842 } |
|
1843 |
|
1844 bool |
|
1845 GetPropertyParIC::attachTypedArrayLength(LockedJSContext &cx, IonScript *ion, JSObject *obj) |
|
1846 { |
|
1847 MacroAssembler masm(cx, ion); |
|
1848 DispatchStubPrepender attacher(*this); |
|
1849 GenerateTypedArrayLength(cx, masm, attacher, obj, object(), output()); |
|
1850 |
|
1851 JS_ASSERT(!hasTypedArrayLengthStub_); |
|
1852 hasTypedArrayLengthStub_ = true; |
|
1853 return linkAndAttachStub(cx, masm, attacher, ion, "parallel typed array length"); |
|
1854 } |
|
1855 |
|
bool
GetPropertyParIC::update(ForkJoinContext *cx, size_t cacheIndex,
                         HandleObject obj, MutableHandleValue vp)
{
    // Out-of-line path for a parallel-mode property get: perform the read
    // purely, then (under the runtime lock) try to attach a stub for the
    // next time around. Returning false bails out of parallel execution.
    IonScript *ion = GetTopIonJSScript(cx)->parallelIonScript();
    GetPropertyParIC &cache = ion->getCache(cacheIndex).toGetPropertyPar();

    // Grab the property early, as the pure path is fast anyways and doesn't
    // need a lock. If we can't do it purely, bail out of parallel execution.
    if (!GetPropertyPure(cx, obj, NameToId(cache.name()), vp.address()))
        return false;

    // Avoid unnecessary locking if cannot attach stubs.
    if (!cache.canAttachStub())
        return true;

    {
        // Lock the context before mutating the cache. Ideally we'd like to do
        // finer-grained locking, with one lock per cache. However, generating
        // new jitcode uses a global ExecutableAllocator tied to the runtime.
        LockedJSContext ncx(cx);

        // Re-check under the lock: another thread may have filled the cache.
        if (cache.canAttachStub()) {
            bool alreadyStubbed;
            if (!cache.hasOrAddStubbedShape(ncx, obj->lastProperty(), &alreadyStubbed))
                return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
            if (alreadyStubbed)
                return true;

            // See note about the stub limit in GetPropertyCache.
            bool attachedStub = false;

            {
                RootedShape shape(ncx);
                RootedObject holder(ncx);
                RootedPropertyName name(ncx, cache.name());

                GetPropertyIC::NativeGetPropCacheability canCache =
                    CanAttachNativeGetProp(ncx, cache, obj, name, &holder, &shape);

                if (canCache == GetPropertyIC::CanAttachReadSlot) {
                    if (!cache.attachReadSlot(ncx, ion, obj, holder, shape))
                        return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
                    attachedStub = true;
                }

                if (!attachedStub && canCache == GetPropertyIC::CanAttachArrayLength) {
                    if (!cache.attachArrayLength(ncx, ion, obj))
                        return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
                    attachedStub = true;
                }
            }

            // Typed array .length: only attach once, and only when the
            // output location can represent an int32 result.
            if (!attachedStub && !cache.hasTypedArrayLengthStub() &&
                obj->is<TypedArrayObject>() && cx->names().length == cache.name() &&
                (cache.output().type() == MIRType_Value || cache.output().type() == MIRType_Int32))
            {
                if (!cache.attachTypedArrayLength(ncx, ion, obj))
                    return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
                attachedStub = true;
            }
        }
    }

    return true;
}
|
1922 |
|
void
IonCache::disable()
{
    // Discard any attached stubs, then permanently prevent new ones.
    reset();
    this->disabled_ = 1;
}
|
1929 |
|
void
IonCache::reset()
{
    // Forget how many stubs were attached so the stub-count limit restarts.
    this->stubCount_ = 0;
}
|
1935 |
|
void
IonCache::destroy()
{
    // The base cache owns no out-of-line resources; subclasses (e.g.
    // ParallelIonCache) override this to free theirs.
}
|
1940 |
|
// Emit a stub body that stores |value| into an existing (fixed or dynamic)
// slot of |obj|, guarding on the object's shape and, when required, on its
// TypeObject and the property's type set. On any guard failure the stub
// falls through to the next stub in the chain.
static void
GenerateSetSlot(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &attacher,
                JSObject *obj, Shape *shape, Register object, ConstantOrRegister value,
                bool needsTypeBarrier, bool checkTypeset)
{
    JS_ASSERT(obj->isNative());

    Label failures, barrierFailure;
    // Guard the receiver still has the expected shape.
    masm.branchPtr(Assembler::NotEqual,
                   Address(object, JSObject::offsetOfShape()),
                   ImmGCPtr(obj->lastProperty()), &failures);

    // Guard that the incoming value is in the type set for the property
    // if a type barrier is required.
    if (needsTypeBarrier) {
        // We can't do anything that would change the HeapTypeSet, so
        // just guard that it's already there.

        // Obtain and guard on the TypeObject of the object.
        types::TypeObject *type = obj->type();
        masm.branchPtr(Assembler::NotEqual,
                       Address(object, JSObject::offsetOfType()),
                       ImmGCPtr(type), &failures);

        if (checkTypeset) {
            TypedOrValueRegister valReg = value.reg();
            types::HeapTypeSet *propTypes = type->maybeGetProperty(shape->propid());
            JS_ASSERT(propTypes);
            JS_ASSERT(!propTypes->unknown());

            // Reuse the object register as scratch; it is saved on the
            // stack and restored on both the success and failure paths.
            Register scratchReg = object;
            masm.push(scratchReg);

            masm.guardTypeSet(valReg, propTypes, scratchReg, &barrierFailure);
            masm.pop(object);
        }
    }

    if (obj->isFixedSlot(shape->slot())) {
        Address addr(object, JSObject::getFixedSlotOffset(shape->slot()));

        // Incremental-GC pre-barrier on the slot being overwritten.
        if (cx->zone()->needsBarrier())
            masm.callPreBarrier(addr, MIRType_Value);

        masm.storeConstantOrRegister(value, addr);
    } else {
        // Dynamic slot: clobber the object register with the slots pointer.
        Register slotsReg = object;
        masm.loadPtr(Address(object, JSObject::offsetOfSlots()), slotsReg);

        Address addr(slotsReg, obj->dynamicSlotIndex(shape->slot()) * sizeof(Value));

        if (cx->zone()->needsBarrier())
            masm.callPreBarrier(addr, MIRType_Value);

        masm.storeConstantOrRegister(value, addr);
    }

    attacher.jumpRejoin(masm);

    // The type-set guard jumped here with |object| still pushed; restore it
    // before falling through to the common failure exit below.
    if (barrierFailure.used()) {
        masm.bind(&barrierFailure);
        masm.pop(object);
    }

    masm.bind(&failures);
    attacher.jumpNextStub(masm);
}
|
2008 |
|
2009 bool |
|
2010 SetPropertyIC::attachSetSlot(JSContext *cx, IonScript *ion, HandleObject obj, |
|
2011 HandleShape shape, bool checkTypeset) |
|
2012 { |
|
2013 MacroAssembler masm(cx, ion); |
|
2014 RepatchStubAppender attacher(*this); |
|
2015 GenerateSetSlot(cx, masm, attacher, obj, shape, object(), value(), needsTypeBarrier(), |
|
2016 checkTypeset); |
|
2017 return linkAndAttachStub(cx, masm, attacher, ion, "setting"); |
|
2018 } |
|
2019 |
|
2020 static bool |
|
2021 IsCacheableSetPropCallNative(HandleObject obj, HandleObject holder, HandleShape shape) |
|
2022 { |
|
2023 JS_ASSERT(obj->isNative()); |
|
2024 |
|
2025 if (!shape || !IsCacheableProtoChain(obj, holder)) |
|
2026 return false; |
|
2027 |
|
2028 return shape->hasSetterValue() && shape->setterObject() && |
|
2029 shape->setterObject()->is<JSFunction>() && |
|
2030 shape->setterObject()->as<JSFunction>().isNative(); |
|
2031 } |
|
2032 |
|
2033 static bool |
|
2034 IsCacheableSetPropCallPropertyOp(HandleObject obj, HandleObject holder, HandleShape shape) |
|
2035 { |
|
2036 JS_ASSERT(obj->isNative()); |
|
2037 |
|
2038 if (!shape) |
|
2039 return false; |
|
2040 |
|
2041 if (!IsCacheableProtoChain(obj, holder)) |
|
2042 return false; |
|
2043 |
|
2044 if (shape->hasSlot()) |
|
2045 return false; |
|
2046 |
|
2047 if (shape->hasDefaultSetter()) |
|
2048 return false; |
|
2049 |
|
2050 if (shape->hasSetterValue()) |
|
2051 return false; |
|
2052 |
|
2053 // Despite the vehement claims of Shape.h that writable() is only |
|
2054 // relevant for data descriptors, some PropertyOp setters care |
|
2055 // desperately about its value. The flag should be always true, apart |
|
2056 // from these rare instances. |
|
2057 if (!shape->writable()) |
|
2058 return false; |
|
2059 |
|
2060 return true; |
|
2061 } |
|
2062 |
|
// Emit an out-of-line call to Proxy::set from within a stub. Saves live
// registers, builds the argument handles on the stack, enters a fake exit
// frame, and restores live registers afterwards. Returns false on OOM while
// building the fake exit frame.
static bool
EmitCallProxySet(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &attacher,
                 HandleId propId, RegisterSet liveRegs, Register object,
                 ConstantOrRegister value, void *returnAddr, bool strict)
{
    MacroAssembler::AfterICSaveLive aic = masm.icSaveLive(liveRegs);

    // Remaining registers should be free, but we need to use |object| still
    // so leave it alone.
    RegisterSet regSet(RegisterSet::All());
    regSet.take(AnyRegister(object));

    // Proxy::set(JSContext *cx, HandleObject proxy, HandleObject receiver, HandleId id,
    //            bool strict, MutableHandleValue vp)
    Register argJSContextReg = regSet.takeGeneral();
    Register argProxyReg = regSet.takeGeneral();
    Register argIdReg = regSet.takeGeneral();
    Register argVpReg = regSet.takeGeneral();
    Register argStrictReg = regSet.takeGeneral();

    Register scratch = regSet.takeGeneral();

    // Push stubCode for marking.
    attacher.pushStubCodePointer(masm);

    // Push args on stack first so we can take pointers to make handles.
    masm.Push(value);
    masm.movePtr(StackPointer, argVpReg);

    masm.Push(propId, scratch);
    masm.movePtr(StackPointer, argIdReg);

    // Pushing object and receiver. Both are the same, so Handle to one is equivalent to
    // handle to other.
    masm.Push(object);
    masm.Push(object);
    masm.movePtr(StackPointer, argProxyReg);

    masm.loadJSContext(argJSContextReg);
    masm.move32(Imm32(strict? 1 : 0), argStrictReg);

    // The fake exit frame makes the stack walkable for GC during the call.
    if (!masm.icBuildOOLFakeExitFrame(returnAddr, aic))
        return false;
    masm.enterFakeExitFrame(ION_FRAME_OOL_PROXY);

    // Make the call.
    masm.setupUnalignedABICall(6, scratch);
    masm.passABIArg(argJSContextReg);
    masm.passABIArg(argProxyReg);
    masm.passABIArg(argProxyReg);
    masm.passABIArg(argIdReg);
    masm.passABIArg(argStrictReg);
    masm.passABIArg(argVpReg);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, Proxy::set));

    // Test for failure.
    masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());

    // masm.leaveExitFrame & pop locals
    masm.adjustStack(IonOOLProxyExitFrameLayout::Size());

    masm.icRestoreLive(liveRegs, aic);
    return true;
}
|
2127 |
|
bool
SetPropertyIC::attachGenericProxy(JSContext *cx, IonScript *ion, void *returnAddr)
{
    // Attach a catch-all stub for proxy receivers: guard the object is a
    // (non-DOM) proxy, then call Proxy::set. Only one such stub is attached.
    JS_ASSERT(!hasGenericProxyStub());

    MacroAssembler masm(cx, ion, script_, pc_);
    RepatchStubAppender attacher(*this);

    Label failures;
    {
        Label proxyFailures;
        Label proxySuccess;

        RegisterSet regSet(RegisterSet::All());
        regSet.take(AnyRegister(object()));
        if (!value().constant())
            regSet.takeUnchecked(value().reg());

        Register scratch = regSet.takeGeneral();
        masm.push(scratch);

        GenerateProxyClassGuards(masm, object(), scratch, &proxyFailures);

        // Remove the DOM proxies. They'll take care of themselves so this stub doesn't
        // catch too much. The failure case is actually Equal. Fall through to the failure code.
        masm.branchTestProxyHandlerFamily(Assembler::NotEqual, object(), scratch,
                                          GetDOMProxyHandlerFamily(), &proxySuccess);

        masm.bind(&proxyFailures);
        masm.pop(scratch);
        // Unify the point of failure to allow for later DOM proxy handling.
        masm.jump(&failures);

        masm.bind(&proxySuccess);
        masm.pop(scratch);
    }

    RootedId propId(cx, AtomToId(name()));
    if (!EmitCallProxySet(cx, masm, attacher, propId, liveRegs_, object(), value(),
                          returnAddr, strict()))
    {
        return false;
    }

    attacher.jumpRejoin(masm);

    masm.bind(&failures);
    attacher.jumpNextStub(masm);

    // Record that the one-per-cache generic proxy stub now exists.
    JS_ASSERT(!hasGenericProxyStub_);
    hasGenericProxyStub_ = true;

    return linkAndAttachStub(cx, masm, attacher, ion, "generic proxy set");
}
|
2182 |
|
bool
SetPropertyIC::attachDOMProxyShadowed(JSContext *cx, IonScript *ion, HandleObject obj,
                                      void *returnAddr)
{
    // Attach a stub for a DOM proxy whose expando shadows the property:
    // guard shape and proxy-ness, then go straight to Proxy::set.
    JS_ASSERT(IsCacheableDOMProxy(obj));

    Label failures;
    MacroAssembler masm(cx, ion, script_, pc_);
    RepatchStubAppender attacher(*this);

    // Guard on the shape of the object.
    masm.branchPtr(Assembler::NotEqual,
                   Address(object(), JSObject::offsetOfShape()),
                   ImmGCPtr(obj->lastProperty()), &failures);

    // Make sure object is a DOMProxy
    GenerateDOMProxyChecks(cx, masm, obj, name(), object(), &failures,
                           /*skipExpandoCheck=*/true);

    RootedId propId(cx, AtomToId(name()));
    if (!EmitCallProxySet(cx, masm, attacher, propId, liveRegs_, object(),
                          value(), returnAddr, strict()))
    {
        return false;
    }

    // Success.
    attacher.jumpRejoin(masm);

    // Failure.
    masm.bind(&failures);
    attacher.jumpNextStub(masm);

    return linkAndAttachStub(cx, masm, attacher, ion, "DOM proxy shadowed set");
}
|
2218 |
|
// Emit a stub body that invokes a property setter found on |holder|, either
// a native JSFunction setter or a raw StrictPropertyOp, after guarding the
// prototype chain between |obj| and |holder|. Builds the appropriate fake
// exit frame for each flavor of call. Returns false on OOM while building
// the fake exit frame.
static bool
GenerateCallSetter(JSContext *cx, IonScript *ion, MacroAssembler &masm,
                   IonCache::StubAttacher &attacher, HandleObject obj,
                   HandleObject holder, HandleShape shape, bool strict, Register object,
                   ConstantOrRegister value, Label *failure, RegisterSet liveRegs,
                   void *returnAddr)
{
    // Generate prototype guards if needed.
    // Take a scratch register for use, save on stack.
    {
        RegisterSet regSet(RegisterSet::All());
        regSet.take(AnyRegister(object));
        if (!value.constant())
            regSet.takeUnchecked(value.reg());
        Register scratchReg = regSet.takeGeneral();
        masm.push(scratchReg);

        Label protoFailure;
        Label protoSuccess;

        // Generate prototype/shape guards.
        if (obj != holder)
            GeneratePrototypeGuards(cx, ion, masm, obj, holder, object, scratchReg, &protoFailure);

        // Guard the holder itself still has the expected shape.
        masm.moveNurseryPtr(ImmMaybeNurseryPtr(holder), scratchReg);
        masm.branchPtr(Assembler::NotEqual,
                       Address(scratchReg, JSObject::offsetOfShape()),
                       ImmGCPtr(holder->lastProperty()),
                       &protoFailure);

        masm.jump(&protoSuccess);

        // Restore the scratch register on both exits of this guard block.
        masm.bind(&protoFailure);
        masm.pop(scratchReg);
        masm.jump(failure);

        masm.bind(&protoSuccess);
        masm.pop(scratchReg);
    }

    // Good to go for invoking setter.

    MacroAssembler::AfterICSaveLive aic = masm.icSaveLive(liveRegs);

    // Remaining registers should basically be free, but we need to use |object| still
    // so leave it alone.
    RegisterSet regSet(RegisterSet::All());
    regSet.take(AnyRegister(object));

    // This is a slower stub path, and we're going to be doing a call anyway. Don't need
    // to try so hard to not use the stack. Scratch regs are just taken from the register
    // set not including the input, current value saved on the stack, and restored when
    // we're done with it.
    //
    // Be very careful not to use any of these before value is pushed, since they
    // might shadow.
    Register scratchReg = regSet.takeGeneral();
    Register argJSContextReg = regSet.takeGeneral();
    Register argVpReg = regSet.takeGeneral();

    bool callNative = IsCacheableSetPropCallNative(obj, holder, shape);
    JS_ASSERT_IF(!callNative, IsCacheableSetPropCallPropertyOp(obj, holder, shape));

    if (callNative) {
        JS_ASSERT(shape->hasSetterValue() && shape->setterObject() &&
                  shape->setterObject()->is<JSFunction>());
        JSFunction *target = &shape->setterObject()->as<JSFunction>();

        JS_ASSERT(target->isNative());

        Register argUintNReg = regSet.takeGeneral();

        // Set up the call:
        //  bool (*)(JSContext *, unsigned, Value *vp)
        // vp[0] is callee/outparam
        // vp[1] is |this|
        // vp[2] is the value

        // Build vp and move the base into argVpReg.
        masm.Push(value);
        masm.Push(TypedOrValueRegister(MIRType_Object, AnyRegister(object)));
        masm.Push(ObjectValue(*target));
        masm.movePtr(StackPointer, argVpReg);

        // Preload other regs
        masm.loadJSContext(argJSContextReg);
        masm.move32(Imm32(1), argUintNReg);

        // Push data for GC marking
        masm.Push(argUintNReg);
        attacher.pushStubCodePointer(masm);

        if (!masm.icBuildOOLFakeExitFrame(returnAddr, aic))
            return false;
        masm.enterFakeExitFrame(ION_FRAME_OOL_NATIVE);

        // Make the call
        masm.setupUnalignedABICall(3, scratchReg);
        masm.passABIArg(argJSContextReg);
        masm.passABIArg(argUintNReg);
        masm.passABIArg(argVpReg);
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, target->native()));

        // Test for failure.
        masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());

        // masm.leaveExitFrame & pop locals.
        masm.adjustStack(IonOOLNativeExitFrameLayout::Size(1));
    } else {
        // PropertyOp setter: call the raw StrictPropertyOp function pointer.
        Register argObjReg = regSet.takeGeneral();
        Register argIdReg = regSet.takeGeneral();
        Register argStrictReg = regSet.takeGeneral();

        attacher.pushStubCodePointer(masm);

        StrictPropertyOp target = shape->setterOp();
        JS_ASSERT(target);
        // JSStrictPropertyOp: bool fn(JSContext *cx, HandleObject obj,
        //                               HandleId id, bool strict, MutableHandleValue vp);

        // Push args on stack first so we can take pointers to make handles.
        if (value.constant())
            masm.Push(value.value());
        else
            masm.Push(value.reg());
        masm.movePtr(StackPointer, argVpReg);

        masm.move32(Imm32(strict ? 1 : 0), argStrictReg);

        // push canonical jsid from shape instead of propertyname.
        masm.Push(shape->propid(), argIdReg);
        masm.movePtr(StackPointer, argIdReg);

        masm.Push(object);
        masm.movePtr(StackPointer, argObjReg);

        masm.loadJSContext(argJSContextReg);

        if (!masm.icBuildOOLFakeExitFrame(returnAddr, aic))
            return false;
        masm.enterFakeExitFrame(ION_FRAME_OOL_PROPERTY_OP);

        // Make the call.
        masm.setupUnalignedABICall(5, scratchReg);
        masm.passABIArg(argJSContextReg);
        masm.passABIArg(argObjReg);
        masm.passABIArg(argIdReg);
        masm.passABIArg(argStrictReg);
        masm.passABIArg(argVpReg);
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, target));

        // Test for failure.
        masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());

        // masm.leaveExitFrame & pop locals.
        masm.adjustStack(IonOOLPropertyOpExitFrameLayout::Size());
    }

    masm.icRestoreLive(liveRegs, aic);
    return true;
}
|
2380 |
|
// Decide whether the property on an unshadowed DOM proxy's prototype chain
// is a setter we can call from a stub. On success, *isSetter reports the
// answer and |holder|/|shape| identify where the setter lives; returns false
// only if the lookup itself fails.
static bool
IsCacheableDOMProxyUnshadowedSetterCall(JSContext *cx, HandleObject obj, HandlePropertyName name,
                                        MutableHandleObject holder, MutableHandleShape shape,
                                        bool *isSetter)
{
    JS_ASSERT(IsCacheableDOMProxy(obj));

    *isSetter = false;

    // No prototype means there is nothing to find; report "not a setter".
    RootedObject checkObj(cx, obj->getTaggedProto().toObjectOrNull());
    if (!checkObj)
        return true;

    // NOTE(review): the lookup starts at |obj| (the proxy) while the
    // cacheability checks below use |checkObj| (its proto) — presumably the
    // proxy's own hook resolves through to the proto here; confirm intended.
    if (!JSObject::lookupProperty(cx, obj, name, holder, shape))
        return false;

    if (!holder)
        return true;

    if (!IsCacheableSetPropCallNative(checkObj, holder, shape) &&
        !IsCacheableSetPropCallPropertyOp(checkObj, holder, shape))
    {
        return true;
    }

    *isSetter = true;
    return true;
}
|
2409 |
|
bool
SetPropertyIC::attachDOMProxyUnshadowed(JSContext *cx, IonScript *ion, HandleObject obj,
                                        void *returnAddr)
{
    // Attach a stub for a DOM proxy whose expando does NOT shadow the
    // property: either call a cacheable setter found on the proto chain, or
    // fall back to a plain Proxy::set call.
    JS_ASSERT(IsCacheableDOMProxy(obj));

    Label failures;
    MacroAssembler masm(cx, ion, script_, pc_);
    RepatchStubAppender attacher(*this);

    // Guard on the shape of the object.
    masm.branchPtr(Assembler::NotEqual,
                   Address(object(), JSObject::offsetOfShape()),
                   ImmGCPtr(obj->lastProperty()), &failures);

    // Make sure object is a DOMProxy
    GenerateDOMProxyChecks(cx, masm, obj, name(), object(), &failures);

    RootedPropertyName propName(cx, name());
    RootedObject holder(cx);
    RootedShape shape(cx);
    bool isSetter;
    if (!IsCacheableDOMProxyUnshadowedSetterCall(cx, obj, propName, &holder,
                                                 &shape, &isSetter))
    {
        return false;
    }

    if (isSetter) {
        if (!GenerateCallSetter(cx, ion, masm, attacher, obj, holder, shape, strict(),
                                object(), value(), &failures, liveRegs_, returnAddr))
        {
            return false;
        }
    } else {
        // Either there was no proto, or the property wasn't appropriately found on it.
        // Drop back to just a call to Proxy::set().
        RootedId propId(cx, AtomToId(name()));
        if (!EmitCallProxySet(cx, masm, attacher, propId, liveRegs_, object(),
                              value(), returnAddr, strict()))
        {
            return false;
        }
    }

    // Success.
    attacher.jumpRejoin(masm);

    // Failure.
    masm.bind(&failures);
    attacher.jumpNextStub(masm);

    return linkAndAttachStub(cx, masm, attacher, ion, "DOM proxy unshadowed set");
}
|
2464 |
|
bool
SetPropertyIC::attachCallSetter(JSContext *cx, IonScript *ion,
                                HandleObject obj, HandleObject holder, HandleShape shape,
                                void *returnAddr)
{
    // Attach a stub that guards the receiver's shape and then invokes the
    // setter found on |holder| via GenerateCallSetter.
    JS_ASSERT(obj->isNative());

    MacroAssembler masm(cx, ion, script_, pc_);
    RepatchStubAppender attacher(*this);

    Label failure;
    masm.branchPtr(Assembler::NotEqual,
                   Address(object(), JSObject::offsetOfShape()),
                   ImmGCPtr(obj->lastProperty()),
                   &failure);

    if (!GenerateCallSetter(cx, ion, masm, attacher, obj, holder, shape, strict(),
                            object(), value(), &failure, liveRegs_, returnAddr))
    {
        return false;
    }

    // Rejoin jump.
    attacher.jumpRejoin(masm);

    // Jump to next stub.
    masm.bind(&failure);
    attacher.jumpNextStub(masm);

    return linkAndAttachStub(cx, masm, attacher, ion, "setter call");
}
|
2496 |
|
// Emit a stub body for adding a new property: guard the object's type, its
// old shape, (optionally) the value's type set, and every prototype's shape;
// then update the object's shape to |obj->lastProperty()| and store the
// value into the newly added slot.
static void
GenerateAddSlot(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &attacher,
                JSObject *obj, Shape *oldShape, Register object, ConstantOrRegister value,
                bool checkTypeset)
{
    JS_ASSERT(obj->isNative());

    Label failures;

    // Guard the type of the object
    masm.branchPtr(Assembler::NotEqual, Address(object, JSObject::offsetOfType()),
                   ImmGCPtr(obj->type()), &failures);

    // Guard shapes along prototype chain.
    masm.branchTestObjShape(Assembler::NotEqual, object, oldShape, &failures);

    Label failuresPopObject;
    masm.push(object);    // save object reg because we clobber it

    // Guard that the incoming value is in the type set for the property
    // if a type barrier is required.
    if (checkTypeset) {
        TypedOrValueRegister valReg = value.reg();
        types::TypeObject *type = obj->type();
        types::HeapTypeSet *propTypes = type->maybeGetProperty(obj->lastProperty()->propid());
        JS_ASSERT(propTypes);
        JS_ASSERT(!propTypes->unknown());

        // Reuse |object| as scratch; reload it from the stack afterwards.
        Register scratchReg = object;
        masm.guardTypeSet(valReg, propTypes, scratchReg, &failuresPopObject);
        masm.loadPtr(Address(StackPointer, 0), object);
    }

    // Walk the prototype chain, guarding each proto's shape so no proto can
    // have acquired the property (or a setter for it) since compilation.
    JSObject *proto = obj->getProto();
    Register protoReg = object;
    while (proto) {
        Shape *protoShape = proto->lastProperty();

        // load next prototype
        masm.loadObjProto(protoReg, protoReg);

        // Ensure that its shape matches.
        masm.branchTestObjShape(Assembler::NotEqual, protoReg, protoShape, &failuresPopObject);

        proto = proto->getProto();
    }

    masm.pop(object);     // restore object reg

    // Changing object shape.  Write the object's new shape.
    Shape *newShape = obj->lastProperty();
    Address shapeAddr(object, JSObject::offsetOfShape());
    if (cx->zone()->needsBarrier())
        masm.callPreBarrier(shapeAddr, MIRType_Shape);
    masm.storePtr(ImmGCPtr(newShape), shapeAddr);

    // Set the value on the object. Since this is an add, obj->lastProperty()
    // must be the shape of the property we are adding.
    if (obj->isFixedSlot(newShape->slot())) {
        Address addr(object, JSObject::getFixedSlotOffset(newShape->slot()));
        masm.storeConstantOrRegister(value, addr);
    } else {
        Register slotsReg = object;

        masm.loadPtr(Address(object, JSObject::offsetOfSlots()), slotsReg);

        Address addr(slotsReg, obj->dynamicSlotIndex(newShape->slot()) * sizeof(Value));
        masm.storeConstantOrRegister(value, addr);
    }

    // Success.
    attacher.jumpRejoin(masm);

    // Failure: the pop-object exit restores the saved register, then falls
    // through to the common failure label.
    masm.bind(&failuresPopObject);
    masm.pop(object);
    masm.bind(&failures);

    attacher.jumpNextStub(masm);
}
|
2577 |
|
2578 bool |
|
2579 SetPropertyIC::attachAddSlot(JSContext *cx, IonScript *ion, JSObject *obj, HandleShape oldShape, |
|
2580 bool checkTypeset) |
|
2581 { |
|
2582 JS_ASSERT_IF(!needsTypeBarrier(), !checkTypeset); |
|
2583 |
|
2584 MacroAssembler masm(cx, ion); |
|
2585 RepatchStubAppender attacher(*this); |
|
2586 GenerateAddSlot(cx, masm, attacher, obj, oldShape, object(), value(), checkTypeset); |
|
2587 return linkAndAttachStub(cx, masm, attacher, ion, "adding"); |
|
2588 } |
|
2589 |
|
// Decide whether a set-property stub can satisfy TI's type barrier for
// writing |val| to |id| on |obj|. Returns false if the write must not be
// inlined at all (missing type set, or a constant/typed value the barrier
// would always reject). On success, *checkTypeset says whether the stub
// still needs a runtime type-set guard.
static bool
CanInlineSetPropTypeCheck(JSObject *obj, jsid id, ConstantOrRegister val, bool *checkTypeset)
{
    bool shouldCheck = false;
    types::TypeObject *type = obj->type();
    if (!type->unknownProperties()) {
        types::HeapTypeSet *propTypes = type->maybeGetProperty(id);
        if (!propTypes)
            return false;
        if (!propTypes->unknown()) {
            shouldCheck = true;
            if (val.constant()) {
                // If the input is a constant, then don't bother if the barrier will always fail.
                if (!propTypes->hasType(types::GetValueType(val.value())))
                    return false;
                shouldCheck = false;
            } else {
                TypedOrValueRegister reg = val.reg();
                // We can do the same trick as above for primitive types of specialized registers.
                // TIs handling of objects is complicated enough to warrant a runtime
                // check, as we can't statically handle the case where the typeset
                // contains the specific object, but doesn't have ANYOBJECT set.
                if (reg.hasTyped() && reg.type() != MIRType_Object) {
                    JSValueType valType = ValueTypeFromMIRType(reg.type());
                    if (!propTypes->hasType(types::Type::PrimitiveType(valType)))
                        return false;
                    shouldCheck = false;
                }
            }
        }
    }

    *checkTypeset = shouldCheck;
    return true;
}
|
2625 |
|
2626 static bool |
|
2627 IsPropertySetInlineable(HandleObject obj, HandleId id, MutableHandleShape pshape, |
|
2628 ConstantOrRegister val, bool needsTypeBarrier, bool *checkTypeset) |
|
2629 { |
|
2630 JS_ASSERT(obj->isNative()); |
|
2631 |
|
2632 // Do a pure non-proto chain climbing lookup. See note in |
|
2633 // CanAttachNativeGetProp. |
|
2634 pshape.set(obj->nativeLookupPure(id)); |
|
2635 |
|
2636 if (!pshape) |
|
2637 return false; |
|
2638 |
|
2639 if (!pshape->hasSlot()) |
|
2640 return false; |
|
2641 |
|
2642 if (!pshape->hasDefaultSetter()) |
|
2643 return false; |
|
2644 |
|
2645 if (!pshape->writable()) |
|
2646 return false; |
|
2647 |
|
2648 if (needsTypeBarrier) |
|
2649 return CanInlineSetPropTypeCheck(obj, id, val, checkTypeset); |
|
2650 |
|
2651 return true; |
|
2652 } |
|
2653 |
|
// Check whether the shape change just performed by a property set was a plain
// data-property addition that a stub can repeat: the shape must have changed
// from |oldShape|, the new shape must be a writable default-setter slot, no
// resolve/addProperty hooks may intervene anywhere on the proto chain, and
// the dynamic slot array must not have been reallocated.
static bool
IsPropertyAddInlineable(HandleObject obj, HandleId id, ConstantOrRegister val, uint32_t oldSlots,
                        HandleShape oldShape, bool needsTypeBarrier, bool *checkTypeset)
{
    JS_ASSERT(obj->isNative());

    // If the shape of the object did not change, then this was not an add.
    if (obj->lastProperty() == oldShape)
        return false;

    Shape *shape = obj->nativeLookupPure(id);
    if (!shape || shape->inDictionary() || !shape->hasSlot() || !shape->hasDefaultSetter())
        return false;

    // If we have a shape at this point and the object's shape changed, then
    // the shape must be the one we just added.
    JS_ASSERT(shape == obj->lastProperty());

    // If object has a non-default resolve hook, don't inline.
    if (obj->getClass()->resolve != JS_ResolveStub)
        return false;

    // Likewise for a non-default addProperty hook, since we'll need
    // to invoke it.
    if (obj->getClass()->addProperty != JS_PropertyStub)
        return false;

    if (!obj->nonProxyIsExtensible() || !shape->writable())
        return false;

    // Walk up the object prototype chain and ensure that all prototypes
    // are native, and that all prototypes have no getter or setter
    // defined on the property.
    for (JSObject *proto = obj->getProto(); proto; proto = proto->getProto()) {
        // If prototype is non-native, don't optimize.
        if (!proto->isNative())
            return false;

        // If prototype defines this property in a non-plain way, don't optimize.
        Shape *protoShape = proto->nativeLookupPure(id);
        if (protoShape && !protoShape->hasDefaultSetter())
            return false;

        // Otherwise, if there's no such property, watch out for a resolve
        // hook that would need to be invoked and thus prevent inlining of
        // property addition.
        if (proto->getClass()->resolve != JS_ResolveStub)
            return false;
    }

    // Only add an IC entry if the dynamic slots didn't change when the shapes
    // changed. Need to ensure that a shape change for a subsequent object
    // won't involve reallocating the slot array.
    if (obj->numDynamicSlots() != oldSlots)
        return false;

    if (needsTypeBarrier)
        return CanInlineSetPropTypeCheck(obj, id, val, checkTypeset);

    *checkTypeset = false;
    return true;
}
|
2716 |
|
// Classify how a property set on a native object can be cached. On a
// proto-chain hit, |holder| and |shape| are filled in by a pure lookup;
// |*checkTypeset| is meaningful only for a CanAttachSetSlot result.
static SetPropertyIC::NativeSetPropCacheability
CanAttachNativeSetProp(HandleObject obj, HandleId id, ConstantOrRegister val,
                       bool needsTypeBarrier, MutableHandleObject holder,
                       MutableHandleShape shape, bool *checkTypeset)
{
    if (!obj->isNative())
        return SetPropertyIC::CanAttachNone;

    // See if the property exists on the object.
    if (IsPropertySetInlineable(obj, id, shape, val, needsTypeBarrier, checkTypeset))
        return SetPropertyIC::CanAttachSetSlot;

    // If we couldn't find the property on the object itself, do a full, but
    // still pure lookup for setters.
    if (!LookupPropertyPure(obj, id, holder.address(), shape.address()))
        return SetPropertyIC::CanAttachNone;

    // If the object doesn't have the property, we don't know if we can attach
    // a stub to add the property until we do the VM call to add. If the
    // property exists as a data property on the prototype, we should add
    // a new, shadowing property.
    if (!shape || (obj != holder && shape->hasDefaultSetter() && shape->hasSlot()))
        return SetPropertyIC::MaybeCanAttachAddSlot;

    if (IsCacheableSetPropCallPropertyOp(obj, holder, shape) ||
        IsCacheableSetPropCallNative(obj, holder, shape))
    {
        return SetPropertyIC::CanAttachCallSetter;
    }

    return SetPropertyIC::CanAttachNone;
}
|
2749 |
|
// Out-of-line update entry for the SetProperty IC: try to attach a stub for
// the current receiver (proxy, set-slot or call-setter), then perform the
// actual property set, and finally try to attach an add-slot stub when the
// set turned out to add a brand-new property.
bool
SetPropertyIC::update(JSContext *cx, size_t cacheIndex, HandleObject obj,
                      HandleValue value)
{
    void *returnAddr;
    RootedScript script(cx, GetTopIonJSScript(cx, &returnAddr));
    IonScript *ion = script->ionScript();
    SetPropertyIC &cache = ion->getCache(cacheIndex).toSetProperty();
    RootedPropertyName name(cx, cache.name());
    RootedId id(cx, AtomToId(name));

    // Stop generating new stubs once we hit the stub count limit, see
    // GetPropertyCache.
    bool inlinable = cache.canAttachStub() && !obj->watched();
    NativeSetPropCacheability canCache = CanAttachNone;
    bool addedSetterStub = false;
    if (inlinable) {
        if (!addedSetterStub && obj->is<ProxyObject>()) {
            if (IsCacheableDOMProxy(obj)) {
                DOMProxyShadowsResult shadows = GetDOMProxyShadowsCheck()(cx, obj, id);
                if (shadows == ShadowCheckFailed)
                    return false;
                if (shadows == Shadows) {
                    if (!cache.attachDOMProxyShadowed(cx, ion, obj, returnAddr))
                        return false;
                    addedSetterStub = true;
                } else {
                    JS_ASSERT(shadows == DoesntShadow || shadows == DoesntShadowUnique);
                    // A DoesntShadowUnique proxy may start shadowing later;
                    // reset so the unshadowed stub is the only one attached.
                    if (shadows == DoesntShadowUnique)
                        cache.reset();
                    if (!cache.attachDOMProxyUnshadowed(cx, ion, obj, returnAddr))
                        return false;
                    addedSetterStub = true;
                }
            }

            if (!addedSetterStub && !cache.hasGenericProxyStub()) {
                if (!cache.attachGenericProxy(cx, ion, returnAddr))
                    return false;
                addedSetterStub = true;
            }
        }

        // Make sure the object de-lazifies its type. We do this here so that
        // the parallel IC can share code that assumes that native objects all
        // have a type object.
        if (obj->isNative() && !obj->getType(cx))
            return false;

        RootedShape shape(cx);
        RootedObject holder(cx);
        bool checkTypeset;
        canCache = CanAttachNativeSetProp(obj, id, cache.value(), cache.needsTypeBarrier(),
                                          &holder, &shape, &checkTypeset);

        if (!addedSetterStub && canCache == CanAttachSetSlot) {
            if (!cache.attachSetSlot(cx, ion, obj, shape, checkTypeset))
                return false;
            addedSetterStub = true;
        }

        if (!addedSetterStub && canCache == CanAttachCallSetter) {
            if (!cache.attachCallSetter(cx, ion, obj, holder, shape, returnAddr))
                return false;
            addedSetterStub = true;
        }
    }

    // Snapshot pre-set state so we can tell below whether the set added a
    // property (shape change) without reallocating the dynamic slot array.
    uint32_t oldSlots = obj->numDynamicSlots();
    RootedShape oldShape(cx, obj->lastProperty());

    // Set/Add the property on the object, the inlined cache are setup for the next execution.
    if (!SetProperty(cx, obj, name, value, cache.strict(), cache.pc()))
        return false;

    // The property did not exist before, now we can try to inline the property add.
    bool checkTypeset;
    if (!addedSetterStub && canCache == MaybeCanAttachAddSlot &&
        IsPropertyAddInlineable(obj, id, cache.value(), oldSlots, oldShape, cache.needsTypeBarrier(),
                                &checkTypeset))
    {
        if (!cache.attachAddSlot(cx, ion, obj, oldShape, checkTypeset))
            return false;
    }

    return true;
}
|
2837 |
|
// Discard all attached stubs; also forget that a generic proxy stub exists
// so one can be attached again afterwards.
void
SetPropertyIC::reset()
{
    RepatchIonCache::reset();
    hasGenericProxyStub_ = false;
}
|
2844 |
|
// Parallel-execution counterpart of SetPropertyIC::update. Stub attachment
// happens under LockedJSContext because the IC state is shared between
// workers; the actual set goes through the parallel SetPropertyHelper.
bool
SetPropertyParIC::update(ForkJoinContext *cx, size_t cacheIndex, HandleObject obj,
                         HandleValue value)
{
    JS_ASSERT(cx->isThreadLocal(obj));

    IonScript *ion = GetTopIonJSScript(cx)->parallelIonScript();
    SetPropertyParIC &cache = ion->getCache(cacheIndex).toSetPropertyPar();

    RootedValue v(cx, value);
    RootedId id(cx, AtomToId(cache.name()));

    // Avoid unnecessary locking if cannot attach stubs.
    if (!cache.canAttachStub()) {
        return baseops::SetPropertyHelper<ParallelExecution>(
            cx, obj, obj, id, baseops::Qualified, &v, cache.strict());
    }

    SetPropertyIC::NativeSetPropCacheability canCache = SetPropertyIC::CanAttachNone;
    bool attachedStub = false;

    {
        // See note about locking context in GetPropertyParIC::update.
        LockedJSContext ncx(cx);

        if (cache.canAttachStub()) {
            bool alreadyStubbed;
            if (!cache.hasOrAddStubbedShape(ncx, obj->lastProperty(), &alreadyStubbed))
                return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
            if (alreadyStubbed) {
                // A stub for this shape exists already; just do the set.
                return baseops::SetPropertyHelper<ParallelExecution>(
                    cx, obj, obj, id, baseops::Qualified, &v, cache.strict());
            }

            // If the object has a lazy type, we need to de-lazify it, but
            // this is not safe in parallel.
            if (obj->hasLazyType())
                return false;

            {
                RootedShape shape(cx);
                RootedObject holder(cx);
                bool checkTypeset;
                canCache = CanAttachNativeSetProp(obj, id, cache.value(), cache.needsTypeBarrier(),
                                                  &holder, &shape, &checkTypeset);

                if (canCache == SetPropertyIC::CanAttachSetSlot) {
                    if (!cache.attachSetSlot(ncx, ion, obj, shape, checkTypeset))
                        return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
                    attachedStub = true;
                }
            }
        }
    }

    // Snapshot pre-set state to detect a property add below.
    uint32_t oldSlots = obj->numDynamicSlots();
    RootedShape oldShape(cx, obj->lastProperty());

    if (!baseops::SetPropertyHelper<ParallelExecution>(cx, obj, obj, id, baseops::Qualified, &v,
                                                       cache.strict()))
    {
        return false;
    }

    bool checkTypeset;
    if (!attachedStub && canCache == SetPropertyIC::MaybeCanAttachAddSlot &&
        IsPropertyAddInlineable(obj, id, cache.value(), oldSlots, oldShape, cache.needsTypeBarrier(),
                                &checkTypeset))
    {
        // Re-take the lock to attach the add-slot stub.
        LockedJSContext ncx(cx);
        if (cache.canAttachStub() && !cache.attachAddSlot(ncx, ion, obj, oldShape, checkTypeset))
            return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
    }

    return true;
}
|
2921 |
|
// Attach a parallel set-slot stub; parallel ICs prepend dispatch-style stubs
// rather than repatching jumps.
bool
SetPropertyParIC::attachSetSlot(LockedJSContext &cx, IonScript *ion, JSObject *obj, Shape *shape,
                                bool checkTypeset)
{
    MacroAssembler masm(cx, ion);
    DispatchStubPrepender attacher(*this);
    GenerateSetSlot(cx, masm, attacher, obj, shape, object(), value(), needsTypeBarrier(),
                    checkTypeset);
    return linkAndAttachStub(cx, masm, attacher, ion, "parallel setting");
}
|
2932 |
|
// Attach a parallel add-slot stub for a property added under |oldShape|.
bool
SetPropertyParIC::attachAddSlot(LockedJSContext &cx, IonScript *ion, JSObject *obj, Shape *oldShape,
                                bool checkTypeset)
{
    // A typeset check is only meaningful when a type barrier is required.
    JS_ASSERT_IF(!needsTypeBarrier(), !checkTypeset);

    MacroAssembler masm(cx, ion);
    DispatchStubPrepender attacher(*this);
    GenerateAddSlot(cx, masm, attacher, obj, oldShape, object(), value(), checkTypeset);
    return linkAndAttachStub(cx, masm, attacher, ion, "parallel adding");
}
|
2944 |
|
// Number of consecutive failed updates tolerated before the IC disables itself.
const size_t GetElementIC::MAX_FAILED_UPDATES = 16;
|
2946 |
|
2947 /* static */ bool |
|
2948 GetElementIC::canAttachGetProp(JSObject *obj, const Value &idval, jsid id) |
|
2949 { |
|
2950 uint32_t dummy; |
|
2951 return (obj->isNative() && |
|
2952 idval.isString() && |
|
2953 JSID_IS_ATOM(id) && |
|
2954 !JSID_TO_ATOM(id)->isIndex(&dummy)); |
|
2955 } |
|
2956 |
|
// Out-of-line helper called from attachGetProp stubs: compare an atom (str1)
// against a non-atom string of the same length. Returns false both on
// inequality and when linearizing str2 fails.
static bool
EqualStringsHelper(JSString *str1, JSString *str2)
{
    JS_ASSERT(str1->isAtom());
    JS_ASSERT(!str2->isAtom());
    JS_ASSERT(str1->length() == str2->length());

    // A nullptr context means getChars cannot report OOM; it returns nullptr.
    const jschar *chars = str2->getChars(nullptr);
    if (!chars)
        return false;
    return mozilla::PodEqual(str1->asAtom().chars(), chars, str1->length());
}
|
2969 |
|
// Attach a GETELEM stub for a string-keyed property access (obj["foo"]). The
// stub guards that the index is a string equal to |name| — fast atom pointer
// comparison first, then an out-of-line character comparison through
// EqualStringsHelper — before performing a normal read-slot or call-getter
// property access.
bool
GetElementIC::attachGetProp(JSContext *cx, IonScript *ion, HandleObject obj,
                            const Value &idval, HandlePropertyName name,
                            void *returnAddr)
{
    JS_ASSERT(index().reg().hasValue());

    RootedObject holder(cx);
    RootedShape shape(cx);

    GetPropertyIC::NativeGetPropCacheability canCache =
        CanAttachNativeGetProp(cx, *this, obj, name, &holder, &shape,
                               /* skipArrayLen =*/true);

    bool cacheable = canCache == GetPropertyIC::CanAttachReadSlot ||
                     (canCache == GetPropertyIC::CanAttachCallGetter &&
                      output().hasValue());

    if (!cacheable) {
        IonSpew(IonSpew_InlineCaches, "GETELEM uncacheable property");
        return true;
    }

    JS_ASSERT(idval.isString());
    JS_ASSERT(idval.toString()->length() == name->length());

    Label failures;
    MacroAssembler masm(cx, ion);

    // Ensure the index is a string.
    ValueOperand val = index().reg().valueReg();
    masm.branchTestString(Assembler::NotEqual, val, &failures);

    Register scratch = output().valueReg().scratchReg();
    masm.unboxString(val, scratch);

    // Fast path: the index is the very same atom as |name|.
    Label equal;
    masm.branchPtr(Assembler::Equal, scratch, ImmGCPtr(name), &equal);

    // The pointers are not equal, so if the input string is also an atom it
    // must be a different string.
    masm.loadPtr(Address(scratch, JSString::offsetOfLengthAndFlags()), scratch);
    masm.branchTest32(Assembler::NonZero, scratch, Imm32(JSString::ATOM_BIT), &failures);

    // Check the length.
    masm.rshiftPtr(Imm32(JSString::LENGTH_SHIFT), scratch);
    masm.branch32(Assembler::NotEqual, scratch, Imm32(name->length()), &failures);

    // We have a non-atomized string with the same length. For now call a helper
    // function to do the comparison.
    RegisterSet volatileRegs = RegisterSet::Volatile();
    masm.PushRegsInMask(volatileRegs);

    Register objReg = object();
    JS_ASSERT(objReg != scratch);

    // objReg is clobbered below to pass |name|; save it manually if
    // PushRegsInMask didn't already.
    if (!volatileRegs.has(objReg))
        masm.push(objReg);

    masm.setupUnalignedABICall(2, scratch);
    masm.movePtr(ImmGCPtr(name), objReg);
    masm.passABIArg(objReg);
    masm.unboxString(val, scratch);
    masm.passABIArg(scratch);
    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, EqualStringsHelper));
    masm.mov(ReturnReg, scratch);

    if (!volatileRegs.has(objReg))
        masm.pop(objReg);

    // Restore volatile registers, keeping the helper's result in scratch.
    RegisterSet ignore = RegisterSet();
    ignore.add(scratch);
    masm.PopRegsInMaskIgnore(volatileRegs, ignore);

    masm.branchIfFalseBool(scratch, &failures);
    masm.bind(&equal);

    RepatchStubAppender attacher(*this);
    if (canCache == GetPropertyIC::CanAttachReadSlot) {
        GenerateReadSlot(cx, ion, masm, attacher, obj, holder, shape, object(), output(),
                         &failures);
    } else {
        JS_ASSERT(canCache == GetPropertyIC::CanAttachCallGetter);
        // Set the frame for bailout safety of the OOL call.
        if (!GenerateCallGetter(cx, ion, masm, attacher, obj, name, holder, shape, liveRegs_,
                                object(), output(), returnAddr, &failures))
        {
            return false;
        }
    }

    return linkAndAttachStub(cx, masm, attacher, ion, "property");
}
|
3063 |
|
3064 /* static */ bool |
|
3065 GetElementIC::canAttachDenseElement(JSObject *obj, const Value &idval) |
|
3066 { |
|
3067 return obj->isNative() && idval.isInt32(); |
|
3068 } |
|
3069 |
|
// Emit the body of a dense-element GETELEM stub: guard the object's shape and
// an int32 index, then load the element, failing out on holes and on indices
// at or past the initialized length.
static bool
GenerateDenseElement(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &attacher,
                     JSObject *obj, const Value &idval, Register object,
                     ConstantOrRegister index, TypedOrValueRegister output)
{
    JS_ASSERT(GetElementIC::canAttachDenseElement(obj, idval));

    Label failures;

    // Guard object's shape.
    RootedShape shape(cx, obj->lastProperty());
    if (!shape)
        return false;
    masm.branchTestObjShape(Assembler::NotEqual, object, shape, &failures);

    // Ensure the index is an int32 value.
    Register indexReg = InvalidReg;

    if (index.reg().hasValue()) {
        indexReg = output.scratchReg().gpr();
        JS_ASSERT(indexReg != InvalidReg);
        ValueOperand val = index.reg().valueReg();

        masm.branchTestInt32(Assembler::NotEqual, val, &failures);

        // Unbox the index.
        masm.unboxInt32(val, indexReg);
    } else {
        JS_ASSERT(!index.reg().typedReg().isFloat());
        indexReg = index.reg().typedReg().gpr();
    }

    // Load elements vector. The object register is reused to hold the
    // elements pointer, so its value is saved on the stack first.
    masm.push(object);
    masm.loadPtr(Address(object, JSObject::offsetOfElements()), object);

    Label hole;

    // Guard on the initialized length.
    Address initLength(object, ObjectElements::offsetOfInitializedLength());
    masm.branch32(Assembler::BelowOrEqual, initLength, indexReg, &hole);

    // Check for holes & load the value.
    masm.loadElementTypedOrValue(BaseIndex(object, indexReg, TimesEight),
                                 output, true, &hole);

    masm.pop(object);
    attacher.jumpRejoin(masm);

    // All failures flow to here. The hole path must restore the object
    // register first, since it was pushed above; earlier guards jump
    // directly to |failures| before the push.
    masm.bind(&hole);
    masm.pop(object);
    masm.bind(&failures);

    attacher.jumpNextStub(masm);

    return true;
}
|
3128 |
|
// Attach a dense-element stub; at most one is ever attached per cache,
// tracked via setHasDenseStub().
bool
GetElementIC::attachDenseElement(JSContext *cx, IonScript *ion, JSObject *obj, const Value &idval)
{
    MacroAssembler masm(cx, ion);
    RepatchStubAppender attacher(*this);
    if (!GenerateDenseElement(cx, masm, attacher, obj, idval, object(), index(), output()))
        return false;

    setHasDenseStub();
    return linkAndAttachStub(cx, masm, attacher, ion, "dense array");
}
|
3140 |
|
// Check whether a typed-array element stub can be attached for this access.
// The index may be an int32 or a string that parses as an array index.
/* static */ bool
GetElementIC::canAttachTypedArrayElement(JSObject *obj, const Value &idval,
                                         TypedOrValueRegister output)
{
    if (!obj->is<TypedArrayObject>())
        return false;

    if (!idval.isInt32() && !idval.isString())
        return false;


    // Don't emit a stub if the access is out of bounds. We want to make
    // certain that we monitor the type coming out of the typed array when
    // we generate the stub. Out of bounds accesses will hit the fallback
    // path.
    uint32_t index;
    if (idval.isInt32()) {
        index = idval.toInt32();
    } else {
        index = GetIndexFromString(idval.toString());
        if (index == UINT32_MAX)
            return false;
    }
    if (index >= obj->as<TypedArrayObject>().length())
        return false;

    // The output register is not yet specialized as a float register, the only
    // way to accept float typed arrays for now is to return a Value type.
    uint32_t arrayType = obj->as<TypedArrayObject>().type();
    if (arrayType == ScalarTypeDescr::TYPE_FLOAT32 ||
        arrayType == ScalarTypeDescr::TYPE_FLOAT64)
    {
        return output.hasValue();
    }

    return output.hasValue() || !output.typedReg().isFloat();
}
|
3178 |
|
// Emit the body of a typed-array element load: guard the shape, materialize
// an index register (converting a string key via an out-of-line call to
// GetIndexFromString when needed), bounds-check against the array length, and
// load the element from the data buffer.
static void
GenerateGetTypedArrayElement(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &attacher,
                             TypedArrayObject *tarr, const Value &idval, Register object,
                             ConstantOrRegister index, TypedOrValueRegister output,
                             bool allowDoubleResult)
{
    JS_ASSERT(GetElementIC::canAttachTypedArrayElement(tarr, idval, output));

    Label failures;

    // The array type is the object within the table of typed array classes.
    int arrayType = tarr->type();

    // Guard on the shape.
    Shape *shape = tarr->lastProperty();
    masm.branchTestObjShape(Assembler::NotEqual, object, shape, &failures);

    // Decide to what type index the stub should be optimized.
    Register tmpReg = output.scratchReg().gpr();
    JS_ASSERT(tmpReg != InvalidReg);
    Register indexReg = tmpReg;
    JS_ASSERT(!index.constant());
    if (idval.isString()) {
        JS_ASSERT(GetIndexFromString(idval.toString()) != UINT32_MAX);

        // Part 1: Get the string into a register.
        Register str;
        if (index.reg().hasValue()) {
            ValueOperand val = index.reg().valueReg();
            masm.branchTestString(Assembler::NotEqual, val, &failures);

            str = masm.extractString(val, indexReg);
        } else {
            JS_ASSERT(!index.reg().typedReg().isFloat());
            str = index.reg().typedReg().gpr();
        }

        // Part 2: Call to translate the str into index.
        RegisterSet regs = RegisterSet::Volatile();
        masm.PushRegsInMask(regs);
        regs.takeUnchecked(str);

        Register temp = regs.takeGeneral();

        masm.setupUnalignedABICall(1, temp);
        masm.passABIArg(str);
        masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, GetIndexFromString));
        masm.mov(ReturnReg, indexReg);

        // Restore the volatile registers, keeping the computed index.
        RegisterSet ignore = RegisterSet();
        ignore.add(indexReg);
        masm.PopRegsInMaskIgnore(RegisterSet::Volatile(), ignore);

        // GetIndexFromString signals failure with UINT32_MAX.
        masm.branch32(Assembler::Equal, indexReg, Imm32(UINT32_MAX), &failures);

    } else {
        JS_ASSERT(idval.isInt32());

        if (index.reg().hasValue()) {
            ValueOperand val = index.reg().valueReg();
            masm.branchTestInt32(Assembler::NotEqual, val, &failures);

            // Unbox the index.
            masm.unboxInt32(val, indexReg);
        } else {
            JS_ASSERT(!index.reg().typedReg().isFloat());
            indexReg = index.reg().typedReg().gpr();
        }
    }

    // Guard on the initialized length.
    Address length(object, TypedArrayObject::lengthOffset());
    masm.branch32(Assembler::BelowOrEqual, length, indexReg, &failures);

    // Save the object register on the stack in case of failure.
    Label popAndFail;
    Register elementReg = object;
    masm.push(object);

    // Load elements vector.
    masm.loadPtr(Address(object, TypedArrayObject::dataOffset()), elementReg);

    // Load the value. We use an invalid register because the destination
    // register is necessary a non double register.
    int width = TypedArrayObject::slotWidth(arrayType);
    BaseIndex source(elementReg, indexReg, ScaleFromElemWidth(width));
    if (output.hasValue()) {
        masm.loadFromTypedArray(arrayType, source, output.valueReg(), allowDoubleResult,
                                elementReg, &popAndFail);
    } else {
        masm.loadFromTypedArray(arrayType, source, output.typedReg(), elementReg, &popAndFail);
    }

    masm.pop(object);
    attacher.jumpRejoin(masm);

    // Restore the object before continuing to the next stub.
    masm.bind(&popAndFail);
    masm.pop(object);
    masm.bind(&failures);

    attacher.jumpNextStub(masm);
}
|
3282 |
|
// Attach a typed-array element stub for an in-bounds access.
bool
GetElementIC::attachTypedArrayElement(JSContext *cx, IonScript *ion, TypedArrayObject *tarr,
                                      const Value &idval)
{
    MacroAssembler masm(cx, ion);
    RepatchStubAppender attacher(*this);
    GenerateGetTypedArrayElement(cx, masm, attacher, tarr, idval, object(), index(), output(),
                                 allowDoubleResult());
    return linkAndAttachStub(cx, masm, attacher, ion, "typed array");
}
|
3293 |
|
// Attach a stub reading an element of a (normal or strict) arguments object.
// The stub guards the class, that length has not been overridden, that the
// index is in range and that the element has not been deleted, then loads the
// argument value directly from the ArgumentsData vector.
bool
GetElementIC::attachArgumentsElement(JSContext *cx, IonScript *ion, JSObject *obj)
{
    JS_ASSERT(obj->is<ArgumentsObject>());

    Label failures;
    MacroAssembler masm(cx, ion);
    RepatchStubAppender attacher(*this);

    Register tmpReg = output().scratchReg().gpr();
    JS_ASSERT(tmpReg != InvalidReg);

    const Class *clasp = obj->is<StrictArgumentsObject>() ? &StrictArgumentsObject::class_
                                                          : &NormalArgumentsObject::class_;

    masm.branchTestObjClass(Assembler::NotEqual, object(), tmpReg, clasp, &failures);

    // Get initial ArgsObj length value, test if length has been overridden.
    masm.unboxInt32(Address(object(), ArgumentsObject::getInitialLengthSlotOffset()), tmpReg);
    masm.branchTest32(Assembler::NonZero, tmpReg, Imm32(ArgumentsObject::LENGTH_OVERRIDDEN_BIT),
                      &failures);
    // Strip the packed flag bits to recover the actual length.
    masm.rshiftPtr(Imm32(ArgumentsObject::PACKED_BITS_COUNT), tmpReg);

    // Decide to what type index the stub should be optimized.
    Register indexReg;
    JS_ASSERT(!index().constant());

    // Check index against length.
    Label failureRestoreIndex;
    if (index().reg().hasValue()) {
        ValueOperand val = index().reg().valueReg();
        masm.branchTestInt32(Assembler::NotEqual, val, &failures);
        indexReg = val.scratchReg();

        masm.unboxInt32(val, indexReg);
        masm.branch32(Assembler::AboveOrEqual, indexReg, tmpReg, &failureRestoreIndex);
    } else {
        JS_ASSERT(index().reg().type() == MIRType_Int32);
        indexReg = index().reg().typedReg().gpr();
        masm.branch32(Assembler::AboveOrEqual, indexReg, tmpReg, &failures);
    }
    // Save indexReg because it needs to be clobbered to check deleted bit.
    Label failurePopIndex;
    masm.push(indexReg);

    // Check if property was deleted on arguments object.
    masm.loadPrivate(Address(object(), ArgumentsObject::getDataSlotOffset()), tmpReg);
    masm.loadPtr(Address(tmpReg, offsetof(ArgumentsData, deletedBits)), tmpReg);

    // In tempReg, calculate index of word containing bit: (idx >> logBitsPerWord)
    const uint32_t shift = FloorLog2<(sizeof(size_t) * JS_BITS_PER_BYTE)>::value;
    JS_ASSERT(shift == 5 || shift == 6);
    masm.rshiftPtr(Imm32(shift), indexReg);
    masm.loadPtr(BaseIndex(tmpReg, indexReg, ScaleFromElemWidth(sizeof(size_t))), tmpReg);

    // Don't bother testing specific bit, if any bit is set in the word, fail.
    masm.branchPtr(Assembler::NotEqual, tmpReg, ImmPtr(nullptr), &failurePopIndex);

    // Get the address to load from into tmpReg.
    masm.loadPrivate(Address(object(), ArgumentsObject::getDataSlotOffset()), tmpReg);
    masm.addPtr(Imm32(ArgumentsData::offsetOfArgs()), tmpReg);

    // Restore original index register value, to use for indexing element.
    masm.pop(indexReg);
    BaseIndex elemIdx(tmpReg, indexReg, ScaleFromElemWidth(sizeof(Value)));

    // Ensure result is not magic value, and type-check result.
    masm.branchTestMagic(Assembler::Equal, elemIdx, &failureRestoreIndex);

    if (output().hasTyped()) {
        JS_ASSERT(!output().typedReg().isFloat());
        JS_ASSERT(index().reg().type() == MIRType_Boolean ||
                  index().reg().type() == MIRType_Int32 ||
                  index().reg().type() == MIRType_String ||
                  index().reg().type() == MIRType_Object);
        masm.branchTestMIRType(Assembler::NotEqual, elemIdx, index().reg().type(),
                               &failureRestoreIndex);
    }

    masm.loadTypedOrValue(elemIdx, output());

    // indexReg may need to be reconstructed if it was originally a value.
    if (index().reg().hasValue())
        masm.tagValue(JSVAL_TYPE_INT32, indexReg, index().reg().valueReg());

    // Success.
    attacher.jumpRejoin(masm);

    // Restore the object before continuing to the next stub.
    masm.bind(&failurePopIndex);
    masm.pop(indexReg);
    masm.bind(&failureRestoreIndex);
    if (index().reg().hasValue())
        masm.tagValue(JSVAL_TYPE_INT32, indexReg, index().reg().valueReg());
    masm.bind(&failures);
    attacher.jumpNextStub(masm);


    // Record which flavor of arguments stub was attached; only one of each
    // flavor is ever attached per cache.
    if (obj->is<StrictArgumentsObject>()) {
        JS_ASSERT(!hasStrictArgumentsStub_);
        hasStrictArgumentsStub_ = true;
        return linkAndAttachStub(cx, masm, attacher, ion, "ArgsObj element (strict)");
    }

    JS_ASSERT(!hasNormalArgumentsStub_);
    hasNormalArgumentsStub_ = true;
    return linkAndAttachStub(cx, masm, attacher, ion, "ArgsObj element (normal)");
}
|
3402 |
|
// Out-of-line update entry for GETELEM: attempt to attach the most specific
// applicable stub (arguments element, string-keyed property, dense element,
// typed-array element), then perform the actual element get. Repeated failed
// updates eventually disable the cache.
bool
GetElementIC::update(JSContext *cx, size_t cacheIndex, HandleObject obj,
                     HandleValue idval, MutableHandleValue res)
{
    void *returnAddr;
    IonScript *ion = GetTopIonJSScript(cx, &returnAddr)->ionScript();
    GetElementIC &cache = ion->getCache(cacheIndex).toGetElement();
    RootedScript script(cx);
    jsbytecode *pc;
    cache.getScriptedLocation(&script, &pc);

    // Override the return value when the script is invalidated (bug 728188).
    AutoDetectInvalidation adi(cx, res.address(), ion);

    if (cache.isDisabled()) {
        if (!GetObjectElementOperation(cx, JSOp(*pc), obj, /* wasObject = */true, idval, res))
            return false;
        if (!cache.monitoredResult())
            types::TypeScript::Monitor(cx, script, pc, res);
        return true;
    }

    RootedId id(cx);
    if (!ValueToId<CanGC>(cx, idval, &id))
        return false;

    bool attachedStub = false;
    if (cache.canAttachStub()) {
        if (IsOptimizableArgumentsObjectForGetElem(obj, idval) &&
            !cache.hasArgumentsStub(obj->is<StrictArgumentsObject>()) &&
            !cache.index().constant() &&
            (cache.index().reg().hasValue() ||
             cache.index().reg().type() == MIRType_Int32) &&
            (cache.output().hasValue() || !cache.output().typedReg().isFloat()))
        {
            if (!cache.attachArgumentsElement(cx, ion, obj))
                return false;
            attachedStub = true;
        }
        if (!attachedStub && cache.monitoredResult() && canAttachGetProp(obj, idval, id)) {
            RootedPropertyName name(cx, JSID_TO_ATOM(id)->asPropertyName());
            if (!cache.attachGetProp(cx, ion, obj, idval, name, returnAddr))
                return false;
            attachedStub = true;
        }
        if (!attachedStub && !cache.hasDenseStub() && canAttachDenseElement(obj, idval)) {
            if (!cache.attachDenseElement(cx, ion, obj, idval))
                return false;
            attachedStub = true;
        }
        if (!attachedStub && canAttachTypedArrayElement(obj, idval, cache.output())) {
            Rooted<TypedArrayObject*> tarr(cx, &obj->as<TypedArrayObject>());
            if (!cache.attachTypedArrayElement(cx, ion, tarr, idval))
                return false;
            attachedStub = true;
        }
    }

    if (!GetObjectElementOperation(cx, JSOp(*pc), obj, /* wasObject = */true, idval, res))
        return false;

    // Disable cache when we reach max stubs or update failed too much.
    if (!attachedStub) {
        cache.incFailedUpdates();
        if (cache.shouldDisable()) {
            IonSpew(IonSpew_InlineCaches, "Disable inline cache");
            cache.disable();
        }
    } else {
        cache.resetFailedUpdates();
    }

    if (!cache.monitoredResult())
        types::TypeScript::Monitor(cx, script, pc, res);
    return true;
}
|
3479 |
|
// Drop all previously attached stubs and clear the per-kind bookkeeping
// flags so each specialized stub (dense element, strict/normal arguments)
// may be attached again after the cache is repatched.
void
GetElementIC::reset()
{
    RepatchIonCache::reset();
    hasDenseStub_ = false;
    hasStrictArgumentsStub_ = false;
    hasNormalArgumentsStub_ = false;
}
|
3488 |
|
3489 static bool |
|
3490 IsDenseElementSetInlineable(JSObject *obj, const Value &idval) |
|
3491 { |
|
3492 if (!obj->is<ArrayObject>()) |
|
3493 return false; |
|
3494 |
|
3495 if (obj->watched()) |
|
3496 return false; |
|
3497 |
|
3498 if (!idval.isInt32()) |
|
3499 return false; |
|
3500 |
|
3501 // The object may have a setter definition, |
|
3502 // either directly, or via a prototype, or via the target object for a prototype |
|
3503 // which is a proxy, that handles a particular integer write. |
|
3504 // Scan the prototype and shape chain to make sure that this is not the case. |
|
3505 JSObject *curObj = obj; |
|
3506 while (curObj) { |
|
3507 // Ensure object is native. |
|
3508 if (!curObj->isNative()) |
|
3509 return false; |
|
3510 |
|
3511 // Ensure all indexed properties are stored in dense elements. |
|
3512 if (curObj->isIndexed()) |
|
3513 return false; |
|
3514 |
|
3515 curObj = curObj->getProto(); |
|
3516 } |
|
3517 |
|
3518 return true; |
|
3519 } |
|
3520 |
|
3521 static bool |
|
3522 IsTypedArrayElementSetInlineable(JSObject *obj, const Value &idval, const Value &value) |
|
3523 { |
|
3524 // Don't bother attaching stubs for assigning strings and objects. |
|
3525 return (obj->is<TypedArrayObject>() && idval.isInt32() && |
|
3526 !value.isString() && !value.isObject()); |
|
3527 } |
|
3528 |
|
// Emit code that stores |value| into the dense element slot |target| of the
// elements vector held in |elements|. Honors the CONVERT_DOUBLE_ELEMENTS
// elements flag: int32 values are widened to double before the store when the
// flag is set; non-int32 values are always stored as-is.
static void
StoreDenseElement(MacroAssembler &masm, ConstantOrRegister value, Register elements,
                  BaseIndex target)
{
    // If the ObjectElements::CONVERT_DOUBLE_ELEMENTS flag is set, int32 values
    // have to be converted to double first. If the value is not int32, it can
    // always be stored directly.

    Address elementsFlags(elements, ObjectElements::offsetOfFlags());
    if (value.constant()) {
        // Constant case: the conversion decision for an int32 constant is made
        // at runtime by testing the elements flag, but both stores are baked in.
        Value v = value.value();
        Label done;
        if (v.isInt32()) {
            Label dontConvert;
            masm.branchTest32(Assembler::Zero, elementsFlags,
                              Imm32(ObjectElements::CONVERT_DOUBLE_ELEMENTS),
                              &dontConvert);
            masm.storeValue(DoubleValue(v.toInt32()), target);
            masm.jump(&done);
            masm.bind(&dontConvert);
        }
        masm.storeValue(v, target);
        masm.bind(&done);
        return;
    }

    // Register case: a typed register of a non-int32 type never needs
    // conversion, so store it directly.
    TypedOrValueRegister reg = value.reg();
    if (reg.hasTyped() && reg.type() != MIRType_Int32) {
        masm.storeTypedOrValue(reg, target);
        return;
    }

    // The register either holds a boxed Value (which may or may not contain an
    // int32) or a typed int32; test the flag and convert when required.
    Label convert, storeValue, done;
    masm.branchTest32(Assembler::NonZero, elementsFlags,
                      Imm32(ObjectElements::CONVERT_DOUBLE_ELEMENTS),
                      &convert);
    masm.bind(&storeValue);
    masm.storeTypedOrValue(reg, target);
    masm.jump(&done);

    masm.bind(&convert);
    if (reg.hasValue()) {
        // Boxed value: only convert if it actually holds an int32.
        masm.branchTestInt32(Assembler::NotEqual, reg.valueReg(), &storeValue);
        masm.int32ValueToDouble(reg.valueReg(), ScratchFloatReg);
        masm.storeDouble(ScratchFloatReg, target);
    } else {
        JS_ASSERT(reg.type() == MIRType_Int32);
        masm.convertInt32ToDouble(reg.typedReg().gpr(), ScratchFloatReg);
        masm.storeDouble(ScratchFloatReg, target);
    }

    masm.bind(&done);
}
|
3582 |
|
// Emit a SETELEM stub body that stores into a dense array. Guards on the
// object's shape and an int32 index, then either (guardHoles) restricts the
// store to already-initialized, non-hole slots, or (!guardHoles) additionally
// supports appending one element past the initialized length, bumping the
// initialized length and, if needed, the array length. Returns false only on
// the non-assembly failure of a missing shape.
static bool
GenerateSetDenseElement(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &attacher,
                        JSObject *obj, const Value &idval, bool guardHoles, Register object,
                        ValueOperand indexVal, ConstantOrRegister value, Register tempToUnboxIndex,
                        Register temp)
{
    JS_ASSERT(obj->isNative());
    JS_ASSERT(idval.isInt32());

    Label failures;
    Label outOfBounds; // index represents a known hole, or an illegal append

    Label markElem, storeElement; // used if TI protects us from worrying about holes.

    // Guard object is a dense array.
    Shape *shape = obj->lastProperty();
    if (!shape)
        return false;
    masm.branchTestObjShape(Assembler::NotEqual, object, shape, &failures);

    // Ensure the index is an int32 value.
    masm.branchTestInt32(Assembler::NotEqual, indexVal, &failures);

    // Unbox the index.
    Register index = masm.extractInt32(indexVal, tempToUnboxIndex);

    {
        // Load obj->elements.
        Register elements = temp;
        masm.loadPtr(Address(object, JSObject::offsetOfElements()), elements);

        // Compute the location of the element.
        BaseIndex target(elements, index, TimesEight);

        // If TI cannot help us deal with HOLES by preventing indexed properties
        // on the prototype chain, we have to be very careful to check for ourselves
        // to avoid stomping on what should be a setter call. Start by only allowing things
        // within the initialized length.
        if (guardHoles) {
            Address initLength(elements, ObjectElements::offsetOfInitializedLength());
            masm.branch32(Assembler::BelowOrEqual, initLength, index, &outOfBounds);
        } else {
            // Guard that we can increase the initialized length.
            Address capacity(elements, ObjectElements::offsetOfCapacity());
            masm.branch32(Assembler::BelowOrEqual, capacity, index, &outOfBounds);

            // Guard on the initialized length.
            Address initLength(elements, ObjectElements::offsetOfInitializedLength());
            masm.branch32(Assembler::Below, initLength, index, &outOfBounds);

            // if (initLength == index)
            masm.branch32(Assembler::NotEqual, initLength, index, &markElem);
            {
                // Increase initialized length.
                Int32Key newLength(index);
                masm.bumpKey(&newLength, 1);
                masm.storeKey(newLength, initLength);

                // Increase length if needed.
                Label bumpedLength;
                Address length(elements, ObjectElements::offsetOfLength());
                masm.branch32(Assembler::AboveOrEqual, length, index, &bumpedLength);
                masm.storeKey(newLength, length);
                masm.bind(&bumpedLength);

                // Restore the index.
                masm.bumpKey(&newLength, -1);
                masm.jump(&storeElement);
            }
            // else
            masm.bind(&markElem);
        }

        // Pre-barrier on the slot being overwritten, if incremental GC needs it.
        if (cx->zone()->needsBarrier())
            masm.callPreBarrier(target, MIRType_Value);

        // Store the value. In the guardHoles case, refuse to overwrite a
        // magic hole value — that would have to go through the setter path.
        if (guardHoles)
            masm.branchTestMagic(Assembler::Equal, target, &failures);
        else
            masm.bind(&storeElement);
        StoreDenseElement(masm, value, elements, target);
    }
    attacher.jumpRejoin(masm);

    // All failures flow to here.
    masm.bind(&outOfBounds);
    masm.bind(&failures);
    attacher.jumpNextStub(masm);

    return true;
}
|
3675 |
|
3676 bool |
|
3677 SetElementIC::attachDenseElement(JSContext *cx, IonScript *ion, JSObject *obj, const Value &idval) |
|
3678 { |
|
3679 MacroAssembler masm(cx, ion); |
|
3680 RepatchStubAppender attacher(*this); |
|
3681 if (!GenerateSetDenseElement(cx, masm, attacher, obj, idval, |
|
3682 guardHoles(), object(), index(), |
|
3683 value(), tempToUnboxIndex(), |
|
3684 temp())) |
|
3685 { |
|
3686 return false; |
|
3687 } |
|
3688 |
|
3689 setHasDenseStub(); |
|
3690 const char *message = guardHoles() ? |
|
3691 "dense array (holes)" : |
|
3692 "dense array"; |
|
3693 return linkAndAttachStub(cx, masm, attacher, ion, message); |
|
3694 } |
|
3695 |
|
// Emit a SETELEM stub body that stores into a typed array. Guards on the
// array's shape and an int32 index; out-of-bounds writes fall through to the
// rejoin point as no-ops. The stored value is converted to the array's element
// type (float32/float64/clamped-uint8/int32) before the store.
static bool
GenerateSetTypedArrayElement(JSContext *cx, MacroAssembler &masm, IonCache::StubAttacher &attacher,
                             TypedArrayObject *tarr, Register object,
                             ValueOperand indexVal, ConstantOrRegister value,
                             Register tempUnbox, Register temp, FloatRegister tempFloat)
{
    Label failures, done, popObjectAndFail;

    // Guard on the shape.
    Shape *shape = tarr->lastProperty();
    if (!shape)
        return false;
    masm.branchTestObjShape(Assembler::NotEqual, object, shape, &failures);

    // Ensure the index is an int32.
    masm.branchTestInt32(Assembler::NotEqual, indexVal, &failures);
    Register index = masm.extractInt32(indexVal, tempUnbox);

    // Guard on the length. Out-of-bounds stores on typed arrays are silently
    // ignored, so jump straight to |done| rather than to |failures|.
    Address length(object, TypedArrayObject::lengthOffset());
    masm.unboxInt32(length, temp);
    masm.branch32(Assembler::BelowOrEqual, temp, index, &done);

    // Load the elements vector. Note: |temp| is reused, the length is dead here.
    Register elements = temp;
    masm.loadPtr(Address(object, TypedArrayObject::dataOffset()), elements);

    // Set the value.
    int arrayType = tarr->type();
    int width = TypedArrayObject::slotWidth(arrayType);
    BaseIndex target(elements, index, ScaleFromElemWidth(width));

    if (arrayType == ScalarTypeDescr::TYPE_FLOAT32) {
        if (LIRGenerator::allowFloat32Optimizations()) {
            if (!masm.convertConstantOrRegisterToFloat(cx, value, tempFloat, &failures))
                return false;
        } else {
            if (!masm.convertConstantOrRegisterToDouble(cx, value, tempFloat, &failures))
                return false;
        }
        masm.storeToTypedFloatArray(arrayType, tempFloat, target);
    } else if (arrayType == ScalarTypeDescr::TYPE_FLOAT64) {
        if (!masm.convertConstantOrRegisterToDouble(cx, value, tempFloat, &failures))
            return false;
        masm.storeToTypedFloatArray(arrayType, tempFloat, target);
    } else {
        // On x86 we only have 6 registers available to use, so reuse the object
        // register to compute the intermediate value to store and restore it
        // afterwards.
        masm.push(object);

        if (arrayType == ScalarTypeDescr::TYPE_UINT8_CLAMPED) {
            if (!masm.clampConstantOrRegisterToUint8(cx, value, tempFloat, object,
                                                     &popObjectAndFail))
            {
                return false;
            }
        } else {
            if (!masm.truncateConstantOrRegisterToInt32(cx, value, tempFloat, object,
                                                        &popObjectAndFail))
            {
                return false;
            }
        }
        masm.storeToTypedIntArray(arrayType, object, target);

        // Restore the clobbered object register before rejoining.
        masm.pop(object);
    }

    // Out-of-bound writes jump here as they are no-ops.
    masm.bind(&done);
    attacher.jumpRejoin(masm);

    // Conversion failures that occurred while |object| was pushed must
    // rebalance the stack before taking the generic failure path.
    if (popObjectAndFail.used()) {
        masm.bind(&popObjectAndFail);
        masm.pop(object);
    }

    masm.bind(&failures);
    attacher.jumpNextStub(masm);
    return true;
}
|
3778 |
|
3779 bool |
|
3780 SetElementIC::attachTypedArrayElement(JSContext *cx, IonScript *ion, TypedArrayObject *tarr) |
|
3781 { |
|
3782 MacroAssembler masm(cx, ion); |
|
3783 RepatchStubAppender attacher(*this); |
|
3784 if (!GenerateSetTypedArrayElement(cx, masm, attacher, tarr, |
|
3785 object(), index(), value(), |
|
3786 tempToUnboxIndex(), temp(), tempFloat())) |
|
3787 { |
|
3788 return false; |
|
3789 } |
|
3790 |
|
3791 return linkAndAttachStub(cx, masm, attacher, ion, "typed array"); |
|
3792 } |
|
3793 |
|
3794 bool |
|
3795 SetElementIC::update(JSContext *cx, size_t cacheIndex, HandleObject obj, |
|
3796 HandleValue idval, HandleValue value) |
|
3797 { |
|
3798 IonScript *ion = GetTopIonJSScript(cx)->ionScript(); |
|
3799 SetElementIC &cache = ion->getCache(cacheIndex).toSetElement(); |
|
3800 |
|
3801 bool attachedStub = false; |
|
3802 if (cache.canAttachStub()) { |
|
3803 if (!cache.hasDenseStub() && IsDenseElementSetInlineable(obj, idval)) { |
|
3804 if (!cache.attachDenseElement(cx, ion, obj, idval)) |
|
3805 return false; |
|
3806 attachedStub = true; |
|
3807 } |
|
3808 if (!attachedStub && IsTypedArrayElementSetInlineable(obj, idval, value)) { |
|
3809 TypedArrayObject *tarr = &obj->as<TypedArrayObject>(); |
|
3810 if (!cache.attachTypedArrayElement(cx, ion, tarr)) |
|
3811 return false; |
|
3812 } |
|
3813 } |
|
3814 |
|
3815 if (!SetObjectElement(cx, obj, idval, value, cache.strict())) |
|
3816 return false; |
|
3817 return true; |
|
3818 } |
|
3819 |
|
// Drop all previously attached stubs and clear the dense-stub flag so a
// dense-element stub may be attached again after the cache is repatched.
void
SetElementIC::reset()
{
    RepatchIonCache::reset();
    hasDenseStub_ = false;
}
|
3826 |
|
3827 bool |
|
3828 SetElementParIC::attachDenseElement(LockedJSContext &cx, IonScript *ion, JSObject *obj, |
|
3829 const Value &idval) |
|
3830 { |
|
3831 MacroAssembler masm(cx, ion); |
|
3832 DispatchStubPrepender attacher(*this); |
|
3833 if (!GenerateSetDenseElement(cx, masm, attacher, obj, idval, |
|
3834 guardHoles(), object(), index(), |
|
3835 value(), tempToUnboxIndex(), |
|
3836 temp())) |
|
3837 { |
|
3838 return false; |
|
3839 } |
|
3840 |
|
3841 const char *message = guardHoles() ? |
|
3842 "parallel dense array (holes)" : |
|
3843 "parallel dense array"; |
|
3844 |
|
3845 return linkAndAttachStub(cx, masm, attacher, ion, message); |
|
3846 } |
|
3847 |
|
3848 bool |
|
3849 SetElementParIC::attachTypedArrayElement(LockedJSContext &cx, IonScript *ion, |
|
3850 TypedArrayObject *tarr) |
|
3851 { |
|
3852 MacroAssembler masm(cx, ion); |
|
3853 DispatchStubPrepender attacher(*this); |
|
3854 if (!GenerateSetTypedArrayElement(cx, masm, attacher, tarr, |
|
3855 object(), index(), value(), |
|
3856 tempToUnboxIndex(), temp(), tempFloat())) |
|
3857 { |
|
3858 return false; |
|
3859 } |
|
3860 |
|
3861 return linkAndAttachStub(cx, masm, attacher, ion, "parallel typed array"); |
|
3862 } |
|
3863 |
|
// Out-of-line update path for a parallel (ForkJoin) SETELEM IC. Stub
// attachment requires taking the compilation lock, so the cache is checked
// once without the lock and again under it; the actual store is always
// performed through SetElementPar.
bool
SetElementParIC::update(ForkJoinContext *cx, size_t cacheIndex, HandleObject obj,
                        HandleValue idval, HandleValue value)
{
    IonScript *ion = GetTopIonJSScript(cx)->parallelIonScript();
    SetElementParIC &cache = ion->getCache(cacheIndex).toSetElementPar();

    // Avoid unnecessary locking if cannot attach stubs.
    if (!cache.canAttachStub())
        return SetElementPar(cx, obj, idval, value, cache.strict());

    {
        LockedJSContext ncx(cx);

        // Re-check under the lock; another worker may have filled the cache
        // since the unlocked test above.
        if (cache.canAttachStub()) {
            bool alreadyStubbed;
            if (!cache.hasOrAddStubbedShape(ncx, obj->lastProperty(), &alreadyStubbed))
                return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
            if (alreadyStubbed)
                return SetElementPar(cx, obj, idval, value, cache.strict());

            bool attachedStub = false;
            if (IsDenseElementSetInlineable(obj, idval)) {
                if (!cache.attachDenseElement(ncx, ion, obj, idval))
                    return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
                attachedStub = true;
            }
            if (!attachedStub && IsTypedArrayElementSetInlineable(obj, idval, value)) {
                TypedArrayObject *tarr = &obj->as<TypedArrayObject>();
                if (!cache.attachTypedArrayElement(ncx, ion, tarr))
                    return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
            }
        }
    }

    return SetElementPar(cx, obj, idval, value, cache.strict());
}
|
3901 |
|
// Attach a parallel GETELEM stub that reads a named-property slot. Guards
// that the index Value equals the observed |idval| before falling into the
// shared read-slot code path.
bool
GetElementParIC::attachReadSlot(LockedJSContext &cx, IonScript *ion, JSObject *obj,
                                const Value &idval, PropertyName *name, JSObject *holder,
                                Shape *shape)
{
    MacroAssembler masm(cx, ion);
    DispatchStubPrepender attacher(*this);

    // Guard on the index value.
    Label failures;
    ValueOperand val = index().reg().valueReg();
    masm.branchTestValue(Assembler::NotEqual, val, idval, &failures);

    GenerateReadSlot(cx, ion, masm, attacher, obj, holder, shape, object(), output(),
                     &failures);

    return linkAndAttachStub(cx, masm, attacher, ion, "parallel getelem reading");
}
|
3920 |
|
3921 bool |
|
3922 GetElementParIC::attachDenseElement(LockedJSContext &cx, IonScript *ion, JSObject *obj, |
|
3923 const Value &idval) |
|
3924 { |
|
3925 MacroAssembler masm(cx, ion); |
|
3926 DispatchStubPrepender attacher(*this); |
|
3927 if (!GenerateDenseElement(cx, masm, attacher, obj, idval, object(), index(), output())) |
|
3928 return false; |
|
3929 |
|
3930 return linkAndAttachStub(cx, masm, attacher, ion, "parallel dense element"); |
|
3931 } |
|
3932 |
|
// Attach a parallel GETELEM stub that loads from a typed array's elements.
// Note: unlike the Generate* helpers above, GenerateGetTypedArrayElement's
// result is not checked here — presumably it cannot fail; TODO confirm
// against its definition (outside this chunk).
bool
GetElementParIC::attachTypedArrayElement(LockedJSContext &cx, IonScript *ion,
                                         TypedArrayObject *tarr, const Value &idval)
{
    MacroAssembler masm(cx, ion);
    DispatchStubPrepender attacher(*this);
    GenerateGetTypedArrayElement(cx, masm, attacher, tarr, idval, object(), index(), output(),
                                 allowDoubleResult());
    return linkAndAttachStub(cx, masm, attacher, ion, "parallel typed array");
}
|
3943 |
|
// Out-of-line update path for a parallel (ForkJoin) GETELEM IC. The element
// itself is fetched through the lock-free pure path; failure there bails out
// of parallel execution. Stub attachment (read-slot, dense element, typed
// array — tried in that order) happens under the compilation lock.
bool
GetElementParIC::update(ForkJoinContext *cx, size_t cacheIndex, HandleObject obj,
                        HandleValue idval, MutableHandleValue vp)
{
    IonScript *ion = GetTopIonJSScript(cx)->parallelIonScript();
    GetElementParIC &cache = ion->getCache(cacheIndex).toGetElementPar();

    // Try to get the element early, as the pure path doesn't need a lock. If
    // we can't do it purely, bail out of parallel execution.
    if (!GetObjectElementOperationPure(cx, obj, idval, vp.address()))
        return false;

    // Avoid unnecessary locking if cannot attach stubs.
    if (!cache.canAttachStub())
        return true;

    {
        // See note about locking context in GetPropertyParIC::update.
        LockedJSContext ncx(cx);

        // Re-check under the lock; another worker may have filled the cache.
        if (cache.canAttachStub()) {
            bool alreadyStubbed;
            if (!cache.hasOrAddStubbedShape(ncx, obj->lastProperty(), &alreadyStubbed))
                return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
            if (alreadyStubbed)
                return true;

            jsid id;
            if (!ValueToIdPure(idval, &id))
                return false;

            bool attachedStub = false;
            if (cache.monitoredResult() &&
                GetElementIC::canAttachGetProp(obj, idval, id))
            {
                RootedShape shape(ncx);
                RootedObject holder(ncx);
                RootedPropertyName name(ncx, JSID_TO_ATOM(id)->asPropertyName());

                GetPropertyIC::NativeGetPropCacheability canCache =
                    CanAttachNativeGetProp(ncx, cache, obj, name, &holder, &shape);

                if (canCache == GetPropertyIC::CanAttachReadSlot)
                {
                    if (!cache.attachReadSlot(ncx, ion, obj, idval, name, holder, shape))
                        return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
                    attachedStub = true;
                }
            }
            if (!attachedStub &&
                GetElementIC::canAttachDenseElement(obj, idval))
            {
                if (!cache.attachDenseElement(ncx, ion, obj, idval))
                    return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
                attachedStub = true;
            }
            if (!attachedStub &&
                GetElementIC::canAttachTypedArrayElement(obj, idval, cache.output()))
            {
                if (!cache.attachTypedArrayElement(ncx, ion, &obj->as<TypedArrayObject>(), idval))
                    return cx->setPendingAbortFatal(ParallelBailoutFailedIC);
                attachedStub = true;
            }
        }
    }

    return true;
}
|
4012 |
|
// Attach a BINDNAME stub for the case where the scope chain head is the
// global object: guard on object identity (not shape) and return the global
// itself as the bound scope.
bool
BindNameIC::attachGlobal(JSContext *cx, IonScript *ion, JSObject *scopeChain)
{
    JS_ASSERT(scopeChain->is<GlobalObject>());

    MacroAssembler masm(cx, ion);
    RepatchStubAppender attacher(*this);

    // Guard on the scope chain.
    attacher.branchNextStub(masm, Assembler::NotEqual, scopeChainReg(),
                            ImmGCPtr(scopeChain));
    masm.movePtr(ImmGCPtr(scopeChain), outputReg());

    attacher.jumpRejoin(masm);

    return linkAndAttachStub(cx, masm, attacher, ion, "global");
}
|
4030 |
|
// Emit (or deliberately elide) a shape guard for one object on the scope
// chain. The guard can be skipped when the object's shape provably cannot
// change in a way that introduces shadowing: non-eval call objects of
// non-extensible-scope functions, and global lookups that found a
// non-configurable property.
static inline void
GenerateScopeChainGuard(MacroAssembler &masm, JSObject *scopeObj,
                        Register scopeObjReg, Shape *shape, Label *failures)
{
    if (scopeObj->is<CallObject>()) {
        // We can skip a guard on the call object if the script's bindings are
        // guaranteed to be immutable (and thus cannot introduce shadowing
        // variables).
        CallObject *callObj = &scopeObj->as<CallObject>();
        if (!callObj->isForEval()) {
            JSFunction *fun = &callObj->callee();
            // The function might have been relazified under rare conditions.
            // In that case, we pessimistically create the guard, as we'd
            // need to root various pointers to delazify,
            if (fun->hasScript()) {
                JSScript *script = fun->nonLazyScript();
                if (!script->funHasExtensibleScope())
                    return;
            }
        }
    } else if (scopeObj->is<GlobalObject>()) {
        // If this is the last object on the scope walk, and the property we've
        // found is not configurable, then we don't need a shape guard because
        // the shape cannot be removed.
        if (shape && !shape->configurable())
            return;
    }

    // Default: guard that the object's current shape matches the one seen now.
    Address shapeAddr(scopeObjReg, JSObject::offsetOfShape());
    masm.branchPtr(Assembler::NotEqual, shapeAddr, ImmGCPtr(scopeObj->lastProperty()), failures);
}
|
4062 |
|
// Walk the scope chain from |scopeChain| to |holder|, emitting a shape guard
// for each object and advancing |outputReg| along the enclosing-scope links.
// On exit, outputReg holds |holder|. With skipLastGuard, the guard on the
// holder itself is omitted (the caller emits its own check).
static void
GenerateScopeChainGuards(MacroAssembler &masm, JSObject *scopeChain, JSObject *holder,
                         Register outputReg, Label *failures, bool skipLastGuard = false)
{
    JSObject *tobj = scopeChain;

    // Walk up the scope chain. Note that IsCacheableScopeChain guarantees the
    // |tobj == holder| condition terminates the loop.
    while (true) {
        JS_ASSERT(IsCacheableNonGlobalScope(tobj) || tobj->is<GlobalObject>());

        if (skipLastGuard && tobj == holder)
            break;

        GenerateScopeChainGuard(masm, tobj, outputReg, nullptr, failures);

        if (tobj == holder)
            break;

        // Load the next link.
        tobj = &tobj->as<ScopeObject>().enclosingScope();
        masm.extractObject(Address(outputReg, ScopeObject::offsetOfEnclosingScope()), outputReg);
    }
}
|
4087 |
|
// Attach a BINDNAME stub for a non-global scope chain head. Guards the head's
// shape, then (if the binding lives further up the chain) walks and guards
// each intermediate scope, leaving the holder in the output register.
bool
BindNameIC::attachNonGlobal(JSContext *cx, IonScript *ion, JSObject *scopeChain, JSObject *holder)
{
    JS_ASSERT(IsCacheableNonGlobalScope(scopeChain));

    MacroAssembler masm(cx, ion);
    RepatchStubAppender attacher(*this);

    // Guard on the shape of the scope chain. When the head itself is the
    // holder there are no other guards, so a failed guard can jump straight
    // to the next stub instead of a local label.
    Label failures;
    attacher.branchNextStubOrLabel(masm, Assembler::NotEqual,
                                   Address(scopeChainReg(), JSObject::offsetOfShape()),
                                   ImmGCPtr(scopeChain->lastProperty()),
                                   holder != scopeChain ? &failures : nullptr);

    if (holder != scopeChain) {
        JSObject *parent = &scopeChain->as<ScopeObject>().enclosingScope();
        masm.extractObject(Address(scopeChainReg(), ScopeObject::offsetOfEnclosingScope()), outputReg());

        GenerateScopeChainGuards(masm, parent, holder, outputReg(), &failures);
    } else {
        masm.movePtr(scopeChainReg(), outputReg());
    }

    // At this point outputReg holds the object on which the property
    // was found, so we're done.
    attacher.jumpRejoin(masm);

    // All failures flow to here, so there is a common point to patch.
    if (holder != scopeChain) {
        masm.bind(&failures);
        attacher.jumpNextStub(masm);
    }

    return linkAndAttachStub(cx, masm, attacher, ion, "non-global");
}
|
4124 |
|
4125 static bool |
|
4126 IsCacheableScopeChain(JSObject *scopeChain, JSObject *holder) |
|
4127 { |
|
4128 while (true) { |
|
4129 if (!IsCacheableNonGlobalScope(scopeChain)) { |
|
4130 IonSpew(IonSpew_InlineCaches, "Non-cacheable object on scope chain"); |
|
4131 return false; |
|
4132 } |
|
4133 |
|
4134 if (scopeChain == holder) |
|
4135 return true; |
|
4136 |
|
4137 scopeChain = &scopeChain->as<ScopeObject>().enclosingScope(); |
|
4138 if (!scopeChain) { |
|
4139 IonSpew(IonSpew_InlineCaches, "Scope chain indirect hit"); |
|
4140 return false; |
|
4141 } |
|
4142 } |
|
4143 |
|
4144 MOZ_ASSUME_UNREACHABLE("Invalid scope chain"); |
|
4145 } |
|
4146 |
|
// Out-of-line update path for a BINDNAME IC: resolve the object the name
// binds to (the global, or a scope object found by LookupNameWithGlobalDefault),
// attach a global or non-global stub if possible, and return the holder.
// Returns nullptr on error.
JSObject *
BindNameIC::update(JSContext *cx, size_t cacheIndex, HandleObject scopeChain)
{
    IonScript *ion = GetTopIonJSScript(cx)->ionScript();
    BindNameIC &cache = ion->getCache(cacheIndex).toBindName();
    HandlePropertyName name = cache.name();

    RootedObject holder(cx);
    if (scopeChain->is<GlobalObject>()) {
        holder = scopeChain;
    } else {
        if (!LookupNameWithGlobalDefault(cx, name, scopeChain, &holder))
            return nullptr;
    }

    // Stop generating new stubs once we hit the stub count limit, see
    // GetPropertyCache.
    if (cache.canAttachStub()) {
        if (scopeChain->is<GlobalObject>()) {
            if (!cache.attachGlobal(cx, ion, scopeChain))
                return nullptr;
        } else if (IsCacheableScopeChain(scopeChain, holder)) {
            if (!cache.attachNonGlobal(cx, ion, scopeChain, holder))
                return nullptr;
        } else {
            IonSpew(IonSpew_InlineCaches, "BINDNAME uncacheable scope chain");
        }
    }

    return holder;
}
|
4178 |
|
// Attach a NAME stub that walks the scope chain (guarding each scope) and
// then reads the property slot from |holder| via the shared read-slot path.
bool
NameIC::attachReadSlot(JSContext *cx, IonScript *ion, HandleObject scopeChain,
                       HandleObject holderBase, HandleObject holder,
                       HandleShape shape)
{
    MacroAssembler masm(cx, ion);
    Label failures;
    RepatchStubAppender attacher(*this);

    Register scratchReg = outputReg().valueReg().scratchReg();

    // Don't guard the base of the proto chain the name was found on. It will be guarded
    // by GenerateReadSlot().
    masm.mov(scopeChainReg(), scratchReg);
    GenerateScopeChainGuards(masm, scopeChain, holderBase, scratchReg, &failures,
                             /* skipLastGuard = */true);

    // GenerateScopeChainGuards leaves the last scope chain object in scratchReg,
    // even though it doesn't generate the extra guard.
    GenerateReadSlot(cx, ion, masm, attacher, holderBase, holder, shape, scratchReg,
                     outputReg(), failures.used() ? &failures : nullptr);

    return linkAndAttachStub(cx, masm, attacher, ion, "generic");
}
|
4203 |
|
// Decide whether a NAME lookup result can be served by a read-slot stub:
// the property must be a plain slot on a native global or call object, and
// every scope object between |scopeChain| and |obj| must itself be cacheable.
static bool
IsCacheableNameReadSlot(JSContext *cx, HandleObject scopeChain, HandleObject obj,
                        HandleObject holder, HandleShape shape, jsbytecode *pc,
                        const TypedOrValueRegister &output)
{
    if (!shape)
        return false;
    if (!obj->isNative())
        return false;

    if (obj->is<GlobalObject>()) {
        // Support only simple property lookups.
        if (!IsCacheableGetPropReadSlot(obj, holder, shape) &&
            !IsCacheableNoProperty(obj, holder, shape, pc, output))
            return false;
    } else if (obj->is<CallObject>()) {
        JS_ASSERT(obj == holder);
        if (!shape->hasDefaultGetter())
            return false;
    } else {
        // We don't yet support lookups on Block or DeclEnv objects.
        return false;
    }

    // Walk the scope chain and verify every object up to (and stopping at)
    // the global or the target object is cacheable.
    RootedObject obj2(cx, scopeChain);
    while (obj2) {
        if (!IsCacheableNonGlobalScope(obj2) && !obj2->is<GlobalObject>())
            return false;

        // Stop once we hit the global or target obj.
        if (obj2->is<GlobalObject>() || obj2 == obj)
            break;

        obj2 = obj2->enclosingScope();
    }

    // Cacheable only if the walk actually terminated on the target object.
    return obj == obj2;
}
|
4242 |
|
4243 bool |
|
4244 NameIC::attachCallGetter(JSContext *cx, IonScript *ion, JSObject *obj, JSObject *holder, |
|
4245 HandleShape shape, void *returnAddr) |
|
4246 { |
|
4247 MacroAssembler masm(cx, ion, script_, pc_); |
|
4248 |
|
4249 RepatchStubAppender attacher(*this); |
|
4250 if (!GenerateCallGetter(cx, ion, masm, attacher, obj, name(), holder, shape, liveRegs_, |
|
4251 scopeChainReg(), outputReg(), returnAddr)) |
|
4252 { |
|
4253 return false; |
|
4254 } |
|
4255 |
|
4256 const char *attachKind = "name getter"; |
|
4257 return linkAndAttachStub(cx, masm, attacher, ion, attachKind); |
|
4258 } |
|
4259 |
|
4260 static bool |
|
4261 IsCacheableNameCallGetter(JSObject *scopeChain, JSObject *obj, JSObject *holder, Shape *shape) |
|
4262 { |
|
4263 if (obj != scopeChain) |
|
4264 return false; |
|
4265 |
|
4266 if (!obj->is<GlobalObject>()) |
|
4267 return false; |
|
4268 |
|
4269 return IsCacheableGetPropCallNative(obj, holder, shape) || |
|
4270 IsCacheableGetPropCallPropertyOp(obj, holder, shape); |
|
4271 } |
|
4272 |
|
// Out-of-line update path for a NAME IC: perform the name lookup, attach a
// read-slot or call-getter stub when cacheable, fetch the value (with
// typeof-specific semantics when the cache is for typeof), and type-monitor
// the result.
bool
NameIC::update(JSContext *cx, size_t cacheIndex, HandleObject scopeChain,
               MutableHandleValue vp)
{
    void *returnAddr;
    IonScript *ion = GetTopIonJSScript(cx, &returnAddr)->ionScript();

    NameIC &cache = ion->getCache(cacheIndex).toName();
    RootedPropertyName name(cx, cache.name());

    RootedScript script(cx);
    jsbytecode *pc;
    cache.getScriptedLocation(&script, &pc);

    RootedObject obj(cx);
    RootedObject holder(cx);
    RootedShape shape(cx);
    if (!LookupName(cx, name, scopeChain, &obj, &holder, &shape))
        return false;

    if (cache.canAttachStub()) {
        if (IsCacheableNameReadSlot(cx, scopeChain, obj, holder, shape, pc, cache.outputReg())) {
            if (!cache.attachReadSlot(cx, ion, scopeChain, obj, holder, shape))
                return false;
        } else if (IsCacheableNameCallGetter(scopeChain, obj, holder, shape)) {
            if (!cache.attachCallGetter(cx, ion, obj, holder, shape, returnAddr))
                return false;
        }
    }

    // FetchName's template parameter selects typeof semantics (e.g. for
    // undefined names) vs. normal name-fetch semantics.
    if (cache.isTypeOf()) {
        if (!FetchName<true>(cx, obj, holder, name, shape, vp))
            return false;
    } else {
        if (!FetchName<false>(cx, obj, holder, name, shape, vp))
            return false;
    }

    // Monitor changes to cache entry.
    types::TypeScript::Monitor(cx, script, pc, vp);

    return true;
}
|
4316 |
|
// Attach a callsite-clone stub mapping one original function to its
// callsite-specific clone: guard on the callee's identity, then load the
// clone into the output register.
bool
CallsiteCloneIC::attach(JSContext *cx, IonScript *ion, HandleFunction original,
                        HandleFunction clone)
{
    MacroAssembler masm(cx, ion);
    RepatchStubAppender attacher(*this);

    // Guard against object identity on the original.
    attacher.branchNextStub(masm, Assembler::NotEqual, calleeReg(), ImmGCPtr(original));

    // Load the clone.
    masm.movePtr(ImmGCPtr(clone), outputReg());

    attacher.jumpRejoin(masm);

    return linkAndAttachStub(cx, masm, attacher, ion, "generic");
}
|
4334 |
|
4335 JSObject * |
|
4336 CallsiteCloneIC::update(JSContext *cx, size_t cacheIndex, HandleObject callee) |
|
4337 { |
|
4338 // Act as the identity for functions that are not clone-at-callsite, as we |
|
4339 // generate this cache as long as some callees are clone-at-callsite. |
|
4340 RootedFunction fun(cx, &callee->as<JSFunction>()); |
|
4341 if (!fun->hasScript() || !fun->nonLazyScript()->shouldCloneAtCallsite()) |
|
4342 return fun; |
|
4343 |
|
4344 IonScript *ion = GetTopIonJSScript(cx)->ionScript(); |
|
4345 CallsiteCloneIC &cache = ion->getCache(cacheIndex).toCallsiteClone(); |
|
4346 |
|
4347 RootedFunction clone(cx, CloneFunctionAtCallsite(cx, fun, cache.callScript(), cache.callPc())); |
|
4348 if (!clone) |
|
4349 return nullptr; |
|
4350 |
|
4351 if (cache.canAttachStub()) { |
|
4352 if (!cache.attach(cx, ion, fun, clone)) |
|
4353 return nullptr; |
|
4354 } |
|
4355 |
|
4356 return clone; |
|
4357 } |