Wed, 31 Dec 2014 06:09:35 +0100
Cloned upstream origin tor-browser at tor-browser-31.3.0esr-4.5-1-build1
revision ID fc1c9ff7c1b2defdbc039f12214767608f46423f for hacking purposes.
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "jit/BaselineJIT.h"

#include "mozilla/MemoryReporting.h"

#include "jit/BaselineCompiler.h"
#include "jit/BaselineIC.h"
#include "jit/CompileInfo.h"
#include "jit/IonSpewer.h"
#include "jit/JitCommon.h"
#include "vm/Interpreter.h"
#include "vm/TraceLogging.h"

#include "jsgcinlines.h"
#include "jsobjinlines.h"
#include "jsopcodeinlines.h"
#include "jsscriptinlines.h"

#include "jit/IonFrames-inl.h"
#include "vm/Stack-inl.h"

using namespace js;
using namespace js::jit;

/* static */ PCMappingSlotInfo::SlotLocation
PCMappingSlotInfo::ToSlotLocation(const StackValue *stackVal)
{
    if (stackVal->kind() == StackValue::Register) {
        if (stackVal->reg() == R0)
            return SlotInR0;
        JS_ASSERT(stackVal->reg() == R1);
        return SlotInR1;
    }
    JS_ASSERT(stackVal->kind() != StackValue::Stack);
    return SlotIgnore;
}

BaselineScript::BaselineScript(uint32_t prologueOffset, uint32_t epilogueOffset,
                               uint32_t spsPushToggleOffset, uint32_t postDebugPrologueOffset)
  : method_(nullptr),
    templateScope_(nullptr),
    fallbackStubSpace_(),
    prologueOffset_(prologueOffset),
    epilogueOffset_(epilogueOffset),
#ifdef DEBUG
    spsOn_(false),
#endif
    spsPushToggleOffset_(spsPushToggleOffset),
    postDebugPrologueOffset_(postDebugPrologueOffset),
    flags_(0)
{ }

static const size_t BASELINE_LIFO_ALLOC_PRIMARY_CHUNK_SIZE = 4096;
static const unsigned BASELINE_MAX_ARGS_LENGTH = 20000;

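// Calls with more actual arguments than BASELINE_MAX_ARGS_LENGTH are left to
// the interpreter (see CheckFrame and CanEnterBaselineMethod below) to avoid
// running out of stack space in Baseline code.
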
static bool
CheckFrame(InterpreterFrame *fp)
{
    if (fp->isGeneratorFrame()) {
        IonSpew(IonSpew_BaselineAbort, "generator frame");
        return false;
    }

    if (fp->isDebuggerFrame()) {
        // Debugger eval-in-frame. These are likely short-running scripts so
        // don't bother compiling them for now.
        IonSpew(IonSpew_BaselineAbort, "debugger frame");
        return false;
    }

    if (fp->isNonEvalFunctionFrame() && fp->numActualArgs() > BASELINE_MAX_ARGS_LENGTH) {
        // Fall back to the interpreter to avoid running out of stack space.
        IonSpew(IonSpew_BaselineAbort, "Too many arguments (%u)", fp->numActualArgs());
        return false;
    }

    return true;
}

static bool
IsJSDEnabled(JSContext *cx)
{
    return cx->compartment()->debugMode() && cx->runtime()->debugHooks.callHook;
}

static IonExecStatus
EnterBaseline(JSContext *cx, EnterJitData &data)
{
    if (data.osrFrame) {
        // Check for potential stack overflow before OSR-ing.
        uint8_t spDummy;
        uint32_t extra = BaselineFrame::Size() + (data.osrNumStackValues * sizeof(Value));
        uint8_t *checkSp = (&spDummy) - extra;
        JS_CHECK_RECURSION_WITH_SP(cx, checkSp, return IonExec_Aborted);
    } else {
        JS_CHECK_RECURSION(cx, return IonExec_Aborted);
    }

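    // The OSR overflow estimate above sizes the frame we are about to push:
    // the fixed BaselineFrame plus one Value slot per expression-stack entry,
    // measured from the current native stack pointer (&spDummy).
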
    JS_ASSERT(jit::IsBaselineEnabled(cx));
    JS_ASSERT_IF(data.osrFrame, CheckFrame(data.osrFrame));

    EnterJitCode enter = cx->runtime()->jitRuntime()->enterBaseline();

    // Caller must construct |this| before invoking the Ion function.
    JS_ASSERT_IF(data.constructing, data.maxArgv[0].isObject());

    data.result.setInt32(data.numActualArgs);
    {
        AssertCompartmentUnchanged pcc(cx);
        JitActivation activation(cx, data.constructing);

        if (data.osrFrame)
            data.osrFrame->setRunningInJit();

        JS_ASSERT_IF(data.osrFrame, !IsJSDEnabled(cx));

        // Single transition point from Interpreter to Baseline.
        CALL_GENERATED_CODE(enter, data.jitcode, data.maxArgc, data.maxArgv, data.osrFrame, data.calleeToken,
                            data.scopeChain.get(), data.osrNumStackValues, data.result.address());

        if (data.osrFrame)
            data.osrFrame->clearRunningInJit();
    }

    JS_ASSERT(!cx->runtime()->hasIonReturnOverride());

    // Jit callers wrap primitive constructor return.
    if (!data.result.isMagic() && data.constructing && data.result.isPrimitive())
        data.result = data.maxArgv[0];

    // Release temporary buffer used for OSR into Ion.
    cx->runtime()->getJitRuntime(cx)->freeOsrTempData();

    JS_ASSERT_IF(data.result.isMagic(), data.result.isMagic(JS_ION_ERROR));
    return data.result.isMagic() ? IonExec_Error : IonExec_Ok;
}

IonExecStatus
jit::EnterBaselineMethod(JSContext *cx, RunState &state)
{
    BaselineScript *baseline = state.script()->baselineScript();

    EnterJitData data(cx);
    data.jitcode = baseline->method()->raw();

    AutoValueVector vals(cx);
    if (!SetEnterJitData(cx, data, state, vals))
        return IonExec_Error;

    IonExecStatus status = EnterBaseline(cx, data);
    if (status != IonExec_Ok)
        return status;

    state.setReturnValue(data.result);
    return IonExec_Ok;
}

IonExecStatus
jit::EnterBaselineAtBranch(JSContext *cx, InterpreterFrame *fp, jsbytecode *pc)
{
    JS_ASSERT(JSOp(*pc) == JSOP_LOOPENTRY);

    BaselineScript *baseline = fp->script()->baselineScript();

    EnterJitData data(cx);
    data.jitcode = baseline->nativeCodeForPC(fp->script(), pc);

    // Skip the debug breakpoint/trap handler; the interpreter already handled
    // it for the current op.
    if (cx->compartment()->debugMode())
        data.jitcode += MacroAssembler::ToggledCallSize();

    data.osrFrame = fp;
    data.osrNumStackValues = fp->script()->nfixed() + cx->interpreterRegs().stackDepth();

    RootedValue thisv(cx);

    if (fp->isNonEvalFunctionFrame()) {
        data.constructing = fp->isConstructing();
        data.numActualArgs = fp->numActualArgs();
        data.maxArgc = Max(fp->numActualArgs(), fp->numFormalArgs()) + 1; // +1 = include |this|
        data.maxArgv = fp->argv() - 1; // -1 = include |this|
        data.scopeChain = nullptr;
        data.calleeToken = CalleeToToken(&fp->callee());
    } else {
        thisv = fp->thisValue();
        data.constructing = false;
        data.numActualArgs = 0;
        data.maxArgc = 1;
        data.maxArgv = thisv.address();
        data.scopeChain = fp->scopeChain();

        // For eval function frames, set the callee token to the enclosing function.
        if (fp->isFunctionFrame())
            data.calleeToken = CalleeToToken(&fp->callee());
        else
            data.calleeToken = CalleeToToken(fp->script());
    }

    TraceLogger *logger = TraceLoggerForMainThread(cx->runtime());
    TraceLogStopEvent(logger, TraceLogger::Interpreter);
    TraceLogStartEvent(logger, TraceLogger::Baseline);

    IonExecStatus status = EnterBaseline(cx, data);
    if (status != IonExec_Ok)
        return status;

    fp->setReturnValue(data.result);
    return IonExec_Ok;
}

MethodStatus
jit::BaselineCompile(JSContext *cx, JSScript *script)
{
    JS_ASSERT(!script->hasBaselineScript());
    JS_ASSERT(script->canBaselineCompile());
    JS_ASSERT(IsBaselineEnabled(cx));
    LifoAlloc alloc(BASELINE_LIFO_ALLOC_PRIMARY_CHUNK_SIZE);

    script->ensureNonLazyCanonicalFunction(cx);

    TempAllocator *temp = alloc.new_<TempAllocator>(&alloc);
    if (!temp)
        return Method_Error;

    IonContext ictx(cx, temp);

    BaselineCompiler compiler(cx, *temp, script);
    if (!compiler.init())
        return Method_Error;

    MethodStatus status = compiler.compile();

    JS_ASSERT_IF(status == Method_Compiled, script->hasBaselineScript());
    JS_ASSERT_IF(status != Method_Compiled, !script->hasBaselineScript());

    if (status == Method_CantCompile)
        script->setBaselineScript(cx, BASELINE_DISABLED_SCRIPT);

    return status;
}

static MethodStatus
CanEnterBaselineJIT(JSContext *cx, HandleScript script, bool osr)
{
    JS_ASSERT(jit::IsBaselineEnabled(cx));

    // Skip if the script has been disabled.
    if (!script->canBaselineCompile())
        return Method_Skipped;

    if (script->length() > BaselineScript::MAX_JSSCRIPT_LENGTH)
        return Method_CantCompile;

    if (script->nslots() > BaselineScript::MAX_JSSCRIPT_SLOTS)
        return Method_CantCompile;

    if (!cx->compartment()->ensureJitCompartmentExists(cx))
        return Method_Error;

    if (script->hasBaselineScript())
        return Method_Compiled;

    // Check script use count. However, always eagerly compile scripts if JSD
    // is enabled, so that we don't have to OSR and don't have to update the
    // frame pointer stored in JSD's frames list.
    //
    // Also eagerly compile if we are in parallel warmup, the point of which
    // is to gather type information so that the script may be compiled for
    // parallel execution. We want to avoid the situation of OSRing during
    // warmup and only gathering type information for the loop, and not the
    // rest of the function.
    if (IsJSDEnabled(cx) || cx->runtime()->forkJoinWarmup > 0) {
        if (osr)
            return Method_Skipped;
    } else if (script->incUseCount() <= js_JitOptions.baselineUsesBeforeCompile) {
        return Method_Skipped;
    }

    if (script->isCallsiteClone()) {
        // Ensure the original function is compiled too, so that bailouts from
        // Ion code have a BaselineScript to resume into.
        RootedScript original(cx, script->donorFunction()->nonLazyScript());
        JS_ASSERT(original != script);

        if (!original->canBaselineCompile())
            return Method_CantCompile;

        if (!original->hasBaselineScript()) {
            MethodStatus status = BaselineCompile(cx, original);
            if (status != Method_Compiled)
                return status;
        }
    }

    return BaselineCompile(cx, script);
}

MethodStatus
jit::CanEnterBaselineAtBranch(JSContext *cx, InterpreterFrame *fp, bool newType)
{
    // If constructing, allocate a new |this| object.
    if (fp->isConstructing() && fp->functionThis().isPrimitive()) {
        RootedObject callee(cx, &fp->callee());
        RootedObject obj(cx, CreateThisForFunction(cx, callee, newType ? SingletonObject : GenericObject));
        if (!obj)
            return Method_Skipped;
        fp->functionThis().setObject(*obj);
    }

    if (!CheckFrame(fp))
        return Method_CantCompile;

    RootedScript script(cx, fp->script());
    return CanEnterBaselineJIT(cx, script, /* osr = */true);
}

MethodStatus
jit::CanEnterBaselineMethod(JSContext *cx, RunState &state)
{
    if (state.isInvoke()) {
        InvokeState &invoke = *state.asInvoke();

        if (invoke.args().length() > BASELINE_MAX_ARGS_LENGTH) {
            IonSpew(IonSpew_BaselineAbort, "Too many arguments (%u)", invoke.args().length());
            return Method_CantCompile;
        }

        // If constructing, allocate a new |this| object.
        if (invoke.constructing() && invoke.args().thisv().isPrimitive()) {
            RootedObject callee(cx, &invoke.args().callee());
            RootedObject obj(cx, CreateThisForFunction(cx, callee,
                                                       invoke.useNewType()
                                                       ? SingletonObject
                                                       : GenericObject));
            if (!obj)
                return Method_Skipped;
            invoke.args().setThis(ObjectValue(*obj));
        }
    } else if (state.isExecute()) {
        ExecuteType type = state.asExecute()->type();
        if (type == EXECUTE_DEBUG || type == EXECUTE_DEBUG_GLOBAL) {
            IonSpew(IonSpew_BaselineAbort, "debugger frame");
            return Method_CantCompile;
        }
    } else {
        JS_ASSERT(state.isGenerator());
        IonSpew(IonSpew_BaselineAbort, "generator frame");
        return Method_CantCompile;
    }

    RootedScript script(cx, state.script());
    return CanEnterBaselineJIT(cx, script, /* osr = */false);
}

BaselineScript *
BaselineScript::New(JSContext *cx, uint32_t prologueOffset, uint32_t epilogueOffset,
                    uint32_t spsPushToggleOffset, uint32_t postDebugPrologueOffset,
                    size_t icEntries, size_t pcMappingIndexEntries, size_t pcMappingSize,
                    size_t bytecodeTypeMapEntries)
{
    static const unsigned DataAlignment = sizeof(uintptr_t);

    size_t paddedBaselineScriptSize = AlignBytes(sizeof(BaselineScript), DataAlignment);

    size_t icEntriesSize = icEntries * sizeof(ICEntry);
    size_t pcMappingIndexEntriesSize = pcMappingIndexEntries * sizeof(PCMappingIndexEntry);
    size_t bytecodeTypeMapSize = bytecodeTypeMapEntries * sizeof(uint32_t);

    size_t paddedICEntriesSize = AlignBytes(icEntriesSize, DataAlignment);
    size_t paddedPCMappingIndexEntriesSize = AlignBytes(pcMappingIndexEntriesSize, DataAlignment);
    size_t paddedPCMappingSize = AlignBytes(pcMappingSize, DataAlignment);
    size_t paddedBytecodeTypesMapSize = AlignBytes(bytecodeTypeMapSize, DataAlignment);

    size_t allocBytes = paddedBaselineScriptSize +
                        paddedICEntriesSize +
                        paddedPCMappingIndexEntriesSize +
                        paddedPCMappingSize +
                        paddedBytecodeTypesMapSize;

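    // The resulting single allocation is laid out, in order and with each
    // section uintptr_t-aligned:
    //
    //   [BaselineScript][ICEntry array][PCMappingIndexEntry array]
    //   [PC mapping data][bytecode type map]
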
    uint8_t *buffer = (uint8_t *)cx->malloc_(allocBytes);
    if (!buffer)
        return nullptr;

    BaselineScript *script = reinterpret_cast<BaselineScript *>(buffer);
    new (script) BaselineScript(prologueOffset, epilogueOffset,
                                spsPushToggleOffset, postDebugPrologueOffset);

    size_t offsetCursor = paddedBaselineScriptSize;

    script->icEntriesOffset_ = offsetCursor;
    script->icEntries_ = icEntries;
    offsetCursor += paddedICEntriesSize;

    script->pcMappingIndexOffset_ = offsetCursor;
    script->pcMappingIndexEntries_ = pcMappingIndexEntries;
    offsetCursor += paddedPCMappingIndexEntriesSize;

    script->pcMappingOffset_ = offsetCursor;
    script->pcMappingSize_ = pcMappingSize;
    offsetCursor += paddedPCMappingSize;

    script->bytecodeTypeMapOffset_ = bytecodeTypeMapEntries ? offsetCursor : 0;

    return script;
}

void
BaselineScript::trace(JSTracer *trc)
{
    MarkJitCode(trc, &method_, "baseline-method");
    if (templateScope_)
        MarkObject(trc, &templateScope_, "baseline-template-scope");

    // Mark all IC stub codes hanging off the IC stub entries.
    for (size_t i = 0; i < numICEntries(); i++) {
        ICEntry &ent = icEntry(i);
        if (!ent.hasStub())
            continue;
        for (ICStub *stub = ent.firstStub(); stub; stub = stub->next())
            stub->trace(trc);
    }
}

/* static */
void
BaselineScript::writeBarrierPre(Zone *zone, BaselineScript *script)
{
#ifdef JSGC_INCREMENTAL
    if (zone->needsBarrier())
        script->trace(zone->barrierTracer());
#endif
}

void
BaselineScript::Trace(JSTracer *trc, BaselineScript *script)
{
    script->trace(trc);
}

void
BaselineScript::Destroy(FreeOp *fop, BaselineScript *script)
{
#ifdef JSGC_GENERATIONAL
    /*
     * When the script contains pointers to nursery things, the store buffer
     * will contain entries referring to the referenced things. Since we can
     * destroy scripts outside the context of a GC, this situation can result
     * in invalid store buffer entries. Assert that if we do destroy scripts
     * outside of a GC that we at least emptied the nursery first.
     */
    JS_ASSERT(fop->runtime()->gcNursery.isEmpty());
#endif
    fop->delete_(script);
}

ICEntry &
BaselineScript::icEntry(size_t index)
{
    JS_ASSERT(index < numICEntries());
    return icEntryList()[index];
}

PCMappingIndexEntry &
BaselineScript::pcMappingIndexEntry(size_t index)
{
    JS_ASSERT(index < numPCMappingIndexEntries());
    return pcMappingIndexEntryList()[index];
}

CompactBufferReader
BaselineScript::pcMappingReader(size_t indexEntry)
{
    PCMappingIndexEntry &entry = pcMappingIndexEntry(indexEntry);

    uint8_t *dataStart = pcMappingData() + entry.bufferOffset;
    uint8_t *dataEnd = (indexEntry == numPCMappingIndexEntries() - 1)
                       ? pcMappingData() + pcMappingSize_
                       : pcMappingData() + pcMappingIndexEntry(indexEntry + 1).bufferOffset;

    return CompactBufferReader(dataStart, dataEnd);
}

ICEntry *
BaselineScript::maybeICEntryFromReturnOffset(CodeOffsetLabel returnOffset)
{
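    // Binary search (lower bound): find the first IC entry whose return
    // offset is not less than |returnOffset|.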
    size_t bottom = 0;
    size_t top = numICEntries();
    size_t mid = bottom + (top - bottom) / 2;
    while (mid < top) {
        ICEntry &midEntry = icEntry(mid);
        if (midEntry.returnOffset().offset() < returnOffset.offset())
            bottom = mid + 1;
        else // if (midEntry.returnOffset().offset() >= returnOffset.offset())
            top = mid;
        mid = bottom + (top - bottom) / 2;
    }
    if (mid >= numICEntries())
        return nullptr;

    if (icEntry(mid).returnOffset().offset() != returnOffset.offset())
        return nullptr;

    return &icEntry(mid);
}

ICEntry &
BaselineScript::icEntryFromReturnOffset(CodeOffsetLabel returnOffset)
{
    ICEntry *result = maybeICEntryFromReturnOffset(returnOffset);
    JS_ASSERT(result);
    return *result;
}

uint8_t *
BaselineScript::returnAddressForIC(const ICEntry &ent)
{
    return method()->raw() + ent.returnOffset().offset();
}

ICEntry &
BaselineScript::icEntryFromPCOffset(uint32_t pcOffset)
{
    // Multiple IC entries can have the same PC offset, but this method only looks for
    // those which have isForOp() set.
    size_t bottom = 0;
    size_t top = numICEntries();
    size_t mid = bottom + (top - bottom) / 2;
    while (mid < top) {
        ICEntry &midEntry = icEntry(mid);
        if (midEntry.pcOffset() < pcOffset)
            bottom = mid + 1;
        else if (midEntry.pcOffset() > pcOffset)
            top = mid;
        else
            break;
        mid = bottom + (top - bottom) / 2;
    }
    // Found an IC entry with a matching PC offset. Search backward, and then
    // forward from this IC entry, looking for one with the same PC offset which
    // has isForOp() set.
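    // Note that |i| is unsigned: when the backward scan steps below entry 0,
    // i-- wraps around and the |i < numICEntries()| condition ends the loop.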
    for (size_t i = mid; i < numICEntries() && icEntry(i).pcOffset() == pcOffset; i--) {
        if (icEntry(i).isForOp())
            return icEntry(i);
    }
    for (size_t i = mid+1; i < numICEntries() && icEntry(i).pcOffset() == pcOffset; i++) {
        if (icEntry(i).isForOp())
            return icEntry(i);
    }
    MOZ_ASSUME_UNREACHABLE("Invalid PC offset for IC entry.");
}

ICEntry &
BaselineScript::icEntryFromPCOffset(uint32_t pcOffset, ICEntry *prevLookedUpEntry)
{
    // Do a linear forward search from the last queried PC offset, or fall back
    // to a binary search if the last offset is too far away.
    if (prevLookedUpEntry && pcOffset >= prevLookedUpEntry->pcOffset() &&
        (pcOffset - prevLookedUpEntry->pcOffset()) <= 10)
    {
        ICEntry *firstEntry = &icEntry(0);
        ICEntry *lastEntry = &icEntry(numICEntries() - 1);
        ICEntry *curEntry = prevLookedUpEntry;
        while (curEntry >= firstEntry && curEntry <= lastEntry) {
            if (curEntry->pcOffset() == pcOffset && curEntry->isForOp())
                break;
            curEntry++;
        }
        JS_ASSERT(curEntry->pcOffset() == pcOffset && curEntry->isForOp());
        return *curEntry;
    }

    return icEntryFromPCOffset(pcOffset);
}

ICEntry *
BaselineScript::maybeICEntryFromReturnAddress(uint8_t *returnAddr)
{
    JS_ASSERT(returnAddr > method_->raw());
    JS_ASSERT(returnAddr < method_->raw() + method_->instructionsSize());
    CodeOffsetLabel offset(returnAddr - method_->raw());
    return maybeICEntryFromReturnOffset(offset);
}

ICEntry &
BaselineScript::icEntryFromReturnAddress(uint8_t *returnAddr)
{
    JS_ASSERT(returnAddr > method_->raw());
    JS_ASSERT(returnAddr < method_->raw() + method_->instructionsSize());
    CodeOffsetLabel offset(returnAddr - method_->raw());
    return icEntryFromReturnOffset(offset);
}

void
BaselineScript::copyICEntries(JSScript *script, const ICEntry *entries, MacroAssembler &masm)
{
    // Fix up the return offset in the IC entries and copy them in.
    // Also write out the IC entry ptrs in any fallback stubs that were added.
    for (uint32_t i = 0; i < numICEntries(); i++) {
        ICEntry &realEntry = icEntry(i);
        realEntry = entries[i];
        realEntry.fixupReturnOffset(masm);

        if (!realEntry.hasStub()) {
            // VM call without any stubs.
            continue;
        }

        // If the attached stub is a fallback stub, then fix it up with
        // a pointer to the (now available) realEntry.
        if (realEntry.firstStub()->isFallback())
            realEntry.firstStub()->toFallbackStub()->fixupICEntry(&realEntry);

        if (realEntry.firstStub()->isTypeMonitor_Fallback()) {
            ICTypeMonitor_Fallback *stub = realEntry.firstStub()->toTypeMonitor_Fallback();
            stub->fixupICEntry(&realEntry);
        }

        if (realEntry.firstStub()->isTableSwitch()) {
            ICTableSwitch *stub = realEntry.firstStub()->toTableSwitch();
            stub->fixupJumpTable(script, this);
        }
    }
}

void
BaselineScript::adoptFallbackStubs(FallbackICStubSpace *stubSpace)
{
    fallbackStubSpace_.adoptFrom(stubSpace);
}

void
BaselineScript::copyPCMappingEntries(const CompactBufferWriter &entries)
{
    JS_ASSERT(entries.length() > 0);
    JS_ASSERT(entries.length() == pcMappingSize_);

    memcpy(pcMappingData(), entries.buffer(), entries.length());
}

void
BaselineScript::copyPCMappingIndexEntries(const PCMappingIndexEntry *entries)
{
    for (uint32_t i = 0; i < numPCMappingIndexEntries(); i++)
        pcMappingIndexEntry(i) = entries[i];
}

uint8_t *
BaselineScript::nativeCodeForPC(JSScript *script, jsbytecode *pc, PCMappingSlotInfo *slotInfo)
{
    JS_ASSERT_IF(script->hasBaselineScript(), script->baselineScript() == this);

    uint32_t pcOffset = script->pcToOffset(pc);

    // Look for the first PCMappingIndexEntry with pc > the pc we are
    // interested in.
    uint32_t i = 1;
    for (; i < numPCMappingIndexEntries(); i++) {
        if (pcMappingIndexEntry(i).pcOffset > pcOffset)
            break;
    }

    // The previous entry contains the current pc.
    JS_ASSERT(i > 0);
    i--;

    PCMappingIndexEntry &entry = pcMappingIndexEntry(i);
    JS_ASSERT(pcOffset >= entry.pcOffset);

    CompactBufferReader reader(pcMappingReader(i));
    jsbytecode *curPC = script->offsetToPC(entry.pcOffset);
    uint32_t nativeOffset = entry.nativeOffset;

    JS_ASSERT(script->containsPC(curPC));
    JS_ASSERT(curPC <= pc);

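    // Each op contributes one byte to the mapping: the low seven bits carry
    // the PCMappingSlotInfo and the high bit flags a following native-offset
    // delta. For example (hypothetical bytes, not from any real script), 0x83
    // means slot info 3 with a delta next (say readUnsigned() == 5, so
    // nativeOffset += 5), while 0x02 means slot info 2 with the native offset
    // unchanged for this op.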
    while (true) {
        // If the high bit is set, a non-zero native offset delta relative to
        // the previous pc follows next.
        uint8_t b = reader.readByte();
        if (b & 0x80)
            nativeOffset += reader.readUnsigned();

        if (curPC == pc) {
            if (slotInfo)
                *slotInfo = PCMappingSlotInfo(b & ~0x80);
            return method_->raw() + nativeOffset;
        }

        curPC += GetBytecodeLength(curPC);
    }

    MOZ_ASSUME_UNREACHABLE("Invalid pc");
}

jsbytecode *
BaselineScript::pcForReturnOffset(JSScript *script, uint32_t nativeOffset)
{
    JS_ASSERT(script->baselineScript() == this);
    JS_ASSERT(nativeOffset < method_->instructionsSize());

    // Look for the first PCMappingIndexEntry with native offset > the native offset we are
    // interested in.
    uint32_t i = 1;
    for (; i < numPCMappingIndexEntries(); i++) {
        if (pcMappingIndexEntry(i).nativeOffset > nativeOffset)
            break;
    }

    // Go back an entry to search forward from.
    JS_ASSERT(i > 0);
    i--;

    PCMappingIndexEntry &entry = pcMappingIndexEntry(i);
    JS_ASSERT(nativeOffset >= entry.nativeOffset);

    CompactBufferReader reader(pcMappingReader(i));
    jsbytecode *curPC = script->offsetToPC(entry.pcOffset);
    uint32_t curNativeOffset = entry.nativeOffset;

    JS_ASSERT(script->containsPC(curPC));
    JS_ASSERT(curNativeOffset <= nativeOffset);

    while (true) {
        // If the high bit is set, a non-zero native offset delta relative to
        // the previous pc follows next.
        uint8_t b = reader.readByte();
        if (b & 0x80)
            curNativeOffset += reader.readUnsigned();

        if (curNativeOffset == nativeOffset)
            return curPC;

        curPC += GetBytecodeLength(curPC);
    }

    MOZ_ASSUME_UNREACHABLE("Invalid pc");
}

jsbytecode *
BaselineScript::pcForReturnAddress(JSScript *script, uint8_t *nativeAddress)
{
    JS_ASSERT(script->baselineScript() == this);
    JS_ASSERT(nativeAddress >= method_->raw());
    JS_ASSERT(nativeAddress < method_->raw() + method_->instructionsSize());
    return pcForReturnOffset(script, uint32_t(nativeAddress - method_->raw()));
}

void
BaselineScript::toggleDebugTraps(JSScript *script, jsbytecode *pc)
{
    JS_ASSERT(script->baselineScript() == this);

    // Only scripts compiled for debug mode have toggled calls.
    if (!debugMode())
        return;

    SrcNoteLineScanner scanner(script->notes(), script->lineno());

    for (uint32_t i = 0; i < numPCMappingIndexEntries(); i++) {
        PCMappingIndexEntry &entry = pcMappingIndexEntry(i);

        CompactBufferReader reader(pcMappingReader(i));
        jsbytecode *curPC = script->offsetToPC(entry.pcOffset);
        uint32_t nativeOffset = entry.nativeOffset;

        JS_ASSERT(script->containsPC(curPC));

        while (reader.more()) {
            uint8_t b = reader.readByte();
            if (b & 0x80)
                nativeOffset += reader.readUnsigned();

            scanner.advanceTo(script->pcToOffset(curPC));

            if (!pc || pc == curPC) {
                bool enabled = (script->stepModeEnabled() && scanner.isLineHeader()) ||
                               script->hasBreakpointsAt(curPC);

                // Patch the trap.
                CodeLocationLabel label(method(), nativeOffset);
                Assembler::ToggleCall(label, enabled);
            }

            curPC += GetBytecodeLength(curPC);
        }
    }
}

void
BaselineScript::toggleSPS(bool enable)
{
    JS_ASSERT(enable == !(bool)spsOn_);

    IonSpew(IonSpew_BaselineIC, " toggling SPS %s for BaselineScript %p",
            enable ? "on" : "off", this);

    // Toggle the patchable instruction guarding the SPS push: when enabling,
    // flip it to a cmp (a no-op that falls through into the push code); when
    // disabling, flip it back to a jmp that skips the push.
    CodeLocationLabel pushToggleLocation(method_, CodeOffsetLabel(spsPushToggleOffset_));
    if (enable)
        Assembler::ToggleToCmp(pushToggleLocation);
    else
        Assembler::ToggleToJmp(pushToggleLocation);
#ifdef DEBUG
    spsOn_ = enable;
#endif
}

void
BaselineScript::purgeOptimizedStubs(Zone *zone)
{
    IonSpew(IonSpew_BaselineIC, "Purging optimized stubs");

    for (size_t i = 0; i < numICEntries(); i++) {
        ICEntry &entry = icEntry(i);
        if (!entry.hasStub())
            continue;

        ICStub *lastStub = entry.firstStub();
        while (lastStub->next())
            lastStub = lastStub->next();

        if (lastStub->isFallback()) {
            // Unlink all stubs allocated in the optimized space.
            ICStub *stub = entry.firstStub();
            ICStub *prev = nullptr;

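            // The |stub->next()| loop condition stops before the trailing
            // fallback stub itself; it lives in the fallback space and is
            // never unlinked.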
            while (stub->next()) {
                if (!stub->allocatedInFallbackSpace()) {
                    lastStub->toFallbackStub()->unlinkStub(zone, prev, stub);
                    stub = stub->next();
                    continue;
                }

                prev = stub;
                stub = stub->next();
            }

            if (lastStub->isMonitoredFallback()) {
                // Monitor stubs can't make calls, so are always in the
                // optimized stub space.
                ICTypeMonitor_Fallback *lastMonStub =
                    lastStub->toMonitoredFallbackStub()->fallbackMonitorStub();
                lastMonStub->resetMonitorStubChain(zone);
            }
        } else if (lastStub->isTypeMonitor_Fallback()) {
            lastStub->toTypeMonitor_Fallback()->resetMonitorStubChain(zone);
        } else {
            JS_ASSERT(lastStub->isTableSwitch());
        }
    }

#ifdef DEBUG
    // All remaining stubs must be allocated in the fallback space.
    for (size_t i = 0; i < numICEntries(); i++) {
        ICEntry &entry = icEntry(i);
        if (!entry.hasStub())
            continue;

        ICStub *stub = entry.firstStub();
        while (stub->next()) {
            JS_ASSERT(stub->allocatedInFallbackSpace());
            stub = stub->next();
        }
    }
#endif
}

void
jit::FinishDiscardBaselineScript(FreeOp *fop, JSScript *script)
{
    if (!script->hasBaselineScript())
        return;

    if (script->baselineScript()->active()) {
        // Script is live on the stack. Keep the BaselineScript, but destroy
        // stubs allocated in the optimized stub space.
        script->baselineScript()->purgeOptimizedStubs(script->zone());

        // Reset |active| flag so that we don't need a separate script
        // iteration to unmark them.
        script->baselineScript()->resetActive();
        return;
    }

    BaselineScript *baseline = script->baselineScript();
    script->setBaselineScript(nullptr, nullptr);
    BaselineScript::Destroy(fop, baseline);
}

void
jit::JitCompartment::toggleBaselineStubBarriers(bool enabled)
{
    for (ICStubCodeMap::Enum e(*stubCodes_); !e.empty(); e.popFront()) {
        JitCode *code = *e.front().value().unsafeGet();
        code->togglePreBarriers(enabled);
    }
}

void
jit::AddSizeOfBaselineData(JSScript *script, mozilla::MallocSizeOf mallocSizeOf, size_t *data,
                           size_t *fallbackStubs)
{
    if (script->hasBaselineScript())
        script->baselineScript()->addSizeOfIncludingThis(mallocSizeOf, data, fallbackStubs);
}

void
jit::ToggleBaselineSPS(JSRuntime *runtime, bool enable)
{
    for (ZonesIter zone(runtime, SkipAtoms); !zone.done(); zone.next()) {
        for (gc::CellIter i(zone, gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
            JSScript *script = i.get<JSScript>();
            if (!script->hasBaselineScript())
                continue;
            script->baselineScript()->toggleSPS(enable);
        }
    }
}

static void
MarkActiveBaselineScripts(JSRuntime *rt, const JitActivationIterator &activation)
{
    for (jit::JitFrameIterator iter(activation); !iter.done(); ++iter) {
        switch (iter.type()) {
          case JitFrame_BaselineJS:
            iter.script()->baselineScript()->setActive();
            break;
          case JitFrame_IonJS: {
            // Keep the baseline script around, since bailouts from the ion
            // jitcode might need to re-enter into the baseline jitcode.
            iter.script()->baselineScript()->setActive();
            for (InlineFrameIterator inlineIter(rt, &iter); inlineIter.more(); ++inlineIter)
                inlineIter.script()->baselineScript()->setActive();
            break;
          }
          default:;
        }
    }
}

void
jit::MarkActiveBaselineScripts(Zone *zone)
{
    JSRuntime *rt = zone->runtimeFromMainThread();
    for (JitActivationIterator iter(rt); !iter.done(); ++iter) {
        if (iter->compartment()->zone() == zone)
            MarkActiveBaselineScripts(rt, iter);
    }
}