michael@0: /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- michael@0: * vim: set ts=8 sts=4 et sw=4 tw=99: michael@0: * This Source Code Form is subject to the terms of the Mozilla Public michael@0: * License, v. 2.0. If a copy of the MPL was not distributed with this michael@0: * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ michael@0: michael@0: #ifndef jit_JitCompartment_h michael@0: #define jit_JitCompartment_h michael@0: michael@0: #ifdef JS_ION michael@0: michael@0: #include "mozilla/MemoryReporting.h" michael@0: michael@0: #include "jsweakcache.h" michael@0: michael@0: #include "jit/CompileInfo.h" michael@0: #include "jit/IonCode.h" michael@0: #include "jit/IonFrames.h" michael@0: #include "jit/shared/Assembler-shared.h" michael@0: #include "js/Value.h" michael@0: #include "vm/Stack.h" michael@0: michael@0: namespace js { michael@0: namespace jit { michael@0: michael@0: class FrameSizeClass; michael@0: michael@0: enum EnterJitType { michael@0: EnterJitBaseline = 0, michael@0: EnterJitOptimized = 1 michael@0: }; michael@0: michael@0: struct EnterJitData michael@0: { michael@0: explicit EnterJitData(JSContext *cx) michael@0: : scopeChain(cx), michael@0: result(cx) michael@0: {} michael@0: michael@0: uint8_t *jitcode; michael@0: InterpreterFrame *osrFrame; michael@0: michael@0: void *calleeToken; michael@0: michael@0: Value *maxArgv; michael@0: unsigned maxArgc; michael@0: unsigned numActualArgs; michael@0: unsigned osrNumStackValues; michael@0: michael@0: RootedObject scopeChain; michael@0: RootedValue result; michael@0: michael@0: bool constructing; michael@0: }; michael@0: michael@0: typedef void (*EnterJitCode)(void *code, unsigned argc, Value *argv, InterpreterFrame *fp, michael@0: CalleeToken calleeToken, JSObject *scopeChain, michael@0: size_t numStackValues, Value *vp); michael@0: michael@0: class IonBuilder; michael@0: michael@0: // ICStubSpace is an abstraction for allocation policy and storage for stub data. michael@0: // There are two kinds of stubs: optimized stubs and fallback stubs (the latter michael@0: // also includes stubs that can make non-tail calls that can GC). michael@0: // michael@0: // Optimized stubs are allocated per-compartment and are always purged when michael@0: // JIT-code is discarded. Fallback stubs are allocated per BaselineScript and michael@0: // are only destroyed when the BaselineScript is destroyed. michael@0: class ICStubSpace michael@0: { michael@0: protected: michael@0: LifoAlloc allocator_; michael@0: michael@0: explicit ICStubSpace(size_t chunkSize) michael@0: : allocator_(chunkSize) michael@0: {} michael@0: michael@0: public: michael@0: inline void *alloc(size_t size) { michael@0: return allocator_.alloc(size); michael@0: } michael@0: michael@0: JS_DECLARE_NEW_METHODS(allocate, alloc, inline) michael@0: michael@0: size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const { michael@0: return allocator_.sizeOfExcludingThis(mallocSizeOf); michael@0: } michael@0: }; michael@0: michael@0: // Space for optimized stubs. Every JitCompartment has a single michael@0: // OptimizedICStubSpace. michael@0: struct OptimizedICStubSpace : public ICStubSpace michael@0: { michael@0: static const size_t STUB_DEFAULT_CHUNK_SIZE = 4 * 1024; michael@0: michael@0: public: michael@0: OptimizedICStubSpace() michael@0: : ICStubSpace(STUB_DEFAULT_CHUNK_SIZE) michael@0: {} michael@0: michael@0: void free() { michael@0: allocator_.freeAll(); michael@0: } michael@0: }; michael@0: michael@0: // Space for fallback stubs. Every BaselineScript has a michael@0: // FallbackICStubSpace. michael@0: struct FallbackICStubSpace : public ICStubSpace michael@0: { michael@0: static const size_t STUB_DEFAULT_CHUNK_SIZE = 256; michael@0: michael@0: public: michael@0: FallbackICStubSpace() michael@0: : ICStubSpace(STUB_DEFAULT_CHUNK_SIZE) michael@0: {} michael@0: michael@0: inline void adoptFrom(FallbackICStubSpace *other) { michael@0: allocator_.steal(&(other->allocator_)); michael@0: } michael@0: }; michael@0: michael@0: // Information about a loop backedge in the runtime, which can be set to michael@0: // point to either the loop header or to an OOL interrupt checking stub, michael@0: // if signal handlers are being used to implement interrupts. michael@0: class PatchableBackedge : public InlineListNode michael@0: { michael@0: friend class JitRuntime; michael@0: michael@0: CodeLocationJump backedge; michael@0: CodeLocationLabel loopHeader; michael@0: CodeLocationLabel interruptCheck; michael@0: michael@0: public: michael@0: PatchableBackedge(CodeLocationJump backedge, michael@0: CodeLocationLabel loopHeader, michael@0: CodeLocationLabel interruptCheck) michael@0: : backedge(backedge), loopHeader(loopHeader), interruptCheck(interruptCheck) michael@0: {} michael@0: }; michael@0: michael@0: class JitRuntime michael@0: { michael@0: friend class JitCompartment; michael@0: michael@0: // Executable allocator for all code except the main code in an IonScript. michael@0: // Shared with the runtime. michael@0: JSC::ExecutableAllocator *execAlloc_; michael@0: michael@0: // Executable allocator used for allocating the main code in an IonScript. michael@0: // All accesses on this allocator must be protected by the runtime's michael@0: // interrupt lock, as the executable memory may be protected() when michael@0: // requesting an interrupt to force a fault in the Ion code and avoid the michael@0: // need for explicit interrupt checks. michael@0: JSC::ExecutableAllocator *ionAlloc_; michael@0: michael@0: // Shared post-exception-handler tail michael@0: JitCode *exceptionTail_; michael@0: michael@0: // Shared post-bailout-handler tail. michael@0: JitCode *bailoutTail_; michael@0: michael@0: // Trampoline for entering JIT code. Contains OSR prologue. michael@0: JitCode *enterJIT_; michael@0: michael@0: // Trampoline for entering baseline JIT code. michael@0: JitCode *enterBaselineJIT_; michael@0: michael@0: // Vector mapping frame class sizes to bailout tables. michael@0: Vector bailoutTables_; michael@0: michael@0: // Generic bailout table; used if the bailout table overflows. michael@0: JitCode *bailoutHandler_; michael@0: michael@0: // Argument-rectifying thunk, in the case of insufficient arguments passed michael@0: // to a function call site. michael@0: JitCode *argumentsRectifier_; michael@0: void *argumentsRectifierReturnAddr_; michael@0: michael@0: // Arguments-rectifying thunk which loads |parallelIon| instead of |ion|. michael@0: JitCode *parallelArgumentsRectifier_; michael@0: michael@0: // Thunk that invalides an (Ion compiled) caller on the Ion stack. michael@0: JitCode *invalidator_; michael@0: michael@0: // Thunk that calls the GC pre barrier. michael@0: JitCode *valuePreBarrier_; michael@0: JitCode *shapePreBarrier_; michael@0: michael@0: // Thunk used by the debugger for breakpoint and step mode. michael@0: JitCode *debugTrapHandler_; michael@0: michael@0: // Stub used to inline the ForkJoinGetSlice intrinsic. michael@0: JitCode *forkJoinGetSliceStub_; michael@0: michael@0: // Thunk used to fix up on-stack recompile of baseline scripts. michael@0: JitCode *baselineDebugModeOSRHandler_; michael@0: void *baselineDebugModeOSRHandlerNoFrameRegPopAddr_; michael@0: michael@0: // Map VMFunction addresses to the JitCode of the wrapper. michael@0: typedef WeakCache VMWrapperMap; michael@0: VMWrapperMap *functionWrappers_; michael@0: michael@0: // Buffer for OSR from baseline to Ion. To avoid holding on to this for michael@0: // too long, it's also freed in JitCompartment::mark and in EnterBaseline michael@0: // (after returning from JIT code). michael@0: uint8_t *osrTempData_; michael@0: michael@0: // Whether all Ion code in the runtime is protected, and will fault if it michael@0: // is accessed. michael@0: bool ionCodeProtected_; michael@0: michael@0: // If signal handlers are installed, this contains all loop backedges for michael@0: // IonScripts in the runtime. michael@0: InlineList backedgeList_; michael@0: michael@0: private: michael@0: JitCode *generateExceptionTailStub(JSContext *cx); michael@0: JitCode *generateBailoutTailStub(JSContext *cx); michael@0: JitCode *generateEnterJIT(JSContext *cx, EnterJitType type); michael@0: JitCode *generateArgumentsRectifier(JSContext *cx, ExecutionMode mode, void **returnAddrOut); michael@0: JitCode *generateBailoutTable(JSContext *cx, uint32_t frameClass); michael@0: JitCode *generateBailoutHandler(JSContext *cx); michael@0: JitCode *generateInvalidator(JSContext *cx); michael@0: JitCode *generatePreBarrier(JSContext *cx, MIRType type); michael@0: JitCode *generateDebugTrapHandler(JSContext *cx); michael@0: JitCode *generateForkJoinGetSliceStub(JSContext *cx); michael@0: JitCode *generateBaselineDebugModeOSRHandler(JSContext *cx, uint32_t *noFrameRegPopOffsetOut); michael@0: JitCode *generateVMWrapper(JSContext *cx, const VMFunction &f); michael@0: michael@0: JSC::ExecutableAllocator *createIonAlloc(JSContext *cx); michael@0: michael@0: public: michael@0: JitRuntime(); michael@0: ~JitRuntime(); michael@0: bool initialize(JSContext *cx); michael@0: michael@0: uint8_t *allocateOsrTempData(size_t size); michael@0: void freeOsrTempData(); michael@0: michael@0: static void Mark(JSTracer *trc); michael@0: michael@0: JSC::ExecutableAllocator *execAlloc() const { michael@0: return execAlloc_; michael@0: } michael@0: michael@0: JSC::ExecutableAllocator *getIonAlloc(JSContext *cx) { michael@0: JS_ASSERT(cx->runtime()->currentThreadOwnsInterruptLock()); michael@0: return ionAlloc_ ? ionAlloc_ : createIonAlloc(cx); michael@0: } michael@0: michael@0: JSC::ExecutableAllocator *ionAlloc(JSRuntime *rt) { michael@0: JS_ASSERT(rt->currentThreadOwnsInterruptLock()); michael@0: return ionAlloc_; michael@0: } michael@0: michael@0: bool ionCodeProtected() { michael@0: return ionCodeProtected_; michael@0: } michael@0: michael@0: void addPatchableBackedge(PatchableBackedge *backedge) { michael@0: backedgeList_.pushFront(backedge); michael@0: } michael@0: void removePatchableBackedge(PatchableBackedge *backedge) { michael@0: backedgeList_.remove(backedge); michael@0: } michael@0: michael@0: enum BackedgeTarget { michael@0: BackedgeLoopHeader, michael@0: BackedgeInterruptCheck michael@0: }; michael@0: michael@0: void ensureIonCodeProtected(JSRuntime *rt); michael@0: void ensureIonCodeAccessible(JSRuntime *rt); michael@0: void patchIonBackedges(JSRuntime *rt, BackedgeTarget target); michael@0: michael@0: bool handleAccessViolation(JSRuntime *rt, void *faultingAddress); michael@0: michael@0: JitCode *getVMWrapper(const VMFunction &f) const; michael@0: JitCode *debugTrapHandler(JSContext *cx); michael@0: JitCode *getBaselineDebugModeOSRHandler(JSContext *cx); michael@0: void *getBaselineDebugModeOSRHandlerAddress(JSContext *cx, bool popFrameReg); michael@0: michael@0: JitCode *getGenericBailoutHandler() const { michael@0: return bailoutHandler_; michael@0: } michael@0: michael@0: JitCode *getExceptionTail() const { michael@0: return exceptionTail_; michael@0: } michael@0: michael@0: JitCode *getBailoutTail() const { michael@0: return bailoutTail_; michael@0: } michael@0: michael@0: JitCode *getBailoutTable(const FrameSizeClass &frameClass) const; michael@0: michael@0: JitCode *getArgumentsRectifier(ExecutionMode mode) const { michael@0: switch (mode) { michael@0: case SequentialExecution: return argumentsRectifier_; michael@0: case ParallelExecution: return parallelArgumentsRectifier_; michael@0: default: MOZ_ASSUME_UNREACHABLE("No such execution mode"); michael@0: } michael@0: } michael@0: michael@0: void *getArgumentsRectifierReturnAddr() const { michael@0: return argumentsRectifierReturnAddr_; michael@0: } michael@0: michael@0: JitCode *getInvalidationThunk() const { michael@0: return invalidator_; michael@0: } michael@0: michael@0: EnterJitCode enterIon() const { michael@0: return enterJIT_->as(); michael@0: } michael@0: michael@0: EnterJitCode enterBaseline() const { michael@0: return enterBaselineJIT_->as(); michael@0: } michael@0: michael@0: JitCode *valuePreBarrier() const { michael@0: return valuePreBarrier_; michael@0: } michael@0: michael@0: JitCode *shapePreBarrier() const { michael@0: return shapePreBarrier_; michael@0: } michael@0: michael@0: bool ensureForkJoinGetSliceStubExists(JSContext *cx); michael@0: JitCode *forkJoinGetSliceStub() const { michael@0: return forkJoinGetSliceStub_; michael@0: } michael@0: }; michael@0: michael@0: class JitZone michael@0: { michael@0: // Allocated space for optimized baseline stubs. michael@0: OptimizedICStubSpace optimizedStubSpace_; michael@0: michael@0: public: michael@0: OptimizedICStubSpace *optimizedStubSpace() { michael@0: return &optimizedStubSpace_; michael@0: } michael@0: }; michael@0: michael@0: class JitCompartment michael@0: { michael@0: friend class JitActivation; michael@0: michael@0: // Map ICStub keys to ICStub shared code objects. michael@0: typedef WeakValueCache > ICStubCodeMap; michael@0: ICStubCodeMap *stubCodes_; michael@0: michael@0: // Keep track of offset into various baseline stubs' code at return michael@0: // point from called script. michael@0: void *baselineCallReturnFromIonAddr_; michael@0: void *baselineGetPropReturnFromIonAddr_; michael@0: void *baselineSetPropReturnFromIonAddr_; michael@0: michael@0: // Same as above, but is used for return from a baseline stub. This is michael@0: // used for recompiles of on-stack baseline scripts (e.g., for debug michael@0: // mode). michael@0: void *baselineCallReturnFromStubAddr_; michael@0: void *baselineGetPropReturnFromStubAddr_; michael@0: void *baselineSetPropReturnFromStubAddr_; michael@0: michael@0: // Stub to concatenate two strings inline. Note that it can't be michael@0: // stored in JitRuntime because masm.newGCString bakes in zone-specific michael@0: // pointers. This has to be a weak pointer to avoid keeping the whole michael@0: // compartment alive. michael@0: ReadBarriered stringConcatStub_; michael@0: ReadBarriered parallelStringConcatStub_; michael@0: michael@0: // Set of JSScripts invoked by ForkJoin (i.e. the entry script). These michael@0: // scripts are marked if their respective parallel IonScripts' age is less michael@0: // than a certain amount. See IonScript::parallelAge_. michael@0: typedef HashSet ScriptSet; michael@0: ScriptSet *activeParallelEntryScripts_; michael@0: michael@0: JitCode *generateStringConcatStub(JSContext *cx, ExecutionMode mode); michael@0: michael@0: public: michael@0: JitCode *getStubCode(uint32_t key) { michael@0: ICStubCodeMap::AddPtr p = stubCodes_->lookupForAdd(key); michael@0: if (p) michael@0: return p->value(); michael@0: return nullptr; michael@0: } michael@0: bool putStubCode(uint32_t key, Handle stubCode) { michael@0: // Make sure to do a lookupForAdd(key) and then insert into that slot, because michael@0: // that way if stubCode gets moved due to a GC caused by lookupForAdd, then michael@0: // we still write the correct pointer. michael@0: JS_ASSERT(!stubCodes_->has(key)); michael@0: ICStubCodeMap::AddPtr p = stubCodes_->lookupForAdd(key); michael@0: return stubCodes_->add(p, key, stubCode.get()); michael@0: } michael@0: void initBaselineCallReturnFromIonAddr(void *addr) { michael@0: JS_ASSERT(baselineCallReturnFromIonAddr_ == nullptr); michael@0: baselineCallReturnFromIonAddr_ = addr; michael@0: } michael@0: void *baselineCallReturnFromIonAddr() { michael@0: JS_ASSERT(baselineCallReturnFromIonAddr_ != nullptr); michael@0: return baselineCallReturnFromIonAddr_; michael@0: } michael@0: void initBaselineGetPropReturnFromIonAddr(void *addr) { michael@0: JS_ASSERT(baselineGetPropReturnFromIonAddr_ == nullptr); michael@0: baselineGetPropReturnFromIonAddr_ = addr; michael@0: } michael@0: void *baselineGetPropReturnFromIonAddr() { michael@0: JS_ASSERT(baselineGetPropReturnFromIonAddr_ != nullptr); michael@0: return baselineGetPropReturnFromIonAddr_; michael@0: } michael@0: void initBaselineSetPropReturnFromIonAddr(void *addr) { michael@0: JS_ASSERT(baselineSetPropReturnFromIonAddr_ == nullptr); michael@0: baselineSetPropReturnFromIonAddr_ = addr; michael@0: } michael@0: void *baselineSetPropReturnFromIonAddr() { michael@0: JS_ASSERT(baselineSetPropReturnFromIonAddr_ != nullptr); michael@0: return baselineSetPropReturnFromIonAddr_; michael@0: } michael@0: michael@0: void initBaselineCallReturnFromStubAddr(void *addr) { michael@0: MOZ_ASSERT(baselineCallReturnFromStubAddr_ == nullptr); michael@0: baselineCallReturnFromStubAddr_ = addr;; michael@0: } michael@0: void *baselineCallReturnFromStubAddr() { michael@0: JS_ASSERT(baselineCallReturnFromStubAddr_ != nullptr); michael@0: return baselineCallReturnFromStubAddr_; michael@0: } michael@0: void initBaselineGetPropReturnFromStubAddr(void *addr) { michael@0: JS_ASSERT(baselineGetPropReturnFromStubAddr_ == nullptr); michael@0: baselineGetPropReturnFromStubAddr_ = addr; michael@0: } michael@0: void *baselineGetPropReturnFromStubAddr() { michael@0: JS_ASSERT(baselineGetPropReturnFromStubAddr_ != nullptr); michael@0: return baselineGetPropReturnFromStubAddr_; michael@0: } michael@0: void initBaselineSetPropReturnFromStubAddr(void *addr) { michael@0: JS_ASSERT(baselineSetPropReturnFromStubAddr_ == nullptr); michael@0: baselineSetPropReturnFromStubAddr_ = addr; michael@0: } michael@0: void *baselineSetPropReturnFromStubAddr() { michael@0: JS_ASSERT(baselineSetPropReturnFromStubAddr_ != nullptr); michael@0: return baselineSetPropReturnFromStubAddr_; michael@0: } michael@0: michael@0: bool notifyOfActiveParallelEntryScript(JSContext *cx, HandleScript script); michael@0: michael@0: void toggleBaselineStubBarriers(bool enabled); michael@0: michael@0: JSC::ExecutableAllocator *createIonAlloc(); michael@0: michael@0: public: michael@0: JitCompartment(); michael@0: ~JitCompartment(); michael@0: michael@0: bool initialize(JSContext *cx); michael@0: michael@0: // Initialize code stubs only used by Ion, not Baseline. michael@0: bool ensureIonStubsExist(JSContext *cx); michael@0: michael@0: void mark(JSTracer *trc, JSCompartment *compartment); michael@0: void sweep(FreeOp *fop); michael@0: michael@0: JitCode *stringConcatStub(ExecutionMode mode) const { michael@0: switch (mode) { michael@0: case SequentialExecution: return stringConcatStub_; michael@0: case ParallelExecution: return parallelStringConcatStub_; michael@0: default: MOZ_ASSUME_UNREACHABLE("No such execution mode"); michael@0: } michael@0: } michael@0: }; michael@0: michael@0: // Called from JSCompartment::discardJitCode(). michael@0: void InvalidateAll(FreeOp *fop, JS::Zone *zone); michael@0: template michael@0: void FinishInvalidation(FreeOp *fop, JSScript *script); michael@0: michael@0: inline bool michael@0: ShouldPreserveParallelJITCode(JSRuntime *rt, JSScript *script, bool increase = false) michael@0: { michael@0: IonScript *parallelIon = script->parallelIonScript(); michael@0: uint32_t age = increase ? parallelIon->increaseParallelAge() : parallelIon->parallelAge(); michael@0: return age < jit::IonScript::MAX_PARALLEL_AGE && !rt->gcShouldCleanUpEverything; michael@0: } michael@0: michael@0: // On windows systems, really large frames need to be incrementally touched. michael@0: // The following constant defines the minimum increment of the touch. michael@0: #ifdef XP_WIN michael@0: const unsigned WINDOWS_BIG_FRAME_TOUCH_INCREMENT = 4096 - 1; michael@0: #endif michael@0: michael@0: } // namespace jit michael@0: } // namespace js michael@0: michael@0: #endif // JS_ION michael@0: michael@0: #endif /* jit_JitCompartment_h */