/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

/* JS Garbage Collector. */

#ifndef jsgc_h
#define jsgc_h

#include "mozilla/DebugOnly.h"
#include "mozilla/MemoryReporting.h"

#include "jslock.h"
#include "jsobj.h"

#include "js/GCAPI.h"
#include "js/SliceBudget.h"
#include "js/Vector.h"

class JSAtom;
struct JSCompartment;
class JSFlatString;
class JSLinearString;

namespace js {

class ArgumentsObject;
class ArrayBufferObject;
class ArrayBufferViewObject;
class SharedArrayBufferObject;
class BaseShape;
class DebugScopeObject;
class GCHelperThread;
class GlobalObject;
class LazyScript;
class Nursery;
class PropertyName;
class ScopeObject;
class Shape;
class UnownedBaseShape;

unsigned GetCPUCount();

enum HeapState {
    Idle,             // doing nothing with the GC heap
    Tracing,          // tracing the GC heap without collecting, e.g. IterateCompartments()
    MajorCollecting,  // doing a GC of the major heap
    MinorCollecting   // doing a GC of the minor heap (nursery)
};

namespace jit {
class JitCode;
}

namespace gc {

enum State {
    NO_INCREMENTAL,
    MARK_ROOTS,
    MARK,
    SWEEP,
    INVALID
};

class ChunkPool {
    Chunk   *emptyChunkListHead;
    size_t  emptyCount;

  public:
    ChunkPool()
      : emptyChunkListHead(nullptr),
        emptyCount(0) { }

    size_t getEmptyCount() const {
        return emptyCount;
    }

    inline bool wantBackgroundAllocation(JSRuntime *rt) const;

    /* Must be called with the GC lock taken. */
    inline Chunk *get(JSRuntime *rt);

    /* Must be called either during the GC or with the GC lock taken. */
    inline void put(Chunk *chunk);

    /*
     * Return the list of chunks that can be released outside the GC lock.
     * Must be called either during the GC or with the GC lock taken.
     */
    Chunk *expire(JSRuntime *rt, bool releaseAll);

    /* Must be called with the GC lock taken. */
    void expireAndFree(JSRuntime *rt, bool releaseAll);
};

static inline JSGCTraceKind
MapAllocToTraceKind(AllocKind kind)
{
    static const JSGCTraceKind map[] = {
        JSTRACE_OBJECT,     /* FINALIZE_OBJECT0 */
        JSTRACE_OBJECT,     /* FINALIZE_OBJECT0_BACKGROUND */
        JSTRACE_OBJECT,     /* FINALIZE_OBJECT2 */
        JSTRACE_OBJECT,     /* FINALIZE_OBJECT2_BACKGROUND */
        JSTRACE_OBJECT,     /* FINALIZE_OBJECT4 */
        JSTRACE_OBJECT,     /* FINALIZE_OBJECT4_BACKGROUND */
        JSTRACE_OBJECT,     /* FINALIZE_OBJECT8 */
        JSTRACE_OBJECT,     /* FINALIZE_OBJECT8_BACKGROUND */
        JSTRACE_OBJECT,     /* FINALIZE_OBJECT12 */
        JSTRACE_OBJECT,     /* FINALIZE_OBJECT12_BACKGROUND */
        JSTRACE_OBJECT,     /* FINALIZE_OBJECT16 */
        JSTRACE_OBJECT,     /* FINALIZE_OBJECT16_BACKGROUND */
        JSTRACE_SCRIPT,     /* FINALIZE_SCRIPT */
        JSTRACE_LAZY_SCRIPT,/* FINALIZE_LAZY_SCRIPT */
        JSTRACE_SHAPE,      /* FINALIZE_SHAPE */
        JSTRACE_BASE_SHAPE, /* FINALIZE_BASE_SHAPE */
        JSTRACE_TYPE_OBJECT,/* FINALIZE_TYPE_OBJECT */
        JSTRACE_STRING,     /* FINALIZE_FAT_INLINE_STRING */
        JSTRACE_STRING,     /* FINALIZE_STRING */
        JSTRACE_STRING,     /* FINALIZE_EXTERNAL_STRING */
        JSTRACE_JITCODE,    /* FINALIZE_JITCODE */
    };
    JS_STATIC_ASSERT(JS_ARRAY_LENGTH(map) == FINALIZE_LIMIT);
    return map[kind];
}

template <typename T> struct MapTypeToTraceKind {};
template <> struct MapTypeToTraceKind<ObjectImpl>              { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
template <> struct MapTypeToTraceKind<JSObject>                { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
template <> struct MapTypeToTraceKind<JSFunction>              { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
template <> struct MapTypeToTraceKind<ArgumentsObject>         { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
template <> struct MapTypeToTraceKind<ArrayBufferObject>       { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
template <> struct MapTypeToTraceKind<ArrayBufferViewObject>   { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
template <> struct MapTypeToTraceKind<SharedArrayBufferObject> { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
template <> struct MapTypeToTraceKind<DebugScopeObject>        { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
template <> struct MapTypeToTraceKind<GlobalObject>            { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
template <> struct MapTypeToTraceKind<ScopeObject>             { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
template <> struct MapTypeToTraceKind<JSScript>                { static const JSGCTraceKind kind = JSTRACE_SCRIPT; };
template <> struct MapTypeToTraceKind<LazyScript>              { static const JSGCTraceKind kind = JSTRACE_LAZY_SCRIPT; };
template <> struct MapTypeToTraceKind<Shape>                   { static const JSGCTraceKind kind = JSTRACE_SHAPE; };
template <> struct MapTypeToTraceKind<BaseShape>               { static const JSGCTraceKind kind = JSTRACE_BASE_SHAPE; };
template <> struct MapTypeToTraceKind<UnownedBaseShape>        { static const JSGCTraceKind kind = JSTRACE_BASE_SHAPE; };
template <> struct MapTypeToTraceKind<types::TypeObject>       { static const JSGCTraceKind kind = JSTRACE_TYPE_OBJECT; };
template <> struct MapTypeToTraceKind<JSAtom>                  { static const JSGCTraceKind kind = JSTRACE_STRING; };
template <> struct MapTypeToTraceKind<JSString>                { static const JSGCTraceKind kind = JSTRACE_STRING; };
template <> struct MapTypeToTraceKind<JSFlatString>            { static const JSGCTraceKind kind = JSTRACE_STRING; };
template <> struct MapTypeToTraceKind<JSLinearString>          { static const JSGCTraceKind kind = JSTRACE_STRING; };
template <> struct MapTypeToTraceKind<PropertyName>            { static const JSGCTraceKind kind = JSTRACE_STRING; };
template <> struct MapTypeToTraceKind<jit::JitCode>            { static const JSGCTraceKind kind = JSTRACE_JITCODE; };
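
/*
 * For example, these two maps let generic code recover the trace kind either
 * from a C++ type or from an AllocKind:
 *
 *   MapTypeToTraceKind<JSScript>::kind       // JSTRACE_SCRIPT
 *   MapAllocToTraceKind(FINALIZE_SHAPE)      // JSTRACE_SHAPE
 */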

/*
 * Map from C++ type to finalize kind. JSObject does not have a 1:1 mapping,
 * so must use Arena::thingSize.
 */
template <typename T> struct MapTypeToFinalizeKind {};
template <> struct MapTypeToFinalizeKind<JSScript>          { static const AllocKind kind = FINALIZE_SCRIPT; };
template <> struct MapTypeToFinalizeKind<LazyScript>        { static const AllocKind kind = FINALIZE_LAZY_SCRIPT; };
template <> struct MapTypeToFinalizeKind<Shape>             { static const AllocKind kind = FINALIZE_SHAPE; };
template <> struct MapTypeToFinalizeKind<BaseShape>         { static const AllocKind kind = FINALIZE_BASE_SHAPE; };
template <> struct MapTypeToFinalizeKind<types::TypeObject> { static const AllocKind kind = FINALIZE_TYPE_OBJECT; };
template <> struct MapTypeToFinalizeKind<JSFatInlineString> { static const AllocKind kind = FINALIZE_FAT_INLINE_STRING; };
template <> struct MapTypeToFinalizeKind<JSString>          { static const AllocKind kind = FINALIZE_STRING; };
template <> struct MapTypeToFinalizeKind<JSExternalString>  { static const AllocKind kind = FINALIZE_EXTERNAL_STRING; };
template <> struct MapTypeToFinalizeKind<jit::JitCode>      { static const AllocKind kind = FINALIZE_JITCODE; };

#if defined(JSGC_GENERATIONAL) || defined(DEBUG)
static inline bool
IsNurseryAllocable(AllocKind kind)
{
    JS_ASSERT(kind >= 0 && unsigned(kind) < FINALIZE_LIMIT);
    static const bool map[] = {
        false,     /* FINALIZE_OBJECT0 */
        true,      /* FINALIZE_OBJECT0_BACKGROUND */
        false,     /* FINALIZE_OBJECT2 */
        true,      /* FINALIZE_OBJECT2_BACKGROUND */
        false,     /* FINALIZE_OBJECT4 */
        true,      /* FINALIZE_OBJECT4_BACKGROUND */
        false,     /* FINALIZE_OBJECT8 */
        true,      /* FINALIZE_OBJECT8_BACKGROUND */
        false,     /* FINALIZE_OBJECT12 */
        true,      /* FINALIZE_OBJECT12_BACKGROUND */
        false,     /* FINALIZE_OBJECT16 */
        true,      /* FINALIZE_OBJECT16_BACKGROUND */
        false,     /* FINALIZE_SCRIPT */
        false,     /* FINALIZE_LAZY_SCRIPT */
        false,     /* FINALIZE_SHAPE */
        false,     /* FINALIZE_BASE_SHAPE */
        false,     /* FINALIZE_TYPE_OBJECT */
        false,     /* FINALIZE_FAT_INLINE_STRING */
        false,     /* FINALIZE_STRING */
        false,     /* FINALIZE_EXTERNAL_STRING */
        false,     /* FINALIZE_JITCODE */
    };
    JS_STATIC_ASSERT(JS_ARRAY_LENGTH(map) == FINALIZE_LIMIT);
    return map[kind];
}
#endif

static inline bool
IsBackgroundFinalized(AllocKind kind)
{
    JS_ASSERT(kind >= 0 && unsigned(kind) < FINALIZE_LIMIT);
    static const bool map[] = {
        false,     /* FINALIZE_OBJECT0 */
        true,      /* FINALIZE_OBJECT0_BACKGROUND */
        false,     /* FINALIZE_OBJECT2 */
        true,      /* FINALIZE_OBJECT2_BACKGROUND */
        false,     /* FINALIZE_OBJECT4 */
        true,      /* FINALIZE_OBJECT4_BACKGROUND */
        false,     /* FINALIZE_OBJECT8 */
        true,      /* FINALIZE_OBJECT8_BACKGROUND */
        false,     /* FINALIZE_OBJECT12 */
        true,      /* FINALIZE_OBJECT12_BACKGROUND */
        false,     /* FINALIZE_OBJECT16 */
        true,      /* FINALIZE_OBJECT16_BACKGROUND */
        false,     /* FINALIZE_SCRIPT */
        false,     /* FINALIZE_LAZY_SCRIPT */
        true,      /* FINALIZE_SHAPE */
        true,      /* FINALIZE_BASE_SHAPE */
        true,      /* FINALIZE_TYPE_OBJECT */
        true,      /* FINALIZE_FAT_INLINE_STRING */
        true,      /* FINALIZE_STRING */
        false,     /* FINALIZE_EXTERNAL_STRING */
        false,     /* FINALIZE_JITCODE */
    };
    JS_STATIC_ASSERT(JS_ARRAY_LENGTH(map) == FINALIZE_LIMIT);
    return map[kind];
}
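
/*
 * Note that the tables above rely on every FINALIZE_OBJECTn kind being
 * immediately followed by its _BACKGROUND variant, so for example:
 *
 *   IsBackgroundFinalized(FINALIZE_OBJECT4)                  // false
 *   IsBackgroundFinalized(AllocKind(FINALIZE_OBJECT4 + 1))   // true
 */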

static inline bool
CanBeFinalizedInBackground(gc::AllocKind kind, const Class *clasp)
{
    JS_ASSERT(kind <= gc::FINALIZE_OBJECT_LAST);
    /* If the class has no finalizer or a finalizer that is safe to call on
     * a different thread, we change the finalize kind. For example,
     * FINALIZE_OBJECT0 calls the finalizer on the main thread,
     * FINALIZE_OBJECT0_BACKGROUND calls the finalizer on the gcHelperThread.
     * IsBackgroundFinalized is called to prevent recursively incrementing
     * the finalize kind; kind may already be a background finalize kind.
     */
    return (!gc::IsBackgroundFinalized(kind) &&
            (!clasp->finalize || (clasp->flags & JSCLASS_BACKGROUND_FINALIZE)));
}

inline JSGCTraceKind
GetGCThingTraceKind(const void *thing);

/* Capacity for slotsToThingKind */
const size_t SLOTS_TO_THING_KIND_LIMIT = 17;

extern const AllocKind slotsToThingKind[];

/* Get the best kind to use when making an object with the given slot count. */
static inline AllocKind
GetGCObjectKind(size_t numSlots)
{
    if (numSlots >= SLOTS_TO_THING_KIND_LIMIT)
        return FINALIZE_OBJECT16;
    return slotsToThingKind[numSlots];
}

/* As for GetGCObjectKind, but for dense array allocation. */
static inline AllocKind
GetGCArrayKind(size_t numSlots)
{
    /*
     * Dense arrays can use their fixed slots to hold their elements array
     * (less two Values worth of ObjectElements header), but if more than the
     * maximum number of fixed slots is needed then the fixed slots will be
     * unused.
     */
    JS_STATIC_ASSERT(ObjectElements::VALUES_PER_HEADER == 2);
    if (numSlots > JSObject::NELEMENTS_LIMIT || numSlots + 2 >= SLOTS_TO_THING_KIND_LIMIT)
        return FINALIZE_OBJECT2;
    return slotsToThingKind[numSlots + 2];
}

static inline AllocKind
GetGCObjectFixedSlotsKind(size_t numFixedSlots)
{
    JS_ASSERT(numFixedSlots < SLOTS_TO_THING_KIND_LIMIT);
    return slotsToThingKind[numFixedSlots];
}

static inline AllocKind
GetBackgroundAllocKind(AllocKind kind)
{
    JS_ASSERT(!IsBackgroundFinalized(kind));
    JS_ASSERT(kind <= FINALIZE_OBJECT_LAST);
    return (AllocKind) (kind + 1);
}
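
/*
 * For example, an allocation site might pick a kind from the slot count and
 * then upgrade it to the background-finalized variant when the class allows:
 *
 *   AllocKind kind = GetGCObjectKind(nfixed);
 *   if (CanBeFinalizedInBackground(kind, clasp))
 *       kind = GetBackgroundAllocKind(kind);
 */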

/*
 * Try to get the next larger size for an object, keeping BACKGROUND
 * consistent.
 */
static inline bool
TryIncrementAllocKind(AllocKind *kindp)
{
    size_t next = size_t(*kindp) + 2;
    if (next >= size_t(FINALIZE_OBJECT_LIMIT))
        return false;
    *kindp = AllocKind(next);
    return true;
}

/* Get the number of fixed slots and initial capacity associated with a kind. */
static inline size_t
GetGCKindSlots(AllocKind thingKind)
{
    /* Using a switch in hopes that thingKind will usually be a compile-time constant. */
    switch (thingKind) {
      case FINALIZE_OBJECT0:
      case FINALIZE_OBJECT0_BACKGROUND:
        return 0;
      case FINALIZE_OBJECT2:
      case FINALIZE_OBJECT2_BACKGROUND:
        return 2;
      case FINALIZE_OBJECT4:
      case FINALIZE_OBJECT4_BACKGROUND:
        return 4;
      case FINALIZE_OBJECT8:
      case FINALIZE_OBJECT8_BACKGROUND:
        return 8;
      case FINALIZE_OBJECT12:
      case FINALIZE_OBJECT12_BACKGROUND:
        return 12;
      case FINALIZE_OBJECT16:
      case FINALIZE_OBJECT16_BACKGROUND:
        return 16;
      default:
        MOZ_ASSUME_UNREACHABLE("Bad object finalize kind");
    }
}

static inline size_t
GetGCKindSlots(AllocKind thingKind, const Class *clasp)
{
    size_t nslots = GetGCKindSlots(thingKind);

    /* An object's private data uses the space taken by its last fixed slot. */
    if (clasp->flags & JSCLASS_HAS_PRIVATE) {
        JS_ASSERT(nslots > 0);
        nslots--;
    }

    /*
     * Functions have a larger finalize kind than FINALIZE_OBJECT to reserve
     * space for the extra fields in JSFunction, but have no fixed slots.
     */
    if (clasp == FunctionClassPtr)
        nslots = 0;

    return nslots;
}

/*
 * ArenaList::head points to the start of the list. Normally cursor points
 * to the first arena in the list with some free things, and all arenas
 * before cursor are fully allocated. However, the arena currently being
 * allocated from is considered full while its list of free spans is moved
 * into the freeList, so during the GC or cell enumeration, when an
 * unallocated freeList is moved back to the arena, we can see an arena
 * with some free cells before the cursor. The cursor is an indirect
 * pointer to allow for efficient list insertion at the cursor point and
 * other list manipulations.
 */
struct ArenaList {
    ArenaHeader     *head;
    ArenaHeader     **cursor;

    ArenaList() {
        clear();
    }

    void clear() {
        head = nullptr;
        cursor = &head;
    }

    void insert(ArenaHeader *arena);
};
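
/*
 * Schematically, an ArenaList looks like this, with fully allocated arenas
 * in front of the cursor and arenas with free things behind it:
 *
 *   head -> [full] -> [full] -> [has free things] -> [has free things]
 *                               ^
 *                               *cursor
 */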

class ArenaLists
{
    /*
     * For each arena kind its free list is represented as the first span with
     * free things. Initially all the spans are initialized as empty. After we
     * find a new arena with available things we move its first free span into
     * the list and set the arena as fully allocated. This way we do not need
     * to update the arena header after the initial allocation. When starting
     * the GC we move the head of the list of spans back to the arena only for
     * arenas that were not fully allocated.
     */
    FreeSpan       freeLists[FINALIZE_LIMIT];

    ArenaList      arenaLists[FINALIZE_LIMIT];

    /*
     * The background finalization adds the finalized arenas to the list at
     * the *cursor position. backgroundFinalizeState controls the interaction
     * between the GC lock and the access to the list from the allocation
     * thread.
     *
     * BFS_DONE indicates that the finalization is not running or cannot
     * affect this arena list. The allocation thread can access the list
     * outside the GC lock.
     *
     * In BFS_RUN and BFS_JUST_FINISHED the allocation thread must take the
     * lock. The former indicates that the finalization is still running. The
     * latter signals that the finalization has just added finalized arenas to
     * the list. In that case the lock effectively serves as a read barrier to
     * ensure that the allocation thread sees all the writes done during
     * finalization.
     */
    enum BackgroundFinalizeState {
        BFS_DONE,
        BFS_RUN,
        BFS_JUST_FINISHED
    };

    volatile uintptr_t backgroundFinalizeState[FINALIZE_LIMIT];
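
    /*
     * Roughly, the state for a given kind cycles as follows:
     *
     *   BFS_DONE -> BFS_RUN             background finalization starts
     *   BFS_RUN -> BFS_JUST_FINISHED    finalized arenas appended at *cursor
     *   BFS_JUST_FINISHED -> BFS_DONE   the allocation thread takes the GC
     *                                   lock and observes the finished state
     */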

  public:
    /* For each arena kind, a list of arenas remaining to be swept. */
    ArenaHeader *arenaListsToSweep[FINALIZE_LIMIT];

    /* Shape arenas to be swept in the foreground. */
    ArenaHeader *gcShapeArenasToSweep;

  public:
    ArenaLists() {
        for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
            freeLists[i].initAsEmpty();
        for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
            backgroundFinalizeState[i] = BFS_DONE;
        for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
            arenaListsToSweep[i] = nullptr;
        gcShapeArenasToSweep = nullptr;
    }

    ~ArenaLists() {
        for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
            /*
             * We can only call this during shutdown, after the last GC, when
             * the background finalization is disabled.
             */
            JS_ASSERT(backgroundFinalizeState[i] == BFS_DONE);
            ArenaHeader **headp = &arenaLists[i].head;
            while (ArenaHeader *aheader = *headp) {
                *headp = aheader->next;
                aheader->chunk()->releaseArena(aheader);
            }
        }
    }

    static uintptr_t getFreeListOffset(AllocKind thingKind) {
        uintptr_t offset = offsetof(ArenaLists, freeLists);
        return offset + thingKind * sizeof(FreeSpan);
    }

    const FreeSpan *getFreeList(AllocKind thingKind) const {
        return &freeLists[thingKind];
    }

    ArenaHeader *getFirstArena(AllocKind thingKind) const {
        return arenaLists[thingKind].head;
    }

    ArenaHeader *getFirstArenaToSweep(AllocKind thingKind) const {
        return arenaListsToSweep[thingKind];
    }

    bool arenaListsAreEmpty() const {
        for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
            /*
             * The arena lists cannot be empty if the background finalization
             * is not yet done.
             */
            if (backgroundFinalizeState[i] != BFS_DONE)
                return false;
            if (arenaLists[i].head)
                return false;
        }
        return true;
    }

    bool arenasAreFull(AllocKind thingKind) const {
        return !*arenaLists[thingKind].cursor;
    }

    void unmarkAll() {
        for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
            /* The background finalization must have stopped at this point. */
            JS_ASSERT(backgroundFinalizeState[i] == BFS_DONE ||
                      backgroundFinalizeState[i] == BFS_JUST_FINISHED);
            for (ArenaHeader *aheader = arenaLists[i].head; aheader; aheader = aheader->next) {
                uintptr_t *word = aheader->chunk()->bitmap.arenaBits(aheader);
                memset(word, 0, ArenaBitmapWords * sizeof(uintptr_t));
            }
        }
    }

    bool doneBackgroundFinalize(AllocKind kind) const {
        return backgroundFinalizeState[kind] == BFS_DONE ||
               backgroundFinalizeState[kind] == BFS_JUST_FINISHED;
    }

    bool needBackgroundFinalizeWait(AllocKind kind) const {
        return backgroundFinalizeState[kind] != BFS_DONE;
    }

    /*
     * Return the free list back to the arena so the GC finalization will not
     * run the finalizers over uninitialized bytes from free things.
     */
    void purge() {
        for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
            purge(AllocKind(i));
    }

    void purge(AllocKind i) {
        FreeSpan *headSpan = &freeLists[i];
        if (!headSpan->isEmpty()) {
            ArenaHeader *aheader = headSpan->arenaHeader();
            aheader->setFirstFreeSpan(headSpan);
            headSpan->initAsEmpty();
        }
    }

    inline void prepareForIncrementalGC(JSRuntime *rt);

    /*
     * Temporarily copy the free list heads to the arenas so the code can see
     * the proper value in ArenaHeader::freeList when accessing the latter
     * outside the GC.
     */
    void copyFreeListsToArenas() {
        for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
            copyFreeListToArena(AllocKind(i));
    }

    void copyFreeListToArena(AllocKind thingKind) {
        FreeSpan *headSpan = &freeLists[thingKind];
        if (!headSpan->isEmpty()) {
            ArenaHeader *aheader = headSpan->arenaHeader();
            JS_ASSERT(!aheader->hasFreeThings());
            aheader->setFirstFreeSpan(headSpan);
        }
    }

    /*
     * Clear the free lists in arenas that were temporarily set there using
     * copyToArenas.
     */
    void clearFreeListsInArenas() {
        for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
            clearFreeListInArena(AllocKind(i));
    }

    void clearFreeListInArena(AllocKind kind) {
        FreeSpan *headSpan = &freeLists[kind];
        if (!headSpan->isEmpty()) {
            ArenaHeader *aheader = headSpan->arenaHeader();
            JS_ASSERT(aheader->getFirstFreeSpan().isSameNonEmptySpan(headSpan));
            aheader->setAsFullyUsed();
        }
    }

    /*
     * Check that the free list is either empty or was synchronized with the
     * arena using copyToArena().
     */
    bool isSynchronizedFreeList(AllocKind kind) {
        FreeSpan *headSpan = &freeLists[kind];
        if (headSpan->isEmpty())
            return true;
        ArenaHeader *aheader = headSpan->arenaHeader();
        if (aheader->hasFreeThings()) {
            /*
             * If the arena has a free list, it must be the same as one in
             * lists.
             */
            JS_ASSERT(aheader->getFirstFreeSpan().isSameNonEmptySpan(headSpan));
            return true;
        }
        return false;
    }

    MOZ_ALWAYS_INLINE void *allocateFromFreeList(AllocKind thingKind, size_t thingSize) {
        return freeLists[thingKind].allocate(thingSize);
    }

    template <AllowGC allowGC>
    static void *refillFreeList(ThreadSafeContext *cx, AllocKind thingKind);
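
    /*
     * For example, code that enumerates cells outside a GC would typically
     * bracket the walk with the hand-off methods above:
     *
     *   copyFreeListsToArenas();
     *   // ... iterate arenas; ArenaHeader::freeList is now accurate ...
     *   clearFreeListsInArenas();
     */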

    /*
     * Moves all arenas from |fromArenaLists| into |this|. In
     * parallel blocks, we temporarily create one ArenaLists per
     * parallel thread. When the parallel block ends, we move
     * whatever allocations may have been performed back into the
     * compartment's main arena list using this function.
     */
    void adoptArenas(JSRuntime *runtime, ArenaLists *fromArenaLists);

    /* True if the ArenaHeader in question is found in this ArenaLists */
    bool containsArena(JSRuntime *runtime, ArenaHeader *arenaHeader);

    void checkEmptyFreeLists() {
#ifdef DEBUG
        for (size_t i = 0; i < mozilla::ArrayLength(freeLists); ++i)
            JS_ASSERT(freeLists[i].isEmpty());
#endif
    }

    void checkEmptyFreeList(AllocKind kind) {
        JS_ASSERT(freeLists[kind].isEmpty());
    }

    void queueObjectsForSweep(FreeOp *fop);
    void queueStringsForSweep(FreeOp *fop);
    void queueShapesForSweep(FreeOp *fop);
    void queueScriptsForSweep(FreeOp *fop);
    void queueJitCodeForSweep(FreeOp *fop);

    bool foregroundFinalize(FreeOp *fop, AllocKind thingKind, SliceBudget &sliceBudget);
    static void backgroundFinalize(FreeOp *fop, ArenaHeader *listHead, bool onBackgroundThread);

    void wipeDuringParallelExecution(JSRuntime *rt);

  private:
    inline void finalizeNow(FreeOp *fop, AllocKind thingKind);
    inline void forceFinalizeNow(FreeOp *fop, AllocKind thingKind);
    inline void queueForForegroundSweep(FreeOp *fop, AllocKind thingKind);
    inline void queueForBackgroundSweep(FreeOp *fop, AllocKind thingKind);

    void *allocateFromArena(JS::Zone *zone, AllocKind thingKind);
    inline void *allocateFromArenaInline(JS::Zone *zone, AllocKind thingKind);

    inline void normalizeBackgroundFinalizeState(AllocKind thingKind);

    friend class js::Nursery;
};

/*
 * Initial allocation size for data structures holding chunks is set to hold
 * chunks with total capacity of 16MB to avoid buffer resizes during browser
 * startup.
 */
const size_t INITIAL_CHUNK_CAPACITY = 16 * 1024 * 1024 / ChunkSize;
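
/* With the usual 1MB ChunkSize this works out to capacity for 16 chunks. */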

/* The number of GC cycles an empty chunk can survive before being released. */
const size_t MAX_EMPTY_CHUNK_AGE = 4;

} /* namespace gc */

typedef enum JSGCRootType {
    JS_GC_ROOT_VALUE_PTR,
    JS_GC_ROOT_STRING_PTR,
    JS_GC_ROOT_OBJECT_PTR,
    JS_GC_ROOT_SCRIPT_PTR
} JSGCRootType;

struct RootInfo {
    RootInfo() {}
    RootInfo(const char *name, JSGCRootType type) : name(name), type(type) {}
    const char *name;
    JSGCRootType type;
};

typedef js::HashMap<void *,
                    RootInfo,
                    js::DefaultHasher<void *>,
                    js::SystemAllocPolicy> RootedValueMap;

extern bool
AddValueRoot(JSContext *cx, js::Value *vp, const char *name);

extern bool
AddValueRootRT(JSRuntime *rt, js::Value *vp, const char *name);

extern bool
AddStringRoot(JSContext *cx, JSString **rp, const char *name);

extern bool
AddObjectRoot(JSContext *cx, JSObject **rp, const char *name);

extern bool
AddObjectRoot(JSRuntime *rt, JSObject **rp, const char *name);

extern bool
AddScriptRoot(JSContext *cx, JSScript **rp, const char *name);

extern void
RemoveRoot(JSRuntime *rt, void *rp);
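
/*
 * For example, a caller might pair these as follows, keeping the rooted
 * memory alive until the matching RemoveRoot call:
 *
 *   js::Value v;
 *   if (AddValueRoot(cx, &v, "my-root")) {
 *       // ... use v as a GC-visible root ...
 *       RemoveRoot(cx->runtime(), &v);
 *   }
 */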

} /* namespace js */

extern bool
js_InitGC(JSRuntime *rt, uint32_t maxbytes);

extern void
js_FinishGC(JSRuntime *rt);

namespace js {

class InterpreterFrame;

extern void
MarkCompartmentActive(js::InterpreterFrame *fp);

extern void
TraceRuntime(JSTracer *trc);

/* Must be called with GC lock taken. */
extern bool
TriggerGC(JSRuntime *rt, JS::gcreason::Reason reason);

/* Must be called with GC lock taken. */
extern bool
TriggerZoneGC(Zone *zone, JS::gcreason::Reason reason);

extern void
MaybeGC(JSContext *cx);

extern void
ReleaseAllJITCode(FreeOp *op);

/*
 * Kinds of js_GC invocation.
 */
typedef enum JSGCInvocationKind {
    /* Normal invocation. */
    GC_NORMAL = 0,

    /* Minimize GC triggers and release empty GC chunks right away. */
    GC_SHRINK = 1
} JSGCInvocationKind;

extern void
GC(JSRuntime *rt, JSGCInvocationKind gckind, JS::gcreason::Reason reason);

extern void
GCSlice(JSRuntime *rt, JSGCInvocationKind gckind, JS::gcreason::Reason reason, int64_t millis = 0);

extern void
GCFinalSlice(JSRuntime *rt, JSGCInvocationKind gckind, JS::gcreason::Reason reason);

extern void
GCDebugSlice(JSRuntime *rt, bool limit, int64_t objCount);

extern void
PrepareForDebugGC(JSRuntime *rt);

extern void
MinorGC(JSRuntime *rt, JS::gcreason::Reason reason);

extern void
MinorGC(JSContext *cx, JS::gcreason::Reason reason);

#ifdef JS_GC_ZEAL
extern void
SetGCZeal(JSRuntime *rt, uint8_t zeal, uint32_t frequency);
#endif

/* Functions for managing cross compartment gray pointers. */

extern void
DelayCrossCompartmentGrayMarking(JSObject *src);

extern void
NotifyGCNukeWrapper(JSObject *o);

extern unsigned
NotifyGCPreSwap(JSObject *a, JSObject *b);

extern void
NotifyGCPostSwap(JSObject *a, JSObject *b, unsigned preResult);

/*
 * Helper that implements sweeping and allocation for kinds that can be swept
 * and allocated off the main thread.
 *
 * In non-threadsafe builds, all actual sweeping and allocation is performed
 * on the main thread, but GCHelperThread encapsulates this from clients as
 * much as possible.
 */
class GCHelperThread {
    enum State {
        IDLE,
        SWEEPING,
        ALLOCATING,
        CANCEL_ALLOCATION,
        SHUTDOWN
    };

    /*
     * During the finalization we do not free immediately. Rather we add the
     * corresponding pointers to a buffer which we later release on a
     * separate thread.
     *
     * The buffer is implemented as a vector of 64K arrays of pointers, not as
     * a simple vector, to avoid realloc calls during the vector growth and to
     * not bloat the binary size of the inlined freeLater method. Any OOM
     * during buffer growth results in the pointer being freed immediately.
     */
    static const size_t FREE_ARRAY_SIZE = size_t(1) << 16;
    static const size_t FREE_ARRAY_LENGTH = FREE_ARRAY_SIZE / sizeof(void *);

    JSRuntime         *const rt;
    PRThread          *thread;
    PRCondVar         *wakeup;
    PRCondVar         *done;
    volatile State    state;

    void wait(PRCondVar *which);

    bool              sweepFlag;
    bool              shrinkFlag;

    Vector<void **, 16, SystemAllocPolicy> freeVector;
    void            **freeCursor;
    void            **freeCursorEnd;

    bool              backgroundAllocation;

    friend class js::gc::ArenaLists;

    void
    replenishAndFreeLater(void *ptr);

    static void freeElementsAndArray(void **array, void **end) {
        JS_ASSERT(array <= end);
        for (void **p = array; p != end; ++p)
            js_free(*p);
        js_free(array);
    }

    static void threadMain(void *arg);
    void threadLoop();

    /* Must be called with the GC lock taken. */
    void doSweep();

  public:
    GCHelperThread(JSRuntime *rt)
      : rt(rt),
        thread(nullptr),
        wakeup(nullptr),
        done(nullptr),
        state(IDLE),
        sweepFlag(false),
        shrinkFlag(false),
        freeCursor(nullptr),
        freeCursorEnd(nullptr),
        backgroundAllocation(true)
    { }

    bool init();
    void finish();

    /* Must be called with the GC lock taken. */
    void startBackgroundSweep(bool shouldShrink);

    /* Must be called with the GC lock taken. */
    void startBackgroundShrink();

    /* Must be called without the GC lock taken. */
    void waitBackgroundSweepEnd();

    /* Must be called without the GC lock taken. */
    void waitBackgroundSweepOrAllocEnd();

    /* Must be called with the GC lock taken. */
    inline void startBackgroundAllocationIfIdle();

    bool canBackgroundAllocate() const {
        return backgroundAllocation;
    }

    void disableBackgroundAllocation() {
        backgroundAllocation = false;
    }

    PRThread *getThread() const {
        return thread;
    }

    bool onBackgroundThread();

    /*
     * Outside the GC lock this may return true even when the sweeping has in
     * fact already been done.
     */
    bool sweeping() const {
        return state == SWEEPING;
    }

    bool shouldShrink() const {
        JS_ASSERT(sweeping());
        return shrinkFlag;
    }

    void freeLater(void *ptr) {
        JS_ASSERT(!sweeping());
        if (freeCursor != freeCursorEnd)
            *freeCursor++ = ptr;
        else
            replenishAndFreeLater(ptr);
    }
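
    /*
     * With FREE_ARRAY_SIZE = 64K, each array in freeVector holds
     * FREE_ARRAY_LENGTH = 64K / sizeof(void *) pointers (8192 on a 64-bit
     * target), so the slow replenishAndFreeLater path runs only once per
     * full array of deferred frees.
     */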
};

struct GCChunkHasher {
    typedef gc::Chunk *Lookup;

    /*
     * Strip zeros for better distribution after multiplying by the golden
     * ratio.
     */
    static HashNumber hash(gc::Chunk *chunk) {
        JS_ASSERT(!(uintptr_t(chunk) & gc::ChunkMask));
        return HashNumber(uintptr_t(chunk) >> gc::ChunkShift);
    }

    static bool match(gc::Chunk *k, gc::Chunk *l) {
        JS_ASSERT(!(uintptr_t(k) & gc::ChunkMask));
        JS_ASSERT(!(uintptr_t(l) & gc::ChunkMask));
        return k == l;
    }
};

typedef HashSet<js::gc::Chunk *, GCChunkHasher, SystemAllocPolicy> GCChunkSet;

struct GrayRoot {
    void *thing;
    JSGCTraceKind kind;
#ifdef DEBUG
    JSTraceNamePrinter debugPrinter;
    const void *debugPrintArg;
    size_t debugPrintIndex;
#endif

    GrayRoot(void *thing, JSGCTraceKind kind)
      : thing(thing), kind(kind) {}
};

void
MarkStackRangeConservatively(JSTracer *trc, Value *begin, Value *end);

typedef void (*IterateChunkCallback)(JSRuntime *rt, void *data, gc::Chunk *chunk);
typedef void (*IterateZoneCallback)(JSRuntime *rt, void *data, JS::Zone *zone);
typedef void (*IterateArenaCallback)(JSRuntime *rt, void *data, gc::Arena *arena,
                                     JSGCTraceKind traceKind, size_t thingSize);
typedef void (*IterateCellCallback)(JSRuntime *rt, void *data, void *thing,
                                    JSGCTraceKind traceKind, size_t thingSize);

/*
 * This function calls |zoneCallback| on every zone, |compartmentCallback| on
 * every compartment, |arenaCallback| on every in-use arena, and |cellCallback|
 * on every in-use cell in the GC heap.
 */
extern void
IterateZonesCompartmentsArenasCells(JSRuntime *rt, void *data,
                                    IterateZoneCallback zoneCallback,
                                    JSIterateCompartmentCallback compartmentCallback,
                                    IterateArenaCallback arenaCallback,
                                    IterateCellCallback cellCallback);
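
/*
 * For example, a heap census might count cells with a callback like the
 * following (the struct and callback names here are only illustrative):
 *
 *   struct CensusData { size_t cells; };
 *
 *   static void
 *   CountCell(JSRuntime *rt, void *data, void *thing,
 *             JSGCTraceKind traceKind, size_t thingSize)
 *   {
 *       static_cast<CensusData *>(data)->cells++;
 *   }
 *
 * and pass it as the |cellCallback| argument together with no-op zone,
 * compartment, and arena callbacks.
 */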

/*
 * This function is like IterateZonesCompartmentsArenasCells, but does it for a
 * single zone.
 */
extern void
IterateZoneCompartmentsArenasCells(JSRuntime *rt, Zone *zone, void *data,
                                   IterateZoneCallback zoneCallback,
                                   JSIterateCompartmentCallback compartmentCallback,
                                   IterateArenaCallback arenaCallback,
                                   IterateCellCallback cellCallback);

/*
 * Invoke chunkCallback on every in-use chunk.
 */
extern void
IterateChunks(JSRuntime *rt, void *data, IterateChunkCallback chunkCallback);

typedef void (*IterateScriptCallback)(JSRuntime *rt, void *data, JSScript *script);

/*
 * Invoke scriptCallback on every in-use script for
 * the given compartment or for all compartments if it is null.
 */
extern void
IterateScripts(JSRuntime *rt, JSCompartment *compartment,
               void *data, IterateScriptCallback scriptCallback);

} /* namespace js */

extern void
js_FinalizeStringRT(JSRuntime *rt, JSString *str);

namespace js {

JSCompartment *
NewCompartment(JSContext *cx, JS::Zone *zone, JSPrincipals *principals,
               const JS::CompartmentOptions &options);

namespace gc {

extern void
GCIfNeeded(JSContext *cx);

/* Tries to run a GC no matter what (used for GC zeal). */
void
RunDebugGC(JSContext *cx);

void
SetDeterministicGC(JSContext *cx, bool enabled);

void
SetValidateGC(JSContext *cx, bool enabled);

void
SetFullCompartmentChecks(JSContext *cx, bool enabled);

/* Wait for the background thread to finish sweeping if it is running. */
void
FinishBackgroundFinalize(JSRuntime *rt);

/*
 * Merge all contents of source into target. This can only be used if source is
 * the only compartment in its zone.
 */
void
MergeCompartments(JSCompartment *source, JSCompartment *target);

const int ZealPokeValue = 1;
const int ZealAllocValue = 2;
const int ZealFrameGCValue = 3;
const int ZealVerifierPreValue = 4;
const int ZealFrameVerifierPreValue = 5;
const int ZealStackRootingValue = 6;
const int ZealGenerationalGCValue = 7;
const int ZealIncrementalRootsThenFinish = 8;
const int ZealIncrementalMarkAllThenFinish = 9;
const int ZealIncrementalMultipleSlices = 10;
const int ZealVerifierPostValue = 11;
const int ZealFrameVerifierPostValue = 12;
const int ZealCheckHashTablesOnMinorGC = 13;
const int ZealLimit = 13;

enum VerifierType {
    PreBarrierVerifier,
    PostBarrierVerifier
};

#ifdef JS_GC_ZEAL

/* Check that write barriers have been used correctly. See jsgc.cpp. */
void
VerifyBarriers(JSRuntime *rt, VerifierType type);

void
MaybeVerifyBarriers(JSContext *cx, bool always = false);

#else

static inline void
VerifyBarriers(JSRuntime *rt, VerifierType type)
{
}

static inline void
MaybeVerifyBarriers(JSContext *cx, bool always = false)
{
}

#endif
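
/*
 * For example, in a JS_GC_ZEAL build one might request zeal mode 10
 * (incremental GC in multiple slices) roughly every 100 allocations with:
 *
 *   SetGCZeal(rt, ZealIncrementalMultipleSlices, 100);
 */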

/*
 * Instances of this class set the |JSRuntime::suppressGC| flag for the
 * duration that they are live. Use of this class is highly discouraged.
 * Please carefully read the comment in jscntxt.h above |suppressGC| and take
 * all appropriate precautions before instantiating this class.
 */
class AutoSuppressGC
{
    int32_t &suppressGC_;

  public:
    AutoSuppressGC(ExclusiveContext *cx);
    AutoSuppressGC(JSCompartment *comp);
    AutoSuppressGC(JSRuntime *rt);

    ~AutoSuppressGC()
    {
        suppressGC_--;
    }
};

#ifdef DEBUG
/* Disable OOM testing in sections which are not OOM safe. */
class AutoEnterOOMUnsafeRegion
{
    uint32_t saved_;

  public:
    AutoEnterOOMUnsafeRegion() : saved_(OOM_maxAllocations) {
        OOM_maxAllocations = UINT32_MAX;
    }
    ~AutoEnterOOMUnsafeRegion() {
        OOM_maxAllocations = saved_;
    }
};
#else
class AutoEnterOOMUnsafeRegion {};
#endif /* DEBUG */

} /* namespace gc */

#ifdef DEBUG
/* Use this to avoid assertions when manipulating the wrapper map. */
class AutoDisableProxyCheck
{
    MOZ_DECL_USE_GUARD_OBJECT_NOTIFIER;
    uintptr_t &count;

  public:
    AutoDisableProxyCheck(JSRuntime *rt
                          MOZ_GUARD_OBJECT_NOTIFIER_PARAM);

    ~AutoDisableProxyCheck() {
        count--;
    }
};
#else
struct AutoDisableProxyCheck
{
    AutoDisableProxyCheck(JSRuntime *rt) {}
};
#endif

void
PurgeJITCaches(JS::Zone *zone);

// This is the same as IsInsideNursery, but not inlined.
bool
UninlinedIsInsideNursery(JSRuntime *rt, const void *thing);

} /* namespace js */

#endif /* jsgc_h */