/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sw=4 et tw=78:
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifdef JSGC_GENERATIONAL

#include "gc/Nursery-inl.h"

#include "jscompartment.h"
#include "jsgc.h"
#include "jsinfer.h"
#include "jsutil.h"
#include "prmjtime.h"

#include "gc/GCInternals.h"
#include "gc/Memory.h"
#ifdef JS_ION
#include "jit/IonFrames.h"
#endif
#include "mozilla/IntegerPrintfMacros.h"
#include "vm/ArrayObject.h"
#include "vm/Debugger.h"
#if defined(DEBUG)
#include "vm/ScopeObject.h"
#endif
#include "vm/TypedArrayObject.h"

#include "jsgcinlines.h"

#include "vm/ObjectImpl-inl.h"

using namespace js;
using namespace gc;
using namespace mozilla;

//#define PROFILE_NURSERY

#ifdef PROFILE_NURSERY
/*
 * Print timing information for minor GCs that take longer than this time in microseconds.
 */
static int64_t GCReportThreshold = INT64_MAX;
#endif

bool
js::Nursery::init()
{
    JS_ASSERT(start() == 0);

    if (!hugeSlots.init())
        return false;

    void *heap = MapAlignedPages(runtime(), NurserySize, Alignment);
    if (!heap)
        return false;

    JSRuntime *rt = runtime();
    rt->gcNurseryStart_ = uintptr_t(heap);
    currentStart_ = start();
    rt->gcNurseryEnd_ = chunk(LastNurseryChunk).end();
    numActiveChunks_ = 1;
    JS_POISON(heap, JS_FRESH_NURSERY_PATTERN, NurserySize);
    setCurrentChunk(0);
    updateDecommittedRegion();

#ifdef PROFILE_NURSERY
    char *env = getenv("JS_MINORGC_TIME");
    if (env)
        GCReportThreshold = atoi(env);
#endif

    JS_ASSERT(isEnabled());
    return true;
}

js::Nursery::~Nursery()
{
    if (start())
        UnmapPages(runtime(), (void *)start(), NurserySize);
}

void
js::Nursery::enable()
{
    JS_ASSERT(isEmpty());
    if (isEnabled())
        return;
    numActiveChunks_ = 1;
    setCurrentChunk(0);
    currentStart_ = position();
#ifdef JS_GC_ZEAL
    if (runtime()->gcZeal_ == ZealGenerationalGCValue)
        enterZealMode();
#endif
}

void
js::Nursery::disable()
{
    JS_ASSERT(isEmpty());
    if (!isEnabled())
        return;
    numActiveChunks_ = 0;
    currentEnd_ = 0;
    updateDecommittedRegion();
}

bool
js::Nursery::isEmpty() const
{
    JS_ASSERT(runtime_);
    if (!isEnabled())
        return true;
    JS_ASSERT_IF(runtime_->gcZeal_ != ZealGenerationalGCValue, currentStart_ == start());
    return position() == currentStart_;
}
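/*
 * Note (illustrative, not from the original file): the nursery is one
 * contiguous reservation of NurserySize bytes, bounded by the
 * gcNurseryStart_/gcNurseryEnd_ fields set up in init() above. That makes
 * membership tests a pair of pointer comparisons. A minimal sketch of the
 * idea behind isInside() (the real check lives in gc/Nursery-inl.h):
 *
 *     bool isInside(const void *p) const {
 *         return uintptr_t(p) >= start() && uintptr_t(p) < heapEnd();
 *     }
 *
 * where heapEnd() stands in for the end of the reserved range; the name is
 * assumed here for illustration.
 */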
JSObject *
js::Nursery::allocateObject(JSContext *cx, size_t size, size_t numDynamic)
{
    /* Ensure there's enough space to replace the contents with a RelocationOverlay. */
    JS_ASSERT(size >= sizeof(RelocationOverlay));

    /* Attempt to allocate slots contiguously after object, if possible. */
    if (numDynamic && numDynamic <= MaxNurserySlots) {
        size_t totalSize = size + sizeof(HeapSlot) * numDynamic;
        JSObject *obj = static_cast<JSObject *>(allocate(totalSize));
        if (obj) {
            obj->setInitialSlots(reinterpret_cast<HeapSlot *>(size_t(obj) + size));
            return obj;
        }
        /* If we failed to allocate as a block, retry with out-of-line slots. */
    }

    HeapSlot *slots = nullptr;
    if (numDynamic) {
        slots = allocateHugeSlots(cx, numDynamic);
        if (MOZ_UNLIKELY(!slots))
            return nullptr;
    }

    JSObject *obj = static_cast<JSObject *>(allocate(size));

    if (obj)
        obj->setInitialSlots(slots);
    else
        freeSlots(cx, slots);

    return obj;
}

void *
js::Nursery::allocate(size_t size)
{
    JS_ASSERT(isEnabled());
    JS_ASSERT(!runtime()->isHeapBusy());
    JS_ASSERT(position() >= currentStart_);

    if (position() + size > currentEnd()) {
        if (currentChunk_ + 1 == numActiveChunks_)
            return nullptr;
        setCurrentChunk(currentChunk_ + 1);
    }

    void *thing = (void *)position();
    position_ = position() + size;

    JS_EXTRA_POISON(thing, JS_ALLOCATED_NURSERY_PATTERN, size);
    return thing;
}
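/*
 * Illustrative sketch (not part of the original file): allocate() is a bump
 * allocator. Ignoring chunk boundaries and poisoning, each allocation is
 * just pointer arithmetic:
 *
 *     uintptr_t thing = position_;   // current bump pointer
 *     position_ += size;             // reserve |size| bytes
 *     return (void *)thing;
 *
 * For example, with position_ == 0x1000, three 32-byte allocations return
 * 0x1000, 0x1020 and 0x1040. When the current chunk is exhausted the code
 * above advances to the next active chunk, and it returns nullptr only once
 * all active chunks are full; callers then typically run a minor collection
 * or fall back to a tenured allocation.
 */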
/* Internally, this function is used to allocate elements as well as slots. */
HeapSlot *
js::Nursery::allocateSlots(JSContext *cx, JSObject *obj, uint32_t nslots)
{
    JS_ASSERT(obj);
    JS_ASSERT(nslots > 0);

    if (!isInside(obj))
        return cx->pod_malloc<HeapSlot>(nslots);

    if (nslots > MaxNurserySlots)
        return allocateHugeSlots(cx, nslots);

    size_t size = sizeof(HeapSlot) * nslots;
    HeapSlot *slots = static_cast<HeapSlot *>(allocate(size));
    if (slots)
        return slots;

    return allocateHugeSlots(cx, nslots);
}

ObjectElements *
js::Nursery::allocateElements(JSContext *cx, JSObject *obj, uint32_t nelems)
{
    JS_ASSERT(nelems >= ObjectElements::VALUES_PER_HEADER);
    return reinterpret_cast<ObjectElements *>(allocateSlots(cx, obj, nelems));
}

HeapSlot *
js::Nursery::reallocateSlots(JSContext *cx, JSObject *obj, HeapSlot *oldSlots,
                             uint32_t oldCount, uint32_t newCount)
{
    size_t oldSize = oldCount * sizeof(HeapSlot);
    size_t newSize = newCount * sizeof(HeapSlot);

    if (!isInside(obj))
        return static_cast<HeapSlot *>(cx->realloc_(oldSlots, oldSize, newSize));

    if (!isInside(oldSlots)) {
        HeapSlot *newSlots = static_cast<HeapSlot *>(cx->realloc_(oldSlots, oldSize, newSize));
        if (oldSlots != newSlots) {
            hugeSlots.remove(oldSlots);
            /* If this put fails, we will only leak the slots. */
            (void)hugeSlots.put(newSlots);
        }
        return newSlots;
    }

    /* The nursery cannot make use of the returned slots data. */
    if (newCount < oldCount)
        return oldSlots;

    HeapSlot *newSlots = allocateSlots(cx, obj, newCount);
    PodCopy(newSlots, oldSlots, oldCount);
    return newSlots;
}

ObjectElements *
js::Nursery::reallocateElements(JSContext *cx, JSObject *obj, ObjectElements *oldHeader,
                                uint32_t oldCount, uint32_t newCount)
{
    HeapSlot *slots = reallocateSlots(cx, obj, reinterpret_cast<HeapSlot *>(oldHeader),
                                      oldCount, newCount);
    return reinterpret_cast<ObjectElements *>(slots);
}

void
js::Nursery::freeSlots(JSContext *cx, HeapSlot *slots)
{
    if (!isInside(slots)) {
        hugeSlots.remove(slots);
        js_free(slots);
    }
}

HeapSlot *
js::Nursery::allocateHugeSlots(JSContext *cx, size_t nslots)
{
    HeapSlot *slots = cx->pod_malloc<HeapSlot>(nslots);
    /* If this put fails, we will only leak the slots. */
    (void)hugeSlots.put(slots);
    return slots;
}

void
js::Nursery::notifyInitialSlots(Cell *cell, HeapSlot *slots)
{
    if (isInside(cell) && !isInside(slots)) {
        /* If this put fails, we will only leak the slots. */
        (void)hugeSlots.put(slots);
    }
}
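/*
 * Bookkeeping note (illustrative, not from the original file): hugeSlots is
 * the set of out-of-line slot buffers that are owned by nursery objects but
 * were allocated with malloc rather than from nursery space. The functions
 * above maintain a simple invariant: any malloc'd buffer reachable from a
 * nursery object must be in the set, so that freeHugeSlots() can release
 * whatever was not claimed by a tenured object during the minor collection.
 * A failed put() therefore leaks the buffer but is otherwise harmless, which
 * is why those failures are deliberately ignored.
 */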
namespace js {
namespace gc {

class MinorCollectionTracer : public JSTracer
{
  public:
    Nursery *nursery;
    AutoTraceSession session;

    /* Amount of data moved to the tenured generation during collection. */
    size_t tenuredSize;

    /*
     * This list is threaded through the Nursery using the space from already
     * moved things. The list is used to fix up the moved things and to find
     * things held live by intra-Nursery pointers.
     */
    RelocationOverlay *head;
    RelocationOverlay **tail;

    /* Save and restore all of the runtime state we use during MinorGC. */
    bool savedRuntimeNeedBarrier;
    AutoDisableProxyCheck disableStrictProxyChecking;
    AutoEnterOOMUnsafeRegion oomUnsafeRegion;
    ArrayBufferVector liveArrayBuffers;

    /* Insert the given relocation entry into the list of things to visit. */
    MOZ_ALWAYS_INLINE void insertIntoFixupList(RelocationOverlay *entry) {
        *tail = entry;
        tail = &entry->next_;
        *tail = nullptr;
    }

    MinorCollectionTracer(JSRuntime *rt, Nursery *nursery)
      : JSTracer(rt, Nursery::MinorGCCallback, TraceWeakMapKeysValues),
        nursery(nursery),
        session(rt, MinorCollecting),
        tenuredSize(0),
        head(nullptr),
        tail(&head),
        savedRuntimeNeedBarrier(rt->needsBarrier()),
        disableStrictProxyChecking(rt)
    {
        rt->gcNumber++;

        /*
         * We disable the runtime needsBarrier() check so that pre-barriers do
         * not fire on objects that have been relocated. The pre-barrier's
         * call to obj->zone() will try to look through shape_, which is now
         * the relocation magic and will crash. However, zone->needsBarrier()
         * must still be set correctly so that allocations we make in minor
         * GCs between incremental slices will allocate their objects marked.
         */
        rt->setNeedsBarrier(false);

        /*
         * We use the live array buffer lists to track traced buffers so we can
         * sweep their dead views. Incremental collection also uses these lists,
         * so we may need to save and restore their contents here.
         */
        if (rt->gcIncrementalState != NO_INCREMENTAL) {
            for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
                if (!ArrayBufferObject::saveArrayBufferList(c, liveArrayBuffers))
                    CrashAtUnhandlableOOM("OOM while saving live array buffers");
                ArrayBufferObject::resetArrayBufferList(c);
            }
        }
    }

    ~MinorCollectionTracer() {
        runtime()->setNeedsBarrier(savedRuntimeNeedBarrier);
        if (runtime()->gcIncrementalState != NO_INCREMENTAL)
            ArrayBufferObject::restoreArrayBufferLists(liveArrayBuffers);
    }
};

} /* namespace gc */
} /* namespace js */
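/*
 * Illustrative sketch (not part of the original file): the head/tail fields
 * above implement a Cheney-style work list threaded through the corpses of
 * moved objects. Tenuring an object appends its RelocationOverlay:
 *
 *     overlay->forwardTo(dst);           // first word now points at the copy
 *     trc->insertIntoFixupList(overlay); // *tail = overlay; tail = &overlay->next_;
 *
 * and collectToFixedPoint() walks the list from head, tracing each copied
 * object. Anything it finds still living in the nursery is moved and
 * appended to the same list, so the walk terminates exactly when no
 * reachable nursery object remains unmoved.
 */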
static AllocKind
GetObjectAllocKindForCopy(JSRuntime *rt, JSObject *obj)
{
    if (obj->is<ArrayObject>()) {
        JS_ASSERT(obj->numFixedSlots() == 0);

        /* Use minimal size object if we are just going to copy the pointer. */
        if (!IsInsideNursery(rt, (void *)obj->getElementsHeader()))
            return FINALIZE_OBJECT0_BACKGROUND;

        size_t nelements = obj->getDenseCapacity();
        return GetBackgroundAllocKind(GetGCArrayKind(nelements));
    }

    if (obj->is<JSFunction>())
        return obj->as<JSFunction>().getAllocKind();

    /*
     * Typed arrays in the nursery may have a lazily allocated buffer; make
     * sure there is room for the array's fixed data when moving the array.
     */
    if (obj->is<TypedArrayObject>() && !obj->as<TypedArrayObject>().buffer()) {
        size_t nbytes = obj->as<TypedArrayObject>().byteLength();
        return GetBackgroundAllocKind(TypedArrayObject::AllocKindForLazyBuffer(nbytes));
    }

    AllocKind kind = GetGCObjectFixedSlotsKind(obj->numFixedSlots());
    JS_ASSERT(!IsBackgroundFinalized(kind));
    JS_ASSERT(CanBeFinalizedInBackground(kind, obj->getClass()));
    return GetBackgroundAllocKind(kind);
}

void *
js::Nursery::allocateFromTenured(Zone *zone, AllocKind thingKind)
{
    void *t = zone->allocator.arenas.allocateFromFreeList(thingKind, Arena::thingSize(thingKind));
    if (t)
        return t;
    zone->allocator.arenas.checkEmptyFreeList(thingKind);
    return zone->allocator.arenas.allocateFromArena(zone, thingKind);
}

void
js::Nursery::setSlotsForwardingPointer(HeapSlot *oldSlots, HeapSlot *newSlots, uint32_t nslots)
{
    JS_ASSERT(nslots > 0);
    JS_ASSERT(isInside(oldSlots));
    JS_ASSERT(!isInside(newSlots));
    *reinterpret_cast<HeapSlot **>(oldSlots) = newSlots;
}

void
js::Nursery::setElementsForwardingPointer(ObjectElements *oldHeader, ObjectElements *newHeader,
                                          uint32_t nelems)
{
    /*
     * If the JIT has hoisted a zero-length pointer, then we do not need to
     * relocate it because reads and writes to/from this pointer are invalid.
     */
    if (nelems - ObjectElements::VALUES_PER_HEADER < 1)
        return;
    JS_ASSERT(isInside(oldHeader));
    JS_ASSERT(!isInside(newHeader));
    *reinterpret_cast<HeapSlot **>(oldHeader->elements()) = newHeader->elements();
}
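/*
 * Illustrative note (not part of the original file): both helpers above
 * encode the forwarding pointer by overwriting the first usable word of the
 * dead buffer with the address of its tenured copy:
 *
 *     before:  old[0] = <slot value>      after:  old[0] = (HeapSlot *)newSlots
 *
 * forwardBufferPointer() below inverts the mapping: if a buffer pointer
 * still aims at nursery memory, it is replaced by the word stored at that
 * address. The zero-length early return above exists because such a buffer
 * has no word past its header that can safely be repurposed.
 */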
#ifdef DEBUG
static bool IsWriteableAddress(void *ptr)
{
    volatile uint64_t *vPtr = reinterpret_cast<volatile uint64_t *>(ptr);
    *vPtr = *vPtr;
    return true;
}
#endif

void
js::Nursery::forwardBufferPointer(HeapSlot **pSlotsElems)
{
    HeapSlot *old = *pSlotsElems;

    if (!isInside(old))
        return;

    /*
     * If the elements buffer is zero length, the "first" item could be inside
     * of the next object or past the end of the allocable area. However,
     * since we always store the runtime as the last word in the nursery,
     * isInside will still be true, even if this zero-size allocation abuts the
     * end of the allocable area. Thus, it is always safe to read the first
     * word of |old| here.
     */
    *pSlotsElems = *reinterpret_cast<HeapSlot **>(old);
    JS_ASSERT(!isInside(*pSlotsElems));
    JS_ASSERT(IsWriteableAddress(*pSlotsElems));
}

// Structure for counting how many times objects of a particular type have been
// tenured during a minor collection.
struct TenureCount
{
    types::TypeObject *type;
    int count;
};

// Keep rough track of how many times we tenure objects of particular types
// during minor collections, using a fixed size hash for efficiency at the cost
// of potential collisions.
struct Nursery::TenureCountCache
{
    TenureCount entries[16];

    TenureCountCache() { PodZero(this); }

    TenureCount &findEntry(types::TypeObject *type) {
        return entries[PointerHasher<types::TypeObject *, 3>::hash(type) % ArrayLength(entries)];
    }
};
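/*
 * Illustrative note (not part of the original file): TenureCountCache is a
 * 16-entry direct-mapped cache, not an exact table. Two types whose pointer
 * hashes collide modulo 16 share an entry: the first claims the slot, and
 * the second simply fails the |entry.type == obj->type()| test in
 * collectToFixedPoint() below and goes uncounted. Undercounting is
 * acceptable because the counts only feed the pretenuring heuristic in
 * collect(); exactness is traded for a zero-allocation, constant-time lookup
 * during the collection itself.
 */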
void
js::Nursery::collectToFixedPoint(MinorCollectionTracer *trc, TenureCountCache &tenureCounts)
{
    for (RelocationOverlay *p = trc->head; p; p = p->next()) {
        JSObject *obj = static_cast<JSObject *>(p->forwardingAddress());
        traceObject(trc, obj);

        TenureCount &entry = tenureCounts.findEntry(obj->type());
        if (entry.type == obj->type()) {
            entry.count++;
        } else if (!entry.type) {
            entry.type = obj->type();
            entry.count = 1;
        }
    }
}

MOZ_ALWAYS_INLINE void
js::Nursery::traceObject(MinorCollectionTracer *trc, JSObject *obj)
{
    const Class *clasp = obj->getClass();
    if (clasp->trace)
        clasp->trace(trc, obj);

    if (!obj->isNative())
        return;

    if (!obj->hasEmptyElements())
        markSlots(trc, obj->getDenseElements(), obj->getDenseInitializedLength());

    HeapSlot *fixedStart, *fixedEnd, *dynStart, *dynEnd;
    obj->getSlotRange(0, obj->slotSpan(), &fixedStart, &fixedEnd, &dynStart, &dynEnd);
    markSlots(trc, fixedStart, fixedEnd);
    markSlots(trc, dynStart, dynEnd);
}

MOZ_ALWAYS_INLINE void
js::Nursery::markSlots(MinorCollectionTracer *trc, HeapSlot *vp, uint32_t nslots)
{
    markSlots(trc, vp, vp + nslots);
}

MOZ_ALWAYS_INLINE void
js::Nursery::markSlots(MinorCollectionTracer *trc, HeapSlot *vp, HeapSlot *end)
{
    for (; vp != end; ++vp)
        markSlot(trc, vp);
}

MOZ_ALWAYS_INLINE void
js::Nursery::markSlot(MinorCollectionTracer *trc, HeapSlot *slotp)
{
    if (!slotp->isObject())
        return;

    JSObject *obj = &slotp->toObject();
    if (!isInside(obj))
        return;

    if (getForwardedPointer(&obj)) {
        slotp->unsafeGet()->setObject(*obj);
        return;
    }

    JSObject *tenured = static_cast<JSObject *>(moveToTenured(trc, obj));
    slotp->unsafeGet()->setObject(*tenured);
}

void *
js::Nursery::moveToTenured(MinorCollectionTracer *trc, JSObject *src)
{
    Zone *zone = src->zone();
    AllocKind dstKind = GetObjectAllocKindForCopy(trc->runtime(), src);
    JSObject *dst = static_cast<JSObject *>(allocateFromTenured(zone, dstKind));
    if (!dst)
        CrashAtUnhandlableOOM("Failed to allocate object while tenuring.");

    trc->tenuredSize += moveObjectToTenured(dst, src, dstKind);

    RelocationOverlay *overlay = reinterpret_cast<RelocationOverlay *>(src);
    overlay->forwardTo(dst);
    trc->insertIntoFixupList(overlay);

    return static_cast<void *>(dst);
}
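/*
 * Illustrative sketch (not part of the original file): RelocationOverlay
 * (declared in gc/Nursery-inl.h) reuses the memory of the just-copied
 * object. Conceptually it looks something like:
 *
 *     struct RelocationOverlay {
 *         uint32_t magic_;            // marks the cell as forwarded
 *         Cell *newLocation_;         // where the object now lives
 *         RelocationOverlay *next_;   // fixup-list link
 *     };
 *
 * The exact field layout here is an assumption; what matters is that the
 * overlay must fit inside the smallest nursery allocation, which is why
 * allocateObject() asserts size >= sizeof(RelocationOverlay).
 */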
size_t
js::Nursery::moveObjectToTenured(JSObject *dst, JSObject *src, AllocKind dstKind)
{
    size_t srcSize = Arena::thingSize(dstKind);
    size_t tenuredSize = srcSize;

    /*
     * Arrays do not necessarily have the same AllocKind between src and dst.
     * We deal with this by copying elements manually, possibly re-inlining
     * them if there is adequate room inline in dst.
     */
    if (src->is<ArrayObject>())
        srcSize = sizeof(ObjectImpl);

    js_memcpy(dst, src, srcSize);
    tenuredSize += moveSlotsToTenured(dst, src, dstKind);
    tenuredSize += moveElementsToTenured(dst, src, dstKind);

    if (src->is<TypedArrayObject>())
        forwardTypedArrayPointers(dst, src);

    /* The shape's list head may point into the old object. */
    if (&src->shape_ == dst->shape_->listp)
        dst->shape_->listp = &dst->shape_;

    return tenuredSize;
}

void
js::Nursery::forwardTypedArrayPointers(JSObject *dst, JSObject *src)
{
    /*
     * Typed array data may be stored inline inside the object's fixed slots.
     * If so, we need to update the private pointer and leave a forwarding
     * pointer at the start of the data.
     */
    TypedArrayObject &typedArray = src->as<TypedArrayObject>();
    JS_ASSERT_IF(typedArray.buffer(), !isInside(src->getPrivate()));
    if (typedArray.buffer())
        return;

    void *srcData = src->fixedData(TypedArrayObject::FIXED_DATA_START);
    void *dstData = dst->fixedData(TypedArrayObject::FIXED_DATA_START);
    JS_ASSERT(src->getPrivate() == srcData);
    dst->setPrivate(dstData);

    /*
     * We don't know the number of slots here, but
     * TypedArrayObject::AllocKindForLazyBuffer ensures that it's always at
     * least one.
     */
    size_t nslots = 1;
    setSlotsForwardingPointer(reinterpret_cast<HeapSlot *>(srcData),
                              reinterpret_cast<HeapSlot *>(dstData),
                              nslots);
}

size_t
js::Nursery::moveSlotsToTenured(JSObject *dst, JSObject *src, AllocKind dstKind)
{
    /* Fixed slots have already been copied over. */
    if (!src->hasDynamicSlots())
        return 0;

    if (!isInside(src->slots)) {
        hugeSlots.remove(src->slots);
        return 0;
    }

    Zone *zone = src->zone();
    size_t count = src->numDynamicSlots();
    dst->slots = zone->pod_malloc<HeapSlot>(count);
    if (!dst->slots)
        CrashAtUnhandlableOOM("Failed to allocate slots while tenuring.");
    PodCopy(dst->slots, src->slots, count);
    setSlotsForwardingPointer(src->slots, dst->slots, count);
    return count * sizeof(HeapSlot);
}

size_t
js::Nursery::moveElementsToTenured(JSObject *dst, JSObject *src, AllocKind dstKind)
{
    if (src->hasEmptyElements())
        return 0;

    Zone *zone = src->zone();
    ObjectElements *srcHeader = src->getElementsHeader();
    ObjectElements *dstHeader;

    /* TODO Bug 874151: Prefer to put element data inline if we have space. */
    if (!isInside(srcHeader)) {
        JS_ASSERT(src->elements == dst->elements);
        hugeSlots.remove(reinterpret_cast<HeapSlot *>(srcHeader));
        return 0;
    }

    size_t nslots = ObjectElements::VALUES_PER_HEADER + srcHeader->capacity;

    /* Unlike other objects, Arrays can have fixed elements. */
    if (src->is<ArrayObject>() && nslots <= GetGCKindSlots(dstKind)) {
        dst->setFixedElements();
        dstHeader = dst->getElementsHeader();
        js_memcpy(dstHeader, srcHeader, nslots * sizeof(HeapSlot));
        setElementsForwardingPointer(srcHeader, dstHeader, nslots);
        return nslots * sizeof(HeapSlot);
    }

    JS_ASSERT(nslots >= 2);
    size_t nbytes = nslots * sizeof(HeapValue);
    dstHeader = static_cast<ObjectElements *>(zone->malloc_(nbytes));
    if (!dstHeader)
        CrashAtUnhandlableOOM("Failed to allocate elements while tenuring.");
    js_memcpy(dstHeader, srcHeader, nslots * sizeof(HeapSlot));
    setElementsForwardingPointer(srcHeader, dstHeader, nslots);
    dst->elements = dstHeader->elements();
    return nslots * sizeof(HeapSlot);
}

static bool
ShouldMoveToTenured(MinorCollectionTracer *trc, void **thingp)
{
    Cell *cell = static_cast<Cell *>(*thingp);
    Nursery &nursery = *trc->nursery;
    return !nursery.isInside(thingp) && nursery.isInside(cell) &&
           !nursery.getForwardedPointer(thingp);
}

/* static */ void
js::Nursery::MinorGCCallback(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
{
    MinorCollectionTracer *trc = static_cast<MinorCollectionTracer *>(jstrc);
    if (ShouldMoveToTenured(trc, thingp))
        *thingp = trc->nursery->moveToTenured(trc, static_cast<JSObject *>(*thingp));
}
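/*
 * Note (illustrative, not from the original file): ShouldMoveToTenured()
 * above filters tracer callbacks down to the interesting case. It rejects
 * three situations: the edge itself lives in the nursery (intra-nursery
 * pointers are fixed up by the list walk instead), the target is already
 * tenured, and the target was already moved (in which case
 * getForwardedPointer() updates *thingp to the new location as a side
 * effect).
 */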
static void
CheckHashTablesAfterMovingGC(JSRuntime *rt)
{
#ifdef JS_GC_ZEAL
    if (rt->gcZeal() == ZealCheckHashTablesOnMinorGC) {
        /* Check that internal hash tables no longer have any pointers into the nursery. */
        for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
            c->checkNewTypeObjectTableAfterMovingGC();
            c->checkInitialShapesTableAfterMovingGC();
            c->checkWrapperMapAfterMovingGC();
            if (c->debugScopes)
                c->debugScopes->checkHashTablesAfterMovingGC(rt);
        }
    }
#endif
}

#ifdef PROFILE_NURSERY
#define TIME_START(name) int64_t timestampStart_##name = PRMJ_Now()
#define TIME_END(name) int64_t timestampEnd_##name = PRMJ_Now()
#define TIME_TOTAL(name) (timestampEnd_##name - timestampStart_##name)
#else
#define TIME_START(name)
#define TIME_END(name)
#define TIME_TOTAL(name)
#endif
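/*
 * Illustrative expansion (not part of the original file): with
 * PROFILE_NURSERY defined,
 *
 *     TIME_START(sweep);  =>  int64_t timestampStart_sweep = PRMJ_Now();
 *     TIME_END(sweep);    =>  int64_t timestampEnd_sweep = PRMJ_Now();
 *     TIME_TOTAL(sweep)   =>  (timestampEnd_sweep - timestampStart_sweep)
 *
 * so each phase of collect() below is bracketed by microsecond timestamps.
 * Without PROFILE_NURSERY all three macros expand to nothing and the
 * instrumentation has no cost in normal builds.
 */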
void
js::Nursery::collect(JSRuntime *rt, JS::gcreason::Reason reason, TypeObjectList *pretenureTypes)
{
    JS_AbortIfWrongThread(rt);

    if (rt->mainThread.suppressGC)
        return;

    if (!isEnabled())
        return;

    if (isEmpty())
        return;

    rt->gcStats.count(gcstats::STAT_MINOR_GC);

    TIME_START(total);

    AutoStopVerifyingBarriers av(rt, false);

    // Move objects pointed to by roots from the nursery to the major heap.
    MinorCollectionTracer trc(rt, this);

    // Mark the store buffer. This must happen first.
    StoreBuffer &sb = rt->gcStoreBuffer;
    TIME_START(markValues);
    sb.markValues(&trc);
    TIME_END(markValues);

    TIME_START(markCells);
    sb.markCells(&trc);
    TIME_END(markCells);

    TIME_START(markSlots);
    sb.markSlots(&trc);
    TIME_END(markSlots);

    TIME_START(markWholeCells);
    sb.markWholeCells(&trc);
    TIME_END(markWholeCells);

    TIME_START(markRelocatableValues);
    sb.markRelocatableValues(&trc);
    TIME_END(markRelocatableValues);

    TIME_START(markRelocatableCells);
    sb.markRelocatableCells(&trc);
    TIME_END(markRelocatableCells);

    TIME_START(markGenericEntries);
    sb.markGenericEntries(&trc);
    TIME_END(markGenericEntries);

    TIME_START(checkHashTables);
    CheckHashTablesAfterMovingGC(rt);
    TIME_END(checkHashTables);

    TIME_START(markRuntime);
    MarkRuntime(&trc);
    TIME_END(markRuntime);

    TIME_START(markDebugger);
    Debugger::markAll(&trc);
    TIME_END(markDebugger);

    TIME_START(clearNewObjectCache);
    rt->newObjectCache.clearNurseryObjects(rt);
    TIME_END(clearNewObjectCache);

    // Most of the work is done here. This loop iterates over objects that have
    // been moved to the major heap. If these objects have any outgoing pointers
    // to the nursery, then those nursery objects get moved as well, until no
    // objects are left to move. That is, we iterate to a fixed point.
    TIME_START(collectToFP);
    TenureCountCache tenureCounts;
    collectToFixedPoint(&trc, tenureCounts);
    TIME_END(collectToFP);

    // Update the array buffer objects' view lists.
    TIME_START(sweepArrayBufferViewList);
    for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
        if (!c->gcLiveArrayBuffers.empty())
            ArrayBufferObject::sweep(c);
    }
    TIME_END(sweepArrayBufferViewList);

    // Update any slot or element pointers whose destination has been tenured.
    TIME_START(updateJitActivations);
#ifdef JS_ION
    js::jit::UpdateJitActivationsForMinorGC(rt, &trc);
#endif
    TIME_END(updateJitActivations);

    // Resize the nursery.
    TIME_START(resize);
    double promotionRate = trc.tenuredSize / double(allocationEnd() - start());
    if (promotionRate > 0.05)
        growAllocableSpace();
    else if (promotionRate < 0.01)
        shrinkAllocableSpace();
    TIME_END(resize);

    // If we are promoting much of the nursery, or we exhausted the store
    // buffer with pointers to nursery things (which will force a collection
    // well before the nursery is full), look for object types that are
    // getting promoted excessively and try to pretenure them.
    TIME_START(pretenure);
    if (pretenureTypes && (promotionRate > 0.8 || reason == JS::gcreason::FULL_STORE_BUFFER)) {
        for (size_t i = 0; i < ArrayLength(tenureCounts.entries); i++) {
            const TenureCount &entry = tenureCounts.entries[i];
            if (entry.count >= 3000)
                pretenureTypes->append(entry.type); // ignore alloc failure
        }
    }
    TIME_END(pretenure);

    // Sweep.
    TIME_START(freeHugeSlots);
    freeHugeSlots(rt);
    TIME_END(freeHugeSlots);

    TIME_START(sweep);
    sweep(rt);
    TIME_END(sweep);

    TIME_START(clearStoreBuffer);
    rt->gcStoreBuffer.clear();
    TIME_END(clearStoreBuffer);

    // We ignore gcMaxBytes when allocating for minor collection. However, if
    // we overflowed, we disable the nursery. The next time we allocate, we'll
    // fail because gcBytes >= gcMaxBytes.
    if (rt->gcBytes >= rt->gcMaxBytes)
        disable();

    TIME_END(total);

#ifdef PROFILE_NURSERY
    int64_t totalTime = TIME_TOTAL(total);

    if (totalTime >= GCReportThreshold) {
        static bool printedHeader = false;
        if (!printedHeader) {
            fprintf(stderr,
                    "MinorGC: Reason PRate Size Time mkVals mkClls mkSlts mkWCll mkRVal mkRCll mkGnrc ckTbls mkRntm mkDbgr clrNOC collct swpABO updtIn resize pretnr frSlts clrSB sweep\n");
            printedHeader = true;
        }

#define FMT " %6" PRIu64
        fprintf(stderr,
                "MinorGC: %20s %5.1f%% %4d" FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT "\n",
                js::gcstats::ExplainReason(reason),
                promotionRate * 100,
                numActiveChunks_,
                totalTime,
                TIME_TOTAL(markValues),
                TIME_TOTAL(markCells),
                TIME_TOTAL(markSlots),
                TIME_TOTAL(markWholeCells),
                TIME_TOTAL(markRelocatableValues),
                TIME_TOTAL(markRelocatableCells),
                TIME_TOTAL(markGenericEntries),
                TIME_TOTAL(checkHashTables),
                TIME_TOTAL(markRuntime),
                TIME_TOTAL(markDebugger),
                TIME_TOTAL(clearNewObjectCache),
                TIME_TOTAL(collectToFP),
                TIME_TOTAL(sweepArrayBufferViewList),
                TIME_TOTAL(updateJitActivations),
                TIME_TOTAL(resize),
                TIME_TOTAL(pretenure),
                TIME_TOTAL(freeHugeSlots),
                TIME_TOTAL(clearStoreBuffer),
                TIME_TOTAL(sweep));
#undef FMT
    }
#endif
}
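/*
 * Worked example (illustrative, not from the original file) of the resize
 * policy in collect() above: promotionRate is tenured bytes divided by the
 * bytes the nursery handed out this cycle. With a 1 MiB allocable span,
 * tenuring 100 KiB gives a rate of ~0.098, above the 0.05 growth threshold,
 * so the nursery doubles its active chunks; tenuring 8 KiB gives ~0.008,
 * below the 0.01 threshold, so it gives one chunk back. Rates in between
 * leave the size unchanged, providing hysteresis so the nursery does not
 * oscillate between sizes.
 */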
void
js::Nursery::freeHugeSlots(JSRuntime *rt)
{
    for (HugeSlotsSet::Range r = hugeSlots.all(); !r.empty(); r.popFront())
        rt->defaultFreeOp()->free_(r.front());
    hugeSlots.clear();
}

void
js::Nursery::sweep(JSRuntime *rt)
{
#ifdef JS_GC_ZEAL
    /* Poison the nursery contents so touching a freed object will crash. */
    JS_POISON((void *)start(), JS_SWEPT_NURSERY_PATTERN, NurserySize);
    for (int i = 0; i < NumNurseryChunks; ++i)
        initChunk(i);

    if (rt->gcZeal_ == ZealGenerationalGCValue) {
        MOZ_ASSERT(numActiveChunks_ == NumNurseryChunks);

        /* Only reset the alloc point when we are close to the end. */
        if (currentChunk_ + 1 == NumNurseryChunks)
            setCurrentChunk(0);
    } else
#endif
    {
#ifdef JS_CRASH_DIAGNOSTICS
        JS_POISON((void *)start(), JS_SWEPT_NURSERY_PATTERN, allocationEnd() - start());
        for (int i = 0; i < numActiveChunks_; ++i)
            chunk(i).trailer.runtime = runtime();
#endif
        setCurrentChunk(0);
    }

    /* Set current start position for isEmpty checks. */
    currentStart_ = position();
}
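/*
 * Note (illustrative, not from the original file): the two functions below
 * implement an asymmetric resize policy: growth is multiplicative (the
 * active chunk count doubles, capped at NumNurseryChunks) while shrinking is
 * additive (one chunk at a time, floored at one chunk). Starting from one
 * chunk, repeated growth visits 1, 2, 4, ... chunks, so a workload with a
 * high promotion rate reaches the maximum nursery size in logarithmically
 * many minor collections, while a quiet workload releases memory gradually.
 */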
void
js::Nursery::growAllocableSpace()
{
#ifdef JS_GC_ZEAL
    MOZ_ASSERT_IF(runtime()->gcZeal_ == ZealGenerationalGCValue,
                  numActiveChunks_ == NumNurseryChunks);
#endif
    numActiveChunks_ = Min(numActiveChunks_ * 2, NumNurseryChunks);
}

void
js::Nursery::shrinkAllocableSpace()
{
#ifdef JS_GC_ZEAL
    if (runtime()->gcZeal_ == ZealGenerationalGCValue)
        return;
#endif
    numActiveChunks_ = Max(numActiveChunks_ - 1, 1);
    updateDecommittedRegion();
}

#endif /* JSGC_GENERATIONAL */