js/src/gc/Nursery.cpp

Wed, 31 Dec 2014 06:09:35 +0100

author
Michael Schloh von Bennewitz <michael@schloh.com>
date
Wed, 31 Dec 2014 06:09:35 +0100
changeset 0
6474c204b198
permissions
-rw-r--r--

Cloned from upstream origin tor-browser at tag tor-browser-31.3.0esr-4.5-1-build1,
revision ID fc1c9ff7c1b2defdbc039f12214767608f46423f, for hacking purposes.

michael@0 1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
michael@0 2 * vim: set ts=8 sw=4 et tw=78:
michael@0 3 *
michael@0 4 * This Source Code Form is subject to the terms of the Mozilla Public
michael@0 5 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
michael@0 6 * You can obtain one at http://mozilla.org/MPL/2.0/. */
michael@0 7
michael@0 8 #ifdef JSGC_GENERATIONAL
michael@0 9
michael@0 10 #include "gc/Nursery-inl.h"
michael@0 11
michael@0 12 #include "jscompartment.h"
michael@0 13 #include "jsgc.h"
michael@0 14 #include "jsinfer.h"
michael@0 15 #include "jsutil.h"
michael@0 16 #include "prmjtime.h"
michael@0 17
michael@0 18 #include "gc/GCInternals.h"
michael@0 19 #include "gc/Memory.h"
michael@0 20 #ifdef JS_ION
michael@0 21 #include "jit/IonFrames.h"
michael@0 22 #endif
michael@0 23 #include "mozilla/IntegerPrintfMacros.h"
michael@0 24 #include "vm/ArrayObject.h"
michael@0 25 #include "vm/Debugger.h"
michael@0 26 #if defined(DEBUG)
michael@0 27 #include "vm/ScopeObject.h"
michael@0 28 #endif
michael@0 29 #include "vm/TypedArrayObject.h"
michael@0 30
michael@0 31 #include "jsgcinlines.h"
michael@0 32
michael@0 33 #include "vm/ObjectImpl-inl.h"
michael@0 34
using namespace js;
using namespace gc;
using namespace mozilla;

// Define to enable per-phase minor-GC timing via the TIME_* macros below.
//#define PROFILE_NURSERY

#ifdef PROFILE_NURSERY
/*
 * Print timing information for minor GCs that take longer than this time in microseconds.
 */
static int64_t GCReportThreshold = INT64_MAX;
#endif
michael@0 47
bool
js::Nursery::init()
{
    // Must not already be initialized: start() is only nonzero after mapping.
    JS_ASSERT(start() == 0);

    if (!hugeSlots.init())
        return false;

    // Reserve the entire nursery address range up front; chunks beyond
    // numActiveChunks_ are decommitted by updateDecommittedRegion() below.
    void *heap = MapAlignedPages(runtime(), NurserySize, Alignment);
    if (!heap)
        return false;

    JSRuntime *rt = runtime();
    rt->gcNurseryStart_ = uintptr_t(heap);
    currentStart_ = start();
    rt->gcNurseryEnd_ = chunk(LastNurseryChunk).end();
    numActiveChunks_ = 1;
    JS_POISON(heap, JS_FRESH_NURSERY_PATTERN, NurserySize);
    setCurrentChunk(0);
    updateDecommittedRegion();

#ifdef PROFILE_NURSERY
    // JS_MINORGC_TIME=<usec> sets the reporting threshold for minor-GC timing.
    char *env = getenv("JS_MINORGC_TIME");
    if (env)
        GCReportThreshold = atoi(env);
#endif

    JS_ASSERT(isEnabled());
    return true;
}
michael@0 78
michael@0 79 js::Nursery::~Nursery()
michael@0 80 {
michael@0 81 if (start())
michael@0 82 UnmapPages(runtime(), (void *)start(), NurserySize);
michael@0 83 }
michael@0 84
void
js::Nursery::enable()
{
    // Re-activating the nursery is only legal when no live things remain in it.
    JS_ASSERT(isEmpty());
    if (isEnabled())
        return;
    numActiveChunks_ = 1;
    setCurrentChunk(0);
    currentStart_ = position();
#ifdef JS_GC_ZEAL
    // Under generational-GC zeal, stress the nursery by keeping it fully active.
    if (runtime()->gcZeal_ == ZealGenerationalGCValue)
        enterZealMode();
#endif
}
michael@0 99
michael@0 100 void
michael@0 101 js::Nursery::disable()
michael@0 102 {
michael@0 103 JS_ASSERT(isEmpty());
michael@0 104 if (!isEnabled())
michael@0 105 return;
michael@0 106 numActiveChunks_ = 0;
michael@0 107 currentEnd_ = 0;
michael@0 108 updateDecommittedRegion();
michael@0 109 }
michael@0 110
bool
js::Nursery::isEmpty() const
{
    JS_ASSERT(runtime_);
    // A disabled nursery trivially holds nothing.
    if (!isEnabled())
        return true;
    // Outside of generational zeal mode, allocation always begins at start().
    JS_ASSERT_IF(runtime_->gcZeal_ != ZealGenerationalGCValue, currentStart_ == start());
    return position() == currentStart_;
}
michael@0 120
michael@0 121 JSObject *
michael@0 122 js::Nursery::allocateObject(JSContext *cx, size_t size, size_t numDynamic)
michael@0 123 {
michael@0 124 /* Ensure there's enough space to replace the contents with a RelocationOverlay. */
michael@0 125 JS_ASSERT(size >= sizeof(RelocationOverlay));
michael@0 126
michael@0 127 /* Attempt to allocate slots contiguously after object, if possible. */
michael@0 128 if (numDynamic && numDynamic <= MaxNurserySlots) {
michael@0 129 size_t totalSize = size + sizeof(HeapSlot) * numDynamic;
michael@0 130 JSObject *obj = static_cast<JSObject *>(allocate(totalSize));
michael@0 131 if (obj) {
michael@0 132 obj->setInitialSlots(reinterpret_cast<HeapSlot *>(size_t(obj) + size));
michael@0 133 return obj;
michael@0 134 }
michael@0 135 /* If we failed to allocate as a block, retry with out-of-line slots. */
michael@0 136 }
michael@0 137
michael@0 138 HeapSlot *slots = nullptr;
michael@0 139 if (numDynamic) {
michael@0 140 slots = allocateHugeSlots(cx, numDynamic);
michael@0 141 if (MOZ_UNLIKELY(!slots))
michael@0 142 return nullptr;
michael@0 143 }
michael@0 144
michael@0 145 JSObject *obj = static_cast<JSObject *>(allocate(size));
michael@0 146
michael@0 147 if (obj)
michael@0 148 obj->setInitialSlots(slots);
michael@0 149 else
michael@0 150 freeSlots(cx, slots);
michael@0 151
michael@0 152 return obj;
michael@0 153 }
michael@0 154
void *
js::Nursery::allocate(size_t size)
{
    JS_ASSERT(isEnabled());
    JS_ASSERT(!runtime()->isHeapBusy());
    JS_ASSERT(position() >= currentStart_);

    // Bump allocation: on chunk exhaustion, advance to the next active chunk,
    // or fail (the caller then falls back, typically forcing a minor GC).
    if (position() + size > currentEnd()) {
        if (currentChunk_ + 1 == numActiveChunks_)
            return nullptr;
        setCurrentChunk(currentChunk_ + 1);
    }

    void *thing = (void *)position();
    position_ = position() + size;

    JS_EXTRA_POISON(thing, JS_ALLOCATED_NURSERY_PATTERN, size);
    return thing;
}
michael@0 174
michael@0 175 /* Internally, this function is used to allocate elements as well as slots. */
michael@0 176 HeapSlot *
michael@0 177 js::Nursery::allocateSlots(JSContext *cx, JSObject *obj, uint32_t nslots)
michael@0 178 {
michael@0 179 JS_ASSERT(obj);
michael@0 180 JS_ASSERT(nslots > 0);
michael@0 181
michael@0 182 if (!isInside(obj))
michael@0 183 return cx->pod_malloc<HeapSlot>(nslots);
michael@0 184
michael@0 185 if (nslots > MaxNurserySlots)
michael@0 186 return allocateHugeSlots(cx, nslots);
michael@0 187
michael@0 188 size_t size = sizeof(HeapSlot) * nslots;
michael@0 189 HeapSlot *slots = static_cast<HeapSlot *>(allocate(size));
michael@0 190 if (slots)
michael@0 191 return slots;
michael@0 192
michael@0 193 return allocateHugeSlots(cx, nslots);
michael@0 194 }
michael@0 195
michael@0 196 ObjectElements *
michael@0 197 js::Nursery::allocateElements(JSContext *cx, JSObject *obj, uint32_t nelems)
michael@0 198 {
michael@0 199 JS_ASSERT(nelems >= ObjectElements::VALUES_PER_HEADER);
michael@0 200 return reinterpret_cast<ObjectElements *>(allocateSlots(cx, obj, nelems));
michael@0 201 }
michael@0 202
michael@0 203 HeapSlot *
michael@0 204 js::Nursery::reallocateSlots(JSContext *cx, JSObject *obj, HeapSlot *oldSlots,
michael@0 205 uint32_t oldCount, uint32_t newCount)
michael@0 206 {
michael@0 207 size_t oldSize = oldCount * sizeof(HeapSlot);
michael@0 208 size_t newSize = newCount * sizeof(HeapSlot);
michael@0 209
michael@0 210 if (!isInside(obj))
michael@0 211 return static_cast<HeapSlot *>(cx->realloc_(oldSlots, oldSize, newSize));
michael@0 212
michael@0 213 if (!isInside(oldSlots)) {
michael@0 214 HeapSlot *newSlots = static_cast<HeapSlot *>(cx->realloc_(oldSlots, oldSize, newSize));
michael@0 215 if (oldSlots != newSlots) {
michael@0 216 hugeSlots.remove(oldSlots);
michael@0 217 /* If this put fails, we will only leak the slots. */
michael@0 218 (void)hugeSlots.put(newSlots);
michael@0 219 }
michael@0 220 return newSlots;
michael@0 221 }
michael@0 222
michael@0 223 /* The nursery cannot make use of the returned slots data. */
michael@0 224 if (newCount < oldCount)
michael@0 225 return oldSlots;
michael@0 226
michael@0 227 HeapSlot *newSlots = allocateSlots(cx, obj, newCount);
michael@0 228 PodCopy(newSlots, oldSlots, oldCount);
michael@0 229 return newSlots;
michael@0 230 }
michael@0 231
michael@0 232 ObjectElements *
michael@0 233 js::Nursery::reallocateElements(JSContext *cx, JSObject *obj, ObjectElements *oldHeader,
michael@0 234 uint32_t oldCount, uint32_t newCount)
michael@0 235 {
michael@0 236 HeapSlot *slots = reallocateSlots(cx, obj, reinterpret_cast<HeapSlot *>(oldHeader),
michael@0 237 oldCount, newCount);
michael@0 238 return reinterpret_cast<ObjectElements *>(slots);
michael@0 239 }
michael@0 240
michael@0 241 void
michael@0 242 js::Nursery::freeSlots(JSContext *cx, HeapSlot *slots)
michael@0 243 {
michael@0 244 if (!isInside(slots)) {
michael@0 245 hugeSlots.remove(slots);
michael@0 246 js_free(slots);
michael@0 247 }
michael@0 248 }
michael@0 249
michael@0 250 HeapSlot *
michael@0 251 js::Nursery::allocateHugeSlots(JSContext *cx, size_t nslots)
michael@0 252 {
michael@0 253 HeapSlot *slots = cx->pod_malloc<HeapSlot>(nslots);
michael@0 254 /* If this put fails, we will only leak the slots. */
michael@0 255 (void)hugeSlots.put(slots);
michael@0 256 return slots;
michael@0 257 }
michael@0 258
michael@0 259 void
michael@0 260 js::Nursery::notifyInitialSlots(Cell *cell, HeapSlot *slots)
michael@0 261 {
michael@0 262 if (isInside(cell) && !isInside(slots)) {
michael@0 263 /* If this put fails, we will only leak the slots. */
michael@0 264 (void)hugeSlots.put(slots);
michael@0 265 }
michael@0 266 }
michael@0 267
namespace js {
namespace gc {

/*
 * Tracer driving a single minor collection. The constructor/destructor pair
 * saves and restores the runtime state that must be disabled while moving
 * nursery things (see member and constructor comments below).
 */
class MinorCollectionTracer : public JSTracer
{
  public:
    Nursery *nursery;
    AutoTraceSession session;

    /* Amount of data moved to the tenured generation during collection. */
    size_t tenuredSize;

    /*
     * This list is threaded through the Nursery using the space from already
     * moved things. The list is used to fix up the moved things and to find
     * things held live by intra-Nursery pointers.
     */
    RelocationOverlay *head;
    RelocationOverlay **tail;

    /* Save and restore all of the runtime state we use during MinorGC. */
    bool savedRuntimeNeedBarrier;
    AutoDisableProxyCheck disableStrictProxyChecking;
    AutoEnterOOMUnsafeRegion oomUnsafeRegion;
    ArrayBufferVector liveArrayBuffers;

    /* Insert the given relocation entry into the list of things to visit. */
    MOZ_ALWAYS_INLINE void insertIntoFixupList(RelocationOverlay *entry) {
        *tail = entry;
        tail = &entry->next_;
        *tail = nullptr;
    }

    MinorCollectionTracer(JSRuntime *rt, Nursery *nursery)
      : JSTracer(rt, Nursery::MinorGCCallback, TraceWeakMapKeysValues),
        nursery(nursery),
        session(rt, MinorCollecting),
        tenuredSize(0),
        head(nullptr),
        tail(&head),
        savedRuntimeNeedBarrier(rt->needsBarrier()),
        disableStrictProxyChecking(rt)
    {
        rt->gcNumber++;

        /*
         * We disable the runtime needsBarrier() check so that pre-barriers do
         * not fire on objects that have been relocated. The pre-barrier's
         * call to obj->zone() will try to look through shape_, which is now
         * the relocation magic and will crash. However, zone->needsBarrier()
         * must still be set correctly so that allocations we make in minor
         * GCs between incremental slices will allocate their objects marked.
         */
        rt->setNeedsBarrier(false);

        /*
         * We use the live array buffer lists to track traced buffers so we can
         * sweep their dead views. Incremental collection also use these lists,
         * so we may need to save and restore their contents here.
         */
        if (rt->gcIncrementalState != NO_INCREMENTAL) {
            for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
                if (!ArrayBufferObject::saveArrayBufferList(c, liveArrayBuffers))
                    CrashAtUnhandlableOOM("OOM while saving live array buffers");
                ArrayBufferObject::resetArrayBufferList(c);
            }
        }
    }

    ~MinorCollectionTracer() {
        /* Restore the saved barrier flag and any saved array buffer lists. */
        runtime()->setNeedsBarrier(savedRuntimeNeedBarrier);
        if (runtime()->gcIncrementalState != NO_INCREMENTAL)
            ArrayBufferObject::restoreArrayBufferLists(liveArrayBuffers);
    }
};

} /* namespace gc */
} /* namespace js */
michael@0 346
/* Choose the tenured AllocKind to use for the copy of |obj| during tenuring. */
static AllocKind
GetObjectAllocKindForCopy(JSRuntime *rt, JSObject *obj)
{
    if (obj->is<ArrayObject>()) {
        JS_ASSERT(obj->numFixedSlots() == 0);

        /* Use minimal size object if we are just going to copy the pointer. */
        if (!IsInsideNursery(rt, (void *)obj->getElementsHeader()))
            return FINALIZE_OBJECT0_BACKGROUND;

        /* Otherwise size the copy so the elements can be re-inlined. */
        size_t nelements = obj->getDenseCapacity();
        return GetBackgroundAllocKind(GetGCArrayKind(nelements));
    }

    if (obj->is<JSFunction>())
        return obj->as<JSFunction>().getAllocKind();

    /*
     * Typed arrays in the nursery may have a lazily allocated buffer, make
     * sure there is room for the array's fixed data when moving the array.
     */
    if (obj->is<TypedArrayObject>() && !obj->as<TypedArrayObject>().buffer()) {
        size_t nbytes = obj->as<TypedArrayObject>().byteLength();
        return GetBackgroundAllocKind(TypedArrayObject::AllocKindForLazyBuffer(nbytes));
    }

    AllocKind kind = GetGCObjectFixedSlotsKind(obj->numFixedSlots());
    JS_ASSERT(!IsBackgroundFinalized(kind));
    JS_ASSERT(CanBeFinalizedInBackground(kind, obj->getClass()));
    return GetBackgroundAllocKind(kind);
}
michael@0 378
michael@0 379 void *
michael@0 380 js::Nursery::allocateFromTenured(Zone *zone, AllocKind thingKind)
michael@0 381 {
michael@0 382 void *t = zone->allocator.arenas.allocateFromFreeList(thingKind, Arena::thingSize(thingKind));
michael@0 383 if (t)
michael@0 384 return t;
michael@0 385 zone->allocator.arenas.checkEmptyFreeList(thingKind);
michael@0 386 return zone->allocator.arenas.allocateFromArena(zone, thingKind);
michael@0 387 }
michael@0 388
void
js::Nursery::setSlotsForwardingPointer(HeapSlot *oldSlots, HeapSlot *newSlots, uint32_t nslots)
{
    // Stash the new location in the first word of the old (nursery) slots so
    // forwardBufferPointer can redirect stale pointers after the move.
    JS_ASSERT(nslots > 0);
    JS_ASSERT(isInside(oldSlots));
    JS_ASSERT(!isInside(newSlots));
    *reinterpret_cast<HeapSlot **>(oldSlots) = newSlots;
}
michael@0 397
michael@0 398 void
michael@0 399 js::Nursery::setElementsForwardingPointer(ObjectElements *oldHeader, ObjectElements *newHeader,
michael@0 400 uint32_t nelems)
michael@0 401 {
michael@0 402 /*
michael@0 403 * If the JIT has hoisted a zero length pointer, then we do not need to
michael@0 404 * relocate it because reads and writes to/from this pointer are invalid.
michael@0 405 */
michael@0 406 if (nelems - ObjectElements::VALUES_PER_HEADER < 1)
michael@0 407 return;
michael@0 408 JS_ASSERT(isInside(oldHeader));
michael@0 409 JS_ASSERT(!isInside(newHeader));
michael@0 410 *reinterpret_cast<HeapSlot **>(oldHeader->elements()) = newHeader->elements();
michael@0 411 }
michael@0 412
#ifdef DEBUG
/* Probe writability by rewriting the word in place; crashes if read-only. */
static bool IsWriteableAddress(void *ptr)
{
    volatile uint64_t *word = reinterpret_cast<volatile uint64_t *>(ptr);
    uint64_t value = *word;
    *word = value;
    return true;
}
#endif
michael@0 421
void
js::Nursery::forwardBufferPointer(HeapSlot **pSlotsElems)
{
    // Redirect a slots/elements pointer through the forwarding word written
    // by setSlotsForwardingPointer / setElementsForwardingPointer.
    HeapSlot *old = *pSlotsElems;

    if (!isInside(old))
        return;

    /*
     * If the elements buffer is zero length, the "first" item could be inside
     * of the next object or past the end of the allocable area. However,
     * since we always store the runtime as the last word in the nursery,
     * isInside will still be true, even if this zero-size allocation abuts the
     * end of the allocable area. Thus, it is always safe to read the first
     * word of |old| here.
     */
    *pSlotsElems = *reinterpret_cast<HeapSlot **>(old);
    JS_ASSERT(!isInside(*pSlotsElems));
    JS_ASSERT(IsWriteableAddress(*pSlotsElems));
}
michael@0 442
// Structure for counting how many times objects of a particular type have been
// tenured during a minor collection.
struct TenureCount
{
    types::TypeObject *type;
    // Number of tenured objects observed with this type this minor GC.
    int count;
};
michael@0 450
// Keep rough track of how many times we tenure objects of particular types
// during minor collections, using a fixed size hash for efficiency at the cost
// of potential collisions.
struct Nursery::TenureCountCache
{
    TenureCount entries[16];

    TenureCountCache() { PodZero(this); }

    // Map a type to its bucket. Distinct types may collide; the caller
    // (collectToFixedPoint) only counts the bucket's first occupant.
    TenureCount &findEntry(types::TypeObject *type) {
        return entries[PointerHasher<types::TypeObject *, 3>::hash(type) % ArrayLength(entries)];
    }
};
michael@0 464
michael@0 465 void
michael@0 466 js::Nursery::collectToFixedPoint(MinorCollectionTracer *trc, TenureCountCache &tenureCounts)
michael@0 467 {
michael@0 468 for (RelocationOverlay *p = trc->head; p; p = p->next()) {
michael@0 469 JSObject *obj = static_cast<JSObject*>(p->forwardingAddress());
michael@0 470 traceObject(trc, obj);
michael@0 471
michael@0 472 TenureCount &entry = tenureCounts.findEntry(obj->type());
michael@0 473 if (entry.type == obj->type()) {
michael@0 474 entry.count++;
michael@0 475 } else if (!entry.type) {
michael@0 476 entry.type = obj->type();
michael@0 477 entry.count = 1;
michael@0 478 }
michael@0 479 }
michael@0 480 }
michael@0 481
MOZ_ALWAYS_INLINE void
js::Nursery::traceObject(MinorCollectionTracer *trc, JSObject *obj)
{
    // Run the class trace hook first; it may visit class-private edges.
    const Class *clasp = obj->getClass();
    if (clasp->trace)
        clasp->trace(trc, obj);

    // Non-native objects have no slots or dense elements to scan.
    if (!obj->isNative())
        return;

    if (!obj->hasEmptyElements())
        markSlots(trc, obj->getDenseElements(), obj->getDenseInitializedLength());

    // Scan both the fixed and dynamic slot ranges.
    HeapSlot *fixedStart, *fixedEnd, *dynStart, *dynEnd;
    obj->getSlotRange(0, obj->slotSpan(), &fixedStart, &fixedEnd, &dynStart, &dynEnd);
    markSlots(trc, fixedStart, fixedEnd);
    markSlots(trc, dynStart, dynEnd);
}
michael@0 500
michael@0 501 MOZ_ALWAYS_INLINE void
michael@0 502 js::Nursery::markSlots(MinorCollectionTracer *trc, HeapSlot *vp, uint32_t nslots)
michael@0 503 {
michael@0 504 markSlots(trc, vp, vp + nslots);
michael@0 505 }
michael@0 506
michael@0 507 MOZ_ALWAYS_INLINE void
michael@0 508 js::Nursery::markSlots(MinorCollectionTracer *trc, HeapSlot *vp, HeapSlot *end)
michael@0 509 {
michael@0 510 for (; vp != end; ++vp)
michael@0 511 markSlot(trc, vp);
michael@0 512 }
michael@0 513
michael@0 514 MOZ_ALWAYS_INLINE void
michael@0 515 js::Nursery::markSlot(MinorCollectionTracer *trc, HeapSlot *slotp)
michael@0 516 {
michael@0 517 if (!slotp->isObject())
michael@0 518 return;
michael@0 519
michael@0 520 JSObject *obj = &slotp->toObject();
michael@0 521 if (!isInside(obj))
michael@0 522 return;
michael@0 523
michael@0 524 if (getForwardedPointer(&obj)) {
michael@0 525 slotp->unsafeGet()->setObject(*obj);
michael@0 526 return;
michael@0 527 }
michael@0 528
michael@0 529 JSObject *tenured = static_cast<JSObject*>(moveToTenured(trc, obj));
michael@0 530 slotp->unsafeGet()->setObject(*tenured);
michael@0 531 }
michael@0 532
void *
js::Nursery::moveToTenured(MinorCollectionTracer *trc, JSObject *src)
{
    // Allocate the tenured copy in the same zone as the nursery original.
    Zone *zone = src->zone();
    AllocKind dstKind = GetObjectAllocKindForCopy(trc->runtime(), src);
    JSObject *dst = static_cast<JSObject *>(allocateFromTenured(zone, dstKind));
    if (!dst)
        CrashAtUnhandlableOOM("Failed to allocate object while tenuring.");

    trc->tenuredSize += moveObjectToTenured(dst, src, dstKind);

    // Overwrite the nursery original with a forwarding pointer and queue the
    // moved thing for fixup of its outgoing edges (see collectToFixedPoint).
    RelocationOverlay *overlay = reinterpret_cast<RelocationOverlay *>(src);
    overlay->forwardTo(dst);
    trc->insertIntoFixupList(overlay);

    return static_cast<void *>(dst);
}
michael@0 550
size_t
js::Nursery::moveObjectToTenured(JSObject *dst, JSObject *src, AllocKind dstKind)
{
    // Returns the total byte size the tenured copy occupies, including any
    // out-of-line slots and elements allocations made while moving.
    size_t srcSize = Arena::thingSize(dstKind);
    size_t tenuredSize = srcSize;

    /*
     * Arrays do not necessarily have the same AllocKind between src and dst.
     * We deal with this by copying elements manually, possibly re-inlining
     * them if there is adequate room inline in dst.
     */
    if (src->is<ArrayObject>())
        srcSize = sizeof(ObjectImpl);

    js_memcpy(dst, src, srcSize);
    tenuredSize += moveSlotsToTenured(dst, src, dstKind);
    tenuredSize += moveElementsToTenured(dst, src, dstKind);

    if (src->is<TypedArrayObject>())
        forwardTypedArrayPointers(dst, src);

    /* The shape's list head may point into the old object. */
    if (&src->shape_ == dst->shape_->listp)
        dst->shape_->listp = &dst->shape_;

    return tenuredSize;
}
michael@0 578
void
js::Nursery::forwardTypedArrayPointers(JSObject *dst, JSObject *src)
{
    /*
     * Typed array data may be stored inline inside the object's fixed slots. If
     * so, we need update the private pointer and leave a forwarding pointer at
     * the start of the data.
     */
    TypedArrayObject &typedArray = src->as<TypedArrayObject>();
    JS_ASSERT_IF(typedArray.buffer(), !isInside(src->getPrivate()));
    if (typedArray.buffer())
        return;

    // Inline data case: point the tenured copy at its own fixed data.
    void *srcData = src->fixedData(TypedArrayObject::FIXED_DATA_START);
    void *dstData = dst->fixedData(TypedArrayObject::FIXED_DATA_START);
    JS_ASSERT(src->getPrivate() == srcData);
    dst->setPrivate(dstData);

    /*
     * We don't know the number of slots here, but
     * TypedArrayObject::AllocKindForLazyBuffer ensures that it's always at
     * least one.
     */
    size_t nslots = 1;
    setSlotsForwardingPointer(reinterpret_cast<HeapSlot*>(srcData),
                              reinterpret_cast<HeapSlot*>(dstData),
                              nslots);
}
michael@0 607
size_t
js::Nursery::moveSlotsToTenured(JSObject *dst, JSObject *src, AllocKind dstKind)
{
    /* Fixed slots have already been copied over. */
    if (!src->hasDynamicSlots())
        return 0;

    /* Huge (malloc'd) slots can simply be handed over to the tenured copy. */
    if (!isInside(src->slots)) {
        hugeSlots.remove(src->slots);
        return 0;
    }

    // Nursery-resident slots: copy to a malloc'd block and leave a
    // forwarding pointer so stale references can be fixed up later.
    Zone *zone = src->zone();
    size_t count = src->numDynamicSlots();
    dst->slots = zone->pod_malloc<HeapSlot>(count);
    if (!dst->slots)
        CrashAtUnhandlableOOM("Failed to allocate slots while tenuring.");
    PodCopy(dst->slots, src->slots, count);
    setSlotsForwardingPointer(src->slots, dst->slots, count);
    return count * sizeof(HeapSlot);
}
michael@0 629
size_t
js::Nursery::moveElementsToTenured(JSObject *dst, JSObject *src, AllocKind dstKind)
{
    if (src->hasEmptyElements())
        return 0;

    Zone *zone = src->zone();
    ObjectElements *srcHeader = src->getElementsHeader();
    ObjectElements *dstHeader;

    /* TODO Bug 874151: Prefer to put element data inline if we have space. */
    /* Huge (malloc'd) elements: transfer ownership to the tenured copy. */
    if (!isInside(srcHeader)) {
        JS_ASSERT(src->elements == dst->elements);
        hugeSlots.remove(reinterpret_cast<HeapSlot*>(srcHeader));
        return 0;
    }

    size_t nslots = ObjectElements::VALUES_PER_HEADER + srcHeader->capacity;

    /* Unlike other objects, Arrays can have fixed elements. */
    if (src->is<ArrayObject>() && nslots <= GetGCKindSlots(dstKind)) {
        dst->setFixedElements();
        dstHeader = dst->getElementsHeader();
        js_memcpy(dstHeader, srcHeader, nslots * sizeof(HeapSlot));
        setElementsForwardingPointer(srcHeader, dstHeader, nslots);
        return nslots * sizeof(HeapSlot);
    }

    JS_ASSERT(nslots >= 2);
    /* NOTE(review): nbytes is computed with sizeof(HeapValue) while the copies
       below use sizeof(HeapSlot); presumably these are the same size — confirm. */
    size_t nbytes = nslots * sizeof(HeapValue);
    dstHeader = static_cast<ObjectElements *>(zone->malloc_(nbytes));
    if (!dstHeader)
        CrashAtUnhandlableOOM("Failed to allocate elements while tenuring.");
    js_memcpy(dstHeader, srcHeader, nslots * sizeof(HeapSlot));
    setElementsForwardingPointer(srcHeader, dstHeader, nslots);
    dst->elements = dstHeader->elements();
    return nslots * sizeof(HeapSlot);
}
michael@0 668
michael@0 669 static bool
michael@0 670 ShouldMoveToTenured(MinorCollectionTracer *trc, void **thingp)
michael@0 671 {
michael@0 672 Cell *cell = static_cast<Cell *>(*thingp);
michael@0 673 Nursery &nursery = *trc->nursery;
michael@0 674 return !nursery.isInside(thingp) && nursery.isInside(cell) &&
michael@0 675 !nursery.getForwardedPointer(thingp);
michael@0 676 }
michael@0 677
michael@0 678 /* static */ void
michael@0 679 js::Nursery::MinorGCCallback(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
michael@0 680 {
michael@0 681 MinorCollectionTracer *trc = static_cast<MinorCollectionTracer *>(jstrc);
michael@0 682 if (ShouldMoveToTenured(trc, thingp))
michael@0 683 *thingp = trc->nursery->moveToTenured(trc, static_cast<JSObject *>(*thingp));
michael@0 684 }
michael@0 685
/* Debug-only (zeal) validation that no hash table still points into the nursery. */
static void
CheckHashTablesAfterMovingGC(JSRuntime *rt)
{
#ifdef JS_GC_ZEAL
    if (rt->gcZeal() == ZealCheckHashTablesOnMinorGC) {
        /* Check that internal hash tables no longer have any pointers into the nursery. */
        for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
            c->checkNewTypeObjectTableAfterMovingGC();
            c->checkInitialShapesTableAfterMovingGC();
            c->checkWrapperMapAfterMovingGC();
            if (c->debugScopes)
                c->debugScopes->checkHashTablesAfterMovingGC(rt);
        }
    }
#endif
}
michael@0 702
#ifdef PROFILE_NURSERY
// Phase timers for minor-GC profiling; each expands to a local timestamp pair.
#define TIME_START(name) int64_t timstampStart_##name = PRMJ_Now()
#define TIME_END(name) int64_t timstampEnd_##name = PRMJ_Now()
#define TIME_TOTAL(name) (timstampEnd_##name - timstampStart_##name)
#else
// Profiling disabled: the timing macros compile away to nothing.
#define TIME_START(name)
#define TIME_END(name)
#define TIME_TOTAL(name)
#endif
michael@0 712
michael@0 713 void
michael@0 714 js::Nursery::collect(JSRuntime *rt, JS::gcreason::Reason reason, TypeObjectList *pretenureTypes)
michael@0 715 {
michael@0 716 JS_AbortIfWrongThread(rt);
michael@0 717
michael@0 718 if (rt->mainThread.suppressGC)
michael@0 719 return;
michael@0 720
michael@0 721 if (!isEnabled())
michael@0 722 return;
michael@0 723
michael@0 724 if (isEmpty())
michael@0 725 return;
michael@0 726
michael@0 727 rt->gcStats.count(gcstats::STAT_MINOR_GC);
michael@0 728
michael@0 729 TIME_START(total);
michael@0 730
michael@0 731 AutoStopVerifyingBarriers av(rt, false);
michael@0 732
michael@0 733 // Move objects pointed to by roots from the nursery to the major heap.
michael@0 734 MinorCollectionTracer trc(rt, this);
michael@0 735
michael@0 736 // Mark the store buffer. This must happen first.
michael@0 737 StoreBuffer &sb = rt->gcStoreBuffer;
michael@0 738 TIME_START(markValues);
michael@0 739 sb.markValues(&trc);
michael@0 740 TIME_END(markValues);
michael@0 741
michael@0 742 TIME_START(markCells);
michael@0 743 sb.markCells(&trc);
michael@0 744 TIME_END(markCells);
michael@0 745
michael@0 746 TIME_START(markSlots);
michael@0 747 sb.markSlots(&trc);
michael@0 748 TIME_END(markSlots);
michael@0 749
michael@0 750 TIME_START(markWholeCells);
michael@0 751 sb.markWholeCells(&trc);
michael@0 752 TIME_END(markWholeCells);
michael@0 753
michael@0 754 TIME_START(markRelocatableValues);
michael@0 755 sb.markRelocatableValues(&trc);
michael@0 756 TIME_END(markRelocatableValues);
michael@0 757
michael@0 758 TIME_START(markRelocatableCells);
michael@0 759 sb.markRelocatableCells(&trc);
michael@0 760 TIME_END(markRelocatableCells);
michael@0 761
michael@0 762 TIME_START(markGenericEntries);
michael@0 763 sb.markGenericEntries(&trc);
michael@0 764 TIME_END(markGenericEntries);
michael@0 765
michael@0 766 TIME_START(checkHashTables);
michael@0 767 CheckHashTablesAfterMovingGC(rt);
michael@0 768 TIME_END(checkHashTables);
michael@0 769
michael@0 770 TIME_START(markRuntime);
michael@0 771 MarkRuntime(&trc);
michael@0 772 TIME_END(markRuntime);
michael@0 773
michael@0 774 TIME_START(markDebugger);
michael@0 775 Debugger::markAll(&trc);
michael@0 776 TIME_END(markDebugger);
michael@0 777
michael@0 778 TIME_START(clearNewObjectCache);
michael@0 779 rt->newObjectCache.clearNurseryObjects(rt);
michael@0 780 TIME_END(clearNewObjectCache);
michael@0 781
michael@0 782 // Most of the work is done here. This loop iterates over objects that have
michael@0 783 // been moved to the major heap. If these objects have any outgoing pointers
michael@0 784 // to the nursery, then those nursery objects get moved as well, until no
michael@0 785 // objects are left to move. That is, we iterate to a fixed point.
michael@0 786 TIME_START(collectToFP);
michael@0 787 TenureCountCache tenureCounts;
michael@0 788 collectToFixedPoint(&trc, tenureCounts);
michael@0 789 TIME_END(collectToFP);
michael@0 790
michael@0 791 // Update the array buffer object's view lists.
michael@0 792 TIME_START(sweepArrayBufferViewList);
michael@0 793 for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
michael@0 794 if (!c->gcLiveArrayBuffers.empty())
michael@0 795 ArrayBufferObject::sweep(c);
michael@0 796 }
michael@0 797 TIME_END(sweepArrayBufferViewList);
michael@0 798
michael@0 799 // Update any slot or element pointers whose destination has been tenured.
michael@0 800 TIME_START(updateJitActivations);
michael@0 801 #ifdef JS_ION
michael@0 802 js::jit::UpdateJitActivationsForMinorGC(rt, &trc);
michael@0 803 #endif
michael@0 804 TIME_END(updateJitActivations);
michael@0 805
michael@0 806 // Resize the nursery.
michael@0 807 TIME_START(resize);
michael@0 808 double promotionRate = trc.tenuredSize / double(allocationEnd() - start());
michael@0 809 if (promotionRate > 0.05)
michael@0 810 growAllocableSpace();
michael@0 811 else if (promotionRate < 0.01)
michael@0 812 shrinkAllocableSpace();
michael@0 813 TIME_END(resize);
michael@0 814
michael@0 815 // If we are promoting the nursery, or exhausted the store buffer with
michael@0 816 // pointers to nursery things, which will force a collection well before
michael@0 817 // the nursery is full, look for object types that are getting promoted
michael@0 818 // excessively and try to pretenure them.
michael@0 819 TIME_START(pretenure);
michael@0 820 if (pretenureTypes && (promotionRate > 0.8 || reason == JS::gcreason::FULL_STORE_BUFFER)) {
michael@0 821 for (size_t i = 0; i < ArrayLength(tenureCounts.entries); i++) {
michael@0 822 const TenureCount &entry = tenureCounts.entries[i];
michael@0 823 if (entry.count >= 3000)
michael@0 824 pretenureTypes->append(entry.type); // ignore alloc failure
michael@0 825 }
michael@0 826 }
michael@0 827 TIME_END(pretenure);
michael@0 828
michael@0 829 // Sweep.
michael@0 830 TIME_START(freeHugeSlots);
michael@0 831 freeHugeSlots(rt);
michael@0 832 TIME_END(freeHugeSlots);
michael@0 833
michael@0 834 TIME_START(sweep);
michael@0 835 sweep(rt);
michael@0 836 TIME_END(sweep);
michael@0 837
michael@0 838 TIME_START(clearStoreBuffer);
michael@0 839 rt->gcStoreBuffer.clear();
michael@0 840 TIME_END(clearStoreBuffer);
michael@0 841
michael@0 842 // We ignore gcMaxBytes when allocating for minor collection. However, if we
michael@0 843 // overflowed, we disable the nursery. The next time we allocate, we'll fail
michael@0 844 // because gcBytes >= gcMaxBytes.
michael@0 845 if (rt->gcBytes >= rt->gcMaxBytes)
michael@0 846 disable();
michael@0 847
michael@0 848 TIME_END(total);
michael@0 849
michael@0 850 #ifdef PROFILE_NURSERY
michael@0 851 int64_t totalTime = TIME_TOTAL(total);
michael@0 852
michael@0 853 if (totalTime >= GCReportThreshold) {
michael@0 854 static bool printedHeader = false;
michael@0 855 if (!printedHeader) {
michael@0 856 fprintf(stderr,
michael@0 857 "MinorGC: Reason PRate Size Time mkVals mkClls mkSlts mkWCll mkRVal mkRCll mkGnrc ckTbls mkRntm mkDbgr clrNOC collct swpABO updtIn resize pretnr frSlts clrSB sweep\n");
michael@0 858 printedHeader = true;
michael@0 859 }
michael@0 860
michael@0 861 #define FMT " %6" PRIu64
michael@0 862 fprintf(stderr,
michael@0 863 "MinorGC: %20s %5.1f%% %4d" FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT FMT "\n",
michael@0 864 js::gcstats::ExplainReason(reason),
michael@0 865 promotionRate * 100,
michael@0 866 numActiveChunks_,
michael@0 867 totalTime,
michael@0 868 TIME_TOTAL(markValues),
michael@0 869 TIME_TOTAL(markCells),
michael@0 870 TIME_TOTAL(markSlots),
michael@0 871 TIME_TOTAL(markWholeCells),
michael@0 872 TIME_TOTAL(markRelocatableValues),
michael@0 873 TIME_TOTAL(markRelocatableCells),
michael@0 874 TIME_TOTAL(markGenericEntries),
michael@0 875 TIME_TOTAL(checkHashTables),
michael@0 876 TIME_TOTAL(markRuntime),
michael@0 877 TIME_TOTAL(markDebugger),
michael@0 878 TIME_TOTAL(clearNewObjectCache),
michael@0 879 TIME_TOTAL(collectToFP),
michael@0 880 TIME_TOTAL(sweepArrayBufferViewList),
michael@0 881 TIME_TOTAL(updateJitActivations),
michael@0 882 TIME_TOTAL(resize),
michael@0 883 TIME_TOTAL(pretenure),
michael@0 884 TIME_TOTAL(freeHugeSlots),
michael@0 885 TIME_TOTAL(clearStoreBuffer),
michael@0 886 TIME_TOTAL(sweep));
michael@0 887 #undef FMT
michael@0 888 }
michael@0 889 #endif
michael@0 890 }
michael@0 891
michael@0 892 void
michael@0 893 js::Nursery::freeHugeSlots(JSRuntime *rt)
michael@0 894 {
michael@0 895 for (HugeSlotsSet::Range r = hugeSlots.all(); !r.empty(); r.popFront())
michael@0 896 rt->defaultFreeOp()->free_(r.front());
michael@0 897 hugeSlots.clear();
michael@0 898 }
michael@0 899
/*
 * Reset the nursery after a minor collection: poison the dead heap (in
 * diagnostic builds), re-initialize chunk bookkeeping, and rewind the
 * allocation point so the next nursery allocation starts from a clean state.
 *
 * NOTE(structure): when JS_GC_ZEAL is defined, the `} else` below the zeal
 * branch pairs with the brace-block that follows the #endif — i.e. the
 * non-zeal path and the zeal-disabled build share the same closing block.
 */
void
js::Nursery::sweep(JSRuntime *rt)
{
#ifdef JS_GC_ZEAL
    /* Poison the nursery contents so touching a freed object will crash. */
    JS_POISON((void *)start(), JS_SWEPT_NURSERY_PATTERN, NurserySize);
    /* Zeal builds poison and re-init every chunk, not just the active ones. */
    for (int i = 0; i < NumNurseryChunks; ++i)
        initChunk(i);

    if (rt->gcZeal_ == ZealGenerationalGCValue) {
        /* In generational-zeal mode the whole nursery stays active. */
        MOZ_ASSERT(numActiveChunks_ == NumNurseryChunks);

        /* Only reset the alloc point when we are close to the end. */
        if (currentChunk_ + 1 == NumNurseryChunks)
            setCurrentChunk(0);
    } else
#endif
    {
#ifdef JS_CRASH_DIAGNOSTICS
        /* Poison only the region actually used for allocation this cycle. */
        JS_POISON((void *)start(), JS_SWEPT_NURSERY_PATTERN, allocationEnd() - start());
        /* Restore the runtime pointers that the poison pattern overwrote. */
        for (int i = 0; i < numActiveChunks_; ++i)
            chunk(i).trailer.runtime = runtime();
#endif
        setCurrentChunk(0);
    }

    /* Set current start position for isEmpty checks. */
    currentStart_ = position();
}
michael@0 929
michael@0 930 void
michael@0 931 js::Nursery::growAllocableSpace()
michael@0 932 {
michael@0 933 #ifdef JS_GC_ZEAL
michael@0 934 MOZ_ASSERT_IF(runtime()->gcZeal_ == ZealGenerationalGCValue, numActiveChunks_ == NumNurseryChunks);
michael@0 935 #endif
michael@0 936 numActiveChunks_ = Min(numActiveChunks_ * 2, NumNurseryChunks);
michael@0 937 }
michael@0 938
michael@0 939 void
michael@0 940 js::Nursery::shrinkAllocableSpace()
michael@0 941 {
michael@0 942 #ifdef JS_GC_ZEAL
michael@0 943 if (runtime()->gcZeal_ == ZealGenerationalGCValue)
michael@0 944 return;
michael@0 945 #endif
michael@0 946 numActiveChunks_ = Max(numActiveChunks_ - 1, 1);
michael@0 947 updateDecommittedRegion();
michael@0 948 }
michael@0 949
michael@0 950 #endif /* JSGC_GENERATIONAL */

mercurial