xpcom/base/nsCycleCollector.cpp

changeset 0
6474c204b198
     1.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
     1.2 +++ b/xpcom/base/nsCycleCollector.cpp	Wed Dec 31 06:09:35 2014 +0100
     1.3 @@ -0,0 +1,3911 @@
     1.4 +/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
     1.5 +/* vim: set ts=8 sts=4 et sw=4 tw=80: */
     1.6 +/* This Source Code Form is subject to the terms of the Mozilla Public
     1.7 + * License, v. 2.0. If a copy of the MPL was not distributed with this
     1.8 + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
     1.9 +
    1.10 +//
    1.11 +// This file implements a garbage-cycle collector based on the paper
    1.12 +//
    1.13 +//   Concurrent Cycle Collection in Reference Counted Systems
    1.14 +//   Bacon & Rajan (2001), ECOOP 2001 / Springer LNCS vol 2072
    1.15 +//
    1.16 +// We are not using the concurrent or acyclic cases of that paper; so
    1.17 +// the green, red and orange colors are not used.
    1.18 +//
    1.19 +// The collector is based on tracking pointers of four colors:
    1.20 +//
    1.21 +// Black nodes are definitely live. If we ever determine a node is
    1.22 +// black, it's ok to forget about, drop from our records.
    1.23 +//
    1.24 +// White nodes are definitely garbage cycles. Once we finish with our
    1.25 +// scanning, we unlink all the white nodes and expect that by
    1.26 +// unlinking them they will self-destruct (since a garbage cycle is
    1.27 +// only keeping itself alive with internal links, by definition).
    1.28 +//
     1.29 +// Snow-white is an addition to the original algorithm. A snow-white object
     1.30 +// has reference count zero and is just waiting for deletion.
    1.31 +//
    1.32 +// Grey nodes are being scanned. Nodes that turn grey will turn
    1.33 +// either black if we determine that they're live, or white if we
    1.34 +// determine that they're a garbage cycle. After the main collection
    1.35 +// algorithm there should be no grey nodes.
    1.36 +//
    1.37 +// Purple nodes are *candidates* for being scanned. They are nodes we
    1.38 +// haven't begun scanning yet because they're not old enough, or we're
    1.39 +// still partway through the algorithm.
    1.40 +//
    1.41 +// XPCOM objects participating in garbage-cycle collection are obliged
    1.42 +// to inform us when they ought to turn purple; that is, when their
    1.43 +// refcount transitions from N+1 -> N, for nonzero N. Furthermore we
    1.44 +// require that *after* an XPCOM object has informed us of turning
    1.45 +// purple, they will tell us when they either transition back to being
    1.46 +// black (incremented refcount) or are ultimately deleted.
    1.47 +
    1.48 +// Incremental cycle collection
    1.49 +//
    1.50 +// Beyond the simple state machine required to implement incremental
    1.51 +// collection, the CC needs to be able to compensate for things the browser
    1.52 +// is doing during the collection. There are two kinds of problems. For each
    1.53 +// of these, there are two cases to deal with: purple-buffered C++ objects
    1.54 +// and JS objects.
    1.55 +
    1.56 +// The first problem is that an object in the CC's graph can become garbage.
    1.57 +// This is bad because the CC touches the objects in its graph at every
    1.58 +// stage of its operation.
    1.59 +//
    1.60 +// All cycle collected C++ objects that die during a cycle collection
    1.61 +// will end up actually getting deleted by the SnowWhiteKiller. Before
    1.62 +// the SWK deletes an object, it checks if an ICC is running, and if so,
    1.63 +// if the object is in the graph. If it is, the CC clears mPointer and
    1.64 +// mParticipant so it does not point to the raw object any more. Because
    1.65 +// objects could die any time the CC returns to the mutator, any time the CC
    1.66 +// accesses a PtrInfo it must perform a null check on mParticipant to
    1.67 +// ensure the object has not gone away.
    1.68 +//
    1.69 +// JS objects don't always run finalizers, so the CC can't remove them from
    1.70 +// the graph when they die. Fortunately, JS objects can only die during a GC,
    1.71 +// so if a GC is begun during an ICC, the browser synchronously finishes off
    1.72 +// the ICC, which clears the entire CC graph. If the GC and CC are scheduled
    1.73 +// properly, this should be rare.
    1.74 +//
    1.75 +// The second problem is that objects in the graph can be changed, say by
    1.76 +// being addrefed or released, or by having a field updated, after the object
    1.77 +// has been added to the graph. The problem is that ICC can miss a newly
    1.78 +// created reference to an object, and end up unlinking an object that is
    1.79 +// actually alive.
    1.80 +//
    1.81 +// The basic idea of the solution, from "An on-the-fly Reference Counting
    1.82 +// Garbage Collector for Java" by Levanoni and Petrank, is to notice if an
    1.83 +// object has had an additional reference to it created during the collection,
    1.84 +// and if so, don't collect it during the current collection. This avoids having
    1.85 +// to rerun the scan as in Bacon & Rajan 2001.
    1.86 +//
    1.87 +// For cycle collected C++ objects, we modify AddRef to place the object in
    1.88 +// the purple buffer, in addition to Release. Then, in the CC, we treat any
    1.89 +// objects in the purple buffer as being alive, after graph building has
    1.90 +// completed. Because they are in the purple buffer, they will be suspected
    1.91 +// in the next CC, so there's no danger of leaks. This is imprecise, because
    1.92 +// we will treat as live an object that has been Released but not AddRefed
    1.93 +// during graph building, but that's probably rare enough that the additional
    1.94 +// bookkeeping overhead is not worthwhile.
    1.95 +//
    1.96 +// For JS objects, the cycle collector is only looking at gray objects. If a
    1.97 +// gray object is touched during ICC, it will be made black by UnmarkGray.
    1.98 +// Thus, if a JS object has become black during the ICC, we treat it as live.
    1.99 +// Merged JS zones have to be handled specially: we scan all zone globals.
   1.100 +// If any are black, we treat the zone as being black.
   1.101 +
   1.102 +
   1.103 +// Safety
   1.104 +//
   1.105 +// An XPCOM object is either scan-safe or scan-unsafe, purple-safe or
   1.106 +// purple-unsafe.
   1.107 +//
   1.108 +// An nsISupports object is scan-safe if:
   1.109 +//
   1.110 +//  - It can be QI'ed to |nsXPCOMCycleCollectionParticipant|, though
   1.111 +//    this operation loses ISupports identity (like nsIClassInfo).
   1.112 +//  - Additionally, the operation |traverse| on the resulting
   1.113 +//    nsXPCOMCycleCollectionParticipant does not cause *any* refcount
   1.114 +//    adjustment to occur (no AddRef / Release calls).
   1.115 +//
   1.116 +// A non-nsISupports ("native") object is scan-safe by explicitly
   1.117 +// providing its nsCycleCollectionParticipant.
   1.118 +//
   1.119 +// An object is purple-safe if it satisfies the following properties:
   1.120 +//
   1.121 +//  - The object is scan-safe.
   1.122 +//
   1.123 +// When we receive a pointer |ptr| via
   1.124 +// |nsCycleCollector::suspect(ptr)|, we assume it is purple-safe. We
   1.125 +// can check the scan-safety, but have no way to ensure the
   1.126 +// purple-safety; objects must obey, or else the entire system falls
   1.127 +// apart. Don't involve an object in this scheme if you can't
   1.128 +// guarantee its purple-safety. The easiest way to ensure that an
   1.129 +// object is purple-safe is to use nsCycleCollectingAutoRefCnt.
   1.130 +//
   1.131 +// When we have a scannable set of purple nodes ready, we begin
   1.132 +// our walks. During the walks, the nodes we |traverse| should only
   1.133 +// feed us more scan-safe nodes, and should not adjust the refcounts
   1.134 +// of those nodes.
   1.135 +//
   1.136 +// We do not |AddRef| or |Release| any objects during scanning. We
   1.137 +// rely on the purple-safety of the roots that call |suspect| to
   1.138 +// hold, such that we will clear the pointer from the purple buffer
   1.139 +// entry to the object before it is destroyed. The pointers that are
   1.140 +// merely scan-safe we hold only for the duration of scanning, and
   1.141 +// there should be no objects released from the scan-safe set during
   1.142 +// the scan.
   1.143 +//
   1.144 +// We *do* call |Root| and |Unroot| on every white object, on
   1.145 +// either side of the calls to |Unlink|. This keeps the set of white
   1.146 +// objects alive during the unlinking.
   1.147 +//
   1.148 +
   1.149 +#if !defined(__MINGW32__)
   1.150 +#ifdef WIN32
   1.151 +#include <crtdbg.h>
   1.152 +#include <errno.h>
   1.153 +#endif
   1.154 +#endif
   1.155 +
   1.156 +#include "base/process_util.h"
   1.157 +
   1.158 +#include "mozilla/ArrayUtils.h"
   1.159 +#include "mozilla/AutoRestore.h"
   1.160 +#include "mozilla/CycleCollectedJSRuntime.h"
   1.161 +#include "mozilla/HoldDropJSObjects.h"
    1.162 +/* This must occur *after* base/process_util.h to avoid typedef conflicts. */
   1.163 +#include "mozilla/MemoryReporting.h"
   1.164 +#include "mozilla/LinkedList.h"
   1.165 +
   1.166 +#include "nsCycleCollectionParticipant.h"
   1.167 +#include "nsCycleCollectionNoteRootCallback.h"
   1.168 +#include "nsDeque.h"
   1.169 +#include "nsCycleCollector.h"
   1.170 +#include "nsThreadUtils.h"
   1.171 +#include "nsXULAppAPI.h"
   1.172 +#include "prenv.h"
   1.173 +#include "nsPrintfCString.h"
   1.174 +#include "nsTArray.h"
   1.175 +#include "nsIConsoleService.h"
   1.176 +#include "mozilla/Attributes.h"
   1.177 +#include "nsICycleCollectorListener.h"
   1.178 +#include "nsIMemoryReporter.h"
   1.179 +#include "nsIFile.h"
   1.180 +#include "nsDumpUtils.h"
   1.181 +#include "xpcpublic.h"
   1.182 +#include "GeckoProfiler.h"
   1.183 +#include "js/SliceBudget.h"
   1.184 +#include <stdint.h>
   1.185 +#include <stdio.h>
   1.186 +
   1.187 +#include "mozilla/Likely.h"
   1.188 +#include "mozilla/PoisonIOInterposer.h"
   1.189 +#include "mozilla/Telemetry.h"
   1.190 +#include "mozilla/ThreadLocal.h"
   1.191 +
   1.192 +using namespace mozilla;
   1.193 +
   1.194 +//#define COLLECT_TIME_DEBUG
   1.195 +
   1.196 +// Enable assertions that are useful for diagnosing errors in graph construction.
   1.197 +//#define DEBUG_CC_GRAPH
   1.198 +
   1.199 +#define DEFAULT_SHUTDOWN_COLLECTIONS 5
   1.200 +
   1.201 +// One to do the freeing, then another to detect there is no more work to do.
   1.202 +#define NORMAL_SHUTDOWN_COLLECTIONS 2
   1.203 +
   1.204 +// Cycle collector environment variables
   1.205 +//
   1.206 +// MOZ_CC_LOG_ALL: If defined, always log cycle collector heaps.
   1.207 +//
   1.208 +// MOZ_CC_LOG_SHUTDOWN: If defined, log cycle collector heaps at shutdown.
   1.209 +//
   1.210 +// MOZ_CC_LOG_THREAD: If set to "main", only automatically log main thread
   1.211 +// CCs. If set to "worker", only automatically log worker CCs. If set to "all",
   1.212 +// log either. The default value is "all". This must be used with either
   1.213 +// MOZ_CC_LOG_ALL or MOZ_CC_LOG_SHUTDOWN for it to do anything.
   1.214 +//
   1.215 +// MOZ_CC_LOG_PROCESS: If set to "main", only automatically log main process
   1.216 +// CCs. If set to "content", only automatically log tab CCs. If set to
   1.217 +// "plugins", only automatically log plugin CCs. If set to "all", log
   1.218 +// everything. The default value is "all". This must be used with either
   1.219 +// MOZ_CC_LOG_ALL or MOZ_CC_LOG_SHUTDOWN for it to do anything.
   1.220 +//
   1.221 +// MOZ_CC_ALL_TRACES: If set to "all", any cycle collector
   1.222 +// logging done will be WantAllTraces, which disables
   1.223 +// various cycle collector optimizations to give a fuller picture of
   1.224 +// the heap. If set to "shutdown", only shutdown logging will be WantAllTraces.
   1.225 +// The default is none.
   1.226 +//
    1.227 +// MOZ_CC_RUN_DURING_SHUTDOWN: In non-DEBUG builds, if this is set,
    1.228 +// run cycle collections at shutdown.
   1.229 +//
   1.230 +// MOZ_CC_LOG_DIRECTORY: The directory in which logs are placed (such as
   1.231 +// logs from MOZ_CC_LOG_ALL and MOZ_CC_LOG_SHUTDOWN, or other uses
   1.232 +// of nsICycleCollectorListener)
   1.233 +
   1.234 +// Various parameters of this collector can be tuned using environment
   1.235 +// variables.
   1.236 +
   1.237 +struct nsCycleCollectorParams
   1.238 +{
   1.239 +    bool mLogAll;
   1.240 +    bool mLogShutdown;
   1.241 +    bool mAllTracesAll;
   1.242 +    bool mAllTracesShutdown;
   1.243 +    bool mLogThisThread;
   1.244 +
   1.245 +    nsCycleCollectorParams() :
   1.246 +        mLogAll      (PR_GetEnv("MOZ_CC_LOG_ALL") != nullptr),
   1.247 +        mLogShutdown (PR_GetEnv("MOZ_CC_LOG_SHUTDOWN") != nullptr),
   1.248 +        mAllTracesAll(false),
   1.249 +        mAllTracesShutdown(false)
   1.250 +    {
   1.251 +        const char* logThreadEnv = PR_GetEnv("MOZ_CC_LOG_THREAD");
   1.252 +        bool threadLogging = true;
   1.253 +        if (logThreadEnv && !!strcmp(logThreadEnv, "all")) {
   1.254 +            if (NS_IsMainThread()) {
   1.255 +                threadLogging = !strcmp(logThreadEnv, "main");
   1.256 +            } else {
   1.257 +                threadLogging = !strcmp(logThreadEnv, "worker");
   1.258 +            }
   1.259 +        }
   1.260 +
   1.261 +        const char* logProcessEnv = PR_GetEnv("MOZ_CC_LOG_PROCESS");
   1.262 +        bool processLogging = true;
   1.263 +        if (logProcessEnv && !!strcmp(logProcessEnv, "all")) {
   1.264 +            switch (XRE_GetProcessType()) {
   1.265 +                case GeckoProcessType_Default:
   1.266 +                    processLogging = !strcmp(logProcessEnv, "main");
   1.267 +                    break;
   1.268 +                case GeckoProcessType_Plugin:
   1.269 +                    processLogging = !strcmp(logProcessEnv, "plugins");
   1.270 +                    break;
   1.271 +                case GeckoProcessType_Content:
   1.272 +                    processLogging = !strcmp(logProcessEnv, "content");
   1.273 +                    break;
   1.274 +                default:
   1.275 +                    processLogging = false;
   1.276 +                    break;
   1.277 +            }
   1.278 +        }
   1.279 +        mLogThisThread = threadLogging && processLogging;
   1.280 +
   1.281 +        const char* allTracesEnv = PR_GetEnv("MOZ_CC_ALL_TRACES");
   1.282 +        if (allTracesEnv) {
   1.283 +            if (!strcmp(allTracesEnv, "all")) {
   1.284 +                mAllTracesAll = true;
   1.285 +            } else if (!strcmp(allTracesEnv, "shutdown")) {
   1.286 +                mAllTracesShutdown = true;
   1.287 +            }
   1.288 +        }
   1.289 +    }
   1.290 +
   1.291 +    bool LogThisCC(bool aIsShutdown)
   1.292 +    {
   1.293 +        return (mLogAll || (aIsShutdown && mLogShutdown)) && mLogThisThread;
   1.294 +    }
   1.295 +
   1.296 +    bool AllTracesThisCC(bool aIsShutdown)
   1.297 +    {
   1.298 +        return mAllTracesAll || (aIsShutdown && mAllTracesShutdown);
   1.299 +    }
   1.300 +};
   1.301 +
   1.302 +#ifdef COLLECT_TIME_DEBUG
   1.303 +class TimeLog
   1.304 +{
   1.305 +public:
   1.306 +    TimeLog() : mLastCheckpoint(TimeStamp::Now()) {}
   1.307 +
   1.308 +    void
   1.309 +    Checkpoint(const char* aEvent)
   1.310 +    {
   1.311 +        TimeStamp now = TimeStamp::Now();
   1.312 +        uint32_t dur = (uint32_t) ((now - mLastCheckpoint).ToMilliseconds());
   1.313 +        if (dur > 0) {
   1.314 +            printf("cc: %s took %dms\n", aEvent, dur);
   1.315 +        }
   1.316 +        mLastCheckpoint = now;
   1.317 +    }
   1.318 +
   1.319 +private:
   1.320 +    TimeStamp mLastCheckpoint;
   1.321 +};
   1.322 +#else
   1.323 +class TimeLog
   1.324 +{
   1.325 +public:
   1.326 +    TimeLog() {}
   1.327 +    void Checkpoint(const char* aEvent) {}
   1.328 +};
   1.329 +#endif
   1.330 +
   1.331 +
   1.332 +////////////////////////////////////////////////////////////////////////
   1.333 +// Base types
   1.334 +////////////////////////////////////////////////////////////////////////
   1.335 +
   1.336 +struct PtrInfo;
   1.337 +
class EdgePool
{
public:
    // EdgePool allocates arrays of void*, primarily to hold PtrInfo*.
    // However, at the end of a block, the last two pointers are a null
    // and then a void** pointing to the next block.  This allows
    // EdgePool::Iterators to be a single word but still capable of crossing
    // block boundaries.

    EdgePool()
    {
        // An empty pool consists solely of the null sentinel (slot 0) and a
        // null block-list head (slot 1), so iterators are valid even before
        // any edges or blocks have been added.
        mSentinelAndBlocks[0].block = nullptr;
        mSentinelAndBlocks[1].block = nullptr;
    }

    ~EdgePool()
    {
        MOZ_ASSERT(!mSentinelAndBlocks[0].block &&
                   !mSentinelAndBlocks[1].block,
                   "Didn't call Clear()?");
    }

    // Free every block and return the pool to its freshly-constructed state.
    void Clear()
    {
        Block *b = Blocks();
        while (b) {
            Block *next = b->Next();
            delete b;
            b = next;
        }

        mSentinelAndBlocks[0].block = nullptr;
        mSentinelAndBlocks[1].block = nullptr;
    }

#ifdef DEBUG
    bool IsEmpty()
    {
        return !mSentinelAndBlocks[0].block &&
               !mSentinelAndBlocks[1].block;
    }
#endif

private:
    struct Block;
    union PtrInfoOrBlock {
        // Use a union to avoid reinterpret_cast and the ensuing
        // potential aliasing bugs.
        PtrInfo *ptrInfo;
        Block *block;
    };
    struct Block {
        enum { BlockSize = 16 * 1024 };

        PtrInfoOrBlock mPointers[BlockSize];
        Block() {
            // The last two slots are reserved: a null edge sentinel
            // followed by the link to the next block (see class comment).
            mPointers[BlockSize - 2].block = nullptr; // sentinel
            mPointers[BlockSize - 1].block = nullptr; // next block pointer
        }
        Block*& Next()          { return mPointers[BlockSize - 1].block; }
        PtrInfoOrBlock* Start() { return &mPointers[0]; }
        // End() excludes the two reserved slots.
        PtrInfoOrBlock* End()   { return &mPointers[BlockSize - 2]; }
    };

    // Store the null sentinel so that we can have valid iterators
    // before adding any edges and without adding any blocks.
    PtrInfoOrBlock mSentinelAndBlocks[2];

    Block*& Blocks()       { return mSentinelAndBlocks[1].block; }
    Block*  Blocks() const { return mSentinelAndBlocks[1].block; }

public:
    // Single-word iterator over the edges.  A null ptrInfo in the current
    // slot marks a block boundary; the following slot holds the pointer to
    // the next block.
    class Iterator
    {
    public:
        Iterator() : mPointer(nullptr) {}
        Iterator(PtrInfoOrBlock *aPointer) : mPointer(aPointer) {}
        Iterator(const Iterator& aOther) : mPointer(aOther.mPointer) {}

        Iterator& operator++()
        {
            if (mPointer->ptrInfo == nullptr) {
                // Null pointer is a sentinel for link to the next block.
                mPointer = (mPointer + 1)->block->mPointers;
            }
            ++mPointer;
            return *this;
        }

        PtrInfo* operator*() const
        {
            if (mPointer->ptrInfo == nullptr) {
                // Null pointer is a sentinel for link to the next block.
                return (mPointer + 1)->block->mPointers->ptrInfo;
            }
            return mPointer->ptrInfo;
        }
        bool operator==(const Iterator& aOther) const
            { return mPointer == aOther.mPointer; }
        bool operator!=(const Iterator& aOther) const
            { return mPointer != aOther.mPointer; }

#ifdef DEBUG_CC_GRAPH
        bool Initialized() const
        {
            return mPointer != nullptr;
        }
#endif

    private:
        PtrInfoOrBlock *mPointer;
    };

    class Builder;
    friend class Builder;
    // Appends edges to the pool, allocating and chaining new blocks on
    // demand; Mark() returns an Iterator at the next position to be filled.
    class Builder {
    public:
        Builder(EdgePool &aPool)
            : mCurrent(&aPool.mSentinelAndBlocks[0]),
              mBlockEnd(&aPool.mSentinelAndBlocks[0]),
              mNextBlockPtr(&aPool.Blocks())
        {
        }

        Iterator Mark() { return Iterator(mCurrent); }

        void Add(PtrInfo* aEdge) {
            if (mCurrent == mBlockEnd) {
                // The current block (or the initial sentinel) is full:
                // chain a fresh block onto the list and continue there.
                Block *b = new Block();
                *mNextBlockPtr = b;
                mCurrent = b->Start();
                mBlockEnd = b->End();
                mNextBlockPtr = &b->Next();
            }
            (mCurrent++)->ptrInfo = aEdge;
        }
    private:
        // mBlockEnd points to space for null sentinel
        PtrInfoOrBlock *mCurrent, *mBlockEnd;
        Block **mNextBlockPtr;
    };

    // Memory-reporter hook: sums the malloc size of every block.
    size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const {
        size_t n = 0;
        Block *b = Blocks();
        while (b) {
            n += aMallocSizeOf(b);
            b = b->Next();
        }
        return n;
    }
};
   1.490 +
// Graph-construction assertions; compiled out unless DEBUG_CC_GRAPH is
// defined above.
#ifdef DEBUG_CC_GRAPH
#define CC_GRAPH_ASSERT(b) MOZ_ASSERT(b)
#else
#define CC_GRAPH_ASSERT(b)
#endif

// Record a telemetry value under the main-thread or worker variant of the
// CYCLE_COLLECTOR* probe, depending on the current thread.
#define CC_TELEMETRY(_name, _value)                                            \
    PR_BEGIN_MACRO                                                             \
    if (NS_IsMainThread()) {                                                   \
      Telemetry::Accumulate(Telemetry::CYCLE_COLLECTOR##_name, _value);        \
    } else {                                                                   \
      Telemetry::Accumulate(Telemetry::CYCLE_COLLECTOR_WORKER##_name, _value); \
    }                                                                          \
    PR_END_MACRO

// Node colors used by the mark/scan phases; see the algorithm description
// at the top of this file.
enum NodeColor { black, white, grey };
   1.507 +
// This structure should be kept as small as possible; we may expect
// hundreds of thousands of them to be allocated and touched
// repeatedly during each cycle collection.

struct PtrInfo
{
    // The object this node describes.  (Per the incremental-CC comment at
    // the top of this file, mPointer/mParticipant are cleared if the object
    // dies while a collection is in progress, so users must null-check.)
    void *mPointer;
    nsCycleCollectionParticipant *mParticipant;
    uint32_t mColor : 2;          // a NodeColor: black, white or grey
    uint32_t mInternalRefs : 30;  // refs to this node found within the graph
    uint32_t mRefCount;           // see the constructor comment below
private:
    EdgePool::Iterator mFirstChild;

public:

    PtrInfo(void *aPointer, nsCycleCollectionParticipant *aParticipant)
        : mPointer(aPointer),
          mParticipant(aParticipant),
          mColor(grey),
          mInternalRefs(0),
          mRefCount(UINT32_MAX - 1),
          mFirstChild()
    {
        // We initialize mRefCount to a large non-zero value so
        // that it doesn't look like a JS object to the cycle collector
        // in the case where the object dies before being traversed.

        MOZ_ASSERT(aParticipant);
    }

    // Allow NodePool::Block's constructor to compile.
    PtrInfo() {
        NS_NOTREACHED("should never be called");
    }

    EdgePool::Iterator FirstChild()
    {
        CC_GRAPH_ASSERT(mFirstChild.Initialized());
        return mFirstChild;
    }

    // this PtrInfo must be part of a NodePool
    // (a node's edge list ends where the *next* node's begins; NodePool
    // allocates one spare PtrInfo per block so that this+1 is always valid)
    EdgePool::Iterator LastChild()
    {
        CC_GRAPH_ASSERT((this + 1)->mFirstChild.Initialized());
        return (this + 1)->mFirstChild;
    }

    void SetFirstChild(EdgePool::Iterator aFirstChild)
    {
        CC_GRAPH_ASSERT(aFirstChild.Initialized());
        mFirstChild = aFirstChild;
    }

    // this PtrInfo must be part of a NodePool
    void SetLastChild(EdgePool::Iterator aLastChild)
    {
        CC_GRAPH_ASSERT(aLastChild.Initialized());
        (this + 1)->mFirstChild = aLastChild;
    }
};
   1.570 +
    1.571 +/**
    1.572 + * A structure designed to be used like a linked list of PtrInfo, except
    1.573 + * that it allocates the PtrInfo in blocks of BlockSize entries at a time.
    1.574 + */
class NodePool
{
private:
    enum { BlockSize = 8 * 1024 }; // could be int template parameter

    struct Block {
        // We create and destroy Block using NS_Alloc/NS_Free rather
        // than new and delete to avoid calling its constructor and
        // destructor.
        Block()  { NS_NOTREACHED("should never be called"); }
        ~Block() { NS_NOTREACHED("should never be called"); }

        Block* mNext;
        PtrInfo mEntries[BlockSize + 1]; // +1 to store last child of last node
    };

public:
    NodePool()
        : mBlocks(nullptr),
          mLast(nullptr)
    {
    }

    ~NodePool()
    {
        MOZ_ASSERT(!mBlocks, "Didn't call Clear()?");
    }

    // Free every block (with NS_Free, matching the NS_Alloc in
    // Builder::Add) and return the pool to its empty state.
    void Clear()
    {
        Block *b = mBlocks;
        while (b) {
            Block *n = b->mNext;
            NS_Free(b);
            b = n;
        }

        mBlocks = nullptr;
        mLast = nullptr;
    }

#ifdef DEBUG
    bool IsEmpty()
    {
        return !mBlocks && !mLast;
    }
#endif

    class Builder;
    friend class Builder;
    // Appends nodes to an (initially empty) pool, raw-allocating blocks on
    // demand and placement-constructing each PtrInfo in turn.
    class Builder {
    public:
        Builder(NodePool& aPool)
            : mNextBlock(&aPool.mBlocks),
              mNext(aPool.mLast),
              mBlockEnd(nullptr)
        {
            MOZ_ASSERT(aPool.mBlocks == nullptr && aPool.mLast == nullptr,
                       "pool not empty");
        }
        PtrInfo *Add(void *aPointer, nsCycleCollectionParticipant *aParticipant)
        {
            if (mNext == mBlockEnd) {
                // Current block full (or first Add): chain on a block via
                // NS_Alloc — Block's constructor must not run (see above).
                Block *block = static_cast<Block*>(NS_Alloc(sizeof(Block)));
                *mNextBlock = block;
                mNext = block->mEntries;
                mBlockEnd = block->mEntries + BlockSize;
                block->mNext = nullptr;
                mNextBlock = &block->mNext;
            }
            return new (mNext++) PtrInfo(aPointer, aParticipant);
        }
    private:
        Block **mNextBlock;
        // mNext is bound to the pool's mLast, so nodes become visible to a
        // concurrently-advancing Enumerator as soon as they are added.
        PtrInfo *&mNext;
        PtrInfo *mBlockEnd;
    };

    class Enumerator;
    friend class Enumerator;
    // Walks the pool's nodes in insertion order; usable while a Builder is
    // still appending (see the reference members below).
    class Enumerator {
    public:
        Enumerator(NodePool& aPool)
            : mFirstBlock(aPool.mBlocks),
              mCurBlock(nullptr),
              mNext(nullptr),
              mBlockEnd(nullptr),
              mLast(aPool.mLast)
        {
        }

        bool IsDone() const
        {
            return mNext == mLast;
        }

        bool AtBlockEnd() const
        {
            return mNext == mBlockEnd;
        }

        PtrInfo* GetNext()
        {
            MOZ_ASSERT(!IsDone(), "calling GetNext when done");
            if (mNext == mBlockEnd) {
                // Step into the next block — or the first one, on the
                // initial call, when mCurBlock is still null.
                Block *nextBlock = mCurBlock ? mCurBlock->mNext : mFirstBlock;
                mNext = nextBlock->mEntries;
                mBlockEnd = mNext + BlockSize;
                mCurBlock = nextBlock;
            }
            return mNext++;
        }
    private:
        // mFirstBlock is a reference to allow an Enumerator to be constructed
        // for an empty graph.
        Block *&mFirstBlock;
        Block *mCurBlock;
        // mNext is the next value we want to return, unless mNext == mBlockEnd
        // NB: mLast is a reference to allow enumerating while building!
        PtrInfo *mNext, *mBlockEnd, *&mLast;
    };

    // Memory-reporter hook: sums the malloc size of every block.
    size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const {
        // We don't measure the things pointed to by mEntries[] because those
        // pointers are non-owning.
        size_t n = 0;
        Block *b = mBlocks;
        while (b) {
            n += aMallocSizeOf(b);
            b = b->mNext;
        }
        return n;
    }

private:
    Block *mBlocks;
    PtrInfo *mLast;
};
   1.713 +
   1.714 +
   1.715 +// Declarations for mPtrToNodeMap.
   1.716 +
// A hash entry for mPtrToNodeMap; it stores only the PtrInfo*, whose
// mPointer field serves as the hash key (see PtrToNodeMatchEntry).
struct PtrToNodeEntry : public PLDHashEntryHdr
{
    // The key is mNode->mPointer
    PtrInfo *mNode;
};
   1.722 +
   1.723 +static bool
   1.724 +PtrToNodeMatchEntry(PLDHashTable *table,
   1.725 +                    const PLDHashEntryHdr *entry,
   1.726 +                    const void *key)
   1.727 +{
   1.728 +    const PtrToNodeEntry *n = static_cast<const PtrToNodeEntry*>(entry);
   1.729 +    return n->mNode->mPointer == key;
   1.730 +}
   1.731 +
// Hash-table ops for mPtrToNodeMap: void* keys with the stock hash/move/
// clear stubs, plus a custom matcher because the key lives inside the
// entry's PtrInfo rather than in the entry itself.
static PLDHashTableOps PtrNodeOps = {
    PL_DHashAllocTable,
    PL_DHashFreeTable,
    PL_DHashVoidPtrKeyStub,
    PtrToNodeMatchEntry,
    PL_DHashMoveEntryStub,
    PL_DHashClearEntryStub,
    PL_DHashFinalizeStub,
    nullptr
};
   1.742 +
   1.743 +
// A weak-map binding recorded during graph building: the map, its key, the
// key's delegate (if any), and the value, all as graph nodes.
struct WeakMapping
{
    // map and key will be null if the corresponding objects are GC marked
    PtrInfo *mMap;
    PtrInfo *mKey;
    PtrInfo *mKeyDelegate;
    PtrInfo *mVal;
};
   1.752 +
   1.753 +class GCGraphBuilder;
   1.754 +
// The cycle collector's object graph: one PtrInfo node per object, the
// edges between them, the weak mappings observed while building, and a
// pointer-to-node lookup table.
struct GCGraph
{
    NodePool mNodes;
    EdgePool mEdges;
    nsTArray<WeakMapping> mWeakMaps;
    // The first mRootCount nodes in mNodes are the graph's roots
    // (see GraphWalker::WalkFromRoots).
    uint32_t mRootCount;

private:
    // Maps an object's canonical pointer to its PtrInfo node. A null |ops|
    // field doubles as the "table not initialized" sentinel.
    PLDHashTable mPtrToNodeMap;

public:
    GCGraph() : mRootCount(0)
    {
        mPtrToNodeMap.ops = nullptr;
    }

    ~GCGraph()
    {
        if (mPtrToNodeMap.ops) {
            PL_DHashTableFinish(&mPtrToNodeMap);
        }
    }

    // Sets up the pointer-to-node map for a new collection. The graph must
    // be empty (i.e. Clear() was called, or it was never used).
    void Init()
    {
        MOZ_ASSERT(IsEmpty(), "Failed to call GCGraph::Clear");
        PL_DHashTableInit(&mPtrToNodeMap, &PtrNodeOps, nullptr,
                          sizeof(PtrToNodeEntry), 32768);
    }

    // Releases everything from the previous collection and returns the map
    // to its uninitialized (ops == nullptr) state.
    void Clear()
    {
        mNodes.Clear();
        mEdges.Clear();
        mWeakMaps.Clear();
        mRootCount = 0;
        PL_DHashTableFinish(&mPtrToNodeMap);
        mPtrToNodeMap.ops = nullptr;
    }

#ifdef DEBUG
    bool IsEmpty()
    {
        return mNodes.IsEmpty() && mEdges.IsEmpty() &&
            mWeakMaps.IsEmpty() && mRootCount == 0 &&
            !mPtrToNodeMap.ops;
    }
#endif

    PtrInfo* FindNode(void *aPtr);
    // Returns null on OOM; the caller is responsible for tracking that.
    PtrToNodeEntry* AddNodeToMap(void *aPtr);
    void RemoveNodeFromMap(void *aPtr);

    uint32_t MapCount() const
    {
        return mPtrToNodeMap.entryCount;
    }

    void SizeOfExcludingThis(MallocSizeOf aMallocSizeOf,
                             size_t *aNodesSize, size_t *aEdgesSize,
                             size_t *aWeakMapsSize) const {
        *aNodesSize = mNodes.SizeOfExcludingThis(aMallocSizeOf);
        *aEdgesSize = mEdges.SizeOfExcludingThis(aMallocSizeOf);

        // We don't measure what the WeakMappings point to, because the
        // pointers are non-owning.
        *aWeakMapsSize = mWeakMaps.SizeOfExcludingThis(aMallocSizeOf);
    }
};
   1.824 +
   1.825 +PtrInfo*
   1.826 +GCGraph::FindNode(void *aPtr)
   1.827 +{
   1.828 +    PtrToNodeEntry *e = static_cast<PtrToNodeEntry*>(PL_DHashTableOperate(&mPtrToNodeMap, aPtr, PL_DHASH_LOOKUP));
   1.829 +    if (!PL_DHASH_ENTRY_IS_BUSY(e)) {
   1.830 +        return nullptr;
   1.831 +    }
   1.832 +    return e->mNode;
   1.833 +}
   1.834 +
   1.835 +PtrToNodeEntry*
   1.836 +GCGraph::AddNodeToMap(void *aPtr)
   1.837 +{
   1.838 +    PtrToNodeEntry *e = static_cast<PtrToNodeEntry*>(PL_DHashTableOperate(&mPtrToNodeMap, aPtr, PL_DHASH_ADD));
   1.839 +    if (!e) {
   1.840 +        // Caller should track OOMs
   1.841 +        return nullptr;
   1.842 +    }
   1.843 +    return e;
   1.844 +}
   1.845 +
// Drops aPtr's entry from the pointer-to-node map.
void
GCGraph::RemoveNodeFromMap(void *aPtr)
{
    PL_DHashTableOperate(&mPtrToNodeMap, aPtr, PL_DHASH_REMOVE);
}
   1.851 +
   1.852 +
   1.853 +static nsISupports *
   1.854 +CanonicalizeXPCOMParticipant(nsISupports *in)
   1.855 +{
   1.856 +    nsISupports* out;
   1.857 +    in->QueryInterface(NS_GET_IID(nsCycleCollectionISupports),
   1.858 +                       reinterpret_cast<void**>(&out));
   1.859 +    return out;
   1.860 +}
   1.861 +
   1.862 +static inline void
   1.863 +ToParticipant(nsISupports *s, nsXPCOMCycleCollectionParticipant **cp);
   1.864 +
   1.865 +static void
   1.866 +CanonicalizeParticipant(void **parti, nsCycleCollectionParticipant **cp)
   1.867 +{
   1.868 +    // If the participant is null, this is an nsISupports participant,
   1.869 +    // so we must QI to get the real participant.
   1.870 +
   1.871 +    if (!*cp) {
   1.872 +        nsISupports *nsparti = static_cast<nsISupports*>(*parti);
   1.873 +        nsparti = CanonicalizeXPCOMParticipant(nsparti);
   1.874 +        NS_ASSERTION(nsparti,
   1.875 +                     "Don't add objects that don't participate in collection!");
   1.876 +        nsXPCOMCycleCollectionParticipant *xcp;
   1.877 +        ToParticipant(nsparti, &xcp);
   1.878 +        *parti = nsparti;
   1.879 +        *cp = xcp;
   1.880 +    }
   1.881 +}
   1.882 +
// One slot in the purple buffer. A slot is either in use (mObject holds
// the suspected object) or free (mNextInFreeList links it into the free
// list); the low pointer bit distinguishes the two states.
struct nsPurpleBufferEntry {
  union {
    void *mObject;                        // when low bit unset
    nsPurpleBufferEntry *mNextInFreeList; // when low bit set
  };

  // The object's cycle-collecting refcount; nulled out when the entry is
  // removed or unmarked.
  nsCycleCollectingAutoRefCnt *mRefCnt;

  nsCycleCollectionParticipant *mParticipant; // nullptr for nsISupports
};
   1.893 +
   1.894 +class nsCycleCollector;
   1.895 +
// The purple buffer: holds objects suspected of being members of garbage
// cycles until a collection examines them. Entries live in fixed-size
// Blocks chained off the inline mFirstBlock; free slots within the blocks
// are threaded onto mFreeList with a low tag bit set on each link.
struct nsPurpleBuffer
{
private:
    struct Block {
        Block *mNext;
        // Try to match the size of a jemalloc bucket, to minimize slop bytes.
        // - On 32-bit platforms sizeof(nsPurpleBufferEntry) is 12, so mEntries
        //   is 16,380 bytes, which leaves 4 bytes for mNext.
        // - On 64-bit platforms sizeof(nsPurpleBufferEntry) is 24, so mEntries
        //   is 32,544 bytes, which leaves 8 bytes for mNext.
        nsPurpleBufferEntry mEntries[1365];

        Block() : mNext(nullptr) {
            // Ensure Block is the right size (see above).
            static_assert(
                sizeof(Block) == 16384 ||       // 32-bit
                sizeof(Block) == 32768,         // 64-bit
                "ill-sized nsPurpleBuffer::Block"
            );
        }

        // Calls aVisitor.Visit() on every in-use entry in this block.
        // Free-list slots are recognized (and skipped) by the tag bit in
        // their low pointer bit.
        template <class PurpleVisitor>
        void VisitEntries(nsPurpleBuffer &aBuffer, PurpleVisitor &aVisitor)
        {
            nsPurpleBufferEntry *eEnd = ArrayEnd(mEntries);
            for (nsPurpleBufferEntry *e = mEntries; e != eEnd; ++e) {
                if (!(uintptr_t(e->mObject) & uintptr_t(1))) {
                    aVisitor.Visit(aBuffer, e);
                }
            }
        }
    };

    uint32_t mCount;                // In-use entries across all blocks.
    Block mFirstBlock;              // Inline; further blocks are heap-allocated.
    nsPurpleBufferEntry *mFreeList; // Head of the tagged free-slot list.

public:
    nsPurpleBuffer()
    {
        InitBlocks();
    }

    ~nsPurpleBuffer()
    {
        FreeBlocks();
    }

    // Visits every in-use entry in every block.
    template <class PurpleVisitor>
    void VisitEntries(PurpleVisitor &aVisitor)
    {
        for (Block *b = &mFirstBlock; b; b = b->mNext) {
            b->VisitEntries(*this, aVisitor);
        }
    }

    // Resets to an empty buffer whose free list covers mFirstBlock.
    void InitBlocks()
    {
        mCount = 0;
        mFreeList = nullptr;
        StartBlock(&mFirstBlock);
    }

    // Threads every slot of aBlock onto the free list, tagging each link's
    // low bit so VisitEntries can tell free slots from live ones.
    void StartBlock(Block *aBlock)
    {
        NS_ABORT_IF_FALSE(!mFreeList, "should not have free list");

        // Put all the entries in the block on the free list.
        nsPurpleBufferEntry *entries = aBlock->mEntries;
        mFreeList = entries;
        for (uint32_t i = 1; i < ArrayLength(aBlock->mEntries); ++i) {
            entries[i - 1].mNextInFreeList =
                (nsPurpleBufferEntry*)(uintptr_t(entries + i) | 1);
        }
        // Terminate the list with a tagged null link.
        entries[ArrayLength(aBlock->mEntries) - 1].mNextInFreeList =
            (nsPurpleBufferEntry*)1;
    }

    // Unmarks any remaining purple entries, then frees all heap-allocated
    // blocks. mFirstBlock itself stays put: it lives inside |this|.
    void FreeBlocks()
    {
        if (mCount > 0)
            UnmarkRemainingPurple(&mFirstBlock);
        Block *b = mFirstBlock.mNext;
        while (b) {
            if (mCount > 0)
                UnmarkRemainingPurple(b);
            Block *next = b->mNext;
            delete b;
            b = next;
        }
        mFirstBlock.mNext = nullptr;
    }

    // Clears an entry and tells its refcount it is no longer buffered.
    struct UnmarkRemainingPurpleVisitor
    {
        void
        Visit(nsPurpleBuffer &aBuffer, nsPurpleBufferEntry *aEntry)
        {
            if (aEntry->mRefCnt) {
                aEntry->mRefCnt->RemoveFromPurpleBuffer();
                aEntry->mRefCnt = nullptr;
            }
            aEntry->mObject = nullptr;
            --aBuffer.mCount;
        }
    };

    void UnmarkRemainingPurple(Block *b)
    {
        UnmarkRemainingPurpleVisitor visitor;
        b->VisitEntries(*this, visitor);
    }

    void SelectPointers(GCGraphBuilder &builder);

    // RemoveSkippable removes entries from the purple buffer synchronously
    // (1) if aAsyncSnowWhiteFreeing is false and nsPurpleBufferEntry::mRefCnt is 0 or
    // (2) if the object's nsXPCOMCycleCollectionParticipant::CanSkip() returns true or
    // (3) if nsPurpleBufferEntry::mRefCnt->IsPurple() is false.
    // (4) If removeChildlessNodes is true, then any nodes in the purple buffer
    //     that will have no children in the cycle collector graph will also be
    //     removed. CanSkip() may be run on these children.
    void RemoveSkippable(nsCycleCollector* aCollector,
                         bool removeChildlessNodes,
                         bool aAsyncSnowWhiteFreeing,
                         CC_ForgetSkippableCallback aCb);

    // Returns a free slot, allocating a fresh block when none remain.
    MOZ_ALWAYS_INLINE nsPurpleBufferEntry* NewEntry()
    {
        if (MOZ_UNLIKELY(!mFreeList)) {
            Block *b = new Block;
            StartBlock(b);

            // Add the new block as the second block in the list.
            b->mNext = mFirstBlock.mNext;
            mFirstBlock.mNext = b;
        }

        nsPurpleBufferEntry *e = mFreeList;
        // Strip the free-list tag bit off the link to find the next slot.
        mFreeList = (nsPurpleBufferEntry*)
            (uintptr_t(mFreeList->mNextInFreeList) & ~uintptr_t(1));
        return e;
    }

    // Records object |p| (with its participant and refcount) as suspected.
    MOZ_ALWAYS_INLINE void Put(void *p, nsCycleCollectionParticipant *cp,
                               nsCycleCollectingAutoRefCnt *aRefCnt)
    {
        nsPurpleBufferEntry *e = NewEntry();

        ++mCount;

        e->mObject = p;
        e->mRefCnt = aRefCnt;
        e->mParticipant = cp;
    }

    // Detaches the entry's refcount and returns its slot to the free list.
    void Remove(nsPurpleBufferEntry *e)
    {
        MOZ_ASSERT(mCount != 0, "must have entries");

        if (e->mRefCnt) {
            e->mRefCnt->RemoveFromPurpleBuffer();
            e->mRefCnt = nullptr;
        }
        // Tag the link so the slot reads as free to VisitEntries.
        e->mNextInFreeList =
            (nsPurpleBufferEntry*)(uintptr_t(mFreeList) | uintptr_t(1));
        mFreeList = e;

        --mCount;
    }

    uint32_t Count() const
    {
        return mCount;
    }

    size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
    {
        size_t n = 0;

        // Don't measure mFirstBlock because it's within |this|.
        const Block *block = mFirstBlock.mNext;
        while (block) {
            n += aMallocSizeOf(block);
            block = block->mNext;
        }

        // mFreeList is deliberately not measured because it points into
        // the purple buffer, which is within mFirstBlock and thus within |this|.
        //
        // We also don't measure the things pointed to by mEntries[] because
        // those pointers are non-owning.

        return n;
    }
};
  1.1094 +
  1.1095 +static bool
  1.1096 +AddPurpleRoot(GCGraphBuilder &aBuilder, void *aRoot, nsCycleCollectionParticipant *aParti);
  1.1097 +
// Visitor run over the purple buffer when building the graph: feeds each
// entry to AddPurpleRoot and prunes entries that no longer need buffering.
struct SelectPointersVisitor
{
    SelectPointersVisitor(GCGraphBuilder &aBuilder)
        : mBuilder(aBuilder)
    {}

    void
    Visit(nsPurpleBuffer &aBuffer, nsPurpleBufferEntry *aEntry)
    {
        MOZ_ASSERT(aEntry->mObject, "Null object in purple buffer");
        MOZ_ASSERT(aEntry->mRefCnt->get() != 0,
                   "SelectPointersVisitor: snow-white object in the purple buffer");
        // Remove the entry if it is no longer purple, or once AddPurpleRoot
        // has successfully recorded it in the graph.
        if (!aEntry->mRefCnt->IsPurple() ||
            AddPurpleRoot(mBuilder, aEntry->mObject, aEntry->mParticipant)) {
            aBuffer.Remove(aEntry);
        }
    }

private:
    GCGraphBuilder &mBuilder;
};
  1.1119 +
// Feeds the whole buffer through SelectPointersVisitor. Every entry should
// be consumed; if any remain (AddPurpleRoot failed) the blocks are left
// untouched so those entries are not lost.
void
nsPurpleBuffer::SelectPointers(GCGraphBuilder &aBuilder)
{
    SelectPointersVisitor visitor(aBuilder);
    VisitEntries(visitor);

    NS_ASSERTION(mCount == 0, "AddPurpleRoot failed");
    if (mCount == 0) {
        // Release heap blocks and rebuild an empty free list.
        FreeBlocks();
        InitBlocks();
    }
}
  1.1132 +
// Phase of the incremental cycle collection state machine
// (stored in nsCycleCollector::mIncrementalPhase).
enum ccPhase {
    IdlePhase,
    GraphBuildingPhase,
    ScanAndCollectWhitePhase,
    CleanupPhase
};
  1.1139 +
// Which flavor of cycle collection was requested.
enum ccType {
    SliceCC,     /* If a CC is in progress, continue it. Otherwise, start a new one. */
    ManualCC,    /* Explicitly triggered. */
    ShutdownCC   /* Shutdown CC, used for finding leaks. */
};
  1.1145 +
  1.1146 +#ifdef MOZ_NUWA_PROCESS
  1.1147 +#include "ipc/Nuwa.h"
  1.1148 +#endif
  1.1149 +
  1.1150 +////////////////////////////////////////////////////////////////////////
  1.1151 +// Top level structure for the cycle collector.
  1.1152 +////////////////////////////////////////////////////////////////////////
  1.1153 +
  1.1154 +typedef js::SliceBudget SliceBudget;
  1.1155 +
  1.1156 +class JSPurpleBuffer;
  1.1157 +
// The cycle collector itself. Owns the purple buffer of suspects, the
// graph for the collection in progress, and the incremental collection
// state; implements nsIMemoryReporter to report its own sizes.
class nsCycleCollector : public nsIMemoryReporter
{
    NS_DECL_ISUPPORTS
    NS_DECL_NSIMEMORYREPORTER

    bool mActivelyCollecting;
    bool mFreeingSnowWhite;
    // mScanInProgress should be false when we're collecting white objects.
    bool mScanInProgress;
    CycleCollectorResults mResults;
    TimeStamp mCollectionStart;

    CycleCollectedJSRuntime *mJSRuntime;

    // Incremental-collection state: the current phase plus the graph,
    // builder, and node cursor that carry work between slices.
    ccPhase mIncrementalPhase;
    GCGraph mGraph;
    nsAutoPtr<GCGraphBuilder> mBuilder;
    nsAutoPtr<NodePool::Enumerator> mCurrNode;
    nsCOMPtr<nsICycleCollectorListener> mListener;

    // The thread this collector belongs to — presumably enforced by
    // CheckThreadSafety(); confirm in its definition.
    nsIThread* mThread;

    nsCycleCollectorParams mParams;

    uint32_t mWhiteNodeCount;

    // Client callbacks invoked before unlinking / during forget-skippable.
    CC_BeforeUnlinkCallback mBeforeUnlinkCB;
    CC_ForgetSkippableCallback mForgetSkippableCB;

    nsPurpleBuffer mPurpleBuf;

    // NOTE(review): these counters presumably drive the zone-merging
    // heuristic — see ShouldMergeZones().
    uint32_t mUnmergedNeeded;
    uint32_t mMergedInARow;

    JSPurpleBuffer* mJSPurpleBuffer;

public:
    nsCycleCollector();
    virtual ~nsCycleCollector();

    void RegisterJSRuntime(CycleCollectedJSRuntime *aJSRuntime);
    void ForgetJSRuntime();

    void SetBeforeUnlinkCallback(CC_BeforeUnlinkCallback aBeforeUnlinkCB)
    {
        CheckThreadSafety();
        mBeforeUnlinkCB = aBeforeUnlinkCB;
    }

    void SetForgetSkippableCallback(CC_ForgetSkippableCallback aForgetSkippableCB)
    {
        CheckThreadSafety();
        mForgetSkippableCB = aForgetSkippableCB;
    }

    void Suspect(void *n, nsCycleCollectionParticipant *cp,
                 nsCycleCollectingAutoRefCnt *aRefCnt);
    uint32_t SuspectedCount();
    void ForgetSkippable(bool aRemoveChildlessNodes, bool aAsyncSnowWhiteFreeing);
    bool FreeSnowWhite(bool aUntilNoSWInPurpleBuffer);

    // This method assumes its argument is already canonicalized.
    void RemoveObjectFromGraph(void *aPtr);

    void PrepareForGarbageCollection();

    // Runs a collection of the given type, constrained by aBudget.
    bool Collect(ccType aCCType,
                 SliceBudget &aBudget,
                 nsICycleCollectorListener *aManualListener);
    void Shutdown();

    void SizeOfIncludingThis(mozilla::MallocSizeOf aMallocSizeOf,
                             size_t *aObjectSize,
                             size_t *aGraphNodesSize,
                             size_t *aGraphEdgesSize,
                             size_t *aWeakMapsSize,
                             size_t *aPurpleBufferSize) const;

    JSPurpleBuffer* GetJSPurpleBuffer();
private:
    void CheckThreadSafety();
    void ShutdownCollect();

    void FixGrayBits(bool aForceGC);
    bool ShouldMergeZones(ccType aCCType);

    void BeginCollection(ccType aCCType, nsICycleCollectorListener *aManualListener);
    void MarkRoots(SliceBudget &aBudget);
    void ScanRoots(bool aFullySynchGraphBuild);
    void ScanIncrementalRoots();
    void ScanWeakMaps();

    // returns whether anything was collected
    bool CollectWhite();

    void CleanupAfterCollection();
};
  1.1255 +
// Boilerplate nsISupports implementation; the collector exposes only the
// nsIMemoryReporter interface.
NS_IMPL_ISUPPORTS(nsCycleCollector, nsIMemoryReporter)
  1.1257 +
  1.1258 +/**
  1.1259 + * GraphWalker is templatized over a Visitor class that must provide
  1.1260 + * the following two methods:
  1.1261 + *
  1.1262 + * bool ShouldVisitNode(PtrInfo const *pi);
  1.1263 + * void VisitNode(PtrInfo *pi);
  1.1264 + */
template <class Visitor>
class GraphWalker
{
private:
    Visitor mVisitor;

    void DoWalk(nsDeque &aQueue);

    // Pushes pi onto the work queue. A null pi is fatal; a failed (OOM)
    // push is reported to the visitor rather than dropped silently.
    void CheckedPush(nsDeque &aQueue, PtrInfo *pi)
    {
        if (!pi) {
            MOZ_CRASH();
        }
        if (!aQueue.Push(pi, fallible_t())) {
            mVisitor.Failed();
        }
    }

public:
    void Walk(PtrInfo *s0);
    void WalkFromRoots(GCGraph &aGraph);
    // copy-constructing the visitor should be cheap, and less
    // indirection than using a reference
    GraphWalker(const Visitor aVisitor) : mVisitor(aVisitor) {}
};
  1.1290 +
  1.1291 +
  1.1292 +////////////////////////////////////////////////////////////////////////
  1.1293 +// The static collector struct
  1.1294 +////////////////////////////////////////////////////////////////////////
  1.1295 +
// Per-thread bundle of the collector and its JS runtime, stored in the
// sCollectorData TLS slot below.
struct CollectorData {
  nsRefPtr<nsCycleCollector> mCollector;
  CycleCollectedJSRuntime* mRuntime;
};

// Thread-local slot holding this thread's CollectorData.
static mozilla::ThreadLocal<CollectorData*> sCollectorData;
  1.1302 +
  1.1303 +////////////////////////////////////////////////////////////////////////
  1.1304 +// Utility functions
  1.1305 +////////////////////////////////////////////////////////////////////////
  1.1306 +
  1.1307 +MOZ_NEVER_INLINE static void
  1.1308 +Fault(const char *msg, const void *ptr=nullptr)
  1.1309 +{
  1.1310 +    if (ptr)
  1.1311 +        printf("Fault in cycle collector: %s (ptr: %p)\n", msg, ptr);
  1.1312 +    else
  1.1313 +        printf("Fault in cycle collector: %s\n", msg);
  1.1314 +
  1.1315 +    NS_RUNTIMEABORT("cycle collector fault");
  1.1316 +}
  1.1317 +
// Convenience overload: reports a fault at a graph node's object pointer.
static void
Fault(const char *msg, PtrInfo *pi)
{
    Fault(msg, pi->mPointer);
}
  1.1323 +
// Maps an nsISupports object to its cycle-collection participant helper.
static inline void
ToParticipant(nsISupports *s, nsXPCOMCycleCollectionParticipant **cp)
{
    // We use QI to move from an nsISupports to an
    // nsXPCOMCycleCollectionParticipant, which is a per-class singleton helper
    // object that implements traversal and unlinking logic for the nsISupports
    // in question.
    CallQueryInterface(s, cp);
}
  1.1333 +
// Runs the visitor over everything reachable from the single start node s0.
template <class Visitor>
MOZ_NEVER_INLINE void
GraphWalker<Visitor>::Walk(PtrInfo *s0)
{
    nsDeque queue;
    CheckedPush(queue, s0);
    DoWalk(queue);
}
  1.1342 +
// Runs the visitor over everything reachable from the graph's roots, which
// are the first mRootCount nodes in the node pool.
template <class Visitor>
MOZ_NEVER_INLINE void
GraphWalker<Visitor>::WalkFromRoots(GCGraph& aGraph)
{
    nsDeque queue;
    NodePool::Enumerator etor(aGraph.mNodes);
    for (uint32_t i = 0; i < aGraph.mRootCount; ++i) {
        CheckedPush(queue, etor.GetNext());
    }
    DoWalk(queue);
}
  1.1354 +
// Traversal driver shared by Walk and WalkFromRoots.
template <class Visitor>
MOZ_NEVER_INLINE void
GraphWalker<Visitor>::DoWalk(nsDeque &aQueue)
{
    // Use a queue to match the breadth-first traversal used when we
    // built the graph, for hopefully-better locality.
    while (aQueue.GetSize() > 0) {
        PtrInfo *pi = static_cast<PtrInfo*>(aQueue.PopFront());

        // Nodes with no participant are skipped entirely.
        if (pi->mParticipant && mVisitor.ShouldVisitNode(pi)) {
            mVisitor.VisitNode(pi);
            for (EdgePool::Iterator child = pi->FirstChild(),
                                child_end = pi->LastChild();
                 child != child_end; ++child) {
                CheckedPush(aQueue, *child);
            }
        }
    }
}
  1.1374 +
// One record of the CC graph retained for after-processing consumers (see
// nsCycleCollectorLogger's mWantAfterProcessing mode). Describes either an
// object, an edge, or a result entry.
struct CCGraphDescriber : public LinkedListElement<CCGraphDescriber>
{
  CCGraphDescriber()
  : mAddress("0x"), mCnt(0), mType(eUnknown) {}

  enum Type
  {
    eRefCountedObject,
    eGCedObject,
    eGCMarkedObject,
    eEdge,
    eRoot,
    eGarbage,
    eUnknown
  };

  nsCString mAddress;                // Object address as a hex string.
  nsCString mName;                   // Object description or edge name.
  nsCString mCompartmentOrToAddress; // Compartment (objects) or edge target.
  uint32_t mCnt;                     // Refcount, for eRefCountedObject entries.
  Type mType;
};
  1.1397 +
  1.1398 +class nsCycleCollectorLogger MOZ_FINAL : public nsICycleCollectorListener
  1.1399 +{
  1.1400 +public:
    // Logging to file is enabled by default; after-processing is opt-in.
    nsCycleCollectorLogger() :
      mStream(nullptr), mWantAllTraces(false),
      mDisableLog(false), mWantAfterProcessing(false)
    {
    }
    ~nsCycleCollectorLogger()
    {
        ClearDescribers();
        // Close the CC log stream if Begin() left it open.
        if (mStream) {
            MozillaUnRegisterDebugFILE(mStream);
            fclose(mStream);
        }
    }
  1.1414 +    NS_DECL_ISUPPORTS
  1.1415 +
    // Turns on all-traces mode for this logger.
    void SetAllTraces()
    {
        mWantAllTraces = true;
    }
  1.1420 +
    // Enables all-traces mode and hands back this same listener, addref'd.
    NS_IMETHOD AllTraces(nsICycleCollectorListener** aListener)
    {
        SetAllTraces();
        NS_ADDREF(*aListener = this);
        return NS_OK;
    }
  1.1427 +
    // Reports whether all-traces mode is on.
    NS_IMETHOD GetWantAllTraces(bool* aAllTraces)
    {
        *aAllTraces = mWantAllTraces;
        return NS_OK;
    }
  1.1433 +
    // Reports whether file logging is disabled.
    NS_IMETHOD GetDisableLog(bool* aDisableLog)
    {
        *aDisableLog = mDisableLog;
        return NS_OK;
    }
  1.1439 +
    // Enables/disables file logging.
    NS_IMETHOD SetDisableLog(bool aDisableLog)
    {
        mDisableLog = aDisableLog;
        return NS_OK;
    }
  1.1445 +
    // Reports whether describers are being kept for after-processing.
    NS_IMETHOD GetWantAfterProcessing(bool* aWantAfterProcessing)
    {
        *aWantAfterProcessing = mWantAfterProcessing;
        return NS_OK;
    }
  1.1451 +
    // Enables/disables keeping describers for after-processing.
    NS_IMETHOD SetWantAfterProcessing(bool aWantAfterProcessing)
    {
        mWantAfterProcessing = aWantAfterProcessing;
        return NS_OK;
    }
  1.1457 +
    // Returns the identifier appended to dump file names.
    NS_IMETHOD GetFilenameIdentifier(nsAString& aIdentifier)
    {
        aIdentifier = mFilenameIdentifier;
        return NS_OK;
    }
  1.1463 +
    // Sets the identifier appended to dump file names.
    NS_IMETHOD SetFilenameIdentifier(const nsAString& aIdentifier)
    {
        mFilenameIdentifier = aIdentifier;
        return NS_OK;
    }
  1.1469 +
    // Returns the path of the GC log written by Begin().
    NS_IMETHOD GetGcLogPath(nsAString &aPath)
    {
      aPath = mGCLogPath;
      return NS_OK;
    }
  1.1475 +
    // Returns the path of the CC log.
    NS_IMETHOD GetCcLogPath(nsAString &aPath)
    {
      aPath = mCCLogPath;
      return NS_OK;
    }
  1.1481 +
    // Starts a logging session: dumps the JS heap to a gc-edges file right
    // away, then opens an incomplete-cc-edges file in mStream for the Note*
    // callbacks to write the CC graph into. When logging is disabled this
    // only resets the after-processing state.
    NS_IMETHOD Begin()
    {
        mCurrentAddress.AssignLiteral("0x");
        ClearDescribers();
        if (mDisableLog) {
            return NS_OK;
        }

        // Initially create the log in a file starting with
        // "incomplete-gc-edges".  We'll move the file and strip off the
        // "incomplete-" once the dump completes.  (We do this because we don't
        // want scripts which poll the filesystem looking for gc/cc dumps to
        // grab a file before we're finished writing to it.)
        nsCOMPtr<nsIFile> gcLogFile = CreateTempFile("incomplete-gc-edges");
        if (NS_WARN_IF(!gcLogFile))
            return NS_ERROR_UNEXPECTED;

        // Dump the JS heap.
        FILE* gcLogANSIFile = nullptr;
        gcLogFile->OpenANSIFileDesc("w", &gcLogANSIFile);
        if (NS_WARN_IF(!gcLogANSIFile))
            return NS_ERROR_UNEXPECTED;
        MozillaRegisterDebugFILE(gcLogANSIFile);
        CollectorData *data = sCollectorData.get();
        if (data && data->mRuntime)
            data->mRuntime->DumpJSHeap(gcLogANSIFile);
        MozillaUnRegisterDebugFILE(gcLogANSIFile);
        fclose(gcLogANSIFile);

        // Strip off "incomplete-".
        nsCOMPtr<nsIFile> gcLogFileFinalDestination =
            CreateTempFile("gc-edges");
        if (NS_WARN_IF(!gcLogFileFinalDestination))
            return NS_ERROR_UNEXPECTED;

        nsAutoString gcLogFileFinalDestinationName;
        gcLogFileFinalDestination->GetLeafName(gcLogFileFinalDestinationName);
        if (NS_WARN_IF(gcLogFileFinalDestinationName.IsEmpty()))
            return NS_ERROR_UNEXPECTED;

        gcLogFile->MoveTo(/* directory */ nullptr, gcLogFileFinalDestinationName);

        // Log to the error console.
        nsCOMPtr<nsIConsoleService> cs =
            do_GetService(NS_CONSOLESERVICE_CONTRACTID);
        if (cs) {
            nsAutoString gcLogPath;
            gcLogFileFinalDestination->GetPath(gcLogPath);

            nsString msg = NS_LITERAL_STRING("Garbage Collector log dumped to ") +
                           gcLogPath;
            cs->LogStringMessage(msg.get());

            mGCLogPath = gcLogPath;
        }

        // Open a file for dumping the CC graph.  We again prefix with
        // "incomplete-".
        mOutFile = CreateTempFile("incomplete-cc-edges");
        if (NS_WARN_IF(!mOutFile))
            return NS_ERROR_UNEXPECTED;
        MOZ_ASSERT(!mStream);
        mOutFile->OpenANSIFileDesc("w", &mStream);
        if (NS_WARN_IF(!mStream))
            return NS_ERROR_UNEXPECTED;
        MozillaRegisterDebugFILE(mStream);

        fprintf(mStream, "# WantAllTraces=%s\n", mWantAllTraces ? "true" : "false");

        return NS_OK;
    }
    // Records one refcounted object: writes a log line and, in
    // after-processing mode, appends an eRefCountedObject describer.
    // NOTE(review): assumes mStream is open whenever !mDisableLog, i.e.
    // that Begin() ran and succeeded — confirm callers guarantee this.
    NS_IMETHOD NoteRefCountedObject(uint64_t aAddress, uint32_t refCount,
                                    const char *aObjectDescription)
    {
        if (!mDisableLog) {
            fprintf(mStream, "%p [rc=%u] %s\n", (void*)aAddress, refCount,
                    aObjectDescription);
        }
        if (mWantAfterProcessing) {
            CCGraphDescriber* d =  new CCGraphDescriber();
            mDescribers.insertBack(d);
            // Remember this address: subsequent NoteEdge calls use it as
            // the edge source.
            mCurrentAddress.AssignLiteral("0x");
            mCurrentAddress.AppendInt(aAddress, 16);
            d->mType = CCGraphDescriber::eRefCountedObject;
            d->mAddress = mCurrentAddress;
            d->mCnt = refCount;
            d->mName.Append(aObjectDescription);
        }
        return NS_OK;
    }
    // Records one GC-managed object, including its marked state and (in
    // after-processing mode) its compartment address, void'd when absent.
    NS_IMETHOD NoteGCedObject(uint64_t aAddress, bool aMarked,
                              const char *aObjectDescription,
                              uint64_t aCompartmentAddress)
    {
        if (!mDisableLog) {
            fprintf(mStream, "%p [gc%s] %s\n", (void*)aAddress,
                    aMarked ? ".marked" : "", aObjectDescription);
        }
        if (mWantAfterProcessing) {
            CCGraphDescriber* d =  new CCGraphDescriber();
            mDescribers.insertBack(d);
            // Remember this address: subsequent NoteEdge calls use it as
            // the edge source.
            mCurrentAddress.AssignLiteral("0x");
            mCurrentAddress.AppendInt(aAddress, 16);
            d->mType = aMarked ? CCGraphDescriber::eGCMarkedObject :
                                 CCGraphDescriber::eGCedObject;
            d->mAddress = mCurrentAddress;
            d->mName.Append(aObjectDescription);
            if (aCompartmentAddress) {
                d->mCompartmentOrToAddress.AssignLiteral("0x");
                d->mCompartmentOrToAddress.AppendInt(aCompartmentAddress, 16);
            } else {
                d->mCompartmentOrToAddress.SetIsVoid(true);
            }
        }
        return NS_OK;
    }
    // Records an outgoing edge from the most recently noted node.  The edge's
    // source is taken from mCurrentAddress, i.e. this must be called after a
    // Note*Object call for the owning node; mAddress holds the source and
    // mCompartmentOrToAddress the destination.
    NS_IMETHOD NoteEdge(uint64_t aToAddress, const char *aEdgeName)
    {
        if (!mDisableLog) {
            fprintf(mStream, "> %p %s\n", (void*)aToAddress, aEdgeName);
        }
        if (mWantAfterProcessing) {
            CCGraphDescriber* d =  new CCGraphDescriber();
            mDescribers.insertBack(d);
            d->mType = CCGraphDescriber::eEdge;
            d->mAddress = mCurrentAddress;
            d->mCompartmentOrToAddress.AssignLiteral("0x");
            d->mCompartmentOrToAddress.AppendInt(aToAddress, 16);
            d->mName.Append(aEdgeName);
        }
        return NS_OK;
    }
  1.1614 +    NS_IMETHOD NoteWeakMapEntry(uint64_t aMap, uint64_t aKey,
  1.1615 +                                uint64_t aKeyDelegate, uint64_t aValue)
  1.1616 +    {
  1.1617 +        if (!mDisableLog) {
  1.1618 +            fprintf(mStream, "WeakMapEntry map=%p key=%p keyDelegate=%p value=%p\n",
  1.1619 +                    (void*)aMap, (void*)aKey, (void*)aKeyDelegate, (void*)aValue);
  1.1620 +        }
  1.1621 +        // We don't support after-processing for weak map entries.
  1.1622 +        return NS_OK;
  1.1623 +    }
  1.1624 +    NS_IMETHOD NoteIncrementalRoot(uint64_t aAddress)
  1.1625 +    {
  1.1626 +        if (!mDisableLog) {
  1.1627 +            fprintf(mStream, "IncrementalRoot %p\n", (void*)aAddress);
  1.1628 +        }
  1.1629 +        // We don't support after-processing for incremental roots.
  1.1630 +        return NS_OK;
  1.1631 +    }
  1.1632 +    NS_IMETHOD BeginResults()
  1.1633 +    {
  1.1634 +        if (!mDisableLog) {
  1.1635 +            fputs("==========\n", mStream);
  1.1636 +        }
  1.1637 +        return NS_OK;
  1.1638 +    }
    // Records a root found during scanning, with the number of known edges.
    NS_IMETHOD DescribeRoot(uint64_t aAddress, uint32_t aKnownEdges)
    {
        if (!mDisableLog) {
            fprintf(mStream, "%p [known=%u]\n", (void*)aAddress, aKnownEdges);
        }
        if (mWantAfterProcessing) {
            CCGraphDescriber* d =  new CCGraphDescriber();
            mDescribers.insertBack(d);
            d->mType = CCGraphDescriber::eRoot;
            // NOTE(review): unlike NoteRefCountedObject/NoteGCedObject, no
            // "0x" prefix is prepended here -- confirm eRoot consumers expect
            // the bare hex form.
            d->mAddress.AppendInt(aAddress, 16);
            d->mCnt = aKnownEdges;
        }
        return NS_OK;
    }
    // Records an object determined to be garbage (part of a white cycle).
    NS_IMETHOD DescribeGarbage(uint64_t aAddress)
    {
        if (!mDisableLog) {
            fprintf(mStream, "%p [garbage]\n", (void*)aAddress);
        }
        if (mWantAfterProcessing) {
            CCGraphDescriber* d =  new CCGraphDescriber();
            mDescribers.insertBack(d);
            d->mType = CCGraphDescriber::eGarbage;
            // NOTE(review): same as DescribeRoot -- address recorded without
            // the "0x" prefix used by the Note*Object events.
            d->mAddress.AppendInt(aAddress, 16);
        }
        return NS_OK;
    }
    // Finishes a logging session: closes the stream, renames the log from its
    // "incomplete-" name to the final name, and announces the path on the
    // error console.  Order matters: the stream must be closed before the
    // file is moved.
    NS_IMETHOD End()
    {
        if (!mDisableLog) {
            MOZ_ASSERT(mStream);
            MOZ_ASSERT(mOutFile);

            MozillaUnRegisterDebugFILE(mStream);
            fclose(mStream);
            mStream = nullptr;

            // Strip off "incomplete-" from the log file's name.
            // CreateTempFile picks a fresh, non-clashing final name for us.
            nsCOMPtr<nsIFile> logFileFinalDestination =
                CreateTempFile("cc-edges");
            if (NS_WARN_IF(!logFileFinalDestination))
                return NS_ERROR_UNEXPECTED;

            nsAutoString logFileFinalDestinationName;
            logFileFinalDestination->GetLeafName(logFileFinalDestinationName);
            if (NS_WARN_IF(logFileFinalDestinationName.IsEmpty()))
                return NS_ERROR_UNEXPECTED;

            // NOTE(review): MoveTo's result is ignored; a failed rename
            // leaves the "incomplete-" file behind silently.
            mOutFile->MoveTo(/* directory = */ nullptr,
                             logFileFinalDestinationName);
            mOutFile = nullptr;

            // Log to the error console.
            nsCOMPtr<nsIConsoleService> cs =
                do_GetService(NS_CONSOLESERVICE_CONTRACTID);
            if (cs) {
                nsAutoString ccLogPath;
                logFileFinalDestination->GetPath(ccLogPath);

                nsString msg = NS_LITERAL_STRING("Cycle Collector log dumped to ") +
                               ccLogPath;
                cs->LogStringMessage(msg.get());

                mCCLogPath = ccLogPath;
            }
        }
        return NS_OK;
    }
  1.1707 +    NS_IMETHOD ProcessNext(nsICycleCollectorHandler* aHandler,
  1.1708 +                           bool* aCanContinue)
  1.1709 +    {
  1.1710 +        if (NS_WARN_IF(!aHandler) || NS_WARN_IF(!mWantAfterProcessing))
  1.1711 +            return NS_ERROR_UNEXPECTED;
  1.1712 +        CCGraphDescriber* d = mDescribers.popFirst();
  1.1713 +        if (d) {
  1.1714 +            switch (d->mType) {
  1.1715 +                case CCGraphDescriber::eRefCountedObject:
  1.1716 +                    aHandler->NoteRefCountedObject(d->mAddress,
  1.1717 +                                                   d->mCnt,
  1.1718 +                                                   d->mName);
  1.1719 +                    break;
  1.1720 +                case CCGraphDescriber::eGCedObject:
  1.1721 +                case CCGraphDescriber::eGCMarkedObject:
  1.1722 +                    aHandler->NoteGCedObject(d->mAddress,
  1.1723 +                                             d->mType ==
  1.1724 +                                               CCGraphDescriber::eGCMarkedObject,
  1.1725 +                                             d->mName,
  1.1726 +                                             d->mCompartmentOrToAddress);
  1.1727 +                    break;
  1.1728 +                case CCGraphDescriber::eEdge:
  1.1729 +                    aHandler->NoteEdge(d->mAddress,
  1.1730 +                                       d->mCompartmentOrToAddress,
  1.1731 +                                       d->mName);
  1.1732 +                    break;
  1.1733 +                case CCGraphDescriber::eRoot:
  1.1734 +                    aHandler->DescribeRoot(d->mAddress,
  1.1735 +                                           d->mCnt);
  1.1736 +                    break;
  1.1737 +                case CCGraphDescriber::eGarbage:
  1.1738 +                    aHandler->DescribeGarbage(d->mAddress);
  1.1739 +                    break;
  1.1740 +                case CCGraphDescriber::eUnknown:
  1.1741 +                    NS_NOTREACHED("CCGraphDescriber::eUnknown");
  1.1742 +                    break;
  1.1743 +            }
  1.1744 +            delete d;
  1.1745 +        }
  1.1746 +        if (!(*aCanContinue = !mDescribers.isEmpty())) {
  1.1747 +            mCurrentAddress.AssignLiteral("0x");
  1.1748 +        }
  1.1749 +        return NS_OK;
  1.1750 +    }
  1.1751 +private:
    /**
     * Create a new file named something like aPrefix.$PID.$IDENTIFIER.log in
     * $MOZ_CC_LOG_DIRECTORY or in the system's temp directory.  No existing
     * file will be overwritten; if aPrefix.$PID.$IDENTIFIER.log exists, we'll
     * try a file named something like aPrefix.$PID.$IDENTIFIER-1.log, and so
     * on.
     */
    already_AddRefed<nsIFile>
    CreateTempFile(const char* aPrefix)
    {
        // "." separator is only emitted when an identifier is present.
        nsPrintfCString filename("%s.%d%s%s.log",
            aPrefix,
            base::GetCurrentProcId(),
            mFilenameIdentifier.IsEmpty() ? "" : ".",
            NS_ConvertUTF16toUTF8(mFilenameIdentifier).get());

        // Get the log directory either from $MOZ_CC_LOG_DIRECTORY or from
        // the fallback directories in OpenTempFile.  We don't use an nsCOMPtr
        // here because OpenTempFile uses an in/out param and getter_AddRefs
        // wouldn't work.
        nsIFile* logFile = nullptr;
        if (char* env = PR_GetEnv("MOZ_CC_LOG_DIRECTORY")) {
            NS_NewNativeLocalFile(nsCString(env), /* followLinks = */ true,
                                  &logFile);
        }

        // In Android case, this function will open a file named aFilename under
        // specific folder (/data/local/tmp/memory-reports). Otherwise, it will
        // open a file named aFilename under "NS_OS_TEMP_DIR".
        nsresult rv = nsDumpUtils::OpenTempFile(
                                     filename,
                                     &logFile,
                                     NS_LITERAL_CSTRING("memory-reports"));
        if (NS_FAILED(rv)) {
          // Drop any reference acquired above before bailing out.
          NS_IF_RELEASE(logFile);
          return nullptr;
        }

        // Transfer the manually-managed reference to the caller.
        return dont_AddRef(logFile);
    }
  1.1792 +
  1.1793 +    void ClearDescribers()
  1.1794 +    {
  1.1795 +      CCGraphDescriber* d;
  1.1796 +      while((d = mDescribers.popFirst())) {
  1.1797 +        delete d;
  1.1798 +      }
  1.1799 +    }
  1.1800 +
    FILE *mStream;                // open log stream while a dump is in progress
    nsCOMPtr<nsIFile> mOutFile;   // the "incomplete-" file backing mStream
    bool mWantAllTraces;          // dump even normally-skipped nodes
    bool mDisableLog;             // suppress file logging entirely
    bool mWantAfterProcessing;    // queue describers for ProcessNext
    nsString mFilenameIdentifier; // optional extra token in log file names
    nsString mGCLogPath;          // path of the last GC log written
    nsString mCCLogPath;          // path of the last CC log written
    nsCString mCurrentAddress;    // "0x<hex>" of the node last noted
    mozilla::LinkedList<CCGraphDescriber> mDescribers; // after-processing queue
  1.1811 +};
  1.1812 +
  1.1813 +NS_IMPL_ISUPPORTS(nsCycleCollectorLogger, nsICycleCollectorListener)
  1.1814 +
  1.1815 +nsresult
  1.1816 +nsCycleCollectorLoggerConstructor(nsISupports* aOuter,
  1.1817 +                                  const nsIID& aIID,
  1.1818 +                                  void* *aInstancePtr)
  1.1819 +{
  1.1820 +    if (NS_WARN_IF(aOuter))
  1.1821 +        return NS_ERROR_NO_AGGREGATION;
  1.1822 +
  1.1823 +    nsISupports *logger = new nsCycleCollectorLogger();
  1.1824 +
  1.1825 +    return logger->QueryInterface(aIID, aInstancePtr);
  1.1826 +}
  1.1827 +
  1.1828 +////////////////////////////////////////////////////////////////////////
  1.1829 +// Bacon & Rajan's |MarkRoots| routine.
  1.1830 +////////////////////////////////////////////////////////////////////////
  1.1831 +
// Builds the CC graph by acting as both the traversal callback (for edges)
// and the note-root callback (for roots) during MarkRoots.
class GCGraphBuilder : public nsCycleCollectionTraversalCallback,
                       public nsCycleCollectionNoteRootCallback
{
private:
    GCGraph &mGraph;                  // graph being populated
    CycleCollectorResults &mResults;  // visit counters updated as we go
    NodePool::Builder mNodeBuilder;   // appends PtrInfo nodes to mGraph
    EdgePool::Builder mEdgeBuilder;   // appends edges to mGraph
    PtrInfo *mCurrPi;                 // node currently being traversed
    nsCycleCollectionParticipant *mJSParticipant;     // per-GC-thing participant
    nsCycleCollectionParticipant *mJSZoneParticipant; // per-zone participant
    nsCString mNextEdgeName;          // pending name for the next Note*Child
    // NOTE(review): raw pointer, no AddRef here -- caller must keep the
    // listener alive for the builder's lifetime.
    nsICycleCollectorListener *mListener;
    bool mMergeZones;                 // collapse JS things into zone nodes
    bool mRanOutOfMemory;             // set when the node map fails to grow

public:
    GCGraphBuilder(GCGraph &aGraph,
                   CycleCollectorResults &aResults,
                   CycleCollectedJSRuntime *aJSRuntime,
                   nsICycleCollectorListener *aListener,
                   bool aMergeZones);
    virtual ~GCGraphBuilder();

    // Disambiguates between the two bases' WantAllTraces.
    bool WantAllTraces() const
    {
        return nsCycleCollectionNoteRootCallback::WantAllTraces();
    }

    PtrInfo* AddNode(void *aPtr, nsCycleCollectionParticipant *aParticipant);
    PtrInfo* AddWeakMapNode(void* node);
    void Traverse(PtrInfo* aPtrInfo);
    void SetLastChild();

    bool RanOutOfMemory() const { return mRanOutOfMemory; }

private:
    // Stores the node's refcount; the name is only used by listeners, which
    // are notified separately by the Describe* callbacks below.
    void DescribeNode(uint32_t refCount, const char *objName)
    {
        mCurrPi->mRefCount = refCount;
    }

public:
    // nsCycleCollectionNoteRootCallback methods.
    NS_IMETHOD_(void) NoteXPCOMRoot(nsISupports *root);
    NS_IMETHOD_(void) NoteJSRoot(void *root);
    NS_IMETHOD_(void) NoteNativeRoot(void *root, nsCycleCollectionParticipant *participant);
    NS_IMETHOD_(void) NoteWeakMapping(void *map, void *key, void *kdelegate, void *val);

    // nsCycleCollectionTraversalCallback methods.
    NS_IMETHOD_(void) DescribeRefCountedNode(nsrefcnt refCount,
                                             const char *objName);
    NS_IMETHOD_(void) DescribeGCedNode(bool isMarked, const char *objName,
                                       uint64_t aCompartmentAddress);

    NS_IMETHOD_(void) NoteXPCOMChild(nsISupports *child);
    NS_IMETHOD_(void) NoteJSChild(void *child);
    NS_IMETHOD_(void) NoteNativeChild(void *child,
                                      nsCycleCollectionParticipant *participant);
    NS_IMETHOD_(void) NoteNextEdgeName(const char* name);

private:
    // Adds a root node unless the participant says it can be skipped
    // (always added when all traces were requested).
    NS_IMETHOD_(void) NoteRoot(void *root,
                               nsCycleCollectionParticipant *participant)
    {
        MOZ_ASSERT(root);
        MOZ_ASSERT(participant);

        if (!participant->CanSkipInCC(root) || MOZ_UNLIKELY(WantAllTraces())) {
            AddNode(root, participant);
        }
    }

    // Adds an edge from the current node (mCurrPi) to child, reporting it to
    // the listener and bumping the child's internal refcount tally.
    NS_IMETHOD_(void) NoteChild(void *child, nsCycleCollectionParticipant *cp,
                                nsCString edgeName)
    {
        PtrInfo *childPi = AddNode(child, cp);
        if (!childPi)
            return;
        mEdgeBuilder.Add(childPi);
        if (mListener) {
            mListener->NoteEdge((uint64_t)child, edgeName.get());
        }
        ++childPi->mInternalRefs;
    }

    // Returns the zone to use in place of a GC thing when zone merging is
    // on; system-zone things and non-merging mode return null.
    JS::Zone *MergeZone(void *gcthing) {
        if (!mMergeZones) {
            return nullptr;
        }
        JS::Zone *zone = JS::GetGCThingZone(gcthing);
        if (js::IsSystemZone(zone)) {
            return nullptr;
        }
        return zone;
    }
};
  1.1929 +
  1.1930 +GCGraphBuilder::GCGraphBuilder(GCGraph &aGraph,
  1.1931 +                               CycleCollectorResults &aResults,
  1.1932 +                               CycleCollectedJSRuntime *aJSRuntime,
  1.1933 +                               nsICycleCollectorListener *aListener,
  1.1934 +                               bool aMergeZones)
  1.1935 +    : mGraph(aGraph),
  1.1936 +      mResults(aResults),
  1.1937 +      mNodeBuilder(aGraph.mNodes),
  1.1938 +      mEdgeBuilder(aGraph.mEdges),
  1.1939 +      mJSParticipant(nullptr),
  1.1940 +      mJSZoneParticipant(nullptr),
  1.1941 +      mListener(aListener),
  1.1942 +      mMergeZones(aMergeZones),
  1.1943 +      mRanOutOfMemory(false)
  1.1944 +{
  1.1945 +    if (aJSRuntime) {
  1.1946 +        mJSParticipant = aJSRuntime->GCThingParticipant();
  1.1947 +        mJSZoneParticipant = aJSRuntime->ZoneParticipant();
  1.1948 +    }
  1.1949 +
  1.1950 +    uint32_t flags = 0;
  1.1951 +    if (!flags && mListener) {
  1.1952 +        flags = nsCycleCollectionTraversalCallback::WANT_DEBUG_INFO;
  1.1953 +        bool all = false;
  1.1954 +        mListener->GetWantAllTraces(&all);
  1.1955 +        if (all) {
  1.1956 +            flags |= nsCycleCollectionTraversalCallback::WANT_ALL_TRACES;
  1.1957 +            mWantAllTraces = true; // for nsCycleCollectionNoteRootCallback
  1.1958 +        }
  1.1959 +    }
  1.1960 +
  1.1961 +    mFlags |= flags;
  1.1962 +
  1.1963 +    mMergeZones = mMergeZones && MOZ_LIKELY(!WantAllTraces());
  1.1964 +
  1.1965 +    MOZ_ASSERT(nsCycleCollectionNoteRootCallback::WantAllTraces() ==
  1.1966 +               nsCycleCollectionTraversalCallback::WantAllTraces());
  1.1967 +}
  1.1968 +
// Nothing to release: all members are non-owning or self-cleaning.
GCGraphBuilder::~GCGraphBuilder()
{
}
  1.1972 +
// Returns the PtrInfo for aPtr, creating it (and its map entry) on first
// sight.  Returns null only on out-of-memory, which is also latched in
// mRanOutOfMemory so the collection can be aborted.
PtrInfo*
GCGraphBuilder::AddNode(void *aPtr, nsCycleCollectionParticipant *aParticipant)
{
    PtrToNodeEntry *e = mGraph.AddNodeToMap(aPtr);
    if (!e) {
        mRanOutOfMemory = true;
        return nullptr;
    }

    PtrInfo *result;
    if (!e->mNode) {
        // New entry.
        result = mNodeBuilder.Add(aPtr, aParticipant);
        e->mNode = result;
        NS_ASSERTION(result, "mNodeBuilder.Add returned null");
    } else {
        // Same pointer must always come with the same participant.
        result = e->mNode;
        MOZ_ASSERT(result->mParticipant == aParticipant,
                   "nsCycleCollectionParticipant shouldn't change!");
    }
    return result;
}
  1.1995 +
// Traverses one node's children: marks where its edge list starts, then asks
// the participant to report each child via the Note*Child callbacks.  A node
// whose participant was cleared (e.g. already freed) is skipped.
MOZ_NEVER_INLINE void
GCGraphBuilder::Traverse(PtrInfo* aPtrInfo)
{
    mCurrPi = aPtrInfo;

    mCurrPi->SetFirstChild(mEdgeBuilder.Mark());

    if (!aPtrInfo->mParticipant) {
        return;
    }

    nsresult rv = aPtrInfo->mParticipant->Traverse(aPtrInfo->mPointer, *this);
    if (NS_FAILED(rv)) {
        Fault("script pointer traversal failed", aPtrInfo);
    }
}
  1.2012 +
// Closes the current node's edge list at the edge builder's present mark;
// called after its Traverse completes.
void
GCGraphBuilder::SetLastChild()
{
    mCurrPi->SetLastChild(mEdgeBuilder.Mark());
}
  1.2018 +
  1.2019 +NS_IMETHODIMP_(void)
  1.2020 +GCGraphBuilder::NoteXPCOMRoot(nsISupports *root)
  1.2021 +{
  1.2022 +    root = CanonicalizeXPCOMParticipant(root);
  1.2023 +    NS_ASSERTION(root,
  1.2024 +                 "Don't add objects that don't participate in collection!");
  1.2025 +
  1.2026 +    nsXPCOMCycleCollectionParticipant *cp;
  1.2027 +    ToParticipant(root, &cp);
  1.2028 +
  1.2029 +    NoteRoot(root, cp);
  1.2030 +}
  1.2031 +
  1.2032 +NS_IMETHODIMP_(void)
  1.2033 +GCGraphBuilder::NoteJSRoot(void *root)
  1.2034 +{
  1.2035 +    if (JS::Zone *zone = MergeZone(root)) {
  1.2036 +        NoteRoot(zone, mJSZoneParticipant);
  1.2037 +    } else {
  1.2038 +        NoteRoot(root, mJSParticipant);
  1.2039 +    }
  1.2040 +}
  1.2041 +
// Records a native (non-XPCOM, non-JS) root with its explicit participant.
NS_IMETHODIMP_(void)
GCGraphBuilder::NoteNativeRoot(void *root, nsCycleCollectionParticipant *participant)
{
    NoteRoot(root, participant);
}
  1.2047 +
// Receives the refcount/name of the node being traversed.  A zero or
// saturated refcount indicates a broken participant, so it faults.
NS_IMETHODIMP_(void)
GCGraphBuilder::DescribeRefCountedNode(nsrefcnt refCount, const char *objName)
{
    if (refCount == 0)
        Fault("zero refcount", mCurrPi);
    if (refCount == UINT32_MAX)
        Fault("overflowing refcount", mCurrPi);
    mResults.mVisitedRefCounted++;

    if (mListener) {
        mListener->NoteRefCountedObject((uint64_t)mCurrPi->mPointer, refCount,
                                        objName);
    }

    DescribeNode(refCount, objName);
}
  1.2064 +
// Receives the mark state of a GC-managed node.  Marked nodes get a
// saturated refcount (treated as definitely alive); unmarked get zero.
NS_IMETHODIMP_(void)
GCGraphBuilder::DescribeGCedNode(bool isMarked, const char *objName,
                                 uint64_t aCompartmentAddress)
{
    uint32_t refCount = isMarked ? UINT32_MAX : 0;
    mResults.mVisitedGCed++;

    if (mListener) {
        mListener->NoteGCedObject((uint64_t)mCurrPi->mPointer, isMarked,
                                  objName, aCompartmentAddress);
    }

    DescribeNode(refCount, objName);
}
  1.2079 +
// Records an edge to an XPCOM child of the current node.  The pending edge
// name must be consumed (and cleared) even when the child is skipped, so it
// can't leak onto a later edge.
NS_IMETHODIMP_(void)
GCGraphBuilder::NoteXPCOMChild(nsISupports *child)
{
    nsCString edgeName;
    if (WantDebugInfo()) {
        edgeName.Assign(mNextEdgeName);
        mNextEdgeName.Truncate();
    }
    if (!child || !(child = CanonicalizeXPCOMParticipant(child)))
        return;

    nsXPCOMCycleCollectionParticipant *cp;
    ToParticipant(child, &cp);
    if (cp && (!cp->CanSkipThis(child) || WantAllTraces())) {
        NoteChild(child, cp, edgeName);
    }
}
  1.2097 +
// Records an edge to a native child; the participant is mandatory since
// natives can't be queried for one.
NS_IMETHODIMP_(void)
GCGraphBuilder::NoteNativeChild(void *child,
                                nsCycleCollectionParticipant *participant)
{
    nsCString edgeName;
    if (WantDebugInfo()) {
        edgeName.Assign(mNextEdgeName);
        mNextEdgeName.Truncate();
    }
    if (!child)
        return;

    MOZ_ASSERT(participant, "Need a nsCycleCollectionParticipant!");
    NoteChild(child, participant, edgeName);
}
  1.2113 +
// Records an edge to a JS child.  Only gray (potentially cyclic) GC things
// are added unless all traces were requested; zone merging may substitute
// the child's zone node.
NS_IMETHODIMP_(void)
GCGraphBuilder::NoteJSChild(void *child)
{
    if (!child) {
        return;
    }

    nsCString edgeName;
    if (MOZ_UNLIKELY(WantDebugInfo())) {
        edgeName.Assign(mNextEdgeName);
        mNextEdgeName.Truncate();
    }

    if (xpc_GCThingIsGrayCCThing(child) || MOZ_UNLIKELY(WantAllTraces())) {
        if (JS::Zone *zone = MergeZone(child)) {
            NoteChild(zone, mJSZoneParticipant, edgeName);
        } else {
            NoteChild(child, mJSParticipant, edgeName);
        }
    }
}
  1.2135 +
  1.2136 +NS_IMETHODIMP_(void)
  1.2137 +GCGraphBuilder::NoteNextEdgeName(const char* name)
  1.2138 +{
  1.2139 +    if (WantDebugInfo()) {
  1.2140 +        mNextEdgeName = name;
  1.2141 +    }
  1.2142 +}
  1.2143 +
  1.2144 +PtrInfo*
  1.2145 +GCGraphBuilder::AddWeakMapNode(void *node)
  1.2146 +{
  1.2147 +    MOZ_ASSERT(node, "Weak map node should be non-null.");
  1.2148 +
  1.2149 +    if (!xpc_GCThingIsGrayCCThing(node) && !WantAllTraces())
  1.2150 +        return nullptr;
  1.2151 +
  1.2152 +    if (JS::Zone *zone = MergeZone(node)) {
  1.2153 +        return AddNode(zone, mJSZoneParticipant);
  1.2154 +    } else {
  1.2155 +        return AddNode(node, mJSParticipant);
  1.2156 +    }
  1.2157 +}
  1.2158 +
// Records one weak-map entry in the graph's weak-map table.
NS_IMETHODIMP_(void)
GCGraphBuilder::NoteWeakMapping(void *map, void *key, void *kdelegate, void *val)
{
    // Don't try to optimize away the entry here, as we've already attempted to
    // do that in TraceWeakMapping in nsXPConnect.
    WeakMapping *mapping = mGraph.mWeakMaps.AppendElement();
    mapping->mMap = map ? AddWeakMapNode(map) : nullptr;
    mapping->mKey = key ? AddWeakMapNode(key) : nullptr;
    // With no explicit delegate, the key acts as its own delegate.
    mapping->mKeyDelegate = kdelegate ? AddWeakMapNode(kdelegate) : mapping->mKey;
    mapping->mVal = val ? AddWeakMapNode(val) : nullptr;

    if (mListener) {
        mListener->NoteWeakMapEntry((uint64_t)map, (uint64_t)key,
                                    (uint64_t)kdelegate, (uint64_t)val);
    }
}
  1.2175 +
  1.2176 +static bool
  1.2177 +AddPurpleRoot(GCGraphBuilder &aBuilder, void *aRoot, nsCycleCollectionParticipant *aParti)
  1.2178 +{
  1.2179 +    CanonicalizeParticipant(&aRoot, &aParti);
  1.2180 +
  1.2181 +    if (aBuilder.WantAllTraces() || !aParti->CanSkipInCC(aRoot)) {
  1.2182 +        PtrInfo *pinfo = aBuilder.AddNode(aRoot, aParti);
  1.2183 +        if (!pinfo) {
  1.2184 +            return false;
  1.2185 +        }
  1.2186 +    }
  1.2187 +
  1.2188 +    return true;
  1.2189 +}
  1.2190 +
// MayHaveChild() will be false after a Traverse if the object does
// not have any children the CC will visit.
class ChildFinder : public nsCycleCollectionTraversalCallback
{
public:
    ChildFinder() : mMayHaveChild(false) {}

    // The logic of the Note*Child functions must mirror that of their
    // respective functions in GCGraphBuilder.
    NS_IMETHOD_(void) NoteXPCOMChild(nsISupports *child);
    NS_IMETHOD_(void) NoteNativeChild(void *child,
                                      nsCycleCollectionParticipant *helper);
    NS_IMETHOD_(void) NoteJSChild(void *child);

    // Node descriptions and edge names are irrelevant for this query.
    NS_IMETHOD_(void) DescribeRefCountedNode(nsrefcnt refcount,
                                             const char *objname) {}
    NS_IMETHOD_(void) DescribeGCedNode(bool ismarked,
                                       const char *objname,
                                       uint64_t aCompartmentAddress) {}
    NS_IMETHOD_(void) NoteNextEdgeName(const char* name) {}
    // True once any CC-visible child has been reported.
    bool MayHaveChild() {
        return mMayHaveChild;
    }
private:
    bool mMayHaveChild;
};
  1.2217 +
  1.2218 +NS_IMETHODIMP_(void)
  1.2219 +ChildFinder::NoteXPCOMChild(nsISupports *child)
  1.2220 +{
  1.2221 +    if (!child || !(child = CanonicalizeXPCOMParticipant(child)))
  1.2222 +        return;
  1.2223 +    nsXPCOMCycleCollectionParticipant *cp;
  1.2224 +    ToParticipant(child, &cp);
  1.2225 +    if (cp && !cp->CanSkip(child, true))
  1.2226 +        mMayHaveChild = true;
  1.2227 +}
  1.2228 +
  1.2229 +NS_IMETHODIMP_(void)
  1.2230 +ChildFinder::NoteNativeChild(void *child,
  1.2231 +                             nsCycleCollectionParticipant *helper)
  1.2232 +{
  1.2233 +    if (child)
  1.2234 +        mMayHaveChild = true;
  1.2235 +}
  1.2236 +
  1.2237 +NS_IMETHODIMP_(void)
  1.2238 +ChildFinder::NoteJSChild(void *child)
  1.2239 +{
  1.2240 +    if (child && xpc_GCThingIsGrayCCThing(child)) {
  1.2241 +        mMayHaveChild = true;
  1.2242 +    }
  1.2243 +}
  1.2244 +
  1.2245 +static bool
  1.2246 +MayHaveChild(void *o, nsCycleCollectionParticipant* cp)
  1.2247 +{
  1.2248 +    ChildFinder cf;
  1.2249 +    cp->Traverse(o, cf);
  1.2250 +    return cf.MayHaveChild();
  1.2251 +}
  1.2252 +
// One fixed-capacity (60-element) chunk of a SegmentedArray, linkable into
// the array's segment list.
template<class T>
class SegmentedArrayElement : public LinkedListElement<SegmentedArrayElement<T>>
                            , public AutoFallibleTArray<T, 60>
{
};
  1.2258 +
// Append-only array built from fixed-size segments, so growth never moves
// existing elements.  Must be explicitly Clear()ed before destruction.
template<class T>
class SegmentedArray
{
public:
    ~SegmentedArray()
    {
        MOZ_ASSERT(IsEmpty());
    }

    // Appends to the last segment, starting a new one when it is full.
    void AppendElement(T& aElement)
    {
        SegmentedArrayElement<T>* last = mSegments.getLast();
        if (!last || last->Length() == last->Capacity()) {
            last = new SegmentedArrayElement<T>();
            mSegments.insertBack(last);
        }
        last->AppendElement(aElement);
    }

    // Frees every segment (and thereby every element).
    void Clear()
    {
        SegmentedArrayElement<T>* first;
        while ((first = mSegments.popFirst())) {
            delete first;
        }
    }

    SegmentedArrayElement<T>* GetFirstSegment()
    {
        return mSegments.getFirst();
    }

    bool IsEmpty()
    {
        return !GetFirstSegment();
    }

private:
    mozilla::LinkedList<SegmentedArrayElement<T>> mSegments;
};
  1.2299 +
// JSPurpleBuffer keeps references to GCThings which might affect the
// next cycle collection. It is owned only by itself and during unlink its
// self reference is broken down and the object ends up killing itself.
// If GC happens before CC, references to GCthings and the self reference are
// removed.
class JSPurpleBuffer
{
public:
    explicit JSPurpleBuffer(JSPurpleBuffer*& aReferenceToThis)
      : mReferenceToThis(aReferenceToThis)
    {
        // Publish ourselves through the caller's slot and take the
        // self-reference that keeps us alive until Destroy().
        mReferenceToThis = this;
        NS_ADDREF_THIS();
        mozilla::HoldJSObjects(this);
    }

    ~JSPurpleBuffer()
    {
        // Destroy() must have run: all held GC things already dropped.
        MOZ_ASSERT(mValues.IsEmpty());
        MOZ_ASSERT(mObjects.IsEmpty());
        MOZ_ASSERT(mTenuredObjects.IsEmpty());
    }

    // Clears the caller's slot and all held GC things, then releases the
    // self-reference -- normally deleting this object.
    void Destroy()
    {
        mReferenceToThis = nullptr;
        mValues.Clear();
        mObjects.Clear();
        mTenuredObjects.Clear();
        mozilla::DropJSObjects(this);
        NS_RELEASE_THIS();
    }

    NS_INLINE_DECL_CYCLE_COLLECTING_NATIVE_REFCOUNTING(JSPurpleBuffer)
    NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_NATIVE_CLASS(JSPurpleBuffer)

    JSPurpleBuffer*& mReferenceToThis; // caller-owned slot pointing back at us
    SegmentedArray<JS::Heap<JS::Value>> mValues;
    SegmentedArray<JS::Heap<JSObject*>> mObjects;
    SegmentedArray<JS::TenuredHeap<JSObject*>> mTenuredObjects;
};
  1.2341 +
// Cycle-collector glue for JSPurpleBuffer.
NS_IMPL_CYCLE_COLLECTION_CLASS(JSPurpleBuffer)

// Unlink tears the buffer down; Destroy() drops the self reference, which
// deletes the object.
NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(JSPurpleBuffer)
    tmp->Destroy();
NS_IMPL_CYCLE_COLLECTION_UNLINK_END

// Traverse reports the deliberate self edge that keeps the buffer alive.
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN(JSPurpleBuffer)
    CycleCollectionNoteChild(cb, tmp, "self");
    NS_IMPL_CYCLE_COLLECTION_TRAVERSE_SCRIPT_OBJECTS
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END

// Traces every element of one SegmentedArray field: segments front-to-back,
// elements within each segment back-to-front.
#define NS_TRACE_SEGMENTED_ARRAY(_field)                                       \
    {                                                                          \
        auto segment = tmp->_field.GetFirstSegment();                          \
        while (segment) {                                                      \
            for (uint32_t i = segment->Length(); i > 0;) {                     \
                aCallbacks.Trace(&segment->ElementAt(--i), #_field, aClosure); \
            }                                                                  \
            segment = segment->getNext();                                      \
        }                                                                      \
    }

NS_IMPL_CYCLE_COLLECTION_TRACE_BEGIN(JSPurpleBuffer)
    NS_TRACE_SEGMENTED_ARRAY(mValues)
    NS_TRACE_SEGMENTED_ARRAY(mObjects)
    NS_TRACE_SEGMENTED_ARRAY(mTenuredObjects)
NS_IMPL_CYCLE_COLLECTION_TRACE_END

NS_IMPL_CYCLE_COLLECTION_ROOT_NATIVE(JSPurpleBuffer, AddRef)
NS_IMPL_CYCLE_COLLECTION_UNROOT_NATIVE(JSPurpleBuffer, Release)
  1.2372 +
// A snapshot of a purple-buffer entry whose refcount has dropped to zero:
// just enough information for SnowWhiteKiller to trace and delete the
// object after it has been removed from the purple buffer.
struct SnowWhiteObject
{
  void* mPointer;                            // canonicalized object pointer
  nsCycleCollectionParticipant* mParticipant; // participant used to Trace/delete it
  nsCycleCollectingAutoRefCnt* mRefCnt;       // the object's refcount, re-checked before deletion
};
  1.2379 +
// SnowWhiteKiller sweeps the purple buffer for objects whose refcount has
// dropped to zero ("snow-white" objects), removes them from the buffer, and
// deletes them when it goes out of scope. It also implements TraceCallbacks:
// while deleting, any gray JS things a dying object still holds are parked
// in the collector's JSPurpleBuffer so they are kept alive.
class SnowWhiteKiller : public TraceCallbacks
{
public:
    // aMaxCount is an upper bound on how many objects may be gathered
    // (normally the purple buffer's entry count).
    SnowWhiteKiller(nsCycleCollector *aCollector, uint32_t aMaxCount)
        : mCollector(aCollector)
    {
        MOZ_ASSERT(mCollector, "Calling SnowWhiteKiller after nsCC went away");
        // Reserve capacity up front, halving the request on failure so we
        // can still make progress under memory pressure. If we can't even
        // reserve room for one object, deliberately abort.
        while (true) {
            if (mObjects.SetCapacity(aMaxCount)) {
                break;
            }
            if (aMaxCount == 1) {
                NS_RUNTIMEABORT("Not enough memory to even delete objects!");
            }
            aMaxCount /= 2;
        }
    }

    // Deletion happens in the destructor, i.e. after visiting the purple
    // buffer has finished, not while it is being iterated.
    ~SnowWhiteKiller()
    {
        for (uint32_t i = 0; i < mObjects.Length(); ++i) {
            SnowWhiteObject& o = mObjects[i];
            // Re-check: the object must still have a zero refcount and must
            // not have re-entered the purple buffer in the meantime.
            if (!o.mRefCnt->get() && !o.mRefCnt->IsInPurpleBuffer()) {
                mCollector->RemoveObjectFromGraph(o.mPointer);
                o.mRefCnt->stabilizeForDeletion();
                // Trace() (the overloads below) stashes any gray JS things in
                // the JSPurpleBuffer before the object is deleted.
                o.mParticipant->Trace(o.mPointer, *this, nullptr);
                o.mParticipant->DeleteCycleCollectable(o.mPointer);
            }
        }
    }

    // Purple-buffer visitor: record any entry whose refcount is zero, and
    // remove it from the buffer only if recording it succeeded.
    void
    Visit(nsPurpleBuffer& aBuffer, nsPurpleBufferEntry* aEntry)
    {
        MOZ_ASSERT(aEntry->mObject, "Null object in purple buffer");
        if (!aEntry->mRefCnt->get()) {
            void *o = aEntry->mObject;
            nsCycleCollectionParticipant *cp = aEntry->mParticipant;
            CanonicalizeParticipant(&o, &cp);
            SnowWhiteObject swo = { o, cp, aEntry->mRefCnt };
            if (mObjects.AppendElement(swo)) {
                aBuffer.Remove(aEntry);
            }
        }
    }

    // True if Visit() gathered at least one zero-refcount object.
    bool HasSnowWhiteObjects() const
    {
      return mObjects.Length() > 0;
    }

    // TraceCallbacks implementation: gray JS things held by a dying object
    // are appended to the JSPurpleBuffer to keep them alive; other kinds of
    // GC things (ids, strings, scripts, functions) are ignored.
    virtual void Trace(JS::Heap<JS::Value>* aValue, const char* aName,
                       void* aClosure) const
    {
        if (aValue->isMarkable()) {
            void* thing = aValue->toGCThing();
            if (thing && xpc_GCThingIsGrayCCThing(thing)) {
                mCollector->GetJSPurpleBuffer()->mValues.AppendElement(*aValue);
            }
        }
    }

    virtual void Trace(JS::Heap<jsid>* aId, const char* aName,
                       void* aClosure) const
    {
    }

    virtual void Trace(JS::Heap<JSObject*>* aObject, const char* aName,
                       void* aClosure) const
    {
        if (*aObject && xpc_GCThingIsGrayCCThing(*aObject)) {
            mCollector->GetJSPurpleBuffer()->mObjects.AppendElement(*aObject);
        }
    }

    virtual void Trace(JS::TenuredHeap<JSObject*>* aObject, const char* aName,
                       void* aClosure) const
    {
        if (*aObject && xpc_GCThingIsGrayCCThing(*aObject)) {
            mCollector->GetJSPurpleBuffer()->mTenuredObjects.AppendElement(*aObject);
        }
    }

    virtual void Trace(JS::Heap<JSString*>* aString, const char* aName,
                       void* aClosure) const
    {
    }

    virtual void Trace(JS::Heap<JSScript*>* aScript, const char* aName,
                       void* aClosure) const
    {
    }

    virtual void Trace(JS::Heap<JSFunction*>* aFunction, const char* aName,
                       void* aClosure) const
    {
    }

private:
    nsCycleCollector *mCollector;             // not owned; asserted non-null
    FallibleTArray<SnowWhiteObject> mObjects; // objects pending deletion
};
  1.2482 +
// Purple-buffer visitor used by ForgetSkippable: frees snow-white entries
// (synchronously via the inherited SnowWhiteKiller, or by dispatching
// deferred deletion when aAsyncSnowWhiteFreeing is set) and drops entries
// that can be proven alive (CanSkip) or, optionally, that are childless.
class RemoveSkippableVisitor : public SnowWhiteKiller
{
public:
    // With async freeing the base class never gathers objects, so its
    // capacity is 0; otherwise reserve for up to aMaxCount entries.
    RemoveSkippableVisitor(nsCycleCollector* aCollector,
                           uint32_t aMaxCount, bool aRemoveChildlessNodes,
                           bool aAsyncSnowWhiteFreeing,
                           CC_ForgetSkippableCallback aCb)
        : SnowWhiteKiller(aCollector, aAsyncSnowWhiteFreeing ? 0 : aMaxCount),
          mRemoveChildlessNodes(aRemoveChildlessNodes),
          mAsyncSnowWhiteFreeing(aAsyncSnowWhiteFreeing),
          mDispatchedDeferredDeletion(false),
          mCallback(aCb)
    {}

    ~RemoveSkippableVisitor()
    {
        // Note, we must call the callback before SnowWhiteKiller calls
        // DeleteCycleCollectable!
        if (mCallback) {
            mCallback();
        }
        if (HasSnowWhiteObjects()) {
            // Effectively a continuation.
            nsCycleCollector_dispatchDeferredDeletion(true);
        }
    }

    void
    Visit(nsPurpleBuffer &aBuffer, nsPurpleBufferEntry *aEntry)
    {
        MOZ_ASSERT(aEntry->mObject, "null mObject in purple buffer");
        if (!aEntry->mRefCnt->get()) {
            // Snow-white entry: kill it now, or hand it off to the deferred
            // deletion runnable (dispatched at most once per visitor).
            if (!mAsyncSnowWhiteFreeing) {
                SnowWhiteKiller::Visit(aBuffer, aEntry);
            } else if (!mDispatchedDeferredDeletion) {
                mDispatchedDeferredDeletion = true;
                nsCycleCollector_dispatchDeferredDeletion(false);
            }
            return;
        }
        void *o = aEntry->mObject;
        nsCycleCollectionParticipant *cp = aEntry->mParticipant;
        CanonicalizeParticipant(&o, &cp);
        // Keep the entry only if it is still purple, cannot be skipped, and
        // (when childless removal is requested) may actually have children.
        if (aEntry->mRefCnt->IsPurple() && !cp->CanSkip(o, false) &&
            (!mRemoveChildlessNodes || MayHaveChild(o, cp))) {
            return;
        }
        aBuffer.Remove(aEntry);
    }

private:
    bool mRemoveChildlessNodes;         // also drop nodes that cannot have children
    bool mAsyncSnowWhiteFreeing;        // defer snow-white deletion to a runnable
    bool mDispatchedDeferredDeletion;   // ensures at most one dispatch from Visit()
    CC_ForgetSkippableCallback mCallback; // invoked in dtor, before any deletion
};
  1.2539 +
  1.2540 +void
  1.2541 +nsPurpleBuffer::RemoveSkippable(nsCycleCollector* aCollector,
  1.2542 +                                bool aRemoveChildlessNodes,
  1.2543 +                                bool aAsyncSnowWhiteFreeing,
  1.2544 +                                CC_ForgetSkippableCallback aCb)
  1.2545 +{
  1.2546 +    RemoveSkippableVisitor visitor(aCollector, Count(), aRemoveChildlessNodes,
  1.2547 +                                   aAsyncSnowWhiteFreeing, aCb);
  1.2548 +    VisitEntries(visitor);
  1.2549 +}
  1.2550 +
  1.2551 +bool
  1.2552 +nsCycleCollector::FreeSnowWhite(bool aUntilNoSWInPurpleBuffer)
  1.2553 +{
  1.2554 +    CheckThreadSafety();
  1.2555 +
  1.2556 +    if (mFreeingSnowWhite) {
  1.2557 +        return false;
  1.2558 +    }
  1.2559 +
  1.2560 +    AutoRestore<bool> ar(mFreeingSnowWhite);
  1.2561 +    mFreeingSnowWhite = true;
  1.2562 +
  1.2563 +    bool hadSnowWhiteObjects = false;
  1.2564 +    do {
  1.2565 +        SnowWhiteKiller visitor(this, mPurpleBuf.Count());
  1.2566 +        mPurpleBuf.VisitEntries(visitor);
  1.2567 +        hadSnowWhiteObjects = hadSnowWhiteObjects ||
  1.2568 +                              visitor.HasSnowWhiteObjects();
  1.2569 +        if (!visitor.HasSnowWhiteObjects()) {
  1.2570 +            break;
  1.2571 +        }
  1.2572 +    } while (aUntilNoSWInPurpleBuffer);
  1.2573 +    return hadSnowWhiteObjects;
  1.2574 +}
  1.2575 +
// Shrink the purple buffer by removing entries that can be shown to be
// alive (and, optionally, childless ones), freeing snow-white objects
// either synchronously or via deferred deletion per aAsyncSnowWhiteFreeing.
void
nsCycleCollector::ForgetSkippable(bool aRemoveChildlessNodes,
                                  bool aAsyncSnowWhiteFreeing)
{
    CheckThreadSafety();

    // If we remove things from the purple buffer during graph building, we may
    // lose track of an object that was mutated during graph building.
    MOZ_ASSERT(mIncrementalPhase == IdlePhase);

    if (mJSRuntime) {
        mJSRuntime->PrepareForForgetSkippable();
    }
    MOZ_ASSERT(!mScanInProgress, "Don't forget skippable or free snow-white while scan is in progress.");
    mPurpleBuf.RemoveSkippable(this, aRemoveChildlessNodes,
                               aAsyncSnowWhiteFreeing, mForgetSkippableCB);
}
  1.2593 +
// Build the CC graph incrementally: traverse nodes starting from the
// suspected roots, within the given time budget. When the whole node pool
// has been traversed, drop the builder and advance the collector to
// ScanAndCollectWhitePhase.
MOZ_NEVER_INLINE void
nsCycleCollector::MarkRoots(SliceBudget &aBudget)
{
    // Check the budget's clock only every kNumNodesBetweenTimeChecks nodes
    // by stepping the counter in chunks of kStep.
    const intptr_t kNumNodesBetweenTimeChecks = 1000;
    const intptr_t kStep = SliceBudget::CounterReset / kNumNodesBetweenTimeChecks;

    TimeLog timeLog;
    AutoRestore<bool> ar(mScanInProgress);
    MOZ_ASSERT(!mScanInProgress);
    mScanInProgress = true;
    MOZ_ASSERT(mIncrementalPhase == GraphBuildingPhase);
    MOZ_ASSERT(mCurrNode);

    while (!aBudget.isOverBudget() && !mCurrNode->IsDone()) {
        PtrInfo *pi = mCurrNode->GetNext();
        if (!pi) {
            MOZ_CRASH();
        }

        // We need to call the builder's Traverse() method on deleted nodes, to
        // set their firstChild() that may be read by a prior non-deleted
        // neighbor.
        mBuilder->Traverse(pi);
        if (mCurrNode->AtBlockEnd()) {
            mBuilder->SetLastChild();
        }
        aBudget.step(kStep);
    }

    if (!mCurrNode->IsDone()) {
        // Out of budget; traversal resumes from mCurrNode on the next slice.
        timeLog.Checkpoint("MarkRoots()");
        return;
    }

    if (mGraph.mRootCount > 0) {
        mBuilder->SetLastChild();
    }

    if (mBuilder->RanOutOfMemory()) {
        MOZ_ASSERT(false, "Ran out of memory while building cycle collector graph");
        CC_TELEMETRY(_OOM, true);
    }

    // Graph building is complete: release the builder/cursor and move on.
    mBuilder = nullptr;
    mCurrNode = nullptr;
    mIncrementalPhase = ScanAndCollectWhitePhase;
    timeLog.Checkpoint("MarkRoots()");
}
  1.2642 +
  1.2643 +
  1.2644 +////////////////////////////////////////////////////////////////////////
  1.2645 +// Bacon & Rajan's |ScanRoots| routine.
  1.2646 +////////////////////////////////////////////////////////////////////////
  1.2647 +
  1.2648 +
  1.2649 +struct ScanBlackVisitor
  1.2650 +{
  1.2651 +    ScanBlackVisitor(uint32_t &aWhiteNodeCount, bool &aFailed)
  1.2652 +        : mWhiteNodeCount(aWhiteNodeCount), mFailed(aFailed)
  1.2653 +    {
  1.2654 +    }
  1.2655 +
  1.2656 +    bool ShouldVisitNode(PtrInfo const *pi)
  1.2657 +    {
  1.2658 +        return pi->mColor != black;
  1.2659 +    }
  1.2660 +
  1.2661 +    MOZ_NEVER_INLINE void VisitNode(PtrInfo *pi)
  1.2662 +    {
  1.2663 +        if (pi->mColor == white)
  1.2664 +            --mWhiteNodeCount;
  1.2665 +        pi->mColor = black;
  1.2666 +    }
  1.2667 +
  1.2668 +    void Failed()
  1.2669 +    {
  1.2670 +        mFailed = true;
  1.2671 +    }
  1.2672 +
  1.2673 +private:
  1.2674 +    uint32_t &mWhiteNodeCount;
  1.2675 +    bool &mFailed;
  1.2676 +};
  1.2677 +
  1.2678 +
// Bacon & Rajan's Scan step: a grey node whose refcount is fully accounted
// for by in-graph references (or is zero) is tentatively garbage (white);
// otherwise it is externally referenced, so it and everything reachable
// from it is flooded black via ScanBlackVisitor.
struct scanVisitor
{
    scanVisitor(uint32_t &aWhiteNodeCount, bool &aFailed, bool aWasIncremental)
        : mWhiteNodeCount(aWhiteNodeCount), mFailed(aFailed),
          mWasIncremental(aWasIncremental)
    {
    }

    // Only nodes that have not been scanned yet are still grey.
    bool ShouldVisitNode(PtrInfo const *pi)
    {
        return pi->mColor == grey;
    }

    MOZ_NEVER_INLINE void VisitNode(PtrInfo *pi)
    {
        if (pi->mInternalRefs > pi->mRefCount && pi->mRefCount > 0) {
            // If we found more references to an object than its ref count, then
            // the object should have already been marked as an incremental
            // root. Note that this is imprecise, because pi could have been
            // marked black for other reasons. Always fault if we weren't
            // incremental, as there were no incremental roots in that case.
            if (!mWasIncremental || pi->mColor != black) {
                Fault("traversed refs exceed refcount", pi);
            }
        }

        if (pi->mInternalRefs == pi->mRefCount || pi->mRefCount == 0) {
            // Every known reference comes from inside the graph (a zero
            // refcount indicates a GCed node): tentatively white.
            pi->mColor = white;
            ++mWhiteNodeCount;
        } else {
            // Some references come from outside the graph, so this node and
            // everything reachable from it is live.
            GraphWalker<ScanBlackVisitor>(ScanBlackVisitor(mWhiteNodeCount, mFailed)).Walk(pi);
            MOZ_ASSERT(pi->mColor == black,
                       "Why didn't ScanBlackVisitor make pi black?");
        }
    }

    void Failed() {
        mFailed = true;
    }

private:
    uint32_t &mWhiteNodeCount;  // shared count of tentatively-white nodes
    bool &mFailed;              // shared OOM indicator
    bool mWasIncremental;       // graph was built incrementally (affects Fault logic)
};
  1.2724 +
// Iterate over the WeakMaps.  If we mark anything while iterating
// over the WeakMaps, we must iterate over all of the WeakMaps again.
void
nsCycleCollector::ScanWeakMaps()
{
    bool anyChanged;
    bool failed = false;
    // Repeat until a fixed point: marking one entry's key or value black can
    // make other entries' keys or values live in turn.
    do {
        anyChanged = false;
        for (uint32_t i = 0; i < mGraph.mWeakMaps.Length(); i++) {
            WeakMapping *wm = &mGraph.mWeakMaps[i];

            // If any of these are null, the original object was marked black.
            uint32_t mColor = wm->mMap ? wm->mMap->mColor : black;
            uint32_t kColor = wm->mKey ? wm->mKey->mColor : black;
            uint32_t kdColor = wm->mKeyDelegate ? wm->mKeyDelegate->mColor : black;
            uint32_t vColor = wm->mVal ? wm->mVal->mColor : black;

            // All non-null weak mapping maps, keys and values are
            // roots (in the sense of WalkFromRoots) in the cycle
            // collector graph, and thus should have been colored
            // either black or white in ScanRoots().
            MOZ_ASSERT(mColor != grey, "Uncolored weak map");
            MOZ_ASSERT(kColor != grey, "Uncolored weak map key");
            MOZ_ASSERT(kdColor != grey, "Uncolored weak map key delegate");
            MOZ_ASSERT(vColor != grey, "Uncolored weak map value");

            // A key is live if the map is live and the key's delegate is live.
            if (mColor == black && kColor != black && kdColor == black) {
                GraphWalker<ScanBlackVisitor>(ScanBlackVisitor(mWhiteNodeCount, failed)).Walk(wm->mKey);
                anyChanged = true;
            }

            // A value is live if both the map and its key are live.
            if (mColor == black && kColor == black && vColor != black) {
                GraphWalker<ScanBlackVisitor>(ScanBlackVisitor(mWhiteNodeCount, failed)).Walk(wm->mVal);
                anyChanged = true;
            }
        }
    } while (anyChanged);

    if (failed) {
        MOZ_ASSERT(false, "Ran out of memory in ScanWeakMaps");
        CC_TELEMETRY(_OOM, true);
    }
}
  1.2769 +
  1.2770 +// Flood black from any objects in the purple buffer that are in the CC graph.
  1.2771 +class PurpleScanBlackVisitor
  1.2772 +{
  1.2773 +public:
  1.2774 +    PurpleScanBlackVisitor(GCGraph &aGraph, nsICycleCollectorListener *aListener,
  1.2775 +                           uint32_t &aCount, bool &aFailed)
  1.2776 +        : mGraph(aGraph), mListener(aListener), mCount(aCount), mFailed(aFailed)
  1.2777 +    {
  1.2778 +    }
  1.2779 +
  1.2780 +    void
  1.2781 +    Visit(nsPurpleBuffer &aBuffer, nsPurpleBufferEntry *aEntry)
  1.2782 +    {
  1.2783 +        MOZ_ASSERT(aEntry->mObject, "Entries with null mObject shouldn't be in the purple buffer.");
  1.2784 +        MOZ_ASSERT(aEntry->mRefCnt->get() != 0, "Snow-white objects shouldn't be in the purple buffer.");
  1.2785 +
  1.2786 +        void *obj = aEntry->mObject;
  1.2787 +        if (!aEntry->mParticipant) {
  1.2788 +            obj = CanonicalizeXPCOMParticipant(static_cast<nsISupports*>(obj));
  1.2789 +            MOZ_ASSERT(obj, "Don't add objects that don't participate in collection!");
  1.2790 +        }
  1.2791 +
  1.2792 +        PtrInfo *pi = mGraph.FindNode(obj);
  1.2793 +        if (!pi) {
  1.2794 +            return;
  1.2795 +        }
  1.2796 +        MOZ_ASSERT(pi->mParticipant, "No dead objects should be in the purple buffer.");
  1.2797 +        if (MOZ_UNLIKELY(mListener)) {
  1.2798 +            mListener->NoteIncrementalRoot((uint64_t)pi->mPointer);
  1.2799 +        }
  1.2800 +        if (pi->mColor == black) {
  1.2801 +            return;
  1.2802 +        }
  1.2803 +        GraphWalker<ScanBlackVisitor>(ScanBlackVisitor(mCount, mFailed)).Walk(pi);
  1.2804 +    }
  1.2805 +
  1.2806 +private:
  1.2807 +    GCGraph &mGraph;
  1.2808 +    nsICycleCollectorListener *mListener;
  1.2809 +    uint32_t &mCount;
  1.2810 +    bool &mFailed;
  1.2811 +};
  1.2812 +
// Objects that have been stored somewhere since the start of incremental graph building must
// be treated as live for this cycle collection, because we may not have accurate information
// about who holds references to them.
void
nsCycleCollector::ScanIncrementalRoots()
{
    TimeLog timeLog;

    // Reference counted objects:
    // We cleared the purple buffer at the start of the current ICC, so if a
    // refcounted object is purple, it may have been AddRef'd during the current
    // ICC. (It may also have only been released.) If that is the case, we cannot
    // be sure that the set of things pointing to the object in the CC graph
    // is accurate. Therefore, for safety, we treat any purple objects as being
    // live during the current CC. We don't remove anything from the purple
    // buffer here, so these objects will be suspected and freed in the next CC
    // if they are garbage.
    bool failed = false;
    PurpleScanBlackVisitor purpleScanBlackVisitor(mGraph, mListener, mWhiteNodeCount, failed);
    mPurpleBuf.VisitEntries(purpleScanBlackVisitor);
    timeLog.Checkpoint("ScanIncrementalRoots::fix purple");

    // Garbage collected objects:
    // If a GCed object was added to the graph with a refcount of zero, and is
    // now marked black by the GC, it was probably gray before and was exposed
    // to active JS, so it may have been stored somewhere, so it needs to be
    // treated as live.
    if (mJSRuntime) {
        nsCycleCollectionParticipant *jsParticipant = mJSRuntime->GCThingParticipant();
        nsCycleCollectionParticipant *zoneParticipant = mJSRuntime->ZoneParticipant();
        NodePool::Enumerator etor(mGraph.mNodes);

        // Walk every node in the graph looking for JS things (refcount 0)
        // that the GC has since marked non-gray.
        while (!etor.IsDone()) {
            PtrInfo *pi = etor.GetNext();

            // If the refcount is non-zero, pi can't have been a gray JS object.
            if (pi->mRefCount != 0) {
                continue;
            }

            // As an optimization, if an object has already been determined to be live,
            // don't consider it further.  We can't do this if there is a listener,
            // because the listener wants to know the complete set of incremental roots.
            if (pi->mColor == black && MOZ_LIKELY(!mListener)) {
                continue;
            }

            // If the object is still marked gray by the GC, nothing could have gotten
            // hold of it, so it isn't an incremental root.
            if (pi->mParticipant == jsParticipant) {
                if (xpc_GCThingIsGrayCCThing(pi->mPointer)) {
                    continue;
                }
            } else if (pi->mParticipant == zoneParticipant) {
                JS::Zone *zone = static_cast<JS::Zone*>(pi->mPointer);
                if (js::ZoneGlobalsAreAllGray(zone)) {
                    continue;
                }
            } else {
                MOZ_ASSERT(false, "Non-JS thing with 0 refcount? Treating as live.");
            }

            // At this point, pi must be an incremental root.

            // If there's a listener, tell it about this root. We don't bother with the
            // optimization of skipping the Walk() if pi is black: it will just return
            // without doing anything and there's no need to make this case faster.
            if (MOZ_UNLIKELY(mListener)) {
                mListener->NoteIncrementalRoot((uint64_t)pi->mPointer);
            }

            GraphWalker<ScanBlackVisitor>(ScanBlackVisitor(mWhiteNodeCount, failed)).Walk(pi);
        }

        timeLog.Checkpoint("ScanIncrementalRoots::fix JS");
    }

    if (failed) {
        NS_ASSERTION(false, "Ran out of memory in ScanIncrementalRoots");
        CC_TELEMETRY(_OOM, true);
    }
}
  1.2895 +
// Bacon & Rajan's |ScanRoots|: color every node in the graph white
// (candidate garbage) or black (live). For incremental graph builds, first
// treat objects touched during building as live roots. Weak maps are scanned
// last, and the final coloring is described to mListener, if one is set.
void
nsCycleCollector::ScanRoots(bool aFullySynchGraphBuild)
{
    AutoRestore<bool> ar(mScanInProgress);
    MOZ_ASSERT(!mScanInProgress);
    mScanInProgress = true;
    mWhiteNodeCount = 0;
    MOZ_ASSERT(mIncrementalPhase == ScanAndCollectWhitePhase);

    if (!aFullySynchGraphBuild) {
        ScanIncrementalRoots();
    }

    TimeLog timeLog;

    // On the assumption that most nodes will be black, it's
    // probably faster to use a GraphWalker than a
    // NodePool::Enumerator.
    bool failed = false;
    scanVisitor sv(mWhiteNodeCount, failed, !aFullySynchGraphBuild);
    GraphWalker<scanVisitor>(sv).WalkFromRoots(mGraph);
    timeLog.Checkpoint("ScanRoots::WalkFromRoots");

    if (failed) {
        NS_ASSERTION(false, "Ran out of memory in ScanRoots");
        CC_TELEMETRY(_OOM, true);
    }

    // Scanning weak maps must be done last.
    ScanWeakMaps();
    timeLog.Checkpoint("ScanRoots::ScanWeakMaps");

    if (mListener) {
        mListener->BeginResults();

        // Describe each node's fate: externally-referenced roots, garbage,
        // or (with ICC) leftover grey nodes.
        NodePool::Enumerator etor(mGraph.mNodes);
        while (!etor.IsDone()) {
            PtrInfo *pi = etor.GetNext();
            if (!pi->mParticipant) {
                continue;
            }
            switch (pi->mColor) {
            case black:
                // Report only refcounted nodes (with a sane refcount) whose
                // refcount isn't fully accounted for by in-graph references.
                if (pi->mRefCount > 0 && pi->mRefCount < UINT32_MAX &&
                    pi->mInternalRefs != pi->mRefCount) {
                    mListener->DescribeRoot((uint64_t)pi->mPointer,
                                            pi->mInternalRefs);
                }
                break;
            case white:
                mListener->DescribeGarbage((uint64_t)pi->mPointer);
                break;
            case grey:
                // With incremental CC, we can end up with a grey object after
                // scanning if it is only reachable from an object that gets freed.
                break;
            }
        }

        mListener->End();
        mListener = nullptr;
        timeLog.Checkpoint("ScanRoots::listener");
    }
}
  1.2960 +
  1.2961 +
  1.2962 +////////////////////////////////////////////////////////////////////////
  1.2963 +// Bacon & Rajan's |CollectWhite| routine, somewhat modified.
  1.2964 +////////////////////////////////////////////////////////////////////////
  1.2965 +
// Unlink all white (garbage) nodes so their cycles self-destruct. Returns
// true if any white nodes were collected; advances to CleanupPhase.
bool
nsCycleCollector::CollectWhite()
{
    // Explanation of "somewhat modified": we have no way to collect the
    // set of whites "all at once", we have to ask each of them to drop
    // their outgoing links and assume this will cause the garbage cycle
    // to *mostly* self-destruct (except for the reference we continue
    // to hold).
    //
    // To do this "safely" we must make sure that the white nodes we're
    // operating on are stable for the duration of our operation. So we
    // make 3 sets of calls to language runtimes:
    //
    //   - Root(whites), which should pin the whites in memory.
    //   - Unlink(whites), which drops outgoing links on each white.
    //   - Unroot(whites), which returns the whites to normal GC.

    TimeLog timeLog;
    nsAutoTArray<PtrInfo*, 4000> whiteNodes;

    MOZ_ASSERT(mIncrementalPhase == ScanAndCollectWhitePhase);

    whiteNodes.SetCapacity(mWhiteNodeCount);
    uint32_t numWhiteGCed = 0;

    // Phase 1: gather and Root() every white node still in the graph.
    NodePool::Enumerator etor(mGraph.mNodes);
    while (!etor.IsDone())
    {
        PtrInfo *pinfo = etor.GetNext();
        if (pinfo->mColor == white && pinfo->mParticipant) {
            whiteNodes.AppendElement(pinfo);
            pinfo->mParticipant->Root(pinfo->mPointer);
            if (pinfo->mRefCount == 0) {
                // only JS objects have a refcount of 0
                ++numWhiteGCed;
            }
        }
    }

    uint32_t count = whiteNodes.Length();
    MOZ_ASSERT(numWhiteGCed <= count,
               "More freed GCed nodes than total freed nodes.");
    mResults.mFreedRefCounted += count - numWhiteGCed;
    mResults.mFreedGCed += numWhiteGCed;

    timeLog.Checkpoint("CollectWhite::Root");

    if (mBeforeUnlinkCB) {
        mBeforeUnlinkCB();
        timeLog.Checkpoint("CollectWhite::BeforeUnlinkCB");
    }

    // Phase 2: Unlink() each white node, dropping its outgoing edges.
    for (uint32_t i = 0; i < count; ++i) {
        PtrInfo *pinfo = whiteNodes.ElementAt(i);
        MOZ_ASSERT(pinfo->mParticipant, "Unlink shouldn't see objects removed from graph.");
        pinfo->mParticipant->Unlink(pinfo->mPointer);
#ifdef DEBUG
        if (mJSRuntime) {
            mJSRuntime->AssertNoObjectsToTrace(pinfo->mPointer);
        }
#endif
    }
    timeLog.Checkpoint("CollectWhite::Unlink");

    // Phase 3: Unroot() everything, letting the cycles self-destruct.
    for (uint32_t i = 0; i < count; ++i) {
        PtrInfo *pinfo = whiteNodes.ElementAt(i);
        MOZ_ASSERT(pinfo->mParticipant, "Unroot shouldn't see objects removed from graph.");
        pinfo->mParticipant->Unroot(pinfo->mPointer);
    }
    timeLog.Checkpoint("CollectWhite::Unroot");

    // Freed objects may have left snow-white children behind.
    nsCycleCollector_dispatchDeferredDeletion(false);
    mIncrementalPhase = CleanupPhase;

    return count > 0;
}
  1.3042 +
  1.3043 +
  1.3044 +////////////////////////
  1.3045 +// Memory reporting
  1.3046 +////////////////////////
  1.3047 +
// MallocSizeOf function used by SizeOfIncludingThis for the collector's
// own memory reports (see CollectReports below).
MOZ_DEFINE_MALLOC_SIZE_OF(CycleCollectorMallocSizeOf)
  1.3049 +
  1.3050 +NS_IMETHODIMP
  1.3051 +nsCycleCollector::CollectReports(nsIHandleReportCallback* aHandleReport,
  1.3052 +                                 nsISupports* aData)
  1.3053 +{
  1.3054 +    size_t objectSize, graphNodesSize, graphEdgesSize, weakMapsSize,
  1.3055 +           purpleBufferSize;
  1.3056 +    SizeOfIncludingThis(CycleCollectorMallocSizeOf,
  1.3057 +                        &objectSize,
  1.3058 +                        &graphNodesSize, &graphEdgesSize,
  1.3059 +                        &weakMapsSize,
  1.3060 +                        &purpleBufferSize);
  1.3061 +
  1.3062 +#define REPORT(_path, _amount, _desc)                                     \
  1.3063 +    do {                                                                  \
  1.3064 +        size_t amount = _amount;  /* evaluate |_amount| only once */      \
  1.3065 +        if (amount > 0) {                                                 \
  1.3066 +            nsresult rv;                                                  \
  1.3067 +            rv = aHandleReport->Callback(EmptyCString(),                  \
  1.3068 +                                         NS_LITERAL_CSTRING(_path),       \
  1.3069 +                                         KIND_HEAP, UNITS_BYTES, _amount, \
  1.3070 +                                         NS_LITERAL_CSTRING(_desc),       \
  1.3071 +                                         aData);                          \
  1.3072 +            if (NS_WARN_IF(NS_FAILED(rv)))                                \
  1.3073 +                return rv;                                                \
  1.3074 +        }                                                                 \
  1.3075 +    } while (0)
  1.3076 +
  1.3077 +    REPORT("explicit/cycle-collector/collector-object", objectSize,
  1.3078 +           "Memory used for the cycle collector object itself.");
  1.3079 +
  1.3080 +    REPORT("explicit/cycle-collector/graph-nodes", graphNodesSize,
  1.3081 +           "Memory used for the nodes of the cycle collector's graph. "
  1.3082 +           "This should be zero when the collector is idle.");
  1.3083 +
  1.3084 +    REPORT("explicit/cycle-collector/graph-edges", graphEdgesSize,
  1.3085 +           "Memory used for the edges of the cycle collector's graph. "
  1.3086 +           "This should be zero when the collector is idle.");
  1.3087 +
  1.3088 +    REPORT("explicit/cycle-collector/weak-maps", weakMapsSize,
  1.3089 +           "Memory used for the representation of weak maps in the "
  1.3090 +           "cycle collector's graph. "
  1.3091 +           "This should be zero when the collector is idle.");
  1.3092 +
  1.3093 +    REPORT("explicit/cycle-collector/purple-buffer", purpleBufferSize,
  1.3094 +           "Memory used for the cycle collector's purple buffer.");
  1.3095 +
  1.3096 +#undef REPORT
  1.3097 +
  1.3098 +    return NS_OK;
  1.3099 +};
  1.3100 +
  1.3101 +
  1.3102 +////////////////////////////////////////////////////////////////////////
  1.3103 +// Collector implementation
  1.3104 +////////////////////////////////////////////////////////////////////////
  1.3105 +
// Construct an idle collector. The initializer list order must match the
// member declaration order in the class. mThread captures the constructing
// thread so CheckThreadSafety() can later assert single-threaded use.
// Note: memory-reporter registration is deferred to RegisterJSRuntime(),
// because the reporter manager is not yet initialized at this point.
nsCycleCollector::nsCycleCollector() :
    mActivelyCollecting(false),
    mFreeingSnowWhite(false),
    mScanInProgress(false),
    mJSRuntime(nullptr),
    mIncrementalPhase(IdlePhase),
    mThread(NS_GetCurrentThread()),
    mWhiteNodeCount(0),
    mBeforeUnlinkCB(nullptr),
    mForgetSkippableCB(nullptr),
    mUnmergedNeeded(0),
    mMergedInARow(0),
    mJSPurpleBuffer(nullptr)
{
}
  1.3121 +
nsCycleCollector::~nsCycleCollector()
{
    // Registration happens lazily in RegisterJSRuntime(), but we unregister
    // unconditionally here. NOTE(review): presumably unregistering a
    // never-registered weak reporter is a harmless no-op -- verify.
    UnregisterWeakMemoryReporter(this);
}
  1.3126 +
// Attach the CycleCollectedJSRuntime to this collector so JS roots can be
// traversed during graph building. Also performs one-time registration of
// this collector as a weak memory reporter.
void
nsCycleCollector::RegisterJSRuntime(CycleCollectedJSRuntime *aJSRuntime)
{
    // Registering twice without an intervening ForgetJSRuntime() is a
    // collector fault.
    if (mJSRuntime)
        Fault("multiple registrations of cycle collector JS runtime", aJSRuntime);

    mJSRuntime = aJSRuntime;

    // We can't register as a reporter in nsCycleCollector() because that runs
    // before the memory reporter manager is initialized.  So we do it here
    // instead.
    static bool registered = false;
    if (!registered) {
        RegisterWeakMemoryReporter(this);
        registered = true;
    }
}
  1.3144 +
  1.3145 +void
  1.3146 +nsCycleCollector::ForgetJSRuntime()
  1.3147 +{
  1.3148 +    if (!mJSRuntime)
  1.3149 +        Fault("forgetting non-registered cycle collector JS runtime");
  1.3150 +
  1.3151 +    mJSRuntime = nullptr;
  1.3152 +}
  1.3153 +
  1.3154 +#ifdef DEBUG
  1.3155 +static bool
  1.3156 +HasParticipant(void *aPtr, nsCycleCollectionParticipant *aParti)
  1.3157 +{
  1.3158 +    if (aParti) {
  1.3159 +        return true;
  1.3160 +    }
  1.3161 +
  1.3162 +    nsXPCOMCycleCollectionParticipant *xcp;
  1.3163 +    ToParticipant(static_cast<nsISupports*>(aPtr), &xcp);
  1.3164 +    return xcp != nullptr;
  1.3165 +}
  1.3166 +#endif
  1.3167 +
// Record aPtr in the purple buffer so a future CC considers it as a possible
// cycle root. Silently ignored while a scan is in progress (see below).
// Must be called on the collector's owning thread.
MOZ_ALWAYS_INLINE void
nsCycleCollector::Suspect(void *aPtr, nsCycleCollectionParticipant *aParti,
                          nsCycleCollectingAutoRefCnt *aRefCnt)
{
    CheckThreadSafety();

    // Re-entering ::Suspect during collection used to be a fault, but
    // we are canonicalizing nsISupports pointers using QI, so we will
    // see some spurious refcount traffic here.

    if (MOZ_UNLIKELY(mScanInProgress)) {
        return;
    }

    MOZ_ASSERT(aPtr, "Don't suspect null pointers");

    // aParti may be null; then aPtr must be an nsISupports that QIs to the
    // XPCOM participant (HasParticipant checks this in DEBUG builds).
    MOZ_ASSERT(HasParticipant(aPtr, aParti),
               "Suspected nsISupports pointer must QI to nsXPCOMCycleCollectionParticipant");

    mPurpleBuf.Put(aPtr, aParti, aRefCnt);
}
  1.3189 +
// DEBUG-only: assert that we are running on the thread that created this
// collector (mThread). Compiles to an empty function in release builds.
void
nsCycleCollector::CheckThreadSafety()
{
#ifdef DEBUG
    nsIThread* currentThread = NS_GetCurrentThread();
    // XXXkhuey we can be called so late in shutdown that NS_GetCurrentThread
    // returns null (after the thread manager has shut down)
    MOZ_ASSERT(mThread == currentThread || !currentThread);
#endif
}
  1.3200 +
// The cycle collector uses the mark bitmap to discover what JS objects
// were reachable only from XPConnect roots that might participate in
// cycles. We ask the JS runtime whether we need to force a GC before
// this CC. It returns true on startup (before the mark bits have been set),
// and also when UnmarkGray has run out of stack.  We also force GCs on shut
// down to collect cycles involving both DOM and JS.
void
nsCycleCollector::FixGrayBits(bool aForceGC)
{
    CheckThreadSafety();

    // No JS runtime, no gray bits to fix.
    if (!mJSRuntime)
        return;

    if (!aForceGC) {
        mJSRuntime->FixWeakMappingGrayBits();

        bool needGC = mJSRuntime->NeedCollect();
        // Only do a telemetry ping for non-shutdown CCs.
        CC_TELEMETRY(_NEED_GC, needGC);
        if (!needGC)
            return;
        // We are GCing even though the caller didn't request it; record that
        // in the results for this CC.
        mResults.mForcedGC = true;
    }

    TimeLog timeLog;
    mJSRuntime->Collect(aForceGC ? JS::gcreason::SHUTDOWN_CC : JS::gcreason::CC_FORCED);
    timeLog.Checkpoint("GC()");
}
  1.3230 +
  1.3231 +void
  1.3232 +nsCycleCollector::CleanupAfterCollection()
  1.3233 +{
  1.3234 +    MOZ_ASSERT(mIncrementalPhase == CleanupPhase);
  1.3235 +    mGraph.Clear();
  1.3236 +
  1.3237 +    uint32_t interval = (uint32_t) ((TimeStamp::Now() - mCollectionStart).ToMilliseconds());
  1.3238 +#ifdef COLLECT_TIME_DEBUG
  1.3239 +    printf("cc: total cycle collector time was %ums\n", interval);
  1.3240 +    printf("cc: visited %u ref counted and %u GCed objects, freed %d ref counted and %d GCed objects",
  1.3241 +           mResults.mVisitedRefCounted, mResults.mVisitedGCed,
  1.3242 +           mResults.mFreedRefCounted, mResults.mFreedGCed);
  1.3243 +    uint32_t numVisited = mResults.mVisitedRefCounted + mResults.mVisitedGCed;
  1.3244 +    if (numVisited > 1000) {
  1.3245 +        uint32_t numFreed = mResults.mFreedRefCounted + mResults.mFreedGCed;
  1.3246 +        printf(" (%d%%)", 100 * numFreed / numVisited);
  1.3247 +    }
  1.3248 +    printf(".\ncc: \n");
  1.3249 +#endif
  1.3250 +    CC_TELEMETRY( , interval);
  1.3251 +    CC_TELEMETRY(_VISITED_REF_COUNTED, mResults.mVisitedRefCounted);
  1.3252 +    CC_TELEMETRY(_VISITED_GCED, mResults.mVisitedGCed);
  1.3253 +    CC_TELEMETRY(_COLLECTED, mWhiteNodeCount);
  1.3254 +
  1.3255 +    if (mJSRuntime) {
  1.3256 +        mJSRuntime->EndCycleCollectionCallback(mResults);
  1.3257 +    }
  1.3258 +    mIncrementalPhase = IdlePhase;
  1.3259 +}
  1.3260 +
  1.3261 +void
  1.3262 +nsCycleCollector::ShutdownCollect()
  1.3263 +{
  1.3264 +    SliceBudget unlimitedBudget;
  1.3265 +    uint32_t i;
  1.3266 +    for (i = 0; i < DEFAULT_SHUTDOWN_COLLECTIONS; ++i) {
  1.3267 +        if (!Collect(ShutdownCC, unlimitedBudget, nullptr)) {
  1.3268 +            break;
  1.3269 +        }
  1.3270 +    }
  1.3271 +    NS_WARN_IF_FALSE(i < NORMAL_SHUTDOWN_COLLECTIONS, "Extra shutdown CC");
  1.3272 +}
  1.3273 +
// Trace helper: print which collection phase is beginning, and on which
// thread. Compiles to a no-op unless DEBUG_PHASES is defined at build time.
static void
PrintPhase(const char *aPhase)
{
#ifdef DEBUG_PHASES
    printf("cc: begin %s on %s\n", aPhase,
           NS_IsMainThread() ? "mainthread" : "worker");
#endif
}
  1.3282 +
// Drive the incremental collection state machine: Idle -> GraphBuilding ->
// ScanAndCollectWhite -> Cleanup. Each loop iteration advances one phase;
// the loop stops when aBudget is exhausted or Cleanup completes. Returns
// true if any white (garbage) objects were collected. Re-entrant calls while
// already collecting or freeing snow-white objects return false immediately.
bool
nsCycleCollector::Collect(ccType aCCType,
                          SliceBudget &aBudget,
                          nsICycleCollectorListener *aManualListener)
{
    CheckThreadSafety();

    // This can legitimately happen in a few cases. See bug 383651.
    if (mActivelyCollecting || mFreeingSnowWhite) {
        return false;
    }
    mActivelyCollecting = true;

    bool startedIdle = (mIncrementalPhase == IdlePhase);
    bool collectedAny = false;

    // If the CC started idle, it will call BeginCollection, which
    // will do FreeSnowWhite, so it doesn't need to be done here.
    if (!startedIdle) {
        FreeSnowWhite(true);
    }

    bool finished = false;
    do {
        switch (mIncrementalPhase) {
        case IdlePhase:
            PrintPhase("BeginCollection");
            BeginCollection(aCCType, aManualListener);
            break;
        case GraphBuildingPhase:
            PrintPhase("MarkRoots");
            MarkRoots(aBudget);
            break;
        case ScanAndCollectWhitePhase:
            // We do ScanRoots and CollectWhite in a single slice to ensure
            // that we won't unlink a live object if a weak reference is
            // promoted to a strong reference after ScanRoots has finished.
            // See bug 926533.
            PrintPhase("ScanRoots");
            ScanRoots(startedIdle);
            PrintPhase("CollectWhite");
            collectedAny = CollectWhite();
            break;
        case CleanupPhase:
            PrintPhase("CleanupAfterCollection");
            CleanupAfterCollection();
            finished = true;
            break;
        }
    } while (!aBudget.checkOverBudget() && !finished);

    // Clear mActivelyCollecting here to ensure that a recursive call to
    // Collect() does something.
    mActivelyCollecting = false;

    if (aCCType != SliceCC && !startedIdle) {
        // We were in the middle of an incremental CC (using its own listener).
        // Somebody has forced a CC, so after having finished out the current CC,
        // run the CC again using the new listener.
        MOZ_ASSERT(mIncrementalPhase == IdlePhase);
        if (Collect(aCCType, aBudget, aManualListener)) {
            collectedAny = true;
        }
    }

    // Non-slice (manual/shutdown) CCs must always run to completion.
    MOZ_ASSERT_IF(aCCType != SliceCC, mIncrementalPhase == IdlePhase);

    return collectedAny;
}
  1.3352 +
// Any JS objects we have in the graph could die when we GC, but we
// don't want to abandon the current CC, because the graph contains
// information about purple roots. So we synchronously finish off
// the current CC.
void
nsCycleCollector::PrepareForGarbageCollection()
{
    if (mIncrementalPhase == IdlePhase) {
        MOZ_ASSERT(mGraph.IsEmpty(), "Non-empty graph when idle");
        MOZ_ASSERT(!mBuilder, "Non-null builder when idle");
        // Nothing in flight; just tear down the JS purple buffer, if any.
        if (mJSPurpleBuffer) {
            mJSPurpleBuffer->Destroy();
        }
        return;
    }

    SliceBudget unlimitedBudget;
    PrintPhase("PrepareForGarbageCollection");
    // Use SliceCC because we only want to finish the CC in progress.
    Collect(SliceCC, unlimitedBudget, nullptr);
    MOZ_ASSERT(mIncrementalPhase == IdlePhase);
}
  1.3375 +
  1.3376 +// Don't merge too many times in a row, and do at least a minimum
  1.3377 +// number of unmerged CCs in a row.
  1.3378 +static const uint32_t kMinConsecutiveUnmerged = 3;
  1.3379 +static const uint32_t kMaxConsecutiveMerged = 3;
  1.3380 +
  1.3381 +bool
  1.3382 +nsCycleCollector::ShouldMergeZones(ccType aCCType)
  1.3383 +{
  1.3384 +    if (!mJSRuntime) {
  1.3385 +        return false;
  1.3386 +    }
  1.3387 +
  1.3388 +    MOZ_ASSERT(mUnmergedNeeded <= kMinConsecutiveUnmerged);
  1.3389 +    MOZ_ASSERT(mMergedInARow <= kMaxConsecutiveMerged);
  1.3390 +
  1.3391 +    if (mMergedInARow == kMaxConsecutiveMerged) {
  1.3392 +        MOZ_ASSERT(mUnmergedNeeded == 0);
  1.3393 +        mUnmergedNeeded = kMinConsecutiveUnmerged;
  1.3394 +    }
  1.3395 +
  1.3396 +    if (mUnmergedNeeded > 0) {
  1.3397 +        mUnmergedNeeded--;
  1.3398 +        mMergedInARow = 0;
  1.3399 +        return false;
  1.3400 +    }
  1.3401 +
  1.3402 +    if (aCCType == SliceCC && mJSRuntime->UsefulToMergeZones()) {
  1.3403 +        mMergedInARow++;
  1.3404 +        return true;
  1.3405 +    } else {
  1.3406 +        mMergedInARow = 0;
  1.3407 +        return false;
  1.3408 +    }
  1.3409 +}
  1.3410 +
// First phase of a CC: notify the JS runtime, choose a listener, force any
// GC needed so the gray bits are valid, then populate mGraph with the root
// set (JS runtime roots plus the purple buffer). Exits with the collector in
// GraphBuildingPhase.
void
nsCycleCollector::BeginCollection(ccType aCCType,
                                  nsICycleCollectorListener *aManualListener)
{
    TimeLog timeLog;
    MOZ_ASSERT(mIncrementalPhase == IdlePhase);

    mCollectionStart = TimeStamp::Now();

    if (mJSRuntime) {
        mJSRuntime->BeginCycleCollectionCallback();
        timeLog.Checkpoint("BeginCycleCollectionCallback()");
    }

    bool isShutdown = (aCCType == ShutdownCC);

    // Set up the listener for this CC.
    MOZ_ASSERT_IF(isShutdown, !aManualListener);
    MOZ_ASSERT(!mListener, "Forgot to clear a previous listener?");
    mListener = aManualListener;
    aManualListener = nullptr;
    // No listener supplied: consult mParams about whether to log this CC.
    if (!mListener && mParams.LogThisCC(isShutdown)) {
        nsRefPtr<nsCycleCollectorLogger> logger = new nsCycleCollectorLogger();
        if (mParams.AllTracesThisCC(isShutdown)) {
            logger->SetAllTraces();
        }
        mListener = logger.forget();
    }

    bool forceGC = isShutdown;
    if (!forceGC && mListener) {
        // On a WantAllTraces CC, force a synchronous global GC to prevent
        // hijinks from ForgetSkippable and compartmental GCs.
        mListener->GetWantAllTraces(&forceGC);
    }
    FixGrayBits(forceGC);

    FreeSnowWhite(true);

    // A listener that fails to start is dropped for this CC.
    if (mListener && NS_FAILED(mListener->Begin())) {
        mListener = nullptr;
    }

    // Set up the data structures for building the graph.
    mGraph.Init();
    mResults.Init();
    bool mergeZones = ShouldMergeZones(aCCType);
    mResults.mMergedZones = mergeZones;

    MOZ_ASSERT(!mBuilder, "Forgot to clear mBuilder");
    mBuilder = new GCGraphBuilder(mGraph, mResults, mJSRuntime, mListener, mergeZones);

    if (mJSRuntime) {
        mJSRuntime->TraverseRoots(*mBuilder);
        timeLog.Checkpoint("mJSRuntime->TraverseRoots()");
    }

    // Suspect() ignores calls while mScanInProgress is set; AutoRestore puts
    // the flag back when this function returns.
    AutoRestore<bool> ar(mScanInProgress);
    MOZ_ASSERT(!mScanInProgress);
    mScanInProgress = true;
    mPurpleBuf.SelectPointers(*mBuilder);
    timeLog.Checkpoint("SelectPointers()");

    // We've finished adding roots, and everything in the graph is a root.
    mGraph.mRootCount = mGraph.MapCount();

    mCurrNode = new NodePool::Enumerator(mGraph.mNodes);
    mIncrementalPhase = GraphBuildingPhase;
}
  1.3480 +
// Number of objects currently in the purple buffer (suspected cycle roots).
// Must be called on the collector's owning thread.
uint32_t
nsCycleCollector::SuspectedCount()
{
    CheckThreadSafety();
    return mPurpleBuf.Count();
}
  1.3487 +
// Shut this collector down. Snow-white objects are always freed; the full
// shutdown collections run unconditionally in DEBUG builds, and in release
// builds only when MOZ_CC_RUN_DURING_SHUTDOWN is set in the environment.
void
nsCycleCollector::Shutdown()
{
    CheckThreadSafety();

    // Always delete snow white objects.
    FreeSnowWhite(true);

#ifndef DEBUG
    if (PR_GetEnv("MOZ_CC_RUN_DURING_SHUTDOWN"))
#endif
    {
        ShutdownCollect();
    }
}
  1.3503 +
  1.3504 +void
  1.3505 +nsCycleCollector::RemoveObjectFromGraph(void *aObj)
  1.3506 +{
  1.3507 +    if (mIncrementalPhase == IdlePhase) {
  1.3508 +        return;
  1.3509 +    }
  1.3510 +
  1.3511 +    if (PtrInfo *pinfo = mGraph.FindNode(aObj)) {
  1.3512 +        mGraph.RemoveNodeFromMap(aObj);
  1.3513 +
  1.3514 +        pinfo->mPointer = nullptr;
  1.3515 +        pinfo->mParticipant = nullptr;
  1.3516 +    }
  1.3517 +}
  1.3518 +
  1.3519 +void
  1.3520 +nsCycleCollector::SizeOfIncludingThis(mozilla::MallocSizeOf aMallocSizeOf,
  1.3521 +                                      size_t *aObjectSize,
  1.3522 +                                      size_t *aGraphNodesSize,
  1.3523 +                                      size_t *aGraphEdgesSize,
  1.3524 +                                      size_t *aWeakMapsSize,
  1.3525 +                                      size_t *aPurpleBufferSize) const
  1.3526 +{
  1.3527 +    *aObjectSize = aMallocSizeOf(this);
  1.3528 +
  1.3529 +    mGraph.SizeOfExcludingThis(aMallocSizeOf, aGraphNodesSize, aGraphEdgesSize,
  1.3530 +                               aWeakMapsSize);
  1.3531 +
  1.3532 +    *aPurpleBufferSize = mPurpleBuf.SizeOfExcludingThis(aMallocSizeOf);
  1.3533 +
  1.3534 +    // These fields are deliberately not measured:
  1.3535 +    // - mJSRuntime: because it's non-owning and measured by JS reporters.
  1.3536 +    // - mParams: because it only contains scalars.
  1.3537 +}
  1.3538 +
// Lazily create the JS purple buffer. Returns the (possibly just-created)
// buffer.
JSPurpleBuffer*
nsCycleCollector::GetJSPurpleBuffer()
{
  if (!mJSPurpleBuffer) {
    // JSPurpleBuffer keeps itself alive, but we need to create it in such way
    // that it ends up in the normal purple buffer. That happens when
    // nsRefPtr goes out of the scope and calls Release.
    // NOTE: the local below is NOT dead code. The constructor receives
    // mJSPurpleBuffer (presumably by reference, since it must be non-null by
    // the return below -- verify against JSPurpleBuffer's ctor), and the
    // nsRefPtr's destructor-time Release is the whole point.
    nsRefPtr<JSPurpleBuffer> pb = new JSPurpleBuffer(mJSPurpleBuffer);
  }
  return mJSPurpleBuffer;
}
  1.3550 +
  1.3551 +////////////////////////////////////////////////////////////////////////
  1.3552 +// Module public API (exported in nsCycleCollector.h)
  1.3553 +// Just functions that redirect into the singleton, once it's built.
  1.3554 +////////////////////////////////////////////////////////////////////////
  1.3555 +
  1.3556 +void
  1.3557 +nsCycleCollector_registerJSRuntime(CycleCollectedJSRuntime *rt)
  1.3558 +{
  1.3559 +    CollectorData *data = sCollectorData.get();
  1.3560 +
  1.3561 +    // We should have started the cycle collector by now.
  1.3562 +    MOZ_ASSERT(data);
  1.3563 +    MOZ_ASSERT(data->mCollector);
  1.3564 +    // But we shouldn't already have a runtime.
  1.3565 +    MOZ_ASSERT(!data->mRuntime);
  1.3566 +
  1.3567 +    data->mRuntime = rt;
  1.3568 +    data->mCollector->RegisterJSRuntime(rt);
  1.3569 +}
  1.3570 +
  1.3571 +void
  1.3572 +nsCycleCollector_forgetJSRuntime()
  1.3573 +{
  1.3574 +    CollectorData *data = sCollectorData.get();
  1.3575 +
  1.3576 +    // We should have started the cycle collector by now.
  1.3577 +    MOZ_ASSERT(data);
  1.3578 +    // And we shouldn't have already forgotten our runtime.
  1.3579 +    MOZ_ASSERT(data->mRuntime);
  1.3580 +
  1.3581 +    // But it may have shutdown already.
  1.3582 +    if (data->mCollector) {
  1.3583 +        data->mCollector->ForgetJSRuntime();
  1.3584 +        data->mRuntime = nullptr;
  1.3585 +    } else {
  1.3586 +        data->mRuntime = nullptr;
  1.3587 +        delete data;
  1.3588 +        sCollectorData.set(nullptr);
  1.3589 +    }
  1.3590 +}
  1.3591 +
  1.3592 +/* static */ CycleCollectedJSRuntime*
  1.3593 +CycleCollectedJSRuntime::Get()
  1.3594 +{
  1.3595 +    CollectorData* data = sCollectorData.get();
  1.3596 +    if (data) {
  1.3597 +        return data->mRuntime;
  1.3598 +    }
  1.3599 +    return nullptr;
  1.3600 +}
  1.3601 +
  1.3602 +
  1.3603 +namespace mozilla {
  1.3604 +namespace cyclecollector {
  1.3605 +
  1.3606 +void
  1.3607 +HoldJSObjectsImpl(void* aHolder, nsScriptObjectTracer* aTracer)
  1.3608 +{
  1.3609 +    CollectorData* data = sCollectorData.get();
  1.3610 +
  1.3611 +    // We should have started the cycle collector by now.
  1.3612 +    MOZ_ASSERT(data);
  1.3613 +    MOZ_ASSERT(data->mCollector);
  1.3614 +    // And we should have a runtime.
  1.3615 +    MOZ_ASSERT(data->mRuntime);
  1.3616 +
  1.3617 +    data->mRuntime->AddJSHolder(aHolder, aTracer);
  1.3618 +}
  1.3619 +
  1.3620 +void
  1.3621 +HoldJSObjectsImpl(nsISupports* aHolder)
  1.3622 +{
  1.3623 +    nsXPCOMCycleCollectionParticipant* participant;
  1.3624 +    CallQueryInterface(aHolder, &participant);
  1.3625 +    MOZ_ASSERT(participant, "Failed to QI to nsXPCOMCycleCollectionParticipant!");
  1.3626 +    MOZ_ASSERT(participant->CheckForRightISupports(aHolder),
  1.3627 +               "The result of QIing a JS holder should be the same as ToSupports");
  1.3628 +
  1.3629 +    HoldJSObjectsImpl(aHolder, participant);
  1.3630 +}
  1.3631 +
  1.3632 +void
  1.3633 +DropJSObjectsImpl(void* aHolder)
  1.3634 +{
  1.3635 +    CollectorData* data = sCollectorData.get();
  1.3636 +
  1.3637 +    // We should have started the cycle collector by now, and not completely
  1.3638 +    // shut down.
  1.3639 +    MOZ_ASSERT(data);
  1.3640 +    // And we should have a runtime.
  1.3641 +    MOZ_ASSERT(data->mRuntime);
  1.3642 +
  1.3643 +    data->mRuntime->RemoveJSHolder(aHolder);
  1.3644 +}
  1.3645 +
  1.3646 +void
  1.3647 +DropJSObjectsImpl(nsISupports* aHolder)
  1.3648 +{
  1.3649 +#ifdef DEBUG
  1.3650 +    nsXPCOMCycleCollectionParticipant* participant;
  1.3651 +    CallQueryInterface(aHolder, &participant);
  1.3652 +    MOZ_ASSERT(participant, "Failed to QI to nsXPCOMCycleCollectionParticipant!");
  1.3653 +    MOZ_ASSERT(participant->CheckForRightISupports(aHolder),
  1.3654 +               "The result of QIing a JS holder should be the same as ToSupports");
  1.3655 +#endif
  1.3656 +    DropJSObjectsImpl(static_cast<void*>(aHolder));
  1.3657 +}
  1.3658 +
  1.3659 +#ifdef DEBUG
  1.3660 +bool
  1.3661 +IsJSHolder(void* aHolder)
  1.3662 +{
  1.3663 +    CollectorData *data = sCollectorData.get();
  1.3664 +
  1.3665 +    // We should have started the cycle collector by now, and not completely
  1.3666 +    // shut down.
  1.3667 +    MOZ_ASSERT(data);
  1.3668 +    // And we should have a runtime.
  1.3669 +    MOZ_ASSERT(data->mRuntime);
  1.3670 +
  1.3671 +    return data->mRuntime->IsJSHolder(aHolder);
  1.3672 +}
  1.3673 +#endif
  1.3674 +
  1.3675 +void
  1.3676 +DeferredFinalize(nsISupports* aSupports)
  1.3677 +{
  1.3678 +    CollectorData *data = sCollectorData.get();
  1.3679 +
  1.3680 +    // We should have started the cycle collector by now, and not completely
  1.3681 +    // shut down.
  1.3682 +    MOZ_ASSERT(data);
  1.3683 +    // And we should have a runtime.
  1.3684 +    MOZ_ASSERT(data->mRuntime);
  1.3685 +
  1.3686 +    data->mRuntime->DeferredFinalize(aSupports);
  1.3687 +}
  1.3688 +
  1.3689 +void
  1.3690 +DeferredFinalize(DeferredFinalizeAppendFunction aAppendFunc,
  1.3691 +                 DeferredFinalizeFunction aFunc,
  1.3692 +                 void* aThing)
  1.3693 +{
  1.3694 +    CollectorData *data = sCollectorData.get();
  1.3695 +
  1.3696 +    // We should have started the cycle collector by now, and not completely
  1.3697 +    // shut down.
  1.3698 +    MOZ_ASSERT(data);
  1.3699 +    // And we should have a runtime.
  1.3700 +    MOZ_ASSERT(data->mRuntime);
  1.3701 +
  1.3702 +    data->mRuntime->DeferredFinalize(aAppendFunc, aFunc, aThing);
  1.3703 +}
  1.3704 +
  1.3705 +} // namespace cyclecollector
  1.3706 +} // namespace mozilla
  1.3707 +
  1.3708 +
  1.3709 +MOZ_NEVER_INLINE static void
  1.3710 +SuspectAfterShutdown(void* n, nsCycleCollectionParticipant* cp,
  1.3711 +                     nsCycleCollectingAutoRefCnt* aRefCnt,
  1.3712 +                     bool* aShouldDelete)
  1.3713 +{
  1.3714 +    if (aRefCnt->get() == 0) {
  1.3715 +        if (!aShouldDelete) {
  1.3716 +            // The CC is shut down, so we can't be in the middle of an ICC.
  1.3717 +            CanonicalizeParticipant(&n, &cp);
  1.3718 +            aRefCnt->stabilizeForDeletion();
  1.3719 +            cp->DeleteCycleCollectable(n);
  1.3720 +        } else {
  1.3721 +            *aShouldDelete = true;
  1.3722 +        }
  1.3723 +    } else {
  1.3724 +        // Make sure we'll get called again.
  1.3725 +        aRefCnt->RemoveFromPurpleBuffer();
  1.3726 +    }
  1.3727 +}
  1.3728 +
  1.3729 +void
  1.3730 +NS_CycleCollectorSuspect3(void *n, nsCycleCollectionParticipant *cp,
  1.3731 +                          nsCycleCollectingAutoRefCnt *aRefCnt,
  1.3732 +                          bool* aShouldDelete)
  1.3733 +{
  1.3734 +    CollectorData *data = sCollectorData.get();
  1.3735 +
  1.3736 +    // We should have started the cycle collector by now.
  1.3737 +    MOZ_ASSERT(data);
  1.3738 +
  1.3739 +    if (MOZ_LIKELY(data->mCollector)) {
  1.3740 +        data->mCollector->Suspect(n, cp, aRefCnt);
  1.3741 +        return;
  1.3742 +    }
  1.3743 +    SuspectAfterShutdown(n, cp, aRefCnt, aShouldDelete);
  1.3744 +}
  1.3745 +
  1.3746 +uint32_t
  1.3747 +nsCycleCollector_suspectedCount()
  1.3748 +{
  1.3749 +    CollectorData *data = sCollectorData.get();
  1.3750 +
  1.3751 +    // We should have started the cycle collector by now.
  1.3752 +    MOZ_ASSERT(data);
  1.3753 +
  1.3754 +    if (!data->mCollector) {
  1.3755 +        return 0;
  1.3756 +    }
  1.3757 +
  1.3758 +    return data->mCollector->SuspectedCount();
  1.3759 +}
  1.3760 +
// One-time process setup: initialize the thread-local slot holding each
// thread's CollectorData. Main-thread only; returns false on failure.
bool
nsCycleCollector_init()
{
    MOZ_ASSERT(NS_IsMainThread(), "Wrong thread!");
    MOZ_ASSERT(!sCollectorData.initialized(), "Called twice!?");

    return sCollectorData.init();
}
  1.3769 +
  1.3770 +void
  1.3771 +nsCycleCollector_startup()
  1.3772 +{
  1.3773 +    MOZ_ASSERT(sCollectorData.initialized(),
  1.3774 +               "Forgot to call nsCycleCollector_init!");
  1.3775 +    if (sCollectorData.get()) {
  1.3776 +        MOZ_CRASH();
  1.3777 +    }
  1.3778 +
  1.3779 +    CollectorData* data = new CollectorData;
  1.3780 +    data->mCollector = new nsCycleCollector();
  1.3781 +    data->mRuntime = nullptr;
  1.3782 +
  1.3783 +    sCollectorData.set(data);
  1.3784 +}
  1.3785 +
  1.3786 +void
  1.3787 +nsCycleCollector_setBeforeUnlinkCallback(CC_BeforeUnlinkCallback aCB)
  1.3788 +{
  1.3789 +    CollectorData *data = sCollectorData.get();
  1.3790 +
  1.3791 +    // We should have started the cycle collector by now.
  1.3792 +    MOZ_ASSERT(data);
  1.3793 +    MOZ_ASSERT(data->mCollector);
  1.3794 +
  1.3795 +    data->mCollector->SetBeforeUnlinkCallback(aCB);
  1.3796 +}
  1.3797 +
  1.3798 +void
  1.3799 +nsCycleCollector_setForgetSkippableCallback(CC_ForgetSkippableCallback aCB)
  1.3800 +{
  1.3801 +    CollectorData *data = sCollectorData.get();
  1.3802 +
  1.3803 +    // We should have started the cycle collector by now.
  1.3804 +    MOZ_ASSERT(data);
  1.3805 +    MOZ_ASSERT(data->mCollector);
  1.3806 +
  1.3807 +    data->mCollector->SetForgetSkippableCallback(aCB);
  1.3808 +}
  1.3809 +
  1.3810 +void
  1.3811 +nsCycleCollector_forgetSkippable(bool aRemoveChildlessNodes,
  1.3812 +                                 bool aAsyncSnowWhiteFreeing)
  1.3813 +{
  1.3814 +    CollectorData *data = sCollectorData.get();
  1.3815 +
  1.3816 +    // We should have started the cycle collector by now.
  1.3817 +    MOZ_ASSERT(data);
  1.3818 +    MOZ_ASSERT(data->mCollector);
  1.3819 +
  1.3820 +    PROFILER_LABEL("CC", "nsCycleCollector_forgetSkippable");
  1.3821 +    TimeLog timeLog;
  1.3822 +    data->mCollector->ForgetSkippable(aRemoveChildlessNodes,
  1.3823 +                                      aAsyncSnowWhiteFreeing);
  1.3824 +    timeLog.Checkpoint("ForgetSkippable()");
  1.3825 +}
  1.3826 +
  1.3827 +void
  1.3828 +nsCycleCollector_dispatchDeferredDeletion(bool aContinuation)
  1.3829 +{
  1.3830 +    CollectorData *data = sCollectorData.get();
  1.3831 +
  1.3832 +    if (!data || !data->mRuntime) {
  1.3833 +        return;
  1.3834 +    }
  1.3835 +
  1.3836 +    data->mRuntime->DispatchDeferredDeletion(aContinuation);
  1.3837 +}
  1.3838 +
  1.3839 +bool
  1.3840 +nsCycleCollector_doDeferredDeletion()
  1.3841 +{
  1.3842 +    CollectorData *data = sCollectorData.get();
  1.3843 +
  1.3844 +    // We should have started the cycle collector by now.
  1.3845 +    MOZ_ASSERT(data);
  1.3846 +    MOZ_ASSERT(data->mCollector);
  1.3847 +    MOZ_ASSERT(data->mRuntime);
  1.3848 +
  1.3849 +    return data->mCollector->FreeSnowWhite(false);
  1.3850 +}
  1.3851 +
  1.3852 +void
  1.3853 +nsCycleCollector_collect(nsICycleCollectorListener *aManualListener)
  1.3854 +{
  1.3855 +    CollectorData *data = sCollectorData.get();
  1.3856 +
  1.3857 +    // We should have started the cycle collector by now.
  1.3858 +    MOZ_ASSERT(data);
  1.3859 +    MOZ_ASSERT(data->mCollector);
  1.3860 +
  1.3861 +    PROFILER_LABEL("CC", "nsCycleCollector_collect");
  1.3862 +    SliceBudget unlimitedBudget;
  1.3863 +    data->mCollector->Collect(ManualCC, unlimitedBudget, aManualListener);
  1.3864 +}
  1.3865 +
  1.3866 +void
  1.3867 +nsCycleCollector_collectSlice(int64_t aSliceTime)
  1.3868 +{
  1.3869 +    CollectorData *data = sCollectorData.get();
  1.3870 +
  1.3871 +    // We should have started the cycle collector by now.
  1.3872 +    MOZ_ASSERT(data);
  1.3873 +    MOZ_ASSERT(data->mCollector);
  1.3874 +
  1.3875 +    PROFILER_LABEL("CC", "nsCycleCollector_collectSlice");
  1.3876 +    SliceBudget budget;
  1.3877 +    if (aSliceTime > 0) {
  1.3878 +        budget = SliceBudget::TimeBudget(aSliceTime);
  1.3879 +    } else if (aSliceTime == 0) {
  1.3880 +        budget = SliceBudget::WorkBudget(1);
  1.3881 +    }
  1.3882 +    data->mCollector->Collect(SliceCC, budget, nullptr);
  1.3883 +}
  1.3884 +
  1.3885 +void
  1.3886 +nsCycleCollector_prepareForGarbageCollection()
  1.3887 +{
  1.3888 +    CollectorData *data = sCollectorData.get();
  1.3889 +
  1.3890 +    MOZ_ASSERT(data);
  1.3891 +
  1.3892 +    if (!data->mCollector) {
  1.3893 +        return;
  1.3894 +    }
  1.3895 +
  1.3896 +    data->mCollector->PrepareForGarbageCollection();
  1.3897 +}
  1.3898 +
  1.3899 +void
  1.3900 +nsCycleCollector_shutdown()
  1.3901 +{
  1.3902 +    CollectorData *data = sCollectorData.get();
  1.3903 +
  1.3904 +    if (data) {
  1.3905 +        MOZ_ASSERT(data->mCollector);
  1.3906 +        PROFILER_LABEL("CC", "nsCycleCollector_shutdown");
  1.3907 +        data->mCollector->Shutdown();
  1.3908 +        data->mCollector = nullptr;
  1.3909 +        if (!data->mRuntime) {
  1.3910 +          delete data;
  1.3911 +          sCollectorData.set(nullptr);
  1.3912 +        }
  1.3913 +    }
  1.3914 +}

mercurial