js/src/gc/Heap.h

changeset 6474c204b198
     1.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
     1.2 +++ b/js/src/gc/Heap.h	Wed Dec 31 06:09:35 2014 +0100
     1.3 @@ -0,0 +1,1148 @@
     1.4 +/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
     1.5 + * vim: set ts=8 sts=4 et sw=4 tw=99:
     1.6 + * This Source Code Form is subject to the terms of the Mozilla Public
     1.7 + * License, v. 2.0. If a copy of the MPL was not distributed with this
     1.8 + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
     1.9 +
    1.10 +#ifndef gc_Heap_h
    1.11 +#define gc_Heap_h
    1.12 +
    1.13 +#include "mozilla/Attributes.h"
    1.14 +#include "mozilla/PodOperations.h"
    1.15 +
    1.16 +#include <stddef.h>
    1.17 +#include <stdint.h>
    1.18 +
    1.19 +#include "jspubtd.h"
    1.20 +#include "jstypes.h"
    1.21 +#include "jsutil.h"
    1.22 +
    1.23 +#include "ds/BitArray.h"
    1.24 +#include "gc/Memory.h"
    1.25 +#include "js/HeapAPI.h"
    1.26 +
    1.27 +struct JSCompartment;
    1.28 +
    1.29 +struct JSRuntime;
    1.30 +
    1.31 +namespace JS {
    1.32 +namespace shadow {
    1.33 +class Runtime;
    1.34 +}
    1.35 +}
    1.36 +
    1.37 +namespace js {
    1.38 +
    1.39 +class FreeOp;
    1.40 +
    1.41 +namespace gc {
    1.42 +
    1.43 +struct Arena;
    1.44 +struct ArenaList;
    1.45 +struct ArenaHeader;
    1.46 +struct Chunk;
    1.47 +
    1.48 +/*
    1.49 + * This flag allows an allocation site to request a specific heap based upon the
    1.50 + * estimated lifetime or lifetime requirements of objects allocated from that
    1.51 + * site.
    1.52 + */
    1.53 +enum InitialHeap {
    1.54 +    DefaultHeap,
    1.55 +    TenuredHeap
    1.56 +};
    1.57 +
    1.58 +/* The GC allocation kinds. */
    1.59 +enum AllocKind {
    1.60 +    FINALIZE_OBJECT0,
    1.61 +    FINALIZE_OBJECT0_BACKGROUND,
    1.62 +    FINALIZE_OBJECT2,
    1.63 +    FINALIZE_OBJECT2_BACKGROUND,
    1.64 +    FINALIZE_OBJECT4,
    1.65 +    FINALIZE_OBJECT4_BACKGROUND,
    1.66 +    FINALIZE_OBJECT8,
    1.67 +    FINALIZE_OBJECT8_BACKGROUND,
    1.68 +    FINALIZE_OBJECT12,
    1.69 +    FINALIZE_OBJECT12_BACKGROUND,
    1.70 +    FINALIZE_OBJECT16,
    1.71 +    FINALIZE_OBJECT16_BACKGROUND,
    1.72 +    FINALIZE_OBJECT_LAST = FINALIZE_OBJECT16_BACKGROUND,
    1.73 +    FINALIZE_SCRIPT,
    1.74 +    FINALIZE_LAZY_SCRIPT,
    1.75 +    FINALIZE_SHAPE,
    1.76 +    FINALIZE_BASE_SHAPE,
    1.77 +    FINALIZE_TYPE_OBJECT,
    1.78 +    FINALIZE_FAT_INLINE_STRING,
    1.79 +    FINALIZE_STRING,
    1.80 +    FINALIZE_EXTERNAL_STRING,
    1.81 +    FINALIZE_JITCODE,
    1.82 +    FINALIZE_LAST = FINALIZE_JITCODE
    1.83 +};
    1.84 +
    1.85 +static const unsigned FINALIZE_LIMIT = FINALIZE_LAST + 1;
    1.86 +static const unsigned FINALIZE_OBJECT_LIMIT = FINALIZE_OBJECT_LAST + 1;
    1.87 +
    1.88 +/*
    1.89 + * This must be an upper bound, but we do not need the least upper bound, so
    1.90 + * we just exclude non-background objects.
    1.91 + */
    1.92 +static const size_t MAX_BACKGROUND_FINALIZE_KINDS = FINALIZE_LIMIT - FINALIZE_OBJECT_LIMIT / 2;
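          +
          +/*
          + * Worked example (illustrative, derived from the enum above): FINALIZE_LIMIT
          + * is 21 and FINALIZE_OBJECT_LIMIT is 12, so this bound evaluates to
          + * 21 - 12/2 = 15, covering the 6 background object kinds plus the 9
          + * non-object kinds.
          + */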
    1.93 +
    1.94 +/*
    1.95 + * A GC cell is the base class for all GC things.
    1.96 + */
    1.97 +struct Cell
    1.98 +{
    1.99 +  public:
   1.100 +    inline ArenaHeader *arenaHeader() const;
   1.101 +    inline AllocKind tenuredGetAllocKind() const;
   1.102 +    MOZ_ALWAYS_INLINE bool isMarked(uint32_t color = BLACK) const;
   1.103 +    MOZ_ALWAYS_INLINE bool markIfUnmarked(uint32_t color = BLACK) const;
   1.104 +    MOZ_ALWAYS_INLINE void unmark(uint32_t color) const;
   1.105 +
   1.106 +    inline JSRuntime *runtimeFromMainThread() const;
   1.107 +    inline JS::shadow::Runtime *shadowRuntimeFromMainThread() const;
   1.108 +    inline JS::Zone *tenuredZone() const;
   1.109 +    inline JS::Zone *tenuredZoneFromAnyThread() const;
   1.110 +    inline bool tenuredIsInsideZone(JS::Zone *zone) const;
   1.111 +
   1.112 +    // Note: Unrestricted access to the runtime of a GC thing from an arbitrary
   1.113 +    // thread can easily lead to races. Use this method very carefully.
   1.114 +    inline JSRuntime *runtimeFromAnyThread() const;
   1.115 +    inline JS::shadow::Runtime *shadowRuntimeFromAnyThread() const;
   1.116 +
   1.117 +#ifdef DEBUG
   1.118 +    inline bool isAligned() const;
   1.119 +    inline bool isTenured() const;
   1.120 +#endif
   1.121 +
   1.122 +  protected:
   1.123 +    inline uintptr_t address() const;
   1.124 +    inline Chunk *chunk() const;
   1.125 +};
   1.126 +
   1.127 +/*
    1.128 + * The mark bitmap has one bit for each GC cell. For multi-cell GC things this
    1.129 + * wastes space but allows us to avoid expensive divisions by the thing's size
    1.130 + * when accessing the bitmap. In addition this allows us to use some bits for
    1.131 + * colored marking during the cycle GC.
   1.132 + */
   1.133 +const size_t ArenaCellCount = size_t(1) << (ArenaShift - CellShift);
   1.134 +const size_t ArenaBitmapBits = ArenaCellCount;
   1.135 +const size_t ArenaBitmapBytes = ArenaBitmapBits / 8;
   1.136 +const size_t ArenaBitmapWords = ArenaBitmapBits / JS_BITS_PER_WORD;
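          +
          +/*
          + * Illustrative numbers, assuming ArenaShift == 12 (4 KiB arenas) and
          + * CellShift == 3 (8-byte cells) as defined in js/HeapAPI.h: ArenaCellCount
          + * is then 512, ArenaBitmapBytes is 64, and ArenaBitmapWords is 8 on 64-bit
          + * platforms (16 on 32-bit).
          + */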
   1.137 +
   1.138 +/*
   1.139 + * A FreeSpan represents a contiguous sequence of free cells in an Arena.
   1.140 + * |first| is the address of the first free cell in the span. |last| is the
   1.141 + * address of the last free cell in the span. This last cell holds a FreeSpan
   1.142 + * data structure for the next span unless this is the last span on the list
   1.143 + * of spans in the arena. For this last span |last| points to the last byte of
   1.144 + * the last thing in the arena and no linkage is stored there, so
   1.145 + * |last| == arenaStart + ArenaSize - 1. If the space at the arena end is
   1.146 + * fully used this last span is empty and |first| == |last + 1|.
   1.147 + *
   1.148 + * Thus |first| < |last| implies that we have either the last span with at least
   1.149 + * one element or that the span is not the last and contains at least 2
    1.150 + * elements. In both cases, to allocate a thing from this span we simply need
    1.151 + * to increment |first| by the allocation size.
   1.152 + *
   1.153 + * |first| == |last| implies that we have a one element span that records the
   1.154 + * next span. So to allocate from it we need to update the span list head
   1.155 + * with a copy of the span stored at |last| address so the following
   1.156 + * allocations will use that span.
   1.157 + *
   1.158 + * |first| > |last| implies that we have an empty last span and the arena is
   1.159 + * fully used.
   1.160 + *
    1.161 + * Also, only for the last span is (|last| & 1) != 0, as all allocation sizes are
   1.162 + * multiples of CellSize.
   1.163 + */
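          +
          +/*
          + * A worked example with made-up addresses: take an arena at 0x10000 holding
          + * 16-byte things, and a non-last span with first == 0x10020 and
          + * last == 0x10040. The span covers three free cells. The first two
          + * allocations return 0x10020 and 0x10030 by bumping |first|; once
          + * first == last, the next allocation copies the FreeSpan stored in the cell
          + * at 0x10040 over the list head and returns 0x10040 itself.
          + */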
   1.164 +struct FreeSpan
   1.165 +{
   1.166 +    uintptr_t   first;
   1.167 +    uintptr_t   last;
   1.168 +
   1.169 +  public:
   1.170 +    FreeSpan() {}
   1.171 +
   1.172 +    FreeSpan(uintptr_t first, uintptr_t last)
   1.173 +      : first(first), last(last) {
   1.174 +        checkSpan();
   1.175 +    }
   1.176 +
   1.177 +    /*
   1.178 +     * To minimize the size of the arena header the first span is encoded
   1.179 +     * there as offsets from the arena start.
   1.180 +     */
   1.181 +    static size_t encodeOffsets(size_t firstOffset, size_t lastOffset) {
   1.182 +        static_assert(ArenaShift < 16, "Check that we can pack offsets into uint16_t.");
   1.183 +        JS_ASSERT(firstOffset <= ArenaSize);
   1.184 +        JS_ASSERT(lastOffset < ArenaSize);
   1.185 +        JS_ASSERT(firstOffset <= ((lastOffset + 1) & ~size_t(1)));
   1.186 +        return firstOffset | (lastOffset << 16);
   1.187 +    }
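          +
          +    /*
          +     * For instance (illustrative values): encodeOffsets(0x30, 0xff0) yields
          +     * 0x0ff00030, with the last offset in the high 16 bits and the first
          +     * offset in the low 16 bits; decodeOffsets() below reverses this split.
          +     */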
   1.188 +
   1.189 +    /*
   1.190 +     * Encoded offsets for a full arena when its first span is the last one
   1.191 +     * and empty.
   1.192 +     */
   1.193 +    static const size_t FullArenaOffsets = ArenaSize | ((ArenaSize - 1) << 16);
   1.194 +
   1.195 +    static FreeSpan decodeOffsets(uintptr_t arenaAddr, size_t offsets) {
   1.196 +        JS_ASSERT(!(arenaAddr & ArenaMask));
   1.197 +
   1.198 +        size_t firstOffset = offsets & 0xFFFF;
   1.199 +        size_t lastOffset = offsets >> 16;
   1.200 +        JS_ASSERT(firstOffset <= ArenaSize);
   1.201 +        JS_ASSERT(lastOffset < ArenaSize);
   1.202 +
   1.203 +        /*
   1.204 +         * We must not use | when calculating first as firstOffset is
   1.205 +         * ArenaMask + 1 for the empty span.
   1.206 +         */
   1.207 +        return FreeSpan(arenaAddr + firstOffset, arenaAddr | lastOffset);
   1.208 +    }
   1.209 +
   1.210 +    void initAsEmpty(uintptr_t arenaAddr = 0) {
   1.211 +        JS_ASSERT(!(arenaAddr & ArenaMask));
   1.212 +        first = arenaAddr + ArenaSize;
    1.213 +        last = arenaAddr | (ArenaSize - 1);
   1.214 +        JS_ASSERT(isEmpty());
   1.215 +    }
   1.216 +
   1.217 +    bool isEmpty() const {
   1.218 +        checkSpan();
   1.219 +        return first > last;
   1.220 +    }
   1.221 +
   1.222 +    bool hasNext() const {
   1.223 +        checkSpan();
   1.224 +        return !(last & uintptr_t(1));
   1.225 +    }
   1.226 +
   1.227 +    const FreeSpan *nextSpan() const {
   1.228 +        JS_ASSERT(hasNext());
   1.229 +        return reinterpret_cast<FreeSpan *>(last);
   1.230 +    }
   1.231 +
   1.232 +    FreeSpan *nextSpanUnchecked(size_t thingSize) const {
   1.233 +#ifdef DEBUG
   1.234 +        uintptr_t lastOffset = last & ArenaMask;
   1.235 +        JS_ASSERT(!(lastOffset & 1));
   1.236 +        JS_ASSERT((ArenaSize - lastOffset) % thingSize == 0);
   1.237 +#endif
   1.238 +        return reinterpret_cast<FreeSpan *>(last);
   1.239 +    }
   1.240 +
   1.241 +    uintptr_t arenaAddressUnchecked() const {
   1.242 +        return last & ~ArenaMask;
   1.243 +    }
   1.244 +
   1.245 +    uintptr_t arenaAddress() const {
   1.246 +        checkSpan();
   1.247 +        return arenaAddressUnchecked();
   1.248 +    }
   1.249 +
   1.250 +    ArenaHeader *arenaHeader() const {
   1.251 +        return reinterpret_cast<ArenaHeader *>(arenaAddress());
   1.252 +    }
   1.253 +
   1.254 +    bool isSameNonEmptySpan(const FreeSpan *another) const {
   1.255 +        JS_ASSERT(!isEmpty());
   1.256 +        JS_ASSERT(!another->isEmpty());
   1.257 +        return first == another->first && last == another->last;
   1.258 +    }
   1.259 +
   1.260 +    bool isWithinArena(uintptr_t arenaAddr) const {
   1.261 +        JS_ASSERT(!(arenaAddr & ArenaMask));
   1.262 +
   1.263 +        /* Return true for the last empty span as well. */
   1.264 +        return arenaAddress() == arenaAddr;
   1.265 +    }
   1.266 +
   1.267 +    size_t encodeAsOffsets() const {
   1.268 +        /*
    1.269 +         * We must use first - arenaAddress(), not first & ArenaMask, as the
    1.270 +         * offset equals ArenaMask + 1 (i.e. ArenaSize) for an empty span.
   1.271 +         */
   1.272 +        uintptr_t arenaAddr = arenaAddress();
   1.273 +        return encodeOffsets(first - arenaAddr, last & ArenaMask);
   1.274 +    }
   1.275 +
   1.276 +    /* See comments before FreeSpan for details. */
   1.277 +    MOZ_ALWAYS_INLINE void *allocate(size_t thingSize) {
   1.278 +        JS_ASSERT(thingSize % CellSize == 0);
   1.279 +        checkSpan();
   1.280 +        uintptr_t thing = first;
   1.281 +        if (thing < last) {
   1.282 +            /* Bump-allocate from the current span. */
   1.283 +            first = thing + thingSize;
   1.284 +        } else if (MOZ_LIKELY(thing == last)) {
   1.285 +            /*
    1.286 +             * Move to the next span. We use MOZ_LIKELY because, without PGO,
    1.287 +             * compilers mispredict the == branch here as unlikely to succeed.
   1.288 +             */
   1.289 +            *this = *reinterpret_cast<FreeSpan *>(thing);
   1.290 +        } else {
   1.291 +            return nullptr;
   1.292 +        }
   1.293 +        checkSpan();
   1.294 +        JS_EXTRA_POISON(reinterpret_cast<void *>(thing), JS_ALLOCATED_TENURED_PATTERN, thingSize);
   1.295 +        return reinterpret_cast<void *>(thing);
   1.296 +    }
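          +
          +    /*
          +     * A hypothetical caller sketch (names not from this file): a per-kind
          +     * free list head would bump-allocate via
          +     *
          +     *     void *t = freeLists[kind].allocate(Arena::thingSize(kind));
          +     *     if (!t)
          +     *         t = refillFreeList(kind);  // hypothetical slow path
          +     */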
   1.297 +
   1.298 +    /* A version of allocate when we know that the span is not empty. */
   1.299 +    MOZ_ALWAYS_INLINE void *infallibleAllocate(size_t thingSize) {
   1.300 +        JS_ASSERT(thingSize % CellSize == 0);
   1.301 +        checkSpan();
   1.302 +        uintptr_t thing = first;
   1.303 +        if (thing < last) {
   1.304 +            first = thing + thingSize;
   1.305 +        } else {
   1.306 +            JS_ASSERT(thing == last);
   1.307 +            *this = *reinterpret_cast<FreeSpan *>(thing);
   1.308 +        }
   1.309 +        checkSpan();
   1.310 +        JS_EXTRA_POISON(reinterpret_cast<void *>(thing), JS_ALLOCATED_TENURED_PATTERN, thingSize);
   1.311 +        return reinterpret_cast<void *>(thing);
   1.312 +    }
   1.313 +
   1.314 +    /*
    1.315 +     * Allocate from a newly allocated arena. We do not move the free list
    1.316 +     * from the arena. Rather, we set the arena up as fully used during
    1.317 +     * initialization, so to allocate we simply return the first thing in the
    1.318 +     * arena and set the free list to point to the second.
   1.319 +     */
   1.320 +    MOZ_ALWAYS_INLINE void *allocateFromNewArena(uintptr_t arenaAddr, size_t firstThingOffset,
   1.321 +                                                size_t thingSize) {
   1.322 +        JS_ASSERT(!(arenaAddr & ArenaMask));
   1.323 +        uintptr_t thing = arenaAddr | firstThingOffset;
   1.324 +        first = thing + thingSize;
   1.325 +        last = arenaAddr | ArenaMask;
   1.326 +        checkSpan();
   1.327 +        JS_EXTRA_POISON(reinterpret_cast<void *>(thing), JS_ALLOCATED_TENURED_PATTERN, thingSize);
   1.328 +        return reinterpret_cast<void *>(thing);
   1.329 +    }
   1.330 +
   1.331 +    void checkSpan() const {
   1.332 +#ifdef DEBUG
   1.333 +        /* We do not allow spans at the end of the address space. */
   1.334 +        JS_ASSERT(last != uintptr_t(-1));
   1.335 +        JS_ASSERT(first);
   1.336 +        JS_ASSERT(last);
   1.337 +        JS_ASSERT(first - 1 <= last);
   1.338 +        uintptr_t arenaAddr = arenaAddressUnchecked();
   1.339 +        if (last & 1) {
   1.340 +            /* The span is the last. */
   1.341 +            JS_ASSERT((last & ArenaMask) == ArenaMask);
   1.342 +
   1.343 +            if (first - 1 == last) {
    1.344 +                /* The span is last and empty. The above first != 0 check
   1.345 +                 * implies that we are not at the end of the address space.
   1.346 +                 */
   1.347 +                return;
   1.348 +            }
   1.349 +            size_t spanLength = last - first + 1;
   1.350 +            JS_ASSERT(spanLength % CellSize == 0);
   1.351 +
   1.352 +            /* Start and end must belong to the same arena. */
   1.353 +            JS_ASSERT((first & ~ArenaMask) == arenaAddr);
   1.354 +            return;
   1.355 +        }
   1.356 +
   1.357 +        /* The span is not the last and we have more spans to follow. */
   1.358 +        JS_ASSERT(first <= last);
   1.359 +        size_t spanLengthWithoutOneThing = last - first;
   1.360 +        JS_ASSERT(spanLengthWithoutOneThing % CellSize == 0);
   1.361 +
   1.362 +        JS_ASSERT((first & ~ArenaMask) == arenaAddr);
   1.363 +
   1.364 +        /*
   1.365 +         * If there is not enough space before the arena end to allocate one
   1.366 +         * more thing, then the span must be marked as the last one to avoid
    1.367 +         * storing a useless empty span reference.
   1.368 +         */
   1.369 +        size_t beforeTail = ArenaSize - (last & ArenaMask);
   1.370 +        JS_ASSERT(beforeTail >= sizeof(FreeSpan) + CellSize);
   1.371 +
   1.372 +        FreeSpan *next = reinterpret_cast<FreeSpan *>(last);
   1.373 +
   1.374 +        /*
   1.375 +         * The GC things on the list of free spans come from one arena
   1.376 +         * and the spans are linked in ascending address order with
   1.377 +         * at least one non-free thing between spans.
   1.378 +         */
   1.379 +        JS_ASSERT(last < next->first);
   1.380 +        JS_ASSERT(arenaAddr == next->arenaAddressUnchecked());
   1.381 +
   1.382 +        if (next->first > next->last) {
   1.383 +            /*
   1.384 +             * The next span is the empty span that terminates the list for
   1.385 +             * arenas that do not have any free things at the end.
   1.386 +             */
   1.387 +            JS_ASSERT(next->first - 1 == next->last);
   1.388 +            JS_ASSERT(arenaAddr + ArenaSize == next->first);
   1.389 +        }
   1.390 +#endif
   1.391 +    }
   1.392 +
   1.393 +};
   1.394 +
   1.395 +/* Every arena has a header. */
   1.396 +struct ArenaHeader : public JS::shadow::ArenaHeader
   1.397 +{
   1.398 +    friend struct FreeLists;
   1.399 +
   1.400 +    /*
   1.401 +     * ArenaHeader::next has two purposes: when unallocated, it points to the
   1.402 +     * next available Arena's header. When allocated, it points to the next
   1.403 +     * arena of the same size class and compartment.
   1.404 +     */
   1.405 +    ArenaHeader     *next;
   1.406 +
   1.407 +  private:
   1.408 +    /*
   1.409 +     * The first span of free things in the arena. We encode it as the start
   1.410 +     * and end offsets within the arena, not as FreeSpan structure, to
   1.411 +     * minimize the header size.
   1.412 +     */
   1.413 +    size_t          firstFreeSpanOffsets;
   1.414 +
   1.415 +    /*
    1.416 +     * One of the AllocKind constants, or FINALIZE_LIMIT when the arena does
    1.417 +     * not contain any GC things and is on the list of empty arenas in the GC
    1.418 +     * chunk. The latter allows us to quickly check if the arena is allocated
    1.419 +     * during conservative GC scanning without searching for the arena in the
    1.420 +     * list.
   1.421 +     *
   1.422 +     * We use 8 bits for the allocKind so the compiler can use byte-level memory
   1.423 +     * instructions to access it.
   1.424 +     */
   1.425 +    size_t       allocKind          : 8;
   1.426 +
   1.427 +    /*
    1.428 +     * When collecting we sometimes need to keep an auxiliary list of arenas,
   1.429 +     * for which we use the following fields.  This happens for several reasons:
   1.430 +     *
   1.431 +     * When recursive marking uses too much stack the marking is delayed and the
   1.432 +     * corresponding arenas are put into a stack. To distinguish the bottom of
   1.433 +     * the stack from the arenas not present in the stack we use the
   1.434 +     * markOverflow flag to tag arenas on the stack.
   1.435 +     *
   1.436 +     * Delayed marking is also used for arenas that we allocate into during an
   1.437 +     * incremental GC. In this case, we intend to mark all the objects in the
   1.438 +     * arena, and it's faster to do this marking in bulk.
   1.439 +     *
   1.440 +     * When sweeping we keep track of which arenas have been allocated since the
   1.441 +     * end of the mark phase.  This allows us to tell whether a pointer to an
   1.442 +     * unmarked object is yet to be finalized or has already been reallocated.
   1.443 +     * We set the allocatedDuringIncremental flag for this and clear it at the
   1.444 +     * end of the sweep phase.
   1.445 +     *
   1.446 +     * To minimize the ArenaHeader size we record the next linkage as
   1.447 +     * arenaAddress() >> ArenaShift and pack it with the allocKind field and the
   1.448 +     * flags.
   1.449 +     */
   1.450 +  public:
   1.451 +    size_t       hasDelayedMarking  : 1;
   1.452 +    size_t       allocatedDuringIncremental : 1;
   1.453 +    size_t       markOverflow : 1;
   1.454 +    size_t       auxNextLink : JS_BITS_PER_WORD - 8 - 1 - 1 - 1;
    1.455 +    static_assert(ArenaShift >= 8 + 1 + 1 + 1,
    1.456 +                  "ArenaHeader::auxNextLink packing assumes that ArenaShift has enough bits to "
    1.457 +                  "cover allocKind and the three flag bits.");
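          +
          +    /*
          +     * Illustrative layout of the packed word on a 64-bit build (the actual
          +     * bit order is up to the compiler's bitfield allocation):
          +     *
          +     *     bits 0..7    allocKind
          +     *     bit  8       hasDelayedMarking
          +     *     bit  9       allocatedDuringIncremental
          +     *     bit  10      markOverflow
          +     *     bits 11..63  auxNextLink (an arenaAddress() >> ArenaShift value)
          +     */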
   1.458 +
   1.459 +    inline uintptr_t address() const;
   1.460 +    inline Chunk *chunk() const;
   1.461 +
   1.462 +    bool allocated() const {
   1.463 +        JS_ASSERT(allocKind <= size_t(FINALIZE_LIMIT));
   1.464 +        return allocKind < size_t(FINALIZE_LIMIT);
   1.465 +    }
   1.466 +
   1.467 +    void init(JS::Zone *zoneArg, AllocKind kind) {
   1.468 +        JS_ASSERT(!allocated());
   1.469 +        JS_ASSERT(!markOverflow);
   1.470 +        JS_ASSERT(!allocatedDuringIncremental);
   1.471 +        JS_ASSERT(!hasDelayedMarking);
   1.472 +        zone = zoneArg;
   1.473 +
   1.474 +        static_assert(FINALIZE_LIMIT <= 255, "We must be able to fit the allockind into uint8_t.");
   1.475 +        allocKind = size_t(kind);
   1.476 +
   1.477 +        /* See comments in FreeSpan::allocateFromNewArena. */
   1.478 +        firstFreeSpanOffsets = FreeSpan::FullArenaOffsets;
   1.479 +    }
   1.480 +
   1.481 +    void setAsNotAllocated() {
   1.482 +        allocKind = size_t(FINALIZE_LIMIT);
   1.483 +        markOverflow = 0;
   1.484 +        allocatedDuringIncremental = 0;
   1.485 +        hasDelayedMarking = 0;
   1.486 +        auxNextLink = 0;
   1.487 +    }
   1.488 +
   1.489 +    inline uintptr_t arenaAddress() const;
   1.490 +    inline Arena *getArena();
   1.491 +
   1.492 +    AllocKind getAllocKind() const {
   1.493 +        JS_ASSERT(allocated());
   1.494 +        return AllocKind(allocKind);
   1.495 +    }
   1.496 +
   1.497 +    inline size_t getThingSize() const;
   1.498 +
   1.499 +    bool hasFreeThings() const {
   1.500 +        return firstFreeSpanOffsets != FreeSpan::FullArenaOffsets;
   1.501 +    }
   1.502 +
   1.503 +    inline bool isEmpty() const;
   1.504 +
   1.505 +    void setAsFullyUsed() {
   1.506 +        firstFreeSpanOffsets = FreeSpan::FullArenaOffsets;
   1.507 +    }
   1.508 +
   1.509 +    inline FreeSpan getFirstFreeSpan() const;
   1.510 +    inline void setFirstFreeSpan(const FreeSpan *span);
   1.511 +
   1.512 +#ifdef DEBUG
   1.513 +    void checkSynchronizedWithFreeList() const;
   1.514 +#endif
   1.515 +
   1.516 +    inline ArenaHeader *getNextDelayedMarking() const;
   1.517 +    inline void setNextDelayedMarking(ArenaHeader *aheader);
   1.518 +    inline void unsetDelayedMarking();
   1.519 +
   1.520 +    inline ArenaHeader *getNextAllocDuringSweep() const;
   1.521 +    inline void setNextAllocDuringSweep(ArenaHeader *aheader);
   1.522 +    inline void unsetAllocDuringSweep();
   1.523 +};
   1.524 +
   1.525 +struct Arena
   1.526 +{
   1.527 +    /*
   1.528 +     * Layout of an arena:
   1.529 +     * An arena is 4K in size and 4K-aligned. It starts with the ArenaHeader
   1.530 +     * descriptor followed by some pad bytes. The remainder of the arena is
   1.531 +     * filled with the array of T things. The pad bytes ensure that the thing
   1.532 +     * array ends exactly at the end of the arena.
   1.533 +     *
   1.534 +     * +-------------+-----+----+----+-----+----+
   1.535 +     * | ArenaHeader | pad | T0 | T1 | ... | Tn |
   1.536 +     * +-------------+-----+----+----+-----+----+
   1.537 +     *
   1.538 +     * <----------------------------------------> = ArenaSize bytes
   1.539 +     * <-------------------> = first thing offset
   1.540 +     */
   1.541 +    ArenaHeader aheader;
   1.542 +    uint8_t     data[ArenaSize - sizeof(ArenaHeader)];
   1.543 +
   1.544 +  private:
   1.545 +    static JS_FRIEND_DATA(const uint32_t) ThingSizes[];
   1.546 +    static JS_FRIEND_DATA(const uint32_t) FirstThingOffsets[];
   1.547 +
   1.548 +  public:
   1.549 +    static void staticAsserts();
   1.550 +
   1.551 +    static size_t thingSize(AllocKind kind) {
   1.552 +        return ThingSizes[kind];
   1.553 +    }
   1.554 +
   1.555 +    static size_t firstThingOffset(AllocKind kind) {
   1.556 +        return FirstThingOffsets[kind];
   1.557 +    }
   1.558 +
   1.559 +    static size_t thingsPerArena(size_t thingSize) {
   1.560 +        JS_ASSERT(thingSize % CellSize == 0);
   1.561 +
   1.562 +        /* We should be able to fit FreeSpan in any GC thing. */
   1.563 +        JS_ASSERT(thingSize >= sizeof(FreeSpan));
   1.564 +
   1.565 +        return (ArenaSize - sizeof(ArenaHeader)) / thingSize;
   1.566 +    }
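          +
          +    /*
          +     * For example, assuming a 64-bit build where sizeof(ArenaHeader) == 32
          +     * and ArenaSize == 4096, thingsPerArena(16) == (4096 - 32) / 16 == 254.
          +     */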
   1.567 +
   1.568 +    static size_t thingsSpan(size_t thingSize) {
   1.569 +        return thingsPerArena(thingSize) * thingSize;
   1.570 +    }
   1.571 +
   1.572 +    static bool isAligned(uintptr_t thing, size_t thingSize) {
    1.573 +        /* Things end at the arena end. */
   1.574 +        uintptr_t tailOffset = (ArenaSize - thing) & ArenaMask;
   1.575 +        return tailOffset % thingSize == 0;
   1.576 +    }
   1.577 +
   1.578 +    uintptr_t address() const {
   1.579 +        return aheader.address();
   1.580 +    }
   1.581 +
   1.582 +    uintptr_t thingsStart(AllocKind thingKind) {
   1.583 +        return address() | firstThingOffset(thingKind);
   1.584 +    }
   1.585 +
   1.586 +    uintptr_t thingsEnd() {
   1.587 +        return address() + ArenaSize;
   1.588 +    }
   1.589 +
   1.590 +    void setAsFullyUnused(AllocKind thingKind);
   1.591 +
   1.592 +    template <typename T>
   1.593 +    bool finalize(FreeOp *fop, AllocKind thingKind, size_t thingSize);
   1.594 +};
   1.595 +
   1.596 +static_assert(sizeof(Arena) == ArenaSize, "The hardcoded arena size must match the struct size.");
   1.597 +
   1.598 +inline size_t
   1.599 +ArenaHeader::getThingSize() const
   1.600 +{
   1.601 +    JS_ASSERT(allocated());
   1.602 +    return Arena::thingSize(getAllocKind());
   1.603 +}
   1.604 +
   1.605 +/*
   1.606 + * The tail of the chunk info is shared between all chunks in the system, both
   1.607 + * nursery and tenured. This structure is locatable from any GC pointer by
   1.608 + * aligning to 1MiB.
   1.609 + */
   1.610 +struct ChunkTrailer
   1.611 +{
    1.612 +    /* The index of the chunk in the nursery, or LocationTenuredHeap. */
   1.613 +    uint32_t        location;
   1.614 +
   1.615 +#if JS_BITS_PER_WORD == 64
   1.616 +    uint32_t        padding;
   1.617 +#endif
   1.618 +
   1.619 +    JSRuntime       *runtime;
   1.620 +};
   1.621 +
   1.622 +static_assert(sizeof(ChunkTrailer) == 2 * sizeof(uintptr_t), "ChunkTrailer size is incorrect.");
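          +
          +/*
          + * For example, any tenured cell can reach its runtime by masking its address
          + * down to the chunk boundary (this is what Cell::runtimeFromAnyThread()
          + * below does):
          + *
          + *     JSRuntime *rt = Chunk::fromAddress(uintptr_t(cell))->info.trailer.runtime;
          + */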
   1.623 +
   1.624 +/* The chunk header (located at the end of the chunk to preserve arena alignment). */
   1.625 +struct ChunkInfo
   1.626 +{
   1.627 +    Chunk           *next;
   1.628 +    Chunk           **prevp;
   1.629 +
   1.630 +    /* Free arenas are linked together with aheader.next. */
   1.631 +    ArenaHeader     *freeArenasHead;
   1.632 +
   1.633 +#if JS_BITS_PER_WORD == 32
   1.634 +    /*
   1.635 +     * Calculating sizes and offsets is simpler if sizeof(ChunkInfo) is
   1.636 +     * architecture-independent.
   1.637 +     */
   1.638 +    char            padding[20];
   1.639 +#endif
   1.640 +
   1.641 +    /*
   1.642 +     * Decommitted arenas are tracked by a bitmap in the chunk header. We use
   1.643 +     * this offset to start our search iteration close to a decommitted arena
   1.644 +     * that we can allocate.
   1.645 +     */
   1.646 +    uint32_t        lastDecommittedArenaOffset;
   1.647 +
   1.648 +    /* Number of free arenas, either committed or decommitted. */
   1.649 +    uint32_t        numArenasFree;
   1.650 +
   1.651 +    /* Number of free, committed arenas. */
   1.652 +    uint32_t        numArenasFreeCommitted;
   1.653 +
   1.654 +    /* Number of GC cycles this chunk has survived. */
   1.655 +    uint32_t        age;
   1.656 +
   1.657 +    /* Information shared by all Chunk types. */
   1.658 +    ChunkTrailer    trailer;
   1.659 +};
   1.660 +
   1.661 +/*
   1.662 + * Calculating ArenasPerChunk:
   1.663 + *
   1.664 + * In order to figure out how many Arenas will fit in a chunk, we need to know
   1.665 + * how much extra space is available after we allocate the header data. This
   1.666 + * is a problem because the header size depends on the number of arenas in the
   1.667 + * chunk. The two dependent fields are bitmap and decommittedArenas.
   1.668 + *
   1.669 + * For the mark bitmap, we know that each arena will use a fixed number of full
   1.670 + * bytes: ArenaBitmapBytes. The full size of the header data is this number
   1.671 + * multiplied by the eventual number of arenas we have in the header. We,
   1.672 + * conceptually, distribute this header data among the individual arenas and do
   1.673 + * not include it in the header. This way we do not have to worry about its
   1.674 + * variable size: it gets attached to the variable number we are computing.
   1.675 + *
   1.676 + * For the decommitted arena bitmap, we only have 1 bit per arena, so this
   1.677 + * technique will not work. Instead, we observe that we do not have enough
   1.678 + * header info to fill 8 full arenas: it is currently 4 on 64bit, less on
   1.679 + * 32bit. Thus, with current numbers, we need 64 bytes for decommittedArenas.
   1.680 + * This will not become 63 bytes unless we double the data required in the
   1.681 + * header. Therefore, we just compute the number of bytes required to track
   1.682 + * every possible arena and do not worry about slop bits, since there are too
   1.683 + * few to usefully allocate.
   1.684 + *
   1.685 + * To actually compute the number of arenas we can allocate in a chunk, we
   1.686 + * divide the amount of available space less the header info (not including
   1.687 + * the mark bitmap which is distributed into the arena size) by the size of
   1.688 + * the arena (with the mark bitmap bytes it uses).
   1.689 + */
   1.690 +const size_t BytesPerArenaWithHeader = ArenaSize + ArenaBitmapBytes;
   1.691 +const size_t ChunkDecommitBitmapBytes = ChunkSize / ArenaSize / JS_BITS_PER_BYTE;
   1.692 +const size_t ChunkBytesAvailable = ChunkSize - sizeof(ChunkInfo) - ChunkDecommitBitmapBytes;
   1.693 +const size_t ArenasPerChunk = ChunkBytesAvailable / BytesPerArenaWithHeader;
   1.694 +static_assert(ArenasPerChunk == 252, "Do not accidentally change our heap's density.");
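          +
          +/*
          + * Worked example, assuming a 1 MiB chunk, 4 KiB arenas, and
          + * sizeof(ChunkInfo) == 56: ChunkDecommitBitmapBytes is 1048576/4096/8 == 32,
          + * ChunkBytesAvailable is 1048576 - 56 - 32 == 1048488, and ArenasPerChunk is
          + * 1048488 / (4096 + 64) == 252, matching the assertion above.
          + */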
   1.695 +
   1.696 +/* A chunk bitmap contains enough mark bits for all the cells in a chunk. */
   1.697 +struct ChunkBitmap
   1.698 +{
   1.699 +    volatile uintptr_t bitmap[ArenaBitmapWords * ArenasPerChunk];
   1.700 +
   1.701 +  public:
   1.702 +    ChunkBitmap() { }
   1.703 +
   1.704 +    MOZ_ALWAYS_INLINE void getMarkWordAndMask(const Cell *cell, uint32_t color,
   1.705 +                                              uintptr_t **wordp, uintptr_t *maskp)
   1.706 +    {
   1.707 +        GetGCThingMarkWordAndMask(cell, color, wordp, maskp);
   1.708 +    }
   1.709 +
   1.710 +    MOZ_ALWAYS_INLINE MOZ_TSAN_BLACKLIST bool isMarked(const Cell *cell, uint32_t color) {
   1.711 +        uintptr_t *word, mask;
   1.712 +        getMarkWordAndMask(cell, color, &word, &mask);
   1.713 +        return *word & mask;
   1.714 +    }
   1.715 +
   1.716 +    MOZ_ALWAYS_INLINE bool markIfUnmarked(const Cell *cell, uint32_t color) {
   1.717 +        uintptr_t *word, mask;
   1.718 +        getMarkWordAndMask(cell, BLACK, &word, &mask);
   1.719 +        if (*word & mask)
   1.720 +            return false;
   1.721 +        *word |= mask;
   1.722 +        if (color != BLACK) {
   1.723 +            /*
   1.724 +             * We use getMarkWordAndMask to recalculate both mask and word as
   1.725 +             * doing just mask << color may overflow the mask.
   1.726 +             */
   1.727 +            getMarkWordAndMask(cell, color, &word, &mask);
   1.728 +            if (*word & mask)
   1.729 +                return false;
   1.730 +            *word |= mask;
   1.731 +        }
   1.732 +        return true;
   1.733 +    }
   1.734 +
   1.735 +    MOZ_ALWAYS_INLINE void unmark(const Cell *cell, uint32_t color) {
   1.736 +        uintptr_t *word, mask;
   1.737 +        getMarkWordAndMask(cell, color, &word, &mask);
   1.738 +        *word &= ~mask;
   1.739 +    }
   1.740 +
   1.741 +    void clear() {
   1.742 +        memset((void *)bitmap, 0, sizeof(bitmap));
   1.743 +    }
   1.744 +
   1.745 +    uintptr_t *arenaBits(ArenaHeader *aheader) {
   1.746 +        static_assert(ArenaBitmapBits == ArenaBitmapWords * JS_BITS_PER_WORD,
   1.747 +                      "We assume that the part of the bitmap corresponding to the arena "
   1.748 +                      "has the exact number of words so we do not need to deal with a word "
   1.749 +                      "that covers bits from two arenas.");
   1.750 +
   1.751 +        uintptr_t *word, unused;
   1.752 +        getMarkWordAndMask(reinterpret_cast<Cell *>(aheader->address()), BLACK, &word, &unused);
   1.753 +        return word;
   1.754 +    }
   1.755 +};
   1.756 +
   1.757 +static_assert(ArenaBitmapBytes * ArenasPerChunk == sizeof(ChunkBitmap),
   1.758 +              "Ensure our ChunkBitmap actually covers all arenas.");
   1.759 +static_assert(js::gc::ChunkMarkBitmapBits == ArenaBitmapBits * ArenasPerChunk,
   1.760 +              "Ensure that the mark bitmap has the right number of bits.");
   1.761 +
   1.762 +typedef BitArray<ArenasPerChunk> PerArenaBitmap;
   1.763 +
   1.764 +const size_t ChunkPadSize = ChunkSize
   1.765 +                            - (sizeof(Arena) * ArenasPerChunk)
   1.766 +                            - sizeof(ChunkBitmap)
   1.767 +                            - sizeof(PerArenaBitmap)
   1.768 +                            - sizeof(ChunkInfo);
   1.769 +static_assert(ChunkPadSize < BytesPerArenaWithHeader,
   1.770 +              "If the chunk padding is larger than an arena, we should have one more arena.");
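          +
          +/*
          + * Under the same illustrative numbers as above: sizeof(ChunkBitmap) is
          + * 64 * 252 == 16128 bytes, PerArenaBitmap rounds 252 bits up to 32 bytes,
          + * and ChunkPadSize comes out to
          + * 1048576 - 1032192 - 16128 - 32 - 56 == 168 bytes of padding.
          + */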
   1.771 +
   1.772 +/*
   1.773 + * Chunks contain arenas and associated data structures (mark bitmap, delayed
   1.774 + * marking state).
   1.775 + */
   1.776 +struct Chunk
   1.777 +{
   1.778 +    Arena           arenas[ArenasPerChunk];
   1.779 +
   1.780 +    /* Pad to full size to ensure cache alignment of ChunkInfo. */
   1.781 +    uint8_t         padding[ChunkPadSize];
   1.782 +
   1.783 +    ChunkBitmap     bitmap;
   1.784 +    PerArenaBitmap  decommittedArenas;
   1.785 +    ChunkInfo       info;
   1.786 +
   1.787 +    static Chunk *fromAddress(uintptr_t addr) {
   1.788 +        addr &= ~ChunkMask;
   1.789 +        return reinterpret_cast<Chunk *>(addr);
   1.790 +    }
   1.791 +
   1.792 +    static bool withinArenasRange(uintptr_t addr) {
   1.793 +        uintptr_t offset = addr & ChunkMask;
   1.794 +        return offset < ArenasPerChunk * ArenaSize;
   1.795 +    }
   1.796 +
   1.797 +    static size_t arenaIndex(uintptr_t addr) {
   1.798 +        JS_ASSERT(withinArenasRange(addr));
   1.799 +        return (addr & ChunkMask) >> ArenaShift;
   1.800 +    }
   1.801 +
   1.802 +    uintptr_t address() const {
   1.803 +        uintptr_t addr = reinterpret_cast<uintptr_t>(this);
   1.804 +        JS_ASSERT(!(addr & ChunkMask));
   1.805 +        return addr;
   1.806 +    }
   1.807 +
   1.808 +    bool unused() const {
   1.809 +        return info.numArenasFree == ArenasPerChunk;
   1.810 +    }
   1.811 +
   1.812 +    bool hasAvailableArenas() const {
   1.813 +        return info.numArenasFree != 0;
   1.814 +    }
   1.815 +
   1.816 +    inline void addToAvailableList(JS::Zone *zone);
   1.817 +    inline void insertToAvailableList(Chunk **insertPoint);
   1.818 +    inline void removeFromAvailableList();
   1.819 +
   1.820 +    ArenaHeader *allocateArena(JS::Zone *zone, AllocKind kind);
   1.821 +
   1.822 +    void releaseArena(ArenaHeader *aheader);
   1.823 +    void recycleArena(ArenaHeader *aheader, ArenaList &dest, AllocKind thingKind);
   1.824 +
   1.825 +    static Chunk *allocate(JSRuntime *rt);
   1.826 +
   1.827 +    void decommitAllArenas(JSRuntime *rt) {
   1.828 +        decommittedArenas.clear(true);
   1.829 +        MarkPagesUnused(rt, &arenas[0], ArenasPerChunk * ArenaSize);
   1.830 +
   1.831 +        info.freeArenasHead = nullptr;
   1.832 +        info.lastDecommittedArenaOffset = 0;
   1.833 +        info.numArenasFree = ArenasPerChunk;
   1.834 +        info.numArenasFreeCommitted = 0;
   1.835 +    }
   1.836 +
   1.837 +    /* Must be called with the GC lock taken. */
   1.838 +    static inline void release(JSRuntime *rt, Chunk *chunk);
   1.839 +    static inline void releaseList(JSRuntime *rt, Chunk *chunkListHead);
   1.840 +
   1.841 +    /* Must be called with the GC lock taken. */
   1.842 +    inline void prepareToBeFreed(JSRuntime *rt);
   1.843 +
   1.844 +    /*
    1.845 +     * Assuming that info.prevp points to the next field of the previous
   1.846 +     * chunk in a doubly-linked list, get that chunk.
   1.847 +     */
   1.848 +    Chunk *getPrevious() {
   1.849 +        JS_ASSERT(info.prevp);
   1.850 +        return fromPointerToNext(info.prevp);
   1.851 +    }
   1.852 +
   1.853 +    /* Get the chunk from a pointer to its info.next field. */
   1.854 +    static Chunk *fromPointerToNext(Chunk **nextFieldPtr) {
   1.855 +        uintptr_t addr = reinterpret_cast<uintptr_t>(nextFieldPtr);
   1.856 +        JS_ASSERT((addr & ChunkMask) == offsetof(Chunk, info.next));
   1.857 +        return reinterpret_cast<Chunk *>(addr - offsetof(Chunk, info.next));
   1.858 +    }
   1.859 +
   1.860 +  private:
   1.861 +    inline void init(JSRuntime *rt);
   1.862 +
   1.863 +    /* Search for a decommitted arena to allocate. */
   1.864 +    unsigned findDecommittedArenaOffset();
   1.865 +    ArenaHeader* fetchNextDecommittedArena();
   1.866 +
   1.867 +  public:
   1.868 +    /* Unlink and return the freeArenasHead. */
   1.869 +    inline ArenaHeader* fetchNextFreeArena(JSRuntime *rt);
   1.870 +
   1.871 +    inline void addArenaToFreeList(JSRuntime *rt, ArenaHeader *aheader);
   1.872 +};
   1.873 +
   1.874 +static_assert(sizeof(Chunk) == ChunkSize,
   1.875 +              "Ensure the hardcoded chunk size definition actually matches the struct.");
   1.876 +static_assert(js::gc::ChunkMarkBitmapOffset == offsetof(Chunk, bitmap),
   1.877 +              "The hardcoded API bitmap offset must match the actual offset.");
   1.878 +static_assert(js::gc::ChunkRuntimeOffset == offsetof(Chunk, info) +
   1.879 +                                               offsetof(ChunkInfo, trailer) +
   1.880 +                                               offsetof(ChunkTrailer, runtime),
   1.881 +              "The hardcoded API runtime offset must match the actual offset.");
   1.882 +
   1.883 +inline uintptr_t
   1.884 +ArenaHeader::address() const
   1.885 +{
   1.886 +    uintptr_t addr = reinterpret_cast<uintptr_t>(this);
   1.887 +    JS_ASSERT(!(addr & ArenaMask));
   1.888 +    JS_ASSERT(Chunk::withinArenasRange(addr));
   1.889 +    return addr;
   1.890 +}
   1.891 +
   1.892 +inline Chunk *
   1.893 +ArenaHeader::chunk() const
   1.894 +{
   1.895 +    return Chunk::fromAddress(address());
   1.896 +}
   1.897 +
   1.898 +inline uintptr_t
   1.899 +ArenaHeader::arenaAddress() const
   1.900 +{
   1.901 +    return address();
   1.902 +}
   1.903 +
   1.904 +inline Arena *
   1.905 +ArenaHeader::getArena()
   1.906 +{
   1.907 +    return reinterpret_cast<Arena *>(arenaAddress());
   1.908 +}
   1.909 +
   1.910 +inline bool
   1.911 +ArenaHeader::isEmpty() const
   1.912 +{
   1.913 +    /* Arena is empty if its first span covers the whole arena. */
   1.914 +    JS_ASSERT(allocated());
   1.915 +    size_t firstThingOffset = Arena::firstThingOffset(getAllocKind());
   1.916 +    return firstFreeSpanOffsets == FreeSpan::encodeOffsets(firstThingOffset, ArenaMask);
   1.917 +}
   1.918 +
   1.919 +FreeSpan
   1.920 +ArenaHeader::getFirstFreeSpan() const
   1.921 +{
   1.922 +#ifdef DEBUG
   1.923 +    checkSynchronizedWithFreeList();
   1.924 +#endif
   1.925 +    return FreeSpan::decodeOffsets(arenaAddress(), firstFreeSpanOffsets);
   1.926 +}
   1.927 +
   1.928 +void
   1.929 +ArenaHeader::setFirstFreeSpan(const FreeSpan *span)
   1.930 +{
   1.931 +    JS_ASSERT(span->isWithinArena(arenaAddress()));
   1.932 +    firstFreeSpanOffsets = span->encodeAsOffsets();
   1.933 +}
   1.934 +
   1.935 +inline ArenaHeader *
   1.936 +ArenaHeader::getNextDelayedMarking() const
   1.937 +{
   1.938 +    JS_ASSERT(hasDelayedMarking);
   1.939 +    return &reinterpret_cast<Arena *>(auxNextLink << ArenaShift)->aheader;
   1.940 +}
   1.941 +
   1.942 +inline void
   1.943 +ArenaHeader::setNextDelayedMarking(ArenaHeader *aheader)
   1.944 +{
   1.945 +    JS_ASSERT(!(uintptr_t(aheader) & ArenaMask));
   1.946 +    JS_ASSERT(!auxNextLink && !hasDelayedMarking);
   1.947 +    hasDelayedMarking = 1;
   1.948 +    auxNextLink = aheader->arenaAddress() >> ArenaShift;
   1.949 +}
   1.950 +
   1.951 +inline void
   1.952 +ArenaHeader::unsetDelayedMarking()
   1.953 +{
   1.954 +    JS_ASSERT(hasDelayedMarking);
   1.955 +    hasDelayedMarking = 0;
   1.956 +    auxNextLink = 0;
   1.957 +}
   1.958 +
   1.959 +inline ArenaHeader *
   1.960 +ArenaHeader::getNextAllocDuringSweep() const
   1.961 +{
   1.962 +    JS_ASSERT(allocatedDuringIncremental);
   1.963 +    return &reinterpret_cast<Arena *>(auxNextLink << ArenaShift)->aheader;
   1.964 +}
   1.965 +
   1.966 +inline void
   1.967 +ArenaHeader::setNextAllocDuringSweep(ArenaHeader *aheader)
   1.968 +{
   1.969 +    JS_ASSERT(!auxNextLink && !allocatedDuringIncremental);
   1.970 +    allocatedDuringIncremental = 1;
   1.971 +    auxNextLink = aheader->arenaAddress() >> ArenaShift;
   1.972 +}
   1.973 +
   1.974 +inline void
   1.975 +ArenaHeader::unsetAllocDuringSweep()
   1.976 +{
   1.977 +    JS_ASSERT(allocatedDuringIncremental);
   1.978 +    allocatedDuringIncremental = 0;
   1.979 +    auxNextLink = 0;
   1.980 +}
   1.981 +
   1.982 +static void
   1.983 +AssertValidColor(const void *thing, uint32_t color)
   1.984 +{
   1.985 +#ifdef DEBUG
   1.986 +    ArenaHeader *aheader = reinterpret_cast<const Cell *>(thing)->arenaHeader();
   1.987 +    JS_ASSERT(color < aheader->getThingSize() / CellSize);
   1.988 +#endif
   1.989 +}
   1.990 +
   1.991 +inline ArenaHeader *
   1.992 +Cell::arenaHeader() const
   1.993 +{
   1.994 +    JS_ASSERT(isTenured());
   1.995 +    uintptr_t addr = address();
   1.996 +    addr &= ~ArenaMask;
   1.997 +    return reinterpret_cast<ArenaHeader *>(addr);
   1.998 +}
   1.999 +
  1.1000 +inline JSRuntime *
  1.1001 +Cell::runtimeFromMainThread() const
  1.1002 +{
  1.1003 +    JSRuntime *rt = chunk()->info.trailer.runtime;
  1.1004 +    JS_ASSERT(CurrentThreadCanAccessRuntime(rt));
  1.1005 +    return rt;
  1.1006 +}
  1.1007 +
  1.1008 +inline JS::shadow::Runtime *
  1.1009 +Cell::shadowRuntimeFromMainThread() const
  1.1010 +{
  1.1011 +    return reinterpret_cast<JS::shadow::Runtime*>(runtimeFromMainThread());
  1.1012 +}
  1.1013 +
  1.1014 +inline JSRuntime *
  1.1015 +Cell::runtimeFromAnyThread() const
  1.1016 +{
  1.1017 +    return chunk()->info.trailer.runtime;
  1.1018 +}
  1.1019 +
  1.1020 +inline JS::shadow::Runtime *
  1.1021 +Cell::shadowRuntimeFromAnyThread() const
  1.1022 +{
  1.1023 +    return reinterpret_cast<JS::shadow::Runtime*>(runtimeFromAnyThread());
  1.1024 +}
  1.1025 +
  1.1026 +bool
  1.1027 +Cell::isMarked(uint32_t color /* = BLACK */) const
  1.1028 +{
  1.1029 +    JS_ASSERT(isTenured());
  1.1030 +    JS_ASSERT(arenaHeader()->allocated());
  1.1031 +    AssertValidColor(this, color);
  1.1032 +    return chunk()->bitmap.isMarked(this, color);
  1.1033 +}
  1.1034 +
  1.1035 +bool
  1.1036 +Cell::markIfUnmarked(uint32_t color /* = BLACK */) const
  1.1037 +{
  1.1038 +    JS_ASSERT(isTenured());
  1.1039 +    AssertValidColor(this, color);
  1.1040 +    return chunk()->bitmap.markIfUnmarked(this, color);
  1.1041 +}
  1.1042 +
  1.1043 +void
  1.1044 +Cell::unmark(uint32_t color) const
  1.1045 +{
  1.1046 +    JS_ASSERT(isTenured());
  1.1047 +    JS_ASSERT(color != BLACK);
  1.1048 +    AssertValidColor(this, color);
  1.1049 +    chunk()->bitmap.unmark(this, color);
  1.1050 +}
  1.1051 +
  1.1052 +JS::Zone *
  1.1053 +Cell::tenuredZone() const
  1.1054 +{
  1.1055 +    JS::Zone *zone = arenaHeader()->zone;
  1.1056 +    JS_ASSERT(CurrentThreadCanAccessZone(zone));
  1.1057 +    JS_ASSERT(isTenured());
  1.1058 +    return zone;
  1.1059 +}
  1.1060 +
  1.1061 +JS::Zone *
  1.1062 +Cell::tenuredZoneFromAnyThread() const
  1.1063 +{
  1.1064 +    JS_ASSERT(isTenured());
  1.1065 +    return arenaHeader()->zone;
  1.1066 +}
  1.1067 +
  1.1068 +bool
  1.1069 +Cell::tenuredIsInsideZone(JS::Zone *zone) const
  1.1070 +{
  1.1071 +    JS_ASSERT(isTenured());
  1.1072 +    return zone == arenaHeader()->zone;
  1.1073 +}
  1.1074 +
  1.1075 +#ifdef DEBUG
  1.1076 +bool
  1.1077 +Cell::isAligned() const
  1.1078 +{
  1.1079 +    return Arena::isAligned(address(), arenaHeader()->getThingSize());
  1.1080 +}
  1.1081 +
  1.1082 +bool
  1.1083 +Cell::isTenured() const
  1.1084 +{
  1.1085 +#ifdef JSGC_GENERATIONAL
  1.1086 +    JS::shadow::Runtime *rt = js::gc::GetGCThingRuntime(this);
  1.1087 +    return !IsInsideNursery(rt, this);
  1.1088 +#endif
  1.1089 +    return true;
  1.1090 +}
  1.1091 +#endif
  1.1092 +
  1.1093 +inline uintptr_t
  1.1094 +Cell::address() const
  1.1095 +{
  1.1096 +    uintptr_t addr = uintptr_t(this);
  1.1097 +    JS_ASSERT(addr % CellSize == 0);
  1.1098 +    JS_ASSERT(Chunk::withinArenasRange(addr));
  1.1099 +    return addr;
  1.1100 +}
  1.1101 +
  1.1102 +Chunk *
  1.1103 +Cell::chunk() const
  1.1104 +{
  1.1105 +    uintptr_t addr = uintptr_t(this);
  1.1106 +    JS_ASSERT(addr % CellSize == 0);
  1.1107 +    addr &= ~(ChunkSize - 1);
  1.1108 +    return reinterpret_cast<Chunk *>(addr);
  1.1109 +}
  1.1110 +
  1.1111 +inline bool
  1.1112 +InFreeList(ArenaHeader *aheader, void *thing)
  1.1113 +{
  1.1114 +    if (!aheader->hasFreeThings())
  1.1115 +        return false;
  1.1116 +
  1.1117 +    FreeSpan firstSpan(aheader->getFirstFreeSpan());
  1.1118 +    uintptr_t addr = reinterpret_cast<uintptr_t>(thing);
  1.1119 +
  1.1120 +    for (const FreeSpan *span = &firstSpan;;) {
  1.1121 +        /* If the thing comes before the current span, it's not free. */
  1.1122 +        if (addr < span->first)
  1.1123 +            return false;
  1.1124 +
  1.1125 +        /*
   1.1126 +         * If we find it inside the span, it's dead. We use "<=" and not
   1.1127 +         * "<" here even for the last span, as we know that thing is inside
   1.1128 +         * the arena, so for the last span thing <= span->last.
  1.1129 +         */
  1.1130 +        if (addr <= span->last)
  1.1131 +            return true;
  1.1132 +
  1.1133 +        /*
   1.1134 +         * The last possible empty span is at the end of the arena. Here
   1.1135 +         * span->last < thing < thingsEnd, so we must have more spans.
  1.1136 +         */
  1.1137 +        span = span->nextSpan();
  1.1138 +    }
  1.1139 +}
  1.1140 +
  1.1141 +} /* namespace gc */
  1.1142 +
  1.1143 +gc::AllocKind
  1.1144 +gc::Cell::tenuredGetAllocKind() const
  1.1145 +{
  1.1146 +    return arenaHeader()->getAllocKind();
  1.1147 +}
  1.1148 +
  1.1149 +} /* namespace js */
  1.1150 +
  1.1151 +#endif /* gc_Heap_h */
