js/src/jsgc.h

author       Michael Schloh von Bennewitz <michael@schloh.com>
date         Thu, 22 Jan 2015 13:21:57 +0100
branch       TOR_BUG_9701
changeset    15:b8a032363ba2
permissions  -rw-r--r--

Incorporate requested changes from Mozilla in review:
https://bugzilla.mozilla.org/show_bug.cgi?id=1123480#c6

michael@0 1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
michael@0 2 * vim: set ts=8 sts=4 et sw=4 tw=99:
michael@0 3 * This Source Code Form is subject to the terms of the Mozilla Public
michael@0 4 * License, v. 2.0. If a copy of the MPL was not distributed with this
michael@0 5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
michael@0 6
michael@0 7 /* JS Garbage Collector. */
michael@0 8
michael@0 9 #ifndef jsgc_h
michael@0 10 #define jsgc_h
michael@0 11
michael@0 12 #include "mozilla/DebugOnly.h"
michael@0 13 #include "mozilla/MemoryReporting.h"
michael@0 14
michael@0 15 #include "jslock.h"
michael@0 16 #include "jsobj.h"
michael@0 17
michael@0 18 #include "js/GCAPI.h"
michael@0 19 #include "js/SliceBudget.h"
michael@0 20 #include "js/Vector.h"
michael@0 21
michael@0 22 class JSAtom;
michael@0 23 struct JSCompartment;
michael@0 24 class JSFlatString;
michael@0 25 class JSLinearString;
michael@0 26
michael@0 27 namespace js {
michael@0 28
michael@0 29 class ArgumentsObject;
michael@0 30 class ArrayBufferObject;
michael@0 31 class ArrayBufferViewObject;
michael@0 32 class SharedArrayBufferObject;
michael@0 33 class BaseShape;
michael@0 34 class DebugScopeObject;
michael@0 35 class GCHelperThread;
michael@0 36 class GlobalObject;
michael@0 37 class LazyScript;
michael@0 38 class Nursery;
michael@0 39 class PropertyName;
michael@0 40 class ScopeObject;
michael@0 41 class Shape;
michael@0 42 class UnownedBaseShape;
michael@0 43
michael@0 44 unsigned GetCPUCount();
michael@0 45
michael@0 46 enum HeapState {
michael@0 47 Idle, // doing nothing with the GC heap
michael@0 48 Tracing, // tracing the GC heap without collecting, e.g. IterateCompartments()
michael@0 49 MajorCollecting, // doing a GC of the major heap
michael@0 50 MinorCollecting // doing a GC of the minor heap (nursery)
michael@0 51 };
michael@0 52
michael@0 53 namespace jit {
michael@0 54 class JitCode;
michael@0 55 }
michael@0 56
michael@0 57 namespace gc {
michael@0 58
michael@0 59 enum State {
michael@0 60 NO_INCREMENTAL,
michael@0 61 MARK_ROOTS,
michael@0 62 MARK,
michael@0 63 SWEEP,
michael@0 64 INVALID
michael@0 65 };
michael@0 66
michael@0 67 class ChunkPool {
michael@0 68 Chunk *emptyChunkListHead;
michael@0 69 size_t emptyCount;
michael@0 70
michael@0 71 public:
michael@0 72 ChunkPool()
michael@0 73 : emptyChunkListHead(nullptr),
michael@0 74 emptyCount(0) { }
michael@0 75
michael@0 76 size_t getEmptyCount() const {
michael@0 77 return emptyCount;
michael@0 78 }
michael@0 79
michael@0 80 inline bool wantBackgroundAllocation(JSRuntime *rt) const;
michael@0 81
michael@0 82 /* Must be called with the GC lock taken. */
michael@0 83 inline Chunk *get(JSRuntime *rt);
michael@0 84
michael@0 85 /* Must be called either during the GC or with the GC lock taken. */
michael@0 86 inline void put(Chunk *chunk);
michael@0 87
michael@0 88 /*
michael@0 89 * Return the list of chunks that can be released outside the GC lock.
michael@0 90 * Must be called either during the GC or with the GC lock taken.
michael@0 91 */
michael@0 92 Chunk *expire(JSRuntime *rt, bool releaseAll);
michael@0 93
michael@0 94 /* Must be called with the GC lock taken. */
michael@0 95 void expireAndFree(JSRuntime *rt, bool releaseAll);
michael@0 96 };
michael@0 97
michael@0 98 static inline JSGCTraceKind
michael@0 99 MapAllocToTraceKind(AllocKind kind)
michael@0 100 {
michael@0 101 static const JSGCTraceKind map[] = {
michael@0 102 JSTRACE_OBJECT, /* FINALIZE_OBJECT0 */
michael@0 103 JSTRACE_OBJECT, /* FINALIZE_OBJECT0_BACKGROUND */
michael@0 104 JSTRACE_OBJECT, /* FINALIZE_OBJECT2 */
michael@0 105 JSTRACE_OBJECT, /* FINALIZE_OBJECT2_BACKGROUND */
michael@0 106 JSTRACE_OBJECT, /* FINALIZE_OBJECT4 */
michael@0 107 JSTRACE_OBJECT, /* FINALIZE_OBJECT4_BACKGROUND */
michael@0 108 JSTRACE_OBJECT, /* FINALIZE_OBJECT8 */
michael@0 109 JSTRACE_OBJECT, /* FINALIZE_OBJECT8_BACKGROUND */
michael@0 110 JSTRACE_OBJECT, /* FINALIZE_OBJECT12 */
michael@0 111 JSTRACE_OBJECT, /* FINALIZE_OBJECT12_BACKGROUND */
michael@0 112 JSTRACE_OBJECT, /* FINALIZE_OBJECT16 */
michael@0 113 JSTRACE_OBJECT, /* FINALIZE_OBJECT16_BACKGROUND */
michael@0 114 JSTRACE_SCRIPT, /* FINALIZE_SCRIPT */
michael@0 115 JSTRACE_LAZY_SCRIPT,/* FINALIZE_LAZY_SCRIPT */
michael@0 116 JSTRACE_SHAPE, /* FINALIZE_SHAPE */
michael@0 117 JSTRACE_BASE_SHAPE, /* FINALIZE_BASE_SHAPE */
michael@0 118 JSTRACE_TYPE_OBJECT,/* FINALIZE_TYPE_OBJECT */
michael@0 119 JSTRACE_STRING, /* FINALIZE_FAT_INLINE_STRING */
michael@0 120 JSTRACE_STRING, /* FINALIZE_STRING */
michael@0 121 JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING */
michael@0 122 JSTRACE_JITCODE, /* FINALIZE_JITCODE */
michael@0 123 };
michael@0 124 JS_STATIC_ASSERT(JS_ARRAY_LENGTH(map) == FINALIZE_LIMIT);
michael@0 125 return map[kind];
michael@0 126 }
michael@0 127
michael@0 128 template <typename T> struct MapTypeToTraceKind {};
michael@0 129 template <> struct MapTypeToTraceKind<ObjectImpl> { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
michael@0 130 template <> struct MapTypeToTraceKind<JSObject> { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
michael@0 131 template <> struct MapTypeToTraceKind<JSFunction> { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
michael@0 132 template <> struct MapTypeToTraceKind<ArgumentsObject> { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
michael@0 133 template <> struct MapTypeToTraceKind<ArrayBufferObject>{ static const JSGCTraceKind kind = JSTRACE_OBJECT; };
michael@0 134 template <> struct MapTypeToTraceKind<ArrayBufferViewObject>{ static const JSGCTraceKind kind = JSTRACE_OBJECT; };
michael@0 135 template <> struct MapTypeToTraceKind<SharedArrayBufferObject>{ static const JSGCTraceKind kind = JSTRACE_OBJECT; };
michael@0 136 template <> struct MapTypeToTraceKind<DebugScopeObject> { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
michael@0 137 template <> struct MapTypeToTraceKind<GlobalObject> { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
michael@0 138 template <> struct MapTypeToTraceKind<ScopeObject> { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
michael@0 139 template <> struct MapTypeToTraceKind<JSScript> { static const JSGCTraceKind kind = JSTRACE_SCRIPT; };
michael@0 140 template <> struct MapTypeToTraceKind<LazyScript> { static const JSGCTraceKind kind = JSTRACE_LAZY_SCRIPT; };
michael@0 141 template <> struct MapTypeToTraceKind<Shape> { static const JSGCTraceKind kind = JSTRACE_SHAPE; };
michael@0 142 template <> struct MapTypeToTraceKind<BaseShape> { static const JSGCTraceKind kind = JSTRACE_BASE_SHAPE; };
michael@0 143 template <> struct MapTypeToTraceKind<UnownedBaseShape> { static const JSGCTraceKind kind = JSTRACE_BASE_SHAPE; };
michael@0 144 template <> struct MapTypeToTraceKind<types::TypeObject>{ static const JSGCTraceKind kind = JSTRACE_TYPE_OBJECT; };
michael@0 145 template <> struct MapTypeToTraceKind<JSAtom> { static const JSGCTraceKind kind = JSTRACE_STRING; };
michael@0 146 template <> struct MapTypeToTraceKind<JSString> { static const JSGCTraceKind kind = JSTRACE_STRING; };
michael@0 147 template <> struct MapTypeToTraceKind<JSFlatString> { static const JSGCTraceKind kind = JSTRACE_STRING; };
michael@0 148 template <> struct MapTypeToTraceKind<JSLinearString> { static const JSGCTraceKind kind = JSTRACE_STRING; };
michael@0 149 template <> struct MapTypeToTraceKind<PropertyName> { static const JSGCTraceKind kind = JSTRACE_STRING; };
michael@0 150 template <> struct MapTypeToTraceKind<jit::JitCode> { static const JSGCTraceKind kind = JSTRACE_JITCODE; };
michael@0 151
michael@0 152 /* Map from C++ type to finalize kind. JSObject does not have a 1:1 mapping, so must use Arena::thingSize. */
michael@0 153 template <typename T> struct MapTypeToFinalizeKind {};
michael@0 154 template <> struct MapTypeToFinalizeKind<JSScript> { static const AllocKind kind = FINALIZE_SCRIPT; };
michael@0 155 template <> struct MapTypeToFinalizeKind<LazyScript> { static const AllocKind kind = FINALIZE_LAZY_SCRIPT; };
michael@0 156 template <> struct MapTypeToFinalizeKind<Shape> { static const AllocKind kind = FINALIZE_SHAPE; };
michael@0 157 template <> struct MapTypeToFinalizeKind<BaseShape> { static const AllocKind kind = FINALIZE_BASE_SHAPE; };
michael@0 158 template <> struct MapTypeToFinalizeKind<types::TypeObject> { static const AllocKind kind = FINALIZE_TYPE_OBJECT; };
michael@0 159 template <> struct MapTypeToFinalizeKind<JSFatInlineString> { static const AllocKind kind = FINALIZE_FAT_INLINE_STRING; };
michael@0 160 template <> struct MapTypeToFinalizeKind<JSString> { static const AllocKind kind = FINALIZE_STRING; };
michael@0 161 template <> struct MapTypeToFinalizeKind<JSExternalString> { static const AllocKind kind = FINALIZE_EXTERNAL_STRING; };
michael@0 162 template <> struct MapTypeToFinalizeKind<jit::JitCode> { static const AllocKind kind = FINALIZE_JITCODE; };
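
As an aside, here is a hedged sketch of how these trait templates are typically consumed by a templated allocation helper; the helper itself is hypothetical and only illustrates the compile-time lookup:

    // Hypothetical helper: resolve trace and finalize kinds for a type at
    // compile time via the specializations above.
    template <typename T>
    void DescribeAllocation()
    {
        JSGCTraceKind traceKind = MapTypeToTraceKind<T>::kind;
        AllocKind thingKind = MapTypeToFinalizeKind<T>::kind;
        (void) traceKind;   // e.g. JSTRACE_SCRIPT for T = JSScript
        (void) thingKind;   // e.g. FINALIZE_SCRIPT for T = JSScript
    }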
michael@0 163
michael@0 164 #if defined(JSGC_GENERATIONAL) || defined(DEBUG)
michael@0 165 static inline bool
michael@0 166 IsNurseryAllocable(AllocKind kind)
michael@0 167 {
michael@0 168 JS_ASSERT(kind >= 0 && unsigned(kind) < FINALIZE_LIMIT);
michael@0 169 static const bool map[] = {
michael@0 170 false, /* FINALIZE_OBJECT0 */
michael@0 171 true, /* FINALIZE_OBJECT0_BACKGROUND */
michael@0 172 false, /* FINALIZE_OBJECT2 */
michael@0 173 true, /* FINALIZE_OBJECT2_BACKGROUND */
michael@0 174 false, /* FINALIZE_OBJECT4 */
michael@0 175 true, /* FINALIZE_OBJECT4_BACKGROUND */
michael@0 176 false, /* FINALIZE_OBJECT8 */
michael@0 177 true, /* FINALIZE_OBJECT8_BACKGROUND */
michael@0 178 false, /* FINALIZE_OBJECT12 */
michael@0 179 true, /* FINALIZE_OBJECT12_BACKGROUND */
michael@0 180 false, /* FINALIZE_OBJECT16 */
michael@0 181 true, /* FINALIZE_OBJECT16_BACKGROUND */
michael@0 182 false, /* FINALIZE_SCRIPT */
michael@0 183 false, /* FINALIZE_LAZY_SCRIPT */
michael@0 184 false, /* FINALIZE_SHAPE */
michael@0 185 false, /* FINALIZE_BASE_SHAPE */
michael@0 186 false, /* FINALIZE_TYPE_OBJECT */
michael@0 187 false, /* FINALIZE_FAT_INLINE_STRING */
michael@0 188 false, /* FINALIZE_STRING */
michael@0 189 false, /* FINALIZE_EXTERNAL_STRING */
michael@0 190 false, /* FINALIZE_JITCODE */
michael@0 191 };
michael@0 192 JS_STATIC_ASSERT(JS_ARRAY_LENGTH(map) == FINALIZE_LIMIT);
michael@0 193 return map[kind];
michael@0 194 }
michael@0 195 #endif
michael@0 196
michael@0 197 static inline bool
michael@0 198 IsBackgroundFinalized(AllocKind kind)
michael@0 199 {
michael@0 200 JS_ASSERT(kind >= 0 && unsigned(kind) < FINALIZE_LIMIT);
michael@0 201 static const bool map[] = {
michael@0 202 false, /* FINALIZE_OBJECT0 */
michael@0 203 true, /* FINALIZE_OBJECT0_BACKGROUND */
michael@0 204 false, /* FINALIZE_OBJECT2 */
michael@0 205 true, /* FINALIZE_OBJECT2_BACKGROUND */
michael@0 206 false, /* FINALIZE_OBJECT4 */
michael@0 207 true, /* FINALIZE_OBJECT4_BACKGROUND */
michael@0 208 false, /* FINALIZE_OBJECT8 */
michael@0 209 true, /* FINALIZE_OBJECT8_BACKGROUND */
michael@0 210 false, /* FINALIZE_OBJECT12 */
michael@0 211 true, /* FINALIZE_OBJECT12_BACKGROUND */
michael@0 212 false, /* FINALIZE_OBJECT16 */
michael@0 213 true, /* FINALIZE_OBJECT16_BACKGROUND */
michael@0 214 false, /* FINALIZE_SCRIPT */
michael@0 215 false, /* FINALIZE_LAZY_SCRIPT */
michael@0 216 true, /* FINALIZE_SHAPE */
michael@0 217 true, /* FINALIZE_BASE_SHAPE */
michael@0 218 true, /* FINALIZE_TYPE_OBJECT */
michael@0 219 true, /* FINALIZE_FAT_INLINE_STRING */
michael@0 220 true, /* FINALIZE_STRING */
michael@0 221 false, /* FINALIZE_EXTERNAL_STRING */
michael@0 222 false, /* FINALIZE_JITCODE */
michael@0 223 };
michael@0 224 JS_STATIC_ASSERT(JS_ARRAY_LENGTH(map) == FINALIZE_LIMIT);
michael@0 225 return map[kind];
michael@0 226 }
michael@0 227
michael@0 228 static inline bool
michael@0 229 CanBeFinalizedInBackground(gc::AllocKind kind, const Class *clasp)
michael@0 230 {
michael@0 231 JS_ASSERT(kind <= gc::FINALIZE_OBJECT_LAST);
michael@0 232 /* If the class has no finalizer or a finalizer that is safe to call on
michael@0 233 * a different thread, we change the finalize kind. For example,
michael@0 234 * FINALIZE_OBJECT0 calls the finalizer on the main thread,
michael@0 235 * FINALIZE_OBJECT0_BACKGROUND calls the finalizer on the gcHelperThread.
michael@0 236 * IsBackgroundFinalized is called to prevent recursively incrementing
michael@0 237 * the finalize kind; kind may already be a background finalize kind.
michael@0 238 */
michael@0 239 return (!gc::IsBackgroundFinalized(kind) &&
michael@0 240 (!clasp->finalize || (clasp->flags & JSCLASS_BACKGROUND_FINALIZE)));
michael@0 241 }
michael@0 242
michael@0 243 inline JSGCTraceKind
michael@0 244 GetGCThingTraceKind(const void *thing);
michael@0 245
michael@0 246 /* Capacity for slotsToThingKind */
michael@0 247 const size_t SLOTS_TO_THING_KIND_LIMIT = 17;
michael@0 248
michael@0 249 extern const AllocKind slotsToThingKind[];
michael@0 250
michael@0 251 /* Get the best kind to use when making an object with the given slot count. */
michael@0 252 static inline AllocKind
michael@0 253 GetGCObjectKind(size_t numSlots)
michael@0 254 {
michael@0 255 if (numSlots >= SLOTS_TO_THING_KIND_LIMIT)
michael@0 256 return FINALIZE_OBJECT16;
michael@0 257 return slotsToThingKind[numSlots];
michael@0 258 }
michael@0 259
michael@0 260 /* As for GetGCObjectKind, but for dense array allocation. */
michael@0 261 static inline AllocKind
michael@0 262 GetGCArrayKind(size_t numSlots)
michael@0 263 {
michael@0 264 /*
michael@0 265 * Dense arrays can use their fixed slots to hold their elements array
michael@0 266 * (less two Values worth of ObjectElements header), but if more than the
michael@0 267 * maximum number of fixed slots is needed then the fixed slots will be
michael@0 268 * unused.
michael@0 269 */
michael@0 270 JS_STATIC_ASSERT(ObjectElements::VALUES_PER_HEADER == 2);
michael@0 271 if (numSlots > JSObject::NELEMENTS_LIMIT || numSlots + 2 >= SLOTS_TO_THING_KIND_LIMIT)
michael@0 272 return FINALIZE_OBJECT2;
michael@0 273 return slotsToThingKind[numSlots + 2];
michael@0 274 }
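
A small hedged illustration of how the two helpers above relate, assuming a slot count comfortably inside both limits mentioned in the comment:

    // Example values: a dense array asking for 6 element slots lands in the
    // same size class as a plain object with 6 + 2 fixed slots, because two
    // Values are reserved for the ObjectElements header.
    size_t numElements = 6;
    JS_ASSERT(GetGCArrayKind(numElements) == GetGCObjectKind(numElements + 2));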
michael@0 275
michael@0 276 static inline AllocKind
michael@0 277 GetGCObjectFixedSlotsKind(size_t numFixedSlots)
michael@0 278 {
michael@0 279 JS_ASSERT(numFixedSlots < SLOTS_TO_THING_KIND_LIMIT);
michael@0 280 return slotsToThingKind[numFixedSlots];
michael@0 281 }
michael@0 282
michael@0 283 static inline AllocKind
michael@0 284 GetBackgroundAllocKind(AllocKind kind)
michael@0 285 {
michael@0 286 JS_ASSERT(!IsBackgroundFinalized(kind));
michael@0 287 JS_ASSERT(kind <= FINALIZE_OBJECT_LAST);
michael@0 288 return (AllocKind) (kind + 1);
michael@0 289 }
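
A minimal sketch of how the helpers above are typically combined when selecting a kind for a new object; the surrounding allocation path and the |clasp| variable are assumptions, not code from this file:

    // Hypothetical caller: start from the foreground kind for the slot count,
    // then switch to the paired *_BACKGROUND kind when the class has no
    // finalizer or one that is safe to run off the main thread.
    AllocKind kind = GetGCObjectKind(numSlots);
    if (CanBeFinalizedInBackground(kind, clasp))
        kind = GetBackgroundAllocKind(kind);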
michael@0 290
michael@0 291 /*
michael@0 292 * Try to get the next larger size for an object, keeping BACKGROUND
michael@0 293 * consistent.
michael@0 294 */
michael@0 295 static inline bool
michael@0 296 TryIncrementAllocKind(AllocKind *kindp)
michael@0 297 {
michael@0 298 size_t next = size_t(*kindp) + 2;
michael@0 299 if (next >= size_t(FINALIZE_OBJECT_LIMIT))
michael@0 300 return false;
michael@0 301 *kindp = AllocKind(next);
michael@0 302 return true;
michael@0 303 }
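
For example (hedged, with an assumed starting kind), stepping by 2 keeps a background kind paired with the next larger background kind:

    // Hypothetical starting point.
    AllocKind kind = FINALIZE_OBJECT4_BACKGROUND;
    if (TryIncrementAllocKind(&kind))
        JS_ASSERT(kind == FINALIZE_OBJECT8_BACKGROUND);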
michael@0 304
michael@0 305 /* Get the number of fixed slots and initial capacity associated with a kind. */
michael@0 306 static inline size_t
michael@0 307 GetGCKindSlots(AllocKind thingKind)
michael@0 308 {
michael@0 309 /* Using a switch in hopes that thingKind will usually be a compile-time constant. */
michael@0 310 switch (thingKind) {
michael@0 311 case FINALIZE_OBJECT0:
michael@0 312 case FINALIZE_OBJECT0_BACKGROUND:
michael@0 313 return 0;
michael@0 314 case FINALIZE_OBJECT2:
michael@0 315 case FINALIZE_OBJECT2_BACKGROUND:
michael@0 316 return 2;
michael@0 317 case FINALIZE_OBJECT4:
michael@0 318 case FINALIZE_OBJECT4_BACKGROUND:
michael@0 319 return 4;
michael@0 320 case FINALIZE_OBJECT8:
michael@0 321 case FINALIZE_OBJECT8_BACKGROUND:
michael@0 322 return 8;
michael@0 323 case FINALIZE_OBJECT12:
michael@0 324 case FINALIZE_OBJECT12_BACKGROUND:
michael@0 325 return 12;
michael@0 326 case FINALIZE_OBJECT16:
michael@0 327 case FINALIZE_OBJECT16_BACKGROUND:
michael@0 328 return 16;
michael@0 329 default:
michael@0 330 MOZ_ASSUME_UNREACHABLE("Bad object finalize kind");
michael@0 331 }
michael@0 332 }
michael@0 333
michael@0 334 static inline size_t
michael@0 335 GetGCKindSlots(AllocKind thingKind, const Class *clasp)
michael@0 336 {
michael@0 337 size_t nslots = GetGCKindSlots(thingKind);
michael@0 338
michael@0 339 /* An object's private data uses the space taken by its last fixed slot. */
michael@0 340 if (clasp->flags & JSCLASS_HAS_PRIVATE) {
michael@0 341 JS_ASSERT(nslots > 0);
michael@0 342 nslots--;
michael@0 343 }
michael@0 344
michael@0 345 /*
michael@0 346 * Functions have a larger finalize kind than FINALIZE_OBJECT to reserve
michael@0 347 * space for the extra fields in JSFunction, but have no fixed slots.
michael@0 348 */
michael@0 349 if (clasp == FunctionClassPtr)
michael@0 350 nslots = 0;
michael@0 351
michael@0 352 return nslots;
michael@0 353 }
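
A worked example of the two rules above, assuming |clasp| is some non-function class whose flags include JSCLASS_HAS_PRIVATE:

    // The private pointer occupies the space of the last fixed slot.
    JS_ASSERT(GetGCKindSlots(FINALIZE_OBJECT4) == 4);
    JS_ASSERT(GetGCKindSlots(FINALIZE_OBJECT4, clasp) == 3);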
michael@0 354
michael@0 355 /*
michael@0 356 * ArenaList::head points to the start of the list. Normally cursor points
michael@0 357 * to the first arena in the list with some free things and all arenas
michael@0 358 * before cursor are fully allocated. However, as the arena currently being
michael@0 359 * allocated from is considered full while its list of free spans is moved
michael@0 360 * into the freeList, during the GC or cell enumeration, when an
michael@0 361 * unallocated freeList is moved back to the arena, we can see an arena
michael@0 362 * with some free cells before the cursor. The cursor is an indirect
michael@0 363 * pointer to allow for efficient list insertion at the cursor point and
michael@0 364 * other list manipulations.
michael@0 365 */
michael@0 366 struct ArenaList {
michael@0 367 ArenaHeader *head;
michael@0 368 ArenaHeader **cursor;
michael@0 369
michael@0 370 ArenaList() {
michael@0 371 clear();
michael@0 372 }
michael@0 373
michael@0 374 void clear() {
michael@0 375 head = nullptr;
michael@0 376 cursor = &head;
michael@0 377 }
michael@0 378
michael@0 379 void insert(ArenaHeader *arena);
michael@0 380 };
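
The indirect cursor exists so that insertion at the cursor point is O(1). Below is a hedged sketch of what insert() plausibly does, based on the comment above; the real definition lives in jsgc.cpp and may differ in detail:

    // Sketch only: splice the arena in at the cursor; if it has no free
    // things, advance the cursor past it so that everything before the
    // cursor remains fully allocated.
    void ArenaList::insert(ArenaHeader *arena)
    {
        arena->next = *cursor;
        *cursor = arena;
        if (!arena->hasFreeThings())
            cursor = &arena->next;
    }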
michael@0 381
michael@0 382 class ArenaLists
michael@0 383 {
michael@0 384 /*
michael@0 385 * For each arena kind its free list is represented as the first span with
michael@0 386 * free things. Initially all the spans are initialized as empty. After we
michael@0 387 * find a new arena with available things we move its first free span into
michael@0 388 * the list and set the arena as fully allocated. This way we do not need to
michael@0 389 * update the arena header after the initial allocation. When starting the
michael@0 390 * GC we move the head of the list of spans back to the arena, but only
michael@0 391 * for the arena that was not fully allocated.
michael@0 392 */
michael@0 393 FreeSpan freeLists[FINALIZE_LIMIT];
michael@0 394
michael@0 395 ArenaList arenaLists[FINALIZE_LIMIT];
michael@0 396
michael@0 397 /*
michael@0 398 * The background finalization adds the finalized arenas to the list at
michael@0 399 * the *cursor position. backgroundFinalizeState controls the interaction
michael@0 400 * between the GC lock and the access to the list from the allocation
michael@0 401 * thread.
michael@0 402 *
michael@0 403 * BFS_DONE indicates that the finalization is not running or cannot
michael@0 404 * affect this arena list. The allocation thread can access the list
michael@0 405 * outside the GC lock.
michael@0 406 *
michael@0 407 * In BFS_RUN and BFS_JUST_FINISHED the allocation thread must take the
michael@0 408 * lock. The former indicates that finalization is still running. The latter
michael@0 409 * signals that finalization has just added finalized arenas to the list. In
michael@0 410 * that case the lock effectively serves as a read barrier to ensure that
michael@0 411 * the allocation thread sees all the writes done during finalization.
michael@0 412 */
michael@0 413 enum BackgroundFinalizeState {
michael@0 414 BFS_DONE,
michael@0 415 BFS_RUN,
michael@0 416 BFS_JUST_FINISHED
michael@0 417 };
michael@0 418
michael@0 419 volatile uintptr_t backgroundFinalizeState[FINALIZE_LIMIT];
michael@0 420
michael@0 421 public:
michael@0 422 /* For each arena kind, a list of arenas remaining to be swept. */
michael@0 423 ArenaHeader *arenaListsToSweep[FINALIZE_LIMIT];
michael@0 424
michael@0 425 /* Shape arenas to be swept in the foreground. */
michael@0 426 ArenaHeader *gcShapeArenasToSweep;
michael@0 427
michael@0 428 public:
michael@0 429 ArenaLists() {
michael@0 430 for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
michael@0 431 freeLists[i].initAsEmpty();
michael@0 432 for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
michael@0 433 backgroundFinalizeState[i] = BFS_DONE;
michael@0 434 for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
michael@0 435 arenaListsToSweep[i] = nullptr;
michael@0 436 gcShapeArenasToSweep = nullptr;
michael@0 437 }
michael@0 438
michael@0 439 ~ArenaLists() {
michael@0 440 for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
michael@0 441 /*
michael@0 442 * We can only call this during the shutdown after the last GC when
michael@0 443 * the background finalization is disabled.
michael@0 444 */
michael@0 445 JS_ASSERT(backgroundFinalizeState[i] == BFS_DONE);
michael@0 446 ArenaHeader **headp = &arenaLists[i].head;
michael@0 447 while (ArenaHeader *aheader = *headp) {
michael@0 448 *headp = aheader->next;
michael@0 449 aheader->chunk()->releaseArena(aheader);
michael@0 450 }
michael@0 451 }
michael@0 452 }
michael@0 453
michael@0 454 static uintptr_t getFreeListOffset(AllocKind thingKind) {
michael@0 455 uintptr_t offset = offsetof(ArenaLists, freeLists);
michael@0 456 return offset + thingKind * sizeof(FreeSpan);
michael@0 457 }
michael@0 458
michael@0 459 const FreeSpan *getFreeList(AllocKind thingKind) const {
michael@0 460 return &freeLists[thingKind];
michael@0 461 }
michael@0 462
michael@0 463 ArenaHeader *getFirstArena(AllocKind thingKind) const {
michael@0 464 return arenaLists[thingKind].head;
michael@0 465 }
michael@0 466
michael@0 467 ArenaHeader *getFirstArenaToSweep(AllocKind thingKind) const {
michael@0 468 return arenaListsToSweep[thingKind];
michael@0 469 }
michael@0 470
michael@0 471 bool arenaListsAreEmpty() const {
michael@0 472 for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
michael@0 473 /*
michael@0 474 * The arena lists cannot be treated as empty while the background
michael@0 475 * finalization is not yet done.
michael@0 476 */
michael@0 477 if (backgroundFinalizeState[i] != BFS_DONE)
michael@0 478 return false;
michael@0 479 if (arenaLists[i].head)
michael@0 480 return false;
michael@0 481 }
michael@0 482 return true;
michael@0 483 }
michael@0 484
michael@0 485 bool arenasAreFull(AllocKind thingKind) const {
michael@0 486 return !*arenaLists[thingKind].cursor;
michael@0 487 }
michael@0 488
michael@0 489 void unmarkAll() {
michael@0 490 for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
michael@0 491 /* The background finalization must have stopped at this point. */
michael@0 492 JS_ASSERT(backgroundFinalizeState[i] == BFS_DONE ||
michael@0 493 backgroundFinalizeState[i] == BFS_JUST_FINISHED);
michael@0 494 for (ArenaHeader *aheader = arenaLists[i].head; aheader; aheader = aheader->next) {
michael@0 495 uintptr_t *word = aheader->chunk()->bitmap.arenaBits(aheader);
michael@0 496 memset(word, 0, ArenaBitmapWords * sizeof(uintptr_t));
michael@0 497 }
michael@0 498 }
michael@0 499 }
michael@0 500
michael@0 501 bool doneBackgroundFinalize(AllocKind kind) const {
michael@0 502 return backgroundFinalizeState[kind] == BFS_DONE ||
michael@0 503 backgroundFinalizeState[kind] == BFS_JUST_FINISHED;
michael@0 504 }
michael@0 505
michael@0 506 bool needBackgroundFinalizeWait(AllocKind kind) const {
michael@0 507 return backgroundFinalizeState[kind] != BFS_DONE;
michael@0 508 }
michael@0 509
michael@0 510 /*
michael@0 511 * Return the free list back to the arena so the GC finalization will not
michael@0 512 * run the finalizers over uninitialized bytes from free things.
michael@0 513 */
michael@0 514 void purge() {
michael@0 515 for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
michael@0 516 purge(AllocKind(i));
michael@0 517 }
michael@0 518
michael@0 519 void purge(AllocKind i) {
michael@0 520 FreeSpan *headSpan = &freeLists[i];
michael@0 521 if (!headSpan->isEmpty()) {
michael@0 522 ArenaHeader *aheader = headSpan->arenaHeader();
michael@0 523 aheader->setFirstFreeSpan(headSpan);
michael@0 524 headSpan->initAsEmpty();
michael@0 525 }
michael@0 526 }
michael@0 527
michael@0 528 inline void prepareForIncrementalGC(JSRuntime *rt);
michael@0 529
michael@0 530 /*
michael@0 531 * Temporarily copy the free list heads to the arenas so the code can see
michael@0 532 * the proper value in ArenaHeader::freeList when accessing the latter
michael@0 533 * outside the GC.
michael@0 534 */
michael@0 535 void copyFreeListsToArenas() {
michael@0 536 for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
michael@0 537 copyFreeListToArena(AllocKind(i));
michael@0 538 }
michael@0 539
michael@0 540 void copyFreeListToArena(AllocKind thingKind) {
michael@0 541 FreeSpan *headSpan = &freeLists[thingKind];
michael@0 542 if (!headSpan->isEmpty()) {
michael@0 543 ArenaHeader *aheader = headSpan->arenaHeader();
michael@0 544 JS_ASSERT(!aheader->hasFreeThings());
michael@0 545 aheader->setFirstFreeSpan(headSpan);
michael@0 546 }
michael@0 547 }
michael@0 548
michael@0 549 /*
michael@0 550 * Clear the free lists in arenas that were temporarily set there using
michael@0 551 * copyFreeListsToArenas().
michael@0 552 */
michael@0 553 void clearFreeListsInArenas() {
michael@0 554 for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
michael@0 555 clearFreeListInArena(AllocKind(i));
michael@0 556 }
michael@0 557
michael@0 558
michael@0 559 void clearFreeListInArena(AllocKind kind) {
michael@0 560 FreeSpan *headSpan = &freeLists[kind];
michael@0 561 if (!headSpan->isEmpty()) {
michael@0 562 ArenaHeader *aheader = headSpan->arenaHeader();
michael@0 563 JS_ASSERT(aheader->getFirstFreeSpan().isSameNonEmptySpan(headSpan));
michael@0 564 aheader->setAsFullyUsed();
michael@0 565 }
michael@0 566 }
michael@0 567
michael@0 568 /*
michael@0 569 * Check that the free list is either empty or was synchronized with the
michael@0 570 * arena using copyFreeListToArena().
michael@0 571 */
michael@0 572 bool isSynchronizedFreeList(AllocKind kind) {
michael@0 573 FreeSpan *headSpan = &freeLists[kind];
michael@0 574 if (headSpan->isEmpty())
michael@0 575 return true;
michael@0 576 ArenaHeader *aheader = headSpan->arenaHeader();
michael@0 577 if (aheader->hasFreeThings()) {
michael@0 578 /*
michael@0 579 * If the arena has a free list, it must be the same as the one in
michael@0 580 * our free lists.
michael@0 581 */
michael@0 582 JS_ASSERT(aheader->getFirstFreeSpan().isSameNonEmptySpan(headSpan));
michael@0 583 return true;
michael@0 584 }
michael@0 585 return false;
michael@0 586 }
michael@0 587
michael@0 588 MOZ_ALWAYS_INLINE void *allocateFromFreeList(AllocKind thingKind, size_t thingSize) {
michael@0 589 return freeLists[thingKind].allocate(thingSize);
michael@0 590 }
michael@0 591
michael@0 592 template <AllowGC allowGC>
michael@0 593 static void *refillFreeList(ThreadSafeContext *cx, AllocKind thingKind);
michael@0 594
michael@0 595 /*
michael@0 596 * Moves all arenas from |fromArenaLists| into |this|. In
michael@0 597 * parallel blocks, we temporarily create one ArenaLists per
michael@0 598 * parallel thread. When the parallel block ends, we move
michael@0 599 * whatever allocations may have been performed back into the
michael@0 600 * compartment's main arena list using this function.
michael@0 601 */
michael@0 602 void adoptArenas(JSRuntime *runtime, ArenaLists *fromArenaLists);
michael@0 603
michael@0 604 /* True if the ArenaHeader in question is found in this ArenaLists */
michael@0 605 bool containsArena(JSRuntime *runtime, ArenaHeader *arenaHeader);
michael@0 606
michael@0 607 void checkEmptyFreeLists() {
michael@0 608 #ifdef DEBUG
michael@0 609 for (size_t i = 0; i < mozilla::ArrayLength(freeLists); ++i)
michael@0 610 JS_ASSERT(freeLists[i].isEmpty());
michael@0 611 #endif
michael@0 612 }
michael@0 613
michael@0 614 void checkEmptyFreeList(AllocKind kind) {
michael@0 615 JS_ASSERT(freeLists[kind].isEmpty());
michael@0 616 }
michael@0 617
michael@0 618 void queueObjectsForSweep(FreeOp *fop);
michael@0 619 void queueStringsForSweep(FreeOp *fop);
michael@0 620 void queueShapesForSweep(FreeOp *fop);
michael@0 621 void queueScriptsForSweep(FreeOp *fop);
michael@0 622 void queueJitCodeForSweep(FreeOp *fop);
michael@0 623
michael@0 624 bool foregroundFinalize(FreeOp *fop, AllocKind thingKind, SliceBudget &sliceBudget);
michael@0 625 static void backgroundFinalize(FreeOp *fop, ArenaHeader *listHead, bool onBackgroundThread);
michael@0 626
michael@0 627 void wipeDuringParallelExecution(JSRuntime *rt);
michael@0 628
michael@0 629 private:
michael@0 630 inline void finalizeNow(FreeOp *fop, AllocKind thingKind);
michael@0 631 inline void forceFinalizeNow(FreeOp *fop, AllocKind thingKind);
michael@0 632 inline void queueForForegroundSweep(FreeOp *fop, AllocKind thingKind);
michael@0 633 inline void queueForBackgroundSweep(FreeOp *fop, AllocKind thingKind);
michael@0 634
michael@0 635 void *allocateFromArena(JS::Zone *zone, AllocKind thingKind);
michael@0 636 inline void *allocateFromArenaInline(JS::Zone *zone, AllocKind thingKind);
michael@0 637
michael@0 638 inline void normalizeBackgroundFinalizeState(AllocKind thingKind);
michael@0 639
michael@0 640 friend class js::Nursery;
michael@0 641 };
michael@0 642
michael@0 643 /*
michael@0 644 * Initial allocation size for data structures holding chunks is set to hold
michael@0 645 * chunks with total capacity of 16MB to avoid buffer resizes during browser
michael@0 646 * startup.
michael@0 647 */
michael@0 648 const size_t INITIAL_CHUNK_CAPACITY = 16 * 1024 * 1024 / ChunkSize;
michael@0 649
michael@0 650 /* The number of GC cycles an empty chunk can survive before being released. */
michael@0 651 const size_t MAX_EMPTY_CHUNK_AGE = 4;
michael@0 652
michael@0 653 } /* namespace gc */
michael@0 654
michael@0 655 typedef enum JSGCRootType {
michael@0 656 JS_GC_ROOT_VALUE_PTR,
michael@0 657 JS_GC_ROOT_STRING_PTR,
michael@0 658 JS_GC_ROOT_OBJECT_PTR,
michael@0 659 JS_GC_ROOT_SCRIPT_PTR
michael@0 660 } JSGCRootType;
michael@0 661
michael@0 662 struct RootInfo {
michael@0 663 RootInfo() {}
michael@0 664 RootInfo(const char *name, JSGCRootType type) : name(name), type(type) {}
michael@0 665 const char *name;
michael@0 666 JSGCRootType type;
michael@0 667 };
michael@0 668
michael@0 669 typedef js::HashMap<void *,
michael@0 670 RootInfo,
michael@0 671 js::DefaultHasher<void *>,
michael@0 672 js::SystemAllocPolicy> RootedValueMap;
michael@0 673
michael@0 674 extern bool
michael@0 675 AddValueRoot(JSContext *cx, js::Value *vp, const char *name);
michael@0 676
michael@0 677 extern bool
michael@0 678 AddValueRootRT(JSRuntime *rt, js::Value *vp, const char *name);
michael@0 679
michael@0 680 extern bool
michael@0 681 AddStringRoot(JSContext *cx, JSString **rp, const char *name);
michael@0 682
michael@0 683 extern bool
michael@0 684 AddObjectRoot(JSContext *cx, JSObject **rp, const char *name);
michael@0 685
michael@0 686 extern bool
michael@0 687 AddObjectRoot(JSRuntime *rt, JSObject **rp, const char *name);
michael@0 688
michael@0 689 extern bool
michael@0 690 AddScriptRoot(JSContext *cx, JSScript **rp, const char *name);
michael@0 691
michael@0 692 extern void
michael@0 693 RemoveRoot(JSRuntime *rt, void *rp);
michael@0 694
michael@0 695 } /* namespace js */
michael@0 696
michael@0 697 extern bool
michael@0 698 js_InitGC(JSRuntime *rt, uint32_t maxbytes);
michael@0 699
michael@0 700 extern void
michael@0 701 js_FinishGC(JSRuntime *rt);
michael@0 702
michael@0 703 namespace js {
michael@0 704
michael@0 705 class InterpreterFrame;
michael@0 706
michael@0 707 extern void
michael@0 708 MarkCompartmentActive(js::InterpreterFrame *fp);
michael@0 709
michael@0 710 extern void
michael@0 711 TraceRuntime(JSTracer *trc);
michael@0 712
michael@0 713 /* Must be called with GC lock taken. */
michael@0 714 extern bool
michael@0 715 TriggerGC(JSRuntime *rt, JS::gcreason::Reason reason);
michael@0 716
michael@0 717 /* Must be called with GC lock taken. */
michael@0 718 extern bool
michael@0 719 TriggerZoneGC(Zone *zone, JS::gcreason::Reason reason);
michael@0 720
michael@0 721 extern void
michael@0 722 MaybeGC(JSContext *cx);
michael@0 723
michael@0 724 extern void
michael@0 725 ReleaseAllJITCode(FreeOp *op);
michael@0 726
michael@0 727 /*
michael@0 728 * Kinds of js_GC invocation.
michael@0 729 */
michael@0 730 typedef enum JSGCInvocationKind {
michael@0 731 /* Normal invocation. */
michael@0 732 GC_NORMAL = 0,
michael@0 733
michael@0 734 /* Minimize GC triggers and release empty GC chunks right away. */
michael@0 735 GC_SHRINK = 1
michael@0 736 } JSGCInvocationKind;
michael@0 737
michael@0 738 extern void
michael@0 739 GC(JSRuntime *rt, JSGCInvocationKind gckind, JS::gcreason::Reason reason);
michael@0 740
michael@0 741 extern void
michael@0 742 GCSlice(JSRuntime *rt, JSGCInvocationKind gckind, JS::gcreason::Reason reason, int64_t millis = 0);
michael@0 743
michael@0 744 extern void
michael@0 745 GCFinalSlice(JSRuntime *rt, JSGCInvocationKind gckind, JS::gcreason::Reason reason);
michael@0 746
michael@0 747 extern void
michael@0 748 GCDebugSlice(JSRuntime *rt, bool limit, int64_t objCount);
michael@0 749
michael@0 750 extern void
michael@0 751 PrepareForDebugGC(JSRuntime *rt);
michael@0 752
michael@0 753 extern void
michael@0 754 MinorGC(JSRuntime *rt, JS::gcreason::Reason reason);
michael@0 755
michael@0 756 extern void
michael@0 757 MinorGC(JSContext *cx, JS::gcreason::Reason reason);
michael@0 758
michael@0 759 #ifdef JS_GC_ZEAL
michael@0 760 extern void
michael@0 761 SetGCZeal(JSRuntime *rt, uint8_t zeal, uint32_t frequency);
michael@0 762 #endif
michael@0 763
michael@0 764 /* Functions for managing cross compartment gray pointers. */
michael@0 765
michael@0 766 extern void
michael@0 767 DelayCrossCompartmentGrayMarking(JSObject *src);
michael@0 768
michael@0 769 extern void
michael@0 770 NotifyGCNukeWrapper(JSObject *o);
michael@0 771
michael@0 772 extern unsigned
michael@0 773 NotifyGCPreSwap(JSObject *a, JSObject *b);
michael@0 774
michael@0 775 extern void
michael@0 776 NotifyGCPostSwap(JSObject *a, JSObject *b, unsigned preResult);
michael@0 777
michael@0 778 /*
michael@0 779 * Helper that implements sweeping and allocation for kinds that can be swept
michael@0 780 * and allocated off the main thread.
michael@0 781 *
michael@0 782 * In non-threadsafe builds, all actual sweeping and allocation is performed
michael@0 783 * on the main thread, but GCHelperThread encapsulates this from clients as
michael@0 784 * much as possible.
michael@0 785 */
michael@0 786 class GCHelperThread {
michael@0 787 enum State {
michael@0 788 IDLE,
michael@0 789 SWEEPING,
michael@0 790 ALLOCATING,
michael@0 791 CANCEL_ALLOCATION,
michael@0 792 SHUTDOWN
michael@0 793 };
michael@0 794
michael@0 795 /*
michael@0 796 * During the finalization we do not free immediately. Rather we add the
michael@0 797 * corresponding pointers to a buffer which we later release on a
michael@0 798 * separate thread.
michael@0 799 *
michael@0 800 * The buffer is implemented as a vector of 64K arrays of pointers, not as
michael@0 801 * a simple vector, to avoid realloc calls during the vector growth and to
michael@0 802 * not bloat the binary size of the inlined freeLater method. Any OOM
michael@0 803 * during buffer growth results in the pointer being freed immediately.
michael@0 804 */
michael@0 805 static const size_t FREE_ARRAY_SIZE = size_t(1) << 16;
michael@0 806 static const size_t FREE_ARRAY_LENGTH = FREE_ARRAY_SIZE / sizeof(void *);
michael@0 807
michael@0 808 JSRuntime *const rt;
michael@0 809 PRThread *thread;
michael@0 810 PRCondVar *wakeup;
michael@0 811 PRCondVar *done;
michael@0 812 volatile State state;
michael@0 813
michael@0 814 void wait(PRCondVar *which);
michael@0 815
michael@0 816 bool sweepFlag;
michael@0 817 bool shrinkFlag;
michael@0 818
michael@0 819 Vector<void **, 16, js::SystemAllocPolicy> freeVector;
michael@0 820 void **freeCursor;
michael@0 821 void **freeCursorEnd;
michael@0 822
michael@0 823 bool backgroundAllocation;
michael@0 824
michael@0 825 friend class js::gc::ArenaLists;
michael@0 826
michael@0 827 void
michael@0 828 replenishAndFreeLater(void *ptr);
michael@0 829
michael@0 830 static void freeElementsAndArray(void **array, void **end) {
michael@0 831 JS_ASSERT(array <= end);
michael@0 832 for (void **p = array; p != end; ++p)
michael@0 833 js_free(*p);
michael@0 834 js_free(array);
michael@0 835 }
michael@0 836
michael@0 837 static void threadMain(void* arg);
michael@0 838 void threadLoop();
michael@0 839
michael@0 840 /* Must be called with the GC lock taken. */
michael@0 841 void doSweep();
michael@0 842
michael@0 843 public:
michael@0 844 GCHelperThread(JSRuntime *rt)
michael@0 845 : rt(rt),
michael@0 846 thread(nullptr),
michael@0 847 wakeup(nullptr),
michael@0 848 done(nullptr),
michael@0 849 state(IDLE),
michael@0 850 sweepFlag(false),
michael@0 851 shrinkFlag(false),
michael@0 852 freeCursor(nullptr),
michael@0 853 freeCursorEnd(nullptr),
michael@0 854 backgroundAllocation(true)
michael@0 855 { }
michael@0 856
michael@0 857 bool init();
michael@0 858 void finish();
michael@0 859
michael@0 860 /* Must be called with the GC lock taken. */
michael@0 861 void startBackgroundSweep(bool shouldShrink);
michael@0 862
michael@0 863 /* Must be called with the GC lock taken. */
michael@0 864 void startBackgroundShrink();
michael@0 865
michael@0 866 /* Must be called without the GC lock taken. */
michael@0 867 void waitBackgroundSweepEnd();
michael@0 868
michael@0 869 /* Must be called without the GC lock taken. */
michael@0 870 void waitBackgroundSweepOrAllocEnd();
michael@0 871
michael@0 872 /* Must be called with the GC lock taken. */
michael@0 873 inline void startBackgroundAllocationIfIdle();
michael@0 874
michael@0 875 bool canBackgroundAllocate() const {
michael@0 876 return backgroundAllocation;
michael@0 877 }
michael@0 878
michael@0 879 void disableBackgroundAllocation() {
michael@0 880 backgroundAllocation = false;
michael@0 881 }
michael@0 882
michael@0 883 PRThread *getThread() const {
michael@0 884 return thread;
michael@0 885 }
michael@0 886
michael@0 887 bool onBackgroundThread();
michael@0 888
michael@0 889 /*
michael@0 890 * When called outside the GC lock, this may return true even though
michael@0 891 * sweeping has in fact already finished.
michael@0 892 */
michael@0 893 bool sweeping() const {
michael@0 894 return state == SWEEPING;
michael@0 895 }
michael@0 896
michael@0 897 bool shouldShrink() const {
michael@0 898 JS_ASSERT(sweeping());
michael@0 899 return shrinkFlag;
michael@0 900 }
michael@0 901
michael@0 902 void freeLater(void *ptr) {
michael@0 903 JS_ASSERT(!sweeping());
michael@0 904 if (freeCursor != freeCursorEnd)
michael@0 905 *freeCursor++ = ptr;
michael@0 906 else
michael@0 907 replenishAndFreeLater(ptr);
michael@0 908 }
michael@0 909 };
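
A hedged sketch of the lock discipline called out in the comments above; |helper| stands for the runtime's GCHelperThread instance, and AutoLockGC is assumed to be the guard that models "with the GC lock taken":

    // Hypothetical driver code, not part of this header.
    {
        AutoLockGC lock(rt);                              // GC lock taken
        helper.startBackgroundSweep(/* shouldShrink = */ false);
    }
    // ... mutator continues running ...
    helper.waitBackgroundSweepEnd();                      // GC lock not held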
michael@0 910
michael@0 911 struct GCChunkHasher {
michael@0 912 typedef gc::Chunk *Lookup;
michael@0 913
michael@0 914 /*
michael@0 915 * Strip zeros for better distribution after multiplying by the golden
michael@0 916 * ratio.
michael@0 917 */
michael@0 918 static HashNumber hash(gc::Chunk *chunk) {
michael@0 919 JS_ASSERT(!(uintptr_t(chunk) & gc::ChunkMask));
michael@0 920 return HashNumber(uintptr_t(chunk) >> gc::ChunkShift);
michael@0 921 }
michael@0 922
michael@0 923 static bool match(gc::Chunk *k, gc::Chunk *l) {
michael@0 924 JS_ASSERT(!(uintptr_t(k) & gc::ChunkMask));
michael@0 925 JS_ASSERT(!(uintptr_t(l) & gc::ChunkMask));
michael@0 926 return k == l;
michael@0 927 }
michael@0 928 };
michael@0 929
michael@0 930 typedef HashSet<js::gc::Chunk *, GCChunkHasher, SystemAllocPolicy> GCChunkSet;
michael@0 931
michael@0 932 struct GrayRoot {
michael@0 933 void *thing;
michael@0 934 JSGCTraceKind kind;
michael@0 935 #ifdef DEBUG
michael@0 936 JSTraceNamePrinter debugPrinter;
michael@0 937 const void *debugPrintArg;
michael@0 938 size_t debugPrintIndex;
michael@0 939 #endif
michael@0 940
michael@0 941 GrayRoot(void *thing, JSGCTraceKind kind)
michael@0 942 : thing(thing), kind(kind) {}
michael@0 943 };
michael@0 944
michael@0 945 void
michael@0 946 MarkStackRangeConservatively(JSTracer *trc, Value *begin, Value *end);
michael@0 947
michael@0 948 typedef void (*IterateChunkCallback)(JSRuntime *rt, void *data, gc::Chunk *chunk);
michael@0 949 typedef void (*IterateZoneCallback)(JSRuntime *rt, void *data, JS::Zone *zone);
michael@0 950 typedef void (*IterateArenaCallback)(JSRuntime *rt, void *data, gc::Arena *arena,
michael@0 951 JSGCTraceKind traceKind, size_t thingSize);
michael@0 952 typedef void (*IterateCellCallback)(JSRuntime *rt, void *data, void *thing,
michael@0 953 JSGCTraceKind traceKind, size_t thingSize);
michael@0 954
michael@0 955 /*
michael@0 956 * This function calls |zoneCallback| on every zone, |compartmentCallback| on
michael@0 957 * every compartment, |arenaCallback| on every in-use arena, and |cellCallback|
michael@0 958 * on every in-use cell in the GC heap.
michael@0 959 */
michael@0 960 extern void
michael@0 961 IterateZonesCompartmentsArenasCells(JSRuntime *rt, void *data,
michael@0 962 IterateZoneCallback zoneCallback,
michael@0 963 JSIterateCompartmentCallback compartmentCallback,
michael@0 964 IterateArenaCallback arenaCallback,
michael@0 965 IterateCellCallback cellCallback);
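
A hedged usage sketch of the iterator above, e.g. for a simple heap census; the callback signatures follow the typedefs earlier in this file, and the bucket count is an assumption:

    // Count every in-use cell, bucketed by trace kind.
    static void CountCell(JSRuntime *rt, void *data, void *thing,
                          JSGCTraceKind traceKind, size_t thingSize)
    {
        static_cast<size_t *>(data)[traceKind]++;
    }
    static void NopZone(JSRuntime *rt, void *data, JS::Zone *zone) {}
    static void NopCompartment(JSRuntime *rt, void *data, JSCompartment *comp) {}
    static void NopArena(JSRuntime *rt, void *data, gc::Arena *arena,
                         JSGCTraceKind traceKind, size_t thingSize) {}

    size_t counts[16] = { 0 };   // assumed to cover all JSGCTraceKind values
    IterateZonesCompartmentsArenasCells(rt, counts,
                                        NopZone, NopCompartment,
                                        NopArena, CountCell);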
michael@0 966
michael@0 967 /*
michael@0 968 * This function is like IterateZonesCompartmentsArenasCells, but does it for a
michael@0 969 * single zone.
michael@0 970 */
michael@0 971 extern void
michael@0 972 IterateZoneCompartmentsArenasCells(JSRuntime *rt, Zone *zone, void *data,
michael@0 973 IterateZoneCallback zoneCallback,
michael@0 974 JSIterateCompartmentCallback compartmentCallback,
michael@0 975 IterateArenaCallback arenaCallback,
michael@0 976 IterateCellCallback cellCallback);
michael@0 977
michael@0 978 /*
michael@0 979 * Invoke chunkCallback on every in-use chunk.
michael@0 980 */
michael@0 981 extern void
michael@0 982 IterateChunks(JSRuntime *rt, void *data, IterateChunkCallback chunkCallback);
michael@0 983
michael@0 984 typedef void (*IterateScriptCallback)(JSRuntime *rt, void *data, JSScript *script);
michael@0 985
michael@0 986 /*
michael@0 987 * Invoke scriptCallback on every in-use script for
michael@0 988 * the given compartment or for all compartments if it is null.
michael@0 989 */
michael@0 990 extern void
michael@0 991 IterateScripts(JSRuntime *rt, JSCompartment *compartment,
michael@0 992 void *data, IterateScriptCallback scriptCallback);
michael@0 993
michael@0 994 } /* namespace js */
michael@0 995
michael@0 996 extern void
michael@0 997 js_FinalizeStringRT(JSRuntime *rt, JSString *str);
michael@0 998
michael@0 999 namespace js {
michael@0 1000
michael@0 1001 JSCompartment *
michael@0 1002 NewCompartment(JSContext *cx, JS::Zone *zone, JSPrincipals *principals,
michael@0 1003 const JS::CompartmentOptions &options);
michael@0 1004
michael@0 1005 namespace gc {
michael@0 1006
michael@0 1007 extern void
michael@0 1008 GCIfNeeded(JSContext *cx);
michael@0 1009
michael@0 1010 /* Tries to run a GC no matter what (used for GC zeal). */
michael@0 1011 void
michael@0 1012 RunDebugGC(JSContext *cx);
michael@0 1013
michael@0 1014 void
michael@0 1015 SetDeterministicGC(JSContext *cx, bool enabled);
michael@0 1016
michael@0 1017 void
michael@0 1018 SetValidateGC(JSContext *cx, bool enabled);
michael@0 1019
michael@0 1020 void
michael@0 1021 SetFullCompartmentChecks(JSContext *cx, bool enabled);
michael@0 1022
michael@0 1023 /* Wait for the background thread to finish sweeping if it is running. */
michael@0 1024 void
michael@0 1025 FinishBackgroundFinalize(JSRuntime *rt);
michael@0 1026
michael@0 1027 /*
michael@0 1028 * Merge all contents of source into target. This can only be used if source is
michael@0 1029 * the only compartment in its zone.
michael@0 1030 */
michael@0 1031 void
michael@0 1032 MergeCompartments(JSCompartment *source, JSCompartment *target);
michael@0 1033
michael@0 1034 const int ZealPokeValue = 1;
michael@0 1035 const int ZealAllocValue = 2;
michael@0 1036 const int ZealFrameGCValue = 3;
michael@0 1037 const int ZealVerifierPreValue = 4;
michael@0 1038 const int ZealFrameVerifierPreValue = 5;
michael@0 1039 const int ZealStackRootingValue = 6;
michael@0 1040 const int ZealGenerationalGCValue = 7;
michael@0 1041 const int ZealIncrementalRootsThenFinish = 8;
michael@0 1042 const int ZealIncrementalMarkAllThenFinish = 9;
michael@0 1043 const int ZealIncrementalMultipleSlices = 10;
michael@0 1044 const int ZealVerifierPostValue = 11;
michael@0 1045 const int ZealFrameVerifierPostValue = 12;
michael@0 1046 const int ZealCheckHashTablesOnMinorGC = 13;
michael@0 1047 const int ZealLimit = 13;
michael@0 1048
michael@0 1049 enum VerifierType {
michael@0 1050 PreBarrierVerifier,
michael@0 1051 PostBarrierVerifier
michael@0 1052 };
michael@0 1053
michael@0 1054 #ifdef JS_GC_ZEAL
michael@0 1055
michael@0 1056 /* Check that write barriers have been used correctly. See jsgc.cpp. */
michael@0 1057 void
michael@0 1058 VerifyBarriers(JSRuntime *rt, VerifierType type);
michael@0 1059
michael@0 1060 void
michael@0 1061 MaybeVerifyBarriers(JSContext *cx, bool always = false);
michael@0 1062
michael@0 1063 #else
michael@0 1064
michael@0 1065 static inline void
michael@0 1066 VerifyBarriers(JSRuntime *rt, VerifierType type)
michael@0 1067 {
michael@0 1068 }
michael@0 1069
michael@0 1070 static inline void
michael@0 1071 MaybeVerifyBarriers(JSContext *cx, bool always = false)
michael@0 1072 {
michael@0 1073 }
michael@0 1074
michael@0 1075 #endif
michael@0 1076
michael@0 1077 /*
michael@0 1078 * Instances of this class set the |JSRuntime::suppressGC| flag for the duration
michael@0 1079 * that they are live. Use of this class is highly discouraged. Please carefully
michael@0 1080 * read the comment in jscntxt.h above |suppressGC| and take all appropriate
michael@0 1081 * precautions before instantiating this class.
michael@0 1082 */
michael@0 1083 class AutoSuppressGC
michael@0 1084 {
michael@0 1085 int32_t &suppressGC_;
michael@0 1086
michael@0 1087 public:
michael@0 1088 AutoSuppressGC(ExclusiveContext *cx);
michael@0 1089 AutoSuppressGC(JSCompartment *comp);
michael@0 1090 AutoSuppressGC(JSRuntime *rt);
michael@0 1091
michael@0 1092 ~AutoSuppressGC()
michael@0 1093 {
michael@0 1094 suppressGC_--;
michael@0 1095 }
michael@0 1096 };
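
A minimal usage sketch of the RAII guard above; the enclosing function and |cx| are assumed:

    {
        js::gc::AutoSuppressGC suppress(cx);
        // ... code that must not trigger or observe a GC ...
    }   // suppression ends when |suppress| goes out of scope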
michael@0 1097
michael@0 1098 #ifdef DEBUG
michael@0 1099 /* Disable OOM testing in sections which are not OOM safe. */
michael@0 1100 class AutoEnterOOMUnsafeRegion
michael@0 1101 {
michael@0 1102 uint32_t saved_;
michael@0 1103
michael@0 1104 public:
michael@0 1105 AutoEnterOOMUnsafeRegion() : saved_(OOM_maxAllocations) {
michael@0 1106 OOM_maxAllocations = UINT32_MAX;
michael@0 1107 }
michael@0 1108 ~AutoEnterOOMUnsafeRegion() {
michael@0 1109 OOM_maxAllocations = saved_;
michael@0 1110 }
michael@0 1111 };
michael@0 1112 #else
michael@0 1113 class AutoEnterOOMUnsafeRegion {};
michael@0 1114 #endif /* DEBUG */
michael@0 1115
michael@0 1116 } /* namespace gc */
michael@0 1117
michael@0 1118 #ifdef DEBUG
michael@0 1119 /* Use this to avoid assertions when manipulating the wrapper map. */
michael@0 1120 class AutoDisableProxyCheck
michael@0 1121 {
michael@0 1122 MOZ_DECL_USE_GUARD_OBJECT_NOTIFIER;
michael@0 1123 uintptr_t &count;
michael@0 1124
michael@0 1125 public:
michael@0 1126 AutoDisableProxyCheck(JSRuntime *rt
michael@0 1127 MOZ_GUARD_OBJECT_NOTIFIER_PARAM);
michael@0 1128
michael@0 1129 ~AutoDisableProxyCheck() {
michael@0 1130 count--;
michael@0 1131 }
michael@0 1132 };
michael@0 1133 #else
michael@0 1134 struct AutoDisableProxyCheck
michael@0 1135 {
michael@0 1136 AutoDisableProxyCheck(JSRuntime *rt) {}
michael@0 1137 };
michael@0 1138 #endif
michael@0 1139
michael@0 1140 void
michael@0 1141 PurgeJITCaches(JS::Zone *zone);
michael@0 1142
michael@0 1143 // This is the same as IsInsideNursery, but not inlined.
michael@0 1144 bool
michael@0 1145 UninlinedIsInsideNursery(JSRuntime *rt, const void *thing);
michael@0 1146
michael@0 1147 } /* namespace js */
michael@0 1148
michael@0 1149 #endif /* jsgc_h */
