js/src/gc/Zone.h

author:    Michael Schloh von Bennewitz <michael@schloh.com>
date:      Wed, 31 Dec 2014 06:09:35 +0100
changeset: 0:6474c204b198

Cloned upstream origin tor-browser at tor-browser-31.3.0esr-4.5-1-build1,
revision ID fc1c9ff7c1b2defdbc039f12214767608f46423f, for hacking purposes.

/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef gc_Zone_h
#define gc_Zone_h

#include "mozilla/Atomics.h"
#include "mozilla/MemoryReporting.h"

#include "jscntxt.h"
#include "jsgc.h"
#include "jsinfer.h"

#include "gc/FindSCCs.h"

namespace js {

namespace jit {
class JitZone;
}

/*
 * Encapsulates the data needed to perform allocation. Typically there is
 * precisely one of these per zone (|cx->zone()->allocator|). However, in
 * parallel execution mode, there will be one per worker thread.
 */
class Allocator
{
    /*
     * Since allocators can be accessed from worker threads, the parent zone_
     * should not be accessed in general. ArenaLists is allowed to actually do
     * the allocation, however.
     */
    friend class gc::ArenaLists;

    JS::Zone *zone_;

  public:
    explicit Allocator(JS::Zone *zone);

    js::gc::ArenaLists arenas;
};
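
/*
 * Illustrative sketch, not part of the original header: in sequential
 * execution the allocator is reached through the zone, roughly:
 *
 *   js::Allocator &alloc = cx->zone()->allocator;
 *   // alloc.arenas is the per-zone js::gc::ArenaLists that actually hands
 *   // out GC cells; in parallel execution each worker thread owns a
 *   // separate Allocator instead of sharing this one.
 */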

typedef Vector<JSCompartment *, 1, SystemAllocPolicy> CompartmentVector;

} /* namespace js */

namespace JS {

/*
 * A zone is a collection of compartments. Every compartment belongs to exactly
 * one zone. In Firefox, there is roughly one zone per tab along with a system
 * zone for everything else. Zones mainly serve as boundaries for garbage
 * collection. Unlike compartments, they have no special security properties.
 *
 * Every GC thing belongs to exactly one zone. GC things from the same zone but
 * different compartments can share an arena (4k page). GC things from different
 * zones cannot be stored in the same arena. The garbage collector is capable of
 * collecting one zone at a time; it cannot collect at the granularity of
 * compartments.
 *
 * GC things are tied to zones and compartments as follows:
 *
 * - JSObjects belong to a compartment and cannot be shared between
 *   compartments. If an object needs to point to a JSObject in a different
 *   compartment, regardless of zone, it must go through a cross-compartment
 *   wrapper. Each compartment keeps track of its outgoing wrappers in a table.
 *
 * - JSStrings do not belong to any particular compartment, but they do belong
 *   to a zone. Thus, two different compartments in the same zone can point to a
 *   JSString. When a string needs to be wrapped, we copy it if it's in a
 *   different zone and do nothing if it's in the same zone. Thus, transferring
 *   strings within a zone is very efficient.
 *
 * - Shapes and base shapes belong to a compartment and cannot be shared between
 *   compartments. A base shape holds a pointer to its compartment. Shapes find
 *   their compartment via their base shape. JSObjects find their compartment
 *   via their shape.
 *
 * - Scripts are also compartment-local and cannot be shared. A script points to
 *   its compartment.
 *
 * - Type objects and JitCode objects belong to a compartment and cannot be
 *   shared. However, there is no mechanism to obtain their compartments.
 *
 * A zone remains alive as long as any GC things in the zone are alive. A
 * compartment remains alive as long as any JSObjects, scripts, shapes, or base
 * shapes within it are alive.
 *
 * We always guarantee that a zone has at least one live compartment by refusing
 * to delete the last compartment in a live zone. (This could happen, for
 * example, if the conservative scanner marks a string in an otherwise dead
 * zone.)
 */
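
/*
 * Illustrative sketch of the wrapping rules above, not in the original
 * source; it assumes the public JSAPI of this era and hypothetical rooted
 * variables |target| (an object) and |str| (a string). A string is copied
 * by JS_WrapValue only when it crosses zones, while an object always gets a
 * cross-compartment wrapper:
 *
 *   JSAutoCompartment ac(cx, target);             // enter target's compartment
 *   JS::RootedValue v(cx, JS::StringValue(str));
 *   if (!JS_WrapValue(cx, &v))                    // copies str only if target
 *       return false;                             // is in a different zone
 */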

struct Zone : public JS::shadow::Zone,
              public js::gc::GraphNodeBase<JS::Zone>,
              public js::MallocProvider<JS::Zone>
{
  private:
    friend bool js::CurrentThreadCanAccessZone(Zone *zone);

  public:
    js::Allocator allocator;

    js::CompartmentVector compartments;

  private:
    bool ionUsingBarriers_;

  public:
    bool active;  // GC flag, whether there are active frames

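    /*
     * Note added for clarity (not in the original source): when
     * compileBarriers() returns true, the JITs compile incremental
     * write-barrier code into jitcode; the pre-barrier verifier zeal mode
     * forces this even when barriers are not currently needed.
     */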
    bool compileBarriers(bool needsBarrier) const {
        return needsBarrier || runtimeFromMainThread()->gcZeal() == js::gc::ZealVerifierPreValue;
    }

    bool compileBarriers() const {
        return compileBarriers(needsBarrier());
    }

    enum ShouldUpdateIon {
        DontUpdateIon,
        UpdateIon
    };

    void setNeedsBarrier(bool needs, ShouldUpdateIon updateIon);

    const bool *addressOfNeedsBarrier() const {
        return &needsBarrier_;
    }

  public:
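    /*
     * Note added for clarity: during an incremental collection a zone's
     * state advances NoGC -> Mark -> MarkGray -> Sweep -> Finished.
     */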
    enum GCState {
        NoGC,
        Mark,
        MarkGray,
        Sweep,
        Finished
    };

  private:
    bool gcScheduled;
    GCState gcState;
    bool gcPreserveCode;

  public:
    bool isCollecting() const {
        if (runtimeFromMainThread()->isHeapCollecting())
            return gcState != NoGC;
        else
            return needsBarrier();
    }

    bool isPreservingCode() const {
        return gcPreserveCode;
    }

    /*
     * If this returns true, all object tracing must be done with a GC marking
     * tracer.
     */
    bool requireGCTracer() const {
        return runtimeFromMainThread()->isHeapMajorCollecting() && gcState != NoGC;
    }

    void setGCState(GCState state) {
        JS_ASSERT(runtimeFromMainThread()->isHeapBusy());
        JS_ASSERT_IF(state != NoGC, canCollect());
        gcState = state;
    }

    void scheduleGC() {
        JS_ASSERT(!runtimeFromMainThread()->isHeapBusy());
        gcScheduled = true;
    }

    void unscheduleGC() {
        gcScheduled = false;
    }

    bool isGCScheduled() {
        return gcScheduled && canCollect();
    }

    void setPreservingCode(bool preserving) {
        gcPreserveCode = preserving;
    }

    bool canCollect() {
        // Zones cannot be collected while in use by other threads.
        if (usedByExclusiveThread)
            return false;
        JSRuntime *rt = runtimeFromAnyThread();
        if (rt->isAtomsZone(this) && rt->exclusiveThreadsPresent())
            return false;
        return true;
    }

    bool wasGCStarted() const {
        return gcState != NoGC;
    }

    bool isGCMarking() {
        if (runtimeFromMainThread()->isHeapCollecting())
            return gcState == Mark || gcState == MarkGray;
        else
            return needsBarrier();
    }

    bool isGCMarkingBlack() {
        return gcState == Mark;
    }

    bool isGCMarkingGray() {
        return gcState == MarkGray;
    }

    bool isGCSweeping() {
        return gcState == Sweep;
    }

    bool isGCFinished() {
        return gcState == Finished;
    }

    /* This is updated by both the main and GC helper threads. */
    mozilla::Atomic<size_t, mozilla::ReleaseAcquire> gcBytes;

    size_t gcTriggerBytes;
    size_t gcMaxMallocBytes;
    double gcHeapGrowthFactor;

    bool isSystem;

    /* Whether this zone is being used by a thread with an ExclusiveContext. */
    bool usedByExclusiveThread;

    /*
     * Get a number that is incremented whenever this zone is collected, and
     * possibly at other times too.
     */
    uint64_t gcNumber();

    /*
     * These flags help us to discover if a compartment that shouldn't be alive
     * manages to outlive a GC.
     */
    bool scheduledForDestruction;
    bool maybeAlive;

    /*
     * Malloc counter to measure memory pressure for GC scheduling. It runs from
     * gcMaxMallocBytes down to zero. This counter should be used only when it's
     * not possible to know the size of a free.
     */
    mozilla::Atomic<ptrdiff_t, mozilla::ReleaseAcquire> gcMallocBytes;

    /*
     * Whether a GC has been triggered as a result of gcMallocBytes falling
     * below zero.
     *
     * This should be a bool, but Atomic only supports 32-bit and pointer-sized
     * types.
     */
    mozilla::Atomic<uint32_t, mozilla::ReleaseAcquire> gcMallocGCTriggered;

    /* This zone's gray roots. */
    js::Vector<js::GrayRoot, 0, js::SystemAllocPolicy> gcGrayRoots;

    /* Per-zone data for use by an embedder. */
    void *data;

    explicit Zone(JSRuntime *rt);
    ~Zone();

    void findOutgoingEdges(js::gc::ComponentFinder<JS::Zone> &finder);

    void discardJitCode(js::FreeOp *fop);

    void addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf,
                                size_t *typePool,
                                size_t *baselineStubsOptimized);

    void setGCLastBytes(size_t lastBytes, js::JSGCInvocationKind gckind);
    void reduceGCTriggerBytes(size_t amount);

    void resetGCMallocBytes();
    void setGCMaxMallocBytes(size_t value);
    void updateMallocCounter(size_t nbytes) {
        /*
         * Note: this code may be run from worker threads. We
         * tolerate any thread races when updating gcMallocBytes.
         */
        gcMallocBytes -= ptrdiff_t(nbytes);
        if (MOZ_UNLIKELY(isTooMuchMalloc()))
            onTooMuchMalloc();
    }
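
    /*
     * Illustrative sketch, not in the original source: the counter counts
     * down, so code reporting external allocations that the GC cannot see
     * drains gcMallocBytes until it crosses zero, at which point
     * onTooMuchMalloc() triggers a GC for this zone:
     *
     *   void *p = js_malloc(nbytes);  // memory whose free the GC can't track
     *   if (p)
     *       zone->updateMallocCounter(nbytes);
     */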

    bool isTooMuchMalloc() const {
        return gcMallocBytes <= 0;
    }

    void onTooMuchMalloc();

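    /*
     * Note added for clarity: the two hooks below serve the
     * js::MallocProvider<JS::Zone> base class, which calls them on allocation
     * failure and on size overflow respectively.
     */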
    void *onOutOfMemory(void *p, size_t nbytes) {
        return runtimeFromMainThread()->onOutOfMemory(p, nbytes);
    }
    void reportAllocationOverflow() {
        js_ReportAllocationOverflow(nullptr);
    }

    js::types::TypeZone types;

    void sweep(js::FreeOp *fop, bool releaseTypes, bool *oom);

    bool hasMarkedCompartments();

  private:
    void sweepBreakpoints(js::FreeOp *fop);

#ifdef JS_ION
    js::jit::JitZone *jitZone_;
    js::jit::JitZone *createJitZone(JSContext *cx);

  public:
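    /* Note added for clarity: the JitZone is created lazily on first use. */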
    js::jit::JitZone *getJitZone(JSContext *cx) {
        return jitZone_ ? jitZone_ : createJitZone(cx);
    }
    js::jit::JitZone *jitZone() {
        return jitZone_;
    }
#endif
};

} /* namespace JS */

namespace js {

/*
 * Using the atoms zone without holding the exclusive access lock is dangerous
 * because worker threads may be using it simultaneously. Therefore, it's
 * better to skip the atoms zone when iterating over zones. If you need to
 * iterate over the atoms zone, consider taking the exclusive access lock first.
 */
enum ZoneSelector {
    WithAtoms,
    SkipAtoms
};

class ZonesIter {
  private:
    JS::Zone **it, **end;

  public:
    ZonesIter(JSRuntime *rt, ZoneSelector selector) {
        it = rt->zones.begin();
        end = rt->zones.end();

        if (selector == SkipAtoms) {
            JS_ASSERT(rt->isAtomsZone(*it));
            it++;
        }
    }

    bool done() const { return it == end; }

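    /* Note added for clarity: zones in use by an exclusive thread are skipped. */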
    void next() {
        JS_ASSERT(!done());
        do {
            it++;
        } while (!done() && (*it)->usedByExclusiveThread);
    }

    JS::Zone *get() const {
        JS_ASSERT(!done());
        return *it;
    }

    operator JS::Zone *() const { return get(); }
    JS::Zone *operator->() const { return get(); }
};
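
/*
 * Illustrative usage, not in the original source: the typical iteration
 * pattern over all zones, skipping the atoms zone as the comment above
 * recommends:
 *
 *   for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next())
 *       zone->scheduleGC();
 */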

struct CompartmentsInZoneIter
{
    // This is for the benefit of CompartmentsIterT::comp.
    friend class mozilla::Maybe<CompartmentsInZoneIter>;

  private:
    JSCompartment **it, **end;

    CompartmentsInZoneIter()
      : it(nullptr), end(nullptr)
    {}

  public:
    explicit CompartmentsInZoneIter(JS::Zone *zone) {
        it = zone->compartments.begin();
        end = zone->compartments.end();
    }

    bool done() const {
        JS_ASSERT(it);
        return it == end;
    }
    void next() {
        JS_ASSERT(!done());
        it++;
    }

    JSCompartment *get() const {
        JS_ASSERT(it);
        return *it;
    }

    operator JSCompartment *() const { return get(); }
    JSCompartment *operator->() const { return get(); }
};

/*
 * This iterator iterates over all the compartments in a given set of zones.
 * The set of zones is determined by iterating ZonesIterT.
 */
template<class ZonesIterT>
class CompartmentsIterT
{
  private:
    ZonesIterT zone;
    mozilla::Maybe<CompartmentsInZoneIter> comp;

  public:
    explicit CompartmentsIterT(JSRuntime *rt)
      : zone(rt)
    {
        if (zone.done())
            comp.construct();
        else
            comp.construct(zone);
    }

    CompartmentsIterT(JSRuntime *rt, ZoneSelector selector)
      : zone(rt, selector)
    {
        if (zone.done())
            comp.construct();
        else
            comp.construct(zone);
    }

    bool done() const { return zone.done(); }

    void next() {
        JS_ASSERT(!done());
        JS_ASSERT(!comp.ref().done());
        comp.ref().next();
        if (comp.ref().done()) {
            comp.destroy();
            zone.next();
            if (!zone.done())
                comp.construct(zone);
        }
    }

    JSCompartment *get() const {
        JS_ASSERT(!done());
        return comp.ref();
    }

    operator JSCompartment *() const { return get(); }
    JSCompartment *operator->() const { return get(); }
};

typedef CompartmentsIterT<ZonesIter> CompartmentsIter;
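
/*
 * Illustrative usage, not in the original source: visiting every compartment
 * in the runtime outside the atoms zone:
 *
 *   for (CompartmentsIter comp(rt, SkipAtoms); !comp.done(); comp.next())
 *       comp->zone();   // comp behaves like a JSCompartment *
 */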

} /* namespace js */

#endif /* gc_Zone_h */
