Sat, 03 Jan 2015 20:18:00 +0100
Conditionally enable the double-key logic according to private browsing
mode or the privacy.thirdparty.isolate preference, and implement it in
GetCookieStringCommon and FindCookie, where it counts...
Some reservations remain about how to convince FindCookie callers to test
the condition and pass a nullptr when the double-key logic is disabled.
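A minimal sketch of the intended gating, assuming Gecko's
mozilla::Preferences::GetBool; the helper name ThirdPartyIsolationActive and
its aIsPrivate parameter are illustrative, not the actual patch:

#include "mozilla/Preferences.h"

// Hypothetical helper: double-key cookie lookups only when in private
// browsing mode or when the user has opted in via the
// privacy.thirdparty.isolate preference.
static bool
ThirdPartyIsolationActive(bool aIsPrivate)
{
    return aIsPrivate ||
           mozilla::Preferences::GetBool("privacy.thirdparty.isolate", false);
}

Callers of FindCookie would then test this condition and pass a nullptr
first-party base domain when it returns false, so the lookup falls back to
the single-key behavior.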
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef gc_Zone_h
#define gc_Zone_h

#include "mozilla/Atomics.h"
#include "mozilla/MemoryReporting.h"

#include "jscntxt.h"
#include "jsgc.h"
#include "jsinfer.h"

#include "gc/FindSCCs.h"

namespace js {

namespace jit {
class JitZone;
}

/*
 * Encapsulates the data needed to perform allocation. Typically there is
 * precisely one of these per zone (|cx->zone()->allocator|). However, in
 * parallel execution mode, there will be one per worker thread.
 */
class Allocator
{
    /*
     * Since allocators can be accessed from worker threads, the parent zone_
     * should not be accessed in general. ArenaLists is allowed to actually do
     * the allocation, however.
     */
    friend class gc::ArenaLists;

    JS::Zone *zone_;

  public:
    explicit Allocator(JS::Zone *zone);

    js::gc::ArenaLists arenas;
};

typedef Vector<JSCompartment *, 1, SystemAllocPolicy> CompartmentVector;

} /* namespace js */
namespace JS {

/*
 * A zone is a collection of compartments. Every compartment belongs to exactly
 * one zone. In Firefox, there is roughly one zone per tab along with a system
 * zone for everything else. Zones mainly serve as boundaries for garbage
 * collection. Unlike compartments, they have no special security properties.
 *
 * Every GC thing belongs to exactly one zone. GC things from the same zone but
 * different compartments can share an arena (4k page). GC things from different
 * zones cannot be stored in the same arena. The garbage collector is capable of
 * collecting one zone at a time; it cannot collect at the granularity of
 * compartments.
 *
 * GC things are tied to zones and compartments as follows:
 *
 * - JSObjects belong to a compartment and cannot be shared between
 *   compartments. If an object needs to point to a JSObject in a different
 *   compartment, regardless of zone, it must go through a cross-compartment
 *   wrapper. Each compartment keeps track of its outgoing wrappers in a table.
 *
 * - JSStrings do not belong to any particular compartment, but they do belong
 *   to a zone. Thus, two different compartments in the same zone can point to a
 *   JSString. When a string needs to be wrapped, we copy it if it's in a
 *   different zone and do nothing if it's in the same zone. Thus, transferring
 *   strings within a zone is very efficient.
 *
 * - Shapes and base shapes belong to a compartment and cannot be shared between
 *   compartments. A base shape holds a pointer to its compartment. Shapes find
 *   their compartment via their base shape. JSObjects find their compartment
 *   via their shape.
 *
 * - Scripts are also compartment-local and cannot be shared. A script points to
 *   its compartment.
 *
 * - Type objects and JitCode objects belong to a compartment and cannot be
 *   shared. However, there is no mechanism to obtain their compartments.
 *
 * A zone remains alive as long as any GC things in the zone are alive. A
 * compartment remains alive as long as any JSObjects, scripts, shapes, or base
 * shapes within it are alive.
 *
 * We always guarantee that a zone has at least one live compartment by refusing
 * to delete the last compartment in a live zone. (This could happen, for
 * example, if the conservative scanner marks a string in an otherwise dead
 * zone.)
 */
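/*
 * Example (illustrative): entering another compartment and wrapping a value.
 * Per the rules above, a string in the same zone is shared as-is, while an
 * object always goes through a cross-compartment wrapper.
 *
 *   JSAutoCompartment ac(cx, otherGlobalObject);
 *   JS::RootedValue v(cx, val);
 *   if (!JS_WrapValue(cx, &v))
 *       return false;
 */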
struct Zone : public JS::shadow::Zone,
              public js::gc::GraphNodeBase<JS::Zone>,
              public js::MallocProvider<JS::Zone>
{
  private:
    friend bool js::CurrentThreadCanAccessZone(Zone *zone);

  public:
    js::Allocator allocator;

    js::CompartmentVector compartments;

  private:
    bool ionUsingBarriers_;

  public:
    bool active;  // GC flag, whether there are active frames

    bool compileBarriers(bool needsBarrier) const {
        return needsBarrier || runtimeFromMainThread()->gcZeal() == js::gc::ZealVerifierPreValue;
    }

    bool compileBarriers() const {
        return compileBarriers(needsBarrier());
    }

    enum ShouldUpdateIon {
        DontUpdateIon,
        UpdateIon
    };

    void setNeedsBarrier(bool needs, ShouldUpdateIon updateIon);

    const bool *addressOfNeedsBarrier() const {
        return &needsBarrier_;
    }

  public:
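    /*
     * A zone's GC state typically advances NoGC -> Mark -> MarkGray -> Sweep
     * -> Finished over the course of a collection and then returns to NoGC;
     * the predicates below test the individual phases.
     */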
    enum GCState {
        NoGC,
        Mark,
        MarkGray,
        Sweep,
        Finished
    };

  private:
    bool gcScheduled;
    GCState gcState;
    bool gcPreserveCode;

  public:
    bool isCollecting() const {
        if (runtimeFromMainThread()->isHeapCollecting())
            return gcState != NoGC;
        else
            return needsBarrier();
    }

    bool isPreservingCode() const {
        return gcPreserveCode;
    }

    /*
     * If this returns true, all object tracing must be done with a GC marking
     * tracer.
     */
    bool requireGCTracer() const {
        return runtimeFromMainThread()->isHeapMajorCollecting() && gcState != NoGC;
    }

    void setGCState(GCState state) {
        JS_ASSERT(runtimeFromMainThread()->isHeapBusy());
        JS_ASSERT_IF(state != NoGC, canCollect());
        gcState = state;
    }

    void scheduleGC() {
        JS_ASSERT(!runtimeFromMainThread()->isHeapBusy());
        gcScheduled = true;
    }

    void unscheduleGC() {
        gcScheduled = false;
    }

    bool isGCScheduled() {
        return gcScheduled && canCollect();
    }

    void setPreservingCode(bool preserving) {
        gcPreserveCode = preserving;
    }

    bool canCollect() {
        // Zones cannot be collected while in use by other threads.
        if (usedByExclusiveThread)
            return false;
        JSRuntime *rt = runtimeFromAnyThread();
        if (rt->isAtomsZone(this) && rt->exclusiveThreadsPresent())
            return false;
        return true;
    }

    bool wasGCStarted() const {
        return gcState != NoGC;
    }

    bool isGCMarking() {
        if (runtimeFromMainThread()->isHeapCollecting())
            return gcState == Mark || gcState == MarkGray;
        else
            return needsBarrier();
    }

    bool isGCMarkingBlack() {
        return gcState == Mark;
    }

    bool isGCMarkingGray() {
        return gcState == MarkGray;
    }

    bool isGCSweeping() {
        return gcState == Sweep;
    }

    bool isGCFinished() {
        return gcState == Finished;
    }

    /* This is updated by both the main and GC helper threads. */
    mozilla::Atomic<size_t, mozilla::ReleaseAcquire> gcBytes;

    size_t gcTriggerBytes;
    size_t gcMaxMallocBytes;
    double gcHeapGrowthFactor;

    bool isSystem;

    /* Whether this zone is being used by a thread with an ExclusiveContext. */
    bool usedByExclusiveThread;

    /*
     * Get a number that is incremented whenever this zone is collected, and
     * possibly at other times too.
     */
    uint64_t gcNumber();

    /*
     * These flags help us to discover if a compartment that shouldn't be alive
     * manages to outlive a GC.
     */
    bool scheduledForDestruction;
    bool maybeAlive;

    /*
     * Malloc counter to measure memory pressure for GC scheduling. It runs from
     * gcMaxMallocBytes down to zero. This counter should be used only when it's
     * not possible to know the size of a free.
     */
    mozilla::Atomic<ptrdiff_t, mozilla::ReleaseAcquire> gcMallocBytes;

    /*
     * Whether a GC has been triggered as a result of gcMallocBytes falling
     * below zero.
     *
     * This should be a bool, but Atomic only supports 32-bit and pointer-sized
     * types.
     */
    mozilla::Atomic<uint32_t, mozilla::ReleaseAcquire> gcMallocGCTriggered;

    /* This compartment's gray roots. */
    js::Vector<js::GrayRoot, 0, js::SystemAllocPolicy> gcGrayRoots;

    /* Per-zone data for use by an embedder. */
    void *data;

    Zone(JSRuntime *rt);
    ~Zone();

    void findOutgoingEdges(js::gc::ComponentFinder<JS::Zone> &finder);

    void discardJitCode(js::FreeOp *fop);

    void addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf,
                                size_t *typePool,
                                size_t *baselineStubsOptimized);

    void setGCLastBytes(size_t lastBytes, js::JSGCInvocationKind gckind);
    void reduceGCTriggerBytes(size_t amount);

    void resetGCMallocBytes();
    void setGCMaxMallocBytes(size_t value);
    void updateMallocCounter(size_t nbytes) {
        /*
         * Note: this code may be run from worker threads. We
         * tolerate any thread races when updating gcMallocBytes.
         */
        gcMallocBytes -= ptrdiff_t(nbytes);
        if (MOZ_UNLIKELY(isTooMuchMalloc()))
            onTooMuchMalloc();
    }
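    /*
     * Illustrative call site: charge the zone for a raw allocation whose
     * size will not be known when it is freed.
     *
     *   void *p = js_malloc(nbytes);
     *   if (p)
     *       zone->updateMallocCounter(nbytes);
     */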
    bool isTooMuchMalloc() const {
        return gcMallocBytes <= 0;
    }

    void onTooMuchMalloc();

    void *onOutOfMemory(void *p, size_t nbytes) {
        return runtimeFromMainThread()->onOutOfMemory(p, nbytes);
    }
    void reportAllocationOverflow() {
        js_ReportAllocationOverflow(nullptr);
    }

    js::types::TypeZone types;

    void sweep(js::FreeOp *fop, bool releaseTypes, bool *oom);

    bool hasMarkedCompartments();

  private:
    void sweepBreakpoints(js::FreeOp *fop);

#ifdef JS_ION
    js::jit::JitZone *jitZone_;
    js::jit::JitZone *createJitZone(JSContext *cx);

  public:
    js::jit::JitZone *getJitZone(JSContext *cx) {
        return jitZone_ ? jitZone_ : createJitZone(cx);
    }
    js::jit::JitZone *jitZone() {
        return jitZone_;
    }
#endif
};
} /* namespace JS */

namespace js {

/*
 * Using the atoms zone without holding the exclusive access lock is dangerous
 * because worker threads may be using it simultaneously. Therefore, it's
 * better to skip the atoms zone when iterating over zones. If you need to
 * iterate over the atoms zone, consider taking the exclusive access lock first.
 */
enum ZoneSelector {
    WithAtoms,
    SkipAtoms
};

class ZonesIter {
  private:
    JS::Zone **it, **end;

  public:
    ZonesIter(JSRuntime *rt, ZoneSelector selector) {
        it = rt->zones.begin();
        end = rt->zones.end();

        if (selector == SkipAtoms) {
            JS_ASSERT(rt->isAtomsZone(*it));
            it++;
        }
    }

    bool done() const { return it == end; }

    void next() {
        JS_ASSERT(!done());
        do {
            it++;
        } while (!done() && (*it)->usedByExclusiveThread);
    }

    JS::Zone *get() const {
        JS_ASSERT(!done());
        return *it;
    }

    operator JS::Zone *() const { return get(); }
    JS::Zone *operator->() const { return get(); }
};
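/*
 * Example (illustrative): given a JSRuntime *rt, visit every zone except the
 * atoms zone and schedule it for collection.
 *
 *   for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next())
 *       zone->scheduleGC();
 */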
struct CompartmentsInZoneIter
{
    // This is for the benefit of CompartmentsIterT::comp.
    friend class mozilla::Maybe<CompartmentsInZoneIter>;
  private:
    JSCompartment **it, **end;

    CompartmentsInZoneIter()
      : it(nullptr), end(nullptr)
    {}

  public:
    explicit CompartmentsInZoneIter(JS::Zone *zone) {
        it = zone->compartments.begin();
        end = zone->compartments.end();
    }

    bool done() const {
        JS_ASSERT(it);
        return it == end;
    }
    void next() {
        JS_ASSERT(!done());
        it++;
    }

    JSCompartment *get() const {
        JS_ASSERT(it);
        return *it;
    }

    operator JSCompartment *() const { return get(); }
    JSCompartment *operator->() const { return get(); }
};
/*
 * This iterator iterates over all the compartments in a given set of zones.
 * The set of zones is determined by iterating ZonesIterT.
 */
template<class ZonesIterT>
class CompartmentsIterT
{
  private:
    ZonesIterT zone;
    mozilla::Maybe<CompartmentsInZoneIter> comp;

  public:
    explicit CompartmentsIterT(JSRuntime *rt)
      : zone(rt)
    {
        if (zone.done())
            comp.construct();
        else
            comp.construct(zone);
    }

    CompartmentsIterT(JSRuntime *rt, ZoneSelector selector)
      : zone(rt, selector)
    {
        if (zone.done())
            comp.construct();
        else
            comp.construct(zone);
    }

    bool done() const { return zone.done(); }

    void next() {
        JS_ASSERT(!done());
        JS_ASSERT(!comp.ref().done());
        comp.ref().next();
        if (comp.ref().done()) {
            comp.destroy();
            zone.next();
            if (!zone.done())
                comp.construct(zone);
        }
    }

    JSCompartment *get() const {
        JS_ASSERT(!done());
        return comp.ref();
    }

    operator JSCompartment *() const { return get(); }
    JSCompartment *operator->() const { return get(); }
};

typedef CompartmentsIterT<ZonesIter> CompartmentsIter;
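/*
 * Example (illustrative): walk every compartment in the runtime, skipping
 * the atoms zone; visitCompartment is a hypothetical stand-in for real work.
 *
 *   for (CompartmentsIter comp(rt, SkipAtoms); !comp.done(); comp.next())
 *       visitCompartment(comp);
 */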
} /* namespace js */

#endif /* gc_Zone_h */