|
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- |
|
2 * vim: set ts=8 sts=4 et sw=4 tw=99: |
|
3 * This Source Code Form is subject to the terms of the Mozilla Public |
|
4 * License, v. 2.0. If a copy of the MPL was not distributed with this |
|
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ |
|
6 |
|
7 /* JS Garbage Collector. */ |
|
8 |
|
9 #ifndef jsgc_h |
|
10 #define jsgc_h |
|
11 |
|
12 #include "mozilla/DebugOnly.h" |
|
13 #include "mozilla/MemoryReporting.h" |
|
14 |
|
15 #include "jslock.h" |
|
16 #include "jsobj.h" |
|
17 |
|
18 #include "js/GCAPI.h" |
|
19 #include "js/SliceBudget.h" |
|
20 #include "js/Vector.h" |
|
21 |
|
22 class JSAtom; |
|
23 struct JSCompartment; |
|
24 class JSFlatString; |
|
25 class JSLinearString; |
|
26 |
|
27 namespace js { |
|
28 |
|
29 class ArgumentsObject; |
|
30 class ArrayBufferObject; |
|
31 class ArrayBufferViewObject; |
|
32 class SharedArrayBufferObject; |
|
33 class BaseShape; |
|
34 class DebugScopeObject; |
|
35 class GCHelperThread; |
|
36 class GlobalObject; |
|
37 class LazyScript; |
|
38 class Nursery; |
|
39 class PropertyName; |
|
40 class ScopeObject; |
|
41 class Shape; |
|
42 class UnownedBaseShape; |
|
43 |
|
44 unsigned GetCPUCount(); |
|
45 |
|
/*
 * What the runtime is currently doing with the GC heap. Used to gate
 * operations that are illegal while a collection or heap trace is in
 * progress.
 */
enum HeapState {
    Idle,             // doing nothing with the GC heap
    Tracing,          // tracing the GC heap without collecting, e.g. IterateCompartments()
    MajorCollecting,  // doing a GC of the major heap
    MinorCollecting   // doing a GC of the minor heap (nursery)
};
|
52 |
|
53 namespace jit { |
|
54 class JitCode; |
|
55 } |
|
56 |
|
57 namespace gc { |
|
58 |
|
/*
 * Phases of an incremental GC, in the order they run.
 * NOTE(review): exact phase semantics live in the GC implementation;
 * names suggest: not collecting, root marking, marking, sweeping, and a
 * poison value — confirm against jsgc.cpp before relying on them.
 */
enum State {
    NO_INCREMENTAL,   // no incremental collection is in progress
    MARK_ROOTS,
    MARK,
    SWEEP,
    INVALID           // sentinel; never a valid runtime state
};
|
66 |
|
/*
 * Pool of empty GC chunks kept alive between collections so they can be
 * reused without going back to the OS. Holds a singly linked list of empty
 * chunks plus its length.
 */
class ChunkPool {
    Chunk *emptyChunkListHead;   // head of the list of cached empty chunks
    size_t emptyCount;           // number of chunks on the list

  public:
    ChunkPool()
      : emptyChunkListHead(nullptr),
        emptyCount(0) { }

    /* Number of empty chunks currently cached in this pool. */
    size_t getEmptyCount() const {
        return emptyCount;
    }

    inline bool wantBackgroundAllocation(JSRuntime *rt) const;

    /* Must be called with the GC lock taken. */
    inline Chunk *get(JSRuntime *rt);

    /* Must be called either during the GC or with the GC lock taken. */
    inline void put(Chunk *chunk);

    /*
     * Return the list of chunks that can be released outside the GC lock.
     * Must be called either during the GC or with the GC lock taken.
     */
    Chunk *expire(JSRuntime *rt, bool releaseAll);

    /* Must be called with the GC lock taken. */
    void expireAndFree(JSRuntime *rt, bool releaseAll);
};
|
97 |
|
/*
 * Map an allocation (finalize) kind to the trace kind used by the tracing
 * APIs. The table is indexed by AllocKind, so its entries MUST stay in the
 * exact declaration order of the FINALIZE_* enumerators; the static assert
 * only checks the count, not the order.
 */
static inline JSGCTraceKind
MapAllocToTraceKind(AllocKind kind)
{
    static const JSGCTraceKind map[] = {
        JSTRACE_OBJECT,     /* FINALIZE_OBJECT0 */
        JSTRACE_OBJECT,     /* FINALIZE_OBJECT0_BACKGROUND */
        JSTRACE_OBJECT,     /* FINALIZE_OBJECT2 */
        JSTRACE_OBJECT,     /* FINALIZE_OBJECT2_BACKGROUND */
        JSTRACE_OBJECT,     /* FINALIZE_OBJECT4 */
        JSTRACE_OBJECT,     /* FINALIZE_OBJECT4_BACKGROUND */
        JSTRACE_OBJECT,     /* FINALIZE_OBJECT8 */
        JSTRACE_OBJECT,     /* FINALIZE_OBJECT8_BACKGROUND */
        JSTRACE_OBJECT,     /* FINALIZE_OBJECT12 */
        JSTRACE_OBJECT,     /* FINALIZE_OBJECT12_BACKGROUND */
        JSTRACE_OBJECT,     /* FINALIZE_OBJECT16 */
        JSTRACE_OBJECT,     /* FINALIZE_OBJECT16_BACKGROUND */
        JSTRACE_SCRIPT,     /* FINALIZE_SCRIPT */
        JSTRACE_LAZY_SCRIPT,/* FINALIZE_LAZY_SCRIPT */
        JSTRACE_SHAPE,      /* FINALIZE_SHAPE */
        JSTRACE_BASE_SHAPE, /* FINALIZE_BASE_SHAPE */
        JSTRACE_TYPE_OBJECT,/* FINALIZE_TYPE_OBJECT */
        JSTRACE_STRING,     /* FINALIZE_FAT_INLINE_STRING */
        JSTRACE_STRING,     /* FINALIZE_STRING */
        JSTRACE_STRING,     /* FINALIZE_EXTERNAL_STRING */
        JSTRACE_JITCODE,    /* FINALIZE_JITCODE */
    };
    JS_STATIC_ASSERT(JS_ARRAY_LENGTH(map) == FINALIZE_LIMIT);
    return map[kind];
}
|
127 |
|
/*
 * Compile-time map from a GC-thing C++ type to its JSGCTraceKind. The
 * unspecialized primary template is intentionally empty so that using an
 * unmapped type fails to compile (no `kind` member).
 */
template <typename T> struct MapTypeToTraceKind {};
template <> struct MapTypeToTraceKind<ObjectImpl>       { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
template <> struct MapTypeToTraceKind<JSObject>         { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
template <> struct MapTypeToTraceKind<JSFunction>       { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
template <> struct MapTypeToTraceKind<ArgumentsObject>  { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
template <> struct MapTypeToTraceKind<ArrayBufferObject>{ static const JSGCTraceKind kind = JSTRACE_OBJECT; };
template <> struct MapTypeToTraceKind<ArrayBufferViewObject>{ static const JSGCTraceKind kind = JSTRACE_OBJECT; };
template <> struct MapTypeToTraceKind<SharedArrayBufferObject>{ static const JSGCTraceKind kind = JSTRACE_OBJECT; };
template <> struct MapTypeToTraceKind<DebugScopeObject> { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
template <> struct MapTypeToTraceKind<GlobalObject>     { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
template <> struct MapTypeToTraceKind<ScopeObject>      { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
template <> struct MapTypeToTraceKind<JSScript>         { static const JSGCTraceKind kind = JSTRACE_SCRIPT; };
template <> struct MapTypeToTraceKind<LazyScript>       { static const JSGCTraceKind kind = JSTRACE_LAZY_SCRIPT; };
template <> struct MapTypeToTraceKind<Shape>            { static const JSGCTraceKind kind = JSTRACE_SHAPE; };
template <> struct MapTypeToTraceKind<BaseShape>        { static const JSGCTraceKind kind = JSTRACE_BASE_SHAPE; };
template <> struct MapTypeToTraceKind<UnownedBaseShape> { static const JSGCTraceKind kind = JSTRACE_BASE_SHAPE; };
template <> struct MapTypeToTraceKind<types::TypeObject>{ static const JSGCTraceKind kind = JSTRACE_TYPE_OBJECT; };
template <> struct MapTypeToTraceKind<JSAtom>           { static const JSGCTraceKind kind = JSTRACE_STRING; };
template <> struct MapTypeToTraceKind<JSString>         { static const JSGCTraceKind kind = JSTRACE_STRING; };
template <> struct MapTypeToTraceKind<JSFlatString>     { static const JSGCTraceKind kind = JSTRACE_STRING; };
template <> struct MapTypeToTraceKind<JSLinearString>   { static const JSGCTraceKind kind = JSTRACE_STRING; };
template <> struct MapTypeToTraceKind<PropertyName>     { static const JSGCTraceKind kind = JSTRACE_STRING; };
template <> struct MapTypeToTraceKind<jit::JitCode>     { static const JSGCTraceKind kind = JSTRACE_JITCODE; };
|
151 |
|
/*
 * Compile-time map from C++ type to finalize (alloc) kind. JSObject does not
 * have a 1:1 mapping (its kind depends on slot count), so object sizes must
 * use Arena::thingSize instead. As with MapTypeToTraceKind, the empty primary
 * template makes unmapped types a compile error.
 */
template <typename T> struct MapTypeToFinalizeKind {};
template <> struct MapTypeToFinalizeKind<JSScript>          { static const AllocKind kind = FINALIZE_SCRIPT; };
template <> struct MapTypeToFinalizeKind<LazyScript>        { static const AllocKind kind = FINALIZE_LAZY_SCRIPT; };
template <> struct MapTypeToFinalizeKind<Shape>             { static const AllocKind kind = FINALIZE_SHAPE; };
template <> struct MapTypeToFinalizeKind<BaseShape>         { static const AllocKind kind = FINALIZE_BASE_SHAPE; };
template <> struct MapTypeToFinalizeKind<types::TypeObject> { static const AllocKind kind = FINALIZE_TYPE_OBJECT; };
template <> struct MapTypeToFinalizeKind<JSFatInlineString> { static const AllocKind kind = FINALIZE_FAT_INLINE_STRING; };
template <> struct MapTypeToFinalizeKind<JSString>          { static const AllocKind kind = FINALIZE_STRING; };
template <> struct MapTypeToFinalizeKind<JSExternalString>  { static const AllocKind kind = FINALIZE_EXTERNAL_STRING; };
template <> struct MapTypeToFinalizeKind<jit::JitCode>      { static const AllocKind kind = FINALIZE_JITCODE; };
|
163 |
|
#if defined(JSGC_GENERATIONAL) || defined(DEBUG)
/*
 * Whether things of the given kind may be allocated in the nursery (only the
 * *_BACKGROUND object kinds are). The table is indexed by AllocKind and MUST
 * mirror the FINALIZE_* declaration order; the static assert checks only the
 * length.
 */
static inline bool
IsNurseryAllocable(AllocKind kind)
{
    JS_ASSERT(kind >= 0 && unsigned(kind) < FINALIZE_LIMIT);
    static const bool map[] = {
        false,     /* FINALIZE_OBJECT0 */
        true,      /* FINALIZE_OBJECT0_BACKGROUND */
        false,     /* FINALIZE_OBJECT2 */
        true,      /* FINALIZE_OBJECT2_BACKGROUND */
        false,     /* FINALIZE_OBJECT4 */
        true,      /* FINALIZE_OBJECT4_BACKGROUND */
        false,     /* FINALIZE_OBJECT8 */
        true,      /* FINALIZE_OBJECT8_BACKGROUND */
        false,     /* FINALIZE_OBJECT12 */
        true,      /* FINALIZE_OBJECT12_BACKGROUND */
        false,     /* FINALIZE_OBJECT16 */
        true,      /* FINALIZE_OBJECT16_BACKGROUND */
        false,     /* FINALIZE_SCRIPT */
        false,     /* FINALIZE_LAZY_SCRIPT */
        false,     /* FINALIZE_SHAPE */
        false,     /* FINALIZE_BASE_SHAPE */
        false,     /* FINALIZE_TYPE_OBJECT */
        false,     /* FINALIZE_FAT_INLINE_STRING */
        false,     /* FINALIZE_STRING */
        false,     /* FINALIZE_EXTERNAL_STRING */
        false,     /* FINALIZE_JITCODE */
    };
    JS_STATIC_ASSERT(JS_ARRAY_LENGTH(map) == FINALIZE_LIMIT);
    return map[kind];
}
#endif
|
196 |
|
/*
 * Whether things of the given kind are finalized off the main thread by the
 * GC helper thread. Indexed by AllocKind; entries MUST mirror the FINALIZE_*
 * declaration order (the static assert checks only the length). Note that
 * external strings and jitcode are foreground-only.
 */
static inline bool
IsBackgroundFinalized(AllocKind kind)
{
    JS_ASSERT(kind >= 0 && unsigned(kind) < FINALIZE_LIMIT);
    static const bool map[] = {
        false,     /* FINALIZE_OBJECT0 */
        true,      /* FINALIZE_OBJECT0_BACKGROUND */
        false,     /* FINALIZE_OBJECT2 */
        true,      /* FINALIZE_OBJECT2_BACKGROUND */
        false,     /* FINALIZE_OBJECT4 */
        true,      /* FINALIZE_OBJECT4_BACKGROUND */
        false,     /* FINALIZE_OBJECT8 */
        true,      /* FINALIZE_OBJECT8_BACKGROUND */
        false,     /* FINALIZE_OBJECT12 */
        true,      /* FINALIZE_OBJECT12_BACKGROUND */
        false,     /* FINALIZE_OBJECT16 */
        true,      /* FINALIZE_OBJECT16_BACKGROUND */
        false,     /* FINALIZE_SCRIPT */
        false,     /* FINALIZE_LAZY_SCRIPT */
        true,      /* FINALIZE_SHAPE */
        true,      /* FINALIZE_BASE_SHAPE */
        true,      /* FINALIZE_TYPE_OBJECT */
        true,      /* FINALIZE_FAT_INLINE_STRING */
        true,      /* FINALIZE_STRING */
        false,     /* FINALIZE_EXTERNAL_STRING */
        false,     /* FINALIZE_JITCODE */
    };
    JS_STATIC_ASSERT(JS_ARRAY_LENGTH(map) == FINALIZE_LIMIT);
    return map[kind];
}
|
227 |
|
228 static inline bool |
|
229 CanBeFinalizedInBackground(gc::AllocKind kind, const Class *clasp) |
|
230 { |
|
231 JS_ASSERT(kind <= gc::FINALIZE_OBJECT_LAST); |
|
232 /* If the class has no finalizer or a finalizer that is safe to call on |
|
233 * a different thread, we change the finalize kind. For example, |
|
234 * FINALIZE_OBJECT0 calls the finalizer on the main thread, |
|
235 * FINALIZE_OBJECT0_BACKGROUND calls the finalizer on the gcHelperThread. |
|
236 * IsBackgroundFinalized is called to prevent recursively incrementing |
|
237 * the finalize kind; kind may already be a background finalize kind. |
|
238 */ |
|
239 return (!gc::IsBackgroundFinalized(kind) && |
|
240 (!clasp->finalize || (clasp->flags & JSCLASS_BACKGROUND_FINALIZE))); |
|
241 } |
|
242 |
|
243 inline JSGCTraceKind |
|
244 GetGCThingTraceKind(const void *thing); |
|
245 |
|
/*
 * Capacity of the slotsToThingKind table: slot counts 0..16 map directly to
 * an object AllocKind; anything at or above this limit falls back to
 * FINALIZE_OBJECT16 (see GetGCObjectKind).
 */
const size_t SLOTS_TO_THING_KIND_LIMIT = 17;

/* Table defined in jsgc.cpp; maps a fixed-slot count to the best object kind. */
extern const AllocKind slotsToThingKind[];
|
250 |
|
251 /* Get the best kind to use when making an object with the given slot count. */ |
|
252 static inline AllocKind |
|
253 GetGCObjectKind(size_t numSlots) |
|
254 { |
|
255 if (numSlots >= SLOTS_TO_THING_KIND_LIMIT) |
|
256 return FINALIZE_OBJECT16; |
|
257 return slotsToThingKind[numSlots]; |
|
258 } |
|
259 |
|
260 /* As for GetGCObjectKind, but for dense array allocation. */ |
|
261 static inline AllocKind |
|
262 GetGCArrayKind(size_t numSlots) |
|
263 { |
|
264 /* |
|
265 * Dense arrays can use their fixed slots to hold their elements array |
|
266 * (less two Values worth of ObjectElements header), but if more than the |
|
267 * maximum number of fixed slots is needed then the fixed slots will be |
|
268 * unused. |
|
269 */ |
|
270 JS_STATIC_ASSERT(ObjectElements::VALUES_PER_HEADER == 2); |
|
271 if (numSlots > JSObject::NELEMENTS_LIMIT || numSlots + 2 >= SLOTS_TO_THING_KIND_LIMIT) |
|
272 return FINALIZE_OBJECT2; |
|
273 return slotsToThingKind[numSlots + 2]; |
|
274 } |
|
275 |
|
/*
 * Look up the object kind for an exact fixed-slot count. Unlike
 * GetGCObjectKind, this does not saturate: the caller must guarantee the
 * count is within the table.
 */
static inline AllocKind
GetGCObjectFixedSlotsKind(size_t numFixedSlots)
{
    JS_ASSERT(numFixedSlots < SLOTS_TO_THING_KIND_LIMIT);
    return slotsToThingKind[numFixedSlots];
}
|
282 |
|
283 static inline AllocKind |
|
284 GetBackgroundAllocKind(AllocKind kind) |
|
285 { |
|
286 JS_ASSERT(!IsBackgroundFinalized(kind)); |
|
287 JS_ASSERT(kind <= FINALIZE_OBJECT_LAST); |
|
288 return (AllocKind) (kind + 1); |
|
289 } |
|
290 |
|
291 /* |
|
292 * Try to get the next larger size for an object, keeping BACKGROUND |
|
293 * consistent. |
|
294 */ |
|
295 static inline bool |
|
296 TryIncrementAllocKind(AllocKind *kindp) |
|
297 { |
|
298 size_t next = size_t(*kindp) + 2; |
|
299 if (next >= size_t(FINALIZE_OBJECT_LIMIT)) |
|
300 return false; |
|
301 *kindp = AllocKind(next); |
|
302 return true; |
|
303 } |
|
304 |
|
/*
 * Get the number of fixed slots (and initial dense-element capacity)
 * associated with an object allocation kind. Only object kinds are valid;
 * any other kind aborts.
 */
static inline size_t
GetGCKindSlots(AllocKind thingKind)
{
    /* Using a switch in hopes that thingKind will usually be a compile-time constant. */
    switch (thingKind) {
      case FINALIZE_OBJECT0:
      case FINALIZE_OBJECT0_BACKGROUND:
        return 0;
      case FINALIZE_OBJECT2:
      case FINALIZE_OBJECT2_BACKGROUND:
        return 2;
      case FINALIZE_OBJECT4:
      case FINALIZE_OBJECT4_BACKGROUND:
        return 4;
      case FINALIZE_OBJECT8:
      case FINALIZE_OBJECT8_BACKGROUND:
        return 8;
      case FINALIZE_OBJECT12:
      case FINALIZE_OBJECT12_BACKGROUND:
        return 12;
      case FINALIZE_OBJECT16:
      case FINALIZE_OBJECT16_BACKGROUND:
        return 16;
      default:
        MOZ_ASSUME_UNREACHABLE("Bad object finalize kind");
    }
}
|
333 |
|
334 static inline size_t |
|
335 GetGCKindSlots(AllocKind thingKind, const Class *clasp) |
|
336 { |
|
337 size_t nslots = GetGCKindSlots(thingKind); |
|
338 |
|
339 /* An object's private data uses the space taken by its last fixed slot. */ |
|
340 if (clasp->flags & JSCLASS_HAS_PRIVATE) { |
|
341 JS_ASSERT(nslots > 0); |
|
342 nslots--; |
|
343 } |
|
344 |
|
345 /* |
|
346 * Functions have a larger finalize kind than FINALIZE_OBJECT to reserve |
|
347 * space for the extra fields in JSFunction, but have no fixed slots. |
|
348 */ |
|
349 if (clasp == FunctionClassPtr) |
|
350 nslots = 0; |
|
351 |
|
352 return nslots; |
|
353 } |
|
354 |
|
/*
 * ArenaList::head points to the start of the list. Normally cursor points
 * to the first arena in the list with some free things and all arenas
 * before cursor are fully allocated. However, as the arena currently being
 * allocated from is considered full while its list of free spans is moved
 * into the freeList, during the GC or cell enumeration, when an
 * unallocated freeList is moved back to the arena, we can see an arena
 * with some free cells before the cursor. The cursor is an indirect
 * pointer to allow for efficient list insertion at the cursor point and
 * other list manipulations.
 */
struct ArenaList {
    ArenaHeader *head;       // first arena on the list, or nullptr when empty
    ArenaHeader **cursor;    // points at the link where insert() splices in

    ArenaList() {
        clear();
    }

    /* Reset to the empty list; cursor points at head itself. */
    void clear() {
        head = nullptr;
        cursor = &head;
    }

    /* Defined in jsgc.cpp: splice |arena| into the list at the cursor. */
    void insert(ArenaHeader *arena);
};
|
381 |
|
/*
 * Per-zone bookkeeping for GC arenas: one free list and one arena list per
 * allocation kind, plus the state machine coordinating with background
 * finalization on the GC helper thread.
 */
class ArenaLists
{
    /*
     * For each arena kind its free list is represented as the first span with
     * free things. Initially all the spans are initialized as empty. After we
     * find a new arena with available things we move its first free span into
     * the list and set the arena as fully allocated. This way we do not need
     * to update the arena header after the initial allocation. When starting
     * the GC we only move the head of the list of spans back to the arena
     * only for the arena that was not fully allocated.
     */
    FreeSpan freeLists[FINALIZE_LIMIT];

    ArenaList arenaLists[FINALIZE_LIMIT];

    /*
     * The background finalization adds the finalized arenas to the list at
     * the *cursor position. backgroundFinalizeState controls the interaction
     * between the GC lock and the access to the list from the allocation
     * thread.
     *
     * BFS_DONE indicates that the finalization is not running or cannot
     * affect this arena list. The allocation thread can access the list
     * outside the GC lock.
     *
     * In BFS_RUN and BFS_JUST_FINISHED the allocation thread must take the
     * lock. The former indicates that the finalization still runs. The latter
     * signals that finalization just added to the list finalized arenas. In
     * that case the lock effectively serves as a read barrier to ensure that
     * the allocation thread sees all the writes done during finalization.
     */
    enum BackgroundFinalizeState {
        BFS_DONE,
        BFS_RUN,
        BFS_JUST_FINISHED
    };

    /* One state per kind; volatile because it is read across threads. */
    volatile uintptr_t backgroundFinalizeState[FINALIZE_LIMIT];

  public:
    /* For each arena kind, a list of arenas remaining to be swept. */
    ArenaHeader *arenaListsToSweep[FINALIZE_LIMIT];

    /* Shape arenas to be swept in the foreground. */
    ArenaHeader *gcShapeArenasToSweep;

  public:
    ArenaLists() {
        for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
            freeLists[i].initAsEmpty();
        for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
            backgroundFinalizeState[i] = BFS_DONE;
        for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
            arenaListsToSweep[i] = nullptr;
        gcShapeArenasToSweep = nullptr;
    }

    ~ArenaLists() {
        for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
            /*
             * We can only call this during the shutdown after the last GC when
             * the background finalization is disabled.
             */
            JS_ASSERT(backgroundFinalizeState[i] == BFS_DONE);
            ArenaHeader **headp = &arenaLists[i].head;
            while (ArenaHeader *aheader = *headp) {
                *headp = aheader->next;
                aheader->chunk()->releaseArena(aheader);
            }
        }
    }

    /* Byte offset of freeLists[thingKind] inside ArenaLists; used by JIT code. */
    static uintptr_t getFreeListOffset(AllocKind thingKind) {
        uintptr_t offset = offsetof(ArenaLists, freeLists);
        return offset + thingKind * sizeof(FreeSpan);
    }

    const FreeSpan *getFreeList(AllocKind thingKind) const {
        return &freeLists[thingKind];
    }

    ArenaHeader *getFirstArena(AllocKind thingKind) const {
        return arenaLists[thingKind].head;
    }

    ArenaHeader *getFirstArenaToSweep(AllocKind thingKind) const {
        return arenaListsToSweep[thingKind];
    }

    bool arenaListsAreEmpty() const {
        for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
            /*
             * The arena cannot be empty if the background finalization is not
             * yet done.
             */
            if (backgroundFinalizeState[i] != BFS_DONE)
                return false;
            if (arenaLists[i].head)
                return false;
        }
        return true;
    }

    /* True when every arena of this kind is fully allocated (cursor at end). */
    bool arenasAreFull(AllocKind thingKind) const {
        return !*arenaLists[thingKind].cursor;
    }

    /* Clear the mark bitmaps of every arena owned by these lists. */
    void unmarkAll() {
        for (size_t i = 0; i != FINALIZE_LIMIT; ++i) {
            /* The background finalization must have stopped at this point. */
            JS_ASSERT(backgroundFinalizeState[i] == BFS_DONE ||
                      backgroundFinalizeState[i] == BFS_JUST_FINISHED);
            for (ArenaHeader *aheader = arenaLists[i].head; aheader; aheader = aheader->next) {
                uintptr_t *word = aheader->chunk()->bitmap.arenaBits(aheader);
                memset(word, 0, ArenaBitmapWords * sizeof(uintptr_t));
            }
        }
    }

    bool doneBackgroundFinalize(AllocKind kind) const {
        return backgroundFinalizeState[kind] == BFS_DONE ||
               backgroundFinalizeState[kind] == BFS_JUST_FINISHED;
    }

    bool needBackgroundFinalizeWait(AllocKind kind) const {
        return backgroundFinalizeState[kind] != BFS_DONE;
    }

    /*
     * Return the free list back to the arena so the GC finalization will not
     * run the finalizers over uninitialized bytes from free things.
     */
    void purge() {
        for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
            purge(AllocKind(i));
    }

    void purge(AllocKind i) {
        FreeSpan *headSpan = &freeLists[i];
        if (!headSpan->isEmpty()) {
            ArenaHeader *aheader = headSpan->arenaHeader();
            aheader->setFirstFreeSpan(headSpan);
            headSpan->initAsEmpty();
        }
    }

    inline void prepareForIncrementalGC(JSRuntime *rt);

    /*
     * Temporarily copy the free list heads to the arenas so the code can see
     * the proper value in ArenaHeader::freeList when accessing the latter
     * outside the GC.
     */
    void copyFreeListsToArenas() {
        for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
            copyFreeListToArena(AllocKind(i));
    }

    void copyFreeListToArena(AllocKind thingKind) {
        FreeSpan *headSpan = &freeLists[thingKind];
        if (!headSpan->isEmpty()) {
            ArenaHeader *aheader = headSpan->arenaHeader();
            JS_ASSERT(!aheader->hasFreeThings());
            aheader->setFirstFreeSpan(headSpan);
        }
    }

    /*
     * Clear the free lists in arenas that were temporarily set there using
     * copyFreeListsToArenas.
     */
    void clearFreeListsInArenas() {
        for (size_t i = 0; i != FINALIZE_LIMIT; ++i)
            clearFreeListInArena(AllocKind(i));
    }


    void clearFreeListInArena(AllocKind kind) {
        FreeSpan *headSpan = &freeLists[kind];
        if (!headSpan->isEmpty()) {
            ArenaHeader *aheader = headSpan->arenaHeader();
            JS_ASSERT(aheader->getFirstFreeSpan().isSameNonEmptySpan(headSpan));
            aheader->setAsFullyUsed();
        }
    }

    /*
     * Check that the free list is either empty or was synchronized with the
     * arena using copyFreeListToArena().
     */
    bool isSynchronizedFreeList(AllocKind kind) {
        FreeSpan *headSpan = &freeLists[kind];
        if (headSpan->isEmpty())
            return true;
        ArenaHeader *aheader = headSpan->arenaHeader();
        if (aheader->hasFreeThings()) {
            /*
             * If the arena has a free list, it must be the same as one in
             * lists.
             */
            JS_ASSERT(aheader->getFirstFreeSpan().isSameNonEmptySpan(headSpan));
            return true;
        }
        return false;
    }

    /* Fast path: carve a thing out of the cached free span, or nullptr. */
    MOZ_ALWAYS_INLINE void *allocateFromFreeList(AllocKind thingKind, size_t thingSize) {
        return freeLists[thingKind].allocate(thingSize);
    }

    /* Slow path taken when the free list for thingKind is exhausted. */
    template <AllowGC allowGC>
    static void *refillFreeList(ThreadSafeContext *cx, AllocKind thingKind);

    /*
     * Moves all arenas from |fromArenaLists| into |this|. In
     * parallel blocks, we temporarily create one ArenaLists per
     * parallel thread. When the parallel block ends, we move
     * whatever allocations may have been performed back into the
     * compartment's main arena list using this function.
     */
    void adoptArenas(JSRuntime *runtime, ArenaLists *fromArenaLists);

    /* True if the ArenaHeader in question is found in this ArenaLists */
    bool containsArena(JSRuntime *runtime, ArenaHeader *arenaHeader);

    void checkEmptyFreeLists() {
#ifdef DEBUG
        for (size_t i = 0; i < mozilla::ArrayLength(freeLists); ++i)
            JS_ASSERT(freeLists[i].isEmpty());
#endif
    }

    void checkEmptyFreeList(AllocKind kind) {
        JS_ASSERT(freeLists[kind].isEmpty());
    }

    /* Move kinds of each category onto arenaListsToSweep for later finalization. */
    void queueObjectsForSweep(FreeOp *fop);
    void queueStringsForSweep(FreeOp *fop);
    void queueShapesForSweep(FreeOp *fop);
    void queueScriptsForSweep(FreeOp *fop);
    void queueJitCodeForSweep(FreeOp *fop);

    bool foregroundFinalize(FreeOp *fop, AllocKind thingKind, SliceBudget &sliceBudget);
    static void backgroundFinalize(FreeOp *fop, ArenaHeader *listHead, bool onBackgroundThread);

    void wipeDuringParallelExecution(JSRuntime *rt);

  private:
    inline void finalizeNow(FreeOp *fop, AllocKind thingKind);
    inline void forceFinalizeNow(FreeOp *fop, AllocKind thingKind);
    inline void queueForForegroundSweep(FreeOp *fop, AllocKind thingKind);
    inline void queueForBackgroundSweep(FreeOp *fop, AllocKind thingKind);

    void *allocateFromArena(JS::Zone *zone, AllocKind thingKind);
    inline void *allocateFromArenaInline(JS::Zone *zone, AllocKind thingKind);

    inline void normalizeBackgroundFinalizeState(AllocKind thingKind);

    friend class js::Nursery;
};
|
642 |
|
/*
 * Initial allocation size for data structures holding chunks is set to hold
 * chunks with total capacity of 16MB to avoid buffer resizes during browser
 * startup.
 */
const size_t INITIAL_CHUNK_CAPACITY = 16 * 1024 * 1024 / ChunkSize;

/* The number of GC cycles an empty chunk can survive before being released. */
const size_t MAX_EMPTY_CHUNK_AGE = 4;
|
652 |
|
653 } /* namespace gc */ |
|
654 |
|
/* Kind of pointer registered with the root table (see RootInfo below). */
typedef enum JSGCRootType {
    JS_GC_ROOT_VALUE_PTR,
    JS_GC_ROOT_STRING_PTR,
    JS_GC_ROOT_OBJECT_PTR,
    JS_GC_ROOT_SCRIPT_PTR
} JSGCRootType;
|
661 |
|
/*
 * Value stored in the rooted-pointer map: a debugging name plus the kind of
 * pointer rooted. The default constructor deliberately leaves the members
 * uninitialized — HashMap requires a default-constructible value type and
 * always overwrites it before use.
 */
struct RootInfo {
    RootInfo() {}
    RootInfo(const char *name, JSGCRootType type) : name(name), type(type) {}
    const char *name;
    JSGCRootType type;
};
|
668 |
|
/* Map from the address of a rooted pointer to its RootInfo. */
typedef js::HashMap<void *,
                    RootInfo,
                    js::DefaultHasher<void *>,
                    js::SystemAllocPolicy> RootedValueMap;
|
673 |
|
/*
 * Register/unregister individual typed pointers with the runtime's root
 * table so the GC treats what they point at as live. Each Add* returns
 * false on OOM; |name| is a debugging label recorded in RootInfo.
 */
extern bool
AddValueRoot(JSContext *cx, js::Value *vp, const char *name);

extern bool
AddValueRootRT(JSRuntime *rt, js::Value *vp, const char *name);

extern bool
AddStringRoot(JSContext *cx, JSString **rp, const char *name);

extern bool
AddObjectRoot(JSContext *cx, JSObject **rp, const char *name);

extern bool
AddObjectRoot(JSRuntime *rt, JSObject **rp, const char *name);

extern bool
AddScriptRoot(JSContext *cx, JSScript **rp, const char *name);

/* Remove a pointer previously registered with any of the Add*Root calls. */
extern void
RemoveRoot(JSRuntime *rt, void *rp);
|
694 |
|
695 } /* namespace js */ |
|
696 |
|
/* Set up the runtime's GC state; |maxbytes| caps heap size. False on failure. */
extern bool
js_InitGC(JSRuntime *rt, uint32_t maxbytes);

/* Tear down GC state at runtime shutdown. */
extern void
js_FinishGC(JSRuntime *rt);
|
702 |
|
703 namespace js { |
|
704 |
|
705 class InterpreterFrame; |
|
706 |
|
extern void
MarkCompartmentActive(js::InterpreterFrame *fp);

/* Trace every GC thing reachable from the runtime's roots. */
extern void
TraceRuntime(JSTracer *trc);

/* Must be called with GC lock taken. */
extern bool
TriggerGC(JSRuntime *rt, JS::gcreason::Reason reason);

/* Must be called with GC lock taken. */
extern bool
TriggerZoneGC(Zone *zone, JS::gcreason::Reason reason);

/* Run a collection only if the heuristics decide one is worthwhile. */
extern void
MaybeGC(JSContext *cx);

extern void
ReleaseAllJITCode(FreeOp *op);
|
726 |
|
/*
 * Kinds of js_GC invocation.
 */
typedef enum JSGCInvocationKind {
    /* Normal invocation. */
    GC_NORMAL = 0,

    /* Minimize GC triggers and release empty GC chunks right away. */
    GC_SHRINK = 1
} JSGCInvocationKind;
|
737 |
|
/* Run a full, non-incremental collection. */
extern void
GC(JSRuntime *rt, JSGCInvocationKind gckind, JS::gcreason::Reason reason);

/* Run one incremental slice, budgeted to roughly |millis| ms (0 = default). */
extern void
GCSlice(JSRuntime *rt, JSGCInvocationKind gckind, JS::gcreason::Reason reason, int64_t millis = 0);

/* Finish the in-progress incremental collection in a single final slice. */
extern void
GCFinalSlice(JSRuntime *rt, JSGCInvocationKind gckind, JS::gcreason::Reason reason);

extern void
GCDebugSlice(JSRuntime *rt, bool limit, int64_t objCount);

extern void
PrepareForDebugGC(JSRuntime *rt);

/* Collect the nursery (minor heap) only. */
extern void
MinorGC(JSRuntime *rt, JS::gcreason::Reason reason);

extern void
MinorGC(JSContext *cx, JS::gcreason::Reason reason);

#ifdef JS_GC_ZEAL
extern void
SetGCZeal(JSRuntime *rt, uint8_t zeal, uint32_t frequency);
#endif
|
763 |
|
/* Functions for managing cross compartment gray pointers. */

extern void
DelayCrossCompartmentGrayMarking(JSObject *src);

extern void
NotifyGCNukeWrapper(JSObject *o);

/*
 * Pre/post hooks around JSObject::swap; the unsigned result of the pre hook
 * must be passed unchanged to the post hook.
 */
extern unsigned
NotifyGCPreSwap(JSObject *a, JSObject *b);

extern void
NotifyGCPostSwap(JSObject *a, JSObject *b, unsigned preResult);
|
777 |
|
778 /* |
|
779 * Helper that implements sweeping and allocation for kinds that can be swept |
|
780 * and allocated off the main thread. |
|
781 * |
|
782 * In non-threadsafe builds, all actual sweeping and allocation is performed |
|
783 * on the main thread, but GCHelperThread encapsulates this from clients as |
|
784 * much as possible. |
|
785 */ |
|
class GCHelperThread {
    /* Lifecycle of the helper thread; guarded by the GC lock. */
    enum State {
        IDLE,
        SWEEPING,
        ALLOCATING,
        CANCEL_ALLOCATION,
        SHUTDOWN
    };

    /*
     * During the finalization we do not free immediately. Rather we add the
     * corresponding pointers to a buffer which we later release on a
     * separated thread.
     *
     * The buffer is implemented as a vector of 64K arrays of pointers, not as
     * a simple vector, to avoid realloc calls during the vector growth and to
     * not bloat the binary size of the inlined freeLater method. Any OOM
     * during buffer growth results in the pointer being freed immediately.
     */
    static const size_t FREE_ARRAY_SIZE = size_t(1) << 16;
    static const size_t FREE_ARRAY_LENGTH = FREE_ARRAY_SIZE / sizeof(void *);

    JSRuntime *const rt;
    PRThread *thread;        // the helper thread itself, nullptr before init()
    PRCondVar *wakeup;       // signaled to wake the helper thread
    PRCondVar *done;         // signaled when the helper finishes a task
    volatile State state;    // current task; volatile: read across threads

    void wait(PRCondVar *which);

    bool sweepFlag;
    bool shrinkFlag;

    /* Buffer of pointer arrays queued by freeLater (see comment above). */
    Vector<void **, 16, js::SystemAllocPolicy> freeVector;
    void **freeCursor;       // next unused slot in the current array
    void **freeCursorEnd;    // one past the end of the current array

    bool backgroundAllocation;

    friend class js::gc::ArenaLists;

    void
    replenishAndFreeLater(void *ptr);

    /* Free every pointer in [array, end), then the array itself. */
    static void freeElementsAndArray(void **array, void **end) {
        JS_ASSERT(array <= end);
        for (void **p = array; p != end; ++p)
            js_free(*p);
        js_free(array);
    }

    static void threadMain(void* arg);
    void threadLoop();

    /* Must be called with the GC lock taken. */
    void doSweep();

  public:
    /* NOTE(review): single-argument constructor; consider `explicit`. */
    GCHelperThread(JSRuntime *rt)
      : rt(rt),
        thread(nullptr),
        wakeup(nullptr),
        done(nullptr),
        state(IDLE),
        sweepFlag(false),
        shrinkFlag(false),
        freeCursor(nullptr),
        freeCursorEnd(nullptr),
        backgroundAllocation(true)
    { }

    bool init();
    void finish();

    /* Must be called with the GC lock taken. */
    void startBackgroundSweep(bool shouldShrink);

    /* Must be called with the GC lock taken. */
    void startBackgroundShrink();

    /* Must be called without the GC lock taken. */
    void waitBackgroundSweepEnd();

    /* Must be called without the GC lock taken. */
    void waitBackgroundSweepOrAllocEnd();

    /* Must be called with the GC lock taken. */
    inline void startBackgroundAllocationIfIdle();

    bool canBackgroundAllocate() const {
        return backgroundAllocation;
    }

    void disableBackgroundAllocation() {
        backgroundAllocation = false;
    }

    PRThread *getThread() const {
        return thread;
    }

    bool onBackgroundThread();

    /*
     * Outside the GC lock may give true answer when in fact the sweeping has
     * been done.
     */
    bool sweeping() const {
        return state == SWEEPING;
    }

    bool shouldShrink() const {
        JS_ASSERT(sweeping());
        return shrinkFlag;
    }

    /* Queue |ptr| to be js_free'd on the helper thread (see buffer comment). */
    void freeLater(void *ptr) {
        JS_ASSERT(!sweeping());
        if (freeCursor != freeCursorEnd)
            *freeCursor++ = ptr;
        else
            replenishAndFreeLater(ptr);
    }
};
|
910 |
|
911 struct GCChunkHasher { |
|
912 typedef gc::Chunk *Lookup; |
|
913 |
|
914 /* |
|
915 * Strip zeros for better distribution after multiplying by the golden |
|
916 * ratio. |
|
917 */ |
|
918 static HashNumber hash(gc::Chunk *chunk) { |
|
919 JS_ASSERT(!(uintptr_t(chunk) & gc::ChunkMask)); |
|
920 return HashNumber(uintptr_t(chunk) >> gc::ChunkShift); |
|
921 } |
|
922 |
|
923 static bool match(gc::Chunk *k, gc::Chunk *l) { |
|
924 JS_ASSERT(!(uintptr_t(k) & gc::ChunkMask)); |
|
925 JS_ASSERT(!(uintptr_t(l) & gc::ChunkMask)); |
|
926 return k == l; |
|
927 } |
|
928 }; |
|
929 |
|
930 typedef HashSet<js::gc::Chunk *, GCChunkHasher, SystemAllocPolicy> GCChunkSet; |
|
931 |
|
/* Record of a gray root: a GC thing plus its trace kind. */
struct GrayRoot {
    void *thing;            /* address of the GC thing */
    JSGCTraceKind kind;     /* trace kind of |thing| */
#ifdef DEBUG
    /* Debug-only provenance — presumably used to print a name for this root
     * while tracing; confirm against the tracer call sites. */
    JSTraceNamePrinter debugPrinter;
    const void *debugPrintArg;
    size_t debugPrintIndex;
#endif

    /* Note: the DEBUG-only fields are left uninitialized here; callers are
     * expected to fill them in. */
    GrayRoot(void *thing, JSGCTraceKind kind)
      : thing(thing), kind(kind) {}
};
|
944 |
|
/* Conservatively mark the range of Values [begin, end). */
void
MarkStackRangeConservatively(JSTracer *trc, Value *begin, Value *end);

/*
 * Callback types for the heap-iteration functions below. The |data| cookie
 * passed to each iteration function is forwarded unchanged to its callbacks.
 */
typedef void (*IterateChunkCallback)(JSRuntime *rt, void *data, gc::Chunk *chunk);
typedef void (*IterateZoneCallback)(JSRuntime *rt, void *data, JS::Zone *zone);
typedef void (*IterateArenaCallback)(JSRuntime *rt, void *data, gc::Arena *arena,
                                     JSGCTraceKind traceKind, size_t thingSize);
typedef void (*IterateCellCallback)(JSRuntime *rt, void *data, void *thing,
                                    JSGCTraceKind traceKind, size_t thingSize);
|
954 |
|
/*
 * This function calls |zoneCallback| on every zone, |compartmentCallback| on
 * every compartment, |arenaCallback| on every in-use arena, and |cellCallback|
 * on every in-use cell in the GC heap. |data| is forwarded to every callback.
 */
extern void
IterateZonesCompartmentsArenasCells(JSRuntime *rt, void *data,
                                    IterateZoneCallback zoneCallback,
                                    JSIterateCompartmentCallback compartmentCallback,
                                    IterateArenaCallback arenaCallback,
                                    IterateCellCallback cellCallback);

/*
 * This function is like IterateZonesCompartmentsArenasCells, but does it for a
 * single zone.
 */
extern void
IterateZoneCompartmentsArenasCells(JSRuntime *rt, Zone *zone, void *data,
                                   IterateZoneCallback zoneCallback,
                                   JSIterateCompartmentCallback compartmentCallback,
                                   IterateArenaCallback arenaCallback,
                                   IterateCellCallback cellCallback);

/*
 * Invoke chunkCallback on every in-use chunk.
 */
extern void
IterateChunks(JSRuntime *rt, void *data, IterateChunkCallback chunkCallback);

/* Per-script callback for IterateScripts; |data| is the caller's cookie. */
typedef void (*IterateScriptCallback)(JSRuntime *rt, void *data, JSScript *script);

/*
 * Invoke scriptCallback on every in-use script for
 * the given compartment or for all compartments if it is null.
 */
extern void
IterateScripts(JSRuntime *rt, JSCompartment *compartment,
               void *data, IterateScriptCallback scriptCallback);
|
993 |
|
994 } /* namespace js */ |
|
995 |
|
/* Finalize |str| in the context of runtime |rt|. */
extern void
js_FinalizeStringRT(JSRuntime *rt, JSString *str);
|
998 |
|
999 namespace js { |
|
1000 |
|
/* Create a new compartment in |zone| with the given principals and options. */
JSCompartment *
NewCompartment(JSContext *cx, JS::Zone *zone, JSPrincipals *principals,
               const JS::CompartmentOptions &options);
|
1004 |
|
1005 namespace gc { |
|
1006 |
|
/* Perform a GC if one is needed. */
extern void
GCIfNeeded(JSContext *cx);

/* Tries to run a GC no matter what (used for GC zeal). */
void
RunDebugGC(JSContext *cx);

/* Enable or disable deterministic-GC mode. */
void
SetDeterministicGC(JSContext *cx, bool enabled);

/* Enable or disable GC heap validation. */
void
SetValidateGC(JSContext *cx, bool enabled);

/* Enable or disable full compartment checking. */
void
SetFullCompartmentChecks(JSContext *cx, bool enabled);

/* Wait for the background thread to finish sweeping if it is running. */
void
FinishBackgroundFinalize(JSRuntime *rt);

/*
 * Merge all contents of source into target. This can only be used if source is
 * the only compartment in its zone.
 */
void
MergeCompartments(JSCompartment *source, JSCompartment *target);
|
1033 |
|
/*
 * GC zeal modes: each value selects a different GC stress-testing schedule.
 * NOTE(review): presumably chosen via the JS_SetGCZeal API — confirm.
 */
const int ZealPokeValue = 1;
const int ZealAllocValue = 2;
const int ZealFrameGCValue = 3;
const int ZealVerifierPreValue = 4;
const int ZealFrameVerifierPreValue = 5;
const int ZealStackRootingValue = 6;
const int ZealGenerationalGCValue = 7;
const int ZealIncrementalRootsThenFinish = 8;
const int ZealIncrementalMarkAllThenFinish = 9;
const int ZealIncrementalMultipleSlices = 10;
const int ZealVerifierPostValue = 11;
const int ZealFrameVerifierPostValue = 12;
const int ZealCheckHashTablesOnMinorGC = 13;
/* Highest valid zeal mode; must be kept equal to the largest value above. */
const int ZealLimit = 13;
|
1048 |
|
/* Selects which write-barrier verifier VerifyBarriers should run. */
enum VerifierType {
    PreBarrierVerifier,
    PostBarrierVerifier
};
|
1053 |
|
#ifdef JS_GC_ZEAL

/* Check that write barriers have been used correctly. See jsgc.cpp. */
void
VerifyBarriers(JSRuntime *rt, VerifierType type);

/* Run the verifier only when zeal settings request it (or |always| is set). */
void
MaybeVerifyBarriers(JSContext *cx, bool always = false);

#else

/* Without JS_GC_ZEAL, barrier verification compiles away to no-ops. */
static inline void
VerifyBarriers(JSRuntime *rt, VerifierType type)
{
}

static inline void
MaybeVerifyBarriers(JSContext *cx, bool always = false)
{
}

#endif
|
1076 |
|
1077 /* |
|
1078 * Instances of this class set the |JSRuntime::suppressGC| flag for the duration |
|
1079 * that they are live. Use of this class is highly discouraged. Please carefully |
|
1080 * read the comment in jscntxt.h above |suppressGC| and take all appropriate |
|
1081 * precautions before instantiating this class. |
|
1082 */ |
|
1083 class AutoSuppressGC |
|
1084 { |
|
1085 int32_t &suppressGC_; |
|
1086 |
|
1087 public: |
|
1088 AutoSuppressGC(ExclusiveContext *cx); |
|
1089 AutoSuppressGC(JSCompartment *comp); |
|
1090 AutoSuppressGC(JSRuntime *rt); |
|
1091 |
|
1092 ~AutoSuppressGC() |
|
1093 { |
|
1094 suppressGC_--; |
|
1095 } |
|
1096 }; |
|
1097 |
|
#ifdef DEBUG
/* Disable OOM testing in sections which are not OOM safe. */
class AutoEnterOOMUnsafeRegion
{
    uint32_t saved_;   /* previous OOM_maxAllocations, restored on destruction */

  public:
    AutoEnterOOMUnsafeRegion() : saved_(OOM_maxAllocations) {
        /* Push simulated-OOM far out of reach for the region's duration. */
        OOM_maxAllocations = UINT32_MAX;
    }
    ~AutoEnterOOMUnsafeRegion() {
        OOM_maxAllocations = saved_;
    }
};
#else
/* Non-DEBUG builds have no simulated OOM, so the guard is an empty shell. */
class AutoEnterOOMUnsafeRegion {};
#endif /* DEBUG */
|
1115 |
|
1116 } /* namespace gc */ |
|
1117 |
|
1118 #ifdef DEBUG |
|
1119 /* Use this to avoid assertions when manipulating the wrapper map. */ |
|
1120 class AutoDisableProxyCheck |
|
1121 { |
|
1122 MOZ_DECL_USE_GUARD_OBJECT_NOTIFIER; |
|
1123 uintptr_t &count; |
|
1124 |
|
1125 public: |
|
1126 AutoDisableProxyCheck(JSRuntime *rt |
|
1127 MOZ_GUARD_OBJECT_NOTIFIER_PARAM); |
|
1128 |
|
1129 ~AutoDisableProxyCheck() { |
|
1130 count--; |
|
1131 } |
|
1132 }; |
|
1133 #else |
|
1134 struct AutoDisableProxyCheck |
|
1135 { |
|
1136 AutoDisableProxyCheck(JSRuntime *rt) {} |
|
1137 }; |
|
1138 #endif |
|
1139 |
|
/* Purge JIT code caches for the given zone. */
void
PurgeJITCaches(JS::Zone *zone);

// This is the same as IsInsideNursery, but not inlined.
bool
UninlinedIsInsideNursery(JSRuntime *rt, const void *thing);
|
1146 |
|
1147 } /* namespace js */ |
|
1148 |
|
1149 #endif /* jsgc_h */ |