js/src/jsgcinlines.h

branch TOR_BUG_9701, changeset 15:b8a032363ba2
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef jsgcinlines_h
#define jsgcinlines_h

#include "jsgc.h"

#include "gc/Zone.h"

namespace js {

class Shape;

/*
 * This auto class should be used around any code that might cause a mark bit
 * to be set on an object in a dead zone. See AutoMaybeTouchDeadZones for more
 * details.
 */
struct AutoMarkInDeadZone
{
    AutoMarkInDeadZone(JS::Zone *zone)
      : zone(zone),
        scheduled(zone->scheduledForDestruction)
    {
        JSRuntime *rt = zone->runtimeFromMainThread();
        if (rt->gcManipulatingDeadZones && zone->scheduledForDestruction) {
            rt->gcObjectsMarkedInDeadZones++;
            zone->scheduledForDestruction = false;
        }
    }

    ~AutoMarkInDeadZone() {
        zone->scheduledForDestruction = scheduled;
    }

  private:
    JS::Zone *zone;
    bool scheduled;
};
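
/*
 * Usage sketch (editor's addition, not part of the original changeset): the
 * guard is intended to sit on the stack around marking code that may touch a
 * dead zone. The surrounding function and its arguments are hypothetical.
 *
 *     void MarkCrossZoneEdge(JSObject *src, JSObject *target) {
 *         AutoMarkInDeadZone amd(target->zone());
 *         // ... code that may set a mark bit on target ...
 *     }
 */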

inline Allocator *const
ThreadSafeContext::allocator()
{
    JS_ASSERT_IF(isJSContext(), &asJSContext()->zone()->allocator == allocator_);
    return allocator_;
}

template <typename T>
inline bool
ThreadSafeContext::isThreadLocal(T thing) const
{
    if (!isForkJoinContext())
        return true;

    if (!IsInsideNursery(runtime_, thing) &&
        allocator_->arenas.containsArena(runtime_, thing->arenaHeader()))
    {
        // GC should be suppressed in preparation for mutating thread local
        // objects, as we don't want to trip any barriers.
        JS_ASSERT(!thing->zoneFromAnyThread()->needsBarrier());
        JS_ASSERT(!thing->runtimeFromAnyThread()->needsBarrier());

        return true;
    }

    return false;
}

namespace gc {

static inline AllocKind
GetGCObjectKind(const Class *clasp)
{
    if (clasp == FunctionClassPtr)
        return JSFunction::FinalizeKind;
    uint32_t nslots = JSCLASS_RESERVED_SLOTS(clasp);
    if (clasp->flags & JSCLASS_HAS_PRIVATE)
        nslots++;
    return GetGCObjectKind(nslots);
}
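
/*
 * Worked example (editor's addition, assuming the standard slot-count
 * mapping used by the GetGCObjectKind(nslots) overload in jsgc.h): for a
 * hypothetical class declared as
 *
 *     static const Class ExampleClass = {
 *         "Example",
 *         JSCLASS_HAS_RESERVED_SLOTS(3) | JSCLASS_HAS_PRIVATE,
 *         ...
 *     };
 *
 * nslots is 3 + 1 = 4, so the returned kind is the one sized for four fixed
 * slots (FINALIZE_OBJECT4).
 */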

#ifdef JSGC_GENERATIONAL
inline bool
ShouldNurseryAllocate(const Nursery &nursery, AllocKind kind, InitialHeap heap)
{
    return nursery.isEnabled() && IsNurseryAllocable(kind) && heap != TenuredHeap;
}
#endif

inline JSGCTraceKind
GetGCThingTraceKind(const void *thing)
{
    JS_ASSERT(thing);
    const Cell *cell = static_cast<const Cell *>(thing);
#ifdef JSGC_GENERATIONAL
    if (IsInsideNursery(cell->runtimeFromAnyThread(), cell))
        return JSTRACE_OBJECT;
#endif
    return MapAllocToTraceKind(cell->tenuredGetAllocKind());
}

static inline void
GCPoke(JSRuntime *rt)
{
    rt->gcPoke = true;

#ifdef JS_GC_ZEAL
    /* Schedule a GC to happen "soon" after a GC poke. */
    if (rt->gcZeal() == js::gc::ZealPokeValue)
        rt->gcNextScheduled = 1;
#endif
}

class ArenaIter
{
    ArenaHeader *aheader;
    ArenaHeader *remainingHeader;

  public:
    ArenaIter() {
        init();
    }

    ArenaIter(JS::Zone *zone, AllocKind kind) {
        init(zone, kind);
    }

    void init() {
        aheader = nullptr;
        remainingHeader = nullptr;
    }

    void init(ArenaHeader *aheaderArg) {
        aheader = aheaderArg;
        remainingHeader = nullptr;
    }

    void init(JS::Zone *zone, AllocKind kind) {
        aheader = zone->allocator.arenas.getFirstArena(kind);
        remainingHeader = zone->allocator.arenas.getFirstArenaToSweep(kind);
        if (!aheader) {
            aheader = remainingHeader;
            remainingHeader = nullptr;
        }
    }

    bool done() {
        return !aheader;
    }

    ArenaHeader *get() {
        return aheader;
    }

    void next() {
        aheader = aheader->next;
        if (!aheader) {
            aheader = remainingHeader;
            remainingHeader = nullptr;
        }
    }
};
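
/*
 * Usage sketch (editor's addition): ArenaIter visits the regular arena list
 * first, then any arenas still queued for background sweeping. `zone` is
 * assumed to be a live JS::Zone.
 *
 *     for (ArenaIter aiter(zone, FINALIZE_OBJECT4); !aiter.done(); aiter.next()) {
 *         ArenaHeader *aheader = aiter.get();
 *         // ... per-arena processing ...
 *     }
 */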

class CellIterImpl
{
    size_t firstThingOffset;
    size_t thingSize;
    ArenaIter aiter;
    FreeSpan firstSpan;
    const FreeSpan *span;
    uintptr_t thing;
    Cell *cell;

  protected:
    CellIterImpl() {
    }

    void initSpan(JS::Zone *zone, AllocKind kind) {
        JS_ASSERT(zone->allocator.arenas.isSynchronizedFreeList(kind));
        firstThingOffset = Arena::firstThingOffset(kind);
        thingSize = Arena::thingSize(kind);
        firstSpan.initAsEmpty();
        span = &firstSpan;
        thing = span->first;
    }

    void init(ArenaHeader *singleAheader) {
        initSpan(singleAheader->zone, singleAheader->getAllocKind());
        aiter.init(singleAheader);
        next();
        aiter.init();
    }

    void init(JS::Zone *zone, AllocKind kind) {
        initSpan(zone, kind);
        aiter.init(zone, kind);
        next();
    }

  public:
    bool done() const {
        return !cell;
    }

    template<typename T> T *get() const {
        JS_ASSERT(!done());
        return static_cast<T *>(cell);
    }

    Cell *getCell() const {
        JS_ASSERT(!done());
        return cell;
    }

    void next() {
        for (;;) {
            // A thing that doesn't start a free span is allocated; yield it.
            if (thing != span->first)
                break;
            if (MOZ_LIKELY(span->hasNext())) {
                // Skip over the free span to the next allocated thing.
                thing = span->last + thingSize;
                span = span->nextSpan();
                break;
            }
            // The last free span extends to the end of the arena; advance to
            // the next arena, if any.
            if (aiter.done()) {
                cell = nullptr;
                return;
            }
            ArenaHeader *aheader = aiter.get();
            firstSpan = aheader->getFirstFreeSpan();
            span = &firstSpan;
            thing = aheader->arenaAddress() | firstThingOffset;
            aiter.next();
        }
        cell = reinterpret_cast<Cell *>(thing);
        thing += thingSize;
    }
};

class CellIterUnderGC : public CellIterImpl
{
  public:
    CellIterUnderGC(JS::Zone *zone, AllocKind kind) {
#ifdef JSGC_GENERATIONAL
        JS_ASSERT(zone->runtimeFromAnyThread()->gcNursery.isEmpty());
#endif
        JS_ASSERT(zone->runtimeFromAnyThread()->isHeapBusy());
        init(zone, kind);
    }

    CellIterUnderGC(ArenaHeader *aheader) {
        JS_ASSERT(aheader->zone->runtimeFromAnyThread()->isHeapBusy());
        init(aheader);
    }
};

class CellIter : public CellIterImpl
{
    ArenaLists *lists;
    AllocKind kind;
#ifdef DEBUG
    size_t *counter;
#endif
  public:
    CellIter(JS::Zone *zone, AllocKind kind)
      : lists(&zone->allocator.arenas),
        kind(kind)
    {
        /*
         * We have a single-threaded runtime, so there's no need to protect
         * against other threads iterating or allocating. However, we do have
         * background finalization; we have to wait for this to finish if it's
         * currently active.
         */
        if (IsBackgroundFinalized(kind) &&
            zone->allocator.arenas.needBackgroundFinalizeWait(kind))
        {
            gc::FinishBackgroundFinalize(zone->runtimeFromMainThread());
        }

#ifdef JSGC_GENERATIONAL
        /* Evict the nursery before iterating so we can see all things. */
        JSRuntime *rt = zone->runtimeFromMainThread();
        if (!rt->gcNursery.isEmpty())
            MinorGC(rt, JS::gcreason::EVICT_NURSERY);
#endif

        if (lists->isSynchronizedFreeList(kind)) {
            lists = nullptr;
        } else {
            JS_ASSERT(!zone->runtimeFromMainThread()->isHeapBusy());
            lists->copyFreeListToArena(kind);
        }

#ifdef DEBUG
        /* Assert that no GCs can occur while a CellIter is live. */
        counter = &zone->runtimeFromAnyThread()->noGCOrAllocationCheck;
        ++*counter;
#endif

        init(zone, kind);
    }

    ~CellIter() {
#ifdef DEBUG
        JS_ASSERT(*counter > 0);
        --*counter;
#endif
        if (lists)
            lists->clearFreeListInArena(kind);
    }
};
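
/*
 * Usage sketch (editor's addition): iterate every Shape in a zone from
 * mutator code; CellIter itself evicts the nursery and synchronizes the
 * free lists as shown above. Allocation and GC must not occur while the
 * iterator is live.
 *
 *     for (CellIter i(zone, FINALIZE_SHAPE); !i.done(); i.next()) {
 *         Shape *shape = i.get<Shape>();
 *         // ... inspect shape ...
 *     }
 */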

class GCZonesIter
{
  private:
    ZonesIter zone;

  public:
    GCZonesIter(JSRuntime *rt) : zone(rt, WithAtoms) {
        if (!zone->isCollecting())
            next();
    }

    bool done() const { return zone.done(); }

    void next() {
        JS_ASSERT(!done());
        do {
            zone.next();
        } while (!zone.done() && !zone->isCollecting());
    }

    JS::Zone *get() const {
        JS_ASSERT(!done());
        return zone;
    }

    operator JS::Zone *() const { return get(); }
    JS::Zone *operator->() const { return get(); }
};

typedef CompartmentsIterT<GCZonesIter> GCCompartmentsIter;
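
/*
 * Usage sketch (editor's addition): visit only the zones participating in
 * the current collection, e.g. from within GC code. `rt` is assumed to be
 * the active JSRuntime.
 *
 *     for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
 *         // ... per-zone collection work ...
 *     }
 */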

/* Iterates over all zones in the current zone group. */
class GCZoneGroupIter {
  private:
    JS::Zone *current;

  public:
    GCZoneGroupIter(JSRuntime *rt) {
        JS_ASSERT(rt->isHeapBusy());
        current = rt->gcCurrentZoneGroup;
    }

    bool done() const { return !current; }

    void next() {
        JS_ASSERT(!done());
        current = current->nextNodeInGroup();
    }

    JS::Zone *get() const {
        JS_ASSERT(!done());
        return current;
    }

    operator JS::Zone *() const { return get(); }
    JS::Zone *operator->() const { return get(); }
};

typedef CompartmentsIterT<GCZoneGroupIter> GCCompartmentGroupIter;
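
/*
 * Usage sketch (editor's addition): sweeping code typically walks the
 * compartments of the current zone group through the typedef above.
 *
 *     for (GCCompartmentGroupIter c(rt); !c.done(); c.next()) {
 *         // ... per-compartment sweeping work ...
 *     }
 */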

#ifdef JSGC_GENERATIONAL
/*
 * Attempt to allocate a new GC thing out of the nursery. If there is not
 * enough room in the nursery or there is an OOM, this method will return
 * nullptr.
 */
template <AllowGC allowGC>
inline JSObject *
TryNewNurseryObject(ThreadSafeContext *cxArg, size_t thingSize, size_t nDynamicSlots)
{
    JSContext *cx = cxArg->asJSContext();

    JS_ASSERT(!IsAtomsCompartment(cx->compartment()));
    JSRuntime *rt = cx->runtime();
    Nursery &nursery = rt->gcNursery;
    JSObject *obj = nursery.allocateObject(cx, thingSize, nDynamicSlots);
    if (obj)
        return obj;
    if (allowGC && !rt->mainThread.suppressGC) {
        MinorGC(cx, JS::gcreason::OUT_OF_NURSERY);

        /* Exceeding gcMaxBytes while tenuring can disable the Nursery. */
        if (nursery.isEnabled()) {
            JSObject *obj = nursery.allocateObject(cx, thingSize, nDynamicSlots);
            JS_ASSERT(obj);
            return obj;
        }
    }
    return nullptr;
}
#endif /* JSGC_GENERATIONAL */
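
/*
 * Illustration (editor's addition): AllocateObject below is the normal
 * caller of TryNewNurseryObject; a direct use would look like the
 * following, with tenured allocation as the fallback on nullptr.
 *
 *     JSObject *obj = TryNewNurseryObject<CanGC>(cx, Arena::thingSize(kind), 0);
 *     if (!obj)
 *         // ... fall back to a tenured-heap allocation ...
 */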

static inline bool
PossiblyFail()
{
    JS_OOM_POSSIBLY_FAIL();
    return true;
}

template <AllowGC allowGC>
static inline bool
CheckAllocatorState(ThreadSafeContext *cx, AllocKind kind)
{
    if (!cx->isJSContext())
        return true;

    JSContext *ncx = cx->asJSContext();
    JSRuntime *rt = ncx->runtime();
#if defined(JS_GC_ZEAL) || defined(DEBUG)
    JS_ASSERT_IF(rt->isAtomsCompartment(ncx->compartment()),
                 kind == FINALIZE_STRING ||
                 kind == FINALIZE_FAT_INLINE_STRING ||
                 kind == FINALIZE_JITCODE);
    JS_ASSERT(!rt->isHeapBusy());
    JS_ASSERT(!rt->noGCOrAllocationCheck);
#endif

    // For testing out of memory conditions.
    if (!PossiblyFail()) {
        js_ReportOutOfMemory(cx);
        return false;
    }

    if (allowGC) {
#ifdef JS_GC_ZEAL
        if (rt->needZealousGC())
            js::gc::RunDebugGC(ncx);
#endif

        if (rt->interrupt) {
            // Invoking the interrupt callback can fail and we can't usefully
            // handle that here. Just check in case we need to collect instead.
            js::gc::GCIfNeeded(ncx);
        }
    }

    return true;
}

template <typename T>
static inline void
CheckIncrementalZoneState(ThreadSafeContext *cx, T *t)
{
#ifdef DEBUG
    if (!cx->isJSContext())
        return;

    Zone *zone = cx->asJSContext()->zone();
    JS_ASSERT_IF(t && zone->wasGCStarted() && (zone->isGCMarking() || zone->isGCSweeping()),
                 t->arenaHeader()->allocatedDuringIncremental);
#endif
}

/*
 * Allocate a new GC thing. After a successful allocation the caller must
 * fully initialize the thing before calling any function that can potentially
 * trigger GC. This will ensure that GC tracing never sees junk values stored
 * in the partially initialized thing.
 */

template <AllowGC allowGC>
inline JSObject *
AllocateObject(ThreadSafeContext *cx, AllocKind kind, size_t nDynamicSlots, InitialHeap heap)
{
    size_t thingSize = Arena::thingSize(kind);

    if (!CheckAllocatorState<allowGC>(cx, kind))
        return nullptr;

#ifdef JSGC_GENERATIONAL
    if (cx->hasNursery() && ShouldNurseryAllocate(cx->nursery(), kind, heap)) {
        JSObject *obj = TryNewNurseryObject<allowGC>(cx, thingSize, nDynamicSlots);
        if (obj)
            return obj;
    }
#endif

    HeapSlot *slots = nullptr;
    if (nDynamicSlots) {
        slots = cx->pod_malloc<HeapSlot>(nDynamicSlots);
        if (MOZ_UNLIKELY(!slots))
            return nullptr;
        js::Debug_SetSlotRangeToCrashOnTouch(slots, nDynamicSlots);
    }

    JSObject *obj = static_cast<JSObject *>(cx->allocator()->arenas.allocateFromFreeList(kind, thingSize));
    if (!obj)
        obj = static_cast<JSObject *>(js::gc::ArenaLists::refillFreeList<allowGC>(cx, kind));

    if (obj)
        obj->setInitialSlots(slots);
    else
        js_free(slots);

    CheckIncrementalZoneState(cx, obj);
    return obj;
}
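
/*
 * Usage sketch (editor's addition) of the initialize-before-GC contract
 * described above; the initialization line stands in for whatever setup the
 * real caller performs.
 *
 *     JSObject *obj = AllocateObject<CanGC>(cx, kind, 0, heap);
 *     if (!obj)
 *         return nullptr;
 *     // fully initialize every field of obj first ...
 *     // ... only now call anything that can trigger a GC
 */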

template <typename T, AllowGC allowGC>
inline T *
AllocateNonObject(ThreadSafeContext *cx)
{
    AllocKind kind = MapTypeToFinalizeKind<T>::kind;
    size_t thingSize = sizeof(T);

    JS_ASSERT(thingSize == Arena::thingSize(kind));
    if (!CheckAllocatorState<allowGC>(cx, kind))
        return nullptr;

    T *t = static_cast<T *>(cx->allocator()->arenas.allocateFromFreeList(kind, thingSize));
    if (!t)
        t = static_cast<T *>(js::gc::ArenaLists::refillFreeList<allowGC>(cx, kind));

    CheckIncrementalZoneState(cx, t);
    return t;
}

/*
 * When allocating for initialization from a cached object copy, we will
 * potentially destroy the cache entry we want to copy if we allow GC. On the
 * other hand, since these allocations are extremely common, we don't want to
 * delay GC from these allocation sites. Instead we allow the GC, but still
 * fail the allocation, forcing the non-cached path.
 */
template <AllowGC allowGC>
inline JSObject *
AllocateObjectForCacheHit(JSContext *cx, AllocKind kind, InitialHeap heap)
{
#ifdef JSGC_GENERATIONAL
    if (ShouldNurseryAllocate(cx->nursery(), kind, heap)) {
        size_t thingSize = Arena::thingSize(kind);

        if (!CheckAllocatorState<NoGC>(cx, kind))
            return nullptr;

        JSObject *obj = TryNewNurseryObject<NoGC>(cx, thingSize, 0);
        if (!obj && allowGC) {
            MinorGC(cx, JS::gcreason::OUT_OF_NURSERY);
            return nullptr;
        }
        return obj;
    }
#endif

    JSObject *obj = AllocateObject<NoGC>(cx, kind, 0, heap);
    if (!obj && allowGC) {
        MaybeGC(cx);
        return nullptr;
    }

    return obj;
}
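
/*
 * Usage sketch (editor's addition): callers treat nullptr as "take the
 * non-cached path"; the helper may have triggered a GC, but it never returns
 * an object allocated across one. `NewObjectSlowPath` is a hypothetical
 * fallback.
 *
 *     JSObject *obj = AllocateObjectForCacheHit<CanGC>(cx, kind, heap);
 *     if (!obj)
 *         return NewObjectSlowPath(cx, kind, heap);
 */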

} /* namespace gc */

template <js::AllowGC allowGC>
inline JSObject *
NewGCObject(js::ThreadSafeContext *cx, js::gc::AllocKind kind, size_t nDynamicSlots, js::gc::InitialHeap heap)
{
    JS_ASSERT(kind >= js::gc::FINALIZE_OBJECT0 && kind <= js::gc::FINALIZE_OBJECT_LAST);
    return js::gc::AllocateObject<allowGC>(cx, kind, nDynamicSlots, heap);
}

template <js::AllowGC allowGC>
inline jit::JitCode *
NewJitCode(js::ThreadSafeContext *cx)
{
    return gc::AllocateNonObject<jit::JitCode, allowGC>(cx);
}

inline types::TypeObject *
NewTypeObject(js::ThreadSafeContext *cx)
{
    return gc::AllocateNonObject<types::TypeObject, js::CanGC>(cx);
}

} /* namespace js */

template <js::AllowGC allowGC>
inline JSString *
js_NewGCString(js::ThreadSafeContext *cx)
{
    return js::gc::AllocateNonObject<JSString, allowGC>(cx);
}

template <js::AllowGC allowGC>
inline JSFatInlineString *
js_NewGCFatInlineString(js::ThreadSafeContext *cx)
{
    return js::gc::AllocateNonObject<JSFatInlineString, allowGC>(cx);
}

inline JSExternalString *
js_NewGCExternalString(js::ThreadSafeContext *cx)
{
    return js::gc::AllocateNonObject<JSExternalString, js::CanGC>(cx);
}

inline JSScript *
js_NewGCScript(js::ThreadSafeContext *cx)
{
    return js::gc::AllocateNonObject<JSScript, js::CanGC>(cx);
}

inline js::LazyScript *
js_NewGCLazyScript(js::ThreadSafeContext *cx)
{
    return js::gc::AllocateNonObject<js::LazyScript, js::CanGC>(cx);
}

inline js::Shape *
js_NewGCShape(js::ThreadSafeContext *cx)
{
    return js::gc::AllocateNonObject<js::Shape, js::CanGC>(cx);
}

template <js::AllowGC allowGC>
inline js::BaseShape *
js_NewGCBaseShape(js::ThreadSafeContext *cx)
{
    return js::gc::AllocateNonObject<js::BaseShape, allowGC>(cx);
}

#endif /* jsgcinlines_h */
