Thu, 22 Jan 2015 13:21:57 +0100
Incorporate the changes requested in the Mozilla review:
https://bugzilla.mozilla.org/show_bug.cgi?id=1123480#c6
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef jsgcinlines_h
#define jsgcinlines_h

#include "jsgc.h"

#include "gc/Zone.h"

namespace js {

class Shape;

/*
 * This auto class should be used around any code that might cause a mark bit to
 * be set on an object in a dead zone. See AutoMaybeTouchDeadZones
 * for more details.
 */
struct AutoMarkInDeadZone
{
    AutoMarkInDeadZone(JS::Zone *zone)
      : zone(zone),
        scheduled(zone->scheduledForDestruction)
    {
        JSRuntime *rt = zone->runtimeFromMainThread();
        if (rt->gcManipulatingDeadZones && zone->scheduledForDestruction) {
            rt->gcObjectsMarkedInDeadZones++;
            zone->scheduledForDestruction = false;
        }
    }

    ~AutoMarkInDeadZone() {
        zone->scheduledForDestruction = scheduled;
    }

  private:
    JS::Zone *zone;
    bool scheduled;
};
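
/*
 * Editor's usage sketch (not part of the original header; the caller
 * MarkPossiblyDeadObject is hypothetical): wrap any code that may set a
 * mark bit in a possibly-dead zone, so the zone's scheduledForDestruction
 * flag is cleared for the duration and restored on scope exit.
 *
 *     void MarkPossiblyDeadObject(JSObject *obj) {
 *         AutoMarkInDeadZone keepAlive(obj->zone());
 *         // Marking obj here no longer leaves its zone flagged as
 *         // scheduledForDestruction; the flag comes back when
 *         // keepAlive is destroyed.
 *     }
 */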

inline Allocator *
ThreadSafeContext::allocator()
{
    JS_ASSERT_IF(isJSContext(), &asJSContext()->zone()->allocator == allocator_);
    return allocator_;
}

template <typename T>
inline bool
ThreadSafeContext::isThreadLocal(T thing) const
{
    if (!isForkJoinContext())
        return true;

    if (!IsInsideNursery(runtime_, thing) &&
        allocator_->arenas.containsArena(runtime_, thing->arenaHeader()))
    {
        // GC should be suppressed in preparation for mutating thread local
        // objects, as we don't want to trip any barriers.
        JS_ASSERT(!thing->zoneFromAnyThread()->needsBarrier());
        JS_ASSERT(!thing->runtimeFromAnyThread()->needsBarrier());

        return true;
    }

    return false;
}

namespace gc {

static inline AllocKind
GetGCObjectKind(const Class *clasp)
{
    if (clasp == FunctionClassPtr)
        return JSFunction::FinalizeKind;
    uint32_t nslots = JSCLASS_RESERVED_SLOTS(clasp);
    if (clasp->flags & JSCLASS_HAS_PRIVATE)
        nslots++;
    return GetGCObjectKind(nslots);
}
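
/*
 * Illustrative sketch (editor's addition; ExampleClass is hypothetical):
 * the private pointer counts as one extra slot, so a class with
 * JSCLASS_HAS_PRIVATE and two reserved slots is sized for three fixed
 * slots, which the slot-count overload rounds up to an object AllocKind.
 *
 *     static const Class ExampleClass = {
 *         "Example",
 *         JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(2),
 *         // ... hooks elided ...
 *     };
 *     AllocKind kind = GetGCObjectKind(&ExampleClass);
 */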

#ifdef JSGC_GENERATIONAL
inline bool
ShouldNurseryAllocate(const Nursery &nursery, AllocKind kind, InitialHeap heap)
{
    return nursery.isEnabled() && IsNurseryAllocable(kind) && heap != TenuredHeap;
}
#endif

inline JSGCTraceKind
GetGCThingTraceKind(const void *thing)
{
    JS_ASSERT(thing);
    const Cell *cell = static_cast<const Cell *>(thing);
#ifdef JSGC_GENERATIONAL
    if (IsInsideNursery(cell->runtimeFromAnyThread(), cell))
        return JSTRACE_OBJECT;
#endif
    return MapAllocToTraceKind(cell->tenuredGetAllocKind());
}

static inline void
GCPoke(JSRuntime *rt)
{
    rt->gcPoke = true;

#ifdef JS_GC_ZEAL
    /* Schedule a GC to happen "soon" after a GC poke. */
    if (rt->gcZeal() == js::gc::ZealPokeValue)
        rt->gcNextScheduled = 1;
#endif
}

class ArenaIter
{
    ArenaHeader *aheader;
    ArenaHeader *remainingHeader;

  public:
    ArenaIter() {
        init();
    }

    ArenaIter(JS::Zone *zone, AllocKind kind) {
        init(zone, kind);
    }

    void init() {
        aheader = nullptr;
        remainingHeader = nullptr;
    }

    void init(ArenaHeader *aheaderArg) {
        aheader = aheaderArg;
        remainingHeader = nullptr;
    }

    void init(JS::Zone *zone, AllocKind kind) {
        aheader = zone->allocator.arenas.getFirstArena(kind);
        remainingHeader = zone->allocator.arenas.getFirstArenaToSweep(kind);
        if (!aheader) {
            aheader = remainingHeader;
            remainingHeader = nullptr;
        }
    }

    bool done() {
        return !aheader;
    }

    ArenaHeader *get() {
        return aheader;
    }

    void next() {
        aheader = aheader->next;
        if (!aheader) {
            aheader = remainingHeader;
            remainingHeader = nullptr;
        }
    }
};
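
/*
 * Editor's usage sketch (assumes iteration happens at a safe point, e.g.
 * under a collection): ArenaIter walks a zone's arenas of one kind,
 * including arenas still queued for background sweeping.
 *
 *     for (ArenaIter aiter(zone, FINALIZE_OBJECT0); !aiter.done(); aiter.next()) {
 *         ArenaHeader *aheader = aiter.get();
 *         // inspect aheader ...
 *     }
 */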

class CellIterImpl
{
    size_t firstThingOffset;
    size_t thingSize;
    ArenaIter aiter;
    FreeSpan firstSpan;
    const FreeSpan *span;
    uintptr_t thing;
    Cell *cell;

  protected:
    CellIterImpl() {
    }

    void initSpan(JS::Zone *zone, AllocKind kind) {
        JS_ASSERT(zone->allocator.arenas.isSynchronizedFreeList(kind));
        firstThingOffset = Arena::firstThingOffset(kind);
        thingSize = Arena::thingSize(kind);
        firstSpan.initAsEmpty();
        span = &firstSpan;
        thing = span->first;
    }

    void init(ArenaHeader *singleAheader) {
        initSpan(singleAheader->zone, singleAheader->getAllocKind());
        aiter.init(singleAheader);
        next();
        aiter.init();
    }

    void init(JS::Zone *zone, AllocKind kind) {
        initSpan(zone, kind);
        aiter.init(zone, kind);
        next();
    }

  public:
    bool done() const {
        return !cell;
    }

    template<typename T> T *get() const {
        JS_ASSERT(!done());
        return static_cast<T *>(cell);
    }

    Cell *getCell() const {
        JS_ASSERT(!done());
        return cell;
    }

    void next() {
        for (;;) {
            if (thing != span->first)
                break;
            if (MOZ_LIKELY(span->hasNext())) {
                thing = span->last + thingSize;
                span = span->nextSpan();
                break;
            }
            if (aiter.done()) {
                cell = nullptr;
                return;
            }
            ArenaHeader *aheader = aiter.get();
            firstSpan = aheader->getFirstFreeSpan();
            span = &firstSpan;
            thing = aheader->arenaAddress() | firstThingOffset;
            aiter.next();
        }
        cell = reinterpret_cast<Cell *>(thing);
        thing += thingSize;
    }
};

class CellIterUnderGC : public CellIterImpl
{
  public:
    CellIterUnderGC(JS::Zone *zone, AllocKind kind) {
#ifdef JSGC_GENERATIONAL
        JS_ASSERT(zone->runtimeFromAnyThread()->gcNursery.isEmpty());
#endif
        JS_ASSERT(zone->runtimeFromAnyThread()->isHeapBusy());
        init(zone, kind);
    }

    CellIterUnderGC(ArenaHeader *aheader) {
        JS_ASSERT(aheader->zone->runtimeFromAnyThread()->isHeapBusy());
        init(aheader);
    }
};

class CellIter : public CellIterImpl
{
    ArenaLists *lists;
    AllocKind kind;
#ifdef DEBUG
    size_t *counter;
#endif
  public:
    CellIter(JS::Zone *zone, AllocKind kind)
      : lists(&zone->allocator.arenas),
        kind(kind)
    {
        /*
         * We have a single-threaded runtime, so there's no need to protect
         * against other threads iterating or allocating. However, we do have
         * background finalization; we have to wait for this to finish if it's
         * currently active.
         */
        if (IsBackgroundFinalized(kind) &&
            zone->allocator.arenas.needBackgroundFinalizeWait(kind))
        {
            gc::FinishBackgroundFinalize(zone->runtimeFromMainThread());
        }

#ifdef JSGC_GENERATIONAL
        /* Evict the nursery before iterating so we can see all things. */
        JSRuntime *rt = zone->runtimeFromMainThread();
        if (!rt->gcNursery.isEmpty())
            MinorGC(rt, JS::gcreason::EVICT_NURSERY);
#endif

        if (lists->isSynchronizedFreeList(kind)) {
            lists = nullptr;
        } else {
            JS_ASSERT(!zone->runtimeFromMainThread()->isHeapBusy());
            lists->copyFreeListToArena(kind);
        }

#ifdef DEBUG
        /* Assert that no GCs can occur while a CellIter is live. */
        counter = &zone->runtimeFromAnyThread()->noGCOrAllocationCheck;
        ++*counter;
#endif

        init(zone, kind);
    }

    ~CellIter() {
#ifdef DEBUG
        JS_ASSERT(*counter > 0);
        --*counter;
#endif
        if (lists)
            lists->clearFreeListInArena(kind);
    }
};
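
/*
 * Editor's usage sketch: unlike CellIterUnderGC, CellIter may be used from
 * mutator code; it synchronizes the free lists, evicts the nursery, and
 * (in DEBUG builds) forbids GC and allocation for its lifetime, so keep
 * the scope tight.
 *
 *     for (CellIter i(zone, FINALIZE_SHAPE); !i.done(); i.next()) {
 *         Shape *shape = i.get<Shape>();
 *         // read-only inspection of shape ...
 *     }
 */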

class GCZonesIter
{
  private:
    ZonesIter zone;

  public:
    GCZonesIter(JSRuntime *rt) : zone(rt, WithAtoms) {
        if (!zone->isCollecting())
            next();
    }

    bool done() const { return zone.done(); }

    void next() {
        JS_ASSERT(!done());
        do {
            zone.next();
        } while (!zone.done() && !zone->isCollecting());
    }

    JS::Zone *get() const {
        JS_ASSERT(!done());
        return zone;
    }

    operator JS::Zone *() const { return get(); }
    JS::Zone *operator->() const { return get(); }
};

typedef CompartmentsIterT<GCZonesIter> GCCompartmentsIter;
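
/*
 * Editor's sketch (assumes CompartmentsIterT exposes the usual done/next
 * interface and compartment conversion): the typedef composes the zone
 * iterator with a compartment iterator, so visiting every compartment in
 * the zones currently being collected looks like:
 *
 *     for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
 *         // c acts as a JSCompartment * ...
 *     }
 */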

/* Iterates over all zones in the current zone group. */
class GCZoneGroupIter {
  private:
    JS::Zone *current;

  public:
    GCZoneGroupIter(JSRuntime *rt) {
        JS_ASSERT(rt->isHeapBusy());
        current = rt->gcCurrentZoneGroup;
    }

    bool done() const { return !current; }

    void next() {
        JS_ASSERT(!done());
        current = current->nextNodeInGroup();
    }

    JS::Zone *get() const {
        JS_ASSERT(!done());
        return current;
    }

    operator JS::Zone *() const { return get(); }
    JS::Zone *operator->() const { return get(); }
};

typedef CompartmentsIterT<GCZoneGroupIter> GCCompartmentGroupIter;

#ifdef JSGC_GENERATIONAL
/*
 * Attempt to allocate a new GC thing out of the nursery. If there is not enough
 * room in the nursery or there is an OOM, this method will return nullptr.
 */
template <AllowGC allowGC>
inline JSObject *
TryNewNurseryObject(ThreadSafeContext *cxArg, size_t thingSize, size_t nDynamicSlots)
{
    JSContext *cx = cxArg->asJSContext();

    JS_ASSERT(!IsAtomsCompartment(cx->compartment()));
    JSRuntime *rt = cx->runtime();
    Nursery &nursery = rt->gcNursery;
    JSObject *obj = nursery.allocateObject(cx, thingSize, nDynamicSlots);
    if (obj)
        return obj;
    if (allowGC && !rt->mainThread.suppressGC) {
        MinorGC(cx, JS::gcreason::OUT_OF_NURSERY);

        /* Exceeding gcMaxBytes while tenuring can disable the Nursery. */
        if (nursery.isEnabled()) {
            JSObject *obj = nursery.allocateObject(cx, thingSize, nDynamicSlots);
            JS_ASSERT(obj);
            return obj;
        }
    }
    return nullptr;
}
#endif /* JSGC_GENERATIONAL */

static inline bool
PossiblyFail()
{
    JS_OOM_POSSIBLY_FAIL();
    return true;
}

template <AllowGC allowGC>
static inline bool
CheckAllocatorState(ThreadSafeContext *cx, AllocKind kind)
{
    if (!cx->isJSContext())
        return true;

    JSContext *ncx = cx->asJSContext();
    JSRuntime *rt = ncx->runtime();
#if defined(JS_GC_ZEAL) || defined(DEBUG)
    JS_ASSERT_IF(rt->isAtomsCompartment(ncx->compartment()),
                 kind == FINALIZE_STRING ||
                 kind == FINALIZE_FAT_INLINE_STRING ||
                 kind == FINALIZE_JITCODE);
    JS_ASSERT(!rt->isHeapBusy());
    JS_ASSERT(!rt->noGCOrAllocationCheck);
#endif

    // For testing out of memory conditions
    if (!PossiblyFail()) {
        js_ReportOutOfMemory(cx);
        return false;
    }

    if (allowGC) {
#ifdef JS_GC_ZEAL
        if (rt->needZealousGC())
            js::gc::RunDebugGC(ncx);
#endif

        if (rt->interrupt) {
            // Invoking the interrupt callback can fail and we can't usefully
            // handle that here. Just check in case we need to collect instead.
            js::gc::GCIfNeeded(ncx);
        }
    }

    return true;
}

template <typename T>
static inline void
CheckIncrementalZoneState(ThreadSafeContext *cx, T *t)
{
#ifdef DEBUG
    if (!cx->isJSContext())
        return;

    Zone *zone = cx->asJSContext()->zone();
    JS_ASSERT_IF(t && zone->wasGCStarted() && (zone->isGCMarking() || zone->isGCSweeping()),
                 t->arenaHeader()->allocatedDuringIncremental);
#endif
}

/*
 * Allocate a new GC thing. After a successful allocation the caller must
 * fully initialize the thing before calling any function that can potentially
 * trigger GC. This will ensure that GC tracing never sees junk values stored
 * in the partially initialized thing.
 */

template <AllowGC allowGC>
inline JSObject *
AllocateObject(ThreadSafeContext *cx, AllocKind kind, size_t nDynamicSlots, InitialHeap heap)
{
    size_t thingSize = Arena::thingSize(kind);

    if (!CheckAllocatorState<allowGC>(cx, kind))
        return nullptr;

#ifdef JSGC_GENERATIONAL
    if (cx->hasNursery() && ShouldNurseryAllocate(cx->nursery(), kind, heap)) {
        JSObject *obj = TryNewNurseryObject<allowGC>(cx, thingSize, nDynamicSlots);
        if (obj)
            return obj;
    }
#endif

    HeapSlot *slots = nullptr;
    if (nDynamicSlots) {
        slots = cx->pod_malloc<HeapSlot>(nDynamicSlots);
        if (MOZ_UNLIKELY(!slots))
            return nullptr;
        js::Debug_SetSlotRangeToCrashOnTouch(slots, nDynamicSlots);
    }

    JSObject *obj = static_cast<JSObject *>(cx->allocator()->arenas.allocateFromFreeList(kind, thingSize));
    if (!obj)
        obj = static_cast<JSObject *>(js::gc::ArenaLists::refillFreeList<allowGC>(cx, kind));

    if (obj)
        obj->setInitialSlots(slots);
    else
        js_free(slots);

    CheckIncrementalZoneState(cx, obj);
    return obj;
}
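
/*
 * Caller-side sketch of the contract documented above (editor's
 * illustration; InitObject and SomethingThatCanGC are hypothetical):
 * nothing that can trigger GC may run between a successful allocation and
 * the point where every field of the new thing holds a valid value.
 *
 *     JSObject *obj = AllocateObject<CanGC>(cx, kind, 0, heap);
 *     if (!obj)
 *         return nullptr;
 *     InitObject(obj);          // fully initialize first...
 *     SomethingThatCanGC(cx);   // ...only then call anything that can GC
 */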

template <typename T, AllowGC allowGC>
inline T *
AllocateNonObject(ThreadSafeContext *cx)
{
    AllocKind kind = MapTypeToFinalizeKind<T>::kind;
    size_t thingSize = sizeof(T);

    JS_ASSERT(thingSize == Arena::thingSize(kind));
    if (!CheckAllocatorState<allowGC>(cx, kind))
        return nullptr;

    T *t = static_cast<T *>(cx->allocator()->arenas.allocateFromFreeList(kind, thingSize));
    if (!t)
        t = static_cast<T *>(js::gc::ArenaLists::refillFreeList<allowGC>(cx, kind));

    CheckIncrementalZoneState(cx, t);
    return t;
}

/*
 * When allocating for initialization from a cached object copy, we will
 * potentially destroy the cache entry we want to copy if we allow GC. On the
 * other hand, since these allocations are extremely common, we don't want to
 * delay GC from these allocation sites. Instead we allow the GC, but still
 * fail the allocation, forcing the non-cached path.
 */
template <AllowGC allowGC>
inline JSObject *
AllocateObjectForCacheHit(JSContext *cx, AllocKind kind, InitialHeap heap)
{
#ifdef JSGC_GENERATIONAL
    if (ShouldNurseryAllocate(cx->nursery(), kind, heap)) {
        size_t thingSize = Arena::thingSize(kind);

        if (!CheckAllocatorState<NoGC>(cx, kind))
            return nullptr;

        JSObject *obj = TryNewNurseryObject<NoGC>(cx, thingSize, 0);
        if (!obj && allowGC) {
            MinorGC(cx, JS::gcreason::OUT_OF_NURSERY);
            return nullptr;
        }
        return obj;
    }
#endif

    JSObject *obj = AllocateObject<NoGC>(cx, kind, 0, heap);
    if (!obj && allowGC) {
        MaybeGC(cx);
        return nullptr;
    }

    return obj;
}
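
/*
 * Editor's sketch of the intended call pattern (NewObjectUncached is a
 * hypothetical slow path): a nullptr result after a GC means the cache
 * entry being copied may have been invalidated, so the caller falls back
 * to the non-cached path rather than retrying here.
 *
 *     JSObject *obj = AllocateObjectForCacheHit<CanGC>(cx, kind, heap);
 *     if (!obj)
 *         return NewObjectUncached(cx, kind, heap);
 */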

} /* namespace gc */

template <js::AllowGC allowGC>
inline JSObject *
NewGCObject(js::ThreadSafeContext *cx, js::gc::AllocKind kind, size_t nDynamicSlots, js::gc::InitialHeap heap)
{
    JS_ASSERT(kind >= js::gc::FINALIZE_OBJECT0 && kind <= js::gc::FINALIZE_OBJECT_LAST);
    return js::gc::AllocateObject<allowGC>(cx, kind, nDynamicSlots, heap);
}

template <js::AllowGC allowGC>
inline jit::JitCode *
NewJitCode(js::ThreadSafeContext *cx)
{
    return gc::AllocateNonObject<jit::JitCode, allowGC>(cx);
}

inline types::TypeObject *
NewTypeObject(js::ThreadSafeContext *cx)
{
    return gc::AllocateNonObject<types::TypeObject, js::CanGC>(cx);
}

} /* namespace js */

template <js::AllowGC allowGC>
inline JSString *
js_NewGCString(js::ThreadSafeContext *cx)
{
    return js::gc::AllocateNonObject<JSString, allowGC>(cx);
}

template <js::AllowGC allowGC>
inline JSFatInlineString *
js_NewGCFatInlineString(js::ThreadSafeContext *cx)
{
    return js::gc::AllocateNonObject<JSFatInlineString, allowGC>(cx);
}

inline JSExternalString *
js_NewGCExternalString(js::ThreadSafeContext *cx)
{
    return js::gc::AllocateNonObject<JSExternalString, js::CanGC>(cx);
}

inline JSScript *
js_NewGCScript(js::ThreadSafeContext *cx)
{
    return js::gc::AllocateNonObject<JSScript, js::CanGC>(cx);
}

inline js::LazyScript *
js_NewGCLazyScript(js::ThreadSafeContext *cx)
{
    return js::gc::AllocateNonObject<js::LazyScript, js::CanGC>(cx);
}

inline js::Shape *
js_NewGCShape(js::ThreadSafeContext *cx)
{
    return js::gc::AllocateNonObject<js::Shape, js::CanGC>(cx);
}

template <js::AllowGC allowGC>
inline js::BaseShape *
js_NewGCBaseShape(js::ThreadSafeContext *cx)
{
    return js::gc::AllocateNonObject<js::BaseShape, allowGC>(cx);
}

#endif /* jsgcinlines_h */