|
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- |
|
2 * vim: set ts=8 sts=4 et sw=4 tw=99: |
|
3 * This Source Code Form is subject to the terms of the Mozilla Public |
|
4 * License, v. 2.0. If a copy of the MPL was not distributed with this |
|
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ |
|
6 |
|
7 #ifdef JSGC_GENERATIONAL |
|
8 |
|
9 #include "gc/StoreBuffer.h" |
|
10 |
|
11 #include "mozilla/Assertions.h" |
|
12 |
|
13 #include "vm/ArgumentsObject.h" |
|
14 #include "vm/ForkJoin.h" |
|
15 |
|
16 #include "jsgcinlines.h" |
|
17 |
|
18 using namespace js; |
|
19 using namespace js::gc; |
|
20 using mozilla::ReentrancyGuard; |
|
21 |
|
22 /*** Edges ***/ |
|
23 |
|
24 void |
|
25 StoreBuffer::SlotsEdge::mark(JSTracer *trc) |
|
26 { |
|
27 JSObject *obj = object(); |
|
28 |
|
29 if (trc->runtime()->gcNursery.isInside(obj)) |
|
30 return; |
|
31 |
|
32 if (!obj->isNative()) { |
|
33 const Class *clasp = obj->getClass(); |
|
34 if (clasp) |
|
35 clasp->trace(trc, obj); |
|
36 return; |
|
37 } |
|
38 |
|
39 if (kind() == ElementKind) { |
|
40 int32_t initLen = obj->getDenseInitializedLength(); |
|
41 int32_t clampedStart = Min(start_, initLen); |
|
42 int32_t clampedEnd = Min(start_ + count_, initLen); |
|
43 gc::MarkArraySlots(trc, clampedEnd - clampedStart, |
|
44 obj->getDenseElements() + clampedStart, "element"); |
|
45 } else { |
|
46 int32_t start = Min(uint32_t(start_), obj->slotSpan()); |
|
47 int32_t end = Min(uint32_t(start_) + count_, obj->slotSpan()); |
|
48 MOZ_ASSERT(end >= start); |
|
49 MarkObjectSlots(trc, obj, start, end - start); |
|
50 } |
|
51 } |
|
52 |
|
/*
 * Mark every outgoing edge of a tenured cell recorded in the whole-cell
 * buffer.  The asserts show only objects and (when IonMonkey is built)
 * jitcode are expected here.
 */
void
StoreBuffer::WholeCellEdges::mark(JSTracer *trc)
{
    JS_ASSERT(edge->isTenured());
    JSGCTraceKind kind = GetGCThingTraceKind(edge);
    if (kind <= JSTRACE_OBJECT) {
        JSObject *object = static_cast<JSObject *>(edge);
        /*
         * NOTE(review): ArgumentsObject gets an explicit trace call before
         * MarkChildren — presumably it holds data MarkChildren does not
         * reach; confirm against ArgumentsObject::trace.
         */
        if (object->is<ArgumentsObject>())
            ArgumentsObject::trace(trc, object);
        MarkChildren(trc, object);
        return;
    }
#ifdef JS_ION
    JS_ASSERT(kind == JSTRACE_JITCODE);
    static_cast<jit::JitCode *>(edge)->trace(trc);
#else
    MOZ_ASSUME_UNREACHABLE("Only objects can be in the wholeCellBuffer if IonMonkey is disabled.");
#endif
}
|
72 |
|
73 void |
|
74 StoreBuffer::CellPtrEdge::mark(JSTracer *trc) |
|
75 { |
|
76 if (!*edge) |
|
77 return; |
|
78 |
|
79 JS_ASSERT(GetGCThingTraceKind(*edge) == JSTRACE_OBJECT); |
|
80 MarkObjectRoot(trc, reinterpret_cast<JSObject**>(edge), "store buffer edge"); |
|
81 } |
|
82 |
|
83 void |
|
84 StoreBuffer::ValueEdge::mark(JSTracer *trc) |
|
85 { |
|
86 if (!deref()) |
|
87 return; |
|
88 |
|
89 MarkValueRoot(trc, edge, "store buffer edge"); |
|
90 } |
|
91 |
|
92 /*** MonoTypeBuffer ***/ |
|
93 |
|
/*
 * React to the buffer getting full: first try to reclaim space by
 * compacting; if that still leaves the buffer near capacity, tell the owner
 * so a minor collection can be requested.
 */
template <typename T>
void
StoreBuffer::MonoTypeBuffer<T>::handleOverflow(StoreBuffer *owner)
{
    if (!owner->isAboutToOverflow()) {
        /*
         * Compact the buffer now, and if that fails to free enough space then
         * trigger a minor collection.
         */
        compact(owner);
        if (isAboutToOverflow())
            owner->setAboutToOverflow();
    } else {
        /*
         * A minor GC has already been triggered, so there's no point
         * compacting unless the buffer is totally full.
         */
        if (storage_->availableInCurrentChunk() < sizeof(T))
            maybeCompact(owner);
    }
}
|
115 |
|
/*
 * Compact the buffer in place by dropping entries already seen, using a
 * hash set to detect duplicates.  De-duplication is best-effort: on OOM the
 * buffer is simply left as-is.
 */
template <typename T>
void
StoreBuffer::MonoTypeBuffer<T>::compactRemoveDuplicates(StoreBuffer *owner)
{
    typedef HashSet<T, typename T::Hasher, SystemAllocPolicy> DedupSet;

    DedupSet duplicates;
    if (!duplicates.init())
        return; /* Failure to de-dup is acceptable. */

    /*
     * Two cursors over the same LifoAlloc: |e| reads every entry while
     * |insert| lags behind, overwriting the storage with only the first
     * occurrence of each edge.
     */
    LifoAlloc::Enum insert(*storage_);
    for (LifoAlloc::Enum e(*storage_); !e.empty(); e.popFront<T>()) {
        T *edge = e.get<T>();
        if (!duplicates.has(*edge)) {
            insert.updateFront<T>(*edge);
            insert.popFront<T>();

            /* Failure to insert will leave the set with duplicates. Oh well. */
            duplicates.put(*edge);
        }
    }
    /* Release everything past the last entry the write cursor kept. */
    storage_->release(insert.mark());

    duplicates.clear();
}
|
141 |
|
/*
 * Rewrite the buffer without duplicate entries and record the resulting
 * size so maybeCompact() can tell whether anything was appended since.
 */
template <typename T>
void
StoreBuffer::MonoTypeBuffer<T>::compact(StoreBuffer *owner)
{
    JS_ASSERT(storage_);
    compactRemoveDuplicates(owner);
    usedAtLastCompact_ = storage_->used();
}
|
150 |
|
151 template <typename T> |
|
152 void |
|
153 StoreBuffer::MonoTypeBuffer<T>::maybeCompact(StoreBuffer *owner) |
|
154 { |
|
155 JS_ASSERT(storage_); |
|
156 if (storage_->used() != usedAtLastCompact_) |
|
157 compact(owner); |
|
158 } |
|
159 |
|
/*
 * Mark every edge currently recorded in this buffer.  Compacting first
 * removes duplicates so each distinct edge is processed only once.
 */
template <typename T>
void
StoreBuffer::MonoTypeBuffer<T>::mark(StoreBuffer *owner, JSTracer *trc)
{
    JS_ASSERT(owner->isEnabled());
    ReentrancyGuard g(*owner);
    if (!storage_)
        return;

    maybeCompact(owner);
    for (LifoAlloc::Enum e(*storage_); !e.empty(); e.popFront<T>()) {
        T *edge = e.get<T>();
        edge->mark(trc);
    }
}
|
175 |
|
176 /*** RelocatableMonoTypeBuffer ***/ |
|
177 |
|
/*
 * Drop buffered entries whose edge was later removed.  A removal is
 * recorded as a tagged copy of the entry (see the "Failed to put removal"
 * path below), so a tagged entry invalidates all earlier untagged entries
 * for the same edge address.
 */
template <typename T>
void
StoreBuffer::RelocatableMonoTypeBuffer<T>::compactMoved(StoreBuffer *owner)
{
    LifoAlloc &storage = *this->storage_;
    EdgeSet invalidated;
    if (!invalidated.init())
        CrashAtUnhandlableOOM("RelocatableMonoTypeBuffer::compactMoved: Failed to init table.");

    /* Collect the set of entries which are currently invalid. */
    /*
     * Walking in insertion order matters: an untagged entry appearing after
     * a tagged one re-validates that edge (it is removed from the set).
     */
    for (LifoAlloc::Enum e(storage); !e.empty(); e.popFront<T>()) {
        T *edge = e.get<T>();
        if (edge->isTagged()) {
            if (!invalidated.put(edge->untagged().edge))
                CrashAtUnhandlableOOM("RelocatableMonoTypeBuffer::compactMoved: Failed to put removal.");
        } else {
            invalidated.remove(edge->untagged().edge);
        }
    }

    /* Remove all entries which are in the invalidated set. */
    /*
     * Same two-cursor in-place compaction as MonoTypeBuffer: |insert|
     * overwrites the storage with only the entries being kept.
     */
    LifoAlloc::Enum insert(storage);
    for (LifoAlloc::Enum e(storage); !e.empty(); e.popFront<T>()) {
        T *edge = e.get<T>();
        if (!edge->isTagged() && !invalidated.has(edge->untagged().edge)) {
            insert.updateFront<T>(*edge);
            insert.popFront<T>();
        }
    }
    storage.release(insert.mark());

    invalidated.clear();

#ifdef DEBUG
    /* No tagged (removal) entries may survive compaction. */
    for (LifoAlloc::Enum e(storage); !e.empty(); e.popFront<T>())
        JS_ASSERT(!e.get<T>()->isTagged());
#endif
}
|
216 |
|
/*
 * First drop entries cancelled by removals, then run the base class's
 * duplicate-removing compaction.
 */
template <typename T>
void
StoreBuffer::RelocatableMonoTypeBuffer<T>::compact(StoreBuffer *owner)
{
    compactMoved(owner);
    StoreBuffer::MonoTypeBuffer<T>::compact(owner);
}
|
224 |
|
225 /*** GenericBuffer ***/ |
|
226 |
|
/*
 * Mark through every buffered generic entry.  Each entry is laid out as an
 * |unsigned| byte count immediately followed by that many bytes holding a
 * BufferableRef-derived object, so the walk alternates header/payload pops.
 */
void
StoreBuffer::GenericBuffer::mark(StoreBuffer *owner, JSTracer *trc)
{
    JS_ASSERT(owner->isEnabled());
    ReentrancyGuard g(*owner);
    if (!storage_)
        return;

    for (LifoAlloc::Enum e(*storage_); !e.empty();) {
        unsigned size = *e.get<unsigned>();
        e.popFront<unsigned>();
        BufferableRef *edge = e.get<BufferableRef>(size);
        edge->mark(trc);
        e.popFront(size);
    }
}
|
243 |
|
244 /*** StoreBuffer ***/ |
|
245 |
|
246 bool |
|
247 StoreBuffer::enable() |
|
248 { |
|
249 if (enabled_) |
|
250 return true; |
|
251 |
|
252 if (!bufferVal.init() || |
|
253 !bufferCell.init() || |
|
254 !bufferSlot.init() || |
|
255 !bufferWholeCell.init() || |
|
256 !bufferRelocVal.init() || |
|
257 !bufferRelocCell.init() || |
|
258 !bufferGeneric.init()) |
|
259 { |
|
260 return false; |
|
261 } |
|
262 |
|
263 enabled_ = true; |
|
264 return true; |
|
265 } |
|
266 |
|
267 void |
|
268 StoreBuffer::disable() |
|
269 { |
|
270 if (!enabled_) |
|
271 return; |
|
272 |
|
273 aboutToOverflow_ = false; |
|
274 |
|
275 enabled_ = false; |
|
276 } |
|
277 |
|
/*
 * Drop all buffered edges without marking them and reset the overflow
 * flag.  Succeeds trivially when the buffer is disabled.  Always returns
 * true.
 */
bool
StoreBuffer::clear()
{
    if (!enabled_)
        return true;

    aboutToOverflow_ = false;

    bufferVal.clear();
    bufferCell.clear();
    bufferSlot.clear();
    bufferWholeCell.clear();
    bufferRelocVal.clear();
    bufferRelocCell.clear();
    bufferGeneric.clear();

    return true;
}
|
296 |
|
/* Mark the contents of every sub-buffer with |trc|. */
void
StoreBuffer::markAll(JSTracer *trc)
{
    bufferVal.mark(this, trc);
    bufferCell.mark(this, trc);
    bufferSlot.mark(this, trc);
    bufferWholeCell.mark(this, trc);
    bufferRelocVal.mark(this, trc);
    bufferRelocCell.mark(this, trc);
    bufferGeneric.mark(this, trc);
}
|
308 |
|
/*
 * Flag the buffer as nearly full and request a main-thread interrupt.
 * NOTE(review): presumably the interrupt handler runs a minor GC that
 * drains the buffer — confirm against JSRuntime's interrupt handling.
 */
void
StoreBuffer::setAboutToOverflow()
{
    aboutToOverflow_ = true;
    runtime_->requestInterrupt(JSRuntime::RequestInterruptMainThread);
}
|
315 |
|
/* Thin wrapper over the global ForkJoin parallel-section query. */
bool
StoreBuffer::inParallelSection() const
{
    return InParallelSection();
}
|
321 |
|
/*
 * Accumulate (note: +=, not =) each sub-buffer's malloc'd size into the
 * corresponding field of |sizes| for memory reporting.
 */
void
StoreBuffer::addSizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf, JS::GCSizes
                                    *sizes)
{
    sizes->storeBufferVals += bufferVal.sizeOfExcludingThis(mallocSizeOf);
    sizes->storeBufferCells += bufferCell.sizeOfExcludingThis(mallocSizeOf);
    sizes->storeBufferSlots += bufferSlot.sizeOfExcludingThis(mallocSizeOf);
    sizes->storeBufferWholeCells += bufferWholeCell.sizeOfExcludingThis(mallocSizeOf);
    sizes->storeBufferRelocVals += bufferRelocVal.sizeOfExcludingThis(mallocSizeOf);
    sizes->storeBufferRelocCells += bufferRelocCell.sizeOfExcludingThis(mallocSizeOf);
    sizes->storeBufferGenerics += bufferGeneric.sizeOfExcludingThis(mallocSizeOf);
}
|
334 |
|
/*
 * Public post-barrier entry point: record |cellp| as a relocatable cell
 * edge in the runtime's store buffer.  |*cellp| must be non-null.
 */
JS_PUBLIC_API(void)
JS::HeapCellPostBarrier(js::gc::Cell **cellp)
{
    JS_ASSERT(*cellp);
    JSRuntime *runtime = (*cellp)->runtimeFromMainThread();
    runtime->gcStoreBuffer.putRelocatableCell(cellp);
}
|
342 |
|
/*
 * Public relocation entry point: unregister the relocatable cell edge at
 * |cellp| from the runtime's store buffer.
 */
JS_PUBLIC_API(void)
JS::HeapCellRelocate(js::gc::Cell **cellp)
{
    /* Called with old contents of *cellp before overwriting. */
    JS_ASSERT(*cellp);
    JSRuntime *runtime = (*cellp)->runtimeFromMainThread();
    runtime->gcStoreBuffer.removeRelocatableCell(cellp);
}
|
351 |
|
/*
 * Public post-barrier entry point: record |valuep| as a relocatable value
 * edge.  |*valuep| must hold a GC thing (isMarkable).
 */
JS_PUBLIC_API(void)
JS::HeapValuePostBarrier(JS::Value *valuep)
{
    JS_ASSERT(valuep->isMarkable());
    /*
     * Permanent atoms are exempted from the barrier — presumably because
     * they are never moved or collected; confirm against atom handling.
     */
    if (valuep->isString() && StringIsPermanentAtom(valuep->toString()))
        return;
    JSRuntime *runtime = static_cast<js::gc::Cell *>(valuep->toGCThing())->runtimeFromMainThread();
    runtime->gcStoreBuffer.putRelocatableValue(valuep);
}
|
361 |
|
/*
 * Public relocation entry point: unregister the relocatable value edge at
 * |valuep|.  Mirrors HeapValuePostBarrier, including the permanent-atom
 * exemption, so put/remove calls stay balanced.
 */
JS_PUBLIC_API(void)
JS::HeapValueRelocate(JS::Value *valuep)
{
    /* Called with old contents of *valuep before overwriting. */
    JS_ASSERT(valuep->isMarkable());
    if (valuep->isString() && StringIsPermanentAtom(valuep->toString()))
        return;
    JSRuntime *runtime = static_cast<js::gc::Cell *>(valuep->toGCThing())->runtimeFromMainThread();
    runtime->gcStoreBuffer.removeRelocatableValue(valuep);
}
|
372 |
|
373 template class StoreBuffer::MonoTypeBuffer<StoreBuffer::ValueEdge>; |
|
374 template class StoreBuffer::MonoTypeBuffer<StoreBuffer::CellPtrEdge>; |
|
375 template class StoreBuffer::MonoTypeBuffer<StoreBuffer::SlotsEdge>; |
|
376 template class StoreBuffer::MonoTypeBuffer<StoreBuffer::WholeCellEdges>; |
|
377 template class StoreBuffer::RelocatableMonoTypeBuffer<StoreBuffer::ValueEdge>; |
|
378 template class StoreBuffer::RelocatableMonoTypeBuffer<StoreBuffer::CellPtrEdge>; |
|
379 |
|
380 #endif /* JSGC_GENERATIONAL */ |