/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "shell/jsheaptools.h"

#include "mozilla/Move.h"

#include <string.h>

#include "jsalloc.h"
#include "jsapi.h"
#include "jscntxt.h"
#include "jscompartment.h"
#include "jsobj.h"
#include "jsprf.h"

#include "jsobjinlines.h"

using namespace js;

using mozilla::Move;

#ifdef DEBUG


/*** class HeapReverser **************************************************************************/

/*
 * A class for constructing a map of the JavaScript heap, with all
 * reference edges reversed.
 *
 * Unfortunately, it's not possible to build the results for findReferences
 * while visiting things solely in the order that JS_TraceRuntime and
 * JS_TraceChildren reach them. For example, as you work outward from the
 * roots, suppose an edge from thing T reaches a "gray" thing G --- G being gray
 * because you're still in the midst of traversing its descendants. At this
 * point, you don't know yet whether G will be a referrer or not, and so you
 * can't tell whether T should be a referrer either. And you won't visit T
 * again.
 *
 * So we take a brute-force approach. We reverse the entire graph, and then walk
 * outward from |target| to the representable objects that refer to it, stopping
 * at such objects.
 */
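
/*
 * For example (cells and edge names here are purely illustrative): if the live
 * heap contains the edges
 *
 *     root --"a"--> A --"x"--> B
 *
 * then after reversal |map| holds an entry for A whose incoming vector has an
 * Edge named "a" with a nullptr origin (a root), and an entry for B whose
 * incoming vector has an Edge named "x" whose origin is A's address.
 * findReferences then walks these incoming-edge lists outward from its target.
 */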

/*
 * A JSTracer that produces a map of the heap with edges reversed.
 *
 * HeapReversers must be allocated in a stack frame. (They are derived from
 * CustomAutoRooter, and those must be allocated and destroyed in a stack-like
 * order.)
 *
 * HeapReversers keep all the roots they find in their traversal alive until
 * they are destroyed. So you don't need to worry about nodes going away while
 * you're using them.
 */
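/*
 * A rough usage sketch, assuming the caller checks for failure (FindReferences
 * below is the real consumer):
 *
 *     HeapReverser reverser(cx);                       // must live on the stack
 *     if (!reverser.init() || !reverser.reverseHeap())
 *         return false;
 *     // reverser.map now maps each visited cell to its incoming edges.
 */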
class HeapReverser : public JSTracer, public JS::CustomAutoRooter
{
  public:
    struct Edge;

    /* Metadata for a given Cell we have visited. */
    class Node {
      public:
        Node() { }
        Node(JSGCTraceKind kind)
          : kind(kind), incoming(), marked(false) { }

        /*
         * Move constructor and move assignment. These allow us to store our
         * incoming edge Vector in the hash table: Vectors support moves, but
         * not assignments or copy construction.
         */
        Node(Node &&rhs)
          : kind(rhs.kind), incoming(Move(rhs.incoming)), marked(rhs.marked) { }
        Node &operator=(Node &&rhs) {
            MOZ_ASSERT(this != &rhs, "self-move assignment is prohibited");
            this->~Node();
            new(this) Node(Move(rhs));
            return *this;
        }

        void trace(JSTracer *trc) {
            for (Edge *e = incoming.begin(); e != incoming.end(); e++)
                e->trace(trc);
        }

        /* What kind of Cell this is. */
        JSGCTraceKind kind;

        /*
         * A vector of this Cell's incoming edges.
         * This must use SystemAllocPolicy because HashMap requires its elements to
         * be constructible with no arguments.
         */
        Vector<Edge, 0, SystemAllocPolicy> incoming;

        /* A mark bit, for other traversals. */
        bool marked;

      private:
        Node(const Node &) MOZ_DELETE;
        Node &operator=(const Node &) MOZ_DELETE;
    };
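
    /*
     * Node's move support above is what lets traverseEdge (below) hand Nodes
     * to the HashMap without copying their |incoming| vectors, roughly:
     *
     *     Node n(kind);
     *     map.add(a, cell, Move(n));   // moves, rather than copies, the Node
     */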

    /* Metadata for a heap edge we have traversed. */
    struct Edge {
      public:
        Edge(char *name, void *origin) : name(name), origin(origin) { }
        ~Edge() { js_free(name); }

        /*
         * Move constructor and move assignment. These allow us to live in
         * Vectors without needing to copy our name string when the vector is
         * resized.
         */
        Edge(Edge &&rhs) : name(rhs.name), origin(rhs.origin) {
            rhs.name = nullptr;
        }
        Edge &operator=(Edge &&rhs) {
            MOZ_ASSERT(this != &rhs, "self-move assignment is prohibited");
            this->~Edge();
            new(this) Edge(Move(rhs));
            return *this;
        }

        void trace(JSTracer *trc) {
            if (origin)
                gc::MarkGCThingRoot(trc, &origin, "HeapReverser::Edge");
        }

        /* The name of this heap edge. Owned by this Edge. */
        char *name;

        /*
         * The Cell from which this edge originates. nullptr means a root. This
         * is a cell address instead of a Node * because Nodes live in HashMap
         * table entries; if the HashMap reallocates its table, all pointers to
         * the Nodes it contains would become invalid. You should look up the
         * address here in |map| to find its Node.
         */
        void *origin;
    };

    /*
     * The result of a reversal is a map from Cells' addresses to Node
     * structures describing their incoming edges.
     */
    typedef HashMap<void *, Node, DefaultHasher<void *>, SystemAllocPolicy> Map;
    Map map;

    /* Construct a HeapReverser for |context|'s heap. */
    HeapReverser(JSContext *cx)
      : JSTracer(cx->runtime(), traverseEdgeWithThis),
        JS::CustomAutoRooter(cx),
        noggc(JS_GetRuntime(cx)),
        runtime(JS_GetRuntime(cx)),
        parent(nullptr)
    {
    }

    bool init() { return map.init(); }

    /* Build a reversed map of the heap in |map|. */
    bool reverseHeap();

  private:
    JS::AutoDisableGenerationalGC noggc;

    /* A runtime pointer for use by the destructor. */
    JSRuntime *runtime;

    /*
     * Return the name of the most recent edge this JSTracer has traversed. The
     * result is allocated with malloc; if we run out of memory, raise an error
     * in this HeapReverser's context and return nullptr.
     *
     * This may not be called after that edge's call to traverseEdge has
     * returned.
     */
    char *getEdgeDescription();

    /* Class for setting new parent, and then restoring the original. */
    class AutoParent {
      public:
        AutoParent(HeapReverser *reverser, void *newParent) : reverser(reverser) {
            savedParent = reverser->parent;
            reverser->parent = newParent;
        }
        ~AutoParent() {
            reverser->parent = savedParent;
        }
      private:
        HeapReverser *reverser;
        void *savedParent;
    };

    /* A work item in the stack of nodes whose children we need to traverse. */
    struct Child {
        Child(void *cell, JSGCTraceKind kind) : cell(cell), kind(kind) { }
        void *cell;
        JSGCTraceKind kind;
    };

    /*
     * A stack of work items. We represent the stack explicitly to avoid
     * overflowing the C++ stack when traversing long chains of objects.
     */
    Vector<Child, 0, SystemAllocPolicy> work;

    /* When traverseEdge is called, the Cell from which the edge originated. */
    void *parent;

    /* Traverse an edge. */
    bool traverseEdge(void *cell, JSGCTraceKind kind);

    /*
     * JS_TraceRuntime and JS_TraceChildren don't propagate error returns,
     * and out-of-memory errors, by design, don't establish an exception in
     * |context|, so traverseEdgeWithThis uses this to communicate the
     * result of the traversal to reverseHeap.
     */
    bool traversalStatus;

    /* Static member function wrapping 'traverseEdge'. */
    static void traverseEdgeWithThis(JSTracer *tracer, void **thingp, JSGCTraceKind kind) {
        HeapReverser *reverser = static_cast<HeapReverser *>(tracer);
        if (!reverser->traverseEdge(*thingp, kind))
            reverser->traversalStatus = false;
    }

    /* Return a jsval representing a node, if possible; otherwise, return JSVAL_VOID. */
    jsval nodeToValue(void *cell, int kind) {
        if (kind != JSTRACE_OBJECT)
            return JSVAL_VOID;
        JSObject *object = static_cast<JSObject *>(cell);
        return OBJECT_TO_JSVAL(object);
    }

    /* Keep all tracked objects live across GC. */
    virtual void trace(JSTracer *trc) MOZ_OVERRIDE {
        if (!map.initialized())
            return;
        for (Map::Enum e(map); !e.empty(); e.popFront()) {
            gc::MarkGCThingRoot(trc, const_cast<void **>(&e.front().key()), "HeapReverser::map::key");
            e.front().value().trace(trc);
        }
        for (Child *c = work.begin(); c != work.end(); ++c)
            gc::MarkGCThingRoot(trc, &c->cell, "HeapReverser::Child");
    }
};

bool
HeapReverser::traverseEdge(void *cell, JSGCTraceKind kind)
{
    /* Capture this edge before the JSTracer members get overwritten. */
    char *edgeDescription = getEdgeDescription();
    if (!edgeDescription)
        return false;
    Edge e(edgeDescription, parent);

    Map::AddPtr a = map.lookupForAdd(cell);
    if (!a) {
        /*
         * We've never visited this cell before. Add it to the map (thus
         * marking it as visited), and put it on the work stack, to be
         * visited from the main loop.
         */
        Node n(kind);
        uint32_t generation = map.generation();
        if (!map.add(a, cell, Move(n)) ||
            !work.append(Child(cell, kind)))
            return false;
        /* If the map has been resized, re-check the pointer. */
        if (map.generation() != generation)
            a = map.lookupForAdd(cell);
    }

    /* Add this edge to the reversed map. */
    return a->value().incoming.append(Move(e));
}

bool
HeapReverser::reverseHeap()
{
    traversalStatus = true;

    /* Prime the work stack with the roots of collection. */
    JS_TraceRuntime(this);
    if (!traversalStatus)
        return false;

    /* Traverse children until the stack is empty. */
    while (!work.empty()) {
        const Child child = work.popCopy();
        AutoParent autoParent(this, child.cell);
        JS_TraceChildren(this, child.cell, child.kind);
        if (!traversalStatus)
            return false;
    }

    return true;
}

char *
HeapReverser::getEdgeDescription()
{
    if (!debugPrinter() && debugPrintIndex() == (size_t) -1) {
        const char *arg = static_cast<const char *>(debugPrintArg());
        char *name = js_pod_malloc<char>(strlen(arg) + 1);
        if (!name)
            return nullptr;
        strcpy(name, arg);
        return name;
    }

    /* Lovely; but a fixed size is required by JSTraceNamePrinter. */
    static const int nameSize = 200;
    char *name = js_pod_malloc<char>(nameSize);
    if (!name)
        return nullptr;
    if (debugPrinter())
        debugPrinter()(this, name, nameSize);
    else
        JS_snprintf(name, nameSize, "%s[%lu]",
                    static_cast<const char *>(debugPrintArg()), debugPrintIndex());

    /* Shrink storage to fit. */
    return static_cast<char *>(js_realloc(name, strlen(name) + 1));
}
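
/*
 * Edge descriptions thus take one of three forms: a verbatim copy of the
 * callsite's name string, whatever a custom debug printer wrote into the
 * buffer, or "name[index]" for indexed edges; the concrete strings depend
 * entirely on the tracer callsites.
 */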


/*** class ReferenceFinder ***********************************************************************/

/* A class for finding an object's referrers, given a reversed heap map. */
class ReferenceFinder {
  public:
    ReferenceFinder(JSContext *cx, const HeapReverser &reverser)
      : context(cx), reverser(reverser), result(cx) { }

    /* Produce an object describing all references to |target|. */
    JSObject *findReferences(HandleObject target);

  private:
    /* The context in which to do allocation and error-handling. */
    JSContext *context;

    /* A reversed map of the current heap. */
    const HeapReverser &reverser;

    /* The results object we're currently building. */
    RootedObject result;

    /* A list of edges we've traversed to get to a certain point. */
    class Path {
      public:
        Path(const HeapReverser::Edge &edge, Path *next) : edge(edge), next(next) { }

        /*
         * Compute the full path represented by this Path. The result is
         * owned by the caller.
         */
        char *computeName(JSContext *cx);

      private:
        const HeapReverser::Edge &edge;
        Path *next;
    };

    struct AutoNodeMarker {
        AutoNodeMarker(HeapReverser::Node *node) : node(node) { node->marked = true; }
        ~AutoNodeMarker() { node->marked = false; }
      private:
        HeapReverser::Node *node;
    };

    /*
     * Given that we've reached |cell| via |path|, with all Nodes along that
     * path marked, add paths from all reportable objects reachable from cell
     * to |result|.
     */
    bool visit(void *cell, Path *path);

    /*
     * If |cell|, of |kind|, is representable as a JavaScript value, return that
     * value; otherwise, return JSVAL_VOID.
     */
    jsval representable(void *cell, int kind) {
        if (kind == JSTRACE_OBJECT) {
            JSObject *object = static_cast<JSObject *>(cell);

            /* Certain classes of object are for internal use only. */
            if (object->is<BlockObject>() ||
                object->is<CallObject>() ||
                object->is<StaticWithObject>() ||
                object->is<DynamicWithObject>() ||
                object->is<DeclEnvObject>()) {
                return JSVAL_VOID;
            }

            /* Internal function objects should also not be revealed. */
            if (JS_ObjectIsFunction(context, object) && IsInternalFunctionObject(object))
                return JSVAL_VOID;

            return OBJECT_TO_JSVAL(object);
        }

        return JSVAL_VOID;
    }

    /* Add |referrer| as something that refers to |target| via |path|. */
    bool addReferrer(jsval referrer, Path *path);
};

bool
ReferenceFinder::visit(void *cell, Path *path)
{
    /* In ReferenceFinder, paths will almost certainly fit on the C++ stack. */
    JS_CHECK_RECURSION(context, return false);

    /* Have we reached a root? Always report that. */
    if (!cell)
        return addReferrer(JSVAL_NULL, path);

    HeapReverser::Map::Ptr p = reverser.map.lookup(cell);
    JS_ASSERT(p);
    HeapReverser::Node *node = &p->value();

    /* Is |cell| a representable cell, reached via a non-empty path? */
    if (path != nullptr) {
        jsval representation = representable(cell, node->kind);
        if (!JSVAL_IS_VOID(representation))
            return addReferrer(representation, path);
    }

    /*
     * If we've made a cycle, don't traverse further. We *do* want to include
     * paths from the target to itself, so we don't want to do this check until
     * after we've possibly reported this cell as a referrer.
     */
    if (node->marked)
        return true;
    AutoNodeMarker marker(node);

    /* Visit the origins of all |cell|'s incoming edges. */
    for (size_t i = 0; i < node->incoming.length(); i++) {
        const HeapReverser::Edge &edge = node->incoming[i];
        Path extendedPath(edge, path);
        if (!visit(edge.origin, &extendedPath))
            return false;
    }

    return true;
}

char *
ReferenceFinder::Path::computeName(JSContext *cx)
{
    /* Walk the edge list and compute the total size of the path. */
    size_t size = 6;
    for (Path *l = this; l; l = l->next)
        size += strlen(l->edge.name) + (l->next ? 2 : 0);
    size += 1;

    char *path = cx->pod_malloc<char>(size);
    if (!path)
        return nullptr;

    /*
     * Walk the edge list again, and copy the edge names into place, with
     * appropriate separators. Note that we constructed the edge list from
     * target to referrer, which means that the list links point *towards* the
     * target, so we can walk the list and build the path from left to right.
     */
    strcpy(path, "edge: ");
    char *next = path + 6;
    for (Path *l = this; l; l = l->next) {
        strcpy(next, l->edge.name);
        next += strlen(next);
        if (l->next) {
            strcpy(next, "; ");
            next += 2;
        }
    }
    JS_ASSERT(next + 1 == path + size);

    return path;
}
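
/*
 * A worked example with hypothetical edge names: for a two-edge path whose
 * names are "x" and then "shape", the sizing pass above computes
 * 6 + (1 + 2) + 5 + 1 = 15 bytes, and the copying pass fills them with the
 * 14-character string "edge: x; shape" plus its terminating NUL.
 */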

bool
ReferenceFinder::addReferrer(jsval referrerArg, Path *path)
{
    RootedValue referrer(context, referrerArg);

    if (!context->compartment()->wrap(context, &referrer))
        return false;

    ScopedJSFreePtr<char> pathName(path->computeName(context));
    if (!pathName)
        return false;

    /* Find the property of the results object named |pathName|. */
    RootedValue v(context);

    if (!JS_GetProperty(context, result, pathName, &v))
        return false;
    if (v.isUndefined()) {
        /* Create an array to accumulate referrers under this path. */
        JSObject *array = JS_NewArrayObject(context, referrer);
        if (!array)
            return false;
        v.setObject(*array);
        return !!JS_SetProperty(context, result, pathName, v);
    }

    /* The property's value had better be an array. */
    RootedObject array(context, &v.toObject());
    JS_ASSERT(JS_IsArrayObject(context, array));

    /* Append our referrer to this array. */
    uint32_t length;
    return JS_GetArrayLength(context, array, &length) &&
           JS_SetElement(context, array, length, referrer);
}

JSObject *
ReferenceFinder::findReferences(HandleObject target)
{
    result = JS_NewObject(context, nullptr, JS::NullPtr(), JS::NullPtr());
    if (!result)
        return nullptr;
    if (!visit(target, nullptr))
        return nullptr;

    return result;
}

/* See help(findReferences). */
bool
FindReferences(JSContext *cx, unsigned argc, jsval *vp)
{
    CallArgs args = CallArgsFromVp(argc, vp);
    if (args.length() < 1) {
        JS_ReportErrorNumber(cx, js_GetErrorMessage, nullptr, JSMSG_MORE_ARGS_NEEDED,
                             "findReferences", "0", "s");
        return false;
    }

    RootedValue target(cx, args[0]);
    if (!target.isObject()) {
        JS_ReportErrorNumber(cx, js_GetErrorMessage, nullptr, JSMSG_UNEXPECTED_TYPE,
                             "argument", "not an object");
        return false;
    }

    /* Walk the JSRuntime, producing a reversed map of the heap. */
    HeapReverser reverser(cx);
    if (!reverser.init() || !reverser.reverseHeap())
        return false;

    /* Given the reversed map, find the referrers of target. */
    ReferenceFinder finder(cx, reverser);
    Rooted<JSObject*> targetObj(cx, &target.toObject());
    JSObject *references = finder.findReferences(targetObj);
    if (!references)
        return false;

    args.rval().setObject(*references);
    return true;
}
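
/*
 * From the shell, findReferences(obj) returns an object mapping each
 * "edge: ..." path string to an array of referrers reached along that path;
 * a null element means the path ends at a GC root. A rough sketch, with
 * property and path names purely illustrative:
 *
 *     js> var target = {};
 *     js> var holder = { prop: target };
 *     js> findReferences(target)   // expect something like {"edge: prop": [holder], ...}
 */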

#endif /* DEBUG */