Wed, 31 Dec 2014 06:09:35 +0100
Cloned upstream origin tor-browser at tor-browser-31.3.0esr-4.5-1-build1
(revision ID fc1c9ff7c1b2defdbc039f12214767608f46423f) for hacking purposes.

/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "vm/String-inl.h"

#include "mozilla/MathAlgorithms.h"
#include "mozilla/MemoryReporting.h"
#include "mozilla/PodOperations.h"
#include "mozilla/RangedPtr.h"

#include "gc/Marking.h"

#include "jscntxtinlines.h"
#include "jscompartmentinlines.h"

using namespace js;

using mozilla::PodCopy;
using mozilla::RangedPtr;
using mozilla::RoundUpPow2;

bool
JSString::isFatInline() const
{
    // It's possible for fat-inline strings to be converted to flat strings;
    // as a result, checking just for the arena isn't enough to determine if a
    // string is fat-inline. Hence the isInline() check.
    bool is_FatInline = (getAllocKind() == gc::FINALIZE_FAT_INLINE_STRING) && isInline();
    JS_ASSERT_IF(is_FatInline, isFlat());
    return is_FatInline;
}
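
// External strings are flat strings whose characters live in a buffer
// supplied by the embedding rather than in the GC heap, which is why they
// get their own finalize kind.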
bool
JSString::isExternal() const
{
    bool is_external = (getAllocKind() == gc::FINALIZE_EXTERNAL_STRING);
    JS_ASSERT_IF(is_external, isFlat());
    return is_external;
}

size_t
JSString::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf)
{
    // JSRope: do nothing, we'll count all children chars when we hit the leaf strings.
    if (isRope())
        return 0;

    JS_ASSERT(isLinear());

    // JSDependentString: do nothing, we'll count the chars when we hit the base string.
    if (isDependent())
        return 0;

    JS_ASSERT(isFlat());

    // JSExtensibleString: count the full capacity, not just the used space.
    if (isExtensible()) {
        JSExtensibleString &extensible = asExtensible();
        return mallocSizeOf(extensible.chars());
    }

    // JSExternalString: don't count, the chars could be stored anywhere.
    if (isExternal())
        return 0;

    // JSInlineString, JSFatInlineString [JSInlineAtom, JSFatInlineAtom]: the chars are inline.
    if (isInline())
        return 0;

    // JSAtom, JSUndependedString: measure the space for the chars. For
    // JSUndependedString, there is no need to count the base string, for the
    // same reason as JSDependentString above.
    JSFlatString &flat = asFlat();
    return mallocSizeOf(flat.chars());
}

#ifdef DEBUG

void
JSString::dumpChars(const jschar *s, size_t n)
{
    if (n == SIZE_MAX) {
        n = 0;
        while (s[n])
            n++;
    }

    fputc('"', stderr);
    for (size_t i = 0; i < n; i++) {
        if (s[i] == '\n')
            fprintf(stderr, "\\n");
        else if (s[i] == '\t')
            fprintf(stderr, "\\t");
        else if (s[i] >= 32 && s[i] < 127)
            fputc(s[i], stderr);
        else if (s[i] <= 255)
            fprintf(stderr, "\\x%02x", (unsigned int) s[i]);
        else
            fprintf(stderr, "\\u%04x", (unsigned int) s[i]);
    }
    fputc('"', stderr);
}

void
JSString::dump()
{
    if (const jschar *chars = getChars(nullptr)) {
        fprintf(stderr, "JSString* (%p) = jschar * (%p) = ",
                (void *) this, (void *) chars);
        dumpChars(chars, length());
    } else {
        fprintf(stderr, "(oom in JSString::dump)");
    }
    fputc('\n', stderr);
}

bool
JSString::equals(const char *s)
{
    const jschar *c = getChars(nullptr);
    if (!c) {
        fprintf(stderr, "OOM in JSString::equals!\n");
        return false;
    }
    while (*c && *s) {
        if (*c != *s)
            return false;
        c++;
        s++;
    }
    return *c == *s;
}
#endif /* DEBUG */

static MOZ_ALWAYS_INLINE bool
AllocChars(ThreadSafeContext *maybecx, size_t length, jschar **chars, size_t *capacity)
{
    /*
     * String length doesn't include the null char, so include it here before
     * doubling. Adding the null char after doubling would interact poorly with
     * round-up malloc schemes.
     */
    size_t numChars = length + 1;

    /*
     * Grow by 12.5% if the buffer is very large. Otherwise, round up to the
     * next power of 2. This is similar to what we do with arrays; see
     * JSObject::ensureDenseArrayElements.
     */
    static const size_t DOUBLING_MAX = 1024 * 1024;
    numChars = numChars > DOUBLING_MAX ? numChars + (numChars / 8) : RoundUpPow2(numChars);

    /* Like length, capacity does not include the null char, so take it out. */
    *capacity = numChars - 1;

    JS_STATIC_ASSERT(JSString::MAX_LENGTH * sizeof(jschar) < UINT32_MAX);
    size_t bytes = numChars * sizeof(jschar);
    *chars = (jschar *)(maybecx ? maybecx->malloc_(bytes) : js_malloc(bytes));
    return *chars != nullptr;
}
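
/*
 * Copy this rope's characters into a freshly allocated buffer; the 'Z'
 * variant also writes a terminating NUL. Both are thin wrappers around
 * copyNonPureCharsInternal below.
 */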
bool
JSRope::copyNonPureChars(ThreadSafeContext *cx, ScopedJSFreePtr<jschar> &out) const
{
    return copyNonPureCharsInternal(cx, out, false);
}

bool
JSRope::copyNonPureCharsZ(ThreadSafeContext *cx, ScopedJSFreePtr<jschar> &out) const
{
    return copyNonPureCharsInternal(cx, out, true);
}

bool
JSRope::copyNonPureCharsInternal(ThreadSafeContext *cx, ScopedJSFreePtr<jschar> &out,
                                 bool nullTerminate) const
{
    /*
     * Perform non-destructive post-order traversal of the rope, splatting
     * each node's characters into a contiguous buffer.
     */

    size_t n = length();
    if (cx)
        out.reset(cx->pod_malloc<jschar>(n + 1));
    else
        out.reset(js_pod_malloc<jschar>(n + 1));

    if (!out)
        return false;

    Vector<const JSString *, 8, SystemAllocPolicy> nodeStack;
    const JSString *str = this;
    jschar *pos = out;
    while (true) {
        if (str->isRope()) {
            if (!nodeStack.append(str->asRope().rightChild()))
                return false;
            str = str->asRope().leftChild();
        } else {
            size_t len = str->length();
            PodCopy(pos, str->asLinear().chars(), len);
            pos += len;
            if (nodeStack.empty())
                break;
            str = nodeStack.popCopy();
        }
    }

    JS_ASSERT(pos == out + n);

    if (nullTerminate)
        out[n] = 0;

    return true;
}

template<JSRope::UsingBarrier b>
JSFlatString *
JSRope::flattenInternal(ExclusiveContext *maybecx)
{
    /*
     * Perform a depth-first dag traversal, splatting each node's characters
     * into a contiguous buffer. Visit each rope node three times:
     *   1. record position in the buffer and recurse into left child;
     *   2. recurse into the right child;
     *   3. transform the node into a dependent string.
     * To avoid maintaining a stack, tree nodes are mutated to indicate how many
     * times they have been visited. Since ropes can be dags, a node may be
     * encountered multiple times during traversal. However, step 3 above leaves
     * a valid dependent string, so everything works out. This algorithm is
     * homomorphic to marking code.
     *
     * While ropes avoid all sorts of quadratic cases with string
     * concatenation, they can't help when ropes are immediately flattened.
     * One idiomatic case that we'd like to keep linear (and has traditionally
     * been linear in SM and other JS engines) is:
     *
     *   while (...) {
     *     s += ...
     *     s.flatten
     *   }
     *
     * To do this, when the buffer for a to-be-flattened rope is allocated, the
     * allocation size is rounded up. Then, if the resulting flat string is the
     * left-hand side of a new rope that gets flattened and there is enough
     * capacity, the rope is flattened into the same buffer, thereby avoiding
     * copying the left-hand side. Clearing the 'extensible' bit turns off this
     * optimization. This is necessary, e.g., when the JSAPI hands out the raw
     * null-terminated char array of a flat string.
     *
     * N.B. This optimization can create chains of dependent strings.
     */
    const size_t wholeLength = length();
    size_t wholeCapacity;
    jschar *wholeChars;
    JSString *str = this;
    jschar *pos;

    /* Find the left-most rope; its left child holds the string's first characters. */
    JSRope *leftMostRope = this;
    while (leftMostRope->leftChild()->isRope())
        leftMostRope = &leftMostRope->leftChild()->asRope();

    if (leftMostRope->leftChild()->isExtensible()) {
        JSExtensibleString &left = leftMostRope->leftChild()->asExtensible();
        size_t capacity = left.capacity();
        if (capacity >= wholeLength) {
            /*
             * Simulate a left-most traversal from the root to
             * leftMostRope->leftChild() via first_visit_node.
             */
            while (str != leftMostRope) {
                JS_ASSERT(str->isRope());
                if (b == WithIncrementalBarrier) {
                    JSString::writeBarrierPre(str->d.u1.left);
                    JSString::writeBarrierPre(str->d.s.u2.right);
                }
                JSString *child = str->d.u1.left;
                str->d.u1.chars = left.chars();
                child->d.s.u3.parent = str;
                child->d.lengthAndFlags = 0x200;
                str = child;
            }
            if (b == WithIncrementalBarrier) {
                JSString::writeBarrierPre(str->d.u1.left);
                JSString::writeBarrierPre(str->d.s.u2.right);
            }
            str->d.u1.chars = left.chars();
            wholeCapacity = capacity;
            wholeChars = const_cast<jschar *>(left.chars());
            size_t bits = left.d.lengthAndFlags;
            pos = wholeChars + (bits >> LENGTH_SHIFT);
            JS_STATIC_ASSERT(!(EXTENSIBLE_FLAGS & DEPENDENT_FLAGS));
            left.d.lengthAndFlags = bits ^ (EXTENSIBLE_FLAGS | DEPENDENT_FLAGS);
            left.d.s.u2.base = (JSLinearString *)this;  /* will be true on exit */
            StringWriteBarrierPostRemove(maybecx, &left.d.u1.left);
            StringWriteBarrierPost(maybecx, (JSString **)&left.d.s.u2.base);
            goto visit_right_child;
        }
    }

    if (!AllocChars(maybecx, wholeLength, &wholeChars, &wholeCapacity))
        return nullptr;

    pos = wholeChars;
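
    /*
     * While a rope node is being flattened, its lengthAndFlags temporarily
     * records where the traversal should resume once it returns to that node:
     * 0x200 resumes at visit_right_child (left subtree done), 0x300 resumes
     * at finish_node (both subtrees done). See finish_node below.
     */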
    first_visit_node: {
        if (b == WithIncrementalBarrier) {
            JSString::writeBarrierPre(str->d.u1.left);
            JSString::writeBarrierPre(str->d.s.u2.right);
        }

        JSString &left = *str->d.u1.left;
        str->d.u1.chars = pos;
        StringWriteBarrierPostRemove(maybecx, &str->d.u1.left);
        if (left.isRope()) {
            left.d.s.u3.parent = str;       /* Return to this when 'left' done, */
            left.d.lengthAndFlags = 0x200;  /* but goto visit_right_child. */
            str = &left;
            goto first_visit_node;
        }
        size_t len = left.length();
        PodCopy(pos, left.d.u1.chars, len);
        pos += len;
    }
    visit_right_child: {
        JSString &right = *str->d.s.u2.right;
        if (right.isRope()) {
            right.d.s.u3.parent = str;       /* Return to this node when 'right' done, */
            right.d.lengthAndFlags = 0x300;  /* but goto finish_node. */
            str = &right;
            goto first_visit_node;
        }
        size_t len = right.length();
        PodCopy(pos, right.d.u1.chars, len);
        pos += len;
    }
    finish_node: {
        if (str == this) {
            JS_ASSERT(pos == wholeChars + wholeLength);
            *pos = '\0';
            str->d.lengthAndFlags = buildLengthAndFlags(wholeLength, EXTENSIBLE_FLAGS);
            str->d.u1.chars = wholeChars;
            str->d.s.u2.capacity = wholeCapacity;
            StringWriteBarrierPostRemove(maybecx, &str->d.u1.left);
            StringWriteBarrierPostRemove(maybecx, &str->d.s.u2.right);
            return &this->asFlat();
        }
        size_t progress = str->d.lengthAndFlags;
        str->d.lengthAndFlags = buildLengthAndFlags(pos - str->d.u1.chars, DEPENDENT_FLAGS);
        str->d.s.u2.base = (JSLinearString *)this;  /* will be true on exit */
        StringWriteBarrierPost(maybecx, (JSString **)&str->d.s.u2.base);
        str = str->d.s.u3.parent;
        if (progress == 0x200)
            goto visit_right_child;
        JS_ASSERT(progress == 0x300);
        goto finish_node;
    }
}
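
/*
 * Flatten this rope in place, taking the barrier-aware path when the
 * string's zone currently requires incremental-GC pre-barriers.
 */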
JSFlatString *
JSRope::flatten(ExclusiveContext *maybecx)
{
#if JSGC_INCREMENTAL
    if (zone()->needsBarrier())
        return flattenInternal<WithIncrementalBarrier>(maybecx);
    else
        return flattenInternal<NoBarrier>(maybecx);
#else
    return flattenInternal<NoBarrier>(maybecx);
#endif
}
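
/*
 * Concatenate two strings without copying when possible: an empty operand
 * returns the other string unchanged, short results are packed into a new
 * fat-inline string, and anything longer becomes a rope that is flattened
 * lazily.
 */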
template <AllowGC allowGC>
JSString *
js::ConcatStrings(ThreadSafeContext *cx,
                  typename MaybeRooted<JSString*, allowGC>::HandleType left,
                  typename MaybeRooted<JSString*, allowGC>::HandleType right)
{
    JS_ASSERT_IF(!left->isAtom(), cx->isInsideCurrentZone(left));
    JS_ASSERT_IF(!right->isAtom(), cx->isInsideCurrentZone(right));

    size_t leftLen = left->length();
    if (leftLen == 0)
        return right;

    size_t rightLen = right->length();
    if (rightLen == 0)
        return left;

    size_t wholeLength = leftLen + rightLen;
    if (!JSString::validateLength(cx, wholeLength))
        return nullptr;

    if (JSFatInlineString::lengthFits(wholeLength) && cx->isJSContext()) {
        JSFatInlineString *str = js_NewGCFatInlineString<allowGC>(cx);
        if (!str)
            return nullptr;

        ScopedThreadSafeStringInspector leftInspector(left);
        ScopedThreadSafeStringInspector rightInspector(right);
        if (!leftInspector.ensureChars(cx) || !rightInspector.ensureChars(cx))
            return nullptr;

        jschar *buf = str->init(wholeLength);
        PodCopy(buf, leftInspector.chars(), leftLen);
        PodCopy(buf + leftLen, rightInspector.chars(), rightLen);

        buf[wholeLength] = 0;
        return str;
    }

    return JSRope::new_<allowGC>(cx, left, right, wholeLength);
}

template JSString *
js::ConcatStrings<CanGC>(ThreadSafeContext *cx, HandleString left, HandleString right);

template JSString *
js::ConcatStrings<NoGC>(ThreadSafeContext *cx, JSString *left, JSString *right);
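
/*
 * Copy a dependent string's characters (plus a terminating NUL) out of its
 * base string into a buffer owned by the caller.
 */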
bool
JSDependentString::copyNonPureCharsZ(ThreadSafeContext *cx, ScopedJSFreePtr<jschar> &out) const
{
    JS_ASSERT(JSString::isDependent());

    size_t n = length();
    jschar *s = cx->pod_malloc<jschar>(n + 1);
    if (!s)
        return false;

    PodCopy(s, chars(), n);
    s[n] = 0;

    out.reset(s);
    return true;
}

JSFlatString *
JSDependentString::undepend(ExclusiveContext *cx)
{
    JS_ASSERT(JSString::isDependent());

    /*
     * We destroy the base() pointer in undepend, so we need a pre-barrier. We
     * don't need a post-barrier because there aren't any outgoing pointers
     * afterwards.
     */
    JSString::writeBarrierPre(base());

    size_t n = length();
    size_t size = (n + 1) * sizeof(jschar);
    jschar *s = (jschar *) cx->malloc_(size);
    if (!s)
        return nullptr;

    PodCopy(s, chars(), n);
    s[n] = 0;
    d.u1.chars = s;

    /*
     * Transform *this into an undepended string so 'base' will remain rooted
     * for the benefit of any other dependent string that depends on *this.
     */
    d.lengthAndFlags = buildLengthAndFlags(n, UNDEPENDED_FLAGS);

    return &this->asFlat();
}
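
/*
 * Check whether this string is the canonical decimal form of an unsigned
 * 32-bit integer; on success the parsed value is stored in *indexp.
 */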
bool
JSFlatString::isIndexSlow(uint32_t *indexp) const
{
    const jschar *s = charsZ();
    jschar ch = *s;

    if (!JS7_ISDEC(ch))
        return false;

    size_t n = length();
    if (n > UINT32_CHAR_BUFFER_LENGTH)
        return false;

    /*
     * Make sure to account for the '\0' at the end of characters, dereferenced
     * in the loop below.
     */
    RangedPtr<const jschar> cp(s, n + 1);
    const RangedPtr<const jschar> end(s + n, s, n + 1);

    uint32_t index = JS7_UNDEC(*cp++);
    uint32_t oldIndex = 0;
    uint32_t c = 0;

    if (index != 0) {
        while (JS7_ISDEC(*cp)) {
            oldIndex = index;
            c = JS7_UNDEC(*cp);
            index = 10 * index + c;
            cp++;
        }
    }

    /* It's not an element if there are characters after the number. */
    if (cp != end)
        return false;

    /*
     * Look out for "4294967296" and larger-number strings that fit in
     * UINT32_CHAR_BUFFER_LENGTH: only unsigned 32-bit integers shall pass.
     */
    if (oldIndex < UINT32_MAX / 10 || (oldIndex == UINT32_MAX / 10 && c <= (UINT32_MAX % 10))) {
        *indexp = index;
        return true;
    }

    return false;
}
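
/*
 * Make the inspected string's characters available: on an exclusive context
 * the string is linearized; otherwise its pure chars are used directly, or
 * they are copied into the inspector's scoped buffer.
 */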
bool
ScopedThreadSafeStringInspector::ensureChars(ThreadSafeContext *cx)
{
    if (chars_)
        return true;

    if (cx->isExclusiveContext()) {
        JSLinearString *linear = str_->ensureLinear(cx->asExclusiveContext());
        if (!linear)
            return false;
        chars_ = linear->chars();
    } else {
        if (str_->hasPureChars()) {
            chars_ = str_->pureChars();
        } else {
            if (!str_->copyNonPureChars(cx, scopedChars_))
                return false;
            chars_ = scopedChars_;
        }
    }

    JS_ASSERT(chars_);
    return true;
}

/*
 * Set up some tools to make it easier to generate large tables. After constant
 * folding, for each n, Rn(0) is the comma-separated list R(0), R(1), ..., R(2^n-1).
 * Similarly, Rn(k) (for any k and n) generates the list R(k), R(k+1), ..., R(k+2^n-1).
 * To use this, define R appropriately, then use Rn(0) (for some value of n), then
 * undefine R.
 */
#define R2(n) R(n), R((n) + (1 << 0)), R((n) + (2 << 0)), R((n) + (3 << 0))
#define R4(n) R2(n), R2((n) + (1 << 2)), R2((n) + (2 << 2)), R2((n) + (3 << 2))
#define R6(n) R4(n), R4((n) + (1 << 4)), R4((n) + (2 << 4)), R4((n) + (3 << 4))
#define R7(n) R6(n), R6((n) + (1 << 6))

/*
 * This is used when we generate our table of short strings, so the compiler is
 * happier if we use |c| as few times as possible.
 */
#define FROM_SMALL_CHAR(c) jschar((c) + ((c) < 10 ? '0' :      \
                                         (c) < 36 ? 'a' - 10 : \
                                                    'A' - 36))

/*
 * Declare length-2 strings. We only store strings where both characters are
 * alphanumeric. The lower 10 short chars are the numerals, the next 26 are
 * the lowercase letters, and the next 26 are the uppercase letters.
 */
#define TO_SMALL_CHAR(c) ((c) >= '0' && (c) <= '9' ? (c) - '0' :      \
                          (c) >= 'a' && (c) <= 'z' ? (c) - 'a' + 10 : \
                          (c) >= 'A' && (c) <= 'Z' ? (c) - 'A' + 36 : \
                          StaticStrings::INVALID_SMALL_CHAR)

#define R TO_SMALL_CHAR
const StaticStrings::SmallChar StaticStrings::toSmallChar[] = { R7(0) };
#undef R
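
/*
 * Pre-create the permanent atoms for every one-character string whose code is
 * below UNIT_STATIC_LIMIT, every alphanumeric length-2 string, and the decimal
 * forms of the integers below INT_STATIC_LIMIT.
 */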
bool
StaticStrings::init(JSContext *cx)
{
    AutoLockForExclusiveAccess lock(cx);
    AutoCompartment ac(cx, cx->runtime()->atomsCompartment());

    for (uint32_t i = 0; i < UNIT_STATIC_LIMIT; i++) {
        jschar buffer[] = { jschar(i), '\0' };
        JSFlatString *s = js_NewStringCopyN<NoGC>(cx, buffer, 1);
        if (!s)
            return false;
        unitStaticTable[i] = s->morphAtomizedStringIntoPermanentAtom();
    }

    for (uint32_t i = 0; i < NUM_SMALL_CHARS * NUM_SMALL_CHARS; i++) {
        jschar buffer[] = { FROM_SMALL_CHAR(i >> 6), FROM_SMALL_CHAR(i & 0x3F), '\0' };
        JSFlatString *s = js_NewStringCopyN<NoGC>(cx, buffer, 2);
        if (!s)
            return false;
        length2StaticTable[i] = s->morphAtomizedStringIntoPermanentAtom();
    }

    for (uint32_t i = 0; i < INT_STATIC_LIMIT; i++) {
        if (i < 10) {
            intStaticTable[i] = unitStaticTable[i + '0'];
        } else if (i < 100) {
            size_t index = ((size_t)TO_SMALL_CHAR((i / 10) + '0') << 6) +
                           TO_SMALL_CHAR((i % 10) + '0');
            intStaticTable[i] = length2StaticTable[index];
        } else {
            jschar buffer[] = { jschar('0' + (i / 100)),
                                jschar('0' + ((i / 10) % 10)),
                                jschar('0' + (i % 10)),
                                '\0' };
            JSFlatString *s = js_NewStringCopyN<NoGC>(cx, buffer, 3);
            if (!s)
                return false;
            intStaticTable[i] = s->morphAtomizedStringIntoPermanentAtom();
        }
    }

    return true;
}

void
StaticStrings::trace(JSTracer *trc)
{
    /* These strings never change, so barriers are not needed. */

    for (uint32_t i = 0; i < UNIT_STATIC_LIMIT; i++)
        MarkPermanentAtom(trc, unitStaticTable[i], "unit-static-string");

    for (uint32_t i = 0; i < NUM_SMALL_CHARS * NUM_SMALL_CHARS; i++)
        MarkPermanentAtom(trc, length2StaticTable[i], "length2-static-string");

    /* This may mark some strings more than once, but so be it. */
    for (uint32_t i = 0; i < INT_STATIC_LIMIT; i++)
        MarkPermanentAtom(trc, intStaticTable[i], "int-static-string");
}
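
/*
 * Decide from the characters alone whether 'atom' is one of the pre-created
 * static strings: a single char below UNIT_STATIC_LIMIT, an alphanumeric
 * pair, or the canonical decimal form of an integer below INT_STATIC_LIMIT.
 */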
bool
StaticStrings::isStatic(JSAtom *atom)
{
    const jschar *chars = atom->chars();
    switch (atom->length()) {
      case 1:
        return chars[0] < UNIT_STATIC_LIMIT;
      case 2:
        return fitsInSmallChar(chars[0]) && fitsInSmallChar(chars[1]);
      case 3:
        if ('1' <= chars[0] && chars[0] <= '9' &&
            '0' <= chars[1] && chars[1] <= '9' &&
            '0' <= chars[2] && chars[2] <= '9') {
            int i = (chars[0] - '0') * 100 +
                    (chars[1] - '0') * 10 +
                    (chars[2] - '0');

            return unsigned(i) < INT_STATIC_LIMIT;
        }
        return false;
      default:
        return false;
    }
}

#ifdef DEBUG
void
JSAtom::dump()
{
    fprintf(stderr, "JSAtom* (%p) = ", (void *) this);
    this->JSString::dump();
}
#endif /* DEBUG */