Tue, 06 Jan 2015 21:39:09 +0100
Conditionally force memory storage according to privacy.thirdparty.isolate;
This solves Tor bug #9701, complying with disk avoidance documented in
https://www.torproject.org/projects/torbrowser/design/#disk-avoidance.
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
2 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
3 /* This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
7 /* A type/length-parametrized vector class. */
9 #ifndef mozilla_Vector_h
10 #define mozilla_Vector_h
12 #include "mozilla/Alignment.h"
13 #include "mozilla/AllocPolicy.h"
14 #include "mozilla/ArrayUtils.h" // for PointerRangeSize
15 #include "mozilla/Assertions.h"
16 #include "mozilla/Attributes.h"
17 #include "mozilla/MathAlgorithms.h"
18 #include "mozilla/MemoryReporting.h"
19 #include "mozilla/Move.h"
20 #include "mozilla/NullPtr.h"
21 #include "mozilla/ReentrancyGuard.h"
22 #include "mozilla/TemplateLib.h"
23 #include "mozilla/TypeTraits.h"
25 #include <new> // for placement new
27 /* Silence dire "bugs in previous versions of MSVC have been fixed" warnings */
28 #ifdef _MSC_VER
29 #pragma warning(push)
30 #pragma warning(disable:4345)
31 #endif
33 namespace mozilla {
35 template<typename T, size_t N, class AllocPolicy, class ThisVector>
36 class VectorBase;
38 namespace detail {
40 /*
41 * Check that the given capacity wastes the minimal amount of space if
42 * allocated on the heap. This means that cap*sizeof(T) is as close to a
43 * power-of-two as possible. growStorageBy() is responsible for ensuring
44 * this.
45 */
46 template<typename T>
47 static bool CapacityHasExcessSpace(size_t cap)
48 {
49 size_t size = cap * sizeof(T);
50 return RoundUpPow2(size) - size >= sizeof(T);
51 }
53 /*
54 * This template class provides a default implementation for vector operations
55 * when the element type is not known to be a POD, as judged by IsPod.
56 */
template<typename T, size_t N, class AP, class ThisVector, bool IsPod>
struct VectorImpl
{
  /* Destroys constructed objects in the range [begin, end). */
  static inline void destroy(T* begin, T* end) {
    MOZ_ASSERT(begin <= end);
    for (T* p = begin; p < end; ++p)
      p->~T();
  }

  /* Constructs objects in the uninitialized range [begin, end). */
  static inline void initialize(T* begin, T* end) {
    MOZ_ASSERT(begin <= end);
    for (T* p = begin; p < end; ++p)
      new(p) T();
  }

  /*
   * Copy-constructs objects in the uninitialized range
   * [dst, dst+(srcend-srcbeg)) from the range [srcbeg, srcend).
   */
  template<typename U>
  static inline void copyConstruct(T* dst, const U* srcbeg, const U* srcend) {
    MOZ_ASSERT(srcbeg <= srcend);
    for (const U* p = srcbeg; p < srcend; ++p, ++dst)
      new(dst) T(*p);
  }

  /*
   * Move-constructs objects in the uninitialized range
   * [dst, dst+(srcend-srcbeg)) from the range [srcbeg, srcend).
   */
  template<typename U>
  static inline void moveConstruct(T* dst, U* srcbeg, U* srcend) {
    MOZ_ASSERT(srcbeg <= srcend);
    for (U* p = srcbeg; p < srcend; ++p, ++dst)
      new(dst) T(Move(*p));
  }

  /*
   * Copy-constructs objects in the uninitialized range [dst, dst+n) from the
   * same object u.
   */
  template<typename U>
  static inline void copyConstructN(T* dst, size_t n, const U& u) {
    for (T* end = dst + n; dst < end; ++dst)
      new(dst) T(u);
  }

  /*
   * Grows the given buffer to have capacity newCap, preserving the objects
   * constructed in the range [begin, end) and updating v. Assumes that (1)
   * newCap has not overflowed, and (2) multiplying newCap by sizeof(T) will
   * not overflow.
   */
  static inline bool
  growTo(VectorBase<T, N, AP, ThisVector>& v, size_t newCap) {
    MOZ_ASSERT(!v.usingInlineStorage());
    MOZ_ASSERT(!CapacityHasExcessSpace<T>(newCap));
    T* newbuf = reinterpret_cast<T*>(v.malloc_(newCap * sizeof(T)));
    if (!newbuf)
      return false;
    /* Move every live element into the new buffer... */
    T* dst = newbuf;
    T* src = v.beginNoCheck();
    for (; src < v.endNoCheck(); ++dst, ++src)
      new(dst) T(Move(*src));
    /* ...then destroy the moved-from originals and release the old buffer. */
    VectorImpl::destroy(v.beginNoCheck(), v.endNoCheck());
    v.free_(v.mBegin);
    v.mBegin = newbuf;
    /* v.mLength is unchanged. */
    v.mCapacity = newCap;
    return true;
  }
};
132 /*
133 * This partial template specialization provides a default implementation for
134 * vector operations when the element type is known to be a POD, as judged by
135 * IsPod.
136 */
template<typename T, size_t N, class AP, class ThisVector>
struct VectorImpl<T, N, AP, ThisVector, true>
{
  /* PODs have trivial destructors, so destruction is a no-op. */
  static inline void destroy(T*, T*) {}

  static inline void initialize(T* begin, T* end) {
    /*
     * You would think that memset would be a big win (or even break even)
     * when we know T is a POD. But currently it's not. This is probably
     * because |append| tends to be given small ranges and memset requires
     * a function call that doesn't get inlined.
     *
     * memset(begin, 0, sizeof(T) * (end-begin));
     */
    MOZ_ASSERT(begin <= end);
    for (T* p = begin; p < end; ++p)
      new(p) T();
  }

  template<typename U>
  static inline void copyConstruct(T* dst, const U* srcbeg, const U* srcend) {
    /*
     * See above memset comment. Also, notice that copyConstruct is
     * currently templated (T != U), so memcpy won't work without
     * requiring T == U.
     *
     * memcpy(dst, srcbeg, sizeof(T) * (srcend - srcbeg));
     */
    MOZ_ASSERT(srcbeg <= srcend);
    /* Plain assignment into uninitialized memory is valid only for PODs. */
    for (const U* p = srcbeg; p < srcend; ++p, ++dst)
      *dst = *p;
  }

  /* For PODs a "move" is just a copy, so delegate to copyConstruct. */
  template<typename U>
  static inline void moveConstruct(T* dst, const U* srcbeg, const U* srcend) {
    copyConstruct(dst, srcbeg, srcend);
  }

  static inline void copyConstructN(T* dst, size_t n, const T& t) {
    for (T* end = dst + n; dst < end; ++dst)
      *dst = t;
  }

  static inline bool
  growTo(VectorBase<T, N, AP, ThisVector>& v, size_t newCap) {
    MOZ_ASSERT(!v.usingInlineStorage());
    MOZ_ASSERT(!CapacityHasExcessSpace<T>(newCap));
    size_t oldSize = sizeof(T) * v.mCapacity;
    size_t newSize = sizeof(T) * newCap;
    /* PODs can be relocated bitwise, so realloc_ can do all the work. */
    T* newbuf = reinterpret_cast<T*>(v.realloc_(v.mBegin, oldSize, newSize));
    if (!newbuf)
      return false;
    v.mBegin = newbuf;
    /* v.mLength is unchanged. */
    v.mCapacity = newCap;
    return true;
  }
};
196 } // namespace detail
198 /*
199 * A CRTP base class for vector-like classes. Unless you really really want
200 * your own vector class -- and you almost certainly don't -- you should use
201 * mozilla::Vector instead!
202 *
203 * See mozilla::Vector for interface requirements.
204 */
template<typename T, size_t N, class AllocPolicy, class ThisVector>
class VectorBase : private AllocPolicy
{
  /* utilities */

  static const bool sElemIsPod = IsPod<T>::value;
  typedef detail::VectorImpl<T, N, AllocPolicy, ThisVector, sElemIsPod> Impl;
  friend struct detail::VectorImpl<T, N, AllocPolicy, ThisVector, sElemIsPod>;

  /* Grows capacity so that |incr| more elements fit; may move the buffer. */
  bool growStorageBy(size_t incr);
  /* Moves the inline elements into a fresh heap buffer of capacity |newCap|. */
  bool convertToHeapStorage(size_t newCap);

  /* magic constants */

  static const int sMaxInlineBytes = 1024;

  /* compute constants */

  /*
   * Consider element size to be 1 for buffer sizing if there are 0 inline
   * elements. This allows us to compile when the definition of the element
   * type is not visible here.
   *
   * Explicit specialization is only allowed at namespace scope, so in order
   * to keep everything here, we use a dummy template parameter with partial
   * specialization.
   */
  template<int M, int Dummy>
  struct ElemSize
  {
    static const size_t value = sizeof(T);
  };
  template<int Dummy>
  struct ElemSize<0, Dummy>
  {
    static const size_t value = 1;
  };

  /* Inline element count, clamped so inline storage is at most sMaxInlineBytes. */
  static const size_t sInlineCapacity =
    tl::Min<N, sMaxInlineBytes / ElemSize<N, 0>::value>::value;

  /* Calculate inline buffer size; avoid 0-sized array. */
  static const size_t sInlineBytes =
    tl::Max<1, sInlineCapacity * ElemSize<N, 0>::value>::value;

  /* member data */

  /*
   * Pointer to the buffer, be it inline or heap-allocated. Only [mBegin,
   * mBegin + mLength) hold valid constructed T objects. The range [mBegin +
   * mLength, mBegin + mCapacity) holds uninitialized memory. The range
   * [mBegin + mLength, mBegin + mReserved) also holds uninitialized memory
   * previously allocated by a call to reserve().
   */
  T* mBegin;

  /* Number of elements in the vector. */
  size_t mLength;

  /* Max number of elements storable in the vector without resizing. */
  size_t mCapacity;

#ifdef DEBUG
  /* Max elements of reserved or used space in this vector. */
  size_t mReserved;
#endif

  /* Memory used for inline storage. */
  AlignedStorage<sInlineBytes> storage;

#ifdef DEBUG
  friend class ReentrancyGuard;
  /* DEBUG-only flag used by ReentrancyGuard to detect reentrant use. */
  bool entered;
#endif

  /* private accessors */

  bool usingInlineStorage() const {
    return mBegin == const_cast<VectorBase*>(this)->inlineStorage();
  }

  T* inlineStorage() {
    return static_cast<T*>(storage.addr());
  }

  T* beginNoCheck() const {
    return mBegin;
  }

  T* endNoCheck() {
    return mBegin + mLength;
  }

  const T* endNoCheck() const {
    return mBegin + mLength;
  }

#ifdef DEBUG
  size_t reserved() const {
    MOZ_ASSERT(mReserved <= mCapacity);
    MOZ_ASSERT(mLength <= mReserved);
    return mReserved;
  }
#endif

  /* Append operations guaranteed to succeed due to pre-reserved space. */
  template<typename U> void internalAppend(U&& u);
  template<typename U, size_t O, class BP, class UV>
  void internalAppendAll(const VectorBase<U, O, BP, UV>& u);
  void internalAppendN(const T& t, size_t n);
  template<typename U> void internalAppend(const U* begin, size_t length);

public:
  static const size_t sMaxInlineStorage = N;

  typedef T ElementType;

  VectorBase(AllocPolicy = AllocPolicy());
  VectorBase(ThisVector&&); /* Move constructor. */
  ThisVector& operator=(ThisVector&&); /* Move assignment. */
  ~VectorBase();

  /* accessors */

  const AllocPolicy& allocPolicy() const {
    return *this;
  }

  AllocPolicy& allocPolicy() {
    return *this;
  }

  enum { InlineLength = N };

  size_t length() const {
    return mLength;
  }

  bool empty() const {
    return mLength == 0;
  }

  size_t capacity() const {
    return mCapacity;
  }

  T* begin() {
    MOZ_ASSERT(!entered);
    return mBegin;
  }

  const T* begin() const {
    MOZ_ASSERT(!entered);
    return mBegin;
  }

  T* end() {
    MOZ_ASSERT(!entered);
    return mBegin + mLength;
  }

  const T* end() const {
    MOZ_ASSERT(!entered);
    return mBegin + mLength;
  }

  T& operator[](size_t i) {
    MOZ_ASSERT(!entered);
    MOZ_ASSERT(i < mLength);
    return begin()[i];
  }

  const T& operator[](size_t i) const {
    MOZ_ASSERT(!entered);
    MOZ_ASSERT(i < mLength);
    return begin()[i];
  }

  T& back() {
    MOZ_ASSERT(!entered);
    MOZ_ASSERT(!empty());
    return *(end() - 1);
  }

  const T& back() const {
    MOZ_ASSERT(!entered);
    MOZ_ASSERT(!empty());
    return *(end() - 1);
  }

  /* A simple forward iteration range over [cur_, end_). */
  class Range
  {
    friend class VectorBase;
    T* cur_;
    T* end_;
    Range(T* cur, T* end) : cur_(cur), end_(end) {
      MOZ_ASSERT(cur <= end);
    }

  public:
    Range() {}
    bool empty() const { return cur_ == end_; }
    size_t remain() const { return PointerRangeSize(cur_, end_); }
    T& front() const { MOZ_ASSERT(!empty()); return *cur_; }
    void popFront() { MOZ_ASSERT(!empty()); ++cur_; }
    T popCopyFront() { MOZ_ASSERT(!empty()); return *cur_++; }
  };

  /* A Range over every element currently in the vector. */
  Range all() {
    return Range(begin(), end());
  }

  /* mutators */

  /**
   * Given that the vector is empty and has no inline storage, grow to
   * |capacity|.
   */
  bool initCapacity(size_t request);

  /**
   * If reserve(length() + N) succeeds, the N next appends are guaranteed to
   * succeed.
   */
  bool reserve(size_t request);

  /**
   * Destroy elements in the range [end() - incr, end()). Does not deallocate
   * or unreserve storage for those elements.
   */
  void shrinkBy(size_t incr);

  /** Grow the vector by incr elements. */
  bool growBy(size_t incr);

  /** Call shrinkBy or growBy based on whether newSize > length(). */
  bool resize(size_t newLength);

  /**
   * Increase the length of the vector, but don't initialize the new elements
   * -- leave them as uninitialized memory.
   */
  bool growByUninitialized(size_t incr);
  bool resizeUninitialized(size_t newLength);

  /** Shorthand for shrinkBy(length()). */
  void clear();

  /** Clears and releases any heap-allocated storage. */
  void clearAndFree();

  /**
   * If true, appending |needed| elements won't reallocate elements storage.
   * This *doesn't* mean that infallibleAppend may be used! You still must
   * reserve the extra space, even if this method indicates that appends won't
   * need to reallocate elements storage.
   */
  bool canAppendWithoutRealloc(size_t needed) const;

  /** Potentially fallible append operations. */

  /**
   * This can take either a T& or a T&&. Given a T&&, it moves |u| into the
   * vector, instead of copying it. If it fails, |u| is left unmoved. ("We are
   * not amused.")
   */
  template<typename U> bool append(U&& u);

  template<typename U, size_t O, class BP, class UV>
  bool appendAll(const VectorBase<U, O, BP, UV>& u);
  bool appendN(const T& t, size_t n);
  template<typename U> bool append(const U* begin, const U* end);
  template<typename U> bool append(const U* begin, size_t length);

  /*
   * Guaranteed-infallible append operations for use upon vectors whose
   * memory has been pre-reserved. Don't use this if you haven't reserved the
   * memory!
   */
  template<typename U> void infallibleAppend(U&& u) {
    internalAppend(Forward<U>(u));
  }
  void infallibleAppendN(const T& t, size_t n) {
    internalAppendN(t, n);
  }
  template<typename U> void infallibleAppend(const U* aBegin, const U* aEnd) {
    internalAppend(aBegin, PointerRangeSize(aBegin, aEnd));
  }
  template<typename U> void infallibleAppend(const U* aBegin, size_t aLength) {
    internalAppend(aBegin, aLength);
  }

  /** Remove and destroy the last element. The vector must not be empty. */
  void popBack();

  /** Remove the last element and return it by value. */
  T popCopy();

  /**
   * Transfers ownership of the internal buffer used by this vector to the
   * caller. (It's the caller's responsibility to properly deallocate this
   * buffer, in accordance with this vector's AllocPolicy.) After this call,
   * the vector is empty. Since the returned buffer may need to be allocated
   * (if the elements are currently stored in-place), the call can fail,
   * returning nullptr.
   *
   * N.B. Although a T*, only the range [0, length()) is constructed.
   */
  T* extractRawBuffer();

  /**
   * Transfer ownership of an array of objects into the vector. The caller
   * must have allocated the array in accordance with this vector's
   * AllocPolicy.
   *
   * N.B. This call assumes that there are no uninitialized elements in the
   * passed array.
   */
  void replaceRawBuffer(T* p, size_t length);

  /**
   * Places |val| at position |p|, shifting existing elements from |p| onward
   * one position higher. On success, |p| should not be reused because it'll
   * be a dangling pointer if reallocation of the vector storage occurred; the
   * return value should be used instead. On failure, nullptr is returned.
   *
   * Example usage:
   *
   *   if (!(p = vec.insert(p, val)))
   *     <handle failure>
   *   <keep working with p>
   *
   * This is inherently a linear-time operation. Be careful!
   */
  template<typename U>
  T* insert(T* p, U&& val);

  /**
   * Removes the element |t|, which must fall in the bounds [begin, end),
   * shifting existing elements from |t + 1| onward one position lower.
   */
  void erase(T* t);

  /**
   * Measure the size of the vector's heap-allocated storage.
   */
  size_t sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const;

  /**
   * Like sizeOfExcludingThis, but also measures the size of the vector
   * object (which must be heap-allocated) itself.
   */
  size_t sizeOfIncludingThis(MallocSizeOf mallocSizeOf) const;

  /** Exchange the contents of this vector with |other|. Requires N == 0. */
  void swap(ThisVector& other);

private:
  /* Copying is deliberately disallowed; use appendAll or move instead. */
  VectorBase(const VectorBase&) MOZ_DELETE;
  void operator=(const VectorBase&) MOZ_DELETE;

  /* Move-construct/assign only from our derived class, ThisVector. */
  VectorBase(VectorBase&&) MOZ_DELETE;
  void operator=(VectorBase&&) MOZ_DELETE;
};
568 /* This does the re-entrancy check plus several other sanity checks. */
/*
 * Asserts the core invariants (mLength <= reserved() <= mCapacity, and
 * inline storage implies mCapacity == sInlineCapacity) on entry to a
 * mutating method. NOTE(review): ReentrancyGuard is constructed
 * unconditionally; its checks are presumably active only in DEBUG builds
 * (the |entered| flag it befriends is DEBUG-only) -- confirm in
 * mozilla/ReentrancyGuard.h.
 */
#define MOZ_REENTRANCY_GUARD_ET_AL \
  ReentrancyGuard g(*this); \
  MOZ_ASSERT_IF(usingInlineStorage(), mCapacity == sInlineCapacity); \
  MOZ_ASSERT(reserved() <= mCapacity); \
  MOZ_ASSERT(mLength <= reserved()); \
  MOZ_ASSERT(mLength <= mCapacity)
576 /* Vector Implementation */
/* Construct an empty vector whose buffer is its own inline storage. */
template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE
VectorBase<T, N, AP, TV>::VectorBase(AP ap)
  : AP(ap),
    mLength(0),
    mCapacity(sInlineCapacity)
#ifdef DEBUG
  , mReserved(sInlineCapacity),
    entered(false)
#endif
{
  /* mBegin pointing into |storage| is exactly what usingInlineStorage() tests. */
  mBegin = static_cast<T*>(storage.addr());
}
/* Move constructor. */
template<typename T, size_t N, class AllocPolicy, class TV>
MOZ_ALWAYS_INLINE
VectorBase<T, N, AllocPolicy, TV>::VectorBase(TV&& rhs)
  : AllocPolicy(Move(rhs))
#ifdef DEBUG
  , entered(false)
#endif
{
  mLength = rhs.mLength;
  mCapacity = rhs.mCapacity;
#ifdef DEBUG
  mReserved = rhs.mReserved;
#endif

  if (rhs.usingInlineStorage()) {
    /* We can't move the buffer over in this case, so copy elements. */
    mBegin = static_cast<T*>(storage.addr());
    Impl::moveConstruct(mBegin, rhs.beginNoCheck(), rhs.endNoCheck());
    /*
     * Leave rhs's mLength, mBegin, mCapacity, and mReserved as they are.
     * The elements in its in-line storage still need to be destroyed
     * (rhs's destructor will do that).
     */
  } else {
    /*
     * Take src's buffer, and turn src into an empty vector using
     * in-line storage.
     */
    mBegin = rhs.mBegin;
    rhs.mBegin = static_cast<T*>(rhs.storage.addr());
    rhs.mCapacity = sInlineCapacity;
    rhs.mLength = 0;
#ifdef DEBUG
    rhs.mReserved = sInlineCapacity;
#endif
  }
}
/* Move assignment. */
template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE
TV&
VectorBase<T, N, AP, TV>::operator=(TV&& rhs)
{
  MOZ_ASSERT(this != &rhs, "self-move assignment is prohibited");
  TV* tv = static_cast<TV*>(this);
  /* Destroy in place, then rebuild this object via the move constructor. */
  tv->~TV();
  new(tv) TV(Move(rhs));
  return *tv;
}
template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE
VectorBase<T, N, AP, TV>::~VectorBase()
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  Impl::destroy(beginNoCheck(), endNoCheck());
  /* Inline storage is part of this object; only a heap buffer needs freeing. */
  if (!usingInlineStorage())
    this->free_(beginNoCheck());
}
/*
 * This function will create a new heap buffer with capacity newCap,
 * move all elements in the inline buffer to this new buffer,
 * and fail on OOM.
 */
template<typename T, size_t N, class AP, class TV>
inline bool
VectorBase<T, N, AP, TV>::convertToHeapStorage(size_t newCap)
{
  MOZ_ASSERT(usingInlineStorage());

  /* Allocate buffer. */
  MOZ_ASSERT(!detail::CapacityHasExcessSpace<T>(newCap));
  T* newBuf = reinterpret_cast<T*>(this->malloc_(newCap * sizeof(T)));
  if (!newBuf)
    return false;

  /* Copy inline elements into heap buffer. */
  Impl::moveConstruct(newBuf, beginNoCheck(), endNoCheck());
  Impl::destroy(beginNoCheck(), endNoCheck());

  /* Switch in heap buffer. The inline buffer stays part of the object. */
  mBegin = newBuf;
  /* mLength is unchanged. */
  mCapacity = newCap;
  return true;
}
/*
 * Grows storage so that at least |incr| more elements fit, choosing a new
 * capacity whose byte size is as close to a power of two as possible.
 * Returns false (reporting overflow where applicable) on failure.
 */
template<typename T, size_t N, class AP, class TV>
MOZ_NEVER_INLINE bool
VectorBase<T, N, AP, TV>::growStorageBy(size_t incr)
{
  MOZ_ASSERT(mLength + incr > mCapacity);
  MOZ_ASSERT_IF(!usingInlineStorage(),
                !detail::CapacityHasExcessSpace<T>(mCapacity));

  /*
   * When choosing a new capacity, its size should be as close to 2**N bytes
   * as possible. 2**N-sized requests are best because they are unlikely to
   * be rounded up by the allocator. Asking for a 2**N number of elements
   * isn't as good, because if sizeof(T) is not a power-of-two that would
   * result in a non-2**N request size.
   */

  size_t newCap;

  if (incr == 1) {
    if (usingInlineStorage()) {
      /* This case occurs in ~70--80% of the calls to this function. */
      size_t newSize =
        tl::RoundUpPow2<(sInlineCapacity + 1) * sizeof(T)>::value;
      newCap = newSize / sizeof(T);
      goto convert;
    }

    if (mLength == 0) {
      /* This case occurs in ~0--10% of the calls to this function. */
      newCap = 1;
      goto grow;
    }

    /* This case occurs in ~15--20% of the calls to this function. */

    /*
     * Will mLength * 4 *sizeof(T) overflow? This condition limits a vector
     * to 1GB of memory on a 32-bit system, which is a reasonable limit. It
     * also ensures that
     *
     *   static_cast<char*>(end()) - static_cast<char*>(begin())
     *
     * doesn't overflow ptrdiff_t (see bug 510319).
     */
    if (mLength & tl::MulOverflowMask<4 * sizeof(T)>::value) {
      this->reportAllocOverflow();
      return false;
    }

    /*
     * If we reach here, the existing capacity will have a size that is already
     * as close to 2^N as sizeof(T) will allow. Just double the capacity, and
     * then there might be space for one more element.
     */
    newCap = mLength * 2;
    if (detail::CapacityHasExcessSpace<T>(newCap))
      newCap += 1;
  } else {
    /* This case occurs in ~2% of the calls to this function. */
    size_t newMinCap = mLength + incr;

    /* Did mLength + incr overflow? Will newCap * sizeof(T) overflow? */
    if (newMinCap < mLength ||
        newMinCap & tl::MulOverflowMask<2 * sizeof(T)>::value)
    {
      this->reportAllocOverflow();
      return false;
    }

    /* Round the byte size up to a power of two, then convert back to elements. */
    size_t newMinSize = newMinCap * sizeof(T);
    size_t newSize = RoundUpPow2(newMinSize);
    newCap = newSize / sizeof(T);
  }

  if (usingInlineStorage()) {
  convert:
    return convertToHeapStorage(newCap);
  }

grow:
  return Impl::growTo(*this, newCap);
}
764 template<typename T, size_t N, class AP, class TV>
765 inline bool
766 VectorBase<T, N, AP, TV>::initCapacity(size_t request)
767 {
768 MOZ_ASSERT(empty());
769 MOZ_ASSERT(usingInlineStorage());
770 if (request == 0)
771 return true;
772 T* newbuf = reinterpret_cast<T*>(this->malloc_(request * sizeof(T)));
773 if (!newbuf)
774 return false;
775 mBegin = newbuf;
776 mCapacity = request;
777 #ifdef DEBUG
778 mReserved = request;
779 #endif
780 return true;
781 }
783 template<typename T, size_t N, class AP, class TV>
784 inline bool
785 VectorBase<T, N, AP, TV>::reserve(size_t request)
786 {
787 MOZ_REENTRANCY_GUARD_ET_AL;
788 if (request > mCapacity && !growStorageBy(request - mLength))
789 return false;
791 #ifdef DEBUG
792 if (request > mReserved)
793 mReserved = request;
794 MOZ_ASSERT(mLength <= mReserved);
795 MOZ_ASSERT(mReserved <= mCapacity);
796 #endif
797 return true;
798 }
800 template<typename T, size_t N, class AP, class TV>
801 inline void
802 VectorBase<T, N, AP, TV>::shrinkBy(size_t incr)
803 {
804 MOZ_REENTRANCY_GUARD_ET_AL;
805 MOZ_ASSERT(incr <= mLength);
806 Impl::destroy(endNoCheck() - incr, endNoCheck());
807 mLength -= incr;
808 }
810 template<typename T, size_t N, class AP, class TV>
811 MOZ_ALWAYS_INLINE bool
812 VectorBase<T, N, AP, TV>::growBy(size_t incr)
813 {
814 MOZ_REENTRANCY_GUARD_ET_AL;
815 if (incr > mCapacity - mLength && !growStorageBy(incr))
816 return false;
818 MOZ_ASSERT(mLength + incr <= mCapacity);
819 T* newend = endNoCheck() + incr;
820 Impl::initialize(endNoCheck(), newend);
821 mLength += incr;
822 #ifdef DEBUG
823 if (mLength > mReserved)
824 mReserved = mLength;
825 #endif
826 return true;
827 }
829 template<typename T, size_t N, class AP, class TV>
830 MOZ_ALWAYS_INLINE bool
831 VectorBase<T, N, AP, TV>::growByUninitialized(size_t incr)
832 {
833 MOZ_REENTRANCY_GUARD_ET_AL;
834 if (incr > mCapacity - mLength && !growStorageBy(incr))
835 return false;
837 MOZ_ASSERT(mLength + incr <= mCapacity);
838 mLength += incr;
839 #ifdef DEBUG
840 if (mLength > mReserved)
841 mReserved = mLength;
842 #endif
843 return true;
844 }
846 template<typename T, size_t N, class AP, class TV>
847 inline bool
848 VectorBase<T, N, AP, TV>::resize(size_t newLength)
849 {
850 size_t curLength = mLength;
851 if (newLength > curLength)
852 return growBy(newLength - curLength);
853 shrinkBy(curLength - newLength);
854 return true;
855 }
857 template<typename T, size_t N, class AP, class TV>
858 MOZ_ALWAYS_INLINE bool
859 VectorBase<T, N, AP, TV>::resizeUninitialized(size_t newLength)
860 {
861 size_t curLength = mLength;
862 if (newLength > curLength)
863 return growByUninitialized(newLength - curLength);
864 shrinkBy(curLength - newLength);
865 return true;
866 }
template<typename T, size_t N, class AP, class TV>
inline void
VectorBase<T, N, AP, TV>::clear()
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  /* Destroy every element but keep the current storage and capacity. */
  Impl::destroy(beginNoCheck(), endNoCheck());
  mLength = 0;
}
877 template<typename T, size_t N, class AP, class TV>
878 inline void
879 VectorBase<T, N, AP, TV>::clearAndFree()
880 {
881 clear();
883 if (usingInlineStorage())
884 return;
886 this->free_(beginNoCheck());
887 mBegin = static_cast<T*>(storage.addr());
888 mCapacity = sInlineCapacity;
889 #ifdef DEBUG
890 mReserved = sInlineCapacity;
891 #endif
892 }
894 template<typename T, size_t N, class AP, class TV>
895 inline bool
896 VectorBase<T, N, AP, TV>::canAppendWithoutRealloc(size_t needed) const
897 {
898 return mLength + needed <= mCapacity;
899 }
template<typename T, size_t N, class AP, class TV>
template<typename U, size_t O, class BP, class UV>
MOZ_ALWAYS_INLINE void
VectorBase<T, N, AP, TV>::internalAppendAll(const VectorBase<U, O, BP, UV>& other)
{
  /* Infallible: the caller must already have reserved room for |other|. */
  internalAppend(other.begin(), other.length());
}
template<typename T, size_t N, class AP, class TV>
template<typename U>
MOZ_ALWAYS_INLINE void
VectorBase<T, N, AP, TV>::internalAppend(U&& u)
{
  /* Infallible: space for one more element must have been reserved. */
  MOZ_ASSERT(mLength + 1 <= mReserved);
  MOZ_ASSERT(mReserved <= mCapacity);
  new(endNoCheck()) T(Forward<U>(u));
  ++mLength;
}
/** Append |needed| copies of |t|; returns false on OOM. */
template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE bool
VectorBase<T, N, AP, TV>::appendN(const T& t, size_t needed)
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  if (mLength + needed > mCapacity && !growStorageBy(needed))
    return false;

#ifdef DEBUG
  /* A successful append implicitly reserves the space it consumes. */
  if (mLength + needed > mReserved)
    mReserved = mLength + needed;
#endif
  internalAppendN(t, needed);
  return true;
}
template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE void
VectorBase<T, N, AP, TV>::internalAppendN(const T& t, size_t needed)
{
  /* Infallible: space for |needed| elements must have been reserved. */
  MOZ_ASSERT(mLength + needed <= mReserved);
  MOZ_ASSERT(mReserved <= mCapacity);
  Impl::copyConstructN(endNoCheck(), needed, t);
  mLength += needed;
}
template<typename T, size_t N, class AP, class TV>
template<typename U>
inline T*
VectorBase<T, N, AP, TV>::insert(T* p, U&& val)
{
  MOZ_ASSERT(begin() <= p);
  MOZ_ASSERT(p <= end());
  size_t pos = p - begin();
  MOZ_ASSERT(pos <= mLength);
  size_t oldLength = mLength;
  if (pos == oldLength) {
    /* Inserting at the end is just an append. */
    if (!append(Forward<U>(val)))
      return nullptr;
  } else {
    T oldBack = Move(back());
    if (!append(Move(oldBack))) /* Dup the last element. */
      return nullptr;
    /* Shift [pos, oldLength) up one slot, then drop |val| into place. */
    for (size_t i = oldLength; i > pos; --i)
      (*this)[i] = Move((*this)[i - 1]);
    (*this)[pos] = Forward<U>(val);
  }
  /* |p| may be stale if append() reallocated; recompute from the index. */
  return begin() + pos;
}
970 template<typename T, size_t N, class AP, class TV>
971 inline void
972 VectorBase<T, N, AP, TV>::erase(T* it)
973 {
974 MOZ_ASSERT(begin() <= it);
975 MOZ_ASSERT(it < end());
976 while (it + 1 < end()) {
977 *it = *(it + 1);
978 ++it;
979 }
980 popBack();
981 }
/** Append the range [insBegin, insEnd); returns false on OOM. */
template<typename T, size_t N, class AP, class TV>
template<typename U>
MOZ_ALWAYS_INLINE bool
VectorBase<T, N, AP, TV>::append(const U* insBegin, const U* insEnd)
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  size_t needed = PointerRangeSize(insBegin, insEnd);
  if (mLength + needed > mCapacity && !growStorageBy(needed))
    return false;

#ifdef DEBUG
  /* A successful append implicitly reserves the space it consumes. */
  if (mLength + needed > mReserved)
    mReserved = mLength + needed;
#endif
  internalAppend(insBegin, needed);
  return true;
}
template<typename T, size_t N, class AP, class TV>
template<typename U>
MOZ_ALWAYS_INLINE void
VectorBase<T, N, AP, TV>::internalAppend(const U* insBegin, size_t insLength)
{
  /* Infallible: space for |insLength| elements must have been reserved. */
  MOZ_ASSERT(mLength + insLength <= mReserved);
  MOZ_ASSERT(mReserved <= mCapacity);
  Impl::copyConstruct(endNoCheck(), insBegin, insBegin + insLength);
  mLength += insLength;
}
/** Append a single element (copied or moved); returns false on OOM. */
template<typename T, size_t N, class AP, class TV>
template<typename U>
MOZ_ALWAYS_INLINE bool
VectorBase<T, N, AP, TV>::append(U&& u)
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  if (mLength == mCapacity && !growStorageBy(1))
    return false;

#ifdef DEBUG
  /* A successful append implicitly reserves the space it consumes. */
  if (mLength + 1 > mReserved)
    mReserved = mLength + 1;
#endif
  internalAppend(Forward<U>(u));
  return true;
}
/** Append every element of |other|; returns false on OOM. */
template<typename T, size_t N, class AP, class TV>
template<typename U, size_t O, class BP, class UV>
MOZ_ALWAYS_INLINE bool
VectorBase<T, N, AP, TV>::appendAll(const VectorBase<U, O, BP, UV>& other)
{
  return append(other.begin(), other.length());
}
/** Append |insLength| elements starting at |insBegin|; returns false on OOM. */
template<typename T, size_t N, class AP, class TV>
template<class U>
MOZ_ALWAYS_INLINE bool
VectorBase<T, N, AP, TV>::append(const U *insBegin, size_t insLength)
{
  return append(insBegin, insBegin + insLength);
}
template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE void
VectorBase<T, N, AP, TV>::popBack()
{
  MOZ_REENTRANCY_GUARD_ET_AL;
  MOZ_ASSERT(!empty());
  /* Shrink first so endNoCheck() names the element being destroyed. */
  --mLength;
  endNoCheck()->~T();
}
/** Copy out the last element, remove it, and return the copy. */
template<typename T, size_t N, class AP, class TV>
MOZ_ALWAYS_INLINE T
VectorBase<T, N, AP, TV>::popCopy()
{
  T ret = back();
  popBack();
  return ret;
}
template<typename T, size_t N, class AP, class TV>
inline T*
VectorBase<T, N, AP, TV>::extractRawBuffer()
{
  T* ret;
  if (usingInlineStorage()) {
    /*
     * Inline elements can't be handed out directly; copy them into a heap
     * buffer sized to exactly the current length (may fail on OOM).
     */
    ret = reinterpret_cast<T*>(this->malloc_(mLength * sizeof(T)));
    if (!ret)
      return nullptr;
    Impl::copyConstruct(ret, beginNoCheck(), endNoCheck());
    Impl::destroy(beginNoCheck(), endNoCheck());
    /* mBegin, mCapacity are unchanged. */
    mLength = 0;
  } else {
    /* Hand over the heap buffer and reset to empty inline storage. */
    ret = mBegin;
    mBegin = static_cast<T*>(storage.addr());
    mLength = 0;
    mCapacity = sInlineCapacity;
#ifdef DEBUG
    mReserved = sInlineCapacity;
#endif
  }
  return ret;
}
template<typename T, size_t N, class AP, class TV>
inline void
VectorBase<T, N, AP, TV>::replaceRawBuffer(T* p, size_t aLength)
{
  MOZ_REENTRANCY_GUARD_ET_AL;

  /* Destroy what we have. */
  Impl::destroy(beginNoCheck(), endNoCheck());
  if (!usingInlineStorage())
    this->free_(beginNoCheck());

  /* Take in the new buffer. */
  if (aLength <= sInlineCapacity) {
    /*
     * We convert to inline storage if possible, even though p might
     * otherwise be acceptable. Maybe this behaviour should be
     * specifiable with an argument to this function.
     */
    mBegin = static_cast<T*>(storage.addr());
    mLength = aLength;
    mCapacity = sInlineCapacity;
    /* Move the elements inline, then destroy and free the donated buffer. */
    Impl::moveConstruct(mBegin, p, p + aLength);
    Impl::destroy(p, p + aLength);
    this->free_(p);
  } else {
    mBegin = p;
    mLength = aLength;
    mCapacity = aLength;
  }
#ifdef DEBUG
  mReserved = aLength;
#endif
}
1123 template<typename T, size_t N, class AP, class TV>
1124 inline size_t
1125 VectorBase<T, N, AP, TV>::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
1126 {
1127 return usingInlineStorage() ? 0 : mallocSizeOf(beginNoCheck());
1128 }
template<typename T, size_t N, class AP, class TV>
inline size_t
VectorBase<T, N, AP, TV>::sizeOfIncludingThis(MallocSizeOf mallocSizeOf) const
{
  /* Only valid when the vector object itself is heap-allocated. */
  return mallocSizeOf(this) + sizeOfExcludingThis(mallocSizeOf);
}
template<typename T, size_t N, class AP, class TV>
inline void
VectorBase<T, N, AP, TV>::swap(TV& other)
{
  static_assert(N == 0,
                "still need to implement this for N != 0");

  // This only works when inline storage is always empty.
  // With N == 0, a vector "using inline storage" holds no elements, so only
  // the buffer pointers (not element contents) need to change hands.
  if (!usingInlineStorage() && other.usingInlineStorage()) {
    other.mBegin = mBegin;
    mBegin = inlineStorage();
  } else if (usingInlineStorage() && !other.usingInlineStorage()) {
    mBegin = other.mBegin;
    other.mBegin = other.inlineStorage();
  } else if (!usingInlineStorage() && !other.usingInlineStorage()) {
    Swap(mBegin, other.mBegin);
  } else {
    // This case is a no-op, since we'd set both to use their inline storage.
  }

  Swap(mLength, other.mLength);
  Swap(mCapacity, other.mCapacity);
#ifdef DEBUG
  Swap(mReserved, other.mReserved);
#endif
}
1164 /*
1165 * STL-like container providing a short-lived, dynamic buffer. Vector calls the
1166 * constructors/destructors of all elements stored in its internal buffer, so
1167 * non-PODs may be safely used. Additionally, Vector will store the first N
1168 * elements in-place before resorting to dynamic allocation.
1169 *
1170 * T requirements:
1171 * - default and copy constructible, assignable, destructible
1172 * - operations do not throw
1173 * N requirements:
1174 * - any value, however, N is clamped to min/max values
1175 * AllocPolicy:
1176 * - see "Allocation policies" in AllocPolicy.h (defaults to
1177 * mozilla::MallocAllocPolicy)
1178 *
1179 * Vector is not reentrant: T member functions called during Vector member
1180 * functions must not call back into the same object!
1181 */
template<typename T,
         size_t MinInlineCapacity = 0,
         class AllocPolicy = MallocAllocPolicy>
class Vector
  : public VectorBase<T,
                      MinInlineCapacity,
                      AllocPolicy,
                      Vector<T, MinInlineCapacity, AllocPolicy> >
{
  typedef VectorBase<T, MinInlineCapacity, AllocPolicy, Vector> Base;

public:
  /* Construct an empty vector, optionally with a specific alloc policy. */
  Vector(AllocPolicy alloc = AllocPolicy()) : Base(alloc) {}
  /* Move construction/assignment steal |vec|'s heap buffer where possible. */
  Vector(Vector&& vec) : Base(Move(vec)) {}
  Vector& operator=(Vector&& vec) {
    return Base::operator=(Move(vec));
  }
};
1201 } // namespace mozilla
1203 #ifdef _MSC_VER
1204 #pragma warning(pop)
1205 #endif
1207 #endif /* mozilla_Vector_h */