michael@0 | 1 | /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ |
michael@0 | 2 | /* vim: set ts=8 sts=2 et sw=2 tw=80: */ |
michael@0 | 3 | /* This Source Code Form is subject to the terms of the Mozilla Public |
michael@0 | 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this |
michael@0 | 5 | * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ |
michael@0 | 6 | |
michael@0 | 7 | /* A type/length-parametrized vector class. */ |
michael@0 | 8 | |
michael@0 | 9 | #ifndef mozilla_Vector_h |
michael@0 | 10 | #define mozilla_Vector_h |
michael@0 | 11 | |
michael@0 | 12 | #include "mozilla/Alignment.h" |
michael@0 | 13 | #include "mozilla/AllocPolicy.h" |
michael@0 | 14 | #include "mozilla/ArrayUtils.h" // for PointerRangeSize |
michael@0 | 15 | #include "mozilla/Assertions.h" |
michael@0 | 16 | #include "mozilla/Attributes.h" |
michael@0 | 17 | #include "mozilla/MathAlgorithms.h" |
michael@0 | 18 | #include "mozilla/MemoryReporting.h" |
michael@0 | 19 | #include "mozilla/Move.h" |
michael@0 | 20 | #include "mozilla/NullPtr.h" |
michael@0 | 21 | #include "mozilla/ReentrancyGuard.h" |
michael@0 | 22 | #include "mozilla/TemplateLib.h" |
michael@0 | 23 | #include "mozilla/TypeTraits.h" |
michael@0 | 24 | |
michael@0 | 25 | #include <new> // for placement new |
michael@0 | 26 | |
michael@0 | 27 | /* Silence dire "bugs in previous versions of MSVC have been fixed" warnings */ |
michael@0 | 28 | #ifdef _MSC_VER |
michael@0 | 29 | #pragma warning(push) |
michael@0 | 30 | #pragma warning(disable:4345) |
michael@0 | 31 | #endif |
michael@0 | 32 | |
michael@0 | 33 | namespace mozilla { |
michael@0 | 34 | |
michael@0 | 35 | template<typename T, size_t N, class AllocPolicy, class ThisVector> |
michael@0 | 36 | class VectorBase; |
michael@0 | 37 | |
michael@0 | 38 | namespace detail { |
michael@0 | 39 | |
michael@0 | 40 | /* |
michael@0 | 41 | * Check whether the given capacity wastes space when allocated on the heap: |
michael@0 | 42 | * true means cap*sizeof(T) falls short of the next power of two by at least |
michael@0 | 43 | * one element's worth of bytes. growStorageBy() is responsible for choosing |
michael@0 | 44 | * capacities without such excess space. |
michael@0 | 45 | */ |
michael@0 | 46 | template<typename T> |
michael@0 | 47 | static bool CapacityHasExcessSpace(size_t cap) |
michael@0 | 48 | { |
michael@0 | 49 | size_t size = cap * sizeof(T); |
michael@0 | 50 | return RoundUpPow2(size) - size >= sizeof(T); |
michael@0 | 51 | } |
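
/*
 * For illustration (arbitrary numbers): with sizeof(T) == 12, a capacity of 3
 * occupies 36 bytes and RoundUpPow2(36) == 64 leaves 28 bytes of slack, room
 * for two more elements, so that capacity has excess space. A capacity of 5
 * occupies 60 bytes, leaving only 4 bytes below 64, so it does not.
 */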
michael@0 | 52 | |
michael@0 | 53 | /* |
michael@0 | 54 | * This template class provides a default implementation for vector operations |
michael@0 | 55 | * when the element type is not known to be a POD, as judged by IsPod. |
michael@0 | 56 | */ |
michael@0 | 57 | template<typename T, size_t N, class AP, class ThisVector, bool IsPod> |
michael@0 | 58 | struct VectorImpl |
michael@0 | 59 | { |
michael@0 | 60 | /* Destroys constructed objects in the range [begin, end). */ |
michael@0 | 61 | static inline void destroy(T* begin, T* end) { |
michael@0 | 62 | MOZ_ASSERT(begin <= end); |
michael@0 | 63 | for (T* p = begin; p < end; ++p) |
michael@0 | 64 | p->~T(); |
michael@0 | 65 | } |
michael@0 | 66 | |
michael@0 | 67 | /* Constructs objects in the uninitialized range [begin, end). */ |
michael@0 | 68 | static inline void initialize(T* begin, T* end) { |
michael@0 | 69 | MOZ_ASSERT(begin <= end); |
michael@0 | 70 | for (T* p = begin; p < end; ++p) |
michael@0 | 71 | new(p) T(); |
michael@0 | 72 | } |
michael@0 | 73 | |
michael@0 | 74 | /* |
michael@0 | 75 | * Copy-constructs objects in the uninitialized range |
michael@0 | 76 | * [dst, dst+(srcend-srcbeg)) from the range [srcbeg, srcend). |
michael@0 | 77 | */ |
michael@0 | 78 | template<typename U> |
michael@0 | 79 | static inline void copyConstruct(T* dst, const U* srcbeg, const U* srcend) { |
michael@0 | 80 | MOZ_ASSERT(srcbeg <= srcend); |
michael@0 | 81 | for (const U* p = srcbeg; p < srcend; ++p, ++dst) |
michael@0 | 82 | new(dst) T(*p); |
michael@0 | 83 | } |
michael@0 | 84 | |
michael@0 | 85 | /* |
michael@0 | 86 | * Move-constructs objects in the uninitialized range |
michael@0 | 87 | * [dst, dst+(srcend-srcbeg)) from the range [srcbeg, srcend). |
michael@0 | 88 | */ |
michael@0 | 89 | template<typename U> |
michael@0 | 90 | static inline void moveConstruct(T* dst, U* srcbeg, U* srcend) { |
michael@0 | 91 | MOZ_ASSERT(srcbeg <= srcend); |
michael@0 | 92 | for (U* p = srcbeg; p < srcend; ++p, ++dst) |
michael@0 | 93 | new(dst) T(Move(*p)); |
michael@0 | 94 | } |
michael@0 | 95 | |
michael@0 | 96 | /* |
michael@0 | 97 | * Copy-constructs objects in the uninitialized range [dst, dst+n) from the |
michael@0 | 98 | * same object u. |
michael@0 | 99 | */ |
michael@0 | 100 | template<typename U> |
michael@0 | 101 | static inline void copyConstructN(T* dst, size_t n, const U& u) { |
michael@0 | 102 | for (T* end = dst + n; dst < end; ++dst) |
michael@0 | 103 | new(dst) T(u); |
michael@0 | 104 | } |
michael@0 | 105 | |
michael@0 | 106 | /* |
michael@0 | 107 | * Grows the given buffer to have capacity newCap, preserving the objects |
michael@0 | 108 | * constructed in the range [v.begin(), v.end()) and updating v. Assumes that (1) |
michael@0 | 109 | * newCap has not overflowed, and (2) multiplying newCap by sizeof(T) will |
michael@0 | 110 | * not overflow. |
michael@0 | 111 | */ |
michael@0 | 112 | static inline bool |
michael@0 | 113 | growTo(VectorBase<T, N, AP, ThisVector>& v, size_t newCap) { |
michael@0 | 114 | MOZ_ASSERT(!v.usingInlineStorage()); |
michael@0 | 115 | MOZ_ASSERT(!CapacityHasExcessSpace<T>(newCap)); |
michael@0 | 116 | T* newbuf = reinterpret_cast<T*>(v.malloc_(newCap * sizeof(T))); |
michael@0 | 117 | if (!newbuf) |
michael@0 | 118 | return false; |
michael@0 | 119 | T* dst = newbuf; |
michael@0 | 120 | T* src = v.beginNoCheck(); |
michael@0 | 121 | for (; src < v.endNoCheck(); ++dst, ++src) |
michael@0 | 122 | new(dst) T(Move(*src)); |
michael@0 | 123 | VectorImpl::destroy(v.beginNoCheck(), v.endNoCheck()); |
michael@0 | 124 | v.free_(v.mBegin); |
michael@0 | 125 | v.mBegin = newbuf; |
michael@0 | 126 | /* v.mLength is unchanged. */ |
michael@0 | 127 | v.mCapacity = newCap; |
michael@0 | 128 | return true; |
michael@0 | 129 | } |
michael@0 | 130 | }; |
michael@0 | 131 | |
michael@0 | 132 | /* |
michael@0 | 133 | * This partial template specialization provides a default implementation for |
michael@0 | 134 | * vector operations when the element type is known to be a POD, as judged by |
michael@0 | 135 | * IsPod. |
michael@0 | 136 | */ |
michael@0 | 137 | template<typename T, size_t N, class AP, class ThisVector> |
michael@0 | 138 | struct VectorImpl<T, N, AP, ThisVector, true> |
michael@0 | 139 | { |
michael@0 | 140 | static inline void destroy(T*, T*) {} |
michael@0 | 141 | |
michael@0 | 142 | static inline void initialize(T* begin, T* end) { |
michael@0 | 143 | /* |
michael@0 | 144 | * You would think that memset would be a big win (or even break even) |
michael@0 | 145 | * when we know T is a POD. But currently it's not. This is probably |
michael@0 | 146 | * because |append| tends to be given small ranges and memset requires |
michael@0 | 147 | * a function call that doesn't get inlined. |
michael@0 | 148 | * |
michael@0 | 149 | * memset(begin, 0, sizeof(T) * (end-begin)); |
michael@0 | 150 | */ |
michael@0 | 151 | MOZ_ASSERT(begin <= end); |
michael@0 | 152 | for (T* p = begin; p < end; ++p) |
michael@0 | 153 | new(p) T(); |
michael@0 | 154 | } |
michael@0 | 155 | |
michael@0 | 156 | template<typename U> |
michael@0 | 157 | static inline void copyConstruct(T* dst, const U* srcbeg, const U* srcend) { |
michael@0 | 158 | /* |
michael@0 | 159 | * See above memset comment. Also, notice that copyConstruct is |
michael@0 | 160 | * currently templated (T != U), so memcpy won't work without |
michael@0 | 161 | * requiring T == U. |
michael@0 | 162 | * |
michael@0 | 163 | * memcpy(dst, srcbeg, sizeof(T) * (srcend - srcbeg)); |
michael@0 | 164 | */ |
michael@0 | 165 | MOZ_ASSERT(srcbeg <= srcend); |
michael@0 | 166 | for (const U* p = srcbeg; p < srcend; ++p, ++dst) |
michael@0 | 167 | *dst = *p; |
michael@0 | 168 | } |
michael@0 | 169 | |
michael@0 | 170 | template<typename U> |
michael@0 | 171 | static inline void moveConstruct(T* dst, const U* srcbeg, const U* srcend) { |
michael@0 | 172 | copyConstruct(dst, srcbeg, srcend); |
michael@0 | 173 | } |
michael@0 | 174 | |
michael@0 | 175 | static inline void copyConstructN(T* dst, size_t n, const T& t) { |
michael@0 | 176 | for (T* end = dst + n; dst < end; ++dst) |
michael@0 | 177 | *dst = t; |
michael@0 | 178 | } |
michael@0 | 179 | |
michael@0 | 180 | static inline bool |
michael@0 | 181 | growTo(VectorBase<T, N, AP, ThisVector>& v, size_t newCap) { |
michael@0 | 182 | MOZ_ASSERT(!v.usingInlineStorage()); |
michael@0 | 183 | MOZ_ASSERT(!CapacityHasExcessSpace<T>(newCap)); |
michael@0 | 184 | size_t oldSize = sizeof(T) * v.mCapacity; |
michael@0 | 185 | size_t newSize = sizeof(T) * newCap; |
michael@0 | 186 | T* newbuf = reinterpret_cast<T*>(v.realloc_(v.mBegin, oldSize, newSize)); |
michael@0 | 187 | if (!newbuf) |
michael@0 | 188 | return false; |
michael@0 | 189 | v.mBegin = newbuf; |
michael@0 | 190 | /* v.mLength is unchanged. */ |
michael@0 | 191 | v.mCapacity = newCap; |
michael@0 | 192 | return true; |
michael@0 | 193 | } |
michael@0 | 194 | }; |
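
/*
 * For illustration: Vector<uint32_t> or Vector<char> takes this POD path, so
 * destroy() is a no-op and growTo() may realloc_ the buffer in place, whereas
 * an element type with a constructor or destructor (a smart pointer, say)
 * uses the general VectorImpl above, which placement-news and explicitly
 * destroys each element.
 */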
michael@0 | 195 | |
michael@0 | 196 | } // namespace detail |
michael@0 | 197 | |
michael@0 | 198 | /* |
michael@0 | 199 | * A CRTP base class for vector-like classes. Unless you really really want |
michael@0 | 200 | * your own vector class -- and you almost certainly don't -- you should use |
michael@0 | 201 | * mozilla::Vector instead! |
michael@0 | 202 | * |
michael@0 | 203 | * See mozilla::Vector for interface requirements. |
michael@0 | 204 | */ |
michael@0 | 205 | template<typename T, size_t N, class AllocPolicy, class ThisVector> |
michael@0 | 206 | class VectorBase : private AllocPolicy |
michael@0 | 207 | { |
michael@0 | 208 | /* utilities */ |
michael@0 | 209 | |
michael@0 | 210 | static const bool sElemIsPod = IsPod<T>::value; |
michael@0 | 211 | typedef detail::VectorImpl<T, N, AllocPolicy, ThisVector, sElemIsPod> Impl; |
michael@0 | 212 | friend struct detail::VectorImpl<T, N, AllocPolicy, ThisVector, sElemIsPod>; |
michael@0 | 213 | |
michael@0 | 214 | bool growStorageBy(size_t incr); |
michael@0 | 215 | bool convertToHeapStorage(size_t newCap); |
michael@0 | 216 | |
michael@0 | 217 | /* magic constants */ |
michael@0 | 218 | |
michael@0 | 219 | static const int sMaxInlineBytes = 1024; |
michael@0 | 220 | |
michael@0 | 221 | /* compute constants */ |
michael@0 | 222 | |
michael@0 | 223 | /* |
michael@0 | 224 | * Consider element size to be 1 for buffer sizing if there are 0 inline |
michael@0 | 225 | * elements. This allows us to compile when the definition of the element |
michael@0 | 226 | * type is not visible here. |
michael@0 | 227 | * |
michael@0 | 228 | * Explicit specialization is only allowed at namespace scope, so in order |
michael@0 | 229 | * to keep everything here, we use a dummy template parameter with partial |
michael@0 | 230 | * specialization. |
michael@0 | 231 | */ |
michael@0 | 232 | template<int M, int Dummy> |
michael@0 | 233 | struct ElemSize |
michael@0 | 234 | { |
michael@0 | 235 | static const size_t value = sizeof(T); |
michael@0 | 236 | }; |
michael@0 | 237 | template<int Dummy> |
michael@0 | 238 | struct ElemSize<0, Dummy> |
michael@0 | 239 | { |
michael@0 | 240 | static const size_t value = 1; |
michael@0 | 241 | }; |
michael@0 | 242 | |
michael@0 | 243 | static const size_t sInlineCapacity = |
michael@0 | 244 | tl::Min<N, sMaxInlineBytes / ElemSize<N, 0>::value>::value; |
michael@0 | 245 | |
michael@0 | 246 | /* Calculate inline buffer size; avoid 0-sized array. */ |
michael@0 | 247 | static const size_t sInlineBytes = |
michael@0 | 248 | tl::Max<1, sInlineCapacity * ElemSize<N, 0>::value>::value; |
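
/*
 * For illustration: with T == double (sizeof == 8 on common platforms) and
 * N == 1000, the requested 8000 inline bytes are clamped: sInlineCapacity
 * becomes 1024 / 8 == 128 elements and sInlineBytes becomes 1024.
 */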
michael@0 | 249 | |
michael@0 | 250 | /* member data */ |
michael@0 | 251 | |
michael@0 | 252 | /* |
michael@0 | 253 | * Pointer to the buffer, be it inline or heap-allocated. Only [mBegin, |
michael@0 | 254 | * mBegin + mLength) hold valid constructed T objects. The range [mBegin + |
michael@0 | 255 | * mLength, mBegin + mCapacity) holds uninitialized memory. The range |
michael@0 | 256 | * [mBegin + mLength, mBegin + mReserved) also holds uninitialized memory |
michael@0 | 257 | * previously allocated by a call to reserve(). |
michael@0 | 258 | */ |
michael@0 | 259 | T* mBegin; |
michael@0 | 260 | |
michael@0 | 261 | /* Number of elements in the vector. */ |
michael@0 | 262 | size_t mLength; |
michael@0 | 263 | |
michael@0 | 264 | /* Max number of elements storable in the vector without resizing. */ |
michael@0 | 265 | size_t mCapacity; |
michael@0 | 266 | |
michael@0 | 267 | #ifdef DEBUG |
michael@0 | 268 | /* Max number of elements that have been reserved or used in this vector. */ |
michael@0 | 269 | size_t mReserved; |
michael@0 | 270 | #endif |
michael@0 | 271 | |
michael@0 | 272 | /* Memory used for inline storage. */ |
michael@0 | 273 | AlignedStorage<sInlineBytes> storage; |
michael@0 | 274 | |
michael@0 | 275 | #ifdef DEBUG |
michael@0 | 276 | friend class ReentrancyGuard; |
michael@0 | 277 | bool entered; |
michael@0 | 278 | #endif |
michael@0 | 279 | |
michael@0 | 280 | /* private accessors */ |
michael@0 | 281 | |
michael@0 | 282 | bool usingInlineStorage() const { |
michael@0 | 283 | return mBegin == const_cast<VectorBase*>(this)->inlineStorage(); |
michael@0 | 284 | } |
michael@0 | 285 | |
michael@0 | 286 | T* inlineStorage() { |
michael@0 | 287 | return static_cast<T*>(storage.addr()); |
michael@0 | 288 | } |
michael@0 | 289 | |
michael@0 | 290 | T* beginNoCheck() const { |
michael@0 | 291 | return mBegin; |
michael@0 | 292 | } |
michael@0 | 293 | |
michael@0 | 294 | T* endNoCheck() { |
michael@0 | 295 | return mBegin + mLength; |
michael@0 | 296 | } |
michael@0 | 297 | |
michael@0 | 298 | const T* endNoCheck() const { |
michael@0 | 299 | return mBegin + mLength; |
michael@0 | 300 | } |
michael@0 | 301 | |
michael@0 | 302 | #ifdef DEBUG |
michael@0 | 303 | size_t reserved() const { |
michael@0 | 304 | MOZ_ASSERT(mReserved <= mCapacity); |
michael@0 | 305 | MOZ_ASSERT(mLength <= mReserved); |
michael@0 | 306 | return mReserved; |
michael@0 | 307 | } |
michael@0 | 308 | #endif |
michael@0 | 309 | |
michael@0 | 310 | /* Append operations guaranteed to succeed due to pre-reserved space. */ |
michael@0 | 311 | template<typename U> void internalAppend(U&& u); |
michael@0 | 312 | template<typename U, size_t O, class BP, class UV> |
michael@0 | 313 | void internalAppendAll(const VectorBase<U, O, BP, UV>& u); |
michael@0 | 314 | void internalAppendN(const T& t, size_t n); |
michael@0 | 315 | template<typename U> void internalAppend(const U* begin, size_t length); |
michael@0 | 316 | |
michael@0 | 317 | public: |
michael@0 | 318 | static const size_t sMaxInlineStorage = N; |
michael@0 | 319 | |
michael@0 | 320 | typedef T ElementType; |
michael@0 | 321 | |
michael@0 | 322 | VectorBase(AllocPolicy = AllocPolicy()); |
michael@0 | 323 | VectorBase(ThisVector&&); /* Move constructor. */ |
michael@0 | 324 | ThisVector& operator=(ThisVector&&); /* Move assignment. */ |
michael@0 | 325 | ~VectorBase(); |
michael@0 | 326 | |
michael@0 | 327 | /* accessors */ |
michael@0 | 328 | |
michael@0 | 329 | const AllocPolicy& allocPolicy() const { |
michael@0 | 330 | return *this; |
michael@0 | 331 | } |
michael@0 | 332 | |
michael@0 | 333 | AllocPolicy& allocPolicy() { |
michael@0 | 334 | return *this; |
michael@0 | 335 | } |
michael@0 | 336 | |
michael@0 | 337 | enum { InlineLength = N }; |
michael@0 | 338 | |
michael@0 | 339 | size_t length() const { |
michael@0 | 340 | return mLength; |
michael@0 | 341 | } |
michael@0 | 342 | |
michael@0 | 343 | bool empty() const { |
michael@0 | 344 | return mLength == 0; |
michael@0 | 345 | } |
michael@0 | 346 | |
michael@0 | 347 | size_t capacity() const { |
michael@0 | 348 | return mCapacity; |
michael@0 | 349 | } |
michael@0 | 350 | |
michael@0 | 351 | T* begin() { |
michael@0 | 352 | MOZ_ASSERT(!entered); |
michael@0 | 353 | return mBegin; |
michael@0 | 354 | } |
michael@0 | 355 | |
michael@0 | 356 | const T* begin() const { |
michael@0 | 357 | MOZ_ASSERT(!entered); |
michael@0 | 358 | return mBegin; |
michael@0 | 359 | } |
michael@0 | 360 | |
michael@0 | 361 | T* end() { |
michael@0 | 362 | MOZ_ASSERT(!entered); |
michael@0 | 363 | return mBegin + mLength; |
michael@0 | 364 | } |
michael@0 | 365 | |
michael@0 | 366 | const T* end() const { |
michael@0 | 367 | MOZ_ASSERT(!entered); |
michael@0 | 368 | return mBegin + mLength; |
michael@0 | 369 | } |
michael@0 | 370 | |
michael@0 | 371 | T& operator[](size_t i) { |
michael@0 | 372 | MOZ_ASSERT(!entered); |
michael@0 | 373 | MOZ_ASSERT(i < mLength); |
michael@0 | 374 | return begin()[i]; |
michael@0 | 375 | } |
michael@0 | 376 | |
michael@0 | 377 | const T& operator[](size_t i) const { |
michael@0 | 378 | MOZ_ASSERT(!entered); |
michael@0 | 379 | MOZ_ASSERT(i < mLength); |
michael@0 | 380 | return begin()[i]; |
michael@0 | 381 | } |
michael@0 | 382 | |
michael@0 | 383 | T& back() { |
michael@0 | 384 | MOZ_ASSERT(!entered); |
michael@0 | 385 | MOZ_ASSERT(!empty()); |
michael@0 | 386 | return *(end() - 1); |
michael@0 | 387 | } |
michael@0 | 388 | |
michael@0 | 389 | const T& back() const { |
michael@0 | 390 | MOZ_ASSERT(!entered); |
michael@0 | 391 | MOZ_ASSERT(!empty()); |
michael@0 | 392 | return *(end() - 1); |
michael@0 | 393 | } |
michael@0 | 394 | |
michael@0 | 395 | class Range |
michael@0 | 396 | { |
michael@0 | 397 | friend class VectorBase; |
michael@0 | 398 | T* cur_; |
michael@0 | 399 | T* end_; |
michael@0 | 400 | Range(T* cur, T* end) : cur_(cur), end_(end) { |
michael@0 | 401 | MOZ_ASSERT(cur <= end); |
michael@0 | 402 | } |
michael@0 | 403 | |
michael@0 | 404 | public: |
michael@0 | 405 | Range() {} |
michael@0 | 406 | bool empty() const { return cur_ == end_; } |
michael@0 | 407 | size_t remain() const { return PointerRangeSize(cur_, end_); } |
michael@0 | 408 | T& front() const { MOZ_ASSERT(!empty()); return *cur_; } |
michael@0 | 409 | void popFront() { MOZ_ASSERT(!empty()); ++cur_; } |
michael@0 | 410 | T popCopyFront() { MOZ_ASSERT(!empty()); return *cur_++; } |
michael@0 | 411 | }; |
michael@0 | 412 | |
michael@0 | 413 | Range all() { |
michael@0 | 414 | return Range(begin(), end()); |
michael@0 | 415 | } |
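
/*
 * For illustration, a typical traversal of a vector |vec| (use() stands in
 * for arbitrary per-element work):
 *
 *   for (auto r = vec.all(); !r.empty(); r.popFront())
 *     use(r.front());
 */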
michael@0 | 416 | |
michael@0 | 417 | /* mutators */ |
michael@0 | 418 | |
michael@0 | 419 | /** |
michael@0 | 420 | * Given that the vector is empty and has not yet allocated heap storage, |
michael@0 | 421 | * grow its capacity to |request| elements. |
michael@0 | 422 | */ |
michael@0 | 423 | bool initCapacity(size_t request); |
michael@0 | 424 | |
michael@0 | 425 | /** |
michael@0 | 426 | * If reserve(length() + N) succeeds, the next N appends are guaranteed to |
michael@0 | 427 | * succeed. |
michael@0 | 428 | */ |
michael@0 | 429 | bool reserve(size_t request); |
michael@0 | 430 | |
michael@0 | 431 | /** |
michael@0 | 432 | * Destroy elements in the range [end() - incr, end()). Does not deallocate |
michael@0 | 433 | * or unreserve storage for those elements. |
michael@0 | 434 | */ |
michael@0 | 435 | void shrinkBy(size_t incr); |
michael@0 | 436 | |
michael@0 | 437 | /** Grow the vector by incr elements. */ |
michael@0 | 438 | bool growBy(size_t incr); |
michael@0 | 439 | |
michael@0 | 440 | /** Call shrinkBy or growBy based on whether newLength > length(). */ |
michael@0 | 441 | bool resize(size_t newLength); |
michael@0 | 442 | |
michael@0 | 443 | /** |
michael@0 | 444 | * Increase the length of the vector, but don't initialize the new elements |
michael@0 | 445 | * -- leave them as uninitialized memory. |
michael@0 | 446 | */ |
michael@0 | 447 | bool growByUninitialized(size_t incr); |
michael@0 | 448 | bool resizeUninitialized(size_t newLength); |
michael@0 | 449 | |
michael@0 | 450 | /** Shorthand for shrinkBy(length()). */ |
michael@0 | 451 | void clear(); |
michael@0 | 452 | |
michael@0 | 453 | /** Clears and releases any heap-allocated storage. */ |
michael@0 | 454 | void clearAndFree(); |
michael@0 | 455 | |
michael@0 | 456 | /** |
michael@0 | 457 | * If true, appending |needed| elements won't reallocate element storage. |
michael@0 | 458 | * This *doesn't* mean that infallibleAppend may be used! You still must |
michael@0 | 459 | * reserve the extra space, even if this method indicates that appends won't |
michael@0 | 460 | * need to reallocate element storage. |
michael@0 | 461 | */ |
michael@0 | 462 | bool canAppendWithoutRealloc(size_t needed) const; |
michael@0 | 463 | |
michael@0 | 464 | /** Potentially fallible append operations. */ |
michael@0 | 465 | |
michael@0 | 466 | /** |
michael@0 | 467 | * This can take either a T& or a T&&. Given a T&&, it moves |u| into the |
michael@0 | 468 | * vector, instead of copying it. If it fails, |u| is left unmoved. ("We are |
michael@0 | 469 | * not amused.") |
michael@0 | 470 | */ |
michael@0 | 471 | template<typename U> bool append(U&& u); |
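
/*
 * For illustration, given a vector |vec| and an element value |x|:
 *
 *   if (!vec.append(x))        // copies |x|
 *     <handle failure>
 *   if (!vec.append(Move(x)))  // moves |x|; on failure |x| is left unmoved
 *     <handle failure>
 */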
michael@0 | 472 | |
michael@0 | 473 | template<typename U, size_t O, class BP, class UV> |
michael@0 | 474 | bool appendAll(const VectorBase<U, O, BP, UV>& u); |
michael@0 | 475 | bool appendN(const T& t, size_t n); |
michael@0 | 476 | template<typename U> bool append(const U* begin, const U* end); |
michael@0 | 477 | template<typename U> bool append(const U* begin, size_t length); |
michael@0 | 478 | |
michael@0 | 479 | /* |
michael@0 | 480 | * Guaranteed-infallible append operations for use upon vectors whose |
michael@0 | 481 | * memory has been pre-reserved. Don't use this if you haven't reserved the |
michael@0 | 482 | * memory! |
michael@0 | 483 | */ |
michael@0 | 484 | template<typename U> void infallibleAppend(U&& u) { |
michael@0 | 485 | internalAppend(Forward<U>(u)); |
michael@0 | 486 | } |
michael@0 | 487 | void infallibleAppendN(const T& t, size_t n) { |
michael@0 | 488 | internalAppendN(t, n); |
michael@0 | 489 | } |
michael@0 | 490 | template<typename U> void infallibleAppend(const U* aBegin, const U* aEnd) { |
michael@0 | 491 | internalAppend(aBegin, PointerRangeSize(aBegin, aEnd)); |
michael@0 | 492 | } |
michael@0 | 493 | template<typename U> void infallibleAppend(const U* aBegin, size_t aLength) { |
michael@0 | 494 | internalAppend(aBegin, aLength); |
michael@0 | 495 | } |
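
/*
 * For illustration, the reserve()/infallibleAppend() pattern: after a
 * successful reserve(length() + 3), the next three appends cannot fail, so
 * the infallible forms may be used on values |a|, |b|, |c|:
 *
 *   if (!vec.reserve(vec.length() + 3))
 *     <handle failure>
 *   vec.infallibleAppend(a);
 *   vec.infallibleAppend(b);
 *   vec.infallibleAppend(c);
 */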
michael@0 | 496 | |
michael@0 | 497 | void popBack(); |
michael@0 | 498 | |
michael@0 | 499 | T popCopy(); |
michael@0 | 500 | |
michael@0 | 501 | /** |
michael@0 | 502 | * Transfers ownership of the internal buffer used by this vector to the |
michael@0 | 503 | * caller. (It's the caller's responsibility to properly deallocate this |
michael@0 | 504 | * buffer, in accordance with this vector's AllocPolicy.) After this call, |
michael@0 | 505 | * the vector is empty. Since the returned buffer may need to be allocated |
michael@0 | 506 | * (if the elements are currently stored in-place), the call can fail, |
michael@0 | 507 | * returning nullptr. |
michael@0 | 508 | * |
michael@0 | 509 | * N.B. Although a T*, only the range [0, length()) is constructed. |
michael@0 | 510 | */ |
michael@0 | 511 | T* extractRawBuffer(); |
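
/*
 * For illustration, with the default MallocAllocPolicy (whose malloc_/free_
 * are plain malloc/free); consume() stands in for whatever code takes over
 * the buffer:
 *
 *   size_t len = vec.length();
 *   if (T* buf = vec.extractRawBuffer()) {
 *     consume(buf, len);
 *     free(buf);   // for non-POD T, destroy the len elements first
 *   }
 */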
michael@0 | 512 | |
michael@0 | 513 | /** |
michael@0 | 514 | * Transfer ownership of an array of objects into the vector. The caller |
michael@0 | 515 | * must have allocated the array in accordance with this vector's |
michael@0 | 516 | * AllocPolicy. |
michael@0 | 517 | * |
michael@0 | 518 | * N.B. This call assumes that there are no uninitialized elements in the |
michael@0 | 519 | * passed array. |
michael@0 | 520 | */ |
michael@0 | 521 | void replaceRawBuffer(T* p, size_t length); |
michael@0 | 522 | |
michael@0 | 523 | /** |
michael@0 | 524 | * Places |val| at position |p|, shifting existing elements from |p| onward |
michael@0 | 525 | * one position higher. On success, |p| should not be reused because it'll |
michael@0 | 526 | * be a dangling pointer if reallocation of the vector storage occurred; the |
michael@0 | 527 | * return value should be used instead. On failure, nullptr is returned. |
michael@0 | 528 | * |
michael@0 | 529 | * Example usage: |
michael@0 | 530 | * |
michael@0 | 531 | * if (!(p = vec.insert(p, val))) |
michael@0 | 532 | * <handle failure> |
michael@0 | 533 | * <keep working with p> |
michael@0 | 534 | * |
michael@0 | 535 | * This is inherently a linear-time operation. Be careful! |
michael@0 | 536 | */ |
michael@0 | 537 | template<typename U> |
michael@0 | 538 | T* insert(T* p, U&& val); |
michael@0 | 539 | |
michael@0 | 540 | /** |
michael@0 | 541 | * Removes the element |t|, which must fall in the bounds [begin, end), |
michael@0 | 542 | * shifting existing elements from |t + 1| onward one position lower. |
michael@0 | 543 | */ |
michael@0 | 544 | void erase(T* t); |
michael@0 | 545 | |
michael@0 | 546 | /** |
michael@0 | 547 | * Measure the size of the vector's heap-allocated storage. |
michael@0 | 548 | */ |
michael@0 | 549 | size_t sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const; |
michael@0 | 550 | |
michael@0 | 551 | /** |
michael@0 | 552 | * Like sizeOfExcludingThis, but also measures the size of the vector |
michael@0 | 553 | * object (which must be heap-allocated) itself. |
michael@0 | 554 | */ |
michael@0 | 555 | size_t sizeOfIncludingThis(MallocSizeOf mallocSizeOf) const; |
michael@0 | 556 | |
michael@0 | 557 | void swap(ThisVector& other); |
michael@0 | 558 | |
michael@0 | 559 | private: |
michael@0 | 560 | VectorBase(const VectorBase&) MOZ_DELETE; |
michael@0 | 561 | void operator=(const VectorBase&) MOZ_DELETE; |
michael@0 | 562 | |
michael@0 | 563 | /* Move-construct/assign only from our derived class, ThisVector. */ |
michael@0 | 564 | VectorBase(VectorBase&&) MOZ_DELETE; |
michael@0 | 565 | void operator=(VectorBase&&) MOZ_DELETE; |
michael@0 | 566 | }; |
michael@0 | 567 | |
michael@0 | 568 | /* This does the re-entrancy check plus several other sanity checks. */ |
michael@0 | 569 | #define MOZ_REENTRANCY_GUARD_ET_AL \ |
michael@0 | 570 | ReentrancyGuard g(*this); \ |
michael@0 | 571 | MOZ_ASSERT_IF(usingInlineStorage(), mCapacity == sInlineCapacity); \ |
michael@0 | 572 | MOZ_ASSERT(reserved() <= mCapacity); \ |
michael@0 | 573 | MOZ_ASSERT(mLength <= reserved()); \ |
michael@0 | 574 | MOZ_ASSERT(mLength <= mCapacity) |
michael@0 | 575 | |
michael@0 | 576 | /* Vector Implementation */ |
michael@0 | 577 | |
michael@0 | 578 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 579 | MOZ_ALWAYS_INLINE |
michael@0 | 580 | VectorBase<T, N, AP, TV>::VectorBase(AP ap) |
michael@0 | 581 | : AP(ap), |
michael@0 | 582 | mLength(0), |
michael@0 | 583 | mCapacity(sInlineCapacity) |
michael@0 | 584 | #ifdef DEBUG |
michael@0 | 585 | , mReserved(sInlineCapacity), |
michael@0 | 586 | entered(false) |
michael@0 | 587 | #endif |
michael@0 | 588 | { |
michael@0 | 589 | mBegin = static_cast<T*>(storage.addr()); |
michael@0 | 590 | } |
michael@0 | 591 | |
michael@0 | 592 | /* Move constructor. */ |
michael@0 | 593 | template<typename T, size_t N, class AllocPolicy, class TV> |
michael@0 | 594 | MOZ_ALWAYS_INLINE |
michael@0 | 595 | VectorBase<T, N, AllocPolicy, TV>::VectorBase(TV&& rhs) |
michael@0 | 596 | : AllocPolicy(Move(rhs)) |
michael@0 | 597 | #ifdef DEBUG |
michael@0 | 598 | , entered(false) |
michael@0 | 599 | #endif |
michael@0 | 600 | { |
michael@0 | 601 | mLength = rhs.mLength; |
michael@0 | 602 | mCapacity = rhs.mCapacity; |
michael@0 | 603 | #ifdef DEBUG |
michael@0 | 604 | mReserved = rhs.mReserved; |
michael@0 | 605 | #endif |
michael@0 | 606 | |
michael@0 | 607 | if (rhs.usingInlineStorage()) { |
michael@0 | 608 | /* We can't steal rhs's buffer in this case; move-construct each element. */ |
michael@0 | 609 | mBegin = static_cast<T*>(storage.addr()); |
michael@0 | 610 | Impl::moveConstruct(mBegin, rhs.beginNoCheck(), rhs.endNoCheck()); |
michael@0 | 611 | /* |
michael@0 | 612 | * Leave rhs's mLength, mBegin, mCapacity, and mReserved as they are. |
michael@0 | 613 | * The elements in its in-line storage still need to be destroyed. |
michael@0 | 614 | */ |
michael@0 | 615 | } else { |
michael@0 | 616 | /* |
michael@0 | 617 | * Take rhs's buffer, and turn rhs into an empty vector using |
michael@0 | 618 | * in-line storage. |
michael@0 | 619 | */ |
michael@0 | 620 | mBegin = rhs.mBegin; |
michael@0 | 621 | rhs.mBegin = static_cast<T*>(rhs.storage.addr()); |
michael@0 | 622 | rhs.mCapacity = sInlineCapacity; |
michael@0 | 623 | rhs.mLength = 0; |
michael@0 | 624 | #ifdef DEBUG |
michael@0 | 625 | rhs.mReserved = sInlineCapacity; |
michael@0 | 626 | #endif |
michael@0 | 627 | } |
michael@0 | 628 | } |
michael@0 | 629 | |
michael@0 | 630 | /* Move assignment. */ |
michael@0 | 631 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 632 | MOZ_ALWAYS_INLINE |
michael@0 | 633 | TV& |
michael@0 | 634 | VectorBase<T, N, AP, TV>::operator=(TV&& rhs) |
michael@0 | 635 | { |
michael@0 | 636 | MOZ_ASSERT(this != &rhs, "self-move assignment is prohibited"); |
michael@0 | 637 | TV* tv = static_cast<TV*>(this); |
michael@0 | 638 | tv->~TV(); |
michael@0 | 639 | new(tv) TV(Move(rhs)); |
michael@0 | 640 | return *tv; |
michael@0 | 641 | } |
michael@0 | 642 | |
michael@0 | 643 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 644 | MOZ_ALWAYS_INLINE |
michael@0 | 645 | VectorBase<T, N, AP, TV>::~VectorBase() |
michael@0 | 646 | { |
michael@0 | 647 | MOZ_REENTRANCY_GUARD_ET_AL; |
michael@0 | 648 | Impl::destroy(beginNoCheck(), endNoCheck()); |
michael@0 | 649 | if (!usingInlineStorage()) |
michael@0 | 650 | this->free_(beginNoCheck()); |
michael@0 | 651 | } |
michael@0 | 652 | |
michael@0 | 653 | /* |
michael@0 | 654 | * This function creates a new heap buffer with capacity newCap, moves all |
michael@0 | 655 | * elements from the inline buffer into it, and returns false if that |
michael@0 | 656 | * allocation fails (OOM). |
michael@0 | 657 | */ |
michael@0 | 658 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 659 | inline bool |
michael@0 | 660 | VectorBase<T, N, AP, TV>::convertToHeapStorage(size_t newCap) |
michael@0 | 661 | { |
michael@0 | 662 | MOZ_ASSERT(usingInlineStorage()); |
michael@0 | 663 | |
michael@0 | 664 | /* Allocate buffer. */ |
michael@0 | 665 | MOZ_ASSERT(!detail::CapacityHasExcessSpace<T>(newCap)); |
michael@0 | 666 | T* newBuf = reinterpret_cast<T*>(this->malloc_(newCap * sizeof(T))); |
michael@0 | 667 | if (!newBuf) |
michael@0 | 668 | return false; |
michael@0 | 669 | |
michael@0 | 670 | /* Copy inline elements into heap buffer. */ |
michael@0 | 671 | Impl::moveConstruct(newBuf, beginNoCheck(), endNoCheck()); |
michael@0 | 672 | Impl::destroy(beginNoCheck(), endNoCheck()); |
michael@0 | 673 | |
michael@0 | 674 | /* Switch in heap buffer. */ |
michael@0 | 675 | mBegin = newBuf; |
michael@0 | 676 | /* mLength is unchanged. */ |
michael@0 | 677 | mCapacity = newCap; |
michael@0 | 678 | return true; |
michael@0 | 679 | } |
michael@0 | 680 | |
michael@0 | 681 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 682 | MOZ_NEVER_INLINE bool |
michael@0 | 683 | VectorBase<T, N, AP, TV>::growStorageBy(size_t incr) |
michael@0 | 684 | { |
michael@0 | 685 | MOZ_ASSERT(mLength + incr > mCapacity); |
michael@0 | 686 | MOZ_ASSERT_IF(!usingInlineStorage(), |
michael@0 | 687 | !detail::CapacityHasExcessSpace<T>(mCapacity)); |
michael@0 | 688 | |
michael@0 | 689 | /* |
michael@0 | 690 | * When choosing a new capacity, its size should be as close to 2**N bytes |
michael@0 | 691 | * as possible. 2**N-sized requests are best because they are unlikely to |
michael@0 | 692 | * be rounded up by the allocator. Asking for a 2**N number of elements |
michael@0 | 693 | * isn't as good, because if sizeof(T) is not a power-of-two that would |
michael@0 | 694 | * result in a non-2**N request size. |
michael@0 | 695 | */ |
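
/*
 * For illustration, appending one element at a time with sizeof(T) == 4 and
 * sInlineCapacity == 8: the first heap allocation rounds (8 + 1) * 4 == 36
 * bytes up to 64 bytes (16 elements); after that, doubling to 32, 64, ...
 * elements keeps every request an exact power of two in bytes.
 */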
michael@0 | 696 | |
michael@0 | 697 | size_t newCap; |
michael@0 | 698 | |
michael@0 | 699 | if (incr == 1) { |
michael@0 | 700 | if (usingInlineStorage()) { |
michael@0 | 701 | /* This case occurs in ~70--80% of the calls to this function. */ |
michael@0 | 702 | size_t newSize = |
michael@0 | 703 | tl::RoundUpPow2<(sInlineCapacity + 1) * sizeof(T)>::value; |
michael@0 | 704 | newCap = newSize / sizeof(T); |
michael@0 | 705 | goto convert; |
michael@0 | 706 | } |
michael@0 | 707 | |
michael@0 | 708 | if (mLength == 0) { |
michael@0 | 709 | /* This case occurs in ~0--10% of the calls to this function. */ |
michael@0 | 710 | newCap = 1; |
michael@0 | 711 | goto grow; |
michael@0 | 712 | } |
michael@0 | 713 | |
michael@0 | 714 | /* This case occurs in ~15--20% of the calls to this function. */ |
michael@0 | 715 | |
michael@0 | 716 | /* |
michael@0 | 717 | * Will mLength * 4 * sizeof(T) overflow? This condition limits a vector |
michael@0 | 718 | * to 1GB of memory on a 32-bit system, which is a reasonable limit. It |
michael@0 | 719 | * also ensures that |
michael@0 | 720 | * |
michael@0 | 721 | * static_cast<char*>(end()) - static_cast<char*>(begin()) |
michael@0 | 722 | * |
michael@0 | 723 | * doesn't overflow ptrdiff_t (see bug 510319). |
michael@0 | 724 | */ |
michael@0 | 725 | if (mLength & tl::MulOverflowMask<4 * sizeof(T)>::value) { |
michael@0 | 726 | this->reportAllocOverflow(); |
michael@0 | 727 | return false; |
michael@0 | 728 | } |
michael@0 | 729 | |
michael@0 | 730 | /* |
michael@0 | 731 | * If we reach here, the existing capacity will have a size that is already |
michael@0 | 732 | * as close to 2^N as sizeof(T) will allow. Just double the capacity, and |
michael@0 | 733 | * then there might be space for one more element. |
michael@0 | 734 | */ |
michael@0 | 735 | newCap = mLength * 2; |
michael@0 | 736 | if (detail::CapacityHasExcessSpace<T>(newCap)) |
michael@0 | 737 | newCap += 1; |
michael@0 | 738 | } else { |
michael@0 | 739 | /* This case occurs in ~2% of the calls to this function. */ |
michael@0 | 740 | size_t newMinCap = mLength + incr; |
michael@0 | 741 | |
michael@0 | 742 | /* Did mLength + incr overflow? Will newCap * sizeof(T) overflow? */ |
michael@0 | 743 | if (newMinCap < mLength || |
michael@0 | 744 | newMinCap & tl::MulOverflowMask<2 * sizeof(T)>::value) |
michael@0 | 745 | { |
michael@0 | 746 | this->reportAllocOverflow(); |
michael@0 | 747 | return false; |
michael@0 | 748 | } |
michael@0 | 749 | |
michael@0 | 750 | size_t newMinSize = newMinCap * sizeof(T); |
michael@0 | 751 | size_t newSize = RoundUpPow2(newMinSize); |
michael@0 | 752 | newCap = newSize / sizeof(T); |
michael@0 | 753 | } |
michael@0 | 754 | |
michael@0 | 755 | if (usingInlineStorage()) { |
michael@0 | 756 | convert: |
michael@0 | 757 | return convertToHeapStorage(newCap); |
michael@0 | 758 | } |
michael@0 | 759 | |
michael@0 | 760 | grow: |
michael@0 | 761 | return Impl::growTo(*this, newCap); |
michael@0 | 762 | } |
michael@0 | 763 | |
michael@0 | 764 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 765 | inline bool |
michael@0 | 766 | VectorBase<T, N, AP, TV>::initCapacity(size_t request) |
michael@0 | 767 | { |
michael@0 | 768 | MOZ_ASSERT(empty()); |
michael@0 | 769 | MOZ_ASSERT(usingInlineStorage()); |
michael@0 | 770 | if (request == 0) |
michael@0 | 771 | return true; |
michael@0 | 772 | T* newbuf = reinterpret_cast<T*>(this->malloc_(request * sizeof(T))); |
michael@0 | 773 | if (!newbuf) |
michael@0 | 774 | return false; |
michael@0 | 775 | mBegin = newbuf; |
michael@0 | 776 | mCapacity = request; |
michael@0 | 777 | #ifdef DEBUG |
michael@0 | 778 | mReserved = request; |
michael@0 | 779 | #endif |
michael@0 | 780 | return true; |
michael@0 | 781 | } |
michael@0 | 782 | |
michael@0 | 783 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 784 | inline bool |
michael@0 | 785 | VectorBase<T, N, AP, TV>::reserve(size_t request) |
michael@0 | 786 | { |
michael@0 | 787 | MOZ_REENTRANCY_GUARD_ET_AL; |
michael@0 | 788 | if (request > mCapacity && !growStorageBy(request - mLength)) |
michael@0 | 789 | return false; |
michael@0 | 790 | |
michael@0 | 791 | #ifdef DEBUG |
michael@0 | 792 | if (request > mReserved) |
michael@0 | 793 | mReserved = request; |
michael@0 | 794 | MOZ_ASSERT(mLength <= mReserved); |
michael@0 | 795 | MOZ_ASSERT(mReserved <= mCapacity); |
michael@0 | 796 | #endif |
michael@0 | 797 | return true; |
michael@0 | 798 | } |
michael@0 | 799 | |
michael@0 | 800 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 801 | inline void |
michael@0 | 802 | VectorBase<T, N, AP, TV>::shrinkBy(size_t incr) |
michael@0 | 803 | { |
michael@0 | 804 | MOZ_REENTRANCY_GUARD_ET_AL; |
michael@0 | 805 | MOZ_ASSERT(incr <= mLength); |
michael@0 | 806 | Impl::destroy(endNoCheck() - incr, endNoCheck()); |
michael@0 | 807 | mLength -= incr; |
michael@0 | 808 | } |
michael@0 | 809 | |
michael@0 | 810 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 811 | MOZ_ALWAYS_INLINE bool |
michael@0 | 812 | VectorBase<T, N, AP, TV>::growBy(size_t incr) |
michael@0 | 813 | { |
michael@0 | 814 | MOZ_REENTRANCY_GUARD_ET_AL; |
michael@0 | 815 | if (incr > mCapacity - mLength && !growStorageBy(incr)) |
michael@0 | 816 | return false; |
michael@0 | 817 | |
michael@0 | 818 | MOZ_ASSERT(mLength + incr <= mCapacity); |
michael@0 | 819 | T* newend = endNoCheck() + incr; |
michael@0 | 820 | Impl::initialize(endNoCheck(), newend); |
michael@0 | 821 | mLength += incr; |
michael@0 | 822 | #ifdef DEBUG |
michael@0 | 823 | if (mLength > mReserved) |
michael@0 | 824 | mReserved = mLength; |
michael@0 | 825 | #endif |
michael@0 | 826 | return true; |
michael@0 | 827 | } |
michael@0 | 828 | |
michael@0 | 829 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 830 | MOZ_ALWAYS_INLINE bool |
michael@0 | 831 | VectorBase<T, N, AP, TV>::growByUninitialized(size_t incr) |
michael@0 | 832 | { |
michael@0 | 833 | MOZ_REENTRANCY_GUARD_ET_AL; |
michael@0 | 834 | if (incr > mCapacity - mLength && !growStorageBy(incr)) |
michael@0 | 835 | return false; |
michael@0 | 836 | |
michael@0 | 837 | MOZ_ASSERT(mLength + incr <= mCapacity); |
michael@0 | 838 | mLength += incr; |
michael@0 | 839 | #ifdef DEBUG |
michael@0 | 840 | if (mLength > mReserved) |
michael@0 | 841 | mReserved = mLength; |
michael@0 | 842 | #endif |
michael@0 | 843 | return true; |
michael@0 | 844 | } |
michael@0 | 845 | |
michael@0 | 846 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 847 | inline bool |
michael@0 | 848 | VectorBase<T, N, AP, TV>::resize(size_t newLength) |
michael@0 | 849 | { |
michael@0 | 850 | size_t curLength = mLength; |
michael@0 | 851 | if (newLength > curLength) |
michael@0 | 852 | return growBy(newLength - curLength); |
michael@0 | 853 | shrinkBy(curLength - newLength); |
michael@0 | 854 | return true; |
michael@0 | 855 | } |
michael@0 | 856 | |
michael@0 | 857 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 858 | MOZ_ALWAYS_INLINE bool |
michael@0 | 859 | VectorBase<T, N, AP, TV>::resizeUninitialized(size_t newLength) |
michael@0 | 860 | { |
michael@0 | 861 | size_t curLength = mLength; |
michael@0 | 862 | if (newLength > curLength) |
michael@0 | 863 | return growByUninitialized(newLength - curLength); |
michael@0 | 864 | shrinkBy(curLength - newLength); |
michael@0 | 865 | return true; |
michael@0 | 866 | } |
michael@0 | 867 | |
michael@0 | 868 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 869 | inline void |
michael@0 | 870 | VectorBase<T, N, AP, TV>::clear() |
michael@0 | 871 | { |
michael@0 | 872 | MOZ_REENTRANCY_GUARD_ET_AL; |
michael@0 | 873 | Impl::destroy(beginNoCheck(), endNoCheck()); |
michael@0 | 874 | mLength = 0; |
michael@0 | 875 | } |
michael@0 | 876 | |
michael@0 | 877 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 878 | inline void |
michael@0 | 879 | VectorBase<T, N, AP, TV>::clearAndFree() |
michael@0 | 880 | { |
michael@0 | 881 | clear(); |
michael@0 | 882 | |
michael@0 | 883 | if (usingInlineStorage()) |
michael@0 | 884 | return; |
michael@0 | 885 | |
michael@0 | 886 | this->free_(beginNoCheck()); |
michael@0 | 887 | mBegin = static_cast<T*>(storage.addr()); |
michael@0 | 888 | mCapacity = sInlineCapacity; |
michael@0 | 889 | #ifdef DEBUG |
michael@0 | 890 | mReserved = sInlineCapacity; |
michael@0 | 891 | #endif |
michael@0 | 892 | } |
michael@0 | 893 | |
michael@0 | 894 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 895 | inline bool |
michael@0 | 896 | VectorBase<T, N, AP, TV>::canAppendWithoutRealloc(size_t needed) const |
michael@0 | 897 | { |
michael@0 | 898 | return mLength + needed <= mCapacity; |
michael@0 | 899 | } |
michael@0 | 900 | |
michael@0 | 901 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 902 | template<typename U, size_t O, class BP, class UV> |
michael@0 | 903 | MOZ_ALWAYS_INLINE void |
michael@0 | 904 | VectorBase<T, N, AP, TV>::internalAppendAll(const VectorBase<U, O, BP, UV>& other) |
michael@0 | 905 | { |
michael@0 | 906 | internalAppend(other.begin(), other.length()); |
michael@0 | 907 | } |
michael@0 | 908 | |
michael@0 | 909 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 910 | template<typename U> |
michael@0 | 911 | MOZ_ALWAYS_INLINE void |
michael@0 | 912 | VectorBase<T, N, AP, TV>::internalAppend(U&& u) |
michael@0 | 913 | { |
michael@0 | 914 | MOZ_ASSERT(mLength + 1 <= mReserved); |
michael@0 | 915 | MOZ_ASSERT(mReserved <= mCapacity); |
michael@0 | 916 | new(endNoCheck()) T(Forward<U>(u)); |
michael@0 | 917 | ++mLength; |
michael@0 | 918 | } |
michael@0 | 919 | |
michael@0 | 920 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 921 | MOZ_ALWAYS_INLINE bool |
michael@0 | 922 | VectorBase<T, N, AP, TV>::appendN(const T& t, size_t needed) |
michael@0 | 923 | { |
michael@0 | 924 | MOZ_REENTRANCY_GUARD_ET_AL; |
michael@0 | 925 | if (mLength + needed > mCapacity && !growStorageBy(needed)) |
michael@0 | 926 | return false; |
michael@0 | 927 | |
michael@0 | 928 | #ifdef DEBUG |
michael@0 | 929 | if (mLength + needed > mReserved) |
michael@0 | 930 | mReserved = mLength + needed; |
michael@0 | 931 | #endif |
michael@0 | 932 | internalAppendN(t, needed); |
michael@0 | 933 | return true; |
michael@0 | 934 | } |
michael@0 | 935 | |
michael@0 | 936 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 937 | MOZ_ALWAYS_INLINE void |
michael@0 | 938 | VectorBase<T, N, AP, TV>::internalAppendN(const T& t, size_t needed) |
michael@0 | 939 | { |
michael@0 | 940 | MOZ_ASSERT(mLength + needed <= mReserved); |
michael@0 | 941 | MOZ_ASSERT(mReserved <= mCapacity); |
michael@0 | 942 | Impl::copyConstructN(endNoCheck(), needed, t); |
michael@0 | 943 | mLength += needed; |
michael@0 | 944 | } |
michael@0 | 945 | |
michael@0 | 946 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 947 | template<typename U> |
michael@0 | 948 | inline T* |
michael@0 | 949 | VectorBase<T, N, AP, TV>::insert(T* p, U&& val) |
michael@0 | 950 | { |
michael@0 | 951 | MOZ_ASSERT(begin() <= p); |
michael@0 | 952 | MOZ_ASSERT(p <= end()); |
michael@0 | 953 | size_t pos = p - begin(); |
michael@0 | 954 | MOZ_ASSERT(pos <= mLength); |
michael@0 | 955 | size_t oldLength = mLength; |
michael@0 | 956 | if (pos == oldLength) { |
michael@0 | 957 | if (!append(Forward<U>(val))) |
michael@0 | 958 | return nullptr; |
michael@0 | 959 | } else { |
michael@0 | 960 | T oldBack = Move(back()); |
michael@0 | 961 | if (!append(Move(oldBack))) /* Dup the last element. */ |
michael@0 | 962 | return nullptr; |
michael@0 | 963 | for (size_t i = oldLength; i > pos; --i) |
michael@0 | 964 | (*this)[i] = Move((*this)[i - 1]); |
michael@0 | 965 | (*this)[pos] = Forward<U>(val); |
michael@0 | 966 | } |
michael@0 | 967 | return begin() + pos; |
michael@0 | 968 | } |
michael@0 | 969 | |
michael@0 | 970 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 971 | inline void |
michael@0 | 972 | VectorBase<T, N, AP, TV>::erase(T* it) |
michael@0 | 973 | { |
michael@0 | 974 | MOZ_ASSERT(begin() <= it); |
michael@0 | 975 | MOZ_ASSERT(it < end()); |
michael@0 | 976 | while (it + 1 < end()) { |
michael@0 | 977 | *it = *(it + 1); |
michael@0 | 978 | ++it; |
michael@0 | 979 | } |
michael@0 | 980 | popBack(); |
michael@0 | 981 | } |
michael@0 | 982 | |
michael@0 | 983 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 984 | template<typename U> |
michael@0 | 985 | MOZ_ALWAYS_INLINE bool |
michael@0 | 986 | VectorBase<T, N, AP, TV>::append(const U* insBegin, const U* insEnd) |
michael@0 | 987 | { |
michael@0 | 988 | MOZ_REENTRANCY_GUARD_ET_AL; |
michael@0 | 989 | size_t needed = PointerRangeSize(insBegin, insEnd); |
michael@0 | 990 | if (mLength + needed > mCapacity && !growStorageBy(needed)) |
michael@0 | 991 | return false; |
michael@0 | 992 | |
michael@0 | 993 | #ifdef DEBUG |
michael@0 | 994 | if (mLength + needed > mReserved) |
michael@0 | 995 | mReserved = mLength + needed; |
michael@0 | 996 | #endif |
michael@0 | 997 | internalAppend(insBegin, needed); |
michael@0 | 998 | return true; |
michael@0 | 999 | } |
michael@0 | 1000 | |
michael@0 | 1001 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 1002 | template<typename U> |
michael@0 | 1003 | MOZ_ALWAYS_INLINE void |
michael@0 | 1004 | VectorBase<T, N, AP, TV>::internalAppend(const U* insBegin, size_t insLength) |
michael@0 | 1005 | { |
michael@0 | 1006 | MOZ_ASSERT(mLength + insLength <= mReserved); |
michael@0 | 1007 | MOZ_ASSERT(mReserved <= mCapacity); |
michael@0 | 1008 | Impl::copyConstruct(endNoCheck(), insBegin, insBegin + insLength); |
michael@0 | 1009 | mLength += insLength; |
michael@0 | 1010 | } |
michael@0 | 1011 | |
michael@0 | 1012 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 1013 | template<typename U> |
michael@0 | 1014 | MOZ_ALWAYS_INLINE bool |
michael@0 | 1015 | VectorBase<T, N, AP, TV>::append(U&& u) |
michael@0 | 1016 | { |
michael@0 | 1017 | MOZ_REENTRANCY_GUARD_ET_AL; |
michael@0 | 1018 | if (mLength == mCapacity && !growStorageBy(1)) |
michael@0 | 1019 | return false; |
michael@0 | 1020 | |
michael@0 | 1021 | #ifdef DEBUG |
michael@0 | 1022 | if (mLength + 1 > mReserved) |
michael@0 | 1023 | mReserved = mLength + 1; |
michael@0 | 1024 | #endif |
michael@0 | 1025 | internalAppend(Forward<U>(u)); |
michael@0 | 1026 | return true; |
michael@0 | 1027 | } |
michael@0 | 1028 | |
michael@0 | 1029 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 1030 | template<typename U, size_t O, class BP, class UV> |
michael@0 | 1031 | MOZ_ALWAYS_INLINE bool |
michael@0 | 1032 | VectorBase<T, N, AP, TV>::appendAll(const VectorBase<U, O, BP, UV>& other) |
michael@0 | 1033 | { |
michael@0 | 1034 | return append(other.begin(), other.length()); |
michael@0 | 1035 | } |
michael@0 | 1036 | |
michael@0 | 1037 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 1038 | template<typename U> |
michael@0 | 1039 | MOZ_ALWAYS_INLINE bool |
michael@0 | 1040 | VectorBase<T, N, AP, TV>::append(const U *insBegin, size_t insLength) |
michael@0 | 1041 | { |
michael@0 | 1042 | return append(insBegin, insBegin + insLength); |
michael@0 | 1043 | } |
michael@0 | 1044 | |
michael@0 | 1045 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 1046 | MOZ_ALWAYS_INLINE void |
michael@0 | 1047 | VectorBase<T, N, AP, TV>::popBack() |
michael@0 | 1048 | { |
michael@0 | 1049 | MOZ_REENTRANCY_GUARD_ET_AL; |
michael@0 | 1050 | MOZ_ASSERT(!empty()); |
michael@0 | 1051 | --mLength; |
michael@0 | 1052 | endNoCheck()->~T(); |
michael@0 | 1053 | } |
michael@0 | 1054 | |
michael@0 | 1055 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 1056 | MOZ_ALWAYS_INLINE T |
michael@0 | 1057 | VectorBase<T, N, AP, TV>::popCopy() |
michael@0 | 1058 | { |
michael@0 | 1059 | T ret = back(); |
michael@0 | 1060 | popBack(); |
michael@0 | 1061 | return ret; |
michael@0 | 1062 | } |
michael@0 | 1063 | |
michael@0 | 1064 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 1065 | inline T* |
michael@0 | 1066 | VectorBase<T, N, AP, TV>::extractRawBuffer() |
michael@0 | 1067 | { |
michael@0 | 1068 | T* ret; |
michael@0 | 1069 | if (usingInlineStorage()) { |
michael@0 | 1070 | ret = reinterpret_cast<T*>(this->malloc_(mLength * sizeof(T))); |
michael@0 | 1071 | if (!ret) |
michael@0 | 1072 | return nullptr; |
michael@0 | 1073 | Impl::copyConstruct(ret, beginNoCheck(), endNoCheck()); |
michael@0 | 1074 | Impl::destroy(beginNoCheck(), endNoCheck()); |
michael@0 | 1075 | /* mBegin, mCapacity are unchanged. */ |
michael@0 | 1076 | mLength = 0; |
michael@0 | 1077 | } else { |
michael@0 | 1078 | ret = mBegin; |
michael@0 | 1079 | mBegin = static_cast<T*>(storage.addr()); |
michael@0 | 1080 | mLength = 0; |
michael@0 | 1081 | mCapacity = sInlineCapacity; |
michael@0 | 1082 | #ifdef DEBUG |
michael@0 | 1083 | mReserved = sInlineCapacity; |
michael@0 | 1084 | #endif |
michael@0 | 1085 | } |
michael@0 | 1086 | return ret; |
michael@0 | 1087 | } |
michael@0 | 1088 | |
michael@0 | 1089 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 1090 | inline void |
michael@0 | 1091 | VectorBase<T, N, AP, TV>::replaceRawBuffer(T* p, size_t aLength) |
michael@0 | 1092 | { |
michael@0 | 1093 | MOZ_REENTRANCY_GUARD_ET_AL; |
michael@0 | 1094 | |
michael@0 | 1095 | /* Destroy what we have. */ |
michael@0 | 1096 | Impl::destroy(beginNoCheck(), endNoCheck()); |
michael@0 | 1097 | if (!usingInlineStorage()) |
michael@0 | 1098 | this->free_(beginNoCheck()); |
michael@0 | 1099 | |
michael@0 | 1100 | /* Take in the new buffer. */ |
michael@0 | 1101 | if (aLength <= sInlineCapacity) { |
michael@0 | 1102 | /* |
michael@0 | 1103 | * We convert to inline storage if possible, even though p might |
michael@0 | 1104 | * otherwise be acceptable. Maybe this behaviour should be |
michael@0 | 1105 | * specifiable with an argument to this function. |
michael@0 | 1106 | */ |
michael@0 | 1107 | mBegin = static_cast<T*>(storage.addr()); |
michael@0 | 1108 | mLength = aLength; |
michael@0 | 1109 | mCapacity = sInlineCapacity; |
michael@0 | 1110 | Impl::moveConstruct(mBegin, p, p + aLength); |
michael@0 | 1111 | Impl::destroy(p, p + aLength); |
michael@0 | 1112 | this->free_(p); |
michael@0 | 1113 | } else { |
michael@0 | 1114 | mBegin = p; |
michael@0 | 1115 | mLength = aLength; |
michael@0 | 1116 | mCapacity = aLength; |
michael@0 | 1117 | } |
michael@0 | 1118 | #ifdef DEBUG |
michael@0 | 1119 | mReserved = aLength; |
michael@0 | 1120 | #endif |
michael@0 | 1121 | } |
michael@0 | 1122 | |
michael@0 | 1123 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 1124 | inline size_t |
michael@0 | 1125 | VectorBase<T, N, AP, TV>::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const |
michael@0 | 1126 | { |
michael@0 | 1127 | return usingInlineStorage() ? 0 : mallocSizeOf(beginNoCheck()); |
michael@0 | 1128 | } |
michael@0 | 1129 | |
michael@0 | 1130 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 1131 | inline size_t |
michael@0 | 1132 | VectorBase<T, N, AP, TV>::sizeOfIncludingThis(MallocSizeOf mallocSizeOf) const |
michael@0 | 1133 | { |
michael@0 | 1134 | return mallocSizeOf(this) + sizeOfExcludingThis(mallocSizeOf); |
michael@0 | 1135 | } |
michael@0 | 1136 | |
michael@0 | 1137 | template<typename T, size_t N, class AP, class TV> |
michael@0 | 1138 | inline void |
michael@0 | 1139 | VectorBase<T, N, AP, TV>::swap(TV& other) |
michael@0 | 1140 | { |
michael@0 | 1141 | static_assert(N == 0, |
michael@0 | 1142 | "still need to implement this for N != 0"); |
michael@0 | 1143 | |
michael@0 | 1144 | // This only works when inline storage is always empty. |
michael@0 | 1145 | if (!usingInlineStorage() && other.usingInlineStorage()) { |
michael@0 | 1146 | other.mBegin = mBegin; |
michael@0 | 1147 | mBegin = inlineStorage(); |
michael@0 | 1148 | } else if (usingInlineStorage() && !other.usingInlineStorage()) { |
michael@0 | 1149 | mBegin = other.mBegin; |
michael@0 | 1150 | other.mBegin = other.inlineStorage(); |
michael@0 | 1151 | } else if (!usingInlineStorage() && !other.usingInlineStorage()) { |
michael@0 | 1152 | Swap(mBegin, other.mBegin); |
michael@0 | 1153 | } else { |
michael@0 | 1154 | // This case is a no-op, since we'd set both to use their inline storage. |
michael@0 | 1155 | } |
michael@0 | 1156 | |
michael@0 | 1157 | Swap(mLength, other.mLength); |
michael@0 | 1158 | Swap(mCapacity, other.mCapacity); |
michael@0 | 1159 | #ifdef DEBUG |
michael@0 | 1160 | Swap(mReserved, other.mReserved); |
michael@0 | 1161 | #endif |
michael@0 | 1162 | } |
michael@0 | 1163 | |
michael@0 | 1164 | /* |
michael@0 | 1165 | * STL-like container providing a short-lived, dynamic buffer. Vector calls the |
michael@0 | 1166 | * constructors/destructors of all elements stored in its internal buffer, so |
michael@0 | 1167 | * non-PODs may be safely used. Additionally, Vector will store the first N |
michael@0 | 1168 | * elements in-place before resorting to dynamic allocation. |
michael@0 | 1169 | * |
michael@0 | 1170 | * T requirements: |
michael@0 | 1171 | * - default and copy constructible, assignable, destructible |
michael@0 | 1172 | * - operations do not throw |
michael@0 | 1173 | * N requirements: |
michael@0 | 1174 | * - any value; however, N is clamped so at most 1024 bytes are stored in-line |
michael@0 | 1175 | * AllocPolicy: |
michael@0 | 1176 | * - see "Allocation policies" in AllocPolicy.h (defaults to |
michael@0 | 1177 | * mozilla::MallocAllocPolicy) |
michael@0 | 1178 | * |
michael@0 | 1179 | * Vector is not reentrant: T member functions called during Vector member |
michael@0 | 1180 | * functions must not call back into the same object! |
michael@0 | 1181 | */ |
michael@0 | 1182 | template<typename T, |
michael@0 | 1183 | size_t MinInlineCapacity = 0, |
michael@0 | 1184 | class AllocPolicy = MallocAllocPolicy> |
michael@0 | 1185 | class Vector |
michael@0 | 1186 | : public VectorBase<T, |
michael@0 | 1187 | MinInlineCapacity, |
michael@0 | 1188 | AllocPolicy, |
michael@0 | 1189 | Vector<T, MinInlineCapacity, AllocPolicy> > |
michael@0 | 1190 | { |
michael@0 | 1191 | typedef VectorBase<T, MinInlineCapacity, AllocPolicy, Vector> Base; |
michael@0 | 1192 | |
michael@0 | 1193 | public: |
michael@0 | 1194 | Vector(AllocPolicy alloc = AllocPolicy()) : Base(alloc) {} |
michael@0 | 1195 | Vector(Vector&& vec) : Base(Move(vec)) {} |
michael@0 | 1196 | Vector& operator=(Vector&& vec) { |
michael@0 | 1197 | return Base::operator=(Move(vec)); |
michael@0 | 1198 | } |
michael@0 | 1199 | }; |
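
/*
 * For illustration, basic use of Vector with the default MallocAllocPolicy;
 * every fallible operation must be checked:
 *
 *   mozilla::Vector<int, 8> v;   // the first 8 elements are stored in-line
 *   for (int i = 0; i < 100; i++) {
 *     if (!v.append(i))
 *       <handle failure>
 *   }
 *   int sum = 0;
 *   for (size_t i = 0; i < v.length(); i++)
 *     sum += v[i];
 */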
michael@0 | 1200 | |
michael@0 | 1201 | } // namespace mozilla |
michael@0 | 1202 | |
michael@0 | 1203 | #ifdef _MSC_VER |
michael@0 | 1204 | #pragma warning(pop) |
michael@0 | 1205 | #endif |
michael@0 | 1206 | |
michael@0 | 1207 | #endif /* mozilla_Vector_h */ |